From d3fb9f798ad655427326478062cfae0bb71ac649 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?=
Date: Sun, 31 Mar 2024 09:57:00 +0300
Subject: [PATCH] Merge commit 'f5a9250147f6569d8d89334dc9cca79c0322729f' into sync-from-ra

---
 .github/workflows/ci.yaml | 4 +-
 .github/workflows/publish-libs.yaml | 1 +
 .github/workflows/release.yaml | 17 +-
 Cargo.lock | 43 +-
 Cargo.toml | 11 +-
 crates/base-db/src/input.rs | 84 +-
 crates/base-db/src/lib.rs | 6 +-
 crates/flycheck/src/lib.rs | 8 +-
 crates/flycheck/src/test_runner.rs | 5 +-
 crates/hir-def/src/attr.rs | 8 +-
 crates/hir-def/src/data.rs | 9 +-
 crates/hir-def/src/generics.rs | 80 +-
 crates/hir-def/src/item_tree/pretty.rs | 2 +-
 crates/hir-def/src/lang_item.rs | 2 +-
 crates/hir-def/src/lib.rs | 4 +-
 .../hir-def/src/macro_expansion_tests/mbe.rs | 85 +
 crates/hir-def/src/nameres.rs | 6 +-
 crates/hir-def/src/nameres/attr_resolution.rs | 2 +
 crates/hir-def/src/nameres/collector.rs | 42 +-
 crates/hir-def/src/nameres/path_resolution.rs | 2 +-
 crates/hir-def/src/nameres/proc_macro.rs | 14 +-
 crates/hir-def/src/resolver.rs | 20 +
 crates/hir-expand/src/attrs.rs | 165 +-
 crates/hir-expand/src/builtin_fn_macro.rs | 4 +-
 crates/hir-expand/src/cfg_process.rs | 12 +-
 crates/hir-expand/src/db.rs | 65 +-
 crates/hir-expand/src/declarative.rs | 38 +-
 crates/hir-expand/src/lib.rs | 21 +-
 crates/hir-expand/src/mod_path.rs | 42 +-
 crates/hir-expand/src/proc_macro.rs | 2 +-
 crates/hir-ty/Cargo.toml | 3 +-
 crates/hir-ty/src/builder.rs | 34 +-
 crates/hir-ty/src/chalk_db.rs | 13 +
 crates/hir-ty/src/chalk_ext.rs | 14 +
 crates/hir-ty/src/consteval/tests.rs | 27 +
 crates/hir-ty/src/db.rs | 16 +-
 crates/hir-ty/src/diagnostics/expr.rs | 17 +-
 .../diagnostics/match_check/pat_analysis.rs | 84 +-
 crates/hir-ty/src/display.rs | 82 +-
 crates/hir-ty/src/infer.rs | 159 +-
 crates/hir-ty/src/infer/coerce.rs | 17 +
 crates/hir-ty/src/infer/expr.rs | 39 +-
 crates/hir-ty/src/infer/path.rs | 9 +-
 crates/hir-ty/src/infer/unify.rs | 28 +-
 crates/hir-ty/src/layout.rs | 3 +
 crates/hir-ty/src/lib.rs | 95 +-
 crates/hir-ty/src/lower.rs | 239 +-
 crates/hir-ty/src/mapping.rs | 8 +
 crates/hir-ty/src/method_resolution.rs | 15 +-
 crates/hir-ty/src/mir/eval.rs | 56 +-
 crates/hir-ty/src/mir/eval/shim.rs | 65 +-
 crates/hir-ty/src/mir/monomorphization.rs | 15 +-
 crates/hir-ty/src/tests.rs | 2 +-
 .../hir-ty/src/tests/display_source_code.rs | 2 +-
 crates/hir-ty/src/tests/patterns.rs | 2 +-
 crates/hir-ty/src/tests/regression.rs | 4 +-
 crates/hir-ty/src/tests/simple.rs | 6 +-
 crates/hir-ty/src/tests/traits.rs | 109 +
 crates/hir-ty/src/utils.rs | 166 +-
 crates/hir/src/display.rs | 33 +-
 crates/hir/src/lib.rs | 60 +-
 crates/hir/src/term_search/tactics.rs | 4 +-
 .../src/handlers/extract_function.rs | 2 +-
 .../src/handlers/generate_delegate_methods.rs | 2 +-
 crates/ide-completion/src/completions/dot.rs | 4 +-
 .../src/completions/item_list/trait_impl.rs | 8 +-
 .../ide-completion/src/completions/keyword.rs | 6 +-
 crates/ide-completion/src/context.rs | 2 +-
 crates/ide-completion/src/item.rs | 3 +-
 crates/ide-completion/src/render.rs | 49 +-
 crates/ide-completion/src/render/function.rs | 8 +-
 crates/ide-completion/src/tests/expression.rs | 2 +-
 crates/ide-completion/src/tests/predicate.rs | 4 +-
 crates/ide-completion/src/tests/special.rs | 47 +-
 crates/ide-completion/src/tests/type_pos.rs | 8 +-
 crates/ide-db/src/lib.rs | 1 +
 crates/ide-diagnostics/Cargo.toml | 1 +
 .../src/handlers/missing_match_arms.rs | 27 +
 .../src/handlers/mutability_errors.rs | 50 +-
 .../src/handlers/remove_trailing_return.rs | 19 +-
 .../src/handlers/remove_unnecessary_else.rs | 23 +-
 .../src/handlers/unlinked_file.rs | 7 +-
 .../src/handlers/unused_variables.rs | 28 +-
 crates/ide-diagnostics/src/lib.rs | 32 +-
 crates/ide-diagnostics/src/tests.rs | 4 +
 crates/ide/Cargo.toml | 1 +
 crates/ide/src/doc_links.rs | 16 +-
 crates/ide/src/doc_links/tests.rs | 30 +-
 crates/ide/src/file_structure.rs | 13 +-
 crates/ide/src/goto_implementation.rs | 71 +
 crates/ide/src/hover.rs | 1 +
 crates/ide/src/hover/render.rs | 3 +
 crates/ide/src/hover/tests.rs | 175 +-
 crates/ide/src/inlay_hints.rs | 96 +-
 crates/ide/src/inlay_hints/adjustment.rs | 1 -
 crates/ide/src/inlay_hints/bind_pat.rs | 9 +-
 crates/ide/src/inlay_hints/binding_mode.rs | 2 -
 crates/ide/src/inlay_hints/chaining.rs | 1 -
 crates/ide/src/inlay_hints/closing_brace.rs | 1 -
 .../ide/src/inlay_hints/closure_captures.rs | 5 -
 crates/ide/src/inlay_hints/closure_ret.rs | 1 -
 crates/ide/src/inlay_hints/discriminant.rs | 1 -
 crates/ide/src/inlay_hints/fn_lifetime_fn.rs | 3 -
 crates/ide/src/inlay_hints/implicit_drop.rs | 1 -
 crates/ide/src/inlay_hints/implicit_static.rs | 1 -
 crates/ide/src/inlay_hints/param_name.rs | 1 -
 crates/ide/src/inlay_hints/range_exclusive.rs | 1 -
 crates/ide/src/lib.rs | 28 +-
 crates/ide/src/static_index.rs | 1 +
 crates/ide/src/status.rs | 10 +-
 .../ide/src/syntax_highlighting/highlight.rs | 48 +-
 crates/ide/src/syntax_highlighting/html.rs | 1 +
 crates/ide/src/syntax_highlighting/tags.rs | 9 +-
 .../test_data/highlight_assoc_functions.html | 7 +-
 .../test_data/highlight_attributes.html | 8 +-
 .../test_data/highlight_block_mod_items.html | 3 +-
 .../test_data/highlight_const.html | 83 +
 .../test_data/highlight_crate_root.html | 9 +-
 .../test_data/highlight_default_library.html | 3 +-
 .../test_data/highlight_doctest.html | 15 +-
 .../test_data/highlight_extern_crate.html | 1 +
 .../test_data/highlight_general.html | 57 +-
 .../test_data/highlight_injection.html | 1 +
 .../test_data/highlight_keywords.html | 1 +
 .../test_data/highlight_lifetimes.html | 1 +
 .../test_data/highlight_macros.html | 1 +
 .../highlight_module_docs_inline.html | 1 +
 .../highlight_module_docs_outline.html | 1 +
 .../test_data/highlight_operators.html | 1 +
 .../test_data/highlight_rainbow.html | 1 +
 .../test_data/highlight_strings.html | 5 +-
 .../test_data/highlight_unsafe.html | 11 +-
 crates/ide/src/syntax_highlighting/tests.rs | 76 +-
 crates/ide/src/test_explorer.rs | 68 +-
 crates/ide/src/typing.rs | 33 +-
 crates/load-cargo/Cargo.toml | 1 +
 crates/load-cargo/src/lib.rs | 4 +-
 crates/mbe/src/benchmark.rs | 20 +-
 crates/mbe/src/expander.rs | 45 +-
 crates/mbe/src/expander/matcher.rs | 160 +-
 crates/mbe/src/expander/transcriber.rs | 99 +-
 crates/mbe/src/lib.rs | 64 +-
 crates/mbe/src/parser.rs | 100 +-
 crates/mbe/src/syntax_bridge.rs | 56 +-
 crates/mbe/src/to_parser_input.rs | 6 +-
 crates/mbe/src/tt_iter.rs | 14 +-
 crates/parser/src/grammar/expressions/atom.rs | 58 +-
 .../inline/err/0034_match_arms_recovery.rast | 113 +
 .../inline/err/0034_match_arms_recovery.rs | 11 +
 crates/paths/Cargo.toml | 6 +-
 crates/paths/src/lib.rs | 242 +-
 crates/proc-macro-api/Cargo.toml | 2 +-
 crates/proc-macro-api/src/lib.rs | 5 +-
 crates/proc-macro-api/src/msg.rs | 12 +-
 crates/proc-macro-api/src/msg/flat.rs | 2 -
 crates/proc-macro-api/src/process.rs | 4 +-
 crates/proc-macro-srv/src/dylib.rs | 47 +-
 crates/proc-macro-srv/src/lib.rs | 26 +-
 crates/proc-macro-srv/src/proc_macros.rs | 2 +-
 .../proc-macro-srv/src/server/token_stream.rs | 4 +-
 crates/proc-macro-srv/src/tests/mod.rs | 16 +-
 crates/project-model/Cargo.toml | 3
+- crates/project-model/src/build_scripts.rs | 19 +- crates/project-model/src/cargo_workspace.rs | 21 +- crates/project-model/src/lib.rs | 4 +- crates/project-model/src/project_json.rs | 20 +- crates/project-model/src/sysroot.rs | 6 +- crates/project-model/src/tests.rs | 20 +- crates/project-model/src/workspace.rs | 86 +- crates/rust-analyzer/Cargo.toml | 3 +- crates/rust-analyzer/src/bin/main.rs | 2 +- .../rust-analyzer/src/cli/analysis_stats.rs | 5 +- crates/rust-analyzer/src/cli/diagnostics.rs | 2 +- crates/rust-analyzer/src/cli/flags.rs | 1 + crates/rust-analyzer/src/cli/lsif.rs | 2 +- crates/rust-analyzer/src/cli/rustc_tests.rs | 2 +- crates/rust-analyzer/src/cli/scip.rs | 12 +- crates/rust-analyzer/src/config.rs | 49 +- .../rust-analyzer/src/diagnostics/to_proto.rs | 5 +- crates/rust-analyzer/src/global_state.rs | 18 +- .../src/handlers/notification.rs | 2 +- crates/rust-analyzer/src/handlers/request.rs | 31 +- .../src/integrated_benchmarks.rs | 6 +- crates/rust-analyzer/src/lsp/ext.rs | 4 +- .../rust-analyzer/src/lsp/semantic_tokens.rs | 3 +- crates/rust-analyzer/src/lsp/to_proto.rs | 107 +- crates/rust-analyzer/src/main_loop.rs | 36 +- crates/rust-analyzer/src/reload.rs | 92 +- crates/rust-analyzer/tests/crate_graph.rs | 2 +- crates/rust-analyzer/tests/slow-tests/main.rs | 13 +- .../rust-analyzer/tests/slow-tests/support.rs | 6 +- .../rust-analyzer/tests/slow-tests/testdir.rs | 22 +- crates/span/src/lib.rs | 50 + crates/syntax/Cargo.toml | 4 - crates/syntax/rust.ungram | 2 +- crates/syntax/src/ast/generated/nodes.rs | 3536 ++++++++--------- crates/syntax/src/ast/generated/tokens.rs | 80 +- crates/syntax/src/tests.rs | 25 +- crates/test-fixture/src/lib.rs | 25 +- crates/toolchain/Cargo.toml | 3 +- crates/toolchain/src/lib.rs | 43 +- crates/tt/Cargo.toml | 5 +- crates/tt/src/lib.rs | 11 +- crates/vfs-notify/src/lib.rs | 6 +- crates/vfs/src/lib.rs | 57 +- crates/vfs/src/loader.rs | 2 +- crates/vfs/src/vfs_path.rs | 2 +- docs/dev/lsp-extensions.md | 14 +- docs/user/generated_config.adoc | 5 + editors/code/package.json | 9 + editors/code/src/config.ts | 25 +- editors/code/src/debug.ts | 6 +- editors/code/src/lsp_ext.ts | 6 +- editors/code/src/run.ts | 28 +- editors/code/src/tasks.ts | 50 +- editors/code/src/test_explorer.ts | 63 +- xtask/Cargo.toml | 4 + xtask/src/codegen.rs | 2 + .../src/codegen/grammar.rs | 72 +- .../src/codegen/grammar}/ast_src.rs | 0 xtask/src/dist.rs | 21 +- xtask/src/flags.rs | 141 +- xtask/src/install.rs | 16 +- 223 files changed, 5810 insertions(+), 3711 deletions(-) create mode 100644 crates/ide/src/syntax_highlighting/test_data/highlight_const.html create mode 100644 crates/parser/test_data/parser/inline/err/0034_match_arms_recovery.rast create mode 100644 crates/parser/test_data/parser/inline/err/0034_match_arms_recovery.rs rename crates/syntax/src/tests/sourcegen_ast.rs => xtask/src/codegen/grammar.rs (93%) rename {crates/syntax/src/tests => xtask/src/codegen/grammar}/ast_src.rs (100%) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2d8946520d5e7..08ad10c29718c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -71,7 +71,7 @@ jobs: run: echo "::add-matcher::.github/rust.json" - name: Cache Dependencies - uses: Swatinem/rust-cache@988c164c3d0e93c4dbab36aaf5bbeb77425b2894 + uses: Swatinem/rust-cache@640a22190e7a783d4c409684cea558f081f92012 with: key: ${{ env.RUST_CHANNEL }} @@ -140,7 +140,7 @@ jobs: rustup target add ${{ env.targets }} ${{ env.targets_ide }} - name: Cache Dependencies - uses: 
Swatinem/rust-cache@988c164c3d0e93c4dbab36aaf5bbeb77425b2894 + uses: Swatinem/rust-cache@640a22190e7a783d4c409684cea558f081f92012 - name: Check run: | diff --git a/.github/workflows/publish-libs.yaml b/.github/workflows/publish-libs.yaml index 862373ec1cce0..34ca53e2e53ad 100644 --- a/.github/workflows/publish-libs.yaml +++ b/.github/workflows/publish-libs.yaml @@ -32,4 +32,5 @@ jobs: git config --global user.name "GitHub Action" # Remove r-a crates from the workspaces so we don't auto-publish them as well sed -i 's/ "crates\/\*"//' ./Cargo.toml + sed -i 's/ "xtask\/"//' ./Cargo.toml cargo workspaces publish --yes --exact --from-git --no-git-commit --allow-dirty diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index dc0a6c2d91f4b..11014338d72f5 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -36,6 +36,7 @@ jobs: - os: ubuntu-20.04 target: x86_64-unknown-linux-gnu code-target: linux-x64 + container: rockylinux:8 - os: ubuntu-20.04 target: aarch64-unknown-linux-gnu code-target: linux-arm64 @@ -58,10 +59,18 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: ${{ env.FETCH_DEPTH }} + - name: Install toolchain dependencies + if: matrix.container == 'rockylinux:8' + shell: bash + run: | + dnf install -y gcc + curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL "https://sh.rustup.rs" | sh -s -- --profile minimal --default-toolchain none -y + echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH + - name: Install Rust toolchain run: | rustup update --no-self-update stable @@ -69,9 +78,9 @@ jobs: rustup component add rust-src - name: Install Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 - name: Update apt repositories if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf' @@ -181,7 +190,7 @@ jobs: - name: Install Nodejs uses: actions/setup-node@v4 with: - node-version: 18 + node-version: 20 - run: echo "TAG=$(date --iso -u)" >> $GITHUB_ENV if: github.ref == 'refs/heads/release' diff --git a/Cargo.lock b/Cargo.lock index 68ed32391b7fe..c7cf4479b3301 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -594,6 +594,7 @@ dependencies = [ "rustc-hash", "scoped-tls", "smallvec", + "span", "stdx", "syntax", "test-fixture", @@ -637,6 +638,7 @@ dependencies = [ "pulldown-cmark", "pulldown-cmark-to-cmark", "smallvec", + "span", "stdx", "syntax", "test-fixture", @@ -732,6 +734,7 @@ dependencies = [ "ide-db", "itertools", "once_cell", + "paths", "serde_json", "stdx", "syntax", @@ -931,6 +934,7 @@ dependencies = [ "hir-expand", "ide-db", "itertools", + "paths", "proc-macro-api", "project-model", "span", @@ -1225,6 +1229,9 @@ checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" [[package]] name = "paths" version = "0.0.0" +dependencies = [ + "camino", +] [[package]] name = "percent-encoding" @@ -1375,6 +1382,7 @@ dependencies = [ "semver", "serde", "serde_json", + "span", "stdx", "toolchain", "tracing", @@ -1432,9 +1440,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_abi" -version = "0.42.0" +version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2ae52e2d5b08762c9464b541345f519b8719d57b643b73632bade43ecece9dc" +checksum = "b8709df2a746f055316bc0c62bd30948695a25e734863bf6e1f9755403e010ab" dependencies = [ "bitflags 2.4.2", "ra-ap-rustc_index", @@ -1443,9 +1451,9 @@ dependencies = [ [[package]] 
name = "ra-ap-rustc_index" -version = "0.42.0" +version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfd7e10c7853fe79443d46e1d2d8ab09fe99926118e59653fb8b480d5045f126" +checksum = "9ad68bacffb87dcdbb23a3ce11261375078aaa06b85d348c49f39ffd5510dc20" dependencies = [ "arrayvec", "ra-ap-rustc_index_macros", @@ -1454,9 +1462,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index_macros" -version = "0.42.0" +version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47f1d1c589be6c9a9e852fadee0e60329c0f862e87442ac2fe5adae30663cc76" +checksum = "8782aaf3a113837c533dfb1c45df91cd17e1fdd1d2f9a20c2e0d1976025c4f1f" dependencies = [ "proc-macro2", "quote", @@ -1466,9 +1474,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_lexer" -version = "0.42.0" +version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa852373a757b4c723bbdc96ced7f575cad68a1e266e45fee12bc4c69a482d80" +checksum = "aab683fc8579d09eb72033bd5dc9ba6d701aa9645b5fed087ef19af71184dff3" dependencies = [ "unicode-properties", "unicode-xid", @@ -1476,9 +1484,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_parse_format" -version = "0.42.0" +version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2afe3c49accd95a53ac4d72ae13bafc7d115bdd80c8cd56ab09e6fc68f482210" +checksum = "0bcf9ff5edbf784b67b8ad5e03a068f1300fcc24062c0d476b3018965135d933" dependencies = [ "ra-ap-rustc_index", "ra-ap-rustc_lexer", @@ -1486,9 +1494,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.42.0" +version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1253da23515d80c377a3998731e0ec3794997b62b989fd47db73efbde6a0bd7c" +checksum = "d63d1e1d5b2a13273cee1a10011147418f40e12b70f70578ce1dee0f1cafc334" dependencies = [ "ra-ap-rustc_index", "rustc-hash", @@ -1598,6 +1606,7 @@ dependencies = [ "oorandom", "parking_lot", "parser", + "paths", "proc-macro-api", "profile", "project-model", @@ -1869,20 +1878,16 @@ dependencies = [ "itertools", "once_cell", "parser", - "proc-macro2", - "quote", "ra-ap-rustc_lexer", "rayon", "rowan", "rustc-hash", "smol_str", - "sourcegen", "stdx", "test-utils", "text-edit", "tracing", "triomphe", - "ungrammar", ] [[package]] @@ -2024,6 +2029,7 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" name = "toolchain" version = "0.0.0" dependencies = [ + "camino", "home", ] @@ -2109,7 +2115,6 @@ name = "tt" version = "0.0.0" dependencies = [ "smol_str", - "span", "stdx", "text-size", ] @@ -2438,8 +2443,12 @@ version = "0.1.0" dependencies = [ "anyhow", "flate2", + "itertools", + "proc-macro2", + "quote", "stdx", "time", + "ungrammar", "write-json", "xflags", "xshell", diff --git a/Cargo.toml b/Cargo.toml index 0679522efd667..d9343d2b96325 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -84,11 +84,11 @@ tt = { path = "./crates/tt", version = "0.0.0" } vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } -ra-ap-rustc_lexer = { version = "0.42.0", default-features = false } -ra-ap-rustc_parse_format = { version = "0.42.0", default-features = false } -ra-ap-rustc_index = { version = "0.42.0", default-features = false } -ra-ap-rustc_abi = { version = "0.42.0", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.42.0", default-features = false } +ra-ap-rustc_lexer = { version = "0.44.0", default-features = false 
} +ra-ap-rustc_parse_format = { version = "0.44.0", default-features = false } +ra-ap-rustc_index = { version = "0.44.0", default-features = false } +ra-ap-rustc_abi = { version = "0.44.0", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.44.0", default-features = false } # local crates that aren't published to crates.io. These should not have versions. sourcegen = { path = "./crates/sourcegen" } @@ -105,6 +105,7 @@ anyhow = "1.0.75" arrayvec = "0.7.4" bitflags = "2.4.1" cargo_metadata = "0.18.1" +camino = "1.1.6" chalk-solve = { version = "0.96.0", default-features = false } chalk-ir = "0.96.0" chalk-recursive = { version = "0.96.0", default-features = false } diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index b243b37b77b92..27eb05cd4dc56 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -6,11 +6,12 @@ //! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how //! actual IO is done and lowered to input. -use std::{fmt, mem, ops, str::FromStr}; +use std::{fmt, mem, ops}; use cfg::CfgOptions; use la_arena::{Arena, Idx, RawIdx}; use rustc_hash::{FxHashMap, FxHashSet}; +use span::Edition; use syntax::SmolStr; use triomphe::Arc; use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; @@ -293,42 +294,11 @@ pub struct CrateData { pub is_proc_macro: bool, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub enum Edition { - Edition2015, - Edition2018, - Edition2021, - Edition2024, -} - -impl Edition { - pub const CURRENT: Edition = Edition::Edition2021; - pub const DEFAULT: Edition = Edition::Edition2015; -} - #[derive(Default, Debug, Clone, PartialEq, Eq)] pub struct Env { entries: FxHashMap, } -impl Env { - pub fn new_for_test_fixture() -> Self { - Env { - entries: FxHashMap::from_iter([( - String::from("__ra_is_test_fixture"), - String::from("__ra_is_test_fixture"), - )]), - } - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub enum DependencyKind { - Normal, - Dev, - Build, -} - #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Dependency { pub crate_id: CrateId, @@ -530,13 +500,6 @@ impl CrateGraph { } } - // FIXME: this only finds one crate with the given root; we could have multiple - pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option { - let (crate_id, _) = - self.arena.iter().find(|(_crate_id, data)| data.root_file_id == file_id)?; - Some(crate_id) - } - pub fn sort_deps(&mut self) { self.arena .iter_mut() @@ -653,6 +616,10 @@ impl CrateGraph { } id_map } + + pub fn shrink_to_fit(&mut self) { + self.arena.shrink_to_fit(); + } } impl ops::Index for CrateGraph { @@ -670,32 +637,6 @@ impl CrateData { } } -impl FromStr for Edition { - type Err = ParseEditionError; - - fn from_str(s: &str) -> Result { - let res = match s { - "2015" => Edition::Edition2015, - "2018" => Edition::Edition2018, - "2021" => Edition::Edition2021, - "2024" => Edition::Edition2024, - _ => return Err(ParseEditionError { invalid_input: s.to_owned() }), - }; - Ok(res) - } -} - -impl fmt::Display for Edition { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(match self { - Edition::Edition2015 => "2015", - Edition::Edition2018 => "2018", - Edition::Edition2021 => "2021", - Edition::Edition2024 => "2024", - }) - } -} - impl Extend<(String, String)> for Env { fn extend>(&mut self, iter: T) { self.entries.extend(iter); @@ -722,19 +663,6 @@ impl Env { } } -#[derive(Debug)] -pub struct ParseEditionError { - invalid_input: 
String, -} - -impl fmt::Display for ParseEditionError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "invalid edition: {:?}", self.invalid_input) - } -} - -impl std::error::Error for ParseEditionError {} - #[derive(Debug)] pub struct CyclicDependenciesError { path: Vec<(CrateId, Option)>, diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index 5dcb580723fa0..785ff9ceffada 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -14,9 +14,9 @@ use triomphe::Arc; pub use crate::{ change::FileChange, input::{ - CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, - DependencyKind, Edition, Env, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, SourceRoot, - SourceRootId, TargetLayoutLoadResult, + CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env, + LangCrateOrigin, ProcMacroPaths, ReleaseChannel, SourceRoot, SourceRootId, + TargetLayoutLoadResult, }, }; pub use salsa::{self, Cancelled}; diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index f8efb52022205..4ee86954acd92 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -8,10 +8,10 @@ #![warn(rust_2018_idioms, unused_lifetimes)] -use std::{fmt, io, path::PathBuf, process::Command, time::Duration}; +use std::{fmt, io, process::Command, time::Duration}; use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; -use paths::{AbsPath, AbsPathBuf}; +use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::FxHashMap; use serde::Deserialize; @@ -53,7 +53,7 @@ pub enum FlycheckConfig { extra_args: Vec, extra_env: FxHashMap, ansi_color_output: bool, - target_dir: Option, + target_dir: Option, }, CustomCommand { command: String, @@ -363,7 +363,7 @@ impl FlycheckActor { }); cmd.arg("--manifest-path"); - cmd.arg(self.root.join("Cargo.toml").as_os_str()); + cmd.arg(self.root.join("Cargo.toml")); for target in target_triples { cmd.args(["--target", target.as_str()]); diff --git a/crates/flycheck/src/test_runner.rs b/crates/flycheck/src/test_runner.rs index 31378716b3ec4..9f761c9ead18a 100644 --- a/crates/flycheck/src/test_runner.rs +++ b/crates/flycheck/src/test_runner.rs @@ -55,13 +55,16 @@ pub struct CargoTestHandle { } // Example of a cargo test command: -// cargo test -- module::func -Z unstable-options --format=json +// cargo test --workspace --no-fail-fast -- module::func -Z unstable-options --format=json impl CargoTestHandle { pub fn new(path: Option<&str>) -> std::io::Result { let mut cmd = Command::new(Tool::Cargo.path()); cmd.env("RUSTC_BOOTSTRAP", "1"); cmd.arg("test"); + cmd.arg("--workspace"); + // --no-fail-fast is needed to ensure that all requested tests will run + cmd.arg("--no-fail-fast"); cmd.arg("--"); if let Some(path) = path { cmd.arg(path); diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index 21536098b82dd..fa7730f302ec7 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -148,12 +148,12 @@ impl Attrs { } } - pub fn lang(&self) -> Option<&SmolStr> { + pub fn lang(&self) -> Option<&str> { self.by_key("lang").string_value() } pub fn lang_item(&self) -> Option { - self.by_key("lang").string_value().and_then(|it| LangItem::from_str(it)) + self.by_key("lang").string_value().and_then(LangItem::from_str) } pub fn has_doc_hidden(&self) -> bool { @@ -178,7 +178,7 @@ impl Attrs { self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec()) } - pub fn export_name(&self) -> Option<&SmolStr> { + pub fn 
export_name(&self) -> Option<&str> { self.by_key("export_name").string_value() } @@ -565,7 +565,7 @@ impl<'attr> AttrQuery<'attr> { self.attrs().filter_map(|attr| attr.token_tree_value()) } - pub fn string_value(self) -> Option<&'attr SmolStr> { + pub fn string_value(self) -> Option<&'attr str> { self.attrs().find_map(|attr| attr.string_value()) } diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index b815c9b73ef83..da790f11516d9 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -453,8 +453,8 @@ impl ProcMacroData { ( def.name, match def.kind { - ProcMacroKind::CustomDerive { helpers } => Some(helpers), - ProcMacroKind::FnLike | ProcMacroKind::Attr => None, + ProcMacroKind::Derive { helpers } => Some(helpers), + ProcMacroKind::Bang | ProcMacroKind::Attr => None, }, ) } else { @@ -484,10 +484,11 @@ impl ExternCrateDeclData { let extern_crate = &item_tree[loc.id.value]; let name = extern_crate.name.clone(); + let krate = loc.container.krate(); let crate_id = if name == hir_expand::name![self] { - Some(loc.container.krate()) + Some(krate) } else { - db.crate_def_map(loc.container.krate()) + db.crate_def_map(krate) .extern_prelude() .find(|&(prelude_name, ..)| *prelude_name == name) .map(|(_, (root, _))| root.krate()) diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index 1d2c7c3a55fd0..4638b3771973c 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -22,8 +22,8 @@ use crate::{ lower::LowerCtx, nameres::{DefMap, MacroSubNs}, type_ref::{ConstRef, LifetimeRef, TypeBound, TypeRef}, - AdtId, ConstParamId, GenericDefId, HasModule, ItemTreeLoc, LocalTypeOrConstParamId, Lookup, - TypeOrConstParamId, TypeParamId, + AdtId, ConstParamId, GenericDefId, HasModule, ItemTreeLoc, LifetimeParamId, + LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId, }; /// Data about a generic type parameter (to a function, struct, impl, ...). @@ -102,6 +102,52 @@ impl TypeOrConstParamData { impl_from!(TypeParamData, ConstParamData for TypeOrConstParamData); +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub enum GenericParamData { + TypeParamData(TypeParamData), + ConstParamData(ConstParamData), + LifetimeParamData(LifetimeParamData), +} + +impl GenericParamData { + pub fn name(&self) -> Option<&Name> { + match self { + GenericParamData::TypeParamData(it) => it.name.as_ref(), + GenericParamData::ConstParamData(it) => Some(&it.name), + GenericParamData::LifetimeParamData(it) => Some(&it.name), + } + } + + pub fn type_param(&self) -> Option<&TypeParamData> { + match self { + GenericParamData::TypeParamData(it) => Some(it), + _ => None, + } + } + + pub fn const_param(&self) -> Option<&ConstParamData> { + match self { + GenericParamData::ConstParamData(it) => Some(it), + _ => None, + } + } + + pub fn lifetime_param(&self) -> Option<&LifetimeParamData> { + match self { + GenericParamData::LifetimeParamData(it) => Some(it), + _ => None, + } + } +} + +impl_from!(TypeParamData, ConstParamData, LifetimeParamData for GenericParamData); + +pub enum GenericParamDataRef<'a> { + TypeParamData(&'a TypeParamData), + ConstParamData(&'a ConstParamData), + LifetimeParamData(&'a LifetimeParamData), +} + /// Data about the generic parameters of a function, struct, impl, etc. 
#[derive(Clone, PartialEq, Eq, Debug, Hash)] pub struct GenericParams { @@ -358,6 +404,15 @@ impl GenericParamsCollector { } impl GenericParams { + /// Number of Generic parameters (type_or_consts + lifetimes) + pub fn len(&self) -> usize { + self.type_or_consts.len() + self.lifetimes.len() + } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + /// Iterator of type_or_consts field pub fn iter( &self, @@ -365,6 +420,13 @@ impl GenericParams { self.type_or_consts.iter() } + /// Iterator of lifetimes field + pub fn iter_lt( + &self, + ) -> impl DoubleEndedIterator, &LifetimeParamData)> { + self.lifetimes.iter() + } + pub(crate) fn generic_params_query( db: &dyn DefDatabase, def: GenericDefId, @@ -507,4 +569,18 @@ impl GenericParams { .then(|| id) }) } + + pub fn find_lifetime_by_name( + &self, + name: &Name, + parent: GenericDefId, + ) -> Option { + self.lifetimes.iter().find_map(|(id, p)| { + if &p.name == name { + Some(LifetimeParamId { local_id: id, parent }) + } else { + None + } + }) + } } diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index 953bf6b85d64a..0c84057950bdf 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -526,7 +526,7 @@ impl Printer<'_> { } fn print_generic_params(&mut self, params: &GenericParams) { - if params.type_or_consts.is_empty() && params.lifetimes.is_empty() { + if params.is_empty() { return; } diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs index 7d98f6cfe88c1..3a07c678428e4 100644 --- a/crates/hir-def/src/lang_item.rs +++ b/crates/hir-def/src/lang_item.rs @@ -192,7 +192,7 @@ impl LangItems { pub(crate) fn lang_attr(db: &dyn DefDatabase, item: AttrDefId) -> Option { let attrs = db.attrs(item); - attrs.by_key("lang").string_value().and_then(|it| LangItem::from_str(it)) + attrs.by_key("lang").string_value().and_then(LangItem::from_str) } pub(crate) fn notable_traits_in_deps( diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 828842de7e8a8..46898ce542d6d 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -73,7 +73,7 @@ use std::{ use base_db::{ impl_intern_key, salsa::{self, impl_intern_value_trivial}, - CrateId, Edition, + CrateId, }; use hir_expand::{ builtin_attr_macro::BuiltinAttrExpander, @@ -90,7 +90,7 @@ use hir_expand::{ use item_tree::ExternBlock; use la_arena::Idx; use nameres::DefMap; -use span::{AstIdNode, FileAstId, FileId, SyntaxContextId}; +use span::{AstIdNode, Edition, FileAstId, FileId, SyntaxContextId}; use stdx::impl_from; use syntax::{ast, AstNode}; diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index 965f329acb9eb..c5c26e26bc0e7 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -1449,6 +1449,7 @@ ok!(); #[test] fn test_new_std_matches() { check( + //- edition:2021 r#" macro_rules! matches { ($expression:expr, $pattern:pat $(if $guard:expr)? $(,)?) => { @@ -1480,6 +1481,90 @@ fn main() { ); } +#[test] +fn test_hygienic_pat() { + check( + r#" +//- /new.rs crate:new deps:old edition:2015 +old::make!(); +fn main() { + matches!(0, 0 | 1 if true); +} +//- /old.rs crate:old edition:2021 +#[macro_export] +macro_rules! make { + () => { + macro_rules! 
matches { + ($expression:expr, $pattern:pat if $guard:expr ) => { + match $expression { + $pattern if $guard => true, + _ => false + } + }; + } + } +} + "#, + expect![[r#" +macro_rules !matches { + ($expression: expr, $pattern: pat if $guard: expr) = > { + match $expression { + $pattern if $guard = > true , _ = > false + } + } + ; +} +fn main() { + match 0 { + 0|1 if true =>true , _=>false + }; +} +"#]], + ); + check( + r#" +//- /new.rs crate:new deps:old edition:2021 +old::make!(); +fn main() { + matches/*+errors*/!(0, 0 | 1 if true); +} +//- /old.rs crate:old edition:2015 +#[macro_export] +macro_rules! make { + () => { + macro_rules! matches { + ($expression:expr, $pattern:pat if $guard:expr ) => { + match $expression { + $pattern if $guard => true, + _ => false + } + }; + } + } +} + "#, + expect![[r#" +macro_rules !matches { + ($expression: expr, $pattern: pat if $guard: expr) = > { + match $expression { + $pattern if $guard = > true , _ = > false + } + } + ; +} +fn main() { + /* error: unexpected token in input *//* parse error: expected expression */ +/* parse error: expected FAT_ARROW */ +/* parse error: expected `,` */ +/* parse error: expected pattern */ +match 0 { + 0 if $guard=>true , _=>false + }; +} +"#]], + ); +} + #[test] fn test_dollar_crate_lhs_is_not_meta() { check( diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index b56dee3efb554..a528c4cc6972d 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -59,14 +59,14 @@ mod tests; use std::ops::Deref; -use base_db::{CrateId, Edition, FileId}; +use base_db::{CrateId, FileId}; use hir_expand::{ name::Name, proc_macro::ProcMacroKind, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, }; use itertools::Itertools; use la_arena::Arena; use rustc_hash::{FxHashMap, FxHashSet}; -use span::{FileAstId, ROOT_ERASED_FILE_AST_ID}; +use span::{Edition, FileAstId, ROOT_ERASED_FILE_AST_ID}; use stdx::format_to; use syntax::{ast, SmolStr}; use triomphe::Arc; @@ -737,7 +737,7 @@ impl MacroSubNs { MacroId::ProcMacroId(it) => { return match it.lookup(db).kind { ProcMacroKind::CustomDerive | ProcMacroKind::Attr => Self::Attr, - ProcMacroKind::FuncLike => Self::Bang, + ProcMacroKind::Bang => Self::Bang, }; } }; diff --git a/crates/hir-def/src/nameres/attr_resolution.rs b/crates/hir-def/src/nameres/attr_resolution.rs index 662c80edf3247..eb7f4c05ae217 100644 --- a/crates/hir-def/src/nameres/attr_resolution.rs +++ b/crates/hir-def/src/nameres/attr_resolution.rs @@ -136,6 +136,7 @@ pub(super) fn derive_macro_as_call_id( call_site: SyntaxContextId, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>, + derive_macro_id: MacroCallId, ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> { let (macro_id, def_id) = resolver(item_attr.path.clone()) .filter(|(_, def_id)| def_id.is_derive()) @@ -147,6 +148,7 @@ pub(super) fn derive_macro_as_call_id( ast_id: item_attr.ast_id, derive_index: derive_pos, derive_attr_index, + derive_macro_id, }, call_site, ); diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 3d026447fb743..ae8f028e488dc 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -5,7 +5,7 @@ use std::{cmp::Ordering, iter, mem, ops::Not}; -use base_db::{CrateId, Dependency, Edition, FileId}; +use base_db::{CrateId, Dependency, FileId}; use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ @@ -22,9 +22,9 @@ use itertools::{izip, Itertools}; 
use la_arena::Idx; use limit::Limit; use rustc_hash::{FxHashMap, FxHashSet}; -use span::{ErasedFileAstId, FileAstId, Span, SyntaxContextId}; +use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContextId}; use stdx::always; -use syntax::{ast, SmolStr}; +use syntax::ast; use triomphe::Arc; use crate::{ @@ -237,6 +237,8 @@ enum MacroDirectiveKind { derive_attr: AttrId, derive_pos: usize, ctxt: SyntaxContextId, + /// The "parent" macro it is resolved to. + derive_macro_id: MacroCallId, }, Attr { ast_id: AstIdWithPath, @@ -312,7 +314,7 @@ impl DefCollector<'_> { } } () if *attr_name == hir_expand::name![crate_type] => { - if let Some("proc-macro") = attr.string_value().map(SmolStr::as_str) { + if let Some("proc-macro") = attr.string_value() { self.is_proc_macro = true; } } @@ -602,7 +604,7 @@ impl DefCollector<'_> { .intern(self.db); self.define_proc_macro(def.name.clone(), proc_macro_id); let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap(); - if let ProcMacroKind::CustomDerive { helpers } = def.kind { + if let ProcMacroKind::Derive { helpers } = def.kind { crate_data.exported_derives.insert(self.db.macro_def(proc_macro_id.into()), helpers); } crate_data.fn_proc_macro_mapping.insert(fn_id, proc_macro_id); @@ -1146,7 +1148,13 @@ impl DefCollector<'_> { return Resolved::Yes; } } - MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, ctxt: call_site } => { + MacroDirectiveKind::Derive { + ast_id, + derive_attr, + derive_pos, + ctxt: call_site, + derive_macro_id, + } => { let id = derive_macro_as_call_id( self.db, ast_id, @@ -1155,6 +1163,7 @@ impl DefCollector<'_> { *call_site, self.def_map.krate, resolver, + *derive_macro_id, ); if let Ok((macro_id, def_id, call_id)) = id { @@ -1224,6 +1233,8 @@ impl DefCollector<'_> { _ => return Resolved::No, }; + let call_id = + attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def); if let MacroDefId { kind: MacroDefKind::BuiltInAttr( @@ -1252,6 +1263,7 @@ impl DefCollector<'_> { return recollect_without(self); } }; + let ast_id = ast_id.with_value(ast_adt_id); match attr.parse_path_comma_token_tree(self.db.upcast()) { @@ -1267,6 +1279,7 @@ impl DefCollector<'_> { derive_attr: attr.id, derive_pos: idx, ctxt: call_site.ctx, + derive_macro_id: call_id, }, container: directive.container, }); @@ -1301,10 +1314,6 @@ impl DefCollector<'_> { return recollect_without(self); } - // Not resolved to a derive helper or the derive attribute, so try to treat as a normal attribute. - let call_id = - attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def); - // Skip #[test]/#[bench] expansion, which would merely result in more memory usage // due to duplicating functions into macro expansions if matches!( @@ -1460,13 +1469,20 @@ impl DefCollector<'_> { )); } } - MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, ctxt: _ } => { + MacroDirectiveKind::Derive { + ast_id, + derive_attr, + derive_pos, + derive_macro_id, + .. 
+ } => { self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call( directive.module_id, MacroCallKind::Derive { ast_id: ast_id.ast_id, derive_attr_index: *derive_attr, derive_index: *derive_pos as u32, + derive_macro_id: *derive_macro_id, }, ast_id.path.clone(), )); @@ -1902,7 +1918,7 @@ impl ModCollector<'_, '_> { } fn collect_module(&mut self, module_id: FileItemTreeId, attrs: &Attrs) { - let path_attr = attrs.by_key("path").string_value().map(SmolStr::as_str); + let path_attr = attrs.by_key("path").string_value(); let is_macro_use = attrs.by_key("macro_use").exists(); let module = &self.item_tree[module_id]; match &module.kind { @@ -2146,7 +2162,7 @@ impl ModCollector<'_, '_> { Some(it) => { // FIXME: a hacky way to create a Name from string. name = tt::Ident { - text: it.clone(), + text: it.into(), span: Span { range: syntax::TextRange::empty(syntax::TextSize::new(0)), anchor: span::SpanAnchor { diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs index 9e53b03728304..ee29b89f3d3b5 100644 --- a/crates/hir-def/src/nameres/path_resolution.rs +++ b/crates/hir-def/src/nameres/path_resolution.rs @@ -10,8 +10,8 @@ //! //! `ReachedFixedPoint` signals about this. -use base_db::Edition; use hir_expand::{name::Name, Lookup}; +use span::Edition; use triomphe::Arc; use crate::{ diff --git a/crates/hir-def/src/nameres/proc_macro.rs b/crates/hir-def/src/nameres/proc_macro.rs index c126fdac1c62f..5052708dc9391 100644 --- a/crates/hir-def/src/nameres/proc_macro.rs +++ b/crates/hir-def/src/nameres/proc_macro.rs @@ -13,18 +13,16 @@ pub struct ProcMacroDef { #[derive(Debug, PartialEq, Eq)] pub enum ProcMacroKind { - CustomDerive { helpers: Box<[Name]> }, - FnLike, + Derive { helpers: Box<[Name]> }, + Bang, Attr, } impl ProcMacroKind { pub(super) fn to_basedb_kind(&self) -> hir_expand::proc_macro::ProcMacroKind { match self { - ProcMacroKind::CustomDerive { .. } => { - hir_expand::proc_macro::ProcMacroKind::CustomDerive - } - ProcMacroKind::FnLike => hir_expand::proc_macro::ProcMacroKind::FuncLike, + ProcMacroKind::Derive { .. 
} => hir_expand::proc_macro::ProcMacroKind::CustomDerive, + ProcMacroKind::Bang => hir_expand::proc_macro::ProcMacroKind::Bang, ProcMacroKind::Attr => hir_expand::proc_macro::ProcMacroKind::Attr, } } @@ -34,13 +32,13 @@ impl Attrs { #[rustfmt::skip] pub fn parse_proc_macro_decl(&self, func_name: &Name) -> Option { if self.is_proc_macro() { - Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::FnLike }) + Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Bang }) } else if self.is_proc_macro_attribute() { Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Attr }) } else if self.by_key("proc_macro_derive").exists() { let derive = self.by_key("proc_macro_derive").tt_values().next()?; let def = parse_macro_name_and_helper_attrs(&derive.token_trees) - .map(|(name, helpers)| ProcMacroDef { name, kind: ProcMacroKind::CustomDerive { helpers } }); + .map(|(name, helpers)| ProcMacroDef { name, kind: ProcMacroKind::Derive { helpers } }); if def.is_none() { tracing::trace!("malformed `#[proc_macro_derive]`: {}", derive); diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index 226d6f513f5d6..fadab858aa149 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -24,6 +24,7 @@ use crate::{ nameres::{DefMap, MacroSubNs}, path::{ModPath, Path, PathKind}, per_ns::PerNs, + type_ref::LifetimeRef, visibility::{RawVisibility, Visibility}, AdtId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, @@ -120,6 +121,12 @@ pub enum ValueNs { GenericParam(ConstParamId), } +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum LifetimeNs { + Static, + LifetimeParam(LifetimeParamId), +} + impl Resolver { /// Resolve known trait from std, like `std::futures::Future` pub fn resolve_known_trait(&self, db: &dyn DefDatabase, path: &ModPath) -> Option { @@ -418,6 +425,19 @@ impl Resolver { self.resolve_path_as_macro(db, path, expected_macro_kind).map(|(it, _)| db.macro_def(it)) } + pub fn resolve_lifetime(&self, lifetime: &LifetimeRef) -> Option { + if lifetime.name == name::known::STATIC_LIFETIME { + return Some(LifetimeNs::Static); + } + + self.scopes().find_map(|scope| match scope { + Scope::GenericParams { def, params } => { + params.find_lifetime_by_name(&lifetime.name, *def).map(LifetimeNs::LifetimeParam) + } + _ => None, + }) + } + /// Returns a set of names available in the current scope. /// /// Note that this is a somewhat fuzzy concept -- internally, the compiler diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index af3ecdcd5e32a..f1540498f2664 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -8,8 +8,8 @@ use intern::Interned; use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct}; use smallvec::{smallvec, SmallVec}; use span::{Span, SyntaxContextId}; -use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode}; -use triomphe::Arc; +use syntax::{ast, format_smolstr, match_ast, AstNode, AstToken, SmolStr, SyntaxNode}; +use triomphe::ThinArc; use crate::{ db::ExpandDatabase, @@ -22,8 +22,7 @@ use crate::{ /// Syntactical attributes, without filtering of `cfg_attr`s. 
#[derive(Default, Debug, Clone, PartialEq, Eq)] pub struct RawAttrs { - // FIXME: Make this a ThinArc - entries: Option>, + entries: Option>, } impl ops::Deref for RawAttrs { @@ -31,7 +30,7 @@ impl ops::Deref for RawAttrs { fn deref(&self) -> &[Attr] { match &self.entries { - Some(it) => it, + Some(it) => &it.slice, None => &[], } } @@ -45,20 +44,34 @@ impl RawAttrs { owner: &dyn ast::HasAttrs, span_map: SpanMapRef<'_>, ) -> Self { - let entries = collect_attrs(owner).filter_map(|(id, attr)| match attr { - Either::Left(attr) => { - attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id)) - } - Either::Right(comment) => comment.doc_comment().map(|doc| Attr { - id, - input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), - path: Interned::new(ModPath::from(crate::name!(doc))), - ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx, - }), - }); - let entries: Arc<[Attr]> = Arc::from_iter(entries); + let entries: Vec<_> = collect_attrs(owner) + .filter_map(|(id, attr)| match attr { + Either::Left(attr) => { + attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id)) + } + Either::Right(comment) => comment.doc_comment().map(|doc| { + let span = span_map.span_for_range(comment.syntax().text_range()); + Attr { + id, + input: Some(Interned::new(AttrInput::Literal(tt::Literal { + // FIXME: Escape quotes from comment content + text: SmolStr::new(format_smolstr!("\"{doc}\"",)), + span, + }))), + path: Interned::new(ModPath::from(crate::name!(doc))), + ctxt: span.ctx, + } + }), + }) + .collect(); - Self { entries: if entries.is_empty() { None } else { Some(entries) } } + let entries = if entries.is_empty() { + None + } else { + Some(ThinArc::from_header_and_iter((), entries.into_iter())) + }; + + RawAttrs { entries } } pub fn from_attrs_owner( @@ -75,16 +88,20 @@ impl RawAttrs { (None, entries @ Some(_)) => Self { entries }, (Some(entries), None) => Self { entries: Some(entries.clone()) }, (Some(a), Some(b)) => { - let last_ast_index = a.last().map_or(0, |it| it.id.ast_index() + 1) as u32; - Self { - entries: Some(Arc::from_iter(a.iter().cloned().chain(b.iter().map(|it| { + let last_ast_index = a.slice.last().map_or(0, |it| it.id.ast_index() + 1) as u32; + let items = a + .slice + .iter() + .cloned() + .chain(b.slice.iter().map(|it| { let mut it = it.clone(); it.id.id = (it.id.ast_index() as u32 + last_ast_index) | (it.id.cfg_attr_index().unwrap_or(0) as u32) << AttrId::AST_INDEX_BITS; it - })))), - } + })) + .collect::>(); + Self { entries: Some(ThinArc::from_header_and_iter((), items.into_iter())) } } } } @@ -100,41 +117,47 @@ impl RawAttrs { } let crate_graph = db.crate_graph(); - let new_attrs = Arc::from_iter(self.iter().flat_map(|attr| -> SmallVec<[_; 1]> { - let is_cfg_attr = - attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]); - if !is_cfg_attr { - return smallvec![attr.clone()]; - } - - let subtree = match attr.token_tree_value() { - Some(it) => it, - _ => return smallvec![attr.clone()], - }; - - let (cfg, parts) = match parse_cfg_attr_input(subtree) { - Some(it) => it, - None => return smallvec![attr.clone()], - }; - let index = attr.id; - let attrs = parts - .enumerate() - .take(1 << AttrId::CFG_ATTR_BITS) - .filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx))); - - let cfg_options = &crate_graph[krate].cfg_options; - let cfg = Subtree { delimiter: subtree.delimiter, token_trees: Box::from(cfg) }; - let cfg = CfgExpr::parse(&cfg); - if cfg_options.check(&cfg) == Some(false) { - smallvec![] - } else 
{ - cov_mark::hit!(cfg_attr_active); - - attrs.collect() - } - })); + let new_attrs = + self.iter() + .flat_map(|attr| -> SmallVec<[_; 1]> { + let is_cfg_attr = + attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]); + if !is_cfg_attr { + return smallvec![attr.clone()]; + } - RawAttrs { entries: Some(new_attrs) } + let subtree = match attr.token_tree_value() { + Some(it) => it, + _ => return smallvec![attr.clone()], + }; + + let (cfg, parts) = match parse_cfg_attr_input(subtree) { + Some(it) => it, + None => return smallvec![attr.clone()], + }; + let index = attr.id; + let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map( + |(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)), + ); + + let cfg_options = &crate_graph[krate].cfg_options; + let cfg = Subtree { delimiter: subtree.delimiter, token_trees: Box::from(cfg) }; + let cfg = CfgExpr::parse(&cfg); + if cfg_options.check(&cfg) == Some(false) { + smallvec![] + } else { + cov_mark::hit!(cfg_attr_active); + + attrs.collect() + } + }) + .collect::>(); + let entries = if new_attrs.is_empty() { + None + } else { + Some(ThinArc::from_header_and_iter((), new_attrs.into_iter())) + }; + RawAttrs { entries } } } @@ -179,8 +202,7 @@ pub struct Attr { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum AttrInput { /// `#[attr = "string"]` - // FIXME: This is losing span - Literal(SmolStr), + Literal(tt::Literal), /// `#[attr(subtree)]` TokenTree(Box), } @@ -188,7 +210,7 @@ pub enum AttrInput { impl fmt::Display for AttrInput { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()), + AttrInput::Literal(lit) => write!(f, " = {lit}"), AttrInput::TokenTree(tt) => tt.fmt(f), } } @@ -208,11 +230,10 @@ impl Attr { })?); let span = span_map.span_for_range(range); let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { - let value = match lit.kind() { - ast::LiteralKind::String(string) => string.value()?.into(), - _ => lit.syntax().first_token()?.text().trim_matches('"').into(), - }; - Some(Interned::new(AttrInput::Literal(value))) + Some(Interned::new(AttrInput::Literal(tt::Literal { + text: lit.token().text().into(), + span, + }))) } else if let Some(tt) = ast.token_tree() { let tree = syntax_node_to_token_tree(tt.syntax(), span_map, span); Some(Interned::new(AttrInput::TokenTree(Box::new(tree)))) @@ -245,9 +266,8 @@ impl Attr { } Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))) => { let input = match input.get(1) { - Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text, .. }))) => { - //FIXME the trimming here isn't quite right, raw strings are not handled - Some(Interned::new(AttrInput::Literal(text.trim_matches('"').into()))) + Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => { + Some(Interned::new(AttrInput::Literal(lit.clone()))) } _ => None, }; @@ -265,9 +285,14 @@ impl Attr { impl Attr { /// #[path = "string"] - pub fn string_value(&self) -> Option<&SmolStr> { + pub fn string_value(&self) -> Option<&str> { match self.input.as_deref()? { - AttrInput::Literal(it) => Some(it), + AttrInput::Literal(it) => match it.text.strip_prefix('r') { + Some(it) => it.trim_matches('#'), + None => it.text.as_str(), + } + .strip_prefix('"')? 
+ .strip_suffix('"'), _ => None, } } diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 9fb6a0b2346ba..fd3e4e7a4dba4 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -1,11 +1,11 @@ //! Builtin macro -use base_db::{AnchoredPath, Edition, FileId}; +use base_db::{AnchoredPath, FileId}; use cfg::CfgExpr; use either::Either; use itertools::Itertools; use mbe::{parse_exprs_with_sep, parse_to_token_tree}; -use span::{Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; +use span::{Edition, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; use syntax::ast::{self, AstToken}; use crate::{ diff --git a/crates/hir-expand/src/cfg_process.rs b/crates/hir-expand/src/cfg_process.rs index c74c13a6fd3c5..db3558a84e9e3 100644 --- a/crates/hir-expand/src/cfg_process.rs +++ b/crates/hir-expand/src/cfg_process.rs @@ -10,7 +10,7 @@ use syntax::{ use tracing::{debug, warn}; use tt::SmolStr; -use crate::{db::ExpandDatabase, MacroCallKind, MacroCallLoc}; +use crate::{db::ExpandDatabase, proc_macro::ProcMacroKind, MacroCallLoc, MacroDefKind}; fn check_cfg_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) -> Option { if !attr.simple_name().as_deref().map(|v| v == "cfg")? { @@ -139,7 +139,7 @@ fn process_enum( 'variant: for variant in variants.variants() { for attr in variant.attrs() { if check_cfg_attr(&attr, loc, db).map(|enabled| !enabled).unwrap_or_default() { - // Rustc does not strip the attribute if it is enabled. So we will will leave it + // Rustc does not strip the attribute if it is enabled. So we will leave it debug!("censoring type {:?}", variant.syntax()); remove.insert(variant.syntax().clone().into()); // We need to remove the , as well @@ -180,7 +180,13 @@ pub(crate) fn process_cfg_attrs( db: &dyn ExpandDatabase, ) -> Option> { // FIXME: #[cfg_eval] is not implemented. But it is not stable yet - if !matches!(loc.kind, MacroCallKind::Derive { .. }) { + let is_derive = match loc.def.kind { + MacroDefKind::BuiltInDerive(..) + | MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _) => true, + MacroDefKind::BuiltInAttr(expander, _) => expander.is_derive(), + _ => false, + }; + if !is_derive { return None; } let mut remove = FxHashSet::default(); diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index ec68f2f96e5e1..5461c1c49a32b 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -24,7 +24,8 @@ use crate::{ HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileId, }; - +/// This is just to ensure the types of smart_macro_arg and macro_arg are the same +type MacroArgResult = (Arc, SyntaxFixupUndoInfo, Span); /// Total limit on the number of tokens produced by any macro invocation. /// /// If an invocation produces more tokens than this limit, it will not be stored in the database and @@ -98,7 +99,13 @@ pub trait ExpandDatabase: SourceDatabase { /// Lowers syntactic macro call to a token tree representation. That's a firewall /// query, only typing in the macro call itself changes the returned /// subtree. - fn macro_arg(&self, id: MacroCallId) -> (Arc, SyntaxFixupUndoInfo, Span); + fn macro_arg(&self, id: MacroCallId) -> MacroArgResult; + #[salsa::transparent] + fn macro_arg_considering_derives( + &self, + id: MacroCallId, + kind: &MacroCallKind, + ) -> MacroArgResult; /// Fetches the expander for this macro. 
#[salsa::transparent] #[salsa::invoke(TokenExpander::macro_expander)] @@ -144,7 +151,7 @@ pub fn expand_speculative( let span_map = RealSpanMap::absolute(FileId::BOGUS); let span_map = SpanMapRef::RealSpanMap(&span_map); - let (_, _, span) = db.macro_arg(actual_macro_call); + let (_, _, span) = db.macro_arg_considering_derives(actual_macro_call, &loc.kind); // Build the subtree and token mapping for the speculative args let (mut tt, undo_info) = match loc.kind { @@ -339,12 +346,24 @@ pub(crate) fn parse_with_map( } } -// FIXME: for derive attributes, this will return separate copies of the same structures! Though -// they may differ in spans due to differing call sites... -fn macro_arg( +/// This resolves the [MacroCallId] to check if it is a derive macro if so get the [macro_arg] for the derive. +/// Other wise return the [macro_arg] for the macro_call_id. +/// +/// This is not connected to the database so it does not cached the result. However, the inner [macro_arg] query is +fn macro_arg_considering_derives( db: &dyn ExpandDatabase, id: MacroCallId, -) -> (Arc, SyntaxFixupUndoInfo, Span) { + kind: &MacroCallKind, +) -> MacroArgResult { + match kind { + // Get the macro arg for the derive macro + MacroCallKind::Derive { derive_macro_id, .. } => db.macro_arg(*derive_macro_id), + // Normal macro arg + _ => db.macro_arg(id), + } +} + +fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult { let loc = db.lookup_intern_macro_call(id); if let MacroCallLoc { @@ -414,29 +433,30 @@ fn macro_arg( } return (Arc::new(tt), SyntaxFixupUndoInfo::NONE, span); } - MacroCallKind::Derive { ast_id, derive_attr_index, .. } => { - let node = ast_id.to_ptr(db).to_node(&root); - let censor_derive_input = censor_derive_input(derive_attr_index, &node); - let item_node = node.into(); - let attr_source = attr_source(derive_attr_index, &item_node); - // FIXME: This is wrong, this should point to the path of the derive attribute` - let span = - map.span_for_range(attr_source.as_ref().and_then(|it| it.path()).map_or_else( - || item_node.syntax().text_range(), - |it| it.syntax().text_range(), - )); - (censor_derive_input, item_node, span) + // MacroCallKind::Derive should not be here. As we are getting the argument for the derive macro + MacroCallKind::Derive { .. } => { + unreachable!("`ExpandDatabase::macro_arg` called with `MacroCallKind::Derive`") } MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { let node = ast_id.to_ptr(db).to_node(&root); let attr_source = attr_source(invoc_attr_index, &node); + let span = map.span_for_range( attr_source .as_ref() .and_then(|it| it.path()) .map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()), ); - (attr_source.into_iter().map(|it| it.syntax().clone().into()).collect(), node, span) + // If derive attribute we need to censor the derive input + if matches!(loc.def.kind, MacroDefKind::BuiltInAttr(expander, ..) if expander.is_derive()) + && ast::Adt::can_cast(node.syntax().kind()) + { + let adt = ast::Adt::cast(node.syntax().clone()).unwrap(); + let censor_derive_input = censor_derive_input(invoc_attr_index, &adt); + (censor_derive_input, node, span) + } else { + (attr_source.into_iter().map(|it| it.syntax().clone().into()).collect(), node, span) + } } }; @@ -526,7 +546,8 @@ fn macro_expand( let (ExpandResult { value: tt, err }, span) = match loc.def.kind { MacroDefKind::ProcMacro(..) 
=> return db.expand_proc_macro(macro_call_id).map(CowArc::Arc), _ => { - let (macro_arg, undo_info, span) = db.macro_arg(macro_call_id); + let (macro_arg, undo_info, span) = + db.macro_arg_considering_derives(macro_call_id, &loc.kind); let arg = &*macro_arg; let res = @@ -603,7 +624,7 @@ fn proc_macro_span(db: &dyn ExpandDatabase, ast: AstId) -> Span { fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult> { let loc = db.lookup_intern_macro_call(id); - let (macro_arg, undo_info, span) = db.macro_arg(id); + let (macro_arg, undo_info, span) = db.macro_arg_considering_derives(id, &loc.kind); let (expander, ast) = match loc.def.kind { MacroDefKind::ProcMacro(expander, _, ast) => (expander, ast), diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs index 33643c02724f7..9a0b218e6d1d2 100644 --- a/crates/hir-expand/src/declarative.rs +++ b/crates/hir-expand/src/declarative.rs @@ -1,8 +1,8 @@ //! Compiled declarative macro expanders (`macro_rules!`` and `macro`) use std::sync::OnceLock; -use base_db::{CrateId, Edition, VersionReq}; -use span::{MacroCallId, Span}; +use base_db::{CrateId, VersionReq}; +use span::{MacroCallId, Span, SyntaxContextId}; use syntax::{ast, AstNode}; use triomphe::Arc; @@ -10,13 +10,13 @@ use crate::{ attrs::RawAttrs, db::ExpandDatabase, hygiene::{apply_mark, Transparency}, - tt, AstId, ExpandError, ExpandResult, + tt, AstId, ExpandError, ExpandResult, Lookup, }; /// Old-style `macro_rules` or the new macros 2.0 #[derive(Debug, Clone, Eq, PartialEq)] pub struct DeclarativeMacroExpander { - pub mac: mbe::DeclarativeMacro, + pub mac: mbe::DeclarativeMacro, pub transparency: Transparency, } @@ -94,8 +94,6 @@ impl DeclarativeMacroExpander { def_crate: CrateId, id: AstId, ) -> Arc { - let crate_data = &db.crate_graph()[def_crate]; - let is_2021 = crate_data.edition >= Edition::Edition2021; let (root, map) = crate::db::parse_with_map(db, id.file_id); let root = root.syntax_node(); @@ -133,6 +131,16 @@ impl DeclarativeMacroExpander { ) }); + let edition = |ctx: SyntaxContextId| { + let crate_graph = db.crate_graph(); + if ctx.is_root() { + crate_graph[def_crate].edition + } else { + let data = db.lookup_intern_syntax_context(ctx); + // UNWRAP-SAFETY: Only the root context has no outer expansion + crate_graph[data.outer_expn.unwrap().lookup(db).def.krate].edition + } + }; let (mac, transparency) = match id.to_ptr(db).to_node(&root) { ast::Macro::MacroRules(macro_rules) => ( match macro_rules.token_tree() { @@ -145,12 +153,11 @@ impl DeclarativeMacroExpander { ), ); - mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars) + mbe::DeclarativeMacro::parse_macro_rules(&tt, edition, new_meta_vars) } - None => mbe::DeclarativeMacro::from_err( - mbe::ParseError::Expected("expected a token tree".into()), - is_2021, - ), + None => mbe::DeclarativeMacro::from_err(mbe::ParseError::Expected( + "expected a token tree".into(), + )), }, transparency(¯o_rules).unwrap_or(Transparency::SemiTransparent), ), @@ -163,12 +170,11 @@ impl DeclarativeMacroExpander { map.span_for_range(macro_def.macro_token().unwrap().text_range()), ); - mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars) + mbe::DeclarativeMacro::parse_macro2(&tt, edition, new_meta_vars) } - None => mbe::DeclarativeMacro::from_err( - mbe::ParseError::Expected("expected a token tree".into()), - is_2021, - ), + None => mbe::DeclarativeMacro::from_err(mbe::ParseError::Expected( + "expected a token tree".into(), + )), }, 
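`DeclarativeMacroExpander::expander` used to pin a single `is_2021` flag per defining crate; the `edition` closure above instead resolves the edition per `SyntaxContextId`, so tokens introduced by another macro's expansion are parsed under the edition of that macro's defining crate. One edition-sensitive behaviour this feeds into, shown as plain Rust (build with `--edition 2021`):

// In edition 2021, `$p:pat` matches top-level or-patterns, so the
// invocation below is accepted; under 2015/2018 the `|` fails to match.
macro_rules! is_small {
    ($p:pat, $e:expr) => {
        match $e {
            $p => true,
            _ => false,
        }
    };
}

fn main() {
    assert!(is_small!(Some(1) | None, Some(1i32)));
}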
transparency(¯o_def).unwrap_or(Transparency::Opaque), ), diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 5d4f7dc1462a8..db8bbeccef8f9 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -30,10 +30,11 @@ use triomphe::Arc; use std::{fmt, hash::Hash}; -use base_db::{salsa::impl_intern_value_trivial, CrateId, Edition, FileId}; +use base_db::{salsa::impl_intern_value_trivial, CrateId, FileId}; use either::Either; use span::{ - ErasedFileAstId, FileRange, HirFileIdRepr, Span, SpanAnchor, SyntaxContextData, SyntaxContextId, + Edition, ErasedFileAstId, FileRange, HirFileIdRepr, Span, SpanAnchor, SyntaxContextData, + SyntaxContextId, }; use syntax::{ ast::{self, AstNode}, @@ -53,11 +54,9 @@ use crate::{ pub use crate::files::{AstId, ErasedAstId, InFile, InMacroFile, InRealFile}; -pub use mbe::ValueResult; +pub use mbe::{DeclarativeMacro, ValueResult}; pub use span::{HirFileId, MacroCallId, MacroFileId}; -pub type DeclarativeMacro = ::mbe::DeclarativeMacro; - pub mod tt { pub use span::Span; pub use tt::{DelimiterKind, Spacing}; @@ -201,7 +200,7 @@ pub struct EagerCallInfo { /// Call id of the eager macro's input file (this is the macro file for its fully expanded input). arg_id: MacroCallId, error: Option, - /// TODO: Doc + /// The call site span of the eager macro span: Span, } @@ -212,7 +211,7 @@ pub enum MacroCallKind { expand_to: ExpandTo, /// Some if this is a macro call for an eager macro. Note that this is `None` /// for the eager input macro file. - // FIXME: This is being interned, subtrees can vary quickly differ just slightly causing + // FIXME: This is being interned, subtrees can vary quickly differing just slightly causing // leakage problems here eager: Option>, }, @@ -225,6 +224,9 @@ pub enum MacroCallKind { derive_attr_index: AttrId, /// Index of the derive macro in the derive attribute derive_index: u32, + /// The "parent" macro call. + /// We will resolve the same token tree for all derive macros in the same derive attribute. + derive_macro_id: MacroCallId, }, Attr { ast_id: AstId, @@ -484,7 +486,7 @@ impl MacroDefId { matches!( self.kind, MacroDefKind::BuiltIn(..) - | MacroDefKind::ProcMacro(_, ProcMacroKind::FuncLike, _) + | MacroDefKind::ProcMacro(_, ProcMacroKind::Bang, _) | MacroDefKind::BuiltInEager(..) | MacroDefKind::Declarative(..) ) @@ -806,7 +808,8 @@ impl ExpansionInfo { let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() }; - let (macro_arg, _, _) = db.macro_arg(macro_file.macro_call_id); + let (macro_arg, _, _) = + db.macro_arg_considering_derives(macro_file.macro_call_id, &loc.kind); let def = loc.def.ast_id().left().and_then(|id| { let def_tt = match id.to_node(db) { diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index fc186d2c26d35..46f8c2b9d8c4c 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -225,6 +225,26 @@ fn convert_path( let mut segments = path.segments(); let segment = &segments.next()?; + let handle_super_kw = &mut |init_deg| { + let mut deg = init_deg; + let mut next_segment = None; + for segment in segments.by_ref() { + match segment.kind()? { + ast::PathSegmentKind::SuperKw => deg += 1, + ast::PathSegmentKind::Name(name) => { + next_segment = Some(name.as_name()); + break; + } + ast::PathSegmentKind::Type { .. 
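`MacroCallKind::Derive` now carries a `derive_macro_id` pointing at the derive attribute's own call, so every derive listed in one `#[derive(...)]` resolves the same argument subtree instead of lowering its own copy. In surface Rust the situation is simply:

// One `#[derive(...)]` attribute, three expansions, one shared input subtree.
#[derive(Clone, Debug, PartialEq)]
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let p = Point { x: 1, y: 2 };
    assert_eq!(p.clone(), p);
    println!("{p:?}");
}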
} + | ast::PathSegmentKind::SelfTypeKw + | ast::PathSegmentKind::SelfKw + | ast::PathSegmentKind::CrateKw => return None, + } + } + + Some(ModPath::from_segments(PathKind::Super(deg), next_segment)) + }; + let mut mod_path = match segment.kind()? { ast::PathSegmentKind::Name(name_ref) => { if name_ref.text() == "$crate" { @@ -245,26 +265,8 @@ fn convert_path( ModPath::from_segments(PathKind::Plain, Some(known::SELF_TYPE)) } ast::PathSegmentKind::CrateKw => ModPath::from_segments(PathKind::Crate, iter::empty()), - ast::PathSegmentKind::SelfKw => ModPath::from_segments(PathKind::Super(0), iter::empty()), - ast::PathSegmentKind::SuperKw => { - let mut deg = 1; - let mut next_segment = None; - for segment in segments.by_ref() { - match segment.kind()? { - ast::PathSegmentKind::SuperKw => deg += 1, - ast::PathSegmentKind::Name(name) => { - next_segment = Some(name.as_name()); - break; - } - ast::PathSegmentKind::Type { .. } - | ast::PathSegmentKind::SelfTypeKw - | ast::PathSegmentKind::SelfKw - | ast::PathSegmentKind::CrateKw => return None, - } - } - - ModPath::from_segments(PathKind::Super(deg), next_segment) - } + ast::PathSegmentKind::SelfKw => handle_super_kw(0)?, + ast::PathSegmentKind::SuperKw => handle_super_kw(1)?, ast::PathSegmentKind::Type { .. } => { // not allowed in imports return None; diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs index ca6fc0afe2d7d..abed16fecdebc 100644 --- a/crates/hir-expand/src/proc_macro.rs +++ b/crates/hir-expand/src/proc_macro.rs @@ -23,7 +23,7 @@ impl ProcMacroId { #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] pub enum ProcMacroKind { CustomDerive, - FuncLike, + Bang, Attr, } diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index 3cfedcdcb4dc7..bf473740166c8 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -47,13 +47,14 @@ hir-expand.workspace = true base-db.workspace = true syntax.workspace = true limit.workspace = true +span.workspace = true [dev-dependencies] expect-test = "1.4.0" tracing.workspace = true tracing-subscriber.workspace = true tracing-tree.workspace = true -project-model = { path = "../project-model" } +project-model.workspace = true # local deps test-utils.workspace = true diff --git a/crates/hir-ty/src/builder.rs b/crates/hir-ty/src/builder.rs index c485c9b2e80b3..cb118a3684860 100644 --- a/crates/hir-ty/src/builder.rs +++ b/crates/hir-ty/src/builder.rs @@ -9,21 +9,21 @@ use chalk_ir::{ AdtId, DebruijnIndex, Scalar, }; use hir_def::{ - builtin_type::BuiltinType, generics::TypeOrConstParamData, ConstParamId, DefWithBodyId, - GenericDefId, TraitId, TypeAliasId, + builtin_type::BuiltinType, DefWithBodyId, GenericDefId, GenericParamId, TraitId, TypeAliasId, }; use smallvec::SmallVec; use crate::{ - consteval::unknown_const_as_generic, db::HirDatabase, infer::unify::InferenceTable, primitive, - to_assoc_type_id, to_chalk_trait_id, utils::generics, Binders, BoundVar, CallableSig, - GenericArg, GenericArgData, Interner, ProjectionTy, Substitution, TraitRef, Ty, TyDefId, TyExt, - TyKind, + consteval::unknown_const_as_generic, db::HirDatabase, error_lifetime, + infer::unify::InferenceTable, primitive, to_assoc_type_id, to_chalk_trait_id, utils::generics, + Binders, BoundVar, CallableSig, GenericArg, GenericArgData, Interner, ProjectionTy, + Substitution, TraitRef, Ty, TyDefId, TyExt, TyKind, }; #[derive(Debug, Clone, PartialEq, Eq)] pub enum ParamKind { Type, + Lifetime, Const(Ty), } @@ -107,6 +107,9 @@ impl TyBuilder { ParamKind::Const(ty) => { 
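With the shared `handle_super_kw` closure, a leading `self` is lowered as `PathKind::Super(0)` and each further `super` segment bumps the degree, so both keywords go through the same loop. For reference, the path shapes involved, as an ordinary Rust example:

mod outer {
    pub const N: i32 = 1;
    pub mod inner {
        pub fn read() -> i32 {
            // `super::` lowers to Super(1): one level up to `outer`;
            // `self::` lowers to Super(0): the current module.
            super::N + self::local()
        }
        fn local() -> i32 {
            41
        }
    }
}

fn main() {
    assert_eq!(outer::inner::read(), 42);
}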
BoundVar::new(debruijn, idx).to_const(Interner, ty.clone()).cast(Interner) } + ParamKind::Lifetime => { + BoundVar::new(debruijn, idx).to_lifetime(Interner).cast(Interner) + } }); this.vec.extend(filler.take(this.remaining()).casted(Interner)); assert_eq!(this.remaining(), 0); @@ -119,6 +122,7 @@ impl TyBuilder { let filler = this.param_kinds[this.vec.len()..].iter().map(|x| match x { ParamKind::Type => TyKind::Error.intern(Interner).cast(Interner), ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + ParamKind::Lifetime => error_lifetime().cast(Interner), }); this.vec.extend(filler.casted(Interner)); assert_eq!(this.remaining(), 0); @@ -130,6 +134,7 @@ impl TyBuilder { self.fill(|x| match x { ParamKind::Type => table.new_type_var().cast(Interner), ParamKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner), + ParamKind::Lifetime => table.new_lifetime_var().cast(Interner), }) } @@ -142,7 +147,8 @@ impl TyBuilder { fn assert_match_kind(&self, a: &chalk_ir::GenericArg, e: &ParamKind) { match (a.data(Interner), e) { (GenericArgData::Ty(_), ParamKind::Type) - | (GenericArgData::Const(_), ParamKind::Const(_)) => (), + | (GenericArgData::Const(_), ParamKind::Const(_)) + | (GenericArgData::Lifetime(_), ParamKind::Lifetime) => (), _ => panic!("Mismatched kinds: {a:?}, {:?}, {:?}", self.vec, self.param_kinds), } } @@ -201,10 +207,11 @@ impl TyBuilder<()> { Substitution::from_iter( Interner, params.iter_id().map(|id| match id { - either::Either::Left(_) => TyKind::Error.intern(Interner).cast(Interner), - either::Either::Right(id) => { + GenericParamId::TypeParamId(_) => TyKind::Error.intern(Interner).cast(Interner), + GenericParamId::ConstParamId(id) => { unknown_const_as_generic(db.const_param_ty(id)).cast(Interner) } + GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner), }), ) } @@ -219,11 +226,10 @@ impl TyBuilder<()> { assert!(generics.parent_generics().is_some() == parent_subst.is_some()); let params = generics .iter_self() - .map(|(id, data)| match data { - TypeOrConstParamData::TypeParamData(_) => ParamKind::Type, - TypeOrConstParamData::ConstParamData(_) => { - ParamKind::Const(db.const_param_ty(ConstParamId::from_unchecked(id))) - } + .map(|(id, _data)| match id { + GenericParamId::TypeParamId(_) => ParamKind::Type, + GenericParamId::ConstParamId(id) => ParamKind::Const(db.const_param_ty(id)), + GenericParamId::LifetimeParamId(_) => ParamKind::Lifetime, }) .collect(); TyBuilder::new((), params, parent_subst) diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index e678a2fee1321..46612242b090c 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -272,6 +272,19 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { }; chalk_ir::Binders::new(binders, bound) } + crate::ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + let datas = self + .db + .type_alias_impl_traits(alias) + .expect("impl trait id without impl traits"); + let (datas, binders) = (*datas).as_ref().into_value_and_skipped_binders(); + let data = &datas.impl_traits[idx]; + let bound = OpaqueTyDatumBound { + bounds: make_single_type_binders(data.bounds.skip_binders().to_vec()), + where_clauses: chalk_ir::Binders::empty(Interner, vec![]), + }; + chalk_ir::Binders::new(binders, bound) + } crate::ImplTraitId::AsyncBlockTypeImplTrait(..) 
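`ParamKind::Lifetime` means `TyBuilder` now fills lifetime slots alongside type and const slots (bound vars, error lifetimes, or fresh inference vars, depending on the fill mode). The substitutions it builds correspond to items that mix all three kinds of parameters, for example:

// An item whose substitution is `['a, T, N]`, not just `[T, N]`.
struct Buffer<'a, T, const N: usize> {
    items: &'a [T; N],
}

fn head<'a, T, const N: usize>(b: Buffer<'a, T, N>) -> Option<&'a T> {
    b.items.first()
}

fn main() {
    let data = [1, 2, 3];
    let b = Buffer { items: &data };
    assert_eq!(head(b), Some(&1));
}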
=> { if let Some((future_trait, future_output)) = self .db diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs index 795a5996912b2..d1aebeff261c5 100644 --- a/crates/hir-ty/src/chalk_ext.rs +++ b/crates/hir-ty/src/chalk_ext.rs @@ -268,6 +268,13 @@ impl TyExt for Ty { data.substitute(Interner, &subst).into_value_and_skipped_binders().0 }) } + ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + db.type_alias_impl_traits(alias).map(|it| { + let data = + (*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); + data.substitute(Interner, &subst).into_value_and_skipped_binders().0 + }) + } } } TyKind::Alias(AliasTy::Opaque(opaque_ty)) => { @@ -280,6 +287,13 @@ impl TyExt for Ty { data.substitute(Interner, &opaque_ty.substitution) }) } + ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + db.type_alias_impl_traits(alias).map(|it| { + let data = + (*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); + data.substitute(Interner, &opaque_ty.substitution) + }) + } // It always has an parameter for Future::Output type. ImplTraitId::AsyncBlockTypeImplTrait(..) => unreachable!(), }; diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index 98384c47490a9..d1ffd5046c3f3 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -2825,3 +2825,30 @@ fn unsized_local() { |e| matches!(e, ConstEvalError::MirLowerError(MirLowerError::UnsizedTemporary(_))), ); } + +#[test] +fn recursive_adt() { + check_fail( + r#" + //- minicore: coerce_unsized, index, slice + pub enum TagTree { + Leaf, + Choice(&'static [TagTree]), + } + const GOAL: TagTree = { + const TAG_TREE: TagTree = TagTree::Choice(&[ + { + const VARIANT_TAG_TREE: TagTree = TagTree::Choice( + &[ + TagTree::Leaf, + ], + ); + VARIANT_TAG_TREE + }, + ]); + TAG_TREE + }; + "#, + |e| matches!(e, ConstEvalError::MirEvalError(MirEvalError::StackOverflow)), + ); +} diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index 28c497989fe98..90bf46b5056cc 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -11,7 +11,7 @@ use base_db::{ use hir_def::{ db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId, - LifetimeParamId, LocalFieldId, StaticId, TypeOrConstParamId, VariantId, + LifetimeParamId, LocalFieldId, StaticId, TypeAliasId, TypeOrConstParamId, VariantId, }; use la_arena::ArenaMap; use smallvec::SmallVec; @@ -23,9 +23,9 @@ use crate::{ layout::{Layout, LayoutError}, method_resolution::{InherentImpls, TraitImpls, TyFingerprint}, mir::{BorrowckResult, MirBody, MirLowerError}, - Binders, CallableDefId, ClosureId, Const, FnDefId, GenericArg, ImplTraitId, InferenceResult, - Interner, PolyFnSig, QuantifiedWhereClause, ReturnTypeImplTraits, Substitution, - TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId, + Binders, CallableDefId, ClosureId, Const, FnDefId, GenericArg, ImplTraitId, ImplTraits, + InferenceResult, Interner, PolyFnSig, QuantifiedWhereClause, Substitution, TraitEnvironment, + TraitRef, Ty, TyDefId, ValueTyDefId, }; use hir_expand::name::Name; @@ -132,10 +132,10 @@ pub trait HirDatabase: DefDatabase + Upcast { fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig; #[salsa::invoke(crate::lower::return_type_impl_traits)] - fn return_type_impl_traits( - &self, - def: FunctionId, - ) -> Option>>; + fn return_type_impl_traits(&self, def: FunctionId) -> Option>>; + + 
#[salsa::invoke(crate::lower::type_alias_impl_traits)] + fn type_alias_impl_traits(&self, def: TypeAliasId) -> Option>>; #[salsa::invoke(crate::lower::generic_predicates_for_param_query)] #[salsa::cycle(crate::lower::generic_predicates_for_param_recover)] diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 67cfbc294df7f..20b0da441daa8 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -11,7 +11,6 @@ use hir_def::{ItemContainerId, Lookup}; use hir_expand::name; use itertools::Itertools; use rustc_hash::FxHashSet; -use rustc_pattern_analysis::usefulness::{compute_match_usefulness, ValidityConstraint}; use syntax::{ast, AstNode}; use tracing::debug; use triomphe::Arc; @@ -234,13 +233,7 @@ impl ExprValidator { return; } - let report = match compute_match_usefulness( - &cx, - m_arms.as_slice(), - scrut_ty.clone(), - ValidityConstraint::ValidOnly, - None, - ) { + let report = match cx.compute_match_usefulness(m_arms.as_slice(), scrut_ty.clone()) { Ok(report) => report, Err(()) => return, }; @@ -282,13 +275,7 @@ impl ExprValidator { continue; } - let report = match compute_match_usefulness( - &cx, - &[match_arm], - ty.clone(), - ValidityConstraint::ValidOnly, - None, - ) { + let report = match cx.compute_match_usefulness(&[match_arm], ty.clone()) { Ok(v) => v, Err(e) => { debug!(?e, "match usefulness error"); diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index ca05842879657..f45beb4c92bfa 100644 --- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -8,7 +8,8 @@ use rustc_hash::FxHashMap; use rustc_pattern_analysis::{ constructor::{Constructor, ConstructorSet, VariantVisibility}, index::IdxContainer, - Captures, PrivateUninhabitedField, TypeCx, + usefulness::{compute_match_usefulness, PlaceValidity, UsefulnessReport}, + Captures, PatCx, PrivateUninhabitedField, }; use smallvec::{smallvec, SmallVec}; use stdx::never; @@ -59,6 +60,18 @@ impl<'p> MatchCheckCtx<'p> { Self { module, body, db, exhaustive_patterns, min_exhaustive_patterns } } + pub(crate) fn compute_match_usefulness( + &self, + arms: &[MatchArm<'p>], + scrut_ty: Ty, + ) -> Result, ()> { + // FIXME: Determine place validity correctly. For now, err on the safe side. + let place_validity = PlaceValidity::MaybeInvalid; + // Measured to take ~100ms on modern hardware. + let complexity_limit = Some(500000); + compute_match_usefulness(self, arms, scrut_ty, place_validity, complexity_limit) + } + fn is_uninhabited(&self, ty: &Ty) -> bool { is_ty_uninhabited_from(ty, self.module, self.db) } @@ -107,15 +120,17 @@ impl<'p> MatchCheckCtx<'p> { } pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'p> { - let singleton = |pat| vec![pat]; + let singleton = |pat: DeconstructedPat<'p>| vec![pat.at_index(0)]; let ctor; - let fields: Vec<_>; + let mut fields: Vec<_>; + let arity; match pat.kind.as_ref() { PatKind::Binding { subpattern: Some(subpat), .. } => return self.lower_pat(subpat), PatKind::Binding { subpattern: None, .. } | PatKind::Wild => { ctor = Wildcard; fields = Vec::new(); + arity = 0; } PatKind::Deref { subpattern } => { ctor = match pat.ty.kind(Interner) { @@ -128,23 +143,22 @@ impl<'p> MatchCheckCtx<'p> { } }; fields = singleton(self.lower_pat(subpattern)); + arity = 1; } PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. 
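`compute_match_usefulness` is now called through a wrapper that errs on the safe side with `PlaceValidity::MaybeInvalid` and caps the analysis at 500 000 complexity units (measured at roughly 100 ms). The cap behaves like an ordinary work budget; a standalone sketch of that shape, not the rustc_pattern_analysis API:

use std::collections::HashMap;

// Stand-in for the complexity counter: charge work units, fail once the
// budget is exhausted, analogous to `complexity_exceeded` returning Err(()).
struct Budget {
    remaining: u64,
}

impl Budget {
    fn charge(&mut self, cost: u64) -> Result<(), ()> {
        match self.remaining.checked_sub(cost) {
            Some(left) => {
                self.remaining = left;
                Ok(())
            }
            None => Err(()),
        }
    }
}

fn main() {
    let _unused: HashMap<u32, u32> = HashMap::new(); // keep the example self-contained
    let mut budget = Budget { remaining: 500_000 };
    assert!(budget.charge(499_999).is_ok());
    assert!(budget.charge(2).is_err());
}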
} => { + fields = subpatterns + .iter() + .map(|pat| { + let idx: u32 = pat.field.into_raw().into(); + self.lower_pat(&pat.pattern).at_index(idx as usize) + }) + .collect(); match pat.ty.kind(Interner) { TyKind::Tuple(_, substs) => { ctor = Struct; - let mut wilds: Vec<_> = substs - .iter(Interner) - .map(|arg| arg.assert_ty_ref(Interner).clone()) - .map(DeconstructedPat::wildcard) - .collect(); - for pat in subpatterns { - let idx: u32 = pat.field.into_raw().into(); - wilds[idx as usize] = self.lower_pat(&pat.pattern); - } - fields = wilds + arity = substs.len(Interner); } - TyKind::Adt(adt, substs) if is_box(self.db, adt.0) => { + TyKind::Adt(adt, _) if is_box(self.db, adt.0) => { // The only legal patterns of type `Box` (outside `std`) are `_` and box // patterns. If we're here we can assume this is a box pattern. // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_, @@ -157,16 +171,9 @@ impl<'p> MatchCheckCtx<'p> { // normally or through box-patterns. We'll have to figure out a proper // solution when we introduce generalized deref patterns. Also need to // prevent mixing of those two options. - let pat = - subpatterns.iter().find(|pat| pat.field.into_raw() == 0u32.into()); - let field = if let Some(pat) = pat { - self.lower_pat(&pat.pattern) - } else { - let ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone(); - DeconstructedPat::wildcard(ty) - }; + fields.retain(|ipat| ipat.idx == 0); ctor = Struct; - fields = singleton(field); + arity = 1; } &TyKind::Adt(adt, _) => { ctor = match pat.kind.as_ref() { @@ -181,37 +188,33 @@ impl<'p> MatchCheckCtx<'p> { } }; let variant = Self::variant_id_for_adt(&ctor, adt.0).unwrap(); - // Fill a vec with wildcards, then place the fields we have at the right - // index. - let mut wilds: Vec<_> = self - .list_variant_fields(&pat.ty, variant) - .map(|(_, ty)| ty) - .map(DeconstructedPat::wildcard) - .collect(); - for pat in subpatterns { - let field_id: u32 = pat.field.into_raw().into(); - wilds[field_id as usize] = self.lower_pat(&pat.pattern); - } - fields = wilds; + arity = variant.variant_data(self.db.upcast()).fields().len(); } _ => { never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty); ctor = Wildcard; - fields = Vec::new(); + fields.clear(); + arity = 0; } } } &PatKind::LiteralBool { value } => { ctor = Bool(value); fields = Vec::new(); + arity = 0; } PatKind::Or { pats } => { ctor = Or; - fields = pats.iter().map(|pat| self.lower_pat(pat)).collect(); + fields = pats + .iter() + .enumerate() + .map(|(i, pat)| self.lower_pat(pat).at_index(i)) + .collect(); + arity = pats.len(); } } let data = PatData { db: self.db }; - DeconstructedPat::new(ctor, fields, pat.ty.clone(), data) + DeconstructedPat::new(ctor, fields, arity, pat.ty.clone(), data) } pub(crate) fn hoist_witness_pat(&self, pat: &WitnessPat<'p>) -> Pat { @@ -271,7 +274,7 @@ impl<'p> MatchCheckCtx<'p> { } } -impl<'p> TypeCx for MatchCheckCtx<'p> { +impl<'p> PatCx for MatchCheckCtx<'p> { type Error = (); type Ty = Ty; type VariantIdx = EnumVariantId; @@ -453,7 +456,7 @@ impl<'p> TypeCx for MatchCheckCtx<'p> { let variant = pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(pat.ctor(), adt)); - let db = pat.data().unwrap().db; + let db = pat.data().db; if let Some(variant) = variant { match variant { VariantId::EnumVariantId(v) => { @@ -475,7 +478,6 @@ impl<'p> TypeCx for MatchCheckCtx<'p> { } fn complexity_exceeded(&self) -> Result<(), Self::Error> { - // FIXME(Nadrieril): make use of the complexity counter. 
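`lower_pat` no longer materializes a wildcard per field: it keeps only the subpatterns that were actually written, each tagged with its field index via `at_index`, plus the constructor's arity. For the pattern below, that is one indexed subpattern (field `y`, index 1) and an arity of 2:

struct Point {
    x: i32,
    y: i32,
}

fn on_x_axis(p: &Point) -> bool {
    // Lowered as constructor `Struct`, arity 2, with a single recorded
    // subpattern `0` at field index 1; no wildcard is stored for `x`.
    matches!(p, Point { y: 0, .. })
}

fn main() {
    assert!(on_x_axis(&Point { x: 3, y: 0 }));
    assert!(!on_x_axis(&Point { x: 3, y: 4 }));
}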
Err(()) } } diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 20964f5acbd01..8740ae6797cb1 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -938,18 +938,32 @@ impl HirDisplay for Ty { f.end_location_link(); if parameters.len(Interner) > 0 { let generics = generics(db.upcast(), def.into()); - let (parent_params, self_param, type_params, const_params, _impl_trait_params) = - generics.provenance_split(); - let total_len = parent_params + self_param + type_params + const_params; + let ( + parent_params, + self_param, + type_params, + const_params, + _impl_trait_params, + lifetime_params, + ) = generics.provenance_split(); + let total_len = + parent_params + self_param + type_params + const_params + lifetime_params; // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self? if total_len > 0 { - // `parameters` are in the order of fn's params (including impl traits), + // `parameters` are in the order of fn's params (including impl traits), fn's lifetimes // parent's params (those from enclosing impl or trait, if any). let parameters = parameters.as_slice(Interner); let fn_params_len = self_param + type_params + const_params; + // This will give slice till last type or const let fn_params = parameters.get(..fn_params_len); + let fn_lt_params = + parameters.get(fn_params_len..(fn_params_len + lifetime_params)); let parent_params = parameters.get(parameters.len() - parent_params..); - let params = parent_params.into_iter().chain(fn_params).flatten(); + let params = parent_params + .into_iter() + .chain(fn_lt_params) + .chain(fn_params) + .flatten(); write!(f, "<")?; f.write_joined(params, ", ")?; write!(f, ">")?; @@ -1063,6 +1077,20 @@ impl HirDisplay for Ty { )?; // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution } + ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + let datas = + db.type_alias_impl_traits(alias).expect("impl trait id without data"); + let data = + (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); + let bounds = data.substitute(Interner, ¶meters); + let krate = alias.krate(db.upcast()); + write_bounds_like_dyn_trait_with_prefix( + f, + "impl", + bounds.skip_binders(), + SizedByDefault::Sized { anchor: krate }, + )?; + } ImplTraitId::AsyncBlockTypeImplTrait(body, ..) => { let future_trait = db .lang_item(body.module(db.upcast()).krate(), LangItem::Future) @@ -1228,6 +1256,20 @@ impl HirDisplay for Ty { SizedByDefault::Sized { anchor: krate }, )?; } + ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + let datas = + db.type_alias_impl_traits(alias).expect("impl trait id without data"); + let data = + (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); + let bounds = data.substitute(Interner, &opaque_ty.substitution); + let krate = alias.krate(db.upcast()); + write_bounds_like_dyn_trait_with_prefix( + f, + "impl", + bounds.skip_binders(), + SizedByDefault::Sized { anchor: krate }, + )?; + } ImplTraitId::AsyncBlockTypeImplTrait(..) 
=> { write!(f, "{{async block}}")?; } @@ -1280,8 +1322,17 @@ fn hir_fmt_generics( generic_def: Option, ) -> Result<(), HirDisplayError> { let db = f.db; - let lifetime_args_count = generic_def.map_or(0, |g| db.generic_params(g).lifetimes.len()); - if parameters.len(Interner) + lifetime_args_count > 0 { + if parameters.len(Interner) > 0 { + use std::cmp::Ordering; + let param_compare = + |a: &GenericArg, b: &GenericArg| match (a.data(Interner), b.data(Interner)) { + (crate::GenericArgData::Lifetime(_), crate::GenericArgData::Lifetime(_)) => { + Ordering::Equal + } + (crate::GenericArgData::Lifetime(_), _) => Ordering::Less, + (_, crate::GenericArgData::Lifetime(_)) => Ordering::Less, + (_, _) => Ordering::Equal, + }; let parameters_to_write = if f.display_target.is_source_code() || f.omit_verbose_types() { match generic_def .map(|generic_def_id| db.generic_defaults(generic_def_id)) @@ -1307,6 +1358,11 @@ fn hir_fmt_generics( return true; } } + if parameter.lifetime(Interner).map(|it| it.data(Interner)) + == Some(&crate::LifetimeData::Static) + { + return true; + } let default_parameter = match default_parameters.get(i) { Some(it) => it, None => return true, @@ -1327,16 +1383,12 @@ fn hir_fmt_generics( } else { parameters.as_slice(Interner) }; - if !parameters_to_write.is_empty() || lifetime_args_count != 0 { + //FIXME: Should handle the ordering of lifetimes when creating substitutions + let mut parameters_to_write = parameters_to_write.to_vec(); + parameters_to_write.sort_by(param_compare); + if !parameters_to_write.is_empty() { write!(f, "<")?; let mut first = true; - for _ in 0..lifetime_args_count { - if !first { - write!(f, ", ")?; - } - first = false; - write!(f, "'_")?; - } for generic_arg in parameters_to_write { if !first { write!(f, ", ")?; diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 34ba17f145e0a..be3b50e1411da 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -25,8 +25,11 @@ pub(crate) mod unify; use std::{convert::identity, iter, ops::Index}; use chalk_ir::{ - cast::Cast, fold::TypeFoldable, interner::HasInterner, DebruijnIndex, Mutability, Safety, - Scalar, TyKind, TypeFlags, Variance, + cast::Cast, + fold::TypeFoldable, + interner::HasInterner, + visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, + DebruijnIndex, Mutability, Safety, Scalar, TyKind, TypeFlags, Variance, }; use either::Either; use hir_def::{ @@ -39,7 +42,7 @@ use hir_def::{ layout::Integer, path::{ModPath, Path}, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, - type_ref::TypeRef, + type_ref::{LifetimeRef, TypeRef}, AdtId, AssocItemId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, Lookup, TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId, }; @@ -53,14 +56,14 @@ use triomphe::Arc; use crate::{ db::HirDatabase, fold_tys, - infer::coerce::CoerceMany, + infer::{coerce::CoerceMany, unify::InferenceTable}, lower::ImplTraitLoweringMode, static_lifetime, to_assoc_type_id, traits::FnTrait, utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder}, AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId, - InEnvironment, Interner, Lifetime, ProjectionTy, RpitId, Substitution, TraitEnvironment, - TraitRef, Ty, TyBuilder, TyExt, + ImplTraitIdx, InEnvironment, Interner, Lifetime, OpaqueTyId, ProjectionTy, Substitution, + TraitEnvironment, Ty, TyBuilder, TyExt, }; // This lint has a false positive here. See the link below for details. 
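Instead of printing a `'_` placeholder for every declared lifetime, `hir_fmt_generics` now renders the actual lifetime arguments, sorts them to the front, and elides `'static`, matching the order surface Rust requires. A small reference example of that ordering:

struct Ref<'a, T, const N: usize>(&'a [T; N]);

fn main() {
    // The borrow of a literal array is promoted to `'static`, so the fully
    // explicit type puts the lifetime argument first: `Ref<'static, u8, 4>`.
    let r: Ref<'static, u8, 4> = Ref(&[1, 2, 3, 4]);
    assert_eq!(r.0[0], 1);
}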
@@ -422,7 +425,7 @@ pub struct InferenceResult { /// unresolved or missing subpatterns or subpatterns of mismatched types. pub type_of_pat: ArenaMap, pub type_of_binding: ArenaMap, - pub type_of_rpit: ArenaMap, + pub type_of_rpit: ArenaMap, /// Type of the result of `.into_iter()` on the for. `ExprId` is the one of the whole for loop. pub type_of_for_iterator: FxHashMap, type_mismatches: FxHashMap, @@ -752,7 +755,12 @@ impl<'a> InferenceContext<'a> { } fn collect_const(&mut self, data: &ConstData) { - self.return_ty = self.make_ty(&data.type_ref); + let return_ty = self.make_ty(&data.type_ref); + + // Constants might be associated items that define ATPITs. + self.insert_atpit_coercion_table(iter::once(&return_ty)); + + self.return_ty = return_ty; } fn collect_static(&mut self, data: &StaticData) { @@ -785,11 +793,13 @@ impl<'a> InferenceContext<'a> { self.write_binding_ty(self_param, ty); } } + let mut params_and_ret_tys = Vec::new(); for (ty, pat) in param_tys.zip(&*self.body.params) { let ty = self.insert_type_vars(ty); let ty = self.normalize_associated_types_in(ty); self.infer_top_pat(*pat, &ty); + params_and_ret_tys.push(ty); } let return_ty = &*data.ret_type; @@ -801,8 +811,11 @@ impl<'a> InferenceContext<'a> { let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) { // RPIT opaque types use substitution of their parent function. let fn_placeholders = TyBuilder::placeholder_subst(self.db, func); - let result = - self.insert_inference_vars_for_rpit(return_ty, rpits.clone(), fn_placeholders); + let result = self.insert_inference_vars_for_impl_trait( + return_ty, + rpits.clone(), + fn_placeholders, + ); let rpits = rpits.skip_binders(); for (id, _) in rpits.impl_traits.iter() { if let Entry::Vacant(e) = self.result.type_of_rpit.entry(id) { @@ -817,13 +830,19 @@ impl<'a> InferenceContext<'a> { self.return_ty = self.normalize_associated_types_in(return_ty); self.return_coercion = Some(CoerceMany::new(self.return_ty.clone())); + + // Functions might be associated items that define ATPITs. + // To define an ATPIT, that ATPIT must appear in the function's signature. + // So, it suffices to check the params and return type.
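`collect_const` and `collect_fn` now scan signatures for defining uses of associated type position impl Trait (ATPIT). In surface Rust that feature is nightly-only (`impl_trait_in_assoc_type` at the time of writing); the trait and types below are purely illustrative:

#![feature(impl_trait_in_assoc_type)]

trait Producer {
    type Item;
    fn produce(&self) -> Self::Item;
}

struct Evens;

impl Producer for Evens {
    // The associated type is an ATPIT; `produce` is a defining use, which is
    // why inference scans fn signatures (and const types) for these opaques.
    type Item = impl Iterator<Item = u32>;

    fn produce(&self) -> Self::Item {
        (0..).step_by(2)
    }
}

fn main() {
    let mut evens = Evens.produce();
    assert_eq!(evens.next(), Some(0));
    assert_eq!(evens.next(), Some(2));
}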
+ params_and_ret_tys.push(self.return_ty.clone()); + self.insert_atpit_coercion_table(params_and_ret_tys.iter()); } - fn insert_inference_vars_for_rpit( + fn insert_inference_vars_for_impl_trait( &mut self, t: T, - rpits: Arc>, - fn_placeholders: Substitution, + rpits: Arc>, + placeholders: Substitution, ) -> T where T: crate::HasInterner + crate::TypeFoldable, @@ -837,6 +856,7 @@ impl<'a> InferenceContext<'a> { }; let idx = match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) { ImplTraitId::ReturnTypeImplTrait(_, idx) => idx, + ImplTraitId::AssociatedTypeImplTrait(_, idx) => idx, _ => unreachable!(), }; let bounds = @@ -844,15 +864,14 @@ impl<'a> InferenceContext<'a> { let var = self.table.new_type_var(); let var_subst = Substitution::from1(Interner, var.clone()); for bound in bounds { - let predicate = - bound.map(|it| it.cloned()).substitute(Interner, &fn_placeholders); + let predicate = bound.map(|it| it.cloned()).substitute(Interner, &placeholders); let (var_predicate, binders) = predicate.substitute(Interner, &var_subst).into_value_and_skipped_binders(); always!(binders.is_empty(Interner)); // quantified where clauses not yet handled - let var_predicate = self.insert_inference_vars_for_rpit( + let var_predicate = self.insert_inference_vars_for_impl_trait( var_predicate, rpits.clone(), - fn_placeholders.clone(), + placeholders.clone(), ); self.push_obligation(var_predicate.cast(Interner)); } @@ -863,6 +882,106 @@ impl<'a> InferenceContext<'a> { ) } + /// The coercion of a non-inference var into an opaque type should fail, + /// but not in the defining sites of the ATPITs. + /// In such cases, we insert a proxy inference var for each ATPIT, + /// and coerce into it instead of the ATPIT itself. + /// + /// The inference var strategy is effective because: + /// + /// - It can still unify types that were coerced into the ATPIT + /// - We are pushing `impl Trait` bounds into it + /// + /// This function inserts a map that maps the opaque type to that proxy inference var.
+ fn insert_atpit_coercion_table<'b>(&mut self, tys: impl Iterator) { + struct OpaqueTyCollector<'a, 'b> { + table: &'b mut InferenceTable<'a>, + opaque_tys: FxHashMap, + } + + impl<'a, 'b> TypeVisitor for OpaqueTyCollector<'a, 'b> { + type BreakTy = (); + + fn as_dyn(&mut self) -> &mut dyn TypeVisitor { + self + } + + fn interner(&self) -> Interner { + Interner + } + + fn visit_ty( + &mut self, + ty: &chalk_ir::Ty, + outer_binder: DebruijnIndex, + ) -> std::ops::ControlFlow { + let ty = self.table.resolve_ty_shallow(ty); + + if let TyKind::OpaqueType(id, _) = ty.kind(Interner) { + self.opaque_tys.insert(*id, ty.clone()); + } + + ty.super_visit_with(self, outer_binder) + } + } + + // Early return if this is not happening inside the impl block + let impl_id = if let Some(impl_id) = self.resolver.impl_def() { + impl_id + } else { + return; + }; + + let assoc_tys: FxHashSet<_> = self + .db + .impl_data(impl_id) + .items + .iter() + .filter_map(|item| match item { + AssocItemId::TypeAliasId(alias) => Some(*alias), + _ => None, + }) + .collect(); + if assoc_tys.is_empty() { + return; + } + + let mut collector = + OpaqueTyCollector { table: &mut self.table, opaque_tys: FxHashMap::default() }; + for ty in tys { + ty.visit_with(collector.as_dyn(), DebruijnIndex::INNERMOST); + } + let atpit_coercion_table: FxHashMap<_, _> = collector + .opaque_tys + .into_iter() + .filter_map(|(opaque_ty_id, ty)| { + if let ImplTraitId::AssociatedTypeImplTrait(alias_id, _) = + self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) + { + if assoc_tys.contains(&alias_id) { + let atpits = self + .db + .type_alias_impl_traits(alias_id) + .expect("Marked as ATPIT but no impl traits!"); + let alias_placeholders = TyBuilder::placeholder_subst(self.db, alias_id); + let ty = self.insert_inference_vars_for_impl_trait( + ty, + atpits, + alias_placeholders, + ); + return Some((opaque_ty_id, ty)); + } + } + + None + }) + .collect(); + + if !atpit_coercion_table.is_empty() { + self.table.atpit_coercion_table = Some(atpit_coercion_table); + } + } + fn infer_body(&mut self) { match self.return_coercion { Some(_) => self.infer_return(self.body.body_expr), @@ -918,6 +1037,12 @@ impl<'a> InferenceContext<'a> { self.result.standard_types.unknown.clone() } + fn make_lifetime(&mut self, lifetime_ref: &LifetimeRef) -> Lifetime { + let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into()); + let lt = ctx.lower_lifetime(lifetime_ref); + self.insert_type_vars(lt) + } + /// Replaces `Ty::Error` by a new type var, so we can maybe still infer it. fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty { self.table.insert_type_vars_shallow(ty) diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs index ff6de61ba6493..cfbbc9dd6c04f 100644 --- a/crates/hir-ty/src/infer/coerce.rs +++ b/crates/hir-ty/src/infer/coerce.rs @@ -276,6 +276,23 @@ impl InferenceTable<'_> { return success(simple(Adjust::NeverToAny)(to_ty.clone()), to_ty.clone(), vec![]); } + // If we are coercing into an ATPIT, coerce into its proxy inference var, instead. + let mut to_ty = to_ty; + let _to; + if let Some(atpit_table) = &self.atpit_coercion_table { + if let TyKind::OpaqueType(opaque_ty_id, _) = to_ty.kind(Interner) { + if !matches!( + from_ty.kind(Interner), + TyKind::InferenceVar(..) | TyKind::OpaqueType(..) 
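`insert_atpit_coercion_table` maps each ATPIT opaque type id found in the signature to a fresh proxy inference variable; the coercion hunk that follows then redirects coercions targeting the opaque type to that variable. A minimal standalone sketch of the lookup, with stand-in types rather than the hir-ty ones:

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct OpaqueId(u32);

#[derive(Clone, Copy, PartialEq, Debug)]
enum Ty {
    Opaque(OpaqueId),
    InferVar(u32),
    Unit,
}

// Coercions aimed at a registered opaque id are redirected to its proxy
// inference variable; everything else coerces to the original target.
fn coercion_target(table: &HashMap<OpaqueId, Ty>, to: Ty) -> Ty {
    match to {
        Ty::Opaque(id) => *table.get(&id).unwrap_or(&to),
        _ => to,
    }
}

fn main() {
    let mut table = HashMap::new();
    table.insert(OpaqueId(0), Ty::InferVar(7));
    assert_eq!(coercion_target(&table, Ty::Opaque(OpaqueId(0))), Ty::InferVar(7));
    assert_eq!(coercion_target(&table, Ty::Unit), Ty::Unit);
}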
+ ) { + if let Some(ty) = atpit_table.get(opaque_ty_id) { + _to = ty.clone(); + to_ty = &_to; + } + } + } + } + // Consider coercing the subtype to a DST if let Ok(ret) = self.try_coerce_unsized(&from_ty, to_ty) { return Ok(ret); diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index a3dab1fd9d54c..35d59679355a6 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -8,13 +8,12 @@ use std::{ use chalk_ir::{cast::Cast, fold::Shift, DebruijnIndex, Mutability, TyVariableKind}; use either::Either; use hir_def::{ - generics::TypeOrConstParamData, hir::{ ArithOp, Array, BinaryOp, ClosureKind, Expr, ExprId, LabelId, Literal, Statement, UnaryOp, }, lang_item::{LangItem, LangItemTarget}, - path::{GenericArg, GenericArgs, Path}, - BlockId, ConstParamId, FieldId, ItemContainerId, Lookup, TupleFieldId, TupleId, + path::{GenericArgs, Path}, + BlockId, FieldId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId, }; use hir_expand::name::{name, Name}; use stdx::always; @@ -1816,10 +1815,17 @@ impl InferenceContext<'_> { def_generics: Generics, generic_args: Option<&GenericArgs>, ) -> Substitution { - let (parent_params, self_params, type_params, const_params, impl_trait_params) = - def_generics.provenance_split(); + let ( + parent_params, + self_params, + type_params, + const_params, + impl_trait_params, + lifetime_params, + ) = def_generics.provenance_split(); assert_eq!(self_params, 0); // method shouldn't have another Self param - let total_len = parent_params + type_params + const_params + impl_trait_params; + let total_len = + parent_params + type_params + const_params + impl_trait_params + lifetime_params; let mut substs = Vec::with_capacity(total_len); // handle provided arguments @@ -1828,8 +1834,7 @@ impl InferenceContext<'_> { for (arg, kind_id) in generic_args .args .iter() - .filter(|arg| !matches!(arg, GenericArg::Lifetime(_))) - .take(type_params + const_params) + .take(type_params + const_params + lifetime_params) .zip(def_generics.iter_id()) { if let Some(g) = generic_arg_to_chalk( @@ -1850,6 +1855,7 @@ impl InferenceContext<'_> { DebruijnIndex::INNERMOST, ) }, + |this, lt_ref| this.make_lifetime(lt_ref), ) { substs.push(g); } @@ -1858,16 +1864,17 @@ impl InferenceContext<'_> { // Handle everything else as unknown. This also handles generic arguments for the method's // parent (impl or trait), which should come after those for the method. 
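`substs_for_method_call` no longer filters out `GenericArg::Lifetime`: explicit lifetime arguments are lowered through `make_lifetime` and threaded into the substitution instead of being padded with unknowns. A surface-level example of the call shape this covers (the `Holder` type is made up; the `T: 'a` bound keeps `'a` early-bound so the explicit lifetime argument is accepted):

struct Holder;

impl Holder {
    fn pick<'a, T: 'a>(self, xs: &'a [T]) -> Option<&'a T> {
        xs.first()
    }
}

static PRIMES: [i32; 3] = [2, 3, 5];

fn main() {
    // Explicit lifetime argument in a method call's generic argument list.
    let first = Holder.pick::<'static, i32>(&PRIMES);
    assert_eq!(first, Some(&2));
}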
- for (id, data) in def_generics.iter().skip(substs.len()) { - match data { - TypeOrConstParamData::TypeParamData(_) => { + for (id, _data) in def_generics.iter().skip(substs.len()) { + match id { + GenericParamId::TypeParamId(_) => { substs.push(self.table.new_type_var().cast(Interner)) } - TypeOrConstParamData::ConstParamData(_) => substs.push( - self.table - .new_const_var(self.db.const_param_ty(ConstParamId::from_unchecked(id))) - .cast(Interner), - ), + GenericParamId::ConstParamId(id) => { + substs.push(self.table.new_const_var(self.db.const_param_ty(id)).cast(Interner)) + } + GenericParamId::LifetimeParamId(_) => { + substs.push(self.table.new_lifetime_var().cast(Interner)) + } } } assert_eq!(substs.len(), total_len); diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs index 8f537bb448b9e..9a1835b625b3c 100644 --- a/crates/hir-ty/src/infer/path.rs +++ b/crates/hir-ty/src/infer/path.rs @@ -11,15 +11,15 @@ use stdx::never; use crate::{ builder::ParamKind, - consteval, + consteval, error_lifetime, method_resolution::{self, VisibleFromModule}, to_chalk_trait_id, utils::generics, - InferenceDiagnostic, Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, - ValueTyDefId, + InferenceDiagnostic, Interner, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, + TyKind, ValueTyDefId, }; -use super::{ExprOrPatId, InferenceContext, TraitRef}; +use super::{ExprOrPatId, InferenceContext}; impl InferenceContext<'_> { pub(super) fn infer_path(&mut self, path: &Path, id: ExprOrPatId) -> Option { @@ -111,6 +111,7 @@ impl InferenceContext<'_> { it.next().unwrap_or_else(|| match x { ParamKind::Type => self.result.standard_types.unknown.clone().cast(Interner), ParamKind::Const(ty) => consteval::unknown_const_as_generic(ty.clone()), + ParamKind::Lifetime => error_lifetime().cast(Interner), }) }) .build(); diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index be7547f9bae59..afb89fe1e5b1c 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -10,16 +10,18 @@ use chalk_solve::infer::ParameterEnaVariableExt; use either::Either; use ena::unify::UnifyKey; use hir_expand::name; +use rustc_hash::FxHashMap; use smallvec::SmallVec; use triomphe::Arc; use super::{InferOk, InferResult, InferenceContext, TypeError}; use crate::{ - consteval::unknown_const, db::HirDatabase, fold_tys_and_consts, static_lifetime, - to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical, Const, ConstValue, - DebruijnIndex, DomainGoal, GenericArg, GenericArgData, Goal, GoalData, Guidance, InEnvironment, - InferenceVar, Interner, Lifetime, ParamKind, ProjectionTy, ProjectionTyExt, Scalar, Solution, - Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind, WhereClause, + consteval::unknown_const, db::HirDatabase, fold_generic_args, fold_tys_and_consts, + static_lifetime, to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical, + Const, ConstValue, DebruijnIndex, DomainGoal, GenericArg, GenericArgData, Goal, GoalData, + Guidance, InEnvironment, InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind, ProjectionTy, + ProjectionTyExt, Scalar, Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, + TyKind, VariableKind, WhereClause, }; impl InferenceContext<'_> { @@ -239,6 +241,7 @@ type ChalkInferenceTable = chalk_solve::infer::InferenceTable; pub(crate) struct InferenceTable<'a> { pub(crate) db: &'a dyn HirDatabase, pub(crate) trait_env: Arc, + pub(crate) 
atpit_coercion_table: Option>, var_unification_table: ChalkInferenceTable, type_variable_table: SmallVec<[TypeVariableFlags; 16]>, pending_obligations: Vec>>, @@ -258,6 +261,7 @@ impl<'a> InferenceTable<'a> { InferenceTable { db, trait_env, + atpit_coercion_table: None, var_unification_table: ChalkInferenceTable::new(), type_variable_table: SmallVec::new(), pending_obligations: Vec::new(), @@ -803,6 +807,7 @@ impl<'a> InferenceTable<'a> { .fill(|it| { let arg = match it { ParamKind::Type => self.new_type_var(), + ParamKind::Lifetime => unreachable!("Tuple with lifetime parameter"), ParamKind::Const(_) => unreachable!("Tuple with const parameter"), }; arg_tys.push(arg.clone()); @@ -857,11 +862,16 @@ impl<'a> InferenceTable<'a> { where T: HasInterner + TypeFoldable, { - fold_tys_and_consts( + fold_generic_args( ty, - |it, _| match it { - Either::Left(ty) => Either::Left(self.insert_type_vars_shallow(ty)), - Either::Right(c) => Either::Right(self.insert_const_vars_shallow(c)), + |arg, _| match arg { + GenericArgData::Ty(ty) => GenericArgData::Ty(self.insert_type_vars_shallow(ty)), + // FIXME: insert lifetime vars once LifetimeData::InferenceVar + // and specific error variant for lifetimes start being constructed + GenericArgData::Lifetime(lt) => GenericArgData::Lifetime(lt), + GenericArgData::Const(c) => { + GenericArgData::Const(self.insert_const_vars_shallow(c)) + } }, DebruijnIndex::INNERMOST, ) diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs index 9655981cc9ccd..dd949e26c2ab2 100644 --- a/crates/hir-ty/src/layout.rs +++ b/crates/hir-ty/src/layout.rs @@ -389,6 +389,9 @@ pub fn layout_of_ty_query( let infer = db.infer(func.into()); return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env); } + crate::ImplTraitId::AssociatedTypeImplTrait(..) => { + return Err(LayoutError::NotImplemented); + } crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => { return Err(LayoutError::NotImplemented) } diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index ec97bdc2c4343..ba64f5c8d7ea8 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -15,7 +15,8 @@ extern crate rustc_abi; #[cfg(not(feature = "in-rust-tree"))] extern crate ra_ap_rustc_abi as rustc_abi; -// No need to use the in-tree one. +// Use the crates.io version unconditionally until the API settles enough that we can switch to +// using the in-tree one. 
extern crate ra_ap_rustc_pattern_analysis as rustc_pattern_analysis; mod builder; @@ -89,8 +90,8 @@ pub use lower::{ }; pub use mapping::{ from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx, - lt_from_placeholder_idx, to_assoc_type_id, to_chalk_trait_id, to_foreign_def_id, - to_placeholder_idx, + lt_from_placeholder_idx, lt_to_placeholder_idx, to_assoc_type_id, to_chalk_trait_id, + to_foreign_def_id, to_placeholder_idx, }; pub use method_resolution::check_orphan_rules; pub use traits::TraitEnvironment; @@ -334,11 +335,23 @@ pub(crate) fn make_binders_with_count>( generics: &Generics, value: T, ) -> Binders { - let it = generics.iter_id().take(count).map(|id| match id { - Either::Left(_) => None, - Either::Right(id) => Some(db.const_param_ty(id)), - }); - crate::make_type_and_const_binders(it, value) + let it = generics.iter_id().take(count); + + Binders::new( + VariableKinds::from_iter( + Interner, + it.map(|x| match x { + hir_def::GenericParamId::ConstParamId(id) => { + chalk_ir::VariableKind::Const(db.const_param_ty(id)) + } + hir_def::GenericParamId::TypeParamId(_) => { + chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General) + } + hir_def::GenericParamId::LifetimeParamId(_) => chalk_ir::VariableKind::Lifetime, + }), + ), + value, + ) } pub(crate) fn make_binders>( @@ -584,29 +597,34 @@ impl TypeFoldable for CallableSig { #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] pub enum ImplTraitId { - ReturnTypeImplTrait(hir_def::FunctionId, RpitId), + ReturnTypeImplTrait(hir_def::FunctionId, ImplTraitIdx), + AssociatedTypeImplTrait(hir_def::TypeAliasId, ImplTraitIdx), AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId), } impl_intern_value_trivial!(ImplTraitId); #[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct ReturnTypeImplTraits { - pub(crate) impl_traits: Arena, +pub struct ImplTraits { + pub(crate) impl_traits: Arena, } -has_interner!(ReturnTypeImplTraits); +has_interner!(ImplTraits); #[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct ReturnTypeImplTrait { +pub struct ImplTrait { pub(crate) bounds: Binders>, } -pub type RpitId = Idx; +pub type ImplTraitIdx = Idx; pub fn static_lifetime() -> Lifetime { LifetimeData::Static.intern(Interner) } +pub fn error_lifetime() -> Lifetime { + static_lifetime() +} + pub(crate) fn fold_free_vars + TypeFoldable>( t: T, for_ty: impl FnMut(BoundVar, DebruijnIndex) -> Ty, @@ -696,6 +714,55 @@ pub(crate) fn fold_tys_and_consts + TypeFold t.fold_with(&mut TyFolder(f), binders) } +pub(crate) fn fold_generic_args + TypeFoldable>( + t: T, + f: impl FnMut(GenericArgData, DebruijnIndex) -> GenericArgData, + binders: DebruijnIndex, +) -> T { + use chalk_ir::fold::{TypeFolder, TypeSuperFoldable}; + #[derive(chalk_derive::FallibleTypeFolder)] + #[has_interner(Interner)] + struct TyFolder GenericArgData>(F); + impl GenericArgData> TypeFolder + for TyFolder + { + fn as_dyn(&mut self) -> &mut dyn TypeFolder { + self + } + + fn interner(&self) -> Interner { + Interner + } + + fn fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Ty { + let ty = ty.super_fold_with(self.as_dyn(), outer_binder); + self.0(GenericArgData::Ty(ty), outer_binder) + .intern(Interner) + .ty(Interner) + .unwrap() + .clone() + } + + fn fold_const(&mut self, c: Const, outer_binder: DebruijnIndex) -> Const { + self.0(GenericArgData::Const(c), outer_binder) + .intern(Interner) + .constant(Interner) + .unwrap() + .clone() + } + + fn fold_lifetime(&mut self, lt: Lifetime, outer_binder: DebruijnIndex) -> Lifetime { + let lt = 
lt.super_fold_with(self.as_dyn(), outer_binder); + self.0(GenericArgData::Lifetime(lt), outer_binder) + .intern(Interner) + .lifetime(Interner) + .unwrap() + .clone() + } + } + t.fold_with(&mut TyFolder(f), binders) +} + /// 'Canonicalizes' the `t` by replacing any errors with new variables. Also /// ensures there are no unbound variables or inference variables anywhere in /// the `t`. diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index dac20f2259717..25ccc84c13c85 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -24,17 +24,20 @@ use hir_def::{ data::adt::StructKind, expander::Expander, generics::{ - TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget, + GenericParamDataRef, TypeOrConstParamData, TypeParamProvenance, WherePredicate, + WherePredicateTypeTarget, }, lang_item::LangItem, nameres::MacroSubNs, path::{GenericArg, GenericArgs, ModPath, Path, PathKind, PathSegment, PathSegments}, - resolver::{HasResolver, Resolver, TypeNs}, - type_ref::{ConstRef, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef}, + resolver::{HasResolver, LifetimeNs, Resolver, TypeNs}, + type_ref::{ + ConstRef, LifetimeRef, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef, + }, AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, - GenericDefId, HasModule, ImplId, InTypeConstLoc, ItemContainerId, LocalFieldId, Lookup, - ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeOwnerId, - TypeParamId, UnionId, VariantId, + GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstLoc, ItemContainerId, LocalFieldId, + Lookup, ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeOwnerId, + UnionId, VariantId, }; use hir_expand::{name::Name, ExpandResult}; use intern::Interned; @@ -52,18 +55,18 @@ use crate::{ unknown_const_as_generic, }, db::HirDatabase, - make_binders, - mapping::{from_chalk_trait_id, ToChalk}, + error_lifetime, make_binders, + mapping::{from_chalk_trait_id, lt_to_placeholder_idx, ToChalk}, static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx, - utils::Generics, utils::{ - all_super_trait_refs, associated_type_by_name_including_super_traits, generics, + all_super_trait_refs, associated_type_by_name_including_super_traits, generics, Generics, InTypeConstIdMetadata, }, AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy, - FnAbi, FnPointer, FnSig, FnSubst, ImplTraitId, Interner, ParamKind, PolyFnSig, ProjectionTy, - QuantifiedWhereClause, QuantifiedWhereClauses, ReturnTypeImplTrait, ReturnTypeImplTraits, - Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyKind, WhereClause, + FnAbi, FnPointer, FnSig, FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime, + LifetimeData, ParamKind, PolyFnSig, ProjectionTy, QuantifiedWhereClause, + QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, + TyKind, WhereClause, }; #[derive(Debug)] @@ -76,7 +79,7 @@ enum ImplTraitLoweringState { /// we're grouping the mutable data (the counter and this field) together /// with the immutable context (the references to the DB and resolver). /// Splitting this up would be a possible fix. 
- Opaque(RefCell>), + Opaque(RefCell>), Param(Cell), Variable(Cell), Disallowed, @@ -275,9 +278,11 @@ impl<'a> TyLoweringContext<'a> { let inner_ty = self.lower_ty(inner); TyKind::Slice(inner_ty).intern(Interner) } - TypeRef::Reference(inner, _, mutability) => { + TypeRef::Reference(inner, lifetime, mutability) => { let inner_ty = self.lower_ty(inner); - let lifetime = static_lifetime(); + // FIXME: It should infer the eldided lifetimes instead of stubbing with static + let lifetime = + lifetime.as_ref().map_or_else(static_lifetime, |lr| self.lower_lifetime(lr)); TyKind::Ref(lower_to_chalk_mutability(*mutability), lifetime, inner_ty) .intern(Interner) } @@ -301,15 +306,18 @@ impl<'a> TyLoweringContext<'a> { TypeRef::ImplTrait(bounds) => { match &self.impl_trait_mode { ImplTraitLoweringState::Opaque(opaque_type_data) => { - let func = match self.resolver.generic_def() { - Some(GenericDefId::FunctionId(f)) => f, - _ => panic!("opaque impl trait lowering in non-function"), + let origin = match self.resolver.generic_def() { + Some(GenericDefId::FunctionId(it)) => Either::Left(it), + Some(GenericDefId::TypeAliasId(it)) => Either::Right(it), + _ => panic!( + "opaque impl trait lowering must be in function or type alias" + ), }; // this dance is to make sure the data is in the right // place even if we encounter more opaque types while // lowering the bounds - let idx = opaque_type_data.borrow_mut().alloc(ReturnTypeImplTrait { + let idx = opaque_type_data.borrow_mut().alloc(ImplTrait { bounds: crate::make_single_type_binders(Vec::new()), }); // We don't want to lower the bounds inside the binders @@ -323,13 +331,17 @@ impl<'a> TyLoweringContext<'a> { // away instead of two. let actual_opaque_type_data = self .with_debruijn(DebruijnIndex::INNERMOST, |ctx| { - ctx.lower_impl_trait(bounds, func) + ctx.lower_impl_trait(bounds, self.resolver.krate()) }); opaque_type_data.borrow_mut()[idx] = actual_opaque_type_data; - let impl_trait_id = ImplTraitId::ReturnTypeImplTrait(func, idx); + let impl_trait_id = origin.either( + |f| ImplTraitId::ReturnTypeImplTrait(f, idx), + |a| ImplTraitId::AssociatedTypeImplTrait(a, idx), + ); let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into(); - let generics = generics(self.db.upcast(), func.into()); + let generics = + generics(self.db.upcast(), origin.either(|f| f.into(), |a| a.into())); let parameters = generics.bound_vars_subst(self.db, self.in_binders); TyKind::OpaqueType(opaque_ty_id, parameters).intern(Interner) } @@ -344,13 +356,18 @@ impl<'a> TyLoweringContext<'a> { .filter(|(_, data)| { matches!( data, - TypeOrConstParamData::TypeParamData(data) + GenericParamDataRef::TypeParamData(data) if data.provenance == TypeParamProvenance::ArgumentImplTrait ) }) .nth(idx as usize) .map_or(TyKind::Error, |(id, _)| { - TyKind::Placeholder(to_placeholder_idx(self.db, id)) + if let GenericParamId::TypeParamId(id) = id { + TyKind::Placeholder(to_placeholder_idx(self.db, id.into())) + } else { + // we just filtered them out + unreachable!("Unexpected lifetime or const argument"); + } }); param.intern(Interner) } else { @@ -367,11 +384,12 @@ impl<'a> TyLoweringContext<'a> { list_params, const_params, _impl_trait_params, + _lifetime_params, ) = if let Some(def) = self.resolver.generic_def() { let generics = generics(self.db.upcast(), def); generics.provenance_split() } else { - (0, 0, 0, 0, 0) + (0, 0, 0, 0, 0, 0) }; TyKind::BoundVar(BoundVar::new( self.in_binders, @@ -808,9 +826,16 @@ impl<'a> TyLoweringContext<'a> { return Substitution::empty(Interner); }; let 
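`TypeRef::Reference` lowering now uses the lifetime that was actually written, falling back to `'static` only for elided lifetimes (per the FIXME in the hunk above). In source terms:

struct Wrapper<'a> {
    text: &'a str,
}

fn text_of<'a>(w: &Wrapper<'a>) -> &'a str {
    // The `&'a str` field above now lowers with the written `'a`
    // instead of being stubbed out as `'static`.
    w.text
}

fn main() {
    let s = String::from("hi");
    let w = Wrapper { text: &s };
    assert_eq!(text_of(&w), "hi");
}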
def_generics = generics(self.db.upcast(), def); - let (parent_params, self_params, type_params, const_params, impl_trait_params) = - def_generics.provenance_split(); - let item_len = self_params + type_params + const_params + impl_trait_params; + let ( + parent_params, + self_params, + type_params, + const_params, + impl_trait_params, + lifetime_params, + ) = def_generics.provenance_split(); + let item_len = + self_params + type_params + const_params + impl_trait_params + lifetime_params; let total_len = parent_params + item_len; let ty_error = TyKind::Error.intern(Interner).cast(Interner); @@ -825,7 +850,10 @@ impl<'a> TyLoweringContext<'a> { .take(self_params) { if let Some(id) = def_generic_iter.next() { - assert!(id.is_left()); + assert!(matches!( + id, + GenericParamId::TypeParamId(_) | GenericParamId::LifetimeParamId(_) + )); substs.push(x); } } @@ -858,6 +886,7 @@ impl<'a> TyLoweringContext<'a> { &mut (), |_, type_ref| self.lower_ty(type_ref), |_, const_ref, ty| self.lower_const(const_ref, ty), + |_, lifetime_ref| self.lower_lifetime(lifetime_ref), ) { had_explicit_args = true; substs.push(x); @@ -867,15 +896,45 @@ impl<'a> TyLoweringContext<'a> { } } } + + for arg in generic_args + .args + .iter() + .filter(|arg| matches!(arg, GenericArg::Lifetime(_))) + .take(lifetime_params) + { + // Taking into the fact that def_generic_iter will always have lifetimes at the end + // Should have some test cases tho to test this behaviour more properly + if let Some(id) = def_generic_iter.next() { + if let Some(x) = generic_arg_to_chalk( + self.db, + id, + arg, + &mut (), + |_, type_ref| self.lower_ty(type_ref), + |_, const_ref, ty| self.lower_const(const_ref, ty), + |_, lifetime_ref| self.lower_lifetime(lifetime_ref), + ) { + had_explicit_args = true; + substs.push(x); + } else { + // Never return a None explicitly + never!("Unexpected None by generic_arg_to_chalk"); + } + } + } } else { fill_self_params(); } // These params include those of parent. 
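// The two-pass filling above relies on the parameter iterator yielding
// lifetime parameters only after all type/const parameters. A self-contained
// sketch of that assumption; `Param` and `Arg` are illustrative stand-ins,
// not the real hir-def types.
#[derive(Debug, PartialEq)]
enum Param { TypeOrConst, Lifetime }

#[derive(Debug, PartialEq, Clone, Copy)]
enum Arg { TypeOrConst(&'static str), Lifetime(&'static str) }

fn match_args(params: &[Param], args: &[Arg]) -> Vec<(usize, Arg)> {
    let mut out = Vec::new();
    let mut params_iter = params.iter().enumerate();
    // first pass: type and const arguments, in source order
    for &arg in args.iter().filter(|a| matches!(a, Arg::TypeOrConst(_))) {
        if let Some((idx, _)) = params_iter.next() {
            out.push((idx, arg));
        }
    }
    // second pass: lifetime arguments, matched against the trailing lifetime params
    for &arg in args.iter().filter(|a| matches!(a, Arg::Lifetime(_))) {
        if let Some((idx, _)) = params_iter.next() {
            out.push((idx, arg));
        }
    }
    out
}

fn main() {
    let params = [Param::TypeOrConst, Param::TypeOrConst, Param::Lifetime];
    let args = [Arg::Lifetime("'a"), Arg::TypeOrConst("T"), Arg::TypeOrConst("N")];
    assert_eq!(
        match_args(&params, &args),
        [(0, Arg::TypeOrConst("T")), (1, Arg::TypeOrConst("N")), (2, Arg::Lifetime("'a"))]
    );
}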
let remaining_params: SmallVec<[_; 2]> = def_generic_iter - .map(|eid| match eid { - Either::Left(_) => ty_error.clone(), - Either::Right(x) => unknown_const_as_generic(self.db.const_param_ty(x)), + .map(|id| match id { + GenericParamId::ConstParamId(x) => { + unknown_const_as_generic(self.db.const_param_ty(x)) + } + GenericParamId::TypeParamId(_) => ty_error.clone(), + GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner), }) .collect(); assert_eq!(remaining_params.len() + substs.len(), total_len); @@ -1107,8 +1166,12 @@ impl<'a> TyLoweringContext<'a> { binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(), ); if let Some(type_ref) = &binding.type_ref { - if let (TypeRef::ImplTrait(bounds), ImplTraitLoweringState::Disallowed) = - (type_ref, &self.impl_trait_mode) + if let ( + TypeRef::ImplTrait(bounds), + ImplTraitLoweringState::Param(_) + | ImplTraitLoweringState::Variable(_) + | ImplTraitLoweringState::Disallowed, + ) = (type_ref, &self.impl_trait_mode) { for bound in bounds { predicates.extend( @@ -1270,11 +1333,7 @@ impl<'a> TyLoweringContext<'a> { } } - fn lower_impl_trait( - &self, - bounds: &[Interned], - func: FunctionId, - ) -> ReturnTypeImplTrait { + fn lower_impl_trait(&self, bounds: &[Interned], krate: CrateId) -> ImplTrait { cov_mark::hit!(lower_rpit); let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner); let predicates = self.with_shifted_in(DebruijnIndex::ONE, |ctx| { @@ -1284,7 +1343,6 @@ impl<'a> TyLoweringContext<'a> { .collect(); if !ctx.unsized_types.borrow().contains(&self_ty) { - let krate = func.krate(ctx.db.upcast()); let sized_trait = ctx .db .lang_item(krate, LangItem::Sized) @@ -1301,7 +1359,34 @@ impl<'a> TyLoweringContext<'a> { } predicates }); - ReturnTypeImplTrait { bounds: crate::make_single_type_binders(predicates) } + ImplTrait { bounds: crate::make_single_type_binders(predicates) } + } + + pub fn lower_lifetime(&self, lifetime: &LifetimeRef) -> Lifetime { + match self.resolver.resolve_lifetime(lifetime) { + Some(resolution) => match resolution { + LifetimeNs::Static => static_lifetime(), + LifetimeNs::LifetimeParam(id) => match self.type_param_mode { + ParamLoweringMode::Placeholder => { + LifetimeData::Placeholder(lt_to_placeholder_idx(self.db, id)) + } + ParamLoweringMode::Variable => { + let generics = generics( + self.db.upcast(), + self.resolver.generic_def().expect("generics in scope"), + ); + let idx = match generics.lifetime_idx(id) { + None => return error_lifetime(), + Some(idx) => idx, + }; + + LifetimeData::BoundVar(BoundVar::new(self.in_binders, idx)) + } + } + .intern(Interner), + }, + None => error_lifetime(), + } } } @@ -1685,7 +1770,7 @@ pub(crate) fn generic_defaults_query( let defaults = Arc::from_iter(generic_params.iter().enumerate().map(|(idx, (id, p))| { match p { - TypeOrConstParamData::TypeParamData(p) => { + GenericParamDataRef::TypeParamData(p) => { let mut ty = p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t)); // Each default can only refer to previous parameters. 
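// Sketch of the invariant stated above: the default of parameter `i` may only
// mention parameters `0..i`; anything else gets replaced with an error
// placeholder (the job the nearby `fallback_bound_vars` calls do for bound
// variables). `ParamRef` is purely illustrative, not a rust-analyzer type.
#[derive(Debug, PartialEq)]
enum ParamRef {
    Ok(usize), // reference to an earlier parameter
    Error,     // self/forward reference, not allowed in a default
}

fn sanitize_default(own_index: usize, referenced: &[usize]) -> Vec<ParamRef> {
    referenced
        .iter()
        .map(|&r| if r < own_index { ParamRef::Ok(r) } else { ParamRef::Error })
        .collect()
}

fn main() {
    // e.g. `struct S<T, U = T>`: the default of `U` (index 1) may use `T` (index 0),
    // but a default mentioning `U` itself or a later parameter is rejected.
    assert_eq!(sanitize_default(1, &[0]), vec![ParamRef::Ok(0)]);
    assert_eq!(sanitize_default(1, &[1]), vec![ParamRef::Error]);
}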
@@ -1694,13 +1779,13 @@ pub(crate) fn generic_defaults_query( ty = fallback_bound_vars(ty, idx, parent_start_idx); crate::make_binders(db, &generic_params, ty.cast(Interner)) } - TypeOrConstParamData::ConstParamData(p) => { + GenericParamDataRef::ConstParamData(p) => { + let GenericParamId::ConstParamId(id) = id else { + unreachable!("Unexpected lifetime or type argument") + }; + let mut val = p.default.as_ref().map_or_else( - || { - unknown_const_as_generic( - db.const_param_ty(ConstParamId::from_unchecked(id)), - ) - }, + || unknown_const_as_generic(db.const_param_ty(id)), |c| { let c = ctx.lower_const(c, ctx.lower_ty(&p.ty)); c.cast(Interner) @@ -1710,6 +1795,10 @@ pub(crate) fn generic_defaults_query( val = fallback_bound_vars(val, idx, parent_start_idx); make_binders(db, &generic_params, val) } + GenericParamDataRef::LifetimeParamData(_) => { + // using static because it requires defaults + make_binders(db, &generic_params, static_lifetime().cast(Interner)) + } } })); @@ -1726,8 +1815,9 @@ pub(crate) fn generic_defaults_recover( // we still need one default per parameter let defaults = Arc::from_iter(generic_params.iter_id().map(|id| { let val = match id { - Either::Left(_) => TyKind::Error.intern(Interner).cast(Interner), - Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)), + GenericParamId::TypeParamId(_) => TyKind::Error.intern(Interner).cast(Interner), + GenericParamId::ConstParamId(id) => unknown_const_as_generic(db.const_param_ty(id)), + GenericParamId::LifetimeParamId(_) => static_lifetime().cast(Interner), }; crate::make_binders(db, &generic_params, val) })); @@ -1869,6 +1959,7 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders { let generics = generics(db.upcast(), t.into()); let resolver = t.resolver(db.upcast()); let ctx = TyLoweringContext::new(db, &resolver, t.into()) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) .with_type_param_mode(ParamLoweringMode::Variable); let type_alias_data = db.type_alias_data(t); if type_alias_data.is_extern { @@ -2029,7 +2120,7 @@ pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option< pub(crate) fn return_type_impl_traits( db: &dyn HirDatabase, def: hir_def::FunctionId, -) -> Option>> { +) -> Option>> { // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe let data = db.function_data(def); let resolver = def.resolver(db.upcast()); @@ -2038,7 +2129,7 @@ pub(crate) fn return_type_impl_traits( .with_type_param_mode(ParamLoweringMode::Variable); let _ret = ctx_ret.lower_ty(&data.ret_type); let generics = generics(db.upcast(), def.into()); - let return_type_impl_traits = ReturnTypeImplTraits { + let return_type_impl_traits = ImplTraits { impl_traits: match ctx_ret.impl_trait_mode { ImplTraitLoweringState::Opaque(x) => x.into_inner(), _ => unreachable!(), @@ -2051,6 +2142,32 @@ pub(crate) fn return_type_impl_traits( } } +pub(crate) fn type_alias_impl_traits( + db: &dyn HirDatabase, + def: hir_def::TypeAliasId, +) -> Option>> { + let data = db.type_alias_data(def); + let resolver = def.resolver(db.upcast()); + let ctx = TyLoweringContext::new(db, &resolver, def.into()) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) + .with_type_param_mode(ParamLoweringMode::Variable); + if let Some(type_ref) = &data.type_ref { + let _ty = ctx.lower_ty(type_ref); + } + let generics = generics(db.upcast(), def.into()); + let type_alias_impl_traits = ImplTraits { + impl_traits: match ctx.impl_trait_mode { + ImplTraitLoweringState::Opaque(x) => x.into_inner(), + _ => 
unreachable!(), + }, + }; + if type_alias_impl_traits.impl_traits.is_empty() { + None + } else { + Some(Arc::new(make_binders(db, &generics, type_alias_impl_traits))) + } +} + pub(crate) fn lower_to_chalk_mutability(m: hir_def::type_ref::Mutability) -> Mutability { match m { hir_def::type_ref::Mutability::Shared => Mutability::Not, @@ -2064,23 +2181,29 @@ pub(crate) fn lower_to_chalk_mutability(m: hir_def::type_ref::Mutability) -> Mut /// Returns `Some` of the lowered generic arg. `None` if the provided arg is a lifetime. pub(crate) fn generic_arg_to_chalk<'a, T>( db: &dyn HirDatabase, - kind_id: Either, + kind_id: GenericParamId, arg: &'a GenericArg, this: &mut T, for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a, for_const: impl FnOnce(&mut T, &ConstRef, Ty) -> Const + 'a, + for_lifetime: impl FnOnce(&mut T, &LifetimeRef) -> Lifetime + 'a, ) -> Option { let kind = match kind_id { - Either::Left(_) => ParamKind::Type, - Either::Right(id) => { + GenericParamId::TypeParamId(_) => ParamKind::Type, + GenericParamId::ConstParamId(id) => { let ty = db.const_param_ty(id); ParamKind::Const(ty) } + GenericParamId::LifetimeParamId(_) => ParamKind::Lifetime, }; Some(match (arg, kind) { (GenericArg::Type(type_ref), ParamKind::Type) => for_type(this, type_ref).cast(Interner), (GenericArg::Const(c), ParamKind::Const(c_ty)) => for_const(this, c, c_ty).cast(Interner), + (GenericArg::Lifetime(lifetime_ref), ParamKind::Lifetime) => { + for_lifetime(this, lifetime_ref).cast(Interner) + } (GenericArg::Const(_), ParamKind::Type) => TyKind::Error.intern(Interner).cast(Interner), + (GenericArg::Lifetime(_), ParamKind::Type) => TyKind::Error.intern(Interner).cast(Interner), (GenericArg::Type(t), ParamKind::Const(c_ty)) => { // We want to recover simple idents, which parser detects them // as types. Maybe here is not the best place to do it, but @@ -2096,7 +2219,9 @@ pub(crate) fn generic_arg_to_chalk<'a, T>( } unknown_const_as_generic(c_ty) } - (GenericArg::Lifetime(_), _) => return None, + (GenericArg::Lifetime(_), ParamKind::Const(c_ty)) => unknown_const_as_generic(c_ty), + (GenericArg::Type(_), ParamKind::Lifetime) => error_lifetime().cast(Interner), + (GenericArg::Const(_), ParamKind::Lifetime) => error_lifetime().cast(Interner), }) } diff --git a/crates/hir-ty/src/mapping.rs b/crates/hir-ty/src/mapping.rs index fba760974f24b..c61d82771429a 100644 --- a/crates/hir-ty/src/mapping.rs +++ b/crates/hir-ty/src/mapping.rs @@ -151,6 +151,14 @@ pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> L db.lookup_intern_lifetime_param_id(interned_id) } +pub fn lt_to_placeholder_idx(db: &dyn HirDatabase, id: LifetimeParamId) -> PlaceholderIndex { + let interned_id = db.intern_lifetime_param_id(id); + PlaceholderIndex { + ui: chalk_ir::UniverseIndex::ROOT, + idx: salsa::InternKey::as_intern_id(&interned_id).as_usize(), + } +} + pub fn to_chalk_trait_id(id: TraitId) -> ChalkTraitId { chalk_ir::TraitId(salsa::InternKey::as_intern_id(&id)) } diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index a679a114b4b93..73b07df56f7ea 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -4,7 +4,7 @@ //! and the corresponding code mostly in rustc_hir_analysis/check/method/probe.rs. 
use std::ops::ControlFlow; -use base_db::{CrateId, Edition}; +use base_db::CrateId; use chalk_ir::{cast::Cast, Mutability, TyKind, UniverseIndex, WhereClause}; use hir_def::{ data::{adt::StructFlags, ImplData}, @@ -15,6 +15,7 @@ use hir_def::{ use hir_expand::name::Name; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; +use span::Edition; use stdx::never; use triomphe::Arc; @@ -643,7 +644,7 @@ pub fn is_dyn_method( let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else { return None; }; - let trait_params = db.generic_params(trait_id.into()).type_or_consts.len(); + let trait_params = db.generic_params(trait_id.into()).len(); let fn_params = fn_subst.len(Interner) - trait_params; let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), @@ -685,7 +686,7 @@ pub(crate) fn lookup_impl_method_query( let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else { return (func, fn_subst); }; - let trait_params = db.generic_params(trait_id.into()).type_or_consts.len(); + let trait_params = db.generic_params(trait_id.into()).len(); let fn_params = fn_subst.len(Interner) - trait_params; let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), @@ -966,7 +967,7 @@ pub fn iterate_method_candidates_dyn( // the methods by autoderef order of *receiver types*, not *self // types*. - let mut table = InferenceTable::new(db, env.clone()); + let mut table = InferenceTable::new(db, env); let ty = table.instantiate_canonical(ty.clone()); let deref_chain = autoderef_method_receiver(&mut table, ty); @@ -1044,7 +1045,7 @@ fn iterate_method_candidates_with_autoref( let ref_muted = Canonical { value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone()) .intern(Interner), - binders: receiver_ty.binders.clone(), + binders: receiver_ty.binders, }; iterate_method_candidates_by_receiver(ref_muted, first_adjustment.with_autoref(Mutability::Mut)) @@ -1060,7 +1061,7 @@ fn iterate_method_candidates_by_receiver( name: Option<&Name>, mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { - let receiver_ty = table.instantiate_canonical(receiver_ty.clone()); + let receiver_ty = table.instantiate_canonical(receiver_ty); // We're looking for methods with *receiver* type receiver_ty. These could // be found in any of the derefs of receiver_ty, so we have to go through // that, including raw derefs. 
@@ -1456,7 +1457,7 @@ fn is_valid_trait_method_candidate( if let Some(receiver_ty) = receiver_ty { check_that!(data.has_self_param()); - let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst.clone())) + let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst)) .fill_with_inference_vars(table) .build(); diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index fd98141af63e6..045ffb418c8de 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -1931,7 +1931,11 @@ impl Evaluator<'_> { ty: &Ty, locals: &Locals, mm: &mut ComplexMemoryMap, + stack_depth_limit: usize, ) -> Result<()> { + if stack_depth_limit.checked_sub(1).is_none() { + return Err(MirEvalError::StackOverflow); + } match ty.kind(Interner) { TyKind::Ref(_, _, t) => { let size = this.size_align_of(t, locals)?; @@ -1970,7 +1974,14 @@ impl Evaluator<'_> { if let Some(ty) = check_inner { for i in 0..count { let offset = element_size * i; - rec(this, &b[offset..offset + element_size], ty, locals, mm)?; + rec( + this, + &b[offset..offset + element_size], + ty, + locals, + mm, + stack_depth_limit - 1, + )?; } } } @@ -1984,7 +1995,14 @@ impl Evaluator<'_> { let size = this.size_of_sized(inner, locals, "inner of array")?; for i in 0..len { let offset = i * size; - rec(this, &bytes[offset..offset + size], inner, locals, mm)?; + rec( + this, + &bytes[offset..offset + size], + inner, + locals, + mm, + stack_depth_limit - 1, + )?; } } chalk_ir::TyKind::Tuple(_, subst) => { @@ -1993,7 +2011,14 @@ impl Evaluator<'_> { let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument let offset = layout.fields.offset(id).bytes_usize(); let size = this.layout(ty)?.size.bytes_usize(); - rec(this, &bytes[offset..offset + size], ty, locals, mm)?; + rec( + this, + &bytes[offset..offset + size], + ty, + locals, + mm, + stack_depth_limit - 1, + )?; } } chalk_ir::TyKind::Adt(adt, subst) => match adt.0 { @@ -2008,7 +2033,14 @@ impl Evaluator<'_> { .bytes_usize(); let ty = &field_types[f].clone().substitute(Interner, subst); let size = this.layout(ty)?.size.bytes_usize(); - rec(this, &bytes[offset..offset + size], ty, locals, mm)?; + rec( + this, + &bytes[offset..offset + size], + ty, + locals, + mm, + stack_depth_limit - 1, + )?; } } AdtId::EnumId(e) => { @@ -2027,7 +2059,14 @@ impl Evaluator<'_> { l.fields.offset(u32::from(f.into_raw()) as usize).bytes_usize(); let ty = &field_types[f].clone().substitute(Interner, subst); let size = this.layout(ty)?.size.bytes_usize(); - rec(this, &bytes[offset..offset + size], ty, locals, mm)?; + rec( + this, + &bytes[offset..offset + size], + ty, + locals, + mm, + stack_depth_limit - 1, + )?; } } } @@ -2038,7 +2077,7 @@ impl Evaluator<'_> { Ok(()) } let mut mm = ComplexMemoryMap::default(); - rec(self, bytes, ty, locals, &mut mm)?; + rec(self, bytes, ty, locals, &mut mm, self.stack_depth_limit - 1)?; Ok(mm) } @@ -2317,7 +2356,7 @@ impl Evaluator<'_> { fn exec_fn_with_args( &mut self, - def: FunctionId, + mut def: FunctionId, args: &[IntervalAndTy], generic_args: Substitution, locals: &Locals, @@ -2335,6 +2374,9 @@ impl Evaluator<'_> { )? { return Ok(None); } + if let Some(redirect_def) = self.detect_and_redirect_special_function(def)? { + def = redirect_def; + } let arg_bytes = args.iter().map(|it| IntervalOrOwned::Borrowed(it.interval)); match self.get_mir_or_dyn_index(def, generic_args.clone(), locals, span)? 
{ MirOrDynIndex::Dyn(self_ty_idx) => { diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs index 628a1fe2d2838..d4d669182f2e3 100644 --- a/crates/hir-ty/src/mir/eval/shim.rs +++ b/crates/hir-ty/src/mir/eval/shim.rs @@ -13,7 +13,7 @@ use crate::mir::eval::{ name, pad16, static_lifetime, Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId, HasModule, HirDisplay, Interned, InternedClosure, Interner, Interval, IntervalAndTy, IntervalOrOwned, ItemContainerId, LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, - ModPath, Mutability, Result, Substitution, Ty, TyBuilder, TyExt, + Mutability, Result, Substitution, Ty, TyBuilder, TyExt, }; mod simd; @@ -158,6 +158,25 @@ impl Evaluator<'_> { Ok(false) } + pub(super) fn detect_and_redirect_special_function( + &mut self, + def: FunctionId, + ) -> Result> { + // `PanicFmt` is redirected to `ConstPanicFmt` + if let Some(LangItem::PanicFmt) = self.db.lang_attr(def.into()) { + let resolver = + self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db.upcast()); + + let Some(hir_def::lang_item::LangItemTarget::Function(const_panic_fmt)) = + self.db.lang_item(resolver.krate(), LangItem::ConstPanicFmt) + else { + not_supported!("const_panic_fmt lang item not found or not a function"); + }; + return Ok(Some(const_panic_fmt)); + } + Ok(None) + } + /// Clone has special impls for tuples and function pointers fn exec_clone( &mut self, @@ -291,9 +310,14 @@ impl Evaluator<'_> { use LangItem::*; let candidate = self.db.lang_attr(def.into())?; // We want to execute these functions with special logic - if [PanicFmt, BeginPanic, SliceLen, DropInPlace].contains(&candidate) { + // `PanicFmt` is not detected here as it's redirected later. + if [BeginPanic, SliceLen, DropInPlace].contains(&candidate) { return Some(candidate); } + if self.db.attrs(def.into()).by_key("rustc_const_panic_str").exists() { + // `#[rustc_const_panic_str]` is treated like `lang = "begin_panic"` by rustc CTFE. + return Some(LangItem::BeginPanic); + } None } @@ -309,43 +333,6 @@ impl Evaluator<'_> { let mut args = args.iter(); match it { BeginPanic => Err(MirEvalError::Panic("".to_owned())), - PanicFmt => { - let message = (|| { - let resolver = self - .db - .crate_def_map(self.crate_id) - .crate_root() - .resolver(self.db.upcast()); - let Some(format_fn) = resolver.resolve_path_in_value_ns_fully( - self.db.upcast(), - &hir_def::path::Path::from_known_path_with_no_generic( - ModPath::from_segments( - hir_expand::mod_path::PathKind::Abs, - [name![std], name![fmt], name![format]], - ), - ), - ) else { - not_supported!("std::fmt::format not found"); - }; - let hir_def::resolver::ValueNs::FunctionId(format_fn) = format_fn else { - not_supported!("std::fmt::format is not a function") - }; - let interval = self.interpret_mir( - self.db - .mir_body(format_fn.into()) - .map_err(|e| MirEvalError::MirLowerError(format_fn, e))?, - args.map(|x| IntervalOrOwned::Owned(x.clone())), - )?; - let message_string = interval.get(self)?; - let addr = - Address::from_bytes(&message_string[self.ptr_size()..2 * self.ptr_size()])?; - let size = from_bytes!(usize, message_string[2 * self.ptr_size()..]); - Ok(std::string::String::from_utf8_lossy(self.read_memory(addr, size)?) 
- .into_owned()) - })() - .unwrap_or_else(|e| format!("Failed to render panic format args: {e:?}")); - Err(MirEvalError::Panic(message)) - } SliceLen => { let arg = args.next().ok_or(MirEvalError::InternalError( "argument of <[T]>::len() is not provided".into(), diff --git a/crates/hir-ty/src/mir/monomorphization.rs b/crates/hir-ty/src/mir/monomorphization.rs index d2e413f0a33a4..d6557c3a8160a 100644 --- a/crates/hir-ty/src/mir/monomorphization.rs +++ b/crates/hir-ty/src/mir/monomorphization.rs @@ -82,6 +82,9 @@ impl FallibleTypeFolder for Filler<'_> { }; filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder) } + crate::ImplTraitId::AssociatedTypeImplTrait(..) => { + not_supported!("associated type impl trait"); + } crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => { not_supported!("async block impl trait"); } @@ -181,8 +184,16 @@ impl Filler<'_> { self.generics .as_ref() .and_then(|it| it.iter().nth(b.index)) - .unwrap() - .0, + .and_then(|(id, _)| match id { + hir_def::GenericParamId::ConstParamId(id) => { + Some(hir_def::TypeOrConstParamId::from(id)) + } + hir_def::GenericParamId::TypeParamId(id) => { + Some(hir_def::TypeOrConstParamId::from(id)) + } + _ => None, + }) + .unwrap(), self.subst.clone(), ) })? diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs index d699067b5a645..2a46becbfda95 100644 --- a/crates/hir-ty/src/tests.rs +++ b/crates/hir-ty/src/tests.rs @@ -298,7 +298,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { if let Some(syntax_ptr) = body_source_map.self_param_syntax() { let root = db.parse_or_expand(syntax_ptr.file_id); let node = syntax_ptr.map(|ptr| ptr.to_node(&root).syntax().clone()); - types.push((node.clone(), ty)); + types.push((node, ty)); } } diff --git a/crates/hir-ty/src/tests/display_source_code.rs b/crates/hir-ty/src/tests/display_source_code.rs index e75b037e38d3e..506926749960e 100644 --- a/crates/hir-ty/src/tests/display_source_code.rs +++ b/crates/hir-ty/src/tests/display_source_code.rs @@ -85,7 +85,7 @@ fn render_dyn_for_ty() { trait Foo<'a> {} fn foo(foo: &dyn for<'a> Foo<'a>) {} - // ^^^ &dyn Foo + // ^^^ &dyn Foo<'static> "#, ); } diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs index 963b4a2aba05f..80d5a0ae001b0 100644 --- a/crates/hir-ty/src/tests/patterns.rs +++ b/crates/hir-ty/src/tests/patterns.rs @@ -1109,7 +1109,7 @@ fn var_args() { #[lang = "va_list"] pub struct VaListImpl<'f>; fn my_fn(foo: ...) 
{} - //^^^ VaListImpl<'_> + //^^^ VaListImpl<'static> "#, ); } diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs index 9a8ebd07d0159..8565b60210ba7 100644 --- a/crates/hir-ty/src/tests/regression.rs +++ b/crates/hir-ty/src/tests/regression.rs @@ -896,13 +896,13 @@ fn flush(&self) { "#, expect![[r#" 123..127 'self': &Mutex - 150..152 '{}': MutexGuard<'_, T> + 150..152 '{}': MutexGuard<'static, T> 234..238 'self': &{unknown} 240..290 '{ ...()); }': () 250..251 'w': &Mutex 276..287 '*(w.lock())': BufWriter 278..279 'w': &Mutex - 278..286 'w.lock()': MutexGuard<'_, BufWriter> + 278..286 'w.lock()': MutexGuard<'static, BufWriter> "#]], ); } diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index 917e9f440852d..f39404593e5d8 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs @@ -3092,7 +3092,7 @@ fn main() { 389..394 'boxed': Box> 389..406 'boxed....nner()': &i32 416..421 'good1': &i32 - 424..438 'Foo::get_inner': fn get_inner(&Box>) -> &i32 + 424..438 'Foo::get_inner': fn get_inner(&Box>) -> &i32 424..446 'Foo::g...boxed)': &i32 439..445 '&boxed': &Box> 440..445 'boxed': Box> @@ -3100,7 +3100,7 @@ fn main() { 464..469 'boxed': Box> 464..480 'boxed....self()': &Foo 490..495 'good2': &Foo - 498..511 'Foo::get_self': fn get_self(&Box>) -> &Foo + 498..511 'Foo::get_self': fn get_self(&Box>) -> &Foo 498..519 'Foo::g...boxed)': &Foo 512..518 '&boxed': &Box> 513..518 'boxed': Box> @@ -3659,7 +3659,7 @@ fn main() { let are = "are"; let count = 10; builtin#format_args("hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!"); - // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type: Arguments<'_> + // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type: Arguments<'static> } "#, ); diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index b80cfe18e4cf9..759af18c98bf5 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -1278,6 +1278,40 @@ fn bar() { ); } +#[test] +fn argument_assoc_impl_trait() { + check_infer( + r#" +trait Outer { + type Item; +} + +trait Inner { } + +fn foo>(baz: T) { +} + +impl Outer for usize { + type Item = usize; +} + +impl Inner for usize {} + +fn main() { + foo(2); +} +"#, + expect![[r#" + 85..88 'baz': T + 93..96 '{ }': () + 182..197 '{ foo(2); }': () + 188..191 'foo': fn foo(usize) + 188..194 'foo(2)': () + 192..193 '2': usize + "#]], + ); +} + #[test] fn simple_return_pos_impl_trait() { cov_mark::check!(lower_rpit); @@ -4655,3 +4689,78 @@ fn f() { "#, ); } + +#[test] +fn associated_type_impl_trait() { + check_types( + r#" +trait Foo {} +struct S1; +impl Foo for S1 {} + +trait Bar { + type Item; + fn bar(&self) -> Self::Item; +} +struct S2; +impl Bar for S2 { + type Item = impl Foo; + fn bar(&self) -> Self::Item { + S1 + } +} + +fn test() { + let x = S2.bar(); + //^ impl Foo + ?Sized +} + "#, + ); +} + +#[test] +fn associated_type_impl_traits_complex() { + check_types( + r#" +struct Unary(T); +struct Binary(T, U); + +trait Foo {} +struct S1; +impl Foo for S1 {} + +trait Bar { + type Item; + fn bar(&self) -> Unary; +} +struct S2; +impl Bar for S2 { + type Item = Unary; + fn bar(&self) -> Unary<::Item> { + Unary(Unary(S1)) + } +} + +trait Baz { + type Target1; + type Target2; + fn baz(&self) -> Binary; +} +struct S3; +impl Baz for S3 { + type Target1 = impl Foo; + type Target2 = Unary; + fn 
baz(&self) -> Binary { + Binary(S1, Unary(S2)) + } +} + +fn test() { + let x = S3.baz(); + //^ Binary> + let y = x.1.0.bar(); + //^ Unary> +} + "#, + ); +} diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs index 8bd57820d2cdb..afd4d1f271df2 100644 --- a/crates/hir-ty/src/utils.rs +++ b/crates/hir-ty/src/utils.rs @@ -9,18 +9,18 @@ use chalk_ir::{ fold::{FallibleTypeFolder, Shift}, BoundVar, DebruijnIndex, }; -use either::Either; use hir_def::{ db::DefDatabase, generics::{ - GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate, - WherePredicateTypeTarget, + GenericParamDataRef, GenericParams, LifetimeParamData, TypeOrConstParamData, + TypeParamProvenance, WherePredicate, WherePredicateTypeTarget, }, lang_item::LangItem, resolver::{HasResolver, TypeNs}, type_ref::{TraitBoundModifier, TypeRef}, - ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId, ItemContainerId, Lookup, - OpaqueInternableThing, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, + ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId, GenericParamId, ItemContainerId, + LifetimeParamId, Lookup, OpaqueInternableThing, TraitId, TypeAliasId, TypeOrConstParamId, + TypeParamId, }; use hir_expand::name::Name; use intern::Interned; @@ -270,64 +270,130 @@ pub(crate) struct Generics { } impl Generics { - pub(crate) fn iter_id(&self) -> impl Iterator> + '_ { - self.iter().map(|(id, data)| match data { - TypeOrConstParamData::TypeParamData(_) => Either::Left(TypeParamId::from_unchecked(id)), - TypeOrConstParamData::ConstParamData(_) => { - Either::Right(ConstParamId::from_unchecked(id)) - } - }) + pub(crate) fn iter_id(&self) -> impl Iterator + '_ { + self.iter().map(|(id, _)| id) } /// Iterator over types and const params of self, then parent. pub(crate) fn iter<'a>( &'a self, - ) -> impl DoubleEndedIterator + 'a { - let to_toc_id = |it: &'a Generics| { - move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p) + ) -> impl DoubleEndedIterator)> + 'a { + let from_toc_id = |it: &'a Generics| { + move |(local_id, p): (_, &'a TypeOrConstParamData)| { + let id = TypeOrConstParamId { parent: it.def, local_id }; + match p { + TypeOrConstParamData::TypeParamData(p) => ( + GenericParamId::TypeParamId(TypeParamId::from_unchecked(id)), + GenericParamDataRef::TypeParamData(p), + ), + TypeOrConstParamData::ConstParamData(p) => ( + GenericParamId::ConstParamId(ConstParamId::from_unchecked(id)), + GenericParamDataRef::ConstParamData(p), + ), + } + } }; - self.params.iter().map(to_toc_id(self)).chain(self.iter_parent()) + + let from_lt_id = |it: &'a Generics| { + move |(local_id, p): (_, &'a LifetimeParamData)| { + ( + GenericParamId::LifetimeParamId(LifetimeParamId { parent: it.def, local_id }), + GenericParamDataRef::LifetimeParamData(p), + ) + } + }; + + let lt_iter = self.params.iter_lt().map(from_lt_id(self)); + self.params.iter().map(from_toc_id(self)).chain(lt_iter).chain(self.iter_parent()) } /// Iterate over types and const params without parent params. 
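// Rough model of the iteration order set up in `Generics::iter` above: a
// definition's own type/const params come first, then its own lifetimes,
// then the parent's params in the same order. `ParamId` and `Generics` below
// are simplified stand-ins, not the real hir-ty/hir-def types.
#[derive(Debug, PartialEq)]
enum ParamId { TypeOrConst(&'static str), Lifetime(&'static str) }

struct Generics {
    type_or_consts: Vec<&'static str>,
    lifetimes: Vec<&'static str>,
    parent: Option<Box<Generics>>,
}

impl Generics {
    fn iter(&self) -> Vec<ParamId> {
        let mut out: Vec<ParamId> =
            self.type_or_consts.iter().copied().map(ParamId::TypeOrConst).collect();
        out.extend(self.lifetimes.iter().copied().map(ParamId::Lifetime));
        if let Some(parent) = &self.parent {
            out.extend(parent.iter());
        }
        out
    }
}

fn main() {
    // roughly: impl<'p, P> Outer<'p, P> { fn method<'c, C>(&self) {} }
    let parent = Generics { type_or_consts: vec!["P"], lifetimes: vec!["'p"], parent: None };
    let child = Generics {
        type_or_consts: vec!["C"],
        lifetimes: vec!["'c"],
        parent: Some(Box::new(parent)),
    };
    assert_eq!(
        child.iter(),
        [
            ParamId::TypeOrConst("C"),
            ParamId::Lifetime("'c"),
            ParamId::TypeOrConst("P"),
            ParamId::Lifetime("'p"),
        ]
    );
}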
pub(crate) fn iter_self<'a>( &'a self, - ) -> impl DoubleEndedIterator + 'a { - let to_toc_id = |it: &'a Generics| { - move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p) + ) -> impl DoubleEndedIterator)> + 'a { + let from_toc_id = |it: &'a Generics| { + move |(local_id, p): (_, &'a TypeOrConstParamData)| { + let id = TypeOrConstParamId { parent: it.def, local_id }; + match p { + TypeOrConstParamData::TypeParamData(p) => ( + GenericParamId::TypeParamId(TypeParamId::from_unchecked(id)), + GenericParamDataRef::TypeParamData(p), + ), + TypeOrConstParamData::ConstParamData(p) => ( + GenericParamId::ConstParamId(ConstParamId::from_unchecked(id)), + GenericParamDataRef::ConstParamData(p), + ), + } + } + }; + + let from_lt_id = |it: &'a Generics| { + move |(local_id, p): (_, &'a LifetimeParamData)| { + ( + GenericParamId::LifetimeParamId(LifetimeParamId { parent: it.def, local_id }), + GenericParamDataRef::LifetimeParamData(p), + ) + } }; - self.params.iter().map(to_toc_id(self)) + + self.params.iter().map(from_toc_id(self)).chain(self.params.iter_lt().map(from_lt_id(self))) } /// Iterator over types and const params of parent. - pub(crate) fn iter_parent( - &self, - ) -> impl DoubleEndedIterator { + #[allow(clippy::needless_lifetimes)] + pub(crate) fn iter_parent<'a>( + &'a self, + ) -> impl DoubleEndedIterator)> + 'a { self.parent_generics().into_iter().flat_map(|it| { - let to_toc_id = - move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p); - it.params.iter().map(to_toc_id) + let from_toc_id = move |(local_id, p): (_, &'a TypeOrConstParamData)| { + let id = TypeOrConstParamId { parent: it.def, local_id }; + match p { + TypeOrConstParamData::TypeParamData(p) => ( + GenericParamId::TypeParamId(TypeParamId::from_unchecked(id)), + GenericParamDataRef::TypeParamData(p), + ), + TypeOrConstParamData::ConstParamData(p) => ( + GenericParamId::ConstParamId(ConstParamId::from_unchecked(id)), + GenericParamDataRef::ConstParamData(p), + ), + } + }; + + let from_lt_id = move |(local_id, p): (_, &'a LifetimeParamData)| { + ( + GenericParamId::LifetimeParamId(LifetimeParamId { parent: it.def, local_id }), + GenericParamDataRef::LifetimeParamData(p), + ) + }; + let lt_iter = it.params.iter_lt().map(from_lt_id); + it.params.iter().map(from_toc_id).chain(lt_iter) }) } /// Returns total number of generic parameters in scope, including those from parent. pub(crate) fn len(&self) -> usize { let parent = self.parent_generics().map_or(0, Generics::len); - let child = self.params.type_or_consts.len(); + let child = self.params.len(); parent + child } - /// Returns numbers of generic parameters excluding those from parent. + /// Returns numbers of generic parameters and lifetimes excluding those from parent. 
pub(crate) fn len_self(&self) -> usize { + self.params.len() + } + + /// Returns number of generic parameter excluding those from parent + fn len_params(&self) -> usize { self.params.type_or_consts.len() } - /// (parent total, self param, type param list, const param list, impl trait) - pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize, usize) { + /// (parent total, self param, type params, const params, impl trait list, lifetimes) + pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize, usize, usize) { let mut self_params = 0; let mut type_params = 0; let mut impl_trait_params = 0; let mut const_params = 0; + let mut lifetime_params = 0; self.params.iter().for_each(|(_, data)| match data { TypeOrConstParamData::TypeParamData(p) => match p.provenance { TypeParamProvenance::TypeParamList => type_params += 1, @@ -337,8 +403,10 @@ impl Generics { TypeOrConstParamData::ConstParamData(_) => const_params += 1, }); + self.params.iter_lt().for_each(|(_, _)| lifetime_params += 1); + let parent_len = self.parent_generics().map_or(0, Generics::len); - (parent_len, self_params, type_params, const_params, impl_trait_params) + (parent_len, self_params, type_params, const_params, impl_trait_params, lifetime_params) } pub(crate) fn param_idx(&self, param: TypeOrConstParamId) -> Option { @@ -358,6 +426,26 @@ impl Generics { } } + pub(crate) fn lifetime_idx(&self, lifetime: LifetimeParamId) -> Option { + Some(self.find_lifetime(lifetime)?.0) + } + + fn find_lifetime(&self, lifetime: LifetimeParamId) -> Option<(usize, &LifetimeParamData)> { + if lifetime.parent == self.def { + let (idx, (_local_id, data)) = self + .params + .iter_lt() + .enumerate() + .find(|(_, (idx, _))| *idx == lifetime.local_id)?; + + Some((self.len_params() + idx, data)) + } else { + self.parent_generics() + .and_then(|g| g.find_lifetime(lifetime)) + .map(|(idx, data)| (self.len_self() + idx, data)) + } + } + pub(crate) fn parent_generics(&self) -> Option<&Generics> { self.parent_generics.as_deref() } @@ -371,10 +459,15 @@ impl Generics { Substitution::from_iter( Interner, self.iter_id().enumerate().map(|(idx, id)| match id { - Either::Left(_) => BoundVar::new(debruijn, idx).to_ty(Interner).cast(Interner), - Either::Right(id) => BoundVar::new(debruijn, idx) + GenericParamId::ConstParamId(id) => BoundVar::new(debruijn, idx) .to_const(Interner, db.const_param_ty(id)) .cast(Interner), + GenericParamId::TypeParamId(_) => { + BoundVar::new(debruijn, idx).to_ty(Interner).cast(Interner) + } + GenericParamId::LifetimeParamId(_) => { + BoundVar::new(debruijn, idx).to_lifetime(Interner).cast(Interner) + } }), ) } @@ -384,12 +477,15 @@ impl Generics { Substitution::from_iter( Interner, self.iter_id().map(|id| match id { - Either::Left(id) => { + GenericParamId::TypeParamId(id) => { crate::to_placeholder_idx(db, id.into()).to_ty(Interner).cast(Interner) } - Either::Right(id) => crate::to_placeholder_idx(db, id.into()) + GenericParamId::ConstParamId(id) => crate::to_placeholder_idx(db, id.into()) .to_const(Interner, db.const_param_ty(id)) .cast(Interner), + GenericParamId::LifetimeParamId(id) => { + crate::lt_to_placeholder_idx(db, id).to_lifetime(Interner).cast(Interner) + } }), ) } diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs index c5d44c11f2c1b..23c6b078b9696 100644 --- a/crates/hir/src/display.rs +++ b/crates/hir/src/display.rs @@ -186,18 +186,29 @@ impl HirDisplay for Struct { } StructKind::Record => { let has_where_clause = write_where_clause(def_id, f)?; - let fields = self.fields(f.db); 
- f.write_char(if !has_where_clause { ' ' } else { '\n' })?; - if fields.is_empty() { - f.write_str("{}")?; - } else { - f.write_str("{\n")?; - for field in self.fields(f.db) { - f.write_str(" ")?; - field.hir_fmt(f)?; - f.write_str(",\n")?; + if let Some(limit) = f.entity_limit { + let fields = self.fields(f.db); + let count = fields.len().min(limit); + f.write_char(if !has_where_clause { ' ' } else { '\n' })?; + if count == 0 { + if fields.is_empty() { + f.write_str("{}")?; + } else { + f.write_str("{ /* … */ }")?; + } + } else { + f.write_str(" {\n")?; + for field in &fields[..count] { + f.write_str(" ")?; + field.hir_fmt(f)?; + f.write_str(",\n")?; + } + + if fields.len() > count { + f.write_str(" /* … */\n")?; + } + f.write_str("}")?; } - f.write_str("}")?; } } StructKind::Unit => _ = write_where_clause(def_id, f)?, diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index b922aa8e46dbe..106056c2fc3a9 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -38,7 +38,7 @@ mod display; use std::{iter, mem::discriminant, ops::ControlFlow}; use arrayvec::ArrayVec; -use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId}; +use base_db::{CrateDisplayName, CrateId, CrateOrigin, FileId}; use either::Either; use hir_def::{ body::{BodyDiagnostic, SyntheticSyntax}, @@ -65,7 +65,7 @@ use hir_ty::{ consteval::{try_const_usize, unknown_const_as_generic, ConstExt}, db::InternedClosure, diagnostics::BodyValidationDiagnostic, - known_const_to_ast, + error_lifetime, known_const_to_ast, layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding}, method_resolution::{self, TyFingerprint}, mir::{interpret_mir, MutBorrowKind}, @@ -79,6 +79,7 @@ use hir_ty::{ use itertools::Itertools; use nameres::diagnostics::DefDiagnosticKind; use rustc_hash::FxHashSet; +use span::Edition; use stdx::{impl_from, never}; use syntax::{ ast::{self, HasAttrs as _, HasName}, @@ -971,7 +972,7 @@ fn precise_macro_call_location( MacroKind::ProcMacro, ) } - MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => { + MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => { let node = ast_id.to_node(db.upcast()); // Compute the precise location of the macro name's token in the derive // list. @@ -1099,13 +1100,14 @@ impl Field { VariantDef::Union(it) => it.id.into(), VariantDef::Variant(it) => it.parent_enum(db).id.into(), }; - let mut generics = generics.map(|it| it.ty.clone()); + let mut generics = generics.map(|it| it.ty); let substs = TyBuilder::subst_for_def(db, def_id, None) .fill(|x| match x { ParamKind::Type => { generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) } ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + ParamKind::Lifetime => error_lifetime().cast(Interner), }) .build(); let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs); @@ -1416,7 +1418,7 @@ impl Adt { } pub fn layout(self, db: &dyn HirDatabase) -> Result { - if db.generic_params(self.into()).iter().count() != 0 { + if !db.generic_params(self.into()).is_empty() { return Err(LayoutError::HasPlaceholder); } let krate = self.krate(db).id; @@ -1440,13 +1442,14 @@ impl Adt { /// the greatest API, FIXME find a better one. 
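// The `HirDisplay` change for record structs above caps the number of fields
// rendered when an `entity_limit` is set. Reduced sketch of the truncation
// rules; `render_fields` and its plain-`String` output are illustrative, the
// real code writes through `HirFormatter`.
fn render_fields(fields: &[&str], limit: usize) -> String {
    let count = fields.len().min(limit);
    if count == 0 {
        if fields.is_empty() {
            "{}".to_owned()
        } else {
            "{ /* … */ }".to_owned()
        }
    } else {
        let mut out = String::from(" {\n");
        for field in &fields[..count] {
            out.push_str("    ");
            out.push_str(field);
            out.push_str(",\n");
        }
        if fields.len() > count {
            out.push_str("    /* … */\n");
        }
        out.push('}');
        out
    }
}

fn main() {
    assert_eq!(render_fields(&[], 3), "{}");
    assert_eq!(render_fields(&["a: i32", "b: i32"], 0), "{ /* … */ }");
    assert_eq!(
        render_fields(&["a: i32", "b: i32", "c: i32"], 2),
        " {\n    a: i32,\n    b: i32,\n    /* … */\n}"
    );
}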
pub fn ty_with_args(self, db: &dyn HirDatabase, args: impl Iterator) -> Type { let id = AdtId::from(self); - let mut it = args.map(|t| t.ty.clone()); + let mut it = args.map(|t| t.ty); let ty = TyBuilder::def_ty(db, id.into(), None) .fill(|x| { let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner)); match x { ParamKind::Type => r.cast(Interner), ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + ParamKind::Lifetime => error_lifetime().cast(Interner), } }) .build(); @@ -1859,12 +1862,13 @@ impl Function { ItemContainerId::TraitId(it) => Some(it.into()), ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None, }; - let mut generics = generics.map(|it| it.ty.clone()); + let mut generics = generics.map(|it| it.ty); let mut filler = |x: &_| match x { ParamKind::Type => { generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) } ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + ParamKind::Lifetime => error_lifetime().cast(Interner), }; let parent_substs = @@ -1954,7 +1958,7 @@ impl Function { ItemContainerId::TraitId(it) => Some(it.into()), ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None, }; - let mut generics = generics.map(|it| it.ty.clone()); + let mut generics = generics.map(|it| it.ty); let parent_substs = parent_id.map(|id| { TyBuilder::subst_for_def(db, id, None) .fill(|x| match x { @@ -1963,6 +1967,7 @@ impl Function { .unwrap_or_else(|| TyKind::Error.intern(Interner)) .cast(Interner), ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + ParamKind::Lifetime => error_lifetime().cast(Interner), }) .build() }); @@ -2007,8 +2012,7 @@ impl Function { } let data = db.function_data(self.id); - data.name.to_smol_str() == "main" - || data.attrs.export_name().map(core::ops::Deref::deref) == Some("main") + data.name.to_smol_str() == "main" || data.attrs.export_name() == Some("main") } /// Does this function have the ignore attribute? 
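// Several substitution-building call sites above gain a `ParamKind::Lifetime`
// arm that falls back to an error lifetime when no argument is supplied.
// Reduced model of that fill step; `ParamKind` and `GenericArg` here are
// stand-ins, not the real hir-ty types.
#[derive(Debug, PartialEq)]
enum ParamKind { Type, Const, Lifetime }

#[derive(Debug, PartialEq)]
enum GenericArg { Ty(&'static str), Const(&'static str), ErrorTy, ErrorLifetime }

fn fill(
    params: &[ParamKind],
    mut provided_tys: impl Iterator<Item = &'static str>,
) -> Vec<GenericArg> {
    params
        .iter()
        .map(|kind| match kind {
            ParamKind::Type => provided_tys
                .next()
                .map(GenericArg::Ty)
                .unwrap_or(GenericArg::ErrorTy),
            ParamKind::Const => GenericArg::Const("{unknown}"),
            ParamKind::Lifetime => GenericArg::ErrorLifetime,
        })
        .collect()
}

fn main() {
    let params = [ParamKind::Lifetime, ParamKind::Type, ParamKind::Const];
    assert_eq!(
        fill(&params, ["i32"].into_iter()),
        [GenericArg::ErrorLifetime, GenericArg::Ty("i32"), GenericArg::Const("{unknown}")]
    );
}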
@@ -2215,12 +2219,13 @@ impl SelfParam { } }; - let mut generics = generics.map(|it| it.ty.clone()); + let mut generics = generics.map(|it| it.ty); let mut filler = |x: &_| match x { ParamKind::Type => { generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) } ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + ParamKind::Lifetime => error_lifetime().cast(Interner), }; let parent_substs = TyBuilder::subst_for_def(db, parent_id, None).fill(&mut filler).build(); @@ -2592,7 +2597,7 @@ impl Macro { }, MacroId::ProcMacroId(it) => match it.lookup(db.upcast()).kind { ProcMacroKind::CustomDerive => MacroKind::Derive, - ProcMacroKind::FuncLike => MacroKind::ProcMacro, + ProcMacroKind::Bang => MacroKind::ProcMacro, ProcMacroKind::Attr => MacroKind::Attr, }, } @@ -3628,16 +3633,41 @@ impl Impl { .filter(filter), ); } + + if let Some(block) = + ty.adt_id(Interner).and_then(|def| def.0.module(db.upcast()).containing_block()) + { + if let Some(inherent_impls) = db.inherent_impls_in_block(block) { + all.extend( + inherent_impls.for_self_ty(&ty).iter().cloned().map(Self::from).filter(filter), + ); + } + if let Some(trait_impls) = db.trait_impls_in_block(block) { + all.extend( + trait_impls + .for_self_ty_without_blanket_impls(fp) + .map(Self::from) + .filter(filter), + ); + } + } + all } pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec { - let krate = trait_.module(db).krate(); + let module = trait_.module(db); + let krate = module.krate(); let mut all = Vec::new(); for Crate { id } in krate.transitive_reverse_dependencies(db) { let impls = db.trait_impls_in_crate(id); all.extend(impls.for_trait(trait_.id).map(Self::from)) } + if let Some(block) = module.id.containing_block() { + if let Some(trait_impls) = db.trait_impls_in_block(block) { + all.extend(trait_impls.for_trait(trait_.id).map(Self::from)); + } + } all } @@ -3683,7 +3713,7 @@ impl Impl { let macro_file = src.file_id.macro_file()?; let loc = macro_file.macro_call_id.lookup(db.upcast()); let (derive_attr, derive_index) = match loc.kind { - MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => { + MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => { let module_id = self.id.lookup(db.upcast()).container; ( db.crate_def_map(module_id.krate())[module_id.local_id] @@ -4114,6 +4144,7 @@ impl Type { // FIXME: this code is not covered in tests. unknown_const_as_generic(ty.clone()) } + ParamKind::Lifetime => error_lifetime().cast(Interner), } }) .build(); @@ -4144,6 +4175,7 @@ impl Type { match it { ParamKind::Type => args.next().unwrap().ty.clone().cast(Interner), ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + ParamKind::Lifetime => error_lifetime().cast(Interner), } }) .build(); diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs index 102e0ca4c3d7f..63b2a2506f895 100644 --- a/crates/hir/src/term_search/tactics.rs +++ b/crates/hir/src/term_search/tactics.rs @@ -177,7 +177,7 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>( // Note that we need special case for 0 param constructors because of multi cartesian // product let variant_exprs: Vec = if param_exprs.is_empty() { - vec![Expr::Variant { variant, generics: generics.clone(), params: Vec::new() }] + vec![Expr::Variant { variant, generics, params: Vec::new() }] } else { param_exprs .into_iter() @@ -462,7 +462,7 @@ pub(super) fn free_function<'a, DB: HirDatabase>( /// # Impl method tactic /// -/// Attempts to to call methods on types from lookup table. 
+/// Attempts to call methods on types from lookup table. /// This includes both functions from direct impl blocks as well as functions from traits. /// Methods defined in impl blocks that are generic and methods that are themselves have /// generics are ignored for performance reasons. diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index d111005c2ec44..65ce3e822c5b5 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -5617,7 +5617,7 @@ fn func(i: Struct<'_, T>) { fun_name(i); } -fn $0fun_name(i: Struct<'_, T>) { +fn $0fun_name(i: Struct<'static, T>) { foo(i); } "#, diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs index 38f40b8d58b4c..2150003bc1437 100644 --- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -614,7 +614,7 @@ struct Foo<'a, T> { } impl<'a, T> Foo<'a, T> { - $0fn bar(self, mut b: Vec<&'a Bar<'_, T>>) -> &'a Bar<'_, T> { + $0fn bar(self, mut b: Vec<&'a Bar<'a, T>>) -> &'a Bar<'a, T> { self.field.bar(b) } } diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs index 24a1f9492e226..a4f092cc498ef 100644 --- a/crates/ide-completion/src/completions/dot.rs +++ b/crates/ide-completion/src/completions/dot.rs @@ -961,11 +961,11 @@ struct Foo { field: i32 } impl Foo { fn foo(&self) { $0 } }"#, expect![[r#" fd self.field i32 + me self.foo() fn(&self) lc self &Foo sp Self Foo st Foo Foo bt u32 u32 - me self.foo() fn(&self) "#]], ); check( @@ -975,11 +975,11 @@ struct Foo(i32); impl Foo { fn foo(&mut self) { $0 } }"#, expect![[r#" fd self.0 i32 + me self.foo() fn(&mut self) lc self &mut Foo sp Self Foo st Foo Foo bt u32 u32 - me self.foo() fn(&mut self) "#]], ); } diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs index 7946784150224..c48672e80aca1 100644 --- a/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -186,11 +186,11 @@ fn add_function_impl( if func.assoc_fn_params(ctx.db).is_empty() { "" } else { ".." 
} ); - let completion_kind = if func.has_self_param(ctx.db) { - CompletionItemKind::Method + let completion_kind = CompletionItemKind::SymbolKind(if func.has_self_param(ctx.db) { + SymbolKind::Method } else { - CompletionItemKind::SymbolKind(SymbolKind::Function) - }; + SymbolKind::Function + }); let mut item = CompletionItem::new(completion_kind, replacement_range, label); item.lookup_by(format!("fn {}", fn_name.display(ctx.db))) diff --git a/crates/ide-completion/src/completions/keyword.rs b/crates/ide-completion/src/completions/keyword.rs index ed32a5db23e73..1322c05e30e97 100644 --- a/crates/ide-completion/src/completions/keyword.rs +++ b/crates/ide-completion/src/completions/keyword.rs @@ -75,8 +75,8 @@ impl Future for A {} fn foo(a: A) { a.$0 } "#, expect![[r#" - kw await expr.await me into_future() (as IntoFuture) fn(self) -> ::IntoFuture + kw await expr.await sn box Box::new(expr) sn call function(expr) sn dbg dbg!(expr) @@ -102,8 +102,8 @@ fn foo() { } "#, expect![[r#" - kw await expr.await me into_future() (use core::future::IntoFuture) fn(self) -> ::IntoFuture + kw await expr.await sn box Box::new(expr) sn call function(expr) sn dbg dbg!(expr) @@ -131,8 +131,8 @@ impl IntoFuture for A {} fn foo(a: A) { a.$0 } "#, expect![[r#" - kw await expr.await me into_future() (as IntoFuture) fn(self) -> ::IntoFuture + kw await expr.await sn box Box::new(expr) sn call function(expr) sn dbg dbg!(expr) diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index aa22155feffe2..995e3f482532e 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -540,7 +540,7 @@ impl CompletionContext<'_> { /// Whether the given trait is an operator trait or not. pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool { match trait_.attrs(self.db).lang() { - Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()), + Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang), None => false, } } diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index 357060817c7ba..b9a2c383bdd1b 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -342,7 +342,6 @@ pub enum CompletionItemKind { BuiltinType, InferredType, Keyword, - Method, Snippet, UnresolvedReference, Expression, @@ -369,6 +368,7 @@ impl CompletionItemKind { SymbolKind::LifetimeParam => "lt", SymbolKind::Local => "lc", SymbolKind::Macro => "ma", + SymbolKind::Method => "me", SymbolKind::ProcMacro => "pm", SymbolKind::Module => "md", SymbolKind::SelfParam => "sp", @@ -388,7 +388,6 @@ impl CompletionItemKind { CompletionItemKind::BuiltinType => "bt", CompletionItemKind::InferredType => "it", CompletionItemKind::Keyword => "kw", - CompletionItemKind::Method => "me", CompletionItemKind::Snippet => "sn", CompletionItemKind::UnresolvedReference => "??", CompletionItemKind::Expression => "ex", diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 6d1a5a0bc5295..ca0424809edeb 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -312,7 +312,7 @@ pub(crate) fn render_expr( None => ctx.source_range(), }; - let mut item = CompletionItem::new(CompletionItemKind::Expression, source_range, label.clone()); + let mut item = CompletionItem::new(CompletionItemKind::Expression, source_range, label); let snippet = format!( "{}$0", @@ -677,10 +677,11 @@ mod tests { #[track_caller] fn check_function_relevance(ra_fixture: &str, expect: Expect) { - let actual: Vec<_> 
= do_completion(ra_fixture, CompletionItemKind::Method) - .into_iter() - .map(|item| (item.detail.unwrap_or_default(), item.relevance.function)) - .collect(); + let actual: Vec<_> = + do_completion(ra_fixture, CompletionItemKind::SymbolKind(SymbolKind::Method)) + .into_iter() + .map(|item| (item.detail.unwrap_or_default(), item.relevance.function)) + .collect(); expect.assert_debug_eq(&actual); } @@ -1392,7 +1393,10 @@ impl S { /// Method docs fn bar(self) { self.$0 } }"#, - &[CompletionItemKind::Method, CompletionItemKind::SymbolKind(SymbolKind::Field)], + &[ + CompletionItemKind::SymbolKind(SymbolKind::Method), + CompletionItemKind::SymbolKind(SymbolKind::Field), + ], expect![[r#" [ CompletionItem { @@ -1400,7 +1404,9 @@ impl S { source_range: 94..94, delete: 94..94, insert: "bar()$0", - kind: Method, + kind: SymbolKind( + Method, + ), lookup: "bar", detail: "fn(self)", documentation: Documentation( @@ -1520,7 +1526,7 @@ impl S { } fn foo(s: S) { s.$0 } "#, - CompletionItemKind::Method, + CompletionItemKind::SymbolKind(SymbolKind::Method), expect![[r#" [ CompletionItem { @@ -1528,7 +1534,9 @@ fn foo(s: S) { s.$0 } source_range: 81..81, delete: 81..81, insert: "the_method()$0", - kind: Method, + kind: SymbolKind( + Method, + ), lookup: "the_method", detail: "fn(&self)", relevance: CompletionRelevance { @@ -2408,7 +2416,10 @@ impl Foo { fn baz(&self) -> u32 { 0 } } fn foo(f: Foo) { let _: &u32 = f.b$0 } "#, - &[CompletionItemKind::Method, CompletionItemKind::SymbolKind(SymbolKind::Field)], + &[ + CompletionItemKind::SymbolKind(SymbolKind::Method), + CompletionItemKind::SymbolKind(SymbolKind::Field), + ], expect![[r#" [ CompletionItem { @@ -2416,7 +2427,9 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 } source_range: 109..110, delete: 109..110, insert: "baz()$0", - kind: Method, + kind: SymbolKind( + Method, + ), lookup: "baz", detail: "fn(&self) -> u32", relevance: CompletionRelevance { @@ -2631,7 +2644,7 @@ fn main() { let _: bool = (9 > 2).not$0; } "#, - &[CompletionItemKind::Snippet, CompletionItemKind::Method], + &[CompletionItemKind::Snippet, CompletionItemKind::SymbolKind(SymbolKind::Method)], expect![[r#" sn not [snippet] me not() (use ops::Not) [type_could_unify+requires_import] @@ -2664,7 +2677,7 @@ fn main() { S.$0 } "#, - &[CompletionItemKind::Snippet, CompletionItemKind::Method], + &[CompletionItemKind::Snippet, CompletionItemKind::SymbolKind(SymbolKind::Method)], expect![[r#" me f() [] sn ref [] @@ -2907,7 +2920,7 @@ fn main() { } "#, &[ - CompletionItemKind::Method, + CompletionItemKind::SymbolKind(SymbolKind::Method), CompletionItemKind::SymbolKind(SymbolKind::Field), CompletionItemKind::SymbolKind(SymbolKind::Function), ], @@ -2918,7 +2931,9 @@ fn main() { source_range: 193..193, delete: 193..193, insert: "flush()$0", - kind: Method, + kind: SymbolKind( + Method, + ), lookup: "flush", detail: "fn(&self)", relevance: CompletionRelevance { @@ -2941,7 +2956,9 @@ fn main() { source_range: 193..193, delete: 193..193, insert: "write()$0", - kind: Method, + kind: SymbolKind( + Method, + ), lookup: "write", detail: "fn(&self)", relevance: CompletionRelevance { diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs index cf9fe1ab30728..1634b0a920611 100644 --- a/crates/ide-completion/src/render/function.rs +++ b/crates/ide-completion/src/render/function.rs @@ -68,11 +68,11 @@ fn render( }; let has_self_param = func.self_param(db).is_some(); let mut item = CompletionItem::new( - if has_self_param { - CompletionItemKind::Method + 
CompletionItemKind::SymbolKind(if has_self_param { + SymbolKind::Method } else { - CompletionItemKind::SymbolKind(SymbolKind::Function) - }, + SymbolKind::Function + }), ctx.source_range(), call.clone(), ); diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs index 7749fac40b9dc..a653314233d39 100644 --- a/crates/ide-completion/src/tests/expression.rs +++ b/crates/ide-completion/src/tests/expression.rs @@ -127,6 +127,7 @@ impl Unit { en Enum Enum fn function() fn() fn local_func() fn() + me self.foo() fn(self) lc self Unit ma makro!(…) macro_rules! makro md module @@ -166,7 +167,6 @@ impl Unit { kw use kw while kw while let - me self.foo() fn(self) sn macro_rules sn pd sn ppd diff --git a/crates/ide-completion/src/tests/predicate.rs b/crates/ide-completion/src/tests/predicate.rs index 46a3e97d3e92d..3718dff56e88f 100644 --- a/crates/ide-completion/src/tests/predicate.rs +++ b/crates/ide-completion/src/tests/predicate.rs @@ -19,7 +19,7 @@ struct Foo<'lt, T, const C: usize> where $0 {} en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo<…> Foo<'_, {unknown}, _> + st Foo<…> Foo<'static, {unknown}, _> st Record Record st Tuple Tuple st Unit Unit @@ -92,7 +92,7 @@ struct Foo<'lt, T, const C: usize> where for<'a> $0 {} en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo<…> Foo<'_, {unknown}, _> + st Foo<…> Foo<'static, {unknown}, _> st Record Record st Tuple Tuple st Unit Unit diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index ff32eccfbff4c..69d8fe91040b0 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -1,6 +1,7 @@ //! Tests that don't fit into a specific category. 
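// The completion changes above fold the old dedicated `Method` completion
// kind into `SymbolKind::Method`. Reduced model of the selection logic in the
// render path; both enums are simplified stand-ins for the ide-db and
// ide-completion types.
#[derive(Debug, PartialEq)]
enum SymbolKind { Function, Method }

#[derive(Debug, PartialEq)]
enum CompletionItemKind { SymbolKind(SymbolKind), Keyword }

fn completion_kind(has_self_param: bool) -> CompletionItemKind {
    CompletionItemKind::SymbolKind(if has_self_param {
        SymbolKind::Method
    } else {
        SymbolKind::Function
    })
}

fn main() {
    assert_eq!(completion_kind(true), CompletionItemKind::SymbolKind(SymbolKind::Method));
    assert_eq!(completion_kind(false), CompletionItemKind::SymbolKind(SymbolKind::Function));
    // other kinds (keywords, snippets, ...) remain separate variants
    let _ = CompletionItemKind::Keyword;
}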
use expect_test::{expect, Expect}; +use ide_db::SymbolKind; use crate::{ tests::{ @@ -316,15 +317,15 @@ trait Sub: Super { fn foo() { T::$0 } "#, expect![[r#" - ct C2 (as Sub) const C2: () - ct CONST (as Super) const CONST: u8 - fn func() (as Super) fn() - fn subfunc() (as Sub) fn() - ta SubTy (as Sub) type SubTy - ta Ty (as Super) type Ty - me method(…) (as Super) fn(&self) - me submethod(…) (as Sub) fn(&self) - "#]], + ct C2 (as Sub) const C2: () + ct CONST (as Super) const CONST: u8 + fn func() (as Super) fn() + fn subfunc() (as Sub) fn() + me method(…) (as Super) fn(&self) + me submethod(…) (as Sub) fn(&self) + ta SubTy (as Sub) type SubTy + ta Ty (as Super) type Ty + "#]], ); } @@ -356,15 +357,15 @@ impl Sub for Wrap { } "#, expect![[r#" - ct C2 (as Sub) const C2: () - ct CONST (as Super) const CONST: u8 - fn func() (as Super) fn() - fn subfunc() (as Sub) fn() - ta SubTy (as Sub) type SubTy - ta Ty (as Super) type Ty - me method(…) (as Super) fn(&self) - me submethod(…) (as Sub) fn(&self) - "#]], + ct C2 (as Sub) const C2: () + ct CONST (as Super) const CONST: u8 + fn func() (as Super) fn() + fn subfunc() (as Sub) fn() + me method(…) (as Super) fn(&self) + me submethod(…) (as Sub) fn(&self) + ta SubTy (as Sub) type SubTy + ta Ty (as Super) type Ty + "#]], ); } @@ -555,10 +556,10 @@ impl Foo { } "#, expect![[r#" - ev Bar Bar - ev Baz Baz - me foo(…) fn(self) - "#]], + me foo(…) fn(self) + ev Bar Bar + ev Baz Baz + "#]], ); } @@ -1399,7 +1400,7 @@ fn main() { bar.b$0 } "#, - CompletionItemKind::Method, + CompletionItemKind::SymbolKind(SymbolKind::Method), expect!("const fn(&'foo mut self, &Foo) -> !"), expect!("pub const fn baz<'foo>(&'foo mut self, x: &'foo Foo) -> !"), ); diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs index db4ac9381cedb..97709728656da 100644 --- a/crates/ide-completion/src/tests/type_pos.rs +++ b/crates/ide-completion/src/tests/type_pos.rs @@ -20,8 +20,8 @@ struct Foo<'lt, T, const C: usize> { en Enum Enum ma makro!(…) macro_rules! makro md module - sp Self Foo<'_, {unknown}, _> - st Foo<…> Foo<'_, {unknown}, _> + sp Self Foo<'static, {unknown}, _> + st Foo<…> Foo<'static, {unknown}, _> st Record Record st Tuple Tuple st Unit Unit @@ -45,8 +45,8 @@ struct Foo<'lt, T, const C: usize>(f$0); en Enum Enum ma makro!(…) macro_rules! 
makro md module - sp Self Foo<'_, {unknown}, _> - st Foo<…> Foo<'_, {unknown}, _> + sp Self Foo<'static, {unknown}, _> + st Foo<…> Foo<'static, {unknown}, _> st Record Record st Tuple Tuple st Unit Unit diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index 0d5a93f7b8e50..357209ceb0b4b 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -346,6 +346,7 @@ pub enum SymbolKind { Enum, Field, Function, + Method, Impl, Label, LifetimeParam, diff --git a/crates/ide-diagnostics/Cargo.toml b/crates/ide-diagnostics/Cargo.toml index 8ccea99e9e13d..edd05009332e5 100644 --- a/crates/ide-diagnostics/Cargo.toml +++ b/crates/ide-diagnostics/Cargo.toml @@ -26,6 +26,7 @@ text-edit.workspace = true cfg.workspace = true hir.workspace = true ide-db.workspace = true +paths.workspace = true [dev-dependencies] expect-test = "1.4.0" diff --git a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs index 67daa172b27e5..045154614f80f 100644 --- a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs +++ b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs @@ -23,6 +23,7 @@ mod tests { }, DiagnosticsConfig, }; + use test_utils::skip_slow_tests; #[track_caller] fn check_diagnostics_no_bails(ra_fixture: &str) { @@ -1004,6 +1005,32 @@ fn f() { ); } + #[test] + fn exponential_match() { + if skip_slow_tests() { + return; + } + // Constructs a match where match checking takes exponential time. Ensures we bail early. + use std::fmt::Write; + let struct_arity = 50; + let mut code = String::new(); + write!(code, "struct BigStruct {{").unwrap(); + for i in 0..struct_arity { + write!(code, " field{i}: bool,").unwrap(); + } + write!(code, "}}").unwrap(); + write!(code, "fn big_match(s: BigStruct) {{").unwrap(); + write!(code, " match s {{").unwrap(); + for i in 0..struct_arity { + write!(code, " BigStruct {{ field{i}: true, ..}} => {{}},").unwrap(); + write!(code, " BigStruct {{ field{i}: false, ..}} => {{}},").unwrap(); + } + write!(code, " _ => {{}},").unwrap(); + write!(code, " }}").unwrap(); + write!(code, "}}").unwrap(); + check_diagnostics_no_bails(&code); + } + mod rust_unstable { use super::*; diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs index 34a0038295fd9..00352266ddbdc 100644 --- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -7,7 +7,11 @@ use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: need-mut // // This diagnostic is triggered on mutating an immutable variable. 
-pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Diagnostic { +pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Option { + if d.span.file_id.macro_file().is_some() { + // FIXME: Our infra can't handle allow from within macro expansions rn + return None; + } let fixes = (|| { if d.local.is_ref(ctx.sema.db) { // There is no simple way to add `mut` to `ref x` and `ref mut x` @@ -29,24 +33,30 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Diagno use_range, )]) })(); - Diagnostic::new_with_syntax_node_ptr( - ctx, - // FIXME: `E0384` is not the only error that this diagnostic handles - DiagnosticCode::RustcHardError("E0384"), - format!( - "cannot mutate immutable variable `{}`", - d.local.name(ctx.sema.db).display(ctx.sema.db) - ), - d.span, + Some( + Diagnostic::new_with_syntax_node_ptr( + ctx, + // FIXME: `E0384` is not the only error that this diagnostic handles + DiagnosticCode::RustcHardError("E0384"), + format!( + "cannot mutate immutable variable `{}`", + d.local.name(ctx.sema.db).display(ctx.sema.db) + ), + d.span, + ) + .with_fixes(fixes), ) - .with_fixes(fixes) } // Diagnostic: unused-mut // // This diagnostic is triggered when a mutable variable isn't actually mutated. -pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Diagnostic { +pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Option { let ast = d.local.primary_source(ctx.sema.db).syntax_ptr(); + if ast.file_id.macro_file().is_some() { + // FIXME: Our infra can't handle allow from within macro expansions rn + return None; + } let fixes = (|| { let file_id = ast.file_id.file_id()?; let mut edit_builder = TextEdit::builder(); @@ -70,14 +80,16 @@ pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Di )]) })(); let ast = d.local.primary_source(ctx.sema.db).syntax_ptr(); - Diagnostic::new_with_syntax_node_ptr( - ctx, - DiagnosticCode::RustcLint("unused_mut"), - "variable does not need to be mutable", - ast, + Some( + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcLint("unused_mut"), + "variable does not need to be mutable", + ast, + ) + .experimental() // Not supporting `#[allow(unused_mut)]` in proc macros leads to false positive. + .with_fixes(fixes), ) - .experimental() // Not supporting `#[allow(unused_mut)]` in proc macros leads to false positive. 
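// need_mut and unused_mut now return Option<Diagnostic> and bail out with None when the
// diagnostic's span comes from a macro expansion, because `#[allow]` from within expansions
// is not handled yet; the dispatcher in ide-diagnostics/src/lib.rs (later in this patch)
// maps that None to a `continue`. A condensed sketch of the new handler shape, with a
// stand-in Diagnostic type:
struct Diagnostic; // placeholder for ide_diagnostics::Diagnostic

fn guarded_handler(
    span_is_in_macro_file: bool,
    build: impl FnOnce() -> Diagnostic,
) -> Option<Diagnostic> {
    if span_is_in_macro_file {
        // FIXME (as in the patch): allow from within macro expansions is unsupported right now.
        return None;
    }
    Some(build())
}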
- .with_fixes(fixes) } pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option { diff --git a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs index 7a040e46e3386..d831878044d32 100644 --- a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs +++ b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs @@ -12,7 +12,12 @@ use crate::{adjusted_display_range, fix, Diagnostic, DiagnosticCode, Diagnostics pub(crate) fn remove_trailing_return( ctx: &DiagnosticsContext<'_>, d: &RemoveTrailingReturn, -) -> Diagnostic { +) -> Option { + if d.return_expr.file_id.macro_file().is_some() { + // FIXME: Our infra can't handle allow from within macro expansions rn + return None; + } + let display_range = adjusted_display_range(ctx, d.return_expr, &|return_expr| { return_expr .syntax() @@ -20,12 +25,14 @@ pub(crate) fn remove_trailing_return( .and_then(ast::ExprStmt::cast) .map(|stmt| stmt.syntax().text_range()) }); - Diagnostic::new( - DiagnosticCode::Clippy("needless_return"), - "replace return ; with ", - display_range, + Some( + Diagnostic::new( + DiagnosticCode::Clippy("needless_return"), + "replace return ; with ", + display_range, + ) + .with_fixes(fixes(ctx, d)), ) - .with_fixes(fixes(ctx, d)) } fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveTrailingReturn) -> Option> { diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index f68e698238562..448df1ca163a6 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -21,23 +21,30 @@ use crate::{ pub(crate) fn remove_unnecessary_else( ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse, -) -> Diagnostic { +) -> Option { + if d.if_expr.file_id.macro_file().is_some() { + // FIXME: Our infra can't handle allow from within macro expansions rn + return None; + } + let display_range = adjusted_display_range(ctx, d.if_expr, &|if_expr| { if_expr.else_token().as_ref().map(SyntaxToken::text_range) }); - Diagnostic::new( - DiagnosticCode::Ra("remove-unnecessary-else", Severity::WeakWarning), - "remove unnecessary else block", - display_range, + Some( + Diagnostic::new( + DiagnosticCode::Ra("remove-unnecessary-else", Severity::WeakWarning), + "remove unnecessary else block", + display_range, + ) + .experimental() + .with_fixes(fixes(ctx, d)), ) - .experimental() - .with_fixes(fixes(ctx, d)) } fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option> { let root = ctx.sema.db.parse_or_expand(d.if_expr.file_id); let if_expr = d.if_expr.value.to_node(&root); - let if_expr = ctx.sema.original_ast_node(if_expr.clone())?; + let if_expr = ctx.sema.original_ast_node(if_expr)?; let mut indent = IndentLevel::from_node(if_expr.syntax()); let has_parent_if_expr = if_expr.syntax().parent().and_then(ast::IfExpr::cast).is_some(); diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs index becc24ab21ecb..b9327f85567cb 100644 --- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -8,6 +8,7 @@ use ide_db::{ source_change::SourceChange, RootDatabase, }; +use paths::Utf8Component; use syntax::{ ast::{self, edit::IndentLevel, HasModuleItem, HasName}, AstNode, TextRange, @@ -84,10 +85,10 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> 
Option> { // try resolving the relative difference of the paths as inline modules let mut current = root_module; - for ele in rel.as_ref().components() { + for ele in rel.as_utf8_path().components() { let seg = match ele { - std::path::Component::Normal(seg) => seg.to_str()?, - std::path::Component::RootDir => continue, + Utf8Component::Normal(seg) => seg, + Utf8Component::RootDir => continue, // shouldn't occur _ => continue 'crates, }; diff --git a/crates/ide-diagnostics/src/handlers/unused_variables.rs b/crates/ide-diagnostics/src/handlers/unused_variables.rs index a9e1d07d7c52d..cd251faab9ad5 100644 --- a/crates/ide-diagnostics/src/handlers/unused_variables.rs +++ b/crates/ide-diagnostics/src/handlers/unused_variables.rs @@ -14,18 +14,24 @@ use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; pub(crate) fn unused_variables( ctx: &DiagnosticsContext<'_>, d: &hir::UnusedVariable, -) -> Diagnostic { +) -> Option { let ast = d.local.primary_source(ctx.sema.db).syntax_ptr(); + if ast.file_id.macro_file().is_some() { + // FIXME: Our infra can't handle allow from within macro expansions rn + return None; + } let diagnostic_range = ctx.sema.diagnostics_display_range(ast); let var_name = d.local.primary_source(ctx.sema.db).syntax().to_string(); - Diagnostic::new_with_syntax_node_ptr( - ctx, - DiagnosticCode::RustcLint("unused_variables"), - "unused variable", - ast, + Some( + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcLint("unused_variables"), + "unused variable", + ast, + ) + .with_fixes(fixes(&var_name, diagnostic_range, ast.file_id.is_macro())) + .experimental(), ) - .with_fixes(fixes(&var_name, diagnostic_range, ast.file_id.is_macro())) - .experimental() } fn fixes(var_name: &String, diagnostic_range: FileRange, is_in_marco: bool) -> Option> { @@ -47,7 +53,7 @@ fn fixes(var_name: &String, diagnostic_range: FileRange, is_in_marco: bool) -> O #[cfg(test)] mod tests { - use crate::tests::{check_diagnostics, check_fix, check_no_fix}; + use crate::tests::{check_diagnostics, check_fix}; #[test] fn unused_variables_simple() { @@ -193,7 +199,7 @@ fn main() { #[test] fn no_fix_for_marco() { - check_no_fix( + check_diagnostics( r#" macro_rules! my_macro { () => { @@ -202,7 +208,7 @@ macro_rules! 
my_macro { } fn main() { - $0my_macro!(); + my_macro!(); } "#, ); diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 0df6f0e0373c8..270cf844c65b9 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -330,7 +330,6 @@ pub fn diagnostics( } for diag in diags { - #[rustfmt::skip] let d = match diag { AnyDiagnostic::ExpectedFunction(d) => handlers::expected_function::expected_function(&ctx, &d), AnyDiagnostic::InactiveCode(d) => match handlers::inactive_code::inactive_code(&ctx, &d) { @@ -361,7 +360,10 @@ pub fn diagnostics( AnyDiagnostic::MissingMatchArms(d) => handlers::missing_match_arms::missing_match_arms(&ctx, &d), AnyDiagnostic::MissingUnsafe(d) => handlers::missing_unsafe::missing_unsafe(&ctx, &d), AnyDiagnostic::MovedOutOfRef(d) => handlers::moved_out_of_ref::moved_out_of_ref(&ctx, &d), - AnyDiagnostic::NeedMut(d) => handlers::mutability_errors::need_mut(&ctx, &d), + AnyDiagnostic::NeedMut(d) => match handlers::mutability_errors::need_mut(&ctx, &d) { + Some(it) => it, + None => continue, + }, AnyDiagnostic::NonExhaustiveLet(d) => handlers::non_exhaustive_let::non_exhaustive_let(&ctx, &d), AnyDiagnostic::NoSuchField(d) => handlers::no_such_field::no_such_field(&ctx, &d), AnyDiagnostic::PrivateAssocItem(d) => handlers::private_assoc_item::private_assoc_item(&ctx, &d), @@ -385,12 +387,24 @@ pub fn diagnostics( AnyDiagnostic::UnresolvedMethodCall(d) => handlers::unresolved_method::unresolved_method(&ctx, &d), AnyDiagnostic::UnresolvedModule(d) => handlers::unresolved_module::unresolved_module(&ctx, &d), AnyDiagnostic::UnresolvedProcMacro(d) => handlers::unresolved_proc_macro::unresolved_proc_macro(&ctx, &d, config.proc_macros_enabled, config.proc_attr_macros_enabled), - AnyDiagnostic::UnusedMut(d) => handlers::mutability_errors::unused_mut(&ctx, &d), - AnyDiagnostic::UnusedVariable(d) => handlers::unused_variables::unused_variables(&ctx, &d), + AnyDiagnostic::UnusedMut(d) => match handlers::mutability_errors::unused_mut(&ctx, &d) { + Some(it) => it, + None => continue, + }, + AnyDiagnostic::UnusedVariable(d) => match handlers::unused_variables::unused_variables(&ctx, &d) { + Some(it) => it, + None => continue, + }, AnyDiagnostic::BreakOutsideOfLoop(d) => handlers::break_outside_of_loop::break_outside_of_loop(&ctx, &d), AnyDiagnostic::MismatchedTupleStructPatArgCount(d) => handlers::mismatched_arg_count::mismatched_tuple_struct_pat_arg_count(&ctx, &d), - AnyDiagnostic::RemoveTrailingReturn(d) => handlers::remove_trailing_return::remove_trailing_return(&ctx, &d), - AnyDiagnostic::RemoveUnnecessaryElse(d) => handlers::remove_unnecessary_else::remove_unnecessary_else(&ctx, &d), + AnyDiagnostic::RemoveTrailingReturn(d) => match handlers::remove_trailing_return::remove_trailing_return(&ctx, &d) { + Some(it) => it, + None => continue, + }, + AnyDiagnostic::RemoveUnnecessaryElse(d) => match handlers::remove_unnecessary_else::remove_unnecessary_else(&ctx, &d) { + Some(it) => it, + None => continue, + }, }; res.push(d) } @@ -399,9 +413,9 @@ pub fn diagnostics( .iter_mut() .filter_map(|it| { Some(( - it.main_node - .map(|ptr| ptr.map(|node| node.to_node(&ctx.sema.parse_or_expand(ptr.file_id)))) - .clone()?, + it.main_node.map(|ptr| { + ptr.map(|node| node.to_node(&ctx.sema.parse_or_expand(ptr.file_id))) + })?, it, )) }) diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index dcaa212089233..bb5c2b791392d 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ 
b/crates/ide-diagnostics/src/tests.rs @@ -283,6 +283,10 @@ fn test_disabled_diagnostics() { #[test] fn minicore_smoke_test() { + if test_utils::skip_slow_tests() { + return; + } + fn check(minicore: MiniCore) { let source = minicore.source_code(); let mut config = DiagnosticsConfig::test_sample(); diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml index aca7d613e11fc..a535015a27f09 100644 --- a/crates/ide/Cargo.toml +++ b/crates/ide/Cargo.toml @@ -36,6 +36,7 @@ ide-ssr.workspace = true profile.workspace = true stdx.workspace = true syntax.workspace = true +span.workspace = true text-edit.workspace = true # ide should depend only on the top-level `hir` package. if you need # something from some `hir-xxx` subpackage, reexport the API via `hir`. diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index d10bdca50d81e..64b4ccc5bd81c 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -5,8 +5,6 @@ mod tests; mod intra_doc_links; -use std::ffi::OsStr; - use pulldown_cmark::{BrokenLink, CowStr, Event, InlineStr, LinkType, Options, Parser, Tag}; use pulldown_cmark_to_cmark::{cmark_resume_with_options, Options as CMarkOptions}; use stdx::format_to; @@ -134,8 +132,8 @@ pub(crate) fn remove_links(markdown: &str) -> String { pub(crate) fn external_docs( db: &RootDatabase, FilePosition { file_id, offset }: FilePosition, - target_dir: Option<&OsStr>, - sysroot: Option<&OsStr>, + target_dir: Option<&str>, + sysroot: Option<&str>, ) -> Option { let sema = &Semantics::new(db); let file = sema.parse(file_id).syntax().clone(); @@ -331,8 +329,8 @@ fn broken_link_clone_cb(link: BrokenLink<'_>) -> Option<(CowStr<'_>, CowStr<'_>) fn get_doc_links( db: &RootDatabase, def: Definition, - target_dir: Option<&OsStr>, - sysroot: Option<&OsStr>, + target_dir: Option<&str>, + sysroot: Option<&str>, ) -> DocumentationLinks { let join_url = |base_url: Option, path: &str| -> Option { base_url.and_then(|url| url.join(path).ok()) @@ -479,15 +477,13 @@ fn map_links<'e>( fn get_doc_base_urls( db: &RootDatabase, def: Definition, - target_dir: Option<&OsStr>, - sysroot: Option<&OsStr>, + target_dir: Option<&str>, + sysroot: Option<&str>, ) -> (Option, Option) { let local_doc = target_dir - .and_then(|path| path.to_str()) .and_then(|path| Url::parse(&format!("file:///{path}/")).ok()) .and_then(|it| it.join("doc/").ok()); let system_doc = sysroot - .and_then(|it| it.to_str()) .map(|sysroot| format!("file:///{sysroot}/share/doc/rust/html/")) .and_then(|it| Url::parse(&it).ok()); diff --git a/crates/ide/src/doc_links/tests.rs b/crates/ide/src/doc_links/tests.rs index 60e8d29a7163a..ac44908a90214 100644 --- a/crates/ide/src/doc_links/tests.rs +++ b/crates/ide/src/doc_links/tests.rs @@ -1,4 +1,4 @@ -use std::{ffi::OsStr, iter}; +use std::iter; use expect_test::{expect, Expect}; use hir::Semantics; @@ -18,10 +18,10 @@ use crate::{ fn check_external_docs( ra_fixture: &str, - target_dir: Option<&OsStr>, + target_dir: Option<&str>, expect_web_url: Option, expect_local_url: Option, - sysroot: Option<&OsStr>, + sysroot: Option<&str>, ) { let (analysis, position) = fixture::position(ra_fixture); let links = analysis.external_docs(position, target_dir, sysroot).unwrap(); @@ -127,10 +127,10 @@ fn external_docs_doc_builtin_type() { //- /main.rs crate:foo let x: u3$02 = 0; "#, - Some(OsStr::new("/home/user/project")), + Some("/home/user/project"), Some(expect![[r#"https://doc.rust-lang.org/nightly/core/primitive.u32.html"#]]), 
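// With target_dir and sysroot passed as plain &str instead of &OsStr (presumably because the
// callers already hold UTF-8 paths), the to_str() conversions in get_doc_base_urls drop out.
// The two URL constructions reduce to roughly the following, using the `url` crate as the
// surrounding code does:
use url::Url;

fn doc_base_urls(target_dir: Option<&str>, sysroot: Option<&str>) -> (Option<Url>, Option<Url>) {
    let local_doc = target_dir
        .and_then(|path| Url::parse(&format!("file:///{path}/")).ok())
        .and_then(|it| it.join("doc/").ok());
    let system_doc = sysroot
        .map(|sysroot| format!("file:///{sysroot}/share/doc/rust/html/"))
        .and_then(|it| Url::parse(&it).ok());
    (local_doc, system_doc)
}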
Some(expect![[r#"file:///sysroot/share/doc/rust/html/core/primitive.u32.html"#]]), - Some(OsStr::new("/sysroot")), + Some("/sysroot"), ); } @@ -143,10 +143,10 @@ use foo$0::Foo; //- /lib.rs crate:foo pub struct Foo; "#, - Some(OsStr::new("/home/user/project")), + Some("/home/user/project"), Some(expect![[r#"https://docs.rs/foo/*/foo/index.html"#]]), Some(expect![[r#"file:///home/user/project/doc/foo/index.html"#]]), - Some(OsStr::new("/sysroot")), + Some("/sysroot"), ); } @@ -157,10 +157,10 @@ fn external_docs_doc_url_std_crate() { //- /main.rs crate:std use self$0; "#, - Some(OsStr::new("/home/user/project")), + Some("/home/user/project"), Some(expect!["https://doc.rust-lang.org/stable/std/index.html"]), Some(expect!["file:///sysroot/share/doc/rust/html/std/index.html"]), - Some(OsStr::new("/sysroot")), + Some("/sysroot"), ); } @@ -171,10 +171,10 @@ fn external_docs_doc_url_struct() { //- /main.rs crate:foo pub struct Fo$0o; "#, - Some(OsStr::new("/home/user/project")), + Some("/home/user/project"), Some(expect![[r#"https://docs.rs/foo/*/foo/struct.Foo.html"#]]), Some(expect![[r#"file:///home/user/project/doc/foo/struct.Foo.html"#]]), - Some(OsStr::new("/sysroot")), + Some("/sysroot"), ); } @@ -185,10 +185,10 @@ fn external_docs_doc_url_windows_backslash_path() { //- /main.rs crate:foo pub struct Fo$0o; "#, - Some(OsStr::new(r"C:\Users\user\project")), + Some(r"C:\Users\user\project"), Some(expect![[r#"https://docs.rs/foo/*/foo/struct.Foo.html"#]]), Some(expect![[r#"file:///C:/Users/user/project/doc/foo/struct.Foo.html"#]]), - Some(OsStr::new("/sysroot")), + Some("/sysroot"), ); } @@ -199,10 +199,10 @@ fn external_docs_doc_url_windows_slash_path() { //- /main.rs crate:foo pub struct Fo$0o; "#, - Some(OsStr::new(r"C:/Users/user/project")), + Some("C:/Users/user/project"), Some(expect![[r#"https://docs.rs/foo/*/foo/struct.Foo.html"#]]), Some(expect![[r#"file:///C:/Users/user/project/doc/foo/struct.Foo.html"#]]), - Some(OsStr::new("/sysroot")), + Some("/sysroot"), ); } diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs index 0e790e14205f4..813691540f92f 100644 --- a/crates/ide/src/file_structure.rs +++ b/crates/ide/src/file_structure.rs @@ -134,15 +134,22 @@ fn structure_node(node: &SyntaxNode) -> Option { if let Some(type_param_list) = it.generic_param_list() { collapse_ws(type_param_list.syntax(), &mut detail); } - if let Some(param_list) = it.param_list() { + let has_self_param = if let Some(param_list) = it.param_list() { collapse_ws(param_list.syntax(), &mut detail); - } + param_list.self_param().is_some() + } else { + false + }; if let Some(ret_type) = it.ret_type() { detail.push(' '); collapse_ws(ret_type.syntax(), &mut detail); } - decl_with_detail(&it, Some(detail), StructureNodeKind::SymbolKind(SymbolKind::Function)) + decl_with_detail(&it, Some(detail), StructureNodeKind::SymbolKind(if has_self_param { + SymbolKind::Method + } else { + SymbolKind::Function + })) }, ast::Struct(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Struct)), ast::Union(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Union)), diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index 8a12cbacccc70..76e5e9dd9288a 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -337,6 +337,77 @@ impl Tr for S { const C: usize = 4; //^ } +"#, + ); + } + + #[test] + fn goto_adt_implementation_inside_block() { + check( + r#" +//- minicore: copy, derive +trait Bar {} + +fn test() { + 
#[derive(Copy)] + //^^^^^^^^^^^^^^^ + struct Foo$0; + + impl Foo {} + //^^^ + + trait Baz {} + + impl Bar for Foo {} + //^^^ + + impl Baz for Foo {} + //^^^ +} +"#, + ); + } + + #[test] + fn goto_trait_implementation_inside_block() { + check( + r#" +struct Bar; + +fn test() { + trait Foo$0 {} + + struct Baz; + + impl Foo for Bar {} + //^^^ + + impl Foo for Baz {} + //^^^ +} +"#, + ); + check( + r#" +struct Bar; + +fn test() { + trait Foo { + fn foo$0() {} + } + + struct Baz; + + impl Foo for Bar { + fn foo() {} + //^^^ + } + + impl Foo for Baz { + fn foo() {} + //^^^ + } +} "#, ); } diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 8f4c629b58130..822751c0e4ce7 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -33,6 +33,7 @@ pub struct HoverConfig { pub keywords: bool, pub format: HoverDocFormat, pub max_trait_assoc_items_count: Option, + pub max_struct_field_count: Option, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index 63777d4910503..abedbff831a50 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -410,6 +410,9 @@ pub(super) fn definition( Definition::Trait(trait_) => { trait_.display_limited(db, config.max_trait_assoc_items_count).to_string() } + Definition::Adt(Adt::Struct(struct_)) => { + struct_.display_limited(db, config.max_struct_field_count).to_string() + } _ => def.label(db), }; let docs = def.docs(db, famous_defs); diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 4451e31870f26..08925fcdff580 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -18,6 +18,7 @@ const HOVER_BASE_CONFIG: HoverConfig = HoverConfig { format: HoverDocFormat::Markdown, keywords: true, max_trait_assoc_items_count: None, + max_struct_field_count: None, }; fn check_hover_no_result(ra_fixture: &str) { @@ -49,6 +50,28 @@ fn check(ra_fixture: &str, expect: Expect) { expect.assert_eq(&actual) } +#[track_caller] +fn check_hover_struct_limit(count: usize, ra_fixture: &str, expect: Expect) { + let (analysis, position) = fixture::position(ra_fixture); + let hover = analysis + .hover( + &HoverConfig { + links_in_hover: true, + max_struct_field_count: Some(count), + ..HOVER_BASE_CONFIG + }, + FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, + ) + .unwrap() + .unwrap(); + + let content = analysis.db.file_text(position.file_id); + let hovered_element = &content[hover.range]; + + let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); + expect.assert_eq(&actual) +} + #[track_caller] fn check_assoc_count(count: usize, ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); @@ -853,9 +876,7 @@ struct Foo$0 { field: u32 } ```rust // size = 4, align = 4 - struct Foo { - field: u32, - } + struct Foo ``` "#]], ); @@ -875,8 +896,74 @@ struct Foo$0 where u32: Copy { field: u32 } struct Foo where u32: Copy, - { - field: u32, + ``` + "#]], + ); +} + +#[test] +fn hover_record_struct_limit() { + check_hover_struct_limit( + 3, + r#" + struct Foo$0 { a: u32, b: i32, c: i32 } + "#, + expect![[r#" + *Foo* + + ```rust + test + ``` + + ```rust + // size = 12 (0xC), align = 4 + struct Foo { + a: u32, + b: i32, + c: i32, + } + ``` + "#]], + ); + check_hover_struct_limit( + 3, + r#" + struct Foo$0 { a: u32 } + "#, + expect![[r#" + *Foo* + + ```rust + test + ``` + + ```rust + // size = 4, align = 4 + struct Foo { + a: u32, + } + ``` + "#]], + ); 
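// The new max_struct_field_count hover option caps how many fields are listed when hovering
// a struct: within the limit all fields are shown, past it the body is elided with `/* … */`
// (see the expectation above), and with no limit configured the plain hover renders just
// `struct Foo`. A rough sketch of the truncation, detached from HirDisplay:
fn render_struct_hover(name: &str, fields: &[(&str, &str)], limit: usize) -> String {
    let shown = limit.min(fields.len());
    let mut out = format!("struct {name} {{\n");
    for (field, ty) in &fields[..shown] {
        out.push_str(&format!("    {field}: {ty},\n"));
    }
    if shown < fields.len() {
        out.push_str("    /* … */\n");
    }
    out.push('}');
    out
}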
+ check_hover_struct_limit( + 3, + r#" + struct Foo$0 { a: u32, b: i32, c: i32, d: u32 } + "#, + expect![[r#" + *Foo* + + ```rust + test + ``` + + ```rust + // size = 16 (0x10), align = 4 + struct Foo { + a: u32, + b: i32, + c: i32, + /* … */ } ``` "#]], @@ -1344,9 +1431,7 @@ impl Thing { ``` ```rust - struct Thing { - x: u32, - } + struct Thing ``` "#]], ); @@ -1365,9 +1450,7 @@ impl Thing { ``` ```rust - struct Thing { - x: u32, - } + struct Thing ``` "#]], ); @@ -2599,7 +2682,7 @@ fn main() { let s$0t = S{ f1:0 }; } focus_range: 7..8, name: "S", kind: Struct, - description: "struct S {\n f1: u32,\n}", + description: "struct S", }, }, ], @@ -2645,7 +2728,7 @@ fn main() { let s$0t = S{ f1:Arg(0) }; } focus_range: 24..25, name: "S", kind: Struct, - description: "struct S {\n f1: T,\n}", + description: "struct S", }, }, ], @@ -2704,7 +2787,7 @@ fn main() { let s$0t = S{ f1: S{ f1: Arg(0) } }; } focus_range: 24..25, name: "S", kind: Struct, - description: "struct S {\n f1: T,\n}", + description: "struct S", }, }, ], @@ -2957,7 +3040,7 @@ fn main() { let s$0t = foo(); } focus_range: 39..41, name: "S1", kind: Struct, - description: "struct S1 {}", + description: "struct S1", }, }, HoverGotoTypeData { @@ -2970,7 +3053,7 @@ fn main() { let s$0t = foo(); } focus_range: 52..54, name: "S2", kind: Struct, - description: "struct S2 {}", + description: "struct S2", }, }, ], @@ -3061,7 +3144,7 @@ fn foo(ar$0g: &impl Foo + Bar) {} focus_range: 36..37, name: "S", kind: Struct, - description: "struct S {}", + description: "struct S", }, }, ], @@ -3161,7 +3244,7 @@ fn foo(ar$0g: &impl Foo) {} focus_range: 23..24, name: "S", kind: Struct, - description: "struct S {}", + description: "struct S", }, }, ], @@ -3198,7 +3281,7 @@ fn main() { let s$0t = foo(); } focus_range: 49..50, name: "B", kind: Struct, - description: "struct B {}", + description: "struct B", }, }, HoverGotoTypeData { @@ -3287,7 +3370,7 @@ fn foo(ar$0g: &dyn Foo) {} focus_range: 23..24, name: "S", kind: Struct, - description: "struct S {}", + description: "struct S", }, }, ], @@ -3322,7 +3405,7 @@ fn foo(a$0rg: &impl ImplTrait>>>) {} focus_range: 50..51, name: "B", kind: Struct, - description: "struct B {}", + description: "struct B", }, }, HoverGotoTypeData { @@ -3361,7 +3444,7 @@ fn foo(a$0rg: &impl ImplTrait>>>) {} focus_range: 65..66, name: "S", kind: Struct, - description: "struct S {}", + description: "struct S", }, }, ], @@ -5105,6 +5188,32 @@ fn foo(e: E) { ); } +#[test] +fn hover_const_value() { + check( + r#" +pub enum AA { + BB, +} +const CONST: AA = AA::BB; +pub fn the_function() -> AA { + CON$0ST +} +"#, + expect![[r#" + *CONST* + + ```rust + test + ``` + + ```rust + const CONST: AA = BB + ``` + "#]], + ); +} + #[test] fn array_repeat_exp() { check( @@ -7747,3 +7856,25 @@ impl Iterator for S { "#]], ); } + +#[test] +fn hover_lifetime_regression_16963() { + check( + r#" +struct Pedro$0<'a> { + hola: &'a str +} +"#, + expect![[r#" + *Pedro* + + ```rust + test + ``` + + ```rust + struct Pedro<'a> + ``` + "#]], + ) +} diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 8311e770b4b41..dda38ce77e0c5 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -1,5 +1,6 @@ use std::{ fmt::{self, Write}, + hash::{BuildHasher, BuildHasherDefault}, mem::take, }; @@ -8,7 +9,7 @@ use hir::{ known, ClosureStyle, HasVisibility, HirDisplay, HirDisplayError, HirWrite, ModuleDef, ModuleDefId, Semantics, }; -use ide_db::{base_db::FileRange, famous_defs::FamousDefs, RootDatabase}; +use 
ide_db::{base_db::FileRange, famous_defs::FamousDefs, FxHasher, RootDatabase}; use itertools::Itertools; use smallvec::{smallvec, SmallVec}; use stdx::never; @@ -116,7 +117,7 @@ pub enum AdjustmentHintsMode { PreferPostfix, } -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub enum InlayKind { Adjustment, BindingMode, @@ -132,7 +133,7 @@ pub enum InlayKind { RangeExclusive, } -#[derive(Debug)] +#[derive(Debug, Hash)] pub enum InlayHintPosition { Before, After, @@ -151,13 +152,23 @@ pub struct InlayHint { pub label: InlayHintLabel, /// Text edit to apply when "accepting" this inlay hint. pub text_edit: Option, - pub needs_resolve: bool, +} + +impl std::hash::Hash for InlayHint { + fn hash(&self, state: &mut H) { + self.range.hash(state); + self.position.hash(state); + self.pad_left.hash(state); + self.pad_right.hash(state); + self.kind.hash(state); + self.label.hash(state); + self.text_edit.is_some().hash(state); + } } impl InlayHint { fn closing_paren_after(kind: InlayKind, range: TextRange) -> InlayHint { InlayHint { - needs_resolve: false, range, kind, label: InlayHintLabel::from(")"), @@ -167,9 +178,9 @@ impl InlayHint { pad_right: false, } } + fn opening_paren_before(kind: InlayKind, range: TextRange) -> InlayHint { InlayHint { - needs_resolve: false, range, kind, label: InlayHintLabel::from("("), @@ -179,15 +190,19 @@ impl InlayHint { pad_right: false, } } + + pub fn needs_resolve(&self) -> bool { + self.text_edit.is_some() || self.label.needs_resolve() + } } -#[derive(Debug)] +#[derive(Debug, Hash)] pub enum InlayTooltip { String(String), Markdown(String), } -#[derive(Default)] +#[derive(Default, Hash)] pub struct InlayHintLabel { pub parts: SmallVec<[InlayHintLabelPart; 1]>, } @@ -265,6 +280,7 @@ impl fmt::Debug for InlayHintLabel { } } +#[derive(Hash)] pub struct InlayHintLabelPart { pub text: String, /// Source location represented by this label part. The client will use this to fetch the part's @@ -313,9 +329,7 @@ impl fmt::Write for InlayHintLabelBuilder<'_> { impl HirWrite for InlayHintLabelBuilder<'_> { fn start_location_link(&mut self, def: ModuleDefId) { - if self.location.is_some() { - never!("location link is already started"); - } + never!(self.location.is_some(), "location link is already started"); self.make_new_part(); let Some(location) = ModuleDef::from(def).try_to_nav(self.db) else { return }; let location = location.call_site(); @@ -425,11 +439,6 @@ fn ty_to_text_edit( Some(builder.finish()) } -pub enum RangeLimit { - Fixed(TextRange), - NearestParent(TextSize), -} - // Feature: Inlay Hints // // rust-analyzer shows additional information inline with the source code. 
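// InlayHint no longer carries a needs_resolve flag: needs_resolve() is derived from the
// label and text_edit, and resolve requests identify a hint by a hash (the manual Hash impl
// above deliberately hashes text_edit only as a present/absent bit). inlay_hints_resolve
// then recomputes the hints around the request position and picks the one whose hash
// matches. A minimal sketch of that lookup, with std's default hasher standing in for the
// FxHasher used by the patch:
use std::collections::hash_map::DefaultHasher;
use std::hash::{BuildHasher, BuildHasherDefault, Hash};

fn hint_hash<T: Hash>(hint: &T) -> u64 {
    // Deterministic within one process, which is all the request/response round-trip needs.
    BuildHasherDefault::<DefaultHasher>::default().hash_one(hint)
}

fn resolve_by_hash<T: Hash>(candidates: Vec<T>, wanted_hash: u64) -> Option<T> {
    candidates.into_iter().find(|hint| hint_hash(hint) == wanted_hash)
}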
@@ -451,7 +460,7 @@ pub enum RangeLimit { pub(crate) fn inlay_hints( db: &RootDatabase, file_id: FileId, - range_limit: Option, + range_limit: Option, config: &InlayHintsConfig, ) -> Vec { let _p = tracing::span!(tracing::Level::INFO, "inlay_hints").entered(); @@ -466,31 +475,13 @@ pub(crate) fn inlay_hints( let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node); match range_limit { - Some(RangeLimit::Fixed(range)) => match file.covering_element(range) { + Some(range) => match file.covering_element(range) { NodeOrToken::Token(_) => return acc, NodeOrToken::Node(n) => n .descendants() .filter(|descendant| range.intersect(descendant.text_range()).is_some()) .for_each(hints), }, - Some(RangeLimit::NearestParent(position)) => { - match file.token_at_offset(position).left_biased() { - Some(token) => { - if let Some(parent_block) = - token.parent_ancestors().find_map(ast::BlockExpr::cast) - { - parent_block.syntax().descendants().for_each(hints) - } else if let Some(parent_item) = - token.parent_ancestors().find_map(ast::Item::cast) - { - parent_item.syntax().descendants().for_each(hints) - } else { - return acc; - } - } - None => return acc, - } - } None => file.descendants().for_each(hints), }; } @@ -498,6 +489,39 @@ pub(crate) fn inlay_hints( acc } +pub(crate) fn inlay_hints_resolve( + db: &RootDatabase, + file_id: FileId, + position: TextSize, + hash: u64, + config: &InlayHintsConfig, +) -> Option { + let _p = tracing::span!(tracing::Level::INFO, "inlay_hints").entered(); + let sema = Semantics::new(db); + let file = sema.parse(file_id); + let file = file.syntax(); + + let scope = sema.scope(file)?; + let famous_defs = FamousDefs(&sema, scope.krate()); + let mut acc = Vec::new(); + + let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node); + match file.token_at_offset(position).left_biased() { + Some(token) => { + if let Some(parent_block) = token.parent_ancestors().find_map(ast::BlockExpr::cast) { + parent_block.syntax().descendants().for_each(hints) + } else if let Some(parent_item) = token.parent_ancestors().find_map(ast::Item::cast) { + parent_item.syntax().descendants().for_each(hints) + } else { + return None; + } + } + None => return None, + } + + acc.into_iter().find(|hint| BuildHasherDefault::::default().hash_one(hint) == hash) +} + fn hints( hints: &mut Vec, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs index 631807d99a7e6..20128a286f26d 100644 --- a/crates/ide/src/inlay_hints/adjustment.rs +++ b/crates/ide/src/inlay_hints/adjustment.rs @@ -147,7 +147,6 @@ pub(super) fn hints( None, ); acc.push(InlayHint { - needs_resolve: label.needs_resolve(), range: expr.syntax().text_range(), pad_left: false, pad_right: false, diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs index 45b51e3557034..07b9f9cc1fff6 100644 --- a/crates/ide/src/inlay_hints/bind_pat.rs +++ b/crates/ide/src/inlay_hints/bind_pat.rs @@ -99,7 +99,6 @@ pub(super) fn hints( None => pat.syntax().text_range(), }; acc.push(InlayHint { - needs_resolve: label.needs_resolve() || text_edit.is_some(), range: match type_ascriptable { Some(Some(t)) => text_range.cover(t.text_range()), _ => text_range, @@ -177,11 +176,7 @@ mod tests { use syntax::{TextRange, TextSize}; use test_utils::extract_annotations; - use crate::{ - fixture, - inlay_hints::{InlayHintsConfig, RangeLimit}, - ClosureReturnTypeHints, - }; + use crate::{fixture, 
inlay_hints::InlayHintsConfig, ClosureReturnTypeHints}; use crate::inlay_hints::tests::{ check, check_edit, check_no_edit, check_with_config, DISABLED_CONFIG, TEST_CONFIG, @@ -404,7 +399,7 @@ fn main() { .inlay_hints( &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, file_id, - Some(RangeLimit::Fixed(TextRange::new(TextSize::from(500), TextSize::from(600)))), + Some(TextRange::new(TextSize::from(500), TextSize::from(600))), ) .unwrap(); let actual = diff --git a/crates/ide/src/inlay_hints/binding_mode.rs b/crates/ide/src/inlay_hints/binding_mode.rs index 35504ffa7859f..f27390ee898bb 100644 --- a/crates/ide/src/inlay_hints/binding_mode.rs +++ b/crates/ide/src/inlay_hints/binding_mode.rs @@ -50,7 +50,6 @@ pub(super) fn hints( _ => return, }; acc.push(InlayHint { - needs_resolve: false, range, kind: InlayKind::BindingMode, label: r.into(), @@ -69,7 +68,6 @@ pub(super) fn hints( hir::BindingMode::Ref(Mutability::Shared) => "ref", }; acc.push(InlayHint { - needs_resolve: false, range: pat.syntax().text_range(), kind: InlayKind::BindingMode, label: bm.into(), diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs index b6063978e9233..d86487d4b41b8 100644 --- a/crates/ide/src/inlay_hints/chaining.rs +++ b/crates/ide/src/inlay_hints/chaining.rs @@ -59,7 +59,6 @@ pub(super) fn hints( } let label = label_of_ty(famous_defs, config, &ty)?; acc.push(InlayHint { - needs_resolve: label.needs_resolve(), range: expr.syntax().text_range(), kind: InlayKind::Chaining, label, diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs index 2b68538c198c4..2cefd5acdc2e4 100644 --- a/crates/ide/src/inlay_hints/closing_brace.rs +++ b/crates/ide/src/inlay_hints/closing_brace.rs @@ -109,7 +109,6 @@ pub(super) fn hints( let linked_location = name_range.map(|range| FileRange { file_id, range }); acc.push(InlayHint { - needs_resolve: linked_location.is_some(), range: closing_token.text_range(), kind: InlayKind::ClosingBrace, label: InlayHintLabel::simple(label, None, linked_location), diff --git a/crates/ide/src/inlay_hints/closure_captures.rs b/crates/ide/src/inlay_hints/closure_captures.rs index 2f8b959516df8..f1b524e088095 100644 --- a/crates/ide/src/inlay_hints/closure_captures.rs +++ b/crates/ide/src/inlay_hints/closure_captures.rs @@ -32,7 +32,6 @@ pub(super) fn hints( let range = closure.syntax().first_token()?.prev_token()?.text_range(); let range = TextRange::new(range.end() - TextSize::from(1), range.end()); acc.push(InlayHint { - needs_resolve: false, range, kind: InlayKind::ClosureCapture, label: InlayHintLabel::from("move"), @@ -45,7 +44,6 @@ pub(super) fn hints( } }; acc.push(InlayHint { - needs_resolve: false, range: move_kw_range, kind: InlayKind::ClosureCapture, label: InlayHintLabel::from("("), @@ -79,7 +77,6 @@ pub(super) fn hints( }), ); acc.push(InlayHint { - needs_resolve: label.needs_resolve(), range: move_kw_range, kind: InlayKind::ClosureCapture, label, @@ -91,7 +88,6 @@ pub(super) fn hints( if idx != last { acc.push(InlayHint { - needs_resolve: false, range: move_kw_range, kind: InlayKind::ClosureCapture, label: InlayHintLabel::from(", "), @@ -103,7 +99,6 @@ pub(super) fn hints( } } acc.push(InlayHint { - needs_resolve: false, range: move_kw_range, kind: InlayKind::ClosureCapture, label: InlayHintLabel::from(")"), diff --git a/crates/ide/src/inlay_hints/closure_ret.rs b/crates/ide/src/inlay_hints/closure_ret.rs index 204967cd7ca89..3b41db0f13d0a 100644 --- a/crates/ide/src/inlay_hints/closure_ret.rs +++ 
b/crates/ide/src/inlay_hints/closure_ret.rs @@ -64,7 +64,6 @@ pub(super) fn hints( }; acc.push(InlayHint { - needs_resolve: label.needs_resolve() || text_edit.is_some(), range: param_list.syntax().text_range(), kind: InlayKind::Type, label, diff --git a/crates/ide/src/inlay_hints/discriminant.rs b/crates/ide/src/inlay_hints/discriminant.rs index 06cce147d2a16..202954100fb97 100644 --- a/crates/ide/src/inlay_hints/discriminant.rs +++ b/crates/ide/src/inlay_hints/discriminant.rs @@ -79,7 +79,6 @@ fn variant_hints( None, ); acc.push(InlayHint { - needs_resolve: label.needs_resolve(), range: match eq_token { Some(t) => range.cover(t.text_range()), _ => range, diff --git a/crates/ide/src/inlay_hints/fn_lifetime_fn.rs b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs index 6e5f23bed09fb..d3666754e2be9 100644 --- a/crates/ide/src/inlay_hints/fn_lifetime_fn.rs +++ b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs @@ -22,7 +22,6 @@ pub(super) fn hints( } let mk_lt_hint = |t: SyntaxToken, label: String| InlayHint { - needs_resolve: false, range: t.text_range(), kind: InlayKind::Lifetime, label: label.into(), @@ -184,7 +183,6 @@ pub(super) fn hints( let angle_tok = gpl.l_angle_token()?; let is_empty = gpl.generic_params().next().is_none(); acc.push(InlayHint { - needs_resolve: false, range: angle_tok.text_range(), kind: InlayKind::Lifetime, label: format!( @@ -200,7 +198,6 @@ pub(super) fn hints( }); } (None, allocated_lifetimes) => acc.push(InlayHint { - needs_resolve: false, range: func.name()?.syntax().text_range(), kind: InlayKind::GenericParamList, label: format!("<{}>", allocated_lifetimes.iter().format(", "),).into(), diff --git a/crates/ide/src/inlay_hints/implicit_drop.rs b/crates/ide/src/inlay_hints/implicit_drop.rs index 5ba4e514e1f08..31f0c79037453 100644 --- a/crates/ide/src/inlay_hints/implicit_drop.rs +++ b/crates/ide/src/inlay_hints/implicit_drop.rs @@ -105,7 +105,6 @@ pub(super) fn hints( pad_left: true, pad_right: true, kind: InlayKind::Drop, - needs_resolve: label.needs_resolve(), label, text_edit: None, }) diff --git a/crates/ide/src/inlay_hints/implicit_static.rs b/crates/ide/src/inlay_hints/implicit_static.rs index f18e6421cbcbd..42223ddf580e3 100644 --- a/crates/ide/src/inlay_hints/implicit_static.rs +++ b/crates/ide/src/inlay_hints/implicit_static.rs @@ -31,7 +31,6 @@ pub(super) fn hints( if ty.lifetime().is_none() { let t = ty.amp_token()?; acc.push(InlayHint { - needs_resolve: false, range: t.text_range(), kind: InlayKind::Lifetime, label: "'static".into(), diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs index 418fc002a8beb..96e845b2f3235 100644 --- a/crates/ide/src/inlay_hints/param_name.rs +++ b/crates/ide/src/inlay_hints/param_name.rs @@ -57,7 +57,6 @@ pub(super) fn hints( let label = InlayHintLabel::simple(format!("{param_name}{colon}"), None, linked_location); InlayHint { - needs_resolve: label.needs_resolve(), range, kind: InlayKind::Parameter, label, diff --git a/crates/ide/src/inlay_hints/range_exclusive.rs b/crates/ide/src/inlay_hints/range_exclusive.rs index c4b0c199fc239..bfb92838857ca 100644 --- a/crates/ide/src/inlay_hints/range_exclusive.rs +++ b/crates/ide/src/inlay_hints/range_exclusive.rs @@ -30,7 +30,6 @@ fn inlay_hint(token: SyntaxToken) -> InlayHint { kind: crate::InlayKind::RangeExclusive, label: crate::InlayHintLabel::from("<"), text_edit: None, - needs_resolve: false, } } diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 6955e14a10a14..ad48d803895f9 100644 --- a/crates/ide/src/lib.rs +++ 
b/crates/ide/src/lib.rs @@ -58,8 +58,6 @@ mod view_item_tree; mod view_memory_layout; mod view_mir; -use std::ffi::OsStr; - use cfg::CfgOptions; use fetch_crates::CrateInfo; use hir::ChangeWithProcMacros; @@ -90,7 +88,7 @@ pub use crate::{ inlay_hints::{ AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints, InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayHintPosition, - InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, RangeLimit, + InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, }, join_lines::JoinLinesConfig, markup::Markup, @@ -121,8 +119,8 @@ pub use ide_completion::{ }; pub use ide_db::{ base_db::{ - Cancelled, CrateGraph, CrateId, Edition, FileChange, FileId, FilePosition, FileRange, - SourceRoot, SourceRootId, + Cancelled, CrateGraph, CrateId, FileChange, FileId, FilePosition, FileRange, SourceRoot, + SourceRootId, }, documentation::Documentation, label::Label, @@ -137,6 +135,7 @@ pub use ide_diagnostics::{ Diagnostic, DiagnosticCode, DiagnosticsConfig, ExprFillDefaultMode, Severity, }; pub use ide_ssr::SsrError; +pub use span::Edition; pub use syntax::{TextRange, TextSize}; pub use text_edit::{Indel, TextEdit}; @@ -354,6 +353,10 @@ impl Analysis { self.with_db(|db| test_explorer::discover_tests_in_crate(db, crate_id)) } + pub fn discover_tests_in_file(&self, file_id: FileId) -> Cancellable> { + self.with_db(|db| test_explorer::discover_tests_in_file(db, file_id)) + } + /// Renders the crate graph to GraphViz "dot" syntax. pub fn view_crate_graph(&self, full: bool) -> Cancellable> { self.with_db(|db| view_crate_graph::view_crate_graph(db, full)) @@ -415,10 +418,19 @@ impl Analysis { &self, config: &InlayHintsConfig, file_id: FileId, - range: Option, + range: Option, ) -> Cancellable> { self.with_db(|db| inlay_hints::inlay_hints(db, file_id, range, config)) } + pub fn inlay_hints_resolve( + &self, + config: &InlayHintsConfig, + file_id: FileId, + position: TextSize, + hash: u64, + ) -> Cancellable> { + self.with_db(|db| inlay_hints::inlay_hints_resolve(db, file_id, position, hash, config)) + } /// Returns the set of folding ranges. 
pub fn folding_ranges(&self, file_id: FileId) -> Cancellable> { @@ -502,8 +514,8 @@ impl Analysis { pub fn external_docs( &self, position: FilePosition, - target_dir: Option<&OsStr>, - sysroot: Option<&OsStr>, + target_dir: Option<&str>, + sysroot: Option<&str>, ) -> Cancellable { self.with_db(|db| { doc_links::external_docs(db, position, target_dir, sysroot).unwrap_or_default() diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index fe063081f7999..3fef16df25e36 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -167,6 +167,7 @@ impl StaticIndex<'_> { keywords: true, format: crate::HoverDocFormat::Markdown, max_trait_assoc_items_count: None, + max_struct_field_count: None, }; let tokens = tokens.filter(|token| { matches!( diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index c3d85e38936d9..8e7767c8e5d79 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs @@ -10,7 +10,7 @@ use ide_db::{ debug::{DebugQueryTable, TableEntry}, Query, QueryTable, }, - CrateData, FileId, FileTextQuery, ParseQuery, SourceDatabase, SourceRootId, + CompressedFileTextQuery, CrateData, FileId, ParseQuery, SourceDatabase, SourceRootId, }, symbol_index::ModuleSymbolsQuery, }; @@ -38,7 +38,7 @@ use triomphe::Arc; pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { let mut buf = String::new(); - format_to!(buf, "{}\n", collect_query(FileTextQuery.in_db(db))); + format_to!(buf, "{}\n", collect_query(CompressedFileTextQuery.in_db(db))); format_to!(buf, "{}\n", collect_query(ParseQuery.in_db(db))); format_to!(buf, "{}\n", collect_query(ParseMacroExpansionQuery.in_db(db))); format_to!(buf, "{}\n", collect_query(LibrarySymbolsQuery.in_db(db))); @@ -160,7 +160,7 @@ impl QueryCollect for ParseMacroExpansionQuery { type Collector = SyntaxTreeStats; } -impl QueryCollect for FileTextQuery { +impl QueryCollect for CompressedFileTextQuery { type Collector = FilesStats; } @@ -188,8 +188,8 @@ impl fmt::Display for FilesStats { } } -impl StatCollect> for FilesStats { - fn collect_entry(&mut self, _: FileId, value: Option>) { +impl StatCollect> for FilesStats { + fn collect_entry(&mut self, _: FileId, value: Option>) { self.total += 1; self.size += value.unwrap().len(); } diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index 96c7c47559421..e7346cbb9925c 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs @@ -178,6 +178,23 @@ fn keyword( T![do] | T![yeet] if parent_matches::(&token) => h | HlMod::ControlFlow, T![for] if parent_matches::(&token) => h | HlMod::ControlFlow, T![unsafe] => h | HlMod::Unsafe, + T![const] + if token.parent().map_or(false, |it| { + matches!( + it.kind(), + SyntaxKind::CONST + | SyntaxKind::FN + | SyntaxKind::IMPL + | SyntaxKind::BLOCK_EXPR + | SyntaxKind::CLOSURE_EXPR + | SyntaxKind::FN_PTR_TYPE + | SyntaxKind::TYPE_BOUND + | SyntaxKind::CONST_BLOCK_PAT + ) + }) => + { + h | HlMod::Const + } T![true] | T![false] => HlTag::BoolLiteral.into(), // crate is handled just as a token if it's in an `extern crate` T![crate] if parent_matches::(&token) => h, @@ -377,14 +394,17 @@ pub(super) fn highlight_def( if let Some(item) = func.as_assoc_item(db) { h |= HlMod::Associated; match func.self_param(db) { - Some(sp) => match sp.access(db) { - hir::Access::Exclusive => { - h |= HlMod::Mutable; - h |= HlMod::Reference; + Some(sp) => { + h.tag = HlTag::Symbol(SymbolKind::Method); + match 
sp.access(db) { + hir::Access::Exclusive => { + h |= HlMod::Mutable; + h |= HlMod::Reference; + } + hir::Access::Shared => h |= HlMod::Reference, + hir::Access::Owned => h |= HlMod::Consuming, } - hir::Access::Shared => h |= HlMod::Reference, - hir::Access::Owned => h |= HlMod::Consuming, - }, + } None => h |= HlMod::Static, } @@ -406,6 +426,9 @@ pub(super) fn highlight_def( if func.is_async(db) { h |= HlMod::Async; } + if func.is_const(db) { + h |= HlMod::Const; + } h } @@ -420,10 +443,11 @@ pub(super) fn highlight_def( } Definition::Variant(_) => Highlight::new(HlTag::Symbol(SymbolKind::Variant)), Definition::Const(konst) => { - let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Const)); + let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Const)) | HlMod::Const; if let Some(item) = konst.as_assoc_item(db) { h |= HlMod::Associated; + h |= HlMod::Static; match item.container(db) { hir::AssocItemContainer::Impl(i) => { if i.trait_(db).is_some() { @@ -445,6 +469,7 @@ pub(super) fn highlight_def( if let Some(item) = type_.as_assoc_item(db) { h |= HlMod::Associated; + h |= HlMod::Static; match item.container(db) { hir::AssocItemContainer::Impl(i) => { if i.trait_(db).is_some() { @@ -474,7 +499,7 @@ pub(super) fn highlight_def( Definition::GenericParam(it) => match it { hir::GenericParam::TypeParam(_) => Highlight::new(HlTag::Symbol(SymbolKind::TypeParam)), hir::GenericParam::ConstParam(_) => { - Highlight::new(HlTag::Symbol(SymbolKind::ConstParam)) + Highlight::new(HlTag::Symbol(SymbolKind::ConstParam)) | HlMod::Const } hir::GenericParam::LifetimeParam(_) => { Highlight::new(HlTag::Symbol(SymbolKind::LifetimeParam)) @@ -550,8 +575,7 @@ fn highlight_method_call( ) -> Option { let func = sema.resolve_method_call(method_call)?; - let mut h = SymbolKind::Function.into(); - h |= HlMod::Associated; + let mut h = SymbolKind::Method.into(); if func.is_unsafe_to_call(sema.db) || sema.is_unsafe_method_call(method_call) { h |= HlMod::Unsafe; @@ -647,7 +671,7 @@ fn highlight_name_ref_by_syntax( match parent.kind() { METHOD_CALL_EXPR => ast::MethodCallExpr::cast(parent) .and_then(|it| highlight_method_call(sema, krate, &it)) - .unwrap_or_else(|| SymbolKind::Function.into()), + .unwrap_or_else(|| SymbolKind::Method.into()), FIELD_EXPR => { let h = HlTag::Symbol(SymbolKind::Field); let is_union = ast::FieldExpr::cast(parent) diff --git a/crates/ide/src/syntax_highlighting/html.rs b/crates/ide/src/syntax_highlighting/html.rs index a6dca0541e5f0..e754b702deed1 100644 --- a/crates/ide/src/syntax_highlighting/html.rs +++ b/crates/ide/src/syntax_highlighting/html.rs @@ -109,6 +109,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } diff --git a/crates/ide/src/syntax_highlighting/tags.rs b/crates/ide/src/syntax_highlighting/tags.rs index 5163a0de41770..5c7a463ccdc95 100644 --- a/crates/ide/src/syntax_highlighting/tags.rs +++ b/crates/ide/src/syntax_highlighting/tags.rs @@ -46,7 +46,7 @@ pub enum HlTag { #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] #[repr(u8)] pub enum HlMod { - /// Used for items in traits and impls. + /// Used for associated items. Associated = 0, /// Used with keywords like `async` and `await`. 
Async, @@ -54,6 +54,8 @@ pub enum HlMod { Attribute, /// Callable item or value. Callable, + /// Constant operation. + Const, /// Value that is being consumed in a function call Consuming, /// Used with keywords like `if` and `break`. @@ -82,7 +84,7 @@ pub enum HlMod { Public, /// Immutable reference. Reference, - /// Used for associated functions. + /// Used for associated items, except Methods. (Some languages call these static members) Static, /// Used for items in traits and trait impls. Trait, @@ -147,6 +149,7 @@ impl HlTag { SymbolKind::LifetimeParam => "lifetime", SymbolKind::Local => "variable", SymbolKind::Macro => "macro", + SymbolKind::Method => "method", SymbolKind::ProcMacro => "proc_macro", SymbolKind::Module => "module", SymbolKind::SelfParam => "self_keyword", @@ -211,6 +214,7 @@ impl HlMod { HlMod::Async, HlMod::Attribute, HlMod::Callable, + HlMod::Const, HlMod::Consuming, HlMod::ControlFlow, HlMod::CrateRoot, @@ -237,6 +241,7 @@ impl HlMod { HlMod::Attribute => "attribute", HlMod::Callable => "callable", HlMod::Consuming => "consuming", + HlMod::Const => "const", HlMod::ControlFlow => "control", HlMod::CrateRoot => "crate_root", HlMod::DefaultLibrary => "default_library", diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html b/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html index 6994cb3d5c542..9c7f03fc1587e 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } @@ -50,15 +51,15 @@ impl foo { pub fn is_static() {} - pub fn is_not_static(&self) {} + pub fn is_not_static(&self) {} } trait t { fn t_is_static() {} - fn t_is_not_static(&self) {} + fn t_is_not_static(&self) {} } impl t for foo { pub fn is_static() {} - pub fn is_not_static(&self) {} + pub fn is_not_static(&self) {} } \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html b/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html index dc2d103b5815f..de902b5137d0b 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } @@ -47,7 +48,7 @@
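// Summary of the highlighting additions around here: a new HlMod::Const modifier (surfaced
// as the `.const` class in the HTML test themes) is applied to the `const` keyword in the
// syntax positions matched in keyword(), to const items and const fns, and to const generic
// parameters. The covered positions, written as ordinary Rust:
const ITEM: u8 = 0; // `const` item (SyntaxKind::CONST)

const fn constant_fn() {} // `const fn` (SyntaxKind::FN)

fn with_const_param<const N: usize>() -> usize {
    N // the const generic parameter itself also gets HlMod::Const
}

fn inline_const_block() -> u32 {
    const { 40 + 2 } // inline const block (SyntaxKind::BLOCK_EXPR), stable since Rust 1.79
}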
#[allow(dead_code)]
 #[rustfmt::skip]
 #[proc_macros::identity]
-#[derive(Copy)]
+#[derive(Default)]
 /// This is a doc comment
 // This is a normal comment
 /// This is a doc comment
@@ -57,4 +58,7 @@
 // This is another normal comment
 #[derive(Copy, Unresolved)]
 // The reason for these being here is to test AttrIds
-struct Foo;
\ No newline at end of file +enum Foo { + #[default] + Bar +} \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_block_mod_items.html b/crates/ide/src/syntax_highlighting/test_data/highlight_block_mod_items.html index 093cc2358a676..eed3968a9068d 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_block_mod_items.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_block_mod_items.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } @@ -58,7 +59,7 @@ foo!(Bar); fn func() { mod inner { - struct Innerest<const C: usize> { field: [(); {C}] } + struct Innerest<const C: usize> { field: [(); {C}] } } } } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_const.html b/crates/ide/src/syntax_highlighting/test_data/highlight_const.html new file mode 100644 index 0000000000000..cd6ffc2c3ae3b --- /dev/null +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_const.html @@ -0,0 +1,83 @@ + + +
macro_rules! id {
+    ($($tt:tt)*) => {
+        $($tt)*
+    };
+}
+const CONST_ITEM: *const () = &raw const ();
+const fn const_fn<const CONST_PARAM: ()>(const {}: const fn()) where (): const ConstTrait {
+    CONST_ITEM;
+    CONST_PARAM;
+    const {
+        const || {}
+    }
+    id!(
+        CONST_ITEM;
+        CONST_PARAM;
+        const {
+            const || {}
+        };
+        &raw const ();
+        const
+    );
+}
+trait ConstTrait {
+    const ASSOC_CONST: () = ();
+    const fn assoc_const_fn() {}
+}
+impl const ConstTrait for () {
+    const ASSOC_CONST: () = ();
+    const fn assoc_const_fn() {}
+}
+
+macro_rules! unsafe_deref {
+    () => {
+        *(&() as *const ())
+    };
+}
\ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html b/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html index 154b823fffb16..63e1560b92107 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } @@ -47,7 +48,7 @@
extern crate foo;
 use core::iter;
 
-pub const NINETY_TWO: u8 = 92;
+pub const NINETY_TWO: u8 = 92;
 
 use foo as foooo;
 
@@ -56,13 +57,13 @@
 }
 
 mod bar {
-    pub(in super) const FORTY_TWO: u8 = 42;
+    pub(in super) const FORTY_TWO: u8 = 42;
 
     mod baz {
-        use super::super::NINETY_TWO;
+        use super::super::NINETY_TWO;
         use crate::foooo::Point;
 
-        pub(in super::super) const TWENTY_NINE: u8 = 29;
+        pub(in super::super) const TWENTY_NINE: u8 = 29;
     }
 }
 
\ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html b/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html index 58613bf151050..366895ce7ec70 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } @@ -48,5 +49,5 @@ fn main() { let foo = Some(92); - let nums = iter::repeat(foo.unwrap()); + let nums = iter::repeat(foo.unwrap()); } \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html index 34274932afc17..b2ca6e1ca4b66 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } @@ -71,7 +72,7 @@ // KILLER WHALE /// Ishmael."; /// ``` - pub const bar: bool = true; + pub const bar: bool = true; /// Constructs a new `Foo`. /// @@ -81,7 +82,7 @@ /// # #![allow(unused_mut)] /// let mut foo: Foo = Foo::new(); /// ``` - pub const fn new() -> Foo { + pub const fn new() -> Foo { Foo { bar: true } } @@ -109,25 +110,25 @@ /// ``` /// /// ```rust,no_run - /// let foobar = Foo::new().bar(); + /// let foobar = Foo::new().bar(); /// ``` /// /// ~~~rust,no_run /// // code block with tilde. 
- /// let foobar = Foo::new().bar(); + /// let foobar = Foo::new().bar(); /// ~~~ /// /// ``` /// // functions - /// fn foo<T, const X: usize>(arg: i32) { - /// let x: T = X; + /// fn foo<T, const X: usize>(arg: i32) { + /// let x: T = X; /// } /// ``` /// /// ```sh /// echo 1 /// ``` - pub fn foo(&self) -> bool { + pub fn foo(&self) -> bool { true } } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html b/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html index 729e4791f557a..129b287e52f2d 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html index 066fcfb1dfe6f..7ba1194d675f5 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } @@ -63,25 +64,25 @@ } trait Bar { - fn bar(&self) -> i32; + fn bar(&self) -> i32; } impl Bar for Foo { - fn bar(&self) -> i32 { + fn bar(&self) -> i32 { self.x } } impl Foo { - fn baz(mut self, f: Foo) -> i32 { - f.baz(self) + fn baz(mut self, f: Foo) -> i32 { + f.baz(self) } - fn qux(&mut self) { + fn qux(&mut self) { self.x = 0; } - fn quop(&self) -> i32 { + fn quop(&self) -> i32 { self.x } } @@ -94,15 +95,15 @@ } impl FooCopy { - fn baz(self, f: FooCopy) -> u32 { - f.baz(self) + fn baz(self, f: FooCopy) -> u32 { + f.baz(self) } - fn qux(&mut self) { + fn qux(&mut self) { self.x = 0; } - fn quop(&self) -> u32 { + fn quop(&self) -> u32 { self.x } } @@ -119,9 +120,9 @@ loop {} } -fn const_param<const FOO: usize>() -> usize { - const_param::<{ FOO }>(); - FOO +fn const_param<const FOO: usize>() -> usize { + const_param::<{ FOO }>(); + FOO } use ops::Fn; @@ -148,17 +149,17 @@ let mut foo = Foo { x, y: x }; let foo2 = Foo { x, y: x }; - foo.quop(); - foo.qux(); - foo.baz(foo2); + foo.quop(); + foo.qux(); + foo.baz(foo2); let mut copy = FooCopy { x }; - copy.quop(); - copy.qux(); - copy.baz(copy); + copy.quop(); + copy.qux(); + copy.baz(copy); let a = |x| x; - let bar = Foo::baz; + let bar = Foo::baz; let baz = (-42,); let baz = -baz.0; @@ -178,7 +179,7 @@ use Option::*; impl<T> Option<T> { - fn and<U>(self, other: Option<U>) -> Option<(T, U)> { + fn and<U>(self, other: Option<U>) -> Option<(T, U)> { match other { None => unimplemented!(), Nope => Nope, @@ -200,12 +201,12 @@ fn use_foo_items() { let bob = foo::Person { name: "Bob", - age: foo::consts::NUMBER, + age: foo::consts::NUMBER, }; let control_flow = foo::identity(foo::ControlFlow::Continue); - if control_flow.should_die() { + if control_flow.should_die() { foo::die!(); } } @@ -213,23 +214,23 @@ pub enum Bool { True, False } impl Bool 
{ - pub const fn to_primitive(self) -> bool { + pub const fn to_primitive(self) -> bool { true } } -const USAGE_OF_BOOL:bool = Bool::True.to_primitive(); +const USAGE_OF_BOOL:bool = Bool::True.to_primitive(); trait Baz { - type Qux; + type Qux; } fn baz<T>(t: T) where T: Baz, - <T as Baz>::Qux: Bar {} + <T as Baz>::Qux: Bar {} fn gp_shadows_trait<Baz: Bar>() { - Baz::bar; + Baz::bar; } \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html b/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html index 58a147dd80a4f..6c3fbcfcf41e2 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html index 22ae5c82a4b55..7064a877070c7 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html b/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html index af7799965937a..8428b81580036 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html index 32ac6a94d8689..573b3d4bd5279 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html b/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html index ef8a48ca1c1d3..947d1bf1e3570 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html @@ -40,6 +40,7 
@@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html b/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html index a2ded15fd1baf..0fe2b6f274d03 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html b/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html index d123ee049765e..c60b6ab27bba5 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html b/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html index 4429e5d933aca..5d51b1497895c 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html index b6458fa7ca098..7a07d17b2718d 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } @@ -171,9 +172,9 @@ in(reg) i, ); - const CONSTANT: () = (): + const CONSTANT: () = (): let mut m = (); format_args!(concat!("{}"), "{}"); - format_args!("{} {} {} {} {} {} {backslash} {CONSTANT} {m}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); + format_args!("{} {} {} {} {} {} {backslash} {CONSTANT} {m}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); reuse_twice!("{backslash}"); } \ No newline at end of 
file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html b/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html index 49b588baa58e1..15d6be6334c6f 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html @@ -40,6 +40,7 @@ .keyword { color: #F0DFAF; font-weight: bold; } .control { font-style: italic; } .reference { font-style: italic; font-weight: bold; } +.const { font-weight: bolder; } .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } @@ -65,7 +66,7 @@ struct Struct { field: i32 } impl Struct { - unsafe fn unsafe_method(&self) {} + unsafe fn unsafe_method(&self) {} } #[repr(packed)] @@ -80,11 +81,11 @@ fn unsafe_trait_bound<T: UnsafeTrait>(_: T) {} trait DoTheAutoref { - fn calls_autoref(&self); + fn calls_autoref(&self); } impl DoTheAutoref for u16 { - fn calls_autoref(&self) {} + fn calls_autoref(&self) {} } fn main() { @@ -106,7 +107,7 @@ Union { b: 0 } => (), Union { a } => (), } - Struct { field: 0 }.unsafe_method(); + Struct { field: 0 }.unsafe_method(); // unsafe deref *x; @@ -123,6 +124,6 @@ let Packed { a: ref _a } = packed; // unsafe auto ref of packed field - packed.a.calls_autoref(); + packed.a.calls_autoref(); } } \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 6fed7d783e81e..c2990fd76eaca 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -22,11 +22,11 @@ fn attributes() { check_highlighting( r#" //- proc_macros: identity -//- minicore: derive, copy +//- minicore: derive, copy, default #[allow(dead_code)] #[rustfmt::skip] #[proc_macros::identity] -#[derive(Copy)] +#[derive(Default)] /// This is a doc comment // This is a normal comment /// This is a doc comment @@ -36,7 +36,10 @@ fn attributes() { // This is another normal comment #[derive(Copy, Unresolved)] // The reason for these being here is to test AttrIds -struct Foo; +enum Foo { + #[default] + Bar +} "#, expect_file!["./test_data/highlight_attributes.html"], false, @@ -629,6 +632,52 @@ fn main() { ); } +#[test] +fn test_const_highlighting() { + check_highlighting( + r#" +macro_rules! id { + ($($tt:tt)*) => { + $($tt)* + }; +} +const CONST_ITEM: *const () = &raw const (); +const fn const_fn(const {}: const fn()) where (): const ConstTrait { + CONST_ITEM; + CONST_PARAM; + const { + const || {} + } + id!( + CONST_ITEM; + CONST_PARAM; + const { + const || {} + }; + &raw const (); + const + ); +} +trait ConstTrait { + const ASSOC_CONST: () = (); + const fn assoc_const_fn() {} +} +impl const ConstTrait for () { + const ASSOC_CONST: () = (); + const fn assoc_const_fn() {} +} + +macro_rules! 
unsafe_deref { + () => { + *(&() as *const ()) + }; +} +"#, + expect_file!["./test_data/highlight_const.html"], + false, + ); +} + #[test] fn test_highlight_doc_comment() { check_highlighting( @@ -1173,8 +1222,27 @@ fn benchmark_syntax_highlighting_parser() { .highlight(HL_CONFIG, file_id) .unwrap() .iter() - .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function)) + .filter(|it| { + matches!(it.highlight.tag, HlTag::Symbol(SymbolKind::Function | SymbolKind::Method)) + }) .count() }; assert_eq!(hash, 1169); } + +#[test] +fn highlight_trait_with_lifetimes_regression_16958() { + let (analysis, file_id) = fixture::file( + r#" +pub trait Deserialize<'de> { + fn deserialize(); +} + +fn f<'de, T: Deserialize<'de>>() { + T::deserialize(); +} +"# + .trim(), + ); + let _ = analysis.highlight(HL_CONFIG, file_id).unwrap(); +} diff --git a/crates/ide/src/test_explorer.rs b/crates/ide/src/test_explorer.rs index ca4713997030e..99e2430860794 100644 --- a/crates/ide/src/test_explorer.rs +++ b/crates/ide/src/test_explorer.rs @@ -7,7 +7,7 @@ use ide_db::{ }; use syntax::TextRange; -use crate::{navigation_target::ToNav, runnables::runnable_fn, Runnable, TryToNav}; +use crate::{runnables::runnable_fn, NavigationTarget, Runnable, TryToNav}; #[derive(Debug)] pub enum TestItemKind { @@ -56,7 +56,12 @@ fn find_crate_by_id(crate_graph: &CrateGraph, crate_id: &str) -> Option }) } -fn discover_tests_in_module(db: &RootDatabase, module: Module, prefix_id: String) -> Vec { +fn discover_tests_in_module( + db: &RootDatabase, + module: Module, + prefix_id: String, + only_in_this_file: bool, +) -> Vec { let sema = Semantics::new(db); let mut r = vec![]; @@ -64,9 +69,9 @@ fn discover_tests_in_module(db: &RootDatabase, module: Module, prefix_id: String let module_name = c.name(db).as_ref().and_then(|n| n.as_str()).unwrap_or("[mod without name]").to_owned(); let module_id = format!("{prefix_id}::{module_name}"); - let module_children = discover_tests_in_module(db, c, module_id.clone()); + let module_children = discover_tests_in_module(db, c, module_id.clone(), only_in_this_file); if !module_children.is_empty() { - let nav = c.to_nav(db).call_site; + let nav = NavigationTarget::from_module_to_decl(sema.db, c).call_site; r.push(TestItem { id: module_id, kind: TestItemKind::Module, @@ -76,7 +81,9 @@ fn discover_tests_in_module(db: &RootDatabase, module: Module, prefix_id: String text_range: Some(nav.focus_or_full_range()), runnable: None, }); - r.extend(module_children); + if !only_in_this_file || c.is_inline(db) { + r.extend(module_children); + } } } for def in module.declarations(db) { @@ -112,6 +119,55 @@ pub(crate) fn discover_tests_in_crate_by_test_id( discover_tests_in_crate(db, crate_id) } +pub(crate) fn discover_tests_in_file(db: &RootDatabase, file_id: FileId) -> Vec { + let sema = Semantics::new(db); + + let Some(module) = sema.file_to_module_def(file_id) else { return vec![] }; + let Some((mut tests, id)) = find_module_id_and_test_parents(&sema, module) else { + return vec![]; + }; + tests.extend(discover_tests_in_module(db, module, id, true)); + tests +} + +fn find_module_id_and_test_parents( + sema: &Semantics<'_, RootDatabase>, + module: Module, +) -> Option<(Vec, String)> { + let Some(parent) = module.parent(sema.db) else { + let name = module.krate().display_name(sema.db)?.to_string(); + return Some(( + vec![TestItem { + id: name.clone(), + kind: TestItemKind::Crate(module.krate().into()), + label: name.clone(), + parent: None, + file: None, + text_range: None, + runnable: None, + }], + name, + )); 
+ }; + let (mut r, mut id) = find_module_id_and_test_parents(sema, parent)?; + let parent = Some(id.clone()); + id += "::"; + let module_name = &module.name(sema.db); + let module_name = module_name.as_ref().and_then(|n| n.as_str()).unwrap_or("[mod without name]"); + id += module_name; + let nav = NavigationTarget::from_module_to_decl(sema.db, module).call_site; + r.push(TestItem { + id: id.clone(), + kind: TestItemKind::Module, + label: module_name.to_owned(), + parent, + file: Some(nav.file_id), + text_range: Some(nav.focus_or_full_range()), + runnable: None, + }); + Some((r, id)) +} + pub(crate) fn discover_tests_in_crate(db: &RootDatabase, crate_id: CrateId) -> Vec { let crate_graph = db.crate_graph(); if !crate_graph[crate_id].origin.is_local() { @@ -133,6 +189,6 @@ pub(crate) fn discover_tests_in_crate(db: &RootDatabase, crate_id: CrateId) -> V text_range: None, runnable: None, }]; - r.extend(discover_tests_in_module(db, module, crate_test_id)); + r.extend(discover_tests_in_module(db, module, crate_test_id, false)); r } diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs index e87fc89fea2a8..d3eee0e02e483 100644 --- a/crates/ide/src/typing.rs +++ b/crates/ide/src/typing.rs @@ -175,9 +175,21 @@ fn on_opening_bracket_typed( } } - // If it's a statement in a block, we don't know how many statements should be included - if ast::ExprStmt::can_cast(expr.syntax().parent()?.kind()) { - return None; + if let Some(parent) = expr.syntax().parent().and_then(ast::Expr::cast) { + let mut node = expr.syntax().clone(); + let all_prev_sib_attr = loop { + match node.prev_sibling() { + Some(sib) if sib.kind().is_trivia() || sib.kind() == SyntaxKind::ATTR => { + node = sib + } + Some(_) => break false, + None => break true, + }; + }; + + if all_prev_sib_attr { + expr = parent; + } } // Insert the closing bracket right after the expression. 
@@ -824,6 +836,21 @@ fn f() { 0 => {()}, 1 => (), } +} + "#, + ); + type_char( + '{', + r#" +fn main() { + #[allow(unreachable_code)] + $0g(); +} + "#, + r#" +fn main() { + #[allow(unreachable_code)] + {g()}; } "#, ); diff --git a/crates/load-cargo/Cargo.toml b/crates/load-cargo/Cargo.toml index 05412e176b65b..48e84a7b25ebf 100644 --- a/crates/load-cargo/Cargo.toml +++ b/crates/load-cargo/Cargo.toml @@ -20,6 +20,7 @@ tracing.workspace = true hir-expand.workspace = true ide-db.workspace = true +paths.workspace = true proc-macro-api.workspace = true project-model.workspace = true span.workspace = true diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index fe680e47fef8f..79d6fe36b5618 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -38,7 +38,7 @@ pub fn load_workspace_at( load_config: &LoadCargoConfig, progress: &dyn Fn(String), ) -> anyhow::Result<(RootDatabase, vfs::Vfs, Option)> { - let root = AbsPathBuf::assert(std::env::current_dir()?.join(root)); + let root = AbsPathBuf::assert_utf8(std::env::current_dir()?.join(root)); let root = ProjectManifest::discover_single(&root)?; let mut workspace = ProjectWorkspace::load(root, cargo_config, progress)?; @@ -387,7 +387,7 @@ fn expander_to_proc_macro( let name = From::from(expander.name()); let kind = match expander.kind() { proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive, - proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike, + proc_macro_api::ProcMacroKind::Bang => ProcMacroKind::Bang, proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr, }; let disabled = ignored_macros.iter().any(|replace| **replace == name); diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index ac7f07117845e..4d5531ae307fb 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -23,7 +23,11 @@ fn benchmark_parse_macro_rules() { let _pt = bench("mbe parse macro rules"); rules .values() - .map(|it| DeclarativeMacro::parse_macro_rules(it, true, true).rules.len()) + .map(|it| { + DeclarativeMacro::parse_macro_rules(it, |_| span::Edition::CURRENT, true) + .rules + .len() + }) .sum() }; assert_eq!(hash, 1144); @@ -51,10 +55,12 @@ fn benchmark_expand_macro_rules() { assert_eq!(hash, 69413); } -fn macro_rules_fixtures() -> FxHashMap> { +fn macro_rules_fixtures() -> FxHashMap { macro_rules_fixtures_tt() .into_iter() - .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true, true))) + .map(|(id, tt)| { + (id, DeclarativeMacro::parse_macro_rules(&tt, |_| span::Edition::CURRENT, true)) + }) .collect() } @@ -80,7 +86,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap> { /// Generate random invocation fixtures from rules fn invocation_fixtures( - rules: &FxHashMap>, + rules: &FxHashMap, ) -> Vec<(String, tt::Subtree)> { let mut seed = 123456789; let mut res = Vec::new(); @@ -128,11 +134,7 @@ fn invocation_fixtures( } return res; - fn collect_from_op( - op: &Op, - token_trees: &mut Vec>, - seed: &mut usize, - ) { + fn collect_from_op(op: &Op, token_trees: &mut Vec>, seed: &mut usize) { return match op { Op::Var { kind, .. 
} => match kind.as_ref() { Some(MetaVarKind::Ident) => token_trees.push(make_ident("foo")), diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs index 9366048fd9558..2f2c0aa6ff595 100644 --- a/crates/mbe/src/expander.rs +++ b/crates/mbe/src/expander.rs @@ -6,22 +6,21 @@ mod matcher; mod transcriber; use rustc_hash::FxHashMap; +use span::Span; use syntax::SmolStr; -use tt::Span; use crate::{parser::MetaVarKind, ExpandError, ExpandResult}; -pub(crate) fn expand_rules( - rules: &[crate::Rule], - input: &tt::Subtree, - marker: impl Fn(&mut S) + Copy, - is_2021: bool, +pub(crate) fn expand_rules( + rules: &[crate::Rule], + input: &tt::Subtree, + marker: impl Fn(&mut Span) + Copy, new_meta_vars: bool, - call_site: S, -) -> ExpandResult> { - let mut match_: Option<(matcher::Match, &crate::Rule)> = None; + call_site: Span, +) -> ExpandResult> { + let mut match_: Option<(matcher::Match, &crate::Rule)> = None; for rule in rules { - let new_match = matcher::match_(&rule.lhs, input, is_2021); + let new_match = matcher::match_(&rule.lhs, input); if new_match.err.is_none() { // If we find a rule that applies without errors, we're done. @@ -110,30 +109,24 @@ pub(crate) fn expand_rules( /// In other words, `Bindings` is a *multi* mapping from `SmolStr` to /// `tt::TokenTree`, where the index to select a particular `TokenTree` among /// many is not a plain `usize`, but a `&[usize]`. -#[derive(Debug, Clone, PartialEq, Eq)] -struct Bindings { - inner: FxHashMap>, -} - -impl Default for Bindings { - fn default() -> Self { - Self { inner: Default::default() } - } +#[derive(Debug, Default, Clone, PartialEq, Eq)] +struct Bindings { + inner: FxHashMap, } #[derive(Debug, Clone, PartialEq, Eq)] -enum Binding { - Fragment(Fragment), - Nested(Vec>), +enum Binding { + Fragment(Fragment), + Nested(Vec), Empty, Missing(MetaVarKind), } #[derive(Debug, Clone, PartialEq, Eq)] -enum Fragment { +enum Fragment { Empty, /// token fragments are just copy-pasted into the output - Tokens(tt::TokenTree), + Tokens(tt::TokenTree), /// Expr ast fragments are surrounded with `()` on insertion to preserve /// precedence. Note that this impl is different from the one currently in /// `rustc` -- `rustc` doesn't translate fragments into token trees at all. @@ -141,7 +134,7 @@ enum Fragment { /// At one point in time, we tried to use "fake" delimiters here Ă  la /// proc-macro delimiter=none. As we later discovered, "none" delimiters are /// tricky to handle in the parser, and rustc doesn't handle those either. - Expr(tt::Subtree), + Expr(tt::Subtree), /// There are roughly two types of paths: paths in expression context, where a /// separator `::` between an identifier and its following generic argument list /// is mandatory, and paths in type context, where `::` can be omitted. @@ -151,5 +144,5 @@ enum Fragment { /// and is trasncribed as an expression-context path, verbatim transcription /// would cause a syntax error. We need to fix it up just before transcribing; /// see `transcriber::fix_up_and_push_path_tt()`. 
- Path(tt::Subtree), + Path(tt::Subtree), } diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index eea92cfba4c20..3170834d54ffb 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -62,8 +62,9 @@ use std::rc::Rc; use smallvec::{smallvec, SmallVec}; +use span::Span; use syntax::SmolStr; -use tt::{DelimSpan, Span}; +use tt::DelimSpan; use crate::{ expander::{Binding, Bindings, ExpandResult, Fragment}, @@ -72,7 +73,7 @@ use crate::{ ExpandError, MetaTemplate, ValueResult, }; -impl Bindings { +impl Bindings { fn push_optional(&mut self, name: &SmolStr) { self.inner.insert(name.clone(), Binding::Fragment(Fragment::Empty)); } @@ -81,14 +82,14 @@ impl Bindings { self.inner.insert(name.clone(), Binding::Empty); } - fn bindings(&self) -> impl Iterator> { + fn bindings(&self) -> impl Iterator { self.inner.values() } } -#[derive(Clone, Debug, PartialEq, Eq)] -pub(super) struct Match { - pub(super) bindings: Bindings, +#[derive(Clone, Default, Debug, PartialEq, Eq)] +pub(super) struct Match { + pub(super) bindings: Bindings, /// We currently just keep the first error and count the rest to compare matches. pub(super) err: Option, pub(super) err_count: usize, @@ -98,19 +99,7 @@ pub(super) struct Match { pub(super) bound_count: usize, } -impl Default for Match { - fn default() -> Self { - Self { - bindings: Default::default(), - err: Default::default(), - err_count: Default::default(), - unmatched_tts: Default::default(), - bound_count: Default::default(), - } - } -} - -impl Match { +impl Match { fn add_err(&mut self, err: ExpandError) { let prev_err = self.err.take(); self.err = prev_err.or(Some(err)); @@ -119,16 +108,12 @@ impl Match { } /// Matching errors are added to the `Match`. -pub(super) fn match_( - pattern: &MetaTemplate, - input: &tt::Subtree, - is_2021: bool, -) -> Match { - let mut res = match_loop(pattern, input, is_2021); +pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree) -> Match { + let mut res = match_loop(pattern, input); res.bound_count = count(res.bindings.bindings()); return res; - fn count<'a, S: 'a>(bindings: impl Iterator>) -> usize { + fn count<'a>(bindings: impl Iterator) -> usize { bindings .map(|it| match it { Binding::Fragment(_) => 1, @@ -141,10 +126,10 @@ pub(super) fn match_( } #[derive(Debug, Clone)] -enum BindingKind { +enum BindingKind { Empty(SmolStr), Optional(SmolStr), - Fragment(SmolStr, Fragment), + Fragment(SmolStr, Fragment), Missing(SmolStr, MetaVarKind), Nested(usize, usize), } @@ -158,18 +143,13 @@ enum LinkNode { Parent { idx: usize, len: usize }, } -struct BindingsBuilder { - nodes: Vec>>>>, +#[derive(Default)] +struct BindingsBuilder { + nodes: Vec>>>, nested: Vec>>, } -impl Default for BindingsBuilder { - fn default() -> Self { - Self { nodes: Default::default(), nested: Default::default() } - } -} - -impl BindingsBuilder { +impl BindingsBuilder { fn alloc(&mut self) -> BindingsIdx { let idx = self.nodes.len(); self.nodes.push(Vec::new()); @@ -206,7 +186,7 @@ impl BindingsBuilder { self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone())))); } - fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) { + fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) { self.nodes[idx.0] .push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment)))); } @@ -227,11 +207,11 @@ impl BindingsBuilder { idx.0 = new_idx; } - fn build(self, idx: &BindingsIdx) -> Bindings { + fn 
build(self, idx: &BindingsIdx) -> Bindings { self.build_inner(&self.nodes[idx.0]) } - fn build_inner(&self, link_nodes: &[LinkNode>>]) -> Bindings { + fn build_inner(&self, link_nodes: &[LinkNode>]) -> Bindings { let mut bindings = Bindings::default(); let mut nodes = Vec::new(); self.collect_nodes(link_nodes, &mut nodes); @@ -281,7 +261,7 @@ impl BindingsBuilder { &'a self, id: usize, len: usize, - nested_refs: &mut Vec<&'a [LinkNode>>]>, + nested_refs: &mut Vec<&'a [LinkNode>]>, ) { self.nested[id].iter().take(len).for_each(|it| match it { LinkNode::Node(id) => nested_refs.push(&self.nodes[*id]), @@ -289,7 +269,7 @@ impl BindingsBuilder { }); } - fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec>) { + fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec) { let last = &self.nodes[idx]; let mut nested_refs: Vec<&[_]> = Vec::new(); self.nested[nested_idx].iter().for_each(|it| match *it { @@ -300,7 +280,7 @@ impl BindingsBuilder { nested.extend(nested_refs.into_iter().map(|iter| self.build_inner(iter))); } - fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind>) { + fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind>) { self.nodes[id].iter().take(len).for_each(|it| match it { LinkNode::Node(it) => nodes.push(it), LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes), @@ -309,8 +289,8 @@ impl BindingsBuilder { fn collect_nodes<'a>( &'a self, - link_nodes: &'a [LinkNode>>], - nodes: &mut Vec<&'a BindingKind>, + link_nodes: &'a [LinkNode>], + nodes: &mut Vec<&'a BindingKind>, ) { link_nodes.iter().for_each(|it| match it { LinkNode::Node(it) => nodes.push(it), @@ -320,22 +300,22 @@ impl BindingsBuilder { } #[derive(Debug, Clone)] -struct MatchState<'t, S> { +struct MatchState<'t> { /// The position of the "dot" in this matcher - dot: OpDelimitedIter<'t, S>, + dot: OpDelimitedIter<'t>, /// Token subtree stack /// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. ) /// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does /// that where the bottom of the stack is the outermost matcher. - stack: SmallVec<[OpDelimitedIter<'t, S>; 4]>, + stack: SmallVec<[OpDelimitedIter<'t>; 4]>, /// The "parent" matcher position if we are in a repetition. That is, the matcher position just /// before we enter the repetition. - up: Option>>, + up: Option>>, /// The separator if we are in a repetition. - sep: Option>, + sep: Option, /// The KleeneOp of this sequence if we are in a repetition. sep_kind: Option, @@ -347,7 +327,7 @@ struct MatchState<'t, S> { bindings: BindingsIdx, /// Cached result of meta variable parsing - meta_result: Option<(TtIter<'t, S>, ExpandResult>>)>, + meta_result: Option<(TtIter<'t, Span>, ExpandResult>)>, /// Is error occurred in this state, will `poised` to "parent" is_error: bool, @@ -372,18 +352,17 @@ struct MatchState<'t, S> { /// - `bb_items`: the set of items that are waiting for the black-box parser. 
/// - `error_items`: the set of items in errors, used for error-resilient parsing #[inline] -fn match_loop_inner<'t, S: Span>( - src: TtIter<'t, S>, - stack: &[TtIter<'t, S>], - res: &mut Match, - bindings_builder: &mut BindingsBuilder, - cur_items: &mut SmallVec<[MatchState<'t, S>; 1]>, - bb_items: &mut SmallVec<[MatchState<'t, S>; 1]>, - next_items: &mut Vec>, - eof_items: &mut SmallVec<[MatchState<'t, S>; 1]>, - error_items: &mut SmallVec<[MatchState<'t, S>; 1]>, - is_2021: bool, - delim_span: tt::DelimSpan, +fn match_loop_inner<'t>( + src: TtIter<'t, Span>, + stack: &[TtIter<'t, Span>], + res: &mut Match, + bindings_builder: &mut BindingsBuilder, + cur_items: &mut SmallVec<[MatchState<'t>; 1]>, + bb_items: &mut SmallVec<[MatchState<'t>; 1]>, + next_items: &mut Vec>, + eof_items: &mut SmallVec<[MatchState<'t>; 1]>, + error_items: &mut SmallVec<[MatchState<'t>; 1]>, + delim_span: tt::DelimSpan, ) { macro_rules! try_push { ($items: expr, $it:expr) => { @@ -494,7 +473,7 @@ fn match_loop_inner<'t, S: Span>( OpDelimited::Op(Op::Var { kind, name, .. }) => { if let &Some(kind) = kind { let mut fork = src.clone(); - let match_res = match_meta_var(kind, &mut fork, is_2021, delim_span); + let match_res = match_meta_var(kind, &mut fork, delim_span); match match_res.err { None => { // Some meta variables are optional (e.g. vis) @@ -607,10 +586,10 @@ fn match_loop_inner<'t, S: Span>( } } -fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match { +fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match { let span = src.delimiter.delim_span(); let mut src = TtIter::new(src); - let mut stack: SmallVec<[TtIter<'_, S>; 1]> = SmallVec::new(); + let mut stack: SmallVec<[TtIter<'_, Span>; 1]> = SmallVec::new(); let mut res = Match::default(); let mut error_recover_item = None; @@ -647,7 +626,6 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: &mut next_items, &mut eof_items, &mut error_items, - is_2021, span, ); stdx::always!(cur_items.is_empty()); @@ -758,12 +736,11 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: } } -fn match_meta_var( +fn match_meta_var( kind: MetaVarKind, - input: &mut TtIter<'_, S>, - is_2021: bool, - delim_span: DelimSpan, -) -> ExpandResult>> { + input: &mut TtIter<'_, Span>, + delim_span: DelimSpan, +) -> ExpandResult> { let fragment = match kind { MetaVarKind::Path => { return input.expect_fragment(parser::PrefixEntryPoint::Path).map(|it| { @@ -771,8 +748,7 @@ fn match_meta_var( }); } MetaVarKind::Ty => parser::PrefixEntryPoint::Ty, - MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop, - MetaVarKind::Pat => parser::PrefixEntryPoint::Pat, + MetaVarKind::Pat => parser::PrefixEntryPoint::PatTop, MetaVarKind::PatParam => parser::PrefixEntryPoint::Pat, MetaVarKind::Stmt => parser::PrefixEntryPoint::Stmt, MetaVarKind::Block => parser::PrefixEntryPoint::Block, @@ -846,7 +822,7 @@ fn match_meta_var( input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens)) } -fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) { +fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) { for op in pattern.iter() { match op { Op::Var { name, .. 
} => collector_fun(name.clone()), @@ -859,11 +835,11 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &Meta } } } -impl MetaTemplate { - fn iter_delimited_with(&self, delimiter: tt::Delimiter) -> OpDelimitedIter<'_, S> { +impl MetaTemplate { + fn iter_delimited_with(&self, delimiter: tt::Delimiter) -> OpDelimitedIter<'_> { OpDelimitedIter { inner: &self.0, idx: 0, delimited: delimiter } } - fn iter_delimited(&self, span: tt::DelimSpan) -> OpDelimitedIter<'_, S> { + fn iter_delimited(&self, span: tt::DelimSpan) -> OpDelimitedIter<'_> { OpDelimitedIter { inner: &self.0, idx: 0, @@ -873,27 +849,27 @@ impl MetaTemplate { } #[derive(Debug, Clone, Copy)] -enum OpDelimited<'a, S> { - Op(&'a Op), +enum OpDelimited<'a> { + Op(&'a Op), Open, Close, } #[derive(Debug, Clone, Copy)] -struct OpDelimitedIter<'a, S> { - inner: &'a [Op], - delimited: tt::Delimiter, +struct OpDelimitedIter<'a> { + inner: &'a [Op], + delimited: tt::Delimiter, idx: usize, } -impl<'a, S: Span> OpDelimitedIter<'a, S> { +impl<'a> OpDelimitedIter<'a> { fn is_eof(&self) -> bool { let len = self.inner.len() + if self.delimited.kind != tt::DelimiterKind::Invisible { 2 } else { 0 }; self.idx >= len } - fn peek(&self) -> Option> { + fn peek(&self) -> Option> { match self.delimited.kind { tt::DelimiterKind::Invisible => self.inner.get(self.idx).map(OpDelimited::Op), _ => match self.idx { @@ -909,8 +885,8 @@ impl<'a, S: Span> OpDelimitedIter<'a, S> { } } -impl<'a, S: Span> Iterator for OpDelimitedIter<'a, S> { - type Item = OpDelimited<'a, S>; +impl<'a> Iterator for OpDelimitedIter<'a> { + type Item = OpDelimited<'a>; fn next(&mut self) -> Option { let res = self.peek(); @@ -926,8 +902,8 @@ impl<'a, S: Span> Iterator for OpDelimitedIter<'a, S> { } } -impl TtIter<'_, S> { - fn expect_separator(&mut self, separator: &Separator) -> bool { +impl TtIter<'_, Span> { + fn expect_separator(&mut self, separator: &Separator) -> bool { let mut fork = self.clone(); let ok = match separator { Separator::Ident(lhs) => match fork.expect_ident_or_underscore() { @@ -957,7 +933,7 @@ impl TtIter<'_, S> { ok } - fn expect_tt(&mut self) -> Result, ()> { + fn expect_tt(&mut self) -> Result, ()> { if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = self.peek_n(0) { if punct.char == '\'' { self.expect_lifetime() @@ -976,7 +952,7 @@ impl TtIter<'_, S> { } } - fn expect_lifetime(&mut self) -> Result, ()> { + fn expect_lifetime(&mut self) -> Result, ()> { let punct = self.expect_single_punct()?; if punct.char != '\'' { return Err(()); @@ -997,7 +973,7 @@ impl TtIter<'_, S> { .into()) } - fn eat_char(&mut self, c: char) -> Option> { + fn eat_char(&mut self, c: char) -> Option> { let mut fork = self.clone(); match fork.expect_char(c) { Ok(_) => { diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index 6d3055da28608..5e6e45f1521b8 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -1,8 +1,9 @@ //! Transcriber takes a template, like `fn $ident() {}`, a set of bindings like //! 
`$ident => foo`, interpolates variables in the template, to get `fn foo() {}` +use span::Span; use syntax::SmolStr; -use tt::{Delimiter, Span}; +use tt::Delimiter; use crate::{ expander::{Binding, Bindings, Fragment}, @@ -10,8 +11,8 @@ use crate::{ CountError, ExpandError, ExpandResult, MetaTemplate, }; -impl Bindings { - fn get(&self, name: &str) -> Result<&Binding, ExpandError> { +impl Bindings { + fn get(&self, name: &str) -> Result<&Binding, ExpandError> { match self.inner.get(name) { Some(binding) => Ok(binding), None => Err(ExpandError::UnresolvedBinding(Box::new(Box::from(name)))), @@ -21,10 +22,10 @@ impl Bindings { fn get_fragment( &self, name: &str, - mut span: S, + mut span: Span, nesting: &mut [NestingState], - marker: impl Fn(&mut S), - ) -> Result, ExpandError> { + marker: impl Fn(&mut Span), + ) -> Result { macro_rules! binding_err { ($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) }; } @@ -134,15 +135,15 @@ impl Bindings { } } -pub(super) fn transcribe( - template: &MetaTemplate, - bindings: &Bindings, - marker: impl Fn(&mut S) + Copy, +pub(super) fn transcribe( + template: &MetaTemplate, + bindings: &Bindings, + marker: impl Fn(&mut Span) + Copy, new_meta_vars: bool, - call_site: S, -) -> ExpandResult> { + call_site: Span, +) -> ExpandResult> { let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), new_meta_vars, call_site }; - let mut arena: Vec> = Vec::new(); + let mut arena: Vec> = Vec::new(); expand_subtree(&mut ctx, template, None, &mut arena, marker) } @@ -158,20 +159,20 @@ struct NestingState { } #[derive(Debug)] -struct ExpandCtx<'a, S> { - bindings: &'a Bindings, +struct ExpandCtx<'a> { + bindings: &'a Bindings, nesting: Vec, new_meta_vars: bool, - call_site: S, + call_site: Span, } -fn expand_subtree( - ctx: &mut ExpandCtx<'_, S>, - template: &MetaTemplate, - delimiter: Option>, - arena: &mut Vec>, - marker: impl Fn(&mut S) + Copy, -) -> ExpandResult> { +fn expand_subtree( + ctx: &mut ExpandCtx<'_>, + template: &MetaTemplate, + delimiter: Option>, + arena: &mut Vec>, + marker: impl Fn(&mut Span) + Copy, +) -> ExpandResult> { // remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation let start_elements = arena.len(); let mut err = None; @@ -332,12 +333,12 @@ fn expand_subtree( } } -fn expand_var( - ctx: &mut ExpandCtx<'_, S>, +fn expand_var( + ctx: &mut ExpandCtx<'_>, v: &SmolStr, - id: S, - marker: impl Fn(&mut S), -) -> ExpandResult> { + id: Span, + marker: impl Fn(&mut Span), +) -> ExpandResult { // We already handle $crate case in mbe parser debug_assert!(v != "crate"); @@ -378,15 +379,15 @@ fn expand_var( } } -fn expand_repeat( - ctx: &mut ExpandCtx<'_, S>, - template: &MetaTemplate, +fn expand_repeat( + ctx: &mut ExpandCtx<'_>, + template: &MetaTemplate, kind: RepeatKind, - separator: &Option>, - arena: &mut Vec>, - marker: impl Fn(&mut S) + Copy, -) -> ExpandResult> { - let mut buf: Vec> = Vec::new(); + separator: &Option, + arena: &mut Vec>, + marker: impl Fn(&mut Span) + Copy, +) -> ExpandResult { + let mut buf: Vec> = Vec::new(); ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false }); // Dirty hack to make macro-expansion terminate. 
// This should be replaced by a proper macro-by-example implementation @@ -478,11 +479,7 @@ fn expand_repeat( ExpandResult { value: Fragment::Tokens(tt), err } } -fn push_fragment( - ctx: &ExpandCtx<'_, S>, - buf: &mut Vec>, - fragment: Fragment, -) { +fn push_fragment(ctx: &ExpandCtx<'_>, buf: &mut Vec>, fragment: Fragment) { match fragment { Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt), Fragment::Expr(sub) => { @@ -494,7 +491,7 @@ fn push_fragment( } } -fn push_subtree(buf: &mut Vec>, tt: tt::Subtree) { +fn push_subtree(buf: &mut Vec>, tt: tt::Subtree) { match tt.delimiter.kind { tt::DelimiterKind::Invisible => buf.extend(Vec::from(tt.token_trees)), _ => buf.push(tt.into()), @@ -504,10 +501,10 @@ fn push_subtree(buf: &mut Vec>, tt: tt::Subtree) { /// Inserts the path separator `::` between an identifier and its following generic /// argument list, and then pushes into the buffer. See [`Fragment::Path`] for why /// we need this fixup. -fn fix_up_and_push_path_tt( - ctx: &ExpandCtx<'_, S>, - buf: &mut Vec>, - subtree: tt::Subtree, +fn fix_up_and_push_path_tt( + ctx: &ExpandCtx<'_>, + buf: &mut Vec>, + subtree: tt::Subtree, ) { stdx::always!(matches!(subtree.delimiter.kind, tt::DelimiterKind::Invisible)); let mut prev_was_ident = false; @@ -546,11 +543,7 @@ fn fix_up_and_push_path_tt( /// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth /// defined by the metavar expression. -fn count( - binding: &Binding, - depth_curr: usize, - depth_max: usize, -) -> Result { +fn count(binding: &Binding, depth_curr: usize, depth_max: usize) -> Result { match binding { Binding::Nested(bs) => { if depth_curr == depth_max { @@ -564,8 +557,8 @@ fn count( } } -fn count_old( - binding: &Binding, +fn count_old( + binding: &Binding, our_depth: usize, count_depth: Option, ) -> Result { diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 62fdce36892f2..3a853512660eb 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -17,8 +17,8 @@ mod tt_iter; #[cfg(test)] mod benchmark; +use span::{Edition, Span, SyntaxContextId}; use stdx::impl_from; -use tt::Span; use std::fmt; @@ -127,32 +127,29 @@ impl fmt::Display for CountError { /// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident` /// and `$()*` have special meaning (see `Var` and `Repeat` data structures) #[derive(Clone, Debug, PartialEq, Eq)] -pub struct DeclarativeMacro { - rules: Box<[Rule]>, - // This is used for correctly determining the behavior of the pat fragment - // FIXME: This should be tracked by hygiene of the fragment identifier! - is_2021: bool, +pub struct DeclarativeMacro { + rules: Box<[Rule]>, err: Option>, } #[derive(Clone, Debug, PartialEq, Eq)] -struct Rule { - lhs: MetaTemplate, - rhs: MetaTemplate, +struct Rule { + lhs: MetaTemplate, + rhs: MetaTemplate, } -impl DeclarativeMacro { - pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro { - DeclarativeMacro { rules: Box::default(), is_2021, err: Some(Box::new(err)) } +impl DeclarativeMacro { + pub fn from_err(err: ParseError) -> DeclarativeMacro { + DeclarativeMacro { rules: Box::default(), err: Some(Box::new(err)) } } /// The old, `macro_rules! m {}` flavor. 
pub fn parse_macro_rules( - tt: &tt::Subtree, - is_2021: bool, + tt: &tt::Subtree, + edition: impl Copy + Fn(SyntaxContextId) -> Edition, // FIXME: Remove this once we drop support for rust 1.76 (defaults to true then) new_meta_vars: bool, - ) -> DeclarativeMacro { + ) -> DeclarativeMacro { // Note: this parsing can be implemented using mbe machinery itself, by // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing // manually seems easier. @@ -161,7 +158,7 @@ impl DeclarativeMacro { let mut err = None; while src.len() > 0 { - let rule = match Rule::parse(&mut src, true, new_meta_vars) { + let rule = match Rule::parse(edition, &mut src, true, new_meta_vars) { Ok(it) => it, Err(e) => { err = Some(Box::new(e)); @@ -184,16 +181,16 @@ impl DeclarativeMacro { } } - DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err } + DeclarativeMacro { rules: rules.into_boxed_slice(), err } } /// The new, unstable `macro m {}` flavor. pub fn parse_macro2( - tt: &tt::Subtree, - is_2021: bool, + tt: &tt::Subtree, + edition: impl Copy + Fn(SyntaxContextId) -> Edition, // FIXME: Remove this once we drop support for rust 1.76 (defaults to true then) new_meta_vars: bool, - ) -> DeclarativeMacro { + ) -> DeclarativeMacro { let mut src = TtIter::new(tt); let mut rules = Vec::new(); let mut err = None; @@ -201,7 +198,7 @@ impl DeclarativeMacro { if tt::DelimiterKind::Brace == tt.delimiter.kind { cov_mark::hit!(parse_macro_def_rules); while src.len() > 0 { - let rule = match Rule::parse(&mut src, true, new_meta_vars) { + let rule = match Rule::parse(edition, &mut src, true, new_meta_vars) { Ok(it) => it, Err(e) => { err = Some(Box::new(e)); @@ -220,7 +217,7 @@ impl DeclarativeMacro { } } else { cov_mark::hit!(parse_macro_def_simple); - match Rule::parse(&mut src, false, new_meta_vars) { + match Rule::parse(edition, &mut src, false, new_meta_vars) { Ok(rule) => { if src.len() != 0 { err = Some(Box::new(ParseError::expected("remaining tokens in macro def"))); @@ -240,7 +237,7 @@ impl DeclarativeMacro { } } - DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err } + DeclarativeMacro { rules: rules.into_boxed_slice(), err } } pub fn err(&self) -> Option<&ParseError> { @@ -249,18 +246,19 @@ impl DeclarativeMacro { pub fn expand( &self, - tt: &tt::Subtree, - marker: impl Fn(&mut S) + Copy, + tt: &tt::Subtree, + marker: impl Fn(&mut Span) + Copy, new_meta_vars: bool, - call_site: S, - ) -> ExpandResult> { - expander::expand_rules(&self.rules, tt, marker, self.is_2021, new_meta_vars, call_site) + call_site: Span, + ) -> ExpandResult> { + expander::expand_rules(&self.rules, tt, marker, new_meta_vars, call_site) } } -impl Rule { +impl Rule { fn parse( - src: &mut TtIter<'_, S>, + edition: impl Copy + Fn(SyntaxContextId) -> Edition, + src: &mut TtIter<'_, Span>, expect_arrow: bool, new_meta_vars: bool, ) -> Result { @@ -271,14 +269,14 @@ impl Rule { } let rhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?; - let lhs = MetaTemplate::parse_pattern(lhs)?; - let rhs = MetaTemplate::parse_template(rhs, new_meta_vars)?; + let lhs = MetaTemplate::parse_pattern(edition, lhs)?; + let rhs = MetaTemplate::parse_template(edition, rhs, new_meta_vars)?; Ok(crate::Rule { lhs, rhs }) } } -fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> { +fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> { for op in pattern.iter() { match op { Op::Subtree { tokens, .. 
} => validate(tokens)?, diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index afdbbef231476..eaf2fd8c273f8 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs @@ -2,8 +2,8 @@ //! trees. use smallvec::{smallvec, SmallVec}; +use span::{Edition, Span, SyntaxContextId}; use syntax::SmolStr; -use tt::Span; use crate::{tt_iter::TtIter, ParseError}; @@ -21,30 +21,39 @@ use crate::{tt_iter::TtIter, ParseError}; /// Stuff to the right is a [`MetaTemplate`] template which is used to produce /// output. #[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>); +pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>); -impl MetaTemplate { - pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result { - MetaTemplate::parse(pattern, Mode::Pattern, false) +impl MetaTemplate { + pub(crate) fn parse_pattern( + edition: impl Copy + Fn(SyntaxContextId) -> Edition, + pattern: &tt::Subtree, + ) -> Result { + MetaTemplate::parse(edition, pattern, Mode::Pattern, false) } pub(crate) fn parse_template( - template: &tt::Subtree, + edition: impl Copy + Fn(SyntaxContextId) -> Edition, + template: &tt::Subtree, new_meta_vars: bool, ) -> Result { - MetaTemplate::parse(template, Mode::Template, new_meta_vars) + MetaTemplate::parse(edition, template, Mode::Template, new_meta_vars) } - pub(crate) fn iter(&self) -> impl Iterator> { + pub(crate) fn iter(&self) -> impl Iterator { self.0.iter() } - fn parse(tt: &tt::Subtree, mode: Mode, new_meta_vars: bool) -> Result { + fn parse( + edition: impl Copy + Fn(SyntaxContextId) -> Edition, + tt: &tt::Subtree, + mode: Mode, + new_meta_vars: bool, + ) -> Result { let mut src = TtIter::new(tt); let mut res = Vec::new(); while let Some(first) = src.peek_n(0) { - let op = next_op(first, &mut src, mode, new_meta_vars)?; + let op = next_op(edition, first, &mut src, mode, new_meta_vars)?; res.push(op); } @@ -53,15 +62,15 @@ impl MetaTemplate { } #[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) enum Op { +pub(crate) enum Op { Var { name: SmolStr, kind: Option, - id: S, + id: Span, }, Ignore { name: SmolStr, - id: S, + id: Span, }, Index { depth: usize, @@ -75,17 +84,17 @@ pub(crate) enum Op { depth: Option, }, Repeat { - tokens: MetaTemplate, + tokens: MetaTemplate, kind: RepeatKind, - separator: Option>, + separator: Option, }, Subtree { - tokens: MetaTemplate, - delimiter: tt::Delimiter, + tokens: MetaTemplate, + delimiter: tt::Delimiter, }, - Literal(tt::Literal), - Punct(SmallVec<[tt::Punct; 3]>), - Ident(tt::Ident), + Literal(tt::Literal), + Punct(SmallVec<[tt::Punct; 3]>), + Ident(tt::Ident), } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -114,15 +123,15 @@ pub(crate) enum MetaVarKind { } #[derive(Clone, Debug, Eq)] -pub(crate) enum Separator { - Literal(tt::Literal), - Ident(tt::Ident), - Puncts(SmallVec<[tt::Punct; 3]>), +pub(crate) enum Separator { + Literal(tt::Literal), + Ident(tt::Ident), + Puncts(SmallVec<[tt::Punct; 3]>), } // Note that when we compare a Separator, we just care about its textual value. 
-impl PartialEq for Separator { - fn eq(&self, other: &Separator) -> bool { +impl PartialEq for Separator { + fn eq(&self, other: &Separator) -> bool { use Separator::*; match (self, other) { @@ -144,12 +153,13 @@ enum Mode { Template, } -fn next_op( - first_peeked: &tt::TokenTree, - src: &mut TtIter<'_, S>, +fn next_op( + edition: impl Copy + Fn(SyntaxContextId) -> Edition, + first_peeked: &tt::TokenTree, + src: &mut TtIter<'_, Span>, mode: Mode, new_meta_vars: bool, -) -> Result, ParseError> { +) -> Result { let res = match first_peeked { tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. })) => { src.next().expect("first token already peeked"); @@ -162,7 +172,7 @@ fn next_op( tt::TokenTree::Subtree(subtree) => match subtree.delimiter.kind { tt::DelimiterKind::Parenthesis => { let (separator, kind) = parse_repeat(src)?; - let tokens = MetaTemplate::parse(subtree, mode, new_meta_vars)?; + let tokens = MetaTemplate::parse(edition, subtree, mode, new_meta_vars)?; Op::Repeat { tokens, separator, kind } } tt::DelimiterKind::Brace => match mode { @@ -189,13 +199,13 @@ fn next_op( Op::Ident(tt::Ident { text: "$crate".into(), span: ident.span }) } tt::Leaf::Ident(ident) => { - let kind = eat_fragment_kind(src, mode)?; + let kind = eat_fragment_kind(edition, src, mode)?; let name = ident.text.clone(); let id = ident.span; Op::Var { name, kind, id } } tt::Leaf::Literal(lit) if is_boolean_literal(lit) => { - let kind = eat_fragment_kind(src, mode)?; + let kind = eat_fragment_kind(edition, src, mode)?; let name = lit.text.clone(); let id = lit.span; Op::Var { name, kind, id } @@ -233,15 +243,16 @@ fn next_op( tt::TokenTree::Subtree(subtree) => { src.next().expect("first token already peeked"); - let tokens = MetaTemplate::parse(subtree, mode, new_meta_vars)?; + let tokens = MetaTemplate::parse(edition, subtree, mode, new_meta_vars)?; Op::Subtree { tokens, delimiter: subtree.delimiter } } }; Ok(res) } -fn eat_fragment_kind( - src: &mut TtIter<'_, S>, +fn eat_fragment_kind( + edition: impl Copy + Fn(SyntaxContextId) -> Edition, + src: &mut TtIter<'_, Span>, mode: Mode, ) -> Result, ParseError> { if let Mode::Pattern = mode { @@ -252,7 +263,10 @@ fn eat_fragment_kind( let kind = match ident.text.as_str() { "path" => MetaVarKind::Path, "ty" => MetaVarKind::Ty, - "pat" => MetaVarKind::Pat, + "pat" => match edition(ident.span.ctx) { + Edition::Edition2015 | Edition::Edition2018 => MetaVarKind::PatParam, + Edition::Edition2021 | Edition::Edition2024 => MetaVarKind::Pat, + }, "pat_param" => MetaVarKind::PatParam, "stmt" => MetaVarKind::Stmt, "block" => MetaVarKind::Block, @@ -271,13 +285,11 @@ fn eat_fragment_kind( Ok(None) } -fn is_boolean_literal(lit: &tt::Literal) -> bool { +fn is_boolean_literal(lit: &tt::Literal) -> bool { matches!(lit.text.as_str(), "true" | "false") } -fn parse_repeat( - src: &mut TtIter<'_, S>, -) -> Result<(Option>, RepeatKind), ParseError> { +fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option, RepeatKind), ParseError> { let mut separator = Separator::Puncts(SmallVec::new()); for tt in src { let tt = match tt { @@ -314,7 +326,7 @@ fn parse_repeat( Err(ParseError::InvalidRepeat) } -fn parse_metavar_expr(new_meta_vars: bool, src: &mut TtIter<'_, S>) -> Result, ()> { +fn parse_metavar_expr(new_meta_vars: bool, src: &mut TtIter<'_, Span>) -> Result { let func = src.expect_ident()?; let args = src.expect_subtree()?; @@ -352,7 +364,7 @@ fn parse_metavar_expr(new_meta_vars: bool, src: &mut TtIter<'_, S>) -> Ok(op) } -fn parse_depth(src: &mut TtIter<'_, S>) -> 
Result { +fn parse_depth(src: &mut TtIter<'_, Span>) -> Result { if src.len() == 0 { Ok(0) } else if let tt::Leaf::Literal(lit) = src.expect_literal()? { @@ -363,7 +375,7 @@ fn parse_depth(src: &mut TtIter<'_, S>) -> Result { } } -fn try_eat_comma(src: &mut TtIter<'_, S>) -> bool { +fn try_eat_comma(src: &mut TtIter<'_, Span>) -> bool { if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))) = src.peek_n(0) { let _ = src.next(); return true; diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 57d6082dd7051..c934db6b715a8 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -1,5 +1,7 @@ //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`]. +use std::fmt; + use rustc_hash::{FxHashMap, FxHashSet}; use span::{SpanAnchor, SpanData, SpanMap}; use stdx::{never, non_empty_vec::NonEmptyVec}; @@ -9,30 +11,27 @@ use syntax::{ SyntaxKind::*, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T, }; -use tt::{ - buffer::{Cursor, TokenBuffer}, - Span, -}; +use tt::buffer::{Cursor, TokenBuffer}; use crate::{to_parser_input::to_parser_input, tt_iter::TtIter}; #[cfg(test)] mod tests; -pub trait SpanMapper { +pub trait SpanMapper { fn span_for(&self, range: TextRange) -> S; } impl SpanMapper> for SpanMap where - SpanData: Span, + SpanData: Copy, { fn span_for(&self, range: TextRange) -> SpanData { self.span_at(range.start()) } } -impl> SpanMapper for &SM { +impl> SpanMapper for &SM { fn span_for(&self, range: TextRange) -> S { SM::span_for(self, range) } @@ -78,8 +77,7 @@ pub fn syntax_node_to_token_tree( span: SpanData, ) -> tt::Subtree> where - SpanData: Span, - Ctx: Copy, + SpanData: Copy + fmt::Debug, SpanMap: SpanMapper>, { let mut c = Converter::new(node, map, Default::default(), Default::default(), span); @@ -98,8 +96,7 @@ pub fn syntax_node_to_token_tree_modified( ) -> tt::Subtree> where SpanMap: SpanMapper>, - SpanData: Span, - Ctx: Copy, + SpanData: Copy + fmt::Debug, { let mut c = Converter::new(node, map, append, remove, call_site); convert_tokens(&mut c) @@ -124,8 +121,7 @@ pub fn token_tree_to_syntax_node( entry_point: parser::TopEntryPoint, ) -> (Parse, SpanMap) where - SpanData: Span, - Ctx: Copy, + SpanData: Copy + fmt::Debug, { let buffer = match tt { tt::Subtree { @@ -161,7 +157,7 @@ pub fn parse_to_token_tree( text: &str, ) -> Option>> where - SpanData: Span, + SpanData: Copy + fmt::Debug, Ctx: Copy, { let lexed = parser::LexedStr::new(text); @@ -175,7 +171,7 @@ where /// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree. 
pub fn parse_to_token_tree_static_span(span: S, text: &str) -> Option> where - S: Span, + S: Copy + fmt::Debug, { let lexed = parser::LexedStr::new(text); if lexed.errors().next().is_some() { @@ -186,11 +182,10 @@ where } /// Split token tree with separate expr: $($e:expr)SEP* -pub fn parse_exprs_with_sep( - tt: &tt::Subtree, - sep: char, - span: S, -) -> Vec> { +pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char, span: S) -> Vec> +where + S: Copy + fmt::Debug, +{ if tt.token_trees.is_empty() { return Vec::new(); } @@ -226,7 +221,8 @@ pub fn parse_exprs_with_sep( fn convert_tokens(conv: &mut C) -> tt::Subtree where C: TokenConverter, - S: Span, + S: Copy + fmt::Debug, + C::Token: fmt::Debug, { let entry = tt::SubtreeBuilder { delimiter: tt::Delimiter::invisible_spanned(conv.call_site()), @@ -485,7 +481,7 @@ struct StaticRawConverter<'a, S> { span: S, } -trait SrcToken: std::fmt::Debug { +trait SrcToken { fn kind(&self, ctx: &Ctx) -> SyntaxKind; fn to_char(&self, ctx: &Ctx) -> Option; @@ -525,7 +521,7 @@ impl SrcToken, S> for usize { } } -impl SrcToken, S> for usize { +impl SrcToken, S> for usize { fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind { ctx.lexed.kind(*self) } @@ -541,7 +537,7 @@ impl SrcToken, S> for usize { impl TokenConverter> for RawConverter<'_, Ctx> where - SpanData: Span, + SpanData: Copy, { type Token = usize; @@ -584,7 +580,7 @@ where impl TokenConverter for StaticRawConverter<'_, S> where - S: Span, + S: Copy, { type Token = usize; @@ -709,7 +705,7 @@ impl SynToken { } } -impl SrcToken, S> for SynToken { +impl SrcToken, S> for SynToken { fn kind(&self, _ctx: &Converter) -> SyntaxKind { match self { SynToken::Ordinary(token) => token.kind(), @@ -748,7 +744,7 @@ impl SrcToken, S> for SynToke impl TokenConverter for Converter where - S: Span, + S: Copy, SpanMap: SpanMapper, { type Token = SynToken; @@ -828,7 +824,7 @@ where struct TtTreeSink<'a, Ctx> where - SpanData: Span, + SpanData: Copy, { buf: String, cursor: Cursor<'a, SpanData>, @@ -839,7 +835,7 @@ where impl<'a, Ctx> TtTreeSink<'a, Ctx> where - SpanData: Span, + SpanData: Copy, { fn new(cursor: Cursor<'a, SpanData>) -> Self { TtTreeSink { @@ -871,7 +867,7 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> { impl TtTreeSink<'_, Ctx> where - SpanData: Span, + SpanData: Copy, { /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween. /// This occurs when a float literal is used as a field access. diff --git a/crates/mbe/src/to_parser_input.rs b/crates/mbe/src/to_parser_input.rs index 00a14f04686e2..3f70149aa5eae 100644 --- a/crates/mbe/src/to_parser_input.rs +++ b/crates/mbe/src/to_parser_input.rs @@ -1,11 +1,13 @@ //! Convert macro-by-example tokens which are specific to macro expansion into a //! format that works for our parser. +use std::fmt; + use syntax::{SyntaxKind, SyntaxKind::*, T}; -use tt::{buffer::TokenBuffer, Span}; +use tt::buffer::TokenBuffer; -pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_, S>) -> parser::Input { +pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_, S>) -> parser::Input { let mut res = parser::Input::default(); let mut current = buffer.begin(); diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index f9913cb6f9b8e..e3d12d87078b3 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -1,9 +1,10 @@ //! A "Parser" structure for token trees. We use this when parsing a declarative //! macro definition into a list of patterns and templates. 
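Across these syntax-bridge hunks the dedicated `tt::Span` trait bound is replaced by plain `Copy` (plus `fmt::Debug` only where tokens need to be printed), so any copyable span type can flow through the conversions. A minimal sketch of that shape, with a made-up `ConstMapper` and `u32` spans; this is an outline of the pattern, not the crate's actual `SpanMapper` definition:

    use std::fmt;
    use std::ops::Range;

    /// Anything that can answer "which span covers this text range?".
    /// Note the bound is just `Copy`, not a bespoke `Span` trait.
    trait SpanMapper<S: Copy> {
        fn span_for(&self, range: Range<u32>) -> S;
    }

    /// Trivial mapper that hands out one constant span for everything,
    /// similar in spirit to the "static span" converters above.
    struct ConstMapper<S>(S);

    impl<S: Copy + fmt::Debug> SpanMapper<S> for ConstMapper<S> {
        fn span_for(&self, _range: Range<u32>) -> S {
            self.0
        }
    }

    fn main() {
        let map = ConstMapper(42u32);
        println!("{:?}", map.span_for(0..3)); // prints 42
    }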
+use core::fmt; + use smallvec::{smallvec, SmallVec}; use syntax::SyntaxKind; -use tt::Span; use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult}; @@ -12,7 +13,7 @@ pub(crate) struct TtIter<'a, S> { pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>, } -impl<'a, S: Span> TtIter<'a, S> { +impl<'a, S: Copy> TtIter<'a, S> { pub(crate) fn new(subtree: &'a tt::Subtree) -> TtIter<'a, S> { TtIter { inner: subtree.token_trees.iter() } } @@ -130,7 +131,12 @@ impl<'a, S: Span> TtIter<'a, S> { _ => Ok(smallvec![first]), } } + pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree> { + self.inner.as_slice().get(n) + } +} +impl<'a, S: Copy + fmt::Debug> TtIter<'a, S> { pub(crate) fn expect_fragment( &mut self, entry_point: parser::PrefixEntryPoint, @@ -185,10 +191,6 @@ impl<'a, S: Span> TtIter<'a, S> { }; ExpandResult { value: res, err } } - - pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree> { - self.inner.as_slice().get(n) - } } impl<'a, S> Iterator for TtIter<'a, S> { diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs index 72848a1f2b79f..54ed5f0ba2368 100644 --- a/crates/parser/src/grammar/expressions/atom.rs +++ b/crates/parser/src/grammar/expressions/atom.rs @@ -490,6 +490,18 @@ fn match_expr(p: &mut Parser<'_>) -> CompletedMarker { m.complete(p, MATCH_EXPR) } +// test_err match_arms_recovery +// fn foo() { +// match () { +// _ => (),, +// _ => , +// _ => (), +// => (), +// if true => (), +// _ => (), +// () if => (), +// } +// } pub(crate) fn match_arm_list(p: &mut Parser<'_>) { assert!(p.at(T!['{'])); let m = p.start(); @@ -511,6 +523,10 @@ pub(crate) fn match_arm_list(p: &mut Parser<'_>) { error_block(p, "expected match arm"); continue; } + if p.at(T![,]) { + p.err_and_bump("expected pattern"); + continue; + } match_arm(p); } p.expect(T!['}']); @@ -544,26 +560,30 @@ fn match_arm(p: &mut Parser<'_>) { // } attributes::outer_attrs(p); - patterns::pattern_top_r(p, TokenSet::EMPTY); + patterns::pattern_top_r(p, TokenSet::new(&[T![=], T![if]])); if p.at(T![if]) { match_guard(p); } p.expect(T![=>]); - let blocklike = match expr_stmt(p, None) { - Some((_, blocklike)) => blocklike, - None => BlockLike::NotBlock, - }; - - // test match_arms_commas - // fn foo() { - // match () { - // _ => (), - // _ => {} - // _ => () - // } - // } - if !p.eat(T![,]) && !blocklike.is_block() && !p.at(T!['}']) { - p.error("expected `,`"); + if p.eat(T![,]) { + p.error("expected expression"); + } else { + let blocklike = match expr_stmt(p, None) { + Some((_, blocklike)) => blocklike, + None => BlockLike::NotBlock, + }; + + // test match_arms_commas + // fn foo() { + // match () { + // _ => (), + // _ => {} + // _ => () + // } + // } + if !p.eat(T![,]) && !blocklike.is_block() && !p.at(T!['}']) { + p.error("expected `,`"); + } } m.complete(p, MATCH_ARM); } @@ -579,7 +599,11 @@ fn match_guard(p: &mut Parser<'_>) -> CompletedMarker { assert!(p.at(T![if])); let m = p.start(); p.bump(T![if]); - expr(p); + if p.at(T![=]) { + p.error("expected expression"); + } else { + expr(p); + } m.complete(p, MATCH_GUARD) } diff --git a/crates/parser/test_data/parser/inline/err/0034_match_arms_recovery.rast b/crates/parser/test_data/parser/inline/err/0034_match_arms_recovery.rast new file mode 100644 index 0000000000000..5b191945e4573 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0034_match_arms_recovery.rast @@ -0,0 +1,113 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN 
"(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + MATCH_EXPR + MATCH_KW "match" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + MATCH_ARM_LIST + L_CURLY "{" + WHITESPACE "\n " + MATCH_ARM + WILDCARD_PAT + UNDERSCORE "_" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," + ERROR + COMMA "," + WHITESPACE "\n " + MATCH_ARM + WILDCARD_PAT + UNDERSCORE "_" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + COMMA "," + WHITESPACE "\n " + MATCH_ARM + WILDCARD_PAT + UNDERSCORE "_" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + MATCH_ARM + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + MATCH_ARM + MATCH_GUARD + IF_KW "if" + WHITESPACE " " + LITERAL + TRUE_KW "true" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + MATCH_ARM + WILDCARD_PAT + UNDERSCORE "_" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + MATCH_ARM + TUPLE_PAT + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + MATCH_GUARD + IF_KW "if" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + R_CURLY "}" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 42: expected pattern +error 58: expected expression +error 85: expected pattern +error 100: expected pattern +error 145: expected expression diff --git a/crates/parser/test_data/parser/inline/err/0034_match_arms_recovery.rs b/crates/parser/test_data/parser/inline/err/0034_match_arms_recovery.rs new file mode 100644 index 0000000000000..173103b2e3797 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0034_match_arms_recovery.rs @@ -0,0 +1,11 @@ +fn foo() { + match () { + _ => (),, + _ => , + _ => (), + => (), + if true => (), + _ => (), + () if => (), + } +} diff --git a/crates/paths/Cargo.toml b/crates/paths/Cargo.toml index 3d8752b5a829d..59a4ad9a25587 100644 --- a/crates/paths/Cargo.toml +++ b/crates/paths/Cargo.toml @@ -12,10 +12,14 @@ rust-version.workspace = true doctest = false [dependencies] +camino.workspace = true # Adding this dep sadly puts a lot of rust-analyzer crates after the # serde-derive crate. Even though we don't activate the derive feature here, # someone else in the crate graph certainly does! # serde.workspace = true +[features] +serde1 = ["camino/serde1"] + [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/crates/paths/src/lib.rs b/crates/paths/src/lib.rs index a63d251c20d45..2d3653401d2f5 100644 --- a/crates/paths/src/lib.rs +++ b/crates/paths/src/lib.rs @@ -1,4 +1,4 @@ -//! Thin wrappers around `std::path`, distinguishing between absolute and +//! Thin wrappers around `std::path`/`camino::path`, distinguishing between absolute and //! relative paths. #![warn(rust_2018_idioms, unused_lifetimes)] @@ -7,16 +7,24 @@ use std::{ borrow::Borrow, ffi::OsStr, fmt, ops, - path::{Component, Path, PathBuf}, + path::{Path, PathBuf}, }; -/// Wrapper around an absolute [`PathBuf`]. -#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] -pub struct AbsPathBuf(PathBuf); +pub use camino::*; + +/// Wrapper around an absolute [`Utf8PathBuf`]. 
+#[derive(Debug, Clone, Ord, PartialOrd, Eq, Hash)] +pub struct AbsPathBuf(Utf8PathBuf); + +impl From for Utf8PathBuf { + fn from(AbsPathBuf(path_buf): AbsPathBuf) -> Utf8PathBuf { + path_buf + } +} impl From for PathBuf { fn from(AbsPathBuf(path_buf): AbsPathBuf) -> PathBuf { - path_buf + path_buf.into() } } @@ -27,9 +35,21 @@ impl ops::Deref for AbsPathBuf { } } +impl AsRef for AbsPathBuf { + fn as_ref(&self) -> &Utf8Path { + self.0.as_path() + } +} + +impl AsRef for AbsPathBuf { + fn as_ref(&self) -> &OsStr { + self.0.as_ref() + } +} + impl AsRef for AbsPathBuf { fn as_ref(&self) -> &Path { - self.0.as_path() + self.0.as_ref() } } @@ -45,26 +65,36 @@ impl Borrow for AbsPathBuf { } } +impl TryFrom for AbsPathBuf { + type Error = Utf8PathBuf; + fn try_from(path_buf: Utf8PathBuf) -> Result { + if !path_buf.is_absolute() { + return Err(path_buf); + } + Ok(AbsPathBuf(path_buf)) + } +} + impl TryFrom for AbsPathBuf { type Error = PathBuf; fn try_from(path_buf: PathBuf) -> Result { if !path_buf.is_absolute() { return Err(path_buf); } - Ok(AbsPathBuf(path_buf)) + Ok(AbsPathBuf(Utf8PathBuf::from_path_buf(path_buf)?)) } } impl TryFrom<&str> for AbsPathBuf { - type Error = PathBuf; - fn try_from(path: &str) -> Result { - AbsPathBuf::try_from(PathBuf::from(path)) + type Error = Utf8PathBuf; + fn try_from(path: &str) -> Result { + AbsPathBuf::try_from(Utf8PathBuf::from(path)) } } -impl PartialEq for AbsPathBuf { - fn eq(&self, other: &AbsPath) -> bool { - self.as_path() == other +impl + ?Sized> PartialEq

for AbsPathBuf { + fn eq(&self, other: &P) -> bool { + self.0.as_std_path() == other.as_ref() } } @@ -74,19 +104,31 @@ impl AbsPathBuf { /// # Panics /// /// Panics if `path` is not absolute. - pub fn assert(path: PathBuf) -> AbsPathBuf { + pub fn assert(path: Utf8PathBuf) -> AbsPathBuf { AbsPathBuf::try_from(path) - .unwrap_or_else(|path| panic!("expected absolute path, got {}", path.display())) + .unwrap_or_else(|path| panic!("expected absolute path, got {}", path)) + } + + /// Wrap the given absolute path in `AbsPathBuf` + /// + /// # Panics + /// + /// Panics if `path` is not absolute. + pub fn assert_utf8(path: PathBuf) -> AbsPathBuf { + AbsPathBuf::assert( + Utf8PathBuf::from_path_buf(path) + .unwrap_or_else(|path| panic!("expected utf8 path, got {}", path.display())), + ) } /// Coerces to an `AbsPath` slice. /// - /// Equivalent of [`PathBuf::as_path`] for `AbsPathBuf`. + /// Equivalent of [`Utf8PathBuf::as_path`] for `AbsPathBuf`. pub fn as_path(&self) -> &AbsPath { AbsPath::assert(self.0.as_path()) } - /// Equivalent of [`PathBuf::pop`] for `AbsPathBuf`. + /// Equivalent of [`Utf8PathBuf::pop`] for `AbsPathBuf`. /// /// Note that this won't remove the root component, so `self` will still be /// absolute. @@ -97,18 +139,36 @@ impl AbsPathBuf { impl fmt::Display for AbsPathBuf { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&self.0.display(), f) + fmt::Display::fmt(&self.0, f) } } -/// Wrapper around an absolute [`Path`]. -#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] +/// Wrapper around an absolute [`Utf8Path`]. +#[derive(Debug, Ord, PartialOrd, Eq, Hash)] #[repr(transparent)] -pub struct AbsPath(Path); +pub struct AbsPath(Utf8Path); + +impl + ?Sized> PartialEq

for AbsPath { + fn eq(&self, other: &P) -> bool { + self.0.as_std_path() == other.as_ref() + } +} + +impl AsRef for AbsPath { + fn as_ref(&self) -> &Utf8Path { + &self.0 + } +} impl AsRef for AbsPath { fn as_ref(&self) -> &Path { - &self.0 + self.0.as_ref() + } +} + +impl AsRef for AbsPath { + fn as_ref(&self) -> &OsStr { + self.0.as_ref() } } @@ -120,9 +180,9 @@ impl ToOwned for AbsPath { } } -impl<'a> TryFrom<&'a Path> for &'a AbsPath { - type Error = &'a Path; - fn try_from(path: &'a Path) -> Result<&'a AbsPath, &'a Path> { +impl<'a> TryFrom<&'a Utf8Path> for &'a AbsPath { + type Error = &'a Utf8Path; + fn try_from(path: &'a Utf8Path) -> Result<&'a AbsPath, &'a Utf8Path> { if !path.is_absolute() { return Err(path); } @@ -136,24 +196,24 @@ impl AbsPath { /// # Panics /// /// Panics if `path` is not absolute. - pub fn assert(path: &Path) -> &AbsPath { + pub fn assert(path: &Utf8Path) -> &AbsPath { assert!(path.is_absolute()); - unsafe { &*(path as *const Path as *const AbsPath) } + unsafe { &*(path as *const Utf8Path as *const AbsPath) } } - /// Equivalent of [`Path::parent`] for `AbsPath`. + /// Equivalent of [`Utf8Path::parent`] for `AbsPath`. pub fn parent(&self) -> Option<&AbsPath> { self.0.parent().map(AbsPath::assert) } - /// Equivalent of [`Path::join`] for `AbsPath` with an additional normalize step afterwards. - pub fn absolutize(&self, path: impl AsRef) -> AbsPathBuf { + /// Equivalent of [`Utf8Path::join`] for `AbsPath` with an additional normalize step afterwards. + pub fn absolutize(&self, path: impl AsRef) -> AbsPathBuf { self.join(path).normalize() } - /// Equivalent of [`Path::join`] for `AbsPath`. - pub fn join(&self, path: impl AsRef) -> AbsPathBuf { - self.as_ref().join(path).try_into().unwrap() + /// Equivalent of [`Utf8Path::join`] for `AbsPath`. + pub fn join(&self, path: impl AsRef) -> AbsPathBuf { + Utf8Path::join(self.as_ref(), path).try_into().unwrap() } /// Normalize the given path: @@ -172,7 +232,7 @@ impl AbsPath { AbsPathBuf(normalize_path(&self.0)) } - /// Equivalent of [`Path::to_path_buf`] for `AbsPath`. + /// Equivalent of [`Utf8Path::to_path_buf`] for `AbsPath`. pub fn to_path_buf(&self) -> AbsPathBuf { AbsPathBuf::try_from(self.0.to_path_buf()).unwrap() } @@ -181,7 +241,7 @@ impl AbsPath { panic!("We explicitly do not provide canonicalization API, as that is almost always a wrong solution, see #14430") } - /// Equivalent of [`Path::strip_prefix`] for `AbsPath`. + /// Equivalent of [`Utf8Path::strip_prefix`] for `AbsPath`. /// /// Returns a relative path. pub fn strip_prefix(&self, base: &AbsPath) -> Option<&RelPath> { @@ -195,57 +255,61 @@ impl AbsPath { } pub fn name_and_extension(&self) -> Option<(&str, Option<&str>)> { - Some(( - self.file_stem()?.to_str()?, - self.extension().and_then(|extension| extension.to_str()), - )) + Some((self.file_stem()?, self.extension())) } // region:delegate-methods - // Note that we deliberately don't implement `Deref` here. + // Note that we deliberately don't implement `Deref` here. // - // The problem with `Path` is that it directly exposes convenience IO-ing - // methods. For example, `Path::exists` delegates to `fs::metadata`. + // The problem with `Utf8Path` is that it directly exposes convenience IO-ing + // methods. For example, `Utf8Path::exists` delegates to `fs::metadata`. // // For `AbsPath`, we want to make sure that this is a POD type, and that all // IO goes via `fs`. That way, it becomes easier to mock IO when we need it. 
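The rewritten paths crate keeps the same newtype discipline it had over `std::path`, just on top of `camino::Utf8PathBuf`: absoluteness is checked once at construction and then carried by the type. A compact sketch of that pattern; it assumes the `camino` crate as a dependency, and `AbsUtf8PathBuf` is a made-up name rather than the crate's own `AbsPathBuf`:

    use camino::Utf8PathBuf;

    /// Made-up wrapper: a UTF-8 path that is guaranteed to be absolute.
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct AbsUtf8PathBuf(Utf8PathBuf);

    impl TryFrom<Utf8PathBuf> for AbsUtf8PathBuf {
        type Error = Utf8PathBuf;

        fn try_from(path: Utf8PathBuf) -> Result<Self, Self::Error> {
            // Reject relative paths instead of silently absolutizing them;
            // callers that are sure can layer an assert-style constructor on top.
            if path.is_absolute() {
                Ok(Self(path))
            } else {
                Err(path)
            }
        }
    }

    fn main() {
        assert!(AbsUtf8PathBuf::try_from(Utf8PathBuf::from("relative/dir")).is_err());
        #[cfg(unix)]
        assert!(AbsUtf8PathBuf::try_from(Utf8PathBuf::from("/tmp/lib.rs")).is_ok());
    }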
- pub fn file_name(&self) -> Option<&OsStr> { + pub fn file_name(&self) -> Option<&str> { self.0.file_name() } - pub fn extension(&self) -> Option<&OsStr> { + pub fn extension(&self) -> Option<&str> { self.0.extension() } - pub fn file_stem(&self) -> Option<&OsStr> { + pub fn file_stem(&self) -> Option<&str> { self.0.file_stem() } pub fn as_os_str(&self) -> &OsStr { self.0.as_os_str() } + pub fn as_str(&self) -> &str { + self.0.as_str() + } #[deprecated(note = "use Display instead")] - pub fn display(&self) -> std::path::Display<'_> { - self.0.display() + pub fn display(&self) -> ! { + unimplemented!() } #[deprecated(note = "use std::fs::metadata().is_ok() instead")] - pub fn exists(&self) -> bool { - self.0.exists() + pub fn exists(&self) -> ! { + unimplemented!() + } + + pub fn components(&self) -> Utf8Components<'_> { + self.0.components() } // endregion:delegate-methods } impl fmt::Display for AbsPath { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&self.0.display(), f) + fmt::Display::fmt(&self.0, f) } } -/// Wrapper around a relative [`PathBuf`]. +/// Wrapper around a relative [`Utf8PathBuf`]. #[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] -pub struct RelPathBuf(PathBuf); +pub struct RelPathBuf(Utf8PathBuf); -impl From for PathBuf { - fn from(RelPathBuf(path_buf): RelPathBuf) -> PathBuf { +impl From for Utf8PathBuf { + fn from(RelPathBuf(path_buf): RelPathBuf) -> Utf8PathBuf { path_buf } } @@ -257,15 +321,21 @@ impl ops::Deref for RelPathBuf { } } +impl AsRef for RelPathBuf { + fn as_ref(&self) -> &Utf8Path { + self.0.as_path() + } +} + impl AsRef for RelPathBuf { fn as_ref(&self) -> &Path { - self.0.as_path() + self.0.as_ref() } } -impl TryFrom for RelPathBuf { - type Error = PathBuf; - fn try_from(path_buf: PathBuf) -> Result { +impl TryFrom for RelPathBuf { + type Error = Utf8PathBuf; + fn try_from(path_buf: Utf8PathBuf) -> Result { if !path_buf.is_relative() { return Err(path_buf); } @@ -274,65 +344,79 @@ impl TryFrom for RelPathBuf { } impl TryFrom<&str> for RelPathBuf { - type Error = PathBuf; - fn try_from(path: &str) -> Result { - RelPathBuf::try_from(PathBuf::from(path)) + type Error = Utf8PathBuf; + fn try_from(path: &str) -> Result { + RelPathBuf::try_from(Utf8PathBuf::from(path)) } } impl RelPathBuf { /// Coerces to a `RelPath` slice. /// - /// Equivalent of [`PathBuf::as_path`] for `RelPathBuf`. + /// Equivalent of [`Utf8PathBuf::as_path`] for `RelPathBuf`. pub fn as_path(&self) -> &RelPath { RelPath::new_unchecked(self.0.as_path()) } } -/// Wrapper around a relative [`Path`]. +/// Wrapper around a relative [`Utf8Path`]. #[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] #[repr(transparent)] -pub struct RelPath(Path); +pub struct RelPath(Utf8Path); + +impl AsRef for RelPath { + fn as_ref(&self) -> &Utf8Path { + &self.0 + } +} impl AsRef for RelPath { fn as_ref(&self) -> &Path { - &self.0 + self.0.as_ref() } } impl RelPath { /// Creates a new `RelPath` from `path`, without checking if it is relative. - pub fn new_unchecked(path: &Path) -> &RelPath { - unsafe { &*(path as *const Path as *const RelPath) } + pub fn new_unchecked(path: &Utf8Path) -> &RelPath { + unsafe { &*(path as *const Utf8Path as *const RelPath) } } - /// Equivalent of [`Path::to_path_buf`] for `RelPath`. + /// Equivalent of [`Utf8Path::to_path_buf`] for `RelPath`. 
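One detail in the delegate methods above is easy to miss: `display()` and `exists()` cannot be forwarded any more, so they are kept only as `#[deprecated]` stubs returning `!`, which preserves a clear compiler warning at old call sites while guaranteeing nothing still relies on the removed behaviour. A minimal sketch of that trick on a made-up type:

    /// Stand-in type; not the real `AbsPath`.
    struct PathStub(String);

    impl PathStub {
        /// The supported replacement.
        fn as_str(&self) -> &str {
            &self.0
        }

        /// Kept only so old call sites get a pointed deprecation warning;
        /// the `!` return type plus `unimplemented!` make sure the old
        /// behaviour cannot quietly survive.
        #[deprecated(note = "use Display / as_str() instead")]
        #[allow(dead_code)]
        fn display(&self) -> ! {
            unimplemented!("display() was removed; format the path directly")
        }
    }

    fn main() {
        let p = PathStub("/tmp/example".to_owned());
        println!("{}", p.as_str());
    }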
pub fn to_path_buf(&self) -> RelPathBuf { RelPathBuf::try_from(self.0.to_path_buf()).unwrap() } + + pub fn as_utf8_path(&self) -> &Utf8Path { + self.as_ref() + } + + pub fn as_str(&self) -> &str { + self.0.as_str() + } } /// Taken from -fn normalize_path(path: &Path) -> PathBuf { +fn normalize_path(path: &Utf8Path) -> Utf8PathBuf { let mut components = path.components().peekable(); - let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().copied() { + let mut ret = if let Some(c @ Utf8Component::Prefix(..)) = components.peek().copied() { components.next(); - PathBuf::from(c.as_os_str()) + Utf8PathBuf::from(c.as_str()) } else { - PathBuf::new() + Utf8PathBuf::new() }; for component in components { match component { - Component::Prefix(..) => unreachable!(), - Component::RootDir => { - ret.push(component.as_os_str()); + Utf8Component::Prefix(..) => unreachable!(), + Utf8Component::RootDir => { + ret.push(component.as_str()); } - Component::CurDir => {} - Component::ParentDir => { + Utf8Component::CurDir => {} + Utf8Component::ParentDir => { ret.pop(); } - Component::Normal(c) => { + Utf8Component::Normal(c) => { ret.push(c); } } diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml index 978ad155609d5..f30f6a0f23b91 100644 --- a/crates/proc-macro-api/Cargo.toml +++ b/crates/proc-macro-api/Cargo.toml @@ -23,7 +23,7 @@ snap = "1.1.0" indexmap = "2.1.0" # local deps -paths.workspace = true +paths = { workspace = true, features = ["serde1"] } tt.workspace = true stdx.workspace = true text-size.workspace = true diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 6b16711a8d87b..fd491644648a2 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -35,8 +35,11 @@ pub use version::{read_dylib_info, read_version, RustCInfo}; #[derive(Copy, Clone, Eq, PartialEq, Debug, Serialize, Deserialize)] pub enum ProcMacroKind { CustomDerive, - FuncLike, Attr, + // This used to be called FuncLike, so that's what the server expects currently. + #[serde(alias = "bang")] + #[serde(rename(serialize = "FuncLike", deserialize = "FuncLike"))] + Bang, } /// A handle to an external process which load dylibs with macros (.so or .dll) diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index aa5aff455fd80..ad0e1f187b622 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -1,11 +1,9 @@ //! Defines messages for cross-process message passing based on `ndjson` wire protocol pub(crate) mod flat; -use std::{ - io::{self, BufRead, Write}, - path::PathBuf, -}; +use std::io::{self, BufRead, Write}; +use paths::Utf8PathBuf; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use crate::ProcMacroKind; @@ -27,7 +25,7 @@ pub const CURRENT_API_VERSION: u32 = RUST_ANALYZER_SPAN_SUPPORT; #[derive(Debug, Serialize, Deserialize)] pub enum Request { /// Since [`NO_VERSION_CHECK_VERSION`] - ListMacros { dylib_path: PathBuf }, + ListMacros { dylib_path: Utf8PathBuf }, /// Since [`NO_VERSION_CHECK_VERSION`] ExpandMacro(Box), /// Since [`VERSION_CHECK_VERSION`] @@ -89,7 +87,7 @@ pub struct ExpandMacro { /// Possible attributes for the attribute-like macros. pub attributes: Option, - pub lib: PathBuf, + pub lib: Utf8PathBuf, /// Environment variables to set during macro expansion. 
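The `FuncLike` to `Bang` rename just above stays wire-compatible by pinning the serialized string to the old name and accepting an extra alias on input, so an older proc-macro server and a newer client keep understanding each other. A standalone sketch of that serde technique; it assumes `serde` (with derive) and `serde_json` as dependencies, and `Kind` is a stand-in enum:

    use serde::{Deserialize, Serialize};

    #[derive(Debug, PartialEq, Serialize, Deserialize)]
    #[allow(dead_code)]
    enum Kind {
        CustomDerive,
        Attr,
        // Rust-side name changes, wire name stays "FuncLike".
        #[serde(alias = "bang")]
        #[serde(rename(serialize = "FuncLike", deserialize = "FuncLike"))]
        Bang,
    }

    fn main() {
        // Still serializes as the legacy name...
        assert_eq!(serde_json::to_string(&Kind::Bang).unwrap(), r#""FuncLike""#);
        // ...and both the legacy name and the alias deserialize to `Bang`.
        assert_eq!(serde_json::from_str::<Kind>(r#""FuncLike""#).unwrap(), Kind::Bang);
        assert_eq!(serde_json::from_str::<Kind>(r#""bang""#).unwrap(), Kind::Bang);
    }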
pub env: Vec<(String, String)>, @@ -273,7 +271,7 @@ mod tests { macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table), macro_name: Default::default(), attributes: None, - lib: std::env::current_dir().unwrap(), + lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(), env: Default::default(), current_dir: Default::default(), has_global_spans: ExpnGlobals { diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index caf9e237fdd7d..99cdbd930e1c6 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -88,8 +88,6 @@ impl std::fmt::Debug for TokenId { } } -impl tt::Span for TokenId {} - #[derive(Serialize, Deserialize, Debug)] pub struct FlatTree { subtree: Vec, diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 72f95643c8b5e..35d48a155433f 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -175,7 +175,7 @@ fn mk_child( env: &FxHashMap, null_stderr: bool, ) -> io::Result { - let mut cmd = Command::new(path.as_os_str()); + let mut cmd = Command::new(path); cmd.envs(env) .env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable") .stdin(Stdio::piped()) @@ -183,7 +183,7 @@ fn mk_child( .stderr(if null_stderr { Stdio::null() } else { Stdio::inherit() }); if cfg!(windows) { let mut path_var = std::ffi::OsString::new(); - path_var.push(path.parent().unwrap().parent().unwrap().as_os_str()); + path_var.push(path.parent().unwrap().parent().unwrap()); path_var.push("\\bin;"); path_var.push(std::env::var_os("PATH").unwrap_or_default()); cmd.env("PATH", path_var); diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs index 52b4cced5f584..22c34ff1678d8 100644 --- a/crates/proc-macro-srv/src/dylib.rs +++ b/crates/proc-macro-srv/src/dylib.rs @@ -1,16 +1,11 @@ //! Handles dynamic library loading for proc macro -use std::{ - fmt, - fs::File, - io, - path::{Path, PathBuf}, -}; +use std::{fmt, fs::File, io}; use libloading::Library; use memmap2::Mmap; use object::Object; -use paths::AbsPath; +use paths::{AbsPath, Utf8Path, Utf8PathBuf}; use proc_macro::bridge; use proc_macro_api::{read_dylib_info, ProcMacroKind}; @@ -26,7 +21,7 @@ fn is_derive_registrar_symbol(symbol: &str) -> bool { symbol.contains(NEW_REGISTRAR_SYMBOL) } -fn find_registrar_symbol(file: &Path) -> io::Result> { +fn find_registrar_symbol(file: &Utf8Path) -> io::Result> { let file = File::open(file)?; let buffer = unsafe { Mmap::map(&file)? }; @@ -62,12 +57,12 @@ fn find_registrar_symbol(file: &Path) -> io::Result> { /// /// It seems that on Windows that behaviour is default, so we do nothing in that case. 
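The `Command::new(path)` and `path_var.push(...)` simplifications in the process-spawning hunk above work because the path wrapper now implements `AsRef<OsStr>`, which is exactly the bound `Command::new` and `OsString::push` already take. A small sketch of that conversion on a made-up wrapper type:

    use std::ffi::OsStr;
    use std::process::Command;

    /// Made-up wrapper; the point is only the `AsRef<OsStr>` impl.
    struct ToolPath(String);

    impl AsRef<OsStr> for ToolPath {
        fn as_ref(&self) -> &OsStr {
            OsStr::new(&self.0)
        }
    }

    fn main() {
        let tool = ToolPath("rustc".to_owned());
        // The wrapper goes straight into `Command::new`; no explicit
        // `.as_os_str()` needed at the call site.
        let cmd = Command::new(&tool);
        println!("{:?}", cmd.get_program());
    }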
#[cfg(windows)] -fn load_library(file: &Path) -> Result { +fn load_library(file: &Utf8Path) -> Result { unsafe { Library::new(file) } } #[cfg(unix)] -fn load_library(file: &Path) -> Result { +fn load_library(file: &Utf8Path) -> Result { use libloading::os::unix::Library as UnixLibrary; use std::os::raw::c_int; @@ -116,14 +111,14 @@ struct ProcMacroLibraryLibloading { } impl ProcMacroLibraryLibloading { - fn open(file: &Path) -> Result { + fn open(file: &Utf8Path) -> Result { let symbol_name = find_registrar_symbol(file)?.ok_or_else(|| { - invalid_data_err(format!("Cannot find registrar symbol in file {}", file.display())) + invalid_data_err(format!("Cannot find registrar symbol in file {file}")) })?; - let abs_file: &AbsPath = file.try_into().map_err(|_| { - invalid_data_err(format!("expected an absolute path, got {}", file.display())) - })?; + let abs_file: &AbsPath = file + .try_into() + .map_err(|_| invalid_data_err(format!("expected an absolute path, got {file}")))?; let version_info = read_dylib_info(abs_file)?; let lib = load_library(file).map_err(invalid_data_err)?; @@ -138,10 +133,10 @@ pub struct Expander { } impl Expander { - pub fn new(lib: &Path) -> Result { + pub fn new(lib: &Utf8Path) -> Result { // Some libraries for dynamic loading require canonicalized path even when it is // already absolute - let lib = lib.canonicalize()?; + let lib = lib.canonicalize_utf8()?; let lib = ensure_file_with_lock_free_access(&lib)?; @@ -176,30 +171,26 @@ impl Expander { /// Copy the dylib to temp directory to prevent locking in Windows #[cfg(windows)] -fn ensure_file_with_lock_free_access(path: &Path) -> io::Result { +fn ensure_file_with_lock_free_access(path: &Utf8Path) -> io::Result { use std::collections::hash_map::RandomState; - use std::ffi::OsString; use std::hash::{BuildHasher, Hasher}; if std::env::var("RA_DONT_COPY_PROC_MACRO_DLL").is_ok() { return Ok(path.to_path_buf()); } - let mut to = std::env::temp_dir(); + let mut to = Utf8PathBuf::from_path_buf(std::env::temp_dir()).unwrap(); let file_name = path.file_name().ok_or_else(|| { - io::Error::new( - io::ErrorKind::InvalidInput, - format!("File path is invalid: {}", path.display()), - ) + io::Error::new(io::ErrorKind::InvalidInput, format!("File path is invalid: {path}")) })?; // Generate a unique number by abusing `HashMap`'s hasher. // Maybe this will also "inspire" a libs team member to finally put `rand` in libstd. 
let t = RandomState::new().build_hasher().finish(); - let mut unique_name = OsString::from(t.to_string()); - unique_name.push(file_name); + let mut unique_name = t.to_string(); + unique_name.push_str(file_name); to.push(unique_name); std::fs::copy(path, &to).unwrap(); @@ -207,6 +198,6 @@ fn ensure_file_with_lock_free_access(path: &Path) -> io::Result { } #[cfg(unix)] -fn ensure_file_with_lock_free_access(path: &Path) -> io::Result { - Ok(path.to_path_buf()) +fn ensure_file_with_lock_free_access(path: &Utf8Path) -> io::Result { + Ok(path.to_owned()) } diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 831632c64c0a2..2472c1e3119c1 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -33,12 +33,11 @@ use std::{ collections::{hash_map::Entry, HashMap}, env, ffi::OsString, - fs, - path::{Path, PathBuf}, - thread, + fs, thread, time::SystemTime, }; +use paths::{Utf8Path, Utf8PathBuf}; use proc_macro_api::{ msg::{ self, deserialize_span_data_index_map, serialize_span_data_index_map, ExpnGlobals, @@ -53,7 +52,7 @@ use crate::server::TokenStream; // see `build.rs` include!(concat!(env!("OUT_DIR"), "/rustc_version.rs")); -trait ProcMacroSrvSpan: tt::Span { +trait ProcMacroSrvSpan: Copy { type Server: proc_macro::bridge::server::Server>; fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server; } @@ -81,7 +80,7 @@ impl ProcMacroSrvSpan for Span { #[derive(Default)] pub struct ProcMacroSrv { - expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>, + expanders: HashMap<(Utf8PathBuf, SystemTime), dylib::Expander>, span_mode: SpanMode, } @@ -149,23 +148,22 @@ impl ProcMacroSrv { pub fn list_macros( &mut self, - dylib_path: &Path, + dylib_path: &Utf8Path, ) -> Result, String> { let expander = self.expander(dylib_path)?; Ok(expander.list_macros()) } - fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> { + fn expander(&mut self, path: &Utf8Path) -> Result<&dylib::Expander, String> { let time = fs::metadata(path) .and_then(|it| it.modified()) - .map_err(|err| format!("Failed to get file metadata for {}: {err}", path.display()))?; + .map_err(|err| format!("Failed to get file metadata for {path}: {err}",))?; Ok(match self.expanders.entry((path.to_path_buf(), time)) { - Entry::Vacant(v) => { - v.insert(dylib::Expander::new(path).map_err(|err| { - format!("Cannot create expander for {}: {err}", path.display()) - })?) - } + Entry::Vacant(v) => v.insert( + dylib::Expander::new(path) + .map_err(|err| format!("Cannot create expander for {path}: {err}",))?, + ), Entry::Occupied(e) => e.into_mut(), }) } @@ -306,6 +304,6 @@ impl Drop for EnvSnapshot { mod tests; #[cfg(test)] -pub fn proc_macro_test_dylib_path() -> std::path::PathBuf { +pub fn proc_macro_test_dylib_path() -> paths::Utf8PathBuf { proc_macro_test::PROC_MACRO_TEST_LOCATION.into() } diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs index 686d5b0438aa9..631fd84aa248a 100644 --- a/crates/proc-macro-srv/src/proc_macros.rs +++ b/crates/proc-macro-srv/src/proc_macros.rs @@ -108,7 +108,7 @@ impl ProcMacros { (trait_name.to_string(), ProcMacroKind::CustomDerive) } bridge::client::ProcMacro::Bang { name, .. } => { - (name.to_string(), ProcMacroKind::FuncLike) + (name.to_string(), ProcMacroKind::Bang) } bridge::client::ProcMacro::Attr { name, .. 
} => { (name.to_string(), ProcMacroKind::Attr) diff --git a/crates/proc-macro-srv/src/server/token_stream.rs b/crates/proc-macro-srv/src/server/token_stream.rs index 408db60e872be..b1a448427c603 100644 --- a/crates/proc-macro-srv/src/server/token_stream.rs +++ b/crates/proc-macro-srv/src/server/token_stream.rs @@ -101,6 +101,8 @@ pub(super) struct TokenStreamBuilder { /// pub(super)lic implementation details for the `TokenStream` type, such as iterators. pub(super) mod token_stream { + use core::fmt; + use super::{TokenStream, TokenTree}; /// An iterator over `TokenStream`'s `TokenTree`s. @@ -122,7 +124,7 @@ pub(super) mod token_stream { /// /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to /// change these errors into `LexError`s later. - impl TokenStream { + impl TokenStream { pub(crate) fn from_str(src: &str, call_site: S) -> Result, String> { let subtree = mbe::parse_to_token_tree_static_span(call_site, src).ok_or("lexing error")?; diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index 11b008fc0b4bc..6334282538089 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -254,14 +254,14 @@ fn list_test_macros() { let res = list().join("\n"); expect![[r#" - fn_like_noop [FuncLike] - fn_like_panic [FuncLike] - fn_like_error [FuncLike] - fn_like_clone_tokens [FuncLike] - fn_like_mk_literals [FuncLike] - fn_like_mk_idents [FuncLike] - fn_like_span_join [FuncLike] - fn_like_span_ops [FuncLike] + fn_like_noop [Bang] + fn_like_panic [Bang] + fn_like_error [Bang] + fn_like_clone_tokens [Bang] + fn_like_mk_literals [Bang] + fn_like_mk_idents [Bang] + fn_like_span_join [Bang] + fn_like_span_ops [Bang] attr_noop [Attr] attr_panic [Attr] attr_error [Attr] diff --git a/crates/project-model/Cargo.toml b/crates/project-model/Cargo.toml index 924a4a89e216a..097ee1f75cd75 100644 --- a/crates/project-model/Cargo.toml +++ b/crates/project-model/Cargo.toml @@ -25,8 +25,9 @@ itertools.workspace = true # local deps base-db.workspace = true +span.workspace = true cfg.workspace = true -paths.workspace = true +paths = { workspace = true, features = ["serde1"] } stdx.workspace = true toolchain.workspace = true diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index 709fc03717471..d40eb26063de8 100644 --- a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -77,7 +77,7 @@ impl WorkspaceBuildScripts { cmd.args(&config.extra_args); cmd.arg("--manifest-path"); - cmd.arg(workspace_root.join("Cargo.toml").as_os_str()); + cmd.arg(workspace_root.join("Cargo.toml")); if let Some(target_dir) = &config.target_dir { cmd.arg("--target-dir").arg(target_dir); @@ -354,16 +354,11 @@ impl WorkspaceBuildScripts { } // cargo_metadata crate returns default (empty) path for // older cargos, which is not absolute, so work around that. - let out_dir = mem::take(&mut message.out_dir).into_os_string(); - if !out_dir.is_empty() { - let out_dir = AbsPathBuf::assert(PathBuf::from(out_dir)); + let out_dir = mem::take(&mut message.out_dir); + if !out_dir.as_str().is_empty() { + let out_dir = AbsPathBuf::assert(out_dir); // inject_cargo_env(package, package_build_data); - // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! 
and option_env!() - if let Some(out_dir) = - out_dir.as_os_str().to_str().map(|s| s.to_owned()) - { - data.envs.push(("OUT_DIR".to_owned(), out_dir)); - } + data.envs.push(("OUT_DIR".to_owned(), out_dir.as_str().to_owned())); data.out_dir = Some(out_dir); data.cfgs = cfgs; } @@ -377,8 +372,8 @@ impl WorkspaceBuildScripts { if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name)) { - let filename = AbsPathBuf::assert(PathBuf::from(&filename)); - data.proc_macro_dylib_path = Some(filename); + let filename = AbsPath::assert(filename); + data.proc_macro_dylib_path = Some(filename.to_owned()); } } }); diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs index 53b41ea1e87b6..51c1b094f7b37 100644 --- a/crates/project-model/src/cargo_workspace.rs +++ b/crates/project-model/src/cargo_workspace.rs @@ -1,17 +1,16 @@ //! See [`CargoWorkspace`]. use std::ops; -use std::path::PathBuf; use std::str::from_utf8; use anyhow::Context; -use base_db::Edition; use cargo_metadata::{CargoOpt, MetadataCommand}; use la_arena::{Arena, Idx}; -use paths::{AbsPath, AbsPathBuf}; +use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use serde::Deserialize; use serde_json::from_value; +use span::Edition; use toolchain::Tool; use crate::{utf8_stdout, InvocationLocation, ManifestPath, Sysroot}; @@ -100,7 +99,7 @@ pub struct CargoConfig { pub invocation_strategy: InvocationStrategy, pub invocation_location: InvocationLocation, /// Optional path to use instead of `target` when building - pub target_dir: Option, + pub target_dir: Option, } pub type Package = Idx; @@ -262,7 +261,7 @@ impl CargoWorkspace { } } } - meta.current_dir(current_dir.as_os_str()); + meta.current_dir(current_dir); let mut other_options = vec![]; // cargo metadata only supports a subset of flags of what cargo usually accepts, and usually @@ -351,7 +350,7 @@ impl CargoWorkspace { id: id.repr.clone(), name, version, - manifest: AbsPathBuf::assert(manifest_path.into()).try_into().unwrap(), + manifest: AbsPathBuf::assert(manifest_path).try_into().unwrap(), targets: Vec::new(), is_local, is_member, @@ -370,7 +369,7 @@ impl CargoWorkspace { let tgt = targets.alloc(TargetData { package: pkg, name, - root: AbsPathBuf::assert(src_path.into()), + root: AbsPathBuf::assert(src_path), kind: TargetKind::new(&kind), required_features, }); @@ -393,11 +392,9 @@ impl CargoWorkspace { packages[source].active_features.extend(node.features); } - let workspace_root = - AbsPathBuf::assert(PathBuf::from(meta.workspace_root.into_os_string())); + let workspace_root = AbsPathBuf::assert(meta.workspace_root); - let target_directory = - AbsPathBuf::assert(PathBuf::from(meta.target_directory.into_os_string())); + let target_directory = AbsPathBuf::assert(meta.target_directory); CargoWorkspace { packages, targets, workspace_root, target_directory } } @@ -409,7 +406,7 @@ impl CargoWorkspace { pub fn target_by_root(&self, root: &AbsPath) -> Option { self.packages() .filter(|&pkg| self[pkg].is_member) - .find_map(|pkg| self[pkg].targets.iter().find(|&&it| &self[it].root == root)) + .find_map(|pkg| self[pkg].targets.iter().find(|&&it| self[it].root == root)) .copied() } diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs index 5b91f5d805893..28696aa327759 100644 --- a/crates/project-model/src/lib.rs +++ b/crates/project-model/src/lib.rs @@ -127,8 +127,8 @@ impl ProjectManifest { .filter_map(Result::ok) .map(|it| it.path().join("Cargo.toml")) .filter(|it| 
it.exists()) - .map(AbsPathBuf::assert) - .filter_map(|it| it.try_into().ok()) + .map(AbsPathBuf::try_from) + .filter_map(|it| it.ok()?.try_into().ok()) .collect() } } diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs index fba0aaa8ce9f4..512588cc8f8f3 100644 --- a/crates/project-model/src/project_json.rs +++ b/crates/project-model/src/project_json.rs @@ -49,12 +49,12 @@ //! user explores them belongs to that extension (it's totally valid to change //! rust-project.json over time via configuration request!) -use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, Edition}; +use base_db::{CrateDisplayName, CrateId, CrateName, Dependency}; use la_arena::RawIdx; -use paths::{AbsPath, AbsPathBuf}; +use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::FxHashMap; use serde::{de, Deserialize}; -use std::path::PathBuf; +use span::Edition; use crate::cfg_flag::CfgFlag; @@ -113,7 +113,7 @@ impl ProjectJson { .unwrap_or_else(|| root_module.starts_with(base)); let (include, exclude) = match crate_data.source { Some(src) => { - let absolutize = |dirs: Vec| { + let absolutize = |dirs: Vec| { dirs.into_iter().map(absolutize_on_base).collect::>() }; (absolutize(src.include_dirs), absolutize(src.exclude_dirs)) @@ -176,15 +176,15 @@ impl ProjectJson { #[derive(Deserialize, Debug, Clone)] pub struct ProjectJsonData { - sysroot: Option, - sysroot_src: Option, + sysroot: Option, + sysroot_src: Option, crates: Vec, } #[derive(Deserialize, Debug, Clone)] struct CrateData { display_name: Option, - root_module: PathBuf, + root_module: Utf8PathBuf, edition: EditionData, #[serde(default)] version: Option, @@ -194,7 +194,7 @@ struct CrateData { target: Option, #[serde(default)] env: FxHashMap, - proc_macro_dylib_path: Option, + proc_macro_dylib_path: Option, is_workspace_member: Option, source: Option, #[serde(default)] @@ -238,8 +238,8 @@ struct DepData { #[derive(Deserialize, Debug, Clone)] struct CrateSource { - include_dirs: Vec, - exclude_dirs: Vec, + include_dirs: Vec, + exclude_dirs: Vec, } fn deserialize_crate_name<'de, D>(de: D) -> std::result::Result diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index 3127bae8b0c9e..1142d6243d29a 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -4,13 +4,13 @@ //! but we can't process `.rlib` and need source code instead. The source code //! is typically installed with `rustup component add rust-src` command. 
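These project-json hunks are where the `serde1` feature added to the paths crate earlier in the patch pays off: with camino's serde support enabled, `rust-project.json`-style data deserializes directly into `Utf8PathBuf`, so non-UTF-8 paths are rejected at the boundary instead of surfacing later. A hedged sketch of that round trip; it assumes `camino` with its `serde1` feature plus `serde` and `serde_json`, and `CrateDataSketch` is not the real struct:

    use camino::Utf8PathBuf;
    use serde::Deserialize;

    /// Stand-in for a crate description with UTF-8 path fields.
    #[derive(Debug, Deserialize)]
    struct CrateDataSketch {
        root_module: Utf8PathBuf,
        proc_macro_dylib_path: Option<Utf8PathBuf>,
    }

    fn main() {
        let json = r#"{ "root_module": "src/lib.rs", "proc_macro_dylib_path": null }"#;
        let data: CrateDataSketch = serde_json::from_str(json).unwrap();
        assert_eq!(data.root_module, Utf8PathBuf::from("src/lib.rs"));
        assert!(data.proc_macro_dylib_path.is_none());
    }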
-use std::{env, fs, iter, ops, path::PathBuf, process::Command, sync::Arc}; +use std::{env, fs, iter, ops, process::Command, sync::Arc}; use anyhow::{format_err, Result}; use base_db::CrateName; use itertools::Itertools; use la_arena::{Arena, Idx}; -use paths::{AbsPath, AbsPathBuf}; +use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::FxHashMap; use toolchain::{probe_for_binary, Tool}; @@ -419,7 +419,7 @@ fn discover_sysroot_dir( rustc.current_dir(current_dir).args(["--print", "sysroot"]); tracing::debug!("Discovering sysroot by {:?}", rustc); let stdout = utf8_stdout(rustc)?; - Ok(AbsPathBuf::assert(PathBuf::from(stdout))) + Ok(AbsPathBuf::assert(Utf8PathBuf::from(stdout))) } fn discover_sysroot_src_dir(sysroot_path: &AbsPathBuf) -> Option { diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index b9b1b701f6d40..fc0b507b33280 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -1,12 +1,9 @@ -use std::{ - ops::Deref, - path::{Path, PathBuf}, -}; +use std::ops::Deref; use base_db::{CrateGraph, FileId, ProcMacroPaths}; use cfg::{CfgAtom, CfgDiff}; use expect_test::{expect_file, ExpectFile}; -use paths::{AbsPath, AbsPathBuf}; +use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf}; use rustc_hash::FxHashMap; use serde::de::DeserializeOwned; use triomphe::Arc; @@ -113,17 +110,16 @@ fn replace_root(s: &mut String, direction: bool) { fn replace_fake_sys_root(s: &mut String) { let fake_sysroot_path = get_test_path("fake-sysroot"); let fake_sysroot_path = if cfg!(windows) { - let normalized_path = - fake_sysroot_path.to_str().expect("expected str").replace('\\', r#"\\"#); + let normalized_path = fake_sysroot_path.as_str().replace('\\', r#"\\"#); format!(r#"{}\\"#, normalized_path) } else { - format!("{}/", fake_sysroot_path.to_str().expect("expected str")) + format!("{}/", fake_sysroot_path.as_str()) }; *s = s.replace(&fake_sysroot_path, "$FAKESYSROOT$") } -fn get_test_path(file: &str) -> PathBuf { - let base = PathBuf::from(env!("CARGO_MANIFEST_DIR")); +fn get_test_path(file: &str) -> Utf8PathBuf { + let base = Utf8PathBuf::from(env!("CARGO_MANIFEST_DIR")); base.join("test_data").join(file) } @@ -139,7 +135,7 @@ fn get_fake_sysroot() -> Sysroot { fn rooted_project_json(data: ProjectJsonData) -> ProjectJson { let mut root = "$ROOT$".to_owned(); replace_root(&mut root, true); - let path = Path::new(&root); + let path = Utf8Path::new(&root); let base = AbsPath::assert(path); ProjectJson::new(base, data) } @@ -268,7 +264,7 @@ fn smoke_test_real_sysroot_cargo() { let cargo_workspace = CargoWorkspace::new(meta); let sysroot = Ok(Sysroot::discover( - AbsPath::assert(Path::new(env!("CARGO_MANIFEST_DIR"))), + AbsPath::assert(Utf8Path::new(env!("CARGO_MANIFEST_DIR"))), &Default::default(), true, ) diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 1a138b17bad41..b8c5885108d3d 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -6,13 +6,14 @@ use std::{collections::VecDeque, fmt, fs, iter, str::FromStr, sync}; use anyhow::{format_err, Context}; use base_db::{ - CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, - FileId, LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult, + CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env, FileId, + LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult, }; use cfg::{CfgAtom, CfgDiff, CfgOptions}; use paths::{AbsPath, AbsPathBuf}; 
use rustc_hash::{FxHashMap, FxHashSet}; use semver::Version; +use span::Edition; use stdx::always; use toolchain::Tool; use triomphe::Arc; @@ -718,19 +719,22 @@ impl ProjectWorkspace { ) -> (CrateGraph, ProcMacroPaths) { let _p = tracing::span!(tracing::Level::INFO, "ProjectWorkspace::to_crate_graph").entered(); - let (mut crate_graph, proc_macros) = match self { + let ((mut crate_graph, proc_macros), sysroot) = match self { ProjectWorkspace::Json { project, sysroot, rustc_cfg, toolchain: _, target_layout: _, - } => project_json_to_crate_graph( - rustc_cfg.clone(), - load, - project, - sysroot.as_ref().ok(), - extra_env, + } => ( + project_json_to_crate_graph( + rustc_cfg.clone(), + load, + project, + sysroot.as_ref().ok(), + extra_env, + ), + sysroot, ), ProjectWorkspace::Cargo { cargo, @@ -742,14 +746,17 @@ impl ProjectWorkspace { toolchain: _, target_layout: _, cargo_config_extra_env: _, - } => cargo_to_crate_graph( - load, - rustc.as_ref().map(|a| a.as_ref()).ok(), - cargo, - sysroot.as_ref().ok(), - rustc_cfg.clone(), - cfg_overrides, - build_scripts, + } => ( + cargo_to_crate_graph( + load, + rustc.as_ref().map(|a| a.as_ref()).ok(), + cargo, + sysroot.as_ref().ok(), + rustc_cfg.clone(), + cfg_overrides, + build_scripts, + ), + sysroot, ), ProjectWorkspace::DetachedFiles { files, @@ -757,11 +764,20 @@ impl ProjectWorkspace { rustc_cfg, toolchain: _, target_layout: _, - } => { - detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot.as_ref().ok()) - } + } => ( + detached_files_to_crate_graph( + rustc_cfg.clone(), + load, + files, + sysroot.as_ref().ok(), + ), + sysroot, + ), }; - if crate_graph.patch_cfg_if() { + + if matches!(sysroot.as_ref().map(|it| it.mode()), Ok(SysrootMode::Workspace(_))) + && crate_graph.patch_cfg_if() + { tracing::debug!("Patched std to depend on cfg-if") } else { tracing::debug!("Did not patch std to depend on cfg-if") @@ -1077,6 +1093,8 @@ fn cargo_to_crate_graph( } } + let mut delayed_dev_deps = vec![]; + // Now add a dep edge from all targets of upstream to the lib // target of downstream. for pkg in cargo.packages() { @@ -1091,11 +1109,31 @@ fn cargo_to_crate_graph( continue; } + // If the dependency is a dev-dependency with both crates being member libraries of + // the workspace we delay adding the edge. The reason can be read up on in + // https://github.com/rust-lang/rust-analyzer/issues/14167 + // but in short, such an edge is able to cause some form of cycle in the crate graph + // for normal dependencies. If we do run into a cycle like this, we want to prefer + // the non dev-dependency edge, and so the easiest way to do that is by adding the + // dev-dependency edges last. + if dep.kind == DepKind::Dev + && matches!(kind, TargetKind::Lib { .. 
}) + && cargo[dep.pkg].is_member + && cargo[pkg].is_member + { + delayed_dev_deps.push((from, name.clone(), to)); + continue; + } + add_dep(crate_graph, from, name.clone(), to) } } } + for (from, name, to) in delayed_dev_deps { + add_dep(crate_graph, from, name, to); + } + if has_private { // If the user provided a path to rustc sources, we add all the rustc_private crates // and create dependencies on them for the crates which opt-in to that @@ -1151,7 +1189,6 @@ fn detached_files_to_crate_graph( }; let display_name = detached_file .file_stem() - .and_then(|os_str| os_str.to_str()) .map(|file_stem| CrateDisplayName::from_canonical_name(file_stem.to_owned())); let detached_file_crate = crate_graph.add_crate_root( file_id, @@ -1228,6 +1265,7 @@ fn handle_rustc_crates( let kind @ TargetKind::Lib { is_proc_macro } = rustc_workspace[tgt].kind else { continue; }; + let pkg_crates = &mut rustc_pkg_crates.entry(pkg).or_insert_with(Vec::new); if let Some(file_id) = load(&rustc_workspace[tgt].root) { let crate_id = add_target_crate_root( crate_graph, @@ -1246,7 +1284,7 @@ fn handle_rustc_crates( if let Some(proc_macro) = libproc_macro { add_proc_macro_dep(crate_graph, crate_id, proc_macro, is_proc_macro); } - rustc_pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id); + pkg_crates.push(crate_id); } } } @@ -1533,7 +1571,7 @@ fn inject_cargo_env(package: &PackageData, env: &mut Env) { // CARGO_BIN_NAME, CARGO_BIN_EXE_ let manifest_dir = package.manifest.parent(); - env.set("CARGO_MANIFEST_DIR", manifest_dir.as_os_str().to_string_lossy().into_owned()); + env.set("CARGO_MANIFEST_DIR", manifest_dir.as_str().to_owned()); // Not always right, but works for common cases. env.set("CARGO", "cargo".into()); diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index e6984d6f41b7f..6d70124188d9c 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -43,6 +43,7 @@ nohash-hasher.workspace = true always-assert = "0.2.0" walkdir = "2.3.2" semver.workspace = true +memchr = "2.7.1" cfg.workspace = true flycheck.workspace = true @@ -63,7 +64,7 @@ parser.workspace = true toolchain.workspace = true vfs-notify.workspace = true vfs.workspace = true -memchr = "2.7.1" +paths.workspace = true [target.'cfg(windows)'.dependencies] winapi = "0.3.9" diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index e747ec87b1c18..78920f3abace9 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs @@ -190,7 +190,7 @@ fn run_server() -> anyhow::Result<()> { Some(it) => it, None => { let cwd = env::current_dir()?; - AbsPathBuf::assert(cwd) + AbsPathBuf::assert_utf8(cwd) } }; diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 5c474908e7aed..fdd77199aa004 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -70,7 +70,7 @@ impl flags::AnalysisStats { let mut db_load_sw = self.stop_watch(); - let path = AbsPathBuf::assert(env::current_dir()?.join(&self.path)); + let path = AbsPathBuf::assert_utf8(env::current_dir()?.join(&self.path)); let manifest = ProjectManifest::discover_single(&path)?; let mut workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?; @@ -279,7 +279,8 @@ impl flags::AnalysisStats { let mut all = 0; let mut fail = 0; for &a in adts { - if db.generic_params(a.into()).iter().next().is_some() { + let generic_params = db.generic_params(a.into()); 
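Stepping back to the crate-graph hunk just above: dev-dependency edges between workspace member libraries are collected into `delayed_dev_deps` and only added after every normal edge, so if one of them would close a cycle it is the dev edge that gets rejected rather than a real dependency. A toy cycle-checking graph showing why insertion order decides which edge survives (hypothetical mini API, not rust-analyzer's `CrateGraph`):

    use std::collections::{HashMap, HashSet};

    #[derive(Default)]
    struct Graph {
        edges: HashMap<&'static str, HashSet<&'static str>>,
    }

    impl Graph {
        /// Is `to` reachable from `from` by following existing edges?
        fn reaches(&self, from: &str, to: &str) -> bool {
            from == to
                || self
                    .edges
                    .get(from)
                    .is_some_and(|succ| succ.iter().any(|&n| self.reaches(n, to)))
        }

        /// Add `from -> to` unless it would create a cycle.
        fn add_dep(&mut self, from: &'static str, to: &'static str) -> bool {
            if self.reaches(to, from) {
                return false; // the later edge is the one that gets dropped
            }
            self.edges.entry(from).or_default().insert(to);
            true
        }
    }

    fn main() {
        let mut g = Graph::default();
        // Normal dependency first: lib_a -> lib_b.
        assert!(g.add_dep("lib_a", "lib_b"));
        // Dev-dependency last: lib_b (tests) -> lib_a would be a cycle, so
        // deferring it means the dev edge is the one rejected.
        assert!(!g.add_dep("lib_b", "lib_a"));
    }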
+ if generic_params.iter().next().is_some() || generic_params.iter_lt().next().is_some() { // Data types with generics don't have layout. continue; } diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs index bd2646126dcbf..79d6226debf99 100644 --- a/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/crates/rust-analyzer/src/cli/diagnostics.rs @@ -16,7 +16,7 @@ impl flags::Diagnostics { let cargo_config = CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; let with_proc_macro_server = if let Some(p) = &self.proc_macro_srv { - let path = vfs::AbsPathBuf::assert(std::env::current_dir()?.join(p)); + let path = vfs::AbsPathBuf::assert_utf8(std::env::current_dir()?.join(p)); ProcMacroServerChoice::Explicit(path) } else { ProcMacroServerChoice::Sysroot diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs index 3f68c5d053b7b..b3b8ab9a40499 100644 --- a/crates/rust-analyzer/src/cli/flags.rs +++ b/crates/rust-analyzer/src/cli/flags.rs @@ -235,6 +235,7 @@ pub struct RunTests { #[derive(Debug)] pub struct RustcTests { pub rustc_repo: PathBuf, + pub filter: Option, } diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs index f3f5ec1ebde2f..3ff9be7102fbc 100644 --- a/crates/rust-analyzer/src/cli/lsif.rs +++ b/crates/rust-analyzer/src/cli/lsif.rs @@ -283,7 +283,7 @@ impl flags::Lsif { with_proc_macro_server: ProcMacroServerChoice::Sysroot, prefill_caches: false, }; - let path = AbsPathBuf::assert(env::current_dir()?.join(self.path)); + let path = AbsPathBuf::assert_utf8(env::current_dir()?.join(self.path)); let manifest = ProjectManifest::discover_single(&path)?; let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?; diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs index 84f2e6008746b..eeec13a14bee5 100644 --- a/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -76,7 +76,7 @@ impl Tester { ); let workspace = ProjectWorkspace::DetachedFiles { - files: vec![tmp_file.clone()], + files: vec![tmp_file], sysroot, rustc_cfg: vec![], toolchain: None, diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index 1061a433a58f9..aef2c1be22494 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ b/crates/rust-analyzer/src/cli/scip.rs @@ -27,7 +27,8 @@ impl flags::Scip { with_proc_macro_server: ProcMacroServerChoice::Sysroot, prefill_caches: true, }; - let root = vfs::AbsPathBuf::assert(std::env::current_dir()?.join(&self.path)).normalize(); + let root = + vfs::AbsPathBuf::assert_utf8(std::env::current_dir()?.join(&self.path)).normalize(); let mut config = crate::config::Config::new( root.clone(), @@ -63,12 +64,7 @@ impl flags::Scip { special_fields: Default::default(), }) .into(), - project_root: format!( - "file://{}", - root.as_os_str() - .to_str() - .ok_or(anyhow::format_err!("Unable to normalize project_root path"))? 
- ), + project_root: format!("file://{root}"), text_document_encoding: scip_types::TextEncoding::UTF8.into(), special_fields: Default::default(), }; @@ -216,7 +212,7 @@ fn get_relative_filepath( rootpath: &vfs::AbsPathBuf, file_id: ide::FileId, ) -> Option { - Some(vfs.file_path(file_id).as_path()?.strip_prefix(rootpath)?.as_ref().to_str()?.to_owned()) + Some(vfs.file_path(file_id).as_path()?.strip_prefix(rootpath)?.as_str().to_owned()) } // SCIP Ranges have a (very large) optimization that ranges if they are on the same line diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index cbf1524659090..7475a8e6e6d96 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -7,11 +7,7 @@ //! configure the server itself, feature flags are passed into analysis, and //! tweak things like automatic insertion of `()` in completions. -use std::{ - fmt, iter, - ops::Not, - path::{Path, PathBuf}, -}; +use std::{fmt, iter, ops::Not}; use cfg::{CfgAtom, CfgDiff}; use flycheck::FlycheckConfig; @@ -27,6 +23,7 @@ use ide_db::{ }; use itertools::Itertools; use lsp_types::{ClientCapabilities, MarkupKind}; +use paths::{Utf8Path, Utf8PathBuf}; use project_model::{ CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustLibSource, }; @@ -327,7 +324,7 @@ config_data! { /// These directories will be ignored by rust-analyzer. They are /// relative to the workspace root, and globs are not supported. You may /// also need to add the folders to Code's `files.watcherExclude`. - files_excludeDirs: Vec = "[]", + files_excludeDirs: Vec = "[]", /// Controls file watching implementation. files_watcher: FilesWatcherDef = "\"client\"", @@ -378,6 +375,8 @@ config_data! { /// How to render the size information in a memory layout hover. hover_memoryLayout_size: Option = "\"both\"", + /// How many fields of a struct to display when hovering a struct. + hover_show_structFields: Option = "null", /// How many associated items of a trait to display when hovering a trait. hover_show_traitAssocItems: Option = "null", @@ -516,7 +515,7 @@ config_data! { /// This config takes a map of crate names with the exported proc-macro names to ignore as values. procMacro_ignored: FxHashMap, Box<[Box]>> = "{}", /// Internal config, path to proc-macro server executable. - procMacro_server: Option = "null", + procMacro_server: Option = "null", /// Exclude imports from find-all-references. references_excludeImports: bool = "false", @@ -864,7 +863,7 @@ impl Config { } let mut errors = Vec::new(); self.detached_files = - get_field::>(&mut json, &mut errors, "detachedFiles", None, "[]") + get_field::>(&mut json, &mut errors, "detachedFiles", None, "[]") .into_iter() .map(AbsPathBuf::assert) .collect(); @@ -956,7 +955,7 @@ impl Config { pub fn has_linked_projects(&self) -> bool { !self.data.linkedProjects.is_empty() } - pub fn linked_manifests(&self) -> impl Iterator + '_ { + pub fn linked_manifests(&self) -> impl Iterator + '_ { self.data.linkedProjects.iter().filter_map(|it| match it { ManifestOrProjectJson::Manifest(p) => Some(&**p), ManifestOrProjectJson::ProjectJson(_) => None, @@ -1014,6 +1013,17 @@ impl Config { ) } + pub fn did_change_watched_files_relative_pattern_support(&self) -> bool { + try_or_def!( + self.caps + .workspace + .as_ref()? + .did_change_watched_files + .as_ref()? + .relative_pattern_support? 
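(The new `did_change_watched_files_relative_pattern_support` accessor above only chains through three optional layers of `lsp_types::ClientCapabilities`. A standalone restatement of that lookup without the `try_or_def!` macro; the field names come from the hunk above, the free function name is made up for illustration.)

use lsp_types::ClientCapabilities;

// True only when the client advertises
// workspace.didChangeWatchedFiles.relativePatternSupport.
fn supports_relative_patterns(caps: &ClientCapabilities) -> bool {
    (|| -> Option<bool> {
        caps.workspace
            .as_ref()?
            .did_change_watched_files
            .as_ref()?
            .relative_pattern_support
    })()
    .unwrap_or(false)
}

fn main() {
    // A default (empty) capability set reports no support.
    assert!(!supports_relative_patterns(&ClientCapabilities::default()));
}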
+ ) + } + pub fn prefill_caches(&self) -> bool { self.data.cachePriming_enable } @@ -1410,9 +1420,11 @@ impl Config { } } - fn target_dir_from_config(&self) -> Option { + fn target_dir_from_config(&self) -> Option { self.data.cargo_targetDir.as_ref().and_then(|target_dir| match target_dir { - TargetDirectory::UseSubdirectory(true) => Some(PathBuf::from("target/rust-analyzer")), + TargetDirectory::UseSubdirectory(true) => { + Some(Utf8PathBuf::from("target/rust-analyzer")) + } TargetDirectory::UseSubdirectory(false) => None, TargetDirectory::Directory(dir) if dir.is_relative() => Some(dir.clone()), TargetDirectory::Directory(_) => None, @@ -1691,6 +1703,7 @@ impl Config { }, keywords: self.data.hover_documentation_keywords_enable, max_trait_assoc_items_count: self.data.hover_show_traitAssocItems, + max_struct_field_count: self.data.hover_show_structFields, } } @@ -1951,7 +1964,7 @@ where #[derive(Deserialize, Debug, Clone)] #[serde(untagged)] enum ManifestOrProjectJson { - Manifest(PathBuf), + Manifest(Utf8PathBuf), ProjectJson(ProjectJsonData), } @@ -2134,7 +2147,7 @@ pub enum MemoryLayoutHoverRenderKindDef { #[serde(untagged)] pub enum TargetDirectory { UseSubdirectory(bool), - Directory(PathBuf), + Directory(Utf8PathBuf), } macro_rules! _config_data { @@ -2263,7 +2276,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "type": "array", "items": { "type": "string" }, }, - "Vec" => set! { + "Vec" => set! { "type": "array", "items": { "type": "string" }, }, @@ -2291,7 +2304,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "Option" => set! { "type": ["null", "string"], }, - "Option" => set! { + "Option" => set! { "type": ["null", "string"], }, "Option" => set! { @@ -2774,7 +2787,7 @@ mod tests { .unwrap(); assert_eq!(config.data.cargo_targetDir, Some(TargetDirectory::UseSubdirectory(true))); assert!( - matches!(config.flycheck(), FlycheckConfig::CargoCommand { target_dir, .. } if target_dir == Some(PathBuf::from("target/rust-analyzer"))) + matches!(config.flycheck(), FlycheckConfig::CargoCommand { target_dir, .. } if target_dir == Some(Utf8PathBuf::from("target/rust-analyzer"))) ); } @@ -2793,10 +2806,10 @@ mod tests { .unwrap(); assert_eq!( config.data.cargo_targetDir, - Some(TargetDirectory::Directory(PathBuf::from("other_folder"))) + Some(TargetDirectory::Directory(Utf8PathBuf::from("other_folder"))) ); assert!( - matches!(config.flycheck(), FlycheckConfig::CargoCommand { target_dir, .. } if target_dir == Some(PathBuf::from("other_folder"))) + matches!(config.flycheck(), FlycheckConfig::CargoCommand { target_dir, .. 
} if target_dir == Some(Utf8PathBuf::from("other_folder"))) ); } } diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index 6e6cc53c251c4..7c4deac93f21f 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -519,14 +519,13 @@ fn clippy_code_description(code: Option<&str>) -> Option anyhow::Result<()> { - for change in params.changes { + for change in params.changes.iter().unique_by(|&it| &it.uri) { if let Ok(path) = from_proto::abs_path(&change.uri) { state.loader.handle.invalidate(path); } diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index 1d98457add330..77692ed3ae767 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -4,7 +4,6 @@ use std::{ fs, io::Write as _, - path::PathBuf, process::{self, Stdio}, }; @@ -12,8 +11,8 @@ use anyhow::Context; use ide::{ AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange, - HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, RangeLimit, - ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, + HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory, + Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, }; use ide_db::SymbolKind; use itertools::Itertools; @@ -27,6 +26,7 @@ use lsp_types::{ SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation, SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit, }; +use paths::Utf8PathBuf; use project_model::{ManifestPath, ProjectWorkspace, TargetKind}; use serde_json::json; use stdx::{format_to, never}; @@ -238,9 +238,12 @@ pub(crate) fn handle_discover_test( let (tests, scope) = match params.test_id { Some(id) => { let crate_id = id.split_once("::").map(|it| it.0).unwrap_or(&id); - (snap.analysis.discover_tests_in_crate_by_test_id(crate_id)?, vec![crate_id.to_owned()]) + ( + snap.analysis.discover_tests_in_crate_by_test_id(crate_id)?, + Some(vec![crate_id.to_owned()]), + ) } - None => (snap.analysis.discover_test_roots()?, vec![]), + None => (snap.analysis.discover_test_roots()?, None), }; for t in &tests { hack_recover_crate_name::insert_name(t.id.clone()); @@ -248,12 +251,13 @@ pub(crate) fn handle_discover_test( Ok(lsp_ext::DiscoverTestResults { tests: tests .into_iter() - .map(|t| { + .filter_map(|t| { let line_index = t.file.and_then(|f| snap.file_line_index(f).ok()); to_proto::test_item(&snap, t, line_index.as_ref()) }) .collect(), scope, + scope_file: None, }) } @@ -1465,7 +1469,7 @@ pub(crate) fn handle_inlay_hints( let inlay_hints_config = snap.config.inlay_hints(); Ok(Some( snap.analysis - .inlay_hints(&inlay_hints_config, file_id, Some(RangeLimit::Fixed(range)))? + .inlay_hints(&inlay_hints_config, file_id, Some(range))? 
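(Note on the `handle_did_change_watched_files` hunk earlier in this chunk: change events are now de-duplicated by URI with itertools' `unique_by` before the loader is asked to invalidate paths. A minimal illustration of `unique_by`; the `Change` struct is a stand-in for the LSP event type and itertools is assumed as a dependency.)

use itertools::Itertools;

#[derive(Debug)]
struct Change {
    uri: String,
}

fn main() {
    let changes = vec![
        Change { uri: "file:///a.rs".to_owned() },
        Change { uri: "file:///a.rs".to_owned() },
        Change { uri: "file:///b.rs".to_owned() },
    ];
    // Keeps only the first event seen for each URI, mirroring
    // params.changes.iter().unique_by(|&it| &it.uri) in the patch.
    let unique: Vec<&Change> = changes.iter().unique_by(|it| &it.uri).collect();
    assert_eq!(unique.len(), 2);
}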
.into_iter() .map(|it| { to_proto::inlay_hint( @@ -1499,10 +1503,11 @@ pub(crate) fn handle_inlay_hints_resolve( let hint_position = from_proto::offset(&line_index, original_hint.position)?; let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints(); forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty(); - let resolve_hints = snap.analysis.inlay_hints( + let resolve_hints = snap.analysis.inlay_hints_resolve( &forced_resolve_inlay_hints_config, file_id, - Some(RangeLimit::NearestParent(hint_position)), + hint_position, + resolve_data.hash, )?; let mut resolved_hints = resolve_hints @@ -1542,7 +1547,7 @@ pub(crate) fn handle_call_hierarchy_prepare( let RangeInfo { range: _, info: navs } = nav_info; let res = navs .into_iter() - .filter(|it| it.kind == Some(SymbolKind::Function)) + .filter(|it| matches!(it.kind, Some(SymbolKind::Function | SymbolKind::Method))) .map(|it| to_proto::call_hierarchy_item(&snap, it)) .collect::>>()?; @@ -1736,8 +1741,8 @@ pub(crate) fn handle_open_docs( _ => (None, None), }; - let sysroot = sysroot.map(|p| p.root().as_os_str()); - let target_dir = cargo.map(|cargo| cargo.target_directory()).map(|p| p.as_os_str()); + let sysroot = sysroot.map(|p| p.root().as_str()); + let target_dir = cargo.map(|cargo| cargo.target_directory()).map(|p| p.as_str()); let Ok(remote_urls) = snap.analysis.external_docs(position, target_dir, sysroot) else { return if snap.config.local_docs() { @@ -2042,7 +2047,7 @@ fn run_rustfmt( cmd } RustfmtConfig::CustomCommand { command, args } => { - let cmd = PathBuf::from(&command); + let cmd = Utf8PathBuf::from(&command); let workspace = CargoTargetSpec::for_file(snap, file_id)?; let mut cmd = match workspace { Some(spec) => { diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index 1c5a862c70356..2731e845f351e 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -52,7 +52,7 @@ fn integrated_highlighting_benchmark() { let file_id = { let file = workspace_to_load.join(file); - let path = VfsPath::from(AbsPathBuf::assert(file)); + let path = VfsPath::from(AbsPathBuf::assert_utf8(file)); vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) }; @@ -112,7 +112,7 @@ fn integrated_completion_benchmark() { let file_id = { let file = workspace_to_load.join(file); - let path = VfsPath::from(AbsPathBuf::assert(file)); + let path = VfsPath::from(AbsPathBuf::assert_utf8(file)); vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) }; @@ -274,7 +274,7 @@ fn integrated_diagnostics_benchmark() { let file_id = { let file = workspace_to_load.join(file); - let path = VfsPath::from(AbsPathBuf::assert(file)); + let path = VfsPath::from(AbsPathBuf::assert_utf8(file)); vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) }; diff --git a/crates/rust-analyzer/src/lsp/ext.rs b/crates/rust-analyzer/src/lsp/ext.rs index 710ce7f8acbb1..eac982f1b27b2 100644 --- a/crates/rust-analyzer/src/lsp/ext.rs +++ b/crates/rust-analyzer/src/lsp/ext.rs @@ -194,7 +194,8 @@ pub struct TestItem { #[serde(rename_all = "camelCase")] pub struct DiscoverTestResults { pub tests: Vec, - pub scope: Vec, + pub scope: Option>, + pub scope_file: Option>, } pub enum DiscoverTest {} @@ -800,6 +801,7 @@ pub struct CompletionResolveData { #[derive(Debug, Serialize, Deserialize)] pub struct InlayHintResolveData { pub file_id: u32, + pub hash: u64, 
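(The `hash: u64` field just added to `InlayHintResolveData` lets the server re-identify a hint during `inlayHint/resolve`: a hash of the whole hint is computed when it is first converted to LSP, see the `to_proto::inlay_hint` hunk further down, and matched again on resolve. A small sketch of producing such a value with the standard library's `BuildHasher::hash_one`; rust-analyzer hashes with `FxHasher`, the `DefaultHasher` below is only to keep the example dependency-free.)

use std::collections::hash_map::DefaultHasher;
use std::hash::{BuildHasher, BuildHasherDefault};

// Toy stand-in for the hint type; the real one lives in the ide crate.
#[derive(Hash)]
struct InlayHint {
    position: u32,
    label: String,
}

// One u64 per hint value, suitable for echoing back in the resolve data
// and comparing against freshly computed hints in the same session.
fn hint_hash(hint: &InlayHint) -> u64 {
    BuildHasherDefault::<DefaultHasher>::default().hash_one(hint)
}

fn main() {
    let hint = InlayHint { position: 42, label: ": i32".to_owned() };
    // Hashing the same value twice yields the same id, so the resolve
    // request can be matched back to its hint.
    assert_eq!(hint_hash(&hint), hint_hash(&hint));
}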
} #[derive(Debug, Serialize, Deserialize)] diff --git a/crates/rust-analyzer/src/lsp/semantic_tokens.rs b/crates/rust-analyzer/src/lsp/semantic_tokens.rs index c5081c4bea02c..3e00222b752d3 100644 --- a/crates/rust-analyzer/src/lsp/semantic_tokens.rs +++ b/crates/rust-analyzer/src/lsp/semantic_tokens.rs @@ -127,13 +127,14 @@ macro_rules! define_semantic_token_modifiers { define_semantic_token_modifiers![ standard { + ASYNC, DOCUMENTATION, DECLARATION, STATIC, DEFAULT_LIBRARY, } custom { - (ASYNC, "async"), + (ASSOCIATED, "associated"), (ATTRIBUTE_MODIFIER, "attribute"), (CALLABLE, "callable"), (CONSTANT, "constant"), diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index e77d0c13bf2d9..d8bb12528b984 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -1,7 +1,7 @@ //! Conversion of rust-analyzer specific types to lsp_types equivalents. use std::{ iter::once, - mem, path, + mem, sync::atomic::{AtomicU32, Ordering}, }; @@ -13,8 +13,9 @@ use ide::{ NavigationTarget, ReferenceCategory, RenameError, Runnable, Severity, SignatureHelp, SnippetEdit, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange, TextSize, }; -use ide_db::rust_doc::format_docs; +use ide_db::{rust_doc::format_docs, FxHasher}; use itertools::Itertools; +use paths::{Utf8Component, Utf8Prefix}; use semver::VersionReq; use serde_json::to_value; use vfs::AbsPath; @@ -52,6 +53,7 @@ pub(crate) fn range(line_index: &LineIndex, range: TextRange) -> lsp_types::Rang pub(crate) fn symbol_kind(symbol_kind: SymbolKind) -> lsp_types::SymbolKind { match symbol_kind { SymbolKind::Function => lsp_types::SymbolKind::FUNCTION, + SymbolKind::Method => lsp_types::SymbolKind::METHOD, SymbolKind::Struct => lsp_types::SymbolKind::STRUCT, SymbolKind::Enum => lsp_types::SymbolKind::ENUM, SymbolKind::Variant => lsp_types::SymbolKind::ENUM_MEMBER, @@ -122,12 +124,12 @@ pub(crate) fn completion_item_kind( CompletionItemKind::BuiltinType => lsp_types::CompletionItemKind::STRUCT, CompletionItemKind::InferredType => lsp_types::CompletionItemKind::SNIPPET, CompletionItemKind::Keyword => lsp_types::CompletionItemKind::KEYWORD, - CompletionItemKind::Method => lsp_types::CompletionItemKind::METHOD, CompletionItemKind::Snippet => lsp_types::CompletionItemKind::SNIPPET, CompletionItemKind::UnresolvedReference => lsp_types::CompletionItemKind::REFERENCE, CompletionItemKind::Expression => lsp_types::CompletionItemKind::SNIPPET, CompletionItemKind::SymbolKind(symbol) => match symbol { SymbolKind::Attribute => lsp_types::CompletionItemKind::FUNCTION, + SymbolKind::Method => lsp_types::CompletionItemKind::METHOD, SymbolKind::Const => lsp_types::CompletionItemKind::CONSTANT, SymbolKind::ConstParam => lsp_types::CompletionItemKind::TYPE_PARAMETER, SymbolKind::Derive => lsp_types::CompletionItemKind::FUNCTION, @@ -444,30 +446,42 @@ pub(crate) fn inlay_hint( fields_to_resolve: &InlayFieldsToResolve, line_index: &LineIndex, file_id: FileId, - inlay_hint: InlayHint, + mut inlay_hint: InlayHint, ) -> Cancellable { - let needs_resolve = inlay_hint.needs_resolve; - let (label, tooltip, mut something_to_resolve) = - inlay_hint_label(snap, fields_to_resolve, needs_resolve, inlay_hint.label)?; + let resolve_hash = inlay_hint.needs_resolve().then(|| { + std::hash::BuildHasher::hash_one( + &std::hash::BuildHasherDefault::::default(), + &inlay_hint, + ) + }); + let mut something_to_resolve = false; let text_edits = if snap .config .visual_studio_code_version() // 
https://github.com/microsoft/vscode/issues/193124 .map_or(true, |version| VersionReq::parse(">=1.86.0").unwrap().matches(version)) - && needs_resolve + && resolve_hash.is_some() && fields_to_resolve.resolve_text_edits { something_to_resolve |= inlay_hint.text_edit.is_some(); None } else { - inlay_hint.text_edit.map(|it| text_edit_vec(line_index, it)) + inlay_hint.text_edit.take().map(|it| text_edit_vec(line_index, it)) }; - - let data = if needs_resolve && something_to_resolve { - Some(to_value(lsp_ext::InlayHintResolveData { file_id: file_id.index() }).unwrap()) - } else { - None + let (label, tooltip) = inlay_hint_label( + snap, + fields_to_resolve, + &mut something_to_resolve, + resolve_hash.is_some(), + inlay_hint.label, + )?; + + let data = match resolve_hash { + Some(hash) if something_to_resolve => Some( + to_value(lsp_ext::InlayHintResolveData { file_id: file_id.index(), hash }).unwrap(), + ), + _ => None, }; Ok(lsp_types::InlayHint { @@ -492,15 +506,15 @@ pub(crate) fn inlay_hint( fn inlay_hint_label( snap: &GlobalStateSnapshot, fields_to_resolve: &InlayFieldsToResolve, + something_to_resolve: &mut bool, needs_resolve: bool, mut label: InlayHintLabel, -) -> Cancellable<(lsp_types::InlayHintLabel, Option, bool)> { - let mut something_to_resolve = false; +) -> Cancellable<(lsp_types::InlayHintLabel, Option)> { let (label, tooltip) = match &*label.parts { [InlayHintLabelPart { linked_location: None, .. }] => { let InlayHintLabelPart { text, tooltip, .. } = label.parts.pop().unwrap(); let hint_tooltip = if needs_resolve && fields_to_resolve.resolve_hint_tooltip { - something_to_resolve |= tooltip.is_some(); + *something_to_resolve |= tooltip.is_some(); None } else { match tooltip { @@ -524,7 +538,7 @@ fn inlay_hint_label( .into_iter() .map(|part| { let tooltip = if needs_resolve && fields_to_resolve.resolve_label_tooltip { - something_to_resolve |= part.tooltip.is_some(); + *something_to_resolve |= part.tooltip.is_some(); None } else { match part.tooltip { @@ -543,7 +557,7 @@ fn inlay_hint_label( } }; let location = if needs_resolve && fields_to_resolve.resolve_label_location { - something_to_resolve |= part.linked_location.is_some(); + *something_to_resolve |= part.linked_location.is_some(); None } else { part.linked_location.map(|range| location(snap, range)).transpose()? 
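(This rewrite of `inlay_hint_label` threads `something_to_resolve: &mut bool` through instead of returning a third tuple element, so every branch that strips a field for lazy resolution ORs into one caller-owned flag. A tiny sketch of that out-parameter pattern, with illustrative names only.)

// OR facts into a caller-owned flag instead of returning them, so several
// helpers can contribute to one "does this hint need a resolve round trip?"
// decision.
fn strip_tooltip(tooltip: Option<String>, something_to_resolve: &mut bool) -> Option<String> {
    *something_to_resolve |= tooltip.is_some();
    None // dropped now, recomputed on resolve
}

fn main() {
    let mut something_to_resolve = false;
    let stripped = strip_tooltip(Some("fn foo()".to_owned()), &mut something_to_resolve);
    assert!(stripped.is_none());
    assert!(something_to_resolve);
}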
@@ -559,7 +573,7 @@ fn inlay_hint_label( (lsp_types::InlayHintLabel::LabelParts(parts), None) } }; - Ok((label, tooltip, something_to_resolve)) + Ok((label, tooltip)) } static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1); @@ -636,8 +650,7 @@ pub(crate) fn semantic_token_delta( fn semantic_token_type_and_modifiers( highlight: Highlight, ) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) { - let mut mods = semantic_tokens::ModifierSet::default(); - let type_ = match highlight.tag { + let ty = match highlight.tag { HlTag::Symbol(symbol) => match symbol { SymbolKind::Attribute => semantic_tokens::DECORATOR, SymbolKind::Derive => semantic_tokens::DERIVE, @@ -653,22 +666,10 @@ fn semantic_token_type_and_modifiers( SymbolKind::SelfParam => semantic_tokens::SELF_KEYWORD, SymbolKind::SelfType => semantic_tokens::SELF_TYPE_KEYWORD, SymbolKind::Local => semantic_tokens::VARIABLE, - SymbolKind::Function => { - if highlight.mods.contains(HlMod::Associated) { - semantic_tokens::METHOD - } else { - semantic_tokens::FUNCTION - } - } - SymbolKind::Const => { - mods |= semantic_tokens::CONSTANT; - mods |= semantic_tokens::STATIC; - semantic_tokens::VARIABLE - } - SymbolKind::Static => { - mods |= semantic_tokens::STATIC; - semantic_tokens::VARIABLE - } + SymbolKind::Method => semantic_tokens::METHOD, + SymbolKind::Function => semantic_tokens::FUNCTION, + SymbolKind::Const => semantic_tokens::VARIABLE, + SymbolKind::Static => semantic_tokens::VARIABLE, SymbolKind::Struct => semantic_tokens::STRUCT, SymbolKind::Enum => semantic_tokens::ENUM, SymbolKind::Variant => semantic_tokens::ENUM_MEMBER, @@ -715,12 +716,14 @@ fn semantic_token_type_and_modifiers( }, }; + let mut mods = semantic_tokens::ModifierSet::default(); for modifier in highlight.mods.iter() { let modifier = match modifier { - HlMod::Associated => continue, + HlMod::Associated => semantic_tokens::ASSOCIATED, HlMod::Async => semantic_tokens::ASYNC, HlMod::Attribute => semantic_tokens::ATTRIBUTE_MODIFIER, HlMod::Callable => semantic_tokens::CALLABLE, + HlMod::Const => semantic_tokens::CONSTANT, HlMod::Consuming => semantic_tokens::CONSUMING, HlMod::ControlFlow => semantic_tokens::CONTROL_FLOW, HlMod::CrateRoot => semantic_tokens::CRATE_ROOT, @@ -742,7 +745,7 @@ fn semantic_token_type_and_modifiers( mods |= modifier; } - (type_, mods) + (ty, mods) } pub(crate) fn folding_range( @@ -814,9 +817,9 @@ pub(crate) fn url(snap: &GlobalStateSnapshot, file_id: FileId) -> lsp_types::Url /// When processing non-windows path, this is essentially the same as `Url::from_file_path`. 
pub(crate) fn url_from_abs_path(path: &AbsPath) -> lsp_types::Url { let url = lsp_types::Url::from_file_path(path).unwrap(); - match path.as_ref().components().next() { - Some(path::Component::Prefix(prefix)) - if matches!(prefix.kind(), path::Prefix::Disk(_) | path::Prefix::VerbatimDisk(_)) => + match path.components().next() { + Some(Utf8Component::Prefix(prefix)) + if matches!(prefix.kind(), Utf8Prefix::Disk(_) | Utf8Prefix::VerbatimDisk(_)) => { // Need to lowercase driver letter } @@ -1514,8 +1517,8 @@ pub(crate) fn test_item( snap: &GlobalStateSnapshot, test_item: ide::TestItem, line_index: Option<&LineIndex>, -) -> lsp_ext::TestItem { - lsp_ext::TestItem { +) -> Option { + Some(lsp_ext::TestItem { id: test_item.id, label: test_item.label, kind: match test_item.kind { @@ -1530,9 +1533,9 @@ pub(crate) fn test_item( | project_model::TargetKind::Example | project_model::TargetKind::BuildScript | project_model::TargetKind::Other => lsp_ext::TestItemKind::Package, - project_model::TargetKind::Test | project_model::TargetKind::Bench => { - lsp_ext::TestItemKind::Test - } + project_model::TargetKind::Test => lsp_ext::TestItemKind::Test, + // benches are not tests needed to be shown in the test explorer + project_model::TargetKind::Bench => return None, } } ide::TestItemKind::Module => lsp_ext::TestItemKind::Module, @@ -1548,7 +1551,7 @@ pub(crate) fn test_item( .map(|f| lsp_types::TextDocumentIdentifier { uri: url(snap, f) }), range: line_index.and_then(|l| Some(range(l, test_item.text_range?))), runnable: test_item.runnable.and_then(|r| runnable(snap, r).ok()), - } + }) } pub(crate) mod command { @@ -2728,12 +2731,12 @@ struct ProcMacro { #[test] #[cfg(target_os = "windows")] fn test_lowercase_drive_letter() { - use std::path::Path; + use paths::Utf8Path; - let url = url_from_abs_path(Path::new("C:\\Test").try_into().unwrap()); + let url = url_from_abs_path(Utf8Path::new("C:\\Test").try_into().unwrap()); assert_eq!(url.to_string(), "file:///c:/Test"); - let url = url_from_abs_path(Path::new(r#"\\localhost\C$\my_dir"#).try_into().unwrap()); + let url = url_from_abs_path(Utf8Path::new(r#"\\localhost\C$\my_dir"#).try_into().unwrap()); assert_eq!(url.to_string(), "file://localhost/C$/my_dir"); } } diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index ffe56e41435c7..38df32351256d 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -9,9 +9,8 @@ use std::{ use always_assert::always; use crossbeam_channel::{never, select, Receiver}; use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath}; -use itertools::Itertools; use lsp_server::{Connection, Notification, Request}; -use lsp_types::notification::Notification as _; +use lsp_types::{notification::Notification as _, TextDocumentIdentifier}; use stdx::thread::ThreadIntent; use vfs::FileId; @@ -533,31 +532,29 @@ impl GlobalState { let snapshot = self.snapshot(); move || { let tests = subscriptions - .into_iter() - .filter_map(|f| snapshot.analysis.crates_for(f).ok()) - .flatten() - .unique() - .filter_map(|c| snapshot.analysis.discover_tests_in_crate(c).ok()) + .iter() + .copied() + .filter_map(|f| snapshot.analysis.discover_tests_in_file(f).ok()) .flatten() .collect::>(); for t in &tests { hack_recover_crate_name::insert_name(t.id.clone()); } - let scope = tests - .iter() - .filter_map(|t| Some(t.id.split_once("::")?.0)) - .unique() - .map(|it| it.to_owned()) - .collect(); Task::DiscoverTest(lsp_ext::DiscoverTestResults { tests: tests .into_iter() - 
.map(|t| { + .filter_map(|t| { let line_index = t.file.and_then(|f| snapshot.file_line_index(f).ok()); to_proto::test_item(&snapshot, t, line_index.as_ref()) }) .collect(), - scope, + scope: None, + scope_file: Some( + subscriptions + .into_iter() + .map(|f| TextDocumentIdentifier { uri: to_proto::url(&snapshot, f) }) + .collect(), + ), }) } }); @@ -653,7 +650,7 @@ impl GlobalState { }; if let Some(state) = state { - self.report_progress("Building", state, msg, None, None); + self.report_progress("Building build-artifacts", state, msg, None, None); } } Task::LoadProcMacros(progress) => { @@ -669,7 +666,7 @@ impl GlobalState { }; if let Some(state) = state { - self.report_progress("Loading", state, msg, None, None); + self.report_progress("Loading proc-macros", state, msg, None, None); } } Task::BuildDepsHaveChanged => self.build_deps_changed = true, @@ -714,10 +711,9 @@ impl GlobalState { message += &format!( ": {}", match dir.strip_prefix(self.config.root_path()) { - Some(relative_path) => relative_path.as_ref(), + Some(relative_path) => relative_path.as_utf8_path(), None => dir.as_ref(), } - .display() ); } @@ -861,7 +857,7 @@ impl GlobalState { let title = if self.flycheck.len() == 1 { format!("{}", self.config.flycheck()) } else { - format!("cargo check (#{})", id + 1) + format!("{} (#{})", self.config.flycheck(), id + 1) }; self.report_progress( &title, diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index c2725e1fad919..771a5599f6f59 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -26,7 +26,6 @@ use itertools::Itertools; use load_cargo::{load_proc_macro, ProjectFolders}; use proc_macro_api::ProcMacroServer; use project_model::{ProjectWorkspace, WorkspaceBuildScripts}; -use rustc_hash::FxHashSet; use stdx::{format_to, thread::ThreadIntent}; use triomphe::Arc; use vfs::{AbsPath, AbsPathBuf, ChangeKind}; @@ -185,10 +184,8 @@ impl GlobalState { message.push_str( "`rust-analyzer.linkedProjects` have been specified, which may be incorrect. 
Specified project paths:\n", ); - message.push_str(&format!( - " {}", - self.config.linked_manifests().map(|it| it.display()).format("\n ") - )); + message + .push_str(&format!(" {}", self.config.linked_manifests().format("\n "))); if self.config.has_linked_project_jsons() { message.push_str("\nAdditionally, one or more project jsons are specified") } @@ -431,27 +428,46 @@ impl GlobalState { } if let FilesWatcher::Client = self.config.files().watcher { - let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions { - watchers: self - .workspaces - .iter() - .flat_map(|ws| ws.to_roots()) - .filter(|it| it.is_local) + let filter = + self.workspaces.iter().flat_map(|ws| ws.to_roots()).filter(|it| it.is_local); + + let watchers = if self.config.did_change_watched_files_relative_pattern_support() { + // When relative patterns are supported by the client, prefer using them + filter + .flat_map(|root| { + root.include.into_iter().flat_map(|base| { + [(base.clone(), "**/*.rs"), (base, "**/Cargo.{lock,toml}")] + }) + }) + .map(|(base, pat)| lsp_types::FileSystemWatcher { + glob_pattern: lsp_types::GlobPattern::Relative( + lsp_types::RelativePattern { + base_uri: lsp_types::OneOf::Right( + lsp_types::Url::from_file_path(base).unwrap(), + ), + pattern: pat.to_owned(), + }, + ), + kind: None, + }) + .collect() + } else { + // When they're not, integrate the base to make them into absolute patterns + filter .flat_map(|root| { - root.include.into_iter().flat_map(|it| { - [ - format!("{it}/**/*.rs"), - format!("{it}/**/Cargo.toml"), - format!("{it}/**/Cargo.lock"), - ] + root.include.into_iter().flat_map(|base| { + [format!("{base}/**/*.rs"), format!("{base}/**/Cargo.{{lock,toml}}")] }) }) .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern: lsp_types::GlobPattern::String(glob_pattern), kind: None, }) - .collect(), + .collect() }; + + let registration_options = + lsp_types::DidChangeWatchedFilesRegistrationOptions { watchers }; let registration = lsp_types::Registration { id: "workspace/didChangeWatchedFiles".to_owned(), method: "workspace/didChangeWatchedFiles".to_owned(), @@ -525,26 +541,23 @@ impl GlobalState { fn recreate_crate_graph(&mut self, cause: String) { // crate graph construction relies on these paths, record them so when one of them gets // deleted or created we trigger a reconstruction of the crate graph - let mut crate_graph_file_dependencies = FxHashSet::default(); + let mut crate_graph_file_dependencies = mem::take(&mut self.crate_graph_file_dependencies); + self.report_progress( + "Building CrateGraph", + crate::lsp::utils::Progress::Begin, + None, + None, + None, + ); let (crate_graph, proc_macro_paths, layouts, toolchains) = { // Create crate graph from all the workspaces let vfs = &mut self.vfs.write().0; - let loader = &mut self.loader; let load = |path: &AbsPath| { - let _p = tracing::span!(tracing::Level::DEBUG, "switch_workspaces::load").entered(); let vfs_path = vfs::VfsPath::from(path.to_path_buf()); crate_graph_file_dependencies.insert(vfs_path.clone()); - match vfs.file_id(&vfs_path) { - Some(file_id) => Some(file_id), - None => { - // FIXME: Consider not loading this here? 
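(For the watcher registration rewritten above: when the client advertises relative-pattern support, each workspace root becomes a `RelativePattern` base plus a short glob, otherwise the base is baked into an absolute glob string. A self-contained construction of both shapes using the same `lsp_types` types that appear in the hunk; the fallback glob built from `Url::path` is a simplification, the real code formats the `AbsPath` root directly.)

use lsp_types::{FileSystemWatcher, GlobPattern, OneOf, RelativePattern, Url};

fn watchers(base: Url, relative_patterns_supported: bool) -> Vec<FileSystemWatcher> {
    if relative_patterns_supported {
        // Base URI plus relative glob; the client resolves the pattern.
        ["**/*.rs", "**/Cargo.{lock,toml}"]
            .into_iter()
            .map(|pat| FileSystemWatcher {
                glob_pattern: GlobPattern::Relative(RelativePattern {
                    base_uri: OneOf::Right(base.clone()),
                    pattern: pat.to_owned(),
                }),
                kind: None,
            })
            .collect()
    } else {
        // Fall back to absolute string globs rooted at the base path.
        ["**/*.rs", "**/Cargo.{lock,toml}"]
            .into_iter()
            .map(|pat| FileSystemWatcher {
                glob_pattern: GlobPattern::String(format!("{}/{pat}", base.path())),
                kind: None,
            })
            .collect()
    }
}

fn main() {
    let base = Url::parse("file:///home/user/project").unwrap();
    assert_eq!(watchers(base, true).len(), 2);
}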
- let contents = loader.handle.load_sync(path); - vfs.set_file_contents(vfs_path.clone(), contents); - vfs.file_id(&vfs_path) - } - } + vfs.file_id(&vfs_path) }; ws_to_crate_graph(&self.workspaces, self.config.extra_env(), load) @@ -564,6 +577,13 @@ impl GlobalState { change.set_toolchains(toolchains); self.analysis_host.apply_change(change); self.crate_graph_file_dependencies = crate_graph_file_dependencies; + self.report_progress( + "Building CrateGraph", + crate::lsp::utils::Progress::End, + None, + None, + None, + ); self.process_changes(); self.reload_flycheck(); @@ -732,6 +752,8 @@ pub fn ws_to_crate_graph( }); proc_macro_paths.push(crate_proc_macros); } + crate_graph.shrink_to_fit(); + proc_macro_paths.shrink_to_fit(); (crate_graph, proc_macro_paths, layouts, toolchains) } @@ -739,7 +761,7 @@ pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"]; const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"]; - let file_name = match path.file_name().unwrap_or_default().to_str() { + let file_name = match path.file_name() { Some(it) => it, None => return false, }; @@ -754,18 +776,18 @@ pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) // .cargo/config{.toml} if path.extension().unwrap_or_default() != "rs" { let is_cargo_config = matches!(file_name, "config.toml" | "config") - && path.parent().map(|parent| parent.as_ref().ends_with(".cargo")).unwrap_or(false); + && path.parent().map(|parent| parent.as_str().ends_with(".cargo")).unwrap_or(false); return is_cargo_config; } - if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) { + if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_str().ends_with(it)) { return true; } let parent = match path.parent() { Some(it) => it, None => return false, }; - if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_ref().ends_with(it)) { + if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_str().ends_with(it)) { return true; } if file_name == "main.rs" { @@ -773,7 +795,7 @@ pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) Some(it) => it, None => return false, }; - if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_ref().ends_with(it)) { + if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_str().ends_with(it)) { return true; } } diff --git a/crates/rust-analyzer/tests/crate_graph.rs b/crates/rust-analyzer/tests/crate_graph.rs index efd42fadf7e96..cf38032b94182 100644 --- a/crates/rust-analyzer/tests/crate_graph.rs +++ b/crates/rust-analyzer/tests/crate_graph.rs @@ -60,7 +60,7 @@ fn get_fake_sysroot() -> Sysroot { let sysroot_path = get_fake_sysroot_path(); // there's no `libexec/` directory with a `proc-macro-srv` binary in that // fake sysroot, so we give them both the same path: - let sysroot_dir = AbsPathBuf::assert(sysroot_path); + let sysroot_dir = AbsPathBuf::assert_utf8(sysroot_path); let sysroot_src_dir = sysroot_dir.clone(); Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false) } diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index 960f5b531d44f..439b006977d3e 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs @@ -903,6 +903,7 @@ fn out_dirs_check() { } #[test] +#[cfg(not(windows))] // windows requires elevated permissions to create symlinks fn root_contains_symlink_out_dirs_check() { out_dirs_check_impl(true); } 
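(Note on the `recreate_crate_graph` hunk in this chunk: the fresh `FxHashSet::default()` is replaced by `mem::take(&mut self.crate_graph_file_dependencies)`, so the set is moved out of the field, filled while the graph is rebuilt, and stored back at the end, without borrowing `self` for the whole block. A minimal illustration of that `std::mem::take` pattern with a toy struct.)

use std::collections::HashSet;
use std::mem;

struct State {
    file_dependencies: HashSet<String>,
}

impl State {
    fn rebuild(&mut self) {
        // Move the old set out, leaving an empty one behind on the field.
        let mut deps = mem::take(&mut self.file_dependencies);
        // ...rebuild work that records new dependencies...
        deps.insert("Cargo.toml".to_owned());
        // Store the rebuilt set back on the field.
        self.file_dependencies = deps;
    }
}

fn main() {
    let mut state = State { file_dependencies: HashSet::from(["old".to_owned()]) };
    state.rebuild();
    assert!(state.file_dependencies.contains("Cargo.toml"));
}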
@@ -917,7 +918,7 @@ fn resolve_proc_macro() { } let sysroot = project_model::Sysroot::discover_no_source( - &AbsPathBuf::assert(std::env::current_dir().unwrap()), + &AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()), &Default::default(), ) .unwrap(); @@ -1002,7 +1003,7 @@ pub fn foo(_input: TokenStream) -> TokenStream { }, "procMacro": { "enable": true, - "server": proc_macro_server_path.as_path().as_ref(), + "server": proc_macro_server_path.as_path().as_str(), } })) .root("foo") @@ -1039,7 +1040,7 @@ fn test_will_rename_files_same_level() { let tmp_dir = TestDir::new(); let tmp_dir_path = tmp_dir.path().to_owned(); - let tmp_dir_str = tmp_dir_path.to_str().unwrap(); + let tmp_dir_str = tmp_dir_path.as_str(); let base_path = PathBuf::from(format!("file://{tmp_dir_str}")); let code = r#" @@ -1084,7 +1085,7 @@ use crate::old_folder::nested::foo as bar; "documentChanges": [ { "textDocument": { - "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").to_str().unwrap().to_owned().replace("C:\\", "/c:/").replace('\\', "/")), + "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").as_str().to_owned().replace("C:\\", "/c:/").replace('\\', "/")), "version": null }, "edits": [ @@ -1141,7 +1142,7 @@ use crate::old_folder::nested::foo as bar; "documentChanges": [ { "textDocument": { - "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").to_str().unwrap().to_owned().replace("C:\\", "/c:/").replace('\\', "/")), + "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").as_str().to_owned().replace("C:\\", "/c:/").replace('\\', "/")), "version": null }, "edits": [ @@ -1162,7 +1163,7 @@ use crate::old_folder::nested::foo as bar; }, { "textDocument": { - "uri": format!("file://{}", tmp_dir_path.join("src").join("old_folder").join("nested.rs").to_str().unwrap().to_owned().replace("C:\\", "/c:/").replace('\\', "/")), + "uri": format!("file://{}", tmp_dir_path.join("src").join("old_folder").join("nested.rs").as_str().to_owned().replace("C:\\", "/c:/").replace('\\', "/")), "version": null }, "edits": [ diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index 1d831b8b10532..8bbe6ff37247b 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs @@ -1,7 +1,6 @@ use std::{ cell::{Cell, RefCell}, fs, - path::{Path, PathBuf}, sync::Once, time::Duration, }; @@ -9,6 +8,7 @@ use std::{ use crossbeam_channel::{after, select, Receiver}; use lsp_server::{Connection, Message, Notification, Request}; use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url}; +use paths::{Utf8Path, Utf8PathBuf}; use rust_analyzer::{config::Config, lsp, main_loop}; use serde::Serialize; use serde_json::{json, to_string_pretty, Value}; @@ -21,7 +21,7 @@ use crate::testdir::TestDir; pub(crate) struct Project<'a> { fixture: &'a str, tmp_dir: Option, - roots: Vec, + roots: Vec, config: serde_json::Value, root_dir_contains_symlink: bool, } @@ -359,7 +359,7 @@ impl Server { self.client.sender.send(Message::Notification(not)).unwrap(); } - pub(crate) fn path(&self) -> &Path { + pub(crate) fn path(&self) -> &Utf8Path { self.dir.path() } } diff --git a/crates/rust-analyzer/tests/slow-tests/testdir.rs b/crates/rust-analyzer/tests/slow-tests/testdir.rs index b3ee7fa3d03fe..d113bd512789a 100644 --- a/crates/rust-analyzer/tests/slow-tests/testdir.rs +++ b/crates/rust-analyzer/tests/slow-tests/testdir.rs @@ -1,11 +1,12 @@ use std::{ fs, io, - 
path::{Path, PathBuf}, sync::atomic::{AtomicUsize, Ordering}, }; +use paths::{Utf8Path, Utf8PathBuf}; + pub(crate) struct TestDir { - path: PathBuf, + path: Utf8PathBuf, keep: bool, } @@ -51,10 +52,13 @@ impl TestDir { #[cfg(target_os = "windows")] std::os::windows::fs::symlink_dir(path, &symlink_path).unwrap(); - return TestDir { path: symlink_path, keep: false }; + return TestDir { + path: Utf8PathBuf::from_path_buf(symlink_path).unwrap(), + keep: false, + }; } - return TestDir { path, keep: false }; + return TestDir { path: Utf8PathBuf::from_path_buf(path).unwrap(), keep: false }; } panic!("Failed to create a temporary directory") } @@ -64,7 +68,7 @@ impl TestDir { self.keep = true; self } - pub(crate) fn path(&self) -> &Path { + pub(crate) fn path(&self) -> &Utf8Path { &self.path } } @@ -79,7 +83,7 @@ impl Drop for TestDir { let actual_path = filetype.is_symlink().then(|| fs::read_link(&self.path).unwrap()); if let Some(actual_path) = actual_path { - remove_dir_all(&actual_path).unwrap_or_else(|err| { + remove_dir_all(Utf8Path::from_path(&actual_path).unwrap()).unwrap_or_else(|err| { panic!( "failed to remove temporary link to directory {}: {err}", actual_path.display() @@ -88,18 +92,18 @@ impl Drop for TestDir { } remove_dir_all(&self.path).unwrap_or_else(|err| { - panic!("failed to remove temporary directory {}: {err}", self.path.display()) + panic!("failed to remove temporary directory {}: {err}", self.path) }); } } #[cfg(not(windows))] -fn remove_dir_all(path: &Path) -> io::Result<()> { +fn remove_dir_all(path: &Utf8Path) -> io::Result<()> { fs::remove_dir_all(path) } #[cfg(windows)] -fn remove_dir_all(path: &Path) -> io::Result<()> { +fn remove_dir_all(path: &Utf8Path) -> io::Result<()> { for _ in 0..99 { if fs::remove_dir_all(path).is_ok() { return Ok(()); diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs index 6b849ce373812..c9109c72d0d2e 100644 --- a/crates/span/src/lib.rs +++ b/crates/span/src/lib.rs @@ -16,6 +16,56 @@ pub use self::{ pub use syntax::{TextRange, TextSize}; pub use vfs::FileId; +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum Edition { + Edition2015, + Edition2018, + Edition2021, + Edition2024, +} + +impl Edition { + pub const CURRENT: Edition = Edition::Edition2021; + pub const DEFAULT: Edition = Edition::Edition2015; +} + +#[derive(Debug)] +pub struct ParseEditionError { + invalid_input: String, +} + +impl std::error::Error for ParseEditionError {} +impl fmt::Display for ParseEditionError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "invalid edition: {:?}", self.invalid_input) + } +} + +impl std::str::FromStr for Edition { + type Err = ParseEditionError; + + fn from_str(s: &str) -> Result { + let res = match s { + "2015" => Edition::Edition2015, + "2018" => Edition::Edition2018, + "2021" => Edition::Edition2021, + "2024" => Edition::Edition2024, + _ => return Err(ParseEditionError { invalid_input: s.to_owned() }), + }; + Ok(res) + } +} + +impl fmt::Display for Edition { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(match self { + Edition::Edition2015 => "2015", + Edition::Edition2018 => "2018", + Edition::Edition2021 => "2021", + Edition::Edition2024 => "2024", + }) + } +} #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct FilePosition { pub file_id: FileId, diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 9a8d73cf7ff47..1809ca7dea5d6 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml @@ -33,12 +33,8 @@ 
text-edit.workspace = true [dev-dependencies] rayon.workspace = true expect-test = "1.4.0" -proc-macro2 = "1.0.47" -quote = "1.0.20" -ungrammar = "1.16.1" test-utils.workspace = true -sourcegen.workspace = true [features] in-rust-tree = [] diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram index c3d8e97c436cc..e1765b25fd822 100644 --- a/crates/syntax/rust.ungram +++ b/crates/syntax/rust.ungram @@ -414,7 +414,7 @@ StmtList = '}' RefExpr = - Attr* '&' ('raw' | 'mut' | 'const') Expr + Attr* '&' (('raw' 'const'?)| ('raw'? 'mut') ) Expr TryExpr = Attr* Expr '?' diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index 75971861aa80e..c82bc4151ac0f 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -8,110 +8,71 @@ use crate::{ }; #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Name { - pub(crate) syntax: SyntaxNode, -} -impl Name { - pub fn ident_token(&self) -> Option { support::token(&self.syntax, T![ident]) } - pub fn self_token(&self) -> Option { support::token(&self.syntax, T![self]) } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct NameRef { +pub struct Abi { pub(crate) syntax: SyntaxNode, } -impl NameRef { - pub fn ident_token(&self) -> Option { support::token(&self.syntax, T![ident]) } - pub fn self_token(&self) -> Option { support::token(&self.syntax, T![self]) } - pub fn super_token(&self) -> Option { support::token(&self.syntax, T![super]) } - pub fn crate_token(&self) -> Option { support::token(&self.syntax, T![crate]) } - pub fn Self_token(&self) -> Option { support::token(&self.syntax, T![Self]) } +impl Abi { + pub fn extern_token(&self) -> Option { support::token(&self.syntax, T![extern]) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Lifetime { +pub struct ArgList { pub(crate) syntax: SyntaxNode, } -impl Lifetime { - pub fn lifetime_ident_token(&self) -> Option { - support::token(&self.syntax, T![lifetime_ident]) - } +impl ArgList { + pub fn args(&self) -> AstChildren { support::children(&self.syntax) } + pub fn l_paren_token(&self) -> Option { support::token(&self.syntax, T!['(']) } + pub fn r_paren_token(&self) -> Option { support::token(&self.syntax, T![')']) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Path { +pub struct ArrayExpr { pub(crate) syntax: SyntaxNode, } -impl Path { - pub fn qualifier(&self) -> Option { support::child(&self.syntax) } - pub fn coloncolon_token(&self) -> Option { support::token(&self.syntax, T![::]) } - pub fn segment(&self) -> Option { support::child(&self.syntax) } +impl ast::HasAttrs for ArrayExpr {} +impl ArrayExpr { + pub fn expr(&self) -> Option { support::child(&self.syntax) } + pub fn exprs(&self) -> AstChildren { support::children(&self.syntax) } + pub fn l_brack_token(&self) -> Option { support::token(&self.syntax, T!['[']) } + pub fn r_brack_token(&self) -> Option { support::token(&self.syntax, T![']']) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct PathSegment { +pub struct ArrayType { pub(crate) syntax: SyntaxNode, } -impl PathSegment { - pub fn coloncolon_token(&self) -> Option { support::token(&self.syntax, T![::]) } - pub fn name_ref(&self) -> Option { support::child(&self.syntax) } - pub fn generic_arg_list(&self) -> Option { support::child(&self.syntax) } - pub fn param_list(&self) -> Option { support::child(&self.syntax) } - pub fn ret_type(&self) -> Option { 
support::child(&self.syntax) } - pub fn l_angle_token(&self) -> Option { support::token(&self.syntax, T![<]) } +impl ArrayType { + pub fn const_arg(&self) -> Option { support::child(&self.syntax) } pub fn ty(&self) -> Option { support::child(&self.syntax) } - pub fn as_token(&self) -> Option { support::token(&self.syntax, T![as]) } - pub fn path_type(&self) -> Option { support::child(&self.syntax) } - pub fn r_angle_token(&self) -> Option { support::token(&self.syntax, T![>]) } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct GenericArgList { - pub(crate) syntax: SyntaxNode, -} -impl GenericArgList { - pub fn coloncolon_token(&self) -> Option { support::token(&self.syntax, T![::]) } - pub fn l_angle_token(&self) -> Option { support::token(&self.syntax, T![<]) } - pub fn generic_args(&self) -> AstChildren { support::children(&self.syntax) } - pub fn r_angle_token(&self) -> Option { support::token(&self.syntax, T![>]) } + pub fn l_brack_token(&self) -> Option { support::token(&self.syntax, T!['[']) } + pub fn r_brack_token(&self) -> Option { support::token(&self.syntax, T![']']) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct ParamList { +pub struct AsmExpr { pub(crate) syntax: SyntaxNode, } -impl ParamList { +impl ast::HasAttrs for AsmExpr {} +impl AsmExpr { + pub fn expr(&self) -> Option { support::child(&self.syntax) } + pub fn pound_token(&self) -> Option { support::token(&self.syntax, T![#]) } pub fn l_paren_token(&self) -> Option { support::token(&self.syntax, T!['(']) } - pub fn self_param(&self) -> Option { support::child(&self.syntax) } - pub fn comma_token(&self) -> Option { support::token(&self.syntax, T![,]) } - pub fn params(&self) -> AstChildren { support::children(&self.syntax) } pub fn r_paren_token(&self) -> Option { support::token(&self.syntax, T![')']) } - pub fn pipe_token(&self) -> Option { support::token(&self.syntax, T![|]) } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct RetType { - pub(crate) syntax: SyntaxNode, -} -impl RetType { - pub fn thin_arrow_token(&self) -> Option { support::token(&self.syntax, T![->]) } - pub fn ty(&self) -> Option { support::child(&self.syntax) } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct PathType { - pub(crate) syntax: SyntaxNode, -} -impl PathType { - pub fn path(&self) -> Option { support::child(&self.syntax) } + pub fn asm_token(&self) -> Option { support::token(&self.syntax, T![asm]) } + pub fn builtin_token(&self) -> Option { support::token(&self.syntax, T![builtin]) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct TypeArg { +pub struct AssocItemList { pub(crate) syntax: SyntaxNode, } -impl TypeArg { - pub fn ty(&self) -> Option { support::child(&self.syntax) } +impl ast::HasAttrs for AssocItemList {} +impl AssocItemList { + pub fn assoc_items(&self) -> AstChildren { support::children(&self.syntax) } + pub fn l_curly_token(&self) -> Option { support::token(&self.syntax, T!['{']) } + pub fn r_curly_token(&self) -> Option { support::token(&self.syntax, T!['}']) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -120,116 +81,125 @@ pub struct AssocTypeArg { } impl ast::HasTypeBounds for AssocTypeArg {} impl AssocTypeArg { - pub fn name_ref(&self) -> Option { support::child(&self.syntax) } + pub fn const_arg(&self) -> Option { support::child(&self.syntax) } pub fn generic_arg_list(&self) -> Option { support::child(&self.syntax) } + pub fn name_ref(&self) -> Option { 
support::child(&self.syntax) } pub fn param_list(&self) -> Option { support::child(&self.syntax) } pub fn ret_type(&self) -> Option { support::child(&self.syntax) } - pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } pub fn ty(&self) -> Option { support::child(&self.syntax) } - pub fn const_arg(&self) -> Option { support::child(&self.syntax) } + pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct LifetimeArg { +pub struct Attr { pub(crate) syntax: SyntaxNode, } -impl LifetimeArg { - pub fn lifetime(&self) -> Option { support::child(&self.syntax) } +impl Attr { + pub fn meta(&self) -> Option { support::child(&self.syntax) } + pub fn excl_token(&self) -> Option { support::token(&self.syntax, T![!]) } + pub fn pound_token(&self) -> Option { support::token(&self.syntax, T![#]) } + pub fn l_brack_token(&self) -> Option { support::token(&self.syntax, T!['[']) } + pub fn r_brack_token(&self) -> Option { support::token(&self.syntax, T![']']) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct ConstArg { +pub struct AwaitExpr { pub(crate) syntax: SyntaxNode, } -impl ConstArg { +impl ast::HasAttrs for AwaitExpr {} +impl AwaitExpr { pub fn expr(&self) -> Option { support::child(&self.syntax) } + pub fn dot_token(&self) -> Option { support::token(&self.syntax, T![.]) } + pub fn await_token(&self) -> Option { support::token(&self.syntax, T![await]) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct TypeBoundList { +pub struct BecomeExpr { pub(crate) syntax: SyntaxNode, } -impl TypeBoundList { - pub fn bounds(&self) -> AstChildren { support::children(&self.syntax) } +impl ast::HasAttrs for BecomeExpr {} +impl BecomeExpr { + pub fn expr(&self) -> Option { support::child(&self.syntax) } + pub fn become_token(&self) -> Option { support::token(&self.syntax, T![become]) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct MacroCall { +pub struct BinExpr { pub(crate) syntax: SyntaxNode, } -impl ast::HasAttrs for MacroCall {} -impl ast::HasDocComments for MacroCall {} -impl MacroCall { - pub fn path(&self) -> Option { support::child(&self.syntax) } - pub fn excl_token(&self) -> Option { support::token(&self.syntax, T![!]) } - pub fn token_tree(&self) -> Option { support::child(&self.syntax) } - pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } -} +impl ast::HasAttrs for BinExpr {} +impl BinExpr {} #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Attr { +pub struct BlockExpr { pub(crate) syntax: SyntaxNode, } -impl Attr { - pub fn pound_token(&self) -> Option { support::token(&self.syntax, T![#]) } - pub fn excl_token(&self) -> Option { support::token(&self.syntax, T![!]) } - pub fn l_brack_token(&self) -> Option { support::token(&self.syntax, T!['[']) } - pub fn meta(&self) -> Option { support::child(&self.syntax) } - pub fn r_brack_token(&self) -> Option { support::token(&self.syntax, T![']']) } +impl ast::HasAttrs for BlockExpr {} +impl BlockExpr { + pub fn label(&self) -> Option