Unnamed repository; edit this file 'description' to name the repository.
-rw-r--r--.github/ISSUE_TEMPLATE/bug_report.md2
-rw-r--r--.github/workflows/ci.yaml2
-rw-r--r--.github/workflows/metrics.yaml16
-rw-r--r--CONTRIBUTING.md30
-rw-r--r--Cargo.lock75
-rw-r--r--Cargo.toml11
-rw-r--r--README.md5
-rw-r--r--crates/base-db/src/input.rs55
-rw-r--r--crates/base-db/src/lib.rs5
-rw-r--r--crates/cfg/src/lib.rs7
-rw-r--r--crates/cfg/src/tests.rs10
-rw-r--r--crates/flycheck/src/command.rs10
-rw-r--r--crates/flycheck/src/lib.rs104
-rw-r--r--crates/flycheck/src/test_runner.rs26
-rw-r--r--crates/hir-def/Cargo.toml2
-rw-r--r--crates/hir-def/src/attr.rs10
-rw-r--r--crates/hir-def/src/attr/tests.rs2
-rw-r--r--crates/hir-def/src/data.rs4
-rw-r--r--crates/hir-def/src/data/adt.rs22
-rw-r--r--crates/hir-def/src/db.rs5
-rw-r--r--crates/hir-def/src/expander.rs11
-rw-r--r--crates/hir-def/src/find_path.rs7
-rw-r--r--crates/hir-def/src/generics.rs32
-rw-r--r--crates/hir-def/src/item_scope.rs32
-rw-r--r--crates/hir-def/src/item_tree.rs1
-rw-r--r--crates/hir-def/src/item_tree/lower.rs2
-rw-r--r--crates/hir-def/src/item_tree/pretty.rs2
-rw-r--r--crates/hir-def/src/lib.rs18
-rw-r--r--crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs14
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mod.rs7
-rw-r--r--crates/hir-def/src/nameres/collector.rs16
-rw-r--r--crates/hir-expand/Cargo.toml4
-rw-r--r--crates/hir-expand/src/attrs.rs49
-rw-r--r--crates/hir-expand/src/builtin_derive_macro.rs6
-rw-r--r--crates/hir-expand/src/builtin_fn_macro.rs2
-rw-r--r--crates/hir-expand/src/cfg_process.rs121
-rw-r--r--crates/hir-expand/src/db.rs118
-rw-r--r--crates/hir-expand/src/declarative.rs15
-rw-r--r--crates/hir-expand/src/fixup.rs8
-rw-r--r--crates/hir-expand/src/lib.rs2
-rw-r--r--crates/hir-expand/src/name.rs1
-rw-r--r--crates/hir-ty/Cargo.toml2
-rw-r--r--crates/hir-ty/src/builder.rs12
-rw-r--r--crates/hir-ty/src/chalk_ext.rs9
-rw-r--r--crates/hir-ty/src/consteval.rs33
-rw-r--r--crates/hir-ty/src/diagnostics/decl_check.rs2
-rw-r--r--crates/hir-ty/src/diagnostics/expr.rs93
-rw-r--r--crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs115
-rw-r--r--crates/hir-ty/src/diagnostics/unsafe_check.rs28
-rw-r--r--crates/hir-ty/src/display.rs349
-rw-r--r--crates/hir-ty/src/infer.rs34
-rw-r--r--crates/hir-ty/src/infer/cast.rs4
-rw-r--r--crates/hir-ty/src/infer/closure.rs26
-rw-r--r--crates/hir-ty/src/infer/coerce.rs6
-rw-r--r--crates/hir-ty/src/infer/expr.rs23
-rw-r--r--crates/hir-ty/src/infer/pat.rs57
-rw-r--r--crates/hir-ty/src/infer/unify.rs62
-rw-r--r--crates/hir-ty/src/inhabitedness.rs68
-rw-r--r--crates/hir-ty/src/lib.rs17
-rw-r--r--crates/hir-ty/src/lower.rs271
-rw-r--r--crates/hir-ty/src/method_resolution.rs10
-rw-r--r--crates/hir-ty/src/mir/eval/shim.rs15
-rw-r--r--crates/hir-ty/src/mir/lower.rs38
-rw-r--r--crates/hir-ty/src/mir/lower/as_place.rs6
-rw-r--r--crates/hir-ty/src/mir/monomorphization.rs4
-rw-r--r--crates/hir-ty/src/tests/diagnostics.rs17
-rw-r--r--crates/hir-ty/src/tests/display_source_code.rs2
-rw-r--r--crates/hir-ty/src/tests/patterns.rs2
-rw-r--r--crates/hir-ty/src/tests/regression.rs4
-rw-r--r--crates/hir-ty/src/tests/simple.rs6
-rw-r--r--crates/hir-ty/src/tests/traits.rs16
-rw-r--r--crates/hir-ty/src/utils.rs73
-rw-r--r--crates/hir/Cargo.toml2
-rw-r--r--crates/hir/src/db.rs41
-rw-r--r--crates/hir/src/display.rs62
-rw-r--r--crates/hir/src/lib.rs61
-rw-r--r--crates/hir/src/semantics.rs15
-rw-r--r--crates/hir/src/semantics/source_to_def.rs6
-rw-r--r--crates/ide-assists/src/handlers/add_missing_impl_members.rs2
-rw-r--r--crates/ide-assists/src/handlers/auto_import.rs78
-rw-r--r--crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs250
-rw-r--r--crates/ide-assists/src/handlers/extract_function.rs10
-rw-r--r--crates/ide-assists/src/handlers/generate_function.rs70
-rw-r--r--crates/ide-assists/src/handlers/inline_call.rs4
-rw-r--r--crates/ide-assists/src/handlers/promote_local_to_const.rs13
-rw-r--r--crates/ide-assists/src/handlers/remove_unused_imports.rs2
-rw-r--r--crates/ide-assists/src/handlers/toggle_ignore.rs4
-rw-r--r--crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs581
-rw-r--r--crates/ide-assists/src/lib.rs5
-rw-r--r--crates/ide-assists/src/tests/generated.rs53
-rw-r--r--crates/ide-assists/src/utils.rs15
-rw-r--r--crates/ide-completion/src/completions/postfix.rs5
-rw-r--r--crates/ide-completion/src/completions/type.rs8
-rw-r--r--crates/ide-completion/src/context.rs5
-rw-r--r--crates/ide-completion/src/snippet.rs2
-rw-r--r--crates/ide-completion/src/tests/predicate.rs4
-rw-r--r--crates/ide-completion/src/tests/type_pos.rs8
-rw-r--r--crates/ide-db/Cargo.toml1
-rw-r--r--crates/ide-db/src/apply_change.rs128
-rw-r--r--crates/ide-db/src/documentation.rs13
-rw-r--r--crates/ide-db/src/imports/insert_use.rs2
-rw-r--r--crates/ide-db/src/imports/insert_use/tests.rs8
-rw-r--r--crates/ide-db/src/label.rs4
-rw-r--r--crates/ide-db/src/search.rs85
-rw-r--r--crates/ide-diagnostics/src/handlers/missing_match_arms.rs3
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_field.rs17
-rw-r--r--crates/ide-diagnostics/src/handlers/unused_variables.rs53
-rw-r--r--crates/ide-diagnostics/src/lib.rs41
-rw-r--r--crates/ide-ssr/src/fragments.rs4
-rw-r--r--crates/ide/src/annotations/fn_references.rs4
-rw-r--r--crates/ide/src/expand_macro.rs8
-rw-r--r--crates/ide/src/file_structure.rs2
-rwxr-xr-xcrates/ide/src/folding_ranges.rs2
-rw-r--r--crates/ide/src/highlight_related.rs83
-rw-r--r--crates/ide/src/hover.rs45
-rw-r--r--crates/ide/src/hover/render.rs12
-rw-r--r--crates/ide/src/hover/tests.rs156
-rw-r--r--crates/ide/src/inlay_hints.rs24
-rw-r--r--crates/ide/src/join_lines.rs4
-rw-r--r--crates/ide/src/lib.rs18
-rw-r--r--crates/ide/src/matching_brace.rs2
-rw-r--r--crates/ide/src/references.rs123
-rw-r--r--crates/ide/src/runnables.rs78
-rw-r--r--crates/ide/src/static_index.rs9
-rw-r--r--crates/ide/src/syntax_highlighting/highlight.rs1
-rw-r--r--crates/ide/src/syntax_highlighting/tags.rs8
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_general.html2
-rw-r--r--crates/ide/src/syntax_highlighting/tests.rs2
-rw-r--r--crates/ide/src/syntax_tree.rs2
-rw-r--r--crates/ide/src/typing.rs5
-rw-r--r--crates/intern/src/lib.rs1
-rw-r--r--crates/limit/Cargo.toml3
-rw-r--r--crates/load-cargo/Cargo.toml3
-rw-r--r--crates/load-cargo/src/lib.rs5
-rw-r--r--crates/mbe/src/benchmark.rs10
-rw-r--r--crates/mbe/src/expander.rs36
-rw-r--r--crates/mbe/src/expander/matcher.rs35
-rw-r--r--crates/mbe/src/lib.rs12
-rw-r--r--crates/mbe/src/syntax_bridge.rs14
-rw-r--r--crates/mbe/src/syntax_bridge/tests.rs2
-rw-r--r--crates/mbe/src/tt_iter.rs3
-rw-r--r--crates/parser/src/edition.rs55
-rw-r--r--crates/parser/src/lib.rs14
-rw-r--r--crates/parser/src/parser.rs6
-rw-r--r--crates/parser/src/syntax_kind/generated.rs31
-rw-r--r--crates/parser/src/tests.rs2
-rw-r--r--crates/parser/src/tests/prefix_entries.rs2
-rw-r--r--crates/parser/src/token_set.rs34
-rw-r--r--crates/proc-macro-api/src/lib.rs12
-rw-r--r--crates/proc-macro-srv-cli/build.rs5
-rw-r--r--crates/proc-macro-srv/build.rs2
-rw-r--r--crates/proc-macro-srv/proc-macro-test/build.rs2
-rw-r--r--crates/proc-macro-srv/proc-macro-test/imp/build.rs5
-rw-r--r--crates/project-model/src/build_scripts.rs10
-rw-r--r--crates/project-model/src/cargo_workspace.rs58
-rw-r--r--crates/project-model/src/cfg.rs (renamed from crates/project-model/src/cfg_flag.rs)30
-rw-r--r--crates/project-model/src/env.rs85
-rw-r--r--crates/project-model/src/lib.rs6
-rw-r--r--crates/project-model/src/project_json.rs63
-rw-r--r--crates/project-model/src/rustc_cfg.rs5
-rw-r--r--crates/project-model/src/tests.rs8
-rw-r--r--crates/project-model/src/workspace.rs586
-rw-r--r--crates/project-model/test_data/output/cargo_hello_world_project_model.txt100
-rw-r--r--crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt100
-rw-r--r--crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt100
-rw-r--r--crates/project-model/test_data/output/rust_project_hello_world_project_model.txt10
-rw-r--r--crates/rust-analyzer/Cargo.toml3
-rw-r--r--crates/rust-analyzer/src/cargo_target_spec.rs59
-rw-r--r--crates/rust-analyzer/src/cli/analysis_stats.rs4
-rw-r--r--crates/rust-analyzer/src/cli/parse.rs3
-rw-r--r--crates/rust-analyzer/src/cli/progress_report.rs2
-rw-r--r--crates/rust-analyzer/src/cli/rustc_tests.rs6
-rw-r--r--crates/rust-analyzer/src/config.rs2120
-rw-r--r--crates/rust-analyzer/src/diagnostics.rs4
-rw-r--r--crates/rust-analyzer/src/diagnostics/to_proto.rs17
-rw-r--r--crates/rust-analyzer/src/global_state.rs17
-rw-r--r--crates/rust-analyzer/src/handlers/notification.rs16
-rw-r--r--crates/rust-analyzer/src/handlers/request.rs128
-rw-r--r--crates/rust-analyzer/src/integrated_benchmarks.rs8
-rw-r--r--crates/rust-analyzer/src/lsp/ext.rs10
-rw-r--r--crates/rust-analyzer/src/lsp/semantic_tokens.rs37
-rw-r--r--crates/rust-analyzer/src/lsp/to_proto.rs181
-rw-r--r--crates/rust-analyzer/src/main_loop.rs39
-rw-r--r--crates/rust-analyzer/src/reload.rs48
-rw-r--r--crates/rust-analyzer/tests/slow-tests/main.rs151
-rw-r--r--crates/rust-analyzer/tests/slow-tests/support.rs34
-rw-r--r--crates/rust-analyzer/tests/slow-tests/tidy.rs2
-rw-r--r--crates/salsa/salsa-macros/src/database_storage.rs3
-rw-r--r--crates/salsa/salsa-macros/src/parenthesized.rs2
-rw-r--r--crates/salsa/salsa-macros/src/query_group.rs2
-rw-r--r--crates/salsa/src/derived.rs1
-rw-r--r--crates/salsa/src/derived/slot.rs1
-rw-r--r--crates/salsa/src/durability.rs1
-rw-r--r--crates/salsa/src/hash.rs1
-rw-r--r--crates/salsa/src/input.rs1
-rw-r--r--crates/salsa/src/intern_id.rs1
-rw-r--r--crates/salsa/src/interned.rs1
-rw-r--r--crates/salsa/src/lib.rs15
-rw-r--r--crates/salsa/src/lru.rs1
-rw-r--r--crates/salsa/src/plumbing.rs1
-rw-r--r--crates/salsa/src/revision.rs1
-rw-r--r--crates/salsa/src/runtime.rs3
-rw-r--r--crates/salsa/src/runtime/dependency_graph.rs1
-rw-r--r--crates/salsa/src/runtime/local_state.rs1
-rw-r--r--crates/salsa/src/storage.rs1
-rw-r--r--crates/sourcegen/src/lib.rs2
-rw-r--r--crates/span/Cargo.toml1
-rw-r--r--crates/span/src/lib.rs53
-rw-r--r--crates/span/src/map.rs9
-rw-r--r--crates/syntax/src/algo.rs6
-rw-r--r--crates/syntax/src/ast.rs13
-rw-r--r--crates/syntax/src/ast/edit_in_place.rs3
-rw-r--r--crates/syntax/src/ast/expr_ext.rs5
-rw-r--r--crates/syntax/src/ast/make.rs13
-rw-r--r--crates/syntax/src/fuzz.rs9
-rw-r--r--crates/syntax/src/hacks.rs4
-rw-r--r--crates/syntax/src/lib.rs24
-rw-r--r--crates/syntax/src/parsing.rs4
-rw-r--r--crates/syntax/src/parsing/reparsing.rs12
-rw-r--r--crates/syntax/src/ptr.rs2
-rw-r--r--crates/syntax/src/tests.rs9
-rw-r--r--crates/test-fixture/src/lib.rs14
-rw-r--r--crates/test-utils/src/fixture.rs2
-rw-r--r--crates/vfs-notify/src/lib.rs28
-rw-r--r--crates/vfs/src/file_set.rs4
-rw-r--r--crates/vfs/src/file_set/tests.rs23
-rw-r--r--docs/dev/lsp-extensions.md4
-rw-r--r--docs/user/generated_config.adoc123
-rw-r--r--docs/user/manual.adoc5
-rw-r--r--editors/code/language-configuration.json6
-rw-r--r--editors/code/package.json77
-rw-r--r--editors/code/src/client.ts5
-rw-r--r--editors/code/src/run.ts19
-rw-r--r--editors/code/src/tasks.ts71
-rw-r--r--xtask/src/codegen.rs2
-rw-r--r--xtask/src/codegen/grammar/ast_src.rs10
-rw-r--r--xtask/src/codegen/lints.rs2
237 files changed, 6206 insertions, 3270 deletions
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 97c1b64494..0d99d06bcd 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -22,6 +22,8 @@ Otherwise please try to provide information which will help us to fix the issue
**rustc version**: (eg. output of `rustc -V`)
+**editor or extension**: (eg. VSCode, Vim, Emacs, etc. For VSCode users, specify your extension version; for users of other editors, provide the distribution if applicable)
+
**relevant settings**: (eg. client settings, or environment variables like `CARGO`, `RUSTC`, `RUSTUP_HOME` or `CARGO_HOME`)
**repository link (if public, optional)**: (eg. [rust-analyzer](https://github.com/rust-lang/rust-analyzer))
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 08ad10c297..a10345a706 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -91,7 +91,7 @@ jobs:
run: cargo build --quiet ${{ env.USE_SYSROOT_ABI }}
- name: Test
- if: matrix.os == 'ubuntu-latest' || github.event_name == 'push'
+ if: matrix.os == 'ubuntu-latest' || matrix.os == 'windows-latest' || github.event_name == 'push'
run: cargo test ${{ env.USE_SYSROOT_ABI }} -- --nocapture --quiet
- name: Switch to stable toolchain
diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml
index de61b2389a..b6cd4a795a 100644
--- a/.github/workflows/metrics.yaml
+++ b/.github/workflows/metrics.yaml
@@ -58,7 +58,7 @@ jobs:
key: ${{ runner.os }}-target-${{ github.sha }}
- name: Upload build metrics
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: build-${{ github.sha }}
path: target/build.json
@@ -95,7 +95,7 @@ jobs:
run: cargo xtask metrics "${{ matrix.names }}"
- name: Upload metrics
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: ${{ matrix.names }}-${{ github.sha }}
path: target/${{ matrix.names }}.json
@@ -109,32 +109,32 @@ jobs:
uses: actions/checkout@v4
- name: Download build metrics
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
with:
name: build-${{ github.sha }}
- name: Download self metrics
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
with:
name: self-${{ github.sha }}
- name: Download ripgrep-13.0.0 metrics
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
with:
name: ripgrep-13.0.0-${{ github.sha }}
- name: Download webrender-2022 metrics
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
with:
name: webrender-2022-${{ github.sha }}
- name: Download diesel-1.4.8 metrics
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
with:
name: diesel-1.4.8-${{ github.sha }}
- name: Download hyper-0.14.18 metrics
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
with:
name: hyper-0.14.18-${{ github.sha }}
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000000..da65b034be
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,30 @@
+# Contributing to rust-analyzer
+
+Thank you for your interest in contributing to rust-analyzer! There are many ways to contribute
+and we appreciate all of them.
+
+To get a quick overview of the crates and structure of the project take a look at the
+[./docs/dev](./docs/dev) folder.
+
+If you have any questions please ask them in the [rust-analyzer zulip stream](
+https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer) or if unsure where
+to start out when working on a concrete issue drop a comment on the related issue for mentoring
+instructions (general discussions are recommended to happen on zulip though).
+
+## Fixing a bug or improving a feature
+
+Generally it's fine to just work on these kinds of things and put a pull-request out for it. If there
+is an issue accompanying it make sure to link it in the pull request description so it can be closed
+afterwards or linked for context.
+
+If you want to find something to fix or work on keep a look out for the `C-bug` and `C-enhancement`
+labels.
+
+## Implementing a new feature
+
+It's advised to first open an issue for any kind of new feature so the team can tell upfront whether
+the feature is desirable or not before any implementation work happens. We want to minimize the
+possibility of someone putting a lot of work into a feature that is then going to waste as we deem
+it out of scope (be it due to generally not fitting in with rust-analyzer, or just not having the
+maintenance capacity). If there already is a feature issue open but it is not clear whether it is
+considered accepted feel free to just drop a comment and ask!
diff --git a/Cargo.lock b/Cargo.lock
index c7cf4479b3..a6e460134f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -161,9 +161,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chalk-derive"
-version = "0.96.0"
+version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5676cea088c32290fe65c82895be9d06dd21e0fa49bb97ca840529e9417ab71a"
+checksum = "92a0aedc4ac2adc5c0b7dc9ec38c5c816284ad28da6d4ecd01873b9683f54972"
dependencies = [
"proc-macro2",
"quote",
@@ -173,20 +173,19 @@ dependencies = [
[[package]]
name = "chalk-ir"
-version = "0.96.0"
+version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ff550c2cdd63ff74394214dce03d06386928a641c0f08837535f04af573a966d"
+checksum = "db18493569b190f7266a04901e520fc3a5c00564475154287906f8a27302c119"
dependencies = [
"bitflags 2.4.2",
"chalk-derive",
- "lazy_static",
]
[[package]]
name = "chalk-recursive"
-version = "0.96.0"
+version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c4559e5c9b200240453b07d893f9c3c74413b53b0d33cbe272c68b0b77aa1c3"
+checksum = "ae4ba8ce5bd2e1b59f1f79495bc8704db09a8285e51cc5ddf01d9baee1bf447d"
dependencies = [
"chalk-derive",
"chalk-ir",
@@ -197,9 +196,9 @@ dependencies = [
[[package]]
name = "chalk-solve"
-version = "0.96.0"
+version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0882e68ce9eb5a0a2413806538494d19df6ee520ab17d1faf489e952f32e98b8"
+checksum = "b2ec1b3b7f7b1ec38f099ef39c2bc3ea29335be1b8316d114baff46d96d131e9"
dependencies = [
"chalk-derive",
"chalk-ir",
@@ -552,6 +551,7 @@ dependencies = [
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"limit",
"mbe",
+ "parser",
"rustc-hash",
"smallvec",
"span",
@@ -695,6 +695,7 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
+ "bitflags 2.4.2",
"cov-mark",
"crossbeam-channel",
"either",
@@ -781,6 +782,7 @@ checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4"
dependencies = [
"equivalent",
"hashbrown",
+ "serde",
]
[[package]]
@@ -1594,6 +1596,7 @@ dependencies = [
"ide",
"ide-db",
"ide-ssr",
+ "indexmap",
"itertools",
"load-cargo",
"lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1622,6 +1625,7 @@ dependencies = [
"test-fixture",
"test-utils",
"tikv-jemallocator",
+ "toml",
"toolchain",
"tracing",
"tracing-subscriber",
@@ -1776,6 +1780,15 @@ dependencies = [
]
[[package]]
+name = "serde_spanned"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1"
+dependencies = [
+ "serde",
+]
+
+[[package]]
name = "sharded-slab"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1822,6 +1835,7 @@ dependencies = [
"salsa",
"stdx",
"syntax",
+ "text-size",
"vfs",
]
@@ -2026,6 +2040,40 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
+name = "toml"
+version = "0.8.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1a195ec8c9da26928f773888e0742ca3ca1040c6cd859c919c9f59c1954ab35"
+dependencies = [
+ "serde",
+ "serde_spanned",
+ "toml_datetime",
+ "toml_edit",
+]
+
+[[package]]
+name = "toml_datetime"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "toml_edit"
+version = "0.21.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03"
+dependencies = [
+ "indexmap",
+ "serde",
+ "serde_spanned",
+ "toml_datetime",
+ "winnow",
+]
+
+[[package]]
name = "toolchain"
version = "0.0.0"
dependencies = [
@@ -2402,6 +2450,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"
[[package]]
+name = "winnow"
+version = "0.5.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8434aeec7b290e8da5c3f0d628cb0eac6cabcb31d14bb74f779a08109a5914d6"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
name = "write-json"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml
index d9343d2b96..f7e3ae51df 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -22,6 +22,7 @@ smol_str.opt-level = 3
text-size.opt-level = 3
# This speeds up `cargo xtask dist`.
miniz_oxide.opt-level = 3
+salsa.opt-level = 3
[profile.release]
incremental = true
@@ -106,10 +107,10 @@ arrayvec = "0.7.4"
bitflags = "2.4.1"
cargo_metadata = "0.18.1"
camino = "1.1.6"
-chalk-solve = { version = "0.96.0", default-features = false }
-chalk-ir = "0.96.0"
-chalk-recursive = { version = "0.96.0", default-features = false }
-chalk-derive = "0.96.0"
+chalk-solve = { version = "0.97.0", default-features = false }
+chalk-ir = "0.97.0"
+chalk-recursive = { version = "0.97.0", default-features = false }
+chalk-derive = "0.97.0"
command-group = "2.0.1"
crossbeam-channel = "0.5.8"
dissimilar = "1.0.7"
@@ -188,6 +189,8 @@ enum_variant_names = "allow"
new_ret_no_self = "allow"
# Has a bunch of false positives
useless_asref = "allow"
+# Has false positives
+assigning_clones = "allow"
## Following lints should be tackled at some point
too_many_arguments = "allow"
diff --git a/README.md b/README.md
index 8c3f6f8468..552f71f151 100644
--- a/README.md
+++ b/README.md
@@ -13,8 +13,9 @@ https://rust-analyzer.github.io/manual.html#installation
## Documentation
-If you want to **contribute** to rust-analyzer or are just curious about how
-things work under the hood, check the [./docs/dev](./docs/dev) folder.
+If you want to **contribute** to rust-analyzer check out the [CONTRIBUTING.md](./CONTRIBUTING.md) or
+if you are just curious about how things work under the hood, check the [./docs/dev](./docs/dev)
+folder.
If you want to **use** rust-analyzer's language server with your editor of
choice, check [the manual](https://rust-analyzer.github.io/manual.html) folder.
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 27eb05cd4d..240af7925c 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -19,6 +19,10 @@ use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
// then the crate for the proc-macro hasn't been build yet as the build data is missing.
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct SourceRootId(pub u32);
+
/// Files are grouped into source roots. A source root is a directory on the
/// file systems which is watched for changes. Typically it corresponds to a
/// Rust crate. Source roots *might* be nested: in this case, a file belongs to
@@ -26,9 +30,6 @@ pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf)
/// source root, and the analyzer does not know the root path of the source root at
/// all. So, a file from one source root can't refer to a file in another source
/// root by path.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct SourceRootId(pub u32);
-
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SourceRoot {
/// Sysroot or crates.io library.
@@ -285,20 +286,39 @@ pub struct CrateData {
/// For purposes of analysis, crates are anonymous (only names in
/// `Dependency` matters), this name should only be used for UI.
pub display_name: Option<CrateDisplayName>,
- pub cfg_options: CfgOptions,
+ pub cfg_options: Arc<CfgOptions>,
/// The cfg options that could be used by the crate
- pub potential_cfg_options: Option<CfgOptions>,
+ pub potential_cfg_options: Option<Arc<CfgOptions>>,
pub env: Env,
pub dependencies: Vec<Dependency>,
pub origin: CrateOrigin,
pub is_proc_macro: bool,
}
-#[derive(Default, Debug, Clone, PartialEq, Eq)]
+#[derive(Default, Clone, PartialEq, Eq)]
pub struct Env {
entries: FxHashMap<String, String>,
}
+impl fmt::Debug for Env {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ struct EnvDebug<'s>(Vec<(&'s String, &'s String)>);
+
+ impl fmt::Debug for EnvDebug<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_map().entries(self.0.iter().copied()).finish()
+ }
+ }
+ f.debug_struct("Env")
+ .field("entries", &{
+ let mut entries: Vec<_> = self.entries.iter().collect();
+ entries.sort();
+ EnvDebug(entries)
+ })
+ .finish()
+ }
+}
+
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Dependency {
pub crate_id: CrateId,
@@ -328,12 +348,13 @@ impl CrateGraph {
edition: Edition,
display_name: Option<CrateDisplayName>,
version: Option<String>,
- cfg_options: CfgOptions,
- potential_cfg_options: Option<CfgOptions>,
- env: Env,
+ cfg_options: Arc<CfgOptions>,
+ potential_cfg_options: Option<Arc<CfgOptions>>,
+ mut env: Env,
is_proc_macro: bool,
origin: CrateOrigin,
) -> CrateId {
+ env.entries.shrink_to_fit();
let data = CrateData {
root_file_id,
edition,
@@ -650,16 +671,24 @@ impl FromIterator<(String, String)> for Env {
}
impl Env {
- pub fn set(&mut self, env: &str, value: String) {
- self.entries.insert(env.to_owned(), value);
+ pub fn set(&mut self, env: &str, value: impl Into<String>) {
+ self.entries.insert(env.to_owned(), value.into());
}
pub fn get(&self, env: &str) -> Option<String> {
self.entries.get(env).cloned()
}
- pub fn iter(&self) -> impl Iterator<Item = (&str, &str)> {
- self.entries.iter().map(|(k, v)| (k.as_str(), v.as_str()))
+ pub fn extend_from_other(&mut self, other: &Env) {
+ self.entries.extend(other.entries.iter().map(|(x, y)| (x.to_owned(), y.to_owned())));
+ }
+}
+
+impl From<Env> for Vec<(String, String)> {
+ fn from(env: Env) -> Vec<(String, String)> {
+ let mut entries: Vec<_> = env.entries.into_iter().collect();
+ entries.sort();
+ entries
}
}
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 785ff9ceff..2b64a07a5a 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -45,7 +45,7 @@ pub trait Upcast<T: ?Sized> {
pub const DEFAULT_FILE_TEXT_LRU_CAP: usize = 16;
pub const DEFAULT_PARSE_LRU_CAP: usize = 128;
-pub const DEFAULT_BORROWCK_LRU_CAP: usize = 1024;
+pub const DEFAULT_BORROWCK_LRU_CAP: usize = 2024;
pub trait FileLoader {
/// Text of the file.
@@ -83,7 +83,8 @@ fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseC
fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
let _p = tracing::span!(tracing::Level::INFO, "parse_query", ?file_id).entered();
let text = db.file_text(file_id);
- SourceFile::parse(&text)
+ // FIXME: Edition based parsing
+ SourceFile::parse(&text, span::Edition::CURRENT)
}
/// We don't want to give HIR knowledge of source roots, hence we extract these
diff --git a/crates/cfg/src/lib.rs b/crates/cfg/src/lib.rs
index 454d6fc538..9a365889e6 100644
--- a/crates/cfg/src/lib.rs
+++ b/crates/cfg/src/lib.rs
@@ -58,13 +58,6 @@ impl CfgOptions {
self.enabled.insert(CfgAtom::KeyValue { key, value });
}
- pub fn difference<'a>(
- &'a self,
- other: &'a CfgOptions,
- ) -> impl Iterator<Item = &'a CfgAtom> + 'a {
- self.enabled.difference(&other.enabled)
- }
-
pub fn apply_diff(&mut self, diff: CfgDiff) {
for atom in diff.enable {
self.enabled.insert(atom);
diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs
index 62fb429a63..a1ae15fcdd 100644
--- a/crates/cfg/src/tests.rs
+++ b/crates/cfg/src/tests.rs
@@ -1,12 +1,12 @@
use arbitrary::{Arbitrary, Unstructured};
use expect_test::{expect, Expect};
use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
-use syntax::{ast, AstNode};
+use syntax::{ast, AstNode, Edition};
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
fn assert_parse_result(input: &str, expected: CfgExpr) {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt);
@@ -14,7 +14,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
}
fn check_dnf(input: &str, expect: Expect) {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt);
@@ -23,7 +23,7 @@ fn check_dnf(input: &str, expect: Expect) {
}
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt);
@@ -34,7 +34,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
#[track_caller]
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt);
diff --git a/crates/flycheck/src/command.rs b/crates/flycheck/src/command.rs
index 091146a001..8ba7018316 100644
--- a/crates/flycheck/src/command.rs
+++ b/crates/flycheck/src/command.rs
@@ -4,12 +4,13 @@
use std::{
ffi::OsString,
fmt, io,
+ marker::PhantomData,
path::PathBuf,
process::{ChildStderr, ChildStdout, Command, Stdio},
};
use command_group::{CommandGroup, GroupChild};
-use crossbeam_channel::{unbounded, Receiver, Sender};
+use crossbeam_channel::Sender;
use stdx::process::streaming_output;
/// Cargo output is structured as a one JSON per line. This trait abstracts parsing one line of
@@ -99,10 +100,10 @@ pub(crate) struct CommandHandle<T> {
/// a read syscall dropping and therefore terminating the process is our best option.
child: JodGroupChild,
thread: stdx::thread::JoinHandle<io::Result<(bool, String)>>,
- pub(crate) receiver: Receiver<T>,
program: OsString,
arguments: Vec<OsString>,
current_dir: Option<PathBuf>,
+ _phantom: PhantomData<T>,
}
impl<T> fmt::Debug for CommandHandle<T> {
@@ -116,7 +117,7 @@ impl<T> fmt::Debug for CommandHandle<T> {
}
impl<T: ParseFromLine> CommandHandle<T> {
- pub(crate) fn spawn(mut command: Command) -> std::io::Result<Self> {
+ pub(crate) fn spawn(mut command: Command, sender: Sender<T>) -> std::io::Result<Self> {
command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
let mut child = command.group_spawn().map(JodGroupChild)?;
@@ -127,13 +128,12 @@ impl<T: ParseFromLine> CommandHandle<T> {
let stdout = child.0.inner().stdout.take().unwrap();
let stderr = child.0.inner().stderr.take().unwrap();
- let (sender, receiver) = unbounded();
let actor = CargoActor::<T>::new(sender, stdout, stderr);
let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
.name("CommandHandle".to_owned())
.spawn(move || actor.run())
.expect("failed to spawn thread");
- Ok(CommandHandle { program, arguments, current_dir, child, thread, receiver })
+ Ok(CommandHandle { program, arguments, current_dir, child, thread, _phantom: PhantomData })
}
pub(crate) fn cancel(mut self) {
diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs
index 4ee86954ac..5dfaaf7742 100644
--- a/crates/flycheck/src/lib.rs
+++ b/crates/flycheck/src/lib.rs
@@ -42,18 +42,49 @@ pub enum InvocationLocation {
}
#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct CargoOptions {
+ pub target_triples: Vec<String>,
+ pub all_targets: bool,
+ pub no_default_features: bool,
+ pub all_features: bool,
+ pub features: Vec<String>,
+ pub extra_args: Vec<String>,
+ pub extra_env: FxHashMap<String, String>,
+ pub target_dir: Option<Utf8PathBuf>,
+}
+
+impl CargoOptions {
+ fn apply_on_command(&self, cmd: &mut Command) {
+ for target in &self.target_triples {
+ cmd.args(["--target", target.as_str()]);
+ }
+ if self.all_targets {
+ cmd.arg("--all-targets");
+ }
+ if self.all_features {
+ cmd.arg("--all-features");
+ } else {
+ if self.no_default_features {
+ cmd.arg("--no-default-features");
+ }
+ if !self.features.is_empty() {
+ cmd.arg("--features");
+ cmd.arg(self.features.join(" "));
+ }
+ }
+ if let Some(target_dir) = &self.target_dir {
+ cmd.arg("--target-dir").arg(target_dir);
+ }
+ cmd.envs(&self.extra_env);
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
pub enum FlycheckConfig {
CargoCommand {
command: String,
- target_triples: Vec<String>,
- all_targets: bool,
- no_default_features: bool,
- all_features: bool,
- features: Vec<String>,
- extra_args: Vec<String>,
- extra_env: FxHashMap<String, String>,
+ options: CargoOptions,
ansi_color_output: bool,
- target_dir: Option<Utf8PathBuf>,
},
CustomCommand {
command: String,
@@ -184,6 +215,8 @@ struct FlycheckActor {
/// have to wrap sub-processes output handling in a thread and pass messages
/// back over a channel.
command_handle: Option<CommandHandle<CargoCheckMessage>>,
+ /// The receiver side of the channel mentioned above.
+ command_receiver: Option<Receiver<CargoCheckMessage>>,
}
enum Event {
@@ -209,6 +242,7 @@ impl FlycheckActor {
sysroot_root,
root: workspace_root,
command_handle: None,
+ command_receiver: None,
}
}
@@ -217,14 +251,13 @@ impl FlycheckActor {
}
fn next_event(&self, inbox: &Receiver<StateChange>) -> Option<Event> {
- let check_chan = self.command_handle.as_ref().map(|cargo| &cargo.receiver);
if let Ok(msg) = inbox.try_recv() {
// give restarts a preference so check outputs don't block a restart or stop
return Some(Event::RequestStateChange(msg));
}
select! {
recv(inbox) -> msg => msg.ok().map(Event::RequestStateChange),
- recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())),
+ recv(self.command_receiver.as_ref().unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())),
}
}
@@ -253,10 +286,12 @@ impl FlycheckActor {
let formatted_command = format!("{:?}", command);
tracing::debug!(?command, "will restart flycheck");
- match CommandHandle::spawn(command) {
+ let (sender, receiver) = unbounded();
+ match CommandHandle::spawn(command, sender) {
Ok(command_handle) => {
- tracing::debug!(command = formatted_command, "did restart flycheck");
+ tracing::debug!(command = formatted_command, "did restart flycheck");
self.command_handle = Some(command_handle);
+ self.command_receiver = Some(receiver);
self.report_progress(Progress::DidStart);
}
Err(error) => {
@@ -272,13 +307,15 @@ impl FlycheckActor {
// Watcher finished
let command_handle = self.command_handle.take().unwrap();
+ self.command_receiver.take();
let formatted_handle = format!("{:?}", command_handle);
let res = command_handle.join();
- if res.is_err() {
+ if let Err(error) = &res {
tracing::error!(
- "Flycheck failed to run the following command: {}",
- formatted_handle
+ "Flycheck failed to run the following command: {}, error={}",
+ formatted_handle,
+ error
);
}
self.report_progress(Progress::DidFinish(res));
@@ -332,18 +369,7 @@ impl FlycheckActor {
saved_file: Option<&AbsPath>,
) -> Option<Command> {
let (mut cmd, args) = match &self.config {
- FlycheckConfig::CargoCommand {
- command,
- target_triples,
- no_default_features,
- all_targets,
- all_features,
- extra_args,
- features,
- extra_env,
- ansi_color_output,
- target_dir,
- } => {
+ FlycheckConfig::CargoCommand { command, options, ansi_color_output } => {
let mut cmd = Command::new(Tool::Cargo.path());
if let Some(sysroot_root) = &self.sysroot_root {
cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(sysroot_root));
@@ -365,28 +391,8 @@ impl FlycheckActor {
cmd.arg("--manifest-path");
cmd.arg(self.root.join("Cargo.toml"));
- for target in target_triples {
- cmd.args(["--target", target.as_str()]);
- }
- if *all_targets {
- cmd.arg("--all-targets");
- }
- if *all_features {
- cmd.arg("--all-features");
- } else {
- if *no_default_features {
- cmd.arg("--no-default-features");
- }
- if !features.is_empty() {
- cmd.arg("--features");
- cmd.arg(features.join(" "));
- }
- }
- if let Some(target_dir) = target_dir {
- cmd.arg("--target-dir").arg(target_dir);
- }
- cmd.envs(extra_env);
- (cmd, extra_args.clone())
+ options.apply_on_command(&mut cmd);
+ (cmd, options.extra_args.clone())
}
FlycheckConfig::CustomCommand {
command,
diff --git a/crates/flycheck/src/test_runner.rs b/crates/flycheck/src/test_runner.rs
index 9f761c9ead..c136dd1366 100644
--- a/crates/flycheck/src/test_runner.rs
+++ b/crates/flycheck/src/test_runner.rs
@@ -3,11 +3,15 @@
use std::process::Command;
-use crossbeam_channel::Receiver;
+use crossbeam_channel::Sender;
+use paths::AbsPath;
use serde::Deserialize;
use toolchain::Tool;
-use crate::command::{CommandHandle, ParseFromLine};
+use crate::{
+ command::{CommandHandle, ParseFromLine},
+ CargoOptions,
+};
#[derive(Debug, Deserialize)]
#[serde(tag = "event", rename_all = "camelCase")]
@@ -51,30 +55,34 @@ impl ParseFromLine for CargoTestMessage {
#[derive(Debug)]
pub struct CargoTestHandle {
- handle: CommandHandle<CargoTestMessage>,
+ _handle: CommandHandle<CargoTestMessage>,
}
// Example of a cargo test command:
// cargo test --workspace --no-fail-fast -- module::func -Z unstable-options --format=json
impl CargoTestHandle {
- pub fn new(path: Option<&str>) -> std::io::Result<Self> {
+ pub fn new(
+ path: Option<&str>,
+ options: CargoOptions,
+ root: &AbsPath,
+ sender: Sender<CargoTestMessage>,
+ ) -> std::io::Result<Self> {
let mut cmd = Command::new(Tool::Cargo.path());
cmd.env("RUSTC_BOOTSTRAP", "1");
cmd.arg("test");
cmd.arg("--workspace");
// --no-fail-fast is needed to ensure that all requested tests will run
cmd.arg("--no-fail-fast");
+ cmd.arg("--manifest-path");
+ cmd.arg(root.join("Cargo.toml"));
+ options.apply_on_command(&mut cmd);
cmd.arg("--");
if let Some(path) = path {
cmd.arg(path);
}
cmd.args(["-Z", "unstable-options"]);
cmd.arg("--format=json");
- Ok(Self { handle: CommandHandle::spawn(cmd)? })
- }
-
- pub fn receiver(&self) -> &Receiver<CargoTestMessage> {
- &self.handle.receiver
+ Ok(Self { _handle: CommandHandle::spawn(cmd, sender)? })
}
}
diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml
index 523ff6fc40..41c59ea0d9 100644
--- a/crates/hir-def/Cargo.toml
+++ b/crates/hir-def/Cargo.toml
@@ -54,7 +54,7 @@ test-utils.workspace = true
test-fixture.workspace = true
[features]
-in-rust-tree = []
+in-rust-tree = ["hir-expand/in-rust-tree"]
[lints]
workspace = true
diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs
index fa7730f302..d9eeffd798 100644
--- a/crates/hir-def/src/attr.rs
+++ b/crates/hir-def/src/attr.rs
@@ -5,7 +5,7 @@ pub mod builtin;
#[cfg(test)]
mod tests;
-use std::{hash::Hash, ops, slice::Iter as SliceIter};
+use std::{borrow::Cow, hash::Hash, ops, slice::Iter as SliceIter};
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
@@ -141,6 +141,10 @@ impl Attrs {
}
}
+ pub fn cfgs(&self) -> impl Iterator<Item = CfgExpr> + '_ {
+ self.by_key("cfg").tt_values().map(CfgExpr::parse)
+ }
+
pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool {
match self.cfg() {
None => true,
@@ -569,6 +573,10 @@ impl<'attr> AttrQuery<'attr> {
self.attrs().find_map(|attr| attr.string_value())
}
+ pub fn string_value_unescape(self) -> Option<Cow<'attr, str>> {
+ self.attrs().find_map(|attr| attr.string_value_unescape())
+ }
+
pub fn exists(self) -> bool {
self.attrs().next().is_some()
}
diff --git a/crates/hir-def/src/attr/tests.rs b/crates/hir-def/src/attr/tests.rs
index 1a63e96bfa..9b68797fbf 100644
--- a/crates/hir-def/src/attr/tests.rs
+++ b/crates/hir-def/src/attr/tests.rs
@@ -11,7 +11,7 @@ use syntax::{ast, AstNode, TextRange};
use crate::attr::{DocAtom, DocExpr};
fn assert_parse_result(input: &str, expected: DocExpr) {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let tt = syntax_node_to_token_tree(
diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs
index da790f1151..e3d750d33c 100644
--- a/crates/hir-def/src/data.rs
+++ b/crates/hir-def/src/data.rs
@@ -510,6 +510,7 @@ pub struct ConstData {
pub type_ref: Interned<TypeRef>,
pub visibility: RawVisibility,
pub rustc_allow_incoherent_impl: bool,
+ pub has_body: bool,
}
impl ConstData {
@@ -533,6 +534,7 @@ impl ConstData {
type_ref: konst.type_ref.clone(),
visibility,
rustc_allow_incoherent_impl,
+ has_body: konst.has_body,
})
}
}
@@ -737,7 +739,7 @@ impl<'a> AssocItemCollector<'a> {
&AstIdWithPath::new(file_id, ast_id, Clone::clone(path)),
ctxt,
expand_to,
- self.expander.module.krate(),
+ self.expander.krate(),
resolver,
) {
Ok(Some(call_id)) => {
diff --git a/crates/hir-def/src/data/adt.rs b/crates/hir-def/src/data/adt.rs
index a7461b78af..0fe73418e5 100644
--- a/crates/hir-def/src/data/adt.rs
+++ b/crates/hir-def/src/data/adt.rs
@@ -26,7 +26,7 @@ use crate::{
tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree},
type_ref::TypeRef,
visibility::RawVisibility,
- EnumId, EnumVariantId, LocalFieldId, LocalModuleId, Lookup, StructId, UnionId,
+ EnumId, EnumVariantId, LocalFieldId, LocalModuleId, Lookup, StructId, UnionId, VariantId,
};
/// Note that we use `StructData` for unions as well!
@@ -191,8 +191,6 @@ impl StructData {
let krate = loc.container.krate;
let item_tree = loc.id.item_tree(db);
let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
- let cfg_options = db.crate_graph()[krate].cfg_options.clone();
-
let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
let mut flags = StructFlags::NO_FLAGS;
@@ -219,7 +217,7 @@ impl StructData {
loc.id.file_id(),
loc.container.local_id,
&item_tree,
- &cfg_options,
+ &db.crate_graph()[krate].cfg_options,
&strukt.fields,
None,
);
@@ -248,8 +246,6 @@ impl StructData {
let krate = loc.container.krate;
let item_tree = loc.id.item_tree(db);
let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
- let cfg_options = db.crate_graph()[krate].cfg_options.clone();
-
let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
let mut flags = StructFlags::NO_FLAGS;
if attrs.by_key("rustc_has_incoherent_inherent_impls").exists() {
@@ -266,7 +262,7 @@ impl StructData {
loc.id.file_id(),
loc.container.local_id,
&item_tree,
- &cfg_options,
+ &db.crate_graph()[krate].cfg_options,
&union.fields,
None,
);
@@ -338,7 +334,6 @@ impl EnumVariantData {
let container = loc.parent.lookup(db).container;
let krate = container.krate;
let item_tree = loc.id.item_tree(db);
- let cfg_options = db.crate_graph()[krate].cfg_options.clone();
let variant = &item_tree[loc.id.value];
let (var_data, diagnostics) = lower_fields(
@@ -347,7 +342,7 @@ impl EnumVariantData {
loc.id.file_id(),
container.local_id,
&item_tree,
- &cfg_options,
+ &db.crate_graph()[krate].cfg_options,
&variant.fields,
Some(item_tree[loc.parent.lookup(db).id.value].visibility),
);
@@ -383,6 +378,15 @@ impl VariantData {
VariantData::Unit => StructKind::Unit,
}
}
+
+ #[allow(clippy::self_named_constructors)]
+ pub(crate) fn variant_data(db: &dyn DefDatabase, id: VariantId) -> Arc<VariantData> {
+ match id {
+ VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
+ VariantId::EnumVariantId(it) => db.enum_variant_data(it).variant_data.clone(),
+ VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
+ }
+ }
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index 30d52d87f1..55ecabdc38 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -12,7 +12,7 @@ use crate::{
attr::{Attrs, AttrsWithOwner},
body::{scope::ExprScopes, Body, BodySourceMap},
data::{
- adt::{EnumData, EnumVariantData, StructData},
+ adt::{EnumData, EnumVariantData, StructData, VariantData},
ConstData, ExternCrateDeclData, FunctionData, ImplData, Macro2Data, MacroRulesData,
ProcMacroData, StaticData, TraitAliasData, TraitData, TypeAliasData,
},
@@ -128,6 +128,9 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
) -> (Arc<EnumVariantData>, DefDiagnostics);
#[salsa::transparent]
+ #[salsa::invoke(VariantData::variant_data)]
+ fn variant_data(&self, id: VariantId) -> Arc<VariantData>;
+ #[salsa::transparent]
#[salsa::invoke(ImplData::impl_data_query)]
fn impl_data(&self, e: ImplId) -> Arc<ImplData>;
diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs
index b0872fcdc0..73ce942c58 100644
--- a/crates/hir-def/src/expander.rs
+++ b/crates/hir-def/src/expander.rs
@@ -11,6 +11,7 @@ use hir_expand::{
};
use limit::Limit;
use syntax::{ast, Parse};
+use triomphe::Arc;
use crate::{
attr::Attrs, db::DefDatabase, lower::LowerCtx, path::Path, AsMacroCall, MacroId, ModuleId,
@@ -19,9 +20,8 @@ use crate::{
#[derive(Debug)]
pub struct Expander {
- cfg_options: CfgOptions,
+ cfg_options: Arc<CfgOptions>,
span_map: OnceCell<SpanMap>,
- krate: CrateId,
current_file_id: HirFileId,
pub(crate) module: ModuleId,
/// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached.
@@ -45,10 +45,13 @@ impl Expander {
recursion_limit,
cfg_options: db.crate_graph()[module.krate].cfg_options.clone(),
span_map: OnceCell::new(),
- krate: module.krate,
}
}
+ pub fn krate(&self) -> CrateId {
+ self.module.krate
+ }
+
pub fn enter_expand<T: ast::AstNode>(
&mut self,
db: &dyn DefDatabase,
@@ -112,7 +115,7 @@ impl Expander {
pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
Attrs::filter(
db,
- self.krate,
+ self.krate(),
RawAttrs::new(
db.upcast(),
owner,
diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs
index 0cd4a5db8c..bf728a7107 100644
--- a/crates/hir-def/src/find_path.rs
+++ b/crates/hir-def/src/find_path.rs
@@ -30,6 +30,8 @@ pub fn find_path(
find_path_inner(FindPathCtx { db, prefixed: None, prefer_no_std, prefer_prelude }, item, from)
}
+/// Find a path that can be used to refer to a certain item. This can depend on
+/// *from where* you're referring to the item, hence the `from` parameter.
pub fn find_path_prefixed(
db: &dyn DefDatabase,
item: ItemInNs,
@@ -255,7 +257,7 @@ fn find_in_scope(
item: ItemInNs,
) -> Option<Name> {
def_map.with_ancestor_maps(db, from.local_id, &mut |def_map, local_id| {
- def_map[local_id].scope.name_of(item).map(|(name, _, _)| name.clone())
+ def_map[local_id].scope.names_of(item, |name, _, _| Some(name.clone()))
})
}
@@ -608,7 +610,8 @@ mod tests {
) {
let (db, pos) = TestDB::with_position(ra_fixture);
let module = db.module_at_position(pos);
- let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};"));
+ let parsed_path_file =
+ syntax::SourceFile::parse(&format!("use {path};"), span::Edition::CURRENT);
let ast_path =
parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
let mod_path = ModPath::from_src(&db, ast_path, &mut |range| {
diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs
index 4638b37719..acc60e1d9e 100644
--- a/crates/hir-def/src/generics.rs
+++ b/crates/hir-def/src/generics.rs
@@ -3,13 +3,15 @@
//! generic parameters. See also the `Generics` type and the `generics_of` query
//! in rustc.
+use std::ops;
+
use either::Either;
use hir_expand::{
name::{AsName, Name},
ExpandResult,
};
use intern::Interned;
-use la_arena::{Arena, Idx};
+use la_arena::Arena;
use once_cell::unsync::Lazy;
use stdx::impl_from;
use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds};
@@ -23,12 +25,14 @@ use crate::{
nameres::{DefMap, MacroSubNs},
type_ref::{ConstRef, LifetimeRef, TypeBound, TypeRef},
AdtId, ConstParamId, GenericDefId, HasModule, ItemTreeLoc, LifetimeParamId,
- LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
+ LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
};
/// Data about a generic type parameter (to a function, struct, impl, ...).
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TypeParamData {
+ /// [`None`] only if the type ref is a [`TypeRef::ImplTrait`]. FIXME: Might be better to just
+ /// make it always be a value, giving impl trait a special name.
pub name: Option<Name>,
pub default: Option<Interned<TypeRef>>,
pub provenance: TypeParamProvenance,
@@ -156,6 +160,20 @@ pub struct GenericParams {
pub where_predicates: Box<[WherePredicate]>,
}
+impl ops::Index<LocalTypeOrConstParamId> for GenericParams {
+ type Output = TypeOrConstParamData;
+ fn index(&self, index: LocalTypeOrConstParamId) -> &TypeOrConstParamData {
+ &self.type_or_consts[index]
+ }
+}
+
+impl ops::Index<LocalLifetimeParamId> for GenericParams {
+ type Output = LifetimeParamData;
+ fn index(&self, index: LocalLifetimeParamId) -> &LifetimeParamData {
+ &self.lifetimes[index]
+ }
+}
+
/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined
/// where clauses like `where T: Foo + Bar` are turned into multiple of these.
/// It might still result in multiple actual predicates though, because of
@@ -197,7 +215,7 @@ impl GenericParamsCollector {
lower_ctx: &LowerCtx<'_>,
node: &dyn HasGenericParams,
add_param_attrs: impl FnMut(
- Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
+ Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
ast::GenericParam,
),
) {
@@ -225,7 +243,7 @@ impl GenericParamsCollector {
lower_ctx: &LowerCtx<'_>,
params: ast::GenericParamList,
mut add_param_attrs: impl FnMut(
- Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
+ Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
ast::GenericParam,
),
) {
@@ -414,16 +432,16 @@ impl GenericParams {
}
/// Iterator of type_or_consts field
- pub fn iter(
+ pub fn iter_type_or_consts(
&self,
- ) -> impl DoubleEndedIterator<Item = (Idx<TypeOrConstParamData>, &TypeOrConstParamData)> {
+ ) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> {
self.type_or_consts.iter()
}
/// Iterator of lifetimes field
pub fn iter_lt(
&self,
- ) -> impl DoubleEndedIterator<Item = (Idx<LifetimeParamData>, &LifetimeParamData)> {
+ ) -> impl DoubleEndedIterator<Item = (LocalLifetimeParamId, &LifetimeParamData)> {
self.lifetimes.iter()
}
diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs
index 2b059d1f8d..a60b9f9f3a 100644
--- a/crates/hir-def/src/item_scope.rs
+++ b/crates/hir-def/src/item_scope.rs
@@ -277,13 +277,43 @@ impl ItemScope {
ItemInNs::Types(def) => self.types.iter().find_map(|(name, &(other_def, vis, i))| {
(other_def == def).then_some((name, vis, i.is_none()))
}),
-
ItemInNs::Values(def) => self.values.iter().find_map(|(name, &(other_def, vis, i))| {
(other_def == def).then_some((name, vis, i.is_none()))
}),
}
}
+ /// XXX: this is O(N) rather than O(1), try to not introduce new usages.
+ pub(crate) fn names_of<T>(
+ &self,
+ item: ItemInNs,
+ mut cb: impl FnMut(&Name, Visibility, bool) -> Option<T>,
+ ) -> Option<T> {
+ match item {
+ ItemInNs::Macros(def) => self
+ .macros
+ .iter()
+ .filter_map(|(name, &(other_def, vis, i))| {
+ (other_def == def).then_some((name, vis, i.is_none()))
+ })
+ .find_map(|(a, b, c)| cb(a, b, c)),
+ ItemInNs::Types(def) => self
+ .types
+ .iter()
+ .filter_map(|(name, &(other_def, vis, i))| {
+ (other_def == def).then_some((name, vis, i.is_none()))
+ })
+ .find_map(|(a, b, c)| cb(a, b, c)),
+ ItemInNs::Values(def) => self
+ .values
+ .iter()
+ .filter_map(|(name, &(other_def, vis, i))| {
+ (other_def == def).then_some((name, vis, i.is_none()))
+ })
+ .find_map(|(a, b, c)| cb(a, b, c)),
+ }
+ }
+
pub(crate) fn traits(&self) -> impl Iterator<Item = TraitId> + '_ {
self.types
.values()
diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs
index 585e93ce21..610480736c 100644
--- a/crates/hir-def/src/item_tree.rs
+++ b/crates/hir-def/src/item_tree.rs
@@ -716,6 +716,7 @@ pub struct Const {
pub visibility: RawVisibilityId,
pub type_ref: Interned<TypeRef>,
pub ast_id: FileAstId<ast::Const>,
+ pub has_body: bool,
}
#[derive(Debug, Clone, Eq, PartialEq)]
diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs
index f02163cbe4..4b5ef56d78 100644
--- a/crates/hir-def/src/item_tree/lower.rs
+++ b/crates/hir-def/src/item_tree/lower.rs
@@ -446,7 +446,7 @@ impl<'a> Ctx<'a> {
let type_ref = self.lower_type_ref_opt(konst.ty());
let visibility = self.lower_visibility(konst);
let ast_id = self.source_ast_id_map.ast_id(konst);
- let res = Const { name, visibility, type_ref, ast_id };
+ let res = Const { name, visibility, type_ref, ast_id, has_body: konst.body().is_some() };
id(self.data().consts.alloc(res))
}
diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs
index 0c84057950..cef2a3fb86 100644
--- a/crates/hir-def/src/item_tree/pretty.rs
+++ b/crates/hir-def/src/item_tree/pretty.rs
@@ -357,7 +357,7 @@ impl Printer<'_> {
wln!(self, "}}");
}
ModItem::Const(it) => {
- let Const { name, visibility, type_ref, ast_id } = &self.tree[it];
+ let Const { name, visibility, type_ref, ast_id, has_body: _ } = &self.tree[it];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "const ");
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index 46898ce542..88d4572196 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -422,6 +422,10 @@ impl ModuleId {
}
}
+ pub fn crate_def_map(self, db: &dyn DefDatabase) -> Arc<DefMap> {
+ db.crate_def_map(self.krate)
+ }
+
pub fn krate(self) -> CrateId {
self.krate
}
@@ -438,6 +442,8 @@ impl ModuleId {
})
}
+ /// Returns the module containing `self`, either the parent `mod`, or the module (or block) containing
+ /// the block, if `self` corresponds to a block expression.
pub fn containing_module(self, db: &dyn DefDatabase) -> Option<ModuleId> {
self.def_map(db).containing_module(self.local_id)
}
@@ -929,6 +935,18 @@ impl GenericDefId {
GenericDefId::EnumVariantId(_) => (FileId::BOGUS.into(), None),
}
}
+
+ pub fn assoc_trait_container(self, db: &dyn DefDatabase) -> Option<TraitId> {
+ match match self {
+ GenericDefId::FunctionId(f) => f.lookup(db).container,
+ GenericDefId::TypeAliasId(t) => t.lookup(db).container,
+ GenericDefId::ConstId(c) => c.lookup(db).container,
+ _ => return None,
+ } {
+ ItemContainerId::TraitId(trait_) => Some(trait_),
+ _ => None,
+ }
+ }
}
impl From<AssocItemId> for GenericDefId {
diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
index 89c1b44608..163211fea5 100644
--- a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
+++ b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
@@ -610,6 +610,10 @@ struct Foo {
field1: i32,
#[cfg(never)]
field2: (),
+ #[cfg(feature = "never")]
+ field3: (),
+ #[cfg(not(feature = "never"))]
+ field4: (),
}
#[derive(Default)]
enum Bar {
@@ -618,12 +622,16 @@ enum Bar {
Bar,
}
"#,
- expect![[r#"
+ expect![[r##"
#[derive(Default)]
struct Foo {
field1: i32,
#[cfg(never)]
field2: (),
+ #[cfg(feature = "never")]
+ field3: (),
+ #[cfg(not(feature = "never"))]
+ field4: (),
}
#[derive(Default)]
enum Bar {
@@ -635,7 +643,7 @@ enum Bar {
impl < > $crate::default::Default for Foo< > where {
fn default() -> Self {
Foo {
- field1: $crate::default::Default::default(),
+ field1: $crate::default::Default::default(), field4: $crate::default::Default::default(),
}
}
}
@@ -643,6 +651,6 @@ impl < > $crate::default::Default for Bar< > where {
fn default() -> Self {
Bar::Bar
}
-}"#]],
+}"##]],
);
}
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index 23b10cfd8e..8904aca9f2 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -316,8 +316,11 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
_: Span,
_: Span,
) -> Result<Subtree, ProcMacroExpansionError> {
- let (parse, _) =
- ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
+ let (parse, _) = ::mbe::token_tree_to_syntax_node(
+ subtree,
+ ::mbe::TopEntryPoint::MacroItems,
+ span::Edition::CURRENT,
+ );
if parse.errors().is_empty() {
Ok(subtree.clone())
} else {
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index ae8f028e48..0a6cd0fe9e 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -534,8 +534,7 @@ impl DefCollector<'_> {
Edition::Edition2015 => name![rust_2015],
Edition::Edition2018 => name![rust_2018],
Edition::Edition2021 => name![rust_2021],
- // FIXME: update this when rust_2024 exists
- Edition::Edition2024 => name![rust_2021],
+ Edition::Edition2024 => name![rust_2024],
};
let path_kind = match self.def_map.data.edition {
@@ -1918,7 +1917,7 @@ impl ModCollector<'_, '_> {
}
fn collect_module(&mut self, module_id: FileItemTreeId<Mod>, attrs: &Attrs) {
- let path_attr = attrs.by_key("path").string_value();
+ let path_attr = attrs.by_key("path").string_value_unescape();
let is_macro_use = attrs.by_key("macro_use").exists();
let module = &self.item_tree[module_id];
match &module.kind {
@@ -1932,7 +1931,8 @@ impl ModCollector<'_, '_> {
module_id,
);
- let Some(mod_dir) = self.mod_dir.descend_into_definition(&module.name, path_attr)
+ let Some(mod_dir) =
+ self.mod_dir.descend_into_definition(&module.name, path_attr.as_deref())
else {
return;
};
@@ -1953,8 +1953,12 @@ impl ModCollector<'_, '_> {
ModKind::Outline => {
let ast_id = AstId::new(self.file_id(), module.ast_id);
let db = self.def_collector.db;
- match self.mod_dir.resolve_declaration(db, self.file_id(), &module.name, path_attr)
- {
+ match self.mod_dir.resolve_declaration(
+ db,
+ self.file_id(),
+ &module.name,
+ path_attr.as_deref(),
+ ) {
Ok((file_id, is_mod_rs, mod_dir)) => {
let item_tree = db.file_item_tree(file_id.into());
let krate = self.def_collector.def_map.krate;
diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml
index 4f30808015..ca05618aec 100644
--- a/crates/hir-expand/Cargo.toml
+++ b/crates/hir-expand/Cargo.toml
@@ -32,9 +32,13 @@ tt.workspace = true
mbe.workspace = true
limit.workspace = true
span.workspace = true
+parser.workspace = true
[dev-dependencies]
expect-test = "1.4.0"
+[features]
+in-rust-tree = ["syntax/in-rust-tree"]
+
[lints]
workspace = true
diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index f1540498f2..f8bf88d83c 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -1,5 +1,5 @@
//! A higher level attributes based on TokenTree, with also some shortcuts.
-use std::{fmt, ops};
+use std::{borrow::Cow, fmt, ops};
use base_db::CrateId;
use cfg::CfgExpr;
@@ -8,6 +8,7 @@ use intern::Interned;
use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec};
use span::{Span, SyntaxContextId};
+use syntax::unescape;
use syntax::{ast, format_smolstr, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
use triomphe::ThinArc;
@@ -54,8 +55,7 @@ impl RawAttrs {
Attr {
id,
input: Some(Interned::new(AttrInput::Literal(tt::Literal {
- // FIXME: Escape quotes from comment content
- text: SmolStr::new(format_smolstr!("\"{doc}\"",)),
+ text: SmolStr::new(format_smolstr!("\"{}\"", Self::escape_chars(doc))),
span,
}))),
path: Interned::new(ModPath::from(crate::name!(doc))),
@@ -74,6 +74,10 @@ impl RawAttrs {
RawAttrs { entries }
}
+ fn escape_chars(s: &str) -> String {
+ s.replace('\\', r#"\\"#).replace('"', r#"\""#)
+ }
+
pub fn from_attrs_owner(
db: &dyn ExpandDatabase,
owner: InFile<&dyn ast::HasAttrs>,
@@ -297,6 +301,18 @@ impl Attr {
}
}
+ pub fn string_value_unescape(&self) -> Option<Cow<'_, str>> {
+ match self.input.as_deref()? {
+ AttrInput::Literal(it) => match it.text.strip_prefix('r') {
+ Some(it) => {
+ it.trim_matches('#').strip_prefix('"')?.strip_suffix('"').map(Cow::Borrowed)
+ }
+ None => it.text.strip_prefix('"')?.strip_suffix('"').and_then(unescape),
+ },
+ _ => None,
+ }
+ }
+
/// #[path(ident)]
pub fn single_ident_value(&self) -> Option<&tt::Ident> {
match self.input.as_deref()? {
@@ -346,6 +362,33 @@ impl Attr {
}
}
+fn unescape(s: &str) -> Option<Cow<'_, str>> {
+ let mut buf = String::new();
+ let mut prev_end = 0;
+ let mut has_error = false;
+ unescape::unescape_unicode(s, unescape::Mode::Str, &mut |char_range, unescaped_char| match (
+ unescaped_char,
+ buf.capacity() == 0,
+ ) {
+ (Ok(c), false) => buf.push(c),
+ (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
+ prev_end = char_range.end
+ }
+ (Ok(c), true) => {
+ buf.reserve_exact(s.len());
+ buf.push_str(&s[..prev_end]);
+ buf.push(c);
+ }
+ (Err(_), _) => has_error = true,
+ });
+
+ match (has_error, buf.capacity() == 0) {
+ (true, _) => None,
+ (false, false) => Some(Cow::Owned(buf)),
+ (false, true) => Some(Cow::Borrowed(s)),
+ }
+}
+
pub fn collect_attrs(
owner: &dyn ast::HasAttrs,
) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {
diff --git a/crates/hir-expand/src/builtin_derive_macro.rs b/crates/hir-expand/src/builtin_derive_macro.rs
index 528038a9cc..94681b42a9 100644
--- a/crates/hir-expand/src/builtin_derive_macro.rs
+++ b/crates/hir-expand/src/builtin_derive_macro.rs
@@ -204,7 +204,11 @@ struct BasicAdtInfo {
}
fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> {
- let (parsed, tm) = &mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems);
+ let (parsed, tm) = &mbe::token_tree_to_syntax_node(
+ tt,
+ mbe::TopEntryPoint::MacroItems,
+ parser::Edition::CURRENT,
+ );
let macro_items = ast::MacroItems::cast(parsed.syntax_node())
.ok_or_else(|| ExpandError::other("invalid item definition"))?;
let item = macro_items.items().next().ok_or_else(|| ExpandError::other("no item found"))?;
diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs
index fd3e4e7a4d..4d6fe6db39 100644
--- a/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/crates/hir-expand/src/builtin_fn_macro.rs
@@ -219,7 +219,7 @@ fn assert_expand(
span: Span,
) -> ExpandResult<tt::Subtree> {
let call_site_span = span_with_call_site_ctxt(db, span, id);
- let args = parse_exprs_with_sep(tt, ',', call_site_span);
+ let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT);
let dollar_crate = dollar_crate(span);
let expanded = match &*args {
[cond, panic_args @ ..] => {
diff --git a/crates/hir-expand/src/cfg_process.rs b/crates/hir-expand/src/cfg_process.rs
index db3558a84e..9dd44262ba 100644
--- a/crates/hir-expand/src/cfg_process.rs
+++ b/crates/hir-expand/src/cfg_process.rs
@@ -1,57 +1,59 @@
//! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro
use std::iter::Peekable;
+use base_db::CrateId;
use cfg::{CfgAtom, CfgExpr};
use rustc_hash::FxHashSet;
use syntax::{
ast::{self, Attr, HasAttrs, Meta, VariantList},
- AstNode, NodeOrToken, SyntaxElement, SyntaxNode, T,
+ AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T,
};
use tracing::{debug, warn};
use tt::SmolStr;
use crate::{db::ExpandDatabase, proc_macro::ProcMacroKind, MacroCallLoc, MacroDefKind};
-fn check_cfg_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) -> Option<bool> {
+fn check_cfg(db: &dyn ExpandDatabase, attr: &Attr, krate: CrateId) -> Option<bool> {
if !attr.simple_name().as_deref().map(|v| v == "cfg")? {
return None;
}
- debug!("Evaluating cfg {}", attr);
let cfg = parse_from_attr_meta(attr.meta()?)?;
- debug!("Checking cfg {:?}", cfg);
- let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg) != Some(false);
+ let enabled = db.crate_graph()[krate].cfg_options.check(&cfg) != Some(false);
Some(enabled)
}
-fn check_cfg_attr_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) -> Option<bool> {
+fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: CrateId) -> Option<bool> {
if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? {
return None;
}
- debug!("Evaluating cfg_attr {}", attr);
let cfg_expr = parse_from_attr_meta(attr.meta()?)?;
- debug!("Checking cfg_attr {:?}", cfg_expr);
- let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg_expr) != Some(false);
+ let enabled = db.crate_graph()[krate].cfg_options.check(&cfg_expr) != Some(false);
Some(enabled)
}
fn process_has_attrs_with_possible_comma<I: HasAttrs>(
- items: impl Iterator<Item = I>,
- loc: &MacroCallLoc,
db: &dyn ExpandDatabase,
+ items: impl Iterator<Item = I>,
+ krate: CrateId,
remove: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
for item in items {
let field_attrs = item.attrs();
'attrs: for attr in field_attrs {
- if check_cfg_attr(&attr, loc, db).map(|enabled| !enabled).unwrap_or_default() {
- debug!("censoring type {:?}", item.syntax());
- remove.insert(item.syntax().clone().into());
- // We need to remove the , as well
- remove_possible_comma(&item, remove);
- break 'attrs;
+ if let Some(enabled) = check_cfg(db, &attr, krate) {
+ if enabled {
+ debug!("censoring {:?}", attr.syntax());
+ remove.insert(attr.syntax().clone().into());
+ } else {
+ debug!("censoring {:?}", item.syntax());
+ remove.insert(item.syntax().clone().into());
+ // We need to remove the , as well
+ remove_possible_comma(&item, remove);
+ break 'attrs;
+ }
}
- if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) {
+ if let Some(enabled) = check_cfg_attr(db, &attr, krate) {
if enabled {
debug!("Removing cfg_attr tokens {:?}", attr);
let meta = attr.meta()?;
@@ -60,13 +62,13 @@ fn process_has_attrs_with_possible_comma<I: HasAttrs>(
} else {
debug!("censoring type cfg_attr {:?}", item.syntax());
remove.insert(attr.syntax().clone().into());
- continue;
}
}
}
}
Some(())
}
+
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum CfgExprStage {
/// Stripping the CFGExpr part of the attribute
@@ -78,6 +80,7 @@ enum CfgExprStage {
// Related Issue: https://github.com/rust-lang/rust-analyzer/issues/10110
EverythingElse,
}
+
/// This function creates its own set of tokens to remove. To help prevent malformed syntax as input.
fn remove_tokens_within_cfg_attr(meta: Meta) -> Option<FxHashSet<SyntaxElement>> {
let mut remove: FxHashSet<SyntaxElement> = FxHashSet::default();
@@ -131,23 +134,28 @@ fn remove_possible_comma(item: &impl AstNode, res: &mut FxHashSet<SyntaxElement>
}
}
fn process_enum(
- variants: VariantList,
- loc: &MacroCallLoc,
db: &dyn ExpandDatabase,
+ variants: VariantList,
+ krate: CrateId,
remove: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
'variant: for variant in variants.variants() {
for attr in variant.attrs() {
- if check_cfg_attr(&attr, loc, db).map(|enabled| !enabled).unwrap_or_default() {
- // Rustc does not strip the attribute if it is enabled. So we will leave it
- debug!("censoring type {:?}", variant.syntax());
- remove.insert(variant.syntax().clone().into());
- // We need to remove the , as well
- remove_possible_comma(&variant, remove);
- continue 'variant;
- };
+ if let Some(enabled) = check_cfg(db, &attr, krate) {
+ if enabled {
+ debug!("censoring {:?}", attr.syntax());
+ remove.insert(attr.syntax().clone().into());
+ } else {
+ // cfg evaluated to false: remove the entire variant from the expansion input
+ debug!("censoring type {:?}", variant.syntax());
+ remove.insert(variant.syntax().clone().into());
+ // We need to remove the , as well
+ remove_possible_comma(&variant, remove);
+ continue 'variant;
+ }
+ }
- if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) {
+ if let Some(enabled) = check_cfg_attr(db, &attr, krate) {
if enabled {
debug!("Removing cfg_attr tokens {:?}", attr);
let meta = attr.meta()?;
@@ -156,17 +164,16 @@ fn process_enum(
} else {
debug!("censoring type cfg_attr {:?}", variant.syntax());
remove.insert(attr.syntax().clone().into());
- continue;
}
}
}
if let Some(fields) = variant.field_list() {
match fields {
ast::FieldList::RecordFieldList(fields) => {
- process_has_attrs_with_possible_comma(fields.fields(), loc, db, remove)?;
+ process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?;
}
ast::FieldList::TupleFieldList(fields) => {
- process_has_attrs_with_possible_comma(fields.fields(), loc, db, remove)?;
+ process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?;
}
}
}
@@ -175,9 +182,9 @@ fn process_enum(
}
pub(crate) fn process_cfg_attrs(
+ db: &dyn ExpandDatabase,
node: &SyntaxNode,
loc: &MacroCallLoc,
- db: &dyn ExpandDatabase,
) -> Option<FxHashSet<SyntaxElement>> {
// FIXME: #[cfg_eval] is not implemented. But it is not stable yet
let is_derive = match loc.def.kind {
@@ -193,36 +200,35 @@ pub(crate) fn process_cfg_attrs(
let item = ast::Item::cast(node.clone())?;
for attr in item.attrs() {
- if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) {
+ if let Some(enabled) = check_cfg_attr(db, &attr, loc.krate) {
if enabled {
debug!("Removing cfg_attr tokens {:?}", attr);
let meta = attr.meta()?;
let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
remove.extend(removes_from_cfg_attr);
} else {
- debug!("censoring type cfg_attr {:?}", item.syntax());
+ debug!("Removing type cfg_attr {:?}", item.syntax());
remove.insert(attr.syntax().clone().into());
- continue;
}
}
}
match item {
ast::Item::Struct(it) => match it.field_list()? {
ast::FieldList::RecordFieldList(fields) => {
- process_has_attrs_with_possible_comma(fields.fields(), loc, db, &mut remove)?;
+ process_has_attrs_with_possible_comma(db, fields.fields(), loc.krate, &mut remove)?;
}
ast::FieldList::TupleFieldList(fields) => {
- process_has_attrs_with_possible_comma(fields.fields(), loc, db, &mut remove)?;
+ process_has_attrs_with_possible_comma(db, fields.fields(), loc.krate, &mut remove)?;
}
},
ast::Item::Enum(it) => {
- process_enum(it.variant_list()?, loc, db, &mut remove)?;
+ process_enum(db, it.variant_list()?, loc.krate, &mut remove)?;
}
ast::Item::Union(it) => {
process_has_attrs_with_possible_comma(
- it.record_field_list()?.fields(),
- loc,
db,
+ it.record_field_list()?.fields(),
+ loc.krate,
&mut remove,
)?;
}
@@ -234,10 +240,22 @@ pub(crate) fn process_cfg_attrs(
/// Parses a `cfg` attribute from the meta
fn parse_from_attr_meta(meta: Meta) -> Option<CfgExpr> {
let tt = meta.token_tree()?;
- let mut iter = tt.token_trees_and_tokens().skip(1).peekable();
+ let mut iter = tt
+ .token_trees_and_tokens()
+ .filter(is_not_whitespace)
+ .skip(1)
+ .take_while(is_not_closing_paren)
+ .peekable();
next_cfg_expr_from_syntax(&mut iter)
}
+fn is_not_closing_paren(element: &NodeOrToken<ast::TokenTree, syntax::SyntaxToken>) -> bool {
+ !matches!(element, NodeOrToken::Token(token) if (token.kind() == syntax::T![')']))
+}
+fn is_not_whitespace(element: &NodeOrToken<ast::TokenTree, syntax::SyntaxToken>) -> bool {
+ !matches!(element, NodeOrToken::Token(token) if (token.kind() == SyntaxKind::WHITESPACE))
+}
+
fn next_cfg_expr_from_syntax<I>(iter: &mut Peekable<I>) -> Option<CfgExpr>
where
I: Iterator<Item = NodeOrToken<ast::TokenTree, syntax::SyntaxToken>>,
@@ -256,14 +274,13 @@ where
let Some(NodeOrToken::Node(tree)) = iter.next() else {
return Some(CfgExpr::Invalid);
};
- let mut tree_iter = tree.token_trees_and_tokens().skip(1).peekable();
- while tree_iter
- .peek()
- .filter(
- |element| matches!(element, NodeOrToken::Token(token) if (token.kind() != syntax::T![')'])),
- )
- .is_some()
- {
+ let mut tree_iter = tree
+ .token_trees_and_tokens()
+ .filter(is_not_whitespace)
+ .skip(1)
+ .take_while(is_not_closing_paren)
+ .peekable();
+ while tree_iter.peek().is_some() {
let pred = next_cfg_expr_from_syntax(&mut tree_iter);
if let Some(pred) = pred {
preds.push(pred);
@@ -310,7 +327,7 @@ mod tests {
use crate::cfg_process::parse_from_attr_meta;
fn check_dnf_from_syntax(input: &str, expect: Expect) {
- let parse = SourceFile::parse(input);
+ let parse = SourceFile::parse(input, span::Edition::CURRENT);
let node = match parse.tree().syntax().descendants().find_map(Attr::cast) {
Some(it) => it,
None => {
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 5461c1c49a..d7233a8923 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -3,7 +3,7 @@
use base_db::{salsa, CrateId, FileId, SourceDatabase};
use either::Either;
use limit::Limit;
-use mbe::syntax_node_to_token_tree;
+use mbe::{syntax_node_to_token_tree, MatchedArmIndex};
use rustc_hash::FxHashSet;
use span::{AstIdMap, Span, SyntaxContextData, SyntaxContextId};
use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
@@ -175,7 +175,7 @@ pub fn expand_speculative(
};
let censor_cfg =
- cfg_process::process_cfg_attrs(speculative_args, &loc, db).unwrap_or_default();
+ cfg_process::process_cfg_attrs(db, speculative_args, &loc).unwrap_or_default();
let mut fixups = fixup::fixup_syntax(span_map, speculative_args, span);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Token(_) => true,
@@ -225,43 +225,45 @@ pub fn expand_speculative(
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
- let mut speculative_expansion = match loc.def.kind {
- MacroDefKind::ProcMacro(expander, _, ast) => {
- let span = db.proc_macro_span(ast);
- tt.delimiter = tt::Delimiter::invisible_spanned(span);
- expander.expand(
- db,
- loc.def.krate,
- loc.krate,
- &tt,
- attr_arg.as_ref(),
- span_with_def_site_ctxt(db, span, actual_macro_call),
- span_with_call_site_ctxt(db, span, actual_macro_call),
- span_with_mixed_site_ctxt(db, span, actual_macro_call),
- )
- }
- MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
- pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
- }
- MacroDefKind::Declarative(it) => {
- db.decl_macro_expander(loc.krate, it).expand_unhygienic(db, tt, loc.def.krate, span)
- }
- MacroDefKind::BuiltIn(it, _) => {
- it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
- }
- MacroDefKind::BuiltInDerive(it, ..) => {
- it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
- }
- MacroDefKind::BuiltInEager(it, _) => {
- it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
- }
- MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt, span),
- };
+ let mut speculative_expansion =
+ match loc.def.kind {
+ MacroDefKind::ProcMacro(expander, _, ast) => {
+ let span = db.proc_macro_span(ast);
+ tt.delimiter = tt::Delimiter::invisible_spanned(span);
+ expander.expand(
+ db,
+ loc.def.krate,
+ loc.krate,
+ &tt,
+ attr_arg.as_ref(),
+ span_with_def_site_ctxt(db, span, actual_macro_call),
+ span_with_call_site_ctxt(db, span, actual_macro_call),
+ span_with_mixed_site_ctxt(db, span, actual_macro_call),
+ )
+ }
+ MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
+ pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
+ }
+ MacroDefKind::Declarative(it) => db
+ .decl_macro_expander(loc.krate, it)
+ .expand_unhygienic(db, tt, loc.def.krate, span, loc.def.edition),
+ MacroDefKind::BuiltIn(it, _) => {
+ it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
+ }
+ MacroDefKind::BuiltInDerive(it, ..) => {
+ it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
+ }
+ MacroDefKind::BuiltInEager(it, _) => {
+ it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
+ }
+ MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt, span),
+ };
let expand_to = loc.expand_to();
fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
- let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
+ let (node, rev_tmap) =
+ token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition);
let syntax_node = node.syntax_node();
let token = rev_tmap
@@ -309,16 +311,20 @@ fn parse_macro_expansion(
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = tracing::span!(tracing::Level::INFO, "parse_macro_expansion").entered();
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let edition = loc.def.edition;
let expand_to = loc.expand_to();
- let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
+ let mbe::ValueResult { value: (tt, matched_arm), err } =
+ macro_expand(db, macro_file.macro_call_id, loc);
- let (parse, rev_token_map) = token_tree_to_syntax_node(
+ let (parse, mut rev_token_map) = token_tree_to_syntax_node(
match &tt {
CowArc::Arc(it) => it,
CowArc::Owned(it) => it,
},
expand_to,
+ edition,
);
+ rev_token_map.matched_arm = matched_arm;
ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
}
@@ -462,7 +468,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
let (mut tt, undo_info) = {
let syntax = item_node.syntax();
- let censor_cfg = cfg_process::process_cfg_attrs(syntax, &loc, db).unwrap_or_default();
+ let censor_cfg = cfg_process::process_cfg_attrs(db, syntax, &loc).unwrap_or_default();
let mut fixups = fixup::fixup_syntax(map.as_ref(), syntax, span);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Token(_) => true,
@@ -540,11 +546,13 @@ fn macro_expand(
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
loc: MacroCallLoc,
-) -> ExpandResult<CowArc<tt::Subtree>> {
+) -> ExpandResult<(CowArc<tt::Subtree>, MatchedArmIndex)> {
let _p = tracing::span!(tracing::Level::INFO, "macro_expand").entered();
- let (ExpandResult { value: tt, err }, span) = match loc.def.kind {
- MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc),
+ let (ExpandResult { value: (tt, matched_arm), err }, span) = match loc.def.kind {
+ MacroDefKind::ProcMacro(..) => {
+ return db.expand_proc_macro(macro_call_id).map(CowArc::Arc).zip_val(None)
+ }
_ => {
let (macro_arg, undo_info, span) =
db.macro_arg_considering_derives(macro_call_id, &loc.kind);
@@ -556,10 +564,10 @@ fn macro_expand(
.decl_macro_expander(loc.def.krate, id)
.expand(db, arg.clone(), macro_call_id, span),
MacroDefKind::BuiltIn(it, _) => {
- it.expand(db, macro_call_id, arg, span).map_err(Into::into)
+ it.expand(db, macro_call_id, arg, span).map_err(Into::into).zip_val(None)
}
MacroDefKind::BuiltInDerive(it, _) => {
- it.expand(db, macro_call_id, arg, span).map_err(Into::into)
+ it.expand(db, macro_call_id, arg, span).map_err(Into::into).zip_val(None)
}
MacroDefKind::BuiltInEager(it, _) => {
// This might look a bit odd, but we do not expand the inputs to eager macros here.
@@ -570,7 +578,8 @@ fn macro_expand(
// As such we just return the input subtree here.
let eager = match &loc.kind {
MacroCallKind::FnLike { eager: None, .. } => {
- return ExpandResult::ok(CowArc::Arc(macro_arg.clone()));
+ return ExpandResult::ok(CowArc::Arc(macro_arg.clone()))
+ .zip_val(None);
}
MacroCallKind::FnLike { eager: Some(eager), .. } => Some(&**eager),
_ => None,
@@ -582,12 +591,12 @@ fn macro_expand(
// FIXME: We should report both errors!
res.err = error.clone().or(res.err);
}
- res
+ res.zip_val(None)
}
MacroDefKind::BuiltInAttr(it, _) => {
let mut res = it.expand(db, macro_call_id, arg, span);
fixup::reverse_fixups(&mut res.value, &undo_info);
- res
+ res.zip_val(None)
}
_ => unreachable!(),
};
@@ -599,16 +608,18 @@ fn macro_expand(
if !loc.def.is_include() {
// Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) {
- return value.map(|()| {
- CowArc::Owned(tt::Subtree {
- delimiter: tt::Delimiter::invisible_spanned(span),
- token_trees: Box::new([]),
+ return value
+ .map(|()| {
+ CowArc::Owned(tt::Subtree {
+ delimiter: tt::Delimiter::invisible_spanned(span),
+ token_trees: Box::new([]),
+ })
})
- });
+ .zip_val(matched_arm);
}
}
- ExpandResult { value: CowArc::Owned(tt), err }
+ ExpandResult { value: (CowArc::Owned(tt), matched_arm), err }
}
fn proc_macro_span(db: &dyn ExpandDatabase, ast: AstId<ast::Fn>) -> Span {
@@ -668,6 +679,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
fn token_tree_to_syntax_node(
tt: &tt::Subtree,
expand_to: ExpandTo,
+ edition: parser::Edition,
) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
let entry_point = match expand_to {
ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
@@ -676,7 +688,7 @@ fn token_tree_to_syntax_node(
ExpandTo::Type => mbe::TopEntryPoint::Type,
ExpandTo::Expr => mbe::TopEntryPoint::Expr,
};
- mbe::token_tree_to_syntax_node(tt, entry_point)
+ mbe::token_tree_to_syntax_node(tt, entry_point, edition)
}
fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {
diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs
index 9a0b218e6d..66465ce600 100644
--- a/crates/hir-expand/src/declarative.rs
+++ b/crates/hir-expand/src/declarative.rs
@@ -2,7 +2,8 @@
use std::sync::OnceLock;
use base_db::{CrateId, VersionReq};
-use span::{MacroCallId, Span, SyntaxContextId};
+use span::{Edition, MacroCallId, Span, SyntaxContextId};
+use stdx::TupleExt;
use syntax::{ast, AstNode};
use triomphe::Arc;
@@ -30,7 +31,7 @@ impl DeclarativeMacroExpander {
tt: tt::Subtree,
call_id: MacroCallId,
span: Span,
- ) -> ExpandResult<tt::Subtree> {
+ ) -> ExpandResult<(tt::Subtree, Option<u32>)> {
let loc = db.lookup_intern_macro_call(call_id);
let toolchain = db.toolchain(loc.def.krate);
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
@@ -46,7 +47,7 @@ impl DeclarativeMacroExpander {
});
match self.mac.err() {
Some(_) => ExpandResult::new(
- tt::Subtree::empty(tt::DelimSpan { open: span, close: span }),
+ (tt::Subtree::empty(tt::DelimSpan { open: span, close: span }), None),
ExpandError::MacroDefinition,
),
None => self
@@ -56,6 +57,7 @@ impl DeclarativeMacroExpander {
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
new_meta_vars,
span,
+ loc.def.edition,
)
.map_err(Into::into),
}
@@ -67,6 +69,7 @@ impl DeclarativeMacroExpander {
tt: tt::Subtree,
krate: CrateId,
call_site: Span,
+ def_site_edition: Edition,
) -> ExpandResult<tt::Subtree> {
let toolchain = db.toolchain(krate);
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
@@ -85,7 +88,11 @@ impl DeclarativeMacroExpander {
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::MacroDefinition,
),
- None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
+ None => self
+ .mac
+ .expand(&tt, |_| (), new_meta_vars, call_site, def_site_edition)
+ .map(TupleExt::head)
+ .map_err(Into::into),
}
}
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index 959595afb5..711acfeb3d 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -396,7 +396,7 @@ mod tests {
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
- let parsed = syntax::SourceFile::parse(ra_fixture);
+ let parsed = syntax::SourceFile::parse(ra_fixture, span::Edition::CURRENT);
let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let fixups = super::fixup_syntax(
span_map.as_ref(),
@@ -417,7 +417,11 @@ mod tests {
expect.assert_eq(&actual);
// the fixed-up tree should be syntactically valid
- let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems);
+ let (parse, _) = mbe::token_tree_to_syntax_node(
+ &tt,
+ ::mbe::TopEntryPoint::MacroItems,
+ parser::Edition::CURRENT,
+ );
assert!(
parse.errors().is_empty(),
"parse has syntax errors. parse tree:\n{:#?}",
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index db8bbeccef..338bd25ede 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -3,7 +3,7 @@
//! Specifically, it implements a concept of `MacroFile` -- a file whose syntax
//! tree originates not from the text of some `FileId`, but from some macro
//! expansion.
-
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![warn(rust_2018_idioms, unused_lifetimes)]
pub mod attrs;
diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs
index 0b69799e6b..8f74bffc2b 100644
--- a/crates/hir-expand/src/name.rs
+++ b/crates/hir-expand/src/name.rs
@@ -303,6 +303,7 @@ pub mod known {
rust_2015,
rust_2018,
rust_2021,
+ rust_2024,
v1,
new_display,
new_debug,
diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml
index bf47374016..a83ee9824e 100644
--- a/crates/hir-ty/Cargo.toml
+++ b/crates/hir-ty/Cargo.toml
@@ -61,7 +61,7 @@ test-utils.workspace = true
test-fixture.workspace = true
[features]
-in-rust-tree = []
+in-rust-tree = ["hir-expand/in-rust-tree"]
[lints]
workspace = true
diff --git a/crates/hir-ty/src/builder.rs b/crates/hir-ty/src/builder.rs
index cb118a3684..41acd3555e 100644
--- a/crates/hir-ty/src/builder.rs
+++ b/crates/hir-ty/src/builder.rs
@@ -74,6 +74,10 @@ impl<D> TyBuilder<D> {
(self.data, subst)
}
+ pub fn build_into_subst(self) -> Substitution {
+ self.build_internal().1
+ }
+
pub fn push(mut self, arg: impl CastTo<GenericArg>) -> Self {
assert!(self.remaining() > 0);
let arg = arg.cast(Interner);
@@ -291,7 +295,6 @@ impl TyBuilder<hir_def::AdtId> {
) -> Self {
// Note that we're building ADT, so we never have parent generic parameters.
let defaults = db.generic_defaults(self.data.into());
- let dummy_ty = TyKind::Error.intern(Interner).cast(Interner);
for default_ty in defaults.iter().skip(self.vec.len()) {
// NOTE(skip_binders): we only check if the arg type is error type.
if let Some(x) = default_ty.skip_binders().ty(Interner) {
@@ -301,13 +304,16 @@ impl TyBuilder<hir_def::AdtId> {
}
}
// Each default can only depend on the previous parameters.
- // FIXME: we don't handle const generics here.
let subst_so_far = Substitution::from_iter(
Interner,
self.vec
.iter()
.cloned()
- .chain(iter::repeat(dummy_ty.clone()))
+ .chain(self.param_kinds[self.vec.len()..].iter().map(|it| match it {
+ ParamKind::Type => TyKind::Error.intern(Interner).cast(Interner),
+ ParamKind::Lifetime => error_lifetime().cast(Interner),
+ ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+ }))
.take(self.param_kinds.len()),
);
self.vec.push(default_ty.clone().substitute(Interner, &subst_so_far).cast(Interner));
diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs
index d1aebeff26..0bf01b0bc6 100644
--- a/crates/hir-ty/src/chalk_ext.rs
+++ b/crates/hir-ty/src/chalk_ext.rs
@@ -1,6 +1,8 @@
//! Various extensions traits for Chalk types.
-use chalk_ir::{cast::Cast, FloatTy, IntTy, Mutability, Scalar, TyVariableKind, UintTy};
+use chalk_ir::{
+ cast::Cast, FloatTy, IntTy, Mutability, Scalar, TyVariableKind, TypeOutlives, UintTy,
+};
use hir_def::{
builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType, BuiltinUint},
generics::TypeOrConstParamData,
@@ -312,7 +314,7 @@ impl TyExt for Ty {
.generic_predicates(id.parent)
.iter()
.map(|pred| pred.clone().substitute(Interner, &substs))
- .filter(|wc| match &wc.skip_binders() {
+ .filter(|wc| match wc.skip_binders() {
WhereClause::Implemented(tr) => {
&tr.self_type_parameter(Interner) == self
}
@@ -320,6 +322,9 @@ impl TyExt for Ty {
alias: AliasTy::Projection(proj),
ty: _,
}) => &proj.self_type_parameter(db) == self,
+ WhereClause::TypeOutlives(TypeOutlives { ty, lifetime: _ }) => {
+ ty == self
+ }
_ => false,
})
.collect::<Vec<_>>();
diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index 705609ba68..f09277a92e 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -77,30 +77,32 @@ pub(crate) fn path_to_const(
resolver: &Resolver,
path: &Path,
mode: ParamLoweringMode,
- args_lazy: impl FnOnce() -> Generics,
+ args: impl FnOnce() -> Option<Generics>,
debruijn: DebruijnIndex,
expected_ty: Ty,
) -> Option<Const> {
match resolver.resolve_path_in_value_ns_fully(db.upcast(), path) {
Some(ValueNs::GenericParam(p)) => {
let ty = db.const_param_ty(p);
- let args = args_lazy();
let value = match mode {
ParamLoweringMode::Placeholder => {
ConstValue::Placeholder(to_placeholder_idx(db, p.into()))
}
- ParamLoweringMode::Variable => match args.param_idx(p.into()) {
- Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)),
- None => {
- never!(
- "Generic list doesn't contain this param: {:?}, {:?}, {:?}",
- args,
- path,
- p
- );
- return None;
+ ParamLoweringMode::Variable => {
+ let args = args();
+ match args.as_ref().and_then(|args| args.type_or_const_param_idx(p.into())) {
+ Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)),
+ None => {
+ never!(
+ "Generic list doesn't contain this param: {:?}, {:?}, {:?}",
+ args,
+ path,
+ p
+ );
+ return None;
+ }
}
- },
+ }
};
Some(ConstData { ty, value }.intern(Interner))
}
@@ -285,7 +287,6 @@ pub(crate) fn eval_to_const(
expr: ExprId,
mode: ParamLoweringMode,
ctx: &mut InferenceContext<'_>,
- args: impl FnOnce() -> Generics,
debruijn: DebruijnIndex,
) -> Const {
let db = ctx.db;
@@ -304,7 +305,9 @@ pub(crate) fn eval_to_const(
}
if let Expr::Path(p) = &ctx.body.exprs[expr] {
let resolver = &ctx.resolver;
- if let Some(c) = path_to_const(db, resolver, p, mode, args, debruijn, infer[expr].clone()) {
+ if let Some(c) =
+ path_to_const(db, resolver, p, mode, || ctx.generics(), debruijn, infer[expr].clone())
+ {
return c;
}
}
diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs
index 38eb3371e3..ecbb1d4c60 100644
--- a/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -43,7 +43,7 @@ mod allow {
}
pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec<IncorrectCase> {
- let _p = tracing::span!(tracing::Level::INFO, "validate_module_item").entered();
+ let _p = tracing::span!(tracing::Level::INFO, "incorrect_case").entered();
let mut validator = DeclValidator::new(db);
validator.validate_item(owner);
validator.sink
diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs
index 20b0da441d..a5a42c52af 100644
--- a/crates/hir-ty/src/diagnostics/expr.rs
+++ b/crates/hir-ty/src/diagnostics/expr.rs
@@ -11,6 +11,7 @@ use hir_def::{ItemContainerId, Lookup};
use hir_expand::name;
use itertools::Itertools;
use rustc_hash::FxHashSet;
+use rustc_pattern_analysis::constructor::Constructor;
use syntax::{ast, AstNode};
use tracing::debug;
use triomphe::Arc;
@@ -190,45 +191,45 @@ impl ExprValidator {
let pattern_arena = Arena::new();
let mut m_arms = Vec::with_capacity(arms.len());
let mut has_lowering_errors = false;
+ // Note: Skipping the entire diagnostic rather than just not including a faulty match arm is
+ // preferred to avoid the chance of false positives.
for arm in arms {
- if let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) {
- // We only include patterns whose type matches the type
- // of the scrutinee expression. If we had an InvalidMatchArmPattern
- // diagnostic or similar we could raise that in an else
- // block here.
- //
- // When comparing the types, we also have to consider that rustc
- // will automatically de-reference the scrutinee expression type if
- // necessary.
- //
- // FIXME we should use the type checker for this.
- if (pat_ty == scrut_ty
- || scrut_ty
- .as_reference()
- .map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
- .unwrap_or(false))
- && types_of_subpatterns_do_match(arm.pat, &self.body, &self.infer)
- {
- // If we had a NotUsefulMatchArm diagnostic, we could
- // check the usefulness of each pattern as we added it
- // to the matrix here.
- let pat = self.lower_pattern(&cx, arm.pat, db, &mut has_lowering_errors);
- let m_arm = pat_analysis::MatchArm {
- pat: pattern_arena.alloc(pat),
- has_guard: arm.guard.is_some(),
- arm_data: (),
- };
- m_arms.push(m_arm);
- if !has_lowering_errors {
- continue;
- }
+ let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) else {
+ return;
+ };
+
+ // We only include patterns whose type matches the type
+ // of the scrutinee expression. If we had an InvalidMatchArmPattern
+ // diagnostic or similar we could raise that in an else
+ // block here.
+ //
+ // When comparing the types, we also have to consider that rustc
+ // will automatically de-reference the scrutinee expression type if
+ // necessary.
+ //
+ // FIXME we should use the type checker for this.
+ if (pat_ty == scrut_ty
+ || scrut_ty
+ .as_reference()
+ .map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
+ .unwrap_or(false))
+ && types_of_subpatterns_do_match(arm.pat, &self.body, &self.infer)
+ {
+ // If we had a NotUsefulMatchArm diagnostic, we could
+ // check the usefulness of each pattern as we added it
+ // to the matrix here.
+ let pat = self.lower_pattern(&cx, arm.pat, db, &mut has_lowering_errors);
+ let m_arm = pat_analysis::MatchArm {
+ pat: pattern_arena.alloc(pat),
+ has_guard: arm.guard.is_some(),
+ arm_data: (),
+ };
+ m_arms.push(m_arm);
+ if !has_lowering_errors {
+ continue;
}
}
-
- // If we can't resolve the type of a pattern, or the pattern type doesn't
- // fit the match expression, we skip this diagnostic. Skipping the entire
- // diagnostic rather than just not including this match arm is preferred
- // to avoid the chance of false positives.
+ // If the pattern type doesn't fit the match expression, we skip this diagnostic.
cov_mark::hit!(validate_match_bailed_out);
return;
}
@@ -266,15 +267,17 @@ impl ExprValidator {
let mut have_errors = false;
let deconstructed_pat = self.lower_pattern(&cx, pat, db, &mut have_errors);
+
+ // Optimization: a wildcard pattern trivially holds, so skip the usefulness check.
+ if have_errors || matches!(deconstructed_pat.ctor(), Constructor::Wildcard) {
+ continue;
+ }
+
let match_arm = rustc_pattern_analysis::MatchArm {
pat: pattern_arena.alloc(deconstructed_pat),
has_guard: false,
arm_data: (),
};
- if have_errors {
- continue;
- }
-
let report = match cx.compute_match_usefulness(&[match_arm], ty.clone()) {
Ok(v) => v,
Err(e) => {
@@ -531,8 +534,16 @@ fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResul
fn walk(pat: PatId, body: &Body, infer: &InferenceResult, has_type_mismatches: &mut bool) {
match infer.type_mismatch_for_pat(pat) {
Some(_) => *has_type_mismatches = true,
+ None if *has_type_mismatches => (),
None => {
- body[pat].walk_child_pats(|subpat| walk(subpat, body, infer, has_type_mismatches))
+ let pat = &body[pat];
+ if let Pat::ConstBlock(expr) | Pat::Lit(expr) = *pat {
+ *has_type_mismatches |= infer.type_mismatch_for_expr(expr).is_some();
+ if *has_type_mismatches {
+ return;
+ }
+ }
+ pat.walk_child_pats(|subpat| walk(subpat, body, infer, has_type_mismatches))
}
}
}
diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
index f45beb4c92..c171dbc170 100644
--- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
+++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -1,9 +1,9 @@
//! Interface with `rustc_pattern_analysis`.
use std::fmt;
-use tracing::debug;
-use hir_def::{DefWithBodyId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
+use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
+use once_cell::unsync::Lazy;
use rustc_hash::FxHashMap;
use rustc_pattern_analysis::{
constructor::{Constructor, ConstructorSet, VariantVisibility},
@@ -36,6 +36,24 @@ pub(crate) type WitnessPat<'p> = rustc_pattern_analysis::pat::WitnessPat<MatchCh
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum Void {}
+/// An index type for enum variants. This ranges from 0 to `variants.len()`, whereas `EnumVariantId`
+/// can take arbitrarily large values (and hence mustn't be used with `IndexVec`/`BitSet`).
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub(crate) struct EnumVariantContiguousIndex(usize);
+
+impl EnumVariantContiguousIndex {
+ fn from_enum_variant_id(db: &dyn HirDatabase, target_evid: EnumVariantId) -> Self {
+ // Find the index of this variant in the list of variants.
+ use hir_def::Lookup;
+ let i = target_evid.lookup(db.upcast()).index as usize;
+ EnumVariantContiguousIndex(i)
+ }
+
+ fn to_enum_variant_id(self, db: &dyn HirDatabase, eid: EnumId) -> EnumVariantId {
+ db.enum_data(eid).variants[self.0].0
+ }
+}
+
#[derive(Clone)]
pub(crate) struct MatchCheckCtx<'p> {
module: ModuleId,
@@ -73,25 +91,27 @@ impl<'p> MatchCheckCtx<'p> {
}
fn is_uninhabited(&self, ty: &Ty) -> bool {
- is_ty_uninhabited_from(ty, self.module, self.db)
+ is_ty_uninhabited_from(self.db, ty, self.module)
}
- /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
- fn is_foreign_non_exhaustive_enum(&self, ty: &Ty) -> bool {
- match ty.as_adt() {
- Some((adt @ hir_def::AdtId::EnumId(_), _)) => {
- let has_non_exhaustive_attr =
- self.db.attrs(adt.into()).by_key("non_exhaustive").exists();
- let is_local = adt.module(self.db.upcast()).krate() == self.module.krate();
- has_non_exhaustive_attr && !is_local
- }
- _ => false,
- }
+ /// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`.
+ fn is_foreign_non_exhaustive(&self, adt: hir_def::AdtId) -> bool {
+ let is_local = adt.krate(self.db.upcast()) == self.module.krate();
+ !is_local && self.db.attrs(adt.into()).by_key("non_exhaustive").exists()
}
- fn variant_id_for_adt(ctor: &Constructor<Self>, adt: hir_def::AdtId) -> Option<VariantId> {
+ fn variant_id_for_adt(
+ db: &'p dyn HirDatabase,
+ ctor: &Constructor<Self>,
+ adt: hir_def::AdtId,
+ ) -> Option<VariantId> {
match ctor {
- &Variant(id) => Some(id.into()),
+ Variant(id) => {
+ let hir_def::AdtId::EnumId(eid) = adt else {
+ panic!("bad constructor {ctor:?} for adt {adt:?}")
+ };
+ Some(id.to_enum_variant_id(db, eid).into())
+ }
Struct | UnionField => match adt {
hir_def::AdtId::EnumId(_) => None,
hir_def::AdtId::StructId(id) => Some(id.into()),
@@ -175,19 +195,24 @@ impl<'p> MatchCheckCtx<'p> {
ctor = Struct;
arity = 1;
}
- &TyKind::Adt(adt, _) => {
+ &TyKind::Adt(AdtId(adt), _) => {
ctor = match pat.kind.as_ref() {
- PatKind::Leaf { .. } if matches!(adt.0, hir_def::AdtId::UnionId(_)) => {
+ PatKind::Leaf { .. } if matches!(adt, hir_def::AdtId::UnionId(_)) => {
UnionField
}
PatKind::Leaf { .. } => Struct,
- PatKind::Variant { enum_variant, .. } => Variant(*enum_variant),
+ PatKind::Variant { enum_variant, .. } => {
+ Variant(EnumVariantContiguousIndex::from_enum_variant_id(
+ self.db,
+ *enum_variant,
+ ))
+ }
_ => {
never!();
Wildcard
}
};
- let variant = Self::variant_id_for_adt(&ctor, adt.0).unwrap();
+ let variant = Self::variant_id_for_adt(self.db, &ctor, adt).unwrap();
arity = variant.variant_data(self.db.upcast()).fields().len();
}
_ => {
@@ -239,7 +264,7 @@ impl<'p> MatchCheckCtx<'p> {
PatKind::Deref { subpattern: subpatterns.next().unwrap() }
}
TyKind::Adt(adt, substs) => {
- let variant = Self::variant_id_for_adt(pat.ctor(), adt.0).unwrap();
+ let variant = Self::variant_id_for_adt(self.db, pat.ctor(), adt.0).unwrap();
let subpatterns = self
.list_variant_fields(pat.ty(), variant)
.zip(subpatterns)
@@ -277,7 +302,7 @@ impl<'p> MatchCheckCtx<'p> {
impl<'p> PatCx for MatchCheckCtx<'p> {
type Error = ();
type Ty = Ty;
- type VariantIdx = EnumVariantId;
+ type VariantIdx = EnumVariantContiguousIndex;
type StrLit = Void;
type ArmData = ();
type PatData = PatData<'p>;
@@ -303,7 +328,7 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
// patterns. If we're here we can assume this is a box pattern.
1
} else {
- let variant = Self::variant_id_for_adt(ctor, adt).unwrap();
+ let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
variant.variant_data(self.db.upcast()).fields().len()
}
}
@@ -343,25 +368,22 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
let subst_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
single(subst_ty)
} else {
- let variant = Self::variant_id_for_adt(ctor, adt).unwrap();
- let (adt, _) = ty.as_adt().unwrap();
+ let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
- let adt_is_local =
- variant.module(self.db.upcast()).krate() == self.module.krate();
// Whether we must not match the fields of this variant exhaustively.
- let is_non_exhaustive =
- self.db.attrs(variant.into()).by_key("non_exhaustive").exists()
- && !adt_is_local;
- let visibilities = self.db.field_visibilities(variant);
+ let is_non_exhaustive = Lazy::new(|| self.is_foreign_non_exhaustive(adt));
+ let visibilities = Lazy::new(|| self.db.field_visibilities(variant));
self.list_variant_fields(ty, variant)
.map(move |(fid, ty)| {
- let is_visible = matches!(adt, hir_def::AdtId::EnumId(..))
- || visibilities[fid]
- .is_visible_from(self.db.upcast(), self.module);
+ let is_visible = || {
+ matches!(adt, hir_def::AdtId::EnumId(..))
+ || visibilities[fid]
+ .is_visible_from(self.db.upcast(), self.module)
+ };
let is_uninhabited = self.is_uninhabited(&ty);
let private_uninhabited =
- is_uninhabited && (!is_visible || is_non_exhaustive);
+ is_uninhabited && (!is_visible() || *is_non_exhaustive);
(ty, PrivateUninhabitedField(private_uninhabited))
})
.collect()
@@ -413,23 +435,26 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
TyKind::Scalar(Scalar::Char) => unhandled(),
TyKind::Scalar(Scalar::Int(..) | Scalar::Uint(..)) => unhandled(),
TyKind::Array(..) | TyKind::Slice(..) => unhandled(),
- TyKind::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), subst) => {
- let enum_data = cx.db.enum_data(*enum_id);
- let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(ty);
+ &TyKind::Adt(AdtId(adt @ hir_def::AdtId::EnumId(enum_id)), ref subst) => {
+ let enum_data = cx.db.enum_data(enum_id);
+ let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive(adt);
if enum_data.variants.is_empty() && !is_declared_nonexhaustive {
ConstructorSet::NoConstructors
} else {
- let mut variants = FxHashMap::default();
- for &(variant, _) in enum_data.variants.iter() {
+ let mut variants = FxHashMap::with_capacity_and_hasher(
+ enum_data.variants.len(),
+ Default::default(),
+ );
+ for (i, &(variant, _)) in enum_data.variants.iter().enumerate() {
let is_uninhabited =
- is_enum_variant_uninhabited_from(variant, subst, cx.module, cx.db);
+ is_enum_variant_uninhabited_from(cx.db, variant, subst, cx.module);
let visibility = if is_uninhabited {
VariantVisibility::Empty
} else {
VariantVisibility::Visible
};
- variants.insert(variant, visibility);
+ variants.insert(EnumVariantContiguousIndex(i), visibility);
}
ConstructorSet::Variants {
@@ -453,10 +478,10 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
f: &mut fmt::Formatter<'_>,
pat: &rustc_pattern_analysis::pat::DeconstructedPat<Self>,
) -> fmt::Result {
+ let db = pat.data().db;
let variant =
- pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(pat.ctor(), adt));
+ pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(db, pat.ctor(), adt));
- let db = pat.data().db;
if let Some(variant) = variant {
match variant {
VariantId::EnumVariantId(v) => {
@@ -474,7 +499,7 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
}
fn bug(&self, fmt: fmt::Arguments<'_>) {
- debug!("{}", fmt)
+ never!("{}", fmt)
}
fn complexity_exceeded(&self) -> Result<(), Self::Error> {
diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs
index cbca0e801d..081b4d83a8 100644
--- a/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -4,7 +4,7 @@
use hir_def::{
body::Body,
hir::{Expr, ExprId, UnaryOp},
- resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
+ resolver::{resolver_for_expr, ResolveValueResult, Resolver, ValueNs},
DefWithBodyId,
};
@@ -13,9 +13,9 @@ use crate::{
};
pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> {
- let infer = db.infer(def);
- let mut res = Vec::new();
+ let _p = tracing::span!(tracing::Level::INFO, "missing_unsafe").entered();
+ let mut res = Vec::new();
let is_unsafe = match def {
DefWithBodyId::FunctionId(it) => db.function_data(it).has_unsafe_kw(),
DefWithBodyId::StaticId(_)
@@ -28,6 +28,7 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> {
}
let body = db.body(def);
+ let infer = db.infer(def);
unsafe_expressions(db, &infer, def, &body, body.body_expr, &mut |expr| {
if !expr.inside_unsafe_block {
res.push(expr.expr);
@@ -51,14 +52,24 @@ pub fn unsafe_expressions(
current: ExprId,
unsafe_expr_cb: &mut dyn FnMut(UnsafeExpr),
) {
- walk_unsafe(db, infer, def, body, current, false, unsafe_expr_cb)
+ walk_unsafe(
+ db,
+ infer,
+ body,
+ &mut resolver_for_expr(db.upcast(), def, current),
+ def,
+ current,
+ false,
+ unsafe_expr_cb,
+ )
}
fn walk_unsafe(
db: &dyn HirDatabase,
infer: &InferenceResult,
- def: DefWithBodyId,
body: &Body,
+ resolver: &mut Resolver,
+ def: DefWithBodyId,
current: ExprId,
inside_unsafe_block: bool,
unsafe_expr_cb: &mut dyn FnMut(UnsafeExpr),
@@ -73,13 +84,14 @@ fn walk_unsafe(
}
}
Expr::Path(path) => {
- let resolver = resolver_for_expr(db.upcast(), def, current);
+ let g = resolver.update_to_inner_scope(db.upcast(), def, current);
let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path);
if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial {
if db.static_data(id).mutable {
unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
}
}
+ resolver.reset_to_guard(g);
}
Expr::MethodCall { .. } => {
if infer
@@ -97,13 +109,13 @@ fn walk_unsafe(
}
Expr::Unsafe { .. } => {
return expr.walk_child_exprs(|child| {
- walk_unsafe(db, infer, def, body, child, true, unsafe_expr_cb);
+ walk_unsafe(db, infer, body, resolver, def, child, true, unsafe_expr_cb);
});
}
_ => {}
}
expr.walk_child_exprs(|child| {
- walk_unsafe(db, infer, def, body, child, inside_unsafe_block, unsafe_expr_cb);
+ walk_unsafe(db, infer, body, resolver, def, child, inside_unsafe_block, unsafe_expr_cb);
});
}
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index 8740ae6797..a357e85035 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -9,6 +9,7 @@ use std::{
use base_db::CrateId;
use chalk_ir::{BoundVar, Safety, TyKind};
+use either::Either;
use hir_def::{
data::adt::VariantData,
db::DefDatabase,
@@ -27,7 +28,7 @@ use intern::{Internable, Interned};
use itertools::Itertools;
use la_arena::ArenaMap;
use smallvec::SmallVec;
-use stdx::never;
+use stdx::{never, IsNoneOr};
use triomphe::Arc;
use crate::{
@@ -40,10 +41,11 @@ use crate::{
mir::pad16,
primitive, to_assoc_type_id,
utils::{self, detect_variant_from_bytes, generics, ClosureSubst},
- AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstScalar, ConstValue,
- DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives,
- MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar,
- Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyExt, WhereClause,
+ AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const,
+ ConstScalar, ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime,
+ LifetimeData, LifetimeOutlives, MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt,
+ QuantifiedWhereClause, Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty,
+ TyExt, WhereClause,
};
pub trait HirWrite: fmt::Write {
@@ -58,11 +60,18 @@ impl HirWrite for String {}
impl HirWrite for fmt::Formatter<'_> {}
pub struct HirFormatter<'a> {
+ /// The database handle
pub db: &'a dyn HirDatabase,
+ /// The sink to write into
fmt: &'a mut dyn HirWrite,
+ /// A buffer to intercept writes with, this allows us to track the overall size of the formatted output.
buf: String,
+ /// The current size of the formatted output.
curr_size: usize,
- pub(crate) max_size: Option<usize>,
+ /// Size from which we should truncate the output.
+ max_size: Option<usize>,
+ /// When rendering something that has a concept of "children" (like fields in a struct), this limits
+ /// how many should be rendered.
pub entity_limit: Option<usize>,
omit_verbose_types: bool,
closure_style: ClosureStyle,
@@ -302,7 +311,6 @@ impl DisplayTarget {
#[derive(Debug)]
pub enum DisplaySourceCodeError {
PathNotFound,
- UnknownType,
Coroutine,
OpaqueType,
}
@@ -414,12 +422,7 @@ impl HirDisplay for ProjectionTy {
let proj_params_count =
self.substitution.len(Interner) - trait_ref.substitution.len(Interner);
let proj_params = &self.substitution.as_slice(Interner)[..proj_params_count];
- if !proj_params.is_empty() {
- write!(f, "<")?;
- f.write_joined(proj_params, ", ")?;
- write!(f, ">")?;
- }
- Ok(())
+ hir_fmt_generics(f, proj_params, None)
}
}
@@ -452,7 +455,7 @@ impl HirDisplay for Const {
ConstValue::Placeholder(idx) => {
let id = from_placeholder_idx(f.db, *idx);
let generics = generics(f.db.upcast(), id.parent);
- let param_data = &generics.params.type_or_consts[id.local_id];
+ let param_data = &generics.params[id.local_id];
write!(f, "{}", param_data.name().unwrap().display(f.db.upcast()))?;
Ok(())
}
@@ -460,7 +463,11 @@ impl HirDisplay for Const {
ConstScalar::Bytes(b, m) => render_const_scalar(f, b, m, &data.ty),
ConstScalar::UnevaluatedConst(c, parameters) => {
write!(f, "{}", c.name(f.db.upcast()))?;
- hir_fmt_generics(f, parameters, c.generic_def(f.db.upcast()))?;
+ hir_fmt_generics(
+ f,
+ parameters.as_slice(Interner),
+ c.generic_def(f.db.upcast()),
+ )?;
Ok(())
}
ConstScalar::Unknown => f.write_char('_'),
@@ -936,36 +943,31 @@ impl HirDisplay for Ty {
}
};
f.end_location_link();
+
if parameters.len(Interner) > 0 {
let generics = generics(db.upcast(), def.into());
- let (
- parent_params,
- self_param,
- type_params,
- const_params,
- _impl_trait_params,
- lifetime_params,
- ) = generics.provenance_split();
- let total_len =
- parent_params + self_param + type_params + const_params + lifetime_params;
+ let (parent_len, self_, type_, const_, impl_, lifetime) =
+ generics.provenance_split();
+ let parameters = parameters.as_slice(Interner);
// We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
- if total_len > 0 {
+ if parameters.len() - impl_ > 0 {
// `parameters` are in the order of fn's params (including impl traits), fn's lifetimes
// parent's params (those from enclosing impl or trait, if any).
- let parameters = parameters.as_slice(Interner);
- let fn_params_len = self_param + type_params + const_params;
- // This will give slice till last type or const
- let fn_params = parameters.get(..fn_params_len);
- let fn_lt_params =
- parameters.get(fn_params_len..(fn_params_len + lifetime_params));
- let parent_params = parameters.get(parameters.len() - parent_params..);
- let params = parent_params
- .into_iter()
- .chain(fn_lt_params)
- .chain(fn_params)
- .flatten();
+ let (fn_params, other) =
+ parameters.split_at(self_ + type_ + const_ + lifetime);
+ let (_impl, parent_params) = other.split_at(impl_);
+ debug_assert_eq!(parent_params.len(), parent_len);
+
+ let parent_params =
+ generic_args_sans_defaults(f, Some(def.into()), parent_params);
+ let fn_params = generic_args_sans_defaults(f, Some(def.into()), fn_params);
+
write!(f, "<")?;
- f.write_joined(params, ", ")?;
+ hir_fmt_generic_arguments(f, parent_params)?;
+ if !parent_params.is_empty() && !fn_params.is_empty() {
+ write!(f, ", ")?;
+ }
+ hir_fmt_generic_arguments(f, fn_params)?;
write!(f, ">")?;
}
}
@@ -1009,7 +1011,7 @@ impl HirDisplay for Ty {
let generic_def = self.as_generic_def(db);
- hir_fmt_generics(f, parameters, generic_def)?;
+ hir_fmt_generics(f, parameters.as_slice(Interner), generic_def)?;
}
TyKind::AssociatedType(assoc_type_id, parameters) => {
let type_alias = from_assoc_type_id(*assoc_type_id);
@@ -1032,20 +1034,15 @@ impl HirDisplay for Ty {
f.end_location_link();
// Note that the generic args for the associated type come before those for the
// trait (including the self type).
- // FIXME: reconsider the generic args order upon formatting?
- if parameters.len(Interner) > 0 {
- write!(f, "<")?;
- f.write_joined(parameters.as_slice(Interner), ", ")?;
- write!(f, ">")?;
- }
+ hir_fmt_generics(f, parameters.as_slice(Interner), None)
} else {
let projection_ty = ProjectionTy {
associated_ty_id: to_assoc_type_id(type_alias),
substitution: parameters.clone(),
};
- projection_ty.hir_fmt(f)?;
- }
+ projection_ty.hir_fmt(f)
+ }?;
}
TyKind::Foreign(type_alias) => {
let alias = from_foreign_def_id(*type_alias);
@@ -1072,6 +1069,7 @@ impl HirDisplay for Ty {
write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
+ Either::Left(self),
bounds.skip_binders(),
SizedByDefault::Sized { anchor: krate },
)?;
@@ -1087,6 +1085,7 @@ impl HirDisplay for Ty {
write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
+ Either::Left(self),
bounds.skip_binders(),
SizedByDefault::Sized { anchor: krate },
)?;
@@ -1137,7 +1136,7 @@ impl HirDisplay for Ty {
}
ClosureStyle::ClosureWithSubst => {
write!(f, "{{closure#{:?}}}", id.0.as_u32())?;
- return hir_fmt_generics(f, substs, None);
+ return hir_fmt_generics(f, substs.as_slice(Interner), None);
}
_ => (),
}
@@ -1173,7 +1172,7 @@ impl HirDisplay for Ty {
TyKind::Placeholder(idx) => {
let id = from_placeholder_idx(db, *idx);
let generics = generics(db.upcast(), id.parent);
- let param_data = &generics.params.type_or_consts[id.local_id];
+ let param_data = &generics.params[id.local_id];
match param_data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
@@ -1189,21 +1188,24 @@ impl HirDisplay for Ty {
.generic_predicates(id.parent)
.iter()
.map(|pred| pred.clone().substitute(Interner, &substs))
- .filter(|wc| match &wc.skip_binders() {
+ .filter(|wc| match wc.skip_binders() {
WhereClause::Implemented(tr) => {
- &tr.self_type_parameter(Interner) == self
+ tr.self_type_parameter(Interner) == *self
}
WhereClause::AliasEq(AliasEq {
alias: AliasTy::Projection(proj),
ty: _,
- }) => &proj.self_type_parameter(db) == self,
- _ => false,
+ }) => proj.self_type_parameter(db) == *self,
+ WhereClause::AliasEq(_) => false,
+ WhereClause::TypeOutlives(to) => to.ty == *self,
+ WhereClause::LifetimeOutlives(_) => false,
})
.collect::<Vec<_>>();
let krate = id.parent.module(db.upcast()).krate();
write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
+ Either::Left(self),
&bounds,
SizedByDefault::Sized { anchor: krate },
)?;
@@ -1229,6 +1231,7 @@ impl HirDisplay for Ty {
write_bounds_like_dyn_trait_with_prefix(
f,
"dyn",
+ Either::Left(self),
&bounds,
SizedByDefault::NotSized,
)?;
@@ -1252,6 +1255,7 @@ impl HirDisplay for Ty {
write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
+ Either::Left(self),
bounds.skip_binders(),
SizedByDefault::Sized { anchor: krate },
)?;
@@ -1266,6 +1270,7 @@ impl HirDisplay for Ty {
write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
+ Either::Left(self),
bounds.skip_binders(),
SizedByDefault::Sized { anchor: krate },
)?;
@@ -1277,11 +1282,10 @@ impl HirDisplay for Ty {
}
TyKind::Error => {
if f.display_target.is_source_code() {
- return Err(HirDisplayError::DisplaySourceCodeError(
- DisplaySourceCodeError::UnknownType,
- ));
+ f.write_char('_')?;
+ } else {
+ write!(f, "{{unknown}}")?;
}
- write!(f, "{{unknown}}")?;
}
TyKind::InferenceVar(..) => write!(f, "_")?,
TyKind::Coroutine(_, subst) => {
@@ -1318,93 +1322,92 @@ impl HirDisplay for Ty {
fn hir_fmt_generics(
f: &mut HirFormatter<'_>,
- parameters: &Substitution,
+ parameters: &[GenericArg],
generic_def: Option<hir_def::GenericDefId>,
) -> Result<(), HirDisplayError> {
- let db = f.db;
- if parameters.len(Interner) > 0 {
- use std::cmp::Ordering;
- let param_compare =
- |a: &GenericArg, b: &GenericArg| match (a.data(Interner), b.data(Interner)) {
- (crate::GenericArgData::Lifetime(_), crate::GenericArgData::Lifetime(_)) => {
- Ordering::Equal
- }
- (crate::GenericArgData::Lifetime(_), _) => Ordering::Less,
- (_, crate::GenericArgData::Lifetime(_)) => Ordering::Less,
- (_, _) => Ordering::Equal,
- };
- let parameters_to_write = if f.display_target.is_source_code() || f.omit_verbose_types() {
- match generic_def
- .map(|generic_def_id| db.generic_defaults(generic_def_id))
- .filter(|defaults| !defaults.is_empty())
- {
- None => parameters.as_slice(Interner),
- Some(default_parameters) => {
- fn should_show(
- parameter: &GenericArg,
- default_parameters: &[Binders<GenericArg>],
- i: usize,
- parameters: &Substitution,
- ) -> bool {
- if parameter.ty(Interner).map(|it| it.kind(Interner))
- == Some(&TyKind::Error)
- {
- return true;
- }
- if let Some(ConstValue::Concrete(c)) =
- parameter.constant(Interner).map(|it| &it.data(Interner).value)
- {
- if c.interned == ConstScalar::Unknown {
- return true;
- }
- }
- if parameter.lifetime(Interner).map(|it| it.data(Interner))
- == Some(&crate::LifetimeData::Static)
- {
- return true;
+ if parameters.is_empty() {
+ return Ok(());
+ }
+
+ let parameters_to_write = generic_args_sans_defaults(f, generic_def, parameters);
+ if !parameters_to_write.is_empty() {
+ write!(f, "<")?;
+ hir_fmt_generic_arguments(f, parameters_to_write)?;
+ write!(f, ">")?;
+ }
+
+ Ok(())
+}
+
+fn generic_args_sans_defaults<'ga>(
+ f: &mut HirFormatter<'_>,
+ generic_def: Option<hir_def::GenericDefId>,
+ parameters: &'ga [GenericArg],
+) -> &'ga [GenericArg] {
+ if f.display_target.is_source_code() || f.omit_verbose_types() {
+ match generic_def
+ .map(|generic_def_id| f.db.generic_defaults(generic_def_id))
+ .filter(|it| !it.is_empty())
+ {
+ None => parameters,
+ Some(default_parameters) => {
+ let should_show = |arg: &GenericArg, i: usize| {
+ let is_err = |arg: &GenericArg| match arg.data(Interner) {
+ chalk_ir::GenericArgData::Lifetime(it) => {
+ *it.data(Interner) == LifetimeData::Error
}
- let default_parameter = match default_parameters.get(i) {
- Some(it) => it,
- None => return true,
- };
- let actual_default =
- default_parameter.clone().substitute(Interner, &parameters);
- parameter != &actual_default
+ chalk_ir::GenericArgData::Ty(it) => *it.kind(Interner) == TyKind::Error,
+ chalk_ir::GenericArgData::Const(it) => matches!(
+ it.data(Interner).value,
+ ConstValue::Concrete(ConcreteConst {
+ interned: ConstScalar::Unknown,
+ ..
+ })
+ ),
+ };
+ // if the arg is error like, render it to inform the user
+ if is_err(arg) {
+ return true;
}
- let mut default_from = 0;
- for (i, parameter) in parameters.iter(Interner).enumerate() {
- if should_show(parameter, &default_parameters, i, parameters) {
- default_from = i + 1;
- }
+ // otherwise, if the arg is equal to the param default, hide it (unless the
+ // default is an error which can happen for the trait Self type)
+ default_parameters.get(i).is_none_or(|default_parameter| {
+ // !is_err(default_parameter.skip_binders())
+ // &&
+ arg != &default_parameter.clone().substitute(Interner, &parameters)
+ })
+ };
+ let mut default_from = 0;
+ for (i, parameter) in parameters.iter().enumerate() {
+ if should_show(parameter, i) {
+ default_from = i + 1;
}
- &parameters.as_slice(Interner)[0..default_from]
- }
- }
- } else {
- parameters.as_slice(Interner)
- };
- //FIXME: Should handle the ordering of lifetimes when creating substitutions
- let mut parameters_to_write = parameters_to_write.to_vec();
- parameters_to_write.sort_by(param_compare);
- if !parameters_to_write.is_empty() {
- write!(f, "<")?;
- let mut first = true;
- for generic_arg in parameters_to_write {
- if !first {
- write!(f, ", ")?;
- }
- first = false;
- if f.display_target.is_source_code()
- && generic_arg.ty(Interner).map(|ty| ty.kind(Interner)) == Some(&TyKind::Error)
- {
- write!(f, "_")?;
- } else {
- generic_arg.hir_fmt(f)?;
}
+ &parameters[0..default_from]
}
+ }
+ } else {
+ parameters
+ }
+}
- write!(f, ">")?;
+fn hir_fmt_generic_arguments(
+ f: &mut HirFormatter<'_>,
+ parameters: &[GenericArg],
+) -> Result<(), HirDisplayError> {
+ let mut first = true;
+ let lifetime_offset = parameters.iter().position(|arg| arg.lifetime(Interner).is_some());
+
+ let (ty_or_const, lifetimes) = match lifetime_offset {
+ Some(offset) => parameters.split_at(offset),
+ None => (parameters, &[][..]),
+ };
+ for generic_arg in lifetimes.iter().chain(ty_or_const) {
+ if !first {
+ write!(f, ", ")?;
}
+ first = false;
+ generic_arg.hir_fmt(f)?;
}
Ok(())
}
@@ -1468,6 +1471,7 @@ impl SizedByDefault {
pub fn write_bounds_like_dyn_trait_with_prefix(
f: &mut HirFormatter<'_>,
prefix: &str,
+ this: Either<&Ty, &Lifetime>,
predicates: &[QuantifiedWhereClause],
default_sized: SizedByDefault,
) -> Result<(), HirDisplayError> {
@@ -1476,7 +1480,7 @@ pub fn write_bounds_like_dyn_trait_with_prefix(
|| predicates.is_empty() && matches!(default_sized, SizedByDefault::Sized { .. })
{
write!(f, " ")?;
- write_bounds_like_dyn_trait(f, predicates, default_sized)
+ write_bounds_like_dyn_trait(f, this, predicates, default_sized)
} else {
Ok(())
}
@@ -1484,6 +1488,7 @@ pub fn write_bounds_like_dyn_trait_with_prefix(
fn write_bounds_like_dyn_trait(
f: &mut HirFormatter<'_>,
+ this: Either<&Ty, &Lifetime>,
predicates: &[QuantifiedWhereClause],
default_sized: SizedByDefault,
) -> Result<(), HirDisplayError> {
@@ -1524,23 +1529,54 @@ fn write_bounds_like_dyn_trait(
f.start_location_link(trait_.into());
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
f.end_location_link();
- if let [_, params @ ..] = trait_ref.substitution.as_slice(Interner) {
- if is_fn_trait {
+ if is_fn_trait {
+ if let [_self, params @ ..] = trait_ref.substitution.as_slice(Interner) {
if let Some(args) =
params.first().and_then(|it| it.assert_ty_ref(Interner).as_tuple())
{
write!(f, "(")?;
- f.write_joined(args.as_slice(Interner), ", ")?;
+ hir_fmt_generic_arguments(f, args.as_slice(Interner))?;
write!(f, ")")?;
}
- } else if !params.is_empty() {
- write!(f, "<")?;
- f.write_joined(params, ", ")?;
- // there might be assoc type bindings, so we leave the angle brackets open
- angle_open = true;
}
+ } else {
+ let params = generic_args_sans_defaults(
+ f,
+ Some(trait_.into()),
+ trait_ref.substitution.as_slice(Interner),
+ );
+ if let [_self, params @ ..] = params {
+ if !params.is_empty() {
+ write!(f, "<")?;
+ hir_fmt_generic_arguments(f, params)?;
+ // there might be assoc type bindings, so we leave the angle brackets open
+ angle_open = true;
+ }
+ }
+ }
+ }
+ WhereClause::TypeOutlives(to) if Either::Left(&to.ty) == this => {
+ if !is_fn_trait && angle_open {
+ write!(f, ">")?;
+ angle_open = false;
+ }
+ if !first {
+ write!(f, " + ")?;
}
+ to.lifetime.hir_fmt(f)?;
}
+ WhereClause::TypeOutlives(_) => {}
+ WhereClause::LifetimeOutlives(lo) if Either::Right(&lo.a) == this => {
+ if !is_fn_trait && angle_open {
+ write!(f, ">")?;
+ angle_open = false;
+ }
+ if !first {
+ write!(f, " + ")?;
+ }
+ lo.b.hir_fmt(f)?;
+ }
+ WhereClause::LifetimeOutlives(_) => {}
WhereClause::AliasEq(alias_eq) if is_fn_trait => {
is_fn_trait = false;
if !alias_eq.ty.is_unit() {
@@ -1567,9 +1603,9 @@ fn write_bounds_like_dyn_trait(
let proj_arg_count = generics(f.db.upcast(), assoc_ty_id.into()).len_self();
if proj_arg_count > 0 {
write!(f, "<")?;
- f.write_joined(
+ hir_fmt_generic_arguments(
+ f,
&proj.substitution.as_slice(Interner)[..proj_arg_count],
- ", ",
)?;
write!(f, ">")?;
}
@@ -1577,10 +1613,6 @@ fn write_bounds_like_dyn_trait(
}
ty.hir_fmt(f)?;
}
-
- // FIXME implement these
- WhereClause::LifetimeOutlives(_) => {}
- WhereClause::TypeOutlives(_) => {}
}
first = false;
}
@@ -1630,12 +1662,7 @@ fn fmt_trait_ref(
f.start_location_link(trait_.into());
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
f.end_location_link();
- if tr.substitution.len(Interner) > 1 {
- write!(f, "<")?;
- f.write_joined(&tr.substitution.as_slice(Interner)[1..], ", ")?;
- write!(f, ">")?;
- }
- Ok(())
+ hir_fmt_generics(f, &tr.substitution.as_slice(Interner)[1..], None)
}
impl HirDisplay for TraitRef {
@@ -1690,16 +1717,18 @@ impl HirDisplay for Lifetime {
impl HirDisplay for LifetimeData {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
match self {
- LifetimeData::BoundVar(idx) => idx.hir_fmt(f),
- LifetimeData::InferenceVar(_) => write!(f, "_"),
LifetimeData::Placeholder(idx) => {
let id = lt_from_placeholder_idx(f.db, *idx);
let generics = generics(f.db.upcast(), id.parent);
- let param_data = &generics.params.lifetimes[id.local_id];
+ let param_data = &generics.params[id.local_id];
write!(f, "{}", param_data.name.display(f.db.upcast()))?;
Ok(())
}
+ _ if f.display_target.is_source_code() => write!(f, "'_"),
+ LifetimeData::BoundVar(idx) => idx.hir_fmt(f),
+ LifetimeData::InferenceVar(_) => write!(f, "_"),
LifetimeData::Static => write!(f, "'static"),
+ LifetimeData::Error => write!(f, "'{{error}}"),
LifetimeData::Erased => Ok(()),
LifetimeData::Phantom(_, _) => Ok(()),
}
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index be3b50e141..281386e136 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -55,12 +55,12 @@ use triomphe::Arc;
use crate::{
db::HirDatabase,
- fold_tys,
+ error_lifetime, fold_tys,
infer::{coerce::CoerceMany, unify::InferenceTable},
lower::ImplTraitLoweringMode,
- static_lifetime, to_assoc_type_id,
+ to_assoc_type_id,
traits::FnTrait,
- utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},
+ utils::{Generics, InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},
AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId,
ImplTraitIdx, InEnvironment, Interner, Lifetime, OpaqueTyId, ProjectionTy, Substitution,
TraitEnvironment, Ty, TyBuilder, TyExt,
@@ -326,7 +326,7 @@ pub struct Adjustment {
impl Adjustment {
pub fn borrow(m: Mutability, ty: Ty) -> Self {
- let ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner);
+ let ty = TyKind::Ref(m, error_lifetime(), ty).intern(Interner);
Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty }
}
}
@@ -429,7 +429,10 @@ pub struct InferenceResult {
/// Type of the result of `.into_iter()` on the for. `ExprId` is the one of the whole for loop.
pub type_of_for_iterator: FxHashMap<ExprId, Ty>,
type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
+ /// Whether there are any type-mismatching errors in the result.
+ pub(crate) has_errors: bool,
/// Interned common types to return references to.
+ // FIXME: Move this into `InferenceContext`
standard_types: InternedStandardTypes,
/// Stores the types which were implicitly dereferenced in pattern binding modes.
pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
@@ -628,6 +631,10 @@ impl<'a> InferenceContext<'a> {
}
}
+ pub(crate) fn generics(&self) -> Option<Generics> {
+ Some(crate::utils::generics(self.db.upcast(), self.resolver.generic_def()?))
+ }
+
// FIXME: This function should be private in module. It is currently only used in the consteval, since we need
// `InferenceResult` in the middle of inference. See the fixme comment in `consteval::eval_to_const`. If you
// used this function for another workaround, mention it here. If you really need this function and believe that
@@ -654,6 +661,7 @@ impl<'a> InferenceContext<'a> {
type_of_rpit,
type_of_for_iterator,
type_mismatches,
+ has_errors,
standard_types: _,
pat_adjustments,
binding_modes: _,
@@ -695,6 +703,9 @@ impl<'a> InferenceContext<'a> {
for ty in type_of_for_iterator.values_mut() {
*ty = table.resolve_completely(ty.clone());
}
+
+ *has_errors = !type_mismatches.is_empty();
+
type_mismatches.retain(|_, mismatch| {
mismatch.expected = table.resolve_completely(mismatch.expected.clone());
mismatch.actual = table.resolve_completely(mismatch.actual.clone());
@@ -1646,9 +1657,11 @@ impl std::ops::BitOrAssign for Diverges {
*self = *self | other;
}
}
-/// A zipper that checks for unequal `{unknown}` occurrences in the two types. Used to filter out
-/// mismatch diagnostics that only differ in `{unknown}`. These mismatches are usually not helpful.
-/// As the cause is usually an underlying name resolution problem.
+
+/// A zipper that checks for unequal occurrences of `{unknown}` and unresolved projections
+/// in the two types. Used to filter out mismatch diagnostics that only differ in
+/// `{unknown}` and unresolved projections. These mismatches are usually not helpful.
+/// As the cause is usually an underlying name resolution problem.
struct UnknownMismatch<'db>(&'db dyn HirDatabase);
impl chalk_ir::zip::Zipper<Interner> for UnknownMismatch<'_> {
fn zip_tys(&mut self, variance: Variance, a: &Ty, b: &Ty) -> chalk_ir::Fallible<()> {
@@ -1721,7 +1734,12 @@ impl chalk_ir::zip::Zipper<Interner> for UnknownMismatch<'_> {
zip_substs(self, None, &fn_ptr_a.substitution.0, &fn_ptr_b.substitution.0)?
}
(TyKind::Error, TyKind::Error) => (),
- (TyKind::Error, _) | (_, TyKind::Error) => return Err(chalk_ir::NoSolution),
+ (TyKind::Error, _)
+ | (_, TyKind::Error)
+ | (TyKind::Alias(AliasTy::Projection(_)) | TyKind::AssociatedType(_, _), _)
+ | (_, TyKind::Alias(AliasTy::Projection(_)) | TyKind::AssociatedType(_, _)) => {
+ return Err(chalk_ir::NoSolution)
+ }
_ => (),
}
diff --git a/crates/hir-ty/src/infer/cast.rs b/crates/hir-ty/src/infer/cast.rs
index f8c03ee288..060b5f36f2 100644
--- a/crates/hir-ty/src/infer/cast.rs
+++ b/crates/hir-ty/src/infer/cast.rs
@@ -19,10 +19,6 @@ impl CastCheck {
let expr_ty = table.resolve_ty_shallow(&self.expr_ty);
let cast_ty = table.resolve_ty_shallow(&self.cast_ty);
- if expr_ty.contains_unknown() || cast_ty.contains_unknown() {
- return;
- }
-
if table.coerce(&expr_ty, &cast_ty).is_ok() {
return;
}
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index 32845ac2e3..a25498eff3 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -22,11 +22,11 @@ use stdx::never;
use crate::{
db::{HirDatabase, InternedClosure},
- from_chalk_trait_id, from_placeholder_idx, make_binders,
+ error_lifetime, from_chalk_trait_id, from_placeholder_idx, make_binders,
mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem},
- static_lifetime, to_chalk_trait_id,
+ to_chalk_trait_id,
traits::FnTrait,
- utils::{self, elaborate_clause_supertraits, generics, Generics},
+ utils::{self, elaborate_clause_supertraits, Generics},
Adjust, Adjustment, AliasEq, AliasTy, Binders, BindingMode, ChalkTraitId, ClosureId, DynTy,
DynTyExt, FnAbi, FnPointer, FnSig, Interner, OpaqueTy, ProjectionTyExt, Substitution, Ty,
TyExt, WhereClause,
@@ -324,21 +324,17 @@ impl CapturedItemWithoutTy {
BorrowKind::Mut { .. } => Mutability::Mut,
_ => Mutability::Not,
};
- TyKind::Ref(m, static_lifetime(), ty).intern(Interner)
+ TyKind::Ref(m, error_lifetime(), ty).intern(Interner)
}
};
return CapturedItem {
place: self.place,
kind: self.kind,
span: self.span,
- ty: replace_placeholder_with_binder(ctx.db, ctx.owner, ty),
+ ty: replace_placeholder_with_binder(ctx, ty),
};
- fn replace_placeholder_with_binder(
- db: &dyn HirDatabase,
- owner: DefWithBodyId,
- ty: Ty,
- ) -> Binders<Ty> {
+ fn replace_placeholder_with_binder(ctx: &mut InferenceContext<'_>, ty: Ty) -> Binders<Ty> {
struct Filler<'a> {
db: &'a dyn HirDatabase,
generics: Generics,
@@ -361,7 +357,7 @@ impl CapturedItemWithoutTy {
outer_binder: DebruijnIndex,
) -> Result<chalk_ir::Const<Interner>, Self::Error> {
let x = from_placeholder_idx(self.db, idx);
- let Some(idx) = self.generics.param_idx(x) else {
+ let Some(idx) = self.generics.type_or_const_param_idx(x) else {
return Err(());
};
Ok(BoundVar::new(outer_binder, idx).to_const(Interner, ty))
@@ -373,18 +369,18 @@ impl CapturedItemWithoutTy {
outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
let x = from_placeholder_idx(self.db, idx);
- let Some(idx) = self.generics.param_idx(x) else {
+ let Some(idx) = self.generics.type_or_const_param_idx(x) else {
return Err(());
};
Ok(BoundVar::new(outer_binder, idx).to_ty(Interner))
}
}
- let Some(generic_def) = owner.as_generic_def_id() else {
+ let Some(generics) = ctx.generics() else {
return Binders::empty(Interner, ty);
};
- let filler = &mut Filler { db, generics: generics(db.upcast(), generic_def) };
+ let filler = &mut Filler { db: ctx.db, generics };
let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty);
- make_binders(db, &filler.generics, result)
+ make_binders(ctx.db, &filler.generics, result)
}
}
}
diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs
index cfbbc9dd6c..72928851f1 100644
--- a/crates/hir-ty/src/infer/coerce.rs
+++ b/crates/hir-ty/src/infer/coerce.rs
@@ -18,11 +18,11 @@ use triomphe::Arc;
use crate::{
autoderef::{Autoderef, AutoderefKind},
db::HirDatabase,
+ error_lifetime,
infer::{
Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast,
TypeError, TypeMismatch,
},
- static_lifetime,
utils::ClosureSubst,
Canonical, DomainGoal, FnAbi, FnPointer, FnSig, Guidance, InEnvironment, Interner, Solution,
Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
@@ -427,7 +427,7 @@ impl InferenceTable<'_> {
// compare those. Note that this means we use the target
// mutability [1], since it may be that we are coercing
// from `&mut T` to `&U`.
- let lt = static_lifetime(); // FIXME: handle lifetimes correctly, see rustc
+ let lt = error_lifetime(); // FIXME: handle lifetimes correctly, see rustc
let derefd_from_ty = TyKind::Ref(to_mt, lt, referent_ty).intern(Interner);
match autoderef.table.try_unify(&derefd_from_ty, to_ty) {
Ok(result) => {
@@ -621,7 +621,7 @@ impl InferenceTable<'_> {
(TyKind::Ref(from_mt, _, from_inner), &TyKind::Ref(to_mt, _, _)) => {
coerce_mutabilities(*from_mt, to_mt)?;
- let lt = static_lifetime();
+ let lt = error_lifetime();
Some((
Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
Adjustment {
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index 35d5967935..d011a62e77 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -23,6 +23,7 @@ use crate::{
autoderef::{builtin_deref, deref_by_trait, Autoderef},
consteval,
db::{InternedClosure, InternedCoroutine},
+ error_lifetime,
infer::{
coerce::{CoerceMany, CoercionCause},
find_continuable,
@@ -630,7 +631,7 @@ impl InferenceContext<'_> {
let inner_ty = self.infer_expr_inner(*expr, &expectation);
match rawness {
Rawness::RawPtr => TyKind::Raw(mutability, inner_ty),
- Rawness::Ref => TyKind::Ref(mutability, static_lifetime(), inner_ty),
+ Rawness::Ref => TyKind::Ref(mutability, error_lifetime(), inner_ty),
}
.intern(Interner)
}
@@ -1039,18 +1040,12 @@ impl InferenceContext<'_> {
(
elem_ty,
- if let Some(g_def) = self.owner.as_generic_def_id() {
- let generics = generics(self.db.upcast(), g_def);
- consteval::eval_to_const(
- repeat,
- ParamLoweringMode::Placeholder,
- self,
- || generics,
- DebruijnIndex::INNERMOST,
- )
- } else {
- consteval::usize_const(self.db, None, krate)
- },
+ consteval::eval_to_const(
+ repeat,
+ ParamLoweringMode::Placeholder,
+ self,
+ DebruijnIndex::INNERMOST,
+ ),
)
}
};
@@ -1851,7 +1846,7 @@ impl InferenceContext<'_> {
ty,
c,
ParamLoweringMode::Placeholder,
- || generics(this.db.upcast(), this.resolver.generic_def().unwrap()),
+ || this.generics(),
DebruijnIndex::INNERMOST,
)
},
diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs
index 09a4d998ee..1b354935a5 100644
--- a/crates/hir-ty/src/infer/pat.rs
+++ b/crates/hir-ty/src/infer/pat.rs
@@ -2,21 +2,22 @@
use std::iter::repeat_with;
-use chalk_ir::Mutability;
use hir_def::{
body::Body,
hir::{Binding, BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, Literal, Pat, PatId},
path::Path,
};
use hir_expand::name::Name;
+use stdx::TupleExt;
use crate::{
consteval::{try_const_usize, usize_const},
+ error_lifetime,
infer::{BindingMode, Expectation, InferenceContext, TypeMismatch},
lower::lower_to_chalk_mutability,
primitive::UintTy,
- static_lifetime, InferenceDiagnostic, Interner, Scalar, Substitution, Ty, TyBuilder, TyExt,
- TyKind,
+ static_lifetime, InferenceDiagnostic, Interner, Mutability, Scalar, Substitution, Ty,
+ TyBuilder, TyExt, TyKind,
};
/// Used to generalize patterns and assignee expressions.
@@ -89,9 +90,6 @@ impl InferenceContext<'_> {
self.unify(&ty, expected);
- let substs =
- ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));
-
match def {
_ if subs.is_empty() => {}
Some(def) => {
@@ -108,8 +106,10 @@ impl InferenceContext<'_> {
let pre_iter = pre.iter().enumerate();
let post_iter = (post_idx_offset..).zip(post.iter());
+ let substs = ty.as_adt().map(TupleExt::tail);
+
for (i, &subpat) in pre_iter.chain(post_iter) {
- let field_def = {
+ let expected_ty = {
match variant_data.field(&Name::new_tuple_field(i)) {
Some(local_id) => {
if !visibilities[local_id]
@@ -117,17 +117,17 @@ impl InferenceContext<'_> {
{
// FIXME(DIAGNOSE): private tuple field
}
- Some(local_id)
+ let f = field_types[local_id].clone();
+ let expected_ty = match substs {
+ Some(substs) => f.substitute(Interner, substs),
+ None => f.substitute(Interner, &Substitution::empty(Interner)),
+ };
+ self.normalize_associated_types_in(expected_ty)
}
- None => None,
+ None => self.err_ty(),
}
};
- let expected_ty = field_def.map_or(self.err_ty(), |f| {
- field_types[f].clone().substitute(Interner, &substs)
- });
- let expected_ty = self.normalize_associated_types_in(expected_ty);
-
T::infer(self, subpat, &expected_ty, default_bm);
}
}
@@ -149,7 +149,7 @@ impl InferenceContext<'_> {
expected: &Ty,
default_bm: T::BindingMode,
id: T,
- subs: impl Iterator<Item = (Name, T)> + ExactSizeIterator,
+ subs: impl ExactSizeIterator<Item = (Name, T)>,
) -> Ty {
let (ty, def) = self.resolve_variant(path, false);
if let Some(variant) = def {
@@ -158,9 +158,6 @@ impl InferenceContext<'_> {
self.unify(&ty, expected);
- let substs =
- ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));
-
match def {
_ if subs.len() == 0 => {}
Some(def) => {
@@ -168,8 +165,10 @@ impl InferenceContext<'_> {
let variant_data = def.variant_data(self.db.upcast());
let visibilities = self.db.field_visibilities(def);
+ let substs = ty.as_adt().map(TupleExt::tail);
+
for (name, inner) in subs {
- let field_def = {
+ let expected_ty = {
match variant_data.field(&name) {
Some(local_id) => {
if !visibilities[local_id]
@@ -180,23 +179,23 @@ impl InferenceContext<'_> {
private: true,
});
}
- Some(local_id)
+ let f = field_types[local_id].clone();
+ let expected_ty = match substs {
+ Some(substs) => f.substitute(Interner, substs),
+ None => f.substitute(Interner, &Substitution::empty(Interner)),
+ };
+ self.normalize_associated_types_in(expected_ty)
}
None => {
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
field: inner.into(),
private: false,
});
- None
+ self.err_ty()
}
}
};
- let expected_ty = field_def.map_or(self.err_ty(), |f| {
- field_types[f].clone().substitute(Interner, &substs)
- });
- let expected_ty = self.normalize_associated_types_in(expected_ty);
-
T::infer(self, inner, &expected_ty, default_bm);
}
}
@@ -396,14 +395,14 @@ impl InferenceContext<'_> {
None => {
let inner_ty = self.table.new_type_var();
let ref_ty =
- TyKind::Ref(mutability, static_lifetime(), inner_ty.clone()).intern(Interner);
+ TyKind::Ref(mutability, error_lifetime(), inner_ty.clone()).intern(Interner);
// Unification failure will be reported by the caller.
self.unify(&ref_ty, expected);
inner_ty
}
};
let subty = self.infer_pat(inner_pat, &expectation, default_bm);
- TyKind::Ref(mutability, static_lifetime(), subty).intern(Interner)
+ TyKind::Ref(mutability, error_lifetime(), subty).intern(Interner)
}
fn infer_bind_pat(
@@ -430,7 +429,7 @@ impl InferenceContext<'_> {
let bound_ty = match mode {
BindingMode::Ref(mutability) => {
- TyKind::Ref(mutability, static_lifetime(), inner_ty.clone()).intern(Interner)
+ TyKind::Ref(mutability, error_lifetime(), inner_ty.clone()).intern(Interner)
}
BindingMode::Move => inner_ty.clone(),
};
diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs
index afb89fe1e5..b68fefc515 100644
--- a/crates/hir-ty/src/infer/unify.rs
+++ b/crates/hir-ty/src/infer/unify.rs
@@ -16,8 +16,8 @@ use triomphe::Arc;
use super::{InferOk, InferResult, InferenceContext, TypeError};
use crate::{
- consteval::unknown_const, db::HirDatabase, fold_generic_args, fold_tys_and_consts,
- static_lifetime, to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical,
+ consteval::unknown_const, db::HirDatabase, error_lifetime, fold_generic_args,
+ fold_tys_and_consts, to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical,
Const, ConstValue, DebruijnIndex, DomainGoal, GenericArg, GenericArgData, Goal, GoalData,
Guidance, InEnvironment, InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind, ProjectionTy,
ProjectionTyExt, Scalar, Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
@@ -43,40 +43,21 @@ impl InferenceContext<'_> {
let obligations = pending_obligations
.iter()
.filter_map(|obligation| match obligation.value.value.goal.data(Interner) {
- GoalData::DomainGoal(DomainGoal::Holds(
- clause @ WhereClause::AliasEq(AliasEq {
- alias: AliasTy::Projection(projection),
- ..
- }),
- )) => {
- let projection_self = projection.self_type_parameter(self.db);
- let uncanonical = chalk_ir::Substitute::apply(
- &obligation.free_vars,
- projection_self,
- Interner,
- );
- if matches!(
- self.resolve_ty_shallow(&uncanonical).kind(Interner),
- TyKind::InferenceVar(iv, TyVariableKind::General) if *iv == root,
- ) {
- Some(chalk_ir::Substitute::apply(
- &obligation.free_vars,
- clause.clone(),
- Interner,
- ))
- } else {
- None
- }
- }
- GoalData::DomainGoal(DomainGoal::Holds(
- clause @ WhereClause::Implemented(trait_ref),
- )) => {
- let trait_ref_self = trait_ref.self_type_parameter(Interner);
- let uncanonical = chalk_ir::Substitute::apply(
- &obligation.free_vars,
- trait_ref_self,
- Interner,
- );
+ GoalData::DomainGoal(DomainGoal::Holds(clause)) => {
+ let ty = match clause {
+ WhereClause::AliasEq(AliasEq {
+ alias: AliasTy::Projection(projection),
+ ..
+ }) => projection.self_type_parameter(self.db),
+ WhereClause::Implemented(trait_ref) => {
+ trait_ref.self_type_parameter(Interner)
+ }
+ WhereClause::TypeOutlives(to) => to.ty.clone(),
+ _ => return None,
+ };
+
+ let uncanonical =
+ chalk_ir::Substitute::apply(&obligation.free_vars, ty, Interner);
if matches!(
self.resolve_ty_shallow(&uncanonical).kind(Interner),
TyKind::InferenceVar(iv, TyVariableKind::General) if *iv == root,
@@ -121,8 +102,9 @@ impl<T: HasInterner<Interner = Interner>> Canonicalized<T> {
VariableKind::Ty(TyVariableKind::General) => ctx.new_type_var().cast(Interner),
VariableKind::Ty(TyVariableKind::Integer) => ctx.new_integer_var().cast(Interner),
VariableKind::Ty(TyVariableKind::Float) => ctx.new_float_var().cast(Interner),
- // Chalk can sometimes return new lifetime variables. We just use the static lifetime everywhere
- VariableKind::Lifetime => static_lifetime().cast(Interner),
+ // Chalk can sometimes return new lifetime variables. We just replace them by errors
+ // for now.
+ VariableKind::Lifetime => error_lifetime().cast(Interner),
VariableKind::Const(ty) => ctx.new_const_var(ty.clone()).cast(Interner),
}),
);
@@ -1020,11 +1002,11 @@ mod resolve {
_var: InferenceVar,
_outer_binder: DebruijnIndex,
) -> Lifetime {
- // fall back all lifetimes to 'static -- currently we don't deal
+ // fall back all lifetimes to 'error -- currently we don't deal
// with any lifetimes, but we can sometimes get some lifetime
// variables through Chalk's unification, and this at least makes
// sure we don't leak them outside of inference
- crate::static_lifetime()
+ crate::error_lifetime()
}
}
}
diff --git a/crates/hir-ty/src/inhabitedness.rs b/crates/hir-ty/src/inhabitedness.rs
index 532b650e8f..7546369d8d 100644
--- a/crates/hir-ty/src/inhabitedness.rs
+++ b/crates/hir-ty/src/inhabitedness.rs
@@ -5,42 +5,36 @@ use chalk_ir::{
visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
DebruijnIndex,
};
-use hir_def::{
- attr::Attrs, data::adt::VariantData, visibility::Visibility, AdtId, EnumVariantId, HasModule,
- ModuleId, VariantId,
-};
+use hir_def::{visibility::Visibility, AdtId, EnumVariantId, HasModule, ModuleId, VariantId};
use rustc_hash::FxHashSet;
use crate::{
consteval::try_const_usize, db::HirDatabase, Binders, Interner, Substitution, Ty, TyKind,
};
+// FIXME: Turn this into a query, it can be quite slow
/// Checks whether a type is visibly uninhabited from a particular module.
-pub(crate) fn is_ty_uninhabited_from(ty: &Ty, target_mod: ModuleId, db: &dyn HirDatabase) -> bool {
+pub(crate) fn is_ty_uninhabited_from(db: &dyn HirDatabase, ty: &Ty, target_mod: ModuleId) -> bool {
+ let _p = tracing::span!(tracing::Level::INFO, "is_ty_uninhabited_from", ?ty).entered();
let mut uninhabited_from =
UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
let inhabitedness = ty.visit_with(&mut uninhabited_from, DebruijnIndex::INNERMOST);
inhabitedness == BREAK_VISIBLY_UNINHABITED
}
+// FIXME: Turn this into a query, it can be quite slow
/// Checks whether a variant is visibly uninhabited from a particular module.
pub(crate) fn is_enum_variant_uninhabited_from(
+ db: &dyn HirDatabase,
variant: EnumVariantId,
subst: &Substitution,
target_mod: ModuleId,
- db: &dyn HirDatabase,
) -> bool {
- let is_local = variant.module(db.upcast()).krate() == target_mod.krate();
+ let _p = tracing::span!(tracing::Level::INFO, "is_enum_variant_uninhabited_from").entered();
let mut uninhabited_from =
UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
- let inhabitedness = uninhabited_from.visit_variant(
- variant.into(),
- &db.enum_variant_data(variant).variant_data,
- subst,
- &db.attrs(variant.into()),
- is_local,
- );
+ let inhabitedness = uninhabited_from.visit_variant(variant.into(), subst);
inhabitedness == BREAK_VISIBLY_UNINHABITED
}
@@ -98,34 +92,18 @@ impl TypeVisitor<Interner> for UninhabitedFrom<'_> {
impl UninhabitedFrom<'_> {
fn visit_adt(&mut self, adt: AdtId, subst: &Substitution) -> ControlFlow<VisiblyUninhabited> {
- let attrs = self.db.attrs(adt.into());
- let adt_non_exhaustive = attrs.by_key("non_exhaustive").exists();
- let is_local = adt.module(self.db.upcast()).krate() == self.target_mod.krate();
- if adt_non_exhaustive && !is_local {
- return CONTINUE_OPAQUELY_INHABITED;
- }
-
// An ADT is uninhabited iff all its variants uninhabited.
match adt {
// rustc: For now, `union`s are never considered uninhabited.
AdtId::UnionId(_) => CONTINUE_OPAQUELY_INHABITED,
- AdtId::StructId(s) => {
- let struct_data = self.db.struct_data(s);
- self.visit_variant(s.into(), &struct_data.variant_data, subst, &attrs, is_local)
- }
+ AdtId::StructId(s) => self.visit_variant(s.into(), subst),
AdtId::EnumId(e) => {
let enum_data = self.db.enum_data(e);
for &(variant, _) in enum_data.variants.iter() {
- let variant_inhabitedness = self.visit_variant(
- variant.into(),
- &self.db.enum_variant_data(variant).variant_data,
- subst,
- &self.db.attrs(variant.into()),
- is_local,
- );
+ let variant_inhabitedness = self.visit_variant(variant.into(), subst);
match variant_inhabitedness {
- Break(VisiblyUninhabited) => continue,
+ Break(VisiblyUninhabited) => (),
Continue(()) => return CONTINUE_OPAQUELY_INHABITED,
}
}
@@ -137,34 +115,36 @@ impl UninhabitedFrom<'_> {
fn visit_variant(
&mut self,
variant: VariantId,
- variant_data: &VariantData,
subst: &Substitution,
- attrs: &Attrs,
- is_local: bool,
) -> ControlFlow<VisiblyUninhabited> {
- let non_exhaustive_field_list = attrs.by_key("non_exhaustive").exists();
- if non_exhaustive_field_list && !is_local {
+ let is_local = variant.krate(self.db.upcast()) == self.target_mod.krate();
+ if !is_local && self.db.attrs(variant.into()).by_key("non_exhaustive").exists() {
+ return CONTINUE_OPAQUELY_INHABITED;
+ }
+
+ let variant_data = self.db.variant_data(variant);
+ let fields = variant_data.fields();
+ if fields.is_empty() {
return CONTINUE_OPAQUELY_INHABITED;
}
let is_enum = matches!(variant, VariantId::EnumVariantId(..));
let field_tys = self.db.field_types(variant);
- let field_vis = self.db.field_visibilities(variant);
+ let field_vis = if is_enum { None } else { Some(self.db.field_visibilities(variant)) };
- for (fid, _) in variant_data.fields().iter() {
- self.visit_field(field_vis[fid], &field_tys[fid], subst, is_enum)?;
+ for (fid, _) in fields.iter() {
+ self.visit_field(field_vis.as_ref().map(|it| it[fid]), &field_tys[fid], subst)?;
}
CONTINUE_OPAQUELY_INHABITED
}
fn visit_field(
&mut self,
- vis: Visibility,
+ vis: Option<Visibility>,
ty: &Binders<Ty>,
subst: &Substitution,
- is_enum: bool,
) -> ControlFlow<VisiblyUninhabited> {
- if is_enum || vis.is_visible_from(self.db.upcast(), self.target_mod) {
+ if vis.map_or(true, |it| it.is_visible_from(self.db.upcast(), self.target_mod)) {
let ty = ty.clone().substitute(Interner, subst);
ty.visit_with(self, DebruijnIndex::INNERMOST)
} else {
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index ba64f5c8d7..1727cec989 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -56,7 +56,6 @@ use base_db::salsa::impl_intern_value_trivial;
use chalk_ir::{
fold::{Shift, TypeFoldable},
interner::HasInterner,
- visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
NoSolution,
};
use either::Either;
@@ -98,7 +97,9 @@ pub use traits::TraitEnvironment;
pub use utils::{all_super_traits, is_fn_unsafe_to_call};
pub use chalk_ir::{
- cast::Cast, AdtId, BoundVar, DebruijnIndex, Mutability, Safety, Scalar, TyVariableKind,
+ cast::Cast,
+ visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
+ AdtId, BoundVar, DebruijnIndex, Mutability, Safety, Scalar, TyVariableKind,
};
pub type ForeignDefId = chalk_ir::ForeignDefId<Interner>;
@@ -288,7 +289,7 @@ impl Hash for ConstScalar {
/// Return an index of a parameter in the generic type parameter list by it's id.
pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
- generics(db.upcast(), id.parent).param_idx(id)
+ generics(db.upcast(), id.parent).type_or_const_param_idx(id)
}
pub(crate) fn wrap_empty_binders<T>(value: T) -> Binders<T>
@@ -603,14 +604,14 @@ pub enum ImplTraitId {
}
impl_intern_value_trivial!(ImplTraitId);
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+#[derive(PartialEq, Eq, Debug, Hash)]
pub struct ImplTraits {
pub(crate) impl_traits: Arena<ImplTrait>,
}
has_interner!(ImplTraits);
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+#[derive(PartialEq, Eq, Debug, Hash)]
pub struct ImplTrait {
pub(crate) bounds: Binders<Vec<QuantifiedWhereClause>>,
}
@@ -622,7 +623,7 @@ pub fn static_lifetime() -> Lifetime {
}
pub fn error_lifetime() -> Lifetime {
- static_lifetime()
+ LifetimeData::Error.intern(Interner)
}
pub(crate) fn fold_free_vars<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
@@ -861,7 +862,7 @@ where
if cfg!(debug_assertions) {
Err(NoSolution)
} else {
- Ok(static_lifetime())
+ Ok(error_lifetime())
}
}
@@ -873,7 +874,7 @@ where
if cfg!(debug_assertions) {
Err(NoSolution)
} else {
- Ok(static_lifetime())
+ Ok(error_lifetime())
}
}
}
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 25ccc84c13..4d0516ead6 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -15,7 +15,10 @@ use base_db::{
CrateId,
};
use chalk_ir::{
- cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
+ cast::Cast,
+ fold::{Shift, TypeFoldable},
+ interner::HasInterner,
+ Mutability, Safety, TypeOutlives,
};
use either::Either;
@@ -59,14 +62,14 @@ use crate::{
mapping::{from_chalk_trait_id, lt_to_placeholder_idx, ToChalk},
static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
utils::{
- all_super_trait_refs, associated_type_by_name_including_super_traits, generics, Generics,
- InTypeConstIdMetadata,
+ self, all_super_trait_refs, associated_type_by_name_including_super_traits, generics,
+ Generics, InTypeConstIdMetadata,
},
AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy,
FnAbi, FnPointer, FnSig, FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime,
- LifetimeData, ParamKind, PolyFnSig, ProjectionTy, QuantifiedWhereClause,
- QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder,
- TyKind, WhereClause,
+ LifetimeData, LifetimeOutlives, ParamKind, PolyFnSig, ProgramClause, ProjectionTy,
+ QuantifiedWhereClause, QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef,
+ TraitRefExt, Ty, TyBuilder, TyKind, WhereClause,
};
#[derive(Debug)]
@@ -242,13 +245,8 @@ impl<'a> TyLoweringContext<'a> {
)
}
- fn generics(&self) -> Generics {
- generics(
- self.db.upcast(),
- self.resolver
- .generic_def()
- .expect("there should be generics if there's a generic param"),
- )
+ fn generics(&self) -> Option<Generics> {
+ Some(generics(self.db.upcast(), self.resolver.generic_def()?))
}
pub fn lower_ty_ext(&self, type_ref: &TypeRef) -> (Ty, Option<TypeNs>) {
@@ -282,7 +280,7 @@ impl<'a> TyLoweringContext<'a> {
let inner_ty = self.lower_ty(inner);
// FIXME: It should infer the eldided lifetimes instead of stubbing with static
let lifetime =
- lifetime.as_ref().map_or_else(static_lifetime, |lr| self.lower_lifetime(lr));
+ lifetime.as_ref().map_or_else(error_lifetime, |lr| self.lower_lifetime(lr));
TyKind::Ref(lower_to_chalk_mutability(*mutability), lifetime, inner_ty)
.intern(Interner)
}
@@ -318,7 +316,7 @@ impl<'a> TyLoweringContext<'a> {
// place even if we encounter more opaque types while
// lowering the bounds
let idx = opaque_type_data.borrow_mut().alloc(ImplTrait {
- bounds: crate::make_single_type_binders(Vec::new()),
+ bounds: crate::make_single_type_binders(Vec::default()),
});
// We don't want to lower the bounds inside the binders
// we're currently in, because they don't end up inside
@@ -349,8 +347,7 @@ impl<'a> TyLoweringContext<'a> {
let idx = counter.get();
// FIXME we're probably doing something wrong here
counter.set(idx + count_impl_traits(type_ref) as u16);
- if let Some(def) = self.resolver.generic_def() {
- let generics = generics(self.db.upcast(), def);
+ if let Some(generics) = self.generics() {
let param = generics
.iter()
.filter(|(_, data)| {
@@ -385,8 +382,7 @@ impl<'a> TyLoweringContext<'a> {
const_params,
_impl_trait_params,
_lifetime_params,
- ) = if let Some(def) = self.resolver.generic_def() {
- let generics = generics(self.db.upcast(), def);
+ ) = if let Some(generics) = self.generics() {
generics.provenance_split()
} else {
(0, 0, 0, 0, 0, 0)
@@ -574,44 +570,40 @@ impl<'a> TyLoweringContext<'a> {
// FIXME(trait_alias): Implement trait alias.
return (TyKind::Error.intern(Interner), None);
}
- TypeNs::GenericParam(param_id) => {
- let generics = generics(
- self.db.upcast(),
- self.resolver.generic_def().expect("generics in scope"),
- );
- match self.type_param_mode {
- ParamLoweringMode::Placeholder => {
- TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into()))
- }
- ParamLoweringMode::Variable => {
- let idx = match generics.param_idx(param_id.into()) {
- None => {
- never!("no matching generics");
- return (TyKind::Error.intern(Interner), None);
- }
- Some(idx) => idx,
- };
+ TypeNs::GenericParam(param_id) => match self.type_param_mode {
+ ParamLoweringMode::Placeholder => {
+ TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into()))
+ }
+ ParamLoweringMode::Variable => {
+ let idx = match self
+ .generics()
+ .expect("generics in scope")
+ .type_or_const_param_idx(param_id.into())
+ {
+ None => {
+ never!("no matching generics");
+ return (TyKind::Error.intern(Interner), None);
+ }
+ Some(idx) => idx,
+ };
- TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
- }
+ TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
}
- .intern(Interner)
}
+ .intern(Interner),
TypeNs::SelfType(impl_id) => {
- let def =
- self.resolver.generic_def().expect("impl should have generic param scope");
- let generics = generics(self.db.upcast(), def);
+ let generics = self.generics().expect("impl should have generic param scope");
match self.type_param_mode {
ParamLoweringMode::Placeholder => {
// `def` can be either impl itself or item within, and we need impl itself
// now.
- let generics = generics.parent_generics().unwrap_or(&generics);
+ let generics = generics.parent_or_self();
let subst = generics.placeholder_subst(self.db);
self.db.impl_self_ty(impl_id).substitute(Interner, &subst)
}
ParamLoweringMode::Variable => {
- let starting_from = match def {
+ let starting_from = match generics.def() {
GenericDefId::ImplId(_) => 0,
// `def` is an item within impl. We need to substitute `BoundVar`s but
// remember that they are for parent (i.e. impl) generic params so they
@@ -679,12 +671,12 @@ impl<'a> TyLoweringContext<'a> {
}
fn select_associated_type(&self, res: Option<TypeNs>, segment: PathSegment<'_>) -> Ty {
- let Some((def, res)) = self.resolver.generic_def().zip(res) else {
+ let Some((generics, res)) = self.generics().zip(res) else {
return TyKind::Error.intern(Interner);
};
let ty = named_associated_type_shorthand_candidates(
self.db,
- def,
+ generics.def(),
res,
Some(segment.name.clone()),
move |name, t, associated_ty| {
@@ -696,7 +688,6 @@ impl<'a> TyLoweringContext<'a> {
let parent_subst = match self.type_param_mode {
ParamLoweringMode::Placeholder => {
// if we're lowering to placeholders, we have to put them in now.
- let generics = generics(self.db.upcast(), def);
let s = generics.placeholder_subst(self.db);
s.apply(parent_subst, Interner)
}
@@ -718,7 +709,7 @@ impl<'a> TyLoweringContext<'a> {
None,
);
- let len_self = generics(self.db.upcast(), associated_ty.into()).len_self();
+ let len_self = utils::generics(self.db.upcast(), associated_ty.into()).len_self();
let substs = Substitution::from_iter(
Interner,
@@ -1016,40 +1007,43 @@ impl<'a> TyLoweringContext<'a> {
self.substs_from_path_segment(segment, Some(resolved.into()), false, explicit_self_ty)
}
- pub(crate) fn lower_where_predicate(
- &self,
- where_predicate: &WherePredicate,
+ pub(crate) fn lower_where_predicate<'b>(
+ &'b self,
+ where_predicate: &'b WherePredicate,
ignore_bindings: bool,
- ) -> impl Iterator<Item = QuantifiedWhereClause> {
+ ) -> impl Iterator<Item = QuantifiedWhereClause> + 'b {
match where_predicate {
WherePredicate::ForLifetime { target, bound, .. }
| WherePredicate::TypeBound { target, bound } => {
let self_ty = match target {
WherePredicateTypeTarget::TypeRef(type_ref) => self.lower_ty(type_ref),
- WherePredicateTypeTarget::TypeOrConstParam(param_id) => {
- let generic_def = self.resolver.generic_def().expect("generics in scope");
- let generics = generics(self.db.upcast(), generic_def);
- let param_id = hir_def::TypeOrConstParamId {
- parent: generic_def,
- local_id: *param_id,
- };
- let placeholder = to_placeholder_idx(self.db, param_id);
+ &WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
+ let def = self.resolver.generic_def().expect("generics in scope");
+ let param_id = hir_def::TypeOrConstParamId { parent: def, local_id };
match self.type_param_mode {
- ParamLoweringMode::Placeholder => TyKind::Placeholder(placeholder),
+ ParamLoweringMode::Placeholder => {
+ TyKind::Placeholder(to_placeholder_idx(self.db, param_id))
+ }
ParamLoweringMode::Variable => {
- let idx = generics.param_idx(param_id).expect("matching generics");
+ let idx = generics(self.db.upcast(), def)
+ .type_or_const_param_idx(param_id)
+ .expect("matching generics");
TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, idx))
}
}
.intern(Interner)
}
};
- self.lower_type_bound(bound, self_ty, ignore_bindings)
- .collect::<Vec<_>>()
- .into_iter()
+ Either::Left(self.lower_type_bound(bound, self_ty, ignore_bindings))
}
- WherePredicate::Lifetime { .. } => vec![].into_iter(),
+ WherePredicate::Lifetime { bound, target } => Either::Right(iter::once(
+ crate::wrap_empty_binders(WhereClause::LifetimeOutlives(LifetimeOutlives {
+ a: self.lower_lifetime(bound),
+ b: self.lower_lifetime(target),
+ })),
+ )),
}
+ .into_iter()
}
pub(crate) fn lower_type_bound(
@@ -1058,11 +1052,11 @@ impl<'a> TyLoweringContext<'a> {
self_ty: Ty,
ignore_bindings: bool,
) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
- let mut bindings = None;
- let trait_ref = match bound.as_ref() {
+ let mut trait_ref = None;
+ let clause = match bound.as_ref() {
TypeBound::Path(path, TraitBoundModifier::None) => {
- bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
- bindings
+ trait_ref = self.lower_trait_ref_from_path(path, Some(self_ty));
+ trait_ref
.clone()
.filter(|tr| {
// ignore `T: Drop` or `T: Destruct` bounds.
@@ -1098,14 +1092,20 @@ impl<'a> TyLoweringContext<'a> {
}
TypeBound::ForLifetime(_, path) => {
// FIXME Don't silently drop the hrtb lifetimes here
- bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
- bindings.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders)
+ trait_ref = self.lower_trait_ref_from_path(path, Some(self_ty));
+ trait_ref.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders)
+ }
+ TypeBound::Lifetime(l) => {
+ let lifetime = self.lower_lifetime(l);
+ Some(crate::wrap_empty_binders(WhereClause::TypeOutlives(TypeOutlives {
+ ty: self_ty,
+ lifetime,
+ })))
}
- TypeBound::Lifetime(_) => None,
TypeBound::Error => None,
};
- trait_ref.into_iter().chain(
- bindings
+ clause.into_iter().chain(
+ trait_ref
.into_iter()
.filter(move |_| !ignore_bindings)
.flat_map(move |tr| self.assoc_type_bindings_from_type_bound(bound, tr)),
@@ -1203,8 +1203,8 @@ impl<'a> TyLoweringContext<'a> {
});
if let Some(target_param_idx) = target_param_idx {
let mut counter = 0;
- for (idx, data) in self.generics().params.type_or_consts.iter()
- {
+ let generics = self.generics().expect("generics in scope");
+ for (idx, data) in generics.params.type_or_consts.iter() {
// Count the number of `impl Trait` things that appear before
// the target of our `bound`.
// Our counter within `impl_trait_mode` should be that number
@@ -1264,10 +1264,19 @@ impl<'a> TyLoweringContext<'a> {
// bounds in the input.
// INVARIANT: If this function returns `DynTy`, there should be at least one trait bound.
// These invariants are utilized by `TyExt::dyn_trait()` and chalk.
+ let mut lifetime = None;
let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
let mut bounds: Vec<_> = bounds
.iter()
.flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false))
+ .filter(|b| match b.skip_binders() {
+ WhereClause::Implemented(_) | WhereClause::AliasEq(_) => true,
+ WhereClause::LifetimeOutlives(_) => false,
+ WhereClause::TypeOutlives(t) => {
+ lifetime = Some(t.lifetime.clone());
+ false
+ }
+ })
.collect();
let mut multiple_regular_traits = false;
@@ -1305,7 +1314,7 @@ impl<'a> TyLoweringContext<'a> {
_ => unreachable!(),
}
}
- // We don't produce `WhereClause::{TypeOutlives, LifetimeOutlives}` yet.
+ // `WhereClause::{TypeOutlives, LifetimeOutlives}` have been filtered out
_ => unreachable!(),
}
});
@@ -1325,7 +1334,21 @@ impl<'a> TyLoweringContext<'a> {
if let Some(bounds) = bounds {
let bounds = crate::make_single_type_binders(bounds);
- TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner)
+ TyKind::Dyn(DynTy {
+ bounds,
+ lifetime: match lifetime {
+ Some(it) => match it.bound_var(Interner) {
+ Some(bound_var) => LifetimeData::BoundVar(BoundVar::new(
+ DebruijnIndex::INNERMOST,
+ bound_var.index,
+ ))
+ .intern(Interner),
+ None => it,
+ },
+ None => static_lifetime(),
+ },
+ })
+ .intern(Interner)
} else {
// FIXME: report error
// (additional non-auto traits, associated type rebound, or no resolved trait)
@@ -1355,8 +1378,8 @@ impl<'a> TyLoweringContext<'a> {
crate::wrap_empty_binders(clause)
});
predicates.extend(sized_clause);
- predicates.shrink_to_fit();
}
+ predicates.shrink_to_fit();
predicates
});
ImplTrait { bounds: crate::make_single_type_binders(predicates) }
@@ -1371,10 +1394,7 @@ impl<'a> TyLoweringContext<'a> {
LifetimeData::Placeholder(lt_to_placeholder_idx(self.db, id))
}
ParamLoweringMode::Variable => {
- let generics = generics(
- self.db.upcast(),
- self.resolver.generic_def().expect("generics in scope"),
- );
+ let generics = self.generics().expect("generics in scope");
let idx = match generics.lifetime_idx(id) {
None => return error_lifetime(),
Some(idx) => idx,
@@ -1485,7 +1505,7 @@ fn named_associated_type_shorthand_candidates<R>(
// Handle `Self::Type` referring to own associated type in trait definitions
if let GenericDefId::TraitId(trait_id) = param_id.parent() {
let trait_generics = generics(db.upcast(), trait_id.into());
- if trait_generics.params.type_or_consts[param_id.local_id()].is_trait_self() {
+ if trait_generics.params[param_id.local_id()].is_trait_self() {
let def_generics = generics(db.upcast(), def);
let starting_idx = match def {
GenericDefId::TraitId(_) => 0,
@@ -1604,10 +1624,14 @@ pub(crate) fn generic_predicates_for_param_query(
let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
let explicitly_unsized_tys = ctx.unsized_types.into_inner();
- let implicitly_sized_predicates =
+ if let Some(implicitly_sized_predicates) =
implicitly_sized_clauses(db, param_id.parent, &explicitly_unsized_tys, &subst, &resolver)
- .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p)));
- predicates.extend(implicitly_sized_predicates);
+ {
+ predicates.extend(
+ implicitly_sized_predicates
+ .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p))),
+ );
+ }
predicates.into()
}
@@ -1657,18 +1681,7 @@ pub(crate) fn trait_environment_query(
}
}
- let container: Option<ItemContainerId> = match def {
- // FIXME: is there a function for this?
- GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container),
- GenericDefId::AdtId(_) => None,
- GenericDefId::TraitId(_) => None,
- GenericDefId::TraitAliasId(_) => None,
- GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container),
- GenericDefId::ImplId(_) => None,
- GenericDefId::EnumVariantId(_) => None,
- GenericDefId::ConstId(c) => Some(c.lookup(db.upcast()).container),
- };
- if let Some(ItemContainerId::TraitId(trait_id)) = container {
+ if let Some(trait_id) = def.assoc_trait_container(db.upcast()) {
// add `Self: Trait<T1, T2, ...>` to the environment in trait
// function default implementations (and speculative code
// inside consts or type aliases)
@@ -1676,24 +1689,23 @@ pub(crate) fn trait_environment_query(
let substs = TyBuilder::placeholder_subst(db, trait_id);
let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution: substs };
let pred = WhereClause::Implemented(trait_ref);
- let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
- clauses.push(program_clause.into_from_env_clause(Interner));
+ clauses.push(pred.cast::<ProgramClause>(Interner).into_from_env_clause(Interner));
}
let subst = generics(db.upcast(), def).placeholder_subst(db);
let explicitly_unsized_tys = ctx.unsized_types.into_inner();
- let implicitly_sized_clauses =
- implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver).map(|pred| {
- let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
- program_clause.into_from_env_clause(Interner)
- });
- clauses.extend(implicitly_sized_clauses);
-
- let krate = def.module(db.upcast()).krate();
+ if let Some(implicitly_sized_clauses) =
+ implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
+ {
+ clauses.extend(
+ implicitly_sized_clauses
+ .map(|pred| pred.cast::<ProgramClause>(Interner).into_from_env_clause(Interner)),
+ );
+ }
let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
- TraitEnvironment::new(krate, None, traits_in_scope.into_boxed_slice(), env)
+ TraitEnvironment::new(resolver.krate(), None, traits_in_scope.into_boxed_slice(), env)
}
/// Resolve the where clause(s) of an item with generics.
@@ -1721,10 +1733,14 @@ pub(crate) fn generic_predicates_query(
let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
let explicitly_unsized_tys = ctx.unsized_types.into_inner();
- let implicitly_sized_predicates =
+ if let Some(implicitly_sized_predicates) =
implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
- .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p)));
- predicates.extend(implicitly_sized_predicates);
+ {
+ predicates.extend(
+ implicitly_sized_predicates
+ .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p))),
+ );
+ }
predicates.into()
}
@@ -1736,24 +1752,24 @@ fn implicitly_sized_clauses<'a>(
explicitly_unsized_tys: &'a FxHashSet<Ty>,
substitution: &'a Substitution,
resolver: &Resolver,
-) -> impl Iterator<Item = WhereClause> + 'a {
+) -> Option<impl Iterator<Item = WhereClause> + 'a> {
let is_trait_def = matches!(def, GenericDefId::TraitId(..));
let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..];
let sized_trait = db
.lang_item(resolver.krate(), LangItem::Sized)
.and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
- sized_trait.into_iter().flat_map(move |sized_trait| {
- let implicitly_sized_tys = generic_args
+ sized_trait.map(move |sized_trait| {
+ generic_args
.iter()
.filter_map(|generic_arg| generic_arg.ty(Interner))
- .filter(move |&self_ty| !explicitly_unsized_tys.contains(self_ty));
- implicitly_sized_tys.map(move |self_ty| {
- WhereClause::Implemented(TraitRef {
- trait_id: sized_trait,
- substitution: Substitution::from1(Interner, self_ty.clone()),
+ .filter(move |&self_ty| !explicitly_unsized_tys.contains(self_ty))
+ .map(move |self_ty| {
+ WhereClause::Implemented(TraitRef {
+ trait_id: sized_trait,
+ substitution: Substitution::from1(Interner, self_ty.clone()),
+ })
})
- })
})
}
@@ -1796,8 +1812,7 @@ pub(crate) fn generic_defaults_query(
make_binders(db, &generic_params, val)
}
GenericParamDataRef::LifetimeParamData(_) => {
- // using static because it requires defaults
- make_binders(db, &generic_params, static_lifetime().cast(Interner))
+ make_binders(db, &generic_params, error_lifetime().cast(Interner))
}
}
}));
@@ -1817,7 +1832,7 @@ pub(crate) fn generic_defaults_recover(
let val = match id {
GenericParamId::TypeParamId(_) => TyKind::Error.intern(Interner).cast(Interner),
GenericParamId::ConstParamId(id) => unknown_const_as_generic(db.const_param_ty(id)),
- GenericParamId::LifetimeParamId(_) => static_lifetime().cast(Interner),
+ GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner),
};
crate::make_binders(db, &generic_params, val)
}));
@@ -2232,7 +2247,7 @@ pub(crate) fn const_or_path_to_chalk(
expected_ty: Ty,
value: &ConstRef,
mode: ParamLoweringMode,
- args: impl FnOnce() -> Generics,
+ args: impl FnOnce() -> Option<Generics>,
debruijn: DebruijnIndex,
) -> Const {
match value {
@@ -2251,7 +2266,7 @@ pub(crate) fn const_or_path_to_chalk(
.unwrap_or_else(|| unknown_const(expected_ty))
}
&ConstRef::Complex(it) => {
- let crate_data = &db.crate_graph()[owner.module(db.upcast()).krate()];
+ let crate_data = &db.crate_graph()[resolver.krate()];
if crate_data.env.get("__ra_is_test_fixture").is_none() && crate_data.origin.is_local()
{
// FIXME: current `InTypeConstId` is very unstable, so we only use it in non local crate
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index 73b07df56f..cd72349471 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -22,10 +22,10 @@ use triomphe::Arc;
use crate::{
autoderef::{self, AutoderefKind},
db::HirDatabase,
- from_chalk_trait_id, from_foreign_def_id,
+ error_lifetime, from_chalk_trait_id, from_foreign_def_id,
infer::{unify::InferenceTable, Adjust, Adjustment, OverloadedDeref, PointerCast},
primitive::{FloatTy, IntTy, UintTy},
- static_lifetime, to_chalk_trait_id,
+ to_chalk_trait_id,
utils::all_super_traits,
AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, Goal, Guidance,
InEnvironment, Interner, Scalar, Solution, Substitution, TraitEnvironment, TraitRef,
@@ -1035,7 +1035,7 @@ fn iterate_method_candidates_with_autoref(
iterate_method_candidates_by_receiver(receiver_ty.clone(), maybe_reborrowed)?;
let refed = Canonical {
- value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
+ value: TyKind::Ref(Mutability::Not, error_lifetime(), receiver_ty.value.clone())
.intern(Interner),
binders: receiver_ty.binders.clone(),
};
@@ -1043,7 +1043,7 @@ fn iterate_method_candidates_with_autoref(
iterate_method_candidates_by_receiver(refed, first_adjustment.with_autoref(Mutability::Not))?;
let ref_muted = Canonical {
- value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone())
+ value: TyKind::Ref(Mutability::Mut, error_lifetime(), receiver_ty.value.clone())
.intern(Interner),
binders: receiver_ty.binders,
};
@@ -1369,6 +1369,7 @@ pub(crate) fn resolve_indexing_op(
None
}
+// FIXME: Replace this with a `Try` impl once stable
macro_rules! check_that {
($cond:expr) => {
if !$cond {
@@ -1377,6 +1378,7 @@ macro_rules! check_that {
};
}
+#[derive(Debug)]
enum IsValidCandidate {
Yes,
No,
diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs
index d4d669182f..fee3dd3ada 100644
--- a/crates/hir-ty/src/mir/eval/shim.rs
+++ b/crates/hir-ty/src/mir/eval/shim.rs
@@ -9,11 +9,14 @@ use hir_def::{
resolver::HasResolver,
};
-use crate::mir::eval::{
- name, pad16, static_lifetime, Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId,
- HasModule, HirDisplay, Interned, InternedClosure, Interner, Interval, IntervalAndTy,
- IntervalOrOwned, ItemContainerId, LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan,
- Mutability, Result, Substitution, Ty, TyBuilder, TyExt,
+use crate::{
+ error_lifetime,
+ mir::eval::{
+ name, pad16, Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId, HasModule,
+ HirDisplay, Interned, InternedClosure, Interner, Interval, IntervalAndTy, IntervalOrOwned,
+ ItemContainerId, LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability,
+ Result, Substitution, Ty, TyBuilder, TyExt,
+ },
};
mod simd;
@@ -247,7 +250,7 @@ impl Evaluator<'_> {
let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
let arg = IntervalAndTy {
interval: Interval { addr: tmp, size: self.ptr_size() },
- ty: TyKind::Ref(Mutability::Not, static_lifetime(), ty.clone()).intern(Interner),
+ ty: TyKind::Ref(Mutability::Not, error_lifetime(), ty.clone()).intern(Interner),
};
let offset = layout.fields.offset(i).bytes_usize();
self.write_memory(tmp, &addr.offset(offset).to_bytes())?;
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 7e582c03ef..151f65cfbb 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -27,6 +27,7 @@ use crate::{
consteval::ConstEvalError,
db::{HirDatabase, InternedClosure},
display::HirDisplay,
+ error_lifetime,
infer::{CaptureKind, CapturedItem, TypeMismatch},
inhabitedness::is_ty_uninhabited_from,
layout::LayoutError,
@@ -90,7 +91,7 @@ pub enum MirLowerError {
UnresolvedField,
UnsizedTemporary(Ty),
MissingFunctionDefinition(DefWithBodyId, ExprId),
- TypeMismatch(TypeMismatch),
+ TypeMismatch(Option<TypeMismatch>),
/// This should never happen. Type mismatch should catch everything.
TypeError(&'static str),
NotSupported(String),
@@ -170,14 +171,15 @@ impl MirLowerError {
body.pretty_print_expr(db.upcast(), *owner, *it)
)?;
}
- MirLowerError::TypeMismatch(e) => {
- writeln!(
+ MirLowerError::TypeMismatch(e) => match e {
+ Some(e) => writeln!(
f,
"Type mismatch: Expected {}, found {}",
e.expected.display(db),
e.actual.display(db),
- )?;
- }
+ )?,
+ None => writeln!(f, "Type mismatch: types mismatch with {{unknown}}",)?,
+ },
MirLowerError::GenericArgNotProvided(id, subst) => {
let parent = id.parent;
let param = &db.generic_params(parent).type_or_consts[id.local_id];
@@ -493,9 +495,11 @@ impl<'ctx> MirLowerCtx<'ctx> {
ty,
value: chalk_ir::ConstValue::BoundVar(BoundVar::new(
DebruijnIndex::INNERMOST,
- gen.param_idx(p.into()).ok_or(MirLowerError::TypeError(
- "fail to lower const generic param",
- ))?,
+ gen.type_or_const_param_idx(p.into()).ok_or(
+ MirLowerError::TypeError(
+ "fail to lower const generic param",
+ ),
+ )?,
)),
}
.intern(Interner),
@@ -1702,7 +1706,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
fn is_uninhabited(&self, expr_id: ExprId) -> bool {
- is_ty_uninhabited_from(&self.infer[expr_id], self.owner.module(self.db.upcast()), self.db)
+ is_ty_uninhabited_from(self.db, &self.infer[expr_id], self.owner.module(self.db.upcast()))
}
/// This function push `StorageLive` statement for the binding, and applies changes to add `StorageDead` and
@@ -2032,10 +2036,12 @@ pub fn mir_body_for_closure_query(
let closure_local = ctx.result.locals.alloc(Local {
ty: match kind {
FnTrait::FnOnce => infer[expr].clone(),
- FnTrait::FnMut => TyKind::Ref(Mutability::Mut, static_lifetime(), infer[expr].clone())
- .intern(Interner),
- FnTrait::Fn => TyKind::Ref(Mutability::Not, static_lifetime(), infer[expr].clone())
- .intern(Interner),
+ FnTrait::FnMut => {
+ TyKind::Ref(Mutability::Mut, error_lifetime(), infer[expr].clone()).intern(Interner)
+ }
+ FnTrait::Fn => {
+ TyKind::Ref(Mutability::Not, error_lifetime(), infer[expr].clone()).intern(Interner)
+ }
},
});
ctx.result.param_locals.push(closure_local);
@@ -2152,8 +2158,10 @@ pub fn lower_to_mir(
// need to take this input explicitly.
root_expr: ExprId,
) -> Result<MirBody> {
- if let Some((_, it)) = infer.type_mismatches().next() {
- return Err(MirLowerError::TypeMismatch(it.clone()));
+ if infer.has_errors {
+ return Err(MirLowerError::TypeMismatch(
+ infer.type_mismatches().next().map(|(_, it)| it.clone()),
+ ));
}
let mut ctx = MirLowerCtx::new(db, owner, body, infer);
// 0 is return local
diff --git a/crates/hir-ty/src/mir/lower/as_place.rs b/crates/hir-ty/src/mir/lower/as_place.rs
index be81915bb4..4ad00909e4 100644
--- a/crates/hir-ty/src/mir/lower/as_place.rs
+++ b/crates/hir-ty/src/mir/lower/as_place.rs
@@ -290,7 +290,7 @@ impl MirLowerCtx<'_> {
Some((_, _, mutability)) => mutability,
None => Mutability::Not,
};
- let result_ref = TyKind::Ref(mutability, static_lifetime(), result_ty).intern(Interner);
+ let result_ref = TyKind::Ref(mutability, error_lifetime(), result_ty).intern(Interner);
let mut result: Place = self.temp(result_ref, current, span)?.into();
let index_fn_op = Operand::const_zst(
TyKind::FnDef(
@@ -333,8 +333,8 @@ impl MirLowerCtx<'_> {
BorrowKind::Mut { kind: MutBorrowKind::Default },
)
};
- let ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), source_ty.clone()).intern(Interner);
- let target_ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), target_ty).intern(Interner);
+ let ty_ref = TyKind::Ref(chalk_mut, error_lifetime(), source_ty.clone()).intern(Interner);
+ let target_ty_ref = TyKind::Ref(chalk_mut, error_lifetime(), target_ty).intern(Interner);
let ref_place: Place = self.temp(ty_ref, current, span)?.into();
self.push_assignment(current, ref_place, Rvalue::Ref(borrow_kind, place), span);
let deref_trait = self
diff --git a/crates/hir-ty/src/mir/monomorphization.rs b/crates/hir-ty/src/mir/monomorphization.rs
index d6557c3a81..a384c9306e 100644
--- a/crates/hir-ty/src/mir/monomorphization.rs
+++ b/crates/hir-ty/src/mir/monomorphization.rs
@@ -101,7 +101,7 @@ impl FallibleTypeFolder<Interner> for Filler<'_> {
_outer_binder: DebruijnIndex,
) -> std::result::Result<chalk_ir::Const<Interner>, Self::Error> {
let it = from_placeholder_idx(self.db, idx);
- let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(it)) else {
+ let Some(idx) = self.generics.as_ref().and_then(|g| g.type_or_const_param_idx(it)) else {
not_supported!("missing idx in generics");
};
Ok(self
@@ -119,7 +119,7 @@ impl FallibleTypeFolder<Interner> for Filler<'_> {
_outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
let it = from_placeholder_idx(self.db, idx);
- let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(it)) else {
+ let Some(idx) = self.generics.as_ref().and_then(|g| g.type_or_const_param_idx(it)) else {
not_supported!("missing idx in generics");
};
Ok(self
diff --git a/crates/hir-ty/src/tests/diagnostics.rs b/crates/hir-ty/src/tests/diagnostics.rs
index 80f92eaf43..119de7f050 100644
--- a/crates/hir-ty/src/tests/diagnostics.rs
+++ b/crates/hir-ty/src/tests/diagnostics.rs
@@ -136,3 +136,20 @@ impl Trait for () {
"#,
);
}
+
+#[test]
+fn no_mismatches_with_unresolved_projections() {
+ check_no_mismatches(
+ r#"
+// `Thing` is `{unknown}`
+fn create() -> Option<(i32, Thing)> {
+ Some((69420, Thing))
+}
+
+fn consume() -> Option<()> {
+ let (number, thing) = create()?;
+ Some(())
+}
+"#,
+ );
+}
diff --git a/crates/hir-ty/src/tests/display_source_code.rs b/crates/hir-ty/src/tests/display_source_code.rs
index 5069267499..e8369caa77 100644
--- a/crates/hir-ty/src/tests/display_source_code.rs
+++ b/crates/hir-ty/src/tests/display_source_code.rs
@@ -85,7 +85,7 @@ fn render_dyn_for_ty() {
trait Foo<'a> {}
fn foo(foo: &dyn for<'a> Foo<'a>) {}
- // ^^^ &dyn Foo<'static>
+ // ^^^ &dyn Foo<'_>
"#,
);
}
diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs
index 80d5a0ae00..4355881d72 100644
--- a/crates/hir-ty/src/tests/patterns.rs
+++ b/crates/hir-ty/src/tests/patterns.rs
@@ -1109,7 +1109,7 @@ fn var_args() {
#[lang = "va_list"]
pub struct VaListImpl<'f>;
fn my_fn(foo: ...) {}
- //^^^ VaListImpl<'static>
+ //^^^ VaListImpl<'{error}>
"#,
);
}
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs
index 8565b60210..c2d2047e6f 100644
--- a/crates/hir-ty/src/tests/regression.rs
+++ b/crates/hir-ty/src/tests/regression.rs
@@ -896,13 +896,13 @@ fn flush(&self) {
"#,
expect![[r#"
123..127 'self': &Mutex<T>
- 150..152 '{}': MutexGuard<'static, T>
+ 150..152 '{}': MutexGuard<'{error}, T>
234..238 'self': &{unknown}
240..290 '{ ...()); }': ()
250..251 'w': &Mutex<BufWriter>
276..287 '*(w.lock())': BufWriter
278..279 'w': &Mutex<BufWriter>
- 278..286 'w.lock()': MutexGuard<'static, BufWriter>
+ 278..286 'w.lock()': MutexGuard<'{error}, BufWriter>
"#]],
);
}
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs
index f39404593e..a9d28ebfef 100644
--- a/crates/hir-ty/src/tests/simple.rs
+++ b/crates/hir-ty/src/tests/simple.rs
@@ -3092,7 +3092,7 @@ fn main() {
389..394 'boxed': Box<Foo<i32>>
389..406 'boxed....nner()': &i32
416..421 'good1': &i32
- 424..438 'Foo::get_inner': fn get_inner<i32, 'static>(&Box<Foo<i32>>) -> &i32
+ 424..438 'Foo::get_inner': fn get_inner<i32, '{error}>(&Box<Foo<i32>>) -> &i32
424..446 'Foo::g...boxed)': &i32
439..445 '&boxed': &Box<Foo<i32>>
440..445 'boxed': Box<Foo<i32>>
@@ -3100,7 +3100,7 @@ fn main() {
464..469 'boxed': Box<Foo<i32>>
464..480 'boxed....self()': &Foo<i32>
490..495 'good2': &Foo<i32>
- 498..511 'Foo::get_self': fn get_self<i32, 'static>(&Box<Foo<i32>>) -> &Foo<i32>
+ 498..511 'Foo::get_self': fn get_self<i32, '{error}>(&Box<Foo<i32>>) -> &Foo<i32>
498..519 'Foo::g...boxed)': &Foo<i32>
512..518 '&boxed': &Box<Foo<i32>>
513..518 'boxed': Box<Foo<i32>>
@@ -3659,7 +3659,7 @@ fn main() {
let are = "are";
let count = 10;
builtin#format_args("hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!");
- // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type: Arguments<'static>
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type: Arguments<'{error}>
}
"#,
);
diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs
index 759af18c98..dfcd322a39 100644
--- a/crates/hir-ty/src/tests/traits.rs
+++ b/crates/hir-ty/src/tests/traits.rs
@@ -1602,7 +1602,7 @@ fn weird_bounds() {
r#"
//- minicore: sized
trait Trait {}
-fn test(
+fn test<'lifetime>(
a: impl Trait + 'lifetime,
b: impl 'lifetime,
c: impl (Trait),
@@ -1612,13 +1612,13 @@ fn test(
) {}
"#,
expect![[r#"
- 28..29 'a': impl Trait
- 59..60 'b': impl Sized
- 82..83 'c': impl Trait
- 103..104 'd': impl Sized
- 128..129 'e': impl ?Sized
- 148..149 'f': impl Trait + ?Sized
- 173..175 '{}': ()
+ 39..40 'a': impl Trait + 'lifetime
+ 70..71 'b': impl 'lifetime
+ 93..94 'c': impl Trait
+ 114..115 'd': impl 'lifetime
+ 139..140 'e': impl ?Sized
+ 159..160 'f': impl Trait + ?Sized
+ 184..186 '{}': ()
"#]],
);
}
diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs
index afd4d1f271..42c7a84032 100644
--- a/crates/hir-ty/src/utils.rs
+++ b/crates/hir-ty/src/utils.rs
@@ -262,7 +262,7 @@ impl<'a> ClosureSubst<'a> {
}
}
-#[derive(Debug)]
+#[derive(Clone, Debug)]
pub(crate) struct Generics {
def: GenericDefId,
pub(crate) params: Interned<GenericParams>,
@@ -274,6 +274,10 @@ impl Generics {
self.iter().map(|(id, _)| id)
}
+ pub(crate) fn def(&self) -> GenericDefId {
+ self.def
+ }
+
/// Iterator over types and const params of self, then parent.
pub(crate) fn iter<'a>(
&'a self,
@@ -304,7 +308,11 @@ impl Generics {
};
let lt_iter = self.params.iter_lt().map(from_lt_id(self));
- self.params.iter().map(from_toc_id(self)).chain(lt_iter).chain(self.iter_parent())
+ self.params
+ .iter_type_or_consts()
+ .map(from_toc_id(self))
+ .chain(lt_iter)
+ .chain(self.iter_parent())
}
/// Iterate over types and const params without parent params.
@@ -336,16 +344,19 @@ impl Generics {
}
};
- self.params.iter().map(from_toc_id(self)).chain(self.params.iter_lt().map(from_lt_id(self)))
+ self.params
+ .iter_type_or_consts()
+ .map(from_toc_id(self))
+ .chain(self.params.iter_lt().map(from_lt_id(self)))
}
/// Iterator over types and const params of parent.
- #[allow(clippy::needless_lifetimes)]
- pub(crate) fn iter_parent<'a>(
- &'a self,
- ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'a>)> + 'a {
+ pub(crate) fn iter_parent(
+ &self,
+ ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ {
self.parent_generics().into_iter().flat_map(|it| {
- let from_toc_id = move |(local_id, p): (_, &'a TypeOrConstParamData)| {
+ let from_toc_id = move |(local_id, p)| {
+ let p: &_ = p;
let id = TypeOrConstParamId { parent: it.def, local_id };
match p {
TypeOrConstParamData::TypeParamData(p) => (
@@ -359,14 +370,14 @@ impl Generics {
}
};
- let from_lt_id = move |(local_id, p): (_, &'a LifetimeParamData)| {
+ let from_lt_id = move |(local_id, p): (_, _)| {
(
GenericParamId::LifetimeParamId(LifetimeParamId { parent: it.def, local_id }),
GenericParamDataRef::LifetimeParamData(p),
)
};
let lt_iter = it.params.iter_lt().map(from_lt_id);
- it.params.iter().map(from_toc_id).chain(lt_iter)
+ it.params.iter_type_or_consts().map(from_toc_id).chain(lt_iter)
})
}
@@ -383,7 +394,7 @@ impl Generics {
}
/// Returns number of generic parameter excluding those from parent
- fn len_params(&self) -> usize {
+ fn len_type_and_const_params(&self) -> usize {
self.params.type_or_consts.len()
}
@@ -394,7 +405,7 @@ impl Generics {
let mut impl_trait_params = 0;
let mut const_params = 0;
let mut lifetime_params = 0;
- self.params.iter().for_each(|(_, data)| match data {
+ self.params.iter_type_or_consts().for_each(|(_, data)| match data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::TypeParamList => type_params += 1,
TypeParamProvenance::TraitSelf => self_params += 1,
@@ -409,18 +420,23 @@ impl Generics {
(parent_len, self_params, type_params, const_params, impl_trait_params, lifetime_params)
}
- pub(crate) fn param_idx(&self, param: TypeOrConstParamId) -> Option<usize> {
- Some(self.find_param(param)?.0)
+ pub(crate) fn type_or_const_param_idx(&self, param: TypeOrConstParamId) -> Option<usize> {
+ Some(self.find_type_or_const_param(param)?.0)
}
- fn find_param(&self, param: TypeOrConstParamId) -> Option<(usize, &TypeOrConstParamData)> {
+ fn find_type_or_const_param(
+ &self,
+ param: TypeOrConstParamId,
+ ) -> Option<(usize, &TypeOrConstParamData)> {
if param.parent == self.def {
- let (idx, (_local_id, data)) =
- self.params.iter().enumerate().find(|(_, (idx, _))| *idx == param.local_id)?;
- Some((idx, data))
+ let idx = param.local_id.into_raw().into_u32() as usize;
+ if idx >= self.params.type_or_consts.len() {
+ return None;
+ }
+ Some((idx, &self.params.type_or_consts[param.local_id]))
} else {
self.parent_generics()
- .and_then(|g| g.find_param(param))
+ .and_then(|g| g.find_type_or_const_param(param))
// Remember that parent parameters come after parameters for self.
.map(|(idx, data)| (self.len_self() + idx, data))
}
@@ -432,13 +448,14 @@ impl Generics {
fn find_lifetime(&self, lifetime: LifetimeParamId) -> Option<(usize, &LifetimeParamData)> {
if lifetime.parent == self.def {
- let (idx, (_local_id, data)) = self
- .params
- .iter_lt()
- .enumerate()
- .find(|(_, (idx, _))| *idx == lifetime.local_id)?;
-
- Some((self.len_params() + idx, data))
+ let idx = lifetime.local_id.into_raw().into_u32() as usize;
+ if idx >= self.params.lifetimes.len() {
+ return None;
+ }
+ Some((
+ self.len_type_and_const_params() + idx,
+ &self.params.lifetimes[lifetime.local_id],
+ ))
} else {
self.parent_generics()
.and_then(|g| g.find_lifetime(lifetime))
@@ -450,6 +467,10 @@ impl Generics {
self.parent_generics.as_deref()
}
+ pub(crate) fn parent_or_self(&self) -> &Generics {
+ self.parent_generics.as_deref().unwrap_or(self)
+ }
+
/// Returns a Substitution that replaces each parameter by a bound variable.
pub(crate) fn bound_vars_subst(
&self,
diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml
index 190722075a..6d7ecd1e50 100644
--- a/crates/hir/Cargo.toml
+++ b/crates/hir/Cargo.toml
@@ -33,7 +33,7 @@ tt.workspace = true
span.workspace = true
[features]
-in-rust-tree = []
+in-rust-tree = ["hir-expand/in-rust-tree"]
[lints]
workspace = true
diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs
index 1d74f9a4bb..cb5f5b06ae 100644
--- a/crates/hir/src/db.rs
+++ b/crates/hir/src/db.rs
@@ -4,24 +4,35 @@
//!
//! But we need this for at least LRU caching at the query level.
pub use hir_def::db::{
- AttrsQuery, BlockDefMapQuery, BodyQuery, BodyWithSourceMapQuery, ConstDataQuery,
- ConstVisibilityQuery, CrateLangItemsQuery, CrateSupportsNoStdQuery, DefDatabase,
- DefDatabaseStorage, EnumDataQuery, EnumVariantDataWithDiagnosticsQuery, ExprScopesQuery,
- ExternCrateDeclDataQuery, FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery,
- FileItemTreeQuery, FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery,
- ImplDataWithDiagnosticsQuery, ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery,
- InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery,
- InternExternBlockQuery, InternExternCrateQuery, InternFunctionQuery, InternImplQuery,
- InternInTypeConstQuery, InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery,
- InternStaticQuery, InternStructQuery, InternTraitAliasQuery, InternTraitQuery,
- InternTypeAliasQuery, InternUnionQuery, InternUseQuery, LangItemQuery, Macro2DataQuery,
- MacroRulesDataQuery, ProcMacroDataQuery, StaticDataQuery, StructDataWithDiagnosticsQuery,
- TraitAliasDataQuery, TraitDataWithDiagnosticsQuery, TypeAliasDataQuery,
- UnionDataWithDiagnosticsQuery,
+ AttrsQuery, BlockDefMapQuery, BlockItemTreeQuery, BodyQuery, BodyWithSourceMapQuery,
+ ConstDataQuery, ConstVisibilityQuery, CrateDefMapQuery, CrateLangItemsQuery,
+ CrateNotableTraitsQuery, CrateSupportsNoStdQuery, DefDatabase, DefDatabaseStorage,
+ EnumDataQuery, EnumVariantDataWithDiagnosticsQuery, ExprScopesQuery, ExternCrateDeclDataQuery,
+ FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery,
+ FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, ImplDataWithDiagnosticsQuery,
+ ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery, InternConstQuery, InternDatabase,
+ InternDatabaseStorage, InternEnumQuery, InternExternBlockQuery, InternExternCrateQuery,
+ InternFunctionQuery, InternImplQuery, InternInTypeConstQuery, InternMacro2Query,
+ InternMacroRulesQuery, InternProcMacroQuery, InternStaticQuery, InternStructQuery,
+ InternTraitAliasQuery, InternTraitQuery, InternTypeAliasQuery, InternUnionQuery,
+ InternUseQuery, LangItemQuery, Macro2DataQuery, MacroRulesDataQuery, ProcMacroDataQuery,
+ StaticDataQuery, StructDataWithDiagnosticsQuery, TraitAliasDataQuery,
+ TraitDataWithDiagnosticsQuery, TypeAliasDataQuery, UnionDataWithDiagnosticsQuery,
};
pub use hir_expand::db::{
AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacrosQuery, RealSpanMapQuery,
};
-pub use hir_ty::db::*;
+pub use hir_ty::db::{
+ AdtDatumQuery, AdtVarianceQuery, AssociatedTyDataQuery, AssociatedTyValueQuery, BorrowckQuery,
+ CallableItemSignatureQuery, ConstEvalDiscriminantQuery, ConstEvalQuery, ConstEvalStaticQuery,
+ ConstParamTyQuery, FieldTypesQuery, FnDefDatumQuery, FnDefVarianceQuery, GenericDefaultsQuery,
+ GenericPredicatesForParamQuery, GenericPredicatesQuery, HirDatabase, HirDatabaseStorage,
+ ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, IncoherentInherentImplCratesQuery,
+ InherentImplsInBlockQuery, InherentImplsInCrateQuery, InternCallableDefQuery,
+ InternClosureQuery, InternCoroutineQuery, InternImplTraitIdQuery, InternLifetimeParamIdQuery,
+ InternTypeOrConstParamIdQuery, LayoutOfAdtQuery, MirBodyQuery, ProgramClausesForChalkEnvQuery,
+ ReturnTypeImplTraitsQuery, TargetDataLayoutQuery, TraitDatumQuery, TraitEnvironmentQuery,
+ TraitImplsInBlockQuery, TraitImplsInCrateQuery, TraitImplsInDepsQuery, TyQuery, ValueTyQuery,
+};
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index 23c6b078b9..84f03d111f 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -1,4 +1,5 @@
//! HirDisplay implementations for various hir types.
+use either::Either;
use hir_def::{
data::adt::{StructKind, VariantData},
generics::{
@@ -13,7 +14,7 @@ use hir_ty::{
write_bounds_like_dyn_trait_with_prefix, write_visibility, HirDisplay, HirDisplayError,
HirFormatter, SizedByDefault,
},
- Interner, TraitRefExt, WhereClause,
+ AliasEq, AliasTy, Interner, ProjectionTyExt, TraitRefExt, TyKind, WhereClause,
};
use crate::{
@@ -363,16 +364,52 @@ impl HirDisplay for TypeOrConstParam {
impl HirDisplay for TypeParam {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
- write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
+ let params = f.db.generic_params(self.id.parent());
+ let param_data = &params.type_or_consts[self.id.local_id()];
+ let substs = TyBuilder::placeholder_subst(f.db, self.id.parent());
+ let krate = self.id.parent().krate(f.db).id;
+ let ty =
+ TyKind::Placeholder(hir_ty::to_placeholder_idx(f.db, self.id.into())).intern(Interner);
+ let predicates = f.db.generic_predicates(self.id.parent());
+ let predicates = predicates
+ .iter()
+ .cloned()
+ .map(|pred| pred.substitute(Interner, &substs))
+ .filter(|wc| match wc.skip_binders() {
+ WhereClause::Implemented(tr) => tr.self_type_parameter(Interner) == ty,
+ WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(proj), ty: _ }) => {
+ proj.self_type_parameter(f.db) == ty
+ }
+ WhereClause::AliasEq(_) => false,
+ WhereClause::TypeOutlives(to) => to.ty == ty,
+ WhereClause::LifetimeOutlives(_) => false,
+ })
+ .collect::<Vec<_>>();
+
+ match param_data {
+ TypeOrConstParamData::TypeParamData(p) => match p.provenance {
+ TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
+ write!(f, "{}", p.name.clone().unwrap().display(f.db.upcast()))?
+ }
+ TypeParamProvenance::ArgumentImplTrait => {
+ return write_bounds_like_dyn_trait_with_prefix(
+ f,
+ "impl",
+ Either::Left(&ty),
+ &predicates,
+ SizedByDefault::Sized { anchor: krate },
+ );
+ }
+ },
+ TypeOrConstParamData::ConstParamData(p) => {
+ write!(f, "{}", p.name.display(f.db.upcast()))?;
+ }
+ }
+
if f.omit_verbose_types() {
return Ok(());
}
- let bounds = f.db.generic_predicates_for_param(self.id.parent(), self.id.into(), None);
- let substs = TyBuilder::placeholder_subst(f.db, self.id.parent());
- let predicates: Vec<_> =
- bounds.iter().cloned().map(|b| b.substitute(Interner, &substs)).collect();
- let krate = self.id.parent().krate(f.db).id;
let sized_trait =
f.db.lang_item(krate, LangItem::Sized).and_then(|lang_item| lang_item.as_trait());
let has_only_sized_bound = predicates.iter().all(move |pred| match pred.skip_binders() {
@@ -382,7 +419,16 @@ impl HirDisplay for TypeParam {
let has_only_not_sized_bound = predicates.is_empty();
if !has_only_sized_bound || has_only_not_sized_bound {
let default_sized = SizedByDefault::Sized { anchor: krate };
- write_bounds_like_dyn_trait_with_prefix(f, ":", &predicates, default_sized)?;
+ write_bounds_like_dyn_trait_with_prefix(
+ f,
+ ":",
+ Either::Left(
+ &hir_ty::TyKind::Placeholder(hir_ty::to_placeholder_idx(f.db, self.id.into()))
+ .intern(Interner),
+ ),
+ &predicates,
+ default_sized,
+ )?;
}
Ok(())
}
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 106056c2fc..bcd94a611a 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -239,7 +239,7 @@ impl Crate {
db: &dyn DefDatabase,
query: import_map::Query,
) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
- let _p = tracing::span!(tracing::Level::INFO, "query_external_importables");
+ let _p = tracing::span!(tracing::Level::INFO, "query_external_importables").entered();
import_map::search_dependencies(db, self.into(), &query).into_iter().map(|item| {
match ItemInNs::from(item) {
ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
@@ -260,11 +260,11 @@ impl Crate {
doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
}
- pub fn cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
+ pub fn cfg(&self, db: &dyn HirDatabase) -> Arc<CfgOptions> {
db.crate_graph()[self.id].cfg_options.clone()
}
- pub fn potential_cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
+ pub fn potential_cfg(&self, db: &dyn HirDatabase) -> Arc<CfgOptions> {
let data = &db.crate_graph()[self.id];
data.potential_cfg_options.clone().unwrap_or_else(|| data.cfg_options.clone())
}
@@ -548,8 +548,8 @@ impl Module {
acc: &mut Vec<AnyDiagnostic>,
style_lints: bool,
) {
- let name = self.name(db);
- let _p = tracing::span!(tracing::Level::INFO, "Module::diagnostics", ?name);
+ let _p = tracing::span!(tracing::Level::INFO, "Module::diagnostics", name = ?self.name(db))
+ .entered();
let def_map = self.id.def_map(db.upcast());
for diag in def_map.diagnostics() {
if diag.in_module != self.id.local_id {
@@ -653,7 +653,7 @@ impl Module {
GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id })
});
let type_params = generic_params
- .iter()
+ .iter_type_or_consts()
.filter(|(_, it)| it.type_param().is_some())
.map(|(local_id, _)| {
GenericParamId::TypeParamId(TypeParamId::from_unchecked(
@@ -684,7 +684,7 @@ impl Module {
let items = &db.trait_data(trait_.into()).items;
let required_items = items.iter().filter(|&(_, assoc)| match *assoc {
AssocItemId::FunctionId(it) => !db.function_data(it).has_body(),
- AssocItemId::ConstId(id) => Const::from(id).value(db).is_none(),
+ AssocItemId::ConstId(id) => !db.const_data(id).has_body,
AssocItemId::TypeAliasId(it) => db.type_alias_data(it).type_ref.is_none(),
});
impl_assoc_items_scratch.extend(db.impl_data(impl_def.id).items.iter().filter_map(
@@ -1418,16 +1418,14 @@ impl Adt {
}
pub fn layout(self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
- if !db.generic_params(self.into()).is_empty() {
- return Err(LayoutError::HasPlaceholder);
- }
- let krate = self.krate(db).id;
db.layout_of_adt(
self.into(),
- Substitution::empty(Interner),
+ TyBuilder::adt(db, self.into())
+ .fill_with_defaults(db, || TyKind::Error.intern(Interner))
+ .build_into_subst(),
db.trait_environment(self.into()),
)
- .map(|layout| Layout(layout, db.target_data_layout(krate).unwrap()))
+ .map(|layout| Layout(layout, db.target_data_layout(self.krate(db).id).unwrap()))
}
/// Turns this ADT into a type. Any type parameters of the ADT will be
@@ -1630,7 +1628,6 @@ impl DefWithBody {
acc: &mut Vec<AnyDiagnostic>,
style_lints: bool,
) {
- db.unwind_if_cancelled();
let krate = self.module(db).id.krate();
let (body, source_map) = db.body_with_source_map(self.into());
@@ -1678,6 +1675,7 @@ impl DefWithBody {
for d in &infer.diagnostics {
acc.extend(AnyDiagnostic::inference_diagnostic(db, self.into(), d, &source_map));
}
+
for (pat_or_expr, mismatch) in infer.type_mismatches() {
let expr_or_pat = match pat_or_expr {
ExprOrPatId::ExprId(expr) => source_map.expr_syntax(expr).map(Either::Left),
@@ -1763,7 +1761,9 @@ impl DefWithBody {
need_mut = &mir::MutabilityReason::Not;
}
let local = Local { parent: self.into(), binding_id };
- match (need_mut, local.is_mut(db)) {
+ let is_mut = body[binding_id].mode == BindingAnnotation::Mutable;
+
+ match (need_mut, is_mut) {
(mir::MutabilityReason::Unused, _) => {
let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with('_'));
if !should_ignore {
@@ -2007,12 +2007,15 @@ impl Function {
/// is this a `fn main` or a function with an `export_name` of `main`?
pub fn is_main(self, db: &dyn HirDatabase) -> bool {
- if !self.module(db).is_crate_root() {
- return false;
- }
let data = db.function_data(self.id);
+ data.attrs.export_name() == Some("main")
+ || self.module(db).is_crate_root() && data.name.to_smol_str() == "main"
+ }
- data.name.to_smol_str() == "main" || data.attrs.export_name() == Some("main")
+ /// Is this a function with an `export_name` of `main`?
+ pub fn exported_main(self, db: &dyn HirDatabase) -> bool {
+ let data = db.function_data(self.id);
+ data.attrs.export_name() == Some("main")
}
/// Does this function have the ignore attribute?
@@ -3909,7 +3912,7 @@ impl Type {
inner.derived(
TyKind::Ref(
if m.is_mut() { hir_ty::Mutability::Mut } else { hir_ty::Mutability::Not },
- hir_ty::static_lifetime(),
+ hir_ty::error_lifetime(),
inner.ty.clone(),
)
.intern(Interner),
@@ -4492,7 +4495,7 @@ impl Type {
name: Option<&Name>,
mut callback: impl FnMut(Function) -> Option<T>,
) -> Option<T> {
- let _p = tracing::span!(tracing::Level::INFO, "iterate_method_candidates");
+ let _p = tracing::span!(tracing::Level::INFO, "iterate_method_candidates").entered();
let mut slot = None;
self.iterate_method_candidates_dyn(
@@ -4580,7 +4583,7 @@ impl Type {
name: Option<&Name>,
mut callback: impl FnMut(AssocItem) -> Option<T>,
) -> Option<T> {
- let _p = tracing::span!(tracing::Level::INFO, "iterate_path_candidates");
+ let _p = tracing::span!(tracing::Level::INFO, "iterate_path_candidates").entered();
let mut slot = None;
self.iterate_path_candidates_dyn(
db,
@@ -4647,7 +4650,7 @@ impl Type {
&'a self,
db: &'a dyn HirDatabase,
) -> impl Iterator<Item = Trait> + 'a {
- let _p = tracing::span!(tracing::Level::INFO, "applicable_inherent_traits");
+ let _p = tracing::span!(tracing::Level::INFO, "applicable_inherent_traits").entered();
self.autoderef_(db)
.filter_map(|ty| ty.dyn_trait())
.flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id))
@@ -4655,7 +4658,7 @@ impl Type {
}
pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a {
- let _p = tracing::span!(tracing::Level::INFO, "env_traits");
+ let _p = tracing::span!(tracing::Level::INFO, "env_traits").entered();
self.autoderef_(db)
.filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_)))
.flat_map(|ty| {
@@ -4709,10 +4712,12 @@ impl Type {
if let WhereClause::Implemented(trait_ref) = pred.skip_binders() {
cb(type_.clone());
// skip the self type. it's likely the type we just got the bounds from
- for ty in
- trait_ref.substitution.iter(Interner).skip(1).filter_map(|a| a.ty(Interner))
- {
- walk_type(db, &type_.derived(ty.clone()), cb);
+ if let [self_ty, params @ ..] = trait_ref.substitution.as_slice(Interner) {
+ for ty in
+ params.iter().filter(|&ty| ty != self_ty).filter_map(|a| a.ty(Interner))
+ {
+ walk_type(db, &type_.derived(ty.clone()), cb);
+ }
}
}
}
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 9796009cb4..e792e159ac 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -722,7 +722,7 @@ impl<'db> SemanticsImpl<'db> {
mut token: SyntaxToken,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) {
- let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros");
+ let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros").entered();
let (sa, span, file_id) =
match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
Some(sa) => match sa.file_id.file_id() {
@@ -1246,6 +1246,17 @@ impl<'db> SemanticsImpl<'db> {
.map_or(false, |m| matches!(m.id, MacroId::ProcMacroId(..)))
}
+ pub fn resolve_macro_call_arm(&self, macro_call: &ast::MacroCall) -> Option<u32> {
+ let sa = self.analyze(macro_call.syntax())?;
+ self.db
+ .parse_macro_expansion(
+ sa.expand(self.db, self.wrap_node_infile(macro_call.clone()).as_ref())?,
+ )
+ .value
+ .1
+ .matched_arm
+ }
+
pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
let sa = match self.analyze(macro_call.syntax()) {
Some(it) => it,
@@ -1359,7 +1370,7 @@ impl<'db> SemanticsImpl<'db> {
offset: Option<TextSize>,
infer_body: bool,
) -> Option<SourceAnalyzer> {
- let _p = tracing::span!(tracing::Level::INFO, "Semantics::analyze_impl");
+ let _p = tracing::span!(tracing::Level::INFO, "Semantics::analyze_impl").entered();
let node = self.find_file(node);
let container = self.with_ctx(|ctx| ctx.find_container(node))?;
diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs
index d4d6f0b243..434e4b5a0c 100644
--- a/crates/hir/src/semantics/source_to_def.rs
+++ b/crates/hir/src/semantics/source_to_def.rs
@@ -118,7 +118,7 @@ pub(super) struct SourceToDefCtx<'a, 'b> {
impl SourceToDefCtx<'_, '_> {
pub(super) fn file_to_def(&self, file: FileId) -> SmallVec<[ModuleId; 1]> {
- let _p = tracing::span!(tracing::Level::INFO, "SourceBinder::file_to_module_def");
+ let _p = tracing::span!(tracing::Level::INFO, "SourceBinder::file_to_module_def").entered();
let mut mods = SmallVec::new();
for &crate_id in self.db.relevant_crates(file).iter() {
// FIXME: inner items
@@ -133,7 +133,7 @@ impl SourceToDefCtx<'_, '_> {
}
pub(super) fn module_to_def(&mut self, src: InFile<ast::Module>) -> Option<ModuleId> {
- let _p = tracing::span!(tracing::Level::INFO, "module_to_def");
+ let _p = tracing::span!(tracing::Level::INFO, "module_to_def").entered();
let parent_declaration = src
.syntax()
.ancestors_with_macros_skip_attr_item(self.db.upcast())
@@ -158,7 +158,7 @@ impl SourceToDefCtx<'_, '_> {
}
pub(super) fn source_file_to_def(&self, src: InFile<ast::SourceFile>) -> Option<ModuleId> {
- let _p = tracing::span!(tracing::Level::INFO, "source_file_to_def");
+ let _p = tracing::span!(tracing::Level::INFO, "source_file_to_def").entered();
let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id).first().copied()
}
diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index c1b95bb1e2..24a32086f3 100644
--- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -105,7 +105,7 @@ fn add_missing_impl_members_inner(
assist_id: &'static str,
label: &'static str,
) -> Option<()> {
- let _p = tracing::span!(tracing::Level::INFO, "add_missing_impl_members_inner");
+ let _p = tracing::span!(tracing::Level::INFO, "add_missing_impl_members_inner").entered();
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
let impl_ = ctx.sema.to_def(&impl_def)?;
diff --git a/crates/ide-assists/src/handlers/auto_import.rs b/crates/ide-assists/src/handlers/auto_import.rs
index 62696d1a9a..b90bccb48e 100644
--- a/crates/ide-assists/src/handlers/auto_import.rs
+++ b/crates/ide-assists/src/handlers/auto_import.rs
@@ -1588,4 +1588,82 @@ mod bar {
"#,
);
}
+
+ #[test]
+ fn local_inline_import_has_alias() {
+ // FIXME
+ check_assist_not_applicable(
+ auto_import,
+ r#"
+struct S<T>(T);
+use S as IoResult;
+
+mod foo {
+ pub fn bar() -> S$0<()> {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn alias_local() {
+ // FIXME
+ check_assist_not_applicable(
+ auto_import,
+ r#"
+struct S<T>(T);
+use S as IoResult;
+
+mod foo {
+ pub fn bar() -> IoResult$0<()> {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn preserve_raw_identifiers_strict() {
+ check_assist(
+ auto_import,
+ r"
+ r#as$0
+
+ pub mod ffi_mod {
+ pub fn r#as() {};
+ }
+ ",
+ r"
+ use ffi_mod::r#as;
+
+ r#as
+
+ pub mod ffi_mod {
+ pub fn r#as() {};
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn preserve_raw_identifiers_reserved() {
+ check_assist(
+ auto_import,
+ r"
+ r#abstract$0
+
+ pub mod ffi_mod {
+ pub fn r#abstract() {};
+ }
+ ",
+ r"
+ use ffi_mod::r#abstract;
+
+ r#abstract
+
+ pub mod ffi_mod {
+ pub fn r#abstract() {};
+ }
+ ",
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs b/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs
new file mode 100644
index 0000000000..5459bd334c
--- /dev/null
+++ b/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs
@@ -0,0 +1,250 @@
+use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait};
+use itertools::Itertools;
+use syntax::{
+ ast::{self, make, AstNode, HasName},
+ ted,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: convert_from_to_tryfrom
+//
+// Converts a From impl to a TryFrom impl, wrapping returns in `Ok`.
+//
+// ```
+// # //- minicore: from
+// impl $0From<usize> for Thing {
+// fn from(val: usize) -> Self {
+// Thing {
+// b: val.to_string(),
+// a: val
+// }
+// }
+// }
+// ```
+// ->
+// ```
+// impl TryFrom<usize> for Thing {
+// type Error = ${0:()};
+//
+// fn try_from(val: usize) -> Result<Self, Self::Error> {
+// Ok(Thing {
+// b: val.to_string(),
+// a: val
+// })
+// }
+// }
+// ```
+pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let impl_ = ctx.find_node_at_offset::<ast::Impl>()?;
+ let trait_ty = impl_.trait_()?;
+
+ let module = ctx.sema.scope(impl_.syntax())?.module();
+
+ let from_type = match &trait_ty {
+ ast::Type::PathType(path) => {
+ path.path()?.segment()?.generic_arg_list()?.generic_args().next()?
+ }
+ _ => return None,
+ };
+
+ let associated_items = impl_.assoc_item_list()?;
+ let from_fn = associated_items.assoc_items().find_map(|item| {
+ if let ast::AssocItem::Fn(f) = item {
+ if f.name()?.text() == "from" {
+ return Some(f);
+ }
+ };
+ None
+ })?;
+
+ let from_fn_name = from_fn.name()?;
+ let from_fn_return_type = from_fn.ret_type()?.ty()?;
+
+ let return_exprs = from_fn.body()?.syntax().descendants().filter_map(ast::ReturnExpr::cast);
+ let tail_expr = from_fn.body()?.tail_expr()?;
+
+ if resolve_target_trait(&ctx.sema, &impl_)?
+ != FamousDefs(&ctx.sema, module.krate()).core_convert_From()?
+ {
+ return None;
+ }
+
+ acc.add(
+ AssistId("convert_from_to_tryfrom", AssistKind::RefactorRewrite),
+ "Convert From to TryFrom",
+ impl_.syntax().text_range(),
+ |builder| {
+ let trait_ty = builder.make_mut(trait_ty);
+ let from_fn_return_type = builder.make_mut(from_fn_return_type);
+ let from_fn_name = builder.make_mut(from_fn_name);
+ let tail_expr = builder.make_mut(tail_expr);
+ let return_exprs = return_exprs.map(|r| builder.make_mut(r)).collect_vec();
+ let associated_items = builder.make_mut(associated_items).clone();
+
+ ted::replace(
+ trait_ty.syntax(),
+ make::ty(&format!("TryFrom<{from_type}>")).syntax().clone_for_update(),
+ );
+ ted::replace(
+ from_fn_return_type.syntax(),
+ make::ty("Result<Self, Self::Error>").syntax().clone_for_update(),
+ );
+ ted::replace(from_fn_name.syntax(), make::name("try_from").syntax().clone_for_update());
+ ted::replace(
+ tail_expr.syntax(),
+ wrap_ok(tail_expr.clone()).syntax().clone_for_update(),
+ );
+
+ for r in return_exprs {
+ let t = r.expr().unwrap_or_else(make::expr_unit);
+ ted::replace(t.syntax(), wrap_ok(t.clone()).syntax().clone_for_update());
+ }
+
+ let error_type = ast::AssocItem::TypeAlias(make::ty_alias(
+ "Error",
+ None,
+ None,
+ None,
+ Some((make::ty_unit(), None)),
+ ))
+ .clone_for_update();
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ if let ast::AssocItem::TypeAlias(type_alias) = &error_type {
+ if let Some(ty) = type_alias.ty() {
+ builder.add_placeholder_snippet(cap, ty);
+ }
+ }
+ }
+
+ associated_items.add_item_at_start(error_type);
+ },
+ )
+}
+
+fn wrap_ok(expr: ast::Expr) -> ast::Expr {
+ make::expr_call(
+ make::expr_path(make::ext::ident_path("Ok")),
+ make::arg_list(std::iter::once(expr)),
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn converts_from_to_tryfrom() {
+ check_assist(
+ convert_from_to_tryfrom,
+ r#"
+//- minicore: from
+struct Foo(String);
+
+impl $0From<String> for Foo {
+ fn from(val: String) -> Self {
+ if val == "bar" {
+ return Foo(val);
+ }
+ Self(val)
+ }
+}
+ "#,
+ r#"
+struct Foo(String);
+
+impl TryFrom<String> for Foo {
+ type Error = ${0:()};
+
+ fn try_from(val: String) -> Result<Self, Self::Error> {
+ if val == "bar" {
+ return Ok(Foo(val));
+ }
+ Ok(Self(val))
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn converts_from_to_tryfrom_nested_type() {
+ check_assist(
+ convert_from_to_tryfrom,
+ r#"
+//- minicore: from
+struct Foo(String);
+
+impl $0From<Option<String>> for Foo {
+ fn from(val: Option<String>) -> Self {
+ match val {
+ Some(val) => Foo(val),
+ None => Foo("".to_string())
+ }
+ }
+}
+ "#,
+ r#"
+struct Foo(String);
+
+impl TryFrom<Option<String>> for Foo {
+ type Error = ${0:()};
+
+ fn try_from(val: Option<String>) -> Result<Self, Self::Error> {
+ Ok(match val {
+ Some(val) => Foo(val),
+ None => Foo("".to_string())
+ })
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn converts_from_to_tryfrom_preserves_lifetimes() {
+ check_assist(
+ convert_from_to_tryfrom,
+ r#"
+//- minicore: from
+struct Foo<'a>(&'a str);
+
+impl<'a> $0From<&'a str> for Foo<'a> {
+ fn from(val: &'a str) -> Self {
+ Self(val)
+ }
+}
+ "#,
+ r#"
+struct Foo<'a>(&'a str);
+
+impl<'a> TryFrom<&'a str> for Foo<'a> {
+ type Error = ${0:()};
+
+ fn try_from(val: &'a str) -> Result<Self, Self::Error> {
+ Ok(Self(val))
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn other_trait_not_applicable() {
+ check_assist_not_applicable(
+ convert_from_to_tryfrom,
+ r#"
+struct Foo(String);
+
+impl $0TryFrom<String> for Foo {
+ fn try_from(val: String) -> Result<Self, Self::Error> {
+ Ok(Self(val))
+ }
+}
+ "#,
+ );
+ }
+}
diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs
index 65ce3e822c..34326294d2 100644
--- a/crates/ide-assists/src/handlers/extract_function.rs
+++ b/crates/ide-assists/src/handlers/extract_function.rs
@@ -1149,8 +1149,14 @@ fn reference_is_exclusive(
node: &dyn HasTokenAtOffset,
ctx: &AssistContext<'_>,
) -> bool {
+ // FIXME: this is quite an incorrect way to go about doing this :-)
+ // `FileReference` is an IDE-type --- it encapsulates data communicated to the human,
+ // but doesn't necessarily fully reflect all the intricacies of the underlying language semantics
+ // The correct approach here would be to expose this entire analysis as a method on some hir
+ // type. Something like `body.free_variables(statement_range)`.
+
// we directly modify variable with set: `n = 0`, `n += 1`
- if reference.category == Some(ReferenceCategory::Write) {
+ if reference.category.contains(ReferenceCategory::WRITE) {
return true;
}
@@ -5617,7 +5623,7 @@ fn func<T: Debug>(i: Struct<'_, T>) {
fun_name(i);
}
-fn $0fun_name(i: Struct<'static, T>) {
+fn $0fun_name(i: Struct<'_, T>) {
foo(i);
}
"#,
diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs
index ff051fa870..db94a21a6d 100644
--- a/crates/ide-assists/src/handlers/generate_function.rs
+++ b/crates/ide-assists/src/handlers/generate_function.rs
@@ -175,8 +175,7 @@ fn add_func_to_accumulator(
edit.edit_file(file);
let target = function_builder.target.clone();
- let function_template = function_builder.render();
- let func = function_template.to_ast(ctx.config.snippet_cap, edit);
+ let func = function_builder.render(ctx.config.snippet_cap, edit);
if let Some(name) = adt_name {
let name = make::ty_path(make::ext::ident_path(&format!("{}", name.display(ctx.db()))));
@@ -205,37 +204,6 @@ fn get_adt_source(
find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()]).map(|impl_| (impl_, range.file_id))
}
-struct FunctionTemplate {
- fn_def: ast::Fn,
- ret_type: Option<ast::RetType>,
- should_focus_return_type: bool,
- tail_expr: ast::Expr,
-}
-
-impl FunctionTemplate {
- fn to_ast(&self, cap: Option<SnippetCap>, edit: &mut SourceChangeBuilder) -> ast::Fn {
- let Self { fn_def, ret_type, should_focus_return_type, tail_expr } = self;
-
- if let Some(cap) = cap {
- if *should_focus_return_type {
- // Focus the return type if there is one
- match ret_type {
- Some(ret_type) => {
- edit.add_placeholder_snippet(cap, ret_type.clone());
- }
- None => {
- edit.add_placeholder_snippet(cap, tail_expr.clone());
- }
- }
- } else {
- edit.add_placeholder_snippet(cap, tail_expr.clone());
- }
- }
-
- fn_def.clone()
- }
-}
-
struct FunctionBuilder {
target: GeneratedFunctionTarget,
fn_name: ast::Name,
@@ -339,7 +307,7 @@ impl FunctionBuilder {
})
}
- fn render(self) -> FunctionTemplate {
+ fn render(self, cap: Option<SnippetCap>, edit: &mut SourceChangeBuilder) -> ast::Fn {
let placeholder_expr = make::ext::expr_todo();
let fn_body = make::block_expr(vec![], Some(placeholder_expr));
let visibility = match self.visibility {
@@ -361,17 +329,31 @@ impl FunctionBuilder {
)
.clone_for_update();
- FunctionTemplate {
- ret_type: fn_def.ret_type(),
- // PANIC: we guarantee we always create a function body with a tail expr
- tail_expr: fn_def
- .body()
- .expect("generated function should have a body")
- .tail_expr()
- .expect("function body should have a tail expression"),
- should_focus_return_type: self.should_focus_return_type,
- fn_def,
+ let ret_type = fn_def.ret_type();
+ // PANIC: we guarantee we always create a function body with a tail expr
+ let tail_expr = fn_def
+ .body()
+ .expect("generated function should have a body")
+ .tail_expr()
+ .expect("function body should have a tail expression");
+
+ if let Some(cap) = cap {
+ if self.should_focus_return_type {
+ // Focus the return type if there is one
+ match ret_type {
+ Some(ret_type) => {
+ edit.add_placeholder_snippet(cap, ret_type.clone());
+ }
+ None => {
+ edit.add_placeholder_snippet(cap, tail_expr.clone());
+ }
+ }
+ } else {
+ edit.add_placeholder_snippet(cap, tail_expr.clone());
+ }
}
+
+ fn_def
}
}
diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs
index a90fe83857..44307ffd75 100644
--- a/crates/ide-assists/src/handlers/inline_call.rs
+++ b/crates/ide-assists/src/handlers/inline_call.rs
@@ -49,13 +49,13 @@ use crate::{
//
// fn bar() {
// {
-// let word = "안녕하세요";
+// let word: &str = "안녕하세요";
// if !word.is_empty() {
// print(word);
// }
// };
// {
-// let word = "여러분";
+// let word: &str = "여러분";
// if !word.is_empty() {
// print(word);
// }
diff --git a/crates/ide-assists/src/handlers/promote_local_to_const.rs b/crates/ide-assists/src/handlers/promote_local_to_const.rs
index 67fea772c7..7c2dc0e0c1 100644
--- a/crates/ide-assists/src/handlers/promote_local_to_const.rs
+++ b/crates/ide-assists/src/handlers/promote_local_to_const.rs
@@ -59,10 +59,7 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
let ty = match ty.display_source_code(ctx.db(), module.into(), false) {
Ok(ty) => ty,
- Err(_) => {
- cov_mark::hit!(promote_local_not_applicable_if_ty_not_inferred);
- return None;
- }
+ Err(_) => return None,
};
let initializer = let_stmt.initializer()?;
@@ -315,14 +312,18 @@ fn foo() {
#[test]
fn not_applicable_unknown_ty() {
- cov_mark::check!(promote_local_not_applicable_if_ty_not_inferred);
- check_assist_not_applicable(
+ check_assist(
promote_local_to_const,
r"
fn foo() {
let x$0 = bar();
}
",
+ r"
+fn foo() {
+ const $0X: _ = bar();
+}
+",
);
}
diff --git a/crates/ide-assists/src/handlers/remove_unused_imports.rs b/crates/ide-assists/src/handlers/remove_unused_imports.rs
index d67b259d2f..0f0f13bbc8 100644
--- a/crates/ide-assists/src/handlers/remove_unused_imports.rs
+++ b/crates/ide-assists/src/handlers/remove_unused_imports.rs
@@ -145,7 +145,7 @@ fn used_once_in_scope(ctx: &AssistContext<'_>, def: Definition, scopes: &Vec<Sea
for scope in scopes {
let mut search_non_import = |_, r: FileReference| {
// The import itself is a use; we must skip that.
- if r.category != Some(ReferenceCategory::Import) {
+ if !r.category.contains(ReferenceCategory::IMPORT) {
found = true;
true
} else {
diff --git a/crates/ide-assists/src/handlers/toggle_ignore.rs b/crates/ide-assists/src/handlers/toggle_ignore.rs
index f864ee50c8..264a2f0326 100644
--- a/crates/ide-assists/src/handlers/toggle_ignore.rs
+++ b/crates/ide-assists/src/handlers/toggle_ignore.rs
@@ -3,7 +3,7 @@ use syntax::{
AstNode, AstToken,
};
-use crate::{utils::test_related_attribute, AssistContext, AssistId, AssistKind, Assists};
+use crate::{utils::test_related_attribute_syn, AssistContext, AssistId, AssistKind, Assists};
// Assist: toggle_ignore
//
@@ -26,7 +26,7 @@ use crate::{utils::test_related_attribute, AssistContext, AssistId, AssistKind,
pub(crate) fn toggle_ignore(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let attr: ast::Attr = ctx.find_node_at_offset()?;
let func = attr.syntax().parent().and_then(ast::Fn::cast)?;
- let attr = test_related_attribute(&func)?;
+ let attr = test_related_attribute_syn(&func)?;
match has_ignore_attribute(&func) {
None => acc.add(
diff --git a/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs b/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
new file mode 100644
index 0000000000..0fa46ef43a
--- /dev/null
+++ b/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
@@ -0,0 +1,581 @@
+use ide_db::source_change::SourceChangeBuilder;
+use itertools::Itertools;
+use syntax::{
+ algo,
+ ast::{self, make, AstNode},
+ ted::{self, Position},
+ NodeOrToken, SyntaxToken, TextRange, T,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: wrap_unwrap_cfg_attr
+//
+// Wraps an attribute in a `cfg_attr` attribute, or unwraps a `cfg_attr` attribute into its inner attributes.
+//
+// ```
+// #[derive$0(Debug)]
+// struct S {
+// field: i32
+// }
+// ```
+// ->
+// ```
+// #[cfg_attr($0, derive(Debug))]
+// struct S {
+// field: i32
+// }
+// ```
+enum WrapUnwrapOption {
+ WrapDerive { derive: TextRange, attr: ast::Attr },
+ WrapAttr(ast::Attr),
+}
+
+/// Attempts to get the derive attribute from a derive attribute list
+///
+/// This will collect all the tokens in the "path" within the derive attribute list
+/// But a derive attribute list doesn't have paths. So we need to collect all the tokens before and after the ident
+///
+/// If this function returns `None`, just map to `WrapAttr`
+fn attempt_get_derive(attr: ast::Attr, ident: SyntaxToken) -> WrapUnwrapOption {
+ let attempt_attr = || {
+ {
+ let mut derive = ident.text_range();
+ // TokenTree is all the tokens between the `(` and `)`. They do not have paths. So a path `serde::Serialize` would be [Ident Colon Colon Ident]
+ // So lets say we have derive(Debug, serde::Serialize, Copy) ident would be on Serialize
+ // We need to grab all previous tokens until we find a `,` or `(` and all following tokens until we find a `,` or `)`
+ // We also want to consume the following comma if it exists
+
+ let mut prev = algo::skip_trivia_token(
+ ident.prev_sibling_or_token()?.into_token()?,
+ syntax::Direction::Prev,
+ )?;
+ let mut following = algo::skip_trivia_token(
+ ident.next_sibling_or_token()?.into_token()?,
+ syntax::Direction::Next,
+ )?;
+ if (prev.kind() == T![,] || prev.kind() == T!['('])
+ && (following.kind() == T![,] || following.kind() == T![')'])
+ {
+                // This would be a single ident such as `Debug`, as no path is present
+ if following.kind() == T![,] {
+ derive = derive.cover(following.text_range());
+ } else if following.kind() == T![')'] && prev.kind() == T![,] {
+ derive = derive.cover(prev.text_range());
+ }
+
+ Some(WrapUnwrapOption::WrapDerive { derive, attr: attr.clone() })
+ } else {
+ let mut consumed_comma = false;
+ // Collect the path
+ while let Some(prev_token) = algo::skip_trivia_token(prev, syntax::Direction::Prev)
+ {
+ let kind = prev_token.kind();
+ if kind == T![,] {
+ consumed_comma = true;
+ derive = derive.cover(prev_token.text_range());
+ break;
+ } else if kind == T!['('] {
+ break;
+ } else {
+ derive = derive.cover(prev_token.text_range());
+ }
+ prev = prev_token.prev_sibling_or_token()?.into_token()?;
+ }
+ while let Some(next_token) =
+ algo::skip_trivia_token(following.clone(), syntax::Direction::Next)
+ {
+ let kind = next_token.kind();
+ match kind {
+ T![,] if !consumed_comma => {
+ derive = derive.cover(next_token.text_range());
+ break;
+ }
+ T![')'] | T![,] => break,
+ _ => derive = derive.cover(next_token.text_range()),
+ }
+ following = next_token.next_sibling_or_token()?.into_token()?;
+ }
+ Some(WrapUnwrapOption::WrapDerive { derive, attr: attr.clone() })
+ }
+ }
+ };
+ if ident.parent().and_then(ast::TokenTree::cast).is_none()
+ || !attr.simple_name().map(|v| v.eq("derive")).unwrap_or_default()
+ {
+ WrapUnwrapOption::WrapAttr(attr)
+ } else {
+ attempt_attr().unwrap_or(WrapUnwrapOption::WrapAttr(attr))
+ }
+}
+pub(crate) fn wrap_unwrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let option = if ctx.has_empty_selection() {
+ let ident = ctx.find_token_syntax_at_offset(T![ident]);
+ let attr = ctx.find_node_at_offset::<ast::Attr>();
+ match (attr, ident) {
+ (Some(attr), Some(ident))
+ if attr.simple_name().map(|v| v.eq("derive")).unwrap_or_default() =>
+ {
+ Some(attempt_get_derive(attr.clone(), ident))
+ }
+
+ (Some(attr), _) => Some(WrapUnwrapOption::WrapAttr(attr)),
+ _ => None,
+ }
+ } else {
+ let covering_element = ctx.covering_element();
+ match covering_element {
+ NodeOrToken::Node(node) => ast::Attr::cast(node).map(WrapUnwrapOption::WrapAttr),
+ NodeOrToken::Token(ident) if ident.kind() == syntax::T![ident] => {
+ let attr = ident.parent_ancestors().find_map(ast::Attr::cast)?;
+ Some(attempt_get_derive(attr.clone(), ident))
+ }
+ _ => None,
+ }
+ }?;
+ match option {
+ WrapUnwrapOption::WrapAttr(attr) if attr.simple_name().as_deref() == Some("cfg_attr") => {
+ unwrap_cfg_attr(acc, attr)
+ }
+ WrapUnwrapOption::WrapAttr(attr) => wrap_cfg_attr(acc, ctx, attr),
+ WrapUnwrapOption::WrapDerive { derive, attr } => wrap_derive(acc, ctx, attr, derive),
+ }
+}
+
+fn wrap_derive(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ attr: ast::Attr,
+ derive_element: TextRange,
+) -> Option<()> {
+ let range = attr.syntax().text_range();
+ let token_tree = attr.token_tree()?;
+ let mut path_text = String::new();
+
+ let mut cfg_derive_tokens = Vec::new();
+ let mut new_derive = Vec::new();
+
+ for tt in token_tree.token_trees_and_tokens() {
+ let NodeOrToken::Token(token) = tt else {
+ continue;
+ };
+ if token.kind() == T!['('] || token.kind() == T![')'] {
+ continue;
+ }
+
+ if derive_element.contains_range(token.text_range()) {
+ if token.kind() != T![,] && token.kind() != syntax::SyntaxKind::WHITESPACE {
+ path_text.push_str(token.text());
+ cfg_derive_tokens.push(NodeOrToken::Token(token));
+ }
+ } else {
+ new_derive.push(NodeOrToken::Token(token));
+ }
+ }
+ let handle_source_change = |edit: &mut SourceChangeBuilder| {
+ let new_derive = make::attr_outer(make::meta_token_tree(
+ make::ext::ident_path("derive"),
+ make::token_tree(T!['('], new_derive),
+ ))
+ .clone_for_update();
+ let meta = make::meta_token_tree(
+ make::ext::ident_path("cfg_attr"),
+ make::token_tree(
+ T!['('],
+ vec![
+ NodeOrToken::Token(make::token(T![,])),
+ NodeOrToken::Token(make::tokens::whitespace(" ")),
+ NodeOrToken::Token(make::tokens::ident("derive")),
+ NodeOrToken::Node(make::token_tree(T!['('], cfg_derive_tokens)),
+ ],
+ ),
+ );
+ // Remove the derive attribute
+ let edit_attr = edit.make_syntax_mut(attr.syntax().clone());
+
+ ted::replace(edit_attr, new_derive.syntax().clone());
+ let cfg_attr = make::attr_outer(meta).clone_for_update();
+
+ ted::insert_all_raw(
+ Position::after(new_derive.syntax().clone()),
+ vec![make::tokens::whitespace("\n").into(), cfg_attr.syntax().clone().into()],
+ );
+ if let Some(snippet_cap) = ctx.config.snippet_cap {
+ if let Some(first_meta) =
+ cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
+ {
+ edit.add_tabstop_after_token(snippet_cap, first_meta)
+ }
+ }
+ };
+
+ acc.add(
+ AssistId("wrap_unwrap_cfg_attr", AssistKind::Refactor),
+ format!("Wrap #[derive({path_text})] in `cfg_attr`",),
+ range,
+ handle_source_change,
+ );
+ Some(())
+}
+fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) -> Option<()> {
+ let range = attr.syntax().text_range();
+ let path = attr.path()?;
+ let handle_source_change = |edit: &mut SourceChangeBuilder| {
+ let mut raw_tokens = vec![
+ NodeOrToken::Token(make::token(T![,])),
+ NodeOrToken::Token(make::tokens::whitespace(" ")),
+ ];
+ path.syntax().descendants_with_tokens().for_each(|it| {
+ if let NodeOrToken::Token(token) = it {
+ raw_tokens.push(NodeOrToken::Token(token));
+ }
+ });
+ if let Some(meta) = attr.meta() {
+ if let (Some(eq), Some(expr)) = (meta.eq_token(), meta.expr()) {
+ raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
+ raw_tokens.push(NodeOrToken::Token(eq.clone()));
+ raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
+
+ expr.syntax().descendants_with_tokens().for_each(|it| {
+ if let NodeOrToken::Token(token) = it {
+ raw_tokens.push(NodeOrToken::Token(token));
+ }
+ });
+ } else if let Some(tt) = meta.token_tree() {
+ raw_tokens.extend(tt.token_trees_and_tokens());
+ }
+ }
+ let meta = make::meta_token_tree(
+ make::ext::ident_path("cfg_attr"),
+ make::token_tree(T!['('], raw_tokens),
+ );
+ let cfg_attr = if attr.excl_token().is_some() {
+ make::attr_inner(meta)
+ } else {
+ make::attr_outer(meta)
+ }
+ .clone_for_update();
+ let attr_syntax = edit.make_syntax_mut(attr.syntax().clone());
+ ted::replace(attr_syntax, cfg_attr.syntax());
+
+ if let Some(snippet_cap) = ctx.config.snippet_cap {
+ if let Some(first_meta) =
+ cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
+ {
+ edit.add_tabstop_after_token(snippet_cap, first_meta)
+ }
+ }
+ };
+ acc.add(
+ AssistId("wrap_unwrap_cfg_attr", AssistKind::Refactor),
+ "Convert to `cfg_attr`",
+ range,
+ handle_source_change,
+ );
+ Some(())
+}
+fn unwrap_cfg_attr(acc: &mut Assists, attr: ast::Attr) -> Option<()> {
+ let range = attr.syntax().text_range();
+ let meta = attr.meta()?;
+ let meta_tt = meta.token_tree()?;
+ let mut inner_attrs = Vec::with_capacity(1);
+ let mut found_comma = false;
+ let mut iter = meta_tt.token_trees_and_tokens().skip(1).peekable();
+ while let Some(tt) = iter.next() {
+ if let NodeOrToken::Token(token) = &tt {
+ if token.kind() == T![')'] {
+ break;
+ }
+ if token.kind() == T![,] {
+ found_comma = true;
+ continue;
+ }
+ }
+ if !found_comma {
+ continue;
+ }
+ let Some(attr_name) = tt.into_token().and_then(|token| {
+ if token.kind() == T![ident] {
+ Some(make::ext::ident_path(token.text()))
+ } else {
+ None
+ }
+ }) else {
+ continue;
+ };
+ let next_tt = iter.next()?;
+ let meta = match next_tt {
+ NodeOrToken::Node(tt) => make::meta_token_tree(attr_name, tt),
+ NodeOrToken::Token(token) if token.kind() == T![,] || token.kind() == T![')'] => {
+ make::meta_path(attr_name)
+ }
+ NodeOrToken::Token(token) => {
+ let equals = algo::skip_trivia_token(token, syntax::Direction::Next)?;
+ if equals.kind() != T![=] {
+ return None;
+ }
+ let expr_token =
+ algo::skip_trivia_token(equals.next_token()?, syntax::Direction::Next)
+ .and_then(|it| {
+ if it.kind().is_literal() {
+ Some(make::expr_literal(it.text()))
+ } else {
+ None
+ }
+ })?;
+ make::meta_expr(attr_name, ast::Expr::Literal(expr_token))
+ }
+ };
+ if attr.excl_token().is_some() {
+ inner_attrs.push(make::attr_inner(meta));
+ } else {
+ inner_attrs.push(make::attr_outer(meta));
+ }
+ }
+ if inner_attrs.is_empty() {
+ return None;
+ }
+ let handle_source_change = |f: &mut SourceChangeBuilder| {
+ let inner_attrs = inner_attrs.iter().map(|it| it.to_string()).join("\n");
+ f.replace(range, inner_attrs);
+ };
+ acc.add(
+ AssistId("wrap_unwrap_cfg_attr", AssistKind::Refactor),
+ "Extract Inner Attributes from `cfg_attr`",
+ range,
+ handle_source_change,
+ );
+ Some(())
+}
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_assist;
+
+ use super::*;
+
+ #[test]
+ fn test_basic_to_from_cfg_attr() {
+ check_assist(
+ wrap_unwrap_cfg_attr,
+ r#"
+ #[derive$0(Debug)]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ r#"
+ #[cfg_attr($0, derive(Debug))]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ );
+ check_assist(
+ wrap_unwrap_cfg_attr,
+ r#"
+ #[cfg_attr(debug_assertions, $0 derive(Debug))]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ r#"
+ #[derive(Debug)]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ );
+ }
+ #[test]
+ fn to_from_path_attr() {
+ check_assist(
+ wrap_unwrap_cfg_attr,
+ r#"
+ pub struct Test {
+ #[foo$0]
+ test: u32,
+ }
+ "#,
+ r#"
+ pub struct Test {
+ #[cfg_attr($0, foo)]
+ test: u32,
+ }
+ "#,
+ );
+ check_assist(
+ wrap_unwrap_cfg_attr,
+ r#"
+ pub struct Test {
+ #[cfg_attr(debug_assertions$0, foo)]
+ test: u32,
+ }
+ "#,
+ r#"
+ pub struct Test {
+ #[foo]
+ test: u32,
+ }
+ "#,
+ );
+ }
+ #[test]
+ fn to_from_eq_attr() {
+ check_assist(
+ wrap_unwrap_cfg_attr,
+ r#"
+ pub struct Test {
+ #[foo = "bar"$0]
+ test: u32,
+ }
+ "#,
+ r#"
+ pub struct Test {
+ #[cfg_attr($0, foo = "bar")]
+ test: u32,
+ }
+ "#,
+ );
+ check_assist(
+ wrap_unwrap_cfg_attr,
+ r#"
+ pub struct Test {
+ #[cfg_attr(debug_assertions$0, foo = "bar")]
+ test: u32,
+ }
+ "#,
+ r#"
+ pub struct Test {
+ #[foo = "bar"]
+ test: u32,
+ }
+ "#,
+ );
+ }
+ #[test]
+ fn inner_attrs() {
+ check_assist(
+ wrap_unwrap_cfg_attr,
+ r#"
+ #![no_std$0]
+ "#,
+ r#"
+ #![cfg_attr($0, no_std)]
+ "#,
+ );
+ check_assist(
+ wrap_unwrap_cfg_attr,
+ r#"
+ #![cfg_attr(not(feature = "std")$0, no_std)]
+ "#,
+ r#"
+ #![no_std]
+ "#,
+ );
+ }
+ #[test]
+ fn test_derive_wrap() {
+ check_assist(
+ wrap_unwrap_cfg_attr,
+ r#"
+ #[derive(Debug$0, Clone, Copy)]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ r#"
+ #[derive( Clone, Copy)]
+ #[cfg_attr($0, derive(Debug))]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ );
+ check_assist(
+ wrap_unwrap_cfg_attr,
+ r#"
+ #[derive(Clone, Debug$0, Copy)]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ r#"
+ #[derive(Clone, Copy)]
+ #[cfg_attr($0, derive(Debug))]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ );
+ }
+ #[test]
+ fn test_derive_wrap_with_path() {
+ check_assist(
+ wrap_unwrap_cfg_attr,
+ r#"
+ #[derive(std::fmt::Debug$0, Clone, Copy)]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ r#"
+ #[derive( Clone, Copy)]
+ #[cfg_attr($0, derive(std::fmt::Debug))]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ );
+ check_assist(
+ wrap_unwrap_cfg_attr,
+ r#"
+ #[derive(Clone, std::fmt::Debug$0, Copy)]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ r#"
+ #[derive(Clone, Copy)]
+ #[cfg_attr($0, derive(std::fmt::Debug))]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ );
+ }
+ #[test]
+ fn test_derive_wrap_at_end() {
+ check_assist(
+ wrap_unwrap_cfg_attr,
+ r#"
+ #[derive(std::fmt::Debug, Clone, Cop$0y)]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ r#"
+ #[derive(std::fmt::Debug, Clone)]
+ #[cfg_attr($0, derive(Copy))]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ );
+ check_assist(
+ wrap_unwrap_cfg_attr,
+ r#"
+ #[derive(Clone, Copy, std::fmt::D$0ebug)]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ r#"
+ #[derive(Clone, Copy)]
+ #[cfg_attr($0, derive(std::fmt::Debug))]
+ pub struct Test {
+ test: u32,
+ }
+ "#,
+ );
+ }
+}
diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs
index 8f0b8f861c..0df5e913a5 100644
--- a/crates/ide-assists/src/lib.rs
+++ b/crates/ide-assists/src/lib.rs
@@ -116,6 +116,7 @@ mod handlers {
mod change_visibility;
mod convert_bool_then;
mod convert_comment_block;
+ mod convert_from_to_tryfrom;
mod convert_integer_literal;
mod convert_into_to_from;
mod convert_iter_for_each_to_for;
@@ -217,6 +218,7 @@ mod handlers {
mod unwrap_result_return_type;
mod unwrap_tuple;
mod wrap_return_type_in_result;
+ mod wrap_unwrap_cfg_attr;
pub(crate) fn all() -> &'static [Handler] {
&[
@@ -237,6 +239,7 @@ mod handlers {
convert_bool_then::convert_bool_then_to_if,
convert_bool_then::convert_if_to_bool_then,
convert_comment_block::convert_comment_block,
+ convert_from_to_tryfrom::convert_from_to_tryfrom,
convert_integer_literal::convert_integer_literal,
convert_into_to_from::convert_into_to_from,
convert_iter_for_each_to_for::convert_iter_for_each_to_for,
@@ -342,6 +345,8 @@ mod handlers {
unwrap_tuple::unwrap_tuple,
unqualify_method_call::unqualify_method_call,
wrap_return_type_in_result::wrap_return_type_in_result,
+ wrap_unwrap_cfg_attr::wrap_unwrap_cfg_attr,
+
// These are manually sorted for better priorities. By default,
// priority is determined by the size of the target range (smaller
// target wins). If the ranges are equal, position in this list is
diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs
index a66e199a75..937e78f8d7 100644
--- a/crates/ide-assists/src/tests/generated.rs
+++ b/crates/ide-assists/src/tests/generated.rs
@@ -391,6 +391,36 @@ fn main() {
}
#[test]
+fn doctest_convert_from_to_tryfrom() {
+ check_doc_test(
+ "convert_from_to_tryfrom",
+ r#####"
+//- minicore: from
+impl $0From<usize> for Thing {
+ fn from(val: usize) -> Self {
+ Thing {
+ b: val.to_string(),
+ a: val
+ }
+ }
+}
+"#####,
+ r#####"
+impl TryFrom<usize> for Thing {
+ type Error = ${0:()};
+
+ fn try_from(val: usize) -> Result<Self, Self::Error> {
+ Ok(Thing {
+ b: val.to_string(),
+ a: val
+ })
+ }
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_convert_if_to_bool_then() {
check_doc_test(
"convert_if_to_bool_then",
@@ -1820,13 +1850,13 @@ fn print(_: &str) {}
fn bar() {
{
- let word = "안녕하세요";
+ let word: &str = "안녕하세요";
if !word.is_empty() {
print(word);
}
};
{
- let word = "여러분";
+ let word: &str = "여러분";
if !word.is_empty() {
print(word);
}
@@ -3151,3 +3181,22 @@ fn foo() -> Result<i32, ${0:_}> { Ok(42i32) }
"#####,
)
}
+
+#[test]
+fn doctest_wrap_unwrap_cfg_attr() {
+ check_doc_test(
+ "wrap_unwrap_cfg_attr",
+ r#####"
+#[derive$0(Debug)]
+struct S {
+ field: i32
+}
+"#####,
+ r#####"
+#[cfg_attr($0, derive(Debug))]
+struct S {
+ field: i32
+}
+"#####,
+ )
+}
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index 8bd5d17933..bc0c9b79c7 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -71,7 +71,7 @@ pub fn extract_trivial_expression(block_expr: &ast::BlockExpr) -> Option<ast::Ex
///
/// It may produce false positives, for example, `#[wasm_bindgen_test]` requires a different command to run the test,
/// but it's better than not to have the runnables for the tests at all.
-pub fn test_related_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
+pub fn test_related_attribute_syn(fn_def: &ast::Fn) -> Option<ast::Attr> {
fn_def.attrs().find_map(|attr| {
let path = attr.path()?;
let text = path.syntax().text().to_string();
@@ -83,6 +83,19 @@ pub fn test_related_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
})
}
+pub fn has_test_related_attribute(attrs: &hir::AttrsWithOwner) -> bool {
+ attrs.iter().any(|attr| {
+ let path = attr.path();
+ (|| {
+ Some(
+ path.segments().first()?.as_text()?.starts_with("test")
+ || path.segments().last()?.as_text()?.ends_with("test"),
+ )
+ })()
+ .unwrap_or_default()
+ })
+}
+
#[derive(Clone, Copy, PartialEq)]
pub enum IgnoreAssocItems {
DocHiddenAttrPresent,
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs
index 361ad821f4..c6e243b31a 100644
--- a/crates/ide-completion/src/completions/postfix.rs
+++ b/crates/ide-completion/src/completions/postfix.rs
@@ -9,6 +9,7 @@ use ide_db::{
ty_filter::TryEnum,
SnippetCap,
};
+use stdx::never;
use syntax::{
ast::{self, make, AstNode, AstToken},
SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR},
@@ -319,7 +320,9 @@ fn build_postfix_snippet_builder<'ctx>(
) -> Option<impl Fn(&str, &str, &str) -> Builder + 'ctx> {
let receiver_range = ctx.sema.original_range_opt(receiver.syntax())?.range;
if ctx.source_range().end() < receiver_range.start() {
- // This shouldn't happen, yet it does. I assume this might be due to an incorrect token mapping.
+ // This shouldn't happen, yet it does. I assume this might be due to an incorrect token
+ // mapping.
+ never!();
return None;
}
let delete_range = TextRange::new(receiver_range.start(), ctx.source_range().end());
diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs
index e467808946..2361d14aae 100644
--- a/crates/ide-completion/src/completions/type.rs
+++ b/crates/ide-completion/src/completions/type.rs
@@ -226,7 +226,7 @@ pub(crate) fn complete_ascribed_type(
if !path_ctx.is_trivial_path() {
return None;
}
- let x = match ascription {
+ let ty = match ascription {
TypeAscriptionTarget::Let(pat) | TypeAscriptionTarget::FnParam(pat) => {
ctx.sema.type_of_pat(pat.as_ref()?)
}
@@ -235,7 +235,9 @@ pub(crate) fn complete_ascribed_type(
}
}?
.adjusted();
- let ty_string = x.display_source_code(ctx.db, ctx.module.into(), true).ok()?;
- acc.add(render_type_inference(ty_string, ctx));
+ if !ty.is_unknown() {
+ let ty_string = ty.display_source_code(ctx.db, ctx.module.into(), true).ok()?;
+ acc.add(render_type_inference(ty_string, ctx));
+ }
None
}
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 995e3f4825..8b435f419c 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -17,7 +17,7 @@ use ide_db::{
};
use syntax::{
ast::{self, AttrKind, NameOrNameRef},
- AstNode, SmolStr,
+ AstNode, Edition, SmolStr,
SyntaxKind::{self, *},
SyntaxToken, TextRange, TextSize, T,
};
@@ -667,7 +667,8 @@ impl<'a> CompletionContext<'a> {
let file_with_fake_ident = {
let parse = db.parse(file_id);
let edit = Indel::insert(offset, COMPLETION_MARKER.to_owned());
- parse.reparse(&edit).tree()
+ // FIXME: Edition
+ parse.reparse(&edit, Edition::CURRENT).tree()
};
// always pick the token to the immediate left of the cursor, as that is what we are actually
diff --git a/crates/ide-completion/src/snippet.rs b/crates/ide-completion/src/snippet.rs
index e667e2e016..7d710f1e13 100644
--- a/crates/ide-completion/src/snippet.rs
+++ b/crates/ide-completion/src/snippet.rs
@@ -200,7 +200,7 @@ fn validate_snippet(
) -> Option<(Box<[GreenNode]>, String, Option<Box<str>>)> {
let mut imports = Vec::with_capacity(requires.len());
for path in requires.iter() {
- let use_path = ast::SourceFile::parse(&format!("use {path};"))
+ let use_path = ast::SourceFile::parse(&format!("use {path};"), syntax::Edition::CURRENT)
.syntax_node()
.descendants()
.find_map(ast::Path::cast)?;
diff --git a/crates/ide-completion/src/tests/predicate.rs b/crates/ide-completion/src/tests/predicate.rs
index 3718dff56e..64a32dee3d 100644
--- a/crates/ide-completion/src/tests/predicate.rs
+++ b/crates/ide-completion/src/tests/predicate.rs
@@ -19,7 +19,7 @@ struct Foo<'lt, T, const C: usize> where $0 {}
en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo<…> Foo<'static, {unknown}, _>
+ st Foo<…> Foo<'{error}, {unknown}, _>
st Record Record
st Tuple Tuple
st Unit Unit
@@ -92,7 +92,7 @@ struct Foo<'lt, T, const C: usize> where for<'a> $0 {}
en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo<…> Foo<'static, {unknown}, _>
+ st Foo<…> Foo<'{error}, {unknown}, _>
st Record Record
st Tuple Tuple
st Unit Unit
diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs
index 9770972865..66f1bff7c1 100644
--- a/crates/ide-completion/src/tests/type_pos.rs
+++ b/crates/ide-completion/src/tests/type_pos.rs
@@ -20,8 +20,8 @@ struct Foo<'lt, T, const C: usize> {
en Enum Enum
ma makro!(…) macro_rules! makro
md module
- sp Self Foo<'static, {unknown}, _>
- st Foo<…> Foo<'static, {unknown}, _>
+ sp Self Foo<'{error}, {unknown}, _>
+ st Foo<…> Foo<'{error}, {unknown}, _>
st Record Record
st Tuple Tuple
st Unit Unit
@@ -45,8 +45,8 @@ struct Foo<'lt, T, const C: usize>(f$0);
en Enum Enum
ma makro!(…) macro_rules! makro
md module
- sp Self Foo<'static, {unknown}, _>
- st Foo<…> Foo<'static, {unknown}, _>
+ sp Self Foo<'{error}, {unknown}, _>
+ st Foo<…> Foo<'{error}, {unknown}, _>
st Record Record
st Tuple Tuple
st Unit Unit
diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml
index 071e1b4717..9a6826a5c4 100644
--- a/crates/ide-db/Cargo.toml
+++ b/crates/ide-db/Cargo.toml
@@ -26,6 +26,7 @@ indexmap.workspace = true
memchr = "2.6.4"
triomphe.workspace = true
nohash-hasher.workspace = true
+bitflags.workspace = true
# local deps
base-db.workspace = true
diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs
index ec05f6d13d..ce9a5f0dd2 100644
--- a/crates/ide-db/src/apply_change.rs
+++ b/crates/ide-db/src/apply_change.rs
@@ -91,82 +91,101 @@ impl RootDatabase {
crate::symbol_index::LocalRootsQuery
crate::symbol_index::LibraryRootsQuery
// HirDatabase
- hir::db::MirBodyQuery
+ hir::db::AdtDatumQuery
+ hir::db::AdtVarianceQuery
+ hir::db::AssociatedTyDataQuery
+ hir::db::AssociatedTyValueQuery
hir::db::BorrowckQuery
- hir::db::TyQuery
- hir::db::ValueTyQuery
- hir::db::ImplSelfTyQuery
- hir::db::ConstParamTyQuery
- hir::db::ConstEvalQuery
+ hir::db::CallableItemSignatureQuery
hir::db::ConstEvalDiscriminantQuery
- hir::db::ImplTraitQuery
+ hir::db::ConstEvalQuery
+ hir::db::ConstEvalStaticQuery
+ hir::db::ConstParamTyQuery
hir::db::FieldTypesQuery
- hir::db::LayoutOfAdtQuery
- hir::db::TargetDataLayoutQuery
- hir::db::CallableItemSignatureQuery
- hir::db::ReturnTypeImplTraitsQuery
+ hir::db::FnDefDatumQuery
+ hir::db::FnDefVarianceQuery
+ hir::db::GenericDefaultsQuery
hir::db::GenericPredicatesForParamQuery
hir::db::GenericPredicatesQuery
- hir::db::TraitEnvironmentQuery
- hir::db::GenericDefaultsQuery
- hir::db::InherentImplsInCrateQuery
- hir::db::InherentImplsInBlockQuery
+ hir::db::ImplDatumQuery
+ hir::db::ImplSelfTyQuery
+ hir::db::ImplTraitQuery
hir::db::IncoherentInherentImplCratesQuery
- hir::db::TraitImplsInCrateQuery
- hir::db::TraitImplsInBlockQuery
- hir::db::TraitImplsInDepsQuery
+ hir::db::InherentImplsInBlockQuery
+ hir::db::InherentImplsInCrateQuery
hir::db::InternCallableDefQuery
- hir::db::InternLifetimeParamIdQuery
- hir::db::InternImplTraitIdQuery
- hir::db::InternTypeOrConstParamIdQuery
hir::db::InternClosureQuery
hir::db::InternCoroutineQuery
- hir::db::AssociatedTyDataQuery
- hir::db::TraitDatumQuery
- hir::db::AdtDatumQuery
- hir::db::ImplDatumQuery
- hir::db::FnDefDatumQuery
- hir::db::FnDefVarianceQuery
- hir::db::AdtVarianceQuery
- hir::db::AssociatedTyValueQuery
+ hir::db::InternImplTraitIdQuery
+ hir::db::InternLifetimeParamIdQuery
+ hir::db::InternTypeOrConstParamIdQuery
+ hir::db::LayoutOfAdtQuery
+ hir::db::MirBodyQuery
hir::db::ProgramClausesForChalkEnvQuery
+ hir::db::ReturnTypeImplTraitsQuery
+ hir::db::TargetDataLayoutQuery
+ hir::db::TraitDatumQuery
+ hir::db::TraitEnvironmentQuery
+ hir::db::TraitImplsInBlockQuery
+ hir::db::TraitImplsInCrateQuery
+ hir::db::TraitImplsInDepsQuery
+ hir::db::TyQuery
+ hir::db::ValueTyQuery
// DefDatabase
- hir::db::FileItemTreeQuery
+ hir::db::AttrsQuery
hir::db::BlockDefMapQuery
- hir::db::StructDataWithDiagnosticsQuery
- hir::db::UnionDataWithDiagnosticsQuery
+ hir::db::BlockItemTreeQuery
+ hir::db::BodyQuery
+ hir::db::BodyWithSourceMapQuery
+ hir::db::ConstDataQuery
+ hir::db::ConstVisibilityQuery
+ hir::db::CrateDefMapQuery
+ hir::db::CrateLangItemsQuery
+ hir::db::CrateNotableTraitsQuery
+ hir::db::CrateSupportsNoStdQuery
hir::db::EnumDataQuery
hir::db::EnumVariantDataWithDiagnosticsQuery
- hir::db::ImplDataWithDiagnosticsQuery
- hir::db::TraitDataWithDiagnosticsQuery
- hir::db::TraitAliasDataQuery
- hir::db::TypeAliasDataQuery
- hir::db::FunctionDataQuery
- hir::db::ConstDataQuery
- hir::db::StaticDataQuery
- hir::db::Macro2DataQuery
- hir::db::MacroRulesDataQuery
- hir::db::ProcMacroDataQuery
- hir::db::BodyWithSourceMapQuery
- hir::db::BodyQuery
hir::db::ExprScopesQuery
- hir::db::GenericParamsQuery
+ hir::db::ExternCrateDeclDataQuery
+ hir::db::FieldVisibilitiesQuery
hir::db::FieldsAttrsQuery
hir::db::FieldsAttrsSourceMapQuery
- hir::db::AttrsQuery
- hir::db::CrateLangItemsQuery
- hir::db::LangItemQuery
- hir::db::ImportMapQuery
- hir::db::FieldVisibilitiesQuery
+ hir::db::FileItemTreeQuery
+ hir::db::FunctionDataQuery
hir::db::FunctionVisibilityQuery
- hir::db::ConstVisibilityQuery
- hir::db::CrateSupportsNoStdQuery
- hir::db::ExternCrateDeclDataQuery
+ hir::db::GenericParamsQuery
+ hir::db::ImplDataWithDiagnosticsQuery
+ hir::db::ImportMapQuery
hir::db::InternAnonymousConstQuery
+ hir::db::InternBlockQuery
+ hir::db::InternConstQuery
+ hir::db::InternEnumQuery
+ hir::db::InternExternBlockQuery
hir::db::InternExternCrateQuery
+ hir::db::InternFunctionQuery
+ hir::db::InternImplQuery
hir::db::InternInTypeConstQuery
+ hir::db::InternMacro2Query
+ hir::db::InternMacroRulesQuery
+ hir::db::InternProcMacroQuery
+ hir::db::InternStaticQuery
+ hir::db::InternStructQuery
+ hir::db::InternTraitAliasQuery
+ hir::db::InternTraitQuery
+ hir::db::InternTypeAliasQuery
+ hir::db::InternUnionQuery
hir::db::InternUseQuery
+ hir::db::LangItemQuery
+ hir::db::Macro2DataQuery
+ hir::db::MacroRulesDataQuery
+ hir::db::ProcMacroDataQuery
+ hir::db::StaticDataQuery
+ hir::db::StructDataWithDiagnosticsQuery
+ hir::db::TraitAliasDataQuery
+ hir::db::TraitDataWithDiagnosticsQuery
+ hir::db::TypeAliasDataQuery
+ hir::db::UnionDataWithDiagnosticsQuery
// InternDatabase
hir::db::InternFunctionQuery
@@ -192,9 +211,10 @@ impl RootDatabase {
hir::db::InternMacroCallQuery
hir::db::InternSyntaxContextQuery
hir::db::MacroArgQuery
+ hir::db::ParseMacroExpansionErrorQuery
hir::db::ParseMacroExpansionQuery
- hir::db::RealSpanMapQuery
hir::db::ProcMacrosQuery
+ hir::db::RealSpanMapQuery
// LineIndexDatabase
crate::LineIndexQuery
diff --git a/crates/ide-db/src/documentation.rs b/crates/ide-db/src/documentation.rs
index 72ca354365..58e77b95c3 100644
--- a/crates/ide-db/src/documentation.rs
+++ b/crates/ide-db/src/documentation.rs
@@ -91,8 +91,10 @@ pub fn docs_with_rangemap(
db: &dyn DefDatabase,
attrs: &AttrsWithOwner,
) -> Option<(Documentation, DocsRangeMap)> {
- let docs =
- attrs.by_key("doc").attrs().filter_map(|attr| attr.string_value().map(|s| (s, attr.id)));
+ let docs = attrs
+ .by_key("doc")
+ .attrs()
+ .filter_map(|attr| attr.string_value_unescape().map(|s| (s, attr.id)));
let indent = doc_indent(attrs);
let mut buf = String::new();
let mut mapping = Vec::new();
@@ -132,7 +134,7 @@ pub fn docs_with_rangemap(
}
pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option<String> {
- let docs = attrs.by_key("doc").attrs().filter_map(|attr| attr.string_value());
+ let docs = attrs.by_key("doc").attrs().filter_map(|attr| attr.string_value_unescape());
let indent = doc_indent(attrs);
let mut buf = String::new();
for doc in docs {
@@ -270,10 +272,9 @@ fn doc_indent(attrs: &hir::Attrs) -> usize {
attrs
.by_key("doc")
.attrs()
- .filter_map(|attr| attr.string_value())
+ .filter_map(|attr| attr.string_value()) // no need to use unescape version here
.flat_map(|s| s.lines())
- .filter(|line| !line.chars().all(|c| c.is_whitespace()))
- .map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
+ .filter_map(|line| line.chars().position(|c| !c.is_whitespace()))
.min()
.unwrap_or(0)
}
diff --git a/crates/ide-db/src/imports/insert_use.rs b/crates/ide-db/src/imports/insert_use.rs
index bd5c464c55..e97f1b8614 100644
--- a/crates/ide-db/src/imports/insert_use.rs
+++ b/crates/ide-db/src/imports/insert_use.rs
@@ -176,7 +176,7 @@ pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
pub fn insert_use_as_alias(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
let text: &str = "use foo as _";
- let parse = syntax::SourceFile::parse(text);
+ let parse = syntax::SourceFile::parse(text, span::Edition::CURRENT);
let node = parse
.tree()
.syntax()
diff --git a/crates/ide-db/src/imports/insert_use/tests.rs b/crates/ide-db/src/imports/insert_use/tests.rs
index 10c285a13f..9d1f1cc09c 100644
--- a/crates/ide-db/src/imports/insert_use/tests.rs
+++ b/crates/ide-db/src/imports/insert_use/tests.rs
@@ -1243,7 +1243,7 @@ fn check_with_config(
.and_then(|it| ImportScope::find_insert_use_container(&it, sema))
.or_else(|| ImportScope::from(syntax))
.unwrap();
- let path = ast::SourceFile::parse(&format!("use {path};"))
+ let path = ast::SourceFile::parse(&format!("use {path};"), span::Edition::CURRENT)
.tree()
.syntax()
.descendants()
@@ -1292,14 +1292,14 @@ fn check_one(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
}
fn check_merge_only_fail(ra_fixture0: &str, ra_fixture1: &str, mb: MergeBehavior) {
- let use0 = ast::SourceFile::parse(ra_fixture0)
+ let use0 = ast::SourceFile::parse(ra_fixture0, span::Edition::CURRENT)
.tree()
.syntax()
.descendants()
.find_map(ast::Use::cast)
.unwrap();
- let use1 = ast::SourceFile::parse(ra_fixture1)
+ let use1 = ast::SourceFile::parse(ra_fixture1, span::Edition::CURRENT)
.tree()
.syntax()
.descendants()
@@ -1311,7 +1311,7 @@ fn check_merge_only_fail(ra_fixture0: &str, ra_fixture1: &str, mb: MergeBehavior
}
fn check_guess(ra_fixture: &str, expected: ImportGranularityGuess) {
- let syntax = ast::SourceFile::parse(ra_fixture).tree().syntax().clone();
+ let syntax = ast::SourceFile::parse(ra_fixture, span::Edition::CURRENT).tree().syntax().clone();
let file = ImportScope::from(syntax).unwrap();
assert_eq!(super::guess_granularity_from_scope(&file), expected);
}
diff --git a/crates/ide-db/src/label.rs b/crates/ide-db/src/label.rs
index 4b6d54b5ea..919c1273e5 100644
--- a/crates/ide-db/src/label.rs
+++ b/crates/ide-db/src/label.rs
@@ -1,6 +1,8 @@
//! See [`Label`]
use std::fmt;
+use stdx::always;
+
/// A type to specify UI label, like an entry in the list of assists. Enforces
/// proper casing:
///
@@ -30,7 +32,7 @@ impl From<Label> for String {
impl Label {
pub fn new(label: String) -> Label {
- assert!(label.starts_with(char::is_uppercase) && !label.ends_with('.'));
+ always!(label.starts_with(char::is_uppercase) && !label.ends_with('.'));
Label(label)
}
}
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index a3ecc10360..cb103313c9 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -64,7 +64,7 @@ pub struct FileReference {
pub range: TextRange,
/// The node of the reference in the (macro-)file
pub name: FileReferenceNode,
- pub category: Option<ReferenceCategory>,
+ pub category: ReferenceCategory,
}
#[derive(Debug, Clone)]
@@ -124,17 +124,16 @@ impl FileReferenceNode {
}
}
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub enum ReferenceCategory {
- // FIXME: Add this variant and delete the `retain_adt_literal_usages` function.
- // Create
- Write,
- Read,
- Import,
- // FIXME: Some day should be able to search in doc comments. Would probably
- // need to switch from enum to bitflags then?
- // DocComment
- Test,
+bitflags::bitflags! {
+ #[derive(Copy, Clone, Default, PartialEq, Eq, Hash, Debug)]
+ pub struct ReferenceCategory: u8 {
+ // FIXME: Add this variant and delete the `retain_adt_literal_usages` function.
+ // const CREATE = 1 << 0;
+ const WRITE = 1 << 0;
+ const READ = 1 << 1;
+ const IMPORT = 1 << 2;
+ const TEST = 1 << 3;
+ }
}
/// Generally, `search_scope` returns files that might contain references for the element.
@@ -660,7 +659,7 @@ impl<'a> FindUsages<'a> {
let reference = FileReference {
range,
name: FileReferenceNode::NameRef(name_ref.clone()),
- category: None,
+ category: ReferenceCategory::empty(),
};
sink(file_id, reference)
}
@@ -676,10 +675,15 @@ impl<'a> FindUsages<'a> {
match NameRefClass::classify(self.sema, name_ref) {
Some(NameRefClass::Definition(def @ Definition::Module(_))) if def == self.def => {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
+ let category = if is_name_ref_in_import(name_ref) {
+ ReferenceCategory::IMPORT
+ } else {
+ ReferenceCategory::empty()
+ };
let reference = FileReference {
range,
name: FileReferenceNode::NameRef(name_ref.clone()),
- category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import),
+ category,
};
sink(file_id, reference)
}
@@ -700,7 +704,7 @@ impl<'a> FindUsages<'a> {
let reference = FileReference {
range,
name: FileReferenceNode::FormatStringEntry(token, range),
- category: Some(ReferenceCategory::Read),
+ category: ReferenceCategory::READ,
};
sink(file_id, reference)
}
@@ -719,7 +723,7 @@ impl<'a> FindUsages<'a> {
let reference = FileReference {
range,
name: FileReferenceNode::Lifetime(lifetime.clone()),
- category: None,
+ category: ReferenceCategory::empty(),
};
sink(file_id, reference)
}
@@ -817,7 +821,7 @@ impl<'a> FindUsages<'a> {
range,
name: FileReferenceNode::Name(name.clone()),
// FIXME: mutable patterns should have `Write` access
- category: Some(ReferenceCategory::Read),
+ category: ReferenceCategory::READ,
};
sink(file_id, reference)
}
@@ -826,7 +830,7 @@ impl<'a> FindUsages<'a> {
let reference = FileReference {
range,
name: FileReferenceNode::Name(name.clone()),
- category: None,
+ category: ReferenceCategory::empty(),
};
sink(file_id, reference)
}
@@ -851,7 +855,7 @@ impl<'a> FindUsages<'a> {
let reference = FileReference {
range,
name: FileReferenceNode::Name(name.clone()),
- category: None,
+ category: ReferenceCategory::empty(),
};
sink(file_id, reference)
}
@@ -875,38 +879,41 @@ impl ReferenceCategory {
sema: &Semantics<'_, RootDatabase>,
def: &Definition,
r: &ast::NameRef,
- ) -> Option<ReferenceCategory> {
+ ) -> ReferenceCategory {
+ let mut result = ReferenceCategory::empty();
if is_name_ref_in_test(sema, r) {
- return Some(ReferenceCategory::Test);
+ result |= ReferenceCategory::TEST;
}
// Only Locals and Fields have accesses for now.
if !matches!(def, Definition::Local(_) | Definition::Field(_)) {
- return is_name_ref_in_import(r).then_some(ReferenceCategory::Import);
+ if is_name_ref_in_import(r) {
+ result |= ReferenceCategory::IMPORT;
+ }
+ return result;
}
let mode = r.syntax().ancestors().find_map(|node| {
- match_ast! {
- match node {
- ast::BinExpr(expr) => {
- if matches!(expr.op_kind()?, ast::BinaryOp::Assignment { .. }) {
- // If the variable or field ends on the LHS's end then it's a Write (covers fields and locals).
- // FIXME: This is not terribly accurate.
- if let Some(lhs) = expr.lhs() {
- if lhs.syntax().text_range().end() == r.syntax().text_range().end() {
- return Some(ReferenceCategory::Write);
+ match_ast! {
+ match node {
+ ast::BinExpr(expr) => {
+ if matches!(expr.op_kind()?, ast::BinaryOp::Assignment { .. }) {
+ // If the variable or field ends on the LHS's end then it's a Write
+ // (covers fields and locals). FIXME: This is not terribly accurate.
+ if let Some(lhs) = expr.lhs() {
+ if lhs.syntax().text_range().end() == r.syntax().text_range().end() {
+ return Some(ReferenceCategory::WRITE)
+ }
}
}
- }
- Some(ReferenceCategory::Read)
- },
- _ => None
+ Some(ReferenceCategory::READ)
+ },
+ _ => None,
+ }
}
- }
- });
+ }).unwrap_or(ReferenceCategory::READ);
- // Default Locals and Fields to read
- mode.or(Some(ReferenceCategory::Read))
+ result | mode
}
}
diff --git a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
index 045154614f..6d0119fb57 100644
--- a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
@@ -317,7 +317,8 @@ fn main() {
#[test]
fn mismatched_types_issue_15883() {
// Check we don't panic.
- check_diagnostics_no_bails(
+ cov_mark::check!(validate_match_bailed_out);
+ check_diagnostics(
r#"
//- minicore: option
fn main() {
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index 7a03f176ac..41357b5962 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -81,14 +81,15 @@ fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<A
let adt = d.receiver.strip_references().as_adt()?;
let target_module = adt.module(ctx.sema.db);
- let suggested_type =
- if let Some(new_field_type) = ctx.sema.type_of_expr(&expr).map(|v| v.adjusted()) {
- let display =
- new_field_type.display_source_code(ctx.sema.db, target_module.into(), false).ok();
- make::ty(display.as_deref().unwrap_or("()"))
- } else {
- make::ty("()")
- };
+ let suggested_type = if let Some(new_field_type) =
+ ctx.sema.type_of_expr(&expr).map(|v| v.adjusted()).filter(|it| !it.is_unknown())
+ {
+ let display =
+ new_field_type.display_source_code(ctx.sema.db, target_module.into(), false).ok();
+ make::ty(display.as_deref().unwrap_or("()"))
+ } else {
+ make::ty("()")
+ };
if !is_editable_crate(target_module.krate(), ctx.sema.db) {
return None;
diff --git a/crates/ide-diagnostics/src/handlers/unused_variables.rs b/crates/ide-diagnostics/src/handlers/unused_variables.rs
index cd251faab9..fdd4e862ca 100644
--- a/crates/ide-diagnostics/src/handlers/unused_variables.rs
+++ b/crates/ide-diagnostics/src/handlers/unused_variables.rs
@@ -1,9 +1,12 @@
+use hir::Name;
use ide_db::{
assists::{Assist, AssistId, AssistKind},
base_db::FileRange,
label::Label,
source_change::SourceChange,
+ RootDatabase,
};
+use syntax::TextRange;
use text_edit::TextEdit;
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
@@ -21,7 +24,17 @@ pub(crate) fn unused_variables(
return None;
}
let diagnostic_range = ctx.sema.diagnostics_display_range(ast);
- let var_name = d.local.primary_source(ctx.sema.db).syntax().to_string();
+    // The range for the actual name. We don't want to replace the entire declaration. Using the diagnostic range causes issues in array destructuring.
+ let name_range = d
+ .local
+ .primary_source(ctx.sema.db)
+ .name()
+ .map(|v| v.syntax().original_file_range_rooted(ctx.sema.db))
+ .filter(|it| {
+ Some(it.file_id) == ast.file_id.file_id()
+ && diagnostic_range.range.contains_range(it.range)
+ });
+ let var_name = d.local.name(ctx.sema.db);
Some(
Diagnostic::new_with_syntax_node_ptr(
ctx,
@@ -29,23 +42,36 @@ pub(crate) fn unused_variables(
"unused variable",
ast,
)
- .with_fixes(fixes(&var_name, diagnostic_range, ast.file_id.is_macro()))
+ .with_fixes(name_range.and_then(|it| {
+ fixes(ctx.sema.db, var_name, it.range, diagnostic_range, ast.file_id.is_macro())
+ }))
.experimental(),
)
}
-fn fixes(var_name: &String, diagnostic_range: FileRange, is_in_marco: bool) -> Option<Vec<Assist>> {
+fn fixes(
+ db: &RootDatabase,
+ var_name: Name,
+ name_range: TextRange,
+ diagnostic_range: FileRange,
+ is_in_marco: bool,
+) -> Option<Vec<Assist>> {
if is_in_marco {
return None;
}
+
Some(vec![Assist {
id: AssistId("unscore_unused_variable_name", AssistKind::QuickFix),
- label: Label::new(format!("Rename unused {} to _{}", var_name, var_name)),
+ label: Label::new(format!(
+ "Rename unused {} to _{}",
+ var_name.display(db),
+ var_name.display(db)
+ )),
group: None,
target: diagnostic_range.range,
source_change: Some(SourceChange::from_text_edit(
diagnostic_range.file_id,
- TextEdit::replace(diagnostic_range.range, format!("_{}", var_name)),
+ TextEdit::replace(name_range, format!("_{}", var_name.display(db))),
)),
trigger_signature_help: false,
}])
@@ -213,4 +239,21 @@ fn main() {
"#,
);
}
+ #[test]
+ fn unused_variable_in_array_destructure() {
+ check_fix(
+ r#"
+fn main() {
+ let arr = [1, 2, 3, 4, 5];
+ let [_x, y$0 @ ..] = arr;
+}
+"#,
+ r#"
+fn main() {
+ let arr = [1, 2, 3, 4, 5];
+ let [_x, _y @ ..] = arr;
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index 270cf844c6..c3ced36a69 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -320,13 +320,11 @@ pub fn diagnostics(
let module = sema.file_to_module_def(file_id);
let ctx = DiagnosticsContext { config, sema, resolve };
- if module.is_none() {
- handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id);
- }
let mut diags = Vec::new();
- if let Some(m) = module {
- m.diagnostics(db, &mut diags, config.style_lints);
+ match module {
+ Some(m) => m.diagnostics(db, &mut diags, config.style_lints),
+ None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id),
}
for diag in diags {
@@ -409,6 +407,11 @@ pub fn diagnostics(
res.push(d)
}
+ res.retain(|d| {
+ !(ctx.config.disabled.contains(d.code.as_str())
+ || ctx.config.disable_experimental && d.experimental)
+ });
+
let mut diagnostics_of_range = res
.iter_mut()
.filter_map(|it| {
@@ -421,9 +424,14 @@ pub fn diagnostics(
})
.collect::<FxHashMap<_, _>>();
+ if diagnostics_of_range.is_empty() {
+ return res;
+ }
+
let mut rustc_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default();
let mut clippy_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default();
+ // FIXME: This becomes quite expensive for big files
handle_lint_attributes(
&ctx.sema,
parse.syntax(),
@@ -432,11 +440,7 @@ pub fn diagnostics(
&mut diagnostics_of_range,
);
- res.retain(|d| {
- d.severity != Severity::Allow
- && !ctx.config.disabled.contains(d.code.as_str())
- && !(ctx.config.disable_experimental && d.experimental)
- });
+ res.retain(|d| d.severity != Severity::Allow);
res
}
@@ -476,6 +480,7 @@ fn handle_lint_attributes(
clippy_stack: &mut FxHashMap<String, Vec<Severity>>,
diagnostics_of_range: &mut FxHashMap<InFile<SyntaxNode>, &mut Diagnostic>,
) {
+ let _g = tracing::span!(tracing::Level::INFO, "handle_lint_attributes").entered();
let file_id = sema.hir_file_for(root);
let preorder = root.preorder();
for ev in preorder {
@@ -486,24 +491,24 @@ fn handle_lint_attributes(
stack.push(severity);
});
}
- if let Some(x) =
+ if let Some(it) =
diagnostics_of_range.get_mut(&InFile { file_id, value: node.clone() })
{
const EMPTY_LINTS: &[&str] = &[];
- let (names, stack) = match x.code {
+ let (names, stack) = match it.code {
DiagnosticCode::RustcLint(name) => (
- RUSTC_LINT_GROUPS_DICT.get(name).map_or(EMPTY_LINTS, |x| &**x),
+ RUSTC_LINT_GROUPS_DICT.get(name).map_or(EMPTY_LINTS, |it| &**it),
&mut *rustc_stack,
),
DiagnosticCode::Clippy(name) => (
- CLIPPY_LINT_GROUPS_DICT.get(name).map_or(EMPTY_LINTS, |x| &**x),
+ CLIPPY_LINT_GROUPS_DICT.get(name).map_or(EMPTY_LINTS, |it| &**it),
&mut *clippy_stack,
),
_ => continue,
};
for &name in names {
- if let Some(s) = stack.get(name).and_then(|x| x.last()) {
- x.severity = *s;
+ if let Some(s) = stack.get(name).and_then(|it| it.last()) {
+ it.severity = *s;
}
}
}
@@ -571,8 +576,8 @@ fn parse_lint_attribute(
if let Some(lint) = lint.as_single_name_ref() {
job(rustc_stack.entry(lint.to_string()).or_default(), severity);
}
- if let Some(tool) = lint.qualifier().and_then(|x| x.as_single_name_ref()) {
- if let Some(name_ref) = &lint.segment().and_then(|x| x.name_ref()) {
+ if let Some(tool) = lint.qualifier().and_then(|it| it.as_single_name_ref()) {
+ if let Some(name_ref) = &lint.segment().and_then(|it| it.name_ref()) {
if tool.to_string() == "clippy" {
job(clippy_stack.entry(name_ref.to_string()).or_default(), severity);
}
diff --git a/crates/ide-ssr/src/fragments.rs b/crates/ide-ssr/src/fragments.rs
index 4d6809efbe..ca937a03f8 100644
--- a/crates/ide-ssr/src/fragments.rs
+++ b/crates/ide-ssr/src/fragments.rs
@@ -27,7 +27,7 @@ pub(crate) fn expr(s: &str) -> Result<SyntaxNode, ()> {
pub(crate) fn stmt(s: &str) -> Result<SyntaxNode, ()> {
let template = "const _: () = { {}; };";
let input = template.replace("{}", s);
- let parse = syntax::SourceFile::parse(&input);
+ let parse = syntax::SourceFile::parse(&input, syntax::Edition::CURRENT);
if !parse.errors().is_empty() {
return Err(());
}
@@ -48,7 +48,7 @@ pub(crate) fn stmt(s: &str) -> Result<SyntaxNode, ()> {
fn fragment<T: AstNode>(template: &str, s: &str) -> Result<SyntaxNode, ()> {
let s = s.trim();
let input = template.replace("{}", s);
- let parse = syntax::SourceFile::parse(&input);
+ let parse = syntax::SourceFile::parse(&input, syntax::Edition::CURRENT);
if !parse.errors().is_empty() {
return Err(());
}
diff --git a/crates/ide/src/annotations/fn_references.rs b/crates/ide/src/annotations/fn_references.rs
index a090b60413..2e7e230e5a 100644
--- a/crates/ide/src/annotations/fn_references.rs
+++ b/crates/ide/src/annotations/fn_references.rs
@@ -2,7 +2,7 @@
//! We have to skip tests, so cannot reuse file_structure module.
use hir::Semantics;
-use ide_assists::utils::test_related_attribute;
+use ide_assists::utils::test_related_attribute_syn;
use ide_db::RootDatabase;
use syntax::{ast, ast::HasName, AstNode, SyntaxNode, TextRange};
@@ -19,7 +19,7 @@ pub(super) fn find_all_methods(
fn method_range(item: SyntaxNode) -> Option<(TextRange, Option<TextRange>)> {
ast::Fn::cast(item).and_then(|fn_def| {
- if test_related_attribute(&fn_def).is_some() {
+ if test_related_attribute_syn(&fn_def).is_some() {
None
} else {
Some((
diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs
index 4b0961cbbe..e2d629a02f 100644
--- a/crates/ide/src/expand_macro.rs
+++ b/crates/ide/src/expand_macro.rs
@@ -177,7 +177,9 @@ fn _format(
use ide_db::base_db::{FileLoader, SourceDatabase};
// hack until we get hygiene working (same character amount to preserve formatting as much as possible)
const DOLLAR_CRATE_REPLACE: &str = "__r_a_";
- let expansion = expansion.replace("$crate", DOLLAR_CRATE_REPLACE);
+ const BUILTIN_REPLACE: &str = "builtin__POUND";
+ let expansion =
+ expansion.replace("$crate", DOLLAR_CRATE_REPLACE).replace("builtin #", BUILTIN_REPLACE);
let (prefix, suffix) = match kind {
SyntaxKind::MACRO_PAT => ("fn __(", ": u32);"),
SyntaxKind::MACRO_EXPR | SyntaxKind::MACRO_STMTS => ("fn __() {", "}"),
@@ -206,7 +208,9 @@ fn _format(
let captured_stdout = String::from_utf8(output.stdout).ok()?;
if output.status.success() && !captured_stdout.trim().is_empty() {
- let output = captured_stdout.replace(DOLLAR_CRATE_REPLACE, "$crate");
+ let output = captured_stdout
+ .replace(DOLLAR_CRATE_REPLACE, "$crate")
+ .replace(BUILTIN_REPLACE, "builtin #");
let output = output.trim().strip_prefix(prefix)?;
let output = match kind {
SyntaxKind::MACRO_PAT => {
diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs
index 813691540f..568906a098 100644
--- a/crates/ide/src/file_structure.rs
+++ b/crates/ide/src/file_structure.rs
@@ -220,7 +220,7 @@ mod tests {
use super::*;
fn check(ra_fixture: &str, expect: Expect) {
- let file = SourceFile::parse(ra_fixture).ok().unwrap();
+ let file = SourceFile::parse(ra_fixture, span::Edition::CURRENT).ok().unwrap();
let structure = file_structure(&file);
expect.assert_debug_eq(&structure)
}
diff --git a/crates/ide/src/folding_ranges.rs b/crates/ide/src/folding_ranges.rs
index 2bc0721123..c1b7693a65 100755
--- a/crates/ide/src/folding_ranges.rs
+++ b/crates/ide/src/folding_ranges.rs
@@ -289,7 +289,7 @@ mod tests {
fn check(ra_fixture: &str) {
let (ranges, text) = extract_tags(ra_fixture, "fold");
- let parse = SourceFile::parse(&text);
+ let parse = SourceFile::parse(&text, span::Edition::CURRENT);
let mut folds = folding_ranges(&parse.tree());
folds.sort_by_key(|fold| (fold.range.start(), fold.range.end()));
diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs
index e20e0b67f4..6f32ce76b2 100644
--- a/crates/ide/src/highlight_related.rs
+++ b/crates/ide/src/highlight_related.rs
@@ -26,7 +26,7 @@ pub struct HighlightedRange {
// FIXME: This needs to be more precise. Reference category makes sense only
// for references, but we also have defs. And things like exit points are
// neither.
- pub category: Option<ReferenceCategory>,
+ pub category: ReferenceCategory,
}
#[derive(Default, Clone)]
@@ -113,7 +113,11 @@ fn highlight_closure_captures(
range,
category,
});
- let category = local.is_mut(sema.db).then_some(ReferenceCategory::Write);
+ let category = if local.is_mut(sema.db) {
+ ReferenceCategory::WRITE
+ } else {
+ ReferenceCategory::empty()
+ };
local
.sources(sema.db)
.into_iter()
@@ -137,7 +141,9 @@ fn highlight_references(
{
match resolution.map(Definition::from) {
Some(def) => iter::once(def).collect(),
- None => return Some(vec![HighlightedRange { range, category: None }]),
+ None => {
+ return Some(vec![HighlightedRange { range, category: ReferenceCategory::empty() }])
+ }
}
} else {
find_defs(sema, token.clone())
@@ -211,7 +217,11 @@ fn highlight_references(
// highlight the defs themselves
match def {
Definition::Local(local) => {
- let category = local.is_mut(sema.db).then_some(ReferenceCategory::Write);
+ let category = if local.is_mut(sema.db) {
+ ReferenceCategory::WRITE
+ } else {
+ ReferenceCategory::empty()
+ };
local
.sources(sema.db)
.into_iter()
@@ -238,8 +248,11 @@ fn highlight_references(
continue;
}
let hl_range = nav.focus_range.map(|range| {
- let category = matches!(def, Definition::Local(l) if l.is_mut(sema.db))
- .then_some(ReferenceCategory::Write);
+ let category = if matches!(def, Definition::Local(l) if l.is_mut(sema.db)) {
+ ReferenceCategory::WRITE
+ } else {
+ ReferenceCategory::empty()
+ };
HighlightedRange { range, category }
});
if let Some(hl_range) = hl_range {
@@ -272,24 +285,30 @@ fn highlight_exit_points(
def_ranges
.into_iter()
.flatten()
- .map(|range| HighlightedRange { category: None, range }),
+ .map(|range| HighlightedRange { category: ReferenceCategory::empty(), range }),
);
let body = body?;
walk_expr(&body, &mut |expr| match expr {
ast::Expr::ReturnExpr(expr) => {
if let Some(token) = expr.return_token() {
- highlights.push(HighlightedRange { category: None, range: token.text_range() });
+ highlights.push(HighlightedRange {
+ category: ReferenceCategory::empty(),
+ range: token.text_range(),
+ });
}
}
ast::Expr::TryExpr(try_) => {
if let Some(token) = try_.question_mark_token() {
- highlights.push(HighlightedRange { category: None, range: token.text_range() });
+ highlights.push(HighlightedRange {
+ category: ReferenceCategory::empty(),
+ range: token.text_range(),
+ });
}
}
ast::Expr::MethodCallExpr(_) | ast::Expr::CallExpr(_) | ast::Expr::MacroExpr(_) => {
if sema.type_of_expr(&expr).map_or(false, |ty| ty.original.is_never()) {
highlights.push(HighlightedRange {
- category: None,
+ category: ReferenceCategory::empty(),
range: expr.syntax().text_range(),
});
}
@@ -309,7 +328,7 @@ fn highlight_exit_points(
.map_or_else(|| tail.syntax().text_range(), |tok| tok.text_range()),
_ => tail.syntax().text_range(),
};
- highlights.push(HighlightedRange { category: None, range })
+ highlights.push(HighlightedRange { category: ReferenceCategory::empty(), range })
});
}
Some(highlights)
@@ -354,7 +373,9 @@ fn highlight_break_points(token: SyntaxToken) -> Option<Vec<HighlightedRange>> {
token.map(|tok| tok.text_range()),
label.as_ref().map(|it| it.syntax().text_range()),
);
- highlights.extend(range.map(|range| HighlightedRange { category: None, range }));
+ highlights.extend(
+ range.map(|range| HighlightedRange { category: ReferenceCategory::empty(), range }),
+ );
for_each_break_and_continue_expr(label, body, &mut |expr| {
let range: Option<TextRange> = match (cursor_token_kind, expr) {
(T![for] | T![while] | T![loop] | T![break], ast::Expr::BreakExpr(break_)) => {
@@ -372,7 +393,9 @@ fn highlight_break_points(token: SyntaxToken) -> Option<Vec<HighlightedRange>> {
),
_ => None,
};
- highlights.extend(range.map(|range| HighlightedRange { category: None, range }));
+ highlights.extend(
+ range.map(|range| HighlightedRange { category: ReferenceCategory::empty(), range }),
+ );
});
Some(highlights)
}
@@ -430,14 +453,18 @@ fn highlight_yield_points(token: SyntaxToken) -> Option<Vec<HighlightedRange>> {
async_token: Option<SyntaxToken>,
body: Option<ast::Expr>,
) -> Option<Vec<HighlightedRange>> {
- let mut highlights =
- vec![HighlightedRange { category: None, range: async_token?.text_range() }];
+ let mut highlights = vec![HighlightedRange {
+ category: ReferenceCategory::empty(),
+ range: async_token?.text_range(),
+ }];
if let Some(body) = body {
walk_expr(&body, &mut |expr| {
if let ast::Expr::AwaitExpr(expr) = expr {
if let Some(token) = expr.await_token() {
- highlights
- .push(HighlightedRange { category: None, range: token.text_range() });
+ highlights.push(HighlightedRange {
+ category: ReferenceCategory::empty(),
+ range: token.text_range(),
+ });
}
}
});
@@ -481,6 +508,8 @@ fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSe
#[cfg(test)]
mod tests {
+ use itertools::Itertools;
+
use crate::fixture;
use super::*;
@@ -504,28 +533,18 @@ mod tests {
let hls = analysis.highlight_related(config, pos).unwrap().unwrap_or_default();
- let mut expected = annotations
- .into_iter()
- .map(|(r, access)| (r.range, (!access.is_empty()).then_some(access)))
- .collect::<Vec<_>>();
+ let mut expected =
+ annotations.into_iter().map(|(r, access)| (r.range, access)).collect::<Vec<_>>();
- let mut actual = hls
+ let mut actual: Vec<(TextRange, String)> = hls
.into_iter()
.map(|hl| {
(
hl.range,
- hl.category.map(|it| {
- match it {
- ReferenceCategory::Read => "read",
- ReferenceCategory::Write => "write",
- ReferenceCategory::Import => "import",
- ReferenceCategory::Test => "test",
- }
- .to_owned()
- }),
+ hl.category.iter_names().map(|(name, _flag)| name.to_lowercase()).join(","),
)
})
- .collect::<Vec<_>>();
+ .collect();
actual.sort_by_key(|(range, _)| range.start());
expected.sort_by_key(|(range, _)| range.start());
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index 822751c0e4..95de3c88c8 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -14,7 +14,7 @@ use ide_db::{
helpers::pick_best_token,
FxIndexSet, RootDatabase,
};
-use itertools::Itertools;
+use itertools::{multizip, Itertools};
use syntax::{ast, match_ast, AstNode, AstToken, SyntaxKind::*, SyntaxNode, T};
use crate::{
@@ -149,7 +149,7 @@ fn hover_simple(
if let Some(doc_comment) = token_as_doc_comment(&original_token) {
cov_mark::hit!(no_highlight_on_comment_hover);
return doc_comment.get_definition_with_descend_at(sema, offset, |def, node, range| {
- let res = hover_for_definition(sema, file_id, def, &node, config);
+ let res = hover_for_definition(sema, file_id, def, &node, None, config);
Some(RangeInfo::new(range, res))
});
}
@@ -162,6 +162,7 @@ fn hover_simple(
file_id,
Definition::from(resolution?),
&original_token.parent()?,
+ None,
config,
);
return Some(RangeInfo::new(range, res));
@@ -196,6 +197,29 @@ fn hover_simple(
descended()
.filter_map(|token| {
let node = token.parent()?;
+
+ // special case macro calls, we wanna render the invoked arm index
+ if let Some(name) = ast::NameRef::cast(node.clone()) {
+ if let Some(path_seg) =
+ name.syntax().parent().and_then(ast::PathSegment::cast)
+ {
+ if let Some(macro_call) = path_seg
+ .parent_path()
+ .syntax()
+ .parent()
+ .and_then(ast::MacroCall::cast)
+ {
+ if let Some(macro_) = sema.resolve_macro_call(&macro_call) {
+ return Some(vec![(
+ Definition::Macro(macro_),
+ sema.resolve_macro_call_arm(&macro_call),
+ node,
+ )]);
+ }
+ }
+ }
+ }
+
match IdentClass::classify_node(sema, &node)? {
// It's better for us to fall back to the keyword hover here,
// rendering poll is very confusing
@@ -204,20 +228,19 @@ fn hover_simple(
IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand {
decl,
..
- }) => Some(vec![(Definition::ExternCrateDecl(decl), node)]),
+ }) => Some(vec![(Definition::ExternCrateDecl(decl), None, node)]),
class => Some(
- class
- .definitions()
- .into_iter()
- .zip(iter::repeat(node))
+ multizip((class.definitions(), iter::repeat(None), iter::repeat(node)))
.collect::<Vec<_>>(),
),
}
})
.flatten()
- .unique_by(|&(def, _)| def)
- .map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config))
+ .unique_by(|&(def, _, _)| def)
+ .map(|(def, macro_arm, node)| {
+ hover_for_definition(sema, file_id, def, &node, macro_arm, config)
+ })
.reduce(|mut acc: HoverResult, HoverResult { markup, actions }| {
acc.actions.extend(actions);
acc.markup = Markup::from(format!("{}\n---\n{markup}", acc.markup));
@@ -374,6 +397,7 @@ pub(crate) fn hover_for_definition(
file_id: FileId,
def: Definition,
scope_node: &SyntaxNode,
+ macro_arm: Option<u32>,
config: &HoverConfig,
) -> HoverResult {
let famous_defs = match &def {
@@ -398,7 +422,8 @@ pub(crate) fn hover_for_definition(
};
let notable_traits = def_ty.map(|ty| notable_traits(db, &ty)).unwrap_or_default();
- let markup = render::definition(sema.db, def, famous_defs.as_ref(), &notable_traits, config);
+ let markup =
+ render::definition(sema.db, def, famous_defs.as_ref(), &notable_traits, macro_arm, config);
HoverResult {
markup: render::process_markup(sema.db, def, &markup, config),
actions: [
diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs
index abedbff831..3f0fc85134 100644
--- a/crates/ide/src/hover/render.rs
+++ b/crates/ide/src/hover/render.rs
@@ -101,7 +101,7 @@ pub(super) fn try_expr(
if let Some((inner, body)) = error_type_args {
inner_ty = inner;
body_ty = body;
- s = "Try Error".to_owned();
+ "Try Error".clone_into(&mut s);
}
}
}
@@ -403,6 +403,7 @@ pub(super) fn definition(
def: Definition,
famous_defs: Option<&FamousDefs<'_, '_>>,
notable_traits: &[(Trait, Vec<(Option<Type>, Name)>)],
+ macro_arm: Option<u32>,
config: &HoverConfig,
) -> Markup {
let mod_path = definition_mod_path(db, &def);
@@ -413,6 +414,13 @@ pub(super) fn definition(
Definition::Adt(Adt::Struct(struct_)) => {
struct_.display_limited(db, config.max_struct_field_count).to_string()
}
+ Definition::Macro(it) => {
+ let mut label = it.display(db).to_string();
+ if let Some(macro_arm) = macro_arm {
+ format_to!(label, " // matched arm #{}", macro_arm);
+ }
+ label
+ }
_ => def.label(db),
};
let docs = def.docs(db, famous_defs);
@@ -637,7 +645,7 @@ fn closure_ty(
})
.join("\n");
if captures_rendered.trim().is_empty() {
- captures_rendered = "This closure captures nothing".to_owned();
+ "This closure captures nothing".clone_into(&mut captures_rendered);
}
let mut targets: Vec<hir::ModuleDef> = Vec::new();
let mut push_new_def = |item: hir::ModuleDef| {
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index 08925fcdff..6bbc8b380d 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -1560,21 +1560,21 @@ fn y() {
fn test_hover_macro_invocation() {
check(
r#"
-macro_rules! foo { () => {} }
+macro_rules! foo { (a) => {}; () => {} }
fn f() { fo$0o!(); }
"#,
expect![[r#"
- *foo*
+ *foo*
- ```rust
- test
- ```
+ ```rust
+ test
+ ```
- ```rust
- macro_rules! foo
- ```
- "#]],
+ ```rust
+ macro_rules! foo // matched arm #1
+ ```
+ "#]],
)
}
@@ -1590,22 +1590,22 @@ macro foo() {}
fn f() { fo$0o!(); }
"#,
expect![[r#"
- *foo*
+ *foo*
- ```rust
- test
- ```
+ ```rust
+ test
+ ```
- ```rust
- macro foo
- ```
+ ```rust
+ macro foo // matched arm #0
+ ```
- ---
+ ---
- foo bar
+ foo bar
- foo bar baz
- "#]],
+ foo bar baz
+ "#]],
)
}
@@ -2323,6 +2323,49 @@ fn test_hover_layout_of_variant() {
}
#[test]
+fn test_hover_layout_of_variant_generic() {
+ check(
+ r#"enum Option<T> {
+ Some(T),
+ None$0
+}"#,
+ expect![[r#"
+ *None*
+
+ ```rust
+ test::Option
+ ```
+
+ ```rust
+ None
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_layout_generic_unused() {
+ check(
+ r#"
+//- minicore: phantom_data
+struct S$0<T>(core::marker::PhantomData<T>);
+"#,
+ expect![[r#"
+ *S*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ // size = 0, align = 1
+ struct S<T>(PhantomData<T>)
+ ```
+ "#]],
+ );
+}
+
+#[test]
fn test_hover_layout_of_enum() {
check(
r#"enum $0Foo {
@@ -3258,12 +3301,12 @@ fn foo(ar$0g: &impl Foo<S>) {}
fn test_hover_dyn_return_has_goto_type_action() {
check_actions(
r#"
-trait Foo {}
+trait Foo<T> {}
struct S;
-impl Foo for S {}
+impl Foo<S> for S {}
struct B<T>{}
-fn foo() -> B<dyn Foo> {}
+fn foo() -> B<dyn Foo<S>> {}
fn main() { let s$0t = foo(); }
"#,
@@ -3277,8 +3320,8 @@ fn main() { let s$0t = foo(); }
file_id: FileId(
0,
),
- full_range: 42..55,
- focus_range: 49..50,
+ full_range: 48..61,
+ focus_range: 55..56,
name: "B",
kind: Struct,
description: "struct B<T>",
@@ -3290,11 +3333,24 @@ fn main() { let s$0t = foo(); }
file_id: FileId(
0,
),
- full_range: 0..12,
+ full_range: 0..15,
focus_range: 6..9,
name: "Foo",
kind: Trait,
- description: "trait Foo",
+ description: "trait Foo<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 16..25,
+ focus_range: 23..24,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
},
},
],
@@ -3673,6 +3729,7 @@ struct S$0T<const C: usize = 1, T = Foo>(T);
```
```rust
+ // size = 0, align = 1
struct ST<const C: usize = 1, T = Foo>(T)
```
"#]],
@@ -3694,6 +3751,7 @@ struct S$0T<const C: usize = {40 + 2}, T = Foo>(T);
```
```rust
+ // size = 0, align = 1
struct ST<const C: usize = {const}, T = Foo>(T)
```
"#]],
@@ -3716,6 +3774,7 @@ struct S$0T<const C: usize = VAL, T = Foo>(T);
```
```rust
+ // size = 0, align = 1
struct ST<const C: usize = VAL, T = Foo>(T)
```
"#]],
@@ -4040,7 +4099,6 @@ impl<T> Foo<T$0> {}
```
"#]],
);
- // lifetimes bounds arent being tracked yet
check(
r#"
//- minicore: sized
@@ -4051,7 +4109,7 @@ impl<T: 'static> Foo<T$0> {}
*T*
```rust
- T
+ T: 'static
```
"#]],
);
@@ -4215,6 +4273,10 @@ fn foo<T$0: ?Sized + Sized + Sized>() {}
```
"#]],
);
+ }
+
+ #[test]
+ fn mixed2() {
check(
r#"
//- minicore: sized
@@ -7873,8 +7935,44 @@ struct Pedro$0<'a> {
```
```rust
+ // size = 16 (0x10), align = 8, niches = 1
struct Pedro<'a>
```
"#]],
)
}
+
+#[test]
+fn hover_impl_trait_arg_self() {
+ check(
+ r#"
+trait T<Rhs = Self> {}
+fn main(a$0: impl T) {}
+"#,
+ expect![[r#"
+ *a*
+
+ ```rust
+ a: impl T + ?Sized
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_struct_default_arg_self() {
+ check(
+ r#"
+struct T<Rhs = Self> {}
+fn main(a$0: T) {}
+"#,
+ expect![[r#"
+ *a*
+
+ ```rust
+ // size = 0, align = 1
+ a: T
+ ```
+ "#]],
+ );
+}
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index dda38ce77e..15eecd1b54 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -1,6 +1,5 @@
use std::{
fmt::{self, Write},
- hash::{BuildHasher, BuildHasherDefault},
mem::take,
};
@@ -9,7 +8,7 @@ use hir::{
known, ClosureStyle, HasVisibility, HirDisplay, HirDisplayError, HirWrite, ModuleDef,
ModuleDefId, Semantics,
};
-use ide_db::{base_db::FileRange, famous_defs::FamousDefs, FxHasher, RootDatabase};
+use ide_db::{base_db::FileRange, famous_defs::FamousDefs, RootDatabase};
use itertools::Itertools;
use smallvec::{smallvec, SmallVec};
use stdx::never;
@@ -495,6 +494,7 @@ pub(crate) fn inlay_hints_resolve(
position: TextSize,
hash: u64,
config: &InlayHintsConfig,
+ hasher: impl Fn(&InlayHint) -> u64,
) -> Option<InlayHint> {
let _p = tracing::span!(tracing::Level::INFO, "inlay_hints").entered();
let sema = Semantics::new(db);
@@ -506,20 +506,16 @@ pub(crate) fn inlay_hints_resolve(
let mut acc = Vec::new();
let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node);
- match file.token_at_offset(position).left_biased() {
- Some(token) => {
- if let Some(parent_block) = token.parent_ancestors().find_map(ast::BlockExpr::cast) {
- parent_block.syntax().descendants().for_each(hints)
- } else if let Some(parent_item) = token.parent_ancestors().find_map(ast::Item::cast) {
- parent_item.syntax().descendants().for_each(hints)
- } else {
- return None;
- }
- }
- None => return None,
+ let token = file.token_at_offset(position).left_biased()?;
+ if let Some(parent_block) = token.parent_ancestors().find_map(ast::BlockExpr::cast) {
+ parent_block.syntax().descendants().for_each(hints)
+ } else if let Some(parent_item) = token.parent_ancestors().find_map(ast::Item::cast) {
+ parent_item.syntax().descendants().for_each(hints)
+ } else {
+ return None;
}
- acc.into_iter().find(|hint| BuildHasherDefault::<FxHasher>::default().hash_one(hint) == hash)
+ acc.into_iter().find(|hint| hasher(hint) == hash)
}
fn hints(
diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs
index 815a4ba7fd..9d8ba90b2f 100644
--- a/crates/ide/src/join_lines.rs
+++ b/crates/ide/src/join_lines.rs
@@ -316,7 +316,7 @@ mod tests {
};
let (before_cursor_pos, before) = extract_offset(ra_fixture_before);
- let file = SourceFile::parse(&before).ok().unwrap();
+ let file = SourceFile::parse(&before, span::Edition::CURRENT).ok().unwrap();
let range = TextRange::empty(before_cursor_pos);
let result = join_lines(&config, &file, range);
@@ -342,7 +342,7 @@ mod tests {
};
let (sel, before) = extract_range(ra_fixture_before);
- let parse = SourceFile::parse(&before);
+ let parse = SourceFile::parse(&before, span::Edition::CURRENT);
let result = join_lines(&config, &parse.tree(), sel);
let actual = {
let mut actual = before;
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index ad48d80389..431aa30e56 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -58,13 +58,15 @@ mod view_item_tree;
mod view_memory_layout;
mod view_mir;
+use std::panic::UnwindSafe;
+
use cfg::CfgOptions;
use fetch_crates::CrateInfo;
use hir::ChangeWithProcMacros;
use ide_db::{
base_db::{
salsa::{self, ParallelDatabase},
- CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, VfsPath,
+ CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, SourceDatabaseExt, VfsPath,
},
prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase,
};
@@ -252,7 +254,7 @@ impl Analysis {
Edition::CURRENT,
None,
None,
- cfg_options.clone(),
+ Arc::new(cfg_options),
None,
Env::default(),
false,
@@ -271,6 +273,10 @@ impl Analysis {
self.with_db(|db| status::status(db, file_id))
}
+ pub fn source_root(&self, file_id: FileId) -> Cancellable<SourceRootId> {
+ self.with_db(|db| db.file_source_root(file_id))
+ }
+
pub fn parallel_prime_caches<F>(&self, num_worker_threads: u8, cb: F) -> Cancellable<()>
where
F: Fn(ParallelPrimeCachesProgress) + Sync + std::panic::UnwindSafe,
@@ -280,7 +286,7 @@ impl Analysis {
/// Gets the text of the source file.
pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> {
- self.with_db(|db| db.file_text(file_id))
+ self.with_db(|db| SourceDatabaseExt::file_text(db, file_id))
}
/// Gets the syntax tree of the file.
@@ -290,7 +296,6 @@ impl Analysis {
/// Returns true if this file belongs to an immutable library.
pub fn is_library_file(&self, file_id: FileId) -> Cancellable<bool> {
- use ide_db::base_db::SourceDatabaseExt;
self.with_db(|db| db.source_root(db.file_source_root(file_id)).is_library)
}
@@ -428,8 +433,11 @@ impl Analysis {
file_id: FileId,
position: TextSize,
hash: u64,
+ hasher: impl Fn(&InlayHint) -> u64 + Send + UnwindSafe,
) -> Cancellable<Option<InlayHint>> {
- self.with_db(|db| inlay_hints::inlay_hints_resolve(db, file_id, position, hash, config))
+ self.with_db(|db| {
+ inlay_hints::inlay_hints_resolve(db, file_id, position, hash, config, hasher)
+ })
}
/// Returns the set of folding ranges.
diff --git a/crates/ide/src/matching_brace.rs b/crates/ide/src/matching_brace.rs
index 6e8a6d020c..5735615283 100644
--- a/crates/ide/src/matching_brace.rs
+++ b/crates/ide/src/matching_brace.rs
@@ -50,7 +50,7 @@ mod tests {
fn test_matching_brace() {
fn do_check(before: &str, after: &str) {
let (pos, before) = extract_offset(before);
- let parse = SourceFile::parse(&before);
+ let parse = SourceFile::parse(&before, span::Edition::CURRENT);
let new_pos = match matching_brace(&parse.tree(), pos) {
None => pos,
Some(pos) => pos,
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index fef2aba3c6..01af864cdf 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -30,7 +30,7 @@ use crate::{FilePosition, NavigationTarget, TryToNav};
#[derive(Debug, Clone)]
pub struct ReferenceSearchResult {
pub declaration: Option<Declaration>,
- pub references: IntMap<FileId, Vec<(TextRange, Option<ReferenceCategory>)>>,
+ pub references: IntMap<FileId, Vec<(TextRange, ReferenceCategory)>>,
}
#[derive(Debug, Clone)]
@@ -66,7 +66,7 @@ pub(crate) fn find_all_refs(
retain_adt_literal_usages(&mut usages, def, sema);
}
- let mut references = usages
+ let mut references: IntMap<FileId, Vec<(TextRange, ReferenceCategory)>> = usages
.into_iter()
.map(|(file_id, refs)| {
(
@@ -77,7 +77,7 @@ pub(crate) fn find_all_refs(
.collect(),
)
})
- .collect::<IntMap<_, Vec<_>>>();
+ .collect();
let declaration = match def {
Definition::Module(module) => {
Some(NavigationTarget::from_module_to_decl(sema.db, module))
@@ -93,7 +93,7 @@ pub(crate) fn find_all_refs(
references
.entry(extra_ref.file_id)
.or_default()
- .push((extra_ref.focus_or_full_range(), None));
+ .push((extra_ref.focus_or_full_range(), ReferenceCategory::empty()));
}
Declaration {
is_mut: matches!(def, Definition::Local(l) if l.is_mut(sema.db)),
@@ -300,7 +300,7 @@ fn is_lit_name_ref(name_ref: &ast::NameRef) -> bool {
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
- use ide_db::{base_db::FileId, search::ReferenceCategory};
+ use ide_db::base_db::FileId;
use stdx::format_to;
use crate::{fixture, SearchScope};
@@ -324,7 +324,7 @@ fn test() {
test_func Function FileId(0) 0..17 3..12
FileId(0) 35..44
- FileId(0) 75..84 Test
+ FileId(0) 75..84 test
"#]],
);
@@ -345,7 +345,28 @@ fn test() {
test_func Function FileId(0) 0..17 3..12
FileId(0) 35..44
- FileId(0) 96..105 Test
+ FileId(0) 96..105 test
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_access() {
+ check(
+ r#"
+struct S { f$0: u32 }
+
+#[test]
+fn test() {
+ let mut x = S { f: 92 };
+ x.f = 92;
+}
+"#,
+ expect![[r#"
+ f Field FileId(0) 11..17 11..12
+
+ FileId(0) 61..62 read test
+ FileId(0) 76..77 write test
"#]],
);
}
@@ -600,12 +621,12 @@ fn main() {
i = 5;
}"#,
expect![[r#"
- i Local FileId(0) 20..25 24..25 Write
+ i Local FileId(0) 20..25 24..25 write
- FileId(0) 50..51 Write
- FileId(0) 54..55 Read
- FileId(0) 76..77 Write
- FileId(0) 94..95 Write
+ FileId(0) 50..51 write
+ FileId(0) 54..55 read
+ FileId(0) 76..77 write
+ FileId(0) 94..95 write
"#]],
);
}
@@ -626,8 +647,8 @@ fn bar() {
expect![[r#"
spam Local FileId(0) 19..23 19..23
- FileId(0) 34..38 Read
- FileId(0) 41..45 Read
+ FileId(0) 34..38 read
+ FileId(0) 41..45 read
"#]],
);
}
@@ -641,7 +662,7 @@ fn foo(i : u32) -> u32 { i$0 }
expect![[r#"
i ValueParam FileId(0) 7..8 7..8
- FileId(0) 25..26 Read
+ FileId(0) 25..26 read
"#]],
);
}
@@ -655,7 +676,7 @@ fn foo(i$0 : u32) -> u32 { i }
expect![[r#"
i ValueParam FileId(0) 7..8 7..8
- FileId(0) 25..26 Read
+ FileId(0) 25..26 read
"#]],
);
}
@@ -676,7 +697,7 @@ fn main(s: Foo) {
expect![[r#"
spam Field FileId(0) 17..30 21..25
- FileId(0) 67..71 Read
+ FileId(0) 67..71 read
"#]],
);
}
@@ -824,7 +845,7 @@ pub struct Foo {
expect![[r#"
foo Module FileId(0) 0..8 4..7
- FileId(0) 14..17 Import
+ FileId(0) 14..17 import
"#]],
);
}
@@ -842,7 +863,7 @@ use self$0;
expect![[r#"
foo Module FileId(0) 0..8 4..7
- FileId(1) 4..8 Import
+ FileId(1) 4..8 import
"#]],
);
}
@@ -857,7 +878,7 @@ use self$0;
expect![[r#"
Module FileId(0) 0..10
- FileId(0) 4..8 Import
+ FileId(0) 4..8 import
"#]],
);
}
@@ -885,7 +906,7 @@ pub(super) struct Foo$0 {
expect![[r#"
Foo Struct FileId(2) 0..41 18..21 some
- FileId(1) 20..23 Import
+ FileId(1) 20..23 import
FileId(1) 47..50
"#]],
);
@@ -960,10 +981,10 @@ fn foo() {
}
"#,
expect![[r#"
- i Local FileId(0) 19..24 23..24 Write
+ i Local FileId(0) 19..24 23..24 write
- FileId(0) 34..35 Write
- FileId(0) 38..39 Read
+ FileId(0) 34..35 write
+ FileId(0) 38..39 read
"#]],
);
}
@@ -984,8 +1005,8 @@ fn foo() {
expect![[r#"
f Field FileId(0) 15..21 15..16
- FileId(0) 55..56 Read
- FileId(0) 68..69 Write
+ FileId(0) 55..56 read
+ FileId(0) 68..69 write
"#]],
);
}
@@ -1002,7 +1023,7 @@ fn foo() {
expect![[r#"
i Local FileId(0) 19..20 19..20
- FileId(0) 26..27 Write
+ FileId(0) 26..27 write
"#]],
);
}
@@ -1048,7 +1069,7 @@ fn g() { f(); }
expect![[r#"
f Function FileId(0) 22..31 25..26
- FileId(1) 11..12 Import
+ FileId(1) 11..12 import
FileId(1) 24..25
"#]],
);
@@ -1071,7 +1092,7 @@ fn f(s: S) {
expect![[r#"
field Field FileId(0) 15..24 15..20
- FileId(0) 68..73 Read
+ FileId(0) 68..73 read
"#]],
);
}
@@ -1095,7 +1116,7 @@ fn f(e: En) {
expect![[r#"
field Field FileId(0) 32..41 32..37
- FileId(0) 102..107 Read
+ FileId(0) 102..107 read
"#]],
);
}
@@ -1119,7 +1140,7 @@ fn f() -> m::En {
expect![[r#"
field Field FileId(0) 56..65 56..61
- FileId(0) 125..130 Read
+ FileId(0) 125..130 read
"#]],
);
}
@@ -1144,8 +1165,8 @@ impl Foo {
expect![[r#"
self SelfParam FileId(0) 47..51 47..51
- FileId(0) 71..75 Read
- FileId(0) 152..156 Read
+ FileId(0) 71..75 read
+ FileId(0) 152..156 read
"#]],
);
}
@@ -1165,7 +1186,7 @@ impl Foo {
expect![[r#"
self SelfParam FileId(0) 47..51 47..51
- FileId(0) 63..67 Read
+ FileId(0) 63..67 read
"#]],
);
}
@@ -1185,16 +1206,16 @@ impl Foo {
if let Some(decl) = refs.declaration {
format_to!(actual, "{}", decl.nav.debug_render());
if decl.is_mut {
- format_to!(actual, " {:?}", ReferenceCategory::Write)
+ format_to!(actual, " write",)
}
actual += "\n\n";
}
for (file_id, references) in &refs.references {
- for (range, access) in references {
+ for (range, category) in references {
format_to!(actual, "{:?} {:?}", file_id, range);
- if let Some(access) = access {
- format_to!(actual, " {:?}", access);
+ for (name, _flag) in category.iter_names() {
+ format_to!(actual, " {}", name.to_lowercase());
}
actual += "\n";
}
@@ -1281,7 +1302,7 @@ fn main() {
expect![[r#"
a Local FileId(0) 59..60 59..60
- FileId(0) 80..81 Read
+ FileId(0) 80..81 read
"#]],
);
}
@@ -1299,7 +1320,7 @@ fn main() {
expect![[r#"
a Local FileId(0) 59..60 59..60
- FileId(0) 80..81 Read
+ FileId(0) 80..81 read
"#]],
);
}
@@ -1479,7 +1500,7 @@ fn test$0() {
expect![[r#"
test Function FileId(0) 0..33 11..15
- FileId(0) 24..28 Test
+ FileId(0) 24..28 test
"#]],
);
}
@@ -1538,9 +1559,9 @@ pub use level1::Foo;
expect![[r#"
Foo Struct FileId(0) 0..15 11..14
- FileId(1) 16..19 Import
- FileId(2) 16..19 Import
- FileId(3) 16..19 Import
+ FileId(1) 16..19 import
+ FileId(2) 16..19 import
+ FileId(3) 16..19 import
"#]],
);
}
@@ -1568,7 +1589,7 @@ lib::foo!();
expect![[r#"
foo Macro FileId(1) 0..61 29..32
- FileId(0) 46..49 Import
+ FileId(0) 46..49 import
FileId(2) 0..3
FileId(3) 5..8
"#]],
@@ -1731,7 +1752,7 @@ struct Foo;
expect![[r#"
derive_identity Derive FileId(2) 1..107 45..60
- FileId(0) 17..31 Import
+ FileId(0) 17..31 import
FileId(0) 56..70
"#]],
);
@@ -2055,7 +2076,7 @@ fn method() {}
expect![[r#"
method Field FileId(0) 60..70 60..66
- FileId(0) 136..142 Read
+ FileId(0) 136..142 read
"#]],
);
check(
@@ -2101,7 +2122,7 @@ fn method() {}
expect![[r#"
method Field FileId(0) 60..70 60..66
- FileId(0) 136..142 Read
+ FileId(0) 136..142 read
"#]],
);
check(
@@ -2160,9 +2181,9 @@ fn test() {
expect![[r#"
a Local FileId(0) 20..21 20..21
- FileId(0) 56..57 Read
- FileId(0) 60..61 Read
- FileId(0) 68..69 Read
+ FileId(0) 56..57 read
+ FileId(0) 60..61 read
+ FileId(0) 68..69 read
"#]],
);
}
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index 79324bf387..b6c6753755 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -1,9 +1,11 @@
use std::fmt;
use ast::HasName;
-use cfg::CfgExpr;
-use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, HirFileIdExt, Semantics};
-use ide_assists::utils::test_related_attribute;
+use cfg::{CfgAtom, CfgExpr};
+use hir::{
+ db::HirDatabase, AsAssocItem, AttrsWithOwner, HasAttrs, HasSource, HirFileIdExt, Semantics,
+};
+use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
use ide_db::{
base_db::{FilePosition, FileRange},
defs::Definition,
@@ -280,7 +282,7 @@ fn find_related_tests_in_module(
}
fn as_test_runnable(sema: &Semantics<'_, RootDatabase>, fn_def: &ast::Fn) -> Option<Runnable> {
- if test_related_attribute(fn_def).is_some() {
+ if test_related_attribute_syn(fn_def).is_some() {
let function = sema.to_def(fn_def)?;
runnable_fn(sema, function)
} else {
@@ -293,7 +295,7 @@ fn parent_test_module(sema: &Semantics<'_, RootDatabase>, fn_def: &ast::Fn) -> O
let module = ast::Module::cast(node)?;
let module = sema.to_def(&module)?;
- if has_test_function_or_multiple_test_submodules(sema, &module) {
+ if has_test_function_or_multiple_test_submodules(sema, &module, false) {
Some(module)
} else {
None
@@ -305,7 +307,8 @@ pub(crate) fn runnable_fn(
sema: &Semantics<'_, RootDatabase>,
def: hir::Function,
) -> Option<Runnable> {
- let kind = if def.is_main(sema.db) {
+ let under_cfg_test = has_cfg_test(def.module(sema.db).attrs(sema.db));
+ let kind = if !under_cfg_test && def.is_main(sema.db) {
RunnableKind::Bin
} else {
let test_id = || {
@@ -342,7 +345,8 @@ pub(crate) fn runnable_mod(
sema: &Semantics<'_, RootDatabase>,
def: hir::Module,
) -> Option<Runnable> {
- if !has_test_function_or_multiple_test_submodules(sema, &def) {
+ if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(def.attrs(sema.db)))
+ {
return None;
}
let path = def
@@ -384,12 +388,17 @@ pub(crate) fn runnable_impl(
Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::DocTest { test_id }, cfg })
}
+fn has_cfg_test(attrs: AttrsWithOwner) -> bool {
+ attrs.cfgs().any(|cfg| matches!(cfg, CfgExpr::Atom(CfgAtom::Flag(s)) if s == "test"))
+}
+
/// Creates a test mod runnable for outline modules at the top of their definition.
fn runnable_mod_outline_definition(
sema: &Semantics<'_, RootDatabase>,
def: hir::Module,
) -> Option<Runnable> {
- if !has_test_function_or_multiple_test_submodules(sema, &def) {
+ if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(def.attrs(sema.db)))
+ {
return None;
}
let path = def
@@ -522,20 +531,28 @@ fn has_runnable_doc_test(attrs: &hir::Attrs) -> bool {
fn has_test_function_or_multiple_test_submodules(
sema: &Semantics<'_, RootDatabase>,
module: &hir::Module,
+ consider_exported_main: bool,
) -> bool {
let mut number_of_test_submodules = 0;
for item in module.declarations(sema.db) {
match item {
hir::ModuleDef::Function(f) => {
- if let Some(it) = f.source(sema.db) {
- if test_related_attribute(&it.value).is_some() {
- return true;
- }
+ if has_test_related_attribute(&f.attrs(sema.db)) {
+ return true;
+ }
+ if consider_exported_main && f.exported_main(sema.db) {
+ // an exported main in a test module can be considered a test wrt to custom test
+ // runners
+ return true;
}
}
hir::ModuleDef::Module(submodule) => {
- if has_test_function_or_multiple_test_submodules(sema, &submodule) {
+ if has_test_function_or_multiple_test_submodules(
+ sema,
+ &submodule,
+ consider_exported_main,
+ ) {
number_of_test_submodules += 1;
}
}
@@ -1484,4 +1501,39 @@ mod r#mod {
"#]],
)
}
+
+ #[test]
+ fn exported_main_is_test_in_cfg_test_mod() {
+ check(
+ r#"
+//- /lib.rs crate:foo cfg:test
+$0
+mod not_a_test_module_inline {
+ #[export_name = "main"]
+ fn exp_main() {}
+}
+#[cfg(test)]
+mod test_mod_inline {
+ #[export_name = "main"]
+ fn exp_main() {}
+}
+mod not_a_test_module;
+#[cfg(test)]
+mod test_mod;
+//- /not_a_test_module.rs
+#[export_name = "main"]
+fn exp_main() {}
+//- /test_mod.rs
+#[export_name = "main"]
+fn exp_main() {}
+"#,
+ expect![[r#"
+ [
+ "(Bin, NavigationTarget { file_id: FileId(0), full_range: 36..80, focus_range: 67..75, name: \"exp_main\", kind: Function })",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 83..168, focus_range: 100..115, name: \"test_mod_inline\", kind: Module, description: \"mod test_mod_inline\" }, Atom(Flag(\"test\")))",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 192..218, focus_range: 209..217, name: \"test_mod\", kind: Module, description: \"mod test_mod\" }, Atom(Flag(\"test\")))",
+ ]
+ "#]],
+ )
+ }
}
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index 3fef16df25..ca013da709 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -188,7 +188,14 @@ impl StaticIndex<'_> {
} else {
let it = self.tokens.insert(TokenStaticData {
documentation: documentation_for_definition(&sema, def, &node),
- hover: Some(hover_for_definition(&sema, file_id, def, &node, &hover_config)),
+ hover: Some(hover_for_definition(
+ &sema,
+ file_id,
+ def,
+ &node,
+ None,
+ &hover_config,
+ )),
definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| {
FileRange { file_id: it.file_id, range: it.focus_or_full_range() }
}),
diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs
index e7346cbb99..a72f505eb8 100644
--- a/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/crates/ide/src/syntax_highlighting/highlight.rs
@@ -444,7 +444,6 @@ pub(super) fn highlight_def(
Definition::Variant(_) => Highlight::new(HlTag::Symbol(SymbolKind::Variant)),
Definition::Const(konst) => {
let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Const)) | HlMod::Const;
-
if let Some(item) = konst.as_assoc_item(db) {
h |= HlMod::Associated;
h |= HlMod::Static;
diff --git a/crates/ide/src/syntax_highlighting/tags.rs b/crates/ide/src/syntax_highlighting/tags.rs
index 5c7a463ccd..e329023606 100644
--- a/crates/ide/src/syntax_highlighting/tags.rs
+++ b/crates/ide/src/syntax_highlighting/tags.rs
@@ -77,6 +77,7 @@ pub enum HlMod {
Library,
/// Used to differentiate individual elements within macro calls.
Macro,
+ /// Used to differentiate individual elements within proc-macro calls.
ProcMacro,
/// Mutable binding.
Mutable,
@@ -113,7 +114,7 @@ pub enum HlPunct {
Semi,
/// ! (only for macro calls)
MacroBang,
- ///
+ /// Other punctutations
Other,
}
@@ -127,7 +128,7 @@ pub enum HlOperator {
Logical,
/// >, <, ==, >=, <=, !=
Comparison,
- ///
+ /// Other operators
Other,
}
@@ -225,8 +226,8 @@ impl HlMod {
HlMod::IntraDocLink,
HlMod::Library,
HlMod::Macro,
- HlMod::ProcMacro,
HlMod::Mutable,
+ HlMod::ProcMacro,
HlMod::Public,
HlMod::Reference,
HlMod::Static,
@@ -262,6 +263,7 @@ impl HlMod {
}
fn mask(self) -> u32 {
+ debug_assert!(Self::ALL.len() <= 32, "HlMod::mask is not enough to cover all variants");
1 << (self as u32)
}
}
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
index 7ba1194d67..5234d362c2 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
@@ -218,7 +218,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="bool_literal">true</span>
<span class="brace">}</span>
<span class="brace">}</span>
-<span class="keyword const">const</span> <span class="constant const declaration">USAGE_OF_BOOL</span><span class="colon">:</span><span class="builtin_type">bool</span> <span class="operator">=</span> <span class="enum public">Bool</span><span class="operator">::</span><span class="enum_variant public">True</span><span class="operator">.</span><span class="method consuming public">to_primitive</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="keyword const">const</span> <span class="constant const declaration">USAGE_OF_BOOL</span><span class="colon">:</span> <span class="builtin_type">bool</span> <span class="operator">=</span> <span class="enum public">Bool</span><span class="operator">::</span><span class="enum_variant public">True</span><span class="operator">.</span><span class="method consuming public">to_primitive</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">trait</span> <span class="trait declaration">Baz</span> <span class="brace">{</span>
<span class="keyword">type</span> <span class="type_alias associated declaration static trait">Qux</span><span class="semicolon">;</span>
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index c2990fd76e..901e41d27c 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -300,7 +300,7 @@ impl Bool {
true
}
}
-const USAGE_OF_BOOL:bool = Bool::True.to_primitive();
+const USAGE_OF_BOOL: bool = Bool::True.to_primitive();
trait Baz {
type Qux;
diff --git a/crates/ide/src/syntax_tree.rs b/crates/ide/src/syntax_tree.rs
index 1065d5899a..05cdf430ef 100644
--- a/crates/ide/src/syntax_tree.rs
+++ b/crates/ide/src/syntax_tree.rs
@@ -88,7 +88,7 @@ fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<St
// Remove custom markers
.replace("$0", "");
- let parsed = SourceFile::parse(&text);
+ let parsed = SourceFile::parse(&text, span::Edition::CURRENT);
// If the "file" parsed without errors,
// return its syntax
diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs
index d3eee0e02e..b899304ef2 100644
--- a/crates/ide/src/typing.rs
+++ b/crates/ide/src/typing.rs
@@ -127,7 +127,8 @@ fn on_opening_bracket_typed(
if !stdx::always!(range.len() == TextSize::of(opening_bracket)) {
return None;
}
- let file = file.reparse(&Indel::delete(range));
+ // FIXME: Edition
+ let file = file.reparse(&Indel::delete(range), span::Edition::CURRENT);
if let Some(edit) = bracket_expr(&file.tree(), offset, opening_bracket, closing_bracket) {
return Some(edit);
@@ -411,7 +412,7 @@ mod tests {
let (offset, mut before) = extract_offset(before);
let edit = TextEdit::insert(offset, char_typed.to_string());
edit.apply(&mut before);
- let parse = SourceFile::parse(&before);
+ let parse = SourceFile::parse(&before, span::Edition::CURRENT);
on_char_typed_inner(&parse, offset, char_typed).map(|it| {
it.apply(&mut before);
before.to_string()
diff --git a/crates/intern/src/lib.rs b/crates/intern/src/lib.rs
index d784321c7c..40d18b1cf8 100644
--- a/crates/intern/src/lib.rs
+++ b/crates/intern/src/lib.rs
@@ -174,6 +174,7 @@ pub struct InternStorage<T: ?Sized> {
map: OnceLock<InternMap<T>>,
}
+#[allow(clippy::new_without_default)] // this a const fn, so it can't be default
impl<T: ?Sized> InternStorage<T> {
pub const fn new() -> Self {
Self { map: OnceLock::new() }
diff --git a/crates/limit/Cargo.toml b/crates/limit/Cargo.toml
index c89722cc40..c1a768833b 100644
--- a/crates/limit/Cargo.toml
+++ b/crates/limit/Cargo.toml
@@ -10,7 +10,6 @@ rust-version.workspace = true
[features]
tracking = []
-default = ["tracking"]
[lints]
-workspace = true \ No newline at end of file
+workspace = true
diff --git a/crates/load-cargo/Cargo.toml b/crates/load-cargo/Cargo.toml
index 48e84a7b25..b6f90ec53b 100644
--- a/crates/load-cargo/Cargo.toml
+++ b/crates/load-cargo/Cargo.toml
@@ -28,5 +28,8 @@ tt.workspace = true
vfs-notify.workspace = true
vfs.workspace = true
+[features]
+in-rust-tree = ["hir-expand/in-rust-tree"]
+
[lints]
workspace = true
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 79d6fe36b5..31b0c8cdec 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -335,7 +335,7 @@ fn load_crate_graph(
) -> RootDatabase {
let (ProjectWorkspace::Cargo { toolchain, target_layout, .. }
| ProjectWorkspace::Json { toolchain, target_layout, .. }
- | ProjectWorkspace::DetachedFiles { toolchain, target_layout, .. }) = ws;
+ | ProjectWorkspace::DetachedFile { toolchain, target_layout, .. }) = ws;
let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
let mut db = RootDatabase::new(lru_cap);
@@ -407,8 +407,7 @@ impl ProcMacroExpander for Expander {
call_site: Span,
mixed_site: Span,
) -> Result<tt::Subtree<Span>, ProcMacroExpansionError> {
- let env = env.iter().map(|(k, v)| (k.to_owned(), v.to_owned())).collect();
- match self.0.expand(subtree, attrs, env, def_site, call_site, mixed_site) {
+ match self.0.expand(subtree, attrs, env.clone(), def_site, call_site, mixed_site) {
Ok(Ok(subtree)) => Ok(subtree),
Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index 4d5531ae30..f4bbaef7af 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -1,7 +1,7 @@
//! This module add real world mbe example for benchmark tests
use rustc_hash::FxHashMap;
-use span::Span;
+use span::{Edition, Span};
use syntax::{
ast::{self, HasName},
AstNode, SmolStr,
@@ -46,9 +46,9 @@ fn benchmark_expand_macro_rules() {
invocations
.into_iter()
.map(|(id, tt)| {
- let res = rules[&id].expand(&tt, |_| (), true, DUMMY);
+ let res = rules[&id].expand(&tt, |_| (), true, DUMMY, Edition::CURRENT);
assert!(res.err.is_none());
- res.value.token_trees.len()
+ res.value.0.token_trees.len()
})
.sum()
};
@@ -66,7 +66,7 @@ fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<Span>> {
let fixture = bench_fixture::numerous_macro_rules();
- let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
+ let source_file = ast::SourceFile::parse(&fixture, span::Edition::CURRENT).ok().unwrap();
source_file
.syntax()
@@ -120,7 +120,7 @@ fn invocation_fixtures(
},
token_trees: token_trees.into_boxed_slice(),
};
- if it.expand(&subtree, |_| (), true, DUMMY).err.is_none() {
+ if it.expand(&subtree, |_| (), true, DUMMY, Edition::CURRENT).err.is_none() {
res.push((name.clone(), subtree));
break;
}
diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs
index 2f2c0aa6ff..cfad8bcc0b 100644
--- a/crates/mbe/src/expander.rs
+++ b/crates/mbe/src/expander.rs
@@ -6,10 +6,10 @@ mod matcher;
mod transcriber;
use rustc_hash::FxHashMap;
-use span::Span;
+use span::{Edition, Span};
use syntax::SmolStr;
-use crate::{parser::MetaVarKind, ExpandError, ExpandResult};
+use crate::{parser::MetaVarKind, ExpandError, ExpandResult, MatchedArmIndex};
pub(crate) fn expand_rules(
rules: &[crate::Rule],
@@ -17,10 +17,11 @@ pub(crate) fn expand_rules(
marker: impl Fn(&mut Span) + Copy,
new_meta_vars: bool,
call_site: Span,
-) -> ExpandResult<tt::Subtree<Span>> {
- let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
- for rule in rules {
- let new_match = matcher::match_(&rule.lhs, input);
+ def_site_edition: Edition,
+) -> ExpandResult<(tt::Subtree<Span>, MatchedArmIndex)> {
+ let mut match_: Option<(matcher::Match, &crate::Rule, usize)> = None;
+ for (idx, rule) in rules.iter().enumerate() {
+ let new_match = matcher::match_(&rule.lhs, input, def_site_edition);
if new_match.err.is_none() {
// If we find a rule that applies without errors, we're done.
@@ -34,31 +35,34 @@ pub(crate) fn expand_rules(
call_site,
);
if transcribe_err.is_none() {
- return ExpandResult::ok(value);
+ return ExpandResult::ok((value, Some(idx as u32)));
}
}
// Use the rule if we matched more tokens, or bound variables count
- if let Some((prev_match, _)) = &match_ {
+ if let Some((prev_match, _, _)) = &match_ {
if (new_match.unmatched_tts, -(new_match.bound_count as i32))
< (prev_match.unmatched_tts, -(prev_match.bound_count as i32))
{
- match_ = Some((new_match, rule));
+ match_ = Some((new_match, rule, idx));
}
} else {
- match_ = Some((new_match, rule));
+ match_ = Some((new_match, rule, idx));
}
}
- if let Some((match_, rule)) = match_ {
+ if let Some((match_, rule, idx)) = match_ {
// if we got here, there was no match without errors
let ExpandResult { value, err: transcribe_err } =
transcriber::transcribe(&rule.rhs, &match_.bindings, marker, new_meta_vars, call_site);
- ExpandResult { value, err: match_.err.or(transcribe_err) }
+ ExpandResult { value: (value, idx.try_into().ok()), err: match_.err.or(transcribe_err) }
} else {
ExpandResult::new(
- tt::Subtree {
- delimiter: tt::Delimiter::invisible_spanned(call_site),
- token_trees: Box::new([]),
- },
+ (
+ tt::Subtree {
+ delimiter: tt::Delimiter::invisible_spanned(call_site),
+ token_trees: Box::default(),
+ },
+ None,
+ ),
ExpandError::NoMatchingRule,
)
}
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs
index 3170834d54..78d4bfee2a 100644
--- a/crates/mbe/src/expander/matcher.rs
+++ b/crates/mbe/src/expander/matcher.rs
@@ -62,7 +62,7 @@
use std::rc::Rc;
use smallvec::{smallvec, SmallVec};
-use span::Span;
+use span::{Edition, Span};
use syntax::SmolStr;
use tt::DelimSpan;
@@ -108,8 +108,8 @@ impl Match {
}
/// Matching errors are added to the `Match`.
-pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree<Span>) -> Match {
- let mut res = match_loop(pattern, input);
+pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree<Span>, edition: Edition) -> Match {
+ let mut res = match_loop(pattern, input, edition);
res.bound_count = count(res.bindings.bindings());
return res;
@@ -363,6 +363,7 @@ fn match_loop_inner<'t>(
eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
error_items: &mut SmallVec<[MatchState<'t>; 1]>,
delim_span: tt::DelimSpan<Span>,
+ edition: Edition,
) {
macro_rules! try_push {
($items: expr, $it:expr) => {
@@ -473,7 +474,7 @@ fn match_loop_inner<'t>(
OpDelimited::Op(Op::Var { kind, name, .. }) => {
if let &Some(kind) = kind {
let mut fork = src.clone();
- let match_res = match_meta_var(kind, &mut fork, delim_span);
+ let match_res = match_meta_var(kind, &mut fork, delim_span, edition);
match match_res.err {
None => {
// Some meta variables are optional (e.g. vis)
@@ -586,7 +587,7 @@ fn match_loop_inner<'t>(
}
}
-fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>) -> Match {
+fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition) -> Match {
let span = src.delimiter.delim_span();
let mut src = TtIter::new(src);
let mut stack: SmallVec<[TtIter<'_, Span>; 1]> = SmallVec::new();
@@ -627,6 +628,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>) -> Match {
&mut eof_items,
&mut error_items,
span,
+ edition,
);
stdx::always!(cur_items.is_empty());
@@ -740,21 +742,14 @@ fn match_meta_var(
kind: MetaVarKind,
input: &mut TtIter<'_, Span>,
delim_span: DelimSpan<Span>,
+ edition: Edition,
) -> ExpandResult<Option<Fragment>> {
let fragment = match kind {
MetaVarKind::Path => {
- return input.expect_fragment(parser::PrefixEntryPoint::Path).map(|it| {
+ return input.expect_fragment(parser::PrefixEntryPoint::Path, edition).map(|it| {
it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
});
}
- MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
- MetaVarKind::Pat => parser::PrefixEntryPoint::PatTop,
- MetaVarKind::PatParam => parser::PrefixEntryPoint::Pat,
- MetaVarKind::Stmt => parser::PrefixEntryPoint::Stmt,
- MetaVarKind::Block => parser::PrefixEntryPoint::Block,
- MetaVarKind::Meta => parser::PrefixEntryPoint::MetaItem,
- MetaVarKind::Item => parser::PrefixEntryPoint::Item,
- MetaVarKind::Vis => parser::PrefixEntryPoint::Vis,
MetaVarKind::Expr => {
// `expr` should not match underscores, let expressions, or inline const. The latter
// two are for [backwards compatibility][0].
@@ -770,7 +765,7 @@ fn match_meta_var(
}
_ => {}
};
- return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| {
+ return input.expect_fragment(parser::PrefixEntryPoint::Expr, edition).map(|tt| {
tt.map(|tt| match tt {
tt::TokenTree::Leaf(leaf) => tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
@@ -818,8 +813,16 @@ fn match_meta_var(
};
return tt_result.map(|it| Some(Fragment::Tokens(it))).into();
}
+ MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
+ MetaVarKind::Pat => parser::PrefixEntryPoint::PatTop,
+ MetaVarKind::PatParam => parser::PrefixEntryPoint::Pat,
+ MetaVarKind::Stmt => parser::PrefixEntryPoint::Stmt,
+ MetaVarKind::Block => parser::PrefixEntryPoint::Block,
+ MetaVarKind::Meta => parser::PrefixEntryPoint::MetaItem,
+ MetaVarKind::Item => parser::PrefixEntryPoint::Item,
+ MetaVarKind::Vis => parser::PrefixEntryPoint::Vis,
};
- input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens))
+ input.expect_fragment(fragment, edition).map(|it| it.map(Fragment::Tokens))
}
fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 3a85351266..d5de56312a 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -122,6 +122,9 @@ impl fmt::Display for CountError {
}
}
+/// Index of the matched macro arm on successful expansion.
+pub type MatchedArmIndex = Option<u32>;
+
/// This struct contains AST for a single `macro_rules` definition. What might
/// be very confusing is that AST has almost exactly the same shape as
/// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident`
@@ -250,8 +253,9 @@ impl DeclarativeMacro {
marker: impl Fn(&mut Span) + Copy,
new_meta_vars: bool,
call_site: Span,
- ) -> ExpandResult<tt::Subtree<Span>> {
- expander::expand_rules(&self.rules, tt, marker, new_meta_vars, call_site)
+ def_site_edition: Edition,
+ ) -> ExpandResult<(tt::Subtree<Span>, MatchedArmIndex)> {
+ expander::expand_rules(&self.rules, tt, marker, new_meta_vars, call_site, def_site_edition)
}
}
@@ -329,6 +333,10 @@ impl<T, E> ValueResult<T, E> {
Self { value: Default::default(), err: Some(err) }
}
+ pub fn zip_val<U>(self, other: U) -> ValueResult<(T, U), E> {
+ ValueResult { value: (self.value, other), err: self.err }
+ }
+
pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ValueResult<U, E> {
ValueResult { value: f(self.value), err: self.err }
}
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index c934db6b71..3230eeb5bd 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -3,7 +3,7 @@
use std::fmt;
use rustc_hash::{FxHashMap, FxHashSet};
-use span::{SpanAnchor, SpanData, SpanMap};
+use span::{Edition, SpanAnchor, SpanData, SpanMap};
use stdx::{never, non_empty_vec::NonEmptyVec};
use syntax::{
ast::{self, make::tokens::doc_comment},
@@ -119,6 +119,7 @@ where
pub fn token_tree_to_syntax_node<Ctx>(
tt: &tt::Subtree<SpanData<Ctx>>,
entry_point: parser::TopEntryPoint,
+ edition: parser::Edition,
) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
where
SpanData<Ctx>: Copy + fmt::Debug,
@@ -131,7 +132,7 @@ where
_ => TokenBuffer::from_subtree(tt),
};
let parser_input = to_parser_input(&buffer);
- let parser_output = entry_point.parse(&parser_input);
+ let parser_output = entry_point.parse(&parser_input, edition);
let mut tree_sink = TtTreeSink::new(buffer.begin());
for event in parser_output.iter() {
match event {
@@ -182,7 +183,12 @@ where
}
/// Split token tree with separate expr: $($e:expr)SEP*
-pub fn parse_exprs_with_sep<S>(tt: &tt::Subtree<S>, sep: char, span: S) -> Vec<tt::Subtree<S>>
+pub fn parse_exprs_with_sep<S>(
+ tt: &tt::Subtree<S>,
+ sep: char,
+ span: S,
+ edition: Edition,
+) -> Vec<tt::Subtree<S>>
where
S: Copy + fmt::Debug,
{
@@ -194,7 +200,7 @@ where
let mut res = Vec::new();
while iter.peek_n(0).is_some() {
- let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr);
+ let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr, edition);
res.push(match expanded.value {
None => break,
diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs
index a261b1d431..bbfe378200 100644
--- a/crates/mbe/src/syntax_bridge/tests.rs
+++ b/crates/mbe/src/syntax_bridge/tests.rs
@@ -10,7 +10,7 @@ use tt::{
use crate::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
fn check_punct_spacing(fixture: &str) {
- let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
+ let source_file = ast::SourceFile::parse(fixture, span::Edition::CURRENT).ok().unwrap();
let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap, DUMMY);
let mut annotations: FxHashMap<_, _> = extract_annotations(fixture)
.into_iter()
diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs
index e3d12d8707..9c7d7af7b1 100644
--- a/crates/mbe/src/tt_iter.rs
+++ b/crates/mbe/src/tt_iter.rs
@@ -140,10 +140,11 @@ impl<'a, S: Copy + fmt::Debug> TtIter<'a, S> {
pub(crate) fn expect_fragment(
&mut self,
entry_point: parser::PrefixEntryPoint,
+ edition: parser::Edition,
) -> ExpandResult<Option<tt::TokenTree<S>>> {
let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice());
let parser_input = to_parser_input(&buffer);
- let tree_traversal = entry_point.parse(&parser_input);
+ let tree_traversal = entry_point.parse(&parser_input, edition);
let mut cursor = buffer.begin();
let mut error = false;
for step in tree_traversal.iter() {
diff --git a/crates/parser/src/edition.rs b/crates/parser/src/edition.rs
new file mode 100644
index 0000000000..26178544f9
--- /dev/null
+++ b/crates/parser/src/edition.rs
@@ -0,0 +1,55 @@
+//! The edition of the Rust language used in a crate.
+// Ideally this would be defined in the span crate, but the dependency chain is all over the place
+// wrt span, parser and syntax.
+use std::fmt;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum Edition {
+ Edition2015,
+ Edition2018,
+ Edition2021,
+ Edition2024,
+}
+
+impl Edition {
+ pub const CURRENT: Edition = Edition::Edition2021;
+ pub const DEFAULT: Edition = Edition::Edition2015;
+}
+
+#[derive(Debug)]
+pub struct ParseEditionError {
+ invalid_input: String,
+}
+
+impl std::error::Error for ParseEditionError {}
+impl fmt::Display for ParseEditionError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "invalid edition: {:?}", self.invalid_input)
+ }
+}
+
+impl std::str::FromStr for Edition {
+ type Err = ParseEditionError;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let res = match s {
+ "2015" => Edition::Edition2015,
+ "2018" => Edition::Edition2018,
+ "2021" => Edition::Edition2021,
+ "2024" => Edition::Edition2024,
+ _ => return Err(ParseEditionError { invalid_input: s.to_owned() }),
+ };
+ Ok(res)
+ }
+}
+
+impl fmt::Display for Edition {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(match self {
+ Edition::Edition2015 => "2015",
+ Edition::Edition2018 => "2018",
+ Edition::Edition2021 => "2021",
+ Edition::Edition2024 => "2024",
+ })
+ }
+}
diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs
index 86c771c000..c7ad025f6b 100644
--- a/crates/parser/src/lib.rs
+++ b/crates/parser/src/lib.rs
@@ -26,6 +26,7 @@ extern crate ra_ap_rustc_lexer as rustc_lexer;
#[cfg(feature = "in-rust-tree")]
extern crate rustc_lexer;
+mod edition;
mod event;
mod grammar;
mod input;
@@ -42,6 +43,7 @@ mod tests;
pub(crate) use token_set::TokenSet;
pub use crate::{
+ edition::Edition,
input::Input,
lexed_str::LexedStr,
output::{Output, Step},
@@ -86,7 +88,7 @@ pub enum TopEntryPoint {
}
impl TopEntryPoint {
- pub fn parse(&self, input: &Input) -> Output {
+ pub fn parse(&self, input: &Input, edition: Edition) -> Output {
let _p = tracing::span!(tracing::Level::INFO, "TopEntryPoint::parse", ?self).entered();
let entry_point: fn(&'_ mut parser::Parser<'_>) = match self {
TopEntryPoint::SourceFile => grammar::entry::top::source_file,
@@ -98,7 +100,7 @@ impl TopEntryPoint {
TopEntryPoint::MetaItem => grammar::entry::top::meta_item,
TopEntryPoint::MacroEagerInput => grammar::entry::top::eager_macro_input,
};
- let mut p = parser::Parser::new(input);
+ let mut p = parser::Parser::new(input, edition);
entry_point(&mut p);
let events = p.finish();
let res = event::process(events);
@@ -150,7 +152,7 @@ pub enum PrefixEntryPoint {
}
impl PrefixEntryPoint {
- pub fn parse(&self, input: &Input) -> Output {
+ pub fn parse(&self, input: &Input, edition: Edition) -> Output {
let entry_point: fn(&'_ mut parser::Parser<'_>) = match self {
PrefixEntryPoint::Vis => grammar::entry::prefix::vis,
PrefixEntryPoint::Block => grammar::entry::prefix::block,
@@ -163,7 +165,7 @@ impl PrefixEntryPoint {
PrefixEntryPoint::Item => grammar::entry::prefix::item,
PrefixEntryPoint::MetaItem => grammar::entry::prefix::meta_item,
};
- let mut p = parser::Parser::new(input);
+ let mut p = parser::Parser::new(input, edition);
entry_point(&mut p);
let events = p.finish();
event::process(events)
@@ -187,9 +189,9 @@ impl Reparser {
///
/// Tokens must start with `{`, end with `}` and form a valid brace
/// sequence.
- pub fn parse(self, tokens: &Input) -> Output {
+ pub fn parse(self, tokens: &Input, edition: Edition) -> Output {
let Reparser(r) = self;
- let mut p = parser::Parser::new(tokens);
+ let mut p = parser::Parser::new(tokens, edition);
r(&mut p);
let events = p.finish();
event::process(events)
diff --git a/crates/parser/src/parser.rs b/crates/parser/src/parser.rs
index 051461243a..5b901f911d 100644
--- a/crates/parser/src/parser.rs
+++ b/crates/parser/src/parser.rs
@@ -8,6 +8,7 @@ use limit::Limit;
use crate::{
event::Event,
input::Input,
+ Edition,
SyntaxKind::{self, EOF, ERROR, TOMBSTONE},
TokenSet, T,
};
@@ -26,13 +27,14 @@ pub(crate) struct Parser<'t> {
pos: usize,
events: Vec<Event>,
steps: Cell<u32>,
+ _edition: Edition,
}
static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000);
impl<'t> Parser<'t> {
- pub(super) fn new(inp: &'t Input) -> Parser<'t> {
- Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0) }
+ pub(super) fn new(inp: &'t Input, edition: Edition) -> Parser<'t> {
+ Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0), _edition: edition }
}
pub(crate) fn finish(self) -> Vec<Event> {
diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs
index 6ecfdc9f46..ef83420c52 100644
--- a/crates/parser/src/syntax_kind/generated.rs
+++ b/crates/parser/src/syntax_kind/generated.rs
@@ -61,9 +61,11 @@ pub enum SyntaxKind {
SHR,
SHLEQ,
SHREQ,
+ ABSTRACT_KW,
AS_KW,
ASYNC_KW,
AWAIT_KW,
+ BECOME_KW,
BOX_KW,
BREAK_KW,
CONST_KW,
@@ -75,6 +77,7 @@ pub enum SyntaxKind {
ENUM_KW,
EXTERN_KW,
FALSE_KW,
+ FINAL_KW,
FN_KW,
FOR_KW,
IF_KW,
@@ -87,10 +90,11 @@ pub enum SyntaxKind {
MOD_KW,
MOVE_KW,
MUT_KW,
+ OVERRIDE_KW,
+ PRIV_KW,
PUB_KW,
REF_KW,
RETURN_KW,
- BECOME_KW,
SELF_KW,
SELF_TYPE_KW,
STATIC_KW,
@@ -100,8 +104,11 @@ pub enum SyntaxKind {
TRUE_KW,
TRY_KW,
TYPE_KW,
+ TYPEOF_KW,
UNSAFE_KW,
+ UNSIZED_KW,
USE_KW,
+ VIRTUAL_KW,
WHERE_KW,
WHILE_KW,
YIELD_KW,
@@ -280,9 +287,11 @@ impl SyntaxKind {
pub fn is_keyword(self) -> bool {
matches!(
self,
- AS_KW
+ ABSTRACT_KW
+ | AS_KW
| ASYNC_KW
| AWAIT_KW
+ | BECOME_KW
| BOX_KW
| BREAK_KW
| CONST_KW
@@ -294,6 +303,7 @@ impl SyntaxKind {
| ENUM_KW
| EXTERN_KW
| FALSE_KW
+ | FINAL_KW
| FN_KW
| FOR_KW
| IF_KW
@@ -306,10 +316,11 @@ impl SyntaxKind {
| MOD_KW
| MOVE_KW
| MUT_KW
+ | OVERRIDE_KW
+ | PRIV_KW
| PUB_KW
| REF_KW
| RETURN_KW
- | BECOME_KW
| SELF_KW
| SELF_TYPE_KW
| STATIC_KW
@@ -319,8 +330,11 @@ impl SyntaxKind {
| TRUE_KW
| TRY_KW
| TYPE_KW
+ | TYPEOF_KW
| UNSAFE_KW
+ | UNSIZED_KW
| USE_KW
+ | VIRTUAL_KW
| WHERE_KW
| WHILE_KW
| YIELD_KW
@@ -399,9 +413,11 @@ impl SyntaxKind {
}
pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
let kw = match ident {
+ "abstract" => ABSTRACT_KW,
"as" => AS_KW,
"async" => ASYNC_KW,
"await" => AWAIT_KW,
+ "become" => BECOME_KW,
"box" => BOX_KW,
"break" => BREAK_KW,
"const" => CONST_KW,
@@ -413,6 +429,7 @@ impl SyntaxKind {
"enum" => ENUM_KW,
"extern" => EXTERN_KW,
"false" => FALSE_KW,
+ "final" => FINAL_KW,
"fn" => FN_KW,
"for" => FOR_KW,
"if" => IF_KW,
@@ -425,10 +442,11 @@ impl SyntaxKind {
"mod" => MOD_KW,
"move" => MOVE_KW,
"mut" => MUT_KW,
+ "override" => OVERRIDE_KW,
+ "priv" => PRIV_KW,
"pub" => PUB_KW,
"ref" => REF_KW,
"return" => RETURN_KW,
- "become" => BECOME_KW,
"self" => SELF_KW,
"Self" => SELF_TYPE_KW,
"static" => STATIC_KW,
@@ -438,8 +456,11 @@ impl SyntaxKind {
"true" => TRUE_KW,
"try" => TRY_KW,
"type" => TYPE_KW,
+ "typeof" => TYPEOF_KW,
"unsafe" => UNSAFE_KW,
+ "unsized" => UNSIZED_KW,
"use" => USE_KW,
+ "virtual" => VIRTUAL_KW,
"where" => WHERE_KW,
"while" => WHILE_KW,
"yield" => YIELD_KW,
@@ -500,4 +521,4 @@ impl SyntaxKind {
}
}
#[macro_export]
-macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ 
crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; }
+macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] 
=> { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; 
[format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; }
diff --git a/crates/parser/src/tests.rs b/crates/parser/src/tests.rs
index c65219b28d..0e04096526 100644
--- a/crates/parser/src/tests.rs
+++ b/crates/parser/src/tests.rs
@@ -88,7 +88,7 @@ fn parse_inline_err() {
fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) {
let lexed = LexedStr::new(text);
let input = lexed.to_input();
- let output = entry.parse(&input);
+ let output = entry.parse(&input, crate::Edition::CURRENT);
let mut buf = String::new();
let mut errors = Vec::new();
diff --git a/crates/parser/src/tests/prefix_entries.rs b/crates/parser/src/tests/prefix_entries.rs
index 2f3c7febc0..f92b39edb7 100644
--- a/crates/parser/src/tests/prefix_entries.rs
+++ b/crates/parser/src/tests/prefix_entries.rs
@@ -86,7 +86,7 @@ fn check(entry: PrefixEntryPoint, input: &str, prefix: &str) {
let input = lexed.to_input();
let mut n_tokens = 0;
- for step in entry.parse(&input).iter() {
+ for step in entry.parse(&input, crate::Edition::CURRENT).iter() {
match step {
Step::Token { n_input_tokens, .. } => n_tokens += n_input_tokens as usize,
Step::FloatSplit { .. } => n_tokens += 1,
diff --git a/crates/parser/src/token_set.rs b/crates/parser/src/token_set.rs
index cd4894c1e8..88a89a53a7 100644
--- a/crates/parser/src/token_set.rs
+++ b/crates/parser/src/token_set.rs
@@ -4,34 +4,48 @@ use crate::SyntaxKind;
/// A bit-set of `SyntaxKind`s
#[derive(Clone, Copy)]
-pub(crate) struct TokenSet(u128);
+pub(crate) struct TokenSet([u64; 3]);
+
+/// `TokenSet`s should only include token `SyntaxKind`s, so the discriminant of any passed/included
+/// `SyntaxKind` must *not* be greater than that of the last token `SyntaxKind`.
+/// See #17037.
+const LAST_TOKEN_KIND_DISCRIMINANT: usize = SyntaxKind::SHEBANG as usize;
impl TokenSet {
- pub(crate) const EMPTY: TokenSet = TokenSet(0);
+ pub(crate) const EMPTY: TokenSet = TokenSet([0; 3]);
pub(crate) const fn new(kinds: &[SyntaxKind]) -> TokenSet {
- let mut res = 0u128;
+ let mut res = [0; 3];
let mut i = 0;
while i < kinds.len() {
- res |= mask(kinds[i]);
+ let discriminant = kinds[i] as usize;
+ debug_assert!(
+ discriminant <= LAST_TOKEN_KIND_DISCRIMINANT,
+ "Expected a token `SyntaxKind`"
+ );
+ let idx = discriminant / 64;
+ res[idx] |= 1 << (discriminant % 64);
i += 1;
}
TokenSet(res)
}
pub(crate) const fn union(self, other: TokenSet) -> TokenSet {
- TokenSet(self.0 | other.0)
+ TokenSet([self.0[0] | other.0[0], self.0[1] | other.0[1], self.0[2] | other.0[2]])
}
pub(crate) const fn contains(&self, kind: SyntaxKind) -> bool {
- self.0 & mask(kind) != 0
+ let discriminant = kind as usize;
+ debug_assert!(
+ discriminant <= LAST_TOKEN_KIND_DISCRIMINANT,
+ "Expected a token `SyntaxKind`"
+ );
+ let idx = discriminant / 64;
+ let mask = 1 << (discriminant % 64);
+ self.0[idx] & mask != 0
}
}
-const fn mask(kind: SyntaxKind) -> u128 {
- 1u128 << (kind as usize)
-}
-
#[test]
fn token_set_works_for_tokens() {
use crate::SyntaxKind::*;
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index fd49164464..0ab16c38c8 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -11,6 +11,7 @@ pub mod msg;
mod process;
mod version;
+use base_db::Env;
use indexmap::IndexSet;
use paths::AbsPathBuf;
use rustc_hash::FxHashMap;
@@ -37,7 +38,7 @@ pub enum ProcMacroKind {
CustomDerive,
Attr,
// This used to be called FuncLike, so that's what the server expects currently.
- #[serde(alias = "bang")]
+ #[serde(alias = "Bang")]
#[serde(rename(serialize = "FuncLike", deserialize = "FuncLike"))]
Bang,
}
@@ -152,16 +153,13 @@ impl ProcMacro {
&self,
subtree: &tt::Subtree<Span>,
attr: Option<&tt::Subtree<Span>>,
- env: Vec<(String, String)>,
+ env: Env,
def_site: Span,
call_site: Span,
mixed_site: Span,
) -> Result<Result<tt::Subtree<Span>, PanicMessage>, ServerError> {
let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version();
- let current_dir = env
- .iter()
- .find(|(name, _)| name == "CARGO_MANIFEST_DIR")
- .map(|(_, value)| value.clone());
+ let current_dir = env.get("CARGO_MANIFEST_DIR");
let mut span_data_table = IndexSet::default();
let def_site = span_data_table.insert_full(def_site).0;
@@ -172,7 +170,7 @@ impl ProcMacro {
macro_name: self.name.to_string(),
attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)),
lib: self.dylib_path.to_path_buf().into(),
- env,
+ env: env.into(),
current_dir,
has_global_spans: ExpnGlobals {
serialize: version >= HAS_GLOBAL_SPANS,
diff --git a/crates/proc-macro-srv-cli/build.rs b/crates/proc-macro-srv-cli/build.rs
new file mode 100644
index 0000000000..07f914fece
--- /dev/null
+++ b/crates/proc-macro-srv-cli/build.rs
@@ -0,0 +1,5 @@
+//! This teaches cargo about our cfg(rust_analyzer)
+
+fn main() {
+ println!("cargo:rustc-check-cfg=cfg(rust_analyzer)");
+}
diff --git a/crates/proc-macro-srv/build.rs b/crates/proc-macro-srv/build.rs
index a8c732f315..874d1c6cd3 100644
--- a/crates/proc-macro-srv/build.rs
+++ b/crates/proc-macro-srv/build.rs
@@ -4,6 +4,8 @@
use std::{env, fs::File, io::Write, path::PathBuf, process::Command};
fn main() {
+ println!("cargo:rustc-check-cfg=cfg(rust_analyzer)");
+
let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap());
path.push("rustc_version.rs");
let mut f = File::create(&path).unwrap();
diff --git a/crates/proc-macro-srv/proc-macro-test/build.rs b/crates/proc-macro-srv/proc-macro-test/build.rs
index c76c201d69..6a0ae362d8 100644
--- a/crates/proc-macro-srv/proc-macro-test/build.rs
+++ b/crates/proc-macro-srv/proc-macro-test/build.rs
@@ -53,7 +53,7 @@ fn main() {
println!("Creating {}", src_dir.display());
std::fs::create_dir_all(src_dir).unwrap();
- for item_els in [&["Cargo.toml"][..], &["src", "lib.rs"]] {
+ for item_els in [&["Cargo.toml"][..], &["build.rs"][..], &["src", "lib.rs"]] {
let mut src = imp_dir.clone();
let mut dst = staging_dir.clone();
for el in item_els {
diff --git a/crates/proc-macro-srv/proc-macro-test/imp/build.rs b/crates/proc-macro-srv/proc-macro-test/imp/build.rs
new file mode 100644
index 0000000000..07f914fece
--- /dev/null
+++ b/crates/proc-macro-srv/proc-macro-test/imp/build.rs
@@ -0,0 +1,5 @@
+//! This teaches cargo about our cfg(rust_analyzer)
+
+fn main() {
+ println!("cargo:rustc-check-cfg=cfg(rust_analyzer)");
+}
diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs
index d40eb26063..fbd423c9ea 100644
--- a/crates/project-model/src/build_scripts.rs
+++ b/crates/project-model/src/build_scripts.rs
@@ -23,16 +23,18 @@ use serde::Deserialize;
use toolchain::Tool;
use crate::{
- cfg_flag::CfgFlag, utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation,
+ cfg::CfgFlag, utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation,
InvocationStrategy, Package, Sysroot, TargetKind,
};
+/// Output of the build script and proc-macro building steps for a workspace.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct WorkspaceBuildScripts {
outputs: ArenaMap<Package, BuildScriptOutput>,
error: Option<String>,
}
+/// Output of the build script and proc-macro building step for a concrete package.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub(crate) struct BuildScriptOutput {
/// List of config flags defined by this package's build script.
@@ -86,7 +88,9 @@ impl WorkspaceBuildScripts {
// --all-targets includes tests, benches and examples in addition to the
// default lib and bins. This is an independent concept from the --target
// flag below.
- cmd.arg("--all-targets");
+ if config.all_targets {
+ cmd.arg("--all-targets");
+ }
if let Some(target) = &config.target {
cmd.args(["--target", target]);
@@ -235,7 +239,7 @@ impl WorkspaceBuildScripts {
},
progress,
)?;
- res.iter_mut().for_each(|it| it.error = errors.clone());
+ res.iter_mut().for_each(|it| it.error.clone_from(&errors));
collisions.into_iter().for_each(|(id, workspace, package)| {
if let Some(&(p, w)) = by_id.get(id) {
res[workspace].outputs[package] = res[w].outputs[p].clone();
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index 51c1b094f7..ff7cf144aa 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -76,6 +76,8 @@ impl Default for CargoFeatures {
#[derive(Default, Clone, Debug, PartialEq, Eq)]
pub struct CargoConfig {
+ /// Whether to pass `--all-targets` to cargo invocations.
+ pub all_targets: bool,
/// List of features to activate.
pub features: CargoFeatures,
/// rustc target
@@ -133,6 +135,20 @@ pub struct PackageData {
pub active_features: Vec<String>,
/// String representation of package id
pub id: String,
+ /// Authors as given in the `Cargo.toml`
+ pub authors: Vec<String>,
+ /// Description as given in the `Cargo.toml`
+ pub description: Option<String>,
+ /// Homepage as given in the `Cargo.toml`
+ pub homepage: Option<String>,
+ /// License as given in the `Cargo.toml`
+ pub license: Option<String>,
+ /// License file as given in the `Cargo.toml`
+ pub license_file: Option<Utf8PathBuf>,
+ /// Readme file as given in the `Cargo.toml`
+ pub readme: Option<Utf8PathBuf>,
+ /// Rust version as given in the `Cargo.toml`
+ pub rust_version: Option<semver::Version>,
/// The contents of [package.metadata.rust-analyzer]
pub metadata: RustAnalyzerPackageMetaData,
}
@@ -223,6 +239,10 @@ impl TargetKind {
}
TargetKind::Other
}
+
+ pub fn is_executable(self) -> bool {
+ matches!(self, TargetKind::Bin | TargetKind::Example)
+ }
}
// Deserialize helper for the cargo metadata
@@ -285,6 +305,12 @@ impl CargoWorkspace {
.collect(),
);
}
+ // The manifest is a rust file, so this means it's a script manifest
+ if cargo_toml.extension().is_some_and(|ext| ext == "rs") {
+ // Deliberately don't set up RUSTC_BOOTSTRAP or a nightly override here, the user should
+ // opt into it themselves.
+ other_options.push("-Zscript".to_owned());
+ }
meta.other_options(other_options);
// FIXME: Fetching metadata is a slow process, as it might require
@@ -328,6 +354,13 @@ impl CargoWorkspace {
repository,
edition,
metadata,
+ authors,
+ description,
+ homepage,
+ license,
+ license_file,
+ readme,
+ rust_version,
..
} = meta_pkg;
let meta = from_value::<PackageMetadata>(metadata).unwrap_or_default();
@@ -346,16 +379,24 @@ impl CargoWorkspace {
let is_local = source.is_none();
let is_member = ws_members.contains(&id);
+ let manifest = AbsPathBuf::assert(manifest_path);
let pkg = packages.alloc(PackageData {
id: id.repr.clone(),
name,
version,
- manifest: AbsPathBuf::assert(manifest_path).try_into().unwrap(),
+ manifest: manifest.clone().try_into().unwrap(),
targets: Vec::new(),
is_local,
is_member,
edition,
repository,
+ authors,
+ description,
+ homepage,
+ license,
+ license_file,
+ readme,
+ rust_version,
dependencies: Vec::new(),
features: features.into_iter().collect(),
active_features: Vec::new(),
@@ -366,11 +407,22 @@ impl CargoWorkspace {
for meta_tgt in meta_targets {
let cargo_metadata::Target { name, kind, required_features, src_path, .. } =
meta_tgt;
+ let kind = TargetKind::new(&kind);
let tgt = targets.alloc(TargetData {
package: pkg,
name,
- root: AbsPathBuf::assert(src_path),
- kind: TargetKind::new(&kind),
+ root: if kind == TargetKind::Bin
+ && manifest.extension().is_some_and(|ext| ext == "rs")
+ {
+ // cargo strips the script part of a cargo script away and places the
+ // modified manifest file into a special target dir which is then used as
+ // the source path. We don't want that, we want the original here so map it
+ // back
+ manifest.clone()
+ } else {
+ AbsPathBuf::assert(src_path)
+ },
+ kind,
required_features,
});
pkg_data.targets.push(tgt);
diff --git a/crates/project-model/src/cfg_flag.rs b/crates/project-model/src/cfg.rs
index af682904b1..b409bc1ce7 100644
--- a/crates/project-model/src/cfg_flag.rs
+++ b/crates/project-model/src/cfg.rs
@@ -3,9 +3,11 @@
//! rustc main.rs --cfg foo --cfg 'feature="bar"'
use std::{fmt, str::FromStr};
-use cfg::CfgOptions;
+use cfg::{CfgDiff, CfgOptions};
+use rustc_hash::FxHashMap;
+use serde::Serialize;
-#[derive(Clone, Eq, PartialEq, Debug)]
+#[derive(Clone, Eq, PartialEq, Debug, Serialize)]
pub enum CfgFlag {
Atom(String),
KeyValue { key: String, value: String },
@@ -69,3 +71,27 @@ impl fmt::Display for CfgFlag {
}
}
}
+
+/// A set of cfg-overrides per crate.
+#[derive(Default, Debug, Clone, Eq, PartialEq)]
+pub struct CfgOverrides {
+ /// A global set of overrides matching all crates.
+ pub global: CfgDiff,
+ /// A set of overrides matching specific crates.
+ pub selective: FxHashMap<String, CfgDiff>,
+}
+
+impl CfgOverrides {
+ pub fn len(&self) -> usize {
+ self.global.len() + self.selective.values().map(|it| it.len()).sum::<usize>()
+ }
+
+ pub fn apply(&self, cfg_options: &mut CfgOptions, name: &str) {
+ if !self.global.is_empty() {
+ cfg_options.apply_diff(self.global.clone());
+ };
+ if let Some(diff) = self.selective.get(name) {
+ cfg_options.apply_diff(diff.clone());
+ };
+ }
+}
diff --git a/crates/project-model/src/env.rs b/crates/project-model/src/env.rs
new file mode 100644
index 0000000000..762e01c917
--- /dev/null
+++ b/crates/project-model/src/env.rs
@@ -0,0 +1,85 @@
+//! Cargo-like environment variables injection.
+use base_db::Env;
+use rustc_hash::FxHashMap;
+use toolchain::Tool;
+
+use crate::{utf8_stdout, ManifestPath, PackageData, Sysroot, TargetKind};
+
+/// Recreates the compile-time environment variables that Cargo sets.
+///
+/// Should be synced with
+/// <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
+///
+/// FIXME: ask Cargo to provide this data instead of re-deriving.
+pub(crate) fn inject_cargo_package_env(env: &mut Env, package: &PackageData) {
+ // FIXME: Missing variables:
+ // CARGO_BIN_NAME, CARGO_BIN_EXE_<name>
+
+ let manifest_dir = package.manifest.parent();
+ env.set("CARGO_MANIFEST_DIR", manifest_dir.as_str());
+
+ env.set("CARGO_PKG_VERSION", package.version.to_string());
+ env.set("CARGO_PKG_VERSION_MAJOR", package.version.major.to_string());
+ env.set("CARGO_PKG_VERSION_MINOR", package.version.minor.to_string());
+ env.set("CARGO_PKG_VERSION_PATCH", package.version.patch.to_string());
+ env.set("CARGO_PKG_VERSION_PRE", package.version.pre.to_string());
+
+ env.set("CARGO_PKG_AUTHORS", package.authors.join(":").clone());
+
+ env.set("CARGO_PKG_NAME", package.name.clone());
+ env.set("CARGO_PKG_DESCRIPTION", package.description.as_deref().unwrap_or_default());
+ env.set("CARGO_PKG_HOMEPAGE", package.homepage.as_deref().unwrap_or_default());
+ env.set("CARGO_PKG_REPOSITORY", package.repository.as_deref().unwrap_or_default());
+ env.set("CARGO_PKG_LICENSE", package.license.as_deref().unwrap_or_default());
+ env.set(
+ "CARGO_PKG_LICENSE_FILE",
+ package.license_file.as_ref().map(ToString::to_string).unwrap_or_default(),
+ );
+ env.set(
+ "CARGO_PKG_README",
+ package.readme.as_ref().map(ToString::to_string).unwrap_or_default(),
+ );
+
+ env.set(
+ "CARGO_PKG_RUST_VERSION",
+ package.rust_version.as_ref().map(ToString::to_string).unwrap_or_default(),
+ );
+}
+
+pub(crate) fn inject_cargo_env(env: &mut Env) {
+ env.set("CARGO", Tool::Cargo.path().to_string());
+}
+
+pub(crate) fn inject_rustc_tool_env(env: &mut Env, cargo_name: &str, kind: TargetKind) {
+ _ = kind;
+ // FIXME
+ // if kind.is_executable() {
+ // env.set("CARGO_BIN_NAME", cargo_name);
+ // }
+ env.set("CARGO_CRATE_NAME", cargo_name.replace('-', "_"));
+}
+
+pub(crate) fn cargo_config_env(
+ cargo_toml: &ManifestPath,
+ extra_env: &FxHashMap<String, String>,
+ sysroot: Option<&Sysroot>,
+) -> FxHashMap<String, String> {
+ let mut cargo_config = Sysroot::tool(sysroot, Tool::Cargo);
+ cargo_config.envs(extra_env);
+ cargo_config
+ .current_dir(cargo_toml.parent())
+ .args(["-Z", "unstable-options", "config", "get", "env"])
+ .env("RUSTC_BOOTSTRAP", "1");
+ // if successful we receive `env.key.value = "value"` per entry
+ tracing::debug!("Discovering cargo config env by {:?}", cargo_config);
+ utf8_stdout(cargo_config).map(parse_output_cargo_config_env).unwrap_or_default()
+}
+
+fn parse_output_cargo_config_env(stdout: String) -> FxHashMap<String, String> {
+ stdout
+ .lines()
+ .filter_map(|l| l.strip_prefix("env."))
+ .filter_map(|l| l.split_once(".value = "))
+ .map(|(key, value)| (key.to_owned(), value.trim_matches('"').to_owned()))
+ .collect()
+}
diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs
index 28696aa327..7f3e35ca5d 100644
--- a/crates/project-model/src/lib.rs
+++ b/crates/project-model/src/lib.rs
@@ -19,7 +19,8 @@
mod build_scripts;
mod cargo_workspace;
-mod cfg_flag;
+mod cfg;
+mod env;
mod manifest_path;
mod project_json;
mod rustc_cfg;
@@ -47,10 +48,11 @@ pub use crate::{
CargoConfig, CargoFeatures, CargoWorkspace, Package, PackageData, PackageDependency,
RustLibSource, Target, TargetData, TargetKind,
},
+ cfg::CfgOverrides,
manifest_path::ManifestPath,
project_json::{ProjectJson, ProjectJsonData},
sysroot::Sysroot,
- workspace::{CfgOverrides, PackageRoot, ProjectWorkspace},
+ workspace::{FileLoader, PackageRoot, ProjectWorkspace},
};
#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs
index 512588cc8f..fac6eb8ad3 100644
--- a/crates/project-model/src/project_json.rs
+++ b/crates/project-model/src/project_json.rs
@@ -49,14 +49,13 @@
//! user explores them belongs to that extension (it's totally valid to change
//! rust-project.json over time via configuration request!)
-use base_db::{CrateDisplayName, CrateId, CrateName, Dependency};
-use la_arena::RawIdx;
+use base_db::{CrateDisplayName, CrateName};
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::FxHashMap;
-use serde::{de, Deserialize};
+use serde::{de, Deserialize, Serialize};
use span::Edition;
-use crate::cfg_flag::CfgFlag;
+use crate::cfg::CfgFlag;
/// Roots and crates that compose this Rust project.
#[derive(Clone, Debug, Eq, PartialEq)]
@@ -74,10 +73,10 @@ pub struct ProjectJson {
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Crate {
pub(crate) display_name: Option<CrateDisplayName>,
- pub(crate) root_module: AbsPathBuf,
+ pub root_module: AbsPathBuf,
pub(crate) edition: Edition,
pub(crate) version: Option<String>,
- pub(crate) deps: Vec<Dependency>,
+ pub(crate) deps: Vec<Dep>,
pub(crate) cfg: Vec<CfgFlag>,
pub(crate) target: Option<String>,
pub(crate) env: FxHashMap<String, String>,
@@ -128,16 +127,7 @@ impl ProjectJson {
root_module,
edition: crate_data.edition.into(),
version: crate_data.version.as_ref().map(ToString::to_string),
- deps: crate_data
- .deps
- .into_iter()
- .map(|dep_data| {
- Dependency::new(
- dep_data.name,
- CrateId::from_raw(RawIdx::from(dep_data.krate as u32)),
- )
- })
- .collect::<Vec<_>>(),
+ deps: crate_data.deps,
cfg: crate_data.cfg,
target: crate_data.target,
env: crate_data.env,
@@ -161,11 +151,8 @@ impl ProjectJson {
}
/// Returns an iterator over the crates in the project.
- pub fn crates(&self) -> impl Iterator<Item = (CrateId, &Crate)> + '_ {
- self.crates
- .iter()
- .enumerate()
- .map(|(idx, krate)| (CrateId::from_raw(RawIdx::from(idx as u32)), krate))
+ pub fn crates(&self) -> impl Iterator<Item = (CrateArrayIdx, &Crate)> {
+ self.crates.iter().enumerate().map(|(idx, krate)| (CrateArrayIdx(idx), krate))
}
/// Returns the path to the project's root folder.
@@ -174,21 +161,21 @@ impl ProjectJson {
}
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ProjectJsonData {
sysroot: Option<Utf8PathBuf>,
sysroot_src: Option<Utf8PathBuf>,
crates: Vec<CrateData>,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
struct CrateData {
display_name: Option<String>,
root_module: Utf8PathBuf,
edition: EditionData,
#[serde(default)]
version: Option<semver::Version>,
- deps: Vec<DepData>,
+ deps: Vec<Dep>,
#[serde(default)]
cfg: Vec<CfgFlag>,
target: Option<String>,
@@ -203,7 +190,7 @@ struct CrateData {
repository: Option<String>,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename = "edition")]
enum EditionData {
#[serde(rename = "2015")]
@@ -227,16 +214,25 @@ impl From<EditionData> for Edition {
}
}
-#[derive(Deserialize, Debug, Clone)]
-struct DepData {
+/// Identifies a crate by position in the crates array.
+///
+/// This will differ from `CrateId` when multiple `ProjectJson`
+/// workspaces are loaded.
+#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)]
+#[serde(transparent)]
+pub struct CrateArrayIdx(pub usize);
+
+#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
+pub(crate) struct Dep {
/// Identifies a crate by position in the crates array.
#[serde(rename = "crate")]
- krate: usize,
+ pub(crate) krate: CrateArrayIdx,
+ #[serde(serialize_with = "serialize_crate_name")]
#[serde(deserialize_with = "deserialize_crate_name")]
- name: CrateName,
+ pub(crate) name: CrateName,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
struct CrateSource {
include_dirs: Vec<Utf8PathBuf>,
exclude_dirs: Vec<Utf8PathBuf>,
@@ -249,3 +245,10 @@ where
let name = String::deserialize(de)?;
CrateName::new(&name).map_err(|err| de::Error::custom(format!("invalid crate name: {err:?}")))
}
+
+fn serialize_crate_name<S>(name: &CrateName, se: S) -> Result<S::Ok, S::Error>
+where
+ S: serde::Serializer,
+{
+ se.serialize_str(name)
+}
diff --git a/crates/project-model/src/rustc_cfg.rs b/crates/project-model/src/rustc_cfg.rs
index 501b1fdc8c..4f69b2b96f 100644
--- a/crates/project-model/src/rustc_cfg.rs
+++ b/crates/project-model/src/rustc_cfg.rs
@@ -4,7 +4,7 @@ use anyhow::Context;
use rustc_hash::FxHashMap;
use toolchain::Tool;
-use crate::{cfg_flag::CfgFlag, utf8_stdout, ManifestPath, Sysroot};
+use crate::{cfg::CfgFlag, utf8_stdout, ManifestPath, Sysroot};
/// Determines how `rustc --print cfg` is discovered and invoked.
pub(crate) enum RustcCfgConfig<'a> {
@@ -32,9 +32,6 @@ pub(crate) fn get(
}
}
- // Add miri cfg, which is useful for mir eval in stdlib
- res.push(CfgFlag::Atom("miri".into()));
-
let rustc_cfgs = get_rust_cfgs(target, extra_env, config);
let rustc_cfgs = match rustc_cfgs {
diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs
index fc0b507b33..fd09dff503 100644
--- a/crates/project-model/src/tests.rs
+++ b/crates/project-model/src/tests.rs
@@ -75,6 +75,7 @@ fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) {
rustc_cfg: Vec::new(),
toolchain: None,
target_layout: Err(Arc::from("test has no data layout")),
+ cfg_overrides: Default::default(),
};
to_crate_graph(project_workspace)
}
@@ -97,6 +98,11 @@ fn get_test_json_file<T: DeserializeOwned>(file: &str) -> T {
}
}
+fn replace_cargo(s: &mut String) {
+ let path = toolchain::Tool::Cargo.path().to_string().escape_debug().collect::<String>();
+ *s = s.replace(&path, "$CARGO$");
+}
+
fn replace_root(s: &mut String, direction: bool) {
if direction {
let root = if cfg!(windows) { r#"C:\\ROOT\"# } else { "/ROOT/" };
@@ -155,7 +161,9 @@ fn to_crate_graph(project_workspace: ProjectWorkspace) -> (CrateGraph, ProcMacro
fn check_crate_graph(crate_graph: CrateGraph, expect: ExpectFile) {
let mut crate_graph = format!("{crate_graph:#?}");
+
replace_root(&mut crate_graph, false);
+ replace_cargo(&mut crate_graph);
replace_fake_sys_root(&mut crate_graph);
expect.assert_eq(&crate_graph);
}
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index b8c5885108..98c5a02dcd 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -2,7 +2,7 @@
//! metadata` or `rust-project.json`) into representation stored in the salsa
//! database -- `CrateGraph`.
-use std::{collections::VecDeque, fmt, fs, iter, str::FromStr, sync};
+use std::{collections::VecDeque, fmt, fs, iter, sync};
use anyhow::{format_err, Context};
use base_db::{
@@ -21,8 +21,9 @@ use triomphe::Arc;
use crate::{
build_scripts::BuildScriptOutput,
cargo_workspace::{DepKind, PackageData, RustLibSource},
- cfg_flag::CfgFlag,
- project_json::Crate,
+ cfg::{CfgFlag, CfgOverrides},
+ env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env},
+ project_json::{Crate, CrateArrayIdx},
rustc_cfg::{self, RustcCfgConfig},
sysroot::{SysrootCrate, SysrootMode},
target_data_layout::{self, RustcDataLayoutConfig},
@@ -30,20 +31,7 @@ use crate::{
ProjectJson, ProjectManifest, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts,
};
-/// A set of cfg-overrides per crate.
-#[derive(Default, Debug, Clone, Eq, PartialEq)]
-pub struct CfgOverrides {
- /// A global set of overrides matching all crates.
- pub global: CfgDiff,
- /// A set of overrides matching specific crates.
- pub selective: FxHashMap<String, CfgDiff>,
-}
-
-impl CfgOverrides {
- pub fn len(&self) -> usize {
- self.global.len() + self.selective.values().map(|it| it.len()).sum::<usize>()
- }
-}
+pub type FileLoader<'a> = &'a mut dyn for<'b> FnMut(&'b AbsPath) -> Option<FileId>;
/// `PackageRoot` describes a package root folder.
/// Which may be an external dependency, or a member of
@@ -60,30 +48,46 @@ pub struct PackageRoot {
pub enum ProjectWorkspace {
/// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`.
Cargo {
+ /// The workspace as returned by `cargo metadata`.
cargo: CargoWorkspace,
+ /// The build script results for the workspace.
build_scripts: WorkspaceBuildScripts,
+ /// The sysroot loaded for this workspace.
sysroot: Result<Sysroot, Option<String>>,
+ /// The rustc workspace loaded for this workspace. An `Err(None)` means loading has been
+ /// disabled or was otherwise not requested.
rustc: Result<Box<(CargoWorkspace, WorkspaceBuildScripts)>, Option<String>>,
/// Holds cfg flags for the current target. We get those by running
/// `rustc --print cfg`.
- ///
- /// FIXME: make this a per-crate map, as, eg, build.rs might have a
- /// different target.
+ // FIXME: make this a per-crate map, as, eg, build.rs might have a
+ // different target.
rustc_cfg: Vec<CfgFlag>,
+ /// A set of cfg overrides for this workspace.
cfg_overrides: CfgOverrides,
+ /// The toolchain version used by this workspace.
toolchain: Option<Version>,
+ /// The target data layout queried for workspace.
target_layout: TargetLayoutLoadResult,
+ /// Environment variables set in the `.cargo/config` file.
cargo_config_extra_env: FxHashMap<String, String>,
},
/// Project workspace was manually specified using a `rust-project.json` file.
Json {
+ /// The loaded project json file.
project: ProjectJson,
+ /// The sysroot loaded for this workspace.
sysroot: Result<Sysroot, Option<String>>,
/// Holds cfg flags for the current target. We get those by running
/// `rustc --print cfg`.
+ // FIXME: make this a per-crate map, as, eg, build.rs might have a
+ // different target.
rustc_cfg: Vec<CfgFlag>,
+ /// The toolchain version used by this workspace.
toolchain: Option<Version>,
+ /// The target data layout queried for workspace.
target_layout: TargetLayoutLoadResult,
+ /// A set of cfg overrides for this workspace.
+ cfg_overrides: CfgOverrides,
},
// FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning.
// That's not the end user experience we should strive for.
@@ -95,14 +99,24 @@ pub enum ProjectWorkspace {
// //
/// Project with a set of disjoint files, not belonging to any particular workspace.
/// Backed by basic sysroot crates for basic completion and highlighting.
- DetachedFiles {
- files: Vec<AbsPathBuf>,
+ DetachedFile {
+ /// The file in question.
+ file: AbsPathBuf,
+ /// The sysroot loaded for this workspace.
sysroot: Result<Sysroot, Option<String>>,
/// Holds cfg flags for the current target. We get those by running
/// `rustc --print cfg`.
+ // FIXME: make this a per-crate map, as, eg, build.rs might have a
+ // different target.
rustc_cfg: Vec<CfgFlag>,
+ /// The toolchain version used by this workspace.
toolchain: Option<Version>,
+ /// The target data layout queried for workspace.
target_layout: TargetLayoutLoadResult,
+ /// A set of cfg overrides for the files.
+ cfg_overrides: CfgOverrides,
+ /// Is this file a cargo script file?
+ cargo_script: Option<CargoWorkspace>,
},
}
@@ -141,6 +155,7 @@ impl fmt::Debug for ProjectWorkspace {
rustc_cfg,
toolchain,
target_layout: data_layout,
+ cfg_overrides,
} => {
let mut debug_struct = f.debug_struct("Json");
debug_struct.field("n_crates", &project.n_crates());
@@ -150,22 +165,28 @@ impl fmt::Debug for ProjectWorkspace {
debug_struct
.field("n_rustc_cfg", &rustc_cfg.len())
.field("toolchain", &toolchain)
- .field("data_layout", &data_layout);
+ .field("data_layout", &data_layout)
+ .field("n_cfg_overrides", &cfg_overrides.len());
debug_struct.finish()
}
- ProjectWorkspace::DetachedFiles {
- files,
+ ProjectWorkspace::DetachedFile {
+ file,
sysroot,
rustc_cfg,
toolchain,
target_layout,
+ cfg_overrides,
+ cargo_script,
} => f
.debug_struct("DetachedFiles")
- .field("n_files", &files.len())
+ .field("file", &file)
+ .field("cargo_script", &cargo_script.is_some())
.field("sysroot", &sysroot.is_ok())
+ .field("cargo_script", &cargo_script.is_some())
.field("n_rustc_cfg", &rustc_cfg.len())
.field("toolchain", &toolchain)
.field("data_layout", &target_layout)
+ .field("n_cfg_overrides", &cfg_overrides.len())
.finish(),
}
}
@@ -219,6 +240,7 @@ impl ProjectWorkspace {
project_json,
config.target.as_deref(),
&config.extra_env,
+ &config.cfg_overrides,
)
}
ProjectManifest::CargoToml(cargo_toml) => {
@@ -360,6 +382,7 @@ impl ProjectWorkspace {
project_json: ProjectJson,
target: Option<&str>,
extra_env: &FxHashMap<String, String>,
+ cfg_overrides: &CfgOverrides,
) -> ProjectWorkspace {
let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) {
(Some(sysroot), Some(sysroot_src)) => {
@@ -406,57 +429,86 @@ impl ProjectWorkspace {
rustc_cfg,
toolchain,
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
+ cfg_overrides: cfg_overrides.clone(),
}
}
pub fn load_detached_files(
detached_files: Vec<AbsPathBuf>,
config: &CargoConfig,
- ) -> anyhow::Result<ProjectWorkspace> {
- let dir = detached_files
- .first()
- .and_then(|it| it.parent())
- .ok_or_else(|| format_err!("No detached files to load"))?;
- let sysroot = match &config.sysroot {
- Some(RustLibSource::Path(path)) => {
- Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata)
- .map_err(|e| Some(format!("Failed to find sysroot at {path}:{e}")))
- }
- Some(RustLibSource::Discover) => Sysroot::discover(
- dir,
- &config.extra_env,
- config.sysroot_query_metadata,
- )
- .map_err(|e| {
- Some(format!("Failed to find sysroot for {dir}. Is rust-src installed? {e}"))
- }),
- None => Err(None),
- };
+ ) -> Vec<anyhow::Result<ProjectWorkspace>> {
+ detached_files
+ .into_iter()
+ .map(|detached_file| {
+ let dir = detached_file
+ .parent()
+ .ok_or_else(|| format_err!("detached file has no parent"))?;
+ let sysroot = match &config.sysroot {
+ Some(RustLibSource::Path(path)) => {
+ Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata)
+ .map_err(|e| Some(format!("Failed to find sysroot at {path}:{e}")))
+ }
+ Some(RustLibSource::Discover) => {
+ Sysroot::discover(dir, &config.extra_env, config.sysroot_query_metadata)
+ .map_err(|e| {
+ Some(format!(
+ "Failed to find sysroot for {dir}. Is rust-src installed? {e}"
+ ))
+ })
+ }
+ None => Err(None),
+ };
- let sysroot_ref = sysroot.as_ref().ok();
- let toolchain =
- match get_toolchain_version(dir, sysroot_ref, Tool::Rustc, &config.extra_env, "rustc ")
- {
- Ok(it) => it,
- Err(e) => {
- tracing::error!("{e}");
- None
- }
- };
+ let sysroot_ref = sysroot.as_ref().ok();
+ let toolchain = match get_toolchain_version(
+ dir,
+ sysroot_ref,
+ Tool::Rustc,
+ &config.extra_env,
+ "rustc ",
+ ) {
+ Ok(it) => it,
+ Err(e) => {
+ tracing::error!("{e}");
+ None
+ }
+ };
- let rustc_cfg = rustc_cfg::get(None, &config.extra_env, RustcCfgConfig::Rustc(sysroot_ref));
- let data_layout = target_data_layout::get(
- RustcDataLayoutConfig::Rustc(sysroot_ref),
- None,
- &config.extra_env,
- );
- Ok(ProjectWorkspace::DetachedFiles {
- files: detached_files,
- sysroot,
- rustc_cfg,
- toolchain,
- target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
- })
+ let rustc_cfg =
+ rustc_cfg::get(None, &config.extra_env, RustcCfgConfig::Rustc(sysroot_ref));
+ let data_layout = target_data_layout::get(
+ RustcDataLayoutConfig::Rustc(sysroot_ref),
+ None,
+ &config.extra_env,
+ );
+
+ let cargo_script = ManifestPath::try_from(detached_file.clone())
+ .ok()
+ .and_then(|file| {
+ CargoWorkspace::fetch_metadata(
+ &file,
+ file.parent(),
+ config,
+ sysroot_ref,
+ &|_| (),
+ )
+ .ok()
+ })
+ .map(CargoWorkspace::new);
+
+ Ok(ProjectWorkspace::DetachedFile {
+ file: detached_file,
+ sysroot,
+ rustc_cfg,
+ toolchain,
+ target_layout: data_layout
+ .map(Arc::from)
+ .map_err(|it| Arc::from(it.to_string())),
+ cfg_overrides: config.cfg_overrides.clone(),
+ cargo_script,
+ })
+ })
+ .collect()
}
/// Runs the build scripts for this [`ProjectWorkspace`].
@@ -466,7 +518,13 @@ impl ProjectWorkspace {
progress: &dyn Fn(String),
) -> anyhow::Result<WorkspaceBuildScripts> {
match self {
- ProjectWorkspace::Cargo { cargo, toolchain, sysroot, .. } => {
+ ProjectWorkspace::DetachedFile {
+ cargo_script: Some(cargo),
+ toolchain,
+ sysroot,
+ ..
+ }
+ | ProjectWorkspace::Cargo { cargo, toolchain, sysroot, .. } => {
WorkspaceBuildScripts::run_for_workspace(
config,
cargo,
@@ -478,9 +536,8 @@ impl ProjectWorkspace {
format!("Failed to run build scripts for {}", cargo.workspace_root())
})
}
- ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => {
- Ok(WorkspaceBuildScripts::default())
- }
+ ProjectWorkspace::DetachedFile { cargo_script: None, .. }
+ | ProjectWorkspace::Json { .. } => Ok(WorkspaceBuildScripts::default()),
}
}
@@ -536,11 +593,11 @@ impl ProjectWorkspace {
}
}
- pub fn workspace_definition_path(&self) -> Option<&AbsPath> {
+ pub fn workspace_definition_path(&self) -> &AbsPath {
match self {
- ProjectWorkspace::Cargo { cargo, .. } => Some(cargo.workspace_root()),
- ProjectWorkspace::Json { project, .. } => Some(project.path()),
- ProjectWorkspace::DetachedFiles { .. } => None,
+ ProjectWorkspace::Cargo { cargo, .. } => cargo.workspace_root(),
+ ProjectWorkspace::Json { project, .. } => project.path(),
+ ProjectWorkspace::DetachedFile { file, .. } => file,
}
}
@@ -548,10 +605,10 @@ impl ProjectWorkspace {
match self {
ProjectWorkspace::Cargo { sysroot: Ok(sysroot), .. }
| ProjectWorkspace::Json { sysroot: Ok(sysroot), .. }
- | ProjectWorkspace::DetachedFiles { sysroot: Ok(sysroot), .. } => {
+ | ProjectWorkspace::DetachedFile { sysroot: Ok(sysroot), .. } => {
sysroot.discover_proc_macro_srv()
}
- ProjectWorkspace::DetachedFiles { .. } => {
+ ProjectWorkspace::DetachedFile { .. } => {
Err(anyhow::format_err!("cannot find proc-macro server, no sysroot was found"))
}
ProjectWorkspace::Cargo { cargo, .. } => Err(anyhow::format_err!(
@@ -611,6 +668,7 @@ impl ProjectWorkspace {
rustc_cfg: _,
toolchain: _,
target_layout: _,
+ cfg_overrides: _,
} => project
.crates()
.map(|(_, krate)| PackageRoot {
@@ -681,15 +739,50 @@ impl ProjectWorkspace {
}))
.collect()
}
- ProjectWorkspace::DetachedFiles { files, sysroot, .. } => files
- .iter()
- .map(|detached_file| PackageRoot {
+ ProjectWorkspace::DetachedFile { file, cargo_script, sysroot, .. } => {
+ iter::once(PackageRoot {
is_local: true,
- include: vec![detached_file.clone()],
+ include: vec![file.clone()],
exclude: Vec::new(),
})
+ .chain(cargo_script.iter().flat_map(|cargo| {
+ cargo.packages().map(|pkg| {
+ let is_local = cargo[pkg].is_local;
+ let pkg_root = cargo[pkg].manifest.parent().to_path_buf();
+
+ let mut include = vec![pkg_root.clone()];
+
+ // In case target's path is manually set in Cargo.toml to be
+ // outside the package root, add its parent as an extra include.
+ // An example of this situation would look like this:
+ //
+ // ```toml
+ // [lib]
+ // path = "../../src/lib.rs"
+ // ```
+ let extra_targets = cargo[pkg]
+ .targets
+ .iter()
+ .filter(|&&tgt| matches!(cargo[tgt].kind, TargetKind::Lib { .. }))
+ .filter_map(|&tgt| cargo[tgt].root.parent())
+ .map(|tgt| tgt.normalize().to_path_buf())
+ .filter(|path| !path.starts_with(&pkg_root));
+ include.extend(extra_targets);
+
+ let mut exclude = vec![pkg_root.join(".git")];
+ if is_local {
+ exclude.push(pkg_root.join("target"));
+ } else {
+ exclude.push(pkg_root.join("tests"));
+ exclude.push(pkg_root.join("examples"));
+ exclude.push(pkg_root.join("benches"));
+ }
+ PackageRoot { is_local, include, exclude }
+ })
+ }))
.chain(mk_sysroot(sysroot.as_ref()))
- .collect(),
+ .collect()
+ }
}
}
@@ -705,16 +798,17 @@ impl ProjectWorkspace {
let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.num_packages());
cargo.packages().len() + sysroot_package_len + rustc_package_len
}
- ProjectWorkspace::DetachedFiles { sysroot, files, .. } => {
+ ProjectWorkspace::DetachedFile { sysroot, cargo_script, .. } => {
let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.num_packages());
- sysroot_package_len + files.len()
+ sysroot_package_len
+ + cargo_script.as_ref().map_or(1, |cargo| cargo.packages().len())
}
}
}
pub fn to_crate_graph(
&self,
- load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ load: FileLoader<'_>,
extra_env: &FxHashMap<String, String>,
) -> (CrateGraph, ProcMacroPaths) {
let _p = tracing::span!(tracing::Level::INFO, "ProjectWorkspace::to_crate_graph").entered();
@@ -726,6 +820,7 @@ impl ProjectWorkspace {
rustc_cfg,
toolchain: _,
target_layout: _,
+ cfg_overrides,
} => (
project_json_to_crate_graph(
rustc_cfg.clone(),
@@ -733,6 +828,7 @@ impl ProjectWorkspace {
project,
sysroot.as_ref().ok(),
extra_env,
+ cfg_overrides,
),
sysroot,
),
@@ -758,24 +854,39 @@ impl ProjectWorkspace {
),
sysroot,
),
- ProjectWorkspace::DetachedFiles {
- files,
+ ProjectWorkspace::DetachedFile {
+ file,
sysroot,
rustc_cfg,
toolchain: _,
target_layout: _,
+ cfg_overrides,
+ cargo_script,
} => (
- detached_files_to_crate_graph(
- rustc_cfg.clone(),
- load,
- files,
- sysroot.as_ref().ok(),
- ),
+ if let Some(cargo) = cargo_script {
+ cargo_to_crate_graph(
+ &mut |path| load(path),
+ None,
+ cargo,
+ sysroot.as_ref().ok(),
+ rustc_cfg.clone(),
+ cfg_overrides,
+ &WorkspaceBuildScripts::default(),
+ )
+ } else {
+ detached_file_to_crate_graph(
+ rustc_cfg.clone(),
+ load,
+ file,
+ sysroot.as_ref().ok(),
+ cfg_overrides,
+ )
+ },
sysroot,
),
};
- if matches!(sysroot.as_ref().map(|it| it.mode()), Ok(SysrootMode::Workspace(_)))
+ if matches!(sysroot.as_ref().map(|it| it.mode()), Ok(SysrootMode::Stitched(_)))
&& crate_graph.patch_cfg_if()
{
tracing::debug!("Patched std to depend on cfg-if")
@@ -820,35 +931,56 @@ impl ProjectWorkspace {
&& cargo_config_extra_env == o_cargo_config_extra_env
}
(
- Self::Json { project, sysroot, rustc_cfg, toolchain, target_layout: _ },
+ Self::Json {
+ project,
+ sysroot,
+ rustc_cfg,
+ toolchain,
+ target_layout: _,
+ cfg_overrides,
+ },
Self::Json {
project: o_project,
sysroot: o_sysroot,
rustc_cfg: o_rustc_cfg,
toolchain: o_toolchain,
target_layout: _,
+ cfg_overrides: o_cfg_overrides,
},
) => {
project == o_project
&& rustc_cfg == o_rustc_cfg
&& sysroot == o_sysroot
&& toolchain == o_toolchain
+ && cfg_overrides == o_cfg_overrides
}
(
- Self::DetachedFiles { files, sysroot, rustc_cfg, toolchain, target_layout },
- Self::DetachedFiles {
- files: o_files,
+ Self::DetachedFile {
+ file,
+ sysroot,
+ rustc_cfg,
+ cargo_script,
+ toolchain,
+ target_layout,
+ cfg_overrides,
+ },
+ Self::DetachedFile {
+ file: o_file,
sysroot: o_sysroot,
rustc_cfg: o_rustc_cfg,
+ cargo_script: o_cargo_script,
toolchain: o_toolchain,
target_layout: o_target_layout,
+ cfg_overrides: o_cfg_overrides,
},
) => {
- files == o_files
+ file == o_file
&& sysroot == o_sysroot
&& rustc_cfg == o_rustc_cfg
&& toolchain == o_toolchain
&& target_layout == o_target_layout
+ && cfg_overrides == o_cfg_overrides
+ && cargo_script == o_cargo_script
}
_ => false,
}
@@ -865,10 +997,11 @@ impl ProjectWorkspace {
fn project_json_to_crate_graph(
rustc_cfg: Vec<CfgFlag>,
- load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ load: FileLoader<'_>,
project: &ProjectJson,
sysroot: Option<&Sysroot>,
extra_env: &FxHashMap<String, String>,
+ override_cfg: &CfgOverrides,
) -> (CrateGraph, ProcMacroPaths) {
let mut res = (CrateGraph::default(), ProcMacroPaths::default());
let (crate_graph, proc_macros) = &mut res;
@@ -878,12 +1011,13 @@ fn project_json_to_crate_graph(
let r_a_cfg_flag = CfgFlag::Atom("rust_analyzer".to_owned());
let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default();
- let crates: FxHashMap<CrateId, CrateId> = project
+
+ let idx_to_crate_id: FxHashMap<CrateArrayIdx, CrateId> = project
.crates()
- .filter_map(|(crate_id, krate)| Some((crate_id, krate, load(&krate.root_module)?)))
+ .filter_map(|(idx, krate)| Some((idx, krate, load(&krate.root_module)?)))
.map(
|(
- crate_id,
+ idx,
Crate {
display_name,
edition,
@@ -907,17 +1041,22 @@ fn project_json_to_crate_graph(
None => &rustc_cfg,
};
+ let mut cfg_options = target_cfgs
+ .iter()
+ .chain(cfg.iter())
+ .chain(iter::once(&r_a_cfg_flag))
+ .cloned()
+ .collect();
+ override_cfg.apply(
+ &mut cfg_options,
+ display_name.as_ref().map(|it| it.canonical_name()).unwrap_or_default(),
+ );
let crate_graph_crate_id = crate_graph.add_crate_root(
file_id,
*edition,
display_name.clone(),
version.clone(),
- target_cfgs
- .iter()
- .chain(cfg.iter())
- .chain(iter::once(&r_a_cfg_flag))
- .cloned()
- .collect(),
+ Arc::new(cfg_options),
None,
env,
*is_proc_macro,
@@ -939,13 +1078,13 @@ fn project_json_to_crate_graph(
proc_macros.insert(crate_graph_crate_id, node);
}
}
- (crate_id, crate_graph_crate_id)
+ (idx, crate_graph_crate_id)
},
)
.collect();
- for (from, krate) in project.crates() {
- if let Some(&from) = crates.get(&from) {
+ for (from_idx, krate) in project.crates() {
+ if let Some(&from) = idx_to_crate_id.get(&from_idx) {
if let Some((public_deps, libproc_macro)) = &sysroot_deps {
public_deps.add_to_crate_graph(crate_graph, from);
if let Some(proc_macro) = libproc_macro {
@@ -954,8 +1093,8 @@ fn project_json_to_crate_graph(
}
for dep in &krate.deps {
- if let Some(&to) = crates.get(&dep.crate_id) {
- add_dep(crate_graph, from, dep.name.clone(), to)
+ if let Some(&to) = idx_to_crate_id.get(&dep.krate) {
+ add_dep(crate_graph, from, dep.name.clone(), to);
}
}
}
@@ -964,7 +1103,7 @@ fn project_json_to_crate_graph(
}
fn cargo_to_crate_graph(
- load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ load: FileLoader<'_>,
rustc: Option<&(CargoWorkspace, WorkspaceBuildScripts)>,
cargo: &CargoWorkspace,
sysroot: Option<&Sysroot>,
@@ -981,7 +1120,7 @@ fn cargo_to_crate_graph(
None => (SysrootPublicDeps::default(), None),
};
- let cfg_options = create_cfg_options(rustc_cfg);
+ let cfg_options = CfgOptions::from_iter(rustc_cfg);
// Mapping of a package to its library target
let mut pkg_to_lib_crate = FxHashMap::default();
@@ -996,25 +1135,13 @@ fn cargo_to_crate_graph(
let cfg_options = {
let mut cfg_options = cfg_options.clone();
- // Add test cfg for local crates
if cargo[pkg].is_local {
+ // Add test cfg for local crates
cfg_options.insert_atom("test".into());
cfg_options.insert_atom("rust_analyzer".into());
}
- if !override_cfg.global.is_empty() {
- cfg_options.apply_diff(override_cfg.global.clone());
- };
- if let Some(diff) = override_cfg.selective.get(&cargo[pkg].name) {
- // FIXME: this is sort of a hack to deal with #![cfg(not(test))] vanishing such as seen
- // in ed25519_dalek (#7243), and libcore (#9203) (although you only hit that one while
- // working on rust-lang/rust as that's the only time it appears outside sysroot).
- //
- // A more ideal solution might be to reanalyze crates based on where the cursor is and
- // figure out the set of cfgs that would have to apply to make it active.
-
- cfg_options.apply_diff(diff.clone());
- };
+ override_cfg.apply(&mut cfg_options, &cargo[pkg].name);
cfg_options
};
@@ -1150,6 +1277,7 @@ fn cargo_to_crate_graph(
&pkg_crates,
&cfg_options,
override_cfg,
+ // FIXME: Remove this once rustc switched over to rust-project.json
if rustc_workspace.workspace_root() == cargo.workspace_root() {
// the rustc workspace does not use the installed toolchain's proc-macro server
// so we need to make sure we don't use the pre compiled proc-macros there either
@@ -1163,11 +1291,12 @@ fn cargo_to_crate_graph(
res
}
-fn detached_files_to_crate_graph(
+fn detached_file_to_crate_graph(
rustc_cfg: Vec<CfgFlag>,
- load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
- detached_files: &[AbsPathBuf],
+ load: FileLoader<'_>,
+ detached_file: &AbsPathBuf,
sysroot: Option<&Sysroot>,
+ override_cfg: &CfgOverrides,
) -> (CrateGraph, ProcMacroPaths) {
let _p = tracing::span!(tracing::Level::INFO, "detached_files_to_crate_graph").entered();
let mut crate_graph = CrateGraph::default();
@@ -1176,37 +1305,38 @@ fn detached_files_to_crate_graph(
None => (SysrootPublicDeps::default(), None),
};
- let mut cfg_options = create_cfg_options(rustc_cfg);
+ let mut cfg_options = CfgOptions::from_iter(rustc_cfg);
+ cfg_options.insert_atom("test".into());
cfg_options.insert_atom("rust_analyzer".into());
+ override_cfg.apply(&mut cfg_options, "");
+ let cfg_options = Arc::new(cfg_options);
+
+ let file_id = match load(detached_file) {
+ Some(file_id) => file_id,
+ None => {
+ tracing::error!("Failed to load detached file {:?}", detached_file);
+ return (crate_graph, FxHashMap::default());
+ }
+ };
+ let display_name = detached_file
+ .file_stem()
+ .map(|file_stem| CrateDisplayName::from_canonical_name(file_stem.to_owned()));
+ let detached_file_crate = crate_graph.add_crate_root(
+ file_id,
+ Edition::CURRENT,
+ display_name.clone(),
+ None,
+ cfg_options.clone(),
+ None,
+ Env::default(),
+ false,
+ CrateOrigin::Local {
+ repo: None,
+ name: display_name.map(|n| n.canonical_name().to_owned()),
+ },
+ );
- for detached_file in detached_files {
- let file_id = match load(detached_file) {
- Some(file_id) => file_id,
- None => {
- tracing::error!("Failed to load detached file {:?}", detached_file);
- continue;
- }
- };
- let display_name = detached_file
- .file_stem()
- .map(|file_stem| CrateDisplayName::from_canonical_name(file_stem.to_owned()));
- let detached_file_crate = crate_graph.add_crate_root(
- file_id,
- Edition::CURRENT,
- display_name.clone(),
- None,
- cfg_options.clone(),
- None,
- Env::default(),
- false,
- CrateOrigin::Local {
- repo: None,
- name: display_name.map(|n| n.canonical_name().to_owned()),
- },
- );
-
- public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate);
- }
+ public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate);
(crate_graph, FxHashMap::default())
}
@@ -1214,7 +1344,7 @@ fn handle_rustc_crates(
crate_graph: &mut CrateGraph,
proc_macros: &mut ProcMacroPaths,
pkg_to_lib_crate: &mut FxHashMap<Package, CrateId>,
- load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ load: FileLoader<'_>,
rustc_workspace: &CargoWorkspace,
cargo: &CargoWorkspace,
public_deps: &SysrootPublicDeps,
@@ -1246,20 +1376,7 @@ fn handle_rustc_crates(
}
let mut cfg_options = cfg_options.clone();
-
- if !override_cfg.global.is_empty() {
- cfg_options.apply_diff(override_cfg.global.clone());
- };
- if let Some(diff) = override_cfg.selective.get(&rustc_workspace[pkg].name) {
- // FIXME: this is sort of a hack to deal with #![cfg(not(test))] vanishing such as seen
- // in ed25519_dalek (#7243), and libcore (#9203) (although you only hit that one while
- // working on rust-lang/rust as that's the only time it appears outside sysroot).
- //
- // A more ideal solution might be to reanalyze crates based on where the cursor is and
- // figure out the set of cfgs that would have to apply to make it active.
-
- cfg_options.apply_diff(diff.clone());
- };
+ override_cfg.apply(&mut cfg_options, &rustc_workspace[pkg].name);
for &tgt in rustc_workspace[pkg].targets.iter() {
let kind @ TargetKind::Lib { is_proc_macro } = rustc_workspace[tgt].kind else {
@@ -1361,26 +1478,22 @@ fn add_target_crate_root(
};
let mut env = Env::default();
- inject_cargo_env(pkg, &mut env);
- if let Ok(cname) = String::from_str(cargo_name) {
- // CARGO_CRATE_NAME is the name of the Cargo target with - converted to _, such as the name of the library, binary, example, integration test, or benchmark.
- env.set("CARGO_CRATE_NAME", cname.replace('-', "_"));
- }
+ inject_cargo_package_env(&mut env, pkg);
+ inject_cargo_env(&mut env);
+ inject_rustc_tool_env(&mut env, cargo_name, kind);
if let Some(envs) = build_data.map(|it| &it.envs) {
for (k, v) in envs {
env.set(k, v.clone());
}
}
-
- let display_name = CrateDisplayName::from_canonical_name(cargo_name.to_owned());
let crate_id = crate_graph.add_crate_root(
file_id,
edition,
- Some(display_name),
+ Some(CrateDisplayName::from_canonical_name(cargo_name.to_owned())),
Some(pkg.version.to_string()),
- cfg_options,
- potential_cfg_options,
+ Arc::new(cfg_options),
+ potential_cfg_options.map(Arc::new),
env,
matches!(kind, TargetKind::Lib { is_proc_macro: true }),
origin,
@@ -1416,7 +1529,7 @@ fn sysroot_to_crate_graph(
crate_graph: &mut CrateGraph,
sysroot: &Sysroot,
rustc_cfg: Vec<CfgFlag>,
- load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ load: FileLoader<'_>,
) -> (SysrootPublicDeps, Option<CrateId>) {
let _p = tracing::span!(tracing::Level::INFO, "sysroot_to_crate_graph").entered();
match sysroot.mode() {
@@ -1427,7 +1540,17 @@ fn sysroot_to_crate_graph(
cargo,
None,
rustc_cfg,
- &CfgOverrides::default(),
+ &CfgOverrides {
+ global: CfgDiff::new(
+ vec![
+ CfgAtom::Flag("debug_assertions".into()),
+ CfgAtom::Flag("miri".into()),
+ ],
+ vec![],
+ )
+ .unwrap(),
+ ..Default::default()
+ },
&WorkspaceBuildScripts::default(),
);
@@ -1436,7 +1559,7 @@ fn sysroot_to_crate_graph(
let diff = CfgDiff::new(vec![], vec![CfgAtom::Flag("test".into())]).unwrap();
for (cid, c) in cg.iter_mut() {
// uninject `test` flag so `core` keeps working.
- c.cfg_options.apply_diff(diff.clone());
+ Arc::make_mut(&mut c.cfg_options).apply_diff(diff.clone());
// patch the origin
if c.origin.is_local() {
let lang_crate = LangCrateOrigin::from(
@@ -1485,13 +1608,18 @@ fn sysroot_to_crate_graph(
(SysrootPublicDeps { deps: pub_deps }, libproc_macro)
}
SysrootMode::Stitched(stitched) => {
- let cfg_options = create_cfg_options(rustc_cfg);
+ let cfg_options = Arc::new({
+ let mut cfg_options = CfgOptions::default();
+ cfg_options.extend(rustc_cfg);
+ cfg_options.insert_atom("debug_assertions".into());
+ cfg_options.insert_atom("miri".into());
+ cfg_options
+ });
let sysroot_crates: FxHashMap<SysrootCrate, CrateId> = stitched
.crates()
.filter_map(|krate| {
let file_id = load(&stitched[krate].root)?;
- let env = Env::default();
let display_name =
CrateDisplayName::from_canonical_name(stitched[krate].name.clone());
let crate_id = crate_graph.add_crate_root(
@@ -1501,7 +1629,7 @@ fn sysroot_to_crate_graph(
None,
cfg_options.clone(),
None,
- env,
+ Env::default(),
false,
CrateOrigin::Lang(LangCrateOrigin::from(&*stitched[krate].name)),
);
@@ -1559,71 +1687,3 @@ fn add_dep_inner(graph: &mut CrateGraph, from: CrateId, dep: Dependency) {
tracing::error!("{}", err)
}
}
-
-/// Recreates the compile-time environment variables that Cargo sets.
-///
-/// Should be synced with
-/// <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
-///
-/// FIXME: ask Cargo to provide this data instead of re-deriving.
-fn inject_cargo_env(package: &PackageData, env: &mut Env) {
- // FIXME: Missing variables:
- // CARGO_BIN_NAME, CARGO_BIN_EXE_<name>
-
- let manifest_dir = package.manifest.parent();
- env.set("CARGO_MANIFEST_DIR", manifest_dir.as_str().to_owned());
-
- // Not always right, but works for common cases.
- env.set("CARGO", "cargo".into());
-
- env.set("CARGO_PKG_VERSION", package.version.to_string());
- env.set("CARGO_PKG_VERSION_MAJOR", package.version.major.to_string());
- env.set("CARGO_PKG_VERSION_MINOR", package.version.minor.to_string());
- env.set("CARGO_PKG_VERSION_PATCH", package.version.patch.to_string());
- env.set("CARGO_PKG_VERSION_PRE", package.version.pre.to_string());
-
- env.set("CARGO_PKG_AUTHORS", String::new());
-
- env.set("CARGO_PKG_NAME", package.name.clone());
- // FIXME: This isn't really correct (a package can have many crates with different names), but
- // it's better than leaving the variable unset.
- env.set("CARGO_CRATE_NAME", CrateName::normalize_dashes(&package.name).to_string());
- env.set("CARGO_PKG_DESCRIPTION", String::new());
- env.set("CARGO_PKG_HOMEPAGE", String::new());
- env.set("CARGO_PKG_REPOSITORY", String::new());
- env.set("CARGO_PKG_LICENSE", String::new());
-
- env.set("CARGO_PKG_LICENSE_FILE", String::new());
-}
-
-fn create_cfg_options(rustc_cfg: Vec<CfgFlag>) -> CfgOptions {
- let mut cfg_options = CfgOptions::default();
- cfg_options.extend(rustc_cfg);
- cfg_options.insert_atom("debug_assertions".into());
- cfg_options
-}
-
-fn cargo_config_env(
- cargo_toml: &ManifestPath,
- extra_env: &FxHashMap<String, String>,
- sysroot: Option<&Sysroot>,
-) -> FxHashMap<String, String> {
- let mut cargo_config = Sysroot::tool(sysroot, Tool::Cargo);
- cargo_config.envs(extra_env);
- cargo_config
- .current_dir(cargo_toml.parent())
- .args(["-Z", "unstable-options", "config", "get", "env"])
- .env("RUSTC_BOOTSTRAP", "1");
- // if successful we receive `env.key.value = "value" per entry
- tracing::debug!("Discovering cargo config env by {:?}", cargo_config);
- utf8_stdout(cargo_config).map(parse_output_cargo_config_env).unwrap_or_default()
-}
-
-fn parse_output_cargo_config_env(stdout: String) -> FxHashMap<String, String> {
- stdout
- .lines()
- .filter_map(|l| l.strip_prefix("env."))
- .filter_map(|l| l.split_once(".value = "))
- .map(|(key, value)| (key.to_owned(), value.trim_matches('"').to_owned()))
- .collect()
-}
diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
index 0ad19ca9f7..c2a2d6ed91 100644
--- a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
+++ b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
@@ -17,7 +17,6 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"rust_analyzer",
"test",
],
@@ -25,20 +24,22 @@
potential_cfg_options: None,
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO": "$CARGO$",
+ "CARGO_CRATE_NAME": "hello_world",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
- "CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
- "CARGO_CRATE_NAME": "hello_world",
- "CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_NAME": "hello-world",
- "CARGO_PKG_VERSION_PATCH": "0",
- "CARGO": "cargo",
+ "CARGO_PKG_README": "",
"CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PATCH": "0",
"CARGO_PKG_VERSION_PRE": "",
},
},
@@ -77,7 +78,6 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"rust_analyzer",
"test",
],
@@ -85,20 +85,22 @@
potential_cfg_options: None,
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO": "$CARGO$",
+ "CARGO_CRATE_NAME": "hello_world",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
- "CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
- "CARGO_CRATE_NAME": "hello_world",
- "CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_NAME": "hello-world",
- "CARGO_PKG_VERSION_PATCH": "0",
- "CARGO": "cargo",
+ "CARGO_PKG_README": "",
"CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PATCH": "0",
"CARGO_PKG_VERSION_PRE": "",
},
},
@@ -144,7 +146,6 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"rust_analyzer",
"test",
],
@@ -152,20 +153,22 @@
potential_cfg_options: None,
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO": "$CARGO$",
+ "CARGO_CRATE_NAME": "an_example",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
- "CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
- "CARGO_CRATE_NAME": "an_example",
- "CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_NAME": "hello-world",
- "CARGO_PKG_VERSION_PATCH": "0",
- "CARGO": "cargo",
+ "CARGO_PKG_README": "",
"CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PATCH": "0",
"CARGO_PKG_VERSION_PRE": "",
},
},
@@ -211,7 +214,6 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"rust_analyzer",
"test",
],
@@ -219,20 +221,22 @@
potential_cfg_options: None,
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO": "$CARGO$",
+ "CARGO_CRATE_NAME": "it",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
- "CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
- "CARGO_CRATE_NAME": "it",
- "CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_NAME": "hello-world",
- "CARGO_PKG_VERSION_PATCH": "0",
- "CARGO": "cargo",
+ "CARGO_PKG_README": "",
"CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PATCH": "0",
"CARGO_PKG_VERSION_PRE": "",
},
},
@@ -278,7 +282,6 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"feature=default",
"feature=std",
],
@@ -286,7 +289,6 @@
potential_cfg_options: Some(
CfgOptions(
[
- "debug_assertions",
"feature=align",
"feature=const-extern-fn",
"feature=default",
@@ -299,20 +301,22 @@
),
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
- "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
- "CARGO_PKG_VERSION": "0.2.98",
- "CARGO_PKG_AUTHORS": "",
+ "CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "libc",
+ "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_PKG_AUTHORS": "The Rust Project Developers",
+ "CARGO_PKG_DESCRIPTION": "Raw FFI bindings to platform libraries like libc.\n",
+ "CARGO_PKG_HOMEPAGE": "https://github.com/rust-lang/libc",
+ "CARGO_PKG_LICENSE": "MIT OR Apache-2.0",
"CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
- "CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "libc",
- "CARGO_PKG_VERSION_PATCH": "98",
- "CARGO": "cargo",
- "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_README": "README.md",
+ "CARGO_PKG_REPOSITORY": "https://github.com/rust-lang/libc",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.2.98",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "2",
+ "CARGO_PKG_VERSION_PATCH": "98",
"CARGO_PKG_VERSION_PRE": "",
},
},
diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
index 0ad19ca9f7..c2a2d6ed91 100644
--- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
+++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
@@ -17,7 +17,6 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"rust_analyzer",
"test",
],
@@ -25,20 +24,22 @@
potential_cfg_options: None,
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO": "$CARGO$",
+ "CARGO_CRATE_NAME": "hello_world",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
- "CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
- "CARGO_CRATE_NAME": "hello_world",
- "CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_NAME": "hello-world",
- "CARGO_PKG_VERSION_PATCH": "0",
- "CARGO": "cargo",
+ "CARGO_PKG_README": "",
"CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PATCH": "0",
"CARGO_PKG_VERSION_PRE": "",
},
},
@@ -77,7 +78,6 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"rust_analyzer",
"test",
],
@@ -85,20 +85,22 @@
potential_cfg_options: None,
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO": "$CARGO$",
+ "CARGO_CRATE_NAME": "hello_world",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
- "CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
- "CARGO_CRATE_NAME": "hello_world",
- "CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_NAME": "hello-world",
- "CARGO_PKG_VERSION_PATCH": "0",
- "CARGO": "cargo",
+ "CARGO_PKG_README": "",
"CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PATCH": "0",
"CARGO_PKG_VERSION_PRE": "",
},
},
@@ -144,7 +146,6 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"rust_analyzer",
"test",
],
@@ -152,20 +153,22 @@
potential_cfg_options: None,
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO": "$CARGO$",
+ "CARGO_CRATE_NAME": "an_example",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
- "CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
- "CARGO_CRATE_NAME": "an_example",
- "CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_NAME": "hello-world",
- "CARGO_PKG_VERSION_PATCH": "0",
- "CARGO": "cargo",
+ "CARGO_PKG_README": "",
"CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PATCH": "0",
"CARGO_PKG_VERSION_PRE": "",
},
},
@@ -211,7 +214,6 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"rust_analyzer",
"test",
],
@@ -219,20 +221,22 @@
potential_cfg_options: None,
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO": "$CARGO$",
+ "CARGO_CRATE_NAME": "it",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
- "CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
- "CARGO_CRATE_NAME": "it",
- "CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_NAME": "hello-world",
- "CARGO_PKG_VERSION_PATCH": "0",
- "CARGO": "cargo",
+ "CARGO_PKG_README": "",
"CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PATCH": "0",
"CARGO_PKG_VERSION_PRE": "",
},
},
@@ -278,7 +282,6 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"feature=default",
"feature=std",
],
@@ -286,7 +289,6 @@
potential_cfg_options: Some(
CfgOptions(
[
- "debug_assertions",
"feature=align",
"feature=const-extern-fn",
"feature=default",
@@ -299,20 +301,22 @@
),
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
- "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
- "CARGO_PKG_VERSION": "0.2.98",
- "CARGO_PKG_AUTHORS": "",
+ "CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "libc",
+ "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_PKG_AUTHORS": "The Rust Project Developers",
+ "CARGO_PKG_DESCRIPTION": "Raw FFI bindings to platform libraries like libc.\n",
+ "CARGO_PKG_HOMEPAGE": "https://github.com/rust-lang/libc",
+ "CARGO_PKG_LICENSE": "MIT OR Apache-2.0",
"CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
- "CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "libc",
- "CARGO_PKG_VERSION_PATCH": "98",
- "CARGO": "cargo",
- "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_README": "README.md",
+ "CARGO_PKG_REPOSITORY": "https://github.com/rust-lang/libc",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.2.98",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "2",
+ "CARGO_PKG_VERSION_PATCH": "98",
"CARGO_PKG_VERSION_PRE": "",
},
},
diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
index e2334dca87..c291ffcca7 100644
--- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
+++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
@@ -17,27 +17,28 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"rust_analyzer",
],
),
potential_cfg_options: None,
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO": "$CARGO$",
+ "CARGO_CRATE_NAME": "hello_world",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
- "CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
- "CARGO_CRATE_NAME": "hello_world",
- "CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_NAME": "hello-world",
- "CARGO_PKG_VERSION_PATCH": "0",
- "CARGO": "cargo",
+ "CARGO_PKG_README": "",
"CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PATCH": "0",
"CARGO_PKG_VERSION_PRE": "",
},
},
@@ -76,27 +77,28 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"rust_analyzer",
],
),
potential_cfg_options: None,
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO": "$CARGO$",
+ "CARGO_CRATE_NAME": "hello_world",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
- "CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
- "CARGO_CRATE_NAME": "hello_world",
- "CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_NAME": "hello-world",
- "CARGO_PKG_VERSION_PATCH": "0",
- "CARGO": "cargo",
+ "CARGO_PKG_README": "",
"CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PATCH": "0",
"CARGO_PKG_VERSION_PRE": "",
},
},
@@ -142,27 +144,28 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"rust_analyzer",
],
),
potential_cfg_options: None,
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO": "$CARGO$",
+ "CARGO_CRATE_NAME": "an_example",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
- "CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
- "CARGO_CRATE_NAME": "an_example",
- "CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_NAME": "hello-world",
- "CARGO_PKG_VERSION_PATCH": "0",
- "CARGO": "cargo",
+ "CARGO_PKG_README": "",
"CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PATCH": "0",
"CARGO_PKG_VERSION_PRE": "",
},
},
@@ -208,27 +211,28 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"rust_analyzer",
],
),
potential_cfg_options: None,
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO": "$CARGO$",
+ "CARGO_CRATE_NAME": "it",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
- "CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
- "CARGO_CRATE_NAME": "it",
- "CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_NAME": "hello-world",
- "CARGO_PKG_VERSION_PATCH": "0",
- "CARGO": "cargo",
+ "CARGO_PKG_README": "",
"CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PATCH": "0",
"CARGO_PKG_VERSION_PRE": "",
},
},
@@ -274,7 +278,6 @@
),
cfg_options: CfgOptions(
[
- "debug_assertions",
"feature=default",
"feature=std",
],
@@ -282,7 +285,6 @@
potential_cfg_options: Some(
CfgOptions(
[
- "debug_assertions",
"feature=align",
"feature=const-extern-fn",
"feature=default",
@@ -295,20 +297,22 @@
),
env: Env {
entries: {
- "CARGO_PKG_LICENSE": "",
- "CARGO_PKG_VERSION_MAJOR": "0",
- "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
- "CARGO_PKG_VERSION": "0.2.98",
- "CARGO_PKG_AUTHORS": "",
+ "CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "libc",
+ "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_PKG_AUTHORS": "The Rust Project Developers",
+ "CARGO_PKG_DESCRIPTION": "Raw FFI bindings to platform libraries like libc.\n",
+ "CARGO_PKG_HOMEPAGE": "https://github.com/rust-lang/libc",
+ "CARGO_PKG_LICENSE": "MIT OR Apache-2.0",
"CARGO_PKG_LICENSE_FILE": "",
- "CARGO_PKG_HOMEPAGE": "",
- "CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "libc",
- "CARGO_PKG_VERSION_PATCH": "98",
- "CARGO": "cargo",
- "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_README": "README.md",
+ "CARGO_PKG_REPOSITORY": "https://github.com/rust-lang/libc",
+ "CARGO_PKG_RUST_VERSION": "",
+ "CARGO_PKG_VERSION": "0.2.98",
+ "CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_PKG_VERSION_MINOR": "2",
+ "CARGO_PKG_VERSION_PATCH": "98",
"CARGO_PKG_VERSION_PRE": "",
},
},
diff --git a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
index ccaba963de..80c9136589 100644
--- a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
+++ b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
@@ -16,6 +16,7 @@
cfg_options: CfgOptions(
[
"debug_assertions",
+ "miri",
],
),
potential_cfg_options: None,
@@ -53,6 +54,7 @@
cfg_options: CfgOptions(
[
"debug_assertions",
+ "miri",
],
),
potential_cfg_options: None,
@@ -82,6 +84,7 @@
cfg_options: CfgOptions(
[
"debug_assertions",
+ "miri",
],
),
potential_cfg_options: None,
@@ -111,6 +114,7 @@
cfg_options: CfgOptions(
[
"debug_assertions",
+ "miri",
],
),
potential_cfg_options: None,
@@ -140,6 +144,7 @@
cfg_options: CfgOptions(
[
"debug_assertions",
+ "miri",
],
),
potential_cfg_options: None,
@@ -184,6 +189,7 @@
cfg_options: CfgOptions(
[
"debug_assertions",
+ "miri",
],
),
potential_cfg_options: None,
@@ -213,6 +219,7 @@
cfg_options: CfgOptions(
[
"debug_assertions",
+ "miri",
],
),
potential_cfg_options: None,
@@ -299,6 +306,7 @@
cfg_options: CfgOptions(
[
"debug_assertions",
+ "miri",
],
),
potential_cfg_options: None,
@@ -328,6 +336,7 @@
cfg_options: CfgOptions(
[
"debug_assertions",
+ "miri",
],
),
potential_cfg_options: None,
@@ -357,6 +366,7 @@
cfg_options: CfgOptions(
[
"debug_assertions",
+ "miri",
],
),
potential_cfg_options: None,
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 6d70124188..cd3349899e 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -39,11 +39,13 @@ tracing.workspace = true
tracing-subscriber.workspace = true
tracing-tree.workspace = true
triomphe.workspace = true
+toml = "0.8.8"
nohash-hasher.workspace = true
always-assert = "0.2.0"
walkdir = "2.3.2"
semver.workspace = true
memchr = "2.7.1"
+indexmap = { workspace = true, features = ["serde"] }
cfg.workspace = true
flycheck.workspace = true
@@ -92,6 +94,7 @@ in-rust-tree = [
"hir/in-rust-tree",
"hir-def/in-rust-tree",
"hir-ty/in-rust-tree",
+ "load-cargo/in-rust-tree",
]
[lints]
diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs
index 815a98980b..6a5f7b0262 100644
--- a/crates/rust-analyzer/src/cargo_target_spec.rs
+++ b/crates/rust-analyzer/src/cargo_target_spec.rs
@@ -33,53 +33,55 @@ impl CargoTargetSpec {
kind: &RunnableKind,
cfg: &Option<CfgExpr>,
) -> (Vec<String>, Vec<String>) {
- let mut args = Vec::new();
- let mut extra_args = Vec::new();
+ let extra_test_binary_args = snap.config.runnables().extra_test_binary_args;
+
+ let mut cargo_args = Vec::new();
+ let mut executable_args = Vec::new();
match kind {
RunnableKind::Test { test_id, attr } => {
- args.push("test".to_owned());
- extra_args.push(test_id.to_string());
+ cargo_args.push("test".to_owned());
+ executable_args.push(test_id.to_string());
if let TestId::Path(_) = test_id {
- extra_args.push("--exact".to_owned());
+ executable_args.push("--exact".to_owned());
}
- extra_args.push("--nocapture".to_owned());
+ executable_args.extend(extra_test_binary_args);
if attr.ignore {
- extra_args.push("--ignored".to_owned());
+ executable_args.push("--ignored".to_owned());
}
}
RunnableKind::TestMod { path } => {
- args.push("test".to_owned());
- extra_args.push(path.clone());
- extra_args.push("--nocapture".to_owned());
+ cargo_args.push("test".to_owned());
+ executable_args.push(path.clone());
+ executable_args.extend(extra_test_binary_args);
}
RunnableKind::Bench { test_id } => {
- args.push("bench".to_owned());
- extra_args.push(test_id.to_string());
+ cargo_args.push("bench".to_owned());
+ executable_args.push(test_id.to_string());
if let TestId::Path(_) = test_id {
- extra_args.push("--exact".to_owned());
+ executable_args.push("--exact".to_owned());
}
- extra_args.push("--nocapture".to_owned());
+ executable_args.extend(extra_test_binary_args);
}
RunnableKind::DocTest { test_id } => {
- args.push("test".to_owned());
- args.push("--doc".to_owned());
- extra_args.push(test_id.to_string());
- extra_args.push("--nocapture".to_owned());
+ cargo_args.push("test".to_owned());
+ cargo_args.push("--doc".to_owned());
+ executable_args.push(test_id.to_string());
+ executable_args.extend(extra_test_binary_args);
}
RunnableKind::Bin => {
let subcommand = match spec {
Some(CargoTargetSpec { target_kind: TargetKind::Test, .. }) => "test",
_ => "run",
};
- args.push(subcommand.to_owned());
+ cargo_args.push(subcommand.to_owned());
}
}
let (allowed_features, target_required_features) = if let Some(mut spec) = spec {
let allowed_features = mem::take(&mut spec.features);
let required_features = mem::take(&mut spec.required_features);
- spec.push_to(&mut args, kind);
+ spec.push_to(&mut cargo_args, kind);
(allowed_features, required_features)
} else {
(Default::default(), Default::default())
@@ -89,10 +91,10 @@ impl CargoTargetSpec {
match &cargo_config.features {
CargoFeatures::All => {
- args.push("--all-features".to_owned());
+ cargo_args.push("--all-features".to_owned());
for feature in target_required_features {
- args.push("--features".to_owned());
- args.push(feature);
+ cargo_args.push("--features".to_owned());
+ cargo_args.push(feature);
}
}
CargoFeatures::Selected { features, no_default_features } => {
@@ -108,16 +110,16 @@ impl CargoTargetSpec {
feats.dedup();
for feature in feats {
- args.push("--features".to_owned());
- args.push(feature);
+ cargo_args.push("--features".to_owned());
+ cargo_args.push(feature);
}
if *no_default_features {
- args.push("--no-default-features".to_owned());
+ cargo_args.push("--no-default-features".to_owned());
}
}
}
- (args, extra_args)
+ (cargo_args, executable_args)
}
pub(crate) fn for_file(
@@ -208,6 +210,7 @@ fn required_features(cfg_expr: &CfgExpr, features: &mut Vec<String>) {
mod tests {
use super::*;
+ use ide::Edition;
use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
use syntax::{
ast::{self, AstNode},
@@ -216,7 +219,7 @@ mod tests {
fn check(cfg: &str, expected_features: &[&str]) {
let cfg_expr = {
- let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
+ let source_file = ast::SourceFile::parse(cfg, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap, DUMMY);
CfgExpr::parse(&tt)
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index fdd77199aa..a1eea8839e 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -280,7 +280,9 @@ impl flags::AnalysisStats {
let mut fail = 0;
for &a in adts {
let generic_params = db.generic_params(a.into());
- if generic_params.iter().next().is_some() || generic_params.iter_lt().next().is_some() {
+ if generic_params.iter_type_or_consts().next().is_some()
+ || generic_params.iter_lt().next().is_some()
+ {
// Data types with generics don't have layout.
continue;
}
diff --git a/crates/rust-analyzer/src/cli/parse.rs b/crates/rust-analyzer/src/cli/parse.rs
index 757f2dd70c..ead4d706e6 100644
--- a/crates/rust-analyzer/src/cli/parse.rs
+++ b/crates/rust-analyzer/src/cli/parse.rs
@@ -1,4 +1,5 @@
//! Read Rust code on stdin, print syntax tree on stdout.
+use ide::Edition;
use syntax::{AstNode, SourceFile};
use crate::cli::{flags, read_stdin};
@@ -7,7 +8,7 @@ impl flags::Parse {
pub fn run(self) -> anyhow::Result<()> {
let _p = tracing::span!(tracing::Level::INFO, "parsing").entered();
let text = read_stdin()?;
- let file = SourceFile::parse(&text).tree();
+ let file = SourceFile::parse(&text, Edition::CURRENT).tree();
if !self.no_dump {
println!("{:#?}", file.syntax());
}
diff --git a/crates/rust-analyzer/src/cli/progress_report.rs b/crates/rust-analyzer/src/cli/progress_report.rs
index b233730099..6964977840 100644
--- a/crates/rust-analyzer/src/cli/progress_report.rs
+++ b/crates/rust-analyzer/src/cli/progress_report.rs
@@ -92,7 +92,7 @@ impl<'a> ProgressReport<'a> {
let _ = io::stdout().write(output.as_bytes());
let _ = io::stdout().flush();
- self.text = text.to_owned();
+ text.clone_into(&mut self.text);
}
fn set_value(&mut self, value: f32) {
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index eeec13a14b..2f9394d0ee 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -75,12 +75,14 @@ impl Tester {
&cargo_config.extra_env,
);
- let workspace = ProjectWorkspace::DetachedFiles {
- files: vec![tmp_file],
+ let workspace = ProjectWorkspace::DetachedFile {
+ file: tmp_file,
sysroot,
rustc_cfg: vec![],
toolchain: None,
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
+ cfg_overrides: Default::default(),
+ cargo_script: None,
};
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: false,
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 7475a8e6e6..e956791d9d 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -6,21 +6,21 @@
//! Of particular interest is the `feature_flags` hash map: while other fields
//! configure the server itself, feature flags are passed into analysis, and
//! tweak things like automatic insertion of `()` in completions.
-
use std::{fmt, iter, ops::Not};
use cfg::{CfgAtom, CfgDiff};
-use flycheck::FlycheckConfig;
+use flycheck::{CargoOptions, FlycheckConfig};
use ide::{
AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,
HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayFieldsToResolve,
InlayHintsConfig, JoinLinesConfig, MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind,
- Snippet, SnippetScope,
+ Snippet, SnippetScope, SourceRootId,
};
use ide_db::{
imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
SnippetCap,
};
+use indexmap::IndexMap;
use itertools::Itertools;
use lsp_types::{ClientCapabilities, MarkupKind};
use paths::{Utf8Path, Utf8PathBuf};
@@ -29,7 +29,7 @@ use project_model::{
};
use rustc_hash::{FxHashMap, FxHashSet};
use semver::Version;
-use serde::{de::DeserializeOwned, Deserialize};
+use serde::{de::DeserializeOwned, Deserialize, Serialize};
use stdx::format_to_acc;
use vfs::{AbsPath, AbsPathBuf};
@@ -59,36 +59,45 @@ mod patch_old_style;
// To deprecate an option by replacing it with another name use `new_name | `old_name` so that we keep
// parsing the old name.
config_data! {
- struct ConfigData {
+ /// Configs that apply on a workspace-wide scope. There are 3 levels on which a global configuration can be configured
+ ///
+ /// 1. `rust-analyzer.toml` file under user's config directory (e.g ~/.config/rust-analyzer.toml)
+ /// 2. Client's own configurations (e.g `settings.json` on VS Code)
+ /// 3. `rust-analyzer.toml` file located at the workspace root
+ ///
+ /// A config is searched for by traversing a "config tree" in a bottom up fashion. It is chosen by the nearest first principle.
+ global: struct GlobalDefaultConfigData <- GlobalConfigInput -> {
/// Whether to insert #[must_use] when generating `as_` methods
/// for enum variants.
- assist_emitMustUse: bool = "false",
+ assist_emitMustUse: bool = false,
/// Placeholder expression to use for missing expressions in assists.
- assist_expressionFillDefault: ExprFillDefaultDef = "\"todo\"",
+ assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo,
/// Warm up caches on project load.
- cachePriming_enable: bool = "true",
+ cachePriming_enable: bool = true,
/// How many worker threads to handle priming caches. The default `0` means to pick automatically.
- cachePriming_numThreads: ParallelCachePrimingNumThreads = "0",
+ cachePriming_numThreads: ParallelCachePrimingNumThreads = 0u8,
+ /// Pass `--all-targets` to cargo invocation.
+ cargo_allTargets: bool = true,
/// Automatically refresh project info via `cargo metadata` on
/// `Cargo.toml` or `.cargo/config.toml` changes.
- cargo_autoreload: bool = "true",
+ pub(crate) cargo_autoreload: bool = true,
/// Run build scripts (`build.rs`) for more precise code analysis.
- cargo_buildScripts_enable: bool = "true",
+ cargo_buildScripts_enable: bool = true,
/// Specifies the working directory for running build scripts.
/// - "workspace": run build scripts for a workspace in the workspace's root directory.
/// This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`.
/// - "root": run build scripts in the project's root directory.
/// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
/// is set.
- cargo_buildScripts_invocationLocation: InvocationLocation = "\"workspace\"",
+ cargo_buildScripts_invocationLocation: InvocationLocation = InvocationLocation::Workspace,
/// Specifies the invocation strategy to use when running the build scripts command.
/// If `per_workspace` is set, the command will be executed for each workspace.
/// If `once` is set, the command will be executed once.
/// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
/// is set.
- cargo_buildScripts_invocationStrategy: InvocationStrategy = "\"per_workspace\"",
+ cargo_buildScripts_invocationStrategy: InvocationStrategy = InvocationStrategy::PerWorkspace,
/// Override the command rust-analyzer uses to run build scripts and
/// build procedural macros. The command is required to output json
/// and should therefore include `--message-format=json` or a similar
@@ -107,80 +116,84 @@ config_data! {
/// cargo check --quiet --workspace --message-format=json --all-targets
/// ```
/// .
- cargo_buildScripts_overrideCommand: Option<Vec<String>> = "null",
+ cargo_buildScripts_overrideCommand: Option<Vec<String>> = None,
/// Rerun proc-macros building/build-scripts running when proc-macro
/// or build-script sources change and are saved.
- cargo_buildScripts_rebuildOnSave: bool = "true",
+ cargo_buildScripts_rebuildOnSave: bool = true,
/// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
/// avoid checking unnecessary things.
- cargo_buildScripts_useRustcWrapper: bool = "true",
+ cargo_buildScripts_useRustcWrapper: bool = true,
/// List of cfg options to enable with the given values.
- cargo_cfgs: FxHashMap<String, String> = "{}",
+ cargo_cfgs: FxHashMap<String, Option<String>> = {
+ let mut m = FxHashMap::default();
+ m.insert("debug_assertions".to_owned(), None);
+ m.insert("miri".to_owned(), None);
+ m
+ },
/// Extra arguments that are passed to every cargo invocation.
- cargo_extraArgs: Vec<String> = "[]",
+ cargo_extraArgs: Vec<String> = vec![],
/// Extra environment variables that will be set when running cargo, rustc
/// or other commands within the workspace. Useful for setting RUSTFLAGS.
- cargo_extraEnv: FxHashMap<String, String> = "{}",
+ cargo_extraEnv: FxHashMap<String, String> = FxHashMap::default(),
/// List of features to activate.
///
/// Set this to `"all"` to pass `--all-features` to cargo.
- cargo_features: CargoFeaturesDef = "[]",
+ cargo_features: CargoFeaturesDef = CargoFeaturesDef::Selected(vec![]),
/// Whether to pass `--no-default-features` to cargo.
- cargo_noDefaultFeatures: bool = "false",
+ cargo_noDefaultFeatures: bool = false,
/// Relative path to the sysroot, or "discover" to try to automatically find it via
/// "rustc --print sysroot".
///
/// Unsetting this disables sysroot loading.
///
/// This option does not take effect until rust-analyzer is restarted.
- cargo_sysroot: Option<String> = "\"discover\"",
+ cargo_sysroot: Option<String> = Some("discover".to_owned()),
/// Whether to run cargo metadata on the sysroot library allowing rust-analyzer to analyze
/// third-party dependencies of the standard libraries.
///
/// This will cause `cargo` to create a lockfile in your sysroot directory. rust-analyzer
/// will attempt to clean up afterwards, but nevertheless requires the location to be
/// writable to.
- cargo_sysrootQueryMetadata: bool = "false",
+ cargo_sysrootQueryMetadata: bool = false,
/// Relative path to the sysroot library sources. If left unset, this will default to
/// `{cargo.sysroot}/lib/rustlib/src/rust/library`.
///
/// This option does not take effect until rust-analyzer is restarted.
- cargo_sysrootSrc: Option<String> = "null",
+ cargo_sysrootSrc: Option<String> = None,
/// Compilation target override (target triple).
// FIXME(@poliorcetics): move to multiple targets here too, but this will need more work
// than `checkOnSave_target`
- cargo_target: Option<String> = "null",
+ cargo_target: Option<String> = None,
/// Optional path to a rust-analyzer specific target directory.
/// This prevents rust-analyzer's `cargo check` and initial build-script and proc-macro
/// building from locking the `Cargo.lock` at the expense of duplicating build artifacts.
///
/// Set to `true` to use a subdirectory of the existing target directory or
/// set to a path relative to the workspace to use that path.
- cargo_targetDir | rust_analyzerTargetDir: Option<TargetDirectory> = "null",
- /// Unsets the implicit `#[cfg(test)]` for the specified crates.
- cargo_unsetTest: Vec<String> = "[\"core\"]",
+ cargo_targetDir | rust_analyzerTargetDir: Option<TargetDirectory> = None,
/// Run the check command for diagnostics on save.
- checkOnSave | checkOnSave_enable: bool = "true",
+ checkOnSave | checkOnSave_enable: bool = true,
- /// Check all targets and tests (`--all-targets`).
- check_allTargets | checkOnSave_allTargets: bool = "true",
+ /// Check all targets and tests (`--all-targets`). Defaults to
+ /// `#rust-analyzer.cargo.allTargets#`.
+ check_allTargets | checkOnSave_allTargets: Option<bool> = None,
/// Cargo command to use for `cargo check`.
- check_command | checkOnSave_command: String = "\"check\"",
+ check_command | checkOnSave_command: String = "check".to_owned(),
/// Extra arguments for `cargo check`.
- check_extraArgs | checkOnSave_extraArgs: Vec<String> = "[]",
+ check_extraArgs | checkOnSave_extraArgs: Vec<String> = vec![],
/// Extra environment variables that will be set when running `cargo check`.
/// Extends `#rust-analyzer.cargo.extraEnv#`.
- check_extraEnv | checkOnSave_extraEnv: FxHashMap<String, String> = "{}",
+ check_extraEnv | checkOnSave_extraEnv: FxHashMap<String, String> = FxHashMap::default(),
/// List of features to activate. Defaults to
/// `#rust-analyzer.cargo.features#`.
///
/// Set to `"all"` to pass `--all-features` to Cargo.
- check_features | checkOnSave_features: Option<CargoFeaturesDef> = "null",
+ check_features | checkOnSave_features: Option<CargoFeaturesDef> = None,
/// List of `cargo check` (or other command specified in `check.command`) diagnostics to ignore.
///
/// For example for `cargo check`: `dead_code`, `unused_imports`, `unused_variables`,...
- check_ignore: FxHashSet<String> = "[]",
+ check_ignore: FxHashSet<String> = FxHashSet::default(),
/// Specifies the working directory for running checks.
/// - "workspace": run checks for workspaces in the corresponding workspaces' root directories.
// FIXME: Ideally we would support this in some way
@@ -188,16 +201,16 @@ config_data! {
/// - "root": run checks in the project's root directory.
/// This config only has an effect when `#rust-analyzer.check.overrideCommand#`
/// is set.
- check_invocationLocation | checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"",
+ check_invocationLocation | checkOnSave_invocationLocation: InvocationLocation = InvocationLocation::Workspace,
/// Specifies the invocation strategy to use when running the check command.
/// If `per_workspace` is set, the command will be executed for each workspace.
/// If `once` is set, the command will be executed once.
/// This config only has an effect when `#rust-analyzer.check.overrideCommand#`
/// is set.
- check_invocationStrategy | checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"",
+ check_invocationStrategy | checkOnSave_invocationStrategy: InvocationStrategy = InvocationStrategy::PerWorkspace,
/// Whether to pass `--no-default-features` to Cargo. Defaults to
/// `#rust-analyzer.cargo.noDefaultFeatures#`.
- check_noDefaultFeatures | checkOnSave_noDefaultFeatures: Option<bool> = "null",
+ check_noDefaultFeatures | checkOnSave_noDefaultFeatures: Option<bool> = None,
/// Override the command rust-analyzer uses instead of `cargo check` for
/// diagnostics on save. The command is required to output json and
/// should therefore include `--message-format=json` or a similar option
@@ -225,309 +238,171 @@ config_data! {
/// cargo check --workspace --message-format=json --all-targets
/// ```
/// .
- check_overrideCommand | checkOnSave_overrideCommand: Option<Vec<String>> = "null",
+ check_overrideCommand | checkOnSave_overrideCommand: Option<Vec<String>> = None,
/// Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.
///
/// Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g.
/// `["aarch64-apple-darwin", "x86_64-apple-darwin"]`.
///
/// Aliased as `"checkOnSave.targets"`.
- check_targets | checkOnSave_targets | checkOnSave_target: Option<CheckOnSaveTargets> = "null",
+ check_targets | checkOnSave_targets | checkOnSave_target: Option<CheckOnSaveTargets> = None,
/// Whether `--workspace` should be passed to `cargo check`.
/// If false, `-p <package>` will be passed instead.
- check_workspace: bool = "true",
-
- /// Toggles the additional completions that automatically add imports when completed.
- /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
- completion_autoimport_enable: bool = "true",
- /// Toggles the additional completions that automatically show method calls and field accesses
- /// with `self` prefixed to them when inside a method.
- completion_autoself_enable: bool = "true",
- /// Whether to add parenthesis and argument snippets when completing function.
- completion_callable_snippets: CallableCompletionDef = "\"fill_arguments\"",
- /// Whether to show full function/method signatures in completion docs.
- completion_fullFunctionSignatures_enable: bool = "false",
- /// Maximum number of completions to return. If `None`, the limit is infinite.
- completion_limit: Option<usize> = "null",
- /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
- completion_postfix_enable: bool = "true",
- /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
- completion_privateEditable_enable: bool = "false",
- /// Custom completion snippets.
- // NOTE: Keep this list in sync with the feature docs of user snippets.
- completion_snippets_custom: FxHashMap<String, SnippetDef> = r#"{
- "Arc::new": {
- "postfix": "arc",
- "body": "Arc::new(${receiver})",
- "requires": "std::sync::Arc",
- "description": "Put the expression into an `Arc`",
- "scope": "expr"
- },
- "Rc::new": {
- "postfix": "rc",
- "body": "Rc::new(${receiver})",
- "requires": "std::rc::Rc",
- "description": "Put the expression into an `Rc`",
- "scope": "expr"
- },
- "Box::pin": {
- "postfix": "pinbox",
- "body": "Box::pin(${receiver})",
- "requires": "std::boxed::Box",
- "description": "Put the expression into a pinned `Box`",
- "scope": "expr"
- },
- "Ok": {
- "postfix": "ok",
- "body": "Ok(${receiver})",
- "description": "Wrap the expression in a `Result::Ok`",
- "scope": "expr"
- },
- "Err": {
- "postfix": "err",
- "body": "Err(${receiver})",
- "description": "Wrap the expression in a `Result::Err`",
- "scope": "expr"
- },
- "Some": {
- "postfix": "some",
- "body": "Some(${receiver})",
- "description": "Wrap the expression in an `Option::Some`",
- "scope": "expr"
- }
- }"#,
- /// Whether to enable term search based snippets like `Some(foo.bar().baz())`.
- completion_termSearch_enable: bool = "false",
+ check_workspace: bool = true,
/// List of rust-analyzer diagnostics to disable.
- diagnostics_disabled: FxHashSet<String> = "[]",
+ diagnostics_disabled: FxHashSet<String> = FxHashSet::default(),
/// Whether to show native rust-analyzer diagnostics.
- diagnostics_enable: bool = "true",
+ diagnostics_enable: bool = true,
/// Whether to show experimental rust-analyzer diagnostics that might
/// have more false positives than usual.
- diagnostics_experimental_enable: bool = "false",
+ diagnostics_experimental_enable: bool = false,
/// Map of prefixes to be substituted when parsing diagnostic file paths.
/// This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.
- diagnostics_remapPrefix: FxHashMap<String, String> = "{}",
+ diagnostics_remapPrefix: FxHashMap<String, String> = FxHashMap::default(),
/// Whether to run additional style lints.
- diagnostics_styleLints_enable: bool = "false",
+ diagnostics_styleLints_enable: bool = false,
/// List of warnings that should be displayed with hint severity.
///
/// The warnings will be indicated by faded text or three dots in code
/// and will not show up in the `Problems Panel`.
- diagnostics_warningsAsHint: Vec<String> = "[]",
+ diagnostics_warningsAsHint: Vec<String> = vec![],
/// List of warnings that should be displayed with info severity.
///
/// The warnings will be indicated by a blue squiggly underline in code
/// and a blue icon in the `Problems Panel`.
- diagnostics_warningsAsInfo: Vec<String> = "[]",
+ diagnostics_warningsAsInfo: Vec<String> = vec![],
/// These directories will be ignored by rust-analyzer. They are
/// relative to the workspace root, and globs are not supported. You may
/// also need to add the folders to Code's `files.watcherExclude`.
- files_excludeDirs: Vec<Utf8PathBuf> = "[]",
+ files_excludeDirs: Vec<Utf8PathBuf> = vec![],
/// Controls file watching implementation.
- files_watcher: FilesWatcherDef = "\"client\"",
-
- /// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
- highlightRelated_breakPoints_enable: bool = "true",
- /// Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure.
- highlightRelated_closureCaptures_enable: bool = "true",
- /// Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
- highlightRelated_exitPoints_enable: bool = "true",
- /// Enables highlighting of related references while the cursor is on any identifier.
- highlightRelated_references_enable: bool = "true",
- /// Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.
- highlightRelated_yieldPoints_enable: bool = "true",
+ files_watcher: FilesWatcherDef = FilesWatcherDef::Client,
/// Whether to show `Debug` action. Only applies when
/// `#rust-analyzer.hover.actions.enable#` is set.
- hover_actions_debug_enable: bool = "true",
+ hover_actions_debug_enable: bool = true,
/// Whether to show HoverActions in Rust files.
- hover_actions_enable: bool = "true",
+ hover_actions_enable: bool = true,
/// Whether to show `Go to Type Definition` action. Only applies when
/// `#rust-analyzer.hover.actions.enable#` is set.
- hover_actions_gotoTypeDef_enable: bool = "true",
+ hover_actions_gotoTypeDef_enable: bool = true,
/// Whether to show `Implementations` action. Only applies when
/// `#rust-analyzer.hover.actions.enable#` is set.
- hover_actions_implementations_enable: bool = "true",
+ hover_actions_implementations_enable: bool = true,
/// Whether to show `References` action. Only applies when
/// `#rust-analyzer.hover.actions.enable#` is set.
- hover_actions_references_enable: bool = "false",
+ hover_actions_references_enable: bool = false,
/// Whether to show `Run` action. Only applies when
/// `#rust-analyzer.hover.actions.enable#` is set.
- hover_actions_run_enable: bool = "true",
+ hover_actions_run_enable: bool = true,
/// Whether to show documentation on hover.
- hover_documentation_enable: bool = "true",
+ hover_documentation_enable: bool = true,
/// Whether to show keyword hover popups. Only applies when
/// `#rust-analyzer.hover.documentation.enable#` is set.
- hover_documentation_keywords_enable: bool = "true",
+ hover_documentation_keywords_enable: bool = true,
/// Use markdown syntax for links on hover.
- hover_links_enable: bool = "true",
+ hover_links_enable: bool = true,
/// How to render the align information in a memory layout hover.
- hover_memoryLayout_alignment: Option<MemoryLayoutHoverRenderKindDef> = "\"hexadecimal\"",
+ hover_memoryLayout_alignment: Option<MemoryLayoutHoverRenderKindDef> = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal),
/// Whether to show memory layout data on hover.
- hover_memoryLayout_enable: bool = "true",
+ hover_memoryLayout_enable: bool = true,
/// How to render the niche information in a memory layout hover.
- hover_memoryLayout_niches: Option<bool> = "false",
+ hover_memoryLayout_niches: Option<bool> = Some(false),
/// How to render the offset information in a memory layout hover.
- hover_memoryLayout_offset: Option<MemoryLayoutHoverRenderKindDef> = "\"hexadecimal\"",
+ hover_memoryLayout_offset: Option<MemoryLayoutHoverRenderKindDef> = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal),
/// How to render the size information in a memory layout hover.
- hover_memoryLayout_size: Option<MemoryLayoutHoverRenderKindDef> = "\"both\"",
+ hover_memoryLayout_size: Option<MemoryLayoutHoverRenderKindDef> = Some(MemoryLayoutHoverRenderKindDef::Both),
/// How many fields of a struct to display when hovering a struct.
- hover_show_structFields: Option<usize> = "null",
+ hover_show_structFields: Option<usize> = None,
/// How many associated items of a trait to display when hovering a trait.
- hover_show_traitAssocItems: Option<usize> = "null",
+ hover_show_traitAssocItems: Option<usize> = None,
- /// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
- imports_granularity_enforce: bool = "false",
- /// How imports should be grouped into use statements.
- imports_granularity_group: ImportGranularityDef = "\"crate\"",
- /// Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines.
- imports_group_enable: bool = "true",
- /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
- imports_merge_glob: bool = "true",
- /// Prefer to unconditionally use imports of the core and alloc crate, over the std crate.
- imports_preferNoStd | imports_prefer_no_std: bool = "false",
- /// Whether to prefer import paths containing a `prelude` module.
- imports_preferPrelude: bool = "false",
- /// The path structure for newly inserted paths to use.
- imports_prefix: ImportPrefixDef = "\"plain\"",
-
- /// Whether to show inlay type hints for binding modes.
- inlayHints_bindingModeHints_enable: bool = "false",
- /// Whether to show inlay type hints for method chains.
- inlayHints_chainingHints_enable: bool = "true",
- /// Whether to show inlay hints after a closing `}` to indicate what item it belongs to.
- inlayHints_closingBraceHints_enable: bool = "true",
- /// Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
- /// to always show them).
- inlayHints_closingBraceHints_minLines: usize = "25",
- /// Whether to show inlay hints for closure captures.
- inlayHints_closureCaptureHints_enable: bool = "false",
- /// Whether to show inlay type hints for return types of closures.
- inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = "\"never\"",
- /// Closure notation in type and chaining inlay hints.
- inlayHints_closureStyle: ClosureStyle = "\"impl_fn\"",
- /// Whether to show enum variant discriminant hints.
- inlayHints_discriminantHints_enable: DiscriminantHintsDef = "\"never\"",
- /// Whether to show inlay hints for type adjustments.
- inlayHints_expressionAdjustmentHints_enable: AdjustmentHintsDef = "\"never\"",
- /// Whether to hide inlay hints for type adjustments outside of `unsafe` blocks.
- inlayHints_expressionAdjustmentHints_hideOutsideUnsafe: bool = "false",
- /// Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).
- inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = "\"prefix\"",
- /// Whether to show implicit drop hints.
- inlayHints_implicitDrops_enable: bool = "false",
- /// Whether to show inlay type hints for elided lifetimes in function signatures.
- inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"",
- /// Whether to prefer using parameter names as the name for elided lifetime hints if possible.
- inlayHints_lifetimeElisionHints_useParameterNames: bool = "false",
- /// Maximum length for inlay hints. Set to null to have an unlimited length.
- inlayHints_maxLength: Option<usize> = "25",
- /// Whether to show function parameter name inlay hints at the call
- /// site.
- inlayHints_parameterHints_enable: bool = "true",
- /// Whether to show exclusive range inlay hints.
- inlayHints_rangeExclusiveHints_enable: bool = "false",
- /// Whether to show inlay hints for compiler inserted reborrows.
- /// This setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.
- inlayHints_reborrowHints_enable: ReborrowHintsDef = "\"never\"",
- /// Whether to render leading colons for type hints, and trailing colons for parameter hints.
- inlayHints_renderColons: bool = "true",
- /// Whether to show inlay type hints for variables.
- inlayHints_typeHints_enable: bool = "true",
- /// Whether to hide inlay type hints for `let` statements that initialize to a closure.
- /// Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.
- inlayHints_typeHints_hideClosureInitialization: bool = "false",
- /// Whether to hide inlay type hints for constructors.
- inlayHints_typeHints_hideNamedConstructor: bool = "false",
/// Enables the experimental support for interpreting tests.
- interpret_tests: bool = "false",
-
- /// Join lines merges consecutive declaration and initialization of an assignment.
- joinLines_joinAssignments: bool = "true",
- /// Join lines inserts else between consecutive ifs.
- joinLines_joinElseIf: bool = "true",
- /// Join lines removes trailing commas.
- joinLines_removeTrailingComma: bool = "true",
- /// Join lines unwraps trivial blocks.
- joinLines_unwrapTrivialBlock: bool = "true",
-
+ interpret_tests: bool = false,
/// Whether to show `Debug` lens. Only applies when
/// `#rust-analyzer.lens.enable#` is set.
- lens_debug_enable: bool = "true",
+ lens_debug_enable: bool = true,
/// Whether to show CodeLens in Rust files.
- lens_enable: bool = "true",
+ lens_enable: bool = true,
/// Internal config: use custom client-side commands even when the
/// client doesn't set the corresponding capability.
- lens_forceCustomCommands: bool = "true",
+ lens_forceCustomCommands: bool = true,
/// Whether to show `Implementations` lens. Only applies when
/// `#rust-analyzer.lens.enable#` is set.
- lens_implementations_enable: bool = "true",
+ lens_implementations_enable: bool = true,
/// Where to render annotations.
- lens_location: AnnotationLocation = "\"above_name\"",
+ lens_location: AnnotationLocation = AnnotationLocation::AboveName,
/// Whether to show `References` lens for Struct, Enum, and Union.
/// Only applies when `#rust-analyzer.lens.enable#` is set.
- lens_references_adt_enable: bool = "false",
+ lens_references_adt_enable: bool = false,
/// Whether to show `References` lens for Enum Variants.
/// Only applies when `#rust-analyzer.lens.enable#` is set.
- lens_references_enumVariant_enable: bool = "false",
+ lens_references_enumVariant_enable: bool = false,
/// Whether to show `Method References` lens. Only applies when
/// `#rust-analyzer.lens.enable#` is set.
- lens_references_method_enable: bool = "false",
+ lens_references_method_enable: bool = false,
/// Whether to show `References` lens for Trait.
/// Only applies when `#rust-analyzer.lens.enable#` is set.
- lens_references_trait_enable: bool = "false",
+ lens_references_trait_enable: bool = false,
/// Whether to show `Run` lens. Only applies when
/// `#rust-analyzer.lens.enable#` is set.
- lens_run_enable: bool = "true",
+ lens_run_enable: bool = true,
/// Disable project auto-discovery in favor of explicitly specified set
/// of projects.
///
/// Elements must be paths pointing to `Cargo.toml`,
/// `rust-project.json`, or JSON objects in `rust-project.json` format.
- linkedProjects: Vec<ManifestOrProjectJson> = "[]",
+ linkedProjects: Vec<ManifestOrProjectJson> = vec![],
/// Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
- lru_capacity: Option<usize> = "null",
+ lru_capacity: Option<usize> = None,
/// Sets the LRU capacity of the specified queries.
- lru_query_capacities: FxHashMap<Box<str>, usize> = "{}",
+ lru_query_capacities: FxHashMap<Box<str>, usize> = FxHashMap::default(),
/// Whether to show `can't find Cargo.toml` error message.
- notifications_cargoTomlNotFound: bool = "true",
+ notifications_cargoTomlNotFound: bool = true,
/// Whether to send an UnindexedProject notification to the client.
- notifications_unindexedProject: bool = "false",
+ notifications_unindexedProject: bool = false,
/// How many worker threads in the main loop. The default `null` means to pick automatically.
- numThreads: Option<usize> = "null",
+ numThreads: Option<usize> = None,
/// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
- procMacro_attributes_enable: bool = "true",
+ procMacro_attributes_enable: bool = true,
/// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
- procMacro_enable: bool = "true",
+ procMacro_enable: bool = true,
/// These proc-macros will be ignored when trying to expand them.
///
/// This config takes a map of crate names with the exported proc-macro names to ignore as values.
- procMacro_ignored: FxHashMap<Box<str>, Box<[Box<str>]>> = "{}",
+ procMacro_ignored: FxHashMap<Box<str>, Box<[Box<str>]>> = FxHashMap::default(),
/// Internal config, path to proc-macro server executable.
- procMacro_server: Option<Utf8PathBuf> = "null",
+ procMacro_server: Option<Utf8PathBuf> = None,
/// Exclude imports from find-all-references.
- references_excludeImports: bool = "false",
+ references_excludeImports: bool = false,
/// Exclude tests from find-all-references.
- references_excludeTests: bool = "false",
+ references_excludeTests: bool = false,
/// Command to be executed instead of 'cargo' for runnables.
- runnables_command: Option<String> = "null",
+ runnables_command: Option<String> = None,
/// Additional arguments to be passed to cargo for runnables such as
/// tests or binaries. For example, it may be `--release`.
- runnables_extraArgs: Vec<String> = "[]",
+ runnables_extraArgs: Vec<String> = vec![],
+ /// Additional arguments to be passed through Cargo to launched tests, benchmarks, or
+ /// doc-tests.
+ ///
+ /// Unless the launched target uses a
+ /// [custom test harness](https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-harness-field),
+ /// they will end up being interpreted as options to
+ /// [`rustc`’s built-in test harness (“libtest”)](https://doc.rust-lang.org/rustc/tests/index.html#cli-arguments).
+ runnables_extraTestBinaryArgs: Vec<String> = vec!["--show-output".to_owned()],
/// Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
/// projects, or "discover" to try to automatically find it if the `rustc-dev` component
@@ -537,81 +412,236 @@ config_data! {
/// crates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.
///
/// This option does not take effect until rust-analyzer is restarted.
- rustc_source: Option<String> = "null",
+ rustc_source: Option<String> = None,
/// Additional arguments to `rustfmt`.
- rustfmt_extraArgs: Vec<String> = "[]",
+ rustfmt_extraArgs: Vec<String> = vec![],
/// Advanced option, fully override the command rust-analyzer uses for
/// formatting. This should be the equivalent of `rustfmt` here, and
/// not that of `cargo fmt`. The file contents will be passed on the
/// standard input and the formatted result will be read from the
/// standard output.
- rustfmt_overrideCommand: Option<Vec<String>> = "null",
+ rustfmt_overrideCommand: Option<Vec<String>> = None,
/// Enables the use of rustfmt's unstable range formatting command for the
/// `textDocument/rangeFormatting` request. The rustfmt option is unstable and only
/// available on a nightly build.
- rustfmt_rangeFormatting_enable: bool = "false",
+ rustfmt_rangeFormatting_enable: bool = false,
+
+
+ /// Show full signature of the callable. Only shows parameters if disabled.
+ signatureInfo_detail: SignatureDetail = SignatureDetail::Full,
+ /// Show documentation.
+ signatureInfo_documentation_enable: bool = true,
+
+ /// Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.
+ typing_autoClosingAngleBrackets_enable: bool = false,
+
+ /// Workspace symbol search kind.
+ workspace_symbol_search_kind: WorkspaceSymbolSearchKindDef = WorkspaceSymbolSearchKindDef::OnlyTypes,
+ /// Limits the number of items returned from a workspace symbol search (Defaults to 128).
+ /// Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.
+ /// Other clients require all results upfront and might require a higher limit.
+ workspace_symbol_search_limit: usize = 128,
+ /// Workspace symbol search scope.
+ workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = WorkspaceSymbolSearchScopeDef::Workspace,
+ }
+}
+
+config_data! {
+ /// Local configurations can be overridden for every crate by placing a `rust-analyzer.toml` on crate root.
+ /// A config is searched for by traversing a "config tree" in a bottom-up fashion. It is chosen by the nearest-first principle.
+ local: struct LocalDefaultConfigData <- LocalConfigInput -> {
+ /// Toggles the additional completions that automatically add imports when completed.
+ /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
+ completion_autoimport_enable: bool = true,
+ /// Toggles the additional completions that automatically show method calls and field accesses
+ /// with `self` prefixed to them when inside a method.
+ completion_autoself_enable: bool = true,
+ /// Whether to add parenthesis and argument snippets when completing function.
+ completion_callable_snippets: CallableCompletionDef = CallableCompletionDef::FillArguments,
+ /// Whether to show full function/method signatures in completion docs.
+ completion_fullFunctionSignatures_enable: bool = false,
+ /// Maximum number of completions to return. If `None`, the limit is infinite.
+ completion_limit: Option<usize> = None,
+ /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
+ completion_postfix_enable: bool = true,
+ /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
+ completion_privateEditable_enable: bool = false,
+ /// Custom completion snippets.
+ // NOTE: we use IndexMap for deterministic serialization ordering
+ completion_snippets_custom: IndexMap<String, SnippetDef> = serde_json::from_str(r#"{
+ "Arc::new": {
+ "postfix": "arc",
+ "body": "Arc::new(${receiver})",
+ "requires": "std::sync::Arc",
+ "description": "Put the expression into an `Arc`",
+ "scope": "expr"
+ },
+ "Rc::new": {
+ "postfix": "rc",
+ "body": "Rc::new(${receiver})",
+ "requires": "std::rc::Rc",
+ "description": "Put the expression into an `Rc`",
+ "scope": "expr"
+ },
+ "Box::pin": {
+ "postfix": "pinbox",
+ "body": "Box::pin(${receiver})",
+ "requires": "std::boxed::Box",
+ "description": "Put the expression into a pinned `Box`",
+ "scope": "expr"
+ },
+ "Ok": {
+ "postfix": "ok",
+ "body": "Ok(${receiver})",
+ "description": "Wrap the expression in a `Result::Ok`",
+ "scope": "expr"
+ },
+ "Err": {
+ "postfix": "err",
+ "body": "Err(${receiver})",
+ "description": "Wrap the expression in a `Result::Err`",
+ "scope": "expr"
+ },
+ "Some": {
+ "postfix": "some",
+ "body": "Some(${receiver})",
+ "description": "Wrap the expression in an `Option::Some`",
+ "scope": "expr"
+ }
+ }"#).unwrap(),
+ /// Whether to enable term search based snippets like `Some(foo.bar().baz())`.
+ completion_termSearch_enable: bool = false,
+
+ /// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
+ highlightRelated_breakPoints_enable: bool = true,
+ /// Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure.
+ highlightRelated_closureCaptures_enable: bool = true,
+ /// Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
+ highlightRelated_exitPoints_enable: bool = true,
+ /// Enables highlighting of related references while the cursor is on any identifier.
+ highlightRelated_references_enable: bool = true,
+ /// Enables highlighting of all yield points while the cursor is on any `async` or `await` keywords.
+ highlightRelated_yieldPoints_enable: bool = true,
+
+ /// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
+ imports_granularity_enforce: bool = false,
+ /// How imports should be grouped into use statements.
+ imports_granularity_group: ImportGranularityDef = ImportGranularityDef::Crate,
+ /// Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines.
+ imports_group_enable: bool = true,
+ /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
+ imports_merge_glob: bool = true,
+ /// Prefer to unconditionally use imports of the core and alloc crate, over the std crate.
+ imports_preferNoStd | imports_prefer_no_std: bool = false,
+ /// Whether to prefer import paths containing a `prelude` module.
+ imports_preferPrelude: bool = false,
+ /// The path structure for newly inserted paths to use.
+ imports_prefix: ImportPrefixDef = ImportPrefixDef::Plain,
+
+
+ /// Whether to show inlay type hints for binding modes.
+ inlayHints_bindingModeHints_enable: bool = false,
+ /// Whether to show inlay type hints for method chains.
+ inlayHints_chainingHints_enable: bool = true,
+ /// Whether to show inlay hints after a closing `}` to indicate what item it belongs to.
+ inlayHints_closingBraceHints_enable: bool = true,
+ /// Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
+ /// to always show them).
+ inlayHints_closingBraceHints_minLines: usize = 25,
+ /// Whether to show inlay hints for closure captures.
+ inlayHints_closureCaptureHints_enable: bool = false,
+ /// Whether to show inlay type hints for return types of closures.
+ inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = ClosureReturnTypeHintsDef::Never,
+ /// Closure notation in type and chaining inlay hints.
+ inlayHints_closureStyle: ClosureStyle = ClosureStyle::ImplFn,
+ /// Whether to show enum variant discriminant hints.
+ inlayHints_discriminantHints_enable: DiscriminantHintsDef = DiscriminantHintsDef::Never,
+ /// Whether to show inlay hints for type adjustments.
+ inlayHints_expressionAdjustmentHints_enable: AdjustmentHintsDef = AdjustmentHintsDef::Never,
+ /// Whether to hide inlay hints for type adjustments outside of `unsafe` blocks.
+ inlayHints_expressionAdjustmentHints_hideOutsideUnsafe: bool = false,
+ /// Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).
+ inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = AdjustmentHintsModeDef::Prefix,
+ /// Whether to show implicit drop hints.
+ inlayHints_implicitDrops_enable: bool = false,
+ /// Whether to show inlay type hints for elided lifetimes in function signatures.
+ inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = LifetimeElisionDef::Never,
+ /// Whether to prefer using parameter names as the name for elided lifetime hints if possible.
+ inlayHints_lifetimeElisionHints_useParameterNames: bool = false,
+ /// Maximum length for inlay hints. Set to null to have an unlimited length.
+ inlayHints_maxLength: Option<usize> = Some(25),
+ /// Whether to show function parameter name inlay hints at the call
+ /// site.
+ inlayHints_parameterHints_enable: bool = true,
+ /// Whether to show exclusive range inlay hints.
+ inlayHints_rangeExclusiveHints_enable: bool = false,
+ /// Whether to show inlay hints for compiler inserted reborrows.
+ /// This setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.
+ inlayHints_reborrowHints_enable: ReborrowHintsDef = ReborrowHintsDef::Never,
+ /// Whether to render leading colons for type hints, and trailing colons for parameter hints.
+ inlayHints_renderColons: bool = true,
+ /// Whether to show inlay type hints for variables.
+ inlayHints_typeHints_enable: bool = true,
+ /// Whether to hide inlay type hints for `let` statements that initialize to a closure.
+ /// Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.
+ inlayHints_typeHints_hideClosureInitialization: bool = false,
+ /// Whether to hide inlay type hints for constructors.
+ inlayHints_typeHints_hideNamedConstructor: bool = false,
+
+
+ /// Join lines merges consecutive declaration and initialization of an assignment.
+ joinLines_joinAssignments: bool = true,
+ /// Join lines inserts else between consecutive ifs.
+ joinLines_joinElseIf: bool = true,
+ /// Join lines removes trailing commas.
+ joinLines_removeTrailingComma: bool = true,
+ /// Join lines unwraps trivial blocks.
+ joinLines_unwrapTrivialBlock: bool = true,
/// Inject additional highlighting into doc comments.
///
/// When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
/// doc links.
- semanticHighlighting_doc_comment_inject_enable: bool = "true",
+ semanticHighlighting_doc_comment_inject_enable: bool = true,
/// Whether the server is allowed to emit non-standard tokens and modifiers.
- semanticHighlighting_nonStandardTokens: bool = "true",
+ semanticHighlighting_nonStandardTokens: bool = true,
/// Use semantic tokens for operators.
///
/// When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
/// they are tagged with modifiers.
- semanticHighlighting_operator_enable: bool = "true",
+ semanticHighlighting_operator_enable: bool = true,
/// Use specialized semantic tokens for operators.
///
/// When enabled, rust-analyzer will emit special token types for operator tokens instead
/// of the generic `operator` token type.
- semanticHighlighting_operator_specialization_enable: bool = "false",
+ semanticHighlighting_operator_specialization_enable: bool = false,
/// Use semantic tokens for punctuation.
///
/// When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
/// they are tagged with modifiers or have a special role.
- semanticHighlighting_punctuation_enable: bool = "false",
+ semanticHighlighting_punctuation_enable: bool = false,
/// When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
/// calls.
- semanticHighlighting_punctuation_separate_macro_bang: bool = "false",
+ semanticHighlighting_punctuation_separate_macro_bang: bool = false,
/// Use specialized semantic tokens for punctuation.
///
/// When enabled, rust-analyzer will emit special token types for punctuation tokens instead
/// of the generic `punctuation` token type.
- semanticHighlighting_punctuation_specialization_enable: bool = "false",
+ semanticHighlighting_punctuation_specialization_enable: bool = false,
/// Use semantic tokens for strings.
///
/// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
/// By disabling semantic tokens for strings, other grammars can be used to highlight
/// their contents.
- semanticHighlighting_strings_enable: bool = "true",
-
- /// Show full signature of the callable. Only shows parameters if disabled.
- signatureInfo_detail: SignatureDetail = "\"full\"",
- /// Show documentation.
- signatureInfo_documentation_enable: bool = "true",
-
- /// Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.
- typing_autoClosingAngleBrackets_enable: bool = "false",
-
- /// Workspace symbol search kind.
- workspace_symbol_search_kind: WorkspaceSymbolSearchKindDef = "\"only_types\"",
- /// Limits the number of items returned from a workspace symbol search (Defaults to 128).
- /// Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.
- /// Other clients requires all results upfront and might require a higher limit.
- workspace_symbol_search_limit: usize = "128",
- /// Workspace symbol search scope.
- workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = "\"workspace\"",
+ semanticHighlighting_strings_enable: bool = true,
}
}
-impl Default for ConfigData {
- fn default() -> Self {
- ConfigData::from_json(serde_json::Value::Null, &mut Vec::new())
- }
+config_data! {
+ /// Configs that only make sense when they are set by a client. As such they can only be defined
+ /// by setting them using the client's settings (e.g. `settings.json` on VS Code).
+ client: struct ClientDefaultConfigData <- ClientConfigInput -> {}
}
#[derive(Debug, Clone)]
@@ -621,10 +651,40 @@ pub struct Config {
workspace_roots: Vec<AbsPathBuf>,
caps: lsp_types::ClientCapabilities,
root_path: AbsPathBuf,
- data: ConfigData,
detached_files: Vec<AbsPathBuf>,
snippets: Vec<Snippet>,
visual_studio_code_version: Option<Version>,
+
+ default_config: DefaultConfigData,
+ client_config: FullConfigInput,
+ user_config: GlobalLocalConfigInput,
+ #[allow(dead_code)]
+ ratoml_files: FxHashMap<SourceRootId, RatomlNode>,
+}
+
+#[derive(Clone, Debug)]
+struct RatomlNode {
+ #[allow(dead_code)]
+ node: GlobalLocalConfigInput,
+ #[allow(dead_code)]
+ parent: Option<SourceRootId>,
+}
+
+macro_rules! try_ {
+ ($expr:expr) => {
+ || -> _ { Some($expr) }()
+ };
+}
+macro_rules! try_or {
+ ($expr:expr, $or:expr) => {
+ try_!($expr).unwrap_or($or)
+ };
+}
+
+macro_rules! try_or_def {
+ ($expr:expr) => {
+ try_!($expr).unwrap_or_default()
+ };
}
type ParallelCachePrimingNumThreads = u8;
@@ -672,7 +732,7 @@ pub struct LensConfig {
pub location: AnnotationLocation,
}
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Deserialize)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum AnnotationLocation {
AboveName,
@@ -774,6 +834,8 @@ pub struct RunnablesConfig {
pub override_cargo: Option<String>,
/// Additional arguments for the `cargo`, e.g. `--release`.
pub cargo_extra_args: Vec<String>,
+ /// Additional arguments for the binary being run, if it is a test or benchmark.
+ pub extra_test_binary_args: Vec<String>,
}
/// Configuration for workspace symbol search requests.
@@ -827,13 +889,16 @@ impl Config {
) -> Self {
Config {
caps,
- data: ConfigData::default(),
detached_files: Vec::new(),
discovered_projects: Vec::new(),
root_path,
snippets: Default::default(),
workspace_roots,
visual_studio_code_version,
+ client_config: FullConfigInput::default(),
+ user_config: GlobalLocalConfigInput::default(),
+ ratoml_files: FxHashMap::default(),
+ default_config: DefaultConfigData::default(),
}
}
@@ -863,15 +928,19 @@ impl Config {
}
let mut errors = Vec::new();
self.detached_files =
- get_field::<Vec<Utf8PathBuf>>(&mut json, &mut errors, "detachedFiles", None, "[]")
+ get_field::<Vec<Utf8PathBuf>>(&mut json, &mut errors, "detachedFiles", None)
+ .unwrap_or_default()
.into_iter()
.map(AbsPathBuf::assert)
.collect();
patch_old_style::patch_json_for_outdated_configs(&mut json);
- self.data = ConfigData::from_json(json, &mut errors);
- tracing::debug!("deserialized config data: {:#?}", self.data);
+ self.client_config = FullConfigInput::from_json(json, &mut errors);
+ tracing::debug!(?self.client_config, "deserialized config data");
self.snippets.clear();
- for (name, def) in self.data.completion_snippets_custom.iter() {
+
+ let snips = self.completion_snippets_custom(None).to_owned();
+
+ for (name, def) in snips.iter() {
if def.prefix.is_empty() && def.postfix.is_empty() {
continue;
}
@@ -909,7 +978,7 @@ impl Config {
fn validate(&self, error_sink: &mut Vec<(String, serde_json::Error)>) {
use serde::de::Error;
- if self.data.check_command.is_empty() {
+ if self.check_command().is_empty() {
error_sink.push((
"/check/command".to_owned(),
serde_json::Error::custom("expected a non-empty string"),
@@ -918,7 +987,7 @@ impl Config {
}
pub fn json_schema() -> serde_json::Value {
- ConfigData::json_schema()
+ FullConfigInput::json_schema()
}
pub fn root_path(&self) -> &AbsPathBuf {
@@ -934,44 +1003,302 @@ impl Config {
}
}
-macro_rules! try_ {
- ($expr:expr) => {
- || -> _ { Some($expr) }()
- };
-}
-macro_rules! try_or {
- ($expr:expr, $or:expr) => {
- try_!($expr).unwrap_or($or)
- };
-}
+impl Config {
+ pub fn assist(&self, source_root: Option<SourceRootId>) -> AssistConfig {
+ AssistConfig {
+ snippet_cap: SnippetCap::new(self.experimental("snippetTextEdit")),
+ allowed: None,
+ insert_use: self.insert_use_config(source_root),
+ prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
+ assist_emit_must_use: self.assist_emitMustUse().to_owned(),
+ prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
+ }
+ }
-macro_rules! try_or_def {
- ($expr:expr) => {
- try_!($expr).unwrap_or_default()
- };
-}
+ pub fn completion(&self, source_root: Option<SourceRootId>) -> CompletionConfig {
+ CompletionConfig {
+ enable_postfix_completions: self.completion_postfix_enable(source_root).to_owned(),
+ enable_imports_on_the_fly: self.completion_autoimport_enable(source_root).to_owned()
+ && completion_item_edit_resolve(&self.caps),
+ enable_self_on_the_fly: self.completion_autoself_enable(source_root).to_owned(),
+ enable_private_editable: self.completion_privateEditable_enable(source_root).to_owned(),
+ full_function_signatures: self
+ .completion_fullFunctionSignatures_enable(source_root)
+ .to_owned(),
+ callable: match self.completion_callable_snippets(source_root) {
+ CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments),
+ CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses),
+ CallableCompletionDef::None => None,
+ },
+ insert_use: self.insert_use_config(source_root),
+ prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
+ snippet_cap: SnippetCap::new(try_or_def!(
+ self.caps
+ .text_document
+ .as_ref()?
+ .completion
+ .as_ref()?
+ .completion_item
+ .as_ref()?
+ .snippet_support?
+ )),
+ snippets: self.snippets.clone().to_vec(),
+ limit: self.completion_limit(source_root).to_owned(),
+ enable_term_search: self.completion_termSearch_enable(source_root).to_owned(),
+ prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
+ }
+ }
+
+ pub fn diagnostics(&self, source_root: Option<SourceRootId>) -> DiagnosticsConfig {
+ DiagnosticsConfig {
+ enabled: *self.diagnostics_enable(),
+ proc_attr_macros_enabled: self.expand_proc_attr_macros(),
+ proc_macros_enabled: *self.procMacro_enable(),
+ disable_experimental: !self.diagnostics_experimental_enable(),
+ disabled: self.diagnostics_disabled().clone(),
+ expr_fill_default: match self.assist_expressionFillDefault() {
+ ExprFillDefaultDef::Todo => ExprFillDefaultMode::Todo,
+ ExprFillDefaultDef::Default => ExprFillDefaultMode::Default,
+ },
+ insert_use: self.insert_use_config(source_root),
+ prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
+ prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
+ style_lints: self.diagnostics_styleLints_enable().to_owned(),
+ }
+ }
+ pub fn expand_proc_attr_macros(&self) -> bool {
+ self.procMacro_enable().to_owned() && self.procMacro_attributes_enable().to_owned()
+ }
+
+ pub fn highlight_related(&self, source_root: Option<SourceRootId>) -> HighlightRelatedConfig {
+ HighlightRelatedConfig {
+ references: self.highlightRelated_references_enable(source_root).to_owned(),
+ break_points: self.highlightRelated_breakPoints_enable(source_root).to_owned(),
+ exit_points: self.highlightRelated_exitPoints_enable(source_root).to_owned(),
+ yield_points: self.highlightRelated_yieldPoints_enable(source_root).to_owned(),
+ closure_captures: self.highlightRelated_closureCaptures_enable(source_root).to_owned(),
+ }
+ }
+
+ pub fn hover_actions(&self) -> HoverActionsConfig {
+ let enable = self.experimental("hoverActions") && self.hover_actions_enable().to_owned();
+ HoverActionsConfig {
+ implementations: enable && self.hover_actions_implementations_enable().to_owned(),
+ references: enable && self.hover_actions_references_enable().to_owned(),
+ run: enable && self.hover_actions_run_enable().to_owned(),
+ debug: enable && self.hover_actions_debug_enable().to_owned(),
+ goto_type_def: enable && self.hover_actions_gotoTypeDef_enable().to_owned(),
+ }
+ }
+
+ pub fn hover(&self) -> HoverConfig {
+ let mem_kind = |kind| match kind {
+ MemoryLayoutHoverRenderKindDef::Both => MemoryLayoutHoverRenderKind::Both,
+ MemoryLayoutHoverRenderKindDef::Decimal => MemoryLayoutHoverRenderKind::Decimal,
+ MemoryLayoutHoverRenderKindDef::Hexadecimal => MemoryLayoutHoverRenderKind::Hexadecimal,
+ };
+ HoverConfig {
+ links_in_hover: self.hover_links_enable().to_owned(),
+ memory_layout: self.hover_memoryLayout_enable().then_some(MemoryLayoutHoverConfig {
+ size: self.hover_memoryLayout_size().map(mem_kind),
+ offset: self.hover_memoryLayout_offset().map(mem_kind),
+ alignment: self.hover_memoryLayout_alignment().map(mem_kind),
+ niches: self.hover_memoryLayout_niches().unwrap_or_default(),
+ }),
+ documentation: self.hover_documentation_enable().to_owned(),
+ format: {
+ let is_markdown = try_or_def!(self
+ .caps
+ .text_document
+ .as_ref()?
+ .hover
+ .as_ref()?
+ .content_format
+ .as_ref()?
+ .as_slice())
+ .contains(&MarkupKind::Markdown);
+ if is_markdown {
+ HoverDocFormat::Markdown
+ } else {
+ HoverDocFormat::PlainText
+ }
+ },
+ keywords: self.hover_documentation_keywords_enable().to_owned(),
+ max_trait_assoc_items_count: self.hover_show_traitAssocItems().to_owned(),
+ max_struct_field_count: self.hover_show_structFields().to_owned(),
+ }
+ }
+
+ pub fn inlay_hints(&self, source_root: Option<SourceRootId>) -> InlayHintsConfig {
+ let client_capability_fields = self
+ .caps
+ .text_document
+ .as_ref()
+ .and_then(|text| text.inlay_hint.as_ref())
+ .and_then(|inlay_hint_caps| inlay_hint_caps.resolve_support.as_ref())
+ .map(|inlay_resolve| inlay_resolve.properties.iter())
+ .into_iter()
+ .flatten()
+ .cloned()
+ .collect::<FxHashSet<_>>();
+
+ InlayHintsConfig {
+ render_colons: self.inlayHints_renderColons(source_root).to_owned(),
+ type_hints: self.inlayHints_typeHints_enable(source_root).to_owned(),
+ parameter_hints: self.inlayHints_parameterHints_enable(source_root).to_owned(),
+ chaining_hints: self.inlayHints_chainingHints_enable(source_root).to_owned(),
+ discriminant_hints: match self.inlayHints_discriminantHints_enable(source_root) {
+ DiscriminantHintsDef::Always => ide::DiscriminantHints::Always,
+ DiscriminantHintsDef::Never => ide::DiscriminantHints::Never,
+ DiscriminantHintsDef::Fieldless => ide::DiscriminantHints::Fieldless,
+ },
+ closure_return_type_hints: match self
+ .inlayHints_closureReturnTypeHints_enable(source_root)
+ {
+ ClosureReturnTypeHintsDef::Always => ide::ClosureReturnTypeHints::Always,
+ ClosureReturnTypeHintsDef::Never => ide::ClosureReturnTypeHints::Never,
+ ClosureReturnTypeHintsDef::WithBlock => ide::ClosureReturnTypeHints::WithBlock,
+ },
+ lifetime_elision_hints: match self.inlayHints_lifetimeElisionHints_enable(source_root) {
+ LifetimeElisionDef::Always => ide::LifetimeElisionHints::Always,
+ LifetimeElisionDef::Never => ide::LifetimeElisionHints::Never,
+ LifetimeElisionDef::SkipTrivial => ide::LifetimeElisionHints::SkipTrivial,
+ },
+ hide_named_constructor_hints: self
+ .inlayHints_typeHints_hideNamedConstructor(source_root)
+ .to_owned(),
+ hide_closure_initialization_hints: self
+ .inlayHints_typeHints_hideClosureInitialization(source_root)
+ .to_owned(),
+ closure_style: match self.inlayHints_closureStyle(source_root) {
+ ClosureStyle::ImplFn => hir::ClosureStyle::ImplFn,
+ ClosureStyle::RustAnalyzer => hir::ClosureStyle::RANotation,
+ ClosureStyle::WithId => hir::ClosureStyle::ClosureWithId,
+ ClosureStyle::Hide => hir::ClosureStyle::Hide,
+ },
+ closure_capture_hints: self
+ .inlayHints_closureCaptureHints_enable(source_root)
+ .to_owned(),
+ adjustment_hints: match self.inlayHints_expressionAdjustmentHints_enable(source_root) {
+ AdjustmentHintsDef::Always => ide::AdjustmentHints::Always,
+ AdjustmentHintsDef::Never => {
+ match self.inlayHints_reborrowHints_enable(source_root) {
+ ReborrowHintsDef::Always | ReborrowHintsDef::Mutable => {
+ ide::AdjustmentHints::ReborrowOnly
+ }
+ ReborrowHintsDef::Never => ide::AdjustmentHints::Never,
+ }
+ }
+ AdjustmentHintsDef::Reborrow => ide::AdjustmentHints::ReborrowOnly,
+ },
+ adjustment_hints_mode: match self.inlayHints_expressionAdjustmentHints_mode(source_root)
+ {
+ AdjustmentHintsModeDef::Prefix => ide::AdjustmentHintsMode::Prefix,
+ AdjustmentHintsModeDef::Postfix => ide::AdjustmentHintsMode::Postfix,
+ AdjustmentHintsModeDef::PreferPrefix => ide::AdjustmentHintsMode::PreferPrefix,
+ AdjustmentHintsModeDef::PreferPostfix => ide::AdjustmentHintsMode::PreferPostfix,
+ },
+ adjustment_hints_hide_outside_unsafe: self
+ .inlayHints_expressionAdjustmentHints_hideOutsideUnsafe(source_root)
+ .to_owned(),
+ binding_mode_hints: self.inlayHints_bindingModeHints_enable(source_root).to_owned(),
+ param_names_for_lifetime_elision_hints: self
+ .inlayHints_lifetimeElisionHints_useParameterNames(source_root)
+ .to_owned(),
+ max_length: self.inlayHints_maxLength(source_root).to_owned(),
+ closing_brace_hints_min_lines: if self
+ .inlayHints_closingBraceHints_enable(source_root)
+ .to_owned()
+ {
+ Some(self.inlayHints_closingBraceHints_minLines(source_root).to_owned())
+ } else {
+ None
+ },
+ fields_to_resolve: InlayFieldsToResolve {
+ resolve_text_edits: client_capability_fields.contains("textEdits"),
+ resolve_hint_tooltip: client_capability_fields.contains("tooltip"),
+ resolve_label_tooltip: client_capability_fields.contains("label.tooltip"),
+ resolve_label_location: client_capability_fields.contains("label.location"),
+ resolve_label_command: client_capability_fields.contains("label.command"),
+ },
+ implicit_drop_hints: self.inlayHints_implicitDrops_enable(source_root).to_owned(),
+ range_exclusive_hints: self
+ .inlayHints_rangeExclusiveHints_enable(source_root)
+ .to_owned(),
+ }
+ }
+
+ fn insert_use_config(&self, source_root: Option<SourceRootId>) -> InsertUseConfig {
+ InsertUseConfig {
+ granularity: match self.imports_granularity_group(source_root) {
+ ImportGranularityDef::Preserve => ImportGranularity::Preserve,
+ ImportGranularityDef::Item => ImportGranularity::Item,
+ ImportGranularityDef::Crate => ImportGranularity::Crate,
+ ImportGranularityDef::Module => ImportGranularity::Module,
+ ImportGranularityDef::One => ImportGranularity::One,
+ },
+ enforce_granularity: self.imports_granularity_enforce(source_root).to_owned(),
+ prefix_kind: match self.imports_prefix(source_root) {
+ ImportPrefixDef::Plain => PrefixKind::Plain,
+ ImportPrefixDef::ByCrate => PrefixKind::ByCrate,
+ ImportPrefixDef::BySelf => PrefixKind::BySelf,
+ },
+ group: self.imports_group_enable(source_root).to_owned(),
+ skip_glob_imports: !self.imports_merge_glob(source_root),
+ }
+ }
+
+ pub fn join_lines(&self, source_root: Option<SourceRootId>) -> JoinLinesConfig {
+ JoinLinesConfig {
+ join_else_if: self.joinLines_joinElseIf(source_root).to_owned(),
+ remove_trailing_comma: self.joinLines_removeTrailingComma(source_root).to_owned(),
+ unwrap_trivial_blocks: self.joinLines_unwrapTrivialBlock(source_root).to_owned(),
+ join_assignments: self.joinLines_joinAssignments(source_root).to_owned(),
+ }
+ }
+
+ pub fn highlighting_non_standard_tokens(&self, source_root: Option<SourceRootId>) -> bool {
+ self.semanticHighlighting_nonStandardTokens(source_root).to_owned()
+ }
+
+ pub fn highlighting_config(&self, source_root: Option<SourceRootId>) -> HighlightConfig {
+ HighlightConfig {
+ strings: self.semanticHighlighting_strings_enable(source_root).to_owned(),
+ punctuation: self.semanticHighlighting_punctuation_enable(source_root).to_owned(),
+ specialize_punctuation: self
+ .semanticHighlighting_punctuation_specialization_enable(source_root)
+ .to_owned(),
+ macro_bang: self
+ .semanticHighlighting_punctuation_separate_macro_bang(source_root)
+ .to_owned(),
+ operator: self.semanticHighlighting_operator_enable(source_root).to_owned(),
+ specialize_operator: self
+ .semanticHighlighting_operator_specialization_enable(source_root)
+ .to_owned(),
+ inject_doc_comment: self
+ .semanticHighlighting_doc_comment_inject_enable(source_root)
+ .to_owned(),
+ syntactic_name_ref_highlighting: false,
+ }
+ }
-impl Config {
pub fn has_linked_projects(&self) -> bool {
- !self.data.linkedProjects.is_empty()
+ !self.linkedProjects().is_empty()
}
pub fn linked_manifests(&self) -> impl Iterator<Item = &Utf8Path> + '_ {
- self.data.linkedProjects.iter().filter_map(|it| match it {
+ self.linkedProjects().iter().filter_map(|it| match it {
ManifestOrProjectJson::Manifest(p) => Some(&**p),
ManifestOrProjectJson::ProjectJson(_) => None,
})
}
pub fn has_linked_project_jsons(&self) -> bool {
- self.data
- .linkedProjects
- .iter()
- .any(|it| matches!(it, ManifestOrProjectJson::ProjectJson(_)))
+ self.linkedProjects().iter().any(|it| matches!(it, ManifestOrProjectJson::ProjectJson(_)))
}
pub fn linked_or_discovered_projects(&self) -> Vec<LinkedProject> {
- match self.data.linkedProjects.as_slice() {
+ match self.linkedProjects().as_slice() {
[] => {
let exclude_dirs: Vec<_> =
- self.data.files_excludeDirs.iter().map(|p| self.root_path.join(p)).collect();
+ self.files_excludeDirs().iter().map(|p| self.root_path.join(p)).collect();
self.discovered_projects
.iter()
.filter(
@@ -1025,7 +1352,7 @@ impl Config {
}
pub fn prefill_caches(&self) -> bool {
- self.data.cachePriming_enable
+ self.cachePriming_enable().to_owned()
}
pub fn location_link(&self) -> bool {
@@ -1162,117 +1489,95 @@ impl Config {
}
pub fn publish_diagnostics(&self) -> bool {
- self.data.diagnostics_enable
- }
-
- pub fn diagnostics(&self) -> DiagnosticsConfig {
- DiagnosticsConfig {
- enabled: self.data.diagnostics_enable,
- proc_attr_macros_enabled: self.expand_proc_attr_macros(),
- proc_macros_enabled: self.data.procMacro_enable,
- disable_experimental: !self.data.diagnostics_experimental_enable,
- disabled: self.data.diagnostics_disabled.clone(),
- expr_fill_default: match self.data.assist_expressionFillDefault {
- ExprFillDefaultDef::Todo => ExprFillDefaultMode::Todo,
- ExprFillDefaultDef::Default => ExprFillDefaultMode::Default,
- },
- insert_use: self.insert_use_config(),
- prefer_no_std: self.data.imports_preferNoStd,
- prefer_prelude: self.data.imports_preferPrelude,
- style_lints: self.data.diagnostics_styleLints_enable,
- }
+ self.diagnostics_enable().to_owned()
}
pub fn diagnostics_map(&self) -> DiagnosticsMapConfig {
DiagnosticsMapConfig {
- remap_prefix: self.data.diagnostics_remapPrefix.clone(),
- warnings_as_info: self.data.diagnostics_warningsAsInfo.clone(),
- warnings_as_hint: self.data.diagnostics_warningsAsHint.clone(),
- check_ignore: self.data.check_ignore.clone(),
+ remap_prefix: self.diagnostics_remapPrefix().clone(),
+ warnings_as_info: self.diagnostics_warningsAsInfo().clone(),
+ warnings_as_hint: self.diagnostics_warningsAsHint().clone(),
+ check_ignore: self.check_ignore().clone(),
}
}
pub fn extra_args(&self) -> &Vec<String> {
- &self.data.cargo_extraArgs
+ self.cargo_extraArgs()
}
pub fn extra_env(&self) -> &FxHashMap<String, String> {
- &self.data.cargo_extraEnv
+ self.cargo_extraEnv()
}
pub fn check_extra_args(&self) -> Vec<String> {
let mut extra_args = self.extra_args().clone();
- extra_args.extend_from_slice(&self.data.check_extraArgs);
+ extra_args.extend_from_slice(self.check_extraArgs());
extra_args
}
pub fn check_extra_env(&self) -> FxHashMap<String, String> {
- let mut extra_env = self.data.cargo_extraEnv.clone();
- extra_env.extend(self.data.check_extraEnv.clone());
+ let mut extra_env = self.cargo_extraEnv().clone();
+ extra_env.extend(self.check_extraEnv().clone());
extra_env
}
pub fn lru_parse_query_capacity(&self) -> Option<usize> {
- self.data.lru_capacity
+ self.lru_capacity().to_owned()
}
- pub fn lru_query_capacities(&self) -> Option<&FxHashMap<Box<str>, usize>> {
- self.data.lru_query_capacities.is_empty().not().then_some(&self.data.lru_query_capacities)
+ pub fn lru_query_capacities_config(&self) -> Option<&FxHashMap<Box<str>, usize>> {
+ self.lru_query_capacities().is_empty().not().then(|| self.lru_query_capacities())
}
pub fn proc_macro_srv(&self) -> Option<AbsPathBuf> {
- let path = self.data.procMacro_server.clone()?;
+ let path = self.procMacro_server().clone()?;
Some(AbsPathBuf::try_from(path).unwrap_or_else(|path| self.root_path.join(path)))
}
pub fn ignored_proc_macros(&self) -> &FxHashMap<Box<str>, Box<[Box<str>]>> {
- &self.data.procMacro_ignored
+ self.procMacro_ignored()
}
pub fn expand_proc_macros(&self) -> bool {
- self.data.procMacro_enable
- }
-
- pub fn expand_proc_attr_macros(&self) -> bool {
- self.data.procMacro_enable && self.data.procMacro_attributes_enable
+ self.procMacro_enable().to_owned()
}
pub fn files(&self) -> FilesConfig {
FilesConfig {
- watcher: match self.data.files_watcher {
+ watcher: match self.files_watcher() {
FilesWatcherDef::Client if self.did_change_watched_files_dynamic_registration() => {
FilesWatcher::Client
}
_ => FilesWatcher::Server,
},
- exclude: self.data.files_excludeDirs.iter().map(|it| self.root_path.join(it)).collect(),
+ exclude: self.files_excludeDirs().iter().map(|it| self.root_path.join(it)).collect(),
}
}
pub fn notifications(&self) -> NotificationsConfig {
NotificationsConfig {
- cargo_toml_not_found: self.data.notifications_cargoTomlNotFound,
- unindexed_project: self.data.notifications_unindexedProject,
+ cargo_toml_not_found: self.notifications_cargoTomlNotFound().to_owned(),
+ unindexed_project: self.notifications_unindexedProject().to_owned(),
}
}
- pub fn cargo_autoreload(&self) -> bool {
- self.data.cargo_autoreload
+ pub fn cargo_autoreload_config(&self) -> bool {
+ self.cargo_autoreload().to_owned()
}
pub fn run_build_scripts(&self) -> bool {
- self.data.cargo_buildScripts_enable || self.data.procMacro_enable
+ self.cargo_buildScripts_enable().to_owned() || self.procMacro_enable().to_owned()
}
pub fn cargo(&self) -> CargoConfig {
- let rustc_source = self.data.rustc_source.as_ref().map(|rustc_src| {
+ let rustc_source = self.rustc_source().as_ref().map(|rustc_src| {
if rustc_src == "discover" {
RustLibSource::Discover
} else {
RustLibSource::Path(self.root_path.join(rustc_src))
}
});
- let sysroot = self.data.cargo_sysroot.as_ref().map(|sysroot| {
+ let sysroot = self.cargo_sysroot().as_ref().map(|sysroot| {
if sysroot == "discover" {
RustLibSource::Discover
} else {
@@ -1280,88 +1585,91 @@ impl Config {
}
});
let sysroot_src =
- self.data.cargo_sysrootSrc.as_ref().map(|sysroot| self.root_path.join(sysroot));
- let sysroot_query_metadata = self.data.cargo_sysrootQueryMetadata;
+ self.cargo_sysrootSrc().as_ref().map(|sysroot| self.root_path.join(sysroot));
+ let sysroot_query_metadata = self.cargo_sysrootQueryMetadata();
CargoConfig {
- features: match &self.data.cargo_features {
+ all_targets: *self.cargo_allTargets(),
+ features: match &self.cargo_features() {
CargoFeaturesDef::All => CargoFeatures::All,
CargoFeaturesDef::Selected(features) => CargoFeatures::Selected {
features: features.clone(),
- no_default_features: self.data.cargo_noDefaultFeatures,
+ no_default_features: self.cargo_noDefaultFeatures().to_owned(),
},
},
- target: self.data.cargo_target.clone(),
+ target: self.cargo_target().clone(),
sysroot,
- sysroot_query_metadata,
+ sysroot_query_metadata: *sysroot_query_metadata,
sysroot_src,
rustc_source,
cfg_overrides: project_model::CfgOverrides {
global: CfgDiff::new(
- self.data
- .cargo_cfgs
+ self.cargo_cfgs()
.iter()
- .map(|(key, val)| {
- if val.is_empty() {
- CfgAtom::Flag(key.into())
- } else {
- CfgAtom::KeyValue { key: key.into(), value: val.into() }
- }
+ .map(|(key, val)| match val {
+ Some(val) => CfgAtom::KeyValue { key: key.into(), value: val.into() },
+ None => CfgAtom::Flag(key.into()),
})
.collect(),
vec![],
)
.unwrap(),
- selective: self
- .data
- .cargo_unsetTest
- .iter()
- .map(|it| {
- (
- it.clone(),
- CfgDiff::new(vec![], vec![CfgAtom::Flag("test".into())]).unwrap(),
- )
- })
- .collect(),
+ selective: Default::default(),
},
- wrap_rustc_in_build_scripts: self.data.cargo_buildScripts_useRustcWrapper,
- invocation_strategy: match self.data.cargo_buildScripts_invocationStrategy {
+ wrap_rustc_in_build_scripts: *self.cargo_buildScripts_useRustcWrapper(),
+ invocation_strategy: match self.cargo_buildScripts_invocationStrategy() {
InvocationStrategy::Once => project_model::InvocationStrategy::Once,
InvocationStrategy::PerWorkspace => project_model::InvocationStrategy::PerWorkspace,
},
- invocation_location: match self.data.cargo_buildScripts_invocationLocation {
+ invocation_location: match self.cargo_buildScripts_invocationLocation() {
InvocationLocation::Root => {
project_model::InvocationLocation::Root(self.root_path.clone())
}
InvocationLocation::Workspace => project_model::InvocationLocation::Workspace,
},
- run_build_script_command: self.data.cargo_buildScripts_overrideCommand.clone(),
- extra_args: self.data.cargo_extraArgs.clone(),
- extra_env: self.data.cargo_extraEnv.clone(),
+ run_build_script_command: self.cargo_buildScripts_overrideCommand().clone(),
+ extra_args: self.cargo_extraArgs().clone(),
+ extra_env: self.cargo_extraEnv().clone(),
target_dir: self.target_dir_from_config(),
}
}
pub fn rustfmt(&self) -> RustfmtConfig {
- match &self.data.rustfmt_overrideCommand {
+ match &self.rustfmt_overrideCommand() {
Some(args) if !args.is_empty() => {
let mut args = args.clone();
let command = args.remove(0);
RustfmtConfig::CustomCommand { command, args }
}
Some(_) | None => RustfmtConfig::Rustfmt {
- extra_args: self.data.rustfmt_extraArgs.clone(),
- enable_range_formatting: self.data.rustfmt_rangeFormatting_enable,
+ extra_args: self.rustfmt_extraArgs().clone(),
+ enable_range_formatting: *self.rustfmt_rangeFormatting_enable(),
},
}
}
pub fn flycheck_workspace(&self) -> bool {
- self.data.check_workspace
+ *self.check_workspace()
+ }
+
+ pub fn cargo_test_options(&self) -> CargoOptions {
+ CargoOptions {
+ target_triples: self.cargo_target().clone().into_iter().collect(),
+ all_targets: false,
+ no_default_features: *self.cargo_noDefaultFeatures(),
+ all_features: matches!(self.cargo_features(), CargoFeaturesDef::All),
+ features: match self.cargo_features().clone() {
+ CargoFeaturesDef::All => vec![],
+ CargoFeaturesDef::Selected(it) => it,
+ },
+ extra_args: self.extra_args().clone(),
+ extra_env: self.extra_env().clone(),
+ target_dir: self.target_dir_from_config(),
+ }
}
pub fn flycheck(&self) -> FlycheckConfig {
- match &self.data.check_overrideCommand {
+ match &self.check_overrideCommand() {
Some(args) if !args.is_empty() => {
let mut args = args.clone();
let command = args.remove(0);
@@ -1369,13 +1677,13 @@ impl Config {
command,
args,
extra_env: self.check_extra_env(),
- invocation_strategy: match self.data.check_invocationStrategy {
+ invocation_strategy: match self.check_invocationStrategy() {
InvocationStrategy::Once => flycheck::InvocationStrategy::Once,
InvocationStrategy::PerWorkspace => {
flycheck::InvocationStrategy::PerWorkspace
}
},
- invocation_location: match self.data.check_invocationLocation {
+ invocation_location: match self.check_invocationLocation() {
InvocationLocation::Root => {
flycheck::InvocationLocation::Root(self.root_path.clone())
}
@@ -1384,44 +1692,43 @@ impl Config {
}
}
Some(_) | None => FlycheckConfig::CargoCommand {
- command: self.data.check_command.clone(),
- target_triples: self
- .data
- .check_targets
- .clone()
- .and_then(|targets| match &targets.0[..] {
- [] => None,
- targets => Some(targets.into()),
- })
- .unwrap_or_else(|| self.data.cargo_target.clone().into_iter().collect()),
- all_targets: self.data.check_allTargets,
- no_default_features: self
- .data
- .check_noDefaultFeatures
- .unwrap_or(self.data.cargo_noDefaultFeatures),
- all_features: matches!(
- self.data.check_features.as_ref().unwrap_or(&self.data.cargo_features),
- CargoFeaturesDef::All
- ),
- features: match self
- .data
- .check_features
- .clone()
- .unwrap_or_else(|| self.data.cargo_features.clone())
- {
- CargoFeaturesDef::All => vec![],
- CargoFeaturesDef::Selected(it) => it,
+ command: self.check_command().clone(),
+ options: CargoOptions {
+ target_triples: self
+ .check_targets()
+ .clone()
+ .and_then(|targets| match &targets.0[..] {
+ [] => None,
+ targets => Some(targets.into()),
+ })
+ .unwrap_or_else(|| self.cargo_target().clone().into_iter().collect()),
+ all_targets: self.check_allTargets().unwrap_or(*self.cargo_allTargets()),
+ no_default_features: self
+ .check_noDefaultFeatures()
+ .unwrap_or(*self.cargo_noDefaultFeatures()),
+ all_features: matches!(
+ self.check_features().as_ref().unwrap_or(self.cargo_features()),
+ CargoFeaturesDef::All
+ ),
+ features: match self
+ .check_features()
+ .clone()
+ .unwrap_or_else(|| self.cargo_features().clone())
+ {
+ CargoFeaturesDef::All => vec![],
+ CargoFeaturesDef::Selected(it) => it,
+ },
+ extra_args: self.check_extra_args(),
+ extra_env: self.check_extra_env(),
+ target_dir: self.target_dir_from_config(),
},
- extra_args: self.check_extra_args(),
- extra_env: self.check_extra_env(),
ansi_color_output: self.color_diagnostic_output(),
- target_dir: self.target_dir_from_config(),
},
}
}
fn target_dir_from_config(&self) -> Option<Utf8PathBuf> {
- self.data.cargo_targetDir.as_ref().and_then(|target_dir| match target_dir {
+ self.cargo_targetDir().as_ref().and_then(|target_dir| match target_dir {
TargetDirectory::UseSubdirectory(true) => {
Some(Utf8PathBuf::from("target/rust-analyzer"))
}
@@ -1432,294 +1739,67 @@ impl Config {
}
pub fn check_on_save(&self) -> bool {
- self.data.checkOnSave
+ *self.checkOnSave()
}
pub fn script_rebuild_on_save(&self) -> bool {
- self.data.cargo_buildScripts_rebuildOnSave
+ *self.cargo_buildScripts_rebuildOnSave()
}
pub fn runnables(&self) -> RunnablesConfig {
RunnablesConfig {
- override_cargo: self.data.runnables_command.clone(),
- cargo_extra_args: self.data.runnables_extraArgs.clone(),
- }
- }
-
- pub fn inlay_hints(&self) -> InlayHintsConfig {
- let client_capability_fields = self
- .caps
- .text_document
- .as_ref()
- .and_then(|text| text.inlay_hint.as_ref())
- .and_then(|inlay_hint_caps| inlay_hint_caps.resolve_support.as_ref())
- .map(|inlay_resolve| inlay_resolve.properties.iter())
- .into_iter()
- .flatten()
- .cloned()
- .collect::<FxHashSet<_>>();
-
- InlayHintsConfig {
- render_colons: self.data.inlayHints_renderColons,
- type_hints: self.data.inlayHints_typeHints_enable,
- parameter_hints: self.data.inlayHints_parameterHints_enable,
- chaining_hints: self.data.inlayHints_chainingHints_enable,
- implicit_drop_hints: self.data.inlayHints_implicitDrops_enable,
- discriminant_hints: match self.data.inlayHints_discriminantHints_enable {
- DiscriminantHintsDef::Always => ide::DiscriminantHints::Always,
- DiscriminantHintsDef::Never => ide::DiscriminantHints::Never,
- DiscriminantHintsDef::Fieldless => ide::DiscriminantHints::Fieldless,
- },
- closure_return_type_hints: match self.data.inlayHints_closureReturnTypeHints_enable {
- ClosureReturnTypeHintsDef::Always => ide::ClosureReturnTypeHints::Always,
- ClosureReturnTypeHintsDef::Never => ide::ClosureReturnTypeHints::Never,
- ClosureReturnTypeHintsDef::WithBlock => ide::ClosureReturnTypeHints::WithBlock,
- },
- lifetime_elision_hints: match self.data.inlayHints_lifetimeElisionHints_enable {
- LifetimeElisionDef::Always => ide::LifetimeElisionHints::Always,
- LifetimeElisionDef::Never => ide::LifetimeElisionHints::Never,
- LifetimeElisionDef::SkipTrivial => ide::LifetimeElisionHints::SkipTrivial,
- },
- hide_named_constructor_hints: self.data.inlayHints_typeHints_hideNamedConstructor,
- hide_closure_initialization_hints: self
- .data
- .inlayHints_typeHints_hideClosureInitialization,
- closure_style: match self.data.inlayHints_closureStyle {
- ClosureStyle::ImplFn => hir::ClosureStyle::ImplFn,
- ClosureStyle::RustAnalyzer => hir::ClosureStyle::RANotation,
- ClosureStyle::WithId => hir::ClosureStyle::ClosureWithId,
- ClosureStyle::Hide => hir::ClosureStyle::Hide,
- },
- closure_capture_hints: self.data.inlayHints_closureCaptureHints_enable,
- adjustment_hints: match self.data.inlayHints_expressionAdjustmentHints_enable {
- AdjustmentHintsDef::Always => ide::AdjustmentHints::Always,
- AdjustmentHintsDef::Never => match self.data.inlayHints_reborrowHints_enable {
- ReborrowHintsDef::Always | ReborrowHintsDef::Mutable => {
- ide::AdjustmentHints::ReborrowOnly
- }
- ReborrowHintsDef::Never => ide::AdjustmentHints::Never,
- },
- AdjustmentHintsDef::Reborrow => ide::AdjustmentHints::ReborrowOnly,
- },
- adjustment_hints_mode: match self.data.inlayHints_expressionAdjustmentHints_mode {
- AdjustmentHintsModeDef::Prefix => ide::AdjustmentHintsMode::Prefix,
- AdjustmentHintsModeDef::Postfix => ide::AdjustmentHintsMode::Postfix,
- AdjustmentHintsModeDef::PreferPrefix => ide::AdjustmentHintsMode::PreferPrefix,
- AdjustmentHintsModeDef::PreferPostfix => ide::AdjustmentHintsMode::PreferPostfix,
- },
- adjustment_hints_hide_outside_unsafe: self
- .data
- .inlayHints_expressionAdjustmentHints_hideOutsideUnsafe,
- binding_mode_hints: self.data.inlayHints_bindingModeHints_enable,
- param_names_for_lifetime_elision_hints: self
- .data
- .inlayHints_lifetimeElisionHints_useParameterNames,
- max_length: self.data.inlayHints_maxLength,
- closing_brace_hints_min_lines: if self.data.inlayHints_closingBraceHints_enable {
- Some(self.data.inlayHints_closingBraceHints_minLines)
- } else {
- None
- },
- range_exclusive_hints: self.data.inlayHints_rangeExclusiveHints_enable,
- fields_to_resolve: InlayFieldsToResolve {
- resolve_text_edits: client_capability_fields.contains("textEdits"),
- resolve_hint_tooltip: client_capability_fields.contains("tooltip"),
- resolve_label_tooltip: client_capability_fields.contains("label.tooltip"),
- resolve_label_location: client_capability_fields.contains("label.location"),
- resolve_label_command: client_capability_fields.contains("label.command"),
- },
- }
- }
-
- fn insert_use_config(&self) -> InsertUseConfig {
- InsertUseConfig {
- granularity: match self.data.imports_granularity_group {
- ImportGranularityDef::Preserve => ImportGranularity::Preserve,
- ImportGranularityDef::Item => ImportGranularity::Item,
- ImportGranularityDef::Crate => ImportGranularity::Crate,
- ImportGranularityDef::Module => ImportGranularity::Module,
- ImportGranularityDef::One => ImportGranularity::One,
- },
- enforce_granularity: self.data.imports_granularity_enforce,
- prefix_kind: match self.data.imports_prefix {
- ImportPrefixDef::Plain => PrefixKind::Plain,
- ImportPrefixDef::ByCrate => PrefixKind::ByCrate,
- ImportPrefixDef::BySelf => PrefixKind::BySelf,
- },
- group: self.data.imports_group_enable,
- skip_glob_imports: !self.data.imports_merge_glob,
- }
- }
-
- pub fn completion(&self) -> CompletionConfig {
- CompletionConfig {
- enable_postfix_completions: self.data.completion_postfix_enable,
- enable_imports_on_the_fly: self.data.completion_autoimport_enable
- && completion_item_edit_resolve(&self.caps),
- enable_self_on_the_fly: self.data.completion_autoself_enable,
- enable_private_editable: self.data.completion_privateEditable_enable,
- enable_term_search: self.data.completion_termSearch_enable,
- full_function_signatures: self.data.completion_fullFunctionSignatures_enable,
- callable: match self.data.completion_callable_snippets {
- CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments),
- CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses),
- CallableCompletionDef::None => None,
- },
- insert_use: self.insert_use_config(),
- prefer_no_std: self.data.imports_preferNoStd,
- prefer_prelude: self.data.imports_preferPrelude,
- snippet_cap: SnippetCap::new(try_or_def!(
- self.caps
- .text_document
- .as_ref()?
- .completion
- .as_ref()?
- .completion_item
- .as_ref()?
- .snippet_support?
- )),
- snippets: self.snippets.clone(),
- limit: self.data.completion_limit,
+ override_cargo: self.runnables_command().clone(),
+ cargo_extra_args: self.runnables_extraArgs().clone(),
+ extra_test_binary_args: self.runnables_extraTestBinaryArgs().clone(),
}
}
pub fn find_all_refs_exclude_imports(&self) -> bool {
- self.data.references_excludeImports
+ *self.references_excludeImports()
}
pub fn find_all_refs_exclude_tests(&self) -> bool {
- self.data.references_excludeTests
+ *self.references_excludeTests()
}
pub fn snippet_cap(&self) -> bool {
self.experimental("snippetTextEdit")
}
- pub fn assist(&self) -> AssistConfig {
- AssistConfig {
- snippet_cap: SnippetCap::new(self.experimental("snippetTextEdit")),
- allowed: None,
- insert_use: self.insert_use_config(),
- prefer_no_std: self.data.imports_preferNoStd,
- prefer_prelude: self.data.imports_preferPrelude,
- assist_emit_must_use: self.data.assist_emitMustUse,
- }
- }
-
- pub fn join_lines(&self) -> JoinLinesConfig {
- JoinLinesConfig {
- join_else_if: self.data.joinLines_joinElseIf,
- remove_trailing_comma: self.data.joinLines_removeTrailingComma,
- unwrap_trivial_blocks: self.data.joinLines_unwrapTrivialBlock,
- join_assignments: self.data.joinLines_joinAssignments,
- }
- }
-
pub fn call_info(&self) -> CallInfoConfig {
CallInfoConfig {
- params_only: matches!(self.data.signatureInfo_detail, SignatureDetail::Parameters),
- docs: self.data.signatureInfo_documentation_enable,
+ params_only: matches!(self.signatureInfo_detail(), SignatureDetail::Parameters),
+ docs: *self.signatureInfo_documentation_enable(),
}
}
pub fn lens(&self) -> LensConfig {
LensConfig {
- run: self.data.lens_enable && self.data.lens_run_enable,
- debug: self.data.lens_enable && self.data.lens_debug_enable,
- interpret: self.data.lens_enable
- && self.data.lens_run_enable
- && self.data.interpret_tests,
- implementations: self.data.lens_enable && self.data.lens_implementations_enable,
- method_refs: self.data.lens_enable && self.data.lens_references_method_enable,
- refs_adt: self.data.lens_enable && self.data.lens_references_adt_enable,
- refs_trait: self.data.lens_enable && self.data.lens_references_trait_enable,
- enum_variant_refs: self.data.lens_enable
- && self.data.lens_references_enumVariant_enable,
- location: self.data.lens_location,
- }
- }
-
- pub fn hover_actions(&self) -> HoverActionsConfig {
- let enable = self.experimental("hoverActions") && self.data.hover_actions_enable;
- HoverActionsConfig {
- implementations: enable && self.data.hover_actions_implementations_enable,
- references: enable && self.data.hover_actions_references_enable,
- run: enable && self.data.hover_actions_run_enable,
- debug: enable && self.data.hover_actions_debug_enable,
- goto_type_def: enable && self.data.hover_actions_gotoTypeDef_enable,
- }
- }
-
- pub fn highlighting_non_standard_tokens(&self) -> bool {
- self.data.semanticHighlighting_nonStandardTokens
- }
-
- pub fn highlighting_config(&self) -> HighlightConfig {
- HighlightConfig {
- strings: self.data.semanticHighlighting_strings_enable,
- punctuation: self.data.semanticHighlighting_punctuation_enable,
- specialize_punctuation: self
- .data
- .semanticHighlighting_punctuation_specialization_enable,
- macro_bang: self.data.semanticHighlighting_punctuation_separate_macro_bang,
- operator: self.data.semanticHighlighting_operator_enable,
- specialize_operator: self.data.semanticHighlighting_operator_specialization_enable,
- inject_doc_comment: self.data.semanticHighlighting_doc_comment_inject_enable,
- syntactic_name_ref_highlighting: false,
- }
- }
-
- pub fn hover(&self) -> HoverConfig {
- let mem_kind = |kind| match kind {
- MemoryLayoutHoverRenderKindDef::Both => MemoryLayoutHoverRenderKind::Both,
- MemoryLayoutHoverRenderKindDef::Decimal => MemoryLayoutHoverRenderKind::Decimal,
- MemoryLayoutHoverRenderKindDef::Hexadecimal => MemoryLayoutHoverRenderKind::Hexadecimal,
- };
- HoverConfig {
- links_in_hover: self.data.hover_links_enable,
- memory_layout: self.data.hover_memoryLayout_enable.then_some(MemoryLayoutHoverConfig {
- size: self.data.hover_memoryLayout_size.map(mem_kind),
- offset: self.data.hover_memoryLayout_offset.map(mem_kind),
- alignment: self.data.hover_memoryLayout_alignment.map(mem_kind),
- niches: self.data.hover_memoryLayout_niches.unwrap_or_default(),
- }),
- documentation: self.data.hover_documentation_enable,
- format: {
- let is_markdown = try_or_def!(self
- .caps
- .text_document
- .as_ref()?
- .hover
- .as_ref()?
- .content_format
- .as_ref()?
- .as_slice())
- .contains(&MarkupKind::Markdown);
- if is_markdown {
- HoverDocFormat::Markdown
- } else {
- HoverDocFormat::PlainText
- }
- },
- keywords: self.data.hover_documentation_keywords_enable,
- max_trait_assoc_items_count: self.data.hover_show_traitAssocItems,
- max_struct_field_count: self.data.hover_show_structFields,
+ run: *self.lens_enable() && *self.lens_run_enable(),
+ debug: *self.lens_enable() && *self.lens_debug_enable(),
+ interpret: *self.lens_enable() && *self.lens_run_enable() && *self.interpret_tests(),
+ implementations: *self.lens_enable() && *self.lens_implementations_enable(),
+ method_refs: *self.lens_enable() && *self.lens_references_method_enable(),
+ refs_adt: *self.lens_enable() && *self.lens_references_adt_enable(),
+ refs_trait: *self.lens_enable() && *self.lens_references_trait_enable(),
+ enum_variant_refs: *self.lens_enable() && *self.lens_references_enumVariant_enable(),
+ location: *self.lens_location(),
}
}
pub fn workspace_symbol(&self) -> WorkspaceSymbolConfig {
WorkspaceSymbolConfig {
- search_scope: match self.data.workspace_symbol_search_scope {
+ search_scope: match self.workspace_symbol_search_scope() {
WorkspaceSymbolSearchScopeDef::Workspace => WorkspaceSymbolSearchScope::Workspace,
WorkspaceSymbolSearchScopeDef::WorkspaceAndDependencies => {
WorkspaceSymbolSearchScope::WorkspaceAndDependencies
}
},
- search_kind: match self.data.workspace_symbol_search_kind {
+ search_kind: match self.workspace_symbol_search_kind() {
WorkspaceSymbolSearchKindDef::OnlyTypes => WorkspaceSymbolSearchKind::OnlyTypes,
WorkspaceSymbolSearchKindDef::AllSymbols => WorkspaceSymbolSearchKind::AllSymbols,
},
- search_limit: self.data.workspace_symbol_search_limit,
+ search_limit: *self.workspace_symbol_search_limit(),
}
}
@@ -1753,7 +1833,7 @@ impl Config {
try_or!(self.caps.experimental.as_ref()?.get("commands")?, &serde_json::Value::Null);
let commands: Option<lsp_ext::ClientCommandOptions> =
serde_json::from_value(commands.clone()).ok();
- let force = commands.is_none() && self.data.lens_forceCustomCommands;
+ let force = commands.is_none() && *self.lens_forceCustomCommands();
let commands = commands.map(|it| it.commands).unwrap_or_default();
let get = |name: &str| commands.iter().any(|it| it == name) || force;
@@ -1767,29 +1847,19 @@ impl Config {
}
}
- pub fn highlight_related(&self) -> HighlightRelatedConfig {
- HighlightRelatedConfig {
- references: self.data.highlightRelated_references_enable,
- break_points: self.data.highlightRelated_breakPoints_enable,
- exit_points: self.data.highlightRelated_exitPoints_enable,
- yield_points: self.data.highlightRelated_yieldPoints_enable,
- closure_captures: self.data.highlightRelated_closureCaptures_enable,
- }
- }
-
pub fn prime_caches_num_threads(&self) -> u8 {
- match self.data.cachePriming_numThreads {
+ match *self.cachePriming_numThreads() {
0 => num_cpus::get_physical().try_into().unwrap_or(u8::MAX),
n => n,
}
}
pub fn main_loop_num_threads(&self) -> usize {
- self.data.numThreads.unwrap_or(num_cpus::get_physical())
+ self.numThreads().unwrap_or(num_cpus::get_physical())
}
pub fn typing_autoclose_angle(&self) -> bool {
- self.data.typing_autoClosingAngleBrackets_enable
+ *self.typing_autoClosingAngleBrackets_enable()
}
// VSCode is our reference implementation, so we allow ourselves to work around issues by
@@ -1800,100 +1870,120 @@ impl Config {
}
// Deserialization definitions
-macro_rules! create_bool_or_string_de {
+macro_rules! create_bool_or_string_serde {
($ident:ident<$bool:literal, $string:literal>) => {
- fn $ident<'de, D>(d: D) -> Result<(), D::Error>
- where
- D: serde::Deserializer<'de>,
- {
- struct V;
- impl<'de> serde::de::Visitor<'de> for V {
- type Value = ();
-
- fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
- formatter.write_str(concat!(
- stringify!($bool),
- " or \"",
- stringify!($string),
- "\""
- ))
- }
+ mod $ident {
+ pub(super) fn deserialize<'de, D>(d: D) -> Result<(), D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ struct V;
+ impl<'de> serde::de::Visitor<'de> for V {
+ type Value = ();
+
+ fn expecting(
+ &self,
+ formatter: &mut std::fmt::Formatter<'_>,
+ ) -> std::fmt::Result {
+ formatter.write_str(concat!(
+ stringify!($bool),
+ " or \"",
+ stringify!($string),
+ "\""
+ ))
+ }
- fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
- where
- E: serde::de::Error,
- {
- match v {
- $bool => Ok(()),
- _ => Err(serde::de::Error::invalid_value(
- serde::de::Unexpected::Bool(v),
- &self,
- )),
+ fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ match v {
+ $bool => Ok(()),
+ _ => Err(serde::de::Error::invalid_value(
+ serde::de::Unexpected::Bool(v),
+ &self,
+ )),
+ }
}
- }
- fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
- where
- E: serde::de::Error,
- {
- match v {
- $string => Ok(()),
- _ => Err(serde::de::Error::invalid_value(
- serde::de::Unexpected::Str(v),
- &self,
- )),
+ fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ match v {
+ $string => Ok(()),
+ _ => Err(serde::de::Error::invalid_value(
+ serde::de::Unexpected::Str(v),
+ &self,
+ )),
+ }
}
- }
- fn visit_enum<A>(self, a: A) -> Result<Self::Value, A::Error>
- where
- A: serde::de::EnumAccess<'de>,
- {
- use serde::de::VariantAccess;
- let (variant, va) = a.variant::<&'de str>()?;
- va.unit_variant()?;
- match variant {
- $string => Ok(()),
- _ => Err(serde::de::Error::invalid_value(
- serde::de::Unexpected::Str(variant),
- &self,
- )),
+ fn visit_enum<A>(self, a: A) -> Result<Self::Value, A::Error>
+ where
+ A: serde::de::EnumAccess<'de>,
+ {
+ use serde::de::VariantAccess;
+ let (variant, va) = a.variant::<&'de str>()?;
+ va.unit_variant()?;
+ match variant {
+ $string => Ok(()),
+ _ => Err(serde::de::Error::invalid_value(
+ serde::de::Unexpected::Str(variant),
+ &self,
+ )),
+ }
}
}
+ d.deserialize_any(V)
+ }
+
+ pub(super) fn serialize<S>(serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ serializer.serialize_str($string)
}
- d.deserialize_any(V)
}
};
}
-create_bool_or_string_de!(true_or_always<true, "always">);
-create_bool_or_string_de!(false_or_never<false, "never">);
+create_bool_or_string_serde!(true_or_always<true, "always">);
+create_bool_or_string_serde!(false_or_never<false, "never">);
macro_rules! named_unit_variant {
($variant:ident) => {
- pub(super) fn $variant<'de, D>(deserializer: D) -> Result<(), D::Error>
- where
- D: serde::Deserializer<'de>,
- {
- struct V;
- impl<'de> serde::de::Visitor<'de> for V {
- type Value = ();
- fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- f.write_str(concat!("\"", stringify!($variant), "\""))
- }
- fn visit_str<E: serde::de::Error>(self, value: &str) -> Result<Self::Value, E> {
- if value == stringify!($variant) {
- Ok(())
- } else {
- Err(E::invalid_value(serde::de::Unexpected::Str(value), &self))
+ pub(super) mod $variant {
+ pub(in super::super) fn deserialize<'de, D>(deserializer: D) -> Result<(), D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ struct V;
+ impl<'de> serde::de::Visitor<'de> for V {
+ type Value = ();
+ fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.write_str(concat!("\"", stringify!($variant), "\""))
+ }
+ fn visit_str<E: serde::de::Error>(self, value: &str) -> Result<Self::Value, E> {
+ if value == stringify!($variant) {
+ Ok(())
+ } else {
+ Err(E::invalid_value(serde::de::Unexpected::Str(value), &self))
+ }
}
}
+ deserializer.deserialize_str(V)
+ }
+ pub(in super::super) fn serialize<S>(serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ serializer.serialize_str(stringify!($variant))
}
- deserializer.deserialize_str(V)
}
};
}
-mod de_unit_v {
+mod unit_v {
named_unit_variant!(all);
named_unit_variant!(skip_trivial);
named_unit_variant!(mutable);
@@ -1905,7 +1995,7 @@ mod de_unit_v {
named_unit_variant!(both);
}
-#[derive(Deserialize, Debug, Clone, Copy)]
+#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq)]
#[serde(rename_all = "snake_case")]
#[derive(Default)]
enum SnippetScopeDef {
@@ -1915,67 +2005,92 @@ enum SnippetScopeDef {
Type,
}
-#[derive(Deserialize, Debug, Clone, Default)]
+#[derive(Serialize, Deserialize, Debug, Clone, Default)]
#[serde(default)]
struct SnippetDef {
- #[serde(deserialize_with = "single_or_array")]
+ #[serde(with = "single_or_array")]
+ #[serde(skip_serializing_if = "Vec::is_empty")]
prefix: Vec<String>,
- #[serde(deserialize_with = "single_or_array")]
+
+ #[serde(with = "single_or_array")]
+ #[serde(skip_serializing_if = "Vec::is_empty")]
postfix: Vec<String>,
- description: Option<String>,
- #[serde(deserialize_with = "single_or_array")]
+
+ #[serde(with = "single_or_array")]
+ #[serde(skip_serializing_if = "Vec::is_empty")]
body: Vec<String>,
- #[serde(deserialize_with = "single_or_array")]
+
+ #[serde(with = "single_or_array")]
+ #[serde(skip_serializing_if = "Vec::is_empty")]
requires: Vec<String>,
+
+ #[serde(skip_serializing_if = "Option::is_none")]
+ description: Option<String>,
+
scope: SnippetScopeDef,
}
-fn single_or_array<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
-where
- D: serde::Deserializer<'de>,
-{
- struct SingleOrVec;
+mod single_or_array {
+ use serde::{Deserialize, Serialize};
- impl<'de> serde::de::Visitor<'de> for SingleOrVec {
- type Value = Vec<String>;
+ pub(super) fn deserialize<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ struct SingleOrVec;
- fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- formatter.write_str("string or array of strings")
- }
+ impl<'de> serde::de::Visitor<'de> for SingleOrVec {
+ type Value = Vec<String>;
- fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
- where
- E: serde::de::Error,
- {
- Ok(vec![value.to_owned()])
- }
+ fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ formatter.write_str("string or array of strings")
+ }
+
+ fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ Ok(vec![value.to_owned()])
+ }
- fn visit_seq<A>(self, seq: A) -> Result<Self::Value, A::Error>
- where
- A: serde::de::SeqAccess<'de>,
- {
- Deserialize::deserialize(serde::de::value::SeqAccessDeserializer::new(seq))
+ fn visit_seq<A>(self, seq: A) -> Result<Self::Value, A::Error>
+ where
+ A: serde::de::SeqAccess<'de>,
+ {
+ Deserialize::deserialize(serde::de::value::SeqAccessDeserializer::new(seq))
+ }
}
+
+ deserializer.deserialize_any(SingleOrVec)
}
- deserializer.deserialize_any(SingleOrVec)
+ pub(super) fn serialize<S>(vec: &[String], serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ match vec {
+ // [] case is handled by skip_serializing_if
+ [single] => serializer.serialize_str(single),
+ slice => slice.serialize(serializer),
+ }
+ }
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(untagged)]
enum ManifestOrProjectJson {
Manifest(Utf8PathBuf),
ProjectJson(ProjectJsonData),
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum ExprFillDefaultDef {
Todo,
Default,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum ImportGranularityDef {
Preserve,
@@ -1985,7 +2100,7 @@ enum ImportGranularityDef {
One,
}
-#[derive(Deserialize, Debug, Copy, Clone)]
+#[derive(Serialize, Deserialize, Debug, Copy, Clone)]
#[serde(rename_all = "snake_case")]
enum CallableCompletionDef {
FillArguments,
@@ -1993,54 +2108,54 @@ enum CallableCompletionDef {
None,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(untagged)]
enum CargoFeaturesDef {
- #[serde(deserialize_with = "de_unit_v::all")]
+ #[serde(with = "unit_v::all")]
All,
Selected(Vec<String>),
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
-enum InvocationStrategy {
+pub(crate) enum InvocationStrategy {
Once,
PerWorkspace,
}
-#[derive(Deserialize, Debug, Clone)]
-struct CheckOnSaveTargets(#[serde(deserialize_with = "single_or_array")] Vec<String>);
+#[derive(Serialize, Deserialize, Debug, Clone)]
+struct CheckOnSaveTargets(#[serde(with = "single_or_array")] Vec<String>);
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum InvocationLocation {
Root,
Workspace,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(untagged)]
enum LifetimeElisionDef {
- #[serde(deserialize_with = "true_or_always")]
+ #[serde(with = "true_or_always")]
Always,
- #[serde(deserialize_with = "false_or_never")]
+ #[serde(with = "false_or_never")]
Never,
- #[serde(deserialize_with = "de_unit_v::skip_trivial")]
+ #[serde(with = "unit_v::skip_trivial")]
SkipTrivial,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(untagged)]
enum ClosureReturnTypeHintsDef {
- #[serde(deserialize_with = "true_or_always")]
+ #[serde(with = "true_or_always")]
Always,
- #[serde(deserialize_with = "false_or_never")]
+ #[serde(with = "false_or_never")]
Never,
- #[serde(deserialize_with = "de_unit_v::with_block")]
+ #[serde(with = "unit_v::with_block")]
WithBlock,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum ClosureStyle {
ImplFn,
@@ -2049,40 +2164,40 @@ enum ClosureStyle {
Hide,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(untagged)]
enum ReborrowHintsDef {
- #[serde(deserialize_with = "true_or_always")]
+ #[serde(with = "true_or_always")]
Always,
- #[serde(deserialize_with = "false_or_never")]
+ #[serde(with = "false_or_never")]
Never,
- #[serde(deserialize_with = "de_unit_v::mutable")]
+ #[serde(with = "unit_v::mutable")]
Mutable,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(untagged)]
enum AdjustmentHintsDef {
- #[serde(deserialize_with = "true_or_always")]
+ #[serde(with = "true_or_always")]
Always,
- #[serde(deserialize_with = "false_or_never")]
+ #[serde(with = "false_or_never")]
Never,
- #[serde(deserialize_with = "de_unit_v::reborrow")]
+ #[serde(with = "unit_v::reborrow")]
Reborrow,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(untagged)]
enum DiscriminantHintsDef {
- #[serde(deserialize_with = "true_or_always")]
+ #[serde(with = "true_or_always")]
Always,
- #[serde(deserialize_with = "false_or_never")]
+ #[serde(with = "false_or_never")]
Never,
- #[serde(deserialize_with = "de_unit_v::fieldless")]
+ #[serde(with = "unit_v::fieldless")]
Fieldless,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum AdjustmentHintsModeDef {
Prefix,
@@ -2091,7 +2206,7 @@ enum AdjustmentHintsModeDef {
PreferPostfix,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum FilesWatcherDef {
Client,
@@ -2099,7 +2214,7 @@ enum FilesWatcherDef {
Server,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum ImportPrefixDef {
Plain,
@@ -2109,40 +2224,51 @@ enum ImportPrefixDef {
ByCrate,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum WorkspaceSymbolSearchScopeDef {
Workspace,
WorkspaceAndDependencies,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum SignatureDetail {
Full,
Parameters,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum WorkspaceSymbolSearchKindDef {
OnlyTypes,
AllSymbols,
}
-#[derive(Deserialize, Debug, Copy, Clone)]
+#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq)]
#[serde(rename_all = "snake_case")]
#[serde(untagged)]
-pub enum MemoryLayoutHoverRenderKindDef {
- #[serde(deserialize_with = "de_unit_v::decimal")]
+enum MemoryLayoutHoverRenderKindDef {
+ #[serde(with = "unit_v::decimal")]
Decimal,
- #[serde(deserialize_with = "de_unit_v::hexadecimal")]
+ #[serde(with = "unit_v::hexadecimal")]
Hexadecimal,
- #[serde(deserialize_with = "de_unit_v::both")]
+ #[serde(with = "unit_v::both")]
Both,
}
-#[derive(Deserialize, Debug, Clone, PartialEq)]
+#[test]
+fn untagged_option_hover_render_kind() {
+ let hex = MemoryLayoutHoverRenderKindDef::Hexadecimal;
+
+ let ser = serde_json::to_string(&Some(hex)).unwrap();
+ assert_eq!(&ser, "\"hexadecimal\"");
+
+ let opt: Option<_> = serde_json::from_str("\"hexadecimal\"").unwrap();
+ assert_eq!(opt, Some(hex));
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
#[serde(rename_all = "snake_case")]
#[serde(untagged)]
pub enum TargetDirectory {
@@ -2150,68 +2276,326 @@ pub enum TargetDirectory {
Directory(Utf8PathBuf),
}
+macro_rules! _default_val {
+ (@verbatim: $s:literal, $ty:ty) => {{
+ let default_: $ty = serde_json::from_str(&$s).unwrap();
+ default_
+ }};
+ ($default:expr, $ty:ty) => {{
+ let default_: $ty = $default;
+ default_
+ }};
+}
+use _default_val as default_val;
+
+macro_rules! _default_str {
+ (@verbatim: $s:literal, $_ty:ty) => {
+ $s.to_owned()
+ };
+ ($default:expr, $ty:ty) => {{
+ let val = default_val!($default, $ty);
+ serde_json::to_string_pretty(&val).unwrap()
+ }};
+}
+use _default_str as default_str;
+
+macro_rules! _impl_for_config_data {
+ (local, $(
+ $(#[doc=$doc:literal])*
+ $vis:vis $field:ident : $ty:ty = $default:expr,
+ )*
+ ) => {
+ impl Config {
+ $(
+ $($doc)*
+ #[allow(non_snake_case)]
+ $vis fn $field(&self, _source_root: Option<SourceRootId>) -> &$ty {
+ if let Some(v) = self.client_config.local.$field.as_ref() {
+ return &v;
+ }
+
+ if let Some(v) = self.user_config.local.$field.as_ref() {
+ return &v;
+ }
+
+ &self.default_config.local.$field
+ }
+ )*
+ }
+ };
+ (global, $(
+ $(#[doc=$doc:literal])*
+ $vis:vis $field:ident : $ty:ty = $default:expr,
+ )*
+ ) => {
+ impl Config {
+ $(
+ $($doc)*
+ #[allow(non_snake_case)]
+ $vis fn $field(&self) -> &$ty {
+ if let Some(v) = self.client_config.global.$field.as_ref() {
+ return &v;
+ }
+
+ if let Some(v) = self.user_config.global.$field.as_ref() {
+ return &v;
+ }
+
+ &self.default_config.global.$field
+ }
+ )*
+ }
+ };
+ (client, $(
+ $(#[doc=$doc:literal])*
+ $vis:vis $field:ident : $ty:ty = $default:expr,
+ )*
+ ) => {
+ impl Config {
+ $(
+ $($doc)*
+ #[allow(non_snake_case)]
+ $vis fn $field(&self) -> &$ty {
+ if let Some(v) = self.client_config.global.$field.as_ref() {
+ return &v;
+ }
+
+ &self.default_config.client.$field
+ }
+ )*
+ }
+ };
+}
+use _impl_for_config_data as impl_for_config_data;
+
macro_rules! _config_data {
- (struct $name:ident {
+ // `$modname` namespaces the generated per-grouping tests (e.g. `fields_are_sorted`)
+ ($(#[doc=$dox:literal])* $modname:ident: struct $name:ident <- $input:ident -> {
$(
$(#[doc=$doc:literal])*
- $field:ident $(| $alias:ident)*: $ty:ty = $default:expr,
+ $vis:vis $field:ident $(| $alias:ident)*: $ty:ty = $(@$marker:ident: )? $default:expr,
)*
}) => {
+ /// Default config values for this grouping.
#[allow(non_snake_case)]
- #[derive(Debug, Clone)]
+ #[derive(Debug, Clone, Serialize)]
struct $name { $($field: $ty,)* }
- impl $name {
- fn from_json(mut json: serde_json::Value, error_sink: &mut Vec<(String, serde_json::Error)>) -> $name {
+
+ impl_for_config_data!{
+ $modname,
+ $(
+ $vis $field : $ty = $default,
+ )*
+ }
+
+ /// All fields `Option<T>`, `None` representing fields not set in a particular JSON/TOML blob.
+ #[allow(non_snake_case)]
+ #[derive(Clone, Serialize, Default)]
+ struct $input { $(
+ #[serde(skip_serializing_if = "Option::is_none")]
+ $field: Option<$ty>,
+ )* }
+
+ impl std::fmt::Debug for $input {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let mut s = f.debug_struct(stringify!($input));
+ $(
+ if let Some(val) = self.$field.as_ref() {
+ s.field(stringify!($field), val);
+ }
+ )*
+ s.finish()
+ }
+ }
+
+ impl Default for $name {
+ fn default() -> Self {
$name {$(
+ $field: default_val!($(@$marker:)? $default, $ty),
+ )*}
+ }
+ }
+
+ #[allow(unused)]
+ impl $name {
+ /// Applies overrides from some more local config blob, to self.
+ fn apply_input(&mut self, input: $input) {
+ $(
+ if let Some(value) = input.$field {
+ self.$field = value;
+ }
+ )*
+ }
+
+ fn clone_with_overrides(&self, input: $input) -> Self {
+ Self {$(
+ $field: input.$field.unwrap_or_else(|| self.$field.clone()),
+ )*}
+ }
+ }
+
+ #[allow(unused, clippy::ptr_arg)]
+ impl $input {
+ fn from_json(json: &mut serde_json::Value, error_sink: &mut Vec<(String, serde_json::Error)>) -> Self {
+ Self {$(
$field: get_field(
- &mut json,
+ json,
error_sink,
stringify!($field),
None$(.or(Some(stringify!($alias))))*,
- $default,
),
)*}
}
- fn json_schema() -> serde_json::Value {
- schema(&[
- $({
- let field = stringify!($field);
- let ty = stringify!($ty);
-
- (field, ty, &[$($doc),*], $default)
- },)*
- ])
+ fn from_toml(toml: &mut toml::Table, error_sink: &mut Vec<(String, toml::de::Error)>) -> Self {
+ Self {$(
+ $field: get_field_toml::<$ty>(
+ toml,
+ error_sink,
+ stringify!($field),
+ None$(.or(Some(stringify!($alias))))*,
+ ),
+ )*}
}
- #[cfg(test)]
- fn manual() -> String {
- manual(&[
+ fn schema_fields(sink: &mut Vec<SchemaField>) {
+ sink.extend_from_slice(&[
$({
let field = stringify!($field);
let ty = stringify!($ty);
+ let default = default_str!($(@$marker:)? $default, $ty);
- (field, ty, &[$($doc),*], $default)
+ (field, ty, &[$($doc),*], default)
},)*
])
}
}
- #[test]
- fn fields_are_sorted() {
- [$(stringify!($field)),*].windows(2).for_each(|w| assert!(w[0] <= w[1], "{} <= {} does not hold", w[0], w[1]));
+ mod $modname {
+ #[test]
+ fn fields_are_sorted() {
+ let field_names: &'static [&'static str] = &[$(stringify!($field)),*];
+ field_names.windows(2).for_each(|w| assert!(w[0] <= w[1], "{} <= {} does not hold", w[0], w[1]));
+ }
}
};
}
use _config_data as config_data;
+#[derive(Default, Debug, Clone)]
+struct DefaultConfigData {
+ global: GlobalDefaultConfigData,
+ local: LocalDefaultConfigData,
+ #[allow(dead_code)]
+ client: ClientDefaultConfigData,
+}
+
+/// All of the config levels, all fields `Option<T>`, to describe fields that are actually set by
+/// some rust-analyzer.toml file or JSON blob. An empty rust-analyzer.toml corresponds to
+/// all fields being None.
+#[derive(Debug, Clone, Default)]
+struct FullConfigInput {
+ global: GlobalConfigInput,
+ local: LocalConfigInput,
+ #[allow(dead_code)]
+ client: ClientConfigInput,
+}
+
+impl FullConfigInput {
+ fn from_json(
+ mut json: serde_json::Value,
+ error_sink: &mut Vec<(String, serde_json::Error)>,
+ ) -> FullConfigInput {
+ FullConfigInput {
+ global: GlobalConfigInput::from_json(&mut json, error_sink),
+ local: LocalConfigInput::from_json(&mut json, error_sink),
+ client: ClientConfigInput::from_json(&mut json, error_sink),
+ }
+ }
+
+ fn schema_fields() -> Vec<SchemaField> {
+ let mut fields = Vec::new();
+ GlobalConfigInput::schema_fields(&mut fields);
+ LocalConfigInput::schema_fields(&mut fields);
+ ClientConfigInput::schema_fields(&mut fields);
+ // HACK: sort the fields, so the diffs on the generated docs/schema are smaller
+ fields.sort_by_key(|&(x, ..)| x);
+ fields
+ }
+
+ fn json_schema() -> serde_json::Value {
+ schema(&Self::schema_fields())
+ }
+
+ #[cfg(test)]
+ fn manual() -> String {
+ manual(&Self::schema_fields())
+ }
+}
+
+/// The global and local config levels (no client level), all fields `Option<T>`, to describe
+/// fields that are actually set by some rust-analyzer.toml file or JSON blob. An empty
+/// rust-analyzer.toml corresponds to all fields being None.
+#[derive(Debug, Clone, Default)]
+struct GlobalLocalConfigInput {
+ global: GlobalConfigInput,
+ local: LocalConfigInput,
+}
+
+impl GlobalLocalConfigInput {
+ #[allow(dead_code)]
+ fn from_toml(
+ mut toml: toml::Table,
+ error_sink: &mut Vec<(String, toml::de::Error)>,
+ ) -> GlobalLocalConfigInput {
+ GlobalLocalConfigInput {
+ global: GlobalConfigInput::from_toml(&mut toml, error_sink),
+ local: LocalConfigInput::from_toml(&mut toml, error_sink),
+ }
+ }
+}
+
+fn get_field_toml<T: DeserializeOwned>(
+ val: &toml::Table,
+ error_sink: &mut Vec<(String, toml::de::Error)>,
+ field: &'static str,
+ alias: Option<&'static str>,
+) -> Option<T> {
+ alias
+ .into_iter()
+ .chain(iter::once(field))
+ .filter_map(move |field| {
+ let subkeys = field.split('_');
+ let mut v = val;
+ for subkey in subkeys {
+ if let Some(val) = v.get(subkey) {
+ if let Some(map) = val.as_table() {
+ v = map;
+ } else {
+ return Some(toml::Value::try_into(val.clone()).map_err(|e| (e, v)));
+ }
+ } else {
+ return None;
+ }
+ }
+ None
+ })
+ .find(Result::is_ok)
+ .and_then(|res| match res {
+ Ok(it) => Some(it),
+ Err((e, pointer)) => {
+ error_sink.push((pointer.to_string(), e));
+ None
+ }
+ })
+}
+
fn get_field<T: DeserializeOwned>(
json: &mut serde_json::Value,
error_sink: &mut Vec<(String, serde_json::Error)>,
field: &'static str,
alias: Option<&'static str>,
- default: &str,
-) -> T {
+) -> Option<T> {
// XXX: check alias first, to work around the VS Code where it pre-fills the
// defaults instead of sending an empty object.
alias
@@ -2232,12 +2616,11 @@ fn get_field<T: DeserializeOwned>(
None
}
})
- .unwrap_or_else(|| {
- serde_json::from_str(default).unwrap_or_else(|e| panic!("{e} on: `{default}`"))
- })
}
-fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json::Value {
+type SchemaField = (&'static str, &'static str, &'static [&'static str], String);
+
+fn schema(fields: &[SchemaField]) -> serde_json::Value {
let map = fields
.iter()
.map(|(field, ty, doc, default)| {
@@ -2288,7 +2671,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
"FxHashMap<Box<str>, Box<[Box<str>]>>" => set! {
"type": "object",
},
- "FxHashMap<String, SnippetDef>" => set! {
+ "IndexMap<String, SnippetDef>" => set! {
"type": "object",
},
"FxHashMap<String, String>" => set! {
@@ -2297,6 +2680,9 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
"FxHashMap<Box<str>, usize>" => set! {
"type": "object",
},
+ "FxHashMap<String, Option<String>>" => set! {
+ "type": "object",
+ },
"Option<usize>" => set! {
"type": ["null", "integer"],
"minimum": 0,
@@ -2599,7 +2985,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
}
#[cfg(test)]
-fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
+fn manual(fields: &[SchemaField]) -> String {
fields.iter().fold(String::new(), |mut acc, (field, _ty, doc, default)| {
let name = format!("rust-analyzer.{}", field.replace('_', "."));
let doc = doc_comment_to_string(doc);
@@ -2694,7 +3080,7 @@ mod tests {
#[test]
fn generate_config_documentation() {
let docs_path = project_root().join("docs/user/generated_config.adoc");
- let expected = ConfigData::manual();
+ let expected = FullConfigInput::manual();
ensure_file_contents(&docs_path, &expected);
}
@@ -2766,9 +3152,9 @@ mod tests {
"rust": { "analyzerTargetDir": null }
}))
.unwrap();
- assert_eq!(config.data.cargo_targetDir, None);
+ assert_eq!(config.cargo_targetDir(), &None);
assert!(
- matches!(config.flycheck(), FlycheckConfig::CargoCommand { target_dir, .. } if target_dir.is_none())
+ matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. } if options.target_dir.is_none())
);
}
@@ -2785,9 +3171,9 @@ mod tests {
"rust": { "analyzerTargetDir": true }
}))
.unwrap();
- assert_eq!(config.data.cargo_targetDir, Some(TargetDirectory::UseSubdirectory(true)));
+ assert_eq!(config.cargo_targetDir(), &Some(TargetDirectory::UseSubdirectory(true)));
assert!(
- matches!(config.flycheck(), FlycheckConfig::CargoCommand { target_dir, .. } if target_dir == Some(Utf8PathBuf::from("target/rust-analyzer")))
+ matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. } if options.target_dir == Some(Utf8PathBuf::from("target/rust-analyzer")))
);
}
@@ -2805,11 +3191,11 @@ mod tests {
}))
.unwrap();
assert_eq!(
- config.data.cargo_targetDir,
- Some(TargetDirectory::Directory(Utf8PathBuf::from("other_folder")))
+ config.cargo_targetDir(),
+ &Some(TargetDirectory::Directory(Utf8PathBuf::from("other_folder")))
);
assert!(
- matches!(config.flycheck(), FlycheckConfig::CargoCommand { target_dir, .. } if target_dir == Some(Utf8PathBuf::from("other_folder")))
+ matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. } if options.target_dir == Some(Utf8PathBuf::from("other_folder")))
);
}
}
diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs
index a0a53f545c..65a9a49149 100644
--- a/crates/rust-analyzer/src/diagnostics.rs
+++ b/crates/rust-analyzer/src/diagnostics.rs
@@ -154,10 +154,12 @@ pub(crate) fn fetch_native_diagnostics(
.copied()
.filter_map(|file_id| {
let line_index = snapshot.file_line_index(file_id).ok()?;
+ let source_root = snapshot.analysis.source_root(file_id).ok()?;
+
let diagnostics = snapshot
.analysis
.diagnostics(
- &snapshot.config.diagnostics(),
+ &snapshot.config.diagnostics(Some(source_root)),
ide::AssistResolveStrategy::None,
file_id,
)
diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs
index 7c4deac93f..3d3f944019 100644
--- a/crates/rust-analyzer/src/diagnostics/to_proto.rs
+++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs
@@ -68,8 +68,13 @@ fn location(
let range = {
let position_encoding = snap.config.position_encoding();
lsp_types::Range::new(
- position(&position_encoding, span, span.line_start, span.column_start),
- position(&position_encoding, span, span.line_end, span.column_end),
+ position(
+ &position_encoding,
+ span,
+ span.line_start,
+ span.column_start.saturating_sub(1),
+ ),
+ position(&position_encoding, span, span.line_end, span.column_end.saturating_sub(1)),
)
};
lsp_types::Location::new(uri, range)
@@ -78,10 +83,10 @@ fn location(
fn position(
position_encoding: &PositionEncoding,
span: &DiagnosticSpan,
- line_offset: usize,
+ line_number: usize,
column_offset_utf32: usize,
) -> lsp_types::Position {
- let line_index = line_offset - span.line_start;
+ let line_index = line_number - span.line_start;
let column_offset_encoded = match span.text.get(line_index) {
// Fast path.
@@ -104,8 +109,8 @@ fn position(
};
lsp_types::Position {
- line: (line_offset as u32).saturating_sub(1),
- character: (column_offset_encoded as u32).saturating_sub(1),
+ line: (line_number as u32).saturating_sub(1),
+ character: column_offset_encoded as u32,
}
}
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index 8516ffa0df..e9bca19af6 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -72,7 +72,6 @@ pub(crate) struct GlobalState {
// status
pub(crate) shutdown_requested: bool,
- pub(crate) send_hint_refresh_query: bool,
pub(crate) last_reported_status: Option<lsp_ext::ServerStatusParams>,
// proc macros
@@ -86,7 +85,10 @@ pub(crate) struct GlobalState {
pub(crate) last_flycheck_error: Option<String>,
// Test explorer
- pub(crate) test_run_session: Option<flycheck::CargoTestHandle>,
+ pub(crate) test_run_session: Option<Vec<flycheck::CargoTestHandle>>,
+ pub(crate) test_run_sender: Sender<flycheck::CargoTestMessage>,
+ pub(crate) test_run_receiver: Receiver<flycheck::CargoTestMessage>,
+ pub(crate) test_run_remaining_jobs: usize,
// VFS
pub(crate) loader: Handle<Box<dyn vfs::loader::Handle>, Receiver<vfs::loader::Message>>,
@@ -123,6 +125,7 @@ pub(crate) struct GlobalState {
/// to invalidate any salsa caches.
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
pub(crate) crate_graph_file_dependencies: FxHashSet<vfs::VfsPath>,
+ pub(crate) detached_files: FxHashSet<vfs::AbsPathBuf>,
// op queues
pub(crate) fetch_workspaces_queue:
@@ -187,10 +190,11 @@ impl GlobalState {
};
let mut analysis_host = AnalysisHost::new(config.lru_parse_query_capacity());
- if let Some(capacities) = config.lru_query_capacities() {
+ if let Some(capacities) = config.lru_query_capacities_config() {
analysis_host.update_lru_capacities(capacities);
}
let (flycheck_sender, flycheck_receiver) = unbounded();
+ let (test_run_sender, test_run_receiver) = unbounded();
let mut this = GlobalState {
sender,
req_queue: ReqQueue::default(),
@@ -203,7 +207,6 @@ impl GlobalState {
mem_docs: MemDocs::default(),
semantic_tokens_cache: Arc::new(Default::default()),
shutdown_requested: false,
- send_hint_refresh_query: false,
last_reported_status: None,
source_root_config: SourceRootConfig::default(),
local_roots_parent_map: FxHashMap::default(),
@@ -219,6 +222,9 @@ impl GlobalState {
last_flycheck_error: None,
test_run_session: None,
+ test_run_sender,
+ test_run_receiver,
+ test_run_remaining_jobs: 0,
vfs: Arc::new(RwLock::new((vfs::Vfs::default(), IntMap::default()))),
vfs_config_version: 0,
@@ -228,6 +234,7 @@ impl GlobalState {
workspaces: Arc::from(Vec::new()),
crate_graph_file_dependencies: FxHashSet::default(),
+ detached_files: FxHashSet::default(),
fetch_workspaces_queue: OpQueue::default(),
fetch_build_data_queue: OpQueue::default(),
fetch_proc_macros_queue: OpQueue::default(),
@@ -514,7 +521,7 @@ impl GlobalStateSnapshot {
cargo.target_by_root(path).map(|it| (cargo, it))
}
ProjectWorkspace::Json { .. } => None,
- ProjectWorkspace::DetachedFiles { .. } => None,
+ ProjectWorkspace::DetachedFile { .. } => None,
})
}
diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs
index b5c4a4f435..4b8c3d06ce 100644
--- a/crates/rust-analyzer/src/handlers/notification.rs
+++ b/crates/rust-analyzer/src/handlers/notification.rs
@@ -105,7 +105,7 @@ pub(crate) fn handle_did_change_text_document(
)
.into_bytes();
if *data != new_contents {
- *data = new_contents.clone();
+ data.clone_from(&new_contents);
state.vfs.write().0.set_file_contents(path, Some(new_contents));
}
}
@@ -154,6 +154,10 @@ pub(crate) fn handle_did_save_text_document(
state
.fetch_workspaces_queue
.request_op(format!("workspace vfs file change saved {abs_path}"), false);
+ } else if state.detached_files.contains(abs_path) {
+ state
+ .fetch_workspaces_queue
+ .request_op(format!("detached file saved {abs_path}"), false);
}
}
@@ -296,15 +300,15 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
})
}
project_model::ProjectWorkspace::Json { project, .. } => {
- if !project
- .crates()
- .any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c))
- {
+ if !project.crates().any(|(_, krate)| {
+ crate_root_paths.contains(&krate.root_module.as_path())
+ }) {
return None;
}
None
}
- project_model::ProjectWorkspace::DetachedFiles { .. } => return None,
+ // FIXME
+ project_model::ProjectWorkspace::DetachedFile { .. } => return None,
};
Some((idx, package))
});
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index 77692ed3ae..cf97d7d9d2 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -101,7 +101,7 @@ pub(crate) fn handle_analyzer_status(
"Workspace root folders: {:?}",
snap.workspaces
.iter()
- .flat_map(|ws| ws.workspace_definition_path())
+ .map(|ws| ws.workspace_definition_path())
.collect::<Vec<&AbsPath>>()
);
}
@@ -219,14 +219,28 @@ pub(crate) fn handle_run_test(
.unwrap_or_default(),
None => "".to_owned(),
};
- let handle = if lca.is_empty() {
- flycheck::CargoTestHandle::new(None)
+ let test_path = if lca.is_empty() {
+ None
} else if let Some((_, path)) = lca.split_once("::") {
- flycheck::CargoTestHandle::new(Some(path))
+ Some(path)
} else {
- flycheck::CargoTestHandle::new(None)
+ None
};
- state.test_run_session = Some(handle?);
+ let mut handles = vec![];
+ for ws in &*state.workspaces {
+ if let ProjectWorkspace::Cargo { cargo, .. } = ws {
+ let handle = flycheck::CargoTestHandle::new(
+ test_path,
+ state.config.cargo_test_options(),
+ cargo.workspace_root(),
+ state.test_run_sender.clone(),
+ )?;
+ handles.push(handle);
+ }
+ }
+ // Each process sends a finished signal twice, once for stdout and once for stderr
+ state.test_run_remaining_jobs = 2 * handles.len();
+ state.test_run_session = Some(handles);
Ok(())
}
@@ -355,8 +369,9 @@ pub(crate) fn handle_join_lines(
) -> anyhow::Result<Vec<lsp_types::TextEdit>> {
let _p = tracing::span!(tracing::Level::INFO, "handle_join_lines").entered();
- let config = snap.config.join_lines();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let source_root = snap.analysis.source_root(file_id)?;
+ let config = snap.config.join_lines(Some(source_root));
let line_index = snap.file_line_index(file_id)?;
let mut res = TextEdit::default();
@@ -923,7 +938,8 @@ pub(crate) fn handle_completion(
let completion_trigger_character =
params.context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next());
- let completion_config = &snap.config.completion();
+ let source_root = snap.analysis.source_root(position.file_id)?;
+ let completion_config = &snap.config.completion(Some(source_root));
let items = match snap.analysis.completions(
completion_config,
position,
@@ -964,11 +980,12 @@ pub(crate) fn handle_completion_resolve(
let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, resolve_data.position.position)?;
+ let source_root = snap.analysis.source_root(file_id)?;
let additional_edits = snap
.analysis
.resolve_completion_edits(
- &snap.config.completion(),
+ &snap.config.completion(Some(source_root)),
FilePosition { file_id, offset },
resolve_data
.imports
@@ -1038,16 +1055,17 @@ pub(crate) fn handle_hover(
PositionOrRange::Position(position) => Range::new(position, position),
PositionOrRange::Range(range) => range,
};
-
let file_range = from_proto::file_range(&snap, &params.text_document, range)?;
- let info = match snap.analysis.hover(&snap.config.hover(), file_range)? {
+
+ let hover = snap.config.hover();
+ let info = match snap.analysis.hover(&hover, file_range)? {
None => return Ok(None),
Some(info) => info,
};
let line_index = snap.file_line_index(file_range.file_id)?;
let range = to_proto::range(&line_index, info.range);
- let markup_kind = snap.config.hover().format;
+ let markup_kind = hover.format;
let hover = lsp_ext::Hover {
hover: lsp_types::Hover {
contents: HoverContents::Markup(to_proto::markup_content(
@@ -1146,8 +1164,8 @@ pub(crate) fn handle_references(
.flat_map(|(file_id, refs)| {
refs.into_iter()
.filter(|&(_, category)| {
- (!exclude_imports || category != Some(ReferenceCategory::Import))
- && (!exclude_tests || category != Some(ReferenceCategory::Test))
+ (!exclude_imports || !category.contains(ReferenceCategory::IMPORT))
+ && (!exclude_tests || !category.contains(ReferenceCategory::TEST))
})
.map(move |(range, _)| FileRange { file_id, range })
})
@@ -1191,11 +1209,12 @@ pub(crate) fn handle_code_action(
return Ok(None);
}
- let line_index =
- snap.file_line_index(from_proto::file_id(&snap, &params.text_document.uri)?)?;
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
+ let source_root = snap.analysis.source_root(file_id)?;
- let mut assists_config = snap.config.assist();
+ let mut assists_config = snap.config.assist(Some(source_root));
assists_config.allowed = params
.context
.only
@@ -1212,7 +1231,7 @@ pub(crate) fn handle_code_action(
};
let assists = snap.analysis.assists_with_fixes(
&assists_config,
- &snap.config.diagnostics(),
+ &snap.config.diagnostics(Some(source_root)),
resolve,
frange,
)?;
@@ -1266,8 +1285,9 @@ pub(crate) fn handle_code_action_resolve(
let line_index = snap.file_line_index(file_id)?;
let range = from_proto::text_range(&line_index, params.code_action_params.range)?;
let frange = FileRange { file_id, range };
+ let source_root = snap.analysis.source_root(file_id)?;
- let mut assists_config = snap.config.assist();
+ let mut assists_config = snap.config.assist(Some(source_root));
assists_config.allowed = params
.code_action_params
.context
@@ -1290,7 +1310,7 @@ pub(crate) fn handle_code_action_resolve(
let assists = snap.analysis.assists_with_fixes(
&assists_config,
- &snap.config.diagnostics(),
+ &snap.config.diagnostics(Some(source_root)),
AssistResolveStrategy::Single(assist_resolve),
frange,
)?;
@@ -1419,8 +1439,12 @@ pub(crate) fn handle_document_highlight(
let _p = tracing::span!(tracing::Level::INFO, "handle_document_highlight").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let line_index = snap.file_line_index(position.file_id)?;
+ let source_root = snap.analysis.source_root(position.file_id)?;
- let refs = match snap.analysis.highlight_related(snap.config.highlight_related(), position)? {
+ let refs = match snap
+ .analysis
+ .highlight_related(snap.config.highlight_related(Some(source_root)), position)?
+ {
None => return Ok(None),
Some(refs) => refs,
};
@@ -1428,7 +1452,7 @@ pub(crate) fn handle_document_highlight(
.into_iter()
.map(|ide::HighlightedRange { range, category }| lsp_types::DocumentHighlight {
range: to_proto::range(&line_index, range),
- kind: category.and_then(to_proto::document_highlight_kind),
+ kind: to_proto::document_highlight_kind(category),
})
.collect();
Ok(Some(res))
@@ -1466,7 +1490,9 @@ pub(crate) fn handle_inlay_hints(
params.range,
)?;
let line_index = snap.file_line_index(file_id)?;
- let inlay_hints_config = snap.config.inlay_hints();
+ let source_root = snap.analysis.source_root(file_id)?;
+
+ let inlay_hints_config = snap.config.inlay_hints(Some(source_root));
Ok(Some(
snap.analysis
.inlay_hints(&inlay_hints_config, file_id, Some(range))?
@@ -1490,29 +1516,33 @@ pub(crate) fn handle_inlay_hints_resolve(
) -> anyhow::Result<InlayHint> {
let _p = tracing::span!(tracing::Level::INFO, "handle_inlay_hints_resolve").entered();
- let data = match original_hint.data.take() {
- Some(it) => it,
- None => return Ok(original_hint),
- };
-
+ let Some(data) = original_hint.data.take() else { return Ok(original_hint) };
let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?;
+ let Some(hash) = resolve_data.hash.parse().ok() else { return Ok(original_hint) };
let file_id = FileId::from_raw(resolve_data.file_id);
anyhow::ensure!(snap.file_exists(file_id), "Invalid LSP resolve data");
let line_index = snap.file_line_index(file_id)?;
let hint_position = from_proto::offset(&line_index, original_hint.position)?;
- let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints();
+ let source_root = snap.analysis.source_root(file_id)?;
+
+ let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints(Some(source_root));
forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty();
let resolve_hints = snap.analysis.inlay_hints_resolve(
&forced_resolve_inlay_hints_config,
file_id,
hint_position,
- resolve_data.hash,
+ hash,
+ |hint| {
+ std::hash::BuildHasher::hash_one(
+ &std::hash::BuildHasherDefault::<ide_db::FxHasher>::default(),
+ hint,
+ )
+ },
)?;
- let mut resolved_hints = resolve_hints
- .into_iter()
- .filter_map(|it| {
+ Ok(resolve_hints
+ .and_then(|it| {
to_proto::inlay_hint(
&snap,
&forced_resolve_inlay_hints_config.fields_to_resolve,
@@ -1523,13 +1553,8 @@ pub(crate) fn handle_inlay_hints_resolve(
.ok()
})
.filter(|hint| hint.position == original_hint.position)
- .filter(|hint| hint.kind == original_hint.kind);
- if let Some(resolved_hint) = resolved_hints.next() {
- if resolved_hints.next().is_none() {
- return Ok(resolved_hint);
- }
- }
- Ok(original_hint)
+ .filter(|hint| hint.kind == original_hint.kind)
+ .unwrap_or(original_hint))
}
pub(crate) fn handle_call_hierarchy_prepare(
@@ -1633,8 +1658,9 @@ pub(crate) fn handle_semantic_tokens_full(
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;
+ let source_root = snap.analysis.source_root(file_id)?;
- let mut highlight_config = snap.config.highlighting_config();
+ let mut highlight_config = snap.config.highlighting_config(Some(source_root));
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
highlight_config.syntactic_name_ref_highlighting =
snap.workspaces.is_empty() || !snap.proc_macros_loaded;
@@ -1645,7 +1671,7 @@ pub(crate) fn handle_semantic_tokens_full(
&line_index,
highlights,
snap.config.semantics_tokens_augments_syntax_tokens(),
- snap.config.highlighting_non_standard_tokens(),
+ snap.config.highlighting_non_standard_tokens(Some(source_root)),
);
// Unconditionally cache the tokens
@@ -1663,8 +1689,9 @@ pub(crate) fn handle_semantic_tokens_full_delta(
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;
+ let source_root = snap.analysis.source_root(file_id)?;
- let mut highlight_config = snap.config.highlighting_config();
+ let mut highlight_config = snap.config.highlighting_config(Some(source_root));
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
highlight_config.syntactic_name_ref_highlighting =
snap.workspaces.is_empty() || !snap.proc_macros_loaded;
@@ -1675,7 +1702,7 @@ pub(crate) fn handle_semantic_tokens_full_delta(
&line_index,
highlights,
snap.config.semantics_tokens_augments_syntax_tokens(),
- snap.config.highlighting_non_standard_tokens(),
+ snap.config.highlighting_non_standard_tokens(Some(source_root)),
);
let cached_tokens = snap.semantic_tokens_cache.lock().remove(&params.text_document.uri);
@@ -1706,8 +1733,9 @@ pub(crate) fn handle_semantic_tokens_range(
let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
let text = snap.analysis.file_text(frange.file_id)?;
let line_index = snap.file_line_index(frange.file_id)?;
+ let source_root = snap.analysis.source_root(frange.file_id)?;
- let mut highlight_config = snap.config.highlighting_config();
+ let mut highlight_config = snap.config.highlighting_config(Some(source_root));
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
highlight_config.syntactic_name_ref_highlighting =
snap.workspaces.is_empty() || !snap.proc_macros_loaded;
@@ -1718,7 +1746,7 @@ pub(crate) fn handle_semantic_tokens_range(
&line_index,
highlights,
snap.config.semantics_tokens_augments_syntax_tokens(),
- snap.config.highlighting_non_standard_tokens(),
+ snap.config.highlighting_non_standard_tokens(Some(source_root)),
);
Ok(Some(semantic_tokens.into()))
}
@@ -1733,7 +1761,9 @@ pub(crate) fn handle_open_docs(
let ws_and_sysroot = snap.workspaces.iter().find_map(|ws| match ws {
ProjectWorkspace::Cargo { cargo, sysroot, .. } => Some((cargo, sysroot.as_ref().ok())),
ProjectWorkspace::Json { .. } => None,
- ProjectWorkspace::DetachedFiles { .. } => None,
+ ProjectWorkspace::DetachedFile { cargo_script, sysroot, .. } => {
+ cargo_script.as_ref().zip(Some(sysroot.as_ref().ok()))
+ }
});
let (cargo, sysroot) = match ws_and_sysroot {
@@ -1931,8 +1961,8 @@ fn goto_type_action_links(
snap: &GlobalStateSnapshot,
nav_targets: &[HoverGotoTypeData],
) -> Option<lsp_ext::CommandLinkGroup> {
- if !snap.config.hover_actions().goto_type_def
- || nav_targets.is_empty()
+ if nav_targets.is_empty()
+ || !snap.config.hover_actions().goto_type_def
|| !snap.config.client_commands().goto_location
{
return None;
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index 2731e845f3..7b385ca9d9 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -40,7 +40,7 @@ fn integrated_highlighting_benchmark() {
};
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
- with_proc_macro_server: ProcMacroServerChoice::None,
+ with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: false,
};
@@ -100,7 +100,7 @@ fn integrated_completion_benchmark() {
};
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
- with_proc_macro_server: ProcMacroServerChoice::None,
+ with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: true,
};
@@ -262,7 +262,7 @@ fn integrated_diagnostics_benchmark() {
};
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
- with_proc_macro_server: ProcMacroServerChoice::None,
+ with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: true,
};
@@ -300,7 +300,7 @@ fn integrated_diagnostics_benchmark() {
.diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id)
.unwrap();
- let _g = crate::tracing::hprof::init("*>1");
+ let _g = crate::tracing::hprof::init("*");
{
let _it = stdx::timeit("change");
diff --git a/crates/rust-analyzer/src/lsp/ext.rs b/crates/rust-analyzer/src/lsp/ext.rs
index eac982f1b2..12f8e71c98 100644
--- a/crates/rust-analyzer/src/lsp/ext.rs
+++ b/crates/rust-analyzer/src/lsp/ext.rs
@@ -463,13 +463,6 @@ pub struct TestInfo {
pub runnable: Runnable,
}
-#[derive(Serialize, Deserialize, Debug)]
-#[serde(rename_all = "camelCase")]
-pub struct InlayHintsParams {
- pub text_document: TextDocumentIdentifier,
- pub range: Option<lsp_types::Range>,
-}
-
pub enum Ssr {}
impl Request for Ssr {
@@ -801,7 +794,8 @@ pub struct CompletionResolveData {
#[derive(Debug, Serialize, Deserialize)]
pub struct InlayHintResolveData {
pub file_id: u32,
- pub hash: u64,
+ // This is a string instead of a u64 because JavaScript can't fully represent a u64
+ pub hash: String,
}
#[derive(Debug, Serialize, Deserialize)]
diff --git a/crates/rust-analyzer/src/lsp/semantic_tokens.rs b/crates/rust-analyzer/src/lsp/semantic_tokens.rs
index 3e00222b75..991c10743f 100644
--- a/crates/rust-analyzer/src/lsp/semantic_tokens.rs
+++ b/crates/rust-analyzer/src/lsp/semantic_tokens.rs
@@ -17,15 +17,19 @@ macro_rules! define_semantic_token_types {
}
) => {
- $(pub(crate) const $standard: SemanticTokenType = SemanticTokenType::$standard;)*
- $(pub(crate) const $custom: SemanticTokenType = SemanticTokenType::new($string);)*
+ pub(crate) mod types {
+ use super::SemanticTokenType;
+ $(pub(crate) const $standard: SemanticTokenType = SemanticTokenType::$standard;)*
+ $(pub(crate) const $custom: SemanticTokenType = SemanticTokenType::new($string);)*
+ }
pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
$(SemanticTokenType::$standard,)*
- $($custom),*
+ $(self::types::$custom),*
];
pub(crate) fn standard_fallback_type(token: SemanticTokenType) -> Option<SemanticTokenType> {
+ use self::types::*;
$(
if token == $custom {
None $(.or(Some(SemanticTokenType::$fallback)))?
@@ -61,39 +65,41 @@ define_semantic_token_types![
custom {
(ANGLE, "angle"),
(ARITHMETIC, "arithmetic") => OPERATOR,
- (ATTRIBUTE, "attribute") => DECORATOR,
(ATTRIBUTE_BRACKET, "attributeBracket") => DECORATOR,
+ (ATTRIBUTE, "attribute") => DECORATOR,
(BITWISE, "bitwise") => OPERATOR,
(BOOLEAN, "boolean"),
(BRACE, "brace"),
(BRACKET, "bracket"),
(BUILTIN_ATTRIBUTE, "builtinAttribute") => DECORATOR,
- (BUILTIN_TYPE, "builtinType"),
+ (BUILTIN_TYPE, "builtinType") => TYPE,
(CHAR, "character") => STRING,
(COLON, "colon"),
(COMMA, "comma"),
(COMPARISON, "comparison") => OPERATOR,
(CONST_PARAMETER, "constParameter"),
- (DERIVE, "derive") => DECORATOR,
+ (CONST, "const") => VARIABLE,
(DERIVE_HELPER, "deriveHelper") => DECORATOR,
+ (DERIVE, "derive") => DECORATOR,
(DOT, "dot"),
(ESCAPE_SEQUENCE, "escapeSequence") => STRING,
- (INVALID_ESCAPE_SEQUENCE, "invalidEscapeSequence") => STRING,
(FORMAT_SPECIFIER, "formatSpecifier") => STRING,
(GENERIC, "generic") => TYPE_PARAMETER,
+ (INVALID_ESCAPE_SEQUENCE, "invalidEscapeSequence") => STRING,
(LABEL, "label"),
(LIFETIME, "lifetime"),
(LOGICAL, "logical") => OPERATOR,
(MACRO_BANG, "macroBang") => MACRO,
- (PROC_MACRO, "procMacro") => MACRO,
(PARENTHESIS, "parenthesis"),
+ (PROC_MACRO, "procMacro") => MACRO,
(PUNCTUATION, "punctuation"),
(SELF_KEYWORD, "selfKeyword") => KEYWORD,
(SELF_TYPE_KEYWORD, "selfTypeKeyword") => KEYWORD,
(SEMICOLON, "semicolon"),
- (TYPE_ALIAS, "typeAlias"),
+ (STATIC, "static") => VARIABLE,
(TOOL_MODULE, "toolModule") => DECORATOR,
- (UNION, "union"),
+ (TYPE_ALIAS, "typeAlias") => TYPE,
+ (UNION, "union") => TYPE,
(UNRESOLVED_REFERENCE, "unresolvedReference"),
}
];
@@ -112,13 +118,16 @@ macro_rules! define_semantic_token_modifiers {
}
) => {
+ pub(crate) mod modifiers {
+ use super::SemanticTokenModifier;
- $(pub(crate) const $standard: SemanticTokenModifier = SemanticTokenModifier::$standard;)*
- $(pub(crate) const $custom: SemanticTokenModifier = SemanticTokenModifier::new($string);)*
+ $(pub(crate) const $standard: SemanticTokenModifier = SemanticTokenModifier::$standard;)*
+ $(pub(crate) const $custom: SemanticTokenModifier = SemanticTokenModifier::new($string);)*
+ }
pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
$(SemanticTokenModifier::$standard,)*
- $($custom),*
+ $(self::modifiers::$custom),*
];
const LAST_STANDARD_MOD: usize = count_tts!($($standard)*);
@@ -145,8 +154,8 @@ define_semantic_token_modifiers![
(INTRA_DOC_LINK, "intraDocLink"),
(LIBRARY, "library"),
(MACRO_MODIFIER, "macro"),
- (PROC_MACRO_MODIFIER, "proc_macro"),
(MUTABLE, "mutable"),
+ (PROC_MACRO_MODIFIER, "procMacro"),
(PUBLIC, "public"),
(REFERENCE, "reference"),
(TRAIT_MODIFIER, "trait"),
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index d8bb12528b..d02f4612dc 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -92,12 +92,13 @@ pub(crate) fn structure_node_kind(kind: StructureNodeKind) -> lsp_types::SymbolK
pub(crate) fn document_highlight_kind(
category: ReferenceCategory,
) -> Option<lsp_types::DocumentHighlightKind> {
- match category {
- ReferenceCategory::Read => Some(lsp_types::DocumentHighlightKind::READ),
- ReferenceCategory::Write => Some(lsp_types::DocumentHighlightKind::WRITE),
- ReferenceCategory::Import => None,
- ReferenceCategory::Test => None,
+ if category.contains(ReferenceCategory::WRITE) {
+ return Some(lsp_types::DocumentHighlightKind::WRITE);
}
+ if category.contains(ReferenceCategory::READ) {
+ return Some(lsp_types::DocumentHighlightKind::READ);
+ }
+ None
}
pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSeverity {
@@ -233,7 +234,7 @@ pub(crate) fn completion_items(
completion_item(&mut res, config, line_index, &tdpp, max_relevance, item);
}
- if let Some(limit) = config.completion().limit {
+ if let Some(limit) = config.completion(None).limit {
res.sort_by(|item1, item2| item1.sort_text.cmp(&item2.sort_text));
res.truncate(limit);
}
@@ -317,7 +318,7 @@ fn completion_item(
set_score(&mut lsp_item, max_relevance, item.relevance);
- if config.completion().enable_imports_on_the_fly && !item.import_to_add.is_empty() {
+ if config.completion(None).enable_imports_on_the_fly && !item.import_to_add.is_empty() {
let imports = item
.import_to_add
.into_iter()
@@ -479,7 +480,11 @@ pub(crate) fn inlay_hint(
let data = match resolve_hash {
Some(hash) if something_to_resolve => Some(
- to_value(lsp_ext::InlayHintResolveData { file_id: file_id.index(), hash }).unwrap(),
+ to_value(lsp_ext::InlayHintResolveData {
+ file_id: file_id.index(),
+ hash: hash.to_string(),
+ })
+ .unwrap(),
),
_ => None,
};
@@ -650,97 +655,99 @@ pub(crate) fn semantic_token_delta(
fn semantic_token_type_and_modifiers(
highlight: Highlight,
) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) {
+ use semantic_tokens::{modifiers as mods, types};
+
let ty = match highlight.tag {
HlTag::Symbol(symbol) => match symbol {
- SymbolKind::Attribute => semantic_tokens::DECORATOR,
- SymbolKind::Derive => semantic_tokens::DERIVE,
- SymbolKind::DeriveHelper => semantic_tokens::DERIVE_HELPER,
- SymbolKind::Module => semantic_tokens::NAMESPACE,
- SymbolKind::Impl => semantic_tokens::TYPE_ALIAS,
- SymbolKind::Field => semantic_tokens::PROPERTY,
- SymbolKind::TypeParam => semantic_tokens::TYPE_PARAMETER,
- SymbolKind::ConstParam => semantic_tokens::CONST_PARAMETER,
- SymbolKind::LifetimeParam => semantic_tokens::LIFETIME,
- SymbolKind::Label => semantic_tokens::LABEL,
- SymbolKind::ValueParam => semantic_tokens::PARAMETER,
- SymbolKind::SelfParam => semantic_tokens::SELF_KEYWORD,
- SymbolKind::SelfType => semantic_tokens::SELF_TYPE_KEYWORD,
- SymbolKind::Local => semantic_tokens::VARIABLE,
- SymbolKind::Method => semantic_tokens::METHOD,
- SymbolKind::Function => semantic_tokens::FUNCTION,
- SymbolKind::Const => semantic_tokens::VARIABLE,
- SymbolKind::Static => semantic_tokens::VARIABLE,
- SymbolKind::Struct => semantic_tokens::STRUCT,
- SymbolKind::Enum => semantic_tokens::ENUM,
- SymbolKind::Variant => semantic_tokens::ENUM_MEMBER,
- SymbolKind::Union => semantic_tokens::UNION,
- SymbolKind::TypeAlias => semantic_tokens::TYPE_ALIAS,
- SymbolKind::Trait => semantic_tokens::INTERFACE,
- SymbolKind::TraitAlias => semantic_tokens::INTERFACE,
- SymbolKind::Macro => semantic_tokens::MACRO,
- SymbolKind::ProcMacro => semantic_tokens::PROC_MACRO,
- SymbolKind::BuiltinAttr => semantic_tokens::BUILTIN_ATTRIBUTE,
- SymbolKind::ToolModule => semantic_tokens::TOOL_MODULE,
+ SymbolKind::Attribute => types::DECORATOR,
+ SymbolKind::Derive => types::DERIVE,
+ SymbolKind::DeriveHelper => types::DERIVE_HELPER,
+ SymbolKind::Module => types::NAMESPACE,
+ SymbolKind::Impl => types::TYPE_ALIAS,
+ SymbolKind::Field => types::PROPERTY,
+ SymbolKind::TypeParam => types::TYPE_PARAMETER,
+ SymbolKind::ConstParam => types::CONST_PARAMETER,
+ SymbolKind::LifetimeParam => types::LIFETIME,
+ SymbolKind::Label => types::LABEL,
+ SymbolKind::ValueParam => types::PARAMETER,
+ SymbolKind::SelfParam => types::SELF_KEYWORD,
+ SymbolKind::SelfType => types::SELF_TYPE_KEYWORD,
+ SymbolKind::Local => types::VARIABLE,
+ SymbolKind::Method => types::METHOD,
+ SymbolKind::Function => types::FUNCTION,
+ SymbolKind::Const => types::CONST,
+ SymbolKind::Static => types::STATIC,
+ SymbolKind::Struct => types::STRUCT,
+ SymbolKind::Enum => types::ENUM,
+ SymbolKind::Variant => types::ENUM_MEMBER,
+ SymbolKind::Union => types::UNION,
+ SymbolKind::TypeAlias => types::TYPE_ALIAS,
+ SymbolKind::Trait => types::INTERFACE,
+ SymbolKind::TraitAlias => types::INTERFACE,
+ SymbolKind::Macro => types::MACRO,
+ SymbolKind::ProcMacro => types::PROC_MACRO,
+ SymbolKind::BuiltinAttr => types::BUILTIN_ATTRIBUTE,
+ SymbolKind::ToolModule => types::TOOL_MODULE,
},
- HlTag::AttributeBracket => semantic_tokens::ATTRIBUTE_BRACKET,
- HlTag::BoolLiteral => semantic_tokens::BOOLEAN,
- HlTag::BuiltinType => semantic_tokens::BUILTIN_TYPE,
- HlTag::ByteLiteral | HlTag::NumericLiteral => semantic_tokens::NUMBER,
- HlTag::CharLiteral => semantic_tokens::CHAR,
- HlTag::Comment => semantic_tokens::COMMENT,
- HlTag::EscapeSequence => semantic_tokens::ESCAPE_SEQUENCE,
- HlTag::InvalidEscapeSequence => semantic_tokens::INVALID_ESCAPE_SEQUENCE,
- HlTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER,
- HlTag::Keyword => semantic_tokens::KEYWORD,
- HlTag::None => semantic_tokens::GENERIC,
+ HlTag::AttributeBracket => types::ATTRIBUTE_BRACKET,
+ HlTag::BoolLiteral => types::BOOLEAN,
+ HlTag::BuiltinType => types::BUILTIN_TYPE,
+ HlTag::ByteLiteral | HlTag::NumericLiteral => types::NUMBER,
+ HlTag::CharLiteral => types::CHAR,
+ HlTag::Comment => types::COMMENT,
+ HlTag::EscapeSequence => types::ESCAPE_SEQUENCE,
+ HlTag::InvalidEscapeSequence => types::INVALID_ESCAPE_SEQUENCE,
+ HlTag::FormatSpecifier => types::FORMAT_SPECIFIER,
+ HlTag::Keyword => types::KEYWORD,
+ HlTag::None => types::GENERIC,
HlTag::Operator(op) => match op {
- HlOperator::Bitwise => semantic_tokens::BITWISE,
- HlOperator::Arithmetic => semantic_tokens::ARITHMETIC,
- HlOperator::Logical => semantic_tokens::LOGICAL,
- HlOperator::Comparison => semantic_tokens::COMPARISON,
- HlOperator::Other => semantic_tokens::OPERATOR,
+ HlOperator::Bitwise => types::BITWISE,
+ HlOperator::Arithmetic => types::ARITHMETIC,
+ HlOperator::Logical => types::LOGICAL,
+ HlOperator::Comparison => types::COMPARISON,
+ HlOperator::Other => types::OPERATOR,
},
- HlTag::StringLiteral => semantic_tokens::STRING,
- HlTag::UnresolvedReference => semantic_tokens::UNRESOLVED_REFERENCE,
+ HlTag::StringLiteral => types::STRING,
+ HlTag::UnresolvedReference => types::UNRESOLVED_REFERENCE,
HlTag::Punctuation(punct) => match punct {
- HlPunct::Bracket => semantic_tokens::BRACKET,
- HlPunct::Brace => semantic_tokens::BRACE,
- HlPunct::Parenthesis => semantic_tokens::PARENTHESIS,
- HlPunct::Angle => semantic_tokens::ANGLE,
- HlPunct::Comma => semantic_tokens::COMMA,
- HlPunct::Dot => semantic_tokens::DOT,
- HlPunct::Colon => semantic_tokens::COLON,
- HlPunct::Semi => semantic_tokens::SEMICOLON,
- HlPunct::Other => semantic_tokens::PUNCTUATION,
- HlPunct::MacroBang => semantic_tokens::MACRO_BANG,
+ HlPunct::Bracket => types::BRACKET,
+ HlPunct::Brace => types::BRACE,
+ HlPunct::Parenthesis => types::PARENTHESIS,
+ HlPunct::Angle => types::ANGLE,
+ HlPunct::Comma => types::COMMA,
+ HlPunct::Dot => types::DOT,
+ HlPunct::Colon => types::COLON,
+ HlPunct::Semi => types::SEMICOLON,
+ HlPunct::Other => types::PUNCTUATION,
+ HlPunct::MacroBang => types::MACRO_BANG,
},
};
let mut mods = semantic_tokens::ModifierSet::default();
for modifier in highlight.mods.iter() {
let modifier = match modifier {
- HlMod::Associated => semantic_tokens::ASSOCIATED,
- HlMod::Async => semantic_tokens::ASYNC,
- HlMod::Attribute => semantic_tokens::ATTRIBUTE_MODIFIER,
- HlMod::Callable => semantic_tokens::CALLABLE,
- HlMod::Const => semantic_tokens::CONSTANT,
- HlMod::Consuming => semantic_tokens::CONSUMING,
- HlMod::ControlFlow => semantic_tokens::CONTROL_FLOW,
- HlMod::CrateRoot => semantic_tokens::CRATE_ROOT,
- HlMod::DefaultLibrary => semantic_tokens::DEFAULT_LIBRARY,
- HlMod::Definition => semantic_tokens::DECLARATION,
- HlMod::Documentation => semantic_tokens::DOCUMENTATION,
- HlMod::Injected => semantic_tokens::INJECTED,
- HlMod::IntraDocLink => semantic_tokens::INTRA_DOC_LINK,
- HlMod::Library => semantic_tokens::LIBRARY,
- HlMod::Macro => semantic_tokens::MACRO_MODIFIER,
- HlMod::ProcMacro => semantic_tokens::PROC_MACRO_MODIFIER,
- HlMod::Mutable => semantic_tokens::MUTABLE,
- HlMod::Public => semantic_tokens::PUBLIC,
- HlMod::Reference => semantic_tokens::REFERENCE,
- HlMod::Static => semantic_tokens::STATIC,
- HlMod::Trait => semantic_tokens::TRAIT_MODIFIER,
- HlMod::Unsafe => semantic_tokens::UNSAFE,
+ HlMod::Associated => mods::ASSOCIATED,
+ HlMod::Async => mods::ASYNC,
+ HlMod::Attribute => mods::ATTRIBUTE_MODIFIER,
+ HlMod::Callable => mods::CALLABLE,
+ HlMod::Const => mods::CONSTANT,
+ HlMod::Consuming => mods::CONSUMING,
+ HlMod::ControlFlow => mods::CONTROL_FLOW,
+ HlMod::CrateRoot => mods::CRATE_ROOT,
+ HlMod::DefaultLibrary => mods::DEFAULT_LIBRARY,
+ HlMod::Definition => mods::DECLARATION,
+ HlMod::Documentation => mods::DOCUMENTATION,
+ HlMod::Injected => mods::INJECTED,
+ HlMod::IntraDocLink => mods::INTRA_DOC_LINK,
+ HlMod::Library => mods::LIBRARY,
+ HlMod::Macro => mods::MACRO_MODIFIER,
+ HlMod::ProcMacro => mods::PROC_MACRO_MODIFIER,
+ HlMod::Mutable => mods::MUTABLE,
+ HlMod::Public => mods::PUBLIC,
+ HlMod::Reference => mods::REFERENCE,
+ HlMod::Static => mods::STATIC,
+ HlMod::Trait => mods::TRAIT_MODIFIER,
+ HlMod::Unsafe => mods::UNSAFE,
};
mods |= modifier;
}
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 38df323512..f37b25fb95 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -7,7 +7,7 @@ use std::{
};
use always_assert::always;
-use crossbeam_channel::{never, select, Receiver};
+use crossbeam_channel::{select, Receiver};
use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath};
use lsp_server::{Connection, Notification, Request};
use lsp_types::{notification::Notification as _, TextDocumentIdentifier};
@@ -220,7 +220,7 @@ impl GlobalState {
recv(self.flycheck_receiver) -> task =>
Some(Event::Flycheck(task.unwrap())),
- recv(self.test_run_session.as_ref().map(|s| s.receiver()).unwrap_or(&never())) -> task =>
+ recv(self.test_run_receiver) -> task =>
Some(Event::TestResult(task.unwrap())),
}
@@ -337,9 +337,7 @@ impl GlobalState {
.entered();
self.handle_cargo_test_msg(message);
// Coalesce many test result event into a single loop turn
- while let Some(message) =
- self.test_run_session.as_ref().and_then(|r| r.receiver().try_recv().ok())
- {
+ while let Ok(message) = self.test_run_receiver.try_recv() {
self.handle_cargo_test_msg(message);
}
}
@@ -350,11 +348,7 @@ impl GlobalState {
let memdocs_added_or_removed = self.mem_docs.take_changes();
if self.is_quiescent() {
- let became_quiescent = !(was_quiescent
- || self.fetch_workspaces_queue.op_requested()
- || self.fetch_build_data_queue.op_requested()
- || self.fetch_proc_macros_queue.op_requested());
-
+ let became_quiescent = !was_quiescent;
if became_quiescent {
if self.config.check_on_save() {
// Project has loaded properly, kick off initial flycheck
@@ -365,7 +359,7 @@ impl GlobalState {
}
}
- let client_refresh = !was_quiescent || state_changed;
+ let client_refresh = became_quiescent || state_changed;
if client_refresh {
// Refresh semantic tokens if the client supports it.
if self.config.semantic_tokens_refresh() {
@@ -379,17 +373,17 @@ impl GlobalState {
}
// Refresh inlay hints if the client supports it.
- if self.send_hint_refresh_query && self.config.inlay_hints_refresh() {
+ if self.config.inlay_hints_refresh() {
self.send_request::<lsp_types::request::InlayHintRefreshRequest>((), |_, _| ());
- self.send_hint_refresh_query = false;
}
}
- let things_changed = !was_quiescent || state_changed || memdocs_added_or_removed;
- if things_changed && self.config.publish_diagnostics() {
+ let project_or_mem_docs_changed =
+ became_quiescent || state_changed || memdocs_added_or_removed;
+ if project_or_mem_docs_changed && self.config.publish_diagnostics() {
self.update_diagnostics();
}
- if things_changed && self.config.test_explorer() {
+ if project_or_mem_docs_changed && self.config.test_explorer() {
self.update_tests();
}
}
@@ -411,7 +405,7 @@ impl GlobalState {
// See https://github.com/rust-lang/rust-analyzer/issues/13130
let patch_empty = |message: &mut String| {
if message.is_empty() {
- *message = " ".to_owned();
+ " ".clone_into(message);
}
};
@@ -434,7 +428,7 @@ impl GlobalState {
}
}
- if self.config.cargo_autoreload() {
+ if self.config.cargo_autoreload_config() {
if let Some((cause, force_crate_graph_reload)) =
self.fetch_workspaces_queue.should_start_op()
{
@@ -643,7 +637,6 @@ impl GlobalState {
}
self.switch_workspaces("fetched build data".to_owned());
- self.send_hint_refresh_query = true;
(Some(Progress::End), None)
}
@@ -660,7 +653,6 @@ impl GlobalState {
ProcMacroProgress::End(proc_macro_load_result) => {
self.fetch_proc_macros_queue.op_completed(true);
self.set_proc_macros(proc_macro_load_result);
- self.send_hint_refresh_query = true;
(Some(Progress::End), None)
}
};
@@ -792,8 +784,11 @@ impl GlobalState {
}
flycheck::CargoTestMessage::Suite => (),
flycheck::CargoTestMessage::Finished => {
- self.send_notification::<lsp_ext::EndRunTest>(());
- self.test_run_session = None;
+ self.test_run_remaining_jobs = self.test_run_remaining_jobs.saturating_sub(1);
+ if self.test_run_remaining_jobs == 0 {
+ self.send_notification::<lsp_ext::EndRunTest>(());
+ self.test_run_session = None;
+ }
}
flycheck::CargoTestMessage::Custom { text } => {
self.send_notification::<lsp_ext::AppendOutputToRunTest>(text);
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 771a5599f6..5d8a66cabc 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -76,9 +76,9 @@ impl GlobalState {
if self.config.lru_parse_query_capacity() != old_config.lru_parse_query_capacity() {
self.analysis_host.update_lru_capacity(self.config.lru_parse_query_capacity());
}
- if self.config.lru_query_capacities() != old_config.lru_query_capacities() {
+ if self.config.lru_query_capacities_config() != old_config.lru_query_capacities_config() {
self.analysis_host.update_lru_capacities(
- &self.config.lru_query_capacities().cloned().unwrap_or_default(),
+ &self.config.lru_query_capacities_config().cloned().unwrap_or_default(),
);
}
if self.config.linked_or_discovered_projects() != old_config.linked_or_discovered_projects()
@@ -153,7 +153,7 @@ impl GlobalState {
for ws in self.workspaces.iter() {
let (ProjectWorkspace::Cargo { sysroot, .. }
| ProjectWorkspace::Json { sysroot, .. }
- | ProjectWorkspace::DetachedFiles { sysroot, .. }) = ws;
+ | ProjectWorkspace::DetachedFile { sysroot, .. }) = ws;
match sysroot {
Err(None) => (),
Err(Some(e)) => {
@@ -234,6 +234,7 @@ impl GlobalState {
it.clone(),
cargo_config.target.as_deref(),
&cargo_config.extra_env,
+ &cargo_config.cfg_overrides,
))
}
})
@@ -254,7 +255,7 @@ impl GlobalState {
}
if !detached_files.is_empty() {
- workspaces.push(project_model::ProjectWorkspace::load_detached_files(
+ workspaces.extend(project_model::ProjectWorkspace::load_detached_files(
detached_files,
&cargo_config,
));
@@ -539,9 +540,6 @@ impl GlobalState {
}
fn recreate_crate_graph(&mut self, cause: String) {
- // crate graph construction relies on these paths, record them so when one of them gets
- // deleted or created we trigger a reconstruction of the crate graph
- let mut crate_graph_file_dependencies = mem::take(&mut self.crate_graph_file_dependencies);
self.report_progress(
"Building CrateGraph",
crate::lsp::utils::Progress::Begin,
@@ -550,13 +548,25 @@ impl GlobalState {
None,
);
+ // crate graph construction relies on these paths, record them so when one of them gets
+ // deleted or created we trigger a reconstruction of the crate graph
+ self.crate_graph_file_dependencies.clear();
+ self.detached_files = self
+ .workspaces
+ .iter()
+ .filter_map(|ws| match ws {
+ ProjectWorkspace::DetachedFile { file, .. } => Some(file.clone()),
+ _ => None,
+ })
+ .collect();
+
let (crate_graph, proc_macro_paths, layouts, toolchains) = {
// Create crate graph from all the workspaces
let vfs = &mut self.vfs.write().0;
let load = |path: &AbsPath| {
let vfs_path = vfs::VfsPath::from(path.to_path_buf());
- crate_graph_file_dependencies.insert(vfs_path.clone());
+ self.crate_graph_file_dependencies.insert(vfs_path.clone());
vfs.file_id(&vfs_path)
};
@@ -576,7 +586,6 @@ impl GlobalState {
change.set_target_data_layouts(layouts);
change.set_toolchains(toolchains);
self.analysis_host.apply_change(change);
- self.crate_graph_file_dependencies = crate_graph_file_dependencies;
self.report_progress(
"Building CrateGraph",
crate::lsp::utils::Progress::End,
@@ -673,7 +682,8 @@ impl GlobalState {
_ => None,
}
}
- ProjectWorkspace::DetachedFiles { .. } => None,
+ // FIXME
+ ProjectWorkspace::DetachedFile { .. } => None,
})
.map(|(id, root, sysroot_root)| {
let sender = sender.clone();
@@ -712,15 +722,9 @@ pub fn ws_to_crate_graph(
let (other, mut crate_proc_macros) = ws.to_crate_graph(&mut load, extra_env);
let num_layouts = layouts.len();
let num_toolchains = toolchains.len();
- let (toolchain, layout) = match ws {
- ProjectWorkspace::Cargo { toolchain, target_layout, .. }
- | ProjectWorkspace::Json { toolchain, target_layout, .. } => {
- (toolchain.clone(), target_layout.clone())
- }
- ProjectWorkspace::DetachedFiles { .. } => {
- (None, Err("detached files have no layout".into()))
- }
- };
+ let (ProjectWorkspace::Cargo { toolchain, target_layout, .. }
+ | ProjectWorkspace::Json { toolchain, target_layout, .. }
+ | ProjectWorkspace::DetachedFile { toolchain, target_layout, .. }) = ws;
let mapping = crate_graph.extend(
other,
@@ -729,7 +733,7 @@ pub fn ws_to_crate_graph(
// if the newly created crate graph's layout is equal to the crate of the merged graph, then
// we can merge the crates.
let id = cg_id.into_raw().into_u32() as usize;
- layouts[id] == layout && toolchains[id] == toolchain && cg_data == o_data
+ layouts[id] == *target_layout && toolchains[id] == *toolchain && cg_data == o_data
},
);
// Populate the side tables for the newly merged crates
@@ -741,13 +745,13 @@ pub fn ws_to_crate_graph(
if layouts.len() <= idx {
layouts.resize(idx + 1, e.clone());
}
- layouts[idx] = layout.clone();
+ layouts[idx].clone_from(target_layout);
}
if idx >= num_toolchains {
if toolchains.len() <= idx {
toolchains.resize(idx + 1, None);
}
- toolchains[idx] = toolchain.clone();
+ toolchains[idx].clone_from(toolchain);
}
});
proc_macro_paths.push(crate_proc_macros);
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index 439b006977..b87f02947b 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -23,12 +23,12 @@ use lsp_types::{
notification::DidOpenTextDocument,
request::{
CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest,
- WillRenameFiles, WorkspaceSymbolRequest,
+ InlayHintRequest, InlayHintResolveRequest, WillRenameFiles, WorkspaceSymbolRequest,
},
CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams,
DocumentFormattingParams, FileRename, FormattingOptions, GotoDefinitionParams, HoverParams,
- PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem,
- TextDocumentPositionParams, WorkDoneProgressParams,
+ InlayHint, InlayHintLabel, InlayHintParams, PartialResultParams, Position, Range,
+ RenameFilesParams, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams,
};
use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams, UnindexedProject};
use serde_json::json;
@@ -76,6 +76,147 @@ use std::collections::Spam;
}
#[test]
+fn resolves_inlay_hints() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+struct Foo;
+fn f() {
+ let x = Foo;
+}
+"#,
+ )
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ let res = server.send_request::<InlayHintRequest>(InlayHintParams {
+ range: Range::new(Position::new(0, 0), Position::new(3, 1)),
+ text_document: server.doc_id("src/lib.rs"),
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ });
+ let mut hints = serde_json::from_value::<Option<Vec<InlayHint>>>(res).unwrap().unwrap();
+ let hint = hints.pop().unwrap();
+ assert!(hint.data.is_some());
+ assert!(
+ matches!(&hint.label, InlayHintLabel::LabelParts(parts) if parts[1].location.is_none())
+ );
+ let res = server.send_request::<InlayHintResolveRequest>(hint);
+ let hint = serde_json::from_value::<InlayHint>(res).unwrap();
+ assert!(hint.data.is_none());
+ assert!(
+ matches!(&hint.label, InlayHintLabel::LabelParts(parts) if parts[1].location.is_some())
+ );
+}
+
+#[test]
+fn completes_items_from_standard_library_in_cargo_script() {
+ // this test requires nightly so CI can't run it
+ if skip_slow_tests() || std::env::var("CI").is_ok() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ r#"
+//- /dependency/Cargo.toml
+[package]
+name = "dependency"
+version = "0.1.0"
+//- /dependency/src/lib.rs
+pub struct SpecialHashMap;
+//- /dependency2/Cargo.toml
+[package]
+name = "dependency2"
+version = "0.1.0"
+//- /dependency2/src/lib.rs
+pub struct SpecialHashMap2;
+//- /src/lib.rs
+#!/usr/bin/env -S cargo +nightly -Zscript
+---
+[dependencies]
+dependency = { path = "../dependency" }
+---
+use dependency::Spam;
+use dependency2::Spam;
+"#,
+ )
+ .with_config(serde_json::json!({
+ "cargo": { "sysroot": null },
+ }))
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ let res = server.send_request::<Completion>(CompletionParams {
+ text_document_position: TextDocumentPositionParams::new(
+ server.doc_id("src/lib.rs"),
+ Position::new(5, 18),
+ ),
+ context: None,
+ partial_result_params: PartialResultParams::default(),
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ });
+ assert!(res.to_string().contains("SpecialHashMap"), "{}", res.to_string());
+
+ let res = server.send_request::<Completion>(CompletionParams {
+ text_document_position: TextDocumentPositionParams::new(
+ server.doc_id("src/lib.rs"),
+ Position::new(6, 18),
+ ),
+ context: None,
+ partial_result_params: PartialResultParams::default(),
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ });
+ assert!(!res.to_string().contains("SpecialHashMap"));
+
+ server.write_file_and_save(
+ "src/lib.rs",
+ r#"#!/usr/bin/env -S cargo +nightly -Zscript
+---
+[dependencies]
+dependency2 = { path = "../dependency2" }
+---
+use dependency::Spam;
+use dependency2::Spam;
+"#
+ .to_owned(),
+ );
+
+ let server = server.wait_until_workspace_is_loaded();
+
+ std::thread::sleep(std::time::Duration::from_secs(3));
+
+ let res = server.send_request::<Completion>(CompletionParams {
+ text_document_position: TextDocumentPositionParams::new(
+ server.doc_id("src/lib.rs"),
+ Position::new(5, 18),
+ ),
+ context: None,
+ partial_result_params: PartialResultParams::default(),
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ });
+ assert!(!res.to_string().contains("SpecialHashMap"));
+
+ let res = server.send_request::<Completion>(CompletionParams {
+ text_document_position: TextDocumentPositionParams::new(
+ server.doc_id("src/lib.rs"),
+ Position::new(6, 18),
+ ),
+ context: None,
+ partial_result_params: PartialResultParams::default(),
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ });
+ assert!(res.to_string().contains("SpecialHashMap"));
+}
+
+#[test]
fn test_runnables_project() {
if skip_slow_tests() {
return;
@@ -115,7 +256,7 @@ fn main() {}
{
"args": {
"cargoArgs": ["test", "--package", "foo", "--test", "spam"],
- "executableArgs": ["test_eggs", "--exact", "--nocapture"],
+ "executableArgs": ["test_eggs", "--exact", "--show-output"],
"cargoExtraArgs": [],
"overrideCargo": null,
"workspaceRoot": server.path().join("foo")
@@ -148,7 +289,7 @@ fn main() {}
"cargoExtraArgs": [],
"executableArgs": [
"",
- "--nocapture"
+ "--show-output"
]
},
"kind": "cargo",
diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs
index 8bbe6ff372..f04962a7a2 100644
--- a/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -125,7 +125,7 @@ impl Project<'_> {
}
let mut config = Config::new(
- tmp_dir_path,
+ tmp_dir_path.clone(),
lsp_types::ClientCapabilities {
workspace: Some(lsp_types::WorkspaceClientCapabilities {
did_change_watched_files: Some(
@@ -159,6 +159,18 @@ impl Project<'_> {
content_format: Some(vec![lsp_types::MarkupKind::Markdown]),
..Default::default()
}),
+ inlay_hint: Some(lsp_types::InlayHintClientCapabilities {
+ resolve_support: Some(lsp_types::InlayHintResolveClientCapabilities {
+ properties: vec![
+ "textEdits".to_owned(),
+ "tooltip".to_owned(),
+ "label.tooltip".to_owned(),
+ "label.location".to_owned(),
+ "label.command".to_owned(),
+ ],
+ }),
+ ..Default::default()
+ }),
..Default::default()
}),
window: Some(lsp_types::WindowClientCapabilities {
@@ -173,10 +185,14 @@ impl Project<'_> {
roots,
None,
);
- config.update(self.config).expect("invalid config");
+ // TODO: don't hardcode src/lib.rs as detached file
+ let mut c = self.config;
+ let p = tmp_dir_path.join("src/lib.rs").to_string();
+ c["detachedFiles"] = serde_json::json!([p]);
+ config.update(c).expect("invalid config");
config.rediscover_workspaces();
- Server::new(tmp_dir, config)
+ Server::new(tmp_dir.keep(), config)
}
}
@@ -271,6 +287,7 @@ impl Server {
}
}
+ #[track_caller]
pub(crate) fn send_request<R>(&self, params: R::Params) -> Value
where
R: lsp_types::request::Request,
@@ -282,6 +299,7 @@ impl Server {
let r = Request::new(id.into(), R::METHOD.to_owned(), params);
self.send_request_(r)
}
+ #[track_caller]
fn send_request_(&self, r: Request) -> Value {
let id = r.id.clone();
self.client.sender.send(r.clone().into()).unwrap();
@@ -362,6 +380,16 @@ impl Server {
pub(crate) fn path(&self) -> &Utf8Path {
self.dir.path()
}
+
+ pub(crate) fn write_file_and_save(&self, path: &str, text: String) {
+ fs::write(self.dir.path().join(path), &text).unwrap();
+ self.notification::<lsp_types::notification::DidSaveTextDocument>(
+ lsp_types::DidSaveTextDocumentParams {
+ text_document: self.doc_id(path),
+ text: Some(text),
+ },
+ )
+ }
}
impl Drop for Server {
diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs
index 78da4487d4..3443939133 100644
--- a/crates/rust-analyzer/tests/slow-tests/tidy.rs
+++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs
@@ -243,7 +243,7 @@ struct TidyDocs {
impl TidyDocs {
fn visit(&mut self, path: &Path, text: &str) {
// Tests and diagnostic fixes don't need module level comments.
- if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar"]) {
+ if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar", "salsa"]) {
return;
}
diff --git a/crates/salsa/salsa-macros/src/database_storage.rs b/crates/salsa/salsa-macros/src/database_storage.rs
index 223da9b529..14238e2fed 100644
--- a/crates/salsa/salsa-macros/src/database_storage.rs
+++ b/crates/salsa/salsa-macros/src/database_storage.rs
@@ -1,4 +1,5 @@
-//!
+//! Implementation for `[salsa::database]` decorator.
+
use heck::ToSnakeCase;
use proc_macro::TokenStream;
use syn::parse::{Parse, ParseStream};
diff --git a/crates/salsa/salsa-macros/src/parenthesized.rs b/crates/salsa/salsa-macros/src/parenthesized.rs
index 9df41e03c1..5ecd1b8a05 100644
--- a/crates/salsa/salsa-macros/src/parenthesized.rs
+++ b/crates/salsa/salsa-macros/src/parenthesized.rs
@@ -1,4 +1,4 @@
-//!
+//! Parenthesis helper
pub(crate) struct Parenthesized<T>(pub(crate) T);
impl<T> syn::parse::Parse for Parenthesized<T>
diff --git a/crates/salsa/salsa-macros/src/query_group.rs b/crates/salsa/salsa-macros/src/query_group.rs
index 5983765eec..659797d6d4 100644
--- a/crates/salsa/salsa-macros/src/query_group.rs
+++ b/crates/salsa/salsa-macros/src/query_group.rs
@@ -1,4 +1,4 @@
-//!
+//! Implementation for `[salsa::query_group]` decorator.
use crate::parenthesized::Parenthesized;
use heck::ToUpperCamelCase;
diff --git a/crates/salsa/src/derived.rs b/crates/salsa/src/derived.rs
index 3b5bd7f9e3..fd31ab2041 100644
--- a/crates/salsa/src/derived.rs
+++ b/crates/salsa/src/derived.rs
@@ -1,4 +1,3 @@
-//!
use crate::debug::TableEntry;
use crate::durability::Durability;
use crate::hash::FxIndexMap;
diff --git a/crates/salsa/src/derived/slot.rs b/crates/salsa/src/derived/slot.rs
index 75204c8ff6..cfafa40ce3 100644
--- a/crates/salsa/src/derived/slot.rs
+++ b/crates/salsa/src/derived/slot.rs
@@ -1,4 +1,3 @@
-//!
use crate::debug::TableEntry;
use crate::derived::MemoizationPolicy;
use crate::durability::Durability;
diff --git a/crates/salsa/src/durability.rs b/crates/salsa/src/durability.rs
index 44abae3170..7b8e6840fc 100644
--- a/crates/salsa/src/durability.rs
+++ b/crates/salsa/src/durability.rs
@@ -1,4 +1,3 @@
-//!
/// Describes how likely a value is to change -- how "durable" it is.
/// By default, inputs have `Durability::LOW` and interned values have
/// `Durability::HIGH`. But inputs can be explicitly set with other
diff --git a/crates/salsa/src/hash.rs b/crates/salsa/src/hash.rs
index 47a2dd1ce0..3b2d7df3fb 100644
--- a/crates/salsa/src/hash.rs
+++ b/crates/salsa/src/hash.rs
@@ -1,4 +1,3 @@
-//!
pub(crate) type FxHasher = std::hash::BuildHasherDefault<rustc_hash::FxHasher>;
pub(crate) type FxIndexSet<K> = indexmap::IndexSet<K, FxHasher>;
pub(crate) type FxIndexMap<K, V> = indexmap::IndexMap<K, V, FxHasher>;
diff --git a/crates/salsa/src/input.rs b/crates/salsa/src/input.rs
index 922ec5a775..f04f48e3ba 100644
--- a/crates/salsa/src/input.rs
+++ b/crates/salsa/src/input.rs
@@ -1,4 +1,3 @@
-//!
use crate::debug::TableEntry;
use crate::durability::Durability;
use crate::hash::FxIndexMap;
diff --git a/crates/salsa/src/intern_id.rs b/crates/salsa/src/intern_id.rs
index a7bbc088f9..b060d8aab6 100644
--- a/crates/salsa/src/intern_id.rs
+++ b/crates/salsa/src/intern_id.rs
@@ -1,4 +1,3 @@
-//!
use std::fmt;
use std::num::NonZeroU32;
diff --git a/crates/salsa/src/interned.rs b/crates/salsa/src/interned.rs
index c065e7e2bd..bfa9cc0591 100644
--- a/crates/salsa/src/interned.rs
+++ b/crates/salsa/src/interned.rs
@@ -1,4 +1,3 @@
-//!
use crate::debug::TableEntry;
use crate::durability::Durability;
use crate::intern_id::InternId;
diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs
index fe80759887..f86683ee77 100644
--- a/crates/salsa/src/lib.rs
+++ b/crates/salsa/src/lib.rs
@@ -1,8 +1,7 @@
-//!
#![allow(clippy::type_complexity)]
#![allow(clippy::question_mark)]
+#![allow(missing_docs)]
#![warn(rust_2018_idioms)]
-#![warn(missing_docs)]
//! The salsa crate is a crate for incremental recomputation. It
//! permits you to define a "database" of queries with both inputs and
@@ -124,9 +123,9 @@ pub struct Event {
impl Event {
/// Returns a type that gives a user-readable debug output.
/// Use like `println!("{:?}", index.debug(db))`.
- pub fn debug<'me, D: ?Sized>(&'me self, db: &'me D) -> impl std::fmt::Debug + 'me
+ pub fn debug<'me, D>(&'me self, db: &'me D) -> impl std::fmt::Debug + 'me
where
- D: plumbing::DatabaseOps,
+ D: ?Sized + plumbing::DatabaseOps,
{
EventDebug { event: self, db }
}
@@ -206,9 +205,9 @@ pub enum EventKind {
impl EventKind {
/// Returns a type that gives a user-readable debug output.
/// Use like `println!("{:?}", index.debug(db))`.
- pub fn debug<'me, D: ?Sized>(&'me self, db: &'me D) -> impl std::fmt::Debug + 'me
+ pub fn debug<'me, D>(&'me self, db: &'me D) -> impl std::fmt::Debug + 'me
where
- D: plumbing::DatabaseOps,
+ D: ?Sized + plumbing::DatabaseOps,
{
EventKindDebug { kind: self, db }
}
@@ -400,9 +399,9 @@ impl DatabaseKeyIndex {
/// Returns a type that gives a user-readable debug output.
/// Use like `println!("{:?}", index.debug(db))`.
- pub fn debug<D: ?Sized>(self, db: &D) -> impl std::fmt::Debug + '_
+ pub fn debug<D>(self, db: &D) -> impl std::fmt::Debug + '_
where
- D: plumbing::DatabaseOps,
+ D: ?Sized + plumbing::DatabaseOps,
{
DatabaseKeyIndexDebug { index: self, db }
}
diff --git a/crates/salsa/src/lru.rs b/crates/salsa/src/lru.rs
index 1ff85a3ea4..edad551842 100644
--- a/crates/salsa/src/lru.rs
+++ b/crates/salsa/src/lru.rs
@@ -1,4 +1,3 @@
-//!
use oorandom::Rand64;
use parking_lot::Mutex;
use std::fmt::Debug;
diff --git a/crates/salsa/src/plumbing.rs b/crates/salsa/src/plumbing.rs
index 1a8ff33b2e..1dfde63986 100644
--- a/crates/salsa/src/plumbing.rs
+++ b/crates/salsa/src/plumbing.rs
@@ -1,4 +1,3 @@
-//!
#![allow(missing_docs)]
use crate::debug::TableEntry;
diff --git a/crates/salsa/src/revision.rs b/crates/salsa/src/revision.rs
index 559b033860..204c0883b8 100644
--- a/crates/salsa/src/revision.rs
+++ b/crates/salsa/src/revision.rs
@@ -1,4 +1,3 @@
-//!
use std::num::NonZeroU32;
use std::sync::atomic::{AtomicU32, Ordering};
diff --git a/crates/salsa/src/runtime.rs b/crates/salsa/src/runtime.rs
index e11cabfe11..4f3341f515 100644
--- a/crates/salsa/src/runtime.rs
+++ b/crates/salsa/src/runtime.rs
@@ -1,4 +1,3 @@
-//!
use crate::durability::Durability;
use crate::hash::FxIndexSet;
use crate::plumbing::CycleRecoveryStrategy;
@@ -605,7 +604,7 @@ impl ActiveQuery {
pub(crate) fn take_inputs_from(&mut self, cycle_query: &ActiveQuery) {
self.changed_at = cycle_query.changed_at;
self.durability = cycle_query.durability;
- self.dependencies = cycle_query.dependencies.clone();
+ self.dependencies.clone_from(&cycle_query.dependencies);
}
}
diff --git a/crates/salsa/src/runtime/dependency_graph.rs b/crates/salsa/src/runtime/dependency_graph.rs
index dd223eeeba..ed1d499f63 100644
--- a/crates/salsa/src/runtime/dependency_graph.rs
+++ b/crates/salsa/src/runtime/dependency_graph.rs
@@ -1,4 +1,3 @@
-//!
use triomphe::Arc;
use crate::{DatabaseKeyIndex, RuntimeId};
diff --git a/crates/salsa/src/runtime/local_state.rs b/crates/salsa/src/runtime/local_state.rs
index 7ac21dec1a..0dbea1d563 100644
--- a/crates/salsa/src/runtime/local_state.rs
+++ b/crates/salsa/src/runtime/local_state.rs
@@ -1,4 +1,3 @@
-//!
use tracing::debug;
use triomphe::ThinArc;
diff --git a/crates/salsa/src/storage.rs b/crates/salsa/src/storage.rs
index c0e6416f4a..e0acf44041 100644
--- a/crates/salsa/src/storage.rs
+++ b/crates/salsa/src/storage.rs
@@ -1,4 +1,3 @@
-//!
use crate::{plumbing::DatabaseStorageTypes, Runtime};
use triomphe::Arc;
diff --git a/crates/sourcegen/src/lib.rs b/crates/sourcegen/src/lib.rs
index 295b716b4e..829b4d5b0f 100644
--- a/crates/sourcegen/src/lib.rs
+++ b/crates/sourcegen/src/lib.rs
@@ -69,7 +69,7 @@ impl CommentBlock {
panic!("Use plain (non-doc) comments with tags like {tag}:\n {first}");
}
- block.id = id.trim().to_owned();
+ id.trim().clone_into(&mut block.id);
true
});
blocks
diff --git a/crates/span/Cargo.toml b/crates/span/Cargo.toml
index cbda91f0a5..9f85f0107c 100644
--- a/crates/span/Cargo.toml
+++ b/crates/span/Cargo.toml
@@ -14,6 +14,7 @@ la-arena.workspace = true
salsa.workspace = true
rustc-hash.workspace = true
hashbrown.workspace = true
+text-size.workspace = true
# local deps
vfs.workspace = true
diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs
index c9109c72d0..8ca7bc2d38 100644
--- a/crates/span/src/lib.rs
+++ b/crates/span/src/lib.rs
@@ -13,59 +13,10 @@ pub use self::{
map::{RealSpanMap, SpanMap},
};
-pub use syntax::{TextRange, TextSize};
+pub use syntax::Edition;
+pub use text_size::{TextRange, TextSize};
pub use vfs::FileId;
-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub enum Edition {
- Edition2015,
- Edition2018,
- Edition2021,
- Edition2024,
-}
-
-impl Edition {
- pub const CURRENT: Edition = Edition::Edition2021;
- pub const DEFAULT: Edition = Edition::Edition2015;
-}
-
-#[derive(Debug)]
-pub struct ParseEditionError {
- invalid_input: String,
-}
-
-impl std::error::Error for ParseEditionError {}
-impl fmt::Display for ParseEditionError {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "invalid edition: {:?}", self.invalid_input)
- }
-}
-
-impl std::str::FromStr for Edition {
- type Err = ParseEditionError;
-
- fn from_str(s: &str) -> Result<Self, Self::Err> {
- let res = match s {
- "2015" => Edition::Edition2015,
- "2018" => Edition::Edition2018,
- "2021" => Edition::Edition2021,
- "2024" => Edition::Edition2024,
- _ => return Err(ParseEditionError { invalid_input: s.to_owned() }),
- };
- Ok(res)
- }
-}
-
-impl fmt::Display for Edition {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.write_str(match self {
- Edition::Edition2015 => "2015",
- Edition::Edition2018 => "2018",
- Edition::Edition2021 => "2021",
- Edition::Edition2024 => "2024",
- })
- }
-}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct FilePosition {
pub file_id: FileId,
diff --git a/crates/span/src/map.rs b/crates/span/src/map.rs
index 1f396a1e97..81fc56c961 100644
--- a/crates/span/src/map.rs
+++ b/crates/span/src/map.rs
@@ -4,17 +4,20 @@
use std::{fmt, hash::Hash};
use stdx::{always, itertools::Itertools};
-use syntax::{TextRange, TextSize};
use vfs::FileId;
use crate::{
- ErasedFileAstId, Span, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID,
+ ErasedFileAstId, Span, SpanAnchor, SpanData, SyntaxContextId, TextRange, TextSize,
+ ROOT_ERASED_FILE_AST_ID,
};
/// Maps absolute text ranges for the corresponding file to the relevant span data.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub struct SpanMap<S> {
spans: Vec<(TextSize, SpanData<S>)>,
+ /// Index of the matched macro arm on successful expansion for declarative macros.
+ // FIXME: Does it make sense to have this here?
+ pub matched_arm: Option<u32>,
}
impl<S> SpanMap<S>
@@ -23,7 +26,7 @@ where
{
/// Creates a new empty [`SpanMap`].
pub fn empty() -> Self {
- Self { spans: Vec::new() }
+ Self { spans: Vec::new(), matched_arm: None }
}
/// Finalizes the [`SpanMap`], shrinking its backing storage and validating that the offsets are
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index 01f2af419e..0e62de5feb 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -255,7 +255,7 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
mod tests {
use expect_test::{expect, Expect};
use itertools::Itertools;
- use parser::SyntaxKind;
+ use parser::{Edition, SyntaxKind};
use text_edit::TextEdit;
use crate::{AstNode, SyntaxElement};
@@ -607,8 +607,8 @@ fn main() {
}
fn check_diff(from: &str, to: &str, expected_diff: Expect) {
- let from_node = crate::SourceFile::parse(from).tree().syntax().clone();
- let to_node = crate::SourceFile::parse(to).tree().syntax().clone();
+ let from_node = crate::SourceFile::parse(from, Edition::CURRENT).tree().syntax().clone();
+ let to_node = crate::SourceFile::parse(to, Edition::CURRENT).tree().syntax().clone();
let diff = super::diff(&from_node, &to_node);
let line_number =
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index e9ab7a4320..168ca9f132 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -174,6 +174,7 @@ fn test_doc_comment_none() {
// non-doc
mod foo {}
"#,
+ parser::Edition::CURRENT,
)
.ok()
.unwrap();
@@ -189,6 +190,7 @@ fn test_outer_doc_comment_of_items() {
// non-doc
mod foo {}
"#,
+ parser::Edition::CURRENT,
)
.ok()
.unwrap();
@@ -204,6 +206,7 @@ fn test_inner_doc_comment_of_items() {
// non-doc
mod foo {}
"#,
+ parser::Edition::CURRENT,
)
.ok()
.unwrap();
@@ -218,6 +221,7 @@ fn test_doc_comment_of_statics() {
/// Number of levels
static LEVELS: i32 = 0;
"#,
+ parser::Edition::CURRENT,
)
.ok()
.unwrap();
@@ -237,6 +241,7 @@ fn test_doc_comment_preserves_indents() {
/// ```
mod foo {}
"#,
+ parser::Edition::CURRENT,
)
.ok()
.unwrap();
@@ -257,6 +262,7 @@ fn test_doc_comment_preserves_newlines() {
/// foo
mod foo {}
"#,
+ parser::Edition::CURRENT,
)
.ok()
.unwrap();
@@ -271,6 +277,7 @@ fn test_doc_comment_single_line_block_strips_suffix() {
/** this is mod foo*/
mod foo {}
"#,
+ parser::Edition::CURRENT,
)
.ok()
.unwrap();
@@ -285,6 +292,7 @@ fn test_doc_comment_single_line_block_strips_suffix_whitespace() {
/** this is mod foo */
mod foo {}
"#,
+ parser::Edition::CURRENT,
)
.ok()
.unwrap();
@@ -303,6 +311,7 @@ fn test_doc_comment_multi_line_block_strips_suffix() {
*/
mod foo {}
"#,
+ parser::Edition::CURRENT,
)
.ok()
.unwrap();
@@ -316,7 +325,7 @@ fn test_doc_comment_multi_line_block_strips_suffix() {
#[test]
fn test_comments_preserve_trailing_whitespace() {
let file = SourceFile::parse(
- "\n/// Representation of a Realm. \n/// In the specification these are called Realm Records.\nstruct Realm {}",
+ "\n/// Representation of a Realm. \n/// In the specification these are called Realm Records.\nstruct Realm {}", parser::Edition::CURRENT,
)
.ok()
.unwrap();
@@ -335,6 +344,7 @@ fn test_four_slash_line_comment() {
/// doc comment
mod foo {}
"#,
+ parser::Edition::CURRENT,
)
.ok()
.unwrap();
@@ -360,6 +370,7 @@ where
for<'a> F: Fn(&'a str)
{}
"#,
+ parser::Edition::CURRENT,
)
.ok()
.unwrap();
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs
index 41d33c457c..2445e4f1a3 100644
--- a/crates/syntax/src/ast/edit_in_place.rs
+++ b/crates/syntax/src/ast/edit_in_place.rs
@@ -1054,6 +1054,7 @@ impl<N: AstNode + Clone> Indent for N {}
mod tests {
use std::fmt;
+ use parser::Edition;
use stdx::trim_indent;
use test_utils::assert_eq_text;
@@ -1062,7 +1063,7 @@ mod tests {
use super::*;
fn ast_mut_from_text<N: AstNode>(text: &str) -> N {
- let parse = SourceFile::parse(text);
+ let parse = SourceFile::parse(text, Edition::CURRENT);
parse.tree().syntax().descendants().find_map(N::cast).unwrap().clone_for_update()
}
diff --git a/crates/syntax/src/ast/expr_ext.rs b/crates/syntax/src/ast/expr_ext.rs
index 18a56e2823..28a9dadace 100644
--- a/crates/syntax/src/ast/expr_ext.rs
+++ b/crates/syntax/src/ast/expr_ext.rs
@@ -89,6 +89,7 @@ fn if_block_condition() {
else { "else" }
}
"#,
+ parser::Edition::CURRENT,
);
let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap();
assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#);
@@ -123,6 +124,7 @@ fn if_condition_with_if_inside() {
else { "else" }
}
"#,
+ parser::Edition::CURRENT,
);
let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap();
assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#);
@@ -386,7 +388,8 @@ impl ast::BlockExpr {
#[test]
fn test_literal_with_attr() {
- let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#);
+ let parse =
+ ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#, parser::Edition::CURRENT);
let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap();
assert_eq!(lit.token().text(), r#""Hello""#);
}
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index ff18fee9ba..186f1b01da 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -11,7 +11,7 @@
//! term, it will be replaced with direct tree manipulation.
use itertools::Itertools;
-use parser::T;
+use parser::{Edition, T};
use rowan::NodeOrToken;
use stdx::{format_to, format_to_acc, never};
@@ -1127,7 +1127,7 @@ pub fn token_tree(
#[track_caller]
fn ast_from_text<N: AstNode>(text: &str) -> N {
- let parse = SourceFile::parse(text);
+ let parse = SourceFile::parse(text, Edition::CURRENT);
let node = match parse.tree().syntax().descendants().find_map(N::cast) {
Some(it) => it,
None => {
@@ -1153,12 +1153,13 @@ pub fn token(kind: SyntaxKind) -> SyntaxToken {
pub mod tokens {
use once_cell::sync::Lazy;
+ use parser::Edition;
use crate::{ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken};
pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| {
SourceFile::parse(
- "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\nimpl A for B where: {}",
+ "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT,
)
});
@@ -1186,13 +1187,13 @@ pub mod tokens {
pub fn whitespace(text: &str) -> SyntaxToken {
assert!(text.trim().is_empty());
- let sf = SourceFile::parse(text).ok().unwrap();
+ let sf = SourceFile::parse(text, Edition::CURRENT).ok().unwrap();
sf.syntax().clone_for_update().first_child_or_token().unwrap().into_token().unwrap()
}
pub fn doc_comment(text: &str) -> SyntaxToken {
assert!(!text.trim().is_empty());
- let sf = SourceFile::parse(text).ok().unwrap();
+ let sf = SourceFile::parse(text, Edition::CURRENT).ok().unwrap();
sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
}
@@ -1240,7 +1241,7 @@ pub mod tokens {
impl WsBuilder {
pub fn new(text: &str) -> WsBuilder {
- WsBuilder(SourceFile::parse(text).ok().unwrap())
+ WsBuilder(SourceFile::parse(text, Edition::CURRENT).ok().unwrap())
}
pub fn ws(&self) -> SyntaxToken {
self.0.syntax().first_child_or_token().unwrap().into_token().unwrap()
diff --git a/crates/syntax/src/fuzz.rs b/crates/syntax/src/fuzz.rs
index 2873867179..682dcd7cc4 100644
--- a/crates/syntax/src/fuzz.rs
+++ b/crates/syntax/src/fuzz.rs
@@ -4,6 +4,7 @@
use std::str::{self, FromStr};
+use parser::Edition;
use text_edit::Indel;
use crate::{validation, AstNode, SourceFile, TextRange};
@@ -14,7 +15,7 @@ fn check_file_invariants(file: &SourceFile) {
}
pub fn check_parser(text: &str) {
- let file = SourceFile::parse(text);
+ let file = SourceFile::parse(text, Edition::CURRENT);
check_file_invariants(&file.tree());
}
@@ -48,11 +49,11 @@ impl CheckReparse {
#[allow(clippy::print_stderr)]
pub fn run(&self) {
- let parse = SourceFile::parse(&self.text);
- let new_parse = parse.reparse(&self.edit);
+ let parse = SourceFile::parse(&self.text, Edition::CURRENT);
+ let new_parse = parse.reparse(&self.edit, Edition::CURRENT);
check_file_invariants(&new_parse.tree());
assert_eq!(&new_parse.tree().syntax().text().to_string(), &self.edited_text);
- let full_reparse = SourceFile::parse(&self.edited_text);
+ let full_reparse = SourceFile::parse(&self.edited_text, Edition::CURRENT);
for (a, b) in
new_parse.tree().syntax().descendants().zip(full_reparse.tree().syntax().descendants())
{
diff --git a/crates/syntax/src/hacks.rs b/crates/syntax/src/hacks.rs
index a3023c3195..36615d11d8 100644
--- a/crates/syntax/src/hacks.rs
+++ b/crates/syntax/src/hacks.rs
@@ -2,11 +2,13 @@
//!
//! Please avoid adding new usages of the functions in this module
+use parser::Edition;
+
use crate::{ast, AstNode};
pub fn parse_expr_from_str(s: &str) -> Option<ast::Expr> {
let s = s.trim();
- let file = ast::SourceFile::parse(&format!("const _: () = {s};"));
+ let file = ast::SourceFile::parse(&format!("const _: () = {s};"), Edition::CURRENT);
let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?;
if expr.syntax().text() != s {
return None;
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index 1bb82cc191..3a9ebafe87 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -60,11 +60,12 @@ pub use crate::{
},
token_text::TokenText,
};
-pub use parser::{SyntaxKind, T};
+pub use parser::{Edition, SyntaxKind, T};
pub use rowan::{
api::Preorder, Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize,
TokenAtOffset, WalkEvent,
};
+pub use rustc_lexer::unescape;
pub use smol_str::{format_smolstr, SmolStr};
/// `Parse` is the result of the parsing: a syntax tree and a collection of
@@ -141,8 +142,8 @@ impl Parse<SourceFile> {
buf
}
- pub fn reparse(&self, indel: &Indel) -> Parse<SourceFile> {
- self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel))
+ pub fn reparse(&self, indel: &Indel, edition: Edition) -> Parse<SourceFile> {
+ self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel, edition))
}
fn incremental_reparse(&self, indel: &Indel) -> Option<Parse<SourceFile>> {
@@ -159,10 +160,10 @@ impl Parse<SourceFile> {
})
}
- fn full_reparse(&self, indel: &Indel) -> Parse<SourceFile> {
+ fn full_reparse(&self, indel: &Indel, edition: Edition) -> Parse<SourceFile> {
let mut text = self.tree().syntax().text().to_string();
indel.apply(&mut text);
- SourceFile::parse(&text)
+ SourceFile::parse(&text, edition)
}
}
@@ -170,9 +171,9 @@ impl Parse<SourceFile> {
pub use crate::ast::SourceFile;
impl SourceFile {
- pub fn parse(text: &str) -> Parse<SourceFile> {
+ pub fn parse(text: &str, edition: Edition) -> Parse<SourceFile> {
let _p = tracing::span!(tracing::Level::INFO, "SourceFile::parse").entered();
- let (green, errors) = parsing::parse_text(text);
+ let (green, errors) = parsing::parse_text(text, edition);
let root = SyntaxNode::new_root(green.clone());
assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
@@ -185,7 +186,10 @@ impl SourceFile {
}
impl ast::TokenTree {
- pub fn reparse_as_comma_separated_expr(self) -> Parse<ast::MacroEagerInput> {
+ pub fn reparse_as_comma_separated_expr(
+ self,
+ edition: parser::Edition,
+ ) -> Parse<ast::MacroEagerInput> {
let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
let mut parser_input = parser::Input::default();
@@ -219,7 +223,7 @@ impl ast::TokenTree {
}
}
- let parser_output = parser::TopEntryPoint::MacroEagerInput.parse(&parser_input);
+ let parser_output = parser::TopEntryPoint::MacroEagerInput.parse(&parser_input, edition);
let mut tokens =
self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
@@ -337,7 +341,7 @@ fn api_walkthrough() {
//
// The `parse` method returns a `Parse` -- a pair of syntax tree and a list
// of errors. That is, syntax tree is constructed even in presence of errors.
- let parse = SourceFile::parse(source_code);
+ let parse = SourceFile::parse(source_code, parser::Edition::CURRENT);
assert!(parse.errors().is_empty());
// The `tree` method returns an owned syntax node of type `SourceFile`.
diff --git a/crates/syntax/src/parsing.rs b/crates/syntax/src/parsing.rs
index d750476f63..420f4938e5 100644
--- a/crates/syntax/src/parsing.rs
+++ b/crates/syntax/src/parsing.rs
@@ -9,11 +9,11 @@ use crate::{syntax_node::GreenNode, SyntaxError, SyntaxTreeBuilder};
pub(crate) use crate::parsing::reparsing::incremental_reparse;
-pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
+pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Vec<SyntaxError>) {
let _p = tracing::span!(tracing::Level::INFO, "parse_text").entered();
let lexed = parser::LexedStr::new(text);
let parser_input = lexed.to_input();
- let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input);
+ let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input, edition);
let (node, errors, _eof) = build_tree(lexed, parser_output);
(node, errors)
}
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 14715b5725..354b89fd49 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -26,7 +26,9 @@ pub(crate) fn incremental_reparse(
return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
}
- if let Some((green, new_errors, old_range)) = reparse_block(node, edit) {
+ if let Some((green, new_errors, old_range)) =
+ reparse_block(node, edit, parser::Edition::CURRENT)
+ {
return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
}
None
@@ -84,6 +86,7 @@ fn reparse_token(
fn reparse_block(
root: &SyntaxNode,
edit: &Indel,
+ edition: parser::Edition,
) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
let (node, reparser) = find_reparsable_node(root, edit.delete)?;
let text = get_text_after_edit(node.clone().into(), edit);
@@ -94,7 +97,7 @@ fn reparse_block(
return None;
}
- let tree_traversal = reparser.parse(&parser_input);
+ let tree_traversal = reparser.parse(&parser_input, edition);
let (green, new_parser_errors, _eof) = build_tree(lexed, tree_traversal);
@@ -174,6 +177,7 @@ fn merge_errors(
#[cfg(test)]
mod tests {
+ use parser::Edition;
use test_utils::{assert_eq_text, extract_range};
use super::*;
@@ -188,9 +192,9 @@ mod tests {
after
};
- let fully_reparsed = SourceFile::parse(&after);
+ let fully_reparsed = SourceFile::parse(&after, Edition::CURRENT);
let incrementally_reparsed: Parse<SourceFile> = {
- let before = SourceFile::parse(&before);
+ let before = SourceFile::parse(&before, Edition::CURRENT);
let (green, new_errors, range) = incremental_reparse(
before.tree().syntax(),
&edit,
diff --git a/crates/syntax/src/ptr.rs b/crates/syntax/src/ptr.rs
index fb8aee9c3b..ed4894f9b9 100644
--- a/crates/syntax/src/ptr.rs
+++ b/crates/syntax/src/ptr.rs
@@ -120,7 +120,7 @@ impl<N: AstNode> From<AstPtr<N>> for SyntaxNodePtr {
fn test_local_syntax_ptr() {
use crate::{ast, AstNode, SourceFile};
- let file = SourceFile::parse("struct Foo { f: u32, }").ok().unwrap();
+ let file = SourceFile::parse("struct Foo { f: u32, }", parser::Edition::CURRENT).ok().unwrap();
let field = file.syntax().descendants().find_map(ast::RecordField::cast).unwrap();
let ptr = SyntaxNodePtr::new(field.syntax());
let field_syntax = ptr.to_node(file.syntax());
diff --git a/crates/syntax/src/tests.rs b/crates/syntax/src/tests.rs
index 439daa358a..f0d58efc01 100644
--- a/crates/syntax/src/tests.rs
+++ b/crates/syntax/src/tests.rs
@@ -5,6 +5,7 @@ use std::{
use ast::HasName;
use expect_test::expect_file;
+use parser::Edition;
use rayon::prelude::*;
use stdx::format_to_acc;
use test_utils::{bench, bench_fixture, project_root};
@@ -19,7 +20,7 @@ fn main() {
}
"#;
- let parse = SourceFile::parse(code);
+ let parse = SourceFile::parse(code, Edition::CURRENT);
// eprintln!("{:#?}", parse.syntax_node());
assert!(parse.ok().is_ok());
}
@@ -33,7 +34,7 @@ fn benchmark_parser() {
let data = bench_fixture::glorious_old_parser();
let tree = {
let _b = bench("parsing");
- let p = SourceFile::parse(&data);
+ let p = SourceFile::parse(&data, Edition::CURRENT);
assert!(p.errors().is_empty());
assert_eq!(p.tree().syntax.text_range().len(), 352474.into());
p.tree()
@@ -50,7 +51,7 @@ fn benchmark_parser() {
#[test]
fn validation_tests() {
dir_tests(&test_data_dir(), &["parser/validation"], "rast", |text, path| {
- let parse = SourceFile::parse(text);
+ let parse = SourceFile::parse(text, Edition::CURRENT);
let errors = parse.errors();
assert_errors_are_present(&errors, path);
parse.debug_dump()
@@ -110,7 +111,7 @@ fn self_hosting_parsing() {
.into_par_iter()
.filter_map(|file| {
let text = read_text(&file);
- match SourceFile::parse(&text).ok() {
+ match SourceFile::parse(&text, Edition::CURRENT).ok() {
Ok(_) => None,
Err(err) => Some((file, err)),
}
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs
index c8d785f83e..89ed6a6157 100644
--- a/crates/test-fixture/src/lib.rs
+++ b/crates/test-fixture/src/lib.rs
@@ -189,8 +189,8 @@ impl ChangeFixture {
meta.edition,
Some(crate_name.clone().into()),
version,
- meta.cfg.clone(),
- Some(meta.cfg),
+ From::from(meta.cfg.clone()),
+ Some(From::from(meta.cfg)),
meta.env,
false,
origin,
@@ -209,7 +209,7 @@ impl ChangeFixture {
assert!(default_crate_root.is_none());
default_crate_root = Some(file_id);
default_cfg.extend(meta.cfg.into_iter());
- default_env.extend(meta.env.iter().map(|(x, y)| (x.to_owned(), y.to_owned())));
+ default_env.extend_from_other(&meta.env);
}
source_change.change_file(file_id, Some(text));
@@ -227,8 +227,8 @@ impl ChangeFixture {
Edition::CURRENT,
Some(CrateName::new("test").unwrap().into()),
None,
- default_cfg.clone(),
- Some(default_cfg),
+ From::from(default_cfg.clone()),
+ Some(From::from(default_cfg)),
default_env,
false,
CrateOrigin::Local { repo: None, name: None },
@@ -260,7 +260,7 @@ impl ChangeFixture {
let core_crate = crate_graph.add_crate_root(
core_file,
- Edition::Edition2021,
+ Edition::CURRENT,
Some(CrateDisplayName::from_canonical_name("core".to_owned())),
None,
Default::default(),
@@ -299,7 +299,7 @@ impl ChangeFixture {
let proc_macros_crate = crate_graph.add_crate_root(
proc_lib_file,
- Edition::Edition2021,
+ Edition::CURRENT,
Some(CrateDisplayName::from_canonical_name("proc_macros".to_owned())),
None,
Default::default(),
diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs
index 7e34c36189..aafe4fb5b1 100644
--- a/crates/test-utils/src/fixture.rs
+++ b/crates/test-utils/src/fixture.rs
@@ -186,7 +186,7 @@ impl FixtureWithProjectMeta {
if let Some(meta) = fixture.strip_prefix("//- target_data_layout:") {
let (meta, remain) = meta.split_once('\n').unwrap();
- target_data_layout = meta.trim().to_owned();
+ meta.trim().clone_into(&mut target_data_layout);
fixture = remain;
}
diff --git a/crates/vfs-notify/src/lib.rs b/crates/vfs-notify/src/lib.rs
index 4cfdec2b5c..45bb777d4d 100644
--- a/crates/vfs-notify/src/lib.rs
+++ b/crates/vfs-notify/src/lib.rs
@@ -9,7 +9,10 @@
#![warn(rust_2018_idioms, unused_lifetimes)]
-use std::fs;
+use std::{
+ fs,
+ path::{Component, Path},
+};
use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
use notify::{Config, RecommendedWatcher, RecursiveMode, Watcher};
@@ -206,6 +209,11 @@ impl NotifyActor {
return true;
}
let path = entry.path();
+
+ if path_is_parent_symlink(path) {
+ return false;
+ }
+
root == path
|| dirs.exclude.iter().chain(&dirs.include).all(|it| it != path)
});
@@ -258,3 +266,21 @@ fn read(path: &AbsPath) -> Option<Vec<u8>> {
fn log_notify_error<T>(res: notify::Result<T>) -> Option<T> {
res.map_err(|err| tracing::warn!("notify error: {}", err)).ok()
}
+
+/// Is `path` a symlink to a parent directory?
+///
+/// Including this path is guaranteed to cause an infinite loop. This
+/// heuristic is not sufficient to catch all symlink cycles (it's
+/// possible to construct a cycle using two or more symlinks), but it
+/// catches common cases.
+fn path_is_parent_symlink(path: &Path) -> bool {
+ let Ok(destination) = std::fs::read_link(path) else {
+ return false;
+ };
+
+ // If the symlink is of the form "../..", it's a parent symlink.
+ let is_relative_parent =
+ destination.components().all(|c| matches!(c, Component::CurDir | Component::ParentDir));
+
+ is_relative_parent || path.starts_with(destination)
+}
diff --git a/crates/vfs/src/file_set.rs b/crates/vfs/src/file_set.rs
index 7eeb10d544..d7d283c3eb 100644
--- a/crates/vfs/src/file_set.rs
+++ b/crates/vfs/src/file_set.rs
@@ -132,6 +132,10 @@ impl FileSetConfig {
///
/// `scratch_space` is used as a buffer and will be entirely replaced.
fn classify(&self, path: &VfsPath, scratch_space: &mut Vec<u8>) -> usize {
+ // `path` is a file, but r-a only cares about the containing directory. We don't
+ // want `/foo/bar_baz.rs` to be attributed to source root directory `/foo/bar`.
+ let path = path.parent().unwrap_or_else(|| path.clone());
+
scratch_space.clear();
path.encode(scratch_space);
let automaton = PrefixOf::new(scratch_space.as_slice());
diff --git a/crates/vfs/src/file_set/tests.rs b/crates/vfs/src/file_set/tests.rs
index 2146df185d..3cdb60dcb2 100644
--- a/crates/vfs/src/file_set/tests.rs
+++ b/crates/vfs/src/file_set/tests.rs
@@ -40,3 +40,26 @@ fn name_prefix() {
let partition = file_set.partition(&vfs).into_iter().map(|it| it.len()).collect::<Vec<_>>();
assert_eq!(partition, vec![1, 1, 0]);
}
+
+/// Ensure that we don't consider `/foo/bar_baz.rs` to be in the
+/// `/foo/bar/` root.
+#[test]
+fn name_prefix_partially_matches() {
+ let mut file_set = FileSetConfig::builder();
+ file_set.add_file_set(vec![VfsPath::new_virtual_path("/foo".into())]);
+ file_set.add_file_set(vec![VfsPath::new_virtual_path("/foo/bar".into())]);
+ let file_set = file_set.build();
+
+ let mut vfs = Vfs::default();
+
+ // These two are both in /foo.
+ vfs.set_file_contents(VfsPath::new_virtual_path("/foo/lib.rs".into()), Some(Vec::new()));
+ vfs.set_file_contents(VfsPath::new_virtual_path("/foo/bar_baz.rs".into()), Some(Vec::new()));
+
+ // Only this file is in /foo/bar.
+ vfs.set_file_contents(VfsPath::new_virtual_path("/foo/bar/biz.rs".into()), Some(Vec::new()));
+
+ let partition = file_set.partition(&vfs).into_iter().map(|it| it.len()).collect::<Vec<_>>();
+
+ assert_eq!(partition, vec![2, 1, 0]);
+}
diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md
index 939b1819c7..f1815082e2 100644
--- a/docs/dev/lsp-extensions.md
+++ b/docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
<!---
-lsp/ext.rs hash: 223f48a89a5126a0
+lsp/ext.rs hash: dd51139b0530147e
If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:
@@ -444,7 +444,7 @@ interface DiscoverTestResults {
// For each file which its uri is in this list, the response
// contains all tests that are located in this file, and
// client should remove old tests not included in the response.
- scopeFile: lc.TextDocumentIdentifier[] | undefined;
+ scopeFile: lc.TextDocumentIdentifier[] | undefined;
}
```
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc
index c4024f6d28..a03ab0031d 100644
--- a/docs/user/generated_config.adoc
+++ b/docs/user/generated_config.adoc
@@ -19,6 +19,11 @@ Warm up caches on project load.
--
How many worker threads to handle priming caches. The default `0` means to pick automatically.
--
+[[rust-analyzer.cargo.allTargets]]rust-analyzer.cargo.allTargets (default: `true`)::
++
+--
+Pass `--all-targets` to cargo invocation.
+--
[[rust-analyzer.cargo.autoreload]]rust-analyzer.cargo.autoreload (default: `true`)::
+
--
@@ -83,10 +88,18 @@ or build-script sources change and are saved.
Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
avoid checking unnecessary things.
--
-[[rust-analyzer.cargo.cfgs]]rust-analyzer.cargo.cfgs (default: `{}`)::
+[[rust-analyzer.cargo.cfgs]]rust-analyzer.cargo.cfgs::
+
--
+Default:
+----
+{
+ "debug_assertions": null,
+ "miri": null
+}
+----
List of cfg options to enable with the given values.
+
--
[[rust-analyzer.cargo.extraArgs]]rust-analyzer.cargo.extraArgs (default: `[]`)::
+
@@ -154,20 +167,16 @@ building from locking the `Cargo.lock` at the expense of duplicating build artif
Set to `true` to use a subdirectory of the existing target directory or
set to a path relative to the workspace to use that path.
--
-[[rust-analyzer.cargo.unsetTest]]rust-analyzer.cargo.unsetTest (default: `["core"]`)::
-+
---
-Unsets the implicit `#[cfg(test)]` for the specified crates.
---
[[rust-analyzer.checkOnSave]]rust-analyzer.checkOnSave (default: `true`)::
+
--
Run the check command for diagnostics on save.
--
-[[rust-analyzer.check.allTargets]]rust-analyzer.check.allTargets (default: `true`)::
+[[rust-analyzer.check.allTargets]]rust-analyzer.check.allTargets (default: `null`)::
+
--
-Check all targets and tests (`--all-targets`).
+Check all targets and tests (`--all-targets`). Defaults to
+`#rust-analyzer.cargo.allTargets#`.
--
[[rust-analyzer.check.command]]rust-analyzer.check.command (default: `"check"`)::
+
@@ -315,46 +324,46 @@ Enables completions of private items and fields that are defined in the current
Default:
----
{
- "Arc::new": {
- "postfix": "arc",
- "body": "Arc::new(${receiver})",
- "requires": "std::sync::Arc",
- "description": "Put the expression into an `Arc`",
- "scope": "expr"
- },
- "Rc::new": {
- "postfix": "rc",
- "body": "Rc::new(${receiver})",
- "requires": "std::rc::Rc",
- "description": "Put the expression into an `Rc`",
- "scope": "expr"
- },
- "Box::pin": {
- "postfix": "pinbox",
- "body": "Box::pin(${receiver})",
- "requires": "std::boxed::Box",
- "description": "Put the expression into a pinned `Box`",
- "scope": "expr"
- },
- "Ok": {
- "postfix": "ok",
- "body": "Ok(${receiver})",
- "description": "Wrap the expression in a `Result::Ok`",
- "scope": "expr"
- },
- "Err": {
- "postfix": "err",
- "body": "Err(${receiver})",
- "description": "Wrap the expression in a `Result::Err`",
- "scope": "expr"
- },
- "Some": {
- "postfix": "some",
- "body": "Some(${receiver})",
- "description": "Wrap the expression in an `Option::Some`",
- "scope": "expr"
- }
- }
+ "Arc::new": {
+ "postfix": "arc",
+ "body": "Arc::new(${receiver})",
+ "requires": "std::sync::Arc",
+ "description": "Put the expression into an `Arc`",
+ "scope": "expr"
+ },
+ "Rc::new": {
+ "postfix": "rc",
+ "body": "Rc::new(${receiver})",
+ "requires": "std::rc::Rc",
+ "description": "Put the expression into an `Rc`",
+ "scope": "expr"
+ },
+ "Box::pin": {
+ "postfix": "pinbox",
+ "body": "Box::pin(${receiver})",
+ "requires": "std::boxed::Box",
+ "description": "Put the expression into a pinned `Box`",
+ "scope": "expr"
+ },
+ "Ok": {
+ "postfix": "ok",
+ "body": "Ok(${receiver})",
+ "description": "Wrap the expression in a `Result::Ok`",
+ "scope": "expr"
+ },
+ "Err": {
+ "postfix": "err",
+ "body": "Err(${receiver})",
+ "description": "Wrap the expression in a `Result::Err`",
+ "scope": "expr"
+ },
+ "Some": {
+ "postfix": "some",
+ "body": "Some(${receiver})",
+ "description": "Wrap the expression in an `Option::Some`",
+ "scope": "expr"
+ }
+}
----
Custom completion snippets.
@@ -839,6 +848,24 @@ Command to be executed instead of 'cargo' for runnables.
Additional arguments to be passed to cargo for runnables such as
tests or binaries. For example, it may be `--release`.
--
+[[rust-analyzer.runnables.extraTestBinaryArgs]]rust-analyzer.runnables.extraTestBinaryArgs::
++
+--
+Default:
+----
+[
+ "--show-output"
+]
+----
+Additional arguments to be passed through Cargo to launched tests, benchmarks, or
+doc-tests.
+
+Unless the launched target uses a
+[custom test harness](https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-harness-field),
+they will end up being interpreted as options to
+[`rustc`’s built-in test harness (“libtest”)](https://doc.rust-lang.org/rustc/tests/index.html#cli-arguments).
+
+--
[[rust-analyzer.rustc.source]]rust-analyzer.rustc.source (default: `null`)::
+
--
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc
index 8bc11fd481..6521b6d8b3 100644
--- a/docs/user/manual.adoc
+++ b/docs/user/manual.adoc
@@ -210,7 +210,10 @@ While the sandbox can be disabled for some directories, `/usr/bin` will always b
This prevents access to the system's C compiler, a system-wide installation of Rust, or any other libraries you might want to link to.
Some compilers and libraries can be acquired as Flatpak SDKs, such as `org.freedesktop.Sdk.Extension.rust-stable` or `org.freedesktop.Sdk.Extension.llvm15`.
-If you use a Flatpak SDK for Rust, there should be no extra steps necessary.
+If you use a Flatpak SDK for Rust, it must be in your `PATH`:
+
+ * install the SDK extensions with `flatpak install org.freedesktop.Sdk.Extension.{llvm15,rust-stable}//23.08`
+ * enable SDK extensions in the editor with the environment variable `FLATPAK_ENABLE_SDK_EXT=llvm15,rust-stable` (this can be done using flatseal or `flatpak override`)
If you want to use Flatpak in combination with `rustup`, the following steps might help:
diff --git a/editors/code/language-configuration.json b/editors/code/language-configuration.json
index bdae0e6ba9..6619d0c85c 100644
--- a/editors/code/language-configuration.json
+++ b/editors/code/language-configuration.json
@@ -19,7 +19,8 @@
{ "open": "(", "close": ")" },
{ "open": "\"", "close": "\"", "notIn": ["string"] },
{ "open": "/*", "close": " */", "notIn": ["string"] },
- { "open": "`", "close": "`", "notIn": ["string"] }
+ { "open": "`", "close": "`", "notIn": ["string"] },
+ { "open": "```", "close": "```", "notIn": ["string"] }
],
"autoCloseBefore": ";:.,=}])> \n\t",
"surroundingPairs": [
@@ -29,7 +30,8 @@
["<", ">"],
["\"", "\""],
["'", "'"],
- ["`", "`"]
+ ["`", "`"],
+ ["```", "```"]
],
"indentationRules": {
"increaseIndentPattern": "^.*\\{[^}\"']*$|^.*\\([^\\)\"']*$",
diff --git a/editors/code/package.json b/editors/code/package.json
index c3ea1ceeb6..389e1b8742 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -546,6 +546,11 @@
"minimum": 0,
"maximum": 255
},
+ "rust-analyzer.cargo.allTargets": {
+ "markdownDescription": "Pass `--all-targets` to cargo invocation.",
+ "default": true,
+ "type": "boolean"
+ },
"rust-analyzer.cargo.autoreload": {
"markdownDescription": "Automatically refresh project info via `cargo metadata` on\n`Cargo.toml` or `.cargo/config.toml` changes.",
"default": true,
@@ -605,7 +610,10 @@
},
"rust-analyzer.cargo.cfgs": {
"markdownDescription": "List of cfg options to enable with the given values.",
- "default": {},
+ "default": {
+ "debug_assertions": null,
+ "miri": null
+ },
"type": "object"
},
"rust-analyzer.cargo.extraArgs": {
@@ -691,25 +699,18 @@
}
]
},
- "rust-analyzer.cargo.unsetTest": {
- "markdownDescription": "Unsets the implicit `#[cfg(test)]` for the specified crates.",
- "default": [
- "core"
- ],
- "type": "array",
- "items": {
- "type": "string"
- }
- },
"rust-analyzer.checkOnSave": {
"markdownDescription": "Run the check command for diagnostics on save.",
"default": true,
"type": "boolean"
},
"rust-analyzer.check.allTargets": {
- "markdownDescription": "Check all targets and tests (`--all-targets`).",
- "default": true,
- "type": "boolean"
+ "markdownDescription": "Check all targets and tests (`--all-targets`). Defaults to\n`#rust-analyzer.cargo.allTargets#`.",
+ "default": null,
+ "type": [
+ "null",
+ "boolean"
+ ]
},
"rust-analyzer.check.command": {
"markdownDescription": "Cargo command to use for `cargo check`.",
@@ -1586,6 +1587,16 @@
"type": "string"
}
},
+ "rust-analyzer.runnables.extraTestBinaryArgs": {
+ "markdownDescription": "Additional arguments to be passed through Cargo to launched tests, benchmarks, or\ndoc-tests.\n\nUnless the launched target uses a\n[custom test harness](https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-harness-field),\nthey will end up being interpreted as options to\n[`rustc`’s built-in test harness (“libtest”)](https://doc.rust-lang.org/rustc/tests/index.html#cli-arguments).",
+ "default": [
+ "--show-output"
+ ],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
"rust-analyzer.rustc.source": {
"markdownDescription": "Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private\nprojects, or \"discover\" to try to automatically find it if the `rustc-dev` component\nis installed.\n\nAny project which uses rust-analyzer with the rustcPrivate\ncrates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.\n\nThis option does not take effect until rust-analyzer is restarted.",
"default": null,
@@ -1927,6 +1938,11 @@
"description": "Style for const generics"
},
{
+ "id": "const",
+ "description": "Style for consts",
+ "superType": "variable"
+ },
+ {
"id": "derive",
"description": "Style for derives",
"superType": "attribute"
@@ -1972,20 +1988,25 @@
"superType": "punctuation"
},
{
- "id": "operator",
- "description": "Style for operators",
- "superType": "punctuation"
- },
- {
"id": "parenthesis",
"description": "Style for ( or )",
"superType": "punctuation"
},
{
+ "id": "procMacro",
+ "description": "Style for proc macro code",
+ "superType": "macro"
+ },
+ {
"id": "punctuation",
"description": "Style for generic punctuation"
},
{
+ "id": "operator",
+ "description": "Style for operators",
+ "superType": "punctuation"
+ },
+ {
"id": "selfKeyword",
"description": "Style for the self keyword",
"superType": "keyword"
@@ -2001,6 +2022,16 @@
"superType": "punctuation"
},
{
+ "id": "static",
+ "description": "Style for statics",
+ "superType": "variable"
+ },
+ {
+ "id": "toolModule",
+ "description": "Style for tool module attributes",
+ "superType": "decorator"
+ },
+ {
"id": "typeAlias",
"description": "Style for type aliases",
"superType": "type"
@@ -2057,10 +2088,18 @@
"description": "Style for items that are defined outside of the current crate"
},
{
+ "id": "macro",
+ "description": "Style for tokens inside of macro calls"
+ },
+ {
"id": "mutable",
"description": "Style for mutable locals and statics as well as functions taking `&mut self`"
},
{
+ "id": "procMacro",
+ "description": "Style for tokens inside of proc-macro calls"
+ },
+ {
"id": "public",
"description": "Style for items that are from the current crate and are `pub`"
},
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index 1cbf247297..372dc8bedf 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -131,7 +131,10 @@ export async function createClient(
? diag.code
: diag.code?.value;
if (
- value === "unlinked-file" &&
+ // FIXME: We currently emit this diagnostic way too early, before we have
+ // loaded the project fully
+ // value === "unlinked-file" &&
+ value === "temporary-disabled" &&
!unlinkedFiles.includes(uri) &&
diag.message !== "file not included in module tree"
) {
diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts
index 02ccbb6956..4470689cd8 100644
--- a/editors/code/src/run.ts
+++ b/editors/code/src/run.ts
@@ -2,7 +2,6 @@ import * as vscode from "vscode";
import type * as lc from "vscode-languageclient";
import * as ra from "./lsp_ext";
import * as tasks from "./tasks";
-import * as toolchain from "./toolchain";
import type { CtxInit } from "./ctx";
import { makeDebugConfig } from "./debug";
@@ -112,22 +111,12 @@ export async function createTask(runnable: ra.Runnable, config: Config): Promise
throw `Unexpected runnable kind: ${runnable.kind}`;
}
- let program: string;
- let args = createArgs(runnable);
- if (runnable.args.overrideCargo) {
- // Split on spaces to allow overrides like "wrapper cargo".
- const cargoParts = runnable.args.overrideCargo.split(" ");
+ const args = createArgs(runnable);
- program = unwrapUndefinable(cargoParts[0]);
- args = [...cargoParts.slice(1), ...args];
- } else {
- program = await toolchain.cargoPath();
- }
-
- const definition: tasks.RustTargetDefinition = {
+ const definition: tasks.CargoTaskDefinition = {
type: tasks.TASK_TYPE,
- program,
- args,
+ command: unwrapUndefinable(args[0]), // run, test, etc...
+ args: args.slice(1),
cwd: runnable.args.workspaceRoot || ".",
env: prepareEnv(runnable, config.runnablesExtraEnv),
overrideCargo: runnable.args.overrideCargo,
diff --git a/editors/code/src/tasks.ts b/editors/code/src/tasks.ts
index 89abb37b0e..2b3abc5d65 100644
--- a/editors/code/src/tasks.ts
+++ b/editors/code/src/tasks.ts
@@ -2,17 +2,26 @@ import * as vscode from "vscode";
import * as toolchain from "./toolchain";
import type { Config } from "./config";
import { log } from "./util";
+import { unwrapUndefinable } from "./undefinable";
// This ends up as the `type` key in tasks.json. RLS also uses `cargo` and
// our configuration should be compatible with it so use the same key.
export const TASK_TYPE = "cargo";
+
export const TASK_SOURCE = "rust";
-export interface RustTargetDefinition extends vscode.TaskDefinition {
- program: string;
- args: string[];
+export interface CargoTaskDefinition extends vscode.TaskDefinition {
+ // The cargo command, such as "run" or "check".
+ command: string;
+ // Additional arguments passed to the cargo command.
+ args?: string[];
+ // The working directory to run the cargo command in.
cwd?: string;
+ // The shell environment.
env?: { [key: string]: string };
+ // Override the cargo executable name, such as
+ // "my_custom_cargo_bin".
+ overrideCargo?: string;
}
class RustTaskProvider implements vscode.TaskProvider {
@@ -37,14 +46,12 @@ class RustTaskProvider implements vscode.TaskProvider {
{ command: "run", group: undefined },
];
- const cargoPath = await toolchain.cargoPath();
-
const tasks: vscode.Task[] = [];
for (const workspaceTarget of vscode.workspace.workspaceFolders || []) {
for (const def of defs) {
const vscodeTask = await buildRustTask(
workspaceTarget,
- { type: TASK_TYPE, program: cargoPath, args: [def.command] },
+ { type: TASK_TYPE, command: def.command },
`cargo ${def.command}`,
this.config.problemMatcher,
this.config.cargoRunner,
@@ -62,7 +69,7 @@ class RustTaskProvider implements vscode.TaskProvider {
// we need to inform VSCode how to execute that command by creating
// a ShellExecution for it.
- const definition = task.definition as RustTargetDefinition;
+ const definition = task.definition as CargoTaskDefinition;
if (definition.type === TASK_TYPE) {
return await buildRustTask(
@@ -80,16 +87,34 @@ class RustTaskProvider implements vscode.TaskProvider {
export async function buildRustTask(
scope: vscode.WorkspaceFolder | vscode.TaskScope | undefined,
- definition: RustTargetDefinition,
+ definition: CargoTaskDefinition,
name: string,
problemMatcher: string[],
customRunner?: string,
throwOnError: boolean = false,
): Promise<vscode.Task> {
- let exec: vscode.ProcessExecution | vscode.ShellExecution | undefined = undefined;
+ const exec = await cargoToExecution(definition, customRunner, throwOnError);
+
+ return new vscode.Task(
+ definition,
+ // scope can sometimes be undefined. in these situations we default to the workspace taskscope as
+ // recommended by the official docs: https://code.visualstudio.com/api/extension-guides/task-provider#task-provider)
+ scope ?? vscode.TaskScope.Workspace,
+ name,
+ TASK_SOURCE,
+ exec,
+ problemMatcher,
+ );
+}
+async function cargoToExecution(
+ definition: CargoTaskDefinition,
+ customRunner: string | undefined,
+ throwOnError: boolean,
+): Promise<vscode.ProcessExecution | vscode.ShellExecution> {
if (customRunner) {
const runnerCommand = `${customRunner}.buildShellExecution`;
+
try {
const runnerArgs = {
kind: TASK_TYPE,
@@ -100,7 +125,7 @@ export async function buildRustTask(
const customExec = await vscode.commands.executeCommand(runnerCommand, runnerArgs);
if (customExec) {
if (customExec instanceof vscode.ShellExecution) {
- exec = customExec;
+ return customExec;
} else {
log.debug("Invalid cargo ShellExecution", customExec);
throw "Invalid cargo ShellExecution.";
@@ -113,20 +138,20 @@ export async function buildRustTask(
}
}
- if (!exec) {
- exec = new vscode.ProcessExecution(definition.program, definition.args, definition);
- }
+ // Check whether we must use a user-defined substitute for cargo.
+ // Split on spaces to allow overrides like "wrapper cargo".
+ const cargoPath = await toolchain.cargoPath();
+ const cargoCommand = definition.overrideCargo?.split(" ") ?? [cargoPath];
- return new vscode.Task(
- definition,
- // scope can sometimes be undefined. in these situations we default to the workspace taskscope as
- // recommended by the official docs: https://code.visualstudio.com/api/extension-guides/task-provider#task-provider)
- scope ?? vscode.TaskScope.Workspace,
- name,
- TASK_SOURCE,
- exec,
- problemMatcher,
- );
+ const args = [definition.command].concat(definition.args ?? []);
+ const fullCommand = [...cargoCommand, ...args];
+
+ const processName = unwrapUndefinable(fullCommand[0]);
+
+ return new vscode.ProcessExecution(processName, fullCommand.slice(1), {
+ cwd: definition.cwd,
+ env: definition.env,
+ });
}
export function activateTaskProvider(config: Config): vscode.Disposable {
diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs
index 7dc1b40783..b23d700263 100644
--- a/xtask/src/codegen.rs
+++ b/xtask/src/codegen.rs
@@ -84,7 +84,7 @@ impl CommentBlock {
panic!("Use plain (non-doc) comments with tags like {tag}:\n {first}");
}
- block.id = id.trim().to_owned();
+ id.trim().clone_into(&mut block.id);
true
});
blocks
diff --git a/xtask/src/codegen/grammar/ast_src.rs b/xtask/src/codegen/grammar/ast_src.rs
index 8221c57789..c246ee9950 100644
--- a/xtask/src/codegen/grammar/ast_src.rs
+++ b/xtask/src/codegen/grammar/ast_src.rs
@@ -65,11 +65,11 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc {
(">>=", "SHREQ"),
],
keywords: &[
- "as", "async", "await", "box", "break", "const", "continue", "crate", "do", "dyn", "else",
- "enum", "extern", "false", "fn", "for", "if", "impl", "in", "let", "loop", "macro",
- "match", "mod", "move", "mut", "pub", "ref", "return", "become", "self", "Self", "static",
- "struct", "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while",
- "yield",
+ "abstract", "as", "async", "await", "become", "box", "break", "const", "continue", "crate",
+ "do", "dyn", "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in",
+ "let", "loop", "macro", "match", "mod", "move", "mut", "override", "priv", "pub", "ref",
+ "return", "self", "Self", "static", "struct", "super", "trait", "true", "try", "type",
+ "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
],
contextual_keywords: &[
"auto",
diff --git a/xtask/src/codegen/lints.rs b/xtask/src/codegen/lints.rs
index 63abcfc090..6975f9328e 100644
--- a/xtask/src/codegen/lints.rs
+++ b/xtask/src/codegen/lints.rs
@@ -280,7 +280,7 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
let line = &line[..up_to];
let clippy_lint = clippy_lints.last_mut().expect("clippy lint must already exist");
- clippy_lint.help = unescape(line).trim().to_owned();
+ unescape(line).trim().clone_into(&mut clippy_lint.help);
}
}
clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id));