Unnamed repository; edit this file 'description' to name the repository.
Merge pull request #2263 from BoxyUwU/rustc-pull2
Rustc pull
Boxy 2025-02-26
parent 55226a8 · parent 30ddc42 · commit 9170e53
-rw-r--r--.git-blame-ignore-revs1
-rw-r--r--.github/workflows/autopublish.yaml1
-rw-r--r--.github/workflows/ci.yaml6
-rw-r--r--.github/workflows/publish-libs.yaml1
-rw-r--r--.github/workflows/release.yaml17
-rw-r--r--.gitignore7
-rw-r--r--CONTRIBUTING.md2
-rw-r--r--Cargo.lock60
-rw-r--r--Cargo.toml12
-rw-r--r--PRIVACY.md2
-rw-r--r--README.md13
-rw-r--r--crates/base-db/src/input.rs47
-rw-r--r--crates/base-db/src/lib.rs4
-rw-r--r--crates/hir-def/Cargo.toml2
-rw-r--r--crates/hir-def/src/data.rs18
-rw-r--r--crates/hir-def/src/data/adt.rs9
-rw-r--r--crates/hir-def/src/db.rs2
-rw-r--r--crates/hir-def/src/dyn_map.rs7
-rw-r--r--crates/hir-def/src/expander.rs9
-rw-r--r--crates/hir-def/src/expr_store.rs (renamed from crates/hir-def/src/body.rs)359
-rw-r--r--crates/hir-def/src/expr_store/body.rs175
-rw-r--r--crates/hir-def/src/expr_store/lower.rs (renamed from crates/hir-def/src/body/lower.rs)461
-rw-r--r--crates/hir-def/src/expr_store/lower/asm.rs (renamed from crates/hir-def/src/body/lower/asm.rs)2
-rw-r--r--crates/hir-def/src/expr_store/pretty.rs (renamed from crates/hir-def/src/body/pretty.rs)60
-rw-r--r--crates/hir-def/src/expr_store/scope.rs (renamed from crates/hir-def/src/body/scope.rs)54
-rw-r--r--crates/hir-def/src/expr_store/tests.rs (renamed from crates/hir-def/src/body/tests.rs)46
-rw-r--r--crates/hir-def/src/expr_store/tests/block.rs (renamed from crates/hir-def/src/body/tests/block.rs)0
-rw-r--r--crates/hir-def/src/generics.rs2
-rw-r--r--crates/hir-def/src/hir.rs21
-rw-r--r--crates/hir-def/src/import_map.rs22
-rw-r--r--crates/hir-def/src/item_scope.rs18
-rw-r--r--crates/hir-def/src/item_tree.rs8
-rw-r--r--crates/hir-def/src/lang_item.rs1
-rw-r--r--crates/hir-def/src/lib.rs43
-rw-r--r--crates/hir-def/src/macro_expansion_tests/proc_macros.rs20
-rw-r--r--crates/hir-def/src/nameres.rs2
-rw-r--r--crates/hir-def/src/nameres/collector.rs37
-rw-r--r--crates/hir-def/src/nameres/mod_resolution.rs5
-rw-r--r--crates/hir-def/src/path.rs10
-rw-r--r--crates/hir-def/src/resolver.rs12
-rw-r--r--crates/hir-expand/Cargo.toml1
-rw-r--r--crates/hir-expand/src/builtin/fn_macro.rs2
-rw-r--r--crates/hir-expand/src/cfg_process.rs56
-rw-r--r--crates/hir-expand/src/db.rs12
-rw-r--r--crates/hir-expand/src/name.rs6
-rw-r--r--crates/hir-expand/src/prettify_macro_expansion_.rs4
-rw-r--r--crates/hir-expand/src/proc_macro.rs8
-rw-r--r--crates/hir-ty/Cargo.toml2
-rw-r--r--crates/hir-ty/src/autoderef.rs11
-rw-r--r--crates/hir-ty/src/consteval.rs3
-rw-r--r--crates/hir-ty/src/diagnostics/expr.rs6
-rw-r--r--crates/hir-ty/src/diagnostics/match_check.rs3
-rw-r--r--crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs12
-rw-r--r--crates/hir-ty/src/diagnostics/unsafe_check.rs134
-rw-r--r--crates/hir-ty/src/generics.rs3
-rw-r--r--crates/hir-ty/src/infer.rs86
-rw-r--r--crates/hir-ty/src/infer/cast.rs1
-rw-r--r--crates/hir-ty/src/infer/coerce.rs40
-rw-r--r--crates/hir-ty/src/infer/diagnostics.rs91
-rw-r--r--crates/hir-ty/src/infer/expr.rs286
-rw-r--r--crates/hir-ty/src/infer/pat.rs136
-rw-r--r--crates/hir-ty/src/infer/path.rs177
-rw-r--r--crates/hir-ty/src/layout.rs23
-rw-r--r--crates/hir-ty/src/lib.rs27
-rw-r--r--crates/hir-ty/src/lower.rs997
-rw-r--r--crates/hir-ty/src/lower/diagnostics.rs2
-rw-r--r--crates/hir-ty/src/lower/path.rs917
-rw-r--r--crates/hir-ty/src/method_resolution.rs21
-rw-r--r--crates/hir-ty/src/mir.rs13
-rw-r--r--crates/hir-ty/src/mir/eval.rs13
-rw-r--r--crates/hir-ty/src/mir/eval/shim.rs79
-rw-r--r--crates/hir-ty/src/mir/eval/tests.rs69
-rw-r--r--crates/hir-ty/src/mir/lower.rs29
-rw-r--r--crates/hir-ty/src/mir/lower/pattern_matching.rs4
-rw-r--r--crates/hir-ty/src/mir/pretty.rs2
-rw-r--r--crates/hir-ty/src/tests.rs4
-rw-r--r--crates/hir-ty/src/tests/coercion.rs3
-rw-r--r--crates/hir-ty/src/tests/diagnostics.rs50
-rw-r--r--crates/hir-ty/src/tests/method_resolution.rs6
-rw-r--r--crates/hir-ty/src/tests/simple.rs47
-rw-r--r--crates/hir-ty/src/utils.rs78
-rw-r--r--crates/hir-ty/src/variance.rs1
-rw-r--r--crates/hir/src/diagnostics.rs85
-rw-r--r--crates/hir/src/display.rs4
-rw-r--r--crates/hir/src/from_id.rs3
-rw-r--r--crates/hir/src/has_source.rs4
-rw-r--r--crates/hir/src/lib.rs145
-rw-r--r--crates/hir/src/semantics.rs8
-rw-r--r--crates/hir/src/semantics/child_by_source.rs3
-rw-r--r--crates/hir/src/semantics/source_to_def.rs16
-rw-r--r--crates/hir/src/source_analyzer.rs118
-rw-r--r--crates/hir/src/term_search.rs2
-rw-r--r--crates/hir/src/term_search/tactics.rs13
-rw-r--r--crates/ide-assists/src/assist_config.rs1
-rw-r--r--crates/ide-assists/src/handlers/add_missing_match_arms.rs136
-rw-r--r--crates/ide-assists/src/handlers/apply_demorgan.rs4
-rw-r--r--crates/ide-assists/src/handlers/auto_import.rs6
-rw-r--r--crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs102
-rw-r--r--crates/ide-assists/src/handlers/expand_glob_import.rs365
-rw-r--r--crates/ide-assists/src/handlers/generate_delegate_methods.rs25
-rw-r--r--crates/ide-assists/src/handlers/generate_delegate_trait.rs37
-rw-r--r--crates/ide-assists/src/lib.rs1
-rw-r--r--crates/ide-assists/src/tests.rs36
-rw-r--r--crates/ide-assists/src/tests/generated.rs25
-rw-r--r--crates/ide-assists/src/utils.rs4
-rw-r--r--crates/ide-completion/src/completions/dot.rs28
-rw-r--r--crates/ide-completion/src/completions/flyimport.rs8
-rw-r--r--crates/ide-completion/src/completions/postfix.rs91
-rw-r--r--crates/ide-completion/src/completions/postfix/format_like.rs2
-rw-r--r--crates/ide-completion/src/context.rs4
-rw-r--r--crates/ide-completion/src/context/analysis.rs41
-rw-r--r--crates/ide-completion/src/lib.rs8
-rw-r--r--crates/ide-completion/src/render/function.rs18
-rw-r--r--crates/ide-completion/src/snippet.rs12
-rw-r--r--crates/ide-completion/src/tests/expression.rs50
-rw-r--r--crates/ide-db/Cargo.toml1
-rw-r--r--crates/ide-db/src/apply_change.rs9
-rw-r--r--crates/ide-db/src/defs.rs3
-rw-r--r--crates/ide-db/src/generated/lints.rs4
-rw-r--r--crates/ide-db/src/imports/import_assets.rs4
-rw-r--r--crates/ide-db/src/items_locator.rs3
-rw-r--r--crates/ide-db/src/prime_caches.rs123
-rw-r--r--crates/ide-db/src/search.rs1
-rw-r--r--crates/ide-db/src/symbol_index.rs8
-rw-r--r--crates/ide-diagnostics/src/handlers/incorrect_case.rs4
-rw-r--r--crates/ide-diagnostics/src/handlers/invalid_cast.rs35
-rw-r--r--crates/ide-diagnostics/src/handlers/macro_error.rs4
-rw-r--r--crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/missing_unsafe.rs88
-rw-r--r--crates/ide-diagnostics/src/handlers/mutability_errors.rs3
-rw-r--r--crates/ide-diagnostics/src/handlers/parenthesized_generic_args_without_fn_trait.rs59
-rw-r--r--crates/ide-diagnostics/src/handlers/type_mismatch.rs25
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_field.rs7
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_method.rs53
-rw-r--r--crates/ide-diagnostics/src/handlers/unused_variables.rs13
-rw-r--r--crates/ide-diagnostics/src/lib.rs8
-rw-r--r--crates/ide-ssr/src/lib.rs18
-rw-r--r--crates/ide/src/annotations.rs69
-rw-r--r--crates/ide/src/doc_links.rs8
-rw-r--r--crates/ide/src/expand_macro.rs10
-rw-r--r--crates/ide/src/extend_selection.rs10
-rw-r--r--crates/ide/src/fetch_crates.rs10
-rw-r--r--crates/ide/src/file_structure.rs11
-rw-r--r--crates/ide/src/goto_definition.rs62
-rw-r--r--crates/ide/src/goto_implementation.rs10
-rw-r--r--crates/ide/src/goto_type_definition.rs10
-rw-r--r--crates/ide/src/highlight_related.rs12
-rw-r--r--crates/ide/src/hover.rs2
-rw-r--r--crates/ide/src/hover/render.rs1
-rw-r--r--crates/ide/src/inlay_hints.rs10
-rw-r--r--crates/ide/src/inlay_hints/adjustment.rs102
-rw-r--r--crates/ide/src/inlay_hints/bind_pat.rs33
-rw-r--r--crates/ide/src/inlay_hints/closure_captures.rs2
-rw-r--r--crates/ide/src/inlay_hints/extern_block.rs19
-rw-r--r--crates/ide/src/inlay_hints/implicit_drop.rs38
-rw-r--r--crates/ide/src/interpret.rs8
-rw-r--r--crates/ide/src/join_lines.rs14
-rw-r--r--crates/ide/src/lib.rs4
-rw-r--r--crates/ide/src/matching_brace.rs10
-rw-r--r--crates/ide/src/moniker.rs22
-rw-r--r--crates/ide/src/move_item.rs8
-rw-r--r--crates/ide/src/parent_module.rs10
-rw-r--r--crates/ide/src/references.rs10
-rw-r--r--crates/ide/src/rename.rs32
-rw-r--r--crates/ide/src/runnables.rs84
-rw-r--r--crates/ide/src/signature_help.rs4
-rw-r--r--crates/ide/src/static_index.rs38
-rw-r--r--crates/ide/src/status.rs11
-rw-r--r--crates/ide/src/syntax_highlighting.rs190
-rw-r--r--crates/ide/src/syntax_highlighting/format.rs4
-rw-r--r--crates/ide/src/syntax_highlighting/highlight.rs46
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html9
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_general.html15
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_strings.html4
-rw-r--r--crates/ide/src/syntax_highlighting/tests.rs31
-rw-r--r--crates/ide/src/typing.rs11
-rw-r--r--crates/ide/src/typing/on_enter.rs24
-rw-r--r--crates/ide/src/view_crate_graph.rs11
-rw-r--r--crates/ide/src/view_hir.rs9
-rw-r--r--crates/ide/src/view_item_tree.rs8
-rw-r--r--crates/ide/src/view_memory_layout.rs8
-rw-r--r--crates/ide/src/view_mir.rs6
-rw-r--r--crates/ide/src/view_syntax_tree.rs107
-rw-r--r--crates/intern/src/symbol/symbols.rs37
-rw-r--r--crates/limit/Cargo.toml16
-rw-r--r--crates/limit/src/lib.rs67
-rw-r--r--crates/load-cargo/src/lib.rs5
-rw-r--r--crates/parser/Cargo.toml1
-rw-r--r--crates/parser/src/grammar/expressions.rs18
-rw-r--r--crates/parser/src/grammar/items/adt.rs5
-rw-r--r--crates/parser/src/parser.rs5
-rw-r--r--crates/parser/src/syntax_kind/generated.rs2
-rw-r--r--crates/parser/test_data/generated/runner.rs12
-rw-r--r--crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rast59
-rw-r--r--crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rs4
-rw-r--r--crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rast70
-rw-r--r--crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rs2
-rw-r--r--crates/parser/test_data/parser/inline/ok/record_field_default_values.rast28
-rw-r--r--crates/parser/test_data/parser/inline/ok/record_field_default_values.rs1
-rw-r--r--crates/parser/test_data/parser/inline/ok/record_lit.rast47
-rw-r--r--crates/parser/test_data/parser/inline/ok/record_lit.rs2
-rw-r--r--crates/parser/test_data/parser/inline/ok/struct_initializer_with_defaults.rast39
-rw-r--r--crates/parser/test_data/parser/inline/ok/struct_initializer_with_defaults.rs3
-rw-r--r--crates/proc-macro-srv-cli/src/main_loop.rs10
-rw-r--r--crates/proc-macro-srv/src/lib.rs39
-rw-r--r--crates/proc-macro-srv/src/tests/utils.rs2
-rw-r--r--crates/project-model/src/cargo_workspace.rs3
-rw-r--r--crates/project-model/src/lib.rs10
-rw-r--r--crates/project-model/src/project_json.rs7
-rw-r--r--crates/project-model/src/sysroot.rs160
-rw-r--r--crates/project-model/src/tests.rs25
-rw-r--r--crates/project-model/src/workspace.rs422
-rw-r--r--crates/project-model/test_data/output/cargo_hello_world_project_model.txt25
-rw-r--r--crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt25
-rw-r--r--crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt25
-rw-r--r--crates/project-model/test_data/output/rust_project_cfg_groups.txt14
-rw-r--r--crates/project-model/test_data/output/rust_project_hello_world_project_model.txt12
-rw-r--r--crates/rust-analyzer/Cargo.toml2
-rw-r--r--crates/rust-analyzer/src/cli/analysis_stats.rs3
-rw-r--r--crates/rust-analyzer/src/cli/diagnostics.rs4
-rw-r--r--crates/rust-analyzer/src/cli/rustc_tests.rs12
-rw-r--r--crates/rust-analyzer/src/cli/scip.rs83
-rw-r--r--crates/rust-analyzer/src/cli/unresolved_references.rs4
-rw-r--r--crates/rust-analyzer/src/config.rs42
-rw-r--r--crates/rust-analyzer/src/global_state.rs30
-rw-r--r--crates/rust-analyzer/src/handlers/dispatch.rs18
-rw-r--r--crates/rust-analyzer/src/handlers/notification.rs19
-rw-r--r--crates/rust-analyzer/src/handlers/request.rs194
-rw-r--r--crates/rust-analyzer/src/integrated_benchmarks.rs14
-rw-r--r--crates/rust-analyzer/src/lib.rs93
-rw-r--r--crates/rust-analyzer/src/lsp/from_proto.rs30
-rw-r--r--crates/rust-analyzer/src/lsp/semantic_tokens.rs5
-rw-r--r--crates/rust-analyzer/src/main_loop.rs59
-rw-r--r--crates/rust-analyzer/src/reload.rs9
-rw-r--r--crates/rust-analyzer/src/task_pool.rs6
-rw-r--r--crates/rust-analyzer/tests/slow-tests/cli.rs156
-rw-r--r--crates/rust-analyzer/tests/slow-tests/main.rs116
-rw-r--r--crates/rust-analyzer/tests/slow-tests/testdir.rs9
-rw-r--r--crates/stdx/src/panic_context.rs40
-rw-r--r--crates/stdx/src/thread/pool.rs21
-rw-r--r--crates/syntax/rust.ungram2
-rw-r--r--crates/syntax/src/ast/edit_in_place.rs46
-rw-r--r--crates/syntax/src/ast/generated/nodes.rs6
-rw-r--r--crates/syntax/src/ast/generated/tokens.rs2
-rw-r--r--crates/syntax/src/ast/make.rs3
-rw-r--r--crates/syntax/src/ast/prec.rs117
-rw-r--r--crates/test-fixture/src/lib.rs67
-rw-r--r--crates/test-utils/src/lib.rs9
-rw-r--r--crates/test-utils/src/minicore.rs6
-rw-r--r--crates/vfs-notify/src/lib.rs5
-rw-r--r--crates/vfs/src/lib.rs27
-rw-r--r--docs/book/README.md29
-rw-r--r--docs/book/book.toml41
-rw-r--r--docs/book/src/README.md21
-rw-r--r--docs/book/src/SUMMARY.md24
-rw-r--r--docs/book/src/assists.md8
-rw-r--r--docs/book/src/assists_generated.md3846
-rw-r--r--docs/book/src/configuration.md51
-rw-r--r--docs/book/src/configuration_generated.md1206
-rw-r--r--docs/book/src/contributing/README.md (renamed from docs/dev/README.md)14
-rw-r--r--docs/book/src/contributing/architecture.md (renamed from docs/dev/architecture.md)15
-rw-r--r--docs/book/src/contributing/debugging.md (renamed from docs/dev/debugging.md)8
-rw-r--r--docs/book/src/contributing/guide.md (renamed from docs/dev/guide.md)13
-rw-r--r--docs/book/src/contributing/lsp-extensions.md (renamed from docs/dev/lsp-extensions.md)2
-rw-r--r--docs/book/src/contributing/setup.md (renamed from docs/dev/setup.md)0
-rw-r--r--docs/book/src/contributing/style.md (renamed from docs/dev/style.md)8
-rw-r--r--docs/book/src/contributing/syntax.md (renamed from docs/dev/syntax.md)0
-rw-r--r--docs/book/src/diagnostics.md16
-rw-r--r--docs/book/src/diagnostics_generated.md516
-rw-r--r--docs/book/src/editor_features.md203
-rw-r--r--docs/book/src/features.md3
-rw-r--r--docs/book/src/features_generated.md940
-rw-r--r--docs/book/src/installation.md40
-rw-r--r--docs/book/src/non_cargo_based_projects.md246
-rw-r--r--docs/book/src/other_editors.md425
-rw-r--r--docs/book/src/privacy.md15
-rw-r--r--docs/book/src/rust_analyzer_binary.md74
-rw-r--r--docs/book/src/security.md19
-rw-r--r--docs/book/src/troubleshooting.md50
-rw-r--r--docs/book/src/vs_code.md121
-rw-r--r--docs/user/.gitignore1
-rw-r--r--docs/user/generated_config.adoc1197
-rw-r--r--docs/user/manual.adoc1121
-rw-r--r--editors/code/package-lock.json307
-rw-r--r--editors/code/package.json20
-rw-r--r--editors/code/src/commands.ts14
-rw-r--r--editors/code/src/ctx.ts4
-rw-r--r--editors/code/src/syntax_tree_provider.ts140
-rw-r--r--editors/code/walkthrough-setup-tips.md1
-rw-r--r--lib/line-index/src/lib.rs12
-rw-r--r--rust-version2
-rw-r--r--xtask/Cargo.toml3
-rw-r--r--xtask/src/codegen.rs11
-rw-r--r--xtask/src/codegen/assists_doc_tests.rs10
-rw-r--r--xtask/src/codegen/diagnostics_docs.rs6
-rw-r--r--xtask/src/codegen/feature_docs.rs16
-rw-r--r--xtask/src/codegen/grammar.rs3
-rw-r--r--xtask/src/codegen/parser_inline_tests.rs133
-rw-r--r--xtask/src/publish/notes.rs6
-rw-r--r--xtask/src/release.rs21
-rw-r--r--xtask/src/tidy.rs7
-rw-r--r--xtask/test_data/expected.md8
302 files changed, 15087 insertions, 6451 deletions
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index 2ccdc8c042..651502965e 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -14,3 +14,4 @@ f247090558c9ba3c551566eae5882b7ca865225f
b2f6fd4f961fc7e4fbfdb80cae2e6065f8436f15
c48062fe2ab9a2d913d1985a6b0aec4bf936bfc1
f532576ac53ddcc666bc8d59e0b6437065e2f599
+4704881b641884de50645637108b6b6f5b68aaf9
diff --git a/.github/workflows/autopublish.yaml b/.github/workflows/autopublish.yaml
index 5258d9ddd3..e4fa94643b 100644
--- a/.github/workflows/autopublish.yaml
+++ b/.github/workflows/autopublish.yaml
@@ -11,6 +11,7 @@ on:
jobs:
publish:
+ if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
name: publish
runs-on: ubuntu-latest
steps:
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index ec33009239..81b55712d7 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -64,7 +64,11 @@ jobs:
run: |
rustup update --no-self-update ${{ env.RUST_CHANNEL }}
rustup default ${{ env.RUST_CHANNEL }}
- rustup component add --toolchain ${{ env.RUST_CHANNEL }} rustfmt rust-src
+ rustup component add --toolchain ${{ env.RUST_CHANNEL }} rust-src
+ # We always use a nightly rustfmt, regardless of channel, because we need
+ # --file-lines.
+ rustup toolchain add nightly --profile minimal
+ rustup component add --toolchain nightly rustfmt
# https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/rust.json
- name: Install Rust Problem Matcher
if: matrix.os == 'ubuntu-latest'
diff --git a/.github/workflows/publish-libs.yaml b/.github/workflows/publish-libs.yaml
index f1533bf26e..5023a634fd 100644
--- a/.github/workflows/publish-libs.yaml
+++ b/.github/workflows/publish-libs.yaml
@@ -9,6 +9,7 @@ on:
jobs:
publish-libs:
+ if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
name: publish
runs-on: ubuntu-latest
steps:
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 39ac652de0..fe090267dc 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -22,6 +22,7 @@ env:
jobs:
dist:
+ if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
strategy:
matrix:
include:
@@ -33,14 +34,14 @@ jobs:
- os: windows-latest
target: aarch64-pc-windows-msvc
code-target: win32-arm64
- - os: ubuntu-20.04
+ - os: ubuntu-latest
target: x86_64-unknown-linux-gnu
code-target: linux-x64
container: rockylinux:8
- - os: ubuntu-20.04
+ - os: ubuntu-latest
target: aarch64-unknown-linux-gnu
code-target: linux-arm64
- - os: ubuntu-20.04
+ - os: ubuntu-latest
target: arm-unknown-linux-gnueabihf
code-target: linux-armhf
- os: macos-13
@@ -138,6 +139,7 @@ jobs:
path: ./dist
dist-x86_64-unknown-linux-musl:
+ if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
name: dist (x86_64-unknown-linux-musl)
runs-on: ubuntu-latest
env:
@@ -183,6 +185,7 @@ jobs:
path: ./dist
publish:
+ if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
name: publish
runs-on: ubuntu-latest
needs: ["dist", "dist-x86_64-unknown-linux-musl"]
@@ -257,24 +260,24 @@ jobs:
working-directory: ./editors/code
- name: Publish Extension (Code Marketplace, release)
- if: github.ref == 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer')
+ if: github.ref == 'refs/heads/release' && github.repository == 'rust-lang/rust-analyzer'
working-directory: ./editors/code
# token from https://dev.azure.com/rust-analyzer/
run: npx vsce publish --pat ${{ secrets.MARKETPLACE_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix
- name: Publish Extension (OpenVSX, release)
- if: github.ref == 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer')
+ if: github.ref == 'refs/heads/release' && github.repository == 'rust-lang/rust-analyzer'
working-directory: ./editors/code
run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix
timeout-minutes: 2
- name: Publish Extension (Code Marketplace, nightly)
- if: github.ref != 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer')
+ if: github.ref != 'refs/heads/release' && github.repository == 'rust-lang/rust-analyzer'
working-directory: ./editors/code
run: npx vsce publish --pat ${{ secrets.MARKETPLACE_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix --pre-release
- name: Publish Extension (OpenVSX, nightly)
- if: github.ref != 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer')
+ if: github.ref != 'refs/heads/release' && github.repository == 'rust-lang/rust-analyzer'
working-directory: ./editors/code
run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix
timeout-minutes: 2
diff --git a/.gitignore b/.gitignore
index c4470a4507..7192e685e2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,10 +6,11 @@ target/
*.log
*.iml
.vscode/settings.json
-generated_assists.adoc
-generated_features.adoc
-generated_diagnostic.adoc
.DS_Store
/out/
/dump.lsif
.envrc
+docs/book/book
+docs/book/src/assists_generated.md
+docs/book/src/diagnostics_generated.md
+docs/book/src/features_generated.md
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index da65b034be..6f270fc63b 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -4,7 +4,7 @@ Thank you for your interest in contributing to rust-analyzer! There are many way
and we appreciate all of them.
To get a quick overview of the crates and structure of the project take a look at the
-[./docs/dev](./docs/dev) folder.
+[Contributing](https://rust-analyzer.github.io/book/contributing) section of the manual.
If you have any questions please ask them in the [rust-analyzer zulip stream](
https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer) or if unsure where
diff --git a/Cargo.lock b/Cargo.lock
index 2dfca7c480..01e6a39f7c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -559,9 +559,9 @@ dependencies = [
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "limit",
"mbe",
"ra-ap-rustc_abi",
+ "ra-ap-rustc_hashes",
"ra-ap-rustc_parse_format",
"rustc-hash 2.0.0",
"rustc_apfloat",
@@ -591,7 +591,6 @@ dependencies = [
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "limit",
"mbe",
"parser",
"rustc-hash 2.0.0",
@@ -626,11 +625,11 @@ dependencies = [
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "limit",
"nohash-hasher",
"oorandom",
"project-model",
"ra-ap-rustc_abi",
+ "ra-ap-rustc_hashes",
"ra-ap-rustc_index",
"ra-ap-rustc_pattern_analysis",
"rustc-hash 2.0.0",
@@ -744,7 +743,6 @@ dependencies = [
"hir",
"indexmap",
"itertools",
- "limit",
"line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr",
"nohash-hasher",
@@ -944,10 +942,6 @@ dependencies = [
]
[[package]]
-name = "limit"
-version = "0.0.0"
-
-[[package]]
name = "line-index"
version = "0.1.2"
dependencies = [
@@ -1279,7 +1273,6 @@ dependencies = [
"drop_bomb",
"edition",
"expect-test",
- "limit",
"ra-ap-rustc_lexer",
"stdx",
"tracing",
@@ -1408,9 +1401,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.86"
+version = "1.0.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
+checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99"
dependencies = [
"unicode-ident",
]
@@ -1514,20 +1507,30 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_abi"
-version = "0.91.0"
+version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d5246e9e1f450333a990877eabbc36fe0567e7cedd56d5365db319e14079cf2a"
+checksum = "3829c3355d1681ffeaf1450ec71edcdace6820fe2e86469d8fc1ad45e2c96460"
dependencies = [
"bitflags 2.7.0",
+ "ra-ap-rustc_hashes",
"ra-ap-rustc_index",
"tracing",
]
[[package]]
+name = "ra-ap-rustc_hashes"
+version = "0.97.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bd4d6d4c434bec08e02370a4f64a4985312097215a62e82d0f757f3a98e502e"
+dependencies = [
+ "rustc-stable-hash",
+]
+
+[[package]]
name = "ra-ap-rustc_index"
-version = "0.91.0"
+version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59fd8e4f5b34c434ec111efb0e0614954db048b9307d3b2e4cc3c915da9d2160"
+checksum = "bad6fc4bd7522e31096e2de5b0351144fe0684b608791ee26c842bf2da1b19ae"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",
@@ -1535,9 +1538,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index_macros"
-version = "0.91.0"
+version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d34973fe081392bd1edb022e865e9952fcaa093f9cdae183edce64472e5e889"
+checksum = "cfb234e1f84b92be45276c3025bee18789e9bc95bec8789bec961e78edb01c52"
dependencies = [
"proc-macro2",
"quote",
@@ -1546,19 +1549,20 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_lexer"
-version = "0.91.0"
+version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "52fa42c582e21b35e8f61a5afe3c63a9c722d995826762eb19b18beeccf5157f"
+checksum = "7a3a40bd11dc43d1cb110e730b80620cf8102f4cca8920a02b65954da0ed931f"
dependencies = [
+ "memchr",
"unicode-properties",
"unicode-xid",
]
[[package]]
name = "ra-ap-rustc_parse_format"
-version = "0.91.0"
+version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "740383328d7033393e5385f4a6073b880d5811b0fc0fd2559e481f905940f2f8"
+checksum = "5feb877478994cb4c0c0c7a5116a352eefc0634aefc8636feb00a893fa5b7135"
dependencies = [
"ra-ap-rustc_index",
"ra-ap-rustc_lexer",
@@ -1566,9 +1570,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_pattern_analysis"
-version = "0.91.0"
+version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c39f544728f32cebffb1a8b92ba3c1f3dcb4144081438d192137ed197d479a9d"
+checksum = "a76774d35934d464c4115908cde16f76a4f7e540fe1eea6b79336c556e37bdd3"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.0.0",
@@ -1744,6 +1748,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"
[[package]]
+name = "rustc-stable-hash"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2febf9acc5ee5e99d1ad0afcdbccc02d87aa3f857a1f01f825b80eacf8edfcd1"
+
+[[package]]
name = "rustc_apfloat"
version = "0.2.1+llvm-462a31f5a5ab"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2001,9 +2011,9 @@ dependencies = [
[[package]]
name = "tenthash"
-version = "0.4.0"
+version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d67f9f3cf70e0852941d7bc3cb884b49b24b8ee956baf91ad0abae31f5ef11fb"
+checksum = "2d092d622df8bb64e5de8dc86a3667702d5f1e0fe2f0604c6035540703c8cd1e"
[[package]]
name = "test-fixture"
diff --git a/Cargo.toml b/Cargo.toml
index c42ae171d8..1ff36a68e8 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -64,7 +64,6 @@ ide-db = { path = "./crates/ide-db", version = "0.0.0" }
ide-diagnostics = { path = "./crates/ide-diagnostics", version = "0.0.0" }
ide-ssr = { path = "./crates/ide-ssr", version = "0.0.0" }
intern = { path = "./crates/intern", version = "0.0.0" }
-limit = { path = "./crates/limit", version = "0.0.0" }
load-cargo = { path = "./crates/load-cargo", version = "0.0.0" }
mbe = { path = "./crates/mbe", version = "0.0.0" }
parser = { path = "./crates/parser", version = "0.0.0" }
@@ -87,11 +86,12 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
-ra-ap-rustc_lexer = { version = "0.91", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.91", default-features = false }
-ra-ap-rustc_index = { version = "0.91", default-features = false }
-ra-ap-rustc_abi = { version = "0.91", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.91", default-features = false }
+ra-ap-rustc_hashes = { version = "0.97", default-features = false }
+ra-ap-rustc_lexer = { version = "0.97", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.97", default-features = false }
+ra-ap-rustc_index = { version = "0.97", default-features = false }
+ra-ap-rustc_abi = { version = "0.97", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.97", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
diff --git a/PRIVACY.md b/PRIVACY.md
index 89e252be73..ef9c2437ab 100644
--- a/PRIVACY.md
+++ b/PRIVACY.md
@@ -1 +1 @@
-See the [Privacy](https://rust-analyzer.github.io/manual.html#privacy) section of the user manual.
+See the [Privacy](https://rust-analyzer.github.io/book/privacy.html) section of the user manual.
diff --git a/README.md b/README.md
index 552f71f151..4360dea4a1 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
<p align="center">
<img
- src="https://raw.githubusercontent.com/rust-analyzer/rust-analyzer/master/assets/logo-wide.svg"
+ src="https://raw.githubusercontent.com/rust-lang/rust-analyzer/master/assets/logo-wide.svg"
alt="rust-analyzer logo">
</p>
@@ -9,21 +9,22 @@ It is a part of a larger rls-2.0 effort to create excellent IDE support for Rust
## Quick Start
-https://rust-analyzer.github.io/manual.html#installation
+https://rust-analyzer.github.io/book/installation.html
## Documentation
If you want to **contribute** to rust-analyzer check out the [CONTRIBUTING.md](./CONTRIBUTING.md) or
-if you are just curious about how things work under the hood, check the [./docs/dev](./docs/dev)
-folder.
+if you are just curious about how things work under the hood, see the
+[Contributing](https://rust-analyzer.github.io/book/contributing) section of the manual.
If you want to **use** rust-analyzer's language server with your editor of
-choice, check [the manual](https://rust-analyzer.github.io/manual.html) folder.
+choice, check [the manual](https://rust-analyzer.github.io/book/).
It also contains some tips & tricks to help you be more productive when using rust-analyzer.
## Security and Privacy
-See the corresponding sections of [the manual](https://rust-analyzer.github.io/manual.html#security).
+See the [security](https://rust-analyzer.github.io/book/security.html) and
+[privacy](https://rust-analyzer.github.io/book/privacy.html) sections of the manual.
## Communication
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index a0fc8c31ea..bd08387b58 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -129,9 +129,9 @@ impl fmt::Display for CrateName {
}
impl ops::Deref for CrateName {
- type Target = str;
- fn deref(&self) -> &str {
- self.0.as_str()
+ type Target = Symbol;
+ fn deref(&self) -> &Symbol {
+ &self.0
}
}
@@ -230,8 +230,8 @@ impl fmt::Display for CrateDisplayName {
}
impl ops::Deref for CrateDisplayName {
- type Target = str;
- fn deref(&self) -> &str {
+ type Target = Symbol;
+ fn deref(&self) -> &Symbol {
&self.crate_name
}
}
@@ -296,6 +296,9 @@ pub struct CrateData {
pub dependencies: Vec<Dependency>,
pub origin: CrateOrigin,
pub is_proc_macro: bool,
+ /// The working directory to run proc-macros in. This is the workspace root of the cargo workspace
+ /// for workspace members, the crate manifest dir otherwise.
+ pub proc_macro_cwd: Option<AbsPathBuf>,
}
#[derive(Default, Clone, PartialEq, Eq)]
@@ -360,8 +363,9 @@ impl CrateGraph {
cfg_options: Arc<CfgOptions>,
potential_cfg_options: Option<Arc<CfgOptions>>,
mut env: Env,
- is_proc_macro: bool,
origin: CrateOrigin,
+ is_proc_macro: bool,
+ proc_macro_cwd: Option<AbsPathBuf>,
) -> CrateId {
env.entries.shrink_to_fit();
let data = CrateData {
@@ -375,6 +379,7 @@ impl CrateGraph {
dependencies: Vec::new(),
origin,
is_proc_macro,
+ proc_macro_cwd,
};
self.arena.alloc(data)
}
@@ -698,8 +703,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
- false,
CrateOrigin::Local { repo: None, name: None },
+ false,
+ None,
);
let crate2 = graph.add_crate_root(
FileId::from_raw(2u32),
@@ -709,8 +715,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
- false,
CrateOrigin::Local { repo: None, name: None },
+ false,
+ None,
);
let crate3 = graph.add_crate_root(
FileId::from_raw(3u32),
@@ -720,8 +727,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
- false,
CrateOrigin::Local { repo: None, name: None },
+ false,
+ None,
);
assert!(graph
.add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))
@@ -745,8 +753,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
- false,
CrateOrigin::Local { repo: None, name: None },
+ false,
+ None,
);
let crate2 = graph.add_crate_root(
FileId::from_raw(2u32),
@@ -756,8 +765,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
- false,
CrateOrigin::Local { repo: None, name: None },
+ false,
+ None,
);
assert!(graph
.add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))
@@ -778,8 +788,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
- false,
CrateOrigin::Local { repo: None, name: None },
+ false,
+ None,
);
let crate2 = graph.add_crate_root(
FileId::from_raw(2u32),
@@ -789,8 +800,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
- false,
CrateOrigin::Local { repo: None, name: None },
+ false,
+ None,
);
let crate3 = graph.add_crate_root(
FileId::from_raw(3u32),
@@ -800,8 +812,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
- false,
CrateOrigin::Local { repo: None, name: None },
+ false,
+ None,
);
assert!(graph
.add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))
@@ -822,8 +835,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
- false,
CrateOrigin::Local { repo: None, name: None },
+ false,
+ None,
);
let crate2 = graph.add_crate_root(
FileId::from_raw(2u32),
@@ -833,8 +847,9 @@ mod tests {
Default::default(),
Default::default(),
Env::default(),
- false,
CrateOrigin::Local { repo: None, name: None },
+ false,
+ None,
);
assert!(graph
.add_dep(
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index c7e4168f6b..eed8c88683 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -10,7 +10,7 @@ use rustc_hash::FxHashMap;
use span::EditionedFileId;
use syntax::{ast, Parse, SourceFile, SyntaxError};
use triomphe::Arc;
-use vfs::{AbsPathBuf, FileId};
+use vfs::FileId;
pub use crate::{
change::FileChange,
@@ -85,8 +85,6 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
/// Crate related data shared by the whole workspace.
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct CrateWorkspaceData {
- /// The working directory to run proc-macros in. This is usually the workspace root of cargo workspaces.
- pub proc_macro_cwd: Option<AbsPathBuf>,
// FIXME: Consider removing this, making HirDatabase::target_data_layout an input query
pub data_layout: TargetLayoutLoadResult,
/// Toolchain version used to compile the crate.
diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml
index 375f18d9fe..9a448ec14e 100644
--- a/crates/hir-def/Cargo.toml
+++ b/crates/hir-def/Cargo.toml
@@ -31,6 +31,7 @@ triomphe.workspace = true
rustc_apfloat = "0.2.0"
text-size.workspace = true
+ra-ap-rustc_hashes.workspace = true
ra-ap-rustc_parse_format.workspace = true
ra-ap-rustc_abi.workspace = true
@@ -43,7 +44,6 @@ hir-expand.workspace = true
mbe.workspace = true
cfg.workspace = true
tt.workspace = true
-limit.workspace = true
span.workspace = true
diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs
index 12f5f6ad79..bec6627877 100644
--- a/crates/hir-def/src/data.rs
+++ b/crates/hir-def/src/data.rs
@@ -95,10 +95,14 @@ impl FunctionData {
.map(Box::new);
let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists();
if flags.contains(FnFlags::HAS_UNSAFE_KW)
- && !crate_graph[krate].edition.at_least_2024()
&& attrs.by_key(&sym::rustc_deprecated_safe_2024).exists()
{
flags.remove(FnFlags::HAS_UNSAFE_KW);
+ flags.insert(FnFlags::DEPRECATED_SAFE_2024);
+ }
+
+ if attrs.by_key(&sym::target_feature).exists() {
+ flags.insert(FnFlags::HAS_TARGET_FEATURE);
}
Arc::new(FunctionData {
@@ -148,6 +152,10 @@ impl FunctionData {
self.flags.contains(FnFlags::HAS_UNSAFE_KW)
}
+ pub fn is_deprecated_safe_2024(&self) -> bool {
+ self.flags.contains(FnFlags::DEPRECATED_SAFE_2024)
+ }
+
pub fn is_safe(&self) -> bool {
self.flags.contains(FnFlags::HAS_SAFE_KW)
}
@@ -155,6 +163,10 @@ impl FunctionData {
pub fn is_varargs(&self) -> bool {
self.flags.contains(FnFlags::IS_VARARGS)
}
+
+ pub fn has_target_feature(&self) -> bool {
+ self.flags.contains(FnFlags::HAS_TARGET_FEATURE)
+ }
}
fn parse_rustc_legacy_const_generics(tt: &crate::tt::TopSubtree) -> Box<[u32]> {
@@ -238,6 +250,7 @@ bitflags::bitflags! {
const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 3;
const SKIP_ARRAY_DURING_METHOD_DISPATCH = 1 << 4;
const SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH = 1 << 5;
+ const RUSTC_PAREN_SUGAR = 1 << 6;
}
}
@@ -282,6 +295,9 @@ impl TraitData {
if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() {
flags |= TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
}
+ if attrs.by_key(&sym::rustc_paren_sugar).exists() {
+ flags |= TraitFlags::RUSTC_PAREN_SUGAR;
+ }
let mut skip_array_during_method_dispatch =
attrs.by_key(&sym::rustc_skip_array_during_method_dispatch).exists();
diff --git a/crates/hir-def/src/data/adt.rs b/crates/hir-def/src/data/adt.rs
index 8fc1985403..c94622016d 100644
--- a/crates/hir-def/src/data/adt.rs
+++ b/crates/hir-def/src/data/adt.rs
@@ -9,6 +9,7 @@ use hir_expand::name::Name;
use intern::sym;
use la_arena::Arena;
use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
+use rustc_hashes::Hash64;
use triomphe::Arc;
use tt::iter::TtElement;
@@ -172,7 +173,13 @@ fn parse_repr_tt(tt: &TopSubtree) -> Option<ReprOptions> {
}
}
- Some(ReprOptions { int, align: max_align, pack: min_pack, flags, field_shuffle_seed: 0 })
+ Some(ReprOptions {
+ int,
+ align: max_align,
+ pack: min_pack,
+ flags,
+ field_shuffle_seed: Hash64::ZERO,
+ })
}
impl StructData {
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index bf6cc1dcad..598a850898 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -10,12 +10,12 @@ use triomphe::Arc;
use crate::{
attr::{Attrs, AttrsWithOwner},
- body::{scope::ExprScopes, Body, BodySourceMap},
data::{
adt::{EnumData, EnumVariantData, StructData, VariantData},
ConstData, ExternCrateDeclData, FunctionData, ImplData, Macro2Data, MacroRulesData,
ProcMacroData, StaticData, TraitAliasData, TraitData, TypeAliasData,
},
+ expr_store::{scope::ExprScopes, Body, BodySourceMap},
generics::GenericParams,
import_map::ImportMap,
item_tree::{AttrOwner, ItemTree, ItemTreeSourceMaps},
diff --git a/crates/hir-def/src/dyn_map.rs b/crates/hir-def/src/dyn_map.rs
index 0f73595347..e9318d146d 100644
--- a/crates/hir-def/src/dyn_map.rs
+++ b/crates/hir-def/src/dyn_map.rs
@@ -31,9 +31,9 @@ pub mod keys {
use crate::{
dyn_map::{DynMap, Policy},
- BlockId, ConstId, EnumId, EnumVariantId, ExternCrateId, FieldId, FunctionId, ImplId,
- LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId,
- TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
+ BlockId, ConstId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FieldId, FunctionId,
+ ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId,
+ TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
};
pub type Key<K, V> = crate::dyn_map::Key<AstPtr<K>, V, AstPtrPolicy<K, V>>;
@@ -44,6 +44,7 @@ pub mod keys {
pub const STATIC: Key<ast::Static, StaticId> = Key::new();
pub const TYPE_ALIAS: Key<ast::TypeAlias, TypeAliasId> = Key::new();
pub const IMPL: Key<ast::Impl, ImplId> = Key::new();
+ pub const EXTERN_BLOCK: Key<ast::ExternBlock, ExternBlockId> = Key::new();
pub const TRAIT: Key<ast::Trait, TraitId> = Key::new();
pub const TRAIT_ALIAS: Key<ast::TraitAlias, TraitAliasId> = Key::new();
pub const STRUCT: Key<ast::Struct, StructId> = Key::new();
diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs
index 108258d5a1..a1b3123c99 100644
--- a/crates/hir-def/src/expander.rs
+++ b/crates/hir-def/src/expander.rs
@@ -9,7 +9,6 @@ use hir_expand::{
attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandErrorKind,
ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
};
-use limit::Limit;
use span::{Edition, SyntaxContextId};
use syntax::{ast, Parse};
use triomphe::Arc;
@@ -28,18 +27,18 @@ pub struct Expander {
pub(crate) module: ModuleId,
/// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached.
recursion_depth: u32,
- recursion_limit: Limit,
+ recursion_limit: usize,
}
impl Expander {
pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander {
let recursion_limit = module.def_map(db).recursion_limit() as usize;
- let recursion_limit = Limit::new(if cfg!(test) {
+ let recursion_limit = if cfg!(test) {
// Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
std::cmp::min(32, recursion_limit)
} else {
recursion_limit
- });
+ };
Expander {
current_file_id,
module,
@@ -194,7 +193,7 @@ impl Expander {
let Some(call_id) = value else {
return ExpandResult { value: None, err };
};
- if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() {
+ if self.recursion_depth as usize > self.recursion_limit {
self.recursion_depth = u32::MAX;
cov_mark::hit!(your_stack_belongs_to_me);
return ExpandResult::only_err(ExpandError::new(
diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/expr_store.rs
index de43924930..5ff6a7ffe5 100644
--- a/crates/hir-def/src/body.rs
+++ b/crates/hir-def/src/expr_store.rs
@@ -1,18 +1,19 @@
-//! Defines `Body`: a lowered representation of bodies of functions, statics and
+//! Defines `ExpressionStore`: a lowered representation of functions, statics and
//! consts.
+mod body;
mod lower;
mod pretty;
pub mod scope;
+
#[cfg(test)]
mod tests;
use std::ops::{Deref, Index};
-use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{name::Name, ExpandError, InFile};
-use la_arena::{Arena, ArenaMap, Idx, RawIdx};
+use la_arena::{Arena, ArenaMap};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::{Edition, MacroFileId, SyntaxContextData};
@@ -22,19 +23,18 @@ use tt::TextRange;
use crate::{
db::DefDatabase,
- expander::Expander,
hir::{
- dummy_expr_id, Array, AsmOperand, Binding, BindingId, Expr, ExprId, ExprOrPatId, Label,
- LabelId, Pat, PatId, RecordFieldPat, Statement,
+ Array, AsmOperand, Binding, BindingId, Expr, ExprId, ExprOrPatId, Label, LabelId, Pat,
+ PatId, RecordFieldPat, Statement,
},
- item_tree::AttrOwner,
nameres::DefMap,
path::{ModPath, Path},
- src::HasSource,
type_ref::{TypeRef, TypeRefId, TypesMap, TypesSourceMap},
- BlockId, DefWithBodyId, HasModule, Lookup, SyntheticSyntax,
+ BlockId, DefWithBodyId, Lookup, SyntheticSyntax,
};
+pub use self::body::{Body, BodySourceMap};
+
/// A wrapper around [`span::SyntaxContextId`] that is intended only for comparisons.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct HygieneId(span::SyntaxContextId);
@@ -58,9 +58,29 @@ impl HygieneId {
}
}
-/// The body of an item (function, const etc.).
+pub type ExprPtr = AstPtr<ast::Expr>;
+pub type ExprSource = InFile<ExprPtr>;
+
+pub type PatPtr = AstPtr<ast::Pat>;
+pub type PatSource = InFile<PatPtr>;
+
+pub type LabelPtr = AstPtr<ast::Label>;
+pub type LabelSource = InFile<LabelPtr>;
+
+pub type FieldPtr = AstPtr<ast::RecordExprField>;
+pub type FieldSource = InFile<FieldPtr>;
+
+pub type PatFieldPtr = AstPtr<Either<ast::RecordExprField, ast::RecordPatField>>;
+pub type PatFieldSource = InFile<PatFieldPtr>;
+
+pub type ExprOrPatPtr = AstPtr<Either<ast::Expr, ast::Pat>>;
+pub type ExprOrPatSource = InFile<ExprOrPatPtr>;
+
+pub type SelfParamPtr = AstPtr<ast::SelfParam>;
+pub type MacroCallPtr = AstPtr<ast::MacroCall>;
+
#[derive(Debug, Eq, PartialEq)]
-pub struct Body {
+pub struct ExpressionStore {
pub exprs: Arena<Expr>,
pub pats: Arena<Pat>,
pub bindings: Arena<Binding>,
@@ -68,19 +88,9 @@ pub struct Body {
/// Id of the closure/coroutine that owns the corresponding binding. If a binding is owned by the
/// top level expression, it will not be listed in here.
pub binding_owners: FxHashMap<BindingId, ExprId>,
- /// The patterns for the function's parameters. While the parameter types are
- /// part of the function signature, the patterns are not (they don't change
- /// the external type of the function).
- ///
- /// If this `Body` is for the body of a constant, this will just be
- /// empty.
- pub params: Box<[PatId]>,
- pub self_param: Option<BindingId>,
- /// The `ExprId` of the actual body expression.
- pub body_expr: ExprId,
pub types: TypesMap,
- /// Block expressions in this body that may contain inner items.
- block_scopes: Vec<BlockId>,
+ /// Block expressions in this store that may contain inner items.
+ block_scopes: Box<[BlockId]>,
/// A map from binding to its hygiene ID.
///
@@ -92,56 +102,24 @@ pub struct Body {
binding_hygiene: FxHashMap<BindingId, HygieneId>,
/// A map from an variable usages to their hygiene ID.
///
- /// Expressions that can be recorded here are single segment path, although not all single segments path refer
+ /// Expressions (and destructuring patterns) that can be recorded here are single segment paths, although not all single segment paths refer
/// to variables and have hygiene (some refer to items, we don't know at this stage).
- expr_hygiene: FxHashMap<ExprId, HygieneId>,
- /// A map from a destructuring assignment possible variable usages to their hygiene ID.
- pat_hygiene: FxHashMap<PatId, HygieneId>,
+ ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
}
-pub type ExprPtr = AstPtr<ast::Expr>;
-pub type ExprSource = InFile<ExprPtr>;
-
-pub type PatPtr = AstPtr<ast::Pat>;
-pub type PatSource = InFile<PatPtr>;
-
-pub type LabelPtr = AstPtr<ast::Label>;
-pub type LabelSource = InFile<LabelPtr>;
-
-pub type FieldPtr = AstPtr<ast::RecordExprField>;
-pub type FieldSource = InFile<FieldPtr>;
-
-pub type PatFieldPtr = AstPtr<Either<ast::RecordExprField, ast::RecordPatField>>;
-pub type PatFieldSource = InFile<PatFieldPtr>;
-
-pub type ExprOrPatPtr = AstPtr<Either<ast::Expr, ast::Pat>>;
-pub type ExprOrPatSource = InFile<ExprOrPatPtr>;
-
-/// An item body together with the mapping from syntax nodes to HIR expression
-/// IDs. This is needed to go from e.g. a position in a file to the HIR
-/// expression containing it; but for type inference etc., we want to operate on
-/// a structure that is agnostic to the actual positions of expressions in the
-/// file, so that we don't recompute types whenever some whitespace is typed.
-///
-/// One complication here is that, due to macro expansion, a single `Body` might
-/// be spread across several files. So, for each ExprId and PatId, we record
-/// both the HirFileId and the position inside the file. However, we only store
-/// AST -> ExprId mapping for non-macro files, as it is not clear how to handle
-/// this properly for macros.
-#[derive(Default, Debug, Eq, PartialEq)]
-pub struct BodySourceMap {
+#[derive(Debug, Eq, PartialEq, Default)]
+pub struct ExpressionStoreSourceMap {
// AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map
// to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected).
expr_map: FxHashMap<ExprSource, ExprOrPatId>,
- expr_map_back: ArenaMap<ExprId, ExprSource>,
+ expr_map_back: ArenaMap<ExprId, ExprOrPatSource>,
- pat_map: FxHashMap<PatSource, PatId>,
+ pat_map: FxHashMap<PatSource, ExprOrPatId>,
pat_map_back: ArenaMap<PatId, ExprOrPatSource>,
label_map: FxHashMap<LabelSource, LabelId>,
label_map_back: ArenaMap<LabelId, LabelSource>,
- self_param: Option<InFile<AstPtr<ast::SelfParam>>>,
binding_definitions: FxHashMap<BindingId, SmallVec<[PatId; 4]>>,
/// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
@@ -153,11 +131,25 @@ pub struct BodySourceMap {
template_map: Option<Box<FormatTemplate>>,
- expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, MacroFileId>,
+ expansions: FxHashMap<InFile<MacroCallPtr>, MacroFileId>,
- /// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in
+ /// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
/// the source map (since they're just as volatile).
- diagnostics: Vec<BodyDiagnostic>,
+ diagnostics: Vec<ExpressionStoreDiagnostics>,
+}
+
+/// A mutable builder for an [`ExpressionStore`], populated during lowering and frozen via `finish`.
+#[derive(Debug, Eq, PartialEq, Default)]
+pub struct ExpressionStoreBuilder {
+ pub exprs: Arena<Expr>,
+ pub pats: Arena<Pat>,
+ pub bindings: Arena<Binding>,
+ pub labels: Arena<Label>,
+ pub binding_owners: FxHashMap<BindingId, ExprId>,
+ pub types: TypesMap,
+ block_scopes: Vec<BlockId>,
+ binding_hygiene: FxHashMap<BindingId, HygieneId>,
+ ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
}
#[derive(Default, Debug, Eq, PartialEq)]
@@ -171,166 +163,62 @@ struct FormatTemplate {
/// The value stored for each capture is its template literal and offset inside it. The template literal
/// is from the `format_args[_nl]!()` macro and so needs to be mapped up once to go to the user-written
/// template.
- implicit_capture_to_source: FxHashMap<ExprId, InFile<(AstPtr<ast::Expr>, TextRange)>>,
+ implicit_capture_to_source: FxHashMap<ExprId, InFile<(ExprPtr, TextRange)>>,
}
#[derive(Debug, Eq, PartialEq)]
-pub enum BodyDiagnostic {
+pub enum ExpressionStoreDiagnostics {
InactiveCode { node: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions },
- MacroError { node: InFile<AstPtr<ast::MacroCall>>, err: ExpandError },
- UnresolvedMacroCall { node: InFile<AstPtr<ast::MacroCall>>, path: ModPath },
+ MacroError { node: InFile<MacroCallPtr>, err: ExpandError },
+ UnresolvedMacroCall { node: InFile<MacroCallPtr>, path: ModPath },
UnreachableLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name },
AwaitOutsideOfAsync { node: InFile<AstPtr<ast::AwaitExpr>>, location: String },
UndeclaredLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name },
}
-impl Body {
- pub(crate) fn body_with_source_map_query(
- db: &dyn DefDatabase,
- def: DefWithBodyId,
- ) -> (Arc<Body>, Arc<BodySourceMap>) {
- let _p = tracing::info_span!("body_with_source_map_query").entered();
- let mut params = None;
-
- let mut is_async_fn = false;
- let InFile { file_id, value: body } = {
- match def {
- DefWithBodyId::FunctionId(f) => {
- let data = db.function_data(f);
- let f = f.lookup(db);
- let src = f.source(db);
- params = src.value.param_list().map(move |param_list| {
- let item_tree = f.id.item_tree(db);
- let func = &item_tree[f.id.value];
- let krate = f.container.module(db).krate;
- let crate_graph = db.crate_graph();
- (
- param_list,
- (0..func.params.len()).map(move |idx| {
- item_tree
- .attrs(
- db,
- krate,
- AttrOwner::Param(
- f.id.value,
- Idx::from_raw(RawIdx::from(idx as u32)),
- ),
- )
- .is_cfg_enabled(&crate_graph[krate].cfg_options)
- }),
- )
- });
- is_async_fn = data.is_async();
- src.map(|it| it.body().map(ast::Expr::from))
- }
- DefWithBodyId::ConstId(c) => {
- let c = c.lookup(db);
- let src = c.source(db);
- src.map(|it| it.body())
- }
- DefWithBodyId::StaticId(s) => {
- let s = s.lookup(db);
- let src = s.source(db);
- src.map(|it| it.body())
- }
- DefWithBodyId::VariantId(v) => {
- let s = v.lookup(db);
- let src = s.source(db);
- src.map(|it| it.expr())
- }
- DefWithBodyId::InTypeConstId(c) => c.lookup(db).id.map(|_| c.source(db).expr()),
- }
- };
- let module = def.module(db);
- let expander = Expander::new(db, file_id, module);
- let (mut body, mut source_map) =
- Body::new(db, def, expander, params, body, module.krate, is_async_fn);
- body.shrink_to_fit();
- source_map.shrink_to_fit();
-
- (Arc::new(body), Arc::new(source_map))
- }
-
- pub(crate) fn body_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc<Body> {
- db.body_with_source_map(def).0
- }
-
- /// Returns an iterator over all block expressions in this body that define inner items.
- pub fn blocks<'a>(
- &'a self,
- db: &'a dyn DefDatabase,
- ) -> impl Iterator<Item = (BlockId, Arc<DefMap>)> + 'a {
- self.block_scopes.iter().map(move |&block| (block, db.block_def_map(block)))
- }
-
- pub fn pretty_print(
- &self,
- db: &dyn DefDatabase,
- owner: DefWithBodyId,
- edition: Edition,
- ) -> String {
- pretty::print_body_hir(db, self, owner, edition)
- }
-
- pub fn pretty_print_expr(
- &self,
- db: &dyn DefDatabase,
- owner: DefWithBodyId,
- expr: ExprId,
- edition: Edition,
- ) -> String {
- pretty::print_expr_hir(db, self, owner, expr, edition)
- }
-
- pub fn pretty_print_pat(
- &self,
- db: &dyn DefDatabase,
- owner: DefWithBodyId,
- pat: PatId,
- oneline: bool,
- edition: Edition,
- ) -> String {
- pretty::print_pat_hir(db, self, owner, pat, oneline, edition)
- }
-
- fn new(
- db: &dyn DefDatabase,
- owner: DefWithBodyId,
- expander: Expander,
- params: Option<(ast::ParamList, impl Iterator<Item = bool>)>,
- body: Option<ast::Expr>,
- krate: CrateId,
- is_async_fn: bool,
- ) -> (Body, BodySourceMap) {
- lower::lower(db, owner, expander, params, body, krate, is_async_fn)
- }
-
- fn shrink_to_fit(&mut self) {
+impl ExpressionStoreBuilder {
+ fn finish(self) -> ExpressionStore {
let Self {
- body_expr: _,
- params: _,
- self_param: _,
block_scopes,
- exprs,
- labels,
- pats,
- bindings,
- binding_owners,
- binding_hygiene,
- expr_hygiene,
- pat_hygiene,
- types,
+ mut exprs,
+ mut labels,
+ mut pats,
+ mut bindings,
+ mut binding_owners,
+ mut binding_hygiene,
+ mut ident_hygiene,
+ mut types,
} = self;
- block_scopes.shrink_to_fit();
exprs.shrink_to_fit();
labels.shrink_to_fit();
pats.shrink_to_fit();
bindings.shrink_to_fit();
binding_owners.shrink_to_fit();
binding_hygiene.shrink_to_fit();
- expr_hygiene.shrink_to_fit();
- pat_hygiene.shrink_to_fit();
+ ident_hygiene.shrink_to_fit();
types.shrink_to_fit();
+
+ ExpressionStore {
+ exprs,
+ pats,
+ bindings,
+ labels,
+ binding_owners,
+ types,
+ block_scopes: block_scopes.into_boxed_slice(),
+ binding_hygiene,
+ ident_hygiene,
+ }
+ }
+}
+
+impl ExpressionStore {
+ /// Returns an iterator over all block expressions in this store that define inner items.
+ pub fn blocks<'a>(
+ &'a self,
+ db: &'a dyn DefDatabase,
+ ) -> impl Iterator<Item = (BlockId, Arc<DefMap>)> + 'a {
+ self.block_scopes.iter().map(move |&block| (block, db.block_def_map(block)))
}
pub fn walk_bindings_in_pat(&self, pat_id: PatId, mut f: impl FnMut(BindingId)) {
@@ -658,11 +546,11 @@ impl Body {
}
pub fn expr_path_hygiene(&self, expr: ExprId) -> HygieneId {
- self.expr_hygiene.get(&expr).copied().unwrap_or(HygieneId::ROOT)
+ self.ident_hygiene.get(&expr.into()).copied().unwrap_or(HygieneId::ROOT)
}
pub fn pat_path_hygiene(&self, pat: PatId) -> HygieneId {
- self.pat_hygiene.get(&pat).copied().unwrap_or(HygieneId::ROOT)
+ self.ident_hygiene.get(&pat.into()).copied().unwrap_or(HygieneId::ROOT)
}
pub fn expr_or_pat_path_hygiene(&self, id: ExprOrPatId) -> HygieneId {
@@ -673,27 +561,7 @@ impl Body {
}
}
-impl Default for Body {
- fn default() -> Self {
- Self {
- body_expr: dummy_expr_id(),
- exprs: Default::default(),
- pats: Default::default(),
- bindings: Default::default(),
- labels: Default::default(),
- params: Default::default(),
- block_scopes: Default::default(),
- binding_owners: Default::default(),
- self_param: Default::default(),
- binding_hygiene: Default::default(),
- expr_hygiene: Default::default(),
- pat_hygiene: Default::default(),
- types: Default::default(),
- }
- }
-}
-
-impl Index<ExprId> for Body {
+impl Index<ExprId> for ExpressionStore {
type Output = Expr;
fn index(&self, expr: ExprId) -> &Expr {
@@ -701,7 +569,7 @@ impl Index<ExprId> for Body {
}
}
-impl Index<PatId> for Body {
+impl Index<PatId> for ExpressionStore {
type Output = Pat;
fn index(&self, pat: PatId) -> &Pat {
@@ -709,7 +577,7 @@ impl Index<PatId> for Body {
}
}
-impl Index<LabelId> for Body {
+impl Index<LabelId> for ExpressionStore {
type Output = Label;
fn index(&self, label: LabelId) -> &Label {
@@ -717,7 +585,7 @@ impl Index<LabelId> for Body {
}
}
-impl Index<BindingId> for Body {
+impl Index<BindingId> for ExpressionStore {
type Output = Binding;
fn index(&self, b: BindingId) -> &Binding {
@@ -725,7 +593,7 @@ impl Index<BindingId> for Body {
}
}
-impl Index<TypeRefId> for Body {
+impl Index<TypeRefId> for ExpressionStore {
type Output = TypeRef;
fn index(&self, b: TypeRefId) -> &TypeRef {
@@ -735,15 +603,15 @@ impl Index<TypeRefId> for Body {
// FIXME: Change `node_` prefix to something more reasonable.
// Perhaps `expr_syntax` and `expr_id`?
-impl BodySourceMap {
+impl ExpressionStoreSourceMap {
pub fn expr_or_pat_syntax(&self, id: ExprOrPatId) -> Result<ExprOrPatSource, SyntheticSyntax> {
match id {
- ExprOrPatId::ExprId(id) => self.expr_syntax(id).map(|it| it.map(AstPtr::wrap_left)),
+ ExprOrPatId::ExprId(id) => self.expr_syntax(id),
ExprOrPatId::PatId(id) => self.pat_syntax(id),
}
}
- pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprSource, SyntheticSyntax> {
+ pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprOrPatSource, SyntheticSyntax> {
self.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax)
}
@@ -757,9 +625,7 @@ impl BodySourceMap {
self.expansions.get(&src).cloned()
}
- pub fn macro_calls(
- &self,
- ) -> impl Iterator<Item = (InFile<AstPtr<ast::MacroCall>>, MacroFileId)> + '_ {
+ pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroFileId)> + '_ {
self.expansions.iter().map(|(&a, &b)| (a, b))
}
@@ -767,11 +633,7 @@ impl BodySourceMap {
self.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax)
}
- pub fn self_param_syntax(&self) -> Option<InFile<AstPtr<ast::SelfParam>>> {
- self.self_param
- }
-
- pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<PatId> {
+ pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<ExprOrPatId> {
self.pat_map.get(&node.map(AstPtr::new)).cloned()
}
@@ -801,9 +663,7 @@ impl BodySourceMap {
self.expr_map.get(&src).copied()
}
- pub fn expansions(
- &self,
- ) -> impl Iterator<Item = (&InFile<AstPtr<ast::MacroCall>>, &MacroFileId)> {
+ pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroFileId)> {
self.expansions.iter()
}
@@ -823,7 +683,7 @@ impl BodySourceMap {
pub fn format_args_implicit_capture(
&self,
capture_expr: ExprId,
- ) -> Option<InFile<(AstPtr<ast::Expr>, TextRange)>> {
+ ) -> Option<InFile<(ExprPtr, TextRange)>> {
self.template_map.as_ref()?.implicit_capture_to_source.get(&capture_expr).copied()
}
@@ -837,14 +697,13 @@ impl BodySourceMap {
.zip(self.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref))
}
- /// Get a reference to the body source map's diagnostics.
- pub fn diagnostics(&self) -> &[BodyDiagnostic] {
+ /// Get a reference to the source map's diagnostics.
+ pub fn diagnostics(&self) -> &[ExpressionStoreDiagnostics] {
&self.diagnostics
}
fn shrink_to_fit(&mut self) {
let Self {
- self_param: _,
expr_map,
expr_map_back,
pat_map,
diff --git a/crates/hir-def/src/expr_store/body.rs b/crates/hir-def/src/expr_store/body.rs
new file mode 100644
index 0000000000..a55fec4f8b
--- /dev/null
+++ b/crates/hir-def/src/expr_store/body.rs
@@ -0,0 +1,175 @@
+//! Defines `Body`: a lowered representation of the bodies of functions,
+//! statics and consts.
+use std::ops;
+
+use hir_expand::{InFile, Lookup};
+use la_arena::{Idx, RawIdx};
+use span::Edition;
+use syntax::ast;
+use triomphe::Arc;
+
+use crate::{
+ db::DefDatabase,
+ expander::Expander,
+ expr_store::{lower, pretty, ExpressionStore, ExpressionStoreSourceMap, SelfParamPtr},
+ hir::{BindingId, ExprId, PatId},
+ item_tree::AttrOwner,
+ src::HasSource,
+ DefWithBodyId, HasModule,
+};
+
+/// The body of an item (function, const etc.).
+#[derive(Debug, Eq, PartialEq)]
+pub struct Body {
+ pub store: ExpressionStore,
+ /// The patterns for the function's parameters. While the parameter types are
+ /// part of the function signature, the patterns are not (they don't change
+ /// the external type of the function).
+ ///
+ /// If this `Body` is for the body of a constant, this will just be
+ /// empty.
+ pub params: Box<[PatId]>,
+ pub self_param: Option<BindingId>,
+ /// The `ExprId` of the actual body expression.
+ pub body_expr: ExprId,
+}
+
+impl ops::Deref for Body {
+ type Target = ExpressionStore;
+
+ fn deref(&self) -> &Self::Target {
+ &self.store
+ }
+}
+
+/// An item body together with the mapping from syntax nodes to HIR expression
+/// IDs. This is needed to go from e.g. a position in a file to the HIR
+/// expression containing it; but for type inference etc., we want to operate on
+/// a structure that is agnostic to the actual positions of expressions in the
+/// file, so that we don't recompute types whenever some whitespace is typed.
+///
+/// One complication here is that, due to macro expansion, a single `Body` might
+/// be spread across several files. So, for each ExprId and PatId, we record
+/// both the HirFileId and the position inside the file. However, we only store
+/// AST -> ExprId mapping for non-macro files, as it is not clear how to handle
+/// this properly for macros.
+#[derive(Default, Debug, Eq, PartialEq)]
+pub struct BodySourceMap {
+ pub self_param: Option<InFile<SelfParamPtr>>,
+ pub store: ExpressionStoreSourceMap,
+}
+
+impl ops::Deref for BodySourceMap {
+ type Target = ExpressionStoreSourceMap;
+
+ fn deref(&self) -> &Self::Target {
+ &self.store
+ }
+}
+
+impl Body {
+ pub(crate) fn body_with_source_map_query(
+ db: &dyn DefDatabase,
+ def: DefWithBodyId,
+ ) -> (Arc<Body>, Arc<BodySourceMap>) {
+ let _p = tracing::info_span!("body_with_source_map_query").entered();
+ let mut params = None;
+
+ let mut is_async_fn = false;
+ let InFile { file_id, value: body } = {
+ match def {
+ DefWithBodyId::FunctionId(f) => {
+ let data = db.function_data(f);
+ let f = f.lookup(db);
+ let src = f.source(db);
+ params = src.value.param_list().map(move |param_list| {
+ let item_tree = f.id.item_tree(db);
+ let func = &item_tree[f.id.value];
+ let krate = f.container.module(db).krate;
+ let crate_graph = db.crate_graph();
+ (
+ param_list,
+ (0..func.params.len()).map(move |idx| {
+ item_tree
+ .attrs(
+ db,
+ krate,
+ AttrOwner::Param(
+ f.id.value,
+ Idx::from_raw(RawIdx::from(idx as u32)),
+ ),
+ )
+ .is_cfg_enabled(&crate_graph[krate].cfg_options)
+ }),
+ )
+ });
+ is_async_fn = data.is_async();
+ src.map(|it| it.body().map(ast::Expr::from))
+ }
+ DefWithBodyId::ConstId(c) => {
+ let c = c.lookup(db);
+ let src = c.source(db);
+ src.map(|it| it.body())
+ }
+ DefWithBodyId::StaticId(s) => {
+ let s = s.lookup(db);
+ let src = s.source(db);
+ src.map(|it| it.body())
+ }
+ DefWithBodyId::VariantId(v) => {
+ let s = v.lookup(db);
+ let src = s.source(db);
+ src.map(|it| it.expr())
+ }
+ DefWithBodyId::InTypeConstId(c) => c.lookup(db).id.map(|_| c.source(db).expr()),
+ }
+ };
+ let module = def.module(db);
+ let expander = Expander::new(db, file_id, module);
+ let (body, mut source_map) =
+ lower::lower_body(db, def, expander, params, body, module.krate, is_async_fn);
+ source_map.store.shrink_to_fit();
+
+ (Arc::new(body), Arc::new(source_map))
+ }
+
+ pub(crate) fn body_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc<Body> {
+ db.body_with_source_map(def).0
+ }
+
+ pub fn pretty_print(
+ &self,
+ db: &dyn DefDatabase,
+ owner: DefWithBodyId,
+ edition: Edition,
+ ) -> String {
+ pretty::print_body_hir(db, self, owner, edition)
+ }
+
+ pub fn pretty_print_expr(
+ &self,
+ db: &dyn DefDatabase,
+ owner: DefWithBodyId,
+ expr: ExprId,
+ edition: Edition,
+ ) -> String {
+ pretty::print_expr_hir(db, self, owner, expr, edition)
+ }
+
+ pub fn pretty_print_pat(
+ &self,
+ db: &dyn DefDatabase,
+ owner: DefWithBodyId,
+ pat: PatId,
+ oneline: bool,
+ edition: Edition,
+ ) -> String {
+ pretty::print_pat_hir(db, self, owner, pat, oneline, edition)
+ }
+}
+
+impl BodySourceMap {
+ pub fn self_param_syntax(&self) -> Option<InFile<SelfParamPtr>> {
+ self.self_param
+ }
+}
diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index 16c7b5ca00..6e505a6b11 100644
--- a/crates/hir-def/src/body/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -29,11 +29,14 @@ use triomphe::Arc;
use crate::{
attr::Attrs,
- body::{Body, BodyDiagnostic, BodySourceMap, ExprPtr, HygieneId, LabelPtr, PatPtr},
builtin_type::BuiltinUint,
data::adt::StructKind,
db::DefDatabase,
expander::Expander,
+ expr_store::{
+ Body, BodySourceMap, ExprPtr, ExpressionStore, ExpressionStoreBuilder,
+ ExpressionStoreDiagnostics, ExpressionStoreSourceMap, HygieneId, LabelPtr, PatPtr,
+ },
hir::{
format_args::{
self, FormatAlignment, FormatArgs, FormatArgsPiece, FormatArgument, FormatArgumentKind,
@@ -41,8 +44,8 @@ use crate::{
FormatPlaceholder, FormatSign, FormatTrait,
},
Array, Binding, BindingAnnotation, BindingId, BindingProblems, CaptureBy, ClosureKind,
- Expr, ExprId, Item, Label, LabelId, Literal, LiteralOrConst, MatchArm, Movability,
- OffsetOf, Pat, PatId, RecordFieldPat, RecordLitField, Statement,
+ Expr, ExprId, Item, Label, LabelId, Literal, MatchArm, Movability, OffsetOf, Pat, PatId,
+ RecordFieldPat, RecordLitField, Statement,
},
item_scope::BuiltinShadowMode,
lang_item::LangItem,
@@ -55,11 +58,11 @@ use crate::{
type FxIndexSet<K> = indexmap::IndexSet<K, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
-pub(super) fn lower(
+pub(super) fn lower_body(
db: &dyn DefDatabase,
owner: DefWithBodyId,
expander: Expander,
- params: Option<(ast::ParamList, impl Iterator<Item = bool>)>,
+ parameters: Option<(ast::ParamList, impl Iterator<Item = bool>)>,
body: Option<ast::Expr>,
krate: CrateId,
is_async_fn: bool,
@@ -75,35 +78,146 @@ pub(super) fn lower(
};
Arc::clone(span_map)
});
- ExprCollector {
- db,
- owner,
- krate,
- def_map: expander.module.def_map(db),
- source_map: BodySourceMap::default(),
- ast_id_map: db.ast_id_map(expander.current_file_id()),
- body: Body::default(),
- expander,
- current_try_block_label: None,
- is_lowering_coroutine: false,
- label_ribs: Vec::new(),
- current_binding_owner: None,
- awaitable_context: None,
- current_span_map: span_map,
- current_block_legacy_macro_defs_count: FxHashMap::default(),
- }
- .collect(params, body, is_async_fn)
+
+ let mut self_param = None;
+ let mut source_map_self_param = None;
+ let mut params = vec![];
+ let mut collector = ExprCollector::new(db, owner, expander, krate, span_map);
+
+ let skip_body = match owner {
+ DefWithBodyId::FunctionId(it) => db.attrs(it.into()),
+ DefWithBodyId::StaticId(it) => db.attrs(it.into()),
+ DefWithBodyId::ConstId(it) => db.attrs(it.into()),
+ DefWithBodyId::InTypeConstId(_) => Attrs::EMPTY,
+ DefWithBodyId::VariantId(it) => db.attrs(it.into()),
+ }
+ .rust_analyzer_tool()
+ .any(|attr| *attr.path() == tool_path![skip]);
+ // If #[rust_analyzer::skip] annotated, only construct enough information for the signature
+ // and skip the body.
+ if skip_body {
+ if let Some((param_list, mut attr_enabled)) = parameters {
+ if let Some(self_param_syn) =
+ param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false))
+ {
+ let is_mutable =
+ self_param_syn.mut_token().is_some() && self_param_syn.amp_token().is_none();
+ let binding_id: la_arena::Idx<Binding> = collector.alloc_binding(
+ Name::new_symbol_root(sym::self_.clone()),
+ BindingAnnotation::new(is_mutable, false),
+ );
+ self_param = Some(binding_id);
+ source_map_self_param =
+ Some(collector.expander.in_file(AstPtr::new(&self_param_syn)));
+ }
+ params = param_list
+ .params()
+ .zip(attr_enabled)
+ .filter(|(_, enabled)| *enabled)
+ .map(|_| collector.missing_pat())
+ .collect();
+ };
+ let body_expr = collector.missing_expr();
+ return (
+ Body {
+ store: collector.store.finish(),
+ params: params.into_boxed_slice(),
+ self_param,
+ body_expr,
+ },
+ BodySourceMap { self_param: source_map_self_param, store: collector.source_map },
+ );
+ }
+
+ if let Some((param_list, mut attr_enabled)) = parameters {
+ if let Some(self_param_syn) =
+ param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false))
+ {
+ let is_mutable =
+ self_param_syn.mut_token().is_some() && self_param_syn.amp_token().is_none();
+ let binding_id: la_arena::Idx<Binding> = collector.alloc_binding(
+ Name::new_symbol_root(sym::self_.clone()),
+ BindingAnnotation::new(is_mutable, false),
+ );
+ let hygiene = self_param_syn
+ .name()
+ .map(|name| collector.hygiene_id_for(name.syntax().text_range().start()))
+ .unwrap_or(HygieneId::ROOT);
+ if !hygiene.is_root() {
+ collector.store.binding_hygiene.insert(binding_id, hygiene);
+ }
+ self_param = Some(binding_id);
+ source_map_self_param = Some(collector.expander.in_file(AstPtr::new(&self_param_syn)));
+ }
+
+ for (param, _) in param_list.params().zip(attr_enabled).filter(|(_, enabled)| *enabled) {
+ let param_pat = collector.collect_pat_top(param.pat());
+ params.push(param_pat);
+ }
+ };
+
+ let body_expr = collector.collect(
+ body,
+ if is_async_fn {
+ Awaitable::Yes
+ } else {
+ match owner {
+ DefWithBodyId::FunctionId(..) => Awaitable::No("non-async function"),
+ DefWithBodyId::StaticId(..) => Awaitable::No("static"),
+ DefWithBodyId::ConstId(..) | DefWithBodyId::InTypeConstId(..) => {
+ Awaitable::No("constant")
+ }
+ DefWithBodyId::VariantId(..) => Awaitable::No("enum variant"),
+ }
+ },
+ );
+
+ (
+ Body {
+ store: collector.store.finish(),
+ params: params.into_boxed_slice(),
+ self_param,
+ body_expr,
+ },
+ BodySourceMap { self_param: source_map_self_param, store: collector.source_map },
+ )
+}
+
+#[allow(dead_code)]
+pub(super) fn lower(
+ db: &dyn DefDatabase,
+ owner: ExprStoreOwnerId,
+ expander: Expander,
+ body: Option<ast::Expr>,
+ krate: CrateId,
+) -> (ExpressionStore, ExpressionStoreSourceMap) {
+ // We cannot leave the root span map empty and let any identifier from it be treated as root,
+ // because when inside nested macros `SyntaxContextId`s from the outer macro will be interleaved
+ // with the inner macro, and that will cause confusion because they won't be the same as `ROOT`
+ // even though they should be the same. Also, when the body comes from multiple expansions, their
+ // hygiene is different.
+ let span_map = expander.current_file_id().macro_file().map(|_| {
+ let SpanMap::ExpansionSpanMap(span_map) = expander.span_map(db) else {
+ panic!("in a macro file there should be `ExpansionSpanMap`");
+ };
+ Arc::clone(span_map)
+ });
+ let mut expr_collector = ExprCollector::new(db, owner, expander, krate, span_map);
+ expr_collector.collect(body, Awaitable::No("?"));
+ (expr_collector.store.finish(), expr_collector.source_map)
}
+type ExprStoreOwnerId = DefWithBodyId;
+
struct ExprCollector<'a> {
db: &'a dyn DefDatabase,
expander: Expander,
- owner: DefWithBodyId,
+ owner: ExprStoreOwnerId,
def_map: Arc<DefMap>,
ast_id_map: Arc<AstIdMap>,
krate: CrateId,
- body: Body,
- source_map: BodySourceMap,
+ store: ExpressionStoreBuilder,
+ source_map: ExpressionStoreSourceMap,
is_lowering_coroutine: bool,
@@ -157,6 +271,7 @@ impl RibKind {
}
}
+#[derive(PartialEq, Eq, Debug, Copy, Clone)]
enum Awaitable {
Yes,
No(&'static str),
@@ -180,12 +295,12 @@ impl BindingList {
let id = *self.map.entry((name, hygiene)).or_insert_with_key(|(name, _)| {
let id = ec.alloc_binding(name.clone(), mode);
if !hygiene.is_root() {
- ec.body.binding_hygiene.insert(id, hygiene);
+ ec.store.binding_hygiene.insert(id, hygiene);
}
id
});
- if ec.body.bindings[id].mode != mode {
- ec.body.bindings[id].problems = Some(BindingProblems::BoundInconsistently);
+ if ec.store.bindings[id].mode != mode {
+ ec.store.bindings[id].problems = Some(BindingProblems::BoundInconsistently);
}
self.check_is_used(ec, id);
id
@@ -195,11 +310,11 @@ impl BindingList {
match self.is_used.get(&id) {
None => {
if self.reject_new {
- ec.body.bindings[id].problems = Some(BindingProblems::NotBoundAcrossAll);
+ ec.store.bindings[id].problems = Some(BindingProblems::NotBoundAcrossAll);
}
}
Some(true) => {
- ec.body.bindings[id].problems = Some(BindingProblems::BoundMoreThanOnce);
+ ec.store.bindings[id].problems = Some(BindingProblems::BoundMoreThanOnce);
}
Some(false) => {}
}
@@ -208,93 +323,37 @@ impl BindingList {
}
impl ExprCollector<'_> {
- fn collect(
- mut self,
- param_list: Option<(ast::ParamList, impl Iterator<Item = bool>)>,
- body: Option<ast::Expr>,
- is_async_fn: bool,
- ) -> (Body, BodySourceMap) {
- let skip_body = match self.owner {
- DefWithBodyId::FunctionId(it) => self.db.attrs(it.into()),
- DefWithBodyId::StaticId(it) => self.db.attrs(it.into()),
- DefWithBodyId::ConstId(it) => self.db.attrs(it.into()),
- DefWithBodyId::InTypeConstId(_) => Attrs::EMPTY,
- DefWithBodyId::VariantId(it) => self.db.attrs(it.into()),
+ fn new(
+ db: &dyn DefDatabase,
+ owner: ExprStoreOwnerId,
+ expander: Expander,
+ krate: CrateId,
+ span_map: Option<Arc<ExpansionSpanMap>>,
+ ) -> ExprCollector<'_> {
+ ExprCollector {
+ db,
+ owner,
+ krate,
+ def_map: expander.module.def_map(db),
+ source_map: ExpressionStoreSourceMap::default(),
+ ast_id_map: db.ast_id_map(expander.current_file_id()),
+ store: ExpressionStoreBuilder::default(),
+ expander,
+ current_try_block_label: None,
+ is_lowering_coroutine: false,
+ label_ribs: Vec::new(),
+ current_binding_owner: None,
+ awaitable_context: None,
+ current_span_map: span_map,
+ current_block_legacy_macro_defs_count: FxHashMap::default(),
}
- .rust_analyzer_tool()
- .any(|attr| *attr.path() == tool_path![skip]);
- // If #[rust_analyzer::skip] annotated, only construct enough information for the signature
- // and skip the body.
- if skip_body {
- self.body.body_expr = self.missing_expr();
- if let Some((param_list, mut attr_enabled)) = param_list {
- if let Some(self_param) =
- param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false))
- {
- let is_mutable =
- self_param.mut_token().is_some() && self_param.amp_token().is_none();
- let binding_id: la_arena::Idx<Binding> = self.alloc_binding(
- Name::new_symbol_root(sym::self_.clone()),
- BindingAnnotation::new(is_mutable, false),
- );
- self.body.self_param = Some(binding_id);
- self.source_map.self_param =
- Some(self.expander.in_file(AstPtr::new(&self_param)));
- }
- self.body.params = param_list
- .params()
- .zip(attr_enabled)
- .filter(|(_, enabled)| *enabled)
- .map(|_| self.missing_pat())
- .collect();
- };
- return (self.body, self.source_map);
- }
-
- self.awaitable_context.replace(if is_async_fn {
- Awaitable::Yes
- } else {
- match self.owner {
- DefWithBodyId::FunctionId(..) => Awaitable::No("non-async function"),
- DefWithBodyId::StaticId(..) => Awaitable::No("static"),
- DefWithBodyId::ConstId(..) | DefWithBodyId::InTypeConstId(..) => {
- Awaitable::No("constant")
- }
- DefWithBodyId::VariantId(..) => Awaitable::No("enum variant"),
- }
- });
- if let Some((param_list, mut attr_enabled)) = param_list {
- let mut params = vec![];
- if let Some(self_param) =
- param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false))
- {
- let is_mutable =
- self_param.mut_token().is_some() && self_param.amp_token().is_none();
- let binding_id: la_arena::Idx<Binding> = self.alloc_binding(
- Name::new_symbol_root(sym::self_.clone()),
- BindingAnnotation::new(is_mutable, false),
- );
- let hygiene = self_param
- .name()
- .map(|name| self.hygiene_id_for(name.syntax().text_range().start()))
- .unwrap_or(HygieneId::ROOT);
- if !hygiene.is_root() {
- self.body.binding_hygiene.insert(binding_id, hygiene);
- }
- self.body.self_param = Some(binding_id);
- self.source_map.self_param = Some(self.expander.in_file(AstPtr::new(&self_param)));
- }
+ }
- for (param, _) in param_list.params().zip(attr_enabled).filter(|(_, enabled)| *enabled)
- {
- let param_pat = self.collect_pat_top(param.pat());
- params.push(param_pat);
- }
- self.body.params = params.into_boxed_slice();
- };
- self.body.body_expr = self.with_label_rib(RibKind::Closure, |this| {
- if is_async_fn {
- match body {
+ fn collect(&mut self, expr: Option<ast::Expr>, awaitable: Awaitable) -> ExprId {
+ self.awaitable_context.replace(awaitable);
+ self.with_label_rib(RibKind::Closure, |this| {
+ if awaitable == Awaitable::Yes {
+ match expr {
Some(e) => {
let syntax_ptr = AstPtr::new(&e);
let expr = this.collect_expr(e);
@@ -306,15 +365,13 @@ impl ExprCollector<'_> {
None => this.missing_expr(),
}
} else {
- this.collect_expr_opt(body)
+ this.collect_expr_opt(expr)
}
- });
-
- (self.body, self.source_map)
+ })
}
fn ctx(&mut self) -> LowerCtx<'_> {
- self.expander.ctx(self.db, &mut self.body.types, &mut self.source_map.types)
+ self.expander.ctx(self.db, &mut self.store.types, &mut self.source_map.types)
}
fn collect_expr(&mut self, expr: ast::Expr) -> ExprId {
@@ -390,7 +447,7 @@ impl ExprCollector<'_> {
parent: this.owner,
root: inner_expr,
});
- this.body.exprs[result_expr_id] = Expr::Const(it);
+ this.store.exprs[result_expr_id] = Expr::Const(it);
this.current_binding_owner = prev_binding_owner;
result_expr_id
})
@@ -480,7 +537,7 @@ impl ExprCollector<'_> {
.unwrap_or((Expr::Missing, HygieneId::ROOT));
let expr_id = self.alloc_expr(path, syntax_ptr);
if !hygiene.is_root() {
- self.body.expr_hygiene.insert(expr_id, hygiene);
+ self.store.ident_hygiene.insert(expr_id.into(), hygiene);
}
expr_id
}
@@ -562,10 +619,12 @@ impl ExprCollector<'_> {
ast::Expr::AwaitExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
if let Awaitable::No(location) = self.is_lowering_awaitable_block() {
- self.source_map.diagnostics.push(BodyDiagnostic::AwaitOutsideOfAsync {
- node: InFile::new(self.expander.current_file_id(), AstPtr::new(&e)),
- location: location.to_string(),
- });
+ self.source_map.diagnostics.push(
+ ExpressionStoreDiagnostics::AwaitOutsideOfAsync {
+ node: InFile::new(self.expander.current_file_id(), AstPtr::new(&e)),
+ location: location.to_string(),
+ },
+ );
}
self.alloc_expr(Expr::Await { expr }, syntax_ptr)
}
@@ -646,7 +705,7 @@ impl ExprCollector<'_> {
this.is_lowering_coroutine = prev_is_lowering_coroutine;
this.current_binding_owner = prev_binding_owner;
this.current_try_block_label = prev_try_block_label;
- this.body.exprs[result_expr_id] = Expr::Closure {
+ this.store.exprs[result_expr_id] = Expr::Closure {
args: args.into(),
arg_types: arg_types.into(),
ret_type,
@@ -752,7 +811,7 @@ impl ExprCollector<'_> {
}
fn parse_path(&mut self, path: ast::Path) -> Option<Path> {
- self.expander.parse_path(self.db, path, &mut self.body.types, &mut self.source_map.types)
+ self.expander.parse_path(self.db, path, &mut self.store.types, &mut self.source_map.types)
}
fn collect_expr_path(&mut self, e: ast::PathExpr) -> Option<(Path, HygieneId)> {
@@ -781,7 +840,7 @@ impl ExprCollector<'_> {
let src = self.expander.in_file(AstPtr::new(&expr).wrap_left());
let expr = self.collect_expr(expr);
// Do not use `alloc_pat_from_expr()` here, it will override the entry in `expr_map`.
- let id = self.body.pats.alloc(Pat::Expr(expr));
+ let id = self.store.pats.alloc(Pat::Expr(expr));
self.source_map.pat_map_back.insert(id, src);
id
})
@@ -835,7 +894,7 @@ impl ExprCollector<'_> {
.unwrap_or((Pat::Missing, HygieneId::ROOT));
let pat_id = self.alloc_pat_from_expr(path, syntax_ptr);
if !hygiene.is_root() {
- self.body.pat_hygiene.insert(pat_id, hygiene);
+ self.store.ident_hygiene.insert(pat_id.into(), hygiene);
}
pat_id
}
@@ -967,7 +1026,7 @@ impl ExprCollector<'_> {
) -> ExprId {
let (id, prev_owner) = self.initialize_binding_owner(syntax_ptr);
let tmp = job(self);
- self.body.exprs[id] = mem::replace(&mut self.body.exprs[tmp], Expr::Missing);
+ self.store.exprs[id] = mem::replace(&mut self.store.exprs[tmp], Expr::Missing);
self.current_binding_owner = prev_owner;
id
}
@@ -979,8 +1038,9 @@ impl ExprCollector<'_> {
let Some(try_from_output) = self.lang_path(LangItem::TryTraitFromOutput) else {
return self.collect_block(e);
};
- let label = self
- .alloc_label_desugared(Label { name: Name::generate_new_name(self.body.labels.len()) });
+ let label = self.alloc_label_desugared(Label {
+ name: Name::generate_new_name(self.store.labels.len()),
+ });
let old_label = self.current_try_block_label.replace(label);
let ptr = AstPtr::new(&e).upcast();
@@ -1006,7 +1066,7 @@ impl ExprCollector<'_> {
)
}
};
- let Expr::Block { tail, .. } = &mut self.body.exprs[expr_id] else {
+ let Expr::Block { tail, .. } = &mut self.store.exprs[expr_id] else {
unreachable!("block was lowered to non-block");
};
*tail = Some(next_tail);
@@ -1112,7 +1172,7 @@ impl ExprCollector<'_> {
this.collect_expr_opt(e.loop_body().map(|it| it.into()))
}),
};
- let iter_name = Name::generate_new_name(self.body.exprs.len());
+ let iter_name = Name::generate_new_name(self.store.exprs.len());
let iter_expr = self.alloc_expr(Expr::Path(Path::from(iter_name.clone())), syntax_ptr);
let iter_expr_mut = self.alloc_expr(
Expr::Ref { expr: iter_expr, rawness: Rawness::Ref, mutability: Mutability::Mut },
@@ -1177,7 +1237,7 @@ impl ExprCollector<'_> {
let try_branch = self.alloc_expr(Expr::Path(try_branch), syntax_ptr);
let expr = self
.alloc_expr(Expr::Call { callee: try_branch, args: Box::new([operand]) }, syntax_ptr);
- let continue_name = Name::generate_new_name(self.body.bindings.len());
+ let continue_name = Name::generate_new_name(self.store.bindings.len());
let continue_binding =
self.alloc_binding(continue_name.clone(), BindingAnnotation::Unannotated);
let continue_bpat =
@@ -1192,7 +1252,7 @@ impl ExprCollector<'_> {
guard: None,
expr: self.alloc_expr(Expr::Path(Path::from(continue_name)), syntax_ptr),
};
- let break_name = Name::generate_new_name(self.body.bindings.len());
+ let break_name = Name::generate_new_name(self.store.bindings.len());
let break_binding = self.alloc_binding(break_name.clone(), BindingAnnotation::Unannotated);
let break_bpat = self.alloc_pat_desugared(Pat::Bind { id: break_binding, subpat: None });
self.add_definition_to_binding(break_binding, break_bpat);
@@ -1261,17 +1321,19 @@ impl ExprCollector<'_> {
Ok(res) => res,
Err(UnresolvedMacro { path }) => {
if record_diagnostics {
- self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedMacroCall {
- node: InFile::new(outer_file, syntax_ptr),
- path,
- });
+ self.source_map.diagnostics.push(
+ ExpressionStoreDiagnostics::UnresolvedMacroCall {
+ node: InFile::new(outer_file, syntax_ptr),
+ path,
+ },
+ );
}
return collector(self, None);
}
};
if record_diagnostics {
if let Some(err) = res.err {
- self.source_map.diagnostics.push(BodyDiagnostic::MacroError {
+ self.source_map.diagnostics.push(ExpressionStoreDiagnostics::MacroError {
node: InFile::new(outer_file, syntax_ptr),
err,
});
@@ -1464,7 +1526,7 @@ impl ExprCollector<'_> {
let (module, def_map) =
match block_id.map(|block_id| (self.db.block_def_map(block_id), block_id)) {
Some((def_map, block_id)) => {
- self.body.block_scopes.push(block_id);
+ self.store.block_scopes.push(block_id);
(def_map.module_id(DefMap::ROOT), def_map)
}
None => (self.expander.module, self.def_map.clone()),
@@ -1621,7 +1683,7 @@ impl ExprCollector<'_> {
pats.push(self.collect_pat(rest, binding_list));
for (&id, &is_used) in binding_list.is_used.iter() {
if !is_used {
- self.body.bindings[id].problems =
+ self.store.bindings[id].problems =
Some(BindingProblems::NotBoundAcrossAll);
}
}
@@ -1722,23 +1784,33 @@ impl ExprCollector<'_> {
self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
this.collect_pat_opt(expanded_pat, binding_list)
});
- self.source_map.pat_map.insert(src, pat);
+ self.source_map.pat_map.insert(src, pat.into());
return pat;
}
None => Pat::Missing,
},
- // FIXME: implement in a way that also builds source map and calculates assoc resolutions in type inference.
ast::Pat::RangePat(p) => {
- let mut range_part_lower = |p: Option<ast::Pat>| {
- p.and_then(|it| match &it {
- ast::Pat::LiteralPat(it) => {
- Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(it)?.0)))
- }
- pat @ (ast::Pat::IdentPat(_) | ast::Pat::PathPat(_)) => {
- let subpat = self.collect_pat(pat.clone(), binding_list);
- Some(Box::new(LiteralOrConst::Const(subpat)))
+ let mut range_part_lower = |p: Option<ast::Pat>| -> Option<ExprId> {
+ p.and_then(|it| {
+ let ptr = PatPtr::new(&it);
+ match &it {
+ ast::Pat::LiteralPat(it) => Some(self.alloc_expr_from_pat(
+ Expr::Literal(pat_literal_to_hir(it)?.0),
+ ptr,
+ )),
+ ast::Pat::IdentPat(ident) if ident.is_simple_ident() => ident
+ .name()
+ .map(|name| name.as_name())
+ .map(Path::from)
+ .map(|path| self.alloc_expr_from_pat(Expr::Path(path), ptr)),
+ ast::Pat::PathPat(p) => p
+ .path()
+ .and_then(|path| self.parse_path(path))
+ .map(|parsed| self.alloc_expr_from_pat(Expr::Path(parsed), ptr)),
+ // We only need to handle literal, ident (if bare) and path patterns here,
+ // as any other pattern as a range pattern operand is semantically invalid.
+ _ => None,
}
- _ => None,
})
};
let start = range_part_lower(p.start());
@@ -1801,7 +1873,7 @@ impl ExprCollector<'_> {
}
});
if let Some(pat) = pat.left() {
- self.source_map.pat_map.insert(src, pat);
+ self.source_map.pat_map.insert(src, pat.into());
}
pat
}
@@ -1825,7 +1897,7 @@ impl ExprCollector<'_> {
return Some(());
}
- self.source_map.diagnostics.push(BodyDiagnostic::InactiveCode {
+ self.source_map.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode {
node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())),
cfg,
opts: self.expander.cfg_options().clone(),
@@ -1853,7 +1925,7 @@ impl ExprCollector<'_> {
fn resolve_label(
&self,
lifetime: Option<ast::Lifetime>,
- ) -> Result<Option<LabelId>, BodyDiagnostic> {
+ ) -> Result<Option<LabelId>, ExpressionStoreDiagnostics> {
let Some(lifetime) = lifetime else { return Ok(None) };
let (mut hygiene_id, mut hygiene_info) = match &self.current_span_map {
None => (HygieneId::ROOT, None),
@@ -1877,7 +1949,7 @@ impl ExprCollector<'_> {
return if self.is_label_valid_from_rib(rib_idx) {
Ok(Some(*id))
} else {
- Err(BodyDiagnostic::UnreachableLabel {
+ Err(ExpressionStoreDiagnostics::UnreachableLabel {
name,
node: self.expander.in_file(AstPtr::new(&lifetime)),
})
@@ -1903,7 +1975,7 @@ impl ExprCollector<'_> {
}
}
- Err(BodyDiagnostic::UndeclaredLabel {
+ Err(ExpressionStoreDiagnostics::UndeclaredLabel {
name,
node: self.expander.in_file(AstPtr::new(&lifetime)),
})
@@ -1934,7 +2006,7 @@ impl ExprCollector<'_> {
f: impl FnOnce(&mut Self) -> T,
) -> T {
self.label_ribs.push(LabelRib::new(RibKind::Normal(
- self.body[label].name.clone(),
+ self.store.labels[label].name.clone(),
label,
hygiene,
)));
@@ -2023,7 +2095,7 @@ impl ExprCollector<'_> {
);
}
if !hygiene.is_root() {
- self.body.expr_hygiene.insert(expr_id, hygiene);
+ self.store.ident_hygiene.insert(expr_id.into(), hygiene);
}
expr_id
},
@@ -2171,17 +2243,27 @@ impl ExprCollector<'_> {
let unsafe_arg_new = self.alloc_expr_desugared(Expr::Path(unsafe_arg_new));
let unsafe_arg_new =
self.alloc_expr_desugared(Expr::Call { callee: unsafe_arg_new, args: Box::default() });
- let unsafe_arg_new = self.alloc_expr_desugared(Expr::Unsafe {
+ let mut unsafe_arg_new = self.alloc_expr_desugared(Expr::Unsafe {
id: None,
- // We collect the unused expressions here so that we still infer them instead of
- // dropping them out of the expression tree
- statements: fmt
- .orphans
- .into_iter()
- .map(|expr| Statement::Expr { expr, has_semi: true })
- .collect(),
+ statements: Box::new([]),
tail: Some(unsafe_arg_new),
});
+ if !fmt.orphans.is_empty() {
+ unsafe_arg_new = self.alloc_expr_desugared(Expr::Block {
+ id: None,
+ // We collect the unused expressions here so that we still infer them instead of
+ // dropping them out of the expression tree. We cannot store them in the `Unsafe`
+ // block because then unsafe blocks within them will get a false "unused unsafe"
+ // diagnostic (rustc has a notion of builtin unsafe blocks, but we don't).
+ statements: fmt
+ .orphans
+ .into_iter()
+ .map(|expr| Statement::Expr { expr, has_semi: true })
+ .collect(),
+ tail: Some(unsafe_arg_new),
+ label: None,
+ });
+ }
let idx = self.alloc_expr(
Expr::Call {
@@ -2417,20 +2499,20 @@ fn pat_literal_to_hir(lit: &ast::LiteralPat) -> Option<(Literal, ast::Literal)>
impl ExprCollector<'_> {
fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
let src = self.expander.in_file(ptr);
- let id = self.body.exprs.alloc(expr);
- self.source_map.expr_map_back.insert(id, src);
+ let id = self.store.exprs.alloc(expr);
+ self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
self.source_map.expr_map.insert(src, id.into());
id
}
// FIXME: desugared exprs don't have ptr, that's wrong and should be fixed.
// Migrate to alloc_expr_desugared_with_ptr and then rename back
fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId {
- self.body.exprs.alloc(expr)
+ self.store.exprs.alloc(expr)
}
fn alloc_expr_desugared_with_ptr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
let src = self.expander.in_file(ptr);
- let id = self.body.exprs.alloc(expr);
- self.source_map.expr_map_back.insert(id, src);
+ let id = self.store.exprs.alloc(expr);
+ self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
// We intentionally don't fill this as it could overwrite a non-desugared entry
// self.source_map.expr_map.insert(src, id);
id
@@ -2440,45 +2522,54 @@ impl ExprCollector<'_> {
}
fn alloc_binding(&mut self, name: Name, mode: BindingAnnotation) -> BindingId {
- let binding = self.body.bindings.alloc(Binding { name, mode, problems: None });
+ let binding = self.store.bindings.alloc(Binding { name, mode, problems: None });
if let Some(owner) = self.current_binding_owner {
- self.body.binding_owners.insert(binding, owner);
+ self.store.binding_owners.insert(binding, owner);
}
binding
}
fn alloc_pat_from_expr(&mut self, pat: Pat, ptr: ExprPtr) -> PatId {
let src = self.expander.in_file(ptr);
- let id = self.body.pats.alloc(pat);
+ let id = self.store.pats.alloc(pat);
self.source_map.expr_map.insert(src, id.into());
self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_left));
id
}
+
+ fn alloc_expr_from_pat(&mut self, expr: Expr, ptr: PatPtr) -> ExprId {
+ let src = self.expander.in_file(ptr);
+ let id = self.store.exprs.alloc(expr);
+ self.source_map.pat_map.insert(src, id.into());
+ self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_right));
+ id
+ }
+
fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
let src = self.expander.in_file(ptr);
- let id = self.body.pats.alloc(pat);
+ let id = self.store.pats.alloc(pat);
self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_right));
- self.source_map.pat_map.insert(src, id);
+ self.source_map.pat_map.insert(src, id.into());
id
}
// FIXME: desugared pats don't have ptr, that's wrong and should be fixed somehow.
fn alloc_pat_desugared(&mut self, pat: Pat) -> PatId {
- self.body.pats.alloc(pat)
+ self.store.pats.alloc(pat)
}
fn missing_pat(&mut self) -> PatId {
- self.body.pats.alloc(Pat::Missing)
+ self.store.pats.alloc(Pat::Missing)
}
fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId {
let src = self.expander.in_file(ptr);
- let id = self.body.labels.alloc(label);
+ let id = self.store.labels.alloc(label);
self.source_map.label_map_back.insert(id, src);
self.source_map.label_map.insert(src, id);
id
}
// FIXME: desugared labels don't have ptr, that's wrong and should be fixed somehow.
fn alloc_label_desugared(&mut self, label: Label) -> LabelId {
- self.body.labels.alloc(label)
+ self.store.labels.alloc(label)
}
fn is_lowering_awaitable_block(&self) -> &Awaitable {
diff --git a/crates/hir-def/src/body/lower/asm.rs b/crates/hir-def/src/expr_store/lower/asm.rs
index 994ba2aa06..032c18688e 100644
--- a/crates/hir-def/src/body/lower/asm.rs
+++ b/crates/hir-def/src/expr_store/lower/asm.rs
@@ -9,7 +9,7 @@ use syntax::{
use tt::TextRange;
use crate::{
- body::lower::{ExprCollector, FxIndexSet},
+ expr_store::lower::{ExprCollector, FxIndexSet},
hir::{AsmOperand, AsmOptions, Expr, ExprId, InlineAsm, InlineAsmRegOrRegClass},
};
diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/expr_store/pretty.rs
index 52b91b522a..82ad756dc2 100644
--- a/crates/hir-def/src/body/pretty.rs
+++ b/crates/hir-def/src/expr_store/pretty.rs
@@ -6,10 +6,7 @@ use itertools::Itertools;
use span::Edition;
use crate::{
- hir::{
- Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, LiteralOrConst, Movability,
- Statement,
- },
+ hir::{Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement},
pretty::{print_generic_args, print_path, print_type_ref},
};
@@ -60,7 +57,7 @@ pub(super) fn print_body_hir(
let mut p = Printer {
db,
- body,
+ store: body,
buf: header,
indent_level: 0,
line_format: LineFormat::Newline,
@@ -103,14 +100,14 @@ pub(super) fn print_body_hir(
pub(super) fn print_expr_hir(
db: &dyn DefDatabase,
- body: &Body,
+ store: &ExpressionStore,
_owner: DefWithBodyId,
expr: ExprId,
edition: Edition,
) -> String {
let mut p = Printer {
db,
- body,
+ store,
buf: String::new(),
indent_level: 0,
line_format: LineFormat::Newline,
@@ -122,7 +119,7 @@ pub(super) fn print_expr_hir(
pub(super) fn print_pat_hir(
db: &dyn DefDatabase,
- body: &Body,
+ store: &ExpressionStore,
_owner: DefWithBodyId,
pat: PatId,
oneline: bool,
@@ -130,7 +127,7 @@ pub(super) fn print_pat_hir(
) -> String {
let mut p = Printer {
db,
- body,
+ store,
buf: String::new(),
indent_level: 0,
line_format: if oneline { LineFormat::Oneline } else { LineFormat::Newline },
@@ -157,7 +154,7 @@ macro_rules! wln {
struct Printer<'a> {
db: &'a dyn DefDatabase,
- body: &'a Body,
+ store: &'a ExpressionStore,
buf: String,
indent_level: usize,
line_format: LineFormat,
@@ -233,7 +230,7 @@ impl Printer<'_> {
}
fn print_expr(&mut self, expr: ExprId) {
- let expr = &self.body[expr];
+ let expr = &self.store[expr];
match expr {
Expr::Missing => w!(self, "�"),
@@ -241,7 +238,7 @@ impl Printer<'_> {
Expr::InlineAsm(_) => w!(self, "builtin#asm(_)"),
Expr::OffsetOf(offset_of) => {
w!(self, "builtin#offset_of(");
- self.print_type_ref(offset_of.container, &self.body.types);
+ self.print_type_ref(offset_of.container, &self.store.types);
let edition = self.edition;
w!(
self,
@@ -271,7 +268,7 @@ impl Printer<'_> {
}
Expr::Loop { body, label } => {
if let Some(lbl) = label {
- w!(self, "{}: ", self.body[*lbl].name.display(self.db.upcast(), self.edition));
+ w!(self, "{}: ", self.store[*lbl].name.display(self.db.upcast(), self.edition));
}
w!(self, "loop ");
self.print_expr(*body);
@@ -295,7 +292,7 @@ impl Printer<'_> {
if let Some(args) = generic_args {
w!(self, "::<");
let edition = self.edition;
- print_generic_args(self.db, args, &self.body.types, self, edition).unwrap();
+ print_generic_args(self.db, args, &self.store.types, self, edition).unwrap();
w!(self, ">");
}
w!(self, "(");
@@ -330,13 +327,13 @@ impl Printer<'_> {
Expr::Continue { label } => {
w!(self, "continue");
if let Some(lbl) = label {
- w!(self, " {}", self.body[*lbl].name.display(self.db.upcast(), self.edition));
+ w!(self, " {}", self.store[*lbl].name.display(self.db.upcast(), self.edition));
}
}
Expr::Break { expr, label } => {
w!(self, "break");
if let Some(lbl) = label {
- w!(self, " {}", self.body[*lbl].name.display(self.db.upcast(), self.edition));
+ w!(self, " {}", self.store[*lbl].name.display(self.db.upcast(), self.edition));
}
if let Some(expr) = expr {
self.whitespace();
@@ -404,7 +401,7 @@ impl Printer<'_> {
Expr::Cast { expr, type_ref } => {
self.print_expr(*expr);
w!(self, " as ");
- self.print_type_ref(*type_ref, &self.body.types);
+ self.print_type_ref(*type_ref, &self.store.types);
}
Expr::Ref { expr, rawness, mutability } => {
w!(self, "&");
@@ -492,13 +489,13 @@ impl Printer<'_> {
self.print_pat(*pat);
if let Some(ty) = ty {
w!(self, ": ");
- self.print_type_ref(*ty, &self.body.types);
+ self.print_type_ref(*ty, &self.store.types);
}
}
w!(self, "|");
if let Some(ret_ty) = ret_type {
w!(self, " -> ");
- self.print_type_ref(*ret_ty, &self.body.types);
+ self.print_type_ref(*ret_ty, &self.store.types);
}
self.whitespace();
self.print_expr(*body);
@@ -534,7 +531,7 @@ impl Printer<'_> {
Expr::Literal(lit) => self.print_literal(lit),
Expr::Block { id: _, statements, tail, label } => {
let label = label.map(|lbl| {
- format!("{}: ", self.body[lbl].name.display(self.db.upcast(), self.edition))
+ format!("{}: ", self.store[lbl].name.display(self.db.upcast(), self.edition))
});
self.print_block(label.as_deref(), statements, tail);
}
@@ -581,7 +578,7 @@ impl Printer<'_> {
}
fn print_pat(&mut self, pat: PatId) {
- let pat = &self.body[pat];
+ let pat = &self.store[pat];
match pat {
Pat::Missing => w!(self, "�"),
@@ -623,9 +620,9 @@ impl Printer<'_> {
let field_name = arg.name.display(self.db.upcast(), edition).to_string();
let mut same_name = false;
- if let Pat::Bind { id, subpat: None } = &self.body[arg.pat] {
+ if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] {
if let Binding { name, mode: BindingAnnotation::Unannotated, .. } =
- &self.body.bindings[*id]
+ &self.store.bindings[*id]
{
if name.as_str() == field_name {
same_name = true;
@@ -656,11 +653,11 @@ impl Printer<'_> {
}
Pat::Range { start, end } => {
if let Some(start) = start {
- self.print_literal_or_const(start);
+ self.print_expr(*start);
}
w!(self, "..=");
if let Some(end) = end {
- self.print_literal_or_const(end);
+ self.print_expr(*end);
}
}
Pat::Slice { prefix, slice, suffix } => {
@@ -734,7 +731,7 @@ impl Printer<'_> {
self.print_pat(*pat);
if let Some(ty) = type_ref {
w!(self, ": ");
- self.print_type_ref(*ty, &self.body.types);
+ self.print_type_ref(*ty, &self.store.types);
}
if let Some(init) = initializer {
w!(self, " = ");
@@ -757,13 +754,6 @@ impl Printer<'_> {
}
}
- fn print_literal_or_const(&mut self, literal_or_const: &LiteralOrConst) {
- match literal_or_const {
- LiteralOrConst::Literal(l) => self.print_literal(l),
- LiteralOrConst::Const(c) => self.print_pat(*c),
- }
- }
-
fn print_literal(&mut self, literal: &Literal) {
match literal {
Literal::String(it) => w!(self, "{:?}", it),
@@ -799,11 +789,11 @@ impl Printer<'_> {
fn print_path(&mut self, path: &Path) {
let edition = self.edition;
- print_path(self.db, path, &self.body.types, self, edition).unwrap();
+ print_path(self.db, path, &self.store.types, self, edition).unwrap();
}
fn print_binding(&mut self, id: BindingId) {
- let Binding { name, mode, .. } = &self.body.bindings[id];
+ let Binding { name, mode, .. } = &self.store.bindings[id];
let mode = match mode {
BindingAnnotation::Unannotated => "",
BindingAnnotation::Mutable => "mut ",
diff --git a/crates/hir-def/src/body/scope.rs b/crates/hir-def/src/expr_store/scope.rs
index 08af470b96..859a706177 100644
--- a/crates/hir-def/src/body/scope.rs
+++ b/crates/hir-def/src/expr_store/scope.rs
@@ -4,8 +4,8 @@ use la_arena::{Arena, ArenaMap, Idx, IdxRange, RawIdx};
use triomphe::Arc;
use crate::{
- body::{Body, HygieneId},
db::DefDatabase,
+ expr_store::{Body, ExpressionStore, HygieneId},
hir::{Binding, BindingId, Expr, ExprId, Item, LabelId, Pat, PatId, Statement},
BlockId, ConstBlockId, DefWithBodyId,
};
@@ -53,7 +53,7 @@ pub struct ScopeData {
impl ExprScopes {
pub(crate) fn expr_scopes_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc<ExprScopes> {
let body = db.body(def);
- let mut scopes = ExprScopes::new(&body, |const_block| {
+ let mut scopes = ExprScopes::new_body(&body, |const_block| {
db.lookup_intern_anonymous_const(const_block).root
});
scopes.shrink_to_fit();
@@ -104,7 +104,7 @@ fn empty_entries(idx: usize) -> IdxRange<ScopeEntry> {
}
impl ExprScopes {
- fn new(
+ fn new_body(
body: &Body,
resolve_const_block: impl (Fn(ConstBlockId) -> ExprId) + Copy,
) -> ExprScopes {
@@ -179,28 +179,28 @@ impl ExprScopes {
fn add_bindings(
&mut self,
- body: &Body,
+ store: &ExpressionStore,
scope: ScopeId,
binding: BindingId,
hygiene: HygieneId,
) {
- let Binding { name, .. } = &body.bindings[binding];
+ let Binding { name, .. } = &store.bindings[binding];
let entry = self.scope_entries.alloc(ScopeEntry { name: name.clone(), binding, hygiene });
self.scopes[scope].entries =
IdxRange::new_inclusive(self.scopes[scope].entries.start()..=entry);
}
- fn add_pat_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) {
- let pattern = &body[pat];
+ fn add_pat_bindings(&mut self, store: &ExpressionStore, scope: ScopeId, pat: PatId) {
+ let pattern = &store[pat];
if let Pat::Bind { id, .. } = *pattern {
- self.add_bindings(body, scope, id, body.binding_hygiene(id));
+ self.add_bindings(store, scope, id, store.binding_hygiene(id));
}
- pattern.walk_child_pats(|pat| self.add_pat_bindings(body, scope, pat));
+ pattern.walk_child_pats(|pat| self.add_pat_bindings(store, scope, pat));
}
- fn add_params_bindings(&mut self, body: &Body, scope: ScopeId, params: &[PatId]) {
- params.iter().for_each(|pat| self.add_pat_bindings(body, scope, *pat));
+ fn add_params_bindings(&mut self, store: &ExpressionStore, scope: ScopeId, params: &[PatId]) {
+ params.iter().for_each(|pat| self.add_pat_bindings(store, scope, *pat));
}
fn set_scope(&mut self, node: ExprId, scope: ScopeId) {
@@ -218,7 +218,7 @@ impl ExprScopes {
fn compute_block_scopes(
statements: &[Statement],
tail: Option<ExprId>,
- body: &Body,
+ store: &ExpressionStore,
scopes: &mut ExprScopes,
scope: &mut ScopeId,
resolve_const_block: impl (Fn(ConstBlockId) -> ExprId) + Copy,
@@ -227,17 +227,17 @@ fn compute_block_scopes(
match stmt {
Statement::Let { pat, initializer, else_branch, .. } => {
if let Some(expr) = initializer {
- compute_expr_scopes(*expr, body, scopes, scope, resolve_const_block);
+ compute_expr_scopes(*expr, store, scopes, scope, resolve_const_block);
}
if let Some(expr) = else_branch {
- compute_expr_scopes(*expr, body, scopes, scope, resolve_const_block);
+ compute_expr_scopes(*expr, store, scopes, scope, resolve_const_block);
}
*scope = scopes.new_scope(*scope);
- scopes.add_pat_bindings(body, *scope, *pat);
+ scopes.add_pat_bindings(store, *scope, *pat);
}
Statement::Expr { expr, .. } => {
- compute_expr_scopes(*expr, body, scopes, scope, resolve_const_block);
+ compute_expr_scopes(*expr, store, scopes, scope, resolve_const_block);
}
Statement::Item(Item::MacroDef(macro_id)) => {
*scope = scopes.new_macro_def_scope(*scope, macro_id.clone());
@@ -246,32 +246,32 @@ fn compute_block_scopes(
}
}
if let Some(expr) = tail {
- compute_expr_scopes(expr, body, scopes, scope, resolve_const_block);
+ compute_expr_scopes(expr, store, scopes, scope, resolve_const_block);
}
}
fn compute_expr_scopes(
expr: ExprId,
- body: &Body,
+ store: &ExpressionStore,
scopes: &mut ExprScopes,
scope: &mut ScopeId,
resolve_const_block: impl (Fn(ConstBlockId) -> ExprId) + Copy,
) {
let make_label =
- |label: &Option<LabelId>| label.map(|label| (label, body.labels[label].name.clone()));
+ |label: &Option<LabelId>| label.map(|label| (label, store.labels[label].name.clone()));
let compute_expr_scopes = |scopes: &mut ExprScopes, expr: ExprId, scope: &mut ScopeId| {
- compute_expr_scopes(expr, body, scopes, scope, resolve_const_block)
+ compute_expr_scopes(expr, store, scopes, scope, resolve_const_block)
};
scopes.set_scope(expr, *scope);
- match &body[expr] {
+ match &store[expr] {
Expr::Block { statements, tail, id, label } => {
let mut scope = scopes.new_block_scope(*scope, *id, make_label(label));
// Overwrite the old scope for the block expr, so that every block scope can be found
// via the block itself (important for blocks that only contain items, no expressions).
scopes.set_scope(expr, scope);
- compute_block_scopes(statements, *tail, body, scopes, &mut scope, resolve_const_block);
+ compute_block_scopes(statements, *tail, store, scopes, &mut scope, resolve_const_block);
}
Expr::Const(id) => {
let mut scope = scopes.root_scope();
@@ -282,7 +282,7 @@ fn compute_expr_scopes(
// Overwrite the old scope for the block expr, so that every block scope can be found
// via the block itself (important for blocks that only contain items, no expressions).
scopes.set_scope(expr, scope);
- compute_block_scopes(statements, *tail, body, scopes, &mut scope, resolve_const_block);
+ compute_block_scopes(statements, *tail, store, scopes, &mut scope, resolve_const_block);
}
Expr::Loop { body: body_expr, label } => {
let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
@@ -290,14 +290,14 @@ fn compute_expr_scopes(
}
Expr::Closure { args, body: body_expr, .. } => {
let mut scope = scopes.new_scope(*scope);
- scopes.add_params_bindings(body, scope, args);
+ scopes.add_params_bindings(store, scope, args);
compute_expr_scopes(scopes, *body_expr, &mut scope);
}
Expr::Match { expr, arms } => {
compute_expr_scopes(scopes, *expr, scope);
for arm in arms.iter() {
let mut scope = scopes.new_scope(*scope);
- scopes.add_pat_bindings(body, scope, arm.pat);
+ scopes.add_pat_bindings(store, scope, arm.pat);
if let Some(guard) = arm.guard {
scope = scopes.new_scope(scope);
compute_expr_scopes(scopes, guard, &mut scope);
@@ -316,9 +316,9 @@ fn compute_expr_scopes(
&Expr::Let { pat, expr } => {
compute_expr_scopes(scopes, expr, scope);
*scope = scopes.new_scope(*scope);
- scopes.add_pat_bindings(body, *scope, pat);
+ scopes.add_pat_bindings(store, *scope, pat);
}
- _ => body.walk_child_exprs(expr, |e| compute_expr_scopes(scopes, e, scope)),
+ _ => store.walk_child_exprs(expr, |e| compute_expr_scopes(scopes, e, scope)),
};
}
diff --git a/crates/hir-def/src/body/tests.rs b/crates/hir-def/src/expr_store/tests.rs
index edc7c4c1f2..16bf46d3e3 100644
--- a/crates/hir-def/src/body/tests.rs
+++ b/crates/hir-def/src/expr_store/tests.rs
@@ -1,10 +1,10 @@
mod block;
+use crate::{hir::MatchArm, test_db::TestDB, ModuleDefId};
use expect_test::{expect, Expect};
+use la_arena::RawIdx;
use test_fixture::WithFixture;
-use crate::{test_db::TestDB, ModuleDefId};
-
use super::*;
fn lower(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (TestDB, Arc<Body>, DefWithBodyId) {
@@ -459,3 +459,45 @@ async fn foo(a: (), b: i32) -> u32 {
expect!["fn foo(�: (), �: i32) -> impl ::core::future::Future::<Output = u32> �"]
.assert_eq(&printed);
}
+
+#[test]
+fn range_bounds_are_hir_exprs() {
+ let (_, body, _) = lower(
+ r#"
+pub const L: i32 = 6;
+mod x {
+ pub const R: i32 = 100;
+}
+const fn f(x: i32) -> i32 {
+ match x {
+ -1..=5 => x * 10,
+ L..=x::R => x * 100,
+ _ => x,
+ }
+}"#,
+ );
+
+ let mtch_arms = body
+ .exprs
+ .iter()
+ .find_map(|(_, expr)| {
+ if let Expr::Match { arms, .. } = expr {
+ return Some(arms);
+ }
+
+ None
+ })
+ .unwrap();
+
+ let MatchArm { pat, .. } = mtch_arms[1];
+ match body.pats[pat] {
+ Pat::Range { start, end } => {
+ let hir_start = &body.exprs[start.unwrap()];
+ let hir_end = &body.exprs[end.unwrap()];
+
+ assert!(matches!(hir_start, Expr::Path { .. }));
+ assert!(matches!(hir_end, Expr::Path { .. }));
+ }
+ _ => {}
+ }
+}
diff --git a/crates/hir-def/src/body/tests/block.rs b/crates/hir-def/src/expr_store/tests/block.rs
index e136dd18a5..e136dd18a5 100644
--- a/crates/hir-def/src/body/tests/block.rs
+++ b/crates/hir-def/src/expr_store/tests/block.rs
diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs
index 7b3f1d06d2..e2b36da79b 100644
--- a/crates/hir-def/src/generics.rs
+++ b/crates/hir-def/src/generics.rs
@@ -433,7 +433,7 @@ impl GenericParams {
GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params),
GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params),
GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params),
- GenericDefId::ConstId(_) => (
+ GenericDefId::ConstId(_) | GenericDefId::StaticId(_) => (
Arc::new(GenericParams {
type_or_consts: Default::default(),
lifetimes: Default::default(),
diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs
index 8596346943..494644d8ef 100644
--- a/crates/hir-def/src/hir.rs
+++ b/crates/hir-def/src/hir.rs
@@ -19,7 +19,7 @@ use std::fmt;
use hir_expand::{name::Name, MacroDefId};
use intern::Symbol;
-use la_arena::{Idx, RawIdx};
+use la_arena::Idx;
use rustc_apfloat::ieee::{Half as f16, Quad as f128};
use syntax::ast;
use type_ref::TypeRefId;
@@ -37,13 +37,10 @@ pub type BindingId = Idx<Binding>;
pub type ExprId = Idx<Expr>;
-/// FIXME: this is a hacky function which should be removed
-pub(crate) fn dummy_expr_id() -> ExprId {
- ExprId::from_raw(RawIdx::from(u32::MAX))
-}
-
pub type PatId = Idx<Pat>;
+// FIXME: Encode this as a single u32, we won't ever reach all 32 bits especially given these counts
+// are local to the body.
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum ExprOrPatId {
ExprId(ExprId),
@@ -58,12 +55,20 @@ impl ExprOrPatId {
}
}
+ pub fn is_expr(&self) -> bool {
+ matches!(self, Self::ExprId(_))
+ }
+
pub fn as_pat(self) -> Option<PatId> {
match self {
Self::PatId(v) => Some(v),
_ => None,
}
}
+
+ pub fn is_pat(&self) -> bool {
+ matches!(self, Self::PatId(_))
+ }
}
stdx::impl_from!(ExprId, PatId for ExprOrPatId);
@@ -574,8 +579,8 @@ pub enum Pat {
ellipsis: bool,
},
Range {
- start: Option<Box<LiteralOrConst>>,
- end: Option<Box<LiteralOrConst>>,
+ start: Option<ExprId>,
+ end: Option<ExprId>,
},
Slice {
prefix: Box<[PatId]>,
diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs
index 34635997bd..d43776b8a6 100644
--- a/crates/hir-def/src/import_map.rs
+++ b/crates/hir-def/src/import_map.rs
@@ -320,7 +320,7 @@ impl SearchMode {
};
match m {
Some((index, _)) => {
- name = &name[index + 1..];
+ name = name[index..].strip_prefix(|_: char| true).unwrap_or_default();
true
}
None => false,
@@ -519,7 +519,7 @@ mod tests {
crate_graph[krate]
.display_name
.as_ref()
- .is_some_and(|it| &**it.crate_name() == crate_name)
+ .is_some_and(|it| it.crate_name().as_str() == crate_name)
})
.expect("could not find crate");
@@ -1039,4 +1039,22 @@ pub mod fmt {
"#]],
);
}
+
+ #[test]
+ fn unicode_fn_name() {
+ let ra_fixture = r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep
+ pub fn あい() {}
+ "#;
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("あ".to_owned()).fuzzy(),
+ expect![[r#"
+ dep::あい (f)
+ "#]],
+ );
+ }
}
diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs
index 65a39c5656..0ca1eb9bcf 100644
--- a/crates/hir-def/src/item_scope.rs
+++ b/crates/hir-def/src/item_scope.rs
@@ -18,8 +18,8 @@ use crate::{
db::DefDatabase,
per_ns::{Item, MacrosItem, PerNs, TypesItem, ValuesItem},
visibility::{Visibility, VisibilityExplicitness},
- AdtId, BuiltinType, ConstId, ExternCrateId, FxIndexMap, HasModule, ImplId, LocalModuleId,
- Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId,
+ AdtId, BuiltinType, ConstId, ExternBlockId, ExternCrateId, FxIndexMap, HasModule, ImplId,
+ LocalModuleId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId,
};
#[derive(Debug, Default)]
@@ -158,6 +158,8 @@ pub struct ItemScope {
declarations: Vec<ModuleDefId>,
impls: Vec<ImplId>,
+ #[allow(clippy::box_collection)]
+ extern_blocks: Option<Box<Vec<ExternBlockId>>>,
unnamed_consts: Vec<ConstId>,
/// Traits imported via `use Trait as _;`.
unnamed_trait_imports: FxHashMap<TraitId, Item<()>>,
@@ -319,6 +321,10 @@ impl ItemScope {
self.extern_crate_decls.iter().copied()
}
+ pub fn extern_blocks(&self) -> impl Iterator<Item = ExternBlockId> + '_ {
+ self.extern_blocks.iter().flat_map(|it| it.iter()).copied()
+ }
+
pub fn use_decls(&self) -> impl ExactSizeIterator<Item = UseId> + '_ {
self.use_decls.iter().copied()
}
@@ -469,6 +475,10 @@ impl ItemScope {
self.impls.push(imp);
}
+ pub(crate) fn define_extern_block(&mut self, extern_block: ExternBlockId) {
+ self.extern_blocks.get_or_insert_default().push(extern_block);
+ }
+
pub(crate) fn define_extern_crate_decl(&mut self, extern_crate: ExternCrateId) {
self.extern_crate_decls.push(extern_crate);
}
@@ -806,7 +816,11 @@ impl ItemScope {
use_imports_types,
use_imports_macros,
macro_invocations,
+ extern_blocks,
} = self;
+ if let Some(it) = extern_blocks {
+ it.shrink_to_fit();
+ }
types.shrink_to_fit();
values.shrink_to_fit();
macros.shrink_to_fit();
diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs
index b5bf2feb82..8d5b3eeb28 100644
--- a/crates/hir-def/src/item_tree.rs
+++ b/crates/hir-def/src/item_tree.rs
@@ -937,7 +937,7 @@ pub struct Param {
bitflags::bitflags! {
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
- pub(crate) struct FnFlags: u8 {
+ pub(crate) struct FnFlags: u16 {
const HAS_SELF_PARAM = 1 << 0;
const HAS_BODY = 1 << 1;
const HAS_DEFAULT_KW = 1 << 2;
@@ -946,6 +946,12 @@ bitflags::bitflags! {
const HAS_UNSAFE_KW = 1 << 5;
const IS_VARARGS = 1 << 6;
const HAS_SAFE_KW = 1 << 7;
+ /// The `#[target_feature]` attribute is necessary to check safety (with RFC 2396),
+ /// but keeping it for all functions will consume a lot of memory when there are
+ /// only very few functions with it. So we only encode its existence here, and lookup
+ /// it if needed.
+ const HAS_TARGET_FEATURE = 1 << 8;
+ const DEPRECATED_SAFE_2024 = 1 << 9;
}
}
diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs
index 38733577d1..59f51db9f7 100644
--- a/crates/hir-def/src/lang_item.rs
+++ b/crates/hir-def/src/lang_item.rs
@@ -502,4 +502,5 @@ language_item_table! {
String, sym::String, string, Target::Struct, GenericRequirement::None;
CStr, sym::CStr, c_str, Target::Struct, GenericRequirement::None;
+ Ordering, sym::Ordering, ordering, Target::Enum, GenericRequirement::None;
}
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index c78818c642..9c947df35e 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -18,9 +18,15 @@ extern crate ra_ap_rustc_parse_format as rustc_parse_format;
#[cfg(feature = "in-rust-tree")]
extern crate rustc_abi;
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_hashes;
+
#[cfg(not(feature = "in-rust-tree"))]
extern crate ra_ap_rustc_abi as rustc_abi;
+#[cfg(not(feature = "in-rust-tree"))]
+extern crate ra_ap_rustc_hashes as rustc_hashes;
+
pub mod db;
pub mod attr;
@@ -42,7 +48,7 @@ pub mod lang_item;
pub mod hir;
pub use self::hir::type_ref;
-pub mod body;
+pub mod expr_store;
pub mod resolver;
pub mod nameres;
@@ -693,6 +699,7 @@ impl TypeOwnerId {
Some(match self {
TypeOwnerId::FunctionId(it) => GenericDefId::FunctionId(it),
TypeOwnerId::ConstId(it) => GenericDefId::ConstId(it),
+ TypeOwnerId::StaticId(it) => GenericDefId::StaticId(it),
TypeOwnerId::AdtId(it) => GenericDefId::AdtId(it),
TypeOwnerId::TraitId(it) => GenericDefId::TraitId(it),
TypeOwnerId::TraitAliasId(it) => GenericDefId::TraitAliasId(it),
@@ -701,7 +708,7 @@ impl TypeOwnerId {
TypeOwnerId::EnumVariantId(it) => {
GenericDefId::AdtId(AdtId::EnumId(it.lookup(db).parent))
}
- TypeOwnerId::InTypeConstId(_) | TypeOwnerId::StaticId(_) => return None,
+ TypeOwnerId::InTypeConstId(_) => return None,
})
}
}
@@ -743,6 +750,7 @@ impl From<GenericDefId> for TypeOwnerId {
GenericDefId::TypeAliasId(it) => it.into(),
GenericDefId::ImplId(it) => it.into(),
GenericDefId::ConstId(it) => it.into(),
+ GenericDefId::StaticId(it) => it.into(),
}
}
}
@@ -851,7 +859,7 @@ impl GeneralConstId {
pub fn generic_def(self, db: &dyn DefDatabase) -> Option<GenericDefId> {
match self {
GeneralConstId::ConstId(it) => Some(it.into()),
- GeneralConstId::StaticId(_) => None,
+ GeneralConstId::StaticId(it) => Some(it.into()),
GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(db),
GeneralConstId::InTypeConstId(it) => it.lookup(db).owner.as_generic_def_id(db),
}
@@ -897,7 +905,7 @@ impl DefWithBodyId {
pub fn as_generic_def_id(self, db: &dyn DefDatabase) -> Option<GenericDefId> {
match self {
DefWithBodyId::FunctionId(f) => Some(f.into()),
- DefWithBodyId::StaticId(_) => None,
+ DefWithBodyId::StaticId(s) => Some(s.into()),
DefWithBodyId::ConstId(c) => Some(c.into()),
DefWithBodyId::VariantId(c) => Some(c.lookup(db).parent.into()),
// FIXME: stable rust doesn't allow generics in constants, but we should
@@ -922,23 +930,28 @@ impl_from!(FunctionId, ConstId, TypeAliasId for AssocItemId);
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum GenericDefId {
- FunctionId(FunctionId),
AdtId(AdtId),
- TraitId(TraitId),
- TraitAliasId(TraitAliasId),
- TypeAliasId(TypeAliasId),
- ImplId(ImplId),
// consts can have type parameters from their parents (i.e. associated consts of traits)
ConstId(ConstId),
+ FunctionId(FunctionId),
+ ImplId(ImplId),
+ // can't actually have generics currently, but they might in the future
+ // More importantly, this completes the set of items that contain type references
+ // which is to be used by the signature expression store in the future.
+ StaticId(StaticId),
+ TraitAliasId(TraitAliasId),
+ TraitId(TraitId),
+ TypeAliasId(TypeAliasId),
}
impl_from!(
- FunctionId,
AdtId(StructId, EnumId, UnionId),
- TraitId,
- TraitAliasId,
- TypeAliasId,
+ ConstId,
+ FunctionId,
ImplId,
- ConstId
+ StaticId,
+ TraitAliasId,
+ TraitId,
+ TypeAliasId
for GenericDefId
);
@@ -969,6 +982,7 @@ impl GenericDefId {
GenericDefId::TraitAliasId(it) => file_id_and_params_of_item_loc(db, it),
GenericDefId::ImplId(it) => file_id_and_params_of_item_loc(db, it),
GenericDefId::ConstId(it) => (it.lookup(db).id.file_id(), None),
+ GenericDefId::StaticId(it) => (it.lookup(db).id.file_id(), None),
}
}
@@ -1350,6 +1364,7 @@ impl HasModule for GenericDefId {
GenericDefId::TypeAliasId(it) => it.module(db),
GenericDefId::ImplId(it) => it.module(db),
GenericDefId::ConstId(it) => it.module(db),
+ GenericDefId::StaticId(it) => it.module(db),
}
}
}
diff --git a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
index 70e3e1ed4e..a43c0eb9d7 100644
--- a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
+++ b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -5,7 +5,7 @@
//! in-memory macros.
use expect_test::expect;
-use crate::macro_expansion_tests::check;
+use crate::macro_expansion_tests::{check, check_errors};
#[test]
fn attribute_macro_attr_censoring() {
@@ -216,3 +216,21 @@ struct S;
#[doc = "doc attr"] struct S;"##]],
);
}
+
+#[test]
+fn cfg_evaluated_before_attr_macros() {
+ check_errors(
+ r#"
+//- proc_macros: disallow_cfg
+
+use proc_macros::disallow_cfg;
+
+#[disallow_cfg] #[cfg(false)] fn foo() {}
+// True cfg are kept.
+// #[disallow_cfg] #[cfg(true)] fn bar() {}
+#[disallow_cfg] #[cfg_attr(false, inline)] fn baz() {}
+#[disallow_cfg] #[cfg_attr(true, inline)] fn qux() {}
+ "#,
+ expect![[r#""#]],
+ );
+}
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index 39d383f015..3b6e3c5916 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -337,7 +337,7 @@ impl DefMap {
pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, crate_id: CrateId) -> Arc<DefMap> {
let crate_graph = db.crate_graph();
let krate = &crate_graph[crate_id];
- let name = krate.display_name.as_deref().unwrap_or_default();
+ let name = krate.display_name.as_deref().map(Symbol::as_str).unwrap_or_default();
let _p = tracing::info_span!("crate_def_map_query", ?name).entered();
let module_data = ModuleData::new(
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 06276335b7..16f3fd56eb 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -19,7 +19,6 @@ use hir_expand::{
use intern::{sym, Interned};
use itertools::{izip, Itertools};
use la_arena::Idx;
-use limit::Limit;
use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, EditionedFileId, FileAstId, SyntaxContextId};
use syntax::ast;
@@ -55,8 +54,8 @@ use crate::{
UnresolvedMacro, UseId, UseLoc,
};
-static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
-static FIXED_POINT_LIMIT: Limit = Limit::new(8192);
+const GLOB_RECURSION_LIMIT: usize = 100;
+const FIXED_POINT_LIMIT: usize = 8192;
pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeId) -> DefMap {
let crate_graph = db.crate_graph();
@@ -393,7 +392,7 @@ impl DefCollector<'_> {
}
i += 1;
- if FIXED_POINT_LIMIT.check(i).is_err() {
+ if i > FIXED_POINT_LIMIT {
tracing::error!("name resolution is stuck");
break 'resolve_attr;
}
@@ -993,7 +992,7 @@ impl DefCollector<'_> {
import: Option<ImportOrExternCrate>,
depth: usize,
) {
- if GLOB_RECURSION_LIMIT.check(depth).is_err() {
+ if depth > GLOB_RECURSION_LIMIT {
// prevent stack overflows (but this shouldn't be possible)
panic!("infinite recursion in glob imports!");
}
@@ -1470,8 +1469,7 @@ impl DefCollector<'_> {
depth: usize,
container: ItemContainerId,
) {
- let recursion_limit = Limit::new(self.def_map.recursion_limit() as usize);
- if recursion_limit.check(depth).is_err() {
+ if depth > self.def_map.recursion_limit() as usize {
cov_mark::hit!(macro_expansion_overflow);
tracing::warn!("macro expansion is too deep");
return;
@@ -1499,7 +1497,6 @@ impl DefCollector<'_> {
fn finish(mut self) -> DefMap {
// Emit diagnostics for all remaining unexpanded macros.
-
let _p = tracing::info_span!("DefCollector::finish").entered();
for directive in &self.unresolved_macros {
@@ -1759,16 +1756,20 @@ impl ModCollector<'_, '_> {
);
}
}
- ModItem::ExternBlock(block) => self.collect(
- &self.item_tree[block].children,
- ItemContainerId::ExternBlockId(
- ExternBlockLoc {
- container: module,
- id: ItemTreeId::new(self.tree_id, block),
- }
- .intern(db),
- ),
- ),
+ ModItem::ExternBlock(block) => {
+ let extern_block_id = ExternBlockLoc {
+ container: module,
+ id: ItemTreeId::new(self.tree_id, block),
+ }
+ .intern(db);
+ self.def_collector.def_map.modules[self.module_id]
+ .scope
+ .define_extern_block(extern_block_id);
+ self.collect(
+ &self.item_tree[block].children,
+ ItemContainerId::ExternBlockId(extern_block_id),
+ )
+ }
ModItem::MacroCall(mac) => self.collect_macro_call(&self.item_tree[mac], container),
ModItem::MacroRules(id) => self.collect_macro_rules(id, module),
ModItem::Macro2(id) => self.collect_macro_def(id, module),
diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs
index d7e4ca41cd..afee42ecec 100644
--- a/crates/hir-def/src/nameres/mod_resolution.rs
+++ b/crates/hir-def/src/nameres/mod_resolution.rs
@@ -2,12 +2,11 @@
use arrayvec::ArrayVec;
use base_db::AnchoredPath;
use hir_expand::{name::Name, HirFileIdExt};
-use limit::Limit;
use span::EditionedFileId;
use crate::{db::DefDatabase, HirFileId};
-static MOD_DEPTH_LIMIT: Limit = Limit::new(32);
+const MOD_DEPTH_LIMIT: usize = 32;
#[derive(Clone, Debug)]
pub(super) struct ModDir {
@@ -50,7 +49,7 @@ impl ModDir {
fn child(&self, dir_path: DirPath, root_non_dir_owner: bool) -> Option<ModDir> {
let depth = self.depth + 1;
- if MOD_DEPTH_LIMIT.check(depth as usize).is_err() {
+ if depth as usize > MOD_DEPTH_LIMIT {
tracing::error!("MOD_DEPTH_LIMIT exceeded");
cov_mark::hit!(circular_mods);
return None;
diff --git a/crates/hir-def/src/path.rs b/crates/hir-def/src/path.rs
index e59c37104d..e6c2504d07 100644
--- a/crates/hir-def/src/path.rs
+++ b/crates/hir-def/src/path.rs
@@ -173,10 +173,7 @@ impl Path {
segments: path.mod_path().segments(),
generic_args: Some(path.generic_args()),
},
- Path::LangItem(_, seg) => PathSegments {
- segments: seg.as_ref().map_or(&[], |seg| std::slice::from_ref(seg)),
- generic_args: None,
- },
+ Path::LangItem(_, seg) => PathSegments { segments: seg.as_slice(), generic_args: None },
}
}
@@ -240,6 +237,11 @@ pub struct PathSegment<'a> {
pub args_and_bindings: Option<&'a GenericArgs>,
}
+impl PathSegment<'_> {
+ pub const MISSING: PathSegment<'static> =
+ PathSegment { name: &Name::missing(), args_and_bindings: None };
+}
+
#[derive(Debug, Clone, Copy)]
pub struct PathSegments<'a> {
segments: &'a [Name],
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index 8c556d8a8c..9dfb6e3cc4 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -10,13 +10,13 @@ use smallvec::{smallvec, SmallVec};
use triomphe::Arc;
use crate::{
- body::{
- scope::{ExprScopes, ScopeId},
- HygieneId,
- },
builtin_type::BuiltinType,
data::ExternCrateDeclData,
db::DefDatabase,
+ expr_store::{
+ scope::{ExprScopes, ScopeId},
+ HygieneId,
+ },
generics::{GenericParams, TypeOrConstParamData},
hir::{BindingId, ExprId, LabelId},
item_scope::{BuiltinShadowMode, ImportOrExternCrate, ImportOrGlob, BUILTIN_SCOPE},
@@ -327,8 +327,9 @@ impl Resolver {
| LangItemTarget::ImplDef(_)
| LangItemTarget::Static(_) => return None,
};
+ // Remaining segments start from 0 because lang paths have no segments other than the remaining.
return Some((
- ResolveValueResult::Partial(type_ns, 1, None),
+ ResolveValueResult::Partial(type_ns, 0, None),
ResolvePathResultPrefixInfo::default(),
));
}
@@ -1264,6 +1265,7 @@ impl HasResolver for GenericDefId {
GenericDefId::TypeAliasId(inner) => inner.resolver(db),
GenericDefId::ImplId(inner) => inner.resolver(db),
GenericDefId::ConstId(inner) => inner.resolver(db),
+ GenericDefId::StaticId(inner) => inner.resolver(db),
}
}
}
diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml
index 03a9d54d2e..b193a34a01 100644
--- a/crates/hir-expand/Cargo.toml
+++ b/crates/hir-expand/Cargo.toml
@@ -31,7 +31,6 @@ cfg.workspace = true
syntax.workspace = true
tt.workspace = true
mbe.workspace = true
-limit.workspace = true
span.workspace = true
parser.workspace = true
syntax-bridge.workspace = true
diff --git a/crates/hir-expand/src/builtin/fn_macro.rs b/crates/hir-expand/src/builtin/fn_macro.rs
index 310ddaaf9e..55242ab3e5 100644
--- a/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/crates/hir-expand/src/builtin/fn_macro.rs
@@ -833,7 +833,7 @@ fn env_expand(
if key.as_str() == "OUT_DIR" {
err = Some(ExpandError::other(
span,
- r#"`OUT_DIR` not set, enable "build scripts" to fix"#,
+ r#"`OUT_DIR` not set, build scripts may have failed to run"#,
));
}
diff --git a/crates/hir-expand/src/cfg_process.rs b/crates/hir-expand/src/cfg_process.rs
index 01a3103af8..626a82ae08 100644
--- a/crates/hir-expand/src/cfg_process.rs
+++ b/crates/hir-expand/src/cfg_process.rs
@@ -201,9 +201,6 @@ pub(crate) fn process_cfg_attrs(
MacroDefKind::BuiltInAttr(_, expander) => expander.is_derive(),
_ => false,
};
- if !is_derive {
- return None;
- }
let mut remove = FxHashSet::default();
let item = ast::Item::cast(node.clone())?;
@@ -220,28 +217,43 @@ pub(crate) fn process_cfg_attrs(
}
}
}
- match item {
- ast::Item::Struct(it) => match it.field_list()? {
- ast::FieldList::RecordFieldList(fields) => {
- process_has_attrs_with_possible_comma(db, fields.fields(), loc.krate, &mut remove)?;
+
+ if is_derive {
+ // Only derives get their code cfg-clean, normal attribute macros process only the cfg at their level
+ // (cfg_attr is handled above, cfg is handled in the def map).
+ match item {
+ ast::Item::Struct(it) => match it.field_list()? {
+ ast::FieldList::RecordFieldList(fields) => {
+ process_has_attrs_with_possible_comma(
+ db,
+ fields.fields(),
+ loc.krate,
+ &mut remove,
+ )?;
+ }
+ ast::FieldList::TupleFieldList(fields) => {
+ process_has_attrs_with_possible_comma(
+ db,
+ fields.fields(),
+ loc.krate,
+ &mut remove,
+ )?;
+ }
+ },
+ ast::Item::Enum(it) => {
+ process_enum(db, it.variant_list()?, loc.krate, &mut remove)?;
}
- ast::FieldList::TupleFieldList(fields) => {
- process_has_attrs_with_possible_comma(db, fields.fields(), loc.krate, &mut remove)?;
+ ast::Item::Union(it) => {
+ process_has_attrs_with_possible_comma(
+ db,
+ it.record_field_list()?.fields(),
+ loc.krate,
+ &mut remove,
+ )?;
}
- },
- ast::Item::Enum(it) => {
- process_enum(db, it.variant_list()?, loc.krate, &mut remove)?;
- }
- ast::Item::Union(it) => {
- process_has_attrs_with_possible_comma(
- db,
- it.record_field_list()?.fields(),
- loc.krate,
- &mut remove,
- )?;
+ // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now
+ _ => {}
}
- // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now
- _ => {}
}
Some(remove)
}
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index b7804f888a..8ca8bf1ba4 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -2,7 +2,6 @@
use base_db::{ra_salsa, CrateId, SourceDatabase};
use either::Either;
-use limit::Limit;
use mbe::MatchedArmIndex;
use rustc_hash::FxHashSet;
use span::{AstIdMap, Edition, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
@@ -35,7 +34,7 @@ type MacroArgResult = (Arc<tt::TopSubtree>, SyntaxFixupUndoInfo, Span);
/// an error will be emitted.
///
/// Actual max for `analysis-stats .` at some point: 30672.
-static TOKEN_LIMIT: Limit = Limit::new(2_097_152);
+const TOKEN_LIMIT: usize = 2_097_152;
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
@@ -740,20 +739,19 @@ pub(crate) fn token_tree_to_syntax_node(
fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
let tt = tt.top_subtree();
let count = tt.count();
- if TOKEN_LIMIT.check(count).is_err() {
+ if count <= TOKEN_LIMIT {
+ Ok(())
+ } else {
Err(ExpandResult {
value: (),
err: Some(ExpandError::other(
tt.delimiter.open,
format!(
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
- count,
- TOKEN_LIMIT.inner(),
+ count, TOKEN_LIMIT,
),
)),
})
- } else {
- Ok(())
}
}
diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs
index 848870c3a3..0758bd4515 100644
--- a/crates/hir-expand/src/name.rs
+++ b/crates/hir-expand/src/name.rs
@@ -142,8 +142,8 @@ impl Name {
/// Ideally, we want a `gensym` semantics for missing names -- each missing
/// name is equal only to itself. It's not clear how to implement this in
/// salsa though, so we punt on that bit for a moment.
- pub fn missing() -> Name {
- Name { symbol: sym::MISSING_NAME.clone(), ctx: () }
+ pub const fn missing() -> Name {
+ Name { symbol: sym::consts::MISSING_NAME, ctx: () }
}
/// Returns true if this is a fake name for things missing in the source code. See
@@ -262,6 +262,6 @@ impl AsName for ast::FieldKind {
impl AsName for base_db::Dependency {
fn as_name(&self) -> Name {
- Name::new_root(&self.name)
+ Name::new_symbol_root((*self.name).clone())
}
}
diff --git a/crates/hir-expand/src/prettify_macro_expansion_.rs b/crates/hir-expand/src/prettify_macro_expansion_.rs
index 6ff7831fd8..c744fbce77 100644
--- a/crates/hir-expand/src/prettify_macro_expansion_.rs
+++ b/crates/hir-expand/src/prettify_macro_expansion_.rs
@@ -41,9 +41,9 @@ pub fn prettify_macro_expansion(
} else if let Some(dep) =
target_crate.dependencies.iter().find(|dep| dep.crate_id == macro_def_crate)
{
- make::tokens::ident(&dep.name)
+ make::tokens::ident(dep.name.as_str())
} else if let Some(crate_name) = &crate_graph[macro_def_crate].display_name {
- make::tokens::ident(crate_name.crate_name())
+ make::tokens::ident(crate_name.crate_name().as_str())
} else {
return dollar_crate.clone();
}
diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs
index 07808fea85..3dc3dcd760 100644
--- a/crates/hir-expand/src/proc_macro.rs
+++ b/crates/hir-expand/src/proc_macro.rs
@@ -238,6 +238,9 @@ impl CustomProcMacroExpander {
let krate_graph = db.crate_graph();
// Proc macros have access to the environment variables of the invoking crate.
let env = &krate_graph[calling_crate].env;
+ let current_dir =
+ krate_graph[calling_crate].proc_macro_cwd.as_deref().map(ToString::to_string);
+
match proc_macro.expander.expand(
tt,
attr_arg,
@@ -245,10 +248,7 @@ impl CustomProcMacroExpander {
def_site,
call_site,
mixed_site,
- db.crate_workspace_data()[&calling_crate]
- .proc_macro_cwd
- .as_ref()
- .map(ToString::to_string),
+ current_dir,
) {
Ok(t) => ExpandResult::ok(t),
Err(err) => match err {
diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml
index 989f0955e1..4d36de0b38 100644
--- a/crates/hir-ty/Cargo.toml
+++ b/crates/hir-ty/Cargo.toml
@@ -36,6 +36,7 @@ indexmap.workspace = true
rustc_apfloat = "0.2.0"
ra-ap-rustc_abi.workspace = true
+ra-ap-rustc_hashes.workspace = true
ra-ap-rustc_index.workspace = true
ra-ap-rustc_pattern_analysis.workspace = true
@@ -47,7 +48,6 @@ hir-def.workspace = true
hir-expand.workspace = true
base-db.workspace = true
syntax.workspace = true
-limit.workspace = true
span.workspace = true
[dev-dependencies]
diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs
index 62feca5f8c..171ba001c4 100644
--- a/crates/hir-ty/src/autoderef.rs
+++ b/crates/hir-ty/src/autoderef.rs
@@ -9,7 +9,6 @@ use chalk_ir::cast::Cast;
use hir_def::lang_item::LangItem;
use hir_expand::name::Name;
use intern::sym;
-use limit::Limit;
use triomphe::Arc;
use crate::{
@@ -17,7 +16,7 @@ use crate::{
TraitEnvironment, Ty, TyBuilder, TyKind,
};
-static AUTODEREF_RECURSION_LIMIT: Limit = Limit::new(20);
+const AUTODEREF_RECURSION_LIMIT: usize = 20;
#[derive(Debug)]
pub(crate) enum AutoderefKind {
@@ -140,7 +139,7 @@ impl<T: TrackAutoderefSteps> Iterator for Autoderef<'_, '_, T> {
return Some((self.ty.clone(), 0));
}
- if AUTODEREF_RECURSION_LIMIT.check(self.steps.len() + 1).is_err() {
+ if self.steps.len() > AUTODEREF_RECURSION_LIMIT {
return None;
}
@@ -194,7 +193,11 @@ pub(crate) fn deref_by_trait(
}
let trait_id = || {
- if use_receiver_trait {
+ // FIXME: Remove the `false` once `Receiver` needs to be stabilized, doing so will
+ // effectively bump the MSRV of rust-analyzer to 1.84 due to 1.83 and below lacking the
+ // blanked impl on `Deref`.
+ #[expect(clippy::overly_complex_bool_expr)]
+ if use_receiver_trait && false {
if let Some(receiver) =
db.lang_item(table.trait_env.krate, LangItem::Receiver).and_then(|l| l.as_trait())
{
diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index 142766c039..7839589994 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -3,7 +3,7 @@
use base_db::{ra_salsa::Cycle, CrateId};
use chalk_ir::{cast::Cast, BoundVar, DebruijnIndex};
use hir_def::{
- body::{Body, HygieneId},
+ expr_store::{Body, HygieneId},
hir::{Expr, ExprId},
path::Path,
resolver::{Resolver, ValueNs},
@@ -124,6 +124,7 @@ pub(crate) fn path_to_const<'g>(
ConstScalar::UnevaluatedConst(c.into(), Substitution::empty(Interner)),
expected_ty,
)),
+ // FIXME: With feature(adt_const_params), we also need to consider other things here, e.g. struct constructors.
_ => None,
}
}
diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs
index 7f9f0c0de1..59aaf85164 100644
--- a/crates/hir-ty/src/diagnostics/expr.rs
+++ b/crates/hir-ty/src/diagnostics/expr.rs
@@ -36,7 +36,7 @@ use crate::{
};
pub(crate) use hir_def::{
- body::Body,
+ expr_store::Body,
hir::{Expr, ExprId, MatchArm, Pat, PatId, Statement},
LocalFieldId, VariantId,
};
@@ -440,7 +440,9 @@ impl ExprValidator {
return;
};
let root = source_ptr.file_syntax(db.upcast());
- let ast::Expr::IfExpr(if_expr) = source_ptr.value.to_node(&root) else {
+ let either::Left(ast::Expr::IfExpr(if_expr)) =
+ source_ptr.value.to_node(&root)
+ else {
return;
};
let mut top_if_expr = if_expr;
diff --git a/crates/hir-ty/src/diagnostics/match_check.rs b/crates/hir-ty/src/diagnostics/match_check.rs
index c5d8c95661..b0f9fc53e2 100644
--- a/crates/hir-ty/src/diagnostics/match_check.rs
+++ b/crates/hir-ty/src/diagnostics/match_check.rs
@@ -11,7 +11,8 @@ pub(crate) mod pat_analysis;
use chalk_ir::Mutability;
use hir_def::{
- body::Body, data::adt::VariantData, hir::PatId, AdtId, EnumVariantId, LocalFieldId, VariantId,
+ data::adt::VariantData, expr_store::Body, hir::PatId, AdtId, EnumVariantId, LocalFieldId,
+ VariantId,
};
use hir_expand::name::Name;
use span::Edition;
diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
index 2b854310a1..91eb59fb31 100644
--- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
+++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -95,7 +95,7 @@ impl<'db> MatchCheckCtx<'db> {
let place_validity = PlaceValidity::from_bool(known_valid_scrutinee.unwrap_or(true));
// Measured to take ~100ms on modern hardware.
- let complexity_limit = Some(500000);
+ let complexity_limit = 500000;
compute_match_usefulness(self, arms, scrut_ty, place_validity, complexity_limit)
}
@@ -361,11 +361,11 @@ impl PatCx for MatchCheckCtx<'_> {
}
}
- fn ctor_sub_tys<'a>(
- &'a self,
- ctor: &'a rustc_pattern_analysis::constructor::Constructor<Self>,
- ty: &'a Self::Ty,
- ) -> impl ExactSizeIterator<Item = (Self::Ty, PrivateUninhabitedField)> + Captures<'a> {
+ fn ctor_sub_tys(
+ &self,
+ ctor: &rustc_pattern_analysis::constructor::Constructor<Self>,
+ ty: &Self::Ty,
+ ) -> impl ExactSizeIterator<Item = (Self::Ty, PrivateUninhabitedField)> {
let single = |ty| smallvec![(ty, PrivateUninhabitedField(false))];
let tys: SmallVec<[_; 2]> = match ctor {
Struct | Variant(_) | UnionField => match ty.kind(Interner) {
diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 6bba83fac9..ac849b0762 100644
--- a/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -5,28 +5,31 @@ use std::mem;
use either::Either;
use hir_def::{
- body::Body,
+ expr_store::Body,
hir::{Expr, ExprId, ExprOrPatId, Pat, PatId, Statement, UnaryOp},
path::Path,
resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs},
type_ref::Rawness,
- AdtId, DefWithBodyId, FieldId, VariantId,
+ AdtId, DefWithBodyId, FieldId, FunctionId, VariantId,
};
+use span::Edition;
use crate::{
- db::HirDatabase, utils::is_fn_unsafe_to_call, InferenceResult, Interner, TyExt, TyKind,
+ db::HirDatabase, utils::is_fn_unsafe_to_call, InferenceResult, Interner, TargetFeatures, TyExt,
+ TyKind,
};
-/// Returns `(unsafe_exprs, fn_is_unsafe)`.
-///
-/// If `fn_is_unsafe` is false, `unsafe_exprs` are hard errors. If true, they're `unsafe_op_in_unsafe_fn`.
-pub fn missing_unsafe(
- db: &dyn HirDatabase,
- def: DefWithBodyId,
-) -> (Vec<(ExprOrPatId, UnsafetyReason)>, bool) {
+#[derive(Debug, Default)]
+pub struct MissingUnsafeResult {
+ pub unsafe_exprs: Vec<(ExprOrPatId, UnsafetyReason)>,
+ /// If `fn_is_unsafe` is false, `unsafe_exprs` are hard errors. If true, they're `unsafe_op_in_unsafe_fn`.
+ pub fn_is_unsafe: bool,
+ pub deprecated_safe_calls: Vec<ExprId>,
+}
+
+pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> MissingUnsafeResult {
let _p = tracing::info_span!("missing_unsafe").entered();
- let mut res = Vec::new();
let is_unsafe = match def {
DefWithBodyId::FunctionId(it) => db.function_data(it).is_unsafe(),
DefWithBodyId::StaticId(_)
@@ -35,11 +38,19 @@ pub fn missing_unsafe(
| DefWithBodyId::InTypeConstId(_) => false,
};
+ let mut res = MissingUnsafeResult { fn_is_unsafe: is_unsafe, ..MissingUnsafeResult::default() };
let body = db.body(def);
let infer = db.infer(def);
- let mut callback = |node, inside_unsafe_block, reason| {
- if inside_unsafe_block == InsideUnsafeBlock::No {
- res.push((node, reason));
+ let mut callback = |diag| match diag {
+ UnsafeDiagnostic::UnsafeOperation { node, inside_unsafe_block, reason } => {
+ if inside_unsafe_block == InsideUnsafeBlock::No {
+ res.unsafe_exprs.push((node, reason));
+ }
+ }
+ UnsafeDiagnostic::DeprecatedSafe2024 { node, inside_unsafe_block } => {
+ if inside_unsafe_block == InsideUnsafeBlock::No {
+ res.deprecated_safe_calls.push(node)
+ }
}
};
let mut visitor = UnsafeVisitor::new(db, &infer, &body, def, &mut callback);
@@ -54,7 +65,7 @@ pub fn missing_unsafe(
}
}
- (res, is_unsafe)
+ res
}
#[derive(Debug, Clone, Copy)]
@@ -73,15 +84,31 @@ pub enum InsideUnsafeBlock {
Yes,
}
+#[derive(Debug)]
+enum UnsafeDiagnostic {
+ UnsafeOperation {
+ node: ExprOrPatId,
+ inside_unsafe_block: InsideUnsafeBlock,
+ reason: UnsafetyReason,
+ },
+ /// A lint.
+ DeprecatedSafe2024 { node: ExprId, inside_unsafe_block: InsideUnsafeBlock },
+}
+
pub fn unsafe_expressions(
db: &dyn HirDatabase,
infer: &InferenceResult,
def: DefWithBodyId,
body: &Body,
current: ExprId,
- unsafe_expr_cb: &mut dyn FnMut(ExprOrPatId, InsideUnsafeBlock, UnsafetyReason),
+ callback: &mut dyn FnMut(InsideUnsafeBlock),
) {
- let mut visitor = UnsafeVisitor::new(db, infer, body, def, unsafe_expr_cb);
+ let mut visitor_callback = |diag| {
+ if let UnsafeDiagnostic::UnsafeOperation { inside_unsafe_block, .. } = diag {
+ callback(inside_unsafe_block);
+ }
+ };
+ let mut visitor = UnsafeVisitor::new(db, infer, body, def, &mut visitor_callback);
_ = visitor.resolver.update_to_inner_scope(db.upcast(), def, current);
visitor.walk_expr(current);
}
@@ -95,7 +122,10 @@ struct UnsafeVisitor<'a> {
inside_unsafe_block: InsideUnsafeBlock,
inside_assignment: bool,
inside_union_destructure: bool,
- unsafe_expr_cb: &'a mut dyn FnMut(ExprOrPatId, InsideUnsafeBlock, UnsafetyReason),
+ callback: &'a mut dyn FnMut(UnsafeDiagnostic),
+ def_target_features: TargetFeatures,
+ // FIXME: This needs to be the edition of the span of each call.
+ edition: Edition,
}
impl<'a> UnsafeVisitor<'a> {
@@ -104,9 +134,14 @@ impl<'a> UnsafeVisitor<'a> {
infer: &'a InferenceResult,
body: &'a Body,
def: DefWithBodyId,
- unsafe_expr_cb: &'a mut dyn FnMut(ExprOrPatId, InsideUnsafeBlock, UnsafetyReason),
+ unsafe_expr_cb: &'a mut dyn FnMut(UnsafeDiagnostic),
) -> Self {
let resolver = def.resolver(db.upcast());
+ let def_target_features = match def {
+ DefWithBodyId::FunctionId(func) => TargetFeatures::from_attrs(&db.attrs(func.into())),
+ _ => TargetFeatures::default(),
+ };
+ let edition = db.crate_graph()[resolver.module().krate()].edition;
Self {
db,
infer,
@@ -116,12 +151,34 @@ impl<'a> UnsafeVisitor<'a> {
inside_unsafe_block: InsideUnsafeBlock::No,
inside_assignment: false,
inside_union_destructure: false,
- unsafe_expr_cb,
+ callback: unsafe_expr_cb,
+ def_target_features,
+ edition,
}
}
- fn call_cb(&mut self, node: ExprOrPatId, reason: UnsafetyReason) {
- (self.unsafe_expr_cb)(node, self.inside_unsafe_block, reason);
+ fn on_unsafe_op(&mut self, node: ExprOrPatId, reason: UnsafetyReason) {
+ (self.callback)(UnsafeDiagnostic::UnsafeOperation {
+ node,
+ inside_unsafe_block: self.inside_unsafe_block,
+ reason,
+ });
+ }
+
+ fn check_call(&mut self, node: ExprId, func: FunctionId) {
+ let unsafety = is_fn_unsafe_to_call(self.db, func, &self.def_target_features, self.edition);
+ match unsafety {
+ crate::utils::Unsafety::Safe => {}
+ crate::utils::Unsafety::Unsafe => {
+ self.on_unsafe_op(node.into(), UnsafetyReason::UnsafeFnCall)
+ }
+ crate::utils::Unsafety::DeprecatedSafe2024 => {
+ (self.callback)(UnsafeDiagnostic::DeprecatedSafe2024 {
+ node,
+ inside_unsafe_block: self.inside_unsafe_block,
+ })
+ }
+ }
}
fn walk_pats_top(&mut self, pats: impl Iterator<Item = PatId>, parent_expr: ExprId) {
@@ -146,7 +203,9 @@ impl<'a> UnsafeVisitor<'a> {
| Pat::Ref { .. }
| Pat::Box { .. }
| Pat::Expr(..)
- | Pat::ConstBlock(..) => self.call_cb(current.into(), UnsafetyReason::UnionField),
+ | Pat::ConstBlock(..) => {
+ self.on_unsafe_op(current.into(), UnsafetyReason::UnionField)
+ }
// `Or` only wraps other patterns, and `Missing`/`Wild` do not constitute a read.
Pat::Missing | Pat::Wild | Pat::Or(_) => {}
}
@@ -180,9 +239,13 @@ impl<'a> UnsafeVisitor<'a> {
let inside_assignment = mem::replace(&mut self.inside_assignment, false);
match expr {
&Expr::Call { callee, .. } => {
- if let Some(func) = self.infer[callee].as_fn_def(self.db) {
- if is_fn_unsafe_to_call(self.db, func) {
- self.call_cb(current.into(), UnsafetyReason::UnsafeFnCall);
+ let callee = &self.infer[callee];
+ if let Some(func) = callee.as_fn_def(self.db) {
+ self.check_call(current, func);
+ }
+ if let TyKind::Function(fn_ptr) = callee.kind(Interner) {
+ if fn_ptr.sig.safety == chalk_ir::Safety::Unsafe {
+ self.on_unsafe_op(current.into(), UnsafetyReason::UnsafeFnCall);
}
}
}
@@ -209,18 +272,13 @@ impl<'a> UnsafeVisitor<'a> {
}
}
Expr::MethodCall { .. } => {
- if self
- .infer
- .method_resolution(current)
- .map(|(func, _)| is_fn_unsafe_to_call(self.db, func))
- .unwrap_or(false)
- {
- self.call_cb(current.into(), UnsafetyReason::UnsafeFnCall);
+ if let Some((func, _)) = self.infer.method_resolution(current) {
+ self.check_call(current, func);
}
}
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
if let TyKind::Raw(..) = &self.infer[*expr].kind(Interner) {
- self.call_cb(current.into(), UnsafetyReason::RawPtrDeref);
+ self.on_unsafe_op(current.into(), UnsafetyReason::RawPtrDeref);
}
}
Expr::Unsafe { .. } => {
@@ -235,7 +293,7 @@ impl<'a> UnsafeVisitor<'a> {
self.walk_pats_top(std::iter::once(target), current);
self.inside_assignment = old_inside_assignment;
}
- Expr::InlineAsm(_) => self.call_cb(current.into(), UnsafetyReason::InlineAsm),
+ Expr::InlineAsm(_) => self.on_unsafe_op(current.into(), UnsafetyReason::InlineAsm),
// rustc allows union assignment to propagate through field accesses and casts.
Expr::Cast { .. } => self.inside_assignment = inside_assignment,
Expr::Field { .. } => {
@@ -244,7 +302,7 @@ impl<'a> UnsafeVisitor<'a> {
if let Some(Either::Left(FieldId { parent: VariantId::UnionId(_), .. })) =
self.infer.field_resolution(current)
{
- self.call_cb(current.into(), UnsafetyReason::UnionField);
+ self.on_unsafe_op(current.into(), UnsafetyReason::UnionField);
}
}
}
@@ -279,9 +337,9 @@ impl<'a> UnsafeVisitor<'a> {
if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial {
let static_data = self.db.static_data(id);
if static_data.mutable {
- self.call_cb(node, UnsafetyReason::MutableStatic);
+ self.on_unsafe_op(node, UnsafetyReason::MutableStatic);
} else if static_data.is_extern && !static_data.has_safe_kw {
- self.call_cb(node, UnsafetyReason::ExternStatic);
+ self.on_unsafe_op(node, UnsafetyReason::ExternStatic);
}
}
}
diff --git a/crates/hir-ty/src/generics.rs b/crates/hir-ty/src/generics.rs
index abbf2a4f2e..18cf6e5ce3 100644
--- a/crates/hir-ty/src/generics.rs
+++ b/crates/hir-ty/src/generics.rs
@@ -262,7 +262,8 @@ fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<Generic
GenericDefId::FunctionId(it) => it.lookup(db).container,
GenericDefId::TypeAliasId(it) => it.lookup(db).container,
GenericDefId::ConstId(it) => it.lookup(db).container,
- GenericDefId::AdtId(_)
+ GenericDefId::StaticId(_)
+ | GenericDefId::AdtId(_)
| GenericDefId::TraitId(_)
| GenericDefId::ImplId(_)
| GenericDefId::TraitAliasId(_) => return None,
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 25bb3a76de..0cb7002f44 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -16,7 +16,7 @@
pub(crate) mod cast;
pub(crate) mod closure;
mod coerce;
-mod diagnostics;
+pub(crate) mod diagnostics;
mod expr;
mod mutability;
mod pat;
@@ -34,9 +34,9 @@ use chalk_ir::{
};
use either::Either;
use hir_def::{
- body::{Body, HygieneId},
builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
data::{ConstData, StaticData},
+ expr_store::{Body, HygieneId},
hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, PatId},
lang_item::{LangItem, LangItemTarget},
layout::Integer,
@@ -236,7 +236,7 @@ pub enum InferenceDiagnostic {
name: Name,
/// Contains the type the field resolves to
field_with_same_name: Option<Ty>,
- assoc_func_with_same_name: Option<AssocItemId>,
+ assoc_func_with_same_name: Option<FunctionId>,
},
UnresolvedAssocItem {
id: ExprOrPatId,
@@ -466,6 +466,9 @@ pub struct InferenceResult {
pub type_of_for_iterator: FxHashMap<ExprId, Ty>,
type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
/// Whether there are any type-mismatching errors in the result.
+ // FIXME: This isn't as useful as initially thought due to us falling back placeholders to
+ // `TyKind::Error`.
+ // Which will then mark this field.
pub(crate) has_errors: bool,
/// Interned common types to return references to.
// FIXME: Move this into `InferenceContext`
@@ -943,7 +946,7 @@ impl<'a> InferenceContext<'a> {
let ty = self.insert_type_vars(ty);
let ty = self.normalize_associated_types_in(ty);
- self.infer_top_pat(*pat, &ty);
+ self.infer_top_pat(*pat, &ty, None);
if ty
.data(Interner)
.flags
@@ -1236,7 +1239,29 @@ impl<'a> InferenceContext<'a> {
}
fn write_expr_adj(&mut self, expr: ExprId, adjustments: Vec<Adjustment>) {
- self.result.expr_adjustments.insert(expr, adjustments);
+ if adjustments.is_empty() {
+ return;
+ }
+ match self.result.expr_adjustments.entry(expr) {
+ std::collections::hash_map::Entry::Occupied(mut entry) => {
+ match (&mut entry.get_mut()[..], &adjustments[..]) {
+ (
+ [Adjustment { kind: Adjust::NeverToAny, target }],
+ [.., Adjustment { target: new_target, .. }],
+ ) => {
+ // NeverToAny coercion can target any type, so instead of adding a new
+ // adjustment on top we can change the target.
+ *target = new_target.clone();
+ }
+ _ => {
+ *entry.get_mut() = adjustments;
+ }
+ }
+ }
+ std::collections::hash_map::Entry::Vacant(entry) => {
+ entry.insert(adjustments);
+ }
+ }
}
fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId, subst: Substitution) {
@@ -1477,21 +1502,22 @@ impl<'a> InferenceContext<'a> {
&self.diagnostics,
InferenceTyDiagnosticSource::Body,
);
+ let mut path_ctx = ctx.at_path(path, node);
let (resolution, unresolved) = if value_ns {
- let Some(res) = ctx.resolve_path_in_value_ns(path, node, HygieneId::ROOT) else {
+ let Some(res) = path_ctx.resolve_path_in_value_ns(HygieneId::ROOT) else {
return (self.err_ty(), None);
};
match res {
ResolveValueResult::ValueNs(value, _) => match value {
ValueNs::EnumVariantId(var) => {
- let substs = ctx.substs_from_path(path, var.into(), true);
+ let substs = path_ctx.substs_from_path(var.into(), true);
drop(ctx);
let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
return (ty, Some(var.into()));
}
ValueNs::StructId(strukt) => {
- let substs = ctx.substs_from_path(path, strukt.into(), true);
+ let substs = path_ctx.substs_from_path(strukt.into(), true);
drop(ctx);
let ty = self.db.ty(strukt.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
@@ -1506,7 +1532,7 @@ impl<'a> InferenceContext<'a> {
ResolveValueResult::Partial(typens, unresolved, _) => (typens, Some(unresolved)),
}
} else {
- match ctx.resolve_path_in_type_ns(path, node) {
+ match path_ctx.resolve_path_in_type_ns() {
Some((it, idx)) => (it, idx),
None => return (self.err_ty(), None),
}
@@ -1517,21 +1543,21 @@ impl<'a> InferenceContext<'a> {
};
return match resolution {
TypeNs::AdtId(AdtId::StructId(strukt)) => {
- let substs = ctx.substs_from_path(path, strukt.into(), true);
+ let substs = path_ctx.substs_from_path(strukt.into(), true);
drop(ctx);
let ty = self.db.ty(strukt.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
forbid_unresolved_segments((ty, Some(strukt.into())), unresolved)
}
TypeNs::AdtId(AdtId::UnionId(u)) => {
- let substs = ctx.substs_from_path(path, u.into(), true);
+ let substs = path_ctx.substs_from_path(u.into(), true);
drop(ctx);
let ty = self.db.ty(u.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
forbid_unresolved_segments((ty, Some(u.into())), unresolved)
}
TypeNs::EnumVariantId(var) => {
- let substs = ctx.substs_from_path(path, var.into(), true);
+ let substs = path_ctx.substs_from_path(var.into(), true);
drop(ctx);
let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
@@ -1542,31 +1568,32 @@ impl<'a> InferenceContext<'a> {
let substs = generics.placeholder_subst(self.db);
let mut ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
- let Some(mut remaining_idx) = unresolved else {
+ let Some(remaining_idx) = unresolved else {
drop(ctx);
return self.resolve_variant_on_alias(ty, None, mod_path);
};
let mut remaining_segments = path.segments().skip(remaining_idx);
+ if remaining_segments.len() >= 2 {
+ path_ctx.ignore_last_segment();
+ }
+
// We need to try resolving unresolved segments one by one because each may resolve
// to a projection, which `TyLoweringContext` cannot handle on its own.
let mut tried_resolving_once = false;
- while !remaining_segments.is_empty() {
- let resolved_segment = path.segments().get(remaining_idx - 1).unwrap();
- let current_segment = remaining_segments.take(1);
-
+ while let Some(current_segment) = remaining_segments.first() {
// If we can resolve to an enum variant, it takes priority over associated type
// of the same name.
if let Some((AdtId::EnumId(id), _)) = ty.as_adt() {
let enum_data = self.db.enum_data(id);
- let name = current_segment.first().unwrap().name;
- if let Some(variant) = enum_data.variant(name) {
+ if let Some(variant) = enum_data.variant(current_segment.name) {
return if remaining_segments.len() == 1 {
(ty, Some(variant.into()))
} else {
// We still have unresolved paths, but enum variants never have
// associated types!
+ // FIXME: Report an error.
(self.err_ty(), None)
};
}
@@ -1575,23 +1602,13 @@ impl<'a> InferenceContext<'a> {
if tried_resolving_once {
// FIXME: with `inherent_associated_types` this is allowed, but our `lower_partly_resolved_path()`
// will need to be updated to err at the correct segment.
- //
- // We need to stop here because otherwise the segment index passed to `lower_partly_resolved_path()`
- // will be incorrect, and that can mess up error reporting.
break;
}
// `lower_partly_resolved_path()` returns `None` as type namespace unless
// `remaining_segments` is empty, which is never the case here. We don't know
// which namespace the new `ty` is in until normalized anyway.
- (ty, _) = ctx.lower_partly_resolved_path(
- node,
- resolution,
- resolved_segment,
- current_segment,
- (remaining_idx - 1) as u32,
- false,
- );
+ (ty, _) = path_ctx.lower_partly_resolved_path(resolution, false);
tried_resolving_once = true;
ty = self.table.insert_type_vars(ty);
@@ -1601,8 +1618,6 @@ impl<'a> InferenceContext<'a> {
return (self.err_ty(), None);
}
- // FIXME(inherent_associated_types): update `resolution` based on `ty` here.
- remaining_idx += 1;
remaining_segments = remaining_segments.skip(1);
}
drop(ctx);
@@ -1618,12 +1633,7 @@ impl<'a> InferenceContext<'a> {
(ty, variant)
}
TypeNs::TypeAliasId(it) => {
- let resolved_seg = match unresolved {
- None => path.segments().last().unwrap(),
- Some(n) => path.segments().get(path.segments().len() - n - 1).unwrap(),
- };
- let substs =
- ctx.substs_from_path_segment(resolved_seg, Some(it.into()), true, None);
+ let substs = path_ctx.substs_from_path_segment(it.into(), true, None);
drop(ctx);
let ty = self.db.ty(it.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
diff --git a/crates/hir-ty/src/infer/cast.rs b/crates/hir-ty/src/infer/cast.rs
index 21d0be6ed5..eb193686e9 100644
--- a/crates/hir-ty/src/infer/cast.rs
+++ b/crates/hir-ty/src/infer/cast.rs
@@ -374,6 +374,7 @@ enum PointerKind {
fn pointer_kind(ty: &Ty, table: &mut InferenceTable<'_>) -> Result<Option<PointerKind>, ()> {
let ty = table.resolve_ty_shallow(ty);
+ let ty = table.normalize_associated_types_in(ty);
if table.is_sized(&ty) {
return Ok(Some(PointerKind::Thin));
diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs
index d40816ba8c..acd86b1f3e 100644
--- a/crates/hir-ty/src/infer/coerce.rs
+++ b/crates/hir-ty/src/infer/coerce.rs
@@ -163,10 +163,27 @@ impl CoerceMany {
// type is a type variable and the new one is `!`, trying it the other
// way around first would mean we make the type variable `!`, instead of
// just marking it as possibly diverging.
- if let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty(), CoerceNever::Yes) {
- self.final_ty = Some(res);
- } else if let Ok(res) = ctx.coerce(expr, &self.merged_ty(), &expr_ty, CoerceNever::Yes) {
+ //
+ // - [Comment from rustc](https://github.com/rust-lang/rust/blob/5ff18d0eaefd1bd9ab8ec33dab2404a44e7631ed/compiler/rustc_hir_typeck/src/coercion.rs#L1334-L1335)
+ // First try to coerce the new expression to the type of the previous ones,
+ // but only if the new expression has no coercion already applied to it.
+ if expr.is_none_or(|expr| !ctx.result.expr_adjustments.contains_key(&expr)) {
+ if let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty(), CoerceNever::Yes) {
+ self.final_ty = Some(res);
+ if let Some(expr) = expr {
+ self.expressions.push(expr);
+ }
+ return;
+ }
+ }
+
+ if let Ok((adjustments, res)) =
+ ctx.coerce_inner(&self.merged_ty(), &expr_ty, CoerceNever::Yes)
+ {
self.final_ty = Some(res);
+ for &e in &self.expressions {
+ ctx.write_expr_adj(e, adjustments.clone());
+ }
} else {
match cause {
CoercionCause::Expr(id) => {
@@ -244,14 +261,23 @@ impl InferenceContext<'_> {
// between places and values.
coerce_never: CoerceNever,
) -> Result<Ty, TypeError> {
- let from_ty = self.resolve_ty_shallow(from_ty);
- let to_ty = self.resolve_ty_shallow(to_ty);
- let (adjustments, ty) = self.table.coerce(&from_ty, &to_ty, coerce_never)?;
+ let (adjustments, ty) = self.coerce_inner(from_ty, to_ty, coerce_never)?;
if let Some(expr) = expr {
self.write_expr_adj(expr, adjustments);
}
Ok(ty)
}
+
+ fn coerce_inner(
+ &mut self,
+ from_ty: &Ty,
+ to_ty: &Ty,
+ coerce_never: CoerceNever,
+ ) -> Result<(Vec<Adjustment>, Ty), TypeError> {
+ let from_ty = self.resolve_ty_shallow(from_ty);
+ let to_ty = self.resolve_ty_shallow(to_ty);
+ self.table.coerce(&from_ty, &to_ty, coerce_never)
+ }
}
impl InferenceTable<'_> {
@@ -373,7 +399,7 @@ impl InferenceTable<'_> {
// Check that the types which they point at are compatible.
let from_raw = TyKind::Raw(to_mt, from_inner.clone()).intern(Interner);
- // Although references and unsafe ptrs have the same
+ // Although references and raw ptrs have the same
// representation, we still register an Adjust::DerefRef so that
// regionck knows that the region for `a` must be valid here.
if is_ref {
diff --git a/crates/hir-ty/src/infer/diagnostics.rs b/crates/hir-ty/src/infer/diagnostics.rs
index 032dc37899..e4f5b5ed37 100644
--- a/crates/hir-ty/src/infer/diagnostics.rs
+++ b/crates/hir-ty/src/infer/diagnostics.rs
@@ -5,16 +5,14 @@
use std::cell::RefCell;
use std::ops::{Deref, DerefMut};
-use hir_def::body::HygieneId;
-use hir_def::hir::ExprOrPatId;
-use hir_def::path::{Path, PathSegment, PathSegments};
-use hir_def::resolver::{ResolveValueResult, Resolver, TypeNs};
-use hir_def::type_ref::TypesMap;
-use hir_def::TypeOwnerId;
-
-use crate::db::HirDatabase;
+use either::Either;
+use hir_def::{hir::ExprOrPatId, path::Path, resolver::Resolver, type_ref::TypesMap, TypeOwnerId};
+use la_arena::{Idx, RawIdx};
+
use crate::{
- InferenceDiagnostic, InferenceTyDiagnosticSource, Ty, TyLoweringContext, TyLoweringDiagnostic,
+ db::HirDatabase,
+ lower::path::{PathDiagnosticCallback, PathLoweringContext},
+ InferenceDiagnostic, InferenceTyDiagnosticSource, TyLoweringContext, TyLoweringDiagnostic,
};
// Unfortunately, this struct needs to use interior mutability (but we encapsulate it)
@@ -44,6 +42,11 @@ impl Diagnostics {
}
}
+pub(crate) struct PathDiagnosticCallbackData<'a> {
+ node: ExprOrPatId,
+ diagnostics: &'a Diagnostics,
+}
+
pub(super) struct InferenceTyLoweringContext<'a> {
ctx: TyLoweringContext<'a>,
diagnostics: &'a Diagnostics,
@@ -51,6 +54,7 @@ pub(super) struct InferenceTyLoweringContext<'a> {
}
impl<'a> InferenceTyLoweringContext<'a> {
+ #[inline]
pub(super) fn new(
db: &'a dyn HirDatabase,
resolver: &'a Resolver,
@@ -62,65 +66,62 @@ impl<'a> InferenceTyLoweringContext<'a> {
Self { ctx: TyLoweringContext::new(db, resolver, types_map, owner), diagnostics, source }
}
- pub(super) fn resolve_path_in_type_ns(
- &mut self,
- path: &Path,
+ #[inline]
+ pub(super) fn at_path<'b>(
+ &'b mut self,
+ path: &'b Path,
node: ExprOrPatId,
- ) -> Option<(TypeNs, Option<usize>)> {
- let diagnostics = self.diagnostics;
- self.ctx.resolve_path_in_type_ns(path, &mut |_, diag| {
- diagnostics.push(InferenceDiagnostic::PathDiagnostic { node, diag })
- })
+ ) -> PathLoweringContext<'b, 'a> {
+ let on_diagnostic = PathDiagnosticCallback {
+ data: Either::Right(PathDiagnosticCallbackData { diagnostics: self.diagnostics, node }),
+ callback: |data, _, diag| {
+ let data = data.as_ref().right().unwrap();
+ data.diagnostics
+ .push(InferenceDiagnostic::PathDiagnostic { node: data.node, diag });
+ },
+ };
+ PathLoweringContext::new(&mut self.ctx, on_diagnostic, path)
}
- pub(super) fn resolve_path_in_value_ns(
- &mut self,
- path: &Path,
- node: ExprOrPatId,
- hygiene_id: HygieneId,
- ) -> Option<ResolveValueResult> {
- let diagnostics = self.diagnostics;
- self.ctx.resolve_path_in_value_ns(path, hygiene_id, &mut |_, diag| {
- diagnostics.push(InferenceDiagnostic::PathDiagnostic { node, diag })
- })
+ #[inline]
+ pub(super) fn at_path_forget_diagnostics<'b>(
+ &'b mut self,
+ path: &'b Path,
+ ) -> PathLoweringContext<'b, 'a> {
+ let on_diagnostic = PathDiagnosticCallback {
+ data: Either::Right(PathDiagnosticCallbackData {
+ diagnostics: self.diagnostics,
+ node: ExprOrPatId::ExprId(Idx::from_raw(RawIdx::from_u32(0))),
+ }),
+ callback: |_data, _, _diag| {},
+ };
+ PathLoweringContext::new(&mut self.ctx, on_diagnostic, path)
}
- pub(super) fn lower_partly_resolved_path(
- &mut self,
- node: ExprOrPatId,
- resolution: TypeNs,
- resolved_segment: PathSegment<'_>,
- remaining_segments: PathSegments<'_>,
- resolved_segment_idx: u32,
- infer_args: bool,
- ) -> (Ty, Option<TypeNs>) {
- let diagnostics = self.diagnostics;
- self.ctx.lower_partly_resolved_path(
- resolution,
- resolved_segment,
- remaining_segments,
- resolved_segment_idx,
- infer_args,
- &mut |_, diag| diagnostics.push(InferenceDiagnostic::PathDiagnostic { node, diag }),
- )
+ #[inline]
+ pub(super) fn forget_diagnostics(&mut self) {
+ self.ctx.diagnostics.clear();
}
}
impl<'a> Deref for InferenceTyLoweringContext<'a> {
type Target = TyLoweringContext<'a>;
+ #[inline]
fn deref(&self) -> &Self::Target {
&self.ctx
}
}
impl DerefMut for InferenceTyLoweringContext<'_> {
+ #[inline]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.ctx
}
}
impl Drop for InferenceTyLoweringContext<'_> {
+ #[inline]
fn drop(&mut self) {
self.diagnostics
.push_ty_diagnostics(self.source, std::mem::take(&mut self.ctx.diagnostics));
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index b951443897..80e3ca1fa2 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -43,9 +43,9 @@ use crate::{
primitive::{self, UintTy},
static_lifetime, to_chalk_trait_id,
traits::FnTrait,
- Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, CallableSig, FnAbi, FnPointer,
- FnSig, FnSubst, Interner, Rawness, Scalar, Substitution, TraitEnvironment, TraitRef, Ty,
- TyBuilder, TyExt, TyKind,
+ Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, CallableSig, DeclContext,
+ DeclOrigin, FnAbi, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar, Substitution,
+ TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind,
};
use super::{
@@ -334,7 +334,11 @@ impl InferenceContext<'_> {
ExprIsRead::No
};
let input_ty = self.infer_expr(expr, &Expectation::none(), child_is_read);
- self.infer_top_pat(pat, &input_ty);
+ self.infer_top_pat(
+ pat,
+ &input_ty,
+ Some(DeclContext { origin: DeclOrigin::LetExpr }),
+ );
self.result.standard_types.bool_.clone()
}
Expr::Block { statements, tail, label, id } => {
@@ -461,7 +465,7 @@ impl InferenceContext<'_> {
// Now go through the argument patterns
for (arg_pat, arg_ty) in args.iter().zip(&sig_tys) {
- self.infer_top_pat(*arg_pat, arg_ty);
+ self.infer_top_pat(*arg_pat, arg_ty, None);
}
// FIXME: lift these out into a struct
@@ -485,78 +489,7 @@ impl InferenceContext<'_> {
ty
}
- Expr::Call { callee, args, .. } => {
- let callee_ty = self.infer_expr(*callee, &Expectation::none(), ExprIsRead::Yes);
- let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone(), false, true);
- let (res, derefed_callee) = loop {
- let Some((callee_deref_ty, _)) = derefs.next() else {
- break (None, callee_ty.clone());
- };
- if let Some(res) = derefs.table.callable_sig(&callee_deref_ty, args.len()) {
- break (Some(res), callee_deref_ty);
- }
- };
- // if the function is unresolved, we use is_varargs=true to
- // suppress the arg count diagnostic here
- let is_varargs =
- derefed_callee.callable_sig(self.db).is_some_and(|sig| sig.is_varargs)
- || res.is_none();
- let (param_tys, ret_ty) = match res {
- Some((func, params, ret_ty)) => {
- let mut adjustments = auto_deref_adjust_steps(&derefs);
- if let TyKind::Closure(c, _) =
- self.table.resolve_completely(callee_ty.clone()).kind(Interner)
- {
- if let Some(par) = self.current_closure {
- self.closure_dependencies.entry(par).or_default().push(*c);
- }
- self.deferred_closures.entry(*c).or_default().push((
- derefed_callee.clone(),
- callee_ty.clone(),
- params.clone(),
- tgt_expr,
- ));
- }
- if let Some(fn_x) = func {
- self.write_fn_trait_method_resolution(
- fn_x,
- &derefed_callee,
- &mut adjustments,
- &callee_ty,
- &params,
- tgt_expr,
- );
- }
- self.write_expr_adj(*callee, adjustments);
- (params, ret_ty)
- }
- None => {
- self.push_diagnostic(InferenceDiagnostic::ExpectedFunction {
- call_expr: tgt_expr,
- found: callee_ty.clone(),
- });
- (Vec::new(), self.err_ty())
- }
- };
- let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
- self.register_obligations_for_call(&callee_ty);
-
- let expected_inputs = self.expected_inputs_for_expected_output(
- expected,
- ret_ty.clone(),
- param_tys.clone(),
- );
-
- self.check_call_arguments(
- tgt_expr,
- args,
- &expected_inputs,
- &param_tys,
- &indices_to_skip,
- is_varargs,
- );
- self.normalize_associated_types_in(ret_ty)
- }
+ Expr::Call { callee, args, .. } => self.infer_call(tgt_expr, *callee, args, expected),
Expr::MethodCall { receiver, args, method_name, generic_args } => self
.infer_method_call(
tgt_expr,
@@ -582,7 +515,7 @@ impl InferenceContext<'_> {
let mut all_arms_diverge = Diverges::Always;
for arm in arms.iter() {
let input_ty = self.resolve_ty_shallow(&input_ty);
- self.infer_top_pat(arm.pat, &input_ty);
+ self.infer_top_pat(arm.pat, &input_ty, None);
}
let expected = expected.adjust_for_branches(&mut self.table);
@@ -927,7 +860,7 @@ impl InferenceContext<'_> {
let resolver_guard =
self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr);
self.inside_assignment = true;
- self.infer_top_pat(target, &rhs_ty);
+ self.infer_top_pat(target, &rhs_ty, None);
self.inside_assignment = false;
self.resolver.reset_to_guard(resolver_guard);
}
@@ -1632,8 +1565,11 @@ impl InferenceContext<'_> {
decl_ty
};
- this.infer_top_pat(*pat, &ty);
+ let decl = DeclContext {
+ origin: DeclOrigin::LocalDecl { has_else: else_branch.is_some() },
+ };
+ this.infer_top_pat(*pat, &ty, Some(decl));
if let Some(expr) = else_branch {
let previous_diverges =
mem::replace(&mut this.diverges, Diverges::Maybe);
@@ -1865,6 +1801,107 @@ impl InferenceContext<'_> {
}
}
+ fn infer_call(
+ &mut self,
+ tgt_expr: ExprId,
+ callee: ExprId,
+ args: &[ExprId],
+ expected: &Expectation,
+ ) -> Ty {
+ let callee_ty = self.infer_expr(callee, &Expectation::none(), ExprIsRead::Yes);
+ let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone(), false, true);
+ let (res, derefed_callee) = loop {
+ let Some((callee_deref_ty, _)) = derefs.next() else {
+ break (None, callee_ty.clone());
+ };
+ if let Some(res) = derefs.table.callable_sig(&callee_deref_ty, args.len()) {
+ break (Some(res), callee_deref_ty);
+ }
+ };
+ // if the function is unresolved, we use is_varargs=true to
+ // suppress the arg count diagnostic here
+ let is_varargs =
+ derefed_callee.callable_sig(self.db).is_some_and(|sig| sig.is_varargs) || res.is_none();
+ let (param_tys, ret_ty) = match res {
+ Some((func, params, ret_ty)) => {
+ let mut adjustments = auto_deref_adjust_steps(&derefs);
+ if let TyKind::Closure(c, _) =
+ self.table.resolve_completely(callee_ty.clone()).kind(Interner)
+ {
+ if let Some(par) = self.current_closure {
+ self.closure_dependencies.entry(par).or_default().push(*c);
+ }
+ self.deferred_closures.entry(*c).or_default().push((
+ derefed_callee.clone(),
+ callee_ty.clone(),
+ params.clone(),
+ tgt_expr,
+ ));
+ }
+ if let Some(fn_x) = func {
+ self.write_fn_trait_method_resolution(
+ fn_x,
+ &derefed_callee,
+ &mut adjustments,
+ &callee_ty,
+ &params,
+ tgt_expr,
+ );
+ }
+ self.write_expr_adj(callee, adjustments);
+ (params, ret_ty)
+ }
+ None => {
+ self.push_diagnostic(InferenceDiagnostic::ExpectedFunction {
+ call_expr: tgt_expr,
+ found: callee_ty.clone(),
+ });
+ (Vec::new(), self.err_ty())
+ }
+ };
+ let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
+ self.check_call(
+ tgt_expr,
+ args,
+ callee_ty,
+ &param_tys,
+ ret_ty,
+ &indices_to_skip,
+ is_varargs,
+ expected,
+ )
+ }
+
+ fn check_call(
+ &mut self,
+ tgt_expr: ExprId,
+ args: &[ExprId],
+ callee_ty: Ty,
+ param_tys: &[Ty],
+ ret_ty: Ty,
+ indices_to_skip: &[u32],
+ is_varargs: bool,
+ expected: &Expectation,
+ ) -> Ty {
+ self.register_obligations_for_call(&callee_ty);
+
+ let expected_inputs = self.expected_inputs_for_expected_output(
+ expected,
+ ret_ty.clone(),
+ param_tys.to_owned(),
+ );
+
+ self.check_call_arguments(
+ tgt_expr,
+ args,
+ &expected_inputs,
+ param_tys,
+ indices_to_skip,
+ is_varargs,
+ );
+ self.normalize_associated_types_in(ret_ty)
+ }
+
fn infer_method_call(
&mut self,
tgt_expr: ExprId,
@@ -1885,21 +1922,32 @@ impl InferenceContext<'_> {
VisibleFromModule::Filter(self.resolver.module()),
method_name,
);
- let (receiver_ty, method_ty, substs) = match resolved {
+ match resolved {
Some((adjust, func, visible)) => {
- let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
- let generics = generics(self.db.upcast(), func.into());
- let substs = self.substs_for_method_call(generics, generic_args);
- self.write_expr_adj(receiver, adjustments);
- self.write_method_resolution(tgt_expr, func, substs.clone());
if !visible {
self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem {
id: tgt_expr.into(),
item: func.into(),
})
}
- (ty, self.db.value_ty(func.into()).unwrap(), substs)
+
+ let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
+ self.write_expr_adj(receiver, adjustments);
+
+ let generics = generics(self.db.upcast(), func.into());
+ let substs = self.substs_for_method_call(generics, generic_args);
+ self.write_method_resolution(tgt_expr, func, substs.clone());
+ self.check_method_call(
+ tgt_expr,
+ args,
+ self.db.value_ty(func.into()).expect("we have a function def"),
+ substs,
+ ty,
+ expected,
+ )
}
+ // Failed to resolve, report diagnostic and try to resolve as call to field access or
+ // assoc function
None => {
let field_with_same_name_exists = match self.lookup_field(&receiver_ty, method_name)
{
@@ -1919,12 +1967,11 @@ impl InferenceContext<'_> {
VisibleFromModule::Filter(self.resolver.module()),
Some(method_name),
method_resolution::LookupMode::Path,
- |_ty, item, visible| {
- if visible {
- Some(item)
- } else {
- None
+ |_ty, item, visible| match item {
+ hir_def::AssocItemId::FunctionId(function_id) if visible => {
+ Some(function_id)
}
+ _ => None,
},
);
@@ -1932,17 +1979,45 @@ impl InferenceContext<'_> {
expr: tgt_expr,
receiver: receiver_ty.clone(),
name: method_name.clone(),
- field_with_same_name: field_with_same_name_exists,
+ field_with_same_name: field_with_same_name_exists.clone(),
assoc_func_with_same_name,
});
- (
- receiver_ty,
- Binders::empty(Interner, self.err_ty()),
- Substitution::empty(Interner),
- )
+
+ let recovered = match assoc_func_with_same_name {
+ Some(f) => {
+ let generics = generics(self.db.upcast(), f.into());
+ let substs = self.substs_for_method_call(generics, generic_args);
+ let f = self
+ .db
+ .value_ty(f.into())
+ .expect("we have a function def")
+ .substitute(Interner, &substs);
+ let sig = f.callable_sig(self.db).expect("we have a function def");
+ Some((f, sig, true))
+ }
+ None => field_with_same_name_exists.and_then(|field_ty| {
+ let callable_sig = field_ty.callable_sig(self.db)?;
+ Some((field_ty, callable_sig, false))
+ }),
+ };
+ match recovered {
+ Some((callee_ty, sig, strip_first)) => self.check_call(
+ tgt_expr,
+ args,
+ callee_ty,
+ sig.params().get(strip_first as usize..).unwrap_or(&[]),
+ sig.ret().clone(),
+ &[],
+ true,
+ expected,
+ ),
+ None => {
+ self.check_call_arguments(tgt_expr, args, &[], &[], &[], true);
+ self.err_ty()
+ }
+ }
}
- };
- self.check_method_call(tgt_expr, args, method_ty, substs, receiver_ty, expected)
+ }
}
fn check_method_call(
@@ -2012,9 +2087,10 @@ impl InferenceContext<'_> {
expected_inputs: &[Ty],
param_tys: &[Ty],
skip_indices: &[u32],
- is_varargs: bool,
+ ignore_arg_param_mismatch: bool,
) {
- let arg_count_mismatch = args.len() != param_tys.len() + skip_indices.len() && !is_varargs;
+ let arg_count_mismatch =
+ !ignore_arg_param_mismatch && args.len() != param_tys.len() + skip_indices.len();
if arg_count_mismatch {
self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount {
call_expr: expr,
@@ -2043,7 +2119,7 @@ impl InferenceContext<'_> {
continue;
}
- while skip_indices.peek().is_some_and(|i| *i < idx as u32) {
+ while skip_indices.peek().is_some_and(|&i| i < idx as u32) {
skip_indices.next();
}
if skip_indices.peek().copied() == Some(idx as u32) {
@@ -2125,8 +2201,8 @@ impl InferenceContext<'_> {
for kind_id in def_generics.iter_self_id().take(self_params) {
let arg = args.peek();
let arg = match (kind_id, arg) {
- // Lifetimes can be elided.
- // Once we have implemented lifetime elision correctly,
+ // Lifetimes can be inferred.
+ // Once we have implemented lifetime inference correctly,
// this should be handled in a proper way.
(
GenericParamId::LifetimeParamId(_),
diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs
index 00398f019d..db93116f10 100644
--- a/crates/hir-ty/src/infer/pat.rs
+++ b/crates/hir-ty/src/infer/pat.rs
@@ -3,23 +3,24 @@
use std::iter::repeat_with;
use hir_def::{
- body::Body,
+ expr_store::Body,
hir::{Binding, BindingAnnotation, BindingId, Expr, ExprId, Literal, Pat, PatId},
path::Path,
+ HasModule,
};
use hir_expand::name::Name;
use stdx::TupleExt;
use crate::{
- consteval::{try_const_usize, usize_const},
+ consteval::{self, try_const_usize, usize_const},
infer::{
coerce::CoerceNever, expr::ExprIsRead, BindingMode, Expectation, InferenceContext,
TypeMismatch,
},
lower::lower_to_chalk_mutability,
primitive::UintTy,
- static_lifetime, InferenceDiagnostic, Interner, Mutability, Scalar, Substitution, Ty,
- TyBuilder, TyExt, TyKind,
+ static_lifetime, DeclContext, DeclOrigin, InferenceDiagnostic, Interner, Mutability, Scalar,
+ Substitution, Ty, TyBuilder, TyExt, TyKind,
};
impl InferenceContext<'_> {
@@ -34,6 +35,7 @@ impl InferenceContext<'_> {
id: PatId,
ellipsis: Option<u32>,
subs: &[PatId],
+ decl: Option<DeclContext>,
) -> Ty {
let (ty, def) = self.resolve_variant(id.into(), path, true);
let var_data = def.map(|it| it.variant_data(self.db.upcast()));
@@ -92,13 +94,13 @@ impl InferenceContext<'_> {
}
};
- self.infer_pat(subpat, &expected_ty, default_bm);
+ self.infer_pat(subpat, &expected_ty, default_bm, decl);
}
}
None => {
let err_ty = self.err_ty();
for &inner in subs {
- self.infer_pat(inner, &err_ty, default_bm);
+ self.infer_pat(inner, &err_ty, default_bm, decl);
}
}
}
@@ -114,6 +116,7 @@ impl InferenceContext<'_> {
default_bm: BindingMode,
id: PatId,
subs: impl ExactSizeIterator<Item = (Name, PatId)>,
+ decl: Option<DeclContext>,
) -> Ty {
let (ty, def) = self.resolve_variant(id.into(), path, false);
if let Some(variant) = def {
@@ -162,13 +165,13 @@ impl InferenceContext<'_> {
}
};
- self.infer_pat(inner, &expected_ty, default_bm);
+ self.infer_pat(inner, &expected_ty, default_bm, decl);
}
}
None => {
let err_ty = self.err_ty();
for (_, inner) in subs {
- self.infer_pat(inner, &err_ty, default_bm);
+ self.infer_pat(inner, &err_ty, default_bm, decl);
}
}
}
@@ -185,6 +188,7 @@ impl InferenceContext<'_> {
default_bm: BindingMode,
ellipsis: Option<u32>,
subs: &[PatId],
+ decl: Option<DeclContext>,
) -> Ty {
let expected = self.resolve_ty_shallow(expected);
let expectations = match expected.as_tuple() {
@@ -209,12 +213,12 @@ impl InferenceContext<'_> {
// Process pre
for (ty, pat) in inner_tys.iter_mut().zip(pre) {
- *ty = self.infer_pat(*pat, ty, default_bm);
+ *ty = self.infer_pat(*pat, ty, default_bm, decl);
}
// Process post
for (ty, pat) in inner_tys.iter_mut().skip(pre.len() + n_uncovered_patterns).zip(post) {
- *ty = self.infer_pat(*pat, ty, default_bm);
+ *ty = self.infer_pat(*pat, ty, default_bm, decl);
}
TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys))
@@ -223,11 +227,17 @@ impl InferenceContext<'_> {
/// The resolver needs to be updated to the surrounding expression when inside assignment
/// (because there, `Pat::Path` can refer to a variable).
- pub(super) fn infer_top_pat(&mut self, pat: PatId, expected: &Ty) {
- self.infer_pat(pat, expected, BindingMode::default());
+ pub(super) fn infer_top_pat(&mut self, pat: PatId, expected: &Ty, decl: Option<DeclContext>) {
+ self.infer_pat(pat, expected, BindingMode::default(), decl);
}
- fn infer_pat(&mut self, pat: PatId, expected: &Ty, mut default_bm: BindingMode) -> Ty {
+ fn infer_pat(
+ &mut self,
+ pat: PatId,
+ expected: &Ty,
+ mut default_bm: BindingMode,
+ decl: Option<DeclContext>,
+ ) -> Ty {
let mut expected = self.resolve_ty_shallow(expected);
if matches!(&self.body[pat], Pat::Ref { .. }) || self.inside_assignment {
@@ -261,11 +271,11 @@ impl InferenceContext<'_> {
let ty = match &self.body[pat] {
Pat::Tuple { args, ellipsis } => {
- self.infer_tuple_pat_like(&expected, default_bm, *ellipsis, args)
+ self.infer_tuple_pat_like(&expected, default_bm, *ellipsis, args, decl)
}
Pat::Or(pats) => {
for pat in pats.iter() {
- self.infer_pat(*pat, &expected, default_bm);
+ self.infer_pat(*pat, &expected, default_bm, decl);
}
expected.clone()
}
@@ -274,6 +284,7 @@ impl InferenceContext<'_> {
lower_to_chalk_mutability(mutability),
&expected,
default_bm,
+ decl,
),
Pat::TupleStruct { path: p, args: subpats, ellipsis } => self
.infer_tuple_struct_pat_like(
@@ -283,10 +294,11 @@ impl InferenceContext<'_> {
pat,
*ellipsis,
subpats,
+ decl,
),
Pat::Record { path: p, args: fields, ellipsis: _ } => {
let subs = fields.iter().map(|f| (f.name.clone(), f.pat));
- self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs)
+ self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs, decl)
}
Pat::Path(path) => {
let ty = self.infer_path(path, pat.into()).unwrap_or_else(|| self.err_ty());
@@ -319,10 +331,10 @@ impl InferenceContext<'_> {
}
}
Pat::Bind { id, subpat } => {
- return self.infer_bind_pat(pat, *id, default_bm, *subpat, &expected);
+ return self.infer_bind_pat(pat, *id, default_bm, *subpat, &expected, decl);
}
Pat::Slice { prefix, slice, suffix } => {
- self.infer_slice_pat(&expected, prefix, slice, suffix, default_bm)
+ self.infer_slice_pat(&expected, prefix, slice, suffix, default_bm, decl)
}
Pat::Wild => expected.clone(),
Pat::Range { .. } => {
@@ -345,7 +357,7 @@ impl InferenceContext<'_> {
_ => (self.result.standard_types.unknown.clone(), None),
};
- let inner_ty = self.infer_pat(*inner, &inner_ty, default_bm);
+ let inner_ty = self.infer_pat(*inner, &inner_ty, default_bm, decl);
let mut b = TyBuilder::adt(self.db, box_adt).push(inner_ty);
if let Some(alloc_ty) = alloc_ty {
@@ -420,6 +432,7 @@ impl InferenceContext<'_> {
mutability: Mutability,
expected: &Ty,
default_bm: BindingMode,
+ decl: Option<DeclContext>,
) -> Ty {
let (expectation_type, expectation_lt) = match expected.as_reference() {
Some((inner_ty, lifetime, _exp_mut)) => (inner_ty.clone(), lifetime.clone()),
@@ -433,7 +446,7 @@ impl InferenceContext<'_> {
(inner_ty, inner_lt)
}
};
- let subty = self.infer_pat(inner_pat, &expectation_type, default_bm);
+ let subty = self.infer_pat(inner_pat, &expectation_type, default_bm, decl);
TyKind::Ref(mutability, expectation_lt, subty).intern(Interner)
}
@@ -444,6 +457,7 @@ impl InferenceContext<'_> {
default_bm: BindingMode,
subpat: Option<PatId>,
expected: &Ty,
+ decl: Option<DeclContext>,
) -> Ty {
let Binding { mode, .. } = self.body.bindings[binding];
let mode = if mode == BindingAnnotation::Unannotated {
@@ -454,7 +468,7 @@ impl InferenceContext<'_> {
self.result.binding_modes.insert(pat, mode);
let inner_ty = match subpat {
- Some(subpat) => self.infer_pat(subpat, expected, default_bm),
+ Some(subpat) => self.infer_pat(subpat, expected, default_bm, decl),
None => expected.clone(),
};
let inner_ty = self.insert_type_vars_shallow(inner_ty);
@@ -478,14 +492,28 @@ impl InferenceContext<'_> {
slice: &Option<PatId>,
suffix: &[PatId],
default_bm: BindingMode,
+ decl: Option<DeclContext>,
) -> Ty {
+ let expected = self.resolve_ty_shallow(expected);
+
+ // If `expected` is an infer ty, we try to equate it to an array if the given pattern
+ // allows it. See issue #16609
+ if self.pat_is_irrefutable(decl) && expected.is_ty_var() {
+ if let Some(resolved_array_ty) =
+ self.try_resolve_slice_ty_to_array_ty(prefix, suffix, slice)
+ {
+ self.unify(&expected, &resolved_array_ty);
+ }
+ }
+
+ let expected = self.resolve_ty_shallow(&expected);
let elem_ty = match expected.kind(Interner) {
TyKind::Array(st, _) | TyKind::Slice(st) => st.clone(),
_ => self.err_ty(),
};
for &pat_id in prefix.iter().chain(suffix.iter()) {
- self.infer_pat(pat_id, &elem_ty, default_bm);
+ self.infer_pat(pat_id, &elem_ty, default_bm, decl);
}
if let &Some(slice_pat_id) = slice {
@@ -499,7 +527,7 @@ impl InferenceContext<'_> {
_ => TyKind::Slice(elem_ty.clone()),
}
.intern(Interner);
- self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm);
+ self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm, decl);
}
match expected.kind(Interner) {
@@ -528,7 +556,7 @@ impl InferenceContext<'_> {
self.infer_expr(expr, &Expectation::has_type(expected.clone()), ExprIsRead::Yes)
}
- fn is_non_ref_pat(&mut self, body: &hir_def::body::Body, pat: PatId) -> bool {
+ fn is_non_ref_pat(&mut self, body: &hir_def::expr_store::Body, pat: PatId) -> bool {
match &body[pat] {
Pat::Tuple { .. }
| Pat::TupleStruct { .. }
@@ -536,9 +564,10 @@ impl InferenceContext<'_> {
| Pat::Range { .. }
| Pat::Slice { .. } => true,
Pat::Or(pats) => pats.iter().all(|p| self.is_non_ref_pat(body, *p)),
- Pat::Path(p) => {
- let v = self.resolve_value_path_inner(p, pat.into());
- v.is_some_and(|x| !matches!(x.0, hir_def::resolver::ValueNs::ConstId(_)))
+ Pat::Path(path) => {
+ // A const is a reference pattern, but other value ns things aren't (see #16131).
+ let resolved = self.resolve_value_path_inner(path, pat.into(), true);
+ resolved.is_some_and(|it| !matches!(it.0, hir_def::resolver::ValueNs::ConstId(_)))
}
Pat::ConstBlock(..) => false,
Pat::Lit(expr) => !matches!(
@@ -553,6 +582,59 @@ impl InferenceContext<'_> {
| Pat::Expr(_) => false,
}
}
+
+ fn try_resolve_slice_ty_to_array_ty(
+ &mut self,
+ before: &[PatId],
+ suffix: &[PatId],
+ slice: &Option<PatId>,
+ ) -> Option<Ty> {
+ if !slice.is_none() {
+ return None;
+ }
+
+ let len = before.len() + suffix.len();
+ let size =
+ consteval::usize_const(self.db, Some(len as u128), self.owner.krate(self.db.upcast()));
+
+ let elem_ty = self.table.new_type_var();
+ let array_ty = TyKind::Array(elem_ty.clone(), size).intern(Interner);
+ Some(array_ty)
+ }
+
+ /// Used to determine whether we can infer the expected type in the slice pattern to be of type array.
+ /// This is only possible if we're in an irrefutable pattern. If we were to allow this in refutable
+ /// patterns we wouldn't e.g. report ambiguity in the following situation:
+ ///
+ /// ```ignore(rust)
+ /// struct Zeroes;
+ /// const ARR: [usize; 2] = [0; 2];
+ /// const ARR2: [usize; 2] = [2; 2];
+ ///
+ /// impl Into<&'static [usize; 2]> for Zeroes {
+ /// fn into(self) -> &'static [usize; 2] {
+ /// &ARR
+ /// }
+ /// }
+ ///
+ /// impl Into<&'static [usize]> for Zeroes {
+ /// fn into(self) -> &'static [usize] {
+ /// &ARR2
+ /// }
+ /// }
+ ///
+ /// fn main() {
+ /// let &[a, b]: &[usize] = Zeroes.into() else {
+ /// ..
+ /// };
+ /// }
+ /// ```
+ ///
+ /// If we're in an irrefutable pattern we prefer the array impl candidate given that
+ /// the slice impl candidate would be rejected anyway (if no ambiguity existed).
+ fn pat_is_irrefutable(&self, decl_ctxt: Option<DeclContext>) -> bool {
+ matches!(decl_ctxt, Some(DeclContext { origin: DeclOrigin::LocalDecl { has_else: false } }))
+ }
}
pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool {
diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs
index 73bcefaf2a..6254bc1239 100644
--- a/crates/hir-ty/src/infer/path.rs
+++ b/crates/hir-ty/src/infer/path.rs
@@ -7,7 +7,6 @@ use hir_def::{
AdtId, AssocItemId, GenericDefId, ItemContainerId, Lookup,
};
use hir_expand::name::Name;
-use intern::sym;
use stdx::never;
use crate::{
@@ -41,7 +40,7 @@ impl InferenceContext<'_> {
}
fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<ValuePathResolution> {
- let (value, self_subst) = self.resolve_value_path_inner(path, id)?;
+ let (value, self_subst) = self.resolve_value_path_inner(path, id, false)?;
let value_def: ValueTyDefId = match value {
ValueNs::FunctionId(it) => it.into(),
@@ -86,16 +85,22 @@ impl InferenceContext<'_> {
}
};
- let generic_def_id = value_def.to_generic_def_id(self.db);
- let Some(generic_def) = generic_def_id else {
- // `value_def` is the kind of item that can never be generic (i.e. statics, at least
- // currently). We can just skip the binders to get its type.
+ let generic_def = value_def.to_generic_def_id(self.db);
+ if let GenericDefId::StaticId(_) = generic_def {
+ // `Static` is the kind of item that can never be generic currently. We can just skip the binders to get its type.
let (ty, binders) = self.db.value_ty(value_def)?.into_value_and_skipped_binders();
stdx::always!(binders.is_empty(Interner), "non-empty binders for non-generic def",);
return Some(ValuePathResolution::NonGeneric(ty));
};
- let substs = self.with_body_ty_lowering(|ctx| ctx.substs_from_path(path, value_def, true));
+ let substs = self.with_body_ty_lowering(|ctx| {
+ let mut path_ctx = ctx.at_path(path, id);
+ let last_segment = path.segments().len().checked_sub(1);
+ if let Some(last_segment) = last_segment {
+ path_ctx.set_current_segment(last_segment)
+ }
+ path_ctx.substs_from_path(value_def, true)
+ });
let substs = substs.as_slice(Interner);
if let ValueNs::EnumVariantId(_) = value {
@@ -122,7 +127,7 @@ impl InferenceContext<'_> {
}
let parent_substs = self_subst.or_else(|| {
- let generics = generics(self.db.upcast(), generic_def_id?);
+ let generics = generics(self.db.upcast(), generic_def);
let parent_params_len = generics.parent_generics()?.len();
let parent_args = &substs[substs.len() - parent_params_len..];
Some(Substitution::from_iter(Interner, parent_args))
@@ -147,6 +152,7 @@ impl InferenceContext<'_> {
&mut self,
path: &Path,
id: ExprOrPatId,
+ no_diagnostics: bool,
) -> Option<(ValueNs, Option<chalk_ir::Substitution<Interner>>)> {
// Don't use `self.make_ty()` here as we need `orig_ns`.
let mut ctx = TyLoweringContext::new(
@@ -157,33 +163,83 @@ impl InferenceContext<'_> {
&self.diagnostics,
InferenceTyDiagnosticSource::Body,
);
+ let mut path_ctx = if no_diagnostics {
+ ctx.at_path_forget_diagnostics(path)
+ } else {
+ ctx.at_path(path, id)
+ };
let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
let last = path.segments().last()?;
- let (ty, orig_ns) = ctx.lower_ty_ext(type_ref);
+ let (ty, orig_ns) = path_ctx.ty_ctx().lower_ty_ext(type_ref);
let ty = self.table.insert_type_vars(ty);
let ty = self.table.normalize_associated_types_in(ty);
- let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
- let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
- drop(ctx);
+ path_ctx.ignore_last_segment();
+ let (ty, _) = path_ctx.lower_ty_relative_path(ty, orig_ns);
+ drop_ctx(ctx, no_diagnostics);
let ty = self.table.insert_type_vars(ty);
let ty = self.table.normalize_associated_types_in(ty);
self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
} else {
let hygiene = self.body.expr_or_pat_path_hygiene(id);
// FIXME: report error, unresolved first path segment
- let value_or_partial = ctx.resolve_path_in_value_ns(path, id, hygiene)?;
- drop(ctx);
+ let value_or_partial = path_ctx.resolve_path_in_value_ns(hygiene)?;
match value_or_partial {
- ResolveValueResult::ValueNs(it, _) => (it, None),
- ResolveValueResult::Partial(def, remaining_index, _) => self
- .resolve_assoc_item(id, def, path, remaining_index, id)
- .map(|(it, substs)| (it, Some(substs)))?,
+ ResolveValueResult::ValueNs(it, _) => {
+ drop_ctx(ctx, no_diagnostics);
+ (it, None)
+ }
+ ResolveValueResult::Partial(def, remaining_index, _) => {
+ // there may be more intermediate segments between the resolved one and
+ // the end. Only the last segment needs to be resolved to a value; from
+ // the segments before that, we need to get either a type or a trait ref.
+
+ let remaining_segments = path.segments().skip(remaining_index);
+ let is_before_last = remaining_segments.len() == 1;
+ let last_segment = remaining_segments
+ .last()
+ .expect("there should be at least one segment here");
+
+ let (resolution, substs) = match (def, is_before_last) {
+ (TypeNs::TraitId(trait_), true) => {
+ let self_ty = self.table.new_type_var();
+ let trait_ref =
+ path_ctx.lower_trait_ref_from_resolved_path(trait_, self_ty);
+ drop_ctx(ctx, no_diagnostics);
+ self.resolve_trait_assoc_item(trait_ref, last_segment, id)
+ }
+ (def, _) => {
+ // Either we already have a type (e.g. `Vec::new`), or we have a
+ // trait but it's not the last segment, so the next segment
+ // should resolve to an associated type of that trait (e.g. `<T
+ // as Iterator>::Item::default`)
+ path_ctx.ignore_last_segment();
+ let (ty, _) = path_ctx.lower_partly_resolved_path(def, true);
+ drop_ctx(ctx, no_diagnostics);
+ if ty.is_unknown() {
+ return None;
+ }
+
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+
+ self.resolve_ty_assoc_item(ty, last_segment.name, id)
+ }
+ }?;
+ (resolution, Some(substs))
+ }
}
};
- Some((value, self_subst))
+ return Some((value, self_subst));
+
+ #[inline]
+ fn drop_ctx(mut ctx: TyLoweringContext<'_>, no_diagnostics: bool) {
+ if no_diagnostics {
+ ctx.forget_diagnostics();
+ }
+ }
}
fn add_required_obligations_for_value_path(&mut self, def: GenericDefId, subst: &Substitution) {
@@ -213,89 +269,6 @@ impl InferenceContext<'_> {
}
}
- fn resolve_assoc_item(
- &mut self,
- node: ExprOrPatId,
- def: TypeNs,
- path: &Path,
- remaining_index: usize,
- id: ExprOrPatId,
- ) -> Option<(ValueNs, Substitution)> {
- // there may be more intermediate segments between the resolved one and
- // the end. Only the last segment needs to be resolved to a value; from
- // the segments before that, we need to get either a type or a trait ref.
-
- let _d;
- let (resolved_segment, remaining_segments) = match path {
- Path::Normal { .. } | Path::BarePath(_) => {
- assert!(remaining_index < path.segments().len());
- (
- path.segments().get(remaining_index - 1).unwrap(),
- path.segments().skip(remaining_index),
- )
- }
- Path::LangItem(..) => (
- PathSegment {
- name: {
- _d = Name::new_symbol_root(sym::Unknown.clone());
- &_d
- },
- args_and_bindings: None,
- },
- path.segments(),
- ),
- };
- let is_before_last = remaining_segments.len() == 1;
-
- match (def, is_before_last) {
- (TypeNs::TraitId(trait_), true) => {
- let segment =
- remaining_segments.last().expect("there should be at least one segment here");
- let self_ty = self.table.new_type_var();
- let trait_ref = self.with_body_ty_lowering(|ctx| {
- ctx.lower_trait_ref_from_resolved_path(trait_, resolved_segment, self_ty)
- });
- self.resolve_trait_assoc_item(trait_ref, segment, id)
- }
- (def, _) => {
- // Either we already have a type (e.g. `Vec::new`), or we have a
- // trait but it's not the last segment, so the next segment
- // should resolve to an associated type of that trait (e.g. `<T
- // as Iterator>::Item::default`)
- let remaining_segments_for_ty =
- remaining_segments.take(remaining_segments.len() - 1);
- let mut ctx = TyLoweringContext::new(
- self.db,
- &self.resolver,
- &self.body.types,
- self.owner.into(),
- &self.diagnostics,
- InferenceTyDiagnosticSource::Body,
- );
- let (ty, _) = ctx.lower_partly_resolved_path(
- node,
- def,
- resolved_segment,
- remaining_segments_for_ty,
- (remaining_index - 1) as u32,
- true,
- );
- drop(ctx);
- if ty.is_unknown() {
- return None;
- }
-
- let ty = self.insert_type_vars(ty);
- let ty = self.normalize_associated_types_in(ty);
-
- let segment =
- remaining_segments.last().expect("there should be at least one segment here");
-
- self.resolve_ty_assoc_item(ty, segment.name, id)
- }
- }
- }
-
fn resolve_trait_assoc_item(
&mut self,
trait_ref: TraitRef,
diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs
index 108171586e..e2ab336d2e 100644
--- a/crates/hir-ty/src/layout.rs
+++ b/crates/hir-ty/src/layout.rs
@@ -14,6 +14,7 @@ use hir_def::{
};
use la_arena::{Idx, RawIdx};
use rustc_abi::AddressSpace;
+use rustc_hashes::Hash64;
use rustc_index::{IndexSlice, IndexVec};
use triomphe::Arc;
@@ -178,7 +179,7 @@ fn layout_of_simd_ty(
.size
.checked_mul(e_len, dl)
.ok_or(LayoutError::BadCalc(LayoutCalculatorError::SizeOverflow))?;
- let align = dl.vector_align(size);
+ let align = dl.llvmlike_vector_align(size);
let size = size.align_to(align.abi);
// Compute the placement of the vector fields:
@@ -193,11 +194,12 @@ fn layout_of_simd_ty(
fields,
backend_repr: BackendRepr::Vector { element: e_abi, count: e_len },
largest_niche: e_ly.largest_niche,
+ uninhabited: false,
size,
align,
max_repr_align: None,
unadjusted_abi_align: align.abi,
- randomization_seed: 0,
+ randomization_seed: Hash64::ZERO,
}))
}
@@ -296,25 +298,22 @@ pub fn layout_of_ty_query(
.checked_mul(count, dl)
.ok_or(LayoutError::BadCalc(LayoutCalculatorError::SizeOverflow))?;
- let backend_repr =
- if count != 0 && matches!(element.backend_repr, BackendRepr::Uninhabited) {
- BackendRepr::Uninhabited
- } else {
- BackendRepr::Memory { sized: true }
- };
+ let backend_repr = BackendRepr::Memory { sized: true };
let largest_niche = if count != 0 { element.largest_niche } else { None };
+ let uninhabited = if count != 0 { element.uninhabited } else { false };
Layout {
variants: Variants::Single { index: struct_variant_idx() },
fields: FieldsShape::Array { stride: element.size, count },
backend_repr,
largest_niche,
+ uninhabited,
align: element.align,
size,
max_repr_align: None,
unadjusted_abi_align: element.align.abi,
- randomization_seed: 0,
+ randomization_seed: Hash64::ZERO,
}
}
TyKind::Slice(element) => {
@@ -324,11 +323,12 @@ pub fn layout_of_ty_query(
fields: FieldsShape::Array { stride: element.size, count: 0 },
backend_repr: BackendRepr::Memory { sized: false },
largest_niche: None,
+ uninhabited: false,
align: element.align,
size: Size::ZERO,
max_repr_align: None,
unadjusted_abi_align: element.align.abi,
- randomization_seed: 0,
+ randomization_seed: Hash64::ZERO,
}
}
TyKind::Str => Layout {
@@ -336,11 +336,12 @@ pub fn layout_of_ty_query(
fields: FieldsShape::Array { stride: Size::from_bytes(1), count: 0 },
backend_repr: BackendRepr::Memory { sized: false },
largest_niche: None,
+ uninhabited: false,
align: dl.i8_align,
size: Size::ZERO,
max_repr_align: None,
unadjusted_abi_align: dl.i8_align.abi,
- randomization_seed: 0,
+ randomization_seed: Hash64::ZERO,
},
// Potentially-wide pointers.
TyKind::Ref(_, _, pointee) | TyKind::Raw(_, pointee) => {
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index 3c18ea9281..daddcf0b24 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -12,6 +12,9 @@ extern crate ra_ap_rustc_index as rustc_index;
#[cfg(feature = "in-rust-tree")]
extern crate rustc_abi;
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_hashes;
+
#[cfg(not(feature = "in-rust-tree"))]
extern crate ra_ap_rustc_abi as rustc_abi;
@@ -21,6 +24,9 @@ extern crate rustc_pattern_analysis;
#[cfg(not(feature = "in-rust-tree"))]
extern crate ra_ap_rustc_pattern_analysis as rustc_pattern_analysis;
+#[cfg(not(feature = "in-rust-tree"))]
+extern crate ra_ap_rustc_hashes as rustc_hashes;
+
mod builder;
mod chalk_db;
mod chalk_ext;
@@ -100,7 +106,9 @@ pub use mapping::{
};
pub use method_resolution::check_orphan_rules;
pub use traits::TraitEnvironment;
-pub use utils::{all_super_traits, direct_super_traits, is_fn_unsafe_to_call};
+pub use utils::{
+ all_super_traits, direct_super_traits, is_fn_unsafe_to_call, TargetFeatures, Unsafety,
+};
pub use variance::Variance;
pub use chalk_ir::{
@@ -1047,3 +1055,20 @@ pub fn known_const_to_ast(
}
Some(make::expr_const_value(konst.display(db, edition).to_string().as_str()))
}
+
+#[derive(Debug, Copy, Clone)]
+pub(crate) enum DeclOrigin {
+ LetExpr,
+ /// from `let x = ..`
+ LocalDecl {
+ has_else: bool,
+ },
+}
+
+/// Provides context for checking patterns in declarations. More specifically this
+/// allows us to infer array types if the pattern is irrefutable and allows us to infer
+/// the size of the array. See issue rust-lang/rust#76342.
+#[derive(Debug, Copy, Clone)]
+pub(crate) struct DeclContext {
+ pub(crate) origin: DeclOrigin,
+}
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 432b8f4d94..af73b5ed9a 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -6,6 +6,7 @@
//!
//! This usually involves resolving names, collecting generic arguments etc.
pub(crate) mod diagnostics;
+pub(crate) mod path;
use std::{
cell::OnceCell,
@@ -23,7 +24,6 @@ use chalk_ir::{
use either::Either;
use hir_def::{
- body::HygieneId,
builtin_type::BuiltinType,
data::{adt::StructKind, TraitFlags},
expander::Expander,
@@ -33,22 +33,20 @@ use hir_def::{
},
lang_item::LangItem,
nameres::MacroSubNs,
- path::{GenericArg, GenericArgs, ModPath, Path, PathKind, PathSegment, PathSegments},
- resolver::{HasResolver, LifetimeNs, ResolveValueResult, Resolver, TypeNs, ValueNs},
+ path::{GenericArg, ModPath, Path, PathKind},
+ resolver::{HasResolver, LifetimeNs, Resolver, TypeNs},
type_ref::{
ConstRef, LifetimeRef, PathId, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound,
TypeRef, TypeRefId, TypesMap, TypesSourceMap,
},
AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId,
- FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstLoc, ItemContainerId,
- LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId,
- TypeOwnerId, UnionId, VariantId,
+ FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstLoc, LocalFieldId,
+ Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId, TypeOwnerId, UnionId, VariantId,
};
use hir_expand::{name::Name, ExpandResult};
use la_arena::{Arena, ArenaMap};
use rustc_hash::FxHashSet;
use rustc_pattern_analysis::Captures;
-use smallvec::SmallVec;
use stdx::{impl_from, never};
use syntax::ast;
use triomphe::{Arc, ThinArc};
@@ -62,18 +60,19 @@ use crate::{
db::HirDatabase,
error_lifetime,
generics::{generics, trait_self_param_idx, Generics},
- lower::diagnostics::*,
+ lower::{
+ diagnostics::*,
+ path::{PathDiagnosticCallback, PathLoweringContext},
+ },
make_binders,
mapping::{from_chalk_trait_id, lt_to_placeholder_idx, ToChalk},
- static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
- utils::{
- all_super_trait_refs, associated_type_by_name_including_super_traits, InTypeConstIdMetadata,
- },
- AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy,
- FnAbi, FnPointer, FnSig, FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime,
- LifetimeData, LifetimeOutlives, ParamKind, PolyFnSig, ProgramClause, ProjectionTy,
- QuantifiedWhereClause, QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef,
- TraitRefExt, Ty, TyBuilder, TyKind, WhereClause,
+ static_lifetime, to_chalk_trait_id, to_placeholder_idx,
+ utils::{all_super_trait_refs, InTypeConstIdMetadata},
+ AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy, FnAbi,
+ FnPointer, FnSig, FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime,
+ LifetimeData, LifetimeOutlives, ParamKind, PolyFnSig, ProgramClause, QuantifiedWhereClause,
+ QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder,
+ TyKind, WhereClause,
};
#[derive(Debug, Default)]
@@ -106,6 +105,8 @@ impl ImplTraitLoweringState {
}
}
+pub(crate) struct PathDiagnosticCallbackData(TypeRefId);
+
#[derive(Debug)]
pub struct TyLoweringContext<'a> {
pub db: &'a dyn HirDatabase,
@@ -527,9 +528,8 @@ impl<'a> TyLoweringContext<'a> {
if path.segments().len() > 1 {
return None;
}
- let resolution = match self
- .resolve_path_in_type_ns(path, &mut Self::on_path_diagnostic_callback(type_ref_id))
- {
+ let mut ctx = self.at_path(PathId::from_type_ref_unchecked(type_ref_id));
+ let resolution = match ctx.resolve_path_in_type_ns() {
Some((it, None)) => it,
_ => return None,
};
@@ -539,409 +539,36 @@ impl<'a> TyLoweringContext<'a> {
}
}
- pub(crate) fn lower_ty_relative_path(
- &mut self,
- ty: Ty,
- // We need the original resolution to lower `Self::AssocTy` correctly
- res: Option<TypeNs>,
- remaining_segments: PathSegments<'_>,
- ) -> (Ty, Option<TypeNs>) {
- match remaining_segments.len() {
- 0 => (ty, res),
- 1 => {
- // resolve unselected assoc types
- let segment = remaining_segments.first().unwrap();
- (self.select_associated_type(res, segment), None)
- }
- _ => {
- // FIXME report error (ambiguous associated type)
- (TyKind::Error.intern(Interner), None)
- }
- }
- }
-
- pub(crate) fn lower_partly_resolved_path(
- &mut self,
- resolution: TypeNs,
- resolved_segment: PathSegment<'_>,
- remaining_segments: PathSegments<'_>,
- _resolved_segment_idx: u32,
- infer_args: bool,
- _on_diagnostic: &mut dyn FnMut(&mut Self, PathLoweringDiagnostic),
- ) -> (Ty, Option<TypeNs>) {
- let ty = match resolution {
- TypeNs::TraitId(trait_) => {
- let ty = match remaining_segments.len() {
- 1 => {
- let trait_ref = self.lower_trait_ref_from_resolved_path(
- trait_,
- resolved_segment,
- TyKind::Error.intern(Interner),
- );
- let segment = remaining_segments.first().unwrap();
- let found = self
- .db
- .trait_data(trait_ref.hir_trait_id())
- .associated_type_by_name(segment.name);
-
- match found {
- Some(associated_ty) => {
- // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
- // generic params. It's inefficient to splice the `Substitution`s, so we may want
- // that method to optionally take parent `Substitution` as we already know them at
- // this point (`trait_ref.substitution`).
- let substitution = self.substs_from_path_segment(
- segment,
- Some(associated_ty.into()),
- false,
- None,
- );
- let len_self =
- generics(self.db.upcast(), associated_ty.into()).len_self();
- let substitution = Substitution::from_iter(
- Interner,
- substitution
- .iter(Interner)
- .take(len_self)
- .chain(trait_ref.substitution.iter(Interner)),
- );
- TyKind::Alias(AliasTy::Projection(ProjectionTy {
- associated_ty_id: to_assoc_type_id(associated_ty),
- substitution,
- }))
- .intern(Interner)
- }
- None => {
- // FIXME: report error (associated type not found)
- TyKind::Error.intern(Interner)
- }
- }
- }
- 0 => {
- // Trait object type without dyn; this should be handled in upstream. See
- // `lower_path()`.
- stdx::never!("unexpected fully resolved trait path");
- TyKind::Error.intern(Interner)
- }
- _ => {
- // FIXME report error (ambiguous associated type)
- TyKind::Error.intern(Interner)
- }
- };
- return (ty, None);
- }
- TypeNs::TraitAliasId(_) => {
- // FIXME(trait_alias): Implement trait alias.
- return (TyKind::Error.intern(Interner), None);
- }
- TypeNs::GenericParam(param_id) => match self.type_param_mode {
- ParamLoweringMode::Placeholder => {
- TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into()))
- }
- ParamLoweringMode::Variable => {
- let idx = match self
- .generics()
- .expect("generics in scope")
- .type_or_const_param_idx(param_id.into())
- {
- None => {
- never!("no matching generics");
- return (TyKind::Error.intern(Interner), None);
- }
- Some(idx) => idx,
- };
-
- TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
- }
- }
- .intern(Interner),
- TypeNs::SelfType(impl_id) => {
- let generics = self.generics().expect("impl should have generic param scope");
-
- match self.type_param_mode {
- ParamLoweringMode::Placeholder => {
- // `def` can be either impl itself or item within, and we need impl itself
- // now.
- let generics = generics.parent_or_self();
- let subst = generics.placeholder_subst(self.db);
- self.db.impl_self_ty(impl_id).substitute(Interner, &subst)
- }
- ParamLoweringMode::Variable => {
- let starting_from = match generics.def() {
- GenericDefId::ImplId(_) => 0,
- // `def` is an item within impl. We need to substitute `BoundVar`s but
- // remember that they are for parent (i.e. impl) generic params so they
- // come after our own params.
- _ => generics.len_self(),
- };
- TyBuilder::impl_self_ty(self.db, impl_id)
- .fill_with_bound_vars(self.in_binders, starting_from)
- .build()
- }
- }
- }
- TypeNs::AdtSelfType(adt) => {
- let generics = generics(self.db.upcast(), adt.into());
- let substs = match self.type_param_mode {
- ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db),
- ParamLoweringMode::Variable => {
- generics.bound_vars_subst(self.db, self.in_binders)
- }
- };
- self.db.ty(adt.into()).substitute(Interner, &substs)
- }
-
- TypeNs::AdtId(it) => self.lower_path_inner(resolved_segment, it.into(), infer_args),
- TypeNs::BuiltinType(it) => {
- self.lower_path_inner(resolved_segment, it.into(), infer_args)
- }
- TypeNs::TypeAliasId(it) => {
- self.lower_path_inner(resolved_segment, it.into(), infer_args)
- }
- // FIXME: report error
- TypeNs::EnumVariantId(_) => return (TyKind::Error.intern(Interner), None),
- };
- self.lower_ty_relative_path(ty, Some(resolution), remaining_segments)
- }
-
- fn handle_type_ns_resolution(
- &mut self,
- resolution: &TypeNs,
- resolved_segment: PathSegment<'_>,
- resolved_segment_idx: usize,
- on_diagnostic: &mut dyn FnMut(&mut Self, PathLoweringDiagnostic),
- ) {
- let mut prohibit_generics_on_resolved = |reason| {
- if resolved_segment.args_and_bindings.is_some() {
- on_diagnostic(
- self,
- PathLoweringDiagnostic::GenericArgsProhibited {
- segment: resolved_segment_idx as u32,
- reason,
- },
- );
- }
- };
-
- match resolution {
- TypeNs::SelfType(_) => {
- prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy)
- }
- TypeNs::GenericParam(_) => {
- prohibit_generics_on_resolved(GenericArgsProhibitedReason::TyParam)
- }
- TypeNs::AdtSelfType(_) => {
- prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy)
- }
- TypeNs::BuiltinType(_) => {
- prohibit_generics_on_resolved(GenericArgsProhibitedReason::PrimitiveTy)
- }
- TypeNs::AdtId(_)
- | TypeNs::EnumVariantId(_)
- | TypeNs::TypeAliasId(_)
- | TypeNs::TraitId(_)
- | TypeNs::TraitAliasId(_) => {}
- }
- }
-
- pub(crate) fn resolve_path_in_type_ns_fully(
- &mut self,
- path: &Path,
- on_diagnostic: &mut dyn FnMut(&mut Self, PathLoweringDiagnostic),
- ) -> Option<TypeNs> {
- let (res, unresolved) = self.resolve_path_in_type_ns(path, on_diagnostic)?;
- if unresolved.is_some() {
- return None;
- }
- Some(res)
- }
-
- pub(crate) fn resolve_path_in_type_ns(
- &mut self,
- path: &Path,
- on_diagnostic: &mut dyn FnMut(&mut Self, PathLoweringDiagnostic),
- ) -> Option<(TypeNs, Option<usize>)> {
- let (resolution, remaining_index, _, prefix_info) =
- self.resolver.resolve_path_in_type_ns_with_prefix_info(self.db.upcast(), path)?;
- let segments = path.segments();
-
- match path {
- // `segments.is_empty()` can occur with `self`.
- Path::Normal(..) if !segments.is_empty() => (),
- _ => return Some((resolution, remaining_index)),
- };
-
- let (module_segments, resolved_segment_idx, enum_segment) = match remaining_index {
- None if prefix_info.enum_variant => {
- (segments.strip_last_two(), segments.len() - 1, Some(segments.len() - 2))
- }
- None => (segments.strip_last(), segments.len() - 1, None),
- Some(i) => (segments.take(i - 1), i - 1, None),
- };
-
- for (i, mod_segment) in module_segments.iter().enumerate() {
- if mod_segment.args_and_bindings.is_some() {
- on_diagnostic(
- self,
- PathLoweringDiagnostic::GenericArgsProhibited {
- segment: i as u32,
- reason: GenericArgsProhibitedReason::Module,
- },
- );
- }
- }
-
- if let Some(enum_segment) = enum_segment {
- if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some())
- && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some())
- {
- on_diagnostic(
- self,
- PathLoweringDiagnostic::GenericArgsProhibited {
- segment: (enum_segment + 1) as u32,
- reason: GenericArgsProhibitedReason::EnumVariant,
- },
- );
- }
- }
-
- self.handle_type_ns_resolution(
- &resolution,
- segments.get(resolved_segment_idx).expect("should have resolved segment"),
- resolved_segment_idx,
- on_diagnostic,
- );
-
- Some((resolution, remaining_index))
- }
-
- pub(crate) fn resolve_path_in_value_ns(
- &mut self,
- path: &Path,
- hygiene_id: HygieneId,
- on_diagnostic: &mut dyn FnMut(&mut Self, PathLoweringDiagnostic),
- ) -> Option<ResolveValueResult> {
- let (res, prefix_info) = self.resolver.resolve_path_in_value_ns_with_prefix_info(
- self.db.upcast(),
- path,
- hygiene_id,
- )?;
-
- let segments = path.segments();
- match path {
- // `segments.is_empty()` can occur with `self`.
- Path::Normal(..) if !segments.is_empty() => (),
- _ => return Some(res),
- };
-
- let (mod_segments, enum_segment) = match res {
- ResolveValueResult::Partial(_, unresolved_segment, _) => {
- (segments.take(unresolved_segment - 1), None)
- }
- ResolveValueResult::ValueNs(ValueNs::EnumVariantId(_), _)
- if prefix_info.enum_variant =>
- {
- (segments.strip_last_two(), segments.len().checked_sub(2))
- }
- ResolveValueResult::ValueNs(..) => (segments.strip_last(), None),
- };
- for (i, mod_segment) in mod_segments.iter().enumerate() {
- if mod_segment.args_and_bindings.is_some() {
- on_diagnostic(
- self,
- PathLoweringDiagnostic::GenericArgsProhibited {
- segment: i as u32,
- reason: GenericArgsProhibitedReason::Module,
- },
- );
- }
- }
-
- if let Some(enum_segment) = enum_segment {
- if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some())
- && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some())
- {
- on_diagnostic(
- self,
- PathLoweringDiagnostic::GenericArgsProhibited {
- segment: (enum_segment + 1) as u32,
- reason: GenericArgsProhibitedReason::EnumVariant,
- },
- );
- }
+ #[inline]
+ fn on_path_diagnostic_callback(type_ref: TypeRefId) -> PathDiagnosticCallback<'static> {
+ PathDiagnosticCallback {
+ data: Either::Left(PathDiagnosticCallbackData(type_ref)),
+ callback: |data, this, diag| {
+ let type_ref = data.as_ref().left().unwrap().0;
+ this.push_diagnostic(type_ref, TyLoweringDiagnosticKind::PathDiagnostic(diag))
+ },
}
-
- match &res {
- ResolveValueResult::ValueNs(resolution, _) => {
- let resolved_segment_idx =
- segments.len().checked_sub(1).unwrap_or_else(|| panic!("{path:?}"));
- let resolved_segment = segments.last().unwrap();
-
- let mut prohibit_generics_on_resolved = |reason| {
- if resolved_segment.args_and_bindings.is_some() {
- on_diagnostic(
- self,
- PathLoweringDiagnostic::GenericArgsProhibited {
- segment: resolved_segment_idx as u32,
- reason,
- },
- );
- }
- };
-
- match resolution {
- ValueNs::ImplSelf(_) => {
- prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy)
- }
- // FIXME: rustc generates E0107 (incorrect number of generic arguments) and not
- // E0109 (generic arguments provided for a type that doesn't accept them) for
- // consts and statics, presumably as a defense against future in which consts
- // and statics can be generic, or just because it was easier for rustc implementors.
- // That means we'll show the wrong error code. Because of us it's easier to do it
- // this way :)
- ValueNs::GenericParam(_) | ValueNs::ConstId(_) => {
- prohibit_generics_on_resolved(GenericArgsProhibitedReason::Const)
- }
- ValueNs::StaticId(_) => {
- prohibit_generics_on_resolved(GenericArgsProhibitedReason::Static)
- }
- ValueNs::FunctionId(_) | ValueNs::StructId(_) | ValueNs::EnumVariantId(_) => {}
- ValueNs::LocalBinding(_) => {}
- }
- }
- ResolveValueResult::Partial(resolution, unresolved_idx, _) => {
- let resolved_segment_idx = unresolved_idx - 1;
- let resolved_segment = segments.get(resolved_segment_idx).unwrap();
- self.handle_type_ns_resolution(
- resolution,
- resolved_segment,
- resolved_segment_idx,
- on_diagnostic,
- );
- }
- };
- Some(res)
}
- fn on_path_diagnostic_callback(
- type_ref: TypeRefId,
- ) -> impl FnMut(&mut Self, PathLoweringDiagnostic) {
- move |this, diag| {
- this.push_diagnostic(type_ref, TyLoweringDiagnosticKind::PathDiagnostic(diag))
- }
+ #[inline]
+ fn at_path(&mut self, path_id: PathId) -> PathLoweringContext<'_, 'a> {
+ PathLoweringContext::new(
+ self,
+ Self::on_path_diagnostic_callback(path_id.type_ref()),
+ &self.types_map[path_id],
+ )
}
pub(crate) fn lower_path(&mut self, path: &Path, path_id: PathId) -> (Ty, Option<TypeNs>) {
// Resolve the path (in type namespace)
if let Some(type_ref) = path.type_anchor() {
let (ty, res) = self.lower_ty_ext(type_ref);
- return self.lower_ty_relative_path(ty, res, path.segments());
+ let mut ctx = self.at_path(path_id);
+ return ctx.lower_ty_relative_path(ty, res);
}
- let (resolution, remaining_index) = match self.resolve_path_in_type_ns(
- path,
- &mut Self::on_path_diagnostic_callback(path_id.type_ref()),
- ) {
+ let mut ctx = self.at_path(path_id);
+ let (resolution, remaining_index) = match ctx.resolve_path_in_type_ns() {
Some(it) => it,
None => return (TyKind::Error.intern(Interner), None),
};
@@ -953,354 +580,21 @@ impl<'a> TyLoweringContext<'a> {
return (ty, None);
}
- let (resolved_segment_idx, resolved_segment, remaining_segments) = match remaining_index {
- None => (
- path.segments().len() - 1,
- path.segments().last().expect("resolved path has at least one element"),
- PathSegments::EMPTY,
- ),
- Some(i) => (i - 1, path.segments().get(i - 1).unwrap(), path.segments().skip(i)),
- };
-
- self.lower_partly_resolved_path(
- resolution,
- resolved_segment,
- remaining_segments,
- resolved_segment_idx as u32,
- false,
- &mut Self::on_path_diagnostic_callback(path_id.type_ref()),
- )
- }
-
- fn select_associated_type(&mut self, res: Option<TypeNs>, segment: PathSegment<'_>) -> Ty {
- let Some((generics, res)) = self.generics().zip(res) else {
- return TyKind::Error.intern(Interner);
- };
- let ty = named_associated_type_shorthand_candidates(
- self.db,
- generics.def(),
- res,
- Some(segment.name.clone()),
- move |name, t, associated_ty| {
- let generics = self.generics().unwrap();
-
- if name != segment.name {
- return None;
- }
-
- let parent_subst = t.substitution.clone();
- let parent_subst = match self.type_param_mode {
- ParamLoweringMode::Placeholder => {
- // if we're lowering to placeholders, we have to put them in now.
- let s = generics.placeholder_subst(self.db);
- s.apply(parent_subst, Interner)
- }
- ParamLoweringMode::Variable => {
- // We need to shift in the bound vars, since
- // `named_associated_type_shorthand_candidates` does not do that.
- parent_subst.shifted_in_from(Interner, self.in_binders)
- }
- };
-
- // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
- // generic params. It's inefficient to splice the `Substitution`s, so we may want
- // that method to optionally take parent `Substitution` as we already know them at
- // this point (`t.substitution`).
- let substs =
- self.substs_from_path_segment(segment, Some(associated_ty.into()), false, None);
-
- let len_self =
- crate::generics::generics(self.db.upcast(), associated_ty.into()).len_self();
-
- let substs = Substitution::from_iter(
- Interner,
- substs.iter(Interner).take(len_self).chain(parent_subst.iter(Interner)),
- );
-
- Some(
- TyKind::Alias(AliasTy::Projection(ProjectionTy {
- associated_ty_id: to_assoc_type_id(associated_ty),
- substitution: substs,
- }))
- .intern(Interner),
- )
- },
- );
-
- ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
- }
-
- fn lower_path_inner(
- &mut self,
- segment: PathSegment<'_>,
- typeable: TyDefId,
- infer_args: bool,
- ) -> Ty {
- let generic_def = match typeable {
- TyDefId::BuiltinType(_) => None,
- TyDefId::AdtId(it) => Some(it.into()),
- TyDefId::TypeAliasId(it) => Some(it.into()),
- };
- let substs = self.substs_from_path_segment(segment, generic_def, infer_args, None);
- self.db.ty(typeable).substitute(Interner, &substs)
- }
-
- /// Collect generic arguments from a path into a `Substs`. See also
- /// `create_substs_for_ast_path` and `def_to_ty` in rustc.
- pub(super) fn substs_from_path(
- &mut self,
- path: &Path,
- // Note that we don't call `db.value_type(resolved)` here,
- // `ValueTyDefId` is just a convenient way to pass generics and
- // special-case enum variants
- resolved: ValueTyDefId,
- infer_args: bool,
- ) -> Substitution {
- let last = path.segments().last();
- let (segment, generic_def) = match resolved {
- ValueTyDefId::FunctionId(it) => (last, Some(it.into())),
- ValueTyDefId::StructId(it) => (last, Some(it.into())),
- ValueTyDefId::UnionId(it) => (last, Some(it.into())),
- ValueTyDefId::ConstId(it) => (last, Some(it.into())),
- ValueTyDefId::StaticId(_) => (last, None),
- ValueTyDefId::EnumVariantId(var) => {
- // the generic args for an enum variant may be either specified
- // on the segment referring to the enum, or on the segment
- // referring to the variant. So `Option::<T>::None` and
- // `Option::None::<T>` are both allowed (though the former is
- // preferred). See also `def_ids_for_path_segments` in rustc.
- let len = path.segments().len();
- let penultimate = len.checked_sub(2).and_then(|idx| path.segments().get(idx));
- let segment = match penultimate {
- Some(segment) if segment.args_and_bindings.is_some() => Some(segment),
- _ => last,
- };
- (segment, Some(var.lookup(self.db.upcast()).parent.into()))
- }
- };
- if let Some(segment) = segment {
- self.substs_from_path_segment(segment, generic_def, infer_args, None)
- } else if let Some(generic_def) = generic_def {
- // lang item
- self.substs_from_args_and_bindings(None, Some(generic_def), infer_args, None)
- } else {
- Substitution::empty(Interner)
- }
- }
-
- pub(super) fn substs_from_path_segment(
- &mut self,
- segment: PathSegment<'_>,
- def: Option<GenericDefId>,
- infer_args: bool,
- explicit_self_ty: Option<Ty>,
- ) -> Substitution {
- self.substs_from_args_and_bindings(
- segment.args_and_bindings,
- def,
- infer_args,
- explicit_self_ty,
- )
- }
-
- fn substs_from_args_and_bindings(
- &mut self,
- args_and_bindings: Option<&GenericArgs>,
- def: Option<GenericDefId>,
- infer_args: bool,
- explicit_self_ty: Option<Ty>,
- ) -> Substitution {
- let Some(def) = def else { return Substitution::empty(Interner) };
-
- // Order is
- // - Optional Self parameter
- // - Lifetime parameters
- // - Type or Const parameters
- // - Parent parameters
- let def_generics = generics(self.db.upcast(), def);
- let (
- parent_params,
- self_param,
- type_params,
- const_params,
- impl_trait_params,
- lifetime_params,
- ) = def_generics.provenance_split();
- let item_len =
- self_param as usize + type_params + const_params + impl_trait_params + lifetime_params;
- let total_len = parent_params + item_len;
-
- let mut substs = Vec::new();
-
- // we need to iterate the lifetime and type/const params separately as our order of them
- // differs from the supplied syntax
-
- let ty_error = || TyKind::Error.intern(Interner).cast(Interner);
- let mut def_toc_iter = def_generics.iter_self_type_or_consts_id();
- let fill_self_param = || {
- if self_param {
- let self_ty = explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(ty_error);
-
- if let Some(id) = def_toc_iter.next() {
- assert!(matches!(id, GenericParamId::TypeParamId(_)));
- substs.push(self_ty);
- }
- }
- };
- let mut had_explicit_args = false;
-
- if let Some(&GenericArgs { ref args, has_self_type, .. }) = args_and_bindings {
- // Fill in the self param first
- if has_self_type && self_param {
- had_explicit_args = true;
- if let Some(id) = def_toc_iter.next() {
- assert!(matches!(id, GenericParamId::TypeParamId(_)));
- had_explicit_args = true;
- if let GenericArg::Type(ty) = &args[0] {
- substs.push(self.lower_ty(*ty).cast(Interner));
- }
- }
- } else {
- fill_self_param()
- };
-
- // Then fill in the supplied lifetime args, or error lifetimes if there are too few
- // (default lifetimes aren't a thing)
- for arg in args
- .iter()
- .filter_map(|arg| match arg {
- GenericArg::Lifetime(arg) => Some(self.lower_lifetime(arg)),
- _ => None,
- })
- .chain(iter::repeat(error_lifetime()))
- .take(lifetime_params)
- {
- substs.push(arg.cast(Interner));
- }
-
- let skip = if has_self_type { 1 } else { 0 };
- // Fill in supplied type and const args
- // Note if non-lifetime args are provided, it should be all of them, but we can't rely on that
- for (arg, id) in args
- .iter()
- .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
- .skip(skip)
- .take(type_params + const_params)
- .zip(def_toc_iter)
- {
- had_explicit_args = true;
- let arg = generic_arg_to_chalk(
- self.db,
- id,
- arg,
- self,
- self.types_map,
- |this, type_ref| this.lower_ty(type_ref),
- |this, const_ref, ty| this.lower_const(const_ref, ty),
- |this, lifetime_ref| this.lower_lifetime(lifetime_ref),
- );
- substs.push(arg);
- }
- } else {
- fill_self_param();
- }
-
- let param_to_err = |id| match id {
- GenericParamId::ConstParamId(x) => unknown_const_as_generic(self.db.const_param_ty(x)),
- GenericParamId::TypeParamId(_) => ty_error(),
- GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner),
- };
- // handle defaults. In expression or pattern path segments without
- // explicitly specified type arguments, missing type arguments are inferred
- // (i.e. defaults aren't used).
- // Generic parameters for associated types are not supposed to have defaults, so we just
- // ignore them.
- let is_assoc_ty = || match def {
- GenericDefId::TypeAliasId(id) => {
- matches!(id.lookup(self.db.upcast()).container, ItemContainerId::TraitId(_))
- }
- _ => false,
- };
- let fill_defaults = (!infer_args || had_explicit_args) && !is_assoc_ty();
- if fill_defaults {
- let defaults = &*self.db.generic_defaults(def);
- let (item, _parent) = defaults.split_at(item_len);
- let parent_from = item_len - substs.len();
-
- let mut rem =
- def_generics.iter_id().skip(substs.len()).map(param_to_err).collect::<Vec<_>>();
- // Fill in defaults for type/const params
- for (idx, default_ty) in item[substs.len()..].iter().enumerate() {
- // each default can depend on the previous parameters
- let substs_so_far = Substitution::from_iter(
- Interner,
- substs.iter().cloned().chain(rem[idx..].iter().cloned()),
- );
- substs.push(default_ty.clone().substitute(Interner, &substs_so_far));
- }
- // Fill in remaining parent params
- substs.extend(rem.drain(parent_from..));
- } else {
- // Fill in remaining def params and parent params
- substs.extend(def_generics.iter_id().skip(substs.len()).map(param_to_err));
- }
-
- assert_eq!(substs.len(), total_len, "expected {} substs, got {}", total_len, substs.len());
- Substitution::from_iter(Interner, substs)
- }
-
- pub(crate) fn lower_trait_ref_from_resolved_path(
- &mut self,
- resolved: TraitId,
- segment: PathSegment<'_>,
- explicit_self_ty: Ty,
- ) -> TraitRef {
- let substs = self.trait_ref_substs_from_path(segment, resolved, explicit_self_ty);
- TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs }
- }
-
- fn prohibit_generics(
- &mut self,
- path_id: PathId,
- idx: u32,
- segments: PathSegments<'_>,
- reason: GenericArgsProhibitedReason,
- ) {
- segments.iter().zip(idx..).for_each(|(segment, idx)| {
- if segment.args_and_bindings.is_some() {
- self.push_diagnostic(
- path_id.type_ref(),
- TyLoweringDiagnosticKind::PathDiagnostic(
- PathLoweringDiagnostic::GenericArgsProhibited { segment: idx, reason },
- ),
- );
- }
- });
+ ctx.lower_partly_resolved_path(resolution, false)
}
fn lower_trait_ref_from_path(
&mut self,
path_id: PathId,
explicit_self_ty: Ty,
- ) -> Option<TraitRef> {
- let path = &self.types_map[path_id];
- let resolved = match self.resolve_path_in_type_ns_fully(
- path,
- &mut Self::on_path_diagnostic_callback(path_id.type_ref()),
- )? {
+ ) -> Option<(TraitRef, PathLoweringContext<'_, 'a>)> {
+ let mut ctx = self.at_path(path_id);
+ let resolved = match ctx.resolve_path_in_type_ns_fully()? {
// FIXME(trait_alias): We need to handle trait alias here.
TypeNs::TraitId(tr) => tr,
_ => return None,
};
- // Do this after we verify it's indeed a trait to not confuse the user if they're not modules.
- self.prohibit_generics(
- path_id,
- 0,
- path.segments().strip_last(),
- GenericArgsProhibitedReason::Module,
- );
- let segment = path.segments().last().expect("path should have at least one segment");
- Some(self.lower_trait_ref_from_resolved_path(resolved, segment, explicit_self_ty))
+ Some((ctx.lower_trait_ref_from_resolved_path(resolved, explicit_self_ty), ctx))
}
fn lower_trait_ref(
@@ -1308,16 +602,7 @@ impl<'a> TyLoweringContext<'a> {
trait_ref: &HirTraitRef,
explicit_self_ty: Ty,
) -> Option<TraitRef> {
- self.lower_trait_ref_from_path(trait_ref.path, explicit_self_ty)
- }
-
- fn trait_ref_substs_from_path(
- &mut self,
- segment: PathSegment<'_>,
- resolved: TraitId,
- explicit_self_ty: Ty,
- ) -> Substitution {
- self.substs_from_path_segment(segment, Some(resolved.into()), false, Some(explicit_self_ty))
+ self.lower_trait_ref_from_path(trait_ref.path, explicit_self_ty).map(|it| it.0)
}
pub(crate) fn lower_where_predicate<'b>(
@@ -1365,11 +650,18 @@ impl<'a> TyLoweringContext<'a> {
self_ty: Ty,
ignore_bindings: bool,
) -> impl Iterator<Item = QuantifiedWhereClause> + use<'b, 'a> {
- let mut trait_ref = None;
- let clause = match bound {
- &TypeBound::Path(path, TraitBoundModifier::None) => {
- trait_ref = self.lower_trait_ref_from_path(path, self_ty);
- trait_ref.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders)
+ let mut assoc_bounds = None;
+ let mut clause = None;
+ match bound {
+ &TypeBound::Path(path, TraitBoundModifier::None) | &TypeBound::ForLifetime(_, path) => {
+ // FIXME Don't silently drop the hrtb lifetimes here
+ if let Some((trait_ref, ctx)) = self.lower_trait_ref_from_path(path, self_ty) {
+ if !ignore_bindings {
+ assoc_bounds =
+ ctx.assoc_type_bindings_from_type_bound(bound, trait_ref.clone());
+ }
+ clause = Some(crate::wrap_empty_binders(WhereClause::Implemented(trait_ref)));
+ }
}
&TypeBound::Path(path, TraitBoundModifier::Maybe) => {
let sized_trait = self
@@ -1381,170 +673,21 @@ impl<'a> TyLoweringContext<'a> {
// If we got another trait here ignore the bound completely.
let trait_id = self
.lower_trait_ref_from_path(path, self_ty.clone())
- .map(|trait_ref| trait_ref.hir_trait_id());
+ .map(|(trait_ref, _)| trait_ref.hir_trait_id());
if trait_id == sized_trait {
self.unsized_types.insert(self_ty);
}
- None
- }
- &TypeBound::ForLifetime(_, path) => {
- // FIXME Don't silently drop the hrtb lifetimes here
- trait_ref = self.lower_trait_ref_from_path(path, self_ty);
- trait_ref.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders)
}
TypeBound::Lifetime(l) => {
let lifetime = self.lower_lifetime(l);
- Some(crate::wrap_empty_binders(WhereClause::TypeOutlives(TypeOutlives {
+ clause = Some(crate::wrap_empty_binders(WhereClause::TypeOutlives(TypeOutlives {
ty: self_ty,
lifetime,
- })))
+ })));
}
- TypeBound::Use(_) | TypeBound::Error => None,
- };
- clause.into_iter().chain(
- trait_ref
- .filter(move |_| !ignore_bindings)
- .map(move |tr| self.assoc_type_bindings_from_type_bound(bound, tr))
- .into_iter()
- .flatten(),
- )
- }
-
- fn assoc_type_bindings_from_type_bound<'b>(
- &'b mut self,
- bound: &'b TypeBound,
- trait_ref: TraitRef,
- ) -> impl Iterator<Item = QuantifiedWhereClause> + use<'b, 'a> {
- let last_segment = match bound {
- &TypeBound::Path(path, TraitBoundModifier::None) | &TypeBound::ForLifetime(_, path) => {
- self.types_map[path].segments().last()
- }
- TypeBound::Path(_, TraitBoundModifier::Maybe)
- | TypeBound::Use(_)
- | TypeBound::Error
- | TypeBound::Lifetime(_) => None,
- };
- last_segment
- .into_iter()
- .filter_map(|segment| segment.args_and_bindings)
- .flat_map(|args_and_bindings| args_and_bindings.bindings.iter())
- .flat_map(move |binding| {
- let found = associated_type_by_name_including_super_traits(
- self.db,
- trait_ref.clone(),
- &binding.name,
- );
- let (super_trait_ref, associated_ty) = match found {
- None => return SmallVec::new(),
- Some(t) => t,
- };
- // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
- // generic params. It's inefficient to splice the `Substitution`s, so we may want
- // that method to optionally take parent `Substitution` as we already know them at
- // this point (`super_trait_ref.substitution`).
- let substitution = self.substs_from_path_segment(
- // FIXME: This is hack. We shouldn't really build `PathSegment` directly.
- PathSegment { name: &binding.name, args_and_bindings: binding.args.as_ref() },
- Some(associated_ty.into()),
- false, // this is not relevant
- Some(super_trait_ref.self_type_parameter(Interner)),
- );
- let self_params = generics(self.db.upcast(), associated_ty.into()).len_self();
- let substitution = Substitution::from_iter(
- Interner,
- substitution
- .iter(Interner)
- .take(self_params)
- .chain(super_trait_ref.substitution.iter(Interner)),
- );
- let projection_ty = ProjectionTy {
- associated_ty_id: to_assoc_type_id(associated_ty),
- substitution,
- };
- let mut predicates: SmallVec<[_; 1]> = SmallVec::with_capacity(
- binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
- );
- if let Some(type_ref) = binding.type_ref {
- match (&self.types_map[type_ref], self.impl_trait_mode.mode) {
- (TypeRef::ImplTrait(_), ImplTraitLoweringMode::Disallowed) => (),
- (_, ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque) => {
- let ty = self.lower_ty(type_ref);
- let alias_eq =
- AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
- predicates
- .push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
- }
- (_, ImplTraitLoweringMode::Param | ImplTraitLoweringMode::Variable) => {
- // Find the generic index for the target of our `bound`
- let target_param_idx = self
- .resolver
- .where_predicates_in_scope()
- .find_map(|(p, _)| match p {
- WherePredicate::TypeBound {
- target: WherePredicateTypeTarget::TypeOrConstParam(idx),
- bound: b,
- } if b == bound => Some(idx),
- _ => None,
- });
- let ty = if let Some(target_param_idx) = target_param_idx {
- let mut counter = 0;
- let generics = self.generics().expect("generics in scope");
- for (idx, data) in generics.iter_self_type_or_consts() {
- // Count the number of `impl Trait` things that appear before
- // the target of our `bound`.
- // Our counter within `impl_trait_mode` should be that number
- // to properly lower each types within `type_ref`
- if data.type_param().is_some_and(|p| {
- p.provenance == TypeParamProvenance::ArgumentImplTrait
- }) {
- counter += 1;
- }
- if idx == *target_param_idx {
- break;
- }
- }
- let mut ext = TyLoweringContext::new_maybe_unowned(
- self.db,
- self.resolver,
- self.types_map,
- self.types_source_map,
- self.owner,
- )
- .with_type_param_mode(self.type_param_mode);
- match self.impl_trait_mode.mode {
- ImplTraitLoweringMode::Param => {
- ext.impl_trait_mode =
- ImplTraitLoweringState::param(counter);
- }
- ImplTraitLoweringMode::Variable => {
- ext.impl_trait_mode =
- ImplTraitLoweringState::variable(counter);
- }
- _ => unreachable!(),
- }
- let ty = ext.lower_ty(type_ref);
- self.diagnostics.extend(ext.diagnostics);
- ty
- } else {
- self.lower_ty(type_ref)
- };
-
- let alias_eq =
- AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
- predicates
- .push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
- }
- }
- }
- for bound in binding.bounds.iter() {
- predicates.extend(self.lower_type_bound(
- bound,
- TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner),
- false,
- ));
- }
- predicates
- })
+ TypeBound::Use(_) | TypeBound::Error => {}
+ }
+ clause.into_iter().chain(assoc_bounds.into_iter().flatten())
}
fn lower_dyn_trait(&mut self, bounds: &[TypeBound]) -> Ty {
@@ -2471,14 +1614,14 @@ pub enum ValueTyDefId {
impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId);
impl ValueTyDefId {
- pub(crate) fn to_generic_def_id(self, db: &dyn HirDatabase) -> Option<GenericDefId> {
+ pub(crate) fn to_generic_def_id(self, db: &dyn HirDatabase) -> GenericDefId {
match self {
- Self::FunctionId(id) => Some(id.into()),
- Self::StructId(id) => Some(id.into()),
- Self::UnionId(id) => Some(id.into()),
- Self::EnumVariantId(var) => Some(var.lookup(db.upcast()).parent.into()),
- Self::ConstId(id) => Some(id.into()),
- Self::StaticId(_) => None,
+ Self::FunctionId(id) => id.into(),
+ Self::StructId(id) => id.into(),
+ Self::UnionId(id) => id.into(),
+ Self::EnumVariantId(var) => var.lookup(db.upcast()).parent.into(),
+ Self::ConstId(id) => id.into(),
+ Self::StaticId(id) => id.into(),
}
}
}
diff --git a/crates/hir-ty/src/lower/diagnostics.rs b/crates/hir-ty/src/lower/diagnostics.rs
index 7fe196cdbb..5c77bcd073 100644
--- a/crates/hir-ty/src/lower/diagnostics.rs
+++ b/crates/hir-ty/src/lower/diagnostics.rs
@@ -26,11 +26,11 @@ pub enum GenericArgsProhibitedReason {
Static,
/// When there is a generic enum, within the expression `Enum::Variant`,
/// either `Enum` or `Variant` are allowed to have generic arguments, but not both.
- // FIXME: This is not used now but it should be.
EnumVariant,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum PathLoweringDiagnostic {
GenericArgsProhibited { segment: u32, reason: GenericArgsProhibitedReason },
+ ParenthesizedGenericArgsWithoutFnTrait { segment: u32 },
}
diff --git a/crates/hir-ty/src/lower/path.rs b/crates/hir-ty/src/lower/path.rs
new file mode 100644
index 0000000000..a165932ddc
--- /dev/null
+++ b/crates/hir-ty/src/lower/path.rs
@@ -0,0 +1,917 @@
+//! A wrapper around [`TyLoweringContext`] specifically for lowering paths.
+
+use std::iter;
+
+use chalk_ir::{cast::Cast, fold::Shift, BoundVar};
+use either::Either;
+use hir_def::{
+ data::TraitFlags,
+ expr_store::HygieneId,
+ generics::{TypeParamProvenance, WherePredicate, WherePredicateTypeTarget},
+ path::{GenericArg, GenericArgs, Path, PathSegment, PathSegments},
+ resolver::{ResolveValueResult, TypeNs, ValueNs},
+ type_ref::{TypeBound, TypeRef, TypesMap},
+ GenericDefId, GenericParamId, ItemContainerId, Lookup, TraitId,
+};
+use smallvec::SmallVec;
+use stdx::never;
+
+use crate::{
+ consteval::unknown_const_as_generic,
+ error_lifetime,
+ generics::generics,
+ lower::{
+ generic_arg_to_chalk, named_associated_type_shorthand_candidates, ImplTraitLoweringState,
+ },
+ to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
+ utils::associated_type_by_name_including_super_traits,
+ AliasEq, AliasTy, GenericArgsProhibitedReason, ImplTraitLoweringMode, Interner,
+ ParamLoweringMode, PathLoweringDiagnostic, ProjectionTy, QuantifiedWhereClause, Substitution,
+ TraitRef, Ty, TyBuilder, TyDefId, TyKind, TyLoweringContext, ValueTyDefId, WhereClause,
+};
+
+type CallbackData<'a> = Either<
+ super::PathDiagnosticCallbackData,
+ crate::infer::diagnostics::PathDiagnosticCallbackData<'a>,
+>;
+
+// We cannot use `&mut dyn FnMut()` because of lifetime issues, and we don't want to use `Box<dyn FnMut()>`
+// because of the allocation, so we create a lifetime-less callback, tailored for our needs.
+pub(crate) struct PathDiagnosticCallback<'a> {
+ pub(crate) data: CallbackData<'a>,
+ pub(crate) callback: fn(&CallbackData<'_>, &mut TyLoweringContext<'_>, PathLoweringDiagnostic),
+}
+
+pub(crate) struct PathLoweringContext<'a, 'b> {
+ ctx: &'a mut TyLoweringContext<'b>,
+ on_diagnostic: PathDiagnosticCallback<'a>,
+ path: &'a Path,
+ segments: PathSegments<'a>,
+ current_segment_idx: usize,
+ /// Contains the previous segment if `current_segment_idx == segments.len()`
+ current_or_prev_segment: PathSegment<'a>,
+}
+
+impl<'a, 'b> PathLoweringContext<'a, 'b> {
+ #[inline]
+ pub(crate) fn new(
+ ctx: &'a mut TyLoweringContext<'b>,
+ on_diagnostic: PathDiagnosticCallback<'a>,
+ path: &'a Path,
+ ) -> Self {
+ let segments = path.segments();
+ let first_segment = segments.first().unwrap_or(PathSegment::MISSING);
+ Self {
+ ctx,
+ on_diagnostic,
+ path,
+ segments,
+ current_segment_idx: 0,
+ current_or_prev_segment: first_segment,
+ }
+ }
+
+ #[inline]
+ #[cold]
+ fn on_diagnostic(&mut self, diag: PathLoweringDiagnostic) {
+ (self.on_diagnostic.callback)(&self.on_diagnostic.data, self.ctx, diag);
+ }
+
+ #[inline]
+ pub(crate) fn ty_ctx(&mut self) -> &mut TyLoweringContext<'b> {
+ self.ctx
+ }
+
+ #[inline]
+ fn current_segment_u32(&self) -> u32 {
+ self.current_segment_idx as u32
+ }
+
+ #[inline]
+ fn skip_resolved_segment(&mut self) {
+ if !matches!(self.path, Path::LangItem(..)) {
+            // In lang items, the resolved "segment" is not one of the segments. Perhaps we should've made it
+            // point at -1, but I don't feel this is clearer.
+ self.current_segment_idx += 1;
+ }
+ self.update_current_segment();
+ }
+
+ #[inline]
+ fn update_current_segment(&mut self) {
+ self.current_or_prev_segment =
+ self.segments.get(self.current_segment_idx).unwrap_or(self.current_or_prev_segment);
+ }
+
+ #[inline]
+ pub(crate) fn ignore_last_segment(&mut self) {
+ self.segments = self.segments.strip_last();
+ }
+
+ #[inline]
+ pub(crate) fn set_current_segment(&mut self, segment: usize) {
+ self.current_segment_idx = segment;
+ self.current_or_prev_segment = self
+ .segments
+ .get(segment)
+ .expect("invalid segment passed to PathLoweringContext::set_current_segment()");
+ }
+
+ pub(crate) fn lower_ty_relative_path(
+ &mut self,
+ ty: Ty,
+ // We need the original resolution to lower `Self::AssocTy` correctly
+ res: Option<TypeNs>,
+ ) -> (Ty, Option<TypeNs>) {
+ match self.segments.len() - self.current_segment_idx {
+ 0 => (ty, res),
+ 1 => {
+ // resolve unselected assoc types
+ (self.select_associated_type(res), None)
+ }
+ _ => {
+ // FIXME report error (ambiguous associated type)
+ (TyKind::Error.intern(Interner), None)
+ }
+ }
+ }
+
+ fn prohibit_parenthesized_generic_args(&mut self) -> bool {
+ if let Some(generic_args) = self.current_or_prev_segment.args_and_bindings {
+ if generic_args.desugared_from_fn {
+ let segment = self.current_segment_u32();
+ self.on_diagnostic(
+ PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment },
+ );
+ return true;
+ }
+ }
+ false
+ }
+
+ // When calling this, the current segment is the resolved segment (we don't advance it yet).
+ pub(crate) fn lower_partly_resolved_path(
+ &mut self,
+ resolution: TypeNs,
+ infer_args: bool,
+ ) -> (Ty, Option<TypeNs>) {
+ let remaining_segments = self.segments.skip(self.current_segment_idx + 1);
+
+ let ty = match resolution {
+ TypeNs::TraitId(trait_) => {
+ let ty = match remaining_segments.len() {
+ 1 => {
+ let trait_ref = self.lower_trait_ref_from_resolved_path(
+ trait_,
+ TyKind::Error.intern(Interner),
+ );
+
+ self.skip_resolved_segment();
+ let segment = self.current_or_prev_segment;
+ let found =
+ self.ctx.db.trait_data(trait_).associated_type_by_name(segment.name);
+
+ match found {
+ Some(associated_ty) => {
+ // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
+ // generic params. It's inefficient to splice the `Substitution`s, so we may want
+ // that method to optionally take parent `Substitution` as we already know them at
+ // this point (`trait_ref.substitution`).
+ let substitution = self.substs_from_path_segment(
+ associated_ty.into(),
+ false,
+ None,
+ );
+ let len_self =
+ generics(self.ctx.db.upcast(), associated_ty.into()).len_self();
+ let substitution = Substitution::from_iter(
+ Interner,
+ substitution
+ .iter(Interner)
+ .take(len_self)
+ .chain(trait_ref.substitution.iter(Interner)),
+ );
+ TyKind::Alias(AliasTy::Projection(ProjectionTy {
+ associated_ty_id: to_assoc_type_id(associated_ty),
+ substitution,
+ }))
+ .intern(Interner)
+ }
+ None => {
+ // FIXME: report error (associated type not found)
+ TyKind::Error.intern(Interner)
+ }
+ }
+ }
+ 0 => {
+ // Trait object type without dyn; this should be handled in upstream. See
+ // `lower_path()`.
+ stdx::never!("unexpected fully resolved trait path");
+ TyKind::Error.intern(Interner)
+ }
+ _ => {
+ // FIXME report error (ambiguous associated type)
+ TyKind::Error.intern(Interner)
+ }
+ };
+ return (ty, None);
+ }
+ TypeNs::TraitAliasId(_) => {
+ // FIXME(trait_alias): Implement trait alias.
+ return (TyKind::Error.intern(Interner), None);
+ }
+ TypeNs::GenericParam(param_id) => match self.ctx.type_param_mode {
+ ParamLoweringMode::Placeholder => {
+ TyKind::Placeholder(to_placeholder_idx(self.ctx.db, param_id.into()))
+ }
+ ParamLoweringMode::Variable => {
+ let idx = match self
+ .ctx
+ .generics()
+ .expect("generics in scope")
+ .type_or_const_param_idx(param_id.into())
+ {
+ None => {
+ never!("no matching generics");
+ return (TyKind::Error.intern(Interner), None);
+ }
+ Some(idx) => idx,
+ };
+
+ TyKind::BoundVar(BoundVar::new(self.ctx.in_binders, idx))
+ }
+ }
+ .intern(Interner),
+ TypeNs::SelfType(impl_id) => {
+ let generics = self.ctx.generics().expect("impl should have generic param scope");
+
+ match self.ctx.type_param_mode {
+ ParamLoweringMode::Placeholder => {
+ // `def` can be either impl itself or item within, and we need impl itself
+ // now.
+ let generics = generics.parent_or_self();
+ let subst = generics.placeholder_subst(self.ctx.db);
+ self.ctx.db.impl_self_ty(impl_id).substitute(Interner, &subst)
+ }
+ ParamLoweringMode::Variable => {
+ let starting_from = match generics.def() {
+ GenericDefId::ImplId(_) => 0,
+ // `def` is an item within impl. We need to substitute `BoundVar`s but
+ // remember that they are for parent (i.e. impl) generic params so they
+ // come after our own params.
+ _ => generics.len_self(),
+ };
+ TyBuilder::impl_self_ty(self.ctx.db, impl_id)
+ .fill_with_bound_vars(self.ctx.in_binders, starting_from)
+ .build()
+ }
+ }
+ }
+ TypeNs::AdtSelfType(adt) => {
+ let generics = generics(self.ctx.db.upcast(), adt.into());
+ let substs = match self.ctx.type_param_mode {
+ ParamLoweringMode::Placeholder => generics.placeholder_subst(self.ctx.db),
+ ParamLoweringMode::Variable => {
+ generics.bound_vars_subst(self.ctx.db, self.ctx.in_binders)
+ }
+ };
+ self.ctx.db.ty(adt.into()).substitute(Interner, &substs)
+ }
+
+ TypeNs::AdtId(it) => self.lower_path_inner(it.into(), infer_args),
+ TypeNs::BuiltinType(it) => self.lower_path_inner(it.into(), infer_args),
+ TypeNs::TypeAliasId(it) => self.lower_path_inner(it.into(), infer_args),
+ // FIXME: report error
+ TypeNs::EnumVariantId(_) => return (TyKind::Error.intern(Interner), None),
+ };
+
+ self.skip_resolved_segment();
+ self.lower_ty_relative_path(ty, Some(resolution))
+ }
+
+ fn handle_type_ns_resolution(&mut self, resolution: &TypeNs) {
+ let mut prohibit_generics_on_resolved = |reason| {
+ if self.current_or_prev_segment.args_and_bindings.is_some() {
+ let segment = self.current_segment_u32();
+ self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
+ segment,
+ reason,
+ });
+ }
+ };
+
+ match resolution {
+ TypeNs::SelfType(_) => {
+ prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy)
+ }
+ TypeNs::GenericParam(_) => {
+ prohibit_generics_on_resolved(GenericArgsProhibitedReason::TyParam)
+ }
+ TypeNs::AdtSelfType(_) => {
+ prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy)
+ }
+ TypeNs::BuiltinType(_) => {
+ prohibit_generics_on_resolved(GenericArgsProhibitedReason::PrimitiveTy)
+ }
+ TypeNs::AdtId(_)
+ | TypeNs::EnumVariantId(_)
+ | TypeNs::TypeAliasId(_)
+ | TypeNs::TraitId(_)
+ | TypeNs::TraitAliasId(_) => {}
+ }
+ }
+
+ pub(crate) fn resolve_path_in_type_ns_fully(&mut self) -> Option<TypeNs> {
+ let (res, unresolved) = self.resolve_path_in_type_ns()?;
+ if unresolved.is_some() {
+ return None;
+ }
+ Some(res)
+ }
+
+ pub(crate) fn resolve_path_in_type_ns(&mut self) -> Option<(TypeNs, Option<usize>)> {
+ let (resolution, remaining_index, _, prefix_info) = self
+ .ctx
+ .resolver
+ .resolve_path_in_type_ns_with_prefix_info(self.ctx.db.upcast(), self.path)?;
+
+ let segments = self.segments;
+ if segments.is_empty() || matches!(self.path, Path::LangItem(..)) {
+ // `segments.is_empty()` can occur with `self`.
+ return Some((resolution, remaining_index));
+ }
+
+ let (module_segments, resolved_segment_idx, enum_segment) = match remaining_index {
+ None if prefix_info.enum_variant => {
+ (segments.strip_last_two(), segments.len() - 1, Some(segments.len() - 2))
+ }
+ None => (segments.strip_last(), segments.len() - 1, None),
+ Some(i) => (segments.take(i - 1), i - 1, None),
+ };
+
+ self.current_segment_idx = resolved_segment_idx;
+ self.current_or_prev_segment =
+ segments.get(resolved_segment_idx).expect("should have resolved segment");
+
+ if matches!(self.path, Path::BarePath(..)) {
+ // Bare paths cannot have generics, so skip them as an optimization.
+ return Some((resolution, remaining_index));
+ }
+
+ for (i, mod_segment) in module_segments.iter().enumerate() {
+ if mod_segment.args_and_bindings.is_some() {
+ self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
+ segment: i as u32,
+ reason: GenericArgsProhibitedReason::Module,
+ });
+ }
+ }
+
+ if let Some(enum_segment) = enum_segment {
+ if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some())
+ && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some())
+ {
+ self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
+ segment: (enum_segment + 1) as u32,
+ reason: GenericArgsProhibitedReason::EnumVariant,
+ });
+ }
+ }
+
+ self.handle_type_ns_resolution(&resolution);
+
+ Some((resolution, remaining_index))
+ }
+
+ pub(crate) fn resolve_path_in_value_ns(
+ &mut self,
+ hygiene_id: HygieneId,
+ ) -> Option<ResolveValueResult> {
+ let (res, prefix_info) = self.ctx.resolver.resolve_path_in_value_ns_with_prefix_info(
+ self.ctx.db.upcast(),
+ self.path,
+ hygiene_id,
+ )?;
+
+ let segments = self.segments;
+ if segments.is_empty() || matches!(self.path, Path::LangItem(..)) {
+ // `segments.is_empty()` can occur with `self`.
+ return Some(res);
+ }
+
+ let (mod_segments, enum_segment, resolved_segment_idx) = match res {
+ ResolveValueResult::Partial(_, unresolved_segment, _) => {
+ (segments.take(unresolved_segment - 1), None, unresolved_segment - 1)
+ }
+ ResolveValueResult::ValueNs(ValueNs::EnumVariantId(_), _)
+ if prefix_info.enum_variant =>
+ {
+ (segments.strip_last_two(), segments.len().checked_sub(2), segments.len() - 1)
+ }
+ ResolveValueResult::ValueNs(..) => (segments.strip_last(), None, segments.len() - 1),
+ };
+
+ self.current_segment_idx = resolved_segment_idx;
+ self.current_or_prev_segment =
+ segments.get(resolved_segment_idx).expect("should have resolved segment");
+
+ for (i, mod_segment) in mod_segments.iter().enumerate() {
+ if mod_segment.args_and_bindings.is_some() {
+ self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
+ segment: i as u32,
+ reason: GenericArgsProhibitedReason::Module,
+ });
+ }
+ }
+
+ if let Some(enum_segment) = enum_segment {
+ if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some())
+ && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some())
+ {
+ self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
+ segment: (enum_segment + 1) as u32,
+ reason: GenericArgsProhibitedReason::EnumVariant,
+ });
+ }
+ }
+
+ match &res {
+ ResolveValueResult::ValueNs(resolution, _) => {
+ let resolved_segment_idx = self.current_segment_u32();
+ let resolved_segment = self.current_or_prev_segment;
+
+ let mut prohibit_generics_on_resolved = |reason| {
+ if resolved_segment.args_and_bindings.is_some() {
+ self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
+ segment: resolved_segment_idx,
+ reason,
+ });
+ }
+ };
+
+ match resolution {
+ ValueNs::ImplSelf(_) => {
+ prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy)
+ }
+ // FIXME: rustc generates E0107 (incorrect number of generic arguments) and not
+ // E0109 (generic arguments provided for a type that doesn't accept them) for
+ // consts and statics, presumably as a defense against future in which consts
+ // and statics can be generic, or just because it was easier for rustc implementors.
+            // That means we'll show the wrong error code. But for us it's easier to do it
+ // this way :)
+ ValueNs::GenericParam(_) | ValueNs::ConstId(_) => {
+ prohibit_generics_on_resolved(GenericArgsProhibitedReason::Const)
+ }
+ ValueNs::StaticId(_) => {
+ prohibit_generics_on_resolved(GenericArgsProhibitedReason::Static)
+ }
+ ValueNs::FunctionId(_) | ValueNs::StructId(_) | ValueNs::EnumVariantId(_) => {}
+ ValueNs::LocalBinding(_) => {}
+ }
+ }
+ ResolveValueResult::Partial(resolution, _, _) => {
+ self.handle_type_ns_resolution(resolution);
+ }
+ };
+ Some(res)
+ }
+
+ fn select_associated_type(&mut self, res: Option<TypeNs>) -> Ty {
+ let Some((generics, res)) = self.ctx.generics().zip(res) else {
+ return TyKind::Error.intern(Interner);
+ };
+ let segment = self.current_or_prev_segment;
+ let ty = named_associated_type_shorthand_candidates(
+ self.ctx.db,
+ generics.def(),
+ res,
+ Some(segment.name.clone()),
+ move |name, t, associated_ty| {
+ let generics = self.ctx.generics().unwrap();
+
+ if name != segment.name {
+ return None;
+ }
+
+ let parent_subst = t.substitution.clone();
+ let parent_subst = match self.ctx.type_param_mode {
+ ParamLoweringMode::Placeholder => {
+ // if we're lowering to placeholders, we have to put them in now.
+ let s = generics.placeholder_subst(self.ctx.db);
+ s.apply(parent_subst, Interner)
+ }
+ ParamLoweringMode::Variable => {
+ // We need to shift in the bound vars, since
+ // `named_associated_type_shorthand_candidates` does not do that.
+ parent_subst.shifted_in_from(Interner, self.ctx.in_binders)
+ }
+ };
+
+ // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
+ // generic params. It's inefficient to splice the `Substitution`s, so we may want
+ // that method to optionally take parent `Substitution` as we already know them at
+ // this point (`t.substitution`).
+ let substs = self.substs_from_path_segment(associated_ty.into(), false, None);
+
+ let len_self =
+ crate::generics::generics(self.ctx.db.upcast(), associated_ty.into())
+ .len_self();
+
+ let substs = Substitution::from_iter(
+ Interner,
+ substs.iter(Interner).take(len_self).chain(parent_subst.iter(Interner)),
+ );
+
+ Some(
+ TyKind::Alias(AliasTy::Projection(ProjectionTy {
+ associated_ty_id: to_assoc_type_id(associated_ty),
+ substitution: substs,
+ }))
+ .intern(Interner),
+ )
+ },
+ );
+
+ ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
+ }
+
+ fn lower_path_inner(&mut self, typeable: TyDefId, infer_args: bool) -> Ty {
+ let generic_def = match typeable {
+ TyDefId::BuiltinType(builtin) => return TyBuilder::builtin(builtin),
+ TyDefId::AdtId(it) => it.into(),
+ TyDefId::TypeAliasId(it) => it.into(),
+ };
+ let substs = self.substs_from_path_segment(generic_def, infer_args, None);
+ self.ctx.db.ty(typeable).substitute(Interner, &substs)
+ }
+
+ /// Collect generic arguments from a path into a `Substs`. See also
+ /// `create_substs_for_ast_path` and `def_to_ty` in rustc.
+ pub(crate) fn substs_from_path(
+ &mut self,
+ // Note that we don't call `db.value_type(resolved)` here,
+ // `ValueTyDefId` is just a convenient way to pass generics and
+ // special-case enum variants
+ resolved: ValueTyDefId,
+ infer_args: bool,
+ ) -> Substitution {
+ let prev_current_segment_idx = self.current_segment_idx;
+ let prev_current_segment = self.current_or_prev_segment;
+
+ let generic_def = match resolved {
+ ValueTyDefId::FunctionId(it) => it.into(),
+ ValueTyDefId::StructId(it) => it.into(),
+ ValueTyDefId::UnionId(it) => it.into(),
+ ValueTyDefId::ConstId(it) => it.into(),
+ ValueTyDefId::StaticId(_) => return Substitution::empty(Interner),
+ ValueTyDefId::EnumVariantId(var) => {
+                // the generic args for an enum variant may be either specified
+                // on the segment referring to the enum, or on the segment
+                // referring to the variant. So `Option::<T>::None` and
+                // `Option::None::<T>` are both allowed (though the former is
+                // preferred). See also `def_ids_for_path_segments` in rustc.
+                // FIXME: This isn't strictly correct, enum variants may be used not through the enum
+                // (via `use Enum::Variant`). The resolver returns whether they were, but we don't have its result
+                // available here. The worst that can happen is that we will show some confusing diagnostics to the user,
+                // if generics exist on the module and they don't match with the variant.
+ //
+ // `wrapping_sub(1)` will return a number which `get` will return None for if current_segment_idx<2.
+ // This simplifies the code a bit.
+ let penultimate_idx = self.current_segment_idx.wrapping_sub(1);
+ let penultimate = self.segments.get(penultimate_idx);
+ if let Some(penultimate) = penultimate {
+ if self.current_or_prev_segment.args_and_bindings.is_none()
+ && penultimate.args_and_bindings.is_some()
+ {
+ self.current_segment_idx = penultimate_idx;
+ self.current_or_prev_segment = penultimate;
+ }
+ }
+ var.lookup(self.ctx.db.upcast()).parent.into()
+ }
+ };
+ let result = self.substs_from_path_segment(generic_def, infer_args, None);
+ self.current_segment_idx = prev_current_segment_idx;
+ self.current_or_prev_segment = prev_current_segment;
+ result
+ }
+
+ pub(crate) fn substs_from_path_segment(
+ &mut self,
+ def: GenericDefId,
+ infer_args: bool,
+ explicit_self_ty: Option<Ty>,
+ ) -> Substitution {
+ let prohibit_parens = match def {
+ GenericDefId::TraitId(trait_) => {
+ let trait_data = self.ctx.db.trait_data(trait_);
+ !trait_data.flags.contains(TraitFlags::RUSTC_PAREN_SUGAR)
+ }
+ _ => true,
+ };
+ if prohibit_parens && self.prohibit_parenthesized_generic_args() {
+ return TyBuilder::unknown_subst(self.ctx.db, def);
+ }
+
+ self.substs_from_args_and_bindings(
+ self.current_or_prev_segment.args_and_bindings,
+ def,
+ infer_args,
+ explicit_self_ty,
+ )
+ }
+
+ pub(super) fn substs_from_args_and_bindings(
+ &mut self,
+ args_and_bindings: Option<&GenericArgs>,
+ def: GenericDefId,
+ infer_args: bool,
+ explicit_self_ty: Option<Ty>,
+ ) -> Substitution {
+ // Order is
+ // - Optional Self parameter
+ // - Lifetime parameters
+ // - Type or Const parameters
+ // - Parent parameters
+ let def_generics = generics(self.ctx.db.upcast(), def);
+ let (
+ parent_params,
+ self_param,
+ type_params,
+ const_params,
+ impl_trait_params,
+ lifetime_params,
+ ) = def_generics.provenance_split();
+ let item_len =
+ self_param as usize + type_params + const_params + impl_trait_params + lifetime_params;
+ let total_len = parent_params + item_len;
+
+ let mut substs = Vec::new();
+
+ // we need to iterate the lifetime and type/const params separately as our order of them
+ // differs from the supplied syntax
+
+ let ty_error = || TyKind::Error.intern(Interner).cast(Interner);
+ let mut def_toc_iter = def_generics.iter_self_type_or_consts_id();
+ let fill_self_param = || {
+ if self_param {
+ let self_ty = explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(ty_error);
+
+ if let Some(id) = def_toc_iter.next() {
+ assert!(matches!(id, GenericParamId::TypeParamId(_)));
+ substs.push(self_ty);
+ }
+ }
+ };
+ let mut had_explicit_args = false;
+
+ if let Some(&GenericArgs { ref args, has_self_type, .. }) = args_and_bindings {
+ // Fill in the self param first
+ if has_self_type && self_param {
+ had_explicit_args = true;
+ if let Some(id) = def_toc_iter.next() {
+ assert!(matches!(id, GenericParamId::TypeParamId(_)));
+ had_explicit_args = true;
+ if let GenericArg::Type(ty) = &args[0] {
+ substs.push(self.ctx.lower_ty(*ty).cast(Interner));
+ }
+ }
+ } else {
+ fill_self_param()
+ };
+
+ // Then fill in the supplied lifetime args, or error lifetimes if there are too few
+ // (default lifetimes aren't a thing)
+ for arg in args
+ .iter()
+ .filter_map(|arg| match arg {
+ GenericArg::Lifetime(arg) => Some(self.ctx.lower_lifetime(arg)),
+ _ => None,
+ })
+ .chain(iter::repeat(error_lifetime()))
+ .take(lifetime_params)
+ {
+ substs.push(arg.cast(Interner));
+ }
+
+ let skip = if has_self_type { 1 } else { 0 };
+ // Fill in supplied type and const args
+ // Note if non-lifetime args are provided, it should be all of them, but we can't rely on that
+ for (arg, id) in args
+ .iter()
+ .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
+ .skip(skip)
+ .take(type_params + const_params)
+ .zip(def_toc_iter)
+ {
+ had_explicit_args = true;
+ let arg = generic_arg_to_chalk(
+ self.ctx.db,
+ id,
+ arg,
+ self.ctx,
+ self.ctx.types_map,
+ |ctx, type_ref| ctx.lower_ty(type_ref),
+ |ctx, const_ref, ty| ctx.lower_const(const_ref, ty),
+ |ctx, lifetime_ref| ctx.lower_lifetime(lifetime_ref),
+ );
+ substs.push(arg);
+ }
+ } else {
+ fill_self_param();
+ }
+
+ let param_to_err = |id| match id {
+ GenericParamId::ConstParamId(x) => {
+ unknown_const_as_generic(self.ctx.db.const_param_ty(x))
+ }
+ GenericParamId::TypeParamId(_) => ty_error(),
+ GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner),
+ };
+ // handle defaults. In expression or pattern path segments without
+ // explicitly specified type arguments, missing type arguments are inferred
+ // (i.e. defaults aren't used).
+ // Generic parameters for associated types are not supposed to have defaults, so we just
+ // ignore them.
+ let is_assoc_ty = || match def {
+ GenericDefId::TypeAliasId(id) => {
+ matches!(id.lookup(self.ctx.db.upcast()).container, ItemContainerId::TraitId(_))
+ }
+ _ => false,
+ };
+ let fill_defaults = (!infer_args || had_explicit_args) && !is_assoc_ty();
+ if fill_defaults {
+ let defaults = &*self.ctx.db.generic_defaults(def);
+ let (item, _parent) = defaults.split_at(item_len);
+ let parent_from = item_len - substs.len();
+
+ let mut rem =
+ def_generics.iter_id().skip(substs.len()).map(param_to_err).collect::<Vec<_>>();
+ // Fill in defaults for type/const params
+ for (idx, default_ty) in item[substs.len()..].iter().enumerate() {
+ // each default can depend on the previous parameters
+ let substs_so_far = Substitution::from_iter(
+ Interner,
+ substs.iter().cloned().chain(rem[idx..].iter().cloned()),
+ );
+ substs.push(default_ty.clone().substitute(Interner, &substs_so_far));
+ }
+ // Fill in remaining parent params
+ substs.extend(rem.drain(parent_from..));
+ } else {
+ // Fill in remaining def params and parent params
+ substs.extend(def_generics.iter_id().skip(substs.len()).map(param_to_err));
+ }
+
+ assert_eq!(substs.len(), total_len, "expected {} substs, got {}", total_len, substs.len());
+ Substitution::from_iter(Interner, substs)
+ }
+
+ pub(crate) fn lower_trait_ref_from_resolved_path(
+ &mut self,
+ resolved: TraitId,
+ explicit_self_ty: Ty,
+ ) -> TraitRef {
+ let substs = self.trait_ref_substs_from_path(resolved, explicit_self_ty);
+ TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs }
+ }
+
+ fn trait_ref_substs_from_path(
+ &mut self,
+ resolved: TraitId,
+ explicit_self_ty: Ty,
+ ) -> Substitution {
+ self.substs_from_path_segment(resolved.into(), false, Some(explicit_self_ty))
+ }
+
+ pub(super) fn assoc_type_bindings_from_type_bound<'c>(
+ mut self,
+ bound: &'c TypeBound,
+ trait_ref: TraitRef,
+ ) -> Option<impl Iterator<Item = QuantifiedWhereClause> + use<'a, 'b, 'c>> {
+ self.current_or_prev_segment.args_and_bindings.map(|args_and_bindings| {
+ args_and_bindings.bindings.iter().flat_map(move |binding| {
+ let found = associated_type_by_name_including_super_traits(
+ self.ctx.db,
+ trait_ref.clone(),
+ &binding.name,
+ );
+ let (super_trait_ref, associated_ty) = match found {
+ None => return SmallVec::new(),
+ Some(t) => t,
+ };
+ // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
+ // generic params. It's inefficient to splice the `Substitution`s, so we may want
+ // that method to optionally take parent `Substitution` as we already know them at
+ // this point (`super_trait_ref.substitution`).
+ let substitution = self.substs_from_args_and_bindings(
+ binding.args.as_ref(),
+ associated_ty.into(),
+ false, // this is not relevant
+ Some(super_trait_ref.self_type_parameter(Interner)),
+ );
+ let self_params = generics(self.ctx.db.upcast(), associated_ty.into()).len_self();
+ let substitution = Substitution::from_iter(
+ Interner,
+ substitution
+ .iter(Interner)
+ .take(self_params)
+ .chain(super_trait_ref.substitution.iter(Interner)),
+ );
+ let projection_ty = ProjectionTy {
+ associated_ty_id: to_assoc_type_id(associated_ty),
+ substitution,
+ };
+ let mut predicates: SmallVec<[_; 1]> = SmallVec::with_capacity(
+ binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
+ );
+ if let Some(type_ref) = binding.type_ref {
+ match (&self.ctx.types_map[type_ref], self.ctx.impl_trait_mode.mode) {
+ (TypeRef::ImplTrait(_), ImplTraitLoweringMode::Disallowed) => (),
+ (_, ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque) => {
+ let ty = self.ctx.lower_ty(type_ref);
+ let alias_eq =
+ AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
+ predicates
+ .push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
+ }
+ (_, ImplTraitLoweringMode::Param | ImplTraitLoweringMode::Variable) => {
+ // Find the generic index for the target of our `bound`
+ let target_param_idx =
+ self.ctx.resolver.where_predicates_in_scope().find_map(
+ |(p, (_, types_map))| match p {
+ WherePredicate::TypeBound {
+ target: WherePredicateTypeTarget::TypeOrConstParam(idx),
+ bound: b,
+ } if std::ptr::eq::<TypesMap>(
+ self.ctx.types_map,
+ types_map,
+ ) && bound == b =>
+ {
+ Some(idx)
+ }
+ _ => None,
+ },
+ );
+ let ty = if let Some(target_param_idx) = target_param_idx {
+ let mut counter = 0;
+ let generics = self.ctx.generics().expect("generics in scope");
+ for (idx, data) in generics.iter_self_type_or_consts() {
+ // Count the number of `impl Trait` things that appear before
+ // the target of our `bound`.
+ // Our counter within `impl_trait_mode` should be that number
+                                    // to properly lower each type within `type_ref`
+ if data.type_param().is_some_and(|p| {
+ p.provenance == TypeParamProvenance::ArgumentImplTrait
+ }) {
+ counter += 1;
+ }
+ if idx == *target_param_idx {
+ break;
+ }
+ }
+ let mut ext = TyLoweringContext::new_maybe_unowned(
+ self.ctx.db,
+ self.ctx.resolver,
+ self.ctx.types_map,
+ self.ctx.types_source_map,
+ self.ctx.owner,
+ )
+ .with_type_param_mode(self.ctx.type_param_mode);
+ match self.ctx.impl_trait_mode.mode {
+ ImplTraitLoweringMode::Param => {
+ ext.impl_trait_mode =
+ ImplTraitLoweringState::param(counter);
+ }
+ ImplTraitLoweringMode::Variable => {
+ ext.impl_trait_mode =
+ ImplTraitLoweringState::variable(counter);
+ }
+ _ => unreachable!(),
+ }
+ let ty = ext.lower_ty(type_ref);
+ self.ctx.diagnostics.extend(ext.diagnostics);
+ ty
+ } else {
+ self.ctx.lower_ty(type_ref)
+ };
+
+ let alias_eq =
+ AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
+ predicates
+ .push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
+ }
+ }
+ }
+ for bound in binding.bounds.iter() {
+ predicates.extend(self.ctx.lower_type_bound(
+ bound,
+ TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner),
+ false,
+ ));
+ }
+ predicates
+ })
+ })
+ }
+}
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index 1cea67ee96..db94351dcc 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -4,6 +4,7 @@
//! and the corresponding code mostly in rustc_hir_analysis/check/method/probe.rs.
use std::ops::ControlFlow;
+use arrayvec::ArrayVec;
use base_db::CrateId;
use chalk_ir::{cast::Cast, UniverseIndex, WithKind};
use hir_def::{
@@ -732,15 +733,27 @@ fn lookup_impl_assoc_item_for_trait_ref(
let self_ty = trait_ref.self_type_parameter(Interner);
let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty)?;
let impls = db.trait_impls_in_deps(env.krate);
- let self_impls = match self_ty.kind(Interner) {
- TyKind::Adt(id, _) => {
- id.0.module(db.upcast()).containing_block().and_then(|it| db.trait_impls_in_block(it))
+
+ let trait_module = hir_trait_id.module(db.upcast());
+ let type_module = match self_ty_fp {
+ TyFingerprint::Adt(adt_id) => Some(adt_id.module(db.upcast())),
+ TyFingerprint::ForeignType(type_id) => {
+ Some(from_foreign_def_id(type_id).module(db.upcast()))
}
+ TyFingerprint::Dyn(trait_id) => Some(trait_id.module(db.upcast())),
_ => None,
};
+
+ let def_blocks: ArrayVec<_, 2> =
+ [trait_module.containing_block(), type_module.and_then(|it| it.containing_block())]
+ .into_iter()
+ .flatten()
+ .filter_map(|block_id| db.trait_impls_in_block(block_id))
+ .collect();
+
let impls = impls
.iter()
- .chain(self_impls.as_ref())
+ .chain(&def_blocks)
.flat_map(|impls| impls.for_trait_and_self_ty(hir_trait_id, self_ty_fp));
let table = InferenceTable::new(db, env);
diff --git a/crates/hir-ty/src/mir.rs b/crates/hir-ty/src/mir.rs
index 59c583afb2..41304bbd8a 100644
--- a/crates/hir-ty/src/mir.rs
+++ b/crates/hir-ty/src/mir.rs
@@ -10,13 +10,13 @@ use crate::{
lang_items::is_box,
mapping::ToChalk,
CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap,
- Substitution, TraitEnvironment, Ty, TyKind,
+ Substitution, TraitEnvironment, Ty, TyExt, TyKind,
};
use base_db::CrateId;
use chalk_ir::Mutability;
use either::Either;
use hir_def::{
- body::Body,
+ expr_store::Body,
hir::{BindingAnnotation, BindingId, Expr, ExprId, Ordering, PatId},
DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId,
};
@@ -144,6 +144,13 @@ impl<V, T> ProjectionElem<V, T> {
closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty,
krate: CrateId,
) -> Ty {
+        // We only bail out of MIR building when there are type mismatches,
+        // but error types may still pop up, leaving us attempting to build the MIR anyway;
+        // in that case, just propagate the error type.
+ if base.is_unknown() {
+ return TyKind::Error.intern(Interner);
+ }
+
if matches!(base.kind(Interner), TyKind::Alias(_) | TyKind::AssociatedType(..)) {
base = normalize(
db,
@@ -166,7 +173,7 @@ impl<V, T> ProjectionElem<V, T> {
TyKind::Error.intern(Interner)
}
},
- ProjectionElem::Field(Either::Left(f)) => match &base.kind(Interner) {
+ ProjectionElem::Field(Either::Left(f)) => match base.kind(Interner) {
TyKind::Adt(_, subst) => {
db.field_types(f.parent)[f.local_id].clone().substitute(Interner, subst)
}
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index dcae6877ba..6b20522cf3 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -6,9 +6,9 @@ use base_db::CrateId;
use chalk_ir::{cast::Cast, Mutability};
use either::Either;
use hir_def::{
- body::HygieneId,
builtin_type::BuiltinType,
data::adt::{StructFlags, VariantData},
+ expr_store::HygieneId,
lang_item::LangItem,
layout::{TagEncoding, Variants},
resolver::{HasResolver, TypeNs, ValueNs},
@@ -1644,14 +1644,15 @@ impl Evaluator<'_> {
Variants::Multiple { tag, tag_encoding, variants, .. } => {
let size = tag.size(&*self.target_data_layout).bytes_usize();
let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
+ let is_signed = tag.is_signed();
match tag_encoding {
TagEncoding::Direct => {
let tag = &bytes[offset..offset + size];
- Ok(i128::from_le_bytes(pad16(tag, false)))
+ Ok(i128::from_le_bytes(pad16(tag, is_signed)))
}
TagEncoding::Niche { untagged_variant, niche_start, .. } => {
let tag = &bytes[offset..offset + size];
- let candidate_tag = i128::from_le_bytes(pad16(tag, false))
+ let candidate_tag = i128::from_le_bytes(pad16(tag, is_signed))
.wrapping_sub(*niche_start as i128)
as usize;
let idx = variants
@@ -2943,10 +2944,10 @@ pub fn render_const_using_debug_impl(
// a3 = ::core::fmt::Arguments::new_v1(a1, a2)
// FIXME: similarly, we should call function here, not directly working with memory.
let a3 = evaluator.heap_allocate(evaluator.ptr_size() * 6, evaluator.ptr_size())?;
- evaluator.write_memory(a3.offset(2 * evaluator.ptr_size()), &a1.to_bytes())?;
+ evaluator.write_memory(a3, &a1.to_bytes())?;
+ evaluator.write_memory(a3.offset(evaluator.ptr_size()), &[1])?;
+ evaluator.write_memory(a3.offset(2 * evaluator.ptr_size()), &a2.to_bytes())?;
evaluator.write_memory(a3.offset(3 * evaluator.ptr_size()), &[1])?;
- evaluator.write_memory(a3.offset(4 * evaluator.ptr_size()), &a2.to_bytes())?;
- evaluator.write_memory(a3.offset(5 * evaluator.ptr_size()), &[1])?;
let Some(ValueNs::FunctionId(format_fn)) = resolver.resolve_path_in_value_ns_fully(
db.upcast(),
&hir_def::path::Path::from_known_path_with_no_generic(path![std::fmt::format]),
diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs
index 0a78f4a5b2..38b189a517 100644
--- a/crates/hir-ty/src/mir/eval/shim.rs
+++ b/crates/hir-ty/src/mir/eval/shim.rs
@@ -1,11 +1,12 @@
//! Interpret intrinsics, lang items and `extern "C"` wellknown functions which their implementation
//! is not available.
//!
-use std::cmp;
+use std::cmp::{self, Ordering};
use chalk_ir::TyKind;
use hir_def::{
builtin_type::{BuiltinInt, BuiltinUint},
+ lang_item::LangItemTarget,
resolver::HasResolver,
};
use hir_expand::name::Name;
@@ -1317,6 +1318,82 @@ impl Evaluator<'_> {
self.write_memory_using_ref(dst, size)?.fill(val);
Ok(())
}
+ "ptr_metadata" => {
+ let [ptr] = args else {
+ return Err(MirEvalError::InternalError(
+ "ptr_metadata args are not provided".into(),
+ ));
+ };
+ let arg = ptr.interval.get(self)?.to_owned();
+ let metadata = &arg[self.ptr_size()..];
+ destination.write_from_bytes(self, metadata)?;
+ Ok(())
+ }
+ "three_way_compare" => {
+ let [lhs, rhs] = args else {
+ return Err(MirEvalError::InternalError(
+ "three_way_compare args are not provided".into(),
+ ));
+ };
+ let Some(ty) =
+ generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
+ else {
+ return Err(MirEvalError::InternalError(
+ "three_way_compare generic arg is not provided".into(),
+ ));
+ };
+ let signed = match ty.as_builtin().unwrap() {
+ BuiltinType::Int(_) => true,
+ BuiltinType::Uint(_) => false,
+ _ => {
+ return Err(MirEvalError::InternalError(
+ "three_way_compare expects an integral type".into(),
+ ))
+ }
+ };
+ let rhs = rhs.get(self)?;
+ let lhs = lhs.get(self)?;
+ let mut result = Ordering::Equal;
+ for (l, r) in lhs.iter().zip(rhs).rev() {
+ let it = l.cmp(r);
+ if it != Ordering::Equal {
+ result = it;
+ break;
+ }
+ }
+ if signed {
+ if let Some((&l, &r)) = lhs.iter().zip(rhs).next_back() {
+ if l != r {
+ result = (l as i8).cmp(&(r as i8));
+ }
+ }
+ }
+ if let Some(LangItemTarget::EnumId(e)) =
+ self.db.lang_item(self.crate_id, LangItem::Ordering)
+ {
+ let ty = self.db.ty(e.into());
+ let r = self
+ .compute_discriminant(ty.skip_binders().clone(), &[result as i8 as u8])?;
+ destination.write_from_bytes(self, &r.to_le_bytes()[0..destination.size])?;
+ Ok(())
+ } else {
+ Err(MirEvalError::InternalError("Ordering enum not found".into()))
+ }
+ }
+ "aggregate_raw_ptr" => {
+ let [data, meta] = args else {
+ return Err(MirEvalError::InternalError(
+ "aggregate_raw_ptr args are not provided".into(),
+ ));
+ };
+ destination.write_from_interval(self, data.interval)?;
+ Interval {
+ addr: destination.addr.offset(data.interval.size),
+ size: destination.size - data.interval.size,
+ }
+ .write_from_interval(self, meta.interval)?;
+ Ok(())
+ }
_ if needs_override => not_supported!("intrinsic {name} is not implemented"),
_ => return Ok(false),
}
diff --git a/crates/hir-ty/src/mir/eval/tests.rs b/crates/hir-ty/src/mir/eval/tests.rs
index f1e86daea2..2b5486fc5f 100644
--- a/crates/hir-ty/src/mir/eval/tests.rs
+++ b/crates/hir-ty/src/mir/eval/tests.rs
@@ -3,7 +3,7 @@ use span::{Edition, EditionedFileId};
use syntax::{TextRange, TextSize};
use test_fixture::WithFixture;
-use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution};
+use crate::{db::HirDatabase, mir::MirLowerError, test_db::TestDB, Interner, Substitution};
use super::{interpret_mir, MirEvalError};
@@ -84,6 +84,16 @@ fn check_panic(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_panic:
assert_eq!(e.is_panic().unwrap_or_else(|| panic!("unexpected error: {e:?}")), expected_panic);
}
+fn check_error_with(
+ #[rust_analyzer::rust_fixture] ra_fixture: &str,
+ expect_err: impl FnOnce(MirEvalError) -> bool,
+) {
+ let (db, file_ids) = TestDB::with_many_files(ra_fixture);
+ let file_id = *file_ids.last().unwrap();
+ let e = eval_main(&db, file_id).unwrap_err();
+ assert!(expect_err(e));
+}
+
#[test]
fn function_with_extern_c_abi() {
check_pass(
@@ -912,3 +922,60 @@ fn main() {
"",
);
}
+
+#[test]
+fn regression_19021() {
+ check_pass(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+#[lang = "owned_box"]
+struct Box<T>(T);
+
+impl<T> Deref for Box<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+struct Foo;
+
+fn main() {
+ let x = Box(Foo);
+ let y = &Foo;
+
+ || match x {
+ ref x => x,
+ _ => y,
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn regression_19177() {
+ check_error_with(
+ r#"
+//- minicore: copy
+trait Foo {}
+trait Bar {}
+trait Baz {}
+trait Qux {
+ type Assoc;
+}
+
+fn main<'a, T: Foo + Bar + Baz>(
+ x: &T,
+ y: (),
+ z: &'a dyn Qux<Assoc = T>,
+ w: impl Foo + Bar,
+) {
+}
+"#,
+ |e| matches!(e, MirEvalError::MirLowerError(_, MirLowerError::GenericArgNotProvided(..))),
+ );
+}
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 1d1044df6e..f88696692e 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -5,11 +5,11 @@ use std::{fmt::Write, iter, mem};
use base_db::ra_salsa::Cycle;
use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind};
use hir_def::{
- body::{Body, HygieneId},
data::adt::{StructKind, VariantData},
+ expr_store::{Body, HygieneId},
hir::{
- ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal,
- LiteralOrConst, MatchArm, Pat, PatId, RecordFieldPat, RecordLitField,
+ ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm,
+ Pat, PatId, RecordFieldPat, RecordLitField,
},
lang_item::{LangItem, LangItemTarget},
path::Path,
@@ -1358,20 +1358,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(())
}
- fn lower_literal_or_const_to_operand(
- &mut self,
- ty: Ty,
- loc: &LiteralOrConst,
- ) -> Result<Operand> {
- match loc {
- LiteralOrConst::Literal(l) => self.lower_literal_to_operand(ty, l),
- LiteralOrConst::Const(c) => {
- let c = match &self.body.pats[*c] {
- Pat::Path(p) => p,
- _ => not_supported!(
- "only `char` and numeric types are allowed in range patterns"
- ),
- };
+ fn lower_literal_or_const_to_operand(&mut self, ty: Ty, loc: &ExprId) -> Result<Operand> {
+ match &self.body.exprs[*loc] {
+ Expr::Literal(l) => self.lower_literal_to_operand(ty, l),
+ Expr::Path(c) => {
let edition = self.edition();
let unresolved_name =
|| MirLowerError::unresolved_path(self.db, c, edition, &self.body.types);
@@ -1392,6 +1382,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
}
+ _ => {
+ not_supported!("only `char` and numeric types are allowed in range patterns");
+ }
}
}
@@ -2156,7 +2149,7 @@ pub fn lower_to_mir(
// need to take this input explicitly.
root_expr: ExprId,
) -> Result<MirBody> {
- if infer.has_errors {
+ if infer.type_mismatches().next().is_some() {
return Err(MirLowerError::HasErrors);
}
let mut ctx = MirLowerCtx::new(db, owner, body, infer);
diff --git a/crates/hir-ty/src/mir/lower/pattern_matching.rs b/crates/hir-ty/src/mir/lower/pattern_matching.rs
index 2ffea34c85..289175feef 100644
--- a/crates/hir-ty/src/mir/lower/pattern_matching.rs
+++ b/crates/hir-ty/src/mir/lower/pattern_matching.rs
@@ -1,6 +1,6 @@
//! MIR lowering for patterns
-use hir_def::{hir::LiteralOrConst, AssocItemId};
+use hir_def::{hir::ExprId, AssocItemId};
use crate::{
mir::{
@@ -207,7 +207,7 @@ impl MirLowerCtx<'_> {
)?
}
Pat::Range { start, end } => {
- let mut add_check = |l: &LiteralOrConst, binop| -> Result<()> {
+ let mut add_check = |l: &ExprId, binop| -> Result<()> {
let lv =
self.lower_literal_or_const_to_operand(self.infer[pattern].clone(), l)?;
let else_target = *current_else.get_or_insert_with(|| self.new_basic_block());
diff --git a/crates/hir-ty/src/mir/pretty.rs b/crates/hir-ty/src/mir/pretty.rs
index 06765a104c..2a26101ac4 100644
--- a/crates/hir-ty/src/mir/pretty.rs
+++ b/crates/hir-ty/src/mir/pretty.rs
@@ -6,7 +6,7 @@ use std::{
};
use either::Either;
-use hir_def::{body::Body, hir::BindingId};
+use hir_def::{expr_store::Body, hir::BindingId};
use hir_expand::{name::Name, Lookup};
use la_arena::ArenaMap;
use span::Edition;
diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs
index 00da9b2517..f5a4d4ff35 100644
--- a/crates/hir-ty/src/tests.rs
+++ b/crates/hir-ty/src/tests.rs
@@ -18,8 +18,8 @@ use std::sync::LazyLock;
use base_db::SourceDatabaseFileInputExt as _;
use expect_test::Expect;
use hir_def::{
- body::{Body, BodySourceMap},
db::DefDatabase,
+ expr_store::{Body, BodySourceMap},
hir::{ExprId, Pat, PatId},
item_scope::ItemScope,
nameres::DefMap,
@@ -117,7 +117,7 @@ fn check_impl(
expected.trim_start_matches("adjustments:").trim().to_owned(),
);
} else {
- panic!("unexpected annotation: {expected}");
+ panic!("unexpected annotation: {expected} @ {range:?}");
}
had_annotations = true;
}
diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs
index 7992f1feee..7e7c1f835c 100644
--- a/crates/hir-ty/src/tests/coercion.rs
+++ b/crates/hir-ty/src/tests/coercion.rs
@@ -185,11 +185,10 @@ fn test() {
let t = &mut 1;
let x = match 1 {
1 => t as *mut i32,
+ //^^^^^^^^^^^^^ adjustments: Pointer(MutToConstPointer)
2 => t as &i32,
//^^^^^^^^^ expected *mut i32, got &'? i32
_ => t as *const i32,
- // ^^^^^^^^^^^^^^^ adjustments: Pointer(MutToConstPointer)
-
};
x;
//^ type: *const i32
diff --git a/crates/hir-ty/src/tests/diagnostics.rs b/crates/hir-ty/src/tests/diagnostics.rs
index def06f2d59..855034117c 100644
--- a/crates/hir-ty/src/tests/diagnostics.rs
+++ b/crates/hir-ty/src/tests/diagnostics.rs
@@ -153,3 +153,53 @@ fn consume() -> Option<()> {
"#,
);
}
+
+#[test]
+fn method_call_on_field() {
+ check(
+ r#"
+struct S {
+ field: fn(f32) -> u32,
+ field2: u32
+}
+
+fn main() {
+ let s = S { field: |_| 0, field2: 0 };
+ s.field(0);
+ // ^ expected f32, got i32
+ // ^^^^^^^^^^ type: u32
+ s.field2(0);
+ // ^ type: i32
+ // ^^^^^^^^^^^ type: {unknown}
+ s.not_a_field(0);
+ // ^ type: i32
+ // ^^^^^^^^^^^^^^^^ type: {unknown}
+}
+"#,
+ );
+}
+
+#[test]
+fn method_call_on_assoc() {
+ check(
+ r#"
+struct S;
+
+impl S {
+ fn not_a_method() -> f32 { 0.0 }
+ fn not_a_method2(this: Self, param: f32) -> Self { this }
+ fn not_a_method3(param: f32) -> Self { S }
+}
+
+fn main() {
+ S.not_a_method(0);
+ // ^^^^^^^^^^^^^^^^^ type: f32
+ S.not_a_method2(0);
+ // ^ expected f32, got i32
+ // ^^^^^^^^^^^^^^^^^^ type: S
+ S.not_a_method3(0);
+ // ^^^^^^^^^^^^^^^^^^ type: S
+}
+"#,
+ );
+}
diff --git a/crates/hir-ty/src/tests/method_resolution.rs b/crates/hir-ty/src/tests/method_resolution.rs
index 8866de22df..3a258ecad1 100644
--- a/crates/hir-ty/src/tests/method_resolution.rs
+++ b/crates/hir-ty/src/tests/method_resolution.rs
@@ -1210,7 +1210,7 @@ impl<T> Slice<T> {
fn main() {
let foo: Slice<u32>;
foo.into_vec(); // we shouldn't crash on this at least
-} //^^^^^^^^^^^^^^ {unknown}
+} //^^^^^^^^^^^^^^ ()
"#,
);
}
@@ -2163,9 +2163,9 @@ impl Receiver for Bar {
fn main() {
let bar = Bar;
let _v1 = bar.foo1();
- //^^^ type: i32
+ //^^^ type: {unknown}
let _v2 = bar.foo2();
- //^^^ type: bool
+ //^^^ type: {unknown}
}
"#,
);
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs
index 1563660457..50a1ecd006 100644
--- a/crates/hir-ty/src/tests/simple.rs
+++ b/crates/hir-ty/src/tests/simple.rs
@@ -3814,3 +3814,50 @@ async fn foo(a: (), b: i32) -> u32 {
"#,
);
}
+
+#[test]
+fn irrefutable_slices() {
+ check_infer(
+ r#"
+//- minicore: from
+struct A;
+
+impl From<A> for [u8; 2] {
+ fn from(a: A) -> Self {
+ [0; 2]
+ }
+}
+impl From<A> for [u8; 3] {
+ fn from(a: A) -> Self {
+ [0; 3]
+ }
+}
+
+
+fn main() {
+ let a = A;
+ let [b, c] = a.into();
+}
+"#,
+ expect![[r#"
+ 50..51 'a': A
+ 64..86 '{ ... }': [u8; 2]
+ 74..80 '[0; 2]': [u8; 2]
+ 75..76 '0': u8
+ 78..79 '2': usize
+ 128..129 'a': A
+ 142..164 '{ ... }': [u8; 3]
+ 152..158 '[0; 3]': [u8; 3]
+ 153..154 '0': u8
+ 156..157 '3': usize
+ 179..224 '{ ...o(); }': ()
+ 189..190 'a': A
+ 193..194 'A': A
+ 204..210 '[b, c]': [u8; 2]
+ 205..206 'b': u8
+ 208..209 'c': u8
+ 213..214 'a': A
+ 213..221 'a.into()': [u8; 2]
+ "#]],
+ );
+}
diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs
index bf7892f69b..c131e97bc4 100644
--- a/crates/hir-ty/src/utils.rs
+++ b/crates/hir-ty/src/utils.rs
@@ -9,19 +9,22 @@ use chalk_ir::{
DebruijnIndex,
};
use hir_def::{
+ attr::Attrs,
db::DefDatabase,
generics::{WherePredicate, WherePredicateTypeTarget},
lang_item::LangItem,
resolver::{HasResolver, TypeNs},
+ tt,
type_ref::{TraitBoundModifier, TypeRef},
EnumId, EnumVariantId, FunctionId, Lookup, OpaqueInternableThing, TraitId, TypeAliasId,
TypeOrConstParamId,
};
use hir_expand::name::Name;
-use intern::sym;
+use intern::{sym, Symbol};
use rustc_abi::TargetDataLayout;
use rustc_hash::FxHashSet;
use smallvec::{smallvec, SmallVec};
+use span::Edition;
use stdx::never;
use crate::{
@@ -264,10 +267,65 @@ impl<'a> ClosureSubst<'a> {
}
}
-pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool {
+#[derive(Debug, Default)]
+pub struct TargetFeatures {
+ enabled: FxHashSet<Symbol>,
+}
+
+impl TargetFeatures {
+ pub fn from_attrs(attrs: &Attrs) -> Self {
+ let enabled = attrs
+ .by_key(&sym::target_feature)
+ .tt_values()
+ .filter_map(|tt| {
+ match tt.token_trees().flat_tokens() {
+ [
+ tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)),
+ tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })),
+ tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { kind: tt::LitKind::Str, symbol: features, .. })),
+ ] if enable_ident.sym == sym::enable => Some(features),
+ _ => None,
+ }
+ })
+ .flat_map(|features| features.as_str().split(',').map(Symbol::intern))
+ .collect();
+ Self { enabled }
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum Unsafety {
+ Safe,
+ Unsafe,
+ /// A lint.
+ DeprecatedSafe2024,
+}
+
+pub fn is_fn_unsafe_to_call(
+ db: &dyn HirDatabase,
+ func: FunctionId,
+ caller_target_features: &TargetFeatures,
+ call_edition: Edition,
+) -> Unsafety {
let data = db.function_data(func);
if data.is_unsafe() {
- return true;
+ return Unsafety::Unsafe;
+ }
+
+ if data.has_target_feature() {
+ // RFC 2396 <https://rust-lang.github.io/rfcs/2396-target-feature-1.1.html>.
+ let callee_target_features = TargetFeatures::from_attrs(&db.attrs(func.into()));
+ if !caller_target_features.enabled.is_superset(&callee_target_features.enabled) {
+ return Unsafety::Unsafe;
+ }
+ }
+
+ if data.is_deprecated_safe_2024() {
+ if call_edition.at_least_2024() {
+ return Unsafety::Unsafe;
+ } else {
+ return Unsafety::DeprecatedSafe2024;
+ }
}
let loc = func.lookup(db.upcast());
@@ -279,14 +337,22 @@ pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool {
if is_intrinsic_block {
// legacy intrinsics
// extern "rust-intrinsic" intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute
- !db.attrs(func.into()).by_key(&sym::rustc_safe_intrinsic).exists()
+ if db.attrs(func.into()).by_key(&sym::rustc_safe_intrinsic).exists() {
+ Unsafety::Safe
+ } else {
+ Unsafety::Unsafe
+ }
} else {
// Function in an `extern` block are always unsafe to call, except when
// it is marked as `safe`.
- !data.is_safe()
+ if data.is_safe() {
+ Unsafety::Safe
+ } else {
+ Unsafety::Unsafe
+ }
}
}
- _ => false,
+ _ => Unsafety::Safe,
}
}
diff --git a/crates/hir-ty/src/variance.rs b/crates/hir-ty/src/variance.rs
index afd163fbd9..3a22158ce6 100644
--- a/crates/hir-ty/src/variance.rs
+++ b/crates/hir-ty/src/variance.rs
@@ -1028,6 +1028,7 @@ struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
}
GenericDefId::ImplId(_) => return None,
GenericDefId::ConstId(_) => return None,
+ GenericDefId::StaticId(_) => return None,
},
))
})
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index fc77d1889c..1ed0daa375 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -6,10 +6,11 @@
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_def::{
+ expr_store::ExprOrPatPtr,
hir::ExprOrPatId,
path::{hir_segment_to_ast_segment, ModPath},
type_ref::TypesSourceMap,
- AssocItemId, DefWithBodyId, SyntheticSyntax,
+ DefWithBodyId, SyntheticSyntax,
};
use hir_expand::{name::Name, HirFileId, InFile};
use hir_ty::{
@@ -24,7 +25,7 @@ use syntax::{
};
use triomphe::Arc;
-use crate::{AssocItem, Field, Local, Trait, Type};
+use crate::{AssocItem, Field, Function, Local, Trait, Type};
pub use hir_def::VariantId;
pub use hir_ty::{
@@ -111,18 +112,19 @@ diagnostics![
UnusedMut,
UnusedVariable,
GenericArgsProhibited,
+ ParenthesizedGenericArgsWithoutFnTrait,
];
#[derive(Debug)]
pub struct BreakOutsideOfLoop {
- pub expr: InFile<AstPtr<ast::Expr>>,
+ pub expr: InFile<ExprOrPatPtr>,
pub is_break: bool,
pub bad_value_break: bool,
}
#[derive(Debug)]
pub struct TypedHole {
- pub expr: InFile<AstPtr<ast::Expr>>,
+ pub expr: InFile<ExprOrPatPtr>,
pub expected: Type,
}
@@ -221,26 +223,26 @@ pub struct NoSuchField {
#[derive(Debug)]
pub struct PrivateAssocItem {
- pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
+ pub expr_or_pat: InFile<ExprOrPatPtr>,
pub item: AssocItem,
}
#[derive(Debug)]
pub struct MismatchedTupleStructPatArgCount {
- pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
+ pub expr_or_pat: InFile<ExprOrPatPtr>,
pub expected: usize,
pub found: usize,
}
#[derive(Debug)]
pub struct ExpectedFunction {
- pub call: InFile<AstPtr<ast::Expr>>,
+ pub call: InFile<ExprOrPatPtr>,
pub found: Type,
}
#[derive(Debug)]
pub struct UnresolvedField {
- pub expr: InFile<AstPtr<ast::Expr>>,
+ pub expr: InFile<ExprOrPatPtr>,
pub receiver: Type,
pub name: Name,
pub method_with_same_name_exists: bool,
@@ -248,34 +250,40 @@ pub struct UnresolvedField {
#[derive(Debug)]
pub struct UnresolvedMethodCall {
- pub expr: InFile<AstPtr<ast::Expr>>,
+ pub expr: InFile<ExprOrPatPtr>,
pub receiver: Type,
pub name: Name,
pub field_with_same_name: Option<Type>,
- pub assoc_func_with_same_name: Option<AssocItemId>,
+ pub assoc_func_with_same_name: Option<Function>,
}
#[derive(Debug)]
pub struct UnresolvedAssocItem {
- pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
+ pub expr_or_pat: InFile<ExprOrPatPtr>,
}
#[derive(Debug)]
pub struct UnresolvedIdent {
- pub node: InFile<(AstPtr<Either<ast::Expr, ast::Pat>>, Option<TextRange>)>,
+ pub node: InFile<(ExprOrPatPtr, Option<TextRange>)>,
}
#[derive(Debug)]
pub struct PrivateField {
- pub expr: InFile<AstPtr<ast::Expr>>,
+ pub expr: InFile<ExprOrPatPtr>,
pub field: Field,
}
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum UnsafeLint {
+ HardError,
+ UnsafeOpInUnsafeFn,
+ DeprecatedSafe2024,
+}
+
#[derive(Debug)]
pub struct MissingUnsafe {
- pub node: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
- /// If true, the diagnostics is an `unsafe_op_in_unsafe_fn` lint instead of a hard error.
- pub only_lint: bool,
+ pub node: InFile<ExprOrPatPtr>,
+ pub lint: UnsafeLint,
pub reason: UnsafetyReason,
}
@@ -296,7 +304,7 @@ pub struct ReplaceFilterMapNextWithFindMap {
#[derive(Debug)]
pub struct MismatchedArgCount {
- pub call_expr: InFile<AstPtr<ast::Expr>>,
+ pub call_expr: InFile<ExprOrPatPtr>,
pub expected: usize,
pub found: usize,
}
@@ -315,7 +323,7 @@ pub struct NonExhaustiveLet {
#[derive(Debug)]
pub struct TypeMismatch {
- pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
+ pub expr_or_pat: InFile<ExprOrPatPtr>,
pub expected: Type,
pub actual: Type,
}
@@ -389,13 +397,13 @@ pub struct RemoveUnnecessaryElse {
#[derive(Debug)]
pub struct CastToUnsized {
- pub expr: InFile<AstPtr<ast::Expr>>,
+ pub expr: InFile<ExprOrPatPtr>,
pub cast_ty: Type,
}
#[derive(Debug)]
pub struct InvalidCast {
- pub expr: InFile<AstPtr<ast::Expr>>,
+ pub expr: InFile<ExprOrPatPtr>,
pub error: CastError,
pub expr_ty: Type,
pub cast_ty: Type,
@@ -407,11 +415,16 @@ pub struct GenericArgsProhibited {
pub reason: GenericArgsProhibitedReason,
}
+#[derive(Debug)]
+pub struct ParenthesizedGenericArgsWithoutFnTrait {
+ pub args: InFile<AstPtr<ast::ParenthesizedArgList>>,
+}
+
impl AnyDiagnostic {
pub(crate) fn body_validation_diagnostic(
db: &dyn HirDatabase,
diagnostic: BodyValidationDiagnostic,
- source_map: &hir_def::body::BodySourceMap,
+ source_map: &hir_def::expr_store::BodySourceMap,
) -> Option<AnyDiagnostic> {
match diagnostic {
BodyValidationDiagnostic::RecordMissingFields { record, variant, missed_fields } => {
@@ -422,9 +435,7 @@ impl AnyDiagnostic {
.collect();
let record = match record {
- Either::Left(record_expr) => {
- source_map.expr_syntax(record_expr).ok()?.map(AstPtr::wrap_left)
- }
+ Either::Left(record_expr) => source_map.expr_syntax(record_expr).ok()?,
Either::Right(record_pat) => source_map.pat_syntax(record_pat).ok()?,
};
let file = record.file_id;
@@ -468,7 +479,7 @@ impl AnyDiagnostic {
return Some(
ReplaceFilterMapNextWithFindMap {
file: next_source_ptr.file_id,
- next_expr: next_source_ptr.value,
+ next_expr: next_source_ptr.value.cast()?,
}
.into(),
);
@@ -478,7 +489,9 @@ impl AnyDiagnostic {
match source_map.expr_syntax(match_expr) {
Ok(source_ptr) => {
let root = source_ptr.file_syntax(db.upcast());
- if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) {
+ if let Either::Left(ast::Expr::MatchExpr(match_expr)) =
+ &source_ptr.value.to_node(&root)
+ {
match match_expr.expr() {
Some(scrut_expr) if match_expr.match_arm_list().is_some() => {
return Some(
@@ -547,7 +560,7 @@ impl AnyDiagnostic {
def: DefWithBodyId,
d: &InferenceDiagnostic,
outer_types_source_map: &TypesSourceMap,
- source_map: &hir_def::body::BodySourceMap,
+ source_map: &hir_def::expr_store::BodySourceMap,
) -> Option<AnyDiagnostic> {
let expr_syntax = |expr| {
source_map.expr_syntax(expr).inspect_err(|_| stdx::never!("synthetic syntax")).ok()
@@ -555,7 +568,7 @@ impl AnyDiagnostic {
let pat_syntax =
|pat| source_map.pat_syntax(pat).inspect_err(|_| stdx::never!("synthetic syntax")).ok();
let expr_or_pat_syntax = |id| match id {
- ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(|it| it.map(AstPtr::wrap_left)),
+ ExprOrPatId::ExprId(expr) => expr_syntax(expr),
ExprOrPatId::PatId(pat) => pat_syntax(pat),
};
Some(match d {
@@ -616,7 +629,7 @@ impl AnyDiagnostic {
field_with_same_name: field_with_same_name
.clone()
.map(|ty| Type::new(db, def, ty)),
- assoc_func_with_same_name: *assoc_func_with_same_name,
+ assoc_func_with_same_name: assoc_func_with_same_name.map(Into::into),
}
.into()
}
@@ -627,7 +640,7 @@ impl AnyDiagnostic {
&InferenceDiagnostic::UnresolvedIdent { id } => {
let node = match id {
ExprOrPatId::ExprId(id) => match source_map.expr_syntax(id) {
- Ok(syntax) => syntax.map(|it| (it.wrap_left(), None)),
+ Ok(syntax) => syntax.map(|it| (it, None)),
Err(SyntheticSyntax) => source_map
.format_args_implicit_capture(id)?
.map(|(node, range)| (node.wrap_left(), Some(range))),
@@ -646,7 +659,7 @@ impl AnyDiagnostic {
}
&InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, expected, found } => {
let expr_or_pat = match pat {
- ExprOrPatId::ExprId(expr) => expr_syntax(expr)?.map(AstPtr::wrap_left),
+ ExprOrPatId::ExprId(expr) => expr_syntax(expr)?,
ExprOrPatId::PatId(pat) => {
let InFile { file_id, value } = pat_syntax(pat)?;
@@ -696,8 +709,8 @@ impl AnyDiagnostic {
diag: &PathLoweringDiagnostic,
path: InFile<ast::Path>,
) -> Option<AnyDiagnostic> {
- Some(match diag {
- &PathLoweringDiagnostic::GenericArgsProhibited { segment, reason } => {
+ Some(match *diag {
+ PathLoweringDiagnostic::GenericArgsProhibited { segment, reason } => {
let segment = hir_segment_to_ast_segment(&path.value, segment)?;
let args = if let Some(generics) = segment.generic_arg_list() {
AstPtr::new(&generics).wrap_left()
@@ -707,6 +720,12 @@ impl AnyDiagnostic {
let args = path.with_value(args);
GenericArgsProhibited { args, reason }.into()
}
+ PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment } => {
+ let segment = hir_segment_to_ast_segment(&path.value, segment)?;
+ let args = AstPtr::new(&segment.parenthesized_arg_list()?);
+ let args = path.with_value(args);
+ ParenthesizedGenericArgsWithoutFnTrait { args }.into()
+ }
})
}
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index b29c91694d..6f4168ab08 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -80,7 +80,9 @@ impl HirDisplay for Function {
if data.is_async() {
f.write_str("async ")?;
}
- if self.is_unsafe_to_call(db) {
+ // FIXME: This will show `unsafe` for functions that are `#[target_feature]` but not unsafe
+ // (they are conditionally unsafe to call). We probably should show something else.
+ if self.is_unsafe_to_call(db, None, f.edition()) {
f.write_str("unsafe ")?;
}
if let Some(abi) = &data.abi {
diff --git a/crates/hir/src/from_id.rs b/crates/hir/src/from_id.rs
index 2ad39817b2..72df07ef8c 100644
--- a/crates/hir/src/from_id.rs
+++ b/crates/hir/src/from_id.rs
@@ -49,6 +49,7 @@ from_id![
(hir_def::LifetimeParamId, crate::LifetimeParam),
(hir_def::MacroId, crate::Macro),
(hir_def::ExternCrateId, crate::ExternCrateDecl),
+ (hir_def::ExternBlockId, crate::ExternBlock),
];
impl From<AdtId> for Adt {
@@ -183,6 +184,7 @@ impl From<GenericDef> for GenericDefId {
GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id),
GenericDef::Impl(it) => GenericDefId::ImplId(it.id),
GenericDef::Const(it) => GenericDefId::ConstId(it.id),
+ GenericDef::Static(it) => GenericDefId::StaticId(it.id),
}
}
}
@@ -197,6 +199,7 @@ impl From<GenericDefId> for GenericDef {
GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()),
GenericDefId::ImplId(it) => GenericDef::Impl(it.into()),
GenericDefId::ConstId(it) => GenericDef::Const(it.into()),
+ GenericDefId::StaticId(it) => GenericDef::Static(it.into()),
}
}
}
diff --git a/crates/hir/src/has_source.rs b/crates/hir/src/has_source.rs
index 82c90ac301..a34b498083 100644
--- a/crates/hir/src/has_source.rs
+++ b/crates/hir/src/has_source.rs
@@ -248,7 +248,7 @@ impl HasSource for Param {
let ast @ InFile { file_id, value } = source_map.expr_syntax(expr_id).ok()?;
let root = db.parse_or_expand(file_id);
match value.to_node(&root) {
- ast::Expr::ClosureExpr(it) => it
+ Either::Left(ast::Expr::ClosureExpr(it)) => it
.param_list()?
.params()
.nth(self.idx)
@@ -301,7 +301,7 @@ impl HasSource for InlineAsmOperand {
let root = src.file_syntax(db.upcast());
return src
.map(|ast| match ast.to_node(&root) {
- ast::Expr::AsmExpr(asm) => asm
+ Either::Left(ast::Expr::AsmExpr(asm)) => asm
.asm_pieces()
.filter_map(|it| match it {
ast::AsmPiece::AsmOperandNamed(it) => Some(it),
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 0cbc75726b..5923a1bc30 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -42,8 +42,8 @@ use arrayvec::ArrayVec;
use base_db::{CrateDisplayName, CrateId, CrateOrigin};
use either::Either;
use hir_def::{
- body::BodyDiagnostic,
data::{adt::VariantData, TraitFlags},
+ expr_store::ExpressionStoreDiagnostics,
generics::{LifetimeParamData, TypeOrConstParamData, TypeParamProvenance},
hir::{BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat},
item_tree::{AttrOwner, FieldParent, ItemTreeFieldId, ItemTreeNode},
@@ -55,8 +55,8 @@ use hir_def::{
resolver::{HasResolver, Resolver},
type_ref::TypesSourceMap,
AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId,
- CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FunctionId,
- GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstId, ItemContainerId,
+ CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId,
+ FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstId, ItemContainerId,
LifetimeParamId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId,
SyntheticSyntax, TraitAliasId, TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
};
@@ -1892,10 +1892,10 @@ impl DefWithBody {
for diag in source_map.diagnostics() {
acc.push(match diag {
- BodyDiagnostic::InactiveCode { node, cfg, opts } => {
+ ExpressionStoreDiagnostics::InactiveCode { node, cfg, opts } => {
InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into()
}
- BodyDiagnostic::MacroError { node, err } => {
+ ExpressionStoreDiagnostics::MacroError { node, err } => {
let RenderedExpandError { message, error, kind } =
err.render_to_string(db.upcast());
@@ -1919,20 +1919,22 @@ impl DefWithBody {
}
.into()
}
- BodyDiagnostic::UnresolvedMacroCall { node, path } => UnresolvedMacroCall {
- macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
- precise_location: None,
- path: path.clone(),
- is_bang: true,
+ ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => {
+ UnresolvedMacroCall {
+ macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
+ precise_location: None,
+ path: path.clone(),
+ is_bang: true,
+ }
+ .into()
}
- .into(),
- BodyDiagnostic::AwaitOutsideOfAsync { node, location } => {
+ ExpressionStoreDiagnostics::AwaitOutsideOfAsync { node, location } => {
AwaitOutsideOfAsync { node: *node, location: location.clone() }.into()
}
- BodyDiagnostic::UnreachableLabel { node, name } => {
+ ExpressionStoreDiagnostics::UnreachableLabel { node, name } => {
UnreachableLabel { node: *node, name: name.clone() }.into()
}
- BodyDiagnostic::UndeclaredLabel { node, name } => {
+ ExpressionStoreDiagnostics::UndeclaredLabel { node, name } => {
UndeclaredLabel { node: *node, name: name.clone() }.into()
}
});
@@ -1955,7 +1957,7 @@ impl DefWithBody {
ExprOrPatId::PatId(pat) => source_map.pat_syntax(pat).map(Either::Right),
};
let expr_or_pat = match expr_or_pat {
- Ok(Either::Left(expr)) => expr.map(AstPtr::wrap_left),
+ Ok(Either::Left(expr)) => expr,
Ok(Either::Right(InFile { file_id, value: pat })) => {
// cast from Either<Pat, SelfParam> -> Either<_, Pat>
let Some(ptr) = AstPtr::try_from_raw(pat.syntax_node_ptr()) else {
@@ -1976,16 +1978,40 @@ impl DefWithBody {
);
}
- let (unsafe_exprs, only_lint) = hir_ty::diagnostics::missing_unsafe(db, self.into());
- for (node, reason) in unsafe_exprs {
+ let missing_unsafe = hir_ty::diagnostics::missing_unsafe(db, self.into());
+ for (node, reason) in missing_unsafe.unsafe_exprs {
match source_map.expr_or_pat_syntax(node) {
- Ok(node) => acc.push(MissingUnsafe { node, only_lint, reason }.into()),
+ Ok(node) => acc.push(
+ MissingUnsafe {
+ node,
+ lint: if missing_unsafe.fn_is_unsafe {
+ UnsafeLint::UnsafeOpInUnsafeFn
+ } else {
+ UnsafeLint::HardError
+ },
+ reason,
+ }
+ .into(),
+ ),
Err(SyntheticSyntax) => {
// FIXME: Here and elsewhere in this file, the `expr` was
// desugared, report or assert that this doesn't happen.
}
}
}
+ for node in missing_unsafe.deprecated_safe_calls {
+ match source_map.expr_syntax(node) {
+ Ok(node) => acc.push(
+ MissingUnsafe {
+ node,
+ lint: UnsafeLint::DeprecatedSafe2024,
+ reason: UnsafetyReason::UnsafeFnCall,
+ }
+ .into(),
+ ),
+ Err(SyntheticSyntax) => never!("synthetic DeprecatedSafe2024"),
+ }
+ }
if let Ok(borrowck_results) = db.borrowck(self.into()) {
for borrowck_result in borrowck_results.iter() {
@@ -2301,6 +2327,13 @@ impl Function {
db.function_data(self.id).is_async()
}
+ pub fn extern_block(self, db: &dyn HirDatabase) -> Option<ExternBlock> {
+ match self.id.lookup(db.upcast()).container {
+ ItemContainerId::ExternBlockId(id) => Some(ExternBlock { id }),
+ _ => None,
+ }
+ }
+
pub fn returns_impl_future(self, db: &dyn HirDatabase) -> bool {
if self.is_async(db) {
return true;
@@ -2361,8 +2394,19 @@ impl Function {
db.attrs(self.id.into()).is_unstable()
}
- pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool {
- hir_ty::is_fn_unsafe_to_call(db, self.id)
+ pub fn is_unsafe_to_call(
+ self,
+ db: &dyn HirDatabase,
+ caller: Option<Function>,
+ call_edition: Edition,
+ ) -> bool {
+ let target_features = caller
+ .map(|caller| hir_ty::TargetFeatures::from_attrs(&db.attrs(caller.id.into())))
+ .unwrap_or_default();
+ matches!(
+ hir_ty::is_fn_unsafe_to_call(db, self.id, &target_features, call_edition),
+ hir_ty::Unsafety::Unsafe
+ )
}
/// Whether this function declaration has a definition.
@@ -2724,6 +2768,13 @@ impl Static {
Type::from_value_def(db, self.id)
}
+ pub fn extern_block(self, db: &dyn HirDatabase) -> Option<ExternBlock> {
+ match self.id.lookup(db.upcast()).container {
+ ItemContainerId::ExternBlockId(id) => Some(ExternBlock { id }),
+ _ => None,
+ }
+ }
+
/// Evaluate the static initializer.
pub fn eval(self, db: &dyn HirDatabase) -> Result<EvaluatedConst, ConstEvalError> {
db.const_eval(self.id.into(), Substitution::empty(Interner), None)
@@ -2892,6 +2943,17 @@ impl HasVisibility for TypeAlias {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ExternBlock {
+ pub(crate) id: ExternBlockId,
+}
+
+impl ExternBlock {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.module(db.upcast()) }
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StaticLifetime;
impl StaticLifetime {
@@ -3453,6 +3515,7 @@ pub enum GenericDef {
Impl(Impl),
// consts can have type parameters from their parents (i.e. associated consts of traits)
Const(Const),
+ Static(Static),
}
impl_from!(
Function,
@@ -3461,7 +3524,8 @@ impl_from!(
TraitAlias,
TypeAlias,
Impl,
- Const
+ Const,
+ Static
for GenericDef
);
@@ -3511,6 +3575,7 @@ impl GenericDef {
GenericDef::TypeAlias(it) => it.id.into(),
GenericDef::Impl(it) => it.id.into(),
GenericDef::Const(it) => it.id.into(),
+ GenericDef::Static(it) => it.id.into(),
}
}
@@ -3568,6 +3633,7 @@ impl GenericDef {
item_tree_source_maps.impl_(id.value).generics()
}
GenericDefId::ConstId(_) => return,
+ GenericDefId::StaticId(_) => return,
},
};
@@ -4551,10 +4617,7 @@ impl CaptureUsages {
match span {
mir::MirSpan::ExprId(expr) => {
if let Ok(expr) = source_map.expr_syntax(expr) {
- result.push(CaptureUsageSource {
- is_ref,
- source: expr.map(AstPtr::wrap_left),
- })
+ result.push(CaptureUsageSource { is_ref, source: expr })
}
}
mir::MirSpan::PatId(pat) => {
@@ -4624,17 +4687,6 @@ impl Type {
Type { env: TraitEnvironment::empty(krate), ty }
}
- pub fn reference(inner: &Type, m: Mutability) -> Type {
- inner.derived(
- TyKind::Ref(
- if m.is_mut() { hir_ty::Mutability::Mut } else { hir_ty::Mutability::Not },
- hir_ty::error_lifetime(),
- inner.ty.clone(),
- )
- .intern(Interner),
- )
- }
-
fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
let resolver = lexical_env.resolver(db.upcast());
let environment = resolver
@@ -4866,6 +4918,17 @@ impl Type {
self.normalize_trait_assoc_type(db, &[], iterator_item.into())
}
+ pub fn impls_iterator(self, db: &dyn HirDatabase) -> bool {
+ let Some(iterator_trait) =
+ db.lang_item(self.env.krate, LangItem::Iterator).and_then(|it| it.as_trait())
+ else {
+ return false;
+ };
+ let canonical_ty =
+ Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
+ method_resolution::implements_trait_unique(&canonical_ty, db, &self.env, iterator_trait)
+ }
+
/// Resolves the projection `<Self as IntoIterator>::IntoIter` and returns the resulting type
pub fn into_iterator_iter(self, db: &dyn HirDatabase) -> Option<Type> {
let trait_ = db.lang_item(self.env.krate, LangItem::IntoIterIntoIter).and_then(|it| {
@@ -6139,9 +6202,15 @@ impl HasContainer for TraitAlias {
}
}
+impl HasContainer for ExternBlock {
+ fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
+ ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container })
+ }
+}
+
fn container_id_to_hir(c: ItemContainerId) -> ItemContainer {
match c {
- ItemContainerId::ExternBlockId(_id) => ItemContainer::ExternBlock(),
+ ItemContainerId::ExternBlockId(id) => ItemContainer::ExternBlock(ExternBlock { id }),
ItemContainerId::ModuleId(id) => ItemContainer::Module(Module { id }),
ItemContainerId::ImplId(id) => ItemContainer::Impl(Impl { id }),
ItemContainerId::TraitId(id) => ItemContainer::Trait(Trait { id }),
@@ -6153,7 +6222,7 @@ pub enum ItemContainer {
Trait(Trait),
Impl(Impl),
Module(Module),
- ExternBlock(),
+ ExternBlock(ExternBlock),
Crate(CrateId),
}
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 09470bed9c..c9145f7d21 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -1998,6 +1998,7 @@ to_def_impls![
(crate::Adt, ast::Adt, adt_to_def),
(crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
(crate::InlineAsmOperand, ast::AsmOperandNamed, asm_operand_to_def),
+ (crate::ExternBlock, ast::ExternBlock, extern_block_to_def),
(MacroCallId, ast::MacroCall, macro_call_to_macro_call),
];
@@ -2040,6 +2041,13 @@ impl SemanticsScope<'_> {
Crate { id: self.resolver.krate() }
}
+ pub fn containing_function(&self) -> Option<Function> {
+ self.resolver.body_owner().and_then(|owner| match owner {
+ DefWithBodyId::FunctionId(id) => Some(id.into()),
+ _ => None,
+ })
+ }
+
pub(crate) fn resolver(&self) -> &Resolver {
&self.resolver
}
diff --git a/crates/hir/src/semantics/child_by_source.rs b/crates/hir/src/semantics/child_by_source.rs
index d5dfb98571..d0fdf5cbdf 100644
--- a/crates/hir/src/semantics/child_by_source.rs
+++ b/crates/hir/src/semantics/child_by_source.rs
@@ -74,6 +74,9 @@ impl ChildBySource for ItemScope {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
self.declarations().for_each(|item| add_module_def(db, res, file_id, item));
self.impls().for_each(|imp| insert_item_loc(db, res, file_id, imp, keys::IMPL));
+ self.extern_blocks().for_each(|extern_block| {
+ insert_item_loc(db, res, file_id, extern_block, keys::EXTERN_BLOCK)
+ });
self.extern_crate_decls()
.for_each(|ext| insert_item_loc(db, res, file_id, ext, keys::EXTERN_CRATE));
self.use_decls().for_each(|ext| insert_item_loc(db, res, file_id, ext, keys::USE));
diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs
index 3c9e7065c4..4481b8855f 100644
--- a/crates/hir/src/semantics/source_to_def.rs
+++ b/crates/hir/src/semantics/source_to_def.rs
@@ -92,10 +92,10 @@ use hir_def::{
DynMap,
},
hir::{BindingId, Expr, LabelId},
- AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId,
- FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, Lookup, MacroId,
- ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId,
- VariantId,
+ AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId,
+ ExternCrateId, FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId,
+ Lookup, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId,
+ UnionId, UseId, VariantId,
};
use hir_expand::{
attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, InMacroFile, MacroCallId,
@@ -308,6 +308,12 @@ impl SourceToDefCtx<'_, '_> {
) -> Option<ExternCrateId> {
self.to_def(src, keys::EXTERN_CRATE)
}
+ pub(super) fn extern_block_to_def(
+ &mut self,
+ src: InFile<&ast::ExternBlock>,
+ ) -> Option<ExternBlockId> {
+ self.to_def(src, keys::EXTERN_BLOCK)
+ }
#[allow(dead_code)]
pub(super) fn use_to_def(&mut self, src: InFile<&ast::Use>) -> Option<UseId> {
self.to_def(src, keys::USE)
@@ -352,7 +358,7 @@ impl SourceToDefCtx<'_, '_> {
let src = src.cloned().map(ast::Pat::from);
let pat_id = source_map.node_pat(src.as_ref())?;
// the pattern could resolve to a constant, verify that this is not the case
- if let crate::Pat::Bind { id, .. } = body[pat_id] {
+ if let crate::Pat::Bind { id, .. } = body[pat_id.as_pat()?] {
Some((container, id))
} else {
None
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index b699ccde41..9019863f7f 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -14,11 +14,11 @@ use crate::{
};
use either::Either;
use hir_def::{
- body::{
+ expr_store::{
scope::{ExprScopes, ScopeId},
Body, BodySourceMap, HygieneId,
},
- hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat, PatId},
+ hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
lang_item::LangItem,
lower::LowerCtx,
nameres::MacroSubNs,
@@ -139,7 +139,7 @@ impl SourceAnalyzer {
sm.node_expr(src.as_ref())
}
- fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
+ fn pat_id(&self, pat: &ast::Pat) -> Option<ExprOrPatId> {
// FIXME: macros, see `expr_id`
let src = InFile { file_id: self.file_id, value: pat };
self.body_source_map()?.node_pat(src)
@@ -147,7 +147,7 @@ impl SourceAnalyzer {
fn binding_id_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingId> {
let pat_id = self.pat_id(&pat.clone().into())?;
- if let Pat::Bind { id, .. } = self.body()?.pats[pat_id] {
+ if let Pat::Bind { id, .. } = self.body()?.pats[pat_id.as_pat()?] {
Some(id)
} else {
None
@@ -210,11 +210,20 @@ impl SourceAnalyzer {
db: &dyn HirDatabase,
pat: &ast::Pat,
) -> Option<(Type, Option<Type>)> {
- let pat_id = self.pat_id(pat)?;
+ let expr_or_pat_id = self.pat_id(pat)?;
let infer = self.infer.as_ref()?;
- let coerced =
- infer.pat_adjustments.get(&pat_id).and_then(|adjusts| adjusts.last().cloned());
- let ty = infer[pat_id].clone();
+ let coerced = match expr_or_pat_id {
+ ExprOrPatId::ExprId(idx) => infer
+ .expr_adjustments
+ .get(&idx)
+ .and_then(|adjusts| adjusts.last().cloned())
+ .map(|adjust| adjust.target),
+ ExprOrPatId::PatId(idx) => {
+ infer.pat_adjustments.get(&idx).and_then(|adjusts| adjusts.last().cloned())
+ }
+ };
+
+ let ty = infer[expr_or_pat_id].clone();
let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
Some((mk_ty(ty), coerced.map(mk_ty)))
}
@@ -248,7 +257,7 @@ impl SourceAnalyzer {
) -> Option<BindingMode> {
let id = self.pat_id(&pat.clone().into())?;
let infer = self.infer.as_ref()?;
- infer.binding_modes.get(id).map(|bm| match bm {
+ infer.binding_modes.get(id.as_pat()?).map(|bm| match bm {
hir_ty::BindingMode::Move => BindingMode::Move,
hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
@@ -266,7 +275,7 @@ impl SourceAnalyzer {
Some(
infer
.pat_adjustments
- .get(&pat_id)?
+ .get(&pat_id.as_pat()?)?
.iter()
.map(|ty| Type::new_with_resolver(db, &self.resolver, ty.clone()))
.collect(),
@@ -649,10 +658,10 @@ impl SourceAnalyzer {
let field_name = field.field_name()?.as_name();
let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
let pat_id = self.pat_id(&record_pat.into())?;
- let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id)?;
+ let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id.as_pat()?)?;
let variant_data = variant.variant_data(db.upcast());
let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
- let (adt, subst) = self.infer.as_ref()?.type_of_pat.get(pat_id)?.as_adt()?;
+ let (adt, subst) = self.infer.as_ref()?.type_of_pat.get(pat_id.as_pat()?)?.as_adt()?;
let field_ty =
db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
Some((
@@ -682,12 +691,20 @@ impl SourceAnalyzer {
db: &dyn HirDatabase,
pat: &ast::IdentPat,
) -> Option<ModuleDef> {
- let pat_id = self.pat_id(&pat.clone().into())?;
+ let expr_or_pat_id = self.pat_id(&pat.clone().into())?;
let body = self.body()?;
- let path = match &body[pat_id] {
- Pat::Path(path) => path,
- _ => return None,
+
+ let path = match expr_or_pat_id {
+ ExprOrPatId::ExprId(idx) => match &body[idx] {
+ Expr::Path(path) => path,
+ _ => return None,
+ },
+ ExprOrPatId::PatId(idx) => match &body[idx] {
+ Pat::Path(path) => path,
+ _ => return None,
+ },
};
+
let res = resolve_hir_path(db, &self.resolver, path, HygieneId::ROOT, TypesMap::EMPTY)?;
match res {
PathResolution::Def(def) => Some(def),
@@ -782,8 +799,9 @@ impl SourceAnalyzer {
}
prefer_value_ns = true;
} else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) {
- let pat_id = self.pat_id(&path_pat.into())?;
- if let Some((assoc, subs)) = infer.assoc_resolutions_for_pat(pat_id) {
+ let expr_or_pat_id = self.pat_id(&path_pat.into())?;
+ if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr_or_pat(expr_or_pat_id)
+ {
let (assoc, subst) = match assoc {
AssocItemId::ConstId(const_id) => {
let (konst, subst) =
@@ -807,7 +825,7 @@ impl SourceAnalyzer {
return Some((PathResolution::Def(AssocItem::from(assoc).into()), Some(subst)));
}
if let Some(VariantId::EnumVariantId(variant)) =
- infer.variant_resolution_for_pat(pat_id)
+ infer.variant_resolution_for_expr_or_pat(expr_or_pat_id)
{
return Some((PathResolution::Def(ModuleDef::Variant(variant.into())), None));
}
@@ -824,7 +842,7 @@ impl SourceAnalyzer {
|| parent().and_then(ast::TupleStructPat::cast).map(ast::Pat::from);
if let Some(pat) = record_pat.or_else(tuple_struct_pat) {
let pat_id = self.pat_id(&pat)?;
- let variant_res_for_pat = infer.variant_resolution_for_pat(pat_id);
+ let variant_res_for_pat = infer.variant_resolution_for_pat(pat_id.as_pat()?);
if let Some(VariantId::EnumVariantId(variant)) = variant_res_for_pat {
return Some((
PathResolution::Def(ModuleDef::Variant(variant.into())),
@@ -866,7 +884,8 @@ impl SourceAnalyzer {
// Case where path is a qualifier of another path, e.g. foo::bar::Baz where we are
// trying to resolve foo::bar.
- if path.parent_path().is_some() {
+ if let Some(parent_path) = path.parent_path() {
+ let parent_hir_path = Path::from_src(&mut ctx, parent_path);
return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &types_map) {
None if meta_path.is_some() => path
.first_segment()
@@ -876,6 +895,42 @@ impl SourceAnalyzer {
.map(PathResolution::ToolModule)
})
.map(|it| (it, None)),
+ // Case the type name conflict with use module,
+ // e.g.
+ // ```
+ // use std::str;
+ // fn main() {
+ // str::from_utf8(); // as module std::str
+ // str::len(); // as primitive type str
+ // str::no_exist_item(); // as primitive type str
+ // }
+ // ```
+ Some(it) if matches!(it, PathResolution::Def(ModuleDef::BuiltinType(_))) => {
+ if let (Some(mod_path), Some(parent_hir_path)) =
+ (hir_path.mod_path(), parent_hir_path)
+ {
+ if let Some(ModuleDefId::ModuleId(id)) = self
+ .resolver
+ .resolve_module_path_in_items(db.upcast(), mod_path)
+ .take_types()
+ {
+ let parent_hir_name =
+ parent_hir_path.segments().get(1).map(|it| it.name);
+ let module = crate::Module { id };
+ if module
+ .scope(db, None)
+ .into_iter()
+ .any(|(name, _)| Some(&name) == parent_hir_name)
+ {
+ return Some((
+ PathResolution::Def(ModuleDef::Module(module)),
+ None,
+ ));
+ };
+ }
+ }
+ Some((it, None))
+ }
// FIXME: We do not show substitutions for parts of path, because this is really complex
// due to the interactions with associated items of `impl`s and associated items of associated
// types.
@@ -1043,7 +1098,7 @@ impl SourceAnalyzer {
let body = self.body()?;
let infer = self.infer.as_ref()?;
- let pat_id = self.pat_id(&pattern.clone().into())?;
+ let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
let substs = infer.type_of_pat[pat_id].as_adt()?.1;
let (variant, missing_fields, _exhaustive) =
@@ -1105,16 +1160,9 @@ impl SourceAnalyzer {
if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr) {
let mut is_unsafe = false;
let mut walk_expr = |expr_id| {
- unsafe_expressions(
- db,
- infer,
- *def,
- body,
- expr_id,
- &mut |_, inside_unsafe_block, _| {
- is_unsafe |= inside_unsafe_block == InsideUnsafeBlock::No
- },
- )
+ unsafe_expressions(db, infer, *def, body, expr_id, &mut |inside_unsafe_block| {
+ is_unsafe |= inside_unsafe_block == InsideUnsafeBlock::No
+ })
};
match expanded_expr {
ExprOrPatId::ExprId(expanded_expr) => walk_expr(expanded_expr),
@@ -1259,7 +1307,11 @@ fn scope_for(
node: InFile<&SyntaxNode>,
) -> Option<ScopeId> {
node.ancestors_with_macros(db.upcast())
- .take_while(|it| !ast::Item::can_cast(it.kind()) || ast::MacroCall::can_cast(it.kind()))
+ .take_while(|it| {
+ !ast::Item::can_cast(it.kind())
+ || ast::MacroCall::can_cast(it.kind())
+ || ast::Use::can_cast(it.kind())
+ })
.filter_map(|it| it.map(ast::Expr::cast).transpose())
.filter_map(|it| source_map.node_expr(it.as_ref())?.as_expr())
.find_map(|it| scopes.scope_for(it))
diff --git a/crates/hir/src/term_search.rs b/crates/hir/src/term_search.rs
index 6f84513708..af72179305 100644
--- a/crates/hir/src/term_search.rs
+++ b/crates/hir/src/term_search.rs
@@ -145,7 +145,7 @@ impl LookupTable {
self.data
.iter()
.find(|(t, _)| {
- Type::reference(t, Mutability::Shared).could_unify_with_deeply(db, ty)
+ t.add_reference(Mutability::Shared).could_unify_with_deeply(db, ty)
})
.map(|(t, it)| {
it.exprs(t)
diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs
index 1b0e6f8bd5..847304d503 100644
--- a/crates/hir/src/term_search/tactics.rs
+++ b/crates/hir/src/term_search/tactics.rs
@@ -15,6 +15,7 @@ use hir_ty::mir::BorrowKind;
use hir_ty::TyBuilder;
use itertools::Itertools;
use rustc_hash::FxHashSet;
+use span::Edition;
use crate::{
Adt, AssocItem, GenericDef, GenericParam, HasAttrs, HasVisibility, Impl, ModuleDef, ScopeDef,
@@ -365,7 +366,7 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
let ret_ty = it.ret_type_with_args(db, generics.iter().cloned());
// Filter out private and unsafe functions
if !it.is_visible_from(db, module)
- || it.is_unsafe_to_call(db)
+ || it.is_unsafe_to_call(db, None, Edition::CURRENT_FIXME)
|| it.is_unstable(db)
|| ctx.config.enable_borrowcheck && ret_ty.contains_reference(db)
|| ret_ty.is_raw_ptr()
@@ -470,7 +471,10 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
}
// Filter out private and unsafe functions
- if !it.is_visible_from(db, module) || it.is_unsafe_to_call(db) || it.is_unstable(db) {
+ if !it.is_visible_from(db, module)
+ || it.is_unsafe_to_call(db, None, Edition::CURRENT_FIXME)
+ || it.is_unstable(db)
+ {
return None;
}
@@ -658,7 +662,10 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
}
// Filter out private and unsafe functions
- if !it.is_visible_from(db, module) || it.is_unsafe_to_call(db) || it.is_unstable(db) {
+ if !it.is_visible_from(db, module)
+ || it.is_unsafe_to_call(db, None, Edition::CURRENT_FIXME)
+ || it.is_unstable(db)
+ {
return None;
}
diff --git a/crates/ide-assists/src/assist_config.rs b/crates/ide-assists/src/assist_config.rs
index fb533077d9..05105c8c92 100644
--- a/crates/ide-assists/src/assist_config.rs
+++ b/crates/ide-assists/src/assist_config.rs
@@ -20,6 +20,7 @@ pub struct AssistConfig {
pub assist_emit_must_use: bool,
pub term_search_fuel: u64,
pub term_search_borrowck: bool,
+ pub code_action_grouping: bool,
}
impl AssistConfig {
diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 5899ec5a00..4a9e2256e9 100644
--- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -6,7 +6,9 @@ use ide_db::syntax_helpers::suggest_name;
use ide_db::RootDatabase;
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
use itertools::Itertools;
-use syntax::ast::edit_in_place::Removable;
+use syntax::ast::edit::IndentLevel;
+use syntax::ast::edit_in_place::Indent;
+use syntax::ast::syntax_factory::SyntaxFactory;
use syntax::ast::{self, make, AstNode, MatchArmList, MatchExpr, Pat};
use crate::{utils, AssistContext, AssistId, AssistKind, Assists};
@@ -200,8 +202,8 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
AssistId("add_missing_match_arms", AssistKind::QuickFix),
"Fill match arms",
ctx.sema.original_range(match_expr.syntax()).range,
- |edit| {
- let new_match_arm_list = match_arm_list.clone_for_update();
+ |builder| {
+ let make = SyntaxFactory::new();
// having any hidden variants means that we need a catch-all arm
needs_catch_all_arm |= has_hidden_variants;
@@ -211,89 +213,85 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
// filter out hidden patterns because they're handled by the catch-all arm
!hidden
})
- .map(|(pat, _)| {
- make::match_arm(pat, None, make::ext::expr_todo()).clone_for_update()
- });
+ .map(|(pat, _)| make.match_arm(pat, None, make::ext::expr_todo()));
- let catch_all_arm = new_match_arm_list
+ let mut arms: Vec<_> = match_arm_list
.arms()
- .find(|arm| matches!(arm.pat(), Some(ast::Pat::WildcardPat(_))));
- if let Some(arm) = catch_all_arm {
- let is_empty_expr = arm.expr().is_none_or(|e| match e {
- ast::Expr::BlockExpr(b) => {
- b.statements().next().is_none() && b.tail_expr().is_none()
+ .filter(|arm| {
+ if matches!(arm.pat(), Some(ast::Pat::WildcardPat(_))) {
+ let is_empty_expr = arm.expr().is_none_or(|e| match e {
+ ast::Expr::BlockExpr(b) => {
+ b.statements().next().is_none() && b.tail_expr().is_none()
+ }
+ ast::Expr::TupleExpr(t) => t.fields().next().is_none(),
+ _ => false,
+ });
+ if is_empty_expr {
+ false
+ } else {
+ cov_mark::hit!(add_missing_match_arms_empty_expr);
+ true
+ }
+ } else {
+ true
}
- ast::Expr::TupleExpr(t) => t.fields().next().is_none(),
- _ => false,
- });
- if is_empty_expr {
- arm.remove();
- } else {
- cov_mark::hit!(add_missing_match_arms_empty_expr);
- }
- }
+ })
+ .collect();
- let mut added_arms = Vec::new();
- let mut todo_placeholders = Vec::new();
- for arm in missing_arms {
- todo_placeholders.push(arm.expr().unwrap());
- added_arms.push(arm);
- }
+ let first_new_arm_idx = arms.len();
+ arms.extend(missing_arms);
if needs_catch_all_arm && !has_catch_all_arm {
cov_mark::hit!(added_wildcard_pattern);
- let arm =
- make::match_arm(make::wildcard_pat().into(), None, make::ext::expr_todo())
- .clone_for_update();
- todo_placeholders.push(arm.expr().unwrap());
- added_arms.push(arm);
- }
-
- let first_new_arm = added_arms.first().cloned();
- let last_new_arm = added_arms.last().cloned();
-
- for arm in added_arms {
- new_match_arm_list.add_arm(arm);
+ let arm = make.match_arm(make::wildcard_pat().into(), None, make::ext::expr_todo());
+ arms.push(arm);
}
- if let Some(cap) = ctx.config.snippet_cap {
- if let Some(it) = first_new_arm
- .and_then(|arm| arm.syntax().descendants().find_map(ast::WildcardPat::cast))
- {
- edit.add_placeholder_snippet(cap, it);
- }
-
- for placeholder in todo_placeholders {
- edit.add_placeholder_snippet(cap, placeholder);
- }
-
- if let Some(arm) = last_new_arm {
- edit.add_tabstop_after(cap, arm);
- }
- }
+ let new_match_arm_list = make.match_arm_list(arms);
- // FIXME: Hack for mutable syntax trees not having great support for macros
+ // FIXME: Hack for syntax trees not having great support for macros
// Just replace the element that the original range came from
let old_place = {
// Find the original element
let file = ctx.sema.parse(arm_list_range.file_id);
let old_place = file.syntax().covering_element(arm_list_range.range);
- // Make `old_place` mut
match old_place {
- syntax::SyntaxElement::Node(it) => {
- syntax::SyntaxElement::from(edit.make_syntax_mut(it))
- }
+ syntax::SyntaxElement::Node(it) => it,
syntax::SyntaxElement::Token(it) => {
// If a token is found, it is '{' or '}'
// The parent is `{ ... }`
- let parent = it.parent().expect("Token must have a parent.");
- syntax::SyntaxElement::from(edit.make_syntax_mut(parent))
+ it.parent().expect("Token must have a parent.")
}
}
};
- syntax::ted::replace(old_place, new_match_arm_list.syntax());
+ let mut editor = builder.make_editor(&old_place);
+ new_match_arm_list.indent(IndentLevel::from_node(&old_place));
+ editor.replace(old_place, new_match_arm_list.syntax());
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ if let Some(it) = new_match_arm_list
+ .arms()
+ .nth(first_new_arm_idx)
+ .and_then(|arm| arm.syntax().descendants().find_map(ast::WildcardPat::cast))
+ {
+ editor.add_annotation(it.syntax(), builder.make_placeholder_snippet(cap));
+ }
+
+ for arm in new_match_arm_list.arms().skip(first_new_arm_idx) {
+ if let Some(expr) = arm.expr() {
+ editor.add_annotation(expr.syntax(), builder.make_placeholder_snippet(cap));
+ }
+ }
+
+ if let Some(arm) = new_match_arm_list.arms().skip(first_new_arm_idx).last() {
+ editor.add_annotation(arm.syntax(), builder.make_tabstop_after(cap));
+ }
+ }
+
+ editor.add_mappings(make.finish_with_mappings());
+ builder.add_file_edits(ctx.file_id(), editor);
},
)
}
@@ -1377,6 +1375,9 @@ fn main() {
);
}
+ // FIXME: Preserving comments is quite hard in the current transitional syntax editing model.
+ // Once we migrate to new trivia model addressed in #6854, remove the ignore attribute.
+ #[ignore]
#[test]
fn add_missing_match_arms_preserves_comments() {
check_assist(
@@ -1405,6 +1406,9 @@ fn foo(a: A) {
);
}
+ // FIXME: Preserving comments is quite hard in the current transitional syntax editing model.
+ // Once we migrate to new trivia model addressed in #6854, remove the ignore attribute.
+ #[ignore]
#[test]
fn add_missing_match_arms_preserves_comments_empty() {
check_assist(
@@ -1502,10 +1506,10 @@ enum Test {
fn foo(t: Test) {
m!(match t {
- Test::A => ${1:todo!()},
- Test::B => ${2:todo!()},
- Test::C => ${3:todo!()},$0
-});
+ Test::A => ${1:todo!()},
+ Test::B => ${2:todo!()},
+ Test::C => ${3:todo!()},$0
+ });
}"#,
);
}
diff --git a/crates/ide-assists/src/handlers/apply_demorgan.rs b/crates/ide-assists/src/handlers/apply_demorgan.rs
index 70fb568005..491727a30a 100644
--- a/crates/ide-assists/src/handlers/apply_demorgan.rs
+++ b/crates/ide-assists/src/handlers/apply_demorgan.rs
@@ -15,7 +15,7 @@ use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKin
// Assist: apply_demorgan
//
-// Apply https://en.wikipedia.org/wiki/De_Morgan%27s_laws[De Morgan's law].
+// Apply [De Morgan's law](https://en.wikipedia.org/wiki/De_Morgan%27s_laws).
// This transforms expressions of the form `!l || !r` into `!(l && r)`.
// This also works with `&&`. This assist can only be applied with the cursor
// on either `||` or `&&`.
@@ -131,7 +131,7 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
// Assist: apply_demorgan_iterator
//
-// Apply https://en.wikipedia.org/wiki/De_Morgan%27s_laws[De Morgan's law] to
+// Apply [De Morgan's law](https://en.wikipedia.org/wiki/De_Morgan%27s_laws) to
// `Iterator::all` and `Iterator::any`.
//
// This transforms expressions of the form `!iter.any(|x| predicate(x))` into
diff --git a/crates/ide-assists/src/handlers/auto_import.rs b/crates/ide-assists/src/handlers/auto_import.rs
index d86948818b..a92a000c3f 100644
--- a/crates/ide-assists/src/handlers/auto_import.rs
+++ b/crates/ide-assists/src/handlers/auto_import.rs
@@ -38,7 +38,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
// use super::AssistContext;
// ```
//
-// .Import Granularity
+// #### Import Granularity
//
// It is possible to configure how use-trees are merged with the `imports.granularity.group` setting.
// It has the following configurations:
@@ -54,7 +54,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
//
// In `VS Code` the configuration for this is `rust-analyzer.imports.granularity.group`.
//
-// .Import Prefix
+// #### Import Prefix
//
// The style of imports in the same crate is configurable through the `imports.prefix` setting.
// It has the following configurations:
@@ -68,7 +68,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
//
// In `VS Code` the configuration for this is `rust-analyzer.imports.prefix`.
//
-// image::https://user-images.githubusercontent.com/48062697/113020673-b85be580-917a-11eb-9022-59585f35d4f8.gif[]
+// ![Auto Import](https://user-images.githubusercontent.com/48062697/113020673-b85be580-917a-11eb-9022-59585f35d4f8.gif)
// Assist: auto_import
//
diff --git a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
index 3c84f83906..f6e516db88 100644
--- a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
+++ b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
@@ -140,8 +140,10 @@ fn edit_struct_references(
match_ast! {
match node {
ast::TupleStructPat(tuple_struct_pat) => {
+ let file_range = ctx.sema.original_range_opt(&node)?;
+ edit.edit_file(file_range.file_id);
edit.replace(
- tuple_struct_pat.syntax().text_range(),
+ file_range.range,
ast::make::record_pat_with_fields(
tuple_struct_pat.path()?,
ast::make::record_pat_field_list(tuple_struct_pat.fields().zip(names).map(
@@ -924,4 +926,102 @@ pub struct Foo { #[my_custom_attr] field1: u32 }
"#,
);
}
+
+ #[test]
+ fn convert_in_macro_pattern_args() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+macro_rules! foo {
+ ($expression:expr, $pattern:pat) => {
+ match $expression {
+ $pattern => true,
+ _ => false
+ }
+ };
+}
+enum Expr {
+ A$0(usize),
+}
+fn main() {
+ let e = Expr::A(0);
+ foo!(e, Expr::A(0));
+}
+"#,
+ r#"
+macro_rules! foo {
+ ($expression:expr, $pattern:pat) => {
+ match $expression {
+ $pattern => true,
+ _ => false
+ }
+ };
+}
+enum Expr {
+ A { field1: usize },
+}
+fn main() {
+ let e = Expr::A { field1: 0 };
+ foo!(e, Expr::A { field1: 0 });
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_in_multi_file_macro_pattern_args() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+//- /main.rs
+mod foo;
+
+enum Test {
+ A$0(i32)
+}
+
+//- /foo.rs
+use crate::Test;
+
+macro_rules! foo {
+ ($expression:expr, $pattern:pat) => {
+ match $expression {
+ $pattern => true,
+ _ => false
+ }
+ };
+}
+
+fn foo() {
+ let a = Test::A(0);
+ foo!(a, Test::A(0));
+}
+"#,
+ r#"
+//- /main.rs
+mod foo;
+
+enum Test {
+ A { field1: i32 }
+}
+
+//- /foo.rs
+use crate::Test;
+
+macro_rules! foo {
+ ($expression:expr, $pattern:pat) => {
+ match $expression {
+ $pattern => true,
+ _ => false
+ }
+ };
+}
+
+fn foo() {
+ let a = Test::A { field1: 0 };
+ foo!(a, Test::A { field1: 0 });
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/expand_glob_import.rs b/crates/ide-assists/src/handlers/expand_glob_import.rs
index 094fdc46eb..0b95d6177f 100644
--- a/crates/ide-assists/src/handlers/expand_glob_import.rs
+++ b/crates/ide-assists/src/handlers/expand_glob_import.rs
@@ -3,10 +3,11 @@ use hir::{AssocItem, Enum, HasVisibility, Module, ModuleDef, Name, PathResolutio
use ide_db::{
defs::{Definition, NameRefClass},
search::SearchScope,
+ source_change::SourceChangeBuilder,
};
use stdx::never;
use syntax::{
- ast::{self, make},
+ ast::{self, make, Use, UseTree, VisibilityKind},
ted, AstNode, Direction, SyntaxNode, SyntaxToken, T,
};
@@ -43,6 +44,7 @@ use crate::{
pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let star = ctx.find_token_syntax_at_offset(T![*])?;
let use_tree = star.parent().and_then(ast::UseTree::cast)?;
+ let use_item = star.parent_ancestors().find_map(ast::Use::cast)?;
let (parent, mod_path) = find_parent_and_path(&star)?;
let target_module = match ctx.sema.resolve_path(&mod_path)? {
PathResolution::Def(ModuleDef::Module(it)) => Expandable::Module(it),
@@ -53,8 +55,9 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let current_scope = ctx.sema.scope(&star.parent()?)?;
let current_module = current_scope.module();
- let refs_in_target = find_refs_in_mod(ctx, target_module, current_module)?;
- let imported_defs = find_imported_defs(ctx, star)?;
+ if !is_visible_from(ctx, &target_module, current_module) {
+ return None;
+ }
let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone());
acc.add(
@@ -62,37 +65,149 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) ->
"Expand glob import",
target.text_range(),
|builder| {
- let use_tree = builder.make_mut(use_tree);
-
- let names_to_import = find_names_to_import(ctx, refs_in_target, imported_defs);
- let expanded = make::use_tree_list(names_to_import.iter().map(|n| {
- let path = make::ext::ident_path(
- &n.display(ctx.db(), current_module.krate().edition(ctx.db())).to_string(),
- );
- make::use_tree(path, None, None, false)
- }))
- .clone_for_update();
-
- match use_tree.star_token() {
- Some(star) => {
- let needs_braces = use_tree.path().is_some() && names_to_import.len() != 1;
- if needs_braces {
- ted::replace(star, expanded.syntax())
- } else {
- let without_braces = expanded
- .syntax()
- .children_with_tokens()
- .filter(|child| !matches!(child.kind(), T!['{'] | T!['}']))
- .collect();
- ted::replace_with_many(star, without_braces)
- }
- }
- None => never!(),
- }
+ build_expanded_import(
+ ctx,
+ builder,
+ use_tree,
+ use_item,
+ target_module,
+ current_module,
+ false,
+ )
+ },
+ )
+}
+
+// Assist: expand_glob_reexport
+//
+// Expands non-private glob imports.
+//
+// ```
+// mod foo {
+// pub struct Bar;
+// pub struct Baz;
+// }
+//
+// pub use foo::*$0;
+// ```
+// ->
+// ```
+// mod foo {
+// pub struct Bar;
+// pub struct Baz;
+// }
+//
+// pub use foo::{Bar, Baz};
+// ```
+pub(crate) fn expand_glob_reexport(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let star = ctx.find_token_syntax_at_offset(T![*])?;
+ let use_tree = star.parent().and_then(ast::UseTree::cast)?;
+ let use_item = star.parent_ancestors().find_map(ast::Use::cast)?;
+ let (parent, mod_path) = find_parent_and_path(&star)?;
+ let target_module = match ctx.sema.resolve_path(&mod_path)? {
+ PathResolution::Def(ModuleDef::Module(it)) => Expandable::Module(it),
+ PathResolution::Def(ModuleDef::Adt(hir::Adt::Enum(e))) => Expandable::Enum(e),
+ _ => return None,
+ };
+
+ let current_scope = ctx.sema.scope(&star.parent()?)?;
+ let current_module = current_scope.module();
+
+ if let VisibilityKind::PubSelf = get_export_visibility_kind(&use_item) {
+ return None;
+ }
+ if !is_visible_from(ctx, &target_module, current_module) {
+ return None;
+ }
+
+ let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone());
+ acc.add(
+ AssistId("expand_glob_reexport", AssistKind::RefactorRewrite),
+ "Expand glob reexport",
+ target.text_range(),
+ |builder| {
+ build_expanded_import(
+ ctx,
+ builder,
+ use_tree,
+ use_item,
+ target_module,
+ current_module,
+ true,
+ )
},
)
}
+fn build_expanded_import(
+ ctx: &AssistContext<'_>,
+ builder: &mut SourceChangeBuilder,
+ use_tree: UseTree,
+ use_item: Use,
+ target_module: Expandable,
+ current_module: Module,
+ reexport_public_items: bool,
+) {
+ let (must_be_pub, visible_from) = if !reexport_public_items {
+ (false, current_module)
+ } else {
+ match get_export_visibility_kind(&use_item) {
+ VisibilityKind::Pub => (true, current_module.krate().root_module()),
+ VisibilityKind::PubCrate => (false, current_module.krate().root_module()),
+ _ => (false, current_module),
+ }
+ };
+
+ let refs_in_target = find_refs_in_mod(ctx, target_module, visible_from, must_be_pub);
+ let imported_defs = find_imported_defs(ctx, use_item);
+
+ let filtered_defs =
+ if reexport_public_items { refs_in_target } else { refs_in_target.used_refs(ctx) };
+
+ let use_tree = builder.make_mut(use_tree);
+
+ let names_to_import = find_names_to_import(filtered_defs, imported_defs);
+ let expanded = make::use_tree_list(names_to_import.iter().map(|n| {
+ let path = make::ext::ident_path(
+ &n.display(ctx.db(), current_module.krate().edition(ctx.db())).to_string(),
+ );
+ make::use_tree(path, None, None, false)
+ }))
+ .clone_for_update();
+
+ match use_tree.star_token() {
+ Some(star) => {
+ let needs_braces = use_tree.path().is_some() && names_to_import.len() != 1;
+ if needs_braces {
+ ted::replace(star, expanded.syntax())
+ } else {
+ let without_braces = expanded
+ .syntax()
+ .children_with_tokens()
+ .filter(|child| !matches!(child.kind(), T!['{'] | T!['}']))
+ .collect();
+ ted::replace_with_many(star, without_braces)
+ }
+ }
+ None => never!(),
+ }
+}
+
+fn get_export_visibility_kind(use_item: &Use) -> VisibilityKind {
+ use syntax::ast::HasVisibility as _;
+ match use_item.visibility() {
+ Some(vis) => match vis.kind() {
+ VisibilityKind::PubCrate => VisibilityKind::PubCrate,
+ VisibilityKind::Pub => VisibilityKind::Pub,
+ VisibilityKind::PubSelf => VisibilityKind::PubSelf,
+ // We don't handle pub(in ...) and pub(super) yet
+ VisibilityKind::In(_) => VisibilityKind::PubSelf,
+ VisibilityKind::PubSuper => VisibilityKind::PubSelf,
+ },
+ None => VisibilityKind::PubSelf,
+ }
+}
+
enum Expandable {
Module(Module),
Enum(Enum),
@@ -130,14 +245,17 @@ struct Ref {
// could be alias
visible_name: Name,
def: Definition,
+ is_pub: bool,
}
impl Ref {
- fn from_scope_def(name: Name, scope_def: ScopeDef) -> Option<Self> {
+ fn from_scope_def(ctx: &AssistContext<'_>, name: Name, scope_def: ScopeDef) -> Option<Self> {
match scope_def {
- ScopeDef::ModuleDef(def) => {
- Some(Ref { visible_name: name, def: Definition::from(def) })
- }
+ ScopeDef::ModuleDef(def) => Some(Ref {
+ visible_name: name,
+ def: Definition::from(def),
+ is_pub: matches!(def.visibility(ctx.db()), hir::Visibility::Public),
+ }),
_ => None,
}
}
@@ -180,32 +298,32 @@ fn find_refs_in_mod(
ctx: &AssistContext<'_>,
expandable: Expandable,
visible_from: Module,
-) -> Option<Refs> {
- if !is_expandable_visible_from(ctx, &expandable, visible_from) {
- return None;
- }
-
+ must_be_pub: bool,
+) -> Refs {
match expandable {
Expandable::Module(module) => {
let module_scope = module.scope(ctx.db(), Some(visible_from));
- let refs =
- module_scope.into_iter().filter_map(|(n, d)| Ref::from_scope_def(n, d)).collect();
- Some(Refs(refs))
+ let refs = module_scope
+ .into_iter()
+ .filter_map(|(n, d)| Ref::from_scope_def(ctx, n, d))
+ .filter(|r| !must_be_pub || r.is_pub)
+ .collect();
+ Refs(refs)
}
- Expandable::Enum(enm) => Some(Refs(
+ Expandable::Enum(enm) => Refs(
enm.variants(ctx.db())
.into_iter()
- .map(|v| Ref { visible_name: v.name(ctx.db()), def: Definition::Variant(v) })
+ .map(|v| Ref {
+ visible_name: v.name(ctx.db()),
+ def: Definition::Variant(v),
+ is_pub: true,
+ })
.collect(),
- )),
+ ),
}
}
-fn is_expandable_visible_from(
- ctx: &AssistContext<'_>,
- expandable: &Expandable,
- from: Module,
-) -> bool {
+fn is_visible_from(ctx: &AssistContext<'_>, expandable: &Expandable, from: Module) -> bool {
fn is_mod_visible_from(ctx: &AssistContext<'_>, module: Module, from: Module) -> bool {
match module.parent(ctx.db()) {
Some(parent) => {
@@ -246,50 +364,34 @@ fn is_expandable_visible_from(
// use foo::*$0;
// use baz::Baz;
// ↑ ---------------
-fn find_imported_defs(ctx: &AssistContext<'_>, star: SyntaxToken) -> Option<Vec<Definition>> {
- let parent_use_item_syntax = star.parent_ancestors().find_map(|n| {
- if ast::Use::can_cast(n.kind()) {
- Some(n)
- } else {
- None
- }
- })?;
-
- Some(
- [Direction::Prev, Direction::Next]
- .into_iter()
- .flat_map(|dir| {
- parent_use_item_syntax
- .siblings(dir.to_owned())
- .filter(|n| ast::Use::can_cast(n.kind()))
- })
- .flat_map(|n| n.descendants().filter_map(ast::NameRef::cast))
- .filter_map(|r| match NameRefClass::classify(&ctx.sema, &r)? {
- NameRefClass::Definition(
- def @ (Definition::Macro(_)
- | Definition::Module(_)
- | Definition::Function(_)
- | Definition::Adt(_)
- | Definition::Variant(_)
- | Definition::Const(_)
- | Definition::Static(_)
- | Definition::Trait(_)
- | Definition::TypeAlias(_)),
- _,
- ) => Some(def),
- _ => None,
- })
- .collect(),
- )
+fn find_imported_defs(ctx: &AssistContext<'_>, use_item: Use) -> Vec<Definition> {
+ [Direction::Prev, Direction::Next]
+ .into_iter()
+ .flat_map(|dir| {
+ use_item.syntax().siblings(dir.to_owned()).filter(|n| ast::Use::can_cast(n.kind()))
+ })
+ .flat_map(|n| n.descendants().filter_map(ast::NameRef::cast))
+ .filter_map(|r| match NameRefClass::classify(&ctx.sema, &r)? {
+ NameRefClass::Definition(
+ def @ (Definition::Macro(_)
+ | Definition::Module(_)
+ | Definition::Function(_)
+ | Definition::Adt(_)
+ | Definition::Variant(_)
+ | Definition::Const(_)
+ | Definition::Static(_)
+ | Definition::Trait(_)
+ | Definition::TypeAlias(_)),
+ _,
+ ) => Some(def),
+ _ => None,
+ })
+ .collect()
}
-fn find_names_to_import(
- ctx: &AssistContext<'_>,
- refs_in_target: Refs,
- imported_defs: Vec<Definition>,
-) -> Vec<Name> {
- let used_refs = refs_in_target.used_refs(ctx).filter_out_by_defs(imported_defs);
- used_refs.0.iter().map(|r| r.visible_name.clone()).collect()
+fn find_names_to_import(refs_in_target: Refs, imported_defs: Vec<Definition>) -> Vec<Name> {
+ let final_refs = refs_in_target.filter_out_by_defs(imported_defs);
+ final_refs.0.iter().map(|r| r.visible_name.clone()).collect()
}
#[cfg(test)]
@@ -1036,4 +1138,83 @@ mod abc {
}"#,
)
}
+
+ #[test]
+ fn expanding_glob_reexport() {
+ check_assist(
+ expand_glob_reexport,
+ r"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+ struct Qux;
+
+ pub fn f() {}
+
+ pub(crate) fn g() {}
+ pub(self) fn h() {}
+}
+
+pub use foo::*$0;
+",
+ r"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+ struct Qux;
+
+ pub fn f() {}
+
+ pub(crate) fn g() {}
+ pub(self) fn h() {}
+}
+
+pub use foo::{Bar, Baz, f};
+",
+ )
+ }
+
+ #[test]
+ fn expanding_recursive_glob_reexport() {
+ check_assist(
+ expand_glob_reexport,
+ r"
+mod foo {
+ pub use bar::*;
+ mod bar {
+ pub struct Bar;
+ pub struct Baz;
+ }
+}
+
+pub use foo::*$0;
+",
+ r"
+mod foo {
+ pub use bar::*;
+ mod bar {
+ pub struct Bar;
+ pub struct Baz;
+ }
+}
+
+pub use foo::{Bar, Baz};
+",
+ )
+ }
+
+ #[test]
+ fn expanding_reexport_is_not_applicable_for_private_import() {
+ check_assist_not_applicable(
+ expand_glob_reexport,
+ r"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+}
+
+use foo::*$0;
+",
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index 081e36b4ff..220259451e 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -48,6 +48,10 @@ use crate::{
// }
// ```
pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ if !ctx.config.code_action_grouping {
+ return None;
+ }
+
let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
let strukt_name = strukt.name()?;
let current_module = ctx.sema.scope(strukt.syntax())?.module();
@@ -213,7 +217,9 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
#[cfg(test)]
mod tests {
- use crate::tests::{check_assist, check_assist_not_applicable};
+ use crate::tests::{
+ check_assist, check_assist_not_applicable, check_assist_not_applicable_no_grouping,
+ };
use super::*;
@@ -717,4 +723,21 @@ impl Person {
"#,
);
}
+
+ #[test]
+ fn delegate_method_skipped_when_no_grouping() {
+ check_assist_not_applicable_no_grouping(
+ generate_delegate_methods,
+ r#"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+struct Person {
+ ag$0e: Age,
+}"#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
index 66bf9b0186..55b860d0ff 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_trait.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
@@ -88,6 +88,10 @@ use syntax::{
// }
// ```
pub(crate) fn generate_delegate_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ if !ctx.config.code_action_grouping {
+ return None;
+ }
+
let strukt = Struct::new(ctx.find_node_at_offset::<ast::Struct>()?)?;
let field: Field = match ctx.find_node_at_offset::<ast::RecordField>() {
@@ -788,7 +792,9 @@ fn qualified_path(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Pat
mod test {
use super::*;
- use crate::tests::{check_assist, check_assist_not_applicable};
+ use crate::tests::{
+ check_assist, check_assist_not_applicable, check_assist_not_applicable_no_grouping,
+ };
#[test]
fn test_tuple_struct_basic() {
@@ -1836,4 +1842,33 @@ impl<D, T: C<A>> C<D> for B<T> {
"#,
)
}
+
+ #[test]
+ fn delegate_trait_skipped_when_no_grouping() {
+ check_assist_not_applicable_no_grouping(
+ generate_delegate_trait,
+ r#"
+trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+}
+struct A;
+impl SomeTrait for A {
+ type T = u32;
+
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+
+ fn method_(&mut self) -> bool {
+ false
+ }
+}
+struct B {
+ a$0 : A,
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs
index 5c95b25f28..179742f91b 100644
--- a/crates/ide-assists/src/lib.rs
+++ b/crates/ide-assists/src/lib.rs
@@ -270,6 +270,7 @@ mod handlers {
destructure_tuple_binding::destructure_tuple_binding,
destructure_struct_binding::destructure_struct_binding,
expand_glob_import::expand_glob_import,
+ expand_glob_import::expand_glob_reexport,
explicit_enum_discriminant::explicit_enum_discriminant,
extract_expressions_from_format_string::extract_expressions_from_format_string,
extract_struct_from_enum_variant::extract_struct_from_enum_variant,
diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs
index 48d2af6d3f..11aeb21c77 100644
--- a/crates/ide-assists/src/tests.rs
+++ b/crates/ide-assists/src/tests.rs
@@ -34,6 +34,26 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
assist_emit_must_use: false,
term_search_fuel: 400,
term_search_borrowck: true,
+ code_action_grouping: true,
+};
+
+pub(crate) const TEST_CONFIG_NO_GROUPING: AssistConfig = AssistConfig {
+ snippet_cap: SnippetCap::new(true),
+ allowed: None,
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::Plain,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ prefer_no_std: false,
+ prefer_prelude: true,
+ prefer_absolute: false,
+ assist_emit_must_use: false,
+ term_search_fuel: 400,
+ term_search_borrowck: true,
+ code_action_grouping: false,
};
pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
@@ -52,6 +72,7 @@ pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
assist_emit_must_use: false,
term_search_fuel: 400,
term_search_borrowck: true,
+ code_action_grouping: true,
};
pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
@@ -70,6 +91,7 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
assist_emit_must_use: false,
term_search_fuel: 400,
term_search_borrowck: true,
+ code_action_grouping: true,
};
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, EditionedFileId) {
@@ -173,6 +195,20 @@ pub(crate) fn check_assist_not_applicable_for_import_one(
);
}
+#[track_caller]
+pub(crate) fn check_assist_not_applicable_no_grouping(
+ assist: Handler,
+ #[rust_analyzer::rust_fixture] ra_fixture: &str,
+) {
+ check_with_config(
+ TEST_CONFIG_NO_GROUPING,
+ assist,
+ ra_fixture,
+ ExpectedResult::NotApplicable,
+ None,
+ );
+}
+
/// Check assist in unresolved state. Useful to check assists for lazy computation.
#[track_caller]
pub(crate) fn check_assist_unresolved(
diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs
index 54e42f126b..0662527a38 100644
--- a/crates/ide-assists/src/tests/generated.rs
+++ b/crates/ide-assists/src/tests/generated.rs
@@ -1,4 +1,4 @@
-//! Generated by `cargo codegen assists-doc-tests`, do not edit by hand.
+//! Generated by `cargo xtask codegen assists-doc-tests`, do not edit by hand.
use super::check_doc_test;
@@ -910,6 +910,29 @@ fn qux(bar: Bar, baz: Baz) {}
}
#[test]
+fn doctest_expand_glob_reexport() {
+ check_doc_test(
+ "expand_glob_reexport",
+ r#####"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+}
+
+pub use foo::*$0;
+"#####,
+ r#####"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+}
+
+pub use foo::{Bar, Baz};
+"#####,
+ )
+}
+
+#[test]
fn doctest_explicit_enum_discriminant() {
check_doc_test(
"explicit_enum_discriminant",
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index 78ff441791..c1332d99bf 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -793,8 +793,8 @@ pub(crate) fn convert_reference_type(
}
fn could_deref_to_target(ty: &hir::Type, target: &hir::Type, db: &dyn HirDatabase) -> bool {
- let ty_ref = hir::Type::reference(ty, hir::Mutability::Shared);
- let target_ref = hir::Type::reference(target, hir::Mutability::Shared);
+ let ty_ref = ty.add_reference(hir::Mutability::Shared);
+ let target_ref = target.add_reference(hir::Mutability::Shared);
ty_ref.could_coerce_to(db, &target_ref)
}
diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs
index d12654665c..b38b9ac1f5 100644
--- a/crates/ide-completion/src/completions/dot.rs
+++ b/crates/ide-completion/src/completions/dot.rs
@@ -89,7 +89,7 @@ pub(crate) fn complete_dot(
acc.add_method(ctx, dot_access, func, None, None)
});
- if ctx.config.enable_auto_iter {
+ if ctx.config.enable_auto_iter && !receiver_ty.strip_references().impls_iterator(ctx.db) {
// FIXME:
// Checking for the existence of `iter()` is complicated in our setup, because we need to substitute
// its return type, so we instead check for `<&Self as IntoIterator>::IntoIter`.
@@ -1500,9 +1500,31 @@ fn main() {
bar.$0
}
"#,
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn no_iter_suggestion_on_iterator() {
+ check_no_kw(
+ r#"
+//- minicore: iterator
+struct MyIter;
+impl Iterator for MyIter {
+ type Item = ();
+ fn next(&mut self) -> Option<Self::Item> { None }
+}
+
+fn main() {
+ MyIter.$0
+}
+"#,
expect![[r#"
- me foo() fn(self: Bar)
-"#]],
+ me by_ref() (as Iterator) fn(&mut self) -> &mut Self
+ me into_iter() (as IntoIterator) fn(self) -> <Self as IntoIterator>::IntoIter
+ me next() (as Iterator) fn(&mut self) -> Option<<Self as Iterator>::Item>
+ me nth(…) (as Iterator) fn(&mut self, usize) -> Option<<Self as Iterator>::Item>
+ "#]],
);
}
}
diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs
index 24243f57b4..b5555e6610 100644
--- a/crates/ide-completion/src/completions/flyimport.rs
+++ b/crates/ide-completion/src/completions/flyimport.rs
@@ -83,19 +83,19 @@ use crate::{
// NOTE: currently, if an assoc item comes from a trait that's not currently imported, and it also has an unresolved and/or partially-qualified path,
// no imports will be proposed.
//
-// .Fuzzy search details
+// #### Fuzzy search details
//
// To avoid an excessive amount of the results returned, completion input is checked for inclusion in the names only
// (i.e. in `HashMap` in the `std::collections::HashMap` path).
// For the same reasons, avoids searching for any path imports for inputs with their length less than 2 symbols
// (but shows all associated items for any input length).
//
-// .Import configuration
+// #### Import configuration
//
// It is possible to configure how use-trees are merged with the `imports.granularity.group` setting.
// Mimics the corresponding behavior of the `Auto Import` feature.
//
-// .LSP and performance implications
+// #### LSP and performance implications
//
// The feature is enabled only if the LSP client supports LSP protocol version 3.16+ and reports the `additionalTextEdits`
// (case-sensitive) resolve client capability in its client capabilities.
@@ -103,7 +103,7 @@ use crate::{
// For clients with no such support, all edits have to be calculated on the completion request, including the fuzzy search completion ones,
// which might be slow ergo the feature is automatically disabled.
//
-// .Feature toggle
+// #### Feature toggle
//
// The feature can be forcefully turned off in the settings with the `rust-analyzer.completion.autoimport.enable` flag.
// Note that having this flag set to `true` does not guarantee that the feature is enabled: your client needs to have the corresponding
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs
index 2c39a8fdfe..28e2853096 100644
--- a/crates/ide-completion/src/completions/postfix.rs
+++ b/crates/ide-completion/src/completions/postfix.rs
@@ -2,17 +2,18 @@
mod format_like;
-use hir::ItemInNs;
-use ide_db::text_edit::TextEdit;
+use base_db::SourceDatabase;
+use hir::{ItemInNs, Semantics};
use ide_db::{
documentation::{Documentation, HasDocs},
imports::insert_use::ImportScope,
+ text_edit::TextEdit,
ty_filter::TryEnum,
- SnippetCap,
+ RootDatabase, SnippetCap,
};
use stdx::never;
use syntax::{
- ast::{self, make, AstNode, AstToken},
+ ast::{self, AstNode, AstToken},
SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR},
TextRange, TextSize,
};
@@ -48,7 +49,8 @@ pub(crate) fn complete_postfix(
};
let expr_ctx = &dot_access.ctx;
- let receiver_text = get_receiver_text(dot_receiver, receiver_is_ambiguous_float_literal);
+ let receiver_text =
+ get_receiver_text(&ctx.sema, dot_receiver, receiver_is_ambiguous_float_literal);
let cap = match ctx.config.snippet_cap {
Some(it) => it,
@@ -172,13 +174,15 @@ pub(crate) fn complete_postfix(
// The rest of the postfix completions create an expression that moves an argument,
// so it's better to consider references now to avoid breaking the compilation
- let (dot_receiver, node_to_replace_with) = include_references(dot_receiver);
- let receiver_text =
- get_receiver_text(&node_to_replace_with, receiver_is_ambiguous_float_literal);
- let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, &dot_receiver) {
- Some(it) => it,
- None => return,
- };
+ let (dot_receiver_including_refs, prefix) = include_references(dot_receiver);
+ let mut receiver_text =
+ get_receiver_text(&ctx.sema, dot_receiver, receiver_is_ambiguous_float_literal);
+ receiver_text.insert_str(0, &prefix);
+ let postfix_snippet =
+ match build_postfix_snippet_builder(ctx, cap, &dot_receiver_including_refs) {
+ Some(it) => it,
+ None => return,
+ };
if !ctx.config.snippets.is_empty() {
add_custom_postfix_completions(acc, ctx, &postfix_snippet, &receiver_text);
@@ -222,7 +226,7 @@ pub(crate) fn complete_postfix(
postfix_snippet("call", "function(expr)", &format!("${{1}}({receiver_text})"))
.add_to(acc, ctx.db);
- if let Some(parent) = dot_receiver.syntax().parent().and_then(|p| p.parent()) {
+ if let Some(parent) = dot_receiver_including_refs.syntax().parent().and_then(|p| p.parent()) {
if matches!(parent.kind(), STMT_LIST | EXPR_STMT) {
postfix_snippet("let", "let", &format!("let $0 = {receiver_text};"))
.add_to(acc, ctx.db);
@@ -231,9 +235,9 @@ pub(crate) fn complete_postfix(
}
}
- if let ast::Expr::Literal(literal) = dot_receiver.clone() {
+ if let ast::Expr::Literal(literal) = dot_receiver_including_refs.clone() {
if let Some(literal_text) = ast::String::cast(literal.token()) {
- add_format_like_completions(acc, ctx, &dot_receiver, cap, &literal_text);
+ add_format_like_completions(acc, ctx, &dot_receiver_including_refs, cap, &literal_text);
}
}
@@ -260,14 +264,20 @@ pub(crate) fn complete_postfix(
}
}
-fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal: bool) -> String {
- let mut text = if receiver_is_ambiguous_float_literal {
- let text = receiver.syntax().text();
- let without_dot = ..text.len() - TextSize::of('.');
- text.slice(without_dot).to_string()
- } else {
- receiver.to_string()
+fn get_receiver_text(
+ sema: &Semantics<'_, RootDatabase>,
+ receiver: &ast::Expr,
+ receiver_is_ambiguous_float_literal: bool,
+) -> String {
+ // Do not just call `receiver.to_string()`, as that will mess up whitespaces inside macros.
+ let Some(mut range) = sema.original_range_opt(receiver.syntax()) else {
+ return receiver.to_string();
};
+ if receiver_is_ambiguous_float_literal {
+ range.range = TextRange::at(range.range.start(), range.range.len() - TextSize::of('.'))
+ }
+ let file_text = sema.db.file_text(range.file_id.file_id());
+ let mut text = file_text[range.range].to_owned();
// The receiver texts should be interpreted as-is, as they are expected to be
// normal Rust expressions.
@@ -284,7 +294,7 @@ fn escape_snippet_bits(text: &mut String) {
stdx::replace(text, '$', "\\$");
}
-fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) {
+fn include_references(initial_element: &ast::Expr) -> (ast::Expr, String) {
let mut resulting_element = initial_element.clone();
while let Some(field_expr) = resulting_element.syntax().parent().and_then(ast::FieldExpr::cast)
@@ -292,7 +302,7 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) {
resulting_element = ast::Expr::from(field_expr);
}
- let mut new_element_opt = initial_element.clone();
+ let mut prefix = String::new();
while let Some(parent_deref_element) =
resulting_element.syntax().parent().and_then(ast::PrefixExpr::cast)
@@ -303,7 +313,7 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) {
resulting_element = ast::Expr::from(parent_deref_element);
- new_element_opt = make::expr_prefix(syntax::T![*], new_element_opt).into();
+ prefix.insert(0, '*');
}
if let Some(first_ref_expr) = resulting_element.syntax().parent().and_then(ast::RefExpr::cast) {
@@ -317,7 +327,7 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) {
let exclusive = parent_ref_element.mut_token().is_some();
resulting_element = ast::Expr::from(parent_ref_element);
- new_element_opt = make::expr_ref(new_element_opt, exclusive);
+ prefix.insert_str(0, if exclusive { "&mut " } else { "&" });
}
} else {
// If we do not find any ref expressions, restore
@@ -325,7 +335,7 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) {
resulting_element = initial_element.clone();
}
- (resulting_element, new_element_opt)
+ (resulting_element, prefix)
}
fn build_postfix_snippet_builder<'ctx>(
@@ -901,4 +911,31 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn inside_macro() {
+ check_edit(
+ "box",
+ r#"
+macro_rules! assert {
+ ( $it:expr $(,)? ) => { $it };
+}
+
+fn foo() {
+ let a = true;
+ assert!(if a == false { true } else { false }.$0);
+}
+ "#,
+ r#"
+macro_rules! assert {
+ ( $it:expr $(,)? ) => { $it };
+}
+
+fn foo() {
+ let a = true;
+ assert!(Box::new(if a == false { true } else { false }));
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-completion/src/completions/postfix/format_like.rs b/crates/ide-completion/src/completions/postfix/format_like.rs
index 2755329bb3..c612170eb5 100644
--- a/crates/ide-completion/src/completions/postfix/format_like.rs
+++ b/crates/ide-completion/src/completions/postfix/format_like.rs
@@ -14,7 +14,7 @@
// ** `logw` -> `log::warn!(...)`
// ** `loge` -> `log::error!(...)`
//
-// image::https://user-images.githubusercontent.com/48062697/113020656-b560f500-917a-11eb-87de-02991f61beb8.gif[]
+// ![Format String Completion](https://user-images.githubusercontent.com/48062697/113020656-b560f500-917a-11eb-87de-02991f61beb8.gif)
use ide_db::{
syntax_helpers::format_string_exprs::{parse_format_exprs, with_placeholders, Arg},
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 2f1860cbb5..7862b25878 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -442,6 +442,8 @@ pub(crate) struct CompletionContext<'a> {
pub(crate) krate: hir::Crate,
/// The module of the `scope`.
pub(crate) module: hir::Module,
+ /// The function where we're completing, if inside a function.
+ pub(crate) containing_function: Option<hir::Function>,
/// Whether nightly toolchain is used. Cached since this is looked up a lot.
pub(crate) is_nightly: bool,
/// The edition of the current crate
@@ -760,6 +762,7 @@ impl<'a> CompletionContext<'a> {
let krate = scope.krate();
let module = scope.module();
+ let containing_function = scope.containing_function();
let edition = krate.edition(db);
let toolchain = db.toolchain_channel(krate.into());
@@ -874,6 +877,7 @@ impl<'a> CompletionContext<'a> {
token,
krate,
module,
+ containing_function,
is_nightly,
edition,
expected_name,
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index f5a50ae819..eecd412bc4 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -59,7 +59,7 @@ pub(super) fn expand_and_analyze(
// make the offset point to the start of the original token, as that is what the
// intermediate offsets calculated in expansion always points to
let offset = offset - relative_offset;
- let expansion = expand(
+ let expansion = expand_maybe_stop(
sema,
original_file.clone(),
speculative_file.clone(),
@@ -118,7 +118,7 @@ fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Opt
/// that we check, we subtract `COMPLETION_MARKER.len()`. This may not be accurate because proc macros
/// can insert the text of the completion marker in other places while removing the span, but this is
/// the best we can do.
-fn expand(
+fn expand_maybe_stop(
sema: &Semantics<'_, RootDatabase>,
original_file: SyntaxNode,
speculative_file: SyntaxNode,
@@ -126,23 +126,48 @@ fn expand(
fake_ident_token: SyntaxToken,
relative_offset: TextSize,
) -> Option<ExpansionResult> {
- let _p = tracing::info_span!("CompletionContext::expand").entered();
+ if let result @ Some(_) = expand(
+ sema,
+ original_file.clone(),
+ speculative_file.clone(),
+ original_offset,
+ fake_ident_token.clone(),
+ relative_offset,
+ ) {
+ return result;
+ }
+ // This needs to come after the recursive call, because our "inside macro" detection is subtly wrong
+ // with regard to attribute macros named `test` that are not std's test. So hopefully we will expand
+ // them successfully above and be able to analyze.
// Left biased since there may already be an identifier token there, and we appended to it.
if !sema.might_be_inside_macro_call(&fake_ident_token)
&& token_at_offset_ignore_whitespace(&original_file, original_offset + relative_offset)
.is_some_and(|original_token| !sema.might_be_inside_macro_call(&original_token))
{
// Recursion base case.
- return Some(ExpansionResult {
+ Some(ExpansionResult {
original_file,
speculative_file,
original_offset,
speculative_offset: fake_ident_token.text_range().start(),
fake_ident_token,
derive_ctx: None,
- });
+ })
+ } else {
+ None
}
+}
+
+fn expand(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: SyntaxNode,
+ speculative_file: SyntaxNode,
+ original_offset: TextSize,
+ fake_ident_token: SyntaxToken,
+ relative_offset: TextSize,
+) -> Option<ExpansionResult> {
+ let _p = tracing::info_span!("CompletionContext::expand").entered();
let parent_item =
|item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
@@ -197,7 +222,7 @@ fn expand(
// stop here to prevent problems from happening
return None;
}
- let result = expand(
+ let result = expand_maybe_stop(
sema,
actual_expansion.clone(),
fake_expansion.clone(),
@@ -317,7 +342,7 @@ fn expand(
// stop here to prevent problems from happening
return None;
}
- let result = expand(
+ let result = expand_maybe_stop(
sema,
actual_expansion.clone(),
fake_expansion.clone(),
@@ -386,7 +411,7 @@ fn expand(
// stop here to prevent problems from happening
return None;
}
- let result = expand(
+ let result = expand_maybe_stop(
sema,
actual_expansion.clone(),
fake_expansion.clone(),
diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs
index 8051d48ca5..a1f2eaeb1b 100644
--- a/crates/ide-completion/src/lib.rs
+++ b/crates/ide-completion/src/lib.rs
@@ -106,11 +106,13 @@ impl CompletionFieldsToResolve {
//
// There also snippet completions:
//
-// .Expressions
+// #### Expressions
+//
// - `pd` -> `eprintln!(" = {:?}", );`
// - `ppd` -> `eprintln!(" = {:#?}", );`
//
-// .Items
+// #### Items
+//
// - `tfn` -> `#[test] fn feature(){}`
// - `tmod` ->
// ```rust
@@ -127,7 +129,7 @@ impl CompletionFieldsToResolve {
// Those are the additional completion options with automatic `use` import and options from all project importable items,
// fuzzy matched against the completion input.
//
-// image::https://user-images.githubusercontent.com/48062697/113020667-b72ab880-917a-11eb-8778-716cf26a0eb3.gif[]
+// ![Magic Completions](https://user-images.githubusercontent.com/48062697/113020667-b72ab880-917a-11eb-8778-716cf26a0eb3.gif)
/// Main entry point for completion. We run completion as a two-phase process.
///
diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs
index c3354902c3..fd90613964 100644
--- a/crates/ide-completion/src/render/function.rs
+++ b/crates/ide-completion/src/render/function.rs
@@ -144,7 +144,7 @@ fn render(
let detail = if ctx.completion.config.full_function_signatures {
detail_full(db, func, ctx.completion.edition)
} else {
- detail(db, func, ctx.completion.edition)
+ detail(ctx.completion, func, ctx.completion.edition)
};
item.set_documentation(ctx.docs(func))
.set_deprecated(ctx.is_deprecated(func) || ctx.is_deprecated_assoc_item(func))
@@ -307,26 +307,26 @@ fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type) -> &'sta
""
}
-fn detail(db: &dyn HirDatabase, func: hir::Function, edition: Edition) -> String {
- let mut ret_ty = func.ret_type(db);
+fn detail(ctx: &CompletionContext<'_>, func: hir::Function, edition: Edition) -> String {
+ let mut ret_ty = func.ret_type(ctx.db);
let mut detail = String::new();
- if func.is_const(db) {
+ if func.is_const(ctx.db) {
format_to!(detail, "const ");
}
- if func.is_async(db) {
+ if func.is_async(ctx.db) {
format_to!(detail, "async ");
- if let Some(async_ret) = func.async_ret_type(db) {
+ if let Some(async_ret) = func.async_ret_type(ctx.db) {
ret_ty = async_ret;
}
}
- if func.is_unsafe_to_call(db) {
+ if func.is_unsafe_to_call(ctx.db, ctx.containing_function, ctx.edition) {
format_to!(detail, "unsafe ");
}
- format_to!(detail, "fn({})", params_display(db, func, edition));
+ format_to!(detail, "fn({})", params_display(ctx.db, func, edition));
if !ret_ty.is_unit() {
- format_to!(detail, " -> {}", ret_ty.display(db, edition));
+ format_to!(detail, " -> {}", ret_ty.display(ctx.db, edition));
}
detail
}
diff --git a/crates/ide-completion/src/snippet.rs b/crates/ide-completion/src/snippet.rs
index 866b83a614..07f33a826e 100644
--- a/crates/ide-completion/src/snippet.rs
+++ b/crates/ide-completion/src/snippet.rs
@@ -8,8 +8,7 @@
//
// A custom snippet can be defined by adding it to the `rust-analyzer.completion.snippets.custom` object respectively.
//
-// [source,json]
-// ----
+// ```json
// {
// "rust-analyzer.completion.snippets.custom": {
// "thread spawn": {
@@ -25,7 +24,7 @@
// }
// }
// }
-// ----
+// ```
//
// In the example above:
//
@@ -39,6 +38,7 @@
// * `description` is an optional description of the snippet, if unset the snippet name will be used.
//
// * `requires` is an optional list of item paths that have to be resolvable in the current crate where the completion is rendered.
+//
// On failure of resolution the snippet won't be applicable, otherwise the snippet will insert an import for the items on insertion if
// the items aren't yet in scope.
//
@@ -55,8 +55,8 @@
//
// For the VSCode editor, rust-analyzer also ships with a small set of defaults which can be removed
// by overwriting the settings object mentioned above, the defaults are:
-// [source,json]
-// ----
+//
+// ```json
// {
// "Arc::new": {
// "postfix": "arc",
@@ -98,7 +98,7 @@
// "scope": "expr"
// }
// }
-// ----
+// ```
use hir::{ModPath, Name, Symbol};
use ide_db::imports::import_assets::LocatedImport;
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index 663a038580..3755751283 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -1986,3 +1986,53 @@ fn foo() {
"#]],
);
}
+
+#[test]
+fn non_std_test_attr_macro() {
+ check(
+ r#"
+//- proc_macros: identity
+use proc_macros::identity as test;
+
+#[test]
+fn foo() {
+ $0
+}
+ "#,
+ expect![[r#"
+ fn foo() fn()
+ md proc_macros
+ bt u32 u32
+ kw async
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw false
+ kw fn
+ kw for
+ kw if
+ kw if let
+ kw impl
+ kw let
+ kw loop
+ kw match
+ kw mod
+ kw return
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw true
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ kw while
+ kw while let
+ sn macro_rules
+ sn pd
+ sn ppd
+ "#]],
+ );
+}
diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml
index 17f0e69bde..c8a8a2d169 100644
--- a/crates/ide-db/Cargo.toml
+++ b/crates/ide-db/Cargo.toml
@@ -30,7 +30,6 @@ bitflags.workspace = true
# local deps
base-db.workspace = true
-limit.workspace = true
parser.workspace = true
profile.workspace = true
stdx.workspace = true
diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs
index 35e3a8d9bf..46ff4fbf9e 100644
--- a/crates/ide-db/src/apply_change.rs
+++ b/crates/ide-db/src/apply_change.rs
@@ -44,12 +44,11 @@ impl RootDatabase {
//
// Clears rust-analyzer's internal database and prints memory usage statistics.
//
- // |===
- // | Editor | Action Name
- //
+ // | Editor | Action Name |
+ // |---------|-------------|
// | VS Code | **rust-analyzer: Memory Usage (Clears Database)**
- // |===
- // image::https://user-images.githubusercontent.com/48062697/113065592-08559f00-91b1-11eb-8c96-64b88068ec02.gif[]
+ //
+ // ![Memory Usage](https://user-images.githubusercontent.com/48062697/113065592-08559f00-91b1-11eb-8c96-64b88068ec02.gif)
pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes, usize)> {
let mut acc: Vec<(String, Bytes, usize)> = vec![];
diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs
index d12bda0816..6f71c3d9bd 100644
--- a/crates/ide-db/src/defs.rs
+++ b/crates/ide-db/src/defs.rs
@@ -108,7 +108,7 @@ impl Definition {
ItemContainer::Trait(it) => Some(it.into()),
ItemContainer::Impl(it) => Some(it.into()),
ItemContainer::Module(it) => Some(it.into()),
- ItemContainer::ExternBlock() | ItemContainer::Crate(_) => None,
+ ItemContainer::ExternBlock(_) | ItemContainer::Crate(_) => None,
}
}
match self {
@@ -986,6 +986,7 @@ impl From<GenericDef> for Definition {
GenericDef::TypeAlias(it) => it.into(),
GenericDef::Impl(it) => it.into(),
GenericDef::Const(it) => it.into(),
+ GenericDef::Static(it) => it.into(),
}
}
}
diff --git a/crates/ide-db/src/generated/lints.rs b/crates/ide-db/src/generated/lints.rs
index 14af22c319..ed9d6c6750 100644
--- a/crates/ide-db/src/generated/lints.rs
+++ b/crates/ide-db/src/generated/lints.rs
@@ -9107,8 +9107,8 @@ The tracking issue for this feature is: [#27721]
deny_since: None,
},
Lint {
- label: "pattern_complexity",
- description: r##"# `pattern_complexity`
+ label: "pattern_complexity_limit",
+ description: r##"# `pattern_complexity_limit`
This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs
index ad86d855b5..77fc59b4ec 100644
--- a/crates/ide-db/src/imports/import_assets.rs
+++ b/crates/ide-db/src/imports/import_assets.rs
@@ -357,7 +357,7 @@ fn path_applicable_imports(
let mod_path = mod_path(item)?;
Some(LocatedImport::new(mod_path, item, item))
})
- .take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
+ .take(DEFAULT_QUERY_SEARCH_LIMIT)
.collect()
}
// we have some unresolved qualifier that we search an import for
@@ -383,7 +383,7 @@ fn path_applicable_imports(
qualifier_rest,
)
})
- .take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
+ .take(DEFAULT_QUERY_SEARCH_LIMIT)
.collect(),
}
}
diff --git a/crates/ide-db/src/items_locator.rs b/crates/ide-db/src/items_locator.rs
index a2062f36d3..4d9c051354 100644
--- a/crates/ide-db/src/items_locator.rs
+++ b/crates/ide-db/src/items_locator.rs
@@ -6,7 +6,6 @@ use std::ops::ControlFlow;
use either::Either;
use hir::{import_map, Crate, ItemInNs, Module, Semantics};
-use limit::Limit;
use crate::{
imports::import_assets::NameToImport,
@@ -15,7 +14,7 @@ use crate::{
};
/// A value to use, when uncertain which limit to pick.
-pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(100);
+pub const DEFAULT_QUERY_SEARCH_LIMIT: usize = 100;
pub use import_map::AssocSearchMode;
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs
index 0002fda0ba..22dc3d9e29 100644
--- a/crates/ide-db/src/prime_caches.rs
+++ b/crates/ide-db/src/prime_caches.rs
@@ -6,12 +6,13 @@ mod topologic_sort;
use std::time::Duration;
-use hir::db::DefDatabase;
+use hir::{db::DefDatabase, Symbol};
+use itertools::Itertools;
use crate::{
base_db::{
ra_salsa::{Database, ParallelDatabase, Snapshot},
- Cancelled, CrateId, SourceDatabase, SourceRootDatabase,
+ Cancelled, CrateId, SourceDatabase,
},
symbol_index::SymbolsDatabase,
FxIndexMap, RootDatabase,
@@ -21,11 +22,12 @@ use crate::{
#[derive(Debug)]
pub struct ParallelPrimeCachesProgress {
/// the crates that we are currently priming.
- pub crates_currently_indexing: Vec<String>,
+ pub crates_currently_indexing: Vec<Symbol>,
/// the total number of crates we want to prime.
pub crates_total: usize,
/// the total number of crates that have finished priming
pub crates_done: usize,
+ pub work_type: &'static str,
}
pub fn parallel_prime_caches(
@@ -47,41 +49,32 @@ pub fn parallel_prime_caches(
};
enum ParallelPrimeCacheWorkerProgress {
- BeginCrate { crate_id: CrateId, crate_name: String },
+ BeginCrate { crate_id: CrateId, crate_name: Symbol },
EndCrate { crate_id: CrateId },
}
+ // We split off def map computation from other work,
+ // as the def map is the relevant one. Once the defmaps are computed
+ // the project is ready to go, the other indices are just nice to have for some IDE features.
+ #[derive(PartialOrd, Ord, PartialEq, Eq, Copy, Clone)]
+ enum PrimingPhase {
+ DefMap,
+ ImportMap,
+ CrateSymbols,
+ }
+
let (work_sender, progress_receiver) = {
let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
let (work_sender, work_receiver) = crossbeam_channel::unbounded();
- let graph = graph.clone();
- let local_roots = db.local_roots();
let prime_caches_worker = move |db: Snapshot<RootDatabase>| {
- while let Ok((crate_id, crate_name)) = work_receiver.recv() {
+ while let Ok((crate_id, crate_name, kind)) = work_receiver.recv() {
progress_sender
.send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?;
- // Compute the DefMap and possibly ImportMap
- let file_id = graph[crate_id].root_file_id;
- let root_id = db.file_source_root(file_id);
- if db.source_root(root_id).is_library {
- db.crate_def_map(crate_id);
- } else {
- // This also computes the DefMap
- db.import_map(crate_id);
- }
-
- // Compute the symbol search index.
- // This primes the cache for `ide_db::symbol_index::world_symbols()`.
- //
- // We do this for workspace crates only (members of local_roots), because doing it
- // for all dependencies could be *very* unnecessarily slow in a large project.
- //
- // FIXME: We should do it unconditionally if the configuration is set to default to
- // searching dependencies (rust-analyzer.workspace.symbol.search.scope), but we
- // would need to pipe that configuration information down here.
- if local_roots.contains(&root_id) {
- db.crate_symbols(crate_id.into());
+ match kind {
+ PrimingPhase::DefMap => _ = db.crate_def_map(crate_id),
+ PrimingPhase::ImportMap => _ = db.import_map(crate_id),
+ PrimingPhase::CrateSymbols => _ = db.crate_symbols(crate_id.into()),
}
progress_sender.send(ParallelPrimeCacheWorkerProgress::EndCrate { crate_id })?;
@@ -112,16 +105,34 @@ pub fn parallel_prime_caches(
let mut crates_currently_indexing =
FxIndexMap::with_capacity_and_hasher(num_worker_threads, Default::default());
+ let mut additional_phases = vec![];
+
while crates_done < crates_total {
db.unwind_if_cancelled();
for crate_id in &mut crates_to_prime {
- work_sender
- .send((
- crate_id,
- graph[crate_id].display_name.as_deref().unwrap_or_default().to_owned(),
- ))
- .ok();
+ let krate = &graph[crate_id];
+ let name = krate
+ .display_name
+ .as_deref()
+ .cloned()
+ .unwrap_or_else(|| Symbol::integer(crate_id.into_raw().into_u32() as usize));
+ if krate.origin.is_lang() {
+ additional_phases.push((crate_id, name.clone(), PrimingPhase::ImportMap));
+ } else if krate.origin.is_local() {
+ // Compute the symbol search index.
+ // This primes the cache for `ide_db::symbol_index::world_symbols()`.
+ //
+ // We do this for workspace crates only (members of local_roots), because doing it
+ // for all dependencies could be *very* unnecessarily slow in a large project.
+ //
+ // FIXME: We should do it unconditionally if the configuration is set to default to
+ // searching dependencies (rust-analyzer.workspace.symbol.search.scope), but we
+ // would need to pipe that configuration information down here.
+ additional_phases.push((crate_id, name.clone(), PrimingPhase::CrateSymbols));
+ }
+
+ work_sender.send((crate_id, name, PrimingPhase::DefMap)).ok();
}
// recv_timeout is somewhat a hack, we need a way to from this thread check to see if the current salsa revision
@@ -153,6 +164,50 @@ pub fn parallel_prime_caches(
crates_currently_indexing: crates_currently_indexing.values().cloned().collect(),
crates_done,
crates_total,
+ work_type: "Indexing",
+ };
+
+ cb(progress);
+ }
+
+ let mut crates_done = 0;
+ let crates_total = additional_phases.len();
+ for w in additional_phases.into_iter().sorted_by_key(|&(_, _, phase)| phase) {
+ work_sender.send(w).ok();
+ }
+
+ while crates_done < crates_total {
+ db.unwind_if_cancelled();
+
+ // recv_timeout is somewhat a hack, we need a way to from this thread check to see if the current salsa revision
+ // is cancelled on a regular basis. workers will only exit if they are processing a task that is cancelled, or
+ // if this thread exits, and closes the work channel.
+ let worker_progress = match progress_receiver.recv_timeout(Duration::from_millis(10)) {
+ Ok(p) => p,
+ Err(crossbeam_channel::RecvTimeoutError::Timeout) => {
+ continue;
+ }
+ Err(crossbeam_channel::RecvTimeoutError::Disconnected) => {
+ // our workers may have died from a cancelled task, so we'll check and re-raise here.
+ db.unwind_if_cancelled();
+ break;
+ }
+ };
+ match worker_progress {
+ ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name } => {
+ crates_currently_indexing.insert(crate_id, crate_name);
+ }
+ ParallelPrimeCacheWorkerProgress::EndCrate { crate_id } => {
+ crates_currently_indexing.swap_remove(&crate_id);
+ crates_done += 1;
+ }
+ };
+
+ let progress = ParallelPrimeCachesProgress {
+ crates_currently_indexing: crates_currently_indexing.values().cloned().collect(),
+ crates_done,
+ crates_total,
+ work_type: "Populating symbols",
};
cb(progress);
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 7fc563a424..7963e8ae4f 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -354,6 +354,7 @@ impl Definition {
hir::GenericDef::TypeAlias(it) => it.source(db).map(|src| src.syntax().cloned()),
hir::GenericDef::Impl(it) => it.source(db).map(|src| src.syntax().cloned()),
hir::GenericDef::Const(it) => it.source(db).map(|src| src.syntax().cloned()),
+ hir::GenericDef::Static(it) => it.source(db).map(|src| src.syntax().cloned()),
};
return match def {
Some(def) => SearchScope::file_range(
diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs
index e5ce10a771..bb4c289c90 100644
--- a/crates/ide-db/src/symbol_index.rs
+++ b/crates/ide-db/src/symbol_index.rs
@@ -193,11 +193,9 @@ impl<DB> std::ops::Deref for Snap<DB> {
// `rust-analyzer.workspace.symbol.search.kind` settings. Symbols prefixed
// with `__` are hidden from the search results unless configured otherwise.
//
-// |===
-// | Editor | Shortcut
-//
-// | VS Code | kbd:[Ctrl+T]
-// |===
+// | Editor | Shortcut |
+// |---------|-----------|
+// | VS Code | <kbd>Ctrl+T</kbd>
pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
let _p = tracing::info_span!("world_symbols", query = ?query.query).entered();
diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
index bbdeb7cf08..246330e6ef 100644
--- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs
+++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -12,7 +12,7 @@ use crate::{
// Diagnostic: incorrect-ident-case
//
-// This diagnostic is triggered if an item name doesn't follow https://doc.rust-lang.org/1.0.0/style/style/naming/README.html[Rust naming convention].
+// This diagnostic is triggered if an item name doesn't follow [Rust naming convention](https://doc.rust-lang.org/1.0.0/style/style/naming/README.html).
pub(crate) fn incorrect_case(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Diagnostic {
let code = match d.expected_case {
CaseType::LowerSnakeCase => DiagnosticCode::RustcLint("non_snake_case"),
@@ -936,6 +936,7 @@ fn func() {
fn override_lint_level() {
check_diagnostics(
r#"
+#![allow(unused_variables)]
#[warn(nonstandard_style)]
fn foo() {
let BAR;
@@ -992,6 +993,7 @@ struct QUX;
const foo: i32 = 0;
fn BAR() {
let BAZ;
+ _ = BAZ;
}
"#,
);
diff --git a/crates/ide-diagnostics/src/handlers/invalid_cast.rs b/crates/ide-diagnostics/src/handlers/invalid_cast.rs
index c7cdcf4982..5730508436 100644
--- a/crates/ide-diagnostics/src/handlers/invalid_cast.rs
+++ b/crates/ide-diagnostics/src/handlers/invalid_cast.rs
@@ -1129,4 +1129,39 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn regression_18682() {
+ check_diagnostics(
+ r#"
+//- minicore: coerce_unsized
+struct Flexible {
+ body: [u8],
+}
+
+trait Field {
+ type Type: ?Sized;
+}
+
+impl Field for Flexible {
+ type Type = [u8];
+}
+
+trait KnownLayout {
+ type MaybeUninit: ?Sized;
+}
+
+
+impl<T> KnownLayout for [T] {
+ type MaybeUninit = [T];
+}
+
+struct ZerocopyKnownLayoutMaybeUninit(<<Flexible as Field>::Type as KnownLayout>::MaybeUninit);
+
+fn test(ptr: *mut [u8]) -> *mut ZerocopyKnownLayoutMaybeUninit {
+ ptr as *mut _
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs
index 99894fefef..2f13298589 100644
--- a/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -133,7 +133,7 @@ macro_rules! env { () => {} }
macro_rules! concat { () => {} }
include!(concat!(env!("OUT_DIR"), "/out.rs"));
- //^^^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
+ //^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
"#,
);
}
@@ -186,7 +186,7 @@ fn main() {
//^^^^^^^ error: expected string literal
env!("OUT_DIR");
- //^^^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
+ //^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
compile_error!("compile_error works");
//^^^^^^^^^^^^^ error: compile_error works
diff --git a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
index 7126617cde..0520bb3fe9 100644
--- a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
+++ b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
@@ -40,7 +40,7 @@ pub(crate) fn mismatched_arg_count(
Diagnostic::new(
DiagnosticCode::RustcHardError("E0107"),
message,
- invalid_args_range(ctx, d.call_expr.map(AstPtr::wrap_left), d.expected, d.found),
+ invalid_args_range(ctx, d.call_expr, d.expected, d.found),
)
}
diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index 8117401a53..323a5723d4 100644
--- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -1,5 +1,5 @@
use hir::db::ExpandDatabase;
-use hir::{HirFileIdExt, UnsafetyReason};
+use hir::{HirFileIdExt, UnsafeLint, UnsafetyReason};
use ide_db::text_edit::TextEdit;
use ide_db::{assists::Assist, source_change::SourceChange};
use syntax::{ast, SyntaxNode};
@@ -11,10 +11,10 @@ use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
//
// This diagnostic is triggered if an operation marked as `unsafe` is used outside of an `unsafe` function or block.
pub(crate) fn missing_unsafe(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Diagnostic {
- let code = if d.only_lint {
- DiagnosticCode::RustcLint("unsafe_op_in_unsafe_fn")
- } else {
- DiagnosticCode::RustcHardError("E0133")
+ let code = match d.lint {
+ UnsafeLint::HardError => DiagnosticCode::RustcHardError("E0133"),
+ UnsafeLint::UnsafeOpInUnsafeFn => DiagnosticCode::RustcLint("unsafe_op_in_unsafe_fn"),
+ UnsafeLint::DeprecatedSafe2024 => DiagnosticCode::RustcLint("deprecated_safe_2024"),
};
let operation = display_unsafety_reason(d.reason);
Diagnostic::new_with_syntax_node_ptr(
@@ -585,25 +585,59 @@ fn main() {
r#"
//- /ed2021.rs crate:ed2021 edition:2021
#[rustc_deprecated_safe_2024]
-unsafe fn safe() -> u8 {
+unsafe fn deprecated_safe() -> u8 {
0
}
+
//- /ed2024.rs crate:ed2024 edition:2024
#[rustc_deprecated_safe_2024]
-unsafe fn not_safe() -> u8 {
+unsafe fn deprecated_safe() -> u8 {
0
}
-//- /main.rs crate:main deps:ed2021,ed2024
+
+//- /dep1.rs crate:dep1 deps:ed2021,ed2024 edition:2021
+fn main() {
+ ed2021::deprecated_safe();
+ ed2024::deprecated_safe();
+}
+
+//- /dep2.rs crate:dep2 deps:ed2021,ed2024 edition:2024
+fn main() {
+ ed2021::deprecated_safe();
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^💡 error: call to unsafe function is unsafe and requires an unsafe function or block
+ ed2024::deprecated_safe();
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^💡 error: call to unsafe function is unsafe and requires an unsafe function or block
+}
+
+//- /dep3.rs crate:dep3 deps:ed2021,ed2024 edition:2021
+#![warn(deprecated_safe)]
+
fn main() {
- ed2021::safe();
- ed2024::not_safe();
- //^^^^^^^^^^^^^^^^^^💡 error: call to unsafe function is unsafe and requires an unsafe function or block
+ ed2021::deprecated_safe();
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^💡 warn: call to unsafe function is unsafe and requires an unsafe function or block
+ ed2024::deprecated_safe();
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^💡 warn: call to unsafe function is unsafe and requires an unsafe function or block
}
"#,
)
}
#[test]
+ fn orphan_unsafe_format_args() {
+ // Checks that we don't place orphan arguments for formatting under an unsafe block.
+ check_diagnostics(
+ r#"
+//- minicore: fmt
+fn foo() {
+ let p = 0xDEADBEEF as *const i32;
+ format_args!("", *p);
+ // ^^ error: dereference of raw pointer is unsafe and requires an unsafe function or block
+}
+ "#,
+ );
+ }
+
+ #[test]
fn unsafe_op_in_unsafe_fn_allowed_by_default_in_edition_2021() {
check_diagnostics(
r#"
@@ -812,4 +846,36 @@ fn main() {
"#,
)
}
+
+ #[test]
+ fn target_feature() {
+ check_diagnostics(
+ r#"
+#[target_feature(enable = "avx")]
+fn foo() {}
+
+#[target_feature(enable = "avx,avx2")]
+fn bar() {
+ foo();
+}
+
+fn baz() {
+ foo();
+ // ^^^^^ 💡 error: call to unsafe function is unsafe and requires an unsafe function or block
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn unsafe_fn_ptr_call() {
+ check_diagnostics(
+ r#"
+fn f(it: unsafe fn()){
+ it();
+ // ^^^^ 💡 error: call to unsafe function is unsafe and requires an unsafe function or block
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs
index 1397979144..0e3c4c7aa3 100644
--- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs
+++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs
@@ -831,13 +831,14 @@ fn f() {
#[test]
fn or_pattern() {
- // FIXME: `None` is inferred as unknown here for some reason
check_diagnostics(
r#"
//- minicore: option
fn f(_: i32) {}
fn main() {
let ((Some(mut x), None) | (_, Some(mut x))) = (None, Some(7)) else { return };
+ //^^^^^ 💡 warn: variable does not need to be mutable
+
f(x);
}
"#,
diff --git a/crates/ide-diagnostics/src/handlers/parenthesized_generic_args_without_fn_trait.rs b/crates/ide-diagnostics/src/handlers/parenthesized_generic_args_without_fn_trait.rs
new file mode 100644
index 0000000000..ccf5172341
--- /dev/null
+++ b/crates/ide-diagnostics/src/handlers/parenthesized_generic_args_without_fn_trait.rs
@@ -0,0 +1,59 @@
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+
+// Diagnostic: parenthesized-generic-args-without-fn-trait
+//
+// This diagnostic is shown when a `Fn`-trait-style generic parameters (`Trait(A, B) -> C`)
+// was used on non-`Fn` trait/type.
+pub(crate) fn parenthesized_generic_args_without_fn_trait(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::ParenthesizedGenericArgsWithoutFnTrait,
+) -> Diagnostic {
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0214"),
+ "parenthesized type parameters may only be used with a `Fn` trait",
+ d.args.map(Into::into),
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn fn_traits_work() {
+ check_diagnostics(
+ r#"
+//- minicore: async_fn, fn
+fn foo<
+ A: Fn(),
+ B: FnMut() -> i32,
+ C: FnOnce(&str, bool),
+ D: AsyncFn::(u32) -> u32,
+ E: AsyncFnMut(),
+ F: AsyncFnOnce() -> bool,
+>() {}
+ "#,
+ );
+ }
+
+ #[test]
+ fn non_fn_trait() {
+ check_diagnostics(
+ r#"
+struct Struct<T>(T);
+enum Enum<T> { EnumVariant(T) }
+type TypeAlias<T> = bool;
+
+type Foo = TypeAlias() -> bool;
+ // ^^ error: parenthesized type parameters may only be used with a `Fn` trait
+
+fn foo(_a: Struct(i32)) {
+ // ^^^^^ error: parenthesized type parameters may only be used with a `Fn` trait
+ let _ = <Enum::(u32)>::EnumVariant(0);
+ // ^^^^^^^ error: parenthesized type parameters may only be used with a `Fn` trait
+}
+ "#,
+ );
+ }
+}
diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index 56afb38cc8..7cf8282d05 100644
--- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -1,5 +1,5 @@
use either::Either;
-use hir::{db::ExpandDatabase, CallableKind, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type};
+use hir::{db::ExpandDatabase, CallableKind, ClosureStyle, HirDisplay, HirFileIdExt, InFile};
use ide_db::{
famous_defs::FamousDefs,
source_change::{SourceChange, SourceChangeBuilder},
@@ -88,7 +88,7 @@ fn add_reference(
let range = ctx.sema.diagnostics_display_range((*expr_ptr).map(|it| it.into()));
let (_, mutability) = d.expected.as_reference()?;
- let actual_with_ref = Type::reference(&d.actual, mutability);
+ let actual_with_ref = d.actual.add_reference(mutability);
if !actual_with_ref.could_coerce_to(ctx.sema.db, &d.expected) {
return None;
}
@@ -1235,4 +1235,25 @@ fn f() {
"#,
);
}
+
+ #[test]
+ fn complex_enum_variant_non_ref_pat() {
+ check_diagnostics(
+ r#"
+enum Enum { Variant }
+
+trait Trait {
+ type Assoc;
+}
+impl Trait for () {
+ type Assoc = Enum;
+}
+
+fn foo(v: &Enum) {
+ let <Enum>::Variant = v;
+ let <() as Trait>::Assoc::Variant = v;
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index 4accd181ca..dfb03eee73 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -1,5 +1,6 @@
use std::iter;
+use either::Either;
use hir::{db::ExpandDatabase, Adt, FileRange, HasSource, HirDisplay, InFile, Struct, Union};
use ide_db::text_edit::TextEdit;
use ide_db::{
@@ -41,7 +42,7 @@ pub(crate) fn unresolved_field(
),
adjusted_display_range(ctx, d.expr, &|expr| {
Some(
- match expr {
+ match expr.left()? {
ast::Expr::MethodCallExpr(it) => it.name_ref(),
ast::Expr::FieldExpr(it) => it.name_ref(),
_ => None,
@@ -72,7 +73,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Vec<A
fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Assist> {
// Get the FileRange of the invalid field access
let root = ctx.sema.db.parse_or_expand(d.expr.file_id);
- let expr = d.expr.value.to_node(&root);
+ let expr = d.expr.value.to_node(&root).left()?;
let error_range = ctx.sema.original_range_opt(expr.syntax())?;
let field_name = d.name.as_str();
@@ -263,7 +264,7 @@ fn record_field_layout(
// FIXME: We should fill out the call here, move the cursor and trigger signature help
fn method_fix(
ctx: &DiagnosticsContext<'_>,
- expr_ptr: &InFile<AstPtr<ast::Expr>>,
+ expr_ptr: &InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
) -> Option<Assist> {
let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
let expr = expr_ptr.value.to_node(&root);
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index 4ab649cc16..e4de107249 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -1,4 +1,4 @@
-use hir::{db::ExpandDatabase, AssocItem, FileRange, HirDisplay, InFile};
+use hir::{db::ExpandDatabase, FileRange, HirDisplay, InFile};
use ide_db::text_edit::TextEdit;
use ide_db::{
assists::{Assist, AssistId, AssistKind},
@@ -35,7 +35,7 @@ pub(crate) fn unresolved_method(
),
adjusted_display_range(ctx, d.expr, &|expr| {
Some(
- match expr {
+ match expr.left()? {
ast::Expr::MethodCallExpr(it) => it.name_ref(),
ast::Expr::FieldExpr(it) => it.name_ref(),
_ => None,
@@ -85,7 +85,7 @@ fn field_fix(
let expr_ptr = &d.expr;
let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
let expr = expr_ptr.value.to_node(&root);
- let (file_id, range) = match expr {
+ let (file_id, range) = match expr.left()? {
ast::Expr::MethodCallExpr(mcall) => {
let FileRange { range, file_id } =
ctx.sema.original_range_opt(mcall.receiver()?.syntax())?;
@@ -112,12 +112,12 @@ fn field_fix(
}
fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Assist> {
- if let Some(assoc_item_id) = d.assoc_func_with_same_name {
+ if let Some(f) = d.assoc_func_with_same_name {
let db = ctx.sema.db;
let expr_ptr = &d.expr;
let root = db.parse_or_expand(expr_ptr.file_id);
- let expr: ast::Expr = expr_ptr.value.to_node(&root);
+ let expr: ast::Expr = expr_ptr.value.to_node(&root).left()?;
let call = ast::MethodCallExpr::cast(expr.syntax().clone())?;
let range = InFile::new(expr_ptr.file_id, call.syntax().text_range())
@@ -127,30 +127,25 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
let receiver = call.receiver()?;
let receiver_type = &ctx.sema.type_of_expr(&receiver)?.original;
- let need_to_take_receiver_as_first_arg = match hir::AssocItem::from(assoc_item_id) {
- AssocItem::Function(f) => {
- let assoc_fn_params = f.assoc_fn_params(db);
- if assoc_fn_params.is_empty() {
- false
- } else {
- assoc_fn_params
- .first()
- .map(|first_arg| {
- // For generic type, say `Box`, take `Box::into_raw(b: Self)` as example,
- // type of `b` is `Self`, which is `Box<T, A>`, containing unspecified generics.
- // However, type of `receiver` is specified, it could be `Box<i32, Global>` or something like that,
- // so `first_arg.ty() == receiver_type` evaluate to `false` here.
- // Here add `first_arg.ty().as_adt() == receiver_type.as_adt()` as guard,
- // apply `.as_adt()` over `Box<T, A>` or `Box<i32, Global>` gets `Box`, so we get `true` here.
-
- // FIXME: it fails when type of `b` is `Box` with other generic param different from `receiver`
- first_arg.ty() == receiver_type
- || first_arg.ty().as_adt() == receiver_type.as_adt()
- })
- .unwrap_or(false)
- }
- }
- _ => false,
+ let assoc_fn_params = f.assoc_fn_params(db);
+ let need_to_take_receiver_as_first_arg = if assoc_fn_params.is_empty() {
+ false
+ } else {
+ assoc_fn_params
+ .first()
+ .map(|first_arg| {
+ // For generic type, say `Box`, take `Box::into_raw(b: Self)` as example,
+ // type of `b` is `Self`, which is `Box<T, A>`, containing unspecified generics.
+ // However, type of `receiver` is specified, it could be `Box<i32, Global>` or something like that,
+ // so `first_arg.ty() == receiver_type` evaluate to `false` here.
+ // Here add `first_arg.ty().as_adt() == receiver_type.as_adt()` as guard,
+ // apply `.as_adt()` over `Box<T, A>` or `Box<i32, Global>` gets `Box`, so we get `true` here.
+
+ // FIXME: it fails when type of `b` is `Box` with other generic param different from `receiver`
+ first_arg.ty() == receiver_type
+ || first_arg.ty().as_adt() == receiver_type.as_adt()
+ })
+ .unwrap_or(false)
};
let mut receiver_type_adt_name =
diff --git a/crates/ide-diagnostics/src/handlers/unused_variables.rs b/crates/ide-diagnostics/src/handlers/unused_variables.rs
index 67ece56694..d5caf4de33 100644
--- a/crates/ide-diagnostics/src/handlers/unused_variables.rs
+++ b/crates/ide-diagnostics/src/handlers/unused_variables.rs
@@ -263,4 +263,17 @@ fn main() {
"#,
);
}
+
+ // regression test as we used to panic in this scenario
+ #[test]
+ fn unknown_struct_pattern_param_type() {
+ check_diagnostics(
+ r#"
+struct S { field : u32 }
+fn f(S { field }: error) {
+ // ^^^^^ 💡 warn: unused variable
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index 50c91a6960..3ea41aa7e8 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -43,6 +43,7 @@ mod handlers {
pub(crate) mod mutability_errors;
pub(crate) mod no_such_field;
pub(crate) mod non_exhaustive_let;
+ pub(crate) mod parenthesized_generic_args_without_fn_trait;
pub(crate) mod private_assoc_item;
pub(crate) mod private_field;
pub(crate) mod remove_trailing_return;
@@ -466,7 +467,12 @@ pub fn semantic_diagnostics(
Some(it) => it,
None => continue,
},
- AnyDiagnostic::GenericArgsProhibited(d) => handlers::generic_args_prohibited::generic_args_prohibited(&ctx, &d)
+ AnyDiagnostic::GenericArgsProhibited(d) => {
+ handlers::generic_args_prohibited::generic_args_prohibited(&ctx, &d)
+ }
+ AnyDiagnostic::ParenthesizedGenericArgsWithoutFnTrait(d) => {
+ handlers::parenthesized_generic_args_without_fn_trait::parenthesized_generic_args_without_fn_trait(&ctx, &d)
+ }
};
res.push(d)
}
diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs
index 6b654f8934..889258c94c 100644
--- a/crates/ide-ssr/src/lib.rs
+++ b/crates/ide-ssr/src/lib.rs
@@ -33,12 +33,10 @@
//
// Supported constraints:
//
-// |===
-// | Constraint | Restricts placeholder
-//
-// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`)
-// | not(a) | Negates the constraint `a`
-// |===
+// | Constraint | Restricts placeholder |
+// |---------------|------------------------|
+// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`) |
+// | not(a) | Negates the constraint `a` |
//
// Available via the command `rust-analyzer.ssr`.
//
@@ -52,11 +50,9 @@
// String::from((y + 5).foo(z))
// ```
//
-// |===
-// | Editor | Action Name
-//
-// | VS Code | **rust-analyzer: Structural Search Replace**
-// |===
+// | Editor | Action Name |
+// |---------|--------------|
+// | VS Code | **rust-analyzer: Structural Search Replace** |
//
// Also available as an assist, by writing a comment containing the structural
// search and replace rule. You will only see the assist if the comment can
diff --git a/crates/ide/src/annotations.rs b/crates/ide/src/annotations.rs
index 18f866eb9f..e47891bbdf 100644
--- a/crates/ide/src/annotations.rs
+++ b/crates/ide/src/annotations.rs
@@ -1,6 +1,6 @@
use hir::{HasSource, InFile, InRealFile, Semantics};
use ide_db::{
- defs::Definition, helpers::visit_file_defs, FileId, FilePosition, FileRange, FxHashSet,
+ defs::Definition, helpers::visit_file_defs, FileId, FilePosition, FileRange, FxIndexSet,
RootDatabase,
};
use itertools::Itertools;
@@ -21,7 +21,7 @@ mod fn_references;
// Provides user with annotations above items for looking up references or impl blocks
// and running/debugging binaries.
//
-// image::https://user-images.githubusercontent.com/48062697/113020672-b7c34f00-917a-11eb-8f6e-858735660a0e.png[]
+// ![Annotations](https://user-images.githubusercontent.com/48062697/113020672-b7c34f00-917a-11eb-8f6e-858735660a0e.png)
#[derive(Debug, Hash, PartialEq, Eq)]
pub struct Annotation {
pub range: TextRange,
@@ -55,7 +55,7 @@ pub(crate) fn annotations(
config: &AnnotationConfig,
file_id: FileId,
) -> Vec<Annotation> {
- let mut annotations = FxHashSet::default();
+ let mut annotations = FxIndexSet::default();
if config.annotate_runnables {
for runnable in runnables(db, file_id) {
@@ -170,7 +170,12 @@ pub(crate) fn annotations(
}));
}
- annotations.into_iter().sorted_by_key(|a| (a.range.start(), a.range.end())).collect()
+ annotations
+ .into_iter()
+ .sorted_by_key(|a| {
+ (a.range.start(), a.range.end(), matches!(a.kind, AnnotationKind::Runnable(..)))
+ })
+ .collect()
}
pub(crate) fn resolve_annotation(db: &RootDatabase, mut annotation: Annotation) -> Annotation {
@@ -537,6 +542,20 @@ fn main() {
},
Annotation {
range: 69..73,
+ kind: HasReferences {
+ pos: FilePositionWrapper {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 69,
+ },
+ data: Some(
+ [],
+ ),
+ },
+ },
+ Annotation {
+ range: 69..73,
kind: Runnable(
Runnable {
use_name_in_title: false,
@@ -559,20 +578,6 @@ fn main() {
},
),
},
- Annotation {
- range: 69..73,
- kind: HasReferences {
- pos: FilePositionWrapper {
- file_id: FileId(
- 0,
- ),
- offset: 69,
- },
- data: Some(
- [],
- ),
- },
- },
]
"#]],
);
@@ -719,6 +724,20 @@ fn main() {
},
Annotation {
range: 61..65,
+ kind: HasReferences {
+ pos: FilePositionWrapper {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 61,
+ },
+ data: Some(
+ [],
+ ),
+ },
+ },
+ Annotation {
+ range: 61..65,
kind: Runnable(
Runnable {
use_name_in_title: false,
@@ -741,20 +760,6 @@ fn main() {
},
),
},
- Annotation {
- range: 61..65,
- kind: HasReferences {
- pos: FilePositionWrapper {
- file_id: FileId(
- 0,
- ),
- offset: 61,
- },
- data: Some(
- [],
- ),
- },
- },
]
"#]],
);
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index cfd8919730..e35e47e747 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -122,11 +122,9 @@ pub(crate) fn remove_links(markdown: &str) -> String {
// The simplest way to use this feature is via the context menu. Right-click on
// the selected item. The context menu opens. Select **Open Docs**.
//
-// |===
-// | Editor | Action Name
-//
-// | VS Code | **rust-analyzer: Open Docs**
-// |===
+// | Editor | Action Name |
+// |---------|-------------|
+// | VS Code | **rust-analyzer: Open Docs** |
pub(crate) fn external_docs(
db: &RootDatabase,
FilePosition { file_id, offset }: FilePosition,
diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs
index 0ad894427b..ad4308e06a 100644
--- a/crates/ide/src/expand_macro.rs
+++ b/crates/ide/src/expand_macro.rs
@@ -19,13 +19,11 @@ pub struct ExpandedMacro {
//
// Shows the full macro expansion of the macro at the current caret position.
//
-// |===
-// | Editor | Action Name
+// | Editor | Action Name |
+// |---------|-------------|
+// | VS Code | **rust-analyzer: Expand macro recursively at caret** |
//
-// | VS Code | **rust-analyzer: Expand macro recursively at caret**
-// |===
-//
-// image::https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif[]
+// ![Expand Macro Recursively](https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif)
pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> {
let sema = Semantics::new(db);
let file = sema.parse_guess_edition(position.file_id);
diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs
index 3d49082f28..76414854e9 100644
--- a/crates/ide/src/extend_selection.rs
+++ b/crates/ide/src/extend_selection.rs
@@ -17,13 +17,11 @@ use crate::FileRange;
// Extends or shrinks the current selection to the encompassing syntactic construct
// (expression, statement, item, module, etc). It works with multiple cursors.
//
-// |===
-// | Editor | Shortcut
+// | Editor | Shortcut |
+// |---------|----------|
+// | VS Code | <kbd>Alt+Shift+→</kbd>, <kbd>Alt+Shift+←</kbd> |
//
-// | VS Code | kbd:[Alt+Shift+→], kbd:[Alt+Shift+←]
-// |===
-//
-// image::https://user-images.githubusercontent.com/48062697/113020651-b42fc800-917a-11eb-8a4f-cf1a07859fac.gif[]
+// ![Expand and Shrink Selection](https://user-images.githubusercontent.com/48062697/113020651-b42fc800-917a-11eb-8a4f-cf1a07859fac.gif)
pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
let sema = Semantics::new(db);
let src = sema.parse_guess_edition(frange.file_id);
diff --git a/crates/ide/src/fetch_crates.rs b/crates/ide/src/fetch_crates.rs
index 37b3cb03b3..5ed2144430 100644
--- a/crates/ide/src/fetch_crates.rs
+++ b/crates/ide/src/fetch_crates.rs
@@ -14,13 +14,11 @@ pub struct CrateInfo {
//
// Shows a view tree with all the dependencies of this project
//
-// |===
-// | Editor | Panel Name
+// | Editor | Panel Name |
+// |---------|------------|
+// | VS Code | **Rust Dependencies** |
//
-// | VS Code | **Rust Dependencies**
-// |===
-//
-// image::https://user-images.githubusercontent.com/5748995/229394139-2625beab-f4c9-484b-84ed-ad5dee0b1e1a.png[]
+// ![Show Dependency Tree](https://user-images.githubusercontent.com/5748995/229394139-2625beab-f4c9-484b-84ed-ad5dee0b1e1a.png)
pub(crate) fn fetch_crates(db: &RootDatabase) -> FxIndexSet<CrateInfo> {
let crate_graph = db.crate_graph();
crate_graph
diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs
index 50977ee840..52fbab6fa1 100644
--- a/crates/ide/src/file_structure.rs
+++ b/crates/ide/src/file_structure.rs
@@ -31,14 +31,11 @@ pub enum StructureNodeKind {
// * draw breadcrumbs to describe the context around the cursor
// * draw outline of the file
//
-// |===
-// | Editor | Shortcut
+// | Editor | Shortcut |
+// |---------|----------|
+// | VS Code | <kbd>Ctrl+Shift+O</kbd> |
//
-// | VS Code | kbd:[Ctrl+Shift+O]
-// |===
-//
-// image::https://user-images.githubusercontent.com/48062697/113020654-b42fc800-917a-11eb-8388-e7dc4d92b02e.gif[]
-
+// ![File Structure](https://user-images.githubusercontent.com/48062697/113020654-b42fc800-917a-11eb-8388-e7dc4d92b02e.gif)
pub(crate) fn file_structure(file: &SourceFile) -> Vec<StructureNode> {
let mut res = Vec::new();
let mut stack = Vec::new();
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index d18732a6b8..60a904233a 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -31,13 +31,11 @@ use syntax::{
//
// For outline modules, this will navigate to the source file of the module.
//
-// |===
-// | Editor | Shortcut
+// | Editor | Shortcut |
+// |---------|----------|
+// | VS Code | <kbd>F12</kbd> |
//
-// | VS Code | kbd:[F12]
-// |===
-//
-// image::https://user-images.githubusercontent.com/48062697/113065563-025fbe00-91b1-11eb-83e4-a5a703610b23.gif[]
+// ![Go to Definition](https://user-images.githubusercontent.com/48062697/113065563-025fbe00-91b1-11eb-83e4-a5a703610b23.gif)
pub(crate) fn goto_definition(
db: &RootDatabase,
FilePosition { file_id, offset }: FilePosition,
@@ -3274,4 +3272,56 @@ fn f() {
"#,
);
}
+
+ #[test]
+ fn use_inside_body() {
+ check(
+ r#"
+fn main() {
+ mod nice_module {
+ pub(super) struct NiceStruct;
+ // ^^^^^^^^^^
+ }
+
+ use nice_module::NiceStruct$0;
+
+ let _ = NiceStruct;
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn shadow_builtin_type_by_module() {
+ check(
+ r#"
+mod Foo{
+pub mod str {
+ // ^^^
+ pub fn foo() {}
+}
+}
+
+fn main() {
+ use Foo::str;
+ let s = st$0r::foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_goto_module_because_str_is_builtin_type() {
+ check(
+ r#"
+mod str {
+pub fn foo() {}
+}
+
+fn main() {
+ let s = st$0r::f();
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs
index e926378367..e1d834b5d1 100644
--- a/crates/ide/src/goto_implementation.rs
+++ b/crates/ide/src/goto_implementation.rs
@@ -12,13 +12,11 @@ use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
//
// Navigates to the impl items of types.
//
-// |===
-// | Editor | Shortcut
+// | Editor | Shortcut |
+// |---------|----------|
+// | VS Code | <kbd>Ctrl+F12</kbd>
//
-// | VS Code | kbd:[Ctrl+F12]
-// |===
-//
-// image::https://user-images.githubusercontent.com/48062697/113065566-02f85480-91b1-11eb-9288-aaad8abd8841.gif[]
+// ![Go to Implementation](https://user-images.githubusercontent.com/48062697/113065566-02f85480-91b1-11eb-9288-aaad8abd8841.gif)
pub(crate) fn goto_implementation(
db: &RootDatabase,
FilePosition { file_id, offset }: FilePosition,
diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs
index 2610d6c886..ddc274a830 100644
--- a/crates/ide/src/goto_type_definition.rs
+++ b/crates/ide/src/goto_type_definition.rs
@@ -8,13 +8,11 @@ use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
//
// Navigates to the type of an identifier.
//
-// |===
-// | Editor | Action Name
+// | Editor | Action Name |
+// |---------|-------------|
+// | VS Code | **Go to Type Definition** |
//
-// | VS Code | **Go to Type Definition**
-// |===
-//
-// image::https://user-images.githubusercontent.com/48062697/113020657-b560f500-917a-11eb-9007-0f809733a338.gif[]
+// ![Go to Type Definition](https://user-images.githubusercontent.com/48062697/113020657-b560f500-917a-11eb-9007-0f809733a338.gif)
pub(crate) fn goto_type_definition(
db: &RootDatabase,
FilePosition { file_id, offset }: FilePosition,
diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs
index 612bc36f62..6463206596 100644
--- a/crates/ide/src/highlight_related.rs
+++ b/crates/ide/src/highlight_related.rs
@@ -43,12 +43,12 @@ pub struct HighlightRelatedConfig {
//
// Highlights constructs related to the thing under the cursor:
//
-// . if on an identifier, highlights all references to that identifier in the current file
-// .. additionally, if the identifier is a trait in a where clause, type parameter trait bound or use item, highlights all references to that trait's assoc items in the corresponding scope
-// . if on an `async` or `await` token, highlights all yield points for that async context
-// . if on a `return` or `fn` keyword, `?` character or `->` return type arrow, highlights all exit points for that context
-// . if on a `break`, `loop`, `while` or `for` token, highlights all break points for that loop or block context
-// . if on a `move` or `|` token that belongs to a closure, highlights all captures of the closure.
+// 1. if on an identifier, highlights all references to that identifier in the current file
+// * additionally, if the identifier is a trait in a where clause, type parameter trait bound or use item, highlights all references to that trait's assoc items in the corresponding scope
+// 1. if on an `async` or `await` token, highlights all yield points for that async context
+// 1. if on a `return` or `fn` keyword, `?` character or `->` return type arrow, highlights all exit points for that context
+// 1. if on a `break`, `loop`, `while` or `for` token, highlights all break points for that loop or block context
+// 1. if on a `move` or `|` token that belongs to a closure, highlights all captures of the closure.
//
// Note: `?`, `|` and `->` do not currently trigger this behavior in the VSCode editor.
pub(crate) fn highlight_related(
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index 9d4c103fc2..95a720e7e4 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -118,7 +118,7 @@ pub struct HoverResult {
// Shows additional information, like the type of an expression or the documentation for a definition when "focusing" code.
// Focusing is usually hovering with a mouse, but can also be triggered with a shortcut.
//
-// image::https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif[]
+// ![Hover](https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif)
pub(crate) fn hover(
db: &RootDatabase,
frange @ FileRange { file_id, range }: FileRange,
diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs
index 40f3406b72..c996230c3a 100644
--- a/crates/ide/src/hover/render.rs
+++ b/crates/ide/src/hover/render.rs
@@ -434,6 +434,7 @@ fn definition_owner_name(db: &RootDatabase, def: Definition, edition: Edition) -
None => it.name(db),
}
}
+ hir::GenericDef::Static(it) => Some(it.name(db)),
},
Definition::DeriveHelper(derive_helper) => Some(derive_helper.derive().name(db)),
d => {
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index 1f723c85df..63039b1cd3 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -59,7 +59,7 @@ mod range_exclusive;
//
// Note: inlay hints for function argument names are heuristically omitted to reduce noise and will not appear if
// any of the
-// link:https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L92-L99[following criteria]
+// [following criteria](https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L92-L99)
// are met:
//
// * the parameter name is a suffix of the function's name
@@ -68,13 +68,13 @@ mod range_exclusive;
// of argument with _ splitting it off
// * the parameter name starts with `ra_fixture`
// * the parameter name is a
-// link:https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L200[well known name]
+// [well known name](https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L200)
// in a unary function
// * the parameter name is a
-// link:https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L201[single character]
+// [single character](https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L201)
// in a unary function
//
-// image::https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png[]
+// ![Inlay hints](https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png)
pub(crate) fn inlay_hints(
db: &RootDatabase,
file_id: FileId,
@@ -294,6 +294,7 @@ pub struct InlayHintsConfig {
pub param_names_for_lifetime_elision_hints: bool,
pub hide_named_constructor_hints: bool,
pub hide_closure_initialization_hints: bool,
+ pub hide_closure_parameter_hints: bool,
pub range_exclusive_hints: bool,
pub closure_style: ClosureStyle,
pub max_length: Option<usize>,
@@ -860,6 +861,7 @@ mod tests {
binding_mode_hints: false,
hide_named_constructor_hints: false,
hide_closure_initialization_hints: false,
+ hide_closure_parameter_hints: false,
closure_style: ClosureStyle::ImplFn,
param_names_for_lifetime_elision_hints: false,
max_length: None,
diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs
index 2acd4021cc..d3b95750f7 100644
--- a/crates/ide/src/inlay_hints/adjustment.rs
+++ b/crates/ide/src/inlay_hints/adjustment.rs
@@ -13,11 +13,7 @@ use ide_db::famous_defs::FamousDefs;
use ide_db::text_edit::TextEditBuilder;
use span::EditionedFileId;
-use stdx::never;
-use syntax::{
- ast::{self, make, AstNode},
- ted,
-};
+use syntax::ast::{self, prec::ExprPrecedence, AstNode};
use crate::{
AdjustmentHints, AdjustmentHintsMode, InlayHint, InlayHintLabel, InlayHintLabelPart,
@@ -104,12 +100,14 @@ pub(super) fn hints(
};
let iter: &mut dyn Iterator<Item = _> = iter.as_mut().either(|it| it as _, |it| it as _);
+ let mut has_adjustments = false;
let mut allow_edit = !postfix;
for Adjustment { source, target, kind } in iter {
if source == target {
cov_mark::hit!(same_type_adjustment);
continue;
}
+ has_adjustments = true;
// FIXME: Add some nicer tooltips to each of these
let (text, coercion) = match kind {
@@ -172,6 +170,10 @@ pub(super) fn hints(
};
if postfix { &mut post } else { &mut pre }.label.append_part(label);
}
+ if !has_adjustments {
+ return None;
+ }
+
if !postfix && needs_inner_parens {
pre.label.append_str("(");
}
@@ -254,71 +256,31 @@ fn mode_and_needs_parens_for_adjustment_hints(
/// Returns whatever we need to add parentheses on the inside and/or outside of `expr`,
/// if we are going to add (`postfix`) adjustments hints to it.
fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool, bool) {
- // This is a very miserable pile of hacks...
- //
- // `Expr::needs_parens_in` requires that the expression is the child of the other expression,
- // that is supposed to be its parent.
- //
- // But we want to check what would happen if we add `*`/`.*` to the inner expression.
- // To check for inner we need `` expr.needs_parens_in(`*expr`) ``,
- // to check for outer we need `` `*expr`.needs_parens_in(parent) ``,
- // where "expr" is the `expr` parameter, `*expr` is the edited `expr`,
- // and "parent" is the parent of the original expression...
- //
- // For this we utilize mutable trees, which is a HACK, but it works.
- //
- // FIXME: comeup with a better API for `needs_parens_in`, so that we don't have to do *this*
-
- // Make `&expr`/`expr?`
- let dummy_expr = {
- // `make::*` function go through a string, so they parse wrongly.
- // for example `` make::expr_try(`|| a`) `` would result in a
- // `|| (a?)` and not `(|| a)?`.
- //
- // Thus we need dummy parens to preserve the relationship we want.
- // The parens are then simply ignored by the following code.
- let dummy_paren = make::expr_paren(expr.clone());
- if postfix {
- make::expr_try(dummy_paren)
- } else {
- make::expr_ref(dummy_paren, false)
- }
- };
-
- // Do the dark mutable tree magic.
- // This essentially makes `dummy_expr` and `expr` switch places (families),
- // so that `expr`'s parent is not `dummy_expr`'s parent.
- let dummy_expr = dummy_expr.clone_for_update();
- let expr = expr.clone_for_update();
- ted::replace(expr.syntax(), dummy_expr.syntax());
-
- let parent = dummy_expr.syntax().parent();
- let Some(expr) = (|| {
- if postfix {
- let ast::Expr::TryExpr(e) = &dummy_expr else { return None };
- let Some(ast::Expr::ParenExpr(e)) = e.expr() else { return None };
-
- e.expr()
- } else {
- let ast::Expr::RefExpr(e) = &dummy_expr else { return None };
- let Some(ast::Expr::ParenExpr(e)) = e.expr() else { return None };
-
- e.expr()
- }
- })() else {
- never!("broken syntax tree?\n{:?}\n{:?}", expr, dummy_expr);
- return (true, true);
- };
-
- // At this point
- // - `parent` is the parent of the original expression
- // - `dummy_expr` is the original expression wrapped in the operator we want (`*`/`.*`)
- // - `expr` is the clone of the original expression (with `dummy_expr` as the parent)
-
- let needs_outer_parens = parent.is_some_and(|p| dummy_expr.needs_parens_in(p));
- let needs_inner_parens = expr.needs_parens_in(dummy_expr.syntax().clone());
-
- (needs_outer_parens, needs_inner_parens)
+ let prec = expr.precedence();
+ if postfix {
+ // postfix ops have higher precedence than any other operator, so we need to wrap
+ // any inner expression that is below (except for jumps if they don't have a value)
+ let needs_inner_parens = prec < ExprPrecedence::Unambiguous && {
+ prec != ExprPrecedence::Jump || !expr.is_ret_like_with_no_value()
+ };
+ // given we are the higher precedence, no parent expression will have stronger requirements
+ let needs_outer_parens = false;
+ (needs_outer_parens, needs_inner_parens)
+ } else {
+        // We need to wrap all binary-like things, that's everything below prefix except for jumps
+ let needs_inner_parens = prec < ExprPrecedence::Prefix && prec != ExprPrecedence::Jump;
+ let parent = expr
+ .syntax()
+ .parent()
+ .and_then(ast::Expr::cast)
+ // if we are already wrapped, great, no need to wrap again
+ .filter(|it| !matches!(it, ast::Expr::ParenExpr(_)))
+ .map(|it| it.precedence());
+ // if we have no parent, we don't need outer parens to disambiguate
+ // otherwise anything with higher precedence than what we insert needs to wrap us
+ let needs_outer_parens = parent.is_some_and(|prec| prec > ExprPrecedence::Prefix);
+ (needs_outer_parens, needs_inner_parens)
+ }
}
#[cfg(test)]
diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs
index 01a1a4545c..c2986a9aa6 100644
--- a/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/crates/ide/src/inlay_hints/bind_pat.rs
@@ -36,6 +36,9 @@ pub(super) fn hints(
if it.ty().is_some() {
return None;
}
+ if config.hide_closure_parameter_hints && it.syntax().ancestors().nth(2).is_none_or(|n| matches!(ast::Expr::cast(n), Some(ast::Expr::ClosureExpr(_)))) {
+ return None;
+ }
Some(it.colon_token())
},
ast::LetStmt(it) => {
@@ -950,6 +953,36 @@ fn bar(f: impl FnOnce(u8) -> u8) -> impl FnOnce(u8) -> u8 {
}
#[test]
+ fn skip_closure_parameter_hints() {
+ check_with_config(
+ InlayHintsConfig {
+ type_hints: true,
+ hide_closure_parameter_hints: true,
+ ..DISABLED_CONFIG
+ },
+ r#"
+//- minicore: fn
+struct Foo;
+impl Foo {
+ fn foo(self: Self) {}
+ fn bar(self: &Self) {}
+}
+fn main() {
+ let closure = |x, y| x + y;
+ // ^^^^^^^ impl Fn(i32, i32) -> {unknown}
+ closure(2, 3);
+ let point = (10, 20);
+ // ^^^^^ (i32, i32)
+ let (x, y) = point;
+ // ^ i32 ^ i32
+ Foo::foo(Foo);
+ Foo::bar(&Foo);
+}
+"#,
+ );
+ }
+
+ #[test]
fn hint_truncation() {
check_with_config(
InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG },
diff --git a/crates/ide/src/inlay_hints/closure_captures.rs b/crates/ide/src/inlay_hints/closure_captures.rs
index 3e91618d08..9b981c0a3a 100644
--- a/crates/ide/src/inlay_hints/closure_captures.rs
+++ b/crates/ide/src/inlay_hints/closure_captures.rs
@@ -1,4 +1,4 @@
-//! Implementation of "closure return type" inlay hints.
+//! Implementation of "closure captures" inlay hints.
//!
//! Tests live in [`bind_pat`][super::bind_pat] module.
use ide_db::famous_defs::FamousDefs;
diff --git a/crates/ide/src/inlay_hints/extern_block.rs b/crates/ide/src/inlay_hints/extern_block.rs
index 2bc91b68ed..652dff0bc5 100644
--- a/crates/ide/src/inlay_hints/extern_block.rs
+++ b/crates/ide/src/inlay_hints/extern_block.rs
@@ -7,7 +7,7 @@ use crate::{InlayHint, InlayHintsConfig};
pub(super) fn extern_block_hints(
acc: &mut Vec<InlayHint>,
- FamousDefs(_sema, _): &FamousDefs<'_, '_>,
+ FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
extern_block: ast::ExternBlock,
@@ -16,6 +16,7 @@ pub(super) fn extern_block_hints(
return None;
}
let abi = extern_block.abi()?;
+ sema.to_def(&extern_block)?;
acc.push(InlayHint {
range: abi.syntax().text_range(),
position: crate::InlayHintPosition::Before,
@@ -33,7 +34,7 @@ pub(super) fn extern_block_hints(
pub(super) fn fn_hints(
acc: &mut Vec<InlayHint>,
- FamousDefs(_sema, _): &FamousDefs<'_, '_>,
+ FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
fn_: &ast::Fn,
@@ -43,14 +44,16 @@ pub(super) fn fn_hints(
if !implicit_unsafe {
return None;
}
- let fn_ = fn_.fn_token()?;
- acc.push(item_hint(config, extern_block, fn_));
+ let fn_token = fn_.fn_token()?;
+ if sema.to_def(fn_).is_some_and(|def| def.extern_block(sema.db).is_some()) {
+ acc.push(item_hint(config, extern_block, fn_token));
+ }
Some(())
}
pub(super) fn static_hints(
acc: &mut Vec<InlayHint>,
- FamousDefs(_sema, _): &FamousDefs<'_, '_>,
+ FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
static_: &ast::Static,
@@ -60,8 +63,10 @@ pub(super) fn static_hints(
if !implicit_unsafe {
return None;
}
- let static_ = static_.static_token()?;
- acc.push(item_hint(config, extern_block, static_));
+ let static_token = static_.static_token()?;
+ if sema.to_def(static_).is_some_and(|def| def.extern_block(sema.db).is_some()) {
+ acc.push(item_hint(config, extern_block, static_token));
+ }
Some(())
}
diff --git a/crates/ide/src/inlay_hints/implicit_drop.rs b/crates/ide/src/inlay_hints/implicit_drop.rs
index 27c7c3d498..390139d214 100644
--- a/crates/ide/src/inlay_hints/implicit_drop.rs
+++ b/crates/ide/src/inlay_hints/implicit_drop.rs
@@ -54,7 +54,8 @@ pub(super) fn hints(
};
let range = match terminator.span {
MirSpan::ExprId(e) => match source_map.expr_syntax(e) {
- Ok(s) => {
+ // don't show inlay hint for macro
+ Ok(s) if !s.file_id.is_macro() => {
let root = &s.file_syntax(sema.db);
let expr = s.value.to_node(root);
let expr = expr.syntax();
@@ -69,11 +70,11 @@ pub(super) fn hints(
}
}
}
- Err(_) => continue,
+ _ => continue,
},
MirSpan::PatId(p) => match source_map.pat_syntax(p) {
- Ok(s) => s.value.text_range(),
- Err(_) => continue,
+ Ok(s) if !s.file_id.is_macro() => s.value.text_range(),
+ _ => continue,
},
MirSpan::BindingId(b) => {
match source_map
@@ -81,13 +82,13 @@ pub(super) fn hints(
.iter()
.find_map(|p| source_map.pat_syntax(*p).ok())
{
- Some(s) => s.value.text_range(),
- None => continue,
+ Some(s) if !s.file_id.is_macro() => s.value.text_range(),
+ _ => continue,
}
}
MirSpan::SelfParam => match source_map.self_param_syntax() {
- Some(s) => s.value.text_range(),
- None => continue,
+ Some(s) if !s.file_id.is_macro() => s.value.text_range(),
+ _ => continue,
},
MirSpan::Unknown => continue,
};
@@ -231,4 +232,25 @@ mod tests {
"#,
);
}
+
+ #[test]
+ fn ignore_inlay_hint_for_macro_call() {
+ check_with_config(
+ ONLY_DROP_CONFIG,
+ r#"
+ struct X;
+
+ macro_rules! my_macro {
+ () => {{
+ let bbb = X;
+ bbb
+ }};
+ }
+
+ fn test() -> X {
+ my_macro!()
+ }
+"#,
+ );
+ }
}
diff --git a/crates/ide/src/interpret.rs b/crates/ide/src/interpret.rs
index e0fdc3dd6f..ae11072e34 100644
--- a/crates/ide/src/interpret.rs
+++ b/crates/ide/src/interpret.rs
@@ -7,11 +7,9 @@ use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};
// Feature: Interpret A Function, Static Or Const.
//
-// |===
-// | Editor | Action Name
-//
-// | VS Code | **rust-analyzer: Interpret**
-// |===
+// | Editor | Action Name |
+// |---------|-------------|
+// | VS Code | **rust-analyzer: Interpret** |
pub(crate) fn interpret(db: &RootDatabase, position: FilePosition) -> String {
match find_and_interpret(db, position) {
Some((duration, mut result)) => {
diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs
index e4670177ec..ea18a97070 100644
--- a/crates/ide/src/join_lines.rs
+++ b/crates/ide/src/join_lines.rs
@@ -21,17 +21,13 @@ pub struct JoinLinesConfig {
//
// Join selected lines into one, smartly fixing up whitespace, trailing commas, and braces.
//
-// See
-// https://user-images.githubusercontent.com/1711539/124515923-4504e800-dde9-11eb-8d58-d97945a1a785.gif[this gif]
-// for the cases handled specially by joined lines.
+// See [this gif](https://user-images.githubusercontent.com/1711539/124515923-4504e800-dde9-11eb-8d58-d97945a1a785.gif) for the cases handled specially by joined lines.
//
-// |===
-// | Editor | Action Name
+// | Editor | Action Name |
+// |---------|-------------|
+// | VS Code | **rust-analyzer: Join lines** |
//
-// | VS Code | **rust-analyzer: Join lines**
-// |===
-//
-// image::https://user-images.githubusercontent.com/48062697/113020661-b6922200-917a-11eb-87c4-b75acc028f11.gif[]
+// ![Join Lines](https://user-images.githubusercontent.com/48062697/113020661-b6922200-917a-11eb-87c4-b75acc028f11.gif)
pub(crate) fn join_lines(
config: &JoinLinesConfig,
file: &SourceFile,
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index e942f5a6aa..27a1a510b4 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -252,14 +252,14 @@ impl Analysis {
Arc::new(cfg_options),
None,
Env::default(),
- false,
CrateOrigin::Local { repo: None, name: None },
+ false,
+ None,
);
change.change_file(file_id, Some(text));
let ws_data = crate_graph
.iter()
.zip(iter::repeat(Arc::new(CrateWorkspaceData {
- proc_macro_cwd: None,
data_layout: Err("fixture has no layout".into()),
toolchain: None,
})))
diff --git a/crates/ide/src/matching_brace.rs b/crates/ide/src/matching_brace.rs
index 5735615283..67346ea9cf 100644
--- a/crates/ide/src/matching_brace.rs
+++ b/crates/ide/src/matching_brace.rs
@@ -9,13 +9,11 @@ use syntax::{
// moves cursor to the matching brace. It uses the actual parser to determine
// braces, so it won't confuse generics with comparisons.
//
-// |===
-// | Editor | Action Name
+// | Editor | Action Name |
+// |---------|-------------|
+// | VS Code | **rust-analyzer: Find matching brace** |
//
-// | VS Code | **rust-analyzer: Find matching brace**
-// |===
-//
-// image::https://user-images.githubusercontent.com/48062697/113065573-04298180-91b1-11eb-8dec-d4e2a202f304.gif[]
+// ![Matching Brace](https://user-images.githubusercontent.com/48062697/113065573-04298180-91b1-11eb-8dec-d4e2a202f304.gif)
pub(crate) fn matching_brace(file: &SourceFile, offset: TextSize) -> Option<TextSize> {
const BRACES: &[SyntaxKind] =
&[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>], T![|], T![|]];
diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs
index d97c12ebaf..66ea49a98a 100644
--- a/crates/ide/src/moniker.rs
+++ b/crates/ide/src/moniker.rs
@@ -289,7 +289,10 @@ fn def_to_non_local_moniker(
definition: Definition,
from_crate: Crate,
) -> Option<Moniker> {
- let module = definition.module(db)?;
+ let module = match definition {
+ Definition::Module(module) if module.is_crate_root() => module,
+ _ => definition.module(db)?,
+ };
let krate = module.krate();
let edition = krate.edition(db);
@@ -322,12 +325,18 @@ fn def_to_non_local_moniker(
name: name.display(db, edition).to_string(),
desc: def_to_kind(db, def).into(),
});
- } else if reverse_description.is_empty() {
- // Don't allow the last descriptor to be absent.
- return None;
} else {
match def {
- Definition::Module(module) if module.is_crate_root() => {}
+ Definition::Module(module) if module.is_crate_root() => {
+ // only include `crate` namespace by itself because we prefer
+ // `rust-analyzer cargo foo . bar/` over `rust-analyzer cargo foo . crate/bar/`
+ if reverse_description.is_empty() {
+ reverse_description.push(MonikerDescriptor {
+ name: "crate".to_owned(),
+ desc: MonikerDescriptorKind::Namespace,
+ });
+ }
+ }
_ => {
tracing::error!(?def, "Encountered enclosing definition with no name");
}
@@ -340,6 +349,9 @@ fn def_to_non_local_moniker(
};
def = next_def;
}
+ if reverse_description.is_empty() {
+ return None;
+ }
reverse_description.reverse();
let description = reverse_description;
diff --git a/crates/ide/src/move_item.rs b/crates/ide/src/move_item.rs
index b0df9257ba..3fb3a788b9 100644
--- a/crates/ide/src/move_item.rs
+++ b/crates/ide/src/move_item.rs
@@ -17,14 +17,12 @@ pub enum Direction {
//
// Move item under cursor or selection up and down.
//
-// |===
-// | Editor | Action Name
-//
+// | Editor | Action Name |
+// |---------|-------------|
// | VS Code | **rust-analyzer: Move item up**
// | VS Code | **rust-analyzer: Move item down**
-// |===
//
-// image::https://user-images.githubusercontent.com/48062697/113065576-04298180-91b1-11eb-91ce-4505e99ed598.gif[]
+// ![Move Item](https://user-images.githubusercontent.com/48062697/113065576-04298180-91b1-11eb-91ce-4505e99ed598.gif)
pub(crate) fn move_item(
db: &RootDatabase,
range: FileRange,
diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs
index 7a0c28d925..6d82f9b063 100644
--- a/crates/ide/src/parent_module.rs
+++ b/crates/ide/src/parent_module.rs
@@ -15,13 +15,11 @@ use crate::NavigationTarget;
//
// Navigates to the parent module of the current module.
//
-// |===
-// | Editor | Action Name
+// | Editor | Action Name |
+// |---------|-------------|
+// | VS Code | **rust-analyzer: Locate parent module** |
//
-// | VS Code | **rust-analyzer: Locate parent module**
-// |===
-//
-// image::https://user-images.githubusercontent.com/48062697/113065580-04c21800-91b1-11eb-9a32-00086161c0bd.gif[]
+// ![Parent Module](https://user-images.githubusercontent.com/48062697/113065580-04c21800-91b1-11eb-9a32-00086161c0bd.gif)
/// This returns `Vec` because a module may be included from several places.
pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> {
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index b1079312d3..069818d50e 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -43,13 +43,11 @@ pub struct Declaration {
//
// Shows all references of the item at the cursor location
//
-// |===
-// | Editor | Shortcut
+// | Editor | Shortcut |
+// |---------|----------|
+// | VS Code | <kbd>Shift+Alt+F12</kbd> |
//
-// | VS Code | kbd:[Shift+Alt+F12]
-// |===
-//
-// image::https://user-images.githubusercontent.com/48062697/113020670-b7c34f00-917a-11eb-8003-370ac5f2b3cb.gif[]
+// ![Find All References](https://user-images.githubusercontent.com/48062697/113020670-b7c34f00-917a-11eb-8003-370ac5f2b3cb.gif)
pub(crate) fn find_all_refs(
sema: &Semantics<'_, RootDatabase>,
position: FilePosition,
diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs
index 07dfd83c4e..3e8295e3f0 100644
--- a/crates/ide/src/rename.rs
+++ b/crates/ide/src/rename.rs
@@ -71,13 +71,11 @@ pub(crate) fn prepare_rename(
//
// Renames the item below the cursor and all of its references
//
-// |===
-// | Editor | Shortcut
+// | Editor | Shortcut |
+// |---------|----------|
+// | VS Code | <kbd>F2</kbd> |
//
-// | VS Code | kbd:[F2]
-// |===
-//
-// image::https://user-images.githubusercontent.com/48062697/113065582-055aae80-91b1-11eb-8ade-2b58e6d81883.gif[]
+// ![Rename](https://user-images.githubusercontent.com/48062697/113065582-055aae80-91b1-11eb-8ade-2b58e6d81883.gif)
pub(crate) fn rename(
db: &RootDatabase,
position: FilePosition,
@@ -2003,13 +2001,11 @@ impl Foo {
"foo",
r#"
fn f($0self) -> i32 {
- use self as _;
self.i
}
"#,
r#"
fn f(foo: _) -> i32 {
- use self as _;
foo.i
}
"#,
@@ -2017,6 +2013,26 @@ fn f(foo: _) -> i32 {
}
#[test]
+ fn no_type_value_ns_confuse() {
+ // Test that we don't rename items from different namespaces.
+ check(
+ "bar",
+ r#"
+struct foo {}
+fn f(foo$0: i32) -> i32 {
+ use foo as _;
+}
+"#,
+ r#"
+struct foo {}
+fn f(bar: i32) -> i32 {
+ use foo as _;
+}
+"#,
+ );
+ }
+
+ #[test]
fn test_self_in_path_to_parameter() {
check(
"foo",
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index 32edacee51..509ae3204c 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -4,8 +4,8 @@ use arrayvec::ArrayVec;
use ast::HasName;
use cfg::{CfgAtom, CfgExpr};
use hir::{
- db::HirDatabase, sym, AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, HirFileIdExt,
- ModPath, Name, PathKind, Semantics, Symbol,
+ db::HirDatabase, sym, symbols::FxIndexSet, AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate,
+ HasSource, HirFileIdExt, ModPath, Name, PathKind, Semantics, Symbol,
};
use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
use ide_db::{
@@ -13,7 +13,7 @@ use ide_db::{
documentation::docs_from_attrs,
helpers::visit_file_defs,
search::{FileReferenceNode, SearchScope},
- FilePosition, FxHashMap, FxHashSet, RootDatabase, SymbolKind,
+ FilePosition, FxHashMap, FxIndexMap, RootDatabase, SymbolKind,
};
use itertools::Itertools;
use smallvec::SmallVec;
@@ -61,8 +61,8 @@ pub enum RunnableKind {
#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
enum RunnableDiscKind {
- Test,
TestMod,
+ Test,
DocTest,
Bench,
Bin,
@@ -119,19 +119,18 @@ impl Runnable {
// location**. Super useful for repeatedly running just a single test. Do bind this
// to a shortcut!
//
-// |===
-// | Editor | Action Name
+// | Editor | Action Name |
+// |---------|-------------|
+// | VS Code | **rust-analyzer: Run** |
//
-// | VS Code | **rust-analyzer: Run**
-// |===
-// image::https://user-images.githubusercontent.com/48062697/113065583-055aae80-91b1-11eb-958f-d67efcaf6a2f.gif[]
+// ![Run](https://user-images.githubusercontent.com/48062697/113065583-055aae80-91b1-11eb-958f-d67efcaf6a2f.gif)
pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
let sema = Semantics::new(db);
let mut res = Vec::new();
// Record all runnables that come from macro expansions here instead.
// In case an expansion creates multiple runnables we want to name them to avoid emitting a bunch of equally named runnables.
- let mut in_macro_expansion = FxHashMap::<hir::HirFileId, Vec<Runnable>>::default();
+ let mut in_macro_expansion = FxIndexMap::<hir::HirFileId, Vec<Runnable>>::default();
let mut add_opt = |runnable: Option<Runnable>, def| {
if let Some(runnable) = runnable.filter(|runnable| runnable.nav.file_id == file_id) {
if let Some(def) = def {
@@ -183,20 +182,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
r
})
}));
- res.sort_by(|Runnable { nav, kind, .. }, Runnable { nav: nav_b, kind: kind_b, .. }| {
- // full_range.start < focus_range.start < name, should give us a decent unique ordering
- nav.full_range
- .start()
- .cmp(&nav_b.full_range.start())
- .then_with(|| {
- let t_0 = || TextSize::from(0);
- nav.focus_range
- .map_or_else(t_0, |it| it.start())
- .cmp(&nav_b.focus_range.map_or_else(t_0, |it| it.start()))
- })
- .then_with(|| kind.disc().cmp(&kind_b.disc()))
- .then_with(|| nav.name.cmp(&nav_b.name))
- });
+ res.sort_by(cmp_runnables);
res
}
@@ -207,23 +193,39 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
// The simplest way to use this feature is via the context menu. Right-click on
// the selected item. The context menu opens. Select **Peek Related Tests**.
//
-// |===
-// | Editor | Action Name
-//
-// | VS Code | **rust-analyzer: Peek Related Tests**
-// |===
+// | Editor | Action Name |
+// |---------|-------------|
+// | VS Code | **rust-analyzer: Peek Related Tests** |
pub(crate) fn related_tests(
db: &RootDatabase,
position: FilePosition,
search_scope: Option<SearchScope>,
) -> Vec<Runnable> {
let sema = Semantics::new(db);
- let mut res: FxHashSet<Runnable> = FxHashSet::default();
+ let mut res: FxIndexSet<Runnable> = FxIndexSet::default();
let syntax = sema.parse_guess_edition(position.file_id).syntax().clone();
find_related_tests(&sema, &syntax, position, search_scope, &mut res);
- res.into_iter().collect()
+ res.into_iter().sorted_by(cmp_runnables).collect()
+}
+
+fn cmp_runnables(
+ Runnable { nav, kind, .. }: &Runnable,
+ Runnable { nav: nav_b, kind: kind_b, .. }: &Runnable,
+) -> std::cmp::Ordering {
+ // full_range.start < focus_range.start < name, should give us a decent unique ordering
+ nav.full_range
+ .start()
+ .cmp(&nav_b.full_range.start())
+ .then_with(|| {
+ let t_0 = || TextSize::from(0);
+ nav.focus_range
+ .map_or_else(t_0, |it| it.start())
+ .cmp(&nav_b.focus_range.map_or_else(t_0, |it| it.start()))
+ })
+ .then_with(|| kind.disc().cmp(&kind_b.disc()))
+ .then_with(|| nav.name.cmp(&nav_b.name))
}
fn find_related_tests(
@@ -231,7 +233,7 @@ fn find_related_tests(
syntax: &SyntaxNode,
position: FilePosition,
search_scope: Option<SearchScope>,
- tests: &mut FxHashSet<Runnable>,
+ tests: &mut FxIndexSet<Runnable>,
) {
// FIXME: why is this using references::find_defs, this should use ide_db::search
let defs = match references::find_defs(sema, syntax, position.offset) {
@@ -271,7 +273,7 @@ fn find_related_tests_in_module(
syntax: &SyntaxNode,
fn_def: &ast::Fn,
parent_module: &hir::Module,
- tests: &mut FxHashSet<Runnable>,
+ tests: &mut FxIndexSet<Runnable>,
) {
let fn_name = match fn_def.name() {
Some(it) => it,
@@ -1231,8 +1233,8 @@ gen_main!();
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..315, name: \"\", kind: Module })",
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 267..292, focus_range: 271..276, name: \"tests\", kind: Module, description: \"mod tests\" })",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 283..290, name: \"foo_test\", kind: Function })",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: \"foo_test2\", kind: Function }, true)",
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: \"foo_test2\", kind: Function }, true)",
"(Bin, NavigationTarget { file_id: FileId(0), full_range: 302..314, name: \"main\", kind: Function })",
]
"#]],
@@ -1261,10 +1263,10 @@ foo!();
"#,
expect![[r#"
[
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo0\", kind: Function }, true)",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo1\", kind: Function }, true)",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo2\", kind: Function }, true)",
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)",
]
"#]],
);
@@ -1504,18 +1506,18 @@ mod tests {
file_id: FileId(
0,
),
- full_range: 121..185,
- focus_range: 136..145,
- name: "foo2_test",
+ full_range: 52..115,
+ focus_range: 67..75,
+ name: "foo_test",
kind: Function,
},
NavigationTarget {
file_id: FileId(
0,
),
- full_range: 52..115,
- focus_range: 67..75,
- name: "foo_test",
+ full_range: 121..185,
+ focus_range: 136..145,
+ name: "foo2_test",
kind: Function,
},
]
diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs
index f8c60418eb..f997211600 100644
--- a/crates/ide/src/signature_help.rs
+++ b/crates/ide/src/signature_help.rs
@@ -321,7 +321,9 @@ fn signature_help_for_generics(
format_to!(res.signature, "type {}", it.name(db).display(db, edition));
}
// These don't have generic args that can be specified
- hir::GenericDef::Impl(_) | hir::GenericDef::Const(_) => return None,
+ hir::GenericDef::Impl(_) | hir::GenericDef::Const(_) | hir::GenericDef::Static(_) => {
+ return None
+ }
}
let params = generics_def.params(sema.db);
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index 8050a38b3c..07553a87d2 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -154,6 +154,7 @@ impl StaticIndex<'_> {
implicit_drop_hints: false,
hide_named_constructor_hints: false,
hide_closure_initialization_hints: false,
+ hide_closure_parameter_hints: false,
closure_style: hir::ClosureStyle::ImplFn,
param_names_for_lifetime_elision_hints: false,
binding_mode_hints: false,
@@ -169,10 +170,10 @@ impl StaticIndex<'_> {
.unwrap();
// hovers
let sema = hir::Semantics::new(self.db);
- let tokens_or_nodes = sema.parse_guess_edition(file_id).syntax().clone();
+ let root = sema.parse_guess_edition(file_id).syntax().clone();
let edition =
sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
- let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|it| match it {
+ let tokens = root.descendants_with_tokens().filter_map(|it| match it {
syntax::NodeOrToken::Node(_) => None,
syntax::NodeOrToken::Token(it) => Some(it),
});
@@ -194,24 +195,19 @@ impl StaticIndex<'_> {
)
});
let mut result = StaticIndexedFile { file_id, inlay_hints, folds, tokens: vec![] };
- for token in tokens {
- let range = token.text_range();
- let node = token.parent().unwrap();
- let def = match get_definition(&sema, token.clone()) {
- Some(it) => it,
- None => continue,
- };
+
+ let mut add_token = |def: Definition, range: TextRange, scope_node: &SyntaxNode| {
let id = if let Some(it) = self.def_map.get(&def) {
*it
} else {
let it = self.tokens.insert(TokenStaticData {
- documentation: documentation_for_definition(&sema, def, &node),
+ documentation: documentation_for_definition(&sema, def, scope_node),
hover: Some(hover_for_definition(
&sema,
file_id,
def,
None,
- &node,
+ scope_node,
None,
false,
&hover_config,
@@ -240,6 +236,22 @@ impl StaticIndex<'_> {
},
});
result.tokens.push((range, id));
+ };
+
+ if let Some(module) = sema.file_to_module_def(file_id) {
+ let def = Definition::Module(module);
+ let range = root.text_range();
+ add_token(def, range, &root);
+ }
+
+ for token in tokens {
+ let range = token.text_range();
+ let node = token.parent().unwrap();
+ let def = match get_definition(&sema, token.clone()) {
+ Some(it) => it,
+ None => continue,
+ };
+ add_token(def, range, &node);
}
self.files.push(result);
}
@@ -300,6 +312,10 @@ mod tests {
let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect();
for f in s.files {
for (range, _) in f.tokens {
+ if range.start() == TextSize::from(0) {
+ // ignore whole file range corresponding to module definition
+ continue;
+ }
let it = FileRange { file_id: f.file_id, range };
if !range_set.contains(&it) {
panic!("additional range {it:?}");
diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs
index 9e823daa2b..f8ecaa8fdf 100644
--- a/crates/ide/src/status.rs
+++ b/crates/ide/src/status.rs
@@ -29,12 +29,11 @@ use triomphe::Arc;
//
// Shows internal statistic about memory usage of rust-analyzer.
//
-// |===
-// | Editor | Action Name
+// | Editor | Action Name |
+// |---------|-------------|
+// | VS Code | **rust-analyzer: Status** |
//
-// | VS Code | **rust-analyzer: Status**
-// |===
-// image::https://user-images.githubusercontent.com/48062697/113065584-05f34500-91b1-11eb-98cc-5c196f76be7f.gif[]
+// ![Status](https://user-images.githubusercontent.com/48062697/113065584-05f34500-91b1-11eb-98cc-5c196f76be7f.gif)
pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
let mut buf = String::new();
@@ -69,6 +68,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
dependencies,
origin,
is_proc_macro,
+ proc_macro_cwd,
} = &crate_graph[crate_id];
format_to!(
buf,
@@ -86,6 +86,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
format_to!(buf, " Env: {:?}\n", env);
format_to!(buf, " Origin: {:?}\n", origin);
format_to!(buf, " Is a proc macro crate: {}\n", is_proc_macro);
+ format_to!(buf, " Proc macro cwd: {:?}\n", proc_macro_cwd);
let deps = dependencies
.iter()
.map(|dep| format!("{}={}", dep.name, dep.crate_id.into_raw()))
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index f53f0aec09..1853e3a340 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -76,113 +76,118 @@ pub struct HighlightConfig {
// We also give special modifier for `mut` and `&mut` local variables.
//
//
-// .Token Tags
+// #### Token Tags
//
// Rust-analyzer currently emits the following token tags:
//
// - For items:
-// +
-// [horizontal]
-// attribute:: Emitted for attribute macros.
-// enum:: Emitted for enums.
-// function:: Emitted for free-standing functions.
-// derive:: Emitted for derive macros.
-// macro:: Emitted for function-like macros.
-// method:: Emitted for associated functions, also knowns as methods.
-// namespace:: Emitted for modules.
-// struct:: Emitted for structs.
-// trait:: Emitted for traits.
-// typeAlias:: Emitted for type aliases and `Self` in `impl`s.
-// union:: Emitted for unions.
+//
+// | | |
+// |-----------|--------------------------------|
+// | attribute | Emitted for attribute macros. |
+// |enum| Emitted for enums. |
+// |function| Emitted for free-standing functions. |
+// |derive| Emitted for derive macros. |
+// |macro| Emitted for function-like macros. |
+// |method| Emitted for associated functions, also known as methods. |
+// |namespace| Emitted for modules. |
+// |struct| Emitted for structs.|
+// |trait| Emitted for traits.|
+// |typeAlias| Emitted for type aliases and `Self` in `impl`s.|
+// |union| Emitted for unions.|
//
// - For literals:
-// +
-// [horizontal]
-// boolean:: Emitted for the boolean literals `true` and `false`.
-// character:: Emitted for character literals.
-// number:: Emitted for numeric literals.
-// string:: Emitted for string literals.
-// escapeSequence:: Emitted for escaped sequences inside strings like `\n`.
-// formatSpecifier:: Emitted for format specifiers `{:?}` in `format!`-like macros.
+//
+// | | |
+// |-----------|--------------------------------|
+// | boolean| Emitted for the boolean literals `true` and `false`.|
+// | character| Emitted for character literals.|
+// | number| Emitted for numeric literals.|
+// | string| Emitted for string literals.|
+// | escapeSequence| Emitted for escaped sequences inside strings like `\n`.|
+// | formatSpecifier| Emitted for format specifiers `{:?}` in `format!`-like macros.|
//
// - For operators:
-// +
-// [horizontal]
-// operator:: Emitted for general operators.
-// arithmetic:: Emitted for the arithmetic operators `+`, `-`, `*`, `/`, `+=`, `-=`, `*=`, `/=`.
-// bitwise:: Emitted for the bitwise operators `|`, `&`, `!`, `^`, `|=`, `&=`, `^=`.
-// comparison:: Emitted for the comparison operators `>`, `<`, `==`, `>=`, `<=`, `!=`.
-// logical:: Emitted for the logical operators `||`, `&&`, `!`.
+//
+// | | |
+// |-----------|--------------------------------|
+// |operator| Emitted for general operators.|
+// |arithmetic| Emitted for the arithmetic operators `+`, `-`, `*`, `/`, `+=`, `-=`, `*=`, `/=`.|
+// |bitwise| Emitted for the bitwise operators `\|`, `&`, `!`, `^`, `\|=`, `&=`, `^=`.|
+// |comparison| Emitted for the comparison operators `>`, `<`, `==`, `>=`, `<=`, `!=`.|
+// |logical| Emitted for the logical operators `\|\|`, `&&`, `!`.|
//
// - For punctuation:
-// +
-// [horizontal]
-// punctuation:: Emitted for general punctuation.
-// attributeBracket:: Emitted for attribute invocation brackets, that is the `#[` and `]` tokens.
-// angle:: Emitted for `<>` angle brackets.
-// brace:: Emitted for `{}` braces.
-// bracket:: Emitted for `[]` brackets.
-// parenthesis:: Emitted for `()` parentheses.
-// colon:: Emitted for the `:` token.
-// comma:: Emitted for the `,` token.
-// dot:: Emitted for the `.` token.
-// semi:: Emitted for the `;` token.
-// macroBang:: Emitted for the `!` token in macro calls.
//
-// //-
+// | | |
+// |-----------|--------------------------------|
+// |punctuation| Emitted for general punctuation.|
+// |attributeBracket| Emitted for attribute invocation brackets, that is the `#[` and `]` tokens.|
+// |angle| Emitted for `<>` angle brackets.|
+// |brace| Emitted for `{}` braces.|
+// |bracket| Emitted for `[]` brackets.|
+// |parenthesis| Emitted for `()` parentheses.|
+// |colon| Emitted for the `:` token.|
+// |comma| Emitted for the `,` token.|
+// |dot| Emitted for the `.` token.|
+// |semi| Emitted for the `;` token.|
+// |macroBang| Emitted for the `!` token in macro calls.|
//
-// [horizontal]
-// builtinAttribute:: Emitted for names to builtin attributes in attribute path, the `repr` in `#[repr(u8)]` for example.
-// builtinType:: Emitted for builtin types like `u32`, `str` and `f32`.
-// comment:: Emitted for comments.
-// constParameter:: Emitted for const parameters.
-// deriveHelper:: Emitted for derive helper attributes.
-// enumMember:: Emitted for enum variants.
-// generic:: Emitted for generic tokens that have no mapping.
-// keyword:: Emitted for keywords.
-// label:: Emitted for labels.
-// lifetime:: Emitted for lifetimes.
-// parameter:: Emitted for non-self function parameters.
-// property:: Emitted for struct and union fields.
-// selfKeyword:: Emitted for the self function parameter and self path-specifier.
-// selfTypeKeyword:: Emitted for the Self type parameter.
-// toolModule:: Emitted for tool modules.
-// typeParameter:: Emitted for type parameters.
-// unresolvedReference:: Emitted for unresolved references, names that rust-analyzer can't find the definition of.
-// variable:: Emitted for locals, constants and statics.
+//-
//
+// | | |
+// |-----------|--------------------------------|
+// |builtinAttribute| Emitted for names to builtin attributes in attribute path, the `repr` in `#[repr(u8)]` for example.|
+// |builtinType| Emitted for builtin types like `u32`, `str` and `f32`.|
+// |comment| Emitted for comments.|
+// |constParameter| Emitted for const parameters.|
+// |deriveHelper| Emitted for derive helper attributes.|
+// |enumMember| Emitted for enum variants.|
+// |generic| Emitted for generic tokens that have no mapping.|
+// |keyword| Emitted for keywords.|
+// |label| Emitted for labels.|
+// |lifetime| Emitted for lifetimes.|
+// |parameter| Emitted for non-self function parameters.|
+// |property| Emitted for struct and union fields.|
+// |selfKeyword| Emitted for the self function parameter and self path-specifier.|
+// |selfTypeKeyword| Emitted for the Self type parameter.|
+// |toolModule| Emitted for tool modules.|
+// |typeParameter| Emitted for type parameters.|
+// |unresolvedReference| Emitted for unresolved references, names that rust-analyzer can't find the definition of.|
+// |variable| Emitted for locals, constants and statics.|
//
-// .Token Modifiers
+//
+// #### Token Modifiers
//
// Token modifiers allow to style some elements in the source code more precisely.
//
// Rust-analyzer currently emits the following token modifiers:
//
-// [horizontal]
-// async:: Emitted for async functions and the `async` and `await` keywords.
-// attribute:: Emitted for tokens inside attributes.
-// callable:: Emitted for locals whose types implements one of the `Fn*` traits.
-// constant:: Emitted for consts.
-// consuming:: Emitted for locals that are being consumed when use in a function call.
-// controlFlow:: Emitted for control-flow related tokens, this includes the `?` operator.
-// crateRoot:: Emitted for crate names, like `serde` and `crate`.
-// declaration:: Emitted for names of definitions, like `foo` in `fn foo() {}`.
-// defaultLibrary:: Emitted for items from built-in crates (std, core, alloc, test and proc_macro).
-// documentation:: Emitted for documentation comments.
-// injected:: Emitted for doc-string injected highlighting like rust source blocks in documentation.
-// intraDocLink:: Emitted for intra doc links in doc-strings.
-// library:: Emitted for items that are defined outside of the current crate.
-// macro:: Emitted for tokens inside macro calls.
-// mutable:: Emitted for mutable locals and statics as well as functions taking `&mut self`.
-// public:: Emitted for items that are from the current crate and are `pub`.
-// reference:: Emitted for locals behind a reference and functions taking `self` by reference.
-// static:: Emitted for "static" functions, also known as functions that do not take a `self` param, as well as statics and consts.
-// trait:: Emitted for associated trait items.
-// unsafe:: Emitted for unsafe operations, like unsafe function calls, as well as the `unsafe` token.
-//
+// | | |
+// |-----------|--------------------------------|
+// |async| Emitted for async functions and the `async` and `await` keywords.|
+// |attribute| Emitted for tokens inside attributes.|
+// |callable| Emitted for locals whose types implements one of the `Fn*` traits.|
+// |constant| Emitted for consts.|
+// |consuming| Emitted for locals that are being consumed when used in a function call.|
+// |controlFlow| Emitted for control-flow related tokens, this includes the `?` operator.|
+// |crateRoot| Emitted for crate names, like `serde` and `crate`.|
+// |declaration| Emitted for names of definitions, like `foo` in `fn foo() {}`.|
+// |defaultLibrary| Emitted for items from built-in crates (std, core, alloc, test and proc_macro).|
+// |documentation| Emitted for documentation comments.|
+// |injected| Emitted for doc-string injected highlighting like rust source blocks in documentation.|
+// |intraDocLink| Emitted for intra doc links in doc-strings.|
+// |library| Emitted for items that are defined outside of the current crate.|
+// |macro| Emitted for tokens inside macro calls.|
+// |mutable| Emitted for mutable locals and statics as well as functions taking `&mut self`.|
+// |public| Emitted for items that are from the current crate and are `pub`.|
+// |reference| Emitted for locals behind a reference and functions taking `self` by reference.|
+// |static| Emitted for "static" functions, also known as functions that do not take a `self` param, as well as statics and consts.|
+// |trait| Emitted for associated trait items.|
+// |unsafe| Emitted for unsafe operations, like unsafe function calls, as well as the `unsafe` token.|
//
-// image::https://user-images.githubusercontent.com/48062697/113164457-06cfb980-9239-11eb-819b-0f93e646acf8.png[]
-// image::https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png[]
+// ![Semantic Syntax Highlighting](https://user-images.githubusercontent.com/48062697/113164457-06cfb980-9239-11eb-819b-0f93e646acf8.png)
+// ![Semantic Syntax Highlighting](https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png)
pub(crate) fn highlight(
db: &RootDatabase,
config: HighlightConfig,
@@ -478,7 +483,15 @@ fn traverse(
{
continue;
}
- highlight_format_string(hl, sema, krate, &string, &expanded_string, range);
+ highlight_format_string(
+ hl,
+ sema,
+ krate,
+ &string,
+ &expanded_string,
+ range,
+ file_id.edition(),
+ );
if !string.is_raw() {
highlight_escape_string(hl, &string, range.start());
@@ -526,6 +539,7 @@ fn traverse(
&mut bindings_shadow_count,
config.syntactic_name_ref_highlighting,
name_like,
+ file_id.edition(),
),
NodeOrToken::Token(token) => {
highlight::token(sema, token, file_id.edition()).zip(Some(None))
diff --git a/crates/ide/src/syntax_highlighting/format.rs b/crates/ide/src/syntax_highlighting/format.rs
index 7234108701..43a6bdad7e 100644
--- a/crates/ide/src/syntax_highlighting/format.rs
+++ b/crates/ide/src/syntax_highlighting/format.rs
@@ -4,6 +4,7 @@ use ide_db::{
syntax_helpers::format_string::{is_format_string, lex_format_specifiers, FormatSpecifier},
SymbolKind,
};
+use span::Edition;
use syntax::{ast, TextRange};
use crate::{
@@ -18,6 +19,7 @@ pub(super) fn highlight_format_string(
string: &ast::String,
expanded_string: &ast::String,
range: TextRange,
+ edition: Edition,
) {
if is_format_string(expanded_string) {
// FIXME: Replace this with the HIR info we have now.
@@ -39,7 +41,7 @@ pub(super) fn highlight_format_string(
if let Some(res) = res {
stack.add(HlRange {
range,
- highlight: highlight_def(sema, krate, Definition::from(res)),
+ highlight: highlight_def(sema, krate, Definition::from(res), edition),
binding_hash: None,
})
}
diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs
index 22a2fe4e9e..194fde1160 100644
--- a/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/crates/ide/src/syntax_highlighting/highlight.rs
@@ -58,6 +58,7 @@ pub(super) fn name_like(
bindings_shadow_count: &mut FxHashMap<hir::Name, u32>,
syntactic_name_ref_highlighting: bool,
name_like: ast::NameLike,
+ edition: Edition,
) -> Option<(Highlight, Option<u64>)> {
let mut binding_hash = None;
let highlight = match name_like {
@@ -68,16 +69,17 @@ pub(super) fn name_like(
&mut binding_hash,
syntactic_name_ref_highlighting,
name_ref,
+ edition,
),
ast::NameLike::Name(name) => {
- highlight_name(sema, bindings_shadow_count, &mut binding_hash, krate, name)
+ highlight_name(sema, bindings_shadow_count, &mut binding_hash, krate, name, edition)
}
ast::NameLike::Lifetime(lifetime) => match IdentClass::classify_lifetime(sema, &lifetime) {
Some(IdentClass::NameClass(NameClass::Definition(def))) => {
- highlight_def(sema, krate, def) | HlMod::Definition
+ highlight_def(sema, krate, def, edition) | HlMod::Definition
}
Some(IdentClass::NameRefClass(NameRefClass::Definition(def, _))) => {
- highlight_def(sema, krate, def)
+ highlight_def(sema, krate, def, edition)
}
// FIXME: Fallback for 'static and '_, as we do not resolve these yet
_ => SymbolKind::LifetimeParam.into(),
@@ -234,16 +236,17 @@ fn highlight_name_ref(
binding_hash: &mut Option<u64>,
syntactic_name_ref_highlighting: bool,
name_ref: ast::NameRef,
+ edition: Edition,
) -> Highlight {
let db = sema.db;
- if let Some(res) = highlight_method_call_by_name_ref(sema, krate, &name_ref) {
+ if let Some(res) = highlight_method_call_by_name_ref(sema, krate, &name_ref, edition) {
return res;
}
let name_class = match NameRefClass::classify(sema, &name_ref) {
Some(name_kind) => name_kind,
None if syntactic_name_ref_highlighting => {
- return highlight_name_ref_by_syntax(name_ref, sema, krate)
+ return highlight_name_ref_by_syntax(name_ref, sema, krate, edition)
}
// FIXME: This is required for helper attributes used by proc-macros, as those do not map down
// to anything when used.
@@ -267,7 +270,7 @@ fn highlight_name_ref(
*binding_hash = Some(calc_binding_hash(&name, *shadow_count))
};
- let mut h = highlight_def(sema, krate, def);
+ let mut h = highlight_def(sema, krate, def, edition);
match def {
Definition::Local(local) if is_consumed_lvalue(name_ref.syntax(), &local, db) => {
@@ -305,7 +308,7 @@ fn highlight_name_ref(
h
}
NameRefClass::FieldShorthand { field_ref, .. } => {
- highlight_def(sema, krate, field_ref.into())
+ highlight_def(sema, krate, field_ref.into(), edition)
}
NameRefClass::ExternCrateShorthand { decl, krate: resolved_krate } => {
let mut h = HlTag::Symbol(SymbolKind::Module).into();
@@ -341,6 +344,7 @@ fn highlight_name(
binding_hash: &mut Option<u64>,
krate: hir::Crate,
name: ast::Name,
+ edition: Edition,
) -> Highlight {
let name_kind = NameClass::classify(sema, &name);
if let Some(NameClass::Definition(Definition::Local(local))) = &name_kind {
@@ -351,7 +355,7 @@ fn highlight_name(
};
match name_kind {
Some(NameClass::Definition(def)) => {
- let mut h = highlight_def(sema, krate, def) | HlMod::Definition;
+ let mut h = highlight_def(sema, krate, def, edition) | HlMod::Definition;
if let Definition::Trait(trait_) = &def {
if trait_.is_unsafe(sema.db) {
h |= HlMod::Unsafe;
@@ -359,7 +363,7 @@ fn highlight_name(
}
h
}
- Some(NameClass::ConstReference(def)) => highlight_def(sema, krate, def),
+ Some(NameClass::ConstReference(def)) => highlight_def(sema, krate, def, edition),
Some(NameClass::PatFieldShorthand { field_ref, .. }) => {
let mut h = HlTag::Symbol(SymbolKind::Field).into();
if let hir::VariantDef::Union(_) = field_ref.parent_def(sema.db) {
@@ -379,6 +383,7 @@ pub(super) fn highlight_def(
sema: &Semantics<'_, RootDatabase>,
krate: hir::Crate,
def: Definition,
+ edition: Edition,
) -> Highlight {
let db = sema.db;
let mut h = match def {
@@ -427,7 +432,12 @@ pub(super) fn highlight_def(
}
}
- if func.is_unsafe_to_call(db) {
+ // FIXME: Passing `None` here means not-unsafe functions with `#[target_feature]` will be
+ // highlighted as unsafe, even when the current target features set is a superset (RFC 2396).
+ // We probably should consider checking the current function, but I found no easy way to do
+ // that (also I'm worried about perf). There's also an instance below.
+ // FIXME: This should be the edition of the call.
+ if func.is_unsafe_to_call(db, None, edition) {
h |= HlMod::Unsafe;
}
if func.is_async(db) {
@@ -575,21 +585,23 @@ fn highlight_method_call_by_name_ref(
sema: &Semantics<'_, RootDatabase>,
krate: hir::Crate,
name_ref: &ast::NameRef,
+ edition: Edition,
) -> Option<Highlight> {
let mc = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast)?;
- highlight_method_call(sema, krate, &mc)
+ highlight_method_call(sema, krate, &mc, edition)
}
fn highlight_method_call(
sema: &Semantics<'_, RootDatabase>,
krate: hir::Crate,
method_call: &ast::MethodCallExpr,
+ edition: Edition,
) -> Option<Highlight> {
let func = sema.resolve_method_call(method_call)?;
let mut h = SymbolKind::Method.into();
- if func.is_unsafe_to_call(sema.db) || sema.is_unsafe_method_call(method_call) {
+ if func.is_unsafe_to_call(sema.db, None, edition) || sema.is_unsafe_method_call(method_call) {
h |= HlMod::Unsafe;
}
if func.is_async(sema.db) {
@@ -665,6 +677,12 @@ fn highlight_name_by_syntax(name: ast::Name) -> Highlight {
STATIC => SymbolKind::Static,
IDENT_PAT => SymbolKind::Local,
FORMAT_ARGS_ARG => SymbolKind::Local,
+ RENAME => SymbolKind::Local,
+ MACRO_RULES => SymbolKind::Macro,
+ CONST_PARAM => SymbolKind::ConstParam,
+ SELF_PARAM => SymbolKind::SelfParam,
+ TRAIT_ALIAS => SymbolKind::TraitAlias,
+ ASM_OPERAND_NAMED => SymbolKind::Local,
_ => return default.into(),
};
@@ -675,6 +693,7 @@ fn highlight_name_ref_by_syntax(
name: ast::NameRef,
sema: &Semantics<'_, RootDatabase>,
krate: hir::Crate,
+ edition: Edition,
) -> Highlight {
let default = HlTag::UnresolvedReference;
@@ -684,8 +703,9 @@ fn highlight_name_ref_by_syntax(
};
match parent.kind() {
+ EXTERN_CRATE => HlTag::Symbol(SymbolKind::Module) | HlMod::CrateRoot,
METHOD_CALL_EXPR => ast::MethodCallExpr::cast(parent)
- .and_then(|it| highlight_method_call(sema, krate, &it))
+ .and_then(|it| highlight_method_call(sema, krate, &it, edition))
.unwrap_or_else(|| SymbolKind::Method.into()),
FIELD_EXPR => {
let h = HlTag::Symbol(SymbolKind::Field);
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index 5ff96ae2a7..eb77c14c2a 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -50,6 +50,15 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
<span class="comment documentation">//! ```</span>
+<span class="comment documentation">//! ```rust</span>
+<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="self_keyword crate_root injected">self</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">std</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">core</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">alloc</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">proc_macro</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">test</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">Krate</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//! ```</span>
<span class="keyword">mod</span> <span class="module declaration">outline_module</span><span class="semicolon">;</span>
<span class="comment documentation">/// ```</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
index 9be7c92fc7..9477d0d1b8 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
@@ -48,17 +48,6 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<pre><code><span class="keyword">use</span> <span class="module">inner</span><span class="operator">::</span><span class="brace">{</span><span class="self_keyword">self</span> <span class="keyword">as</span> <span class="module declaration">inner_mod</span><span class="brace">}</span><span class="semicolon">;</span>
<span class="keyword">mod</span> <span class="module declaration">inner</span> <span class="brace">{</span><span class="brace">}</span>
-<span class="keyword">pub</span> <span class="keyword">mod</span> <span class="module declaration public">ops</span> <span class="brace">{</span>
- <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn_once"</span><span class="attribute_bracket attribute">]</span>
- <span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">FnOnce</span><span class="angle">&lt;</span><span class="type_param declaration">Args</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="brace">}</span>
-
- <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn_mut"</span><span class="attribute_bracket attribute">]</span>
- <span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">FnMut</span><span class="angle">&lt;</span><span class="type_param declaration">Args</span><span class="angle">&gt;</span><span class="colon">:</span> <span class="trait public">FnOnce</span><span class="angle">&lt;</span><span class="type_param">Args</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="brace">}</span>
-
- <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn"</span><span class="attribute_bracket attribute">]</span>
- <span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">Fn</span><span class="angle">&lt;</span><span class="type_param declaration">Args</span><span class="angle">&gt;</span><span class="colon">:</span> <span class="trait public">FnMut</span><span class="angle">&lt;</span><span class="type_param">Args</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="brace">}</span>
-<span class="brace">}</span>
-
<span class="keyword">struct</span> <span class="struct declaration">Foo</span> <span class="brace">{</span>
<span class="field declaration">x</span><span class="colon">:</span> <span class="builtin_type">u32</span><span class="comma">,</span>
<span class="brace">}</span>
@@ -125,8 +114,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="const_param const">FOO</span>
<span class="brace">}</span>
-<span class="keyword">use</span> <span class="module public">ops</span><span class="operator">::</span><span class="trait public">Fn</span><span class="semicolon">;</span>
-<span class="keyword">fn</span> <span class="function declaration">baz</span><span class="angle">&lt;</span><span class="type_param declaration">F</span><span class="colon">:</span> <span class="trait public">Fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="value_param callable declaration">f</span><span class="colon">:</span> <span class="type_param">F</span><span class="parenthesis">)</span> <span class="brace">{</span>
+<span class="keyword">use</span> <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">ops</span><span class="operator">::</span><span class="trait default_library library">Fn</span><span class="semicolon">;</span>
+<span class="keyword">fn</span> <span class="function declaration">baz</span><span class="angle">&lt;</span><span class="type_param declaration">F</span><span class="colon">:</span> <span class="trait default_library library">Fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="value_param callable declaration">f</span><span class="colon">:</span> <span class="type_param">F</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="value_param callable">f</span><span class="parenthesis">(</span><span class="parenthesis">)</span>
<span class="brace">}</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index 0a7e273950..1794d7dbfe 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -82,6 +82,10 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="parenthesis">(</span><span class="punctuation">$</span>literal<span class="colon">:</span>literal<span class="parenthesis">)</span> <span class="operator">=</span><span class="operator">&gt;</span> <span class="brace">{</span><span class="brace">{</span>stringify<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>literal<span class="parenthesis">)</span><span class="semicolon">;</span> format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>literal<span class="parenthesis">)</span><span class="brace">}</span><span class="brace">}</span><span class="semicolon">;</span>
<span class="brace">}</span>
+<span class="keyword">use</span> <span class="unresolved_reference">foo</span><span class="operator">::</span><span class="unresolved_reference">bar</span> <span class="keyword">as</span> <span class="variable declaration">baz</span><span class="semicolon">;</span>
+<span class="keyword">trait</span> <span class="trait_alias declaration">Bar</span> <span class="operator">=</span> <span class="unresolved_reference">Baz</span><span class="semicolon">;</span>
+<span class="keyword">trait</span> <span class="trait_alias declaration">Foo</span> <span class="operator">=</span> <span class="trait_alias">Bar</span><span class="semicolon">;</span>
+
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="char_literal">'</span><span class="escape_sequence">\n</span><span class="char_literal">'</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="char_literal">'</span><span class="escape_sequence">\t</span><span class="char_literal">'</span><span class="semicolon">;</span>
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index af52b33de6..3775265f23 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -136,22 +136,11 @@ use self::foo as bar;
fn test_highlighting() {
check_highlighting(
r#"
-//- minicore: derive, copy
+//- minicore: derive, copy, fn
//- /main.rs crate:main deps:foo
use inner::{self as inner_mod};
mod inner {}
-pub mod ops {
- #[lang = "fn_once"]
- pub trait FnOnce<Args> {}
-
- #[lang = "fn_mut"]
- pub trait FnMut<Args>: FnOnce<Args> {}
-
- #[lang = "fn"]
- pub trait Fn<Args>: FnMut<Args> {}
-}
-
struct Foo {
x: u32,
}
@@ -218,7 +207,7 @@ fn const_param<const FOO: usize>() -> usize {
FOO
}
-use ops::Fn;
+use core::ops::Fn;
fn baz<F: Fn() -> ()>(f: F) {
f()
}
@@ -466,6 +455,10 @@ macro_rules! reuse_twice {
($literal:literal) => {{stringify!($literal); format_args!($literal)}};
}
+use foo::bar as baz;
+trait Bar = Baz;
+trait Foo = Bar;
+
fn main() {
let a = '\n';
let a = '\t';
@@ -718,6 +711,15 @@ fn test_highlight_doc_comment() {
//! fn test() {}
//! ```
+//! ```rust
+//! extern crate self;
+//! extern crate std;
+//! extern crate core;
+//! extern crate alloc;
+//! extern crate proc_macro;
+//! extern crate test;
+//! extern crate Krate;
+//! ```
mod outline_module;
/// ```
@@ -1080,6 +1082,9 @@ pub struct Struct;
);
}
+// Rainbow highlighting uses a deterministic hash (fxhash) but the hashing does differ
+// depending on the pointer width so only runs this on 64-bit targets.
+#[cfg(target_pointer_width = "64")]
#[test]
fn test_rainbow_highlighting() {
check_highlighting(
diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs
index 47d75f1c95..8c9dd05145 100644
--- a/crates/ide/src/typing.rs
+++ b/crates/ide/src/typing.rs
@@ -51,16 +51,15 @@ struct ExtendedTextEdit {
// - typing `{` in a use item adds a closing `}` in the right place
// - typing `>` to complete a return type `->` will insert a whitespace after it
//
-// VS Code::
+// #### VS Code
//
// Add the following to `settings.json`:
-// [source,json]
-// ----
+// ```json
// "editor.formatOnType": true,
-// ----
+// ```
//
-// image::https://user-images.githubusercontent.com/48062697/113166163-69758500-923a-11eb-81ee-eb33ec380399.gif[]
-// image::https://user-images.githubusercontent.com/48062697/113171066-105c2000-923f-11eb-87ab-f4a263346567.gif[]
+// ![On Typing Assists](https://user-images.githubusercontent.com/48062697/113166163-69758500-923a-11eb-81ee-eb33ec380399.gif)
+// ![On Typing Assists](https://user-images.githubusercontent.com/48062697/113171066-105c2000-923f-11eb-87ab-f4a263346567.gif)
pub(crate) fn on_char_typed(
db: &RootDatabase,
position: FilePosition,
diff --git a/crates/ide/src/typing/on_enter.rs b/crates/ide/src/typing/on_enter.rs
index e249c38c73..c6d1c283f4 100644
--- a/crates/ide/src/typing/on_enter.rs
+++ b/crates/ide/src/typing/on_enter.rs
@@ -16,12 +16,12 @@ use ide_db::text_edit::TextEdit;
// Feature: On Enter
//
-// rust-analyzer can override kbd:[Enter] key to make it smarter:
+// rust-analyzer can override <kbd>Enter</kbd> key to make it smarter:
//
-// - kbd:[Enter] inside triple-slash comments automatically inserts `///`
-// - kbd:[Enter] in the middle or after a trailing space in `//` inserts `//`
-// - kbd:[Enter] inside `//!` doc comments automatically inserts `//!`
-// - kbd:[Enter] after `{` indents contents and closing `}` of single-line block
+// - <kbd>Enter</kbd> inside triple-slash comments automatically inserts `///`
+// - <kbd>Enter</kbd> in the middle or after a trailing space in `//` inserts `//`
+// - <kbd>Enter</kbd> inside `//!` doc comments automatically inserts `//!`
+// - <kbd>Enter</kbd> after `{` indents contents and closing `}` of single-line block
//
// This action needs to be assigned to shortcut explicitly.
//
@@ -29,29 +29,27 @@ use ide_db::text_edit::TextEdit;
// Similarly, if rust-analyzer crashes or stops responding, `Enter` might not work.
// In that case, you can still press `Shift-Enter` to insert a newline.
//
-// VS Code::
+// #### VS Code
//
// Add the following to `keybindings.json`:
-// [source,json]
-// ----
+// ```json
// {
// "key": "Enter",
// "command": "rust-analyzer.onEnter",
// "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust"
// }
-// ----
+// ```
//
// When using the Vim plugin:
-// [source,json]
-// ----
+// ```json
// {
// "key": "Enter",
// "command": "rust-analyzer.onEnter",
// "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust && vim.mode == 'Insert'"
// }
-// ----
+// ```
//
-// image::https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif[]
+// ![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif)
pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> {
let parse = db.parse(EditionedFileId::current_edition(position.file_id));
let file = parse.tree();
diff --git a/crates/ide/src/view_crate_graph.rs b/crates/ide/src/view_crate_graph.rs
index 9ff099f479..eb6eb7da1e 100644
--- a/crates/ide/src/view_crate_graph.rs
+++ b/crates/ide/src/view_crate_graph.rs
@@ -12,11 +12,9 @@ use triomphe::Arc;
//
// Only workspace crates are included, no crates.io dependencies or sysroot crates.
//
-// |===
-// | Editor | Action Name
-//
-// | VS Code | **rust-analyzer: View Crate Graph**
-// |===
+// | Editor | Action Name |
+// |---------|-------------|
+// | VS Code | **rust-analyzer: View Crate Graph** |
pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String, String> {
let crate_graph = db.crate_graph();
let crates_to_render = crate_graph
@@ -86,7 +84,8 @@ impl<'a> dot::Labeller<'a, CrateId, Edge<'a>> for DotCrateGraph {
}
fn node_label(&'a self, n: &CrateId) -> LabelText<'a> {
- let name = self.graph[*n].display_name.as_ref().map_or("(unnamed crate)", |name| name);
+ let name =
+ self.graph[*n].display_name.as_ref().map_or("(unnamed crate)", |name| name.as_str());
LabelText::LabelStr(name.into())
}
}
diff --git a/crates/ide/src/view_hir.rs b/crates/ide/src/view_hir.rs
index fe532f4cc5..bfdf9d0f33 100644
--- a/crates/ide/src/view_hir.rs
+++ b/crates/ide/src/view_hir.rs
@@ -4,12 +4,11 @@ use syntax::{algo::ancestors_at_offset, ast, AstNode};
// Feature: View Hir
//
-// |===
-// | Editor | Action Name
-//
+// | Editor | Action Name |
+// |---------|--------------|
// | VS Code | **rust-analyzer: View Hir**
-// |===
-// image::https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif[]
+//
+// ![View Hir](https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif)
pub(crate) fn view_hir(db: &RootDatabase, position: FilePosition) -> String {
body_hir(db, position).unwrap_or_else(|| "Not inside a function body".to_owned())
}
diff --git a/crates/ide/src/view_item_tree.rs b/crates/ide/src/view_item_tree.rs
index a6352b99d4..67c241cbb9 100644
--- a/crates/ide/src/view_item_tree.rs
+++ b/crates/ide/src/view_item_tree.rs
@@ -6,11 +6,9 @@ use span::EditionedFileId;
//
// Displays the ItemTree of the currently open file, for debugging.
//
-// |===
-// | Editor | Action Name
-//
-// | VS Code | **rust-analyzer: Debug ItemTree**
-// |===
+// | Editor | Action Name |
+// |---------|-------------|
+// | VS Code | **rust-analyzer: Debug ItemTree** |
pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
let sema = Semantics::new(db);
let file_id = sema
diff --git a/crates/ide/src/view_memory_layout.rs b/crates/ide/src/view_memory_layout.rs
index ff74e05e94..edb83bc4ea 100644
--- a/crates/ide/src/view_memory_layout.rs
+++ b/crates/ide/src/view_memory_layout.rs
@@ -75,11 +75,9 @@ impl FieldOrTupleIdx {
//
// Displays the recursive memory layout of a datatype.
//
-// |===
-// | Editor | Action Name
-//
-// | VS Code | **rust-analyzer: View Memory Layout**
-// |===
+// | Editor | Action Name |
+// |---------|-------------|
+// | VS Code | **rust-analyzer: View Memory Layout** |
pub(crate) fn view_memory_layout(
db: &RootDatabase,
position: FilePosition,
diff --git a/crates/ide/src/view_mir.rs b/crates/ide/src/view_mir.rs
index 7a228375d5..aa4ff64a81 100644
--- a/crates/ide/src/view_mir.rs
+++ b/crates/ide/src/view_mir.rs
@@ -4,11 +4,9 @@ use syntax::{algo::ancestors_at_offset, ast, AstNode};
// Feature: View Mir
//
-// |===
-// | Editor | Action Name
-//
+// | Editor | Action Name |
+// |---------|-------------|
// | VS Code | **rust-analyzer: View Mir**
-// |===
pub(crate) fn view_mir(db: &RootDatabase, position: FilePosition) -> String {
body_mir(db, position).unwrap_or_else(|| "Not inside a function body".to_owned())
}
diff --git a/crates/ide/src/view_syntax_tree.rs b/crates/ide/src/view_syntax_tree.rs
index 218ee15a7d..407720864b 100644
--- a/crates/ide/src/view_syntax_tree.rs
+++ b/crates/ide/src/view_syntax_tree.rs
@@ -1,41 +1,50 @@
use hir::Semantics;
-use ide_db::{FileId, RootDatabase};
-use span::TextRange;
+use ide_db::{
+ line_index::{LineCol, LineIndex},
+ FileId, LineIndexDatabase, RootDatabase,
+};
+use span::{TextRange, TextSize};
use stdx::format_to;
use syntax::{
ast::{self, IsString},
AstNode, AstToken, NodeOrToken, SourceFile, SyntaxNode, SyntaxToken, WalkEvent,
};
+use triomphe::Arc;
// Feature: Show Syntax Tree
//
// Shows a tree view with the syntax tree of the current file
//
-// |===
-// | Editor | Panel Name
-//
-// | VS Code | **Rust Syntax Tree**
-// |===
+// | Editor | Panel Name |
+// |---------|-------------|
+// | VS Code | **Rust Syntax Tree** |
pub(crate) fn view_syntax_tree(db: &RootDatabase, file_id: FileId) -> String {
let sema = Semantics::new(db);
+ let line_index = db.line_index(file_id);
let parse = sema.parse_guess_edition(file_id);
- syntax_node_to_json(parse.syntax(), None)
+
+ let ctx = SyntaxTreeCtx { line_index, in_string: None };
+
+ syntax_node_to_json(parse.syntax(), &ctx)
}
-fn syntax_node_to_json(node: &SyntaxNode, ctx: Option<InStringCtx>) -> String {
+fn syntax_node_to_json(node: &SyntaxNode, ctx: &SyntaxTreeCtx) -> String {
let mut result = String::new();
for event in node.preorder_with_tokens() {
match event {
WalkEvent::Enter(it) => {
let kind = it.kind();
- let (text_range, inner_range_str) = match &ctx {
- Some(ctx) => {
+ let (text_range, inner_range_str) = match &ctx.in_string {
+ Some(in_string) => {
+ let start_pos = TextPosition::new(&ctx.line_index, it.text_range().start());
+ let end_pos = TextPosition::new(&ctx.line_index, it.text_range().end());
+
let inner_start: u32 = it.text_range().start().into();
- let inner_end: u32 = it.text_range().end().into();
+ let inner_end: u32 = it.text_range().end().into();
- let mut true_start = inner_start + ctx.offset;
- let mut true_end = inner_end + ctx.offset;
- for pos in &ctx.marker_positions {
+ let mut true_start = inner_start + in_string.offset;
+ let mut true_end = inner_end + in_string.offset;
+ for pos in &in_string.marker_positions {
if *pos >= inner_end {
break;
}
@@ -48,39 +57,33 @@ fn syntax_node_to_json(node: &SyntaxNode, ctx: Option<InStringCtx>) -> String {
let true_range = TextRange::new(true_start.into(), true_end.into());
- (
- true_range,
- format!(
- r#","istart":{:?},"iend":{:?}"#,
- it.text_range().start(),
- it.text_range().end()
- ),
- )
+ (true_range, format!(r#","istart":{start_pos},"iend":{end_pos}"#,))
}
None => (it.text_range(), "".to_owned()),
};
- let start = text_range.start();
- let end = text_range.end();
+
+ let start = TextPosition::new(&ctx.line_index, text_range.start());
+ let end = TextPosition::new(&ctx.line_index, text_range.end());
match it {
NodeOrToken::Node(_) => {
format_to!(
result,
- r#"{{"type":"Node","kind":"{kind:?}","start":{start:?},"end":{end:?}{inner_range_str},"children":["#
+ r#"{{"type":"Node","kind":"{kind:?}","start":{start},"end":{end}{inner_range_str},"children":["#
);
}
NodeOrToken::Token(token) => {
let comma = if token.next_sibling_or_token().is_some() { "," } else { "" };
- match parse_rust_string(token) {
+ match parse_rust_string(token, ctx) {
Some(parsed) => {
format_to!(
result,
- r#"{{"type":"Node","kind":"{kind:?}","start":{start:?},"end":{end:?}{inner_range_str},"children":[{parsed}]}}{comma}"#
+ r#"{{"type":"Node","kind":"{kind:?}","start":{start},"end":{end}{inner_range_str},"children":[{parsed}]}}{comma}"#
);
}
None => format_to!(
result,
- r#"{{"type":"Token","kind":"{kind:?}","start":{start:?},"end":{end:?}{inner_range_str}}}{comma}"#
+ r#"{{"type":"Token","kind":"{kind:?}","start":{start},"end":{end}{inner_range_str}}}{comma}"#
),
}
}
@@ -99,7 +102,26 @@ fn syntax_node_to_json(node: &SyntaxNode, ctx: Option<InStringCtx>) -> String {
result
}
-fn parse_rust_string(token: SyntaxToken) -> Option<String> {
+struct TextPosition {
+ offset: TextSize,
+ line: u32,
+ col: u32,
+}
+
+impl std::fmt::Display for TextPosition {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "[{:?},{},{}]", self.offset, self.line, self.col)
+ }
+}
+
+impl TextPosition {
+ pub(crate) fn new(line_index: &LineIndex, offset: TextSize) -> Self {
+ let LineCol { line, col } = line_index.line_col(offset);
+ Self { offset, line, col }
+ }
+}
+
+fn parse_rust_string(token: SyntaxToken, ctx: &SyntaxTreeCtx) -> Option<String> {
let string_node = ast::String::cast(token)?;
let text = string_node.value().ok()?;
@@ -128,13 +150,20 @@ fn parse_rust_string(token: SyntaxToken) -> Option<String> {
return None;
}
- Some(syntax_node_to_json(
- node,
- Some(InStringCtx {
+ let ctx = SyntaxTreeCtx {
+ line_index: ctx.line_index.clone(),
+ in_string: Some(InStringCtx {
offset: string_node.text_range_between_quotes()?.start().into(),
marker_positions,
}),
- ))
+ };
+
+ Some(syntax_node_to_json(node, &ctx))
+}
+
+struct SyntaxTreeCtx {
+ line_index: Arc<LineIndex>,
+ in_string: Option<InStringCtx>,
}
struct InStringCtx {
@@ -160,7 +189,7 @@ mod tests {
check(
r#"fn foo() {}"#,
expect![[
- r#"{"type":"Node","kind":"SOURCE_FILE","start":0,"end":11,"children":[{"type":"Node","kind":"FN","start":0,"end":11,"children":[{"type":"Token","kind":"FN_KW","start":0,"end":2},{"type":"Token","kind":"WHITESPACE","start":2,"end":3},{"type":"Node","kind":"NAME","start":3,"end":6,"children":[{"type":"Token","kind":"IDENT","start":3,"end":6}]},{"type":"Node","kind":"PARAM_LIST","start":6,"end":8,"children":[{"type":"Token","kind":"L_PAREN","start":6,"end":7},{"type":"Token","kind":"R_PAREN","start":7,"end":8}]},{"type":"Token","kind":"WHITESPACE","start":8,"end":9},{"type":"Node","kind":"BLOCK_EXPR","start":9,"end":11,"children":[{"type":"Node","kind":"STMT_LIST","start":9,"end":11,"children":[{"type":"Token","kind":"L_CURLY","start":9,"end":10},{"type":"Token","kind":"R_CURLY","start":10,"end":11}]}]}]}]}"#
+ r#"{"type":"Node","kind":"SOURCE_FILE","start":[0,0,0],"end":[11,0,11],"children":[{"type":"Node","kind":"FN","start":[0,0,0],"end":[11,0,11],"children":[{"type":"Token","kind":"FN_KW","start":[0,0,0],"end":[2,0,2]},{"type":"Token","kind":"WHITESPACE","start":[2,0,2],"end":[3,0,3]},{"type":"Node","kind":"NAME","start":[3,0,3],"end":[6,0,6],"children":[{"type":"Token","kind":"IDENT","start":[3,0,3],"end":[6,0,6]}]},{"type":"Node","kind":"PARAM_LIST","start":[6,0,6],"end":[8,0,8],"children":[{"type":"Token","kind":"L_PAREN","start":[6,0,6],"end":[7,0,7]},{"type":"Token","kind":"R_PAREN","start":[7,0,7],"end":[8,0,8]}]},{"type":"Token","kind":"WHITESPACE","start":[8,0,8],"end":[9,0,9]},{"type":"Node","kind":"BLOCK_EXPR","start":[9,0,9],"end":[11,0,11],"children":[{"type":"Node","kind":"STMT_LIST","start":[9,0,9],"end":[11,0,11],"children":[{"type":"Token","kind":"L_CURLY","start":[9,0,9],"end":[10,0,10]},{"type":"Token","kind":"R_CURLY","start":[10,0,10],"end":[11,0,11]}]}]}]}]}"#
]],
);
@@ -173,7 +202,7 @@ fn test() {
", "");
}"#,
expect![[
- r#"{"type":"Node","kind":"SOURCE_FILE","start":0,"end":60,"children":[{"type":"Node","kind":"FN","start":0,"end":60,"children":[{"type":"Token","kind":"FN_KW","start":0,"end":2},{"type":"Token","kind":"WHITESPACE","start":2,"end":3},{"type":"Node","kind":"NAME","start":3,"end":7,"children":[{"type":"Token","kind":"IDENT","start":3,"end":7}]},{"type":"Node","kind":"PARAM_LIST","start":7,"end":9,"children":[{"type":"Token","kind":"L_PAREN","start":7,"end":8},{"type":"Token","kind":"R_PAREN","start":8,"end":9}]},{"type":"Token","kind":"WHITESPACE","start":9,"end":10},{"type":"Node","kind":"BLOCK_EXPR","start":10,"end":60,"children":[{"type":"Node","kind":"STMT_LIST","start":10,"end":60,"children":[{"type":"Token","kind":"L_CURLY","start":10,"end":11},{"type":"Token","kind":"WHITESPACE","start":11,"end":16},{"type":"Node","kind":"EXPR_STMT","start":16,"end":58,"children":[{"type":"Node","kind":"MACRO_EXPR","start":16,"end":57,"children":[{"type":"Node","kind":"MACRO_CALL","start":16,"end":57,"children":[{"type":"Node","kind":"PATH","start":16,"end":22,"children":[{"type":"Node","kind":"PATH_SEGMENT","start":16,"end":22,"children":[{"type":"Node","kind":"NAME_REF","start":16,"end":22,"children":[{"type":"Token","kind":"IDENT","start":16,"end":22}]}]}]},{"type":"Token","kind":"BANG","start":22,"end":23},{"type":"Node","kind":"TOKEN_TREE","start":23,"end":57,"children":[{"type":"Token","kind":"L_PAREN","start":23,"end":24},{"type":"Node","kind":"STRING","start":24,"end":52,"children":[{"type":"Node","kind":"SOURCE_FILE","start":25,"end":51,"istart":0,"iend":26,"children":[{"type":"Token","kind":"WHITESPACE","start":25,"end":30,"istart":0,"iend":5},{"type":"Node","kind":"FN","start":30,"end":46,"istart":5,"iend":21,"children":[{"type":"Token","kind":"FN_KW","start":30,"end":32,"istart":5,"iend":7},{"type":"Token","kind":"WHITESPACE","start":32,"end":33,"istart":7,"iend":8},{"type":"Node","kind":"NAME","start":33,"end":36,"istart":8,"iend":11,"children":[{"type":"Token","k
ind":"IDENT","start":33,"end":36,"istart":8,"iend":11}]},{"type":"Node","kind":"PARAM_LIST","start":36,"end":38,"istart":11,"iend":13,"children":[{"type":"Token","kind":"L_PAREN","start":36,"end":37,"istart":11,"iend":12},{"type":"Token","kind":"R_PAREN","start":37,"end":38,"istart":12,"iend":13}]},{"type":"Token","kind":"WHITESPACE","start":38,"end":39,"istart":13,"iend":14},{"type":"Node","kind":"BLOCK_EXPR","start":39,"end":46,"istart":14,"iend":21,"children":[{"type":"Node","kind":"STMT_LIST","start":39,"end":46,"istart":14,"iend":21,"children":[{"type":"Token","kind":"L_CURLY","start":39,"end":40,"istart":14,"iend":15},{"type":"Token","kind":"WHITESPACE","start":40,"end":45,"istart":15,"iend":20},{"type":"Token","kind":"R_CURLY","start":45,"end":46,"istart":20,"iend":21}]}]}]},{"type":"Token","kind":"WHITESPACE","start":46,"end":51,"istart":21,"iend":26}]}]},{"type":"Token","kind":"COMMA","start":52,"end":53},{"type":"Token","kind":"WHITESPACE","start":53,"end":54},{"type":"Token","kind":"STRING","start":54,"end":56},{"type":"Token","kind":"R_PAREN","start":56,"end":57}]}]}]},{"type":"Token","kind":"SEMICOLON","start":57,"end":58}]},{"type":"Token","kind":"WHITESPACE","start":58,"end":59},{"type":"Token","kind":"R_CURLY","start":59,"end":60}]}]}]}]}"#
+ r#"{"type":"Node","kind":"SOURCE_FILE","start":[0,0,0],"end":[60,5,1],"children":[{"type":"Node","kind":"FN","start":[0,0,0],"end":[60,5,1],"children":[{"type":"Token","kind":"FN_KW","start":[0,0,0],"end":[2,0,2]},{"type":"Token","kind":"WHITESPACE","start":[2,0,2],"end":[3,0,3]},{"type":"Node","kind":"NAME","start":[3,0,3],"end":[7,0,7],"children":[{"type":"Token","kind":"IDENT","start":[3,0,3],"end":[7,0,7]}]},{"type":"Node","kind":"PARAM_LIST","start":[7,0,7],"end":[9,0,9],"children":[{"type":"Token","kind":"L_PAREN","start":[7,0,7],"end":[8,0,8]},{"type":"Token","kind":"R_PAREN","start":[8,0,8],"end":[9,0,9]}]},{"type":"Token","kind":"WHITESPACE","start":[9,0,9],"end":[10,0,10]},{"type":"Node","kind":"BLOCK_EXPR","start":[10,0,10],"end":[60,5,1],"children":[{"type":"Node","kind":"STMT_LIST","start":[10,0,10],"end":[60,5,1],"children":[{"type":"Token","kind":"L_CURLY","start":[10,0,10],"end":[11,0,11]},{"type":"Token","kind":"WHITESPACE","start":[11,0,11],"end":[16,1,4]},{"type":"Node","kind":"EXPR_STMT","start":[16,1,4],"end":[58,4,11],"children":[{"type":"Node","kind":"MACRO_EXPR","start":[16,1,4],"end":[57,4,10],"children":[{"type":"Node","kind":"MACRO_CALL","start":[16,1,4],"end":[57,4,10],"children":[{"type":"Node","kind":"PATH","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"PATH_SEGMENT","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"NAME_REF","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Token","kind":"IDENT","start":[16,1,4],"end":[22,1,10]}]}]}]},{"type":"Token","kind":"BANG","start":[22,1,10],"end":[23,1,11]},{"type":"Node","kind":"TOKEN_TREE","start":[23,1,11],"end":[57,4,10],"children":[{"type":"Token","kind":"L_PAREN","start":[23,1,11],"end":[24,1,12]},{"type":"Node","kind":"STRING","start":[24,1,12],"end":[52,4,5],"children":[{"type":"Node","kind":"SOURCE_FILE","start":[25,1,13],"end":[25,1,13],"istart":[0,0,0],"iend":[26,2,0],"children":[{"type":"Token","kind":"WHITESPACE","start":[25,1,13],"end":
[25,1,13],"istart":[0,0,0],"iend":[5,0,5]},{"type":"Node","kind":"FN","start":[30,2,4],"end":[30,2,4],"istart":[5,0,5],"iend":[21,1,9],"children":[{"type":"Token","kind":"FN_KW","start":[30,2,4],"end":[30,2,4],"istart":[5,0,5],"iend":[7,0,7]},{"type":"Token","kind":"WHITESPACE","start":[32,2,6],"end":[32,2,6],"istart":[7,0,7],"iend":[8,0,8]},{"type":"Node","kind":"NAME","start":[33,2,7],"end":[33,2,7],"istart":[8,0,8],"iend":[11,0,11],"children":[{"type":"Token","kind":"IDENT","start":[33,2,7],"end":[33,2,7],"istart":[8,0,8],"iend":[11,0,11]}]},{"type":"Node","kind":"PARAM_LIST","start":[36,2,10],"end":[36,2,10],"istart":[11,0,11],"iend":[13,1,1],"children":[{"type":"Token","kind":"L_PAREN","start":[36,2,10],"end":[36,2,10],"istart":[11,0,11],"iend":[12,1,0]},{"type":"Token","kind":"R_PAREN","start":[37,2,11],"end":[37,2,11],"istart":[12,1,0],"iend":[13,1,1]}]},{"type":"Token","kind":"WHITESPACE","start":[38,2,12],"end":[38,2,12],"istart":[13,1,1],"iend":[14,1,2]},{"type":"Node","kind":"BLOCK_EXPR","start":[39,2,13],"end":[39,2,13],"istart":[14,1,2],"iend":[21,1,9],"children":[{"type":"Node","kind":"STMT_LIST","start":[39,2,13],"end":[39,2,13],"istart":[14,1,2],"iend":[21,1,9],"children":[{"type":"Token","kind":"L_CURLY","start":[39,2,13],"end":[39,2,13],"istart":[14,1,2],"iend":[15,1,3]},{"type":"Token","kind":"WHITESPACE","start":[40,2,14],"end":[40,2,14],"istart":[15,1,3],"iend":[20,1,8]},{"type":"Token","kind":"R_CURLY","start":[45,3,4],"end":[45,3,4],"istart":[20,1,8],"iend":[21,1,9]}]}]}]},{"type":"Token","kind":"WHITESPACE","start":[46,3,5],"end":[46,3,5],"istart":[21,1,9],"iend":[26,2,0]}]}]},{"type":"Token","kind":"COMMA","start":[52,4,5],"end":[53,4,6]},{"type":"Token","kind":"WHITESPACE","start":[53,4,6],"end":[54,4,7]},{"type":"Token","kind":"STRING","start":[54,4,7],"end":[56,4,9]},{"type":"Token","kind":"R_PAREN","start":[56,4,9],"end":[57,4,10]}]}]}]},{"type":"Token","kind":"SEMICOLON","start":[57,4,10],"end":[58,4,11]}]},{"type":"Token","kind":"WHITE
SPACE","start":[58,4,11],"end":[59,5,0]},{"type":"Token","kind":"R_CURLY","start":[59,5,0],"end":[60,5,1]}]}]}]}]}"#
]],
)
}
@@ -190,7 +219,7 @@ fn bar() {
", "");
}"#,
expect![[
- r#"{"type":"Node","kind":"SOURCE_FILE","start":0,"end":65,"children":[{"type":"Node","kind":"FN","start":0,"end":65,"children":[{"type":"Token","kind":"FN_KW","start":0,"end":2},{"type":"Token","kind":"WHITESPACE","start":2,"end":3},{"type":"Node","kind":"NAME","start":3,"end":7,"children":[{"type":"Token","kind":"IDENT","start":3,"end":7}]},{"type":"Node","kind":"PARAM_LIST","start":7,"end":9,"children":[{"type":"Token","kind":"L_PAREN","start":7,"end":8},{"type":"Token","kind":"R_PAREN","start":8,"end":9}]},{"type":"Token","kind":"WHITESPACE","start":9,"end":10},{"type":"Node","kind":"BLOCK_EXPR","start":10,"end":65,"children":[{"type":"Node","kind":"STMT_LIST","start":10,"end":65,"children":[{"type":"Token","kind":"L_CURLY","start":10,"end":11},{"type":"Token","kind":"WHITESPACE","start":11,"end":16},{"type":"Node","kind":"EXPR_STMT","start":16,"end":63,"children":[{"type":"Node","kind":"MACRO_EXPR","start":16,"end":62,"children":[{"type":"Node","kind":"MACRO_CALL","start":16,"end":62,"children":[{"type":"Node","kind":"PATH","start":16,"end":22,"children":[{"type":"Node","kind":"PATH_SEGMENT","start":16,"end":22,"children":[{"type":"Node","kind":"NAME_REF","start":16,"end":22,"children":[{"type":"Token","kind":"IDENT","start":16,"end":22}]}]}]},{"type":"Token","kind":"BANG","start":22,"end":23},{"type":"Node","kind":"TOKEN_TREE","start":23,"end":62,"children":[{"type":"Token","kind":"L_PAREN","start":23,"end":24},{"type":"Node","kind":"STRING","start":24,"end":57,"children":[{"type":"Node","kind":"SOURCE_FILE","start":25,"end":56,"istart":0,"iend":31,"children":[{"type":"Token","kind":"WHITESPACE","start":25,"end":26,"istart":0,"iend":1},{"type":"Node","kind":"FN","start":26,"end":38,"istart":1,"iend":13,"children":[{"type":"Token","kind":"FN_KW","start":26,"end":28,"istart":1,"iend":3},{"type":"Token","kind":"WHITESPACE","start":28,"end":29,"istart":3,"iend":4},{"type":"Node","kind":"NAME","start":29,"end":32,"istart":4,"iend":7,"children":[{"type":"Token","ki
nd":"IDENT","start":29,"end":32,"istart":4,"iend":7}]},{"type":"Node","kind":"PARAM_LIST","start":32,"end":34,"istart":7,"iend":9,"children":[{"type":"Token","kind":"L_PAREN","start":32,"end":33,"istart":7,"iend":8},{"type":"Token","kind":"R_PAREN","start":33,"end":34,"istart":8,"iend":9}]},{"type":"Token","kind":"WHITESPACE","start":34,"end":35,"istart":9,"iend":10},{"type":"Node","kind":"BLOCK_EXPR","start":35,"end":38,"istart":10,"iend":13,"children":[{"type":"Node","kind":"STMT_LIST","start":35,"end":38,"istart":10,"iend":13,"children":[{"type":"Token","kind":"L_CURLY","start":35,"end":36,"istart":10,"iend":11},{"type":"Token","kind":"WHITESPACE","start":36,"end":37,"istart":11,"iend":12},{"type":"Token","kind":"R_CURLY","start":37,"end":38,"istart":12,"iend":13}]}]}]},{"type":"Token","kind":"WHITESPACE","start":38,"end":39,"istart":13,"iend":14},{"type":"Node","kind":"FN","start":39,"end":51,"istart":14,"iend":26,"children":[{"type":"Token","kind":"FN_KW","start":39,"end":41,"istart":14,"iend":16},{"type":"Token","kind":"WHITESPACE","start":41,"end":42,"istart":16,"iend":17},{"type":"Node","kind":"NAME","start":42,"end":45,"istart":17,"iend":20,"children":[{"type":"Token","kind":"IDENT","start":42,"end":45,"istart":17,"iend":20}]},{"type":"Node","kind":"PARAM_LIST","start":45,"end":47,"istart":20,"iend":22,"children":[{"type":"Token","kind":"L_PAREN","start":45,"end":46,"istart":20,"iend":21},{"type":"Token","kind":"R_PAREN","start":46,"end":47,"istart":21,"iend":22}]},{"type":"Token","kind":"WHITESPACE","start":47,"end":48,"istart":22,"iend":23},{"type":"Node","kind":"BLOCK_EXPR","start":48,"end":51,"istart":23,"iend":26,"children":[{"type":"Node","kind":"STMT_LIST","start":48,"end":51,"istart":23,"iend":26,"children":[{"type":"Token","kind":"L_CURLY","start":48,"end":49,"istart":23,"iend":24},{"type":"Token","kind":"WHITESPACE","start":49,"end":50,"istart":24,"iend":25},{"type":"Token","kind":"R_CURLY","start":50,"end":51,"istart":25,"iend":26}]}]}]},{"type":
"Token","kind":"WHITESPACE","start":51,"end":56,"istart":26,"iend":31}]}]},{"type":"Token","kind":"COMMA","start":57,"end":58},{"type":"Token","kind":"WHITESPACE","start":58,"end":59},{"type":"Token","kind":"STRING","start":59,"end":61},{"type":"Token","kind":"R_PAREN","start":61,"end":62}]}]}]},{"type":"Token","kind":"SEMICOLON","start":62,"end":63}]},{"type":"Token","kind":"WHITESPACE","start":63,"end":64},{"type":"Token","kind":"R_CURLY","start":64,"end":65}]}]}]}]}"#
+ r#"{"type":"Node","kind":"SOURCE_FILE","start":[0,0,0],"end":[65,7,1],"children":[{"type":"Node","kind":"FN","start":[0,0,0],"end":[65,7,1],"children":[{"type":"Token","kind":"FN_KW","start":[0,0,0],"end":[2,0,2]},{"type":"Token","kind":"WHITESPACE","start":[2,0,2],"end":[3,0,3]},{"type":"Node","kind":"NAME","start":[3,0,3],"end":[7,0,7],"children":[{"type":"Token","kind":"IDENT","start":[3,0,3],"end":[7,0,7]}]},{"type":"Node","kind":"PARAM_LIST","start":[7,0,7],"end":[9,0,9],"children":[{"type":"Token","kind":"L_PAREN","start":[7,0,7],"end":[8,0,8]},{"type":"Token","kind":"R_PAREN","start":[8,0,8],"end":[9,0,9]}]},{"type":"Token","kind":"WHITESPACE","start":[9,0,9],"end":[10,0,10]},{"type":"Node","kind":"BLOCK_EXPR","start":[10,0,10],"end":[65,7,1],"children":[{"type":"Node","kind":"STMT_LIST","start":[10,0,10],"end":[65,7,1],"children":[{"type":"Token","kind":"L_CURLY","start":[10,0,10],"end":[11,0,11]},{"type":"Token","kind":"WHITESPACE","start":[11,0,11],"end":[16,1,4]},{"type":"Node","kind":"EXPR_STMT","start":[16,1,4],"end":[63,6,11],"children":[{"type":"Node","kind":"MACRO_EXPR","start":[16,1,4],"end":[62,6,10],"children":[{"type":"Node","kind":"MACRO_CALL","start":[16,1,4],"end":[62,6,10],"children":[{"type":"Node","kind":"PATH","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"PATH_SEGMENT","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"NAME_REF","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Token","kind":"IDENT","start":[16,1,4],"end":[22,1,10]}]}]}]},{"type":"Token","kind":"BANG","start":[22,1,10],"end":[23,1,11]},{"type":"Node","kind":"TOKEN_TREE","start":[23,1,11],"end":[62,6,10],"children":[{"type":"Token","kind":"L_PAREN","start":[23,1,11],"end":[24,1,12]},{"type":"Node","kind":"STRING","start":[24,1,12],"end":[57,6,5],"children":[{"type":"Node","kind":"SOURCE_FILE","start":[25,1,13],"end":[25,1,13],"istart":[0,0,0],"iend":[31,2,5],"children":[{"type":"Token","kind":"WHITESPACE","start":[25,1,13],"end":
[25,1,13],"istart":[0,0,0],"iend":[1,0,1]},{"type":"Node","kind":"FN","start":[26,2,0],"end":[26,2,0],"istart":[1,0,1],"iend":[13,1,1],"children":[{"type":"Token","kind":"FN_KW","start":[26,2,0],"end":[26,2,0],"istart":[1,0,1],"iend":[3,0,3]},{"type":"Token","kind":"WHITESPACE","start":[28,2,2],"end":[28,2,2],"istart":[3,0,3],"iend":[4,0,4]},{"type":"Node","kind":"NAME","start":[29,2,3],"end":[29,2,3],"istart":[4,0,4],"iend":[7,0,7],"children":[{"type":"Token","kind":"IDENT","start":[29,2,3],"end":[29,2,3],"istart":[4,0,4],"iend":[7,0,7]}]},{"type":"Node","kind":"PARAM_LIST","start":[32,2,6],"end":[32,2,6],"istart":[7,0,7],"iend":[9,0,9],"children":[{"type":"Token","kind":"L_PAREN","start":[32,2,6],"end":[32,2,6],"istart":[7,0,7],"iend":[8,0,8]},{"type":"Token","kind":"R_PAREN","start":[33,2,7],"end":[33,2,7],"istart":[8,0,8],"iend":[9,0,9]}]},{"type":"Token","kind":"WHITESPACE","start":[34,2,8],"end":[34,2,8],"istart":[9,0,9],"iend":[10,0,10]},{"type":"Node","kind":"BLOCK_EXPR","start":[35,2,9],"end":[35,2,9],"istart":[10,0,10],"iend":[13,1,1],"children":[{"type":"Node","kind":"STMT_LIST","start":[35,2,9],"end":[35,2,9],"istart":[10,0,10],"iend":[13,1,1],"children":[{"type":"Token","kind":"L_CURLY","start":[35,2,9],"end":[35,2,9],"istart":[10,0,10],"iend":[11,0,11]},{"type":"Token","kind":"WHITESPACE","start":[36,2,10],"end":[36,2,10],"istart":[11,0,11],"iend":[12,1,0]},{"type":"Token","kind":"R_CURLY","start":[37,3,0],"end":[37,3,0],"istart":[12,1,0],"iend":[13,1,1]}]}]}]},{"type":"Token","kind":"WHITESPACE","start":[38,3,1],"end":[38,3,1],"istart":[13,1,1],"iend":[14,1,2]},{"type":"Node","kind":"FN","start":[39,4,0],"end":[39,4,0],"istart":[14,1,2],"iend":[26,2,0],"children":[{"type":"Token","kind":"FN_KW","start":[39,4,0],"end":[39,4,0],"istart":[14,1,2],"iend":[16,1,4]},{"type":"Token","kind":"WHITESPACE","start":[41,4,2],"end":[41,4,2],"istart":[16,1,4],"iend":[17,1,5]},{"type":"Node","kind":"NAME","start":[42,4,3],"end":[42,4,3],"istart":[17,1,5],"iend":[20,1
,8],"children":[{"type":"Token","kind":"IDENT","start":[42,4,3],"end":[42,4,3],"istart":[17,1,5],"iend":[20,1,8]}]},{"type":"Node","kind":"PARAM_LIST","start":[45,4,6],"end":[45,4,6],"istart":[20,1,8],"iend":[22,1,10],"children":[{"type":"Token","kind":"L_PAREN","start":[45,4,6],"end":[45,4,6],"istart":[20,1,8],"iend":[21,1,9]},{"type":"Token","kind":"R_PAREN","start":[46,4,7],"end":[46,4,7],"istart":[21,1,9],"iend":[22,1,10]}]},{"type":"Token","kind":"WHITESPACE","start":[47,4,8],"end":[47,4,8],"istart":[22,1,10],"iend":[23,1,11]},{"type":"Node","kind":"BLOCK_EXPR","start":[48,4,9],"end":[48,4,9],"istart":[23,1,11],"iend":[26,2,0],"children":[{"type":"Node","kind":"STMT_LIST","start":[48,4,9],"end":[48,4,9],"istart":[23,1,11],"iend":[26,2,0],"children":[{"type":"Token","kind":"L_CURLY","start":[48,4,9],"end":[48,4,9],"istart":[23,1,11],"iend":[24,1,12]},{"type":"Token","kind":"WHITESPACE","start":[49,4,10],"end":[49,4,10],"istart":[24,1,12],"iend":[25,1,13]},{"type":"Token","kind":"R_CURLY","start":[50,5,0],"end":[50,5,0],"istart":[25,1,13],"iend":[26,2,0]}]}]}]},{"type":"Token","kind":"WHITESPACE","start":[51,5,1],"end":[51,5,1],"istart":[26,2,0],"iend":[31,2,5]}]}]},{"type":"Token","kind":"COMMA","start":[57,6,5],"end":[58,6,6]},{"type":"Token","kind":"WHITESPACE","start":[58,6,6],"end":[59,6,7]},{"type":"Token","kind":"STRING","start":[59,6,7],"end":[61,6,9]},{"type":"Token","kind":"R_PAREN","start":[61,6,9],"end":[62,6,10]}]}]}]},{"type":"Token","kind":"SEMICOLON","start":[62,6,10],"end":[63,6,11]}]},{"type":"Token","kind":"WHITESPACE","start":[63,6,11],"end":[64,7,0]},{"type":"Token","kind":"R_CURLY","start":[64,7,0],"end":[65,7,1]}]}]}]}]}"#
]],
);
@@ -205,7 +234,7 @@ fn bar() {
"#, "");
}"###,
expect![[
- r#"{"type":"Node","kind":"SOURCE_FILE","start":0,"end":68,"children":[{"type":"Node","kind":"FN","start":0,"end":68,"children":[{"type":"Token","kind":"FN_KW","start":0,"end":2},{"type":"Token","kind":"WHITESPACE","start":2,"end":3},{"type":"Node","kind":"NAME","start":3,"end":7,"children":[{"type":"Token","kind":"IDENT","start":3,"end":7}]},{"type":"Node","kind":"PARAM_LIST","start":7,"end":9,"children":[{"type":"Token","kind":"L_PAREN","start":7,"end":8},{"type":"Token","kind":"R_PAREN","start":8,"end":9}]},{"type":"Token","kind":"WHITESPACE","start":9,"end":10},{"type":"Node","kind":"BLOCK_EXPR","start":10,"end":68,"children":[{"type":"Node","kind":"STMT_LIST","start":10,"end":68,"children":[{"type":"Token","kind":"L_CURLY","start":10,"end":11},{"type":"Token","kind":"WHITESPACE","start":11,"end":16},{"type":"Node","kind":"EXPR_STMT","start":16,"end":66,"children":[{"type":"Node","kind":"MACRO_EXPR","start":16,"end":65,"children":[{"type":"Node","kind":"MACRO_CALL","start":16,"end":65,"children":[{"type":"Node","kind":"PATH","start":16,"end":22,"children":[{"type":"Node","kind":"PATH_SEGMENT","start":16,"end":22,"children":[{"type":"Node","kind":"NAME_REF","start":16,"end":22,"children":[{"type":"Token","kind":"IDENT","start":16,"end":22}]}]}]},{"type":"Token","kind":"BANG","start":22,"end":23},{"type":"Node","kind":"TOKEN_TREE","start":23,"end":65,"children":[{"type":"Token","kind":"L_PAREN","start":23,"end":24},{"type":"Node","kind":"STRING","start":24,"end":60,"children":[{"type":"Node","kind":"SOURCE_FILE","start":27,"end":58,"istart":0,"iend":31,"children":[{"type":"Token","kind":"WHITESPACE","start":27,"end":28,"istart":0,"iend":1},{"type":"Node","kind":"FN","start":28,"end":40,"istart":1,"iend":13,"children":[{"type":"Token","kind":"FN_KW","start":28,"end":30,"istart":1,"iend":3},{"type":"Token","kind":"WHITESPACE","start":30,"end":31,"istart":3,"iend":4},{"type":"Node","kind":"NAME","start":31,"end":34,"istart":4,"iend":7,"children":[{"type":"Token","ki
nd":"IDENT","start":31,"end":34,"istart":4,"iend":7}]},{"type":"Node","kind":"PARAM_LIST","start":34,"end":36,"istart":7,"iend":9,"children":[{"type":"Token","kind":"L_PAREN","start":34,"end":35,"istart":7,"iend":8},{"type":"Token","kind":"R_PAREN","start":35,"end":36,"istart":8,"iend":9}]},{"type":"Token","kind":"WHITESPACE","start":36,"end":37,"istart":9,"iend":10},{"type":"Node","kind":"BLOCK_EXPR","start":37,"end":40,"istart":10,"iend":13,"children":[{"type":"Node","kind":"STMT_LIST","start":37,"end":40,"istart":10,"iend":13,"children":[{"type":"Token","kind":"L_CURLY","start":37,"end":38,"istart":10,"iend":11},{"type":"Token","kind":"WHITESPACE","start":38,"end":39,"istart":11,"iend":12},{"type":"Token","kind":"R_CURLY","start":39,"end":40,"istart":12,"iend":13}]}]}]},{"type":"Token","kind":"WHITESPACE","start":40,"end":41,"istart":13,"iend":14},{"type":"Node","kind":"FN","start":41,"end":53,"istart":14,"iend":26,"children":[{"type":"Token","kind":"FN_KW","start":41,"end":43,"istart":14,"iend":16},{"type":"Token","kind":"WHITESPACE","start":43,"end":44,"istart":16,"iend":17},{"type":"Node","kind":"NAME","start":44,"end":47,"istart":17,"iend":20,"children":[{"type":"Token","kind":"IDENT","start":44,"end":47,"istart":17,"iend":20}]},{"type":"Node","kind":"PARAM_LIST","start":47,"end":49,"istart":20,"iend":22,"children":[{"type":"Token","kind":"L_PAREN","start":47,"end":48,"istart":20,"iend":21},{"type":"Token","kind":"R_PAREN","start":48,"end":49,"istart":21,"iend":22}]},{"type":"Token","kind":"WHITESPACE","start":49,"end":50,"istart":22,"iend":23},{"type":"Node","kind":"BLOCK_EXPR","start":50,"end":53,"istart":23,"iend":26,"children":[{"type":"Node","kind":"STMT_LIST","start":50,"end":53,"istart":23,"iend":26,"children":[{"type":"Token","kind":"L_CURLY","start":50,"end":51,"istart":23,"iend":24},{"type":"Token","kind":"WHITESPACE","start":51,"end":52,"istart":24,"iend":25},{"type":"Token","kind":"R_CURLY","start":52,"end":53,"istart":25,"iend":26}]}]}]},{"type":
"Token","kind":"WHITESPACE","start":53,"end":58,"istart":26,"iend":31}]}]},{"type":"Token","kind":"COMMA","start":60,"end":61},{"type":"Token","kind":"WHITESPACE","start":61,"end":62},{"type":"Token","kind":"STRING","start":62,"end":64},{"type":"Token","kind":"R_PAREN","start":64,"end":65}]}]}]},{"type":"Token","kind":"SEMICOLON","start":65,"end":66}]},{"type":"Token","kind":"WHITESPACE","start":66,"end":67},{"type":"Token","kind":"R_CURLY","start":67,"end":68}]}]}]}]}"#
+ r#"{"type":"Node","kind":"SOURCE_FILE","start":[0,0,0],"end":[68,7,1],"children":[{"type":"Node","kind":"FN","start":[0,0,0],"end":[68,7,1],"children":[{"type":"Token","kind":"FN_KW","start":[0,0,0],"end":[2,0,2]},{"type":"Token","kind":"WHITESPACE","start":[2,0,2],"end":[3,0,3]},{"type":"Node","kind":"NAME","start":[3,0,3],"end":[7,0,7],"children":[{"type":"Token","kind":"IDENT","start":[3,0,3],"end":[7,0,7]}]},{"type":"Node","kind":"PARAM_LIST","start":[7,0,7],"end":[9,0,9],"children":[{"type":"Token","kind":"L_PAREN","start":[7,0,7],"end":[8,0,8]},{"type":"Token","kind":"R_PAREN","start":[8,0,8],"end":[9,0,9]}]},{"type":"Token","kind":"WHITESPACE","start":[9,0,9],"end":[10,0,10]},{"type":"Node","kind":"BLOCK_EXPR","start":[10,0,10],"end":[68,7,1],"children":[{"type":"Node","kind":"STMT_LIST","start":[10,0,10],"end":[68,7,1],"children":[{"type":"Token","kind":"L_CURLY","start":[10,0,10],"end":[11,0,11]},{"type":"Token","kind":"WHITESPACE","start":[11,0,11],"end":[16,1,4]},{"type":"Node","kind":"EXPR_STMT","start":[16,1,4],"end":[66,6,12],"children":[{"type":"Node","kind":"MACRO_EXPR","start":[16,1,4],"end":[65,6,11],"children":[{"type":"Node","kind":"MACRO_CALL","start":[16,1,4],"end":[65,6,11],"children":[{"type":"Node","kind":"PATH","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"PATH_SEGMENT","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"NAME_REF","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Token","kind":"IDENT","start":[16,1,4],"end":[22,1,10]}]}]}]},{"type":"Token","kind":"BANG","start":[22,1,10],"end":[23,1,11]},{"type":"Node","kind":"TOKEN_TREE","start":[23,1,11],"end":[65,6,11],"children":[{"type":"Token","kind":"L_PAREN","start":[23,1,11],"end":[24,1,12]},{"type":"Node","kind":"STRING","start":[24,1,12],"end":[60,6,6],"children":[{"type":"Node","kind":"SOURCE_FILE","start":[27,1,15],"end":[27,1,15],"istart":[0,0,0],"iend":[31,2,3],"children":[{"type":"Token","kind":"WHITESPACE","start":[27,1,15],"end":
[27,1,15],"istart":[0,0,0],"iend":[1,0,1]},{"type":"Node","kind":"FN","start":[28,2,0],"end":[28,2,0],"istart":[1,0,1],"iend":[13,1,1],"children":[{"type":"Token","kind":"FN_KW","start":[28,2,0],"end":[28,2,0],"istart":[1,0,1],"iend":[3,0,3]},{"type":"Token","kind":"WHITESPACE","start":[30,2,2],"end":[30,2,2],"istart":[3,0,3],"iend":[4,0,4]},{"type":"Node","kind":"NAME","start":[31,2,3],"end":[31,2,3],"istart":[4,0,4],"iend":[7,0,7],"children":[{"type":"Token","kind":"IDENT","start":[31,2,3],"end":[31,2,3],"istart":[4,0,4],"iend":[7,0,7]}]},{"type":"Node","kind":"PARAM_LIST","start":[34,2,6],"end":[34,2,6],"istart":[7,0,7],"iend":[9,0,9],"children":[{"type":"Token","kind":"L_PAREN","start":[34,2,6],"end":[34,2,6],"istart":[7,0,7],"iend":[8,0,8]},{"type":"Token","kind":"R_PAREN","start":[35,2,7],"end":[35,2,7],"istart":[8,0,8],"iend":[9,0,9]}]},{"type":"Token","kind":"WHITESPACE","start":[36,2,8],"end":[36,2,8],"istart":[9,0,9],"iend":[10,0,10]},{"type":"Node","kind":"BLOCK_EXPR","start":[37,2,9],"end":[37,2,9],"istart":[10,0,10],"iend":[13,1,1],"children":[{"type":"Node","kind":"STMT_LIST","start":[37,2,9],"end":[37,2,9],"istart":[10,0,10],"iend":[13,1,1],"children":[{"type":"Token","kind":"L_CURLY","start":[37,2,9],"end":[37,2,9],"istart":[10,0,10],"iend":[11,0,11]},{"type":"Token","kind":"WHITESPACE","start":[38,2,10],"end":[38,2,10],"istart":[11,0,11],"iend":[12,1,0]},{"type":"Token","kind":"R_CURLY","start":[39,3,0],"end":[39,3,0],"istart":[12,1,0],"iend":[13,1,1]}]}]}]},{"type":"Token","kind":"WHITESPACE","start":[40,3,1],"end":[40,3,1],"istart":[13,1,1],"iend":[14,1,2]},{"type":"Node","kind":"FN","start":[41,4,0],"end":[41,4,0],"istart":[14,1,2],"iend":[26,1,14],"children":[{"type":"Token","kind":"FN_KW","start":[41,4,0],"end":[41,4,0],"istart":[14,1,2],"iend":[16,1,4]},{"type":"Token","kind":"WHITESPACE","start":[43,4,2],"end":[43,4,2],"istart":[16,1,4],"iend":[17,1,5]},{"type":"Node","kind":"NAME","start":[44,4,3],"end":[44,4,3],"istart":[17,1,5],"iend":[20,
1,8],"children":[{"type":"Token","kind":"IDENT","start":[44,4,3],"end":[44,4,3],"istart":[17,1,5],"iend":[20,1,8]}]},{"type":"Node","kind":"PARAM_LIST","start":[47,4,6],"end":[47,4,6],"istart":[20,1,8],"iend":[22,1,10],"children":[{"type":"Token","kind":"L_PAREN","start":[47,4,6],"end":[47,4,6],"istart":[20,1,8],"iend":[21,1,9]},{"type":"Token","kind":"R_PAREN","start":[48,4,7],"end":[48,4,7],"istart":[21,1,9],"iend":[22,1,10]}]},{"type":"Token","kind":"WHITESPACE","start":[49,4,8],"end":[49,4,8],"istart":[22,1,10],"iend":[23,1,11]},{"type":"Node","kind":"BLOCK_EXPR","start":[50,4,9],"end":[50,4,9],"istart":[23,1,11],"iend":[26,1,14],"children":[{"type":"Node","kind":"STMT_LIST","start":[50,4,9],"end":[50,4,9],"istart":[23,1,11],"iend":[26,1,14],"children":[{"type":"Token","kind":"L_CURLY","start":[50,4,9],"end":[50,4,9],"istart":[23,1,11],"iend":[24,1,12]},{"type":"Token","kind":"WHITESPACE","start":[51,4,10],"end":[51,4,10],"istart":[24,1,12],"iend":[25,1,13]},{"type":"Token","kind":"R_CURLY","start":[52,5,0],"end":[52,5,0],"istart":[25,1,13],"iend":[26,1,14]}]}]}]},{"type":"Token","kind":"WHITESPACE","start":[53,5,1],"end":[53,5,1],"istart":[26,1,14],"iend":[31,2,3]}]}]},{"type":"Token","kind":"COMMA","start":[60,6,6],"end":[61,6,7]},{"type":"Token","kind":"WHITESPACE","start":[61,6,7],"end":[62,6,8]},{"type":"Token","kind":"STRING","start":[62,6,8],"end":[64,6,10]},{"type":"Token","kind":"R_PAREN","start":[64,6,10],"end":[65,6,11]}]}]}]},{"type":"Token","kind":"SEMICOLON","start":[65,6,11],"end":[66,6,12]}]},{"type":"Token","kind":"WHITESPACE","start":[66,6,12],"end":[67,7,0]},{"type":"Token","kind":"R_CURLY","start":[67,7,0],"end":[68,7,1]}]}]}]}]}"#
]],
);
@@ -219,7 +248,7 @@ fn bar() {
}"$0#, "");
}"###,
expect![[
- r#"{"type":"Node","kind":"SOURCE_FILE","start":0,"end":63,"children":[{"type":"Node","kind":"FN","start":0,"end":63,"children":[{"type":"Token","kind":"FN_KW","start":0,"end":2},{"type":"Token","kind":"WHITESPACE","start":2,"end":3},{"type":"Node","kind":"NAME","start":3,"end":7,"children":[{"type":"Token","kind":"IDENT","start":3,"end":7}]},{"type":"Node","kind":"PARAM_LIST","start":7,"end":9,"children":[{"type":"Token","kind":"L_PAREN","start":7,"end":8},{"type":"Token","kind":"R_PAREN","start":8,"end":9}]},{"type":"Token","kind":"WHITESPACE","start":9,"end":10},{"type":"Node","kind":"BLOCK_EXPR","start":10,"end":63,"children":[{"type":"Node","kind":"STMT_LIST","start":10,"end":63,"children":[{"type":"Token","kind":"L_CURLY","start":10,"end":11},{"type":"Token","kind":"WHITESPACE","start":11,"end":16},{"type":"Node","kind":"EXPR_STMT","start":16,"end":61,"children":[{"type":"Node","kind":"MACRO_EXPR","start":16,"end":60,"children":[{"type":"Node","kind":"MACRO_CALL","start":16,"end":60,"children":[{"type":"Node","kind":"PATH","start":16,"end":22,"children":[{"type":"Node","kind":"PATH_SEGMENT","start":16,"end":22,"children":[{"type":"Node","kind":"NAME_REF","start":16,"end":22,"children":[{"type":"Token","kind":"IDENT","start":16,"end":22}]}]}]},{"type":"Token","kind":"BANG","start":22,"end":23},{"type":"Node","kind":"TOKEN_TREE","start":23,"end":60,"children":[{"type":"Token","kind":"L_PAREN","start":23,"end":24},{"type":"Node","kind":"STRING","start":24,"end":55,"children":[{"type":"Node","kind":"SOURCE_FILE","start":27,"end":53,"istart":0,"iend":26,"children":[{"type":"Token","kind":"WHITESPACE","start":27,"end":28,"istart":0,"iend":1},{"type":"Node","kind":"FN","start":28,"end":40,"istart":1,"iend":13,"children":[{"type":"Token","kind":"FN_KW","start":28,"end":30,"istart":1,"iend":3},{"type":"Token","kind":"WHITESPACE","start":30,"end":31,"istart":3,"iend":4},{"type":"Node","kind":"NAME","start":31,"end":34,"istart":4,"iend":7,"children":[{"type":"Token","ki
nd":"IDENT","start":31,"end":34,"istart":4,"iend":7}]},{"type":"Node","kind":"PARAM_LIST","start":34,"end":36,"istart":7,"iend":9,"children":[{"type":"Token","kind":"L_PAREN","start":34,"end":35,"istart":7,"iend":8},{"type":"Token","kind":"R_PAREN","start":35,"end":36,"istart":8,"iend":9}]},{"type":"Token","kind":"WHITESPACE","start":36,"end":37,"istart":9,"iend":10},{"type":"Node","kind":"BLOCK_EXPR","start":37,"end":40,"istart":10,"iend":13,"children":[{"type":"Node","kind":"STMT_LIST","start":37,"end":40,"istart":10,"iend":13,"children":[{"type":"Token","kind":"L_CURLY","start":37,"end":38,"istart":10,"iend":11},{"type":"Token","kind":"WHITESPACE","start":38,"end":39,"istart":11,"iend":12},{"type":"Token","kind":"R_CURLY","start":39,"end":40,"istart":12,"iend":13}]}]}]},{"type":"Token","kind":"WHITESPACE","start":40,"end":41,"istart":13,"iend":14},{"type":"Node","kind":"FN","start":41,"end":53,"istart":14,"iend":26,"children":[{"type":"Token","kind":"FN_KW","start":41,"end":43,"istart":14,"iend":16},{"type":"Token","kind":"WHITESPACE","start":43,"end":44,"istart":16,"iend":17},{"type":"Node","kind":"NAME","start":44,"end":47,"istart":17,"iend":20,"children":[{"type":"Token","kind":"IDENT","start":44,"end":47,"istart":17,"iend":20}]},{"type":"Node","kind":"PARAM_LIST","start":47,"end":49,"istart":20,"iend":22,"children":[{"type":"Token","kind":"L_PAREN","start":47,"end":48,"istart":20,"iend":21},{"type":"Token","kind":"R_PAREN","start":48,"end":49,"istart":21,"iend":22}]},{"type":"Token","kind":"WHITESPACE","start":49,"end":50,"istart":22,"iend":23},{"type":"Node","kind":"BLOCK_EXPR","start":50,"end":53,"istart":23,"iend":26,"children":[{"type":"Node","kind":"STMT_LIST","start":50,"end":53,"istart":23,"iend":26,"children":[{"type":"Token","kind":"L_CURLY","start":50,"end":51,"istart":23,"iend":24},{"type":"Token","kind":"WHITESPACE","start":51,"end":52,"istart":24,"iend":25},{"type":"Token","kind":"R_CURLY","start":52,"end":53,"istart":25,"iend":26}]}]}]}]}]},{"ty
pe":"Token","kind":"COMMA","start":55,"end":56},{"type":"Token","kind":"WHITESPACE","start":56,"end":57},{"type":"Token","kind":"STRING","start":57,"end":59},{"type":"Token","kind":"R_PAREN","start":59,"end":60}]}]}]},{"type":"Token","kind":"SEMICOLON","start":60,"end":61}]},{"type":"Token","kind":"WHITESPACE","start":61,"end":62},{"type":"Token","kind":"R_CURLY","start":62,"end":63}]}]}]}]}"#
+ r#"{"type":"Node","kind":"SOURCE_FILE","start":[0,0,0],"end":[63,6,1],"children":[{"type":"Node","kind":"FN","start":[0,0,0],"end":[63,6,1],"children":[{"type":"Token","kind":"FN_KW","start":[0,0,0],"end":[2,0,2]},{"type":"Token","kind":"WHITESPACE","start":[2,0,2],"end":[3,0,3]},{"type":"Node","kind":"NAME","start":[3,0,3],"end":[7,0,7],"children":[{"type":"Token","kind":"IDENT","start":[3,0,3],"end":[7,0,7]}]},{"type":"Node","kind":"PARAM_LIST","start":[7,0,7],"end":[9,0,9],"children":[{"type":"Token","kind":"L_PAREN","start":[7,0,7],"end":[8,0,8]},{"type":"Token","kind":"R_PAREN","start":[8,0,8],"end":[9,0,9]}]},{"type":"Token","kind":"WHITESPACE","start":[9,0,9],"end":[10,0,10]},{"type":"Node","kind":"BLOCK_EXPR","start":[10,0,10],"end":[63,6,1],"children":[{"type":"Node","kind":"STMT_LIST","start":[10,0,10],"end":[63,6,1],"children":[{"type":"Token","kind":"L_CURLY","start":[10,0,10],"end":[11,0,11]},{"type":"Token","kind":"WHITESPACE","start":[11,0,11],"end":[16,1,4]},{"type":"Node","kind":"EXPR_STMT","start":[16,1,4],"end":[61,5,9],"children":[{"type":"Node","kind":"MACRO_EXPR","start":[16,1,4],"end":[60,5,8],"children":[{"type":"Node","kind":"MACRO_CALL","start":[16,1,4],"end":[60,5,8],"children":[{"type":"Node","kind":"PATH","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"PATH_SEGMENT","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"NAME_REF","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Token","kind":"IDENT","start":[16,1,4],"end":[22,1,10]}]}]}]},{"type":"Token","kind":"BANG","start":[22,1,10],"end":[23,1,11]},{"type":"Node","kind":"TOKEN_TREE","start":[23,1,11],"end":[60,5,8],"children":[{"type":"Token","kind":"L_PAREN","start":[23,1,11],"end":[24,1,12]},{"type":"Node","kind":"STRING","start":[24,1,12],"end":[55,5,3],"children":[{"type":"Node","kind":"SOURCE_FILE","start":[27,1,15],"end":[27,1,15],"istart":[0,0,0],"iend":[26,1,14],"children":[{"type":"Token","kind":"WHITESPACE","start":[27,1,15],"end":[27
,1,15],"istart":[0,0,0],"iend":[1,0,1]},{"type":"Node","kind":"FN","start":[28,2,0],"end":[28,2,0],"istart":[1,0,1],"iend":[13,1,1],"children":[{"type":"Token","kind":"FN_KW","start":[28,2,0],"end":[28,2,0],"istart":[1,0,1],"iend":[3,0,3]},{"type":"Token","kind":"WHITESPACE","start":[30,2,2],"end":[30,2,2],"istart":[3,0,3],"iend":[4,0,4]},{"type":"Node","kind":"NAME","start":[31,2,3],"end":[31,2,3],"istart":[4,0,4],"iend":[7,0,7],"children":[{"type":"Token","kind":"IDENT","start":[31,2,3],"end":[31,2,3],"istart":[4,0,4],"iend":[7,0,7]}]},{"type":"Node","kind":"PARAM_LIST","start":[34,2,6],"end":[34,2,6],"istart":[7,0,7],"iend":[9,0,9],"children":[{"type":"Token","kind":"L_PAREN","start":[34,2,6],"end":[34,2,6],"istart":[7,0,7],"iend":[8,0,8]},{"type":"Token","kind":"R_PAREN","start":[35,2,7],"end":[35,2,7],"istart":[8,0,8],"iend":[9,0,9]}]},{"type":"Token","kind":"WHITESPACE","start":[36,2,8],"end":[36,2,8],"istart":[9,0,9],"iend":[10,0,10]},{"type":"Node","kind":"BLOCK_EXPR","start":[37,2,9],"end":[37,2,9],"istart":[10,0,10],"iend":[13,1,1],"children":[{"type":"Node","kind":"STMT_LIST","start":[37,2,9],"end":[37,2,9],"istart":[10,0,10],"iend":[13,1,1],"children":[{"type":"Token","kind":"L_CURLY","start":[37,2,9],"end":[37,2,9],"istart":[10,0,10],"iend":[11,0,11]},{"type":"Token","kind":"WHITESPACE","start":[38,2,10],"end":[38,2,10],"istart":[11,0,11],"iend":[12,1,0]},{"type":"Token","kind":"R_CURLY","start":[39,3,0],"end":[39,3,0],"istart":[12,1,0],"iend":[13,1,1]}]}]}]},{"type":"Token","kind":"WHITESPACE","start":[40,3,1],"end":[40,3,1],"istart":[13,1,1],"iend":[14,1,2]},{"type":"Node","kind":"FN","start":[41,4,0],"end":[41,4,0],"istart":[14,1,2],"iend":[26,1,14],"children":[{"type":"Token","kind":"FN_KW","start":[41,4,0],"end":[41,4,0],"istart":[14,1,2],"iend":[16,1,4]},{"type":"Token","kind":"WHITESPACE","start":[43,4,2],"end":[43,4,2],"istart":[16,1,4],"iend":[17,1,5]},{"type":"Node","kind":"NAME","start":[44,4,3],"end":[44,4,3],"istart":[17,1,5],"iend":[20,1,8
],"children":[{"type":"Token","kind":"IDENT","start":[44,4,3],"end":[44,4,3],"istart":[17,1,5],"iend":[20,1,8]}]},{"type":"Node","kind":"PARAM_LIST","start":[47,4,6],"end":[47,4,6],"istart":[20,1,8],"iend":[22,1,10],"children":[{"type":"Token","kind":"L_PAREN","start":[47,4,6],"end":[47,4,6],"istart":[20,1,8],"iend":[21,1,9]},{"type":"Token","kind":"R_PAREN","start":[48,4,7],"end":[48,4,7],"istart":[21,1,9],"iend":[22,1,10]}]},{"type":"Token","kind":"WHITESPACE","start":[49,4,8],"end":[49,4,8],"istart":[22,1,10],"iend":[23,1,11]},{"type":"Node","kind":"BLOCK_EXPR","start":[50,4,9],"end":[50,4,9],"istart":[23,1,11],"iend":[26,1,14],"children":[{"type":"Node","kind":"STMT_LIST","start":[50,4,9],"end":[50,4,9],"istart":[23,1,11],"iend":[26,1,14],"children":[{"type":"Token","kind":"L_CURLY","start":[50,4,9],"end":[50,4,9],"istart":[23,1,11],"iend":[24,1,12]},{"type":"Token","kind":"WHITESPACE","start":[51,4,10],"end":[51,4,10],"istart":[24,1,12],"iend":[25,1,13]},{"type":"Token","kind":"R_CURLY","start":[52,5,0],"end":[52,5,0],"istart":[25,1,13],"iend":[26,1,14]}]}]}]}]}]},{"type":"Token","kind":"COMMA","start":[55,5,3],"end":[56,5,4]},{"type":"Token","kind":"WHITESPACE","start":[56,5,4],"end":[57,5,5]},{"type":"Token","kind":"STRING","start":[57,5,5],"end":[59,5,7]},{"type":"Token","kind":"R_PAREN","start":[59,5,7],"end":[60,5,8]}]}]}]},{"type":"Token","kind":"SEMICOLON","start":[60,5,8],"end":[61,5,9]}]},{"type":"Token","kind":"WHITESPACE","start":[61,5,9],"end":[62,6,0]},{"type":"Token","kind":"R_CURLY","start":[62,6,0],"end":[63,6,1]}]}]}]}]}"#
]],
);
}
diff --git a/crates/intern/src/symbol/symbols.rs b/crates/intern/src/symbol/symbols.rs
index ae1c6efe0c..be0de6c936 100644
--- a/crates/intern/src/symbol/symbols.rs
+++ b/crates/intern/src/symbol/symbols.rs
@@ -13,15 +13,35 @@ use crate::{
macro_rules! define_symbols {
(@WITH_NAME: $($alias:ident = $value:literal,)* @PLAIN: $($name:ident,)*) => {
- // Ideally we would be emitting `const` here, but then we no longer have stable addresses
- // which is what we are relying on for equality! In the future if consts can refer to
- // statics we should swap these for `const`s and have the string literal being pointed
- // to be statics to refer to such that their address is stable.
+ // We define symbols as both `const`s and `static`s because some const code requires const symbols,
+ // but code from before the transition relies on the lifetime of the predefined symbols and making them
+ // `const`s makes it error (because now they're temporaries). In the future we probably should only
+ // use consts.
+
+ /// Predefined symbols as `const`s (instead of the default `static`s).
+ pub mod consts {
+ use super::{Symbol, TaggedArcPtr};
+
+ // The strings should be in `static`s so that symbol equality holds.
+ $(
+ pub const $name: Symbol = {
+ static SYMBOL_STR: &str = stringify!($name);
+ Symbol { repr: TaggedArcPtr::non_arc(&SYMBOL_STR) }
+ };
+ )*
+ $(
+ pub const $alias: Symbol = {
+ static SYMBOL_STR: &str = $value;
+ Symbol { repr: TaggedArcPtr::non_arc(&SYMBOL_STR) }
+ };
+ )*
+ }
+
$(
- pub static $name: Symbol = Symbol { repr: TaggedArcPtr::non_arc(&stringify!($name)) };
+ pub static $name: Symbol = consts::$name;
)*
$(
- pub static $alias: Symbol = Symbol { repr: TaggedArcPtr::non_arc(&$value) };
+ pub static $alias: Symbol = consts::$alias;
)*
@@ -347,6 +367,7 @@ define_symbols! {
option,
Option,
Ord,
+ Ordering,
Output,
CallRefFuture,
CallOnceFuture,
@@ -427,6 +448,7 @@ define_symbols! {
rustc_layout_scalar_valid_range_start,
rustc_legacy_const_generics,
rustc_macro_transparency,
+ rustc_paren_sugar,
rustc_reallocator,
rustc_reservation_impl,
rustc_safe_intrinsic,
@@ -458,6 +480,8 @@ define_symbols! {
system,
sysv64,
Target,
+ target_feature,
+ enable,
termination,
test_case,
test,
@@ -479,6 +503,7 @@ define_symbols! {
u64,
u8,
unadjusted,
+ unknown,
Unknown,
unpin,
unreachable_2015,
diff --git a/crates/limit/Cargo.toml b/crates/limit/Cargo.toml
deleted file mode 100644
index 30666f5219..0000000000
--- a/crates/limit/Cargo.toml
+++ /dev/null
@@ -1,16 +0,0 @@
-[package]
-name = "limit"
-version = "0.0.0"
-repository.workspace = true
-description = "A struct to enforce limits for rust-analyzer."
-
-authors.workspace = true
-edition.workspace = true
-license.workspace = true
-rust-version.workspace = true
-
-[features]
-tracking = []
-
-[lints]
-workspace = true
diff --git a/crates/limit/src/lib.rs b/crates/limit/src/lib.rs
deleted file mode 100644
index c1caeed2f8..0000000000
--- a/crates/limit/src/lib.rs
+++ /dev/null
@@ -1,67 +0,0 @@
-//! limit defines a struct to enforce limits.
-
-#[cfg(feature = "tracking")]
-use std::sync::atomic::AtomicUsize;
-
-/// Represents a struct used to enforce a numerical limit.
-#[derive(Debug)]
-pub struct Limit {
- upper_bound: usize,
- #[cfg(feature = "tracking")]
- max: AtomicUsize,
-}
-
-impl Limit {
- /// Creates a new limit.
- #[inline]
- pub const fn new(upper_bound: usize) -> Self {
- Self {
- upper_bound,
- #[cfg(feature = "tracking")]
- max: AtomicUsize::new(0),
- }
- }
-
- /// Creates a new limit.
- #[inline]
- #[cfg(feature = "tracking")]
- pub const fn new_tracking(upper_bound: usize) -> Self {
- Self {
- upper_bound,
- #[cfg(feature = "tracking")]
- max: AtomicUsize::new(1),
- }
- }
-
- /// Gets the underlying numeric limit.
- #[inline]
- pub const fn inner(&self) -> usize {
- self.upper_bound
- }
-
- /// Checks whether the given value is below the limit.
- /// Returns `Ok` when `other` is below `self`, and `Err` otherwise.
- #[inline]
- pub fn check(&self, other: usize) -> Result<(), ()> {
- if other > self.upper_bound {
- Err(())
- } else {
- #[cfg(feature = "tracking")]
- loop {
- use std::sync::atomic::Ordering;
- let old_max = self.max.load(Ordering::Relaxed);
- if other <= old_max || old_max == 0 {
- break;
- }
- _ = self.max.compare_exchange_weak(
- old_max,
- other,
- Ordering::Relaxed,
- Ordering::Relaxed,
- );
- }
-
- Ok(())
- }
- }
-}
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 5654c04a59..76f1a7f48b 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -94,7 +94,9 @@ pub fn load_workspace(
let contents = loader.load_sync(path);
let path = vfs::VfsPath::from(path.to_path_buf());
vfs.set_file_contents(path.clone(), contents);
- vfs.file_id(&path)
+ vfs.file_id(&path).and_then(|(file_id, excluded)| {
+ (excluded == vfs::FileExcluded::No).then_some(file_id)
+ })
},
extra_env,
);
@@ -454,7 +456,6 @@ fn load_crate_graph(
let ws_data = crate_graph
.iter()
.zip(iter::repeat(From::from(CrateWorkspaceData {
- proc_macro_cwd: None,
data_layout: target_layout.clone(),
toolchain: toolchain.clone(),
})))
diff --git a/crates/parser/Cargo.toml b/crates/parser/Cargo.toml
index 3629d275c0..48436ec71f 100644
--- a/crates/parser/Cargo.toml
+++ b/crates/parser/Cargo.toml
@@ -15,7 +15,6 @@ doctest = false
[dependencies]
drop_bomb = "0.1.5"
ra-ap-rustc_lexer.workspace = true
-limit.workspace = true
tracing = { workspace = true, optional = true }
edition.workspace = true
diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs
index 389c01933c..fe1316c9bf 100644
--- a/crates/parser/src/grammar/expressions.rs
+++ b/crates/parser/src/grammar/expressions.rs
@@ -678,6 +678,8 @@ fn path_expr(p: &mut Parser<'_>, r: Restrictions) -> (CompletedMarker, BlockLike
// S { x };
// S { x, y: 32, };
// S { x, y: 32, ..Default::default() };
+// S { x, y: 32, .. };
+// S { .. };
// S { x: ::default() };
// TupleStruct { 0: 1 };
// }
@@ -709,6 +711,8 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) {
// fn main() {
// S { field ..S::default() }
// S { 0 ..S::default() }
+ // S { field .. }
+ // S { 0 .. }
// }
name_ref_or_index(p);
p.error("expected `:`");
@@ -739,7 +743,13 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) {
// S { .. } = S {};
// }
- // We permit `.. }` on the left-hand side of a destructuring assignment.
+ // test struct_initializer_with_defaults
+ // fn foo() {
+ // let _s = S { .. };
+ // }
+
+ // We permit `.. }` on the left-hand side of a destructuring assignment
+ // or default values.
if !p.at(T!['}']) {
expr(p);
@@ -750,6 +760,12 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) {
// S { ..x, a: 0 }
// }
+ // test_err comma_after_default_values_syntax
+ // fn foo() {
+ // S { .., };
+ // S { .., a: 0 }
+ // }
+
// Do not bump, so we can support additional fields after this comma.
p.error("cannot use a comma after the base struct");
}
diff --git a/crates/parser/src/grammar/items/adt.rs b/crates/parser/src/grammar/items/adt.rs
index 21078175c0..9a16c9db6d 100644
--- a/crates/parser/src/grammar/items/adt.rs
+++ b/crates/parser/src/grammar/items/adt.rs
@@ -135,6 +135,11 @@ pub(crate) fn record_field_list(p: &mut Parser<'_>) {
name(p);
p.expect(T![:]);
types::type_(p);
+ // test record_field_default_values
+ // struct S { f: f32 = 0.0 }
+ if p.eat(T![=]) {
+ expressions::expr(p);
+ }
m.complete(p, RECORD_FIELD);
} else {
m.abandon(p);
diff --git a/crates/parser/src/parser.rs b/crates/parser/src/parser.rs
index 2f6ba52574..b058686276 100644
--- a/crates/parser/src/parser.rs
+++ b/crates/parser/src/parser.rs
@@ -3,7 +3,6 @@
use std::cell::Cell;
use drop_bomb::DropBomb;
-use limit::Limit;
use crate::{
event::Event,
@@ -30,7 +29,7 @@ pub(crate) struct Parser<'t> {
edition: Edition,
}
-static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000);
+const PARSER_STEP_LIMIT: usize = 15_000_000;
impl<'t> Parser<'t> {
pub(super) fn new(inp: &'t Input, edition: Edition) -> Parser<'t> {
@@ -54,7 +53,7 @@ impl<'t> Parser<'t> {
assert!(n <= 3);
let steps = self.steps.get();
- assert!(PARSER_STEP_LIMIT.check(steps as usize).is_ok(), "the parser seems stuck");
+ assert!((steps as usize) < PARSER_STEP_LIMIT, "the parser seems stuck");
self.steps.set(steps + 1);
self.inp.kind(self.pos + n)
diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs
index 318f71a2d4..79900425a1 100644
--- a/crates/parser/src/syntax_kind/generated.rs
+++ b/crates/parser/src/syntax_kind/generated.rs
@@ -1,4 +1,4 @@
-//! Generated by `cargo codegen grammar`, do not edit by hand.
+//! Generated by `cargo xtask codegen grammar`, do not edit by hand.
#![allow(bad_style, missing_docs, unreachable_pub)]
use crate::Edition;
diff --git a/crates/parser/test_data/generated/runner.rs b/crates/parser/test_data/generated/runner.rs
index b9f87b6af2..1a74773158 100644
--- a/crates/parser/test_data/generated/runner.rs
+++ b/crates/parser/test_data/generated/runner.rs
@@ -482,6 +482,10 @@ mod ok {
run_and_expect_no_errors("test_data/parser/inline/ok/record_field_attrs.rs");
}
#[test]
+ fn record_field_default_values() {
+ run_and_expect_no_errors("test_data/parser/inline/ok/record_field_default_values.rs");
+ }
+ #[test]
fn record_field_list() {
run_and_expect_no_errors("test_data/parser/inline/ok/record_field_list.rs");
}
@@ -544,6 +548,10 @@ mod ok {
run_and_expect_no_errors("test_data/parser/inline/ok/stmt_postfix_expr_ambiguity.rs");
}
#[test]
+ fn struct_initializer_with_defaults() {
+ run_and_expect_no_errors("test_data/parser/inline/ok/struct_initializer_with_defaults.rs");
+ }
+ #[test]
fn struct_item() { run_and_expect_no_errors("test_data/parser/inline/ok/struct_item.rs"); }
#[test]
fn trait_alias() { run_and_expect_no_errors("test_data/parser/inline/ok/trait_alias.rs"); }
@@ -713,6 +721,10 @@ mod err {
#[test]
fn bad_asm_expr() { run_and_expect_errors("test_data/parser/inline/err/bad_asm_expr.rs"); }
#[test]
+ fn comma_after_default_values_syntax() {
+ run_and_expect_errors("test_data/parser/inline/err/comma_after_default_values_syntax.rs");
+ }
+ #[test]
fn comma_after_functional_update_syntax() {
run_and_expect_errors(
"test_data/parser/inline/err/comma_after_functional_update_syntax.rs",
diff --git a/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rast b/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rast
new file mode 100644
index 0000000000..feb617e1aa
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rast
@@ -0,0 +1,59 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ DOT2 ".."
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ DOT2 ".."
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 21: expected expression
+error 36: expected expression
+error 37: expected COMMA
diff --git a/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rs b/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rs
new file mode 100644
index 0000000000..f1ecdf89fa
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ S { .., };
+ S { .., a: 0 }
+}
diff --git a/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rast b/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rast
index 08ae906421..12b4e233e3 100644
--- a/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rast
+++ b/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rast
@@ -44,6 +44,56 @@ SOURCE_FILE
WHITESPACE " "
R_CURLY "}"
WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ INT_NUMBER "0"
+ WHITESPACE " "
+ DOT2 ".."
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "default"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "field"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
RECORD_EXPR
PATH
PATH_SEGMENT
@@ -58,20 +108,6 @@ SOURCE_FILE
INT_NUMBER "0"
WHITESPACE " "
DOT2 ".."
- CALL_EXPR
- PATH_EXPR
- PATH
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "S"
- COLON2 "::"
- PATH_SEGMENT
- NAME_REF
- IDENT "default"
- ARG_LIST
- L_PAREN "("
- R_PAREN ")"
WHITESPACE " "
R_CURLY "}"
WHITESPACE "\n"
@@ -82,3 +118,9 @@ error 25: expected COMMA
error 42: expected SEMICOLON
error 52: expected `:`
error 52: expected COMMA
+error 69: expected SEMICOLON
+error 83: expected `:`
+error 83: expected COMMA
+error 88: expected SEMICOLON
+error 98: expected `:`
+error 98: expected COMMA
diff --git a/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rs b/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rs
index 65398ccb88..416cd763fd 100644
--- a/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rs
+++ b/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rs
@@ -1,4 +1,6 @@
fn main() {
S { field ..S::default() }
S { 0 ..S::default() }
+ S { field .. }
+ S { 0 .. }
}
diff --git a/crates/parser/test_data/parser/inline/ok/record_field_default_values.rast b/crates/parser/test_data/parser/inline/ok/record_field_default_values.rast
new file mode 100644
index 0000000000..33088f2cab
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/record_field_default_values.rast
@@ -0,0 +1,28 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "f"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ FLOAT_NUMBER "0.0"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/crates/parser/test_data/parser/inline/ok/record_field_default_values.rs b/crates/parser/test_data/parser/inline/ok/record_field_default_values.rs
new file mode 100644
index 0000000000..d7b38944a8
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/record_field_default_values.rs
@@ -0,0 +1 @@
+struct S { f: f32 = 0.0 }
diff --git a/crates/parser/test_data/parser/inline/ok/record_lit.rast b/crates/parser/test_data/parser/inline/ok/record_lit.rast
index 00948c322f..b868da55bc 100644
--- a/crates/parser/test_data/parser/inline/ok/record_lit.rast
+++ b/crates/parser/test_data/parser/inline/ok/record_lit.rast
@@ -131,6 +131,53 @@ SOURCE_FILE
L_CURLY "{"
WHITESPACE " "
RECORD_EXPR_FIELD
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "y"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "32"
+ COMMA ","
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
NAME_REF
IDENT "x"
COLON ":"
diff --git a/crates/parser/test_data/parser/inline/ok/record_lit.rs b/crates/parser/test_data/parser/inline/ok/record_lit.rs
index 86411fbb7d..42895f759b 100644
--- a/crates/parser/test_data/parser/inline/ok/record_lit.rs
+++ b/crates/parser/test_data/parser/inline/ok/record_lit.rs
@@ -3,6 +3,8 @@ fn foo() {
S { x };
S { x, y: 32, };
S { x, y: 32, ..Default::default() };
+ S { x, y: 32, .. };
+ S { .. };
S { x: ::default() };
TupleStruct { 0: 1 };
}
diff --git a/crates/parser/test_data/parser/inline/ok/struct_initializer_with_defaults.rast b/crates/parser/test_data/parser/inline/ok/struct_initializer_with_defaults.rast
new file mode 100644
index 0000000000..987e219ae8
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/struct_initializer_with_defaults.rast
@@ -0,0 +1,39 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_s"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/crates/parser/test_data/parser/inline/ok/struct_initializer_with_defaults.rs b/crates/parser/test_data/parser/inline/ok/struct_initializer_with_defaults.rs
new file mode 100644
index 0000000000..e08204f94c
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/struct_initializer_with_defaults.rs
@@ -0,0 +1,3 @@
+fn foo() {
+ let _s = S { .. };
+}
diff --git a/crates/proc-macro-srv-cli/src/main_loop.rs b/crates/proc-macro-srv-cli/src/main_loop.rs
index ba1fcd8e33..569070766f 100644
--- a/crates/proc-macro-srv-cli/src/main_loop.rs
+++ b/crates/proc-macro-srv-cli/src/main_loop.rs
@@ -21,18 +21,16 @@ pub(crate) fn run() -> io::Result<()> {
}
}
- let read_request =
- |buf: &mut String| msg::Request::read(read_json, &mut io::stdin().lock(), buf);
-
+ let mut buf = String::new();
+ let mut read_request = || msg::Request::read(read_json, &mut io::stdin().lock(), &mut buf);
let write_response = |msg: msg::Response| msg.write(write_json, &mut io::stdout().lock());
let env = EnvSnapshot::default();
- let mut srv = proc_macro_srv::ProcMacroSrv::new(&env);
- let mut buf = String::new();
+ let srv = proc_macro_srv::ProcMacroSrv::new(&env);
let mut span_mode = SpanMode::Id;
- while let Some(req) = read_request(&mut buf)? {
+ while let Some(req) = read_request()? {
let res = match req {
msg::Request::ListMacros { dylib_path } => {
msg::Response::ListMacros(srv.list_macros(&dylib_path).map(|macros| {
diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs
index 7ae75713eb..f28821b4af 100644
--- a/crates/proc-macro-srv/src/lib.rs
+++ b/crates/proc-macro-srv/src/lib.rs
@@ -35,6 +35,7 @@ use std::{
ffi::OsString,
fs,
path::{Path, PathBuf},
+ sync::{Arc, Mutex, PoisonError},
thread,
};
@@ -53,7 +54,7 @@ pub enum ProcMacroKind {
pub const RUSTC_VERSION_STRING: &str = env!("RUSTC_VERSION");
pub struct ProcMacroSrv<'env> {
- expanders: HashMap<Utf8PathBuf, dylib::Expander>,
+ expanders: Mutex<HashMap<Utf8PathBuf, Arc<dylib::Expander>>>,
env: &'env EnvSnapshot,
}
@@ -67,7 +68,7 @@ const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;
impl ProcMacroSrv<'_> {
pub fn expand<S: ProcMacroSrvSpan>(
- &mut self,
+ &self,
lib: impl AsRef<Utf8Path>,
env: Vec<(String, String)>,
current_dir: Option<impl AsRef<Path>>,
@@ -118,29 +119,37 @@ impl ProcMacroSrv<'_> {
}
pub fn list_macros(
- &mut self,
+ &self,
dylib_path: &Utf8Path,
) -> Result<Vec<(String, ProcMacroKind)>, String> {
let expander = self.expander(dylib_path)?;
Ok(expander.list_macros())
}
- fn expander(&mut self, path: &Utf8Path) -> Result<&dylib::Expander, String> {
+ fn expander(&self, path: &Utf8Path) -> Result<Arc<dylib::Expander>, String> {
let expander = || {
- dylib::Expander::new(path)
- .map_err(|err| format!("Cannot create expander for {path}: {err}",))
+ let expander = dylib::Expander::new(path)
+ .map_err(|err| format!("Cannot create expander for {path}: {err}",));
+ expander.map(Arc::new)
};
- Ok(match self.expanders.entry(path.to_path_buf()) {
- Entry::Vacant(v) => v.insert(expander()?),
- Entry::Occupied(mut e) => {
- let time = fs::metadata(path).and_then(|it| it.modified()).ok();
- if Some(e.get().modified_time()) != time {
- e.insert(expander()?);
+ Ok(
+ match self
+ .expanders
+ .lock()
+ .unwrap_or_else(PoisonError::into_inner)
+ .entry(path.to_path_buf())
+ {
+ Entry::Vacant(v) => v.insert(expander()?).clone(),
+ Entry::Occupied(mut e) => {
+ let time = fs::metadata(path).and_then(|it| it.modified()).ok();
+ if Some(e.get().modified_time()) != time {
+ e.insert(expander()?);
+ }
+ e.get().clone()
}
- e.into_mut()
- }
- })
+ },
+ )
}
}
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index 4ce4544243..1b085520d5 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -107,7 +107,7 @@ fn assert_expand_impl(
pub(crate) fn list() -> Vec<String> {
let dylib_path = proc_macro_test_dylib_path();
let env = EnvSnapshot::default();
- let mut srv = ProcMacroSrv::new(&env);
+ let srv = ProcMacroSrv::new(&env);
let res = srv.list_macros(&dylib_path).unwrap();
res.into_iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect()
}
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index e4a6113462..b5f4e43a11 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -277,6 +277,9 @@ impl CargoWorkspace {
/// Fetches the metadata for the given `cargo_toml` manifest.
/// A successful result may contain another metadata error if the initial fetching failed but
/// the `--no-deps` retry succeeded.
+ ///
+ /// The sysroot is used to set the `RUSTUP_TOOLCHAIN` env var when invoking cargo
+ /// to ensure that the rustup proxy uses the correct toolchain.
pub fn fetch_metadata(
cargo_toml: &ManifestPath,
current_dir: &AbsPath,
diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs
index fc1fd7b877..0c73447468 100644
--- a/crates/project-model/src/lib.rs
+++ b/crates/project-model/src/lib.rs
@@ -260,19 +260,19 @@ fn parse_cfg(s: &str) -> Result<cfg::CfgAtom, String> {
}
#[derive(Clone, Debug, PartialEq, Eq)]
-pub enum SysrootSourceWorkspaceConfig {
+pub enum RustSourceWorkspaceConfig {
CargoMetadata(CargoMetadataConfig),
Stitched,
}
-impl Default for SysrootSourceWorkspaceConfig {
+impl Default for RustSourceWorkspaceConfig {
fn default() -> Self {
- SysrootSourceWorkspaceConfig::default_cargo()
+ RustSourceWorkspaceConfig::default_cargo()
}
}
-impl SysrootSourceWorkspaceConfig {
+impl RustSourceWorkspaceConfig {
pub fn default_cargo() -> Self {
- SysrootSourceWorkspaceConfig::CargoMetadata(Default::default())
+ RustSourceWorkspaceConfig::CargoMetadata(Default::default())
}
}
diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs
index a396396761..2f9612e3a4 100644
--- a/crates/project-model/src/project_json.rs
+++ b/crates/project-model/src/project_json.rs
@@ -164,6 +164,7 @@ impl ProjectJson {
is_proc_macro: crate_data.is_proc_macro,
repository: crate_data.repository,
build,
+ proc_macro_cwd: crate_data.proc_macro_cwd.map(absolutize_on_base),
}
})
.collect(),
@@ -240,6 +241,8 @@ pub struct Crate {
pub(crate) include: Vec<AbsPathBuf>,
pub(crate) exclude: Vec<AbsPathBuf>,
pub(crate) is_proc_macro: bool,
+ /// The working directory to run proc-macros in. This is usually the workspace root of cargo workspaces.
+ pub(crate) proc_macro_cwd: Option<AbsPathBuf>,
pub(crate) repository: Option<String>,
pub build: Option<Build>,
}
@@ -362,6 +365,8 @@ struct CrateData {
repository: Option<String>,
#[serde(default)]
build: Option<BuildData>,
+ #[serde(default)]
+ proc_macro_cwd: Option<Utf8PathBuf>,
}
mod cfg_ {
@@ -508,5 +513,5 @@ fn serialize_crate_name<S>(name: &CrateName, se: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
- se.serialize_str(name)
+ se.serialize_str(name.as_str())
}
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index 8f633d24be..fb752fe47b 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -22,38 +22,40 @@ use toolchain::{probe_for_binary, Tool};
use crate::{
cargo_workspace::CargoMetadataConfig, utf8_stdout, CargoWorkspace, ManifestPath,
- SysrootSourceWorkspaceConfig,
+ RustSourceWorkspaceConfig,
};
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Sysroot {
root: Option<AbsPathBuf>,
- src_root: Option<AbsPathBuf>,
- workspace: SysrootWorkspace,
+ rust_lib_src_root: Option<AbsPathBuf>,
+ workspace: RustLibSrcWorkspace,
error: Option<String>,
}
#[derive(Debug, Clone, Eq, PartialEq)]
-pub(crate) enum SysrootWorkspace {
+pub enum RustLibSrcWorkspace {
Workspace(CargoWorkspace),
Stitched(Stitched),
Empty,
}
#[derive(Debug, Clone, Eq, PartialEq)]
-pub(crate) struct Stitched {
- crates: Arena<SysrootCrateData>,
+pub struct Stitched {
+ crates: Arena<RustLibSrcCrateData>,
}
-impl ops::Index<SysrootCrate> for Stitched {
- type Output = SysrootCrateData;
- fn index(&self, index: SysrootCrate) -> &SysrootCrateData {
+impl ops::Index<RustLibSrcCrate> for Stitched {
+ type Output = RustLibSrcCrateData;
+ fn index(&self, index: RustLibSrcCrate) -> &RustLibSrcCrateData {
&self.crates[index]
}
}
impl Stitched {
- pub(crate) fn public_deps(&self) -> impl Iterator<Item = (CrateName, SysrootCrate, bool)> + '_ {
+ pub(crate) fn public_deps(
+ &self,
+ ) -> impl Iterator<Item = (CrateName, RustLibSrcCrate, bool)> + '_ {
// core is added as a dependency before std in order to
// mimic rustcs dependency order
[("core", true), ("alloc", false), ("std", true), ("test", false)].into_iter().filter_map(
@@ -63,32 +65,37 @@ impl Stitched {
)
}
- pub(crate) fn proc_macro(&self) -> Option<SysrootCrate> {
+ pub(crate) fn proc_macro(&self) -> Option<RustLibSrcCrate> {
self.by_name("proc_macro")
}
- pub(crate) fn crates(&self) -> impl ExactSizeIterator<Item = SysrootCrate> + '_ {
+ pub(crate) fn crates(&self) -> impl ExactSizeIterator<Item = RustLibSrcCrate> + '_ {
self.crates.iter().map(|(id, _data)| id)
}
- fn by_name(&self, name: &str) -> Option<SysrootCrate> {
+ fn by_name(&self, name: &str) -> Option<RustLibSrcCrate> {
let (id, _data) = self.crates.iter().find(|(_id, data)| data.name == name)?;
Some(id)
}
}
-pub(crate) type SysrootCrate = Idx<SysrootCrateData>;
+pub(crate) type RustLibSrcCrate = Idx<RustLibSrcCrateData>;
#[derive(Debug, Clone, Eq, PartialEq)]
-pub(crate) struct SysrootCrateData {
+pub(crate) struct RustLibSrcCrateData {
pub(crate) name: String,
pub(crate) root: ManifestPath,
- pub(crate) deps: Vec<SysrootCrate>,
+ pub(crate) deps: Vec<RustLibSrcCrate>,
}
impl Sysroot {
pub const fn empty() -> Sysroot {
- Sysroot { root: None, src_root: None, workspace: SysrootWorkspace::Empty, error: None }
+ Sysroot {
+ root: None,
+ rust_lib_src_root: None,
+ workspace: RustLibSrcWorkspace::Empty,
+ error: None,
+ }
}
/// Returns sysroot "root" directory, where `bin/`, `etc/`, `lib/`, `libexec/`
@@ -100,15 +107,15 @@ impl Sysroot {
/// Returns the sysroot "source" directory, where stdlib sources are located, like:
/// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library`
- pub fn src_root(&self) -> Option<&AbsPath> {
- self.src_root.as_deref()
+ pub fn rust_lib_src_root(&self) -> Option<&AbsPath> {
+ self.rust_lib_src_root.as_deref()
}
- pub fn is_empty(&self) -> bool {
+ pub fn is_rust_lib_src_empty(&self) -> bool {
match &self.workspace {
- SysrootWorkspace::Workspace(ws) => ws.packages().next().is_none(),
- SysrootWorkspace::Stitched(stitched) => stitched.crates.is_empty(),
- SysrootWorkspace::Empty => true,
+ RustLibSrcWorkspace::Workspace(ws) => ws.packages().next().is_none(),
+ RustLibSrcWorkspace::Stitched(stitched) => stitched.crates.is_empty(),
+ RustLibSrcWorkspace::Empty => true,
}
}
@@ -118,13 +125,13 @@ impl Sysroot {
pub fn num_packages(&self) -> usize {
match &self.workspace {
- SysrootWorkspace::Workspace(ws) => ws.packages().count(),
- SysrootWorkspace::Stitched(c) => c.crates().count(),
- SysrootWorkspace::Empty => 0,
+ RustLibSrcWorkspace::Workspace(ws) => ws.packages().count(),
+ RustLibSrcWorkspace::Stitched(c) => c.crates().count(),
+ RustLibSrcWorkspace::Empty => 0,
}
}
- pub(crate) fn workspace(&self) -> &SysrootWorkspace {
+ pub(crate) fn workspace(&self) -> &RustLibSrcWorkspace {
&self.workspace
}
}
@@ -133,33 +140,33 @@ impl Sysroot {
/// Attempts to discover the toolchain's sysroot from the given `dir`.
pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Sysroot {
let sysroot_dir = discover_sysroot_dir(dir, extra_env);
- let sysroot_src_dir = sysroot_dir.as_ref().ok().map(|sysroot_dir| {
- discover_sysroot_src_dir_or_add_component(sysroot_dir, dir, extra_env)
+ let rust_lib_src_dir = sysroot_dir.as_ref().ok().map(|sysroot_dir| {
+ discover_rust_lib_src_dir_or_add_component(sysroot_dir, dir, extra_env)
});
- Sysroot::assemble(Some(sysroot_dir), sysroot_src_dir)
+ Sysroot::assemble(Some(sysroot_dir), rust_lib_src_dir)
}
pub fn discover_with_src_override(
current_dir: &AbsPath,
extra_env: &FxHashMap<String, String>,
- sysroot_src_dir: AbsPathBuf,
+ rust_lib_src_dir: AbsPathBuf,
) -> Sysroot {
let sysroot_dir = discover_sysroot_dir(current_dir, extra_env);
- Sysroot::assemble(Some(sysroot_dir), Some(Ok(sysroot_src_dir)))
+ Sysroot::assemble(Some(sysroot_dir), Some(Ok(rust_lib_src_dir)))
}
- pub fn discover_sysroot_src_dir(sysroot_dir: AbsPathBuf) -> Sysroot {
- let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir)
+ pub fn discover_rust_lib_src_dir(sysroot_dir: AbsPathBuf) -> Sysroot {
+ let rust_lib_src_dir = discover_rust_lib_src_dir(&sysroot_dir)
.ok_or_else(|| format_err!("can't find standard library sources in {sysroot_dir}"));
- Sysroot::assemble(Some(Ok(sysroot_dir)), Some(sysroot_src_dir))
+ Sysroot::assemble(Some(Ok(sysroot_dir)), Some(rust_lib_src_dir))
}
pub fn discover_rustc_src(&self) -> Option<ManifestPath> {
get_rustc_src(self.root()?)
}
- pub fn new(sysroot_dir: Option<AbsPathBuf>, sysroot_src_dir: Option<AbsPathBuf>) -> Sysroot {
- Self::assemble(sysroot_dir.map(Ok), sysroot_src_dir.map(Ok))
+ pub fn new(sysroot_dir: Option<AbsPathBuf>, rust_lib_src_dir: Option<AbsPathBuf>) -> Sysroot {
+ Self::assemble(sysroot_dir.map(Ok), rust_lib_src_dir.map(Ok))
}
/// Returns a command to run a tool preferring the cargo proxies if the sysroot exists.
@@ -200,7 +207,7 @@ impl Sysroot {
fn assemble(
sysroot_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
- sysroot_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
+ rust_lib_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
) -> Sysroot {
let mut errors = String::new();
let root = match sysroot_dir {
@@ -211,8 +218,8 @@ impl Sysroot {
}
None => None,
};
- let src_root = match sysroot_src_dir {
- Some(Ok(sysroot_src_dir)) => Some(sysroot_src_dir),
+ let rust_lib_src_root = match rust_lib_src_dir {
+ Some(Ok(rust_lib_src_dir)) => Some(rust_lib_src_dir),
Some(Err(e)) => {
format_to!(errors, "{e}\n");
None
@@ -221,24 +228,28 @@ impl Sysroot {
};
Sysroot {
root,
- src_root,
- workspace: SysrootWorkspace::Empty,
+ rust_lib_src_root,
+ workspace: RustLibSrcWorkspace::Empty,
error: errors.is_empty().not().then_some(errors),
}
}
- pub fn load_workspace(&mut self, sysroot_source_config: &SysrootSourceWorkspaceConfig) {
- assert!(matches!(self.workspace, SysrootWorkspace::Empty), "workspace already loaded");
- let Self { root: _, src_root: Some(src_root), workspace, error: _ } = self else { return };
- if let SysrootSourceWorkspaceConfig::CargoMetadata(cargo_config) = sysroot_source_config {
+ pub fn load_workspace(
+ &self,
+ sysroot_source_config: &RustSourceWorkspaceConfig,
+ ) -> Option<RustLibSrcWorkspace> {
+ assert!(matches!(self.workspace, RustLibSrcWorkspace::Empty), "workspace already loaded");
+ let Self { root: _, rust_lib_src_root: Some(src_root), workspace: _, error: _ } = self
+ else {
+ return None;
+ };
+ if let RustSourceWorkspaceConfig::CargoMetadata(cargo_config) = sysroot_source_config {
let library_manifest = ManifestPath::try_from(src_root.join("Cargo.toml")).unwrap();
if fs::metadata(&library_manifest).is_ok() {
if let Some(loaded) =
- Self::load_library_via_cargo(library_manifest, src_root, cargo_config)
+ self.load_library_via_cargo(library_manifest, src_root, cargo_config)
{
- *workspace = loaded;
- self.load_core_check();
- return;
+ return Some(loaded);
}
}
}
@@ -255,7 +266,7 @@ impl Sysroot {
.find(|it| fs::metadata(it).is_ok());
if let Some(root) = root {
- stitched.crates.alloc(SysrootCrateData {
+ stitched.crates.alloc(RustLibSrcCrateData {
name: name.into(),
root,
deps: Vec::new(),
@@ -286,17 +297,19 @@ impl Sysroot {
}
}
}
- *workspace = SysrootWorkspace::Stitched(stitched);
- self.load_core_check();
+ Some(RustLibSrcWorkspace::Stitched(stitched))
}
- fn load_core_check(&mut self) {
+ pub fn set_workspace(&mut self, workspace: RustLibSrcWorkspace) {
+ self.workspace = workspace;
if self.error.is_none() {
- if let Some(src_root) = &self.src_root {
+ if let Some(src_root) = &self.rust_lib_src_root {
let has_core = match &self.workspace {
- SysrootWorkspace::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"),
- SysrootWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(),
- SysrootWorkspace::Empty => true,
+ RustLibSrcWorkspace::Workspace(ws) => {
+ ws.packages().any(|p| ws[p].name == "core")
+ }
+ RustLibSrcWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(),
+ RustLibSrcWorkspace::Empty => true,
};
if !has_core {
let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
@@ -313,10 +326,11 @@ impl Sysroot {
}
fn load_library_via_cargo(
+ &self,
library_manifest: ManifestPath,
- sysroot_src_dir: &AbsPathBuf,
+ rust_lib_src_dir: &AbsPathBuf,
cargo_config: &CargoMetadataConfig,
- ) -> Option<SysrootWorkspace> {
+ ) -> Option<RustLibSrcWorkspace> {
tracing::debug!("Loading library metadata: {library_manifest}");
let mut cargo_config = cargo_config.clone();
// the sysroot uses `public-dependency`, so we make cargo think it's a nightly
@@ -327,9 +341,9 @@ impl Sysroot {
let (mut res, _) = match CargoWorkspace::fetch_metadata(
&library_manifest,
- sysroot_src_dir,
+ rust_lib_src_dir,
&cargo_config,
- &Sysroot::empty(),
+ self,
// Make sure we never attempt to write to the sysroot
true,
&|_| (),
@@ -391,7 +405,7 @@ impl Sysroot {
});
let cargo_workspace = CargoWorkspace::new(res, library_manifest, Default::default());
- Some(SysrootWorkspace::Workspace(cargo_workspace))
+ Some(RustLibSrcWorkspace::Workspace(cargo_workspace))
}
}
@@ -407,7 +421,7 @@ fn discover_sysroot_dir(
Ok(AbsPathBuf::assert(Utf8PathBuf::from(stdout)))
}
-fn discover_sysroot_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> {
+fn discover_rust_lib_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> {
if let Ok(path) = env::var("RUST_SRC_PATH") {
if let Ok(path) = AbsPathBuf::try_from(path.as_str()) {
let core = path.join("core");
@@ -421,22 +435,22 @@ fn discover_sysroot_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> {
}
}
- get_rust_src(sysroot_path)
+ get_rust_lib_src(sysroot_path)
}
-fn discover_sysroot_src_dir_or_add_component(
+fn discover_rust_lib_src_dir_or_add_component(
sysroot_path: &AbsPathBuf,
current_dir: &AbsPath,
extra_env: &FxHashMap<String, String>,
) -> Result<AbsPathBuf> {
- discover_sysroot_src_dir(sysroot_path)
+ discover_rust_lib_src_dir(sysroot_path)
.or_else(|| {
let mut rustup = toolchain::command(Tool::Rustup.prefer_proxy(), current_dir);
rustup.envs(extra_env);
rustup.args(["component", "add", "rust-src"]);
tracing::info!("adding rust-src component by {:?}", rustup);
utf8_stdout(&mut rustup).ok()?;
- get_rust_src(sysroot_path)
+ get_rust_lib_src(sysroot_path)
})
.ok_or_else(|| {
tracing::error!(%sysroot_path, "can't load standard library, try installing `rust-src`");
@@ -461,11 +475,11 @@ fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> {
}
}
-fn get_rust_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> {
- let rust_src = sysroot_path.join("lib/rustlib/src/rust/library");
- tracing::debug!("checking sysroot library: {rust_src}");
- if fs::metadata(&rust_src).is_ok() {
- Some(rust_src)
+fn get_rust_lib_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> {
+ let rust_lib_src = sysroot_path.join("lib/rustlib/src/rust/library");
+ tracing::debug!("checking sysroot library: {rust_lib_src}");
+ if fs::metadata(&rust_lib_src).is_ok() {
+ Some(rust_lib_src)
} else {
None
}
diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs
index f111383112..54eb0e3478 100644
--- a/crates/project-model/src/tests.rs
+++ b/crates/project-model/src/tests.rs
@@ -12,9 +12,9 @@ use span::FileId;
use triomphe::Arc;
use crate::{
- sysroot::SysrootWorkspace, workspace::ProjectWorkspaceKind, CargoWorkspace, CfgOverrides,
- ManifestPath, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot,
- SysrootSourceWorkspaceConfig, WorkspaceBuildScripts,
+ sysroot::RustLibSrcWorkspace, workspace::ProjectWorkspaceKind, CargoWorkspace, CfgOverrides,
+ ManifestPath, ProjectJson, ProjectJsonData, ProjectWorkspace, RustSourceWorkspaceConfig,
+ Sysroot, WorkspaceBuildScripts,
};
fn load_cargo(file: &str) -> (CrateGraph, ProcMacroPaths) {
@@ -42,7 +42,6 @@ fn load_workspace_from_metadata(file: &str) -> ProjectWorkspace {
build_scripts: WorkspaceBuildScripts::default(),
rustc: Err(None),
error: None,
- set_test: true,
},
cfg_overrides: Default::default(),
sysroot: Sysroot::empty(),
@@ -50,6 +49,7 @@ fn load_workspace_from_metadata(file: &str) -> ProjectWorkspace {
toolchain: None,
target_layout: Err("target_data_layout not loaded".into()),
extra_includes: Vec::new(),
+ set_test: true,
}
}
@@ -65,6 +65,7 @@ fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) {
target_layout: Err(Arc::from("test has no data layout")),
cfg_overrides: Default::default(),
extra_includes: Vec::new(),
+ set_test: true,
};
to_crate_graph(project_workspace, &mut Default::default())
}
@@ -125,7 +126,10 @@ fn get_fake_sysroot() -> Sysroot {
let sysroot_dir = AbsPathBuf::assert(sysroot_path);
let sysroot_src_dir = sysroot_dir.clone();
let mut sysroot = Sysroot::new(Some(sysroot_dir), Some(sysroot_src_dir));
- sysroot.load_workspace(&SysrootSourceWorkspaceConfig::default_cargo());
+ let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo());
+ if let Some(loaded_sysroot) = loaded_sysroot {
+ sysroot.set_workspace(loaded_sysroot);
+ }
sysroot
}
@@ -230,7 +234,7 @@ fn rust_project_is_proc_macro_has_proc_macro_dep() {
let crate_data = &crate_graph[crate_id];
// Assert that the project crate with `is_proc_macro` has a dependency
// on the proc_macro sysroot crate.
- crate_data.dependencies.iter().find(|&dep| dep.name.deref() == "proc_macro").unwrap();
+ crate_data.dependencies.iter().find(|&dep| *dep.name.deref() == sym::proc_macro).unwrap();
}
#[test]
@@ -271,15 +275,17 @@ fn smoke_test_real_sysroot_cargo() {
AbsPath::assert(Utf8Path::new(env!("CARGO_MANIFEST_DIR"))),
&Default::default(),
);
- sysroot.load_workspace(&SysrootSourceWorkspaceConfig::default_cargo());
- assert!(matches!(sysroot.workspace(), SysrootWorkspace::Workspace(_)));
+ let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo());
+ if let Some(loaded_sysroot) = loaded_sysroot {
+ sysroot.set_workspace(loaded_sysroot);
+ }
+ assert!(matches!(sysroot.workspace(), RustLibSrcWorkspace::Workspace(_)));
let project_workspace = ProjectWorkspace {
kind: ProjectWorkspaceKind::Cargo {
cargo: cargo_workspace,
build_scripts: WorkspaceBuildScripts::default(),
rustc: Err(None),
error: None,
- set_test: true,
},
sysroot,
rustc_cfg: Vec::new(),
@@ -287,6 +293,7 @@ fn smoke_test_real_sysroot_cargo() {
toolchain: None,
target_layout: Err("target_data_layout not loaded".into()),
extra_includes: Vec::new(),
+ set_test: true,
};
project_workspace.to_crate_graph(
&mut {
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index dcd62753cb..16b5bb11af 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -2,7 +2,7 @@
//! metadata` or `rust-project.json`) into representation stored in the salsa
//! database -- `CrateGraph`.
-use std::{collections::VecDeque, fmt, fs, iter, ops::Deref, sync};
+use std::{collections::VecDeque, fmt, fs, iter, ops::Deref, sync, thread};
use anyhow::Context;
use base_db::{
@@ -23,10 +23,10 @@ use crate::{
cargo_workspace::{CargoMetadataConfig, DepKind, PackageData, RustLibSource},
env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env},
project_json::{Crate, CrateArrayIdx},
- sysroot::{SysrootCrate, SysrootWorkspace},
+ sysroot::{RustLibSrcCrate, RustLibSrcWorkspace},
toolchain_info::{rustc_cfg, target_data_layout, target_tuple, version, QueryConfig},
CargoConfig, CargoWorkspace, CfgOverrides, InvocationStrategy, ManifestPath, Package,
- ProjectJson, ProjectManifest, Sysroot, SysrootSourceWorkspaceConfig, TargetData, TargetKind,
+ ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind,
WorkspaceBuildScripts,
};
use tracing::{debug, error, info};
@@ -64,6 +64,8 @@ pub struct ProjectWorkspace {
pub cfg_overrides: CfgOverrides,
/// Additional includes to add for the VFS.
pub extra_includes: Vec<AbsPathBuf>,
+ /// Set `cfg(test)` for local crates
+ pub set_test: bool,
}
#[derive(Clone)]
@@ -79,7 +81,6 @@ pub enum ProjectWorkspaceKind {
/// The rustc workspace loaded for this workspace. An `Err(None)` means loading has been
/// disabled or was otherwise not requested.
rustc: Result<Box<(CargoWorkspace, WorkspaceBuildScripts)>, Option<String>>,
- set_test: bool,
},
/// Project workspace was specified using a `rust-project.json` file.
Json(ProjectJson),
@@ -98,7 +99,6 @@ pub enum ProjectWorkspaceKind {
file: ManifestPath,
/// Is this file a cargo script file?
cargo: Option<(CargoWorkspace, WorkspaceBuildScripts, Option<Arc<anyhow::Error>>)>,
- set_test: bool,
},
}
@@ -113,9 +113,10 @@ impl fmt::Debug for ProjectWorkspace {
target_layout,
cfg_overrides,
extra_includes,
+ set_test,
} = self;
match kind {
- ProjectWorkspaceKind::Cargo { cargo, error: _, build_scripts, rustc, set_test } => f
+ ProjectWorkspaceKind::Cargo { cargo, error: _, build_scripts, rustc } => f
.debug_struct("Cargo")
.field("root", &cargo.workspace_root().file_name())
.field("n_packages", &cargo.packages().len())
@@ -141,11 +142,12 @@ impl fmt::Debug for ProjectWorkspace {
.field("toolchain", &toolchain)
.field("data_layout", &target_layout)
.field("n_cfg_overrides", &cfg_overrides.len())
- .field("n_extra_includes", &extra_includes.len());
+ .field("n_extra_includes", &extra_includes.len())
+ .field("set_test", set_test);
debug_struct.finish()
}
- ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, set_test } => f
+ ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script } => f
.debug_struct("DetachedFiles")
.field("file", &file)
.field("cargo_script", &cargo_script.is_some())
@@ -186,7 +188,7 @@ impl ProjectWorkspace {
let project_location = project_json.parent().to_path_buf();
let project_json: ProjectJson =
ProjectJson::new(Some(project_json.clone()), &project_location, data);
- ProjectWorkspace::load_inline(project_json, config)
+ ProjectWorkspace::load_inline(project_json, config, progress)
}
ProjectManifest::CargoScript(rust_file) => {
ProjectWorkspace::load_detached_file(rust_file, config)?
@@ -204,19 +206,33 @@ impl ProjectWorkspace {
config: &CargoConfig,
progress: &dyn Fn(String),
) -> Result<ProjectWorkspace, anyhow::Error> {
- let mut sysroot = match (&config.sysroot, &config.sysroot_src) {
+ progress("Discovering sysroot".to_owned());
+ let CargoConfig {
+ features,
+ rustc_source,
+ extra_args,
+ extra_env,
+ set_test,
+ cfg_overrides,
+ extra_includes,
+ sysroot,
+ sysroot_src,
+ target,
+ ..
+ } = config;
+ let mut sysroot = match (sysroot, sysroot_src) {
(Some(RustLibSource::Discover), None) => {
- Sysroot::discover(cargo_toml.parent(), &config.extra_env)
+ Sysroot::discover(cargo_toml.parent(), extra_env)
}
(Some(RustLibSource::Discover), Some(sysroot_src)) => {
Sysroot::discover_with_src_override(
cargo_toml.parent(),
- &config.extra_env,
+ extra_env,
sysroot_src.clone(),
)
}
(Some(RustLibSource::Path(path)), None) => {
- Sysroot::discover_sysroot_src_dir(path.clone())
+ Sysroot::discover_rust_lib_src_dir(path.clone())
}
(Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => {
Sysroot::new(Some(sysroot.clone()), Some(sysroot_src.clone()))
@@ -224,100 +240,147 @@ impl ProjectWorkspace {
(None, _) => Sysroot::empty(),
};
- let rustc_dir = match &config.rustc_source {
- Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone())
- .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))),
- Some(RustLibSource::Discover) => sysroot
- .discover_rustc_src()
- .ok_or_else(|| Some("Failed to discover rustc source for sysroot.".to_owned())),
- None => Err(None),
- };
-
- tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.src_root(), root = ?sysroot.root(), "Using sysroot");
+ tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot");
+ progress("Querying project metadata".to_owned());
let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml);
let targets =
- target_tuple::get(toolchain_config, config.target.as_deref(), &config.extra_env)
- .unwrap_or_default();
- let toolchain = version::get(toolchain_config, &config.extra_env)
- .inspect_err(|e| {
- tracing::error!(%e,
- "failed fetching toolchain version for {cargo_toml:?} workspace"
- )
- })
- .ok()
- .flatten();
- let rustc_cfg =
- rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), &config.extra_env);
- let cfg_overrides = config.cfg_overrides.clone();
- let data_layout = target_data_layout::get(
- toolchain_config,
- targets.first().map(Deref::deref),
- &config.extra_env,
- );
- if let Err(e) = &data_layout {
- tracing::error!(%e, "failed fetching data layout for {cargo_toml:?} workspace");
- }
- sysroot.load_workspace(&SysrootSourceWorkspaceConfig::CargoMetadata(
- sysroot_metadata_config(&config.extra_env, &targets),
- ));
+ target_tuple::get(toolchain_config, target.as_deref(), extra_env).unwrap_or_default();
+
+ // We spawn a bunch of processes to query various information about the workspace's
+ // toolchain and sysroot
+ // We can speed up loading a bit by spawning all of these processes in parallel (especially
+ // on systems where process spawning is delayed)
+ let join = thread::scope(|s| {
+ let workspace_dir = cargo_toml.parent();
+ let toolchain = s.spawn(|| {
+ version::get(toolchain_config, extra_env)
+ .inspect_err(|e| {
+ tracing::error!(%e,
+ "failed fetching toolchain version for {cargo_toml:?} workspace"
+ )
+ })
+ .ok()
+ .flatten()
+ });
- let rustc = rustc_dir.and_then(|rustc_dir| {
- info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source");
- match CargoWorkspace::fetch_metadata(
- &rustc_dir,
- cargo_toml.parent(),
- &CargoMetadataConfig {
- features: crate::CargoFeatures::default(),
- targets: targets.clone(),
- extra_args: config.extra_args.clone(),
- extra_env: config.extra_env.clone(),
- },
- &sysroot,
- false,
- progress,
- ) {
- Ok((meta, _error)) => {
- let workspace = CargoWorkspace::new(meta, cargo_toml.clone(), Env::default());
- let build_scripts = WorkspaceBuildScripts::rustc_crates(
- &workspace,
- cargo_toml.parent(),
- &config.extra_env,
+ let rustc_cfg = s.spawn(|| {
+ rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), extra_env)
+ });
+ let data_layout = s.spawn(|| {
+ target_data_layout::get(
+ toolchain_config,
+ targets.first().map(Deref::deref),
+ extra_env,
+ ).inspect_err(|e| {
+ tracing::error!(%e, "failed fetching data layout for {cargo_toml:?} workspace")
+ })
+ });
+
+ let rustc_dir = s.spawn(|| {
+ let rustc_dir = match rustc_source {
+ Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone())
+ .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))),
+ Some(RustLibSource::Discover) => {
+ sysroot.discover_rustc_src().ok_or_else(|| {
+ Some("Failed to discover rustc source for sysroot.".to_owned())
+ })
+ }
+ None => Err(None),
+ };
+ rustc_dir.and_then(|rustc_dir| {
+ info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source");
+ match CargoWorkspace::fetch_metadata(
+ &rustc_dir,
+ workspace_dir,
+ &CargoMetadataConfig {
+ features: crate::CargoFeatures::default(),
+ targets: targets.clone(),
+ extra_args: extra_args.clone(),
+ extra_env: extra_env.clone(),
+ },
&sysroot,
- );
- Ok(Box::new((workspace, build_scripts)))
- }
- Err(e) => {
- tracing::error!(
- %e,
- "Failed to read Cargo metadata from rustc source at {rustc_dir}",
- );
- Err(Some(format!(
- "Failed to read Cargo metadata from rustc source at {rustc_dir}: {e}"
- )))
- }
- }
+ false,
+ &|_| (),
+ ) {
+ Ok((meta, _error)) => {
+ let workspace =
+ CargoWorkspace::new(meta, cargo_toml.clone(), Env::default());
+ let build_scripts = WorkspaceBuildScripts::rustc_crates(
+ &workspace,
+ workspace_dir,
+ extra_env,
+ &sysroot,
+ );
+ Ok(Box::new((workspace, build_scripts)))
+ }
+ Err(e) => {
+ tracing::error!(
+ %e,
+ "Failed to read Cargo metadata from rustc source at {rustc_dir}",
+ );
+ Err(Some(format!(
+ "Failed to read Cargo metadata from rustc source at {rustc_dir}: {e}"
+ )))
+ }
+ }
+ })
+ });
+
+ let cargo_metadata = s.spawn(|| {
+ CargoWorkspace::fetch_metadata(
+ cargo_toml,
+ workspace_dir,
+ &CargoMetadataConfig {
+ features: features.clone(),
+ targets: targets.clone(),
+ extra_args: extra_args.clone(),
+ extra_env: extra_env.clone(),
+ },
+ &sysroot,
+ false,
+ &|_| (),
+ )
+ });
+ let loaded_sysroot = s.spawn(|| {
+ sysroot.load_workspace(&RustSourceWorkspaceConfig::CargoMetadata(
+ sysroot_metadata_config(extra_env, &targets),
+ ))
+ });
+ let cargo_config_extra_env =
+ s.spawn(|| cargo_config_env(cargo_toml, extra_env, &sysroot));
+ thread::Result::Ok((
+ toolchain.join()?,
+ rustc_cfg.join()?,
+ data_layout.join()?,
+ rustc_dir.join()?,
+ loaded_sysroot.join()?,
+ cargo_metadata.join()?,
+ cargo_config_extra_env.join()?,
+ ))
});
- let (meta, error) = CargoWorkspace::fetch_metadata(
- cargo_toml,
- cargo_toml.parent(),
- &CargoMetadataConfig {
- features: config.features.clone(),
- targets,
- extra_args: config.extra_args.clone(),
- extra_env: config.extra_env.clone(),
- },
- &sysroot,
- false,
- progress,
- )
- .with_context(|| {
+ let (
+ toolchain,
+ rustc_cfg,
+ data_layout,
+ rustc,
+ loaded_sysroot,
+ cargo_metadata,
+ cargo_config_extra_env,
+ ) = match join {
+ Ok(it) => it,
+ Err(e) => std::panic::resume_unwind(e),
+ };
+
+ let (meta, error) = cargo_metadata.with_context(|| {
format!(
"Failed to read Cargo metadata from Cargo.toml file {cargo_toml}, {toolchain:?}",
)
})?;
- let cargo_config_extra_env = cargo_config_env(cargo_toml, &config.extra_env, &sysroot);
let cargo = CargoWorkspace::new(meta, cargo_toml.clone(), cargo_config_extra_env);
+ if let Some(loaded_sysroot) = loaded_sysroot {
+ sysroot.set_workspace(loaded_sysroot);
+ }
Ok(ProjectWorkspace {
kind: ProjectWorkspaceKind::Cargo {
@@ -325,35 +388,70 @@ impl ProjectWorkspace {
build_scripts: WorkspaceBuildScripts::default(),
rustc,
error: error.map(Arc::new),
- set_test: config.set_test,
},
sysroot,
rustc_cfg,
- cfg_overrides,
+ cfg_overrides: cfg_overrides.clone(),
toolchain,
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
- extra_includes: config.extra_includes.clone(),
+ extra_includes: extra_includes.clone(),
+ set_test: *set_test,
})
}
- pub fn load_inline(project_json: ProjectJson, config: &CargoConfig) -> ProjectWorkspace {
+ pub fn load_inline(
+ project_json: ProjectJson,
+ config: &CargoConfig,
+ progress: &dyn Fn(String),
+ ) -> ProjectWorkspace {
+ progress("Discovering sysroot".to_owned());
let mut sysroot =
Sysroot::new(project_json.sysroot.clone(), project_json.sysroot_src.clone());
- sysroot.load_workspace(&SysrootSourceWorkspaceConfig::Stitched);
+ let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::Stitched);
+ if let Some(loaded_sysroot) = loaded_sysroot {
+ sysroot.set_workspace(loaded_sysroot);
+ }
+
+ tracing::info!(workspace = %project_json.manifest_or_root(), src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot");
+ progress("Querying project metadata".to_owned());
let query_config = QueryConfig::Rustc(&sysroot, project_json.path().as_ref());
- let toolchain = version::get(query_config, &config.extra_env).ok().flatten();
+ let targets = target_tuple::get(query_config, config.target.as_deref(), &config.extra_env)
+ .unwrap_or_default();
+
+ // We spawn a bunch of processes to query various information about the workspace's
+ // toolchain and sysroot
+ // We can speed up loading a bit by spawning all of these processes in parallel (especially
+ // on systems where process spawning is delayed)
+ let join = thread::scope(|s| {
+ let toolchain =
+ s.spawn(|| version::get(query_config, &config.extra_env).ok().flatten());
+ let rustc_cfg = s.spawn(|| {
+ rustc_cfg::get(query_config, targets.first().map(Deref::deref), &config.extra_env)
+ });
+ let data_layout = s.spawn(|| {
+ target_data_layout::get(
+ query_config,
+ targets.first().map(Deref::deref),
+ &config.extra_env,
+ )
+ });
+ thread::Result::Ok((toolchain.join()?, rustc_cfg.join()?, data_layout.join()?))
+ });
+
+ let (toolchain, rustc_cfg, target_layout) = match join {
+ Ok(it) => it,
+ Err(e) => std::panic::resume_unwind(e),
+ };
- let target = config.target.as_deref();
- let rustc_cfg = rustc_cfg::get(query_config, target, &config.extra_env);
- let data_layout = target_data_layout::get(query_config, target, &config.extra_env);
ProjectWorkspace {
kind: ProjectWorkspaceKind::Json(project_json),
sysroot,
rustc_cfg,
toolchain,
- target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
+ target_layout: target_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
cfg_overrides: config.cfg_overrides.clone(),
extra_includes: config.extra_includes.clone(),
+ set_test: config.set_test,
}
}
@@ -363,7 +461,7 @@ impl ProjectWorkspace {
) -> anyhow::Result<ProjectWorkspace> {
let dir = detached_file.parent();
let mut sysroot = match &config.sysroot {
- Some(RustLibSource::Path(path)) => Sysroot::discover_sysroot_src_dir(path.clone()),
+ Some(RustLibSource::Path(path)) => Sysroot::discover_rust_lib_src_dir(path.clone()),
Some(RustLibSource::Discover) => Sysroot::discover(dir, &config.extra_env),
None => Sysroot::empty(),
};
@@ -374,9 +472,12 @@ impl ProjectWorkspace {
.unwrap_or_default();
let rustc_cfg = rustc_cfg::get(query_config, None, &config.extra_env);
let data_layout = target_data_layout::get(query_config, None, &config.extra_env);
- sysroot.load_workspace(&SysrootSourceWorkspaceConfig::CargoMetadata(
+ let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::CargoMetadata(
sysroot_metadata_config(&config.extra_env, &targets),
));
+ if let Some(loaded_sysroot) = loaded_sysroot {
+ sysroot.set_workspace(loaded_sysroot);
+ }
let cargo_script = CargoWorkspace::fetch_metadata(
detached_file,
@@ -406,7 +507,6 @@ impl ProjectWorkspace {
kind: ProjectWorkspaceKind::DetachedFile {
file: detached_file.to_owned(),
cargo: cargo_script,
- set_test: config.set_test,
},
sysroot,
rustc_cfg,
@@ -414,6 +514,7 @@ impl ProjectWorkspace {
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
cfg_overrides: config.cfg_overrides.clone(),
extra_includes: config.extra_includes.clone(),
+ set_test: config.set_test,
})
}
@@ -545,7 +646,7 @@ impl ProjectWorkspace {
pub fn to_roots(&self) -> Vec<PackageRoot> {
let mk_sysroot = || {
let mut r = match self.sysroot.workspace() {
- SysrootWorkspace::Workspace(ws) => ws
+ RustLibSrcWorkspace::Workspace(ws) => ws
.packages()
.filter_map(|pkg| {
if ws[pkg].is_local {
@@ -566,12 +667,17 @@ impl ProjectWorkspace {
Some(PackageRoot { is_local: false, include, exclude })
})
.collect(),
- SysrootWorkspace::Stitched(_) | SysrootWorkspace::Empty => vec![],
+ RustLibSrcWorkspace::Stitched(_) | RustLibSrcWorkspace::Empty => vec![],
};
r.push(PackageRoot {
is_local: false,
- include: self.sysroot.src_root().map(|it| it.to_path_buf()).into_iter().collect(),
+ include: self
+ .sysroot
+ .rust_lib_src_root()
+ .map(|it| it.to_path_buf())
+ .into_iter()
+ .collect(),
exclude: Vec::new(),
});
r
@@ -593,7 +699,7 @@ impl ProjectWorkspace {
.into_iter()
.chain(mk_sysroot())
.collect::<Vec<_>>(),
- ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts, error: _, set_test: _ } => {
+ ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts, error: _ } => {
cargo
.packages()
.map(|pkg| {
@@ -728,8 +834,9 @@ impl ProjectWorkspace {
sysroot,
extra_env,
cfg_overrides,
+ self.set_test,
),
- ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts, error: _, set_test } => {
+ ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts, error: _ } => {
cargo_to_crate_graph(
load,
rustc.as_ref().map(|a| a.as_ref()).ok(),
@@ -738,10 +845,10 @@ impl ProjectWorkspace {
rustc_cfg.clone(),
cfg_overrides,
build_scripts,
- *set_test,
+ self.set_test,
)
}
- ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, set_test, .. } => {
+ ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, .. } => {
if let Some((cargo, build_scripts, _)) = cargo_script {
cargo_to_crate_graph(
&mut |path| load(path),
@@ -751,7 +858,7 @@ impl ProjectWorkspace {
rustc_cfg.clone(),
cfg_overrides,
build_scripts,
- *set_test,
+ self.set_test,
)
} else {
detached_file_to_crate_graph(
@@ -760,7 +867,7 @@ impl ProjectWorkspace {
file,
sysroot,
cfg_overrides,
- *set_test,
+ self.set_test,
)
}
}
@@ -782,34 +889,22 @@ impl ProjectWorkspace {
} = other;
(match (kind, o_kind) {
(
- ProjectWorkspaceKind::Cargo {
- cargo,
- rustc,
- build_scripts: _,
- error: _,
- set_test: _,
- },
+ ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts: _, error: _ },
ProjectWorkspaceKind::Cargo {
cargo: o_cargo,
rustc: o_rustc,
build_scripts: _,
error: _,
- set_test: _,
},
) => cargo == o_cargo && rustc == o_rustc,
(ProjectWorkspaceKind::Json(project), ProjectWorkspaceKind::Json(o_project)) => {
project == o_project
}
(
- ProjectWorkspaceKind::DetachedFile {
- file,
- cargo: Some((cargo_script, _, _)),
- set_test: _,
- },
+ ProjectWorkspaceKind::DetachedFile { file, cargo: Some((cargo_script, _, _)) },
ProjectWorkspaceKind::DetachedFile {
file: o_file,
cargo: Some((o_cargo_script, _, _)),
- set_test: _,
},
) => file == o_file && cargo_script == o_cargo_script,
_ => return false,
@@ -837,13 +932,13 @@ fn project_json_to_crate_graph(
sysroot: &Sysroot,
extra_env: &FxHashMap<String, String>,
override_cfg: &CfgOverrides,
+ set_test: bool,
) -> (CrateGraph, ProcMacroPaths) {
let mut res = (CrateGraph::default(), ProcMacroPaths::default());
let (crate_graph, proc_macros) = &mut res;
let (public_deps, libproc_macro) =
sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load);
- let r_a_cfg_flag = CfgAtom::Flag(sym::rust_analyzer.clone());
let mut cfg_cache: FxHashMap<&str, Vec<CfgAtom>> = FxHashMap::default();
let idx_to_crate_id: FxHashMap<CrateArrayIdx, CrateId> = project
@@ -862,6 +957,8 @@ fn project_json_to_crate_graph(
proc_macro_dylib_path,
is_proc_macro,
repository,
+ is_workspace_member,
+ proc_macro_cwd,
..
},
file_id,
@@ -879,19 +976,28 @@ fn project_json_to_crate_graph(
None => &rustc_cfg,
};
- let mut cfg_options = target_cfgs
- .iter()
- .chain(cfg.iter())
- .chain(iter::once(&r_a_cfg_flag))
- .cloned()
- .collect();
- override_cfg.apply(
- &mut cfg_options,
- display_name
- .as_ref()
- .map(|it| it.canonical_name().as_str())
- .unwrap_or_default(),
- );
+ let cfg_options = {
+ let mut cfg_options: CfgOptions =
+ target_cfgs.iter().chain(cfg.iter()).cloned().collect();
+
+ if *is_workspace_member {
+ if set_test {
+ // Add test cfg for local crates
+ cfg_options.insert_atom(sym::test.clone());
+ }
+ cfg_options.insert_atom(sym::rust_analyzer.clone());
+ }
+
+ override_cfg.apply(
+ &mut cfg_options,
+ display_name
+ .as_ref()
+ .map(|it| it.canonical_name().as_str())
+ .unwrap_or_default(),
+ );
+ cfg_options
+ };
+
let crate_graph_crate_id = crate_graph.add_crate_root(
file_id,
*edition,
@@ -900,7 +1006,6 @@ fn project_json_to_crate_graph(
Arc::new(cfg_options),
None,
env,
- *is_proc_macro,
if let Some(name) = display_name.clone() {
CrateOrigin::Local {
repo: repository.clone(),
@@ -909,6 +1014,8 @@ fn project_json_to_crate_graph(
} else {
CrateOrigin::Local { repo: None, name: None }
},
+ *is_proc_macro,
+ proc_macro_cwd.clone(),
);
debug!(
?crate_graph_crate_id,
@@ -1178,11 +1285,12 @@ fn detached_file_to_crate_graph(
cfg_options.clone(),
None,
Env::default(),
- false,
CrateOrigin::Local {
repo: None,
name: display_name.map(|n| n.canonical_name().to_owned()),
},
+ false,
+ None,
);
public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate);
@@ -1343,8 +1451,13 @@ fn add_target_crate_root(
Arc::new(cfg_options),
potential_cfg_options.map(Arc::new),
env,
- matches!(kind, TargetKind::Lib { is_proc_macro: true }),
origin,
+ matches!(kind, TargetKind::Lib { is_proc_macro: true }),
+ Some(if pkg.is_member {
+ cargo.workspace_root().to_path_buf()
+ } else {
+ pkg.manifest.parent().to_path_buf()
+ }),
);
if let TargetKind::Lib { is_proc_macro: true } = kind {
let proc_macro = match build_data {
@@ -1385,7 +1498,7 @@ fn sysroot_to_crate_graph(
) -> (SysrootPublicDeps, Option<CrateId>) {
let _p = tracing::info_span!("sysroot_to_crate_graph").entered();
match sysroot.workspace() {
- SysrootWorkspace::Workspace(cargo) => {
+ RustLibSrcWorkspace::Workspace(cargo) => {
let (mut cg, mut pm) = cargo_to_crate_graph(
load,
None,
@@ -1460,7 +1573,7 @@ fn sysroot_to_crate_graph(
(SysrootPublicDeps { deps: pub_deps }, libproc_macro)
}
- SysrootWorkspace::Stitched(stitched) => {
+ RustLibSrcWorkspace::Stitched(stitched) => {
let cfg_options = Arc::new({
let mut cfg_options = CfgOptions::default();
cfg_options.extend(rustc_cfg);
@@ -1468,7 +1581,7 @@ fn sysroot_to_crate_graph(
cfg_options.insert_atom(sym::miri.clone());
cfg_options
});
- let sysroot_crates: FxHashMap<SysrootCrate, CrateId> = stitched
+ let sysroot_crates: FxHashMap<RustLibSrcCrate, CrateId> = stitched
.crates()
.filter_map(|krate| {
let file_id = load(&stitched[krate].root)?;
@@ -1482,8 +1595,9 @@ fn sysroot_to_crate_graph(
cfg_options.clone(),
None,
Env::default(),
- false,
CrateOrigin::Lang(LangCrateOrigin::from(&*stitched[krate].name)),
+ false,
+ None,
);
Some((krate, crate_id))
})
@@ -1513,7 +1627,7 @@ fn sysroot_to_crate_graph(
stitched.proc_macro().and_then(|it| sysroot_crates.get(&it).copied());
(public_deps, libproc_macro)
}
- SysrootWorkspace::Empty => (SysrootPublicDeps { deps: vec![] }, None),
+ RustLibSrcWorkspace::Empty => (SysrootPublicDeps { deps: vec![] }, None),
}
}
diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
index 880e90c52a..fae0b6fcca 100644
--- a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
+++ b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
@@ -61,6 +61,11 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
+ ),
},
1: CrateData {
root_file_id: FileId(
@@ -132,6 +137,11 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
+ ),
},
2: CrateData {
root_file_id: FileId(
@@ -203,6 +213,11 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
+ ),
},
3: CrateData {
root_file_id: FileId(
@@ -274,6 +289,11 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
+ ),
},
4: CrateData {
root_file_id: FileId(
@@ -341,5 +361,10 @@
name: "libc",
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ ),
+ ),
},
} \ No newline at end of file
diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
index 880e90c52a..fae0b6fcca 100644
--- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
+++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
@@ -61,6 +61,11 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
+ ),
},
1: CrateData {
root_file_id: FileId(
@@ -132,6 +137,11 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
+ ),
},
2: CrateData {
root_file_id: FileId(
@@ -203,6 +213,11 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
+ ),
},
3: CrateData {
root_file_id: FileId(
@@ -274,6 +289,11 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
+ ),
},
4: CrateData {
root_file_id: FileId(
@@ -341,5 +361,10 @@
name: "libc",
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ ),
+ ),
},
} \ No newline at end of file
diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
index 7746acd225..566174882d 100644
--- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
+++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
@@ -60,6 +60,11 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
+ ),
},
1: CrateData {
root_file_id: FileId(
@@ -130,6 +135,11 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
+ ),
},
2: CrateData {
root_file_id: FileId(
@@ -200,6 +210,11 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
+ ),
},
3: CrateData {
root_file_id: FileId(
@@ -270,6 +285,11 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
+ ),
},
4: CrateData {
root_file_id: FileId(
@@ -337,5 +357,10 @@
name: "libc",
},
is_proc_macro: false,
+ proc_macro_cwd: Some(
+ AbsPathBuf(
+ "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ ),
+ ),
},
} \ No newline at end of file
diff --git a/crates/project-model/test_data/output/rust_project_cfg_groups.txt b/crates/project-model/test_data/output/rust_project_cfg_groups.txt
index 2026ab2b8c..9b4be19c41 100644
--- a/crates/project-model/test_data/output/rust_project_cfg_groups.txt
+++ b/crates/project-model/test_data/output/rust_project_cfg_groups.txt
@@ -38,6 +38,7 @@
Alloc,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
1: CrateData {
root_file_id: FileId(
@@ -69,6 +70,7 @@
Core,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
2: CrateData {
root_file_id: FileId(
@@ -100,6 +102,7 @@
Other,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
3: CrateData {
root_file_id: FileId(
@@ -131,6 +134,7 @@
Other,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
4: CrateData {
root_file_id: FileId(
@@ -179,6 +183,7 @@
ProcMacro,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
5: CrateData {
root_file_id: FileId(
@@ -210,6 +215,7 @@
Other,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
6: CrateData {
root_file_id: FileId(
@@ -306,6 +312,7 @@
Std,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
7: CrateData {
root_file_id: FileId(
@@ -337,6 +344,7 @@
Other,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
8: CrateData {
root_file_id: FileId(
@@ -368,6 +376,7 @@
Test,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
9: CrateData {
root_file_id: FileId(
@@ -399,6 +408,7 @@
Other,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
10: CrateData {
root_file_id: FileId(
@@ -420,6 +430,7 @@
"group1_other_cfg=other_config",
"group2_cfg=yet_another_config",
"rust_analyzer",
+ "test",
"true",
],
),
@@ -476,6 +487,7 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: None,
},
11: CrateData {
root_file_id: FileId(
@@ -496,6 +508,7 @@
"group2_cfg=fourth_config",
"group2_cfg=yet_another_config",
"rust_analyzer",
+ "test",
"true",
"unrelated_cfg",
],
@@ -553,5 +566,6 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: None,
},
} \ No newline at end of file
diff --git a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
index a0e14b8fcb..4c8e66e8e9 100644
--- a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
+++ b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
@@ -38,6 +38,7 @@
Alloc,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
1: CrateData {
root_file_id: FileId(
@@ -69,6 +70,7 @@
Core,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
2: CrateData {
root_file_id: FileId(
@@ -100,6 +102,7 @@
Other,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
3: CrateData {
root_file_id: FileId(
@@ -131,6 +134,7 @@
Other,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
4: CrateData {
root_file_id: FileId(
@@ -179,6 +183,7 @@
ProcMacro,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
5: CrateData {
root_file_id: FileId(
@@ -210,6 +215,7 @@
Other,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
6: CrateData {
root_file_id: FileId(
@@ -306,6 +312,7 @@
Std,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
7: CrateData {
root_file_id: FileId(
@@ -337,6 +344,7 @@
Other,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
8: CrateData {
root_file_id: FileId(
@@ -368,6 +376,7 @@
Test,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
9: CrateData {
root_file_id: FileId(
@@ -399,6 +408,7 @@
Other,
),
is_proc_macro: false,
+ proc_macro_cwd: None,
},
10: CrateData {
root_file_id: FileId(
@@ -417,6 +427,7 @@
cfg_options: CfgOptions(
[
"rust_analyzer",
+ "test",
"true",
],
),
@@ -473,5 +484,6 @@
),
},
is_proc_macro: false,
+ proc_macro_cwd: None,
},
} \ No newline at end of file
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index c24cbb4a31..b8ce2b7430 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -37,7 +37,7 @@ rustc-hash.workspace = true
serde_json = { workspace = true, features = ["preserve_order"] }
serde.workspace = true
serde_derive.workspace = true
-tenthash = "0.4.0"
+tenthash = "1.0.0"
num_cpus = "1.15.0"
mimalloc = { version = "0.1.30", default-features = false, optional = true }
lsp-server.workspace = true
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 18c27c8449..4fc6180920 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -13,7 +13,7 @@ use hir::{
ModuleDef, Name,
};
use hir_def::{
- body::BodySourceMap,
+ expr_store::BodySourceMap,
hir::{ExprId, PatId},
SyntheticSyntax,
};
@@ -1072,6 +1072,7 @@ impl flags::AnalysisStats {
param_names_for_lifetime_elision_hints: true,
hide_named_constructor_hints: false,
hide_closure_initialization_hints: false,
+ hide_closure_parameter_hints: false,
closure_style: hir::ClosureStyle::ImplFn,
max_length: Some(25),
closing_brace_hints_min_lines: Some(20),
diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs
index 28f25975d6..6a3ceb640b 100644
--- a/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -4,7 +4,7 @@
use project_model::{CargoConfig, RustLibSource};
use rustc_hash::FxHashSet;
-use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
+use hir::{db::HirDatabase, sym, Crate, HirFileIdExt, Module};
use ide::{AnalysisHost, AssistResolveStrategy, Diagnostic, DiagnosticsConfig, Severity};
use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase};
use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
@@ -60,7 +60,7 @@ impl flags::Diagnostics {
let file_id = module.definition_source_file_id(db).original_file(db);
if !visited_files.contains(&file_id) {
let crate_name =
- module.krate().display_name(db).as_deref().unwrap_or("unknown").to_owned();
+ module.krate().display_name(db).as_deref().unwrap_or(&sym::unknown).to_owned();
println!(
"processing crate: {crate_name}, module: {}",
_vfs.file_path(file_id.into())
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index 199f61e70f..e9ca12deaf 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -12,8 +12,8 @@ use paths::Utf8PathBuf;
use profile::StopWatch;
use project_model::toolchain_info::{target_data_layout, QueryConfig};
use project_model::{
- CargoConfig, ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, RustLibSource, Sysroot,
- SysrootSourceWorkspaceConfig,
+ CargoConfig, ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, RustLibSource,
+ RustSourceWorkspaceConfig, Sysroot,
};
use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
@@ -75,7 +75,11 @@ impl Tester {
};
let mut sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env);
- sysroot.load_workspace(&SysrootSourceWorkspaceConfig::default_cargo());
+ let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo());
+ if let Some(loaded_sysroot) = loaded_sysroot {
+ sysroot.set_workspace(loaded_sysroot);
+ }
+
let data_layout = target_data_layout::get(
QueryConfig::Rustc(&sysroot, tmp_file.parent().unwrap().as_ref()),
None,
@@ -86,7 +90,6 @@ impl Tester {
kind: ProjectWorkspaceKind::DetachedFile {
file: ManifestPath::try_from(tmp_file).unwrap(),
cargo: None,
- set_test: true,
},
sysroot,
rustc_cfg: vec![],
@@ -94,6 +97,7 @@ impl Tester {
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
cfg_overrides: Default::default(),
extra_includes: vec![],
+ set_test: true,
};
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: false,
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index dc0f722aae..fe75872105 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -139,45 +139,42 @@ impl flags::Scip {
let mut occurrences = Vec::new();
let mut symbols = Vec::new();
- tokens.into_iter().for_each(|(text_range, id)| {
+ for (text_range, id) in tokens.into_iter() {
let token = si.tokens.get(id).unwrap();
- let (symbol, enclosing_symbol, is_inherent_impl) =
- if let Some(TokenSymbols { symbol, enclosing_symbol, is_inherent_impl }) =
- symbol_generator.token_symbols(id, token)
- {
- (symbol, enclosing_symbol, is_inherent_impl)
- } else {
- ("".to_owned(), None, false)
- };
+ let Some(TokenSymbols { symbol, enclosing_symbol, is_inherent_impl }) =
+ symbol_generator.token_symbols(id, token)
+ else {
+ // token did not have a moniker, so there is no reasonable occurrence to emit
+ // see ide::moniker::def_to_moniker
+ continue;
+ };
- if !symbol.is_empty() {
- let is_defined_in_this_document = match token.definition {
- Some(def) => def.file_id == file_id,
- _ => false,
- };
- if is_defined_in_this_document {
- if token_ids_emitted.insert(id) {
- // token_ids_emitted does deduplication. This checks that this results
- // in unique emitted symbols, as otherwise references are ambiguous.
- let should_emit = record_error_if_symbol_already_used(
+ let is_defined_in_this_document = match token.definition {
+ Some(def) => def.file_id == file_id,
+ _ => false,
+ };
+ if is_defined_in_this_document {
+ if token_ids_emitted.insert(id) {
+ // token_ids_emitted does deduplication. This checks that this results
+ // in unique emitted symbols, as otherwise references are ambiguous.
+ let should_emit = record_error_if_symbol_already_used(
+ symbol.clone(),
+ is_inherent_impl,
+ relative_path.as_str(),
+ &line_index,
+ text_range,
+ );
+ if should_emit {
+ symbols.push(compute_symbol_info(
symbol.clone(),
- is_inherent_impl,
- relative_path.as_str(),
- &line_index,
- text_range,
- );
- if should_emit {
- symbols.push(compute_symbol_info(
- symbol.clone(),
- enclosing_symbol,
- token,
- ));
- }
+ enclosing_symbol,
+ token,
+ ));
}
- } else {
- token_ids_referenced.insert(id);
}
+ } else {
+ token_ids_referenced.insert(id);
}
// If the range of the def and the range of the token are the same, this must be the definition.
@@ -202,7 +199,7 @@ impl flags::Scip {
special_fields: Default::default(),
enclosing_range: Vec::new(),
});
- });
+ }
if occurrences.is_empty() {
continue;
@@ -444,14 +441,14 @@ impl SymbolGenerator {
MonikerResult::Moniker(moniker) => TokenSymbols {
symbol: scip::symbol::format_symbol(moniker_to_symbol(moniker)),
enclosing_symbol: None,
- is_inherent_impl: moniker
- .identifier
- .description
- .get(moniker.identifier.description.len() - 2)
- .is_some_and(|descriptor| {
+ is_inherent_impl: match &moniker.identifier.description[..] {
+ // inherent impls are represented as impl#[SelfType]
+ [.., descriptor, _] => {
descriptor.desc == MonikerDescriptorKind::Type
&& descriptor.name == "impl"
- }),
+ }
+ _ => false,
+ },
},
MonikerResult::Local { enclosing_moniker } => {
let local_symbol = scip::types::Symbol::new_local(local_count);
@@ -549,7 +546,9 @@ mod test {
continue;
}
for &(range, id) in &file.tokens {
- if range.contains(offset - TextSize::from(1)) {
+ // check if cursor is within token, ignoring token for the module defined by the file (whose range is the whole file)
+ if range.start() != TextSize::from(0) && range.contains(offset - TextSize::from(1))
+ {
let token = si.tokens.get(id).unwrap();
found_symbol = match token.moniker.as_ref() {
None => None,
@@ -885,7 +884,7 @@ pub mod example_mod {
);
let file = si.files.first().unwrap();
- let (_, token_id) = file.tokens.first().unwrap();
+ let (_, token_id) = file.tokens.get(1).unwrap(); // first token is file module, second is `bar`
let token = si.tokens.get(*token_id).unwrap();
assert_eq!(token.documentation.as_ref().map(|d| d.as_str()), Some("foo"));
diff --git a/crates/rust-analyzer/src/cli/unresolved_references.rs b/crates/rust-analyzer/src/cli/unresolved_references.rs
index 986bd018b4..021b1bff39 100644
--- a/crates/rust-analyzer/src/cli/unresolved_references.rs
+++ b/crates/rust-analyzer/src/cli/unresolved_references.rs
@@ -1,5 +1,5 @@
//! Reports references in code that the IDE layer cannot resolve.
-use hir::{db::HirDatabase, AnyDiagnostic, Crate, HirFileIdExt as _, Module, Semantics};
+use hir::{db::HirDatabase, sym, AnyDiagnostic, Crate, HirFileIdExt as _, Module, Semantics};
use ide::{AnalysisHost, RootDatabase, TextRange};
use ide_db::{
base_db::{SourceDatabase, SourceRootDatabase},
@@ -66,7 +66,7 @@ impl flags::UnresolvedReferences {
let file_id = module.definition_source_file_id(db).original_file(db);
if !visited_files.contains(&file_id) {
let crate_name =
- module.krate().display_name(db).as_deref().unwrap_or("unknown").to_owned();
+ module.krate().display_name(db).as_deref().unwrap_or(&sym::unknown).to_owned();
let file_path = vfs.file_path(file_id.into());
eprintln!("processing crate: {crate_name}, module: {file_path}",);
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 44325fa1a2..1dce0bea1a 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -84,10 +84,10 @@ config_data! {
completion_snippets_custom: FxHashMap<String, SnippetDef> = Config::completion_snippets_default(),
- /// These directories will be ignored by rust-analyzer. They are
+ /// These paths (files/directories) will be ignored by rust-analyzer. They are
/// relative to the workspace root, and globs are not supported. You may
/// also need to add the folders to Code's `files.watcherExclude`.
- files_excludeDirs: Vec<Utf8PathBuf> = vec![],
+ files_exclude | files_excludeDirs: Vec<Utf8PathBuf> = vec![],
@@ -208,6 +208,8 @@ config_data! {
/// Whether to hide inlay type hints for `let` statements that initialize to a closure.
/// Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.
inlayHints_typeHints_hideClosureInitialization: bool = false,
+ /// Whether to hide inlay parameter type hints for closures.
+ inlayHints_typeHints_hideClosureParameter: bool = false,
/// Whether to hide inlay type hints for constructors.
inlayHints_typeHints_hideNamedConstructor: bool = false,
@@ -528,7 +530,7 @@ config_data! {
imports_granularity_enforce: bool = false,
/// How imports should be grouped into use statements.
imports_granularity_group: ImportGranularityDef = ImportGranularityDef::Crate,
- /// Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines.
+ /// Group inserted imports by the [following order](https://rust-analyzer.github.io/manual.html#auto-import). Groups are separated by newlines.
imports_group_enable: bool = true,
/// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
imports_merge_glob: bool = true,
@@ -1474,6 +1476,7 @@ impl Config {
prefer_absolute: self.imports_prefixExternPrelude(source_root).to_owned(),
term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64,
term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(),
+ code_action_grouping: self.code_action_group(),
}
}
@@ -1666,6 +1669,9 @@ impl Config {
hide_closure_initialization_hints: self
.inlayHints_typeHints_hideClosureInitialization()
.to_owned(),
+ hide_closure_parameter_hints: self
+ .inlayHints_typeHints_hideClosureParameter()
+ .to_owned(),
closure_style: match self.inlayHints_closureStyle() {
ClosureStyle::ImplFn => hir::ClosureStyle::ImplFn,
ClosureStyle::RustAnalyzer => hir::ClosureStyle::RANotation,
@@ -1787,7 +1793,7 @@ impl Config {
fn discovered_projects(&self) -> Vec<ManifestOrProjectJson> {
let exclude_dirs: Vec<_> =
- self.files_excludeDirs().iter().map(|p| self.root_path.join(p)).collect();
+ self.files_exclude().iter().map(|p| self.root_path.join(p)).collect();
let mut projects = vec![];
for fs_proj in &self.discovered_projects_from_filesystem {
@@ -1909,10 +1915,14 @@ impl Config {
}
_ => FilesWatcher::Server,
},
- exclude: self.files_excludeDirs().iter().map(|it| self.root_path.join(it)).collect(),
+ exclude: self.excluded().collect(),
}
}
+ pub fn excluded(&self) -> impl Iterator<Item = AbsPathBuf> + use<'_> {
+ self.files_exclude().iter().map(|it| self.root_path.join(it))
+ }
+
pub fn notifications(&self) -> NotificationsConfig {
NotificationsConfig {
cargo_toml_not_found: self.notifications_cargoTomlNotFound().to_owned(),
@@ -3624,21 +3634,9 @@ fn manual(fields: &[SchemaField]) -> String {
let name = format!("rust-analyzer.{}", field.replace('_', "."));
let doc = doc_comment_to_string(doc);
if default.contains('\n') {
- format_to_acc!(
- acc,
- r#"[[{name}]]{name}::
-+
---
-Default:
-----
-{default}
-----
-{doc}
---
-"#
- )
+ format_to_acc!(acc, " **{name}**\n\nDefault:\n\n```{default}\n\n```\n\n {doc}\n\n ")
} else {
- format_to_acc!(acc, "[[{name}]]{name} (default: `{default}`)::\n+\n--\n{doc}--\n")
+ format_to_acc!(acc, "**{name}** (default: {default})\n\n {doc}\n\n")
}
})
}
@@ -3716,7 +3714,7 @@ mod tests {
#[test]
fn generate_config_documentation() {
- let docs_path = project_root().join("docs/user/generated_config.adoc");
+ let docs_path = project_root().join("docs/book/src/configuration_generated.md");
let expected = FullConfigInput::manual();
ensure_file_contents(docs_path.as_std_path(), &expected);
}
@@ -3805,8 +3803,10 @@ mod tests {
(config, _, _) = config.apply_change(change);
assert_eq!(config.cargo_targetDir(None), &Some(TargetDirectory::UseSubdirectory(true)));
+ let target =
+ Utf8PathBuf::from(std::env::var("CARGO_TARGET_DIR").unwrap_or("target".to_owned()));
assert!(
- matches!(config.flycheck(None), FlycheckConfig::CargoCommand { options, .. } if options.target_dir == Some(Utf8PathBuf::from("target/rust-analyzer")))
+ matches!(config.flycheck(None), FlycheckConfig::CargoCommand { options, .. } if options.target_dir == Some(target.join("rust-analyzer")))
);
}
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index 0f2d7823b7..70105cda00 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -396,6 +396,7 @@ impl GlobalState {
|| !self.config.same_source_root_parent_map(&self.local_roots_parent_map)
{
let config_change = {
+ let _p = span!(Level::INFO, "GlobalState::process_changes/config_change").entered();
let user_config_path = (|| {
let mut p = Config::user_config_dir_path()?;
p.push("rust-analyzer.toml");
@@ -569,12 +570,12 @@ impl GlobalState {
if let Some((method, start)) = self.req_queue.incoming.complete(&response.id) {
if let Some(err) = &response.error {
if err.message.starts_with("server panicked") {
- self.poke_rust_analyzer_developer(format!("{}, check the log", err.message))
+ self.poke_rust_analyzer_developer(format!("{}, check the log", err.message));
}
}
let duration = start.elapsed();
- tracing::debug!("handled {} - ({}) in {:0.2?}", method, response.id, duration);
+ tracing::debug!(name: "message response", method, %response.id, duration = format_args!("{:0.2?}", duration));
self.send(response.into());
}
}
@@ -649,7 +650,8 @@ impl GlobalStateSnapshot {
RwLockReadGuard::map(self.vfs.read(), |(it, _)| it)
}
- pub(crate) fn url_to_file_id(&self, url: &Url) -> anyhow::Result<FileId> {
+ /// Returns `None` if the file was excluded.
+ pub(crate) fn url_to_file_id(&self, url: &Url) -> anyhow::Result<Option<FileId>> {
url_to_file_id(&self.vfs_read(), url)
}
@@ -657,7 +659,8 @@ impl GlobalStateSnapshot {
file_id_to_url(&self.vfs_read(), id)
}
- pub(crate) fn vfs_path_to_file_id(&self, vfs_path: &VfsPath) -> anyhow::Result<FileId> {
+ /// Returns `None` if the file was excluded.
+ pub(crate) fn vfs_path_to_file_id(&self, vfs_path: &VfsPath) -> anyhow::Result<Option<FileId>> {
vfs_path_to_file_id(&self.vfs_read(), vfs_path)
}
@@ -749,14 +752,21 @@ pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {
url_from_abs_path(path)
}
-pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> anyhow::Result<FileId> {
+/// Returns `None` if the file was excluded.
+pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> anyhow::Result<Option<FileId>> {
let path = from_proto::vfs_path(url)?;
- let res = vfs.file_id(&path).ok_or_else(|| anyhow::format_err!("file not found: {path}"))?;
- Ok(res)
+ vfs_path_to_file_id(vfs, &path)
}
-pub(crate) fn vfs_path_to_file_id(vfs: &vfs::Vfs, vfs_path: &VfsPath) -> anyhow::Result<FileId> {
- let res =
+/// Returns `None` if the file was excluded.
+pub(crate) fn vfs_path_to_file_id(
+ vfs: &vfs::Vfs,
+ vfs_path: &VfsPath,
+) -> anyhow::Result<Option<FileId>> {
+ let (file_id, excluded) =
vfs.file_id(vfs_path).ok_or_else(|| anyhow::format_err!("file not found: {vfs_path}"))?;
- Ok(res)
+ match excluded {
+ vfs::FileExcluded::Yes => Ok(None),
+ vfs::FileExcluded::No => Ok(Some(file_id)),
+ }
}
diff --git a/crates/rust-analyzer/src/handlers/dispatch.rs b/crates/rust-analyzer/src/handlers/dispatch.rs
index ff50f7533a..4683877db6 100644
--- a/crates/rust-analyzer/src/handlers/dispatch.rs
+++ b/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -118,7 +118,7 @@ impl RequestDispatcher<'_> {
}
return self;
}
- self.on_with_thread_intent::<true, ALLOW_RETRYING, R>(
+ self.on_with_thread_intent::<false, ALLOW_RETRYING, R>(
ThreadIntent::Worker,
f,
Self::content_modified_error,
@@ -147,7 +147,7 @@ impl RequestDispatcher<'_> {
}
return self;
}
- self.on_with_thread_intent::<true, false, R>(ThreadIntent::Worker, f, on_cancelled)
+ self.on_with_thread_intent::<false, false, R>(ThreadIntent::Worker, f, on_cancelled)
}
/// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not
@@ -166,7 +166,7 @@ impl RequestDispatcher<'_> {
}
return self;
}
- self.on_with_thread_intent::<true, ALLOW_RETRYING, R>(
+ self.on_with_thread_intent::<false, ALLOW_RETRYING, R>(
ThreadIntent::Worker,
f,
Self::content_modified_error,
@@ -193,7 +193,7 @@ impl RequestDispatcher<'_> {
}
return self;
}
- self.on_with_thread_intent::<true, ALLOW_RETRYING, R>(
+ self.on_with_thread_intent::<false, ALLOW_RETRYING, R>(
ThreadIntent::LatencySensitive,
f,
Self::content_modified_error,
@@ -212,7 +212,7 @@ impl RequestDispatcher<'_> {
R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
R::Result: Serialize,
{
- self.on_with_thread_intent::<false, false, R>(
+ self.on_with_thread_intent::<true, false, R>(
ThreadIntent::LatencySensitive,
f,
Self::content_modified_error,
@@ -231,7 +231,7 @@ impl RequestDispatcher<'_> {
}
}
- fn on_with_thread_intent<const MAIN_POOL: bool, const ALLOW_RETRYING: bool, R>(
+ fn on_with_thread_intent<const RUSTFMT: bool, const ALLOW_RETRYING: bool, R>(
&mut self,
intent: ThreadIntent,
f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
@@ -251,10 +251,10 @@ impl RequestDispatcher<'_> {
tracing::debug!(?params);
let world = self.global_state.snapshot();
- if MAIN_POOL {
- &mut self.global_state.task_pool.handle
- } else {
+ if RUSTFMT {
&mut self.global_state.fmt_pool.handle
+ } else {
+ &mut self.global_state.task_pool.handle
}
.spawn(intent, move || {
let result = panic::catch_unwind(move || {
diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs
index 84ba89d9f3..55344a4d6a 100644
--- a/crates/rust-analyzer/src/handlers/notification.rs
+++ b/crates/rust-analyzer/src/handlers/notification.rs
@@ -22,6 +22,7 @@ use crate::{
mem_docs::DocumentData,
reload,
target_spec::TargetSpec,
+ try_default,
};
pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> anyhow::Result<()> {
@@ -74,7 +75,16 @@ pub(crate) fn handle_did_open_text_document(
tracing::error!("duplicate DidOpenTextDocument: {}", path);
}
- state.vfs.write().0.set_file_contents(path, Some(params.text_document.text.into_bytes()));
+ if let Some(abs_path) = path.as_path() {
+ if state.config.excluded().any(|excluded| abs_path.starts_with(&excluded)) {
+ tracing::trace!("opened excluded file {abs_path}");
+ state.vfs.write().0.insert_excluded_file(path);
+ return Ok(());
+ }
+ }
+
+ let contents = params.text_document.text.into_bytes();
+ state.vfs.write().0.set_file_contents(path, Some(contents));
if state.config.discover_workspace_config().is_some() {
tracing::debug!("queuing task");
let _ = state
@@ -126,7 +136,8 @@ pub(crate) fn handle_did_close_text_document(
tracing::error!("orphan DidCloseTextDocument: {}", path);
}
- if let Some(file_id) = state.vfs.read().0.file_id(&path) {
+ // Clear diagnostics also for excluded files, just in case.
+ if let Some((file_id, _)) = state.vfs.read().0.file_id(&path) {
state.diagnostics.clear_native_for(file_id);
}
@@ -145,7 +156,7 @@ pub(crate) fn handle_did_save_text_document(
) -> anyhow::Result<()> {
if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
let snap = state.snapshot();
- let file_id = snap.vfs_path_to_file_id(&vfs_path)?;
+ let file_id = try_default!(snap.vfs_path_to_file_id(&vfs_path)?);
let sr = snap.analysis.source_root_id(file_id)?;
if state.config.script_rebuild_on_save(Some(sr)) && state.build_deps_changed {
@@ -289,7 +300,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
let _p = tracing::info_span!("run_flycheck").entered();
let file_id = state.vfs.read().0.file_id(&vfs_path);
- if let Some(file_id) = file_id {
+ if let Some((file_id, vfs::FileExcluded::No)) = file_id {
let world = state.snapshot();
let invocation_strategy_once = state.config.flycheck(None).invocation_strategy_once();
let may_flycheck_workspace = state.config.flycheck_workspace(None);
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index 39cbf53eaa..1b144d9073 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -53,6 +53,7 @@ use crate::{
},
target_spec::{CargoTargetSpec, TargetSpec},
test_runner::{CargoTestHandle, TestTarget},
+ try_default,
};
pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
@@ -83,7 +84,8 @@ pub(crate) fn handle_analyzer_status(
let mut file_id = None;
if let Some(tdi) = params.text_document {
match from_proto::file_id(&snap, &tdi.uri) {
- Ok(it) => file_id = Some(it),
+ Ok(Some(it)) => file_id = Some(it),
+ Ok(None) => {}
Err(_) => format_to!(buf, "file {} not found in vfs", tdi.uri),
}
}
@@ -141,7 +143,7 @@ pub(crate) fn handle_view_syntax_tree(
params: lsp_ext::ViewSyntaxTreeParams,
) -> anyhow::Result<String> {
let _p = tracing::info_span!("handle_view_syntax_tree").entered();
- let id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let res = snap.analysis.view_syntax_tree(id)?;
Ok(res)
}
@@ -151,7 +153,7 @@ pub(crate) fn handle_view_hir(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<String> {
let _p = tracing::info_span!("handle_view_hir").entered();
- let position = from_proto::file_position(&snap, params)?;
+ let position = try_default!(from_proto::file_position(&snap, params)?);
let res = snap.analysis.view_hir(position)?;
Ok(res)
}
@@ -161,7 +163,7 @@ pub(crate) fn handle_view_mir(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<String> {
let _p = tracing::info_span!("handle_view_mir").entered();
- let position = from_proto::file_position(&snap, params)?;
+ let position = try_default!(from_proto::file_position(&snap, params)?);
let res = snap.analysis.view_mir(position)?;
Ok(res)
}
@@ -171,7 +173,7 @@ pub(crate) fn handle_interpret_function(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<String> {
let _p = tracing::info_span!("handle_interpret_function").entered();
- let position = from_proto::file_position(&snap, params)?;
+ let position = try_default!(from_proto::file_position(&snap, params)?);
let res = snap.analysis.interpret_function(position)?;
Ok(res)
}
@@ -180,7 +182,7 @@ pub(crate) fn handle_view_file_text(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentIdentifier,
) -> anyhow::Result<String> {
- let file_id = from_proto::file_id(&snap, &params.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.uri)?);
Ok(snap.analysis.file_text(file_id)?.to_string())
}
@@ -189,7 +191,7 @@ pub(crate) fn handle_view_item_tree(
params: lsp_ext::ViewItemTreeParams,
) -> anyhow::Result<String> {
let _p = tracing::info_span!("handle_view_item_tree").entered();
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let res = snap.analysis.view_item_tree(file_id)?;
Ok(res)
}
@@ -315,7 +317,7 @@ pub(crate) fn handle_expand_macro(
params: lsp_ext::ExpandMacroParams,
) -> anyhow::Result<Option<lsp_ext::ExpandedMacro>> {
let _p = tracing::info_span!("handle_expand_macro").entered();
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let line_index = snap.file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, params.position)?;
@@ -328,7 +330,7 @@ pub(crate) fn handle_selection_range(
params: lsp_types::SelectionRangeParams,
) -> anyhow::Result<Option<Vec<lsp_types::SelectionRange>>> {
let _p = tracing::info_span!("handle_selection_range").entered();
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let line_index = snap.file_line_index(file_id)?;
let res: anyhow::Result<Vec<lsp_types::SelectionRange>> = params
.positions
@@ -371,7 +373,7 @@ pub(crate) fn handle_matching_brace(
params: lsp_ext::MatchingBraceParams,
) -> anyhow::Result<Vec<Position>> {
let _p = tracing::info_span!("handle_matching_brace").entered();
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let line_index = snap.file_line_index(file_id)?;
params
.positions
@@ -395,7 +397,7 @@ pub(crate) fn handle_join_lines(
) -> anyhow::Result<Vec<lsp_types::TextEdit>> {
let _p = tracing::info_span!("handle_join_lines").entered();
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let config = snap.config.join_lines();
let line_index = snap.file_line_index(file_id)?;
@@ -419,7 +421,7 @@ pub(crate) fn handle_on_enter(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
let _p = tracing::info_span!("handle_on_enter").entered();
- let position = from_proto::file_position(&snap, params)?;
+ let position = try_default!(from_proto::file_position(&snap, params)?);
let edit = match snap.analysis.on_enter(position)? {
None => return Ok(None),
Some(it) => it,
@@ -439,7 +441,8 @@ pub(crate) fn handle_on_type_formatting(
return Ok(None);
}
- let mut position = from_proto::file_position(&snap, params.text_document_position)?;
+ let mut position =
+ try_default!(from_proto::file_position(&snap, params.text_document_position)?);
let line_index = snap.file_line_index(position.file_id)?;
// in `ide`, the `on_type` invariant is that
@@ -465,32 +468,33 @@ pub(crate) fn handle_on_type_formatting(
Ok(Some(change))
}
+pub(crate) fn empty_diagnostic_report() -> lsp_types::DocumentDiagnosticReportResult {
+ lsp_types::DocumentDiagnosticReportResult::Report(lsp_types::DocumentDiagnosticReport::Full(
+ lsp_types::RelatedFullDocumentDiagnosticReport {
+ related_documents: None,
+ full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport {
+ result_id: Some("rust-analyzer".to_owned()),
+ items: vec![],
+ },
+ },
+ ))
+}
+
pub(crate) fn handle_document_diagnostics(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentDiagnosticParams,
) -> anyhow::Result<lsp_types::DocumentDiagnosticReportResult> {
- let empty = || {
- lsp_types::DocumentDiagnosticReportResult::Report(
- lsp_types::DocumentDiagnosticReport::Full(
- lsp_types::RelatedFullDocumentDiagnosticReport {
- related_documents: None,
- full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport {
- result_id: Some("rust-analyzer".to_owned()),
- items: vec![],
- },
- },
- ),
- )
+ let file_id = match from_proto::file_id(&snap, &params.text_document.uri)? {
+ Some(it) => it,
+ None => return Ok(empty_diagnostic_report()),
};
-
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let source_root = snap.analysis.source_root_id(file_id)?;
if !snap.analysis.is_local_source_root(source_root)? {
- return Ok(empty());
+ return Ok(empty_diagnostic_report());
}
let config = snap.config.diagnostics(Some(source_root));
if !config.enabled {
- return Ok(empty());
+ return Ok(empty_diagnostic_report());
}
let line_index = snap.file_line_index(file_id)?;
let supports_related = snap.config.text_document_diagnostic_related_document_support();
@@ -546,7 +550,7 @@ pub(crate) fn handle_document_symbol(
params: lsp_types::DocumentSymbolParams,
) -> anyhow::Result<Option<lsp_types::DocumentSymbolResponse>> {
let _p = tracing::info_span!("handle_document_symbol").entered();
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let line_index = snap.file_line_index(file_id)?;
let mut parents: Vec<(lsp_types::DocumentSymbol, Option<usize>)> = Vec::new();
@@ -760,7 +764,7 @@ pub(crate) fn handle_will_rename_files(
}
})
.filter_map(|(file_id, new_name)| {
- snap.analysis.will_rename_file(file_id, &new_name).ok()?
+ snap.analysis.will_rename_file(file_id?, &new_name).ok()?
})
.collect();
@@ -782,7 +786,8 @@ pub(crate) fn handle_goto_definition(
params: lsp_types::GotoDefinitionParams,
) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = tracing::info_span!("handle_goto_definition").entered();
- let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let position =
+ try_default!(from_proto::file_position(&snap, params.text_document_position_params)?);
let nav_info = match snap.analysis.goto_definition(position)? {
None => return Ok(None),
Some(it) => it,
@@ -797,7 +802,10 @@ pub(crate) fn handle_goto_declaration(
params: lsp_types::request::GotoDeclarationParams,
) -> anyhow::Result<Option<lsp_types::request::GotoDeclarationResponse>> {
let _p = tracing::info_span!("handle_goto_declaration").entered();
- let position = from_proto::file_position(&snap, params.text_document_position_params.clone())?;
+ let position = try_default!(from_proto::file_position(
+ &snap,
+ params.text_document_position_params.clone()
+ )?);
let nav_info = match snap.analysis.goto_declaration(position)? {
None => return handle_goto_definition(snap, params),
Some(it) => it,
@@ -812,7 +820,8 @@ pub(crate) fn handle_goto_implementation(
params: lsp_types::request::GotoImplementationParams,
) -> anyhow::Result<Option<lsp_types::request::GotoImplementationResponse>> {
let _p = tracing::info_span!("handle_goto_implementation").entered();
- let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let position =
+ try_default!(from_proto::file_position(&snap, params.text_document_position_params)?);
let nav_info = match snap.analysis.goto_implementation(position)? {
None => return Ok(None),
Some(it) => it,
@@ -827,7 +836,8 @@ pub(crate) fn handle_goto_type_definition(
params: lsp_types::request::GotoTypeDefinitionParams,
) -> anyhow::Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
let _p = tracing::info_span!("handle_goto_type_definition").entered();
- let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let position =
+ try_default!(from_proto::file_position(&snap, params.text_document_position_params)?);
let nav_info = match snap.analysis.goto_type_definition(position)? {
None => return Ok(None),
Some(it) => it,
@@ -880,7 +890,7 @@ pub(crate) fn handle_parent_module(
}
// check if invoked at the crate root
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let crate_id = match snap.analysis.crates_for(file_id)?.first() {
Some(&crate_id) => crate_id,
None => return Ok(None),
@@ -904,7 +914,7 @@ pub(crate) fn handle_parent_module(
}
// locate parent module by semantics
- let position = from_proto::file_position(&snap, params)?;
+ let position = try_default!(from_proto::file_position(&snap, params)?);
let navs = snap.analysis.parent_module(position)?;
let res = to_proto::goto_definition_response(&snap, None, navs)?;
Ok(Some(res))
@@ -915,7 +925,7 @@ pub(crate) fn handle_runnables(
params: lsp_ext::RunnablesParams,
) -> anyhow::Result<Vec<lsp_ext::Runnable>> {
let _p = tracing::info_span!("handle_runnables").entered();
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let source_root = snap.analysis.source_root_id(file_id).ok();
let line_index = snap.file_line_index(file_id)?;
let offset = params.position.and_then(|it| from_proto::offset(&line_index, it).ok());
@@ -1035,7 +1045,7 @@ pub(crate) fn handle_related_tests(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Vec<lsp_ext::TestInfo>> {
let _p = tracing::info_span!("handle_related_tests").entered();
- let position = from_proto::file_position(&snap, params)?;
+ let position = try_default!(from_proto::file_position(&snap, params)?);
let tests = snap.analysis.related_tests(position, None)?;
let mut res = Vec::new();
@@ -1053,7 +1063,8 @@ pub(crate) fn handle_completion(
lsp_types::CompletionParams { text_document_position, context,.. }: lsp_types::CompletionParams,
) -> anyhow::Result<Option<lsp_types::CompletionResponse>> {
let _p = tracing::info_span!("handle_completion").entered();
- let mut position = from_proto::file_position(&snap, text_document_position.clone())?;
+ let mut position =
+ try_default!(from_proto::file_position(&snap, text_document_position.clone())?);
let line_index = snap.file_line_index(position.file_id)?;
let completion_trigger_character =
context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next());
@@ -1102,7 +1113,8 @@ pub(crate) fn handle_completion_resolve(
let resolve_data: lsp_ext::CompletionResolveData = serde_json::from_value(data)?;
- let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?;
+ let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?
+ .expect("we never provide completions for excluded files");
let line_index = snap.file_line_index(file_id)?;
// FIXME: We should fix up the position when retrying the cancelled request instead
let Ok(offset) = from_proto::offset(&line_index, resolve_data.position.position) else {
@@ -1185,7 +1197,7 @@ pub(crate) fn handle_folding_range(
params: FoldingRangeParams,
) -> anyhow::Result<Option<Vec<FoldingRange>>> {
let _p = tracing::info_span!("handle_folding_range").entered();
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let folds = snap.analysis.folding_ranges(file_id)?;
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;
@@ -1202,7 +1214,8 @@ pub(crate) fn handle_signature_help(
params: lsp_types::SignatureHelpParams,
) -> anyhow::Result<Option<lsp_types::SignatureHelp>> {
let _p = tracing::info_span!("handle_signature_help").entered();
- let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let position =
+ try_default!(from_proto::file_position(&snap, params.text_document_position_params)?);
let help = match snap.analysis.signature_help(position)? {
Some(it) => it,
None => return Ok(None),
@@ -1221,7 +1234,7 @@ pub(crate) fn handle_hover(
PositionOrRange::Position(position) => Range::new(position, position),
PositionOrRange::Range(range) => range,
};
- let file_range = from_proto::file_range(&snap, &params.text_document, range)?;
+ let file_range = try_default!(from_proto::file_range(&snap, &params.text_document, range)?);
let hover = snap.config.hover();
let info = match snap.analysis.hover(&hover, file_range)? {
@@ -1255,7 +1268,7 @@ pub(crate) fn handle_prepare_rename(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Option<PrepareRenameResponse>> {
let _p = tracing::info_span!("handle_prepare_rename").entered();
- let position = from_proto::file_position(&snap, params)?;
+ let position = try_default!(from_proto::file_position(&snap, params)?);
let change = snap.analysis.prepare_rename(position)?.map_err(to_proto::rename_error)?;
@@ -1269,7 +1282,7 @@ pub(crate) fn handle_rename(
params: RenameParams,
) -> anyhow::Result<Option<WorkspaceEdit>> {
let _p = tracing::info_span!("handle_rename").entered();
- let position = from_proto::file_position(&snap, params.text_document_position)?;
+ let position = try_default!(from_proto::file_position(&snap, params.text_document_position)?);
let mut change =
snap.analysis.rename(position, &params.new_name)?.map_err(to_proto::rename_error)?;
@@ -1304,7 +1317,7 @@ pub(crate) fn handle_references(
params: lsp_types::ReferenceParams,
) -> anyhow::Result<Option<Vec<Location>>> {
let _p = tracing::info_span!("handle_references").entered();
- let position = from_proto::file_position(&snap, params.text_document_position)?;
+ let position = try_default!(from_proto::file_position(&snap, params.text_document_position)?);
let exclude_imports = snap.config.find_all_refs_exclude_imports();
let exclude_tests = snap.config.find_all_refs_exclude_tests();
@@ -1375,9 +1388,9 @@ pub(crate) fn handle_code_action(
return Ok(None);
}
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let line_index = snap.file_line_index(file_id)?;
- let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
+ let frange = try_default!(from_proto::file_range(&snap, &params.text_document, params.range)?);
let source_root = snap.analysis.source_root_id(file_id)?;
let mut assists_config = snap.config.assist(Some(source_root));
@@ -1455,7 +1468,8 @@ pub(crate) fn handle_code_action_resolve(
return Err(invalid_params_error("code action without data".to_owned()).into());
};
- let file_id = from_proto::file_id(&snap, &params.code_action_params.text_document.uri)?;
+ let file_id = from_proto::file_id(&snap, &params.code_action_params.text_document.uri)?
+ .expect("we never provide code actions for excluded files");
if snap.file_version(file_id) != params.version {
return Err(invalid_params_error("stale code action".to_owned()).into());
}
@@ -1551,7 +1565,7 @@ pub(crate) fn handle_code_lens(
return Ok(Some(Vec::default()));
}
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let target_spec = TargetSpec::for_file(&snap, file_id)?;
let annotations = snap.analysis.annotations(
@@ -1613,7 +1627,8 @@ pub(crate) fn handle_document_highlight(
params: lsp_types::DocumentHighlightParams,
) -> anyhow::Result<Option<Vec<lsp_types::DocumentHighlight>>> {
let _p = tracing::info_span!("handle_document_highlight").entered();
- let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let position =
+ try_default!(from_proto::file_position(&snap, params.text_document_position_params)?);
let line_index = snap.file_line_index(position.file_id)?;
let source_root = snap.analysis.source_root_id(position.file_id)?;
@@ -1639,12 +1654,12 @@ pub(crate) fn handle_ssr(
params: lsp_ext::SsrParams,
) -> anyhow::Result<lsp_types::WorkspaceEdit> {
let _p = tracing::info_span!("handle_ssr").entered();
- let selections = params
+ let selections = try_default!(params
.selections
.iter()
.map(|range| from_proto::file_range(&snap, &params.position.text_document, *range))
- .collect::<Result<Vec<_>, _>>()?;
- let position = from_proto::file_position(&snap, params.position)?;
+ .collect::<Result<Option<Vec<_>>, _>>()?);
+ let position = try_default!(from_proto::file_position(&snap, params.position)?);
let source_change = snap.analysis.structural_search_replace(
&params.query,
params.parse_only,
@@ -1660,11 +1675,11 @@ pub(crate) fn handle_inlay_hints(
) -> anyhow::Result<Option<Vec<InlayHint>>> {
let _p = tracing::info_span!("handle_inlay_hints").entered();
let document_uri = &params.text_document.uri;
- let FileRange { file_id, range } = from_proto::file_range(
+ let FileRange { file_id, range } = try_default!(from_proto::file_range(
&snap,
&TextDocumentIdentifier::new(document_uri.to_owned()),
params.range,
- )?;
+ )?);
let line_index = snap.file_line_index(file_id)?;
let range = TextRange::new(
range.start().min(line_index.index.len()),
@@ -1744,7 +1759,8 @@ pub(crate) fn handle_call_hierarchy_prepare(
params: CallHierarchyPrepareParams,
) -> anyhow::Result<Option<Vec<CallHierarchyItem>>> {
let _p = tracing::info_span!("handle_call_hierarchy_prepare").entered();
- let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let position =
+ try_default!(from_proto::file_position(&snap, params.text_document_position_params)?);
let nav_info = match snap.analysis.call_hierarchy(position)? {
None => return Ok(None),
@@ -1769,7 +1785,7 @@ pub(crate) fn handle_call_hierarchy_incoming(
let item = params.item;
let doc = TextDocumentIdentifier::new(item.uri);
- let frange = from_proto::file_range(&snap, &doc, item.selection_range)?;
+ let frange = try_default!(from_proto::file_range(&snap, &doc, item.selection_range)?);
let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
let config = snap.config.call_hierarchy();
@@ -1807,7 +1823,7 @@ pub(crate) fn handle_call_hierarchy_outgoing(
let item = params.item;
let doc = TextDocumentIdentifier::new(item.uri);
- let frange = from_proto::file_range(&snap, &doc, item.selection_range)?;
+ let frange = try_default!(from_proto::file_range(&snap, &doc, item.selection_range)?);
let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
let line_index = snap.file_line_index(fpos.file_id)?;
@@ -1842,7 +1858,7 @@ pub(crate) fn handle_semantic_tokens_full(
) -> anyhow::Result<Option<SemanticTokensResult>> {
let _p = tracing::info_span!("handle_semantic_tokens_full").entered();
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;
@@ -1872,7 +1888,7 @@ pub(crate) fn handle_semantic_tokens_full_delta(
) -> anyhow::Result<Option<SemanticTokensFullDeltaResult>> {
let _p = tracing::info_span!("handle_semantic_tokens_full_delta").entered();
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;
@@ -1915,7 +1931,7 @@ pub(crate) fn handle_semantic_tokens_range(
) -> anyhow::Result<Option<SemanticTokensRangeResult>> {
let _p = tracing::info_span!("handle_semantic_tokens_range").entered();
- let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
+ let frange = try_default!(from_proto::file_range(&snap, &params.text_document, params.range)?);
let text = snap.analysis.file_text(frange.file_id)?;
let line_index = snap.file_line_index(frange.file_id)?;
@@ -1940,7 +1956,7 @@ pub(crate) fn handle_open_docs(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<ExternalDocsResponse> {
let _p = tracing::info_span!("handle_open_docs").entered();
- let position = from_proto::file_position(&snap, params)?;
+ let position = try_default!(from_proto::file_position(&snap, params)?);
let ws_and_sysroot = snap.workspaces.iter().find_map(|ws| match &ws.kind {
ProjectWorkspaceKind::Cargo { cargo, .. }
@@ -1982,7 +1998,7 @@ pub(crate) fn handle_open_cargo_toml(
params: lsp_ext::OpenCargoTomlParams,
) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = tracing::info_span!("handle_open_cargo_toml").entered();
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let cargo_spec = match TargetSpec::for_file(&snap, file_id)? {
Some(TargetSpec::Cargo(it)) => it,
@@ -2000,8 +2016,8 @@ pub(crate) fn handle_move_item(
params: lsp_ext::MoveItemParams,
) -> anyhow::Result<Vec<lsp_ext::SnippetTextEdit>> {
let _p = tracing::info_span!("handle_move_item").entered();
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
- let range = from_proto::file_range(&snap, &params.text_document, params.range)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
+ let range = try_default!(from_proto::file_range(&snap, &params.text_document, params.range)?);
let direction = match params.direction {
lsp_ext::MoveItemDirection::Up => ide::Direction::Up,
@@ -2022,7 +2038,7 @@ pub(crate) fn handle_view_recursive_memory_layout(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Option<lsp_ext::RecursiveMemoryLayout>> {
let _p = tracing::info_span!("handle_view_recursive_memory_layout").entered();
- let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let line_index = snap.file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, params.position)?;
@@ -2210,7 +2226,7 @@ fn run_rustfmt(
text_document: TextDocumentIdentifier,
range: Option<lsp_types::Range>,
) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> {
- let file_id = from_proto::file_id(snap, &text_document.uri)?;
+ let file_id = try_default!(from_proto::file_id(snap, &text_document.uri)?);
let file = snap.analysis.file_text(file_id)?;
// Determine the edition of the crate the file belongs to (if there's multiple, we pick the
@@ -2275,7 +2291,7 @@ fn run_rustfmt(
.into());
}
- let frange = from_proto::file_range(snap, &text_document, range)?;
+ let frange = try_default!(from_proto::file_range(snap, &text_document, range)?);
let start_line = line_index.index.line_col(frange.range.start()).line;
let end_line = line_index.index.line_col(frange.range.end()).line;
@@ -2284,7 +2300,8 @@ fn run_rustfmt(
cmd.arg(
json!([{
"file": "stdin",
- "range": [start_line, end_line]
+ // LineCol is 0-based, but rustfmt is 1-based.
+ "range": [start_line + 1, end_line + 1]
}])
.to_string(),
);
@@ -2318,18 +2335,21 @@ fn run_rustfmt(
}
};
- tracing::debug!(?command, "created format command");
+ let output = {
+ let _p = tracing::info_span!("rustfmt", ?command).entered();
- let mut rustfmt = command
- .stdin(Stdio::piped())
- .stdout(Stdio::piped())
- .stderr(Stdio::piped())
- .spawn()
- .context(format!("Failed to spawn {command:?}"))?;
+ let mut rustfmt = command
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .context(format!("Failed to spawn {command:?}"))?;
- rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?;
+ rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?;
+
+ rustfmt.wait_with_output()?
+ };
- let output = rustfmt.wait_with_output()?;
let captured_stdout = String::from_utf8(output.stdout)?;
let captured_stderr = String::from_utf8(output.stderr).unwrap_or_default();
@@ -2413,15 +2433,15 @@ pub(crate) fn internal_testing_fetch_config(
state: GlobalStateSnapshot,
params: InternalTestingFetchConfigParams,
) -> anyhow::Result<Option<InternalTestingFetchConfigResponse>> {
- let source_root = params
- .text_document
- .map(|it| {
+ let source_root = match params.text_document {
+ Some(it) => Some(
state
.analysis
- .source_root_id(from_proto::file_id(&state, &it.uri)?)
- .map_err(anyhow::Error::from)
- })
- .transpose()?;
+ .source_root_id(try_default!(from_proto::file_id(&state, &it.uri)?))
+ .map_err(anyhow::Error::from)?,
+ ),
+ None => None,
+ };
Ok(Some(match params.config {
InternalTestingFetchConfigOption::AssistEmitMustUse => {
InternalTestingFetchConfigResponse::AssistEmitMustUse(
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index 5cdc51a1c1..c6aa8ba170 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -25,6 +25,14 @@ use vfs::{AbsPathBuf, VfsPath};
use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
+#[track_caller]
+fn file_id(vfs: &vfs::Vfs, path: &VfsPath) -> vfs::FileId {
+ match vfs.file_id(path) {
+ Some((file_id, vfs::FileExcluded::No)) => file_id,
+ None | Some((_, vfs::FileExcluded::Yes)) => panic!("can't find virtual file for {path}"),
+ }
+}
+
#[test]
fn integrated_highlighting_benchmark() {
if std::env::var("RUN_SLOW_BENCHES").is_err() {
@@ -62,7 +70,7 @@ fn integrated_highlighting_benchmark() {
let file_id = {
let file = workspace_to_load.join(file);
let path = VfsPath::from(AbsPathBuf::assert(file));
- vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
+ file_id(&vfs, &path)
};
{
@@ -130,7 +138,7 @@ fn integrated_completion_benchmark() {
let file_id = {
let file = workspace_to_load.join(file);
let path = VfsPath::from(AbsPathBuf::assert(file));
- vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
+ file_id(&vfs, &path)
};
// kick off parsing and index population
@@ -324,7 +332,7 @@ fn integrated_diagnostics_benchmark() {
let file_id = {
let file = workspace_to_load.join(file);
let path = VfsPath::from(AbsPathBuf::assert(file));
- vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
+ file_id(&vfs, &path)
};
let diagnostics_config = DiagnosticsConfig {
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs
index ccffa7a671..27d6225cdb 100644
--- a/crates/rust-analyzer/src/lib.rs
+++ b/crates/rust-analyzer/src/lib.rs
@@ -50,7 +50,7 @@ mod integrated_benchmarks;
use hir::Mutability;
use ide::{CompletionItem, CompletionItemRefMode, CompletionRelevance};
use serde::de::DeserializeOwned;
-use tenthash::TentHasher;
+use tenthash::TentHash;
pub use crate::{
lsp::capabilities::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph,
@@ -66,7 +66,7 @@ pub fn from_json<T: DeserializeOwned>(
}
fn completion_item_hash(item: &CompletionItem, is_ref_completion: bool) -> [u8; 20] {
- fn hash_completion_relevance(hasher: &mut TentHasher, relevance: &CompletionRelevance) {
+ fn hash_completion_relevance(hasher: &mut TentHash, relevance: &CompletionRelevance) {
use ide_completion::{
CompletionRelevancePostfixMatch, CompletionRelevanceReturnType,
CompletionRelevanceTypeMatch,
@@ -79,71 +79,108 @@ fn completion_item_hash(item: &CompletionItem, is_ref_completion: bool) -> [u8;
u8::from(relevance.requires_import),
u8::from(relevance.is_private_editable),
]);
- if let Some(type_match) = &relevance.type_match {
- let label = match type_match {
- CompletionRelevanceTypeMatch::CouldUnify => "could_unify",
- CompletionRelevanceTypeMatch::Exact => "exact",
- };
- hasher.update(label);
+
+ match relevance.type_match {
+ None => hasher.update([0u8]),
+ Some(CompletionRelevanceTypeMatch::CouldUnify) => hasher.update([1u8]),
+ Some(CompletionRelevanceTypeMatch::Exact) => hasher.update([2u8]),
}
+
+ hasher.update([u8::from(relevance.trait_.is_some())]);
if let Some(trait_) = &relevance.trait_ {
hasher.update([u8::from(trait_.is_op_method), u8::from(trait_.notable_trait)]);
}
- if let Some(postfix_match) = &relevance.postfix_match {
- let label = match postfix_match {
- CompletionRelevancePostfixMatch::NonExact => "non_exact",
- CompletionRelevancePostfixMatch::Exact => "exact",
- };
- hasher.update(label);
+
+ match relevance.postfix_match {
+ None => hasher.update([0u8]),
+ Some(CompletionRelevancePostfixMatch::NonExact) => hasher.update([1u8]),
+ Some(CompletionRelevancePostfixMatch::Exact) => hasher.update([2u8]),
}
+
+ hasher.update([u8::from(relevance.function.is_some())]);
if let Some(function) = &relevance.function {
hasher.update([u8::from(function.has_params), u8::from(function.has_self_param)]);
- let label = match function.return_type {
- CompletionRelevanceReturnType::Other => "other",
- CompletionRelevanceReturnType::DirectConstructor => "direct_constructor",
- CompletionRelevanceReturnType::Constructor => "constructor",
- CompletionRelevanceReturnType::Builder => "builder",
+ let discriminant: u8 = match function.return_type {
+ CompletionRelevanceReturnType::Other => 0,
+ CompletionRelevanceReturnType::DirectConstructor => 1,
+ CompletionRelevanceReturnType::Constructor => 2,
+ CompletionRelevanceReturnType::Builder => 3,
};
- hasher.update(label);
+ hasher.update([discriminant]);
}
}
- let mut hasher = TentHasher::new();
+ let mut hasher = TentHash::new();
hasher.update([
u8::from(is_ref_completion),
u8::from(item.is_snippet),
u8::from(item.deprecated),
u8::from(item.trigger_call_info),
]);
+
+ hasher.update(item.label.primary.len().to_ne_bytes());
hasher.update(&item.label.primary);
+
+ hasher.update([u8::from(item.label.detail_left.is_some())]);
if let Some(label_detail) = &item.label.detail_left {
+ hasher.update(label_detail.len().to_ne_bytes());
hasher.update(label_detail);
}
+
+ hasher.update([u8::from(item.label.detail_right.is_some())]);
if let Some(label_detail) = &item.label.detail_right {
+ hasher.update(label_detail.len().to_ne_bytes());
hasher.update(label_detail);
}
+
// NB: do not hash edits or source range, as those may change between the time the client sends the resolve request
// and the time it receives it: some editors do allow changing the buffer between that, leading to ranges being different.
//
// Documentation hashing is skipped too, as it's a large blob to process,
// while not really making completion properties more unique as they are already.
- hasher.update(item.kind.tag());
+
+ let kind_tag = item.kind.tag();
+ hasher.update(kind_tag.len().to_ne_bytes());
+ hasher.update(kind_tag);
+
+ hasher.update(item.lookup.len().to_ne_bytes());
hasher.update(&item.lookup);
+
+ hasher.update([u8::from(item.detail.is_some())]);
if let Some(detail) = &item.detail {
+ hasher.update(detail.len().to_ne_bytes());
hasher.update(detail);
}
+
hash_completion_relevance(&mut hasher, &item.relevance);
+
+ hasher.update([u8::from(item.ref_match.is_some())]);
if let Some((ref_mode, text_size)) = &item.ref_match {
- let prefix = match ref_mode {
- CompletionItemRefMode::Reference(Mutability::Shared) => "&",
- CompletionItemRefMode::Reference(Mutability::Mut) => "&mut ",
- CompletionItemRefMode::Dereference => "*",
+ let discriminant = match ref_mode {
+ CompletionItemRefMode::Reference(Mutability::Shared) => 0u8,
+ CompletionItemRefMode::Reference(Mutability::Mut) => 1u8,
+ CompletionItemRefMode::Dereference => 2u8,
};
- hasher.update(prefix);
- hasher.update(u32::from(*text_size).to_le_bytes());
+ hasher.update([discriminant]);
+ hasher.update(u32::from(*text_size).to_ne_bytes());
}
+
+ hasher.update(item.import_to_add.len().to_ne_bytes());
for import_path in &item.import_to_add {
+ hasher.update(import_path.len().to_ne_bytes());
hasher.update(import_path);
}
+
hasher.finalize()
}
+
+#[doc(hidden)]
+macro_rules! try_default_ {
+ ($it:expr $(,)?) => {
+ match $it {
+ Some(it) => it,
+ None => return Ok(Default::default()),
+ }
+ };
+}
+pub(crate) use try_default_ as try_default;
diff --git a/crates/rust-analyzer/src/lsp/from_proto.rs b/crates/rust-analyzer/src/lsp/from_proto.rs
index 47e9961cf1..6375a1a054 100644
--- a/crates/rust-analyzer/src/lsp/from_proto.rs
+++ b/crates/rust-analyzer/src/lsp/from_proto.rs
@@ -9,7 +9,7 @@ use vfs::AbsPathBuf;
use crate::{
global_state::GlobalStateSnapshot,
line_index::{LineIndex, PositionEncoding},
- lsp_ext,
+ lsp_ext, try_default,
};
pub(crate) fn abs_path(url: &lsp_types::Url) -> anyhow::Result<AbsPathBuf> {
@@ -61,37 +61,44 @@ pub(crate) fn text_range(
}
}
-pub(crate) fn file_id(snap: &GlobalStateSnapshot, url: &lsp_types::Url) -> anyhow::Result<FileId> {
+/// Returns `None` if the file was excluded.
+pub(crate) fn file_id(
+ snap: &GlobalStateSnapshot,
+ url: &lsp_types::Url,
+) -> anyhow::Result<Option<FileId>> {
snap.url_to_file_id(url)
}
+/// Returns `None` if the file was excluded.
pub(crate) fn file_position(
snap: &GlobalStateSnapshot,
tdpp: lsp_types::TextDocumentPositionParams,
-) -> anyhow::Result<FilePosition> {
- let file_id = file_id(snap, &tdpp.text_document.uri)?;
+) -> anyhow::Result<Option<FilePosition>> {
+ let file_id = try_default!(file_id(snap, &tdpp.text_document.uri)?);
let line_index = snap.file_line_index(file_id)?;
let offset = offset(&line_index, tdpp.position)?;
- Ok(FilePosition { file_id, offset })
+ Ok(Some(FilePosition { file_id, offset }))
}
+/// Returns `None` if the file was excluded.
pub(crate) fn file_range(
snap: &GlobalStateSnapshot,
text_document_identifier: &lsp_types::TextDocumentIdentifier,
range: lsp_types::Range,
-) -> anyhow::Result<FileRange> {
+) -> anyhow::Result<Option<FileRange>> {
file_range_uri(snap, &text_document_identifier.uri, range)
}
+/// Returns `None` if the file was excluded.
pub(crate) fn file_range_uri(
snap: &GlobalStateSnapshot,
document: &lsp_types::Url,
range: lsp_types::Range,
-) -> anyhow::Result<FileRange> {
- let file_id = file_id(snap, document)?;
+) -> anyhow::Result<Option<FileRange>> {
+ let file_id = try_default!(file_id(snap, document)?);
let line_index = snap.file_line_index(file_id)?;
let range = text_range(&line_index, range)?;
- Ok(FileRange { file_id, range })
+ Ok(Some(FileRange { file_id, range }))
}
pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind> {
@@ -108,6 +115,7 @@ pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind>
Some(assist_kind)
}
+/// Returns `None` if the file was excluded.
pub(crate) fn annotation(
snap: &GlobalStateSnapshot,
range: lsp_types::Range,
@@ -121,7 +129,7 @@ pub(crate) fn annotation(
return Ok(None);
}
let pos @ FilePosition { file_id, .. } =
- file_position(snap, params.text_document_position_params)?;
+ try_default!(file_position(snap, params.text_document_position_params)?);
let line_index = snap.file_line_index(file_id)?;
Ok(Annotation {
@@ -133,7 +141,7 @@ pub(crate) fn annotation(
if snap.url_file_version(&params.text_document.uri) != Some(data.version) {
return Ok(None);
}
- let pos @ FilePosition { file_id, .. } = file_position(snap, params)?;
+ let pos @ FilePosition { file_id, .. } = try_default!(file_position(snap, params)?);
let line_index = snap.file_line_index(file_id)?;
Ok(Annotation {
diff --git a/crates/rust-analyzer/src/lsp/semantic_tokens.rs b/crates/rust-analyzer/src/lsp/semantic_tokens.rs
index 991c10743f..3c21e19925 100644
--- a/crates/rust-analyzer/src/lsp/semantic_tokens.rs
+++ b/crates/rust-analyzer/src/lsp/semantic_tokens.rs
@@ -24,7 +24,7 @@ macro_rules! define_semantic_token_types {
}
pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
- $(SemanticTokenType::$standard,)*
+ $(self::types::$standard,)*
$(self::types::$custom),*
];
@@ -32,7 +32,7 @@ macro_rules! define_semantic_token_types {
use self::types::*;
$(
if token == $custom {
- None $(.or(Some(SemanticTokenType::$fallback)))?
+ None $(.or(Some(self::types::$fallback)))?
} else
)*
{ Some(token )}
@@ -60,6 +60,7 @@ define_semantic_token_types![
STRUCT,
TYPE_PARAMETER,
VARIABLE,
+ TYPE,
}
custom {
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index d6dc8b521f..f5d9469f26 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -27,7 +27,10 @@ use crate::{
FetchWorkspaceResponse, GlobalState,
},
hack_recover_crate_name,
- handlers::dispatch::{NotificationDispatcher, RequestDispatcher},
+ handlers::{
+ dispatch::{NotificationDispatcher, RequestDispatcher},
+ request::empty_diagnostic_report,
+ },
lsp::{
from_proto, to_proto,
utils::{notification_is, Progress},
@@ -253,6 +256,11 @@ impl GlobalState {
&self,
inbox: &Receiver<lsp_server::Message>,
) -> Result<Option<Event>, crossbeam_channel::RecvError> {
+ // Make sure we reply to formatting requests ASAP so the editor doesn't block
+ if let Ok(task) = self.fmt_pool.receiver.try_recv() {
+ return Ok(Some(Event::Task(task)));
+ }
+
select! {
recv(inbox) -> msg =>
return Ok(msg.ok().map(Event::Lsp)),
@@ -320,26 +328,30 @@ impl GlobalState {
}
for progress in prime_caches_progress {
- let (state, message, fraction);
+ let (state, message, fraction, title);
match progress {
PrimeCachesProgress::Begin => {
state = Progress::Begin;
message = None;
fraction = 0.0;
+ title = "Indexing";
}
PrimeCachesProgress::Report(report) => {
state = Progress::Report;
+ title = report.work_type;
- message = match &report.crates_currently_indexing[..] {
+ message = match &*report.crates_currently_indexing {
[crate_name] => Some(format!(
- "{}/{} ({crate_name})",
- report.crates_done, report.crates_total
+ "{}/{} ({})",
+ report.crates_done,
+ report.crates_total,
+ crate_name.as_str(),
)),
[crate_name, rest @ ..] => Some(format!(
"{}/{} ({} + {} more)",
report.crates_done,
report.crates_total,
- crate_name,
+ crate_name.as_str(),
rest.len()
)),
_ => None,
@@ -351,6 +363,7 @@ impl GlobalState {
state = Progress::End;
message = None;
fraction = 1.0;
+ title = "Indexing";
self.prime_caches_queue.op_completed(());
if cancelled {
@@ -360,7 +373,13 @@ impl GlobalState {
}
};
- self.report_progress("Indexing", state, message, Some(fraction), None);
+ self.report_progress(
+ title,
+ state,
+ message,
+ Some(fraction),
+ Some("rustAnalyzer/cachePriming".to_owned()),
+ );
}
}
Event::Vfs(message) => {
@@ -532,6 +551,9 @@ impl GlobalState {
self.mem_docs
.iter()
.map(|path| vfs.file_id(path).unwrap())
+ .filter_map(|(file_id, excluded)| {
+ (excluded == vfs::FileExcluded::No).then_some(file_id)
+ })
.filter(|&file_id| {
let source_root = db.file_source_root(file_id);
// Only publish diagnostics for files in the workspace, not from crates.io deps
@@ -616,6 +638,9 @@ impl GlobalState {
.mem_docs
.iter()
.map(|path| self.vfs.read().0.file_id(path).unwrap())
+ .filter_map(|(file_id, excluded)| {
+ (excluded == vfs::FileExcluded::No).then_some(file_id)
+ })
.filter(|&file_id| {
let source_root = db.file_source_root(file_id);
!db.source_root(source_root).is_library
@@ -863,7 +888,10 @@ impl GlobalState {
self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| {
let _p = tracing::info_span!("GlobalState::check_if_indexed").entered();
tracing::debug!(?uri, "handling uri");
- let id = from_proto::file_id(&snap, &uri).expect("unable to get FileId");
+ let Some(id) = from_proto::file_id(&snap, &uri).expect("unable to get FileId")
+ else {
+ return;
+ };
if let Ok(crates) = &snap.analysis.crates_for(id) {
if crates.is_empty() {
if snap.config.discover_workspace_config().is_some() {
@@ -971,13 +999,14 @@ impl GlobalState {
);
for diag in diagnostics {
match url_to_file_id(&self.vfs.read().0, &diag.url) {
- Ok(file_id) => self.diagnostics.add_check_diagnostic(
+ Ok(Some(file_id)) => self.diagnostics.add_check_diagnostic(
id,
&package_id,
file_id,
diag.diagnostic,
diag.fix,
),
+ Ok(None) => {}
Err(err) => {
error!(
"flycheck {id}: File with cargo diagnostic not found in VFS: {}",
@@ -1099,17 +1128,7 @@ impl GlobalState {
.on_latency_sensitive::<NO_RETRY, lsp_request::SemanticTokensRangeRequest>(handlers::handle_semantic_tokens_range)
// FIXME: Some of these NO_RETRY could be retries if the file they are interested didn't change.
// All other request handlers
- .on_with_vfs_default::<lsp_request::DocumentDiagnosticRequest>(handlers::handle_document_diagnostics, || lsp_types::DocumentDiagnosticReportResult::Report(
- lsp_types::DocumentDiagnosticReport::Full(
- lsp_types::RelatedFullDocumentDiagnosticReport {
- related_documents: None,
- full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport {
- result_id: Some("rust-analyzer".to_owned()),
- items: vec![],
- },
- },
- ),
- ), || lsp_server::ResponseError {
+ .on_with_vfs_default::<lsp_request::DocumentDiagnosticRequest>(handlers::handle_document_diagnostics, empty_diagnostic_report, || lsp_server::ResponseError {
code: lsp_server::ErrorCode::ServerCancelled as i32,
message: "server cancelled the request".to_owned(),
data: serde_json::to_value(lsp_types::DiagnosticServerCancellationData {
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 0add2cdf5a..56dcad0eb1 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -316,6 +316,7 @@ impl GlobalState {
let workspace = project_model::ProjectWorkspace::load_inline(
it.clone(),
&cargo_config,
+ &progress,
);
Ok(workspace)
}
@@ -701,12 +702,13 @@ impl GlobalState {
let (crate_graph, proc_macro_paths, ws_data) = {
// Create crate graph from all the workspaces
- let vfs = &mut self.vfs.write().0;
-
+ let vfs = &self.vfs.read().0;
let load = |path: &AbsPath| {
let vfs_path = vfs::VfsPath::from(path.to_path_buf());
self.crate_graph_file_dependencies.insert(vfs_path.clone());
- vfs.file_id(&vfs_path)
+ vfs.file_id(&vfs_path).and_then(|(file_id, excluded)| {
+ (excluded == vfs::FileExcluded::No).then_some(file_id)
+ })
};
ws_to_crate_graph(&self.workspaces, self.config.extra_env(None), load)
@@ -883,7 +885,6 @@ pub fn ws_to_crate_graph(
ws_data.extend(mapping.values().copied().zip(iter::repeat(Arc::new(CrateWorkspaceData {
toolchain: toolchain.clone(),
data_layout: target_layout.clone(),
- proc_macro_cwd: Some(ws.workspace_root().to_owned()),
}))));
proc_macro_paths.push(crate_proc_macros);
}
diff --git a/crates/rust-analyzer/src/task_pool.rs b/crates/rust-analyzer/src/task_pool.rs
index 2bcd8505e8..c5de69bb9f 100644
--- a/crates/rust-analyzer/src/task_pool.rs
+++ b/crates/rust-analyzer/src/task_pool.rs
@@ -1,6 +1,8 @@
//! A thin wrapper around [`stdx::thread::Pool`] which threads a sender through spawned jobs.
//! It is used in [`crate::global_state::GlobalState`] throughout the main loop.
+use std::panic::UnwindSafe;
+
use crossbeam_channel::Sender;
use stdx::thread::{Pool, ThreadIntent};
@@ -18,7 +20,7 @@ impl<T> TaskPool<T> {
pub(crate) fn spawn<F>(&mut self, intent: ThreadIntent, task: F)
where
- F: FnOnce() -> T + Send + 'static,
+ F: FnOnce() -> T + Send + UnwindSafe + 'static,
T: Send + 'static,
{
self.pool.spawn(intent, {
@@ -29,7 +31,7 @@ impl<T> TaskPool<T> {
pub(crate) fn spawn_with_sender<F>(&mut self, intent: ThreadIntent, task: F)
where
- F: FnOnce(Sender<T>) + Send + 'static,
+ F: FnOnce(Sender<T>) + Send + UnwindSafe + 'static,
T: Send + 'static,
{
self.pool.spawn(intent, {
diff --git a/crates/rust-analyzer/tests/slow-tests/cli.rs b/crates/rust-analyzer/tests/slow-tests/cli.rs
index fba5466691..4ef930e985 100644
--- a/crates/rust-analyzer/tests/slow-tests/cli.rs
+++ b/crates/rust-analyzer/tests/slow-tests/cli.rs
@@ -43,89 +43,93 @@ mod tests {
expect![[r#"
{"id":2,"type":"vertex","label":"foldingRangeResult","result":[{"startLine":2,"startCharacter":43,"endLine":6,"endCharacter":1},{"startLine":3,"startCharacter":19,"endLine":5,"endCharacter":5},{"startLine":9,"startCharacter":10,"endLine":12,"endCharacter":1}]}
{"id":3,"type":"edge","label":"textDocument/foldingRange","inV":2,"outV":1}
- {"id":4,"type":"vertex","label":"range","start":{"line":0,"character":3},"end":{"line":0,"character":8}}
+ {"id":4,"type":"vertex","label":"range","start":{"line":0,"character":0},"end":{"line":13,"character":0}}
{"id":5,"type":"vertex","label":"resultSet"}
{"id":6,"type":"edge","label":"next","inV":5,"outV":4}
- {"id":7,"type":"vertex","label":"range","start":{"line":2,"character":13},"end":{"line":2,"character":43}}
+ {"id":7,"type":"vertex","label":"range","start":{"line":0,"character":3},"end":{"line":0,"character":8}}
{"id":8,"type":"vertex","label":"resultSet"}
{"id":9,"type":"edge","label":"next","inV":8,"outV":7}
- {"id":10,"type":"vertex","label":"range","start":{"line":8,"character":0},"end":{"line":8,"character":30}}
- {"id":11,"type":"edge","label":"next","inV":8,"outV":10}
- {"id":12,"type":"vertex","label":"range","start":{"line":8,"character":32},"end":{"line":8,"character":39}}
- {"id":13,"type":"vertex","label":"resultSet"}
- {"id":14,"type":"edge","label":"next","inV":13,"outV":12}
- {"id":15,"type":"vertex","label":"range","start":{"line":9,"character":4},"end":{"line":9,"character":9}}
+ {"id":10,"type":"vertex","label":"range","start":{"line":2,"character":13},"end":{"line":2,"character":43}}
+ {"id":11,"type":"vertex","label":"resultSet"}
+ {"id":12,"type":"edge","label":"next","inV":11,"outV":10}
+ {"id":13,"type":"vertex","label":"range","start":{"line":8,"character":0},"end":{"line":8,"character":30}}
+ {"id":14,"type":"edge","label":"next","inV":11,"outV":13}
+ {"id":15,"type":"vertex","label":"range","start":{"line":8,"character":32},"end":{"line":8,"character":39}}
{"id":16,"type":"vertex","label":"resultSet"}
{"id":17,"type":"edge","label":"next","inV":16,"outV":15}
- {"id":18,"type":"vertex","label":"range","start":{"line":10,"character":8},"end":{"line":10,"character":13}}
+ {"id":18,"type":"vertex","label":"range","start":{"line":9,"character":4},"end":{"line":9,"character":9}}
{"id":19,"type":"vertex","label":"resultSet"}
{"id":20,"type":"edge","label":"next","inV":19,"outV":18}
- {"id":21,"type":"vertex","label":"range","start":{"line":11,"character":4},"end":{"line":11,"character":34}}
- {"id":22,"type":"edge","label":"next","inV":8,"outV":21}
- {"id":23,"type":"vertex","label":"range","start":{"line":11,"character":36},"end":{"line":11,"character":43}}
- {"id":24,"type":"vertex","label":"resultSet"}
- {"id":25,"type":"edge","label":"next","inV":24,"outV":23}
- {"id":26,"type":"edge","label":"contains","inVs":[4,7,10,12,15,18,21,23],"outV":1}
- {"id":27,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\n#[allow]\n```\n\n---\n\nValid forms are:\n\n* \\#\\[allow(lint1, lint2, ..., /\\*opt\\*/ reason = \"...\")\\]"}}}
- {"id":28,"type":"edge","label":"textDocument/hover","inV":27,"outV":5}
- {"id":29,"type":"vertex","label":"referenceResult"}
- {"id":30,"type":"edge","label":"textDocument/references","inV":29,"outV":5}
- {"id":31,"type":"edge","label":"item","document":1,"property":"references","inVs":[4],"outV":29}
- {"id":32,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nmacro_rules! generate_const_from_identifier\n```"}}}
- {"id":33,"type":"edge","label":"textDocument/hover","inV":32,"outV":8}
- {"id":34,"type":"vertex","label":"packageInformation","name":"foo","manager":"cargo","version":"0.0.0"}
- {"id":35,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::generate_const_from_identifier","unique":"scheme","kind":"export"}
- {"id":36,"type":"edge","label":"packageInformation","inV":34,"outV":35}
- {"id":37,"type":"edge","label":"moniker","inV":35,"outV":8}
- {"id":38,"type":"vertex","label":"definitionResult"}
- {"id":39,"type":"edge","label":"item","document":1,"inVs":[7],"outV":38}
- {"id":40,"type":"edge","label":"textDocument/definition","inV":38,"outV":8}
- {"id":41,"type":"vertex","label":"referenceResult"}
- {"id":42,"type":"edge","label":"textDocument/references","inV":41,"outV":8}
- {"id":43,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[7],"outV":41}
- {"id":44,"type":"edge","label":"item","document":1,"property":"references","inVs":[10,21],"outV":41}
- {"id":45,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nconst REQ_001: &str = \"encoded_data\"\n```"}}}
- {"id":46,"type":"edge","label":"textDocument/hover","inV":45,"outV":13}
- {"id":47,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::REQ_001","unique":"scheme","kind":"export"}
- {"id":48,"type":"edge","label":"packageInformation","inV":34,"outV":47}
- {"id":49,"type":"edge","label":"moniker","inV":47,"outV":13}
- {"id":50,"type":"vertex","label":"definitionResult"}
- {"id":51,"type":"edge","label":"item","document":1,"inVs":[12],"outV":50}
- {"id":52,"type":"edge","label":"textDocument/definition","inV":50,"outV":13}
- {"id":53,"type":"vertex","label":"referenceResult"}
- {"id":54,"type":"edge","label":"textDocument/references","inV":53,"outV":13}
- {"id":55,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[12],"outV":53}
- {"id":56,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nmod tests\n```"}}}
- {"id":57,"type":"edge","label":"textDocument/hover","inV":56,"outV":16}
- {"id":58,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::tests","unique":"scheme","kind":"export"}
- {"id":59,"type":"edge","label":"packageInformation","inV":34,"outV":58}
- {"id":60,"type":"edge","label":"moniker","inV":58,"outV":16}
- {"id":61,"type":"vertex","label":"definitionResult"}
- {"id":62,"type":"edge","label":"item","document":1,"inVs":[15],"outV":61}
- {"id":63,"type":"edge","label":"textDocument/definition","inV":61,"outV":16}
- {"id":64,"type":"vertex","label":"referenceResult"}
- {"id":65,"type":"edge","label":"textDocument/references","inV":64,"outV":16}
- {"id":66,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[15],"outV":64}
- {"id":67,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nextern crate foo\n```"}}}
- {"id":68,"type":"edge","label":"textDocument/hover","inV":67,"outV":19}
- {"id":69,"type":"vertex","label":"definitionResult"}
- {"id":70,"type":"vertex","label":"range","start":{"line":0,"character":0},"end":{"line":13,"character":0}}
- {"id":71,"type":"edge","label":"contains","inVs":[70],"outV":1}
- {"id":72,"type":"edge","label":"item","document":1,"inVs":[70],"outV":69}
- {"id":73,"type":"edge","label":"textDocument/definition","inV":69,"outV":19}
- {"id":74,"type":"vertex","label":"referenceResult"}
- {"id":75,"type":"edge","label":"textDocument/references","inV":74,"outV":19}
- {"id":76,"type":"edge","label":"item","document":1,"property":"references","inVs":[18],"outV":74}
- {"id":77,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo::tests\n```\n\n```rust\nconst REQ_002: &str = \"encoded_data\"\n```"}}}
- {"id":78,"type":"edge","label":"textDocument/hover","inV":77,"outV":24}
- {"id":79,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::tests::REQ_002","unique":"scheme","kind":"export"}
- {"id":80,"type":"edge","label":"packageInformation","inV":34,"outV":79}
- {"id":81,"type":"edge","label":"moniker","inV":79,"outV":24}
- {"id":82,"type":"vertex","label":"definitionResult"}
- {"id":83,"type":"edge","label":"item","document":1,"inVs":[23],"outV":82}
- {"id":84,"type":"edge","label":"textDocument/definition","inV":82,"outV":24}
- {"id":85,"type":"vertex","label":"referenceResult"}
- {"id":86,"type":"edge","label":"textDocument/references","inV":85,"outV":24}
- {"id":87,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[23],"outV":85}
+ {"id":21,"type":"vertex","label":"range","start":{"line":10,"character":8},"end":{"line":10,"character":13}}
+ {"id":22,"type":"edge","label":"next","inV":5,"outV":21}
+ {"id":23,"type":"vertex","label":"range","start":{"line":11,"character":4},"end":{"line":11,"character":34}}
+ {"id":24,"type":"edge","label":"next","inV":11,"outV":23}
+ {"id":25,"type":"vertex","label":"range","start":{"line":11,"character":36},"end":{"line":11,"character":43}}
+ {"id":26,"type":"vertex","label":"resultSet"}
+ {"id":27,"type":"edge","label":"next","inV":26,"outV":25}
+ {"id":28,"type":"edge","label":"contains","inVs":[4,7,10,13,15,18,21,23,25],"outV":1}
+ {"id":29,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nextern crate foo\n```"}}}
+ {"id":30,"type":"edge","label":"textDocument/hover","inV":29,"outV":5}
+ {"id":31,"type":"vertex","label":"packageInformation","name":"foo","manager":"cargo","version":"0.0.0"}
+ {"id":32,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::crate","unique":"scheme","kind":"export"}
+ {"id":33,"type":"edge","label":"packageInformation","inV":31,"outV":32}
+ {"id":34,"type":"edge","label":"moniker","inV":32,"outV":5}
+ {"id":35,"type":"vertex","label":"definitionResult"}
+ {"id":36,"type":"edge","label":"item","document":1,"inVs":[4],"outV":35}
+ {"id":37,"type":"edge","label":"textDocument/definition","inV":35,"outV":5}
+ {"id":38,"type":"vertex","label":"referenceResult"}
+ {"id":39,"type":"edge","label":"textDocument/references","inV":38,"outV":5}
+ {"id":40,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[4],"outV":38}
+ {"id":41,"type":"edge","label":"item","document":1,"property":"references","inVs":[21],"outV":38}
+ {"id":42,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\n#[allow]\n```\n\n---\n\nValid forms are:\n\n* \\#\\[allow(lint1, lint2, ..., /\\*opt\\*/ reason = \"...\")\\]"}}}
+ {"id":43,"type":"edge","label":"textDocument/hover","inV":42,"outV":8}
+ {"id":44,"type":"vertex","label":"referenceResult"}
+ {"id":45,"type":"edge","label":"textDocument/references","inV":44,"outV":8}
+ {"id":46,"type":"edge","label":"item","document":1,"property":"references","inVs":[7],"outV":44}
+ {"id":47,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nmacro_rules! generate_const_from_identifier\n```"}}}
+ {"id":48,"type":"edge","label":"textDocument/hover","inV":47,"outV":11}
+ {"id":49,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::generate_const_from_identifier","unique":"scheme","kind":"export"}
+ {"id":50,"type":"edge","label":"packageInformation","inV":31,"outV":49}
+ {"id":51,"type":"edge","label":"moniker","inV":49,"outV":11}
+ {"id":52,"type":"vertex","label":"definitionResult"}
+ {"id":53,"type":"edge","label":"item","document":1,"inVs":[10],"outV":52}
+ {"id":54,"type":"edge","label":"textDocument/definition","inV":52,"outV":11}
+ {"id":55,"type":"vertex","label":"referenceResult"}
+ {"id":56,"type":"edge","label":"textDocument/references","inV":55,"outV":11}
+ {"id":57,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[10],"outV":55}
+ {"id":58,"type":"edge","label":"item","document":1,"property":"references","inVs":[13,23],"outV":55}
+ {"id":59,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nconst REQ_001: &str = \"encoded_data\"\n```"}}}
+ {"id":60,"type":"edge","label":"textDocument/hover","inV":59,"outV":16}
+ {"id":61,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::REQ_001","unique":"scheme","kind":"export"}
+ {"id":62,"type":"edge","label":"packageInformation","inV":31,"outV":61}
+ {"id":63,"type":"edge","label":"moniker","inV":61,"outV":16}
+ {"id":64,"type":"vertex","label":"definitionResult"}
+ {"id":65,"type":"edge","label":"item","document":1,"inVs":[15],"outV":64}
+ {"id":66,"type":"edge","label":"textDocument/definition","inV":64,"outV":16}
+ {"id":67,"type":"vertex","label":"referenceResult"}
+ {"id":68,"type":"edge","label":"textDocument/references","inV":67,"outV":16}
+ {"id":69,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[15],"outV":67}
+ {"id":70,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nmod tests\n```"}}}
+ {"id":71,"type":"edge","label":"textDocument/hover","inV":70,"outV":19}
+ {"id":72,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::tests","unique":"scheme","kind":"export"}
+ {"id":73,"type":"edge","label":"packageInformation","inV":31,"outV":72}
+ {"id":74,"type":"edge","label":"moniker","inV":72,"outV":19}
+ {"id":75,"type":"vertex","label":"definitionResult"}
+ {"id":76,"type":"edge","label":"item","document":1,"inVs":[18],"outV":75}
+ {"id":77,"type":"edge","label":"textDocument/definition","inV":75,"outV":19}
+ {"id":78,"type":"vertex","label":"referenceResult"}
+ {"id":79,"type":"edge","label":"textDocument/references","inV":78,"outV":19}
+ {"id":80,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[18],"outV":78}
+ {"id":81,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo::tests\n```\n\n```rust\nconst REQ_002: &str = \"encoded_data\"\n```"}}}
+ {"id":82,"type":"edge","label":"textDocument/hover","inV":81,"outV":26}
+ {"id":83,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::tests::REQ_002","unique":"scheme","kind":"export"}
+ {"id":84,"type":"edge","label":"packageInformation","inV":31,"outV":83}
+ {"id":85,"type":"edge","label":"moniker","inV":83,"outV":26}
+ {"id":86,"type":"vertex","label":"definitionResult"}
+ {"id":87,"type":"edge","label":"item","document":1,"inVs":[25],"outV":86}
+ {"id":88,"type":"edge","label":"textDocument/definition","inV":86,"outV":26}
+ {"id":89,"type":"vertex","label":"referenceResult"}
+ {"id":90,"type":"edge","label":"textDocument/references","inV":89,"outV":26}
+ {"id":91,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[25],"outV":89}
"#]].assert_eq(stdout);
}
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index 2b3c0a47a2..6f26bdc2cf 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -21,12 +21,14 @@ use lsp_types::{
notification::DidOpenTextDocument,
request::{
CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest,
- InlayHintRequest, InlayHintResolveRequest, WillRenameFiles, WorkspaceSymbolRequest,
+ InlayHintRequest, InlayHintResolveRequest, RangeFormatting, WillRenameFiles,
+ WorkspaceSymbolRequest,
},
CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams,
- DocumentFormattingParams, FileRename, FormattingOptions, GotoDefinitionParams, HoverParams,
- InlayHint, InlayHintLabel, InlayHintParams, PartialResultParams, Position, Range,
- RenameFilesParams, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams,
+ DocumentFormattingParams, DocumentRangeFormattingParams, FileRename, FormattingOptions,
+ GotoDefinitionParams, HoverParams, InlayHint, InlayHintLabel, InlayHintParams,
+ PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem,
+ TextDocumentPositionParams, WorkDoneProgressParams,
};
use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams};
use serde_json::json;
@@ -661,6 +663,70 @@ fn main() {}
}
#[test]
+fn test_format_document_range() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+fn main() {
+ let unit_offsets_cache = collect(dwarf.units ()) ?;
+}
+"#,
+ )
+ .with_config(serde_json::json!({
+ "rustfmt": {
+ "overrideCommand": [ "rustfmt", "+nightly", ],
+ "rangeFormatting": { "enable": true }
+ },
+ }))
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ server.request::<RangeFormatting>(
+ DocumentRangeFormattingParams {
+ range: Range {
+ end: Position { line: 1, character: 0 },
+ start: Position { line: 1, character: 0 },
+ },
+ text_document: server.doc_id("src/lib.rs"),
+ options: FormattingOptions {
+ tab_size: 4,
+ insert_spaces: false,
+ insert_final_newline: None,
+ trim_final_newlines: None,
+ trim_trailing_whitespace: None,
+ properties: HashMap::new(),
+ },
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ },
+ json!([
+ {
+ "newText": "",
+ "range": {
+ "start": { "character": 48, "line": 1 },
+ "end": { "character": 50, "line": 1 },
+ },
+ },
+ {
+ "newText": "",
+ "range": {
+ "start": { "character": 53, "line": 1 },
+ "end": { "character": 55, "line": 1 },
+ },
+ }
+ ]),
+ );
+}
+
+#[test]
fn test_missing_module_code_action() {
if skip_slow_tests() {
return;
@@ -1086,7 +1152,11 @@ fn resolve_proc_macro() {
&AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()),
&Default::default(),
);
- sysroot.load_workspace(&project_model::SysrootSourceWorkspaceConfig::default_cargo());
+ let loaded_sysroot =
+ sysroot.load_workspace(&project_model::RustSourceWorkspaceConfig::default_cargo());
+ if let Some(loaded_sysroot) = loaded_sysroot {
+ sysroot.set_workspace(loaded_sysroot);
+ }
let proc_macro_server_path = sysroot.discover_proc_macro_srv().unwrap();
@@ -1372,6 +1442,40 @@ pub fn foo() {}
name = "bar"
version = "0.0.0"
+[dependencies]
+foo = { path = "../foo" }
+
+//- /bar/src/lib.rs
+"#,
+ )
+ .root("foo")
+ .root("bar")
+ .root("baz")
+ .with_config(json!({
+ "files": {
+ "exclude": ["foo"]
+ }
+ }))
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ server.request::<WorkspaceSymbolRequest>(Default::default(), json!([]));
+
+ let server = Project::with_fixture(
+ r#"
+//- /foo/Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /foo/src/lib.rs
+pub fn foo() {}
+
+//- /bar/Cargo.toml
+[package]
+name = "bar"
+version = "0.0.0"
+
//- /bar/src/lib.rs
pub fn bar() {}
@@ -1388,7 +1492,7 @@ version = "0.0.0"
.root("baz")
.with_config(json!({
"files": {
- "excludeDirs": ["foo", "bar"]
+ "exclude": ["foo", "bar"]
}
}))
.server()
diff --git a/crates/rust-analyzer/tests/slow-tests/testdir.rs b/crates/rust-analyzer/tests/slow-tests/testdir.rs
index d113bd5127..409be2894f 100644
--- a/crates/rust-analyzer/tests/slow-tests/testdir.rs
+++ b/crates/rust-analyzer/tests/slow-tests/testdir.rs
@@ -43,10 +43,15 @@ impl TestDir {
}
fs::create_dir_all(&path).unwrap();
- #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
+ #[cfg(any(
+ target_os = "macos",
+ target_os = "linux",
+ target_os = "windows",
+ target_os = "freebsd"
+ ))]
if symlink {
let symlink_path = base.join(format!("{pid}_{cnt}_symlink"));
- #[cfg(any(target_os = "macos", target_os = "linux"))]
+ #[cfg(any(target_os = "macos", target_os = "linux", target_os = "freebsd"))]
std::os::unix::fs::symlink(path, &symlink_path).unwrap();
#[cfg(target_os = "windows")]
diff --git a/crates/stdx/src/panic_context.rs b/crates/stdx/src/panic_context.rs
index 4ec74c0742..a35d50b78d 100644
--- a/crates/stdx/src/panic_context.rs
+++ b/crates/stdx/src/panic_context.rs
@@ -1,28 +1,25 @@
//! A micro-crate to enhance panic messages with context info.
-//!
-//! FIXME: upstream to <https://github.com/kriomant/panic-context> ?
use std::{cell::RefCell, panic, sync::Once};
-pub fn enter(context: String) -> PanicContext {
- static ONCE: Once = Once::new();
- ONCE.call_once(PanicContext::init);
-
- with_ctx(|ctx| ctx.push(context));
- PanicContext { _priv: () }
-}
-
+/// Dummy for leveraging RAII cleanup to pop frames.
#[must_use]
pub struct PanicContext {
+ // prevent arbitrary construction
_priv: (),
}
-impl PanicContext {
+impl Drop for PanicContext {
+ fn drop(&mut self) {
+ with_ctx(|ctx| assert!(ctx.pop().is_some()));
+ }
+}
+
+pub fn enter(frame: String) -> PanicContext {
#[allow(clippy::print_stderr)]
- fn init() {
+ fn set_hook() {
let default_hook = panic::take_hook();
- #[allow(deprecated)]
- let hook = move |panic_info: &panic::PanicInfo<'_>| {
+ panic::set_hook(Box::new(move |panic_info| {
with_ctx(|ctx| {
if !ctx.is_empty() {
eprintln!("Panic context:");
@@ -30,17 +27,16 @@ impl PanicContext {
eprintln!("> {frame}\n");
}
}
- default_hook(panic_info);
});
- };
- panic::set_hook(Box::new(hook));
+ default_hook(panic_info);
+ }));
}
-}
-impl Drop for PanicContext {
- fn drop(&mut self) {
- with_ctx(|ctx| assert!(ctx.pop().is_some()));
- }
+ static SET_HOOK: Once = Once::new();
+ SET_HOOK.call_once(set_hook);
+
+ with_ctx(|ctx| ctx.push(frame));
+ PanicContext { _priv: () }
}
fn with_ctx(f: impl FnOnce(&mut Vec<String>)) {
diff --git a/crates/stdx/src/thread/pool.rs b/crates/stdx/src/thread/pool.rs
index 2ddd7da74c..9acc1de922 100644
--- a/crates/stdx/src/thread/pool.rs
+++ b/crates/stdx/src/thread/pool.rs
@@ -7,9 +7,12 @@
//! The thread pool is implemented entirely using
//! the threading utilities in [`crate::thread`].
-use std::sync::{
- atomic::{AtomicUsize, Ordering},
- Arc,
+use std::{
+ panic::{self, UnwindSafe},
+ sync::{
+ atomic::{AtomicUsize, Ordering},
+ Arc,
+ },
};
use crossbeam_channel::{Receiver, Sender};
@@ -25,13 +28,13 @@ pub struct Pool {
// so that the channel is actually closed
// before we join the worker threads!
job_sender: Sender<Job>,
- _handles: Vec<JoinHandle>,
+ _handles: Box<[JoinHandle]>,
extant_tasks: Arc<AtomicUsize>,
}
struct Job {
requested_intent: ThreadIntent,
- f: Box<dyn FnOnce() + Send + 'static>,
+ f: Box<dyn FnOnce() + Send + UnwindSafe + 'static>,
}
impl Pool {
@@ -47,6 +50,7 @@ impl Pool {
let handle = Builder::new(INITIAL_INTENT)
.stack_size(STACK_SIZE)
.name("Worker".into())
+ .allow_leak(true)
.spawn({
let extant_tasks = Arc::clone(&extant_tasks);
let job_receiver: Receiver<Job> = job_receiver.clone();
@@ -58,7 +62,8 @@ impl Pool {
current_intent = job.requested_intent;
}
extant_tasks.fetch_add(1, Ordering::SeqCst);
- (job.f)();
+ // discard the panic, we should've logged the backtrace already
+ _ = panic::catch_unwind(job.f);
extant_tasks.fetch_sub(1, Ordering::SeqCst);
}
}
@@ -68,12 +73,12 @@ impl Pool {
handles.push(handle);
}
- Pool { _handles: handles, extant_tasks, job_sender }
+ Pool { _handles: handles.into_boxed_slice(), extant_tasks, job_sender }
}
pub fn spawn<F>(&self, intent: ThreadIntent, f: F)
where
- F: FnOnce() + Send + 'static,
+ F: FnOnce() + Send + UnwindSafe + 'static,
{
let f = Box::new(move || {
if cfg!(debug_assertions) {
diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram
index 4e2a70d6cd..bbb8413cbc 100644
--- a/crates/syntax/rust.ungram
+++ b/crates/syntax/rust.ungram
@@ -241,7 +241,7 @@ RecordFieldList =
RecordField =
Attr* Visibility?
- Name ':' Type
+ Name ':' Type ('=' Expr)?
TupleFieldList =
'(' fields:(TupleField (',' TupleField)* ','?)? ')'
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs
index 291fc646e2..aedf810b79 100644
--- a/crates/syntax/src/ast/edit_in_place.rs
+++ b/crates/syntax/src/ast/edit_in_place.rs
@@ -710,52 +710,6 @@ impl ast::Fn {
}
}
-impl Removable for ast::MatchArm {
- fn remove(&self) {
- if let Some(sibling) = self.syntax().prev_sibling_or_token() {
- if sibling.kind() == SyntaxKind::WHITESPACE {
- ted::remove(sibling);
- }
- }
- if let Some(sibling) = self.syntax().next_sibling_or_token() {
- if sibling.kind() == T![,] {
- ted::remove(sibling);
- }
- }
- ted::remove(self.syntax());
- }
-}
-
-impl ast::MatchArmList {
- pub fn add_arm(&self, arm: ast::MatchArm) {
- normalize_ws_between_braces(self.syntax());
- let mut elements = Vec::new();
- let position = match self.arms().last() {
- Some(last_arm) => {
- if needs_comma(&last_arm) {
- ted::append_child(last_arm.syntax(), make::token(SyntaxKind::COMMA));
- }
- Position::after(last_arm.syntax().clone())
- }
- None => match self.l_curly_token() {
- Some(it) => Position::after(it),
- None => Position::last_child_of(self.syntax()),
- },
- };
- let indent = IndentLevel::from_node(self.syntax()) + 1;
- elements.push(make::tokens::whitespace(&format!("\n{indent}")).into());
- elements.push(arm.syntax().clone().into());
- if needs_comma(&arm) {
- ted::append_child(arm.syntax(), make::token(SyntaxKind::COMMA));
- }
- ted::insert_all(position, elements);
-
- fn needs_comma(arm: &ast::MatchArm) -> bool {
- arm.expr().is_some_and(|e| !e.is_block_like()) && arm.comma_token().is_none()
- }
- }
-}
-
impl ast::LetStmt {
pub fn set_ty(&self, ty: Option<ast::Type>) {
match ty {
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs
index 69e2a9f9c1..58c76a456a 100644
--- a/crates/syntax/src/ast/generated/nodes.rs
+++ b/crates/syntax/src/ast/generated/nodes.rs
@@ -1,4 +1,4 @@
-//! Generated by `cargo codegen grammar`, do not edit by hand.
+//! Generated by `cargo xtask codegen grammar`, do not edit by hand.
#![allow(non_snake_case)]
use crate::{
@@ -1539,9 +1539,13 @@ impl ast::HasName for RecordField {}
impl ast::HasVisibility for RecordField {}
impl RecordField {
#[inline]
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ #[inline]
pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
#[inline]
pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ #[inline]
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
diff --git a/crates/syntax/src/ast/generated/tokens.rs b/crates/syntax/src/ast/generated/tokens.rs
index 85d20c2bd8..df2e9619db 100644
--- a/crates/syntax/src/ast/generated/tokens.rs
+++ b/crates/syntax/src/ast/generated/tokens.rs
@@ -1,4 +1,4 @@
-//! Generated by `cargo codegen grammar`, do not edit by hand.
+//! Generated by `cargo xtask codegen grammar`, do not edit by hand.
use crate::{
ast::AstToken,
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index ff027ac584..9dc2d83253 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -837,7 +837,8 @@ pub fn match_guard(condition: ast::Expr) -> ast::MatchGuard {
pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList {
let arms_str = arms.into_iter().fold(String::new(), |mut acc, arm| {
- let needs_comma = arm.expr().is_none_or(|it| !it.is_block_like());
+ let needs_comma =
+ arm.comma_token().is_none() && arm.expr().is_none_or(|it| !it.is_block_like());
let comma = if needs_comma { "," } else { "" };
let arm = arm.syntax();
format_to_acc!(acc, " {arm}{comma}\n")
diff --git a/crates/syntax/src/ast/prec.rs b/crates/syntax/src/ast/prec.rs
index 28089ffb37..5d33f132ac 100644
--- a/crates/syntax/src/ast/prec.rs
+++ b/crates/syntax/src/ast/prec.rs
@@ -5,7 +5,122 @@ use crate::{
match_ast, AstNode, SyntaxNode,
};
+#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
+pub enum ExprPrecedence {
+ // return, break, yield, closures
+ Jump,
+ // = += -= *= /= %= &= |= ^= <<= >>=
+ Assign,
+ // .. ..=
+ Range,
+ // ||
+ LOr,
+ // &&
+ LAnd,
+ // == != < > <= >=
+ Compare,
+ // |
+ BitOr,
+ // ^
+ BitXor,
+ // &
+ BitAnd,
+ // << >>
+ Shift,
+ // + -
+ Sum,
+ // * / %
+ Product,
+ // as
+ Cast,
+ // unary - * ! & &mut
+ Prefix,
+ // paths, loops, function calls, array indexing, field expressions, method calls
+ Unambiguous,
+}
+
+#[derive(PartialEq, Debug)]
+pub enum Fixity {
+ /// The operator is left-associative
+ Left,
+ /// The operator is right-associative
+ Right,
+ /// The operator is not associative
+ None,
+}
+
+pub fn precedence(expr: &ast::Expr) -> ExprPrecedence {
+ match expr {
+ Expr::ClosureExpr(closure) => match closure.ret_type() {
+ None => ExprPrecedence::Jump,
+ Some(_) => ExprPrecedence::Unambiguous,
+ },
+
+ Expr::BreakExpr(_)
+ | Expr::ContinueExpr(_)
+ | Expr::ReturnExpr(_)
+ | Expr::YeetExpr(_)
+ | Expr::YieldExpr(_) => ExprPrecedence::Jump,
+
+ Expr::RangeExpr(..) => ExprPrecedence::Range,
+
+ Expr::BinExpr(bin_expr) => match bin_expr.op_kind() {
+ Some(it) => match it {
+ BinaryOp::LogicOp(logic_op) => match logic_op {
+ ast::LogicOp::And => ExprPrecedence::LAnd,
+ ast::LogicOp::Or => ExprPrecedence::LOr,
+ },
+ BinaryOp::ArithOp(arith_op) => match arith_op {
+ ast::ArithOp::Add | ast::ArithOp::Sub => ExprPrecedence::Sum,
+ ast::ArithOp::Div | ast::ArithOp::Rem | ast::ArithOp::Mul => {
+ ExprPrecedence::Product
+ }
+ ast::ArithOp::Shl | ast::ArithOp::Shr => ExprPrecedence::Shift,
+ ast::ArithOp::BitXor => ExprPrecedence::BitXor,
+ ast::ArithOp::BitOr => ExprPrecedence::BitOr,
+ ast::ArithOp::BitAnd => ExprPrecedence::BitAnd,
+ },
+ BinaryOp::CmpOp(_) => ExprPrecedence::Compare,
+ BinaryOp::Assignment { .. } => ExprPrecedence::Assign,
+ },
+ None => ExprPrecedence::Unambiguous,
+ },
+ Expr::CastExpr(_) => ExprPrecedence::Cast,
+
+ Expr::LetExpr(_) | Expr::PrefixExpr(_) | Expr::RefExpr(_) => ExprPrecedence::Prefix,
+
+ Expr::ArrayExpr(_)
+ | Expr::AsmExpr(_)
+ | Expr::AwaitExpr(_)
+ | Expr::BecomeExpr(_)
+ | Expr::BlockExpr(_)
+ | Expr::CallExpr(_)
+ | Expr::FieldExpr(_)
+ | Expr::ForExpr(_)
+ | Expr::FormatArgsExpr(_)
+ | Expr::IfExpr(_)
+ | Expr::IndexExpr(_)
+ | Expr::Literal(_)
+ | Expr::LoopExpr(_)
+ | Expr::MacroExpr(_)
+ | Expr::MatchExpr(_)
+ | Expr::MethodCallExpr(_)
+ | Expr::OffsetOfExpr(_)
+ | Expr::ParenExpr(_)
+ | Expr::PathExpr(_)
+ | Expr::RecordExpr(_)
+ | Expr::TryExpr(_)
+ | Expr::TupleExpr(_)
+ | Expr::UnderscoreExpr(_)
+ | Expr::WhileExpr(_) => ExprPrecedence::Unambiguous,
+ }
+}
+
impl Expr {
+ pub fn precedence(&self) -> ExprPrecedence {
+ precedence(self)
+ }
+
// Implementation is based on
// - https://doc.rust-lang.org/reference/expressions.html#expression-precedence
// - https://matklad.github.io/2020/04/13/simple-but-powerful-pratt-parsing.html
@@ -261,7 +376,7 @@ impl Expr {
}
/// Returns true if self is one of `return`, `break`, `continue` or `yield` with **no associated value**.
- fn is_ret_like_with_no_value(&self) -> bool {
+ pub fn is_ret_like_with_no_value(&self) -> bool {
use Expr::*;
match self {
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs
index 866379d940..37dfb87721 100644
--- a/crates/test-fixture/src/lib.rs
+++ b/crates/test-fixture/src/lib.rs
@@ -17,7 +17,7 @@ use hir_expand::{
tt::{Leaf, TokenTree, TopSubtree, TopSubtreeBuilder, TtElement, TtIter},
FileRange,
};
-use intern::Symbol;
+use intern::{sym, Symbol};
use rustc_hash::FxHashMap;
use span::{Edition, EditionedFileId, FileId, Span};
use stdx::itertools::Itertools;
@@ -211,8 +211,9 @@ impl ChangeFixture {
From::from(meta.cfg.clone()),
Some(From::from(meta.cfg)),
meta.env,
- false,
origin,
+ false,
+ None,
);
let prev = crates.insert(crate_name.clone(), crate_id);
assert!(prev.is_none(), "multiple crates with same name: {crate_name}");
@@ -249,8 +250,9 @@ impl ChangeFixture {
From::from(default_cfg.clone()),
Some(From::from(default_cfg)),
default_env,
- false,
CrateOrigin::Local { repo: None, name: None },
+ false,
+ None,
);
} else {
for (from, to, prelude) in crate_deps {
@@ -258,15 +260,7 @@ impl ChangeFixture {
let to_id = crates[&to];
let sysroot = crate_graph[to_id].origin.is_lang();
crate_graph
- .add_dep(
- from_id,
- Dependency::with_prelude(
- CrateName::new(&to).unwrap(),
- to_id,
- prelude,
- sysroot,
- ),
- )
+ .add_dep(from_id, Dependency::with_prelude(to.clone(), to_id, prelude, sysroot))
.unwrap();
}
}
@@ -294,8 +288,9 @@ impl ChangeFixture {
String::from("__ra_is_test_fixture"),
String::from("__ra_is_test_fixture"),
)]),
- false,
CrateOrigin::Lang(LangCrateOrigin::Core),
+ false,
+ None,
);
for krate in all_crates {
@@ -341,8 +336,9 @@ impl ChangeFixture {
String::from("__ra_is_test_fixture"),
String::from("__ra_is_test_fixture"),
)]),
- true,
CrateOrigin::Local { repo: None, name: None },
+ true,
+ None,
);
proc_macros.insert(proc_macros_crate, Ok(proc_macro));
@@ -370,7 +366,6 @@ impl ChangeFixture {
crate_graph
.iter()
.zip(iter::repeat(From::from(CrateWorkspaceData {
- proc_macro_cwd: None,
data_layout: target_data_layout,
toolchain,
})))
@@ -519,6 +514,21 @@ pub fn issue_18898(_attr: TokenStream, input: TokenStream) -> TokenStream {
disabled: false,
},
),
+ (
+ r#"
+#[proc_macro_attribute]
+pub fn disallow_cfg(_attr: TokenStream, input: TokenStream) -> TokenStream {
+ input
+}
+"#
+ .into(),
+ ProcMacro {
+ name: Symbol::intern("disallow_cfg"),
+ kind: ProcMacroKind::Attr,
+ expander: sync::Arc::new(DisallowCfgProcMacroExpander),
+ disabled: false,
+ },
+ ),
])
}
@@ -873,3 +883,30 @@ impl ProcMacroExpander for Issue18898ProcMacroExpander {
})
}
}
+
+// Errors if the input contains a `cfg` or `cfg_attr` ident, for issue #17479.
+#[derive(Debug)]
+struct DisallowCfgProcMacroExpander;
+impl ProcMacroExpander for DisallowCfgProcMacroExpander {
+ fn expand(
+ &self,
+ subtree: &TopSubtree,
+ _: Option<&TopSubtree>,
+ _: &Env,
+ _: Span,
+ _: Span,
+ _: Span,
+ _: Option<String>,
+ ) -> Result<TopSubtree, ProcMacroExpansionError> {
+ for tt in subtree.token_trees().flat_tokens() {
+ if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = tt {
+ if ident.sym == sym::cfg || ident.sym == sym::cfg_attr {
+ return Err(ProcMacroExpansionError::Panic(
+ "cfg or cfg_attr found in DisallowCfgProcMacroExpander".to_owned(),
+ ));
+ }
+ }
+ }
+ Ok(subtree.clone())
+ }
+}
diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs
index 36be9937d3..e7279fa1f6 100644
--- a/crates/test-utils/src/lib.rs
+++ b/crates/test-utils/src/lib.rs
@@ -396,12 +396,19 @@ pub fn skip_slow_tests() -> bool {
if should_skip {
eprintln!("ignoring slow test");
} else {
- let path = project_root().join("./target/.slow_tests_cookie");
+ let path = target_dir().join(".slow_tests_cookie");
fs::write(path, ".").unwrap();
}
should_skip
}
+pub fn target_dir() -> Utf8PathBuf {
+ match std::env::var("CARGO_TARGET_DIR") {
+ Ok(target) => Utf8PathBuf::from(target),
+ Err(_) => project_root().join("target"),
+ }
+}
+
/// Returns the path to the root directory of `rust-analyzer` project.
pub fn project_root() -> Utf8PathBuf {
let dir = env!("CARGO_MANIFEST_DIR");
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index 4ed68d18e8..202afebde7 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -647,18 +647,21 @@ pub mod ops {
#[lang = "fn"]
#[fundamental]
+ #[rustc_paren_sugar]
pub trait Fn<Args: Tuple>: FnMut<Args> {
extern "rust-call" fn call(&self, args: Args) -> Self::Output;
}
#[lang = "fn_mut"]
#[fundamental]
+ #[rustc_paren_sugar]
pub trait FnMut<Args: Tuple>: FnOnce<Args> {
extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output;
}
#[lang = "fn_once"]
#[fundamental]
+ #[rustc_paren_sugar]
pub trait FnOnce<Args: Tuple> {
#[lang = "fn_once_output"]
type Output;
@@ -736,12 +739,14 @@ pub mod ops {
#[lang = "async_fn"]
#[fundamental]
+ #[rustc_paren_sugar]
pub trait AsyncFn<Args: Tuple>: AsyncFnMut<Args> {
extern "rust-call" fn async_call(&self, args: Args) -> Self::CallRefFuture<'_>;
}
#[lang = "async_fn_mut"]
#[fundamental]
+ #[rustc_paren_sugar]
pub trait AsyncFnMut<Args: Tuple>: AsyncFnOnce<Args> {
#[lang = "call_ref_future"]
type CallRefFuture<'a>: Future<Output = Self::Output>
@@ -752,6 +757,7 @@ pub mod ops {
#[lang = "async_fn_once"]
#[fundamental]
+ #[rustc_paren_sugar]
pub trait AsyncFnOnce<Args: Tuple> {
#[lang = "async_fn_once_output"]
type Output;
diff --git a/crates/vfs-notify/src/lib.rs b/crates/vfs-notify/src/lib.rs
index 0ae8b7baf4..3200334176 100644
--- a/crates/vfs-notify/src/lib.rs
+++ b/crates/vfs-notify/src/lib.rs
@@ -280,8 +280,9 @@ impl NotifyActor {
return false;
}
- root == path
- || dirs.exclude.iter().chain(&dirs.include).all(|it| it != path)
+ // We want to filter out subdirectories that are roots themselves, because they will be visited separately.
+ dirs.exclude.iter().all(|it| it != path)
+ && (root == path || dirs.include.iter().all(|it| it != path))
});
let files = walkdir.filter_map(|it| it.ok()).filter_map(|entry| {
diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs
index a26444e9ea..3feca512e5 100644
--- a/crates/vfs/src/lib.rs
+++ b/crates/vfs/src/lib.rs
@@ -100,6 +100,9 @@ pub enum FileState {
Exists(u64),
/// The file is deleted.
Deleted,
+ /// The file was specifically excluded by the user. We still include excluded files
+ /// when they're opened (without their contents).
+ Excluded,
}
/// Changed file in the [`Vfs`].
@@ -164,10 +167,22 @@ pub enum ChangeKind {
Delete,
}
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum FileExcluded {
+ Yes,
+ No,
+}
+
impl Vfs {
/// Id of the given path if it exists in the `Vfs` and is not deleted.
- pub fn file_id(&self, path: &VfsPath) -> Option<FileId> {
- self.interner.get(path).filter(|&it| matches!(self.get(it), FileState::Exists(_)))
+ pub fn file_id(&self, path: &VfsPath) -> Option<(FileId, FileExcluded)> {
+ let file_id = self.interner.get(path)?;
+ let file_state = self.get(file_id);
+ match file_state {
+ FileState::Exists(_) => Some((file_id, FileExcluded::No)),
+ FileState::Deleted => None,
+ FileState::Excluded => Some((file_id, FileExcluded::Yes)),
+ }
}
/// File path corresponding to the given `file_id`.
@@ -216,6 +231,7 @@ impl Vfs {
}
Change::Modify(v, new_hash)
}
+ (FileState::Excluded, _) => return false,
};
let mut set_data = |change_kind| {
@@ -297,6 +313,13 @@ impl Vfs {
fn get(&self, file_id: FileId) -> FileState {
self.data[file_id.0 as usize]
}
+
+ /// We cannot ignore excluded files, because this will lead to errors when the client
+ /// requests semantic information for them, so we instead mark them specially.
+ pub fn insert_excluded_file(&mut self, path: VfsPath) {
+ let file_id = self.alloc_file_id(path);
+ self.data[file_id.0 as usize] = FileState::Excluded;
+ }
}
impl fmt::Debug for Vfs {
diff --git a/docs/book/README.md b/docs/book/README.md
new file mode 100644
index 0000000000..043524b234
--- /dev/null
+++ b/docs/book/README.md
@@ -0,0 +1,29 @@
+# rust-analyzer documentation
+
+The rust-analyzer manual uses [mdbook](https://rust-lang.github.io/mdBook/).
+
+## Quick start
+
+To run the documentation site locally:
+
+```shell
+cargo install mdbook
+cd docs/book
+mdbook serve
+# make changes to documentation files in doc/book/src
+# ...
+```
+
+mdbook will rebuild the documentation as changes are made.
+
+## Making updates
+
+While not required, installing the mdbook binary can be helpful in order to see the changes.
+Start with the mdbook [User Guide](https://rust-lang.github.io/mdBook/guide/installation.html) to familiarize yourself with the tool.
+
+## Generated documentation
+
+Four sections are generated dynamically: assists, configuration, diagnostics and features. Their content is found in the `generated.md` files
+of the respective book section, for example `src/configuration_generated.md`, and are included in the book via mdbook's
+[include](https://rust-lang.github.io/mdBook/format/mdbook.html#including-files) functionality. Generated files can be rebuilt by running the various
+test cases that generate them, or by simply running all of the `rust-analyzer` tests with `cargo test` and `cargo xtask codegen`.
diff --git a/docs/book/book.toml b/docs/book/book.toml
new file mode 100644
index 0000000000..a6f6a6ed78
--- /dev/null
+++ b/docs/book/book.toml
@@ -0,0 +1,41 @@
+[book]
+authors = ["The rust-analyzer authors"]
+language = "en"
+multilingual = false
+src = "src"
+title = "rust-analyzer"
+
+[rust]
+edition = "2021"
+
+[output.html]
+edit-url-template = "https://github.com/rust-lang/rust-analyzer/edit/master/docs/book/{path}"
+git-repository-url = "https://github.com/rust-lang/rust-analyzer/tree/master/docs/book"
+mathjax-support = true
+site-url = "/book/"
+
+[output.html.playground]
+editable = true
+runnable = false
+line-numbers = true
+
+[output.html.search]
+boost-hierarchy = 2
+boost-paragraph = 1
+boost-title = 2
+expand = true
+heading-split-level = 2
+limit-results = 20
+use-boolean-and = true
+
+[output.html.redirect]
+"/manual.html" = "/index.html"
+
+[output.html.fold]
+enable = true
+level = 3
+
+[preprocessor.toc]
+command = "mdbook-toc"
+renderer = ["html"]
+max-level = 3
diff --git a/docs/book/src/README.md b/docs/book/src/README.md
new file mode 100644
index 0000000000..71f34e0346
--- /dev/null
+++ b/docs/book/src/README.md
@@ -0,0 +1,21 @@
+# rust-analyzer
+
+At its core, rust-analyzer is a **library** for semantic analysis of
+Rust code as it changes over time. This manual focuses on a specific
+usage of the library -- running it as part of a server that implements
+the [Language Server
+Protocol](https://microsoft.github.io/language-server-protocol/) (LSP).
+The LSP allows various code editors, like VS Code, Emacs or Vim, to
+implement semantic features like completion or goto definition by
+talking to an external language server process.
+
+To improve this document, send a pull request:
+[https://github.com/rust-lang/rust-analyzer](https://github.com/rust-lang/rust-analyzer/blob/master/docs/book/README.md)
+
+The manual is written in markdown and includes
+some extra files which are generated from the source code. Run
+`cargo test` and `cargo xtask codegen` to create these.
+
+If you have questions about using rust-analyzer, please ask them in the
+["IDEs and Editors"](https://users.rust-lang.org/c/ide/14) topic of Rust
+users forum.
diff --git a/docs/book/src/SUMMARY.md b/docs/book/src/SUMMARY.md
new file mode 100644
index 0000000000..1f211a97d7
--- /dev/null
+++ b/docs/book/src/SUMMARY.md
@@ -0,0 +1,24 @@
+# Summary
+
+- [Introduction](README.md)
+- [Installation](installation.md)
+ - [VS Code](vs_code.md)
+ - [rust-analyzer Binary](rust_analyzer_binary.md)
+ - [Other Editors](other_editors.md)
+- [Troubleshooting](troubleshooting.md)
+- [Configuration](configuration.md)
+ - [Non-Cargo Based Projects](non_cargo_based_projects.md)
+- [Security](security.md)
+- [Privacy](privacy.md)
+- [Features](features.md)
+ - [Assists (Code Actions)](assists.md)
+ - [Diagnostics](diagnostics.md)
+- [Editor Features](editor_features.md)
+- [Contributing](contributing/README.md)
+ - [Architecture](contributing/architecture.md)
+ - [Debugging](contributing/debugging.md)
+ - [Guide](contributing/guide.md)
+ - [LSP Extensions](contributing/lsp-extensions.md)
+ - [Setup](contributing/setup.md)
+ - [Style](contributing/style.md)
+ - [Syntax](contributing/syntax.md)
diff --git a/docs/book/src/assists.md b/docs/book/src/assists.md
new file mode 100644
index 0000000000..9d7c3bc1d5
--- /dev/null
+++ b/docs/book/src/assists.md
@@ -0,0 +1,8 @@
+# Assists
+
+Assists, or code actions, are small local refactorings, available in a
+particular context. They are usually triggered by a shortcut or by
+clicking a light bulb icon in the editor. Cursor position or selection
+is signified by `┃` character.
+
+{{#include assists_generated.md:2:}}
diff --git a/docs/book/src/assists_generated.md b/docs/book/src/assists_generated.md
new file mode 100644
index 0000000000..9d68a873ff
--- /dev/null
+++ b/docs/book/src/assists_generated.md
@@ -0,0 +1,3846 @@
+//! Generated by `cargo xtask codegen assists-doc-tests`, do not edit by hand.
+
+### `add_braces`
+**Source:** [add_braces.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_braces.rs#L8)
+
+Adds braces to lambda and match arm expressions.
+
+#### Before
+```rust
+fn foo(n: i32) -> i32 {
+ match n {
+ 1 =>┃ n + 1,
+ _ => 0
+ }
+}
+```
+
+#### After
+```rust
+fn foo(n: i32) -> i32 {
+ match n {
+ 1 => {
+ n + 1
+ },
+ _ => 0
+ }
+}
+```
+
+
+### `add_explicit_type`
+**Source:** [add_explicit_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_explicit_type.rs#L7)
+
+Specify type for a let binding.
+
+#### Before
+```rust
+fn main() {
+ let x┃ = 92;
+}
+```
+
+#### After
+```rust
+fn main() {
+ let x: i32 = 92;
+}
+```
+
+
+### `add_hash`
+**Source:** [raw_string.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/raw_string.rs#L89)
+
+Adds a hash to a raw string literal.
+
+#### Before
+```rust
+fn main() {
+ r#"Hello,┃ World!"#;
+}
+```
+
+#### After
+```rust
+fn main() {
+ r##"Hello, World!"##;
+}
+```
+
+
+### `add_impl_default_members`
+**Source:** [add_missing_impl_members.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_missing_impl_members.rs#L58)
+
+Adds scaffold for overriding default impl members.
+
+#### Before
+```rust
+trait Trait {
+ type X;
+ fn foo(&self);
+ fn bar(&self) {}
+}
+
+impl Trait for () {
+ type X = ();
+ fn foo(&self) {}┃
+}
+```
+
+#### After
+```rust
+trait Trait {
+ type X;
+ fn foo(&self);
+ fn bar(&self) {}
+}
+
+impl Trait for () {
+ type X = ();
+ fn foo(&self) {}
+
+ ┃fn bar(&self) {}
+}
+```
+
+
+### `add_impl_missing_members`
+**Source:** [add_missing_impl_members.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_missing_impl_members.rs#L16)
+
+Adds scaffold for required impl members.
+
+#### Before
+```rust
+trait Trait<T> {
+ type X;
+ fn foo(&self) -> T;
+ fn bar(&self) {}
+}
+
+impl Trait<u32> for () {┃
+
+}
+```
+
+#### After
+```rust
+trait Trait<T> {
+ type X;
+ fn foo(&self) -> T;
+ fn bar(&self) {}
+}
+
+impl Trait<u32> for () {
+ ┃type X;
+
+ fn foo(&self) -> u32 {
+ todo!()
+ }
+}
+```
+
+
+### `add_label_to_loop`
+**Source:** [add_label_to_loop.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_label_to_loop.rs#L9)
+
+Adds a label to a loop.
+
+#### Before
+```rust
+fn main() {
+ loop┃ {
+ break;
+ continue;
+ }
+}
+```
+
+#### After
+```rust
+fn main() {
+ 'l: loop {
+ break 'l;
+ continue 'l;
+ }
+}
+```
+
+
+### `add_lifetime_to_type`
+**Source:** [add_lifetime_to_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_lifetime_to_type.rs#L5)
+
+Adds a new lifetime to a struct, enum or union.
+
+#### Before
+```rust
+struct Point {
+ x: &┃u32,
+ y: u32,
+}
+```
+
+#### After
+```rust
+struct Point<'a> {
+ x: &'a u32,
+ y: u32,
+}
+```
+
+
+### `add_missing_match_arms`
+**Source:** [add_missing_match_arms.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_missing_match_arms.rs#L16)
+
+Adds missing clauses to a `match` expression.
+
+#### Before
+```rust
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ ┃
+ }
+}
+```
+
+#### After
+```rust
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } => ${1:todo!()},
+ Action::Stop => ${2:todo!()},┃
+ }
+}
+```
+
+
+### `add_return_type`
+**Source:** [add_return_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_return_type.rs#L6)
+
+Adds the return type to a function or closure inferred from its tail expression if it doesn't have a return
+type specified. This assists is useable in a functions or closures tail expression or return type position.
+
+#### Before
+```rust
+fn foo() { 4┃2i32 }
+```
+
+#### After
+```rust
+fn foo() -> i32 { 42i32 }
+```
+
+
+### `add_turbo_fish`
+**Source:** [add_turbo_fish.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_turbo_fish.rs#L14)
+
+Adds `::<_>` to a call of a generic method or function.
+
+#### Before
+```rust
+fn make<T>() -> T { todo!() }
+fn main() {
+ let x = make┃();
+}
+```
+
+#### After
+```rust
+fn make<T>() -> T { todo!() }
+fn main() {
+ let x = make::<${0:_}>();
+}
+```
+
+
+### `apply_demorgan`
+**Source:** [apply_demorgan.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/apply_demorgan.rs#L16)
+
+Apply [De Morgan's law](https://en.wikipedia.org/wiki/De_Morgan%27s_laws).
+This transforms expressions of the form `!l || !r` into `!(l && r)`.
+This also works with `&&`. This assist can only be applied with the cursor
+on either `||` or `&&`.
+
+#### Before
+```rust
+fn main() {
+ if x != 4 ||┃ y < 3.14 {}
+}
+```
+
+#### After
+```rust
+fn main() {
+ if !(x == 4 && y >= 3.14) {}
+}
+```
+
+
+### `apply_demorgan_iterator`
+**Source:** [apply_demorgan.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/apply_demorgan.rs#L132)
+
+Apply [De Morgan's law](https://en.wikipedia.org/wiki/De_Morgan%27s_laws) to
+`Iterator::all` and `Iterator::any`.
+
+This transforms expressions of the form `!iter.any(|x| predicate(x))` into
+`iter.all(|x| !predicate(x))` and vice versa. This also works the other way for
+`Iterator::all` into `Iterator::any`.
+
+#### Before
+```rust
+fn main() {
+ let arr = [1, 2, 3];
+ if !arr.into_iter().┃any(|num| num == 4) {
+ println!("foo");
+ }
+}
+```
+
+#### After
+```rust
+fn main() {
+ let arr = [1, 2, 3];
+ if arr.into_iter().all(|num| num != 4) {
+ println!("foo");
+ }
+}
+```
+
+
+### `auto_import`
+**Source:** [auto_import.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/auto_import.rs#L73)
+
+If the name is unresolved, provides all possible imports for it.
+
+#### Before
+```rust
+fn main() {
+ let map = HashMap┃::new();
+}
+```
+
+#### After
+```rust
+use std::collections::HashMap;
+
+fn main() {
+ let map = HashMap::new();
+}
+```
+
+
+### `bind_unused_param`
+**Source:** [bind_unused_param.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/bind_unused_param.rs#L12)
+
+Binds unused function parameter to an underscore.
+
+#### Before
+```rust
+fn some_function(x: i32┃) {}
+```
+
+#### After
+```rust
+fn some_function(x: i32) {
+ let _ = x;
+}
+```
+
+
+### `bool_to_enum`
+**Source:** [bool_to_enum.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/bool_to_enum.rs#L29)
+
+This converts boolean local variables, fields, constants, and statics into a new
+enum with two variants `Bool::True` and `Bool::False`, as well as replacing
+all assignments with the variants and replacing all usages with `== Bool::True` or
+`== Bool::False`.
+
+#### Before
+```rust
+fn main() {
+ let ┃bool = true;
+
+ if bool {
+ println!("foo");
+ }
+}
+```
+
+#### After
+```rust
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+ let bool = Bool::True;
+
+ if bool == Bool::True {
+ println!("foo");
+ }
+}
+```
+
+
+### `change_visibility`
+**Source:** [change_visibility.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/change_visibility.rs#L13)
+
+Adds or changes existing visibility specifier.
+
+#### Before
+```rust
+┃fn frobnicate() {}
+```
+
+#### After
+```rust
+pub(crate) fn frobnicate() {}
+```
+
+
+### `comment_to_doc`
+**Source:** [convert_comment_from_or_to_doc.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_comment_from_or_to_doc.rs#L9)
+
+Converts comments to documentation.
+
+#### Before
+```rust
+// Wow what ┃a nice module
+// I sure hope this shows up when I hover over it
+```
+
+#### After
+```rust
+//! Wow what a nice module
+//! I sure hope this shows up when I hover over it
+```
+
+
+### `convert_bool_then_to_if`
+**Source:** [convert_bool_then.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_bool_then.rs#L131)
+
+Converts a `bool::then` method call to an equivalent if expression.
+
+#### Before
+```rust
+fn main() {
+ (0 == 0).then┃(|| val)
+}
+```
+
+#### After
+```rust
+fn main() {
+ if 0 == 0 {
+ Some(val)
+ } else {
+ None
+ }
+}
+```
+
+
+### `convert_closure_to_fn`
+**Source:** [convert_closure_to_fn.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_closure_to_fn.rs#L25)
+
+This converts a closure to a freestanding function, changing all captures to parameters.
+
+#### Before
+```rust
+fn main() {
+ let mut s = String::new();
+ let closure = |┃a| s.push_str(a);
+ closure("abc");
+}
+```
+
+#### After
+```rust
+fn main() {
+ let mut s = String::new();
+ fn closure(a: &str, s: &mut String) {
+ s.push_str(a)
+ }
+ closure("abc", &mut s);
+}
+```
+
+
+### `convert_for_loop_with_for_each`
+**Source:** [convert_iter_for_each_to_for.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs#L76)
+
+Converts a for loop into a for_each loop on the Iterator.
+
+#### Before
+```rust
+fn main() {
+ let x = vec![1, 2, 3];
+ for┃ v in x {
+ let y = v * 2;
+ }
+}
+```
+
+#### After
+```rust
+fn main() {
+ let x = vec![1, 2, 3];
+ x.into_iter().for_each(|v| {
+ let y = v * 2;
+ });
+}
+```
+
+
+### `convert_from_to_tryfrom`
+**Source:** [convert_from_to_tryfrom.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs#L10)
+
+Converts a From impl to a TryFrom impl, wrapping returns in `Ok`.
+
+#### Before
+```rust
+impl ┃From<usize> for Thing {
+ fn from(val: usize) -> Self {
+ Thing {
+ b: val.to_string(),
+ a: val
+ }
+ }
+}
+```
+
+#### After
+```rust
+impl TryFrom<usize> for Thing {
+ type Error = ${0:()};
+
+ fn try_from(val: usize) -> Result<Self, Self::Error> {
+ Ok(Thing {
+ b: val.to_string(),
+ a: val
+ })
+ }
+}
+```
+
+
+### `convert_if_to_bool_then`
+**Source:** [convert_bool_then.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_bool_then.rs#L20)
+
+Converts an if expression into a corresponding `bool::then` call.
+
+#### Before
+```rust
+fn main() {
+ if┃ cond {
+ Some(val)
+ } else {
+ None
+ }
+}
+```
+
+#### After
+```rust
+fn main() {
+ cond.then(|| val)
+}
+```
+
+
+### `convert_integer_literal`
+**Source:** [convert_integer_literal.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_integer_literal.rs#L5)
+
+Converts the base of integer literals to other bases.
+
+#### Before
+```rust
+const _: i32 = 10┃;
+```
+
+#### After
+```rust
+const _: i32 = 0b1010;
+```
+
+
+### `convert_into_to_from`
+**Source:** [convert_into_to_from.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_into_to_from.rs#L8)
+
+Converts an Into impl to an equivalent From impl.
+
+#### Before
+```rust
+impl ┃Into<Thing> for usize {
+ fn into(self) -> Thing {
+ Thing {
+ b: self.to_string(),
+ a: self
+ }
+ }
+}
+```
+
+#### After
+```rust
+impl From<usize> for Thing {
+ fn from(val: usize) -> Self {
+ Thing {
+ b: val.to_string(),
+ a: val
+ }
+ }
+}
+```
+
+
+### `convert_iter_for_each_to_for`
+**Source:** [convert_iter_for_each_to_for.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs#L11)
+
+Converts an Iterator::for_each function into a for loop.
+
+#### Before
+```rust
+fn main() {
+ let iter = iter::repeat((9, 2));
+ iter.for_each┃(|(x, y)| {
+ println!("x: {}, y: {}", x, y);
+ });
+}
+```
+
+#### After
+```rust
+fn main() {
+ let iter = iter::repeat((9, 2));
+ for (x, y) in iter {
+ println!("x: {}, y: {}", x, y);
+ }
+}
+```
+
+
+### `convert_let_else_to_match`
+**Source:** [convert_let_else_to_match.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_let_else_to_match.rs#L9)
+
+Converts let-else statement to let statement and match expression.
+
+#### Before
+```rust
+fn main() {
+ let Ok(mut x) = f() else┃ { return };
+}
+```
+
+#### After
+```rust
+fn main() {
+ let mut x = match f() {
+ Ok(x) => x,
+ _ => return,
+ };
+}
+```
+
+
+### `convert_match_to_let_else`
+**Source:** [convert_match_to_let_else.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_match_to_let_else.rs#L12)
+
+Converts let statement with match initializer to let-else statement.
+
+#### Before
+```rust
+fn foo(opt: Option<()>) {
+ let val┃ = match opt {
+ Some(it) => it,
+ None => return,
+ };
+}
+```
+
+#### After
+```rust
+fn foo(opt: Option<()>) {
+ let Some(val) = opt else { return };
+}
+```
+
+
+### `convert_named_struct_to_tuple_struct`
+**Source:** [convert_named_struct_to_tuple_struct.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs#L11)
+
+Converts struct with named fields to tuple struct, and analogously for enum variants with named
+fields.
+
+#### Before
+```rust
+struct Point┃ { x: f32, y: f32 }
+
+impl Point {
+ pub fn new(x: f32, y: f32) -> Self {
+ Point { x, y }
+ }
+
+ pub fn x(&self) -> f32 {
+ self.x
+ }
+
+ pub fn y(&self) -> f32 {
+ self.y
+ }
+}
+```
+
+#### After
+```rust
+struct Point(f32, f32);
+
+impl Point {
+ pub fn new(x: f32, y: f32) -> Self {
+ Point(x, y)
+ }
+
+ pub fn x(&self) -> f32 {
+ self.0
+ }
+
+ pub fn y(&self) -> f32 {
+ self.1
+ }
+}
+```
+
+
+### `convert_nested_function_to_closure`
+**Source:** [convert_nested_function_to_closure.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_nested_function_to_closure.rs#L7)
+
+Converts a function that is defined within the body of another function into a closure.
+
+#### Before
+```rust
+fn main() {
+ fn fo┃o(label: &str, number: u64) {
+ println!("{}: {}", label, number);
+ }
+
+ foo("Bar", 100);
+}
+```
+
+#### After
+```rust
+fn main() {
+ let foo = |label: &str, number: u64| {
+ println!("{}: {}", label, number);
+ };
+
+ foo("Bar", 100);
+}
+```
+
+
+### `convert_to_guarded_return`
+**Source:** [convert_to_guarded_return.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_to_guarded_return.rs#L24)
+
+Replace a large conditional with a guarded return.
+
+#### Before
+```rust
+fn main() {
+ ┃if cond {
+ foo();
+ bar();
+ }
+}
+```
+
+#### After
+```rust
+fn main() {
+ if !cond {
+ return;
+ }
+ foo();
+ bar();
+}
+```
+
+
+### `convert_tuple_return_type_to_struct`
+**Source:** [convert_tuple_return_type_to_struct.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs#L20)
+
+This converts the return type of a function from a tuple type
+into a tuple struct and updates the body accordingly.
+
+#### Before
+```rust
+fn bar() {
+ let (a, b, c) = foo();
+}
+
+fn foo() -> (┃u32, u32, u32) {
+ (1, 2, 3)
+}
+```
+
+#### After
+```rust
+fn bar() {
+ let FooResult(a, b, c) = foo();
+}
+
+struct FooResult(u32, u32, u32);
+
+fn foo() -> FooResult {
+ FooResult(1, 2, 3)
+}
+```
+
+
+### `convert_tuple_struct_to_named_struct`
+**Source:** [convert_tuple_struct_to_named_struct.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs#L10)
+
+Converts tuple struct to struct with named fields, and analogously for tuple enum variants.
+
+#### Before
+```rust
+struct Point┃(f32, f32);
+
+impl Point {
+ pub fn new(x: f32, y: f32) -> Self {
+ Point(x, y)
+ }
+
+ pub fn x(&self) -> f32 {
+ self.0
+ }
+
+ pub fn y(&self) -> f32 {
+ self.1
+ }
+}
+```
+
+#### After
+```rust
+struct Point { field1: f32, field2: f32 }
+
+impl Point {
+ pub fn new(x: f32, y: f32) -> Self {
+ Point { field1: x, field2: y }
+ }
+
+ pub fn x(&self) -> f32 {
+ self.field1
+ }
+
+ pub fn y(&self) -> f32 {
+ self.field2
+ }
+}
+```
+
+
+### `convert_two_arm_bool_match_to_matches_macro`
+**Source:** [convert_two_arm_bool_match_to_matches_macro.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs#L8)
+
+Convert 2-arm match that evaluates to a boolean into the equivalent matches! invocation.
+
+#### Before
+```rust
+fn main() {
+ match scrutinee┃ {
+ Some(val) if val.cond() => true,
+ _ => false,
+ }
+}
+```
+
+#### After
+```rust
+fn main() {
+ matches!(scrutinee, Some(val) if val.cond())
+}
+```
+
+
+### `convert_while_to_loop`
+**Source:** [convert_while_to_loop.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_while_to_loop.rs#L20)
+
+Replace a while with a loop.
+
+#### Before
+```rust
+fn main() {
+ ┃while cond {
+ foo();
+ }
+}
+```
+
+#### After
+```rust
+fn main() {
+ loop {
+ if !cond {
+ break;
+ }
+ foo();
+ }
+}
+```
+
+
+### `destructure_struct_binding`
+**Source:** [destructure_struct_binding.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/destructure_struct_binding.rs#L18)
+
+Destructures a struct binding in place.
+
+#### Before
+```rust
+struct Foo {
+ bar: i32,
+ baz: i32,
+}
+fn main() {
+ let ┃foo = Foo { bar: 1, baz: 2 };
+ let bar2 = foo.bar;
+ let baz2 = &foo.baz;
+}
+```
+
+#### After
+```rust
+struct Foo {
+ bar: i32,
+ baz: i32,
+}
+fn main() {
+ let Foo { bar, baz } = Foo { bar: 1, baz: 2 };
+ let bar2 = bar;
+ let baz2 = &baz;
+}
+```
+
+
+### `destructure_tuple_binding`
+**Source:** [destructure_tuple_binding.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/destructure_tuple_binding.rs#L19)
+
+Destructures a tuple binding in place.
+
+#### Before
+```rust
+fn main() {
+ let ┃t = (1,2);
+ let v = t.0;
+}
+```
+
+#### After
+```rust
+fn main() {
+ let (┃_0, _1) = (1,2);
+ let v = _0;
+}
+```
+
+
+### `desugar_async_into_impl_future`
+**Source:** [toggle_async_sugar.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/toggle_async_sugar.rs#L103)
+
+Rewrites asynchronous function from `async fn` into `-> impl Future`.
+This action does not touch the function body and therefore `0`
+block does not transform to `async { 0 }`.
+
+#### Before
+```rust
+pub as┃ync fn foo() -> usize {
+ 0
+}
+```
+
+#### After
+```rust
+pub fn foo() -> impl core::future::Future<Output = usize> {
+ 0
+}
+```
+
+
+### `desugar_doc_comment`
+**Source:** [desugar_doc_comment.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/desugar_doc_comment.rs#L14)
+
+Desugars doc-comments to the attribute form.
+
+#### Before
+```rust
+/// Multi-line┃
+/// comment
+```
+
+#### After
+```rust
+#[doc = r"Multi-line
+comment"]
+```
+
+
+### `expand_glob_import`
+**Source:** [expand_glob_import.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/expand_glob_import.rs#L19)
+
+Expands glob imports.
+
+#### Before
+```rust
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+}
+
+use foo::*┃;
+
+fn qux(bar: Bar, baz: Baz) {}
+```
+
+#### After
+```rust
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+}
+
+use foo::{Bar, Baz};
+
+fn qux(bar: Bar, baz: Baz) {}
+```
+
+
+### `expand_glob_reexport`
+**Source:** [expand_glob_import.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/expand_glob_import.rs#L81)
+
+Expands non-private glob imports.
+
+#### Before
+```rust
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+}
+
+pub use foo::*┃;
+```
+
+#### After
+```rust
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+}
+
+pub use foo::{Bar, Baz};
+```
+
+
+### `explicit_enum_discriminant`
+**Source:** [explicit_enum_discriminant.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/explicit_enum_discriminant.rs#L11)
+
+Adds explicit discriminant to all enum variants.
+
+#### Before
+```rust
+enum TheEnum┃ {
+ Foo,
+ Bar,
+ Baz = 42,
+ Quux,
+}
+```
+
+#### After
+```rust
+enum TheEnum {
+ Foo = 0,
+ Bar = 1,
+ Baz = 42,
+ Quux = 43,
+}
+```
+
+
+### `extract_constant`
+**Source:** [extract_variable.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_variable.rs#L35)
+
+Extracts subexpression into a constant.
+
+#### Before
+```rust
+fn main() {
+ ┃(1 + 2)┃ * 4;
+}
+```
+
+#### After
+```rust
+fn main() {
+ const ┃VAR_NAME: i32 = 1 + 2;
+ VAR_NAME * 4;
+}
+```
+
+
+### `extract_expressions_from_format_string`
+**Source:** [extract_expressions_from_format_string.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs#L14)
+
+Move an expression out of a format string.
+
+#### Before
+```rust
+fn main() {
+ print!("{var} {x + 1}┃");
+}
+```
+
+#### After
+```rust
+fn main() {
+ print!("{var} {}"┃, x + 1);
+}
+```
+
+
+### `extract_function`
+**Source:** [extract_function.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_function.rs#L39)
+
+Extracts selected statements and comments into new function.
+
+#### Before
+```rust
+fn main() {
+ let n = 1;
+ ┃let m = n + 2;
+ // calculate
+ let k = m + n;┃
+ let g = 3;
+}
+```
+
+#### After
+```rust
+fn main() {
+ let n = 1;
+ fun_name(n);
+ let g = 3;
+}
+
+fn ┃fun_name(n: i32) {
+ let m = n + 2;
+ // calculate
+ let k = m + n;
+}
+```
+
+
+### `extract_module`
+**Source:** [extract_module.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_module.rs#L29)
+
+Extracts a selected region as a separate module. All the references, visibility and imports are
+resolved.
+
+#### Before
+```rust
+┃fn foo(name: i32) -> i32 {
+ name + 1
+}┃
+
+fn bar(name: i32) -> i32 {
+ name + 2
+}
+```
+
+#### After
+```rust
+mod modname {
+ pub(crate) fn foo(name: i32) -> i32 {
+ name + 1
+ }
+}
+
+fn bar(name: i32) -> i32 {
+ name + 2
+}
+```
+
+
+### `extract_static`
+**Source:** [extract_variable.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_variable.rs#L52)
+
+Extracts subexpression into a static.
+
+#### Before
+```rust
+fn main() {
+ ┃(1 + 2)┃ * 4;
+}
+```
+
+#### After
+```rust
+fn main() {
+ static ┃VAR_NAME: i32 = 1 + 2;
+ VAR_NAME * 4;
+}
+```
+
+
+### `extract_struct_from_enum_variant`
+**Source:** [extract_struct_from_enum_variant.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs#L26)
+
+Extracts a struct from enum variant.
+
+#### Before
+```rust
+enum A { ┃One(u32, u32) }
+```
+
+#### After
+```rust
+struct One(u32, u32);
+
+enum A { One(One) }
+```
+
+
+### `extract_type_alias`
+**Source:** [extract_type_alias.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_type_alias.rs#L10)
+
+Extracts the selected type as a type alias.
+
+#### Before
+```rust
+struct S {
+ field: ┃(u8, u8, u8)┃,
+}
+```
+
+#### After
+```rust
+type ┃Type = (u8, u8, u8);
+
+struct S {
+ field: Type,
+}
+```
+
+
+### `extract_variable`
+**Source:** [extract_variable.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_variable.rs#L18)
+
+Extracts subexpression into a variable.
+
+#### Before
+```rust
+fn main() {
+ ┃(1 + 2)┃ * 4;
+}
+```
+
+#### After
+```rust
+fn main() {
+ let ┃var_name = 1 + 2;
+ var_name * 4;
+}
+```
+
+
+### `fill_record_pattern_fields`
+**Source:** [fill_record_pattern_fields.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/fill_record_pattern_fields.rs#L8)
+
+Fills fields by replacing rest pattern in record patterns.
+
+#### Before
+```rust
+struct Bar { y: Y, z: Z }
+
+fn foo(bar: Bar) {
+ let Bar { ..┃ } = bar;
+}
+```
+
+#### After
+```rust
+struct Bar { y: Y, z: Z }
+
+fn foo(bar: Bar) {
+ let Bar { y, z } = bar;
+}
+```
+
+
+### `fix_visibility`
+**Source:** [fix_visibility.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/fix_visibility.rs#L14)
+
+Makes inaccessible item public.
+
+#### Before
+```rust
+mod m {
+ fn frobnicate() {}
+}
+fn main() {
+ m::frobnicate┃();
+}
+```
+
+#### After
+```rust
+mod m {
+ ┃pub(crate) fn frobnicate() {}
+}
+fn main() {
+ m::frobnicate();
+}
+```
+
+
+### `flip_binexpr`
+**Source:** [flip_binexpr.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/flip_binexpr.rs#L8)
+
+Flips operands of a binary expression.
+
+#### Before
+```rust
+fn main() {
+ let _ = 90 +┃ 2;
+}
+```
+
+#### After
+```rust
+fn main() {
+ let _ = 2 + 90;
+}
+```
+
+
+### `flip_comma`
+**Source:** [flip_comma.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/flip_comma.rs#L10)
+
+Flips two comma-separated items.
+
+#### Before
+```rust
+fn main() {
+ ((1, 2),┃ (3, 4));
+}
+```
+
+#### After
+```rust
+fn main() {
+ ((3, 4), (1, 2));
+}
+```
+
+
+### `flip_trait_bound`
+**Source:** [flip_trait_bound.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/flip_trait_bound.rs#L9)
+
+Flips two trait bounds.
+
+#### Before
+```rust
+fn foo<T: Clone +┃ Copy>() { }
+```
+
+#### After
+```rust
+fn foo<T: Copy + Clone>() { }
+```
+
+
+### `generate_constant`
+**Source:** [generate_constant.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_constant.rs#L14)
+
+Generate a named constant.
+
+#### Before
+```rust
+struct S { i: usize }
+impl S { pub fn new(n: usize) {} }
+fn main() {
+ let v = S::new(CAPA┃CITY);
+}
+```
+
+#### After
+```rust
+struct S { i: usize }
+impl S { pub fn new(n: usize) {} }
+fn main() {
+ const CAPACITY: usize = ┃;
+ let v = S::new(CAPACITY);
+}
+```
+
+
+### `generate_default_from_enum_variant`
+**Source:** [generate_default_from_enum_variant.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs#L6)
+
+Adds a Default impl for an enum using a variant.
+
+#### Before
+```rust
+enum Version {
+ Undefined,
+ Minor┃,
+ Major,
+}
+```
+
+#### After
+```rust
+enum Version {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Default for Version {
+ fn default() -> Self {
+ Self::Minor
+ }
+}
+```
+
+
+### `generate_default_from_new`
+**Source:** [generate_default_from_new.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_default_from_new.rs#L13)
+
+Generates default implementation from new method.
+
+#### Before
+```rust
+struct Example { _inner: () }
+
+impl Example {
+ pub fn n┃ew() -> Self {
+ Self { _inner: () }
+ }
+}
+```
+
+#### After
+```rust
+struct Example { _inner: () }
+
+impl Example {
+ pub fn new() -> Self {
+ Self { _inner: () }
+ }
+}
+
+impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+```
+
+
+### `generate_delegate_methods`
+**Source:** [generate_delegate_methods.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_delegate_methods.rs#L15)
+
+Generate delegate methods.
+
+#### Before
+```rust
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ ag┃e: Age,
+}
+```
+
+#### After
+```rust
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ age: Age,
+}
+
+impl Person {
+ ┃fn age(&self) -> u8 {
+ self.age.age()
+ }
+}
+```
+
+
+### `generate_delegate_trait`
+**Source:** [generate_delegate_trait.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_delegate_trait.rs#L29)
+
+Generate delegate trait implementation for `StructField`s.
+
+#### Before
+```rust
+trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+}
+struct A;
+impl SomeTrait for A {
+ type T = u32;
+
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+
+ fn method_(&mut self) -> bool {
+ false
+ }
+}
+struct B {
+ a┃: A,
+}
+```
+
+#### After
+```rust
+trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+}
+struct A;
+impl SomeTrait for A {
+ type T = u32;
+
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+
+ fn method_(&mut self) -> bool {
+ false
+ }
+}
+struct B {
+ a: A,
+}
+
+impl SomeTrait for B {
+ type T = <A as SomeTrait>::T;
+
+ fn fn_(arg: u32) -> u32 {
+ <A as SomeTrait>::fn_(arg)
+ }
+
+ fn method_(&mut self) -> bool {
+ <A as SomeTrait>::method_(&mut self.a)
+ }
+}
+```
+
+
+### `generate_deref`
+**Source:** [generate_deref.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_deref.rs#L16)
+
+Generate `Deref` impl using the given struct field.
+
+#### Before
+```rust
+struct A;
+struct B {
+ ┃a: A
+}
+```
+
+#### After
+```rust
+struct A;
+struct B {
+ a: A
+}
+
+impl core::ops::Deref for B {
+ type Target = A;
+
+ fn deref(&self) -> &Self::Target {
+ &self.a
+ }
+}
+```
+
+
+### `generate_derive`
+**Source:** [generate_derive.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_derive.rs#L8)
+
+Adds a new `#[derive()]` clause to a struct or enum.
+
+#### Before
+```rust
+struct Point {
+ x: u32,
+ y: u32,┃
+}
+```
+
+#### After
+```rust
+#[derive(┃)]
+struct Point {
+ x: u32,
+ y: u32,
+}
+```
+
+
+### `generate_doc_example`
+**Source:** [generate_documentation_template.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_documentation_template.rs#L76)
+
+Generates a rustdoc example when editing an item's documentation.
+
+#### Before
+```rust
+/// Adds two numbers.┃
+pub fn add(a: i32, b: i32) -> i32 { a + b }
+```
+
+#### After
+```rust
+/// Adds two numbers.
+///
+/// # Examples
+///
+/// ```
+/// use ra_test_fixture::add;
+///
+/// assert_eq!(add(a, b), );
+/// ```
+pub fn add(a: i32, b: i32) -> i32 { a + b }
+```
+
+
+### `generate_documentation_template`
+**Source:** [generate_documentation_template.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_documentation_template.rs#L13)
+
+Adds a documentation template above a function definition / declaration.
+
+#### Before
+```rust
+pub struct S;
+impl S {
+ pub unsafe fn set_len┃(&mut self, len: usize) -> Result<(), std::io::Error> {
+ /* ... */
+ }
+}
+```
+
+#### After
+```rust
+pub struct S;
+impl S {
+ /// Sets the length of this [`S`].
+ ///
+ /// # Errors
+ ///
+ /// This function will return an error if .
+ ///
+ /// # Safety
+ ///
+ /// .
+ pub unsafe fn set_len(&mut self, len: usize) -> Result<(), std::io::Error> {
+ /* ... */
+ }
+}
+```
+
+
+### `generate_enum_as_method`
+**Source:** [generate_enum_projection_method.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_enum_projection_method.rs#L59)
+
+Generate an `as_` method for this enum variant.
+
+#### Before
+```rust
+enum Value {
+ Number(i32),
+ Text(String)┃,
+}
+```
+
+#### After
+```rust
+enum Value {
+ Number(i32),
+ Text(String),
+}
+
+impl Value {
+ fn as_text(&self) -> Option<&String> {
+ if let Self::Text(v) = self {
+ Some(v)
+ } else {
+ None
+ }
+ }
+}
+```
+
+
+### `generate_enum_is_method`
+**Source:** [generate_enum_is_method.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_enum_is_method.rs#L11)
+
+Generate an `is_` method for this enum variant.
+
+#### Before
+```rust
+enum Version {
+ Undefined,
+ Minor┃,
+ Major,
+}
+```
+
+#### After
+```rust
+enum Version {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Version {
+ /// Returns `true` if the version is [`Minor`].
+ ///
+ /// [`Minor`]: Version::Minor
+ #[must_use]
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor)
+ }
+}
+```
+
+
+### `generate_enum_try_into_method`
+**Source:** [generate_enum_projection_method.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_enum_projection_method.rs#L12)
+
+Generate a `try_into_` method for this enum variant.
+
+#### Before
+```rust
+enum Value {
+ Number(i32),
+ Text(String)┃,
+}
+```
+
+#### After
+```rust
+enum Value {
+ Number(i32),
+ Text(String),
+}
+
+impl Value {
+ fn try_into_text(self) -> Result<String, Self> {
+ if let Self::Text(v) = self {
+ Ok(v)
+ } else {
+ Err(self)
+ }
+ }
+}
+```
+
+
+### `generate_enum_variant`
+**Source:** [generate_enum_variant.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_enum_variant.rs#L10)
+
+Adds a variant to an enum.
+
+#### Before
+```rust
+enum Countries {
+ Ghana,
+}
+
+fn main() {
+ let country = Countries::Lesotho┃;
+}
+```
+
+#### After
+```rust
+enum Countries {
+ Ghana,
+ Lesotho,
+}
+
+fn main() {
+ let country = Countries::Lesotho;
+}
+```
+
+
+### `generate_fn_type_alias_named`
+**Source:** [generate_fn_type_alias.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_fn_type_alias.rs#L10)
+
+Generate a type alias for the function with named parameters.
+
+#### Before
+```rust
+unsafe fn fo┃o(n: i32) -> i32 { 42i32 }
+```
+
+#### After
+```rust
+type ${0:FooFn} = unsafe fn(n: i32) -> i32;
+
+unsafe fn foo(n: i32) -> i32 { 42i32 }
+```
+
+
+### `generate_fn_type_alias_unnamed`
+**Source:** [generate_fn_type_alias.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_fn_type_alias.rs#L24)
+
+Generate a type alias for the function with unnamed parameters.
+
+#### Before
+```rust
+unsafe fn fo┃o(n: i32) -> i32 { 42i32 }
+```
+
+#### After
+```rust
+type ${0:FooFn} = unsafe fn(i32) -> i32;
+
+unsafe fn foo(n: i32) -> i32 { 42i32 }
+```
+
+
+### `generate_from_impl_for_enum`
+**Source:** [generate_from_impl_for_enum.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs#L8)
+
+Adds a From impl for this enum variant with one tuple field.
+
+#### Before
+```rust
+enum A { ┃One(u32) }
+```
+
+#### After
+```rust
+enum A { One(u32) }
+
+impl From<u32> for A {
+ fn from(v: u32) -> Self {
+ Self::One(v)
+ }
+}
+```
+
+
+### `generate_function`
+**Source:** [generate_function.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_function.rs#L28)
+
+Adds a stub function with a signature matching the function under the cursor.
+
+#### Before
+```rust
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ bar┃("", baz());
+}
+
+```
+
+#### After
+```rust
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ bar("", baz());
+}
+
+fn bar(arg: &str, baz: Baz) ${0:-> _} {
+ todo!()
+}
+
+```
+
+
+### `generate_getter`
+**Source:** [generate_getter_or_setter.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_getter_or_setter.rs#L73)
+
+Generate a getter method.
+
+#### Before
+```rust
+struct Person {
+ nam┃e: String,
+}
+```
+
+#### After
+```rust
+struct Person {
+ name: String,
+}
+
+impl Person {
+ fn ┃name(&self) -> &str {
+ &self.name
+ }
+}
+```
+
+
+### `generate_getter_mut`
+**Source:** [generate_getter_or_setter.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_getter_or_setter.rs#L127)
+
+Generate a mut getter method.
+
+#### Before
+```rust
+struct Person {
+ nam┃e: String,
+}
+```
+
+#### After
+```rust
+struct Person {
+ name: String,
+}
+
+impl Person {
+ fn ┃name_mut(&mut self) -> &mut String {
+ &mut self.name
+ }
+}
+```
+
+
+### `generate_impl`
+**Source:** [generate_impl.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_impl.rs#L20)
+
+Adds a new inherent impl for a type.
+
+#### Before
+```rust
+struct Ctx┃<T: Clone> {
+ data: T,
+}
+```
+
+#### After
+```rust
+struct Ctx<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Ctx<T> {┃}
+```
+
+
+### `generate_is_empty_from_len`
+**Source:** [generate_is_empty_from_len.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs#L12)
+
+Generates is_empty implementation from the len method.
+
+#### Before
+```rust
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ p┃ub fn len(&self) -> usize {
+ self.data.len()
+ }
+}
+```
+
+#### After
+```rust
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+```
+
+
+### `generate_mut_trait_impl`
+**Source:** [generate_mut_trait_impl.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs#L12)
+
+Adds an `IndexMut` impl from the `Index` trait.
+
+#### Before
+```rust
+pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+impl<T> core::ops::Index┃<Axis> for [T; 3] {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ &self[index as usize]
+ }
+}
+```
+
+#### After
+```rust
+pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+┃impl<T> core::ops::IndexMut<Axis> for [T; 3] {
+ fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
+ &self[index as usize]
+ }
+}
+
+impl<T> core::ops::Index<Axis> for [T; 3] {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ &self[index as usize]
+ }
+}
+```
+
+
+### `generate_new`
+**Source:** [generate_new.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_new.rs#L14)
+
+Adds a `fn new` for a type.
+
+#### Before
+```rust
+struct Ctx<T: Clone> {
+ data: T,┃
+}
+```
+
+#### After
+```rust
+struct Ctx<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Ctx<T> {
+ fn ┃new(data: T) -> Self {
+ Self { data }
+ }
+}
+```
+
+
+### `generate_setter`
+**Source:** [generate_getter_or_setter.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_getter_or_setter.rs#L13)
+
+Generate a setter method.
+
+#### Before
+```rust
+struct Person {
+ nam┃e: String,
+}
+```
+
+#### After
+```rust
+struct Person {
+ name: String,
+}
+
+impl Person {
+ fn ┃set_name(&mut self, name: String) {
+ self.name = name;
+ }
+}
+```
+
+
+### `generate_trait_from_impl`
+**Source:** [generate_trait_from_impl.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_trait_from_impl.rs#L18)
+
+Generate trait for an already defined inherent impl and convert impl to a trait impl.
+
+#### Before
+```rust
+struct Foo<const N: usize>([i32; N]);
+
+macro_rules! const_maker {
+ ($t:ty, $v:tt) => {
+ const CONST: $t = $v;
+ };
+}
+
+impl<const N: usize> Fo┃o<N> {
+ // Used as an associated constant.
+ const CONST_ASSOC: usize = N * 4;
+
+ fn create() -> Option<()> {
+ Some(())
+ }
+
+ const_maker! {i32, 7}
+}
+```
+
+#### After
+```rust
+struct Foo<const N: usize>([i32; N]);
+
+macro_rules! const_maker {
+ ($t:ty, $v:tt) => {
+ const CONST: $t = $v;
+ };
+}
+
+trait ${0:NewTrait}<const N: usize> {
+ // Used as an associated constant.
+ const CONST_ASSOC: usize = N * 4;
+
+ fn create() -> Option<()>;
+
+ const_maker! {i32, 7}
+}
+
+impl<const N: usize> ${0:NewTrait}<N> for Foo<N> {
+ // Used as an associated constant.
+ const CONST_ASSOC: usize = N * 4;
+
+ fn create() -> Option<()> {
+ Some(())
+ }
+
+ const_maker! {i32, 7}
+}
+```
+
+
+### `generate_trait_impl`
+**Source:** [generate_impl.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_impl.rs#L66)
+
+Adds a new trait impl for a type.
+
+#### Before
+```rust
+struct ┃Ctx<T: Clone> {
+ data: T,
+}
+```
+
+#### After
+```rust
+struct Ctx<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> ${0:_} for Ctx<T> {}
+```
+
+
+### `inline_call`
+**Source:** [inline_call.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/inline_call.rs#L170)
+
+Inlines a function or method body creating a `let` statement per parameter unless the parameter
+can be inlined. The parameter will be inlined either if the supplied argument is a simple local
+or if the parameter is only accessed inside the function body once.
+
+#### Before
+```rust
+fn foo(name: Option<&str>) {
+ let name = name.unwrap┃();
+}
+```
+
+#### After
+```rust
+fn foo(name: Option<&str>) {
+ let name = match name {
+ Some(val) => val,
+ None => panic!("called `Option::unwrap()` on a `None` value"),
+ };
+}
+```
+
+
+### `inline_const_as_literal`
+**Source:** [inline_const_as_literal.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/inline_const_as_literal.rs#L6)
+
+Evaluate and inline const variable as literal.
+
+#### Before
+```rust
+const STRING: &str = "Hello, World!";
+
+fn something() -> &'static str {
+ STRING┃
+}
+```
+
+#### After
+```rust
+const STRING: &str = "Hello, World!";
+
+fn something() -> &'static str {
+ "Hello, World!"
+}
+```
+
+
+### `inline_into_callers`
+**Source:** [inline_call.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/inline_call.rs#L32)
+
+Inline a function or method body into all of its callers where possible, creating a `let` statement per parameter
+unless the parameter can be inlined. The parameter will be inlined either if the supplied argument is a simple local
+or if the parameter is only accessed inside the function body once.
+If all calls can be inlined the function will be removed.
+
+#### Before
+```rust
+fn print(_: &str) {}
+fn foo┃(word: &str) {
+ if !word.is_empty() {
+ print(word);
+ }
+}
+fn bar() {
+ foo("안녕하세요");
+ foo("여러분");
+}
+```
+
+#### After
+```rust
+fn print(_: &str) {}
+
+fn bar() {
+ {
+ let word: &str = "안녕하세요";
+ if !word.is_empty() {
+ print(word);
+ }
+ };
+ {
+ let word: &str = "여러분";
+ if !word.is_empty() {
+ print(word);
+ }
+ };
+}
+```
+
+
+### `inline_local_variable`
+**Source:** [inline_local_variable.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/inline_local_variable.rs#L17)
+
+Inlines a local variable.
+
+#### Before
+```rust
+fn main() {
+ let x┃ = 1 + 2;
+ x * 4;
+}
+```
+
+#### After
+```rust
+fn main() {
+ (1 + 2) * 4;
+}
+```
+
+
+### `inline_macro`
+**Source:** [inline_macro.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/inline_macro.rs#L7)
+
+Takes a macro and inlines it one step.
+
+#### Before
+```rust
+macro_rules! num {
+ (+$($t:tt)+) => (1 + num!($($t )+));
+ (-$($t:tt)+) => (-1 + num!($($t )+));
+ (+) => (1);
+ (-) => (-1);
+}
+
+fn main() {
+ let number = num┃!(+ + + - + +);
+ println!("{number}");
+}
+```
+
+#### After
+```rust
+macro_rules! num {
+ (+$($t:tt)+) => (1 + num!($($t )+));
+ (-$($t:tt)+) => (-1 + num!($($t )+));
+ (+) => (1);
+ (-) => (-1);
+}
+
+fn main() {
+ let number = 1+num!(+ + - + +);
+ println!("{number}");
+}
+```
+
+
+### `inline_type_alias`
+**Source:** [inline_type_alias.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/inline_type_alias.rs#L106)
+
+Replace a type alias with its concrete type.
+
+#### Before
+```rust
+type A<T = u32> = Vec<T>;
+
+fn main() {
+ let a: ┃A;
+}
+```
+
+#### After
+```rust
+type A<T = u32> = Vec<T>;
+
+fn main() {
+ let a: Vec<u32>;
+}
+```
+
+
+### `inline_type_alias_uses`
+**Source:** [inline_type_alias.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/inline_type_alias.rs#L24)
+
+Inline a type alias into all of its uses where possible.
+
+#### Before
+```rust
+type ┃A = i32;
+fn id(x: A) -> A {
+ x
+};
+fn foo() {
+ let _: A = 3;
+}
+```
+
+#### After
+```rust
+
+fn id(x: i32) -> i32 {
+ x
+};
+fn foo() {
+ let _: i32 = 3;
+}
+```
+
+
+### `into_to_qualified_from`
+**Source:** [into_to_qualified_from.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/into_to_qualified_from.rs#L10)
+
+Convert an `into` method call to a fully qualified `from` call.
+
+#### Before
+```rust
+//- minicore: from
+struct B;
+impl From<i32> for B {
+ fn from(a: i32) -> Self {
+ B
+ }
+}
+
+fn main() -> () {
+ let a = 3;
+ let b: B = a.in┃to();
+}
+```
+
+#### After
+```rust
+struct B;
+impl From<i32> for B {
+ fn from(a: i32) -> Self {
+ B
+ }
+}
+
+fn main() -> () {
+ let a = 3;
+ let b: B = B::from(a);
+}
+```
+
+
+### `introduce_named_generic`
+**Source:** [introduce_named_generic.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/introduce_named_generic.rs#L7)
+
+Replaces `impl Trait` function argument with the named generic.
+
+#### Before
+```rust
+fn foo(bar: ┃impl Bar) {}
+```
+
+#### After
+```rust
+fn foo<┃B: Bar>(bar: B) {}
+```
+
+
+### `introduce_named_lifetime`
+**Source:** [introduce_named_lifetime.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/introduce_named_lifetime.rs#L13)
+
+Change an anonymous lifetime to a named lifetime.
+
+#### Before
+```rust
+impl Cursor<'_┃> {
+ fn node(self) -> &SyntaxNode {
+ match self {
+ Cursor::Replace(node) | Cursor::Before(node) => node,
+ }
+ }
+}
+```
+
+#### After
+```rust
+impl<'a> Cursor<'a> {
+ fn node(self) -> &SyntaxNode {
+ match self {
+ Cursor::Replace(node) | Cursor::Before(node) => node,
+ }
+ }
+}
+```
+
+
+### `invert_if`
+**Source:** [invert_if.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/invert_if.rs#L13)
+
+This transforms if expressions of the form `if !x {A} else {B}` into `if x {B} else {A}`.
+This also works with `!=`. This assist can only be applied with the cursor on `if`.
+
+#### Before
+```rust
+fn main() {
+ if┃ !y { A } else { B }
+}
+```
+
+#### After
+```rust
+fn main() {
+ if y { B } else { A }
+}
+```
+
+
+### `line_to_block`
+**Source:** [convert_comment_block.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_comment_block.rs#L9)
+
+Converts comments between block and single-line form.
+
+#### Before
+```rust
+ // Multi-line┃
+ // comment
+```
+
+#### After
+```rust
+ /*
+ Multi-line
+ comment
+ */
+```
+
+
+### `make_raw_string`
+**Source:** [raw_string.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/raw_string.rs#L7)
+
+Adds `r#` to a plain string literal.
+
+#### Before
+```rust
+fn main() {
+ "Hello,┃ World!";
+}
+```
+
+#### After
+```rust
+fn main() {
+ r#"Hello, World!"#;
+}
+```
+
+
+### `make_usual_string`
+**Source:** [raw_string.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/raw_string.rs#L47)
+
+Turns a raw string into a plain string.
+
+#### Before
+```rust
+fn main() {
+ r#"Hello,┃ "World!""#;
+}
+```
+
+#### After
+```rust
+fn main() {
+ "Hello, \"World!\"";
+}
+```
+
+
+### `merge_imports`
+**Source:** [merge_imports.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/merge_imports.rs#L21)
+
+Merges neighbor imports with a common prefix.
+
+#### Before
+```rust
+use std::┃fmt::Formatter;
+use std::io;
+```
+
+#### After
+```rust
+use std::{fmt::Formatter, io};
+```
+
+
+### `merge_match_arms`
+**Source:** [merge_match_arms.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/merge_match_arms.rs#L12)
+
+Merges the current match arm with the following if their bodies are identical.
+
+#### Before
+```rust
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ ┃Action::Move(..) => foo(),
+ Action::Stop => foo(),
+ }
+}
+```
+
+#### After
+```rust
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move(..) | Action::Stop => foo(),
+ }
+}
+```
+
+
+### `merge_nested_if`
+**Source:** [merge_nested_if.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/merge_nested_if.rs#L11)
+
+This transforms if expressions of the form `if x { if y {A} }` into `if x && y {A}`.
+This assist can only be applied with the cursor on `if`.
+
+#### Before
+```rust
+fn main() {
+ i┃f x == 3 { if y == 4 { 1 } }
+}
+```
+
+#### After
+```rust
+fn main() {
+ if x == 3 && y == 4 { 1 }
+}
+```
+
+
+### `move_arm_cond_to_match_guard`
+**Source:** [move_guard.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/move_guard.rs#L69)
+
+Moves if expression from match arm body into a guard.
+
+#### Before
+```rust
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } => ┃if distance > 10 { foo() },
+ _ => (),
+ }
+}
+```
+
+#### After
+```rust
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } if distance > 10 => foo(),
+ _ => (),
+ }
+}
+```
+
+
+### `move_bounds_to_where_clause`
+**Source:** [move_bounds.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/move_bounds.rs#L12)
+
+Moves inline type bounds to a where clause.
+
+#### Before
+```rust
+fn apply<T, U, ┃F: FnOnce(T) -> U>(f: F, x: T) -> U {
+ f(x)
+}
+```
+
+#### After
+```rust
+fn apply<T, U, F>(f: F, x: T) -> U where F: FnOnce(T) -> U {
+ f(x)
+}
+```
+
+
+### `move_const_to_impl`
+**Source:** [move_const_to_impl.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/move_const_to_impl.rs#L14)
+
+Move a local constant item in a method to impl's associated constant. All the references will be
+qualified with `Self::`.
+
+#### Before
+```rust
+struct S;
+impl S {
+ fn foo() -> usize {
+ /// The answer.
+ const C┃: usize = 42;
+
+ C * C
+ }
+}
+```
+
+#### After
+```rust
+struct S;
+impl S {
+ /// The answer.
+ const C: usize = 42;
+
+ fn foo() -> usize {
+ Self::C * Self::C
+ }
+}
+```
+
+
+### `move_from_mod_rs`
+**Source:** [move_from_mod_rs.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/move_from_mod_rs.rs#L12)
+
+Moves xxx/mod.rs to xxx.rs.
+
+#### Before
+```rust
+//- /main.rs
+mod a;
+//- /a/mod.rs
+┃fn t() {}┃
+```
+
+#### After
+```rust
+fn t() {}
+```
+
+
+### `move_guard_to_arm_body`
+**Source:** [move_guard.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/move_guard.rs#L8)
+
+Moves match guard into match arm body.
+
+#### Before
+```rust
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } ┃if distance > 10 => foo(),
+ _ => (),
+ }
+}
+```
+
+#### After
+```rust
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } => if distance > 10 {
+ foo()
+ },
+ _ => (),
+ }
+}
+```
+
+
+### `move_module_to_file`
+**Source:** [move_module_to_file.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/move_module_to_file.rs#L15)
+
+Moves inline module's contents to a separate file.
+
+#### Before
+```rust
+mod ┃foo {
+ fn t() {}
+}
+```
+
+#### After
+```rust
+mod foo;
+```
+
+
+### `move_to_mod_rs`
+**Source:** [move_to_mod_rs.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/move_to_mod_rs.rs#L12)
+
+Moves xxx.rs to xxx/mod.rs.
+
+#### Before
+```rust
+//- /main.rs
+mod a;
+//- /a.rs
+┃fn t() {}┃
+```
+
+#### After
+```rust
+fn t() {}
+```
+
+
+### `normalize_import`
+**Source:** [normalize_import.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/normalize_import.rs#L9)
+
+Normalizes an import.
+
+#### Before
+```rust
+use┃ std::{io, {fmt::Formatter}};
+```
+
+#### After
+```rust
+use std::{fmt::Formatter, io};
+```
+
+
+### `promote_local_to_const`
+**Source:** [promote_local_to_const.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/promote_local_to_const.rs#L17)
+
+Promotes a local variable to a const item changing its name to a `SCREAMING_SNAKE_CASE` variant
+if the local uses no non-const expressions.
+
+#### Before
+```rust
+fn main() {
+ let foo┃ = true;
+
+ if foo {
+ println!("It's true");
+ } else {
+ println!("It's false");
+ }
+}
+```
+
+#### After
+```rust
+fn main() {
+ const ┃FOO: bool = true;
+
+ if FOO {
+ println!("It's true");
+ } else {
+ println!("It's false");
+ }
+}
+```
+
+
+### `pull_assignment_up`
+**Source:** [pull_assignment_up.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/pull_assignment_up.rs#L11)
+
+Extracts variable assignment to outside an if or match statement.
+
+#### Before
+```rust
+fn main() {
+ let mut foo = 6;
+
+ if true {
+ ┃foo = 5;
+ } else {
+ foo = 4;
+ }
+}
+```
+
+#### After
+```rust
+fn main() {
+ let mut foo = 6;
+
+ foo = if true {
+ 5
+ } else {
+ 4
+ };
+}
+```
+
+
+### `qualify_method_call`
+**Source:** [qualify_method_call.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/qualify_method_call.rs#L10)
+
+Replaces the method call with a qualified function call.
+
+#### Before
+```rust
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+fn main() {
+ let foo = Foo;
+ foo.fo┃o();
+}
+```
+
+#### After
+```rust
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+fn main() {
+ let foo = Foo;
+ Foo::foo(&foo);
+}
+```
+
+
+### `qualify_path`
+**Source:** [qualify_path.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/qualify_path.rs#L24)
+
+If the name is unresolved, provides all possible qualified paths for it.
+
+#### Before
+```rust
+fn main() {
+ let map = HashMap┃::new();
+}
+```
+
+#### After
+```rust
+fn main() {
+ let map = std::collections::HashMap::new();
+}
+```
+
+
+### `reformat_number_literal`
+**Source:** [number_representation.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/number_representation.rs#L7)
+
+Adds or removes separators from integer literal.
+
+#### Before
+```rust
+const _: i32 = 1012345┃;
+```
+
+#### After
+```rust
+const _: i32 = 1_012_345;
+```
+
+
+### `remove_dbg`
+**Source:** [remove_dbg.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/remove_dbg.rs#L9)
+
+Removes `dbg!()` macro call.
+
+#### Before
+```rust
+fn main() {
+ let x = ┃dbg!(42 * dbg!(4 + 2));┃
+}
+```
+
+#### After
+```rust
+fn main() {
+ let x = 42 * (4 + 2);
+}
+```
+
+
+### `remove_hash`
+**Source:** [raw_string.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/raw_string.rs#L117)
+
+Removes a hash from a raw string literal.
+
+#### Before
+```rust
+fn main() {
+ r#"Hello,┃ World!"#;
+}
+```
+
+#### After
+```rust
+fn main() {
+ r"Hello, World!";
+}
+```
+
+
+### `remove_mut`
+**Source:** [remove_mut.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/remove_mut.rs#L5)
+
+Removes the `mut` keyword.
+
+#### Before
+```rust
+impl Walrus {
+ fn feed(&mut┃ self, amount: u32) {}
+}
+```
+
+#### After
+```rust
+impl Walrus {
+ fn feed(&self, amount: u32) {}
+}
+```
+
+
+### `remove_parentheses`
+**Source:** [remove_parentheses.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/remove_parentheses.rs#L5)
+
+Removes redundant parentheses.
+
+#### Before
+```rust
+fn main() {
+ _ = ┃(2) + 2;
+}
+```
+
+#### After
+```rust
+fn main() {
+ _ = 2 + 2;
+}
+```
+
+
+### `remove_unused_imports`
+**Source:** [remove_unused_imports.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/remove_unused_imports.rs#L17)
+
+Removes any use statements in the current selection that are unused.
+
+#### Before
+```rust
+struct X();
+mod foo {
+ use super::X┃;
+}
+```
+
+#### After
+```rust
+struct X();
+mod foo {
+}
+```
+
+
+### `remove_unused_param`
+**Source:** [remove_unused_param.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/remove_unused_param.rs#L15)
+
+Removes unused function parameter.
+
+#### Before
+```rust
+fn frobnicate(x: i32┃) {}
+
+fn main() {
+ frobnicate(92);
+}
+```
+
+#### After
+```rust
+fn frobnicate() {}
+
+fn main() {
+ frobnicate();
+}
+```
+
+
+### `reorder_fields`
+**Source:** [reorder_fields.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/reorder_fields.rs#L8)
+
+Reorder the fields of record literals and record patterns in the same order as in
+the definition.
+
+#### Before
+```rust
+struct Foo {foo: i32, bar: i32};
+const test: Foo = ┃Foo {bar: 0, foo: 1}
+```
+
+#### After
+```rust
+struct Foo {foo: i32, bar: i32};
+const test: Foo = Foo {foo: 1, bar: 0}
+```
+
+
+### `reorder_impl_items`
+**Source:** [reorder_impl_items.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/reorder_impl_items.rs#L11)
+
+Reorder the items of an `impl Trait`. The items will be ordered
+in the same order as in the trait definition.
+
+#### Before
+```rust
+trait Foo {
+ type A;
+ const B: u8;
+ fn c();
+}
+
+struct Bar;
+┃impl Foo for Bar┃ {
+ const B: u8 = 17;
+ fn c() {}
+ type A = String;
+}
+```
+
+#### After
+```rust
+trait Foo {
+ type A;
+ const B: u8;
+ fn c();
+}
+
+struct Bar;
+impl Foo for Bar {
+ type A = String;
+ const B: u8 = 17;
+ fn c() {}
+}
+```
+
+
+### `replace_arith_with_checked`
+**Source:** [replace_arith_op.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_arith_op.rs#L9)
+
+Replaces arithmetic on integers with the `checked_*` equivalent.
+
+#### Before
+```rust
+fn main() {
+ let x = 1 ┃+ 2;
+}
+```
+
+#### After
+```rust
+fn main() {
+ let x = 1.checked_add(2);
+}
+```
+
+
+### `replace_arith_with_saturating`
+**Source:** [replace_arith_op.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_arith_op.rs#L28)
+
+Replaces arithmetic on integers with the `saturating_*` equivalent.
+
+#### Before
+```rust
+fn main() {
+ let x = 1 ┃+ 2;
+}
+```
+
+#### After
+```rust
+fn main() {
+ let x = 1.saturating_add(2);
+}
+```
+
+
+### `replace_arith_with_wrapping`
+**Source:** [replace_arith_op.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_arith_op.rs#L50)
+
+Replaces arithmetic on integers with the `wrapping_*` equivalent.
+
+#### Before
+```rust
+fn main() {
+ let x = 1 ┃+ 2;
+}
+```
+
+#### After
+```rust
+fn main() {
+ let x = 1.wrapping_add(2);
+}
+```
+
+
+### `replace_char_with_string`
+**Source:** [replace_string_with_char.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_string_with_char.rs#L51)
+
+Replace a char literal with a string literal.
+
+#### Before
+```rust
+fn main() {
+ find('{┃');
+}
+```
+
+#### After
+```rust
+fn main() {
+ find("{");
+}
+```
+
+
+### `replace_derive_with_manual_impl`
+**Source:** [replace_derive_with_manual_impl.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs#L20)
+
+Converts a `derive` impl into a manual one.
+
+#### Before
+```rust
+#[derive(Deb┃ug, Display)]
+struct S;
+```
+
+#### After
+```rust
+#[derive(Display)]
+struct S;
+
+impl Debug for S {
+ ┃fn fmt(&self, f: &mut Formatter) -> Result<()> {
+ f.debug_struct("S").finish()
+ }
+}
+```
+
+
+### `replace_if_let_with_match`
+**Source:** [replace_if_let_with_match.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_if_let_with_match.rs#L20)
+
+Replaces a `if let` expression with a `match` expression.
+
+#### Before
+```rust
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ ┃if let Action::Move { distance } = action {
+ foo(distance)
+ } else {
+ bar()
+ }
+}
+```
+
+#### After
+```rust
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } => foo(distance),
+ _ => bar(),
+ }
+}
+```
+
+
+### `replace_is_some_with_if_let_some`
+**Source:** [replace_is_method_with_if_let_method.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs#L9)
+
+Replace `if x.is_some()` with `if let Some(_tmp) = x` or `if x.is_ok()` with `if let Ok(_tmp) = x`.
+
+#### Before
+```rust
+fn main() {
+ let x = Some(1);
+ if x.is_som┃e() {}
+}
+```
+
+#### After
+```rust
+fn main() {
+ let x = Some(1);
+ if let Some(${0:x1}) = x {}
+}
+```
+
+
+### `replace_let_with_if_let`
+**Source:** [replace_let_with_if_let.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_let_with_if_let.rs#L9)
+
+Replaces `let` with an `if let`.
+
+#### Before
+```rust
+
+fn main(action: Action) {
+ ┃let x = compute();
+}
+
+fn compute() -> Option<i32> { None }
+```
+
+#### After
+```rust
+
+fn main(action: Action) {
+ if let Some(x) = compute() {
+ }
+}
+
+fn compute() -> Option<i32> { None }
+```
+
+
+### `replace_match_with_if_let`
+**Source:** [replace_if_let_with_match.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_if_let_with_match.rs#L188)
+
+Replaces a binary `match` with a wildcard pattern and no guards with an `if let` expression.
+
+#### Before
+```rust
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ ┃match action {
+ Action::Move { distance } => foo(distance),
+ _ => bar(),
+ }
+}
+```
+
+#### After
+```rust
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ if let Action::Move { distance } = action {
+ foo(distance)
+ } else {
+ bar()
+ }
+}
+```
+
+
+### `replace_named_generic_with_impl`
+**Source:** [replace_named_generic_with_impl.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs#L18)
+
+Replaces named generic with an `impl Trait` in function argument.
+
+#### Before
+```rust
+fn new<P┃: AsRef<Path>>(location: P) -> Self {}
+```
+
+#### After
+```rust
+fn new(location: impl AsRef<Path>) -> Self {}
+```
+
+
+### `replace_qualified_name_with_use`
+**Source:** [replace_qualified_name_with_use.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs#L13)
+
+Adds a use statement for a given fully-qualified name.
+
+#### Before
+```rust
+fn process(map: std::collections::┃HashMap<String, String>) {}
+```
+
+#### After
+```rust
+use std::collections::HashMap;
+
+fn process(map: HashMap<String, String>) {}
+```
+
+
+### `replace_string_with_char`
+**Source:** [replace_string_with_char.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_string_with_char.rs#L11)
+
+Replace string literal with char literal.
+
+#### Before
+```rust
+fn main() {
+ find("{┃");
+}
+```
+
+#### After
+```rust
+fn main() {
+ find('{');
+}
+```
+
+
+### `replace_try_expr_with_match`
+**Source:** [replace_try_expr_with_match.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs#L18)
+
+Replaces a `try` expression with a `match` expression.
+
+#### Before
+```rust
+fn handle() {
+ let pat = Some(true)┃?;
+}
+```
+
+#### After
+```rust
+fn handle() {
+ let pat = match Some(true) {
+ Some(it) => it,
+ None => return None,
+ };
+}
+```
+
+
+### `replace_turbofish_with_explicit_type`
+**Source:** [replace_turbofish_with_explicit_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs#L12)
+
+Converts `::<_>` to an explicit type assignment.
+
+#### Before
+```rust
+fn make<T>() -> T { todo!() }
+fn main() {
+ let a = make┃::<i32>();
+}
+```
+
+#### After
+```rust
+fn make<T>() -> T { todo!() }
+fn main() {
+ let a: i32 = make();
+}
+```
+
+
+### `replace_with_eager_method`
+**Source:** [replace_method_eager_lazy.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs#L89)
+
+Replace `unwrap_or_else` with `unwrap_or` and `ok_or_else` with `ok_or`.
+
+#### Before
+```rust
+fn foo() {
+ let a = Some(1);
+ a.unwra┃p_or_else(|| 2);
+}
+```
+
+#### After
+```rust
+fn foo() {
+ let a = Some(1);
+ a.unwrap_or(2);
+}
+```
+
+
+### `replace_with_lazy_method`
+**Source:** [replace_method_eager_lazy.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs#L9)
+
+Replace `unwrap_or` with `unwrap_or_else` and `ok_or` with `ok_or_else`.
+
+#### Before
+```rust
+fn foo() {
+ let a = Some(1);
+ a.unwra┃p_or(2);
+}
+```
+
+#### After
+```rust
+fn foo() {
+ let a = Some(1);
+ a.unwrap_or_else(|| 2);
+}
+```
+
+
+### `sort_items`
+**Source:** [sort_items.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/sort_items.rs#L12)
+
+Sorts item members alphabetically: fields, enum variants and methods.
+
+#### Before
+```rust
+struct ┃Foo┃ { second: u32, first: String }
+```
+
+#### After
+```rust
+struct Foo { first: String, second: u32 }
+```
+
+---
+
+#### Before
+```rust
+trait ┃Bar┃ {
+ fn second(&self) -> u32;
+ fn first(&self) -> String;
+}
+```
+
+#### After
+```rust
+trait Bar {
+ fn first(&self) -> String;
+ fn second(&self) -> u32;
+}
+```
+
+---
+
+#### Before
+```rust
+struct Baz;
+impl ┃Baz┃ {
+ fn second(&self) -> u32;
+ fn first(&self) -> String;
+}
+```
+
+#### After
+```rust
+struct Baz;
+impl Baz {
+ fn first(&self) -> String;
+ fn second(&self) -> u32;
+}
+```
+
+---
+There is a difference between sorting enum variants:
+
+#### Before
+```rust
+enum ┃Animal┃ {
+ Dog(String, f64),
+ Cat { weight: f64, name: String },
+}
+```
+
+#### After
+```rust
+enum Animal {
+ Cat { weight: f64, name: String },
+ Dog(String, f64),
+}
+```
+
+and sorting a single enum struct variant:
+
+#### Before
+```rust
+enum Animal {
+ Dog(String, f64),
+ Cat ┃{ weight: f64, name: String }┃,
+}
+```
+
+#### After
+```rust
+enum Animal {
+ Dog(String, f64),
+ Cat { name: String, weight: f64 },
+}
+```
+
+
+### `split_import`
+**Source:** [split_import.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/split_import.rs#L5)
+
+Wraps the tail of import into braces.
+
+#### Before
+```rust
+use std::┃collections::HashMap;
+```
+
+#### After
+```rust
+use std::{collections::HashMap};
+```
+
+
+### `sugar_impl_future_into_async`
+**Source:** [toggle_async_sugar.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/toggle_async_sugar.rs#L13)
+
+Rewrites asynchronous function from `-> impl Future` into `async fn`.
+This action does not touch the function body and therefore `async { 0 }`
+block does not transform to just `0`.
+
+#### Before
+```rust
+pub fn foo() -> impl core::future::F┃uture<Output = usize> {
+ async { 0 }
+}
+```
+
+#### After
+```rust
+pub async fn foo() -> usize {
+ async { 0 }
+}
+```
+
+
+### `toggle_ignore`
+**Source:** [toggle_ignore.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/toggle_ignore.rs#L8)
+
+Adds `#[ignore]` attribute to the test.
+
+#### Before
+```rust
+┃#[test]
+fn arithmetics() {
+ assert_eq!(2 + 2, 5);
+}
+```
+
+#### After
+```rust
+#[test]
+#[ignore]
+fn arithmetics() {
+ assert_eq!(2 + 2, 5);
+}
+```
+
+
+### `toggle_macro_delimiter`
+**Source:** [toggle_macro_delimiter.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs#L9)
+
+Change macro delimiters in the order of `( -> { -> [ -> (`.
+
+#### Before
+```rust
+macro_rules! sth {
+ () => {};
+}
+
+sth!┃( );
+```
+
+#### After
+```rust
+macro_rules! sth {
+ () => {};
+}
+
+sth!{ }
+```
+
+
+### `unmerge_match_arm`
+**Source:** [unmerge_match_arm.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unmerge_match_arm.rs#L10)
+
+Splits the current match with a `|` pattern into two arms with identical bodies.
+
+#### Before
+```rust
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move(..) ┃| Action::Stop => foo(),
+ }
+}
+```
+
+#### After
+```rust
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move(..) => foo(),
+ Action::Stop => foo(),
+ }
+}
+```
+
+
+### `unmerge_use`
+**Source:** [unmerge_use.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unmerge_use.rs#L12)
+
+Extracts single use item from use list.
+
+#### Before
+```rust
+use std::fmt::{Debug, Display┃};
+```
+
+#### After
+```rust
+use std::fmt::{Debug};
+use std::fmt::Display;
+```
+
+
+### `unnecessary_async`
+**Source:** [unnecessary_async.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unnecessary_async.rs#L17)
+
+Removes the `async` mark from functions which have no `.await` in their body.
+Looks for calls to the functions and removes the `.await` on the call site.
+
+#### Before
+```rust
+pub asy┃nc fn foo() {}
+pub async fn bar() { foo().await }
+```
+
+#### After
+```rust
+pub fn foo() {}
+pub async fn bar() { foo() }
+```
+
+
+### `unqualify_method_call`
+**Source:** [unqualify_method_call.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unqualify_method_call.rs#L9)
+
+Transforms universal function call syntax into a method call.
+
+#### Before
+```rust
+fn main() {
+ std::ops::Add::add┃(1, 2);
+}
+```
+
+#### After
+```rust
+use std::ops::Add;
+
+fn main() {
+ 1.add(2);
+}
+```
+
+
+### `unwrap_block`
+**Source:** [unwrap_block.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unwrap_block.rs#L12)
+
+This assist removes if...else, for, while and loop control statements to just keep the body.
+
+#### Before
+```rust
+fn foo() {
+ if true {┃
+ println!("foo");
+ }
+}
+```
+
+#### After
+```rust
+fn foo() {
+ println!("foo");
+}
+```
+
+
+### `unwrap_option_return_type`
+**Source:** [unwrap_return_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unwrap_return_type.rs#L13)
+
+Unwrap the function's return type.
+
+#### Before
+```rust
+fn foo() -> Option<i32>┃ { Some(42i32) }
+```
+
+#### After
+```rust
+fn foo() -> i32 { 42i32 }
+```
+
+
+### `unwrap_result_return_type`
+**Source:** [unwrap_return_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unwrap_return_type.rs#L26)
+
+Unwrap the function's return type.
+
+#### Before
+```rust
+fn foo() -> Result<i32>┃ { Ok(42i32) }
+```
+
+#### After
+```rust
+fn foo() -> i32 { 42i32 }
+```
+
+
+### `unwrap_tuple`
+**Source:** [unwrap_tuple.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unwrap_tuple.rs#L8)
+
+Unwrap the tuple to different variables.
+
+#### Before
+```rust
+fn main() {
+ ┃let (foo, bar) = ("Foo", "Bar");
+}
+```
+
+#### After
+```rust
+fn main() {
+ let foo = "Foo";
+ let bar = "Bar";
+}
+```
+
+
+### `wrap_return_type_in_option`
+**Source:** [wrap_return_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/wrap_return_type.rs#L16)
+
+Wrap the function's return type into Option.
+
+#### Before
+```rust
+fn foo() -> i32┃ { 42i32 }
+```
+
+#### After
+```rust
+fn foo() -> Option<i32> { Some(42i32) }
+```
+
+
+### `wrap_return_type_in_result`
+**Source:** [wrap_return_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/wrap_return_type.rs#L29)
+
+Wrap the function's return type into Result.
+
+#### Before
+```rust
+fn foo() -> i32┃ { 42i32 }
+```
+
+#### After
+```rust
+fn foo() -> Result<i32, ${0:_}> { Ok(42i32) }
+```
+
+
+### `wrap_unwrap_cfg_attr`
+**Source:** [wrap_unwrap_cfg_attr.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs#L12)
+
+Wraps an attribute in a `cfg_attr` attribute, or unwraps a `cfg_attr` attribute into the inner attributes.
+
+#### Before
+```rust
+#[derive┃(Debug)]
+struct S {
+ field: i32
+}
+```
+
+#### After
+```rust
+#[cfg_attr(┃, derive(Debug))]
+struct S {
+ field: i32
+}
+```
diff --git a/docs/book/src/configuration.md b/docs/book/src/configuration.md
new file mode 100644
index 0000000000..fd94a4221a
--- /dev/null
+++ b/docs/book/src/configuration.md
@@ -0,0 +1,51 @@
+# Configuration
+
+**Source:**
+[config.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/config.rs)
+
+The [Installation](./installation.md) section contains details on
+configuration for some of the editors. In general `rust-analyzer` is
+configured via LSP messages, which means that it’s up to the editor to
+decide on the exact format and location of configuration files.
+
+Some clients, such as [VS Code](./vs_code.md) or [COC plugin in
+Vim](./other_editors.md#coc-rust-analyzer) provide `rust-analyzer` specific configuration
+UIs. Others may require you to know a bit more about the interaction
+with `rust-analyzer`.
+
+For the latter category, it might help to know that the initial
+configuration is specified as a value of the `initializationOptions`
+field of the [`InitializeParams` message, in the LSP
+protocol](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#initialize).
+The spec says that the field type is `any?`, but `rust-analyzer` is
+looking for a JSON object that is constructed using settings from the
+list below. Name of the setting, ignoring the `rust-analyzer.` prefix,
+is used as a path, and value of the setting becomes the JSON property
+value.
+
+For example, a very common configuration is to enable proc-macro
+support, which can be achieved by sending this JSON:
+
+ {
+ "cargo": {
+ "buildScripts": {
+ "enable": true
+ }
+ },
+ "procMacro": {
+ "enable": true,
+ }
+ }
+
+Please consult your editor’s documentation to learn more about how to
+configure [LSP
+servers](https://microsoft.github.io/language-server-protocol/).
+
+To verify which configuration is actually used by `rust-analyzer`, set
+`RA_LOG` environment variable to `rust_analyzer=info` and look for
+config-related messages. Logs should show both the JSON that
+`rust-analyzer` sees as well as the updated config.
+
+This is the list of config options `rust-analyzer` supports:
+
+{{#include configuration_generated.md}}
diff --git a/docs/book/src/configuration_generated.md b/docs/book/src/configuration_generated.md
new file mode 100644
index 0000000000..0c6674b140
--- /dev/null
+++ b/docs/book/src/configuration_generated.md
@@ -0,0 +1,1206 @@
+**rust-analyzer.assist.emitMustUse** (default: false)
+
+ Whether to insert #[must_use] when generating `as_` methods
+for enum variants.
+
+
+**rust-analyzer.assist.expressionFillDefault** (default: "todo")
+
+ Placeholder expression to use for missing expressions in assists.
+
+
+**rust-analyzer.assist.termSearch.borrowcheck** (default: true)
+
+ Enable borrow checking for term search code assists. If set to false, there will be more suggestions, but some of them may not borrow-check.
+
+
+**rust-analyzer.assist.termSearch.fuel** (default: 1800)
+
+ Term search fuel in "units of work" for assists (Defaults to 1800).
+
+
+**rust-analyzer.cachePriming.enable** (default: true)
+
+ Warm up caches on project load.
+
+
+**rust-analyzer.cachePriming.numThreads** (default: "physical")
+
+ How many worker threads to handle priming caches. The default `0` means to pick automatically.
+
+
+**rust-analyzer.cargo.allTargets** (default: true)
+
+ Pass `--all-targets` to cargo invocation.
+
+
+**rust-analyzer.cargo.autoreload** (default: true)
+
+ Automatically refresh project info via `cargo metadata` on
+`Cargo.toml` or `.cargo/config.toml` changes.
+
+
+**rust-analyzer.cargo.buildScripts.enable** (default: true)
+
+ Run build scripts (`build.rs`) for more precise code analysis.
+
+
+**rust-analyzer.cargo.buildScripts.invocationStrategy** (default: "per_workspace")
+
+ Specifies the invocation strategy to use when running the build scripts command.
+If `per_workspace` is set, the command will be executed for each Rust workspace with the
+workspace as the working directory.
+If `once` is set, the command will be executed once with the opened project as the
+working directory.
+This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
+is set.
+
+
+**rust-analyzer.cargo.buildScripts.overrideCommand** (default: null)
+
+ Override the command rust-analyzer uses to run build scripts and
+build procedural macros. The command is required to output json
+and should therefore include `--message-format=json` or a similar
+option.
+
+If there are multiple linked projects/workspaces, this command is invoked for
+each of them, with the working directory being the workspace root
+(i.e., the folder containing the `Cargo.toml`). This can be overwritten
+by changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#`.
+
+By default, a cargo invocation will be constructed for the configured
+targets and features, with the following base command line:
+
+```bash
+cargo check --quiet --workspace --message-format=json --all-targets --keep-going
+```
+.
+
+
+**rust-analyzer.cargo.buildScripts.rebuildOnSave** (default: true)
+
+ Rerun proc-macros building/build-scripts running when proc-macro
+or build-script sources change and are saved.
+
+
+**rust-analyzer.cargo.buildScripts.useRustcWrapper** (default: true)
+
+ Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
+avoid checking unnecessary things.
+
+
+**rust-analyzer.cargo.cfgs**
+
+Default:
+
+```
+[
+    "debug_assertions",
+    "miri"
+]
+```
+
+ List of cfg options to enable with the given values.
+
+
+**rust-analyzer.cargo.extraArgs** (default: [])
+
+ Extra arguments that are passed to every cargo invocation.
+
+
+**rust-analyzer.cargo.extraEnv** (default: {})
+
+ Extra environment variables that will be set when running cargo, rustc
+or other commands within the workspace. Useful for setting RUSTFLAGS.
+
+
+**rust-analyzer.cargo.features** (default: [])
+
+ List of features to activate.
+
+Set this to `"all"` to pass `--all-features` to cargo.
+
+
+**rust-analyzer.cargo.noDefaultFeatures** (default: false)
+
+ Whether to pass `--no-default-features` to cargo.
+
+
+**rust-analyzer.cargo.sysroot** (default: "discover")
+
+ Relative path to the sysroot, or "discover" to try to automatically find it via
+"rustc --print sysroot".
+
+Unsetting this disables sysroot loading.
+
+This option does not take effect until rust-analyzer is restarted.
+
+
+**rust-analyzer.cargo.sysrootSrc** (default: null)
+
+ Relative path to the sysroot library sources. If left unset, this will default to
+`{cargo.sysroot}/lib/rustlib/src/rust/library`.
+
+This option does not take effect until rust-analyzer is restarted.
+
+
+**rust-analyzer.cargo.target** (default: null)
+
+ Compilation target override (target tuple).
+
+
+**rust-analyzer.cargo.targetDir** (default: null)
+
+ Optional path to a rust-analyzer specific target directory.
+This prevents rust-analyzer's `cargo check` and initial build-script and proc-macro
+building from locking the `Cargo.lock` at the expense of duplicating build artifacts.
+
+Set to `true` to use a subdirectory of the existing target directory or
+set to a path relative to the workspace to use that path.
+
+
+**rust-analyzer.cfg.setTest** (default: true)
+
+ Set `cfg(test)` for local crates. Defaults to true.
+
+
+**rust-analyzer.checkOnSave** (default: true)
+
+ Run the check command for diagnostics on save.
+
+
+**rust-analyzer.check.allTargets** (default: null)
+
+ Check all targets and tests (`--all-targets`). Defaults to
+`#rust-analyzer.cargo.allTargets#`.
+
+
+**rust-analyzer.check.command** (default: "check")
+
+ Cargo command to use for `cargo check`.
+
+
+**rust-analyzer.check.extraArgs** (default: [])
+
+ Extra arguments for `cargo check`.
+
+
+**rust-analyzer.check.extraEnv** (default: {})
+
+ Extra environment variables that will be set when running `cargo check`.
+Extends `#rust-analyzer.cargo.extraEnv#`.
+
+
+**rust-analyzer.check.features** (default: null)
+
+ List of features to activate. Defaults to
+`#rust-analyzer.cargo.features#`.
+
+Set to `"all"` to pass `--all-features` to Cargo.
+
+
+**rust-analyzer.check.ignore** (default: [])
+
+ List of `cargo check` (or other command specified in `check.command`) diagnostics to ignore.
+
+For example for `cargo check`: `dead_code`, `unused_imports`, `unused_variables`,...
+
+
+**rust-analyzer.check.invocationStrategy** (default: "per_workspace")
+
+ Specifies the invocation strategy to use when running the check command.
+If `per_workspace` is set, the command will be executed for each workspace.
+If `once` is set, the command will be executed once.
+This config only has an effect when `#rust-analyzer.check.overrideCommand#`
+is set.
+
+
+**rust-analyzer.check.noDefaultFeatures** (default: null)
+
+ Whether to pass `--no-default-features` to Cargo. Defaults to
+`#rust-analyzer.cargo.noDefaultFeatures#`.
+
+
+**rust-analyzer.check.overrideCommand** (default: null)
+
+ Override the command rust-analyzer uses instead of `cargo check` for
+diagnostics on save. The command is required to output json and
+should therefore include `--message-format=json` or a similar option
+(if your client supports the `colorDiagnosticOutput` experimental
+capability, you can use `--message-format=json-diagnostic-rendered-ansi`).
+
+If you're changing this because you're using some tool wrapping
+Cargo, you might also want to change
+`#rust-analyzer.cargo.buildScripts.overrideCommand#`.
+
+If there are multiple linked projects/workspaces, this command is invoked for
+each of them, with the working directory being the workspace root
+(i.e., the folder containing the `Cargo.toml`). This can be overwritten
+by changing `#rust-analyzer.check.invocationStrategy#`.
+
+If `$saved_file` is part of the command, rust-analyzer will pass
+the absolute path of the saved file to the provided command. This is
+intended to be used with non-Cargo build systems.
+Note that `$saved_file` is experimental and may be removed in the future.
+
+An example command would be:
+
+```bash
+cargo check --workspace --message-format=json --all-targets
+```
+.
+
+
+**rust-analyzer.check.targets** (default: null)
+
+ Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.
+
+Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g.
+`["aarch64-apple-darwin", "x86_64-apple-darwin"]`.
+
+Aliased as `"checkOnSave.targets"`.
+
+
+**rust-analyzer.check.workspace** (default: true)
+
+ Whether `--workspace` should be passed to `cargo check`.
+If false, `-p <package>` will be passed instead if applicable. In case it is not, no
+check will be performed.
+
+
+**rust-analyzer.completion.addSemicolonToUnit** (default: true)
+
+ Whether to automatically add a semicolon when completing unit-returning functions.
+
+In `match` arms it completes a comma instead.
+
+
+**rust-analyzer.completion.autoAwait.enable** (default: true)
+
+ Toggles the additional completions that automatically show method calls and field accesses with `await` prefixed to them when completing on a future.
+
+
+**rust-analyzer.completion.autoIter.enable** (default: true)
+
+ Toggles the additional completions that automatically show method calls with `iter()` or `into_iter()` prefixed to them when completing on a type that has them.
+
+
+**rust-analyzer.completion.autoimport.enable** (default: true)
+
+ Toggles the additional completions that automatically add imports when completed.
+Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
+
+
+**rust-analyzer.completion.autoimport.exclude**
+
+Default:
+
+```
+[
+    {
+        "path": "core::borrow::Borrow",
+        "type": "methods"
+    },
+    {
+        "path": "core::borrow::BorrowMut",
+        "type": "methods"
+    }
+]
+```
+
+ A list of full paths to items to exclude from auto-importing completions.
+
+Traits in this list won't have their methods suggested in completions unless the trait
+is in scope.
+
+You can either specify a string path which defaults to type "always" or use the more verbose
+form `{ "path": "path::to::item", "type": "always" }`.
+
+For traits the type "methods" can be used to only exclude the methods but not the trait itself.
+
+This setting also inherits `#rust-analyzer.completion.excludeTraits#`.
+
+
+ **rust-analyzer.completion.autoself.enable** (default: true)
+
+ Toggles the additional completions that automatically show method calls and field accesses
+with `self` prefixed to them when inside a method.
+
+
+**rust-analyzer.completion.callable.snippets** (default: "fill_arguments")
+
+ Whether to add parentheses and argument snippets when completing functions.
+
+
+**rust-analyzer.completion.excludeTraits** (default: [])
+
+ A list of full paths to traits whose methods to exclude from completion.
+
+Methods from these traits won't be completed, even if the trait is in scope. However, they will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or `T where T: Trait`.
+
+Note that the traits themselves can still be completed.
+
+
+**rust-analyzer.completion.fullFunctionSignatures.enable** (default: false)
+
+ Whether to show full function/method signatures in completion docs.
+
+
+**rust-analyzer.completion.hideDeprecated** (default: false)
+
+ Whether to omit deprecated items from autocompletion. By default they are marked as deprecated but not hidden.
+
+
+**rust-analyzer.completion.limit** (default: null)
+
+ Maximum number of completions to return. If `None`, the limit is infinite.
+
+
+**rust-analyzer.completion.postfix.enable** (default: true)
+
+ Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
+
+
+**rust-analyzer.completion.privateEditable.enable** (default: false)
+
+ Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
+
+
+ **rust-analyzer.completion.snippets.custom**
+
+Default:
+
+```{
+ "Ok": {
+ "postfix": "ok",
+ "body": "Ok(${receiver})",
+ "description": "Wrap the expression in a `Result::Ok`",
+ "scope": "expr"
+ },
+ "Box::pin": {
+ "postfix": "pinbox",
+ "body": "Box::pin(${receiver})",
+ "requires": "std::boxed::Box",
+ "description": "Put the expression into a pinned `Box`",
+ "scope": "expr"
+ },
+ "Arc::new": {
+ "postfix": "arc",
+ "body": "Arc::new(${receiver})",
+ "requires": "std::sync::Arc",
+ "description": "Put the expression into an `Arc`",
+ "scope": "expr"
+ },
+ "Some": {
+ "postfix": "some",
+ "body": "Some(${receiver})",
+ "description": "Wrap the expression in an `Option::Some`",
+ "scope": "expr"
+ },
+ "Err": {
+ "postfix": "err",
+ "body": "Err(${receiver})",
+ "description": "Wrap the expression in a `Result::Err`",
+ "scope": "expr"
+ },
+ "Rc::new": {
+ "postfix": "rc",
+ "body": "Rc::new(${receiver})",
+ "requires": "std::rc::Rc",
+ "description": "Put the expression into an `Rc`",
+ "scope": "expr"
+ }
+}
+
+```
+
+ Custom completion snippets.
+
+
+ **rust-analyzer.completion.termSearch.enable** (default: false)
+
+ Whether to enable term search based snippets like `Some(foo.bar().baz())`.
+
+
+**rust-analyzer.completion.termSearch.fuel** (default: 1000)
+
+ Term search fuel in "units of work" for autocompletion (Defaults to 1000).
+
+
+**rust-analyzer.diagnostics.disabled** (default: [])
+
+ List of rust-analyzer diagnostics to disable.
+
+
+**rust-analyzer.diagnostics.enable** (default: true)
+
+ Whether to show native rust-analyzer diagnostics.
+
+
+**rust-analyzer.diagnostics.experimental.enable** (default: false)
+
+ Whether to show experimental rust-analyzer diagnostics that might
+have more false positives than usual.
+
+
+**rust-analyzer.diagnostics.remapPrefix** (default: {})
+
+ Map of prefixes to be substituted when parsing diagnostic file paths.
+This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.
+
+
+**rust-analyzer.diagnostics.styleLints.enable** (default: false)
+
+ Whether to run additional style lints.
+
+
+**rust-analyzer.diagnostics.warningsAsHint** (default: [])
+
+ List of warnings that should be displayed with hint severity.
+
+The warnings will be indicated by faded text or three dots in code
+and will not show up in the `Problems Panel`.
+
+
+**rust-analyzer.diagnostics.warningsAsInfo** (default: [])
+
+ List of warnings that should be displayed with info severity.
+
+The warnings will be indicated by a blue squiggly underline in code
+and a blue icon in the `Problems Panel`.
+
+
+**rust-analyzer.files.exclude** (default: [])
+
+ These paths (file/directories) will be ignored by rust-analyzer. They are
+relative to the workspace root, and globs are not supported. You may
+also need to add the folders to Code's `files.watcherExclude`.
+
+
+**rust-analyzer.files.watcher** (default: "client")
+
+ Controls file watching implementation.
+
+
+**rust-analyzer.highlightRelated.breakPoints.enable** (default: true)
+
+ Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
+
+
+**rust-analyzer.highlightRelated.closureCaptures.enable** (default: true)
+
+ Enables highlighting of all captures of a closure while the cursor is on the `|` or `move` keyword of a closure.
+
+
+**rust-analyzer.highlightRelated.exitPoints.enable** (default: true)
+
+ Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
+
+
+**rust-analyzer.highlightRelated.references.enable** (default: true)
+
+ Enables highlighting of related references while the cursor is on any identifier.
+
+
+**rust-analyzer.highlightRelated.yieldPoints.enable** (default: true)
+
+ Enables highlighting of all yield points of an async context while the cursor is on any `async` or `await` keywords.
+
+
+**rust-analyzer.hover.actions.debug.enable** (default: true)
+
+ Whether to show `Debug` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+
+
+**rust-analyzer.hover.actions.enable** (default: true)
+
+ Whether to show HoverActions in Rust files.
+
+
+**rust-analyzer.hover.actions.gotoTypeDef.enable** (default: true)
+
+ Whether to show `Go to Type Definition` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+
+
+**rust-analyzer.hover.actions.implementations.enable** (default: true)
+
+ Whether to show `Implementations` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+
+
+**rust-analyzer.hover.actions.references.enable** (default: false)
+
+ Whether to show `References` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+
+
+**rust-analyzer.hover.actions.run.enable** (default: true)
+
+ Whether to show `Run` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+
+
+**rust-analyzer.hover.actions.updateTest.enable** (default: true)
+
+ Whether to show `Update Test` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` and `#rust-analyzer.hover.actions.run.enable#` are set.
+
+
+**rust-analyzer.hover.documentation.enable** (default: true)
+
+ Whether to show documentation on hover.
+
+
+**rust-analyzer.hover.documentation.keywords.enable** (default: true)
+
+ Whether to show keyword hover popups. Only applies when
+`#rust-analyzer.hover.documentation.enable#` is set.
+
+
+**rust-analyzer.hover.links.enable** (default: true)
+
+ Use markdown syntax for links on hover.
+
+
+**rust-analyzer.hover.maxSubstitutionLength** (default: 20)
+
+ Whether to show what types are used as generic arguments in calls etc. on hover, and what is their max length to show such types, beyond which they will be shown with an ellipsis.
+
+This can take three values: `null` means "unlimited", the string `"hide"` means to not show generic substitutions at all, and a number means to limit them to X characters.
+
+The default is 20 characters.
+
+
+**rust-analyzer.hover.memoryLayout.alignment** (default: "hexadecimal")
+
+ How to render the align information in a memory layout hover.
+
+
+**rust-analyzer.hover.memoryLayout.enable** (default: true)
+
+ Whether to show memory layout data on hover.
+
+
+**rust-analyzer.hover.memoryLayout.niches** (default: false)
+
+ How to render the niche information in a memory layout hover.
+
+
+**rust-analyzer.hover.memoryLayout.offset** (default: "hexadecimal")
+
+ How to render the offset information in a memory layout hover.
+
+
+**rust-analyzer.hover.memoryLayout.size** (default: "both")
+
+ How to render the size information in a memory layout hover.
+
+
+**rust-analyzer.hover.show.enumVariants** (default: 5)
+
+ How many variants of an enum to display when hovering on. Show none if empty.
+
+
+**rust-analyzer.hover.show.fields** (default: 5)
+
+ How many fields of a struct, variant or union to display when hovering on. Show none if empty.
+
+
+**rust-analyzer.hover.show.traitAssocItems** (default: null)
+
+ How many associated items of a trait to display when hovering a trait.
+
+
+**rust-analyzer.imports.granularity.enforce** (default: false)
+
+ Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
+
+
+**rust-analyzer.imports.granularity.group** (default: "crate")
+
+ How imports should be grouped into use statements.
+
+
+**rust-analyzer.imports.group.enable** (default: true)
+
+ Group inserted imports by the [following order](https://rust-analyzer.github.io/manual.html#auto-import). Groups are separated by newlines.
+
+
+**rust-analyzer.imports.merge.glob** (default: true)
+
+ Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
+
+
+**rust-analyzer.imports.preferNoStd** (default: false)
+
+ Prefer to unconditionally use imports of the core and alloc crate, over the std crate.
+
+
+**rust-analyzer.imports.preferPrelude** (default: false)
+
+ Whether to prefer import paths containing a `prelude` module.
+
+
+**rust-analyzer.imports.prefix** (default: "plain")
+
+ The path structure for newly inserted paths to use.
+
+
+**rust-analyzer.imports.prefixExternPrelude** (default: false)
+
+ Whether to prefix external (including std, core) crate imports with `::`. e.g. "use ::std::io::Read;".
+
+
+**rust-analyzer.inlayHints.bindingModeHints.enable** (default: false)
+
+ Whether to show inlay type hints for binding modes.
+
+
+**rust-analyzer.inlayHints.chainingHints.enable** (default: true)
+
+ Whether to show inlay type hints for method chains.
+
+
+**rust-analyzer.inlayHints.closingBraceHints.enable** (default: true)
+
+ Whether to show inlay hints after a closing `}` to indicate what item it belongs to.
+
+
+**rust-analyzer.inlayHints.closingBraceHints.minLines** (default: 25)
+
+ Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
+to always show them).
+
+
+**rust-analyzer.inlayHints.closureCaptureHints.enable** (default: false)
+
+ Whether to show inlay hints for closure captures.
+
+
+**rust-analyzer.inlayHints.closureReturnTypeHints.enable** (default: "never")
+
+ Whether to show inlay type hints for return types of closures.
+
+
+**rust-analyzer.inlayHints.closureStyle** (default: "impl_fn")
+
+ Closure notation in type and chaining inlay hints.
+
+
+**rust-analyzer.inlayHints.discriminantHints.enable** (default: "never")
+
+ Whether to show enum variant discriminant hints.
+
+
+**rust-analyzer.inlayHints.expressionAdjustmentHints.enable** (default: "never")
+
+ Whether to show inlay hints for type adjustments.
+
+
+**rust-analyzer.inlayHints.expressionAdjustmentHints.hideOutsideUnsafe** (default: false)
+
+ Whether to hide inlay hints for type adjustments outside of `unsafe` blocks.
+
+
+**rust-analyzer.inlayHints.expressionAdjustmentHints.mode** (default: "prefix")
+
+ Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).
+
+
+**rust-analyzer.inlayHints.genericParameterHints.const.enable** (default: true)
+
+ Whether to show const generic parameter name inlay hints.
+
+
+**rust-analyzer.inlayHints.genericParameterHints.lifetime.enable** (default: false)
+
+ Whether to show generic lifetime parameter name inlay hints.
+
+
+**rust-analyzer.inlayHints.genericParameterHints.type.enable** (default: false)
+
+ Whether to show generic type parameter name inlay hints.
+
+
+**rust-analyzer.inlayHints.implicitDrops.enable** (default: false)
+
+ Whether to show implicit drop hints.
+
+
+**rust-analyzer.inlayHints.implicitSizedBoundHints.enable** (default: false)
+
+ Whether to show inlay hints for the implied type parameter `Sized` bound.
+
+
+**rust-analyzer.inlayHints.lifetimeElisionHints.enable** (default: "never")
+
+ Whether to show inlay type hints for elided lifetimes in function signatures.
+
+
+**rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames** (default: false)
+
+ Whether to prefer using parameter names as the name for elided lifetime hints if possible.
+
+
+**rust-analyzer.inlayHints.maxLength** (default: 25)
+
+ Maximum length for inlay hints. Set to null to have an unlimited length.
+
+
+**rust-analyzer.inlayHints.parameterHints.enable** (default: true)
+
+ Whether to show function parameter name inlay hints at the call
+site.
+
+
+**rust-analyzer.inlayHints.rangeExclusiveHints.enable** (default: false)
+
+ Whether to show exclusive range inlay hints.
+
+
+**rust-analyzer.inlayHints.reborrowHints.enable** (default: "never")
+
+ Whether to show inlay hints for compiler inserted reborrows.
+This setting is deprecated in favor of `#rust-analyzer.inlayHints.expressionAdjustmentHints.enable#`.
+
+
+**rust-analyzer.inlayHints.renderColons** (default: true)
+
+ Whether to render leading colons for type hints, and trailing colons for parameter hints.
+
+
+**rust-analyzer.inlayHints.typeHints.enable** (default: true)
+
+ Whether to show inlay type hints for variables.
+
+
+**rust-analyzer.inlayHints.typeHints.hideClosureInitialization** (default: false)
+
+ Whether to hide inlay type hints for `let` statements that initialize to a closure.
+Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.
+
+
+**rust-analyzer.inlayHints.typeHints.hideClosureParameter** (default: false)
+
+ Whether to hide inlay parameter type hints for closures.
+
+
+**rust-analyzer.inlayHints.typeHints.hideNamedConstructor** (default: false)
+
+ Whether to hide inlay type hints for constructors.
+
+
+**rust-analyzer.interpret.tests** (default: false)
+
+ Enables the experimental support for interpreting tests.
+
+
+**rust-analyzer.joinLines.joinAssignments** (default: true)
+
+ Join lines merges consecutive declaration and initialization of an assignment.
+
+
+**rust-analyzer.joinLines.joinElseIf** (default: true)
+
+ Join lines inserts else between consecutive ifs.
+
+
+**rust-analyzer.joinLines.removeTrailingComma** (default: true)
+
+ Join lines removes trailing commas.
+
+
+**rust-analyzer.joinLines.unwrapTrivialBlock** (default: true)
+
+ Join lines unwraps trivial blocks.
+
+
+**rust-analyzer.lens.debug.enable** (default: true)
+
+ Whether to show `Debug` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+
+
+**rust-analyzer.lens.enable** (default: true)
+
+ Whether to show CodeLens in Rust files.
+
+
+**rust-analyzer.lens.implementations.enable** (default: true)
+
+ Whether to show `Implementations` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+
+
+**rust-analyzer.lens.location** (default: "above_name")
+
+ Where to render annotations.
+
+
+**rust-analyzer.lens.references.adt.enable** (default: false)
+
+ Whether to show `References` lens for Struct, Enum, and Union.
+Only applies when `#rust-analyzer.lens.enable#` is set.
+
+
+**rust-analyzer.lens.references.enumVariant.enable** (default: false)
+
+ Whether to show `References` lens for Enum Variants.
+Only applies when `#rust-analyzer.lens.enable#` is set.
+
+
+**rust-analyzer.lens.references.method.enable** (default: false)
+
+ Whether to show `Method References` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+
+
+**rust-analyzer.lens.references.trait.enable** (default: false)
+
+ Whether to show `References` lens for Trait.
+Only applies when `#rust-analyzer.lens.enable#` is set.
+
+
+**rust-analyzer.lens.run.enable** (default: true)
+
+ Whether to show `Run` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+
+
+**rust-analyzer.lens.updateTest.enable** (default: true)
+
+ Whether to show `Update Test` lens. Only applies when
+`#rust-analyzer.lens.enable#` and `#rust-analyzer.lens.run.enable#` are set.
+
+
+**rust-analyzer.linkedProjects** (default: [])
+
+ Disable project auto-discovery in favor of explicitly specified set
+of projects.
+
+Elements must be paths pointing to `Cargo.toml`,
+`rust-project.json`, `.rs` files (which will be treated as standalone files) or JSON
+objects in `rust-project.json` format.
+
+
+**rust-analyzer.lru.capacity** (default: null)
+
+ Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
+
+
+**rust-analyzer.lru.query.capacities** (default: {})
+
+ Sets the LRU capacity of the specified queries.
+
+
+**rust-analyzer.notifications.cargoTomlNotFound** (default: true)
+
+ Whether to show `can't find Cargo.toml` error message.
+
+
+**rust-analyzer.numThreads** (default: null)
+
+ How many worker threads in the main loop. The default `null` means to pick automatically.
+
+
+**rust-analyzer.procMacro.attributes.enable** (default: true)
+
+ Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
+
+
+**rust-analyzer.procMacro.enable** (default: true)
+
+ Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
+
+
+**rust-analyzer.procMacro.ignored** (default: {})
+
+ These proc-macros will be ignored when trying to expand them.
+
+This config takes a map of crate names with the exported proc-macro names to ignore as values.
+
+
+**rust-analyzer.procMacro.server** (default: null)
+
+ Internal config, path to proc-macro server executable.
+
+
+**rust-analyzer.references.excludeImports** (default: false)
+
+ Exclude imports from find-all-references.
+
+
+**rust-analyzer.references.excludeTests** (default: false)
+
+ Exclude tests from find-all-references and call-hierarchy.
+
+
+**rust-analyzer.runnables.command** (default: null)
+
+ Command to be executed instead of 'cargo' for runnables.
+
+
+**rust-analyzer.runnables.extraArgs** (default: [])
+
+ Additional arguments to be passed to cargo for runnables such as
+tests or binaries. For example, it may be `--release`.
+
+
+ **rust-analyzer.runnables.extraTestBinaryArgs**
+
+Default:
+
+```[
+ "--show-output"
+]
+
+```
+
+ Additional arguments to be passed through Cargo to launched tests, benchmarks, or
+doc-tests.
+
+Unless the launched target uses a
+[custom test harness](https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-harness-field),
+they will end up being interpreted as options to
+[`rustc`’s built-in test harness (“libtest”)](https://doc.rust-lang.org/rustc/tests/index.html#cli-arguments).
+
+
+ **rust-analyzer.rustc.source** (default: null)
+
+ Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
+projects, or "discover" to try to automatically find it if the `rustc-dev` component
+is installed.
+
+Any project which uses rust-analyzer with the `rustc_private`
+crates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.
+
+This option does not take effect until rust-analyzer is restarted.
+
+
+**rust-analyzer.rustfmt.extraArgs** (default: [])
+
+ Additional arguments to `rustfmt`.
+
+
+**rust-analyzer.rustfmt.overrideCommand** (default: null)
+
+ Advanced option, fully override the command rust-analyzer uses for
+formatting. This should be the equivalent of `rustfmt` here, and
+not that of `cargo fmt`. The file contents will be passed on the
+standard input and the formatted result will be read from the
+standard output.
+
+
+**rust-analyzer.rustfmt.rangeFormatting.enable** (default: false)
+
+ Enables the use of rustfmt's unstable range formatting command for the
+`textDocument/rangeFormatting` request. The rustfmt option is unstable and only
+available on a nightly build.
+
+
+**rust-analyzer.semanticHighlighting.doc.comment.inject.enable** (default: true)
+
+ Inject additional highlighting into doc comments.
+
+When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
+doc links.
+
+
+**rust-analyzer.semanticHighlighting.nonStandardTokens** (default: true)
+
+ Whether the server is allowed to emit non-standard tokens and modifiers.
+
+
+**rust-analyzer.semanticHighlighting.operator.enable** (default: true)
+
+ Use semantic tokens for operators.
+
+When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
+they are tagged with modifiers.
+
+
+**rust-analyzer.semanticHighlighting.operator.specialization.enable** (default: false)
+
+ Use specialized semantic tokens for operators.
+
+When enabled, rust-analyzer will emit special token types for operator tokens instead
+of the generic `operator` token type.
+
+
+**rust-analyzer.semanticHighlighting.punctuation.enable** (default: false)
+
+ Use semantic tokens for punctuation.
+
+When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
+they are tagged with modifiers or have a special role.
+
+
+**rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang** (default: false)
+
+ When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
+calls.
+
+
+**rust-analyzer.semanticHighlighting.punctuation.specialization.enable** (default: false)
+
+ Use specialized semantic tokens for punctuation.
+
+When enabled, rust-analyzer will emit special token types for punctuation tokens instead
+of the generic `punctuation` token type.
+
+
+**rust-analyzer.semanticHighlighting.strings.enable** (default: true)
+
+ Use semantic tokens for strings.
+
+In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
+By disabling semantic tokens for strings, other grammars can be used to highlight
+their contents.
+
+
+**rust-analyzer.signatureInfo.detail** (default: "full")
+
+ Show full signature of the callable. Only shows parameters if disabled.
+
+
+**rust-analyzer.signatureInfo.documentation.enable** (default: true)
+
+ Show documentation.
+
+
+**rust-analyzer.typing.triggerChars** (default: "=.")
+
+ Specify the characters allowed to invoke special on typing triggers.
+- typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing expression
+- typing `=` between two expressions adds `;` when in statement position
+- typing `=` to turn an assignment into an equality comparison removes `;` when in expression position
+- typing `.` in a chain method call auto-indents
+- typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the expression
+- typing `{` in a use item adds a closing `}` in the right place
+- typing `>` to complete a return type `->` will insert a space after it
+- typing `<` in a path or type position inserts a closing `>` after the path or type.
+
+
+**rust-analyzer.vfs.extraIncludes** (default: [])
+
+ Additional paths to include in the VFS. Generally for code that is
+generated or otherwise managed by a build system outside of Cargo,
+though Cargo might be the eventual consumer.
+
+
+**rust-analyzer.workspace.discoverConfig** (default: null)
+
+ Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].
+
+[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.
+`progress_label` is used for the title in progress indicators, whereas `files_to_watch`
+is used to determine which build system-specific files should be watched in order to
+reload rust-analyzer.
+
+Below is an example of a valid configuration:
+```json
+"rust-analyzer.workspace.discoverConfig": {
+ "command": [
+ "rust-project",
+ "develop-json"
+ ],
+ "progressLabel": "rust-analyzer",
+ "filesToWatch": [
+ "BUCK"
+ ]
+}
+```
+
+## On `DiscoverWorkspaceConfig::command`
+
+**Warning**: This format is provisional and subject to change.
+
+[`DiscoverWorkspaceConfig::command`] *must* return a JSON object
+corresponding to `DiscoverProjectData::Finished`:
+
+```norun
+#[derive(Debug, Clone, Deserialize, Serialize)]
+#[serde(tag = "kind")]
+#[serde(rename_all = "snake_case")]
+enum DiscoverProjectData {
+ Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },
+ Error { error: String, source: Option<String> },
+ Progress { message: String },
+}
+```
+
+As JSON, `DiscoverProjectData::Finished` is:
+
+```json
+{
+ // the internally-tagged representation of the enum.
+ "kind": "finished",
+ // the file used by a non-Cargo build system to define
+ // a package or target.
+ "buildfile": "rust-analyzer/BUILD",
+ // the contents of a rust-project.json, elided for brevity
+ "project": {
+ "sysroot": "foo",
+ "crates": []
+ }
+}
+```
+
+It is encouraged, but not required, to use the other variants on
+`DiscoverProjectData` to provide a more polished end-user experience.
+
+`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`,
+which will be substituted with the JSON-serialized form of the following
+enum:
+
+```norun
+#[derive(PartialEq, Clone, Debug, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub enum DiscoverArgument {
+ Path(AbsPathBuf),
+ Buildfile(AbsPathBuf),
+}
+```
+
+The JSON representation of `DiscoverArgument::Path` is:
+
+```json
+{
+ "path": "src/main.rs"
+}
+```
+
+Similarly, the JSON representation of `DiscoverArgument::Buildfile` is:
+
+```json
+{
+ "buildfile": "BUILD"
+}
+```
+
+`DiscoverArgument::Path` is used to find and generate a `rust-project.json`,
+and therefore, a workspace, whereas `DiscoverArgument::Buildfile` is used
+to update an existing workspace. As a reference for implementors,
+buck2's `rust-project` will likely be useful:
+https://github.com/facebook/buck2/tree/main/integrations/rust-project.
+
+
+**rust-analyzer.workspace.symbol.search.kind** (default: "only_types")
+
+ Workspace symbol search kind.
+
+
+**rust-analyzer.workspace.symbol.search.limit** (default: 128)
+
+ Limits the number of items returned from a workspace symbol search (Defaults to 128).
+Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.
+Other clients require all results upfront and might require a higher limit.
+
+
+**rust-analyzer.workspace.symbol.search.scope** (default: "workspace")
+
+ Workspace symbol search scope.
+
+
diff --git a/docs/dev/README.md b/docs/book/src/contributing/README.md
index c990212d58..cbbf6acf3e 100644
--- a/docs/dev/README.md
+++ b/docs/book/src/contributing/README.md
@@ -9,7 +9,7 @@ $ cargo test
should be enough to get you started!
-To learn more about how rust-analyzer works, see [./architecture.md](./architecture.md).
+To learn more about how rust-analyzer works, see [Architecture](architecture.md).
It also explains the high-level layout of the source code.
Do skim through that document.
@@ -24,7 +24,9 @@ rust-analyzer is a part of the [RLS-2.0 working
group](https://github.com/rust-lang/compiler-team/tree/6a769c13656c0a6959ebc09e7b1f7c09b86fb9c0/working-groups/rls-2.0).
Discussion happens in this Zulip stream:
-https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer
+<https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer>
+
+<!-- toc -->
# Issue Labels
@@ -54,7 +56,7 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer
# Code Style & Review Process
-Do see [./style.md](./style.md).
+See the [Style Guide](style.md).
# Cookbook
@@ -88,11 +90,13 @@ As a sanity check after I'm done, I use `cargo xtask install --server` and **Rel
If the problem concerns only the VS Code extension, I use **Run Installed Extension** launch configuration from `launch.json`.
Notably, this uses the usual `rust-analyzer` binary from `PATH`.
For this, it is important to have the following in your `settings.json` file:
+
```json
{
"rust-analyzer.server.path": "rust-analyzer"
}
```
+
After I am done with the fix, I use `cargo xtask install --client` to try the new extension for real.
If I need to fix something in the `rust-analyzer` crate, I feel sad because it's on the boundary between the two processes, and working there is slow.
@@ -117,6 +121,7 @@ cd editors/code
npm ci
npm run lint
```
+
## How to ...
* ... add an assist? [#7535](https://github.com/rust-lang/rust-analyzer/pull/7535)
@@ -142,14 +147,15 @@ Note that `stdout` is used for the actual protocol, so `println!` will break thi
To log all communication between the server and the client, there are two choices:
* You can log on the server side, by running something like
+
```
env RA_LOG=lsp_server=debug code .
```
+
* You can log on the client side, by the `rust-analyzer: Toggle LSP Logs` command or enabling `"rust-analyzer.trace.server": "verbose"` workspace setting.
These logs are shown in a separate tab in the output and could be used with LSP inspector.
Kudos to [@DJMcNab](https://github.com/DJMcNab) for setting this awesome infra up!
-
There are also several VS Code commands which might be of interest:
* `rust-analyzer: Status` shows some memory-usage statistics.
diff --git a/docs/dev/architecture.md b/docs/book/src/contributing/architecture.md
index 9c9e05a429..1cc13b3b96 100644
--- a/docs/dev/architecture.md
+++ b/docs/book/src/contributing/architecture.md
@@ -8,19 +8,20 @@ It goes deeper than what is covered in this document, but will take some time to
See also these implementation-related blog posts:
-* https://rust-analyzer.github.io/blog/2019/11/13/find-usages.html
-* https://rust-analyzer.github.io/blog/2020/07/20/three-architectures-for-responsive-ide.html
-* https://rust-analyzer.github.io/blog/2020/09/16/challeging-LR-parsing.html
-* https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html
-* https://rust-analyzer.github.io/blog/2020/10/24/introducing-ungrammar.html
+* <https://rust-analyzer.github.io/blog/2019/11/13/find-usages.html>
+* <https://rust-analyzer.github.io/blog/2020/07/20/three-architectures-for-responsive-ide.html>
+* <https://rust-analyzer.github.io/blog/2020/09/16/challeging-LR-parsing.html>
+* <https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html>
+* <https://rust-analyzer.github.io/blog/2020/10/24/introducing-ungrammar.html>
For older, by now mostly outdated stuff, see the [guide](./guide.md) and [another playlist](https://www.youtube.com/playlist?list=PL85XCvVPmGQho7MZkdW-wtPtuJcFpzycE).
-
## Bird's Eye View
![](https://user-images.githubusercontent.com/4789492/107129398-0ab70f00-687a-11eb-9bfc-d4eb023aec06.png)
+<!-- toc -->
+
On the highest level, rust-analyzer is a thing which accepts input source code from the client and produces a structured semantic model of the code.
More specifically, input data consists of a set of test files (`(PathBuf, String)` pairs) and information about project structure, captured in the so called `CrateGraph`.
@@ -295,7 +296,7 @@ For this reason, all path APIs generally take some existing path as a "file syst
### `crates/stdx`
This crate contains various non-rust-analyzer specific utils, which could have been in std, as well
-as copies of unstable std items we would like to make use of already, like `std::str::split_once`.
+as copies of unstable std items we would like to make use of already.
### `crates/profile`
diff --git a/docs/dev/debugging.md b/docs/book/src/contributing/debugging.md
index 48caec1d8f..db3a28eed1 100644
--- a/docs/dev/debugging.md
+++ b/docs/book/src/contributing/debugging.md
@@ -8,6 +8,7 @@
<img height=150px src="https://user-images.githubusercontent.com/36276403/74611090-92ec5380-5101-11ea-8a41-598f51f3f3e3.png" alt="Debug options view">
- Install all TypeScript dependencies
+
```bash
cd editors/code
npm ci
@@ -19,7 +20,6 @@
where **only** the `rust-analyzer` extension being debugged is enabled.
* To activate the extension you need to open any Rust project folder in `[Extension Development Host]`.
-
## Debug TypeScript VSCode extension
- `Run Installed Extension` - runs the extension with the globally installed `rust-analyzer` binary.
@@ -36,12 +36,12 @@ To apply changes to an already running debug process, press <kbd>Ctrl+Shift+P</k
- When attaching a debugger to an already running `rust-analyzer` server on Linux you might need to enable `ptrace` for unrelated processes by running:
- ```
+ ```bash
echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope
```
-
- By default, the LSP server is built without debug information. To enable it, you'll need to change `Cargo.toml`:
+
```toml
[profile.dev]
debug = 2
@@ -58,6 +58,7 @@ To apply changes to an already running debug process, press <kbd>Ctrl+Shift+P</k
- Go back to the `[Extension Development Host]` instance and hover over a Rust variable and your breakpoint should hit.
If you need to debug the server from the very beginning, including its initialization code, you can use the `--wait-dbg` command line argument or `RA_WAIT_DBG` environment variable. The server will spin at the beginning of the `try_main` function (see `crates\rust-analyzer\src\bin\main.rs`)
+
```rust
let mut d = 4;
while d == 4 { // set a breakpoint here and change the value
@@ -66,6 +67,7 @@ If you need to debug the server from the very beginning, including its initializ
```
However for this to work, you will need to enable debug_assertions in your build
+
```rust
RUSTFLAGS='--cfg debug_assertions' cargo build --release
```
diff --git a/docs/dev/guide.md b/docs/book/src/contributing/guide.md
index bb77aa0eaa..2a2a39af26 100644
--- a/docs/dev/guide.md
+++ b/docs/book/src/contributing/guide.md
@@ -12,6 +12,8 @@ https://youtu.be/ANKBNiSWyfc.
[guide-2019-01]: https://github.com/rust-lang/rust-analyzer/tree/guide-2019-01
[2024-01-01]: https://github.com/rust-lang/rust-analyzer/tree/2024-01-01
+<!-- toc -->
+
## The big picture
On the highest possible level, rust-analyzer is a stateful component. A client may
@@ -76,10 +78,10 @@ to study its methods to understand all the input data.
The `change_file` method controls the set of the input files, where each file
has an integer id (`FileId`, picked by the client) and text (`Option<Arc<str>>`).
-Paths are tricky; they'll be explained below, in source roots section,
+Paths are tricky; they'll be explained below, in source roots section,
together with the `set_roots` method. The "source root" [`is_library`] flag
-along with the concept of [`durability`] allows us to add a group of files which
-are assumed to rarely change. It's mostly an optimization and does not change
+along with the concept of [`durability`] allows us to add a group of files which
+are assumed to rarely change. It's mostly an optimization and does not change
the fundamental picture.
[`is_library`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/base-db/src/input.rs#L38
@@ -141,7 +143,7 @@ the source root, even `/dev/random`.
## Language Server Protocol
-Now let's see how the `Analysis` API is exposed via the JSON RPC based language server protocol.
+Now let's see how the `Analysis` API is exposed via the JSON RPC based language server protocol.
The hard part here is managing changes (which can come either from the file system
or from the editor) and concurrency (we want to spawn background jobs for things
like syntax highlighting). We use the event loop pattern to manage the zoo, and
@@ -152,13 +154,12 @@ the loop is the [`GlobalState::run`] function initiated by [`main_loop`] after
[`GlobalState::new`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/global_state.rs#L148-L215
[`GlobalState::run`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/main_loop.rs#L114-L140
-
Let's walk through a typical analyzer session!
First, we need to figure out what to analyze. To do this, we run `cargo
metadata` to learn about Cargo packages for current workspace and dependencies,
and we run `rustc --print sysroot` and scan the "sysroot"
-(the directory containing the current Rust toolchain's files) to learn about crates
+(the directory containing the current Rust toolchain's files) to learn about crates
like `std`. This happens in the [`GlobalState::fetch_workspaces`] method.
We load this configuration at the start of the server in [`GlobalState::new`],
but it's also triggered by workspace change events and requests to reload the
diff --git a/docs/dev/lsp-extensions.md b/docs/book/src/contributing/lsp-extensions.md
index c7ee4e4023..14a3fd1ebd 100644
--- a/docs/dev/lsp-extensions.md
+++ b/docs/book/src/contributing/lsp-extensions.md
@@ -19,6 +19,8 @@ Requests, which are likely to always remain specific to `rust-analyzer` are unde
If you want to be notified about the changes to this document, subscribe to [#4604](https://github.com/rust-lang/rust-analyzer/issues/4604).
+<!-- toc -->
+
## Configuration in `initializationOptions`
**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/567
diff --git a/docs/dev/setup.md b/docs/book/src/contributing/setup.md
index d8a7840d37..d8a7840d37 100644
--- a/docs/dev/setup.md
+++ b/docs/book/src/contributing/setup.md
diff --git a/docs/dev/style.md b/docs/book/src/contributing/style.md
index 4c5299bde3..5654e37753 100644
--- a/docs/dev/style.md
+++ b/docs/book/src/contributing/style.md
@@ -1,3 +1,5 @@
+# Style
+
Our approach to "clean code" is two-fold:
* We generally don't block PRs on style changes.
@@ -274,7 +276,7 @@ fn f() {
Assert liberally.
Prefer [`stdx::never!`](https://docs.rs/always-assert/0.1.2/always_assert/macro.never.html) to standard `assert!`.
-**Rationale:** See [cross cutting concern: error handling](https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#error-handling).
+**Rationale:** See [cross cutting concern: error handling](https://github.com/rust-lang/rust-analyzer/blob/master/docs/book/src/contributing/architecture.md#error-handling).
## Getters & Setters
@@ -873,7 +875,7 @@ Use `anyhow::format_err!` rather than `anyhow::anyhow`.
**Rationale:** consistent, boring, avoids stuttering.
There's no specific guidance on the formatting of error messages, see [anyhow/#209](https://github.com/dtolnay/anyhow/issues/209).
-Do not end error and context messages with `.` though.
+Do not end error and context messages with `.` though.
## Early Returns
@@ -1172,7 +1174,7 @@ MergeBehavior::Last => {
**Rationale:** writing a sentence (or maybe even a paragraph) rather just "a comment" creates a more appropriate frame of mind.
It tricks you into writing down more of the context you keep in your head while coding.
-For `.md` and `.adoc` files, prefer a sentence-per-line format, don't wrap lines.
+For `.md` files prefer a sentence-per-line format, don't wrap lines.
If the line is too long, you want to split the sentence in two :-)
**Rationale:** much easier to edit the text and read the diff, see [this link](https://asciidoctor.org/docs/asciidoc-recommended-practices/#one-sentence-per-line).
diff --git a/docs/dev/syntax.md b/docs/book/src/contributing/syntax.md
index 3dcd430cea..3dcd430cea 100644
--- a/docs/dev/syntax.md
+++ b/docs/book/src/contributing/syntax.md
diff --git a/docs/book/src/diagnostics.md b/docs/book/src/diagnostics.md
new file mode 100644
index 0000000000..60685c98da
--- /dev/null
+++ b/docs/book/src/diagnostics.md
@@ -0,0 +1,16 @@
+# Diagnostics
+
+While most errors and warnings provided by rust-analyzer come from the
+`cargo check` integration, there’s a growing number of diagnostics
+implemented using rust-analyzer’s own analysis. Some of these
+diagnostics don’t respect `#[allow]` or `#[deny]` attributes yet, but
+can be turned off using the `rust-analyzer.diagnostics.enable`,
+`rust-analyzer.diagnostics.experimental.enable` or
+`rust-analyzer.diagnostics.disabled` settings.
+
+## Clippy
+
+To run `cargo clippy` instead of `cargo check`, you can set
+`"rust-analyzer.check.command": "clippy"`.
+
+{{#include diagnostics_generated.md:2:}}
diff --git a/docs/book/src/diagnostics_generated.md b/docs/book/src/diagnostics_generated.md
new file mode 100644
index 0000000000..d34c459ad0
--- /dev/null
+++ b/docs/book/src/diagnostics_generated.md
@@ -0,0 +1,516 @@
+//! Generated by `cargo xtask codegen diagnostics-docs`, do not edit by hand.
+
+#### attribute-expansion-disabled
+
+Source: [macro_error.rs](crates/ide-diagnostics/src/handlers/macro_error.rs#7)
+
+
+This diagnostic is shown for attribute proc macros when attribute expansions have been disabled.
+
+
+
+
+#### await-outside-of-async
+
+Source: [await_outside_of_async.rs](crates/ide-diagnostics/src/handlers/await_outside_of_async.rs#3)
+
+
+This diagnostic is triggered if the `await` keyword is used outside of an async function or block
+
+
+
+
+#### break-outside-of-loop
+
+Source: [break_outside_of_loop.rs](crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs#3)
+
+
+This diagnostic is triggered if the `break` keyword is used outside of a loop.
+
+
+
+
+#### cast-to-unsized
+
+Source: [invalid_cast.rs](crates/ide-diagnostics/src/handlers/invalid_cast.rs#106)
+
+
+This diagnostic is triggered when casting to an unsized type
+
+
+
+
+#### expected-function
+
+Source: [expected_function.rs](crates/ide-diagnostics/src/handlers/expected_function.rs#5)
+
+
+This diagnostic is triggered if a call is made on something that is not callable.
+
+
+
+
+#### generic-args-prohibited
+
+Source: [generic_args_prohibited.rs](crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs#10)
+
+
+This diagnostic is shown when generic arguments are provided for a type that does not accept
+generic arguments.
+
+
+
+
+#### inactive-code
+
+Source: [inactive_code.rs](crates/ide-diagnostics/src/handlers/inactive_code.rs#6)
+
+
+This diagnostic is shown for code with inactive `#[cfg]` attributes.
+
+
+
+
+#### incoherent-impl
+
+Source: [incoherent_impl.rs](crates/ide-diagnostics/src/handlers/incoherent_impl.rs#6)
+
+
+This diagnostic is triggered if the target type of an impl is from a foreign crate.
+
+
+
+
+#### incorrect-ident-case
+
+Source: [incorrect_case.rs](crates/ide-diagnostics/src/handlers/incorrect_case.rs#13)
+
+
+This diagnostic is triggered if an item name doesn't follow [Rust naming convention](https://doc.rust-lang.org/1.0.0/style/style/naming/README.html).
+
+
+
+
+#### invalid-cast
+
+Source: [invalid_cast.rs](crates/ide-diagnostics/src/handlers/invalid_cast.rs#18)
+
+
+This diagnostic is triggered if the code contains an illegal cast
+
+
+
+
+#### invalid-derive-target
+
+Source: [invalid_derive_target.rs](crates/ide-diagnostics/src/handlers/invalid_derive_target.rs#3)
+
+
+This diagnostic is shown when the derive attribute is used on an item other than a `struct`,
+`enum` or `union`.
+
+
+
+
+#### macro-def-error
+
+Source: [macro_error.rs](crates/ide-diagnostics/src/handlers/macro_error.rs#24)
+
+
+This diagnostic is shown for macro expansion errors.
+
+
+
+
+#### macro-error
+
+Source: [macro_error.rs](crates/ide-diagnostics/src/handlers/macro_error.rs#3)
+
+
+This diagnostic is shown for macro expansion errors.
+
+
+
+
+#### malformed-derive
+
+Source: [malformed_derive.rs](crates/ide-diagnostics/src/handlers/malformed_derive.rs#3)
+
+
+This diagnostic is shown when the derive attribute has invalid input.
+
+
+
+
+#### mismatched-arg-count
+
+Source: [mismatched_arg_count.rs](crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs#31)
+
+
+This diagnostic is triggered if a function is invoked with an incorrect number of arguments.
+
+
+
+
+#### mismatched-tuple-struct-pat-arg-count
+
+Source: [mismatched_arg_count.rs](crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs#11)
+
+
+This diagnostic is triggered if a tuple struct pattern has an incorrect number of fields.
+
+
+
+
+#### missing-fields
+
+Source: [missing_fields.rs](crates/ide-diagnostics/src/handlers/missing_fields.rs#19)
+
+
+This diagnostic is triggered if record lacks some fields that exist in the corresponding structure.
+
+Example:
+
+```rust
+struct A { a: u8, b: u8 }
+
+let a = A { a: 10 };
+```
+
+
+
+
+#### missing-match-arm
+
+Source: [missing_match_arms.rs](crates/ide-diagnostics/src/handlers/missing_match_arms.rs#3)
+
+
+This diagnostic is triggered if a `match` block is missing one or more match arms.
+
+
+
+
+#### missing-unsafe
+
+Source: [missing_unsafe.rs](crates/ide-diagnostics/src/handlers/missing_unsafe.rs#10)
+
+
+This diagnostic is triggered if an operation marked as `unsafe` is used outside of an `unsafe` function or block.
+
+
+
+
+#### moved-out-of-ref
+
+Source: [moved_out_of_ref.rs](crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs#4)
+
+
+This diagnostic is triggered on moving non copy things out of references.
+
+
+
+
+#### need-mut
+
+Source: [mutability_errors.rs](crates/ide-diagnostics/src/handlers/mutability_errors.rs#8)
+
+
+This diagnostic is triggered on mutating an immutable variable.
+
+
+
+
+#### no-such-field
+
+Source: [no_such_field.rs](crates/ide-diagnostics/src/handlers/no_such_field.rs#12)
+
+
+This diagnostic is triggered if the created structure does not have the field provided in the record.
+
+
+
+
+#### non-exhaustive-let
+
+Source: [non_exhaustive_let.rs](crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs#3)
+
+
+This diagnostic is triggered if a `let` statement without an `else` branch has a non-exhaustive
+pattern.
+
+
+
+
+#### private-assoc-item
+
+Source: [private_assoc_item.rs](crates/ide-diagnostics/src/handlers/private_assoc_item.rs#3)
+
+
+This diagnostic is triggered if the referenced associated item is not visible from the current
+module.
+
+
+
+
+#### private-field
+
+Source: [private_field.rs](crates/ide-diagnostics/src/handlers/private_field.rs#3)
+
+
+This diagnostic is triggered if the accessed field is not visible from the current module.
+
+
+
+
+#### proc-macro-disabled
+
+Source: [macro_error.rs](crates/ide-diagnostics/src/handlers/macro_error.rs#11)
+
+
+This diagnostic is shown for proc macros that have been specifically disabled via `rust-analyzer.procMacro.ignored`.
+
+
+
+
+#### remove-trailing-return
+
+Source: [remove_trailing_return.rs](crates/ide-diagnostics/src/handlers/remove_trailing_return.rs#8)
+
+
+This diagnostic is triggered when there is a redundant `return` at the end of a function
+or closure.
+
+
+
+
+#### remove-unnecessary-else
+
+Source: [remove_unnecessary_else.rs](crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs#17)
+
+
+This diagnostic is triggered when there is an `else` block for an `if` expression whose
+then branch diverges (e.g. ends with a `return`, `continue`, `break` e.t.c).
+
+
+
+
+#### replace-filter-map-next-with-find-map
+
+Source: [replace_filter_map_next_with_find_map.rs](crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs#11)
+
+
+This diagnostic is triggered when `.filter_map(..).next()` is used, rather than the more concise `.find_map(..)`.
+
+
+
+
+#### trait-impl-incorrect-safety
+
+Source: [trait_impl_incorrect_safety.rs](crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs#6)
+
+
+Diagnoses incorrect safety annotations of trait impls.
+
+
+
+
+#### trait-impl-missing-assoc_item
+
+Source: [trait_impl_missing_assoc_item.rs](crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs#7)
+
+
+Diagnoses missing trait items in a trait impl.
+
+
+
+
+#### trait-impl-orphan
+
+Source: [trait_impl_orphan.rs](crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs#5)
+
+
+Only traits defined in the current crate can be implemented for arbitrary types
+
+
+
+
+#### trait-impl-redundant-assoc_item
+
+Source: [trait_impl_redundant_assoc_item.rs](crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs#12)
+
+
+Diagnoses redundant trait items in a trait impl.
+
+
+
+
+#### type-mismatch
+
+Source: [type_mismatch.rs](crates/ide-diagnostics/src/handlers/type_mismatch.rs#20)
+
+
+This diagnostic is triggered when the type of an expression or pattern does not match
+the expected type.
+
+
+
+
+#### typed-hole
+
+Source: [typed_hole.rs](crates/ide-diagnostics/src/handlers/typed_hole.rs#18)
+
+
+This diagnostic is triggered when an underscore expression is used in an invalid position.
+
+
+
+
+#### undeclared-label
+
+Source: [undeclared_label.rs](crates/ide-diagnostics/src/handlers/undeclared_label.rs#3)
+
+
+
+
+
+
+#### unimplemented-builtin-macro
+
+Source: [unimplemented_builtin_macro.rs](crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs#3)
+
+
+This diagnostic is shown for builtin macros which are not yet implemented by rust-analyzer
+
+
+
+
+#### unlinked-file
+
+Source: [unlinked_file.rs](crates/ide-diagnostics/src/handlers/unlinked_file.rs#20)
+
+
+This diagnostic is shown for files that are not included in any crate, or files that are part of
+crates rust-analyzer failed to discover. The file will not have IDE features available.
+
+
+
+
+#### unnecessary-braces
+
+Source: [useless_braces.rs](crates/ide-diagnostics/src/handlers/useless_braces.rs#9)
+
+
+Diagnostic for unnecessary braces in `use` items.
+
+
+
+
+#### unreachable-label
+
+Source: [unreachable_label.rs](crates/ide-diagnostics/src/handlers/unreachable_label.rs#3)
+
+
+
+
+
+
+#### unresolved-assoc-item
+
+Source: [unresolved_assoc_item.rs](crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs#3)
+
+
+This diagnostic is triggered if the referenced associated item does not exist.
+
+
+
+
+#### unresolved-extern-crate
+
+Source: [unresolved_extern_crate.rs](crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs#3)
+
+
+This diagnostic is triggered if rust-analyzer is unable to discover referred extern crate.
+
+
+
+
+#### unresolved-field
+
+Source: [unresolved_field.rs](crates/ide-diagnostics/src/handlers/unresolved_field.rs#23)
+
+
+This diagnostic is triggered if a field does not exist on a given type.
+
+
+
+
+#### unresolved-ident
+
+Source: [unresolved_ident.rs](crates/ide-diagnostics/src/handlers/unresolved_ident.rs#3)
+
+
+This diagnostic is triggered if an expr-position ident is invalid.
+
+
+
+
+#### unresolved-import
+
+Source: [unresolved_import.rs](crates/ide-diagnostics/src/handlers/unresolved_import.rs#3)
+
+
+This diagnostic is triggered if rust-analyzer is unable to resolve a path in
+a `use` declaration.
+
+
+
+
+#### unresolved-macro-call
+
+Source: [unresolved_macro_call.rs](crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs#3)
+
+
+This diagnostic is triggered if rust-analyzer is unable to resolve the path
+to a macro in a macro invocation.
+
+
+
+
+#### unresolved-method
+
+Source: [unresolved_method.rs](crates/ide-diagnostics/src/handlers/unresolved_method.rs#15)
+
+
+This diagnostic is triggered if a method does not exist on a given type.
+
+
+
+
+#### unresolved-module
+
+Source: [unresolved_module.rs](crates/ide-diagnostics/src/handlers/unresolved_module.rs#8)
+
+
+This diagnostic is triggered if rust-analyzer is unable to discover referred module.
+
+
+
+
+#### unused-mut
+
+Source: [mutability_errors.rs](crates/ide-diagnostics/src/handlers/mutability_errors.rs#62)
+
+
+This diagnostic is triggered when a mutable variable isn't actually mutated.
+
+
+
+
+#### unused-variables
+
+Source: [unused_variables.rs](crates/ide-diagnostics/src/handlers/unused_variables.rs#13)
+
+
+This diagnostic is triggered when a local variable is not used.
+
+
diff --git a/docs/book/src/editor_features.md b/docs/book/src/editor_features.md
new file mode 100644
index 0000000000..5521e395c7
--- /dev/null
+++ b/docs/book/src/editor_features.md
@@ -0,0 +1,203 @@
+# Editor Features
+
+## VS Code
+
+### Color configurations
+
+It is possible to change the foreground/background color and font
+family/size of inlay hints. Just add this to your `settings.json`:
+
+```json
+{
+ "editor.inlayHints.fontFamily": "Courier New",
+ "editor.inlayHints.fontSize": 11,
+
+ "workbench.colorCustomizations": {
+ // Name of the theme you are currently using
+ "[Default Dark+]": {
+ "editorInlayHint.foreground": "#868686f0",
+ "editorInlayHint.background": "#3d3d3d48",
+
+ // Overrides for specific kinds of inlay hints
+ "editorInlayHint.typeForeground": "#fdb6fdf0",
+ "editorInlayHint.parameterForeground": "#fdb6fdf0",
+ }
+ }
+}
+```
+
+### Semantic style customizations
+
+You can customize the look of different semantic elements in the source
+code. For example, mutable bindings are underlined by default and you
+can override this behavior by adding the following section to your
+`settings.json`:
+
+```json
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "*.mutable": {
+ "fontStyle": "", // underline is the default
+ },
+ }
+ },
+}
+```
+
+Most themes don’t support styling unsafe operations differently yet.
+You can fix this by adding overrides for the rules `operator.unsafe`,
+`function.unsafe`, and `method.unsafe`:
+
+```json
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "operator.unsafe": "#ff6600",
+ "function.unsafe": "#ff6600",
+ "method.unsafe": "#ff6600"
+ }
+ },
+}
+```
+
+In addition to the top-level rules you can specify overrides for
+specific themes. For example, if you wanted to use a darker text color
+on a specific light theme, you might write:
+
+```json
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "operator.unsafe": "#ff6600"
+ },
+ "[Ayu Light]": {
+ "rules": {
+ "operator.unsafe": "#572300"
+ }
+ }
+ },
+}
+```
+
+Make sure you include the brackets around the theme name. For example,
+use `"[Ayu Light]"` to customize the theme Ayu Light.
+
+### Special `when` clause context for keybindings.
+
+You may use `inRustProject` context to configure keybindings for rust
+projects only. For example:
+
+```json
+{
+ "key": "ctrl+alt+d",
+ "command": "rust-analyzer.openDocs",
+ "when": "inRustProject"
+}
+```
+
+More about `when` clause contexts
+[here](https://code.visualstudio.com/docs/getstarted/keybindings#_when-clause-contexts).
+
+### Setting runnable environment variables
+
+You can use "rust-analyzer.runnables.extraEnv" setting to define
+runnable environment-specific substitution variables. The simplest way
+for all runnables in a bunch:
+
+```json
+"rust-analyzer.runnables.extraEnv": {
+ "RUN_SLOW_TESTS": "1"
+}
+```
+
+Or it is possible to specify vars more granularly:
+
+```json
+"rust-analyzer.runnables.extraEnv": [
+ {
+ // "mask": null, // null mask means that this rule will be applied for all runnables
+ "env": {
+ "APP_ID": "1",
+ "APP_DATA": "asdf"
+ }
+ },
+ {
+ "mask": "test_name",
+ "env": {
+ "APP_ID": "2", // overwrites only APP_ID
+ }
+ }
+]
+```
+
+You can use any valid regular expression as a mask. Also note that a
+full runnable name is something like **run bin\_or\_example\_name**,
+**test some::mod::test\_name** or **test-mod some::mod**, so it is
+possible to distinguish binaries, single tests, and test modules with
+these masks: `"^run"`, `"^test "` (the trailing space matters!), and
+`"^test-mod"` respectively.
+
+If needed, you can set different values for different platforms:
+
+```json
+"rust-analyzer.runnables.extraEnv": [
+ {
+ "platform": "win32", // windows only
+ "env": {
+ "APP_DATA": "windows specific data"
+ }
+ },
+ {
+ "platform": ["linux"],
+ "env": {
+ "APP_DATA": "linux data",
+ }
+ },
+ { // for all platforms
+ "env": {
+ "APP_COMMON_DATA": "xxx",
+ }
+ }
+]
+```
+
+### Compiler feedback from external commands
+
+Instead of relying on the built-in `cargo check`, you can configure Code
+to run a command in the background and use the `$rustc-watch` problem
+matcher to generate inline error markers from its output.
+
+To do this you need to create a new [VS Code
+Task](https://code.visualstudio.com/docs/editor/tasks) and set
+`"rust-analyzer.checkOnSave": false` in preferences.
+
+For example, if you want to run
+[`cargo watch`](https://crates.io/crates/cargo-watch) instead, you might
+add the following to `.vscode/tasks.json`:
+
+```json
+{
+ "label": "Watch",
+ "group": "build",
+ "type": "shell",
+ "command": "cargo watch",
+ "problemMatcher": "$rustc-watch",
+ "isBackground": true
+}
+```
+
+### Live Share
+
+VS Code Live Share has partial support for rust-analyzer.
+
+Live Share *requires* the official Microsoft build of VS Code, OSS
+builds will not work correctly.
+
+The host’s rust-analyzer instance will be shared with all guests joining
+the session. The guests do not have to have the rust-analyzer extension
+installed for this to work.
+
+If you are joining a Live Share session and *do* have rust-analyzer
+installed locally, commands from the command palette will not work
+correctly since they will attempt to communicate with the local server.
diff --git a/docs/book/src/features.md b/docs/book/src/features.md
new file mode 100644
index 0000000000..0829a0213b
--- /dev/null
+++ b/docs/book/src/features.md
@@ -0,0 +1,3 @@
+# Features
+
+{{#include features_generated.md:2:}}
diff --git a/docs/book/src/features_generated.md b/docs/book/src/features_generated.md
new file mode 100644
index 0000000000..2c5829b1f5
--- /dev/null
+++ b/docs/book/src/features_generated.md
@@ -0,0 +1,940 @@
+//! Generated by `cargo xtask codegen feature-docs`, do not edit by hand.
+
+### Annotations
+**Source:** [annotations.rs](crates/ide/src/annotations.rs#19)
+
+Provides user with annotations above items for looking up references or impl blocks
+and running/debugging binaries.
+
+![Annotations](https://user-images.githubusercontent.com/48062697/113020672-b7c34f00-917a-11eb-8f6e-858735660a0e.png)
+
+
+### Auto Import
+**Source:** [auto_import.rs](crates/ide-assists/src/handlers/auto_import.rs#15)
+
+Using the `auto-import` assist it is possible to insert missing imports for unresolved items.
+When inserting an import it will do so in a structured manner by keeping imports grouped,
+separated by a newline in the following order:
+
+- `std` and `core`
+- External Crates
+- Current Crate, paths prefixed by `crate`
+- Current Module, paths prefixed by `self`
+- Super Module, paths prefixed by `super`
+
+Example:
+```rust
+use std::fs::File;
+
+use itertools::Itertools;
+use syntax::ast;
+
+use crate::utils::insert_use;
+
+use self::auto_import;
+
+use super::AssistContext;
+```
+
+#### Import Granularity
+
+It is possible to configure how use-trees are merged with the `imports.granularity.group` setting.
+It has the following configurations:
+
+- `crate`: Merge imports from the same crate into a single use statement. This kind of
+ nesting is only supported in Rust versions later than 1.24.
+- `module`: Merge imports from the same module into a single use statement.
+- `item`: Don't merge imports at all, creating one import per item.
+- `preserve`: Do not change the granularity of any imports. For auto-import this has the same
+ effect as `item`.
+- `one`: Merge all imports into a single use statement as long as they have the same visibility
+ and attributes.
+
+In `VS Code` the configuration for this is `rust-analyzer.imports.granularity.group`.
+
+#### Import Prefix
+
+The style of imports in the same crate is configurable through the `imports.prefix` setting.
+It has the following configurations:
+
+- `crate`: This setting will force paths to be always absolute, starting with the `crate`
+ prefix, unless the item is defined outside of the current crate.
+- `self`: This setting will force paths that are relative to the current module to always
+ start with `self`. This will result in paths that always start with either `crate`, `self`,
+ `super` or an extern crate identifier.
+- `plain`: This setting does not impose any restrictions in imports.
+
+In `VS Code` the configuration for this is `rust-analyzer.imports.prefix`.
+
+![Auto Import](https://user-images.githubusercontent.com/48062697/113020673-b85be580-917a-11eb-9022-59585f35d4f8.gif)
+
+
+### Completion With Autoimport
+**Source:** [flyimport.rs](crates/ide-completion/src/completions/flyimport.rs#20)
+
+When completing names in the current scope, proposes additional imports from other modules or crates,
+if they can be qualified in the scope, and their name contains all symbols from the completion input.
+
+To be considered applicable, the name must contain all input symbols in the given order, not necessarily adjacent.
+If any input symbol is not lowercased, the name must contain all symbols in exact case; otherwise the containing is checked case-insensitively.
+
+```
+fn main() {
+ pda$0
+}
+# pub mod std { pub mod marker { pub struct PhantomData { } } }
+```
+->
+```
+use std::marker::PhantomData;
+
+fn main() {
+ PhantomData
+}
+# pub mod std { pub mod marker { pub struct PhantomData { } } }
+```
+
+Also completes associated items, that require trait imports.
+If any unresolved and/or partially-qualified path precedes the input, it will be taken into account.
+Currently, only the imports with their import path ending with the whole qualifier will be proposed
+(no fuzzy matching for qualifier).
+
+```
+mod foo {
+ pub mod bar {
+ pub struct Item;
+
+ impl Item {
+ pub const TEST_ASSOC: usize = 3;
+ }
+ }
+}
+
+fn main() {
+ bar::Item::TEST_A$0
+}
+```
+->
+```
+use foo::bar;
+
+mod foo {
+ pub mod bar {
+ pub struct Item;
+
+ impl Item {
+ pub const TEST_ASSOC: usize = 3;
+ }
+ }
+}
+
+fn main() {
+ bar::Item::TEST_ASSOC
+}
+```
+
+NOTE: currently, if an assoc item comes from a trait that's not currently imported, and it also has an unresolved and/or partially-qualified path,
+no imports will be proposed.
+
+#### Fuzzy search details
+
+To avoid an excessive amount of the results returned, completion input is checked for inclusion in the names only
+(i.e. in `HashMap` in the `std::collections::HashMap` path).
+For the same reasons, avoids searching for any path imports for inputs with their length less than 2 symbols
+(but shows all associated items for any input length).
+
+#### Import configuration
+
+It is possible to configure how use-trees are merged with the `imports.granularity.group` setting.
+Mimics the corresponding behavior of the `Auto Import` feature.
+
+#### LSP and performance implications
+
+The feature is enabled only if the LSP client supports LSP protocol version 3.16+ and reports the `additionalTextEdits`
+(case-sensitive) resolve client capability in its client capabilities.
+This way the server is able to defer the costly computations, doing them for a selected completion item only.
+For clients with no such support, all edits have to be calculated on the completion request, including the fuzzy search completion ones,
+which might be slow ergo the feature is automatically disabled.
+
+#### Feature toggle
+
+The feature can be forcefully turned off in the settings with the `rust-analyzer.completion.autoimport.enable` flag.
+Note that having this flag set to `true` does not guarantee that the feature is enabled: your client needs to have the corresponding
+capability enabled.
+
+
+### Debug ItemTree
+**Source:** [view_item_tree.rs](crates/ide/src/view_item_tree.rs#5)
+
+Displays the ItemTree of the currently open file, for debugging.
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: Debug ItemTree** |
+
+
+### Expand Macro Recursively
+**Source:** [expand_macro.rs](crates/ide/src/expand_macro.rs#18)
+
+Shows the full macro expansion of the macro at the current caret position.
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: Expand macro recursively at caret** |
+
+![Expand Macro Recursively](https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif)
+
+
+### Expand and Shrink Selection
+**Source:** [extend_selection.rs](crates/ide/src/extend_selection.rs#15)
+
+Extends or shrinks the current selection to the encompassing syntactic construct
+(expression, statement, item, module, etc). It works with multiple cursors.
+
+| Editor | Shortcut |
+|---------|----------|
+| VS Code | <kbd>Alt+Shift+→</kbd>, <kbd>Alt+Shift+←</kbd> |
+
+![Expand and Shrink Selection](https://user-images.githubusercontent.com/48062697/113020651-b42fc800-917a-11eb-8a4f-cf1a07859fac.gif)
+
+
+### File Structure
+**Source:** [file_structure.rs](crates/ide/src/file_structure.rs#26)
+
+Provides a tree of the symbols defined in the file. Can be used to
+
+* fuzzy search symbol in a file (super useful)
+* draw breadcrumbs to describe the context around the cursor
+* draw outline of the file
+
+| Editor | Shortcut |
+|---------|----------|
+| VS Code | <kbd>Ctrl+Shift+O</kbd> |
+
+![File Structure](https://user-images.githubusercontent.com/48062697/113020654-b42fc800-917a-11eb-8388-e7dc4d92b02e.gif)
+
+
+### Find All References
+**Source:** [references.rs](crates/ide/src/references.rs#42)
+
+Shows all references of the item at the cursor location
+
+| Editor | Shortcut |
+|---------|----------|
+| VS Code | <kbd>Shift+Alt+F12</kbd> |
+
+![Find All References](https://user-images.githubusercontent.com/48062697/113020670-b7c34f00-917a-11eb-8003-370ac5f2b3cb.gif)
+
+
+### Folding
+**Source:** [folding_ranges.rs](crates/ide/src/folding_ranges.rs#36)
+
+Defines folding regions for curly braced blocks, runs of consecutive use, mod, const or static
+items, and `region` / `endregion` comment markers.
+
+
+### Format String Completion
+**Source:** [format_like.rs](crates/ide-completion/src/completions/postfix/format_like.rs#0)
+
+`"Result {result} is {2 + 2}"` is expanded to the `"Result {} is {}", result, 2 + 2`.
+
+The following postfix snippets are available:
+
+* `format` -> `format!(...)`
+* `panic` -> `panic!(...)`
+* `println` -> `println!(...)`
+* `log`:
+** `logd` -> `log::debug!(...)`
+** `logt` -> `log::trace!(...)`
+** `logi` -> `log::info!(...)`
+** `logw` -> `log::warn!(...)`
+** `loge` -> `log::error!(...)`
+
+![Format String Completion](https://user-images.githubusercontent.com/48062697/113020656-b560f500-917a-11eb-87de-02991f61beb8.gif)
+
+
+### Go to Declaration
+**Source:** [goto_declaration.rs](crates/ide/src/goto_declaration.rs#13)
+
+Navigates to the declaration of an identifier.
+
+This is the same as `Go to Definition` with the following exceptions:
+- outline modules will navigate to the `mod name;` item declaration
+- trait assoc items will navigate to the assoc item of the trait declaration as opposed to the trait impl
+- fields in patterns will navigate to the field declaration of the struct, union or variant
+
+
+### Go to Definition
+**Source:** [goto_definition.rs](crates/ide/src/goto_definition.rs#28)
+
+Navigates to the definition of an identifier.
+
+For outline modules, this will navigate to the source file of the module.
+
+| Editor | Shortcut |
+|---------|----------|
+| VS Code | <kbd>F12</kbd> |
+
+![Go to Definition](https://user-images.githubusercontent.com/48062697/113065563-025fbe00-91b1-11eb-83e4-a5a703610b23.gif)
+
+
+### Go to Implementation
+**Source:** [goto_implementation.rs](crates/ide/src/goto_implementation.rs#11)
+
+Navigates to the impl items of types.
+
+| Editor | Shortcut |
+|---------|----------|
+| VS Code | <kbd>Ctrl+F12</kbd>
+
+![Go to Implementation](https://user-images.githubusercontent.com/48062697/113065566-02f85480-91b1-11eb-9288-aaad8abd8841.gif)
+
+
+### Go to Type Definition
+**Source:** [goto_type_definition.rs](crates/ide/src/goto_type_definition.rs#7)
+
+Navigates to the type of an identifier.
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **Go to Type Definition** |
+
+![Go to Type Definition](https://user-images.githubusercontent.com/48062697/113020657-b560f500-917a-11eb-9007-0f809733a338.gif)
+
+
+### Highlight Related
+**Source:** [highlight_related.rs](crates/ide/src/highlight_related.rs#42)
+
+Highlights constructs related to the thing under the cursor:
+
+1. if on an identifier, highlights all references to that identifier in the current file
+ * additionally, if the identifier is a trait in a where clause, type parameter trait bound or use item, highlights all references to that trait's assoc items in the corresponding scope
+1. if on an `async` or `await` token, highlights all yield points for that async context
+1. if on a `return` or `fn` keyword, `?` character or `->` return type arrow, highlights all exit points for that context
+1. if on a `break`, `loop`, `while` or `for` token, highlights all break points for that loop or block context
+1. if on a `move` or `|` token that belongs to a closure, highlights all captures of the closure.
+
+Note: `?`, `|` and `->` do not currently trigger this behavior in the VSCode editor.
+
+
+### Hover
+**Source:** [hover.rs](crates/ide/src/hover.rs#116)
+
+Shows additional information, like the type of an expression or the documentation for a definition when "focusing" code.
+Focusing is usually hovering with a mouse, but can also be triggered with a shortcut.
+
+![Hover](https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif)
+
+
+### Inlay Hints
+**Source:** [inlay_hints.rs](crates/ide/src/inlay_hints.rs#41)
+
+rust-analyzer shows additional information inline with the source code.
+Editors usually render this using read-only virtual text snippets interspersed with code.
+
+rust-analyzer by default shows hints for
+
+* types of local variables
+* names of function arguments
+* names of const generic parameters
+* types of chained expressions
+
+Optionally, one can enable additional hints for
+
+* return types of closure expressions
+* elided lifetimes
+* compiler inserted reborrows
+* names of generic type and lifetime parameters
+
+Note: inlay hints for function argument names are heuristically omitted to reduce noise and will not appear if
+any of the
+[following criteria](https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L92-L99)
+are met:
+
+* the parameter name is a suffix of the function's name
+* the argument is a qualified constructing or call expression where the qualifier is an ADT
+* exact argument<->parameter match(ignoring leading underscore) or parameter is a prefix/suffix
+ of argument with _ splitting it off
+* the parameter name starts with `ra_fixture`
+* the parameter name is a
+[well known name](https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L200)
+in a unary function
+* the parameter name is a
+[single character](https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L201)
+in a unary function
+
+![Inlay hints](https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png)
+
+
+### Interpret A Function, Static Or Const.
+**Source:** [interpret.rs](crates/ide/src/interpret.rs#8)
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: Interpret** |
+
+
+### Join Lines
+**Source:** [join_lines.rs](crates/ide/src/join_lines.rs#20)
+
+Join selected lines into one, smartly fixing up whitespace, trailing commas, and braces.
+
+See [this gif](https://user-images.githubusercontent.com/1711539/124515923-4504e800-dde9-11eb-8d58-d97945a1a785.gif) for the cases handled specially by joined lines.
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: Join lines** |
+
+![Join Lines](https://user-images.githubusercontent.com/48062697/113020661-b6922200-917a-11eb-87c4-b75acc028f11.gif)
+
+
+### Magic Completions
+**Source:** [lib.rs](crates/ide-completion/src/lib.rs#78)
+
+In addition to usual reference completion, rust-analyzer provides some ✨magic✨
+completions as well:
+
+Keywords like `if`, `else`, `while`, `loop` are completed with braces, and cursor
+is placed at the appropriate position. Even though `if` is easy to type, you
+still want to complete it, to get ` { }` for free! `return` is inserted with a
+space or `;` depending on the return type of the function.
+
+When completing a function call, `()` are automatically inserted. If a function
+takes arguments, the cursor is positioned inside the parenthesis.
+
+There are postfix completions, which can be triggered by typing something like
+`foo().if`. The word after `.` determines postfix completion. Possible variants are:
+
+- `expr.if` -> `if expr {}` or `if let ... {}` for `Option` or `Result`
+- `expr.match` -> `match expr {}`
+- `expr.while` -> `while expr {}` or `while let ... {}` for `Option` or `Result`
+- `expr.ref` -> `&expr`
+- `expr.refm` -> `&mut expr`
+- `expr.let` -> `let $0 = expr;`
+- `expr.lete` -> `let $1 = expr else { $0 };`
+- `expr.letm` -> `let mut $0 = expr;`
+- `expr.not` -> `!expr`
+- `expr.dbg` -> `dbg!(expr)`
+- `expr.dbgr` -> `dbg!(&expr)`
+- `expr.call` -> `(expr)`
+
+There are also snippet completions:
+
+#### Expressions
+
+- `pd` -> `eprintln!(" = {:?}", );`
+- `ppd` -> `eprintln!(" = {:#?}", );`
+
+#### Items
+
+- `tfn` -> `#[test] fn feature(){}`
+- `tmod` ->
+```rust
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_name() {}
+}
+```
+
+And the auto import completions, enabled with the `rust-analyzer.completion.autoimport.enable` setting and the corresponding LSP client capabilities.
+Those are the additional completion options with automatic `use` import and options from all project importable items,
+fuzzy matched against the completion input.
+
+![Magic Completions](https://user-images.githubusercontent.com/48062697/113020667-b72ab880-917a-11eb-8778-716cf26a0eb3.gif)
+
+
+### Matching Brace
+**Source:** [matching_brace.rs](crates/ide/src/matching_brace.rs#6)
+
+If the cursor is on any brace (`<>(){}[]||`) which is a part of a brace-pair,
+moves cursor to the matching brace. It uses the actual parser to determine
+braces, so it won't confuse generics with comparisons.
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: Find matching brace** |
+
+![Matching Brace](https://user-images.githubusercontent.com/48062697/113065573-04298180-91b1-11eb-8dec-d4e2a202f304.gif)
+
+
+### Memory Usage
+**Source:** [apply_change.rs](crates/ide-db/src/apply_change.rs#43)
+
+Clears rust-analyzer's internal database and prints memory usage statistics.
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: Memory Usage (Clears Database)**
+
+
+### Move Item
+**Source:** [move_item.rs](crates/ide/src/move_item.rs#16)
+
+Move item under cursor or selection up and down.
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: Move item up**
+| VS Code | **rust-analyzer: Move item down**
+
+![Move Item](https://user-images.githubusercontent.com/48062697/113065576-04298180-91b1-11eb-91ce-4505e99ed598.gif)
+
+
+### On Enter
+**Source:** [on_enter.rs](crates/ide/src/typing/on_enter.rs#17)
+
+rust-analyzer can override <kbd>Enter</kbd> key to make it smarter:
+
+- <kbd>Enter</kbd> inside triple-slash comments automatically inserts `///`
+- <kbd>Enter</kbd> in the middle or after a trailing space in `//` inserts `//`
+- <kbd>Enter</kbd> inside `//!` doc comments automatically inserts `//!`
+- <kbd>Enter</kbd> after `{` indents contents and closing `}` of single-line block
+
+This action needs to be assigned to shortcut explicitly.
+
+Note that, depending on the other installed extensions, this feature can visibly slow down typing.
+Similarly, if rust-analyzer crashes or stops responding, `Enter` might not work.
+In that case, you can still press `Shift-Enter` to insert a newline.
+
+#### VS Code
+
+Add the following to `keybindings.json`:
+```json
+{
+ "key": "Enter",
+ "command": "rust-analyzer.onEnter",
+ "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust"
+}
+```
+
+When using the Vim plugin:
+```json
+{
+ "key": "Enter",
+ "command": "rust-analyzer.onEnter",
+ "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust && vim.mode == 'Insert'"
+}
+```
+
+![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif)
+
+
+### On Typing Assists
+**Source:** [typing.rs](crates/ide/src/typing.rs#42)
+
+Some features trigger on typing certain characters:
+
+- typing `let =` tries to smartly add `;` if `=` is followed by an existing expression
+- typing `=` between two expressions adds `;` when in statement position
+- typing `=` to turn an assignment into an equality comparison removes `;` when in expression position
+- typing `.` in a chain method call auto-indents
+- typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the expression
+- typing `{` in a use item adds a closing `}` in the right place
+- typing `>` to complete a return type `->` will insert a whitespace after it
+
+#### VS Code
+
+Add the following to `settings.json`:
+```json
+"editor.formatOnType": true,
+```
+
+![On Typing Assists](https://user-images.githubusercontent.com/48062697/113166163-69758500-923a-11eb-81ee-eb33ec380399.gif)
+![On Typing Assists](https://user-images.githubusercontent.com/48062697/113171066-105c2000-923f-11eb-87ab-f4a263346567.gif)
+
+
+### Open Docs
+**Source:** [doc_links.rs](crates/ide/src/doc_links.rs#118)
+
+Retrieves links to documentation for the given symbol.
+
+The simplest way to use this feature is via the context menu. Right-click on
+the selected item. The context menu opens. Select **Open Docs**.
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: Open Docs** |
+
+
+### Parent Module
+**Source:** [parent_module.rs](crates/ide/src/parent_module.rs#14)
+
+Navigates to the parent module of the current module.
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: Locate parent module** |
+
+![Parent Module](https://user-images.githubusercontent.com/48062697/113065580-04c21800-91b1-11eb-9a32-00086161c0bd.gif)
+
+
+### Related Tests
+**Source:** [runnables.rs](crates/ide/src/runnables.rs#202)
+
+Provides a sneak peek of all tests where the current item is used.
+
+The simplest way to use this feature is via the context menu. Right-click on
+the selected item. The context menu opens. Select **Peek Related Tests**.
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: Peek Related Tests** |
+
+
+### Rename
+**Source:** [rename.rs](crates/ide/src/rename.rs#70)
+
+Renames the item below the cursor and all of its references
+
+| Editor | Shortcut |
+|---------|----------|
+| VS Code | <kbd>F2</kbd> |
+
+![Rename](https://user-images.githubusercontent.com/48062697/113065582-055aae80-91b1-11eb-8ade-2b58e6d81883.gif)
+
+
+### Run
+**Source:** [runnables.rs](crates/ide/src/runnables.rs#116)
+
+Shows a popup suggesting to run a test/benchmark/binary **at the current cursor
+location**. Super useful for repeatedly running just a single test. Do bind this
+to a shortcut!
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: Run** |
+
+![Run](https://user-images.githubusercontent.com/48062697/113065583-055aae80-91b1-11eb-958f-d67efcaf6a2f.gif)
+
+
+### Semantic Syntax Highlighting
+**Source:** [syntax_highlighting.rs](crates/ide/src/syntax_highlighting.rs#68)
+
+rust-analyzer highlights the code semantically.
+For example, `Bar` in `foo::Bar` might be colored differently depending on whether `Bar` is an enum or a trait.
+rust-analyzer does not specify colors directly, instead it assigns a tag (like `struct`) and a set of modifiers (like `declaration`) to each token.
+It's up to the client to map those to specific colors.
+
+The general rule is that a reference to an entity gets colored the same way as the entity itself.
+We also give special modifier for `mut` and `&mut` local variables.
+
+
+#### Token Tags
+
+Rust-analyzer currently emits the following token tags:
+
+- For items:
+
+| | |
+|-----------|--------------------------------|
+| attribute | Emitted for attribute macros. |
+|enum| Emitted for enums. |
+|function| Emitted for free-standing functions. |
+|derive| Emitted for derive macros. |
+|macro| Emitted for function-like macros. |
+|method| Emitted for associated functions, also known as methods. |
+|namespace| Emitted for modules. |
+|struct| Emitted for structs.|
+|trait| Emitted for traits.|
+|typeAlias| Emitted for type aliases and `Self` in `impl`s.|
+|union| Emitted for unions.|
+
+- For literals:
+
+| | |
+|-----------|--------------------------------|
+| boolean| Emitted for the boolean literals `true` and `false`.|
+| character| Emitted for character literals.|
+| number| Emitted for numeric literals.|
+| string| Emitted for string literals.|
+| escapeSequence| Emitted for escaped sequences inside strings like `\n`.|
+| formatSpecifier| Emitted for format specifiers `{:?}` in `format!`-like macros.|
+
+- For operators:
+
+| | |
+|-----------|--------------------------------|
+|operator| Emitted for general operators.|
+|arithmetic| Emitted for the arithmetic operators `+`, `-`, `*`, `/`, `+=`, `-=`, `*=`, `/=`.|
+|bitwise| Emitted for the bitwise operators `\|`, `&`, `!`, `^`, `\|=`, `&=`, `^=`.|
+|comparison| Emitted for the comparison operators `>`, `<`, `==`, `>=`, `<=`, `!=`.|
+|logical| Emitted for the logical operators `\|\|`, `&&`, `!`.|
+
+- For punctuation:
+
+| | |
+|-----------|--------------------------------|
+|punctuation| Emitted for general punctuation.|
+|attributeBracket| Emitted for attribute invocation brackets, that is the `#[` and `]` tokens.|
+|angle| Emitted for `<>` angle brackets.|
+|brace| Emitted for `{}` braces.|
+|bracket| Emitted for `[]` brackets.|
+|parenthesis| Emitted for `()` parentheses.|
+|colon| Emitted for the `:` token.|
+|comma| Emitted for the `,` token.|
+|dot| Emitted for the `.` token.|
+|semi| Emitted for the `;` token.|
+|macroBang| Emitted for the `!` token in macro calls.|
+
+- For other tokens:
+
+| | |
+|-----------|--------------------------------|
+|builtinAttribute| Emitted for names to builtin attributes in attribute path, the `repr` in `#[repr(u8)]` for example.|
+|builtinType| Emitted for builtin types like `u32`, `str` and `f32`.|
+|comment| Emitted for comments.|
+|constParameter| Emitted for const parameters.|
+|deriveHelper| Emitted for derive helper attributes.|
+|enumMember| Emitted for enum variants.|
+|generic| Emitted for generic tokens that have no mapping.|
+|keyword| Emitted for keywords.|
+|label| Emitted for labels.|
+|lifetime| Emitted for lifetimes.|
+|parameter| Emitted for non-self function parameters.|
+|property| Emitted for struct and union fields.|
+|selfKeyword| Emitted for the self function parameter and self path-specifier.|
+|selfTypeKeyword| Emitted for the Self type parameter.|
+|toolModule| Emitted for tool modules.|
+|typeParameter| Emitted for type parameters.|
+|unresolvedReference| Emitted for unresolved references, names that rust-analyzer can't find the definition of.|
+|variable| Emitted for locals, constants and statics.|
+
+
+#### Token Modifiers
+
+Token modifiers allow styling some elements in the source code more precisely.
+
+Rust-analyzer currently emits the following token modifiers:
+
+| | |
+|-----------|--------------------------------|
+|async| Emitted for async functions and the `async` and `await` keywords.|
+|attribute| Emitted for tokens inside attributes.|
+|callable| Emitted for locals whose types implement one of the `Fn*` traits.|
+|constant| Emitted for consts.|
+|consuming| Emitted for locals that are being consumed when used in a function call.|
+|controlFlow| Emitted for control-flow related tokens, this includes the `?` operator.|
+|crateRoot| Emitted for crate names, like `serde` and `crate`.|
+|declaration| Emitted for names of definitions, like `foo` in `fn foo(){}`.|
+|defaultLibrary| Emitted for items from built-in crates (std, core, alloc, test and proc_macro).|
+|documentation| Emitted for documentation comments.|
+|injected| Emitted for doc-string injected highlighting like rust source blocks in documentation.|
+|intraDocLink| Emitted for intra doc links in doc-strings.|
+|library| Emitted for items that are defined outside of the current crate.|
+|macro| Emitted for tokens inside macro calls.|
+|mutable| Emitted for mutable locals and statics as well as functions taking `&mut self`.|
+|public| Emitted for items that are from the current crate and are `pub`.|
+|reference| Emitted for locals behind a reference and functions taking `self` by reference.|
+|static| Emitted for "static" functions, also known as functions that do not take a `self` param, as well as statics and consts.|
+|trait| Emitted for associated trait items.|
+|unsafe| Emitted for unsafe operations, like unsafe function calls, as well as the `unsafe` token.|
+
+![Semantic Syntax Highlighting](https://user-images.githubusercontent.com/48062697/113164457-06cfb980-9239-11eb-819b-0f93e646acf8.png)
+![Semantic Syntax Highlighting](https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png)
+
+
+### Show Dependency Tree
+**Source:** [fetch_crates.rs](crates/ide/src/fetch_crates.rs#13)
+
+Shows a view tree with all the dependencies of this project
+
+| Editor | Panel Name |
+|---------|------------|
+| VS Code | **Rust Dependencies** |
+
+![Show Dependency Tree](https://user-images.githubusercontent.com/5748995/229394139-2625beab-f4c9-484b-84ed-ad5dee0b1e1a.png)
+
+
+### Show Syntax Tree
+**Source:** [view_syntax_tree.rs](crates/ide/src/view_syntax_tree.rs#14)
+
+Shows a tree view with the syntax tree of the current file
+
+| Editor | Panel Name |
+|---------|-------------|
+| VS Code | **Rust Syntax Tree** |
+
+
+### Status
+**Source:** [status.rs](crates/ide/src/status.rs#28)
+
+Shows internal statistics about memory usage of rust-analyzer.
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: Status** |
+
+![Status](https://user-images.githubusercontent.com/48062697/113065584-05f34500-91b1-11eb-98cc-5c196f76be7f.gif)
+
+
+### Structural Search and Replace
+**Source:** [lib.rs](crates/ide-ssr/src/lib.rs#6)
+
+Search and replace with named wildcards that will match any expression, type, path, pattern or item.
+The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`.
+A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
+Within a macro call, a placeholder will match up until whatever token follows the placeholder.
+
+All paths in both the search pattern and the replacement template must resolve in the context
+in which this command is invoked. Paths in the search pattern will then match the code if they
+resolve to the same item, even if they're written differently. For example if we invoke the
+command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers
+to `foo::Bar` will match.
+
+Paths in the replacement template will be rendered appropriately for the context in which the
+replacement occurs. For example if our replacement template is `foo::Bar` and we match some
+code in the `foo` module, we'll insert just `Bar`.
+
+Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will
+match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`. When a
+placeholder is the receiver of a method call in the search pattern (e.g. `$s.foo()`), but not in
+the replacement template (e.g. `bar($s)`), then *, & and &mut will be added as needed to mirror
+whatever autoderef and autoref was happening implicitly in the matched code.
+
+The scope of the search / replace will be restricted to the current selection if any, otherwise
+it will apply to the whole workspace.
+
+Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
+
+Supported constraints:
+
+| Constraint | Restricts placeholder |
+|---------------|------------------------|
+| kind(literal) | Is a literal (e.g. `42` or `"forty two"`) |
+| not(a) | Negates the constraint `a` |
+
+Available via the command `rust-analyzer.ssr`.
+
+```rust
+// Using structural search replace command [foo($a, $b) ==>> ($a).foo($b)]
+
+// BEFORE
+String::from(foo(y + 5, z))
+
+// AFTER
+String::from((y + 5).foo(z))
+```
+
+| Editor | Action Name |
+|---------|--------------|
+| VS Code | **rust-analyzer: Structural Search Replace** |
+
+Also available as an assist, by writing a comment containing the structural
+search and replace rule. You will only see the assist if the comment can
+be parsed as a valid structural search and replace rule.
+
+```rust
+// Place the cursor on the line below to see the assist 💡.
+// foo($a, $b) ==>> ($a).foo($b)
+```
+
+
+### User Snippet Completions
+**Source:** [snippet.rs](crates/ide-completion/src/snippet.rs#5)
+
+rust-analyzer allows the user to define custom (postfix)-snippets that may depend on items to be accessible for the current scope to be applicable.
+
+A custom snippet can be defined by adding it to the `rust-analyzer.completion.snippets.custom` object respectively.
+
+```json
+{
+ "rust-analyzer.completion.snippets.custom": {
+ "thread spawn": {
+ "prefix": ["spawn", "tspawn"],
+ "body": [
+ "thread::spawn(move || {",
+ "\t$0",
+ "});",
+ ],
+ "description": "Insert a thread::spawn call",
+ "requires": "std::thread",
+ "scope": "expr",
+ }
+ }
+}
+```
+
+In the example above:
+
+* `"thread spawn"` is the name of the snippet.
+
+* `prefix` defines one or more trigger words that will trigger the snippets completion.
+Using `postfix` will instead create a postfix snippet.
+
+* `body` is one or more lines of content joined via newlines for the final output.
+
+* `description` is an optional description of the snippet, if unset the snippet name will be used.
+
+* `requires` is an optional list of item paths that have to be resolvable in the current crate where the completion is rendered.
+
+
+### View Crate Graph
+**Source:** [view_crate_graph.rs](crates/ide/src/view_crate_graph.rs#8)
+
+Renders the currently loaded crate graph as an SVG graphic. Requires the `dot` tool, which
+is part of graphviz, to be installed.
+
+Only workspace crates are included, no crates.io dependencies or sysroot crates.
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: View Crate Graph** |
+
+
+### View Hir
+**Source:** [view_hir.rs](crates/ide/src/view_hir.rs#5)
+
+| Editor | Action Name |
+|---------|--------------|
+| VS Code | **rust-analyzer: View Hir**
+
+![View Hir](https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif)
+
+
+### View Memory Layout
+**Source:** [view_memory_layout.rs](crates/ide/src/view_memory_layout.rs#74)
+
+Displays the recursive memory layout of a datatype.
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: View Memory Layout** |
+
+
+### View Mir
+**Source:** [view_mir.rs](crates/ide/src/view_mir.rs#5)
+
+| Editor | Action Name |
+|---------|-------------|
+| VS Code | **rust-analyzer: View Mir**
+
+
+### Workspace Symbol
+**Source:** [symbol_index.rs](crates/ide-db/src/symbol_index.rs#174)
+
+Uses fuzzy-search to find types, modules and functions by name across your
+project and dependencies. This is **the** most useful feature, which improves code
+navigation tremendously. It mostly works on top of the built-in LSP
+functionality, however `#` and `*` symbols can be used to narrow down the
+search. Specifically,
+
+- `Foo` searches for `Foo` type in the current workspace
+- `foo#` searches for `foo` function in the current workspace
+- `Foo*` searches for `Foo` type among dependencies, including `stdlib`
+- `foo#*` searches for `foo` function among dependencies
+
+That is, `#` switches from "types" to all symbols, `*` switches from the current
+workspace to dependencies.
+
+Note that filtering does not currently work in VSCode due to the editor never
+sending the special symbols to the language server. Instead, you can configure
+the filtering via the `rust-analyzer.workspace.symbol.search.scope` and
+`rust-analyzer.workspace.symbol.search.kind` settings. Symbols prefixed
+with `__` are hidden from the search results unless configured otherwise.
+
+| Editor | Shortcut |
+|---------|-----------|
+| VS Code | <kbd>Ctrl+T</kbd>
diff --git a/docs/book/src/installation.md b/docs/book/src/installation.md
new file mode 100644
index 0000000000..3a4c0cf227
--- /dev/null
+++ b/docs/book/src/installation.md
@@ -0,0 +1,40 @@
+# Installation
+
+To use rust-analyzer, you need a `rust-analyzer` binary, a text editor
+that supports LSP, and the source code of the Rust standard library.
+
+If you're [using VS Code](./vs_code.html), the extension bundles a
+copy of the `rust-analyzer` binary. For other editors, you'll need to
+[install the binary](./rust_analyzer_binary.html) and [configure your
+editor](./other_editors.html).
+
+## Rust Standard Library
+
+rust-analyzer will attempt to install the standard library source code
+automatically. You can also install it manually with `rustup`.
+
+ $ rustup component add rust-src
+
+Only the latest stable standard library source is officially supported
+for use with rust-analyzer. If you are using an older toolchain or have
+an override set, rust-analyzer may fail to understand the Rust source.
+You will either need to update your toolchain or use an older version of
+rust-analyzer that is compatible with your toolchain.
+
+If you are using an override in your project, you can still force
+rust-analyzer to use the stable toolchain via the environment variable
+`RUSTUP_TOOLCHAIN`. For example, with VS Code or coc-rust-analyzer:
+
+```json
+{ "rust-analyzer.server.extraEnv": { "RUSTUP_TOOLCHAIN": "stable" } }
+```
+
+## Crates
+
+There is a package named `ra_ap_rust_analyzer` available on
+[crates.io](https://crates.io/crates/ra_ap_rust-analyzer), for people
+who want to use rust-analyzer programmatically.
+
+For more details, see [the publish
+workflow](https://github.com/rust-lang/rust-analyzer/blob/master/.github/workflows/autopublish.yaml).
+
diff --git a/docs/book/src/non_cargo_based_projects.md b/docs/book/src/non_cargo_based_projects.md
new file mode 100644
index 0000000000..151f8758a1
--- /dev/null
+++ b/docs/book/src/non_cargo_based_projects.md
@@ -0,0 +1,246 @@
+# Non-Cargo Based Projects
+
+rust-analyzer does not require Cargo. However, if you use some other
+build system, you’ll have to describe the structure of your project for
+rust-analyzer in the `rust-project.json` format:
+
+```typescript
+interface JsonProject {
+ /// Path to the sysroot directory.
+ ///
+ /// The sysroot is where rustc looks for the
+ /// crates that are built-in to rust, such as
+ /// std.
+ ///
+ /// https://doc.rust-lang.org/rustc/command-line-arguments.html#--sysroot-override-the-system-root
+ ///
+ /// To see the current value of sysroot, you
+ /// can query rustc:
+ ///
+ /// ```
+ /// $ rustc --print sysroot
+ /// /Users/yourname/.rustup/toolchains/stable-x86_64-apple-darwin
+ /// ```
+ sysroot?: string;
+ /// Path to the directory with *source code* of
+ /// sysroot crates.
+ ///
+ /// By default, this is `lib/rustlib/src/rust/library`
+ /// relative to the sysroot.
+ ///
+ /// It should point to the directory where std,
+ /// core, and friends can be found:
+ ///
+ /// https://github.com/rust-lang/rust/tree/master/library.
+ ///
+ /// If provided, rust-analyzer automatically adds
+ /// dependencies on sysroot crates. Conversely,
+ /// if you omit this path, you can specify sysroot
+ /// dependencies yourself and, for example, have
+ /// several different "sysroots" in one graph of
+ /// crates.
+ sysroot_src?: string;
+ /// List of groups of common cfg values, to allow
+ /// sharing them between crates.
+ ///
+ /// Maps from group name to its cfgs. Cfg follow
+ /// the same format as `Crate.cfg`.
+ cfg_groups?: { [key: string]: string[]; };
+ /// The set of crates comprising the current
+ /// project. Must include all transitive
+ /// dependencies as well as sysroot crate (libstd,
+ /// libcore and such).
+ crates: Crate[];
+ /// Configuration for CLI commands.
+ ///
+ /// These are used for running and debugging binaries
+ /// and tests without encoding build system-specific
+ /// knowledge into rust-analyzer.
+ ///
+ /// # Example
+ ///
+ /// Below is an example of a test runnable. `{label}` and `{test_id}`
+ /// are explained in `Runnable::args`'s documentation below.
+ ///
+ /// ```json
+ /// {
+ /// "program": "buck",
+ /// "args": [
+ /// "test",
+ /// "{label}",
+ /// "--",
+ /// "{test_id}",
+ /// "--print-passing-details"
+ /// ],
+ /// "cwd": "/home/user/repo-root/",
+ /// "kind": "testOne"
+ /// }
+ /// ```
+ runnables?: Runnable[];
+}
+
+interface Crate {
+ /// Optional crate name used for display purposes,
+ /// without affecting semantics. See the `deps`
+ /// key for semantically-significant crate names.
+ display_name?: string;
+ /// Path to the root module of the crate.
+ root_module: string;
+ /// Edition of the crate.
+ edition: '2015' | '2018' | '2021' | '2024';
+ /// The version of the crate. Used for calculating
+ /// the correct docs.rs URL.
+ version?: string;
+ /// Dependencies
+ deps: Dep[];
+ /// Should this crate be treated as a member of
+ /// current "workspace".
+ ///
+ /// By default, inferred from the `root_module`
+ /// (members are the crates which reside inside
+ /// the directory opened in the editor).
+ ///
+ /// Set this to `false` for things like standard
+ /// library and 3rd party crates to enable
+ /// performance optimizations (rust-analyzer
+ /// assumes that non-member crates don't change).
+ is_workspace_member?: boolean;
+ /// Optionally specify the (super)set of `.rs`
+ /// files comprising this crate.
+ ///
+ /// By default, rust-analyzer assumes that only
+ /// files under `root_module.parent` can belong
+ /// to a crate. `include_dirs` are included
+ /// recursively, unless a subdirectory is in
+ /// `exclude_dirs`.
+ ///
+ /// Different crates can share the same `source`.
+ ///
+ /// If two crates share an `.rs` file in common,
+ /// they *must* have the same `source`.
+ /// rust-analyzer assumes that files from one
+ /// source can't refer to files in another source.
+ source?: {
+ include_dirs: string[];
+ exclude_dirs: string[];
+ };
+ /// List of cfg groups this crate inherits.
+ ///
+ /// All cfg in these groups will be concatenated to
+ /// `cfg`. It is impossible to replace a value from
+ /// the groups.
+ cfg_groups?: string[];
+ /// The set of cfgs activated for a given crate, like
+ /// `["unix", "feature=\"foo\"", "feature=\"bar\""]`.
+ cfg: string[];
+ /// Target tuple for this Crate.
+ ///
+ /// Used when running `rustc --print cfg`
+ /// to get target-specific cfgs.
+ target?: string;
+ /// Environment variables, used for
+ /// the `env!` macro
+ env: { [key: string]: string; };
+
+ /// Whether the crate is a proc-macro crate.
+ is_proc_macro: boolean;
+ /// For proc-macro crates, path to compiled
+ /// proc-macro (.so file).
+ proc_macro_dylib_path?: string;
+
+ /// Repository, matching the URL that would be used
+ /// in Cargo.toml.
+ repository?: string;
+
+ /// Build-specific data about this crate.
+ build?: BuildInfo;
+}
+
+interface Dep {
+ /// Index of a crate in the `crates` array.
+ crate: number;
+ /// Name as should appear in the (implicit)
+ /// `extern crate name` declaration.
+ name: string;
+}
+
+interface BuildInfo {
+ /// The name associated with this crate.
+ ///
+ /// This is determined by the build system that produced
+ /// the `rust-project.json` in question. For instance, if buck were used,
+ /// the label might be something like `//ide/rust/rust-analyzer:rust-analyzer`.
+ ///
+ /// Do not attempt to parse the contents of this string; it is a build system-specific
+ /// identifier similar to `Crate::display_name`.
+ label: string;
+ /// Path corresponding to the build system-specific file defining the crate.
+ build_file: string;
+ /// The kind of target.
+ ///
+ /// This information is used to determine what sort
+ /// of runnable codelens to provide, if any.
+ target_kind: 'bin' | 'lib' | 'test';
+}
+
+interface Runnable {
+ /// The program invoked by the runnable.
+ ///
+ /// For example, this might be `cargo`, `buck`, or `bazel`.
+ program: string;
+ /// The arguments passed to `program`.
+ args: string[];
+ /// The current working directory of the runnable.
+ cwd: string;
+ /// Used to decide what code lens to offer.
+ ///
+ /// `testOne`: This runnable will be used when the user clicks the 'Run Test'
+ /// CodeLens above a test.
+ ///
+ /// The args for testOne can contain two template strings:
+ /// `{label}` and `{test_id}`. `{label}` will be replaced
+ /// with the `Build::label` and `{test_id}` will be replaced
+ /// with the test name.
+ kind: 'testOne' | string;
+}
+```
+
+This format is provisional and subject to change. Specifically, the
+`roots` setup will be different eventually.
+
+There are three ways to feed `rust-project.json` to rust-analyzer:
+
+- Place `rust-project.json` file at the root of the project, and
+ rust-analyzer will discover it.
+
+- Specify
+ `"rust-analyzer.linkedProjects": [ "path/to/rust-project.json" ]` in
+ the settings (and make sure that your LSP client sends settings as a
+ part of initialize request).
+
+- Specify
+ `"rust-analyzer.linkedProjects": [ { "roots": […​], "crates": […​] }]`
+ inline.
+
+Relative paths are interpreted relative to `rust-project.json` file
+location or (for inline JSON) relative to `rootUri`.
+
+You can set the `RA_LOG` environment variable to `rust_analyzer=info` to
+inspect how rust-analyzer handles config and project loading.
+
+Note that calls to `cargo check` are disabled when using
+`rust-project.json` by default, so compilation errors and warnings will
+no longer be sent to your LSP client. To enable these compilation errors
+you will need to specify explicitly what command rust-analyzer should
+run to perform the checks using the
+`rust-analyzer.check.overrideCommand` configuration. As an example, the
+following configuration explicitly sets `cargo check` as the `check`
+command.
+
+ { "rust-analyzer.check.overrideCommand": ["cargo", "check", "--message-format=json"] }
+
+`check.overrideCommand` requires the command specified to output json
+error messages for rust-analyzer to consume. The `--message-format=json`
+flag does this for `cargo check` so whichever command you use must also
+output errors in this format. See the [Configuration](#configuration)
+section for more information.
diff --git a/docs/book/src/other_editors.md b/docs/book/src/other_editors.md
new file mode 100644
index 0000000000..1eac7dd2c2
--- /dev/null
+++ b/docs/book/src/other_editors.md
@@ -0,0 +1,425 @@
+# Other Editors
+
+rust-analyzer works with any editor that supports the [Language Server
+Protocol](https://microsoft.github.io/language-server-protocol/).
+
+This page assumes that you have already [installed the rust-analyzer
+binary](./rust_analyzer_binary.html).
+
+<!-- toc -->
+
+## Emacs
+
+To use `rust-analyzer`, you need to install and enable one of the two
+popular LSP client implementations for Emacs,
+[Eglot](https://github.com/joaotavora/eglot) or [LSP
+Mode](https://github.com/emacs-lsp/lsp-mode). Both enable
+`rust-analyzer` by default in Rust buffers if it is available.
+
+### Eglot
+
+Eglot is the more minimalistic and lightweight LSP client for Emacs,
+integrates well with existing Emacs functionality and is built into
+Emacs starting from release 29.
+
+After installing Eglot, e.g. via `M-x package-install` (not needed from
+Emacs 29), you can enable it via the `M-x eglot` command or load it
+automatically in `rust-mode` via
+
+```
+(add-hook 'rust-mode-hook 'eglot-ensure)
+```
+
+To enable clippy, you will need to configure the initialization options
+to pass the `check.command` setting.
+
+```
+(add-to-list 'eglot-server-programs
+ '((rust-ts-mode rust-mode) .
+ ("rust-analyzer" :initializationOptions (:check (:command "clippy")))))
+```
+
+For more detailed instructions and options see the [Eglot
+manual](https://joaotavora.github.io/eglot) (also available from Emacs
+via `M-x info`) and the [Eglot
+readme](https://github.com/joaotavora/eglot/blob/master/README.md).
+
+Eglot does not support the rust-analyzer extensions to the
+language-server protocol and does not aim to do so in the future. The
+[eglot-x](https://github.com/nemethf/eglot-x#rust-analyzer-extensions)
+package adds experimental support for those LSP extensions.
+
+### LSP Mode
+
+LSP-mode is the original LSP-client for emacs. Compared to Eglot it has
+a larger codebase and supports more features, like LSP protocol
+extensions. With extension packages like [LSP
+UI](https://github.com/emacs-lsp/lsp-ui) it offers a lot of visual
+eyecandy. Further it integrates well with [DAP
+mode](https://github.com/emacs-lsp/dap-mode) for support of the Debug
+Adapter Protocol.
+
+You can install LSP-mode via `M-x package-install` and then run it via
+the `M-x lsp` command or load it automatically in rust buffers with
+
+```
+(add-hook 'rust-mode-hook 'lsp-deferred)
+```
+
+For more information on how to set up LSP mode and its extension package
+see the instructions in the [LSP mode
+manual](https://emacs-lsp.github.io/lsp-mode/page/installation). Also
+see the [rust-analyzer
+section](https://emacs-lsp.github.io/lsp-mode/page/lsp-rust-analyzer/)
+for `rust-analyzer` specific options and commands, which you can
+optionally bind to keys.
+
+Note the excellent
+[guide](https://robert.kra.hn/posts/2021-02-07_rust-with-emacs/) from
+[@rksm](https://github.com/rksm) on how to set-up Emacs for Rust
+development with LSP mode and several other packages.
+
+## Vim/Neovim
+
+There are several LSP client implementations for Vim or Neovim:
+
+### coc-rust-analyzer
+
+1. Install coc.nvim by following the instructions at
+ [coc.nvim](https://github.com/neoclide/coc.nvim) (Node.js required)
+
+2. Run `:CocInstall coc-rust-analyzer` to install
+ [coc-rust-analyzer](https://github.com/fannheyward/coc-rust-analyzer),
+ this extension implements *most* of the features supported in the
+ VSCode extension:
+
+ - automatically install and upgrade stable/nightly releases
+
+ - same configurations as VSCode extension,
+ `rust-analyzer.server.path`, `rust-analyzer.cargo.features` etc.
+
+ - same commands too, `rust-analyzer.analyzerStatus`,
+ `rust-analyzer.ssr` etc.
+
+ - inlay hints for variables and method chaining, *Neovim Only*
+
+Note: coc-rust-analyzer is capable of installing or updating the
+rust-analyzer binary on its own.
+
+Note: for code actions, use `coc-codeaction-cursor` and
+`coc-codeaction-selected`; `coc-codeaction` and `coc-codeaction-line`
+are unlikely to be useful.
+
+### LanguageClient-neovim
+
+1. Install LanguageClient-neovim by following the instructions
+ [here](https://github.com/autozimu/LanguageClient-neovim)
+
+ - The GitHub project wiki has extra tips on configuration
+
+2. Configure by adding this to your Vim/Neovim config file (replacing
+ the existing Rust-specific line if it exists):
+
+ let g:LanguageClient_serverCommands = {
+ \ 'rust': ['rust-analyzer'],
+ \ }
+
+### YouCompleteMe
+
+Install YouCompleteMe by following the instructions
+[here](https://github.com/ycm-core/YouCompleteMe#installation).
+
+rust-analyzer is the default in ycm, it should work out of the box.
+
+### ALE
+
+To use the LSP server in [ale](https://github.com/dense-analysis/ale):
+
+ let g:ale_linters = {'rust': ['analyzer']}
+
+### nvim-lsp
+
+Neovim 0.5 has built-in language server support. For a quick start
+configuration of rust-analyzer, use
+[neovim/nvim-lspconfig](https://github.com/neovim/nvim-lspconfig#rust_analyzer).
+Once `neovim/nvim-lspconfig` is installed, use
+`lua require'lspconfig'.rust_analyzer.setup({})` in your `init.vim`.
+
+You can also pass LSP settings to the server:
+
+```lua
+lua << EOF
+local lspconfig = require'lspconfig'
+
+local on_attach = function(client)
+ require'completion'.on_attach(client)
+end
+
+lspconfig.rust_analyzer.setup({
+ on_attach = on_attach,
+ settings = {
+ ["rust-analyzer"] = {
+ imports = {
+ granularity = {
+ group = "module",
+ },
+ prefix = "self",
+ },
+ cargo = {
+ buildScripts = {
+ enable = true,
+ },
+ },
+ procMacro = {
+ enable = true
+ },
+ }
+ }
+})
+EOF
+```
+
+If you're running Neovim 0.10 or later, you can enable inlay hints via `on_attach`:
+
+```lua
+lspconfig.rust_analyzer.setup({
+ on_attach = function(client, bufnr)
+ vim.lsp.inlay_hint.enable(true, { bufnr = bufnr })
+ end
+})
+```
+
+Note that the hints are only visible after `rust-analyzer` has finished loading **and** you have to
+edit the file to trigger a re-render.
+
+See <https://sharksforarms.dev/posts/neovim-rust/> for more tips on
+getting started.
+
+Check out <https://github.com/mrcjkb/rustaceanvim> for a batteries
+included rust-analyzer setup for Neovim.
+
+### vim-lsp
+
+vim-lsp is installed by following [the plugin
+instructions](https://github.com/prabirshrestha/vim-lsp). It can be as
+simple as adding this line to your `.vimrc`:
+
+ Plug 'prabirshrestha/vim-lsp'
+
+Next you need to register the `rust-analyzer` binary. If it is available
+in `$PATH`, you may want to add this to your `.vimrc`:
+
+ if executable('rust-analyzer')
+ au User lsp_setup call lsp#register_server({
+ \ 'name': 'Rust Language Server',
+ \ 'cmd': {server_info->['rust-analyzer']},
+ \ 'whitelist': ['rust'],
+ \ })
+ endif
+
+There is no dedicated UI for the server configuration, so you would need
+to send any options as a value of the `initialization_options` field, as
+described in the [Configuration](#configuration) section. Here is an
+example of how to enable the proc-macro support:
+
+ if executable('rust-analyzer')
+ au User lsp_setup call lsp#register_server({
+ \ 'name': 'Rust Language Server',
+ \ 'cmd': {server_info->['rust-analyzer']},
+ \ 'whitelist': ['rust'],
+ \ 'initialization_options': {
+ \ 'cargo': {
+ \ 'buildScripts': {
+ \ 'enable': v:true,
+ \ },
+ \ },
+ \ 'procMacro': {
+ \ 'enable': v:true,
+ \ },
+ \ },
+ \ })
+ endif
+
+## Sublime Text
+
+### Sublime Text 4:
+
+- Follow the instructions in
+ [LSP-rust-analyzer](https://github.com/sublimelsp/LSP-rust-analyzer).
+
+Install
+[LSP-file-watcher-chokidar](https://packagecontrol.io/packages/LSP-file-watcher-chokidar)
+to enable file watching (`workspace/didChangeWatchedFiles`).
+
+### Sublime Text 3:
+
+- Install the [LSP package](https://packagecontrol.io/packages/LSP).
+
+- From the command palette, run `LSP: Enable Language Server Globally`
+ and select `rust-analyzer`.
+
+If it worked, you should see "rust-analyzer, Line X, Column Y" on the
+left side of the status bar, and after waiting a bit, functionalities
+like tooltips on hovering over variables should become available.
+
+If you get an error saying `No such file or directory: 'rust-analyzer'`,
+see the [rust-analyzer binary installation](./rust_analyzer_binary.html) section.
+
+## GNOME Builder
+
+GNOME Builder 3.37.1 and newer has native `rust-analyzer` support. If
+the LSP binary is not available, GNOME Builder can install it when
+opening a Rust file.
+
+## Eclipse IDE
+
+Support for Rust development in the Eclipse IDE is provided by [Eclipse
+Corrosion](https://github.com/eclipse/corrosion). If available in PATH
+or in some standard location, `rust-analyzer` is detected and powers
+editing of Rust files without further configuration. If `rust-analyzer`
+is not detected, Corrosion will prompt you for configuration of your
+Rust toolchain and language server with a link to the *Window &gt;
+Preferences &gt; Rust* preference page; from here a button allows you to
+download and configure `rust-analyzer`, but you can also reference
+another installation. You’ll need to close and reopen all .rs and Cargo
+files, or to restart the IDE, for this change to take effect.
+
+## Kate Text Editor
+
+Support for the language server protocol is built into Kate through the
+LSP plugin, which is included by default. It is preconfigured to use
+rust-analyzer for Rust sources since Kate 21.12.
+
+To change rust-analyzer config options, start from the following example
+and put it into Kate’s "User Server Settings" tab (located under the LSP
+Client settings):
+
+```json
+{
+ "servers": {
+ "rust": {
+ "initializationOptions": {
+ "cachePriming": {
+ "enable": false
+ },
+ "check": {
+ "allTargets": false
+ },
+ "checkOnSave": false
+ }
+ }
+ }
+}
+```
+
+Then click on apply, and restart the LSP server for your rust project.
+
+## juCi++
+
+[juCi++](https://gitlab.com/cppit/jucipp) has built-in support for the
+language server protocol, and since version 1.7.0 offers installation of
+both Rust and rust-analyzer when opening a Rust file.
+
+## Kakoune
+
+[Kakoune](https://kakoune.org/) supports LSP with the help of
+[`kak-lsp`](https://github.com/kak-lsp/kak-lsp). Follow the
+[instructions](https://github.com/kak-lsp/kak-lsp#installation) to
+install `kak-lsp`. To configure `kak-lsp`, refer to the [configuration
+section](https://github.com/kak-lsp/kak-lsp#configuring-kak-lsp) which
+is basically about copying the [configuration
+file](https://github.com/kak-lsp/kak-lsp/blob/master/kak-lsp.toml) in
+the right place (latest versions should use `rust-analyzer` by default).
+
+Finally, you need to configure Kakoune to talk to `kak-lsp` (see [Usage
+section](https://github.com/kak-lsp/kak-lsp#usage)). A basic
+configuration will only get you LSP but you can also activate inlay
+diagnostics and auto-formatting on save. The following might help you
+get all of this.
+
+ eval %sh{kak-lsp --kakoune -s $kak_session} # Not needed if you load it with plug.kak.
+ hook global WinSetOption filetype=rust %{
+ # Enable LSP
+ lsp-enable-window
+
+ # Auto-formatting on save
+ hook window BufWritePre .* lsp-formatting-sync
+
+ # Configure inlay hints (only on save)
+ hook window -group rust-inlay-hints BufWritePost .* rust-analyzer-inlay-hints
+ hook -once -always window WinSetOption filetype=.* %{
+ remove-hooks window rust-inlay-hints
+ }
+ }
+
+## Helix
+
+[Helix](https://docs.helix-editor.com/) supports LSP by default.
+However, it won’t install `rust-analyzer` automatically. You can follow
+instructions for [installing the rust-analyzer
+binary](./rust_analyzer_binary.html).
+
+## Visual Studio 2022
+
+There are multiple rust-analyzer extensions for Visual Studio 2022 on
+Windows:
+
+### rust-analyzer.vs
+
+(License: Creative Commons Attribution-NonCommercial-ShareAlike 4.0
+International)
+
+[Visual Studio
+Marketplace](https://marketplace.visualstudio.com/items?itemName=kitamstudios.RustAnalyzer)
+
+[GitHub](https://github.com/kitamstudios/rust-analyzer/)
+
+Support for Rust development in the Visual Studio IDE is enabled by the
+[rust-analyzer](https://marketplace.visualstudio.com/items?itemName=kitamstudios.RustAnalyzer)
+package. Either click on the download link or install from IDE’s
+extension manager. For now [Visual Studio
+2022](https://visualstudio.microsoft.com/downloads/) is required. All
+editions are supported viz. Community, Professional & Enterprise. The
+package aims to provide 0-friction installation and therefore comes
+loaded with most things required including rust-analyzer binary. If
+anything it needs is missing, appropriate errors / warnings will guide
+the user. E.g. cargo.exe needs to be in path and the package will tell
+you as much. This package is under rapid active development. So if you
+encounter any issues please file it at
+[rust-analyzer.vs](https://github.com/kitamstudios/rust-analyzer/).
+
+### VS RustAnalyzer
+
+(License: GPL)
+
+[Visual Studio
+Marketplace](https://marketplace.visualstudio.com/items?itemName=cchharris.vsrustanalyzer)
+
+[GitHub](https://github.com/cchharris/VS-RustAnalyzer)
+
+### SourceGear Rust
+
+(License: closed source)
+
+[Visual Studio
+Marketplace](https://marketplace.visualstudio.com/items?itemName=SourceGear.SourceGearRust)
+
+[GitHub (docs, issues,
+discussions)](https://github.com/sourcegear/rust-vs-extension)
+
+- Free (no-cost)
+
+- Supports all editions of Visual Studio 2022 on Windows: Community,
+ Professional, or Enterprise
+
+## Lapce
+
+[Lapce](https://lapce.dev/) has a Rust plugin which you can install
+directly. Unfortunately, it downloads an old version of `rust-analyzer`,
+but you can set the server path under Settings.
+
+## Zed
+
+[Zed](https://zed.dev) has native `rust-analyzer` support. If the
+rust-analyzer binary is not available, Zed can install it when opening
+a Rust file.
diff --git a/docs/book/src/privacy.md b/docs/book/src/privacy.md
new file mode 100644
index 0000000000..602c68d6f6
--- /dev/null
+++ b/docs/book/src/privacy.md
@@ -0,0 +1,15 @@
+# Privacy
+
+The LSP server performs no network access in itself, but runs
+`cargo metadata` which will update or download the crate registry and
+the source code of the project dependencies. If enabled (the default),
+build scripts and procedural macros can do anything.
+
+The Code extension does not access the network.
+
+Any other editor plugins are not under the control of the
+`rust-analyzer` developers. For any privacy concerns, you should check
+with their respective developers.
+
+For `rust-analyzer` developers, `cargo xtask release` uses the GitHub
+API to put together the release notes.
diff --git a/docs/book/src/rust_analyzer_binary.md b/docs/book/src/rust_analyzer_binary.md
new file mode 100644
index 0000000000..c7ac3087ce
--- /dev/null
+++ b/docs/book/src/rust_analyzer_binary.md
@@ -0,0 +1,74 @@
+# rust-analyzer Binary
+
+Text editors require the `rust-analyzer` binary to be in
+`$PATH`. You can download pre-built binaries from the
+[releases](https://github.com/rust-lang/rust-analyzer/releases) page.
+You will need to uncompress and rename the binary for your platform,
+e.g. from `rust-analyzer-aarch64-apple-darwin.gz` on Mac OS to
+`rust-analyzer`, make it executable, then move it into a directory in
+your `$PATH`.
+
+On Linux to install the `rust-analyzer` binary into `~/.local/bin`,
+these commands should work:
+
+ $ mkdir -p ~/.local/bin
+ $ curl -L https://github.com/rust-lang/rust-analyzer/releases/latest/download/rust-analyzer-x86_64-unknown-linux-gnu.gz | gunzip -c - > ~/.local/bin/rust-analyzer
+ $ chmod +x ~/.local/bin/rust-analyzer
+
+Make sure that `~/.local/bin` is listed in the `$PATH` variable and use
+the appropriate URL if you’re not on an `x86-64` system.
+
+You don’t have to use `~/.local/bin`, any other path like `~/.cargo/bin`
+or `/usr/local/bin` will work just as well.
+
+Alternatively, you can install it from source using the command below.
+You’ll need the latest stable version of the Rust toolchain.
+
+ $ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
+ $ cargo xtask install --server
+
+If your editor can’t find the binary even though the binary is on your
+`$PATH`, the likely explanation is that it doesn’t see the same `$PATH`
+as the shell, see [this
+issue](https://github.com/rust-lang/rust-analyzer/issues/1811). On Unix,
+running the editor from a shell or changing the `.desktop` file to set
+the environment should help.
+
+### rustup
+
+`rust-analyzer` is available in `rustup`:
+
+ $ rustup component add rust-analyzer
+
+### Arch Linux
+
+The `rust-analyzer` binary can be installed from the repos or AUR (Arch
+User Repository):
+
+- [`rust-analyzer`](https://www.archlinux.org/packages/extra/x86_64/rust-analyzer/)
+ (built from latest tagged source)
+
+- [`rust-analyzer-git`](https://aur.archlinux.org/packages/rust-analyzer-git)
+ (latest Git version)
+
+Install it with pacman, for example:
+
+ $ pacman -S rust-analyzer
+
+### Gentoo Linux
+
+`rust-analyzer` is installed when the `rust-analyzer` use flag is set for dev-lang/rust or dev-lang/rust-bin. You also need to set the `rust-src` use flag.
+
+### macOS
+
+The `rust-analyzer` binary can be installed via
+[Homebrew](https://brew.sh/).
+
+ $ brew install rust-analyzer
+
+### Windows
+
+It is recommended to install the latest Microsoft Visual C++ Redistributable prior to installation.
+Download links can be found
+[here](https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist).
+
diff --git a/docs/book/src/security.md b/docs/book/src/security.md
new file mode 100644
index 0000000000..1444af0324
--- /dev/null
+++ b/docs/book/src/security.md
@@ -0,0 +1,19 @@
+# Security
+
+At the moment, rust-analyzer assumes that all code is trusted. Here is a
+**non-exhaustive** list of ways to make rust-analyzer execute arbitrary
+code:
+
+- proc macros and build scripts are executed by default
+
+- `.cargo/config` can override `rustc` with an arbitrary executable
+
+- `rust-toolchain.toml` can override `rustc` with an arbitrary
+ executable
+
+- VS Code plugin reads configuration from project directory, and that
+ can be used to override paths to various executables, like `rustfmt`
+ or `rust-analyzer` itself.
+
+- rust-analyzer’s syntax trees library uses a lot of `unsafe` and
+ hasn’t been properly audited for memory safety. \ No newline at end of file
diff --git a/docs/book/src/troubleshooting.md b/docs/book/src/troubleshooting.md
new file mode 100644
index 0000000000..4092b9de99
--- /dev/null
+++ b/docs/book/src/troubleshooting.md
@@ -0,0 +1,50 @@
+# Troubleshooting
+
+Start with looking at the rust-analyzer version. Try **rust-analyzer:
+Show RA Version** in VS Code (using **Command Palette** feature
+typically activated by Ctrl+Shift+P) or `rust-analyzer --version` in the
+command line. If the date is more than a week ago, it’s better to update
+rust-analyzer version.
+
+The next thing to check would be panic messages in rust-analyzer’s log.
+Log messages are printed to stderr, in VS Code you can see them in the
+`Output > Rust Analyzer Language Server` tab of the panel. To see more
+logs, set the `RA_LOG=info` environment variable, this can be done
+either by setting the environment variable manually or by using
+`rust-analyzer.server.extraEnv`, note that both of these approaches
+require the server to be restarted.
+
+To fully capture LSP messages between the editor and the server, run
+the `rust-analyzer: Toggle LSP Logs` command and check `Output > Rust
+Analyzer Language Server Trace`.
+
+The root cause for many "nothing works" problems is that rust-analyzer
+fails to understand the project structure. To debug that, first note the
+`rust-analyzer` section in the status bar. If it shows an error icon and
+is red, that’s the problem (hovering over it will show a somewhat
+helpful error message). **rust-analyzer: Status** prints dependency information for
+the current file. Finally, `RA_LOG=project_model=debug` enables verbose
+logs during project loading.
+
+If rust-analyzer outright crashes, try running
+`rust-analyzer analysis-stats /path/to/project/directory/` on the
+command line. This command type checks the whole project in batch mode
+bypassing LSP machinery.
+
+When filing issues, it is useful (but not necessary) to try to minimize
+examples. An ideal bug reproduction looks like this:
+
+```shell
+$ git clone https://github.com/username/repo.git && cd repo && git switch --detach commit-hash
+$ rust-analyzer --version
+rust-analyzer dd12184e4 2021-05-08 dev
+$ rust-analyzer analysis-stats .
+💀 💀 💀
+```
+
+It is especially useful when the `repo` doesn’t use external crates or
+the standard library.
+
+If you want to go as far as to modify the source code to debug the
+problem, be sure to take a look at the [dev
+docs](https://github.com/rust-lang/rust-analyzer/tree/master/docs/dev)!
diff --git a/docs/book/src/vs_code.md b/docs/book/src/vs_code.md
new file mode 100644
index 0000000000..233b862d2c
--- /dev/null
+++ b/docs/book/src/vs_code.md
@@ -0,0 +1,121 @@
+# VS Code
+
+This is the best supported editor at the moment. The rust-analyzer
+plugin for VS Code is maintained [in
+tree](https://github.com/rust-lang/rust-analyzer/tree/master/editors/code).
+
+You can install the latest release of the plugin from [the
+marketplace](https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer).
+
+Note that the plugin may cause conflicts with the [previous official
+Rust
+plugin](https://marketplace.visualstudio.com/items?itemName=rust-lang.rust).
+The latter is no longer maintained and should be uninstalled.
+
+The server binary is stored in the extension install directory, which
+starts with `rust-lang.rust-analyzer-` and is located under:
+
+- Linux: `~/.vscode/extensions`
+
+- Linux (Remote, such as WSL): `~/.vscode-server/extensions`
+
+- macOS: `~/.vscode/extensions`
+
+- Windows: `%USERPROFILE%\.vscode\extensions`
+
+As an exception, on NixOS, the extension makes a copy of the server and
+stores it under
+`~/.config/Code/User/globalStorage/rust-lang.rust-analyzer`.
+
+Note that we only support the two most recent versions of VS Code.
+
+### Updates
+
+The extension will be updated automatically as new versions become
+available. It will ask your permission to download the matching language
+server version binary if needed.
+
+#### Nightly
+
+We ship nightly releases for VS Code. To help us out by testing the
+newest code, you can enable pre-release versions in the Code extension
+page.
+
+### Manual installation
+
+Alternatively, download a VSIX corresponding to your platform from the
+[releases](https://github.com/rust-lang/rust-analyzer/releases) page.
+
+Install the extension with the `Extensions: Install from VSIX` command
+within VS Code, or from the command line via:
+
+ $ code --install-extension /path/to/rust-analyzer.vsix
+
+If you are running an unsupported platform, you can install
+`rust-analyzer-no-server.vsix` and compile or obtain a server binary.
+Copy the server anywhere, then add the path to your settings.json, for
+example:
+
+```json
+{ "rust-analyzer.server.path": "~/.local/bin/rust-analyzer-linux" }
+```
+
+### Building From Source
+
+Both the server and the Code plugin can be installed from source:
+
+ $ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
+ $ cargo xtask install
+
+You’ll need Cargo, nodejs (matching a supported version of VS Code) and
+npm for this.
+
+Note that installing via `xtask install` does not work for VS Code
+Remote, instead you’ll need to install the `.vsix` manually.
+
+If you’re not using Code, you can compile and install only the LSP
+server:
+
+ $ cargo xtask install --server
+
+Make sure that `.cargo/bin` is in `$PATH` and precedes paths where
+`rust-analyzer` may also be installed. Specifically, `rustup` includes a
+proxy called `rust-analyzer`, which can cause problems if you’re
+planning to use a source build or even a downloaded binary.
+
+## VS Code or VSCodium in Flatpak
+
+Setting up `rust-analyzer` with a Flatpak version of Code is not trivial
+because of the Flatpak sandbox. While the sandbox can be disabled for
+some directories, `/usr/bin` will always be mounted under
+`/run/host/usr/bin`. This prevents access to the system’s C compiler, a
+system-wide installation of Rust, or any other libraries you might want
+to link to. Some compilers and libraries can be acquired as Flatpak
+SDKs, such as `org.freedesktop.Sdk.Extension.rust-stable` or
+`org.freedesktop.Sdk.Extension.llvm15`.
+
+If you use a Flatpak SDK for Rust, it must be in your `PATH`:
+
+ * install the SDK extensions with `flatpak install org.freedesktop.Sdk.Extension.{llvm15,rust-stable}//23.08`
+ * enable SDK extensions in the editor with the environment variable `FLATPAK_ENABLE_SDK_EXT=llvm15,rust-stable` (this can be done using flatseal or `flatpak override`)
+
+If you want to use Flatpak in combination with `rustup`, the following
+steps might help:
+
+- both Rust and `rustup` have to be installed using
+ <https://rustup.rs>. Distro packages *will not* work.
+
+- you need to launch Code, open a terminal and run `echo $PATH`
+
+- using
+ [Flatseal](https://flathub.org/apps/details/com.github.tchx84.Flatseal),
+ you must add an environment variable called `PATH`. Set its value to
+ the output from above, appending `:~/.cargo/bin`, where `~` is the
+ path to your home directory. You must replace `~`, as it won’t be
+ expanded otherwise.
+
+- while Flatseal is open, you must enable access to "All user files"
+
+A C compiler should already be available via `org.freedesktop.Sdk`. Any
+other tools or libraries you will need to acquire from Flatpak.
+
diff --git a/docs/user/.gitignore b/docs/user/.gitignore
deleted file mode 100644
index c32b1bcec2..0000000000
--- a/docs/user/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-manual.html
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc
deleted file mode 100644
index b33de1956b..0000000000
--- a/docs/user/generated_config.adoc
+++ /dev/null
@@ -1,1197 +0,0 @@
-[[rust-analyzer.assist.emitMustUse]]rust-analyzer.assist.emitMustUse (default: `false`)::
-+
---
-Whether to insert #[must_use] when generating `as_` methods
-for enum variants.
---
-[[rust-analyzer.assist.expressionFillDefault]]rust-analyzer.assist.expressionFillDefault (default: `"todo"`)::
-+
---
-Placeholder expression to use for missing expressions in assists.
---
-[[rust-analyzer.assist.termSearch.borrowcheck]]rust-analyzer.assist.termSearch.borrowcheck (default: `true`)::
-+
---
-Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check.
---
-[[rust-analyzer.assist.termSearch.fuel]]rust-analyzer.assist.termSearch.fuel (default: `1800`)::
-+
---
-Term search fuel in "units of work" for assists (Defaults to 1800).
---
-[[rust-analyzer.cachePriming.enable]]rust-analyzer.cachePriming.enable (default: `true`)::
-+
---
-Warm up caches on project load.
---
-[[rust-analyzer.cachePriming.numThreads]]rust-analyzer.cachePriming.numThreads (default: `"physical"`)::
-+
---
-How many worker threads to handle priming caches. The default `0` means to pick automatically.
---
-[[rust-analyzer.cargo.allTargets]]rust-analyzer.cargo.allTargets (default: `true`)::
-+
---
-Pass `--all-targets` to cargo invocation.
---
-[[rust-analyzer.cargo.autoreload]]rust-analyzer.cargo.autoreload (default: `true`)::
-+
---
-Automatically refresh project info via `cargo metadata` on
-`Cargo.toml` or `.cargo/config.toml` changes.
---
-[[rust-analyzer.cargo.buildScripts.enable]]rust-analyzer.cargo.buildScripts.enable (default: `true`)::
-+
---
-Run build scripts (`build.rs`) for more precise code analysis.
---
-[[rust-analyzer.cargo.buildScripts.invocationStrategy]]rust-analyzer.cargo.buildScripts.invocationStrategy (default: `"per_workspace"`)::
-+
---
-Specifies the invocation strategy to use when running the build scripts command.
-If `per_workspace` is set, the command will be executed for each Rust workspace with the
-workspace as the working directory.
-If `once` is set, the command will be executed once with the opened project as the
-working directory.
-This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
-is set.
---
-[[rust-analyzer.cargo.buildScripts.overrideCommand]]rust-analyzer.cargo.buildScripts.overrideCommand (default: `null`)::
-+
---
-Override the command rust-analyzer uses to run build scripts and
-build procedural macros. The command is required to output json
-and should therefore include `--message-format=json` or a similar
-option.
-
-If there are multiple linked projects/workspaces, this command is invoked for
-each of them, with the working directory being the workspace root
-(i.e., the folder containing the `Cargo.toml`). This can be overwritten
-by changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#`.
-
-By default, a cargo invocation will be constructed for the configured
-targets and features, with the following base command line:
-
-```bash
-cargo check --quiet --workspace --message-format=json --all-targets --keep-going
-```
-.
---
-[[rust-analyzer.cargo.buildScripts.rebuildOnSave]]rust-analyzer.cargo.buildScripts.rebuildOnSave (default: `true`)::
-+
---
-Rerun proc-macros building/build-scripts running when proc-macro
-or build-script sources change and are saved.
---
-[[rust-analyzer.cargo.buildScripts.useRustcWrapper]]rust-analyzer.cargo.buildScripts.useRustcWrapper (default: `true`)::
-+
---
-Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
-avoid checking unnecessary things.
---
-[[rust-analyzer.cargo.cfgs]]rust-analyzer.cargo.cfgs::
-+
---
-Default:
-----
-[
- "debug_assertions",
- "miri"
-]
-----
-List of cfg options to enable with the given values.
-
---
-[[rust-analyzer.cargo.extraArgs]]rust-analyzer.cargo.extraArgs (default: `[]`)::
-+
---
-Extra arguments that are passed to every cargo invocation.
---
-[[rust-analyzer.cargo.extraEnv]]rust-analyzer.cargo.extraEnv (default: `{}`)::
-+
---
-Extra environment variables that will be set when running cargo, rustc
-or other commands within the workspace. Useful for setting RUSTFLAGS.
---
-[[rust-analyzer.cargo.features]]rust-analyzer.cargo.features (default: `[]`)::
-+
---
-List of features to activate.
-
-Set this to `"all"` to pass `--all-features` to cargo.
---
-[[rust-analyzer.cargo.noDefaultFeatures]]rust-analyzer.cargo.noDefaultFeatures (default: `false`)::
-+
---
-Whether to pass `--no-default-features` to cargo.
---
-[[rust-analyzer.cargo.sysroot]]rust-analyzer.cargo.sysroot (default: `"discover"`)::
-+
---
-Relative path to the sysroot, or "discover" to try to automatically find it via
-"rustc --print sysroot".
-
-Unsetting this disables sysroot loading.
-
-This option does not take effect until rust-analyzer is restarted.
---
-[[rust-analyzer.cargo.sysrootSrc]]rust-analyzer.cargo.sysrootSrc (default: `null`)::
-+
---
-Relative path to the sysroot library sources. If left unset, this will default to
-`{cargo.sysroot}/lib/rustlib/src/rust/library`.
-
-This option does not take effect until rust-analyzer is restarted.
---
-[[rust-analyzer.cargo.target]]rust-analyzer.cargo.target (default: `null`)::
-+
---
-Compilation target override (target tuple).
---
-[[rust-analyzer.cargo.targetDir]]rust-analyzer.cargo.targetDir (default: `null`)::
-+
---
-Optional path to a rust-analyzer specific target directory.
-This prevents rust-analyzer's `cargo check` and initial build-script and proc-macro
-building from locking the `Cargo.lock` at the expense of duplicating build artifacts.
-
-Set to `true` to use a subdirectory of the existing target directory or
-set to a path relative to the workspace to use that path.
---
-[[rust-analyzer.cfg.setTest]]rust-analyzer.cfg.setTest (default: `true`)::
-+
---
-Set `cfg(test)` for local crates. Defaults to true.
---
-[[rust-analyzer.checkOnSave]]rust-analyzer.checkOnSave (default: `true`)::
-+
---
-Run the check command for diagnostics on save.
---
-[[rust-analyzer.check.allTargets]]rust-analyzer.check.allTargets (default: `null`)::
-+
---
-Check all targets and tests (`--all-targets`). Defaults to
-`#rust-analyzer.cargo.allTargets#`.
---
-[[rust-analyzer.check.command]]rust-analyzer.check.command (default: `"check"`)::
-+
---
-Cargo command to use for `cargo check`.
---
-[[rust-analyzer.check.extraArgs]]rust-analyzer.check.extraArgs (default: `[]`)::
-+
---
-Extra arguments for `cargo check`.
---
-[[rust-analyzer.check.extraEnv]]rust-analyzer.check.extraEnv (default: `{}`)::
-+
---
-Extra environment variables that will be set when running `cargo check`.
-Extends `#rust-analyzer.cargo.extraEnv#`.
---
-[[rust-analyzer.check.features]]rust-analyzer.check.features (default: `null`)::
-+
---
-List of features to activate. Defaults to
-`#rust-analyzer.cargo.features#`.
-
-Set to `"all"` to pass `--all-features` to Cargo.
---
-[[rust-analyzer.check.ignore]]rust-analyzer.check.ignore (default: `[]`)::
-+
---
-List of `cargo check` (or other command specified in `check.command`) diagnostics to ignore.
-
-For example for `cargo check`: `dead_code`, `unused_imports`, `unused_variables`,...
---
-[[rust-analyzer.check.invocationStrategy]]rust-analyzer.check.invocationStrategy (default: `"per_workspace"`)::
-+
---
-Specifies the invocation strategy to use when running the check command.
-If `per_workspace` is set, the command will be executed for each workspace.
-If `once` is set, the command will be executed once.
-This config only has an effect when `#rust-analyzer.check.overrideCommand#`
-is set.
---
-[[rust-analyzer.check.noDefaultFeatures]]rust-analyzer.check.noDefaultFeatures (default: `null`)::
-+
---
-Whether to pass `--no-default-features` to Cargo. Defaults to
-`#rust-analyzer.cargo.noDefaultFeatures#`.
---
-[[rust-analyzer.check.overrideCommand]]rust-analyzer.check.overrideCommand (default: `null`)::
-+
---
-Override the command rust-analyzer uses instead of `cargo check` for
-diagnostics on save. The command is required to output json and
-should therefore include `--message-format=json` or a similar option
-(if your client supports the `colorDiagnosticOutput` experimental
-capability, you can use `--message-format=json-diagnostic-rendered-ansi`).
-
-If you're changing this because you're using some tool wrapping
-Cargo, you might also want to change
-`#rust-analyzer.cargo.buildScripts.overrideCommand#`.
-
-If there are multiple linked projects/workspaces, this command is invoked for
-each of them, with the working directory being the workspace root
-(i.e., the folder containing the `Cargo.toml`). This can be overwritten
-by changing `#rust-analyzer.check.invocationStrategy#`.
-
-If `$saved_file` is part of the command, rust-analyzer will pass
-the absolute path of the saved file to the provided command. This is
-intended to be used with non-Cargo build systems.
-Note that `$saved_file` is experimental and may be removed in the future.
-
-An example command would be:
-
-```bash
-cargo check --workspace --message-format=json --all-targets
-```
-.
---
-[[rust-analyzer.check.targets]]rust-analyzer.check.targets (default: `null`)::
-+
---
-Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.
-
-Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g.
-`["aarch64-apple-darwin", "x86_64-apple-darwin"]`.
-
-Aliased as `"checkOnSave.targets"`.
---
-[[rust-analyzer.check.workspace]]rust-analyzer.check.workspace (default: `true`)::
-+
---
-Whether `--workspace` should be passed to `cargo check`.
-If false, `-p <package>` will be passed instead if applicable. In case it is not, no
-check will be performed.
---
-[[rust-analyzer.completion.addSemicolonToUnit]]rust-analyzer.completion.addSemicolonToUnit (default: `true`)::
-+
---
-Whether to automatically add a semicolon when completing unit-returning functions.
-
-In `match` arms it completes a comma instead.
---
-[[rust-analyzer.completion.autoAwait.enable]]rust-analyzer.completion.autoAwait.enable (default: `true`)::
-+
---
-Toggles the additional completions that automatically show method calls and field accesses with `await` prefixed to them when completing on a future.
---
-[[rust-analyzer.completion.autoIter.enable]]rust-analyzer.completion.autoIter.enable (default: `true`)::
-+
---
-Toggles the additional completions that automatically show method calls with `iter()` or `into_iter()` prefixed to them when completing on a type that has them.
---
-[[rust-analyzer.completion.autoimport.enable]]rust-analyzer.completion.autoimport.enable (default: `true`)::
-+
---
-Toggles the additional completions that automatically add imports when completed.
-Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
---
-[[rust-analyzer.completion.autoimport.exclude]]rust-analyzer.completion.autoimport.exclude::
-+
---
-Default:
-----
-[
- {
- "path": "core::borrow::Borrow",
- "type": "methods"
- },
- {
- "path": "core::borrow::BorrowMut",
- "type": "methods"
- }
-]
-----
-A list of full paths to items to exclude from auto-importing completions.
-
-Traits in this list won't have their methods suggested in completions unless the trait
-is in scope.
-
-You can either specify a string path which defaults to type "always" or use the more verbose
-form `{ "path": "path::to::item", type: "always" }`.
-
-For traits the type "methods" can be used to only exclude the methods but not the trait itself.
-
-This setting also inherits `#rust-analyzer.completion.excludeTraits#`.
-
---
-[[rust-analyzer.completion.autoself.enable]]rust-analyzer.completion.autoself.enable (default: `true`)::
-+
---
-Toggles the additional completions that automatically show method calls and field accesses
-with `self` prefixed to them when inside a method.
---
-[[rust-analyzer.completion.callable.snippets]]rust-analyzer.completion.callable.snippets (default: `"fill_arguments"`)::
-+
---
-Whether to add parenthesis and argument snippets when completing function.
---
-[[rust-analyzer.completion.excludeTraits]]rust-analyzer.completion.excludeTraits (default: `[]`)::
-+
---
-A list of full paths to traits whose methods to exclude from completion.
-
-Methods from these traits won't be completed, even if the trait is in scope. However, they will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or `T where T: Trait`.
-
-Note that the trait themselves can still be completed.
---
-[[rust-analyzer.completion.fullFunctionSignatures.enable]]rust-analyzer.completion.fullFunctionSignatures.enable (default: `false`)::
-+
---
-Whether to show full function/method signatures in completion docs.
---
-[[rust-analyzer.completion.hideDeprecated]]rust-analyzer.completion.hideDeprecated (default: `false`)::
-+
---
-Whether to omit deprecated items from autocompletion. By default they are marked as deprecated but not hidden.
---
-[[rust-analyzer.completion.limit]]rust-analyzer.completion.limit (default: `null`)::
-+
---
-Maximum number of completions to return. If `None`, the limit is infinite.
---
-[[rust-analyzer.completion.postfix.enable]]rust-analyzer.completion.postfix.enable (default: `true`)::
-+
---
-Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
---
-[[rust-analyzer.completion.privateEditable.enable]]rust-analyzer.completion.privateEditable.enable (default: `false`)::
-+
---
-Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
---
-[[rust-analyzer.completion.snippets.custom]]rust-analyzer.completion.snippets.custom::
-+
---
-Default:
-----
-{
- "Ok": {
- "postfix": "ok",
- "body": "Ok(${receiver})",
- "description": "Wrap the expression in a `Result::Ok`",
- "scope": "expr"
- },
- "Box::pin": {
- "postfix": "pinbox",
- "body": "Box::pin(${receiver})",
- "requires": "std::boxed::Box",
- "description": "Put the expression into a pinned `Box`",
- "scope": "expr"
- },
- "Arc::new": {
- "postfix": "arc",
- "body": "Arc::new(${receiver})",
- "requires": "std::sync::Arc",
- "description": "Put the expression into an `Arc`",
- "scope": "expr"
- },
- "Some": {
- "postfix": "some",
- "body": "Some(${receiver})",
- "description": "Wrap the expression in an `Option::Some`",
- "scope": "expr"
- },
- "Err": {
- "postfix": "err",
- "body": "Err(${receiver})",
- "description": "Wrap the expression in a `Result::Err`",
- "scope": "expr"
- },
- "Rc::new": {
- "postfix": "rc",
- "body": "Rc::new(${receiver})",
- "requires": "std::rc::Rc",
- "description": "Put the expression into an `Rc`",
- "scope": "expr"
- }
-}
-----
-Custom completion snippets.
-
---
-[[rust-analyzer.completion.termSearch.enable]]rust-analyzer.completion.termSearch.enable (default: `false`)::
-+
---
-Whether to enable term search based snippets like `Some(foo.bar().baz())`.
---
-[[rust-analyzer.completion.termSearch.fuel]]rust-analyzer.completion.termSearch.fuel (default: `1000`)::
-+
---
-Term search fuel in "units of work" for autocompletion (Defaults to 1000).
---
-[[rust-analyzer.diagnostics.disabled]]rust-analyzer.diagnostics.disabled (default: `[]`)::
-+
---
-List of rust-analyzer diagnostics to disable.
---
-[[rust-analyzer.diagnostics.enable]]rust-analyzer.diagnostics.enable (default: `true`)::
-+
---
-Whether to show native rust-analyzer diagnostics.
---
-[[rust-analyzer.diagnostics.experimental.enable]]rust-analyzer.diagnostics.experimental.enable (default: `false`)::
-+
---
-Whether to show experimental rust-analyzer diagnostics that might
-have more false positives than usual.
---
-[[rust-analyzer.diagnostics.remapPrefix]]rust-analyzer.diagnostics.remapPrefix (default: `{}`)::
-+
---
-Map of prefixes to be substituted when parsing diagnostic file paths.
-This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.
---
-[[rust-analyzer.diagnostics.styleLints.enable]]rust-analyzer.diagnostics.styleLints.enable (default: `false`)::
-+
---
-Whether to run additional style lints.
---
-[[rust-analyzer.diagnostics.warningsAsHint]]rust-analyzer.diagnostics.warningsAsHint (default: `[]`)::
-+
---
-List of warnings that should be displayed with hint severity.
-
-The warnings will be indicated by faded text or three dots in code
-and will not show up in the `Problems Panel`.
---
-[[rust-analyzer.diagnostics.warningsAsInfo]]rust-analyzer.diagnostics.warningsAsInfo (default: `[]`)::
-+
---
-List of warnings that should be displayed with info severity.
-
-The warnings will be indicated by a blue squiggly underline in code
-and a blue icon in the `Problems Panel`.
---
-[[rust-analyzer.files.excludeDirs]]rust-analyzer.files.excludeDirs (default: `[]`)::
-+
---
-These directories will be ignored by rust-analyzer. They are
-relative to the workspace root, and globs are not supported. You may
-also need to add the folders to Code's `files.watcherExclude`.
---
-[[rust-analyzer.files.watcher]]rust-analyzer.files.watcher (default: `"client"`)::
-+
---
-Controls file watching implementation.
---
-[[rust-analyzer.highlightRelated.breakPoints.enable]]rust-analyzer.highlightRelated.breakPoints.enable (default: `true`)::
-+
---
-Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
---
-[[rust-analyzer.highlightRelated.closureCaptures.enable]]rust-analyzer.highlightRelated.closureCaptures.enable (default: `true`)::
-+
---
-Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure.
---
-[[rust-analyzer.highlightRelated.exitPoints.enable]]rust-analyzer.highlightRelated.exitPoints.enable (default: `true`)::
-+
---
-Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
---
-[[rust-analyzer.highlightRelated.references.enable]]rust-analyzer.highlightRelated.references.enable (default: `true`)::
-+
---
-Enables highlighting of related references while the cursor is on any identifier.
---
-[[rust-analyzer.highlightRelated.yieldPoints.enable]]rust-analyzer.highlightRelated.yieldPoints.enable (default: `true`)::
-+
---
-Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.
---
-[[rust-analyzer.hover.actions.debug.enable]]rust-analyzer.hover.actions.debug.enable (default: `true`)::
-+
---
-Whether to show `Debug` action. Only applies when
-`#rust-analyzer.hover.actions.enable#` is set.
---
-[[rust-analyzer.hover.actions.enable]]rust-analyzer.hover.actions.enable (default: `true`)::
-+
---
-Whether to show HoverActions in Rust files.
---
-[[rust-analyzer.hover.actions.gotoTypeDef.enable]]rust-analyzer.hover.actions.gotoTypeDef.enable (default: `true`)::
-+
---
-Whether to show `Go to Type Definition` action. Only applies when
-`#rust-analyzer.hover.actions.enable#` is set.
---
-[[rust-analyzer.hover.actions.implementations.enable]]rust-analyzer.hover.actions.implementations.enable (default: `true`)::
-+
---
-Whether to show `Implementations` action. Only applies when
-`#rust-analyzer.hover.actions.enable#` is set.
---
-[[rust-analyzer.hover.actions.references.enable]]rust-analyzer.hover.actions.references.enable (default: `false`)::
-+
---
-Whether to show `References` action. Only applies when
-`#rust-analyzer.hover.actions.enable#` is set.
---
-[[rust-analyzer.hover.actions.run.enable]]rust-analyzer.hover.actions.run.enable (default: `true`)::
-+
---
-Whether to show `Run` action. Only applies when
-`#rust-analyzer.hover.actions.enable#` is set.
---
-[[rust-analyzer.hover.actions.updateTest.enable]]rust-analyzer.hover.actions.updateTest.enable (default: `true`)::
-+
---
-Whether to show `Update Test` action. Only applies when
-`#rust-analyzer.hover.actions.enable#` and `#rust-analyzer.hover.actions.run.enable#` are set.
---
-[[rust-analyzer.hover.documentation.enable]]rust-analyzer.hover.documentation.enable (default: `true`)::
-+
---
-Whether to show documentation on hover.
---
-[[rust-analyzer.hover.documentation.keywords.enable]]rust-analyzer.hover.documentation.keywords.enable (default: `true`)::
-+
---
-Whether to show keyword hover popups. Only applies when
-`#rust-analyzer.hover.documentation.enable#` is set.
---
-[[rust-analyzer.hover.links.enable]]rust-analyzer.hover.links.enable (default: `true`)::
-+
---
-Use markdown syntax for links on hover.
---
-[[rust-analyzer.hover.maxSubstitutionLength]]rust-analyzer.hover.maxSubstitutionLength (default: `20`)::
-+
---
-Whether to show what types are used as generic arguments in calls etc. on hover, and what is their max length to show such types, beyond it they will be shown with ellipsis.
-
-This can take three values: `null` means "unlimited", the string `"hide"` means to not show generic substitutions at all, and a number means to limit them to X characters.
-
-The default is 20 characters.
---
-[[rust-analyzer.hover.memoryLayout.alignment]]rust-analyzer.hover.memoryLayout.alignment (default: `"hexadecimal"`)::
-+
---
-How to render the align information in a memory layout hover.
---
-[[rust-analyzer.hover.memoryLayout.enable]]rust-analyzer.hover.memoryLayout.enable (default: `true`)::
-+
---
-Whether to show memory layout data on hover.
---
-[[rust-analyzer.hover.memoryLayout.niches]]rust-analyzer.hover.memoryLayout.niches (default: `false`)::
-+
---
-How to render the niche information in a memory layout hover.
---
-[[rust-analyzer.hover.memoryLayout.offset]]rust-analyzer.hover.memoryLayout.offset (default: `"hexadecimal"`)::
-+
---
-How to render the offset information in a memory layout hover.
---
-[[rust-analyzer.hover.memoryLayout.size]]rust-analyzer.hover.memoryLayout.size (default: `"both"`)::
-+
---
-How to render the size information in a memory layout hover.
---
-[[rust-analyzer.hover.show.enumVariants]]rust-analyzer.hover.show.enumVariants (default: `5`)::
-+
---
-How many variants of an enum to display when hovering on. Show none if empty.
---
-[[rust-analyzer.hover.show.fields]]rust-analyzer.hover.show.fields (default: `5`)::
-+
---
-How many fields of a struct, variant or union to display when hovering on. Show none if empty.
---
-[[rust-analyzer.hover.show.traitAssocItems]]rust-analyzer.hover.show.traitAssocItems (default: `null`)::
-+
---
-How many associated items of a trait to display when hovering a trait.
---
-[[rust-analyzer.imports.granularity.enforce]]rust-analyzer.imports.granularity.enforce (default: `false`)::
-+
---
-Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
---
-[[rust-analyzer.imports.granularity.group]]rust-analyzer.imports.granularity.group (default: `"crate"`)::
-+
---
-How imports should be grouped into use statements.
---
-[[rust-analyzer.imports.group.enable]]rust-analyzer.imports.group.enable (default: `true`)::
-+
---
-Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines.
---
-[[rust-analyzer.imports.merge.glob]]rust-analyzer.imports.merge.glob (default: `true`)::
-+
---
-Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
---
-[[rust-analyzer.imports.preferNoStd]]rust-analyzer.imports.preferNoStd (default: `false`)::
-+
---
-Prefer to unconditionally use imports of the core and alloc crate, over the std crate.
---
-[[rust-analyzer.imports.preferPrelude]]rust-analyzer.imports.preferPrelude (default: `false`)::
-+
---
-Whether to prefer import paths containing a `prelude` module.
---
-[[rust-analyzer.imports.prefix]]rust-analyzer.imports.prefix (default: `"plain"`)::
-+
---
-The path structure for newly inserted paths to use.
---
-[[rust-analyzer.imports.prefixExternPrelude]]rust-analyzer.imports.prefixExternPrelude (default: `false`)::
-+
---
-Whether to prefix external (including std, core) crate imports with `::`. e.g. "use ::std::io::Read;".
---
-[[rust-analyzer.inlayHints.bindingModeHints.enable]]rust-analyzer.inlayHints.bindingModeHints.enable (default: `false`)::
-+
---
-Whether to show inlay type hints for binding modes.
---
-[[rust-analyzer.inlayHints.chainingHints.enable]]rust-analyzer.inlayHints.chainingHints.enable (default: `true`)::
-+
---
-Whether to show inlay type hints for method chains.
---
-[[rust-analyzer.inlayHints.closingBraceHints.enable]]rust-analyzer.inlayHints.closingBraceHints.enable (default: `true`)::
-+
---
-Whether to show inlay hints after a closing `}` to indicate what item it belongs to.
---
-[[rust-analyzer.inlayHints.closingBraceHints.minLines]]rust-analyzer.inlayHints.closingBraceHints.minLines (default: `25`)::
-+
---
-Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
-to always show them).
---
-[[rust-analyzer.inlayHints.closureCaptureHints.enable]]rust-analyzer.inlayHints.closureCaptureHints.enable (default: `false`)::
-+
---
-Whether to show inlay hints for closure captures.
---
-[[rust-analyzer.inlayHints.closureReturnTypeHints.enable]]rust-analyzer.inlayHints.closureReturnTypeHints.enable (default: `"never"`)::
-+
---
-Whether to show inlay type hints for return types of closures.
---
-[[rust-analyzer.inlayHints.closureStyle]]rust-analyzer.inlayHints.closureStyle (default: `"impl_fn"`)::
-+
---
-Closure notation in type and chaining inlay hints.
---
-[[rust-analyzer.inlayHints.discriminantHints.enable]]rust-analyzer.inlayHints.discriminantHints.enable (default: `"never"`)::
-+
---
-Whether to show enum variant discriminant hints.
---
-[[rust-analyzer.inlayHints.expressionAdjustmentHints.enable]]rust-analyzer.inlayHints.expressionAdjustmentHints.enable (default: `"never"`)::
-+
---
-Whether to show inlay hints for type adjustments.
---
-[[rust-analyzer.inlayHints.expressionAdjustmentHints.hideOutsideUnsafe]]rust-analyzer.inlayHints.expressionAdjustmentHints.hideOutsideUnsafe (default: `false`)::
-+
---
-Whether to hide inlay hints for type adjustments outside of `unsafe` blocks.
---
-[[rust-analyzer.inlayHints.expressionAdjustmentHints.mode]]rust-analyzer.inlayHints.expressionAdjustmentHints.mode (default: `"prefix"`)::
-+
---
-Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).
---
-[[rust-analyzer.inlayHints.genericParameterHints.const.enable]]rust-analyzer.inlayHints.genericParameterHints.const.enable (default: `true`)::
-+
---
-Whether to show const generic parameter name inlay hints.
---
-[[rust-analyzer.inlayHints.genericParameterHints.lifetime.enable]]rust-analyzer.inlayHints.genericParameterHints.lifetime.enable (default: `false`)::
-+
---
-Whether to show generic lifetime parameter name inlay hints.
---
-[[rust-analyzer.inlayHints.genericParameterHints.type.enable]]rust-analyzer.inlayHints.genericParameterHints.type.enable (default: `false`)::
-+
---
-Whether to show generic type parameter name inlay hints.
---
-[[rust-analyzer.inlayHints.implicitDrops.enable]]rust-analyzer.inlayHints.implicitDrops.enable (default: `false`)::
-+
---
-Whether to show implicit drop hints.
---
-[[rust-analyzer.inlayHints.implicitSizedBoundHints.enable]]rust-analyzer.inlayHints.implicitSizedBoundHints.enable (default: `false`)::
-+
---
-Whether to show inlay hints for the implied type parameter `Sized` bound.
---
-[[rust-analyzer.inlayHints.lifetimeElisionHints.enable]]rust-analyzer.inlayHints.lifetimeElisionHints.enable (default: `"never"`)::
-+
---
-Whether to show inlay type hints for elided lifetimes in function signatures.
---
-[[rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames]]rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames (default: `false`)::
-+
---
-Whether to prefer using parameter names as the name for elided lifetime hints if possible.
---
-[[rust-analyzer.inlayHints.maxLength]]rust-analyzer.inlayHints.maxLength (default: `25`)::
-+
---
-Maximum length for inlay hints. Set to null to have an unlimited length.
---
-[[rust-analyzer.inlayHints.parameterHints.enable]]rust-analyzer.inlayHints.parameterHints.enable (default: `true`)::
-+
---
-Whether to show function parameter name inlay hints at the call
-site.
---
-[[rust-analyzer.inlayHints.rangeExclusiveHints.enable]]rust-analyzer.inlayHints.rangeExclusiveHints.enable (default: `false`)::
-+
---
-Whether to show exclusive range inlay hints.
---
-[[rust-analyzer.inlayHints.reborrowHints.enable]]rust-analyzer.inlayHints.reborrowHints.enable (default: `"never"`)::
-+
---
-Whether to show inlay hints for compiler inserted reborrows.
-This setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.
---
-[[rust-analyzer.inlayHints.renderColons]]rust-analyzer.inlayHints.renderColons (default: `true`)::
-+
---
-Whether to render leading colons for type hints, and trailing colons for parameter hints.
---
-[[rust-analyzer.inlayHints.typeHints.enable]]rust-analyzer.inlayHints.typeHints.enable (default: `true`)::
-+
---
-Whether to show inlay type hints for variables.
---
-[[rust-analyzer.inlayHints.typeHints.hideClosureInitialization]]rust-analyzer.inlayHints.typeHints.hideClosureInitialization (default: `false`)::
-+
---
-Whether to hide inlay type hints for `let` statements that initialize to a closure.
-Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.
---
-[[rust-analyzer.inlayHints.typeHints.hideNamedConstructor]]rust-analyzer.inlayHints.typeHints.hideNamedConstructor (default: `false`)::
-+
---
-Whether to hide inlay type hints for constructors.
---
-[[rust-analyzer.interpret.tests]]rust-analyzer.interpret.tests (default: `false`)::
-+
---
-Enables the experimental support for interpreting tests.
---
-[[rust-analyzer.joinLines.joinAssignments]]rust-analyzer.joinLines.joinAssignments (default: `true`)::
-+
---
-Join lines merges consecutive declaration and initialization of an assignment.
---
-[[rust-analyzer.joinLines.joinElseIf]]rust-analyzer.joinLines.joinElseIf (default: `true`)::
-+
---
-Join lines inserts else between consecutive ifs.
---
-[[rust-analyzer.joinLines.removeTrailingComma]]rust-analyzer.joinLines.removeTrailingComma (default: `true`)::
-+
---
-Join lines removes trailing commas.
---
-[[rust-analyzer.joinLines.unwrapTrivialBlock]]rust-analyzer.joinLines.unwrapTrivialBlock (default: `true`)::
-+
---
-Join lines unwraps trivial blocks.
---
-[[rust-analyzer.lens.debug.enable]]rust-analyzer.lens.debug.enable (default: `true`)::
-+
---
-Whether to show `Debug` lens. Only applies when
-`#rust-analyzer.lens.enable#` is set.
---
-[[rust-analyzer.lens.enable]]rust-analyzer.lens.enable (default: `true`)::
-+
---
-Whether to show CodeLens in Rust files.
---
-[[rust-analyzer.lens.implementations.enable]]rust-analyzer.lens.implementations.enable (default: `true`)::
-+
---
-Whether to show `Implementations` lens. Only applies when
-`#rust-analyzer.lens.enable#` is set.
---
-[[rust-analyzer.lens.location]]rust-analyzer.lens.location (default: `"above_name"`)::
-+
---
-Where to render annotations.
---
-[[rust-analyzer.lens.references.adt.enable]]rust-analyzer.lens.references.adt.enable (default: `false`)::
-+
---
-Whether to show `References` lens for Struct, Enum, and Union.
-Only applies when `#rust-analyzer.lens.enable#` is set.
---
-[[rust-analyzer.lens.references.enumVariant.enable]]rust-analyzer.lens.references.enumVariant.enable (default: `false`)::
-+
---
-Whether to show `References` lens for Enum Variants.
-Only applies when `#rust-analyzer.lens.enable#` is set.
---
-[[rust-analyzer.lens.references.method.enable]]rust-analyzer.lens.references.method.enable (default: `false`)::
-+
---
-Whether to show `Method References` lens. Only applies when
-`#rust-analyzer.lens.enable#` is set.
---
-[[rust-analyzer.lens.references.trait.enable]]rust-analyzer.lens.references.trait.enable (default: `false`)::
-+
---
-Whether to show `References` lens for Trait.
-Only applies when `#rust-analyzer.lens.enable#` is set.
---
-[[rust-analyzer.lens.run.enable]]rust-analyzer.lens.run.enable (default: `true`)::
-+
---
-Whether to show `Run` lens. Only applies when
-`#rust-analyzer.lens.enable#` is set.
---
-[[rust-analyzer.lens.updateTest.enable]]rust-analyzer.lens.updateTest.enable (default: `true`)::
-+
---
-Whether to show `Update Test` lens. Only applies when
-`#rust-analyzer.lens.enable#` and `#rust-analyzer.lens.run.enable#` are set.
---
-[[rust-analyzer.linkedProjects]]rust-analyzer.linkedProjects (default: `[]`)::
-+
---
-Disable project auto-discovery in favor of explicitly specified set
-of projects.
-
-Elements must be paths pointing to `Cargo.toml`,
-`rust-project.json`, `.rs` files (which will be treated as standalone files) or JSON
-objects in `rust-project.json` format.
---
-[[rust-analyzer.lru.capacity]]rust-analyzer.lru.capacity (default: `null`)::
-+
---
-Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
---
-[[rust-analyzer.lru.query.capacities]]rust-analyzer.lru.query.capacities (default: `{}`)::
-+
---
-Sets the LRU capacity of the specified queries.
---
-[[rust-analyzer.notifications.cargoTomlNotFound]]rust-analyzer.notifications.cargoTomlNotFound (default: `true`)::
-+
---
-Whether to show `can't find Cargo.toml` error message.
---
-[[rust-analyzer.numThreads]]rust-analyzer.numThreads (default: `null`)::
-+
---
-How many worker threads in the main loop. The default `null` means to pick automatically.
---
-[[rust-analyzer.procMacro.attributes.enable]]rust-analyzer.procMacro.attributes.enable (default: `true`)::
-+
---
-Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
---
-[[rust-analyzer.procMacro.enable]]rust-analyzer.procMacro.enable (default: `true`)::
-+
---
-Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
---
-[[rust-analyzer.procMacro.ignored]]rust-analyzer.procMacro.ignored (default: `{}`)::
-+
---
-These proc-macros will be ignored when trying to expand them.
-
-This config takes a map of crate names with the exported proc-macro names to ignore as values.
---
-[[rust-analyzer.procMacro.server]]rust-analyzer.procMacro.server (default: `null`)::
-+
---
-Internal config, path to proc-macro server executable.
---
-[[rust-analyzer.references.excludeImports]]rust-analyzer.references.excludeImports (default: `false`)::
-+
---
-Exclude imports from find-all-references.
---
-[[rust-analyzer.references.excludeTests]]rust-analyzer.references.excludeTests (default: `false`)::
-+
---
-Exclude tests from find-all-references and call-hierarchy.
---
-[[rust-analyzer.runnables.command]]rust-analyzer.runnables.command (default: `null`)::
-+
---
-Command to be executed instead of 'cargo' for runnables.
---
-[[rust-analyzer.runnables.extraArgs]]rust-analyzer.runnables.extraArgs (default: `[]`)::
-+
---
-Additional arguments to be passed to cargo for runnables such as
-tests or binaries. For example, it may be `--release`.
---
-[[rust-analyzer.runnables.extraTestBinaryArgs]]rust-analyzer.runnables.extraTestBinaryArgs::
-+
---
-Default:
-----
-[
- "--show-output"
-]
-----
-Additional arguments to be passed through Cargo to launched tests, benchmarks, or
-doc-tests.
-
-Unless the launched target uses a
-[custom test harness](https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-harness-field),
-they will end up being interpreted as options to
-[`rustc`’s built-in test harness (“libtest”)](https://doc.rust-lang.org/rustc/tests/index.html#cli-arguments).
-
---
-[[rust-analyzer.rustc.source]]rust-analyzer.rustc.source (default: `null`)::
-+
---
-Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
-projects, or "discover" to try to automatically find it if the `rustc-dev` component
-is installed.
-
-Any project which uses rust-analyzer with the rustcPrivate
-crates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.
-
-This option does not take effect until rust-analyzer is restarted.
---
-[[rust-analyzer.rustfmt.extraArgs]]rust-analyzer.rustfmt.extraArgs (default: `[]`)::
-+
---
-Additional arguments to `rustfmt`.
---
-[[rust-analyzer.rustfmt.overrideCommand]]rust-analyzer.rustfmt.overrideCommand (default: `null`)::
-+
---
-Advanced option, fully override the command rust-analyzer uses for
-formatting. This should be the equivalent of `rustfmt` here, and
-not that of `cargo fmt`. The file contents will be passed on the
-standard input and the formatted result will be read from the
-standard output.
---
-[[rust-analyzer.rustfmt.rangeFormatting.enable]]rust-analyzer.rustfmt.rangeFormatting.enable (default: `false`)::
-+
---
-Enables the use of rustfmt's unstable range formatting command for the
-`textDocument/rangeFormatting` request. The rustfmt option is unstable and only
-available on a nightly build.
---
-[[rust-analyzer.semanticHighlighting.doc.comment.inject.enable]]rust-analyzer.semanticHighlighting.doc.comment.inject.enable (default: `true`)::
-+
---
-Inject additional highlighting into doc comments.
-
-When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
-doc links.
---
-[[rust-analyzer.semanticHighlighting.nonStandardTokens]]rust-analyzer.semanticHighlighting.nonStandardTokens (default: `true`)::
-+
---
-Whether the server is allowed to emit non-standard tokens and modifiers.
---
-[[rust-analyzer.semanticHighlighting.operator.enable]]rust-analyzer.semanticHighlighting.operator.enable (default: `true`)::
-+
---
-Use semantic tokens for operators.
-
-When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
-they are tagged with modifiers.
---
-[[rust-analyzer.semanticHighlighting.operator.specialization.enable]]rust-analyzer.semanticHighlighting.operator.specialization.enable (default: `false`)::
-+
---
-Use specialized semantic tokens for operators.
-
-When enabled, rust-analyzer will emit special token types for operator tokens instead
-of the generic `operator` token type.
---
-[[rust-analyzer.semanticHighlighting.punctuation.enable]]rust-analyzer.semanticHighlighting.punctuation.enable (default: `false`)::
-+
---
-Use semantic tokens for punctuation.
-
-When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
-they are tagged with modifiers or have a special role.
---
-[[rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang]]rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang (default: `false`)::
-+
---
-When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
-calls.
---
-[[rust-analyzer.semanticHighlighting.punctuation.specialization.enable]]rust-analyzer.semanticHighlighting.punctuation.specialization.enable (default: `false`)::
-+
---
-Use specialized semantic tokens for punctuation.
-
-When enabled, rust-analyzer will emit special token types for punctuation tokens instead
-of the generic `punctuation` token type.
---
-[[rust-analyzer.semanticHighlighting.strings.enable]]rust-analyzer.semanticHighlighting.strings.enable (default: `true`)::
-+
---
-Use semantic tokens for strings.
-
-In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
-By disabling semantic tokens for strings, other grammars can be used to highlight
-their contents.
---
-[[rust-analyzer.signatureInfo.detail]]rust-analyzer.signatureInfo.detail (default: `"full"`)::
-+
---
-Show full signature of the callable. Only shows parameters if disabled.
---
-[[rust-analyzer.signatureInfo.documentation.enable]]rust-analyzer.signatureInfo.documentation.enable (default: `true`)::
-+
---
-Show documentation.
---
-[[rust-analyzer.typing.triggerChars]]rust-analyzer.typing.triggerChars (default: `"=."`)::
-+
---
-Specify the characters allowed to invoke special on typing triggers.
-- typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing expression
-- typing `=` between two expressions adds `;` when in statement position
-- typing `=` to turn an assignment into an equality comparison removes `;` when in expression position
-- typing `.` in a chain method call auto-indents
-- typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the expression
-- typing `{` in a use item adds a closing `}` in the right place
-- typing `>` to complete a return type `->` will insert a whitespace after it
-- typing `<` in a path or type position inserts a closing `>` after the path or type.
---
-[[rust-analyzer.vfs.extraIncludes]]rust-analyzer.vfs.extraIncludes (default: `[]`)::
-+
---
-Additional paths to include in the VFS. Generally for code that is
-generated or otherwise managed by a build system outside of Cargo,
-though Cargo might be the eventual consumer.
---
-[[rust-analyzer.workspace.discoverConfig]]rust-analyzer.workspace.discoverConfig (default: `null`)::
-+
---
-Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].
-
-[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.
-`progress_label` is used for the title in progress indicators, whereas `files_to_watch`
-is used to determine which build system-specific files should be watched in order to
-reload rust-analyzer.
-
-Below is an example of a valid configuration:
-```json
-"rust-analyzer.workspace.discoverConfig": {
- "command": [
- "rust-project",
- "develop-json"
- ],
- "progressLabel": "rust-analyzer",
- "filesToWatch": [
- "BUCK"
- ]
-}
-```
-
-## On `DiscoverWorkspaceConfig::command`
-
-**Warning**: This format is provisional and subject to change.
-
-[`DiscoverWorkspaceConfig::command`] *must* return a JSON object
-corresponding to `DiscoverProjectData::Finished`:
-
-```norun
-#[derive(Debug, Clone, Deserialize, Serialize)]
-#[serde(tag = "kind")]
-#[serde(rename_all = "snake_case")]
-enum DiscoverProjectData {
- Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },
- Error { error: String, source: Option<String> },
- Progress { message: String },
-}
-```
-
-As JSON, `DiscoverProjectData::Finished` is:
-
-```json
-{
- // the internally-tagged representation of the enum.
- "kind": "finished",
- // the file used by a non-Cargo build system to define
- // a package or target.
- "buildfile": "rust-analyzer/BUILD",
- // the contents of a rust-project.json, elided for brevity
- "project": {
- "sysroot": "foo",
- "crates": []
- }
-}
-```
-
-It is encouraged, but not required, to use the other variants on
-`DiscoverProjectData` to provide a more polished end-user experience.
-
-`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`,
-which will be substituted with the JSON-serialized form of the following
-enum:
-
-```norun
-#[derive(PartialEq, Clone, Debug, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub enum DiscoverArgument {
- Path(AbsPathBuf),
- Buildfile(AbsPathBuf),
-}
-```
-
-The JSON representation of `DiscoverArgument::Path` is:
-
-```json
-{
- "path": "src/main.rs"
-}
-```
-
-Similarly, the JSON representation of `DiscoverArgument::Buildfile` is:
-
-```
-{
- "buildfile": "BUILD"
-}
-```
-
-`DiscoverArgument::Path` is used to find and generate a `rust-project.json`,
-and therefore, a workspace, whereas `DiscoverArgument::Buildfile` is used
-to update an existing workspace. As a reference for implementors,
-buck2's `rust-project` will likely be useful:
-https://github.com/facebook/buck2/tree/main/integrations/rust-project.
---
-[[rust-analyzer.workspace.symbol.search.kind]]rust-analyzer.workspace.symbol.search.kind (default: `"only_types"`)::
-+
---
-Workspace symbol search kind.
---
-[[rust-analyzer.workspace.symbol.search.limit]]rust-analyzer.workspace.symbol.search.limit (default: `128`)::
-+
---
-Limits the number of items returned from a workspace symbol search (Defaults to 128).
-Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.
-Other clients require all results upfront and might require a higher limit.
---
-[[rust-analyzer.workspace.symbol.search.scope]]rust-analyzer.workspace.symbol.search.scope (default: `"workspace"`)::
-+
---
-Workspace symbol search scope.
---
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc
deleted file mode 100644
index 4a2a6f2e36..0000000000
--- a/docs/user/manual.adoc
+++ /dev/null
@@ -1,1121 +0,0 @@
-= User Manual
-:toc: preamble
-:sectanchors:
-:page-layout: post
-:icons: font
-:source-highlighter: rouge
-:experimental:
-
-////
-IMPORTANT: the master copy of this document lives in the https://github.com/rust-lang/rust-analyzer repository
-////
-
-At its core, rust-analyzer is a *library* for semantic analysis of Rust code as it changes over time.
-This manual focuses on a specific usage of the library -- running it as part of a server that implements the
-https://microsoft.github.io/language-server-protocol/[Language Server Protocol] (LSP).
-The LSP allows various code editors, like VS Code, Emacs or Vim, to implement semantic features like completion or goto definition by talking to an external language server process.
-
-[TIP]
-====
-[.lead]
-To improve this document, send a pull request: +
-https://github.com/rust-lang/rust-analyzer/blob/master/docs/user/manual.adoc[https://github.com/rust-analyzer/.../manual.adoc]
-
-The manual is written in https://asciidoc.org[AsciiDoc] and includes some extra files which are generated from the source code. Run `cargo test` and `cargo xtask codegen` to create these and then `asciidoctor manual.adoc` to create an HTML copy.
-====
-
-If you have questions about using rust-analyzer, please ask them in the https://users.rust-lang.org/c/ide/14["`IDEs and Editors`"] topic of Rust users forum.
-
-== Installation
-
-In theory, one should be able to just install the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> and have it automatically work with any editor.
-We are not there yet, so some editor specific setup is required.
-
-Additionally, rust-analyzer needs the sources of the standard library.
-If the source code is not present, rust-analyzer will attempt to install it automatically.
-
-To add the sources manually, run the following command:
-
-```bash
-$ rustup component add rust-src
-```
-
-=== Toolchain
-
-Only the latest stable standard library source is officially supported for use with rust-analyzer.
-If you are using an older toolchain or have an override set, rust-analyzer may fail to understand the Rust source.
-You will either need to update your toolchain or use an older version of rust-analyzer that is compatible with your toolchain.
-
-If you are using an override in your project, you can still force rust-analyzer to use the stable toolchain via the environment variable `RUSTUP_TOOLCHAIN`.
-For example, with VS Code or coc-rust-analyzer:
-
-[source,json]
-----
-{ "rust-analyzer.server.extraEnv": { "RUSTUP_TOOLCHAIN": "stable" } }
-----
-
-=== VS Code
-
-This is the best supported editor at the moment.
-The rust-analyzer plugin for VS Code is maintained
-https://github.com/rust-lang/rust-analyzer/tree/master/editors/code[in tree].
-
-You can install the latest release of the plugin from
-https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer[the marketplace].
-
-Note that the plugin may cause conflicts with the
-https://marketplace.visualstudio.com/items?itemName=rust-lang.rust[previous official Rust plugin].
-The latter is no longer maintained and should be uninstalled.
-
-The server binary is stored in the extension install directory, which starts with `rust-lang.rust-analyzer-` and is located under:
-
-* Linux: `~/.vscode/extensions`
-* Linux (Remote, such as WSL): `~/.vscode-server/extensions`
-* macOS: `~/.vscode/extensions`
-* Windows: `%USERPROFILE%\.vscode\extensions`
-
-As an exception, on NixOS, the extension makes a copy of the server and stores it under `~/.config/Code/User/globalStorage/rust-lang.rust-analyzer`.
-
-Note that we only support the two most recent versions of VS Code.
-
-==== Updates
-
-The extension will be updated automatically as new versions become available.
-It will ask your permission to download the matching language server version binary if needed.
-
-===== Nightly
-
-We ship nightly releases for VS Code.
-To help us out by testing the newest code, you can enable pre-release versions in the Code extension page.
-
-==== Manual installation
-
-Alternatively, download a VSIX corresponding to your platform from the
-https://github.com/rust-lang/rust-analyzer/releases[releases] page.
-
-Install the extension with the `Extensions: Install from VSIX` command within VS Code, or from the command line via:
-[source]
-----
-$ code --install-extension /path/to/rust-analyzer.vsix
-----
-
-If you are running an unsupported platform, you can install `rust-analyzer-no-server.vsix` and compile or obtain a server binary.
-Copy the server anywhere, then add the path to your settings.json, for example:
-[source,json]
-----
-{ "rust-analyzer.server.path": "~/.local/bin/rust-analyzer-linux" }
-----
-
-==== Building From Source
-
-Both the server and the Code plugin can be installed from source:
-
-[source]
-----
-$ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
-$ cargo xtask install
-----
-
-You'll need Cargo, nodejs (matching a supported version of VS Code) and npm for this.
-
-Note that installing via `xtask install` does not work for VS Code Remote, instead you'll need to install the `.vsix` manually.
-
-If you're not using Code, you can compile and install only the LSP server:
-
-[source]
-----
-$ cargo xtask install --server
-----
-
-Make sure that `.cargo/bin` is in `$PATH` and precedes paths where `rust-analyzer` may also be installed.
-Specifically, `rustup` includes a proxy called `rust-analyzer`, which can cause problems if you're planning to use a source build or even a downloaded binary.
-
-=== rust-analyzer Language Server Binary
-
-Other editors generally require the `rust-analyzer` binary to be in `$PATH`.
-You can download pre-built binaries from the https://github.com/rust-lang/rust-analyzer/releases[releases] page.
-You will need to uncompress and rename the binary for your platform, e.g. from `rust-analyzer-aarch64-apple-darwin.gz` on Mac OS to `rust-analyzer`, make it executable, then move it into a directory in your `$PATH`.
-
-On Linux to install the `rust-analyzer` binary into `~/.local/bin`, these commands should work:
-
-[source,bash]
-----
-$ mkdir -p ~/.local/bin
-$ curl -L https://github.com/rust-lang/rust-analyzer/releases/latest/download/rust-analyzer-x86_64-unknown-linux-gnu.gz | gunzip -c - > ~/.local/bin/rust-analyzer
-$ chmod +x ~/.local/bin/rust-analyzer
-----
-
-Make sure that `~/.local/bin` is listed in the `$PATH` variable and use the appropriate URL if you're not on a `x86-64` system.
-
-You don't have to use `~/.local/bin`, any other path like `~/.cargo/bin` or `/usr/local/bin` will work just as well.
-
-Alternatively, you can install it from source using the command below.
-You'll need the latest stable version of the Rust toolchain.
-
-[source,bash]
-----
-$ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
-$ cargo xtask install --server
-----
-
-If your editor can't find the binary even though the binary is on your `$PATH`, the likely explanation is that it doesn't see the same `$PATH` as the shell, see https://github.com/rust-lang/rust-analyzer/issues/1811[this issue].
-On Unix, running the editor from a shell or changing the `.desktop` file to set the environment should help.
-
-==== rustup
-
-`rust-analyzer` is available in `rustup`:
-
-[source,bash]
-----
-$ rustup component add rust-analyzer
-----
-
-==== Arch Linux
-
-The `rust-analyzer` binary can be installed from the repos or AUR (Arch User Repository):
-
-- https://www.archlinux.org/packages/extra/x86_64/rust-analyzer/[`rust-analyzer`] (built from latest tagged source)
-- https://aur.archlinux.org/packages/rust-analyzer-git[`rust-analyzer-git`] (latest Git version)
-
-Install it with pacman, for example:
-
-[source,bash]
-----
-$ pacman -S rust-analyzer
-----
-
-==== Gentoo Linux
-
-There are two ways to install `rust-analyzer` under Gentoo:
-
-- when installing `dev-lang/rust` or `dev-lang/rust-bin`, enable the `rust-analyzer` and `rust-src` USE flags
-- use the `rust-analyzer` component in `rustup` (see instructions above)
-
-Note that in both cases, the version installed lags for a couple of months behind the official releases on GitHub.
-To obtain a newer one, you can download a binary from GitHub Releases or build it from source.
-
-==== macOS
-
-The `rust-analyzer` binary can be installed via https://brew.sh/[Homebrew].
-
-[source,bash]
-----
-$ brew install rust-analyzer
-----
-
-==== Windows
-
-It is recommended to install the latest Microsoft Visual C++ Redistributable prior to installation.
-Download links can be found
-https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist[here].
-
-=== VS Code or VSCodium in Flatpak
-
-Setting up `rust-analyzer` with a Flatpak version of Code is not trivial because of the Flatpak sandbox.
-While the sandbox can be disabled for some directories, `/usr/bin` will always be mounted under `/run/host/usr/bin`.
-This prevents access to the system's C compiler, a system-wide installation of Rust, or any other libraries you might want to link to.
-Some compilers and libraries can be acquired as Flatpak SDKs, such as `org.freedesktop.Sdk.Extension.rust-stable` or `org.freedesktop.Sdk.Extension.llvm15`.
-
-If you use a Flatpak SDK for Rust, it must be in your `PATH`:
-
- * install the SDK extensions with `flatpak install org.freedesktop.Sdk.Extension.{llvm15,rust-stable}//23.08`
- * enable SDK extensions in the editor with the environment variable `FLATPAK_ENABLE_SDK_EXT=llvm15,rust-stable` (this can be done using flatseal or `flatpak override`)
-
-If you want to use Flatpak in combination with `rustup`, the following steps might help:
-
- - both Rust and `rustup` have to be installed using https://rustup.rs. Distro packages _will not_ work.
- - you need to launch Code, open a terminal and run `echo $PATH`
- - using https://flathub.org/apps/details/com.github.tchx84.Flatseal[Flatseal], you must add an environment variable called `PATH`.
- Set its value to the output from above, appending `:~/.cargo/bin`, where `~` is the path to your home directory.
- You must replace `~`, as it won't be expanded otherwise.
- - while Flatseal is open, you must enable access to "All user files"
-
-A C compiler should already be available via `org.freedesktop.Sdk`.
-Any other tools or libraries you will need to acquire from Flatpak.
-
-=== Emacs
-
-Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
-
-To use `rust-analyzer`, you need to install and enable one of the two popular LSP client implementations for Emacs, https://github.com/joaotavora/eglot[Eglot] or https://github.com/emacs-lsp/lsp-mode[LSP Mode]. Both enable `rust-analyzer` by default in rust buffers if it is available.
-
-==== Eglot
-
-Eglot is the more minimalistic and lightweight LSP client for Emacs, integrates well with existing Emacs functionality and is built into Emacs starting from release 29.
-
-After installing Eglot, e.g. via `M-x package-install` (not needed from Emacs 29), you can enable it via the `M-x eglot` command or load it automatically in `rust-mode` via
-
-[source,emacs-lisp]
-----
-(add-hook 'rust-mode-hook 'eglot-ensure)
-----
-
-To enable clippy, you will need to configure the initialization options to pass the `check.command` setting.
-
-[source,emacs-lisp]
-----
-(add-to-list 'eglot-server-programs
- '((rust-ts-mode rust-mode) .
- ("rust-analyzer" :initializationOptions (:check (:command "clippy")))))
-----
-
-For more detailed instructions and options see the https://joaotavora.github.io/eglot[Eglot manual] (also available from Emacs via `M-x info`) and the
-https://github.com/joaotavora/eglot/blob/master/README.md[Eglot readme].
-
-Eglot does not support the rust-analyzer extensions to the language-server protocol and does not aim to do so in the future. The https://github.com/nemethf/eglot-x#rust-analyzer-extensions[eglot-x] package adds experimental support for those LSP extensions.
-
-==== LSP Mode
-
-LSP-mode is the original LSP-client for emacs. Compared to Eglot it has a larger codebase and supports more features, like LSP protocol extensions.
-With extension packages like https://github.com/emacs-lsp/lsp-mode[LSP UI] it offers a lot of visual eyecandy.
-Further it integrates well with https://github.com/emacs-lsp/dap-mode[DAP mode] for support of the Debug Adapter Protocol.
-
-You can install LSP-mode via `M-x package-install` and then run it via the `M-x lsp` command or load it automatically in rust buffers with
-
-[source,emacs-lisp]
-----
-(add-hook 'rust-mode-hook 'lsp-deferred)
-----
-
-For more information on how to set up LSP mode and its extension package see the instructions in the https://emacs-lsp.github.io/lsp-mode/page/installation[LSP mode manual].
-Also see the https://emacs-lsp.github.io/lsp-mode/page/lsp-rust-analyzer/[rust-analyzer section] for `rust-analyzer` specific options and commands, which you can optionally bind to keys.
-
-Note the excellent https://robert.kra.hn/posts/2021-02-07_rust-with-emacs/[guide] from https://github.com/rksm[@rksm] on how to set-up Emacs for Rust development with LSP mode and several other packages.
-
-=== Vim/Neovim
-
-Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
-Not needed if the extension can install/update it on its own, coc-rust-analyzer is one example.
-
-There are several LSP client implementations for Vim or Neovim:
-
-==== coc-rust-analyzer
-
-1. Install coc.nvim by following the instructions at
- https://github.com/neoclide/coc.nvim[coc.nvim]
- (Node.js required)
-2. Run `:CocInstall coc-rust-analyzer` to install
- https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer],
- this extension implements _most_ of the features supported in the VSCode extension:
- * automatically install and upgrade stable/nightly releases
- * same configurations as VSCode extension, `rust-analyzer.server.path`, `rust-analyzer.cargo.features` etc.
- * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc.
- * inlay hints for variables and method chaining, _Neovim Only_
-
-Note: for code actions, use `coc-codeaction-cursor` and `coc-codeaction-selected`; `coc-codeaction` and `coc-codeaction-line` are unlikely to be useful.
-
-==== LanguageClient-neovim
-
-1. Install LanguageClient-neovim by following the instructions
- https://github.com/autozimu/LanguageClient-neovim[here]
- * The GitHub project wiki has extra tips on configuration
-
-2. Configure by adding this to your Vim/Neovim config file (replacing the existing Rust-specific line if it exists):
-+
-[source,vim]
-----
-let g:LanguageClient_serverCommands = {
-\ 'rust': ['rust-analyzer'],
-\ }
-----
-
-==== YouCompleteMe
-
-Install YouCompleteMe by following the instructions
- https://github.com/ycm-core/YouCompleteMe#installation[here].
-
-rust-analyzer is the default in YCM; it should work out of the box.
-
-==== ALE
-
-To use the LSP server in https://github.com/dense-analysis/ale[ale]:
-
-[source,vim]
-----
-let g:ale_linters = {'rust': ['analyzer']}
-----
-
-==== nvim-lsp
-
-Neovim 0.5 has built-in language server support.
-For a quick start configuration of rust-analyzer, use https://github.com/neovim/nvim-lspconfig#rust_analyzer[neovim/nvim-lspconfig].
-Once `neovim/nvim-lspconfig` is installed, use `+lua require'lspconfig'.rust_analyzer.setup({})+` in your `init.vim`.
-
-You can also pass LSP settings to the server:
-
-[source,vim]
-----
-lua << EOF
-local lspconfig = require'lspconfig'
-
-local on_attach = function(client)
- require'completion'.on_attach(client)
-end
-
-lspconfig.rust_analyzer.setup({
- on_attach = on_attach,
- settings = {
- ["rust-analyzer"] = {
- imports = {
- granularity = {
- group = "module",
- },
- prefix = "self",
- },
- cargo = {
- buildScripts = {
- enable = true,
- },
- },
- procMacro = {
- enable = true
- },
- }
- }
-})
-EOF
-----
-
-If you're running Neovim 0.10 or later, you can enable inlay hints via `on_attach`:
-
-[source,vim]
-----
-lspconfig.rust_analyzer.setup({
- on_attach = function(client, bufnr)
- vim.lsp.inlay_hint.enable(true, { bufnr = bufnr })
- end
-})
-----
-
-Note that the hints are only visible after `rust-analyzer` has finished loading **and** you have to edit the file to trigger a re-render.
-
-See https://sharksforarms.dev/posts/neovim-rust/ for more tips on getting started.
-
-Check out https://github.com/mrcjkb/rustaceanvim for a batteries included rust-analyzer setup for Neovim.
-
-==== vim-lsp
-
-vim-lsp is installed by following https://github.com/prabirshrestha/vim-lsp[the plugin instructions].
-It can be as simple as adding this line to your `.vimrc`:
-
-[source,vim]
-----
-Plug 'prabirshrestha/vim-lsp'
-----
-
-Next you need to register the `rust-analyzer` binary.
-If it is available in `$PATH`, you may want to add this to your `.vimrc`:
-
-[source,vim]
-----
-if executable('rust-analyzer')
- au User lsp_setup call lsp#register_server({
- \ 'name': 'Rust Language Server',
- \ 'cmd': {server_info->['rust-analyzer']},
- \ 'whitelist': ['rust'],
- \ })
-endif
-----
-
-There is no dedicated UI for the server configuration, so you would need to send any options as a value of the `initialization_options` field, as described in the <<configuration,Configuration>> section.
-Here is an example of how to enable the proc-macro support:
-
-[source,vim]
-----
-if executable('rust-analyzer')
- au User lsp_setup call lsp#register_server({
- \ 'name': 'Rust Language Server',
- \ 'cmd': {server_info->['rust-analyzer']},
- \ 'whitelist': ['rust'],
- \ 'initialization_options': {
- \ 'cargo': {
- \ 'buildScripts': {
- \ 'enable': v:true,
- \ },
- \ },
- \ 'procMacro': {
- \ 'enable': v:true,
- \ },
- \ },
- \ })
-endif
-----
-
-=== Sublime Text
-
-==== Sublime Text 4:
-* Follow the instructions in link:https://github.com/sublimelsp/LSP-rust-analyzer[LSP-rust-analyzer].
-
-NOTE: Install link:https://packagecontrol.io/packages/LSP-file-watcher-chokidar[LSP-file-watcher-chokidar] to enable file watching (`workspace/didChangeWatchedFiles`).
-
-==== Sublime Text 3:
-* Install the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
-* Install the link:https://packagecontrol.io/packages/LSP[LSP package].
-* From the command palette, run `LSP: Enable Language Server Globally` and select `rust-analyzer`.
-
-If it worked, you should see "rust-analyzer, Line X, Column Y" on the left side of the status bar, and after waiting a bit, functionalities like tooltips on hovering over variables should become available.
-
-If you get an error saying `No such file or directory: 'rust-analyzer'`, see the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> section on installing the language server binary.
-
-=== GNOME Builder
-
-GNOME Builder 3.37.1 and newer has native `rust-analyzer` support.
-If the LSP binary is not available, GNOME Builder can install it when opening a Rust file.
-
-
-=== Eclipse IDE
-
-Support for Rust development in the Eclipse IDE is provided by link:https://github.com/eclipse/corrosion[Eclipse Corrosion].
-If available in PATH or in some standard location, `rust-analyzer` is detected and powers editing of Rust files without further configuration.
-If `rust-analyzer` is not detected, Corrosion will prompt you for configuration of your Rust toolchain and language server with a link to the __Window > Preferences > Rust__ preference page; from here a button allows to download and configure `rust-analyzer`, but you can also reference another installation.
-You'll need to close and reopen all .rs and Cargo files, or to restart the IDE, for this change to take effect.
-
-=== Kate Text Editor
-
-Support for the language server protocol is built into Kate through the LSP plugin, which is included by default.
-It is preconfigured to use rust-analyzer for Rust sources since Kate 21.12.
-
-To change rust-analyzer config options, start from the following example and put it into Kate's "User Server Settings" tab (located under the LSP Client settings):
-[source,json]
-----
-{
- "servers": {
- "rust": {
- "initializationOptions": {
- "cachePriming": {
- "enable": false
- },
- "check": {
- "allTargets": false
- },
- "checkOnSave": false
- }
- }
- }
-}
-----
-Then click on apply, and restart the LSP server for your rust project.
-
-=== juCi++
-
-https://gitlab.com/cppit/jucipp[juCi++] has built-in support for the language server protocol, and since version 1.7.0 offers installation of both Rust and rust-analyzer when opening a Rust file.
-
-=== Kakoune
-
-https://kakoune.org/[Kakoune] supports LSP with the help of https://github.com/kak-lsp/kak-lsp[`kak-lsp`].
-Follow the https://github.com/kak-lsp/kak-lsp#installation[instructions] to install `kak-lsp`.
-To configure `kak-lsp`, refer to the https://github.com/kak-lsp/kak-lsp#configuring-kak-lsp[configuration section] which is basically about copying the https://github.com/kak-lsp/kak-lsp/blob/master/kak-lsp.toml[configuration file] in the right place (latest versions should use `rust-analyzer` by default).
-
-Finally, you need to configure Kakoune to talk to `kak-lsp` (see https://github.com/kak-lsp/kak-lsp#usage[Usage section]).
-A basic configuration will only get you LSP but you can also activate inlay diagnostics and auto-formatting on save.
-The following might help you get all of this.
-
-[source,txt]
-----
-eval %sh{kak-lsp --kakoune -s $kak_session} # Not needed if you load it with plug.kak.
-hook global WinSetOption filetype=rust %{
- # Enable LSP
- lsp-enable-window
-
- # Auto-formatting on save
- hook window BufWritePre .* lsp-formatting-sync
-
- # Configure inlay hints (only on save)
- hook window -group rust-inlay-hints BufWritePost .* rust-analyzer-inlay-hints
- hook -once -always window WinSetOption filetype=.* %{
- remove-hooks window rust-inlay-hints
- }
-}
-----
-
-=== Helix
-
-https://docs.helix-editor.com/[Helix] supports LSP by default.
-However, it won't install `rust-analyzer` automatically.
-You can follow instructions for installing <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
-
-[#visual-studio]
-=== [[visual-studio-2022]]Visual Studio 2022
-
-There are multiple rust-analyzer extensions for Visual Studio 2022 on Windows:
-
-==== rust-analyzer.vs
-
-(License: Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International)
-
-https://marketplace.visualstudio.com/items?itemName=kitamstudios.RustAnalyzer[Visual Studio Marketplace]
-
-https://github.com/kitamstudios/rust-analyzer/[GitHub]
-
-Support for Rust development in the Visual Studio IDE is enabled by the link:https://marketplace.visualstudio.com/items?itemName=kitamstudios.RustAnalyzer[rust-analyzer] package. Either click on the download link or install from IDE's extension manager.
-For now link:https://visualstudio.microsoft.com/downloads/[Visual Studio 2022] is required. All editions are supported viz. Community, Professional & Enterprise.
-The package aims to provide 0-friction installation and therefore comes loaded with most things required including rust-analyzer binary. If anything it needs is missing, appropriate errors / warnings will guide the user. E.g. cargo.exe needs to be in path and the package will tell you as much.
-This package is under rapid active development. So if you encounter any issues please file it at link:https://github.com/kitamstudios/rust-analyzer/[rust-analyzer.vs].
-
-==== VS_RustAnalyzer
-
-(License: GPL)
-
-https://marketplace.visualstudio.com/items?itemName=cchharris.vsrustanalyzer[Visual Studio Marketplace]
-
-https://github.com/cchharris/VS-RustAnalyzer[GitHub]
-
-==== SourceGear Rust
-
-(License: closed source)
-
-https://marketplace.visualstudio.com/items?itemName=SourceGear.SourceGearRust[Visual Studio Marketplace]
-
-https://github.com/sourcegear/rust-vs-extension[GitHub (docs, issues, discussions)]
-
-* Free (no-cost)
-* Supports all editions of Visual Studio 2022 on Windows: Community, Professional, or Enterprise
-
-=== Lapce
-
-https://lapce.dev/[Lapce] has a Rust plugin which you can install directly.
-Unfortunately, it downloads an old version of `rust-analyzer`, but you can set the server path under Settings.
-
-=== Crates
-
-There is a package named `ra_ap_rust-analyzer` available on https://crates.io/crates/ra_ap_rust-analyzer[crates.io], for someone who wants to use it programmatically.
-
-For more details, see https://github.com/rust-lang/rust-analyzer/blob/master/.github/workflows/autopublish.yaml[the publish workflow].
-
-=== Zed
-
-https://zed.dev[Zed] has native `rust-analyzer` support.
-If the LSP binary is not available, Zed can install it when opening a Rust file.
-
-== Troubleshooting
-
-Start with looking at the rust-analyzer version.
-Try **rust-analyzer: Show RA Version** in VS Code (using **Command Palette** feature typically activated by Ctrl+Shift+P) or `rust-analyzer --version` in the command line.
-If the date is more than a week ago, it's better to update rust-analyzer version.
-
-The next thing to check would be panic messages in rust-analyzer's log.
-Log messages are printed to stderr, in VS Code you can see them in the `Output > Rust Analyzer Language Server` tab of the panel.
-To see more logs, set the `RA_LOG=info` environment variable, this can be done either by setting the environment variable manually or by using `rust-analyzer.server.extraEnv`, note that both of these approaches require the server to be restarted.
-
-To fully capture LSP messages between the editor and the server, run the `rust-analyzer: Toggle LSP Logs` command and check
-`Output > Rust Analyzer Language Server Trace`.
-
-The root cause for many "`nothing works`" problems is that rust-analyzer fails to understand the project structure.
-To debug that, first note the `rust-analyzer` section in the status bar.
-If it has an error icon and is red, that's the problem (hovering over it will show a somewhat helpful error message).
-**rust-analyzer: Status** prints dependency information for the current file.
-Finally, `RA_LOG=project_model=debug` enables verbose logs during project loading.
-
-If rust-analyzer outright crashes, try running `rust-analyzer analysis-stats /path/to/project/directory/` on the command line.
-This command type checks the whole project in batch mode bypassing LSP machinery.
-
-When filing issues, it is useful (but not necessary) to try to minimize examples.
-An ideal bug reproduction looks like this:
-
-```bash
-$ git clone https://github.com/username/repo.git && cd repo && git switch --detach commit-hash
-$ rust-analyzer --version
-rust-analyzer dd12184e4 2021-05-08 dev
-$ rust-analyzer analysis-stats .
-💀 💀 💀
-```
-
-It is especially useful when the `repo` doesn't use external crates or the standard library.
-
-If you want to go as far as to modify the source code to debug the problem, be sure to take a look at the
-https://github.com/rust-lang/rust-analyzer/tree/master/docs/dev[dev docs]!
-
-== Configuration
-
-**Source:** https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/config.rs[config.rs]
-
-The <<installation,Installation>> section contains details on configuration for some of the editors.
-In general `rust-analyzer` is configured via LSP messages, which means that it's up to the editor to decide on the exact format and location of configuration files.
-
-Some clients, such as <<vs-code,VS Code>> or <<coc-rust-analyzer,COC plugin in Vim>> provide `rust-analyzer` specific configuration UIs. Others may require you to know a bit more about the interaction with `rust-analyzer`.
-
-For the latter category, it might help to know that the initial configuration is specified as a value of the `initializationOptions` field of the https://microsoft.github.io/language-server-protocol/specifications/specification-current/#initialize[`InitializeParams` message, in the LSP protocol].
-The spec says that the field type is `any?`, but `rust-analyzer` is looking for a JSON object that is constructed using settings from the list below.
-Name of the setting, ignoring the `rust-analyzer.` prefix, is used as a path, and value of the setting becomes the JSON property value.
-
-For example, a very common configuration is enabling proc-macro support, which can be achieved by sending this JSON:
-
-[source,json]
-----
-{
- "cargo": {
- "buildScripts": {
- "enable": true,
- },
- },
- "procMacro": {
- "enable": true,
- }
-}
-----
-
-Please consult your editor's documentation to learn more about how to configure https://microsoft.github.io/language-server-protocol/[LSP servers].
-
-To verify which configuration is actually used by `rust-analyzer`, set `RA_LOG` environment variable to `rust_analyzer=info` and look for config-related messages.
-Logs should show both the JSON that `rust-analyzer` sees as well as the updated config.
-
-This is the list of config options `rust-analyzer` supports:
-
-include::./generated_config.adoc[]
-
-== Non-Cargo Based Projects
-
-rust-analyzer does not require Cargo.
-However, if you use some other build system, you'll have to describe the structure of your project for rust-analyzer in the `rust-project.json` format:
-
-[source,TypeScript]
-----
-interface JsonProject {
- /// Path to the sysroot directory.
- ///
- /// The sysroot is where rustc looks for the
- /// crates that are built-in to rust, such as
- /// std.
- ///
- /// https://doc.rust-lang.org/rustc/command-line-arguments.html#--sysroot-override-the-system-root
- ///
- /// To see the current value of sysroot, you
- /// can query rustc:
- ///
- /// ```
- /// $ rustc --print sysroot
- /// /Users/yourname/.rustup/toolchains/stable-x86_64-apple-darwin
- /// ```
- sysroot?: string;
- /// Path to the directory with *source code* of
- /// sysroot crates.
- ///
- /// By default, this is `lib/rustlib/src/rust/library`
- /// relative to the sysroot.
- ///
- /// It should point to the directory where std,
- /// core, and friends can be found:
- ///
- /// https://github.com/rust-lang/rust/tree/master/library.
- ///
- /// If provided, rust-analyzer automatically adds
- /// dependencies on sysroot crates. Conversely,
- /// if you omit this path, you can specify sysroot
- /// dependencies yourself and, for example, have
- /// several different "sysroots" in one graph of
- /// crates.
- sysroot_src?: string;
- /// List of groups of common cfg values, to allow
- /// sharing them between crates.
- ///
- /// Maps from group name to its cfgs. Cfg follow
- /// the same format as `Crate.cfg`.
- cfg_groups?: { [key: string]: string[]; };
- /// The set of crates comprising the current
- /// project. Must include all transitive
- /// dependencies as well as sysroot crate (libstd,
- /// libcore and such).
- crates: Crate[];
- /// Configuration for CLI commands.
- ///
- /// These are used for running and debugging binaries
- /// and tests without encoding build system-specific
- /// knowledge into rust-analyzer.
- ///
- /// # Example
- ///
- /// Below is an example of a test runnable. `{label}` and `{test_id}`
- /// are explained in `Runnable::args`'s documentation below.
- ///
- /// ```json
- /// {
- /// "program": "buck",
- /// "args": [
- /// "test",
- /// "{label}",
- /// "--",
- /// "{test_id}",
- /// "--print-passing-details"
- /// ],
- /// "cwd": "/home/user/repo-root/",
- /// "kind": "testOne"
- /// }
- /// ```
- runnables?: Runnable[];
-}
-
-interface Crate {
- /// Optional crate name used for display purposes,
- /// without affecting semantics. See the `deps`
- /// key for semantically-significant crate names.
- display_name?: string;
- /// Path to the root module of the crate.
- root_module: string;
- /// Edition of the crate.
- edition: '2015' | '2018' | '2021' | '2024';
- /// The version of the crate. Used for calculating
- /// the correct docs.rs URL.
- version?: string;
- /// Dependencies
- deps: Dep[];
- /// Should this crate be treated as a member of
- /// current "workspace".
- ///
- /// By default, inferred from the `root_module`
- /// (members are the crates which reside inside
- /// the directory opened in the editor).
- ///
- /// Set this to `false` for things like standard
- /// library and 3rd party crates to enable
- /// performance optimizations (rust-analyzer
- /// assumes that non-member crates don't change).
- is_workspace_member?: boolean;
- /// Optionally specify the (super)set of `.rs`
- /// files comprising this crate.
- ///
- /// By default, rust-analyzer assumes that only
- /// files under `root_module.parent` can belong
- /// to a crate. `include_dirs` are included
- /// recursively, unless a subdirectory is in
- /// `exclude_dirs`.
- ///
- /// Different crates can share the same `source`.
- ///
- /// If two crates share an `.rs` file in common,
- /// they *must* have the same `source`.
- /// rust-analyzer assumes that files from one
- /// source can't refer to files in another source.
- source?: {
- include_dirs: string[];
- exclude_dirs: string[];
- };
- /// List of cfg groups this crate inherits.
- ///
- /// All cfg in these groups will be concatenated to
- /// `cfg`. It is impossible to replace a value from
- /// the groups.
- cfg_groups?: string[];
- /// The set of cfgs activated for a given crate, like
- /// `["unix", "feature=\"foo\"", "feature=\"bar\""]`.
- cfg: string[];
- /// Target tuple for this Crate.
- ///
- /// Used when running `rustc --print cfg`
- /// to get target-specific cfgs.
- target?: string;
- /// Environment variables, used for
- /// the `env!` macro
- env: { [key: string]: string; };
-
- /// Whether the crate is a proc-macro crate.
- is_proc_macro: boolean;
- /// For proc-macro crates, path to compiled
- /// proc-macro (.so file).
- proc_macro_dylib_path?: string;
-
- /// Repository, matching the URL that would be used
- /// in Cargo.toml.
- repository?: string;
-
- /// Build-specific data about this crate.
- build?: BuildInfo;
-}
-
-interface Dep {
- /// Index of a crate in the `crates` array.
- crate: number;
- /// Name as should appear in the (implicit)
- /// `extern crate name` declaration.
- name: string;
-}
-
-interface BuildInfo {
- /// The name associated with this crate.
- ///
- /// This is determined by the build system that produced
- /// the `rust-project.json` in question. For instance, if buck were used,
- /// the label might be something like `//ide/rust/rust-analyzer:rust-analyzer`.
- ///
- /// Do not attempt to parse the contents of this string; it is a build system-specific
- /// identifier similar to `Crate::display_name`.
- label: string;
- /// Path corresponding to the build system-specific file defining the crate.
- build_file: string;
- /// The kind of target.
- ///
- /// This information is used to determine what sort
- /// of runnable codelens to provide, if any.
- target_kind: 'bin' | 'lib' | 'test';
-}
-
-interface Runnable {
- /// The program invoked by the runnable.
- ///
- /// For example, this might be `cargo`, `buck`, or `bazel`.
- program: string;
- /// The arguments passed to `program`.
- args: string[];
- /// The current working directory of the runnable.
- cwd: string;
- /// Used to decide what code lens to offer.
- ///
- /// `testOne`: This runnable will be used when the user clicks the 'Run Test'
- /// CodeLens above a test.
- ///
- /// The args for testOne can contain two template strings:
- /// `{label}` and `{test_id}`. `{label}` will be replaced
- /// with the `Build::label` and `{test_id}` will be replaced
- /// with the test name.
- kind: 'testOne' | string;
-}
-----
-
-This format is provisional and subject to change.
-Specifically, the `roots` setup will be different eventually.
-
-There are three ways to feed `rust-project.json` to rust-analyzer:
-
-* Place `rust-project.json` file at the root of the project, and rust-analyzer will discover it.
-* Specify `"rust-analyzer.linkedProjects": [ "path/to/rust-project.json" ]` in the settings (and make sure that your LSP client sends settings as a part of initialize request).
-* Specify `"rust-analyzer.linkedProjects": [ { "roots": [...], "crates": [...] }]` inline.
-
-Relative paths are interpreted relative to `rust-project.json` file location or (for inline JSON) relative to `rootUri`.
-
-You can set the `RA_LOG` environment variable to `rust_analyzer=info` to inspect how rust-analyzer handles config and project loading.
-
-Note that calls to `cargo check` are disabled when using `rust-project.json` by default, so compilation errors and warnings will no longer be sent to your LSP client.
-To enable these compilation errors you will need to specify explicitly what command rust-analyzer should run to perform the checks using the `rust-analyzer.check.overrideCommand` configuration.
-As an example, the following configuration explicitly sets `cargo check` as the `check` command.
-
-[source,json]
-----
-{ "rust-analyzer.check.overrideCommand": ["cargo", "check", "--message-format=json"] }
-----
-
-`check.overrideCommand` requires the command specified to output json error messages for rust-analyzer to consume.
-The `--message-format=json` flag does this for `cargo check` so whichever command you use must also output errors in this format.
-See the <<Configuration>> section for more information.
-
-== Security
-
-At the moment, rust-analyzer assumes that all code is trusted.
-Here is a **non-exhaustive** list of ways to make rust-analyzer execute arbitrary code:
-
-* proc macros and build scripts are executed by default
-* `.cargo/config` can override `rustc` with an arbitrary executable
-* `rust-toolchain.toml` can override `rustc` with an arbitrary executable
-* VS Code plugin reads configuration from project directory, and that can be used to override paths to various executables, like `rustfmt` or `rust-analyzer` itself.
-* rust-analyzer's syntax trees library uses a lot of `unsafe` and hasn't been properly audited for memory safety.
-
-== Privacy
-
-The LSP server performs no network access in itself, but runs `cargo metadata` which will update or download the crate registry and the source code of the project dependencies.
-If enabled (the default), build scripts and procedural macros can do anything.
-
-The Code extension does not access the network.
-
-Any other editor plugins are not under the control of the `rust-analyzer` developers. For any privacy concerns, you should check with their respective developers.
-
-For `rust-analyzer` developers, `cargo xtask release` uses the GitHub API to put together the release notes.
-
-== Features
-
-include::./generated_features.adoc[]
-
-== Assists (Code Actions)
-
-Assists, or code actions, are small local refactorings, available in a particular context.
-They are usually triggered by a shortcut or by clicking a light bulb icon in the editor.
-Cursor position or selection is signified by `┃` character.
-
-include::./generated_assists.adoc[]
-
-== Diagnostics
-
-While most errors and warnings provided by rust-analyzer come from the `cargo check` integration, there's a growing number of diagnostics implemented using rust-analyzer's own analysis.
-Some of these diagnostics don't respect `\#[allow]` or `\#[deny]` attributes yet, but can be turned off using the `rust-analyzer.diagnostics.enable`, `rust-analyzer.diagnostics.experimental.enable` or `rust-analyzer.diagnostics.disabled` settings.
-
-=== Clippy
-
-To run `cargo clippy` instead of `cargo check`, you can set `"rust-analyzer.check.command": "clippy"`.
-
-include::./generated_diagnostic.adoc[]
-
-== Editor Features
-=== VS Code
-
-==== Color configurations
-
-It is possible to change the foreground/background color and font family/size of inlay hints.
-Just add this to your `settings.json`:
-
-[source,jsonc]
-----
-{
- "editor.inlayHints.fontFamily": "Courier New",
- "editor.inlayHints.fontSize": 11,
-
- "workbench.colorCustomizations": {
- // Name of the theme you are currently using
- "[Default Dark+]": {
- "editorInlayHint.foreground": "#868686f0",
- "editorInlayHint.background": "#3d3d3d48",
-
- // Overrides for specific kinds of inlay hints
- "editorInlayHint.typeForeground": "#fdb6fdf0",
- "editorInlayHint.parameterForeground": "#fdb6fdf0",
- }
- }
-}
-----
-
-==== Semantic style customizations
-
-You can customize the look of different semantic elements in the source code.
-For example, mutable bindings are underlined by default and you can override this behavior by adding the following section to your `settings.json`:
-
-[source,jsonc]
-----
-{
- "editor.semanticTokenColorCustomizations": {
- "rules": {
- "*.mutable": {
- "fontStyle": "", // underline is the default
- },
- }
- },
-}
-----
-
-Most themes don't support styling unsafe operations differently yet. You can fix this by adding overrides for the rules `operator.unsafe`, `function.unsafe`, and `method.unsafe`:
-
-[source,jsonc]
-----
-{
- "editor.semanticTokenColorCustomizations": {
- "rules": {
- "operator.unsafe": "#ff6600",
- "function.unsafe": "#ff6600",
- "method.unsafe": "#ff6600"
- }
- },
-}
-----
-
-In addition to the top-level rules you can specify overrides for specific themes. For example, if you wanted to use a darker text color on a specific light theme, you might write:
-
-[source,jsonc]
-----
-{
- "editor.semanticTokenColorCustomizations": {
- "rules": {
- "operator.unsafe": "#ff6600"
- },
- "[Ayu Light]": {
- "rules": {
- "operator.unsafe": "#572300"
- }
- }
- },
-}
-----
-
-Make sure you include the brackets around the theme name. For example, use `"[Ayu Light]"` to customize the theme Ayu Light.
-
-==== Special `when` clause context for keybindings.
-You may use `inRustProject` context to configure keybindings for rust projects only.
-For example:
-
-[source,json]
-----
-{
- "key": "ctrl+alt+d",
- "command": "rust-analyzer.openDocs",
- "when": "inRustProject"
-}
-----
-More about `when` clause contexts https://code.visualstudio.com/docs/getstarted/keybindings#_when-clause-contexts[here].
-
-==== Setting runnable environment variables
-You can use "rust-analyzer.runnables.extraEnv" setting to define runnable environment-specific substitution variables.
-The simplest way for all runnables in a bunch:
-```jsonc
-"rust-analyzer.runnables.extraEnv": {
- "RUN_SLOW_TESTS": "1"
-}
-```
-
-Or it is possible to specify vars more granularly:
-```jsonc
-"rust-analyzer.runnables.extraEnv": [
- {
- // "mask": null, // null mask means that this rule will be applied for all runnables
- "env": {
- "APP_ID": "1",
- "APP_DATA": "asdf"
- }
- },
- {
- "mask": "test_name",
- "env": {
- "APP_ID": "2", // overwrites only APP_ID
- }
- }
-]
-```
-
-You can use any valid regular expression as a mask.
-Also note that a full runnable name is something like *run bin_or_example_name*, *test some::mod::test_name* or *test-mod some::mod*, so it is possible to distinguish binaries, single tests, and test modules with these masks: `"^run"`, `"^test "` (the trailing space matters!), and `"^test-mod"` respectively.
-
-If needed, you can set different values for different platforms:
-```jsonc
-"rust-analyzer.runnables.extraEnv": [
- {
- "platform": "win32", // windows only
- "env": {
- "APP_DATA": "windows specific data"
- }
- },
- {
- "platform": ["linux"],
- "env": {
- "APP_DATA": "linux data",
- }
- },
- { // for all platforms
- "env": {
- "APP_COMMON_DATA": "xxx",
- }
- }
-]
-```
-
-==== Compiler feedback from external commands
-
-Instead of relying on the built-in `cargo check`, you can configure Code to run a command in the background and use the `$rustc-watch` problem matcher to generate inline error markers from its output.
-
-To do this you need to create a new https://code.visualstudio.com/docs/editor/tasks[VS Code Task] and set `"rust-analyzer.checkOnSave": false` in preferences.
-
-For example, if you want to run https://crates.io/crates/cargo-watch[`cargo watch`] instead, you might add the following to `.vscode/tasks.json`:
-
-```json
-{
- "label": "Watch",
- "group": "build",
- "type": "shell",
- "command": "cargo watch",
- "problemMatcher": "$rustc-watch",
- "isBackground": true
-}
-```
-
-==== Live Share
-
-VS Code Live Share has partial support for rust-analyzer.
-
-Live Share _requires_ the official Microsoft build of VS Code, OSS builds will not work correctly.
-
-The host's rust-analyzer instance will be shared with all guests joining the session.
-The guests do not have to have the rust-analyzer extension installed for this to work.
-
-If you are joining a Live Share session and _do_ have rust-analyzer installed locally, commands from the command palette will not work correctly since they will attempt to communicate with the local server.
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index 6027f81331..86a066454a 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -23,7 +23,7 @@
"@typescript-eslint/parser": "^6.0.0",
"@vscode/test-electron": "^2.3.8",
"@vscode/vsce": "^3.0.0",
- "esbuild": "^0.18.12",
+ "esbuild": "^0.25.0",
"eslint": "^8.44.0",
"eslint-config-prettier": "^8.8.0",
"ovsx": "^0.8.2",
@@ -256,356 +256,429 @@
"node": ">=16"
}
},
+ "node_modules/@esbuild/aix-ppc64": {
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.0.tgz",
+ "integrity": "sha512-O7vun9Sf8DFjH2UtqK8Ku3LkquL9SZL8OLY1T5NZkA34+wG3OQF7cl4Ql8vdNzM6fzBbYfLaiRLIOZ+2FOCgBQ==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "aix"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
"node_modules/@esbuild/android-arm": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.12.tgz",
- "integrity": "sha512-LIxaNIQfkFZbTLb4+cX7dozHlAbAshhFE5PKdro0l+FnCpx1GDJaQ2WMcqm+ToXKMt8p8Uojk/MFRuGyz3V5Sw==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.0.tgz",
+ "integrity": "sha512-PTyWCYYiU0+1eJKmw21lWtC+d08JDZPQ5g+kFyxP0V+es6VPPSUhM6zk8iImp2jbV6GwjX4pap0JFbUQN65X1g==",
"cpu": [
"arm"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/android-arm64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.12.tgz",
- "integrity": "sha512-BMAlczRqC/LUt2P97E4apTBbkvS9JTJnp2DKFbCwpZ8vBvXVbNdqmvzW/OsdtI/+mGr+apkkpqGM8WecLkPgrA==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.0.tgz",
+ "integrity": "sha512-grvv8WncGjDSyUBjN9yHXNt+cq0snxXbDxy5pJtzMKGmmpPxeAmAhWxXI+01lU5rwZomDgD3kJwulEnhTRUd6g==",
"cpu": [
"arm64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/android-x64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.12.tgz",
- "integrity": "sha512-zU5MyluNsykf5cOJ0LZZZjgAHbhPJ1cWfdH1ZXVMXxVMhEV0VZiZXQdwBBVvmvbF28EizeK7obG9fs+fpmS0eQ==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.0.tgz",
+ "integrity": "sha512-m/ix7SfKG5buCnxasr52+LI78SQ+wgdENi9CqyCXwjVR2X4Jkz+BpC3le3AoBPYTC9NHklwngVXvbJ9/Akhrfg==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/darwin-arm64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.12.tgz",
- "integrity": "sha512-zUZMep7YONnp6954QOOwEBwFX9svlKd3ov6PkxKd53LGTHsp/gy7vHaPGhhjBmEpqXEXShi6dddjIkmd+NgMsA==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.0.tgz",
+ "integrity": "sha512-mVwdUb5SRkPayVadIOI78K7aAnPamoeFR2bT5nszFUZ9P8UpK4ratOdYbZZXYSqPKMHfS1wdHCJk1P1EZpRdvw==",
"cpu": [
"arm64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/darwin-x64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.12.tgz",
- "integrity": "sha512-ohqLPc7i67yunArPj1+/FeeJ7AgwAjHqKZ512ADk3WsE3FHU9l+m5aa7NdxXr0HmN1bjDlUslBjWNbFlD9y12Q==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.0.tgz",
+ "integrity": "sha512-DgDaYsPWFTS4S3nWpFcMn/33ZZwAAeAFKNHNa1QN0rI4pUjgqf0f7ONmXf6d22tqTY+H9FNdgeaAa+YIFUn2Rg==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/freebsd-arm64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.12.tgz",
- "integrity": "sha512-GIIHtQXqgeOOqdG16a/A9N28GpkvjJnjYMhOnXVbn3EDJcoItdR58v/pGN31CHjyXDc8uCcRnFWmqaJt24AYJg==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.0.tgz",
+ "integrity": "sha512-VN4ocxy6dxefN1MepBx/iD1dH5K8qNtNe227I0mnTRjry8tj5MRk4zprLEdG8WPyAPb93/e4pSgi1SoHdgOa4w==",
"cpu": [
"arm64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/freebsd-x64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.12.tgz",
- "integrity": "sha512-zK0b9a1/0wZY+6FdOS3BpZcPc1kcx2G5yxxfEJtEUzVxI6n/FrC2Phsxj/YblPuBchhBZ/1wwn7AyEBUyNSa6g==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.0.tgz",
+ "integrity": "sha512-mrSgt7lCh07FY+hDD1TxiTyIHyttn6vnjesnPoVDNmDfOmggTLXRv8Id5fNZey1gl/V2dyVK1VXXqVsQIiAk+A==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-arm": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.12.tgz",
- "integrity": "sha512-y75OijvrBE/1XRrXq1jtrJfG26eHeMoqLJ2dwQNwviwTuTtHGCojsDO6BJNF8gU+3jTn1KzJEMETytwsFSvc+Q==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.0.tgz",
+ "integrity": "sha512-vkB3IYj2IDo3g9xX7HqhPYxVkNQe8qTK55fraQyTzTX/fxaDtXiEnavv9geOsonh2Fd2RMB+i5cbhu2zMNWJwg==",
"cpu": [
"arm"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-arm64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.12.tgz",
- "integrity": "sha512-JKgG8Q/LL/9sw/iHHxQyVMoQYu3rU3+a5Z87DxC+wAu3engz+EmctIrV+FGOgI6gWG1z1+5nDDbXiRMGQZXqiw==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.0.tgz",
+ "integrity": "sha512-9QAQjTWNDM/Vk2bgBl17yWuZxZNQIF0OUUuPZRKoDtqF2k4EtYbpyiG5/Dk7nqeK6kIJWPYldkOcBqjXjrUlmg==",
"cpu": [
"arm64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-ia32": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.12.tgz",
- "integrity": "sha512-yoRIAqc0B4lDIAAEFEIu9ttTRFV84iuAl0KNCN6MhKLxNPfzwCBvEMgwco2f71GxmpBcTtn7KdErueZaM2rEvw==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.0.tgz",
+ "integrity": "sha512-43ET5bHbphBegyeqLb7I1eYn2P/JYGNmzzdidq/w0T8E2SsYL1U6un2NFROFRg1JZLTzdCoRomg8Rvf9M6W6Gg==",
"cpu": [
"ia32"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-loong64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.12.tgz",
- "integrity": "sha512-qYgt3dHPVvf/MgbIBpJ4Sup/yb9DAopZ3a2JgMpNKIHUpOdnJ2eHBo/aQdnd8dJ21X/+sS58wxHtA9lEazYtXQ==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.0.tgz",
+ "integrity": "sha512-fC95c/xyNFueMhClxJmeRIj2yrSMdDfmqJnyOY4ZqsALkDrrKJfIg5NTMSzVBr5YW1jf+l7/cndBfP3MSDpoHw==",
"cpu": [
"loong64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-mips64el": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.12.tgz",
- "integrity": "sha512-wHphlMLK4ufNOONqukELfVIbnGQJrHJ/mxZMMrP2jYrPgCRZhOtf0kC4yAXBwnfmULimV1qt5UJJOw4Kh13Yfg==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.0.tgz",
+ "integrity": "sha512-nkAMFju7KDW73T1DdH7glcyIptm95a7Le8irTQNO/qtkoyypZAnjchQgooFUDQhNAy4iu08N79W4T4pMBwhPwQ==",
"cpu": [
"mips64el"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-ppc64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.12.tgz",
- "integrity": "sha512-TeN//1Ft20ZZW41+zDSdOI/Os1bEq5dbvBvYkberB7PHABbRcsteeoNVZFlI0YLpGdlBqohEpjrn06kv8heCJg==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.0.tgz",
+ "integrity": "sha512-NhyOejdhRGS8Iwv+KKR2zTq2PpysF9XqY+Zk77vQHqNbo/PwZCzB5/h7VGuREZm1fixhs4Q/qWRSi5zmAiO4Fw==",
"cpu": [
"ppc64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-riscv64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.12.tgz",
- "integrity": "sha512-AgUebVS4DoAblBgiB2ACQ/8l4eGE5aWBb8ZXtkXHiET9mbj7GuWt3OnsIW/zX+XHJt2RYJZctbQ2S/mDjbp0UA==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.0.tgz",
+ "integrity": "sha512-5S/rbP5OY+GHLC5qXp1y/Mx//e92L1YDqkiBbO9TQOvuFXM+iDqUNG5XopAnXoRH3FjIUDkeGcY1cgNvnXp/kA==",
"cpu": [
"riscv64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-s390x": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.12.tgz",
- "integrity": "sha512-dJ3Rb3Ei2u/ysSXd6pzleGtfDdc2MuzKt8qc6ls8vreP1G3B7HInX3i7gXS4BGeVd24pp0yqyS7bJ5NHaI9ing==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.0.tgz",
+ "integrity": "sha512-XM2BFsEBz0Fw37V0zU4CXfcfuACMrppsMFKdYY2WuTS3yi8O1nFOhil/xhKTmE1nPmVyvQJjJivgDT+xh8pXJA==",
"cpu": [
"s390x"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-x64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.12.tgz",
- "integrity": "sha512-OrNJMGQbPaVyHHcDF8ybNSwu7TDOfX8NGpXCbetwOSP6txOJiWlgQnRymfC9ocR1S0Y5PW0Wb1mV6pUddqmvmQ==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.0.tgz",
+ "integrity": "sha512-9yl91rHw/cpwMCNytUDxwj2XjFpxML0y9HAOH9pNVQDpQrBxHy01Dx+vaMu0N1CKa/RzBD2hB4u//nfc+Sd3Cw==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/netbsd-arm64": {
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.0.tgz",
+ "integrity": "sha512-RuG4PSMPFfrkH6UwCAqBzauBWTygTvb1nxWasEJooGSJ/NwRw7b2HOwyRTQIU97Hq37l3npXoZGYMy3b3xYvPw==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=18"
}
},
"node_modules/@esbuild/netbsd-x64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.12.tgz",
- "integrity": "sha512-55FzVCAiwE9FK8wWeCRuvjazNRJ1QqLCYGZVB6E8RuQuTeStSwotpSW4xoRGwp3a1wUsaVCdYcj5LGCASVJmMg==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.0.tgz",
+ "integrity": "sha512-jl+qisSB5jk01N5f7sPCsBENCOlPiS/xptD5yxOx2oqQfyourJwIKLRA2yqWdifj3owQZCL2sn6o08dBzZGQzA==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openbsd-arm64": {
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.0.tgz",
+ "integrity": "sha512-21sUNbq2r84YE+SJDfaQRvdgznTD8Xc0oc3p3iW/a1EVWeNj/SdUCbm5U0itZPQYRuRTW20fPMWMpcrciH2EJw==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=18"
}
},
"node_modules/@esbuild/openbsd-x64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.12.tgz",
- "integrity": "sha512-qnluf8rfb6Y5Lw2tirfK2quZOBbVqmwxut7GPCIJsM8lc4AEUj9L8y0YPdLaPK0TECt4IdyBdBD/KRFKorlK3g==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.0.tgz",
+ "integrity": "sha512-2gwwriSMPcCFRlPlKx3zLQhfN/2WjJ2NSlg5TKLQOJdV0mSxIcYNTMhk3H3ulL/cak+Xj0lY1Ym9ysDV1igceg==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/sunos-x64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.12.tgz",
- "integrity": "sha512-+RkKpVQR7bICjTOPUpkTBTaJ4TFqQBX5Ywyd/HSdDkQGn65VPkTsR/pL4AMvuMWy+wnXgIl4EY6q4mVpJal8Kg==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.0.tgz",
+ "integrity": "sha512-bxI7ThgLzPrPz484/S9jLlvUAHYMzy6I0XiU1ZMeAEOBcS0VePBFxh1JjTQt3Xiat5b6Oh4x7UC7IwKQKIJRIg==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"sunos"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/win32-arm64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.12.tgz",
- "integrity": "sha512-GNHuciv0mFM7ouzsU0+AwY+7eV4Mgo5WnbhfDCQGtpvOtD1vbOiRjPYG6dhmMoFyBjj+pNqQu2X+7DKn0KQ/Gw==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.0.tgz",
+ "integrity": "sha512-ZUAc2YK6JW89xTbXvftxdnYy3m4iHIkDtK3CLce8wg8M2L+YZhIvO1DKpxrd0Yr59AeNNkTiic9YLf6FTtXWMw==",
"cpu": [
"arm64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/win32-ia32": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.12.tgz",
- "integrity": "sha512-kR8cezhYipbbypGkaqCTWIeu4zID17gamC8YTPXYtcN3E5BhhtTnwKBn9I0PJur/T6UVwIEGYzkffNL0lFvxEw==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.0.tgz",
+ "integrity": "sha512-eSNxISBu8XweVEWG31/JzjkIGbGIJN/TrRoiSVZwZ6pkC6VX4Im/WV2cz559/TXLcYbcrDN8JtKgd9DJVIo8GA==",
"cpu": [
"ia32"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/win32-x64": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.12.tgz",
- "integrity": "sha512-O0UYQVkvfM/jO8a4OwoV0mAKSJw+mjWTAd1MJd/1FCX6uiMdLmMRPK/w6e9OQ0ob2WGxzIm9va/KG0Ja4zIOgg==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.0.tgz",
+ "integrity": "sha512-ZENoHJBxA20C2zFzh6AI4fT6RraMzjYw4xKWemRTRmRVtN9c5DcH9r/f2ihEkMjOW5eGgrwCslG/+Y/3bL+DHQ==",
"cpu": [
"x64"
],
"dev": true,
+ "license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@eslint-community/eslint-utils": {
@@ -2521,40 +2594,44 @@
}
},
"node_modules/esbuild": {
- "version": "0.18.12",
- "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.12.tgz",
- "integrity": "sha512-XuOVLDdtsDslXStStduT41op21Ytmf4/BDS46aa3xPJ7X5h2eMWBF1oAe3QjUH3bDksocNXgzGUZ7XHIBya6Tg==",
+ "version": "0.25.0",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.0.tgz",
+ "integrity": "sha512-BXq5mqc8ltbaN34cDqWuYKyNhX8D/Z0J1xdtdQ8UcIIIyJyz+ZMKUt58tF3SrZ85jcfN/PZYhjR5uDQAYNVbuw==",
"dev": true,
"hasInstallScript": true,
+ "license": "MIT",
"bin": {
"esbuild": "bin/esbuild"
},
"engines": {
- "node": ">=12"
+ "node": ">=18"
},
"optionalDependencies": {
- "@esbuild/android-arm": "0.18.12",
- "@esbuild/android-arm64": "0.18.12",
- "@esbuild/android-x64": "0.18.12",
- "@esbuild/darwin-arm64": "0.18.12",
- "@esbuild/darwin-x64": "0.18.12",
- "@esbuild/freebsd-arm64": "0.18.12",
- "@esbuild/freebsd-x64": "0.18.12",
- "@esbuild/linux-arm": "0.18.12",
- "@esbuild/linux-arm64": "0.18.12",
- "@esbuild/linux-ia32": "0.18.12",
- "@esbuild/linux-loong64": "0.18.12",
- "@esbuild/linux-mips64el": "0.18.12",
- "@esbuild/linux-ppc64": "0.18.12",
- "@esbuild/linux-riscv64": "0.18.12",
- "@esbuild/linux-s390x": "0.18.12",
- "@esbuild/linux-x64": "0.18.12",
- "@esbuild/netbsd-x64": "0.18.12",
- "@esbuild/openbsd-x64": "0.18.12",
- "@esbuild/sunos-x64": "0.18.12",
- "@esbuild/win32-arm64": "0.18.12",
- "@esbuild/win32-ia32": "0.18.12",
- "@esbuild/win32-x64": "0.18.12"
+ "@esbuild/aix-ppc64": "0.25.0",
+ "@esbuild/android-arm": "0.25.0",
+ "@esbuild/android-arm64": "0.25.0",
+ "@esbuild/android-x64": "0.25.0",
+ "@esbuild/darwin-arm64": "0.25.0",
+ "@esbuild/darwin-x64": "0.25.0",
+ "@esbuild/freebsd-arm64": "0.25.0",
+ "@esbuild/freebsd-x64": "0.25.0",
+ "@esbuild/linux-arm": "0.25.0",
+ "@esbuild/linux-arm64": "0.25.0",
+ "@esbuild/linux-ia32": "0.25.0",
+ "@esbuild/linux-loong64": "0.25.0",
+ "@esbuild/linux-mips64el": "0.25.0",
+ "@esbuild/linux-ppc64": "0.25.0",
+ "@esbuild/linux-riscv64": "0.25.0",
+ "@esbuild/linux-s390x": "0.25.0",
+ "@esbuild/linux-x64": "0.25.0",
+ "@esbuild/netbsd-arm64": "0.25.0",
+ "@esbuild/netbsd-x64": "0.25.0",
+ "@esbuild/openbsd-arm64": "0.25.0",
+ "@esbuild/openbsd-x64": "0.25.0",
+ "@esbuild/sunos-x64": "0.25.0",
+ "@esbuild/win32-arm64": "0.25.0",
+ "@esbuild/win32-ia32": "0.25.0",
+ "@esbuild/win32-x64": "0.25.0"
}
},
"node_modules/escalade": {
diff --git a/editors/code/package.json b/editors/code/package.json
index f148041ac3..3f09033051 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -59,7 +59,7 @@
"@typescript-eslint/parser": "^6.0.0",
"@vscode/test-electron": "^2.3.8",
"@vscode/vsce": "^3.0.0",
- "esbuild": "^0.18.12",
+ "esbuild": "^0.25.0",
"eslint": "^8.44.0",
"eslint-config-prettier": "^8.8.0",
"ovsx": "^0.8.2",
@@ -501,6 +501,10 @@
},
{
"pattern": "**/Cargo.lock"
+ },
+ {
+ "scheme": "output",
+ "pattern": "extension-output-rust-lang.rust-analyzer*"
}
]
}
@@ -1469,8 +1473,8 @@
{
"title": "files",
"properties": {
- "rust-analyzer.files.excludeDirs": {
- "markdownDescription": "These directories will be ignored by rust-analyzer. They are\nrelative to the workspace root, and globs are not supported. You may\nalso need to add the folders to Code's `files.watcherExclude`.",
+ "rust-analyzer.files.exclude": {
+ "markdownDescription": "These paths (file/directories) will be ignored by rust-analyzer. They are\nrelative to the workspace root, and globs are not supported. You may\nalso need to add the folders to Code's `files.watcherExclude`.",
"default": [],
"type": "array",
"items": {
@@ -2252,6 +2256,16 @@
{
"title": "inlayHints",
"properties": {
+ "rust-analyzer.inlayHints.typeHints.hideClosureParameter": {
+ "markdownDescription": "Whether to hide inlay parameter type hints for closures.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
"rust-analyzer.inlayHints.typeHints.hideNamedConstructor": {
"markdownDescription": "Whether to hide inlay type hints for constructors.",
"default": false,
diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts
index b3aa04af7e..eee623ecae 100644
--- a/editors/code/src/commands.ts
+++ b/editors/code/src/commands.ts
@@ -361,10 +361,7 @@ export function syntaxTreeReveal(): Cmd {
const activeEditor = vscode.window.activeTextEditor;
if (activeEditor !== undefined) {
- const start = activeEditor.document.positionAt(element.start);
- const end = activeEditor.document.positionAt(element.end);
-
- const newSelection = new vscode.Selection(start, end);
+ const newSelection = new vscode.Selection(element.range.start, element.range.end);
activeEditor.selection = newSelection;
activeEditor.revealRange(newSelection);
@@ -378,15 +375,12 @@ function elementToString(
depth: number = 0,
): string {
let result = " ".repeat(depth);
- const start = element.istart ?? element.start;
- const end = element.iend ?? element.end;
+ const offsets = element.inner?.offsets ?? element.offsets;
- result += `${element.kind}@${start}..${end}`;
+ result += `${element.kind}@${offsets.start}..${offsets.end}`;
if (element.type === "Token") {
- const startPosition = activeDocument.positionAt(element.start);
- const endPosition = activeDocument.positionAt(element.end);
- const text = activeDocument.getText(new vscode.Range(startPosition, endPosition));
+ const text = activeDocument.getText(element.range).replaceAll("\r\n", "\n");
// JSON.stringify quotes and escapes the string for us.
result += ` ${JSON.stringify(text)}\n`;
} else {
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index 96dc4f19b8..4248305d5c 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -384,9 +384,7 @@ export class Ctx implements RustAnalyzerExtensionApi {
return;
}
- const start = e.textEditor.document.offsetAt(selection.start);
- const end = e.textEditor.document.offsetAt(selection.end);
- const result = this.syntaxTreeProvider?.getElementByRange(start, end);
+ const result = this.syntaxTreeProvider?.getElementByRange(selection);
if (result !== undefined) {
await this.syntaxTreeView?.reveal(result);
}
diff --git a/editors/code/src/syntax_tree_provider.ts b/editors/code/src/syntax_tree_provider.ts
index c7e8007e83..3f7e30f13a 100644
--- a/editors/code/src/syntax_tree_provider.ts
+++ b/editors/code/src/syntax_tree_provider.ts
@@ -37,11 +37,7 @@ export class SyntaxTreeProvider implements vscode.TreeDataProvider<SyntaxElement
const editor = vscode.window.activeTextEditor;
if (editor !== undefined) {
- const start = editor.document.positionAt(element.start);
- const end = editor.document.positionAt(element.end);
- const range = new vscode.Range(start, end);
-
- const text = editor.document.getText(range);
+ const text = editor.document.getText(element.range);
item.tooltip = new vscode.MarkdownString().appendCodeblock(text, "rust");
}
@@ -74,14 +70,61 @@ export class SyntaxTreeProvider implements vscode.TreeDataProvider<SyntaxElement
if (editor && isRustEditor(editor)) {
const params = { textDocument: { uri: editor.document.uri.toString() }, range: null };
const fileText = await this.ctx.client.sendRequest(ra.viewSyntaxTree, params);
- this.root = JSON.parse(fileText, (_key, value: SyntaxElement) => {
+ this.root = JSON.parse(fileText, (_key, value: RawElement): SyntaxElement => {
+ if (value.type !== "Node" && value.type !== "Token") {
+ // This is something other than a RawElement.
+ return value;
+ }
+ const [startOffset, startLine, startCol] = value.start;
+ const [endOffset, endLine, endCol] = value.end;
+ const range = new vscode.Range(startLine, startCol, endLine, endCol);
+ const offsets = {
+ start: startOffset,
+ end: endOffset,
+ };
+
+ let inner;
+ if (value.istart && value.iend) {
+ const [istartOffset, istartLine, istartCol] = value.istart;
+ const [iendOffset, iendLine, iendCol] = value.iend;
+
+ inner = {
+ offsets: {
+ start: istartOffset,
+ end: iendOffset,
+ },
+ range: new vscode.Range(istartLine, istartCol, iendLine, iendCol),
+ };
+ }
+
if (value.type === "Node") {
- for (const child of value.children) {
- child.parent = value;
+ const result = {
+ type: value.type,
+ kind: value.kind,
+ offsets,
+ range,
+ inner,
+ children: value.children,
+ parent: undefined,
+ document: editor.document,
+ };
+
+ for (const child of result.children) {
+ child.parent = result;
}
- }
- return value;
+ return result;
+ } else {
+ return {
+ type: value.type,
+ kind: value.kind,
+ offsets,
+ range,
+ inner,
+ parent: undefined,
+ document: editor.document,
+ };
+ }
});
} else {
this.root = undefined;
@@ -90,14 +133,14 @@ export class SyntaxTreeProvider implements vscode.TreeDataProvider<SyntaxElement
this._onDidChangeTreeData.fire();
}
- getElementByRange(start: number, end: number): SyntaxElement | undefined {
+ getElementByRange(target: vscode.Range): SyntaxElement | undefined {
if (this.root === undefined) {
return undefined;
}
let result: SyntaxElement = this.root;
- if (this.root.start === start && this.root.end === end) {
+ if (this.root.range.isEqual(target)) {
return result;
}
@@ -105,9 +148,9 @@ export class SyntaxTreeProvider implements vscode.TreeDataProvider<SyntaxElement
outer: while (true) {
for (const child of children) {
- if (child.start <= start && child.end >= end) {
+ if (child.range.contains(target)) {
result = child;
- if (start === end && start === child.end) {
+ if (target.isEmpty && target.start === child.range.end) {
// When the cursor is on the very end of a token,
// we assume the user wants the next token instead.
continue;
@@ -136,31 +179,72 @@ export class SyntaxTreeProvider implements vscode.TreeDataProvider<SyntaxElement
export type SyntaxNode = {
type: "Node";
kind: string;
- start: number;
- end: number;
- istart?: number;
- iend?: number;
+ range: vscode.Range;
+ offsets: {
+ start: number;
+ end: number;
+ };
+ /** This element's position within a Rust string literal, if it's inside of one. */
+ inner?: {
+ range: vscode.Range;
+ offsets: {
+ start: number;
+ end: number;
+ };
+ };
children: SyntaxElement[];
parent?: SyntaxElement;
+ document: vscode.TextDocument;
};
type SyntaxToken = {
type: "Token";
kind: string;
- start: number;
- end: number;
- istart?: number;
- iend?: number;
+ range: vscode.Range;
+ offsets: {
+ start: number;
+ end: number;
+ };
+ /** This element's position within a Rust string literal, if it's inside of one. */
+ inner?: {
+ range: vscode.Range;
+ offsets: {
+ start: number;
+ end: number;
+ };
+ };
parent?: SyntaxElement;
+ document: vscode.TextDocument;
};
export type SyntaxElement = SyntaxNode | SyntaxToken;
+type RawNode = {
+ type: "Node";
+ kind: string;
+ start: [number, number, number];
+ end: [number, number, number];
+ istart?: [number, number, number];
+ iend?: [number, number, number];
+ children: SyntaxElement[];
+};
+
+type RawToken = {
+ type: "Token";
+ kind: string;
+ start: [number, number, number];
+ end: [number, number, number];
+ istart?: [number, number, number];
+ iend?: [number, number, number];
+};
+
+type RawElement = RawNode | RawToken;
+
export class SyntaxTreeItem extends vscode.TreeItem {
constructor(private readonly element: SyntaxElement) {
super(element.kind);
- const icon = getIcon(element.kind);
- if (element.type === "Node") {
+ const icon = getIcon(this.element.kind);
+ if (this.element.type === "Node") {
this.contextValue = "syntaxNode";
this.iconPath = icon ?? new vscode.ThemeIcon("list-tree");
this.collapsibleState = vscode.TreeItemCollapsibleState.Expanded;
@@ -170,11 +254,9 @@ export class SyntaxTreeItem extends vscode.TreeItem {
this.collapsibleState = vscode.TreeItemCollapsibleState.None;
}
- if (element.istart !== undefined && element.iend !== undefined) {
- this.description = `${this.element.istart}..${this.element.iend}`;
- } else {
- this.description = `${this.element.start}..${this.element.end}`;
- }
+ const offsets = this.element.inner?.offsets ?? this.element.offsets;
+
+ this.description = `${offsets.start}..${offsets.end}`;
}
}
diff --git a/editors/code/walkthrough-setup-tips.md b/editors/code/walkthrough-setup-tips.md
index fda4ac8002..aabe0dd662 100644
--- a/editors/code/walkthrough-setup-tips.md
+++ b/editors/code/walkthrough-setup-tips.md
@@ -5,6 +5,7 @@ Add the following to settings.json to mark Rust library sources as read-only:
```json
"files.readonlyInclude": {
"**/.cargo/registry/src/**/*.rs": true,
+ "**/.cargo/git/checkouts/**/*.rs": true,
"**/lib/rustlib/src/rust/library/**/*.rs": true,
},
```
diff --git a/lib/line-index/src/lib.rs b/lib/line-index/src/lib.rs
index 6f0455ee98..bc87ada3eb 100644
--- a/lib/line-index/src/lib.rs
+++ b/lib/line-index/src/lib.rs
@@ -235,7 +235,7 @@ fn analyze_source_file_dispatch(
}
}
-#[cfg(target_arch = "aarch64")]
+#[cfg(all(target_arch = "aarch64", target_endian = "little"))]
fn analyze_source_file_dispatch(
src: &str,
lines: &mut Vec<TextSize>,
@@ -347,7 +347,7 @@ unsafe fn analyze_source_file_sse2(
}
#[target_feature(enable = "neon")]
-#[cfg(target_arch = "aarch64")]
+#[cfg(all(target_arch = "aarch64", target_endian = "little"))]
#[inline]
// See https://community.arm.com/arm-community-blogs/b/infrastructure-solutions-blog/posts/porting-x86-vector-bitmask-optimizations-to-arm-neon
//
@@ -362,7 +362,7 @@ unsafe fn move_mask(v: std::arch::aarch64::uint8x16_t) -> u64 {
}
#[target_feature(enable = "neon")]
-#[cfg(target_arch = "aarch64")]
+#[cfg(all(target_arch = "aarch64", target_endian = "little"))]
unsafe fn analyze_source_file_neon(
src: &str,
lines: &mut Vec<TextSize>,
@@ -441,7 +441,11 @@ unsafe fn analyze_source_file_neon(
}
}
-#[cfg(not(any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")))]
+#[cfg(not(any(
+ target_arch = "x86",
+ target_arch = "x86_64",
+ all(target_arch = "aarch64", target_endian = "little")
+)))]
// The target (or compiler version) does not support SSE2 ...
fn analyze_source_file_dispatch(
src: &str,
diff --git a/rust-version b/rust-version
index 2d9a927c63..6cd39fabee 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-9a1d156f38c51441ee51e5a068f1d0caf4bb0f27
+e0be1a02626abef2878cb7f4aaef7ae409477112
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml
index b505ee835b..ebd8903ad8 100644
--- a/xtask/Cargo.toml
+++ b/xtask/Cargo.toml
@@ -16,8 +16,7 @@ xflags = "0.3.0"
time = { version = "0.3", default-features = false }
zip = { version = "0.6", default-features = false, features = ["deflate", "time"] }
stdx.workspace = true
-# https://github.com/dtolnay/proc-macro2/issues/475
-proc-macro2 = "=1.0.86"
+proc-macro2 = "1.0.93"
quote = "1.0.20"
ungrammar = "1.16.1"
either.workspace = true
diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs
index bc04b9474f..8165a2a12b 100644
--- a/xtask/src/codegen.rs
+++ b/xtask/src/codegen.rs
@@ -24,6 +24,7 @@ impl flags::Codegen {
diagnostics_docs::generate(self.check);
assists_doc_tests::generate(self.check);
parser_inline_tests::generate(self.check);
+ feature_docs::generate(self.check)
// diagnostics_docs::generate(self.check) doesn't generate any tests
// lints::generate(self.check) Updating clones the rust repo, so don't run it unless
// explicitly asked for
@@ -118,10 +119,10 @@ impl fmt::Display for Location {
let name = self.file.file_name().unwrap();
write!(
f,
- "https://github.com/rust-lang/rust-analyzer/blob/master/{}#L{}[{}]",
+ " [{}](https://github.com/rust-lang/rust-analyzer/blob/master/{}#L{}) ",
+ name.to_str().unwrap(),
path,
- self.line,
- name.to_str().unwrap()
+ self.line
)
}
}
@@ -162,7 +163,7 @@ fn reformat(text: String) -> String {
}
fn add_preamble(cg: CodegenType, mut text: String) -> String {
- let preamble = format!("//! Generated by `cargo codegen {cg}`, do not edit by hand.\n\n");
+ let preamble = format!("//! Generated by `cargo xtask codegen {cg}`, do not edit by hand.\n\n");
text.insert_str(0, &preamble);
text
}
@@ -186,7 +187,7 @@ fn ensure_file_contents(cg: CodegenType, file: &Path, contents: &str, check: boo
file.display(),
if std::env::var("CI").is_ok() {
format!(
- "\n NOTE: run `cargo codegen {cg}` locally and commit the updated files\n"
+ "\n NOTE: run `cargo xtask codegen {cg}` locally and commit the updated files\n"
)
} else {
"".to_owned()
diff --git a/xtask/src/codegen/assists_doc_tests.rs b/xtask/src/codegen/assists_doc_tests.rs
index d06c9d65df..0bb18c73cf 100644
--- a/xtask/src/codegen/assists_doc_tests.rs
+++ b/xtask/src/codegen/assists_doc_tests.rs
@@ -1,4 +1,4 @@
-//! Generates `assists.md` documentation.
+//! Generates `assists_generated.md` documentation.
use std::{fmt, fs, path::Path};
@@ -62,7 +62,7 @@ r#####"
crate::flags::CodegenType::AssistsDocTests,
assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"),
);
- let dst = project_root().join("docs/user/generated_assists.adoc");
+ let dst = project_root().join("docs/book/src/assists_generated.md");
fs::write(dst, contents).unwrap();
}
}
@@ -146,7 +146,7 @@ impl fmt::Display for Assist {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let _ = writeln!(
f,
- "[discrete]\n=== `{}`
+ "### `{}`
**Source:** {}",
self.id, self.location,
);
@@ -159,11 +159,11 @@ impl fmt::Display for Assist {
"
{}
-.Before
+#### Before
```rust
{}```
-.After
+#### After
```rust
{}```",
section.doc,
diff --git a/xtask/src/codegen/diagnostics_docs.rs b/xtask/src/codegen/diagnostics_docs.rs
index 4cb8f3f259..cf8f97be00 100644
--- a/xtask/src/codegen/diagnostics_docs.rs
+++ b/xtask/src/codegen/diagnostics_docs.rs
@@ -1,4 +1,4 @@
-//! Generates `assists.md` documentation.
+//! Generates `diagnostics_generated.md` documentation.
use std::{fmt, fs, io, path::PathBuf};
@@ -14,7 +14,7 @@ pub(crate) fn generate(check: bool) {
let contents =
diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
let contents = add_preamble(crate::flags::CodegenType::DiagnosticsDocs, contents);
- let dst = project_root().join("docs/user/generated_diagnostic.adoc");
+ let dst = project_root().join("docs/book/src/diagnostics_generated.md");
fs::write(dst, contents).unwrap();
}
}
@@ -73,6 +73,6 @@ fn is_valid_diagnostic_name(diagnostic: &str) -> Result<(), String> {
impl fmt::Display for Diagnostic {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- writeln!(f, "=== {}\n**Source:** {}\n{}", self.id, self.location, self.doc)
+ writeln!(f, "#### {}\n\nSource: {}\n\n{}\n\n", self.id, self.location, self.doc)
}
}
diff --git a/xtask/src/codegen/feature_docs.rs b/xtask/src/codegen/feature_docs.rs
index c6451d888b..51ff13aba8 100644
--- a/xtask/src/codegen/feature_docs.rs
+++ b/xtask/src/codegen/feature_docs.rs
@@ -1,9 +1,9 @@
-//! Generates `assists.md` documentation.
+//! Generates `features_generated.md` documentation.
use std::{fmt, fs, io, path::PathBuf};
use crate::{
- codegen::{CommentBlock, Location},
+ codegen::{add_preamble, CommentBlock, Location},
project_root,
util::list_rust_files,
};
@@ -11,14 +11,8 @@ use crate::{
pub(crate) fn generate(_check: bool) {
let features = Feature::collect().unwrap();
let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
- let contents = format!(
- "
-// Generated file, do not edit by hand, see `sourcegen_feature_docs`.
-{}
-",
- contents.trim()
- );
- let dst = project_root().join("docs/user/generated_features.adoc");
+ let contents = add_preamble(crate::flags::CodegenType::FeatureDocs, contents);
+ let dst = project_root().join("docs/book/src/features_generated.md");
fs::write(dst, contents).unwrap();
}
@@ -80,6 +74,6 @@ fn is_valid_feature_name(feature: &str) -> Result<(), String> {
impl fmt::Display for Feature {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- writeln!(f, "=== {}\n**Source:** {}\n{}", self.id, self.location, self.doc)
+ writeln!(f, "### {}\n**Source:** {}\n{}", self.id, self.location, self.doc)
}
}
diff --git a/xtask/src/codegen/grammar.rs b/xtask/src/codegen/grammar.rs
index d5fe323678..e20dda7fec 100644
--- a/xtask/src/codegen/grammar.rs
+++ b/xtask/src/codegen/grammar.rs
@@ -397,6 +397,9 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String {
if "{}[]()".contains(token) {
let c = token.chars().next().unwrap();
quote! { #c }
+ // underscore is an identifier in the proc-macro api
+ } else if *token == "_" {
+ quote! { _ }
} else {
let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
quote! { #(#cs)* }
diff --git a/xtask/src/codegen/parser_inline_tests.rs b/xtask/src/codegen/parser_inline_tests.rs
index f9f73df8eb..88732cebe7 100644
--- a/xtask/src/codegen/parser_inline_tests.rs
+++ b/xtask/src/codegen/parser_inline_tests.rs
@@ -18,92 +18,87 @@ use crate::{
util::list_rust_files,
};
-const PARSER_CRATE_ROOT: &str = "crates/parser";
-const PARSER_TEST_DATA: &str = "crates/parser/test_data";
-const PARSER_TEST_DATA_INLINE: &str = "crates/parser/test_data/parser/inline";
-
pub(crate) fn generate(check: bool) {
- let tests = tests_from_dir(
- &project_root().join(Path::new(&format!("{PARSER_CRATE_ROOT}/src/grammar"))),
- );
+ let parser_crate_root = project_root().join("crates/parser");
+ let parser_test_data = parser_crate_root.join("test_data");
+ let parser_test_data_inline = parser_test_data.join("parser/inline");
+
+ let tests = tests_from_dir(&parser_crate_root.join("src/grammar"));
let mut some_file_was_updated = false;
some_file_was_updated |=
- install_tests(&tests.ok, &format!("{PARSER_TEST_DATA_INLINE}/ok"), check).unwrap();
+ install_tests(&tests.ok, parser_test_data_inline.join("ok"), check).unwrap();
some_file_was_updated |=
- install_tests(&tests.err, &format!("{PARSER_TEST_DATA_INLINE}/err"), check).unwrap();
+ install_tests(&tests.err, parser_test_data_inline.join("err"), check).unwrap();
if some_file_was_updated {
- let _ = fs::File::open(format!("{PARSER_CRATE_ROOT}/src/tests.rs"))
+ let _ = fs::File::open(parser_crate_root.join("src/tests.rs"))
.unwrap()
.set_modified(SystemTime::now());
+ }
- let ok_tests = tests.ok.values().sorted_by(|a, b| a.name.cmp(&b.name)).map(|test| {
- let test_name = quote::format_ident!("{}", test.name);
- let test_file = format!("test_data/parser/inline/ok/{test_name}.rs");
- let (test_func, args) = match &test.edition {
- Some(edition) => {
- let edition = quote::format_ident!("Edition{edition}");
- (
- quote::format_ident!("run_and_expect_no_errors_with_edition"),
- quote::quote! {#test_file, crate::Edition::#edition},
- )
- }
- None => {
- (quote::format_ident!("run_and_expect_no_errors"), quote::quote! {#test_file})
- }
- };
- quote::quote! {
- #[test]
- fn #test_name() {
- #test_func(#args);
- }
- }
- });
- let err_tests = tests.err.values().sorted_by(|a, b| a.name.cmp(&b.name)).map(|test| {
- let test_name = quote::format_ident!("{}", test.name);
- let test_file = format!("test_data/parser/inline/err/{test_name}.rs");
- let (test_func, args) = match &test.edition {
- Some(edition) => {
- let edition = quote::format_ident!("Edition{edition}");
- (
- quote::format_ident!("run_and_expect_errors_with_edition"),
- quote::quote! {#test_file, crate::Edition::#edition},
- )
- }
- None => (quote::format_ident!("run_and_expect_errors"), quote::quote! {#test_file}),
- };
- quote::quote! {
- #[test]
- fn #test_name() {
- #test_func(#args);
- }
+ let ok_tests = tests.ok.values().sorted_by(|a, b| a.name.cmp(&b.name)).map(|test| {
+ let test_name = quote::format_ident!("{}", test.name);
+ let test_file = format!("test_data/parser/inline/ok/{test_name}.rs");
+ let (test_func, args) = match &test.edition {
+ Some(edition) => {
+ let edition = quote::format_ident!("Edition{edition}");
+ (
+ quote::format_ident!("run_and_expect_no_errors_with_edition"),
+ quote::quote! {#test_file, crate::Edition::#edition},
+ )
}
- });
-
- let output = quote::quote! {
- mod ok {
- use crate::tests::*;
- #(#ok_tests)*
+ None => (quote::format_ident!("run_and_expect_no_errors"), quote::quote! {#test_file}),
+ };
+ quote::quote! {
+ #[test]
+ fn #test_name() {
+ #test_func(#args);
}
- mod err {
- use crate::tests::*;
- #(#err_tests)*
+ }
+ });
+ let err_tests = tests.err.values().sorted_by(|a, b| a.name.cmp(&b.name)).map(|test| {
+ let test_name = quote::format_ident!("{}", test.name);
+ let test_file = format!("test_data/parser/inline/err/{test_name}.rs");
+ let (test_func, args) = match &test.edition {
+ Some(edition) => {
+ let edition = quote::format_ident!("Edition{edition}");
+ (
+ quote::format_ident!("run_and_expect_errors_with_edition"),
+ quote::quote! {#test_file, crate::Edition::#edition},
+ )
}
+ None => (quote::format_ident!("run_and_expect_errors"), quote::quote! {#test_file}),
};
+ quote::quote! {
+ #[test]
+ fn #test_name() {
+ #test_func(#args);
+ }
+ }
+ });
- let pretty = reformat(output.to_string());
- ensure_file_contents(
- crate::flags::CodegenType::ParserTests,
- format!("{PARSER_TEST_DATA}/generated/runner.rs").as_ref(),
- &pretty,
- check,
- );
- }
+ let output = quote::quote! {
+ mod ok {
+ use crate::tests::*;
+ #(#ok_tests)*
+ }
+ mod err {
+ use crate::tests::*;
+ #(#err_tests)*
+ }
+ };
+
+ let pretty = reformat(output.to_string());
+ ensure_file_contents(
+ crate::flags::CodegenType::ParserTests,
+ parser_test_data.join("generated/runner.rs").as_ref(),
+ &pretty,
+ check,
+ );
}
-fn install_tests(tests: &HashMap<String, Test>, into: &str, check: bool) -> Result<bool> {
- let tests_dir = project_root().join(into);
+fn install_tests(tests: &HashMap<String, Test>, tests_dir: PathBuf, check: bool) -> Result<bool> {
if !tests_dir.is_dir() {
fs::create_dir_all(&tests_dir)?;
}
diff --git a/xtask/src/publish/notes.rs b/xtask/src/publish/notes.rs
index c30267295b..7245ce2431 100644
--- a/xtask/src/publish/notes.rs
+++ b/xtask/src/publish/notes.rs
@@ -549,18 +549,18 @@ impl Macro {
}
"pr" => {
let pr = &self.target;
- let url = format!("https://github.com/rust-analyzer/rust-analyzer/pull/{pr}");
+ let url = format!("https://github.com/rust-lang/rust-analyzer/pull/{pr}");
format!("[`#{pr}`]({url})")
}
"commit" => {
let hash = &self.target;
let short = &hash[0..7];
- let url = format!("https://github.com/rust-analyzer/rust-analyzer/commit/{hash}");
+ let url = format!("https://github.com/rust-lang/rust-analyzer/commit/{hash}");
format!("[`{short}`]({url})")
}
"release" => {
let date = &self.target;
- let url = format!("https://github.com/rust-analyzer/rust-analyzer/releases/{date}");
+ let url = format!("https://github.com/rust-lang/rust-analyzer/releases/{date}");
format!("[`{date}`]({url})")
}
_ => bail!("macro not supported: {name}"),
diff --git a/xtask/src/release.rs b/xtask/src/release.rs
index 1a1364c7d1..8e56ce439c 100644
--- a/xtask/src/release.rs
+++ b/xtask/src/release.rs
@@ -9,7 +9,7 @@ use directories::ProjectDirs;
use stdx::JodChild;
use xshell::{cmd, Shell};
-use crate::{codegen, date_iso, flags, is_release_tag, project_root};
+use crate::{date_iso, flags, is_release_tag, project_root};
impl flags::Release {
pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
@@ -29,11 +29,6 @@ impl flags::Release {
cmd!(sh, "git push --force").run()?;
}
- // Generates bits of manual.adoc.
- codegen::diagnostics_docs::generate(false);
- codegen::assists_doc_tests::generate(false);
- codegen::feature_docs::generate(false);
-
let website_root = project_root().join("../rust-analyzer.github.io");
{
let _dir = sh.push_dir(&website_root);
@@ -54,20 +49,6 @@ impl flags::Release {
.max()
.unwrap_or_default();
- for adoc in [
- "manual.adoc",
- "generated_assists.adoc",
- "generated_config.adoc",
- "generated_diagnostic.adoc",
- "generated_features.adoc",
- ] {
- let src = project_root().join("./docs/user/").join(adoc);
- let dst = website_root.join(adoc);
-
- let contents = sh.read_file(src)?;
- sh.write_file(dst, contents)?;
- }
-
let tags = cmd!(sh, "git tag --list").read()?;
let prev_tag = tags.lines().filter(|line| is_release_tag(line)).last().unwrap();
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
index 35412be876..b500b251ed 100644
--- a/xtask/src/tidy.rs
+++ b/xtask/src/tidy.rs
@@ -27,8 +27,9 @@ fn check_lsp_extensions_docs(sh: &Shell) {
};
let actual_hash = {
- let lsp_extensions_md =
- sh.read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap();
+ let lsp_extensions_md = sh
+ .read_file(project_root().join("docs/book/src/contributing/lsp-extensions.md"))
+ .unwrap();
let text = lsp_extensions_md
.lines()
.find_map(|line| line.strip_prefix("lsp/ext.rs hash:"))
@@ -185,7 +186,7 @@ Zlib OR Apache-2.0 OR MIT
fn check_test_attrs(path: &Path, text: &str) {
let panic_rule =
- "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#should_panic";
+ "https://github.com/rust-lang/rust-analyzer/blob/master/docs/book/src/contributing/style.md#should_panic";
let need_panic: &[&str] = &[
// This file.
"slow-tests/tidy.rs",
diff --git a/xtask/test_data/expected.md b/xtask/test_data/expected.md
index 19c940c67b..301837b5c2 100644
--- a/xtask/test_data/expected.md
+++ b/xtask/test_data/expected.md
@@ -2,12 +2,12 @@
Hello!
-Commit: [`0123456`](https://github.com/rust-analyzer/rust-analyzer/commit/0123456789abcdef0123456789abcdef01234567) \
-Release: [`2022-01-01`](https://github.com/rust-analyzer/rust-analyzer/releases/2022-01-01)
+Commit: [`0123456`](https://github.com/rust-lang/rust-analyzer/commit/0123456789abcdef0123456789abcdef01234567) \
+Release: [`2022-01-01`](https://github.com/rust-lang/rust-analyzer/releases/2022-01-01)
## New Features
-- **BREAKING** [`#1111`](https://github.com/rust-analyzer/rust-analyzer/pull/1111) shortcut <kbd>ctrl</kbd>+<kbd>r</kbd>
+- **BREAKING** [`#1111`](https://github.com/rust-lang/rust-analyzer/pull/1111) shortcut <kbd>ctrl</kbd>+<kbd>r</kbd>
- hyphen-prefixed list item
- nested list item
- `foo` -> `foofoo`
@@ -65,7 +65,7 @@ Release: [`2022-01-01`](https://github.com/rust-analyzer/rust-analyzer/releases/
- list item with an inline image
![](https://example.com/animation.gif)
-The highlight of the month is probably [`#1111`](https://github.com/rust-analyzer/rust-analyzer/pull/1111).
+The highlight of the month is probably [`#1111`](https://github.com/rust-lang/rust-analyzer/pull/1111).
See [online manual](https://example.com/manual) for more information.
```bash