Unnamed repository; edit this file 'description' to name the repository.
-rw-r--r--.github/workflows/ci.yaml39
-rw-r--r--.github/workflows/release.yaml4
-rw-r--r--Cargo.lock60
-rw-r--r--Cargo.toml8
-rw-r--r--crates/base-db/Cargo.toml1
-rw-r--r--crates/base-db/src/input.rs45
-rw-r--r--crates/base-db/src/lib.rs11
-rw-r--r--crates/cfg/Cargo.toml1
-rw-r--r--crates/hir-def/Cargo.toml1
-rw-r--r--crates/hir-def/src/db.rs21
-rw-r--r--crates/hir-def/src/expr_store.rs76
-rw-r--r--crates/hir-def/src/expr_store/lower.rs35
-rw-r--r--crates/hir-def/src/expr_store/path.rs4
-rw-r--r--crates/hir-def/src/expr_store/pretty.rs2
-rw-r--r--crates/hir-def/src/expr_store/scope.rs2
-rw-r--r--crates/hir-def/src/hir/generics.rs6
-rw-r--r--crates/hir-def/src/hir/type_ref.rs2
-rw-r--r--crates/hir-def/src/import_map.rs8
-rw-r--r--crates/hir-def/src/lang_item.rs24
-rw-r--r--crates/hir-def/src/lib.rs63
-rw-r--r--crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs80
-rw-r--r--crates/hir-def/src/nameres/assoc.rs12
-rw-r--r--crates/hir-def/src/nameres/collector.rs4
-rw-r--r--crates/hir-def/src/nameres/path_resolution.rs11
-rw-r--r--crates/hir-def/src/nameres/tests/incremental.rs4
-rw-r--r--crates/hir-def/src/signatures.rs90
-rw-r--r--crates/hir-def/src/visibility.rs2
-rw-r--r--crates/hir-expand/Cargo.toml1
-rw-r--r--crates/hir-expand/src/attrs.rs25
-rw-r--r--crates/hir-expand/src/builtin/derive_macro.rs140
-rw-r--r--crates/hir-expand/src/builtin/fn_macro.rs61
-rw-r--r--crates/hir-expand/src/db.rs2
-rw-r--r--crates/hir-expand/src/files.rs20
-rw-r--r--crates/hir-expand/src/lib.rs28
-rw-r--r--crates/hir-expand/src/name.rs3
-rw-r--r--crates/hir-expand/src/prettify_macro_expansion_.rs2
-rw-r--r--crates/hir-expand/src/proc_macro.rs14
-rw-r--r--crates/hir-ty/Cargo.toml1
-rw-r--r--crates/hir-ty/src/autoderef.rs2
-rw-r--r--crates/hir-ty/src/chalk_db.rs16
-rw-r--r--crates/hir-ty/src/db.rs40
-rw-r--r--crates/hir-ty/src/diagnostics/decl_check.rs4
-rw-r--r--crates/hir-ty/src/diagnostics/expr.rs6
-rw-r--r--crates/hir-ty/src/diagnostics/match_check.rs12
-rw-r--r--crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs90
-rw-r--r--crates/hir-ty/src/display.rs11
-rw-r--r--crates/hir-ty/src/dyn_compatibility.rs4
-rw-r--r--crates/hir-ty/src/generics.rs11
-rw-r--r--crates/hir-ty/src/infer.rs50
-rw-r--r--crates/hir-ty/src/infer/cast.rs2
-rw-r--r--crates/hir-ty/src/infer/closure.rs22
-rw-r--r--crates/hir-ty/src/infer/expr.rs55
-rw-r--r--crates/hir-ty/src/infer/mutability.rs10
-rw-r--r--crates/hir-ty/src/infer/pat.rs6
-rw-r--r--crates/hir-ty/src/infer/path.rs2
-rw-r--r--crates/hir-ty/src/infer/unify.rs4
-rw-r--r--crates/hir-ty/src/inhabitedness.rs2
-rw-r--r--crates/hir-ty/src/layout.rs6
-rw-r--r--crates/hir-ty/src/layout/adt.rs6
-rw-r--r--crates/hir-ty/src/layout/target.rs5
-rw-r--r--crates/hir-ty/src/lib.rs4
-rw-r--r--crates/hir-ty/src/lower.rs88
-rw-r--r--crates/hir-ty/src/lower/path.rs2
-rw-r--r--crates/hir-ty/src/mapping.rs21
-rw-r--r--crates/hir-ty/src/method_resolution.rs4
-rw-r--r--crates/hir-ty/src/mir/eval.rs26
-rw-r--r--crates/hir-ty/src/mir/eval/shim.rs5
-rw-r--r--crates/hir-ty/src/mir/eval/shim/simd.rs2
-rw-r--r--crates/hir-ty/src/mir/eval/tests.rs14
-rw-r--r--crates/hir-ty/src/mir/lower.rs7
-rw-r--r--crates/hir-ty/src/mir/lower/as_place.rs10
-rw-r--r--crates/hir-ty/src/mir/lower/pattern_matching.rs4
-rw-r--r--crates/hir-ty/src/mir/pretty.rs2
-rw-r--r--crates/hir-ty/src/test_db.rs9
-rw-r--r--crates/hir-ty/src/tests.rs2
-rw-r--r--crates/hir-ty/src/tests/closure_captures.rs28
-rw-r--r--crates/hir-ty/src/tests/coercion.rs2
-rw-r--r--crates/hir-ty/src/tests/display_source_code.rs8
-rw-r--r--crates/hir-ty/src/tests/incremental.rs43
-rw-r--r--crates/hir-ty/src/tests/method_resolution.rs8
-rw-r--r--crates/hir-ty/src/tests/never_type.rs28
-rw-r--r--crates/hir-ty/src/tests/regression.rs4
-rw-r--r--crates/hir-ty/src/tests/simple.rs26
-rw-r--r--crates/hir-ty/src/tests/traits.rs76
-rw-r--r--crates/hir-ty/src/utils.rs7
-rw-r--r--crates/hir-ty/src/variance.rs18
-rw-r--r--crates/hir/Cargo.toml1
-rw-r--r--crates/hir/src/attrs.rs2
-rw-r--r--crates/hir/src/diagnostics.rs14
-rw-r--r--crates/hir/src/display.rs6
-rw-r--r--crates/hir/src/lib.rs80
-rw-r--r--crates/hir/src/semantics.rs4
-rw-r--r--crates/hir/src/semantics/child_by_source.rs4
-rw-r--r--crates/hir/src/source_analyzer.rs43
-rw-r--r--crates/hir/src/symbols.rs2
-rw-r--r--crates/ide-assists/Cargo.toml1
-rw-r--r--crates/ide-assists/src/handlers/expand_rest_pattern.rs2
-rw-r--r--crates/ide-assists/src/handlers/generate_enum_is_method.rs4
-rw-r--r--crates/ide-assists/src/handlers/generate_enum_projection_method.rs4
-rw-r--r--crates/ide-assists/src/handlers/promote_local_to_const.rs22
-rw-r--r--crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs21
-rw-r--r--crates/ide-assists/src/handlers/term_search.rs16
-rw-r--r--crates/ide-assists/src/handlers/toggle_macro_delimiter.rs26
-rw-r--r--crates/ide-assists/src/handlers/unmerge_match_arm.rs63
-rw-r--r--crates/ide-assists/src/handlers/wrap_return_type.rs163
-rw-r--r--crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs86
-rw-r--r--crates/ide-assists/src/utils.rs15
-rw-r--r--crates/ide-completion/Cargo.toml1
-rw-r--r--crates/ide-completion/src/completions/fn_param.rs2
-rw-r--r--crates/ide-completion/src/context/analysis.rs10
-rw-r--r--crates/ide-completion/src/tests/attribute.rs4
-rw-r--r--crates/ide-completion/src/tests/expression.rs34
-rw-r--r--crates/ide-completion/src/tests/item_list.rs27
-rw-r--r--crates/ide-completion/src/tests/type_pos.rs18
-rw-r--r--crates/ide-db/Cargo.toml1
-rw-r--r--crates/ide-db/src/famous_defs.rs12
-rw-r--r--crates/ide-db/src/path_transform.rs31
-rw-r--r--crates/ide-db/src/prime_caches.rs2
-rw-r--r--crates/ide-db/src/search.rs8
-rw-r--r--crates/ide-diagnostics/Cargo.toml1
-rw-r--r--crates/ide-diagnostics/src/handlers/macro_error.rs4
-rw-r--r--crates/ide-diagnostics/src/handlers/missing_fields.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs18
-rw-r--r--crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs9
-rw-r--r--crates/ide-diagnostics/src/handlers/type_mismatch.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_method.rs13
-rw-r--r--crates/ide-ssr/Cargo.toml1
-rw-r--r--crates/ide/Cargo.toml1
-rw-r--r--crates/ide/src/call_hierarchy.rs2
-rw-r--r--crates/ide/src/doc_links.rs27
-rw-r--r--crates/ide/src/file_structure.rs50
-rwxr-xr-xcrates/ide/src/folding_ranges.rs94
-rw-r--r--crates/ide/src/goto_definition.rs315
-rw-r--r--crates/ide/src/goto_type_definition.rs6
-rw-r--r--crates/ide/src/highlight_related.rs352
-rw-r--r--crates/ide/src/hover/tests.rs31
-rw-r--r--crates/ide/src/inlay_hints/adjustment.rs108
-rw-r--r--crates/ide/src/inlay_hints/bind_pat.rs6
-rw-r--r--crates/ide/src/inlay_hints/bounds.rs2
-rw-r--r--crates/ide/src/inlay_hints/closing_brace.rs4
-rw-r--r--crates/ide/src/navigation_target.rs2
-rw-r--r--crates/ide/src/references.rs223
-rw-r--r--crates/ide/src/runnables.rs20
-rw-r--r--crates/ide/src/view_crate_graph.rs2
-rw-r--r--crates/intern/Cargo.toml1
-rw-r--r--crates/intern/src/symbol/symbols.rs2
-rw-r--r--crates/load-cargo/src/lib.rs95
-rw-r--r--crates/mbe/Cargo.toml1
-rw-r--r--crates/parser/Cargo.toml1
-rw-r--r--crates/parser/src/grammar/generic_params.rs7
-rw-r--r--crates/parser/src/lexed_str.rs131
-rw-r--r--crates/parser/test_data/parser/inline/ok/type_param_bounds.rast3
-rw-r--r--crates/parser/test_data/parser/inline/ok/type_param_bounds.rs2
-rw-r--r--crates/paths/Cargo.toml1
-rw-r--r--crates/proc-macro-api/Cargo.toml1
-rw-r--r--crates/proc-macro-srv/Cargo.toml1
-rw-r--r--crates/proc-macro-srv/proc-macro-test/Cargo.toml1
-rw-r--r--crates/proc-macro-srv/proc-macro-test/build.rs12
-rw-r--r--crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml1
-rw-r--r--crates/profile/Cargo.toml1
-rw-r--r--crates/project-model/Cargo.toml1
-rw-r--r--crates/project-model/src/build_dependencies.rs100
-rw-r--r--crates/project-model/src/cargo_workspace.rs8
-rw-r--r--crates/project-model/src/env.rs22
-rw-r--r--crates/project-model/src/sysroot.rs29
-rw-r--r--crates/project-model/src/tests.rs2
-rw-r--r--crates/project-model/src/workspace.rs129
-rw-r--r--crates/query-group-macro/Cargo.toml1
-rw-r--r--crates/rust-analyzer/Cargo.toml1
-rw-r--r--crates/rust-analyzer/src/cli/rustc_tests.rs8
-rw-r--r--crates/rust-analyzer/src/config.rs9
-rw-r--r--crates/rust-analyzer/src/flycheck.rs7
-rw-r--r--crates/rust-analyzer/src/global_state.rs2
-rw-r--r--crates/rust-analyzer/src/handlers/dispatch.rs5
-rw-r--r--crates/rust-analyzer/src/handlers/request.rs19
-rw-r--r--crates/rust-analyzer/src/lsp/capabilities.rs6
-rw-r--r--crates/rust-analyzer/src/lsp/to_proto.rs5
-rw-r--r--crates/rust-analyzer/src/main_loop.rs11
-rw-r--r--crates/rust-analyzer/src/reload.rs39
-rw-r--r--crates/span/src/ast_id.rs15
-rw-r--r--crates/span/src/hygiene.rs16
-rw-r--r--crates/stdx/Cargo.toml1
-rw-r--r--crates/syntax-bridge/Cargo.toml1
-rw-r--r--crates/syntax/Cargo.toml1
-rw-r--r--crates/syntax/rust.ungram2
-rw-r--r--crates/syntax/src/ast/generated/nodes.rs4
-rw-r--r--crates/syntax/src/ast/make.rs7
-rw-r--r--crates/syntax/src/ast/syntax_factory/constructors.rs41
-rw-r--r--crates/syntax/src/ast/token_ext.rs94
-rw-r--r--crates/syntax/src/syntax_editor.rs2
-rw-r--r--crates/syntax/src/validation.rs28
-rw-r--r--crates/test-utils/Cargo.toml1
-rw-r--r--crates/test-utils/src/fixture.rs22
-rw-r--r--crates/test-utils/src/minicore.rs188
-rw-r--r--crates/toolchain/Cargo.toml1
-rw-r--r--crates/tt/Cargo.toml1
-rw-r--r--crates/tt/src/iter.rs1
-rw-r--r--crates/vfs-notify/Cargo.toml1
-rw-r--r--crates/vfs/Cargo.toml1
-rw-r--r--crates/vfs/src/file_set.rs6
-rw-r--r--docs/book/src/SUMMARY.md1
-rw-r--r--docs/book/src/configuration_generated.md7
-rw-r--r--docs/book/src/contributing/lsp-extensions.md18
-rw-r--r--docs/book/src/faq.md7
-rw-r--r--docs/book/src/non_cargo_based_projects.md3
-rw-r--r--docs/book/src/troubleshooting.md3
-rw-r--r--editors/code/package.json10
-rw-r--r--editors/code/src/client.ts114
-rw-r--r--editors/code/src/commands.ts4
-rw-r--r--editors/code/src/main.ts2
-rw-r--r--editors/code/src/run.ts57
-rw-r--r--rust-version2
212 files changed, 3548 insertions, 1530 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 79fb7a2d2e..770652494f 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -17,6 +17,10 @@ env:
RUST_BACKTRACE: short
RUSTUP_MAX_RETRIES: 10
+defaults:
+ run:
+ shell: bash
+
jobs:
changes:
runs-on: ubuntu-latest
@@ -80,6 +84,7 @@ jobs:
CC: deny_c
strategy:
+ fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
@@ -99,7 +104,7 @@ jobs:
rustup toolchain install nightly --profile minimal --component rustfmt
# https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/rust.json
- name: Install Rust Problem Matcher
- if: matrix.os == 'ubuntu-latest'
+ if: matrix.os == 'macos-latest'
run: echo "::add-matcher::.github/rust.json"
# - name: Cache Dependencies
@@ -116,23 +121,9 @@ jobs:
if: matrix.os == 'ubuntu-latest'
run: cargo codegen --check
- - name: Compile tests
- run: cargo test --no-run
-
- name: Run tests
run: cargo nextest run --no-fail-fast --hide-progress-bar --status-level fail
- - name: Cancel parallel jobs
- if: failure()
- run: |
- # https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#cancel-a-workflow-run
- curl -L \
- -X POST \
- -H "Accept: application/vnd.github.v3+json" \
- -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \
- -H "X-GitHub-Api-Version: 2022-11-28" \
- https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/cancel
-
- name: Run Clippy
if: matrix.os == 'macos-latest'
run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr
@@ -333,3 +324,21 @@ jobs:
jq -C <<< '${{ toJson(needs) }}'
# Check if all jobs that we depend on (in the needs array) were successful (or have been skipped).
jq --exit-status 'all(.result == "success" or .result == "skipped")' <<< '${{ toJson(needs) }}'
+
+ cancel-if-matrix-failed:
+ needs: rust
+ if: ${{ always() }}
+ runs-on: ubuntu-latest
+ steps:
+ - name: Cancel parallel jobs
+ run: |
+ if jq --exit-status 'all(.result == "success" or .result == "skipped")' <<< '${{ toJson(needs) }}'; then
+ exit 0
+ fi
+ # https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#cancel-a-workflow-run
+ curl -L \
+ -X POST \
+ -H "Accept: application/vnd.github.v3+json" \
+ -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \
+ -H "X-GitHub-Api-Version: 2022-11-28" \
+ https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/cancel
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index a758ecfd46..5bd90130f4 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -134,13 +134,13 @@ jobs:
- name: Run analysis-stats on rust-analyzer
if: matrix.target == 'x86_64-unknown-linux-gnu'
- run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats .
+ run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats . -q
- name: Run analysis-stats on rust std library
if: matrix.target == 'x86_64-unknown-linux-gnu'
env:
RUSTC_BOOTSTRAP: 1
- run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std
+ run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std -q
- name: Upload artifacts
uses: actions/upload-artifact@v4
diff --git a/Cargo.lock b/Cargo.lock
index 2c7b464164..7432a82080 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -570,12 +570,6 @@ dependencies = [
]
[[package]]
-name = "heck"
-version = "0.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
-
-[[package]]
name = "hermit-abi"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1021,6 +1015,15 @@ dependencies = [
]
[[package]]
+name = "intrusive-collections"
+version = "0.9.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "189d0897e4cbe8c75efedf3502c18c887b05046e59d28404d4d8e46cbc4d1e86"
+dependencies = [
+ "memoffset",
+]
+
+[[package]]
name = "itertools"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1428,6 +1431,16 @@ dependencies = [
]
[[package]]
+name = "papaya"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f92dd0b07c53a0a0c764db2ace8c541dc47320dad97c2200c2a637ab9dd2328f"
+dependencies = [
+ "equivalent",
+ "seize",
+]
+
+[[package]]
name = "parking_lot"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1458,7 +1471,7 @@ dependencies = [
"edition",
"expect-test",
"ra-ap-rustc_lexer",
- "rustc-literal-escaper 0.0.3",
+ "rustc-literal-escaper 0.0.4",
"stdx",
"tracing",
]
@@ -1927,9 +1940,9 @@ checksum = "0041b6238913c41fe704213a4a9329e2f685a156d1781998128b4149c230ad04"
[[package]]
name = "rustc-literal-escaper"
-version = "0.0.3"
+version = "0.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78744cd17f5d01c75b709e49807d1363e02a940ccee2e9e72435843fdb0d076e"
+checksum = "ab03008eb631b703dd16978282ae36c73282e7922fe101a4bd072a40ecea7b8b"
[[package]]
name = "rustc-stable-hash"
@@ -1955,16 +1968,18 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8fff508e3d6ef42a32607f7538e17171a877a12015e32036f46e99d00c95781"
+checksum = "2e235afdb8e510f38a07138fbe5a0b64691894358a9c0cbd813b1aade110efc9"
dependencies = [
"boxcar",
"crossbeam-queue",
- "dashmap",
+ "crossbeam-utils",
"hashbrown 0.15.4",
"hashlink",
"indexmap",
+ "intrusive-collections",
+ "papaya",
"parking_lot",
"portable-atomic",
"rayon",
@@ -1978,17 +1993,16 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ea72b3c06f2ce6350fe3a0eeb7aaaf842d1d8352b706973c19c4f02e298a87c"
+checksum = "2edb86a7e9c91f6d30c9ce054312721dbe773a162db27bbfae834d16177b30ce"
[[package]]
name = "salsa-macros"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ce92025bc160b27814a207cb78d680973af17f863c7f4fc56cf3a535e22f378"
+checksum = "d0778d6e209051bc4e75acfe83bcd7848601ec3dbe9c3dbb982829020e9128af"
dependencies = [
- "heck",
"proc-macro2",
"quote",
"syn",
@@ -2026,6 +2040,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
+name = "seize"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e4b8d813387d566f627f3ea1b914c068aac94c40ae27ec43f5f33bde65abefe7"
+dependencies = [
+ "libc",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
name = "semver"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2207,7 +2231,7 @@ dependencies = [
"rayon",
"rowan",
"rustc-hash 2.1.1",
- "rustc-literal-escaper 0.0.3",
+ "rustc-literal-escaper 0.0.4",
"rustc_apfloat",
"smol_str",
"stdx",
diff --git a/Cargo.toml b/Cargo.toml
index 449c75859c..d268ce5b0b 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -49,6 +49,8 @@ debug = 2
# ungrammar = { path = "../ungrammar" }
# salsa = { path = "../salsa" }
+# salsa-macros = { path = "../salsa/components/salsa-macros" }
+# salsa-macro-rules = { path = "../salsa/components/salsa-macro-rules" }
[workspace.dependencies]
# local crates
@@ -136,14 +138,14 @@ rayon = "1.10.0"
rowan = "=0.15.15"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
-salsa = { version = "0.22.0", default-features = true, features = ["rayon","salsa_unstable", "macros"] }
-salsa-macros = "0.22.0"
+salsa = { version = "0.23.0", default-features = true, features = ["rayon","salsa_unstable", "macros"] }
+salsa-macros = "0.23.0"
semver = "1.0.26"
serde = { version = "1.0.219" }
serde_derive = { version = "1.0.219" }
serde_json = "1.0.140"
rustc-hash = "2.1.1"
-rustc-literal-escaper = "0.0.3"
+rustc-literal-escaper = "0.0.4"
smallvec = { version = "1.15.1", features = [
"const_new",
"union",
diff --git a/crates/base-db/Cargo.toml b/crates/base-db/Cargo.toml
index 3b423a86f9..ea06fd9c48 100644
--- a/crates/base-db/Cargo.toml
+++ b/crates/base-db/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
la-arena.workspace = true
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 2a87b15248..8c9393bcc9 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -6,6 +6,7 @@
//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
//! actual IO is done and lowered to input.
+use std::error::Error;
use std::hash::BuildHasherDefault;
use std::{fmt, mem, ops};
@@ -22,7 +23,49 @@ use vfs::{AbsPathBuf, AnchoredPath, FileId, VfsPath, file_set::FileSet};
use crate::{CrateWorkspaceData, EditionedFileId, FxIndexSet, RootQueryDb};
-pub type ProcMacroPaths = FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), String>>;
+pub type ProcMacroPaths =
+ FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), ProcMacroLoadingError>>;
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ProcMacroLoadingError {
+ Disabled,
+ FailedToBuild,
+ MissingDylibPath,
+ NotYetBuilt,
+ NoProcMacros,
+ ProcMacroSrvError(Box<str>),
+}
+impl ProcMacroLoadingError {
+ pub fn is_hard_error(&self) -> bool {
+ match self {
+ ProcMacroLoadingError::Disabled | ProcMacroLoadingError::NotYetBuilt => false,
+ ProcMacroLoadingError::FailedToBuild
+ | ProcMacroLoadingError::MissingDylibPath
+ | ProcMacroLoadingError::NoProcMacros
+ | ProcMacroLoadingError::ProcMacroSrvError(_) => true,
+ }
+ }
+}
+
+impl Error for ProcMacroLoadingError {}
+impl fmt::Display for ProcMacroLoadingError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ProcMacroLoadingError::Disabled => write!(f, "proc-macro expansion is disabled"),
+ ProcMacroLoadingError::FailedToBuild => write!(f, "proc-macro failed to build"),
+ ProcMacroLoadingError::MissingDylibPath => {
+ write!(f, "proc-macro crate build data is missing a dylib path")
+ }
+ ProcMacroLoadingError::NotYetBuilt => write!(f, "proc-macro not yet built"),
+ ProcMacroLoadingError::NoProcMacros => {
+ write!(f, "proc macro library has no proc macros")
+ }
+ ProcMacroLoadingError::ProcMacroSrvError(msg) => {
+ write!(f, "proc macro server error: {msg}")
+ }
+ }
+ }
+}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 478fae67c8..ad17f1730b 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -14,8 +14,9 @@ pub use crate::{
input::{
BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder,
CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap,
- DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroPaths, ReleaseChannel,
- SourceRoot, SourceRootId, TargetLayoutLoadResult, UniqueCrateData,
+ DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroLoadingError,
+ ProcMacroPaths, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
+ UniqueCrateData,
},
};
use dashmap::{DashMap, mapref::entry::Entry};
@@ -33,7 +34,7 @@ pub type FxIndexSet<T> = indexmap::IndexSet<T, rustc_hash::FxBuildHasher>;
#[macro_export]
macro_rules! impl_intern_key {
($id:ident, $loc:ident) => {
- #[salsa_macros::interned(no_lifetime)]
+ #[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct $id {
pub loc: $loc,
@@ -43,7 +44,7 @@ macro_rules! impl_intern_key {
impl ::std::fmt::Debug for $id {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
f.debug_tuple(stringify!($id))
- .field(&format_args!("{:04x}", self.0.as_u32()))
+ .field(&format_args!("{:04x}", self.0.index()))
.finish()
}
}
@@ -167,7 +168,7 @@ impl Files {
}
}
-#[salsa_macros::interned(no_lifetime, debug, constructor=from_span)]
+#[salsa_macros::interned(no_lifetime, debug, constructor=from_span, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct EditionedFileId {
pub editioned_file_id: span::EditionedFileId,
diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml
index d7764a16c0..ba34966614 100644
--- a/crates/cfg/Cargo.toml
+++ b/crates/cfg/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
rustc-hash.workspace = true
diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml
index c6922eca49..abb4819a76 100644
--- a/crates/hir-def/Cargo.toml
+++ b/crates/hir-def/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
arrayvec.workspace = true
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index 00408e95ae..c67bb2422a 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -25,11 +25,10 @@ use crate::{
import_map::ImportMap,
item_tree::{ItemTree, file_item_tree_query},
lang_item::{self, LangItem},
- nameres::{assoc::TraitItems, crate_def_map, diagnostics::DefDiagnostics},
+ nameres::crate_def_map,
signatures::{
ConstSignature, EnumSignature, FunctionSignature, ImplSignature, StaticSignature,
StructSignature, TraitAliasSignature, TraitSignature, TypeAliasSignature, UnionSignature,
- VariantFields,
},
tt,
visibility::{self, Visibility},
@@ -113,24 +112,6 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
// region:data
- #[salsa::invoke(VariantFields::query)]
- fn variant_fields_with_source_map(
- &self,
- id: VariantId,
- ) -> (Arc<VariantFields>, Arc<ExpressionStoreSourceMap>);
-
- #[salsa::transparent]
- #[salsa::invoke(TraitItems::trait_items_query)]
- fn trait_items(&self, e: TraitId) -> Arc<TraitItems>;
-
- #[salsa::invoke(TraitItems::trait_items_with_diagnostics_query)]
- fn trait_items_with_diagnostics(&self, tr: TraitId) -> (Arc<TraitItems>, DefDiagnostics);
-
- #[salsa::tracked]
- fn variant_fields(&self, id: VariantId) -> Arc<VariantFields> {
- self.variant_fields_with_source_map(id).0
- }
-
#[salsa::tracked]
fn trait_signature(&self, trait_: TraitId) -> Arc<TraitSignature> {
self.trait_signature_with_source_map(trait_).0
diff --git a/crates/hir-def/src/expr_store.rs b/crates/hir-def/src/expr_store.rs
index f617c3225a..51612f341a 100644
--- a/crates/hir-def/src/expr_store.rs
+++ b/crates/hir-def/src/expr_store.rs
@@ -9,7 +9,10 @@ pub mod scope;
#[cfg(test)]
mod tests;
-use std::ops::{Deref, Index};
+use std::{
+ ops::{Deref, Index},
+ sync::LazyLock,
+};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
@@ -19,6 +22,7 @@ use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::{Edition, SyntaxContext};
use syntax::{AstPtr, SyntaxNodePtr, ast};
+use triomphe::Arc;
use tt::TextRange;
use crate::{
@@ -89,7 +93,7 @@ pub type TypeSource = InFile<TypePtr>;
pub type LifetimePtr = AstPtr<ast::Lifetime>;
pub type LifetimeSource = InFile<LifetimePtr>;
-#[derive(Debug, Eq, PartialEq)]
+#[derive(Debug, PartialEq, Eq)]
pub struct ExpressionStore {
pub exprs: Arena<Expr>,
pub pats: Arena<Pat>,
@@ -110,7 +114,7 @@ pub struct ExpressionStore {
ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
}
-#[derive(Debug, Eq, PartialEq, Default)]
+#[derive(Debug, Eq, Default)]
pub struct ExpressionStoreSourceMap {
// AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map
// to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected).
@@ -123,19 +127,20 @@ pub struct ExpressionStoreSourceMap {
label_map: FxHashMap<LabelSource, LabelId>,
label_map_back: ArenaMap<LabelId, LabelSource>,
- binding_definitions: FxHashMap<BindingId, SmallVec<[PatId; 4]>>,
-
- /// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
- /// Instead, we use id of expression (`92`) to identify the field.
- field_map_back: FxHashMap<ExprId, FieldSource>,
- pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
-
types_map_back: ArenaMap<TypeRefId, TypeSource>,
types_map: FxHashMap<TypeSource, TypeRefId>,
lifetime_map_back: ArenaMap<LifetimeRefId, LifetimeSource>,
lifetime_map: FxHashMap<LifetimeSource, LifetimeRefId>,
+ binding_definitions:
+ ArenaMap<BindingId, SmallVec<[PatId; 2 * size_of::<usize>() / size_of::<PatId>()]>>,
+
+ /// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
+ /// Instead, we use id of expression (`92`) to identify the field.
+ field_map_back: FxHashMap<ExprId, FieldSource>,
+ pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
+
template_map: Option<Box<FormatTemplate>>,
pub expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
@@ -145,6 +150,43 @@ pub struct ExpressionStoreSourceMap {
pub diagnostics: Vec<ExpressionStoreDiagnostics>,
}
+impl PartialEq for ExpressionStoreSourceMap {
+ fn eq(&self, other: &Self) -> bool {
+ // we only need to compare one of the two mappings
+ // as the other is a reverse mapping and thus will compare
+ // the same as normal mapping
+ let Self {
+ expr_map: _,
+ expr_map_back,
+ pat_map: _,
+ pat_map_back,
+ label_map: _,
+ label_map_back,
+ types_map_back,
+ types_map: _,
+ lifetime_map_back,
+ lifetime_map: _,
+ // If this changed, our pattern data must have changed
+ binding_definitions: _,
+ // If this changed, our expression data must have changed
+ field_map_back: _,
+ // If this changed, our pattern data must have changed
+ pat_field_map_back: _,
+ template_map,
+ expansions,
+ diagnostics,
+ } = self;
+ *expr_map_back == other.expr_map_back
+ && *pat_map_back == other.pat_map_back
+ && *label_map_back == other.label_map_back
+ && *types_map_back == other.types_map_back
+ && *lifetime_map_back == other.lifetime_map_back
+ && *template_map == other.template_map
+ && *expansions == other.expansions
+ && *diagnostics == other.diagnostics
+ }
+}
+
/// The body of an item (function, const etc.).
#[derive(Debug, Eq, PartialEq, Default)]
pub struct ExpressionStoreBuilder {
@@ -220,6 +262,12 @@ impl ExpressionStoreBuilder {
}
impl ExpressionStore {
+ pub fn empty_singleton() -> Arc<Self> {
+ static EMPTY: LazyLock<Arc<ExpressionStore>> =
+ LazyLock::new(|| Arc::new(ExpressionStoreBuilder::default().finish()));
+ EMPTY.clone()
+ }
+
/// Returns an iterator over all block expressions in this store that define inner items.
pub fn blocks<'a>(
&'a self,
@@ -636,6 +684,12 @@ impl Index<PathId> for ExpressionStore {
// FIXME: Change `node_` prefix to something more reasonable.
// Perhaps `expr_syntax` and `expr_id`?
impl ExpressionStoreSourceMap {
+ pub fn empty_singleton() -> Arc<Self> {
+ static EMPTY: LazyLock<Arc<ExpressionStoreSourceMap>> =
+ LazyLock::new(|| Arc::new(ExpressionStoreSourceMap::default()));
+ EMPTY.clone()
+ }
+
pub fn expr_or_pat_syntax(&self, id: ExprOrPatId) -> Result<ExprOrPatSource, SyntheticSyntax> {
match id {
ExprOrPatId::ExprId(id) => self.expr_syntax(id),
@@ -682,7 +736,7 @@ impl ExpressionStoreSourceMap {
}
pub fn patterns_for_binding(&self, binding: BindingId) -> &[PatId] {
- self.binding_definitions.get(&binding).map_or(&[], Deref::deref)
+ self.binding_definitions.get(binding).map_or(&[], Deref::deref)
}
pub fn node_label(&self, node: InFile<&ast::Label>) -> Option<LabelId> {
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index efa1374a44..c0e51b338b 100644
--- a/crates/hir-def/src/expr_store/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -2250,7 +2250,7 @@ impl ExprCollector<'_> {
Some(ModuleDefId::ConstId(_)) => (None, Pat::Path(name.into())),
Some(ModuleDefId::EnumVariantId(variant))
// FIXME: This can cause a cycle if the user is writing invalid code
- if self.db.variant_fields(variant.into()).shape != FieldsShape::Record =>
+ if variant.fields(self.db).shape != FieldsShape::Record =>
{
(None, Pat::Path(name.into()))
}
@@ -2825,14 +2825,7 @@ impl ExprCollector<'_> {
let use_format_args_since_1_89_0 = fmt_args().is_some() && fmt_unsafe_arg().is_none();
let idx = if use_format_args_since_1_89_0 {
- self.collect_format_args_impl(
- syntax_ptr,
- fmt,
- hygiene,
- argmap,
- lit_pieces,
- format_options,
- )
+ self.collect_format_args_impl(syntax_ptr, fmt, argmap, lit_pieces, format_options)
} else {
self.collect_format_args_before_1_89_0_impl(
syntax_ptr,
@@ -2962,7 +2955,6 @@ impl ExprCollector<'_> {
&mut self,
syntax_ptr: AstPtr<ast::Expr>,
fmt: FormatArgs,
- hygiene: HygieneId,
argmap: FxIndexSet<(usize, ArgumentType)>,
lit_pieces: ExprId,
format_options: ExprId,
@@ -2997,8 +2989,11 @@ impl ExprCollector<'_> {
let args =
self.alloc_expr_desugared(Expr::Array(Array::ElementList { elements: args }));
let args_name = Name::new_symbol_root(sym::args);
- let args_binding =
- self.alloc_binding(args_name.clone(), BindingAnnotation::Unannotated, hygiene);
+ let args_binding = self.alloc_binding(
+ args_name.clone(),
+ BindingAnnotation::Unannotated,
+ HygieneId::ROOT,
+ );
let args_pat = self.alloc_pat_desugared(Pat::Bind { id: args_binding, subpat: None });
self.add_definition_to_binding(args_binding, args_pat);
// TODO: We don't have `super let` yet.
@@ -3008,13 +3003,16 @@ impl ExprCollector<'_> {
initializer: Some(args),
else_branch: None,
};
- (vec![let_stmt], self.alloc_expr_desugared(Expr::Path(Path::from(args_name))))
+ (vec![let_stmt], self.alloc_expr_desugared(Expr::Path(args_name.into())))
} else {
// Generate:
// super let args = (&arg0, &arg1, &...);
let args_name = Name::new_symbol_root(sym::args);
- let args_binding =
- self.alloc_binding(args_name.clone(), BindingAnnotation::Unannotated, hygiene);
+ let args_binding = self.alloc_binding(
+ args_name.clone(),
+ BindingAnnotation::Unannotated,
+ HygieneId::ROOT,
+ );
let args_pat = self.alloc_pat_desugared(Pat::Bind { id: args_binding, subpat: None });
self.add_definition_to_binding(args_binding, args_pat);
let elements = arguments
@@ -3057,8 +3055,11 @@ impl ExprCollector<'_> {
.collect();
let array =
self.alloc_expr_desugared(Expr::Array(Array::ElementList { elements: args }));
- let args_binding =
- self.alloc_binding(args_name.clone(), BindingAnnotation::Unannotated, hygiene);
+ let args_binding = self.alloc_binding(
+ args_name.clone(),
+ BindingAnnotation::Unannotated,
+ HygieneId::ROOT,
+ );
let args_pat = self.alloc_pat_desugared(Pat::Bind { id: args_binding, subpat: None });
self.add_definition_to_binding(args_binding, args_pat);
let let_stmt2 = Statement::Let {
diff --git a/crates/hir-def/src/expr_store/path.rs b/crates/hir-def/src/expr_store/path.rs
index db83e73a0b..19c7ce0ce0 100644
--- a/crates/hir-def/src/expr_store/path.rs
+++ b/crates/hir-def/src/expr_store/path.rs
@@ -29,8 +29,8 @@ pub enum Path {
// This type is being used a lot, make sure it doesn't grow unintentionally.
#[cfg(target_arch = "x86_64")]
const _: () = {
- assert!(size_of::<Path>() == 16);
- assert!(size_of::<Option<Path>>() == 16);
+ assert!(size_of::<Path>() == 24);
+ assert!(size_of::<Option<Path>>() == 24);
};
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
diff --git a/crates/hir-def/src/expr_store/pretty.rs b/crates/hir-def/src/expr_store/pretty.rs
index 56c7655f9e..87bcd33ed7 100644
--- a/crates/hir-def/src/expr_store/pretty.rs
+++ b/crates/hir-def/src/expr_store/pretty.rs
@@ -121,7 +121,7 @@ pub fn print_variant_body_hir(db: &dyn DefDatabase, owner: VariantId, edition: E
VariantId::UnionId(it) => format!("union {}", item_name(db, it, "<missing>")),
};
- let fields = db.variant_fields(owner);
+ let fields = owner.fields(db);
let mut p = Printer {
db,
diff --git a/crates/hir-def/src/expr_store/scope.rs b/crates/hir-def/src/expr_store/scope.rs
index a46711c67e..2dd0b9bdb8 100644
--- a/crates/hir-def/src/expr_store/scope.rs
+++ b/crates/hir-def/src/expr_store/scope.rs
@@ -535,7 +535,7 @@ fn foo() {
let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
let pat_src = source_map
- .pat_syntax(*source_map.binding_definitions[&resolved.binding()].first().unwrap())
+ .pat_syntax(*source_map.binding_definitions[resolved.binding()].first().unwrap())
.unwrap();
let local_name = pat_src.value.syntax_node_ptr().to_node(file.syntax());
diff --git a/crates/hir-def/src/hir/generics.rs b/crates/hir-def/src/hir/generics.rs
index a9a0e36312..94e683cb0f 100644
--- a/crates/hir-def/src/hir/generics.rs
+++ b/crates/hir-def/src/hir/generics.rs
@@ -331,13 +331,13 @@ impl GenericParams {
}
#[inline]
- pub fn no_predicates(&self) -> bool {
+ pub fn has_no_predicates(&self) -> bool {
self.where_predicates.is_empty()
}
#[inline]
- pub fn where_predicates(&self) -> std::slice::Iter<'_, WherePredicate> {
- self.where_predicates.iter()
+ pub fn where_predicates(&self) -> &[WherePredicate] {
+ &self.where_predicates
}
/// Iterator of type_or_consts field
diff --git a/crates/hir-def/src/hir/type_ref.rs b/crates/hir-def/src/hir/type_ref.rs
index eb3b92d31f..eacc3f3ced 100644
--- a/crates/hir-def/src/hir/type_ref.rs
+++ b/crates/hir-def/src/hir/type_ref.rs
@@ -149,7 +149,7 @@ pub enum TypeRef {
}
#[cfg(target_arch = "x86_64")]
-const _: () = assert!(size_of::<TypeRef>() == 16);
+const _: () = assert!(size_of::<TypeRef>() == 24);
pub type TypeRefId = Idx<TypeRef>;
diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs
index a6138fb682..f31f355cfa 100644
--- a/crates/hir-def/src/import_map.rs
+++ b/crates/hir-def/src/import_map.rs
@@ -16,7 +16,7 @@ use crate::{
AssocItemId, AttrDefId, Complete, FxIndexMap, ModuleDefId, ModuleId, TraitId,
db::DefDatabase,
item_scope::{ImportOrExternCrate, ItemInNs},
- nameres::{DefMap, crate_def_map},
+ nameres::{DefMap, assoc::TraitItems, crate_def_map},
visibility::Visibility,
};
@@ -221,7 +221,7 @@ impl ImportMap {
trait_import_info: &ImportInfo,
) {
let _p = tracing::info_span!("collect_trait_assoc_items").entered();
- for &(ref assoc_item_name, item) in &db.trait_items(tr).items {
+ for &(ref assoc_item_name, item) in &TraitItems::query(db, tr).items {
let module_def_id = match item {
AssocItemId::FunctionId(f) => ModuleDefId::from(f),
AssocItemId::ConstId(c) => ModuleDefId::from(c),
@@ -482,7 +482,7 @@ mod tests {
use expect_test::{Expect, expect};
use test_fixture::WithFixture;
- use crate::{ItemContainerId, Lookup, test_db::TestDB};
+ use crate::{ItemContainerId, Lookup, nameres::assoc::TraitItems, test_db::TestDB};
use super::*;
@@ -580,7 +580,7 @@ mod tests {
let trait_info = dependency_imports.import_info_for(ItemInNs::Types(trait_id.into()))?;
- let trait_items = db.trait_items(trait_id);
+ let trait_items = TraitItems::query(db, trait_id);
let (assoc_item_name, _) = trait_items
.items
.iter()
diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs
index faff7d036a..750308026e 100644
--- a/crates/hir-def/src/lang_item.rs
+++ b/crates/hir-def/src/lang_item.rs
@@ -9,8 +9,10 @@ use triomphe::Arc;
use crate::{
AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId,
- StaticId, StructId, TraitId, TypeAliasId, UnionId, db::DefDatabase, expr_store::path::Path,
- nameres::crate_def_map,
+ StaticId, StructId, TraitId, TypeAliasId, UnionId,
+ db::DefDatabase,
+ expr_store::path::Path,
+ nameres::{assoc::TraitItems, crate_def_map},
};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -113,14 +115,16 @@ pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangIt
match def {
ModuleDefId::TraitId(trait_) => {
lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait);
- db.trait_items(trait_).items.iter().for_each(|&(_, assoc_id)| match assoc_id {
- AssocItemId::FunctionId(f) => {
- lang_items.collect_lang_item(db, f, LangItemTarget::Function);
+ TraitItems::query(db, trait_).items.iter().for_each(|&(_, assoc_id)| {
+ match assoc_id {
+ AssocItemId::FunctionId(f) => {
+ lang_items.collect_lang_item(db, f, LangItemTarget::Function);
+ }
+ AssocItemId::TypeAliasId(alias) => {
+ lang_items.collect_lang_item(db, alias, LangItemTarget::TypeAlias)
+ }
+ AssocItemId::ConstId(_) => {}
}
- AssocItemId::TypeAliasId(alias) => {
- lang_items.collect_lang_item(db, alias, LangItemTarget::TypeAlias)
- }
- AssocItemId::ConstId(_) => {}
});
}
ModuleDefId::AdtId(AdtId::EnumId(e)) => {
@@ -304,6 +308,8 @@ impl LangItem {
language_item_table! {
// Variant name, Name, Getter method name, Target Generic requirements;
Sized, sym::sized, sized_trait, Target::Trait, GenericRequirement::Exact(0);
+ MetaSized, sym::meta_sized, sized_trait, Target::Trait, GenericRequirement::Exact(0);
+ PointeeSized, sym::pointee_sized, sized_trait, Target::Trait, GenericRequirement::Exact(0);
Unsize, sym::unsize, unsize_trait, Target::Trait, GenericRequirement::Minimum(1);
/// Trait injected by `#[derive(PartialEq)]`, (i.e. "Partial EQ").
StructuralPeq, sym::structural_peq, structural_peq_trait, Target::Trait, GenericRequirement::None;
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index a562f2d0af..bdf8b453e2 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -87,9 +87,12 @@ use crate::{
attr::Attrs,
builtin_type::BuiltinType,
db::DefDatabase,
+ expr_store::ExpressionStoreSourceMap,
hir::generics::{LocalLifetimeParamId, LocalTypeOrConstParamId},
nameres::{
- LocalDefMap, assoc::ImplItems, block_def_map, crate_def_map, crate_local_def_map,
+ LocalDefMap,
+ assoc::{ImplItems, TraitItems},
+ block_def_map, crate_def_map, crate_local_def_map,
diagnostics::DefDiagnostics,
},
signatures::{EnumVariants, InactiveEnumVariantCode, VariantFields},
@@ -252,9 +255,35 @@ impl_intern!(FunctionId, FunctionLoc, intern_function, lookup_intern_function);
type StructLoc = ItemLoc<ast::Struct>;
impl_intern!(StructId, StructLoc, intern_struct, lookup_intern_struct);
+impl StructId {
+ pub fn fields(self, db: &dyn DefDatabase) -> &VariantFields {
+ VariantFields::firewall(db, self.into())
+ }
+
+ pub fn fields_with_source_map(
+ self,
+ db: &dyn DefDatabase,
+ ) -> (Arc<VariantFields>, Arc<ExpressionStoreSourceMap>) {
+ VariantFields::query(db, self.into())
+ }
+}
+
pub type UnionLoc = ItemLoc<ast::Union>;
impl_intern!(UnionId, UnionLoc, intern_union, lookup_intern_union);
+impl UnionId {
+ pub fn fields(self, db: &dyn DefDatabase) -> &VariantFields {
+ VariantFields::firewall(db, self.into())
+ }
+
+ pub fn fields_with_source_map(
+ self,
+ db: &dyn DefDatabase,
+ ) -> (Arc<VariantFields>, Arc<ExpressionStoreSourceMap>) {
+ VariantFields::query(db, self.into())
+ }
+}
+
pub type EnumLoc = ItemLoc<ast::Enum>;
impl_intern!(EnumId, EnumLoc, intern_enum, lookup_intern_enum);
@@ -282,6 +311,13 @@ impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static);
pub type TraitLoc = ItemLoc<ast::Trait>;
impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait);
+impl TraitId {
+ #[inline]
+ pub fn trait_items(self, db: &dyn DefDatabase) -> &TraitItems {
+ TraitItems::query(db, self)
+ }
+}
+
pub type TraitAliasLoc = ItemLoc<ast::TraitAlias>;
impl_intern!(TraitAliasId, TraitAliasLoc, intern_trait_alias, lookup_intern_trait_alias);
@@ -328,6 +364,20 @@ pub struct EnumVariantLoc {
}
impl_intern!(EnumVariantId, EnumVariantLoc, intern_enum_variant, lookup_intern_enum_variant);
impl_loc!(EnumVariantLoc, id: Variant, parent: EnumId);
+
+impl EnumVariantId {
+ pub fn fields(self, db: &dyn DefDatabase) -> &VariantFields {
+ VariantFields::firewall(db, self.into())
+ }
+
+ pub fn fields_with_source_map(
+ self,
+ db: &dyn DefDatabase,
+ ) -> (Arc<VariantFields>, Arc<ExpressionStoreSourceMap>) {
+ VariantFields::query(db, self.into())
+ }
+}
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Macro2Loc {
pub container: ModuleId,
@@ -1015,8 +1065,15 @@ pub enum VariantId {
impl_from!(EnumVariantId, StructId, UnionId for VariantId);
impl VariantId {
- pub fn variant_data(self, db: &dyn DefDatabase) -> Arc<VariantFields> {
- db.variant_fields(self)
+ pub fn fields(self, db: &dyn DefDatabase) -> &VariantFields {
+ VariantFields::firewall(db, self)
+ }
+
+ pub fn fields_with_source_map(
+ self,
+ db: &dyn DefDatabase,
+ ) -> (Arc<VariantFields>, Arc<ExpressionStoreSourceMap>) {
+ VariantFields::query(db, self)
}
pub fn file_id(self, db: &dyn DefDatabase) -> HirFileId {
diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
index 777953d3f2..0013c2a256 100644
--- a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
+++ b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
@@ -746,3 +746,83 @@ struct Struct9<#[pointee] T, U>(T) where T: ?Sized;
623..690: `derive(CoercePointee)` requires `T` to be marked `?Sized`"#]],
);
}
+
+#[test]
+fn union_derive() {
+ check_errors(
+ r#"
+//- minicore: clone, copy, default, fmt, hash, ord, eq, derive
+
+#[derive(Copy)]
+union Foo1 { _v: () }
+#[derive(Clone)]
+union Foo2 { _v: () }
+#[derive(Default)]
+union Foo3 { _v: () }
+#[derive(Debug)]
+union Foo4 { _v: () }
+#[derive(Hash)]
+union Foo5 { _v: () }
+#[derive(Ord)]
+union Foo6 { _v: () }
+#[derive(PartialOrd)]
+union Foo7 { _v: () }
+#[derive(Eq)]
+union Foo8 { _v: () }
+#[derive(PartialEq)]
+union Foo9 { _v: () }
+ "#,
+ expect![[r#"
+ 78..118: this trait cannot be derived for unions
+ 119..157: this trait cannot be derived for unions
+ 158..195: this trait cannot be derived for unions
+ 196..232: this trait cannot be derived for unions
+ 233..276: this trait cannot be derived for unions
+ 313..355: this trait cannot be derived for unions"#]],
+ );
+}
+
+#[test]
+fn default_enum_without_default_attr() {
+ check_errors(
+ r#"
+//- minicore: default, derive
+
+#[derive(Default)]
+enum Foo {
+ Bar,
+}
+ "#,
+ expect!["1..41: `#[derive(Default)]` on enum with no `#[default]`"],
+ );
+}
+
+#[test]
+fn generic_enum_default() {
+ check(
+ r#"
+//- minicore: default, derive
+
+#[derive(Default)]
+enum Foo<T> {
+ Bar(T),
+ #[default]
+ Baz,
+}
+"#,
+ expect![[r#"
+
+#[derive(Default)]
+enum Foo<T> {
+ Bar(T),
+ #[default]
+ Baz,
+}
+
+impl <T, > $crate::default::Default for Foo<T, > where {
+ fn default() -> Self {
+ Foo::Baz
+ }
+}"#]],
+ );
+}
diff --git a/crates/hir-def/src/nameres/assoc.rs b/crates/hir-def/src/nameres/assoc.rs
index 7aaa918d1c..07210df887 100644
--- a/crates/hir-def/src/nameres/assoc.rs
+++ b/crates/hir-def/src/nameres/assoc.rs
@@ -38,16 +38,18 @@ pub struct TraitItems {
pub macro_calls: ThinVec<(AstId<ast::Item>, MacroCallId)>,
}
+#[salsa::tracked]
impl TraitItems {
#[inline]
- pub(crate) fn trait_items_query(db: &dyn DefDatabase, tr: TraitId) -> Arc<TraitItems> {
- db.trait_items_with_diagnostics(tr).0
+ pub(crate) fn query(db: &dyn DefDatabase, tr: TraitId) -> &TraitItems {
+ &Self::query_with_diagnostics(db, tr).0
}
- pub(crate) fn trait_items_with_diagnostics_query(
+ #[salsa::tracked(returns(ref))]
+ pub fn query_with_diagnostics(
db: &dyn DefDatabase,
tr: TraitId,
- ) -> (Arc<TraitItems>, DefDiagnostics) {
+ ) -> (TraitItems, DefDiagnostics) {
let ItemLoc { container: module_id, id: ast_id } = tr.lookup(db);
let collector =
@@ -55,7 +57,7 @@ impl TraitItems {
let source = ast_id.with_value(collector.ast_id_map.get(ast_id.value)).to_node(db);
let (items, macro_calls, diagnostics) = collector.collect(source.assoc_item_list());
- (Arc::new(TraitItems { macro_calls, items }), DefDiagnostics::new(diagnostics))
+ (TraitItems { macro_calls, items }, DefDiagnostics::new(diagnostics))
}
pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ {
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 78fdc27560..0c3274d849 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -41,6 +41,7 @@ use crate::{
macro_call_as_call_id,
nameres::{
BuiltinShadowMode, DefMap, LocalDefMap, MacroSubNs, ModuleData, ModuleOrigin, ResolveMode,
+ assoc::TraitItems,
attr_resolution::{attr_macro_as_call_id, derive_macro_as_call_id},
crate_def_map,
diagnostics::DefDiagnostic,
@@ -1020,8 +1021,7 @@ impl<'db> DefCollector<'db> {
let resolutions = if true {
vec![]
} else {
- self.db
- .trait_items(it)
+ TraitItems::query(self.db, it)
.items
.iter()
.map(|&(ref name, variant)| {
diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs
index e8235b1c96..4641b220da 100644
--- a/crates/hir-def/src/nameres/path_resolution.rs
+++ b/crates/hir-def/src/nameres/path_resolution.rs
@@ -24,8 +24,8 @@ use crate::{
item_scope::{BUILTIN_SCOPE, ImportOrExternCrate},
item_tree::FieldsShape,
nameres::{
- BlockInfo, BuiltinShadowMode, DefMap, LocalDefMap, MacroSubNs, crate_def_map,
- sub_namespace_match,
+ BlockInfo, BuiltinShadowMode, DefMap, LocalDefMap, MacroSubNs, assoc::TraitItems,
+ crate_def_map, sub_namespace_match,
},
per_ns::PerNs,
visibility::{RawVisibility, Visibility},
@@ -584,8 +584,11 @@ impl DefMap {
// now resulting in a cycle.
// To properly implement this, trait item collection needs to be done in def map
// collection...
- let item =
- if true { None } else { db.trait_items(t).assoc_item_by_name(segment) };
+ let item = if true {
+ None
+ } else {
+ TraitItems::query(db, t).assoc_item_by_name(segment)
+ };
return match item {
Some(item) => ResolvePathResult::new(
match item {
diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs
index ba75dca3d3..338851b715 100644
--- a/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/crates/hir-def/src/nameres/tests/incremental.rs
@@ -172,7 +172,7 @@ fn no() {}
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
- "of_",
+ "EnumVariants::of_",
]
"#]],
expect![[r#"
@@ -181,7 +181,7 @@ fn no() {}
"ast_id_map_shim",
"file_item_tree_query",
"real_span_map_shim",
- "of_",
+ "EnumVariants::of_",
]
"#]],
);
diff --git a/crates/hir-def/src/signatures.rs b/crates/hir-def/src/signatures.rs
index 377a545ebf..1958eb6c6a 100644
--- a/crates/hir-def/src/signatures.rs
+++ b/crates/hir-def/src/signatures.rs
@@ -1,6 +1,6 @@
//! Item signature IR definitions
-use std::ops::Not as _;
+use std::{cell::LazyCell, ops::Not as _};
use bitflags::bitflags;
use cfg::{CfgExpr, CfgOptions};
@@ -731,29 +731,26 @@ pub struct VariantFields {
pub store: Arc<ExpressionStore>,
pub shape: FieldsShape,
}
+
+#[salsa::tracked]
impl VariantFields {
- #[inline]
+ #[salsa::tracked(returns(clone))]
pub(crate) fn query(
db: &dyn DefDatabase,
id: VariantId,
) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
- let (shape, (fields, store, source_map)) = match id {
+ let (shape, result) = match id {
VariantId::EnumVariantId(id) => {
let loc = id.lookup(db);
let parent = loc.parent.lookup(db);
let source = loc.source(db);
let shape = adt_shape(source.value.kind());
- let span_map = db.span_map(source.file_id);
- let override_visibility = visibility_from_ast(
- db,
- source.value.parent_enum().visibility(),
- &mut |range| span_map.span_for_range(range).ctx,
- );
+ let enum_vis = Some(source.value.parent_enum().visibility());
let fields = lower_field_list(
db,
parent.container,
source.map(|src| src.field_list()),
- Some(override_visibility),
+ enum_vis,
);
(shape, fields)
}
@@ -777,10 +774,29 @@ impl VariantFields {
(FieldsShape::Record, fields)
}
};
+ match result {
+ Some((fields, store, source_map)) => (
+ Arc::new(VariantFields { fields, store: Arc::new(store), shape }),
+ Arc::new(source_map),
+ ),
+ None => (
+ Arc::new(VariantFields {
+ fields: Arena::default(),
+ store: ExpressionStore::empty_singleton(),
+ shape,
+ }),
+ ExpressionStoreSourceMap::empty_singleton(),
+ ),
+ }
+ }
- (Arc::new(VariantFields { fields, store: Arc::new(store), shape }), Arc::new(source_map))
+ #[salsa::tracked(returns(deref))]
+ pub(crate) fn firewall(db: &dyn DefDatabase, id: VariantId) -> Arc<Self> {
+ Self::query(db, id).0
}
+}
+impl VariantFields {
pub fn len(&self) -> usize {
self.fields.len()
}
@@ -798,31 +814,24 @@ fn lower_field_list(
db: &dyn DefDatabase,
module: ModuleId,
fields: InFile<Option<ast::FieldList>>,
- override_visibility: Option<RawVisibility>,
-) -> (Arena<FieldData>, ExpressionStore, ExpressionStoreSourceMap) {
+ override_visibility: Option<Option<ast::Visibility>>,
+) -> Option<(Arena<FieldData>, ExpressionStore, ExpressionStoreSourceMap)> {
let file_id = fields.file_id;
- match fields.value {
- Some(ast::FieldList::RecordFieldList(fields)) => lower_fields(
+ match fields.value? {
+ ast::FieldList::RecordFieldList(fields) => lower_fields(
db,
module,
InFile::new(file_id, fields.fields().map(|field| (field.ty(), field))),
|_, field| as_name_opt(field.name()),
override_visibility,
),
- Some(ast::FieldList::TupleFieldList(fields)) => lower_fields(
+ ast::FieldList::TupleFieldList(fields) => lower_fields(
db,
module,
InFile::new(file_id, fields.fields().map(|field| (field.ty(), field))),
|idx, _| Name::new_tuple_field(idx),
override_visibility,
),
- None => lower_fields(
- db,
- module,
- InFile::new(file_id, std::iter::empty::<(Option<ast::Type>, ast::RecordField)>()),
- |_, _| Name::missing(),
- None,
- ),
}
}
@@ -831,22 +840,34 @@ fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>(
module: ModuleId,
fields: InFile<impl Iterator<Item = (Option<ast::Type>, Field)>>,
mut field_name: impl FnMut(usize, &Field) -> Name,
- override_visibility: Option<RawVisibility>,
-) -> (Arena<FieldData>, ExpressionStore, ExpressionStoreSourceMap) {
- let mut arena = Arena::new();
+ override_visibility: Option<Option<ast::Visibility>>,
+) -> Option<(Arena<FieldData>, ExpressionStore, ExpressionStoreSourceMap)> {
let cfg_options = module.krate.cfg_options(db);
let mut col = ExprCollector::new(db, module, fields.file_id);
+ let override_visibility = override_visibility.map(|vis| {
+ LazyCell::new(|| {
+ let span_map = db.span_map(fields.file_id);
+ visibility_from_ast(db, vis, &mut |range| span_map.span_for_range(range).ctx)
+ })
+ });
+
+ let mut arena = Arena::new();
let mut idx = 0;
+ let mut has_fields = false;
for (ty, field) in fields.value {
+ has_fields = true;
match Attrs::is_cfg_enabled_for(db, &field, col.span_map(), cfg_options) {
Ok(()) => {
let type_ref =
col.lower_type_ref_opt(ty, &mut ExprCollector::impl_trait_error_allocator);
- let visibility = override_visibility.clone().unwrap_or_else(|| {
- visibility_from_ast(db, field.visibility(), &mut |range| {
- col.span_map().span_for_range(range).ctx
- })
- });
+ let visibility = override_visibility.as_ref().map_or_else(
+ || {
+ visibility_from_ast(db, field.visibility(), &mut |range| {
+ col.span_map().span_for_range(range).ctx
+ })
+ },
+ |it| RawVisibility::clone(it),
+ );
let is_unsafe = field
.syntax()
.children_with_tokens()
@@ -867,9 +888,12 @@ fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>(
}
}
}
+ if !has_fields {
+ return None;
+ }
let store = col.store.finish();
arena.shrink_to_fit();
- (arena, store, col.source_map)
+ Some((arena, store, col.source_map))
}
#[derive(Debug, PartialEq, Eq)]
@@ -948,7 +972,7 @@ impl EnumVariants {
self.variants.iter().all(|&(v, _, _)| {
// The condition check order is slightly modified from rustc
// to improve performance by early returning with relatively fast checks
- let variant = &db.variant_fields(v.into());
+ let variant = v.fields(db);
if !variant.fields().is_empty() {
return false;
}
diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs
index 2514e88864..b5eb84c25f 100644
--- a/crates/hir-def/src/visibility.rs
+++ b/crates/hir-def/src/visibility.rs
@@ -273,7 +273,7 @@ pub(crate) fn field_visibilities_query(
db: &dyn DefDatabase,
variant_id: VariantId,
) -> Arc<ArenaMap<LocalFieldId, Visibility>> {
- let variant_fields = db.variant_fields(variant_id);
+ let variant_fields = variant_id.fields(db);
let fields = variant_fields.fields();
if fields.is_empty() {
return Arc::default();
diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml
index ed818c5be3..80a3c08486 100644
--- a/crates/hir-expand/Cargo.toml
+++ b/crates/hir-expand/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index 94c97713f0..986f8764f5 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -433,20 +433,19 @@ fn unescape(s: &str) -> Option<Cow<'_, str>> {
let mut buf = String::new();
let mut prev_end = 0;
let mut has_error = false;
- unescape::unescape_unicode(s, unescape::Mode::Str, &mut |char_range, unescaped_char| match (
- unescaped_char,
- buf.capacity() == 0,
- ) {
- (Ok(c), false) => buf.push(c),
- (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
- prev_end = char_range.end
- }
- (Ok(c), true) => {
- buf.reserve_exact(s.len());
- buf.push_str(&s[..prev_end]);
- buf.push(c);
+ unescape::unescape_str(s, |char_range, unescaped_char| {
+ match (unescaped_char, buf.capacity() == 0) {
+ (Ok(c), false) => buf.push(c),
+ (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
+ prev_end = char_range.end
+ }
+ (Ok(c), true) => {
+ buf.reserve_exact(s.len());
+ buf.push_str(&s[..prev_end]);
+ buf.push(c);
+ }
+ (Err(_), _) => has_error = true,
}
- (Err(_), _) => has_error = true,
});
match (has_error, buf.capacity() == 0) {
diff --git a/crates/hir-expand/src/builtin/derive_macro.rs b/crates/hir-expand/src/builtin/derive_macro.rs
index d135584a08..15e68ff95c 100644
--- a/crates/hir-expand/src/builtin/derive_macro.rs
+++ b/crates/hir-expand/src/builtin/derive_macro.rs
@@ -458,6 +458,7 @@ fn expand_simple_derive(
invoc_span: Span,
tt: &tt::TopSubtree,
trait_path: tt::TopSubtree,
+ allow_unions: bool,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let info = match parse_adt(db, tt, invoc_span) {
@@ -469,6 +470,12 @@ fn expand_simple_derive(
);
}
};
+ if !allow_unions && matches!(info.shape, AdtShape::Union) {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(invoc_span)),
+ ExpandError::other(invoc_span, "this trait cannot be derived for unions"),
+ );
+ }
ExpandResult::ok(expand_simple_derive_with_parsed(
invoc_span,
info,
@@ -535,7 +542,14 @@ fn copy_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
+ expand_simple_derive(
+ db,
+ span,
+ tt,
+ quote! {span => #krate::marker::Copy },
+ true,
+ |_| quote! {span =>},
+ )
}
fn clone_expand(
@@ -544,7 +558,7 @@ fn clone_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::clone::Clone }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::clone::Clone }, true, |adt| {
if matches!(adt.shape, AdtShape::Union) {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
@@ -599,41 +613,63 @@ fn default_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::default::Default }, |adt| {
- let body = match &adt.shape {
- AdtShape::Struct(fields) => {
- let name = &adt.name;
- fields.as_pattern_map(
- quote!(span =>#name),
+ let adt = match parse_adt(db, tt, span) {
+ Ok(info) => info,
+ Err(e) => {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan { open: span, close: span }),
+ e,
+ );
+ }
+ };
+ let (body, constrain_to_trait) = match &adt.shape {
+ AdtShape::Struct(fields) => {
+ let name = &adt.name;
+ let body = fields.as_pattern_map(
+ quote!(span =>#name),
+ span,
+ |_| quote!(span =>#krate::default::Default::default()),
+ );
+ (body, true)
+ }
+ AdtShape::Enum { default_variant, variants } => {
+ if let Some(d) = default_variant {
+ let (name, fields) = &variants[*d];
+ let adt_name = &adt.name;
+ let body = fields.as_pattern_map(
+ quote!(span =>#adt_name :: #name),
span,
|_| quote!(span =>#krate::default::Default::default()),
- )
+ );
+ (body, false)
+ } else {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ExpandError::other(span, "`#[derive(Default)]` on enum with no `#[default]`"),
+ );
}
- AdtShape::Enum { default_variant, variants } => {
- if let Some(d) = default_variant {
- let (name, fields) = &variants[*d];
- let adt_name = &adt.name;
- fields.as_pattern_map(
- quote!(span =>#adt_name :: #name),
- span,
- |_| quote!(span =>#krate::default::Default::default()),
- )
- } else {
- // FIXME: Return expand error here
- quote!(span =>)
+ }
+ AdtShape::Union => {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ExpandError::other(span, "this trait cannot be derived for unions"),
+ );
+ }
+ };
+ ExpandResult::ok(expand_simple_derive_with_parsed(
+ span,
+ adt,
+ quote! {span => #krate::default::Default },
+ |_adt| {
+ quote! {span =>
+ fn default() -> Self {
+ #body
}
}
- AdtShape::Union => {
- // FIXME: Return expand error here
- quote!(span =>)
- }
- };
- quote! {span =>
- fn default() -> Self {
- #body
- }
- }
- })
+ },
+ constrain_to_trait,
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ))
}
fn debug_expand(
@@ -642,7 +678,7 @@ fn debug_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::fmt::Debug }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::fmt::Debug }, false, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| {
@@ -697,10 +733,7 @@ fn debug_expand(
}
})
.collect(),
- AdtShape::Union => {
- // FIXME: Return expand error here
- vec![]
- }
+ AdtShape::Union => unreachable!(),
};
quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
@@ -718,11 +751,7 @@ fn hash_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::hash::Hash }, |adt| {
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote! {span =>};
- }
+ expand_simple_derive(db, span, tt, quote! {span => #krate::hash::Hash }, false, |adt| {
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
@@ -769,7 +798,14 @@ fn eq_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
+ expand_simple_derive(
+ db,
+ span,
+ tt,
+ quote! {span => #krate::cmp::Eq },
+ true,
+ |_| quote! {span =>},
+ )
}
fn partial_eq_expand(
@@ -778,11 +814,7 @@ fn partial_eq_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialEq }, |adt| {
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote! {span =>};
- }
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialEq }, false, |adt| {
let name = &adt.name;
let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span);
@@ -854,7 +886,7 @@ fn ord_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Ord }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Ord }, false, |adt| {
fn compare(
krate: &tt::Ident,
left: tt::TopSubtree,
@@ -873,10 +905,6 @@ fn ord_expand(
}
}
}
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote!(span =>);
- }
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
@@ -916,7 +944,7 @@ fn partial_ord_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialOrd }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialOrd }, false, |adt| {
fn compare(
krate: &tt::Ident,
left: tt::TopSubtree,
@@ -935,10 +963,6 @@ fn partial_ord_expand(
}
}
}
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote!(span =>);
- }
let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
diff --git a/crates/hir-expand/src/builtin/fn_macro.rs b/crates/hir-expand/src/builtin/fn_macro.rs
index 3180b8dae1..800b40a9e7 100644
--- a/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/crates/hir-expand/src/builtin/fn_macro.rs
@@ -7,12 +7,13 @@ use intern::{
Symbol,
sym::{self},
};
+use itertools::Itertools;
use mbe::{DelimiterKind, expect_fragment};
use span::{Edition, FileId, Span};
use stdx::format_to;
use syntax::{
format_smolstr,
- unescape::{Mode, unescape_byte, unescape_char, unescape_unicode},
+ unescape::{unescape_byte, unescape_char, unescape_str},
};
use syntax_bridge::syntax_node_to_token_tree;
@@ -430,7 +431,7 @@ fn compile_error_expand(
kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
suffix: _,
})),
- ] => ExpandError::other(span, Box::from(unescape_str(text).as_str())),
+ ] => ExpandError::other(span, Box::from(unescape_symbol(text).as_str())),
_ => ExpandError::other(span, "`compile_error!` argument must be a string"),
};
@@ -481,7 +482,7 @@ fn concat_expand(
format_to!(text, "{}", it.symbol.as_str())
}
tt::LitKind::Str => {
- text.push_str(unescape_str(&it.symbol).as_str());
+ text.push_str(unescape_symbol(&it.symbol).as_str());
record_span(it.span);
}
tt::LitKind::StrRaw(_) => {
@@ -681,52 +682,36 @@ fn relative_file(
}
fn parse_string(tt: &tt::TopSubtree) -> Result<(Symbol, Span), ExpandError> {
- let delimiter = tt.top_subtree().delimiter;
- tt.iter()
- .next()
- .ok_or(delimiter.open.cover(delimiter.close))
- .and_then(|tt| match tt {
+ let mut tt = TtElement::Subtree(tt.top_subtree(), tt.iter());
+ (|| {
+ // FIXME: We wrap expression fragments in parentheses which can break this expectation
+ // here
+ // Remove this once we handle none delims correctly
+ while let TtElement::Subtree(sub, tt_iter) = &mut tt
+ && let DelimiterKind::Parenthesis | DelimiterKind::Invisible = sub.delimiter.kind
+ {
+ tt =
+ tt_iter.exactly_one().map_err(|_| sub.delimiter.open.cover(sub.delimiter.close))?;
+ }
+
+ match tt {
TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
symbol: text,
span,
kind: tt::LitKind::Str,
suffix: _,
- })) => Ok((unescape_str(text), *span)),
+ })) => Ok((unescape_symbol(text), *span)),
TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
symbol: text,
span,
kind: tt::LitKind::StrRaw(_),
suffix: _,
})) => Ok((text.clone(), *span)),
- // FIXME: We wrap expression fragments in parentheses which can break this expectation
- // here
- // Remove this once we handle none delims correctly
- TtElement::Subtree(tt, mut tt_iter)
- if tt.delimiter.kind == DelimiterKind::Parenthesis =>
- {
- tt_iter
- .next()
- .and_then(|tt| match tt {
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span,
- kind: tt::LitKind::Str,
- suffix: _,
- })) => Some((unescape_str(text), *span)),
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span,
- kind: tt::LitKind::StrRaw(_),
- suffix: _,
- })) => Some((text.clone(), *span)),
- _ => None,
- })
- .ok_or(delimiter.open.cover(delimiter.close))
- }
TtElement::Leaf(l) => Err(*l.span()),
TtElement::Subtree(tt, _) => Err(tt.delimiter.open.cover(tt.delimiter.close)),
- })
- .map_err(|span| ExpandError::other(span, "expected string literal"))
+ }
+ })()
+ .map_err(|span| ExpandError::other(span, "expected string literal"))
}
fn include_expand(
@@ -897,11 +882,11 @@ fn quote_expand(
)
}
-fn unescape_str(s: &Symbol) -> Symbol {
+fn unescape_symbol(s: &Symbol) -> Symbol {
if s.as_str().contains('\\') {
let s = s.as_str();
let mut buf = String::with_capacity(s.len());
- unescape_unicode(s, Mode::Str, &mut |_, c| {
+ unescape_str(s, |_, c| {
if let Ok(c) = c {
buf.push(c)
}
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 7e9928c41f..888c1405a6 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -145,7 +145,7 @@ pub trait ExpandDatabase: RootQueryDb {
fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContext;
}
-#[salsa_macros::interned(no_lifetime, id = span::SyntaxContext)]
+#[salsa_macros::interned(no_lifetime, id = span::SyntaxContext, revisions = usize::MAX)]
pub struct SyntaxContextWrapper {
pub data: SyntaxContext,
}
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index a73a22370d..6730b337d3 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -315,11 +315,11 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
- pub fn original_file_range_with_macro_call_body(
+ pub fn original_file_range_with_macro_call_input(
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
- self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
+ self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_input(db)
}
pub fn original_syntax_node_rooted(
@@ -465,7 +465,7 @@ impl InFile<TextRange> {
}
}
- pub fn original_node_file_range_with_macro_call_body(
+ pub fn original_node_file_range_with_macro_call_input(
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
@@ -476,7 +476,7 @@ impl InFile<TextRange> {
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file);
- loc.kind.original_call_range_with_body(db)
+ loc.kind.original_call_range_with_input(db)
}
}
}
@@ -497,6 +497,18 @@ impl InFile<TextRange> {
}
}
}
+
+ pub fn original_node_file_range_rooted_opt(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<FileRange> {
+ match self.file_id {
+ HirFileId::FileId(file_id) => Some(FileRange { file_id, range: self.value }),
+ HirFileId::MacroFile(mac_file) => {
+ map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value)
+ }
+ }
+ }
}
impl<N: AstNode> InFile<N> {
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 6ecac1463f..ac61b22009 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -199,9 +199,9 @@ impl ExpandErrorKind {
},
&ExpandErrorKind::MissingProcMacroExpander(def_crate) => {
match db.proc_macros_for_crate(def_crate).as_ref().and_then(|it| it.get_error()) {
- Some((e, hard_err)) => RenderedExpandError {
- message: e.to_owned(),
- error: hard_err,
+ Some(e) => RenderedExpandError {
+ message: e.to_string(),
+ error: e.is_hard_error(),
kind: RenderedExpandError::GENERAL_KIND,
},
None => RenderedExpandError {
@@ -688,8 +688,11 @@ impl MacroCallKind {
/// Returns the original file range that best describes the location of this macro call.
///
- /// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives.
- pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange {
+ /// This spans the entire macro call, including its input. That is for
+ /// - fn_like! {}, it spans the path and token tree
+ /// - #\[derive], it spans the `#[derive(...)]` attribute and the annotated item
+ /// - #\[attr], it spans the `#[attr(...)]` attribute and the annotated item
+ pub fn original_call_range_with_input(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id() {
@@ -712,8 +715,8 @@ impl MacroCallKind {
/// Returns the original file range that best describes the location of this macro call.
///
/// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
- /// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives
- /// get only the specific derive that is being referred to.
+ /// get the macro path (rustc shows the whole `ast::MacroCall`), attribute macros get the
+ /// attribute's range, and derives get only the specific derive that is being referred to.
pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
@@ -726,7 +729,14 @@ impl MacroCallKind {
};
let range = match kind {
- MacroCallKind::FnLike { ast_id, .. } => ast_id.to_ptr(db).text_range(),
+ MacroCallKind::FnLike { ast_id, .. } => {
+ let node = ast_id.to_node(db);
+ node.path()
+ .unwrap()
+ .syntax()
+ .text_range()
+ .cover(node.excl_token().unwrap().text_range())
+ }
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: should be the range of the macro name, not the whole derive
// FIXME: handle `cfg_attr`
@@ -1056,7 +1066,7 @@ impl ExpandTo {
intern::impl_internable!(ModPath, attrs::AttrInput);
-#[salsa_macros::interned(no_lifetime, debug)]
+#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
#[doc(alias = "MacroFileId")]
pub struct MacroCallId {
pub loc: MacroCallLoc,
diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs
index 217d991d11..679f61112a 100644
--- a/crates/hir-expand/src/name.rs
+++ b/crates/hir-expand/src/name.rs
@@ -179,9 +179,10 @@ impl Name {
self.symbol.as_str()
}
+ #[inline]
pub fn display<'a>(
&'a self,
- db: &dyn crate::db::ExpandDatabase,
+ db: &dyn salsa::Database,
edition: Edition,
) -> impl fmt::Display + 'a {
_ = db;
diff --git a/crates/hir-expand/src/prettify_macro_expansion_.rs b/crates/hir-expand/src/prettify_macro_expansion_.rs
index 6134c3a36b..6431d46d39 100644
--- a/crates/hir-expand/src/prettify_macro_expansion_.rs
+++ b/crates/hir-expand/src/prettify_macro_expansion_.rs
@@ -46,7 +46,7 @@ pub fn prettify_macro_expansion(
} else if let Some(crate_name) = &macro_def_crate.extra_data(db).display_name {
make::tokens::ident(crate_name.crate_name().as_str())
} else {
- return dollar_crate.clone();
+ dollar_crate.clone()
}
});
if replacement.text() == "$crate" {
diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs
index 1c8ebb6f53..f97d721dfa 100644
--- a/crates/hir-expand/src/proc_macro.rs
+++ b/crates/hir-expand/src/proc_macro.rs
@@ -4,7 +4,7 @@ use core::fmt;
use std::any::Any;
use std::{panic::RefUnwindSafe, sync};
-use base_db::{Crate, CrateBuilderId, CratesIdMap, Env};
+use base_db::{Crate, CrateBuilderId, CratesIdMap, Env, ProcMacroLoadingError};
use intern::Symbol;
use rustc_hash::FxHashMap;
use span::Span;
@@ -53,8 +53,8 @@ pub enum ProcMacroExpansionError {
System(String),
}
-pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, (String, bool)>;
-type StoredProcMacroLoadResult = Result<Box<[ProcMacro]>, (Box<str>, bool)>;
+pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, ProcMacroLoadingError>;
+type StoredProcMacroLoadResult = Result<Box<[ProcMacro]>, ProcMacroLoadingError>;
#[derive(Default, Debug)]
pub struct ProcMacrosBuilder(FxHashMap<CrateBuilderId, Arc<CrateProcMacros>>);
@@ -77,9 +77,7 @@ impl ProcMacrosBuilder {
proc_macros_crate,
match proc_macro {
Ok(it) => Arc::new(CrateProcMacros(Ok(it.into_boxed_slice()))),
- Err((e, hard_err)) => {
- Arc::new(CrateProcMacros(Err((e.into_boxed_str(), hard_err))))
- }
+ Err(e) => Arc::new(CrateProcMacros(Err(e))),
},
);
}
@@ -139,8 +137,8 @@ impl CrateProcMacros {
)
}
- pub fn get_error(&self) -> Option<(&str, bool)> {
- self.0.as_ref().err().map(|(e, hard_err)| (&**e, *hard_err))
+ pub fn get_error(&self) -> Option<&ProcMacroLoadingError> {
+ self.0.as_ref().err()
}
/// Fetch the [`CustomProcMacroExpander`]s and their corresponding names for the given crate.
diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml
index 8b65126e7b..7cc0a26d37 100644
--- a/crates/hir-ty/Cargo.toml
+++ b/crates/hir-ty/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs
index 7acc9456ec..cc8f7bf04a 100644
--- a/crates/hir-ty/src/autoderef.rs
+++ b/crates/hir-ty/src/autoderef.rs
@@ -208,7 +208,7 @@ pub(crate) fn deref_by_trait(
};
let trait_id = trait_id()?;
let target =
- db.trait_items(trait_id).associated_type_by_name(&Name::new_symbol_root(sym::Target))?;
+ trait_id.trait_items(db).associated_type_by_name(&Name::new_symbol_root(sym::Target))?;
let projection = {
let b = TyBuilder::subst_for_def(db, trait_id, None);
diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs
index 7945442811..26b635298a 100644
--- a/crates/hir-ty/src/chalk_db.rs
+++ b/crates/hir-ty/src/chalk_db.rs
@@ -315,9 +315,8 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
crate::ImplTraitId::AsyncBlockTypeImplTrait(..) => {
if let Some((future_trait, future_output)) =
LangItem::Future.resolve_trait(self.db, self.krate).and_then(|trait_| {
- let alias = self
- .db
- .trait_items(trait_)
+ let alias = trait_
+ .trait_items(self.db)
.associated_type_by_name(&Name::new_symbol_root(sym::Output))?;
Some((trait_, alias))
})
@@ -711,7 +710,7 @@ pub(crate) fn trait_datum_query(
};
let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
let associated_ty_ids =
- db.trait_items(trait_).associated_types().map(to_assoc_type_id).collect();
+ trait_.trait_items(db).associated_types().map(to_assoc_type_id).collect();
let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
let well_known = db.lang_attr(trait_.into()).and_then(well_known_trait_from_lang_item);
let trait_datum = TraitDatum {
@@ -802,7 +801,7 @@ pub(crate) fn adt_datum_query(
// this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it
let _variant_id_to_fields = |id: VariantId| {
- let variant_data = &id.variant_data(db);
+ let variant_data = &id.fields(db);
let fields = if variant_data.fields().is_empty() {
vec![]
} else {
@@ -879,7 +878,7 @@ fn impl_def_datum(db: &dyn HirDatabase, krate: Crate, impl_id: hir_def::ImplId)
let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive };
let impl_datum_bound = rust_ir::ImplDatumBound { trait_ref, where_clauses };
- let trait_data = db.trait_items(trait_);
+ let trait_data = trait_.trait_items(db);
let associated_ty_value_ids = impl_id
.impl_items(db)
.items
@@ -931,8 +930,9 @@ fn type_alias_associated_ty_value(
.into_value_and_skipped_binders()
.0; // we don't return any assoc ty values if the impl'd trait can't be resolved
- let assoc_ty = db
- .trait_items(trait_ref.hir_trait_id())
+ let assoc_ty = trait_ref
+ .hir_trait_id()
+ .trait_items(db)
.associated_type_by_name(&type_alias_data.name)
.expect("assoc ty value should not exist"); // validated when building the impl data as well
let (ty, binders) = db.ty(type_alias.into()).into_value_and_skipped_binders();
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index 1029969992..5d3be07f3d 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -237,15 +237,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
// Interned IDs for Chalk integration
#[salsa::interned]
- fn intern_type_or_const_param_id(
- &self,
- param_id: TypeOrConstParamId,
- ) -> InternedTypeOrConstParamId;
-
- #[salsa::interned]
- fn intern_lifetime_param_id(&self, param_id: LifetimeParamId) -> InternedLifetimeParamId;
-
- #[salsa::interned]
fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
#[salsa::interned]
@@ -282,9 +273,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::variance::variances_of)]
#[salsa::cycle(
- // cycle_fn = crate::variance::variances_of_cycle_fn,
- // cycle_initial = crate::variance::variances_of_cycle_initial,
- cycle_result = crate::variance::variances_of_cycle_initial,
+ cycle_fn = crate::variance::variances_of_cycle_fn,
+ cycle_initial = crate::variance::variances_of_cycle_initial,
)]
fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>;
@@ -329,9 +319,31 @@ fn hir_database_is_dyn_compatible() {
fn _assert_dyn_compatible(_: &dyn HirDatabase) {}
}
-impl_intern_key!(InternedTypeOrConstParamId, TypeOrConstParamId);
+#[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
+#[derive(PartialOrd, Ord)]
+pub struct InternedTypeOrConstParamId {
+ pub loc: TypeOrConstParamId,
+}
+impl ::std::fmt::Debug for InternedTypeOrConstParamId {
+ fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+ f.debug_tuple(stringify!(InternedTypeOrConstParamId))
+ .field(&format_args!("{:04x}", self.0.index()))
+ .finish()
+ }
+}
-impl_intern_key!(InternedLifetimeParamId, LifetimeParamId);
+#[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
+#[derive(PartialOrd, Ord)]
+pub struct InternedLifetimeParamId {
+ pub loc: LifetimeParamId,
+}
+impl ::std::fmt::Debug for InternedLifetimeParamId {
+ fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+ f.debug_tuple(stringify!(InternedLifetimeParamId))
+ .field(&format_args!("{:04x}", self.0.index()))
+ .finish()
+ }
+}
impl_intern_key!(InternedConstParamId, ConstParamId);
diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs
index 1873f12fb7..9c0f8f4008 100644
--- a/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -307,7 +307,7 @@ impl<'a> DeclValidator<'a> {
/// Check incorrect names for struct fields.
fn validate_struct_fields(&mut self, struct_id: StructId) {
- let data = self.db.variant_fields(struct_id.into());
+ let data = struct_id.fields(self.db);
if data.shape != FieldsShape::Record {
return;
};
@@ -468,7 +468,7 @@ impl<'a> DeclValidator<'a> {
/// Check incorrect names for fields of enum variant.
fn validate_enum_variant_fields(&mut self, variant_id: EnumVariantId) {
- let variant_data = self.db.variant_fields(variant_id.into());
+ let variant_data = variant_id.fields(self.db);
if variant_data.shape != FieldsShape::Record {
return;
};
diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs
index df2eb410b9..5d56957be6 100644
--- a/crates/hir-ty/src/diagnostics/expr.rs
+++ b/crates/hir-ty/src/diagnostics/expr.rs
@@ -494,7 +494,7 @@ impl FilterMapNextChecker {
Some(next_function_id),
match next_function_id.lookup(db).container {
ItemContainerId::TraitId(iterator_trait_id) => {
- let iterator_trait_items = &db.trait_items(iterator_trait_id).items;
+ let iterator_trait_items = &iterator_trait_id.trait_items(db).items;
iterator_trait_items.iter().find_map(|(name, it)| match it {
&AssocItemId::FunctionId(id) if *name == sym::filter_map => Some(id),
_ => None,
@@ -558,7 +558,7 @@ pub fn record_literal_missing_fields(
return None;
}
- let variant_data = variant_def.variant_data(db);
+ let variant_data = variant_def.fields(db);
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<LocalFieldId> = variant_data
@@ -588,7 +588,7 @@ pub fn record_pattern_missing_fields(
return None;
}
- let variant_data = variant_def.variant_data(db);
+ let variant_data = variant_def.fields(db);
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<LocalFieldId> = variant_data
diff --git a/crates/hir-ty/src/diagnostics/match_check.rs b/crates/hir-ty/src/diagnostics/match_check.rs
index 916876d4ac..c3ab5aff3d 100644
--- a/crates/hir-ty/src/diagnostics/match_check.rs
+++ b/crates/hir-ty/src/diagnostics/match_check.rs
@@ -25,7 +25,6 @@ use crate::{
db::HirDatabase,
display::{HirDisplay, HirDisplayError, HirFormatter},
infer::BindingMode,
- lang_items::is_box,
};
use self::pat_util::EnumerateAndAdjustIterator;
@@ -77,7 +76,7 @@ pub(crate) enum PatKind {
subpatterns: Vec<FieldPat>,
},
- /// `box P`, `&P`, `&mut P`, etc.
+ /// `&P`, `&mut P`, etc.
Deref {
subpattern: Pat,
},
@@ -169,13 +168,13 @@ impl<'a> PatCtxt<'a> {
}
hir_def::hir::Pat::TupleStruct { ref args, ellipsis, .. } if variant.is_some() => {
- let expected_len = variant.unwrap().variant_data(self.db).fields().len();
+ let expected_len = variant.unwrap().fields(self.db).fields().len();
let subpatterns = self.lower_tuple_subpats(args, expected_len, ellipsis);
self.lower_variant_or_leaf(pat, ty, subpatterns)
}
hir_def::hir::Pat::Record { ref args, .. } if variant.is_some() => {
- let variant_data = variant.unwrap().variant_data(self.db);
+ let variant_data = variant.unwrap().fields(self.db);
let subpatterns = args
.iter()
.map(|field| {
@@ -345,7 +344,7 @@ impl HirDisplay for Pat {
)?,
};
- let variant_data = variant.variant_data(f.db);
+ let variant_data = variant.fields(f.db);
if variant_data.shape == FieldsShape::Record {
write!(f, " {{ ")?;
@@ -377,7 +376,7 @@ impl HirDisplay for Pat {
}
let num_fields =
- variant.map_or(subpatterns.len(), |v| v.variant_data(f.db).fields().len());
+ variant.map_or(subpatterns.len(), |v| v.fields(f.db).fields().len());
if num_fields != 0 || variant.is_none() {
write!(f, "(")?;
let subpats = (0..num_fields).map(|i| {
@@ -406,7 +405,6 @@ impl HirDisplay for Pat {
}
PatKind::Deref { subpattern } => {
match self.ty.kind(Interner) {
- TyKind::Adt(adt, _) if is_box(f.db, adt.0) => write!(f, "box ")?,
&TyKind::Ref(mutbl, ..) => {
write!(f, "&{}", if mutbl == Mutability::Mut { "mut " } else { "" })?
}
diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
index 2873a3e09e..22b7f5ac9f 100644
--- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
+++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -6,7 +6,7 @@ use std::fmt;
use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
use intern::sym;
use rustc_pattern_analysis::{
- Captures, IndexVec, PatCx, PrivateUninhabitedField,
+ IndexVec, PatCx, PrivateUninhabitedField,
constructor::{Constructor, ConstructorSet, VariantVisibility},
usefulness::{PlaceValidity, UsefulnessReport, compute_match_usefulness},
};
@@ -21,7 +21,7 @@ use crate::{
inhabitedness::{is_enum_variant_uninhabited_from, is_ty_uninhabited_from},
};
-use super::{FieldPat, Pat, PatKind, is_box};
+use super::{FieldPat, Pat, PatKind};
use Constructor::*;
@@ -138,15 +138,15 @@ impl<'db> MatchCheckCtx<'db> {
}
// This lists the fields of a variant along with their types.
- fn list_variant_fields<'a>(
- &'a self,
- ty: &'a Ty,
+ fn list_variant_fields(
+ &self,
+ ty: &Ty,
variant: VariantId,
- ) -> impl Iterator<Item = (LocalFieldId, Ty)> + Captures<'a> + Captures<'db> {
+ ) -> impl Iterator<Item = (LocalFieldId, Ty)> {
let (_, substs) = ty.as_adt().unwrap();
let field_tys = self.db.field_types(variant);
- let fields_len = variant.variant_data(self.db).fields().len() as u32;
+ let fields_len = variant.fields(self.db).fields().len() as u32;
(0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).map(move |fid| {
let ty = field_tys[fid].clone().substitute(Interner, substs);
@@ -170,8 +170,6 @@ impl<'db> MatchCheckCtx<'db> {
}
PatKind::Deref { subpattern } => {
ctor = match pat.ty.kind(Interner) {
- // This is a box pattern.
- TyKind::Adt(adt, _) if is_box(self.db, adt.0) => Struct,
TyKind::Ref(..) => Ref,
_ => {
never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
@@ -194,23 +192,6 @@ impl<'db> MatchCheckCtx<'db> {
ctor = Struct;
arity = substs.len(Interner);
}
- TyKind::Adt(adt, _) if is_box(self.db, adt.0) => {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
- // _)` or a box pattern. As a hack to avoid an ICE with the former, we
- // ignore other fields than the first one. This will trigger an error later
- // anyway.
- // See https://github.com/rust-lang/rust/issues/82772 ,
- // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
- // The problem is that we can't know from the type whether we'll match
- // normally or through box-patterns. We'll have to figure out a proper
- // solution when we introduce generalized deref patterns. Also need to
- // prevent mixing of those two options.
- fields.retain(|ipat| ipat.idx == 0);
- ctor = Struct;
- arity = 1;
- }
&TyKind::Adt(AdtId(adt), _) => {
ctor = match pat.kind.as_ref() {
PatKind::Leaf { .. } if matches!(adt, hir_def::AdtId::UnionId(_)) => {
@@ -229,7 +210,7 @@ impl<'db> MatchCheckCtx<'db> {
}
};
let variant = Self::variant_id_for_adt(self.db, &ctor, adt).unwrap();
- arity = variant.variant_data(self.db).fields().len();
+ arity = variant.fields(self.db).fields().len();
}
_ => {
never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
@@ -277,12 +258,6 @@ impl<'db> MatchCheckCtx<'db> {
})
.collect(),
},
- TyKind::Adt(adt, _) if is_box(self.db, adt.0) => {
- // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
- // of `std`). So this branch is only reachable when the feature is enabled and
- // the pattern is a box pattern.
- PatKind::Deref { subpattern: subpatterns.next().unwrap() }
- }
TyKind::Adt(adt, substs) => {
let variant = Self::variant_id_for_adt(self.db, pat.ctor(), adt.0).unwrap();
let subpatterns = self
@@ -343,14 +318,8 @@ impl PatCx for MatchCheckCtx<'_> {
Struct | Variant(_) | UnionField => match *ty.kind(Interner) {
TyKind::Tuple(arity, ..) => arity,
TyKind::Adt(AdtId(adt), ..) => {
- if is_box(self.db, adt) {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- 1
- } else {
- let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
- variant.variant_data(self.db).fields().len()
- }
+ let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
+ variant.fields(self.db).fields().len()
}
_ => {
never!("Unexpected type for `Single` constructor: {:?}", ty);
@@ -383,29 +352,22 @@ impl PatCx for MatchCheckCtx<'_> {
tys.cloned().map(|ty| (ty, PrivateUninhabitedField(false))).collect()
}
TyKind::Ref(.., rty) => single(rty.clone()),
- &TyKind::Adt(AdtId(adt), ref substs) => {
- if is_box(self.db, adt) {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- let subst_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
- single(subst_ty)
- } else {
- let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
-
- let visibilities = LazyCell::new(|| self.db.field_visibilities(variant));
-
- self.list_variant_fields(ty, variant)
- .map(move |(fid, ty)| {
- let is_visible = || {
- matches!(adt, hir_def::AdtId::EnumId(..))
- || visibilities[fid].is_visible_from(self.db, self.module)
- };
- let is_uninhabited = self.is_uninhabited(&ty);
- let private_uninhabited = is_uninhabited && !is_visible();
- (ty, PrivateUninhabitedField(private_uninhabited))
- })
- .collect()
- }
+ &TyKind::Adt(AdtId(adt), ..) => {
+ let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
+
+ let visibilities = LazyCell::new(|| self.db.field_visibilities(variant));
+
+ self.list_variant_fields(ty, variant)
+ .map(move |(fid, ty)| {
+ let is_visible = || {
+ matches!(adt, hir_def::AdtId::EnumId(..))
+ || visibilities[fid].is_visible_from(self.db, self.module)
+ };
+ let is_uninhabited = self.is_uninhabited(&ty);
+ let private_uninhabited = is_uninhabited && !is_visible();
+ (ty, PrivateUninhabitedField(private_uninhabited))
+ })
+ .collect()
}
ty_kind => {
never!("Unexpected type for `{:?}` constructor: {:?}", ctor, ty_kind);
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index 1aa7e0fcf8..810fe76f23 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -888,7 +888,7 @@ fn render_const_scalar(
write!(f, "{}", data.name.display(f.db, f.edition()))?;
let field_types = f.db.field_types(s.into());
render_variant_after_name(
- &f.db.variant_fields(s.into()),
+ s.fields(f.db),
f,
&field_types,
f.db.trait_environment(adt.0.into()),
@@ -920,7 +920,7 @@ fn render_const_scalar(
)?;
let field_types = f.db.field_types(var_id.into());
render_variant_after_name(
- &f.db.variant_fields(var_id.into()),
+ var_id.fields(f.db),
f,
&field_types,
f.db.trait_environment(adt.0.into()),
@@ -1394,7 +1394,7 @@ impl HirDisplay for Ty {
let future_trait =
LangItem::Future.resolve_trait(db, body.module(db).krate());
let output = future_trait.and_then(|t| {
- db.trait_items(t)
+ t.trait_items(db)
.associated_type_by_name(&Name::new_symbol_root(sym::Output))
});
write!(f, "impl ")?;
@@ -1432,10 +1432,10 @@ impl HirDisplay for Ty {
match f.closure_style {
ClosureStyle::Hide => return write!(f, "{TYPE_HINT_TRUNCATION}"),
ClosureStyle::ClosureWithId => {
- return write!(f, "{{closure#{:?}}}", id.0.as_u32());
+ return write!(f, "{{closure#{:?}}}", id.0.index());
}
ClosureStyle::ClosureWithSubst => {
- write!(f, "{{closure#{:?}}}", id.0.as_u32())?;
+ write!(f, "{{closure#{:?}}}", id.0.index())?;
return hir_fmt_generics(f, substs.as_slice(Interner), None, None);
}
_ => (),
@@ -2178,6 +2178,7 @@ impl HirDisplayWithExpressionStore for TypeRefId {
f.write_joined(
generic_params
.where_predicates()
+ .iter()
.filter_map(|it| match it {
WherePredicate::TypeBound { target, bound }
| WherePredicate::ForLifetime { lifetimes: _, target, bound }
diff --git a/crates/hir-ty/src/dyn_compatibility.rs b/crates/hir-ty/src/dyn_compatibility.rs
index 48094945c1..30949c83bf 100644
--- a/crates/hir-ty/src/dyn_compatibility.rs
+++ b/crates/hir-ty/src/dyn_compatibility.rs
@@ -101,7 +101,7 @@ where
// rustc checks for non-lifetime binders here, but we don't support HRTB yet
- let trait_data = db.trait_items(trait_);
+ let trait_data = trait_.trait_items(db);
for (_, assoc_item) in &trait_data.items {
dyn_compatibility_violation_for_assoc_item(db, trait_, *assoc_item, cb)?;
}
@@ -164,7 +164,7 @@ fn predicates_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool {
// Same as the above, `predicates_reference_self`
fn bounds_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool {
- let trait_data = db.trait_items(trait_);
+ let trait_data = trait_.trait_items(db);
trait_data
.items
.iter()
diff --git a/crates/hir-ty/src/generics.rs b/crates/hir-ty/src/generics.rs
index a3ed39934c..f14872e68c 100644
--- a/crates/hir-ty/src/generics.rs
+++ b/crates/hir-ty/src/generics.rs
@@ -60,7 +60,16 @@ impl Generics {
}
pub(crate) fn where_predicates(&self) -> impl Iterator<Item = &WherePredicate> {
- self.params.where_predicates()
+ self.params.where_predicates().iter()
+ }
+
+ pub(crate) fn has_no_predicates(&self) -> bool {
+ self.params.has_no_predicates()
+ && self.parent_generics.as_ref().is_none_or(|g| g.params.has_no_predicates())
+ }
+
+ pub(crate) fn is_empty(&self) -> bool {
+ self.params.is_empty() && self.parent_generics.as_ref().is_none_or(|g| g.params.is_empty())
}
pub(crate) fn iter_id(&self) -> impl Iterator<Item = GenericParamId> + '_ {
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 80478f1937..e880438e3a 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -460,19 +460,17 @@ pub struct InferenceResult {
/// Whenever a tuple field expression access a tuple field, we allocate a tuple id in
/// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of
/// that which allows us to resolve a [`TupleFieldId`]s type.
- pub tuple_field_access_types: FxHashMap<TupleId, Substitution>,
+ tuple_field_access_types: FxHashMap<TupleId, Substitution>,
/// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead.
- pub diagnostics: Vec<InferenceDiagnostic>,
- pub type_of_expr: ArenaMap<ExprId, Ty>,
+ diagnostics: Vec<InferenceDiagnostic>,
+ pub(crate) type_of_expr: ArenaMap<ExprId, Ty>,
/// For each pattern record the type it resolves to.
///
/// **Note**: When a pattern type is resolved it may still contain
/// unresolved or missing subpatterns or subpatterns of mismatched types.
- pub type_of_pat: ArenaMap<PatId, Ty>,
- pub type_of_binding: ArenaMap<BindingId, Ty>,
- pub type_of_rpit: ArenaMap<ImplTraitIdx, Ty>,
- /// Type of the result of `.into_iter()` on the for. `ExprId` is the one of the whole for loop.
- pub type_of_for_iterator: FxHashMap<ExprId, Ty>,
+ pub(crate) type_of_pat: ArenaMap<PatId, Ty>,
+ pub(crate) type_of_binding: ArenaMap<BindingId, Ty>,
+ pub(crate) type_of_rpit: ArenaMap<ImplTraitIdx, Ty>,
type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
/// Whether there are any type-mismatching errors in the result.
// FIXME: This isn't as useful as initially thought due to us falling back placeholders to
@@ -483,7 +481,7 @@ pub struct InferenceResult {
// FIXME: Move this into `InferenceContext`
standard_types: InternedStandardTypes,
/// Stores the types which were implicitly dereferenced in pattern binding modes.
- pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
+ pub(crate) pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
/// Stores the binding mode (`ref` in `let ref x = 2`) of bindings.
///
/// This one is tied to the `PatId` instead of `BindingId`, because in some rare cases, a binding in an
@@ -497,12 +495,12 @@ pub struct InferenceResult {
/// }
/// ```
/// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
- pub binding_modes: ArenaMap<PatId, BindingMode>,
- pub expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
+ pub(crate) binding_modes: ArenaMap<PatId, BindingMode>,
+ pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
pub(crate) closure_info: FxHashMap<ClosureId, (Vec<CapturedItem>, FnTrait)>,
// FIXME: remove this field
pub mutated_bindings_in_closure: FxHashSet<BindingId>,
- pub coercion_casts: FxHashSet<ExprId>,
+ pub(crate) coercion_casts: FxHashSet<ExprId>,
}
impl InferenceResult {
@@ -566,6 +564,26 @@ impl InferenceResult {
pub fn is_erroneous(&self) -> bool {
self.has_errors && self.type_of_expr.iter().count() == 0
}
+
+ pub fn diagnostics(&self) -> &[InferenceDiagnostic] {
+ &self.diagnostics
+ }
+
+ pub fn tuple_field_access_type(&self, id: TupleId) -> &Substitution {
+ &self.tuple_field_access_types[&id]
+ }
+
+ pub fn pat_adjustment(&self, id: PatId) -> Option<&[Ty]> {
+ self.pat_adjustments.get(&id).map(|it| &**it)
+ }
+
+ pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment]> {
+ self.expr_adjustments.get(&id).map(|it| &**it)
+ }
+
+ pub fn binding_mode(&self, id: PatId) -> Option<BindingMode> {
+ self.binding_modes.get(id).copied()
+ }
}
impl Index<ExprId> for InferenceResult {
@@ -772,7 +790,6 @@ impl<'db> InferenceContext<'db> {
type_of_pat,
type_of_binding,
type_of_rpit,
- type_of_for_iterator,
type_mismatches,
has_errors,
standard_types: _,
@@ -832,11 +849,6 @@ impl<'db> InferenceContext<'db> {
*has_errors = *has_errors || ty.contains_unknown();
}
type_of_rpit.shrink_to_fit();
- for ty in type_of_for_iterator.values_mut() {
- *ty = table.resolve_completely(ty.clone());
- *has_errors = *has_errors || ty.contains_unknown();
- }
- type_of_for_iterator.shrink_to_fit();
*has_errors |= !type_mismatches.is_empty();
@@ -1813,7 +1825,7 @@ impl<'db> InferenceContext<'db> {
}
fn resolve_output_on(&self, trait_: TraitId) -> Option<TypeAliasId> {
- self.db.trait_items(trait_).associated_type_by_name(&Name::new_symbol_root(sym::Output))
+ trait_.trait_items(self.db).associated_type_by_name(&Name::new_symbol_root(sym::Output))
}
fn resolve_lang_trait(&self, lang: LangItem) -> Option<TraitId> {
diff --git a/crates/hir-ty/src/infer/cast.rs b/crates/hir-ty/src/infer/cast.rs
index 8d345defdc..4e95eca3f9 100644
--- a/crates/hir-ty/src/infer/cast.rs
+++ b/crates/hir-ty/src/infer/cast.rs
@@ -382,7 +382,7 @@ fn pointer_kind(ty: &Ty, table: &mut InferenceTable<'_>) -> Result<Option<Pointe
return Err(());
};
- let struct_data = table.db.variant_fields(id.into());
+ let struct_data = id.fields(table.db);
if let Some((last_field, _)) = struct_data.fields().iter().last() {
let last_field_ty =
table.db.field_types(id.into())[last_field].clone().substitute(Interner, subst);
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index b756bb859d..c3029bf2b5 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -677,7 +677,7 @@ impl CapturedItem {
match proj {
ProjectionElem::Deref => {}
ProjectionElem::Field(Either::Left(f)) => {
- let variant_data = f.parent.variant_data(db);
+ let variant_data = f.parent.fields(db);
match variant_data.shape {
FieldsShape::Record => {
result.push('_');
@@ -720,7 +720,7 @@ impl CapturedItem {
// In source code autoderef kicks in.
ProjectionElem::Deref => {}
ProjectionElem::Field(Either::Left(f)) => {
- let variant_data = f.parent.variant_data(db);
+ let variant_data = f.parent.fields(db);
match variant_data.shape {
FieldsShape::Record => format_to!(
result,
@@ -782,7 +782,7 @@ impl CapturedItem {
if field_need_paren {
result = format!("({result})");
}
- let variant_data = f.parent.variant_data(db);
+ let variant_data = f.parent.fields(db);
let field = match variant_data.shape {
FieldsShape::Record => {
variant_data.fields()[f.local_id].name.as_str().to_owned()
@@ -1210,9 +1210,8 @@ impl InferenceContext<'_> {
if let Some(deref_trait) =
self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait())
{
- if let Some(deref_fn) = self
- .db
- .trait_items(deref_trait)
+ if let Some(deref_fn) = deref_trait
+ .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::deref_mut))
{
break 'b deref_fn == f;
@@ -1230,10 +1229,11 @@ impl InferenceContext<'_> {
self.select_from_expr(*expr);
}
}
- Expr::Let { pat: _, expr } => {
+ Expr::Let { pat, expr } => {
self.walk_expr(*expr);
- let place = self.place_of_expr(*expr);
- self.ref_expr(*expr, place);
+ if let Some(place) = self.place_of_expr(*expr) {
+ self.consume_with_pat(place, *pat);
+ }
}
Expr::UnaryOp { expr, op: _ }
| Expr::Array(Array::Repeat { initializer: expr, repeat: _ })
@@ -1560,7 +1560,7 @@ impl InferenceContext<'_> {
self.consume_place(place)
}
VariantId::StructId(s) => {
- let vd = &*self.db.variant_fields(s.into());
+ let vd = s.fields(self.db);
for field_pat in args.iter() {
let arg = field_pat.pat;
let Some(local_id) = vd.field(&field_pat.name) else {
@@ -1612,7 +1612,7 @@ impl InferenceContext<'_> {
self.consume_place(place)
}
VariantId::StructId(s) => {
- let vd = &*self.db.variant_fields(s.into());
+ let vd = s.fields(self.db);
let (al, ar) =
args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
let fields = vd.fields().iter();
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index 6403127929..d43c99fc28 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -542,7 +542,7 @@ impl InferenceContext<'_> {
_ if fields.is_empty() => {}
Some(def) => {
let field_types = self.db.field_types(def);
- let variant_data = def.variant_data(self.db);
+ let variant_data = def.fields(self.db);
let visibilities = self.db.field_visibilities(def);
for field in fields.iter() {
let field_def = {
@@ -654,9 +654,8 @@ impl InferenceContext<'_> {
match op {
UnaryOp::Deref => {
if let Some(deref_trait) = self.resolve_lang_trait(LangItem::Deref) {
- if let Some(deref_fn) = self
- .db
- .trait_items(deref_trait)
+ if let Some(deref_fn) = deref_trait
+ .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::deref))
{
// FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that
@@ -732,9 +731,32 @@ impl InferenceContext<'_> {
&Pat::Expr(expr) => {
Some(self.infer_expr(expr, &Expectation::none(), ExprIsRead::No))
}
- Pat::Path(path) => Some(self.infer_expr_path(path, target.into(), tgt_expr)),
+ Pat::Path(path) => {
+ let resolver_guard =
+ self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
+ let resolution = self.resolver.resolve_path_in_value_ns_fully(
+ self.db,
+ path,
+ self.body.pat_path_hygiene(target),
+ );
+ self.resolver.reset_to_guard(resolver_guard);
+
+ if matches!(
+ resolution,
+ Some(
+ ValueNs::ConstId(_)
+ | ValueNs::StructId(_)
+ | ValueNs::EnumVariantId(_)
+ )
+ ) {
+ None
+ } else {
+ Some(self.infer_expr_path(path, target.into(), tgt_expr))
+ }
+ }
_ => None,
};
+ let is_destructuring_assignment = lhs_ty.is_none();
if let Some(lhs_ty) = lhs_ty {
self.write_pat_ty(target, lhs_ty.clone());
@@ -748,7 +770,15 @@ impl InferenceContext<'_> {
self.inside_assignment = false;
self.resolver.reset_to_guard(resolver_guard);
}
- self.result.standard_types.unit.clone()
+ if is_destructuring_assignment && self.diverges.is_always() {
+ // Ordinary assignments always return `()`, even when they diverge.
+ // However, rustc lowers destructuring assignments into blocks, and blocks return `!` if they have no tail
+ // expression and they diverge. Therefore, we have to do the same here, even though we don't lower destructuring
+ // assignments into blocks.
+ self.table.new_maybe_never_var()
+ } else {
+ self.result.standard_types.unit.clone()
+ }
}
Expr::Range { lhs, rhs, range_type } => {
let lhs_ty =
@@ -813,9 +843,8 @@ impl InferenceContext<'_> {
self.table.new_lifetime_var(),
));
self.write_expr_adj(*base, adj.into_boxed_slice());
- if let Some(func) = self
- .db
- .trait_items(index_trait)
+ if let Some(func) = index_trait
+ .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::index))
{
let subst = TyBuilder::subst_for_def(self.db, index_trait, None);
@@ -1148,7 +1177,7 @@ impl InferenceContext<'_> {
let Some(trait_) = fn_x.get_id(self.db, self.table.trait_env.krate) else {
return;
};
- let trait_data = self.db.trait_items(trait_);
+ let trait_data = trait_.trait_items(self.db);
if let Some(func) = trait_data.method_by_name(&fn_x.method_name()) {
let subst = TyBuilder::subst_for_def(self.db, trait_, None)
.push(callee_ty.clone())
@@ -1316,7 +1345,7 @@ impl InferenceContext<'_> {
let trait_func = lang_items_for_bin_op(op).and_then(|(name, lang_item)| {
let trait_id = self.resolve_lang_item(lang_item)?.as_trait()?;
- let func = self.db.trait_items(trait_id).method_by_name(&name)?;
+ let func = trait_id.trait_items(self.db).method_by_name(&name)?;
Some((trait_id, func))
});
let (trait_, func) = match trait_func {
@@ -1568,12 +1597,12 @@ impl InferenceContext<'_> {
});
}
&TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref parameters) => {
- let local_id = self.db.variant_fields(s.into()).field(name)?;
+ let local_id = s.fields(self.db).field(name)?;
let field = FieldId { parent: s.into(), local_id };
(field, parameters.clone())
}
&TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), ref parameters) => {
- let local_id = self.db.variant_fields(u.into()).field(name)?;
+ let local_id = u.fields(self.db).field(name)?;
let field = FieldId { parent: u.into(), local_id };
(field, parameters.clone())
}
diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs
index ac450c0b55..d2eaf21236 100644
--- a/crates/hir-ty/src/infer/mutability.rs
+++ b/crates/hir-ty/src/infer/mutability.rs
@@ -129,9 +129,8 @@ impl InferenceContext<'_> {
if let Some(index_trait) =
LangItem::IndexMut.resolve_trait(self.db, self.table.trait_env.krate)
{
- if let Some(index_fn) = self
- .db
- .trait_items(index_trait)
+ if let Some(index_fn) = index_trait
+ .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::index_mut))
{
*f = index_fn;
@@ -194,9 +193,8 @@ impl InferenceContext<'_> {
});
if is_mut_ptr {
mutability = Mutability::Not;
- } else if let Some(deref_fn) = self
- .db
- .trait_items(deref_trait)
+ } else if let Some(deref_fn) = deref_trait
+ .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::deref_mut))
{
*f = deref_fn;
diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs
index 4bc3e167eb..99d3b5c7a8 100644
--- a/crates/hir-ty/src/infer/pat.rs
+++ b/crates/hir-ty/src/infer/pat.rs
@@ -38,7 +38,7 @@ impl InferenceContext<'_> {
decl: Option<DeclContext>,
) -> Ty {
let (ty, def) = self.resolve_variant(id.into(), path, true);
- let var_data = def.map(|it| it.variant_data(self.db));
+ let var_data = def.map(|it| it.fields(self.db));
if let Some(variant) = def {
self.write_variant_resolution(id.into(), variant);
}
@@ -60,7 +60,7 @@ impl InferenceContext<'_> {
_ if subs.is_empty() => {}
Some(def) => {
let field_types = self.db.field_types(def);
- let variant_data = def.variant_data(self.db);
+ let variant_data = def.fields(self.db);
let visibilities = self.db.field_visibilities(def);
let (pre, post) = match ellipsis {
@@ -129,7 +129,7 @@ impl InferenceContext<'_> {
_ if subs.len() == 0 => {}
Some(def) => {
let field_types = self.db.field_types(def);
- let variant_data = def.variant_data(self.db);
+ let variant_data = def.fields(self.db);
let visibilities = self.db.field_visibilities(def);
let substs = ty.as_adt().map(TupleExt::tail);
diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs
index c327c13b66..bc8648ecdd 100644
--- a/crates/hir-ty/src/infer/path.rs
+++ b/crates/hir-ty/src/infer/path.rs
@@ -278,7 +278,7 @@ impl InferenceContext<'_> {
) -> Option<(ValueNs, Substitution)> {
let trait_ = trait_ref.hir_trait_id();
let item =
- self.db.trait_items(trait_).items.iter().map(|(_name, id)| *id).find_map(|item| {
+ trait_.trait_items(self.db).items.iter().map(|(_name, id)| *id).find_map(|item| {
match item {
AssocItemId::FunctionId(func) => {
if segment.name == &self.db.function_signature(func).name {
diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs
index 631b571465..c07755535f 100644
--- a/crates/hir-ty/src/infer/unify.rs
+++ b/crates/hir-ty/src/infer/unify.rs
@@ -859,7 +859,7 @@ impl<'a> InferenceTable<'a> {
] {
let krate = self.trait_env.krate;
let fn_trait = fn_trait_name.get_id(self.db, krate)?;
- let trait_data = self.db.trait_items(fn_trait);
+ let trait_data = fn_trait.trait_items(self.db);
let output_assoc_type =
trait_data.associated_type_by_name(&Name::new_symbol_root(output_assoc_name))?;
@@ -1001,7 +1001,7 @@ impl<'a> InferenceTable<'a> {
// Must use a loop here and not recursion because otherwise users will conduct completely
// artificial examples of structs that have themselves as the tail field and complain r-a crashes.
while let Some((AdtId::StructId(id), subst)) = ty.as_adt() {
- let struct_data = self.db.variant_fields(id.into());
+ let struct_data = id.fields(self.db);
if let Some((last_field, _)) = struct_data.fields().iter().next_back() {
let last_field_ty = self.db.field_types(id.into())[last_field]
.clone()
diff --git a/crates/hir-ty/src/inhabitedness.rs b/crates/hir-ty/src/inhabitedness.rs
index 79a99321f1..b16b6a1178 100644
--- a/crates/hir-ty/src/inhabitedness.rs
+++ b/crates/hir-ty/src/inhabitedness.rs
@@ -132,7 +132,7 @@ impl UninhabitedFrom<'_> {
variant: VariantId,
subst: &Substitution,
) -> ControlFlow<VisiblyUninhabited> {
- let variant_data = self.db.variant_fields(variant);
+ let variant_data = variant.fields(self.db);
let fields = variant_data.fields();
if fields.is_empty() {
return CONTINUE_OPAQUELY_INHABITED;
diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs
index c58bd1b773..107da6a5af 100644
--- a/crates/hir-ty/src/layout.rs
+++ b/crates/hir-ty/src/layout.rs
@@ -261,7 +261,7 @@ pub fn layout_of_ty_query(
}
// Potentially-wide pointers.
TyKind::Ref(_, _, pointee) | TyKind::Raw(_, pointee) => {
- let mut data_ptr = scalar_unit(dl, Primitive::Pointer(AddressSpace::DATA));
+ let mut data_ptr = scalar_unit(dl, Primitive::Pointer(AddressSpace::ZERO));
if matches!(ty.kind(Interner), TyKind::Ref(..)) {
data_ptr.valid_range_mut().start = 1;
}
@@ -285,7 +285,7 @@ pub fn layout_of_ty_query(
scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false))
}
TyKind::Dyn(..) => {
- let mut vtable = scalar_unit(dl, Primitive::Pointer(AddressSpace::DATA));
+ let mut vtable = scalar_unit(dl, Primitive::Pointer(AddressSpace::ZERO));
vtable.valid_range_mut().start = 1;
vtable
}
@@ -375,7 +375,7 @@ pub(crate) fn layout_of_ty_cycle_result(
fn struct_tail_erasing_lifetimes(db: &dyn HirDatabase, pointee: Ty) -> Ty {
match pointee.kind(Interner) {
&TyKind::Adt(AdtId(hir_def::AdtId::StructId(i)), ref subst) => {
- let data = db.variant_fields(i.into());
+ let data = i.fields(db);
let mut it = data.fields().iter().rev();
match it.next() {
Some((f, _)) => {
diff --git a/crates/hir-ty/src/layout/adt.rs b/crates/hir-ty/src/layout/adt.rs
index dff986fec3..236f316366 100644
--- a/crates/hir-ty/src/layout/adt.rs
+++ b/crates/hir-ty/src/layout/adt.rs
@@ -42,7 +42,7 @@ pub fn layout_of_adt_query(
AdtId::StructId(s) => {
let sig = db.struct_signature(s);
let mut r = SmallVec::<[_; 1]>::new();
- r.push(handle_variant(s.into(), &db.variant_fields(s.into()))?);
+ r.push(handle_variant(s.into(), s.fields(db))?);
(
r,
sig.repr.unwrap_or_default(),
@@ -52,7 +52,7 @@ pub fn layout_of_adt_query(
AdtId::UnionId(id) => {
let data = db.union_signature(id);
let mut r = SmallVec::new();
- r.push(handle_variant(id.into(), &db.variant_fields(id.into()))?);
+ r.push(handle_variant(id.into(), id.fields(db))?);
(r, data.repr.unwrap_or_default(), false)
}
AdtId::EnumId(e) => {
@@ -60,7 +60,7 @@ pub fn layout_of_adt_query(
let r = variants
.variants
.iter()
- .map(|&(v, _, _)| handle_variant(v.into(), &db.variant_fields(v.into())))
+ .map(|&(v, _, _)| handle_variant(v.into(), v.fields(db)))
.collect::<Result<SmallVec<_>, _>>()?;
(r, db.enum_signature(e).repr.unwrap_or_default(), false)
}
diff --git a/crates/hir-ty/src/layout/target.rs b/crates/hir-ty/src/layout/target.rs
index e1e1c44996..88c33eccca 100644
--- a/crates/hir-ty/src/layout/target.rs
+++ b/crates/hir-ty/src/layout/target.rs
@@ -2,7 +2,7 @@
use base_db::Crate;
use hir_def::layout::TargetDataLayout;
-use rustc_abi::{AlignFromBytesError, TargetDataLayoutErrors};
+use rustc_abi::{AlignFromBytesError, TargetDataLayoutErrors, AddressSpace};
use triomphe::Arc;
use crate::db::HirDatabase;
@@ -12,7 +12,7 @@ pub fn target_data_layout_query(
krate: Crate,
) -> Result<Arc<TargetDataLayout>, Arc<str>> {
match &krate.workspace_data(db).data_layout {
- Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) {
+ Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it, AddressSpace::ZERO) {
Ok(it) => Ok(Arc::new(it)),
Err(e) => {
Err(match e {
@@ -39,6 +39,7 @@ pub fn target_data_layout_query(
target,
} => format!(r#"inconsistent target specification: "data-layout" claims pointers are {pointer_size}-bit, while "target-pointer-width" is `{target}`"#),
TargetDataLayoutErrors::InvalidBitsSize { err } => err,
+ TargetDataLayoutErrors::UnknownPointerSpecification { err } => format!(r#"use of unknown pointer specifier in "data-layout": {err}"#),
}.into())
}
},
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index 148f2a41e7..e787fd9b1e 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -891,8 +891,8 @@ pub fn callable_sig_from_fn_trait(
) -> Option<(FnTrait, CallableSig)> {
let krate = trait_env.krate;
let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?;
- let output_assoc_type = db
- .trait_items(fn_once_trait)
+ let output_assoc_type = fn_once_trait
+ .trait_items(db)
.associated_type_by_name(&Name::new_symbol_root(sym::Output))?;
let mut table = InferenceTable::new(db, trait_env.clone());
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 0a546768da..f32b6af4d8 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -581,11 +581,28 @@ impl<'a> TyLoweringContext<'a> {
match bound {
&TypeBound::Path(path, TraitBoundModifier::None) | &TypeBound::ForLifetime(_, path) => {
// FIXME Don't silently drop the hrtb lifetimes here
- if let Some((trait_ref, ctx)) = self.lower_trait_ref_from_path(path, self_ty) {
- if !ignore_bindings {
- assoc_bounds = ctx.assoc_type_bindings_from_type_bound(trait_ref.clone());
+ if let Some((trait_ref, mut ctx)) =
+ self.lower_trait_ref_from_path(path, self_ty.clone())
+ {
+ // FIXME(sized-hierarchy): Remove these bound modifications once we have implemented
+ // sized-hierarchy correctly.
+ let meta_sized = LangItem::MetaSized
+ .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate());
+ let pointee_sized = LangItem::PointeeSized
+ .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate());
+ if meta_sized.is_some_and(|it| it == trait_ref.hir_trait_id()) {
+ // Ignore this bound
+ } else if pointee_sized.is_some_and(|it| it == trait_ref.hir_trait_id()) {
+ // Regard this as `?Sized` bound
+ ctx.ty_ctx().unsized_types.insert(self_ty);
+ } else {
+ if !ignore_bindings {
+ assoc_bounds =
+ ctx.assoc_type_bindings_from_type_bound(trait_ref.clone());
+ }
+ clause =
+ Some(crate::wrap_empty_binders(WhereClause::Implemented(trait_ref)));
}
- clause = Some(crate::wrap_empty_binders(WhereClause::Implemented(trait_ref)));
}
}
&TypeBound::Path(path, TraitBoundModifier::Maybe) => {
@@ -711,7 +728,7 @@ impl<'a> TyLoweringContext<'a> {
.unwrap_or(it),
None => it,
},
- None => static_lifetime(),
+ None => error_lifetime(),
},
})
.intern(Interner)
@@ -805,7 +822,7 @@ fn named_associated_type_shorthand_candidates<R>(
) -> Option<R> {
let mut search = |t| {
all_super_trait_refs(db, t, |t| {
- let data = db.trait_items(t.hir_trait_id());
+ let data = t.hir_trait_id().trait_items(db);
for (name, assoc_id) in &data.items {
if let AssocItemId::TypeAliasId(alias) = assoc_id {
@@ -883,7 +900,12 @@ pub(crate) fn field_types_with_diagnostics_query(
db: &dyn HirDatabase,
variant_id: VariantId,
) -> (Arc<ArenaMap<LocalFieldId, Binders<Ty>>>, Diagnostics) {
- let var_data = db.variant_fields(variant_id);
+ let var_data = variant_id.fields(db);
+ let fields = var_data.fields();
+ if fields.is_empty() {
+ return (Arc::new(ArenaMap::default()), None);
+ }
+
let (resolver, def): (_, GenericDefId) = match variant_id {
VariantId::StructId(it) => (it.resolver(db), it.into()),
VariantId::UnionId(it) => (it.resolver(db), it.into()),
@@ -899,7 +921,7 @@ pub(crate) fn field_types_with_diagnostics_query(
LifetimeElisionKind::AnonymousReportError,
)
.with_type_param_mode(ParamLoweringMode::Variable);
- for (field_id, field_data) in var_data.fields().iter() {
+ for (field_id, field_data) in fields.iter() {
res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(field_data.type_ref)));
}
(Arc::new(res), create_diagnostics(ctx.diagnostics))
@@ -920,6 +942,10 @@ pub(crate) fn generic_predicates_for_param_query(
assoc_name: Option<Name>,
) -> GenericPredicates {
let generics = generics(db, def);
+ if generics.has_no_predicates() && generics.is_empty() {
+ return GenericPredicates(None);
+ }
+
let resolver = def.resolver(db);
let mut ctx = TyLoweringContext::new(
db,
@@ -936,8 +962,32 @@ pub(crate) fn generic_predicates_for_param_query(
| WherePredicate::TypeBound { target, bound, .. } => {
let invalid_target = { ctx.lower_ty_only_param(*target) != Some(param_id) };
if invalid_target {
- // If this is filtered out without lowering, `?Sized` is not gathered into `ctx.unsized_types`
- if let TypeBound::Path(_, TraitBoundModifier::Maybe) = bound {
+ // FIXME(sized-hierarchy): Revisit and adjust this properly once we have implemented
+ // sized-hierarchy correctly.
+ // If this is filtered out without lowering, `?Sized` or `PointeeSized` is not gathered into
+ // `ctx.unsized_types`
+ let lower = || -> bool {
+ match bound {
+ TypeBound::Path(_, TraitBoundModifier::Maybe) => true,
+ TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => {
+ let TypeRef::Path(path) = &ctx.store[path.type_ref()] else {
+ return false;
+ };
+ let Some(pointee_sized) =
+ LangItem::PointeeSized.resolve_trait(ctx.db, ctx.resolver.krate())
+ else {
+ return false;
+ };
+ // Lower the path directly with `Resolver` instead of `PathLoweringContext`
+ // to prevent diagnostics duplications.
+ ctx.resolver.resolve_path_in_type_ns_fully(ctx.db, path).is_some_and(
+ |it| matches!(it, TypeNs::TraitId(tr) if tr == pointee_sized),
+ )
+ }
+ _ => false,
+ }
+ }();
+ if lower {
ctx.lower_where_predicate(pred, true).for_each(drop);
}
return false;
@@ -957,7 +1007,7 @@ pub(crate) fn generic_predicates_for_param_query(
};
all_super_traits(db, tr).iter().any(|tr| {
- db.trait_items(*tr).items.iter().any(|(name, item)| {
+ tr.trait_items(db).items.iter().any(|(name, item)| {
matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
})
})
@@ -1025,6 +1075,10 @@ pub(crate) fn trait_environment_query(
def: GenericDefId,
) -> Arc<TraitEnvironment> {
let generics = generics(db, def);
+ if generics.has_no_predicates() && generics.is_empty() {
+ return TraitEnvironment::empty(def.krate(db));
+ }
+
let resolver = def.resolver(db);
let mut ctx = TyLoweringContext::new(
db,
@@ -1128,6 +1182,10 @@ where
F: Fn(&WherePredicate, GenericDefId) -> bool,
{
let generics = generics(db, def);
+ if generics.has_no_predicates() && generics.is_empty() {
+ return (GenericPredicates(None), None);
+ }
+
let resolver = def.resolver(db);
let mut ctx = TyLoweringContext::new(
db,
@@ -1154,7 +1212,7 @@ where
}
}
- if generics.len() > 0 {
+ if !generics.is_empty() {
let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
let explicitly_unsized_tys = ctx.unsized_types;
if let Some(implicitly_sized_predicates) =
@@ -1229,7 +1287,7 @@ pub(crate) fn generic_defaults_with_diagnostics_query(
def: GenericDefId,
) -> (GenericDefaults, Diagnostics) {
let generic_params = generics(db, def);
- if generic_params.len() == 0 {
+ if generic_params.is_empty() {
return (GenericDefaults(None), None);
}
let resolver = def.resolver(db);
@@ -1418,7 +1476,7 @@ fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnS
/// Build the type of a tuple struct constructor.
fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Option<Binders<Ty>> {
- let struct_data = db.variant_fields(def.into());
+ let struct_data = def.fields(db);
match struct_data.shape {
FieldsShape::Record => None,
FieldsShape::Unit => Some(type_for_adt(db, def.into())),
@@ -1451,7 +1509,7 @@ fn type_for_enum_variant_constructor(
def: EnumVariantId,
) -> Option<Binders<Ty>> {
let e = def.lookup(db).parent;
- match db.variant_fields(def.into()).shape {
+ match def.fields(db).shape {
FieldsShape::Record => None,
FieldsShape::Unit => Some(type_for_adt(db, e.into())),
FieldsShape::Tuple => {
diff --git a/crates/hir-ty/src/lower/path.rs b/crates/hir-ty/src/lower/path.rs
index 726eaf8b0a..06686b6a16 100644
--- a/crates/hir-ty/src/lower/path.rs
+++ b/crates/hir-ty/src/lower/path.rs
@@ -173,7 +173,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
self.skip_resolved_segment();
let segment = self.current_or_prev_segment;
let found =
- self.ctx.db.trait_items(trait_).associated_type_by_name(segment.name);
+ trait_.trait_items(self.ctx.db).associated_type_by_name(segment.name);
match found {
Some(associated_ty) => {
diff --git a/crates/hir-ty/src/mapping.rs b/crates/hir-ty/src/mapping.rs
index 6936d8193e..9d3d2044c4 100644
--- a/crates/hir-ty/src/mapping.rs
+++ b/crates/hir-ty/src/mapping.rs
@@ -13,7 +13,8 @@ use salsa::{
use crate::{
AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId, Interner, OpaqueTyId,
- PlaceholderIndex, chalk_db, db::HirDatabase,
+ PlaceholderIndex, chalk_db,
+ db::{HirDatabase, InternedLifetimeParamId, InternedTypeOrConstParamId},
};
pub trait ToChalk {
@@ -125,30 +126,32 @@ pub fn from_assoc_type_id(id: AssocTypeId) -> TypeAliasId {
pub fn from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> TypeOrConstParamId {
assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
// SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound.
- let interned_id = FromId::from_id(unsafe { Id::from_u32(idx.idx.try_into().unwrap()) });
- db.lookup_intern_type_or_const_param_id(interned_id)
+ let interned_id =
+ InternedTypeOrConstParamId::from_id(unsafe { Id::from_index(idx.idx.try_into().unwrap()) });
+ interned_id.loc(db)
}
pub fn to_placeholder_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> PlaceholderIndex {
- let interned_id = db.intern_type_or_const_param_id(id);
+ let interned_id = InternedTypeOrConstParamId::new(db, id);
PlaceholderIndex {
ui: chalk_ir::UniverseIndex::ROOT,
- idx: interned_id.as_id().as_u32() as usize,
+ idx: interned_id.as_id().index() as usize,
}
}
pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> LifetimeParamId {
assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
// SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound.
- let interned_id = FromId::from_id(unsafe { Id::from_u32(idx.idx.try_into().unwrap()) });
- db.lookup_intern_lifetime_param_id(interned_id)
+ let interned_id =
+ InternedLifetimeParamId::from_id(unsafe { Id::from_index(idx.idx.try_into().unwrap()) });
+ interned_id.loc(db)
}
pub fn lt_to_placeholder_idx(db: &dyn HirDatabase, id: LifetimeParamId) -> PlaceholderIndex {
- let interned_id = db.intern_lifetime_param_id(id);
+ let interned_id = InternedLifetimeParamId::new(db, id);
PlaceholderIndex {
ui: chalk_ir::UniverseIndex::ROOT,
- idx: interned_id.as_id().as_u32() as usize,
+ idx: interned_id.as_id().index() as usize,
}
}
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index 25f1782bdd..a6150a9bc1 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -1302,7 +1302,7 @@ fn iterate_trait_method_candidates(
// trait, but if we find out it doesn't, we'll skip the rest of the
// iteration
let mut known_implemented = false;
- for &(_, item) in db.trait_items(t).items.iter() {
+ for &(_, item) in t.trait_items(db).items.iter() {
// Don't pass a `visible_from_module` down to `is_valid_candidate`,
// since only inherent methods should be included into visibility checking.
let visible =
@@ -1429,7 +1429,7 @@ fn iterate_inherent_methods(
) -> ControlFlow<()> {
let db = table.db;
for t in traits {
- let data = db.trait_items(t);
+ let data = t.trait_items(db);
for &(_, item) in data.items.iter() {
// We don't pass `visible_from_module` as all trait items should be visible.
let visible = match is_valid_trait_method_candidate(
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index a8156ec375..55fada1436 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -630,7 +630,7 @@ impl Evaluator<'_> {
Ok(target_data_layout) => target_data_layout,
Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)),
};
- let cached_ptr_size = target_data_layout.pointer_size.bytes_usize();
+ let cached_ptr_size = target_data_layout.pointer_size().bytes_usize();
Ok(Evaluator {
target_data_layout,
stack: vec![0],
@@ -657,12 +657,12 @@ impl Evaluator<'_> {
cached_ptr_size,
cached_fn_trait_func: LangItem::Fn
.resolve_trait(db, crate_id)
- .and_then(|x| db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call))),
+ .and_then(|x| x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call))),
cached_fn_mut_trait_func: LangItem::FnMut.resolve_trait(db, crate_id).and_then(|x| {
- db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call_mut))
+ x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call_mut))
}),
cached_fn_once_trait_func: LangItem::FnOnce.resolve_trait(db, crate_id).and_then(|x| {
- db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call_once))
+ x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call_once))
}),
})
}
@@ -1749,8 +1749,7 @@ impl Evaluator<'_> {
AdtId::UnionId(_) => not_supported!("unsizing unions"),
AdtId::EnumId(_) => not_supported!("unsizing enums"),
};
- let Some((last_field, _)) =
- self.db.variant_fields(id.into()).fields().iter().next_back()
+ let Some((last_field, _)) = id.fields(self.db).fields().iter().next_back()
else {
not_supported!("unsizing struct without field");
};
@@ -2232,7 +2231,7 @@ impl Evaluator<'_> {
}
chalk_ir::TyKind::Adt(adt, subst) => match adt.0 {
AdtId::StructId(s) => {
- let data = this.db.variant_fields(s.into());
+ let data = s.fields(this.db);
let layout = this.layout(ty)?;
let field_types = this.db.field_types(s.into());
for (f, _) in data.fields().iter() {
@@ -2261,7 +2260,7 @@ impl Evaluator<'_> {
bytes,
e,
) {
- let data = &this.db.variant_fields(v.into());
+ let data = v.fields(this.db);
let field_types = this.db.field_types(v.into());
for (f, _) in data.fields().iter() {
let offset =
@@ -2808,7 +2807,7 @@ impl Evaluator<'_> {
) -> Result<()> {
let Some(drop_fn) = (|| {
let drop_trait = LangItem::Drop.resolve_trait(self.db, self.crate_id)?;
- self.db.trait_items(drop_trait).method_by_name(&Name::new_symbol_root(sym::drop))
+ drop_trait.trait_items(self.db).method_by_name(&Name::new_symbol_root(sym::drop))
})() else {
// in some tests we don't have drop trait in minicore, and
// we can ignore drop in them.
@@ -2838,7 +2837,7 @@ impl Evaluator<'_> {
return Ok(());
}
let layout = self.layout_adt(id.0, subst.clone())?;
- let variant_fields = self.db.variant_fields(s.into());
+ let variant_fields = s.fields(self.db);
match variant_fields.shape {
FieldsShape::Record | FieldsShape::Tuple => {
let field_types = self.db.field_types(s.into());
@@ -2918,7 +2917,7 @@ pub fn render_const_using_debug_impl(
not_supported!("core::fmt::Debug not found");
};
let Some(debug_fmt_fn) =
- db.trait_items(debug_trait).method_by_name(&Name::new_symbol_root(sym::fmt))
+ debug_trait.trait_items(db).method_by_name(&Name::new_symbol_root(sym::fmt))
else {
not_supported!("core::fmt::Debug::fmt not found");
};
@@ -3045,7 +3044,10 @@ impl IntValue {
(8, true) => Self::I64(i64::from_le_bytes(bytes.try_into().unwrap())),
(16, false) => Self::U128(u128::from_le_bytes(bytes.try_into().unwrap())),
(16, true) => Self::I128(i128::from_le_bytes(bytes.try_into().unwrap())),
- _ => panic!("invalid integer size"),
+ (len, is_signed) => {
+ never!("invalid integer size: {len}, signed: {is_signed}");
+ Self::I32(0)
+ }
}
}
diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs
index 6ebde01334..e9665d5ae9 100644
--- a/crates/hir-ty/src/mir/eval/shim.rs
+++ b/crates/hir-ty/src/mir/eval/shim.rs
@@ -1257,9 +1257,8 @@ impl Evaluator<'_> {
args.push(IntervalAndTy::new(addr, field, self, locals)?);
}
if let Some(target) = LangItem::FnOnce.resolve_trait(self.db, self.crate_id) {
- if let Some(def) = self
- .db
- .trait_items(target)
+ if let Some(def) = target
+ .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::call_once))
{
self.exec_fn_trait(
diff --git a/crates/hir-ty/src/mir/eval/shim/simd.rs b/crates/hir-ty/src/mir/eval/shim/simd.rs
index 984648cfec..bc331a23d9 100644
--- a/crates/hir-ty/src/mir/eval/shim/simd.rs
+++ b/crates/hir-ty/src/mir/eval/shim/simd.rs
@@ -31,7 +31,7 @@ impl Evaluator<'_> {
Some(len) => len,
_ => {
if let AdtId::StructId(id) = id.0 {
- let struct_data = self.db.variant_fields(id.into());
+ let struct_data = id.fields(self.db);
let fields = struct_data.fields();
let Some((first_field, _)) = fields.iter().next() else {
not_supported!("simd type with no field");
diff --git a/crates/hir-ty/src/mir/eval/tests.rs b/crates/hir-ty/src/mir/eval/tests.rs
index 3abbbe45e6..c1f86960e1 100644
--- a/crates/hir-ty/src/mir/eval/tests.rs
+++ b/crates/hir-ty/src/mir/eval/tests.rs
@@ -984,3 +984,17 @@ fn main<'a, T: Foo + Bar + Baz>(
|e| matches!(e, MirEvalError::MirLowerError(_, MirLowerError::GenericArgNotProvided(..))),
);
}
+
+#[test]
+fn format_args_pass() {
+ check_pass(
+ r#"
+//- minicore: fmt
+fn main() {
+ let x1 = format_args!("");
+ let x2 = format_args!("{}", x1);
+ let x3 = format_args!("{} {}", x1, x2);
+}
+"#,
+ );
+}
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 71e038b92f..845d6b8eae 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -503,7 +503,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(Some(current))
}
ValueNs::EnumVariantId(variant_id) => {
- let variant_fields = &self.db.variant_fields(variant_id.into());
+ let variant_fields = variant_id.fields(self.db);
if variant_fields.shape == FieldsShape::Unit {
let ty = self.infer.type_of_expr[expr_id].clone();
current = self.lower_enum_variant(
@@ -856,7 +856,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
TyKind::Adt(_, s) => s.clone(),
_ => not_supported!("Non ADT record literal"),
};
- let variant_fields = self.db.variant_fields(variant_id);
+ let variant_fields = variant_id.fields(self.db);
match variant_id {
VariantId::EnumVariantId(_) | VariantId::StructId(_) => {
let mut operands = vec![None; variant_fields.fields().len()];
@@ -1176,8 +1176,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
place,
Rvalue::Aggregate(
AggregateKind::Adt(st.into(), subst.clone()),
- self.db
- .variant_fields(st.into())
+ st.fields(self.db)
.fields()
.iter()
.map(|it| {
diff --git a/crates/hir-ty/src/mir/lower/as_place.rs b/crates/hir-ty/src/mir/lower/as_place.rs
index ad664693e2..e7bffead93 100644
--- a/crates/hir-ty/src/mir/lower/as_place.rs
+++ b/crates/hir-ty/src/mir/lower/as_place.rs
@@ -193,9 +193,8 @@ impl MirLowerCtx<'_> {
if let Some(deref_trait) =
self.resolve_lang_item(LangItem::DerefMut)?.as_trait()
{
- if let Some(deref_fn) = self
- .db
- .trait_items(deref_trait)
+ if let Some(deref_fn) = deref_trait
+ .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::deref_mut))
{
break 'b deref_fn == f;
@@ -347,9 +346,8 @@ impl MirLowerCtx<'_> {
.resolve_lang_item(trait_lang_item)?
.as_trait()
.ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
- let deref_fn = self
- .db
- .trait_items(deref_trait)
+ let deref_fn = deref_trait
+ .trait_items(self.db)
.method_by_name(&trait_method_name)
.ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
let deref_fn_op = Operand::const_zst(
diff --git a/crates/hir-ty/src/mir/lower/pattern_matching.rs b/crates/hir-ty/src/mir/lower/pattern_matching.rs
index b3c1f6f387..61c0685c48 100644
--- a/crates/hir-ty/src/mir/lower/pattern_matching.rs
+++ b/crates/hir-ty/src/mir/lower/pattern_matching.rs
@@ -609,7 +609,7 @@ impl MirLowerCtx<'_> {
}
self.pattern_matching_variant_fields(
shape,
- &self.db.variant_fields(v.into()),
+ v.fields(self.db),
variant,
current,
current_else,
@@ -619,7 +619,7 @@ impl MirLowerCtx<'_> {
}
VariantId::StructId(s) => self.pattern_matching_variant_fields(
shape,
- &self.db.variant_fields(s.into()),
+ s.fields(self.db),
variant,
current,
current_else,
diff --git a/crates/hir-ty/src/mir/pretty.rs b/crates/hir-ty/src/mir/pretty.rs
index 8764e48b53..78a69cf450 100644
--- a/crates/hir-ty/src/mir/pretty.rs
+++ b/crates/hir-ty/src/mir/pretty.rs
@@ -326,7 +326,7 @@ impl<'a> MirPrettyCtx<'a> {
w!(this, ")");
}
ProjectionElem::Field(Either::Left(field)) => {
- let variant_fields = this.db.variant_fields(field.parent);
+ let variant_fields = field.parent.fields(this.db);
let name = &variant_fields.fields()[field.local_id].name;
match field.parent {
hir_def::VariantId::EnumVariantId(e) => {
diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs
index d049c678e2..b5de0e52f5 100644
--- a/crates/hir-ty/src/test_db.rs
+++ b/crates/hir-ty/src/test_db.rs
@@ -166,10 +166,10 @@ impl TestDB {
self.events.lock().unwrap().take().unwrap()
}
- pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
+ pub(crate) fn log_executed(&self, f: impl FnOnce()) -> (Vec<String>, Vec<salsa::Event>) {
let events = self.log(f);
- events
- .into_iter()
+ let executed = events
+ .iter()
.filter_map(|e| match e.kind {
// This is pretty horrible, but `Debug` is the only way to inspect
// QueryDescriptor at the moment.
@@ -181,6 +181,7 @@ impl TestDB {
}
_ => None,
})
- .collect()
+ .collect();
+ (executed, events)
}
}
diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs
index 9ca6ee476c..79754bc8a0 100644
--- a/crates/hir-ty/src/tests.rs
+++ b/crates/hir-ty/src/tests.rs
@@ -486,7 +486,7 @@ pub(crate) fn visit_module(
});
}
ModuleDefId::TraitId(it) => {
- let trait_data = db.trait_items(it);
+ let trait_data = it.trait_items(db);
for &(_, item) in trait_data.items.iter() {
match item {
AssocItemId::FunctionId(it) => cb(it.into()),
diff --git a/crates/hir-ty/src/tests/closure_captures.rs b/crates/hir-ty/src/tests/closure_captures.rs
index 7fb981752d..dbc68eeba1 100644
--- a/crates/hir-ty/src/tests/closure_captures.rs
+++ b/crates/hir-ty/src/tests/closure_captures.rs
@@ -446,7 +446,7 @@ fn main() {
}
#[test]
-fn let_binding_is_a_ref_capture() {
+fn let_binding_is_a_ref_capture_in_ref_binding() {
check_closure_captures(
r#"
//- minicore:copy
@@ -454,12 +454,36 @@ struct S;
fn main() {
let mut s = S;
let s_ref = &mut s;
+ let mut s2 = S;
+ let s_ref2 = &mut s2;
let closure = || {
if let ref cb = s_ref {
+ } else if let ref mut cb = s_ref2 {
}
};
}
"#,
- expect!["83..135;49..54;112..117 ByRef(Shared) s_ref &'? &'? mut S"],
+ expect![[r#"
+ 129..225;49..54;149..155 ByRef(Shared) s_ref &'? &'? mut S
+ 129..225;93..99;188..198 ByRef(Mut { kind: Default }) s_ref2 &'? mut &'? mut S"#]],
+ );
+}
+
+#[test]
+fn let_binding_is_a_value_capture_in_binding() {
+ check_closure_captures(
+ r#"
+//- minicore:copy, option
+struct Box(i32);
+fn main() {
+ let b = Some(Box(0));
+ let closure = || {
+ if let Some(b) = b {
+ let _move = b;
+ }
+ };
+}
+"#,
+ expect!["73..149;37..38;103..104 ByValue b Option<Box>"],
);
}
diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs
index ddc5b71519..3894b4b6f7 100644
--- a/crates/hir-ty/src/tests/coercion.rs
+++ b/crates/hir-ty/src/tests/coercion.rs
@@ -561,7 +561,7 @@ trait Foo {}
fn test(f: impl Foo, g: &(impl Foo + ?Sized)) {
let _: &dyn Foo = &f;
let _: &dyn Foo = g;
- //^ expected &'? (dyn Foo + 'static), got &'? impl Foo + ?Sized
+ //^ expected &'? (dyn Foo + '?), got &'? impl Foo + ?Sized
}
"#,
);
diff --git a/crates/hir-ty/src/tests/display_source_code.rs b/crates/hir-ty/src/tests/display_source_code.rs
index a986b54a7b..6e3faa05a6 100644
--- a/crates/hir-ty/src/tests/display_source_code.rs
+++ b/crates/hir-ty/src/tests/display_source_code.rs
@@ -67,11 +67,11 @@ trait B: A {}
fn test<'a>(
_: &(dyn A<Assoc = ()> + Send),
- //^ &(dyn A<Assoc = ()> + Send + 'static)
+ //^ &(dyn A<Assoc = ()> + Send)
_: &'a (dyn Send + A<Assoc = ()>),
- //^ &'a (dyn A<Assoc = ()> + Send + 'static)
+ //^ &'a (dyn A<Assoc = ()> + Send)
_: &dyn B<Assoc = ()>,
- //^ &(dyn B<Assoc = ()> + 'static)
+ //^ &(dyn B<Assoc = ()>)
) {}
"#,
);
@@ -85,7 +85,7 @@ fn render_dyn_for_ty() {
trait Foo<'a> {}
fn foo(foo: &dyn for<'a> Foo<'a>) {}
- // ^^^ &(dyn Foo<'?> + 'static)
+ // ^^^ &dyn Foo<'?>
"#,
);
}
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index 905fd8a3bc..3159499e86 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -1,6 +1,7 @@
use base_db::SourceDatabase;
use expect_test::Expect;
use hir_def::{DefWithBodyId, ModuleDefId};
+use salsa::EventKind;
use test_fixture::WithFixture;
use crate::{db::HirDatabase, test_db::TestDB};
@@ -567,11 +568,11 @@ fn main() {
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
- "trait_items_with_diagnostics_shim",
+ "TraitItems::query_with_diagnostics_",
"body_shim",
"body_with_source_map_shim",
"attrs_shim",
- "of_",
+ "ImplItems::of_",
"infer_shim",
"trait_signature_shim",
"trait_signature_with_source_map_shim",
@@ -596,8 +597,8 @@ fn main() {
"struct_signature_with_source_map_shim",
"generic_predicates_shim",
"value_ty_shim",
- "variant_fields_shim",
- "variant_fields_with_source_map_shim",
+ "VariantFields::firewall_",
+ "VariantFields::query_",
"lang_item",
"inherent_impls_in_crate_shim",
"impl_signature_shim",
@@ -674,11 +675,11 @@ fn main() {
"file_item_tree_query",
"real_span_map_shim",
"crate_local_def_map",
- "trait_items_with_diagnostics_shim",
+ "TraitItems::query_with_diagnostics_",
"body_with_source_map_shim",
"attrs_shim",
"body_shim",
- "of_",
+ "ImplItems::of_",
"infer_shim",
"attrs_shim",
"trait_signature_with_source_map_shim",
@@ -695,11 +696,9 @@ fn main() {
"return_type_impl_traits_shim",
"infer_shim",
"function_signature_with_source_map_shim",
- "trait_environment_shim",
"expr_scopes_shim",
"struct_signature_with_source_map_shim",
- "generic_predicates_shim",
- "variant_fields_with_source_map_shim",
+ "VariantFields::query_",
"inherent_impls_in_crate_shim",
"impl_signature_with_source_map_shim",
"impl_signature_shim",
@@ -709,7 +708,6 @@ fn main() {
"impl_trait_with_diagnostics_shim",
"impl_self_ty_with_diagnostics_shim",
"generic_predicates_shim",
- "generic_predicates_shim",
]
"#]],
);
@@ -721,10 +719,23 @@ fn execute_assert_events(
required: &[(&str, usize)],
expect: Expect,
) {
- let events = db.log_executed(f);
- for (event, count) in required {
- let n = events.iter().filter(|it| it.contains(event)).count();
- assert_eq!(n, *count, "Expected {event} to be executed {count} times, but only got {n}");
- }
- expect.assert_debug_eq(&events);
+ let (executed, events) = db.log_executed(f);
+ salsa::attach(db, || {
+ for (event, count) in required {
+ let n = executed.iter().filter(|it| it.contains(event)).count();
+ assert_eq!(
+ n,
+ *count,
+ "Expected {event} to be executed {count} times, but only got {n}:\n \
+ Executed: {executed:#?}\n \
+ Event log: {events:#?}",
+ events = events
+ .iter()
+ .filter(|event| !matches!(event.kind, EventKind::WillCheckCancellation))
+ .map(|event| { format!("{:?}", event.kind) })
+ .collect::<Vec<_>>(),
+ );
+ }
+ expect.assert_debug_eq(&executed);
+ });
}
diff --git a/crates/hir-ty/src/tests/method_resolution.rs b/crates/hir-ty/src/tests/method_resolution.rs
index 94826acca3..c58ca6c67a 100644
--- a/crates/hir-ty/src/tests/method_resolution.rs
+++ b/crates/hir-ty/src/tests/method_resolution.rs
@@ -1153,9 +1153,9 @@ fn dyn_trait_super_trait_not_in_scope() {
51..55 'self': &'? Self
64..69 '{ 0 }': u32
66..67 '0': u32
- 176..177 'd': &'? (dyn Trait + 'static)
+ 176..177 'd': &'? (dyn Trait + '?)
191..207 '{ ...o(); }': ()
- 197..198 'd': &'? (dyn Trait + 'static)
+ 197..198 'd': &'? (dyn Trait + '?)
197..204 'd.foo()': u32
"#]],
);
@@ -2019,10 +2019,10 @@ impl dyn Error + Send {
/// Attempts to downcast the box to a concrete type.
pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<dyn Error + Send>> {
let err: Box<dyn Error> = self;
- // ^^^^ expected Box<dyn Error + 'static>, got Box<dyn Error + Send + 'static>
+ // ^^^^ expected Box<dyn Error + '?>, got Box<dyn Error + Send + '?>
// FIXME, type mismatch should not occur
<dyn Error>::downcast(err).map_err(|_| loop {})
- //^^^^^^^^^^^^^^^^^^^^^ type: fn downcast<{unknown}>(Box<dyn Error + 'static>) -> Result<Box<{unknown}>, Box<dyn Error + 'static>>
+ //^^^^^^^^^^^^^^^^^^^^^ type: fn downcast<{unknown}>(Box<dyn Error + '?>) -> Result<Box<{unknown}>, Box<dyn Error + '?>>
}
}
"#,
diff --git a/crates/hir-ty/src/tests/never_type.rs b/crates/hir-ty/src/tests/never_type.rs
index 1ca4c9b2ad..6a9135622d 100644
--- a/crates/hir-ty/src/tests/never_type.rs
+++ b/crates/hir-ty/src/tests/never_type.rs
@@ -785,3 +785,31 @@ fn make_up_a_pointer<T>() -> *const T {
"#]],
)
}
+
+#[test]
+fn diverging_destructuring_assignment() {
+ check_infer_with_mismatches(
+ r#"
+fn foo() {
+ let n = match 42 {
+ 0 => _ = loop {},
+ _ => 0,
+ };
+}
+ "#,
+ expect![[r#"
+ 9..84 '{ ... }; }': ()
+ 19..20 'n': i32
+ 23..81 'match ... }': i32
+ 29..31 '42': i32
+ 42..43 '0': i32
+ 42..43 '0': i32
+ 47..48 '_': !
+ 47..58 '_ = loop {}': i32
+ 51..58 'loop {}': !
+ 56..58 '{}': ()
+ 68..69 '_': i32
+ 73..74 '0': i32
+ "#]],
+ );
+}
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs
index ff8adeef1d..238753e12e 100644
--- a/crates/hir-ty/src/tests/regression.rs
+++ b/crates/hir-ty/src/tests/regression.rs
@@ -629,7 +629,7 @@ fn issue_4053_diesel_where_clauses() {
488..522 '{ ... }': ()
498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
498..508 'self.order': O
- 498..515 'self.o...into()': dyn QueryFragment<DB> + 'static
+ 498..515 'self.o...into()': dyn QueryFragment<DB> + '?
"#]],
);
}
@@ -773,7 +773,7 @@ fn issue_4800() {
"#,
expect![[r#"
379..383 'self': &'? mut PeerSet<D>
- 401..424 '{ ... }': dyn Future<Output = ()> + 'static
+ 401..424 '{ ... }': dyn Future<Output = ()> + '?
411..418 'loop {}': !
416..418 '{}': ()
575..579 'self': &'? mut Self
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs
index cf51671afb..b154e59878 100644
--- a/crates/hir-ty/src/tests/simple.rs
+++ b/crates/hir-ty/src/tests/simple.rs
@@ -2741,11 +2741,11 @@ impl B for Astruct {}
715..744 '#[rust...1i32])': Box<[i32; 1], Global>
737..743 '[1i32]': [i32; 1]
738..742 '1i32': i32
- 755..756 'v': Vec<Box<dyn B + 'static, Global>, Global>
- 776..793 '<[_]> ...to_vec': fn into_vec<Box<dyn B + 'static, Global>, Global>(Box<[Box<dyn B + 'static, Global>], Global>) -> Vec<Box<dyn B + 'static, Global>, Global>
- 776..850 '<[_]> ...ct)]))': Vec<Box<dyn B + 'static, Global>, Global>
- 794..849 '#[rust...uct)])': Box<[Box<dyn B + 'static, Global>; 1], Global>
- 816..848 '[#[rus...ruct)]': [Box<dyn B + 'static, Global>; 1]
+ 755..756 'v': Vec<Box<dyn B + '?, Global>, Global>
+ 776..793 '<[_]> ...to_vec': fn into_vec<Box<dyn B + '?, Global>, Global>(Box<[Box<dyn B + '?, Global>], Global>) -> Vec<Box<dyn B + '?, Global>, Global>
+ 776..850 '<[_]> ...ct)]))': Vec<Box<dyn B + '?, Global>, Global>
+ 794..849 '#[rust...uct)])': Box<[Box<dyn B + '?, Global>; 1], Global>
+ 816..848 '[#[rus...ruct)]': [Box<dyn B + '?, Global>; 1]
817..847 '#[rust...truct)': Box<Astruct, Global>
839..846 'Astruct': Astruct
"#]],
@@ -3751,7 +3751,7 @@ fn foo() {
}
let v: bool = true;
m!();
- // ^^^^ i32
+ // ^^ i32
}
"#,
);
@@ -3765,39 +3765,39 @@ fn foo() {
let v: bool;
macro_rules! m { () => { v } }
m!();
- // ^^^^ bool
+ // ^^ bool
let v: char;
macro_rules! m { () => { v } }
m!();
- // ^^^^ char
+ // ^^ char
{
let v: u8;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u8
+ // ^^ u8
let v: i8;
macro_rules! m { () => { v } }
m!();
- // ^^^^ i8
+ // ^^ i8
let v: i16;
macro_rules! m { () => { v } }
m!();
- // ^^^^ i16
+ // ^^ i16
{
let v: u32;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u32
+ // ^^ u32
let v: u64;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u64
+ // ^^ u64
}
}
}
diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs
index e5d1fbe9de..56e31a1af1 100644
--- a/crates/hir-ty/src/tests/traits.rs
+++ b/crates/hir-ty/src/tests/traits.rs
@@ -1475,26 +1475,26 @@ fn test(x: Box<dyn Trait<u64>>, y: &dyn Trait<u64>) {
expect![[r#"
29..33 'self': &'? Self
54..58 'self': &'? Self
- 198..200 '{}': Box<dyn Trait<u64> + 'static>
- 210..211 'x': Box<dyn Trait<u64> + 'static>
- 234..235 'y': &'? (dyn Trait<u64> + 'static)
+ 198..200 '{}': Box<dyn Trait<u64> + '?>
+ 210..211 'x': Box<dyn Trait<u64> + '?>
+ 234..235 'y': &'? (dyn Trait<u64> + '?)
254..371 '{ ...2(); }': ()
- 260..261 'x': Box<dyn Trait<u64> + 'static>
- 267..268 'y': &'? (dyn Trait<u64> + 'static)
- 278..279 'z': Box<dyn Trait<u64> + 'static>
- 282..285 'bar': fn bar() -> Box<dyn Trait<u64> + 'static>
- 282..287 'bar()': Box<dyn Trait<u64> + 'static>
- 293..294 'x': Box<dyn Trait<u64> + 'static>
+ 260..261 'x': Box<dyn Trait<u64> + '?>
+ 267..268 'y': &'? (dyn Trait<u64> + '?)
+ 278..279 'z': Box<dyn Trait<u64> + '?>
+ 282..285 'bar': fn bar() -> Box<dyn Trait<u64> + '?>
+ 282..287 'bar()': Box<dyn Trait<u64> + '?>
+ 293..294 'x': Box<dyn Trait<u64> + '?>
293..300 'x.foo()': u64
- 306..307 'y': &'? (dyn Trait<u64> + 'static)
+ 306..307 'y': &'? (dyn Trait<u64> + '?)
306..313 'y.foo()': u64
- 319..320 'z': Box<dyn Trait<u64> + 'static>
+ 319..320 'z': Box<dyn Trait<u64> + '?>
319..326 'z.foo()': u64
- 332..333 'x': Box<dyn Trait<u64> + 'static>
+ 332..333 'x': Box<dyn Trait<u64> + '?>
332..340 'x.foo2()': i64
- 346..347 'y': &'? (dyn Trait<u64> + 'static)
+ 346..347 'y': &'? (dyn Trait<u64> + '?)
346..354 'y.foo2()': i64
- 360..361 'z': Box<dyn Trait<u64> + 'static>
+ 360..361 'z': Box<dyn Trait<u64> + '?>
360..368 'z.foo2()': i64
"#]],
);
@@ -1523,14 +1523,14 @@ fn test(s: S<u32, i32>) {
expect![[r#"
32..36 'self': &'? Self
102..106 'self': &'? S<T, U>
- 128..139 '{ loop {} }': &'? (dyn Trait<T, U> + 'static)
+ 128..139 '{ loop {} }': &'? (dyn Trait<T, U> + '?)
130..137 'loop {}': !
135..137 '{}': ()
175..179 'self': &'? Self
251..252 's': S<u32, i32>
267..289 '{ ...z(); }': ()
273..274 's': S<u32, i32>
- 273..280 's.bar()': &'? (dyn Trait<u32, i32> + 'static)
+ 273..280 's.bar()': &'? (dyn Trait<u32, i32> + '?)
273..286 's.bar().baz()': (u32, i32)
"#]],
);
@@ -1556,20 +1556,20 @@ fn test(x: Trait, y: &Trait) -> u64 {
}"#,
expect![[r#"
26..30 'self': &'? Self
- 60..62 '{}': dyn Trait + 'static
- 72..73 'x': dyn Trait + 'static
- 82..83 'y': &'? (dyn Trait + 'static)
+ 60..62 '{}': dyn Trait + '?
+ 72..73 'x': dyn Trait + '?
+ 82..83 'y': &'? (dyn Trait + '?)
100..175 '{ ...o(); }': u64
- 106..107 'x': dyn Trait + 'static
- 113..114 'y': &'? (dyn Trait + 'static)
- 124..125 'z': dyn Trait + 'static
- 128..131 'bar': fn bar() -> dyn Trait + 'static
- 128..133 'bar()': dyn Trait + 'static
- 139..140 'x': dyn Trait + 'static
+ 106..107 'x': dyn Trait + '?
+ 113..114 'y': &'? (dyn Trait + '?)
+ 124..125 'z': dyn Trait + '?
+ 128..131 'bar': fn bar() -> dyn Trait + '?
+ 128..133 'bar()': dyn Trait + '?
+ 139..140 'x': dyn Trait + '?
139..146 'x.foo()': u64
- 152..153 'y': &'? (dyn Trait + 'static)
+ 152..153 'y': &'? (dyn Trait + '?)
152..159 'y.foo()': u64
- 165..166 'z': dyn Trait + 'static
+ 165..166 'z': dyn Trait + '?
165..172 'z.foo()': u64
"#]],
);
@@ -1589,10 +1589,10 @@ fn main() {
expect![[r#"
31..35 'self': &'? S
37..39 '{}': ()
- 47..48 '_': &'? (dyn Fn(S) + 'static)
+ 47..48 '_': &'? (dyn Fn(S) + '?)
58..60 '{}': ()
71..105 '{ ...()); }': ()
- 77..78 'f': fn f(&'? (dyn Fn(S) + 'static))
+ 77..78 'f': fn f(&'? (dyn Fn(S) + '?))
77..102 'f(&|nu...foo())': ()
79..101 '&|numb....foo()': &'? impl Fn(S)
80..101 '|numbe....foo()': impl Fn(S)
@@ -2927,13 +2927,13 @@ fn test(x: &dyn Foo) {
foo(x);
}"#,
expect![[r#"
- 21..22 'x': &'? (dyn Foo + 'static)
+ 21..22 'x': &'? (dyn Foo + '?)
34..36 '{}': ()
- 46..47 'x': &'? (dyn Foo + 'static)
+ 46..47 'x': &'? (dyn Foo + '?)
59..74 '{ foo(x); }': ()
- 65..68 'foo': fn foo(&'? (dyn Foo + 'static))
+ 65..68 'foo': fn foo(&'? (dyn Foo + '?))
65..71 'foo(x)': ()
- 69..70 'x': &'? (dyn Foo + 'static)
+ 69..70 'x': &'? (dyn Foo + '?)
"#]],
);
}
@@ -3210,13 +3210,13 @@ fn foo() {
218..324 '{ ...&s); }': ()
228..229 's': Option<i32>
232..236 'None': Option<i32>
- 246..247 'f': Box<dyn FnOnce(&'? Option<i32>) + 'static>
- 281..310 'Box { ... {}) }': Box<dyn FnOnce(&'? Option<i32>) + 'static>
+ 246..247 'f': Box<dyn FnOnce(&'? Option<i32>) + '?>
+ 281..310 'Box { ... {}) }': Box<dyn FnOnce(&'? Option<i32>) + '?>
294..308 '&mut (|ps| {})': &'? mut impl FnOnce(&'? Option<i32>)
300..307 '|ps| {}': impl FnOnce(&'? Option<i32>)
301..303 'ps': &'? Option<i32>
305..307 '{}': ()
- 316..317 'f': Box<dyn FnOnce(&'? Option<i32>) + 'static>
+ 316..317 'f': Box<dyn FnOnce(&'? Option<i32>) + '?>
316..321 'f(&s)': ()
318..320 '&s': &'? Option<i32>
319..320 's': Option<i32>
@@ -4252,9 +4252,9 @@ fn f<'a>(v: &dyn Trait<Assoc<i32> = &'a i32>) {
"#,
expect![[r#"
90..94 'self': &'? Self
- 127..128 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + 'static)
+ 127..128 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + '?)
164..195 '{ ...f(); }': ()
- 170..171 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + 'static)
+ 170..171 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + '?)
170..184 'v.get::<i32>()': &'? i32
170..192 'v.get:...eref()': &'? i32
"#]],
diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs
index 4c8e635eff..d07c1aa33b 100644
--- a/crates/hir-ty/src/utils.rs
+++ b/crates/hir-ty/src/utils.rs
@@ -1,7 +1,7 @@
//! Helper functions for working with def, which don't need to be a separate
//! query, but can't be computed directly from `*Data` (ie, which need a `db`).
-use std::iter;
+use std::{cell::LazyCell, iter};
use base_db::Crate;
use chalk_ir::{
@@ -161,11 +161,12 @@ impl Iterator for ClauseElaborator<'_> {
}
fn direct_super_traits_cb(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(TraitId)) {
- let resolver = trait_.resolver(db);
+ let resolver = LazyCell::new(|| trait_.resolver(db));
let (generic_params, store) = db.generic_params_and_store(trait_.into());
let trait_self = generic_params.trait_self_param();
generic_params
.where_predicates()
+ .iter()
.filter_map(|pred| match pred {
WherePredicate::ForLifetime { target, bound, .. }
| WherePredicate::TypeBound { target, bound } => {
@@ -218,7 +219,7 @@ pub(super) fn associated_type_by_name_including_super_traits(
name: &Name,
) -> Option<(TraitRef, TypeAliasId)> {
all_super_trait_refs(db, trait_ref, |t| {
- let assoc_type = db.trait_items(t.hir_trait_id()).associated_type_by_name(name)?;
+ let assoc_type = t.hir_trait_id().trait_items(db).associated_type_by_name(name)?;
Some((t, assoc_type))
})
}
diff --git a/crates/hir-ty/src/variance.rs b/crates/hir-ty/src/variance.rs
index 08a215fecf..87d9df611b 100644
--- a/crates/hir-ty/src/variance.rs
+++ b/crates/hir-ty/src/variance.rs
@@ -54,14 +54,14 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option<Ar
variances.is_empty().not().then(|| Arc::from_iter(variances))
}
-// pub(crate) fn variances_of_cycle_fn(
-// _db: &dyn HirDatabase,
-// _result: &Option<Arc<[Variance]>>,
-// _count: u32,
-// _def: GenericDefId,
-// ) -> salsa::CycleRecoveryAction<Option<Arc<[Variance]>>> {
-// salsa::CycleRecoveryAction::Iterate
-// }
+pub(crate) fn variances_of_cycle_fn(
+ _db: &dyn HirDatabase,
+ _result: &Option<Arc<[Variance]>>,
+ _count: u32,
+ _def: GenericDefId,
+) -> salsa::CycleRecoveryAction<Option<Arc<[Variance]>>> {
+ salsa::CycleRecoveryAction::Iterate
+}
pub(crate) fn variances_of_cycle_initial(
db: &dyn HirDatabase,
@@ -965,7 +965,7 @@ struct S3<T>(S<T, T>);
struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
"#,
expect![[r#"
- FixedPoint[T: bivariant, U: bivariant, V: bivariant]
+ FixedPoint[T: covariant, U: covariant, V: covariant]
"#]],
);
}
diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml
index 2af3c2e4c3..c68ff706e4 100644
--- a/crates/hir/Cargo.toml
+++ b/crates/hir/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
rustc-hash.workspace = true
diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs
index 0bce69a179..c8645b6282 100644
--- a/crates/hir/src/attrs.rs
+++ b/crates/hir/src/attrs.rs
@@ -207,7 +207,7 @@ fn resolve_assoc_or_field(
// Doc paths in this context may only resolve to an item of this trait
// (i.e. no items of its supertraits), so we need to handle them here
// independently of others.
- return db.trait_items(id).items.iter().find(|it| it.0 == name).map(|(_, assoc_id)| {
+ return id.trait_items(db).items.iter().find(|it| it.0 == name).map(|(_, assoc_id)| {
let def = match *assoc_id {
AssocItemId::FunctionId(it) => ModuleDef::Function(it.into()),
AssocItemId::ConstId(it) => ModuleDef::Const(it.into()),
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index 074bde91fb..c1e814ec22 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -36,16 +36,16 @@ pub use hir_ty::{
};
macro_rules! diagnostics {
- ($($diag:ident $(<$lt:lifetime>)?,)*) => {
+ ($AnyDiagnostic:ident <$db:lifetime> -> $($diag:ident $(<$lt:lifetime>)?,)*) => {
#[derive(Debug)]
- pub enum AnyDiagnostic<'db> {$(
+ pub enum $AnyDiagnostic<$db> {$(
$diag(Box<$diag $(<$lt>)?>),
)*}
$(
- impl<'db> From<$diag $(<$lt>)?> for AnyDiagnostic<'db> {
- fn from(d: $diag $(<$lt>)?) -> AnyDiagnostic<'db> {
- AnyDiagnostic::$diag(Box::new(d))
+ impl<$db> From<$diag $(<$lt>)?> for $AnyDiagnostic<$db> {
+ fn from(d: $diag $(<$lt>)?) -> $AnyDiagnostic<$db> {
+ $AnyDiagnostic::$diag(Box::new(d))
}
}
)*
@@ -66,7 +66,7 @@ macro_rules! diagnostics {
// }, ...
// ]
-diagnostics![
+diagnostics![AnyDiagnostic<'db> ->
AwaitOutsideOfAsync,
BreakOutsideOfLoop,
CastToUnsized<'db>,
@@ -490,7 +490,7 @@ impl<'db> AnyDiagnostic<'db> {
) -> Option<AnyDiagnostic<'db>> {
match diagnostic {
BodyValidationDiagnostic::RecordMissingFields { record, variant, missed_fields } => {
- let variant_data = variant.variant_data(db);
+ let variant_data = variant.fields(db);
let missed_fields = missed_fields
.into_iter()
.map(|idx| variant_data.fields()[idx].name.clone())
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index 112558bdd0..2960ebedf3 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -404,7 +404,7 @@ impl HirDisplay for TupleField {
impl HirDisplay for Variant {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?;
- let data = f.db.variant_fields(self.id.into());
+ let data = self.id.fields(f.db);
match data.shape {
FieldsShape::Unit => {}
FieldsShape::Tuple => {
@@ -633,7 +633,7 @@ fn has_disaplayable_predicates(
params: &GenericParams,
store: &ExpressionStore,
) -> bool {
- params.where_predicates().any(|pred| {
+ params.where_predicates().iter().any(|pred| {
!matches!(
pred,
WherePredicate::TypeBound { target, .. }
@@ -668,7 +668,7 @@ fn write_where_predicates(
_ => false,
};
- let mut iter = params.where_predicates().peekable();
+ let mut iter = params.where_predicates().iter().peekable();
while let Some(pred) = iter.next() {
if matches!(pred, TypeBound { target, .. } if is_unnamed_type_target(*target)) {
continue;
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 3b39707cf6..5c6f622e6c 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -54,7 +54,7 @@ use hir_def::{
},
item_tree::ImportAlias,
layout::{self, ReprOptions, TargetDataLayout},
- nameres::{self, diagnostics::DefDiagnostic},
+ nameres::{self, assoc::TraitItems, diagnostics::DefDiagnostic},
per_ns::PerNs,
resolver::{HasResolver, Resolver},
signatures::{ImplFlags, StaticFlags, TraitFlags, VariantFields},
@@ -649,7 +649,7 @@ impl Module {
acc.extend(def.diagnostics(db, style_lints))
}
ModuleDef::Trait(t) => {
- for diag in db.trait_items_with_diagnostics(t.id).1.iter() {
+ for diag in TraitItems::query_with_diagnostics(db, t.id).1.iter() {
emit_def_diagnostic(db, acc, diag, edition);
}
@@ -668,25 +668,25 @@ impl Module {
Adt::Struct(s) => {
let source_map = db.struct_signature_with_source_map(s.id).1;
expr_store_diagnostics(db, acc, &source_map);
- let source_map = db.variant_fields_with_source_map(s.id.into()).1;
- expr_store_diagnostics(db, acc, &source_map);
+ let source_map = &s.id.fields_with_source_map(db).1;
+ expr_store_diagnostics(db, acc, source_map);
push_ty_diagnostics(
db,
acc,
db.field_types_with_diagnostics(s.id.into()).1,
- &source_map,
+ source_map,
);
}
Adt::Union(u) => {
let source_map = db.union_signature_with_source_map(u.id).1;
expr_store_diagnostics(db, acc, &source_map);
- let source_map = db.variant_fields_with_source_map(u.id.into()).1;
- expr_store_diagnostics(db, acc, &source_map);
+ let source_map = &u.id.fields_with_source_map(db).1;
+ expr_store_diagnostics(db, acc, source_map);
push_ty_diagnostics(
db,
acc,
db.field_types_with_diagnostics(u.id.into()).1,
- &source_map,
+ source_map,
);
}
Adt::Enum(e) => {
@@ -711,14 +711,14 @@ impl Module {
}
}
for &(v, _, _) in &variants.variants {
- let source_map = db.variant_fields_with_source_map(v.into()).1;
+ let source_map = &v.fields_with_source_map(db).1;
push_ty_diagnostics(
db,
acc,
db.field_types_with_diagnostics(v.into()).1,
- &source_map,
+ source_map,
);
- expr_store_diagnostics(db, acc, &source_map);
+ expr_store_diagnostics(db, acc, source_map);
}
}
}
@@ -822,7 +822,7 @@ impl Module {
// Negative impls can't have items, don't emit missing items diagnostic for them
if let (false, Some(trait_)) = (impl_is_negative, trait_) {
- let items = &db.trait_items(trait_.into()).items;
+ let items = &trait_.id.trait_items(db).items;
let required_items = items.iter().filter(|&(_, assoc)| match *assoc {
AssocItemId::FunctionId(it) => !db.function_signature(it).has_body(),
AssocItemId::ConstId(id) => !db.const_signature(id).has_body(),
@@ -1260,7 +1260,9 @@ impl TupleField {
}
pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> Type<'db> {
- let ty = db.infer(self.owner).tuple_field_access_types[&self.tuple]
+ let ty = db
+ .infer(self.owner)
+ .tuple_field_access_type(self.tuple)
.as_slice(Interner)
.get(self.index as usize)
.and_then(|arg| arg.ty(Interner))
@@ -1311,7 +1313,7 @@ impl AstNode for FieldSource {
impl Field {
pub fn name(&self, db: &dyn HirDatabase) -> Name {
- db.variant_fields(self.parent.into()).fields()[self.id].name.clone()
+ VariantId::from(self.parent).fields(db).fields()[self.id].name.clone()
}
pub fn index(&self) -> usize {
@@ -1380,7 +1382,7 @@ impl Field {
impl HasVisibility for Field {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
- let variant_data = db.variant_fields(self.parent.into());
+ let variant_data = VariantId::from(self.parent).fields(db);
let visibility = &variant_data.fields()[self.id].visibility;
let parent_id: hir_def::VariantId = self.parent.into();
// FIXME: RawVisibility::Public doesn't need to construct a resolver
@@ -1403,7 +1405,8 @@ impl Struct {
}
pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
- db.variant_fields(self.id.into())
+ self.id
+ .fields(db)
.fields()
.iter()
.map(|(id, _)| Field { parent: self.into(), id })
@@ -1434,8 +1437,8 @@ impl Struct {
}
}
- fn variant_fields(self, db: &dyn HirDatabase) -> Arc<VariantFields> {
- db.variant_fields(self.id.into())
+ fn variant_fields(self, db: &dyn HirDatabase) -> &VariantFields {
+ self.id.fields(db)
}
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
@@ -1478,7 +1481,7 @@ impl Union {
}
pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
- match db.variant_fields(self.id.into()).shape {
+ match self.id.fields(db).shape {
hir_def::item_tree::FieldsShape::Record => StructKind::Record,
hir_def::item_tree::FieldsShape::Tuple => StructKind::Tuple,
hir_def::item_tree::FieldsShape::Unit => StructKind::Unit,
@@ -1486,7 +1489,8 @@ impl Union {
}
pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
- db.variant_fields(self.id.into())
+ self.id
+ .fields(db)
.fields()
.iter()
.map(|(id, _)| Field { parent: self.into(), id })
@@ -1626,7 +1630,8 @@ impl Variant {
}
pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
- db.variant_fields(self.id.into())
+ self.id
+ .fields(db)
.fields()
.iter()
.map(|(id, _)| Field { parent: self.into(), id })
@@ -1634,7 +1639,7 @@ impl Variant {
}
pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
- match db.variant_fields(self.id.into()).shape {
+ match self.id.fields(db).shape {
hir_def::item_tree::FieldsShape::Record => StructKind::Record,
hir_def::item_tree::FieldsShape::Tuple => StructKind::Tuple,
hir_def::item_tree::FieldsShape::Unit => StructKind::Unit,
@@ -1727,10 +1732,10 @@ impl Adt {
pub fn ty_with_args<'db>(
self,
db: &'db dyn HirDatabase,
- args: impl Iterator<Item = Type<'db>>,
+ args: impl IntoIterator<Item = Type<'db>>,
) -> Type<'db> {
let id = AdtId::from(self);
- let mut it = args.map(|t| t.ty);
+ let mut it = args.into_iter().map(|t| t.ty);
let ty = TyBuilder::def_ty(db, id.into(), None)
.fill(|x| {
let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
@@ -1924,7 +1929,7 @@ impl DefWithBody {
expr_store_diagnostics(db, acc, &source_map);
let infer = db.infer(self.into());
- for d in &infer.diagnostics {
+ for d in infer.diagnostics() {
acc.extend(AnyDiagnostic::inference_diagnostic(
db,
self.into(),
@@ -2883,7 +2888,7 @@ impl Trait {
}
pub fn function(self, db: &dyn HirDatabase, name: impl PartialEq<Name>) -> Option<Function> {
- db.trait_items(self.id).items.iter().find(|(n, _)| name == *n).and_then(|&(_, it)| match it
+ self.id.trait_items(db).items.iter().find(|(n, _)| name == *n).and_then(|&(_, it)| match it
{
AssocItemId::FunctionId(id) => Some(Function { id }),
_ => None,
@@ -2891,7 +2896,7 @@ impl Trait {
}
pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
- db.trait_items(self.id).items.iter().map(|(_name, it)| (*it).into()).collect()
+ self.id.trait_items(db).items.iter().map(|(_name, it)| (*it).into()).collect()
}
pub fn items_with_supertraits(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
@@ -2939,7 +2944,7 @@ impl Trait {
}
fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId<ast::Item>, MacroCallId)]> {
- db.trait_items(self.id).macro_calls.to_vec().into_boxed_slice()
+ self.id.trait_items(db).macro_calls.to_vec().into_boxed_slice()
}
/// `#[rust_analyzer::completions(...)]` mode.
@@ -3043,10 +3048,17 @@ pub struct BuiltinType {
}
impl BuiltinType {
+ // Constructors are added on demand, feel free to add more.
pub fn str() -> BuiltinType {
BuiltinType { inner: hir_def::builtin_type::BuiltinType::Str }
}
+ pub fn i32() -> BuiltinType {
+ BuiltinType {
+ inner: hir_def::builtin_type::BuiltinType::Int(hir_ty::primitive::BuiltinInt::I32),
+ }
+ }
+
pub fn ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
let core = Crate::core(db).map(|core| core.id).unwrap_or_else(|| db.all_crates()[0]);
Type::new_for_crate(core, TyBuilder::builtin(self.inner))
@@ -3667,7 +3679,7 @@ impl GenericDef {
let generics = db.generic_params(def);
- if generics.is_empty() && generics.no_predicates() {
+ if generics.is_empty() && generics.has_no_predicates() {
return;
}
@@ -5000,7 +5012,7 @@ impl<'db> Type<'db> {
}
let output_assoc_type =
- db.trait_items(trait_).associated_type_by_name(&Name::new_symbol_root(sym::Output))?;
+ trait_.trait_items(db).associated_type_by_name(&Name::new_symbol_root(sym::Output))?;
self.normalize_trait_assoc_type(db, &[], output_assoc_type.into())
}
@@ -5013,8 +5025,8 @@ impl<'db> Type<'db> {
/// This does **not** resolve `IntoIterator`, only `Iterator`.
pub fn iterator_item(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
let iterator_trait = LangItem::Iterator.resolve_trait(db, self.env.krate)?;
- let iterator_item = db
- .trait_items(iterator_trait)
+ let iterator_item = iterator_trait
+ .trait_items(db)
.associated_type_by_name(&Name::new_symbol_root(sym::Item))?;
self.normalize_trait_assoc_type(db, &[], iterator_item.into())
}
@@ -5044,8 +5056,8 @@ impl<'db> Type<'db> {
return None;
}
- let into_iter_assoc_type = db
- .trait_items(trait_)
+ let into_iter_assoc_type = trait_
+ .trait_items(db)
.associated_type_by_name(&Name::new_symbol_root(sym::IntoIter))?;
self.normalize_trait_assoc_type(db, &[], into_iter_assoc_type.into())
}
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index d96975831e..247bb69398 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -2199,6 +2199,10 @@ pub struct SemanticsScope<'db> {
}
impl<'db> SemanticsScope<'db> {
+ pub fn file_id(&self) -> HirFileId {
+ self.file_id
+ }
+
pub fn module(&self) -> Module {
Module { id: self.resolver.module() }
}
diff --git a/crates/hir/src/semantics/child_by_source.rs b/crates/hir/src/semantics/child_by_source.rs
index fedd8239d0..e7db93d375 100644
--- a/crates/hir/src/semantics/child_by_source.rs
+++ b/crates/hir/src/semantics/child_by_source.rs
@@ -35,7 +35,7 @@ pub(crate) trait ChildBySource {
impl ChildBySource for TraitId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
- let data = db.trait_items(*self);
+ let data = self.trait_items(db);
data.macro_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| {
@@ -191,7 +191,7 @@ impl ChildBySource for VariantId {
Either::Right(source) => res[keys::RECORD_FIELD].insert(AstPtr::new(&source), id),
}
}
- let (_, sm) = db.variant_fields_with_source_map(*self);
+ let (_, sm) = self.fields_with_source_map(db);
sm.expansions().for_each(|(ast, &exp_id)| res[keys::MACRO_CALL].insert(ast.value, exp_id));
}
}
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 48543ca581..0662bfddcf 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -156,14 +156,14 @@ impl<'db> SourceAnalyzer<'db> {
InFile { file_id, .. }: InFile<&SyntaxNode>,
_offset: Option<TextSize>,
) -> SourceAnalyzer<'db> {
- let (fields, source_map) = db.variant_fields_with_source_map(def);
+ let (fields, source_map) = def.fields_with_source_map(db);
let resolver = def.resolver(db);
SourceAnalyzer {
resolver,
body_or_sig: Some(BodyOrSig::VariantFields {
def,
store: fields.store.clone(),
- source_map,
+ source_map: source_map.clone(),
}),
file_id,
}
@@ -254,7 +254,7 @@ impl<'db> SourceAnalyzer<'db> {
// expressions nor patterns).
let expr_id = self.expr_id(expr.clone())?.as_expr()?;
let infer = self.infer()?;
- infer.expr_adjustments.get(&expr_id).map(|v| &**v)
+ infer.expr_adjustment(expr_id)
}
pub(crate) fn type_of_type(
@@ -286,7 +286,7 @@ impl<'db> SourceAnalyzer<'db> {
let infer = self.infer()?;
let coerced = expr_id
.as_expr()
- .and_then(|expr_id| infer.expr_adjustments.get(&expr_id))
+ .and_then(|expr_id| infer.expr_adjustment(expr_id))
.and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.clone()));
let ty = infer[expr_id].clone();
let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
@@ -302,12 +302,11 @@ impl<'db> SourceAnalyzer<'db> {
let infer = self.infer()?;
let coerced = match expr_or_pat_id {
ExprOrPatId::ExprId(idx) => infer
- .expr_adjustments
- .get(&idx)
+ .expr_adjustment(idx)
.and_then(|adjusts| adjusts.last().cloned())
.map(|adjust| adjust.target),
ExprOrPatId::PatId(idx) => {
- infer.pat_adjustments.get(&idx).and_then(|adjusts| adjusts.last().cloned())
+ infer.pat_adjustment(idx).and_then(|adjusts| adjusts.last().cloned())
}
};
@@ -345,7 +344,7 @@ impl<'db> SourceAnalyzer<'db> {
) -> Option<BindingMode> {
let id = self.pat_id(&pat.clone().into())?;
let infer = self.infer()?;
- infer.binding_modes.get(id.as_pat()?).map(|bm| match bm {
+ infer.binding_mode(id.as_pat()?).map(|bm| match bm {
hir_ty::BindingMode::Move => BindingMode::Move,
hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
@@ -362,8 +361,7 @@ impl<'db> SourceAnalyzer<'db> {
let infer = self.infer()?;
Some(
infer
- .pat_adjustments
- .get(&pat_id.as_pat()?)?
+ .pat_adjustment(pat_id.as_pat()?)?
.iter()
.map(|ty| Type::new_with_resolver(db, &self.resolver, ty.clone()))
.collect(),
@@ -713,7 +711,7 @@ impl<'db> SourceAnalyzer<'db> {
};
let (adt, subst) = self.infer()?.type_of_expr_or_pat(expr_id)?.as_adt()?;
let variant = self.infer()?.variant_resolution_for_expr_or_pat(expr_id)?;
- let variant_data = variant.variant_data(db);
+ let variant_data = variant.fields(db);
let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? };
let field_ty =
db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
@@ -734,9 +732,9 @@ impl<'db> SourceAnalyzer<'db> {
let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
let pat_id = self.pat_id(&record_pat.into())?;
let variant = self.infer()?.variant_resolution_for_pat(pat_id.as_pat()?)?;
- let variant_data = variant.variant_data(db);
+ let variant_data = variant.fields(db);
let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
- let (adt, subst) = self.infer()?.type_of_pat.get(pat_id.as_pat()?)?.as_adt()?;
+ let (adt, subst) = self.infer()?[pat_id.as_pat()?].as_adt()?;
let field_ty =
db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
Some((
@@ -765,7 +763,8 @@ impl<'db> SourceAnalyzer<'db> {
},
};
- let res = resolve_hir_path(db, &self.resolver, path, HygieneId::ROOT, Some(store))?;
+ let body_owner = self.resolver.body_owner();
+ let res = resolve_hir_value_path(db, &self.resolver, body_owner, path, HygieneId::ROOT)?;
match res {
PathResolution::Def(def) => Some(def),
_ => None,
@@ -803,8 +802,8 @@ impl<'db> SourceAnalyzer<'db> {
};
container = Either::Right(db.normalize_projection(projection, trait_env.clone()));
}
- let handle_variants = |variant, subst: &Substitution, container: &mut _| {
- let fields = db.variant_fields(variant);
+ let handle_variants = |variant: VariantId, subst: &Substitution, container: &mut _| {
+ let fields = variant.fields(db);
let field = fields.field(&field_name.as_name())?;
let field_types = db.field_types(variant);
*container = Either::Right(field_types[field].clone().substitute(Interner, subst));
@@ -1249,7 +1248,7 @@ impl<'db> SourceAnalyzer<'db> {
let infer = self.infer()?;
let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
- let substs = infer.type_of_pat[pat_id].as_adt()?.1;
+ let substs = infer[pat_id].as_adt()?.1;
let (variant, missing_fields, _exhaustive) =
record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
@@ -1423,7 +1422,7 @@ impl<'db> SourceAnalyzer<'db> {
method_name: &Name,
) -> Option<(TraitId, FunctionId)> {
let trait_id = lang_trait.resolve_trait(db, self.resolver.krate())?;
- let fn_id = db.trait_items(trait_id).method_by_name(method_name)?;
+ let fn_id = trait_id.trait_items(db).method_by_name(method_name)?;
Some((trait_id, fn_id))
}
@@ -1580,7 +1579,7 @@ fn resolve_hir_path_(
// within the trait's associated types.
if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
if let Some(type_alias_id) =
- db.trait_items(trait_id).associated_type_by_name(unresolved.name)
+ trait_id.trait_items(db).associated_type_by_name(unresolved.name)
{
return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
}
@@ -1731,7 +1730,7 @@ fn resolve_hir_path_qualifier(
// within the trait's associated types.
if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
if let Some(type_alias_id) =
- db.trait_items(trait_id).associated_type_by_name(unresolved.name)
+ trait_id.trait_items(db).associated_type_by_name(unresolved.name)
{
return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
}
@@ -1785,8 +1784,8 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H
}
fn type_of_expr_including_adjust(infer: &InferenceResult, id: ExprId) -> Option<&Ty> {
- match infer.expr_adjustments.get(&id).and_then(|adjustments| adjustments.last()) {
+ match infer.expr_adjustment(id).and_then(|adjustments| adjustments.last()) {
Some(adjustment) => Some(&adjustment.target),
- None => infer.type_of_expr.get(id),
+ None => Some(&infer[id]),
}
}
diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs
index 64f2a910bd..756650891d 100644
--- a/crates/hir/src/symbols.rs
+++ b/crates/hir/src/symbols.rs
@@ -334,7 +334,7 @@ impl<'a> SymbolCollector<'a> {
fn collect_from_trait(&mut self, trait_id: TraitId, trait_do_not_complete: Complete) {
let trait_data = self.db.trait_signature(trait_id);
self.with_container_name(Some(trait_data.name.as_str().into()), |s| {
- for &(ref name, assoc_item_id) in &self.db.trait_items(trait_id).items {
+ for &(ref name, assoc_item_id) in &trait_id.trait_items(self.db).items {
s.push_assoc_item(assoc_item_id, name, Some(trait_do_not_complete));
}
});
diff --git a/crates/ide-assists/Cargo.toml b/crates/ide-assists/Cargo.toml
index 53af980c19..385b0e1eb7 100644
--- a/crates/ide-assists/Cargo.toml
+++ b/crates/ide-assists/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/ide-assists/src/handlers/expand_rest_pattern.rs b/crates/ide-assists/src/handlers/expand_rest_pattern.rs
index b71de5e00c..c80b78fd97 100644
--- a/crates/ide-assists/src/handlers/expand_rest_pattern.rs
+++ b/crates/ide-assists/src/handlers/expand_rest_pattern.rs
@@ -175,7 +175,7 @@ pub(crate) fn expand_rest_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) ->
// ast::TuplePat(it) => (),
// FIXME
// ast::SlicePat(it) => (),
- _ => return None,
+ _ => None,
}
}
}
diff --git a/crates/ide-assists/src/handlers/generate_enum_is_method.rs b/crates/ide-assists/src/handlers/generate_enum_is_method.rs
index 3e6d0bec68..517906b429 100644
--- a/crates/ide-assists/src/handlers/generate_enum_is_method.rs
+++ b/crates/ide-assists/src/handlers/generate_enum_is_method.rs
@@ -1,3 +1,5 @@
+use std::slice;
+
use ide_db::assists::GroupLabel;
use stdx::to_lower_snake_case;
use syntax::ast::HasVisibility;
@@ -52,7 +54,7 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext<'_>
let fn_name = format!("is_{}", &to_lower_snake_case(&variant_name.text()));
// Return early if we've found an existing new fn
- let impl_def = find_struct_impl(ctx, &parent_enum, &[fn_name.clone()])?;
+ let impl_def = find_struct_impl(ctx, &parent_enum, slice::from_ref(&fn_name))?;
let target = variant.syntax().text_range();
acc.add_group(
diff --git a/crates/ide-assists/src/handlers/generate_enum_projection_method.rs b/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
index 3974bcf618..e4b0f83049 100644
--- a/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
+++ b/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
@@ -1,3 +1,5 @@
+use std::slice;
+
use ide_db::assists::GroupLabel;
use itertools::Itertools;
use stdx::to_lower_snake_case;
@@ -148,7 +150,7 @@ fn generate_enum_projection_method(
let fn_name = format!("{fn_name_prefix}_{}", &to_lower_snake_case(&variant_name.text()));
// Return early if we've found an existing new fn
- let impl_def = find_struct_impl(ctx, &parent_enum, &[fn_name.clone()])?;
+ let impl_def = find_struct_impl(ctx, &parent_enum, slice::from_ref(&fn_name))?;
let target = variant.syntax().text_range();
acc.add_group(
diff --git a/crates/ide-assists/src/handlers/promote_local_to_const.rs b/crates/ide-assists/src/handlers/promote_local_to_const.rs
index 6316a8f0db..603be4d667 100644
--- a/crates/ide-assists/src/handlers/promote_local_to_const.rs
+++ b/crates/ide-assists/src/handlers/promote_local_to_const.rs
@@ -3,8 +3,7 @@ use ide_db::{assists::AssistId, defs::Definition};
use stdx::to_upper_snake_case;
use syntax::{
AstNode,
- ast::{self, HasName, make},
- ted,
+ ast::{self, HasName, syntax_factory::SyntaxFactory},
};
use crate::{
@@ -69,15 +68,18 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
"Promote local to constant",
let_stmt.syntax().text_range(),
|edit| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(let_stmt.syntax());
let name = to_upper_snake_case(&name.to_string());
let usages = Definition::Local(local).usages(&ctx.sema).all();
if let Some(usages) = usages.references.get(&ctx.file_id()) {
- let name_ref = make::name_ref(&name);
+ let name_ref = make.name_ref(&name);
for usage in usages {
let Some(usage_name) = usage.name.as_name_ref().cloned() else { continue };
if let Some(record_field) = ast::RecordExprField::for_name_ref(&usage_name) {
- let name_expr = make::expr_path(make::path_from_text(&name));
+ let path = make.ident_path(&name);
+ let name_expr = make.expr_path(path);
utils::replace_record_field_expr(ctx, edit, record_field, name_expr);
} else {
let usage_range = usage.range;
@@ -86,15 +88,17 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
}
}
- let item = make::item_const(None, make::name(&name), make::ty(&ty), initializer)
- .clone_for_update();
- let let_stmt = edit.make_mut(let_stmt);
+ let item = make.item_const(None, make.name(&name), make.ty(&ty), initializer);
if let Some((cap, name)) = ctx.config.snippet_cap.zip(item.name()) {
- edit.add_tabstop_before(cap, name);
+ let tabstop = edit.make_tabstop_before(cap);
+ editor.add_annotation(name.syntax().clone(), tabstop);
}
- ted::replace(let_stmt.syntax(), item.syntax());
+ editor.replace(let_stmt.syntax(), item.syntax());
+
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs b/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
index e933bcc40d..62914ee7f3 100644
--- a/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
+++ b/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
@@ -1,8 +1,5 @@
use ide_db::syntax_helpers::suggest_name;
-use syntax::{
- ast::{self, AstNode, make},
- ted,
-};
+use syntax::ast::{self, AstNode, syntax_factory::SyntaxFactory};
use crate::{AssistContext, AssistId, Assists};
@@ -60,21 +57,25 @@ pub(crate) fn replace_is_method_with_if_let_method(
message,
call_expr.syntax().text_range(),
|edit| {
- let call_expr = edit.make_mut(call_expr);
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(call_expr.syntax());
- let var_pat = make::ident_pat(false, false, make::name(&var_name));
- let pat = make::tuple_struct_pat(make::ext::ident_path(text), [var_pat.into()]);
- let let_expr = make::expr_let(pat.into(), receiver).clone_for_update();
+ let var_pat = make.ident_pat(false, false, make.name(&var_name));
+ let pat = make.tuple_struct_pat(make.ident_path(text), [var_pat.into()]);
+ let let_expr = make.expr_let(pat.into(), receiver);
if let Some(cap) = ctx.config.snippet_cap {
if let Some(ast::Pat::TupleStructPat(pat)) = let_expr.pat() {
if let Some(first_var) = pat.fields().next() {
- edit.add_placeholder_snippet(cap, first_var);
+ let placeholder = edit.make_placeholder_snippet(cap);
+ editor.add_annotation(first_var.syntax(), placeholder);
}
}
}
- ted::replace(call_expr.syntax(), let_expr.syntax());
+ editor.replace(call_expr.syntax(), let_expr.syntax());
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/term_search.rs b/crates/ide-assists/src/handlers/term_search.rs
index 019ddaf144..6527d3706e 100644
--- a/crates/ide-assists/src/handlers/term_search.rs
+++ b/crates/ide-assists/src/handlers/term_search.rs
@@ -100,9 +100,7 @@ fn f() { let a: u128 = 1; let b: u128 = todo$0!() }"#,
fn test_complete_todo_with_msg() {
check_assist(
term_search,
- // FIXME: Since we are lacking of `super let`, term search fails due to borrowck failure.
- // Should implement super let and remove `fmt_before_1_89_0`
- r#"//- minicore: todo, unimplemented, fmt_before_1_89_0
+ r#"//- minicore: todo, unimplemented
fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#,
r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
)
@@ -112,10 +110,8 @@ fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#,
fn test_complete_unimplemented_with_msg() {
check_assist(
term_search,
- // FIXME: Since we are lacking of `super let`, term search fails due to borrowck failure.
- // Should implement super let and remove `fmt_before_1_89_0`
- r#"//- minicore: todo, unimplemented, fmt_before_1_89_0
-fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#,
+ r#"//- minicore: todo, unimplemented
+fn f() { let a: u128 = 1; let b: u128 = unimplemented$0!("asd") }"#,
r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
)
}
@@ -124,10 +120,8 @@ fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#,
fn test_complete_unimplemented() {
check_assist(
term_search,
- // FIXME: Since we are lacking of `super let`, term search fails due to borrowck failure.
- // Should implement super let and remove `fmt_before_1_89_0`
- r#"//- minicore: todo, unimplemented, fmt_before_1_89_0
-fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#,
+ r#"//- minicore: todo, unimplemented
+fn f() { let a: u128 = 1; let b: u128 = unimplemented$0!() }"#,
r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
)
}
diff --git a/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs b/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
index 109269bd6e..504e12f93d 100644
--- a/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
+++ b/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
@@ -1,8 +1,7 @@
use ide_db::assists::AssistId;
use syntax::{
AstNode, T,
- ast::{self, make},
- ted,
+ ast::{self, syntax_factory::SyntaxFactory},
};
use crate::{AssistContext, Assists};
@@ -37,8 +36,7 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>)
RCur,
}
- let makro = ctx.find_node_at_offset::<ast::MacroCall>()?.clone_for_update();
- let makro_text_range = makro.syntax().text_range();
+ let makro = ctx.find_node_at_offset::<ast::MacroCall>()?;
let cursor_offset = ctx.offset();
let semicolon = makro.semicolon_token();
@@ -71,24 +69,28 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>)
},
token_tree.syntax().text_range(),
|builder| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = builder.make_editor(makro.syntax());
+
match token {
MacroDelims::LPar | MacroDelims::RPar => {
- ted::replace(ltoken, make::token(T!['{']));
- ted::replace(rtoken, make::token(T!['}']));
+ editor.replace(ltoken, make.token(T!['{']));
+ editor.replace(rtoken, make.token(T!['}']));
if let Some(sc) = semicolon {
- ted::remove(sc);
+ editor.delete(sc);
}
}
MacroDelims::LBra | MacroDelims::RBra => {
- ted::replace(ltoken, make::token(T!['(']));
- ted::replace(rtoken, make::token(T![')']));
+ editor.replace(ltoken, make.token(T!['(']));
+ editor.replace(rtoken, make.token(T![')']));
}
MacroDelims::LCur | MacroDelims::RCur => {
- ted::replace(ltoken, make::token(T!['[']));
- ted::replace(rtoken, make::token(T![']']));
+ editor.replace(ltoken, make.token(T!['[']));
+ editor.replace(rtoken, make.token(T![']']));
}
}
- builder.replace(makro_text_range, makro.syntax().text());
+ editor.add_mappings(make.finish_with_mappings());
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/unmerge_match_arm.rs b/crates/ide-assists/src/handlers/unmerge_match_arm.rs
index 5aedff5cc7..7b0f2dc65a 100644
--- a/crates/ide-assists/src/handlers/unmerge_match_arm.rs
+++ b/crates/ide-assists/src/handlers/unmerge_match_arm.rs
@@ -1,8 +1,7 @@
use syntax::{
Direction, SyntaxKind, T,
- algo::neighbor,
- ast::{self, AstNode, edit::IndentLevel, make},
- ted::{self, Position},
+ ast::{self, AstNode, edit::IndentLevel, syntax_factory::SyntaxFactory},
+ syntax_editor::{Element, Position},
};
use crate::{AssistContext, AssistId, Assists};
@@ -33,7 +32,7 @@ use crate::{AssistContext, AssistId, Assists};
// ```
pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let pipe_token = ctx.find_token_syntax_at_offset(T![|])?;
- let or_pat = ast::OrPat::cast(pipe_token.parent()?)?.clone_for_update();
+ let or_pat = ast::OrPat::cast(pipe_token.parent()?)?;
if or_pat.leading_pipe().is_some_and(|it| it == pipe_token) {
return None;
}
@@ -44,13 +43,14 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
// without `OrPat`.
let new_parent = match_arm.syntax().parent()?;
- let old_parent_range = new_parent.text_range();
acc.add(
AssistId::refactor_rewrite("unmerge_match_arm"),
"Unmerge match arm",
pipe_token.text_range(),
|edit| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(&new_parent);
let pats_after = pipe_token
.siblings_with_tokens(Direction::Next)
.filter_map(|it| ast::Pat::cast(it.into_node()?))
@@ -59,11 +59,9 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
let new_pat = if pats_after.len() == 1 {
pats_after[0].clone()
} else {
- make::or_pat(pats_after, or_pat.leading_pipe().is_some()).into()
+ make.or_pat(pats_after, or_pat.leading_pipe().is_some()).into()
};
- let new_match_arm =
- make::match_arm(new_pat, match_arm.guard(), match_arm_body).clone_for_update();
-
+ let new_match_arm = make.match_arm(new_pat, match_arm.guard(), match_arm_body);
let mut pipe_index = pipe_token.index();
if pipe_token
.prev_sibling_or_token()
@@ -71,10 +69,13 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
{
pipe_index -= 1;
}
- or_pat.syntax().splice_children(
- pipe_index..or_pat.syntax().children_with_tokens().count(),
- Vec::new(),
- );
+ for child in or_pat
+ .syntax()
+ .children_with_tokens()
+ .skip_while(|child| child.index() < pipe_index)
+ {
+ editor.delete(child.syntax_element());
+ }
let mut insert_after_old_arm = Vec::new();
@@ -86,33 +87,19 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
// body is a block, but we don't bother to check that.
// - Missing after the arm with arms after, if the arm body is a block. In this case
// we don't want to insert a comma at all.
- let has_comma_after =
- std::iter::successors(match_arm.syntax().last_child_or_token(), |it| {
- it.prev_sibling_or_token()
- })
- .map(|it| it.kind())
- .find(|it| !it.is_trivia())
- == Some(T![,]);
- let has_arms_after = neighbor(&match_arm, Direction::Next).is_some();
- if !has_comma_after && !has_arms_after {
- insert_after_old_arm.push(make::token(T![,]).into());
+ let has_comma_after = match_arm.comma_token().is_some();
+ if !has_comma_after && !match_arm.expr().unwrap().is_block_like() {
+ insert_after_old_arm.push(make.token(T![,]).into());
}
let indent = IndentLevel::from_node(match_arm.syntax());
- insert_after_old_arm.push(make::tokens::whitespace(&format!("\n{indent}")).into());
+ insert_after_old_arm.push(make.whitespace(&format!("\n{indent}")).into());
insert_after_old_arm.push(new_match_arm.syntax().clone().into());
- ted::insert_all_raw(Position::after(match_arm.syntax()), insert_after_old_arm);
-
- if has_comma_after {
- ted::insert_raw(
- Position::last_child_of(new_match_arm.syntax()),
- make::token(T![,]),
- );
- }
-
- edit.replace(old_parent_range, new_parent.to_string());
+ editor.insert_all(Position::after(match_arm.syntax()), insert_after_old_arm);
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -258,7 +245,7 @@ fn main() {
let x = X::A;
let y = match x {
X::A => 1i32,
- X::B => 1i32
+ X::B => 1i32,
};
}
"#,
@@ -276,7 +263,7 @@ enum X { A, B }
fn main() {
let x = X::A;
match x {
- X::A $0| X::B => {},
+ X::A $0| X::B => {}
}
}
"#,
@@ -287,8 +274,8 @@ enum X { A, B }
fn main() {
let x = X::A;
match x {
- X::A => {},
- X::B => {},
+ X::A => {}
+ X::B => {}
}
}
"#,
diff --git a/crates/ide-assists/src/handlers/wrap_return_type.rs b/crates/ide-assists/src/handlers/wrap_return_type.rs
index 9ea78719b2..d7189aa5db 100644
--- a/crates/ide-assists/src/handlers/wrap_return_type.rs
+++ b/crates/ide-assists/src/handlers/wrap_return_type.rs
@@ -56,7 +56,8 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
};
let type_ref = &ret_type.ty()?;
- let ty = ctx.sema.resolve_type(type_ref)?.as_adt();
+ let ty = ctx.sema.resolve_type(type_ref)?;
+ let ty_adt = ty.as_adt();
let famous_defs = FamousDefs(&ctx.sema, ctx.sema.scope(type_ref.syntax())?.krate());
for kind in WrapperKind::ALL {
@@ -64,7 +65,7 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
continue;
};
- if matches!(ty, Some(hir::Adt::Enum(ret_type)) if ret_type == core_wrapper) {
+ if matches!(ty_adt, Some(hir::Adt::Enum(ret_type)) if ret_type == core_wrapper) {
// The return type is already wrapped
cov_mark::hit!(wrap_return_type_simple_return_type_already_wrapped);
continue;
@@ -78,10 +79,23 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
|builder| {
let mut editor = builder.make_editor(&parent);
let make = SyntaxFactory::with_mappings();
- let alias = wrapper_alias(ctx, &make, &core_wrapper, type_ref, kind.symbol());
- let new_return_ty = alias.unwrap_or_else(|| match kind {
- WrapperKind::Option => make.ty_option(type_ref.clone()),
- WrapperKind::Result => make.ty_result(type_ref.clone(), make.ty_infer().into()),
+ let alias = wrapper_alias(ctx, &make, core_wrapper, type_ref, &ty, kind.symbol());
+ let (ast_new_return_ty, semantic_new_return_ty) = alias.unwrap_or_else(|| {
+ let (ast_ty, ty_constructor) = match kind {
+ WrapperKind::Option => {
+ (make.ty_option(type_ref.clone()), famous_defs.core_option_Option())
+ }
+ WrapperKind::Result => (
+ make.ty_result(type_ref.clone(), make.ty_infer().into()),
+ famous_defs.core_result_Result(),
+ ),
+ };
+ let semantic_ty = ty_constructor
+ .map(|ty_constructor| {
+ hir::Adt::from(ty_constructor).ty_with_args(ctx.db(), [ty.clone()])
+ })
+ .unwrap_or_else(|| ty.clone());
+ (ast_ty, semantic_ty)
});
let mut exprs_to_wrap = Vec::new();
@@ -96,6 +110,17 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
for_each_tail_expr(&body_expr, tail_cb);
for ret_expr_arg in exprs_to_wrap {
+ if let Some(ty) = ctx.sema.type_of_expr(&ret_expr_arg) {
+ if ty.adjusted().could_unify_with(ctx.db(), &semantic_new_return_ty) {
+ // The type is already correct, don't wrap it.
+ // We deliberately don't use `could_unify_with_deeply()`, because as long as the outer
+ // enum matches it's okay for us, as we don't trigger the assist if the return type
+ // is already `Option`/`Result`, so mismatched exact type is more likely a mistake
+ // than something intended.
+ continue;
+ }
+ }
+
let happy_wrapped = make.expr_call(
make.expr_path(make.ident_path(kind.happy_ident())),
make.arg_list(iter::once(ret_expr_arg.clone())),
@@ -103,12 +128,12 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
editor.replace(ret_expr_arg.syntax(), happy_wrapped.syntax());
}
- editor.replace(type_ref.syntax(), new_return_ty.syntax());
+ editor.replace(type_ref.syntax(), ast_new_return_ty.syntax());
if let WrapperKind::Result = kind {
// Add a placeholder snippet at the first generic argument that doesn't equal the return type.
// This is normally the error type, but that may not be the case when we inserted a type alias.
- let args = new_return_ty
+ let args = ast_new_return_ty
.path()
.unwrap()
.segment()
@@ -188,27 +213,28 @@ impl WrapperKind {
}
// Try to find an wrapper type alias in the current scope (shadowing the default).
-fn wrapper_alias(
- ctx: &AssistContext<'_>,
+fn wrapper_alias<'db>(
+ ctx: &AssistContext<'db>,
make: &SyntaxFactory,
- core_wrapper: &hir::Enum,
- ret_type: &ast::Type,
+ core_wrapper: hir::Enum,
+ ast_ret_type: &ast::Type,
+ semantic_ret_type: &hir::Type<'db>,
wrapper: hir::Symbol,
-) -> Option<ast::PathType> {
+) -> Option<(ast::PathType, hir::Type<'db>)> {
let wrapper_path = hir::ModPath::from_segments(
hir::PathKind::Plain,
iter::once(hir::Name::new_symbol_root(wrapper)),
);
- ctx.sema.resolve_mod_path(ret_type.syntax(), &wrapper_path).and_then(|def| {
+ ctx.sema.resolve_mod_path(ast_ret_type.syntax(), &wrapper_path).and_then(|def| {
def.filter_map(|def| match def.into_module_def() {
hir::ModuleDef::TypeAlias(alias) => {
let enum_ty = alias.ty(ctx.db()).as_adt()?.as_enum()?;
- (&enum_ty == core_wrapper).then_some(alias)
+ (enum_ty == core_wrapper).then_some((alias, enum_ty))
}
_ => None,
})
- .find_map(|alias| {
+ .find_map(|(alias, enum_ty)| {
let mut inserted_ret_type = false;
let generic_args =
alias.source(ctx.db())?.value.generic_param_list()?.generic_params().map(|param| {
@@ -216,7 +242,7 @@ fn wrapper_alias(
// Replace the very first type parameter with the function's return type.
ast::GenericParam::TypeParam(_) if !inserted_ret_type => {
inserted_ret_type = true;
- make.type_arg(ret_type.clone()).into()
+ make.type_arg(ast_ret_type.clone()).into()
}
ast::GenericParam::LifetimeParam(_) => {
make.lifetime_arg(make.lifetime("'_")).into()
@@ -231,7 +257,10 @@ fn wrapper_alias(
make.path_segment_generics(make.name_ref(name.as_str()), generic_arg_list),
);
- Some(make.ty_path(path))
+ let new_ty =
+ hir::Adt::from(enum_ty).ty_with_args(ctx.db(), [semantic_ret_type.clone()]);
+
+ Some((make.ty_path(path), new_ty))
})
})
}
@@ -605,29 +634,39 @@ fn foo() -> Option<i32> {
check_assist_by_label(
wrap_return_type,
r#"
-//- minicore: option
+//- minicore: option, future
+struct F(i32);
+impl core::future::Future for F {
+ type Output = i32;
+ fn poll(self: core::pin::Pin<&mut Self>, cx: &mut core::task::Context<'_>) -> core::task::Poll<Self::Output> { 0 }
+}
async fn foo() -> i$032 {
if true {
if false {
- 1.await
+ F(1).await
} else {
- 2.await
+ F(2).await
}
} else {
- 24i32.await
+ F(24i32).await
}
}
"#,
r#"
+struct F(i32);
+impl core::future::Future for F {
+ type Output = i32;
+ fn poll(self: core::pin::Pin<&mut Self>, cx: &mut core::task::Context<'_>) -> core::task::Poll<Self::Output> { 0 }
+}
async fn foo() -> Option<i32> {
if true {
if false {
- Some(1.await)
+ Some(F(1).await)
} else {
- Some(2.await)
+ Some(F(2).await)
}
} else {
- Some(24i32.await)
+ Some(F(24i32).await)
}
}
"#,
@@ -1666,29 +1705,39 @@ fn foo() -> Result<i32, ${0:_}> {
check_assist_by_label(
wrap_return_type,
r#"
-//- minicore: result
+//- minicore: result, future
+struct F(i32);
+impl core::future::Future for F {
+ type Output = i32;
+ fn poll(self: core::pin::Pin<&mut Self>, cx: &mut core::task::Context<'_>) -> core::task::Poll<Self::Output> { 0 }
+}
async fn foo() -> i$032 {
if true {
if false {
- 1.await
+ F(1).await
} else {
- 2.await
+ F(2).await
}
} else {
- 24i32.await
+ F(24i32).await
}
}
"#,
r#"
+struct F(i32);
+impl core::future::Future for F {
+ type Output = i32;
+ fn poll(self: core::pin::Pin<&mut Self>, cx: &mut core::task::Context<'_>) -> core::task::Poll<Self::Output> { 0 }
+}
async fn foo() -> Result<i32, ${0:_}> {
if true {
if false {
- Ok(1.await)
+ Ok(F(1).await)
} else {
- Ok(2.await)
+ Ok(F(2).await)
}
} else {
- Ok(24i32.await)
+ Ok(F(24i32).await)
}
}
"#,
@@ -2460,4 +2509,54 @@ fn foo() -> Result<i32, ${0:_}> {
WrapperKind::Result.label(),
);
}
+
+ #[test]
+ fn already_wrapped() {
+ check_assist_by_label(
+ wrap_return_type,
+ r#"
+//- minicore: option
+fn foo() -> i32$0 {
+ if false {
+ 0
+ } else {
+ Some(1)
+ }
+}
+ "#,
+ r#"
+fn foo() -> Option<i32> {
+ if false {
+ Some(0)
+ } else {
+ Some(1)
+ }
+}
+ "#,
+ WrapperKind::Option.label(),
+ );
+ check_assist_by_label(
+ wrap_return_type,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ if false {
+ 0
+ } else {
+ Ok(1)
+ }
+}
+ "#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ if false {
+ Ok(0)
+ } else {
+ Ok(1)
+ }
+}
+ "#,
+ WrapperKind::Result.label(),
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs b/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
index e1b94673e7..5183566d13 100644
--- a/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
+++ b/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
@@ -2,8 +2,7 @@ use ide_db::source_change::SourceChangeBuilder;
use itertools::Itertools;
use syntax::{
NodeOrToken, SyntaxToken, T, TextRange, algo,
- ast::{self, AstNode, make},
- ted::{self, Position},
+ ast::{self, AstNode, make, syntax_factory::SyntaxFactory},
};
use crate::{AssistContext, AssistId, Assists};
@@ -173,40 +172,45 @@ fn wrap_derive(
}
}
let handle_source_change = |edit: &mut SourceChangeBuilder| {
- let new_derive = make::attr_outer(make::meta_token_tree(
- make::ext::ident_path("derive"),
- make::token_tree(T!['('], new_derive),
- ))
- .clone_for_update();
- let meta = make::meta_token_tree(
- make::ext::ident_path("cfg_attr"),
- make::token_tree(
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(attr.syntax());
+ let new_derive = make.attr_outer(
+ make.meta_token_tree(make.ident_path("derive"), make.token_tree(T!['('], new_derive)),
+ );
+ let meta = make.meta_token_tree(
+ make.ident_path("cfg_attr"),
+ make.token_tree(
T!['('],
vec![
- NodeOrToken::Token(make::token(T![,])),
- NodeOrToken::Token(make::tokens::whitespace(" ")),
- NodeOrToken::Token(make::tokens::ident("derive")),
- NodeOrToken::Node(make::token_tree(T!['('], cfg_derive_tokens)),
+ NodeOrToken::Token(make.token(T![,])),
+ NodeOrToken::Token(make.whitespace(" ")),
+ NodeOrToken::Token(make.ident("derive")),
+ NodeOrToken::Node(make.token_tree(T!['('], cfg_derive_tokens)),
],
),
);
- // Remove the derive attribute
- let edit_attr = edit.make_syntax_mut(attr.syntax().clone());
-
- ted::replace(edit_attr, new_derive.syntax().clone());
- let cfg_attr = make::attr_outer(meta).clone_for_update();
- ted::insert_all_raw(
- Position::after(new_derive.syntax().clone()),
- vec![make::tokens::whitespace("\n").into(), cfg_attr.syntax().clone().into()],
+ let cfg_attr = make.attr_outer(meta);
+ editor.replace_with_many(
+ attr.syntax(),
+ vec![
+ new_derive.syntax().clone().into(),
+ make.whitespace("\n").into(),
+ cfg_attr.syntax().clone().into(),
+ ],
);
+
if let Some(snippet_cap) = ctx.config.snippet_cap {
if let Some(first_meta) =
cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
{
- edit.add_tabstop_after_token(snippet_cap, first_meta)
+ let tabstop = edit.make_tabstop_after(snippet_cap);
+ editor.add_annotation(first_meta, tabstop);
}
}
+
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
};
acc.add(
@@ -221,10 +225,10 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) ->
let range = attr.syntax().text_range();
let path = attr.path()?;
let handle_source_change = |edit: &mut SourceChangeBuilder| {
- let mut raw_tokens = vec![
- NodeOrToken::Token(make::token(T![,])),
- NodeOrToken::Token(make::tokens::whitespace(" ")),
- ];
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(attr.syntax());
+ let mut raw_tokens =
+ vec![NodeOrToken::Token(make.token(T![,])), NodeOrToken::Token(make.whitespace(" "))];
path.syntax().descendants_with_tokens().for_each(|it| {
if let NodeOrToken::Token(token) = it {
raw_tokens.push(NodeOrToken::Token(token));
@@ -232,9 +236,9 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) ->
});
if let Some(meta) = attr.meta() {
if let (Some(eq), Some(expr)) = (meta.eq_token(), meta.expr()) {
- raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
+ raw_tokens.push(NodeOrToken::Token(make.whitespace(" ")));
raw_tokens.push(NodeOrToken::Token(eq));
- raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
+ raw_tokens.push(NodeOrToken::Token(make.whitespace(" ")));
expr.syntax().descendants_with_tokens().for_each(|it| {
if let NodeOrToken::Token(token) = it {
@@ -245,26 +249,24 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) ->
raw_tokens.extend(tt.token_trees_and_tokens());
}
}
- let meta = make::meta_token_tree(
- make::ext::ident_path("cfg_attr"),
- make::token_tree(T!['('], raw_tokens),
- );
- let cfg_attr = if attr.excl_token().is_some() {
- make::attr_inner(meta)
- } else {
- make::attr_outer(meta)
- }
- .clone_for_update();
- let attr_syntax = edit.make_syntax_mut(attr.syntax().clone());
- ted::replace(attr_syntax, cfg_attr.syntax());
+ let meta =
+ make.meta_token_tree(make.ident_path("cfg_attr"), make.token_tree(T!['('], raw_tokens));
+ let cfg_attr =
+ if attr.excl_token().is_some() { make.attr_inner(meta) } else { make.attr_outer(meta) };
+
+ editor.replace(attr.syntax(), cfg_attr.syntax());
if let Some(snippet_cap) = ctx.config.snippet_cap {
if let Some(first_meta) =
cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
{
- edit.add_tabstop_after_token(snippet_cap, first_meta)
+ let tabstop = edit.make_tabstop_after(snippet_cap);
+ editor.add_annotation(first_meta, tabstop);
}
}
+
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
};
acc.add(
AssistId::refactor("wrap_unwrap_cfg_attr"),
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index 1a91053f93..87a4c2ef75 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -1,5 +1,7 @@
//! Assorted functions shared by several assists.
+use std::slice;
+
pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
use hir::{
DisplayTarget, HasAttrs as HirHasAttrs, HirDisplay, InFile, ModuleDef, PathResolution,
@@ -912,7 +914,7 @@ fn handle_as_ref_str(
) -> Option<(ReferenceConversionType, bool)> {
let str_type = hir::BuiltinType::str().ty(db);
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type.clone()])
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&str_type))
.then_some((ReferenceConversionType::AsRefStr, could_deref_to_target(ty, &str_type, db)))
}
@@ -924,7 +926,7 @@ fn handle_as_ref_slice(
let type_argument = ty.type_arguments().next()?;
let slice_type = hir::Type::new_slice(type_argument);
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type.clone()]).then_some((
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&slice_type)).then_some((
ReferenceConversionType::AsRefSlice,
could_deref_to_target(ty, &slice_type, db),
))
@@ -937,10 +939,11 @@ fn handle_dereferenced(
) -> Option<(ReferenceConversionType, bool)> {
let type_argument = ty.type_arguments().next()?;
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument.clone()]).then_some((
- ReferenceConversionType::Dereferenced,
- could_deref_to_target(ty, &type_argument, db),
- ))
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&type_argument))
+ .then_some((
+ ReferenceConversionType::Dereferenced,
+ could_deref_to_target(ty, &type_argument, db),
+ ))
}
fn handle_option_as_ref(
diff --git a/crates/ide-completion/Cargo.toml b/crates/ide-completion/Cargo.toml
index 94c01e333e..9bad21fc8e 100644
--- a/crates/ide-completion/Cargo.toml
+++ b/crates/ide-completion/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/ide-completion/src/completions/fn_param.rs b/crates/ide-completion/src/completions/fn_param.rs
index 6d1e973dc4..809e71cc11 100644
--- a/crates/ide-completion/src/completions/fn_param.rs
+++ b/crates/ide-completion/src/completions/fn_param.rs
@@ -195,5 +195,5 @@ fn comma_wrapper(ctx: &CompletionContext<'_>) -> Option<(impl Fn(&str) -> String
matches!(prev_token_kind, SyntaxKind::COMMA | SyntaxKind::L_PAREN | SyntaxKind::PIPE);
let leading = if has_leading_comma { "" } else { ", " };
- Some((move |label: &_| (format!("{leading}{label}{trailing}")), param.text_range()))
+ Some((move |label: &_| format!("{leading}{label}{trailing}"), param.text_range()))
}
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index 6e3a76f346..ea5fb39338 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -4,6 +4,7 @@ use std::iter;
use hir::{ExpandResult, InFile, Semantics, Type, TypeInfo, Variant};
use ide_db::{RootDatabase, active_parameter::ActiveParameter};
use itertools::Either;
+use stdx::always;
use syntax::{
AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
T, TextRange, TextSize,
@@ -869,8 +870,15 @@ fn classify_name_ref<'db>(
return None;
}
+ let mut receiver_ty = receiver.as_ref().and_then(|it| sema.type_of_expr(it));
+ if receiver_is_ambiguous_float_literal {
+ // `123.|` is parsed as a float but should actually be an integer.
+ always!(receiver_ty.as_ref().is_none_or(|receiver_ty| receiver_ty.original.is_float()));
+ receiver_ty = Some(TypeInfo { original: hir::BuiltinType::i32().ty(sema.db), adjusted: None });
+ }
+
let kind = NameRefKind::DotAccess(DotAccess {
- receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
+ receiver_ty,
kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
receiver,
ctx: DotAccessExprCtx { in_block_expr: is_in_block(field.syntax()), in_breakable: is_in_breakable(field.syntax()) }
diff --git a/crates/ide-completion/src/tests/attribute.rs b/crates/ide-completion/src/tests/attribute.rs
index 411902f111..46a3630045 100644
--- a/crates/ide-completion/src/tests/attribute.rs
+++ b/crates/ide-completion/src/tests/attribute.rs
@@ -878,6 +878,7 @@ mod derive {
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de PartialEq macro PartialEq
de PartialEq, Eq
@@ -900,6 +901,7 @@ mod derive {
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de Eq
de Eq, PartialOrd, Ord
@@ -921,6 +923,7 @@ mod derive {
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de Eq
de Eq, PartialOrd, Ord
@@ -942,6 +945,7 @@ mod derive {
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de PartialOrd
de PartialOrd, Ord
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index b2d18b796f..33f729f016 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -2241,3 +2241,37 @@ fn main() {
"#,
);
}
+
+#[test]
+fn ambiguous_float_literal() {
+ check(
+ r#"
+#![rustc_coherence_is_core]
+
+impl i32 {
+ pub fn int_method(self) {}
+}
+impl f64 {
+ pub fn float_method(self) {}
+}
+
+fn foo() {
+ 1.$0
+}
+ "#,
+ expect![[r#"
+ me int_method() fn(self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn const const {}
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
+ "#]],
+ );
+}
diff --git a/crates/ide-completion/src/tests/item_list.rs b/crates/ide-completion/src/tests/item_list.rs
index fcdf10c856..179d669360 100644
--- a/crates/ide-completion/src/tests/item_list.rs
+++ b/crates/ide-completion/src/tests/item_list.rs
@@ -550,3 +550,30 @@ fn inside_extern_blocks() {
"#]],
)
}
+
+#[test]
+fn tokens_from_macro() {
+ check_edit(
+ "fn as_ref",
+ r#"
+//- proc_macros: identity
+//- minicore: as_ref
+struct Foo;
+
+#[proc_macros::identity]
+impl<'a> AsRef<&'a i32> for Foo {
+ $0
+}
+ "#,
+ r#"
+struct Foo;
+
+#[proc_macros::identity]
+impl<'a> AsRef<&'a i32> for Foo {
+ fn as_ref(&self) -> &&'a i32 {
+ $0
+}
+}
+ "#,
+ );
+}
diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs
index 125e11e9e3..c7e2d05825 100644
--- a/crates/ide-completion/src/tests/type_pos.rs
+++ b/crates/ide-completion/src/tests/type_pos.rs
@@ -429,18 +429,18 @@ trait Tr<T> {
impl Tr<$0
"#,
expect![[r#"
- en Enum Enum
- ma makro!(…) macro_rules! makro
+ en Enum Enum
+ ma makro!(…) macro_rules! makro
md module
- sp Self dyn Tr<{unknown}> + 'static
- st Record Record
- st S S
- st Tuple Tuple
- st Unit Unit
+ sp Self dyn Tr<{unknown}>
+ st Record Record
+ st S S
+ st Tuple Tuple
+ st Unit Unit
tt Tr
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml
index acde1d665d..e065adb0f0 100644
--- a/crates/ide-db/Cargo.toml
+++ b/crates/ide-db/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/ide-db/src/famous_defs.rs b/crates/ide-db/src/famous_defs.rs
index 994150b1ac..8e68738508 100644
--- a/crates/ide-db/src/famous_defs.rs
+++ b/crates/ide-db/src/famous_defs.rs
@@ -106,6 +106,18 @@ impl FamousDefs<'_, '_> {
self.find_trait("core:convert:AsRef")
}
+ pub fn core_convert_AsMut(&self) -> Option<Trait> {
+ self.find_trait("core:convert:AsMut")
+ }
+
+ pub fn core_borrow_Borrow(&self) -> Option<Trait> {
+ self.find_trait("core:borrow:Borrow")
+ }
+
+ pub fn core_borrow_BorrowMut(&self) -> Option<Trait> {
+ self.find_trait("core:borrow:BorrowMut")
+ }
+
pub fn core_ops_ControlFlow(&self) -> Option<Enum> {
self.find_enum("core:ops:ControlFlow")
}
diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs
index 232648af66..0ab880bcfe 100644
--- a/crates/ide-db/src/path_transform.rs
+++ b/crates/ide-db/src/path_transform.rs
@@ -2,7 +2,10 @@
use crate::helpers::mod_path_to_ast;
use either::Either;
-use hir::{AsAssocItem, HirDisplay, ImportPathConfig, ModuleDef, SemanticsScope};
+use hir::{
+ AsAssocItem, HirDisplay, HirFileId, ImportPathConfig, ModuleDef, SemanticsScope,
+ prettify_macro_expansion,
+};
use itertools::Itertools;
use rustc_hash::FxHashMap;
use span::Edition;
@@ -136,6 +139,25 @@ impl<'a> PathTransform<'a> {
}
}
+ fn prettify_target_node(&self, node: SyntaxNode) -> SyntaxNode {
+ match self.target_scope.file_id() {
+ HirFileId::FileId(_) => node,
+ HirFileId::MacroFile(file_id) => {
+ let db = self.target_scope.db;
+ prettify_macro_expansion(
+ db,
+ node,
+ &db.expansion_span_map(file_id),
+ self.target_scope.module().krate().into(),
+ )
+ }
+ }
+ }
+
+ fn prettify_target_ast<N: AstNode>(&self, node: N) -> N {
+ N::cast(self.prettify_target_node(node.syntax().clone())).unwrap()
+ }
+
fn build_ctx(&self) -> Ctx<'a> {
let db = self.source_scope.db;
let target_module = self.target_scope.module();
@@ -163,7 +185,7 @@ impl<'a> PathTransform<'a> {
.for_each(|(k, v)| match (k.split(db), v) {
(Either::Right(k), Some(TypeOrConst::Either(v))) => {
if let Some(ty) = v.ty() {
- type_substs.insert(k, ty);
+ type_substs.insert(k, self.prettify_target_ast(ty));
}
}
(Either::Right(k), None) => {
@@ -178,7 +200,7 @@ impl<'a> PathTransform<'a> {
}
(Either::Left(k), Some(TypeOrConst::Either(v))) => {
if let Some(ty) = v.ty() {
- const_substs.insert(k, ty.syntax().clone());
+ const_substs.insert(k, self.prettify_target_node(ty.syntax().clone()));
}
}
(Either::Left(k), Some(TypeOrConst::Const(v))) => {
@@ -189,7 +211,7 @@ impl<'a> PathTransform<'a> {
// and sometimes require slight modifications; see
// https://doc.rust-lang.org/reference/statements.html#expression-statements
// (default values in curly brackets can cause the same problem)
- const_substs.insert(k, expr.syntax().clone());
+ const_substs.insert(k, self.prettify_target_node(expr.syntax().clone()));
}
}
(Either::Left(k), None) => {
@@ -204,6 +226,7 @@ impl<'a> PathTransform<'a> {
}
_ => (), // ignore mismatching params
});
+ // No need to prettify lifetimes, there's nothing to prettify.
let lifetime_substs: FxHashMap<_, _> = self
.generic_def
.into_iter()
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs
index 5356614dce..e6618573e0 100644
--- a/crates/ide-db/src/prime_caches.rs
+++ b/crates/ide-db/src/prime_caches.rs
@@ -272,5 +272,5 @@ fn crate_name(db: &RootDatabase, krate: Crate) -> Symbol {
.display_name
.as_deref()
.cloned()
- .unwrap_or_else(|| Symbol::integer(salsa::plumbing::AsId::as_id(&krate).as_u32() as usize))
+ .unwrap_or_else(|| Symbol::integer(salsa::plumbing::AsId::as_id(&krate).index() as usize))
}
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 7d460f7249..4efb83ba32 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -317,7 +317,7 @@ impl Definition {
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -332,7 +332,7 @@ impl Definition {
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -341,7 +341,7 @@ impl Definition {
if let Definition::SelfType(impl_) = self {
return match impl_.source(db).map(|src| src.syntax().cloned()) {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -360,7 +360,7 @@ impl Definition {
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
diff --git a/crates/ide-diagnostics/Cargo.toml b/crates/ide-diagnostics/Cargo.toml
index 96be51e1b2..6f1e66948f 100644
--- a/crates/ide-diagnostics/Cargo.toml
+++ b/crates/ide-diagnostics/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs
index 546512a6cf..c39e00e178 100644
--- a/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -242,8 +242,8 @@ macro_rules! outer {
fn f() {
outer!();
-} //^^^^^^^^ error: leftover tokens
- //^^^^^^^^ error: Syntax Error in Expansion: expected expression
+} //^^^^^^ error: leftover tokens
+ //^^^^^^ error: Syntax Error in Expansion: expected expression
"#,
)
}
diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 8a5d82b48c..7da799e0d4 100644
--- a/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -66,7 +66,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
let current_module =
ctx.sema.scope(d.field_list_parent.to_node(&root).syntax()).map(|it| it.module());
let range = InFile::new(d.file, d.field_list_parent.text_range())
- .original_node_file_range_rooted(ctx.sema.db);
+ .original_node_file_range_rooted_opt(ctx.sema.db)?;
let build_text_edit = |new_syntax: &SyntaxNode, old_syntax| {
let edit = {
diff --git a/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs b/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
index 0928262d22..1e80d02926 100644
--- a/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
+++ b/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
@@ -239,4 +239,22 @@ impl S {
"#,
)
}
+
+ #[test]
+ fn regression_20155() {
+ check_diagnostics(
+ r#"
+//- minicore: copy, option
+struct Box(i32);
+fn test() {
+ let b = Some(Box(0));
+ || {
+ if let Some(b) = b {
+ let _move = b;
+ }
+ };
+}
+"#,
+ )
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
index 4327b12dce..fc2648efb4 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -77,6 +77,7 @@ fn quickfix_for_redundant_assoc_item(
redundant_item_def: String,
range: TextRange,
) -> Option<Vec<Assist>> {
+ let file_id = d.file_id.file_id()?;
let add_assoc_item_def = |builder: &mut SourceChangeBuilder| -> Option<()> {
let db = ctx.sema.db;
let root = db.parse_or_expand(d.file_id);
@@ -90,12 +91,14 @@ fn quickfix_for_redundant_assoc_item(
let trait_def = d.trait_.source(db)?.value;
let l_curly = trait_def.assoc_item_list()?.l_curly_token()?.text_range();
let where_to_insert =
- hir::InFile::new(d.file_id, l_curly).original_node_file_range_rooted(db).range;
+ hir::InFile::new(d.file_id, l_curly).original_node_file_range_rooted_opt(db)?;
+ if where_to_insert.file_id != file_id {
+ return None;
+ }
- builder.insert(where_to_insert.end(), redundant_item_def);
+ builder.insert(where_to_insert.range.end(), redundant_item_def);
Some(())
};
- let file_id = d.file_id.file_id()?;
let mut source_change_builder = SourceChangeBuilder::new(file_id.file_id(ctx.sema.db));
add_assoc_item_def(&mut source_change_builder)?;
diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index e2957fcaef..ac54ac0950 100644
--- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -1171,7 +1171,7 @@ trait B {}
fn test(a: &dyn A) -> &dyn B {
a
- //^ error: expected &(dyn B + 'static), found &(dyn A + 'static)
+ //^ error: expected &dyn B, found &dyn A
}
"#,
);
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index 1f2d671249..dcca85d4db 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -120,8 +120,7 @@ fn assoc_func_fix(
let call = ast::MethodCallExpr::cast(expr.syntax().clone())?;
let range = InFile::new(expr_ptr.file_id, call.syntax().text_range())
- .original_node_file_range_rooted(db)
- .range;
+ .original_node_file_range_rooted_opt(db)?;
let receiver = call.receiver()?;
let receiver_type = &ctx.sema.type_of_expr(&receiver)?.original;
@@ -174,18 +173,16 @@ fn assoc_func_fix(
let assoc_func_call_expr_string = make::expr_call(assoc_func_path, args).to_string();
- let file_id = ctx.sema.original_range_opt(call.receiver()?.syntax())?.file_id;
-
Some(Assist {
id: AssistId::quick_fix("method_call_to_assoc_func_call_fix"),
label: Label::new(format!(
"Use associated func call instead: `{assoc_func_call_expr_string}`"
)),
group: None,
- target: range,
+ target: range.range,
source_change: Some(SourceChange::from_text_edit(
- file_id.file_id(ctx.sema.db),
- TextEdit::replace(range, assoc_func_call_expr_string),
+ range.file_id.file_id(ctx.sema.db),
+ TextEdit::replace(range.range, assoc_func_call_expr_string),
)),
command: None,
})
@@ -300,7 +297,7 @@ macro_rules! m {
}
fn main() {
m!(());
- // ^^^^^^ error: no method `foo` on type `()`
+ // ^^ error: no method `foo` on type `()`
}
"#,
);
diff --git a/crates/ide-ssr/Cargo.toml b/crates/ide-ssr/Cargo.toml
index 1212fa9f9c..0620bd26fe 100644
--- a/crates/ide-ssr/Cargo.toml
+++ b/crates/ide-ssr/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml
index 2f8ed88fbb..06d2776ebe 100644
--- a/crates/ide/Cargo.toml
+++ b/crates/ide/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs
index 4b8d07a253..7a0405939d 100644
--- a/crates/ide/src/call_hierarchy.rs
+++ b/crates/ide/src/call_hierarchy.rs
@@ -592,7 +592,7 @@ macro_rules! call {
"#,
expect!["callee Function FileId(0) 22..37 30..36"],
expect![[r#"
- caller Function FileId(0) 38..52 : FileId(0):44..50
+ caller Function FileId(0) 38..43 : FileId(0):44..50
caller Function FileId(1) 130..136 130..136 : FileId(0):44..50
callee Function FileId(0) 38..52 44..50 : FileId(0):44..50"#]],
expect![[]],
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index 2c983287d8..f58202a421 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -60,7 +60,7 @@ pub(crate) fn rewrite_links(
let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb))
.into_offset_iter();
- let doc = map_links(doc, |target, title, range| {
+ let doc = map_links(doc, |target, title, range, link_type| {
// This check is imperfect, there's some overlap between valid intra-doc links
// and valid URLs so we choose to be too eager to try to resolve what might be
// a URL.
@@ -78,7 +78,7 @@ pub(crate) fn rewrite_links(
.map(|(_, attr_id)| attr_id.is_inner_attr())
.unwrap_or(false);
if let Some((target, title)) =
- rewrite_intra_doc_link(db, definition, target, title, is_inner_doc)
+ rewrite_intra_doc_link(db, definition, target, title, is_inner_doc, link_type)
{
(None, target, title)
} else if let Some(target) = rewrite_url_link(db, definition, target) {
@@ -417,6 +417,7 @@ fn rewrite_intra_doc_link(
target: &str,
title: &str,
is_inner_doc: bool,
+ link_type: LinkType,
) -> Option<(String, String)> {
let (link, ns) = parse_intra_doc_link(target);
@@ -438,7 +439,21 @@ fn rewrite_intra_doc_link(
url = url.join(&file).ok()?;
url.set_fragment(frag);
- Some((url.into(), strip_prefixes_suffixes(title).to_owned()))
+ // We want to strip the keyword prefix from the title, but only if the target is implicitly the same
+ // as the title.
+ let title = match link_type {
+ LinkType::Email
+ | LinkType::Autolink
+ | LinkType::Shortcut
+ | LinkType::Collapsed
+ | LinkType::Reference
+ | LinkType::Inline => title.to_owned(),
+ LinkType::ShortcutUnknown | LinkType::CollapsedUnknown | LinkType::ReferenceUnknown => {
+ strip_prefixes_suffixes(title).to_owned()
+ }
+ };
+
+ Some((url.into(), title))
}
/// Try to resolve path to local documentation via path-based links (i.e. `../gateway/struct.Shard.html`).
@@ -470,7 +485,7 @@ fn mod_path_of_def(db: &RootDatabase, def: Definition) -> Option<String> {
/// Rewrites a markdown document, applying 'callback' to each link.
fn map_links<'e>(
events: impl Iterator<Item = (Event<'e>, Range<usize>)>,
- callback: impl Fn(&str, &str, Range<usize>) -> (Option<LinkType>, String, String),
+ callback: impl Fn(&str, &str, Range<usize>, LinkType) -> (Option<LinkType>, String, String),
) -> impl Iterator<Item = Event<'e>> {
let mut in_link = false;
// holds the origin link target on start event and the rewritten one on end event
@@ -497,7 +512,7 @@ fn map_links<'e>(
}
Event::Text(s) if in_link => {
let (link_type, link_target_s, link_name) =
- callback(&end_link_target.take().unwrap(), &s, range);
+ callback(&end_link_target.take().unwrap(), &s, range, end_link_type.unwrap());
end_link_target = Some(CowStr::Boxed(link_target_s.into()));
if !matches!(end_link_type, Some(LinkType::Autolink)) {
end_link_type = link_type;
@@ -506,7 +521,7 @@ fn map_links<'e>(
}
Event::Code(s) if in_link => {
let (link_type, link_target_s, link_name) =
- callback(&end_link_target.take().unwrap(), &s, range);
+ callback(&end_link_target.take().unwrap(), &s, range, end_link_type.unwrap());
end_link_target = Some(CowStr::Boxed(link_target_s.into()));
if !matches!(end_link_type, Some(LinkType::Autolink)) {
end_link_type = link_type;
diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs
index 347da4e85b..6820f99fac 100644
--- a/crates/ide/src/file_structure.rs
+++ b/crates/ide/src/file_structure.rs
@@ -329,7 +329,7 @@ macro_rules! mcexp {
#[deprecated]
fn obsolete() {}
-#[deprecated(note = "for awhile")]
+#[deprecated(note = "for a while")]
fn very_obsolete() {}
// region: Some region name
@@ -608,8 +608,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "very_obsolete",
- navigation_range: 511..524,
- node_range: 473..529,
+ navigation_range: 512..525,
+ node_range: 473..530,
kind: SymbolKind(
Function,
),
@@ -621,8 +621,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "Some region name",
- navigation_range: 531..558,
- node_range: 531..558,
+ navigation_range: 532..559,
+ node_range: 532..559,
kind: Region,
detail: None,
deprecated: false,
@@ -630,8 +630,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "m",
- navigation_range: 598..599,
- node_range: 573..636,
+ navigation_range: 599..600,
+ node_range: 574..637,
kind: SymbolKind(
Module,
),
@@ -643,8 +643,8 @@ fn let_statements() {
22,
),
label: "dontpanic",
- navigation_range: 573..593,
- node_range: 573..593,
+ navigation_range: 574..594,
+ node_range: 574..594,
kind: Region,
detail: None,
deprecated: false,
@@ -654,8 +654,8 @@ fn let_statements() {
22,
),
label: "f",
- navigation_range: 605..606,
- node_range: 602..611,
+ navigation_range: 606..607,
+ node_range: 603..612,
kind: SymbolKind(
Function,
),
@@ -669,8 +669,8 @@ fn let_statements() {
22,
),
label: "g",
- navigation_range: 628..629,
- node_range: 612..634,
+ navigation_range: 629..630,
+ node_range: 613..635,
kind: SymbolKind(
Function,
),
@@ -682,8 +682,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "extern \"C\"",
- navigation_range: 638..648,
- node_range: 638..651,
+ navigation_range: 639..649,
+ node_range: 639..652,
kind: ExternBlock,
detail: None,
deprecated: false,
@@ -691,8 +691,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "let_statements",
- navigation_range: 656..670,
- node_range: 653..813,
+ navigation_range: 657..671,
+ node_range: 654..814,
kind: SymbolKind(
Function,
),
@@ -706,8 +706,8 @@ fn let_statements() {
27,
),
label: "x",
- navigation_range: 683..684,
- node_range: 679..690,
+ navigation_range: 684..685,
+ node_range: 680..691,
kind: SymbolKind(
Local,
),
@@ -719,8 +719,8 @@ fn let_statements() {
27,
),
label: "mut y",
- navigation_range: 699..704,
- node_range: 695..709,
+ navigation_range: 700..705,
+ node_range: 696..710,
kind: SymbolKind(
Local,
),
@@ -732,8 +732,8 @@ fn let_statements() {
27,
),
label: "Foo { .. }",
- navigation_range: 718..740,
- node_range: 714..753,
+ navigation_range: 719..741,
+ node_range: 715..754,
kind: SymbolKind(
Local,
),
@@ -745,8 +745,8 @@ fn let_statements() {
27,
),
label: "_",
- navigation_range: 803..804,
- node_range: 799..811,
+ navigation_range: 804..805,
+ node_range: 800..812,
kind: SymbolKind(
Local,
),
diff --git a/crates/ide/src/folding_ranges.rs b/crates/ide/src/folding_ranges.rs
index 194e8c968f..9bd8504733 100755
--- a/crates/ide/src/folding_ranges.rs
+++ b/crates/ide/src/folding_ranges.rs
@@ -2,7 +2,7 @@ use ide_db::{FxHashSet, syntax_helpers::node_ext::vis_eq};
use syntax::{
Direction, NodeOrToken, SourceFile,
SyntaxKind::{self, *},
- TextRange, TextSize,
+ SyntaxNode, TextRange, TextSize,
ast::{self, AstNode, AstToken},
match_ast,
};
@@ -16,16 +16,21 @@ const REGION_END: &str = "// endregion";
pub enum FoldKind {
Comment,
Imports,
- Mods,
+ Region,
Block,
ArgList,
- Region,
- Consts,
- Statics,
Array,
WhereClause,
ReturnType,
MatchArm,
+ // region: item runs
+ Modules,
+ Consts,
+ Statics,
+ TypeAliases,
+ TraitAliases,
+ ExternCrates,
+ // endregion: item runs
}
#[derive(Debug)]
@@ -41,10 +46,7 @@ pub struct Fold {
pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
let mut res = vec![];
let mut visited_comments = FxHashSet::default();
- let mut visited_imports = FxHashSet::default();
- let mut visited_mods = FxHashSet::default();
- let mut visited_consts = FxHashSet::default();
- let mut visited_statics = FxHashSet::default();
+ let mut visited_nodes = FxHashSet::default();
// regions can be nested, here is a LIFO buffer
let mut region_starts: Vec<TextSize> = vec![];
@@ -93,30 +95,40 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
if module.item_list().is_none() {
if let Some(range) = contiguous_range_for_item_group(
module,
- &mut visited_mods,
+ &mut visited_nodes,
) {
- res.push(Fold { range, kind: FoldKind::Mods })
+ res.push(Fold { range, kind: FoldKind::Modules })
}
}
},
ast::Use(use_) => {
- if let Some(range) = contiguous_range_for_item_group(use_, &mut visited_imports) {
+ if let Some(range) = contiguous_range_for_item_group(use_, &mut visited_nodes) {
res.push(Fold { range, kind: FoldKind::Imports })
}
},
ast::Const(konst) => {
- if let Some(range) = contiguous_range_for_item_group(konst, &mut visited_consts) {
+ if let Some(range) = contiguous_range_for_item_group(konst, &mut visited_nodes) {
res.push(Fold { range, kind: FoldKind::Consts })
}
},
ast::Static(statik) => {
- if let Some(range) = contiguous_range_for_item_group(statik, &mut visited_statics) {
+ if let Some(range) = contiguous_range_for_item_group(statik, &mut visited_nodes) {
res.push(Fold { range, kind: FoldKind::Statics })
}
},
- ast::WhereClause(where_clause) => {
- if let Some(range) = fold_range_for_where_clause(where_clause) {
- res.push(Fold { range, kind: FoldKind::WhereClause })
+ ast::TypeAlias(alias) => {
+ if let Some(range) = contiguous_range_for_item_group(alias, &mut visited_nodes) {
+ res.push(Fold { range, kind: FoldKind::TypeAliases })
+ }
+ },
+ ast::TraitAlias(alias) => {
+ if let Some(range) = contiguous_range_for_item_group(alias, &mut visited_nodes) {
+ res.push(Fold { range, kind: FoldKind::TraitAliases })
+ }
+ },
+ ast::ExternCrate(extern_crate) => {
+ if let Some(range) = contiguous_range_for_item_group(extern_crate, &mut visited_nodes) {
+ res.push(Fold { range, kind: FoldKind::ExternCrates })
}
},
ast::MatchArm(match_arm) => {
@@ -137,9 +149,10 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
match kind {
COMMENT => Some(FoldKind::Comment),
- ARG_LIST | PARAM_LIST => Some(FoldKind::ArgList),
+ ARG_LIST | PARAM_LIST | GENERIC_ARG_LIST | GENERIC_PARAM_LIST => Some(FoldKind::ArgList),
ARRAY_EXPR => Some(FoldKind::Array),
RET_TYPE => Some(FoldKind::ReturnType),
+ WHERE_CLAUSE => Some(FoldKind::WhereClause),
ASSOC_ITEM_LIST
| RECORD_FIELD_LIST
| RECORD_PAT_FIELD_LIST
@@ -155,11 +168,14 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
}
}
-fn contiguous_range_for_item_group<N>(first: N, visited: &mut FxHashSet<N>) -> Option<TextRange>
+fn contiguous_range_for_item_group<N>(
+ first: N,
+ visited: &mut FxHashSet<SyntaxNode>,
+) -> Option<TextRange>
where
N: ast::HasVisibility + Clone + Hash + Eq,
{
- if !visited.insert(first.clone()) {
+ if !visited.insert(first.syntax().clone()) {
return None;
}
@@ -183,7 +199,7 @@ where
if let Some(next) = N::cast(node) {
let next_vis = next.visibility();
if eq_visibility(next_vis.clone(), last_vis) {
- visited.insert(next.clone());
+ visited.insert(next.syntax().clone());
last_vis = next_vis;
last = next;
continue;
@@ -259,18 +275,6 @@ fn contiguous_range_for_comment(
}
}
-fn fold_range_for_where_clause(where_clause: ast::WhereClause) -> Option<TextRange> {
- let first_where_pred = where_clause.predicates().next();
- let last_where_pred = where_clause.predicates().last();
-
- if first_where_pred != last_where_pred {
- let start = where_clause.where_token()?.text_range().end();
- let end = where_clause.syntax().text_range().end();
- return Some(TextRange::new(start, end));
- }
- None
-}
-
fn fold_range_for_multiline_match_arm(match_arm: ast::MatchArm) -> Option<TextRange> {
if fold_kind(match_arm.expr()?.syntax().kind()).is_some() {
None
@@ -307,16 +311,19 @@ mod tests {
let kind = match fold.kind {
FoldKind::Comment => "comment",
FoldKind::Imports => "imports",
- FoldKind::Mods => "mods",
+ FoldKind::Modules => "mods",
FoldKind::Block => "block",
FoldKind::ArgList => "arglist",
FoldKind::Region => "region",
FoldKind::Consts => "consts",
FoldKind::Statics => "statics",
+ FoldKind::TypeAliases => "typealiases",
FoldKind::Array => "array",
FoldKind::WhereClause => "whereclause",
FoldKind::ReturnType => "returntype",
FoldKind::MatchArm => "matcharm",
+ FoldKind::TraitAliases => "traitaliases",
+ FoldKind::ExternCrates => "externcrates",
};
assert_eq!(kind, &attr.unwrap());
}
@@ -594,19 +601,18 @@ static SECOND_STATIC: &str = "second";</fold>
#[test]
fn fold_where_clause() {
- // fold multi-line and don't fold single line.
check(
r#"
fn foo()
-where<fold whereclause>
+<fold whereclause>where
A: Foo,
B: Foo,
C: Foo,
D: Foo,</fold> {}
fn bar()
-where
- A: Bar, {}
+<fold whereclause>where
+ A: Bar,</fold> {}
"#,
)
}
@@ -624,4 +630,16 @@ fn bar() -> (bool, bool) { (true, true) }
"#,
)
}
+
+ #[test]
+ fn fold_generics() {
+ check(
+ r#"
+type Foo<T, U> = foo<fold arglist><
+ T,
+ U,
+></fold>;
+"#,
+ )
+ }
}
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index 574803fb9e..29fc68bb50 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -291,13 +291,14 @@ fn handle_control_flow_keywords(
token: &SyntaxToken,
) -> Option<Vec<NavigationTarget>> {
match token.kind() {
- // For `fn` / `loop` / `while` / `for` / `async`, return the keyword it self,
+ // For `fn` / `loop` / `while` / `for` / `async` / `match`, return the keyword itself,
// so that VSCode will find the references when using `ctrl + click`
T![fn] | T![async] | T![try] | T![return] => nav_for_exit_points(sema, token),
T![loop] | T![while] | T![break] | T![continue] => nav_for_break_points(sema, token),
T![for] if token.parent().and_then(ast::ForExpr::cast).is_some() => {
nav_for_break_points(sema, token)
}
+ T![match] | T![=>] | T![if] => nav_for_branch_exit_points(sema, token),
_ => None,
}
}
@@ -407,6 +408,91 @@ fn nav_for_exit_points(
Some(navs)
}
+pub(crate) fn find_branch_root(
+ sema: &Semantics<'_, RootDatabase>,
+ token: &SyntaxToken,
+) -> Vec<SyntaxNode> {
+ let find_nodes = |node_filter: fn(SyntaxNode) -> Option<SyntaxNode>| {
+ sema.descend_into_macros(token.clone())
+ .into_iter()
+ .filter_map(|token| node_filter(token.parent()?))
+ .collect_vec()
+ };
+
+ match token.kind() {
+ T![match] => find_nodes(|node| Some(ast::MatchExpr::cast(node)?.syntax().clone())),
+ T![=>] => find_nodes(|node| Some(ast::MatchArm::cast(node)?.syntax().clone())),
+ T![if] => find_nodes(|node| {
+ let if_expr = ast::IfExpr::cast(node)?;
+
+ let root_if = iter::successors(Some(if_expr.clone()), |if_expr| {
+ let parent_if = if_expr.syntax().parent().and_then(ast::IfExpr::cast)?;
+ let ast::ElseBranch::IfExpr(else_branch) = parent_if.else_branch()? else {
+ return None;
+ };
+
+ (else_branch.syntax() == if_expr.syntax()).then_some(parent_if)
+ })
+ .last()?;
+
+ Some(root_if.syntax().clone())
+ }),
+ _ => vec![],
+ }
+}
+
+fn nav_for_branch_exit_points(
+ sema: &Semantics<'_, RootDatabase>,
+ token: &SyntaxToken,
+) -> Option<Vec<NavigationTarget>> {
+ let db = sema.db;
+
+ let navs = match token.kind() {
+ T![match] => find_branch_root(sema, token)
+ .into_iter()
+ .filter_map(|node| {
+ let file_id = sema.hir_file_for(&node);
+ let match_expr = ast::MatchExpr::cast(node)?;
+ let focus_range = match_expr.match_token()?.text_range();
+ let match_expr_in_file = InFile::new(file_id, match_expr.into());
+ Some(expr_to_nav(db, match_expr_in_file, Some(focus_range)))
+ })
+ .flatten()
+ .collect_vec(),
+
+ T![=>] => find_branch_root(sema, token)
+ .into_iter()
+ .filter_map(|node| {
+ let match_arm = ast::MatchArm::cast(node)?;
+ let match_expr = sema
+ .ancestors_with_macros(match_arm.syntax().clone())
+ .find_map(ast::MatchExpr::cast)?;
+ let file_id = sema.hir_file_for(match_expr.syntax());
+ let focus_range = match_arm.fat_arrow_token()?.text_range();
+ let match_expr_in_file = InFile::new(file_id, match_expr.into());
+ Some(expr_to_nav(db, match_expr_in_file, Some(focus_range)))
+ })
+ .flatten()
+ .collect_vec(),
+
+ T![if] => find_branch_root(sema, token)
+ .into_iter()
+ .filter_map(|node| {
+ let file_id = sema.hir_file_for(&node);
+ let if_expr = ast::IfExpr::cast(node)?;
+ let focus_range = if_expr.if_token()?.text_range();
+ let if_expr_in_file = InFile::new(file_id, if_expr.into());
+ Some(expr_to_nav(db, if_expr_in_file, Some(focus_range)))
+ })
+ .flatten()
+ .collect_vec(),
+
+ _ => return Some(Vec::new()),
+ };
+
+ Some(navs)
+}
+
pub(crate) fn find_loops(
sema: &Semantics<'_, RootDatabase>,
token: &SyntaxToken,
@@ -996,7 +1082,7 @@ macro_rules! define_fn {
}
define_fn!();
-//^^^^^^^^^^^^^
+//^^^^^^^^^^
fn bar() {
$0foo();
}
@@ -3142,7 +3228,7 @@ mod bar {
use crate::m;
m!();
- // ^^^^^
+ // ^^
fn qux() {
Foo$0;
@@ -3614,4 +3700,227 @@ fn foo() {
"#,
);
}
+
+ #[test]
+ fn goto_def_for_match_keyword() {
+ check(
+ r#"
+fn main() {
+ match$0 0 {
+ // ^^^^^
+ 0 => {},
+ _ => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_match_arm_fat_arrow() {
+ check(
+ r#"
+fn main() {
+ match 0 {
+ 0 =>$0 {},
+ // ^^
+ _ => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_if_keyword() {
+ check(
+ r#"
+fn main() {
+ if$0 true {
+ // ^^
+ ()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_match_nested_in_if() {
+ check(
+ r#"
+fn main() {
+ if true {
+ match$0 0 {
+ // ^^^^^
+ 0 => {},
+ _ => {},
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_multiple_match_expressions() {
+ check(
+ r#"
+fn main() {
+ match 0 {
+ 0 => {},
+ _ => {},
+ };
+
+ match$0 1 {
+ // ^^^^^
+ 1 => {},
+ _ => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_nested_match_expressions() {
+ check(
+ r#"
+fn main() {
+ match 0 {
+ 0 => match$0 1 {
+ // ^^^^^
+ 1 => {},
+ _ => {},
+ },
+ _ => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_if_else_chains() {
+ check(
+ r#"
+fn main() {
+ if true {
+ // ^^
+ ()
+ } else if$0 false {
+ ()
+ } else {
+ ()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_match_with_guards() {
+ check(
+ r#"
+fn main() {
+ match 42 {
+ x if x > 0 =>$0 {},
+ // ^^
+ _ => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_match_with_macro_arm() {
+ check(
+ r#"
+macro_rules! arm {
+ () => { 0 => {} };
+}
+
+fn main() {
+ match$0 0 {
+ // ^^^^^
+ arm!(),
+ _ => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_const_from_match_pat_with_tuple_struct() {
+ check(
+ r#"
+struct Tag(u8);
+struct Path {}
+
+const Path: u8 = 0;
+ // ^^^^
+fn main() {
+ match Tag(Path) {
+ Tag(Path$0) => {}
+ _ => {}
+ }
+}
+
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_const_from_match_pat() {
+ check(
+ r#"
+type T1 = u8;
+const T1: u8 = 0;
+ // ^^
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ _ => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_struct_from_match_pat() {
+ check(
+ r#"
+struct T1;
+ // ^^
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ _ => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_goto_trait_from_match_pat() {
+ check(
+ r#"
+trait T1 {}
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ // ^^
+ _ => {}
+ }
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs
index 86d72fefe0..b80e81d39c 100644
--- a/crates/ide/src/goto_type_definition.rs
+++ b/crates/ide/src/goto_type_definition.rs
@@ -70,11 +70,10 @@ pub(crate) fn goto_type_definition(
}
let range = token.text_range();
- sema.descend_into_macros_no_opaque(token,false)
+ sema.descend_into_macros_no_opaque(token, false)
.into_iter()
.filter_map(|token| {
- sema
- .token_ancestors_with_macros(token.value)
+ sema.token_ancestors_with_macros(token.value)
// When `token` is within a macro call, we can't determine its type. Don't continue
// this traversal because otherwise we'll end up returning the type of *that* macro
// call, which is not what we want in general.
@@ -103,7 +102,6 @@ pub(crate) fn goto_type_definition(
_ => return None,
}
};
-
Some(ty)
})
})
diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs
index aa947921a9..356bd69aa4 100644
--- a/crates/ide/src/highlight_related.rs
+++ b/crates/ide/src/highlight_related.rs
@@ -37,8 +37,11 @@ pub struct HighlightRelatedConfig {
pub break_points: bool,
pub closure_captures: bool,
pub yield_points: bool,
+ pub branch_exit_points: bool,
}
+type HighlightMap = FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>;
+
// Feature: Highlight Related
//
// Highlights constructs related to the thing under the cursor:
@@ -64,7 +67,7 @@ pub(crate) fn highlight_related(
let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?`
- T![->] => 4,
+ T![->] | T![=>] => 4,
kind if kind.is_keyword(file_id.edition(sema.db)) => 3,
IDENT | INT_NUMBER => 2,
T![|] => 1,
@@ -78,6 +81,9 @@ pub(crate) fn highlight_related(
T![fn] | T![return] | T![->] if config.exit_points => {
highlight_exit_points(sema, token).remove(&file_id)
}
+ T![match] | T![=>] | T![if] if config.branch_exit_points => {
+ highlight_branch_exit_points(sema, token).remove(&file_id)
+ }
T![await] | T![async] if config.yield_points => {
highlight_yield_points(sema, token).remove(&file_id)
}
@@ -300,11 +306,93 @@ fn highlight_references(
if res.is_empty() { None } else { Some(res.into_iter().collect()) }
}
+pub(crate) fn highlight_branch_exit_points(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> {
+ let mut highlights: HighlightMap = FxHashMap::default();
+
+ let push_to_highlights = |file_id, range, highlights: &mut HighlightMap| {
+ if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) {
+ let hrange = HighlightedRange { category: ReferenceCategory::empty(), range };
+ highlights.entry(file_id).or_default().insert(hrange);
+ }
+ };
+
+ let push_tail_expr = |tail: Option<ast::Expr>, highlights: &mut HighlightMap| {
+ let Some(tail) = tail else {
+ return;
+ };
+
+ for_each_tail_expr(&tail, &mut |tail| {
+ let file_id = sema.hir_file_for(tail.syntax());
+ let range = tail.syntax().text_range();
+ push_to_highlights(file_id, Some(range), highlights);
+ });
+ };
+
+ let nodes = goto_definition::find_branch_root(sema, &token).into_iter();
+ match token.kind() {
+ T![match] => {
+ for match_expr in nodes.filter_map(ast::MatchExpr::cast) {
+ let file_id = sema.hir_file_for(match_expr.syntax());
+ let range = match_expr.match_token().map(|token| token.text_range());
+ push_to_highlights(file_id, range, &mut highlights);
+
+ let Some(arm_list) = match_expr.match_arm_list() else {
+ continue;
+ };
+ for arm in arm_list.arms() {
+ push_tail_expr(arm.expr(), &mut highlights);
+ }
+ }
+ }
+ T![=>] => {
+ for arm in nodes.filter_map(ast::MatchArm::cast) {
+ let file_id = sema.hir_file_for(arm.syntax());
+ let range = arm.fat_arrow_token().map(|token| token.text_range());
+ push_to_highlights(file_id, range, &mut highlights);
+
+ push_tail_expr(arm.expr(), &mut highlights);
+ }
+ }
+ T![if] => {
+ for mut if_to_process in nodes.map(ast::IfExpr::cast) {
+ while let Some(cur_if) = if_to_process.take() {
+ let file_id = sema.hir_file_for(cur_if.syntax());
+
+ let if_kw_range = cur_if.if_token().map(|token| token.text_range());
+ push_to_highlights(file_id, if_kw_range, &mut highlights);
+
+ if let Some(then_block) = cur_if.then_branch() {
+ push_tail_expr(Some(then_block.into()), &mut highlights);
+ }
+
+ match cur_if.else_branch() {
+ Some(ast::ElseBranch::Block(else_block)) => {
+ push_tail_expr(Some(else_block.into()), &mut highlights);
+ if_to_process = None;
+ }
+ Some(ast::ElseBranch::IfExpr(nested_if)) => if_to_process = Some(nested_if),
+ None => if_to_process = None,
+ }
+ }
+ }
+ }
+ _ => {}
+ }
+
+ highlights
+ .into_iter()
+ .map(|(file_id, ranges)| (file_id, ranges.into_iter().collect()))
+ .collect()
+}
+
fn hl_exit_points(
sema: &Semantics<'_, RootDatabase>,
def_token: Option<SyntaxToken>,
body: ast::Expr,
-) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> {
+) -> Option<HighlightMap> {
let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default();
let mut push_to_highlights = |file_id, range| {
@@ -411,7 +499,7 @@ pub(crate) fn highlight_break_points(
loop_token: Option<SyntaxToken>,
label: Option<ast::Label>,
expr: ast::Expr,
- ) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> {
+ ) -> Option<HighlightMap> {
let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default();
let mut push_to_highlights = |file_id, range| {
@@ -504,7 +592,7 @@ pub(crate) fn highlight_yield_points(
sema: &Semantics<'_, RootDatabase>,
async_token: Option<SyntaxToken>,
body: Option<ast::Expr>,
- ) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> {
+ ) -> Option<HighlightMap> {
let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default();
let mut push_to_highlights = |file_id, range| {
@@ -597,10 +685,7 @@ fn original_frange(
InFile::new(file_id, text_range?).original_node_file_range_opt(db).map(|(frange, _)| frange)
}
-fn merge_map(
- res: &mut FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>,
- new: Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>>,
-) {
+fn merge_map(res: &mut HighlightMap, new: Option<HighlightMap>) {
let Some(new) = new else {
return;
};
@@ -750,6 +835,7 @@ mod tests {
references: true,
closure_captures: true,
yield_points: true,
+ branch_exit_points: true,
};
#[track_caller]
@@ -2135,6 +2221,62 @@ fn main() {
}
#[test]
+ fn nested_match() {
+ check(
+ r#"
+fn main() {
+ match$0 0 {
+ // ^^^^^
+ 0 => match 1 {
+ 1 => 2,
+ // ^
+ _ => 3,
+ // ^
+ },
+ _ => 4,
+ // ^
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn single_arm_highlight() {
+ check(
+ r#"
+fn main() {
+ match 0 {
+ 0 =>$0 {
+ // ^^
+ let x = 1;
+ x
+ // ^
+ }
+ _ => 2,
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn no_branches_when_disabled() {
+ let config = HighlightRelatedConfig { branch_exit_points: false, ..ENABLED_CONFIG };
+ check_with_config(
+ r#"
+fn main() {
+ match$0 0 {
+ 0 => 1,
+ _ => 2,
+ }
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
fn asm() {
check(
r#"
@@ -2165,6 +2307,200 @@ pub unsafe fn bootstrap() -> ! {
}
#[test]
+ fn complex_arms_highlight() {
+ check(
+ r#"
+fn calculate(n: i32) -> i32 { n * 2 }
+
+fn main() {
+ match$0 Some(1) {
+ // ^^^^^
+ Some(x) => match x {
+ 0 => { let y = x; y },
+ // ^
+ 1 => calculate(x),
+ //^^^^^^^^^^^^
+ _ => (|| 6)(),
+ // ^^^^^^^^
+ },
+ None => loop {
+ break 5;
+ // ^^^^^^^
+ },
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn match_in_macro_highlight() {
+ check(
+ r#"
+macro_rules! M {
+ ($e:expr) => { $e };
+}
+
+fn main() {
+ M!{
+ match$0 Some(1) {
+ // ^^^^^
+ Some(x) => x,
+ // ^
+ None => 0,
+ // ^
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn match_in_macro_highlight_2() {
+ check(
+ r#"
+macro_rules! match_ast {
+ (match $node:ident { $($tt:tt)* }) => { $crate::match_ast!(match ($node) { $($tt)* }) };
+
+ (match ($node:expr) {
+ $( $( $path:ident )::+ ($it:pat) => $res:expr, )*
+ _ => $catch_all:expr $(,)?
+ }) => {{
+ $( if let Some($it) = $($path::)+cast($node.clone()) { $res } else )*
+ { $catch_all }
+ }};
+}
+
+fn main() {
+ match_ast! {
+ match$0 Some(1) {
+ Some(x) => x,
+ }
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn nested_if_else() {
+ check(
+ r#"
+fn main() {
+ if$0 true {
+ // ^^
+ if false {
+ 1
+ // ^
+ } else {
+ 2
+ // ^
+ }
+ } else {
+ 3
+ // ^
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn if_else_if_highlight() {
+ check(
+ r#"
+fn main() {
+ if$0 true {
+ // ^^
+ 1
+ // ^
+ } else if false {
+ // ^^
+ 2
+ // ^
+ } else {
+ 3
+ // ^
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn complex_if_branches() {
+ check(
+ r#"
+fn calculate(n: i32) -> i32 { n * 2 }
+
+fn main() {
+ if$0 true {
+ // ^^
+ let x = 5;
+ calculate(x)
+ // ^^^^^^^^^^^^
+ } else if false {
+ // ^^
+ (|| 10)()
+ // ^^^^^^^^^
+ } else {
+ loop {
+ break 15;
+ // ^^^^^^^^
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn if_in_macro_highlight() {
+ check(
+ r#"
+macro_rules! M {
+ ($e:expr) => { $e };
+}
+
+fn main() {
+ M!{
+ if$0 true {
+ // ^^
+ 5
+ // ^
+ } else {
+ 10
+ // ^^
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn match_in_macro() {
+ // We should not highlight the outer `match` expression.
+ check(
+ r#"
+macro_rules! M {
+ (match) => { 1 };
+}
+
+fn main() {
+ match Some(1) {
+ Some(x) => x,
+ None => {
+ M!(match$0)
+ }
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
fn labeled_block_tail_expr() {
check(
r#"
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index a281a49152..f63499aa0f 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -10927,3 +10927,34 @@ fn main() {
"#]],
);
}
+
+#[test]
+fn keyword_inside_link() {
+ check(
+ r#"
+enum Foo {
+ MacroExpansion,
+}
+
+/// I return a [macro expansion](Foo::MacroExpansion).
+fn bar$0() -> Foo {
+ Foo::MacroExpansion
+}
+ "#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ fn bar() -> Foo
+ ```
+
+ ---
+
+ I return a [macro expansion](https://docs.rs/ra_test_fixture/*/ra_test_fixture/enum.Foo.html#variant.MacroExpansion).
+ "#]],
+ );
+}
diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs
index f2844a2eaa..49b43fc37f 100644
--- a/crates/ide/src/inlay_hints/adjustment.rs
+++ b/crates/ide/src/inlay_hints/adjustment.rs
@@ -109,50 +109,90 @@ pub(super) fn hints(
}
has_adjustments = true;
- // FIXME: Add some nicer tooltips to each of these
- let (text, coercion) = match kind {
+ let (text, coercion, detailed_tooltip) = match kind {
Adjust::NeverToAny if config.adjustment_hints == AdjustmentHints::Always => {
allow_edit = false;
- ("<never-to-any>", "never to any")
- }
- Adjust::Deref(None) => ("*", "dereference"),
- Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => {
- ("*", "`Deref` dereference")
- }
- Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => {
- ("*", "`DerefMut` dereference")
- }
- Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => ("&", "borrow"),
- Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => ("&mut ", "unique borrow"),
- Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => {
- ("&raw const ", "const pointer borrow")
- }
- Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => {
- ("&raw mut ", "mut pointer borrow")
+ (
+ "<never-to-any>",
+ "never to any",
+ "Coerces the never type `!` into any other type. This happens in code paths that never return, like after `panic!()` or `return`.",
+ )
}
+ Adjust::Deref(None) => (
+ "*",
+ "dereference",
+ "Built-in dereference of a reference to access the underlying value. The compiler inserts `*` to get the value from `&T`.",
+ ),
+ Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => (
+ "*",
+ "`Deref` dereference",
+ "Dereference via the `Deref` trait. Used for types like `Box<T>` or `Rc<T>` so they act like plain `T`.",
+ ),
+ Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => (
+ "*",
+ "`DerefMut` dereference",
+ "Mutable dereference using the `DerefMut` trait. Enables smart pointers to give mutable access to their inner values.",
+ ),
+ Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => (
+ "&",
+ "shared borrow",
+ "Inserts `&` to create a shared reference. Lets you use a value without moving or cloning it.",
+ ),
+ Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => (
+ "&mut ",
+ "mutable borrow",
+ "Inserts `&mut` to create a unique, mutable reference. Lets you modify a value without taking ownership.",
+ ),
+ Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => (
+ "&raw const ",
+ "const raw pointer",
+ "Converts a reference to a raw const pointer `*const T`. Often used when working with FFI or unsafe code.",
+ ),
+ Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => (
+ "&raw mut ",
+ "mut raw pointer",
+ "Converts a mutable reference to a raw mutable pointer `*mut T`. Allows mutation in unsafe contexts.",
+ ),
// some of these could be represented via `as` casts, but that's not too nice and
// handling everything as a prefix expr makes the `(` and `)` insertion easier
Adjust::Pointer(cast) if config.adjustment_hints == AdjustmentHints::Always => {
allow_edit = false;
match cast {
- PointerCast::ReifyFnPointer => {
- ("<fn-item-to-fn-pointer>", "fn item to fn pointer")
- }
+ PointerCast::ReifyFnPointer => (
+ "<fn-item-to-fn-pointer>",
+ "fn item to fn pointer",
+ "Converts a named function to a function pointer `fn()`. Useful when passing functions as values.",
+ ),
PointerCast::UnsafeFnPointer => (
"<safe-fn-pointer-to-unsafe-fn-pointer>",
"safe fn pointer to unsafe fn pointer",
+ "Coerces a safe function pointer to an unsafe one. Allows calling it in an unsafe context.",
+ ),
+ PointerCast::ClosureFnPointer(Safety::Unsafe) => (
+ "<closure-to-unsafe-fn-pointer>",
+ "closure to unsafe fn pointer",
+ "Converts a non-capturing closure to an unsafe function pointer. Required for use in `extern` or unsafe APIs.",
+ ),
+ PointerCast::ClosureFnPointer(Safety::Safe) => (
+ "<closure-to-fn-pointer>",
+ "closure to fn pointer",
+ "Converts a non-capturing closure to a function pointer. Lets closures behave like plain functions.",
+ ),
+ PointerCast::MutToConstPointer => (
+ "<mut-ptr-to-const-ptr>",
+ "mut ptr to const ptr",
+ "Coerces `*mut T` to `*const T`. Safe because const pointers restrict what you can do.",
+ ),
+ PointerCast::ArrayToPointer => (
+ "<array-ptr-to-element-ptr>",
+ "array to pointer",
+ "Converts an array to a pointer to its first element. Similar to how arrays decay to pointers in C.",
+ ),
+ PointerCast::Unsize => (
+ "<unsize>",
+ "unsize coercion",
+ "Converts a sized type to an unsized one. Used for things like turning arrays into slices or concrete types into trait objects.",
),
- PointerCast::ClosureFnPointer(Safety::Unsafe) => {
- ("<closure-to-unsafe-fn-pointer>", "closure to unsafe fn pointer")
- }
- PointerCast::ClosureFnPointer(Safety::Safe) => {
- ("<closure-to-fn-pointer>", "closure to fn pointer")
- }
- PointerCast::MutToConstPointer => {
- ("<mut-ptr-to-const-ptr>", "mut ptr to const ptr")
- }
- PointerCast::ArrayToPointer => ("<array-ptr-to-element-ptr>", ""),
- PointerCast::Unsize => ("<unsize>", "unsize"),
}
}
_ => continue,
@@ -162,9 +202,11 @@ pub(super) fn hints(
linked_location: None,
tooltip: Some(config.lazy_tooltip(|| {
InlayTooltip::Markdown(format!(
- "`{}` → `{}` ({coercion} coercion)",
+ "`{}` → `{}`\n\n**{}**\n\n{}",
source.display(sema.db, display_target),
target.display(sema.db, display_target),
+ coercion,
+ detailed_tooltip
))
})),
};
diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs
index 36fdd90e8a..729349365e 100644
--- a/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/crates/ide/src/inlay_hints/bind_pat.rs
@@ -380,9 +380,9 @@ fn main() {
let foo = foo3();
// ^^^ impl Fn(f64, f64) -> u32
let foo = foo4();
- // ^^^ &'static (dyn Fn(f64, f64) -> u32 + 'static)
+ // ^^^ &'static dyn Fn(f64, f64) -> u32
let foo = foo5();
- // ^^^ &'static (dyn Fn(&(dyn Fn(f64, f64) -> u32 + 'static), f64) -> u32 + 'static)
+ // ^^^ &'static dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32
let foo = foo6();
// ^^^ impl Fn(f64, f64) -> u32
let foo = foo7();
@@ -413,7 +413,7 @@ fn main() {
let foo = foo3();
// ^^^ impl Fn(f64, f64) -> u32
let foo = foo4();
- // ^^^ &'static (dyn Fn(f64, f64) -> u32 + 'static)
+ // ^^^ &'static dyn Fn(f64, f64) -> u32
let foo = foo5();
let foo = foo6();
let foo = foo7();
diff --git a/crates/ide/src/inlay_hints/bounds.rs b/crates/ide/src/inlay_hints/bounds.rs
index b9a98f88be..f0003dae3f 100644
--- a/crates/ide/src/inlay_hints/bounds.rs
+++ b/crates/ide/src/inlay_hints/bounds.rs
@@ -143,7 +143,7 @@ fn foo<T>() {}
file_id: FileId(
1,
),
- range: 135..140,
+ range: 446..451,
},
),
),
diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs
index ca3a982760..05253b6794 100644
--- a/crates/ide/src/inlay_hints/closing_brace.rs
+++ b/crates/ide/src/inlay_hints/closing_brace.rs
@@ -91,8 +91,6 @@ pub(super) fn hints(
match_ast! {
match parent {
ast::Fn(it) => {
- // FIXME: this could include parameters, but `HirDisplay` prints too much info
- // and doesn't respect the max length either, so the hints end up way too long
(format!("fn {}", it.name()?), it.name().map(name))
},
ast::Static(it) => (format!("static {}", it.name()?), it.name().map(name)),
@@ -193,7 +191,7 @@ impl Tr for () {
//^ impl Tr for ()
impl dyn Tr {
}
-//^ impl dyn Tr + 'static
+//^ impl dyn Tr
static S0: () = 0;
static S1: () = {};
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs
index 4c7c597e68..7dc18141bd 100644
--- a/crates/ide/src/navigation_target.rs
+++ b/crates/ide/src/navigation_target.rs
@@ -844,7 +844,7 @@ pub(crate) fn orig_range_with_focus_r(
// *should* contain the name
_ => {
let kind = call_kind();
- let range = kind.clone().original_call_range_with_body(db);
+ let range = kind.clone().original_call_range_with_input(db);
//If the focus range is in the attribute/derive body, we
// need to point the call site to the entire body, if not, fall back
// to the name range of the attribute/derive call
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index c6a323d408..fe874bc99b 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -21,6 +21,7 @@ use hir::{PathResolution, Semantics};
use ide_db::{
FileId, RootDatabase,
defs::{Definition, NameClass, NameRefClass},
+ helpers::pick_best_token,
search::{ReferenceCategory, SearchScope, UsageSearchResult},
};
use itertools::Itertools;
@@ -397,7 +398,11 @@ fn handle_control_flow_keywords(
.attach_first_edition(file_id)
.map(|it| it.edition(sema.db))
.unwrap_or(Edition::CURRENT);
- let token = file.syntax().token_at_offset(offset).find(|t| t.kind().is_keyword(edition))?;
+ let token = pick_best_token(file.syntax().token_at_offset(offset), |kind| match kind {
+ _ if kind.is_keyword(edition) => 4,
+ T![=>] => 3,
+ _ => 1,
+ })?;
let references = match token.kind() {
T![fn] | T![return] | T![try] => highlight_related::highlight_exit_points(sema, token),
@@ -408,6 +413,7 @@ fn handle_control_flow_keywords(
T![for] if token.parent().and_then(ast::ForExpr::cast).is_some() => {
highlight_related::highlight_break_points(sema, token)
}
+ T![if] | T![=>] | T![match] => highlight_related::highlight_branch_exit_points(sema, token),
_ => return None,
}
.into_iter()
@@ -1344,6 +1350,159 @@ impl Foo {
);
}
+ #[test]
+ fn test_highlight_if_branches() {
+ check(
+ r#"
+fn main() {
+ let x = if$0 true {
+ 1
+ } else if false {
+ 2
+ } else {
+ 3
+ };
+
+ println!("x: {}", x);
+}
+"#,
+ expect![[r#"
+ FileId(0) 24..26
+ FileId(0) 42..43
+ FileId(0) 55..57
+ FileId(0) 74..75
+ FileId(0) 97..98
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_highlight_match_branches() {
+ check(
+ r#"
+fn main() {
+ $0match Some(42) {
+ Some(x) if x > 0 => println!("positive"),
+ Some(0) => println!("zero"),
+ Some(_) => println!("negative"),
+ None => println!("none"),
+ };
+}
+"#,
+ expect![[r#"
+ FileId(0) 16..21
+ FileId(0) 61..81
+ FileId(0) 102..118
+ FileId(0) 139..159
+ FileId(0) 177..193
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_highlight_match_arm_arrow() {
+ check(
+ r#"
+fn main() {
+ match Some(42) {
+ Some(x) if x > 0 $0=> println!("positive"),
+ Some(0) => println!("zero"),
+ Some(_) => println!("negative"),
+ None => println!("none"),
+ }
+}
+"#,
+ expect![[r#"
+ FileId(0) 58..60
+ FileId(0) 61..81
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_highlight_nested_branches() {
+ check(
+ r#"
+fn main() {
+ let x = $0if true {
+ if false {
+ 1
+ } else {
+ match Some(42) {
+ Some(_) => 2,
+ None => 3,
+ }
+ }
+ } else {
+ 4
+ };
+
+ println!("x: {}", x);
+}
+"#,
+ expect![[r#"
+ FileId(0) 24..26
+ FileId(0) 65..66
+ FileId(0) 140..141
+ FileId(0) 167..168
+ FileId(0) 215..216
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_highlight_match_with_complex_guards() {
+ check(
+ r#"
+fn main() {
+ let x = $0match (x, y) {
+ (a, b) if a > b && a % 2 == 0 => 1,
+ (a, b) if a < b || b % 2 == 1 => 2,
+ (a, _) if a > 40 => 3,
+ _ => 4,
+ };
+
+ println!("x: {}", x);
+}
+"#,
+ expect![[r#"
+ FileId(0) 24..29
+ FileId(0) 80..81
+ FileId(0) 124..125
+ FileId(0) 155..156
+ FileId(0) 171..172
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_highlight_mixed_if_match_expressions() {
+ check(
+ r#"
+fn main() {
+ let x = $0if let Some(x) = Some(42) {
+ 1
+ } else if let None = None {
+ 2
+ } else {
+ match 42 {
+ 0 => 3,
+ _ => 4,
+ }
+ };
+}
+"#,
+ expect![[r#"
+ FileId(0) 24..26
+ FileId(0) 60..61
+ FileId(0) 73..75
+ FileId(0) 102..103
+ FileId(0) 153..154
+ FileId(0) 173..174
+ "#]],
+ );
+ }
+
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
check_with_scope(ra_fixture, None, expect)
}
@@ -2867,4 +3026,66 @@ const FOO$0: i32 = 0;
"#]],
);
}
+
+ #[test]
+ fn test_highlight_if_let_match_combined() {
+ check(
+ r#"
+enum MyEnum { A(i32), B(String), C }
+
+fn main() {
+ let val = MyEnum::A(42);
+
+ let x = $0if let MyEnum::A(x) = val {
+ 1
+ } else if let MyEnum::B(s) = val {
+ 2
+ } else {
+ match val {
+ MyEnum::C => 3,
+ _ => 4,
+ }
+ };
+}
+"#,
+ expect![[r#"
+ FileId(0) 92..94
+ FileId(0) 128..129
+ FileId(0) 141..143
+ FileId(0) 177..178
+ FileId(0) 237..238
+ FileId(0) 257..258
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_highlight_nested_match_expressions() {
+ check(
+ r#"
+enum Outer { A(Inner), B }
+enum Inner { X, Y(i32) }
+
+fn main() {
+ let val = Outer::A(Inner::Y(42));
+
+ $0match val {
+ Outer::A(inner) => match inner {
+ Inner::X => println!("Inner::X"),
+ Inner::Y(n) if n > 0 => println!("Inner::Y positive: {}", n),
+ Inner::Y(_) => println!("Inner::Y non-positive"),
+ },
+ Outer::B => println!("Outer::B"),
+ }
+}
+"#,
+ expect![[r#"
+ FileId(0) 108..113
+ FileId(0) 185..205
+ FileId(0) 243..279
+ FileId(0) 308..341
+ FileId(0) 374..394
+ "#]],
+ );
+ }
}
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index f48150b369..9d1a5bae96 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -351,7 +351,7 @@ pub(crate) fn runnable_fn(
)
.call_site();
- let file_range = fn_source.syntax().original_file_range_with_macro_call_body(sema.db);
+ let file_range = fn_source.syntax().original_file_range_with_macro_call_input(sema.db);
let update_test =
UpdateTest::find_snapshot_macro(sema, &fn_source.file_syntax(sema.db), file_range);
@@ -425,7 +425,7 @@ pub(crate) fn runnable_impl(
let impl_source = sema.source(*def)?;
let impl_syntax = impl_source.syntax();
- let file_range = impl_syntax.original_file_range_with_macro_call_body(sema.db);
+ let file_range = impl_syntax.original_file_range_with_macro_call_input(sema.db);
let update_test =
UpdateTest::find_snapshot_macro(sema, &impl_syntax.file_syntax(sema.db), file_range);
@@ -1241,10 +1241,10 @@ generate_main!();
[
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..345, name: \"\", kind: Module })",
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 282..312, focus_range: 286..291, name: \"tests\", kind: Module, description: \"mod tests\" })",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..310, name: \"foo_test\", kind: Function })",
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..326, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 313..326, name: \"foo_test2\", kind: Function }, true)",
- "(Bin, NavigationTarget { file_id: FileId(0), full_range: 327..344, name: \"main\", kind: Function })",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..307, name: \"foo_test\", kind: Function })",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"foo_test2\", kind: Function }, true)",
+ "(Bin, NavigationTarget { file_id: FileId(0), full_range: 327..341, name: \"main\", kind: Function })",
]
"#]],
);
@@ -1272,10 +1272,10 @@ foo!();
"#,
expect![[r#"
[
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo0\", kind: Function }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo1\", kind: Function }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo2\", kind: Function }, true)",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo0\", kind: Function }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo1\", kind: Function }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo2\", kind: Function }, true)",
]
"#]],
);
diff --git a/crates/ide/src/view_crate_graph.rs b/crates/ide/src/view_crate_graph.rs
index 7985279679..25deffe10e 100644
--- a/crates/ide/src/view_crate_graph.rs
+++ b/crates/ide/src/view_crate_graph.rs
@@ -79,7 +79,7 @@ impl<'a> dot::Labeller<'a, Crate, Edge<'a>> for DotCrateGraph<'_> {
}
fn node_id(&'a self, n: &Crate) -> Id<'a> {
- let id = n.as_id().as_u32();
+ let id = n.as_id().index();
Id::new(format!("_{id:?}")).unwrap()
}
diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml
index 9ff656cb74..81b6703dee 100644
--- a/crates/intern/Cargo.toml
+++ b/crates/intern/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
diff --git a/crates/intern/src/symbol/symbols.rs b/crates/intern/src/symbol/symbols.rs
index adc581309d..1ccd20c25e 100644
--- a/crates/intern/src/symbol/symbols.rs
+++ b/crates/intern/src/symbol/symbols.rs
@@ -438,6 +438,8 @@ define_symbols! {
shr,
simd,
sized,
+ meta_sized,
+ pointee_sized,
skip,
slice_len_fn,
Some,
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 52f59679b5..26ee698af0 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -11,7 +11,7 @@ use hir_expand::proc_macro::{
};
use ide_db::{
ChangeWithProcMacros, FxHashMap, RootDatabase,
- base_db::{CrateGraphBuilder, Env, SourceRoot, SourceRootId},
+ base_db::{CrateGraphBuilder, Env, ProcMacroLoadingError, SourceRoot, SourceRootId},
prime_caches,
};
use itertools::Itertools;
@@ -69,6 +69,23 @@ pub fn load_workspace(
extra_env: &FxHashMap<String, Option<String>>,
load_config: &LoadCargoConfig,
) -> anyhow::Result<(RootDatabase, vfs::Vfs, Option<ProcMacroClient>)> {
+ let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<u16>().ok());
+ let mut db = RootDatabase::new(lru_cap);
+
+ let (vfs, proc_macro_server) = load_workspace_into_db(ws, extra_env, load_config, &mut db)?;
+
+ Ok((db, vfs, proc_macro_server))
+}
+
+// This variant of `load_workspace` allows deferring the loading of rust-analyzer
+// into an existing database, which is useful in certain third-party scenarios,
+// now that `salsa` supports extending foreign databases (e.g. `RootDatabase`).
+pub fn load_workspace_into_db(
+ ws: ProjectWorkspace,
+ extra_env: &FxHashMap<String, Option<String>>,
+ load_config: &LoadCargoConfig,
+ db: &mut RootDatabase,
+) -> anyhow::Result<(vfs::Vfs, Option<ProcMacroClient>)> {
let (sender, receiver) = unbounded();
let mut vfs = vfs::Vfs::default();
let mut loader = {
@@ -78,23 +95,27 @@ pub fn load_workspace(
tracing::debug!(?load_config, "LoadCargoConfig");
let proc_macro_server = match &load_config.with_proc_macro_server {
- ProcMacroServerChoice::Sysroot => ws
- .find_sysroot_proc_macro_srv()
- .and_then(|it| ProcMacroClient::spawn(&it, extra_env).map_err(Into::into))
- .map_err(|e| (e, true)),
+ ProcMacroServerChoice::Sysroot => ws.find_sysroot_proc_macro_srv().map(|it| {
+ it.and_then(|it| ProcMacroClient::spawn(&it, extra_env).map_err(Into::into)).map_err(
+ |e| ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str()),
+ )
+ }),
ProcMacroServerChoice::Explicit(path) => {
- ProcMacroClient::spawn(path, extra_env).map_err(Into::into).map_err(|e| (e, true))
- }
- ProcMacroServerChoice::None => {
- Err((anyhow::format_err!("proc macro server disabled"), false))
+ Some(ProcMacroClient::spawn(path, extra_env).map_err(|e| {
+ ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str())
+ }))
}
+ ProcMacroServerChoice::None => Some(Err(ProcMacroLoadingError::Disabled)),
};
match &proc_macro_server {
- Ok(server) => {
- tracing::info!(path=%server.server_path(), "Proc-macro server started")
+ Some(Ok(server)) => {
+ tracing::info!(manifest=%ws.manifest_or_root(), path=%server.server_path(), "Proc-macro server started")
}
- Err((e, _)) => {
- tracing::info!(%e, "Failed to start proc-macro server")
+ Some(Err(e)) => {
+ tracing::info!(manifest=%ws.manifest_or_root(), %e, "Failed to start proc-macro server")
+ }
+ None => {
+ tracing::info!(manifest=%ws.manifest_or_root(), "No proc-macro server started")
}
}
@@ -111,22 +132,24 @@ pub fn load_workspace(
);
let proc_macros = {
let proc_macro_server = match &proc_macro_server {
- Ok(it) => Ok(it),
- Err((e, hard_err)) => Err((e.to_string(), *hard_err)),
+ Some(Ok(it)) => Ok(it),
+ Some(Err(e)) => {
+ Err(ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str()))
+ }
+ None => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ "proc-macro-srv is not running, workspace is missing a sysroot".into(),
+ )),
};
proc_macros
.into_iter()
.map(|(crate_id, path)| {
(
crate_id,
- path.map_or_else(
- |e| Err((e, true)),
- |(_, path)| {
- proc_macro_server.as_ref().map_err(Clone::clone).and_then(
- |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
- )
- },
- ),
+ path.map_or_else(Err, |(_, path)| {
+ proc_macro_server.as_ref().map_err(Clone::clone).and_then(
+ |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
+ )
+ }),
)
})
.collect()
@@ -139,18 +162,20 @@ pub fn load_workspace(
version: 0,
});
- let db = load_crate_graph(
+ load_crate_graph_into_db(
crate_graph,
proc_macros,
project_folders.source_root_config,
&mut vfs,
&receiver,
+ db,
);
if load_config.prefill_caches {
- prime_caches::parallel_prime_caches(&db, 1, &|_| ());
+ prime_caches::parallel_prime_caches(db, 1, &|_| ());
}
- Ok((db, vfs, proc_macro_server.ok()))
+
+ Ok((vfs, proc_macro_server.and_then(Result::ok)))
}
#[derive(Default)]
@@ -391,11 +416,13 @@ pub fn load_proc_macro(
path: &AbsPath,
ignored_macros: &[Box<str>],
) -> ProcMacroLoadResult {
- let res: Result<Vec<_>, String> = (|| {
+ let res: Result<Vec<_>, _> = (|| {
let dylib = MacroDylib::new(path.to_path_buf());
- let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
+ let vec = server.load_dylib(dylib).map_err(|e| {
+ ProcMacroLoadingError::ProcMacroSrvError(format!("{e}").into_boxed_str())
+ })?;
if vec.is_empty() {
- return Err("proc macro library returned no proc macros".to_owned());
+ return Err(ProcMacroLoadingError::NoProcMacros);
}
Ok(vec
.into_iter()
@@ -412,20 +439,19 @@ pub fn load_proc_macro(
}
Err(e) => {
tracing::warn!("proc-macro loading for {path} failed: {e}");
- Err((e, true))
+ Err(e)
}
}
}
-fn load_crate_graph(
+fn load_crate_graph_into_db(
crate_graph: CrateGraphBuilder,
proc_macros: ProcMacrosBuilder,
source_root_config: SourceRootConfig,
vfs: &mut vfs::Vfs,
receiver: &Receiver<vfs::loader::Message>,
-) -> RootDatabase {
- let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<u16>().ok());
- let mut db = RootDatabase::new(lru_cap);
+ db: &mut RootDatabase,
+) {
let mut analysis_change = ChangeWithProcMacros::default();
db.enable_proc_attr_macros();
@@ -462,7 +488,6 @@ fn load_crate_graph(
analysis_change.set_proc_macros(proc_macros);
db.apply_change(analysis_change);
- db
}
fn expander_to_proc_macro(
diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml
index f3ab093bae..eef718b706 100644
--- a/crates/mbe/Cargo.toml
+++ b/crates/mbe/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/parser/Cargo.toml b/crates/parser/Cargo.toml
index c80510eedf..c7da654de6 100644
--- a/crates/parser/Cargo.toml
+++ b/crates/parser/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
drop_bomb = "0.1.5"
diff --git a/crates/parser/src/grammar/generic_params.rs b/crates/parser/src/grammar/generic_params.rs
index ea5a3bc859..55c5dc400b 100644
--- a/crates/parser/src/grammar/generic_params.rs
+++ b/crates/parser/src/grammar/generic_params.rs
@@ -122,7 +122,7 @@ fn lifetime_bounds(p: &mut Parser<'_>) {
}
// test type_param_bounds
-// struct S<T: 'a + ?Sized + (Copy) + ~const Drop>;
+// struct S<T: 'a + ?Sized + (Copy) + [const] Drop>;
pub(super) fn bounds(p: &mut Parser<'_>) {
p.expect(T![:]);
bounds_without_colon(p);
@@ -187,6 +187,11 @@ fn type_bound(p: &mut Parser<'_>) -> bool {
p.bump_any();
p.expect(T![const]);
}
+ T!['['] => {
+ p.bump_any();
+ p.expect(T![const]);
+ p.expect(T![']']);
+ }
// test const_trait_bound
// const fn foo(_: impl const Trait) {}
T![const] => {
diff --git a/crates/parser/src/lexed_str.rs b/crates/parser/src/lexed_str.rs
index 0fa9a26454..e6c92dec68 100644
--- a/crates/parser/src/lexed_str.rs
+++ b/crates/parser/src/lexed_str.rs
@@ -11,7 +11,8 @@
use std::ops;
use rustc_literal_escaper::{
- EscapeError, Mode, unescape_byte, unescape_char, unescape_mixed, unescape_unicode,
+ EscapeError, Mode, unescape_byte, unescape_byte_str, unescape_c_str, unescape_char,
+ unescape_str,
};
use crate::{
@@ -151,14 +152,14 @@ impl<'a> Converter<'a> {
self.res
}
- fn push(&mut self, kind: SyntaxKind, len: usize, err: Option<&str>) {
+ fn push(&mut self, kind: SyntaxKind, len: usize, errors: Vec<String>) {
self.res.push(kind, self.offset);
self.offset += len;
- if let Some(err) = err {
- let token = self.res.len() as u32;
- let msg = err.to_owned();
- self.res.error.push(LexError { msg, token });
+ for msg in errors {
+ if !msg.is_empty() {
+ self.res.error.push(LexError { msg, token: self.res.len() as u32 });
+ }
}
}
@@ -167,14 +168,16 @@ impl<'a> Converter<'a> {
// We drop some useful information here (see patterns with double dots `..`)
// Storing that info in `SyntaxKind` is not possible due to its layout requirements of
// being `u16` that come from `rowan::SyntaxKind`.
- let mut err = "";
+ let mut errors: Vec<String> = vec![];
let syntax_kind = {
match kind {
rustc_lexer::TokenKind::LineComment { doc_style: _ } => COMMENT,
rustc_lexer::TokenKind::BlockComment { doc_style: _, terminated } => {
if !terminated {
- err = "Missing trailing `*/` symbols to terminate the block comment";
+ errors.push(
+ "Missing trailing `*/` symbols to terminate the block comment".into(),
+ );
}
COMMENT
}
@@ -184,9 +187,9 @@ impl<'a> Converter<'a> {
invalid_infostring,
} => {
if *has_invalid_preceding_whitespace {
- err = "invalid preceding whitespace for frontmatter opening"
+ errors.push("invalid preceding whitespace for frontmatter opening".into());
} else if *invalid_infostring {
- err = "invalid infostring for frontmatter"
+ errors.push("invalid infostring for frontmatter".into());
}
FRONTMATTER
}
@@ -198,7 +201,7 @@ impl<'a> Converter<'a> {
SyntaxKind::from_keyword(token_text, self.edition).unwrap_or(IDENT)
}
rustc_lexer::TokenKind::InvalidIdent => {
- err = "Ident contains invalid characters";
+ errors.push("Ident contains invalid characters".into());
IDENT
}
@@ -206,7 +209,7 @@ impl<'a> Converter<'a> {
rustc_lexer::TokenKind::GuardedStrPrefix if self.edition.at_least_2024() => {
// FIXME: rustc does something better for recovery.
- err = "Invalid string literal (reserved syntax)";
+ errors.push("Invalid string literal (reserved syntax)".into());
ERROR
}
rustc_lexer::TokenKind::GuardedStrPrefix => {
@@ -222,12 +225,12 @@ impl<'a> Converter<'a> {
rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
if *starts_with_number {
- err = "Lifetime name cannot start with a number";
+ errors.push("Lifetime name cannot start with a number".into());
}
LIFETIME_IDENT
}
rustc_lexer::TokenKind::UnknownPrefixLifetime => {
- err = "Unknown lifetime prefix";
+ errors.push("Unknown lifetime prefix".into());
LIFETIME_IDENT
}
rustc_lexer::TokenKind::RawLifetime => LIFETIME_IDENT,
@@ -262,119 +265,128 @@ impl<'a> Converter<'a> {
rustc_lexer::TokenKind::Unknown => ERROR,
rustc_lexer::TokenKind::UnknownPrefix if token_text == "builtin" => IDENT,
rustc_lexer::TokenKind::UnknownPrefix => {
- err = "unknown literal prefix";
+ errors.push("unknown literal prefix".into());
IDENT
}
rustc_lexer::TokenKind::Eof => EOF,
}
};
- let err = if err.is_empty() { None } else { Some(err) };
- self.push(syntax_kind, token_text.len(), err);
+ self.push(syntax_kind, token_text.len(), errors);
}
fn extend_literal(&mut self, len: usize, kind: &rustc_lexer::LiteralKind) {
- let mut err = "";
+ let invalid_raw_msg = String::from("Invalid raw string literal");
+
+ let mut errors = vec![];
+ let mut no_end_quote = |c: char, kind: &str| {
+ errors.push(format!("Missing trailing `{c}` symbol to terminate the {kind} literal"));
+ };
let syntax_kind = match *kind {
rustc_lexer::LiteralKind::Int { empty_int, base: _ } => {
if empty_int {
- err = "Missing digits after the integer base prefix";
+ errors.push("Missing digits after the integer base prefix".into());
}
INT_NUMBER
}
rustc_lexer::LiteralKind::Float { empty_exponent, base: _ } => {
if empty_exponent {
- err = "Missing digits after the exponent symbol";
+ errors.push("Missing digits after the exponent symbol".into());
}
FLOAT_NUMBER
}
rustc_lexer::LiteralKind::Char { terminated } => {
if !terminated {
- err = "Missing trailing `'` symbol to terminate the character literal";
+ no_end_quote('\'', "character");
} else {
let text = &self.res.text[self.offset + 1..][..len - 1];
- let i = text.rfind('\'').unwrap();
- let text = &text[..i];
+ let text = &text[..text.rfind('\'').unwrap()];
if let Err(e) = unescape_char(text) {
- err = error_to_diagnostic_message(e, Mode::Char);
+ errors.push(err_to_msg(e, Mode::Char));
}
}
CHAR
}
rustc_lexer::LiteralKind::Byte { terminated } => {
if !terminated {
- err = "Missing trailing `'` symbol to terminate the byte literal";
+ no_end_quote('\'', "byte");
} else {
let text = &self.res.text[self.offset + 2..][..len - 2];
- let i = text.rfind('\'').unwrap();
- let text = &text[..i];
+ let text = &text[..text.rfind('\'').unwrap()];
if let Err(e) = unescape_byte(text) {
- err = error_to_diagnostic_message(e, Mode::Byte);
+ errors.push(err_to_msg(e, Mode::Byte));
}
}
-
BYTE
}
rustc_lexer::LiteralKind::Str { terminated } => {
if !terminated {
- err = "Missing trailing `\"` symbol to terminate the string literal";
+ no_end_quote('"', "string");
} else {
let text = &self.res.text[self.offset + 1..][..len - 1];
- let i = text.rfind('"').unwrap();
- let text = &text[..i];
- err = unescape_string_error_message(text, Mode::Str);
+ let text = &text[..text.rfind('"').unwrap()];
+ unescape_str(text, |_, res| {
+ if let Err(e) = res {
+ errors.push(err_to_msg(e, Mode::Str));
+ }
+ });
}
STRING
}
rustc_lexer::LiteralKind::ByteStr { terminated } => {
if !terminated {
- err = "Missing trailing `\"` symbol to terminate the byte string literal";
+ no_end_quote('"', "byte string");
} else {
let text = &self.res.text[self.offset + 2..][..len - 2];
- let i = text.rfind('"').unwrap();
- let text = &text[..i];
- err = unescape_string_error_message(text, Mode::ByteStr);
+ let text = &text[..text.rfind('"').unwrap()];
+ unescape_byte_str(text, |_, res| {
+ if let Err(e) = res {
+ errors.push(err_to_msg(e, Mode::ByteStr));
+ }
+ });
}
BYTE_STRING
}
rustc_lexer::LiteralKind::CStr { terminated } => {
if !terminated {
- err = "Missing trailing `\"` symbol to terminate the string literal";
+ no_end_quote('"', "C string")
} else {
let text = &self.res.text[self.offset + 2..][..len - 2];
- let i = text.rfind('"').unwrap();
- let text = &text[..i];
- err = unescape_string_error_message(text, Mode::CStr);
+ let text = &text[..text.rfind('"').unwrap()];
+ unescape_c_str(text, |_, res| {
+ if let Err(e) = res {
+ errors.push(err_to_msg(e, Mode::CStr));
+ }
+ });
}
C_STRING
}
rustc_lexer::LiteralKind::RawStr { n_hashes } => {
if n_hashes.is_none() {
- err = "Invalid raw string literal";
+ errors.push(invalid_raw_msg);
}
STRING
}
rustc_lexer::LiteralKind::RawByteStr { n_hashes } => {
if n_hashes.is_none() {
- err = "Invalid raw string literal";
+ errors.push(invalid_raw_msg);
}
BYTE_STRING
}
rustc_lexer::LiteralKind::RawCStr { n_hashes } => {
if n_hashes.is_none() {
- err = "Invalid raw string literal";
+ errors.push(invalid_raw_msg);
}
C_STRING
}
};
- let err = if err.is_empty() { None } else { Some(err) };
- self.push(syntax_kind, len, err);
+ self.push(syntax_kind, len, errors);
}
}
-fn error_to_diagnostic_message(error: EscapeError, mode: Mode) -> &'static str {
+fn err_to_msg(error: EscapeError, mode: Mode) -> String {
match error {
EscapeError::ZeroChars => "empty character literal",
EscapeError::MoreThanOneChar => "character literal may only contain one codepoint",
@@ -410,28 +422,5 @@ fn error_to_diagnostic_message(error: EscapeError, mode: Mode) -> &'static str {
EscapeError::UnskippedWhitespaceWarning => "",
EscapeError::MultipleSkippedLinesWarning => "",
}
-}
-
-fn unescape_string_error_message(text: &str, mode: Mode) -> &'static str {
- let mut error_message = "";
- match mode {
- Mode::CStr => {
- unescape_mixed(text, mode, &mut |_, res| {
- if let Err(e) = res {
- error_message = error_to_diagnostic_message(e, mode);
- }
- });
- }
- Mode::ByteStr | Mode::Str => {
- unescape_unicode(text, mode, &mut |_, res| {
- if let Err(e) = res {
- error_message = error_to_diagnostic_message(e, mode);
- }
- });
- }
- _ => {
- // Other Modes are not supported yet or do not apply
- }
- }
- error_message
+ .into()
}
diff --git a/crates/parser/test_data/parser/inline/ok/type_param_bounds.rast b/crates/parser/test_data/parser/inline/ok/type_param_bounds.rast
index dee860c241..259637c898 100644
--- a/crates/parser/test_data/parser/inline/ok/type_param_bounds.rast
+++ b/crates/parser/test_data/parser/inline/ok/type_param_bounds.rast
@@ -40,8 +40,9 @@ SOURCE_FILE
PLUS "+"
WHITESPACE " "
TYPE_BOUND
- TILDE "~"
+ L_BRACK "["
CONST_KW "const"
+ R_BRACK "]"
WHITESPACE " "
PATH_TYPE
PATH
diff --git a/crates/parser/test_data/parser/inline/ok/type_param_bounds.rs b/crates/parser/test_data/parser/inline/ok/type_param_bounds.rs
index 5da3083b9c..8f37af78e9 100644
--- a/crates/parser/test_data/parser/inline/ok/type_param_bounds.rs
+++ b/crates/parser/test_data/parser/inline/ok/type_param_bounds.rs
@@ -1 +1 @@
-struct S<T: 'a + ?Sized + (Copy) + ~const Drop>;
+struct S<T: 'a + ?Sized + (Copy) + [const] Drop>;
diff --git a/crates/paths/Cargo.toml b/crates/paths/Cargo.toml
index 4cc70726da..f0dafab70c 100644
--- a/crates/paths/Cargo.toml
+++ b/crates/paths/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
camino.workspace = true
diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml
index f5ba40a994..dac8e09435 100644
--- a/crates/proc-macro-api/Cargo.toml
+++ b/crates/proc-macro-api/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
serde.workspace = true
diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml
index 8fd675d0d3..4034f24439 100644
--- a/crates/proc-macro-srv/Cargo.toml
+++ b/crates/proc-macro-srv/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
object.workspace = true
diff --git a/crates/proc-macro-srv/proc-macro-test/Cargo.toml b/crates/proc-macro-srv/proc-macro-test/Cargo.toml
index c416d997a8..bc04482273 100644
--- a/crates/proc-macro-srv/proc-macro-test/Cargo.toml
+++ b/crates/proc-macro-srv/proc-macro-test/Cargo.toml
@@ -7,6 +7,7 @@ edition = "2024"
license = "MIT OR Apache-2.0"
[lib]
+doctest = false
[build-dependencies]
cargo_metadata = "0.20.0"
diff --git a/crates/proc-macro-srv/proc-macro-test/build.rs b/crates/proc-macro-srv/proc-macro-test/build.rs
index b97569d4db..b9e84a474d 100644
--- a/crates/proc-macro-srv/proc-macro-test/build.rs
+++ b/crates/proc-macro-srv/proc-macro-test/build.rs
@@ -109,13 +109,11 @@ fn main() {
let mut artifact_path = None;
for message in Message::parse_stream(output.stdout.as_slice()) {
- if let Message::CompilerArtifact(artifact) = message.unwrap() {
- if artifact.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
- && (artifact.package_id.repr.starts_with(&repr)
- || artifact.package_id.repr == pkgid)
- {
- artifact_path = Some(PathBuf::from(&artifact.filenames[0]));
- }
+ if let Message::CompilerArtifact(artifact) = message.unwrap()
+ && artifact.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
+ && (artifact.package_id.repr.starts_with(&repr) || artifact.package_id.repr == pkgid)
+ {
+ artifact_path = Some(PathBuf::from(&artifact.filenames[0]));
}
}
diff --git a/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml b/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml
index 33b7c2bb0a..e1678bddff 100644
--- a/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml
+++ b/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml
@@ -6,6 +6,7 @@ edition = "2024"
publish = false
[lib]
+doctest = false
proc-macro = true
[dependencies]
diff --git a/crates/profile/Cargo.toml b/crates/profile/Cargo.toml
index bae891c198..4828419003 100644
--- a/crates/profile/Cargo.toml
+++ b/crates/profile/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cfg-if = "1.0.1"
diff --git a/crates/project-model/Cargo.toml b/crates/project-model/Cargo.toml
index 64ea75922f..27fe9f79bb 100644
--- a/crates/project-model/Cargo.toml
+++ b/crates/project-model/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
anyhow.workspace = true
diff --git a/crates/project-model/src/build_dependencies.rs b/crates/project-model/src/build_dependencies.rs
index 4435376eab..499caa622c 100644
--- a/crates/project-model/src/build_dependencies.rs
+++ b/crates/project-model/src/build_dependencies.rs
@@ -20,9 +20,7 @@ use toolchain::Tool;
use crate::{
CargoConfig, CargoFeatures, CargoWorkspace, InvocationStrategy, ManifestPath, Package, Sysroot,
- TargetKind,
- toolchain_info::{QueryConfig, version},
- utf8_stdout,
+ TargetKind, utf8_stdout,
};
/// Output of the build script and proc-macro building steps for a workspace.
@@ -64,6 +62,7 @@ impl WorkspaceBuildScripts {
workspace: &CargoWorkspace,
progress: &dyn Fn(String),
sysroot: &Sysroot,
+ toolchain: Option<&semver::Version>,
) -> io::Result<WorkspaceBuildScripts> {
let current_dir = workspace.workspace_root();
@@ -74,6 +73,7 @@ impl WorkspaceBuildScripts {
workspace.manifest_path(),
current_dir,
sysroot,
+ toolchain,
)?;
Self::run_per_ws(cmd, workspace, progress)
}
@@ -95,6 +95,7 @@ impl WorkspaceBuildScripts {
&ManifestPath::try_from(working_directory.clone()).unwrap(),
working_directory,
&Sysroot::empty(),
+ None,
)?;
// NB: Cargo.toml could have been modified between `cargo metadata` and
// `cargo check`. We shouldn't assume that package ids we see here are
@@ -311,7 +312,9 @@ impl WorkspaceBuildScripts {
match message {
Message::BuildScriptExecuted(mut message) => {
with_output_for(&message.package_id.repr, &mut |name, data| {
- progress(format!("running build-script: {name}"));
+ progress(format!(
+ "building compile-time-deps: build script {name} run"
+ ));
let cfgs = {
let mut acc = Vec::new();
for cfg in &message.cfgs {
@@ -342,7 +345,9 @@ impl WorkspaceBuildScripts {
}
Message::CompilerArtifact(message) => {
with_output_for(&message.package_id.repr, &mut |name, data| {
- progress(format!("building proc-macros: {name}"));
+ progress(format!(
+ "building compile-time-deps: proc-macro {name} built"
+ ));
if message.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
{
// Skip rmeta file
@@ -356,7 +361,7 @@ impl WorkspaceBuildScripts {
});
}
Message::CompilerMessage(message) => {
- progress(message.target.name);
+ progress(format!("received compiler message for: {}", message.target.name));
if let Some(diag) = message.message.rendered.as_deref() {
push_err(diag);
@@ -387,12 +392,13 @@ impl WorkspaceBuildScripts {
manifest_path: &ManifestPath,
current_dir: &AbsPath,
sysroot: &Sysroot,
+ toolchain: Option<&semver::Version>,
) -> io::Result<Command> {
- let mut cmd = match config.run_build_script_command.as_deref() {
+ match config.run_build_script_command.as_deref() {
Some([program, args @ ..]) => {
let mut cmd = toolchain::command(program, current_dir, &config.extra_env);
cmd.args(args);
- cmd
+ Ok(cmd)
}
_ => {
let mut cmd = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env);
@@ -407,13 +413,6 @@ impl WorkspaceBuildScripts {
cmd.arg("--target-dir").arg(target_dir);
}
- // --all-targets includes tests, benches and examples in addition to the
- // default lib and bins. This is an independent concept from the --target
- // flag below.
- if config.all_targets {
- cmd.arg("--all-targets");
- }
-
if let Some(target) = &config.target {
cmd.args(["--target", target]);
}
@@ -444,40 +443,47 @@ impl WorkspaceBuildScripts {
cmd.arg("--keep-going");
- cmd
- }
- };
-
- // If [`--compile-time-deps` flag](https://github.com/rust-lang/cargo/issues/14434) is
- // available in current toolchain's cargo, use it to build compile time deps only.
- const COMP_TIME_DEPS_MIN_TOOLCHAIN_VERSION: semver::Version = semver::Version {
- major: 1,
- minor: 90,
- patch: 0,
- pre: semver::Prerelease::EMPTY,
- build: semver::BuildMetadata::EMPTY,
- };
+ // If [`--compile-time-deps` flag](https://github.com/rust-lang/cargo/issues/14434) is
+ // available in current toolchain's cargo, use it to build compile time deps only.
+ const COMP_TIME_DEPS_MIN_TOOLCHAIN_VERSION: semver::Version = semver::Version {
+ major: 1,
+ minor: 89,
+ patch: 0,
+ pre: semver::Prerelease::EMPTY,
+ build: semver::BuildMetadata::EMPTY,
+ };
+
+ let cargo_comp_time_deps_available =
+ toolchain.is_some_and(|v| *v >= COMP_TIME_DEPS_MIN_TOOLCHAIN_VERSION);
+
+ if cargo_comp_time_deps_available {
+ cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
+ cmd.arg("-Zunstable-options");
+ cmd.arg("--compile-time-deps");
+ // we can pass this unconditionally, because we won't actually build the
+ // binaries, and as such, this will succeed even on targets without libtest
+ cmd.arg("--all-targets");
+ } else {
+ // --all-targets includes tests, benches and examples in addition to the
+ // default lib and bins. This is an independent concept from the --target
+ // flag below.
+ if config.all_targets {
+ cmd.arg("--all-targets");
+ }
- let query_config = QueryConfig::Cargo(sysroot, manifest_path);
- let toolchain = version::get(query_config, &config.extra_env).ok().flatten();
- let cargo_comp_time_deps_available =
- toolchain.is_some_and(|v| v >= COMP_TIME_DEPS_MIN_TOOLCHAIN_VERSION);
-
- if cargo_comp_time_deps_available {
- cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
- cmd.arg("-Zunstable-options");
- cmd.arg("--compile-time-deps");
- } else if config.wrap_rustc_in_build_scripts {
- // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
- // that to compile only proc macros and build scripts during the initial
- // `cargo check`.
- // We don't need this if we are using `--compile-time-deps` flag.
- let myself = std::env::current_exe()?;
- cmd.env("RUSTC_WRAPPER", myself);
- cmd.env("RA_RUSTC_WRAPPER", "1");
+ if config.wrap_rustc_in_build_scripts {
+ // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
+ // that to compile only proc macros and build scripts during the initial
+ // `cargo check`.
+ // We don't need this if we are using `--compile-time-deps` flag.
+ let myself = std::env::current_exe()?;
+ cmd.env("RUSTC_WRAPPER", myself);
+ cmd.env("RA_RUSTC_WRAPPER", "1");
+ }
+ }
+ Ok(cmd)
+ }
}
-
- Ok(cmd)
}
}
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index 58507418e4..4bacc90417 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -48,6 +48,7 @@ pub struct CargoWorkspace {
is_sysroot: bool,
/// Environment variables set in the `.cargo/config` file.
config_env: Env,
+ requires_rustc_private: bool,
}
impl ops::Index<Package> for CargoWorkspace {
@@ -513,6 +514,7 @@ impl CargoWorkspace {
let workspace_root = AbsPathBuf::assert(meta.workspace_root);
let target_directory = AbsPathBuf::assert(meta.target_directory);
let mut is_virtual_workspace = true;
+ let mut requires_rustc_private = false;
meta.packages.sort_by(|a, b| a.id.cmp(&b.id));
for meta_pkg in meta.packages {
@@ -577,6 +579,7 @@ impl CargoWorkspace {
metadata: meta.rust_analyzer.unwrap_or_default(),
});
let pkg_data = &mut packages[pkg];
+ requires_rustc_private |= pkg_data.metadata.rustc_private;
pkg_by_id.insert(id, pkg);
for meta_tgt in meta_targets {
let cargo_metadata::Target { name, kind, required_features, src_path, .. } =
@@ -626,6 +629,7 @@ impl CargoWorkspace {
target_directory,
manifest_path: ws_manifest_path,
is_virtual_workspace,
+ requires_rustc_private,
is_sysroot,
config_env: cargo_config_env,
}
@@ -724,4 +728,8 @@ impl CargoWorkspace {
pub fn is_sysroot(&self) -> bool {
self.is_sysroot
}
+
+ pub fn requires_rustc_private(&self) -> bool {
+ self.requires_rustc_private
+ }
}
diff --git a/crates/project-model/src/env.rs b/crates/project-model/src/env.rs
index 450def5461..9e0415c3b3 100644
--- a/crates/project-model/src/env.rs
+++ b/crates/project-model/src/env.rs
@@ -1,6 +1,6 @@
//! Cargo-like environment variables injection.
use base_db::Env;
-use paths::Utf8Path;
+use paths::{Utf8Path, Utf8PathBuf};
use rustc_hash::FxHashMap;
use toolchain::Tool;
@@ -123,6 +123,26 @@ fn parse_output_cargo_config_env(manifest: &ManifestPath, stdout: &str) -> Env {
env
}
+pub(crate) fn cargo_config_build_target_dir(
+ manifest: &ManifestPath,
+ extra_env: &FxHashMap<String, Option<String>>,
+ sysroot: &Sysroot,
+) -> Option<Utf8PathBuf> {
+ let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
+ cargo_config
+ .args(["-Z", "unstable-options", "config", "get", "build.target-dir"])
+ .env("RUSTC_BOOTSTRAP", "1");
+ if manifest.is_rust_manifest() {
+ cargo_config.arg("-Zscript");
+ }
+ utf8_stdout(&mut cargo_config)
+ .map(|stdout| {
+ Utf8Path::new(stdout.trim_start_matches("build.target-dir = ").trim_matches('"'))
+ .to_owned()
+ })
+ .ok()
+}
+
#[test]
fn parse_output_cargo_config_env_works() {
let stdout = r#"
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index 4b34fc0071..9f19260d30 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -163,18 +163,18 @@ impl Sysroot {
}
}
- pub fn discover_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
- let Some(root) = self.root() else {
- return Err(anyhow::format_err!("no sysroot",));
- };
- ["libexec", "lib"]
- .into_iter()
- .map(|segment| root.join(segment).join("rust-analyzer-proc-macro-srv"))
- .find_map(|server_path| probe_for_binary(server_path.into()))
- .map(AbsPathBuf::assert)
- .ok_or_else(|| {
- anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", root)
- })
+ pub fn discover_proc_macro_srv(&self) -> Option<anyhow::Result<AbsPathBuf>> {
+ let root = self.root()?;
+ Some(
+ ["libexec", "lib"]
+ .into_iter()
+ .map(|segment| root.join(segment).join("rust-analyzer-proc-macro-srv"))
+ .find_map(|server_path| probe_for_binary(server_path.into()))
+ .map(AbsPathBuf::assert)
+ .ok_or_else(|| {
+ anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", root)
+ }),
+ )
}
fn assemble(
@@ -209,6 +209,7 @@ impl Sysroot {
pub fn load_workspace(
&self,
sysroot_source_config: &RustSourceWorkspaceConfig,
+ no_deps: bool,
current_dir: &AbsPath,
progress: &dyn Fn(String),
) -> Option<RustLibSrcWorkspace> {
@@ -224,6 +225,7 @@ impl Sysroot {
&library_manifest,
current_dir,
cargo_config,
+ no_deps,
progress,
) {
Ok(loaded) => return Some(loaded),
@@ -318,6 +320,7 @@ impl Sysroot {
library_manifest: &ManifestPath,
current_dir: &AbsPath,
cargo_config: &CargoMetadataConfig,
+ no_deps: bool,
progress: &dyn Fn(String),
) -> Result<RustLibSrcWorkspace> {
tracing::debug!("Loading library metadata: {library_manifest}");
@@ -333,7 +336,7 @@ impl Sysroot {
current_dir,
&cargo_config,
self,
- false,
+ no_deps,
// Make sure we never attempt to write to the sysroot
true,
progress,
diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs
index 4f11af2d06..f229e9a650 100644
--- a/crates/project-model/src/tests.rs
+++ b/crates/project-model/src/tests.rs
@@ -240,7 +240,7 @@ fn smoke_test_real_sysroot_cargo() {
let cwd = AbsPathBuf::assert_utf8(temp_dir().join("smoke_test_real_sysroot_cargo"));
std::fs::create_dir_all(&cwd).unwrap();
let loaded_sysroot =
- sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo(), &cwd, &|_| ());
+ sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo(), false, &cwd, &|_| ());
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index a6743a32b1..43db84b4fa 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -7,8 +7,8 @@ use std::{collections::VecDeque, fmt, fs, iter, ops::Deref, sync, thread};
use anyhow::Context;
use base_db::{
CrateBuilderId, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin,
- CrateWorkspaceData, DependencyBuilder, Env, LangCrateOrigin, ProcMacroPaths,
- TargetLayoutLoadResult,
+ CrateWorkspaceData, DependencyBuilder, Env, LangCrateOrigin, ProcMacroLoadingError,
+ ProcMacroPaths, TargetLayoutLoadResult,
};
use cfg::{CfgAtom, CfgDiff, CfgOptions};
use intern::{Symbol, sym};
@@ -16,6 +16,7 @@ use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use semver::Version;
use span::{Edition, FileId};
+use toolchain::Tool;
use tracing::instrument;
use triomphe::Arc;
@@ -25,10 +26,14 @@ use crate::{
WorkspaceBuildScripts,
build_dependencies::BuildScriptOutput,
cargo_workspace::{CargoMetadataConfig, DepKind, PackageData, RustLibSource},
- env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env},
+ env::{
+ cargo_config_build_target_dir, cargo_config_env, inject_cargo_env,
+ inject_cargo_package_env, inject_rustc_tool_env,
+ },
project_json::{Crate, CrateArrayIdx},
sysroot::RustLibSrcWorkspace,
toolchain_info::{QueryConfig, rustc_cfg, target_data_layout, target_tuple, version},
+ utf8_stdout,
};
use tracing::{debug, error, info};
@@ -208,8 +213,7 @@ impl ProjectWorkspace {
config: &CargoConfig,
progress: &(dyn Fn(String) + Sync),
) -> Result<ProjectWorkspace, anyhow::Error> {
- progress("Discovering sysroot".to_owned());
- let workspace_dir = cargo_toml.parent();
+ progress("discovering sysroot".to_owned());
let CargoConfig {
features,
rustc_source,
@@ -224,6 +228,7 @@ impl ProjectWorkspace {
no_deps,
..
} = config;
+ let workspace_dir = cargo_toml.parent();
let mut sysroot = match (sysroot, sysroot_src) {
(Some(RustLibSource::Discover), None) => Sysroot::discover(workspace_dir, extra_env),
(Some(RustLibSource::Discover), Some(sysroot_src)) => {
@@ -238,8 +243,33 @@ impl ProjectWorkspace {
(None, _) => Sysroot::empty(),
};
+ // Resolve the Cargo.toml to the workspace root as we base the `target` dir off of it.
+ let mut cmd = sysroot.tool(Tool::Cargo, workspace_dir, extra_env);
+ cmd.args(["locate-project", "--workspace", "--manifest-path", cargo_toml.as_str()]);
+ let cargo_toml = &match utf8_stdout(&mut cmd) {
+ Ok(output) => {
+ #[derive(serde_derive::Deserialize)]
+ struct Root {
+ root: Utf8PathBuf,
+ }
+ match serde_json::from_str::<Root>(&output) {
+ Ok(object) => ManifestPath::try_from(AbsPathBuf::assert(object.root))
+ .expect("manifest path should be absolute"),
+ Err(e) => {
+ tracing::error!(%e, %cargo_toml, "failed fetching cargo workspace root");
+ cargo_toml.clone()
+ }
+ }
+ }
+ Err(e) => {
+ tracing::error!(%e, %cargo_toml, "failed fetching cargo workspace root");
+ cargo_toml.clone()
+ }
+ };
+ let workspace_dir = cargo_toml.parent();
+
tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot");
- progress("Querying project metadata".to_owned());
+ progress("querying project metadata".to_owned());
let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml);
let targets =
target_tuple::get(toolchain_config, target.as_deref(), extra_env).unwrap_or_default();
@@ -252,8 +282,11 @@ impl ProjectWorkspace {
.ok()
.flatten();
- let target_dir =
- config.target_dir.clone().unwrap_or_else(|| workspace_dir.join("target").into());
+ let target_dir = config
+ .target_dir
+ .clone()
+ .or_else(|| cargo_config_build_target_dir(cargo_toml, extra_env, &sysroot))
+ .unwrap_or_else(|| workspace_dir.join("target").into());
// We spawn a bunch of processes to query various information about the workspace's
// toolchain and sysroot
@@ -358,6 +391,7 @@ impl ProjectWorkspace {
toolchain.clone(),
target_dir.clone(),
)),
+ config.no_deps,
workspace_dir,
progress,
)
@@ -374,11 +408,17 @@ impl ProjectWorkspace {
))
});
- let (rustc_cfg, data_layout, rustc, loaded_sysroot, cargo_metadata, cargo_config_extra_env) =
- match join {
- Ok(it) => it,
- Err(e) => std::panic::resume_unwind(e),
- };
+ let (
+ rustc_cfg,
+ data_layout,
+ mut rustc,
+ loaded_sysroot,
+ cargo_metadata,
+ cargo_config_extra_env,
+ ) = match join {
+ Ok(it) => it,
+ Err(e) => std::panic::resume_unwind(e),
+ };
let (meta, error) = cargo_metadata.with_context(|| {
format!(
@@ -391,6 +431,14 @@ impl ProjectWorkspace {
sysroot.set_workspace(loaded_sysroot);
}
+ if !cargo.requires_rustc_private() {
+ if let Err(e) = &mut rustc {
+ // We don't need the rustc sources here,
+ // so just discard the error.
+ _ = e.take();
+ }
+ }
+
Ok(ProjectWorkspace {
kind: ProjectWorkspaceKind::Cargo {
cargo,
@@ -413,17 +461,25 @@ impl ProjectWorkspace {
config: &CargoConfig,
progress: &(dyn Fn(String) + Sync),
) -> ProjectWorkspace {
- progress("Discovering sysroot".to_owned());
+ progress("discovering sysroot".to_owned());
let mut sysroot =
Sysroot::new(project_json.sysroot.clone(), project_json.sysroot_src.clone());
tracing::info!(workspace = %project_json.manifest_or_root(), src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot");
- progress("Querying project metadata".to_owned());
+ progress("querying project metadata".to_owned());
let sysroot_project = project_json.sysroot_project.take();
let query_config = QueryConfig::Rustc(&sysroot, project_json.path().as_ref());
let targets = target_tuple::get(query_config, config.target.as_deref(), &config.extra_env)
.unwrap_or_default();
let toolchain = version::get(query_config, &config.extra_env).ok().flatten();
+ let project_root = project_json.project_root();
+ let target_dir = config
+ .target_dir
+ .clone()
+ .or_else(|| {
+ cargo_config_build_target_dir(project_json.manifest()?, &config.extra_env, &sysroot)
+ })
+ .unwrap_or_else(|| project_root.join("target").into());
// We spawn a bunch of processes to query various information about the workspace's
// toolchain and sysroot
@@ -441,18 +497,14 @@ impl ProjectWorkspace {
)
});
let loaded_sysroot = s.spawn(|| {
- let project_root = project_json.project_root();
if let Some(sysroot_project) = sysroot_project {
sysroot.load_workspace(
&RustSourceWorkspaceConfig::Json(*sysroot_project),
+ config.no_deps,
project_root,
progress,
)
} else {
- let target_dir = config
- .target_dir
- .clone()
- .unwrap_or_else(|| project_root.join("target").into());
sysroot.load_workspace(
&RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
config,
@@ -460,6 +512,7 @@ impl ProjectWorkspace {
toolchain.clone(),
target_dir,
)),
+ config.no_deps,
project_root,
progress,
)
@@ -507,7 +560,12 @@ impl ProjectWorkspace {
.unwrap_or_default();
let rustc_cfg = rustc_cfg::get(query_config, None, &config.extra_env);
let data_layout = target_data_layout::get(query_config, None, &config.extra_env);
- let target_dir = config.target_dir.clone().unwrap_or_else(|| dir.join("target").into());
+ let target_dir = config
+ .target_dir
+ .clone()
+ .or_else(|| cargo_config_build_target_dir(detached_file, &config.extra_env, &sysroot))
+ .unwrap_or_else(|| dir.join("target").into());
+
let loaded_sysroot = sysroot.load_workspace(
&RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
config,
@@ -515,6 +573,7 @@ impl ProjectWorkspace {
toolchain.clone(),
target_dir.clone(),
)),
+ config.no_deps,
dir,
&|_| (),
);
@@ -581,10 +640,16 @@ impl ProjectWorkspace {
match &self.kind {
ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _, None)), .. }
| ProjectWorkspaceKind::Cargo { cargo, error: None, .. } => {
- WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, &self.sysroot)
- .with_context(|| {
- format!("Failed to run build scripts for {}", cargo.workspace_root())
- })
+ WorkspaceBuildScripts::run_for_workspace(
+ config,
+ cargo,
+ progress,
+ &self.sysroot,
+ self.toolchain.as_ref(),
+ )
+ .with_context(|| {
+ format!("Failed to run build scripts for {}", cargo.workspace_root())
+ })
}
_ => Ok(WorkspaceBuildScripts::default()),
}
@@ -683,7 +748,7 @@ impl ProjectWorkspace {
}
}
- pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
+ pub fn find_sysroot_proc_macro_srv(&self) -> Option<anyhow::Result<AbsPathBuf>> {
self.sysroot.discover_proc_macro_srv()
}
@@ -1166,14 +1231,10 @@ fn cargo_to_crate_graph(
// Mapping of a package to its library target
let mut pkg_to_lib_crate = FxHashMap::default();
let mut pkg_crates = FxHashMap::default();
- // Does any crate signal to rust-analyzer that they need the rustc_private crates?
- let mut has_private = false;
let workspace_proc_macro_cwd = Arc::new(cargo.workspace_root().to_path_buf());
// Next, create crates for each package, target pair
for pkg in cargo.packages() {
- has_private |= cargo[pkg].metadata.rustc_private;
-
let cfg_options = {
let mut cfg_options = cfg_options.clone();
@@ -1318,7 +1379,7 @@ fn cargo_to_crate_graph(
add_dep(crate_graph, from, name, to);
}
- if has_private {
+ if cargo.requires_rustc_private() {
// If the user provided a path to rustc sources, we add all the rustc_private crates
// and create dependencies on them for the crates which opt-in to that
if let Some((rustc_workspace, rustc_build_scripts)) = rustc {
@@ -1584,11 +1645,11 @@ fn add_target_crate_root(
Some((BuildScriptOutput { proc_macro_dylib_path, .. }, has_errors)) => {
match proc_macro_dylib_path {
Some(path) => Ok((cargo_name.to_owned(), path.clone())),
- None if has_errors => Err("failed to build proc-macro".to_owned()),
- None => Err("proc-macro crate build data is missing dylib path".to_owned()),
+ None if has_errors => Err(ProcMacroLoadingError::FailedToBuild),
+ None => Err(ProcMacroLoadingError::MissingDylibPath),
}
}
- None => Err("proc-macro crate is missing its build data".to_owned()),
+ None => Err(ProcMacroLoadingError::NotYetBuilt),
};
proc_macros.insert(crate_id, proc_macro);
}
diff --git a/crates/query-group-macro/Cargo.toml b/crates/query-group-macro/Cargo.toml
index 8b03d8f8cc..5991120a30 100644
--- a/crates/query-group-macro/Cargo.toml
+++ b/crates/query-group-macro/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
proc-macro = true
[dependencies]
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 5e63521d74..b301a7189b 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -13,6 +13,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[[bin]]
name = "rust-analyzer"
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index 740fcd81ea..f97bf83244 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -75,8 +75,12 @@ impl Tester {
};
let mut sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env);
- let loaded_sysroot =
- sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo(), &path, &|_| ());
+ let loaded_sysroot = sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::default_cargo(),
+ false,
+ &path,
+ &|_| (),
+ );
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 05e1b832cd..51d4c29aa7 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -94,6 +94,8 @@ config_data! {
+ /// Enables highlighting of related return values while the cursor is on any `match`, `if`, or match arm arrow (`=>`).
+ highlightRelated_branchExitPoints_enable: bool = true,
/// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
highlightRelated_breakPoints_enable: bool = true,
/// Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure.
@@ -1524,7 +1526,7 @@ impl Config {
CompletionConfig {
enable_postfix_completions: self.completion_postfix_enable(source_root).to_owned(),
enable_imports_on_the_fly: self.completion_autoimport_enable(source_root).to_owned()
- && self.caps.completion_item_edit_resolve(),
+ && self.caps.has_completion_item_resolve_additionalTextEdits(),
enable_self_on_the_fly: self.completion_autoself_enable(source_root).to_owned(),
enable_auto_iter: *self.completion_autoIter_enable(source_root),
enable_auto_await: *self.completion_autoAwait_enable(source_root),
@@ -1629,6 +1631,7 @@ impl Config {
exit_points: self.highlightRelated_exitPoints_enable().to_owned(),
yield_points: self.highlightRelated_yieldPoints_enable().to_owned(),
closure_captures: self.highlightRelated_closureCaptures_enable().to_owned(),
+ branch_exit_points: self.highlightRelated_branchExitPoints_enable().to_owned(),
}
}
@@ -2352,10 +2355,6 @@ impl Config {
.and_then(|it| it.version.as_ref())
}
- pub fn client_is_helix(&self) -> bool {
- self.client_info.as_ref().map(|it| it.name == "helix").unwrap_or_default()
- }
-
pub fn client_is_neovim(&self) -> bool {
self.client_info.as_ref().map(|it| it.name == "Neovim").unwrap_or_default()
}
diff --git a/crates/rust-analyzer/src/flycheck.rs b/crates/rust-analyzer/src/flycheck.rs
index 0e418240db..91d37bd7c9 100644
--- a/crates/rust-analyzer/src/flycheck.rs
+++ b/crates/rust-analyzer/src/flycheck.rs
@@ -6,6 +6,7 @@ use std::{fmt, io, process::Command, time::Duration};
use cargo_metadata::PackageId;
use crossbeam_channel::{Receiver, Sender, select_biased, unbounded};
use ide_db::FxHashSet;
+use itertools::Itertools;
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::FxHashMap;
use serde::Deserialize as _;
@@ -379,7 +380,11 @@ impl FlycheckActor {
package_id = msg.package_id.repr,
"artifact received"
);
- self.report_progress(Progress::DidCheckCrate(msg.target.name));
+ self.report_progress(Progress::DidCheckCrate(format!(
+ "{} ({})",
+ msg.target.name,
+ msg.target.kind.iter().format_with(", ", |kind, f| f(&kind)),
+ )));
let package_id = Arc::new(msg.package_id);
if self.diagnostics_cleared_for.insert(package_id.clone()) {
tracing::trace!(
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index a870232d4a..62a28a1a68 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -101,7 +101,7 @@ pub(crate) struct GlobalState {
pub(crate) last_reported_status: lsp_ext::ServerStatusParams,
// proc macros
- pub(crate) proc_macro_clients: Arc<[anyhow::Result<ProcMacroClient>]>,
+ pub(crate) proc_macro_clients: Arc<[Option<anyhow::Result<ProcMacroClient>>]>,
pub(crate) build_deps_changed: bool,
// Flycheck
diff --git a/crates/rust-analyzer/src/handlers/dispatch.rs b/crates/rust-analyzer/src/handlers/dispatch.rs
index 40d05567fc..aea116e647 100644
--- a/crates/rust-analyzer/src/handlers/dispatch.rs
+++ b/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -6,7 +6,7 @@ use std::{
use ide_db::base_db::{
DbPanicContext,
- salsa::{self, Cancelled, UnexpectedCycle},
+ salsa::{self, Cancelled},
};
use lsp_server::{ExtractError, Response, ResponseError};
use serde::{Serialize, de::DeserializeOwned};
@@ -350,9 +350,6 @@ where
if let Some(panic_message) = panic_message {
message.push_str(": ");
message.push_str(panic_message);
- } else if let Some(cycle) = panic.downcast_ref::<UnexpectedCycle>() {
- tracing::error!("{cycle}");
- message.push_str(": unexpected cycle");
} else if let Ok(cancelled) = panic.downcast::<Cancelled>() {
tracing::error!("Cancellation propagated out of salsa! This is a bug");
return Err(HandlerCancelledError::Inner(*cancelled));
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index afd9eff526..a76a65220d 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -2433,17 +2433,14 @@ fn run_rustfmt(
}
_ => {
// Something else happened - e.g. `rustfmt` is missing or caught a signal
- Err(LspError::new(
- -32900,
- format!(
- r#"rustfmt exited with:
- Status: {}
- stdout: {captured_stdout}
- stderr: {captured_stderr}"#,
- output.status,
- ),
- )
- .into())
+ tracing::error!(
+ ?command,
+ %output.status,
+ %captured_stdout,
+ %captured_stderr,
+ "rustfmt failed"
+ );
+ Ok(None)
}
};
}
diff --git a/crates/rust-analyzer/src/lsp/capabilities.rs b/crates/rust-analyzer/src/lsp/capabilities.rs
index 04e31f37fd..f94e7486ff 100644
--- a/crates/rust-analyzer/src/lsp/capabilities.rs
+++ b/crates/rust-analyzer/src/lsp/capabilities.rs
@@ -42,7 +42,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities {
hover_provider: Some(HoverProviderCapability::Simple(true)),
completion_provider: Some(CompletionOptions {
resolve_provider: if config.client_is_neovim() {
- config.completion_item_edit_resolve().then_some(true)
+ config.has_completion_item_resolve_additionalTextEdits().then_some(true)
} else {
Some(config.caps().completions_resolve_provider())
},
@@ -207,8 +207,8 @@ impl ClientCapabilities {
serde_json::from_value(self.0.experimental.as_ref()?.get(index)?.clone()).ok()
}
- /// Parses client capabilities and returns all completion resolve capabilities rust-analyzer supports.
- pub fn completion_item_edit_resolve(&self) -> bool {
+ #[allow(non_snake_case)]
+ pub fn has_completion_item_resolve_additionalTextEdits(&self) -> bool {
(|| {
Some(
self.0
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index 4efe330f16..8a848fb848 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -900,14 +900,17 @@ pub(crate) fn folding_range(
FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
FoldKind::Region => Some(lsp_types::FoldingRangeKind::Region),
- FoldKind::Mods
+ FoldKind::Modules
| FoldKind::Block
| FoldKind::ArgList
| FoldKind::Consts
| FoldKind::Statics
+ | FoldKind::TypeAliases
| FoldKind::WhereClause
| FoldKind::ReturnType
| FoldKind::Array
+ | FoldKind::TraitAliases
+ | FoldKind::ExternCrates
| FoldKind::MatchArm => None,
};
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 0c0438c4b8..00cf890510 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -783,9 +783,14 @@ impl GlobalState {
DiscoverProjectParam::Path(it) => DiscoverArgument::Path(it),
};
- let handle =
- discover.spawn(arg, &std::env::current_dir().unwrap()).unwrap();
- self.discover_handle = Some(handle);
+ let handle = discover.spawn(
+ arg,
+ &std::env::current_dir()
+ .expect("Failed to get cwd during project discovery"),
+ );
+ self.discover_handle = Some(handle.unwrap_or_else(|e| {
+ panic!("Failed to spawn project discovery command: {e}")
+ }));
}
}
}
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 189d95ec7e..e798aa6a8a 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -18,7 +18,7 @@ use std::{iter, mem};
use hir::{ChangeWithProcMacros, ProcMacrosBuilder, db::DefDatabase};
use ide_db::{
FxHashMap,
- base_db::{CrateGraphBuilder, ProcMacroPaths, salsa::Durability},
+ base_db::{CrateGraphBuilder, ProcMacroLoadingError, ProcMacroPaths, salsa::Durability},
};
use itertools::Itertools;
use load_cargo::{ProjectFolders, load_proc_macro};
@@ -194,8 +194,7 @@ impl GlobalState {
format_to!(message, "{e}");
});
- let proc_macro_clients =
- self.proc_macro_clients.iter().map(Some).chain(iter::repeat_with(|| None));
+ let proc_macro_clients = self.proc_macro_clients.iter().chain(iter::repeat(&None));
for (ws, proc_macro_client) in self.workspaces.iter().zip(proc_macro_clients) {
if let ProjectWorkspaceKind::Cargo { error: Some(error), .. }
@@ -252,7 +251,8 @@ impl GlobalState {
message.push_str("\n\n");
}
}
- _ => (),
+ // sysroot was explicitly not set so we didn't discover a server
+ None => {}
}
}
}
@@ -419,16 +419,13 @@ impl GlobalState {
};
let mut builder = ProcMacrosBuilder::default();
- let proc_macro_clients = proc_macro_clients
- .iter()
- .map(|res| res.as_ref().map_err(|e| e.to_string()))
- .chain(iter::repeat_with(|| Err("proc-macro-srv is not running".into())));
+ let proc_macro_clients = proc_macro_clients.iter().chain(iter::repeat(&None));
for (client, paths) in proc_macro_clients.zip(paths) {
for (crate_id, res) in paths.iter() {
let expansion_res = match client {
- Ok(client) => match res {
+ Some(Ok(client)) => match res {
Ok((crate_name, path)) => {
- progress(path.to_string());
+ progress(format!("loading proc-macros: {path}"));
let ignored_proc_macros = ignored_proc_macros
.iter()
.find_map(|(name, macros)| {
@@ -438,9 +435,14 @@ impl GlobalState {
load_proc_macro(client, path, ignored_proc_macros)
}
- Err(e) => Err((e.clone(), true)),
+ Err(e) => Err(e.clone()),
},
- Err(ref e) => Err((e.clone(), true)),
+ Some(Err(e)) => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ e.to_string().into_boxed_str(),
+ )),
+ None => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ "proc-macro-srv is not running".into(),
+ )),
};
builder.insert(*crate_id, expansion_res)
}
@@ -655,7 +657,10 @@ impl GlobalState {
self.proc_macro_clients = Arc::from_iter(self.workspaces.iter().map(|ws| {
let path = match self.config.proc_macro_srv() {
Some(path) => path,
- None => ws.find_sysroot_proc_macro_srv()?,
+ None => match ws.find_sysroot_proc_macro_srv()? {
+ Ok(path) => path,
+ Err(e) => return Some(Err(e)),
+ },
};
let env: FxHashMap<_, _> = match &ws.kind {
@@ -682,14 +687,14 @@ impl GlobalState {
};
info!("Using proc-macro server at {path}");
- ProcMacroClient::spawn(&path, &env).map_err(|err| {
+ Some(ProcMacroClient::spawn(&path, &env).map_err(|err| {
tracing::error!(
"Failed to run proc-macro server from path {path}, error: {err:?}",
);
anyhow::format_err!(
"Failed to run proc-macro server from path {path}, error: {err:?}",
)
- })
+ }))
}))
}
@@ -753,14 +758,14 @@ impl GlobalState {
change.set_proc_macros(
crate_graph
.iter()
- .map(|id| (id, Err(("proc-macro has not been built yet".to_owned(), true))))
+ .map(|id| (id, Err(ProcMacroLoadingError::NotYetBuilt)))
.collect(),
);
} else {
change.set_proc_macros(
crate_graph
.iter()
- .map(|id| (id, Err(("proc-macro expansion is disabled".to_owned(), false))))
+ .map(|id| (id, Err(ProcMacroLoadingError::Disabled)))
.collect(),
);
}
diff --git a/crates/span/src/ast_id.rs b/crates/span/src/ast_id.rs
index 8e95971198..121d2e3324 100644
--- a/crates/span/src/ast_id.rs
+++ b/crates/span/src/ast_id.rs
@@ -107,9 +107,10 @@ impl fmt::Debug for ErasedFileAstId {
}
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
+#[repr(u8)]
enum ErasedFileAstIdKind {
/// This needs to not change because it's depended upon by the proc macro server.
- Fixup,
+ Fixup = 0,
// The following are associated with `ErasedHasNameFileAstId`.
Enum,
Struct,
@@ -413,9 +414,9 @@ impl ErasedAstIdNextIndexMap {
}
macro_rules! register_enum_ast_id {
- (impl AstIdNode for $($ident:ident),+ ) => {
+ (impl $AstIdNode:ident for $($ident:ident),+ ) => {
$(
- impl AstIdNode for ast::$ident {}
+ impl $AstIdNode for ast::$ident {}
)+
};
}
@@ -426,9 +427,9 @@ register_enum_ast_id! {
}
macro_rules! register_has_name_ast_id {
- (impl AstIdNode for $($ident:ident = $name_method:ident),+ ) => {
+ (impl $AstIdNode:ident for $($ident:ident = $name_method:ident),+ ) => {
$(
- impl AstIdNode for ast::$ident {}
+ impl $AstIdNode for ast::$ident {}
)+
fn has_name_ast_id(node: &SyntaxNode, index_map: &mut ErasedAstIdNextIndexMap) -> Option<ErasedFileAstId> {
@@ -472,9 +473,9 @@ register_has_name_ast_id! {
}
macro_rules! register_assoc_item_ast_id {
- (impl AstIdNode for $($ident:ident = $name_callback:expr),+ ) => {
+ (impl $AstIdNode:ident for $($ident:ident = $name_callback:expr),+ ) => {
$(
- impl AstIdNode for ast::$ident {}
+ impl $AstIdNode for ast::$ident {}
)+
fn assoc_item_ast_id(
diff --git a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs
index 7bb88ac365..aef3fbf051 100644
--- a/crates/span/src/hygiene.rs
+++ b/crates/span/src/hygiene.rs
@@ -97,6 +97,7 @@ const _: () = {
const LOCATION: salsa::plumbing::Location =
salsa::plumbing::Location { file: file!(), line: line!() };
const DEBUG_NAME: &'static str = "SyntaxContextData";
+ const REVISIONS: std::num::NonZeroUsize = std::num::NonZeroUsize::MAX;
type Fields<'a> = SyntaxContextData;
type Struct<'a> = SyntaxContext;
}
@@ -108,7 +109,9 @@ const _: () = {
static CACHE: zalsa_::IngredientCache<zalsa_struct_::IngredientImpl<SyntaxContext>> =
zalsa_::IngredientCache::new();
CACHE.get_or_create(db.zalsa(), || {
- db.zalsa().add_or_lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ db.zalsa()
+ .lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ .get_or_create()
})
}
}
@@ -130,9 +133,12 @@ const _: () = {
type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex;
fn lookup_or_create_ingredient_index(
- aux: &salsa::plumbing::Zalsa,
+ zalsa: &salsa::plumbing::Zalsa,
) -> salsa::plumbing::IngredientIndices {
- aux.add_or_lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>().into()
+ zalsa
+ .lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ .get_or_create()
+ .into()
}
#[inline]
@@ -326,14 +332,14 @@ impl<'db> SyntaxContext {
None
} else {
// SAFETY: By our invariant, this is either a root (which we verified it's not) or a valid `salsa::Id`.
- unsafe { Some(salsa::Id::from_u32(self.0)) }
+ unsafe { Some(salsa::Id::from_index(self.0)) }
}
}
#[inline]
fn from_salsa_id(id: salsa::Id) -> Self {
// SAFETY: This comes from a Salsa ID.
- unsafe { Self::from_u32(id.as_u32()) }
+ unsafe { Self::from_u32(id.index()) }
}
#[inline]
diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml
index a6d5781660..2c19f00f08 100644
--- a/crates/stdx/Cargo.toml
+++ b/crates/stdx/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
backtrace = { version = "0.3.75", optional = true }
diff --git a/crates/syntax-bridge/Cargo.toml b/crates/syntax-bridge/Cargo.toml
index cccd41d542..b0fd40ff59 100644
--- a/crates/syntax-bridge/Cargo.toml
+++ b/crates/syntax-bridge/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
rustc-hash.workspace = true
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 9d3aaa8d4e..1ee93013e3 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
either.workspace = true
diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram
index c81da06682..3f43947233 100644
--- a/crates/syntax/rust.ungram
+++ b/crates/syntax/rust.ungram
@@ -669,7 +669,7 @@ TypeBoundList =
TypeBound =
Lifetime
-| ('~' 'const' | 'const')? 'async'? '?'? Type
+| ('~' 'const' | '[' 'const' ']' | 'const')? 'async'? '?'? Type
| 'use' UseBoundGenericArgs
UseBoundGenericArgs =
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs
index 04c7e8a578..79a9f4da33 100644
--- a/crates/syntax/src/ast/generated/nodes.rs
+++ b/crates/syntax/src/ast/generated/nodes.rs
@@ -1766,6 +1766,10 @@ impl TypeBound {
support::child(&self.syntax)
}
#[inline]
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ #[inline]
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+ #[inline]
pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
#[inline]
pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index 955aadaa25..309332873c 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -842,9 +842,10 @@ pub fn ref_pat(pat: ast::Pat) -> ast::RefPat {
}
pub fn match_arm(pat: ast::Pat, guard: Option<ast::MatchGuard>, expr: ast::Expr) -> ast::MatchArm {
+ let comma_str = if expr.is_block_like() { "" } else { "," };
return match guard {
- Some(guard) => from_text(&format!("{pat} {guard} => {expr}")),
- None => from_text(&format!("{pat} => {expr}")),
+ Some(guard) => from_text(&format!("{pat} {guard} => {expr}{comma_str}")),
+ None => from_text(&format!("{pat} => {expr}{comma_str}")),
};
fn from_text(text: &str) -> ast::MatchArm {
@@ -877,7 +878,7 @@ pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::Mat
let arms_str = arms.into_iter().fold(String::new(), |mut acc, arm| {
let needs_comma =
arm.comma_token().is_none() && arm.expr().is_none_or(|it| !it.is_block_like());
- let comma = if needs_comma { "," } else { "" };
+ let comma = if needs_comma && arm.comma_token().is_none() { "," } else { "" };
let arm = arm.syntax();
format_to_acc!(acc, " {arm}{comma}\n")
});
diff --git a/crates/syntax/src/ast/syntax_factory/constructors.rs b/crates/syntax/src/ast/syntax_factory/constructors.rs
index 429e51ba36..17cc5f9c05 100644
--- a/crates/syntax/src/ast/syntax_factory/constructors.rs
+++ b/crates/syntax/src/ast/syntax_factory/constructors.rs
@@ -1212,6 +1212,43 @@ impl SyntaxFactory {
ast
}
+ pub fn attr_outer(&self, meta: ast::Meta) -> ast::Attr {
+ let ast = make::attr_outer(meta.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(meta.syntax().clone(), ast.meta().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
+ pub fn attr_inner(&self, meta: ast::Meta) -> ast::Attr {
+ let ast = make::attr_inner(meta.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(meta.syntax().clone(), ast.meta().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
+ pub fn meta_token_tree(&self, path: ast::Path, tt: ast::TokenTree) -> ast::Meta {
+ let ast = make::meta_token_tree(path.clone(), tt.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(path.syntax().clone(), ast.path().unwrap().syntax().clone());
+ builder.map_node(tt.syntax().clone(), ast.token_tree().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
pub fn token_tree(
&self,
delimiter: SyntaxKind,
@@ -1242,6 +1279,10 @@ impl SyntaxFactory {
pub fn whitespace(&self, text: &str) -> SyntaxToken {
make::tokens::whitespace(text)
}
+
+ pub fn ident(&self, text: &str) -> SyntaxToken {
+ make::tokens::ident(text)
+ }
}
// `ext` constructors
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index ced3b713d8..4afdda78a0 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -1,9 +1,11 @@
//! There are many AstNodes, but only a few tokens, so we hand-write them here.
+use std::ops::Range;
use std::{borrow::Cow, num::ParseIntError};
use rustc_literal_escaper::{
- EscapeError, MixedUnit, Mode, unescape_byte, unescape_char, unescape_mixed, unescape_unicode,
+ EscapeError, MixedUnit, unescape_byte, unescape_byte_str, unescape_c_str, unescape_char,
+ unescape_str,
};
use stdx::always;
@@ -150,7 +152,7 @@ impl QuoteOffsets {
pub trait IsString: AstToken {
const RAW_PREFIX: &'static str;
- const MODE: Mode;
+ fn unescape(s: &str, callback: impl FnMut(Range<usize>, Result<char, EscapeError>));
fn is_raw(&self) -> bool {
self.text().starts_with(Self::RAW_PREFIX)
}
@@ -185,7 +187,7 @@ pub trait IsString: AstToken {
let text = &self.text()[text_range_no_quotes - start];
let offset = text_range_no_quotes.start() - start;
- unescape_unicode(text, Self::MODE, &mut |range, unescaped_char| {
+ Self::unescape(text, &mut |range: Range<usize>, unescaped_char| {
if let Some((s, e)) = range.start.try_into().ok().zip(range.end.try_into().ok()) {
cb(TextRange::new(s, e) + offset, unescaped_char);
}
@@ -203,7 +205,9 @@ pub trait IsString: AstToken {
impl IsString for ast::String {
const RAW_PREFIX: &'static str = "r";
- const MODE: Mode = Mode::Str;
+ fn unescape(s: &str, cb: impl FnMut(Range<usize>, Result<char, EscapeError>)) {
+ unescape_str(s, cb)
+ }
}
impl ast::String {
@@ -218,20 +222,19 @@ impl ast::String {
let mut buf = String::new();
let mut prev_end = 0;
let mut has_error = None;
- unescape_unicode(text, Self::MODE, &mut |char_range, unescaped_char| match (
- unescaped_char,
- buf.capacity() == 0,
- ) {
- (Ok(c), false) => buf.push(c),
- (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
- prev_end = char_range.end
- }
- (Ok(c), true) => {
- buf.reserve_exact(text.len());
- buf.push_str(&text[..prev_end]);
- buf.push(c);
+ unescape_str(text, |char_range, unescaped_char| {
+ match (unescaped_char, buf.capacity() == 0) {
+ (Ok(c), false) => buf.push(c),
+ (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
+ prev_end = char_range.end
+ }
+ (Ok(c), true) => {
+ buf.reserve_exact(text.len());
+ buf.push_str(&text[..prev_end]);
+ buf.push(c);
+ }
+ (Err(e), _) => has_error = Some(e),
}
- (Err(e), _) => has_error = Some(e),
});
match (has_error, buf.capacity() == 0) {
@@ -244,7 +247,9 @@ impl ast::String {
impl IsString for ast::ByteString {
const RAW_PREFIX: &'static str = "br";
- const MODE: Mode = Mode::ByteStr;
+ fn unescape(s: &str, mut callback: impl FnMut(Range<usize>, Result<char, EscapeError>)) {
+ unescape_byte_str(s, |range, res| callback(range, res.map(char::from)))
+ }
}
impl ast::ByteString {
@@ -259,20 +264,19 @@ impl ast::ByteString {
let mut buf: Vec<u8> = Vec::new();
let mut prev_end = 0;
let mut has_error = None;
- unescape_unicode(text, Self::MODE, &mut |char_range, unescaped_char| match (
- unescaped_char,
- buf.capacity() == 0,
- ) {
- (Ok(c), false) => buf.push(c as u8),
- (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
- prev_end = char_range.end
- }
- (Ok(c), true) => {
- buf.reserve_exact(text.len());
- buf.extend_from_slice(&text.as_bytes()[..prev_end]);
- buf.push(c as u8);
+ unescape_byte_str(text, |char_range, unescaped_byte| {
+ match (unescaped_byte, buf.capacity() == 0) {
+ (Ok(b), false) => buf.push(b),
+ (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
+ prev_end = char_range.end
+ }
+ (Ok(b), true) => {
+ buf.reserve_exact(text.len());
+ buf.extend_from_slice(&text.as_bytes()[..prev_end]);
+ buf.push(b);
+ }
+ (Err(e), _) => has_error = Some(e),
}
- (Err(e), _) => has_error = Some(e),
});
match (has_error, buf.capacity() == 0) {
@@ -285,25 +289,10 @@ impl ast::ByteString {
impl IsString for ast::CString {
const RAW_PREFIX: &'static str = "cr";
- const MODE: Mode = Mode::CStr;
-
- fn escaped_char_ranges(&self, cb: &mut dyn FnMut(TextRange, Result<char, EscapeError>)) {
- let text_range_no_quotes = match self.text_range_between_quotes() {
- Some(it) => it,
- None => return,
- };
-
- let start = self.syntax().text_range().start();
- let text = &self.text()[text_range_no_quotes - start];
- let offset = text_range_no_quotes.start() - start;
-
- unescape_mixed(text, Self::MODE, &mut |range, unescaped_char| {
- let text_range =
- TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
- // XXX: This method should only be used for highlighting ranges. The unescaped
- // char/byte is not used. For simplicity, we return an arbitrary placeholder char.
- cb(text_range + offset, unescaped_char.map(|_| ' '));
- });
+ // NOTE: This method should only be used for highlighting ranges. The unescaped
+ // char/byte is not used. For simplicity, we return an arbitrary placeholder char.
+ fn unescape(s: &str, mut callback: impl FnMut(Range<usize>, Result<char, EscapeError>)) {
+ unescape_c_str(s, |range, _res| callback(range, Ok('_')))
}
}
@@ -323,10 +312,7 @@ impl ast::CString {
MixedUnit::Char(c) => buf.extend(c.encode_utf8(&mut [0; 4]).as_bytes()),
MixedUnit::HighByte(b) => buf.push(b),
};
- unescape_mixed(text, Self::MODE, &mut |char_range, unescaped| match (
- unescaped,
- buf.capacity() == 0,
- ) {
+ unescape_c_str(text, |char_range, unescaped| match (unescaped, buf.capacity() == 0) {
(Ok(u), false) => extend_unit(&mut buf, u),
(Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
prev_end = char_range.end
diff --git a/crates/syntax/src/syntax_editor.rs b/crates/syntax/src/syntax_editor.rs
index 31caf618be..3fa584850f 100644
--- a/crates/syntax/src/syntax_editor.rs
+++ b/crates/syntax/src/syntax_editor.rs
@@ -435,7 +435,7 @@ mod tests {
_ => {
let var_name = 2 + 2;
(var_name, true)
- }"#]];
+ },"#]];
expect.assert_eq(&edit.new_root.to_string());
assert_eq!(edit.find_annotation(placeholder_snippet).len(), 2);
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 5bfeb3bff8..4180f9cd18 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -6,7 +6,9 @@ mod block;
use itertools::Itertools;
use rowan::Direction;
-use rustc_literal_escaper::{self, EscapeError, Mode, unescape_mixed, unescape_unicode};
+use rustc_literal_escaper::{
+ EscapeError, unescape_byte, unescape_byte_str, unescape_c_str, unescape_char, unescape_str,
+};
use crate::{
AstNode, SyntaxError,
@@ -47,7 +49,7 @@ pub(crate) fn validate(root: &SyntaxNode, errors: &mut Vec<SyntaxError>) {
}
fn rustc_unescape_error_to_string(err: EscapeError) -> (&'static str, bool) {
- use rustc_literal_escaper::EscapeError as EE;
+ use EscapeError as EE;
#[rustfmt::skip]
let err_message = match err {
@@ -142,7 +144,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
ast::LiteralKind::String(s) => {
if !s.is_raw() {
if let Some(without_quotes) = unquote(text, 1, '"') {
- unescape_unicode(without_quotes, Mode::Str, &mut |range, char| {
+ unescape_str(without_quotes, |range, char| {
if let Err(err) = char {
push_err(1, range.start, err);
}
@@ -153,7 +155,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
ast::LiteralKind::ByteString(s) => {
if !s.is_raw() {
if let Some(without_quotes) = unquote(text, 2, '"') {
- unescape_unicode(without_quotes, Mode::ByteStr, &mut |range, char| {
+ unescape_byte_str(without_quotes, |range, char| {
if let Err(err) = char {
push_err(1, range.start, err);
}
@@ -164,7 +166,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
ast::LiteralKind::CString(s) => {
if !s.is_raw() {
if let Some(without_quotes) = unquote(text, 2, '"') {
- unescape_mixed(without_quotes, Mode::CStr, &mut |range, char| {
+ unescape_c_str(without_quotes, |range, char| {
if let Err(err) = char {
push_err(1, range.start, err);
}
@@ -174,20 +176,16 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
}
ast::LiteralKind::Char(_) => {
if let Some(without_quotes) = unquote(text, 1, '\'') {
- unescape_unicode(without_quotes, Mode::Char, &mut |range, char| {
- if let Err(err) = char {
- push_err(1, range.start, err);
- }
- });
+ if let Err(err) = unescape_char(without_quotes) {
+ push_err(1, 0, err);
+ }
}
}
ast::LiteralKind::Byte(_) => {
if let Some(without_quotes) = unquote(text, 2, '\'') {
- unescape_unicode(without_quotes, Mode::Byte, &mut |range, char| {
- if let Err(err) = char {
- push_err(2, range.start, err);
- }
- });
+ if let Err(err) = unescape_byte(without_quotes) {
+ push_err(2, 0, err);
+ }
}
}
ast::LiteralKind::IntNumber(_)
diff --git a/crates/test-utils/Cargo.toml b/crates/test-utils/Cargo.toml
index c27e850ce7..6d1930aa26 100644
--- a/crates/test-utils/Cargo.toml
+++ b/crates/test-utils/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
# Avoid adding deps here, this crate is widely used in tests it should compile fast!
diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs
index 1d821e96e5..e830c6a7cf 100644
--- a/crates/test-utils/src/fixture.rs
+++ b/crates/test-utils/src/fixture.rs
@@ -435,14 +435,16 @@ impl MiniCore {
continue;
}
- let mut active_line_region = false;
- let mut inactive_line_region = false;
+ let mut active_line_region = 0;
+ let mut inactive_line_region = 0;
if let Some(idx) = trimmed.find("// :!") {
- inactive_line_region = true;
- inactive_regions.push(&trimmed[idx + "// :!".len()..]);
+ let regions = trimmed[idx + "// :!".len()..].split(", ");
+ inactive_line_region += regions.clone().count();
+ inactive_regions.extend(regions);
} else if let Some(idx) = trimmed.find("// :") {
- active_line_region = true;
- active_regions.push(&trimmed[idx + "// :".len()..]);
+ let regions = trimmed[idx + "// :".len()..].split(", ");
+ active_line_region += regions.clone().count();
+ active_regions.extend(regions);
}
let mut keep = true;
@@ -462,11 +464,11 @@ impl MiniCore {
if keep {
buf.push_str(line);
}
- if active_line_region {
- active_regions.pop().unwrap();
+ if active_line_region > 0 {
+ active_regions.drain(active_regions.len() - active_line_region..);
}
- if inactive_line_region {
- inactive_regions.pop().unwrap();
+ if inactive_line_region > 0 {
+            inactive_regions.drain(inactive_regions.len() - inactive_line_region..);
}
}
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index d13a81d287..dc1eba1a1a 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -11,10 +11,13 @@
//! add:
//! asm:
//! assert:
+//! as_mut: sized
//! as_ref: sized
//! async_fn: fn, tuple, future, copy
//! bool_impl: option, fn
//! builtin_impls:
+//! borrow: sized
+//! borrow_mut: borrow
//! cell: copy, drop
//! clone: sized
//! coerce_pointee: derive, sized, unsize, coerce_unsized, dispatch_from_dyn
@@ -26,7 +29,7 @@
//! deref: sized
//! derive:
//! discriminant:
-//! drop:
+//! drop: sized
//! env: option
//! eq: sized
//! error: fmt
@@ -37,7 +40,7 @@
//! future: pin
//! coroutine: pin
//! dispatch_from_dyn: unsize, pin
-//! hash:
+//! hash: sized
//! include:
//! index: sized
//! infallible:
@@ -77,33 +80,46 @@
pub mod marker {
// region:sized
+ #[lang = "pointee_sized"]
+ #[fundamental]
+ #[rustc_specialization_trait]
+ #[rustc_coinductive]
+ pub trait PointeeSized {}
+
+ #[lang = "meta_sized"]
+ #[fundamental]
+ #[rustc_specialization_trait]
+ #[rustc_coinductive]
+ pub trait MetaSized: PointeeSized {}
+
#[lang = "sized"]
#[fundamental]
#[rustc_specialization_trait]
- pub trait Sized {}
+ #[rustc_coinductive]
+ pub trait Sized: MetaSized {}
// endregion:sized
// region:send
pub unsafe auto trait Send {}
- impl<T: ?Sized> !Send for *const T {}
- impl<T: ?Sized> !Send for *mut T {}
+ impl<T: PointeeSized> !Send for *const T {}
+ impl<T: PointeeSized> !Send for *mut T {}
// region:sync
- unsafe impl<T: Sync + ?Sized> Send for &T {}
- unsafe impl<T: Send + ?Sized> Send for &mut T {}
+ unsafe impl<T: Sync + PointeeSized> Send for &T {}
+ unsafe impl<T: Send + PointeeSized> Send for &mut T {}
// endregion:sync
// endregion:send
// region:sync
pub unsafe auto trait Sync {}
- impl<T: ?Sized> !Sync for *const T {}
- impl<T: ?Sized> !Sync for *mut T {}
+ impl<T: PointeeSized> !Sync for *const T {}
+ impl<T: PointeeSized> !Sync for *mut T {}
// endregion:sync
// region:unsize
#[lang = "unsize"]
- pub trait Unsize<T: ?Sized> {}
+ pub trait Unsize<T: PointeeSized>: PointeeSized {}
// endregion:unsize
// region:unpin
@@ -120,7 +136,7 @@ pub mod marker {
// endregion:derive
mod copy_impls {
- use super::Copy;
+ use super::{Copy, PointeeSized};
macro_rules! impl_copy {
($($t:ty)*) => {
@@ -137,9 +153,9 @@ pub mod marker {
bool char
}
- impl<T: ?Sized> Copy for *const T {}
- impl<T: ?Sized> Copy for *mut T {}
- impl<T: ?Sized> Copy for &T {}
+ impl<T: PointeeSized> Copy for *const T {}
+ impl<T: PointeeSized> Copy for *mut T {}
+ impl<T: PointeeSized> Copy for &T {}
impl Copy for ! {}
}
// endregion:copy
@@ -151,7 +167,7 @@ pub mod marker {
// region:phantom_data
#[lang = "phantom_data"]
- pub struct PhantomData<T: ?Sized>;
+ pub struct PhantomData<T: PointeeSized>;
// endregion:phantom_data
// region:discriminant
@@ -206,25 +222,31 @@ pub mod default {
// region:hash
pub mod hash {
+ use crate::marker::PointeeSized;
+
pub trait Hasher {}
- pub trait Hash {
+ pub trait Hash: PointeeSized {
fn hash<H: Hasher>(&self, state: &mut H);
}
// region:derive
- #[rustc_builtin_macro]
- pub macro Hash($item:item) {}
+ pub(crate) mod derive {
+ #[rustc_builtin_macro]
+ pub macro Hash($item:item) {}
+ }
+ pub use derive::Hash;
// endregion:derive
}
// endregion:hash
// region:cell
pub mod cell {
+ use crate::marker::PointeeSized;
use crate::mem;
#[lang = "unsafe_cell"]
- pub struct UnsafeCell<T: ?Sized> {
+ pub struct UnsafeCell<T: PointeeSized> {
value: T,
}
@@ -238,7 +260,7 @@ pub mod cell {
}
}
- pub struct Cell<T: ?Sized> {
+ pub struct Cell<T: PointeeSized> {
value: UnsafeCell<T>,
}
@@ -357,20 +379,41 @@ pub mod convert {
// endregion:from
// region:as_ref
- pub trait AsRef<T: ?Sized> {
+ pub trait AsRef<T: crate::marker::PointeeSized>: crate::marker::PointeeSized {
fn as_ref(&self) -> &T;
}
// endregion:as_ref
+ // region:as_mut
+ pub trait AsMut<T: crate::marker::PointeeSized>: crate::marker::PointeeSized {
+ fn as_mut(&mut self) -> &mut T;
+ }
+ // endregion:as_mut
// region:infallible
pub enum Infallible {}
// endregion:infallible
}
+pub mod borrow {
+ // region:borrow
+ pub trait Borrow<Borrowed: ?Sized> {
+ fn borrow(&self) -> &Borrowed;
+ }
+ // endregion:borrow
+
+ // region:borrow_mut
+ pub trait BorrowMut<Borrowed: ?Sized>: Borrow<Borrowed> {
+ fn borrow_mut(&mut self) -> &mut Borrowed;
+ }
+ // endregion:borrow_mut
+}
+
pub mod mem {
// region:manually_drop
+ use crate::marker::PointeeSized;
+
#[lang = "manually_drop"]
#[repr(transparent)]
- pub struct ManuallyDrop<T: ?Sized> {
+ pub struct ManuallyDrop<T: PointeeSized> {
value: T,
}
@@ -381,7 +424,7 @@ pub mod mem {
}
// region:deref
- impl<T: ?Sized> crate::ops::Deref for ManuallyDrop<T> {
+ impl<T: PointeeSized> crate::ops::Deref for ManuallyDrop<T> {
type Target = T;
fn deref(&self) -> &T {
&self.value
@@ -428,7 +471,7 @@ pub mod mem {
pub mod ptr {
// region:drop
#[lang = "drop_in_place"]
- pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
+ pub unsafe fn drop_in_place<T: crate::marker::PointeeSized>(to_drop: *mut T) {
unsafe { drop_in_place(to_drop) }
}
pub const unsafe fn read<T>(src: *const T) -> T {
@@ -444,7 +487,7 @@ pub mod ptr {
// region:pointee
#[lang = "pointee_trait"]
#[rustc_deny_explicit_impl(implement_via_object = false)]
- pub trait Pointee {
+ pub trait Pointee: crate::marker::PointeeSized {
#[lang = "metadata_type"]
type Metadata: Copy + Send + Sync + Ord + Hash + Unpin;
}
@@ -452,12 +495,14 @@ pub mod ptr {
// region:non_null
#[rustc_layout_scalar_valid_range_start(1)]
#[rustc_nonnull_optimization_guaranteed]
- pub struct NonNull<T: ?Sized> {
+ pub struct NonNull<T: crate::marker::PointeeSized> {
pointer: *const T,
}
// region:coerce_unsized
- impl<T: ?Sized, U: ?Sized> crate::ops::CoerceUnsized<NonNull<U>> for NonNull<T> where
- T: crate::marker::Unsize<U>
+ impl<T: crate::marker::PointeeSized, U: crate::marker::PointeeSized>
+ crate::ops::CoerceUnsized<NonNull<U>> for NonNull<T>
+ where
+ T: crate::marker::Unsize<U>,
{
}
// endregion:coerce_unsized
@@ -478,42 +523,44 @@ pub mod ptr {
pub mod ops {
// region:coerce_unsized
mod unsize {
- use crate::marker::Unsize;
+ use crate::marker::{PointeeSized, Unsize};
#[lang = "coerce_unsized"]
- pub trait CoerceUnsized<T: ?Sized> {}
+ pub trait CoerceUnsized<T> {}
- impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a mut U> for &'a mut T {}
- impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b mut T {}
- impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for &'a mut T {}
- impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for &'a mut T {}
+ impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<&'a mut U> for &'a mut T {}
+ impl<'a, 'b: 'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<&'a U> for &'b mut T {}
+ impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*mut U> for &'a mut T {}
+ impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*const U> for &'a mut T {}
- impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
- impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for &'a T {}
+ impl<'a, 'b: 'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<&'a U> for &'b T {}
+ impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*const U> for &'a T {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *mut T {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {}
+ impl<T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*mut U> for *mut T {}
+ impl<T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*const U> for *mut T {}
+ impl<T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*const U> for *const T {}
}
pub use self::unsize::CoerceUnsized;
// endregion:coerce_unsized
// region:deref
mod deref {
+ use crate::marker::PointeeSized;
+
#[lang = "deref"]
- pub trait Deref {
+ pub trait Deref: PointeeSized {
#[lang = "deref_target"]
type Target: ?Sized;
fn deref(&self) -> &Self::Target;
}
- impl<T: ?Sized> Deref for &T {
+ impl<T: PointeeSized> Deref for &T {
type Target = T;
fn deref(&self) -> &T {
loop {}
}
}
- impl<T: ?Sized> Deref for &mut T {
+ impl<T: PointeeSized> Deref for &mut T {
type Target = T;
fn deref(&self) -> &T {
loop {}
@@ -521,19 +568,19 @@ pub mod ops {
}
// region:deref_mut
#[lang = "deref_mut"]
- pub trait DerefMut: Deref {
+ pub trait DerefMut: Deref + PointeeSized {
fn deref_mut(&mut self) -> &mut Self::Target;
}
// endregion:deref_mut
// region:receiver
#[lang = "receiver"]
- pub trait Receiver {
+ pub trait Receiver: PointeeSized {
#[lang = "receiver_target"]
type Target: ?Sized;
}
- impl<P: ?Sized, T: ?Sized> Receiver for P
+ impl<P: PointeeSized, T: PointeeSized> Receiver for P
where
P: Deref<Target = T>,
{
@@ -686,7 +733,7 @@ pub mod ops {
#[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
impl<A: Tuple, F: ?Sized> const Fn<A> for &F
where
- F: ~const Fn<A>,
+ F: [const] Fn<A>,
{
extern "rust-call" fn call(&self, args: A) -> F::Output {
(**self).call(args)
@@ -697,7 +744,7 @@ pub mod ops {
#[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
impl<A: Tuple, F: ?Sized> const FnMut<A> for &F
where
- F: ~const Fn<A>,
+ F: [const] Fn<A>,
{
extern "rust-call" fn call_mut(&mut self, args: A) -> F::Output {
(**self).call(args)
@@ -708,7 +755,7 @@ pub mod ops {
#[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
impl<A: Tuple, F: ?Sized> const FnOnce<A> for &F
where
- F: ~const Fn<A>,
+ F: [const] Fn<A>,
{
type Output = F::Output;
@@ -721,7 +768,7 @@ pub mod ops {
#[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
impl<A: Tuple, F: ?Sized> const FnMut<A> for &mut F
where
- F: ~const FnMut<A>,
+ F: [const] FnMut<A>,
{
extern "rust-call" fn call_mut(&mut self, args: A) -> F::Output {
(*self).call_mut(args)
@@ -732,7 +779,7 @@ pub mod ops {
#[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
impl<A: Tuple, F: ?Sized> const FnOnce<A> for &mut F
where
- F: ~const FnMut<A>,
+ F: [const] FnMut<A>,
{
type Output = F::Output;
extern "rust-call" fn call_once(self, args: A) -> F::Output {
@@ -1006,18 +1053,18 @@ pub mod ops {
// region:dispatch_from_dyn
mod dispatch_from_dyn {
- use crate::marker::Unsize;
+ use crate::marker::{PointeeSized, Unsize};
#[lang = "dispatch_from_dyn"]
pub trait DispatchFromDyn<T> {}
- impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<&'a U> for &'a T {}
+ impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<&'a U> for &'a T {}
- impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<&'a mut U> for &'a mut T {}
+ impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<&'a mut U> for &'a mut T {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<*const U> for *const T {}
+ impl<T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<*const U> for *const T {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<*mut U> for *mut T {}
+ impl<T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<*mut U> for *mut T {}
}
pub use self::dispatch_from_dyn::DispatchFromDyn;
// endregion:dispatch_from_dyn
@@ -1025,15 +1072,17 @@ pub mod ops {
// region:eq
pub mod cmp {
+ use crate::marker::PointeeSized;
+
#[lang = "eq"]
- pub trait PartialEq<Rhs: ?Sized = Self> {
+ pub trait PartialEq<Rhs: PointeeSized = Self>: PointeeSized {
fn eq(&self, other: &Rhs) -> bool;
fn ne(&self, other: &Rhs) -> bool {
!self.eq(other)
}
}
- pub trait Eq: PartialEq<Self> {}
+ pub trait Eq: PartialEq<Self> + PointeeSized {}
// region:derive
#[rustc_builtin_macro]
@@ -1044,11 +1093,11 @@ pub mod cmp {
// region:ord
#[lang = "partial_ord"]
- pub trait PartialOrd<Rhs: ?Sized = Self>: PartialEq<Rhs> {
+ pub trait PartialOrd<Rhs: PointeeSized = Self>: PartialEq<Rhs> + PointeeSized {
fn partial_cmp(&self, other: &Rhs) -> Option<Ordering>;
}
- pub trait Ord: Eq + PartialOrd<Self> {
+ pub trait Ord: Eq + PartialOrd<Self> + PointeeSized {
fn cmp(&self, other: &Self) -> Ordering;
}
@@ -1071,6 +1120,8 @@ pub mod cmp {
// region:fmt
pub mod fmt {
+ use crate::marker::PointeeSized;
+
pub struct Error;
pub type Result = crate::result::Result<(), Error>;
pub struct Formatter<'a>;
@@ -1106,10 +1157,10 @@ pub mod fmt {
}
}
- pub trait Debug {
+ pub trait Debug: PointeeSized {
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
- pub trait Display {
+ pub trait Display: PointeeSized {
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
@@ -1238,8 +1289,11 @@ pub mod fmt {
}
// region:derive
- #[rustc_builtin_macro]
- pub macro Debug($item:item) {}
+ pub(crate) mod derive {
+ #[rustc_builtin_macro]
+ pub macro Debug($item:item) {}
+ }
+ pub use derive::Debug;
// endregion:derive
// region:builtin_impls
@@ -1268,7 +1322,7 @@ pub mod fmt {
}
}
- impl<T: Debug + ?Sized> Debug for &T {
+ impl<T: Debug + PointeeSized> Debug for &T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
(&**self).fmt(f)
}
@@ -1512,6 +1566,8 @@ pub mod iter {
mod traits {
mod iterator {
+ use crate::marker::PointeeSized;
+
#[doc(notable_trait)]
#[lang = "iterator"]
pub trait Iterator {
@@ -1543,7 +1599,7 @@ pub mod iter {
}
// endregion:iterators
}
- impl<I: Iterator + ?Sized> Iterator for &mut I {
+ impl<I: Iterator + PointeeSized> Iterator for &mut I {
type Item = I::Item;
fn next(&mut self) -> Option<I::Item> {
(**self).next()
@@ -1903,6 +1959,8 @@ pub mod prelude {
panic, // :panic
result::Result::{self, Err, Ok}, // :result
str::FromStr, // :str
+ fmt::derive::Debug, // :fmt, derive
+ hash::derive::Hash, // :hash, derive
};
}
diff --git a/crates/toolchain/Cargo.toml b/crates/toolchain/Cargo.toml
index 315a3a2890..f561c1c0e2 100644
--- a/crates/toolchain/Cargo.toml
+++ b/crates/toolchain/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
home = "0.5.11"
diff --git a/crates/tt/Cargo.toml b/crates/tt/Cargo.toml
index 529fad3244..82e7c24668 100644
--- a/crates/tt/Cargo.toml
+++ b/crates/tt/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
arrayvec.workspace = true
diff --git a/crates/tt/src/iter.rs b/crates/tt/src/iter.rs
index 0418c00174..3246156f1c 100644
--- a/crates/tt/src/iter.rs
+++ b/crates/tt/src/iter.rs
@@ -211,6 +211,7 @@ impl<'a, S: Copy> TtIter<'a, S> {
}
}
+#[derive(Clone)]
pub enum TtElement<'a, S> {
Leaf(&'a Leaf<S>),
Subtree(&'a Subtree<S>, TtIter<'a, S>),
diff --git a/crates/vfs-notify/Cargo.toml b/crates/vfs-notify/Cargo.toml
index 9b32ee17ab..bd6c8331e6 100644
--- a/crates/vfs-notify/Cargo.toml
+++ b/crates/vfs-notify/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
tracing.workspace = true
diff --git a/crates/vfs/Cargo.toml b/crates/vfs/Cargo.toml
index 546195481c..e8a6195036 100644
--- a/crates/vfs/Cargo.toml
+++ b/crates/vfs/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
rustc-hash.workspace = true
diff --git a/crates/vfs/src/file_set.rs b/crates/vfs/src/file_set.rs
index 1228e2e177..0c41ede5b5 100644
--- a/crates/vfs/src/file_set.rs
+++ b/crates/vfs/src/file_set.rs
@@ -5,8 +5,8 @@
use std::fmt;
use fst::{IntoStreamer, Streamer};
-use nohash_hasher::IntMap;
-use rustc_hash::FxHashMap;
+use indexmap::IndexMap;
+use rustc_hash::{FxBuildHasher, FxHashMap};
use crate::{AnchoredPath, FileId, Vfs, VfsPath};
@@ -14,7 +14,7 @@ use crate::{AnchoredPath, FileId, Vfs, VfsPath};
#[derive(Default, Clone, Eq, PartialEq)]
pub struct FileSet {
files: FxHashMap<VfsPath, FileId>,
- paths: IntMap<FileId, VfsPath>,
+ paths: IndexMap<FileId, VfsPath, FxBuildHasher>,
}
impl FileSet {
diff --git a/docs/book/src/SUMMARY.md b/docs/book/src/SUMMARY.md
index 1f211a97d7..dffdae94a6 100644
--- a/docs/book/src/SUMMARY.md
+++ b/docs/book/src/SUMMARY.md
@@ -6,6 +6,7 @@
- [rust-analyzer Binary](rust_analyzer_binary.md)
- [Other Editors](other_editors.md)
- [Troubleshooting](troubleshooting.md)
+ - [FAQ](faq.md)
- [Configuration](configuration.md)
- [Non-Cargo Based Projects](non_cargo_based_projects.md)
- [Security](security.md)
diff --git a/docs/book/src/configuration_generated.md b/docs/book/src/configuration_generated.md
index 9404b1454a..ebac26e1d6 100644
--- a/docs/book/src/configuration_generated.md
+++ b/docs/book/src/configuration_generated.md
@@ -612,6 +612,13 @@ Default: `"client"`
Controls file watching implementation.
+## rust-analyzer.highlightRelated.branchExitPoints.enable {#highlightRelated.branchExitPoints.enable}
+
+Default: `true`
+
+Enables highlighting of related return values while the cursor is on any `match`, `if`, or match arm arrow (`=>`).
+
+
## rust-analyzer.highlightRelated.breakPoints.enable {#highlightRelated.breakPoints.enable}
Default: `true`
diff --git a/docs/book/src/contributing/lsp-extensions.md b/docs/book/src/contributing/lsp-extensions.md
index 1ada1cb24c..8c06f33a9f 100644
--- a/docs/book/src/contributing/lsp-extensions.md
+++ b/docs/book/src/contributing/lsp-extensions.md
@@ -694,24 +694,6 @@ interface CancelFlycheckParams {}
Cancels all running flycheck processes.
-## Syntax Tree
-
-**Method:** `rust-analyzer/syntaxTree`
-
-**Request:**
-
-```typescript
-interface SyntaxTreeParams {
- textDocument: TextDocumentIdentifier,
- range?: Range,
-}
-```
-
-**Response:** `string`
-
-Returns textual representation of a parse tree for the file/selected region.
-Primarily for debugging, but very useful for all people working on rust-analyzer itself.
-
## View Syntax Tree
**Method:** `rust-analyzer/viewSyntaxTree`
diff --git a/docs/book/src/faq.md b/docs/book/src/faq.md
new file mode 100644
index 0000000000..c872033090
--- /dev/null
+++ b/docs/book/src/faq.md
@@ -0,0 +1,7 @@
+# Troubleshooting FAQ
+
+### I see a warning "Variable `None` should have snake_case name, e.g. `none`"
+
+rust-analyzer fails to resolve `None`, and thinks you are binding to a variable
+named `None`. That's usually a sign of a corrupted sysroot. Try removing and re-installing
+it: `rustup component remove rust-src` then `rustup component install rust-src`.
diff --git a/docs/book/src/non_cargo_based_projects.md b/docs/book/src/non_cargo_based_projects.md
index bbdb48bbbc..befb631ec0 100644
--- a/docs/book/src/non_cargo_based_projects.md
+++ b/docs/book/src/non_cargo_based_projects.md
@@ -40,6 +40,9 @@ interface ProjectJson {
/// several different "sysroots" in one graph of
/// crates.
sysroot_src?: string;
+ /// A ProjectJson describing the crates of the sysroot.
+ sysroot_project?: ProjectJson;
+
/// List of groups of common cfg values, to allow
/// sharing them between crates.
///
diff --git a/docs/book/src/troubleshooting.md b/docs/book/src/troubleshooting.md
index 1b2841421a..a357cbef41 100644
--- a/docs/book/src/troubleshooting.md
+++ b/docs/book/src/troubleshooting.md
@@ -1,5 +1,8 @@
# Troubleshooting
+First, search the [troubleshooting FAQ](faq.html). If your problem appears
+there (and the proposed solution works for you), great! Otherwise, read on.
+
Start with looking at the rust-analyzer version. Try **rust-analyzer:
Show RA Version** in VS Code (using **Command Palette** feature
typically activated by Ctrl+Shift+P) or `rust-analyzer --version` in the
diff --git a/editors/code/package.json b/editors/code/package.json
index 26a21c1468..3cb4c21ee1 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -1532,6 +1532,16 @@
{
"title": "highlightRelated",
"properties": {
+ "rust-analyzer.highlightRelated.branchExitPoints.enable": {
+ "markdownDescription": "Enables highlighting of related return values while the cursor is on any `match`, `if`, or match arm arrow (`=>`).",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "highlightRelated",
+ "properties": {
"rust-analyzer.highlightRelated.breakPoints.enable": {
"markdownDescription": "Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.",
"default": true,
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index cdeea7333a..073ff2f470 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -3,7 +3,7 @@ import * as lc from "vscode-languageclient/node";
import * as vscode from "vscode";
import * as ra from "../src/lsp_ext";
import * as Is from "vscode-languageclient/lib/common/utils/is";
-import { assert, unwrapUndefinable } from "./util";
+import { assert } from "./util";
import * as diagnostics from "./diagnostics";
import { WorkspaceEdit } from "vscode";
import { type Config, prepareVSCodeConfig } from "./config";
@@ -188,11 +188,17 @@ export async function createClient(
context: await client.code2ProtocolConverter.asCodeActionContext(context, token),
};
const callback = async (
- values: (lc.Command | lc.CodeAction)[] | null,
+ values: (lc.Command | lc.CodeAction | object)[] | null,
): Promise<(vscode.Command | vscode.CodeAction)[] | undefined> => {
if (values === null) return undefined;
const result: (vscode.CodeAction | vscode.Command)[] = [];
- const groups = new Map<string, { index: number; items: vscode.CodeAction[] }>();
+ const groups = new Map<
+ string,
+ {
+ primary: vscode.CodeAction;
+ items: { label: string; arguments: lc.CodeAction }[];
+ }
+ >();
for (const item of values) {
// In our case we expect to get code edits only from diagnostics
if (lc.CodeAction.is(item)) {
@@ -204,62 +210,55 @@ export async function createClient(
result.push(action);
continue;
}
- assert(
- isCodeActionWithoutEditsAndCommands(item),
- "We don't expect edits or commands here",
- );
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const kind = client.protocol2CodeConverter.asCodeActionKind((item as any).kind);
- const action = new vscode.CodeAction(item.title, kind);
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const group = (item as any).group;
- action.command = {
- command: "rust-analyzer.resolveCodeAction",
- title: item.title,
- arguments: [item],
- };
+ assertIsCodeActionWithoutEditsAndCommands(item);
+ const kind = client.protocol2CodeConverter.asCodeActionKind(item.kind);
+ const group = item.group;
- // Set a dummy edit, so that VS Code doesn't try to resolve this.
- action.edit = new WorkspaceEdit();
+ const mkAction = () => {
+ const action = new vscode.CodeAction(item.title, kind);
+ action.command = {
+ command: "rust-analyzer.resolveCodeAction",
+ title: item.title,
+ arguments: [item],
+ };
+ // Set a dummy edit, so that VS Code doesn't try to resolve this.
+ action.edit = new WorkspaceEdit();
+ return action;
+ };
if (group) {
let entry = groups.get(group);
if (!entry) {
- entry = { index: result.length, items: [] };
+ entry = { primary: mkAction(), items: [] };
groups.set(group, entry);
- result.push(action);
+ } else {
+ entry.items.push({
+ label: item.title,
+ arguments: item,
+ });
}
- entry.items.push(action);
} else {
- result.push(action);
+ result.push(mkAction());
}
}
- for (const [group, { index, items }] of groups) {
- if (items.length === 1) {
- const item = unwrapUndefinable(items[0]);
- result[index] = item;
- } else {
- const action = new vscode.CodeAction(group);
- const item = unwrapUndefinable(items[0]);
- action.kind = item.kind;
- action.command = {
+ for (const [group, { items, primary }] of groups) {
+ // This group contains more than one item, so rewrite it to be a group action
+ if (items.length !== 0) {
+ const args = [
+ {
+ label: primary.title,
+ arguments: primary.command!.arguments![0],
+ },
+ ...items,
+ ];
+ primary.title = group;
+ primary.command = {
command: "rust-analyzer.applyActionGroup",
title: "",
- arguments: [
- items.map((item) => {
- return {
- label: item.title,
- arguments: item.command!.arguments![0],
- };
- }),
- ],
+ arguments: [args],
};
-
- // Set a dummy edit, so that VS Code doesn't try to resolve this.
- action.edit = new WorkspaceEdit();
-
- result[index] = action;
}
+ result.push(primary);
}
return result;
};
@@ -363,17 +362,22 @@ class OverrideFeatures implements lc.StaticFeature {
clear(): void {}
}
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function isCodeActionWithoutEditsAndCommands(value: any): boolean {
- const candidate: lc.CodeAction = value;
- return (
+function assertIsCodeActionWithoutEditsAndCommands(
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ candidate: any,
+): asserts candidate is lc.CodeAction & {
+ group?: string;
+} {
+ assert(
candidate &&
- Is.string(candidate.title) &&
- (candidate.diagnostics === void 0 ||
- Is.typedArray(candidate.diagnostics, lc.Diagnostic.is)) &&
- (candidate.kind === void 0 || Is.string(candidate.kind)) &&
- candidate.edit === void 0 &&
- candidate.command === void 0
+ Is.string(candidate.title) &&
+ (candidate.diagnostics === undefined ||
+ Is.typedArray(candidate.diagnostics, lc.Diagnostic.is)) &&
+ (candidate.group === undefined || Is.string(candidate.group)) &&
+ (candidate.kind === undefined || Is.string(candidate.kind)) &&
+ candidate.edit === undefined &&
+ candidate.command === undefined,
+ `Expected a CodeAction without edits or commands, got: ${JSON.stringify(candidate)}`,
);
}
diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts
index 3ac1a933d9..25b30013fa 100644
--- a/editors/code/src/commands.ts
+++ b/editors/code/src/commands.ts
@@ -1114,11 +1114,11 @@ export function applySnippetWorkspaceEditCommand(_ctx: CtxInit): Cmd {
};
}
-export function run(ctx: CtxInit): Cmd {
+export function run(ctx: CtxInit, mode?: "cursor"): Cmd {
let prevRunnable: RunnableQuickPick | undefined;
return async () => {
- const item = await selectRunnable(ctx, prevRunnable);
+ const item = await selectRunnable(ctx, prevRunnable, false, true, mode);
if (!item) return;
item.detail = "rerun";
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts
index 5e50073069..996298524f 100644
--- a/editors/code/src/main.ts
+++ b/editors/code/src/main.ts
@@ -167,7 +167,7 @@ function createCommands(): Record<string, CommandFactory> {
viewCrateGraph: { enabled: commands.viewCrateGraph },
viewFullCrateGraph: { enabled: commands.viewFullCrateGraph },
expandMacro: { enabled: commands.expandMacro },
- run: { enabled: commands.run },
+ run: { enabled: (ctx) => (mode?: "cursor") => commands.run(ctx, mode)() },
copyRunCommandLine: { enabled: commands.copyRunCommandLine },
debug: { enabled: commands.debug },
newDebugConfig: { enabled: commands.newDebugConfig },
diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts
index 40027cc7c8..95166c427b 100644
--- a/editors/code/src/run.ts
+++ b/editors/code/src/run.ts
@@ -18,10 +18,15 @@ export async function selectRunnable(
prevRunnable?: RunnableQuickPick,
debuggeeOnly = false,
showButtons: boolean = true,
+ mode?: "cursor",
): Promise<RunnableQuickPick | undefined> {
const editor = ctx.activeRustEditor ?? ctx.activeCargoTomlEditor;
if (!editor) return;
+ if (mode === "cursor") {
+ return selectRunnableAtCursor(ctx, editor, prevRunnable);
+ }
+
// show a placeholder while we get the runnables from the server
const quickPick = vscode.window.createQuickPick();
quickPick.title = "Select Runnable";
@@ -54,6 +59,58 @@ export async function selectRunnable(
);
}
+async function selectRunnableAtCursor(
+ ctx: CtxInit,
+ editor: RustEditor,
+ prevRunnable?: RunnableQuickPick,
+): Promise<RunnableQuickPick | undefined> {
+ const runnableQuickPicks = await getRunnables(ctx.client, editor, prevRunnable, false);
+ let runnableQuickPickAtCursor = null;
+ const cursorPosition = ctx.client.code2ProtocolConverter.asPosition(editor.selection.active);
+ for (const runnableQuickPick of runnableQuickPicks) {
+ if (!runnableQuickPick.runnable.location?.targetRange) {
+ continue;
+ }
+ const runnableQuickPickRange = runnableQuickPick.runnable.location.targetRange;
+ if (
+ runnableQuickPickAtCursor?.runnable?.location?.targetRange != null &&
+ rangeContainsOtherRange(
+ runnableQuickPickRange,
+ runnableQuickPickAtCursor.runnable.location.targetRange,
+ )
+ ) {
+ continue;
+ }
+ if (rangeContainsPosition(runnableQuickPickRange, cursorPosition)) {
+ runnableQuickPickAtCursor = runnableQuickPick;
+ }
+ }
+ if (runnableQuickPickAtCursor == null) {
+ return;
+ }
+ return Promise.resolve(runnableQuickPickAtCursor);
+}
+
+function rangeContainsPosition(range: lc.Range, position: lc.Position): boolean {
+ return (
+ (position.line > range.start.line ||
+ (position.line === range.start.line && position.character >= range.start.character)) &&
+ (position.line < range.end.line ||
+ (position.line === range.end.line && position.character <= range.end.character))
+ );
+}
+
+function rangeContainsOtherRange(range: lc.Range, otherRange: lc.Range) {
+ return (
+ (range.start.line < otherRange.start.line ||
+ (range.start.line === otherRange.start.line &&
+ range.start.character <= otherRange.start.character)) &&
+ (range.end.line > otherRange.end.line ||
+ (range.end.line === otherRange.end.line &&
+ range.end.character >= otherRange.end.character))
+ );
+}
+
export class RunnableQuickPick implements vscode.QuickPickItem {
public label: string;
public description?: string | undefined;
diff --git a/rust-version b/rust-version
index a454087b0c..902793225e 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-27733d46d79f4eb92e240fbba502c43022665735
+ad3b7257615c28aaf8212a189ec032b8af75de51