Unnamed repository; edit this file 'description' to name the repository.
-rw-r--r--.github/workflows/gen-lints.yml35
-rw-r--r--.github/workflows/release.yaml2
-rw-r--r--.github/workflows/rustc-pull.yml1
-rw-r--r--Cargo.lock12
-rw-r--r--Cargo.toml2
-rw-r--r--crates/base-db/src/change.rs5
-rw-r--r--crates/base-db/src/editioned_file_id.rs57
-rw-r--r--crates/base-db/src/input.rs43
-rw-r--r--crates/base-db/src/lib.rs108
-rw-r--r--crates/hir-def/src/attrs.rs566
-rw-r--r--crates/hir-def/src/attrs/docs.rs749
-rw-r--r--crates/hir-def/src/db.rs4
-rw-r--r--crates/hir-def/src/expr_store.rs2
-rw-r--r--crates/hir-def/src/expr_store/lower.rs10
-rw-r--r--crates/hir-def/src/expr_store/scope.rs5
-rw-r--r--crates/hir-def/src/expr_store/tests/body.rs21
-rw-r--r--crates/hir-def/src/expr_store/tests/body/block.rs6
-rw-r--r--crates/hir-def/src/import_map.rs6
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mbe.rs16
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mod.rs3
-rw-r--r--crates/hir-def/src/nameres.rs2
-rw-r--r--crates/hir-def/src/nameres/collector.rs11
-rw-r--r--crates/hir-def/src/nameres/tests.rs1
-rw-r--r--crates/hir-def/src/nameres/tests/incremental.rs60
-rw-r--r--crates/hir-def/src/nameres/tests/macros.rs5
-rw-r--r--crates/hir-def/src/test_db.rs13
-rw-r--r--crates/hir-expand/src/builtin/fn_macro.rs2
-rw-r--r--crates/hir-expand/src/db.rs8
-rw-r--r--crates/hir-expand/src/files.rs8
-rw-r--r--crates/hir-expand/src/span_map.rs2
-rw-r--r--crates/hir-ty/src/builtin_derive.rs15
-rw-r--r--crates/hir-ty/src/consteval/tests.rs4
-rw-r--r--crates/hir-ty/src/display.rs4
-rw-r--r--crates/hir-ty/src/dyn_compatibility.rs10
-rw-r--r--crates/hir-ty/src/infer.rs100
-rw-r--r--crates/hir-ty/src/infer/expr.rs2
-rw-r--r--crates/hir-ty/src/infer/path.rs2
-rw-r--r--crates/hir-ty/src/lower.rs103
-rw-r--r--crates/hir-ty/src/method_resolution.rs2
-rw-r--r--crates/hir-ty/src/method_resolution/confirm.rs4
-rw-r--r--crates/hir-ty/src/method_resolution/probe.rs2
-rw-r--r--crates/hir-ty/src/mir/eval/shim.rs6
-rw-r--r--crates/hir-ty/src/next_solver/interner.rs92
-rw-r--r--crates/hir-ty/src/next_solver/ty.rs2
-rw-r--r--crates/hir-ty/src/specialization.rs2
-rw-r--r--crates/hir-ty/src/test_db.rs10
-rw-r--r--crates/hir-ty/src/tests/incremental.rs42
-rw-r--r--crates/hir-ty/src/tests/regression.rs15
-rw-r--r--crates/hir-ty/src/tests/traits.rs111
-rw-r--r--crates/hir/src/diagnostics.rs9
-rw-r--r--crates/hir/src/display.rs4
-rw-r--r--crates/hir/src/lib.rs20
-rw-r--r--crates/hir/src/semantics.rs14
-rw-r--r--crates/hir/src/semantics/source_to_def.rs5
-rw-r--r--crates/ide-assists/src/handlers/add_missing_impl_members.rs20
-rw-r--r--crates/ide-assists/src/handlers/add_missing_match_arms.rs103
-rw-r--r--crates/ide-assists/src/handlers/apply_demorgan.rs3
-rw-r--r--crates/ide-assists/src/handlers/auto_import.rs29
-rw-r--r--crates/ide-assists/src/handlers/convert_bool_then.rs6
-rw-r--r--crates/ide-assists/src/handlers/convert_for_to_while_let.rs4
-rw-r--r--crates/ide-assists/src/handlers/convert_let_else_to_match.rs89
-rw-r--r--crates/ide-assists/src/handlers/convert_match_to_let_else.rs12
-rw-r--r--crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs9
-rw-r--r--crates/ide-assists/src/handlers/convert_range_for_to_while.rs8
-rw-r--r--crates/ide-assists/src/handlers/convert_to_guarded_return.rs321
-rw-r--r--crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs8
-rw-r--r--crates/ide-assists/src/handlers/extract_function.rs111
-rw-r--r--crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs240
-rw-r--r--crates/ide-assists/src/handlers/extract_variable.rs33
-rw-r--r--crates/ide-assists/src/handlers/flip_binexpr.rs4
-rw-r--r--crates/ide-assists/src/handlers/generate_delegate_trait.rs10
-rw-r--r--crates/ide-assists/src/handlers/generate_getter_or_setter.rs2
-rw-r--r--crates/ide-assists/src/handlers/generate_impl.rs7
-rw-r--r--crates/ide-assists/src/handlers/generate_mut_trait_impl.rs3
-rw-r--r--crates/ide-assists/src/handlers/generate_single_field_struct_from.rs110
-rw-r--r--crates/ide-assists/src/handlers/generate_trait_from_impl.rs17
-rw-r--r--crates/ide-assists/src/handlers/inline_type_alias.rs22
-rw-r--r--crates/ide-assists/src/handlers/introduce_named_lifetime.rs6
-rw-r--r--crates/ide-assists/src/handlers/pull_assignment_up.rs4
-rw-r--r--crates/ide-assists/src/handlers/remove_dbg.rs9
-rw-r--r--crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs33
-rw-r--r--crates/ide-assists/src/handlers/replace_if_let_with_match.rs34
-rw-r--r--crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs3
-rw-r--r--crates/ide-assists/src/handlers/unwrap_block.rs4
-rw-r--r--crates/ide-assists/src/tests.rs22
-rw-r--r--crates/ide-assists/src/utils.rs102
-rw-r--r--crates/ide-assists/src/utils/gen_trait_fn_body.rs500
-rw-r--r--crates/ide-completion/src/completions.rs43
-rw-r--r--crates/ide-completion/src/completions/env_vars.rs50
-rw-r--r--crates/ide-completion/src/completions/flyimport.rs11
-rw-r--r--crates/ide-completion/src/completions/postfix.rs75
-rw-r--r--crates/ide-completion/src/completions/postfix/format_like.rs7
-rw-r--r--crates/ide-completion/src/completions/ra_fixture.rs2
-rw-r--r--crates/ide-completion/src/completions/type.rs8
-rw-r--r--crates/ide-completion/src/config.rs5
-rw-r--r--crates/ide-completion/src/context.rs30
-rw-r--r--crates/ide-completion/src/context/analysis.rs22
-rw-r--r--crates/ide-completion/src/item.rs36
-rw-r--r--crates/ide-completion/src/render.rs108
-rw-r--r--crates/ide-completion/src/tests.rs5
-rw-r--r--crates/ide-completion/src/tests/item.rs32
-rw-r--r--crates/ide-completion/src/tests/type_pos.rs226
-rw-r--r--crates/ide-db/src/imports/import_assets.rs170
-rw-r--r--crates/ide-db/src/imports/insert_use.rs12
-rw-r--r--crates/ide-db/src/lib.rs8
-rw-r--r--crates/ide-db/src/path_transform.rs67
-rw-r--r--crates/ide-db/src/prime_caches.rs11
-rw-r--r--crates/ide-db/src/ra_fixture.rs19
-rw-r--r--crates/ide-db/src/search.rs4
-rw-r--r--crates/ide-db/src/source_change.rs2
-rw-r--r--crates/ide-db/src/symbol_index.rs16
-rw-r--r--crates/ide-db/src/test_data/test_doc_alias.txt30
-rw-r--r--crates/ide-db/src/test_data/test_symbol_index_collection.txt138
-rw-r--r--crates/ide-db/src/test_data/test_symbols_exclude_imports.txt4
-rw-r--r--crates/ide-db/src/test_data/test_symbols_with_imports.txt8
-rw-r--r--crates/ide-diagnostics/src/handlers/inactive_code.rs42
-rw-r--r--crates/ide-diagnostics/src/handlers/missing_fields.rs57
-rw-r--r--crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs112
-rw-r--r--crates/ide-diagnostics/src/handlers/type_mismatch.rs178
-rw-r--r--crates/ide-diagnostics/src/handlers/unlinked_file.rs8
-rw-r--r--crates/ide-diagnostics/src/lib.rs22
-rw-r--r--crates/ide-ssr/src/from_comment.rs4
-rw-r--r--crates/ide-ssr/src/matching.rs4
-rw-r--r--crates/ide/src/annotations.rs15
-rw-r--r--crates/ide/src/call_hierarchy.rs12
-rw-r--r--crates/ide/src/doc_links.rs4
-rw-r--r--crates/ide/src/expand_macro.rs4
-rw-r--r--crates/ide/src/fetch_crates.rs4
-rw-r--r--crates/ide/src/folding_ranges.rs293
-rw-r--r--crates/ide/src/goto_declaration.rs4
-rw-r--r--crates/ide/src/goto_definition.rs10
-rw-r--r--crates/ide/src/hover.rs10
-rw-r--r--crates/ide/src/hover/tests.rs309
-rw-r--r--crates/ide/src/inlay_hints.rs22
-rw-r--r--crates/ide/src/inlay_hints/bind_pat.rs95
-rw-r--r--crates/ide/src/inlay_hints/chaining.rs107
-rw-r--r--crates/ide/src/inlay_hints/param_name.rs16
-rw-r--r--crates/ide/src/inlay_hints/ra_fixture.rs2
-rw-r--r--crates/ide/src/lib.rs35
-rw-r--r--crates/ide/src/matching_brace.rs48
-rw-r--r--crates/ide/src/navigation_target.rs5
-rw-r--r--crates/ide/src/parent_module.rs4
-rw-r--r--crates/ide/src/references.rs12
-rw-r--r--crates/ide/src/runnables.rs22
-rw-r--r--crates/ide/src/signature_help.rs4
-rw-r--r--crates/ide/src/static_index.rs26
-rw-r--r--crates/ide/src/syntax_highlighting.rs4
-rw-r--r--crates/ide/src/syntax_highlighting/html.rs4
-rw-r--r--crates/ide/src/syntax_highlighting/inject.rs6
-rw-r--r--crates/ide/src/syntax_highlighting/tests.rs4
-rw-r--r--crates/ide/src/test_explorer.rs8
-rw-r--r--crates/ide/src/typing.rs13
-rw-r--r--crates/ide/src/typing/on_enter.rs196
-rw-r--r--crates/ide/src/view_crate_graph.rs11
-rw-r--r--crates/load-cargo/src/lib.rs32
-rw-r--r--crates/parser/src/grammar.rs12
-rw-r--r--crates/parser/src/grammar/items.rs7
-rw-r--r--crates/parser/src/grammar/types.rs3
-rw-r--r--crates/parser/test_data/generated/runner.rs4
-rw-r--r--crates/parser/test_data/parser/err/0025_nope.rast2
-rw-r--r--crates/parser/test_data/parser/err/0027_incomplete_where_for.rast2
-rw-r--r--crates/parser/test_data/parser/inline/err/function_ret_type_missing_arrow.rast50
-rw-r--r--crates/parser/test_data/parser/inline/err/function_ret_type_missing_arrow.rs2
-rw-r--r--crates/profile/Cargo.toml4
-rw-r--r--crates/profile/src/stop_watch.rs32
-rw-r--r--crates/project-model/src/cargo_workspace.rs5
-rw-r--r--crates/project-model/src/env.rs119
-rw-r--r--crates/project-model/src/workspace.rs5
-rw-r--r--crates/rust-analyzer/src/cli/analysis_stats.rs9
-rw-r--r--crates/rust-analyzer/src/config.rs126
-rw-r--r--crates/rust-analyzer/src/flycheck.rs46
-rw-r--r--crates/rust-analyzer/src/handlers/request.rs27
-rw-r--r--crates/rust-analyzer/src/integrated_benchmarks.rs10
-rw-r--r--crates/rust-analyzer/src/lsp/capabilities.rs14
-rw-r--r--crates/rust-analyzer/src/lsp/to_proto.rs29
-rw-r--r--crates/rust-analyzer/src/main_loop.rs11
-rw-r--r--crates/rust-analyzer/src/reload.rs22
-rw-r--r--crates/rust-analyzer/src/target_spec.rs22
-rw-r--r--crates/rust-analyzer/tests/slow-tests/flycheck.rs42
-rw-r--r--crates/rust-analyzer/tests/slow-tests/main.rs2
-rw-r--r--crates/rust-analyzer/tests/slow-tests/ratoml.rs42
-rw-r--r--crates/rust-analyzer/tests/slow-tests/support.rs3
-rw-r--r--crates/syntax/src/ast/edit.rs6
-rw-r--r--crates/syntax/src/ast/node_ext.rs24
-rw-r--r--crates/syntax/src/ast/syntax_factory/constructors.rs52
-rw-r--r--crates/syntax/src/syntax_editor.rs171
-rw-r--r--crates/syntax/src/syntax_editor/edit_algo.rs55
-rw-r--r--crates/syntax/src/syntax_editor/edits.rs22
-rw-r--r--crates/syntax/src/syntax_editor/mapping.rs2
-rw-r--r--crates/test-fixture/src/lib.rs4
-rw-r--r--crates/vfs-notify/src/lib.rs35
-rw-r--r--crates/vfs/src/lib.rs2
-rw-r--r--docs/book/src/configuration_generated.md62
-rw-r--r--docs/book/src/contributing/lsp-extensions.md2
-rw-r--r--docs/book/src/other_editors.md69
-rw-r--r--editors/code/package-lock.json1249
-rw-r--r--editors/code/package.json68
-rw-r--r--editors/code/src/client.ts2
-rw-r--r--editors/code/src/commands.ts3
-rw-r--r--editors/code/src/config.ts2
-rw-r--r--editors/code/src/debug.ts2
-rw-r--r--editors/code/src/dependencies_provider.ts6
-rw-r--r--editors/code/src/diagnostics.ts2
-rw-r--r--editors/code/src/lsp_ext.ts8
-rw-r--r--editors/code/src/snippets.ts2
-rw-r--r--editors/code/src/toolchain.ts2
-rw-r--r--editors/code/tsconfig.json1
-rw-r--r--rust-version2
208 files changed, 6885 insertions, 2772 deletions
diff --git a/.github/workflows/gen-lints.yml b/.github/workflows/gen-lints.yml
new file mode 100644
index 0000000000..7319b2b326
--- /dev/null
+++ b/.github/workflows/gen-lints.yml
@@ -0,0 +1,35 @@
+name: Generate lints and feature flags
+
+on:
+ workflow_dispatch:
+ schedule:
+ - cron: '50 23 * * 6'
+
+defaults:
+ run:
+ shell: bash
+
+jobs:
+ lints-gen:
+ name: Generate lints
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v6
+
+ - name: Install nightly
+ run: rustup default nightly
+
+ - name: Generate lints/feature flags
+ run: cargo codegen lint-definitions
+
+ - name: Submit PR
+ uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
+ with:
+ commit-message: "internal: update generated lints"
+ branch: "ci/gen-lints"
+ delete-branch: true
+ sign-commits: true
+ title: "Update generated lints"
+ body: "Weekly lint updates for `crates/ide-db/src/generated/lints.rs`."
+ labels: "A-infra"
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index b35614f91b..ef61e397fc 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -264,8 +264,6 @@ jobs:
name: ${{ env.TAG }}
token: ${{ secrets.GITHUB_TOKEN }}
- - run: rm dist/rust-analyzer-no-server.vsix
-
- run: npm ci
working-directory: ./editors/code
diff --git a/.github/workflows/rustc-pull.yml b/.github/workflows/rustc-pull.yml
index 37cf5f3726..be3362b79b 100644
--- a/.github/workflows/rustc-pull.yml
+++ b/.github/workflows/rustc-pull.yml
@@ -12,6 +12,7 @@ jobs:
uses: rust-lang/josh-sync/.github/workflows/rustc-pull.yml@main
with:
github-app-id: ${{ vars.APP_CLIENT_ID }}
+ pr-author: "workflows-rust-analyzer[bot]"
zulip-stream-id: 185405
zulip-bot-email: "[email protected]"
pr-base-branch: master
diff --git a/Cargo.lock b/Cargo.lock
index 5370127ddc..770f8cb940 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1731,9 +1731,9 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
[[package]]
name = "perf-event"
-version = "0.4.7"
+version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5396562cd2eaa828445d6d34258ae21ee1eb9d40fe626ca7f51c8dccb4af9d66"
+checksum = "b4d6393d9238342159080d79b78cb59c67399a8e7ecfa5d410bd614169e4e823"
dependencies = [
"libc",
"perf-event-open-sys",
@@ -1741,9 +1741,9 @@ dependencies = [
[[package]]
name = "perf-event-open-sys"
-version = "1.0.1"
+version = "4.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ce9bedf5da2c234fdf2391ede2b90fabf585355f33100689bc364a3ea558561a"
+checksum = "7c44fb1c7651a45a3652c4afc6e754e40b3d6e6556f1487e2b230bfc4f33c2a8"
dependencies = [
"libc",
]
@@ -2283,9 +2283,9 @@ checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"
[[package]]
name = "rowan"
-version = "0.15.17"
+version = "0.15.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d4f1e4a001f863f41ea8d0e6a0c34b356d5b733db50dadab3efef640bafb779b"
+checksum = "62f509095fc8cc0c8c8564016771d458079c11a8d857e65861f045145c0d3208"
dependencies = [
"countme",
"hashbrown 0.14.5",
diff --git a/Cargo.toml b/Cargo.toml
index 9f31e1903a..3b3929df0d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -132,7 +132,7 @@ process-wrap = { version = "8.2.1", features = ["std"] }
pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.6", default-features = false }
rayon = "1.10.0"
-rowan = "=0.15.17"
+rowan = "=0.15.18"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
salsa = { version = "0.25.2", default-features = false, features = [
diff --git a/crates/base-db/src/change.rs b/crates/base-db/src/change.rs
index c728f3e5ca..4d4bf78cbc 100644
--- a/crates/base-db/src/change.rs
+++ b/crates/base-db/src/change.rs
@@ -9,7 +9,8 @@ use triomphe::Arc;
use vfs::FileId;
use crate::{
- CrateGraphBuilder, CratesIdMap, LibraryRoots, LocalRoots, RootQueryDb, SourceRoot, SourceRootId,
+ CrateGraphBuilder, CratesIdMap, LibraryRoots, LocalRoots, SourceDatabase, SourceRoot,
+ SourceRootId,
};
/// Encapsulate a bunch of raw `.set` calls on the database.
@@ -49,7 +50,7 @@ impl FileChange {
self.crate_graph = Some(graph);
}
- pub fn apply(self, db: &mut dyn RootQueryDb) -> Option<CratesIdMap> {
+ pub fn apply(self, db: &mut dyn SourceDatabase) -> Option<CratesIdMap> {
let _p = tracing::info_span!("FileChange::apply").entered();
if let Some(roots) = self.roots {
let mut local_roots = FxHashSet::default();
diff --git a/crates/base-db/src/editioned_file_id.rs b/crates/base-db/src/editioned_file_id.rs
index 8721f3a0ff..a77b45f8ae 100644
--- a/crates/base-db/src/editioned_file_id.rs
+++ b/crates/base-db/src/editioned_file_id.rs
@@ -5,14 +5,71 @@ use std::hash::Hash;
use salsa::Database;
use span::Edition;
+use syntax::{SyntaxError, ast};
use vfs::FileId;
+use crate::SourceDatabase;
+
#[salsa::interned(debug, constructor = from_span_file_id, no_lifetime)]
#[derive(PartialOrd, Ord)]
pub struct EditionedFileId {
field: span::EditionedFileId,
}
+// Currently does not work due to a salsa bug
+// #[salsa::tracked]
+// impl EditionedFileId {
+// #[salsa::tracked(lru = 128)]
+// pub fn parse(self, db: &dyn SourceDatabase) -> syntax::Parse<ast::SourceFile> {
+// let _p = tracing::info_span!("parse", ?self).entered();
+// let (file_id, edition) = self.unpack(db);
+// let text = db.file_text(file_id).text(db);
+// ast::SourceFile::parse(text, edition)
+// }
+
+// // firewall query
+// #[salsa::tracked(returns(as_deref))]
+// pub fn parse_errors(self, db: &dyn SourceDatabase) -> Option<Box<[SyntaxError]>> {
+// let errors = self.parse(db).errors();
+// match &*errors {
+// [] => None,
+// [..] => Some(errors.into()),
+// }
+// }
+// }
+
+impl EditionedFileId {
+ pub fn parse(self, db: &dyn SourceDatabase) -> syntax::Parse<ast::SourceFile> {
+ #[salsa::tracked(lru = 128)]
+ pub fn parse(
+ db: &dyn SourceDatabase,
+ file_id: EditionedFileId,
+ ) -> syntax::Parse<ast::SourceFile> {
+ let _p = tracing::info_span!("parse", ?file_id).entered();
+ let (file_id, edition) = file_id.unpack(db);
+ let text = db.file_text(file_id).text(db);
+ ast::SourceFile::parse(text, edition)
+ }
+ parse(db, self)
+ }
+
+ // firewall query
+ pub fn parse_errors(self, db: &dyn SourceDatabase) -> Option<&[SyntaxError]> {
+ #[salsa::tracked(returns(as_deref))]
+ pub fn parse_errors(
+ db: &dyn SourceDatabase,
+ file_id: EditionedFileId,
+ ) -> Option<Box<[SyntaxError]>> {
+ let errors = file_id.parse(db).errors();
+ match &*errors {
+ [] => None,
+ [..] => Some(errors.into()),
+ }
+ }
+ parse_errors(db, self)
+ }
+}
+
impl EditionedFileId {
#[inline]
pub fn new(db: &dyn Database, file_id: FileId, edition: Edition) -> Self {
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 246c57edc2..38f9c5a5a1 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -21,7 +21,10 @@ use span::Edition;
use triomphe::Arc;
use vfs::{AbsPathBuf, AnchoredPath, FileId, VfsPath, file_set::FileSet};
-use crate::{CrateWorkspaceData, EditionedFileId, FxIndexSet, RootQueryDb};
+use crate::{
+ CrateWorkspaceData, EditionedFileId, FxIndexSet, SourceDatabase, all_crates,
+ set_all_crates_with_durability,
+};
pub type ProcMacroPaths =
FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), ProcMacroLoadingError>>;
@@ -490,13 +493,13 @@ impl Crate {
/// including the crate itself.
///
/// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
- pub fn transitive_rev_deps(self, db: &dyn RootQueryDb) -> Box<[Crate]> {
+ pub fn transitive_rev_deps(self, db: &dyn SourceDatabase) -> Box<[Crate]> {
let mut worklist = vec![self];
let mut rev_deps = FxHashSet::default();
rev_deps.insert(self);
let mut inverted_graph = FxHashMap::<_, Vec<_>>::default();
- db.all_crates().iter().for_each(|&krate| {
+ all_crates(db).iter().for_each(|&krate| {
krate
.data(db)
.dependencies
@@ -586,15 +589,15 @@ impl CrateGraphBuilder {
Ok(())
}
- pub fn set_in_db(self, db: &mut dyn RootQueryDb) -> CratesIdMap {
+ pub fn set_in_db(self, db: &mut dyn SourceDatabase) -> CratesIdMap {
+ let old_all_crates = all_crates(db);
+
// For some reason in some repositories we have duplicate crates, so we use a set and not `Vec`.
// We use an `IndexSet` because the list needs to be topologically sorted.
let mut all_crates = FxIndexSet::with_capacity_and_hasher(self.arena.len(), FxBuildHasher);
let mut visited = FxHashMap::default();
let mut visited_root_files = FxHashSet::default();
- let old_all_crates = db.all_crates();
-
let crates_map = db.crates_map();
// salsa doesn't compare new input to old input to see if they are the same, so here we are doing all the work ourselves.
for krate in self.iter() {
@@ -612,17 +615,14 @@ impl CrateGraphBuilder {
if old_all_crates.len() != all_crates.len()
|| old_all_crates.iter().any(|&krate| !all_crates.contains(&krate))
{
- db.set_all_crates_with_durability(
- Arc::new(Vec::from_iter(all_crates).into_boxed_slice()),
- Durability::MEDIUM,
- );
+ set_all_crates_with_durability(db, all_crates, Durability::MEDIUM);
}
return visited;
fn go(
graph: &CrateGraphBuilder,
- db: &mut dyn RootQueryDb,
+ db: &mut dyn SourceDatabase,
crates_map: &CratesMap,
visited: &mut FxHashMap<CrateBuilderId, Crate>,
visited_root_files: &mut FxHashSet<FileId>,
@@ -929,6 +929,27 @@ impl<'a> IntoIterator for &'a Env {
}
}
+/// The crate graph had a cycle. This is typically a bug, and
+/// rust-analyzer logs a warning when it encounters a cycle. Generally
+/// rust-analyzer will continue working OK in the presence of a cycle,
+/// but it's better to have an accurate crate graph.
+///
+/// ## dev-dependencies
+///
+/// Note that it's actually legal for a cargo package (i.e. a thing
+/// with a Cargo.toml) to depend on itself in dev-dependencies. This
+/// can enable additional features, and is typically used when a
+/// project wants features to be enabled in tests. Dev-dependencies
+/// are not propagated, so they aren't visible to package that depend
+/// on this one.
+///
+/// <https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#development-dependencies>
+///
+/// However, rust-analyzer constructs its crate graph from Cargo
+/// metadata, so it can end up producing a cyclic crate graph from a
+/// well-formed package graph.
+///
+/// <https://github.com/rust-lang/rust-analyzer/issues/14167>
#[derive(Debug)]
pub struct CyclicDependenciesError {
path: Vec<(CrateBuilderId, Option<CrateDisplayName>)>,
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 5baf4ce6f9..e438505c07 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -36,7 +36,6 @@ pub use query_group;
use rustc_hash::{FxHashSet, FxHasher};
use salsa::{Durability, Setter};
pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
-use syntax::{Parse, SyntaxError, ast};
use triomphe::Arc;
pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet};
@@ -236,36 +235,6 @@ pub struct SourceRootInput {
pub source_root: Arc<SourceRoot>,
}
-/// Database which stores all significant input facts: source code and project
-/// model. Everything else in rust-analyzer is derived from these queries.
-#[query_group::query_group]
-pub trait RootQueryDb: SourceDatabase + salsa::Database {
- /// Parses the file into the syntax tree.
- #[salsa::invoke(parse)]
- #[salsa::lru(128)]
- fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;
-
- /// Returns the set of errors obtained from parsing the file including validation errors.
- #[salsa::transparent]
- fn parse_errors(&self, file_id: EditionedFileId) -> Option<&[SyntaxError]>;
-
- #[salsa::transparent]
- fn toolchain_channel(&self, krate: Crate) -> Option<ReleaseChannel>;
-
- /// Crates whose root file is in `id`.
- #[salsa::invoke_interned(source_root_crates)]
- fn source_root_crates(&self, id: SourceRootId) -> Arc<[Crate]>;
-
- #[salsa::transparent]
- fn relevant_crates(&self, file_id: FileId) -> Arc<[Crate]>;
-
- /// Returns the crates in topological order.
- ///
- /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
- #[salsa::input]
- fn all_crates(&self) -> Arc<Box<[Crate]>>;
-}
-
#[salsa_macros::db]
pub trait SourceDatabase: salsa::Database {
/// Text of the file.
@@ -353,46 +322,67 @@ impl CrateWorkspaceData {
}
}
-fn toolchain_channel(db: &dyn RootQueryDb, krate: Crate) -> Option<ReleaseChannel> {
+pub fn toolchain_channel(db: &dyn salsa::Database, krate: Crate) -> Option<ReleaseChannel> {
krate.workspace_data(db).toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
}
-fn parse(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Parse<ast::SourceFile> {
- let _p = tracing::info_span!("parse", ?file_id).entered();
- let (file_id, edition) = file_id.unpack(db.as_dyn_database());
- let text = db.file_text(file_id).text(db);
- ast::SourceFile::parse(text, edition)
+#[salsa::input(singleton, debug)]
+struct AllCrates {
+ crates: std::sync::Arc<[Crate]>,
}
-fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<&[SyntaxError]> {
- #[salsa_macros::tracked(returns(ref))]
- fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<Box<[SyntaxError]>> {
- let errors = db.parse(file_id).errors();
- match &*errors {
- [] => None,
- [..] => Some(errors.into()),
- }
- }
- parse_errors(db, file_id).as_ref().map(|it| &**it)
+pub fn set_all_crates_with_durability(
+ db: &mut dyn salsa::Database,
+ crates: impl IntoIterator<Item = Crate>,
+ durability: Durability,
+) {
+ AllCrates::try_get(db)
+ .unwrap_or_else(|| AllCrates::new(db, std::sync::Arc::default()))
+ .set_crates(db)
+ .with_durability(durability)
+ .to(crates.into_iter().collect());
}
-fn source_root_crates(db: &dyn RootQueryDb, id: SourceRootId) -> Arc<[Crate]> {
- let crates = db.all_crates();
- crates
- .iter()
- .copied()
- .filter(|&krate| {
- let root_file = krate.data(db).root_file_id;
- db.file_source_root(root_file).source_root_id(db) == id
- })
- .collect()
+/// Returns the crates in topological order.
+///
+/// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
+pub fn all_crates(db: &dyn salsa::Database) -> std::sync::Arc<[Crate]> {
+ AllCrates::try_get(db).map_or(std::sync::Arc::default(), |all_crates| all_crates.crates(db))
+}
+
+// FIXME: VFS rewrite should allow us to get rid of this wrapper
+#[doc(hidden)]
+#[salsa::interned]
+pub struct InternedSourceRootId {
+ pub id: SourceRootId,
+}
+
+/// Crates whose root file is in `id`.
+pub fn source_root_crates(db: &dyn SourceDatabase, id: SourceRootId) -> &[Crate] {
+ #[salsa::tracked(returns(deref))]
+ pub fn source_root_crates<'db>(
+ db: &'db dyn SourceDatabase,
+ id: InternedSourceRootId<'db>,
+ ) -> Box<[Crate]> {
+ let crates = AllCrates::get(db).crates(db);
+ let id = id.id(db);
+ crates
+ .iter()
+ .copied()
+ .filter(|&krate| {
+ let root_file = krate.data(db).root_file_id;
+ db.file_source_root(root_file).source_root_id(db) == id
+ })
+ .collect()
+ }
+ source_root_crates(db, InternedSourceRootId::new(db, id))
}
-fn relevant_crates(db: &dyn RootQueryDb, file_id: FileId) -> Arc<[Crate]> {
+pub fn relevant_crates(db: &dyn SourceDatabase, file_id: FileId) -> &[Crate] {
let _p = tracing::info_span!("relevant_crates").entered();
let source_root = db.file_source_root(file_id);
- db.source_root_crates(source_root.source_root_id(db))
+ source_root_crates(db, source_root.source_root_id(db))
}
#[must_use]
diff --git a/crates/hir-def/src/attrs.rs b/crates/hir-def/src/attrs.rs
index e3e1aac709..dddfe8cefd 100644
--- a/crates/hir-def/src/attrs.rs
+++ b/crates/hir-def/src/attrs.rs
@@ -12,25 +12,17 @@
//! its value. This way, queries are only called on items that have the attribute, which is
//! usually only a few.
//!
-//! An exception to this model that is also defined in this module is documentation (doc
-//! comments and `#[doc = "..."]` attributes). But it also has a more compact form than
-//! the attribute: a concatenated string of the full docs as well as a source map
-//! to map it back to AST (which is needed for things like resolving links in doc comments
-//! and highlight injection). The lowering and upmapping of doc comments is a bit complicated,
-//! but it is encapsulated in the [`Docs`] struct.
-
-use std::{
- convert::Infallible,
- iter::Peekable,
- ops::{ControlFlow, Range},
-};
+//! Documentation (doc comments and `#[doc = "..."]` attributes) is handled by the [`docs`]
+//! submodule.
+
+use std::{convert::Infallible, iter::Peekable, ops::ControlFlow};
use base_db::Crate;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
- HirFileId, InFile, Lookup,
- attrs::{Meta, expand_cfg_attr, expand_cfg_attr_with_doc_comments},
+ InFile, Lookup,
+ attrs::{Meta, expand_cfg_attr},
};
use intern::Symbol;
use itertools::Itertools;
@@ -39,10 +31,10 @@ use rustc_abi::ReprOptions;
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
use syntax::{
- AstNode, AstToken, NodeOrToken, SmolStr, SourceFile, SyntaxNode, SyntaxToken, T,
- ast::{self, AttrDocCommentIter, HasAttrs, IsString, TokenTreeChildren},
+ AstNode, AstToken, NodeOrToken, SmolStr, SourceFile, T,
+ ast::{self, HasAttrs, TokenTreeChildren},
};
-use tt::{TextRange, TextSize};
+use tt::TextSize;
use crate::{
AdtId, AstIdLoc, AttrDefId, FieldId, FunctionId, GenericDefId, HasModule, LifetimeParamId,
@@ -50,9 +42,14 @@ use crate::{
db::DefDatabase,
hir::generics::{GenericParams, LocalLifetimeParamId, LocalTypeOrConstParamId},
nameres::ModuleOrigin,
+ resolver::{HasResolver, Resolver},
src::{HasChildSource, HasSource},
};
+pub mod docs;
+
+pub use self::docs::{Docs, IsInnerDoc};
+
#[inline]
fn attrs_from_ast_id_loc<N: AstNode + Into<ast::AnyHasAttrs>>(
db: &dyn DefDatabase,
@@ -354,13 +351,13 @@ fn attrs_source(
let krate = def_map.krate();
let (definition, declaration, extra_crate_attrs) = match def_map[id].origin {
ModuleOrigin::CrateRoot { definition } => {
- let definition_source = db.parse(definition).tree();
+ let definition_source = definition.parse(db).tree();
let definition = InFile::new(definition.into(), definition_source.into());
let extra_crate_attrs = parse_extra_crate_attrs(db, krate);
(definition, None, extra_crate_attrs)
}
ModuleOrigin::File { declaration, declaration_tree_id, definition, .. } => {
- let definition_source = db.parse(definition).tree();
+ let definition_source = definition.parse(db).tree();
let definition = InFile::new(definition.into(), definition_source.into());
let declaration = InFile::new(declaration_tree_id.file_id(), declaration);
let declaration = declaration.with_value(declaration.to_node(db));
@@ -398,6 +395,28 @@ fn attrs_source(
(owner, None, None, krate)
}
+fn resolver_for_attr_def_id(db: &dyn DefDatabase, owner: AttrDefId) -> Resolver<'_> {
+ match owner {
+ AttrDefId::ModuleId(id) => id.resolver(db),
+ AttrDefId::AdtId(AdtId::StructId(id)) => id.resolver(db),
+ AttrDefId::AdtId(AdtId::UnionId(id)) => id.resolver(db),
+ AttrDefId::AdtId(AdtId::EnumId(id)) => id.resolver(db),
+ AttrDefId::FunctionId(id) => id.resolver(db),
+ AttrDefId::EnumVariantId(id) => id.resolver(db),
+ AttrDefId::StaticId(id) => id.resolver(db),
+ AttrDefId::ConstId(id) => id.resolver(db),
+ AttrDefId::TraitId(id) => id.resolver(db),
+ AttrDefId::TypeAliasId(id) => id.resolver(db),
+ AttrDefId::MacroId(MacroId::Macro2Id(id)) => id.resolver(db),
+ AttrDefId::MacroId(MacroId::MacroRulesId(id)) => id.resolver(db),
+ AttrDefId::MacroId(MacroId::ProcMacroId(id)) => id.resolver(db),
+ AttrDefId::ImplId(id) => id.resolver(db),
+ AttrDefId::ExternBlockId(id) => id.resolver(db),
+ AttrDefId::ExternCrateId(id) => id.resolver(db),
+ AttrDefId::UseId(id) => id.resolver(db),
+ }
+}
+
fn collect_attrs<BreakValue>(
db: &dyn DefDatabase,
owner: AttrDefId,
@@ -475,282 +494,6 @@ pub struct RustcLayoutScalarValidRange {
pub end: Option<u128>,
}
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-struct DocsSourceMapLine {
- /// The offset in [`Docs::docs`].
- string_offset: TextSize,
- /// The offset in the AST of the text.
- ast_offset: TextSize,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Docs {
- /// The concatenated string of all `#[doc = "..."]` attributes and documentation comments.
- docs: String,
- /// A sorted map from an offset in `docs` to an offset in the source code.
- docs_source_map: Vec<DocsSourceMapLine>,
- /// If the item is an outlined module (`mod foo;`), `docs_source_map` store the concatenated
- /// list of the outline and inline docs (outline first). Then, this field contains the [`HirFileId`]
- /// of the outline declaration, and the index in `docs` from which the inline docs
- /// begin.
- outline_mod: Option<(HirFileId, usize)>,
- inline_file: HirFileId,
- /// The size the prepended prefix, which does not map to real doc comments.
- prefix_len: TextSize,
- /// The offset in `docs` from which the docs are inner attributes/comments.
- inline_inner_docs_start: Option<TextSize>,
- /// Like `inline_inner_docs_start`, but for `outline_mod`. This can happen only when merging `Docs`
- /// (as outline modules don't have inner attributes).
- outline_inner_docs_start: Option<TextSize>,
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum IsInnerDoc {
- No,
- Yes,
-}
-
-impl IsInnerDoc {
- #[inline]
- pub fn yes(self) -> bool {
- self == IsInnerDoc::Yes
- }
-}
-
-impl Docs {
- #[inline]
- pub fn docs(&self) -> &str {
- &self.docs
- }
-
- #[inline]
- pub fn into_docs(self) -> String {
- self.docs
- }
-
- pub fn find_ast_range(
- &self,
- mut string_range: TextRange,
- ) -> Option<(InFile<TextRange>, IsInnerDoc)> {
- if string_range.start() < self.prefix_len {
- return None;
- }
- string_range -= self.prefix_len;
-
- let mut file = self.inline_file;
- let mut inner_docs_start = self.inline_inner_docs_start;
- // Check whether the range is from the outline, the inline, or both.
- let source_map = if let Some((outline_mod_file, outline_mod_end)) = self.outline_mod {
- if let Some(first_inline) = self.docs_source_map.get(outline_mod_end) {
- if string_range.end() <= first_inline.string_offset {
- // The range is completely in the outline.
- file = outline_mod_file;
- inner_docs_start = self.outline_inner_docs_start;
- &self.docs_source_map[..outline_mod_end]
- } else if string_range.start() >= first_inline.string_offset {
- // The range is completely in the inline.
- &self.docs_source_map[outline_mod_end..]
- } else {
- // The range is combined from the outline and the inline - cannot map it back.
- return None;
- }
- } else {
- // There is no inline.
- file = outline_mod_file;
- inner_docs_start = self.outline_inner_docs_start;
- &self.docs_source_map
- }
- } else {
- // There is no outline.
- &self.docs_source_map
- };
-
- let after_range =
- source_map.partition_point(|line| line.string_offset <= string_range.start()) - 1;
- let after_range = &source_map[after_range..];
- let line = after_range.first()?;
- if after_range.get(1).is_some_and(|next_line| next_line.string_offset < string_range.end())
- {
- // The range is combined from two lines - cannot map it back.
- return None;
- }
- let ast_range = string_range - line.string_offset + line.ast_offset;
- let is_inner = if inner_docs_start
- .is_some_and(|inner_docs_start| string_range.start() >= inner_docs_start)
- {
- IsInnerDoc::Yes
- } else {
- IsInnerDoc::No
- };
- Some((InFile::new(file, ast_range), is_inner))
- }
-
- #[inline]
- pub fn shift_by(&mut self, offset: TextSize) {
- self.prefix_len += offset;
- }
-
- pub fn prepend_str(&mut self, s: &str) {
- self.prefix_len += TextSize::of(s);
- self.docs.insert_str(0, s);
- }
-
- pub fn append_str(&mut self, s: &str) {
- self.docs.push_str(s);
- }
-
- pub fn append(&mut self, other: &Docs) {
- let other_offset = TextSize::of(&self.docs);
-
- assert!(
- self.outline_mod.is_none() && other.outline_mod.is_none(),
- "cannot merge `Docs` that have `outline_mod` set"
- );
- self.outline_mod = Some((self.inline_file, self.docs_source_map.len()));
- self.inline_file = other.inline_file;
- self.outline_inner_docs_start = self.inline_inner_docs_start;
- self.inline_inner_docs_start = other.inline_inner_docs_start.map(|it| it + other_offset);
-
- self.docs.push_str(&other.docs);
- self.docs_source_map.extend(other.docs_source_map.iter().map(
- |&DocsSourceMapLine { string_offset, ast_offset }| DocsSourceMapLine {
- ast_offset,
- string_offset: string_offset + other_offset,
- },
- ));
- }
-
- fn extend_with_doc_comment(&mut self, comment: ast::Comment, indent: &mut usize) {
- let Some((doc, offset)) = comment.doc_comment() else { return };
- self.extend_with_doc_str(doc, comment.syntax().text_range().start() + offset, indent);
- }
-
- fn extend_with_doc_attr(&mut self, value: SyntaxToken, indent: &mut usize) {
- let Some(value) = ast::String::cast(value) else { return };
- let Some(value_offset) = value.text_range_between_quotes() else { return };
- let value_offset = value_offset.start();
- let Ok(value) = value.value() else { return };
- // FIXME: Handle source maps for escaped text.
- self.extend_with_doc_str(&value, value_offset, indent);
- }
-
- fn extend_with_doc_str(&mut self, doc: &str, mut offset_in_ast: TextSize, indent: &mut usize) {
- for line in doc.split('\n') {
- self.docs_source_map.push(DocsSourceMapLine {
- string_offset: TextSize::of(&self.docs),
- ast_offset: offset_in_ast,
- });
- offset_in_ast += TextSize::of(line) + TextSize::of("\n");
-
- let line = line.trim_end();
- if let Some(line_indent) = line.chars().position(|ch| !ch.is_whitespace()) {
- // Empty lines are handled because `position()` returns `None` for them.
- *indent = std::cmp::min(*indent, line_indent);
- }
- self.docs.push_str(line);
- self.docs.push('\n');
- }
- }
-
- fn remove_indent(&mut self, indent: usize, start_source_map_index: usize) {
- /// In case of panics, we want to avoid corrupted UTF-8 in `self.docs`, so we clear it.
- struct Guard<'a>(&'a mut Docs);
- impl Drop for Guard<'_> {
- fn drop(&mut self) {
- let Docs {
- docs,
- docs_source_map,
- outline_mod,
- inline_file: _,
- prefix_len: _,
- inline_inner_docs_start: _,
- outline_inner_docs_start: _,
- } = self.0;
- // Don't use `String::clear()` here because it's not guaranteed to not do UTF-8-dependent things,
- // and we may have temporarily broken the string's encoding.
- unsafe { docs.as_mut_vec() }.clear();
- // This is just to avoid panics down the road.
- docs_source_map.clear();
- *outline_mod = None;
- }
- }
-
- if self.docs.is_empty() {
- return;
- }
-
- let guard = Guard(self);
- let source_map = &mut guard.0.docs_source_map[start_source_map_index..];
- let Some(&DocsSourceMapLine { string_offset: mut copy_into, .. }) = source_map.first()
- else {
- return;
- };
- // We basically want to remove multiple ranges from a string. Doing this efficiently (without O(N^2)
- // or allocations) requires unsafe. Basically, for each line, we copy the line minus the indent into
- // consecutive to the previous line (which may have moved). Then at the end we truncate.
- let mut accumulated_offset = TextSize::new(0);
- for idx in 0..source_map.len() {
- let string_end_offset = source_map
- .get(idx + 1)
- .map_or_else(|| TextSize::of(&guard.0.docs), |next_attr| next_attr.string_offset);
- let line_source = &mut source_map[idx];
- let line_docs =
- &guard.0.docs[TextRange::new(line_source.string_offset, string_end_offset)];
- let line_docs_len = TextSize::of(line_docs);
- let indent_size = line_docs.char_indices().nth(indent).map_or_else(
- || TextSize::of(line_docs) - TextSize::of("\n"),
- |(offset, _)| TextSize::new(offset as u32),
- );
- unsafe { guard.0.docs.as_bytes_mut() }.copy_within(
- Range::<usize>::from(TextRange::new(
- line_source.string_offset + indent_size,
- string_end_offset,
- )),
- copy_into.into(),
- );
- copy_into += line_docs_len - indent_size;
-
- if let Some(inner_attrs_start) = &mut guard.0.inline_inner_docs_start
- && *inner_attrs_start == line_source.string_offset
- {
- *inner_attrs_start -= accumulated_offset;
- }
- // The removals in the string accumulate, but in the AST not, because it already points
- // to the beginning of each attribute.
- // Also, we need to shift the AST offset of every line, but the string offset of the first
- // line should not get shifted (in general, the shift for the string offset is by the
- // number of lines until the current one, excluding the current one).
- line_source.string_offset -= accumulated_offset;
- line_source.ast_offset += indent_size;
-
- accumulated_offset += indent_size;
- }
- // Don't use `String::truncate()` here because it's not guaranteed to not do UTF-8-dependent things,
- // and we may have temporarily broken the string's encoding.
- unsafe { guard.0.docs.as_mut_vec() }.truncate(copy_into.into());
-
- std::mem::forget(guard);
- }
-
- fn remove_last_newline(&mut self) {
- self.docs.truncate(self.docs.len().saturating_sub(1));
- }
-
- fn shrink_to_fit(&mut self) {
- let Docs {
- docs,
- docs_source_map,
- outline_mod: _,
- inline_file: _,
- prefix_len: _,
- inline_inner_docs_start: _,
- outline_inner_docs_start: _,
- } = self;
- docs.shrink_to_fit();
- docs_source_map.shrink_to_fit();
- }
-}
-
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct DeriveInfo {
pub trait_name: Symbol,
@@ -785,76 +528,6 @@ fn extract_cfgs(result: &mut Vec<CfgExpr>, attr: Meta) -> ControlFlow<Infallible
ControlFlow::Continue(())
}
-fn extract_docs<'a>(
- get_cfg_options: &dyn Fn() -> &'a CfgOptions,
- source: InFile<ast::AnyHasAttrs>,
- outer_mod_decl: Option<InFile<ast::Module>>,
- inner_attrs_node: Option<SyntaxNode>,
-) -> Option<Box<Docs>> {
- let mut result = Docs {
- docs: String::new(),
- docs_source_map: Vec::new(),
- outline_mod: None,
- inline_file: source.file_id,
- prefix_len: TextSize::new(0),
- inline_inner_docs_start: None,
- outline_inner_docs_start: None,
- };
-
- let mut cfg_options = None;
- let mut extend_with_attrs =
- |result: &mut Docs, node: &SyntaxNode, expect_inner_attrs, indent: &mut usize| {
- expand_cfg_attr_with_doc_comments::<_, Infallible>(
- AttrDocCommentIter::from_syntax_node(node).filter(|attr| match attr {
- Either::Left(attr) => attr.kind().is_inner() == expect_inner_attrs,
- Either::Right(comment) => comment.kind().doc.is_some_and(|kind| {
- (kind == ast::CommentPlacement::Inner) == expect_inner_attrs
- }),
- }),
- || cfg_options.get_or_insert_with(get_cfg_options),
- |attr| {
- match attr {
- Either::Right(doc_comment) => {
- result.extend_with_doc_comment(doc_comment, indent)
- }
- Either::Left((attr, _, _, _)) => match attr {
- // FIXME: Handle macros: `#[doc = concat!("foo", "bar")]`.
- Meta::NamedKeyValue {
- name: Some(name), value: Some(value), ..
- } if name.text() == "doc" => {
- result.extend_with_doc_attr(value, indent);
- }
- _ => {}
- },
- }
- ControlFlow::Continue(())
- },
- );
- };
-
- if let Some(outer_mod_decl) = outer_mod_decl {
- let mut indent = usize::MAX;
- extend_with_attrs(&mut result, outer_mod_decl.value.syntax(), false, &mut indent);
- result.remove_indent(indent, 0);
- result.outline_mod = Some((outer_mod_decl.file_id, result.docs_source_map.len()));
- }
-
- let inline_source_map_start = result.docs_source_map.len();
- let mut indent = usize::MAX;
- extend_with_attrs(&mut result, source.value.syntax(), false, &mut indent);
- if let Some(inner_attrs_node) = &inner_attrs_node {
- result.inline_inner_docs_start = Some(TextSize::of(&result.docs));
- extend_with_attrs(&mut result, inner_attrs_node, true, &mut indent);
- }
- result.remove_indent(indent, inline_source_map_start);
-
- result.remove_last_newline();
-
- result.shrink_to_fit();
-
- if result.docs.is_empty() { None } else { Some(Box::new(result)) }
-}
-
#[salsa::tracked]
impl AttrFlags {
#[salsa::tracked]
@@ -1069,7 +742,7 @@ impl AttrFlags {
#[salsa::tracked(returns(ref))]
pub fn doc_html_root_url(db: &dyn DefDatabase, krate: Crate) -> Option<SmolStr> {
let root_file_id = krate.root_file_id(db);
- let syntax = db.parse(root_file_id).tree();
+ let syntax = root_file_id.parse(db).tree();
let extra_crate_attrs =
parse_extra_crate_attrs(db, krate).into_iter().flat_map(|src| src.attrs());
@@ -1295,7 +968,15 @@ impl AttrFlags {
// Note: we don't have to pass down `_extra_crate_attrs` here, since `extract_docs`
// does not handle crate-level attributes related to docs.
// See: https://doc.rust-lang.org/rustdoc/write-documentation/the-doc-attribute.html#at-the-crate-level
- extract_docs(&|| krate.cfg_options(db), source, outer_mod_decl, inner_attrs_node)
+ self::docs::extract_docs(
+ db,
+ krate,
+ &|| resolver_for_attr_def_id(db, owner),
+ &|| krate.cfg_options(db),
+ source,
+ outer_mod_decl,
+ inner_attrs_node,
+ )
}
#[inline]
@@ -1308,8 +989,17 @@ impl AttrFlags {
db: &dyn DefDatabase,
variant: VariantId,
) -> ArenaMap<LocalFieldId, Option<Box<Docs>>> {
+ let krate = variant.module(db).krate(db);
collect_field_attrs(db, variant, |cfg_options, field| {
- extract_docs(&|| cfg_options, field, None, None)
+ self::docs::extract_docs(
+ db,
+ krate,
+ &|| variant.resolver(db),
+ &|| cfg_options,
+ field,
+ None,
+ None,
+ )
})
}
}
@@ -1537,151 +1227,13 @@ fn next_doc_expr(it: &mut Peekable<TokenTreeChildren>) -> Option<DocAtom> {
#[cfg(test)]
mod tests {
- use expect_test::expect;
- use hir_expand::InFile;
use test_fixture::WithFixture;
- use tt::{TextRange, TextSize};
use crate::AttrDefId;
- use crate::attrs::{AttrFlags, Docs, IsInnerDoc};
+ use crate::attrs::AttrFlags;
use crate::test_db::TestDB;
#[test]
- fn docs() {
- let (_db, file_id) = TestDB::with_single_file("");
- let mut docs = Docs {
- docs: String::new(),
- docs_source_map: Vec::new(),
- outline_mod: None,
- inline_file: file_id.into(),
- prefix_len: TextSize::new(0),
- inline_inner_docs_start: None,
- outline_inner_docs_start: None,
- };
- let mut indent = usize::MAX;
-
- let outer = " foo\n\tbar baz";
- let mut ast_offset = TextSize::new(123);
- for line in outer.split('\n') {
- docs.extend_with_doc_str(line, ast_offset, &mut indent);
- ast_offset += TextSize::of(line) + TextSize::of("\n");
- }
-
- docs.inline_inner_docs_start = Some(TextSize::of(&docs.docs));
- ast_offset += TextSize::new(123);
- let inner = " bar \n baz";
- for line in inner.split('\n') {
- docs.extend_with_doc_str(line, ast_offset, &mut indent);
- ast_offset += TextSize::of(line) + TextSize::of("\n");
- }
-
- assert_eq!(indent, 1);
- expect![[r#"
- [
- DocsSourceMapLine {
- string_offset: 0,
- ast_offset: 123,
- },
- DocsSourceMapLine {
- string_offset: 5,
- ast_offset: 128,
- },
- DocsSourceMapLine {
- string_offset: 15,
- ast_offset: 261,
- },
- DocsSourceMapLine {
- string_offset: 20,
- ast_offset: 267,
- },
- ]
- "#]]
- .assert_debug_eq(&docs.docs_source_map);
-
- docs.remove_indent(indent, 0);
-
- assert_eq!(docs.inline_inner_docs_start, Some(TextSize::new(13)));
-
- assert_eq!(docs.docs, "foo\nbar baz\nbar\nbaz\n");
- expect![[r#"
- [
- DocsSourceMapLine {
- string_offset: 0,
- ast_offset: 124,
- },
- DocsSourceMapLine {
- string_offset: 4,
- ast_offset: 129,
- },
- DocsSourceMapLine {
- string_offset: 13,
- ast_offset: 262,
- },
- DocsSourceMapLine {
- string_offset: 17,
- ast_offset: 268,
- },
- ]
- "#]]
- .assert_debug_eq(&docs.docs_source_map);
-
- docs.append(&docs.clone());
- docs.prepend_str("prefix---");
- assert_eq!(docs.docs, "prefix---foo\nbar baz\nbar\nbaz\nfoo\nbar baz\nbar\nbaz\n");
- expect![[r#"
- [
- DocsSourceMapLine {
- string_offset: 0,
- ast_offset: 124,
- },
- DocsSourceMapLine {
- string_offset: 4,
- ast_offset: 129,
- },
- DocsSourceMapLine {
- string_offset: 13,
- ast_offset: 262,
- },
- DocsSourceMapLine {
- string_offset: 17,
- ast_offset: 268,
- },
- DocsSourceMapLine {
- string_offset: 21,
- ast_offset: 124,
- },
- DocsSourceMapLine {
- string_offset: 25,
- ast_offset: 129,
- },
- DocsSourceMapLine {
- string_offset: 34,
- ast_offset: 262,
- },
- DocsSourceMapLine {
- string_offset: 38,
- ast_offset: 268,
- },
- ]
- "#]]
- .assert_debug_eq(&docs.docs_source_map);
-
- let range = |start, end| TextRange::new(TextSize::new(start), TextSize::new(end));
- let in_file = |range| InFile::new(file_id.into(), range);
- assert_eq!(docs.find_ast_range(range(0, 2)), None);
- assert_eq!(docs.find_ast_range(range(8, 10)), None);
- assert_eq!(
- docs.find_ast_range(range(9, 10)),
- Some((in_file(range(124, 125)), IsInnerDoc::No))
- );
- assert_eq!(docs.find_ast_range(range(20, 23)), None);
- assert_eq!(
- docs.find_ast_range(range(23, 25)),
- Some((in_file(range(263, 265)), IsInnerDoc::Yes))
- );
- }
-
- #[test]
fn crate_attrs() {
let fixture = r#"
//- /lib.rs crate:foo crate-attr:no_std crate-attr:cfg(target_arch="x86")
diff --git a/crates/hir-def/src/attrs/docs.rs b/crates/hir-def/src/attrs/docs.rs
new file mode 100644
index 0000000000..8c14808c71
--- /dev/null
+++ b/crates/hir-def/src/attrs/docs.rs
@@ -0,0 +1,749 @@
+//! Documentation extraction and source mapping.
+//!
+//! This module handles the extraction and processing of doc comments and `#[doc = "..."]`
+//! attributes, including macro expansion for `#[doc = macro!()]` patterns.
+//! It builds a concatenated string of the full docs as well as a source map
+//! to map it back to AST (which is needed for things like resolving links in doc comments
+//! and highlight injection).
+
+use std::{
+ convert::Infallible,
+ ops::{ControlFlow, Range},
+};
+
+use base_db::Crate;
+use cfg::CfgOptions;
+use either::Either;
+use hir_expand::{
+ AstId, ExpandTo, HirFileId, InFile,
+ attrs::{Meta, expand_cfg_attr_with_doc_comments},
+ mod_path::ModPath,
+ span_map::SpanMap,
+};
+use span::AstIdMap;
+use syntax::{
+ AstNode, AstToken, SyntaxNode,
+ ast::{self, AttrDocCommentIter, IsString},
+};
+use tt::{TextRange, TextSize};
+
+use crate::{db::DefDatabase, macro_call_as_call_id, nameres::MacroSubNs, resolver::Resolver};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub(crate) struct DocsSourceMapLine {
+ /// The offset in [`Docs::docs`].
+ string_offset: TextSize,
+ /// The offset in the AST of the text. `None` for macro-expanded doc strings
+ /// where we cannot provide a faithful source mapping.
+ ast_offset: Option<TextSize>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Docs {
+ /// The concatenated string of all `#[doc = "..."]` attributes and documentation comments.
+ docs: String,
+ /// A sorted map from an offset in `docs` to an offset in the source code.
+ docs_source_map: Vec<DocsSourceMapLine>,
+ /// If the item is an outlined module (`mod foo;`), `docs_source_map` stores the concatenated
+ /// list of the outline and inline docs (outline first). Then, this field contains the [`HirFileId`]
+ /// of the outline declaration, and the index in `docs` from which the inline docs
+ /// begin.
+ outline_mod: Option<(HirFileId, usize)>,
+ inline_file: HirFileId,
+ /// The size of the prepended prefix, which does not map to real doc comments.
+ prefix_len: TextSize,
+ /// The offset in `docs` from which the docs are inner attributes/comments.
+ inline_inner_docs_start: Option<TextSize>,
+ /// Like `inline_inner_docs_start`, but for `outline_mod`. This can happen only when merging `Docs`
+ /// (as outline modules don't have inner attributes).
+ outline_inner_docs_start: Option<TextSize>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum IsInnerDoc {
+ No,
+ Yes,
+}
+
+impl IsInnerDoc {
+ #[inline]
+ pub fn yes(self) -> bool {
+ self == IsInnerDoc::Yes
+ }
+}
+
+impl Docs {
+ #[inline]
+ pub fn docs(&self) -> &str {
+ &self.docs
+ }
+
+ #[inline]
+ pub fn into_docs(self) -> String {
+ self.docs
+ }
+
+ pub fn find_ast_range(
+ &self,
+ mut string_range: TextRange,
+ ) -> Option<(InFile<TextRange>, IsInnerDoc)> {
+ if string_range.start() < self.prefix_len {
+ return None;
+ }
+ string_range -= self.prefix_len;
+
+ let mut file = self.inline_file;
+ let mut inner_docs_start = self.inline_inner_docs_start;
+ // Check whether the range is from the outline, the inline, or both.
+ let source_map = if let Some((outline_mod_file, outline_mod_end)) = self.outline_mod {
+ if let Some(first_inline) = self.docs_source_map.get(outline_mod_end) {
+ if string_range.end() <= first_inline.string_offset {
+ // The range is completely in the outline.
+ file = outline_mod_file;
+ inner_docs_start = self.outline_inner_docs_start;
+ &self.docs_source_map[..outline_mod_end]
+ } else if string_range.start() >= first_inline.string_offset {
+ // The range is completely in the inline.
+ &self.docs_source_map[outline_mod_end..]
+ } else {
+ // The range is combined from the outline and the inline - cannot map it back.
+ return None;
+ }
+ } else {
+ // There is no inline.
+ file = outline_mod_file;
+ inner_docs_start = self.outline_inner_docs_start;
+ &self.docs_source_map
+ }
+ } else {
+ // There is no outline.
+ &self.docs_source_map
+ };
+
+ let after_range =
+ source_map.partition_point(|line| line.string_offset <= string_range.start()) - 1;
+ let after_range = &source_map[after_range..];
+ let line = after_range.first()?;
+ // Unmapped lines (from macro-expanded docs) cannot be mapped back to AST.
+ let ast_offset = line.ast_offset?;
+ if after_range.get(1).is_some_and(|next_line| next_line.string_offset < string_range.end())
+ {
+ // The range is combined from two lines - cannot map it back.
+ return None;
+ }
+ let ast_range = string_range - line.string_offset + ast_offset;
+ let is_inner = if inner_docs_start
+ .is_some_and(|inner_docs_start| string_range.start() >= inner_docs_start)
+ {
+ IsInnerDoc::Yes
+ } else {
+ IsInnerDoc::No
+ };
+ Some((InFile::new(file, ast_range), is_inner))
+ }
+
+ #[inline]
+ pub fn shift_by(&mut self, offset: TextSize) {
+ self.prefix_len += offset;
+ }
+
+ pub fn prepend_str(&mut self, s: &str) {
+ self.prefix_len += TextSize::of(s);
+ self.docs.insert_str(0, s);
+ }
+
+ pub fn append_str(&mut self, s: &str) {
+ self.docs.push_str(s);
+ }
+
+ pub fn append(&mut self, other: &Docs) {
+ let other_offset = TextSize::of(&self.docs);
+
+ assert!(
+ self.outline_mod.is_none() && other.outline_mod.is_none(),
+ "cannot merge `Docs` that have `outline_mod` set"
+ );
+ self.outline_mod = Some((self.inline_file, self.docs_source_map.len()));
+ self.inline_file = other.inline_file;
+ self.outline_inner_docs_start = self.inline_inner_docs_start;
+ self.inline_inner_docs_start = other.inline_inner_docs_start.map(|it| it + other_offset);
+
+ self.docs.push_str(&other.docs);
+ self.docs_source_map.extend(other.docs_source_map.iter().map(
+ |&DocsSourceMapLine { string_offset, ast_offset }| DocsSourceMapLine {
+ ast_offset,
+ string_offset: string_offset + other_offset,
+ },
+ ));
+ }
+
+ fn extend_with_doc_comment(&mut self, comment: ast::Comment, indent: &mut usize) {
+ let Some((doc, offset)) = comment.doc_comment() else { return };
+ self.extend_with_doc_str(doc, comment.syntax().text_range().start() + offset, indent);
+ }
+
+ fn extend_with_doc_attr(&mut self, value: syntax::SyntaxToken, indent: &mut usize) {
+ let Some(value) = ast::String::cast(value) else { return };
+ let Some(value_offset) = value.text_range_between_quotes() else { return };
+ let value_offset = value_offset.start();
+ let Ok(value) = value.value() else { return };
+ // FIXME: Handle source maps for escaped text.
+ self.extend_with_doc_str(&value, value_offset, indent);
+ }
+
+ pub(crate) fn extend_with_doc_str(
+ &mut self,
+ doc: &str,
+ offset_in_ast: TextSize,
+ indent: &mut usize,
+ ) {
+ self.push_doc_lines(doc, Some(offset_in_ast), indent);
+ }
+
+ fn extend_with_unmapped_doc_str(&mut self, doc: &str, indent: &mut usize) {
+ self.push_doc_lines(doc, None, indent);
+ }
+
+ fn push_doc_lines(&mut self, doc: &str, mut ast_offset: Option<TextSize>, indent: &mut usize) {
+ for line in doc.split('\n') {
+ self.docs_source_map
+ .push(DocsSourceMapLine { string_offset: TextSize::of(&self.docs), ast_offset });
+ if let Some(ref mut offset) = ast_offset {
+ *offset += TextSize::of(line) + TextSize::of("\n");
+ }
+
+ let line = line.trim_end();
+ if let Some(line_indent) = line.chars().position(|ch| !ch.is_whitespace()) {
+ // Empty lines are handled because `position()` returns `None` for them.
+ *indent = std::cmp::min(*indent, line_indent);
+ }
+ self.docs.push_str(line);
+ self.docs.push('\n');
+ }
+ }
+
+ fn remove_indent(&mut self, indent: usize, start_source_map_index: usize) {
+ /// In case of panics, we want to avoid corrupted UTF-8 in `self.docs`, so we clear it.
+ struct Guard<'a>(&'a mut Docs);
+ impl Drop for Guard<'_> {
+ fn drop(&mut self) {
+ let Docs {
+ docs,
+ docs_source_map,
+ outline_mod,
+ inline_file: _,
+ prefix_len: _,
+ inline_inner_docs_start: _,
+ outline_inner_docs_start: _,
+ } = self.0;
+ // Don't use `String::clear()` here because it's not guaranteed to not do UTF-8-dependent things,
+ // and we may have temporarily broken the string's encoding.
+ unsafe { docs.as_mut_vec() }.clear();
+ // This is just to avoid panics down the road.
+ docs_source_map.clear();
+ *outline_mod = None;
+ }
+ }
+
+ if self.docs.is_empty() {
+ return;
+ }
+
+ let guard = Guard(self);
+ let source_map = &mut guard.0.docs_source_map[start_source_map_index..];
+ let Some(&DocsSourceMapLine { string_offset: mut copy_into, .. }) = source_map.first()
+ else {
+ return;
+ };
+ // We basically want to remove multiple ranges from a string. Doing this efficiently (without O(N^2)
+ // or allocations) requires unsafe. Basically, for each line, we copy the line minus the indent into
+ // consecutive to the previous line (which may have moved). Then at the end we truncate.
+ let mut accumulated_offset = TextSize::new(0);
+ for idx in 0..source_map.len() {
+ let string_end_offset = source_map
+ .get(idx + 1)
+ .map_or_else(|| TextSize::of(&guard.0.docs), |next_attr| next_attr.string_offset);
+ let line_source = &mut source_map[idx];
+ let line_docs =
+ &guard.0.docs[TextRange::new(line_source.string_offset, string_end_offset)];
+ let line_docs_len = TextSize::of(line_docs);
+ let indent_size = line_docs.char_indices().nth(indent).map_or_else(
+ || TextSize::of(line_docs) - TextSize::of("\n"),
+ |(offset, _)| TextSize::new(offset as u32),
+ );
+ unsafe { guard.0.docs.as_bytes_mut() }.copy_within(
+ Range::<usize>::from(TextRange::new(
+ line_source.string_offset + indent_size,
+ string_end_offset,
+ )),
+ copy_into.into(),
+ );
+ copy_into += line_docs_len - indent_size;
+
+ if let Some(inner_attrs_start) = &mut guard.0.inline_inner_docs_start
+ && *inner_attrs_start == line_source.string_offset
+ {
+ *inner_attrs_start -= accumulated_offset;
+ }
+ // The removals in the string accumulate, but in the AST not, because it already points
+ // to the beginning of each attribute.
+ // Also, we need to shift the AST offset of every line, but the string offset of the first
+ // line should not get shifted (in general, the shift for the string offset is by the
+ // number of lines until the current one, excluding the current one).
+ line_source.string_offset -= accumulated_offset;
+ if let Some(ref mut ast_offset) = line_source.ast_offset {
+ *ast_offset += indent_size;
+ }
+
+ accumulated_offset += indent_size;
+ }
+ // Don't use `String::truncate()` here because it's not guaranteed to not do UTF-8-dependent things,
+ // and we may have temporarily broken the string's encoding.
+ unsafe { guard.0.docs.as_mut_vec() }.truncate(copy_into.into());
+
+ std::mem::forget(guard);
+ }
+
+ fn remove_last_newline(&mut self) {
+ self.docs.truncate(self.docs.len().saturating_sub(1));
+ }
+
+ fn shrink_to_fit(&mut self) {
+ let Docs {
+ docs,
+ docs_source_map,
+ outline_mod: _,
+ inline_file: _,
+ prefix_len: _,
+ inline_inner_docs_start: _,
+ outline_inner_docs_start: _,
+ } = self;
+ docs.shrink_to_fit();
+ docs_source_map.shrink_to_fit();
+ }
+}
+
+struct DocMacroExpander<'db> {
+ db: &'db dyn DefDatabase,
+ krate: Crate,
+ recursion_depth: usize,
+ recursion_limit: usize,
+}
+
+struct DocExprSourceCtx<'db> {
+ resolver: Resolver<'db>,
+ file_id: HirFileId,
+ ast_id_map: &'db AstIdMap,
+ span_map: SpanMap,
+}
+
+fn expand_doc_expr_via_macro_pipeline<'db>(
+ expander: &mut DocMacroExpander<'db>,
+ source_ctx: &DocExprSourceCtx<'db>,
+ expr: ast::Expr,
+) -> Option<String> {
+ match expr {
+ ast::Expr::ParenExpr(paren_expr) => {
+ expand_doc_expr_via_macro_pipeline(expander, source_ctx, paren_expr.expr()?)
+ }
+ ast::Expr::Literal(literal) => match literal.kind() {
+ ast::LiteralKind::String(string) => string.value().ok().map(Into::into),
+ _ => None,
+ },
+ ast::Expr::MacroExpr(macro_expr) => {
+ let macro_call = macro_expr.macro_call()?;
+ let (expr, new_source_ctx) = expand_doc_macro_call(expander, source_ctx, macro_call)?;
+ // After expansion, the expr lives in the expansion file; use its source context.
+ expand_doc_expr_via_macro_pipeline(expander, &new_source_ctx, expr)
+ }
+ _ => None,
+ }
+}
+
+fn expand_doc_macro_call<'db>(
+ expander: &mut DocMacroExpander<'db>,
+ source_ctx: &DocExprSourceCtx<'db>,
+ macro_call: ast::MacroCall,
+) -> Option<(ast::Expr, DocExprSourceCtx<'db>)> {
+ if expander.recursion_depth >= expander.recursion_limit {
+ return None;
+ }
+
+ let path = macro_call.path()?;
+ let mod_path = ModPath::from_src(expander.db, path, &mut |range| {
+ source_ctx.span_map.span_for_range(range).ctx
+ })?;
+ let call_site = source_ctx.span_map.span_for_range(macro_call.syntax().text_range());
+ let ast_id = AstId::new(source_ctx.file_id, source_ctx.ast_id_map.ast_id(&macro_call));
+ let call_id = macro_call_as_call_id(
+ expander.db,
+ ast_id,
+ &mod_path,
+ call_site.ctx,
+ ExpandTo::Expr,
+ expander.krate,
+ |path| {
+ source_ctx.resolver.resolve_path_as_macro_def(expander.db, path, Some(MacroSubNs::Bang))
+ },
+ &mut |_, _| (),
+ )
+ .ok()?
+ .value?;
+
+ expander.recursion_depth += 1;
+ let parse = expander.db.parse_macro_expansion(call_id).value.0;
+ let expr = parse.cast::<ast::Expr>().map(|parse| parse.tree())?;
+ expander.recursion_depth -= 1;
+
+ // Build a new source context for the expansion file so that any further
+ // recursive expansion (e.g. a user macro expanding to `concat!(...)`)
+ // correctly resolves AstIds and spans in the expansion.
+ let expansion_file_id: HirFileId = call_id.into();
+ let new_source_ctx = DocExprSourceCtx {
+ resolver: source_ctx.resolver.clone(),
+ file_id: expansion_file_id,
+ ast_id_map: expander.db.ast_id_map(expansion_file_id),
+ span_map: expander.db.span_map(expansion_file_id),
+ };
+ Some((expr, new_source_ctx))
+}
+
+fn extend_with_attrs<'a, 'db>(
+ result: &mut Docs,
+ db: &'db dyn DefDatabase,
+ krate: Crate,
+ node: &SyntaxNode,
+ file_id: HirFileId,
+ expect_inner_attrs: bool,
+ indent: &mut usize,
+ get_cfg_options: &dyn Fn() -> &'a CfgOptions,
+ cfg_options: &mut Option<&'a CfgOptions>,
+ make_resolver: &dyn Fn() -> Resolver<'db>,
+) {
+ // Lazily initialised when we first encounter a `#[doc = macro!()]`.
+ let mut expander: Option<(DocMacroExpander<'db>, DocExprSourceCtx<'db>)> = None;
+
+ // FIXME: `#[cfg_attr(..., doc = macro!())]` skips macro expansion because
+ // `top_attr` points to the `cfg_attr` node, not the inner `doc = macro!()`.
+ // Fixing this is difficult as we need an `Expr` that doesn't exist here for
+ // the ast id and for sanely parsing the macro call.
+ expand_cfg_attr_with_doc_comments::<_, Infallible>(
+ AttrDocCommentIter::from_syntax_node(node).filter(|attr| match attr {
+ Either::Left(attr) => attr.kind().is_inner() == expect_inner_attrs,
+ Either::Right(comment) => comment
+ .kind()
+ .doc
+ .is_some_and(|kind| (kind == ast::CommentPlacement::Inner) == expect_inner_attrs),
+ }),
+ || *cfg_options.get_or_insert_with(get_cfg_options),
+ |attr| {
+ match attr {
+ Either::Right(doc_comment) => result.extend_with_doc_comment(doc_comment, indent),
+ Either::Left((attr, _, _, top_attr)) => match attr {
+ Meta::NamedKeyValue { name: Some(name), value: Some(value), .. }
+ if name.text() == "doc" =>
+ {
+ result.extend_with_doc_attr(value, indent);
+ }
+ Meta::NamedKeyValue { name: Some(name), value: None, .. }
+ if name.text() == "doc" =>
+ {
+ // When the doc attribute comes from inside a `cfg_attr`,
+ // `top_attr` points to the `cfg_attr(...)` node, not the
+ // inner `doc = macro!()`. In that case `top_attr.expr()`
+ // would not yield the macro expression we need, so skip
+ // expansion (see FIXME above).
+ let is_from_cfg_attr =
+ top_attr.as_simple_call().is_some_and(|(name, _)| name == "cfg_attr");
+ if !is_from_cfg_attr && let Some(expr) = top_attr.expr() {
+ let (exp, ctx) = expander.get_or_insert_with(|| {
+ let resolver = make_resolver();
+ let def_map = resolver.top_level_def_map();
+ let recursion_limit = def_map.recursion_limit() as usize;
+ (
+ DocMacroExpander {
+ db,
+ krate,
+ recursion_depth: 0,
+ recursion_limit,
+ },
+ DocExprSourceCtx {
+ resolver,
+ file_id,
+ ast_id_map: db.ast_id_map(file_id),
+ span_map: db.span_map(file_id),
+ },
+ )
+ });
+ if let Some(expanded) =
+ expand_doc_expr_via_macro_pipeline(exp, ctx, expr)
+ {
+ result.extend_with_unmapped_doc_str(&expanded, indent);
+ }
+ }
+ }
+ _ => {}
+ },
+ }
+ ControlFlow::Continue(())
+ },
+ );
+}
+
+pub(crate) fn extract_docs<'a, 'db>(
+ db: &'db dyn DefDatabase,
+ krate: Crate,
+ resolver: &dyn Fn() -> Resolver<'db>,
+ get_cfg_options: &dyn Fn() -> &'a CfgOptions,
+ source: InFile<ast::AnyHasAttrs>,
+ outer_mod_decl: Option<InFile<ast::Module>>,
+ inner_attrs_node: Option<SyntaxNode>,
+) -> Option<Box<Docs>> {
+ let mut result = Docs {
+ docs: String::new(),
+ docs_source_map: Vec::new(),
+ outline_mod: None,
+ inline_file: source.file_id,
+ prefix_len: TextSize::new(0),
+ inline_inner_docs_start: None,
+ outline_inner_docs_start: None,
+ };
+
+ let mut cfg_options = None;
+
+ if let Some(outer_mod_decl) = outer_mod_decl {
+ let mut indent = usize::MAX;
+ // For outer docs (the `mod foo;` declaration), use the module's own resolver.
+ extend_with_attrs(
+ &mut result,
+ db,
+ krate,
+ outer_mod_decl.value.syntax(),
+ outer_mod_decl.file_id,
+ false,
+ &mut indent,
+ get_cfg_options,
+ &mut cfg_options,
+ resolver,
+ );
+ result.remove_indent(indent, 0);
+ result.outline_mod = Some((outer_mod_decl.file_id, result.docs_source_map.len()));
+ }
+
+ let inline_source_map_start = result.docs_source_map.len();
+ let mut indent = usize::MAX;
+ // For inline docs, use the item's own resolver.
+ extend_with_attrs(
+ &mut result,
+ db,
+ krate,
+ source.value.syntax(),
+ source.file_id,
+ false,
+ &mut indent,
+ get_cfg_options,
+ &mut cfg_options,
+ resolver,
+ );
+ if let Some(inner_attrs_node) = &inner_attrs_node {
+ result.inline_inner_docs_start = Some(TextSize::of(&result.docs));
+ extend_with_attrs(
+ &mut result,
+ db,
+ krate,
+ inner_attrs_node,
+ source.file_id,
+ true,
+ &mut indent,
+ get_cfg_options,
+ &mut cfg_options,
+ resolver,
+ );
+ }
+ result.remove_indent(indent, inline_source_map_start);
+
+ result.remove_last_newline();
+
+ result.shrink_to_fit();
+
+ if result.docs.is_empty() { None } else { Some(Box::new(result)) }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::expect;
+ use hir_expand::InFile;
+ use test_fixture::WithFixture;
+ use tt::{TextRange, TextSize};
+
+ use crate::test_db::TestDB;
+
+ use super::{Docs, IsInnerDoc};
+
+ #[test]
+ fn docs() {
+ let (_db, file_id) = TestDB::with_single_file("");
+ let mut docs = Docs {
+ docs: String::new(),
+ docs_source_map: Vec::new(),
+ outline_mod: None,
+ inline_file: file_id.into(),
+ prefix_len: TextSize::new(0),
+ inline_inner_docs_start: None,
+ outline_inner_docs_start: None,
+ };
+ let mut indent = usize::MAX;
+
+ let outer = " foo\n\tbar baz";
+ let mut ast_offset = TextSize::new(123);
+ for line in outer.split('\n') {
+ docs.extend_with_doc_str(line, ast_offset, &mut indent);
+ ast_offset += TextSize::of(line) + TextSize::of("\n");
+ }
+
+ docs.inline_inner_docs_start = Some(TextSize::of(&docs.docs));
+ ast_offset += TextSize::new(123);
+ let inner = " bar \n baz";
+ for line in inner.split('\n') {
+ docs.extend_with_doc_str(line, ast_offset, &mut indent);
+ ast_offset += TextSize::of(line) + TextSize::of("\n");
+ }
+
+ assert_eq!(indent, 1);
+ expect![[r#"
+ [
+ DocsSourceMapLine {
+ string_offset: 0,
+ ast_offset: Some(
+ 123,
+ ),
+ },
+ DocsSourceMapLine {
+ string_offset: 5,
+ ast_offset: Some(
+ 128,
+ ),
+ },
+ DocsSourceMapLine {
+ string_offset: 15,
+ ast_offset: Some(
+ 261,
+ ),
+ },
+ DocsSourceMapLine {
+ string_offset: 20,
+ ast_offset: Some(
+ 267,
+ ),
+ },
+ ]
+ "#]]
+ .assert_debug_eq(&docs.docs_source_map);
+
+ docs.remove_indent(indent, 0);
+
+ assert_eq!(docs.inline_inner_docs_start, Some(TextSize::new(13)));
+
+ assert_eq!(docs.docs, "foo\nbar baz\nbar\nbaz\n");
+ expect![[r#"
+ [
+ DocsSourceMapLine {
+ string_offset: 0,
+ ast_offset: Some(
+ 124,
+ ),
+ },
+ DocsSourceMapLine {
+ string_offset: 4,
+ ast_offset: Some(
+ 129,
+ ),
+ },
+ DocsSourceMapLine {
+ string_offset: 13,
+ ast_offset: Some(
+ 262,
+ ),
+ },
+ DocsSourceMapLine {
+ string_offset: 17,
+ ast_offset: Some(
+ 268,
+ ),
+ },
+ ]
+ "#]]
+ .assert_debug_eq(&docs.docs_source_map);
+
+ docs.append(&docs.clone());
+ docs.prepend_str("prefix---");
+ assert_eq!(docs.docs, "prefix---foo\nbar baz\nbar\nbaz\nfoo\nbar baz\nbar\nbaz\n");
+ expect![[r#"
+ [
+ DocsSourceMapLine {
+ string_offset: 0,
+ ast_offset: Some(
+ 124,
+ ),
+ },
+ DocsSourceMapLine {
+ string_offset: 4,
+ ast_offset: Some(
+ 129,
+ ),
+ },
+ DocsSourceMapLine {
+ string_offset: 13,
+ ast_offset: Some(
+ 262,
+ ),
+ },
+ DocsSourceMapLine {
+ string_offset: 17,
+ ast_offset: Some(
+ 268,
+ ),
+ },
+ DocsSourceMapLine {
+ string_offset: 21,
+ ast_offset: Some(
+ 124,
+ ),
+ },
+ DocsSourceMapLine {
+ string_offset: 25,
+ ast_offset: Some(
+ 129,
+ ),
+ },
+ DocsSourceMapLine {
+ string_offset: 34,
+ ast_offset: Some(
+ 262,
+ ),
+ },
+ DocsSourceMapLine {
+ string_offset: 38,
+ ast_offset: Some(
+ 268,
+ ),
+ },
+ ]
+ "#]]
+ .assert_debug_eq(&docs.docs_source_map);
+
+ let range = |start, end| TextRange::new(TextSize::new(start), TextSize::new(end));
+ let in_file = |range| InFile::new(file_id.into(), range);
+ assert_eq!(docs.find_ast_range(range(0, 2)), None);
+ assert_eq!(docs.find_ast_range(range(8, 10)), None);
+ assert_eq!(
+ docs.find_ast_range(range(9, 10)),
+ Some((in_file(range(124, 125)), IsInnerDoc::No))
+ );
+ assert_eq!(docs.find_ast_range(range(20, 23)), None);
+ assert_eq!(
+ docs.find_ast_range(range(23, 25)),
+ Some((in_file(range(263, 265)), IsInnerDoc::Yes))
+ );
+ }
+}
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index 5d5d435398..9dd7768ead 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -1,5 +1,5 @@
//! Defines database & queries for name resolution.
-use base_db::{Crate, RootQueryDb, SourceDatabase};
+use base_db::{Crate, SourceDatabase};
use hir_expand::{
EditionedFileId, HirFileId, InFile, Lookup, MacroCallId, MacroDefId, MacroDefKind,
db::ExpandDatabase,
@@ -22,7 +22,7 @@ use crate::{
use salsa::plumbing::AsId;
#[query_group::query_group(InternDatabaseStorage)]
-pub trait InternDatabase: RootQueryDb {
+pub trait InternDatabase: SourceDatabase {
// region: items
#[salsa::interned]
fn intern_use(&self, loc: UseLoc) -> UseId;
diff --git a/crates/hir-def/src/expr_store.rs b/crates/hir-def/src/expr_store.rs
index ca523622ec..62a17168b1 100644
--- a/crates/hir-def/src/expr_store.rs
+++ b/crates/hir-def/src/expr_store.rs
@@ -520,7 +520,7 @@ impl ExpressionStore {
self.const_expr_origins().iter().map(|&(id, _)| id)
}
- /// Like [`Self::signature_const_expr_roots`], but also returns the origin
+ /// Like [`Self::expr_roots`], but also returns the origin
/// of each expression.
pub fn expr_roots_with_origins(&self) -> impl Iterator<Item = (ExprId, RootExprOrigin)> {
self.const_expr_origins().iter().map(|&(id, origin)| (id, origin))
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index 74006c6037..7fe91a3d02 100644
--- a/crates/hir-def/src/expr_store/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -1465,7 +1465,15 @@ impl<'db> ExprCollector<'db> {
match kind {
ArrayExprKind::ElementList(e) => {
- let elements = e.map(|expr| self.collect_expr(expr)).collect();
+ let elements = e
+ .filter_map(|expr| {
+ if self.check_cfg(&expr) {
+ Some(self.collect_expr(expr))
+ } else {
+ None
+ }
+ })
+ .collect();
self.alloc_expr(Expr::Array(Array::ElementList { elements }), syntax_ptr)
}
ArrayExprKind::Repeat { initializer, repeat } => {
diff --git a/crates/hir-def/src/expr_store/scope.rs b/crates/hir-def/src/expr_store/scope.rs
index 40ae0b7de4..9738ac5c44 100644
--- a/crates/hir-def/src/expr_store/scope.rs
+++ b/crates/hir-def/src/expr_store/scope.rs
@@ -371,7 +371,6 @@ fn compute_expr_scopes(
#[cfg(test)]
mod tests {
- use base_db::RootQueryDb;
use hir_expand::{InFile, name::AsName};
use span::FileId;
use syntax::{AstNode, algo::find_node_at_offset, ast};
@@ -414,7 +413,7 @@ mod tests {
let (file_id, _) = editioned_file_id.unpack(&db);
- let file_syntax = db.parse(editioned_file_id).syntax_node();
+ let file_syntax = editioned_file_id.parse(&db).syntax_node();
let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap();
let function = find_function(&db, file_id);
@@ -570,7 +569,7 @@ fn foo() {
let (file_id, _) = editioned_file_id.unpack(&db);
- let file = db.parse(editioned_file_id).ok().unwrap();
+ let file = editioned_file_id.parse(&db).ok().unwrap();
let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
.expect("failed to find a name at the target offset");
let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap();
diff --git a/crates/hir-def/src/expr_store/tests/body.rs b/crates/hir-def/src/expr_store/tests/body.rs
index 985cd96662..4e5f2ca893 100644
--- a/crates/hir-def/src/expr_store/tests/body.rs
+++ b/crates/hir-def/src/expr_store/tests/body.rs
@@ -660,3 +660,24 @@ async fn main(&self, param1: i32, ref mut param2: i32, _: i32, param4 @ _: i32,
}"#]],
)
}
+
+#[test]
+fn array_element_cfg() {
+ pretty_print(
+ r#"
+fn foo() {
+ [
+ (),
+ #[cfg(false)]
+ ()
+ ];
+}
+ "#,
+ expect![[r#"
+ fn foo() {
+ [
+ (),
+ ];
+ }"#]],
+ );
+}
diff --git a/crates/hir-def/src/expr_store/tests/body/block.rs b/crates/hir-def/src/expr_store/tests/body/block.rs
index 83594ee021..71fcced2d8 100644
--- a/crates/hir-def/src/expr_store/tests/body/block.rs
+++ b/crates/hir-def/src/expr_store/tests/body/block.rs
@@ -190,13 +190,13 @@ fn f() {
"#,
expect![[r#"
ModuleIdLt {
- [salsa id]: Id(3803),
+ [salsa id]: Id(3403),
krate: Crate(
- Id(2400),
+ Id(2000),
),
block: Some(
BlockId(
- 4801,
+ 4401,
),
),
}"#]],
diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs
index 0014e1af5c..ba077b1b2e 100644
--- a/crates/hir-def/src/import_map.rs
+++ b/crates/hir-def/src/import_map.rs
@@ -499,7 +499,7 @@ fn search_maps(
#[cfg(test)]
mod tests {
- use base_db::RootQueryDb;
+ use base_db::all_crates;
use expect_test::{Expect, expect};
use test_fixture::WithFixture;
@@ -536,7 +536,7 @@ mod tests {
expect: Expect,
) {
let db = TestDB::with_files(ra_fixture);
- let all_crates = db.all_crates();
+ let all_crates = all_crates(&db);
let krate = all_crates
.iter()
.copied()
@@ -616,7 +616,7 @@ mod tests {
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let db = TestDB::with_files(ra_fixture);
- let all_crates = db.all_crates();
+ let all_crates = all_crates(&db);
let actual = all_crates
.iter()
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs
index d93df7af6a..7b5d0103e6 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -35,9 +35,9 @@ macro_rules! f {
};
}
-struct#0:MacroRules[BE8F, 0]@58..64#18432# MyTraitMap2#0:MacroCall[BE8F, 0]@31..42#ROOT2024# {#0:MacroRules[BE8F, 0]@72..73#18432#
- map#0:MacroRules[BE8F, 0]@86..89#18432#:#0:MacroRules[BE8F, 0]@89..90#18432# #0:MacroRules[BE8F, 0]@89..90#18432#::#0:MacroRules[BE8F, 0]@91..93#18432#std#0:MacroRules[BE8F, 0]@93..96#18432#::#0:MacroRules[BE8F, 0]@96..98#18432#collections#0:MacroRules[BE8F, 0]@98..109#18432#::#0:MacroRules[BE8F, 0]@109..111#18432#HashSet#0:MacroRules[BE8F, 0]@111..118#18432#<#0:MacroRules[BE8F, 0]@118..119#18432#(#0:MacroRules[BE8F, 0]@119..120#18432#)#0:MacroRules[BE8F, 0]@120..121#18432#>#0:MacroRules[BE8F, 0]@121..122#18432#,#0:MacroRules[BE8F, 0]@122..123#18432#
-}#0:MacroRules[BE8F, 0]@132..133#18432#
+struct#0:MacroRules[BE8F, 0]@58..64#17408# MyTraitMap2#0:MacroCall[BE8F, 0]@31..42#ROOT2024# {#0:MacroRules[BE8F, 0]@72..73#17408#
+ map#0:MacroRules[BE8F, 0]@86..89#17408#:#0:MacroRules[BE8F, 0]@89..90#17408# #0:MacroRules[BE8F, 0]@89..90#17408#::#0:MacroRules[BE8F, 0]@91..93#17408#std#0:MacroRules[BE8F, 0]@93..96#17408#::#0:MacroRules[BE8F, 0]@96..98#17408#collections#0:MacroRules[BE8F, 0]@98..109#17408#::#0:MacroRules[BE8F, 0]@109..111#17408#HashSet#0:MacroRules[BE8F, 0]@111..118#17408#<#0:MacroRules[BE8F, 0]@118..119#17408#(#0:MacroRules[BE8F, 0]@119..120#17408#)#0:MacroRules[BE8F, 0]@120..121#17408#>#0:MacroRules[BE8F, 0]@121..122#17408#,#0:MacroRules[BE8F, 0]@122..123#17408#
+}#0:MacroRules[BE8F, 0]@132..133#17408#
"#]],
);
}
@@ -197,7 +197,7 @@ macro_rules! mk_struct {
#[macro_use]
mod foo;
-struct#1:MacroRules[DB0C, 0]@59..65#18432# Foo#0:MacroCall[DB0C, 0]@32..35#ROOT2024#(#1:MacroRules[DB0C, 0]@70..71#18432#u32#0:MacroCall[DB0C, 0]@41..44#ROOT2024#)#1:MacroRules[DB0C, 0]@74..75#18432#;#1:MacroRules[DB0C, 0]@75..76#18432#
+struct#1:MacroRules[DB0C, 0]@59..65#17408# Foo#0:MacroCall[DB0C, 0]@32..35#ROOT2024#(#1:MacroRules[DB0C, 0]@70..71#17408#u32#0:MacroCall[DB0C, 0]@41..44#ROOT2024#)#1:MacroRules[DB0C, 0]@74..75#17408#;#1:MacroRules[DB0C, 0]@75..76#17408#
"#]],
);
}
@@ -423,10 +423,10 @@ m! { foo, bar }
macro_rules! m {
($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
}
-impl#\18432# Bar#\18432# {#\18432#
- fn#\18432# foo#\ROOT2024#(#\18432#)#\18432# {#\18432#}#\18432#
- fn#\18432# bar#\ROOT2024#(#\18432#)#\18432# {#\18432#}#\18432#
-}#\18432#
+impl#\17408# Bar#\17408# {#\17408#
+ fn#\17408# foo#\ROOT2024#(#\17408#)#\17408# {#\17408#}#\17408#
+ fn#\17408# bar#\ROOT2024#(#\17408#)#\17408# {#\17408#}#\17408#
+}#\17408#
"#]],
);
}
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index 8317c56caf..eabdada67c 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -16,7 +16,6 @@ mod proc_macros;
use std::{any::TypeId, iter, ops::Range, sync};
-use base_db::RootQueryDb;
use expect_test::Expect;
use hir_expand::{
AstId, ExpansionInfo, InFile, MacroCallId, MacroCallKind, MacroKind,
@@ -75,7 +74,7 @@ fn check_errors(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect)
let editioned_file_id =
ast_id.file_id.file_id().expect("macros inside macros are not supported");
- let ast = db.parse(editioned_file_id).syntax_node();
+ let ast = editioned_file_id.parse(&db).syntax_node();
let ast_id_map = db.ast_id_map(ast_id.file_id);
let node = ast_id_map.get_erased(ast_id.value).to_node(&ast);
Some((node.text_range(), errors))
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index 5fda1beab4..56b3f03f7b 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -346,7 +346,7 @@ impl ModuleOrigin {
match self {
&ModuleOrigin::File { definition: editioned_file_id, .. }
| &ModuleOrigin::CrateRoot { definition: editioned_file_id } => {
- let sf = db.parse(editioned_file_id).tree();
+ let sf = editioned_file_id.parse(db).tree();
InFile::new(editioned_file_id.into(), ModuleSource::SourceFile(sf))
}
&ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new(
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 9c101c127b..703f070dba 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -22,7 +22,7 @@ use itertools::izip;
use la_arena::Idx;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
-use span::{Edition, FileAstId, SyntaxContext};
+use span::{Edition, FileAstId, ROOT_ERASED_FILE_AST_ID, SyntaxContext};
use stdx::always;
use syntax::ast;
use triomphe::Arc;
@@ -369,7 +369,14 @@ impl<'db> DefCollector<'db> {
self.inject_prelude();
- if matches!(item_tree.top_level_attrs(), AttrsOrCfg::CfgDisabled(_)) {
+ if let AttrsOrCfg::CfgDisabled(attrs) = item_tree.top_level_attrs() {
+ let (cfg_expr, _) = &**attrs;
+ self.def_map.diagnostics.push(DefDiagnostic::unconfigured_code(
+ self.def_map.root,
+ InFile::new(file_id.into(), ROOT_ERASED_FILE_AST_ID),
+ cfg_expr.clone(),
+ self.cfg_options.clone(),
+ ));
return;
}
diff --git a/crates/hir-def/src/nameres/tests.rs b/crates/hir-def/src/nameres/tests.rs
index fe55252e25..08d98dff33 100644
--- a/crates/hir-def/src/nameres/tests.rs
+++ b/crates/hir-def/src/nameres/tests.rs
@@ -4,7 +4,6 @@ mod incremental;
mod macros;
mod mod_resolution;
-use base_db::RootQueryDb;
use expect_test::{Expect, expect};
use test_fixture::WithFixture;
diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs
index 5b75c078ec..0f1828abce 100644
--- a/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/crates/hir-def/src/nameres/tests/incremental.rs
@@ -1,6 +1,6 @@
use base_db::{
CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData,
- DependencyBuilder, Env, RootQueryDb, SourceDatabase,
+ DependencyBuilder, Env, SourceDatabase, all_crates,
};
use expect_test::{Expect, expect};
use intern::Symbol;
@@ -56,11 +56,11 @@ pub const BAZ: u32 = 0;
"#,
);
- for &krate in db.all_crates().iter() {
+ for &krate in all_crates(&db).iter() {
crate_def_map(&db, krate);
}
- let all_crates_before = db.all_crates();
+ let all_crates_before = all_crates(&db);
{
// Add dependencies: c -> b, b -> a.
@@ -100,15 +100,15 @@ pub const BAZ: u32 = 0;
new_crate_graph.set_in_db(&mut db);
}
- let all_crates_after = db.all_crates();
+ let all_crates_after = all_crates(&db);
assert!(
- Arc::ptr_eq(&all_crates_before, &all_crates_after),
+ std::sync::Arc::ptr_eq(&all_crates_before, &all_crates_after),
"the all_crates list should not have been invalidated"
);
execute_assert_events(
&db,
|| {
- for &krate in db.all_crates().iter() {
+ for &krate in all_crates(&db).iter() {
crate_def_map(&db, krate);
}
},
@@ -167,22 +167,22 @@ fn no() {}
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"EnumVariants::of_",
]
"#]],
expect![[r#"
[
- "parse_shim",
+ "parse",
"ast_id_map",
"file_item_tree_query",
"real_span_map_shim",
@@ -225,16 +225,16 @@ pub struct S {}
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"decl_macro_expander_shim",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"macro_def_shim",
"file_item_tree_query",
@@ -245,7 +245,7 @@ pub struct S {}
"#]],
expect![[r#"
[
- "parse_shim",
+ "parse",
"ast_id_map",
"file_item_tree_query",
"real_span_map_shim",
@@ -283,21 +283,21 @@ fn f() { foo }
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"crate_local_def_map",
"proc_macros_for_crate_shim",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"macro_def_shim",
"file_item_tree_query",
@@ -310,7 +310,7 @@ fn f() { foo }
"#]],
expect![[r#"
[
- "parse_shim",
+ "parse",
"ast_id_map",
"file_item_tree_query",
"real_span_map_shim",
@@ -407,22 +407,22 @@ pub struct S {}
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"crate_local_def_map",
"proc_macros_for_crate_shim",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"decl_macro_expander_shim",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"macro_def_shim",
"file_item_tree_query",
@@ -446,7 +446,7 @@ pub struct S {}
"#]],
expect![[r#"
[
- "parse_shim",
+ "parse",
"ast_id_map",
"file_item_tree_query",
"real_span_map_shim",
@@ -524,16 +524,16 @@ m!(Z);
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"decl_macro_expander_shim",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"macro_def_shim",
"file_item_tree_query",
@@ -571,7 +571,7 @@ m!(Z);
&[("file_item_tree_query", 1), ("parse_macro_expansion_shim", 0)],
expect![[r#"
[
- "parse_shim",
+ "parse",
"ast_id_map",
"file_item_tree_query",
"real_span_map_shim",
@@ -611,7 +611,7 @@ pub type Ty = ();
[
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
]
"#]],
@@ -629,7 +629,7 @@ pub type Ty = ();
&[("file_item_tree_query", 1), ("parse", 1)],
expect![[r#"
[
- "parse_shim",
+ "parse",
"ast_id_map",
"file_item_tree_query",
"real_span_map_shim",
diff --git a/crates/hir-def/src/nameres/tests/macros.rs b/crates/hir-def/src/nameres/tests/macros.rs
index a013f8b2bc..f073cf777d 100644
--- a/crates/hir-def/src/nameres/tests/macros.rs
+++ b/crates/hir-def/src/nameres/tests/macros.rs
@@ -1,3 +1,4 @@
+use base_db::all_crates;
use expect_test::expect;
use itertools::Itertools;
@@ -1129,7 +1130,7 @@ pub fn derive_macro_2(_item: TokenStream) -> TokenStream {
}
"#,
);
- let krate = *db.all_crates().last().expect("no crate graph present");
+ let krate = *all_crates(&db).last().expect("no crate graph present");
let def_map = crate_def_map(&db, krate);
assert_eq!(def_map.data.exported_derives.len(), 1);
@@ -1497,7 +1498,7 @@ struct TokenStream;
fn proc_attr(a: TokenStream, b: TokenStream) -> TokenStream { a }
"#,
);
- let krate = *db.all_crates().last().expect("no crate graph present");
+ let krate = *all_crates(&db).last().expect("no crate graph present");
let def_map = crate_def_map(&db, krate);
let root_module = &def_map[def_map.root].scope;
diff --git a/crates/hir-def/src/test_db.rs b/crates/hir-def/src/test_db.rs
index 0d260279f9..b854d2aa21 100644
--- a/crates/hir-def/src/test_db.rs
+++ b/crates/hir-def/src/test_db.rs
@@ -3,8 +3,9 @@
use std::{fmt, panic, sync::Mutex};
use base_db::{
- Crate, CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, Nonce, RootQueryDb,
- SourceDatabase, SourceRoot, SourceRootId, SourceRootInput,
+ Crate, CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, Nonce, SourceDatabase,
+ SourceRoot, SourceRootId, SourceRootInput, all_crates, relevant_crates,
+ set_all_crates_with_durability,
};
use hir_expand::{InFile, files::FilePosition};
use salsa::Durability;
@@ -49,7 +50,7 @@ impl Default for TestDB {
};
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
// This needs to be here otherwise `CrateGraphBuilder` panics.
- this.set_all_crates(Arc::new(Box::new([])));
+ set_all_crates_with_durability(&mut this, std::iter::empty(), Durability::HIGH);
_ = base_db::LibraryRoots::builder(Default::default())
.durability(Durability::MEDIUM)
.new(&this);
@@ -145,7 +146,7 @@ impl SourceDatabase for TestDB {
impl TestDB {
pub(crate) fn fetch_test_crate(&self) -> Crate {
- let all_crates = self.all_crates();
+ let all_crates = all_crates(self);
all_crates
.iter()
.copied()
@@ -157,7 +158,7 @@ impl TestDB {
}
pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
- for &krate in self.relevant_crates(file_id).iter() {
+ for &krate in relevant_crates(self, file_id).iter() {
let crate_def_map = crate_def_map(self, krate);
for (local_id, data) in crate_def_map.modules() {
if data.origin.file_id().map(|file_id| file_id.file_id(self)) == Some(file_id) {
@@ -288,7 +289,7 @@ impl TestDB {
let source_map = &Body::with_source_map(self, def_with_body).1;
let scopes = ExprScopes::body_expr_scopes(self, def_with_body);
- let root_syntax_node = self.parse(file_id).syntax_node();
+ let root_syntax_node = file_id.parse(self).syntax_node();
let scope_iter =
algo::ancestors_at_offset(&root_syntax_node, position.offset).filter_map(|node| {
let block = ast::BlockExpr::cast(node)?;
diff --git a/crates/hir-expand/src/builtin/fn_macro.rs b/crates/hir-expand/src/builtin/fn_macro.rs
index b3572a1cef..9962677a9d 100644
--- a/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/crates/hir-expand/src/builtin/fn_macro.rs
@@ -828,7 +828,7 @@ fn include_expand(
let span_map = db.real_span_map(editioned_file_id);
// FIXME: Parse errors
ExpandResult::ok(syntax_node_to_token_tree(
- &db.parse(editioned_file_id).syntax_node(),
+ &editioned_file_id.parse(db).syntax_node(),
SpanMap::RealSpanMap(span_map),
span,
syntax_bridge::DocCommentDesugarMode::ProcMacro,
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 020731cf9a..8a6b56d932 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -1,6 +1,6 @@
//! Defines database & queries for macro expansion.
-use base_db::{Crate, RootQueryDb};
+use base_db::{Crate, SourceDatabase};
use mbe::MatchedArmIndex;
use span::{AstIdMap, Edition, Span, SyntaxContext};
use syntax::{AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
@@ -48,7 +48,7 @@ pub enum TokenExpander {
}
#[query_group::query_group]
-pub trait ExpandDatabase: RootQueryDb {
+pub trait ExpandDatabase: SourceDatabase {
/// The proc macros. Do not use this! Use `proc_macros_for_crate()` instead.
#[salsa::input]
fn proc_macros(&self) -> Arc<ProcMacros>;
@@ -343,7 +343,7 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> AstIdMap {
/// file or a macro expansion.
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
match file_id {
- HirFileId::FileId(file_id) => db.parse(file_id).syntax_node(),
+ HirFileId::FileId(file_id) => file_id.parse(db).syntax_node(),
HirFileId::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node()
}
@@ -389,7 +389,7 @@ pub(crate) fn parse_with_map(
) -> (Parse<SyntaxNode>, SpanMap) {
match file_id {
HirFileId::FileId(file_id) => {
- (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
+ (file_id.parse(db).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
}
HirFileId::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index fce92c8a3e..71da560b15 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -198,7 +198,7 @@ trait FileIdToSyntax: Copy {
impl FileIdToSyntax for EditionedFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
- db.parse(self).syntax_node()
+ self.parse(db).syntax_node()
}
}
impl FileIdToSyntax for MacroCallId {
@@ -333,8 +333,8 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
)?;
let kind = self.kind();
- let value = db
- .parse(editioned_file_id)
+ let value = editioned_file_id
+ .parse(db)
.syntax_node()
.covering_element(range)
.ancestors()
@@ -521,7 +521,7 @@ impl<N: AstNode> InFile<N> {
)?;
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
- let anc = db.parse(editioned_file_id).syntax_node().covering_element(range);
+ let anc = editioned_file_id.parse(db).syntax_node().covering_element(range);
let value = anc.ancestors().find_map(N::cast)?;
Some(InRealFile::new(editioned_file_id, value))
}
diff --git a/crates/hir-expand/src/span_map.rs b/crates/hir-expand/src/span_map.rs
index 71d0b880ca..aa8603341b 100644
--- a/crates/hir-expand/src/span_map.rs
+++ b/crates/hir-expand/src/span_map.rs
@@ -86,7 +86,7 @@ pub(crate) fn real_span_map(
let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(editioned_file_id.into());
- let tree = db.parse(editioned_file_id).tree();
+ let tree = editioned_file_id.parse(db).tree();
// This is an incrementality layer. Basically we can't use absolute ranges for our spans as that
// would mean we'd invalidate everything whenever we type. So instead we make the text ranges
// relative to some AstIds reducing the risk of invalidation as typing somewhere no longer
diff --git a/crates/hir-ty/src/builtin_derive.rs b/crates/hir-ty/src/builtin_derive.rs
index 92629b7a05..eb3922f4b6 100644
--- a/crates/hir-ty/src/builtin_derive.rs
+++ b/crates/hir-ty/src/builtin_derive.rs
@@ -174,8 +174,11 @@ pub fn predicates<'db>(db: &'db dyn HirDatabase, impl_: BuiltinDeriveImplId) ->
if matches!(loc.adt, AdtId::EnumId(_)) {
// Enums don't have extra bounds.
GenericPredicates::from_explicit_own_predicates(StoredEarlyBinder::bind(
- Clauses::new_from_slice(adt_predicates.explicit_predicates().skip_binder())
- .store(),
+ Clauses::new_from_iter(
+ interner,
+ adt_predicates.own_explicit_predicates().skip_binder(),
+ )
+ .store(),
))
} else {
simple_trait_predicates(interner, loc, generic_params, adt_predicates, trait_id)
@@ -191,7 +194,7 @@ pub fn predicates<'db>(db: &'db dyn HirDatabase, impl_: BuiltinDeriveImplId) ->
));
};
let duplicated_bounds =
- adt_predicates.explicit_predicates().iter_identity_copied().filter_map(|pred| {
+ adt_predicates.explicit_predicates().iter_identity().filter_map(|pred| {
let mentions_pointee =
pred.visit_with(&mut MentionsPointee { pointee_param_idx }).is_break();
if !mentions_pointee {
@@ -212,7 +215,7 @@ pub fn predicates<'db>(db: &'db dyn HirDatabase, impl_: BuiltinDeriveImplId) ->
interner,
adt_predicates
.explicit_predicates()
- .iter_identity_copied()
+ .iter_identity()
.chain(duplicated_bounds)
.chain(unsize_bound),
)
@@ -313,7 +316,7 @@ fn simple_trait_predicates<'db>(
interner,
adt_predicates
.explicit_predicates()
- .iter_identity_copied()
+ .iter_identity()
.chain(extra_predicates)
.chain(assoc_type_bounds),
)
@@ -440,7 +443,7 @@ mod tests {
format_to!(
predicates,
"{}\n\n",
- preds.iter().format_with("\n", |pred, formatter| formatter(&format_args!(
+ preds.format_with("\n", |pred, formatter| formatter(&format_args!(
"{pred:?}"
))),
);
diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs
index 31cf86476f..aee27dcfde 100644
--- a/crates/hir-ty/src/consteval/tests.rs
+++ b/crates/hir-ty/src/consteval/tests.rs
@@ -1,4 +1,4 @@
-use base_db::RootQueryDb;
+use base_db::all_crates;
use hir_def::signatures::ConstSignature;
use hir_expand::EditionedFileId;
use rustc_apfloat::{
@@ -108,7 +108,7 @@ fn pretty_print_err(e: ConstEvalError, db: &TestDB) -> String {
let mut err = String::new();
let span_formatter = |file, range| format!("{file:?} {range:?}");
let display_target =
- DisplayTarget::from_crate(db, *db.all_crates().last().expect("no crate graph present"));
+ DisplayTarget::from_crate(db, *all_crates(db).last().expect("no crate graph present"));
match e {
ConstEvalError::MirLowerError(e) => {
e.pretty_print(&mut err, db, span_formatter, display_target)
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index d680588645..0c4e34db7d 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -640,7 +640,7 @@ fn write_projection<'db>(
// FIXME: We shouldn't use `param.id`, it should be removed. We should know the
// `GenericDefId` from the formatted type (store it inside the `HirFormatter`).
let bounds = GenericPredicates::query_all(f.db, param.id.parent())
- .iter_identity_copied()
+ .iter_identity()
.filter(|wc| {
let ty = match wc.kind().skip_binder() {
ClauseKind::Trait(tr) => tr.self_ty(),
@@ -1466,7 +1466,7 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
}
TypeParamProvenance::ArgumentImplTrait => {
let bounds = GenericPredicates::query_all(f.db, param.id.parent())
- .iter_identity_copied()
+ .iter_identity()
.filter(|wc| match wc.kind().skip_binder() {
ClauseKind::Trait(tr) => tr.self_ty() == *self,
ClauseKind::Projection(proj) => proj.self_ty() == *self,
diff --git a/crates/hir-ty/src/dyn_compatibility.rs b/crates/hir-ty/src/dyn_compatibility.rs
index 4c300affd8..e70918f8e1 100644
--- a/crates/hir-ty/src/dyn_compatibility.rs
+++ b/crates/hir-ty/src/dyn_compatibility.rs
@@ -141,7 +141,7 @@ pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> b
// FIXME: We should use `explicit_predicates_of` here, which hasn't been implemented to
// rust-analyzer yet
// https://github.com/rust-lang/rust/blob/ddaf12390d3ffb7d5ba74491a48f3cd528e5d777/compiler/rustc_hir_analysis/src/collect/predicates_of.rs#L490
- elaborate::elaborate(interner, predicates.iter_identity_copied()).any(|pred| {
+ elaborate::elaborate(interner, predicates.iter_identity()).any(|pred| {
match pred.kind().skip_binder() {
ClauseKind::Trait(trait_pred) => {
if sized == trait_pred.def_id().0
@@ -164,7 +164,7 @@ pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> b
// So, just return single boolean value for existence of such `Self` reference
fn predicates_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool {
GenericPredicates::query_explicit(db, trait_.into())
- .iter_identity_copied()
+ .iter_identity()
.any(|pred| predicate_references_self(db, trait_, pred, AllowSelfProjection::No))
}
@@ -360,8 +360,8 @@ where
cb(MethodViolationCode::UndispatchableReceiver)?;
}
- let predicates = GenericPredicates::query_own(db, func.into());
- for pred in predicates.iter_identity_copied() {
+ let predicates = GenericPredicates::query_own_explicit(db, func.into());
+ for pred in predicates.iter_identity() {
let pred = pred.kind().skip_binder();
if matches!(pred, ClauseKind::TypeOutlives(_)) {
@@ -459,7 +459,7 @@ fn receiver_is_dispatchable<'db>(
clauses: Clauses::new_from_iter(
interner,
generic_predicates
- .iter_identity_copied()
+ .iter_identity()
.chain([unsize_predicate.upcast(interner), trait_predicate.upcast(interner)])
.chain(meta_sized_predicate),
),
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index d14e9d6526..bd897113bf 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -1706,6 +1706,61 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.generic_def,
LifetimeElisionKind::Infer,
);
+
+ if let Some(type_anchor) = path.type_anchor() {
+ let mut segments = path.segments();
+ if segments.is_empty() {
+ return (self.err_ty(), None);
+ }
+ let (mut ty, type_ns) = ctx.lower_ty_ext(type_anchor);
+ ty = self.table.process_user_written_ty(ty);
+
+ if let Some(TypeNs::SelfType(impl_)) = type_ns
+ && let Some(trait_ref) = self.db.impl_trait(impl_)
+ && let trait_ref = trait_ref.instantiate_identity()
+ && let Some(assoc_type) = trait_ref
+ .def_id
+ .0
+ .trait_items(self.db)
+ .associated_type_by_name(segments.first().unwrap().name)
+ {
+ // `<Self>::AssocType`
+ let args = self.infcx().fill_rest_fresh_args(assoc_type.into(), trait_ref.args);
+ let alias = Ty::new_alias(
+ self.interner(),
+ AliasTyKind::Projection,
+ AliasTy::new_from_args(self.interner(), assoc_type.into(), args),
+ );
+ ty = self.table.try_structurally_resolve_type(alias);
+ segments = segments.skip(1);
+ }
+
+ let variant = match ty.as_adt() {
+ Some((AdtId::StructId(id), _)) => id.into(),
+ Some((AdtId::UnionId(id), _)) => id.into(),
+ Some((AdtId::EnumId(id), _)) => {
+ if let Some(segment) = segments.first()
+ && let enum_data = id.enum_variants(self.db)
+ && let Some(variant) = enum_data.variant(segment.name)
+ {
+ // FIXME: Report error if there are generics on the variant.
+ segments = segments.skip(1);
+ variant.into()
+ } else {
+ return (self.err_ty(), None);
+ }
+ }
+ None => return (self.err_ty(), None),
+ };
+
+ if !segments.is_empty() {
+ // FIXME: Report an error.
+ return (self.err_ty(), None);
+ } else {
+ return (ty, Some(variant));
+ }
+ }
+
let mut path_ctx = ctx.at_path(path, node);
let interner = DbInterner::conjure();
let (resolution, unresolved) = if value_ns {
@@ -1838,6 +1893,46 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
});
(ty, variant)
}
+ TypeNs::TraitId(_) => {
+ let Some(remaining_idx) = unresolved else {
+ return (self.err_ty(), None);
+ };
+
+ let remaining_segments = path.segments().skip(remaining_idx);
+
+ if remaining_segments.len() >= 2 {
+ path_ctx.ignore_last_segment();
+ }
+
+ let (mut ty, _) = path_ctx.lower_partly_resolved_path(resolution, true);
+ ty = self.table.process_user_written_ty(ty);
+
+ if let Some(segment) = remaining_segments.get(1)
+ && let Some((AdtId::EnumId(id), _)) = ty.as_adt()
+ {
+ let enum_data = id.enum_variants(self.db);
+ if let Some(variant) = enum_data.variant(segment.name) {
+ return if remaining_segments.len() == 2 {
+ (ty, Some(variant.into()))
+ } else {
+ // We still have unresolved paths, but enum variants never have
+ // associated types!
+ // FIXME: Report an error.
+ (self.err_ty(), None)
+ };
+ }
+ }
+
+ let variant = ty.as_adt().and_then(|(id, _)| match id {
+ AdtId::StructId(s) => Some(VariantId::StructId(s)),
+ AdtId::UnionId(u) => Some(VariantId::UnionId(u)),
+ AdtId::EnumId(_) => {
+ // FIXME Error E0071, expected struct, variant or union type, found enum `Foo`
+ None
+ }
+ });
+ (ty, variant)
+ }
TypeNs::TypeAliasId(it) => {
let Some(mod_path) = path.mod_path() else {
never!("resolver should always resolve lang item paths");
@@ -1859,10 +1954,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
// FIXME potentially resolve assoc type
(self.err_ty(), None)
}
- TypeNs::AdtId(AdtId::EnumId(_))
- | TypeNs::BuiltinType(_)
- | TypeNs::TraitId(_)
- | TypeNs::ModuleId(_) => {
+ TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::ModuleId(_) => {
// FIXME diagnostic
(self.err_ty(), None)
}
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index dc57b1d1c2..ee34a30eba 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -2158,7 +2158,7 @@ impl<'db> InferenceContext<'_, 'db> {
);
let param_env = self.table.param_env;
self.table.register_predicates(clauses_as_obligations(
- generic_predicates.iter_instantiated_copied(self.interner(), parameters.as_slice()),
+ generic_predicates.iter_instantiated(self.interner(), parameters.as_slice()),
ObligationCause::new(),
param_env,
));
diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs
index 71d68ccd47..3cadc8e933 100644
--- a/crates/hir-ty/src/infer/path.rs
+++ b/crates/hir-ty/src/infer/path.rs
@@ -228,7 +228,7 @@ impl<'db> InferenceContext<'_, 'db> {
let predicates = GenericPredicates::query_all(self.db, def);
let param_env = self.table.param_env;
self.table.register_predicates(clauses_as_obligations(
- predicates.iter_instantiated_copied(interner, subst.as_slice()),
+ predicates.iter_instantiated(interner, subst.as_slice()),
ObligationCause::new(),
param_env,
));
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 7259099107..71a7db6559 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -2016,17 +2016,21 @@ fn type_alias_bounds_with_diagnostics<'db>(
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GenericPredicates {
- // The order is the following: first, if `parent_is_trait == true`, comes the implicit trait
- // predicate for the parent. Then come the bounds of the associated types of the parents,
- // then the explicit, self-only predicates for the parent, then the explicit, self-only trait
- // predicate for the child, then the bounds of the associated types of the child,
- // then the implicit trait predicate for the child, if `is_trait` is `true`.
+ // The order is the following:
+ //
+ // 1. If `has_trait_implied_predicate == true`, the implicit trait predicate.
+ // 2. The bounds of the associated types of the parents, coming from `Trait<Assoc: Trait>`.
+ // Note: associated type bounds from `Self::Assoc: Trait` on traits *won't* be included
+ // here, they are in 3.
+ // 3. The explicit, self-only predicates for the parent.
+ // 4. The explicit, self-only trait predicate for the child,
+ // 5. The bounds of the associated types of the child.
predicates: StoredEarlyBinder<StoredClauses>,
+ // Keep this ordered according to the above.
+ has_trait_implied_predicate: bool,
parent_explicit_self_predicates_start: u32,
own_predicates_start: u32,
own_assoc_ty_bounds_start: u32,
- is_trait: bool,
- parent_is_trait: bool,
}
#[salsa::tracked]
@@ -2065,11 +2069,10 @@ impl GenericPredicates {
let len = predicates.get().skip_binder().len() as u32;
Self {
predicates,
+ has_trait_implied_predicate: false,
parent_explicit_self_predicates_start: 0,
own_predicates_start: 0,
own_assoc_ty_bounds_start: len,
- is_trait: false,
- parent_is_trait: false,
}
}
@@ -2082,58 +2085,68 @@ impl GenericPredicates {
pub fn query_all<'db>(
db: &'db dyn HirDatabase,
def: GenericDefId,
- ) -> EarlyBinder<'db, &'db [Clause<'db>]> {
+ ) -> EarlyBinder<'db, impl Iterator<Item = Clause<'db>>> {
Self::query(db, def).all_predicates()
}
#[inline]
- pub fn query_own<'db>(
+ pub fn query_own_explicit<'db>(
db: &'db dyn HirDatabase,
def: GenericDefId,
- ) -> EarlyBinder<'db, &'db [Clause<'db>]> {
- Self::query(db, def).own_predicates()
+ ) -> EarlyBinder<'db, impl Iterator<Item = Clause<'db>>> {
+ Self::query(db, def).own_explicit_predicates()
}
#[inline]
pub fn query_explicit<'db>(
db: &'db dyn HirDatabase,
def: GenericDefId,
- ) -> EarlyBinder<'db, &'db [Clause<'db>]> {
+ ) -> EarlyBinder<'db, impl Iterator<Item = Clause<'db>>> {
Self::query(db, def).explicit_predicates()
}
#[inline]
- pub fn query_explicit_implied<'db>(
- db: &'db dyn HirDatabase,
- def: GenericDefId,
- ) -> EarlyBinder<'db, &'db [Clause<'db>]> {
- Self::query(db, def).explicit_implied_predicates()
+ pub fn all_predicates(&self) -> EarlyBinder<'_, impl Iterator<Item = Clause<'_>>> {
+ self.predicates.get().map_bound(|it| it.as_slice().iter().copied())
}
#[inline]
- pub fn all_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> {
- self.predicates.get().map_bound(|it| it.as_slice())
+ pub fn own_explicit_predicates(&self) -> EarlyBinder<'_, impl Iterator<Item = Clause<'_>>> {
+ self.predicates
+ .get()
+ .map_bound(|it| it.as_slice()[self.own_predicates_start as usize..].iter().copied())
}
#[inline]
- pub fn own_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> {
- self.predicates.get().map_bound(|it| &it.as_slice()[self.own_predicates_start as usize..])
+ pub fn explicit_predicates(&self) -> EarlyBinder<'_, impl Iterator<Item = Clause<'_>>> {
+ self.predicates.get().map_bound(|it| {
+ it.as_slice()[usize::from(self.has_trait_implied_predicate)..].iter().copied()
+ })
}
- /// Returns the predicates, minus the implicit `Self: Trait` predicate and bounds of the
- /// associated types for a trait.
#[inline]
- pub fn explicit_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> {
+ pub fn explicit_non_assoc_types_predicates(
+ &self,
+ ) -> EarlyBinder<'_, impl Iterator<Item = Clause<'_>>> {
self.predicates.get().map_bound(|it| {
- &it.as_slice()[self.parent_explicit_self_predicates_start as usize
+ it.as_slice()[self.parent_explicit_self_predicates_start as usize
..self.own_assoc_ty_bounds_start as usize]
+ .iter()
+ .copied()
})
}
#[inline]
- pub fn explicit_implied_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> {
- self.predicates.get().map_bound(|it| {
- &it.as_slice()[usize::from(self.parent_is_trait)..it.len() - usize::from(self.is_trait)]
+ pub fn explicit_assoc_types_predicates(
+ &self,
+ ) -> EarlyBinder<'_, impl Iterator<Item = Clause<'_>>> {
+ self.predicates.get().map_bound(|predicates| {
+ let predicates = predicates.as_slice();
+ predicates[usize::from(self.has_trait_implied_predicate)
+ ..self.parent_explicit_self_predicates_start as usize]
+ .iter()
+ .copied()
+ .chain(predicates[self.own_assoc_ty_bounds_start as usize..].iter().copied())
})
}
}
@@ -2142,10 +2155,8 @@ pub(crate) fn param_env_from_predicates<'db>(
interner: DbInterner<'db>,
predicates: &'db GenericPredicates,
) -> ParamEnv<'db> {
- let clauses = rustc_type_ir::elaborate::elaborate(
- interner,
- predicates.all_predicates().iter_identity_copied(),
- );
+ let clauses =
+ rustc_type_ir::elaborate::elaborate(interner, predicates.all_predicates().iter_identity());
let clauses = Clauses::new_from_iter(interner, clauses);
// FIXME: We should normalize projections here, like rustc does.
@@ -2290,42 +2301,28 @@ fn generic_predicates(db: &dyn HirDatabase, def: GenericDefId) -> (GenericPredic
let diagnostics = create_diagnostics(ctx.diagnostics);
- // The order is:
- //
- // 1. parent implicit trait pred
- // 2. parent assoc bounds
- // 3. parent self only preds
- // 4. own self only preds
- // 5. own assoc ty bounds
- // 6. own implicit trait pred
- //
- // The purpose of this is to index the slice of the followings, without making extra `Vec`s or
- // iterators:
- // - explicit self only predicates, of own or own + self
- // - explicit predicates, of own or own + self
let predicates = parent_implicit_trait_predicate
.iter()
+ .chain(own_implicit_trait_predicate.iter())
.chain(parent_assoc_ty_bounds.iter())
.chain(parent_predicates.iter())
.chain(own_predicates.iter())
.chain(own_assoc_ty_bounds.iter())
- .chain(own_implicit_trait_predicate.iter())
.copied()
.collect::<Vec<_>>();
- let parent_is_trait = parent_implicit_trait_predicate.is_some();
- let is_trait = own_implicit_trait_predicate.is_some();
+ let has_trait_implied_predicate =
+ parent_implicit_trait_predicate.is_some() || own_implicit_trait_predicate.is_some();
let parent_explicit_self_predicates_start =
- parent_is_trait as u32 + parent_assoc_ty_bounds.len() as u32;
+ has_trait_implied_predicate as u32 + parent_assoc_ty_bounds.len() as u32;
let own_predicates_start =
parent_explicit_self_predicates_start + parent_predicates.len() as u32;
let own_assoc_ty_bounds_start = own_predicates_start + own_predicates.len() as u32;
let predicates = GenericPredicates {
+ has_trait_implied_predicate,
parent_explicit_self_predicates_start,
own_predicates_start,
own_assoc_ty_bounds_start,
- is_trait,
- parent_is_trait,
predicates: StoredEarlyBinder::bind(Clauses::new_from_slice(&predicates).store()),
};
return (predicates, diagnostics);
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index 05b9ea5d74..b18e48c1fe 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -324,7 +324,7 @@ impl<'db> InferenceTable<'db> {
// any late-bound regions appearing in its bounds.
let bounds = GenericPredicates::query_all(self.db, method_item.into());
let bounds = clauses_as_obligations(
- bounds.iter_instantiated_copied(interner, args.as_slice()),
+ bounds.iter_instantiated(interner, args.as_slice()),
ObligationCause::new(),
self.param_env,
);
diff --git a/crates/hir-ty/src/method_resolution/confirm.rs b/crates/hir-ty/src/method_resolution/confirm.rs
index ec589085a8..94c70c29f7 100644
--- a/crates/hir-ty/src/method_resolution/confirm.rs
+++ b/crates/hir-ty/src/method_resolution/confirm.rs
@@ -136,7 +136,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
);
let illegal_sized_bound = self.predicates_require_illegal_sized_bound(
GenericPredicates::query_all(self.db(), self.candidate.into())
- .iter_instantiated_copied(self.interner(), filler_args.as_slice()),
+ .iter_instantiated(self.interner(), filler_args.as_slice()),
);
// Unify the (adjusted) self type with what the method expects.
@@ -509,7 +509,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
let def_id = self.candidate;
let method_predicates = clauses_as_obligations(
GenericPredicates::query_all(self.db(), def_id.into())
- .iter_instantiated_copied(self.interner(), all_args),
+ .iter_instantiated(self.interner(), all_args),
ObligationCause::new(),
self.ctx.table.param_env,
);
diff --git a/crates/hir-ty/src/method_resolution/probe.rs b/crates/hir-ty/src/method_resolution/probe.rs
index 8c76bfbc07..3604076ccd 100644
--- a/crates/hir-ty/src/method_resolution/probe.rs
+++ b/crates/hir-ty/src/method_resolution/probe.rs
@@ -1595,7 +1595,7 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> {
// Check whether the impl imposes obligations we have to worry about.
let impl_bounds = GenericPredicates::query_all(self.db(), impl_def_id.into());
let impl_bounds = clauses_as_obligations(
- impl_bounds.iter_instantiated_copied(self.interner(), impl_args.as_slice()),
+ impl_bounds.iter_instantiated(self.interner(), impl_args.as_slice()),
ObligationCause::new(),
self.param_env(),
);
diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs
index ff6c99ca53..2aed76ec90 100644
--- a/crates/hir-ty/src/mir/eval/shim.rs
+++ b/crates/hir-ty/src/mir/eval/shim.rs
@@ -518,7 +518,7 @@ impl<'db> Evaluator<'db> {
"sched_getaffinity" => {
let [_pid, _set_size, set] = args else {
return Err(MirEvalError::InternalError(
- "libc::write args are not provided".into(),
+ "sched_getaffinity args are not provided".into(),
));
};
let set = Address::from_bytes(set.get(self)?)?;
@@ -530,9 +530,7 @@ impl<'db> Evaluator<'db> {
}
"getenv" => {
let [name] = args else {
- return Err(MirEvalError::InternalError(
- "libc::write args are not provided".into(),
- ));
+ return Err(MirEvalError::InternalError("getenv args are not provided".into()));
};
let mut name_buf = vec![];
let name = {
diff --git a/crates/hir-ty/src/next_solver/interner.rs b/crates/hir-ty/src/next_solver/interner.rs
index 5b81c7675d..622648bc8d 100644
--- a/crates/hir-ty/src/next_solver/interner.rs
+++ b/crates/hir-ty/src/next_solver/interner.rs
@@ -1439,81 +1439,55 @@ impl<'db> Interner for DbInterner<'db> {
}
}
- #[tracing::instrument(level = "debug", skip(self), ret)]
fn predicates_of(
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
- predicates_of(self.db, def_id).all_predicates().map_bound(|it| it.iter().copied())
+ predicates_of(self.db, def_id).all_predicates()
}
- #[tracing::instrument(level = "debug", skip(self), ret)]
fn own_predicates_of(
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
- predicates_of(self.db, def_id).own_predicates().map_bound(|it| it.iter().copied())
+ predicates_of(self.db, def_id).own_explicit_predicates()
}
- #[tracing::instrument(skip(self), ret)]
fn explicit_super_predicates_of(
self,
def_id: Self::TraitId,
) -> EarlyBinder<Self, impl IntoIterator<Item = (Self::Clause, Self::Span)>> {
- let is_self = |ty: Ty<'db>| match ty.kind() {
- rustc_type_ir::TyKind::Param(param) => param.index == 0,
- _ => false,
- };
-
- GenericPredicates::query_explicit(self.db, def_id.0.into()).map_bound(move |predicates| {
- predicates
- .iter()
- .copied()
- .filter(move |p| match p.kind().skip_binder() {
- // rustc has the following assertion:
- // https://github.com/rust-lang/rust/blob/52618eb338609df44978b0ca4451ab7941fd1c7a/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs#L525-L608
- ClauseKind::Trait(it) => is_self(it.self_ty()),
- ClauseKind::TypeOutlives(it) => is_self(it.0),
- ClauseKind::Projection(it) => is_self(it.self_ty()),
- ClauseKind::HostEffect(it) => is_self(it.self_ty()),
- _ => false,
- })
- .map(|p| (p, Span::dummy()))
- })
+ GenericPredicates::query(self.db, def_id.0.into())
+ .explicit_non_assoc_types_predicates()
+ .map_bound(move |predicates| {
+ predicates.filter(|p| is_clause_at_ty(p, is_ty_self)).map(|p| (p, Span::dummy()))
+ })
}
- #[tracing::instrument(skip(self), ret)]
fn explicit_implied_predicates_of(
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = (Self::Clause, Self::Span)>> {
- fn is_self_or_assoc(ty: Ty<'_>) -> bool {
- match ty.kind() {
- rustc_type_ir::TyKind::Param(param) => param.index == 0,
- rustc_type_ir::TyKind::Alias(rustc_type_ir::AliasTyKind::Projection, alias) => {
- is_self_or_assoc(alias.self_ty())
- }
- _ => false,
+ fn is_ty_assoc_of_self(ty: Ty<'_>) -> bool {
+ // FIXME: Is this correct wrt. combined kind of assoc type bounds, i.e. `where Self::Assoc: Trait<Assoc2: Trait>`
+ // wrt. `Assoc2`, which we should exclude?
+ if let TyKind::Alias(AliasTyKind::Projection, alias) = ty.kind() {
+ is_ty_assoc_of_self(alias.self_ty())
+ } else {
+ is_ty_self(ty)
}
}
- predicates_of(self.db, def_id).explicit_implied_predicates().map_bound(|predicates| {
- predicates
- .iter()
- .copied()
- .filter(|p| match p.kind().skip_binder() {
- ClauseKind::Trait(it) => is_self_or_assoc(it.self_ty()),
- ClauseKind::TypeOutlives(it) => is_self_or_assoc(it.0),
- ClauseKind::Projection(it) => is_self_or_assoc(it.self_ty()),
- ClauseKind::HostEffect(it) => is_self_or_assoc(it.self_ty()),
- // FIXME: Not sure is this correct to allow other clauses but we might replace
- // `generic_predicates_ns` query here with something closer to rustc's
- // `implied_bounds_with_filter`, which is more granular lowering than this
- // "lower at once and then filter" implementation.
- _ => true,
- })
- .map(|p| (p, Span::dummy()))
- })
+ let predicates = predicates_of(self.db, def_id);
+ let non_assoc_types = predicates
+ .explicit_non_assoc_types_predicates()
+ .skip_binder()
+ .filter(|p| is_clause_at_ty(p, is_ty_self));
+ let assoc_types = predicates
+ .explicit_assoc_types_predicates()
+ .skip_binder()
+ .filter(|p| is_clause_at_ty(p, is_ty_assoc_of_self));
+ EarlyBinder::bind(non_assoc_types.chain(assoc_types).map(|it| (it, Span::dummy())))
}
fn impl_super_outlives(
@@ -2294,6 +2268,24 @@ impl<'db> Interner for DbInterner<'db> {
}
}
+fn is_ty_self(ty: Ty<'_>) -> bool {
+ match ty.kind() {
+ TyKind::Param(param) => param.index == 0,
+ _ => false,
+ }
+}
+fn is_clause_at_ty(p: &Clause<'_>, filter: impl FnOnce(Ty<'_>) -> bool) -> bool {
+ match p.kind().skip_binder() {
+ // rustc has the following assertion:
+ // https://github.com/rust-lang/rust/blob/52618eb338609df44978b0ca4451ab7941fd1c7a/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs#L525-L608
+ ClauseKind::Trait(it) => filter(it.self_ty()),
+ ClauseKind::TypeOutlives(it) => filter(it.0),
+ ClauseKind::Projection(it) => filter(it.self_ty()),
+ ClauseKind::HostEffect(it) => filter(it.self_ty()),
+ _ => false,
+ }
+}
+
impl<'db> DbInterner<'db> {
pub fn shift_bound_var_indices<T>(self, bound_vars: usize, value: T) -> T
where
diff --git a/crates/hir-ty/src/next_solver/ty.rs b/crates/hir-ty/src/next_solver/ty.rs
index 192cdb70ae..8e892b65ea 100644
--- a/crates/hir-ty/src/next_solver/ty.rs
+++ b/crates/hir-ty/src/next_solver/ty.rs
@@ -696,7 +696,7 @@ impl<'db> Ty<'db> {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::ArgumentImplTrait => {
let predicates = GenericPredicates::query_all(db, param.id.parent())
- .iter_identity_copied()
+ .iter_identity()
.filter(|wc| match wc.kind().skip_binder() {
ClauseKind::Trait(tr) => tr.self_ty() == self,
ClauseKind::Projection(pred) => pred.self_ty() == self,
diff --git a/crates/hir-ty/src/specialization.rs b/crates/hir-ty/src/specialization.rs
index 90cbcfea6a..8bc6c51fae 100644
--- a/crates/hir-ty/src/specialization.rs
+++ b/crates/hir-ty/src/specialization.rs
@@ -109,7 +109,7 @@ fn specializes_query(
// only be referenced via projection predicates.
ocx.register_obligations(clauses_as_obligations(
GenericPredicates::query_all(db, parent_impl_def_id.into())
- .iter_instantiated_copied(interner, parent_args.as_slice()),
+ .iter_instantiated(interner, parent_args.as_slice()),
cause.clone(),
param_env,
));
diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs
index 243456c85f..e19e26ebc4 100644
--- a/crates/hir-ty/src/test_db.rs
+++ b/crates/hir-ty/src/test_db.rs
@@ -3,8 +3,8 @@
use std::{fmt, panic, sync::Mutex};
use base_db::{
- CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, Nonce, RootQueryDb,
- SourceDatabase, SourceRoot, SourceRootId, SourceRootInput,
+ CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, Nonce, SourceDatabase, SourceRoot,
+ SourceRootId, SourceRootInput, all_crates, relevant_crates, set_all_crates_with_durability,
};
use hir_def::{ModuleId, db::DefDatabase, nameres::crate_def_map};
@@ -45,7 +45,7 @@ impl Default for TestDB {
};
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
// This needs to be here otherwise `CrateGraphBuilder` panics.
- this.set_all_crates(Arc::new(Box::new([])));
+ set_all_crates_with_durability(&mut this, std::iter::empty(), Durability::HIGH);
_ = base_db::LibraryRoots::builder(Default::default())
.durability(Durability::MEDIUM)
.new(&this);
@@ -142,7 +142,7 @@ impl panic::RefUnwindSafe for TestDB {}
impl TestDB {
pub(crate) fn module_for_file_opt(&self, file_id: impl Into<FileId>) -> Option<ModuleId> {
let file_id = file_id.into();
- for &krate in self.relevant_crates(file_id).iter() {
+ for &krate in relevant_crates(self, file_id).iter() {
let crate_def_map = crate_def_map(self, krate);
for (module_id, data) in crate_def_map.modules() {
if data.origin.file_id().map(|file_id| file_id.file_id(self)) == Some(file_id) {
@@ -161,7 +161,7 @@ impl TestDB {
&self,
) -> FxHashMap<EditionedFileId, Vec<(TextRange, String)>> {
let mut files = Vec::new();
- for &krate in self.all_crates().iter() {
+ for &krate in all_crates(self).iter() {
let crate_def_map = crate_def_map(self, krate);
for (module_id, _) in crate_def_map.modules() {
let file_id = crate_def_map[module_id].origin.file_id();
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index e806999cb4..7cda259664 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -31,11 +31,11 @@ fn foo() -> i32 {
&[("InferenceResult::for_body_", 1)],
expect_test::expect![[r#"
[
- "source_root_crates_shim",
+ "source_root_crates",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"InferenceResult::for_body_",
"FunctionSignature::of_",
@@ -76,7 +76,7 @@ fn foo() -> i32 {
&[("InferenceResult::for_body_", 0)],
expect_test::expect![[r#"
[
- "parse_shim",
+ "parse",
"ast_id_map",
"file_item_tree_query",
"real_span_map_shim",
@@ -119,11 +119,11 @@ fn baz() -> i32 {
&[("InferenceResult::for_body_", 3)],
expect_test::expect![[r#"
[
- "source_root_crates_shim",
+ "source_root_crates",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"InferenceResult::for_body_",
"FunctionSignature::of_",
@@ -189,7 +189,7 @@ fn baz() -> i32 {
&[("InferenceResult::for_body_", 1)],
expect_test::expect![[r#"
[
- "parse_shim",
+ "parse",
"ast_id_map",
"file_item_tree_query",
"real_span_map_shim",
@@ -239,11 +239,11 @@ $0",
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
- "source_root_crates_shim",
+ "source_root_crates",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"TraitImpls::for_crate_",
"lang_items",
@@ -278,7 +278,7 @@ pub struct NewStruct {
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
- "parse_shim",
+ "parse",
"ast_id_map",
"file_item_tree_query",
"real_span_map_shim",
@@ -314,11 +314,11 @@ $0",
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
- "source_root_crates_shim",
+ "source_root_crates",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"TraitImpls::for_crate_",
"lang_items",
@@ -354,7 +354,7 @@ pub enum SomeEnum {
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
- "parse_shim",
+ "parse",
"ast_id_map",
"file_item_tree_query",
"real_span_map_shim",
@@ -390,11 +390,11 @@ $0",
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
- "source_root_crates_shim",
+ "source_root_crates",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"TraitImpls::for_crate_",
"lang_items",
@@ -427,7 +427,7 @@ fn bar() -> f32 {
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
- "parse_shim",
+ "parse",
"ast_id_map",
"file_item_tree_query",
"real_span_map_shim",
@@ -467,11 +467,11 @@ $0",
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
- "source_root_crates_shim",
+ "source_root_crates",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"TraitImpls::for_crate_",
"lang_items",
@@ -512,7 +512,7 @@ impl SomeStruct {
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
- "parse_shim",
+ "parse",
"ast_id_map",
"file_item_tree_query",
"real_span_map_shim",
@@ -568,11 +568,11 @@ fn main() {
&[("trait_solve_shim", 0)],
expect_test::expect![[r#"
[
- "source_root_crates_shim",
+ "source_root_crates",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map",
- "parse_shim",
+ "parse",
"real_span_map_shim",
"TraitItems::query_with_diagnostics_",
"Body::of_",
@@ -664,7 +664,7 @@ fn main() {
&[("trait_solve_shim", 0)],
expect_test::expect![[r#"
[
- "parse_shim",
+ "parse",
"ast_id_map",
"file_item_tree_query",
"real_span_map_shim",
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs
index e4fc7e56c6..d3dfc44c22 100644
--- a/crates/hir-ty/src/tests/regression.rs
+++ b/crates/hir-ty/src/tests/regression.rs
@@ -2841,3 +2841,18 @@ fn wrapped_abs<T: SelfAbs<Output = T>>(v: T) -> T {
"#,
);
}
+
+#[test]
+fn regression_21899() {
+ check_no_mismatches(
+ r#"
+trait B where
+ Self::T: B,
+{
+ type T;
+}
+
+fn foo<T: B>(v: T::T) {}
+ "#,
+ );
+}
diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs
index 22359d8f1f..1d27d52a36 100644
--- a/crates/hir-ty/src/tests/traits.rs
+++ b/crates/hir-ty/src/tests/traits.rs
@@ -4449,14 +4449,14 @@ impl Trait for () {
let a = Self::Assoc { x };
// ^ S
let a = <Self>::Assoc { x }; // unstable
- // ^ {unknown}
+ // ^ S
// should be `Copy` but we don't track ownership anyway.
let value = S { x };
if let Self::Assoc { x } = value {}
// ^ u32
if let <Self>::Assoc { x } = value {} // unstable
- // ^ {unknown}
+ // ^ u32
}
}
"#,
@@ -4508,22 +4508,22 @@ impl Trait for () {
let a = Self::Assoc::Struct { x };
// ^ E
let a = <Self>::Assoc::Struct { x }; // unstable
- // ^ {unknown}
+ // ^ E
let a = <Self::Assoc>::Struct { x }; // unstable
- // ^ {unknown}
+ // ^ E
let a = <<Self>::Assoc>::Struct { x }; // unstable
- // ^ {unknown}
+ // ^ E
// should be `Copy` but we don't track ownership anyway.
let value = E::Struct { x: 42 };
if let Self::Assoc::Struct { x } = value {}
// ^ u32
if let <Self>::Assoc::Struct { x } = value {} // unstable
- // ^ {unknown}
+ // ^ u32
if let <Self::Assoc>::Struct { x } = value {} // unstable
- // ^ {unknown}
+ // ^ u32
if let <<Self>::Assoc>::Struct { x } = value {} // unstable
- // ^ {unknown}
+ // ^ u32
}
}
"#,
@@ -5148,3 +5148,98 @@ fn foo(v: Struct<f32>) {
"#,
);
}
+
+#[test]
+fn more_qualified_paths() {
+ check_infer(
+ r#"
+struct T;
+struct S {
+ a: u32,
+}
+
+trait Trait {
+ type Assoc;
+
+ fn foo();
+}
+
+impl Trait for T {
+ type Assoc = S;
+
+ fn foo() {
+ let <Self>::Assoc { a } = <Self>::Assoc { a: 0 };
+ }
+}
+
+enum E {
+ ES { a: u32 },
+ ET(u32),
+}
+
+impl Trait for E {
+ type Assoc = Self;
+
+ fn foo() {
+ let <Self>::Assoc::ES { a } = <Self>::Assoc::ES { a: 0 };
+ }
+}
+
+fn foo() {
+ let <T as Trait>::Assoc { a } = <T as Trait>::Assoc { a: 0 };
+
+ let <E>::ES { a } = (<E>::ES { a: 0 }) else { loop {} };
+ let <E>::ET(a) = <E>::ET(0) else { loop {} };
+ let <E as Trait>::Assoc::ES { a } = (<E as Trait>::Assoc::ES { a: 0 }) else { loop {} };
+ let <E as Trait>::Assoc::ET(a) = <E as Trait>::Assoc::ET(0) else { loop {} };
+}
+ "#,
+ expect![[r#"
+ 137..202 '{ ... }': ()
+ 151..170 '<Self>... { a }': S
+ 167..168 'a': u32
+ 173..195 '<Self>...a: 0 }': S
+ 192..193 '0': u32
+ 306..379 '{ ... }': ()
+ 320..343 '<Self>... { a }': E
+ 340..341 'a': u32
+ 346..372 '<Self>...a: 0 }': E
+ 369..370 '0': u32
+ 392..748 '{ ...} }; }': ()
+ 402..427 '<T as ... { a }': S
+ 424..425 'a': u32
+ 430..458 '<T as ...a: 0 }': S
+ 455..456 '0': u32
+ 469..482 '<E>::ES { a }': E
+ 479..480 'a': u32
+ 486..502 '<E>::E...a: 0 }': E
+ 499..500 '0': u32
+ 509..520 '{ loop {} }': !
+ 511..518 'loop {}': !
+ 516..518 '{}': ()
+ 530..540 '<E>::ET(a)': E
+ 538..539 'a': u32
+ 543..550 '<E>::ET': fn ET(u32) -> E
+ 543..553 '<E>::ET(0)': E
+ 551..552 '0': u32
+ 559..570 '{ loop {} }': !
+ 561..568 'loop {}': !
+ 566..568 '{}': ()
+ 580..609 '<E as ... { a }': E
+ 606..607 'a': u32
+ 613..645 '<E as ...a: 0 }': E
+ 642..643 '0': u32
+ 652..663 '{ loop {} }': !
+ 654..661 'loop {}': !
+ 659..661 '{}': ()
+ 673..699 '<E as ...:ET(a)': E
+ 697..698 'a': u32
+ 702..725 '<E as ...oc::ET': fn ET(u32) -> E
+ 702..728 '<E as ...:ET(0)': E
+ 726..727 '0': u32
+ 734..745 '{ loop {} }': !
+ 736..743 'loop {}': !
+ 741..743 '{}': ()
+ "#]],
+ );
+}
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index 7f672a697c..555270bad8 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -282,7 +282,7 @@ pub struct MissingFields {
pub file: HirFileId,
pub field_list_parent: AstPtr<Either<ast::RecordExpr, ast::RecordPat>>,
pub field_list_parent_path: Option<AstPtr<ast::Path>>,
- pub missed_fields: Vec<Name>,
+ pub missed_fields: Vec<(Name, Field)>,
}
#[derive(Debug)]
@@ -476,7 +476,12 @@ impl<'db> AnyDiagnostic<'db> {
let variant_data = variant.fields(db);
let missed_fields = missed_fields
.into_iter()
- .map(|idx| variant_data.fields()[idx].name.clone())
+ .map(|idx| {
+ (
+ variant_data.fields()[idx].name.clone(),
+ Field { parent: variant.into(), id: idx },
+ )
+ })
.collect();
let record = match record {
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index 4bfdd239f9..53f24713cd 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -76,7 +76,7 @@ fn write_builtin_derive_impl_method<'db>(
let predicates =
hir_ty::builtin_derive::predicates(db, impl_).explicit_predicates().skip_binder();
- write_params_bounds(f, predicates)?;
+ write_params_bounds(f, &Vec::from_iter(predicates))?;
}
Ok(())
@@ -578,7 +578,7 @@ impl<'db> HirDisplay<'db> for TypeParam {
let ty = self.ty(f.db).ty;
let predicates = GenericPredicates::query_all(f.db, self.id.parent());
let predicates = predicates
- .iter_identity_copied()
+ .iter_identity()
.filter(|wc| match wc.kind().skip_binder() {
ClauseKind::Trait(tr) => tr.self_ty() == ty,
ClauseKind::Projection(proj) => proj.self_ty() == ty,
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index bc5e164830..89f3cfd140 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -45,7 +45,7 @@ use std::{
};
use arrayvec::ArrayVec;
-use base_db::{CrateDisplayName, CrateOrigin, LangCrateOrigin};
+use base_db::{CrateDisplayName, CrateOrigin, LangCrateOrigin, all_crates};
use either::Either;
use hir_def::{
AdtId, AssocItemId, AssocItemLoc, BuiltinDeriveImplId, CallableDefId, ConstId, ConstParamId,
@@ -243,7 +243,7 @@ impl Crate {
}
pub fn reverse_dependencies(self, db: &dyn HirDatabase) -> Vec<Crate> {
- let all_crates = db.all_crates();
+ let all_crates = all_crates(db);
all_crates
.iter()
.copied()
@@ -310,7 +310,7 @@ impl Crate {
}
pub fn all(db: &dyn HirDatabase) -> Vec<Crate> {
- db.all_crates().iter().map(|&id| Crate { id }).collect()
+ all_crates(db).iter().map(|&id| Crate { id }).collect()
}
/// Try to get the root URL of the documentation of a crate.
@@ -334,7 +334,7 @@ impl Crate {
}
fn core(db: &dyn HirDatabase) -> Option<Crate> {
- db.all_crates()
+ all_crates(db)
.iter()
.copied()
.find(|&krate| {
@@ -547,7 +547,7 @@ impl HasCrate for ModuleDef {
fn krate(&self, db: &dyn HirDatabase) -> Crate {
match self.module(db) {
Some(module) => module.krate(db),
- None => Crate::core(db).unwrap_or_else(|| db.all_crates()[0].into()),
+ None => Crate::core(db).unwrap_or_else(|| all_crates(db)[0].into()),
}
}
}
@@ -3394,7 +3394,7 @@ impl BuiltinType {
}
pub fn ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
- let core = Crate::core(db).map(|core| core.id).unwrap_or_else(|| db.all_crates()[0]);
+ let core = Crate::core(db).map(|core| core.id).unwrap_or_else(|| all_crates(db)[0]);
let interner = DbInterner::new_no_crate(db);
Type::new_for_crate(core, Ty::from_builtin_type(interner, self.inner))
}
@@ -4680,7 +4680,7 @@ impl TypeParam {
pub fn trait_bounds(self, db: &dyn HirDatabase) -> Vec<Trait> {
let self_ty = self.ty(db).ty;
GenericPredicates::query_explicit(db, self.id.parent())
- .iter_identity_copied()
+ .iter_identity()
.filter_map(|pred| match &pred.kind().skip_binder() {
ClauseKind::Trait(trait_ref) if trait_ref.self_ty() == self_ty => {
Some(Trait::from(trait_ref.def_id().0))
@@ -4898,12 +4898,12 @@ impl Impl {
std::iter::successors(module.block(db), |block| block.loc(db).module.block(db))
.filter_map(|block| TraitImpls::for_block(db, block).as_deref())
.for_each(|impls| impls.for_self_ty(&simplified_ty, &mut extend_with_impls));
- for &krate in &**db.all_crates() {
+ for &krate in &*all_crates(db) {
TraitImpls::for_crate(db, krate)
.for_self_ty(&simplified_ty, &mut extend_with_impls);
}
} else {
- for &krate in &**db.all_crates() {
+ for &krate in &*all_crates(db) {
TraitImpls::for_crate(db, krate)
.for_self_ty(&simplified_ty, &mut extend_with_impls);
}
@@ -7175,7 +7175,7 @@ pub fn resolve_absolute_path<'a, I: Iterator<Item = Symbol> + Clone + 'a>(
.next()
.into_iter()
.flat_map(move |crate_name| {
- db.all_crates()
+ all_crates(db)
.iter()
.filter(|&krate| {
krate
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 4e9e3c44be..9a31a08ffb 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -10,7 +10,7 @@ use std::{
ops::{self, ControlFlow, Not},
};
-use base_db::FxIndexSet;
+use base_db::{FxIndexSet, all_crates, toolchain_channel};
use either::Either;
use hir_def::{
BuiltinDeriveImplId, DefWithBodyId, ExpressionStoreOwnerId, HasModule, MacroId, StructId,
@@ -392,7 +392,7 @@ impl<DB: HirDatabase + ?Sized> Semantics<'_, DB> {
}
pub fn is_nightly(&self, krate: Crate) -> bool {
- let toolchain = self.db.toolchain_channel(krate.into());
+ let toolchain = toolchain_channel(self.db.as_dyn_database(), krate.into());
// `toolchain == None` means we're in some detached files. Since we have no information on
// the toolchain being used, let's just allow unstable items to be listed.
matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None)
@@ -458,7 +458,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
let hir_file_id = file_id.into();
- let tree = self.db.parse(file_id).tree();
+ let tree = file_id.parse(self.db).tree();
self.cache(tree.syntax().clone(), hir_file_id);
tree
}
@@ -467,7 +467,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn first_crate(&self, file: FileId) -> Option<Crate> {
match self.file_to_module_defs(file).next() {
Some(module) => Some(module.krate(self.db)),
- None => self.db.all_crates().last().copied().map(Into::into),
+ None => all_crates(self.db).last().copied().map(Into::into),
}
}
@@ -484,7 +484,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
let file_id = self.attach_first_edition(file_id);
- let tree = self.db.parse(file_id).tree();
+ let tree = file_id.parse(self.db).tree();
self.cache(tree.syntax().clone(), file_id.into());
tree
}
@@ -2461,7 +2461,7 @@ fn macro_call_to_macro_id(
Either::Left(it) => {
let node = match it.file_id {
HirFileId::FileId(file_id) => {
- it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
+ it.to_ptr(db).to_node(&file_id.parse(db).syntax_node())
}
HirFileId::MacroFile(macro_file) => {
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
@@ -2473,7 +2473,7 @@ fn macro_call_to_macro_id(
Either::Right(it) => {
let node = match it.file_id {
HirFileId::FileId(file_id) => {
- it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
+ it.to_ptr(db).to_node(&file_id.parse(db).syntax_node())
}
HirFileId::MacroFile(macro_file) => {
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs
index a9a779a287..59bccc22d8 100644
--- a/crates/hir/src/semantics/source_to_def.rs
+++ b/crates/hir/src/semantics/source_to_def.rs
@@ -85,6 +85,7 @@
//! active crate for a given position, and then provide an API to resolve all
//! syntax nodes against this specific crate.
+use base_db::relevant_crates;
use either::Either;
use hir_def::{
AdtId, BlockId, BuiltinDeriveImplId, ConstId, ConstParamId, DefWithBodyId, EnumId,
@@ -145,7 +146,7 @@ impl SourceToDefCache {
return m;
}
self.included_file_cache.insert(file, None);
- for &crate_id in db.relevant_crates(file.file_id(db)).iter() {
+ for &crate_id in relevant_crates(db, file.file_id(db)).iter() {
db.include_macro_invoc(crate_id).iter().for_each(|&(macro_call_id, file_id)| {
self.included_file_cache.insert(file_id, Some(macro_call_id));
});
@@ -180,7 +181,7 @@ impl SourceToDefCtx<'_, '_> {
self.cache.file_to_def_cache.entry(file).or_insert_with(|| {
let mut mods = SmallVec::new();
- for &crate_id in self.db.relevant_crates(file).iter() {
+ for &crate_id in relevant_crates(self.db, file).iter() {
// Note: `mod` declarations in block modules cannot be supported here
let crate_def_map = crate_def_map(self.db, crate_id);
let n_mods = mods.len();
diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index afdced4215..44b367059e 100644
--- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -1,7 +1,7 @@
use hir::HasSource;
use syntax::{
Edition,
- ast::{self, AstNode, make},
+ ast::{self, AstNode, syntax_factory::SyntaxFactory},
syntax_editor::{Position, SyntaxEditor},
};
@@ -148,7 +148,9 @@ fn add_missing_impl_members_inner(
let target = impl_def.syntax().text_range();
acc.add(AssistId::quick_fix(assist_id), label, target, |edit| {
+ let make = SyntaxFactory::with_mappings();
let new_item = add_trait_assoc_items_to_impl(
+ &make,
&ctx.sema,
ctx.config,
&missing_items,
@@ -164,6 +166,7 @@ fn add_missing_impl_members_inner(
let mut first_new_item = if let DefaultMethods::No = mode
&& let ast::AssocItem::Fn(func) = &first_new_item
&& let Some(body) = try_gen_trait_body(
+ &make,
ctx,
func,
trait_ref,
@@ -172,7 +175,7 @@ fn add_missing_impl_members_inner(
)
&& let Some(func_body) = func.body()
{
- let mut func_editor = SyntaxEditor::new(first_new_item.syntax().clone_subtree());
+ let (mut func_editor, _) = SyntaxEditor::new(first_new_item.syntax().clone());
func_editor.replace(func_body.syntax(), body.syntax());
ast::AssocItem::cast(func_editor.finish().new_root().clone())
} else {
@@ -189,10 +192,10 @@ fn add_missing_impl_members_inner(
if let Some(assoc_item_list) = impl_def.assoc_item_list() {
assoc_item_list.add_items(&mut editor, new_assoc_items);
} else {
- let assoc_item_list = make::assoc_item_list(Some(new_assoc_items)).clone_for_update();
+ let assoc_item_list = make.assoc_item_list(new_assoc_items);
editor.insert_all(
Position::after(impl_def.syntax()),
- vec![make::tokens::whitespace(" ").into(), assoc_item_list.syntax().clone().into()],
+ vec![make.whitespace(" ").into(), assoc_item_list.syntax().clone().into()],
);
first_new_item = assoc_item_list.assoc_items().next();
}
@@ -215,23 +218,24 @@ fn add_missing_impl_members_inner(
editor.add_annotation(first_new_item.syntax(), tabstop);
};
};
+ editor.add_mappings(make.finish_with_mappings());
edit.add_file_edits(ctx.vfs_file_id(), editor);
})
}
fn try_gen_trait_body(
+ make: &SyntaxFactory,
ctx: &AssistContext<'_>,
func: &ast::Fn,
trait_ref: hir::TraitRef<'_>,
impl_def: &ast::Impl,
edition: Edition,
) -> Option<ast::BlockExpr> {
- let trait_path = make::ext::ident_path(
- &trait_ref.trait_().name(ctx.db()).display(ctx.db(), edition).to_string(),
- );
+ let trait_path =
+ make.ident_path(&trait_ref.trait_().name(ctx.db()).display(ctx.db(), edition).to_string());
let hir_ty = ctx.sema.resolve_type(&impl_def.self_ty()?)?;
let adt = hir_ty.as_adt()?.source(ctx.db())?;
- gen_trait_fn_body(func, &trait_path, &adt.value, Some(trait_ref))
+ gen_trait_fn_body(make, func, &trait_path, &adt.value, Some(trait_ref))
}
#[cfg(test)]
diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index b063e5ffce..b7510bb826 100644
--- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -1,4 +1,4 @@
-use std::iter::{self, Peekable};
+use std::iter;
use either::Either;
use hir::{Adt, AsAssocItem, Crate, FindPathConfig, HasAttrs, ModuleDef, Semantics};
@@ -93,8 +93,8 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
} else {
None
};
- let (mut missing_pats, is_non_exhaustive, has_hidden_variants): (
- Peekable<Box<dyn Iterator<Item = (ast::Pat, bool)>>>,
+ let (missing_pats, is_non_exhaustive, has_hidden_variants): (
+ Vec<(ast::Pat, bool)>,
bool,
bool,
) = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr, self_ty.as_ref()) {
@@ -117,15 +117,15 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
let option_enum = FamousDefs(&ctx.sema, module.krate(ctx.db())).core_option_Option();
- let missing_pats: Box<dyn Iterator<Item = _>> = if matches!(enum_def, ExtendedEnum::Enum { enum_: e, .. } if Some(e) == option_enum)
+ let missing_pats: Vec<_> = if matches!(enum_def, ExtendedEnum::Enum { enum_: e, .. } if Some(e) == option_enum)
{
// Match `Some` variant first.
cov_mark::hit!(option_order);
- Box::new(missing_pats.rev())
+ missing_pats.rev().collect()
} else {
- Box::new(missing_pats)
+ missing_pats.collect()
};
- (missing_pats.peekable(), is_non_exhaustive, has_hidden_variants)
+ (missing_pats, is_non_exhaustive, has_hidden_variants)
} else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr, self_ty.as_ref()) {
let is_non_exhaustive = enum_defs
.iter()
@@ -169,12 +169,9 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
(ast::Pat::from(make.tuple_pat(patterns)), is_hidden)
})
- .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
- (
- (Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(),
- is_non_exhaustive,
- has_hidden_variants,
- )
+ .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat))
+ .collect();
+ (missing_pats, is_non_exhaustive, has_hidden_variants)
} else if let Some((enum_def, len)) =
resolve_array_of_enum_def(&ctx.sema, &expr, self_ty.as_ref())
{
@@ -205,12 +202,9 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
(ast::Pat::from(make.slice_pat(patterns)), is_hidden)
})
- .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
- (
- (Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(),
- is_non_exhaustive,
- has_hidden_variants,
- )
+ .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat))
+ .collect();
+ (missing_pats, is_non_exhaustive, has_hidden_variants)
} else {
return None;
};
@@ -218,20 +212,31 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
let mut needs_catch_all_arm = is_non_exhaustive && !has_catch_all_arm;
if !needs_catch_all_arm
- && ((has_hidden_variants && has_catch_all_arm) || missing_pats.peek().is_none())
+ && ((has_hidden_variants && has_catch_all_arm) || missing_pats.is_empty())
{
return None;
}
+ let visible_count = missing_pats.iter().filter(|(_, hidden)| !hidden).count();
+ let label = if visible_count == 0 {
+ "Add missing catch-all match arm `_`".to_owned()
+ } else if visible_count == 1 {
+ let pat = &missing_pats.iter().find(|(_, hidden)| !hidden).unwrap().0;
+ format!("Add missing match arm `{pat}`")
+ } else {
+ format!("Add {visible_count} missing match arms")
+ };
+
acc.add(
AssistId::quick_fix("add_missing_match_arms"),
- "Fill match arms",
+ label,
ctx.sema.original_range(match_expr.syntax()).range,
|builder| {
// having any hidden variants means that we need a catch-all arm
needs_catch_all_arm |= has_hidden_variants;
let mut missing_arms = missing_pats
+ .into_iter()
.filter(|(_, hidden)| {
// filter out hidden patterns because they're handled by the catch-all arm
!hidden
@@ -635,7 +640,7 @@ mod tests {
use crate::AssistConfig;
use crate::tests::{
TEST_CONFIG, check_assist, check_assist_not_applicable, check_assist_target,
- check_assist_unresolved, check_assist_with_config,
+ check_assist_unresolved, check_assist_with_config, check_assist_with_label,
};
use super::add_missing_match_arms;
@@ -1828,8 +1833,10 @@ fn foo(t: Test) {
#[test]
fn lazy_computation() {
- // Computing a single missing arm is enough to determine applicability of the assist.
- cov_mark::check_count!(add_missing_match_arms_lazy_computation, 1);
+ // We now collect all missing arms eagerly, so we can show the count
+ // of missing arms.
+ cov_mark::check_count!(add_missing_match_arms_lazy_computation, 4);
+
check_assist_unresolved(
add_missing_match_arms,
r#"
@@ -1842,6 +1849,54 @@ fn foo(tuple: (A, A)) {
}
#[test]
+ fn label_single_missing_arm() {
+ check_assist_with_label(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+fn foo(a: A) {
+ match $0a {
+ A::One => {}
+ }
+}
+"#,
+ "Add missing match arm `A::Two`",
+ );
+ }
+
+ #[test]
+ fn label_multiple_missing_arms() {
+ check_assist_with_label(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two, Three }
+fn foo(a: A) {
+ match $0a {}
+}
+"#,
+ "Add 3 missing match arms",
+ );
+ }
+
+ #[test]
+ fn label_catch_all_only() {
+ check_assist_with_label(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ e::E::A => {}
+ }
+}
+//- /e.rs crate:e
+pub enum E { A, #[doc(hidden)] B, }
+"#,
+ "Add missing catch-all match arm `_`",
+ );
+ }
+
+ #[test]
fn adds_comma_before_new_arms() {
check_assist(
add_missing_match_arms,
diff --git a/crates/ide-assists/src/handlers/apply_demorgan.rs b/crates/ide-assists/src/handlers/apply_demorgan.rs
index 4ee4970248..2ea0d76b01 100644
--- a/crates/ide-assists/src/handlers/apply_demorgan.rs
+++ b/crates/ide-assists/src/handlers/apply_demorgan.rs
@@ -82,8 +82,7 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let make = SyntaxFactory::with_mappings();
- let demorganed = bin_expr.clone_subtree();
- let mut editor = SyntaxEditor::new(demorganed.syntax().clone());
+ let (mut editor, demorganed) = SyntaxEditor::with_ast_node(&bin_expr);
editor.replace(demorganed.op_token()?, make.token(inv_token));
let mut exprs = VecDeque::from([
diff --git a/crates/ide-assists/src/handlers/auto_import.rs b/crates/ide-assists/src/handlers/auto_import.rs
index de5dfdf4d9..adeb191719 100644
--- a/crates/ide-assists/src/handlers/auto_import.rs
+++ b/crates/ide-assists/src/handlers/auto_import.rs
@@ -1928,4 +1928,33 @@ fn f() {
"#;
check_auto_import_order(before, &["Import `foo::wanted`", "Import `quux::wanted`"]);
}
+
+ #[test]
+ fn consider_definition_kind() {
+ check_assist(
+ auto_import,
+ r#"
+//- /eyre.rs crate:eyre
+#[macro_export]
+macro_rules! eyre {
+ () => {};
+}
+
+//- /color-eyre.rs crate:color-eyre deps:eyre
+pub use eyre;
+
+//- /main.rs crate:main deps:color-eyre
+fn main() {
+ ey$0re!();
+}
+ "#,
+ r#"
+use color_eyre::eyre::eyre;
+
+fn main() {
+ eyre!();
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/convert_bool_then.rs b/crates/ide-assists/src/handlers/convert_bool_then.rs
index b3bfe5b8c4..c36c79ee99 100644
--- a/crates/ide-assists/src/handlers/convert_bool_then.rs
+++ b/crates/ide-assists/src/handlers/convert_bool_then.rs
@@ -77,8 +77,7 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
"Convert `if` expression to `bool::then` call",
target,
|builder| {
- let closure_body = closure_body.clone_subtree();
- let mut editor = SyntaxEditor::new(closure_body.syntax().clone());
+ let (mut editor, closure_body) = SyntaxEditor::with_ast_node(&closure_body);
// Rewrite all `Some(e)` in tail position to `e`
for_each_tail_expr(&closure_body, &mut |e| {
let e = match e {
@@ -188,8 +187,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
e => mapless_make.block_expr(None, Some(e)),
};
- let closure_body = closure_body.clone_subtree();
- let mut editor = SyntaxEditor::new(closure_body.syntax().clone());
+ let (mut editor, closure_body) = SyntaxEditor::with_ast_node(&closure_body);
// Wrap all tails in `Some(...)`
let none_path = mapless_make.expr_path(mapless_make.ident_path("None"));
let some_path = mapless_make.expr_path(mapless_make.ident_path("Some"));
diff --git a/crates/ide-assists/src/handlers/convert_for_to_while_let.rs b/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
index 15f324eff3..a5c29a45a5 100644
--- a/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
+++ b/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
@@ -81,14 +81,14 @@ pub(crate) fn convert_for_loop_to_while_let(
let indent = IndentLevel::from_node(for_loop.syntax());
if let Some(label) = for_loop.label() {
- let label = label.syntax().clone_for_update();
+ let label = label.syntax();
editor.insert(Position::before(for_loop.syntax()), make.whitespace(" "));
editor.insert(Position::before(for_loop.syntax()), label);
}
crate::utils::insert_attributes(
for_loop.syntax(),
&mut editor,
- for_loop.attrs().map(|it| it.clone_for_update()),
+ for_loop.attrs(),
&make,
);
diff --git a/crates/ide-assists/src/handlers/convert_let_else_to_match.rs b/crates/ide-assists/src/handlers/convert_let_else_to_match.rs
index d2336a4a5d..9a9808e270 100644
--- a/crates/ide-assists/src/handlers/convert_let_else_to_match.rs
+++ b/crates/ide-assists/src/handlers/convert_let_else_to_match.rs
@@ -32,10 +32,12 @@ pub(crate) fn convert_let_else_to_match(acc: &mut Assists, ctx: &AssistContext<'
.or_else(|| ctx.find_token_syntax_at_offset(T![let])?.parent())?;
let let_stmt = LetStmt::cast(let_stmt)?;
let else_block = let_stmt.let_else()?.block_expr()?;
- let else_expr = if else_block.statements().next().is_none() {
- else_block.tail_expr()?
+ let else_expr = if else_block.statements().next().is_none()
+ && let Some(tail_expr) = else_block.tail_expr()
+ {
+ tail_expr.reset_indent()
} else {
- else_block.into()
+ else_block.reset_indent().into()
};
let init = let_stmt.initializer()?;
// Ignore let stmt with type annotation
@@ -91,8 +93,8 @@ pub(crate) fn convert_let_else_to_match(acc: &mut Assists, ctx: &AssistContext<'
},
);
let else_arm = make.match_arm(make.wildcard_pat().into(), None, else_expr);
- let match_ = make.expr_match(init, make.match_arm_list([binding_arm, else_arm]));
- let match_ = match_.reset_indent();
+ let arms = [binding_arm, else_arm].map(|arm| arm.indent(1.into()));
+ let match_ = make.expr_match(init, make.match_arm_list(arms));
let match_ = match_.indent(let_stmt.indent_level());
if bindings.is_empty() {
@@ -190,7 +192,7 @@ fn remove_mut_and_collect_idents(
let inner = p.pat()?;
if let ast::Pat::IdentPat(ident) = inner {
acc.push(ident);
- p.clone_for_update().into()
+ p.clone().into()
} else {
make.ref_pat(remove_mut_and_collect_idents(make, &inner, acc)?).into()
}
@@ -299,6 +301,81 @@ fn main() {
}
#[test]
+ fn convert_let_else_to_match_with_empty_else_block() {
+ check_assist(
+ convert_let_else_to_match,
+ r"
+fn main() {
+ let Ok(x) = f() else$0 {};
+}",
+ r"
+fn main() {
+ let x = match f() {
+ Ok(x) => x,
+ _ => {}
+ };
+}",
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_with_some_indent() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+mod indent {
+ fn main() {
+ let Ok(x) = f() else$0 {
+ log();
+ unreachable!(
+ "..."
+ );
+ };
+ }
+}"#,
+ r#"
+mod indent {
+ fn main() {
+ let x = match f() {
+ Ok(x) => x,
+ _ => {
+ log();
+ unreachable!(
+ "..."
+ );
+ }
+ };
+ }
+}"#,
+ );
+
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+mod indent {
+ fn main() {
+ let Ok(x) = f() else$0 {
+ unreachable!(
+ "..."
+ )
+ };
+ }
+}"#,
+ r#"
+mod indent {
+ fn main() {
+ let x = match f() {
+ Ok(x) => x,
+ _ => unreachable!(
+ "..."
+ ),
+ };
+ }
+}"#,
+ );
+ }
+
+ #[test]
fn convert_let_else_to_match_const_ref() {
check_assist(
convert_let_else_to_match,
diff --git a/crates/ide-assists/src/handlers/convert_match_to_let_else.rs b/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
index 1a6d176c90..4b132d68ee 100644
--- a/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
+++ b/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
@@ -121,8 +121,7 @@ fn find_extracted_variable(ctx: &AssistContext<'_>, arm: &ast::MatchArm) -> Opti
// Rename `extracted` with `binding` in `pat`.
fn rename_variable(pat: &ast::Pat, extracted: &[Name], binding: ast::Pat) -> SyntaxNode {
- let syntax = pat.syntax().clone_subtree();
- let mut editor = SyntaxEditor::new(syntax.clone());
+ let (mut editor, syntax) = SyntaxEditor::new(pat.syntax().clone());
let make = SyntaxFactory::with_mappings();
let extracted = extracted
.iter()
@@ -138,15 +137,12 @@ fn rename_variable(pat: &ast::Pat, extracted: &[Name], binding: ast::Pat) -> Syn
if let Some(name_ref) = record_pat_field.field_name() {
editor.replace(
record_pat_field.syntax(),
- make.record_pat_field(
- make.name_ref(&name_ref.text()),
- binding.clone_for_update(),
- )
- .syntax(),
+ make.record_pat_field(make.name_ref(&name_ref.text()), binding.clone())
+ .syntax(),
);
}
} else {
- editor.replace(extracted_syntax, binding.syntax().clone_for_update());
+ editor.replace(extracted_syntax, binding.syntax());
}
}
editor.add_mappings(make.finish_with_mappings());
diff --git a/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
index aaf727058c..4ea56e3e61 100644
--- a/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
+++ b/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
@@ -102,11 +102,11 @@ fn edit_struct_def(
// Note that we don't need to consider macro files in this function because this is
// currently not triggered for struct definitions inside macro calls.
let tuple_fields = record_fields.fields().filter_map(|f| {
- let field = ast::make::tuple_field(f.visibility(), f.ty()?);
- let mut editor = SyntaxEditor::new(field.syntax().clone());
+ let (mut editor, field) =
+ SyntaxEditor::with_ast_node(&ast::make::tuple_field(f.visibility(), f.ty()?));
editor.insert_all(
Position::first_child_of(field.syntax()),
- f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(),
+ f.attrs().map(|attr| attr.syntax().clone().into()).collect(),
);
let field_syntax = editor.finish().new_root().clone();
let field = ast::TupleField::cast(field_syntax)?;
@@ -328,8 +328,7 @@ fn delete_whitespace(edit: &mut SyntaxEditor, whitespace: Option<impl Element>)
}
fn remove_trailing_comma(w: ast::WhereClause) -> SyntaxNode {
- let w = w.syntax().clone_subtree();
- let mut editor = SyntaxEditor::new(w.clone());
+ let (mut editor, w) = SyntaxEditor::new(w.syntax().clone());
if let Some(last) = w.last_child_or_token()
&& last.kind() == T![,]
{
diff --git a/crates/ide-assists/src/handlers/convert_range_for_to_while.rs b/crates/ide-assists/src/handlers/convert_range_for_to_while.rs
index 2e649f14be..6139395076 100644
--- a/crates/ide-assists/src/handlers/convert_range_for_to_while.rs
+++ b/crates/ide-assists/src/handlers/convert_range_for_to_while.rs
@@ -133,7 +133,7 @@ fn process_loop_body(
) -> Option<()> {
let last = previous_non_trivia_token(body.r_curly_token()?)?.syntax_element();
- let new_body = body.indent(1.into()).clone_subtree();
+ let new_body = body.indent(1.into());
let mut continues = vec![];
collect_continue_to(
&mut continues,
@@ -155,15 +155,15 @@ fn process_loop_body(
let block_content = first.clone()..=children.last().unwrap_or(first);
let continue_label = make::lifetime("'cont");
- let break_expr = make::expr_break(Some(continue_label.clone()), None).clone_for_update();
- let mut new_edit = SyntaxEditor::new(new_body.syntax().clone());
+ let break_expr = make::expr_break(Some(continue_label.clone()), None);
+ let (mut new_edit, _) = SyntaxEditor::new(new_body.syntax().clone());
for continue_expr in &continues {
new_edit.replace(continue_expr.syntax(), break_expr.syntax());
}
let new_body = new_edit.finish().new_root().clone();
let elements = itertools::chain(
[
- continue_label.syntax().clone_for_update().syntax_element(),
+ continue_label.syntax().syntax_element(),
make::token(T![:]).syntax_element(),
make::tokens::single_space().syntax_element(),
new_body.syntax_element(),
diff --git a/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
index db45916792..004d09acac 100644
--- a/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
+++ b/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
@@ -1,17 +1,19 @@
use std::iter::once;
use either::Either;
-use hir::{Semantics, TypeInfo};
+use hir::Semantics;
use ide_db::{RootDatabase, ty_filter::TryEnum};
use syntax::{
AstNode,
- SyntaxKind::{CLOSURE_EXPR, FN, FOR_EXPR, LOOP_EXPR, WHILE_EXPR, WHITESPACE},
+ SyntaxKind::WHITESPACE,
SyntaxNode, T,
ast::{
self,
edit::{AstNodeEdit, IndentLevel},
syntax_factory::SyntaxFactory,
},
+ match_ast,
+ syntax_editor::SyntaxEditor,
};
use crate::{
@@ -71,9 +73,7 @@ fn if_expr_to_guarded_return(
) -> Option<()> {
let make = SyntaxFactory::without_mappings();
let else_block = match if_expr.else_branch() {
- Some(ast::ElseBranch::Block(block_expr)) if is_never_block(&ctx.sema, &block_expr) => {
- Some(block_expr)
- }
+ Some(ast::ElseBranch::Block(block_expr)) => Some(block_expr),
Some(_) => return None,
_ => None,
};
@@ -96,25 +96,20 @@ fn if_expr_to_guarded_return(
let parent_block = if_expr.syntax().parent()?.ancestors().find_map(ast::BlockExpr::cast)?;
- if parent_block.tail_expr() != Some(if_expr.clone().into())
- && !(else_block.is_some() && ast::ExprStmt::can_cast(if_expr.syntax().parent()?.kind()))
- {
- return None;
- }
-
// check for early return and continue
if is_early_block(&then_block) || is_never_block(&ctx.sema, &then_branch) {
return None;
}
- let parent_container = parent_block.syntax().parent()?;
+ let container = container_of(&parent_block)?;
+ let else_block = ElseBlock::new(&ctx.sema, else_block, &container)?;
- let early_expression = else_block
- .or_else(|| {
- early_expression(parent_container, &ctx.sema, &make)
- .map(ast::make::tail_only_block_expr)
- })?
- .reset_indent();
+ if parent_block.tail_expr() != Some(if_expr.clone().into())
+ && !(else_block.is_never_block
+ && ast::ExprStmt::can_cast(if_expr.syntax().parent()?.kind()))
+ {
+ return None;
+ }
then_block.syntax().first_child_or_token().map(|t| t.kind() == T!['{'])?;
@@ -137,6 +132,7 @@ fn if_expr_to_guarded_return(
|edit| {
let make = SyntaxFactory::without_mappings();
let if_indent_level = IndentLevel::from_node(if_expr.syntax());
+ let early_expression = else_block.make_early_block(&ctx.sema, &make);
let replacement = let_chains.into_iter().map(|expr| {
if let ast::Expr::LetExpr(let_expr) = &expr
&& let (Some(pat), Some(expr)) = (let_expr.pat(), let_expr.expr())
@@ -204,14 +200,9 @@ fn let_stmt_to_guarded_return(
let happy_pattern = try_enum.happy_pattern(pat);
let target = let_stmt.syntax().text_range();
- let make = SyntaxFactory::without_mappings();
- let early_expression: ast::Expr = {
- let parent_block =
- let_stmt.syntax().parent()?.ancestors().find_map(ast::BlockExpr::cast)?;
- let parent_container = parent_block.syntax().parent()?;
-
- early_expression(parent_container, &ctx.sema, &make)?
- };
+ let parent_block = let_stmt.syntax().parent()?.ancestors().find_map(ast::BlockExpr::cast)?;
+ let container = container_of(&parent_block)?;
+ let else_block = ElseBlock::new(&ctx.sema, None, &container)?;
acc.add(
AssistId::refactor_rewrite("convert_to_guarded_return"),
@@ -226,7 +217,7 @@ fn let_stmt_to_guarded_return(
happy_pattern,
let_stmt.ty(),
expr.reset_indent(),
- ast::make::tail_only_block_expr(early_expression),
+ else_block.make_early_block(&ctx.sema, &make),
);
let let_else_stmt = let_else_stmt.indent(let_indent_level);
let_else_stmt.syntax().clone()
@@ -239,33 +230,130 @@ fn let_stmt_to_guarded_return(
)
}
-fn early_expression(
- parent_container: SyntaxNode,
- sema: &Semantics<'_, RootDatabase>,
- make: &SyntaxFactory,
-) -> Option<ast::Expr> {
- let return_none_expr = || {
- let none_expr = make.expr_path(make.ident_path("None"));
- make.expr_return(Some(none_expr))
- };
- if let Some(fn_) = ast::Fn::cast(parent_container.clone())
- && let Some(fn_def) = sema.to_def(&fn_)
- && let Some(TryEnum::Option) = TryEnum::from_ty(sema, &fn_def.ret_type(sema.db))
- {
- return Some(return_none_expr().into());
+fn container_of(block: &ast::BlockExpr) -> Option<SyntaxNode> {
+ if block.label().is_some() {
+ return Some(block.syntax().clone());
}
- if let Some(body) = ast::ClosureExpr::cast(parent_container.clone()).and_then(|it| it.body())
- && let Some(ret_ty) = sema.type_of_expr(&body).map(TypeInfo::original)
- && let Some(TryEnum::Option) = TryEnum::from_ty(sema, &ret_ty)
- {
- return Some(return_none_expr().into());
+ block.syntax().parent()
+}
+
+struct ElseBlock<'db> {
+ exist_else_block: Option<ast::BlockExpr>,
+ is_never_block: bool,
+ kind: EarlyKind<'db>,
+}
+
+impl<'db> ElseBlock<'db> {
+ fn new(
+ sema: &Semantics<'db, RootDatabase>,
+ exist_else_block: Option<ast::BlockExpr>,
+ parent_container: &SyntaxNode,
+ ) -> Option<Self> {
+ let is_never_block = exist_else_block.as_ref().is_some_and(|it| is_never_block(sema, it));
+ let kind = EarlyKind::from_node(parent_container, sema)?;
+
+ Some(Self { exist_else_block, is_never_block, kind })
+ }
+
+ fn make_early_block(
+ self,
+ sema: &Semantics<'_, RootDatabase>,
+ make: &SyntaxFactory,
+ ) -> ast::BlockExpr {
+ let Some(block_expr) = self.exist_else_block else {
+ return make.tail_only_block_expr(self.kind.make_early_expr(sema, make, None));
+ };
+
+ if self.is_never_block {
+ return block_expr.reset_indent();
+ }
+
+ let (mut edit, block_expr) = SyntaxEditor::with_ast_node(&block_expr.reset_indent());
+
+ let last_stmt = block_expr.statements().last().map(|it| it.syntax().clone());
+ let tail_expr = block_expr.tail_expr().map(|it| it.syntax().clone());
+ let Some(last_element) = tail_expr.clone().or(last_stmt.clone()) else {
+ return make.tail_only_block_expr(self.kind.make_early_expr(sema, make, None));
+ };
+ let whitespace = last_element.prev_sibling_or_token().filter(|it| it.kind() == WHITESPACE);
+
+ let make = SyntaxFactory::without_mappings();
+
+ if let Some(tail_expr) = block_expr.tail_expr()
+ && !self.kind.is_unit()
+ {
+ let early_expr = self.kind.make_early_expr(sema, &make, Some(tail_expr.clone()));
+ edit.replace(tail_expr.syntax(), early_expr.syntax());
+ } else {
+ let last_stmt = match block_expr.tail_expr() {
+ Some(expr) => make.expr_stmt(expr).syntax().clone(),
+ None => last_element.clone(),
+ };
+ let whitespace =
+ make.whitespace(&whitespace.map_or(String::new(), |it| it.to_string()));
+ let early_expr = self.kind.make_early_expr(sema, &make, None).syntax().clone().into();
+ edit.replace_with_many(
+ last_element,
+ vec![last_stmt.into(), whitespace.into(), early_expr],
+ );
+ }
+
+ ast::BlockExpr::cast(edit.finish().new_root().clone()).unwrap()
+ }
+}
+
+enum EarlyKind<'db> {
+ Continue,
+ Break(ast::Lifetime, hir::Type<'db>),
+ Return(hir::Type<'db>),
+}
+
+impl<'db> EarlyKind<'db> {
+ fn from_node(
+ parent_container: &SyntaxNode,
+ sema: &Semantics<'db, RootDatabase>,
+ ) -> Option<Self> {
+ match_ast! {
+ match parent_container {
+ ast::Fn(it) => Some(Self::Return(sema.to_def(&it)?.ret_type(sema.db))),
+ ast::ClosureExpr(it) => Some(Self::Return(sema.type_of_expr(&it.body()?)?.original)),
+ ast::BlockExpr(it) => Some(Self::Break(it.label()?.lifetime()?, sema.type_of_expr(&it.into())?.original)),
+ ast::WhileExpr(_) => Some(Self::Continue),
+ ast::LoopExpr(_) => Some(Self::Continue),
+ ast::ForExpr(_) => Some(Self::Continue),
+ _ => None
+ }
+ }
}
- Some(match parent_container.kind() {
- WHILE_EXPR | LOOP_EXPR | FOR_EXPR => make.expr_continue(None).into(),
- FN | CLOSURE_EXPR => make.expr_return(None).into(),
- _ => return None,
- })
+ fn make_early_expr(
+ &self,
+ sema: &Semantics<'_, RootDatabase>,
+ make: &SyntaxFactory,
+ ret: Option<ast::Expr>,
+ ) -> ast::Expr {
+ match self {
+ EarlyKind::Continue => make.expr_continue(None).into(),
+ EarlyKind::Break(label, _) => make.expr_break(Some(label.clone()), ret).into(),
+ EarlyKind::Return(ty) => {
+ let expr = match TryEnum::from_ty(sema, ty) {
+ Some(TryEnum::Option) => {
+ ret.or_else(|| Some(make.expr_path(make.ident_path("None"))))
+ }
+ _ => ret,
+ };
+ make.expr_return(expr).into()
+ }
+ }
+ }
+
+ fn is_unit(&self) -> bool {
+ match self {
+ EarlyKind::Continue => true,
+ EarlyKind::Break(_, ty) => ty.is_unit(),
+ EarlyKind::Return(ty) => ty.is_unit(),
+ }
+ }
}
fn flat_let_chain(mut expr: ast::Expr, make: &SyntaxFactory) -> Vec<ast::Expr> {
@@ -465,6 +553,74 @@ fn main() {
}
#[test]
+ fn convert_if_let_has_else_block() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() -> i32 {
+ if$0 true {
+ foo();
+ } else {
+ bar()
+ }
+}
+"#,
+ r#"
+fn main() -> i32 {
+ if false {
+ return bar();
+ }
+ foo();
+}
+"#,
+ );
+
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ if$0 true {
+ foo();
+ } else {
+ bar()
+ }
+}
+"#,
+ r#"
+fn main() {
+ if false {
+ bar();
+ return
+ }
+ foo();
+}
+"#,
+ );
+
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ if$0 true {
+ foo();
+ } else {
+ bar();
+ }
+}
+"#,
+ r#"
+fn main() {
+ if false {
+ bar();
+ return
+ }
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
fn convert_if_let_has_never_type_else_block() {
check_assist(
convert_to_guarded_return,
@@ -512,7 +668,7 @@ fn main() {
}
#[test]
- fn convert_if_let_has_else_block_in_statement() {
+ fn convert_if_let_has_never_type_else_block_in_statement() {
check_assist(
convert_to_guarded_return,
r#"
@@ -923,6 +1079,63 @@ fn main() {
}
#[test]
+ fn convert_let_inside_labeled_block() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ 'l: {
+ if$0 let Some(n) = n {
+ foo(n);
+ bar();
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ 'l: {
+ let Some(n) = n else { break 'l };
+ foo(n);
+ bar();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_inside_for_with_else() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ for n in ns {
+ if$0 let Some(n) = n {
+ foo(n);
+ bar();
+ } else {
+ baz()
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ for n in ns {
+ let Some(n) = n else {
+ baz();
+ continue
+ };
+ foo(n);
+ bar();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
fn convert_let_stmt_inside_fn() {
check_assist(
convert_to_guarded_return,
@@ -1186,16 +1399,18 @@ fn main() {
}
#[test]
- fn ignore_else_branch() {
+ fn ignore_else_branch_has_non_never_types_in_statement() {
check_assist_not_applicable(
convert_to_guarded_return,
r#"
fn main() {
+ some_statements();
if$0 true {
foo();
} else {
bar()
}
+ some_statements();
}
"#,
);
diff --git a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
index ae41e6c015..4ce7a9d866 100644
--- a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
+++ b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
@@ -103,11 +103,11 @@ fn edit_struct_def(
names: Vec<ast::Name>,
) {
let record_fields = tuple_fields.fields().zip(names).filter_map(|(f, name)| {
- let field = ast::make::record_field(f.visibility(), name, f.ty()?);
- let mut field_editor = SyntaxEditor::new(field.syntax().clone());
+ let (mut field_editor, field) =
+ SyntaxEditor::with_ast_node(&ast::make::record_field(f.visibility(), name, f.ty()?));
field_editor.insert_all(
Position::first_child_of(field.syntax()),
- f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(),
+ f.attrs().map(|attr| attr.syntax().clone().into()).collect(),
);
ast::RecordField::cast(field_editor.finish().new_root().clone())
});
@@ -120,7 +120,7 @@ fn edit_struct_def(
editor.delete(w.syntax());
let mut insert_element = Vec::new();
insert_element.push(ast::make::tokens::single_newline().syntax_element());
- insert_element.push(w.syntax().clone_for_update().syntax_element());
+ insert_element.push(w.syntax().syntax_element());
if w.syntax().last_token().is_none_or(|t| t.kind() != SyntaxKind::COMMA) {
insert_element.push(ast::make::token(T![,]).into());
}
diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs
index 124ef509fb..fa5bb39c54 100644
--- a/crates/ide-assists/src/handlers/extract_function.rs
+++ b/crates/ide-assists/src/handlers/extract_function.rs
@@ -92,11 +92,13 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let anchor = if self_param.is_some() { Anchor::Method } else { Anchor::Freestanding };
let insert_after = node_to_insert_after(&body, anchor)?;
+ let trait_name = ast::Trait::cast(insert_after.clone()).and_then(|trait_| trait_.name());
let semantics_scope = ctx.sema.scope(&insert_after)?;
let module = semantics_scope.module();
let edition = semantics_scope.krate().edition(ctx.db());
- let (container_info, contains_tail_expr) = body.analyze_container(&ctx.sema, edition)?;
+ let (container_info, contains_tail_expr) =
+ body.analyze_container(&ctx.sema, edition, trait_name)?;
let ret_ty = body.return_ty(ctx)?;
let control_flow = body.external_control_flow(ctx, &container_info)?;
@@ -181,6 +183,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
builder.add_tabstop_before(cap, name);
}
+ // FIXME: wrap non-adt types
let fn_def = match fun.self_param_adt(ctx) {
Some(adt) if anchor == Anchor::Method && !has_impl_wrapper => {
fn_def.indent(1.into());
@@ -377,6 +380,7 @@ struct ControlFlow<'db> {
struct ContainerInfo<'db> {
is_const: bool,
parent_loop: Option<SyntaxNode>,
+ trait_name: Option<ast::Type>,
/// The function's return type, const's type etc.
ret_type: Option<hir::Type<'db>>,
generic_param_lists: Vec<ast::GenericParamList>,
@@ -838,6 +842,7 @@ impl FunctionBody {
&self,
sema: &Semantics<'db, RootDatabase>,
edition: Edition,
+ trait_name: Option<ast::Name>,
) -> Option<(ContainerInfo<'db>, bool)> {
let mut ancestors = self.parent()?.ancestors();
let infer_expr_opt = |expr| sema.type_of_expr(&expr?).map(TypeInfo::adjusted);
@@ -924,6 +929,9 @@ impl FunctionBody {
false
};
+ // FIXME: make trait arguments
+ let trait_name = trait_name.map(|name| make::ty_path(make::ext::ident_path(&name.text())));
+
let parent = self.parent()?;
let parents = generic_parents(&parent);
let generic_param_lists = parents.iter().filter_map(|it| it.generic_param_list()).collect();
@@ -934,6 +942,7 @@ impl FunctionBody {
ContainerInfo {
is_const,
parent_loop,
+ trait_name,
ret_type: ty,
generic_param_lists,
where_clauses,
@@ -1419,14 +1428,18 @@ fn fixup_call_site(builder: &mut SourceChangeBuilder, body: &FunctionBody) {
fn make_call(ctx: &AssistContext<'_>, fun: &Function<'_>, indent: IndentLevel) -> SyntaxNode {
let ret_ty = fun.return_type(ctx);
- let args = make::arg_list(fun.params.iter().map(|param| param.to_arg(ctx, fun.mods.edition)));
let name = fun.name.clone();
- let mut call_expr = if fun.self_param.is_some() {
+ let args = fun.params.iter().map(|param| param.to_arg(ctx, fun.mods.edition));
+ let mut call_expr = if fun.make_this_param().is_some() {
+ let self_arg = make::expr_path(make::ext::ident_path("self"));
+ let func = make::expr_path(make::path_unqualified(make::path_segment(name)));
+ make::expr_call(func, make::arg_list(Some(self_arg).into_iter().chain(args))).into()
+ } else if fun.self_param.is_some() {
let self_arg = make::expr_path(make::ext::ident_path("self"));
- make::expr_method_call(self_arg, name, args).into()
+ make::expr_method_call(self_arg, name, make::arg_list(args)).into()
} else {
let func = make::expr_path(make::path_unqualified(make::path_segment(name)));
- make::expr_call(func, args).into()
+ make::expr_call(func, make::arg_list(args)).into()
};
let handler = FlowHandler::from_ret_ty(fun, &ret_ty);
@@ -1729,9 +1742,28 @@ impl<'db> Function<'db> {
module: hir::Module,
edition: Edition,
) -> ast::ParamList {
- let self_param = self.self_param.clone();
+ let this_param = self.make_this_param().map(|f| f());
+ let self_param = self.self_param.clone().filter(|_| this_param.is_none());
let params = self.params.iter().map(|param| param.to_param(ctx, module, edition));
- make::param_list(self_param, params)
+ make::param_list(self_param, this_param.into_iter().chain(params))
+ }
+
+ fn make_this_param(&self) -> Option<impl FnOnce() -> ast::Param> {
+ if let Some(name) = self.mods.trait_name.clone()
+ && let Some(self_param) = &self.self_param
+ {
+ Some(|| {
+ let bounds = make::type_bound_list([make::type_bound(name)]);
+ let pat = make::path_pat(make::ext::ident_path("this"));
+ let mut ty = make::impl_trait_type(bounds.unwrap()).into();
+ if self_param.amp_token().is_some() {
+ ty = make::ty_ref(ty, self_param.mut_token().is_some());
+ }
+ make::param(pat, ty)
+ })
+ } else {
+ None
+ }
}
fn make_ret_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> Option<ast::RetType> {
@@ -1806,10 +1838,12 @@ fn make_body(
) -> ast::BlockExpr {
let ret_ty = fun.return_type(ctx);
let handler = FlowHandler::from_ret_ty(fun, &ret_ty);
+ let to_this_param = fun.self_param.clone().filter(|_| fun.make_this_param().is_some());
let block = match &fun.body {
FunctionBody::Expr(expr) => {
- let expr = rewrite_body_segment(ctx, &fun.params, &handler, expr.syntax());
+ let expr =
+ rewrite_body_segment(ctx, to_this_param, &fun.params, &handler, expr.syntax());
let expr = ast::Expr::cast(expr).expect("Body segment should be an expr");
match expr {
ast::Expr::BlockExpr(block) => {
@@ -1847,7 +1881,7 @@ fn make_body(
.filter(|it| text_range.contains_range(it.text_range()))
.map(|it| match &it {
syntax::NodeOrToken::Node(n) => syntax::NodeOrToken::Node(
- rewrite_body_segment(ctx, &fun.params, &handler, n),
+ rewrite_body_segment(ctx, to_this_param.clone(), &fun.params, &handler, n),
),
_ => it,
})
@@ -1997,11 +2031,13 @@ fn make_ty(ty: &hir::Type<'_>, ctx: &AssistContext<'_>, module: hir::Module) ->
fn rewrite_body_segment(
ctx: &AssistContext<'_>,
+ to_this_param: Option<ast::SelfParam>,
params: &[Param<'_>],
handler: &FlowHandler<'_>,
syntax: &SyntaxNode,
) -> SyntaxNode {
- let syntax = fix_param_usages(ctx, params, syntax);
+ let to_this_param = to_this_param.and_then(|it| ctx.sema.to_def(&it));
+ let syntax = fix_param_usages(ctx, to_this_param, params, syntax);
update_external_control_flow(handler, &syntax);
syntax
}
@@ -2009,30 +2045,46 @@ fn rewrite_body_segment(
/// change all usages to account for added `&`/`&mut` for some params
fn fix_param_usages(
ctx: &AssistContext<'_>,
+ to_this_param: Option<Local>,
params: &[Param<'_>],
syntax: &SyntaxNode,
) -> SyntaxNode {
let mut usages_for_param: Vec<(&Param<'_>, Vec<ast::Expr>)> = Vec::new();
+ let mut usages_for_self_param: Vec<ast::Expr> = Vec::new();
let tm = TreeMutator::new(syntax);
+ let reference_filter = |reference: &FileReference| {
+ syntax
+ .text_range()
+ .contains_range(reference.range)
+ .then_some(())
+ .and_then(|_| path_element_of_reference(syntax, reference))
+ .map(|expr| tm.make_mut(&expr))
+ };
+ if let Some(self_param) = to_this_param {
+ usages_for_self_param = LocalUsages::find_local_usages(ctx, self_param)
+ .iter()
+ .filter_map(reference_filter)
+ .collect();
+ }
for param in params {
if !param.kind().is_ref() {
continue;
}
let usages = LocalUsages::find_local_usages(ctx, param.var);
- let usages = usages
- .iter()
- .filter(|reference| syntax.text_range().contains_range(reference.range))
- .filter_map(|reference| path_element_of_reference(syntax, reference))
- .map(|expr| tm.make_mut(&expr));
+ let usages = usages.iter().filter_map(reference_filter);
usages_for_param.push((param, usages.unique().collect()));
}
let res = tm.make_syntax_mut(syntax);
+ for self_usage in usages_for_self_param {
+ let this_expr = make::expr_path(make::ext::ident_path("this")).clone_for_update();
+ ted::replace(self_usage.syntax(), this_expr.syntax());
+ }
for (param, usages) in usages_for_param {
for usage in usages {
match usage.syntax().ancestors().skip(1).find_map(ast::Expr::cast) {
@@ -2940,6 +2992,35 @@ impl S {
}
#[test]
+ fn method_in_trait() {
+ check_assist(
+ extract_function,
+ r#"
+trait Foo {
+ fn f(&self) -> i32;
+
+ fn foo(&self) -> i32 {
+ $0self.f()+self.f()$0
+ }
+}
+"#,
+ r#"
+trait Foo {
+ fn f(&self) -> i32;
+
+ fn foo(&self) -> i32 {
+ fun_name(self)
+ }
+}
+
+fn $0fun_name(this: &impl Foo) -> i32 {
+ this.f()+this.f()
+}
+"#,
+ );
+ }
+
+ #[test]
fn variable_defined_inside_and_used_after_no_ret() {
check_assist(
extract_function,
diff --git a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
index 4c46a51bef..3bbf9a0ad3 100644
--- a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -6,7 +6,7 @@ use ide_db::{
FxHashSet, RootDatabase,
defs::Definition,
helpers::mod_path_to_ast,
- imports::insert_use::{ImportScope, InsertUseConfig, insert_use},
+ imports::insert_use::{ImportScope, InsertUseConfig, insert_use_with_editor},
path_transform::PathTransform,
search::FileReference,
};
@@ -16,12 +16,14 @@ use syntax::{
SyntaxKind::*,
SyntaxNode, T,
ast::{
- self, AstNode, HasAttrs, HasGenericParams, HasName, HasVisibility, edit::AstNodeEdit, make,
+ self, AstNode, HasAttrs, HasGenericParams, HasName, HasVisibility, edit::AstNodeEdit,
+ syntax_factory::SyntaxFactory,
},
- match_ast, ted,
+ match_ast,
+ syntax_editor::{Position, SyntaxEditor},
};
-use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder};
+use crate::{AssistContext, AssistId, Assists};
// Assist: extract_struct_from_enum_variant
//
@@ -58,6 +60,8 @@ pub(crate) fn extract_struct_from_enum_variant(
"Extract struct from enum variant",
target,
|builder| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = builder.make_editor(variant.syntax());
let edition = enum_hir.krate(ctx.db()).edition(ctx.db());
let variant_hir_name = variant_hir.name(ctx.db());
let enum_module_def = ModuleDef::from(enum_hir);
@@ -73,40 +77,56 @@ pub(crate) fn extract_struct_from_enum_variant(
def_file_references = Some(references);
continue;
}
- builder.edit_file(file_id.file_id(ctx.db()));
let processed = process_references(
ctx,
- builder,
&mut visited_modules_set,
&enum_module_def,
&variant_hir_name,
references,
);
+ if processed.is_empty() {
+ continue;
+ }
+ let mut file_editor = builder.make_editor(processed[0].0.syntax());
processed.into_iter().for_each(|(path, node, import)| {
- apply_references(ctx.config.insert_use, path, node, import, edition)
+ apply_references(
+ ctx.config.insert_use,
+ path,
+ node,
+ import,
+ edition,
+ &mut file_editor,
+ &make,
+ )
});
+ file_editor.add_mappings(make.take());
+ builder.add_file_edits(file_id.file_id(ctx.db()), file_editor);
}
- builder.edit_file(ctx.vfs_file_id());
- let variant = builder.make_mut(variant.clone());
if let Some(references) = def_file_references {
let processed = process_references(
ctx,
- builder,
&mut visited_modules_set,
&enum_module_def,
&variant_hir_name,
references,
);
processed.into_iter().for_each(|(path, node, import)| {
- apply_references(ctx.config.insert_use, path, node, import, edition)
+ apply_references(
+ ctx.config.insert_use,
+ path,
+ node,
+ import,
+ edition,
+ &mut editor,
+ &make,
+ )
});
}
- let generic_params = enum_ast
- .generic_param_list()
- .and_then(|known_generics| extract_generic_params(&known_generics, &field_list));
- let generics = generic_params.as_ref().map(|generics| generics.clone_for_update());
+ let generic_params = enum_ast.generic_param_list().and_then(|known_generics| {
+ extract_generic_params(&make, &known_generics, &field_list)
+ });
// resolve GenericArg in field_list to actual type
let field_list = if let Some((target_scope, source_scope)) =
@@ -124,25 +144,45 @@ pub(crate) fn extract_struct_from_enum_variant(
}
}
} else {
- field_list.clone_for_update()
+ field_list.clone()
};
- let def =
- create_struct_def(variant_name.clone(), &variant, &field_list, generics, &enum_ast);
+ let (comments_for_struct, comments_to_delete) =
+ collect_variant_comments(&make, variant.syntax());
+ for element in &comments_to_delete {
+ editor.delete(element.clone());
+ }
+
+ let def = create_struct_def(
+ &make,
+ variant_name.clone(),
+ &field_list,
+ generic_params.clone(),
+ &enum_ast,
+ );
let enum_ast = variant.parent_enum();
let indent = enum_ast.indent_level();
let def = def.indent(indent);
- ted::insert_all(
- ted::Position::before(enum_ast.syntax()),
- vec![
- def.syntax().clone().into(),
- make::tokens::whitespace(&format!("\n\n{indent}")).into(),
- ],
+ let mut insert_items: Vec<SyntaxElement> = Vec::new();
+ for attr in enum_ast.attrs() {
+ insert_items.push(attr.syntax().clone().into());
+ insert_items.push(make.whitespace("\n").into());
+ }
+ insert_items.extend(comments_for_struct);
+ insert_items.push(def.syntax().clone().into());
+ insert_items.push(make.whitespace(&format!("\n\n{indent}")).into());
+ editor.insert_all_with_whitespace(
+ Position::before(enum_ast.syntax()),
+ insert_items,
+ &make,
);
- update_variant(&variant, generic_params.map(|g| g.clone_for_update()));
+ update_variant(&make, &mut editor, &variant, generic_params);
+
+ editor.add_mappings(make.finish_with_mappings());
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -184,6 +224,7 @@ fn existing_definition(db: &RootDatabase, variant_name: &ast::Name, variant: &En
}
fn extract_generic_params(
+ make: &SyntaxFactory,
known_generics: &ast::GenericParamList,
field_list: &Either<ast::RecordFieldList, ast::TupleFieldList>,
) -> Option<ast::GenericParamList> {
@@ -201,7 +242,7 @@ fn extract_generic_params(
};
let generics = generics.into_iter().filter_map(|(param, tag)| tag.then_some(param));
- tagged_one.then(|| make::generic_param_list(generics))
+ tagged_one.then(|| make.generic_param_list(generics))
}
fn tag_generics_in_variant(ty: &ast::Type, generics: &mut [(ast::GenericParam, bool)]) -> bool {
@@ -250,82 +291,74 @@ fn tag_generics_in_variant(ty: &ast::Type, generics: &mut [(ast::GenericParam, b
}
fn create_struct_def(
+ make: &SyntaxFactory,
name: ast::Name,
- variant: &ast::Variant,
field_list: &Either<ast::RecordFieldList, ast::TupleFieldList>,
generics: Option<ast::GenericParamList>,
enum_: &ast::Enum,
) -> ast::Struct {
let enum_vis = enum_.visibility();
- let insert_vis = |node: &'_ SyntaxNode, vis: &'_ SyntaxNode| {
- let vis = vis.clone_for_update();
- ted::insert(ted::Position::before(node), vis);
- };
-
// for fields without any existing visibility, use visibility of enum
let field_list: ast::FieldList = match field_list {
Either::Left(field_list) => {
if let Some(vis) = &enum_vis {
- field_list
- .fields()
- .filter(|field| field.visibility().is_none())
- .filter_map(|field| field.name())
- .for_each(|it| insert_vis(it.syntax(), vis.syntax()));
+ let new_fields = field_list.fields().map(|field| {
+ if field.visibility().is_none()
+ && let Some(name) = field.name()
+ && let Some(ty) = field.ty()
+ {
+ make.record_field(Some(vis.clone()), name, ty)
+ } else {
+ field
+ }
+ });
+ make.record_field_list(new_fields).into()
+ } else {
+ field_list.clone().into()
}
-
- field_list.clone().into()
}
Either::Right(field_list) => {
if let Some(vis) = &enum_vis {
- field_list
- .fields()
- .filter(|field| field.visibility().is_none())
- .filter_map(|field| field.ty())
- .for_each(|it| insert_vis(it.syntax(), vis.syntax()));
+ let new_fields = field_list.fields().map(|field| {
+ if field.visibility().is_none()
+ && let Some(ty) = field.ty()
+ {
+ make.tuple_field(Some(vis.clone()), ty)
+ } else {
+ field
+ }
+ });
+ make.tuple_field_list(new_fields).into()
+ } else {
+ field_list.clone().into()
}
-
- field_list.clone().into()
}
};
- let strukt = make::struct_(enum_vis, name, generics, field_list).clone_for_update();
-
- // take comments from variant
- ted::insert_all(
- ted::Position::first_child_of(strukt.syntax()),
- take_all_comments(variant.syntax()),
- );
-
- // copy attributes from enum
- ted::insert_all(
- ted::Position::first_child_of(strukt.syntax()),
- enum_
- .attrs()
- .flat_map(|it| {
- vec![it.syntax().clone_for_update().into(), make::tokens::single_newline().into()]
- })
- .collect(),
- );
-
- strukt
+ make.struct_(enum_vis, name, generics, field_list)
}
-fn update_variant(variant: &ast::Variant, generics: Option<ast::GenericParamList>) -> Option<()> {
+fn update_variant(
+ make: &SyntaxFactory,
+ editor: &mut SyntaxEditor,
+ variant: &ast::Variant,
+ generics: Option<ast::GenericParamList>,
+) -> Option<()> {
let name = variant.name()?;
let generic_args = generics
.filter(|generics| generics.generic_params().count() > 0)
.map(|generics| generics.to_generic_args());
// FIXME: replace with a `ast::make` constructor
let ty = match generic_args {
- Some(generic_args) => make::ty(&format!("{name}{generic_args}")),
- None => make::ty(&name.text()),
+ Some(generic_args) => make.ty(&format!("{name}{generic_args}")),
+ None => make.ty(&name.text()),
};
// change from a record to a tuple field list
- let tuple_field = make::tuple_field(None, ty);
- let field_list = make::tuple_field_list(iter::once(tuple_field)).clone_for_update();
- ted::replace(variant.field_list()?.syntax(), field_list.syntax());
+ let tuple_field = make.tuple_field(None, ty);
+ let field_list = make.tuple_field_list(iter::once(tuple_field));
+ editor.replace(variant.field_list()?.syntax(), field_list.syntax());
// remove any ws after the name
if let Some(ws) = name
@@ -333,35 +366,39 @@ fn update_variant(variant: &ast::Variant, generics: Option<ast::GenericParamList
.siblings_with_tokens(syntax::Direction::Next)
.find_map(|tok| tok.into_token().filter(|tok| tok.kind() == WHITESPACE))
{
- ted::remove(SyntaxElement::Token(ws));
+ editor.delete(ws);
}
Some(())
}
-// Note: this also detaches whitespace after comments,
-// since `SyntaxNode::splice_children` (and by extension `ted::insert_all_raw`)
-// detaches nodes. If we only took the comments, we'd leave behind the old whitespace.
-fn take_all_comments(node: &SyntaxNode) -> Vec<SyntaxElement> {
- let mut remove_next_ws = false;
- node.children_with_tokens()
- .filter_map(move |child| match child.kind() {
+fn collect_variant_comments(
+ make: &SyntaxFactory,
+ node: &SyntaxNode,
+) -> (Vec<SyntaxElement>, Vec<SyntaxElement>) {
+ let mut to_insert: Vec<SyntaxElement> = Vec::new();
+ let mut to_delete: Vec<SyntaxElement> = Vec::new();
+ let mut after_comment = false;
+
+ for child in node.children_with_tokens() {
+ match child.kind() {
COMMENT => {
- remove_next_ws = true;
- child.detach();
- Some(child)
+ after_comment = true;
+ to_insert.push(child.clone());
+ to_delete.push(child);
}
- WHITESPACE if remove_next_ws => {
- remove_next_ws = false;
- child.detach();
- Some(make::tokens::single_newline().into())
+ WHITESPACE if after_comment => {
+ after_comment = false;
+ to_insert.push(make.whitespace("\n").into());
+ to_delete.push(child);
}
_ => {
- remove_next_ws = false;
- None
+ after_comment = false;
}
- })
- .collect()
+ }
+ }
+
+ (to_insert, to_delete)
}
fn apply_references(
@@ -370,20 +407,27 @@ fn apply_references(
node: SyntaxNode,
import: Option<(ImportScope, hir::ModPath)>,
edition: Edition,
+ editor: &mut SyntaxEditor,
+ make: &SyntaxFactory,
) {
if let Some((scope, path)) = import {
- insert_use(&scope, mod_path_to_ast(&path, edition), &insert_use_cfg);
+ insert_use_with_editor(
+ &scope,
+ mod_path_to_ast(&path, edition),
+ &insert_use_cfg,
+ editor,
+ make,
+ );
}
// deep clone to prevent cycle
- let path = make::path_from_segments(iter::once(segment.clone_subtree()), false);
- ted::insert_raw(ted::Position::before(segment.syntax()), path.clone_for_update().syntax());
- ted::insert_raw(ted::Position::before(segment.syntax()), make::token(T!['(']));
- ted::insert_raw(ted::Position::after(&node), make::token(T![')']));
+ let path = make.path_from_segments(iter::once(segment.clone()), false);
+ editor.insert(Position::before(segment.syntax()), make.token(T!['(']));
+ editor.insert(Position::before(segment.syntax()), path.syntax());
+ editor.insert(Position::after(&node), make.token(T![')']));
}
fn process_references(
ctx: &AssistContext<'_>,
- builder: &mut SourceChangeBuilder,
visited_modules: &mut FxHashSet<Module>,
enum_module_def: &ModuleDef,
variant_hir_name: &Name,
@@ -394,8 +438,6 @@ fn process_references(
refs.into_iter()
.flat_map(|reference| {
let (segment, scope_node, module) = reference_to_node(&ctx.sema, reference)?;
- let segment = builder.make_mut(segment);
- let scope_node = builder.make_syntax_mut(scope_node);
if !visited_modules.contains(&module) {
let cfg =
ctx.config.find_path_config(ctx.sema.is_nightly(module.krate(ctx.sema.db)));
diff --git a/crates/ide-assists/src/handlers/extract_variable.rs b/crates/ide-assists/src/handlers/extract_variable.rs
index e5ce02cf53..1556339d8d 100644
--- a/crates/ide-assists/src/handlers/extract_variable.rs
+++ b/crates/ide-assists/src/handlers/extract_variable.rs
@@ -283,13 +283,17 @@ fn peel_parens(mut expr: ast::Expr) -> ast::Expr {
/// Check whether the node is a valid expression which can be extracted to a variable.
/// In general that's true for any expression, but in some cases that would produce invalid code.
fn valid_target_expr(ctx: &AssistContext<'_>) -> impl Fn(SyntaxNode) -> Option<ast::Expr> {
- |node| match node.kind() {
+ let selection = ctx.selection_trimmed();
+ move |node| match node.kind() {
SyntaxKind::LOOP_EXPR | SyntaxKind::LET_EXPR => None,
SyntaxKind::BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()),
SyntaxKind::RETURN_EXPR => ast::ReturnExpr::cast(node).and_then(|e| e.expr()),
SyntaxKind::BLOCK_EXPR => {
ast::BlockExpr::cast(node).filter(|it| it.is_standalone()).map(ast::Expr::from)
}
+ SyntaxKind::ARG_LIST => ast::ArgList::cast(node)?
+ .args()
+ .find(|expr| crate::utils::is_selected(expr, selection, false)),
SyntaxKind::PATH_EXPR => {
let path_expr = ast::PathExpr::cast(node)?;
let path_resolution = ctx.sema.resolve_path(&path_expr.path()?)?;
@@ -1286,6 +1290,33 @@ fn main() {
}
#[test]
+ fn extract_var_in_arglist_with_comma() {
+ check_assist_by_label(
+ extract_variable,
+ r#"
+fn main() {
+ let x = 2;
+ foo(
+ x + x,
+ $0x - x,$0
+ )
+}
+"#,
+ r#"
+fn main() {
+ let x = 2;
+ let $0var_name = x - x;
+ foo(
+ x + x,
+ var_name,
+ )
+}
+"#,
+ "Extract into variable",
+ );
+ }
+
+ #[test]
fn extract_var_path_simple() {
check_assist_by_label(
extract_variable,
diff --git a/crates/ide-assists/src/handlers/flip_binexpr.rs b/crates/ide-assists/src/handlers/flip_binexpr.rs
index 8f2306e903..922a61bf3a 100644
--- a/crates/ide-assists/src/handlers/flip_binexpr.rs
+++ b/crates/ide-assists/src/handlers/flip_binexpr.rs
@@ -142,11 +142,11 @@ pub(crate) fn flip_range_expr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
}
(Some(start), None) => {
edit.delete(start.syntax());
- edit.insert(Position::after(&op), start.syntax().clone_for_update());
+ edit.insert(Position::after(&op), start.syntax());
}
(None, Some(end)) => {
edit.delete(end.syntax());
- edit.insert(Position::before(&op), end.syntax().clone_for_update());
+ edit.insert(Position::before(&op), end.syntax());
}
(None, None) => (),
}
diff --git a/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
index f703e4dc4a..abe447d9d9 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_trait.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
@@ -363,9 +363,9 @@ fn generate_impl(
ast_strukt,
&old_impl,
&transform_args,
- trait_args.clone_subtree(),
+ trait_args.clone(),
) {
- *trait_args = new_args.clone_subtree();
+ *trait_args = new_args.clone();
Some(new_args)
} else {
None
@@ -563,7 +563,7 @@ fn finalize_delegate(
return Some(delegate.clone());
}
- let mut editor = SyntaxEditor::new(delegate.syntax().clone_subtree());
+ let (mut editor, delegate) = SyntaxEditor::with_ast_node(delegate);
// 1. Replace assoc_item_list if we have new items
if let Some(items) = assoc_items
@@ -577,7 +577,7 @@ fn finalize_delegate(
// 2. Remove useless where clauses
if remove_where_clauses {
- remove_useless_where_clauses(&mut editor, delegate);
+ remove_useless_where_clauses(&mut editor, &delegate);
}
ast::Impl::cast(editor.finish().new_root().clone())
@@ -703,7 +703,7 @@ fn resolve_name_conflicts(
}
}
p @ ast::GenericParam::LifetimeParam(_) => {
- new_params.push(p.clone_for_update());
+ new_params.push(p);
}
ast::GenericParam::TypeParam(t) => {
let type_bounds = t.type_bound_list();
diff --git a/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
index 62ffd3d965..4cd018d02d 100644
--- a/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
+++ b/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
@@ -429,7 +429,7 @@ fn build_source_change(
generate_getter_from_info(ctx, &assist_info, record_field_info, &syntax_factory)
}
};
- let new_fn = method.clone_for_update();
+ let new_fn = method;
let new_fn = new_fn.indent(1.into());
new_fn.into()
})
diff --git a/crates/ide-assists/src/handlers/generate_impl.rs b/crates/ide-assists/src/handlers/generate_impl.rs
index 2d1235792d..af123eeaa0 100644
--- a/crates/ide-assists/src/handlers/generate_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_impl.rs
@@ -8,8 +8,8 @@ use syntax::{
use crate::{
AssistContext, AssistId, Assists,
utils::{
- self, DefaultMethods, IgnoreAssocItems, generate_impl_with_factory,
- generate_trait_impl_intransitive,
+ self, DefaultMethods, IgnoreAssocItems, add_trait_assoc_items_to_impl,
+ generate_impl_with_factory, generate_trait_impl_intransitive,
},
};
@@ -212,7 +212,8 @@ pub(crate) fn generate_impl_trait(acc: &mut Assists, ctx: &AssistContext<'_>) ->
make_impl_(None)
} else {
let impl_ = make_impl_(None);
- let assoc_items = utils::add_trait_assoc_items_to_impl(
+ let assoc_items = add_trait_assoc_items_to_impl(
+ &make,
&ctx.sema,
ctx.config,
&missing_items,
diff --git a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
index 3a62a8853e..31e49c8ce4 100644
--- a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
@@ -67,8 +67,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
format!("Generate `{trait_new}` impl from this `{trait_name}` trait"),
target,
|edit| {
- let impl_clone = impl_def.reset_indent().clone_subtree();
- let mut editor = SyntaxEditor::new(impl_clone.syntax().clone());
+ let (mut editor, impl_clone) = SyntaxEditor::with_ast_node(&impl_def.reset_indent());
let factory = SyntaxFactory::without_mappings();
apply_generate_mut_impl(&mut editor, &factory, &impl_clone, trait_new);
diff --git a/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs b/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs
index d3022ceda3..2fc2b9efe8 100644
--- a/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs
+++ b/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs
@@ -1,14 +1,16 @@
-use ast::make;
use hir::next_solver::{DbInterner, TypingMode};
use hir::{HasCrate, ModuleDef, Semantics};
use ide_db::{
RootDatabase, famous_defs::FamousDefs, helpers::mod_path_to_ast,
imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor,
};
-use syntax::syntax_editor::{Element, Position};
+use syntax::syntax_editor::{Position, SyntaxEditor};
use syntax::{
TokenText,
- ast::{self, AstNode, HasAttrs, HasGenericParams, HasName, edit::AstNodeEdit},
+ ast::{
+ self, AstNode, HasAttrs, HasGenericParams, HasName, edit::AstNodeEdit,
+ syntax_factory::SyntaxFactory,
+ },
};
use crate::{
@@ -78,47 +80,52 @@ pub(crate) fn generate_single_field_struct_from(
"Generate single field `From`",
strukt.syntax().text_range(),
|builder| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = builder.make_editor(strukt.syntax());
+
let indent = strukt.indent_level();
let ty_where_clause = strukt.where_clause();
let type_gen_params = strukt.generic_param_list();
let type_gen_args = type_gen_params.as_ref().map(|params| params.to_generic_args());
- let trait_gen_args = Some(make::generic_arg_list([ast::GenericArg::TypeArg(
- make::type_arg(main_field_ty.clone()),
- )]));
+ let trait_gen_args = Some(make.generic_arg_list(
+ [ast::GenericArg::TypeArg(make.type_arg(main_field_ty.clone()))],
+ false,
+ ));
- let ty = make::ty(&strukt_name.text());
+ let ty = make.ty(&strukt_name.text());
let constructor =
- make_adt_constructor(names.as_deref(), constructors, &main_field_name);
- let body = make::block_expr([], Some(constructor));
+ make_adt_constructor(names.as_deref(), constructors, &main_field_name, &make);
+ let body = make.block_expr([], Some(constructor));
- let fn_ = make::fn_(
- None,
- None,
- make::name("from"),
- None,
- None,
- make::param_list(
+ let fn_ = make
+ .fn_(
+ [],
None,
- [make::param(
- make::path_pat(make::path_from_text(&main_field_name)),
- main_field_ty,
- )],
- ),
- body,
- Some(make::ret_type(make::ty("Self"))),
- false,
- false,
- false,
- false,
- )
- .indent(1.into());
+ make.name("from"),
+ None,
+ None,
+ make.param_list(
+ None,
+ [make.param(
+ make.path_pat(make.path_from_text(&main_field_name)),
+ main_field_ty,
+ )],
+ ),
+ body,
+ Some(make.ret_type(make.ty("Self"))),
+ false,
+ false,
+ false,
+ false,
+ )
+ .indent_with_mapping(1.into(), &make);
let cfg_attrs = strukt
.attrs()
.filter(|attr| attr.as_simple_call().is_some_and(|(name, _arg)| name == "cfg"));
- let impl_ = make::impl_trait(
+ let impl_ = make.impl_trait(
cfg_attrs,
false,
None,
@@ -126,28 +133,31 @@ pub(crate) fn generate_single_field_struct_from(
type_gen_params,
type_gen_args,
false,
- make::ty("From"),
+ make.ty("From"),
ty.clone(),
None,
ty_where_clause.map(|wc| wc.reset_indent()),
None,
- )
- .clone_for_update();
-
- impl_.get_or_create_assoc_item_list().add_item(fn_.into());
- let impl_ = impl_.indent(indent);
+ );
- let mut edit = builder.make_editor(strukt.syntax());
+ let (mut impl_editor, impl_root) = SyntaxEditor::with_ast_node(&impl_);
+ let assoc_list =
+ impl_root.get_or_create_assoc_item_list_with_editor(&mut impl_editor, &make);
+ assoc_list.add_items(&mut impl_editor, vec![fn_.into()]);
+ let impl_ = ast::Impl::cast(impl_editor.finish().new_root().clone())
+ .unwrap()
+ .indent_with_mapping(indent, &make);
- edit.insert_all(
+ editor.insert_all(
Position::after(strukt.syntax()),
vec![
- make::tokens::whitespace(&format!("\n\n{indent}")).syntax_element(),
- impl_.syntax().syntax_element(),
+ make.whitespace(&format!("\n\n{indent}")).into(),
+ impl_.syntax().clone().into(),
],
);
- builder.add_file_edits(ctx.vfs_file_id(), edit);
+ editor.add_mappings(make.finish_with_mappings());
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -156,19 +166,18 @@ fn make_adt_constructor(
names: Option<&[ast::Name]>,
constructors: Vec<Option<ast::Expr>>,
main_field_name: &TokenText<'_>,
+ make: &SyntaxFactory,
) -> ast::Expr {
if let Some(names) = names {
- let fields = make::record_expr_field_list(names.iter().zip(constructors).map(
- |(name, initializer)| {
- make::record_expr_field(make::name_ref(&name.text()), initializer)
- },
+ let fields = make.record_expr_field_list(names.iter().zip(constructors).map(
+ |(name, initializer)| make.record_expr_field(make.name_ref(&name.text()), initializer),
));
- make::record_expr(make::path_from_text("Self"), fields).into()
+ make.record_expr(make.path_from_text("Self"), fields).into()
} else {
- let arg_list = make::arg_list(constructors.into_iter().map(|expr| {
- expr.unwrap_or_else(|| make::expr_path(make::path_from_text(main_field_name)))
+ let arg_list = make.arg_list(constructors.into_iter().map(|expr| {
+ expr.unwrap_or_else(|| make.expr_path(make.path_from_text(main_field_name)))
}));
- make::expr_call(make::expr_path(make::path_from_text("Self")), arg_list).into()
+ make.expr_call(make.expr_path(make.path_from_text("Self")), arg_list).into()
}
}
@@ -177,6 +186,7 @@ fn make_constructors(
module: hir::Module,
types: &[ast::Type],
) -> Vec<Option<ast::Expr>> {
+ let make = SyntaxFactory::without_mappings();
let (db, sema) = (ctx.db(), &ctx.sema);
let cfg = ctx.config.find_path_config(ctx.sema.is_nightly(module.krate(ctx.sema.db)));
types
@@ -184,7 +194,7 @@ fn make_constructors(
.map(|ty| {
let ty = sema.resolve_type(ty)?;
if ty.is_unit() {
- return Some(make::expr_tuple([]).into());
+ return Some(make.expr_tuple([]).into());
}
let item_in_ns = ModuleDef::Adt(ty.as_adt()?).into();
let edition = module.krate(db).edition(db);
diff --git a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
index 1286abe356..2d3d05849b 100644
--- a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
@@ -4,6 +4,7 @@ use syntax::{
AstNode, AstToken, SyntaxKind, T,
ast::{
self, HasDocComments, HasGenericParams, HasName, HasVisibility, edit::AstNodeEdit, make,
+ syntax_factory::SyntaxFactory,
},
syntax_editor::{Position, SyntaxEditor},
};
@@ -98,8 +99,8 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
impl_ast.syntax().text_range(),
|builder| {
let trait_items: ast::AssocItemList = {
- let trait_items = impl_assoc_items.clone_subtree();
- let mut trait_items_editor = SyntaxEditor::new(trait_items.syntax().clone());
+ let (mut trait_items_editor, trait_items) =
+ SyntaxEditor::with_ast_node(&impl_assoc_items);
trait_items.assoc_items().for_each(|item| {
strip_body(&mut trait_items_editor, &item);
@@ -107,17 +108,18 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
});
ast::AssocItemList::cast(trait_items_editor.finish().new_root().clone()).unwrap()
};
- let trait_ast = make::trait_(
+
+ let factory = SyntaxFactory::with_mappings();
+ let trait_ast = factory.trait_(
false,
&trait_name(&impl_assoc_items).text(),
impl_ast.generic_param_list(),
impl_ast.where_clause(),
trait_items,
- )
- .clone_for_update();
+ );
let trait_name = trait_ast.name().expect("new trait should have a name");
- let trait_name_ref = make::name_ref(&trait_name.to_string()).clone_for_update();
+ let trait_name_ref = factory.name_ref(&trait_name.to_string());
// Change `impl Foo` to `impl NewTrait for Foo`
let mut elements = vec![
@@ -128,7 +130,7 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
];
if let Some(params) = impl_ast.generic_param_list() {
- let gen_args = &params.to_generic_args().clone_for_update();
+ let gen_args = &params.to_generic_args();
elements.insert(1, gen_args.syntax().clone().into());
}
@@ -156,6 +158,7 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
editor.add_annotation(trait_name_ref.syntax(), placeholder);
}
+ editor.add_mappings(factory.finish_with_mappings());
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
);
diff --git a/crates/ide-assists/src/handlers/inline_type_alias.rs b/crates/ide-assists/src/handlers/inline_type_alias.rs
index f3ebe61078..4b60f0ac1e 100644
--- a/crates/ide-assists/src/handlers/inline_type_alias.rs
+++ b/crates/ide-assists/src/handlers/inline_type_alias.rs
@@ -170,7 +170,7 @@ impl Replacement {
Replacement::Generic { lifetime_map, const_and_type_map } => {
create_replacement(lifetime_map, const_and_type_map, concrete_type)
}
- Replacement::Plain => concrete_type.syntax().clone_subtree().clone_for_update(),
+ Replacement::Plain => concrete_type.syntax().clone(),
}
}
}
@@ -312,8 +312,7 @@ fn create_replacement(
const_and_type_map: &ConstAndTypeMap,
concrete_type: &ast::Type,
) -> SyntaxNode {
- let updated_concrete_type = concrete_type.syntax().clone_subtree();
- let mut editor = SyntaxEditor::new(updated_concrete_type.clone());
+ let (mut editor, updated_concrete_type) = SyntaxEditor::new(concrete_type.syntax().clone());
let mut replacements: Vec<(SyntaxNode, SyntaxNode)> = Vec::new();
let mut removals: Vec<NodeOrToken<SyntaxNode, _>> = Vec::new();
@@ -361,7 +360,7 @@ fn create_replacement(
continue;
}
- replacements.push((syntax.clone(), new_lifetime.syntax().clone_for_update()));
+ replacements.push((syntax.clone(), new_lifetime.syntax().clone()));
}
} else if let Some(name_ref) = ast::NameRef::cast(syntax.clone()) {
let Some(replacement_syntax) = const_and_type_map.0.get(&name_ref.to_string()) else {
@@ -449,15 +448,12 @@ impl ConstOrTypeGeneric {
}
fn replacement_value(&self) -> Option<SyntaxNode> {
- Some(
- match self {
- ConstOrTypeGeneric::ConstArg(ca) => ca.expr()?.syntax().clone(),
- ConstOrTypeGeneric::TypeArg(ta) => ta.syntax().clone(),
- ConstOrTypeGeneric::ConstParam(cp) => cp.default_val()?.syntax().clone(),
- ConstOrTypeGeneric::TypeParam(tp) => tp.default_type()?.syntax().clone(),
- }
- .clone_for_update(),
- )
+ Some(match self {
+ ConstOrTypeGeneric::ConstArg(ca) => ca.expr()?.syntax().clone(),
+ ConstOrTypeGeneric::TypeArg(ta) => ta.syntax().clone(),
+ ConstOrTypeGeneric::ConstParam(cp) => cp.default_val()?.syntax().clone(),
+ ConstOrTypeGeneric::TypeParam(tp) => tp.default_type()?.syntax().clone(),
+ })
}
}
diff --git a/crates/ide-assists/src/handlers/introduce_named_lifetime.rs b/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
index 854e9561d2..5e8ea7daff 100644
--- a/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
+++ b/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
@@ -97,8 +97,7 @@ fn generate_fn_def_assist(
};
acc.add(AssistId::refactor(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |edit| {
- let root = fn_def.syntax().ancestors().last().unwrap().clone();
- let mut editor = SyntaxEditor::new(root);
+ let mut editor = edit.make_editor(fn_def.syntax());
let factory = SyntaxFactory::with_mappings();
if let Some(generic_list) = fn_def.generic_param_list() {
@@ -167,8 +166,7 @@ fn generate_impl_def_assist(
let new_lifetime_name = generate_unique_lifetime_param_name(impl_def.generic_param_list())?;
acc.add(AssistId::refactor(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |edit| {
- let root = impl_def.syntax().ancestors().last().unwrap().clone();
- let mut editor = SyntaxEditor::new(root);
+ let mut editor = edit.make_editor(impl_def.syntax());
let factory = SyntaxFactory::without_mappings();
if let Some(generic_list) = impl_def.generic_param_list() {
diff --git a/crates/ide-assists/src/handlers/pull_assignment_up.rs b/crates/ide-assists/src/handlers/pull_assignment_up.rs
index 812ebf6c6e..74ed2e14fa 100644
--- a/crates/ide-assists/src/handlers/pull_assignment_up.rs
+++ b/crates/ide-assists/src/handlers/pull_assignment_up.rs
@@ -75,7 +75,8 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) ->
}
let target = tgt.syntax().text_range();
- let edit_tgt = tgt.syntax().clone_subtree();
+ let (mut editor, edit_tgt) = SyntaxEditor::new(tgt.syntax().clone());
+
let assignments: Vec<_> = collector
.assignments
.into_iter()
@@ -93,7 +94,6 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) ->
})
.collect();
- let mut editor = SyntaxEditor::new(edit_tgt);
for (stmt, rhs) in assignments {
let mut stmt = stmt.syntax().clone();
if let Some(parent) = stmt.parent()
diff --git a/crates/ide-assists/src/handlers/remove_dbg.rs b/crates/ide-assists/src/handlers/remove_dbg.rs
index 08779a3ed1..f4c354b8a2 100644
--- a/crates/ide-assists/src/handlers/remove_dbg.rs
+++ b/crates/ide-assists/src/handlers/remove_dbg.rs
@@ -50,7 +50,7 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
let mut editor = builder.make_editor(ctx.source_file().syntax());
for (range, expr) in replacements {
if let Some(expr) = expr {
- editor.insert(Position::before(range[0].clone()), expr.syntax().clone_for_update());
+ editor.insert(Position::before(range[0].clone()), expr.syntax());
}
for node_or_token in range {
editor.delete(node_or_token);
@@ -163,7 +163,7 @@ fn compute_dbg_replacement(
None => false,
};
let expr = replace_nested_dbgs(expr.clone());
- let expr = if wrap { make::expr_paren(expr).into() } else { expr.clone_subtree() };
+ let expr = if wrap { make::expr_paren(expr).into() } else { expr };
(vec![macro_call.syntax().clone().into()], Some(expr))
}
// dbg!(expr0, expr1, ...)
@@ -209,8 +209,7 @@ fn replace_nested_dbgs(expanded: ast::Expr) -> ast::Expr {
return replaced;
}
- let expanded = expanded.clone_subtree();
- let mut editor = SyntaxEditor::new(expanded.syntax().clone());
+ let (mut editor, expanded) = SyntaxEditor::with_ast_node(&expanded);
// We need to collect to avoid mutation during traversal.
let macro_exprs: Vec<_> =
expanded.syntax().descendants().filter_map(ast::MacroExpr::cast).collect();
@@ -222,7 +221,7 @@ fn replace_nested_dbgs(expanded: ast::Expr) -> ast::Expr {
};
if let Some(expr) = expr_opt {
- editor.replace(mac.syntax(), expr.syntax().clone_for_update());
+ editor.replace(mac.syntax(), expr.syntax());
} else {
editor.delete(mac.syntax());
}
diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index f54f7a02d2..04c9d8e54d 100644
--- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -12,8 +12,8 @@ use crate::{
AssistConfig, AssistId,
assist_context::{AssistContext, Assists},
utils::{
- DefaultMethods, IgnoreAssocItems, add_trait_assoc_items_to_impl_with_factory,
- filter_assoc_items, gen_trait_fn_body, generate_trait_impl,
+ DefaultMethods, IgnoreAssocItems, add_trait_assoc_items_to_impl, filter_assoc_items,
+ gen_trait_fn_body, generate_trait_impl, generate_trait_impl_with_item,
},
};
@@ -127,7 +127,7 @@ fn add_assist(
let label = format!("Convert to manual `impl {replace_trait_path} for {annotated_name}`");
acc.add(AssistId::refactor("replace_derive_with_manual_impl"), label, target, |builder| {
- let make = SyntaxFactory::without_mappings();
+ let make = SyntaxFactory::with_mappings();
let insert_after = Position::after(adt.syntax());
let impl_is_unsafe = trait_.map(|s| s.is_unsafe(ctx.db())).unwrap_or(false);
let impl_def = impl_def_from_trait(
@@ -141,7 +141,7 @@ fn add_assist(
);
let mut editor = builder.make_editor(attr.syntax());
- update_attribute(&mut editor, old_derives, old_tree, old_trait_path, attr);
+ update_attribute(&make, &mut editor, old_derives, old_tree, old_trait_path, attr);
let trait_path = make.ty_path(replace_trait_path.clone()).into();
@@ -177,6 +177,7 @@ fn add_assist(
insert_after,
vec![make.whitespace("\n\n").into(), impl_def.syntax().clone().into()],
);
+ editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}
@@ -207,10 +208,10 @@ fn impl_def_from_trait(
return None;
}
let make = SyntaxFactory::without_mappings();
- let trait_ty = make.ty_path(trait_path.clone()).into();
- let impl_def = generate_trait_impl(&make, impl_is_unsafe, adt, trait_ty);
+ let trait_ty: ast::Type = make.ty_path(trait_path.clone()).into();
+ let impl_def = generate_trait_impl(&make, impl_is_unsafe, adt, trait_ty.clone());
- let assoc_items = add_trait_assoc_items_to_impl_with_factory(
+ let assoc_items = add_trait_assoc_items_to_impl(
&make,
sema,
config,
@@ -219,14 +220,12 @@ fn impl_def_from_trait(
&impl_def,
&target_scope,
);
- let assoc_item_list = if let Some((first, other)) =
- assoc_items.split_first().map(|(first, other)| (first.clone_subtree(), other))
- {
- let first_item = if let ast::AssocItem::Fn(ref func) = first
- && let Some(body) = gen_trait_fn_body(func, trait_path, adt, None)
+ let assoc_item_list = if let Some((first, other)) = assoc_items.split_first() {
+ let first_item = if let ast::AssocItem::Fn(func) = first
+ && let Some(body) = gen_trait_fn_body(&make, func, trait_path, adt, None)
&& let Some(func_body) = func.body()
{
- let mut editor = SyntaxEditor::new(first.syntax().clone());
+ let (mut editor, _) = SyntaxEditor::new(first.syntax().clone());
editor.replace(func_body.syntax(), body.syntax());
ast::AssocItem::cast(editor.finish().new_root().clone())
} else {
@@ -239,21 +238,17 @@ fn impl_def_from_trait(
make.assoc_item_list_empty()
};
- let impl_def = impl_def.clone_subtree();
- let mut editor = SyntaxEditor::new(impl_def.syntax().clone());
- editor.replace(impl_def.assoc_item_list()?.syntax(), assoc_item_list.syntax());
- let impl_def = ast::Impl::cast(editor.finish().new_root().clone())?;
- Some(impl_def)
+ Some(generate_trait_impl_with_item(&make, impl_is_unsafe, adt, trait_ty, assoc_item_list))
}
fn update_attribute(
+ make: &SyntaxFactory,
editor: &mut SyntaxEditor,
old_derives: &[ast::Path],
old_tree: &ast::TokenTree,
old_trait_path: &ast::Path,
attr: &ast::Attr,
) {
- let make = SyntaxFactory::without_mappings();
let new_derives = old_derives
.iter()
.filter(|t| t.to_string() != old_trait_path.to_string())
diff --git a/crates/ide-assists/src/handlers/replace_if_let_with_match.rs b/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
index 8ff30fce5b..ada2fd9b21 100644
--- a/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
+++ b/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
@@ -13,7 +13,10 @@ use syntax::{
use crate::{
AssistContext, AssistId, Assists,
- utils::{does_pat_match_variant, does_pat_variant_nested_or_literal, unwrap_trivial_block},
+ utils::{
+ does_pat_match_variant, does_pat_variant_nested_or_literal, unwrap_trivial_block,
+ wrap_paren,
+ },
};
// Assist: replace_if_let_with_match
@@ -289,6 +292,7 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'
_ => make.expr_let(if_let_pat, scrutinee).into(),
};
let condition = if let Some(guard) = guard {
+ let guard = wrap_paren(guard, &make, ast::prec::ExprPrecedence::LAnd);
make.expr_bin(condition, ast::BinaryOp::LogicOp(ast::LogicOp::And), guard).into()
} else {
condition
@@ -398,8 +402,7 @@ fn let_and_guard(cond: &ast::Expr) -> (Option<ast::LetExpr>, Option<ast::Expr>)
} else if let ast::Expr::BinExpr(bin_expr) = cond
&& let Some(ast::Expr::LetExpr(let_expr)) = and_bin_expr_left(bin_expr).lhs()
{
- let new_expr = bin_expr.clone_subtree();
- let mut edit = SyntaxEditor::new(new_expr.syntax().clone());
+ let (mut edit, new_expr) = SyntaxEditor::with_ast_node(bin_expr);
let left_bin = and_bin_expr_left(&new_expr);
if let Some(rhs) = left_bin.rhs() {
@@ -2268,14 +2271,35 @@ fn main() {
"#,
r#"
fn main() {
- if let Some(n) = Some(0) && n % 2 == 0 && n != 6 {
+ if let Some(n) = Some(0) && (n % 2 == 0 && n != 6) {
()
} else {
code()
}
}
"#,
- )
+ );
+
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn main() {
+ match$0 Some(0) {
+ Some(n) if n % 2 == 0 || n == 7 => (),
+ _ => code(),
+ }
+}
+"#,
+ r#"
+fn main() {
+ if let Some(n) = Some(0) && (n % 2 == 0 || n == 7) {
+ ()
+ } else {
+ code()
+ }
+}
+"#,
+ );
}
#[test]
diff --git a/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
index cdf20586ef..fd090cc081 100644
--- a/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
@@ -111,8 +111,7 @@ fn target_path(ctx: &AssistContext<'_>, mut original_path: ast::Path) -> Option<
}
fn drop_generic_args(path: &ast::Path) -> ast::Path {
- let path = path.clone_subtree();
- let mut editor = SyntaxEditor::new(path.syntax().clone());
+ let (mut editor, path) = SyntaxEditor::with_ast_node(path);
if let Some(segment) = path.segment()
&& let Some(generic_args) = segment.generic_arg_list()
{
diff --git a/crates/ide-assists/src/handlers/unwrap_block.rs b/crates/ide-assists/src/handlers/unwrap_block.rs
index e029d7884f..5593ca3eb8 100644
--- a/crates/ide-assists/src/handlers/unwrap_block.rs
+++ b/crates/ide-assists/src/handlers/unwrap_block.rs
@@ -103,8 +103,7 @@ fn delete_else_before(container: SyntaxNode, edit: &mut SyntaxEditor) {
fn wrap_let(assign: &ast::LetStmt, replacement: ast::BlockExpr) -> ast::BlockExpr {
let try_wrap_assign = || {
let initializer = assign.initializer()?.syntax().syntax_element();
- let replacement = replacement.clone_subtree();
- let assign = assign.clone_for_update();
+ let (mut edit, replacement) = SyntaxEditor::with_ast_node(&replacement);
let tail_expr = replacement.tail_expr()?;
let before =
assign.syntax().children_with_tokens().take_while(|it| *it != initializer).collect();
@@ -115,7 +114,6 @@ fn wrap_let(assign: &ast::LetStmt, replacement: ast::BlockExpr) -> ast::BlockExp
.skip(1)
.collect();
- let mut edit = SyntaxEditor::new(replacement.syntax().clone());
edit.insert_all(Position::before(tail_expr.syntax()), before);
edit.insert_all(Position::after(tail_expr.syntax()), after);
ast::BlockExpr::cast(edit.finish().new_root().clone())
diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs
index a52bd74d14..135e750ca0 100644
--- a/crates/ide-assists/src/tests.rs
+++ b/crates/ide-assists/src/tests.rs
@@ -208,6 +208,15 @@ pub(crate) fn check_assist_target(
}
#[track_caller]
+pub(crate) fn check_assist_with_label(
+ assist: Handler,
+ #[rust_analyzer::rust_fixture] ra_fixture: &str,
+ label: &str,
+) {
+ check(assist, ra_fixture, ExpectedResult::Label(label), None);
+}
+
+#[track_caller]
pub(crate) fn check_assist_not_applicable(
assist: Handler,
#[rust_analyzer::rust_fixture] ra_fixture: &str,
@@ -307,6 +316,7 @@ enum ExpectedResult<'a> {
Unresolved,
After(&'a str),
Target(&'a str),
+ Label(&'a str),
}
#[track_caller]
@@ -335,7 +345,7 @@ fn check_with_config(
let ctx = AssistContext::new(sema, &config, frange);
let resolve = match expected {
- ExpectedResult::Unresolved => AssistResolveStrategy::None,
+ ExpectedResult::Unresolved | ExpectedResult::Label(_) => AssistResolveStrategy::None,
_ => AssistResolveStrategy::All,
};
let mut acc = Assists::new(&ctx, resolve);
@@ -404,6 +414,9 @@ fn check_with_config(
let range = assist.target;
assert_eq_text!(&text_without_caret[range], target);
}
+ (Some(assist), ExpectedResult::Label(label)) => {
+ assert_eq!(assist.label.to_string(), label);
+ }
(Some(assist), ExpectedResult::Unresolved) => assert!(
assist.source_change.is_none(),
"unresolved assist should not contain source changes"
@@ -411,7 +424,10 @@ fn check_with_config(
(Some(_), ExpectedResult::NotApplicable) => panic!("assist should not be applicable!"),
(
None,
- ExpectedResult::After(_) | ExpectedResult::Target(_) | ExpectedResult::Unresolved,
+ ExpectedResult::After(_)
+ | ExpectedResult::Target(_)
+ | ExpectedResult::Label(_)
+ | ExpectedResult::Unresolved,
) => {
panic!("code action is not applicable")
}
@@ -479,6 +495,7 @@ pub fn test_some_range(a: int) -> bool {
expect![[r#"
Extract into...
Replace if let with match
+ Convert to guarded return
"#]]
.assert_eq(&expected);
}
@@ -511,6 +528,7 @@ pub fn test_some_range(a: int) -> bool {
expect![[r#"
Extract into...
Replace if let with match
+ Convert to guarded return
"#]]
.assert_eq(&expected);
}
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index 10057f8681..3de8ec7f53 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -203,11 +203,9 @@ pub fn filter_assoc_items(
/// [`filter_assoc_items()`]), clones each item for update and applies path transformation to it,
/// then inserts into `impl_`. Returns the modified `impl_` and the first associated item that got
/// inserted.
-///
-/// Legacy: prefer [`add_trait_assoc_items_to_impl_with_factory`] when a [`SyntaxFactory`] is
-/// available.
#[must_use]
pub fn add_trait_assoc_items_to_impl(
+ make: &SyntaxFactory,
sema: &Semantics<'_, RootDatabase>,
config: &AssistConfig,
original_items: &[InFile<ast::AssocItem>],
@@ -250,95 +248,23 @@ pub fn add_trait_assoc_items_to_impl(
})
.filter_map(|item| match item {
ast::AssocItem::Fn(fn_) if fn_.body().is_none() => {
- let fn_ = fn_.clone_subtree();
- let new_body = make::block_expr(None, Some(expr_fill_default(config)));
- let mut fn_editor = SyntaxEditor::new(fn_.syntax().clone());
- fn_.replace_or_insert_body(&mut fn_editor, new_body.clone_for_update());
- let new_fn_ = fn_editor.finish().new_root().clone();
- ast::AssocItem::cast(new_fn_)
- }
- ast::AssocItem::TypeAlias(type_alias) => {
- let type_alias = type_alias.clone_subtree();
- if let Some(type_bound_list) = type_alias.type_bound_list() {
- let mut type_alias_editor = SyntaxEditor::new(type_alias.syntax().clone());
- type_bound_list.remove(&mut type_alias_editor);
- let type_alias = type_alias_editor.finish().new_root().clone();
- ast::AssocItem::cast(type_alias)
- } else {
- Some(ast::AssocItem::TypeAlias(type_alias))
- }
- }
- item => Some(item),
- })
- .map(|item| AstNodeEdit::indent(&item, new_indent_level))
- .collect()
-}
-
-/// [`SyntaxFactory`]-based variant of [`add_trait_assoc_items_to_impl`].
-#[must_use]
-pub fn add_trait_assoc_items_to_impl_with_factory(
- make: &SyntaxFactory,
- sema: &Semantics<'_, RootDatabase>,
- config: &AssistConfig,
- original_items: &[InFile<ast::AssocItem>],
- trait_: hir::Trait,
- impl_: &ast::Impl,
- target_scope: &hir::SemanticsScope<'_>,
-) -> Vec<ast::AssocItem> {
- let new_indent_level = IndentLevel::from_node(impl_.syntax()) + 1;
- original_items
- .iter()
- .map(|InFile { file_id, value: original_item }| {
- let mut cloned_item = {
- if let Some(macro_file) = file_id.macro_file() {
- let span_map = sema.db.expansion_span_map(macro_file);
- let item_prettified = prettify_macro_expansion(
- sema.db,
- original_item.syntax().clone(),
- &span_map,
- target_scope.krate().into(),
- );
- if let Some(formatted) = ast::AssocItem::cast(item_prettified) {
- return formatted;
- } else {
- stdx::never!("formatted `AssocItem` could not be cast back to `AssocItem`");
- }
- }
- original_item
- }
- .reset_indent();
-
- if let Some(source_scope) = sema.scope(original_item.syntax()) {
- let transform =
- PathTransform::trait_impl(target_scope, &source_scope, trait_, impl_.clone());
- cloned_item = ast::AssocItem::cast(transform.apply(cloned_item.syntax())).unwrap();
- }
- cloned_item.remove_attrs_and_docs();
- cloned_item
- })
- .filter_map(|item| match item {
- ast::AssocItem::Fn(fn_) if fn_.body().is_none() => {
- let fn_ = fn_.clone_subtree();
+ let (mut fn_editor, fn_) = SyntaxEditor::with_ast_node(&fn_);
let fill_expr: ast::Expr = match config.expr_fill_default {
ExprFillDefaultMode::Todo | ExprFillDefaultMode::Default => make.expr_todo(),
ExprFillDefaultMode::Underscore => make.expr_underscore().into(),
};
let new_body = make.block_expr(None::<ast::Stmt>, Some(fill_expr));
- let mut fn_editor = SyntaxEditor::new(fn_.syntax().clone());
fn_.replace_or_insert_body(&mut fn_editor, new_body);
let new_fn_ = fn_editor.finish().new_root().clone();
ast::AssocItem::cast(new_fn_)
}
ast::AssocItem::TypeAlias(type_alias) => {
- let type_alias = type_alias.clone_subtree();
+ let (mut type_alias_editor, type_alias) = SyntaxEditor::with_ast_node(&type_alias);
if let Some(type_bound_list) = type_alias.type_bound_list() {
- let mut type_alias_editor = SyntaxEditor::new(type_alias.syntax().clone());
type_bound_list.remove(&mut type_alias_editor);
- let type_alias = type_alias_editor.finish().new_root().clone();
- ast::AssocItem::cast(type_alias)
- } else {
- Some(ast::AssocItem::TypeAlias(type_alias))
- }
+ };
+ let type_alias = type_alias_editor.finish().new_root().clone();
+ ast::AssocItem::cast(type_alias)
}
item => Some(item),
})
@@ -404,10 +330,8 @@ fn invert_special_case(make: &SyntaxFactory, expr: &ast::Expr) -> Option<ast::Ex
Some(make.expr_method_call(receiver, make.name_ref(method), arg_list).into())
}
ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::UnaryOp::Not => match pe.expr()? {
- ast::Expr::ParenExpr(parexpr) => {
- parexpr.expr().map(|e| e.clone_subtree().clone_for_update())
- }
- _ => pe.expr().map(|e| e.clone_subtree().clone_for_update()),
+ ast::Expr::ParenExpr(parexpr) => parexpr.expr(),
+ _ => pe.expr(),
},
ast::Expr::Literal(lit) => match lit.kind() {
ast::LiteralKind::Bool(b) => match b {
@@ -758,6 +682,16 @@ pub(crate) fn generate_trait_impl_intransitive_with_item(
generate_impl_inner_with_factory(make, false, adt, Some(trait_), false, Some(body))
}
+pub(crate) fn generate_trait_impl_with_item(
+ make: &SyntaxFactory,
+ is_unsafe: bool,
+ adt: &ast::Adt,
+ trait_: ast::Type,
+ body: ast::AssocItemList,
+) -> ast::Impl {
+ generate_impl_inner_with_factory(make, is_unsafe, adt, Some(trait_), true, Some(body))
+}
+
fn generate_impl_inner(
is_unsafe: bool,
adt: &ast::Adt,
diff --git a/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/crates/ide-assists/src/utils/gen_trait_fn_body.rs
index 87e90e8519..b0d88737fe 100644
--- a/crates/ide-assists/src/utils/gen_trait_fn_body.rs
+++ b/crates/ide-assists/src/utils/gen_trait_fn_body.rs
@@ -1,7 +1,10 @@
//! This module contains functions to generate default trait impl function bodies where possible.
use hir::TraitRef;
-use syntax::ast::{self, AstNode, BinaryOp, CmpOp, HasName, LogicOp, edit::AstNodeEdit, make};
+use syntax::ast::{
+ self, AstNode, BinaryOp, CmpOp, HasName, LogicOp, edit::AstNodeEdit,
+ syntax_factory::SyntaxFactory,
+};
/// Generate custom trait bodies without default implementation where possible.
///
@@ -11,6 +14,7 @@ use syntax::ast::{self, AstNode, BinaryOp, CmpOp, HasName, LogicOp, edit::AstNod
/// `None` means that generating a custom trait body failed, and the body will remain
/// as `todo!` instead.
pub(crate) fn gen_trait_fn_body(
+ make: &SyntaxFactory,
func: &ast::Fn,
trait_path: &ast::Path,
adt: &ast::Adt,
@@ -20,32 +24,32 @@ pub(crate) fn gen_trait_fn_body(
match trait_path.segment()?.name_ref()?.text().as_str() {
"Clone" => {
stdx::always!(func.name().is_some_and(|name| name.text() == "clone"));
- gen_clone_impl(adt)
+ gen_clone_impl(make, adt)
}
- "Debug" => gen_debug_impl(adt),
- "Default" => gen_default_impl(adt),
+ "Debug" => gen_debug_impl(make, adt),
+ "Default" => gen_default_impl(make, adt),
"Hash" => {
stdx::always!(func.name().is_some_and(|name| name.text() == "hash"));
- gen_hash_impl(adt)
+ gen_hash_impl(make, adt)
}
"PartialEq" => {
stdx::always!(func.name().is_some_and(|name| name.text() == "eq"));
- gen_partial_eq(adt, trait_ref)
+ gen_partial_eq(make, adt, trait_ref)
}
"PartialOrd" => {
stdx::always!(func.name().is_some_and(|name| name.text() == "partial_cmp"));
- gen_partial_ord(adt, trait_ref)
+ gen_partial_ord(make, adt, trait_ref)
}
_ => None,
}
}
/// Generate a `Clone` impl based on the fields and members of the target type.
-fn gen_clone_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
- fn gen_clone_call(target: ast::Expr) -> ast::Expr {
- let method = make::name_ref("clone");
- make::expr_method_call(target, method, make::arg_list(None)).into()
- }
+fn gen_clone_impl(make: &SyntaxFactory, adt: &ast::Adt) -> Option<ast::BlockExpr> {
+ let gen_clone_call = |target: ast::Expr| -> ast::Expr {
+ let method = make.name_ref("clone");
+ make.expr_method_call(target, method, make.arg_list([])).into()
+ };
let expr = match adt {
// `Clone` cannot be derived for unions, so no default impl can be provided.
ast::Adt::Union(_) => return None,
@@ -54,7 +58,7 @@ fn gen_clone_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
let mut arms = vec![];
for variant in list.variants() {
let name = variant.name()?;
- let variant_name = make::ext::path_from_idents(["Self", &format!("{name}")])?;
+ let variant_name = make.path_from_idents(["Self", &format!("{name}")])?;
match variant.field_list() {
// => match self { Self::Name { x } => Self::Name { x: x.clone() } }
@@ -63,19 +67,20 @@ fn gen_clone_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
let mut fields = vec![];
for field in list.fields() {
let field_name = field.name()?;
- let pat = make::ident_pat(false, false, field_name.clone());
- pats.push(pat.into());
+ let pat = make.ident_pat(false, false, field_name.clone());
+ pats.push(make.record_pat_field_shorthand(pat.into()));
- let path = make::ext::ident_path(&field_name.to_string());
- let method_call = gen_clone_call(make::expr_path(path));
- let name_ref = make::name_ref(&field_name.to_string());
- let field = make::record_expr_field(name_ref, Some(method_call));
+ let path = make.ident_path(&field_name.to_string());
+ let method_call = gen_clone_call(make.expr_path(path));
+ let name_ref = make.name_ref(&field_name.to_string());
+ let field = make.record_expr_field(name_ref, Some(method_call));
fields.push(field);
}
- let pat = make::record_pat(variant_name.clone(), pats.into_iter());
- let fields = make::record_expr_field_list(fields);
- let record_expr = make::record_expr(variant_name, fields).into();
- arms.push(make::match_arm(pat.into(), None, record_expr));
+ let pat_field_list = make.record_pat_field_list(pats, None);
+ let pat = make.record_pat_with_fields(variant_name.clone(), pat_field_list);
+ let fields = make.record_expr_field_list(fields);
+ let record_expr = make.record_expr(variant_name, fields).into();
+ arms.push(make.match_arm(pat.into(), None, record_expr));
}
// => match self { Self::Name(arg1) => Self::Name(arg1.clone()) }
@@ -84,31 +89,30 @@ fn gen_clone_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
let mut fields = vec![];
for (i, _) in list.fields().enumerate() {
let field_name = format!("arg{i}");
- let pat = make::ident_pat(false, false, make::name(&field_name));
+ let pat = make.ident_pat(false, false, make.name(&field_name));
pats.push(pat.into());
- let f_path = make::expr_path(make::ext::ident_path(&field_name));
+ let f_path = make.expr_path(make.ident_path(&field_name));
fields.push(gen_clone_call(f_path));
}
- let pat = make::tuple_struct_pat(variant_name.clone(), pats.into_iter());
- let struct_name = make::expr_path(variant_name);
- let tuple_expr =
- make::expr_call(struct_name, make::arg_list(fields)).into();
- arms.push(make::match_arm(pat.into(), None, tuple_expr));
+ let pat = make.tuple_struct_pat(variant_name.clone(), pats.into_iter());
+ let struct_name = make.expr_path(variant_name);
+ let tuple_expr = make.expr_call(struct_name, make.arg_list(fields)).into();
+ arms.push(make.match_arm(pat.into(), None, tuple_expr));
}
// => match self { Self::Name => Self::Name }
None => {
- let pattern = make::path_pat(variant_name.clone());
- let variant_expr = make::expr_path(variant_name);
- arms.push(make::match_arm(pattern, None, variant_expr));
+ let pattern = make.path_pat(variant_name.clone());
+ let variant_expr = make.expr_path(variant_name);
+ arms.push(make.match_arm(pattern, None, variant_expr));
}
}
}
- let match_target = make::expr_path(make::ext::ident_path("self"));
- let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
- make::expr_match(match_target, list).into()
+ let match_target = make.expr_path(make.ident_path("self"));
+ let list = make.match_arm_list(arms).indent(ast::edit::IndentLevel(1));
+ make.expr_match(match_target, list).into()
}
ast::Adt::Struct(strukt) => {
match strukt.field_list() {
@@ -116,43 +120,43 @@ fn gen_clone_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
Some(ast::FieldList::RecordFieldList(field_list)) => {
let mut fields = vec![];
for field in field_list.fields() {
- let base = make::expr_path(make::ext::ident_path("self"));
- let target = make::expr_field(base, &field.name()?.to_string());
+ let base = make.expr_path(make.ident_path("self"));
+ let target = make.expr_field(base, &field.name()?.to_string()).into();
let method_call = gen_clone_call(target);
- let name_ref = make::name_ref(&field.name()?.to_string());
- let field = make::record_expr_field(name_ref, Some(method_call));
+ let name_ref = make.name_ref(&field.name()?.to_string());
+ let field = make.record_expr_field(name_ref, Some(method_call));
fields.push(field);
}
- let struct_name = make::ext::ident_path("Self");
- let fields = make::record_expr_field_list(fields);
- make::record_expr(struct_name, fields).into()
+ let struct_name = make.ident_path("Self");
+ let fields = make.record_expr_field_list(fields);
+ make.record_expr(struct_name, fields).into()
}
// => Self(self.0.clone(), self.1.clone())
Some(ast::FieldList::TupleFieldList(field_list)) => {
let mut fields = vec![];
for (i, _) in field_list.fields().enumerate() {
- let f_path = make::expr_path(make::ext::ident_path("self"));
- let target = make::expr_field(f_path, &format!("{i}"));
+ let f_path = make.expr_path(make.ident_path("self"));
+ let target = make.expr_field(f_path, &format!("{i}")).into();
fields.push(gen_clone_call(target));
}
- let struct_name = make::expr_path(make::ext::ident_path("Self"));
- make::expr_call(struct_name, make::arg_list(fields)).into()
+ let struct_name = make.expr_path(make.ident_path("Self"));
+ make.expr_call(struct_name, make.arg_list(fields)).into()
}
// => Self { }
None => {
- let struct_name = make::ext::ident_path("Self");
- let fields = make::record_expr_field_list(None);
- make::record_expr(struct_name, fields).into()
+ let struct_name = make.ident_path("Self");
+ let fields = make.record_expr_field_list([]);
+ make.record_expr(struct_name, fields).into()
}
}
}
};
- let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
+ let body = make.block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
Some(body)
}
/// Generate a `Debug` impl based on the fields and members of the target type.
-fn gen_debug_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
+fn gen_debug_impl(make: &SyntaxFactory, adt: &ast::Adt) -> Option<ast::BlockExpr> {
let annotated_name = adt.name()?;
match adt {
// `Debug` cannot be derived for unions, so no default impl can be provided.
@@ -164,156 +168,154 @@ fn gen_debug_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
let mut arms = vec![];
for variant in list.variants() {
let name = variant.name()?;
- let variant_name = make::ext::path_from_idents(["Self", &format!("{name}")])?;
- let target = make::expr_path(make::ext::ident_path("f"));
+ let variant_name = make.path_from_idents(["Self", &format!("{name}")])?;
+ let target = make.expr_path(make.ident_path("f"));
match variant.field_list() {
Some(ast::FieldList::RecordFieldList(list)) => {
// => f.debug_struct(name)
- let target = make::expr_path(make::ext::ident_path("f"));
- let method = make::name_ref("debug_struct");
+ let target = make.expr_path(make.ident_path("f"));
+ let method = make.name_ref("debug_struct");
let struct_name = format!("\"{name}\"");
- let args = make::arg_list(Some(make::expr_literal(&struct_name).into()));
- let mut expr = make::expr_method_call(target, method, args).into();
+ let args = make.arg_list([make.expr_literal(&struct_name).into()]);
+ let mut expr = make.expr_method_call(target, method, args).into();
let mut pats = vec![];
for field in list.fields() {
let field_name = field.name()?;
// create a field pattern for use in `MyStruct { fields.. }`
- let pat = make::ident_pat(false, false, field_name.clone());
- pats.push(pat.into());
+ let pat = make.ident_pat(false, false, field_name.clone());
+ pats.push(make.record_pat_field_shorthand(pat.into()));
// => <expr>.field("field_name", field)
- let method_name = make::name_ref("field");
- let name = make::expr_literal(&(format!("\"{field_name}\""))).into();
+ let method_name = make.name_ref("field");
+ let name = make.expr_literal(&(format!("\"{field_name}\""))).into();
let path = &format!("{field_name}");
- let path = make::expr_path(make::ext::ident_path(path));
- let args = make::arg_list(vec![name, path]);
- expr = make::expr_method_call(expr, method_name, args).into();
+ let path = make.expr_path(make.ident_path(path));
+ let args = make.arg_list([name, path]);
+ expr = make.expr_method_call(expr, method_name, args).into();
}
// => <expr>.finish()
- let method = make::name_ref("finish");
- let expr =
- make::expr_method_call(expr, method, make::arg_list(None)).into();
+ let method = make.name_ref("finish");
+ let expr = make.expr_method_call(expr, method, make.arg_list([])).into();
// => MyStruct { fields.. } => f.debug_struct("MyStruct")...finish(),
- let pat = make::record_pat(variant_name.clone(), pats.into_iter());
- arms.push(make::match_arm(pat.into(), None, expr));
+ let pat_field_list = make.record_pat_field_list(pats, None);
+ let pat = make.record_pat_with_fields(variant_name.clone(), pat_field_list);
+ arms.push(make.match_arm(pat.into(), None, expr));
}
Some(ast::FieldList::TupleFieldList(list)) => {
// => f.debug_tuple(name)
- let target = make::expr_path(make::ext::ident_path("f"));
- let method = make::name_ref("debug_tuple");
+ let target = make.expr_path(make.ident_path("f"));
+ let method = make.name_ref("debug_tuple");
let struct_name = format!("\"{name}\"");
- let args = make::arg_list(Some(make::expr_literal(&struct_name).into()));
- let mut expr = make::expr_method_call(target, method, args).into();
+ let args = make.arg_list([make.expr_literal(&struct_name).into()]);
+ let mut expr = make.expr_method_call(target, method, args).into();
let mut pats = vec![];
for (i, _) in list.fields().enumerate() {
let name = format!("arg{i}");
// create a field pattern for use in `MyStruct(fields..)`
- let field_name = make::name(&name);
- let pat = make::ident_pat(false, false, field_name.clone());
+ let field_name = make.name(&name);
+ let pat = make.ident_pat(false, false, field_name.clone());
pats.push(pat.into());
// => <expr>.field(field)
- let method_name = make::name_ref("field");
+ let method_name = make.name_ref("field");
let field_path = &name.to_string();
- let field_path = make::expr_path(make::ext::ident_path(field_path));
- let args = make::arg_list(vec![field_path]);
- expr = make::expr_method_call(expr, method_name, args).into();
+ let field_path = make.expr_path(make.ident_path(field_path));
+ let args = make.arg_list([field_path]);
+ expr = make.expr_method_call(expr, method_name, args).into();
}
// => <expr>.finish()
- let method = make::name_ref("finish");
- let expr =
- make::expr_method_call(expr, method, make::arg_list(None)).into();
+ let method = make.name_ref("finish");
+ let expr = make.expr_method_call(expr, method, make.arg_list([])).into();
// => MyStruct (fields..) => f.debug_tuple("MyStruct")...finish(),
- let pat = make::tuple_struct_pat(variant_name.clone(), pats.into_iter());
- arms.push(make::match_arm(pat.into(), None, expr));
+ let pat = make.tuple_struct_pat(variant_name.clone(), pats.into_iter());
+ arms.push(make.match_arm(pat.into(), None, expr));
}
None => {
- let fmt_string = make::expr_literal(&(format!("\"{name}\""))).into();
- let args = make::ext::token_tree_from_node(
- make::arg_list([target, fmt_string]).syntax(),
- );
- let macro_name = make::ext::ident_path("write");
- let macro_call = make::expr_macro(macro_name, args);
-
- let variant_name = make::path_pat(variant_name);
- arms.push(make::match_arm(variant_name, None, macro_call.into()));
+ let fmt_string = make.expr_literal(&(format!("\"{name}\""))).into();
+ let args =
+ make.token_tree_from_node(make.arg_list([target, fmt_string]).syntax());
+ let macro_name = make.ident_path("write");
+ let macro_call = make.expr_macro(macro_name, args);
+
+ let variant_name = make.path_pat(variant_name);
+ arms.push(make.match_arm(variant_name, None, macro_call.into()));
}
}
}
- let match_target = make::expr_path(make::ext::ident_path("self"));
- let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
- let match_expr = make::expr_match(match_target, list);
+ let match_target = make.expr_path(make.ident_path("self"));
+ let list = make.match_arm_list(arms).indent(ast::edit::IndentLevel(1));
+ let match_expr = make.expr_match(match_target, list);
- let body = make::block_expr(None, Some(match_expr.into()));
+ let body = make.block_expr(None::<ast::Stmt>, Some(match_expr.into()));
let body = body.indent(ast::edit::IndentLevel(1));
Some(body)
}
ast::Adt::Struct(strukt) => {
let name = format!("\"{annotated_name}\"");
- let args = make::arg_list(Some(make::expr_literal(&name).into()));
- let target = make::expr_path(make::ext::ident_path("f"));
+ let args = make.arg_list([make.expr_literal(&name).into()]);
+ let target = make.expr_path(make.ident_path("f"));
let expr = match strukt.field_list() {
// => f.debug_struct("Name").finish()
- None => make::expr_method_call(target, make::name_ref("debug_struct"), args).into(),
+ None => make.expr_method_call(target, make.name_ref("debug_struct"), args).into(),
// => f.debug_struct("Name").field("foo", &self.foo).finish()
Some(ast::FieldList::RecordFieldList(field_list)) => {
- let method = make::name_ref("debug_struct");
- let mut expr = make::expr_method_call(target, method, args).into();
+ let method = make.name_ref("debug_struct");
+ let mut expr = make.expr_method_call(target, method, args).into();
for field in field_list.fields() {
let name = field.name()?;
- let f_name = make::expr_literal(&(format!("\"{name}\""))).into();
- let f_path = make::expr_path(make::ext::ident_path("self"));
- let f_path = make::expr_ref(f_path, false);
- let f_path = make::expr_field(f_path, &format!("{name}"));
- let args = make::arg_list([f_name, f_path]);
- expr = make::expr_method_call(expr, make::name_ref("field"), args).into();
+ let f_name = make.expr_literal(&(format!("\"{name}\""))).into();
+ let f_path = make.expr_path(make.ident_path("self"));
+ let f_path = make.expr_field(f_path, &format!("{name}")).into();
+ let f_path = make.expr_ref(f_path, false);
+ let args = make.arg_list([f_name, f_path]);
+ expr = make.expr_method_call(expr, make.name_ref("field"), args).into();
}
expr
}
- // => f.debug_tuple("Name").field(self.0).finish()
+ // => f.debug_tuple("Name").field(&self.0).finish()
Some(ast::FieldList::TupleFieldList(field_list)) => {
- let method = make::name_ref("debug_tuple");
- let mut expr = make::expr_method_call(target, method, args).into();
+ let method = make.name_ref("debug_tuple");
+ let mut expr = make.expr_method_call(target, method, args).into();
for (i, _) in field_list.fields().enumerate() {
- let f_path = make::expr_path(make::ext::ident_path("self"));
- let f_path = make::expr_ref(f_path, false);
- let f_path = make::expr_field(f_path, &format!("{i}"));
- let method = make::name_ref("field");
- expr = make::expr_method_call(expr, method, make::arg_list(Some(f_path)))
- .into();
+ let f_path = make.expr_path(make.ident_path("self"));
+ let f_path = make.expr_field(f_path, &format!("{i}")).into();
+ let f_path = make.expr_ref(f_path, false);
+ let method = make.name_ref("field");
+ expr = make.expr_method_call(expr, method, make.arg_list([f_path])).into();
}
expr
}
};
- let method = make::name_ref("finish");
- let expr = make::expr_method_call(expr, method, make::arg_list(None)).into();
- let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
+ let method = make.name_ref("finish");
+ let expr = make.expr_method_call(expr, method, make.arg_list([])).into();
+ let body =
+ make.block_expr(None::<ast::Stmt>, Some(expr)).indent(ast::edit::IndentLevel(1));
Some(body)
}
}
}
-/// Generate a `Debug` impl based on the fields and members of the target type.
-fn gen_default_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
- fn gen_default_call() -> Option<ast::Expr> {
- let fn_name = make::ext::path_from_idents(["Default", "default"])?;
- Some(make::expr_call(make::expr_path(fn_name), make::arg_list(None)).into())
- }
+/// Generate a `Default` impl based on the fields and members of the target type.
+fn gen_default_impl(make: &SyntaxFactory, adt: &ast::Adt) -> Option<ast::BlockExpr> {
+ let gen_default_call = || -> Option<ast::Expr> {
+ let fn_name = make.path_from_idents(["Default", "default"])?;
+ Some(make.expr_call(make.expr_path(fn_name), make.arg_list([])).into())
+ };
match adt {
// `Debug` cannot be derived for unions, so no default impl can be provided.
ast::Adt::Union(_) => None,
@@ -325,42 +327,43 @@ fn gen_default_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
let mut fields = vec![];
for field in field_list.fields() {
let method_call = gen_default_call()?;
- let name_ref = make::name_ref(&field.name()?.to_string());
- let field = make::record_expr_field(name_ref, Some(method_call));
+ let name_ref = make.name_ref(&field.name()?.to_string());
+ let field = make.record_expr_field(name_ref, Some(method_call));
fields.push(field);
}
- let struct_name = make::ext::ident_path("Self");
- let fields = make::record_expr_field_list(fields);
- make::record_expr(struct_name, fields).into()
+ let struct_name = make.ident_path("Self");
+ let fields = make.record_expr_field_list(fields);
+ make.record_expr(struct_name, fields).into()
}
Some(ast::FieldList::TupleFieldList(field_list)) => {
- let struct_name = make::expr_path(make::ext::ident_path("Self"));
+ let struct_name = make.expr_path(make.ident_path("Self"));
let fields = field_list
.fields()
.map(|_| gen_default_call())
.collect::<Option<Vec<ast::Expr>>>()?;
- make::expr_call(struct_name, make::arg_list(fields)).into()
+ make.expr_call(struct_name, make.arg_list(fields)).into()
}
None => {
- let struct_name = make::ext::ident_path("Self");
- let fields = make::record_expr_field_list(None);
- make::record_expr(struct_name, fields).into()
+ let struct_name = make.ident_path("Self");
+ let fields = make.record_expr_field_list([]);
+ make.record_expr(struct_name, fields).into()
}
};
- let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
+ let body =
+ make.block_expr(None::<ast::Stmt>, Some(expr)).indent(ast::edit::IndentLevel(1));
Some(body)
}
}
}
/// Generate a `Hash` impl based on the fields and members of the target type.
-fn gen_hash_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
- fn gen_hash_call(target: ast::Expr) -> ast::Stmt {
- let method = make::name_ref("hash");
- let arg = make::expr_path(make::ext::ident_path("state"));
- let expr = make::expr_method_call(target, method, make::arg_list(Some(arg))).into();
- make::expr_stmt(expr).into()
- }
+fn gen_hash_impl(make: &SyntaxFactory, adt: &ast::Adt) -> Option<ast::BlockExpr> {
+ let gen_hash_call = |target: ast::Expr| -> ast::Stmt {
+ let method = make.name_ref("hash");
+ let arg = make.expr_path(make.ident_path("state"));
+ let expr = make.expr_method_call(target, method, make.arg_list([arg])).into();
+ make.expr_stmt(expr).into()
+ };
let body = match adt {
// `Hash` cannot be derived for unions, so no default impl can be provided.
@@ -368,35 +371,35 @@ fn gen_hash_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
// => std::mem::discriminant(self).hash(state);
ast::Adt::Enum(_) => {
- let fn_name = make_discriminant()?;
+ let fn_name = make_discriminant(make)?;
- let arg = make::expr_path(make::ext::ident_path("self"));
- let fn_call = make::expr_call(fn_name, make::arg_list(Some(arg))).into();
+ let arg = make.expr_path(make.ident_path("self"));
+ let fn_call: ast::Expr = make.expr_call(fn_name, make.arg_list([arg])).into();
let stmt = gen_hash_call(fn_call);
- make::block_expr(Some(stmt), None).indent(ast::edit::IndentLevel(1))
+ make.block_expr([stmt], None).indent(ast::edit::IndentLevel(1))
}
ast::Adt::Struct(strukt) => match strukt.field_list() {
// => self.<field>.hash(state);
Some(ast::FieldList::RecordFieldList(field_list)) => {
let mut stmts = vec![];
for field in field_list.fields() {
- let base = make::expr_path(make::ext::ident_path("self"));
- let target = make::expr_field(base, &field.name()?.to_string());
+ let base = make.expr_path(make.ident_path("self"));
+ let target = make.expr_field(base, &field.name()?.to_string()).into();
stmts.push(gen_hash_call(target));
}
- make::block_expr(stmts, None).indent(ast::edit::IndentLevel(1))
+ make.block_expr(stmts, None).indent(ast::edit::IndentLevel(1))
}
// => self.<field_index>.hash(state);
Some(ast::FieldList::TupleFieldList(field_list)) => {
let mut stmts = vec![];
for (i, _) in field_list.fields().enumerate() {
- let base = make::expr_path(make::ext::ident_path("self"));
- let target = make::expr_field(base, &format!("{i}"));
+ let base = make.expr_path(make.ident_path("self"));
+ let target = make.expr_field(base, &format!("{i}")).into();
stmts.push(gen_hash_call(target));
}
- make::block_expr(stmts, None).indent(ast::edit::IndentLevel(1))
+ make.block_expr(stmts, None).indent(ast::edit::IndentLevel(1))
}
// No fields in the body means there's nothing to hash.
@@ -408,32 +411,37 @@ fn gen_hash_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
}
/// Generate a `PartialEq` impl based on the fields and members of the target type.
-fn gen_partial_eq(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast::BlockExpr> {
- fn gen_eq_chain(expr: Option<ast::Expr>, cmp: ast::Expr) -> Option<ast::Expr> {
+fn gen_partial_eq(
+ make: &SyntaxFactory,
+ adt: &ast::Adt,
+ trait_ref: Option<TraitRef<'_>>,
+) -> Option<ast::BlockExpr> {
+ let gen_eq_chain = |expr: Option<ast::Expr>, cmp: ast::Expr| -> Option<ast::Expr> {
match expr {
- Some(expr) => Some(make::expr_bin_op(expr, BinaryOp::LogicOp(LogicOp::And), cmp)),
+ Some(expr) => Some(make.expr_bin_op(expr, BinaryOp::LogicOp(LogicOp::And), cmp)),
None => Some(cmp),
}
- }
+ };
- fn gen_record_pat_field(field_name: &str, pat_name: &str) -> ast::RecordPatField {
- let pat = make::ext::simple_ident_pat(make::name(pat_name));
- let name_ref = make::name_ref(field_name);
- make::record_pat_field(name_ref, pat.into())
- }
+ let gen_record_pat_field = |field_name: &str, pat_name: &str| -> ast::RecordPatField {
+ let pat = make.ident_pat(false, false, make.name(pat_name));
+ let name_ref = make.name_ref(field_name);
+ make.record_pat_field(name_ref, pat.into())
+ };
- fn gen_record_pat(record_name: ast::Path, fields: Vec<ast::RecordPatField>) -> ast::RecordPat {
- let list = make::record_pat_field_list(fields, None);
- make::record_pat_with_fields(record_name, list)
- }
+ let gen_record_pat =
+ |record_name: ast::Path, fields: Vec<ast::RecordPatField>| -> ast::RecordPat {
+ let list = make.record_pat_field_list(fields, None);
+ make.record_pat_with_fields(record_name, list)
+ };
- fn gen_variant_path(variant: &ast::Variant) -> Option<ast::Path> {
- make::ext::path_from_idents(["Self", &variant.name()?.to_string()])
- }
+ let gen_variant_path = |variant: &ast::Variant| -> Option<ast::Path> {
+ make.path_from_idents(["Self", &variant.name()?.to_string()])
+ };
- fn gen_tuple_field(field_name: &str) -> ast::Pat {
- ast::Pat::IdentPat(make::ident_pat(false, false, make::name(field_name)))
- }
+ let gen_tuple_field = |field_name: &str| -> ast::Pat {
+ ast::Pat::IdentPat(make.ident_pat(false, false, make.name(field_name)))
+ };
// Check that self type and rhs type match. We don't know how to implement the method
// automatically otherwise.
@@ -451,14 +459,14 @@ fn gen_partial_eq(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast
ast::Adt::Enum(enum_) => {
// => std::mem::discriminant(self) == std::mem::discriminant(other)
- let lhs_name = make::expr_path(make::ext::ident_path("self"));
- let lhs = make::expr_call(make_discriminant()?, make::arg_list(Some(lhs_name.clone())))
- .into();
- let rhs_name = make::expr_path(make::ext::ident_path("other"));
- let rhs = make::expr_call(make_discriminant()?, make::arg_list(Some(rhs_name.clone())))
- .into();
+ let lhs_name = make.expr_path(make.ident_path("self"));
+ let lhs =
+ make.expr_call(make_discriminant(make)?, make.arg_list([lhs_name.clone()])).into();
+ let rhs_name = make.expr_path(make.ident_path("other"));
+ let rhs =
+ make.expr_call(make_discriminant(make)?, make.arg_list([rhs_name.clone()])).into();
let eq_check =
- make::expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
+ make.expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
let mut n_cases = 0;
let mut arms = vec![];
@@ -480,9 +488,9 @@ fn gen_partial_eq(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast
let r_name = &format!("r_{field_name}");
r_fields.push(gen_record_pat_field(&field_name, r_name));
- let lhs = make::expr_path(make::ext::ident_path(l_name));
- let rhs = make::expr_path(make::ext::ident_path(r_name));
- let cmp = make::expr_bin_op(
+ let lhs = make.expr_path(make.ident_path(l_name));
+ let rhs = make.expr_path(make.ident_path(r_name));
+ let cmp = make.expr_bin_op(
lhs,
BinaryOp::CmpOp(CmpOp::Eq { negated: false }),
rhs,
@@ -492,10 +500,10 @@ fn gen_partial_eq(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast
let left = gen_record_pat(gen_variant_path(&variant)?, l_fields);
let right = gen_record_pat(gen_variant_path(&variant)?, r_fields);
- let tuple = make::tuple_pat(vec![left.into(), right.into()]);
+ let tuple = make.tuple_pat(vec![left.into(), right.into()]);
if let Some(expr) = expr {
- arms.push(make::match_arm(tuple.into(), None, expr));
+ arms.push(make.match_arm(tuple.into(), None, expr));
}
}
@@ -513,9 +521,9 @@ fn gen_partial_eq(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast
let r_name = format!("r{field_name}");
r_fields.push(gen_tuple_field(&r_name));
- let lhs = make::expr_path(make::ext::ident_path(&l_name));
- let rhs = make::expr_path(make::ext::ident_path(&r_name));
- let cmp = make::expr_bin_op(
+ let lhs = make.expr_path(make.ident_path(&l_name));
+ let rhs = make.expr_path(make.ident_path(&r_name));
+ let cmp = make.expr_bin_op(
lhs,
BinaryOp::CmpOp(CmpOp::Eq { negated: false }),
rhs,
@@ -523,12 +531,12 @@ fn gen_partial_eq(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast
expr = gen_eq_chain(expr, cmp);
}
- let left = make::tuple_struct_pat(gen_variant_path(&variant)?, l_fields);
- let right = make::tuple_struct_pat(gen_variant_path(&variant)?, r_fields);
- let tuple = make::tuple_pat(vec![left.into(), right.into()]);
+ let left = make.tuple_struct_pat(gen_variant_path(&variant)?, l_fields);
+ let right = make.tuple_struct_pat(gen_variant_path(&variant)?, r_fields);
+ let tuple = make.tuple_pat(vec![left.into(), right.into()]);
if let Some(expr) = expr {
- arms.push(make::match_arm(tuple.into(), None, expr));
+ arms.push(make.match_arm(tuple.into(), None, expr));
}
}
None => continue,
@@ -542,57 +550,57 @@ fn gen_partial_eq(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast
// The fallback arm will be `_ => false,` if we've already gone through every case where the variants of self and other match,
// and `_ => std::mem::discriminant(self) == std::mem::discriminant(other),` otherwise.
if n_cases > 1 {
- let lhs = make::wildcard_pat().into();
+ let lhs = make.wildcard_pat().into();
let rhs = if arms_len == n_cases {
- make::expr_literal("false").into()
+ make.expr_literal("false").into()
} else {
eq_check
};
- arms.push(make::match_arm(lhs, None, rhs));
+ arms.push(make.match_arm(lhs, None, rhs));
}
- let match_target = make::expr_tuple([lhs_name, rhs_name]).into();
- let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
- make::expr_match(match_target, list).into()
+ let match_target = make.expr_tuple([lhs_name, rhs_name]).into();
+ let list = make.match_arm_list(arms).indent(ast::edit::IndentLevel(1));
+ make.expr_match(match_target, list).into()
}
};
- make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
+ make.block_expr(None::<ast::Stmt>, Some(expr)).indent(ast::edit::IndentLevel(1))
}
ast::Adt::Struct(strukt) => match strukt.field_list() {
Some(ast::FieldList::RecordFieldList(field_list)) => {
let mut expr = None;
for field in field_list.fields() {
- let lhs = make::expr_path(make::ext::ident_path("self"));
- let lhs = make::expr_field(lhs, &field.name()?.to_string());
- let rhs = make::expr_path(make::ext::ident_path("other"));
- let rhs = make::expr_field(rhs, &field.name()?.to_string());
+ let lhs = make.expr_path(make.ident_path("self"));
+ let lhs = make.expr_field(lhs, &field.name()?.to_string()).into();
+ let rhs = make.expr_path(make.ident_path("other"));
+ let rhs = make.expr_field(rhs, &field.name()?.to_string()).into();
let cmp =
- make::expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
+ make.expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
expr = gen_eq_chain(expr, cmp);
}
- make::block_expr(None, expr).indent(ast::edit::IndentLevel(1))
+ make.block_expr(None, expr).indent(ast::edit::IndentLevel(1))
}
Some(ast::FieldList::TupleFieldList(field_list)) => {
let mut expr = None;
for (i, _) in field_list.fields().enumerate() {
let idx = format!("{i}");
- let lhs = make::expr_path(make::ext::ident_path("self"));
- let lhs = make::expr_field(lhs, &idx);
- let rhs = make::expr_path(make::ext::ident_path("other"));
- let rhs = make::expr_field(rhs, &idx);
+ let lhs = make.expr_path(make.ident_path("self"));
+ let lhs = make.expr_field(lhs, &idx).into();
+ let rhs = make.expr_path(make.ident_path("other"));
+ let rhs = make.expr_field(rhs, &idx).into();
let cmp =
- make::expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
+ make.expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
expr = gen_eq_chain(expr, cmp);
}
- make::block_expr(None, expr).indent(ast::edit::IndentLevel(1))
+ make.block_expr(None::<ast::Stmt>, expr).indent(ast::edit::IndentLevel(1))
}
// No fields in the body means there's nothing to compare.
None => {
- let expr = make::expr_literal("true").into();
- make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
+ let expr = make.expr_literal("true").into();
+ make.block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
}
},
};
@@ -600,29 +608,33 @@ fn gen_partial_eq(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast
Some(body)
}
-fn gen_partial_ord(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast::BlockExpr> {
- fn gen_partial_eq_match(match_target: ast::Expr) -> Option<ast::Stmt> {
+fn gen_partial_ord(
+ make: &SyntaxFactory,
+ adt: &ast::Adt,
+ trait_ref: Option<TraitRef<'_>>,
+) -> Option<ast::BlockExpr> {
+ let gen_partial_eq_match = |match_target: ast::Expr| -> Option<ast::Stmt> {
let mut arms = vec![];
let variant_name =
- make::path_pat(make::ext::path_from_idents(["core", "cmp", "Ordering", "Equal"])?);
- let lhs = make::tuple_struct_pat(make::ext::path_from_idents(["Some"])?, [variant_name]);
- arms.push(make::match_arm(lhs.into(), None, make::expr_empty_block().into()));
+ make.path_pat(make.path_from_idents(["core", "cmp", "Ordering", "Equal"])?);
+ let lhs = make.tuple_struct_pat(make.path_from_idents(["Some"])?, [variant_name]);
+ arms.push(make.match_arm(lhs.into(), None, make.expr_empty_block().into()));
- arms.push(make::match_arm(
- make::ident_pat(false, false, make::name("ord")).into(),
+ arms.push(make.match_arm(
+ make.ident_pat(false, false, make.name("ord")).into(),
None,
- make::expr_return(Some(make::expr_path(make::ext::ident_path("ord")))),
+ make.expr_return(Some(make.expr_path(make.ident_path("ord")))).into(),
));
- let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
- Some(make::expr_stmt(make::expr_match(match_target, list).into()).into())
- }
+ let list = make.match_arm_list(arms).indent(ast::edit::IndentLevel(1));
+ Some(make.expr_stmt(make.expr_match(match_target, list).into()).into())
+ };
- fn gen_partial_cmp_call(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr {
- let rhs = make::expr_ref(rhs, false);
- let method = make::name_ref("partial_cmp");
- make::expr_method_call(lhs, method, make::arg_list(Some(rhs))).into()
- }
+ let gen_partial_cmp_call = |lhs: ast::Expr, rhs: ast::Expr| -> ast::Expr {
+ let rhs = make.expr_ref(rhs, false);
+ let method = make.name_ref("partial_cmp");
+ make.expr_method_call(lhs, method, make.arg_list([rhs])).into()
+ };
// Check that self type and rhs type match. We don't know how to implement the method
// automatically otherwise.
@@ -643,10 +655,10 @@ fn gen_partial_ord(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<as
Some(ast::FieldList::RecordFieldList(field_list)) => {
let mut exprs = vec![];
for field in field_list.fields() {
- let lhs = make::expr_path(make::ext::ident_path("self"));
- let lhs = make::expr_field(lhs, &field.name()?.to_string());
- let rhs = make::expr_path(make::ext::ident_path("other"));
- let rhs = make::expr_field(rhs, &field.name()?.to_string());
+ let lhs = make.expr_path(make.ident_path("self"));
+ let lhs = make.expr_field(lhs, &field.name()?.to_string()).into();
+ let rhs = make.expr_path(make.ident_path("other"));
+ let rhs = make.expr_field(rhs, &field.name()?.to_string()).into();
let ord = gen_partial_cmp_call(lhs, rhs);
exprs.push(ord);
}
@@ -656,17 +668,17 @@ fn gen_partial_ord(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<as
.into_iter()
.map(gen_partial_eq_match)
.collect::<Option<Vec<ast::Stmt>>>()?;
- make::block_expr(stmts, tail).indent(ast::edit::IndentLevel(1))
+ make.block_expr(stmts, tail).indent(ast::edit::IndentLevel(1))
}
Some(ast::FieldList::TupleFieldList(field_list)) => {
let mut exprs = vec![];
for (i, _) in field_list.fields().enumerate() {
let idx = format!("{i}");
- let lhs = make::expr_path(make::ext::ident_path("self"));
- let lhs = make::expr_field(lhs, &idx);
- let rhs = make::expr_path(make::ext::ident_path("other"));
- let rhs = make::expr_field(rhs, &idx);
+ let lhs = make.expr_path(make.ident_path("self"));
+ let lhs = make.expr_field(lhs, &idx).into();
+ let rhs = make.expr_path(make.ident_path("other"));
+ let rhs = make.expr_field(rhs, &idx).into();
let ord = gen_partial_cmp_call(lhs, rhs);
exprs.push(ord);
}
@@ -675,13 +687,13 @@ fn gen_partial_ord(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<as
.into_iter()
.map(gen_partial_eq_match)
.collect::<Option<Vec<ast::Stmt>>>()?;
- make::block_expr(stmts, tail).indent(ast::edit::IndentLevel(1))
+ make.block_expr(stmts, tail).indent(ast::edit::IndentLevel(1))
}
// No fields in the body means there's nothing to compare.
None => {
- let expr = make::expr_literal("true").into();
- make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
+ let expr = make.expr_literal("true").into();
+ make.block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
}
},
};
@@ -689,6 +701,6 @@ fn gen_partial_ord(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<as
Some(body)
}
-fn make_discriminant() -> Option<ast::Expr> {
- Some(make::expr_path(make::ext::path_from_idents(["core", "mem", "discriminant"])?))
+fn make_discriminant(make: &SyntaxFactory) -> Option<ast::Expr> {
+ Some(make.expr_path(make.path_from_idents(["core", "mem", "discriminant"])?))
}
diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs
index 1fb1fd4e57..4a94383ff4 100644
--- a/crates/ide-completion/src/completions.rs
+++ b/crates/ide-completion/src/completions.rs
@@ -34,7 +34,7 @@ use crate::{
CompletionContext, CompletionItem, CompletionItemKind,
context::{
DotAccess, ItemListKind, NameContext, NameKind, NameRefContext, NameRefKind,
- PathCompletionCtx, PathKind, PatternContext, TypeLocation, Visible,
+ PathCompletionCtx, PathKind, PatternContext, TypeAscriptionTarget, TypeLocation, Visible,
},
item::Builder,
render::{
@@ -45,7 +45,7 @@ use crate::{
macro_::render_macro,
pattern::{render_struct_pat, render_variant_pat},
render_expr, render_field, render_path_resolution, render_pattern_resolution,
- render_tuple_field,
+ render_tuple_field, render_type_keyword_snippet,
type_alias::{render_type_alias, render_type_alias_with_eq},
union_literal::render_union_literal,
},
@@ -104,6 +104,21 @@ impl Completions {
}
}
+ pub(crate) fn add_nameref_keywords_with_type_like(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx<'_>,
+ ) {
+ let mut add_keyword = |kw| {
+ render_type_keyword_snippet(ctx, path_ctx, kw, kw).add_to(self, ctx.db);
+ };
+ ["self::", "crate::"].into_iter().for_each(&mut add_keyword);
+
+ if ctx.depth_from_crate_root > 0 {
+ add_keyword("super::");
+ }
+ }
+
pub(crate) fn add_nameref_keywords(&mut self, ctx: &CompletionContext<'_>) {
["self", "crate"].into_iter().for_each(|kw| self.add_keyword(ctx, kw));
@@ -112,11 +127,19 @@ impl Completions {
}
}
- pub(crate) fn add_type_keywords(&mut self, ctx: &CompletionContext<'_>) {
- self.add_keyword_snippet(ctx, "fn", "fn($1)");
- self.add_keyword_snippet(ctx, "dyn", "dyn $0");
- self.add_keyword_snippet(ctx, "impl", "impl $0");
- self.add_keyword_snippet(ctx, "for", "for<$1>");
+ pub(crate) fn add_type_keywords(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx<'_>,
+ ) {
+ let mut add_keyword = |kw, snippet| {
+ render_type_keyword_snippet(ctx, path_ctx, kw, snippet).add_to(self, ctx.db);
+ };
+
+ add_keyword("fn", "fn($1)");
+ add_keyword("dyn", "dyn $0");
+ add_keyword("impl", "impl $0");
+ add_keyword("for", "for<$1>");
}
pub(crate) fn add_super_keyword(
@@ -747,6 +770,12 @@ pub(super) fn complete_name_ref(
field::complete_field_list_tuple_variant(acc, ctx, path_ctx);
}
TypeLocation::TypeAscription(ascription) => {
+ if let TypeAscriptionTarget::RetType { item: Some(item), .. } =
+ ascription
+ && path_ctx.required_thin_arrow().is_some()
+ {
+ keyword::complete_for_and_where(acc, ctx, &item.clone().into());
+ }
r#type::complete_ascribed_type(acc, ctx, path_ctx, ascription);
}
TypeLocation::GenericArg { .. }
diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs
index 92cbf411c1..885d1a3075 100644
--- a/crates/ide-completion/src/completions/env_vars.rs
+++ b/crates/ide-completion/src/completions/env_vars.rs
@@ -51,11 +51,10 @@ pub(crate) fn complete_cargo_env_vars(
original: &ast::String,
expanded: &ast::String,
) -> Option<()> {
- let is_in_env_expansion = ctx
- .sema
- .hir_file_for(&expanded.syntax().parent()?)
- .macro_file()
- .is_some_and(|it| it.is_env_or_option_env(ctx.sema.db));
+ let descends = ctx.sema.descend_into_macros_exact_with_file(original.syntax().clone());
+ let macro_file = descends.first()?.file_id.macro_file();
+
+ let is_in_env_expansion = macro_file.is_some_and(|it| it.is_env_or_option_env(ctx.sema.db));
if !is_in_env_expansion {
let call = macro_call_for_string_token(expanded)?;
let makro = ctx.sema.resolve_macro_call(&call)?;
@@ -117,6 +116,47 @@ fn main() {
}
#[test]
+ fn complete_in_expanded_env_macro() {
+ check_edit(
+ "CARGO_BIN_NAME",
+ r#"
+//- minicore: env
+macro_rules! bar {
+ ($($arg:tt)*) => { $($arg)* }
+}
+
+fn main() {
+ let foo = bar!(env!("CA$0"));
+}
+ "#,
+ r#"
+macro_rules! bar {
+ ($($arg:tt)*) => { $($arg)* }
+}
+
+fn main() {
+ let foo = bar!(env!("CARGO_BIN_NAME"));
+}
+ "#,
+ );
+
+ check_edit(
+ "CARGO_BIN_NAME",
+ r#"
+//- minicore: env, fmt
+fn main() {
+ let foo = format_args!("{}", env!("CA$0"));
+}
+ "#,
+ r#"
+fn main() {
+ let foo = format_args!("{}", env!("CARGO_BIN_NAME"));
+}
+ "#,
+ );
+ }
+
+ #[test]
fn doesnt_complete_in_random_strings() {
let fixture = r#"
fn main() {
diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs
index 20d01485a4..413830904a 100644
--- a/crates/ide-completion/src/completions/flyimport.rs
+++ b/crates/ide-completion/src/completions/flyimport.rs
@@ -135,7 +135,12 @@ pub(crate) fn import_on_the_fly_path(
Qualified::With { path, .. } => Some(path.clone()),
_ => None,
};
- let import_assets = import_assets_for_path(ctx, &potential_import_name, qualifier.clone())?;
+ let import_assets = import_assets_for_path(
+ ctx,
+ Some(&path_ctx.path),
+ &potential_import_name,
+ qualifier.clone(),
+ )?;
import_on_the_fly(
acc,
@@ -160,7 +165,7 @@ pub(crate) fn import_on_the_fly_pat(
}
let potential_import_name = import_name(ctx);
- let import_assets = import_assets_for_path(ctx, &potential_import_name, None)?;
+ let import_assets = import_assets_for_path(ctx, None, &potential_import_name, None)?;
import_on_the_fly_pat_(
acc,
@@ -402,6 +407,7 @@ fn import_name(ctx: &CompletionContext<'_>) -> String {
fn import_assets_for_path<'db>(
ctx: &CompletionContext<'db>,
+ path: Option<&ast::Path>,
potential_import_name: &str,
qualifier: Option<ast::Path>,
) -> Option<ImportAssets<'db>> {
@@ -411,6 +417,7 @@ fn import_assets_for_path<'db>(
let fuzzy_name_length = potential_import_name.len();
let mut assets_for_path = ImportAssets::for_fuzzy_path(
ctx.module,
+ path,
qualifier,
potential_import_name.to_owned(),
&ctx.sema,
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs
index 5b91e7c456..82baf885dd 100644
--- a/crates/ide-completion/src/completions/postfix.rs
+++ b/crates/ide-completion/src/completions/postfix.rs
@@ -310,7 +310,7 @@ pub(crate) fn complete_postfix(
if let ast::Expr::Literal(literal) = dot_receiver.clone()
&& let Some(literal_text) = ast::String::cast(literal.token())
{
- add_format_like_completions(acc, ctx, &dot_receiver_including_refs, cap, &literal_text);
+ add_format_like_completions(acc, ctx, dot_receiver, cap, &literal_text, semi);
}
postfix_snippet("return", "return expr", &format!("return {receiver_text}{semi}"))
@@ -402,7 +402,7 @@ fn receiver_accessor(receiver: &ast::Expr) -> ast::Expr {
.unwrap_or_else(|| receiver.clone())
}
-/// Given an `initial_element`, tries to expand it to include deref(s), and then references.
+/// Given an `initial_element`, tries to expand it to include deref(s), not(s), and then references.
/// Returns the expanded expressions, and the added prefix as a string
///
/// For example, if called with the `42` in `&&mut *42`, would return `(&&mut *42, "&&mut *")`.
@@ -410,22 +410,23 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, String) {
let mut resulting_element = initial_element.clone();
let mut prefix = String::new();
- let mut found_ref_or_deref = false;
-
- while let Some(parent_deref_element) =
- resulting_element.syntax().parent().and_then(ast::PrefixExpr::cast)
- && parent_deref_element.op_kind() == Some(ast::UnaryOp::Deref)
+ while let Some(parent) = resulting_element.syntax().parent().and_then(ast::PrefixExpr::cast)
+ && parent.op_kind() == Some(ast::UnaryOp::Deref)
{
- found_ref_or_deref = true;
- resulting_element = ast::Expr::from(parent_deref_element);
-
+ resulting_element = ast::Expr::from(parent);
prefix.insert(0, '*');
}
+ while let Some(parent) = resulting_element.syntax().parent().and_then(ast::PrefixExpr::cast)
+ && parent.op_kind() == Some(ast::UnaryOp::Not)
+ {
+ resulting_element = ast::Expr::from(parent);
+ prefix.insert(0, '!');
+ }
+
while let Some(parent_ref_element) =
resulting_element.syntax().parent().and_then(ast::RefExpr::cast)
{
- found_ref_or_deref = true;
let last_child_or_token = parent_ref_element.syntax().last_child_or_token();
prefix.insert_str(
0,
@@ -440,13 +441,6 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, String) {
resulting_element = ast::Expr::from(parent_ref_element);
}
- if !found_ref_or_deref {
- // If we do not find any ref/deref expressions, restore
- // all the progress of tree climbing
- prefix.clear();
- resulting_element = initial_element.clone();
- }
-
(resulting_element, prefix)
}
@@ -1133,6 +1127,27 @@ fn main() {
}
#[test]
+ fn postfix_completion_for_nots() {
+ check_edit(
+ "if",
+ r#"
+fn main() {
+ let is_foo = true;
+ !is_foo.$0
+}
+"#,
+ r#"
+fn main() {
+ let is_foo = true;
+ if !is_foo {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
fn postfix_completion_for_unsafe() {
postfix_completion_for_block("unsafe");
}
@@ -1287,34 +1302,42 @@ fn main() {
check_edit(
"panic",
r#"fn main() { "Panic with {a}".$0 }"#,
- r#"fn main() { panic!("Panic with {a}") }"#,
+ r#"fn main() { panic!("Panic with {a}"); }"#,
);
check_edit(
"println",
r#"fn main() { "{ 2+2 } { SomeStruct { val: 1, other: 32 } :?}".$0 }"#,
- r#"fn main() { println!("{} {:?}", 2+2, SomeStruct { val: 1, other: 32 }) }"#,
+ r#"fn main() { println!("{} {:?}", 2+2, SomeStruct { val: 1, other: 32 }); }"#,
);
check_edit(
"loge",
r#"fn main() { "{2+2}".$0 }"#,
- r#"fn main() { log::error!("{}", 2+2) }"#,
+ r#"fn main() { log::error!("{}", 2+2); }"#,
);
check_edit(
"logt",
r#"fn main() { "{2+2}".$0 }"#,
- r#"fn main() { log::trace!("{}", 2+2) }"#,
+ r#"fn main() { log::trace!("{}", 2+2); }"#,
);
check_edit(
"logd",
r#"fn main() { "{2+2}".$0 }"#,
- r#"fn main() { log::debug!("{}", 2+2) }"#,
+ r#"fn main() { log::debug!("{}", 2+2); }"#,
+ );
+ check_edit(
+ "logi",
+ r#"fn main() { "{2+2}".$0 }"#,
+ r#"fn main() { log::info!("{}", 2+2); }"#,
+ );
+ check_edit(
+ "logw",
+ r#"fn main() { "{2+2}".$0 }"#,
+ r#"fn main() { log::warn!("{}", 2+2); }"#,
);
- check_edit("logi", r#"fn main() { "{2+2}".$0 }"#, r#"fn main() { log::info!("{}", 2+2) }"#);
- check_edit("logw", r#"fn main() { "{2+2}".$0 }"#, r#"fn main() { log::warn!("{}", 2+2) }"#);
check_edit(
"loge",
r#"fn main() { "{2+2}".$0 }"#,
- r#"fn main() { log::error!("{}", 2+2) }"#,
+ r#"fn main() { log::error!("{}", 2+2); }"#,
);
}
diff --git a/crates/ide-completion/src/completions/postfix/format_like.rs b/crates/ide-completion/src/completions/postfix/format_like.rs
index 7faa113959..85a8899fd1 100644
--- a/crates/ide-completion/src/completions/postfix/format_like.rs
+++ b/crates/ide-completion/src/completions/postfix/format_like.rs
@@ -40,6 +40,7 @@ static KINDS: &[(&str, &str)] = &[
("logw", "log::warn!"),
("loge", "log::error!"),
];
+static SNIPPET_RETURNS_NON_UNIT: &[&str] = &["format"];
pub(crate) fn add_format_like_completions(
acc: &mut Completions,
@@ -47,6 +48,7 @@ pub(crate) fn add_format_like_completions(
dot_receiver: &ast::Expr,
cap: SnippetCap,
receiver_text: &ast::String,
+ semi: &str,
) {
let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, dot_receiver) {
Some(it) => it,
@@ -64,10 +66,11 @@ pub(crate) fn add_format_like_completions(
let exprs = with_placeholders(exprs);
for (label, macro_name) in KINDS {
+ let semi = if SNIPPET_RETURNS_NON_UNIT.contains(label) { "" } else { semi };
let snippet = if exprs.is_empty() {
- format!(r#"{macro_name}({out})"#)
+ format!(r#"{macro_name}({out}){semi}"#)
} else {
- format!(r#"{}({}, {})"#, macro_name, out, exprs.join(", "))
+ format!(r#"{}({}, {}){semi}"#, macro_name, out, exprs.join(", "))
};
postfix_snippet(label, macro_name, &snippet).add_to(acc, ctx.db);
diff --git a/crates/ide-completion/src/completions/ra_fixture.rs b/crates/ide-completion/src/completions/ra_fixture.rs
index b44c90757f..5a8881edc7 100644
--- a/crates/ide-completion/src/completions/ra_fixture.rs
+++ b/crates/ide-completion/src/completions/ra_fixture.rs
@@ -22,7 +22,7 @@ pub(crate) fn complete_ra_fixture(
&ctx.sema,
original.clone(),
expanded,
- ctx.config.minicore,
+ &ctx.config.ra_fixture,
&mut |_| {},
)?;
let (virtual_file_id, virtual_offset) = analysis.map_offset_down(ctx.position.offset)?;
diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs
index 8ff9c3258e..e2125a9678 100644
--- a/crates/ide-completion/src/completions/type.rs
+++ b/crates/ide-completion/src/completions/type.rs
@@ -206,8 +206,8 @@ pub(crate) fn complete_type_path(
_ => {}
};
- acc.add_nameref_keywords_with_colon(ctx);
- acc.add_type_keywords(ctx);
+ acc.add_nameref_keywords_with_type_like(ctx, path_ctx);
+ acc.add_type_keywords(ctx, path_ctx);
ctx.process_all_names(&mut |name, def, doc_aliases| {
if scope_def_applicable(def) {
acc.add_path_resolution(ctx, path_ctx, name, def, doc_aliases);
@@ -230,14 +230,14 @@ pub(crate) fn complete_ascribed_type(
TypeAscriptionTarget::Let(pat) | TypeAscriptionTarget::FnParam(pat) => {
ctx.sema.type_of_pat(pat.as_ref()?)
}
- TypeAscriptionTarget::Const(exp) | TypeAscriptionTarget::RetType(exp) => {
+ TypeAscriptionTarget::Const(exp) | TypeAscriptionTarget::RetType { body: exp, .. } => {
ctx.sema.type_of_expr(exp.as_ref()?)
}
}?
.adjusted();
if !ty.is_unknown() {
let ty_string = ty.display_source_code(ctx.db, ctx.module.into(), true).ok()?;
- acc.add(render_type_inference(ty_string, ctx));
+ acc.add(render_type_inference(ty_string, ctx, path_ctx));
}
None
}
diff --git a/crates/ide-completion/src/config.rs b/crates/ide-completion/src/config.rs
index 5623257a27..80c1572972 100644
--- a/crates/ide-completion/src/config.rs
+++ b/crates/ide-completion/src/config.rs
@@ -6,8 +6,9 @@
use hir::FindPathConfig;
use ide_db::{
- MiniCore, SnippetCap,
+ SnippetCap,
imports::{import_assets::ImportPathConfig, insert_use::InsertUseConfig},
+ ra_fixture::RaFixtureConfig,
};
use crate::{CompletionFieldsToResolve, snippet::Snippet};
@@ -35,7 +36,7 @@ pub struct CompletionConfig<'a> {
pub fields_to_resolve: CompletionFieldsToResolve,
pub exclude_flyimport: Vec<(String, AutoImportExclusionType)>,
pub exclude_traits: &'a [String],
- pub minicore: MiniCore<'a>,
+ pub ra_fixture: RaFixtureConfig<'a>,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 4fd0348156..ae3f717607 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -6,7 +6,7 @@ mod tests;
use std::iter;
-use base_db::RootQueryDb as _;
+use base_db::toolchain_channel;
use hir::{
DisplayTarget, HasAttrs, InFile, Local, ModuleDef, ModuleSource, Name, PathResolution,
ScopeDef, Semantics, SemanticsScope, Symbol, Type, TypeInfo,
@@ -102,6 +102,28 @@ impl PathCompletionCtx<'_> {
}
)
}
+
+ pub(crate) fn required_thin_arrow(&self) -> Option<(&'static str, TextSize)> {
+ let PathKind::Type {
+ location:
+ TypeLocation::TypeAscription(TypeAscriptionTarget::RetType {
+ item: Some(ref fn_item),
+ ..
+ }),
+ } = self.kind
+ else {
+ return None;
+ };
+ if fn_item.ret_type().is_some_and(|it| it.thin_arrow_token().is_some()) {
+ return None;
+ }
+ let ret_type = fn_item.ret_type().and_then(|it| it.ty());
+ match (ret_type, fn_item.param_list()) {
+ (Some(ty), _) => Some(("-> ", ty.syntax().text_range().start())),
+ (None, Some(param)) => Some((" ->", param.syntax().text_range().end())),
+ (None, None) => None,
+ }
+ }
}
/// The kind of path we are completing right now.
@@ -231,7 +253,7 @@ impl TypeLocation {
pub(crate) enum TypeAscriptionTarget {
Let(Option<ast::Pat>),
FnParam(Option<ast::Pat>),
- RetType(Option<ast::Expr>),
+ RetType { body: Option<ast::Expr>, item: Option<ast::Fn> },
Const(Option<ast::Expr>),
}
@@ -715,7 +737,7 @@ impl<'db> CompletionContext<'db> {
// actual completion.
let file_with_fake_ident = {
let (_, edition) = editioned_file_id.unpack(db);
- let parse = db.parse(editioned_file_id);
+ let parse = editioned_file_id.parse(db);
parse.reparse(TextRange::empty(offset), COMPLETION_MARKER, edition).tree()
};
@@ -768,7 +790,7 @@ impl<'db> CompletionContext<'db> {
let containing_function = scope.containing_function();
let edition = krate.edition(db);
- let toolchain = db.toolchain_channel(krate.into());
+ let toolchain = toolchain_channel(db, krate.into());
// `toolchain == None` means we're in some detached files. Since we have no information on
// the toolchain being used, let's just allow unstable items to be listed.
let is_nightly = matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None);
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index a3494b964f..d8f160c100 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -827,6 +827,19 @@ fn expected_type_and_name<'db>(
.map(|c| (Some(c.return_type()), None))
.unwrap_or((None, None))
},
+ ast::Variant(it) => {
+ let is_simple_field = |field: ast::TupleField| {
+ let Some(ty) = field.ty() else { return true };
+ matches!(ty, ast::Type::PathType(_)) && ty.generic_arg_list().is_none()
+ };
+ let is_simple_variant = matches!(
+ it.field_list(),
+ Some(ast::FieldList::TupleFieldList(list))
+ if list.syntax().children_with_tokens().all(|it| it.kind() != T![,])
+ && list.fields().next().is_none_or(is_simple_field)
+ );
+ (None, it.name().filter(|_| is_simple_variant).map(NameOrNameRef::Name))
+ },
ast::Stmt(_) => (None, None),
ast::Item(_) => (None, None),
_ => {
@@ -1265,15 +1278,14 @@ fn classify_name_ref<'db>(
let original = ast::Static::cast(name.syntax().parent()?)?;
TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
},
- ast::RetType(it) => {
- it.thin_arrow_token()?;
+ ast::RetType(_) => {
let parent = match ast::Fn::cast(parent.parent()?) {
Some(it) => it.param_list(),
None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
};
let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?;
- TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! {
+ let body = match_ast! {
match parent {
ast::ClosureExpr(it) => {
it.body()
@@ -1283,7 +1295,9 @@ fn classify_name_ref<'db>(
},
_ => return None,
}
- }))
+ };
+ let item = ast::Fn::cast(parent);
+ TypeLocation::TypeAscription(TypeAscriptionTarget::RetType { body, item })
},
ast::Param(it) => {
it.colon_token()?;
diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs
index 1a9139d855..e6dd1d37d9 100644
--- a/crates/ide-completion/src/item.rs
+++ b/crates/ide-completion/src/item.rs
@@ -450,6 +450,7 @@ impl CompletionItem {
ref_match: None,
imports_to_add: Default::default(),
doc_aliases: vec![],
+ adds_text: None,
edition,
}
}
@@ -480,12 +481,13 @@ impl CompletionItem {
/// A helper to make `CompletionItem`s.
#[must_use]
-#[derive(Clone)]
+#[derive(Debug, Clone)]
pub(crate) struct Builder {
source_range: TextRange,
imports_to_add: SmallVec<[LocatedImport; 1]>,
trait_name: Option<SmolStr>,
doc_aliases: Vec<SmolStr>,
+ adds_text: Option<SmolStr>,
label: SmolStr,
insert_text: Option<String>,
is_snippet: bool,
@@ -526,9 +528,16 @@ impl Builder {
let insert_text = self.insert_text.unwrap_or_else(|| label.to_string());
let mut detail_left = None;
+ let mut to_detail_left = |args: fmt::Arguments<'_>| {
+ let detail_left = detail_left.get_or_insert_with(String::new);
+ if !detail_left.is_empty() {
+ detail_left.push(' ');
+ }
+ format_to!(detail_left, "{args}")
+ };
if !self.doc_aliases.is_empty() {
let doc_aliases = self.doc_aliases.iter().join(", ");
- detail_left = Some(format!("(alias {doc_aliases})"));
+ to_detail_left(format_args!("(alias {doc_aliases})"));
let lookup_doc_aliases = self
.doc_aliases
.iter()
@@ -548,22 +557,17 @@ impl Builder {
lookup = format_smolstr!("{lookup}{lookup_doc_aliases}");
}
}
+ if let Some(adds_text) = self.adds_text {
+ to_detail_left(format_args!("(adds {})", adds_text.trim()));
+ }
if let [import_edit] = &*self.imports_to_add {
// snippets can have multiple imports, but normal completions only have up to one
- let detail_left = detail_left.get_or_insert_with(String::new);
- format_to!(
- detail_left,
- "{}(use {})",
- if detail_left.is_empty() { "" } else { " " },
+ to_detail_left(format_args!(
+ "(use {})",
import_edit.import_path.display(db, self.edition)
- );
+ ));
} else if let Some(trait_name) = self.trait_name {
- let detail_left = detail_left.get_or_insert_with(String::new);
- format_to!(
- detail_left,
- "{}(as {trait_name})",
- if detail_left.is_empty() { "" } else { " " },
- );
+ to_detail_left(format_args!("(as {trait_name})"));
}
let text_edit = match self.text_edit {
@@ -613,6 +617,10 @@ impl Builder {
self.doc_aliases = doc_aliases;
self
}
+ pub(crate) fn adds_text(&mut self, adds_text: SmolStr) -> &mut Builder {
+ self.adds_text = Some(adds_text);
+ self
+ }
pub(crate) fn insert_text(&mut self, insert_text: impl Into<String>) -> &mut Builder {
self.insert_text = Some(insert_text.into());
self
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index d77e793295..b946441991 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -220,13 +220,15 @@ pub(crate) fn render_tuple_field(
pub(crate) fn render_type_inference(
ty_string: String,
ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx<'_>,
) -> CompletionItem {
let mut builder = CompletionItem::new(
CompletionItemKind::InferredType,
ctx.source_range(),
- ty_string,
+ &ty_string,
ctx.edition,
);
+ adds_ret_type_arrow(ctx, path_ctx, &mut builder, ty_string);
builder.set_relevance(CompletionRelevance {
type_match: Some(CompletionRelevanceTypeMatch::Exact),
exact_name_match: true,
@@ -425,11 +427,10 @@ fn render_resolution_path(
let config = completion.config;
let requires_import = import_to_add.is_some();
- let name = local_name.display_no_db(ctx.completion.edition).to_smolstr();
+ let name = local_name.display(db, completion.edition).to_smolstr();
let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution);
- if local_name.needs_escape(completion.edition) {
- item.insert_text(local_name.display_no_db(completion.edition).to_smolstr());
- }
+ let mut insert_text = name.clone();
+
// Add `<>` for generic types
let type_path_no_ty_args = matches!(
path_ctx,
@@ -446,12 +447,14 @@ fn render_resolution_path(
if has_non_default_type_params {
cov_mark::hit!(inserts_angle_brackets_for_generics);
+ insert_text = format_smolstr!("{insert_text}<$0>");
item.lookup_by(name.clone())
.label(SmolStr::from_iter([&name, "<…>"]))
.trigger_call_info()
- .insert_snippet(cap, format!("{}<$0>", local_name.display(db, completion.edition)));
+ .insert_snippet(cap, ""); // set is snippet
}
}
+ adds_ret_type_arrow(completion, path_ctx, &mut item, insert_text.into());
let mut set_item_relevance = |ty: Type<'_>| {
if !ty.is_unknown() {
@@ -577,6 +580,48 @@ fn scope_def_is_deprecated(ctx: &RenderContext<'_>, resolution: ScopeDef) -> boo
}
}
+pub(crate) fn render_type_keyword_snippet(
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx<'_>,
+ label: &str,
+ snippet: &str,
+) -> Builder {
+ let source_range = ctx.source_range();
+ let mut item =
+ CompletionItem::new(CompletionItemKind::Keyword, source_range, label, ctx.edition);
+
+ let insert_text = if !snippet.contains('$') {
+ item.insert_text(snippet);
+ snippet
+ } else if let Some(cap) = ctx.config.snippet_cap {
+ item.insert_snippet(cap, snippet);
+ snippet
+ } else {
+ label
+ };
+
+ adds_ret_type_arrow(ctx, path_ctx, &mut item, insert_text.to_owned());
+ item
+}
+
+fn adds_ret_type_arrow(
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx<'_>,
+ item: &mut Builder,
+ insert_text: String,
+) {
+ if let Some((arrow, at)) = path_ctx.required_thin_arrow() {
+ let mut edit = TextEdit::builder();
+
+ edit.insert(at, arrow.to_owned());
+ edit.replace(ctx.source_range(), insert_text);
+
+ item.text_edit(edit.finish()).adds_text(SmolStr::new_static(arrow));
+ } else {
+ item.insert_text(insert_text);
+ }
+}
+
// FIXME: This checks types without possible coercions which some completions might want to do
fn match_types(
ctx: &CompletionContext<'_>,
@@ -3045,6 +3090,57 @@ fn main() {
}
#[test]
+ fn enum_variant_name_exact_match_is_high_priority() {
+ check_relevance(
+ r#"
+struct Other;
+struct String;
+enum Foo {
+ String($0)
+}
+ "#,
+ expect![[r#"
+ st String String [name]
+ en Foo Foo []
+ st Other Other []
+ sp Self Foo []
+ "#]],
+ );
+
+ check_relevance(
+ r#"
+struct Other;
+struct String;
+enum Foo {
+ String(String, $0)
+}
+ "#,
+ expect![[r#"
+ en Foo Foo []
+ st Other Other []
+ sp Self Foo []
+ st String String []
+ "#]],
+ );
+
+ check_relevance(
+ r#"
+struct Other;
+struct Vec<T>(T);
+enum Foo {
+ Vec(Vec<$0>)
+}
+ "#,
+ expect![[r#"
+ en Foo Foo []
+ st Other Other []
+ sp Self Foo []
+ st Vec<…> Vec<{unknown}> []
+ "#]],
+ );
+ }
+
+ #[test]
fn postfix_inexact_match_is_low_priority() {
cov_mark::check!(postfix_inexact_match_is_low_priority);
check_relevance_for_kinds(
diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs
index cb1adfcfb6..02e299b2a9 100644
--- a/crates/ide-completion/src/tests.rs
+++ b/crates/ide-completion/src/tests.rs
@@ -29,8 +29,9 @@ use expect_test::Expect;
use hir::db::HirDatabase;
use hir::{PrefixKind, setup_tracing};
use ide_db::{
- FilePosition, MiniCore, RootDatabase, SnippetCap,
+ FilePosition, RootDatabase, SnippetCap,
imports::insert_use::{ImportGranularity, InsertUseConfig},
+ ra_fixture::RaFixtureConfig,
};
use itertools::Itertools;
use stdx::{format_to, trim_indent};
@@ -90,7 +91,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig<'_> = CompletionConfig {
exclude_traits: &[],
enable_auto_await: true,
enable_auto_iter: true,
- minicore: MiniCore::default(),
+ ra_fixture: RaFixtureConfig::default(),
};
pub(crate) fn completion_list(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> String {
diff --git a/crates/ide-completion/src/tests/item.rs b/crates/ide-completion/src/tests/item.rs
index 61a9da8c27..2f032c3f4c 100644
--- a/crates/ide-completion/src/tests/item.rs
+++ b/crates/ide-completion/src/tests/item.rs
@@ -116,8 +116,23 @@ fn completes_where() {
check_with_base_items(
r"fn func() $0",
expect![[r#"
- kw where
- "#]],
+ en Enum (adds ->) Enum
+ ma makro!(…) macro_rules! makro
+ md module (adds ->)
+ st Record (adds ->) Record
+ st Tuple (adds ->) Tuple
+ st Unit (adds ->) Unit
+ tt Trait (adds ->)
+ un Union (adds ->) Union
+ bt u32 (adds ->) u32
+ kw crate:: (adds ->)
+ kw dyn (adds ->)
+ kw fn (adds ->)
+ kw for (adds ->)
+ kw impl (adds ->)
+ kw self:: (adds ->)
+ kw where
+ "#]],
);
check_with_base_items(
r"enum Enum $0",
@@ -244,6 +259,19 @@ impl Copy for S where $0
}
#[test]
+fn fn_item_where_kw() {
+ check_edit(
+ "where",
+ r#"
+fn foo() $0
+"#,
+ r#"
+fn foo() where $0
+"#,
+ );
+}
+
+#[test]
fn test_is_not_considered_macro() {
check_with_base_items(
r#"
diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs
index 7c6b7370aa..7d4a7fe6b8 100644
--- a/crates/ide-completion/src/tests/type_pos.rs
+++ b/crates/ide-completion/src/tests/type_pos.rs
@@ -1,7 +1,7 @@
//! Completion tests for type position.
use expect_test::expect;
-use crate::tests::{check, check_with_base_items};
+use crate::tests::{check, check_edit, check_with_base_items};
#[test]
fn record_field_ty() {
@@ -94,6 +94,230 @@ fn x<'lt, T, const C: usize>() -> $0
}
#[test]
+fn fn_return_type_missing_thin_arrow() {
+ check_with_base_items(
+ r#"
+fn x() u$0
+"#,
+ expect![[r#"
+ en Enum (adds ->) Enum
+ ma makro!(…) macro_rules! makro
+ md module (adds ->)
+ st Record (adds ->) Record
+ st Tuple (adds ->) Tuple
+ st Unit (adds ->) Unit
+ tt Trait (adds ->)
+ un Union (adds ->) Union
+ bt u32 (adds ->) u32
+ kw crate:: (adds ->)
+ kw dyn (adds ->)
+ kw fn (adds ->)
+ kw for (adds ->)
+ kw impl (adds ->)
+ kw self:: (adds ->)
+ kw where
+ "#]],
+ );
+
+ check_with_base_items(
+ r#"
+fn x() $0
+"#,
+ expect![[r#"
+ en Enum (adds ->) Enum
+ ma makro!(…) macro_rules! makro
+ md module (adds ->)
+ st Record (adds ->) Record
+ st Tuple (adds ->) Tuple
+ st Unit (adds ->) Unit
+ tt Trait (adds ->)
+ un Union (adds ->) Union
+ bt u32 (adds ->) u32
+ kw crate:: (adds ->)
+ kw dyn (adds ->)
+ kw fn (adds ->)
+ kw for (adds ->)
+ kw impl (adds ->)
+ kw self:: (adds ->)
+ kw where
+ "#]],
+ );
+}
+
+#[test]
+fn fn_return_type_missing_thin_arrow_path_completion() {
+ check_edit(
+ "u32",
+ r#"
+fn foo() u$0
+"#,
+ r#"
+fn foo() -> u32
+"#,
+ );
+
+ check_edit(
+ "u32",
+ r#"
+fn foo() $0
+"#,
+ r#"
+fn foo() -> u32
+"#,
+ );
+
+ check_edit(
+ "Num",
+ r#"
+type Num = u32;
+fn foo() $0
+"#,
+ r#"
+type Num = u32;
+fn foo() -> Num
+"#,
+ );
+
+ check_edit(
+ "impl",
+ r#"
+fn foo() $0
+"#,
+ r#"
+fn foo() -> impl $0
+"#,
+ );
+
+ check_edit(
+ "foo",
+ r#"
+mod foo { pub type Num = u32; }
+fn foo() $0
+"#,
+ r#"
+mod foo { pub type Num = u32; }
+fn foo() -> foo
+"#,
+ );
+
+ check_edit(
+ "crate::",
+ r#"
+mod foo { pub type Num = u32; }
+fn foo() $0
+"#,
+ r#"
+mod foo { pub type Num = u32; }
+fn foo() -> crate::
+"#,
+ );
+
+ check_edit(
+ "Num",
+ r#"
+mod foo { pub type Num = u32; }
+fn foo() foo::$0
+"#,
+ r#"
+mod foo { pub type Num = u32; }
+fn foo() -> foo::Num
+"#,
+ );
+
+ // no spaces, test edit order
+ check_edit(
+ "foo",
+ r#"
+mod foo { pub type Num = u32; }
+fn foo()$0
+"#,
+ r#"
+mod foo { pub type Num = u32; }
+fn foo() ->foo
+"#,
+ );
+}
+
+#[test]
+fn fn_return_type_missing_thin_arrow_path_completion_with_generic_args() {
+ check_edit(
+ "Foo",
+ r#"
+struct Foo<T>(T);
+fn foo() F$0
+"#,
+ r#"
+struct Foo<T>(T);
+fn foo() -> Foo<$0>
+"#,
+ );
+
+ check_edit(
+ "Foo",
+ r#"
+struct Foo<T>(T);
+fn foo() $0
+"#,
+ r#"
+struct Foo<T>(T);
+fn foo() -> Foo<$0>
+"#,
+ );
+
+ check_edit(
+ "Foo",
+ r#"
+type Foo<T> = T;
+fn foo() $0
+"#,
+ r#"
+type Foo<T> = T;
+fn foo() -> Foo<$0>
+"#,
+ );
+}
+
+#[test]
+fn fn_return_type_missing_thin_arrow_infer_ref_type() {
+ check_with_base_items(
+ r#"
+fn x() u$0 {&2u32}
+"#,
+ expect![[r#"
+ en Enum (adds ->) Enum
+ ma makro!(…) macro_rules! makro
+ md module (adds ->)
+ st Record (adds ->) Record
+ st Tuple (adds ->) Tuple
+ st Unit (adds ->) Unit
+ tt Trait (adds ->)
+ un Union (adds ->) Union
+ bt u32 (adds ->) u32
+ it &u32 (adds ->)
+ kw crate:: (adds ->)
+ kw dyn (adds ->)
+ kw fn (adds ->)
+ kw for (adds ->)
+ kw impl (adds ->)
+ kw self:: (adds ->)
+ kw where
+ "#]],
+ );
+
+ check_edit(
+ "&u32",
+ r#"
+struct Foo<T>(T);
+fn x() u$0 {&2u32}
+"#,
+ r#"
+struct Foo<T>(T);
+fn x() -> &u32 {&2u32}
+"#,
+ );
+}
+
+#[test]
fn fn_return_type_after_reference() {
check_with_base_items(
r#"
diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs
index 1c48527027..2f696d07e2 100644
--- a/crates/ide-db/src/imports/import_assets.rs
+++ b/crates/ide-db/src/imports/import_assets.rs
@@ -8,8 +8,10 @@ use hir::{
SemanticsScope, Trait, Type,
};
use itertools::Itertools;
+use parser::SyntaxKind;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{SmallVec, smallvec};
+use stdx::never;
use syntax::{
AstNode, SyntaxNode,
ast::{self, HasName, make},
@@ -61,6 +63,103 @@ pub struct TraitImportCandidate<'db> {
pub assoc_item_name: NameToImport,
}
+#[derive(Debug)]
+struct PathDefinitionKinds {
+ modules: bool,
+ bang_macros: bool,
+ // FIXME: Distinguish between attr and derive macros.
+ attr_macros: bool,
+ value_namespace: bool,
+ type_namespace: bool,
+ /// Unions, record structs and record enum variants. Note that unions and structs
+ /// can also be enabled by `type_namespace` (either works).
+ records: bool,
+ /// Tuple structs and tuple enum variants. Both are also controlled by `value_namespace`
+ /// (either works). Structs are also covered by `type_namespace`.
+ tuple_structs: bool,
+ /// Structs, enum variants and consts.
+ structs_and_consts: bool,
+}
+
+impl PathDefinitionKinds {
+ const ALL_DISABLED: Self = Self {
+ modules: false,
+ bang_macros: false,
+ attr_macros: false,
+ value_namespace: false,
+ type_namespace: false,
+ records: false,
+ tuple_structs: false,
+ structs_and_consts: false,
+ };
+ const ALL_ENABLED: Self = Self {
+ modules: true,
+ bang_macros: true,
+ attr_macros: true,
+ value_namespace: true,
+ type_namespace: true,
+ records: true,
+ tuple_structs: true,
+ structs_and_consts: true,
+ };
+ // While a path pattern only allows unit structs/enum variants, parentheses/braces may be written later.
+ const PATH_PAT_KINDS: PathDefinitionKinds =
+ Self { structs_and_consts: true, bang_macros: true, ..Self::ALL_DISABLED };
+
+ fn deduce_from_path(path: &ast::Path, exact: bool) -> Self {
+ let Some(parent) = path.syntax().parent() else {
+ return Self::ALL_ENABLED;
+ };
+ let mut result = match parent.kind() {
+ // When there are following segments, it can be a type (with a method) or a module.
+ // Technically, a type can only have up to 2 segments following (an associated type
+ // then a method), but most paths are shorter than 3 segments anyway, and we'll also
+ // validate that the following segment resolve.
+ SyntaxKind::PATH => Self { modules: true, type_namespace: true, ..Self::ALL_DISABLED },
+ SyntaxKind::MACRO_CALL => Self { bang_macros: true, ..Self::ALL_DISABLED },
+ SyntaxKind::META => Self { attr_macros: true, ..Self::ALL_DISABLED },
+ SyntaxKind::USE_TREE => {
+ if ast::UseTree::cast(parent).unwrap().use_tree_list().is_some() {
+ Self { modules: true, ..Self::ALL_DISABLED }
+ } else {
+ Self::ALL_ENABLED
+ }
+ }
+ SyntaxKind::VISIBILITY => Self { modules: true, ..Self::ALL_DISABLED },
+ SyntaxKind::ASM_SYM => Self { value_namespace: true, ..Self::ALL_DISABLED },
+ // `bang_macros = true` because you can still type the `!`.
+ // `type_namespace = true` because you can type `::method()`.
+ SyntaxKind::PATH_EXPR => Self {
+ value_namespace: true,
+ bang_macros: true,
+ type_namespace: true,
+ ..Self::ALL_DISABLED
+ },
+ SyntaxKind::PATH_PAT => Self::PATH_PAT_KINDS,
+ SyntaxKind::TUPLE_STRUCT_PAT => {
+ Self { tuple_structs: true, bang_macros: true, ..Self::ALL_DISABLED }
+ }
+ SyntaxKind::RECORD_EXPR | SyntaxKind::RECORD_PAT => {
+ Self { records: true, bang_macros: true, ..Self::ALL_DISABLED }
+ }
+ SyntaxKind::PATH_TYPE => {
+ Self { type_namespace: true, bang_macros: true, ..Self::ALL_DISABLED }
+ }
+ SyntaxKind::ERROR => Self::ALL_ENABLED,
+ _ => {
+ never!("this match should cover all possible parents of paths\nparent={parent:#?}");
+ Self::ALL_ENABLED
+ }
+ };
+ if !exact {
+ // When the path is not required to be exact, there could be additional segments to be filled.
+ result.modules = true;
+ result.type_namespace = true;
+ }
+ result
+ }
+}
+
/// Path import for a given name, qualified or not.
#[derive(Debug)]
pub struct PathImportCandidate {
@@ -70,6 +169,8 @@ pub struct PathImportCandidate {
pub name: NameToImport,
/// Potentially more segments that should resolve in the candidate.
pub after: Vec<Name>,
+ /// The kind of definitions that we can include.
+ definition_kinds: PathDefinitionKinds,
}
/// A name that will be used during item lookups.
@@ -168,13 +269,14 @@ impl<'db> ImportAssets<'db> {
pub fn for_fuzzy_path(
module_with_candidate: Module,
+ path: Option<&ast::Path>,
qualifier: Option<ast::Path>,
fuzzy_name: String,
sema: &Semantics<'db, RootDatabase>,
candidate_node: SyntaxNode,
) -> Option<Self> {
Some(Self {
- import_candidate: ImportCandidate::for_fuzzy_path(qualifier, fuzzy_name, sema)?,
+ import_candidate: ImportCandidate::for_fuzzy_path(path, qualifier, fuzzy_name, sema)?,
module_with_candidate,
candidate_node,
})
@@ -394,6 +496,9 @@ fn path_applicable_imports(
// see also an ignored test under FIXME comment in the qualify_path.rs module
AssocSearchMode::Exclude,
)
+ .filter(|(item, _)| {
+ filter_by_definition_kind(db, *item, &path_candidate.definition_kinds)
+ })
.filter_map(|(item, do_not_complete)| {
if !scope_filter(item) {
return None;
@@ -442,6 +547,46 @@ fn path_applicable_imports(
result
}
+fn filter_by_definition_kind(
+ db: &RootDatabase,
+ item: ItemInNs,
+ allowed: &PathDefinitionKinds,
+) -> bool {
+ let item = item.into_module_def();
+ let struct_per_kind = |struct_kind| {
+ allowed.structs_and_consts
+ || match struct_kind {
+ hir::StructKind::Record => allowed.records,
+ hir::StructKind::Tuple => allowed.value_namespace || allowed.tuple_structs,
+ hir::StructKind::Unit => allowed.value_namespace,
+ }
+ };
+ match item {
+ ModuleDef::Module(_) => allowed.modules,
+ ModuleDef::Function(_) => allowed.value_namespace,
+ ModuleDef::Adt(hir::Adt::Struct(item)) => {
+ allowed.type_namespace || struct_per_kind(item.kind(db))
+ }
+ ModuleDef::Adt(hir::Adt::Enum(_)) => allowed.type_namespace,
+ ModuleDef::Adt(hir::Adt::Union(_)) => {
+ allowed.type_namespace || allowed.records || allowed.structs_and_consts
+ }
+ ModuleDef::EnumVariant(item) => struct_per_kind(item.kind(db)),
+ ModuleDef::Const(_) => allowed.value_namespace || allowed.structs_and_consts,
+ ModuleDef::Static(_) => allowed.value_namespace,
+ ModuleDef::Trait(_) => allowed.type_namespace,
+ ModuleDef::TypeAlias(_) => allowed.type_namespace,
+ ModuleDef::BuiltinType(_) => allowed.type_namespace,
+ ModuleDef::Macro(item) => {
+ if item.is_fn_like(db) {
+ allowed.bang_macros
+ } else {
+ allowed.attr_macros
+ }
+ }
+ }
+}
+
fn filter_candidates_by_after_path(
db: &RootDatabase,
scope: &SemanticsScope<'_>,
@@ -835,6 +980,7 @@ impl<'db> ImportCandidate<'db> {
.collect::<Option<_>>()?;
path_import_candidate(
sema,
+ Some(path),
path.qualifier(),
NameToImport::exact_case_sensitive(path.segment()?.name_ref()?.to_string()),
after,
@@ -853,25 +999,31 @@ impl<'db> ImportCandidate<'db> {
qualifier: vec![],
name: NameToImport::exact_case_sensitive(name.to_string()),
after: vec![],
+ definition_kinds: PathDefinitionKinds::PATH_PAT_KINDS,
}))
}
fn for_fuzzy_path(
+ path: Option<&ast::Path>,
qualifier: Option<ast::Path>,
fuzzy_name: String,
sema: &Semantics<'db, RootDatabase>,
) -> Option<Self> {
// Assume a fuzzy match does not want the segments after. Because... I guess why not?
- path_import_candidate(sema, qualifier, NameToImport::fuzzy(fuzzy_name), Vec::new())
+ path_import_candidate(sema, path, qualifier, NameToImport::fuzzy(fuzzy_name), Vec::new())
}
}
fn path_import_candidate<'db>(
sema: &Semantics<'db, RootDatabase>,
+ path: Option<&ast::Path>,
qualifier: Option<ast::Path>,
name: NameToImport,
after: Vec<Name>,
) -> Option<ImportCandidate<'db>> {
+ let definition_kinds = path.map_or(PathDefinitionKinds::ALL_ENABLED, |path| {
+ PathDefinitionKinds::deduce_from_path(path, matches!(name, NameToImport::Exact(..)))
+ });
Some(match qualifier {
Some(qualifier) => match sema.resolve_path(&qualifier) {
Some(PathResolution::Def(ModuleDef::BuiltinType(_))) | None => {
@@ -880,7 +1032,12 @@ fn path_import_candidate<'db>(
.segments()
.map(|seg| seg.name_ref().map(|name| Name::new_root(&name.text())))
.collect::<Option<Vec<_>>>()?;
- ImportCandidate::Path(PathImportCandidate { qualifier, name, after })
+ ImportCandidate::Path(PathImportCandidate {
+ qualifier,
+ name,
+ after,
+ definition_kinds,
+ })
} else {
return None;
}
@@ -904,7 +1061,12 @@ fn path_import_candidate<'db>(
}
Some(_) => return None,
},
- None => ImportCandidate::Path(PathImportCandidate { qualifier: vec![], name, after }),
+ None => ImportCandidate::Path(PathImportCandidate {
+ qualifier: vec![],
+ name,
+ after,
+ definition_kinds,
+ }),
})
}
diff --git a/crates/ide-db/src/imports/insert_use.rs b/crates/ide-db/src/imports/insert_use.rs
index da8525d1fb..41ce1e5960 100644
--- a/crates/ide-db/src/imports/insert_use.rs
+++ b/crates/ide-db/src/imports/insert_use.rs
@@ -305,10 +305,8 @@ fn insert_use_with_alias_option_with_editor(
if mb == Some(MergeBehavior::One) && use_tree.path().is_some() {
use_tree.wrap_in_tree_list();
}
- let use_item = make::use_(None, None, use_tree).clone_for_update();
- for attr in
- scope.required_cfgs.iter().map(|attr| attr.syntax().clone_subtree().clone_for_update())
- {
+ let use_item = make::use_(None, None, use_tree);
+ for attr in scope.required_cfgs.iter().map(|attr| attr.syntax().clone()) {
syntax_editor.insert(Position::first_child_of(use_item.syntax()), attr);
}
@@ -711,7 +709,11 @@ fn insert_use_with_editor_(
Some(b) => {
cov_mark::hit!(insert_empty_module);
syntax_editor.insert(Position::after(&b), syntax_factory.whitespace("\n"));
- syntax_editor.insert(Position::after(&b), use_item.syntax());
+ syntax_editor.insert_with_whitespace(
+ Position::after(&b),
+ use_item.syntax(),
+ syntax_factory,
+ );
}
None => {
cov_mark::hit!(insert_empty_file);
diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs
index cde0705d8a..8d16826e19 100644
--- a/crates/ide-db/src/lib.rs
+++ b/crates/ide-db/src/lib.rs
@@ -60,8 +60,8 @@ use salsa::Durability;
use std::{fmt, mem::ManuallyDrop};
use base_db::{
- CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, Files, Nonce, RootQueryDb,
- SourceDatabase, SourceRoot, SourceRootId, SourceRootInput, query_group,
+ CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, Files, Nonce, SourceDatabase,
+ SourceRoot, SourceRootId, SourceRootInput, query_group, set_all_crates_with_durability,
};
use hir::{
FilePositionWrapper, FileRangeWrapper,
@@ -197,7 +197,7 @@ impl RootDatabase {
nonce: Nonce::new(),
};
// This needs to be here otherwise `CrateGraphBuilder` will panic.
- db.set_all_crates(Arc::new(Box::new([])));
+ set_all_crates_with_durability(&mut db, std::iter::empty(), Durability::HIGH);
CrateGraphBuilder::default().set_in_db(&mut db);
db.set_proc_macros_with_durability(Default::default(), Durability::MEDIUM);
_ = base_db::LibraryRoots::builder(Default::default())
@@ -253,7 +253,7 @@ impl RootDatabase {
}
#[query_group::query_group]
-pub trait LineIndexDatabase: base_db::RootQueryDb {
+pub trait LineIndexDatabase: base_db::SourceDatabase {
#[salsa::invoke_interned(line_index)]
fn line_index(&self, file_id: FileId) -> Arc<LineIndex>;
}
diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs
index 508f841340..407276a2de 100644
--- a/crates/ide-db/src/path_transform.rs
+++ b/crates/ide-db/src/path_transform.rs
@@ -197,7 +197,7 @@ impl<'a> PathTransform<'a> {
&& let Some(default) =
&default.display_source_code(db, source_module.into(), false).ok()
{
- type_substs.insert(k, make::ty(default).clone_for_update());
+ type_substs.insert(k, make::ty(default));
defaulted_params.push(Either::Left(k));
}
}
@@ -222,7 +222,7 @@ impl<'a> PathTransform<'a> {
k.default(db, target_module.krate(db).to_display_target(db))
&& let Some(default) = default.expr()
{
- const_substs.insert(k, default.syntax().clone_for_update());
+ const_substs.insert(k, default.syntax().clone());
defaulted_params.push(Either::Right(k));
}
}
@@ -278,12 +278,10 @@ impl Ctx<'_> {
// `transform_path` may update a node's parent and that would break the
// tree traversal. Thus all paths in the tree are collected into a vec
// so that such operation is safe.
- let item = self.transform_path(item).clone_subtree();
- let mut editor = SyntaxEditor::new(item.clone());
+ let (mut editor, item) = SyntaxEditor::new(self.transform_path(item));
preorder_rev(&item).filter_map(ast::Lifetime::cast).for_each(|lifetime| {
if let Some(subst) = self.lifetime_substs.get(&lifetime.syntax().text().to_string()) {
- editor
- .replace(lifetime.syntax(), subst.clone_subtree().clone_for_update().syntax());
+ editor.replace(lifetime.syntax(), subst.clone().syntax());
}
});
@@ -331,18 +329,14 @@ impl Ctx<'_> {
result
}
- let root_path = path.clone_subtree();
-
+ let (mut editor, root_path) = SyntaxEditor::new(path.clone());
let result = find_child_paths_and_ident_pats(&root_path);
- let mut editor = SyntaxEditor::new(root_path.clone());
for sub_path in result {
let new = self.transform_path(sub_path.syntax());
editor.replace(sub_path.syntax(), new);
}
-
- let update_sub_item = editor.finish().new_root().clone().clone_subtree();
+ let (mut editor, update_sub_item) = SyntaxEditor::new(editor.finish().new_root().clone());
let item = find_child_paths_and_ident_pats(&update_sub_item);
- let mut editor = SyntaxEditor::new(update_sub_item);
for sub_path in item {
self.transform_path_or_ident_pat(&mut editor, &sub_path);
}
@@ -411,33 +405,27 @@ impl Ctx<'_> {
let segment = make::path_segment_ty(subst.clone(), trait_ref);
let qualified = make::path_from_segments(std::iter::once(segment), false);
- editor.replace(path.syntax(), qualified.clone_for_update().syntax());
+ editor.replace(path.syntax(), qualified.clone().syntax());
} else if let Some(path_ty) = ast::PathType::cast(parent) {
let old = path_ty.syntax();
if old.parent().is_some() {
- editor.replace(old, subst.clone_subtree().clone_for_update().syntax());
+ editor.replace(old, subst.clone().syntax());
} else {
- // Some `path_ty` has no parent, especially ones made for default value
- // of type parameters.
- // In this case, `ted` cannot replace `path_ty` with `subst` directly.
- // So, just replace its children as long as the `subst` is the same type.
- let new = subst.clone_subtree().clone_for_update();
- if !matches!(new, ast::Type::PathType(..)) {
- return None;
- }
let start = path_ty.syntax().first_child().map(NodeOrToken::Node)?;
let end = path_ty.syntax().last_child().map(NodeOrToken::Node)?;
editor.replace_all(
start..=end,
- new.syntax().children().map(NodeOrToken::Node).collect::<Vec<_>>(),
+ subst
+ .clone()
+ .syntax()
+ .children()
+ .map(NodeOrToken::Node)
+ .collect::<Vec<_>>(),
);
}
} else {
- editor.replace(
- path.syntax(),
- subst.clone_subtree().clone_for_update().syntax(),
- );
+ editor.replace(path.syntax(), subst.clone().syntax());
}
}
}
@@ -459,18 +447,17 @@ impl Ctx<'_> {
allow_unstable: true,
};
let found_path = self.target_module.find_path(self.source_scope.db, def, cfg)?;
- let res = mod_path_to_ast(&found_path, self.target_edition).clone_for_update();
- let mut res_editor = SyntaxEditor::new(res.syntax().clone_subtree());
+ let res = mod_path_to_ast(&found_path, self.target_edition);
+ let (mut res_editor, res) = SyntaxEditor::with_ast_node(&res);
if let Some(args) = path.segment().and_then(|it| it.generic_arg_list())
&& let Some(segment) = res.segment()
{
if let Some(old) = segment.generic_arg_list() {
- res_editor
- .replace(old.syntax(), args.clone_subtree().syntax().clone_for_update())
+ res_editor.replace(old.syntax(), args.syntax().clone())
} else {
res_editor.insert(
syntax_editor::Position::last_child_of(segment.syntax()),
- args.clone_subtree().syntax().clone_for_update(),
+ args.syntax().clone(),
);
}
}
@@ -479,7 +466,7 @@ impl Ctx<'_> {
}
hir::PathResolution::ConstParam(cp) => {
if let Some(subst) = self.const_substs.get(&cp) {
- editor.replace(path.syntax(), subst.clone_subtree().clone_for_update());
+ editor.replace(path.syntax(), subst.clone());
}
}
hir::PathResolution::SelfType(imp) => {
@@ -496,7 +483,7 @@ impl Ctx<'_> {
true,
)
.ok()?;
- let ast_ty = make::ty(ty_str).clone_for_update();
+ let ast_ty = make::ty(ty_str);
if let Some(adt) = ty.as_adt()
&& let ast::Type::PathType(path_ty) = &ast_ty
@@ -516,8 +503,10 @@ impl Ctx<'_> {
if let Some(qual) =
mod_path_to_ast(&found_path, self.target_edition).qualifier()
{
- let res = make::path_concat(qual, path_ty.path()?).clone_for_update();
- editor.replace(path.syntax(), res.syntax());
+ editor.replace(
+ path.syntax(),
+ make::path_concat(qual, path_ty.path()?).syntax(),
+ );
return Some(());
}
}
@@ -593,8 +582,10 @@ impl Ctx<'_> {
allow_unstable: true,
};
let found_path = self.target_module.find_path(self.source_scope.db, def, cfg)?;
- let res = mod_path_to_ast(&found_path, self.target_edition).clone_for_update();
- editor.replace(ident_pat.syntax(), res.syntax());
+ editor.replace(
+ ident_pat.syntax(),
+ mod_path_to_ast(&found_path, self.target_edition).syntax(),
+ );
Some(())
}
_ => None,
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs
index d264428212..12a48d65ac 100644
--- a/crates/ide-db/src/prime_caches.rs
+++ b/crates/ide-db/src/prime_caches.rs
@@ -4,15 +4,12 @@
//! various caches, it's not really advanced at the moment.
use std::panic::AssertUnwindSafe;
+use base_db::all_crates;
use hir::{Symbol, import_map::ImportMap};
use rustc_hash::FxHashMap;
use salsa::{Cancelled, Database};
-use crate::{
- FxIndexMap, RootDatabase,
- base_db::{Crate, RootQueryDb},
- symbol_index::SymbolIndex,
-};
+use crate::{FxIndexMap, RootDatabase, base_db::Crate, symbol_index::SymbolIndex};
/// We're indexing many crates.
#[derive(Debug)]
@@ -56,7 +53,7 @@ pub fn parallel_prime_caches(
// to compute the symbols/import map of an already computed def map in that time.
let (reverse_deps, mut to_be_done_deps) = {
- let all_crates = db.all_crates();
+ let all_crates = all_crates(db);
let to_be_done_deps = all_crates
.iter()
.map(|&krate| (krate, krate.data(db).dependencies.len() as u32))
@@ -200,7 +197,7 @@ pub fn parallel_prime_caches(
)
};
- let crate_def_maps_total = db.all_crates().len();
+ let crate_def_maps_total = all_crates(db).len();
let mut crate_def_maps_done = 0;
let (mut crate_import_maps_total, mut crate_import_maps_done) = (0usize, 0usize);
let (mut module_symbols_total, mut module_symbols_done) = (0usize, 0usize);
diff --git a/crates/ide-db/src/ra_fixture.rs b/crates/ide-db/src/ra_fixture.rs
index c9a670b2d1..2f4d319ec8 100644
--- a/crates/ide-db/src/ra_fixture.rs
+++ b/crates/ide-db/src/ra_fixture.rs
@@ -52,6 +52,18 @@ impl RootDatabase {
}
}
+#[derive(Debug, Clone, Copy)]
+pub struct RaFixtureConfig<'a> {
+ pub minicore: MiniCore<'a>,
+ pub disable_ra_fixture: bool,
+}
+
+impl<'a> RaFixtureConfig<'a> {
+ pub const fn default() -> Self {
+ Self { minicore: MiniCore::default(), disable_ra_fixture: false }
+ }
+}
+
pub struct RaFixtureAnalysis {
pub db: RootDatabase,
tmp_file_ids: Vec<(FileId, usize)>,
@@ -69,9 +81,14 @@ impl RaFixtureAnalysis {
sema: &Semantics<'_, RootDatabase>,
literal: ast::String,
expanded: &ast::String,
- minicore: MiniCore<'_>,
+ config: &RaFixtureConfig<'_>,
on_cursor: &mut dyn FnMut(TextRange),
) -> Option<RaFixtureAnalysis> {
+ if config.disable_ra_fixture {
+ return None;
+ }
+ let minicore = config.minicore;
+
if !literal.is_raw() {
return None;
}
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 25acb47f7b..69459a4b72 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -7,7 +7,7 @@
use std::mem;
use std::{cell::LazyCell, cmp::Reverse};
-use base_db::{RootQueryDb, SourceDatabase};
+use base_db::{SourceDatabase, all_crates};
use either::Either;
use hir::{
Adt, AsAssocItem, DefWithBody, EditionedFileId, ExpressionStoreOwner, FileRange,
@@ -161,7 +161,7 @@ impl SearchScope {
fn crate_graph(db: &RootDatabase) -> SearchScope {
let mut entries = FxHashMap::default();
- let all_crates = db.all_crates();
+ let all_crates = all_crates(db);
for &krate in all_crates.iter() {
let crate_data = krate.data(db);
let source_root = db.file_source_root(crate_data.root_file_id).source_root_id(db);
diff --git a/crates/ide-db/src/source_change.rs b/crates/ide-db/src/source_change.rs
index 57072bb5ba..4a83f707fc 100644
--- a/crates/ide-db/src/source_change.rs
+++ b/crates/ide-db/src/source_change.rs
@@ -282,7 +282,7 @@ impl SourceChangeBuilder {
}
pub fn make_editor(&self, node: &SyntaxNode) -> SyntaxEditor {
- SyntaxEditor::new(node.ancestors().last().unwrap_or_else(|| node.clone()))
+ SyntaxEditor::new(node.ancestors().last().unwrap_or_else(|| node.clone())).0
}
pub fn add_file_edits(&mut self, file_id: impl Into<FileId>, edit: SyntaxEditor) {
diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs
index 183f6b6495..2ad3a51c3d 100644
--- a/crates/ide-db/src/symbol_index.rs
+++ b/crates/ide-db/src/symbol_index.rs
@@ -27,7 +27,10 @@ use std::{
ops::ControlFlow,
};
-use base_db::{CrateOrigin, LangCrateOrigin, LibraryRoots, LocalRoots, RootQueryDb, SourceRootId};
+use base_db::{
+ CrateOrigin, InternedSourceRootId, LangCrateOrigin, LibraryRoots, LocalRoots, SourceRootId,
+ source_root_crates,
+};
use fst::{Automaton, Streamer, raw::IndexedValue};
use hir::{
Crate, Module,
@@ -255,7 +258,7 @@ pub fn world_symbols(db: &RootDatabase, mut query: Query) -> Vec<FileSymbol<'_>>
let mut crates = Vec::new();
for &root in LocalRoots::get(db).roots(db).iter() {
- crates.extend(db.source_root_crates(root).iter().copied())
+ crates.extend(source_root_crates(db, root).iter().copied())
}
crates
.par_iter()
@@ -322,7 +325,7 @@ fn resolve_path_to_modules(
// If not anchored to crate, also search for modules matching first segment in local crates
if !anchor_to_crate {
for &root in LocalRoots::get(db).roots(db).iter() {
- for &krate in db.source_root_crates(root).iter() {
+ for &krate in source_root_crates(db, root).iter() {
let root_module = Crate::from(krate).root_module(db);
for child in root_module.children(db) {
if let Some(name) = child.name(db)
@@ -369,11 +372,6 @@ impl<'db> SymbolIndex<'db> {
db: &'db dyn HirDatabase,
source_root_id: SourceRootId,
) -> &'db SymbolIndex<'db> {
- // FIXME:
- #[salsa::interned]
- struct InternedSourceRootId {
- id: SourceRootId,
- }
#[salsa::tracked(returns(ref))]
fn library_symbols<'db>(
db: &'db dyn HirDatabase,
@@ -385,7 +383,7 @@ impl<'db> SymbolIndex<'db> {
hir::attach_db(db, || {
let mut symbol_collector = SymbolCollector::new(db, true);
- db.source_root_crates(source_root_id.id(db))
+ source_root_crates(db, source_root_id.id(db))
.iter()
.flat_map(|&krate| Crate::from(krate).modules(db))
// we specifically avoid calling other SymbolsDatabase queries here, even though they do the same thing,
diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt
index fc98ebb069..17d002e8bf 100644
--- a/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -2,7 +2,7 @@
(
Module {
id: ModuleIdLt {
- [salsa id]: Id(3400),
+ [salsa id]: Id(3000),
},
},
[
@@ -12,7 +12,7 @@
Struct(
Struct {
id: StructId(
- 3c01,
+ 3801,
),
},
),
@@ -20,7 +20,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -49,7 +49,7 @@
Struct(
Struct {
id: StructId(
- 3c00,
+ 3800,
),
},
),
@@ -57,7 +57,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -86,7 +86,7 @@
Struct(
Struct {
id: StructId(
- 3c00,
+ 3800,
),
},
),
@@ -94,7 +94,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -123,7 +123,7 @@
Struct(
Struct {
id: StructId(
- 3c00,
+ 3800,
),
},
),
@@ -131,7 +131,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -160,7 +160,7 @@
Struct(
Struct {
id: StructId(
- 3c00,
+ 3800,
),
},
),
@@ -168,7 +168,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -197,7 +197,7 @@
Struct(
Struct {
id: StructId(
- 3c01,
+ 3801,
),
},
),
@@ -205,7 +205,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -234,7 +234,7 @@
Struct(
Struct {
id: StructId(
- 3c00,
+ 3800,
),
},
),
@@ -242,7 +242,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index 02a023038a..1b20a574bd 100644
--- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -2,7 +2,7 @@
(
Module {
id: ModuleIdLt {
- [salsa id]: Id(3400),
+ [salsa id]: Id(3000),
},
},
[
@@ -11,14 +11,14 @@
def: EnumVariant(
EnumVariant {
id: EnumVariantId(
- 7c00,
+ 7800,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -48,14 +48,14 @@
def: TypeAlias(
TypeAlias {
id: TypeAliasId(
- 7000,
+ 6c00,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -83,14 +83,14 @@
def: EnumVariant(
EnumVariant {
id: EnumVariantId(
- 7c01,
+ 7801,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -120,14 +120,14 @@
def: Const(
Const {
id: ConstId(
- 6800,
+ 6400,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -155,14 +155,14 @@
def: Const(
Const {
id: ConstId(
- 6802,
+ 6402,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -191,7 +191,7 @@
Enum(
Enum {
id: EnumId(
- 5400,
+ 5000,
),
},
),
@@ -199,7 +199,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -228,7 +228,7 @@
Macro {
id: Macro2Id(
Macro2Id(
- 5000,
+ 4c00,
),
),
},
@@ -236,7 +236,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -265,7 +265,7 @@
Macro {
id: Macro2Id(
Macro2Id(
- 5000,
+ 4c00,
),
),
},
@@ -273,7 +273,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -301,14 +301,14 @@
def: Static(
Static {
id: StaticId(
- 6c00,
+ 6800,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -337,7 +337,7 @@
Struct(
Struct {
id: StructId(
- 4c01,
+ 4801,
),
},
),
@@ -345,7 +345,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -374,7 +374,7 @@
Struct(
Struct {
id: StructId(
- 4c00,
+ 4800,
),
},
),
@@ -382,7 +382,7 @@
loc: DeclarationLocation {
hir_file_id: MacroFile(
MacroCallId(
- Id(4400),
+ Id(4000),
),
),
ptr: SyntaxNodePtr {
@@ -411,7 +411,7 @@
Struct(
Struct {
id: StructId(
- 4c05,
+ 4805,
),
},
),
@@ -419,7 +419,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -450,7 +450,7 @@
Struct(
Struct {
id: StructId(
- 4c06,
+ 4806,
),
},
),
@@ -458,7 +458,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -489,7 +489,7 @@
Struct(
Struct {
id: StructId(
- 4c07,
+ 4807,
),
},
),
@@ -497,7 +497,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -526,7 +526,7 @@
Struct(
Struct {
id: StructId(
- 4c02,
+ 4802,
),
},
),
@@ -534,7 +534,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -562,14 +562,14 @@
def: Trait(
Trait {
id: TraitId(
- 6000,
+ 5c00,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -598,7 +598,7 @@
Macro {
id: Macro2Id(
Macro2Id(
- 5000,
+ 4c00,
),
),
},
@@ -606,7 +606,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -635,7 +635,7 @@
Union(
Union {
id: UnionId(
- 5800,
+ 5400,
),
},
),
@@ -643,7 +643,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -671,14 +671,14 @@
def: Module(
Module {
id: ModuleIdLt {
- [salsa id]: Id(3401),
+ [salsa id]: Id(3001),
},
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -706,14 +706,14 @@
def: Module(
Module {
id: ModuleIdLt {
- [salsa id]: Id(3402),
+ [salsa id]: Id(3002),
},
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -742,7 +742,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
- 4001,
+ 3c01,
),
),
},
@@ -750,7 +750,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -778,14 +778,14 @@
def: Function(
FunctionId(
FunctionId(
- 6402,
+ 6002,
),
),
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -815,14 +815,14 @@
def: Function(
FunctionId(
FunctionId(
- 6401,
+ 6001,
),
),
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -853,7 +853,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
- 4000,
+ 3c00,
),
),
},
@@ -861,7 +861,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -889,14 +889,14 @@
def: Function(
FunctionId(
FunctionId(
- 6400,
+ 6000,
),
),
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -925,7 +925,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
- 4001,
+ 3c01,
),
),
},
@@ -933,7 +933,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -961,14 +961,14 @@
def: Function(
FunctionId(
FunctionId(
- 6403,
+ 6003,
),
),
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -998,7 +998,7 @@
(
Module {
id: ModuleIdLt {
- [salsa id]: Id(3401),
+ [salsa id]: Id(3001),
},
},
[
@@ -1008,7 +1008,7 @@
Struct(
Struct {
id: StructId(
- 4c03,
+ 4803,
),
},
),
@@ -1016,7 +1016,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
@@ -1044,7 +1044,7 @@
(
Module {
id: ModuleIdLt {
- [salsa id]: Id(3402),
+ [salsa id]: Id(3002),
},
},
[
@@ -1053,14 +1053,14 @@
def: Trait(
Trait {
id: TraitId(
- 6000,
+ 5c00,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3001),
+ Id(2c01),
),
),
ptr: SyntaxNodePtr {
@@ -1089,7 +1089,7 @@
Macro {
id: Macro2Id(
Macro2Id(
- 5000,
+ 4c00,
),
),
},
@@ -1097,7 +1097,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3001),
+ Id(2c01),
),
),
ptr: SyntaxNodePtr {
@@ -1126,7 +1126,7 @@
Struct(
Struct {
id: StructId(
- 4c04,
+ 4804,
),
},
),
@@ -1134,7 +1134,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3001),
+ Id(2c01),
),
),
ptr: SyntaxNodePtr {
@@ -1163,7 +1163,7 @@
Macro {
id: Macro2Id(
Macro2Id(
- 5000,
+ 4c00,
),
),
},
@@ -1171,7 +1171,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3001),
+ Id(2c01),
),
),
ptr: SyntaxNodePtr {
@@ -1200,7 +1200,7 @@
Struct(
Struct {
id: StructId(
- 4c04,
+ 4804,
),
},
),
@@ -1208,7 +1208,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3001),
+ Id(2c01),
),
),
ptr: SyntaxNodePtr {
diff --git a/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt b/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
index aff1d56c56..f8ae687b78 100644
--- a/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
+++ b/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
@@ -5,7 +5,7 @@
Struct(
Struct {
id: StructId(
- 4000,
+ 3c00,
),
},
),
@@ -13,7 +13,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3001),
+ Id(2c01),
),
),
ptr: SyntaxNodePtr {
diff --git a/crates/ide-db/src/test_data/test_symbols_with_imports.txt b/crates/ide-db/src/test_data/test_symbols_with_imports.txt
index bf5d81cfb1..2282815a61 100644
--- a/crates/ide-db/src/test_data/test_symbols_with_imports.txt
+++ b/crates/ide-db/src/test_data/test_symbols_with_imports.txt
@@ -5,7 +5,7 @@
Struct(
Struct {
id: StructId(
- 4000,
+ 3c00,
),
},
),
@@ -13,7 +13,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3001),
+ Id(2c01),
),
),
ptr: SyntaxNodePtr {
@@ -42,7 +42,7 @@
Struct(
Struct {
id: StructId(
- 4000,
+ 3c00,
),
},
),
@@ -50,7 +50,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(3000),
+ Id(2c00),
),
),
ptr: SyntaxNodePtr {
diff --git a/crates/ide-diagnostics/src/handlers/inactive_code.rs b/crates/ide-diagnostics/src/handlers/inactive_code.rs
index dfa9639f6e..9bfbeeebf7 100644
--- a/crates/ide-diagnostics/src/handlers/inactive_code.rs
+++ b/crates/ide-diagnostics/src/handlers/inactive_code.rs
@@ -40,7 +40,10 @@ pub(crate) fn inactive_code(
#[cfg(test)]
mod tests {
- use crate::{DiagnosticsConfig, tests::check_diagnostics_with_config};
+ use ide_db::RootDatabase;
+ use test_fixture::WithFixture;
+
+ use crate::{DiagnosticCode, DiagnosticsConfig, tests::check_diagnostics_with_config};
#[track_caller]
pub(crate) fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
@@ -212,4 +215,41 @@ union FooBar {
"#,
);
}
+
+ #[test]
+ fn inactive_crate() {
+ let db = RootDatabase::with_files(
+ r#"
+#![cfg(false)]
+
+fn foo() {}
+ "#,
+ );
+ let file_id = db.test_crate().root_file_id(&db);
+ let diagnostics = hir::attach_db(&db, || {
+ crate::full_diagnostics(
+ &db,
+ &DiagnosticsConfig::test_sample(),
+ &ide_db::assists::AssistResolveStrategy::All,
+ file_id.file_id(&db),
+ )
+ });
+ let [inactive_code] = &*diagnostics else {
+ panic!("expected one inactive_code diagnostic, found {diagnostics:#?}");
+ };
+ assert_eq!(
+ inactive_code.code,
+ DiagnosticCode::Ra("inactive-code", ide_db::Severity::WeakWarning)
+ );
+ assert_eq!(
+ inactive_code.message,
+ "code is inactive due to #[cfg] directives: false is disabled",
+ );
+ assert!(inactive_code.fixes.is_none());
+ let full_file_range = file_id.parse(&db).syntax_node().text_range();
+ assert_eq!(
+ inactive_code.range,
+ ide_db::FileRange { file_id: file_id.file_id(&db), range: full_file_range },
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 050d5477f6..85368cc09f 100644
--- a/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -1,6 +1,6 @@
use either::Either;
use hir::{
- AssocItem, FindPathConfig, HirDisplay, InFile, Type,
+ AssocItem, FindPathConfig, HasVisibility, HirDisplay, InFile, Type,
db::{ExpandDatabase, HirDatabase},
sym,
};
@@ -35,7 +35,7 @@ use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix};
// ```
pub(crate) fn missing_fields(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Diagnostic {
let mut message = String::from("missing structure fields:\n");
- for field in &d.missed_fields {
+ for (field, _) in &d.missed_fields {
format_to!(message, "- {}\n", field.display(ctx.sema.db, ctx.edition));
}
@@ -57,7 +57,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
// `struct A(usize);`
// `let a = A { 0: () }`
// but it is uncommon usage and it should not be encouraged.
- if d.missed_fields.iter().any(|it| it.as_tuple_index().is_some()) {
+ if d.missed_fields.iter().any(|(name, _)| name.as_tuple_index().is_some()) {
return None;
}
@@ -68,6 +68,12 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
let range = InFile::new(d.file, d.field_list_parent.text_range())
.original_node_file_range_rooted_opt(ctx.sema.db)?;
+ if let Some(current_module) = current_module
+ && d.missed_fields.iter().any(|(_, field)| !field.is_visible_from(ctx.db(), current_module))
+ {
+ return None;
+ }
+
let build_text_edit = |new_syntax: &SyntaxNode, old_syntax| {
let edit = {
let old_range = ctx.sema.original_range_opt(old_syntax)?;
@@ -120,7 +126,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
let field_expr = if let Some(local_candidate) = locals.get(&f.name(ctx.sema.db)) {
cov_mark::hit!(field_shorthand);
let candidate_ty = local_candidate.ty(ctx.sema.db);
- if ty.could_unify_with(ctx.sema.db, &candidate_ty) {
+ if candidate_ty.could_coerce_to(ctx.sema.db, ty) {
None
} else {
Some(generate_fill_expr(ty))
@@ -254,7 +260,7 @@ fn get_default_constructor(
#[cfg(test)]
mod tests {
- use crate::tests::{check_diagnostics, check_fix};
+ use crate::tests::{check_diagnostics, check_fix, check_no_fix};
#[test]
fn missing_record_pat_field_diagnostic() {
@@ -934,4 +940,45 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn coerce_existing_local() {
+ check_fix(
+ r#"
+struct A {
+ v: f64,
+}
+
+fn f() -> A {
+ let v = loop {};
+ A {$0}
+}
+ "#,
+ r#"
+struct A {
+ v: f64,
+}
+
+fn f() -> A {
+ let v = loop {};
+ A { v }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn inaccessible_fields() {
+ check_no_fix(
+ r#"
+mod foo {
+ pub struct Bar { baz: i32 }
+}
+
+fn qux() {
+ foo::Bar {$0};
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
index f4054610f2..6a380481d4 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -5,8 +5,10 @@ use ide_db::{
label::Label,
source_change::SourceChangeBuilder,
};
-use syntax::ToSmolStr;
-use syntax::ast::edit::AstNodeEdit;
+use syntax::{
+ AstNode, ToSmolStr,
+ ast::{HasName, edit::AstNodeEdit},
+};
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
@@ -82,16 +84,18 @@ fn quickfix_for_redundant_assoc_item(
let db = ctx.sema.db;
let root = db.parse_or_expand(d.file_id);
// don't modify trait def in outer crate
- let current_crate = ctx.sema.scope(&d.impl_.syntax_node_ptr().to_node(&root))?.krate();
+ let impl_def = d.impl_.to_node(&root);
+ let current_crate = ctx.sema.scope(impl_def.syntax())?.krate();
let trait_def_crate = d.trait_.module(db).krate(db);
if trait_def_crate != current_crate {
return None;
}
let trait_def = d.trait_.source(db)?.value;
- let l_curly = trait_def.assoc_item_list()?.l_curly_token()?.text_range();
+ let insert_after = find_insert_after(range, &impl_def, &trait_def)?;
+
let where_to_insert =
- hir::InFile::new(d.file_id, l_curly).original_node_file_range_rooted_opt(db)?;
+ hir::InFile::new(d.file_id, insert_after).original_node_file_range_rooted_opt(db)?;
if where_to_insert.file_id != file_id {
return None;
}
@@ -112,6 +116,41 @@ fn quickfix_for_redundant_assoc_item(
}])
}
+fn find_insert_after(
+ redundant_range: TextRange,
+ impl_def: &syntax::ast::Impl,
+ trait_def: &syntax::ast::Trait,
+) -> Option<TextRange> {
+ let impl_items_before_redundant = impl_def
+ .assoc_item_list()?
+ .assoc_items()
+ .take_while(|it| it.syntax().text_range().start() < redundant_range.start())
+ .filter_map(|it| name_of(&it))
+ .collect::<Vec<_>>();
+
+ let after_item = trait_def
+ .assoc_item_list()?
+ .assoc_items()
+ .filter(|it| {
+ name_of(it).is_some_and(|name| {
+ impl_items_before_redundant.iter().any(|it| it.text() == name.text())
+ })
+ })
+ .last()
+ .map(|it| it.syntax().text_range());
+
+ return after_item.or_else(|| Some(trait_def.assoc_item_list()?.l_curly_token()?.text_range()));
+
+ fn name_of(it: &syntax::ast::AssocItem) -> Option<syntax::ast::Name> {
+ match it {
+ syntax::ast::AssocItem::Const(it) => it.name(),
+ syntax::ast::AssocItem::Fn(it) => it.name(),
+ syntax::ast::AssocItem::TypeAlias(it) => it.name(),
+ syntax::ast::AssocItem::MacroCall(_) => None,
+ }
+ }
+}
+
#[cfg(test)]
mod tests {
use crate::tests::{check_diagnostics, check_fix, check_no_fix};
@@ -275,6 +314,69 @@ mod indent {
}
#[test]
+ fn quickfix_order() {
+ check_fix(
+ r#"
+trait Marker {
+ fn foo();
+ fn baz();
+}
+struct Foo;
+impl Marker for Foo {
+ fn foo() {}
+ fn missing() {}$0
+ fn baz() {}
+}
+ "#,
+ r#"
+trait Marker {
+ fn foo();
+ fn missing();
+ fn baz();
+}
+struct Foo;
+impl Marker for Foo {
+ fn foo() {}
+ fn missing() {}
+ fn baz() {}
+}
+ "#,
+ );
+
+ check_fix(
+ r#"
+trait Marker {
+ type Item;
+ fn bar();
+ fn baz();
+}
+struct Foo;
+impl Marker for Foo {
+ type Item = Foo;
+ fn missing() {}$0
+ fn bar() {}
+ fn baz() {}
+}
+ "#,
+ r#"
+trait Marker {
+ type Item;
+ fn missing();
+ fn bar();
+ fn baz();
+}
+struct Foo;
+impl Marker for Foo {
+ type Item = Foo;
+ fn missing() {}
+ fn bar() {}
+ fn baz() {}
+}
+ "#,
+ );
+ }
+
+ #[test]
fn quickfix_dont_work() {
check_no_fix(
r#"
diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index f443dc08f5..ff0e6a254b 100644
--- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -20,7 +20,14 @@ use crate::{Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_dis
//
// This diagnostic is triggered when the type of an expression or pattern does not match
// the expected type.
-pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch<'_>) -> Diagnostic {
+pub(crate) fn type_mismatch(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TypeMismatch<'_>,
+) -> Option<Diagnostic> {
+ if d.expected.is_unknown() || d.actual.is_unknown() {
+ return None;
+ }
+
let display_range = adjusted_display_range(ctx, d.expr_or_pat, &|node| {
let Either::Left(expr) = node else { return None };
let salient_token_range = match expr {
@@ -39,21 +46,23 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch<
cov_mark::hit!(type_mismatch_range_adjustment);
Some(salient_token_range)
});
- Diagnostic::new(
- DiagnosticCode::RustcHardError("E0308"),
- format!(
- "expected {}, found {}",
- d.expected
- .display(ctx.sema.db, ctx.display_target)
- .with_closure_style(ClosureStyle::ClosureWithId),
- d.actual
- .display(ctx.sema.db, ctx.display_target)
- .with_closure_style(ClosureStyle::ClosureWithId),
- ),
- display_range,
+ Some(
+ Diagnostic::new(
+ DiagnosticCode::RustcHardError("E0308"),
+ format!(
+ "expected {}, found {}",
+ d.expected
+ .display(ctx.sema.db, ctx.display_target)
+ .with_closure_style(ClosureStyle::ClosureWithId),
+ d.actual
+ .display(ctx.sema.db, ctx.display_target)
+ .with_closure_style(ClosureStyle::ClosureWithId),
+ ),
+ display_range,
+ )
+ .stable()
+ .with_fixes(fixes(ctx, d)),
)
- .stable()
- .with_fixes(fixes(ctx, d))
}
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch<'_>) -> Option<Vec<Assist>> {
@@ -101,7 +110,8 @@ fn add_missing_ok_or_some(
) -> Option<()> {
let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
let expr = expr_ptr.value.to_node(&root);
- let expr_range = expr.syntax().text_range();
+ let hir::FileRange { file_id, range: expr_range } =
+ ctx.sema.original_range_opt(expr.syntax())?;
let scope = ctx.sema.scope(expr.syntax())?;
let expected_adt = d.expected.as_adt()?;
@@ -124,6 +134,8 @@ fn add_missing_ok_or_some(
return None;
}
+ let file_id = file_id.file_id(ctx.sema.db);
+
if d.actual.is_unit() {
if let Expr::BlockExpr(block) = &expr {
if block.tail_expr().is_none() {
@@ -135,21 +147,18 @@ fn add_missing_ok_or_some(
// Empty block
let indent = block_indent + 1;
builder.insert(
- block.syntax().text_range().start() + TextSize::from(1),
+ expr_range.start() + TextSize::from(1),
format!("\n{indent}{variant_name}(())\n{block_indent}"),
);
} else {
let indent = IndentLevel::from(1);
builder.insert(
- block.syntax().text_range().end() - TextSize::from(1),
+ expr_range.end() - TextSize::from(1),
format!("{indent}{variant_name}(())\n{block_indent}"),
);
}
- let source_change = SourceChange::from_text_edit(
- expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
- builder.finish(),
- );
+ let source_change = SourceChange::from_text_edit(file_id, builder.finish());
let name = format!("Insert {variant_name}(()) as the tail of this block");
acc.push(fix("insert_wrapped_unit", &name, source_change, expr_range));
}
@@ -158,26 +167,31 @@ fn add_missing_ok_or_some(
// Fix for forms like `fn foo() -> Result<(), String> { return; }`
if ret_expr.expr().is_none() {
let mut builder = TextEdit::builder();
- builder
- .insert(ret_expr.syntax().text_range().end(), format!(" {variant_name}(())"));
- let source_change = SourceChange::from_text_edit(
- expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
- builder.finish(),
- );
+ builder.insert(expr_range.end(), format!(" {variant_name}(())"));
+ let source_change = SourceChange::from_text_edit(file_id, builder.finish());
let name = format!("Insert {variant_name}(()) as the return value");
acc.push(fix("insert_wrapped_unit", &name, source_change, expr_range));
}
return Some(());
+ } else if expr.is_block_like()
+ && expr.syntax().parent().and_then(ast::StmtList::cast).is_some()
+ {
+ // Fix for forms like `fn foo() -> Result<(), String> { for _ in 0..8 {} }`
+ let mut builder = TextEdit::builder();
+ let indent = expr.indent_level();
+ builder.insert(expr_range.end(), format!("\n{indent}{variant_name}(())"));
+
+ let source_change = SourceChange::from_text_edit(file_id, builder.finish());
+ let name = format!("Insert {variant_name}(()) as the tail of this block");
+ acc.push(fix("insert_wrapped_unit", &name, source_change, expr_range));
+ return Some(());
}
}
let mut builder = TextEdit::builder();
- builder.insert(expr.syntax().text_range().start(), format!("{variant_name}("));
- builder.insert(expr.syntax().text_range().end(), ")".to_owned());
- let source_change = SourceChange::from_text_edit(
- expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
- builder.finish(),
- );
+ builder.insert(expr_range.start(), format!("{variant_name}("));
+ builder.insert(expr_range.end(), ")".to_owned());
+ let source_change = SourceChange::from_text_edit(file_id, builder.finish());
let name = format!("Wrap in {variant_name}");
acc.push(fix("wrap_in_constructor", &name, source_change, expr_range));
Some(())
@@ -192,6 +206,7 @@ fn remove_unnecessary_wrapper(
let db = ctx.sema.db;
let root = db.parse_or_expand(expr_ptr.file_id);
let expr = expr_ptr.value.to_node(&root);
+ // FIXME: support inside MacroCall?
let expr = ctx.sema.original_ast_node(expr)?;
let Expr::CallExpr(call_expr) = expr else {
@@ -278,6 +293,7 @@ fn remove_semicolon(
return None;
}
let block = BlockExpr::cast(expr.syntax().clone())?;
+ // FIXME: support inside MacroCall?
let expr_before_semi =
block.statements().last().and_then(|s| ExprStmt::cast(s.syntax().clone()))?;
let type_before_semi = ctx.sema.type_of_expr(&expr_before_semi.expr()?)?.original();
@@ -311,16 +327,13 @@ fn str_ref_to_owned(
let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
let expr = expr_ptr.value.to_node(&root);
- let expr_range = expr.syntax().text_range();
+ let hir::FileRange { file_id, range } = ctx.sema.original_range_opt(expr.syntax())?;
let to_owned = ".to_owned()".to_owned();
- let edit = TextEdit::insert(expr.syntax().text_range().end(), to_owned);
- let source_change = SourceChange::from_text_edit(
- expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
- edit,
- );
- acc.push(fix("str_ref_to_owned", "Add .to_owned() here", source_change, expr_range));
+ let edit = TextEdit::insert(range.end(), to_owned);
+ let source_change = SourceChange::from_text_edit(file_id.file_id(ctx.sema.db), edit);
+ acc.push(fix("str_ref_to_owned", "Add .to_owned() here", source_change, range));
Some(())
}
@@ -568,6 +581,32 @@ fn div(x: i32, y: i32) -> Result<i32, ()> {
}
"#,
);
+
+ check_fix(
+ r#"
+//- minicore: option, result
+macro_rules! identity { ($($t:tt)*) => ($($t)*) }
+identity! {
+ fn div(x: i32, y: i32) -> Result<i32, ()> {
+ if y == 0 {
+ return Err(());
+ }
+ x / y$0
+ }
+}
+"#,
+ r#"
+macro_rules! identity { ($($t:tt)*) => ($($t)*) }
+identity! {
+ fn div(x: i32, y: i32) -> Result<i32, ()> {
+ if y == 0 {
+ return Err(());
+ }
+ Ok(x / y)
+ }
+}
+"#,
+ );
}
#[test]
@@ -700,6 +739,21 @@ fn foo() -> Result<(), ()> {
}
"#,
);
+
+ check_fix(
+ r#"
+//- minicore: result
+fn foo() -> Result<(), ()> {
+ for _ in 0..5 {}$0
+}
+ "#,
+ r#"
+fn foo() -> Result<(), ()> {
+ for _ in 0..5 {}
+ Ok(())
+}
+ "#,
+ );
}
#[test]
@@ -1040,6 +1094,29 @@ fn test() -> String {
}
"#,
);
+
+ check_fix(
+ r#"
+macro_rules! identity { ($($t:tt)*) => ($($t)*) }
+struct String;
+
+identity! {
+ fn test() -> String {
+ "a"$0
+ }
+}
+ "#,
+ r#"
+macro_rules! identity { ($($t:tt)*) => ($($t)*) }
+struct String;
+
+identity! {
+ fn test() -> String {
+ "a".to_owned()
+ }
+}
+ "#,
+ );
}
#[test]
@@ -1253,4 +1330,23 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn test_ignore_unknown_mismatch() {
+ check_diagnostics(
+ r#"
+pub trait Foo {
+ type Out;
+}
+impl Foo for [i32; 1] {
+ type Out = ();
+}
+pub fn foo<T: Foo>(_: T) -> (T::Out,) { loop { } }
+
+fn main() {
+ let _x = foo(2);
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index 1283a11700..d7a0a3b0f5 100644
--- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -4,7 +4,7 @@ use std::iter;
use hir::crate_def_map;
use hir::{InFile, ModuleSource};
-use ide_db::base_db::RootQueryDb;
+use ide_db::base_db;
use ide_db::text_edit::TextEdit;
use ide_db::{
FileId, FileRange, LineIndexDatabase, base_db::SourceDatabase, source_change::SourceChange,
@@ -101,8 +101,8 @@ fn fixes(
};
// check crate roots, i.e. main.rs, lib.rs, ...
- let relevant_crates = db.relevant_crates(file_id);
- 'crates: for &krate in &*relevant_crates {
+ let relevant_crates = base_db::relevant_crates(db, file_id);
+ 'crates: for &krate in relevant_crates {
// FIXME: This shouldnt need to access the crate def map directly
let crate_def_map = crate_def_map(ctx.sema.db, krate);
@@ -157,7 +157,7 @@ fn fixes(
paths.into_iter().find_map(|path| source_root.file_for_path(&path))
})?;
stack.pop();
- let relevant_crates = db.relevant_crates(parent_id);
+ let relevant_crates = base_db::relevant_crates(db, parent_id);
'crates: for &krate in relevant_crates.iter() {
let crate_def_map = crate_def_map(ctx.sema.db, krate);
let Some((_, module)) = crate_def_map.modules().find(|(_, module)| {
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index 0c6953419f..09c9f8eab0 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -96,7 +96,7 @@ use hir::{
use ide_db::{
FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, Severity, SnippetCap,
assists::{Assist, AssistId, AssistResolveStrategy, ExprFillDefaultMode},
- base_db::{ReleaseChannel, RootQueryDb as _},
+ base_db::{ReleaseChannel, all_crates, toolchain_channel},
generated::lints::{CLIPPY_LINT_GROUPS, DEFAULT_LINT_GROUPS, DEFAULT_LINTS, Lint, LintGroup},
imports::insert_use::InsertUseConfig,
label::Label,
@@ -285,6 +285,12 @@ struct DiagnosticsContext<'a> {
is_nightly: bool,
}
+impl<'a> DiagnosticsContext<'a> {
+ fn db(&self) -> &'a RootDatabase {
+ self.sema.db
+ }
+}
+
/// Request parser level diagnostics for the given [`FileId`].
pub fn syntax_diagnostics(
db: &RootDatabase,
@@ -303,7 +309,8 @@ pub fn syntax_diagnostics(
let (file_id, _) = editioned_file_id.unpack(db);
// [#3434] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily.
- db.parse_errors(editioned_file_id)
+ editioned_file_id
+ .parse_errors(db)
.into_iter()
.flatten()
.take(128)
@@ -353,14 +360,14 @@ pub fn semantic_diagnostics(
let module = sema.file_to_module_def(file_id);
let is_nightly = matches!(
- module.and_then(|m| db.toolchain_channel(m.krate(db).into())),
+ module.and_then(|m| toolchain_channel(db, m.krate(db).into())),
Some(ReleaseChannel::Nightly) | None
);
let krate = match module {
Some(module) => module.krate(db),
None => {
- match db.all_crates().last() {
+ match all_crates(db).last() {
Some(last) => (*last).into(),
// short-circuit, return an empty vec of diagnostics
None => return vec![],
@@ -375,7 +382,7 @@ pub fn semantic_diagnostics(
// A bunch of parse errors in a file indicate some bigger structural parse changes in the
// file, so we skip semantic diagnostics so we can show these faster.
Some(m) => {
- if db.parse_errors(editioned_file_id).is_none_or(|es| es.len() < 16) {
+ if editioned_file_id.parse_errors(db).is_none_or(|es| es.len() < 16) {
m.diagnostics(db, &mut diags, config.style_lints);
}
}
@@ -430,7 +437,10 @@ pub fn semantic_diagnostics(
AnyDiagnostic::TraitImplRedundantAssocItems(d) => handlers::trait_impl_redundant_assoc_item::trait_impl_redundant_assoc_item(&ctx, &d),
AnyDiagnostic::TraitImplOrphan(d) => handlers::trait_impl_orphan::trait_impl_orphan(&ctx, &d),
AnyDiagnostic::TypedHole(d) => handlers::typed_hole::typed_hole(&ctx, &d),
- AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d),
+ AnyDiagnostic::TypeMismatch(d) => match handlers::type_mismatch::type_mismatch(&ctx, &d) {
+ Some(diag) => diag,
+ None => continue,
+ },
AnyDiagnostic::UndeclaredLabel(d) => handlers::undeclared_label::undeclared_label(&ctx, &d),
AnyDiagnostic::UnimplementedBuiltinMacro(d) => handlers::unimplemented_builtin_macro::unimplemented_builtin_macro(&ctx, &d),
AnyDiagnostic::UnreachableLabel(d) => handlers::unreachable_label::unreachable_label(&ctx, &d),
diff --git a/crates/ide-ssr/src/from_comment.rs b/crates/ide-ssr/src/from_comment.rs
index 181cc74a51..83b8c3dc81 100644
--- a/crates/ide-ssr/src/from_comment.rs
+++ b/crates/ide-ssr/src/from_comment.rs
@@ -1,7 +1,7 @@
//! This module allows building an SSR MatchFinder by parsing the SSR rule
//! from a comment.
-use ide_db::{EditionedFileId, FilePosition, FileRange, RootDatabase, base_db::RootQueryDb};
+use ide_db::{EditionedFileId, FilePosition, FileRange, RootDatabase};
use syntax::{
TextRange,
ast::{self, AstNode, AstToken},
@@ -19,7 +19,7 @@ pub fn ssr_from_comment(
let comment = {
let file_id = EditionedFileId::current_edition(db, frange.file_id);
- let file = db.parse(file_id);
+ let file = file_id.parse(db);
file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast)
}?;
let comment_text_without_prefix = comment.text().strip_prefix(comment.prefix()).unwrap();
diff --git a/crates/ide-ssr/src/matching.rs b/crates/ide-ssr/src/matching.rs
index 264f0660d7..ab5a0f70f5 100644
--- a/crates/ide-ssr/src/matching.rs
+++ b/crates/ide-ssr/src/matching.rs
@@ -7,7 +7,7 @@ use crate::{
resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo},
};
use hir::{FileRange, FindPathConfig, Semantics};
-use ide_db::{FxHashMap, base_db::RootQueryDb};
+use ide_db::{FxHashMap, base_db::all_crates};
use std::{cell::Cell, iter::Peekable};
use syntax::{
SmolStr, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken,
@@ -621,7 +621,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
})?
.original;
let krate = self.sema.scope(expr.syntax()).map(|it| it.krate()).unwrap_or_else(|| {
- hir::Crate::from(*self.sema.db.all_crates().last().expect("no crate graph present"))
+ hir::Crate::from(*all_crates(self.sema.db).last().expect("no crate graph present"))
});
code_type
diff --git a/crates/ide/src/annotations.rs b/crates/ide/src/annotations.rs
index 6fb8dedea4..21b2339c72 100644
--- a/crates/ide/src/annotations.rs
+++ b/crates/ide/src/annotations.rs
@@ -1,7 +1,7 @@
use hir::{HasSource, InFile, InRealFile, Semantics};
use ide_db::{
- FileId, FilePosition, FileRange, FxIndexSet, MiniCore, RootDatabase, defs::Definition,
- helpers::visit_file_defs,
+ FileId, FilePosition, FileRange, FxIndexSet, RootDatabase, defs::Definition,
+ helpers::visit_file_defs, ra_fixture::RaFixtureConfig,
};
use itertools::Itertools;
use syntax::{AstNode, TextRange, ast::HasName};
@@ -45,7 +45,7 @@ pub struct AnnotationConfig<'a> {
pub annotate_enum_variant_references: bool,
pub location: AnnotationLocation,
pub filter_adjacent_derive_implementations: bool,
- pub minicore: MiniCore<'a>,
+ pub ra_fixture: RaFixtureConfig<'a>,
}
pub enum AnnotationLocation {
@@ -216,7 +216,7 @@ pub(crate) fn resolve_annotation(
*data = find_all_refs(
&Semantics::new(db),
pos,
- &FindAllRefsConfig { search_scope: None, minicore: config.minicore },
+ &FindAllRefsConfig { search_scope: None, ra_fixture: config.ra_fixture },
)
.map(|result| {
result
@@ -244,7 +244,7 @@ fn should_skip_runnable(kind: &RunnableKind, binary_target: bool) -> bool {
#[cfg(test)]
mod tests {
use expect_test::{Expect, expect};
- use ide_db::MiniCore;
+ use ide_db::ra_fixture::RaFixtureConfig;
use crate::{Annotation, AnnotationConfig, fixture};
@@ -258,7 +258,7 @@ mod tests {
annotate_method_references: true,
annotate_enum_variant_references: true,
location: AnnotationLocation::AboveName,
- minicore: MiniCore::default(),
+ ra_fixture: RaFixtureConfig::default(),
filter_adjacent_derive_implementations: false,
};
@@ -898,9 +898,6 @@ mod tests {
test_id: Path(
"tests::my_cool_test",
),
- attr: TestAttr {
- ignore: false,
- },
},
cfg: None,
update_test: UpdateTest {
diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs
index aded911a8d..402764f112 100644
--- a/crates/ide/src/call_hierarchy.rs
+++ b/crates/ide/src/call_hierarchy.rs
@@ -4,9 +4,10 @@ use std::iter;
use hir::Semantics;
use ide_db::{
- FileRange, FxIndexMap, MiniCore, RootDatabase,
+ FileRange, FxIndexMap, RootDatabase,
defs::{Definition, NameClass, NameRefClass},
helpers::pick_best_token,
+ ra_fixture::RaFixtureConfig,
search::FileReference,
};
use syntax::{AstNode, SyntaxKind::IDENT, ast};
@@ -25,7 +26,7 @@ pub struct CallItem {
pub struct CallHierarchyConfig<'a> {
/// Whether to exclude tests from the call hierarchy
pub exclude_tests: bool,
- pub minicore: MiniCore<'a>,
+ pub ra_fixture: RaFixtureConfig<'a>,
}
pub(crate) fn call_hierarchy(
@@ -36,7 +37,7 @@ pub(crate) fn call_hierarchy(
goto_definition::goto_definition(
db,
position,
- &GotoDefinitionConfig { minicore: config.minicore },
+ &GotoDefinitionConfig { ra_fixture: config.ra_fixture },
)
}
@@ -174,7 +175,7 @@ impl CallLocations {
#[cfg(test)]
mod tests {
use expect_test::{Expect, expect};
- use ide_db::{FilePosition, MiniCore};
+ use ide_db::{FilePosition, ra_fixture::RaFixtureConfig};
use itertools::Itertools;
use crate::fixture;
@@ -197,7 +198,8 @@ mod tests {
)
}
- let config = crate::CallHierarchyConfig { exclude_tests, minicore: MiniCore::default() };
+ let config =
+ crate::CallHierarchyConfig { exclude_tests, ra_fixture: RaFixtureConfig::default() };
let (analysis, pos) = fixture::position(ra_fixture);
let mut navs = analysis.call_hierarchy(pos, &config).unwrap().unwrap().info;
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index 33bed9501a..fd462d003d 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -17,7 +17,7 @@ use hir::{
};
use ide_db::{
RootDatabase,
- base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, RootQueryDb},
+ base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, toolchain_channel},
defs::{Definition, NameClass, NameRefClass},
documentation::{Documentation, HasDocs},
helpers::pick_best_token,
@@ -552,7 +552,7 @@ fn get_doc_base_urls(
.and_then(|it| Url::parse(&it).ok());
let krate = def.krate(db);
let channel = krate
- .and_then(|krate| db.toolchain_channel(krate.into()))
+ .and_then(|krate| toolchain_channel(db, krate.into()))
.unwrap_or(ReleaseChannel::Nightly)
.as_str();
diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs
index 44285d9315..6f4ea70e0a 100644
--- a/crates/ide/src/expand_macro.rs
+++ b/crates/ide/src/expand_macro.rs
@@ -235,7 +235,7 @@ fn _format(
file_id: FileId,
expansion: &str,
) -> Option<String> {
- use ide_db::base_db::RootQueryDb;
+ use ide_db::base_db::relevant_crates;
// hack until we get hygiene working (same character amount to preserve formatting as much as possible)
const DOLLAR_CRATE_REPLACE: &str = "__r_a_";
@@ -250,7 +250,7 @@ fn _format(
};
let expansion = format!("{prefix}{expansion}{suffix}");
- let &crate_id = db.relevant_crates(file_id).iter().next()?;
+ let &crate_id = relevant_crates(db, file_id).iter().next()?;
let edition = crate_id.data(db).edition;
#[allow(clippy::disallowed_methods)]
diff --git a/crates/ide/src/fetch_crates.rs b/crates/ide/src/fetch_crates.rs
index 956379e722..ad5af8bfe1 100644
--- a/crates/ide/src/fetch_crates.rs
+++ b/crates/ide/src/fetch_crates.rs
@@ -1,6 +1,6 @@
use ide_db::{
FileId, FxIndexSet, RootDatabase,
- base_db::{CrateOrigin, RootQueryDb},
+ base_db::{CrateOrigin, all_crates},
};
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -20,7 +20,7 @@ pub struct CrateInfo {
//
// ![Show Dependency Tree](https://user-images.githubusercontent.com/5748995/229394139-2625beab-f4c9-484b-84ed-ad5dee0b1e1a.png)
pub(crate) fn fetch_crates(db: &RootDatabase) -> FxIndexSet<CrateInfo> {
- db.all_crates()
+ all_crates(db)
.iter()
.copied()
.map(|crate_id| (crate_id.data(db), crate_id.extra_data(db)))
diff --git a/crates/ide/src/folding_ranges.rs b/crates/ide/src/folding_ranges.rs
index 3969490e8d..375e42cc83 100644
--- a/crates/ide/src/folding_ranges.rs
+++ b/crates/ide/src/folding_ranges.rs
@@ -1,10 +1,11 @@
use ide_db::{FxHashSet, syntax_helpers::node_ext::vis_eq};
use syntax::{
- Direction, NodeOrToken, SourceFile,
- SyntaxKind::{self, *},
+ Direction, NodeOrToken, SourceFile, SyntaxElement,
+ SyntaxKind::*,
SyntaxNode, TextRange, TextSize,
ast::{self, AstNode, AstToken},
match_ast,
+ syntax_editor::Element,
};
use std::hash::Hash;
@@ -31,19 +32,33 @@ pub enum FoldKind {
TypeAliases,
ExternCrates,
// endregion: item runs
+ Stmt(ast::Stmt),
+ TailExpr(ast::Expr),
}
#[derive(Debug)]
pub struct Fold {
pub range: TextRange,
pub kind: FoldKind,
+ pub collapsed_text: Option<String>,
+}
+
+impl Fold {
+ pub fn new(range: TextRange, kind: FoldKind) -> Self {
+ Self { range, kind, collapsed_text: None }
+ }
+
+ pub fn with_text(mut self, text: Option<String>) -> Self {
+ self.collapsed_text = text;
+ self
+ }
}
// Feature: Folding
//
// Defines folding regions for curly braced blocks, runs of consecutive use, mod, const or static
// items, and `region` / `endregion` comment markers.
-pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
+pub(crate) fn folding_ranges(file: &SourceFile, add_collapsed_text: bool) -> Vec<Fold> {
let mut res = vec![];
let mut visited_comments = FxHashSet::default();
let mut visited_nodes = FxHashSet::default();
@@ -53,39 +68,41 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
for element in file.syntax().descendants_with_tokens() {
// Fold items that span multiple lines
- if let Some(kind) = fold_kind(element.kind()) {
+ if let Some(kind) = fold_kind(element.clone()) {
let is_multiline = match &element {
NodeOrToken::Node(node) => node.text().contains_char('\n'),
NodeOrToken::Token(token) => token.text().contains('\n'),
};
+
if is_multiline {
- // for the func with multiline param list
- if matches!(element.kind(), FN)
- && let NodeOrToken::Node(node) = &element
- && let Some(fn_node) = ast::Fn::cast(node.clone())
+ if let NodeOrToken::Node(node) = &element
+ && let Some(fn_) = ast::Fn::cast(node.clone())
{
- if !fn_node
+ if !fn_
.param_list()
.map(|param_list| param_list.syntax().text().contains_char('\n'))
- .unwrap_or(false)
+ .unwrap_or_default()
{
continue;
}
- if fn_node.body().is_some() {
+ if let Some(body) = fn_.body() {
// Get the actual start of the function (excluding doc comments)
- let fn_start = fn_node
+ let fn_start = fn_
.fn_token()
.map(|token| token.text_range().start())
.unwrap_or(node.text_range().start());
- res.push(Fold {
- range: TextRange::new(fn_start, node.text_range().end()),
- kind: FoldKind::Function,
- });
+ res.push(Fold::new(
+ TextRange::new(fn_start, body.syntax().text_range().end()),
+ FoldKind::Function,
+ ));
continue;
}
}
- res.push(Fold { range: element.text_range(), kind });
+
+ let collapsed_text = if add_collapsed_text { collapsed_text(&kind) } else { None };
+ let fold = Fold::new(element.text_range(), kind).with_text(collapsed_text);
+ res.push(fold);
continue;
}
}
@@ -102,15 +119,15 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
region_starts.push(comment.syntax().text_range().start());
} else if text.starts_with(REGION_END) {
if let Some(region) = region_starts.pop() {
- res.push(Fold {
- range: TextRange::new(region, comment.syntax().text_range().end()),
- kind: FoldKind::Region,
- })
+ res.push(Fold::new(
+ TextRange::new(region, comment.syntax().text_range().end()),
+ FoldKind::Region,
+ ));
}
} else if let Some(range) =
contiguous_range_for_comment(comment, &mut visited_comments)
{
- res.push(Fold { range, kind: FoldKind::Comment })
+ res.push(Fold::new(range, FoldKind::Comment));
}
}
}
@@ -123,37 +140,37 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
module,
&mut visited_nodes,
) {
- res.push(Fold { range, kind: FoldKind::Modules })
+ res.push(Fold::new(range, FoldKind::Modules));
}
},
ast::Use(use_) => {
if let Some(range) = contiguous_range_for_item_group(use_, &mut visited_nodes) {
- res.push(Fold { range, kind: FoldKind::Imports })
+ res.push(Fold::new(range, FoldKind::Imports));
}
},
ast::Const(konst) => {
if let Some(range) = contiguous_range_for_item_group(konst, &mut visited_nodes) {
- res.push(Fold { range, kind: FoldKind::Consts })
+ res.push(Fold::new(range, FoldKind::Consts));
}
},
ast::Static(statik) => {
if let Some(range) = contiguous_range_for_item_group(statik, &mut visited_nodes) {
- res.push(Fold { range, kind: FoldKind::Statics })
+ res.push(Fold::new(range, FoldKind::Statics));
}
},
ast::TypeAlias(alias) => {
if let Some(range) = contiguous_range_for_item_group(alias, &mut visited_nodes) {
- res.push(Fold { range, kind: FoldKind::TypeAliases })
+ res.push(Fold::new(range, FoldKind::TypeAliases));
}
},
ast::ExternCrate(extern_crate) => {
if let Some(range) = contiguous_range_for_item_group(extern_crate, &mut visited_nodes) {
- res.push(Fold { range, kind: FoldKind::ExternCrates })
+ res.push(Fold::new(range, FoldKind::ExternCrates));
}
},
ast::MatchArm(match_arm) => {
if let Some(range) = fold_range_for_multiline_match_arm(match_arm) {
- res.push(Fold {range, kind: FoldKind::MatchArm})
+ res.push(Fold::new(range, FoldKind::MatchArm));
}
},
_ => (),
@@ -166,8 +183,66 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
res
}
-fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
+fn collapsed_text(kind: &FoldKind) -> Option<String> {
match kind {
+ FoldKind::TailExpr(expr) => collapse_expr(expr.clone()),
+ FoldKind::Stmt(stmt) => {
+ match stmt {
+ ast::Stmt::ExprStmt(expr_stmt) => {
+ expr_stmt.expr().and_then(collapse_expr).map(|text| format!("{text};"))
+ }
+ ast::Stmt::LetStmt(let_stmt) => 'blk: {
+ if let_stmt.let_else().is_some() {
+ break 'blk None;
+ }
+
+ let Some(expr) = let_stmt.initializer() else {
+ break 'blk None;
+ };
+
+                // Only collapse when everything up to the `=` token sits on a single line;
+                // the initializer itself may span multiple lines, since the formatter
+                // often moves it onto a new line for readability.
+ //
+ // Example:
+ // ```rust
+ // let complex_pat =
+ // complex_expr;
+ // ```
+ //
+ // In this case, we should generate the collapsed text.
+ let Some(eq_token) = let_stmt.eq_token() else {
+ break 'blk None;
+ };
+ let eq_token_offset =
+ eq_token.text_range().end() - let_stmt.syntax().text_range().start();
+ let text_until_eq_token = let_stmt.syntax().text().slice(..eq_token_offset);
+ if text_until_eq_token.contains_char('\n') {
+ break 'blk None;
+ }
+
+ collapse_expr(expr).map(|text| format!("{text_until_eq_token} {text};"))
+ }
+            // Item statements (nested `fn`s, `struct`s, etc.) have no collapsed form.
+ ast::Stmt::Item(_) => None,
+ }
+ }
+ _ => None,
+ }
+}
+
+fn fold_kind(element: SyntaxElement) -> Option<FoldKind> {
+ // handle tail_expr
+ if let Some(node) = element.as_node()
+ // tail_expr -> stmt_list -> block
+ && let Some(block) = node.parent().and_then(|it| it.parent()).and_then(ast::BlockExpr::cast)
+ && let Some(tail_expr) = block.tail_expr()
+ && tail_expr.syntax() == node
+ {
+ return Some(FoldKind::TailExpr(tail_expr));
+ }
+
+ match element.kind() {
COMMENT => Some(FoldKind::Comment),
ARG_LIST | PARAM_LIST | GENERIC_ARG_LIST | GENERIC_PARAM_LIST => Some(FoldKind::ArgList),
ARRAY_EXPR => Some(FoldKind::Array),
@@ -185,10 +260,73 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
| MATCH_ARM_LIST
| VARIANT_LIST
| TOKEN_TREE => Some(FoldKind::Block),
+ EXPR_STMT | LET_STMT => Some(FoldKind::Stmt(ast::Stmt::cast(element.as_node()?.clone())?)),
_ => None,
}
}
+const COLLAPSE_EXPR_MAX_LEN: usize = 100;
+
+fn collapse_expr(expr: ast::Expr) -> Option<String> {
+ let mut text = String::with_capacity(COLLAPSE_EXPR_MAX_LEN * 2);
+
+ let mut preorder = expr.syntax().preorder_with_tokens();
+ while let Some(element) = preorder.next() {
+ match element {
+ syntax::WalkEvent::Enter(NodeOrToken::Node(node)) => {
+ if let Some(arg_list) = ast::ArgList::cast(node.clone()) {
+ let content = if arg_list.args().next().is_some() { "(…)" } else { "()" };
+ text.push_str(content);
+ preorder.skip_subtree();
+ } else if let Some(expr) = ast::Expr::cast(node) {
+ match expr {
+ ast::Expr::AwaitExpr(_)
+ | ast::Expr::BecomeExpr(_)
+ | ast::Expr::BinExpr(_)
+ | ast::Expr::BreakExpr(_)
+ | ast::Expr::CallExpr(_)
+ | ast::Expr::CastExpr(_)
+ | ast::Expr::ContinueExpr(_)
+ | ast::Expr::FieldExpr(_)
+ | ast::Expr::IndexExpr(_)
+ | ast::Expr::LetExpr(_)
+ | ast::Expr::Literal(_)
+ | ast::Expr::MethodCallExpr(_)
+ | ast::Expr::OffsetOfExpr(_)
+ | ast::Expr::ParenExpr(_)
+ | ast::Expr::PathExpr(_)
+ | ast::Expr::PrefixExpr(_)
+ | ast::Expr::RangeExpr(_)
+ | ast::Expr::RefExpr(_)
+ | ast::Expr::ReturnExpr(_)
+ | ast::Expr::TryExpr(_)
+ | ast::Expr::UnderscoreExpr(_)
+ | ast::Expr::YeetExpr(_)
+ | ast::Expr::YieldExpr(_) => {}
+
+ // Some other exprs (e.g. `while` loop) are too complex to have a collapsed text
+ _ => return None,
+ }
+ }
+ }
+ syntax::WalkEvent::Enter(NodeOrToken::Token(token)) => {
+ if !token.kind().is_trivia() {
+ text.push_str(token.text());
+ }
+ }
+ syntax::WalkEvent::Leave(_) => {}
+ }
+
+ if text.len() > COLLAPSE_EXPR_MAX_LEN {
+ return None;
+ }
+ }
+
+ text.shrink_to_fit();
+
+ Some(text)
+}
+
fn contiguous_range_for_item_group<N>(
first: N,
visited: &mut FxHashSet<SyntaxNode>,
@@ -297,7 +435,7 @@ fn contiguous_range_for_comment(
}
fn fold_range_for_multiline_match_arm(match_arm: ast::MatchArm) -> Option<TextRange> {
- if fold_kind(match_arm.expr()?.syntax().kind()).is_some() {
+ if fold_kind(match_arm.expr()?.syntax().syntax_element()).is_some() {
None
} else if match_arm.expr()?.syntax().text().contains_char('\n') {
Some(match_arm.expr()?.syntax().text_range())
@@ -314,10 +452,33 @@ mod tests {
#[track_caller]
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
+ check_inner(ra_fixture, true);
+ }
+
+ fn check_without_collapsed_text(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
+ check_inner(ra_fixture, false);
+ }
+
+ fn check_inner(ra_fixture: &str, enable_collapsed_text: bool) {
let (ranges, text) = extract_tags(ra_fixture, "fold");
+ let ranges: Vec<_> = ranges
+ .into_iter()
+ .map(|(range, text)| {
+ let (attr, collapsed_text) = match text {
+ Some(text) => match text.split_once(':') {
+ Some((attr, collapsed_text)) => {
+ (Some(attr.to_owned()), Some(collapsed_text.to_owned()))
+ }
+ None => (Some(text), None),
+ },
+ None => (None, None),
+ };
+ (range, attr, collapsed_text)
+ })
+ .collect();
let parse = SourceFile::parse(&text, span::Edition::CURRENT);
- let mut folds = folding_ranges(&parse.tree());
+ let mut folds = folding_ranges(&parse.tree(), enable_collapsed_text);
folds.sort_by_key(|fold| (fold.range.start(), fold.range.end()));
assert_eq!(
@@ -326,7 +487,7 @@ mod tests {
"The amount of folds is different than the expected amount"
);
- for (fold, (range, attr)) in folds.iter().zip(ranges.into_iter()) {
+ for (fold, (range, attr, collapsed_text)) in folds.iter().zip(ranges.into_iter()) {
assert_eq!(fold.range.start(), range.start(), "mismatched start of folding ranges");
assert_eq!(fold.range.end(), range.end(), "mismatched end of folding ranges");
@@ -346,8 +507,15 @@ mod tests {
FoldKind::MatchArm => "matcharm",
FoldKind::Function => "function",
FoldKind::ExternCrates => "externcrates",
+ FoldKind::Stmt(_) => "stmt",
+ FoldKind::TailExpr(_) => "tailexpr",
};
assert_eq!(kind, &attr.unwrap());
+ if enable_collapsed_text {
+ assert_eq!(fold.collapsed_text, collapsed_text);
+ } else {
+ assert_eq!(fold.collapsed_text, None);
+ }
}
}
@@ -511,10 +679,10 @@ macro_rules! foo <fold block>{
check(
r#"
fn main() <fold block>{
- match 0 <fold block>{
+ <fold tailexpr>match 0 <fold block>{
0 => 0,
_ => 1,
- }</fold>
+ }</fold></fold>
}</fold>
"#,
);
@@ -525,7 +693,7 @@ fn main() <fold block>{
check(
r#"
fn main() <fold block>{
- match foo <fold block>{
+ <fold tailexpr>match foo <fold block>{
block => <fold block>{
}</fold>,
matcharm => <fold matcharm>some.
@@ -544,7 +712,7 @@ fn main() <fold block>{
structS => <fold matcharm>StructS <fold block>{
a: 31,
}</fold></fold>,
- }</fold>
+ }</fold></fold>
}</fold>
"#,
)
@@ -555,11 +723,11 @@ fn main() <fold block>{
check(
r#"
fn main() <fold block>{
- frobnicate<fold arglist>(
+ <fold tailexpr:frobnicate(…)>frobnicate<fold arglist>(
1,
2,
3,
- )</fold>
+ )</fold></fold>
}</fold>
"#,
)
@@ -698,4 +866,51 @@ type Foo<T, U> = foo<fold arglist><
"#,
);
}
+
+ #[test]
+ fn test_fold_tail_expr() {
+ check(
+ r#"
+fn f() <fold block>{
+ let x = 1;
+
+ <fold tailexpr:some_function().chain().method()>some_function()
+ .chain()
+ .method()</fold>
+}</fold>
+"#,
+ )
+ }
+
+ #[test]
+ fn test_fold_let_stmt_with_chained_methods() {
+ check(
+ r#"
+fn main() <fold block>{
+ <fold stmt:let result = some_value.method1().method2()?.method3();>let result = some_value
+ .method1()
+ .method2()?
+ .method3();</fold>
+
+ println!("{}", result);
+}</fold>
+"#,
+ )
+ }
+
+ #[test]
+ fn test_fold_let_stmt_with_chained_methods_without_collapsed_text() {
+ check_without_collapsed_text(
+ r#"
+fn main() <fold block>{
+ <fold stmt>let result = some_value
+ .method1()
+ .method2()?
+ .method3();</fold>
+
+ println!("{}", result);
+}</fold>
+"#,
+ )
+ }
}
diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs
index 375ce94bf6..d2b47a37c7 100644
--- a/crates/ide/src/goto_declaration.rs
+++ b/crates/ide/src/goto_declaration.rs
@@ -79,13 +79,13 @@ pub(crate) fn goto_declaration(
#[cfg(test)]
mod tests {
- use ide_db::{FileRange, MiniCore};
+ use ide_db::{FileRange, ra_fixture::RaFixtureConfig};
use itertools::Itertools;
use crate::{GotoDefinitionConfig, fixture};
const TEST_CONFIG: GotoDefinitionConfig<'_> =
- GotoDefinitionConfig { minicore: MiniCore::default() };
+ GotoDefinitionConfig { ra_fixture: RaFixtureConfig::default() };
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
let (analysis, position, expected) = fixture::annotations(ra_fixture);
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index 3890bcad7f..4cdf0eac75 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -9,7 +9,7 @@ use crate::{
use hir::{
AsAssocItem, AssocItem, CallableKind, FileRange, HasCrate, InFile, ModuleDef, Semantics, sym,
};
-use ide_db::{MiniCore, ra_fixture::UpmapFromRaFixture};
+use ide_db::ra_fixture::{RaFixtureConfig, UpmapFromRaFixture};
use ide_db::{
RootDatabase, SymbolKind,
base_db::{AnchoredPath, SourceDatabase},
@@ -26,7 +26,7 @@ use syntax::{
#[derive(Debug)]
pub struct GotoDefinitionConfig<'a> {
- pub minicore: MiniCore<'a>,
+ pub ra_fixture: RaFixtureConfig<'a>,
}
// Feature: Go to Definition
@@ -105,7 +105,7 @@ pub(crate) fn goto_definition(
if let Some(token) = ast::String::cast(token.value.clone())
&& let Some(original_token) = ast::String::cast(original_token.clone())
&& let Some((analysis, fixture_analysis)) =
- Analysis::from_ra_fixture(sema, original_token, &token, config.minicore)
+ Analysis::from_ra_fixture(sema, original_token, &token, &config.ra_fixture)
&& let Some((virtual_file_id, file_offset)) = fixture_analysis.map_offset_down(offset)
{
return hir::attach_db_allow_change(&analysis.db, || {
@@ -605,11 +605,11 @@ fn expr_to_nav(
#[cfg(test)]
mod tests {
use crate::{GotoDefinitionConfig, fixture};
- use ide_db::{FileRange, MiniCore};
+ use ide_db::{FileRange, ra_fixture::RaFixtureConfig};
use itertools::Itertools;
const TEST_CONFIG: GotoDefinitionConfig<'_> =
- GotoDefinitionConfig { minicore: MiniCore::default() };
+ GotoDefinitionConfig { ra_fixture: RaFixtureConfig::default() };
#[track_caller]
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index 958de8930d..df1fcecc99 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -8,11 +8,11 @@ use std::{iter, ops::Not};
use either::Either;
use hir::{DisplayTarget, GenericDef, GenericSubstitution, HasCrate, HasSource, Semantics};
use ide_db::{
- FileRange, FxIndexSet, MiniCore, Ranker, RootDatabase,
+ FileRange, FxIndexSet, Ranker, RootDatabase,
defs::{Definition, IdentClass, NameRefClass, OperatorClass},
famous_defs::FamousDefs,
helpers::pick_best_token,
- ra_fixture::UpmapFromRaFixture,
+ ra_fixture::{RaFixtureConfig, UpmapFromRaFixture},
};
use itertools::{Itertools, multizip};
use macros::UpmapFromRaFixture;
@@ -44,7 +44,7 @@ pub struct HoverConfig<'a> {
pub max_enum_variants_count: Option<usize>,
pub max_subst_ty_len: SubstTyLen,
pub show_drop_glue: bool,
- pub minicore: MiniCore<'a>,
+ pub ra_fixture: RaFixtureConfig<'a>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -221,7 +221,7 @@ fn hover_offset(
if let Some(literal) = ast::String::cast(original_token.clone())
&& let Some((analysis, fixture_analysis)) =
- Analysis::from_ra_fixture(sema, literal.clone(), &literal, config.minicore)
+ Analysis::from_ra_fixture(sema, literal.clone(), &literal, &config.ra_fixture)
{
let (virtual_file_id, virtual_offset) = fixture_analysis.map_offset_down(offset)?;
return analysis
@@ -422,7 +422,7 @@ fn hover_ranged(
Either::Left(ast::Expr::Literal(literal)) => {
if let Some(literal) = ast::String::cast(literal.token())
&& let Some((analysis, fixture_analysis)) =
- Analysis::from_ra_fixture(sema, literal.clone(), &literal, config.minicore)
+ Analysis::from_ra_fixture(sema, literal.clone(), &literal, &config.ra_fixture)
{
let (virtual_file_id, virtual_range) = fixture_analysis.map_range_down(range)?;
return analysis
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index 7fbbc576dd..9c53b05539 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -1,5 +1,5 @@
use expect_test::{Expect, expect};
-use ide_db::{FileRange, MiniCore, base_db::SourceDatabase};
+use ide_db::{FileRange, base_db::SourceDatabase, ra_fixture::RaFixtureConfig};
use syntax::TextRange;
use crate::{
@@ -25,7 +25,7 @@ const HOVER_BASE_CONFIG: HoverConfig<'_> = HoverConfig {
max_enum_variants_count: Some(5),
max_subst_ty_len: super::SubstTyLen::Unlimited,
show_drop_glue: true,
- minicore: MiniCore::default(),
+ ra_fixture: RaFixtureConfig::default(),
};
fn check_hover_no_result(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
@@ -3526,9 +3526,6 @@ fn foo_$0test() {}
test_id: Path(
"foo_test",
),
- attr: TestAttr {
- ignore: false,
- },
},
cfg: None,
update_test: UpdateTest {
@@ -10707,9 +10704,6 @@ macro_rules! str {
test_id: Path(
"test",
),
- attr: TestAttr {
- ignore: false,
- },
},
cfg: None,
update_test: UpdateTest {
@@ -10778,9 +10772,6 @@ pub use expect_test;
test_id: Path(
"test",
),
- attr: TestAttr {
- ignore: false,
- },
},
cfg: None,
update_test: UpdateTest {
@@ -11404,3 +11395,299 @@ pub trait MyTrait {
"#]],
);
}
+
+#[test]
+fn test_hover_doc_attr_macro_generated_method_stringify_self_ty() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! concat {}
+
+#[rustc_builtin_macro]
+macro_rules! stringify {}
+
+macro_rules! bar {
+ ($SelfT:ident) => {
+ struct $SelfT;
+ impl $SelfT {
+ #[doc = concat!("Do the foo for ", stringify!($SelfT))]
+ fn foo(&self) {}
+ }
+ }
+}
+
+bar!(Bar);
+
+fn foo() { let bar = Bar; bar.fo$0o(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ ra_test_fixture::Bar
+ ```
+
+ ```rust
+ fn foo(&self)
+ ```
+
+ ---
+
+ Do the foo for Bar
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_doc_attr_macro_argument_expr_issue_7688() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! concat {}
+
+macro_rules! doc_comment {
+ ($x:expr, $($tt:tt)*) => {
+ #[doc = $x]
+ $($tt)*
+ };
+}
+
+doc_comment! {
+ concat!("Hello", " world"),
+ struct Ba$0r;
+}
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ struct Bar
+ ```
+
+ ---
+
+ size = 0, align = 1, no Drop
+
+ ---
+
+ Hello world
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_doc_attr_concat_macro() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! concat {}
+
+#[doc = concat!("Hello", " ", "World")]
+struct Ba$0r;
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ struct Bar
+ ```
+
+ ---
+
+ size = 0, align = 1, no Drop
+
+ ---
+
+ Hello World
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_doc_attr_include_str_macro() {
+ check(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include_str {}
+
+#[doc = include_str!("docs.md")]
+struct Ba$0r;
+
+//- /docs.md
+Included docs from file.
+Multiple lines of docs.
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ struct Bar
+ ```
+
+ ---
+
+ size = 0, align = 1, no Drop
+
+ ---
+
+ Included docs from file.
+ Multiple lines of docs.
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_doc_attr_user_macro_returning_string() {
+ check(
+ r#"
+macro_rules! doc_str {
+ () => { "Documentation from macro" };
+}
+
+#[doc = doc_str!()]
+struct Ba$0r;
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ struct Bar
+ ```
+
+ ---
+
+ size = 0, align = 1, no Drop
+
+ ---
+
+ Documentation from macro
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_doc_attr_mixed_literal_and_macro() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! concat {}
+
+/// First line
+#[doc = concat!("Second", " line")]
+struct Ba$0r;
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ struct Bar
+ ```
+
+ ---
+
+ size = 0, align = 1, no Drop
+
+ ---
+
+ First line
+ Second line
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_doc_attr_field_with_macro() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! concat {}
+
+struct Bar {
+ #[doc = concat!("field", " docs")]
+ ba$0z: i32,
+}
+"#,
+ expect![[r#"
+ *baz*
+
+ ```rust
+ ra_test_fixture::Bar
+ ```
+
+ ```rust
+ baz: i32
+ ```
+
+ ---
+
+ size = 4, align = 4, offset = 0, no Drop
+
+ ---
+
+ field docs
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_doc_attr_macro_on_outlined_mod() {
+ // Outer doc-macro on `mod foo;` resolves from inside the module's scope
+ // (matching rustc behavior), and combines with inner `//!` docs from the module file.
+ check(
+ r#"
+//- /main.rs
+mod mac {
+ macro_rules! doc_str {
+ () => { "expanded from macro" };
+ }
+ pub(crate) use doc_str;
+}
+
+/// plain outer doc
+#[doc = super::mac::doc_str!()]
+mod foo$0;
+
+//- /foo.rs
+//! inner module docs
+pub struct Bar;
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ mod foo
+ ```
+
+ ---
+
+ plain outer doc
+ expanded from macro
+ inner module docs
+ "#]],
+ );
+}
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index a58dc6f030..f51d7f5207 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -9,7 +9,8 @@ use hir::{
HirDisplay, HirDisplayError, HirWrite, InRealFile, ModuleDef, ModuleDefId, Semantics, sym,
};
use ide_db::{
- FileRange, MiniCore, RootDatabase, famous_defs::FamousDefs, text_edit::TextEditBuilder,
+ FileRange, RootDatabase, famous_defs::FamousDefs, ra_fixture::RaFixtureConfig,
+ text_edit::TextEditBuilder,
};
use ide_db::{FxHashSet, text_edit::TextEdit};
use itertools::Itertools;
@@ -302,6 +303,7 @@ fn hints(
pub struct InlayHintsConfig<'a> {
pub render_colons: bool,
pub type_hints: bool,
+ pub type_hints_placement: TypeHintsPlacement,
pub sized_bound: bool,
pub discriminant_hints: DiscriminantHints,
pub parameter_hints: bool,
@@ -328,7 +330,13 @@ pub struct InlayHintsConfig<'a> {
pub max_length: Option<usize>,
pub closing_brace_hints_min_lines: Option<usize>,
pub fields_to_resolve: InlayFieldsToResolve,
- pub minicore: MiniCore<'a>,
+ pub ra_fixture: RaFixtureConfig<'a>,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum TypeHintsPlacement {
+ Inline,
+ EndOfLine,
}
impl InlayHintsConfig<'_> {
@@ -899,7 +907,7 @@ mod tests {
use expect_test::Expect;
use hir::ClosureStyle;
- use ide_db::MiniCore;
+ use ide_db::ra_fixture::RaFixtureConfig;
use itertools::Itertools;
use test_utils::extract_annotations;
@@ -907,12 +915,15 @@ mod tests {
use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode};
use crate::{LifetimeElisionHints, fixture, inlay_hints::InlayHintsConfig};
- use super::{ClosureReturnTypeHints, GenericParameterHints, InlayFieldsToResolve};
+ use super::{
+ ClosureReturnTypeHints, GenericParameterHints, InlayFieldsToResolve, TypeHintsPlacement,
+ };
pub(super) const DISABLED_CONFIG: InlayHintsConfig<'_> = InlayHintsConfig {
discriminant_hints: DiscriminantHints::Never,
render_colons: false,
type_hints: false,
+ type_hints_placement: TypeHintsPlacement::Inline,
parameter_hints: false,
parameter_hints_for_missing_arguments: false,
sized_bound: false,
@@ -942,10 +953,11 @@ mod tests {
implicit_drop_hints: false,
implied_dyn_trait_hints: false,
range_exclusive_hints: false,
- minicore: MiniCore::default(),
+ ra_fixture: RaFixtureConfig::default(),
};
pub(super) const TEST_CONFIG: InlayHintsConfig<'_> = InlayHintsConfig {
type_hints: true,
+ type_hints_placement: TypeHintsPlacement::Inline,
parameter_hints: true,
chaining_hints: true,
closure_return_type_hints: ClosureReturnTypeHints::WithBlock,
diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs
index caf7cc714d..b901c6b67d 100644
--- a/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/crates/ide/src/inlay_hints/bind_pat.rs
@@ -8,10 +8,12 @@ use ide_db::{RootDatabase, famous_defs::FamousDefs};
use itertools::Itertools;
use syntax::{
+ TextRange,
ast::{self, AstNode, HasGenericArgs, HasName},
match_ast,
};
+use super::TypeHintsPlacement;
use crate::{
InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind,
inlay_hints::{closure_has_block_body, label_of_ty, ty_to_text_edit},
@@ -29,6 +31,7 @@ pub(super) fn hints(
}
let parent = pat.syntax().parent()?;
+ let mut enclosing_let_stmt = None;
let type_ascriptable = match_ast! {
match parent {
ast::Param(it) => {
@@ -41,6 +44,7 @@ pub(super) fn hints(
Some(it.colon_token())
},
ast::LetStmt(it) => {
+ enclosing_let_stmt = Some(it.clone());
if config.hide_closure_initialization_hints
&& let Some(ast::Expr::ClosureExpr(closure)) = it.initializer()
&& closure_has_block_body(&closure) {
@@ -101,16 +105,26 @@ pub(super) fn hints(
Some(name) => name.syntax().text_range(),
None => pat.syntax().text_range(),
};
+ let mut range = match type_ascriptable {
+ Some(Some(t)) => text_range.cover(t.text_range()),
+ _ => text_range,
+ };
+
+ let mut pad_left = !render_colons;
+ if matches!(config.type_hints_placement, TypeHintsPlacement::EndOfLine)
+ && let Some(let_stmt) = enclosing_let_stmt
+ {
+ let stmt_range = let_stmt.syntax().text_range();
+ range = TextRange::new(range.start(), stmt_range.end());
+ pad_left = true;
+ }
acc.push(InlayHint {
- range: match type_ascriptable {
- Some(Some(t)) => text_range.cover(t.text_range()),
- _ => text_range,
- },
+ range,
kind: InlayKind::Type,
label,
text_edit,
position: InlayHintPosition::After,
- pad_left: !render_colons,
+ pad_left,
pad_right: false,
resolve_parent: Some(pat.syntax().text_range()),
});
@@ -182,6 +196,7 @@ mod tests {
use crate::{ClosureReturnTypeHints, fixture, inlay_hints::InlayHintsConfig};
+ use super::TypeHintsPlacement;
use crate::inlay_hints::tests::{
DISABLED_CONFIG, TEST_CONFIG, check, check_edit, check_expect, check_no_edit,
check_with_config,
@@ -205,6 +220,76 @@ fn main() {
}
#[test]
+ fn type_hints_end_of_line_placement() {
+ let mut config = InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG };
+ config.type_hints_placement = TypeHintsPlacement::EndOfLine;
+ check_expect(
+ config,
+ r#"
+fn main() {
+ let foo = 92_i32;
+}
+ "#,
+ expect![[r#"
+ [
+ (
+ 20..33,
+ [
+ "i32",
+ ],
+ ),
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn type_hints_end_of_line_placement_chain_expr() {
+ let mut config = InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG };
+ config.type_hints_placement = TypeHintsPlacement::EndOfLine;
+ check_expect(
+ config,
+ r#"
+fn main() {
+ struct Builder;
+ impl Builder {
+ fn iter(self) -> Builder { Builder }
+ fn map(self) -> Builder { Builder }
+ }
+ fn make() -> Builder { Builder }
+
+ let foo = make()
+ .iter()
+ .map();
+}
+"#,
+ expect![[r#"
+ [
+ (
+ 192..236,
+ [
+ InlayHintLabelPart {
+ text: "Builder",
+ linked_location: Some(
+ Computed(
+ FileRangeWrapper {
+ file_id: FileId(
+ 0,
+ ),
+ range: 23..30,
+ },
+ ),
+ ),
+ tooltip: "",
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
fn type_hints_bindings_after_at() {
check_types(
r#"
diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs
index cf3149c946..4b06f83971 100644
--- a/crates/ide/src/inlay_hints/chaining.rs
+++ b/crates/ide/src/inlay_hints/chaining.rs
@@ -2,13 +2,13 @@
use hir::DisplayTarget;
use ide_db::famous_defs::FamousDefs;
use syntax::{
- Direction, NodeOrToken, SyntaxKind, T,
+ Direction, NodeOrToken, SyntaxKind, T, TextRange,
ast::{self, AstNode},
};
use crate::{InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind};
-use super::label_of_ty;
+use super::{TypeHintsPlacement, label_of_ty};
pub(super) fn hints(
acc: &mut Vec<InlayHint>,
@@ -40,13 +40,14 @@ pub(super) fn hints(
// Chaining can be defined as an expression whose next sibling tokens are newline and dot
// Ignoring extra whitespace and comments
- let next = tokens.next()?.kind();
- if next == SyntaxKind::WHITESPACE {
- let mut next_next = tokens.next()?.kind();
- while next_next == SyntaxKind::WHITESPACE {
- next_next = tokens.next()?.kind();
+ let next_token = tokens.next()?;
+ if next_token.kind() == SyntaxKind::WHITESPACE {
+ let newline_token = next_token;
+ let mut next_next = tokens.next()?;
+ while next_next.kind() == SyntaxKind::WHITESPACE {
+ next_next = tokens.next()?;
}
- if next_next == T![.] {
+ if next_next.kind() == T![.] {
let ty = sema.type_of_expr(desc_expr)?.original;
if ty.is_unknown() {
return None;
@@ -58,8 +59,18 @@ pub(super) fn hints(
return None;
}
let label = label_of_ty(famous_defs, config, &ty, display_target)?;
+ let range = {
+ let mut range = expr.syntax().text_range();
+ if config.type_hints_placement == TypeHintsPlacement::EndOfLine {
+ range = TextRange::new(
+ range.start(),
+ newline_token.text_range().start().max(range.end()),
+ );
+ }
+ range
+ };
acc.push(InlayHint {
- range: expr.syntax().text_range(),
+ range,
kind: InlayKind::Chaining,
label,
text_edit: None,
@@ -79,7 +90,7 @@ mod tests {
use ide_db::text_edit::{TextRange, TextSize};
use crate::{
- InlayHintsConfig, fixture,
+ InlayHintsConfig, TypeHintsPlacement, fixture,
inlay_hints::{
LazyProperty,
tests::{DISABLED_CONFIG, TEST_CONFIG, check_expect, check_with_config},
@@ -686,4 +697,80 @@ fn main() {
"#]],
);
}
+
+ #[test]
+ fn chaining_hints_end_of_line_placement() {
+ check_expect(
+ InlayHintsConfig {
+ chaining_hints: true,
+ type_hints_placement: TypeHintsPlacement::EndOfLine,
+ ..DISABLED_CONFIG
+ },
+ r#"
+fn main() {
+ let baz = make()
+ .into_bar()
+ .into_baz();
+}
+
+struct Foo;
+struct Bar;
+struct Baz;
+
+impl Foo {
+ fn into_bar(self) -> Bar { Bar }
+}
+
+impl Bar {
+ fn into_baz(self) -> Baz { Baz }
+}
+
+fn make() -> Foo {
+ Foo
+}
+"#,
+ expect![[r#"
+ [
+ (
+ 26..52,
+ [
+ InlayHintLabelPart {
+ text: "Bar",
+ linked_location: Some(
+ Computed(
+ FileRangeWrapper {
+ file_id: FileId(
+ 0,
+ ),
+ range: 96..99,
+ },
+ ),
+ ),
+ tooltip: "",
+ },
+ ],
+ ),
+ (
+ 26..32,
+ [
+ InlayHintLabelPart {
+ text: "Foo",
+ linked_location: Some(
+ Computed(
+ FileRangeWrapper {
+ file_id: FileId(
+ 0,
+ ),
+ range: 84..87,
+ },
+ ),
+ ),
+ tooltip: "",
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+ }
}
diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs
index 08588bbed0..8dddf9d37e 100644
--- a/crates/ide/src/inlay_hints/param_name.rs
+++ b/crates/ide/src/inlay_hints/param_name.rs
@@ -37,8 +37,9 @@ pub(super) fn hints(
let hints = callable
.params()
.into_iter()
- .zip(arg_list.args())
+ .zip(arg_list.args_maybe_empty())
.filter_map(|(p, arg)| {
+ let arg = arg?;
// Only annotate hints for expressions that exist in the original file
let range = sema.original_range_opt(arg.syntax())?;
if range.file_id != file_id {
@@ -562,6 +563,19 @@ fn main() {
}
#[test]
+ fn param_name_hints_show_after_empty_arg() {
+ check_params(
+ r#"pub fn test(a: i32, b: i32, c: i32) {}
+fn main() {
+ test(, 2,);
+ //^ b
+ test(, , 3);
+ //^ c
+}"#,
+ )
+ }
+
+ #[test]
fn function_call_parameter_hint() {
check_params(
r#"
diff --git a/crates/ide/src/inlay_hints/ra_fixture.rs b/crates/ide/src/inlay_hints/ra_fixture.rs
index bee1841642..701c8a8612 100644
--- a/crates/ide/src/inlay_hints/ra_fixture.rs
+++ b/crates/ide/src/inlay_hints/ra_fixture.rs
@@ -16,7 +16,7 @@ pub(super) fn hints(
let file_id = file_id.file_id(sema.db);
let literal = ast::String::cast(literal.token())?;
let (analysis, fixture_analysis) =
- Analysis::from_ra_fixture(sema, literal.clone(), &literal, config.minicore)?;
+ Analysis::from_ra_fixture(sema, literal.clone(), &literal, &config.ra_fixture)?;
for virtual_file_id in fixture_analysis.files() {
acc.extend(
analysis
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 81a771fec8..270998cdf7 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -63,15 +63,16 @@ use std::panic::{AssertUnwindSafe, UnwindSafe};
use cfg::CfgOptions;
use fetch_crates::CrateInfo;
use hir::{ChangeWithProcMacros, EditionedFileId, crate_def_map, sym};
+use ide_db::base_db::relevant_crates;
+use ide_db::ra_fixture::RaFixtureAnalysis;
use ide_db::{
FxHashMap, FxIndexSet, LineIndexDatabase,
base_db::{
- CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, VfsPath,
+ CrateOrigin, CrateWorkspaceData, Env, FileSet, SourceDatabase, VfsPath,
salsa::{Cancelled, Database},
},
prime_caches, symbol_index,
};
-use ide_db::{MiniCore, ra_fixture::RaFixtureAnalysis};
use macros::UpmapFromRaFixture;
use syntax::{AstNode, SourceFile, ast};
use triomphe::Arc;
@@ -96,7 +97,7 @@ pub use crate::{
AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints,
GenericParameterHints, InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart,
InlayHintPosition, InlayHintsConfig, InlayKind, InlayTooltip, LazyProperty,
- LifetimeElisionHints,
+ LifetimeElisionHints, TypeHintsPlacement,
},
join_lines::JoinLinesConfig,
markup::Markup,
@@ -135,6 +136,7 @@ pub use ide_db::{
label::Label,
line_index::{LineCol, LineIndex},
prime_caches::ParallelPrimeCachesProgress,
+ ra_fixture::RaFixtureConfig,
search::{ReferenceCategory, SearchScope},
source_change::{FileSystemEdit, SnippetEdit, SourceChange},
symbol_index::Query,
@@ -289,9 +291,9 @@ impl Analysis {
sema: &Semantics<'_, RootDatabase>,
literal: ast::String,
expanded: &ast::String,
- minicore: MiniCore<'_>,
+ config: &RaFixtureConfig<'_>,
) -> Option<(Analysis, RaFixtureAnalysis)> {
- Self::from_ra_fixture_with_on_cursor(sema, literal, expanded, minicore, &mut |_| {})
+ Self::from_ra_fixture_with_on_cursor(sema, literal, expanded, config, &mut |_| {})
}
/// Like [`Analysis::from_ra_fixture()`], but also calls `on_cursor` with the cursor position.
@@ -299,11 +301,11 @@ impl Analysis {
sema: &Semantics<'_, RootDatabase>,
literal: ast::String,
expanded: &ast::String,
- minicore: MiniCore<'_>,
+ config: &RaFixtureConfig<'_>,
on_cursor: &mut dyn FnMut(TextRange),
) -> Option<(Analysis, RaFixtureAnalysis)> {
let analysis =
- RaFixtureAnalysis::analyze_ra_fixture(sema, literal, expanded, minicore, on_cursor)?;
+ RaFixtureAnalysis::analyze_ra_fixture(sema, literal, expanded, config, on_cursor)?;
Some((Analysis { db: analysis.db.clone() }, analysis))
}
@@ -341,7 +343,7 @@ impl Analysis {
self.with_db(|db| {
let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
- db.parse(editioned_file_id_wrapper).tree()
+ editioned_file_id_wrapper.parse(db).tree()
})
}
@@ -369,7 +371,7 @@ impl Analysis {
pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
self.with_db(|db| {
let file_id = EditionedFileId::current_edition(&self.db, position.file_id);
- let parse = db.parse(file_id);
+ let parse = file_id.parse(db);
let file = parse.tree();
matching_brace::matching_brace(&file, position.offset)
})
@@ -412,7 +414,7 @@ impl Analysis {
}
/// Renders the crate graph to GraphViz "dot" syntax.
- pub fn view_crate_graph(&self, full: bool) -> Cancellable<Result<String, String>> {
+ pub fn view_crate_graph(&self, full: bool) -> Cancellable<String> {
self.with_db(|db| view_crate_graph::view_crate_graph(db, full))
}
@@ -430,7 +432,7 @@ impl Analysis {
self.with_db(|db| {
let editioned_file_id_wrapper =
EditionedFileId::current_edition(&self.db, frange.file_id);
- let parse = db.parse(editioned_file_id_wrapper);
+ let parse = editioned_file_id_wrapper.parse(db);
join_lines::join_lines(config, &parse.tree(), frange.range)
})
}
@@ -471,7 +473,7 @@ impl Analysis {
// FIXME: Edition
self.with_db(|db| {
let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
- let source_file = db.parse(editioned_file_id_wrapper).tree();
+ let source_file = editioned_file_id_wrapper.parse(db).tree();
file_structure::file_structure(&source_file, config)
})
}
@@ -499,11 +501,14 @@ impl Analysis {
}
/// Returns the set of folding ranges.
- pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
+ pub fn folding_ranges(&self, file_id: FileId, collapsed_text: bool) -> Cancellable<Vec<Fold>> {
self.with_db(|db| {
let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
- folding_ranges::folding_ranges(&db.parse(editioned_file_id_wrapper).tree())
+ folding_ranges::folding_ranges(
+ &editioned_file_id_wrapper.parse(db).tree(),
+ collapsed_text,
+ )
})
}
@@ -654,7 +659,7 @@ impl Analysis {
/// Returns crates that this file *might* belong to.
pub fn relevant_crates_for(&self, file_id: FileId) -> Cancellable<Vec<Crate>> {
- self.with_db(|db| db.relevant_crates(file_id).iter().copied().collect())
+ self.with_db(|db| relevant_crates(db, file_id).to_vec())
}
/// Returns the edition of the given crate.
diff --git a/crates/ide/src/matching_brace.rs b/crates/ide/src/matching_brace.rs
index b2b91d6e3c..5079b0c4f9 100644
--- a/crates/ide/src/matching_brace.rs
+++ b/crates/ide/src/matching_brace.rs
@@ -17,25 +17,37 @@ use syntax::{
pub(crate) fn matching_brace(file: &SourceFile, offset: TextSize) -> Option<TextSize> {
const BRACES: &[SyntaxKind] =
&[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>], T![|], T![|]];
- let (brace_token, brace_idx) = file
- .syntax()
- .token_at_offset(offset)
+ let current = file.syntax().token_at_offset(offset);
+ if let Some((brace_token, brace_idx)) = current
+ .clone()
.filter_map(|node| {
let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
Some((node, idx))
})
- .last()?;
- let parent = brace_token.parent()?;
- if brace_token.kind() == T![|] && !ast::ParamList::can_cast(parent.kind()) {
- cov_mark::hit!(pipes_not_braces);
- return None;
+ .last()
+ {
+ let parent = brace_token.parent()?;
+ if brace_token.kind() == T![|] && !ast::ParamList::can_cast(parent.kind()) {
+ cov_mark::hit!(pipes_not_braces);
+ return None;
+ }
+ let matching_kind = BRACES[brace_idx ^ 1];
+ let matching_node = parent
+ .children_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|node| node.kind() == matching_kind && node != &brace_token)?;
+ Some(matching_node.text_range().start())
+ } else {
+ // when the offset is not at a brace, find first parent
+ current.last()?.parent_ancestors().find_map(|x| {
+ x.children_with_tokens()
+ .filter_map(|it| it.into_token())
+ // with ending brace
+ .filter(|node| BRACES.contains(&node.kind()))
+ .last()
+ .map(|x| x.text_range().start())
+ })
}
- let matching_kind = BRACES[brace_idx ^ 1];
- let matching_node = parent
- .children_with_tokens()
- .filter_map(|it| it.into_token())
- .find(|node| node.kind() == matching_kind && node != &brace_token)?;
- Some(matching_node.text_range().start())
}
#[cfg(test)]
@@ -64,6 +76,14 @@ mod tests {
"fn func(x) { return (2 * (x + 3)$0) + 5;}",
"fn func(x) { return $0(2 * (x + 3)) + 5;}",
);
+ do_check(
+ "fn func(x) { return (2 * (x $0+ 3)) + 5;}",
+ "fn func(x) { return (2 * (x + 3$0)) + 5;}",
+ );
+ do_check(
+ "fn func(x) { re$0turn (2 * (x + 3)) + 5;}",
+ "fn func(x) { return (2 * (x + 3)) + 5;$0}",
+ );
{
cov_mark::check!(pipes_not_braces);
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs
index 92020321f4..99f8634bcb 100644
--- a/crates/ide/src/navigation_target.rs
+++ b/crates/ide/src/navigation_target.rs
@@ -11,7 +11,7 @@ use hir::{
};
use ide_db::{
FileId, FileRange, RootDatabase, SymbolKind,
- base_db::{CrateOrigin, LangCrateOrigin, RootQueryDb},
+ base_db::{CrateOrigin, LangCrateOrigin, all_crates},
defs::{Definition, find_std_module},
documentation::{Documentation, HasDocs},
famous_defs::FamousDefs,
@@ -861,8 +861,7 @@ impl TryToNav for hir::BuiltinType {
sema: &Semantics<'_, RootDatabase>,
) -> Option<UpmappingResult<NavigationTarget>> {
let db = sema.db;
- let krate = db
- .all_crates()
+ let krate = all_crates(db)
.iter()
.copied()
.find(|&krate| matches!(krate.data(db).origin, CrateOrigin::Lang(LangCrateOrigin::Std)))
diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs
index 96d829d126..509ec2ab40 100644
--- a/crates/ide/src/parent_module.rs
+++ b/crates/ide/src/parent_module.rs
@@ -1,7 +1,7 @@
use hir::{Semantics, crate_def_map};
use ide_db::{
FileId, FilePosition, RootDatabase,
- base_db::{Crate, RootQueryDb},
+ base_db::{Crate, relevant_crates},
};
use itertools::Itertools;
use syntax::{
@@ -53,7 +53,7 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
/// This returns `Vec` because a module may be included from several places.
pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<Crate> {
- db.relevant_crates(file_id)
+ relevant_crates(db, file_id)
.iter()
.copied()
.filter(|&crate_id| {
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index 9392651c17..0288099bbc 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -19,10 +19,10 @@
use hir::{PathResolution, Semantics};
use ide_db::{
- FileId, MiniCore, RootDatabase,
+ FileId, RootDatabase,
defs::{Definition, NameClass, NameRefClass},
helpers::pick_best_token,
- ra_fixture::UpmapFromRaFixture,
+ ra_fixture::{RaFixtureConfig, UpmapFromRaFixture},
search::{ReferenceCategory, SearchScope, UsageSearchResult},
};
use itertools::Itertools;
@@ -90,7 +90,7 @@ pub struct Declaration {
#[derive(Debug)]
pub struct FindAllRefsConfig<'a> {
pub search_scope: Option<SearchScope>,
- pub minicore: MiniCore<'a>,
+ pub ra_fixture: RaFixtureConfig<'a>,
}
/// Find all references to the item at the given position.
@@ -179,7 +179,7 @@ pub(crate) fn find_all_refs(
if let Some(token) = syntax.token_at_offset(position.offset).left_biased()
&& let Some(token) = ast::String::cast(token.clone())
&& let Some((analysis, fixture_analysis)) =
- Analysis::from_ra_fixture(sema, token.clone(), &token, config.minicore)
+ Analysis::from_ra_fixture(sema, token.clone(), &token, &config.ra_fixture)
&& let Some((virtual_file_id, file_offset)) =
fixture_analysis.map_offset_down(position.offset)
{
@@ -462,7 +462,7 @@ fn handle_control_flow_keywords(
mod tests {
use expect_test::{Expect, expect};
use hir::EditionedFileId;
- use ide_db::{FileId, MiniCore, RootDatabase};
+ use ide_db::{FileId, RootDatabase, ra_fixture::RaFixtureConfig};
use stdx::format_to;
use crate::{SearchScope, fixture, references::FindAllRefsConfig};
@@ -1567,7 +1567,7 @@ fn main() {
let (analysis, pos) = fixture::position(ra_fixture);
let config = FindAllRefsConfig {
search_scope: search_scope.map(|it| it(&analysis.db)),
- minicore: MiniCore::default(),
+ ra_fixture: RaFixtureConfig::default(),
};
let refs = analysis.find_all_refs(pos, &config).unwrap().unwrap();
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index a0a6a24559..3b472390d2 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -3,12 +3,12 @@ use std::{fmt, sync::OnceLock};
use arrayvec::ArrayVec;
use ast::HasName;
use cfg::{CfgAtom, CfgExpr};
-use hir::{AsAssocItem, HasAttrs, HasCrate, HasSource, Semantics, Symbol, db::HirDatabase, sym};
+use hir::{AsAssocItem, HasAttrs, HasCrate, HasSource, Semantics, Symbol, sym};
use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
+use ide_db::base_db::all_crates;
use ide_db::impl_empty_upmap_from_ra_fixture;
use ide_db::{
FilePosition, FxHashMap, FxIndexMap, FxIndexSet, RootDatabase, SymbolKind,
- base_db::RootQueryDb,
defs::Definition,
helpers::visit_file_defs,
search::{FileReferenceNode, SearchScope},
@@ -55,7 +55,7 @@ impl fmt::Display for TestId {
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub enum RunnableKind {
TestMod { path: String },
- Test { test_id: TestId, attr: TestAttr },
+ Test { test_id: TestId },
Bench { test_id: TestId },
DocTest { test_id: TestId },
Bin,
@@ -334,8 +334,7 @@ pub(crate) fn runnable_fn(
};
if def.is_test(sema.db) {
- let attr = TestAttr::from_fn(sema.db, def);
- RunnableKind::Test { test_id: test_id(), attr }
+ RunnableKind::Test { test_id: test_id() }
} else if def.is_bench(sema.db) {
RunnableKind::Bench { test_id: test_id() }
} else {
@@ -506,7 +505,7 @@ fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Op
let krate = def.krate(db);
let edition = krate.map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
let display_target = krate
- .unwrap_or_else(|| (*db.all_crates().last().expect("no crate graph present")).into())
+ .unwrap_or_else(|| (*all_crates(db).last().expect("no crate graph present")).into())
.to_display_target(db);
if !has_runnable_doc_test(db, &attrs) {
return None;
@@ -558,17 +557,6 @@ fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Op
Some(res)
}
-#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
-pub struct TestAttr {
- pub ignore: bool,
-}
-
-impl TestAttr {
- fn from_fn(db: &dyn HirDatabase, fn_def: hir::Function) -> TestAttr {
- TestAttr { ignore: fn_def.is_ignore(db) }
- }
-}
-
fn has_runnable_doc_test(db: &RootDatabase, attrs: &hir::AttrsWithOwner) -> bool {
const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] =
diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs
index 9eb01b12f2..cf796b2715 100644
--- a/crates/ide/src/signature_help.rs
+++ b/crates/ide/src/signature_help.rs
@@ -1975,8 +1975,8 @@ trait Sub: Super + Super {
fn f() -> impl Sub<$0
"#,
expect![[r#"
- trait Sub<SuperTy = …, SubTy = …>
- ^^^^^^^^^^^ ---------
+ trait Sub<SubTy = …, SuperTy = …>
+ ^^^^^^^^^ -----------
"#]],
);
}
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index aba6b64f97..4b2c9ceef9 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -4,11 +4,12 @@
use arrayvec::ArrayVec;
use hir::{Crate, Module, Semantics, db::HirDatabase};
use ide_db::{
- FileId, FileRange, FxHashMap, FxHashSet, MiniCore, RootDatabase,
- base_db::{RootQueryDb, SourceDatabase, VfsPath},
+ FileId, FileRange, FxHashMap, FxHashSet, RootDatabase,
+ base_db::{SourceDatabase, VfsPath},
defs::{Definition, IdentClass},
documentation::Documentation,
famous_defs::FamousDefs,
+ ra_fixture::RaFixtureConfig,
};
use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange};
@@ -16,7 +17,7 @@ use crate::navigation_target::UpmappingResult;
use crate::{
Analysis, Fold, HoverConfig, HoverResult, InlayHint, InlayHintsConfig, TryToNav,
hover::{SubstTyLen, hover_for_definition},
- inlay_hints::{AdjustmentHintsMode, InlayFieldsToResolve},
+ inlay_hints::{AdjustmentHintsMode, InlayFieldsToResolve, TypeHintsPlacement},
moniker::{MonikerResult, SymbolInformationKind, def_to_kind, def_to_moniker},
parent_module::crates_for,
};
@@ -123,16 +124,8 @@ fn documentation_for_definition(
_ => None,
};
- def.docs(
- sema.db,
- famous_defs.as_ref(),
- def.krate(sema.db)
- .unwrap_or_else(|| {
- (*sema.db.all_crates().last().expect("no crate graph present")).into()
- })
- .to_display_target(sema.db),
- )
- .map(Documentation::into_owned)
+ def.docs(sema.db, famous_defs.as_ref(), def.krate(sema.db)?.to_display_target(sema.db))
+ .map(Documentation::into_owned)
}
// FIXME: This is a weird function
@@ -159,7 +152,7 @@ pub enum VendoredLibrariesConfig<'a> {
impl StaticIndex<'_> {
fn add_file(&mut self, file_id: FileId) {
let current_crate = crates_for(self.db, file_id).pop().map(Into::into);
- let folds = self.analysis.folding_ranges(file_id).unwrap();
+ let folds = self.analysis.folding_ranges(file_id, true).unwrap();
let inlay_hints = self
.analysis
.inlay_hints(
@@ -167,6 +160,7 @@ impl StaticIndex<'_> {
render_colons: true,
discriminant_hints: crate::DiscriminantHints::Fieldless,
type_hints: true,
+ type_hints_placement: TypeHintsPlacement::Inline,
sized_bound: false,
parameter_hints: true,
parameter_hints_for_missing_arguments: false,
@@ -196,7 +190,7 @@ impl StaticIndex<'_> {
closing_brace_hints_min_lines: Some(25),
fields_to_resolve: InlayFieldsToResolve::empty(),
range_exclusive_hints: false,
- minicore: MiniCore::default(),
+ ra_fixture: RaFixtureConfig::default(),
},
file_id,
None,
@@ -225,7 +219,7 @@ impl StaticIndex<'_> {
max_enum_variants_count: Some(5),
max_subst_ty_len: SubstTyLen::Unlimited,
show_drop_glue: true,
- minicore: MiniCore::default(),
+ ra_fixture: RaFixtureConfig::default(),
};
let mut result = StaticIndexedFile { file_id, inlay_hints, folds, tokens: vec![] };
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index 217b13b4ef..9fd3f005ec 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -17,7 +17,7 @@ use either::Either;
use hir::{
DefWithBody, EditionedFileId, ExpressionStoreOwner, InFile, InRealFile, MacroKind, Semantics,
};
-use ide_db::{FxHashMap, FxHashSet, MiniCore, Ranker, RootDatabase, SymbolKind};
+use ide_db::{FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind, ra_fixture::RaFixtureConfig};
use syntax::{
AstNode, AstToken, NodeOrToken,
SyntaxKind::*,
@@ -65,7 +65,7 @@ pub struct HighlightConfig<'a> {
pub macro_bang: bool,
/// Whether to highlight unresolved things be their syntax
pub syntactic_name_ref_highlighting: bool,
- pub minicore: MiniCore<'a>,
+ pub ra_fixture: RaFixtureConfig<'a>,
}
// Feature: Semantic Syntax Highlighting
diff --git a/crates/ide/src/syntax_highlighting/html.rs b/crates/ide/src/syntax_highlighting/html.rs
index 74567e8213..423c0c349c 100644
--- a/crates/ide/src/syntax_highlighting/html.rs
+++ b/crates/ide/src/syntax_highlighting/html.rs
@@ -1,7 +1,7 @@
//! Renders a bit of code as HTML.
use hir::Semantics;
-use ide_db::MiniCore;
+use ide_db::ra_fixture::RaFixtureConfig;
use oorandom::Rand32;
use stdx::format_to;
use syntax::AstNode;
@@ -69,7 +69,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
inject_doc_comment: true,
macro_bang: true,
syntactic_name_ref_highlighting: false,
- minicore: MiniCore::default(),
+ ra_fixture: RaFixtureConfig::default(),
},
file_id,
rainbow,
diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs
index 74a8d93dfe..6afe5681a9 100644
--- a/crates/ide/src/syntax_highlighting/inject.rs
+++ b/crates/ide/src/syntax_highlighting/inject.rs
@@ -27,7 +27,7 @@ pub(super) fn ra_fixture(
sema,
literal.clone(),
expanded,
- config.minicore,
+ &config.ra_fixture,
&mut |range| {
hl.add(HlRange {
range,
@@ -56,7 +56,7 @@ pub(super) fn ra_fixture(
macro_bang: config.macro_bang,
// What if there is a fixture inside a fixture? It's fixtures all the way down.
// (In fact, we have a fixture inside a fixture in our test suite!)
- minicore: config.minicore,
+ ra_fixture: config.ra_fixture,
},
tmp_file_id,
)
@@ -186,7 +186,7 @@ pub(super) fn doc_comment(
specialize_operator: config.operator,
inject_doc_comment: config.inject_doc_comment,
macro_bang: config.macro_bang,
- minicore: config.minicore,
+ ra_fixture: config.ra_fixture,
},
tmp_file_id,
None,
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index aecd1d3fdb..e8d185b7b6 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -1,7 +1,7 @@
use std::time::Instant;
use expect_test::{ExpectFile, expect_file};
-use ide_db::{MiniCore, SymbolKind};
+use ide_db::{SymbolKind, ra_fixture::RaFixtureConfig};
use span::Edition;
use test_utils::{AssertLinear, bench, bench_fixture, skip_slow_tests};
@@ -17,7 +17,7 @@ const HL_CONFIG: HighlightConfig<'_> = HighlightConfig {
inject_doc_comment: true,
macro_bang: true,
syntactic_name_ref_highlighting: false,
- minicore: MiniCore::default(),
+ ra_fixture: RaFixtureConfig::default(),
};
#[test]
diff --git a/crates/ide/src/test_explorer.rs b/crates/ide/src/test_explorer.rs
index 4792566f5f..02040ef138 100644
--- a/crates/ide/src/test_explorer.rs
+++ b/crates/ide/src/test_explorer.rs
@@ -1,8 +1,8 @@
//! Discovers tests
use hir::{Crate, Module, ModuleDef, Semantics};
-use ide_db::base_db;
-use ide_db::{FileId, RootDatabase, base_db::RootQueryDb};
+use ide_db::base_db::{self, all_crates};
+use ide_db::{FileId, RootDatabase};
use syntax::TextRange;
use crate::{NavigationTarget, Runnable, TryToNav, runnables::runnable_fn};
@@ -26,7 +26,7 @@ pub struct TestItem {
}
pub(crate) fn discover_test_roots(db: &RootDatabase) -> Vec<TestItem> {
- db.all_crates()
+ all_crates(db)
.iter()
.copied()
.filter(|&id| id.data(db).origin.is_local())
@@ -48,7 +48,7 @@ pub(crate) fn discover_test_roots(db: &RootDatabase) -> Vec<TestItem> {
fn find_crate_by_id(db: &RootDatabase, crate_id: &str) -> Option<base_db::Crate> {
// here, we use display_name as the crate id. This is not super ideal, but it works since we
// only show tests for the local crates.
- db.all_crates().iter().copied().find(|&id| {
+ all_crates(db).iter().copied().find(|&id| {
id.data(db).origin.is_local()
&& id.extra_data(db).display_name.as_ref().is_some_and(|x| x.to_string() == crate_id)
})
diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs
index e8b0c92dcb..a49a85fe78 100644
--- a/crates/ide/src/typing.rs
+++ b/crates/ide/src/typing.rs
@@ -17,7 +17,7 @@ mod on_enter;
use either::Either;
use hir::EditionedFileId;
-use ide_db::{FilePosition, RootDatabase, base_db::RootQueryDb};
+use ide_db::{FilePosition, RootDatabase, base_db::relevant_crates};
use span::Edition;
use std::iter;
@@ -70,13 +70,12 @@ pub(crate) fn on_char_typed(
if !TRIGGER_CHARS.contains(&char_typed) {
return None;
}
- let edition = db
- .relevant_crates(position.file_id)
+ let edition = relevant_crates(db, position.file_id)
.first()
.copied()
.map_or(Edition::CURRENT, |krate| krate.data(db).edition);
let editioned_file_id_wrapper = EditionedFileId::new(db, position.file_id, edition);
- let file = &db.parse(editioned_file_id_wrapper);
+ let file = &editioned_file_id_wrapper.parse(db);
let char_matches_position =
file.tree().syntax().text().char_at(position.offset) == Some(char_typed);
if !stdx::always!(char_matches_position) {
@@ -1240,12 +1239,6 @@ sdasdasdasdasd
#[test]
fn parenthesis_noop_in_item_position_with_macro() {
type_char_noop('(', r#"$0println!();"#);
- type_char_noop(
- '(',
- r#"
-fn main() $0println!("hello");
-}"#,
- );
}
#[test]
diff --git a/crates/ide/src/typing/on_enter.rs b/crates/ide/src/typing/on_enter.rs
index fdc583a15c..7d04594a5b 100644
--- a/crates/ide/src/typing/on_enter.rs
+++ b/crates/ide/src/typing/on_enter.rs
@@ -1,13 +1,11 @@
-//! Handles the `Enter` key press. At the momently, this only continues
-//! comments, but should handle indent some time in the future as well.
+//! Handles the `Enter` key press, including comment continuation and
+//! indentation in brace-delimited constructs.
-use ide_db::base_db::RootQueryDb;
use ide_db::{FilePosition, RootDatabase};
use syntax::{
AstNode, SmolStr, SourceFile,
SyntaxKind::*,
- SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset,
- algo::find_node_at_offset,
+ SyntaxToken, TextRange, TextSize, TokenAtOffset,
ast::{self, AstToken, edit::IndentLevel},
};
@@ -20,7 +18,8 @@ use ide_db::text_edit::TextEdit;
// - <kbd>Enter</kbd> inside triple-slash comments automatically inserts `///`
// - <kbd>Enter</kbd> in the middle or after a trailing space in `//` inserts `//`
// - <kbd>Enter</kbd> inside `//!` doc comments automatically inserts `//!`
-// - <kbd>Enter</kbd> after `{` indents contents and closing `}` of single-line block
+// - <kbd>Enter</kbd> after `{` reformats single-line brace-delimited contents by
+// moving the text between `{` and the matching `}` onto an indented line
//
// This action needs to be assigned to shortcut explicitly.
//
@@ -52,7 +51,7 @@ use ide_db::text_edit::TextEdit;
pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> {
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::current_edition(db, position.file_id);
- let parse = db.parse(editioned_file_id_wrapper);
+ let parse = editioned_file_id_wrapper.parse(db);
let file = parse.tree();
let token = file.syntax().token_at_offset(position.offset).left_biased()?;
@@ -60,22 +59,11 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Text
return on_enter_in_comment(&comment, &file, position.offset);
}
- if token.kind() == L_CURLY {
- // Typing enter after the `{` of a block expression, where the `}` is on the same line
- if let Some(edit) = find_node_at_offset(file.syntax(), position.offset - TextSize::of('{'))
- .and_then(|block| on_enter_in_block(block, position))
- {
- cov_mark::hit!(indent_block_contents);
- return Some(edit);
- }
-
- // Typing enter after the `{` of a use tree list.
- if let Some(edit) = find_node_at_offset(file.syntax(), position.offset - TextSize::of('{'))
- .and_then(|list| on_enter_in_use_tree_list(list, position))
- {
- cov_mark::hit!(indent_block_contents);
- return Some(edit);
- }
+ if token.kind() == L_CURLY
+ && let Some(edit) = on_enter_in_braces(token, position)
+ {
+ cov_mark::hit!(indent_block_contents);
+ return Some(edit);
}
None
@@ -120,44 +108,54 @@ fn on_enter_in_comment(
Some(edit)
}
-fn on_enter_in_block(block: ast::BlockExpr, position: FilePosition) -> Option<TextEdit> {
- let contents = block_contents(&block)?;
-
- if block.syntax().text().contains_char('\n') {
- return None;
- }
-
- let indent = IndentLevel::from_node(block.syntax());
- let mut edit = TextEdit::insert(position.offset, format!("\n{}$0", indent + 1));
- edit.union(TextEdit::insert(contents.text_range().end(), format!("\n{indent}"))).ok()?;
- Some(edit)
-}
-
-fn on_enter_in_use_tree_list(list: ast::UseTreeList, position: FilePosition) -> Option<TextEdit> {
- if list.syntax().text().contains_char('\n') {
+fn on_enter_in_braces(l_curly: SyntaxToken, position: FilePosition) -> Option<TextEdit> {
+ if l_curly.text_range().end() != position.offset {
return None;
}
- let indent = IndentLevel::from_node(list.syntax());
- let mut edit = TextEdit::insert(position.offset, format!("\n{}$0", indent + 1));
- edit.union(TextEdit::insert(list.r_curly_token()?.text_range().start(), format!("\n{indent}")))
- .ok()?;
- Some(edit)
+ let (r_curly, content) = brace_contents_on_same_line(&l_curly)?;
+ let indent = IndentLevel::from_token(&l_curly);
+ Some(TextEdit::replace(
+ TextRange::new(position.offset, r_curly.text_range().start()),
+ format!("\n{}$0{}\n{indent}", indent + 1, content),
+ ))
}
-fn block_contents(block: &ast::BlockExpr) -> Option<SyntaxNode> {
- let mut node = block.tail_expr().map(|e| e.syntax().clone());
+fn brace_contents_on_same_line(l_curly: &SyntaxToken) -> Option<(SyntaxToken, String)> {
+ let mut depth = 0_u32;
+ let mut tokens = Vec::new();
+ let mut token = l_curly.next_token()?;
- for stmt in block.statements() {
- if node.is_some() {
- // More than 1 node in the block
+ loop {
+ if token.kind() == WHITESPACE && token.text().contains('\n') {
return None;
}
- node = Some(stmt.syntax().clone());
- }
+ match token.kind() {
+ L_CURLY => {
+ depth += 1;
+ tokens.push(token.clone());
+ }
+ R_CURLY if depth == 0 => {
+ let first = tokens.iter().position(|it| it.kind() != WHITESPACE);
+ let last = tokens.iter().rposition(|it| it.kind() != WHITESPACE);
+ let content = match first.zip(last) {
+ Some((first, last)) => {
+ tokens[first..=last].iter().map(|it| it.text()).collect()
+ }
+ None => String::new(),
+ };
+ return Some((token, content));
+ }
+ R_CURLY => {
+ depth -= 1;
+ tokens.push(token.clone());
+ }
+ _ => tokens.push(token.clone()),
+ }
- node
+ token = token.next_token()?;
+ }
}
fn followed_by_comment(comment: &ast::Comment) -> bool {
@@ -383,10 +381,58 @@ fn main() {
}
#[test]
- fn indents_fn_body_block() {
+ fn indents_empty_brace_pairs() {
cov_mark::check!(indent_block_contents);
do_check(
r#"
+fn f() {$0}
+ "#,
+ r#"
+fn f() {
+ $0
+}
+ "#,
+ );
+ do_check(
+ r#"
+fn f() {
+ let x = {$0};
+}
+ "#,
+ r#"
+fn f() {
+ let x = {
+ $0
+ };
+}
+ "#,
+ );
+ do_check(
+ r#"
+use crate::{$0};
+ "#,
+ r#"
+use crate::{
+ $0
+};
+ "#,
+ );
+ do_check(
+ r#"
+mod m {$0}
+ "#,
+ r#"
+mod m {
+ $0
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indents_fn_body_block() {
+ do_check(
+ r#"
fn f() {$0()}
"#,
r#"
@@ -478,29 +524,39 @@ fn f() {
}
#[test]
- fn does_not_indent_empty_block() {
- do_check_noop(
+ fn indents_block_with_multiple_statements() {
+ do_check(
r#"
-fn f() {$0}
+fn f() {$0 a = b; ()}
+ "#,
+ r#"
+fn f() {
+ $0a = b; ()
+}
"#,
);
- do_check_noop(
+ do_check(
r#"
-fn f() {{$0}}
+fn f() {$0 a = b; a = b; }
+ "#,
+ r#"
+fn f() {
+ $0a = b; a = b;
+}
"#,
);
}
#[test]
- fn does_not_indent_block_with_too_much_content() {
- do_check_noop(
+ fn trims_spaces_around_brace_contents() {
+ do_check(
r#"
-fn f() {$0 a = b; ()}
+fn f() {$0 () }
"#,
- );
- do_check_noop(
r#"
-fn f() {$0 a = b; a = b; }
+fn f() {
+ $0()
+}
"#,
);
}
@@ -571,6 +627,20 @@ use {
}
#[test]
+ fn indents_item_lists() {
+ do_check(
+ r#"
+mod m {$0}
+ "#,
+ r#"
+mod m {
+ $0
+}
+ "#,
+ );
+ }
+
+ #[test]
fn does_not_indent_use_tree_list_when_not_at_curly_brace() {
do_check_noop(
r#"
diff --git a/crates/ide/src/view_crate_graph.rs b/crates/ide/src/view_crate_graph.rs
index 25deffe10e..ecfdd09b24 100644
--- a/crates/ide/src/view_crate_graph.rs
+++ b/crates/ide/src/view_crate_graph.rs
@@ -1,10 +1,9 @@
use dot::{Id, LabelText};
+use ide_db::base_db::all_crates;
use ide_db::base_db::salsa::plumbing::AsId;
use ide_db::{
FxHashMap, RootDatabase,
- base_db::{
- BuiltCrateData, BuiltDependency, Crate, ExtraCrateData, RootQueryDb, SourceDatabase,
- },
+ base_db::{BuiltCrateData, BuiltDependency, Crate, ExtraCrateData, SourceDatabase},
};
// Feature: View Crate Graph
@@ -17,8 +16,8 @@ use ide_db::{
// | Editor | Action Name |
// |---------|-------------|
// | VS Code | **rust-analyzer: View Crate Graph** |
-pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String, String> {
- let all_crates = db.all_crates();
+pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> String {
+ let all_crates = all_crates(db);
let crates_to_render = all_crates
.iter()
.copied()
@@ -37,7 +36,7 @@ pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String,
let mut dot = Vec::new();
dot::render(&graph, &mut dot).unwrap();
- Ok(String::from_utf8(dot).unwrap())
+ String::from_utf8(dot).unwrap()
}
struct DotCrateGraph<'db> {
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 8753eab43a..68bf78e037 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -283,6 +283,19 @@ impl ProjectFolders {
}
}
+ // Collect workspace roots not already covered by a local PackageRoot
+ // (e.g. virtual workspaces where no package lives at the workspace root).
+ // We need these to load workspace-root rust-analyzer.toml into a local source root.
+ let uncovered_ws_roots: Vec<AbsPathBuf> = workspaces
+ .iter()
+ .filter_map(|ws| {
+ let ws_root = ws.workspace_root().to_path_buf();
+ let dominated =
+ roots.iter().any(|root| root.is_local && root.include.contains(&ws_root));
+ (!dominated).then_some(ws_root)
+ })
+ .collect();
+
for root in roots.into_iter().filter(|it| !it.include.is_empty()) {
let file_set_roots: Vec<VfsPath> =
root.include.iter().cloned().map(VfsPath::from).collect();
@@ -291,6 +304,7 @@ impl ProjectFolders {
let mut dirs = vfs::loader::Directories::default();
dirs.extensions.push("rs".into());
dirs.extensions.push("toml".into());
+ dirs.extensions.push("md".into());
dirs.include.extend(root.include);
dirs.exclude.extend(root.exclude);
for excl in global_excludes {
@@ -335,6 +349,20 @@ impl ProjectFolders {
}
}
+ // For virtual workspaces, the workspace root has no local PackageRoot, so
+ // rust-analyzer.toml there would fall into a library source root and be
+ // ignored. Load it explicitly via Entry::Files and register the workspace
+ // root as a local file-set root so the file is classified as local.
+ for ws_root in &uncovered_ws_roots {
+ let ratoml_path = ws_root.join("rust-analyzer.toml");
+ let file_set_roots = vec![VfsPath::from(ws_root.clone())];
+ let entry = vfs::loader::Entry::Files(vec![ratoml_path]);
+ res.watch.push(res.load.len());
+ res.load.push(entry);
+ local_filesets.push(fsc.len() as u64);
+ fsc.add_file_set(file_set_roots);
+ }
+
if let Some(user_config_path) = user_config_dir_path {
let ratoml_path = {
let mut p = user_config_path.to_path_buf();
@@ -738,7 +766,7 @@ fn resolve_sub_span(
#[cfg(test)]
mod tests {
- use ide_db::base_db::RootQueryDb;
+ use ide_db::base_db::all_crates;
use vfs::file_set::FileSetConfigBuilder;
use super::*;
@@ -766,7 +794,7 @@ mod tests {
let (db, _vfs, _proc_macro) =
load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config).unwrap();
- let n_crates = db.all_crates().len();
+ let n_crates = all_crates(&db).len();
// RA has quite a few crates, but the exact count doesn't matter
assert!(n_crates > 20);
}
diff --git a/crates/parser/src/grammar.rs b/crates/parser/src/grammar.rs
index e481bbe9bc..1ff8a56b58 100644
--- a/crates/parser/src/grammar.rs
+++ b/crates/parser/src/grammar.rs
@@ -303,6 +303,18 @@ fn opt_ret_type(p: &mut Parser<'_>) -> bool {
}
}
+fn opt_no_arrow_ret_type(p: &mut Parser<'_>) -> bool {
+ if p.at_ts(PATH_NAME_REF_KINDS) {
+ let m = p.start();
+ p.error("missing thin-arrow `->`");
+ types::type_no_bounds(p);
+ m.complete(p, RET_TYPE);
+ true
+ } else {
+ false
+ }
+}
+
fn name_r(p: &mut Parser<'_>, recovery: TokenSet) {
if p.at(IDENT) {
let m = p.start();
diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs
index c609f9383e..c0acdde2a7 100644
--- a/crates/parser/src/grammar/items.rs
+++ b/crates/parser/src/grammar/items.rs
@@ -422,7 +422,12 @@ fn fn_(p: &mut Parser<'_>, m: Marker) {
// test function_ret_type
// fn foo() {}
// fn bar() -> () {}
- opt_ret_type(p);
+ if !opt_ret_type(p) {
+ // test_err function_ret_type_missing_arrow
+ // fn foo() usize {}
+ // fn bar() super::Foo {}
+ opt_no_arrow_ret_type(p);
+ }
// test_err fn_ret_recovery
// fn foo() -> A>]) { let x = 1; }
diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs
index c62356d5c9..667bb68c64 100644
--- a/crates/parser/src/grammar/types.rs
+++ b/crates/parser/src/grammar/types.rs
@@ -59,6 +59,9 @@ fn type_with_bounds_cond(p: &mut Parser<'_>, allow_bounds: bool) {
}
_ if paths::is_path_start(p) => path_or_macro_type(p, allow_bounds),
LIFETIME_IDENT if p.nth_at(1, T![+]) => bare_dyn_trait_type(p),
+ T!['{'] => {
+ p.err_recover("expected type, found `{`", TYPE_RECOVERY_SET);
+ }
_ => {
p.err_recover("expected type", TYPE_RECOVERY_SET);
}
diff --git a/crates/parser/test_data/generated/runner.rs b/crates/parser/test_data/generated/runner.rs
index 4c001104fe..01fc172ed9 100644
--- a/crates/parser/test_data/generated/runner.rs
+++ b/crates/parser/test_data/generated/runner.rs
@@ -793,6 +793,10 @@ mod err {
run_and_expect_errors("test_data/parser/inline/err/fn_ret_recovery.rs");
}
#[test]
+ fn function_ret_type_missing_arrow() {
+ run_and_expect_errors("test_data/parser/inline/err/function_ret_type_missing_arrow.rs");
+ }
+ #[test]
fn gen_fn() {
run_and_expect_errors_with_edition(
"test_data/parser/inline/err/gen_fn.rs",
diff --git a/crates/parser/test_data/parser/err/0025_nope.rast b/crates/parser/test_data/parser/err/0025_nope.rast
index b6bc008837..23964ab9d9 100644
--- a/crates/parser/test_data/parser/err/0025_nope.rast
+++ b/crates/parser/test_data/parser/err/0025_nope.rast
@@ -194,7 +194,7 @@ SOURCE_FILE
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n"
-error 95: expected type
+error 95: expected type, found `{`
error 95: expected COMMA
error 96: expected field
error 98: expected field declaration
diff --git a/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast b/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast
index 3768a55d53..31db794d9f 100644
--- a/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast
+++ b/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast
@@ -26,5 +26,5 @@ SOURCE_FILE
L_CURLY "{"
R_CURLY "}"
WHITESPACE "\n"
-error 26: expected type
+error 26: expected type, found `{`
error 26: expected colon
diff --git a/crates/parser/test_data/parser/inline/err/function_ret_type_missing_arrow.rast b/crates/parser/test_data/parser/inline/err/function_ret_type_missing_arrow.rast
new file mode 100644
index 0000000000..c0bca6ed1c
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/err/function_ret_type_missing_arrow.rast
@@ -0,0 +1,50 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 9: missing thin-arrow `->`
+error 27: missing thin-arrow `->`
diff --git a/crates/parser/test_data/parser/inline/err/function_ret_type_missing_arrow.rs b/crates/parser/test_data/parser/inline/err/function_ret_type_missing_arrow.rs
new file mode 100644
index 0000000000..f48e539df5
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/err/function_ret_type_missing_arrow.rs
@@ -0,0 +1,2 @@
+fn foo() usize {}
+fn bar() super::Foo {}
diff --git a/crates/profile/Cargo.toml b/crates/profile/Cargo.toml
index 4828419003..8377e94c8d 100644
--- a/crates/profile/Cargo.toml
+++ b/crates/profile/Cargo.toml
@@ -16,8 +16,8 @@ doctest = false
cfg-if = "1.0.1"
jemalloc-ctl = { version = "0.5.4", package = "tikv-jemalloc-ctl", optional = true }
-[target.'cfg(all(target_os = "linux", not(target_env = "ohos")))'.dependencies]
-perf-event = "=0.4.7"
+[target.'cfg(all(target_os = "linux", not(target_env = "ohos"), any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")))'.dependencies]
+perf-event = "=0.4.8"
[target.'cfg(all(target_os = "linux", target_env = "gnu"))'.dependencies]
libc.workspace = true
diff --git a/crates/profile/src/stop_watch.rs b/crates/profile/src/stop_watch.rs
index 00c37c01d2..a1c1383ad5 100644
--- a/crates/profile/src/stop_watch.rs
+++ b/crates/profile/src/stop_watch.rs
@@ -11,7 +11,11 @@ use crate::MemoryUsage;
pub struct StopWatch {
time: Instant,
- #[cfg(all(target_os = "linux", not(target_env = "ohos")))]
+ #[cfg(all(
+ target_os = "linux",
+ not(target_env = "ohos"),
+ any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")
+ ))]
counter: Option<perf_event::Counter>,
memory: MemoryUsage,
}
@@ -24,7 +28,11 @@ pub struct StopWatchSpan {
impl StopWatch {
pub fn start() -> StopWatch {
- #[cfg(all(target_os = "linux", not(target_env = "ohos")))]
+ #[cfg(all(
+ target_os = "linux",
+ not(target_env = "ohos"),
+ any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")
+ ))]
let counter = {
// When debugging rust-analyzer using rr, the perf-related syscalls cause it to abort.
// We allow disabling perf by setting the env var `RA_DISABLE_PERF`.
@@ -51,7 +59,11 @@ impl StopWatch {
let time = Instant::now();
StopWatch {
time,
- #[cfg(all(target_os = "linux", not(target_env = "ohos")))]
+ #[cfg(all(
+ target_os = "linux",
+ not(target_env = "ohos"),
+ any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")
+ ))]
counter,
memory,
}
@@ -60,13 +72,19 @@ impl StopWatch {
pub fn elapsed(&mut self) -> StopWatchSpan {
let time = self.time.elapsed();
- #[cfg(all(target_os = "linux", not(target_env = "ohos")))]
+ #[cfg(all(
+ target_os = "linux",
+ not(target_env = "ohos"),
+ any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")
+ ))]
let instructions = self.counter.as_mut().and_then(|it| {
it.read().map_err(|err| eprintln!("Failed to read perf counter: {err}")).ok()
});
- #[cfg(all(target_os = "linux", target_env = "ohos"))]
- let instructions = None;
- #[cfg(not(target_os = "linux"))]
+ #[cfg(not(all(
+ target_os = "linux",
+ not(target_env = "ohos"),
+ any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")
+ )))]
let instructions = None;
let memory = MemoryUsage::now() - self.memory;
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index 792206b74f..5d8273832b 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -131,6 +131,8 @@ pub struct CargoConfig {
pub run_build_script_command: Option<Vec<String>>,
/// Extra args to pass to the cargo command.
pub extra_args: Vec<String>,
+ /// Extra args passed only to `cargo metadata`, not other cargo commands.
+ pub metadata_extra_args: Vec<String>,
/// Extra env vars to set when invoking the cargo command
pub extra_env: FxHashMap<String, Option<String>>,
pub invocation_strategy: InvocationStrategy,
@@ -320,6 +322,8 @@ pub struct CargoMetadataConfig {
pub targets: Vec<String>,
/// Extra args to pass to the cargo command.
pub extra_args: Vec<String>,
+ /// Extra args passed directly to `cargo metadata` without filtering.
+ pub metadata_extra_args: Vec<String>,
/// Extra env vars to set when invoking the cargo command
pub extra_env: FxHashMap<String, Option<String>>,
/// What kind of metadata are we fetching: workspace, rustc, or sysroot.
@@ -679,6 +683,7 @@ impl FetchMetadata {
other_options.push(arg.to_owned());
}
}
+ other_options.extend(config.metadata_extra_args.iter().cloned());
let mut lockfile_copy = None;
if cargo_toml.is_rust_manifest() {
diff --git a/crates/project-model/src/env.rs b/crates/project-model/src/env.rs
index 51c447945c..ab45917a56 100644
--- a/crates/project-model/src/env.rs
+++ b/crates/project-model/src/env.rs
@@ -79,18 +79,31 @@ pub(crate) fn cargo_config_env(
for (key, entry) in env_toml {
let key = key.as_ref().as_ref();
let value = match entry.as_ref() {
- DeValue::String(s) => String::from(s.clone()),
+ DeValue::String(s) => {
+ // Plain string entries have no `force` option, so they should not
+ // override existing environment variables (matching Cargo behavior).
+ if extra_env.get(key).is_some_and(Option::is_some) {
+ continue;
+ }
+ if let Ok(val) = std::env::var(key) { val } else { String::from(s.clone()) }
+ }
DeValue::Table(entry) => {
// Each entry MUST have a `value` key.
let Some(map) = entry.get("value").and_then(|v| v.as_ref().as_str()) else {
continue;
};
- // If the entry already exists in the environment AND the `force` key is not set to
- // true, then don't overwrite the value.
- if extra_env.get(key).is_some_and(Option::is_some)
- && !entry.get("force").and_then(|v| v.as_ref().as_bool()).unwrap_or(false)
- {
- continue;
+ let is_forced =
+ entry.get("force").and_then(|v| v.as_ref().as_bool()).unwrap_or(false);
+ // If the entry already exists in the environment AND the `force` key is not set
+ // to true, use the existing value instead of the config value.
+ if !is_forced {
+ if extra_env.get(key).is_some_and(Option::is_some) {
+ continue;
+ }
+ if let Ok(val) = std::env::var(key) {
+ env.insert(key, val);
+ continue;
+ }
}
if let Some(base) = entry.get("relative").and_then(|v| {
@@ -124,38 +137,80 @@ fn parse_output_cargo_config_env_works() {
.unwrap();
let config_path = cwd.join(".cargo").join("config.toml");
let raw = r#"
-env.CARGO_WORKSPACE_DIR.relative = true
-env.CARGO_WORKSPACE_DIR.value = ""
-env.INVALID.relative = "invalidbool"
-env.INVALID.value = "../relative"
-env.RELATIVE.relative = true
-env.RELATIVE.value = "../relative"
-env.TEST.value = "test"
-env.FORCED.value = "test"
-env.FORCED.force = true
-env.UNFORCED.value = "test"
-env.UNFORCED.forced = false
-env.OVERWRITTEN.value = "test"
-env.NOT_AN_OBJECT = "value"
+env.RA_TEST_WORKSPACE_DIR.relative = true
+env.RA_TEST_WORKSPACE_DIR.value = ""
+env.RA_TEST_INVALID.relative = "invalidbool"
+env.RA_TEST_INVALID.value = "../relative"
+env.RA_TEST_RELATIVE.relative = true
+env.RA_TEST_RELATIVE.value = "../relative"
+env.RA_TEST_UNSET.value = "test"
+env.RA_TEST_FORCED.value = "test"
+env.RA_TEST_FORCED.force = true
+env.RA_TEST_UNFORCED.value = "test"
+env.RA_TEST_UNFORCED.forced = false
+env.RA_TEST_OVERWRITTEN.value = "test"
+env.RA_TEST_NOT_AN_OBJECT = "value"
"#;
let raw = raw.lines().map(|l| format!("{l} # {config_path}")).join("\n");
let config = CargoConfigFile::from_string_for_test(raw);
let extra_env = [
- ("FORCED", Some("ignored")),
- ("UNFORCED", Some("newvalue")),
- ("OVERWRITTEN", Some("newvalue")),
- ("TEST", None),
+ ("RA_TEST_FORCED", Some("ignored")),
+ ("RA_TEST_UNFORCED", Some("newvalue")),
+ ("RA_TEST_OVERWRITTEN", Some("newvalue")),
+ ("RA_TEST_UNSET", None),
]
.iter()
.map(|(k, v)| (k.to_string(), v.map(ToString::to_string)))
.collect();
let env = cargo_config_env(&Some(config), &extra_env);
- assert_eq!(env.get("CARGO_WORKSPACE_DIR").as_deref(), Some(cwd.join("").as_str()));
- assert_eq!(env.get("RELATIVE").as_deref(), Some(cwd.join("../relative").as_str()));
- assert_eq!(env.get("INVALID").as_deref(), Some("../relative"));
- assert_eq!(env.get("TEST").as_deref(), Some("test"));
- assert_eq!(env.get("FORCED").as_deref(), Some("test"));
- assert_eq!(env.get("UNFORCED").as_deref(), Some("newvalue"));
- assert_eq!(env.get("OVERWRITTEN").as_deref(), Some("newvalue"));
- assert_eq!(env.get("NOT_AN_OBJECT").as_deref(), Some("value"));
+ assert_eq!(env.get("RA_TEST_WORKSPACE_DIR").as_deref(), Some(cwd.join("").as_str()));
+ assert_eq!(env.get("RA_TEST_RELATIVE").as_deref(), Some(cwd.join("../relative").as_str()));
+ assert_eq!(env.get("RA_TEST_INVALID").as_deref(), Some("../relative"));
+ assert_eq!(env.get("RA_TEST_UNSET").as_deref(), Some("test"));
+ assert_eq!(env.get("RA_TEST_FORCED").as_deref(), Some("test"));
+ assert_eq!(env.get("RA_TEST_UNFORCED").as_deref(), Some("newvalue"));
+ assert_eq!(env.get("RA_TEST_OVERWRITTEN").as_deref(), Some("newvalue"));
+ assert_eq!(env.get("RA_TEST_NOT_AN_OBJECT").as_deref(), Some("value"));
+}
+
+#[test]
+fn cargo_config_env_respects_process_env() {
+ use itertools::Itertools;
+
+ let cwd = paths::AbsPathBuf::try_from(
+ paths::Utf8PathBuf::try_from(std::env::current_dir().unwrap()).unwrap(),
+ )
+ .unwrap();
+ let config_path = cwd.join(".cargo").join("config.toml");
+
+ // SAFETY: this test is not run in parallel with other tests that depend on these env vars.
+ unsafe {
+ std::env::set_var("RA_TEST_PROCESS_ENV_STRING", "from_process");
+ std::env::set_var("RA_TEST_PROCESS_ENV_TABLE", "from_process");
+ std::env::set_var("RA_TEST_PROCESS_ENV_FORCED", "from_process");
+ }
+
+ let raw = r#"
+env.RA_TEST_PROCESS_ENV_STRING = "from_config"
+env.RA_TEST_PROCESS_ENV_TABLE.value = "from_config"
+env.RA_TEST_PROCESS_ENV_FORCED.value = "from_config"
+env.RA_TEST_PROCESS_ENV_FORCED.force = true
+"#;
+ let raw = raw.lines().map(|l| format!("{l} # {config_path}")).join("\n");
+ let config = CargoConfigFile::from_string_for_test(raw);
+ let extra_env = FxHashMap::default();
+ let env = cargo_config_env(&Some(config), &extra_env);
+
+ // Plain string form should use process env value, not config value
+ assert_eq!(env.get("RA_TEST_PROCESS_ENV_STRING").as_deref(), Some("from_process"));
+ // Table form without force should use process env value, not config value
+ assert_eq!(env.get("RA_TEST_PROCESS_ENV_TABLE").as_deref(), Some("from_process"));
+ // Table form with force=true should override process env
+ assert_eq!(env.get("RA_TEST_PROCESS_ENV_FORCED").as_deref(), Some("from_config"));
+
+ unsafe {
+ std::env::remove_var("RA_TEST_PROCESS_ENV_STRING");
+ std::env::remove_var("RA_TEST_PROCESS_ENV_TABLE");
+ std::env::remove_var("RA_TEST_PROCESS_ENV_FORCED");
+ }
}
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index 581b5fa514..29a19bc32e 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -216,6 +216,7 @@ impl ProjectWorkspace {
features,
rustc_source,
extra_args,
+ metadata_extra_args,
extra_env,
set_test,
cfg_overrides,
@@ -289,6 +290,7 @@ impl ProjectWorkspace {
features: features.clone(),
targets: targets.clone(),
extra_args: extra_args.clone(),
+ metadata_extra_args: metadata_extra_args.clone(),
extra_env: extra_env.clone(),
toolchain_version: toolchain.clone(),
kind: "workspace",
@@ -343,6 +345,7 @@ impl ProjectWorkspace {
features: crate::CargoFeatures::default(),
targets: targets.clone(),
extra_args: extra_args.clone(),
+ metadata_extra_args: metadata_extra_args.clone(),
extra_env: extra_env.clone(),
toolchain_version: toolchain.clone(),
kind: "rustc-dev"
@@ -575,6 +578,7 @@ impl ProjectWorkspace {
features: config.features.clone(),
targets,
extra_args: config.extra_args.clone(),
+ metadata_extra_args: config.metadata_extra_args.clone(),
extra_env: config.extra_env.clone(),
toolchain_version: toolchain.clone(),
kind: "detached-file",
@@ -1942,6 +1946,7 @@ fn sysroot_metadata_config(
features: Default::default(),
targets,
extra_args: Default::default(),
+ metadata_extra_args: config.metadata_extra_args.clone(),
extra_env: config.extra_env.clone(),
toolchain_version,
kind: "sysroot",
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 74828cba02..e56727d39d 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -23,10 +23,10 @@ use hir_def::{
use hir_ty::InferenceResult;
use ide::{
Analysis, AnalysisHost, AnnotationConfig, DiagnosticsConfig, Edition, InlayFieldsToResolve,
- InlayHintsConfig, LineCol, RootDatabase,
+ InlayHintsConfig, LineCol, RaFixtureConfig, RootDatabase,
};
use ide_db::{
- EditionedFileId, LineIndexDatabase, MiniCore, SnippetCap,
+ EditionedFileId, LineIndexDatabase, SnippetCap,
base_db::{SourceDatabase, salsa::Database},
};
use itertools::Itertools;
@@ -1367,6 +1367,7 @@ impl flags::AnalysisStats {
&InlayHintsConfig {
render_colons: false,
type_hints: true,
+ type_hints_placement: ide::TypeHintsPlacement::Inline,
sized_bound: false,
discriminant_hints: ide::DiscriminantHints::Always,
parameter_hints: true,
@@ -1397,7 +1398,7 @@ impl flags::AnalysisStats {
closing_brace_hints_min_lines: Some(20),
fields_to_resolve: InlayFieldsToResolve::empty(),
range_exclusive_hints: true,
- minicore: MiniCore::default(),
+ ra_fixture: RaFixtureConfig::default(),
},
analysis.editioned_file_id_to_vfs(file_id),
None,
@@ -1416,7 +1417,7 @@ impl flags::AnalysisStats {
annotate_enum_variant_references: false,
location: ide::AnnotationLocation::AboveName,
filter_adjacent_derive_implementations: false,
- minicore: MiniCore::default(),
+ ra_fixture: RaFixtureConfig::default(),
};
for &file_id in file_ids {
let msg = format!("annotations: {}", vfs.file_path(file_id.file_id(db)));
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 2ccd85f0e3..3a88a8fe84 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -12,7 +12,8 @@ use ide::{
CompletionFieldsToResolve, DiagnosticsConfig, GenericParameterHints, GotoDefinitionConfig,
GotoImplementationConfig, HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat,
InlayFieldsToResolve, InlayHintsConfig, JoinLinesConfig, MemoryLayoutHoverConfig,
- MemoryLayoutHoverRenderKind, RenameConfig, Snippet, SnippetScope, SourceRootId,
+ MemoryLayoutHoverRenderKind, RaFixtureConfig, RenameConfig, Snippet, SnippetScope,
+ SourceRootId,
};
use ide_db::{
MiniCore, SnippetCap,
@@ -313,6 +314,9 @@ config_data! {
/// Hide inlay type hints for constructors.
inlayHints_typeHints_hideNamedConstructor: bool = false,
+ /// Where to render type hints relative to their binding pattern.
+ inlayHints_typeHints_location: TypeHintsLocation = TypeHintsLocation::Inline,
+
/// Enable the experimental support for interpreting tests.
interpret_tests: bool = false,
@@ -727,6 +731,11 @@ config_data! {
/// the `Problems Panel`.
diagnostics_warningsAsInfo: Vec<String> = vec![],
+ /// Disable support for `#[rust_analyzer::rust_fixture]` snippets.
+ ///
+ /// If you are not working on rust-analyzer itself, you should ignore this config.
+ disableFixtureSupport: bool = false,
+
/// Enforce the import granularity setting for all files. If set to false rust-analyzer will
/// try to keep import styles consistent per file.
imports_granularity_enforce: bool = false,
@@ -822,6 +831,9 @@ config_data! {
///
/// Set this to `"all"` to pass `--all-features` to cargo.
cargo_features: CargoFeaturesDef = CargoFeaturesDef::Selected(vec![]),
+ /// Extra arguments passed only to `cargo metadata`, not to other cargo invocations.
+ /// Useful for flags like `--config` that `cargo metadata` supports.
+ cargo_metadataExtraArgs: Vec<String> = vec![],
/// Whether to pass `--no-default-features` to cargo.
cargo_noDefaultFeatures: bool = false,
/// Whether to skip fetching dependencies. If set to "true", the analysis is performed
@@ -948,18 +960,30 @@ config_data! {
/// Override the command used for bench runnables.
/// The first element of the array should be the program to execute (for example, `cargo`).
///
- /// Use the placeholders `${package}`, `${target_arg}`, `${target}`, `${executable_args}` to dynamically
- /// replace the package name, target option (such as `--bin` or `--example`), the target name and
- /// the arguments passed to test binary args (includes `rust-analyzer.runnables.extraTestBinaryArgs`).
+ /// Use the placeholders:
+ /// - `${package}`: package name.
+ /// - `${target_arg}`: target option such as `--bin`, `--test`, `--lib`, etc.
+ /// - `${target}`: target name (empty for `--lib`).
+ /// - `${test_name}`: the test path filter, e.g. `module::bench_func`.
+ /// - `${exact}`: `--exact` for single benchmarks, empty for modules.
+ /// - `${include_ignored}`: always empty for benchmarks.
+ /// - `${executable_args}`: all of the above binary args bundled together
+ /// (includes `rust-analyzer.runnables.extraTestBinaryArgs`).
runnables_bench_overrideCommand: Option<Vec<String>> = None,
/// Command to be executed instead of 'cargo' for runnables.
runnables_command: Option<String> = None,
- /// Override the command used for bench runnables.
+ /// Override the command used for doc-test runnables.
/// The first element of the array should be the program to execute (for example, `cargo`).
///
- /// Use the placeholders `${package}`, `${target_arg}`, `${target}`, `${executable_args}` to dynamically
- /// replace the package name, target option (such as `--bin` or `--example`), the target name and
- /// the arguments passed to test binary args (includes `rust-analyzer.runnables.extraTestBinaryArgs`).
+ /// Use the placeholders:
+ /// - `${package}`: package name.
+ /// - `${target_arg}`: target option such as `--bin`, `--test`, `--lib`, etc.
+ /// - `${target}`: target name (empty for `--lib`).
+ /// - `${test_name}`: the test path filter, e.g. `module::func`.
+ /// - `${exact}`: always empty for doc-tests.
+ /// - `${include_ignored}`: always empty for doc-tests.
+ /// - `${executable_args}`: all of the above binary args bundled together
+ /// (includes `rust-analyzer.runnables.extraTestBinaryArgs`).
runnables_doctest_overrideCommand: Option<Vec<String>> = None,
/// Additional arguments to be passed to cargo for runnables such as
/// tests or binaries. For example, it may be `--release`.
@@ -977,9 +1001,15 @@ config_data! {
/// Override the command used for test runnables.
/// The first element of the array should be the program to execute (for example, `cargo`).
///
- /// Use the placeholders `${package}`, `${target_arg}`, `${target}`, `${executable_args}` to dynamically
- /// replace the package name, target option (such as `--bin` or `--example`), the target name and
- /// the arguments passed to test binary args (includes `rust-analyzer.runnables.extraTestBinaryArgs`).
+ /// Available placeholders:
+ /// - `${package}`: package name.
+ /// - `${target_arg}`: target option such as `--bin`, `--test`, `--lib`, etc.
+ /// - `${target}`: target name (empty for `--lib`).
+ /// - `${test_name}`: the test path filter, e.g. `module::test_func`.
+ /// - `${exact}`: `--exact` for single tests, empty for modules.
+ /// - `${include_ignored}`: `--include-ignored` for single tests, empty otherwise.
+ /// - `${executable_args}`: all of the above binary args bundled together
+ /// (includes `rust-analyzer.runnables.extraTestBinaryArgs`).
runnables_test_overrideCommand: Option<Vec<String>> = None,
/// Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
@@ -1061,6 +1091,7 @@ struct ClientInfo {
version: Option<Version>,
}
+/// The configuration of this rust-analyzer instance.
#[derive(Clone)]
pub struct Config {
/// Projects that have a Cargo.toml or a rust-project.json in a
@@ -1070,11 +1101,16 @@ pub struct Config {
/// Projects whose configuration was generated by a command
/// configured in discoverConfig.
discovered_projects_from_command: Vec<ProjectJsonFromCommand>,
- /// The workspace roots as registered by the LSP client
+ /// The workspace roots as registered by the LSP client.
workspace_roots: Vec<AbsPathBuf>,
caps: ClientCapabilities,
- /// The LSP root path, deprecated in favor of `workspace_roots`
+
+ /// The root of the first project encountered. This is deprecated
+ /// because rust-analyzer might be handling multiple projects.
+ ///
+ /// Prefer `workspace_roots` and `workspace_root_for()`.
root_path: AbsPathBuf,
+
snippets: Vec<Snippet>,
client_info: Option<ClientInfo>,
@@ -1504,6 +1540,8 @@ pub struct LensConfig {
// annotations
pub location: AnnotationLocation,
pub filter_adjacent_derive_implementations: bool,
+
+ disable_ra_fixture: bool,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
@@ -1559,7 +1597,7 @@ impl LensConfig {
annotate_method_references: self.method_refs,
annotate_enum_variant_references: self.enum_variant_refs,
location: self.location.into(),
- minicore,
+ ra_fixture: RaFixtureConfig { minicore, disable_ra_fixture: self.disable_ra_fixture },
filter_adjacent_derive_implementations: self.filter_adjacent_derive_implementations,
}
}
@@ -1776,9 +1814,23 @@ impl Config {
s
}
- pub fn root_path(&self) -> &AbsPathBuf {
- // We should probably use `workspace_roots` here if set
- &self.root_path
+ /// Find the workspace root that contains the given path, using the
+ /// longest prefix match.
+ pub fn workspace_root_for(&self, path: &AbsPath) -> &AbsPathBuf {
+ self.workspace_roots
+ .iter()
+ .filter(|root| path.starts_with(root.as_path()))
+ .max_by_key(|root| root.as_str().len())
+ .unwrap_or(self.default_root_path())
+ }
+
+ /// Best-effort root path for the current project.
+ ///
+ /// Use `workspace_root_for` where possible, because
+ /// `default_root_path` may return the wrong path when a user has
+ /// multiple workspaces.
+ pub fn default_root_path(&self) -> &AbsPathBuf {
+ self.workspace_roots.first().unwrap_or(&self.root_path)
}
pub fn caps(&self) -> &ClientCapabilities {
@@ -1816,8 +1868,15 @@ impl Config {
}
}
+ pub fn ra_fixture<'a>(&self, minicore: MiniCore<'a>) -> RaFixtureConfig<'a> {
+ RaFixtureConfig { minicore, disable_ra_fixture: *self.disableFixtureSupport(None) }
+ }
+
pub fn call_hierarchy<'a>(&self, minicore: MiniCore<'a>) -> CallHierarchyConfig<'a> {
- CallHierarchyConfig { exclude_tests: self.references_excludeTests().to_owned(), minicore }
+ CallHierarchyConfig {
+ exclude_tests: self.references_excludeTests().to_owned(),
+ ra_fixture: self.ra_fixture(minicore),
+ }
}
pub fn completion<'a>(
@@ -1878,7 +1937,7 @@ impl Config {
})
.collect(),
exclude_traits: self.completion_excludeTraits(source_root),
- minicore,
+ ra_fixture: self.ra_fixture(minicore),
}
}
@@ -1987,12 +2046,12 @@ impl Config {
None => ide::SubstTyLen::Unlimited,
},
show_drop_glue: *self.hover_dropGlue_enable(),
- minicore,
+ ra_fixture: self.ra_fixture(minicore),
}
}
pub fn goto_definition<'a>(&self, minicore: MiniCore<'a>) -> GotoDefinitionConfig<'a> {
- GotoDefinitionConfig { minicore }
+ GotoDefinitionConfig { ra_fixture: self.ra_fixture(minicore) }
}
pub fn inlay_hints<'a>(&self, minicore: MiniCore<'a>) -> InlayHintsConfig<'a> {
@@ -2001,6 +2060,10 @@ impl Config {
InlayHintsConfig {
render_colons: self.inlayHints_renderColons().to_owned(),
type_hints: self.inlayHints_typeHints_enable().to_owned(),
+ type_hints_placement: match self.inlayHints_typeHints_location() {
+ TypeHintsLocation::Inline => ide::TypeHintsPlacement::Inline,
+ TypeHintsLocation::EndOfLine => ide::TypeHintsPlacement::EndOfLine,
+ },
sized_bound: self.inlayHints_implicitSizedBoundHints_enable().to_owned(),
parameter_hints: self.inlayHints_parameterHints_enable().to_owned(),
parameter_hints_for_missing_arguments: self
@@ -2082,7 +2145,7 @@ impl Config {
implicit_drop_hints: self.inlayHints_implicitDrops_enable().to_owned(),
implied_dyn_trait_hints: self.inlayHints_impliedDynTraitHints_enable().to_owned(),
range_exclusive_hints: self.inlayHints_rangeExclusiveHints_enable().to_owned(),
- minicore,
+ ra_fixture: self.ra_fixture(minicore),
}
}
@@ -2135,7 +2198,7 @@ impl Config {
.to_owned(),
inject_doc_comment: self.semanticHighlighting_doc_comment_inject_enable().to_owned(),
syntactic_name_ref_highlighting: false,
- minicore,
+ ra_fixture: self.ra_fixture(minicore),
}
}
@@ -2402,6 +2465,7 @@ impl Config {
target_dir_config: self.target_dir_from_config(source_root),
set_test: *self.cfg_setTest(source_root),
no_deps: *self.cargo_noDeps(source_root),
+ metadata_extra_args: self.cargo_metadataExtraArgs(source_root).clone(),
}
}
@@ -2621,6 +2685,7 @@ impl Config {
location: *self.lens_location(),
filter_adjacent_derive_implementations: *self
.gotoImplementations_filterAdjacentDerives(),
+ disable_ra_fixture: *self.disableFixtureSupport(None),
}
}
@@ -2999,6 +3064,13 @@ enum ClosureStyle {
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
+enum TypeHintsLocation {
+ Inline,
+ EndOfLine,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone)]
+#[serde(rename_all = "snake_case")]
enum ReborrowHintsDef {
Mutable,
#[serde(with = "true_or_always")]
@@ -3908,6 +3980,14 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
"`hide`: Shows `...` for every closure type",
],
},
+ "TypeHintsLocation" => set! {
+ "type": "string",
+ "enum": ["inline", "end_of_line"],
+ "enumDescriptions": [
+ "Render type hints directly after the binding identifier.",
+ "Render type hints after the end of the containing `let` statement when possible.",
+ ],
+ },
"Option<MemoryLayoutHoverRenderKindDef>" => set! {
"anyOf": [
{
diff --git a/crates/rust-analyzer/src/flycheck.rs b/crates/rust-analyzer/src/flycheck.rs
index c41696bf3f..aad8bece95 100644
--- a/crates/rust-analyzer/src/flycheck.rs
+++ b/crates/rust-analyzer/src/flycheck.rs
@@ -673,27 +673,31 @@ impl FlycheckActor {
if self.diagnostics_received == DiagnosticsReceived::NotYet {
tracing::trace!(flycheck_id = self.id, "clearing diagnostics");
// We finished without receiving any diagnostics.
- // Clear everything for good measure
- match &self.scope {
- FlycheckScope::Workspace => {
- self.send(FlycheckMessage::ClearDiagnostics {
- id: self.id,
- kind: ClearDiagnosticsKind::All(ClearScope::Workspace),
- });
- }
- FlycheckScope::Package { package, workspace_deps } => {
- for pkg in
- std::iter::once(package).chain(workspace_deps.iter().flatten())
- {
- self.send(FlycheckMessage::ClearDiagnostics {
- id: self.id,
- kind: ClearDiagnosticsKind::All(ClearScope::Package(
- pkg.clone(),
- )),
- });
- }
- }
- }
+ //
+ // `cargo check` generally outputs something, even if there are no
+ // warnings/errors, so we always know which package was checked.
+ //
+ // ```text
+ // $ cargo check --message-format=json 2>/dev/null
+ // {"reason":"compiler-artifact","package_id":"path+file:///Users/wilfred/tmp/scratch#0.1.0",...}
+ // ```
+ //
+ // However, rustc only returns JSON if there are diagnostics present, so a
+ // build without warnings or errors has an empty output.
+ //
+            // ```text
+ // $ rustc --error-format=json bad.rs
+            // {"$message_type":"diagnostic","message":"mismatched types",...}
+ //
+ // $ rustc --error-format=json good.rs
+ // ```
+ //
+ // So if we got zero diagnostics, it was almost certainly a check that
+ // wasn't specific to a package.
+ self.send(FlycheckMessage::ClearDiagnostics {
+ id: self.id,
+ kind: ClearDiagnosticsKind::All(ClearScope::Workspace),
+ });
} else if res.is_ok() {
// We clear diagnostics for packages on
// `[CargoCheckMessage::CompilerArtifact]` but there seem to be setups where
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index ad07da7759..86516b6079 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -332,7 +332,7 @@ pub(crate) fn handle_view_crate_graph(
params: ViewCrateGraphParams,
) -> anyhow::Result<String> {
let _p = tracing::info_span!("handle_view_crate_graph").entered();
- let dot = snap.analysis.view_crate_graph(params.full)?.map_err(anyhow::Error::msg)?;
+ let dot = snap.analysis.view_crate_graph(params.full)?;
Ok(dot)
}
@@ -1264,11 +1264,15 @@ pub(crate) fn handle_folding_range(
params: FoldingRangeParams,
) -> anyhow::Result<Option<Vec<FoldingRange>>> {
let _p = tracing::info_span!("handle_folding_range").entered();
+
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
- let folds = snap.analysis.folding_ranges(file_id)?;
+ let collapsed_text = snap.config.folding_range_collapsed_text();
+ let folds = snap.analysis.folding_ranges(file_id, collapsed_text)?;
+
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let line_folding_only = snap.config.line_folding_only();
+
let res = folds
.into_iter()
.map(|it| to_proto::folding_range(&text, &line_index, line_folding_only, it))
@@ -1395,7 +1399,10 @@ pub(crate) fn handle_references(
let Some(refs) = snap.analysis.find_all_refs(
position,
- &FindAllRefsConfig { search_scope: None, minicore: snap.minicore() },
+ &FindAllRefsConfig {
+ search_scope: None,
+ ra_fixture: snap.config.ra_fixture(snap.minicore()),
+ },
)?
else {
return Ok(None);
@@ -2202,7 +2209,10 @@ fn show_ref_command_link(
.analysis
.find_all_refs(
*position,
- &FindAllRefsConfig { search_scope: None, minicore: snap.minicore() },
+ &FindAllRefsConfig {
+ search_scope: None,
+ ra_fixture: snap.config.ra_fixture(snap.minicore()),
+ },
)
.unwrap_or(None)
{
@@ -2443,7 +2453,14 @@ fn run_rustfmt(
let cmd_path = if command.contains(std::path::MAIN_SEPARATOR)
|| (cfg!(windows) && command.contains('/'))
{
- snap.config.root_path().join(cmd).into()
+ let project_root = Utf8PathBuf::from_path_buf(current_dir.clone())
+ .ok()
+ .and_then(|p| AbsPathBuf::try_from(p).ok());
+ let project_root = project_root
+ .as_ref()
+ .map(|dir| snap.config.workspace_root_for(dir))
+ .unwrap_or(snap.config.default_root_path());
+ project_root.join(cmd).into()
} else {
cmd
};
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index 6a74b8a54d..af449c473a 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -13,10 +13,10 @@
use hir::ChangeWithProcMacros;
use ide::{
AnalysisHost, CallableSnippets, CompletionConfig, CompletionFieldsToResolve, DiagnosticsConfig,
- FilePosition, TextSize,
+ FilePosition, RaFixtureConfig, TextSize,
};
use ide_db::{
- MiniCore, SnippetCap,
+ SnippetCap,
imports::insert_use::{ImportGranularity, InsertUseConfig},
};
use project_model::CargoConfig;
@@ -190,7 +190,7 @@ fn integrated_completion_benchmark() {
exclude_traits: &[],
enable_auto_await: true,
enable_auto_iter: true,
- minicore: MiniCore::default(),
+ ra_fixture: RaFixtureConfig::default(),
};
let position =
FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@@ -245,7 +245,7 @@ fn integrated_completion_benchmark() {
exclude_traits: &[],
enable_auto_await: true,
enable_auto_iter: true,
- minicore: MiniCore::default(),
+ ra_fixture: RaFixtureConfig::default(),
};
let position =
FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@@ -298,7 +298,7 @@ fn integrated_completion_benchmark() {
exclude_traits: &[],
enable_auto_await: true,
enable_auto_iter: true,
- minicore: MiniCore::default(),
+ ra_fixture: RaFixtureConfig::default(),
};
let position =
FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
diff --git a/crates/rust-analyzer/src/lsp/capabilities.rs b/crates/rust-analyzer/src/lsp/capabilities.rs
index d6a694be91..3ad4cb70b4 100644
--- a/crates/rust-analyzer/src/lsp/capabilities.rs
+++ b/crates/rust-analyzer/src/lsp/capabilities.rs
@@ -335,6 +335,20 @@ impl ClientCapabilities {
.unwrap_or_default()
}
+ pub fn folding_range_collapsed_text(&self) -> bool {
+ (|| -> _ {
+ self.0
+ .text_document
+ .as_ref()?
+ .folding_range
+ .as_ref()?
+ .folding_range
+ .as_ref()?
+ .collapsed_text
+ })()
+ .unwrap_or_default()
+ }
+
pub fn hierarchical_symbols(&self) -> bool {
(|| -> _ {
self.0
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index e5b983dcbf..5fa95252e7 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -913,9 +913,9 @@ pub(crate) fn folding_range(
text: &str,
line_index: &LineIndex,
line_folding_only: bool,
- fold: Fold,
+ Fold { range: text_range, kind, collapsed_text }: Fold,
) -> lsp_types::FoldingRange {
- let kind = match fold.kind {
+ let kind = match kind {
FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
FoldKind::Region => Some(lsp_types::FoldingRangeKind::Region),
@@ -930,17 +930,19 @@ pub(crate) fn folding_range(
| FoldKind::Array
| FoldKind::ExternCrates
| FoldKind::MatchArm
- | FoldKind::Function => None,
+ | FoldKind::Function
+ | FoldKind::Stmt(_)
+ | FoldKind::TailExpr(_) => None,
};
- let range = range(line_index, fold.range);
+ let range = range(line_index, text_range);
if line_folding_only {
// Clients with line_folding_only == true (such as VSCode) will fold the whole end line
// even if it contains text not in the folding range. To prevent that we exclude
// range.end.line from the folding region if there is more text after range.end
// on the same line.
- let has_more_text_on_end_line = text[TextRange::new(fold.range.end(), TextSize::of(text))]
+ let has_more_text_on_end_line = text[TextRange::new(text_range.end(), TextSize::of(text))]
.chars()
.take_while(|it| *it != '\n')
.any(|it| !it.is_whitespace());
@@ -951,13 +953,20 @@ pub(crate) fn folding_range(
range.end.line
};
+ let collapsed_text = collapsed_text.map(|collapsed_text| {
+ let range_start = text_range.start();
+ let line_start = range_start - TextSize::from(range.start.character);
+ let text_before_range = &text[TextRange::new(line_start, range_start)];
+ format!("{text_before_range}{collapsed_text}")
+ });
+
lsp_types::FoldingRange {
start_line: range.start.line,
start_character: None,
end_line,
end_character: None,
kind,
- collapsed_text: None,
+ collapsed_text,
}
} else {
lsp_types::FoldingRange {
@@ -966,7 +975,7 @@ pub(crate) fn folding_range(
end_line: range.end.line,
end_character: Some(range.end.character),
kind,
- collapsed_text: None,
+ collapsed_text,
}
}
}
@@ -2037,8 +2046,8 @@ fn main() {
}"#;
let (analysis, file_id) = Analysis::from_single_file(text.to_owned());
- let folds = analysis.folding_ranges(file_id).unwrap();
- assert_eq!(folds.len(), 4);
+ let folds = analysis.folding_ranges(file_id, true).unwrap();
+ assert_eq!(folds.len(), 5);
let line_index = LineIndex {
index: Arc::new(ide::LineIndex::new(text)),
@@ -2048,7 +2057,7 @@ fn main() {
let converted: Vec<lsp_types::FoldingRange> =
folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect();
- let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)];
+ let expected_lines = [(0, 2), (4, 10), (5, 9), (5, 6), (7, 9)];
assert_eq!(converted.len(), expected_lines.len());
for (folding_range, (start_line, end_line)) in converted.iter().zip(expected_lines.iter()) {
assert_eq!(folding_range.start_line, *start_line);
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 7c494de6f7..a8c3d062d0 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -830,12 +830,19 @@ impl GlobalState {
let command = cfg.command.clone();
let discover = DiscoverCommand::new(self.discover_sender.clone(), command);
+ let discover_path = match &arg {
+ DiscoverProjectParam::Buildfile(it) => it,
+ DiscoverProjectParam::Path(it) => it,
+ };
+ let current_dir =
+ self.config.workspace_root_for(discover_path.as_path()).clone();
+
let arg = match arg {
DiscoverProjectParam::Buildfile(it) => DiscoverArgument::Buildfile(it),
DiscoverProjectParam::Path(it) => DiscoverArgument::Path(it),
};
- match discover.spawn(arg, self.config.root_path().as_ref()) {
+ match discover.spawn(arg, current_dir.as_ref()) {
Ok(handle) => {
if self.discover_jobs_active == 0 {
let title = &cfg.progress_label.clone();
@@ -953,7 +960,7 @@ impl GlobalState {
if let Some(dir) = dir {
message += &format!(
": {}",
- match dir.strip_prefix(self.config.root_path()) {
+ match dir.strip_prefix(self.config.workspace_root_for(&dir)) {
Some(relative_path) => relative_path.as_utf8_path(),
None => dir.as_ref(),
}
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 83f4a19b39..1832275eb3 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -390,7 +390,7 @@ impl GlobalState {
info!(%cause, "will fetch build data");
let workspaces = Arc::clone(&self.workspaces);
let config = self.config.cargo(None);
- let root_path = self.config.root_path().clone();
+ let root_path = self.config.default_root_path().clone();
self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| {
sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();
@@ -582,7 +582,8 @@ impl GlobalState {
[
(base.clone(), "**/*.rs"),
(base.clone(), "**/Cargo.{lock,toml}"),
- (base, "**/rust-analyzer.toml"),
+ (base.clone(), "**/rust-analyzer.toml"),
+ (base, "**/*.md"),
]
})
})
@@ -607,6 +608,7 @@ impl GlobalState {
format!("{base}/**/*.rs"),
format!("{base}/**/Cargo.{{toml,lock}}"),
format!("{base}/**/rust-analyzer.toml"),
+ format!("{base}/**/*.md"),
]
})
})
@@ -883,7 +885,7 @@ impl GlobalState {
config,
crate::flycheck::FlycheckConfigJson::default(),
None,
- self.config.root_path().clone(),
+ self.config.default_root_path().clone(),
None,
None,
)]
@@ -975,6 +977,7 @@ pub(crate) fn should_refresh_for_change(
change_kind: ChangeKind,
additional_paths: &[&str],
) -> bool {
+ // Note: build scripts are retriggered on file save, no refresh is necessary
const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
@@ -991,15 +994,20 @@ pub(crate) fn should_refresh_for_change(
return true;
}
+ // .cargo/config{.toml}
+ if matches!(file_name, "config.toml" | "config")
+ && path.parent().map(|parent| parent.as_str().ends_with(".cargo")).unwrap_or(false)
+ {
+ return true;
+ }
+
+ // Everything below only matters when files are created or deleted
if change_kind == ChangeKind::Modify {
return false;
}
- // .cargo/config{.toml}
if path.extension().unwrap_or_default() != "rs" {
- let is_cargo_config = matches!(file_name, "config.toml" | "config")
- && path.parent().map(|parent| parent.as_str().ends_with(".cargo")).unwrap_or(false);
- return is_cargo_config;
+ return false;
}
if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_str().ends_with(it)) {
diff --git a/crates/rust-analyzer/src/target_spec.rs b/crates/rust-analyzer/src/target_spec.rs
index 8be061cacf..01196b80cd 100644
--- a/crates/rust-analyzer/src/target_spec.rs
+++ b/crates/rust-analyzer/src/target_spec.rs
@@ -230,6 +230,18 @@ impl CargoTargetSpec {
};
let test_name = test_name.unwrap_or_default();
+ let exact = match kind {
+ RunnableKind::Test { test_id } | RunnableKind::Bench { test_id } => match test_id {
+ TestId::Path(_) => "--exact",
+ TestId::Name(_) => "",
+ },
+ _ => "",
+ };
+ let include_ignored = match kind {
+ RunnableKind::Test { .. } => "--include-ignored",
+ _ => "",
+ };
+
let target_arg = |kind| match kind {
TargetKind::Bin => "--bin",
TargetKind::Test => "--test",
@@ -249,7 +261,9 @@ impl CargoTargetSpec {
.replace("${package}", &spec.package)
.replace("${target_arg}", target_arg(spec.target_kind))
.replace("${target}", target(spec.target_kind, &spec.target))
- .replace("${test_name}", &test_name),
+ .replace("${test_name}", &test_name)
+ .replace("${exact}", exact)
+ .replace("${include_ignored}", include_ignored),
_ => arg,
};
@@ -274,15 +288,13 @@ impl CargoTargetSpec {
let mut executable_args = Vec::new();
match kind {
- RunnableKind::Test { test_id, attr } => {
+ RunnableKind::Test { test_id } => {
executable_args.push(test_id.to_string());
if let TestId::Path(_) = test_id {
executable_args.push("--exact".to_owned());
}
executable_args.extend(extra_test_binary_args);
- if attr.ignore {
- executable_args.push("--ignored".to_owned());
- }
+ executable_args.push("--include-ignored".to_owned());
}
RunnableKind::TestMod { path } => {
executable_args.push(path.clone());
diff --git a/crates/rust-analyzer/tests/slow-tests/flycheck.rs b/crates/rust-analyzer/tests/slow-tests/flycheck.rs
index c1d53fb33a..c6f1f81139 100644
--- a/crates/rust-analyzer/tests/slow-tests/flycheck.rs
+++ b/crates/rust-analyzer/tests/slow-tests/flycheck.rs
@@ -110,3 +110,45 @@ fn main() {}
diagnostics.diagnostics,
);
}
+
+#[test]
+fn test_flycheck_diagnostics_with_override_command_cleared_after_fix() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ // Start with a program that is lint clean.
+ let server = Project::with_fixture(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/main.rs
+fn main() {}
+"#,
+ )
+ .with_config(serde_json::json!({
+ "checkOnSave": true,
+ "check": {
+ "overrideCommand": ["rustc", "--error-format=json", "$saved_file"]
+ }
+ }))
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ // Introduce an unused variable.
+ server.write_file_and_save("src/main.rs", "fn main() {\n let x = 1;\n}\n".to_owned());
+
+ let diags = server.wait_for_diagnostics();
+ assert!(
+ diags.diagnostics.iter().any(|d| d.message.contains("unused variable")),
+ "expected unused variable diagnostic, got: {:?}",
+ diags.diagnostics,
+ );
+
+ // Fix it and verify that diagnostics are cleared.
+ server.write_file_and_save("src/main.rs", "fn main() {\n let _x = 1;\n}\n".to_owned());
+ server.wait_for_diagnostics_cleared();
+}
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index fcdc8bb7cd..3c57e36b4f 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -262,7 +262,7 @@ fn main() {}
{
"args": {
"cargoArgs": ["test", "--package", "foo", "--test", "spam"],
- "executableArgs": ["test_eggs", "--exact", "--nocapture"],
+ "executableArgs": ["test_eggs", "--exact", "--nocapture", "--include-ignored"],
"overrideCargo": null,
"cwd": server.path().join("foo"),
"workspaceRoot": server.path().join("foo")
diff --git a/crates/rust-analyzer/tests/slow-tests/ratoml.rs b/crates/rust-analyzer/tests/slow-tests/ratoml.rs
index cac7efd84a..dd113babff 100644
--- a/crates/rust-analyzer/tests/slow-tests/ratoml.rs
+++ b/crates/rust-analyzer/tests/slow-tests/ratoml.rs
@@ -1008,3 +1008,45 @@ fn main() {
InternalTestingFetchConfigResponse::CheckWorkspace(true),
);
}
+
+#[test]
+fn ratoml_virtual_workspace() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+[workspace]
+members = ["member"]
+"#,
+ r#"
+//- /p1/rust-analyzer.toml
+assist.emitMustUse = true
+"#,
+ r#"
+//- /p1/member/Cargo.toml
+[package]
+name = "member"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/member/src/lib.rs
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+"#,
+ ],
+ vec!["p1"],
+ None,
+ );
+
+ server.query(
+ InternalTestingFetchConfigOption::AssistEmitMustUse,
+ 3,
+ InternalTestingFetchConfigResponse::AssistEmitMustUse(true),
+ );
+}
diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs
index 7ee31f3d53..7390403673 100644
--- a/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -173,7 +173,8 @@ impl Project<'_> {
chalk_filter: std::env::var("CHALK_DEBUG").ok(),
profile_filter: std::env::var("RA_PROFILE").ok(),
json_profile_filter: std::env::var("RA_PROFILE_JSON").ok(),
- };
+ }
+ .init();
});
let FixtureWithProjectMeta {
diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs
index b706d7f722..23a0411ead 100644
--- a/crates/syntax/src/ast/edit.rs
+++ b/crates/syntax/src/ast/edit.rs
@@ -105,8 +105,7 @@ impl IndentLevel {
}
pub(super) fn clone_increase_indent(self, node: &SyntaxNode) -> SyntaxNode {
- let node = node.clone_subtree();
- let mut editor = SyntaxEditor::new(node.clone());
+ let (mut editor, node) = SyntaxEditor::new(node.clone());
let tokens = node
.preorder_with_tokens()
.filter_map(|event| match event {
@@ -140,8 +139,7 @@ impl IndentLevel {
}
pub(super) fn clone_decrease_indent(self, node: &SyntaxNode) -> SyntaxNode {
- let node = node.clone_subtree();
- let mut editor = SyntaxEditor::new(node.clone());
+ let (mut editor, node) = SyntaxEditor::new(node.clone());
let tokens = node
.preorder_with_tokens()
.filter_map(|event| match event {
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 63e4608d0f..3fc3b39fee 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -77,6 +77,15 @@ fn text_of_first_token(node: &SyntaxNode) -> TokenText<'_> {
}
}
+fn into_comma(it: NodeOrToken<SyntaxNode, SyntaxToken>) -> Option<SyntaxToken> {
+ let token = match it {
+ NodeOrToken::Token(it) => it,
+ NodeOrToken::Node(node) if node.kind() == SyntaxKind::ERROR => node.first_token()?,
+ NodeOrToken::Node(_) => return None,
+ };
+ (token.kind() == T![,]).then_some(token)
+}
+
impl ast::Abi {
pub fn abi_string(&self) -> Option<ast::String> {
support::token(&self.syntax, SyntaxKind::STRING).and_then(ast::String::cast)
@@ -1037,6 +1046,21 @@ impl ast::GenericParamList {
}
}
+impl ast::ArgList {
+    /// Comma-separated args; an argument may be empty.
+ pub fn args_maybe_empty(&self) -> impl Iterator<Item = Option<ast::Expr>> {
+ // (Expr? ','?)*
+ let mut after_arg = false;
+ self.syntax().children_with_tokens().filter_map(move |it| {
+ if into_comma(it.clone()).is_some() {
+ if std::mem::take(&mut after_arg) { None } else { Some(None) }
+ } else {
+ Some(ast::Expr::cast(it.into_node()?).inspect(|_| after_arg = true))
+ }
+ })
+ }
+}
+
impl ast::ForExpr {
pub fn iterable(&self) -> Option<ast::Expr> {
// If the iterable is a BlockExpr, check if the body is missing.
diff --git a/crates/syntax/src/ast/syntax_factory/constructors.rs b/crates/syntax/src/ast/syntax_factory/constructors.rs
index 44114a7802..e91e444a32 100644
--- a/crates/syntax/src/ast/syntax_factory/constructors.rs
+++ b/crates/syntax/src/ast/syntax_factory/constructors.rs
@@ -1960,6 +1960,47 @@ impl SyntaxFactory {
ast
}
+ pub fn trait_(
+ &self,
+ is_unsafe: bool,
+ ident: &str,
+ generic_param_list: Option<ast::GenericParamList>,
+ where_clause: Option<ast::WhereClause>,
+ assoc_items: ast::AssocItemList,
+ ) -> ast::Trait {
+ let ast = make::trait_(
+ is_unsafe,
+ ident,
+ generic_param_list.clone(),
+ where_clause.clone(),
+ assoc_items.clone(),
+ )
+ .clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ if let Some(generic_param_list) = generic_param_list {
+ builder.map_node(
+ generic_param_list.syntax().clone(),
+ ast.generic_param_list().unwrap().syntax().clone(),
+ );
+ }
+ if let Some(where_clause) = where_clause {
+ builder.map_node(
+ where_clause.syntax().clone(),
+ ast.where_clause().unwrap().syntax().clone(),
+ );
+ }
+ builder.map_node(
+ assoc_items.syntax().clone(),
+ ast.assoc_item_list().unwrap().syntax().clone(),
+ );
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
pub fn ret_type(&self, ty: ast::Type) -> ast::RetType {
let ast = make::ret_type(ty.clone()).clone_for_update();
@@ -2053,6 +2094,17 @@ impl SyntaxFactory {
self.path_unqualified(self.path_segment(self.name_ref(ident)))
}
+ pub fn path_from_idents<'a>(
+ &self,
+ parts: impl IntoIterator<Item = &'a str>,
+ ) -> Option<ast::Path> {
+ make::ext::path_from_idents(parts).map(|path| path.clone_for_update())
+ }
+
+ pub fn token_tree_from_node(&self, node: &SyntaxNode) -> ast::TokenTree {
+ make::ext::token_tree_from_node(node).clone_for_update()
+ }
+
pub fn expr_unit(&self) -> ast::Expr {
self.expr_tuple([]).into()
}
diff --git a/crates/syntax/src/syntax_editor.rs b/crates/syntax/src/syntax_editor.rs
index e6937e4d0f..8e4dc75d22 100644
--- a/crates/syntax/src/syntax_editor.rs
+++ b/crates/syntax/src/syntax_editor.rs
@@ -14,7 +14,10 @@ use std::{
use rowan::TextRange;
use rustc_hash::FxHashMap;
-use crate::{SyntaxElement, SyntaxNode, SyntaxToken};
+use crate::{
+ AstNode, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, T,
+ ast::{self, edit::IndentLevel, syntax_factory::SyntaxFactory},
+};
mod edit_algo;
mod edits;
@@ -32,9 +35,37 @@ pub struct SyntaxEditor {
}
impl SyntaxEditor {
- /// Creates a syntax editor to start editing from `root`
- pub fn new(root: SyntaxNode) -> Self {
- Self { root, changes: vec![], mappings: SyntaxMapping::default(), annotations: vec![] }
+ /// Creates a syntax editor from `root`.
+ ///
+ /// The returned `root` is guaranteed to be a detached, immutable node.
+ /// If the provided node is not a root (i.e., has a parent) or is already
+ /// mutable, it is cloned into a fresh subtree to satisfy syntax editor
+ /// invariants.
+ pub fn new(root: SyntaxNode) -> (Self, SyntaxNode) {
+ let mut root = root;
+
+ if root.parent().is_some() || root.is_mutable() {
+ root = root.clone_subtree()
+ };
+
+ let editor = Self {
+ root: root.clone(),
+ changes: Vec::new(),
+ mappings: SyntaxMapping::default(),
+ annotations: Vec::new(),
+ };
+
+ (editor, root)
+ }
+
+ /// Typed-node variant of [`SyntaxEditor::new`].
+ pub fn with_ast_node<T>(root: &T) -> (Self, T)
+ where
+ T: AstNode,
+ {
+ let (editor, root) = Self::new(root.syntax().clone());
+
+ (editor, T::cast(root).unwrap())
}
pub fn add_annotation(&mut self, element: impl Element, annotation: SyntaxAnnotation) {
@@ -73,6 +104,34 @@ impl SyntaxEditor {
self.changes.push(Change::InsertAll(position, elements))
}
+ pub fn insert_with_whitespace(
+ &mut self,
+ position: Position,
+ element: impl Element,
+ factory: &SyntaxFactory,
+ ) {
+ self.insert_all_with_whitespace(position, vec![element.syntax_element()], factory)
+ }
+
+ pub fn insert_all_with_whitespace(
+ &mut self,
+ position: Position,
+ mut elements: Vec<SyntaxElement>,
+ factory: &SyntaxFactory,
+ ) {
+ if let Some(first) = elements.first()
+ && let Some(ws) = ws_before(&position, first, factory)
+ {
+ elements.insert(0, ws.into());
+ }
+ if let Some(last) = elements.last()
+ && let Some(ws) = ws_after(&position, last, factory)
+ {
+ elements.push(ws.into());
+ }
+ self.insert_all(position, elements)
+ }
+
pub fn delete(&mut self, element: impl Element) {
let element = element.syntax_element();
debug_assert!(is_ancestor_or_self_of_element(&element, &self.root));
@@ -384,6 +443,86 @@ impl Element for SyntaxToken {
}
}
+fn ws_before(
+ position: &Position,
+ new: &SyntaxElement,
+ factory: &SyntaxFactory,
+) -> Option<SyntaxToken> {
+ let prev = match &position.repr {
+ PositionRepr::FirstChild(_) => return None,
+ PositionRepr::After(it) => it,
+ };
+
+ if prev.kind() == T!['{']
+ && new.kind() == SyntaxKind::USE
+ && let Some(item_list) = prev.parent().and_then(ast::ItemList::cast)
+ {
+ let mut indent = IndentLevel::from_element(&item_list.syntax().clone().into());
+ indent.0 += 1;
+ return Some(factory.whitespace(&format!("\n{indent}")));
+ }
+
+ if prev.kind() == T!['{']
+ && ast::Stmt::can_cast(new.kind())
+ && let Some(stmt_list) = prev.parent().and_then(ast::StmtList::cast)
+ {
+ let mut indent = IndentLevel::from_element(&stmt_list.syntax().clone().into());
+ indent.0 += 1;
+ return Some(factory.whitespace(&format!("\n{indent}")));
+ }
+
+ ws_between(prev, new, factory)
+}
+
+fn ws_after(
+ position: &Position,
+ new: &SyntaxElement,
+ factory: &SyntaxFactory,
+) -> Option<SyntaxToken> {
+ let next = match &position.repr {
+ PositionRepr::FirstChild(parent) => parent.first_child_or_token()?,
+ PositionRepr::After(sibling) => sibling.next_sibling_or_token()?,
+ };
+ ws_between(new, &next, factory)
+}
+
+fn ws_between(
+ left: &SyntaxElement,
+ right: &SyntaxElement,
+ factory: &SyntaxFactory,
+) -> Option<SyntaxToken> {
+ if left.kind() == SyntaxKind::WHITESPACE || right.kind() == SyntaxKind::WHITESPACE {
+ return None;
+ }
+ if right.kind() == T![;] || right.kind() == T![,] {
+ return None;
+ }
+ if left.kind() == T![<] || right.kind() == T![>] {
+ return None;
+ }
+ if left.kind() == T![&] && right.kind() == SyntaxKind::LIFETIME {
+ return None;
+ }
+ if right.kind() == SyntaxKind::GENERIC_ARG_LIST {
+ return None;
+ }
+ if right.kind() == SyntaxKind::USE {
+ let mut indent = IndentLevel::from_element(left);
+ if left.kind() == SyntaxKind::USE {
+ indent.0 = IndentLevel::from_element(right).0.max(indent.0);
+ }
+ return Some(factory.whitespace(&format!("\n{indent}")));
+ }
+ if left.kind() == SyntaxKind::ATTR {
+ let mut indent = IndentLevel::from_element(right);
+ if right.kind() == SyntaxKind::ATTR {
+ indent.0 = IndentLevel::from_element(left).0.max(indent.0);
+ }
+ return Some(factory.whitespace(&format!("\n{indent}")));
+ }
+ Some(factory.whitespace(" "))
+}
+
fn is_ancestor_or_self(node: &SyntaxNode, ancestor: &SyntaxNode) -> bool {
node == ancestor || node.ancestors().any(|it| &it == ancestor)
}
@@ -420,10 +559,11 @@ mod tests {
.into(),
);
+ let (mut editor, root) = SyntaxEditor::with_ast_node(&root);
+
let to_wrap = root.syntax().descendants().find_map(ast::TupleExpr::cast).unwrap();
let to_replace = root.syntax().descendants().find_map(ast::BinExpr::cast).unwrap();
- let mut editor = SyntaxEditor::new(root.syntax().clone());
let make = SyntaxFactory::with_mappings();
let name = make::name("var_name");
@@ -478,9 +618,8 @@ mod tests {
None,
);
+ let (mut editor, root) = SyntaxEditor::with_ast_node(&root);
let second_let = root.syntax().descendants().find_map(ast::LetStmt::cast).unwrap();
-
- let mut editor = SyntaxEditor::new(root.syntax().clone());
let make = SyntaxFactory::without_mappings();
editor.insert(
@@ -530,11 +669,12 @@ mod tests {
),
);
+ let (mut editor, root) = SyntaxEditor::with_ast_node(&root);
+
let inner_block =
root.syntax().descendants().flat_map(ast::BlockExpr::cast).nth(1).unwrap();
let second_let = root.syntax().descendants().find_map(ast::LetStmt::cast).unwrap();
- let mut editor = SyntaxEditor::new(root.syntax().clone());
let make = SyntaxFactory::with_mappings();
let new_block_expr = make.block_expr([], Some(ast::Expr::BlockExpr(inner_block.clone())));
@@ -584,9 +724,9 @@ mod tests {
None,
);
- let inner_block = root.clone();
+ let (mut editor, root) = SyntaxEditor::with_ast_node(&root);
- let mut editor = SyntaxEditor::new(root.syntax().clone());
+ let inner_block = root;
let make = SyntaxFactory::with_mappings();
let new_block_expr = make.block_expr([], Some(ast::Expr::BlockExpr(inner_block.clone())));
@@ -632,7 +772,7 @@ mod tests {
false,
);
- let mut editor = SyntaxEditor::new(parent_fn.syntax().clone());
+ let (mut editor, parent_fn) = SyntaxEditor::with_ast_node(&parent_fn);
if let Some(ret_ty) = parent_fn.ret_type() {
editor.delete(ret_ty.syntax().clone());
@@ -659,7 +799,8 @@ mod tests {
let arg_list =
make::arg_list([make::expr_literal("1").into(), make::expr_literal("2").into()]);
- let mut editor = SyntaxEditor::new(arg_list.syntax().clone());
+ let (mut editor, arg_list) = SyntaxEditor::with_ast_node(&arg_list);
+
let target_expr = make::token(parser::SyntaxKind::UNDERSCORE);
for arg in arg_list.args() {
@@ -677,7 +818,8 @@ mod tests {
let arg_list =
make::arg_list([make::expr_literal("1").into(), make::expr_literal("2").into()]);
- let mut editor = SyntaxEditor::new(arg_list.syntax().clone());
+ let (mut editor, arg_list) = SyntaxEditor::with_ast_node(&arg_list);
+
let target_expr = make::expr_literal("3").clone_for_update();
for arg in arg_list.args() {
@@ -695,7 +837,8 @@ mod tests {
let arg_list =
make::arg_list([make::expr_literal("1").into(), make::expr_literal("2").into()]);
- let mut editor = SyntaxEditor::new(arg_list.syntax().clone());
+ let (mut editor, arg_list) = SyntaxEditor::with_ast_node(&arg_list);
+
let target_expr = make::ext::expr_unit().clone_for_update();
for arg in arg_list.args() {
diff --git a/crates/syntax/src/syntax_editor/edit_algo.rs b/crates/syntax/src/syntax_editor/edit_algo.rs
index e697d97061..78e7083f97 100644
--- a/crates/syntax/src/syntax_editor/edit_algo.rs
+++ b/crates/syntax/src/syntax_editor/edit_algo.rs
@@ -192,11 +192,8 @@ pub(super) fn apply_edits(editor: SyntaxEditor) -> SyntaxEdit {
}
};
}
- Change::Replace(SyntaxElement::Node(target), Some(SyntaxElement::Node(new_target))) => {
+ Change::Replace(SyntaxElement::Node(target), Some(SyntaxElement::Node(_))) => {
*target = tree_mutator.make_syntax_mut(target);
- if new_target.ancestors().any(|node| node == tree_mutator.immutable) {
- *new_target = new_target.clone_for_update();
- }
}
Change::Replace(target, _) | Change::ReplaceWithMany(target, _) => {
*target = tree_mutator.make_element_mut(target);
@@ -210,6 +207,56 @@ pub(super) fn apply_edits(editor: SyntaxEditor) -> SyntaxEdit {
}
match &mut changes[index as usize] {
+ Change::Insert(_, SyntaxElement::Node(node))
+ | Change::Replace(_, Some(SyntaxElement::Node(node))) => {
+ if node.parent().is_some() {
+ *node = node.clone_subtree().clone_for_update();
+ } else if !node.is_mutable() {
+ *node = node.clone_for_update();
+ }
+ }
+ Change::Insert(_, SyntaxElement::Token(token))
+ | Change::Replace(_, Some(SyntaxElement::Token(token))) => {
+ if let Some(parent) = token.parent() {
+ let idx = token.index();
+ let new_parent = parent.clone_subtree().clone_for_update();
+ *token = new_parent
+ .children_with_tokens()
+ .nth(idx)
+ .and_then(SyntaxElement::into_token)
+ .unwrap();
+ }
+ }
+ Change::InsertAll(_, elements)
+ | Change::ReplaceWithMany(_, elements)
+ | Change::ReplaceAll(_, elements) => {
+ for element in elements {
+ match element {
+ SyntaxElement::Node(node) => {
+ if node.parent().is_some() {
+ *node = node.clone_subtree().clone_for_update();
+ } else if !node.is_mutable() {
+ *node = node.clone_for_update();
+ }
+ }
+ SyntaxElement::Token(token) => {
+ if let Some(parent) = token.parent() {
+ let idx = token.index();
+ let new_parent = parent.clone_subtree().clone_for_update();
+ *token = new_parent
+ .children_with_tokens()
+ .nth(idx)
+ .and_then(SyntaxElement::into_token)
+ .unwrap();
+ }
+ }
+ }
+ }
+ }
+ _ => {}
+ }
+
+ match &mut changes[index as usize] {
Change::Insert(_, element) | Change::Replace(_, Some(element)) => {
deduplicate_node(element);
}
diff --git a/crates/syntax/src/syntax_editor/edits.rs b/crates/syntax/src/syntax_editor/edits.rs
index 44f0a8038e..d741adb6e3 100644
--- a/crates/syntax/src/syntax_editor/edits.rs
+++ b/crates/syntax/src/syntax_editor/edits.rs
@@ -229,6 +229,25 @@ impl ast::AssocItemList {
}
}
+impl ast::Impl {
+ pub fn get_or_create_assoc_item_list_with_editor(
+ &self,
+ editor: &mut SyntaxEditor,
+ make: &SyntaxFactory,
+ ) -> ast::AssocItemList {
+ if let Some(list) = self.assoc_item_list() {
+ list
+ } else {
+ let list = make.assoc_item_list_empty();
+ editor.insert_all(
+ Position::last_child_of(self.syntax()),
+ vec![make.whitespace(" ").into(), list.syntax().clone().into()],
+ );
+ list
+ }
+ }
+}
+
impl ast::VariantList {
pub fn add_variant(&self, editor: &mut SyntaxEditor, variant: &ast::Variant) {
let make = SyntaxFactory::without_mappings();
@@ -473,8 +492,7 @@ enum Foo {
}
fn check_add_variant(before: &str, expected: &str, variant: ast::Variant) {
- let enum_ = ast_from_text::<ast::Enum>(before);
- let mut editor = SyntaxEditor::new(enum_.syntax().clone());
+ let (mut editor, enum_) = SyntaxEditor::with_ast_node(&ast_from_text::<ast::Enum>(before));
if let Some(it) = enum_.variant_list() {
it.add_variant(&mut editor, &variant)
}
diff --git a/crates/syntax/src/syntax_editor/mapping.rs b/crates/syntax/src/syntax_editor/mapping.rs
index 6257bf4e57..180c2e69fa 100644
--- a/crates/syntax/src/syntax_editor/mapping.rs
+++ b/crates/syntax/src/syntax_editor/mapping.rs
@@ -161,7 +161,7 @@ impl SyntaxMapping {
// Try to follow the mapping tree, if it exists
let input_mapping = self.upmap_node_single(input);
let input_ancestor =
- input.ancestors().find_map(|ancestor| self.upmap_node_single(&ancestor));
+ input.ancestors().find(|ancestor| self.upmap_node_single(ancestor).is_some());
match (input_mapping, input_ancestor) {
(Some(input_mapping), _) => {
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs
index e271c32c86..f346535ca1 100644
--- a/crates/test-fixture/src/lib.rs
+++ b/crates/test-fixture/src/lib.rs
@@ -11,7 +11,7 @@ use base_db::target::TargetData;
use base_db::{
Crate, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData,
DependencyBuilder, Env, FileChange, FileSet, FxIndexMap, LangCrateOrigin, SourceDatabase,
- SourceRoot, Version, VfsPath,
+ SourceRoot, Version, VfsPath, all_crates,
};
use cfg::CfgOptions;
use hir_expand::{
@@ -227,7 +227,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
}
fn test_crate(&self) -> Crate {
- self.all_crates().iter().copied().find(|&krate| !krate.data(self).origin.is_lang()).unwrap()
+ all_crates(self).iter().copied().find(|&krate| !krate.data(self).origin.is_lang()).unwrap()
}
}
diff --git a/crates/vfs-notify/src/lib.rs b/crates/vfs-notify/src/lib.rs
index 428b19c50b..6465a85d2d 100644
--- a/crates/vfs-notify/src/lib.rs
+++ b/crates/vfs-notify/src/lib.rs
@@ -14,7 +14,7 @@ use std::{
};
use crossbeam_channel::{Receiver, Sender, select, unbounded};
-use notify::{Config, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
+use notify::{Config, EventKind, RecommendedWatcher, RecursiveMode, Watcher, event::AccessKind};
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rayon::iter::{IndexedParallelIterator as _, IntoParallelIterator as _, ParallelIterator};
use rustc_hash::FxHashSet;
@@ -63,6 +63,7 @@ struct NotifyActor {
sender: loader::Sender,
watched_file_entries: FxHashSet<AbsPathBuf>,
watched_dir_entries: Vec<loader::Directories>,
+ seen_paths: FxHashSet<AbsPathBuf>,
// Drop order is significant.
watcher: Option<(RecommendedWatcher, Receiver<NotifyEvent>)>,
}
@@ -79,6 +80,7 @@ impl NotifyActor {
sender,
watched_dir_entries: Vec::new(),
watched_file_entries: FxHashSet::default(),
+ seen_paths: FxHashSet::default(),
watcher: None,
}
}
@@ -120,6 +122,7 @@ impl NotifyActor {
let n_total = config.load.len();
self.watched_dir_entries.clear();
self.watched_file_entries.clear();
+ self.seen_paths.clear();
self.send(loader::Message::Progress {
n_total,
@@ -195,10 +198,12 @@ impl NotifyActor {
},
Event::NotifyEvent(event) => {
if let Some(event) = log_notify_error(event)
- && let EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) =
- event.kind
+ && let EventKind::Create(_)
+ | EventKind::Modify(_)
+ | EventKind::Remove(_)
+ | EventKind::Access(AccessKind::Open(_)) = event.kind
{
- let files = event
+ let abs_paths: Vec<AbsPathBuf> = event
.paths
.into_iter()
.filter_map(|path| {
@@ -207,6 +212,28 @@ impl NotifyActor {
.expect("path is absolute"),
)
})
+ .collect();
+
+ let mut saw_new_file = false;
+ for abs_path in &abs_paths {
+ if self.seen_paths.insert(abs_path.clone()) {
+ saw_new_file = true;
+ }
+ }
+
+ // Only consider access events for files that we haven't seen
+ // before.
+ //
+ // This is important on FUSE filesystems, where we may not get a
+ // Create event. In other cases we're about to access the file, so
+ // we don't want an infinite loop where processing an Access event
+ // creates another Access event.
+ if matches!(event.kind, EventKind::Access(_)) && !saw_new_file {
+ continue;
+ }
+
+ let files = abs_paths
+ .into_iter()
.filter_map(|path| -> Option<(AbsPathBuf, Option<Vec<u8>>)> {
// Ignore events for files/directories that we're not watching.
if !(self.watched_file_entries.contains(&path)
diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs
index 50e388d780..d48b984407 100644
--- a/crates/vfs/src/lib.rs
+++ b/crates/vfs/src/lib.rs
@@ -157,7 +157,7 @@ pub enum Change {
}
/// Kind of [file change](ChangedFile).
-#[derive(Eq, PartialEq, Debug)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ChangeKind {
/// The file was (re-)created
Create,
diff --git a/docs/book/src/configuration_generated.md b/docs/book/src/configuration_generated.md
index 35fba5accd..da37fc1582 100644
--- a/docs/book/src/configuration_generated.md
+++ b/docs/book/src/configuration_generated.md
@@ -166,6 +166,14 @@ List of features to activate.
Set this to `"all"` to pass `--all-features` to cargo.
+## rust-analyzer.cargo.metadataExtraArgs {#cargo.metadataExtraArgs}
+
+Default: `[]`
+
+Extra arguments passed only to `cargo metadata`, not to other cargo invocations.
+Useful for flags like `--config` that `cargo metadata` supports.
+
+
## rust-analyzer.cargo.noDefaultFeatures {#cargo.noDefaultFeatures}
Default: `false`
@@ -618,6 +626,15 @@ The warnings will be indicated by a blue squiggly underline in code and a blue i
the `Problems Panel`.
+## rust-analyzer.disableFixtureSupport {#disableFixtureSupport}
+
+Default: `false`
+
+Disable support for `#[rust_analyzer::rust_fixture]` snippets.
+
+If you are not working on rust-analyzer itself, you should ignore this config.
+
+
## rust-analyzer.document.symbol.search.excludeLocals {#document.symbol.search.excludeLocals}
Default: `true`
@@ -1147,6 +1164,13 @@ Default: `false`
Hide inlay type hints for constructors.
+## rust-analyzer.inlayHints.typeHints.location {#inlayHints.typeHints.location}
+
+Default: `"inline"`
+
+Where to render type hints relative to their binding pattern.
+
+
## rust-analyzer.interpret.tests {#interpret.tests}
Default: `false`
@@ -1380,9 +1404,15 @@ Default: `null`
Override the command used for bench runnables.
The first element of the array should be the program to execute (for example, `cargo`).
-Use the placeholders `${package}`, `${target_arg}`, `${target}`, `${executable_args}` to dynamically
-replace the package name, target option (such as `--bin` or `--example`), the target name and
-the arguments passed to test binary args (includes `rust-analyzer.runnables.extraTestBinaryArgs`).
+Use the placeholders:
+- `${package}`: package name.
+- `${target_arg}`: target option such as `--bin`, `--test`, `--lib`, etc.
+- `${target}`: target name (empty for `--lib`).
+- `${test_name}`: the test path filter, e.g. `module::bench_func`.
+- `${exact}`: `--exact` for single benchmarks, empty for modules.
+- `${include_ignored}`: always empty for benchmarks.
+- `${executable_args}`: all of the above binary args bundled together
+ (includes `rust-analyzer.runnables.extraTestBinaryArgs`).
## rust-analyzer.runnables.command {#runnables.command}
@@ -1396,12 +1426,18 @@ Command to be executed instead of 'cargo' for runnables.
Default: `null`
-Override the command used for bench runnables.
+Override the command used for doc-test runnables.
The first element of the array should be the program to execute (for example, `cargo`).
-Use the placeholders `${package}`, `${target_arg}`, `${target}`, `${executable_args}` to dynamically
-replace the package name, target option (such as `--bin` or `--example`), the target name and
-the arguments passed to test binary args (includes `rust-analyzer.runnables.extraTestBinaryArgs`).
+Use the placeholders:
+- `${package}`: package name.
+- `${target_arg}`: target option such as `--bin`, `--test`, `--lib`, etc.
+- `${target}`: target name (empty for `--lib`).
+- `${test_name}`: the test path filter, e.g. `module::func`.
+- `${exact}`: always empty for doc-tests.
+- `${include_ignored}`: always empty for doc-tests.
+- `${executable_args}`: all of the above binary args bundled together
+ (includes `rust-analyzer.runnables.extraTestBinaryArgs`).
## rust-analyzer.runnables.extraArgs {#runnables.extraArgs}
@@ -1444,9 +1480,15 @@ Default: `null`
Override the command used for test runnables.
The first element of the array should be the program to execute (for example, `cargo`).
-Use the placeholders `${package}`, `${target_arg}`, `${target}`, `${executable_args}` to dynamically
-replace the package name, target option (such as `--bin` or `--example`), the target name and
-the arguments passed to test binary args (includes `rust-analyzer.runnables.extraTestBinaryArgs`).
+Available placeholders:
+- `${package}`: package name.
+- `${target_arg}`: target option such as `--bin`, `--test`, `--lib`, etc.
+- `${target}`: target name (empty for `--lib`).
+- `${test_name}`: the test path filter, e.g. `module::test_func`.
+- `${exact}`: `--exact` for single tests, empty for modules.
+- `${include_ignored}`: `--include-ignored` for single tests, empty otherwise.
+- `${executable_args}`: all of the above binary args bundled together
+ (includes `rust-analyzer.runnables.extraTestBinaryArgs`).
## rust-analyzer.rustc.source {#rustc.source}
diff --git a/docs/book/src/contributing/lsp-extensions.md b/docs/book/src/contributing/lsp-extensions.md
index 5d21c37806..22c1784ac2 100644
--- a/docs/book/src/contributing/lsp-extensions.md
+++ b/docs/book/src/contributing/lsp-extensions.md
@@ -236,7 +236,7 @@ fn main() {
```
The primary goal of `onEnter` is to handle automatic indentation when opening a new line.
-This is not yet implemented.
+This is partially implemented for single-line brace-delimited contents, in addition to comment continuation.
The secondary goal is to handle fixing up syntax, like continuing doc strings and comments, and escaping `\n` in string literals.
As proper cursor positioning is raison d'être for `onEnter`, it uses `SnippetTextEdit`.
diff --git a/docs/book/src/other_editors.md b/docs/book/src/other_editors.md
index f7116fc19a..1cb2a44063 100644
--- a/docs/book/src/other_editors.md
+++ b/docs/book/src/other_editors.md
@@ -137,24 +137,22 @@ To use the LSP server in [ale](https://github.com/dense-analysis/ale):
### nvim-lsp
-Neovim 0.5 has built-in language server support. For a quick start
-configuration of rust-analyzer, use
-[neovim/nvim-lspconfig](https://github.com/neovim/nvim-lspconfig#rust_analyzer).
-Once `neovim/nvim-lspconfig` is installed, use
-`lua require'lspconfig'.rust_analyzer.setup({})` in your `init.vim`.
-
-You can also pass LSP settings to the server:
+Neovim 0.5+ added built-in support for language servers, with most of the heavy
+lifting happening in "framework" plugins such as
+[neovim/nvim-lspconfig](https://github.com/neovim/nvim-lspconfig).
+Since v0.11, Neovim has full-featured LSP support. nvim-lspconfig is
+still recommended to get the
+[rust-analyzer config](https://github.com/neovim/nvim-lspconfig/blob/master/lsp/rust_analyzer.lua)
+for free.
+
+1. Install [neovim/nvim-lspconfig](https://github.com/neovim/nvim-lspconfig)
+2. Add `lua vim.lsp.enable('rust-analyzer')` to your `init.vim`
+3. Customize your setup.
```lua
lua << EOF
-local lspconfig = require'lspconfig'
-
-local on_attach = function(client)
- require'completion'.on_attach(client)
-end
-
-lspconfig.rust_analyzer.setup({
- on_attach = on_attach,
+-- You can pass LSP settings to the server:
+vim.lsp.config("rust_analyzer", {
settings = {
["rust-analyzer"] = {
imports = {
@@ -171,30 +169,35 @@ lspconfig.rust_analyzer.setup({
procMacro = {
enable = true
},
- }
- }
+ },
+ },
})
-EOF
-```
-
-If you're running Neovim 0.10 or later, you can enable inlay hints via `on_attach`:
-```lua
-lspconfig.rust_analyzer.setup({
- on_attach = function(client, bufnr)
- vim.lsp.inlay_hint.enable(true, { bufnr = bufnr })
- end
+-- You can enable different LSP features
+vim.api.nvim_create_autocmd("LspAttach", {
+ callback = function(ev)
+ local client = assert(vim.lsp.get_client_by_id(ev.data.client_id))
+ -- Inlay hints display inferred types, etc.
+ if client:supports_method("inlayHint/resolve") then
+ vim.lsp.inlay_hint.enable(true, { bufnr = ev.buf })
+ end
+ -- Completion can be invoked via ctrl+x ctrl+o. It displays a list of
+ -- names inferred from the context (e.g. method names, variables, etc.)
+ if client:supports_method("textDocument/completion") then
+ vim.lsp.completion.enable(true, client.id, ev.buf, {})
+ end
+ end,
})
+EOF
```
-Note that the hints are only visible after `rust-analyzer` has finished loading **and** you have to
-edit the file to trigger a re-render.
-
-See <https://sharksforarms.dev/posts/neovim-rust/> for more tips on
-getting started.
+Note that the hints are only visible after `rust-analyzer` has finished loading
+**and** you have to edit the file to trigger a re-render.
-Check out <https://github.com/mrcjkb/rustaceanvim> for a batteries
-included rust-analyzer setup for Neovim.
+The instructions here use the 0.11+ API. If you're running an older version, you
+can follow this guide <https://sharksforarms.dev/posts/neovim-rust/> or check
+out <https://github.com/mrcjkb/rustaceanvim> for a batteries-included
+rust-analyzer setup for Neovim.
### vim-lsp
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index b51dc4d132..5755f0708f 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -17,8 +17,8 @@
"vscode-languageclient": "^9.0.1"
},
"devDependencies": {
- "@eslint/js": "^9.21.0",
- "@stylistic/eslint-plugin": "^4.1.0",
+ "@eslint/js": "^10.0.1",
+ "@stylistic/eslint-plugin": "^5.10.0",
"@stylistic/eslint-plugin-js": "^4.1.0",
"@tsconfig/strictest": "^2.0.5",
"@types/lodash": "^4.17.20",
@@ -27,16 +27,16 @@
"@typescript-eslint/eslint-plugin": "^8.25.0",
"@typescript-eslint/parser": "^8.25.0",
"@vscode/test-electron": "^2.4.1",
- "@vscode/vsce": "^3.6.0",
+ "@vscode/vsce": "^3.7.1",
"esbuild": "^0.25.0",
- "eslint": "^9.21.0",
- "eslint-config-prettier": "^10.0.2",
+ "eslint": "^10.2.0",
+ "eslint-config-prettier": "^10.1.8",
"eslint-define-config": "^2.1.0",
- "ovsx": "0.10.1",
- "prettier": "^3.5.2",
+ "ovsx": "0.10.10",
+ "prettier": "^3.8.1",
"tslib": "^2.8.1",
- "typescript": "^5.7.3",
- "typescript-eslint": "^8.25.0"
+ "typescript": "^6.0.2",
+ "typescript-eslint": "^8.58.0"
},
"engines": {
"vscode": "^1.93.0"
@@ -255,6 +255,40 @@
"node": ">=6.9.0"
}
},
+ "node_modules/@emnapi/core": {
+ "version": "1.9.1",
+ "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.9.1.tgz",
+ "integrity": "sha512-mukuNALVsoix/w1BJwFzwXBN/dHeejQtuVzcDsfOEsdpCumXb/E9j8w11h5S54tT1xhifGfbbSm/ICrObRb3KA==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "dependencies": {
+ "@emnapi/wasi-threads": "1.2.0",
+ "tslib": "^2.4.0"
+ }
+ },
+ "node_modules/@emnapi/runtime": {
+ "version": "1.9.1",
+ "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.9.1.tgz",
+ "integrity": "sha512-VYi5+ZVLhpgK4hQ0TAjiQiZ6ol0oe4mBx7mVv7IflsiEp0OWoVsp/+f9Vc1hOhE0TtkORVrI1GvzyreqpgWtkA==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "dependencies": {
+ "tslib": "^2.4.0"
+ }
+ },
+ "node_modules/@emnapi/wasi-threads": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.2.0.tgz",
+ "integrity": "sha512-N10dEJNSsUx41Z6pZsXU8FjPjpBEplgH24sfkmITrBED1/U2Esum9F3lfLrMjKHHjmi557zQn7kR9R+XWXu5Rg==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "dependencies": {
+ "tslib": "^2.4.0"
+ }
+ },
"node_modules/@esbuild/aix-ppc64": {
"version": "0.25.0",
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.0.tgz",
@@ -681,9 +715,9 @@
}
},
"node_modules/@eslint-community/eslint-utils": {
- "version": "4.4.1",
- "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.1.tgz",
- "integrity": "sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA==",
+ "version": "4.9.1",
+ "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.1.tgz",
+ "integrity": "sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -713,9 +747,9 @@
}
},
"node_modules/@eslint-community/regexpp": {
- "version": "4.12.1",
- "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz",
- "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==",
+ "version": "4.12.2",
+ "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz",
+ "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==",
"dev": true,
"license": "MIT",
"engines": {
@@ -723,137 +757,89 @@
}
},
"node_modules/@eslint/config-array": {
- "version": "0.19.2",
- "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.19.2.tgz",
- "integrity": "sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w==",
+ "version": "0.23.4",
+ "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.23.4.tgz",
+ "integrity": "sha512-lf19F24LSMfF8weXvW5QEtnLqW70u7kgit5e9PSx0MsHAFclGd1T9ynvWEMDT1w5J4Qt54tomGeAhdoAku1Xow==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
- "@eslint/object-schema": "^2.1.6",
+ "@eslint/object-schema": "^3.0.4",
"debug": "^4.3.1",
- "minimatch": "^3.1.2"
+ "minimatch": "^10.2.4"
},
"engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ "node": "^20.19.0 || ^22.13.0 || >=24"
}
},
- "node_modules/@eslint/config-array/node_modules/brace-expansion": {
- "version": "1.1.13",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz",
- "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==",
+ "node_modules/@eslint/config-helpers": {
+ "version": "0.5.4",
+ "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.5.4.tgz",
+ "integrity": "sha512-jJhqiY3wPMlWWO3370M86CPJ7pt8GmEwSLglMfQhjXal07RCvhmU0as4IuUEW5SJeunfItiEetHmSxCCe9lDBg==",
"dev": true,
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "node_modules/@eslint/config-array/node_modules/minimatch": {
- "version": "3.1.5",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
- "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
- "dev": true,
- "license": "ISC",
+ "license": "Apache-2.0",
"dependencies": {
- "brace-expansion": "^1.1.7"
+ "@eslint/core": "^1.2.0"
},
"engines": {
- "node": "*"
+ "node": "^20.19.0 || ^22.13.0 || >=24"
}
},
"node_modules/@eslint/core": {
- "version": "0.12.0",
- "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.12.0.tgz",
- "integrity": "sha512-cmrR6pytBuSMTaBweKoGMwu3EiHiEC+DoyupPmlZ0HxBJBtIxwe+j/E4XPIKNx+Q74c8lXKPwYawBf5glsTkHg==",
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/@eslint/core/-/core-1.2.0.tgz",
+ "integrity": "sha512-8FTGbNzTvmSlc4cZBaShkC6YvFMG0riksYWRFKXztqVdXaQbcZLXlFbSpC05s70sGEsXAw0qwhx69JiW7hQS7A==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@types/json-schema": "^7.0.15"
},
"engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ "node": "^20.19.0 || ^22.13.0 || >=24"
}
},
- "node_modules/@eslint/eslintrc": {
- "version": "3.3.0",
- "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.0.tgz",
- "integrity": "sha512-yaVPAiNAalnCZedKLdR21GOGILMLKPyqSLWaAjQFvYA2i/ciDi8ArYVr69Anohb6cH2Ukhqti4aFnYyPm8wdwQ==",
+ "node_modules/@eslint/js": {
+ "version": "10.0.1",
+ "resolved": "https://registry.npmjs.org/@eslint/js/-/js-10.0.1.tgz",
+ "integrity": "sha512-zeR9k5pd4gxjZ0abRoIaxdc7I3nDktoXZk2qOv9gCNWx3mVwEn32VRhyLaRsDiJjTs0xq/T8mfPtyuXu7GWBcA==",
"dev": true,
"license": "MIT",
- "dependencies": {
- "ajv": "^6.12.4",
- "debug": "^4.3.2",
- "espree": "^10.0.1",
- "globals": "^14.0.0",
- "ignore": "^5.2.0",
- "import-fresh": "^3.2.1",
- "js-yaml": "^4.1.0",
- "minimatch": "^3.1.2",
- "strip-json-comments": "^3.1.1"
- },
"engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ "node": "^20.19.0 || ^22.13.0 || >=24"
},
"funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/@eslint/eslintrc/node_modules/brace-expansion": {
- "version": "1.1.13",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz",
- "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "node_modules/@eslint/eslintrc/node_modules/minimatch": {
- "version": "3.1.5",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
- "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^1.1.7"
+ "url": "https://eslint.org/donate"
},
- "engines": {
- "node": "*"
- }
- },
- "node_modules/@eslint/js": {
- "version": "9.21.0",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.21.0.tgz",
- "integrity": "sha512-BqStZ3HX8Yz6LvsF5ByXYrtigrV5AXADWLAGc7PH/1SxOb7/FIYYMszZZWiUou/GB9P2lXWk2SV4d+Z8h0nknw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ "peerDependencies": {
+ "eslint": "^10.0.0"
+ },
+ "peerDependenciesMeta": {
+ "eslint": {
+ "optional": true
+ }
}
},
"node_modules/@eslint/object-schema": {
- "version": "2.1.6",
- "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz",
- "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==",
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-3.0.4.tgz",
+ "integrity": "sha512-55lO/7+Yp0ISKRP0PsPtNTeNGapXaO085aELZmWCVc5SH3jfrqpuU6YgOdIxMS99ZHkQN1cXKE+cdIqwww9ptw==",
"dev": true,
"license": "Apache-2.0",
"engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ "node": "^20.19.0 || ^22.13.0 || >=24"
}
},
"node_modules/@eslint/plugin-kit": {
- "version": "0.2.7",
- "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.7.tgz",
- "integrity": "sha512-JubJ5B2pJ4k4yGxaNLdbjrnk9d/iDz6/q8wOilpIowd6PJPgaxCuHBnBszq7Ce2TyMrywm5r4PnKm6V3iiZF+g==",
+ "version": "0.7.0",
+ "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.7.0.tgz",
+ "integrity": "sha512-ejvBr8MQCbVsWNZnCwDXjUKq40MDmHalq7cJ6e9s/qzTUFIIo/afzt1Vui9T97FM/V/pN4YsFVoed5NIa96RDg==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
- "@eslint/core": "^0.12.0",
+ "@eslint/core": "^1.2.0",
"levn": "^0.4.1"
},
"engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ "node": "^20.19.0 || ^22.13.0 || >=24"
}
},
"node_modules/@hpcc-js/wasm": {
@@ -952,6 +938,287 @@
"node": ">=12"
}
},
+ "node_modules/@napi-rs/wasm-runtime": {
+ "version": "0.2.12",
+ "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.12.tgz",
+ "integrity": "sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "dependencies": {
+ "@emnapi/core": "^1.4.3",
+ "@emnapi/runtime": "^1.4.3",
+ "@tybys/wasm-util": "^0.10.0"
+ }
+ },
+ "node_modules/@node-rs/crc32": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32/-/crc32-1.10.6.tgz",
+ "integrity": "sha512-+llXfqt+UzgoDzT9of5vPQPGqTAVCohU74I9zIBkNo5TH6s2P31DFJOGsJQKN207f0GHnYv5pV3wh3BCY/un/A==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 10"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/Brooooooklyn"
+ },
+ "optionalDependencies": {
+ "@node-rs/crc32-android-arm-eabi": "1.10.6",
+ "@node-rs/crc32-android-arm64": "1.10.6",
+ "@node-rs/crc32-darwin-arm64": "1.10.6",
+ "@node-rs/crc32-darwin-x64": "1.10.6",
+ "@node-rs/crc32-freebsd-x64": "1.10.6",
+ "@node-rs/crc32-linux-arm-gnueabihf": "1.10.6",
+ "@node-rs/crc32-linux-arm64-gnu": "1.10.6",
+ "@node-rs/crc32-linux-arm64-musl": "1.10.6",
+ "@node-rs/crc32-linux-x64-gnu": "1.10.6",
+ "@node-rs/crc32-linux-x64-musl": "1.10.6",
+ "@node-rs/crc32-wasm32-wasi": "1.10.6",
+ "@node-rs/crc32-win32-arm64-msvc": "1.10.6",
+ "@node-rs/crc32-win32-ia32-msvc": "1.10.6",
+ "@node-rs/crc32-win32-x64-msvc": "1.10.6"
+ }
+ },
+ "node_modules/@node-rs/crc32-android-arm-eabi": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32-android-arm-eabi/-/crc32-android-arm-eabi-1.10.6.tgz",
+ "integrity": "sha512-vZAMuJXm3TpWPOkkhxdrofWDv+Q+I2oO7ucLRbXyAPmXFNDhHtBxbO1rk9Qzz+M3eep8ieS4/+jCL1Q0zacNMQ==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">= 10"
+ }
+ },
+ "node_modules/@node-rs/crc32-android-arm64": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32-android-arm64/-/crc32-android-arm64-1.10.6.tgz",
+ "integrity": "sha512-Vl/JbjCinCw/H9gEpZveWCMjxjcEChDcDBM8S4hKay5yyoRCUHJPuKr4sjVDBeOm+1nwU3oOm6Ca8dyblwp4/w==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">= 10"
+ }
+ },
+ "node_modules/@node-rs/crc32-darwin-arm64": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32-darwin-arm64/-/crc32-darwin-arm64-1.10.6.tgz",
+ "integrity": "sha512-kARYANp5GnmsQiViA5Qu74weYQ3phOHSYQf0G+U5wB3NB5JmBHnZcOc46Ig21tTypWtdv7u63TaltJQE41noyg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">= 10"
+ }
+ },
+ "node_modules/@node-rs/crc32-darwin-x64": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32-darwin-x64/-/crc32-darwin-x64-1.10.6.tgz",
+ "integrity": "sha512-Q99bevJVMfLTISpkpKBlXgtPUItrvTWKFyiqoKH5IvscZmLV++NH4V13Pa17GTBmv9n18OwzgQY4/SRq6PQNVA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">= 10"
+ }
+ },
+ "node_modules/@node-rs/crc32-freebsd-x64": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32-freebsd-x64/-/crc32-freebsd-x64-1.10.6.tgz",
+ "integrity": "sha512-66hpawbNjrgnS9EDMErta/lpaqOMrL6a6ee+nlI2viduVOmRZWm9Rg9XdGTK/+c4bQLdtC6jOd+Kp4EyGRYkAg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">= 10"
+ }
+ },
+ "node_modules/@node-rs/crc32-linux-arm-gnueabihf": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32-linux-arm-gnueabihf/-/crc32-linux-arm-gnueabihf-1.10.6.tgz",
+ "integrity": "sha512-E8Z0WChH7X6ankbVm8J/Yym19Cq3otx6l4NFPS6JW/cWdjv7iw+Sps2huSug+TBprjbcEA+s4TvEwfDI1KScjg==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 10"
+ }
+ },
+ "node_modules/@node-rs/crc32-linux-arm64-gnu": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32-linux-arm64-gnu/-/crc32-linux-arm64-gnu-1.10.6.tgz",
+ "integrity": "sha512-LmWcfDbqAvypX0bQjQVPmQGazh4dLiVklkgHxpV4P0TcQ1DT86H/SWpMBMs/ncF8DGuCQ05cNyMv1iddUDugoQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 10"
+ }
+ },
+ "node_modules/@node-rs/crc32-linux-arm64-musl": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32-linux-arm64-musl/-/crc32-linux-arm64-musl-1.10.6.tgz",
+ "integrity": "sha512-k8ra/bmg0hwRrIEE8JL1p32WfaN9gDlUUpQRWsbxd1WhjqvXea7kKO6K4DwVxyxlPhBS9Gkb5Urq7Y4mXANzaw==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 10"
+ }
+ },
+ "node_modules/@node-rs/crc32-linux-x64-gnu": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32-linux-x64-gnu/-/crc32-linux-x64-gnu-1.10.6.tgz",
+ "integrity": "sha512-IfjtqcuFK7JrSZ9mlAFhb83xgium30PguvRjIMI45C3FJwu18bnLk1oR619IYb/zetQT82MObgmqfKOtgemEKw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 10"
+ }
+ },
+ "node_modules/@node-rs/crc32-linux-x64-musl": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32-linux-x64-musl/-/crc32-linux-x64-musl-1.10.6.tgz",
+ "integrity": "sha512-LbFYsA5M9pNunOweSt6uhxenYQF94v3bHDAQRPTQ3rnjn+mK6IC7YTAYoBjvoJP8lVzcvk9hRj8wp4Jyh6Y80g==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 10"
+ }
+ },
+ "node_modules/@node-rs/crc32-wasm32-wasi": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32-wasm32-wasi/-/crc32-wasm32-wasi-1.10.6.tgz",
+ "integrity": "sha512-KaejdLgHMPsRaxnM+OG9L9XdWL2TabNx80HLdsCOoX9BVhEkfh39OeahBo8lBmidylKbLGMQoGfIKDjq0YMStw==",
+ "cpu": [
+ "wasm32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "dependencies": {
+ "@napi-rs/wasm-runtime": "^0.2.5"
+ },
+ "engines": {
+ "node": ">=14.0.0"
+ }
+ },
+ "node_modules/@node-rs/crc32-win32-arm64-msvc": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32-win32-arm64-msvc/-/crc32-win32-arm64-msvc-1.10.6.tgz",
+ "integrity": "sha512-x50AXiSxn5Ccn+dCjLf1T7ZpdBiV1Sp5aC+H2ijhJO4alwznvXgWbopPRVhbp2nj0i+Gb6kkDUEyU+508KAdGQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">= 10"
+ }
+ },
+ "node_modules/@node-rs/crc32-win32-ia32-msvc": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32-win32-ia32-msvc/-/crc32-win32-ia32-msvc-1.10.6.tgz",
+ "integrity": "sha512-DpDxQLaErJF9l36aghe1Mx+cOnYLKYo6qVPqPL9ukJ5rAGLtCdU0C+Zoi3gs9ySm8zmbFgazq/LvmsZYU42aBw==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">= 10"
+ }
+ },
+ "node_modules/@node-rs/crc32-win32-x64-msvc": {
+ "version": "1.10.6",
+ "resolved": "https://registry.npmjs.org/@node-rs/crc32-win32-x64-msvc/-/crc32-win32-x64-msvc-1.10.6.tgz",
+ "integrity": "sha512-5B1vXosIIBw1m2Rcnw62IIfH7W9s9f7H7Ma0rRuhT8HR4Xh8QCgw6NJSI2S2MCngsGktYnAhyUvs81b7efTyQw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">= 10"
+ }
+ },
"node_modules/@nodelib/fs.scandir": {
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
@@ -1202,23 +1469,24 @@
}
},
"node_modules/@stylistic/eslint-plugin": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/@stylistic/eslint-plugin/-/eslint-plugin-4.1.0.tgz",
- "integrity": "sha512-bytbL7qiici7yPyEiId0fGPK9kjQbzcPMj2aftPfzTCyJ/CRSKdtI+iVjM0LSGzGxfunflI+MDDU9vyIIeIpoQ==",
+ "version": "5.10.0",
+ "resolved": "https://registry.npmjs.org/@stylistic/eslint-plugin/-/eslint-plugin-5.10.0.tgz",
+ "integrity": "sha512-nPK52ZHvot8Ju/0A4ucSX1dcPV2/1clx0kLcH5wDmrE4naKso7TUC/voUyU1O9OTKTrR6MYip6LP0ogEMQ9jPQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/utils": "^8.23.0",
- "eslint-visitor-keys": "^4.2.0",
- "espree": "^10.3.0",
+ "@eslint-community/eslint-utils": "^4.9.1",
+ "@typescript-eslint/types": "^8.56.0",
+ "eslint-visitor-keys": "^4.2.1",
+ "espree": "^10.4.0",
"estraverse": "^5.3.0",
- "picomatch": "^4.0.2"
+ "picomatch": "^4.0.3"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"peerDependencies": {
- "eslint": ">=9.0.0"
+ "eslint": "^9.0.0 || ^10.0.0"
}
},
"node_modules/@stylistic/eslint-plugin-js": {
@@ -1238,6 +1506,37 @@
"eslint": ">=9.0.0"
}
},
+ "node_modules/@stylistic/eslint-plugin/node_modules/eslint-visitor-keys": {
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz",
+ "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
+ "node_modules/@stylistic/eslint-plugin/node_modules/espree": {
+ "version": "10.4.0",
+ "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz",
+ "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==",
+ "dev": true,
+ "license": "BSD-2-Clause",
+ "dependencies": {
+ "acorn": "^8.15.0",
+ "acorn-jsx": "^5.3.2",
+ "eslint-visitor-keys": "^4.2.1"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
"node_modules/@textlint/ast-node-types": {
"version": "15.2.1",
"resolved": "https://registry.npmjs.org/@textlint/ast-node-types/-/ast-node-types-15.2.1.tgz",
@@ -1375,10 +1674,28 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/@tybys/wasm-util": {
+ "version": "0.10.1",
+ "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.1.tgz",
+ "integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "dependencies": {
+ "tslib": "^2.4.0"
+ }
+ },
+ "node_modules/@types/esrecurse": {
+ "version": "4.3.1",
+ "resolved": "https://registry.npmjs.org/@types/esrecurse/-/esrecurse-4.3.1.tgz",
+ "integrity": "sha512-xJBAbDifo5hpffDBuHl0Y8ywswbiAp/Wi7Y/GtAgSlZyIABppyurxVueOPE8LUQOxdlgi6Zqce7uoEpqNTeiUw==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/@types/estree": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
- "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==",
+ "version": "1.0.8",
+ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz",
+ "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==",
"dev": true,
"license": "MIT"
},
@@ -1428,21 +1745,20 @@
"license": "MIT"
},
"node_modules/@typescript-eslint/eslint-plugin": {
- "version": "8.25.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.25.0.tgz",
- "integrity": "sha512-VM7bpzAe7JO/BFf40pIT1lJqS/z1F8OaSsUB3rpFJucQA4cOSuH2RVVVkFULN+En0Djgr29/jb4EQnedUo95KA==",
+ "version": "8.58.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.58.0.tgz",
+ "integrity": "sha512-RLkVSiNuUP1C2ROIWfqX+YcUfLaSnxGE/8M+Y57lopVwg9VTYYfhuz15Yf1IzCKgZj6/rIbYTmJCUSqr76r0Wg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@eslint-community/regexpp": "^4.10.0",
- "@typescript-eslint/scope-manager": "8.25.0",
- "@typescript-eslint/type-utils": "8.25.0",
- "@typescript-eslint/utils": "8.25.0",
- "@typescript-eslint/visitor-keys": "8.25.0",
- "graphemer": "^1.4.0",
- "ignore": "^5.3.1",
+ "@eslint-community/regexpp": "^4.12.2",
+ "@typescript-eslint/scope-manager": "8.58.0",
+ "@typescript-eslint/type-utils": "8.58.0",
+ "@typescript-eslint/utils": "8.58.0",
+ "@typescript-eslint/visitor-keys": "8.58.0",
+ "ignore": "^7.0.5",
"natural-compare": "^1.4.0",
- "ts-api-utils": "^2.0.1"
+ "ts-api-utils": "^2.5.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -1452,23 +1768,33 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
- "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0",
- "eslint": "^8.57.0 || ^9.0.0",
- "typescript": ">=4.8.4 <5.8.0"
+ "@typescript-eslint/parser": "^8.58.0",
+ "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
+ "typescript": ">=4.8.4 <6.1.0"
+ }
+ },
+ "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": {
+ "version": "7.0.5",
+ "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz",
+ "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 4"
}
},
"node_modules/@typescript-eslint/parser": {
- "version": "8.25.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.25.0.tgz",
- "integrity": "sha512-4gbs64bnbSzu4FpgMiQ1A+D+urxkoJk/kqlDJ2W//5SygaEiAP2B4GoS7TEdxgwol2el03gckFV9lJ4QOMiiHg==",
+ "version": "8.58.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.58.0.tgz",
+ "integrity": "sha512-rLoGZIf9afaRBYsPUMtvkDWykwXwUPL60HebR4JgTI8mxfFe2cQTu3AGitANp4b9B2QlVru6WzjgB2IzJKiCSA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/scope-manager": "8.25.0",
- "@typescript-eslint/types": "8.25.0",
- "@typescript-eslint/typescript-estree": "8.25.0",
- "@typescript-eslint/visitor-keys": "8.25.0",
- "debug": "^4.3.4"
+ "@typescript-eslint/scope-manager": "8.58.0",
+ "@typescript-eslint/types": "8.58.0",
+ "@typescript-eslint/typescript-estree": "8.58.0",
+ "@typescript-eslint/visitor-keys": "8.58.0",
+ "debug": "^4.4.3"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -1478,19 +1804,41 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
- "eslint": "^8.57.0 || ^9.0.0",
- "typescript": ">=4.8.4 <5.8.0"
+ "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
+ "typescript": ">=4.8.4 <6.1.0"
+ }
+ },
+ "node_modules/@typescript-eslint/project-service": {
+ "version": "8.58.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.58.0.tgz",
+ "integrity": "sha512-8Q/wBPWLQP1j16NxoPNIKpDZFMaxl7yWIoqXWYeWO+Bbd2mjgvoF0dxP2jKZg5+x49rgKdf7Ck473M8PC3V9lg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@typescript-eslint/tsconfig-utils": "^8.58.0",
+ "@typescript-eslint/types": "^8.58.0",
+ "debug": "^4.4.3"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependencies": {
+ "typescript": ">=4.8.4 <6.1.0"
}
},
"node_modules/@typescript-eslint/scope-manager": {
- "version": "8.25.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.25.0.tgz",
- "integrity": "sha512-6PPeiKIGbgStEyt4NNXa2ru5pMzQ8OYKO1hX1z53HMomrmiSB+R5FmChgQAP1ro8jMtNawz+TRQo/cSXrauTpg==",
+ "version": "8.58.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.58.0.tgz",
+ "integrity": "sha512-W1Lur1oF50FxSnNdGp3Vs6P+yBRSmZiw4IIjEeYxd8UQJwhUF0gDgDD/W/Tgmh73mxgEU3qX0Bzdl/NGuSPEpQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.25.0",
- "@typescript-eslint/visitor-keys": "8.25.0"
+ "@typescript-eslint/types": "8.58.0",
+ "@typescript-eslint/visitor-keys": "8.58.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -1500,17 +1848,35 @@
"url": "https://opencollective.com/typescript-eslint"
}
},
+ "node_modules/@typescript-eslint/tsconfig-utils": {
+ "version": "8.58.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.58.0.tgz",
+ "integrity": "sha512-doNSZEVJsWEu4htiVC+PR6NpM+pa+a4ClH9INRWOWCUzMst/VA9c4gXq92F8GUD1rwhNvRLkgjfYtFXegXQF7A==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependencies": {
+ "typescript": ">=4.8.4 <6.1.0"
+ }
+ },
"node_modules/@typescript-eslint/type-utils": {
- "version": "8.25.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.25.0.tgz",
- "integrity": "sha512-d77dHgHWnxmXOPJuDWO4FDWADmGQkN5+tt6SFRZz/RtCWl4pHgFl3+WdYCn16+3teG09DY6XtEpf3gGD0a186g==",
+ "version": "8.58.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.58.0.tgz",
+ "integrity": "sha512-aGsCQImkDIqMyx1u4PrVlbi/krmDsQUs4zAcCV6M7yPcPev+RqVlndsJy9kJ8TLihW9TZ0kbDAzctpLn5o+lOg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/typescript-estree": "8.25.0",
- "@typescript-eslint/utils": "8.25.0",
- "debug": "^4.3.4",
- "ts-api-utils": "^2.0.1"
+ "@typescript-eslint/types": "8.58.0",
+ "@typescript-eslint/typescript-estree": "8.58.0",
+ "@typescript-eslint/utils": "8.58.0",
+ "debug": "^4.4.3",
+ "ts-api-utils": "^2.5.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -1520,14 +1886,14 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
- "eslint": "^8.57.0 || ^9.0.0",
- "typescript": ">=4.8.4 <5.8.0"
+ "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
+ "typescript": ">=4.8.4 <6.1.0"
}
},
"node_modules/@typescript-eslint/types": {
- "version": "8.25.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.25.0.tgz",
- "integrity": "sha512-+vUe0Zb4tkNgznQwicsvLUJgZIRs6ITeWSCclX1q85pR1iOiaj+4uZJIUp//Z27QWu5Cseiw3O3AR8hVpax7Aw==",
+ "version": "8.58.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.58.0.tgz",
+ "integrity": "sha512-O9CjxypDT89fbHxRfETNoAnHj/i6IpRK0CvbVN3qibxlLdo5p5hcLmUuCCrHMpxiWSwKyI8mCP7qRNYuOJ0Uww==",
"dev": true,
"license": "MIT",
"engines": {
@@ -1539,20 +1905,21 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.25.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.25.0.tgz",
- "integrity": "sha512-ZPaiAKEZ6Blt/TPAx5Ot0EIB/yGtLI2EsGoY6F7XKklfMxYQyvtL+gT/UCqkMzO0BVFHLDlzvFqQzurYahxv9Q==",
+ "version": "8.58.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.58.0.tgz",
+ "integrity": "sha512-7vv5UWbHqew/dvs+D3e1RvLv1v2eeZ9txRHPnEEBUgSNLx5ghdzjHa0sgLWYVKssH+lYmV0JaWdoubo0ncGYLA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.25.0",
- "@typescript-eslint/visitor-keys": "8.25.0",
- "debug": "^4.3.4",
- "fast-glob": "^3.3.2",
- "is-glob": "^4.0.3",
- "minimatch": "^9.0.4",
- "semver": "^7.6.0",
- "ts-api-utils": "^2.0.1"
+ "@typescript-eslint/project-service": "8.58.0",
+ "@typescript-eslint/tsconfig-utils": "8.58.0",
+ "@typescript-eslint/types": "8.58.0",
+ "@typescript-eslint/visitor-keys": "8.58.0",
+ "debug": "^4.4.3",
+ "minimatch": "^10.2.2",
+ "semver": "^7.7.3",
+ "tinyglobby": "^0.2.15",
+ "ts-api-utils": "^2.5.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -1562,20 +1929,20 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
- "typescript": ">=4.8.4 <5.8.0"
+ "typescript": ">=4.8.4 <6.1.0"
}
},
"node_modules/@typescript-eslint/utils": {
- "version": "8.25.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.25.0.tgz",
- "integrity": "sha512-syqRbrEv0J1wywiLsK60XzHnQe/kRViI3zwFALrNEgnntn1l24Ra2KvOAWwWbWZ1lBZxZljPDGOq967dsl6fkA==",
+ "version": "8.58.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.58.0.tgz",
+ "integrity": "sha512-RfeSqcFeHMHlAWzt4TBjWOAtoW9lnsAGiP3GbaX9uVgTYYrMbVnGONEfUCiSss+xMHFl+eHZiipmA8WkQ7FuNA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@eslint-community/eslint-utils": "^4.4.0",
- "@typescript-eslint/scope-manager": "8.25.0",
- "@typescript-eslint/types": "8.25.0",
- "@typescript-eslint/typescript-estree": "8.25.0"
+ "@eslint-community/eslint-utils": "^4.9.1",
+ "@typescript-eslint/scope-manager": "8.58.0",
+ "@typescript-eslint/types": "8.58.0",
+ "@typescript-eslint/typescript-estree": "8.58.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -1585,19 +1952,19 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
- "eslint": "^8.57.0 || ^9.0.0",
- "typescript": ">=4.8.4 <5.8.0"
+ "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
+ "typescript": ">=4.8.4 <6.1.0"
}
},
"node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.25.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.25.0.tgz",
- "integrity": "sha512-kCYXKAum9CecGVHGij7muybDfTS2sD3t0L4bJsEZLkyrXUImiCTq1M3LG2SRtOhiHFwMR9wAFplpT6XHYjTkwQ==",
+ "version": "8.58.0",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.58.0.tgz",
+ "integrity": "sha512-XJ9UD9+bbDo4a4epraTwG3TsNPeiB9aShrUneAVXy8q4LuwowN+qu89/6ByLMINqvIMeI9H9hOHQtg/ijrYXzQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "8.25.0",
- "eslint-visitor-keys": "^4.2.0"
+ "@typescript-eslint/types": "8.58.0",
+ "eslint-visitor-keys": "^5.0.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -1607,6 +1974,19 @@
"url": "https://opencollective.com/typescript-eslint"
}
},
+ "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.1.tgz",
+ "integrity": "sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "engines": {
+ "node": "^20.19.0 || ^22.13.0 || >=24"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
"node_modules/@vscode/test-electron": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.4.1.tgz",
@@ -1625,17 +2005,17 @@
}
},
"node_modules/@vscode/vsce": {
- "version": "3.6.0",
- "resolved": "https://registry.npmjs.org/@vscode/vsce/-/vsce-3.6.0.tgz",
- "integrity": "sha512-u2ZoMfymRNJb14aHNawnXJtXHLXDVKc1oKZaH4VELKT/9iWKRVgtQOdwxCgtwSxJoqYvuK4hGlBWQJ05wxADhg==",
+ "version": "3.7.1",
+ "resolved": "https://registry.npmjs.org/@vscode/vsce/-/vsce-3.7.1.tgz",
+ "integrity": "sha512-OTm2XdMt2YkpSn2Nx7z2EJtSuhRHsTPYsSK59hr3v8jRArK+2UEoju4Jumn1CmpgoBLGI6ReHLJ/czYltNUW3g==",
"dev": true,
"license": "MIT",
"dependencies": {
"@azure/identity": "^4.1.0",
- "@secretlint/node": "^10.1.1",
- "@secretlint/secretlint-formatter-sarif": "^10.1.1",
- "@secretlint/secretlint-rule-no-dotenv": "^10.1.1",
- "@secretlint/secretlint-rule-preset-recommend": "^10.1.1",
+ "@secretlint/node": "^10.1.2",
+ "@secretlint/secretlint-formatter-sarif": "^10.1.2",
+ "@secretlint/secretlint-rule-no-dotenv": "^10.1.2",
+ "@secretlint/secretlint-rule-preset-recommend": "^10.1.2",
"@vscode/vsce-sign": "^2.0.0",
"azure-devops-node-api": "^12.5.0",
"chalk": "^4.1.2",
@@ -1652,7 +2032,7 @@
"minimatch": "^3.0.3",
"parse-semver": "^1.1.1",
"read": "^1.0.7",
- "secretlint": "^10.1.1",
+ "secretlint": "^10.1.2",
"semver": "^7.5.2",
"tmp": "^0.2.3",
"typed-rest-client": "^1.8.4",
@@ -1841,9 +2221,9 @@
}
},
"node_modules/acorn": {
- "version": "8.14.0",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz",
- "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==",
+ "version": "8.16.0",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz",
+ "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==",
"dev": true,
"license": "MIT",
"bin": {
@@ -1874,9 +2254,9 @@
}
},
"node_modules/ajv": {
- "version": "6.12.6",
- "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
- "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
+ "version": "6.14.0",
+ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.14.0.tgz",
+ "integrity": "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -2170,16 +2550,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
- "node_modules/callsites": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
- "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=6"
- }
- },
"node_modules/chalk": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
@@ -2900,9 +3270,9 @@
}
},
"node_modules/debug": {
- "version": "4.4.1",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
- "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
+ "version": "4.4.3",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
+ "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -2982,6 +3352,24 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/define-data-property": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
+ "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "es-define-property": "^1.0.0",
+ "es-errors": "^1.3.0",
+ "gopd": "^1.0.1"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/define-lazy-prop": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz",
@@ -2995,6 +3383,24 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/define-properties": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz",
+ "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "define-data-property": "^1.0.1",
+ "has-property-descriptors": "^1.0.0",
+ "object-keys": "^1.1.1"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/delaunator": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz",
@@ -3291,33 +3697,30 @@
}
},
"node_modules/eslint": {
- "version": "9.21.0",
- "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.21.0.tgz",
- "integrity": "sha512-KjeihdFqTPhOMXTt7StsDxriV4n66ueuF/jfPNC3j/lduHwr/ijDwJMsF+wyMJethgiKi5wniIE243vi07d3pg==",
+ "version": "10.2.0",
+ "resolved": "https://registry.npmjs.org/eslint/-/eslint-10.2.0.tgz",
+ "integrity": "sha512-+L0vBFYGIpSNIt/KWTpFonPrqYvgKw1eUI5Vn7mEogrQcWtWYtNQ7dNqC+px/J0idT3BAkiWrhfS7k+Tum8TUA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@eslint-community/eslint-utils": "^4.2.0",
- "@eslint-community/regexpp": "^4.12.1",
- "@eslint/config-array": "^0.19.2",
- "@eslint/core": "^0.12.0",
- "@eslint/eslintrc": "^3.3.0",
- "@eslint/js": "9.21.0",
- "@eslint/plugin-kit": "^0.2.7",
+ "@eslint-community/eslint-utils": "^4.8.0",
+ "@eslint-community/regexpp": "^4.12.2",
+ "@eslint/config-array": "^0.23.4",
+ "@eslint/config-helpers": "^0.5.4",
+ "@eslint/core": "^1.2.0",
+ "@eslint/plugin-kit": "^0.7.0",
"@humanfs/node": "^0.16.6",
"@humanwhocodes/module-importer": "^1.0.1",
"@humanwhocodes/retry": "^0.4.2",
"@types/estree": "^1.0.6",
- "@types/json-schema": "^7.0.15",
- "ajv": "^6.12.4",
- "chalk": "^4.0.0",
+ "ajv": "^6.14.0",
"cross-spawn": "^7.0.6",
"debug": "^4.3.2",
"escape-string-regexp": "^4.0.0",
- "eslint-scope": "^8.2.0",
- "eslint-visitor-keys": "^4.2.0",
- "espree": "^10.3.0",
- "esquery": "^1.5.0",
+ "eslint-scope": "^9.1.2",
+ "eslint-visitor-keys": "^5.0.1",
+ "espree": "^11.2.0",
+ "esquery": "^1.7.0",
"esutils": "^2.0.2",
"fast-deep-equal": "^3.1.3",
"file-entry-cache": "^8.0.0",
@@ -3327,8 +3730,7 @@
"imurmurhash": "^0.1.4",
"is-glob": "^4.0.0",
"json-stable-stringify-without-jsonify": "^1.0.1",
- "lodash.merge": "^4.6.2",
- "minimatch": "^3.1.2",
+ "minimatch": "^10.2.4",
"natural-compare": "^1.4.0",
"optionator": "^0.9.3"
},
@@ -3336,7 +3738,7 @@
"eslint": "bin/eslint.js"
},
"engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ "node": "^20.19.0 || ^22.13.0 || >=24"
},
"funding": {
"url": "https://eslint.org/donate"
@@ -3351,13 +3753,16 @@
}
},
"node_modules/eslint-config-prettier": {
- "version": "10.0.2",
- "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-10.0.2.tgz",
- "integrity": "sha512-1105/17ZIMjmCOJOPNfVdbXafLCLj3hPmkmB7dLgt7XsQ/zkxSuDerE/xgO3RxoHysR1N1whmquY0lSn2O0VLg==",
+ "version": "10.1.8",
+ "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-10.1.8.tgz",
+ "integrity": "sha512-82GZUjRS0p/jganf6q1rEO25VSoHH0hKPCTrgillPjdI/3bgBhAE1QzHrHTizjpRvy6pGAvKjDJtk2pF9NDq8w==",
"dev": true,
"license": "MIT",
"bin": {
- "eslint-config-prettier": "build/bin/cli.js"
+ "eslint-config-prettier": "bin/cli.js"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint-config-prettier"
},
"peerDependencies": {
"eslint": ">=7.0.0"
@@ -3367,6 +3772,7 @@
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/eslint-define-config/-/eslint-define-config-2.1.0.tgz",
"integrity": "sha512-QUp6pM9pjKEVannNAbSJNeRuYwW3LshejfyBBpjeMGaJjaDUpVps4C6KVR8R7dWZnD3i0synmrE36znjTkJvdQ==",
+ "deprecated": "Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.",
"dev": true,
"funding": [
{
@@ -3386,17 +3792,19 @@
}
},
"node_modules/eslint-scope": {
- "version": "8.2.0",
- "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.2.0.tgz",
- "integrity": "sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A==",
+ "version": "9.1.2",
+ "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-9.1.2.tgz",
+ "integrity": "sha512-xS90H51cKw0jltxmvmHy2Iai1LIqrfbw57b79w/J7MfvDfkIkFZ+kj6zC3BjtUwh150HsSSdxXZcsuv72miDFQ==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
+ "@types/esrecurse": "^4.3.1",
+ "@types/estree": "^1.0.8",
"esrecurse": "^4.3.0",
"estraverse": "^5.2.0"
},
"engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ "node": "^20.19.0 || ^22.13.0 || >=24"
},
"funding": {
"url": "https://opencollective.com/eslint"
@@ -3415,17 +3823,6 @@
"url": "https://opencollective.com/eslint"
}
},
- "node_modules/eslint/node_modules/brace-expansion": {
- "version": "1.1.13",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz",
- "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
"node_modules/eslint/node_modules/escape-string-regexp": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
@@ -3439,17 +3836,35 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/eslint/node_modules/minimatch": {
- "version": "3.1.5",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
- "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
+ "node_modules/eslint/node_modules/eslint-visitor-keys": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.1.tgz",
+ "integrity": "sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==",
"dev": true,
- "license": "ISC",
+ "license": "Apache-2.0",
+ "engines": {
+ "node": "^20.19.0 || ^22.13.0 || >=24"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
+ "node_modules/eslint/node_modules/espree": {
+ "version": "11.2.0",
+ "resolved": "https://registry.npmjs.org/espree/-/espree-11.2.0.tgz",
+ "integrity": "sha512-7p3DrVEIopW1B1avAGLuCSh1jubc01H2JHc8B4qqGblmg5gI9yumBgACjWo4JlIc04ufug4xJ3SQI8HkS/Rgzw==",
+ "dev": true,
+ "license": "BSD-2-Clause",
"dependencies": {
- "brace-expansion": "^1.1.7"
+ "acorn": "^8.16.0",
+ "acorn-jsx": "^5.3.2",
+ "eslint-visitor-keys": "^5.0.1"
},
"engines": {
- "node": "*"
+ "node": "^20.19.0 || ^22.13.0 || >=24"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
}
},
"node_modules/espree": {
@@ -3485,9 +3900,9 @@
}
},
"node_modules/esquery": {
- "version": "1.6.0",
- "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz",
- "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==",
+ "version": "1.7.0",
+ "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.7.0.tgz",
+ "integrity": "sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
@@ -3639,6 +4054,24 @@
"pend": "~1.2.0"
}
},
+ "node_modules/fdir": {
+ "version": "6.5.0",
+ "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
+ "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12.0.0"
+ },
+ "peerDependencies": {
+ "picomatch": "^3 || ^4"
+ },
+ "peerDependenciesMeta": {
+ "picomatch": {
+ "optional": true
+ }
+ }
+ },
"node_modules/file-entry-cache": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz",
@@ -3884,56 +4317,21 @@
"node": ">=10.13.0"
}
},
- "node_modules/glob/node_modules/balanced-match": {
- "version": "4.0.4",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz",
- "integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": "18 || 20 || >=22"
- }
- },
- "node_modules/glob/node_modules/brace-expansion": {
- "version": "5.0.5",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
- "integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
+ "node_modules/globalthis": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz",
+ "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "balanced-match": "^4.0.2"
- },
- "engines": {
- "node": "18 || 20 || >=22"
- }
- },
- "node_modules/glob/node_modules/minimatch": {
- "version": "10.2.4",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz",
- "integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==",
- "dev": true,
- "license": "BlueOak-1.0.0",
- "dependencies": {
- "brace-expansion": "^5.0.2"
- },
- "engines": {
- "node": "18 || 20 || >=22"
+ "define-properties": "^1.2.1",
+ "gopd": "^1.0.1"
},
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/globals": {
- "version": "14.0.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz",
- "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==",
- "dev": true,
- "license": "MIT",
"engines": {
- "node": ">=18"
+ "node": ">= 0.4"
},
"funding": {
- "url": "https://github.com/sponsors/sindresorhus"
+ "url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/globby": {
@@ -3987,13 +4385,6 @@
"dev": true,
"license": "ISC"
},
- "node_modules/graphemer": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
- "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==",
- "dev": true,
- "license": "MIT"
- },
"node_modules/has-flag": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
@@ -4004,6 +4395,19 @@
"node": ">=8"
}
},
+ "node_modules/has-property-descriptors": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
+ "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "es-define-property": "^1.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/has-symbols": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
@@ -4157,23 +4561,6 @@
"dev": true,
"license": "MIT"
},
- "node_modules/import-fresh": {
- "version": "3.3.1",
- "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz",
- "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "parent-module": "^1.0.0",
- "resolve-from": "^4.0.0"
- },
- "engines": {
- "node": ">=6"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
"node_modules/imurmurhash": {
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
@@ -4314,6 +4701,19 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/is-it-type": {
+ "version": "5.1.3",
+ "resolved": "https://registry.npmjs.org/is-it-type/-/is-it-type-5.1.3.tgz",
+ "integrity": "sha512-AX2uU0HW+TxagTgQXOJY7+2fbFHemC7YFBwN1XqD8qQMKdtfbOC8OC3fUb4s5NU59a3662Dzwto8tWDdZYRXxg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "globalthis": "^1.0.2"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
"node_modules/is-number": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
@@ -4650,9 +5050,9 @@
}
},
"node_modules/lodash": {
- "version": "4.17.23",
- "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz",
- "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==",
+ "version": "4.18.1",
+ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.18.1.tgz",
+ "integrity": "sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q==",
"dev": true,
"license": "MIT"
},
@@ -4698,13 +5098,6 @@
"dev": true,
"license": "MIT"
},
- "node_modules/lodash.merge": {
- "version": "4.6.2",
- "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
- "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
- "dev": true,
- "license": "MIT"
- },
"node_modules/lodash.once": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz",
@@ -4895,21 +5288,44 @@
}
},
"node_modules/minimatch": {
- "version": "9.0.9",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz",
- "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==",
+ "version": "10.2.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.5.tgz",
+ "integrity": "sha512-MULkVLfKGYDFYejP07QOurDLLQpcjk7Fw+7jXS2R2czRQzR56yHRveU5NDJEOviH+hETZKSkIk5c+T23GjFUMg==",
"dev": true,
- "license": "ISC",
+ "license": "BlueOak-1.0.0",
"dependencies": {
- "brace-expansion": "^2.0.2"
+ "brace-expansion": "^5.0.5"
},
"engines": {
- "node": ">=16 || 14 >=14.17"
+ "node": "18 || 20 || >=22"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
+ "node_modules/minimatch/node_modules/balanced-match": {
+ "version": "4.0.4",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz",
+ "integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": "18 || 20 || >=22"
+ }
+ },
+ "node_modules/minimatch/node_modules/brace-expansion": {
+ "version": "5.0.5",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
+ "integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "balanced-match": "^4.0.2"
+ },
+ "engines": {
+ "node": "18 || 20 || >=22"
+ }
+ },
"node_modules/minimist": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
@@ -5065,6 +5481,16 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/object-keys": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
+ "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
@@ -5192,23 +5618,23 @@
}
},
"node_modules/ovsx": {
- "version": "0.10.1",
- "resolved": "https://registry.npmjs.org/ovsx/-/ovsx-0.10.1.tgz",
- "integrity": "sha512-8i7+MJMMeq73m1zPEIClSFe17SNuuzU5br7G77ZIfOC24elB4pGQs0N1qRd+gnnbyhL5Qu96G21nFOVOBa2OBg==",
+ "version": "0.10.10",
+ "resolved": "https://registry.npmjs.org/ovsx/-/ovsx-0.10.10.tgz",
+ "integrity": "sha512-/X5J4VLKPUGGaMynW9hgvsGg9jmwsK/3RhODeA2yzdeDbb8PUSNcg5GQ9aPDJW/znlqNvAwQcXAyE+Cq0RRvAQ==",
"dev": true,
"license": "EPL-2.0",
"dependencies": {
- "@vscode/vsce": "^3.2.1",
+ "@vscode/vsce": "^3.7.1",
"commander": "^6.2.1",
"follow-redirects": "^1.14.6",
"is-ci": "^2.0.0",
"leven": "^3.1.0",
"semver": "^7.6.0",
"tmp": "^0.2.3",
- "yauzl": "^3.1.3"
+ "yauzl-promise": "^4.0.0"
},
"bin": {
- "ovsx": "lib/ovsx"
+ "ovsx": "bin/ovsx"
},
"engines": {
"node": ">= 20"
@@ -5224,20 +5650,6 @@
"node": ">= 6"
}
},
- "node_modules/ovsx/node_modules/yauzl": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-3.2.0.tgz",
- "integrity": "sha512-Ow9nuGZE+qp1u4JIPvg+uCiUr7xGQWdff7JQSk5VGYTAZMDe2q8lxJ10ygv10qmSj031Ty/6FNJpLO4o1Sgc+w==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "buffer-crc32": "~0.2.3",
- "pend": "~1.2.0"
- },
- "engines": {
- "node": ">=12"
- }
- },
"node_modules/p-limit": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
@@ -5297,19 +5709,6 @@
"dev": true,
"license": "(MIT AND Zlib)"
},
- "node_modules/parent-module": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
- "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "callsites": "^3.0.0"
- },
- "engines": {
- "node": ">=6"
- }
- },
"node_modules/parse-json": {
"version": "8.3.0",
"resolved": "https://registry.npmjs.org/parse-json/-/parse-json-8.3.0.tgz",
@@ -5524,9 +5923,9 @@
}
},
"node_modules/prettier": {
- "version": "3.5.2",
- "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.2.tgz",
- "integrity": "sha512-lc6npv5PH7hVqozBR7lkBNOGXV9vMwROAPlumdBkX0wTbbzPu/U1hk5yL8p2pt4Xoc+2mkT8t/sow2YrV/M5qg==",
+ "version": "3.8.1",
+ "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.8.1.tgz",
+ "integrity": "sha512-UOnG6LftzbdaHZcKoPFtOcCKztrQ57WkHDeRD9t/PTQtmT0NHSeWWepj6pS0z/N7+08BHFDQVUrfmfMRcZwbMg==",
"dev": true,
"license": "MIT",
"bin": {
@@ -5744,16 +6143,6 @@
"node": ">=0.10.0"
}
},
- "node_modules/resolve-from": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
- "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=4"
- }
- },
"node_modules/restore-cursor": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-4.0.0.tgz",
@@ -5895,9 +6284,9 @@
}
},
"node_modules/semver": {
- "version": "7.7.1",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz",
- "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==",
+ "version": "7.7.4",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz",
+ "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==",
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
@@ -6074,6 +6463,16 @@
"simple-concat": "^1.0.0"
}
},
+ "node_modules/simple-invariant": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/simple-invariant/-/simple-invariant-2.0.1.tgz",
+ "integrity": "sha512-1sbhsxqI+I2tqlmjbz99GXNmZtr6tKIyEgGGnJw/MKGblalqk/XoOYYFJlBzTKZCxx8kLaD3FD5s9BEEjx5Pyg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=10"
+ }
+ },
"node_modules/slash": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz",
@@ -6296,19 +6695,6 @@
"node": ">=8"
}
},
- "node_modules/strip-json-comments": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
- "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
"node_modules/structured-source": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/structured-source/-/structured-source-4.0.0.tgz",
@@ -6562,6 +6948,23 @@
"url": "https://bevry.me/fund"
}
},
+ "node_modules/tinyglobby": {
+ "version": "0.2.15",
+ "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz",
+ "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "fdir": "^6.5.0",
+ "picomatch": "^4.0.3"
+ },
+ "engines": {
+ "node": ">=12.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/SuperchupuDev"
+ }
+ },
"node_modules/tmp": {
"version": "0.2.4",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.4.tgz",
@@ -6586,9 +6989,9 @@
}
},
"node_modules/ts-api-utils": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.0.1.tgz",
- "integrity": "sha512-dnlgjFSVetynI8nzgJ+qF62efpglpWRk8isUEWZGWlJYySCTD6aKvbUDu+zbPeDakk3bg5H4XpitHukgfL1m9w==",
+ "version": "2.5.0",
+ "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.5.0.tgz",
+ "integrity": "sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA==",
"dev": true,
"license": "MIT",
"engines": {
@@ -6668,9 +7071,9 @@
}
},
"node_modules/typescript": {
- "version": "5.7.3",
- "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.3.tgz",
- "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==",
+ "version": "6.0.2",
+ "resolved": "https://registry.npmjs.org/typescript/-/typescript-6.0.2.tgz",
+ "integrity": "sha512-bGdAIrZ0wiGDo5l8c++HWtbaNCWTS4UTv7RaTH/ThVIgjkveJt83m74bBHMJkuCbslY8ixgLBVZJIOiQlQTjfQ==",
"dev": true,
"license": "Apache-2.0",
"bin": {
@@ -6682,15 +7085,16 @@
}
},
"node_modules/typescript-eslint": {
- "version": "8.25.0",
- "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.25.0.tgz",
- "integrity": "sha512-TxRdQQLH4g7JkoFlYG3caW5v1S6kEkz8rqt80iQJZUYPq1zD1Ra7HfQBJJ88ABRaMvHAXnwRvRB4V+6sQ9xN5Q==",
+ "version": "8.58.0",
+ "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.58.0.tgz",
+ "integrity": "sha512-e2TQzKfaI85fO+F3QywtX+tCTsu/D3WW5LVU6nz8hTFKFZ8yBJ6mSYRpXqdR3mFjPWmO0eWsTa5f+UpAOe/FMA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/eslint-plugin": "8.25.0",
- "@typescript-eslint/parser": "8.25.0",
- "@typescript-eslint/utils": "8.25.0"
+ "@typescript-eslint/eslint-plugin": "8.58.0",
+ "@typescript-eslint/parser": "8.58.0",
+ "@typescript-eslint/typescript-estree": "8.58.0",
+ "@typescript-eslint/utils": "8.58.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -6700,8 +7104,8 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
- "eslint": "^8.57.0 || ^9.0.0",
- "typescript": ">=4.8.4 <5.8.0"
+ "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
+ "typescript": ">=4.8.4 <6.1.0"
}
},
"node_modules/uc.micro": {
@@ -7138,6 +7542,21 @@
"fd-slicer": "~1.1.0"
}
},
+ "node_modules/yauzl-promise": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yauzl-promise/-/yauzl-promise-4.0.0.tgz",
+ "integrity": "sha512-/HCXpyHXJQQHvFq9noqrjfa/WpQC2XYs3vI7tBiAi4QiIU1knvYhZGaO1QPjwIVMdqflxbmwgMXtYeaRiAE0CA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@node-rs/crc32": "^1.7.0",
+ "is-it-type": "^5.1.2",
+ "simple-invariant": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=16"
+ }
+ },
"node_modules/yazl": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/yazl/-/yazl-2.5.1.tgz",
diff --git a/editors/code/package.json b/editors/code/package.json
index 1dd513c9de..29cbc8bd4f 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -31,6 +31,9 @@
"vscode": "^1.93.0"
},
"enabledApiProposals": [],
+ "extensionKind": [
+ "workspace"
+ ],
"scripts": {
"vscode:prepublish": "npm run build-base -- --minify",
"package": "vsce package -o rust-analyzer.vsix",
@@ -54,8 +57,8 @@
"vscode-languageclient": "^9.0.1"
},
"devDependencies": {
- "@eslint/js": "^9.21.0",
- "@stylistic/eslint-plugin": "^4.1.0",
+ "@eslint/js": "^10.0.1",
+ "@stylistic/eslint-plugin": "^5.10.0",
"@stylistic/eslint-plugin-js": "^4.1.0",
"@tsconfig/strictest": "^2.0.5",
"@types/lodash": "^4.17.20",
@@ -64,16 +67,16 @@
"@typescript-eslint/eslint-plugin": "^8.25.0",
"@typescript-eslint/parser": "^8.25.0",
"@vscode/test-electron": "^2.4.1",
- "@vscode/vsce": "^3.6.0",
+ "@vscode/vsce": "^3.7.1",
"esbuild": "^0.25.0",
- "eslint": "^9.21.0",
- "eslint-config-prettier": "^10.0.2",
+ "eslint": "^10.2.0",
+ "eslint-config-prettier": "^10.1.8",
"eslint-define-config": "^2.1.0",
- "ovsx": "0.10.1",
- "prettier": "^3.5.2",
+ "ovsx": "0.10.10",
+ "prettier": "^3.8.1",
"tslib": "^2.8.1",
- "typescript": "^5.7.3",
- "typescript-eslint": "^8.25.0"
+ "typescript": "^6.0.2",
+ "typescript-eslint": "^8.58.0"
},
"activationEvents": [
"workspaceContains:Cargo.toml",
@@ -993,6 +996,19 @@
{
"title": "Cargo",
"properties": {
+ "rust-analyzer.cargo.metadataExtraArgs": {
+ "markdownDescription": "Extra arguments passed only to `cargo metadata`, not to other cargo invocations.\nUseful for flags like `--config` that `cargo metadata` supports.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ {
+ "title": "Cargo",
+ "properties": {
"rust-analyzer.cargo.noDefaultFeatures": {
"markdownDescription": "Whether to pass `--no-default-features` to cargo.",
"default": false,
@@ -1592,6 +1608,16 @@
}
},
{
+ "title": "rust-analyzer",
+ "properties": {
+ "rust-analyzer.disableFixtureSupport": {
+ "markdownDescription": "Disable support for `#[rust_analyzer::rust_fixture]` snippets.\n\nIf you are not working on rust-analyzer itself, you should ignore this config.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
"title": "Document",
"properties": {
"rust-analyzer.document.symbol.search.excludeLocals": {
@@ -2502,6 +2528,24 @@
}
},
{
+ "title": "Inlay Hints",
+ "properties": {
+ "rust-analyzer.inlayHints.typeHints.location": {
+ "markdownDescription": "Where to render type hints relative to their binding pattern.",
+ "default": "inline",
+ "type": "string",
+ "enum": [
+ "inline",
+ "end_of_line"
+ ],
+ "enumDescriptions": [
+ "Render type hints directly after the binding identifier.",
+ "Render type hints after the end of the containing `let` statement when possible."
+ ]
+ }
+ }
+ },
+ {
"title": "Interpret",
"properties": {
"rust-analyzer.interpret.tests": {
@@ -2865,7 +2909,7 @@
"title": "Runnables",
"properties": {
"rust-analyzer.runnables.bench.overrideCommand": {
- "markdownDescription": "Override the command used for bench runnables.\nThe first element of the array should be the program to execute (for example, `cargo`).\n\nUse the placeholders `${package}`, `${target_arg}`, `${target}`, `${executable_args}` to dynamically\nreplace the package name, target option (such as `--bin` or `--example`), the target name and\nthe arguments passed to test binary args (includes `rust-analyzer.runnables.extraTestBinaryArgs`).",
+ "markdownDescription": "Override the command used for bench runnables.\nThe first element of the array should be the program to execute (for example, `cargo`).\n\nUse the placeholders:\n- `${package}`: package name.\n- `${target_arg}`: target option such as `--bin`, `--test`, `--lib`, etc.\n- `${target}`: target name (empty for `--lib`).\n- `${test_name}`: the test path filter, e.g. `module::bench_func`.\n- `${exact}`: `--exact` for single benchmarks, empty for modules.\n- `${include_ignored}`: always empty for benchmarks.\n- `${executable_args}`: all of the above binary args bundled together\n (includes `rust-analyzer.runnables.extraTestBinaryArgs`).",
"default": null,
"type": [
"null",
@@ -2894,7 +2938,7 @@
"title": "Runnables",
"properties": {
"rust-analyzer.runnables.doctest.overrideCommand": {
- "markdownDescription": "Override the command used for bench runnables.\nThe first element of the array should be the program to execute (for example, `cargo`).\n\nUse the placeholders `${package}`, `${target_arg}`, `${target}`, `${executable_args}` to dynamically\nreplace the package name, target option (such as `--bin` or `--example`), the target name and\nthe arguments passed to test binary args (includes `rust-analyzer.runnables.extraTestBinaryArgs`).",
+ "markdownDescription": "Override the command used for doc-test runnables.\nThe first element of the array should be the program to execute (for example, `cargo`).\n\nUse the placeholders:\n- `${package}`: package name.\n- `${target_arg}`: target option such as `--bin`, `--test`, `--lib`, etc.\n- `${target}`: target name (empty for `--lib`).\n- `${test_name}`: the test path filter, e.g. `module::func`.\n- `${exact}`: always empty for doc-tests.\n- `${include_ignored}`: always empty for doc-tests.\n- `${executable_args}`: all of the above binary args bundled together\n (includes `rust-analyzer.runnables.extraTestBinaryArgs`).",
"default": null,
"type": [
"null",
@@ -2948,7 +2992,7 @@
"title": "Runnables",
"properties": {
"rust-analyzer.runnables.test.overrideCommand": {
- "markdownDescription": "Override the command used for test runnables.\nThe first element of the array should be the program to execute (for example, `cargo`).\n\nUse the placeholders `${package}`, `${target_arg}`, `${target}`, `${executable_args}` to dynamically\nreplace the package name, target option (such as `--bin` or `--example`), the target name and\nthe arguments passed to test binary args (includes `rust-analyzer.runnables.extraTestBinaryArgs`).",
+ "markdownDescription": "Override the command used for test runnables.\nThe first element of the array should be the program to execute (for example, `cargo`).\n\nAvailable placeholders:\n- `${package}`: package name.\n- `${target_arg}`: target option such as `--bin`, `--test`, `--lib`, etc.\n- `${target}`: target name (empty for `--lib`).\n- `${test_name}`: the test path filter, e.g. `module::test_func`.\n- `${exact}`: `--exact` for single tests, empty for modules.\n- `${include_ignored}`: `--include-ignored` for single tests, empty otherwise.\n- `${executable_args}`: all of the above binary args bundled together\n (includes `rust-analyzer.runnables.extraTestBinaryArgs`).",
"default": null,
"type": [
"null",
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index 5b358e3211..e265cff391 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -1,4 +1,4 @@
-import * as anser from "anser";
+import anser from "anser";
import * as lc from "vscode-languageclient/node";
import * as vscode from "vscode";
import * as ra from "../src/lsp_ext";
diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts
index c1b6f31030..302f51dee4 100644
--- a/editors/code/src/commands.ts
+++ b/editors/code/src/commands.ts
@@ -1194,9 +1194,8 @@ export function runSingle(ctx: CtxInit): Cmd {
}
export function copyRunCommandLine(ctx: CtxInit) {
- let prevRunnable: RunnableQuickPick | undefined;
return async () => {
- const item = await selectRunnable(ctx, prevRunnable);
+ const item = await selectRunnable(ctx, undefined);
if (!item || !isCargoRunnableArgs(item.runnable.args)) return;
const args = createCargoArgs(item.runnable.args);
const commandLine = ["cargo", ...args].join(" ");
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index 5dc2c419ef..d65f011c75 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -485,7 +485,7 @@ export function substituteVariablesInEnv(env: Env): Env {
Object.entries(env).map(([key, value]) => {
const deps = new Set<string>();
if (value) {
- let match = undefined;
+ let match;
while ((match = depRe.exec(value))) {
const depName = unwrapUndefinable(match.groups?.["depName"]);
deps.add(depName);
diff --git a/editors/code/src/debug.ts b/editors/code/src/debug.ts
index 24f8d90873..9bc3adad2f 100644
--- a/editors/code/src/debug.ts
+++ b/editors/code/src/debug.ts
@@ -48,7 +48,7 @@ export async function makeDebugConfig(ctx: Ctx, runnable: ra.Runnable): Promise<
}
export async function startDebugSession(ctx: Ctx, runnable: ra.Runnable): Promise<boolean> {
- let debugConfig: vscode.DebugConfiguration | undefined = undefined;
+ let debugConfig: vscode.DebugConfiguration | undefined;
let message = "";
const wsLaunchSection = vscode.workspace.getConfiguration("launch");
diff --git a/editors/code/src/dependencies_provider.ts b/editors/code/src/dependencies_provider.ts
index 203ef5cc85..3c04f2ef64 100644
--- a/editors/code/src/dependencies_provider.ts
+++ b/editors/code/src/dependencies_provider.ts
@@ -6,9 +6,9 @@ import * as ra from "./lsp_ext";
import type { FetchDependencyListResult } from "./lsp_ext";
import { unwrapUndefinable } from "./util";
-export class RustDependenciesProvider
- implements vscode.TreeDataProvider<Dependency | DependencyFile>
-{
+export class RustDependenciesProvider implements vscode.TreeDataProvider<
+ Dependency | DependencyFile
+> {
dependenciesMap: { [id: string]: Dependency | DependencyFile };
ctx: CtxInit;
diff --git a/editors/code/src/diagnostics.ts b/editors/code/src/diagnostics.ts
index cd0e43b212..32a41745ed 100644
--- a/editors/code/src/diagnostics.ts
+++ b/editors/code/src/diagnostics.ts
@@ -1,4 +1,4 @@
-import * as anser from "anser";
+import anser from "anser";
import * as vscode from "vscode";
import {
type ProviderResult,
diff --git a/editors/code/src/lsp_ext.ts b/editors/code/src/lsp_ext.ts
index 9712bd4b7b..cf190ea3ce 100644
--- a/editors/code/src/lsp_ext.ts
+++ b/editors/code/src/lsp_ext.ts
@@ -300,14 +300,14 @@ export type SsrParams = {
};
export type RecursiveMemoryLayoutNode = {
- item_name: string;
+ itemName: string;
typename: string;
size: number;
alignment: number;
offset: number;
- parent_idx: number;
- children_start: number;
- children_len: number;
+ parentIdx: number;
+ childrenStart: number;
+ childrenLen: number;
};
export type RecursiveMemoryLayout = {
nodes: RecursiveMemoryLayoutNode[];
diff --git a/editors/code/src/snippets.ts b/editors/code/src/snippets.ts
index a469a9cd1f..6d75428eaa 100644
--- a/editors/code/src/snippets.ts
+++ b/editors/code/src/snippets.ts
@@ -53,7 +53,7 @@ export async function applySnippetTextEdits(editor: vscode.TextEditor, edits: vs
}
function hasSnippet(snip: string): boolean {
- const m = snip.match(/\$\d+|\{\d+:[^}]*\}/);
+ const m = snip.match(/\$\d+|\$\{\d+:[^}]*\}/);
return m != null;
}
diff --git a/editors/code/src/toolchain.ts b/editors/code/src/toolchain.ts
index 06f75a8f8d..76946d1510 100644
--- a/editors/code/src/toolchain.ts
+++ b/editors/code/src/toolchain.ts
@@ -100,7 +100,7 @@ export class Cargo {
);
} catch (err) {
log.error(`Cargo invocation has failed: ${err}`);
- throw new Error(`Cargo invocation has failed: ${err}`);
+ throw new Error(`Cargo invocation has failed: ${err}`, { cause: err });
}
return spec.filter?.(artifacts) ?? artifacts;
diff --git a/editors/code/tsconfig.json b/editors/code/tsconfig.json
index a13afab170..380acec59d 100644
--- a/editors/code/tsconfig.json
+++ b/editors/code/tsconfig.json
@@ -1,7 +1,6 @@
{
"extends": "@tsconfig/strictest/tsconfig.json",
"compilerOptions": {
- "esModuleInterop": false,
"module": "NodeNext",
"moduleResolution": "nodenext",
"target": "ES2024",
diff --git a/rust-version b/rust-version
index 68f38716db..38f153f78d 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-1174f784096deb8e4ba93f7e4b5ccb7bb4ba2c55
+4c4205163abcbd08948b3efab796c543ba1ea687