Unnamed repository; edit this file 'description' to name the repository.
-rw-r--r--.cargo/config.toml1
-rw-r--r--Cargo.lock15
-rw-r--r--Cargo.toml6
-rw-r--r--crates/base-db/src/change.rs2
-rw-r--r--crates/base-db/src/input.rs2
-rw-r--r--crates/base-db/src/lib.rs4
-rw-r--r--crates/flycheck/src/lib.rs8
-rw-r--r--crates/hir-def/src/attr.rs61
-rw-r--r--crates/hir-def/src/attr/tests.rs48
-rw-r--r--crates/hir-def/src/body.rs8
-rw-r--r--crates/hir-def/src/body/lower.rs12
-rw-r--r--crates/hir-def/src/body/pretty.rs17
-rw-r--r--crates/hir-def/src/body/tests.rs8
-rw-r--r--crates/hir-def/src/child_by_source.rs52
-rw-r--r--crates/hir-def/src/data.rs10
-rw-r--r--crates/hir-def/src/db.rs10
-rw-r--r--crates/hir-def/src/dyn_map/keys.rs13
-rw-r--r--crates/hir-def/src/expander.rs2
-rw-r--r--crates/hir-def/src/find_path.rs313
-rw-r--r--crates/hir-def/src/generics.rs38
-rw-r--r--crates/hir-def/src/hir.rs6
-rw-r--r--crates/hir-def/src/import_map.rs8
-rw-r--r--crates/hir-def/src/item_tree.rs14
-rw-r--r--crates/hir-def/src/lang_item.rs10
-rw-r--r--crates/hir-def/src/lib.rs105
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mbe.rs38
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mod.rs2
-rw-r--r--crates/hir-def/src/nameres.rs6
-rw-r--r--crates/hir-def/src/nameres/attr_resolution.rs19
-rw-r--r--crates/hir-def/src/nameres/collector.rs159
-rw-r--r--crates/hir-def/src/nameres/path_resolution.rs11
-rw-r--r--crates/hir-def/src/path/lower.rs4
-rw-r--r--crates/hir-def/src/per_ns.rs4
-rw-r--r--crates/hir-def/src/pretty.rs2
-rw-r--r--crates/hir-def/src/visibility.rs7
-rw-r--r--crates/hir-expand/src/attrs.rs12
-rw-r--r--crates/hir-expand/src/builtin_attr_macro.rs2
-rw-r--r--crates/hir-expand/src/builtin_fn_macro.rs4
-rw-r--r--crates/hir-expand/src/cfg_process.rs4
-rw-r--r--crates/hir-expand/src/db.rs48
-rw-r--r--crates/hir-expand/src/eager.rs6
-rw-r--r--crates/hir-expand/src/files.rs27
-rw-r--r--crates/hir-expand/src/fixup.rs151
-rw-r--r--crates/hir-expand/src/hygiene.rs2
-rw-r--r--crates/hir-expand/src/inert_attr_macro.rs (renamed from crates/hir-def/src/attr/builtin.rs)9
-rw-r--r--crates/hir-expand/src/lib.rs127
-rw-r--r--crates/hir-expand/src/mod_path.rs12
-rw-r--r--crates/hir-expand/src/quote.rs2
-rw-r--r--crates/hir-ty/src/autoderef.rs2
-rw-r--r--crates/hir-ty/src/builder.rs9
-rw-r--r--crates/hir-ty/src/chalk_db.rs10
-rw-r--r--crates/hir-ty/src/chalk_ext.rs10
-rw-r--r--crates/hir-ty/src/consteval.rs13
-rw-r--r--crates/hir-ty/src/consteval/tests.rs6
-rw-r--r--crates/hir-ty/src/db.rs18
-rw-r--r--crates/hir-ty/src/diagnostics/decl_check.rs2
-rw-r--r--crates/hir-ty/src/diagnostics/expr.rs3
-rw-r--r--crates/hir-ty/src/diagnostics/unsafe_check.rs2
-rw-r--r--crates/hir-ty/src/display.rs97
-rw-r--r--crates/hir-ty/src/generics.rs263
-rw-r--r--crates/hir-ty/src/infer.rs28
-rw-r--r--crates/hir-ty/src/infer/closure.rs10
-rw-r--r--crates/hir-ty/src/infer/expr.rs21
-rw-r--r--crates/hir-ty/src/infer/path.rs9
-rw-r--r--crates/hir-ty/src/infer/unify.rs3
-rw-r--r--crates/hir-ty/src/inhabitedness.rs4
-rw-r--r--crates/hir-ty/src/lib.rs38
-rw-r--r--crates/hir-ty/src/lower.rs308
-rw-r--r--crates/hir-ty/src/method_resolution.rs22
-rw-r--r--crates/hir-ty/src/mir.rs25
-rw-r--r--crates/hir-ty/src/mir/borrowck.rs2
-rw-r--r--crates/hir-ty/src/mir/eval.rs4
-rw-r--r--crates/hir-ty/src/mir/eval/tests.rs2
-rw-r--r--crates/hir-ty/src/mir/lower.rs7
-rw-r--r--crates/hir-ty/src/mir/monomorphization.rs2
-rw-r--r--crates/hir-ty/src/traits.rs4
-rw-r--r--crates/hir-ty/src/utils.rs297
-rw-r--r--crates/hir/Cargo.toml1
-rw-r--r--crates/hir/src/attrs.rs2
-rw-r--r--crates/hir/src/display.rs296
-rw-r--r--crates/hir/src/has_source.rs75
-rw-r--r--crates/hir/src/lib.rs134
-rw-r--r--crates/hir/src/semantics.rs283
-rw-r--r--crates/hir/src/semantics/source_to_def.rs362
-rw-r--r--crates/hir/src/source_analyzer.rs41
-rw-r--r--crates/hir/src/term_search.rs1
-rw-r--r--crates/hir/src/term_search/expr.rs55
-rw-r--r--crates/hir/src/term_search/tactics.rs52
-rw-r--r--crates/ide-assists/src/handlers/add_missing_impl_members.rs2
-rw-r--r--crates/ide-assists/src/handlers/auto_import.rs51
-rw-r--r--crates/ide-assists/src/handlers/bool_to_enum.rs4
-rw-r--r--crates/ide-assists/src/handlers/destructure_struct_binding.rs2
-rw-r--r--crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs145
-rw-r--r--crates/ide-assists/src/handlers/generate_delegate_trait.rs2
-rw-r--r--crates/ide-assists/src/handlers/generate_function.rs10
-rw-r--r--crates/ide-assists/src/handlers/generate_getter_or_setter.rs2
-rw-r--r--crates/ide-assists/src/handlers/generate_mut_trait_impl.rs2
-rw-r--r--crates/ide-assists/src/handlers/inline_call.rs4
-rw-r--r--crates/ide-assists/src/handlers/into_to_qualified_from.rs4
-rw-r--r--crates/ide-assists/src/handlers/merge_nested_if.rs2
-rw-r--r--crates/ide-assists/src/handlers/remove_parentheses.rs2
-rw-r--r--crates/ide-assists/src/handlers/term_search.rs30
-rw-r--r--crates/ide-assists/src/utils.rs47
-rw-r--r--crates/ide-assists/src/utils/suggest_name.rs2
-rw-r--r--crates/ide-completion/src/completions/env_vars.rs3
-rw-r--r--crates/ide-completion/src/completions/expr.rs4
-rw-r--r--crates/ide-completion/src/completions/field.rs12
-rw-r--r--crates/ide-completion/src/completions/flyimport.rs19
-rw-r--r--crates/ide-completion/src/completions/item_list.rs19
-rw-r--r--crates/ide-completion/src/completions/keyword.rs6
-rw-r--r--crates/ide-completion/src/completions/mod_.rs2
-rw-r--r--crates/ide-completion/src/completions/pattern.rs12
-rw-r--r--crates/ide-completion/src/completions/postfix.rs2
-rw-r--r--crates/ide-completion/src/completions/postfix/format_like.rs4
-rw-r--r--crates/ide-completion/src/completions/type.rs2
-rw-r--r--crates/ide-completion/src/completions/vis.rs2
-rw-r--r--crates/ide-completion/src/context.rs9
-rw-r--r--crates/ide-completion/src/context/analysis.rs4
-rw-r--r--crates/ide-completion/src/item.rs2
-rw-r--r--crates/ide-completion/src/lib.rs2
-rw-r--r--crates/ide-completion/src/render.rs7
-rw-r--r--crates/ide-completion/src/render/const_.rs2
-rw-r--r--crates/ide-completion/src/render/function.rs4
-rw-r--r--crates/ide-completion/src/render/literal.rs4
-rw-r--r--crates/ide-completion/src/render/macro_.rs4
-rw-r--r--crates/ide-completion/src/render/pattern.rs4
-rw-r--r--crates/ide-completion/src/render/type_alias.rs4
-rw-r--r--crates/ide-completion/src/tests/expression.rs11
-rw-r--r--crates/ide-completion/src/tests/item.rs89
-rw-r--r--crates/ide-completion/src/tests/item_list.rs5
-rw-r--r--crates/ide-completion/src/tests/pattern.rs31
-rw-r--r--crates/ide-completion/src/tests/record.rs47
-rw-r--r--crates/ide-completion/src/tests/special.rs4
-rw-r--r--crates/ide-db/src/active_parameter.rs2
-rw-r--r--crates/ide-db/src/apply_change.rs5
-rw-r--r--crates/ide-db/src/defs.rs11
-rw-r--r--crates/ide-db/src/famous_defs.rs2
-rw-r--r--crates/ide-db/src/helpers.rs4
-rw-r--r--crates/ide-db/src/imports/import_assets.rs16
-rw-r--r--crates/ide-db/src/imports/insert_use.rs2
-rw-r--r--crates/ide-db/src/items_locator.rs4
-rw-r--r--crates/ide-db/src/prime_caches.rs54
-rw-r--r--crates/ide-db/src/rename.rs20
-rw-r--r--crates/ide-db/src/search.rs4
-rw-r--r--crates/ide-db/src/source_change.rs6
-rw-r--r--crates/ide-db/src/symbol_index.rs18
-rw-r--r--crates/ide-diagnostics/src/handlers/json_is_not_rust.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/macro_error.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/missing_unsafe.rs14
-rw-r--r--crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs6
-rw-r--r--crates/ide-diagnostics/src/handlers/unlinked_file.rs32
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_method.rs8
-rw-r--r--crates/ide-diagnostics/src/lib.rs8
-rw-r--r--crates/ide-ssr/src/matching.rs2
-rw-r--r--crates/ide/src/call_hierarchy.rs17
-rw-r--r--crates/ide/src/expand_macro.rs30
-rw-r--r--crates/ide/src/extend_selection.rs22
-rw-r--r--crates/ide/src/goto_definition.rs24
-rw-r--r--crates/ide/src/highlight_related.rs2
-rw-r--r--crates/ide/src/hover.rs2
-rw-r--r--crates/ide/src/hover/render.rs1
-rw-r--r--crates/ide/src/inlay_hints.rs21
-rw-r--r--crates/ide/src/inlay_hints/adjustment.rs21
-rw-r--r--crates/ide/src/inlay_hints/param_name.rs4
-rw-r--r--crates/ide/src/interpret_function.rs2
-rw-r--r--crates/ide/src/lib.rs11
-rw-r--r--crates/ide/src/navigation_target.rs34
-rw-r--r--crates/ide/src/references.rs2
-rw-r--r--crates/ide/src/rename.rs48
-rw-r--r--crates/ide/src/signature_help.rs2
-rw-r--r--crates/ide/src/syntax_highlighting.rs2
-rw-r--r--crates/ide/src/syntax_highlighting/escape.rs2
-rw-r--r--crates/ide/src/syntax_highlighting/highlight.rs3
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_block_mod_items.html9
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_const.html2
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html2
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_general.html2
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_strings.html2
-rw-r--r--crates/ide/src/syntax_highlighting/tests.rs7
-rw-r--r--crates/ide/src/view_memory_layout.rs2
-rw-r--r--crates/load-cargo/src/lib.rs116
-rw-r--r--crates/mbe/src/benchmark.rs2
-rw-r--r--crates/mbe/src/expander/matcher.rs4
-rw-r--r--crates/mbe/src/expander/transcriber.rs2
-rw-r--r--crates/mbe/src/lib.rs6
-rw-r--r--crates/mbe/src/parser.rs4
-rw-r--r--crates/mbe/src/syntax_bridge.rs4
-rw-r--r--crates/parser/src/grammar.rs4
-rw-r--r--crates/parser/src/grammar/patterns.rs2
-rw-r--r--crates/parser/src/lexed_str.rs2
-rw-r--r--crates/parser/src/lib.rs2
-rw-r--r--crates/parser/src/shortcuts.rs2
-rw-r--r--crates/paths/src/lib.rs22
-rw-r--r--crates/proc-macro-api/src/lib.rs2
-rw-r--r--crates/proc-macro-api/src/process.rs3
-rw-r--r--crates/proc-macro-api/src/version.rs2
-rw-r--r--crates/project-model/src/lib.rs2
-rw-r--r--crates/project-model/src/project_json.rs180
-rw-r--r--crates/project-model/src/rustc_cfg.rs2
-rw-r--r--crates/project-model/src/sysroot.rs3
-rw-r--r--crates/project-model/src/tests.rs2
-rw-r--r--crates/project-model/src/workspace.rs20
-rw-r--r--crates/rust-analyzer/Cargo.toml1
-rw-r--r--crates/rust-analyzer/src/bin/main.rs18
-rw-r--r--crates/rust-analyzer/src/caps.rs2
-rw-r--r--crates/rust-analyzer/src/cli/analysis_stats.rs5
-rw-r--r--crates/rust-analyzer/src/cli/parse.rs2
-rw-r--r--crates/rust-analyzer/src/cli/run_tests.rs4
-rw-r--r--crates/rust-analyzer/src/cli/rustc_tests.rs4
-rw-r--r--crates/rust-analyzer/src/cli/scip.rs12
-rw-r--r--crates/rust-analyzer/src/config.rs1461
-rw-r--r--crates/rust-analyzer/src/diagnostics.rs52
-rw-r--r--crates/rust-analyzer/src/diagnostics/to_proto.rs1
-rw-r--r--crates/rust-analyzer/src/dispatch.rs12
-rw-r--r--crates/rust-analyzer/src/global_state.rs169
-rw-r--r--crates/rust-analyzer/src/handlers/notification.rs31
-rw-r--r--crates/rust-analyzer/src/handlers/request.rs237
-rw-r--r--crates/rust-analyzer/src/integrated_benchmarks.rs11
-rw-r--r--crates/rust-analyzer/src/lib.rs4
-rw-r--r--crates/rust-analyzer/src/lsp/ext.rs41
-rw-r--r--crates/rust-analyzer/src/lsp/from_proto.rs5
-rw-r--r--crates/rust-analyzer/src/lsp/to_proto.rs184
-rw-r--r--crates/rust-analyzer/src/main_loop.rs84
-rw-r--r--crates/rust-analyzer/src/reload.rs100
-rw-r--r--crates/rust-analyzer/src/target_spec.rs (renamed from crates/rust-analyzer/src/cargo_target_spec.rs)110
-rw-r--r--crates/rust-analyzer/src/tracing/config.rs1
-rw-r--r--crates/rust-analyzer/src/tracing/hprof.rs6
-rw-r--r--crates/rust-analyzer/tests/slow-tests/main.rs9
-rw-r--r--crates/rust-analyzer/tests/slow-tests/ratoml.rs947
-rw-r--r--crates/rust-analyzer/tests/slow-tests/support.rs35
-rw-r--r--crates/rust-analyzer/tests/slow-tests/tidy.rs21
-rw-r--r--crates/salsa/salsa-macros/src/query_group.rs20
-rw-r--r--crates/salsa/src/interned.rs37
-rw-r--r--crates/salsa/src/lib.rs8
-rw-r--r--crates/salsa/tests/cycles.rs2
-rw-r--r--crates/salsa/tests/incremental/constants.rs4
-rw-r--r--crates/salsa/tests/incremental/implementation.rs8
-rw-r--r--crates/salsa/tests/parallel/parallel_cycle_none_recover.rs2
-rw-r--r--crates/salsa/tests/parallel/race.rs2
-rw-r--r--crates/span/src/hygiene.rs2
-rw-r--r--crates/span/src/lib.rs13
-rw-r--r--crates/stdx/src/anymap.rs4
-rw-r--r--crates/syntax/src/algo.rs4
-rw-r--r--crates/syntax/src/ast/make.rs6
-rw-r--r--crates/syntax/src/lib.rs48
-rw-r--r--crates/syntax/src/parsing.rs17
-rw-r--r--crates/syntax/src/validation.rs2
-rw-r--r--crates/test-fixture/src/lib.rs2
-rw-r--r--crates/test-utils/src/fixture.rs8
-rw-r--r--crates/test-utils/src/lib.rs2
-rw-r--r--crates/toolchain/src/lib.rs4
-rw-r--r--crates/tt/src/lib.rs2
-rw-r--r--crates/vfs/src/lib.rs5
-rw-r--r--crates/vfs/src/vfs_path.rs3
-rw-r--r--docs/dev/README.md2
-rw-r--r--docs/dev/lsp-extensions.md15
-rw-r--r--docs/user/generated_config.adoc2
-rw-r--r--docs/user/manual.adoc4
-rw-r--r--editors/code/package-lock.json14
-rw-r--r--editors/code/package.json3630
-rw-r--r--editors/code/src/ast_inspector.ts3
-rw-r--r--editors/code/src/bootstrap.ts76
-rw-r--r--editors/code/src/client.ts91
-rw-r--r--editors/code/src/commands.ts50
-rw-r--r--editors/code/src/config.ts4
-rw-r--r--editors/code/src/ctx.ts62
-rw-r--r--editors/code/src/debug.ts43
-rw-r--r--editors/code/src/dependencies_provider.ts2
-rw-r--r--editors/code/src/diagnostics.ts2
-rw-r--r--editors/code/src/lsp_ext.ts36
-rw-r--r--editors/code/src/main.ts13
-rw-r--r--editors/code/src/nullable.ts19
-rw-r--r--editors/code/src/run.ts90
-rw-r--r--editors/code/src/snippets.ts3
-rw-r--r--editors/code/src/tasks.ts100
-rw-r--r--editors/code/src/toolchain.ts7
-rw-r--r--editors/code/src/undefinable.ts19
-rw-r--r--editors/code/src/util.ts67
-rw-r--r--editors/code/tests/unit/runnable_env.test.ts4
-rw-r--r--lib/lsp-server/src/lib.rs3
-rw-r--r--rust-version2
-rw-r--r--xtask/src/dist.rs4
-rw-r--r--xtask/src/metrics.rs2
-rw-r--r--xtask/src/publish.rs9
284 files changed, 8836 insertions, 4787 deletions
diff --git a/.cargo/config.toml b/.cargo/config.toml
index 070560dfbc..0193a9566e 100644
--- a/.cargo/config.toml
+++ b/.cargo/config.toml
@@ -4,6 +4,7 @@ tq = "test -- -q"
qt = "tq"
lint = "clippy --all-targets -- --cap-lints warn"
codegen = "run --package xtask --bin xtask -- codegen"
+dist = "run --package xtask --bin xtask -- dist"
[target.x86_64-pc-windows-msvc]
linker = "rust-lld"
diff --git a/Cargo.lock b/Cargo.lock
index 3558c39bb3..57d43dad3f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -329,6 +329,15 @@ dependencies = [
]
[[package]]
+name = "dirs"
+version = "5.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
+dependencies = [
+ "dirs-sys",
+]
+
+[[package]]
name = "dirs-sys"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -503,6 +512,7 @@ dependencies = [
"hir-def",
"hir-expand",
"hir-ty",
+ "intern",
"itertools",
"once_cell",
"rustc-hash",
@@ -891,9 +901,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.154"
+version = "0.2.155"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346"
+checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
[[package]]
name = "libloading"
@@ -1665,6 +1675,7 @@ dependencies = [
"anyhow",
"cfg",
"crossbeam-channel",
+ "dirs",
"dissimilar",
"expect-test",
"flycheck",
diff --git a/Cargo.toml b/Cargo.toml
index ccc27e2133..583c7bbe33 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -162,7 +162,11 @@ xshell = "0.2.5"
dashmap = { version = "=5.5.3", features = ["raw-api"] }
[workspace.lints.rust]
-rust_2018_idioms = "warn"
+bare_trait_objects = "warn"
+elided_lifetimes_in_paths = "warn"
+ellipsis_inclusive_range_patterns = "warn"
+explicit_outlives_requirements = "warn"
+unused_extern_crates = "warn"
unused_lifetimes = "warn"
unreachable_pub = "warn"
semicolon_in_expressions_from_macros = "warn"
diff --git a/crates/base-db/src/change.rs b/crates/base-db/src/change.rs
index 927b2108a6..0fd54e1211 100644
--- a/crates/base-db/src/change.rs
+++ b/crates/base-db/src/change.rs
@@ -51,7 +51,7 @@ impl FileChange {
}
pub fn apply(self, db: &mut dyn SourceDatabaseExt) {
- let _p = tracing::span!(tracing::Level::INFO, "FileChange::apply").entered();
+ let _p = tracing::info_span!("FileChange::apply").entered();
if let Some(roots) = self.roots {
for (idx, root) in roots.into_iter().enumerate() {
let root_id = SourceRootId(idx as u32);
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index b2c3f38ab4..1d172ab9e4 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -412,7 +412,7 @@ impl CrateGraph {
from: CrateId,
dep: Dependency,
) -> Result<(), CyclicDependenciesError> {
- let _p = tracing::span!(tracing::Level::INFO, "add_dep").entered();
+ let _p = tracing::info_span!("add_dep").entered();
self.check_cycle_after_dependency(from, dep.crate_id)?;
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 2c13eed56c..f5165ea8a7 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -85,7 +85,7 @@ fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseC
}
fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
- let _p = tracing::span!(tracing::Level::INFO, "parse", ?file_id).entered();
+ let _p = tracing::info_span!("parse", ?file_id).entered();
let text = db.file_text(file_id);
// FIXME: Edition based parsing
SourceFile::parse(&text, span::Edition::CURRENT)
@@ -187,7 +187,7 @@ impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
}
fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
- let _p = tracing::span!(tracing::Level::INFO, "relevant_crates").entered();
+ let _p = tracing::info_span!("relevant_crates").entered();
let source_root = self.0.file_source_root(file_id);
self.0.source_root_crates(source_root)
}
diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs
index afdc3e389b..4584400e66 100644
--- a/crates/flycheck/src/lib.rs
+++ b/crates/flycheck/src/lib.rs
@@ -304,7 +304,7 @@ impl FlycheckActor {
Some(c) => c,
None => continue,
};
- let formatted_command = format!("{:?}", command);
+ let formatted_command = format!("{command:?}");
tracing::debug!(?command, "will restart flycheck");
let (sender, receiver) = unbounded();
@@ -318,8 +318,7 @@ impl FlycheckActor {
}
Err(error) => {
self.report_progress(Progress::DidFailToRestart(format!(
- "Failed to run the following command: {} error={}",
- formatted_command, error
+ "Failed to run the following command: {formatted_command} error={error}"
)));
self.status = FlycheckStatus::Finished;
}
@@ -331,7 +330,7 @@ impl FlycheckActor {
// Watcher finished
let command_handle = self.command_handle.take().unwrap();
self.command_receiver.take();
- let formatted_handle = format!("{:?}", command_handle);
+ let formatted_handle = format!("{command_handle:?}");
let res = command_handle.join();
if let Err(error) = &res {
@@ -387,6 +386,7 @@ impl FlycheckActor {
"did cancel flycheck"
);
command_handle.cancel();
+ self.command_receiver.take();
self.report_progress(Progress::DidCancel);
self.status = FlycheckStatus::Finished;
}
diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs
index d9eeffd798..184dab8367 100644
--- a/crates/hir-def/src/attr.rs
+++ b/crates/hir-def/src/attr.rs
@@ -1,10 +1,5 @@
//! A higher level attributes based on TokenTree, with also some shortcuts.
-pub mod builtin;
-
-#[cfg(test)]
-mod tests;
-
use std::{borrow::Cow, hash::Hash, ops, slice::Iter as SliceIter};
use base_db::CrateId;
@@ -75,7 +70,7 @@ impl Attrs {
db: &dyn DefDatabase,
v: VariantId,
) -> Arc<ArenaMap<LocalFieldId, Attrs>> {
- let _p = tracing::span!(tracing::Level::INFO, "fields_attrs_query").entered();
+ let _p = tracing::info_span!("fields_attrs_query").entered();
// FIXME: There should be some proper form of mapping between item tree field ids and hir field ids
let mut res = ArenaMap::default();
@@ -326,7 +321,7 @@ impl AttrsWithOwner {
}
pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs {
- let _p = tracing::span!(tracing::Level::INFO, "attrs_query").entered();
+ let _p = tracing::info_span!("attrs_query").entered();
// FIXME: this should use `Trace` to avoid duplication in `source_map` below
let raw_attrs = match def {
AttrDefId::ModuleId(module) => {
@@ -646,3 +641,55 @@ pub(crate) fn fields_attrs_source_map(
Arc::new(res)
}
+
+#[cfg(test)]
+mod tests {
+ //! This module contains tests for doc-expression parsing.
+ //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
+
+ use triomphe::Arc;
+
+ use base_db::FileId;
+ use hir_expand::span_map::{RealSpanMap, SpanMap};
+ use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode};
+ use syntax::{ast, AstNode, TextRange};
+
+ use crate::attr::{DocAtom, DocExpr};
+
+ fn assert_parse_result(input: &str, expected: DocExpr) {
+ let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
+ let tt = syntax_node_to_token_tree(
+ tt.syntax(),
+ map.as_ref(),
+ map.span_for_range(TextRange::empty(0.into())),
+ DocCommentDesugarMode::ProcMacro,
+ );
+ let cfg = DocExpr::parse(&tt);
+ assert_eq!(cfg, expected);
+ }
+
+ #[test]
+ fn test_doc_expr_parser() {
+ assert_parse_result("#![doc(hidden)]", DocAtom::Flag("hidden".into()).into());
+
+ assert_parse_result(
+ r#"#![doc(alias = "foo")]"#,
+ DocAtom::KeyValue { key: "alias".into(), value: "foo".into() }.into(),
+ );
+
+ assert_parse_result(r#"#![doc(alias("foo"))]"#, DocExpr::Alias(["foo".into()].into()));
+ assert_parse_result(
+ r#"#![doc(alias("foo", "bar", "baz"))]"#,
+ DocExpr::Alias(["foo".into(), "bar".into(), "baz".into()].into()),
+ );
+
+ assert_parse_result(
+ r#"
+ #[doc(alias("Bar", "Qux"))]
+ struct Foo;"#,
+ DocExpr::Alias(["Bar".into(), "Qux".into()].into()),
+ );
+ }
+}
diff --git a/crates/hir-def/src/attr/tests.rs b/crates/hir-def/src/attr/tests.rs
deleted file mode 100644
index 727f442980..0000000000
--- a/crates/hir-def/src/attr/tests.rs
+++ /dev/null
@@ -1,48 +0,0 @@
-//! This module contains tests for doc-expression parsing.
-//! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
-
-use triomphe::Arc;
-
-use base_db::FileId;
-use hir_expand::span_map::{RealSpanMap, SpanMap};
-use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode};
-use syntax::{ast, AstNode, TextRange};
-
-use crate::attr::{DocAtom, DocExpr};
-
-fn assert_parse_result(input: &str, expected: DocExpr) {
- let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
- let tt = syntax_node_to_token_tree(
- tt.syntax(),
- map.as_ref(),
- map.span_for_range(TextRange::empty(0.into())),
- DocCommentDesugarMode::ProcMacro,
- );
- let cfg = DocExpr::parse(&tt);
- assert_eq!(cfg, expected);
-}
-
-#[test]
-fn test_doc_expr_parser() {
- assert_parse_result("#![doc(hidden)]", DocAtom::Flag("hidden".into()).into());
-
- assert_parse_result(
- r#"#![doc(alias = "foo")]"#,
- DocAtom::KeyValue { key: "alias".into(), value: "foo".into() }.into(),
- );
-
- assert_parse_result(r#"#![doc(alias("foo"))]"#, DocExpr::Alias(["foo".into()].into()));
- assert_parse_result(
- r#"#![doc(alias("foo", "bar", "baz"))]"#,
- DocExpr::Alias(["foo".into(), "bar".into(), "baz".into()].into()),
- );
-
- assert_parse_result(
- r#"
- #[doc(alias("Bar", "Qux"))]
- struct Foo;"#,
- DocExpr::Alias(["Bar".into(), "Qux".into()].into()),
- );
-}
diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs
index d2f4d7b7e5..ca4a3f5217 100644
--- a/crates/hir-def/src/body.rs
+++ b/crates/hir-def/src/body.rs
@@ -124,7 +124,7 @@ impl Body {
db: &dyn DefDatabase,
def: DefWithBodyId,
) -> (Arc<Body>, Arc<BodySourceMap>) {
- let _p = tracing::span!(tracing::Level::INFO, "body_with_source_map_query").entered();
+ let _p = tracing::info_span!("body_with_source_map_query").entered();
let mut params = None;
let mut is_async_fn = false;
@@ -395,6 +395,12 @@ impl BodySourceMap {
self.expr_map.get(&src).copied()
}
+ pub fn expansions(
+ &self,
+ ) -> impl Iterator<Item = (&InFile<AstPtr<ast::MacroCall>>, &MacroFileId)> {
+ self.expansions.iter()
+ }
+
pub fn implicit_format_args(
&self,
node: InFile<&ast::FormatArgsExpr>,
diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs
index c6d9ba6cfe..faba9050fc 100644
--- a/crates/hir-def/src/body/lower.rs
+++ b/crates/hir-def/src/body/lower.rs
@@ -12,6 +12,7 @@ use intern::Interned;
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::AstIdMap;
+use stdx::never;
use syntax::{
ast::{
self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName,
@@ -480,7 +481,8 @@ impl ExprCollector<'_> {
} else if e.const_token().is_some() {
Mutability::Shared
} else {
- unreachable!("parser only remaps to raw_token() if matching mutability token follows")
+ never!("parser only remaps to raw_token() if matching mutability token follows");
+ Mutability::Shared
}
} else {
Mutability::from_mutable(e.mut_token().is_some())
@@ -963,7 +965,7 @@ impl ExprCollector<'_> {
.resolve_path(
self.db,
module,
- &path,
+ path,
crate::item_scope::BuiltinShadowMode::Other,
Some(MacroSubNs::Bang),
)
@@ -1006,9 +1008,9 @@ impl ExprCollector<'_> {
Some((mark, expansion)) => {
// Keep collecting even with expansion errors so we can provide completions and
// other services in incomplete macro expressions.
- self.source_map
- .expansions
- .insert(macro_call_ptr, self.expander.current_file_id().macro_file().unwrap());
+ if let Some(macro_file) = self.expander.current_file_id().macro_file() {
+ self.source_map.expansions.insert(macro_call_ptr, macro_file);
+ }
let prev_ast_id_map = mem::replace(
&mut self.ast_id_map,
self.db.ast_id_map(self.expander.current_file_id()),
diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs
index cbb5ca887f..c48d16d053 100644
--- a/crates/hir-def/src/body/pretty.rs
+++ b/crates/hir-def/src/body/pretty.rs
@@ -48,21 +48,30 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false };
if let DefWithBodyId::FunctionId(it) = owner {
p.buf.push('(');
- let params = &db.function_data(it).params;
- let mut params = params.iter();
+ let function_data = &db.function_data(it);
+ let (mut params, ret_type) = (function_data.params.iter(), &function_data.ret_type);
if let Some(self_param) = body.self_param {
p.print_binding(self_param);
- p.buf.push(':');
+ p.buf.push_str(": ");
if let Some(ty) = params.next() {
p.print_type_ref(ty);
+ p.buf.push_str(", ");
}
}
body.params.iter().zip(params).for_each(|(&param, ty)| {
p.print_pat(param);
- p.buf.push(':');
+ p.buf.push_str(": ");
p.print_type_ref(ty);
+ p.buf.push_str(", ");
});
+ // remove the last ", " in param list
+ if body.params.len() > 0 {
+ p.buf.truncate(p.buf.len() - 2);
+ }
p.buf.push(')');
+ // return type
+ p.buf.push_str(" -> ");
+ p.print_type_ref(ret_type);
p.buf.push(' ');
}
p.print_expr(body.body_expr);
diff --git a/crates/hir-def/src/body/tests.rs b/crates/hir-def/src/body/tests.rs
index e8b26d5373..0011d3a20c 100644
--- a/crates/hir-def/src/body/tests.rs
+++ b/crates/hir-def/src/body/tests.rs
@@ -156,7 +156,7 @@ fn main() {
);
expect![[r#"
- fn main() {
+ fn main() -> () {
let are = "are";
let count = 10;
builtin#lang(Arguments::new_v1_formatted)(
@@ -258,7 +258,7 @@ impl SsrError {
assert_eq!(db.body_with_source_map(def).1.diagnostics(), &[]);
expect![[r#"
- fn main() {
+ fn main() -> () {
_ = $crate::error::SsrError::new(
builtin#lang(Arguments::new_v1_formatted)(
&[
@@ -303,7 +303,7 @@ macro_rules! m {
};
}
-fn f() {
+fn f(a: i32, b: u32) -> String {
m!();
}
"#,
@@ -317,7 +317,7 @@ fn f() {
}
expect![[r#"
- fn f() {
+ fn f(a: i32, b: u32) -> String {
{
$crate::panicking::panic_fmt(
builtin#lang(Arguments::new_v1_formatted)(
diff --git a/crates/hir-def/src/child_by_source.rs b/crates/hir-def/src/child_by_source.rs
index 0b41984bdd..106109eb18 100644
--- a/crates/hir-def/src/child_by_source.rs
+++ b/crates/hir-def/src/child_by_source.rs
@@ -6,7 +6,7 @@
use either::Either;
use hir_expand::{attrs::collect_attrs, HirFileId};
-use syntax::ast;
+use syntax::{ast, AstPtr};
use crate::{
db::DefDatabase,
@@ -38,7 +38,7 @@ impl ChildBySource for TraitId {
data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| {
- res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+ res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
},
);
data.items.iter().for_each(|&(_, item)| {
@@ -50,9 +50,10 @@ impl ChildBySource for TraitId {
impl ChildBySource for ImplId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let data = db.impl_data(*self);
+ // FIXME: Macro calls
data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| {
- res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+ res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
},
);
data.items.iter().for_each(|&item| {
@@ -80,7 +81,7 @@ impl ChildBySource for ItemScope {
.for_each(|konst| insert_item_loc(db, res, file_id, konst, keys::CONST));
self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
|(ast_id, call_id)| {
- res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+ res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
},
);
self.legacy_macros().for_each(|(_, ids)| {
@@ -88,7 +89,7 @@ impl ChildBySource for ItemScope {
if let MacroId::MacroRulesId(id) = id {
let loc = id.lookup(db);
if loc.id.file_id() == file_id {
- res[keys::MACRO_RULES].insert(loc.source(db).value, id);
+ res[keys::MACRO_RULES].insert(loc.ast_ptr(db).value, id);
}
}
})
@@ -100,12 +101,18 @@ impl ChildBySource for ItemScope {
if let Some((_, Either::Left(attr))) =
collect_attrs(&adt).nth(attr_id.ast_index())
{
- res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into()));
+ res[keys::DERIVE_MACRO_CALL]
+ .insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
}
});
},
);
-
+ self.iter_macro_invoc().filter(|(id, _)| id.file_id == file_id).for_each(
+ |(ast_id, &call)| {
+ let ast = ast_id.to_ptr(db.upcast());
+ res[keys::MACRO_CALL].insert(ast, call);
+ },
+ );
fn add_module_def(
db: &dyn DefDatabase,
map: &mut DynMap,
@@ -155,8 +162,8 @@ impl ChildBySource for VariantId {
for (local_id, source) in arena_map.value.iter() {
let id = FieldId { parent, local_id };
match source.clone() {
- Either::Left(source) => res[keys::TUPLE_FIELD].insert(source, id),
- Either::Right(source) => res[keys::RECORD_FIELD].insert(source, id),
+ Either::Left(source) => res[keys::TUPLE_FIELD].insert(AstPtr::new(&source), id),
+ Either::Right(source) => res[keys::RECORD_FIELD].insert(AstPtr::new(&source), id),
}
}
}
@@ -171,29 +178,30 @@ impl ChildBySource for EnumId {
let tree = loc.id.item_tree(db);
let ast_id_map = db.ast_id_map(loc.id.file_id());
- let root = db.parse_or_expand(loc.id.file_id());
db.enum_data(*self).variants.iter().for_each(|&(variant, _)| {
- res[keys::ENUM_VARIANT].insert(
- ast_id_map.get(tree[variant.lookup(db).id.value].ast_id).to_node(&root),
- variant,
- );
+ res[keys::ENUM_VARIANT]
+ .insert(ast_id_map.get(tree[variant.lookup(db).id.value].ast_id), variant);
});
}
}
impl ChildBySource for DefWithBodyId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
- let body = db.body(*self);
+ let (body, sm) = db.body_with_source_map(*self);
if let &DefWithBodyId::VariantId(v) = self {
VariantId::EnumVariantId(v).child_by_source_to(db, res, file_id)
}
+ sm.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(|(ast, &exp_id)| {
+ res[keys::MACRO_CALL].insert(ast.value, exp_id.macro_call_id);
+ });
+
for (block, def_map) in body.blocks(db) {
// All block expressions are merged into the same map, because they logically all add
// inner items to the containing `DefWithBodyId`.
def_map[DefMap::ROOT].scope.child_by_source_to(db, res, file_id);
- res[keys::BLOCK].insert(block.lookup(db).ast_id.to_node(db.upcast()), block);
+ res[keys::BLOCK].insert(block.lookup(db).ast_id.to_ptr(db.upcast()), block);
}
}
}
@@ -220,13 +228,17 @@ impl ChildBySource for GenericDefId {
{
let id = TypeOrConstParamId { parent: *self, local_id };
match ast_param {
- ast::TypeOrConstParam::Type(a) => res[keys::TYPE_PARAM].insert(a, id),
- ast::TypeOrConstParam::Const(a) => res[keys::CONST_PARAM].insert(a, id),
+ ast::TypeOrConstParam::Type(a) => {
+ res[keys::TYPE_PARAM].insert(AstPtr::new(&a), id)
+ }
+ ast::TypeOrConstParam::Const(a) => {
+ res[keys::CONST_PARAM].insert(AstPtr::new(&a), id)
+ }
}
}
for (local_id, ast_param) in lts_idx_iter.zip(generic_params_list.lifetime_params()) {
let id = LifetimeParamId { parent: *self, local_id };
- res[keys::LIFETIME_PARAM].insert(ast_param, id);
+ res[keys::LIFETIME_PARAM].insert(AstPtr::new(&ast_param), id);
}
}
}
@@ -246,7 +258,7 @@ fn insert_item_loc<ID, N, Data>(
{
let loc = id.lookup(db);
if loc.item_tree_id().file_id() == file_id {
- res[key].insert(loc.source(db).value, id)
+ res[key].insert(loc.ast_ptr(db).value, id)
}
}
diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs
index 51a4dd6f42..4338163672 100644
--- a/crates/hir-def/src/data.rs
+++ b/crates/hir-def/src/data.rs
@@ -340,7 +340,7 @@ impl ImplData {
db: &dyn DefDatabase,
id: ImplId,
) -> (Arc<ImplData>, DefDiagnostics) {
- let _p = tracing::span!(tracing::Level::INFO, "impl_data_with_diagnostics_query").entered();
+ let _p = tracing::info_span!("impl_data_with_diagnostics_query").entered();
let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);
let item_tree = tree_id.item_tree(db);
@@ -628,7 +628,7 @@ impl<'a> AssocItemCollector<'a> {
'attrs: for attr in &*attrs {
let ast_id =
AstId::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast());
- let ast_id_with_path = AstIdWithPath { path: (*attr.path).clone(), ast_id };
+ let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id };
match self.def_map.resolve_attr_macro(
self.db,
@@ -642,7 +642,7 @@ impl<'a> AssocItemCollector<'a> {
continue 'attrs;
}
let loc = self.db.lookup_intern_macro_call(call_id);
- if let MacroDefKind::ProcMacro(exp, ..) = loc.def.kind {
+ if let MacroDefKind::ProcMacro(_, exp, _) = loc.def.kind {
// If there's no expander for the proc macro (e.g. the
// proc macro is ignored, or building the proc macro
// crate failed), skip expansion like we would if it was
@@ -719,12 +719,12 @@ impl<'a> AssocItemCollector<'a> {
let MacroCall { ast_id, expand_to, ctxt, ref path } = item_tree[call];
let module = self.expander.module.local_id;
- let resolver = |path| {
+ let resolver = |path: &_| {
self.def_map
.resolve_path(
self.db,
module,
- &path,
+ path,
crate::item_scope::BuiltinShadowMode::Other,
Some(MacroSubNs::Bang),
)
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index 55ecabdc38..61fed71218 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -294,10 +294,10 @@ fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
let in_file = InFile::new(file_id, m);
match expander {
MacroExpander::Declarative => MacroDefKind::Declarative(in_file),
- MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file),
- MacroExpander::BuiltInAttr(it) => MacroDefKind::BuiltInAttr(it, in_file),
- MacroExpander::BuiltInDerive(it) => MacroDefKind::BuiltInDerive(it, in_file),
- MacroExpander::BuiltInEager(it) => MacroDefKind::BuiltInEager(it, in_file),
+ MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(in_file, it),
+ MacroExpander::BuiltInAttr(it) => MacroDefKind::BuiltInAttr(in_file, it),
+ MacroExpander::BuiltInDerive(it) => MacroDefKind::BuiltInDerive(in_file, it),
+ MacroExpander::BuiltInEager(it) => MacroDefKind::BuiltInEager(in_file, it),
}
};
@@ -338,9 +338,9 @@ fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
MacroDefId {
krate: loc.container.krate,
kind: MacroDefKind::ProcMacro(
+ InFile::new(loc.id.file_id(), makro.ast_id),
loc.expander,
loc.kind,
- InFile::new(loc.id.file_id(), makro.ast_id),
),
local_inner: false,
allow_internal_unsafe: false,
diff --git a/crates/hir-def/src/dyn_map/keys.rs b/crates/hir-def/src/dyn_map/keys.rs
index f83ab1e1a0..9d330a7bf1 100644
--- a/crates/hir-def/src/dyn_map/keys.rs
+++ b/crates/hir-def/src/dyn_map/keys.rs
@@ -13,7 +13,7 @@ use crate::{
TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
};
-pub type Key<K, V> = crate::dyn_map::Key<K, V, AstPtrPolicy<K, V>>;
+pub type Key<K, V> = crate::dyn_map::Key<AstPtr<K>, V, AstPtrPolicy<K, V>>;
pub const BLOCK: Key<ast::BlockExpr, BlockId> = Key::new();
pub const FUNCTION: Key<ast::Fn, FunctionId> = Key::new();
@@ -39,6 +39,7 @@ pub const LIFETIME_PARAM: Key<ast::LifetimeParam, LifetimeParamId> = Key::new();
pub const MACRO_RULES: Key<ast::MacroRules, MacroRulesId> = Key::new();
pub const MACRO2: Key<ast::MacroDef, Macro2Id> = Key::new();
pub const PROC_MACRO: Key<ast::Fn, ProcMacroId> = Key::new();
+pub const MACRO_CALL: Key<ast::MacroCall, MacroCallId> = Key::new();
pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> =
Key::new();
@@ -54,18 +55,16 @@ pub struct AstPtrPolicy<AST, ID> {
}
impl<AST: AstNode + 'static, ID: 'static> Policy for AstPtrPolicy<AST, ID> {
- type K = AST;
+ type K = AstPtr<AST>;
type V = ID;
- fn insert(map: &mut DynMap, key: AST, value: ID) {
- let key = AstPtr::new(&key);
+ fn insert(map: &mut DynMap, key: AstPtr<AST>, value: ID) {
map.map
.entry::<FxHashMap<AstPtr<AST>, ID>>()
.or_insert_with(Default::default)
.insert(key, value);
}
- fn get<'a>(map: &'a DynMap, key: &AST) -> Option<&'a ID> {
- let key = AstPtr::new(key);
- map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(&key)
+ fn get<'a>(map: &'a DynMap, key: &AstPtr<AST>) -> Option<&'a ID> {
+ map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(key)
}
fn is_empty(map: &DynMap) -> bool {
map.map.get::<FxHashMap<AstPtr<AST>, ID>>().map_or(true, |it| it.is_empty())
diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs
index 73ce942c58..dbf8e6b225 100644
--- a/crates/hir-def/src/expander.rs
+++ b/crates/hir-def/src/expander.rs
@@ -56,7 +56,7 @@ impl Expander {
&mut self,
db: &dyn DefDatabase,
macro_call: ast::MacroCall,
- resolver: impl Fn(ModPath) -> Option<MacroId>,
+ resolver: impl Fn(&ModPath) -> Option<MacroId>,
) -> Result<ExpandResult<Option<(Mark, Parse<T>)>>, UnresolvedMacro> {
// FIXME: within_limit should support this, instead of us having to extract the error
let mut unresolved_macro_err = None;
diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs
index d9495d36c0..58a1872ef2 100644
--- a/crates/hir-def/src/find_path.rs
+++ b/crates/hir-def/src/find_path.rs
@@ -1,6 +1,6 @@
//! An algorithm to find a path to refer to a certain item.
-use std::{cmp::Ordering, iter};
+use std::{cell::Cell, cmp::Ordering, iter};
use hir_expand::{
name::{known, AsName, Name},
@@ -23,15 +23,40 @@ pub fn find_path(
db: &dyn DefDatabase,
item: ItemInNs,
from: ModuleId,
- prefix_kind: PrefixKind,
+ mut prefix_kind: PrefixKind,
ignore_local_imports: bool,
- cfg: ImportPathConfig,
+ mut cfg: ImportPathConfig,
) -> Option<ModPath> {
- let _p = tracing::span!(tracing::Level::INFO, "find_path").entered();
- find_path_inner(FindPathCtx { db, prefix: prefix_kind, cfg, ignore_local_imports }, item, from)
+ let _p = tracing::info_span!("find_path").entered();
+
+ // - if the item is a builtin, it's in scope
+ if let ItemInNs::Types(ModuleDefId::BuiltinType(builtin)) = item {
+ return Some(ModPath::from_segments(PathKind::Plain, iter::once(builtin.as_name())));
+ }
+
+ // within block modules, forcing a `self` or `crate` prefix will not allow using inner items, so
+ // default to plain paths.
+ if item.module(db).is_some_and(ModuleId::is_within_block) {
+ prefix_kind = PrefixKind::Plain;
+ }
+ cfg.prefer_no_std = cfg.prefer_no_std || db.crate_supports_no_std(from.krate());
+
+ find_path_inner(
+ &FindPathCtx {
+ db,
+ prefix: prefix_kind,
+ cfg,
+ ignore_local_imports,
+ from,
+ from_def_map: &from.def_map(db),
+ fuel: Cell::new(FIND_PATH_FUEL),
+ },
+ item,
+ MAX_PATH_LEN,
+ )
}
-#[derive(Copy, Clone, Debug)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Stability {
Unstable,
Stable,
@@ -46,6 +71,7 @@ fn zip_stability(a: Stability, b: Stability) -> Stability {
}
const MAX_PATH_LEN: usize = 15;
+const FIND_PATH_FUEL: usize = 10000;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum PrefixKind {
@@ -63,79 +89,54 @@ impl PrefixKind {
#[inline]
fn path_kind(self) -> PathKind {
match self {
- PrefixKind::BySelf => PathKind::Super(0),
+ PrefixKind::BySelf => PathKind::SELF,
PrefixKind::Plain => PathKind::Plain,
PrefixKind::ByCrate => PathKind::Crate,
}
}
}
-#[derive(Copy, Clone)]
struct FindPathCtx<'db> {
db: &'db dyn DefDatabase,
prefix: PrefixKind,
cfg: ImportPathConfig,
ignore_local_imports: bool,
+ from: ModuleId,
+ from_def_map: &'db DefMap,
+ fuel: Cell<usize>,
}
/// Attempts to find a path to refer to the given `item` visible from the `from` ModuleId
-fn find_path_inner(ctx: FindPathCtx<'_>, item: ItemInNs, from: ModuleId) -> Option<ModPath> {
- // - if the item is a builtin, it's in scope
- if let ItemInNs::Types(ModuleDefId::BuiltinType(builtin)) = item {
- return Some(ModPath::from_segments(PathKind::Plain, iter::once(builtin.as_name())));
- }
-
- let def_map = from.def_map(ctx.db);
- let crate_root = from.derive_crate_root();
+fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Option<ModPath> {
// - if the item is a module, jump straight to module search
if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item {
let mut visited_modules = FxHashSet::default();
- return find_path_for_module(
- FindPathCtx {
- cfg: ImportPathConfig {
- prefer_no_std: ctx.cfg.prefer_no_std
- || ctx.db.crate_supports_no_std(crate_root.krate),
- ..ctx.cfg
- },
- ..ctx
- },
- &def_map,
- &mut visited_modules,
- from,
- module_id,
- MAX_PATH_LEN,
- )
- .map(|(item, _)| item);
+ return find_path_for_module(ctx, &mut visited_modules, module_id, max_len)
+ .map(|(item, _)| item);
}
- let prefix = if item.module(ctx.db).is_some_and(|it| it.is_within_block()) {
- PrefixKind::Plain
- } else {
- ctx.prefix
- };
- let may_be_in_scope = match prefix {
+ let may_be_in_scope = match ctx.prefix {
PrefixKind::Plain | PrefixKind::BySelf => true,
- PrefixKind::ByCrate => from.is_crate_root(),
+ PrefixKind::ByCrate => ctx.from.is_crate_root(),
};
if may_be_in_scope {
// - if the item is already in scope, return the name under which it is
- let scope_name = find_in_scope(ctx.db, &def_map, from, item, ctx.ignore_local_imports);
+ let scope_name =
+ find_in_scope(ctx.db, ctx.from_def_map, ctx.from, item, ctx.ignore_local_imports);
if let Some(scope_name) = scope_name {
- return Some(ModPath::from_segments(prefix.path_kind(), iter::once(scope_name)));
+ return Some(ModPath::from_segments(ctx.prefix.path_kind(), iter::once(scope_name)));
}
}
// - if the item is in the prelude, return the name from there
- if let value @ Some(_) =
- find_in_prelude(ctx.db, &crate_root.def_map(ctx.db), &def_map, item, from)
- {
- return value;
+ if let Some(value) = find_in_prelude(ctx.db, ctx.from_def_map, item, ctx.from) {
+ return Some(value);
}
if let Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() {
// - if the item is an enum variant, refer to it via the enum
if let Some(mut path) =
- find_path_inner(ctx, ItemInNs::Types(variant.lookup(ctx.db).parent.into()), from)
+ find_path_inner(ctx, ItemInNs::Types(variant.lookup(ctx.db).parent.into()), max_len)
{
path.push_segment(ctx.db.enum_variant_data(variant).name.clone());
return Some(path);
@@ -147,53 +148,32 @@ fn find_path_inner(ctx: FindPathCtx<'_>, item: ItemInNs, from: ModuleId) -> Opti
let mut visited_modules = FxHashSet::default();
- calculate_best_path(
- FindPathCtx {
- cfg: ImportPathConfig {
- prefer_no_std: ctx.cfg.prefer_no_std
- || ctx.db.crate_supports_no_std(crate_root.krate),
- ..ctx.cfg
- },
- ..ctx
- },
- &def_map,
- &mut visited_modules,
- MAX_PATH_LEN,
- item,
- from,
- )
- .map(|(item, _)| item)
+ calculate_best_path(ctx, &mut visited_modules, item, max_len).map(|(item, _)| item)
}
#[tracing::instrument(skip_all)]
fn find_path_for_module(
- ctx: FindPathCtx<'_>,
- def_map: &DefMap,
+ ctx: &FindPathCtx<'_>,
visited_modules: &mut FxHashSet<ModuleId>,
- from: ModuleId,
module_id: ModuleId,
max_len: usize,
) -> Option<(ModPath, Stability)> {
- if max_len == 0 {
- return None;
- }
-
- let is_crate_root = module_id.as_crate_root();
- // - if the item is the crate root, return `crate`
- if is_crate_root.is_some_and(|it| it == from.derive_crate_root()) {
- return Some((ModPath::from_segments(PathKind::Crate, None), Stable));
- }
+ if let Some(crate_root) = module_id.as_crate_root() {
+ if crate_root == ctx.from.derive_crate_root() {
+ // - if the item is the crate root, return `crate`
+ return Some((ModPath::from_segments(PathKind::Crate, None), Stable));
+ }
+ // - otherwise if the item is the crate root of a dependency crate, return the name from the extern prelude
- let root_def_map = from.derive_crate_root().def_map(ctx.db);
- // - if the item is the crate root of a dependency crate, return the name from the extern prelude
- if let Some(crate_root) = is_crate_root {
+ let root_def_map = ctx.from.derive_crate_root().def_map(ctx.db);
// rev here so we prefer looking at renamed extern decls first
for (name, (def_id, _extern_crate)) in root_def_map.extern_prelude().rev() {
if crate_root != def_id {
continue;
}
- let name_already_occupied_in_type_ns = def_map
- .with_ancestor_maps(ctx.db, from.local_id, &mut |def_map, local_id| {
+ let name_already_occupied_in_type_ns = ctx
+ .from_def_map
+ .with_ancestor_maps(ctx.db, ctx.from.local_id, &mut |def_map, local_id| {
def_map[local_id]
.scope
.type_(name)
@@ -209,30 +189,30 @@ fn find_path_for_module(
return Some((ModPath::from_segments(kind, iter::once(name.clone())), Stable));
}
}
- let prefix = if module_id.is_within_block() { PrefixKind::Plain } else { ctx.prefix };
- let may_be_in_scope = match prefix {
+
+ let may_be_in_scope = match ctx.prefix {
PrefixKind::Plain | PrefixKind::BySelf => true,
- PrefixKind::ByCrate => from.is_crate_root(),
+ PrefixKind::ByCrate => ctx.from.is_crate_root(),
};
if may_be_in_scope {
let scope_name = find_in_scope(
ctx.db,
- def_map,
- from,
+ ctx.from_def_map,
+ ctx.from,
ItemInNs::Types(module_id.into()),
ctx.ignore_local_imports,
);
if let Some(scope_name) = scope_name {
// - if the item is already in scope, return the name under which it is
return Some((
- ModPath::from_segments(prefix.path_kind(), iter::once(scope_name)),
+ ModPath::from_segments(ctx.prefix.path_kind(), iter::once(scope_name)),
Stable,
));
}
}
// - if the module can be referenced as self, super or crate, do that
- if let Some(mod_path) = is_kw_kind_relative_to_from(def_map, module_id, from) {
+ if let Some(mod_path) = is_kw_kind_relative_to_from(ctx.from_def_map, module_id, ctx.from) {
if ctx.prefix != PrefixKind::ByCrate || mod_path.kind == PathKind::Crate {
return Some((mod_path, Stable));
}
@@ -240,21 +220,13 @@ fn find_path_for_module(
// - if the module is in the prelude, return it by that path
if let Some(mod_path) =
- find_in_prelude(ctx.db, &root_def_map, def_map, ItemInNs::Types(module_id.into()), from)
+ find_in_prelude(ctx.db, ctx.from_def_map, ItemInNs::Types(module_id.into()), ctx.from)
{
return Some((mod_path, Stable));
}
- calculate_best_path(
- ctx,
- def_map,
- visited_modules,
- max_len,
- ItemInNs::Types(module_id.into()),
- from,
- )
+ calculate_best_path(ctx, visited_modules, ItemInNs::Types(module_id.into()), max_len)
}
-// FIXME: Do we still need this now that we record import origins, and hence aliases?
fn find_in_scope(
db: &dyn DefDatabase,
def_map: &DefMap,
@@ -274,13 +246,11 @@ fn find_in_scope(
/// name doesn't clash in current scope.
fn find_in_prelude(
db: &dyn DefDatabase,
- root_def_map: &DefMap,
local_def_map: &DefMap,
item: ItemInNs,
from: ModuleId,
) -> Option<ModPath> {
- let (prelude_module, _) = root_def_map.prelude()?;
- // Preludes in block DefMaps are ignored, only the crate DefMap is searched
+ let (prelude_module, _) = local_def_map.prelude()?;
let prelude_def_map = prelude_module.def_map(db);
let prelude_scope = &prelude_def_map[prelude_module.local_id].scope;
let (name, vis, _declared) = prelude_scope.name_of(item)?;
@@ -319,7 +289,7 @@ fn is_kw_kind_relative_to_from(
let from = from.local_id;
if item == from {
// - if the item is the module we're in, use `self`
- Some(ModPath::from_segments(PathKind::Super(0), None))
+ Some(ModPath::from_segments(PathKind::SELF, None))
} else if let Some(parent_id) = def_map[from].parent {
if item == parent_id {
// - if the item is the parent module, use `super` (this is not used recursively, since `super::super` is ugly)
@@ -337,60 +307,71 @@ fn is_kw_kind_relative_to_from(
#[tracing::instrument(skip_all)]
fn calculate_best_path(
- ctx: FindPathCtx<'_>,
- def_map: &DefMap,
+ ctx: &FindPathCtx<'_>,
visited_modules: &mut FxHashSet<ModuleId>,
- max_len: usize,
item: ItemInNs,
- from: ModuleId,
+ max_len: usize,
) -> Option<(ModPath, Stability)> {
if max_len <= 1 {
+        // recursive base case: we can't find a path prefix of length 0; one segment is occupied by
+        // the item's name itself.
+ return None;
+ }
+ let fuel = ctx.fuel.get();
+ if fuel == 0 {
+ // we ran out of fuel, so we stop searching here
+ tracing::warn!(
+ "ran out of fuel while searching for a path for item {item:?} of krate {:?} from krate {:?}",
+ item.krate(ctx.db),
+ ctx.from.krate()
+ );
return None;
}
+ ctx.fuel.set(fuel - 1);
+
let mut best_path = None;
- let update_best_path =
- |best_path: &mut Option<_>, new_path: (ModPath, Stability)| match best_path {
+ let mut best_path_len = max_len;
+ let mut process = |mut path: (ModPath, Stability), name, best_path_len: &mut _| {
+ path.0.push_segment(name);
+ let new_path = match best_path.take() {
+ Some(best_path) => select_best_path(best_path, path, ctx.cfg),
+ None => path,
+ };
+ if new_path.1 == Stable {
+ *best_path_len = new_path.0.len();
+ }
+ match &mut best_path {
Some((old_path, old_stability)) => {
*old_path = new_path.0;
*old_stability = zip_stability(*old_stability, new_path.1);
}
- None => *best_path = Some(new_path),
- };
- // Recursive case:
- // - otherwise, look for modules containing (reexporting) it and import it from one of those
- if item.krate(ctx.db) == Some(from.krate) {
- let mut best_path_len = max_len;
+ None => best_path = Some(new_path),
+ }
+ };
+ let db = ctx.db;
+ if item.krate(db) == Some(ctx.from.krate) {
// Item was defined in the same crate that wants to import it. It cannot be found in any
// dependency in this case.
- for (module_id, name) in find_local_import_locations(ctx.db, item, from) {
+ // FIXME: cache the `find_local_import_locations` output?
+ find_local_import_locations(db, item, ctx.from, ctx.from_def_map, |name, module_id| {
if !visited_modules.insert(module_id) {
- continue;
+ return;
}
- if let Some(mut path) = find_path_for_module(
- ctx,
- def_map,
- visited_modules,
- from,
- module_id,
- best_path_len - 1,
- ) {
- path.0.push_segment(name);
-
- let new_path = match best_path.take() {
- Some(best_path) => select_best_path(best_path, path, ctx.cfg),
- None => path,
- };
- best_path_len = new_path.0.len();
- update_best_path(&mut best_path, new_path);
+            // we are looking for paths of length up to best_path_len; any longer would be
+            // less optimal. The -1 is because we push the name onto the path afterwards.
+ if let Some(path) =
+ find_path_for_module(ctx, visited_modules, module_id, best_path_len - 1)
+ {
+ process(path, name.clone(), &mut best_path_len);
}
- }
+ })
} else {
// Item was defined in some upstream crate. This means that it must be exported from one,
// too (unless we can't name it at all). It could *also* be (re)exported by the same crate
// that wants to import it here, but we always prefer to use the external path here.
- for dep in &ctx.db.crate_graph()[from.krate].dependencies {
- let import_map = ctx.db.import_map(dep.crate_id);
+ for dep in &db.crate_graph()[ctx.from.krate].dependencies {
+ let import_map = db.import_map(dep.crate_id);
let Some(import_info_for) = import_map.import_info_for(item) else { continue };
for info in import_info_for {
if info.is_doc_hidden {
@@ -400,29 +381,18 @@ fn calculate_best_path(
// Determine best path for containing module and append last segment from `info`.
// FIXME: we should guide this to look up the path locally, or from the same crate again?
- let Some((mut path, path_stability)) = find_path_for_module(
- ctx,
- def_map,
- visited_modules,
- from,
- info.container,
- max_len - 1,
- ) else {
+ let path =
+ find_path_for_module(ctx, visited_modules, info.container, best_path_len - 1);
+ let Some((path, path_stability)) = path else {
continue;
};
cov_mark::hit!(partially_imported);
- path.push_segment(info.name.clone());
-
- let path_with_stab = (
+ let path = (
path,
zip_stability(path_stability, if info.is_unstable { Unstable } else { Stable }),
);
- let new_path_with_stab = match best_path.take() {
- Some(best_path) => select_best_path(best_path, path_with_stab, ctx.cfg),
- None => path_with_stab,
- };
- update_best_path(&mut best_path, new_path_with_stab);
+ process(path, info.name.clone(), &mut best_path_len);
}
}
}
@@ -430,7 +400,7 @@ fn calculate_best_path(
}
/// Select the best (most relevant) path between two paths.
-/// This accounts for stability, path length whether std should be chosen over alloc/core paths as
+/// This accounts for stability, path length, whether std should be chosen over alloc/core paths as
/// well as ignoring prelude like paths or not.
fn select_best_path(
old_path @ (_, old_stability): (ModPath, Stability),
@@ -496,36 +466,33 @@ fn select_best_path(
}
}
-// FIXME: Remove allocations
/// Finds locations in `from.krate` from which `item` can be imported by `from`.
fn find_local_import_locations(
db: &dyn DefDatabase,
item: ItemInNs,
from: ModuleId,
-) -> Vec<(ModuleId, Name)> {
- let _p = tracing::span!(tracing::Level::INFO, "find_local_import_locations").entered();
+ def_map: &DefMap,
+ mut cb: impl FnMut(&Name, ModuleId),
+) {
+ let _p = tracing::info_span!("find_local_import_locations").entered();
// `from` can import anything below `from` with visibility of at least `from`, and anything
// above `from` with any visibility. That means we do not need to descend into private siblings
// of `from` (and similar).
- let def_map = from.def_map(db);
-
// Compute the initial worklist. We start with all direct child modules of `from` as well as all
// of its (recursive) parent modules.
- let data = &def_map[from.local_id];
- let mut worklist =
- data.children.values().map(|child| def_map.module_id(*child)).collect::<Vec<_>>();
- // FIXME: do we need to traverse out of block expressions here?
- for ancestor in iter::successors(from.containing_module(db), |m| m.containing_module(db)) {
- worklist.push(ancestor);
- }
+ let mut worklist = def_map[from.local_id]
+ .children
+ .values()
+ .map(|child| def_map.module_id(*child))
+ // FIXME: do we need to traverse out of block expressions here?
+ .chain(iter::successors(from.containing_module(db), |m| m.containing_module(db)))
+ .collect::<Vec<_>>();
+ let mut seen: FxHashSet<_> = FxHashSet::default();
let def_map = def_map.crate_root().def_map(db);
- let mut seen: FxHashSet<_> = FxHashSet::default();
-
- let mut locations = Vec::new();
while let Some(module) = worklist.pop() {
if !seen.insert(module) {
continue; // already processed this module
@@ -566,7 +533,7 @@ fn find_local_import_locations(
// the item and we're a submodule of it, so can we.
// Also this keeps the cached data smaller.
if declared || is_pub_or_explicit {
- locations.push((module, name.clone()));
+ cb(name, module);
}
}
}
@@ -578,8 +545,6 @@ fn find_local_import_locations(
}
}
}
-
- locations
}
#[cfg(test)]
@@ -633,15 +598,13 @@ mod tests {
.into_iter()
.cartesian_product([false, true])
{
- let found_path = find_path_inner(
- FindPathCtx {
- db: &db,
- prefix,
- cfg: ImportPathConfig { prefer_no_std: false, prefer_prelude },
- ignore_local_imports,
- },
+ let found_path = find_path(
+ &db,
resolved,
module,
+ prefix,
+ ignore_local_imports,
+ ImportPathConfig { prefer_no_std: false, prefer_prelude },
);
format_to!(
res,
diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs
index 10a1d65bb9..b9f8082391 100644
--- a/crates/hir-def/src/generics.rs
+++ b/crates/hir-def/src/generics.rs
@@ -11,7 +11,7 @@ use hir_expand::{
ExpandResult,
};
use intern::Interned;
-use la_arena::Arena;
+use la_arena::{Arena, RawIdx};
use once_cell::unsync::Lazy;
use stdx::impl_from;
use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds};
@@ -28,6 +28,9 @@ use crate::{
LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
};
+const SELF_PARAM_ID_IN_SELF: la_arena::Idx<TypeOrConstParamData> =
+ LocalTypeOrConstParamId::from_raw(RawIdx::from_u32(0));
+
/// Data about a generic type parameter (to a function, struct, impl, ...).
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TypeParamData {
@@ -403,12 +406,12 @@ impl GenericParamsCollector {
let (def_map, expander) = &mut **exp;
let module = expander.module.local_id;
- let resolver = |path| {
+ let resolver = |path: &_| {
def_map
.resolve_path(
db,
module,
- &path,
+ path,
crate::item_scope::BuiltinShadowMode::Other,
Some(MacroSubNs::Bang),
)
@@ -441,15 +444,18 @@ impl GenericParamsCollector {
impl GenericParams {
/// Number of Generic parameters (type_or_consts + lifetimes)
+ #[inline]
pub fn len(&self) -> usize {
self.type_or_consts.len() + self.lifetimes.len()
}
+ #[inline]
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Iterator of type_or_consts field
+ #[inline]
pub fn iter_type_or_consts(
&self,
) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> {
@@ -457,6 +463,7 @@ impl GenericParams {
}
/// Iterator of lifetimes field
+ #[inline]
pub fn iter_lt(
&self,
) -> impl DoubleEndedIterator<Item = (LocalLifetimeParamId, &LifetimeParamData)> {
@@ -467,7 +474,7 @@ impl GenericParams {
db: &dyn DefDatabase,
def: GenericDefId,
) -> Interned<GenericParams> {
- let _p = tracing::span!(tracing::Level::INFO, "generic_params_query").entered();
+ let _p = tracing::info_span!("generic_params_query").entered();
let krate = def.module(db).krate;
let cfg_options = db.crate_graph();
@@ -605,17 +612,18 @@ impl GenericParams {
})
}
- pub fn find_trait_self_param(&self) -> Option<LocalTypeOrConstParamId> {
- self.type_or_consts.iter().find_map(|(id, p)| {
- matches!(
- p,
- TypeOrConstParamData::TypeParamData(TypeParamData {
- provenance: TypeParamProvenance::TraitSelf,
- ..
- })
- )
- .then(|| id)
- })
+ pub fn trait_self_param(&self) -> Option<LocalTypeOrConstParamId> {
+ if self.type_or_consts.is_empty() {
+ return None;
+ }
+ matches!(
+ self.type_or_consts[SELF_PARAM_ID_IN_SELF],
+ TypeOrConstParamData::TypeParamData(TypeParamData {
+ provenance: TypeParamProvenance::TraitSelf,
+ ..
+ })
+ )
+ .then(|| SELF_PARAM_ID_IN_SELF)
}
pub fn find_lifetime_by_name(
diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs
index 2f7ebbfec1..fd6f4a3d08 100644
--- a/crates/hir-def/src/hir.rs
+++ b/crates/hir-def/src/hir.rs
@@ -503,11 +503,11 @@ impl BindingAnnotation {
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum BindingProblems {
- /// https://doc.rust-lang.org/stable/error_codes/E0416.html
+ /// <https://doc.rust-lang.org/stable/error_codes/E0416.html>
BoundMoreThanOnce,
- /// https://doc.rust-lang.org/stable/error_codes/E0409.html
+ /// <https://doc.rust-lang.org/stable/error_codes/E0409.html>
BoundInconsistently,
- /// https://doc.rust-lang.org/stable/error_codes/E0408.html
+ /// <https://doc.rust-lang.org/stable/error_codes/E0408.html>
NotBoundAcrossAll,
}
diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs
index 6e40293dbf..2b2db21a9f 100644
--- a/crates/hir-def/src/import_map.rs
+++ b/crates/hir-def/src/import_map.rs
@@ -73,7 +73,7 @@ impl ImportMap {
}
pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
- let _p = tracing::span!(tracing::Level::INFO, "import_map_query").entered();
+ let _p = tracing::info_span!("import_map_query").entered();
let map = Self::collect_import_map(db, krate);
@@ -124,7 +124,7 @@ impl ImportMap {
}
fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex {
- let _p = tracing::span!(tracing::Level::INFO, "collect_import_map").entered();
+ let _p = tracing::info_span!("collect_import_map").entered();
let def_map = db.crate_def_map(krate);
let mut map = FxIndexMap::default();
@@ -214,7 +214,7 @@ impl ImportMap {
is_type_in_ns: bool,
trait_import_info: &ImportInfo,
) {
- let _p = tracing::span!(tracing::Level::INFO, "collect_trait_assoc_items").entered();
+ let _p = tracing::info_span!("collect_trait_assoc_items").entered();
for &(ref assoc_item_name, item) in &db.trait_data(tr).items {
let module_def_id = match item {
AssocItemId::FunctionId(f) => ModuleDefId::from(f),
@@ -396,7 +396,7 @@ pub fn search_dependencies(
krate: CrateId,
query: &Query,
) -> FxHashSet<ItemInNs> {
- let _p = tracing::span!(tracing::Level::INFO, "search_dependencies", ?query).entered();
+ let _p = tracing::info_span!("search_dependencies", ?query).entered();
let graph = db.crate_graph();
diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs
index acda64c41f..c3b7a78301 100644
--- a/crates/hir-def/src/item_tree.rs
+++ b/crates/hir-def/src/item_tree.rs
@@ -99,7 +99,7 @@ pub struct ItemTree {
impl ItemTree {
pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
- let _p = tracing::span!(tracing::Level::INFO, "file_item_tree_query", ?file_id).entered();
+ let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered();
let syntax = db.parse_or_expand(file_id);
@@ -242,11 +242,11 @@ impl ItemVisibilities {
match &vis {
RawVisibility::Public => RawVisibilityId::PUB,
RawVisibility::Module(path, explicitiy) if path.segments().is_empty() => {
- match (&path.kind, explicitiy) {
- (PathKind::Super(0), VisibilityExplicitness::Explicit) => {
+ match (path.kind, explicitiy) {
+ (PathKind::SELF, VisibilityExplicitness::Explicit) => {
RawVisibilityId::PRIV_EXPLICIT
}
- (PathKind::Super(0), VisibilityExplicitness::Implicit) => {
+ (PathKind::SELF, VisibilityExplicitness::Implicit) => {
RawVisibilityId::PRIV_IMPLICIT
}
(PathKind::Crate, _) => RawVisibilityId::PUB_CRATE,
@@ -586,11 +586,11 @@ impl Index<RawVisibilityId> for ItemTree {
fn index(&self, index: RawVisibilityId) -> &Self::Output {
static VIS_PUB: RawVisibility = RawVisibility::Public;
static VIS_PRIV_IMPLICIT: RawVisibility = RawVisibility::Module(
- ModPath::from_kind(PathKind::Super(0)),
+ ModPath::from_kind(PathKind::SELF),
VisibilityExplicitness::Implicit,
);
static VIS_PRIV_EXPLICIT: RawVisibility = RawVisibility::Module(
- ModPath::from_kind(PathKind::Super(0)),
+ ModPath::from_kind(PathKind::SELF),
VisibilityExplicitness::Explicit,
);
static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module(
@@ -928,7 +928,7 @@ impl UseTree {
_ => None,
}
}
- (Some(prefix), PathKind::Super(0)) if path.segments().is_empty() => {
+ (Some(prefix), PathKind::SELF) if path.segments().is_empty() => {
// `some::path::self` == `some::path`
Some((prefix, ImportKind::TypeOnly))
}
diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs
index 3a07c67842..6d7836d5ae 100644
--- a/crates/hir-def/src/lang_item.rs
+++ b/crates/hir-def/src/lang_item.rs
@@ -91,7 +91,7 @@ impl LangItems {
db: &dyn DefDatabase,
krate: CrateId,
) -> Option<Arc<LangItems>> {
- let _p = tracing::span!(tracing::Level::INFO, "crate_lang_items_query").entered();
+ let _p = tracing::info_span!("crate_lang_items_query").entered();
let mut lang_items = LangItems::default();
@@ -163,7 +163,7 @@ impl LangItems {
start_crate: CrateId,
item: LangItem,
) -> Option<LangItemTarget> {
- let _p = tracing::span!(tracing::Level::INFO, "lang_item_query").entered();
+ let _p = tracing::info_span!("lang_item_query").entered();
if let Some(target) =
db.crate_lang_items(start_crate).and_then(|it| it.items.get(&item).copied())
{
@@ -183,7 +183,7 @@ impl LangItems {
) where
T: Into<AttrDefId> + Copy,
{
- let _p = tracing::span!(tracing::Level::INFO, "collect_lang_item").entered();
+ let _p = tracing::info_span!("collect_lang_item").entered();
if let Some(lang_item) = lang_attr(db, item.into()) {
self.items.entry(lang_item).or_insert_with(|| constructor(item));
}
@@ -199,7 +199,7 @@ pub(crate) fn notable_traits_in_deps(
db: &dyn DefDatabase,
krate: CrateId,
) -> Arc<[Arc<[TraitId]>]> {
- let _p = tracing::span!(tracing::Level::INFO, "notable_traits_in_deps", ?krate).entered();
+ let _p = tracing::info_span!("notable_traits_in_deps", ?krate).entered();
let crate_graph = db.crate_graph();
Arc::from_iter(
@@ -208,7 +208,7 @@ pub(crate) fn notable_traits_in_deps(
}
pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: CrateId) -> Option<Arc<[TraitId]>> {
- let _p = tracing::span!(tracing::Level::INFO, "crate_notable_traits", ?krate).entered();
+ let _p = tracing::info_span!("crate_notable_traits", ?krate).entered();
let mut traits = Vec::new();
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index 682d169adb..f6fe0c618a 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -56,6 +56,7 @@ pub mod find_path;
pub mod import_map;
pub mod visibility;
+use intern::Interned;
pub use rustc_abi as layout;
use triomphe::Arc;
@@ -72,7 +73,7 @@ use std::{
use base_db::{
impl_intern_key,
- salsa::{self, impl_intern_value_trivial},
+ salsa::{self, InternValueTrivial},
CrateId,
};
use hir_expand::{
@@ -90,7 +91,7 @@ use hir_expand::{
use item_tree::ExternBlock;
use la_arena::Idx;
use nameres::DefMap;
-use span::{AstIdNode, Edition, FileAstId, FileId, SyntaxContextId};
+use span::{AstIdNode, Edition, FileAstId, SyntaxContextId};
use stdx::impl_from;
use syntax::{ast, AstNode};
@@ -186,7 +187,7 @@ pub trait ItemTreeLoc {
macro_rules! impl_intern {
($id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
impl_intern_key!($id);
- impl_intern_value_trivial!($loc);
+ impl InternValueTrivial for $loc {}
impl_intern_lookup!(DefDatabase, $id, $loc, $intern, $lookup);
};
}
@@ -396,6 +397,23 @@ impl PartialEq<ModuleId> for CrateRootModuleId {
other.block.is_none() && other.local_id == DefMap::ROOT && self.krate == other.krate
}
}
+impl PartialEq<CrateRootModuleId> for ModuleId {
+ fn eq(&self, other: &CrateRootModuleId) -> bool {
+ other == self
+ }
+}
+
+impl From<CrateRootModuleId> for ModuleId {
+ fn from(CrateRootModuleId { krate }: CrateRootModuleId) -> Self {
+ ModuleId { krate, block: None, local_id: DefMap::ROOT }
+ }
+}
+
+impl From<CrateRootModuleId> for ModuleDefId {
+ fn from(value: CrateRootModuleId) -> Self {
+ ModuleDefId::ModuleId(value.into())
+ }
+}
impl From<CrateId> for CrateRootModuleId {
fn from(krate: CrateId) -> Self {
@@ -472,6 +490,7 @@ impl ModuleId {
self.block.is_some()
}
+ /// Returns the [`CrateRootModuleId`] for this module if it is the crate root module.
pub fn as_crate_root(&self) -> Option<CrateRootModuleId> {
if self.local_id == DefMap::ROOT && self.block.is_none() {
Some(CrateRootModuleId { krate: self.krate })
@@ -480,33 +499,17 @@ impl ModuleId {
}
}
+ /// Returns the [`CrateRootModuleId`] for this module.
pub fn derive_crate_root(&self) -> CrateRootModuleId {
CrateRootModuleId { krate: self.krate }
}
+ /// Whether this module represents the crate root module
fn is_crate_root(&self) -> bool {
self.local_id == DefMap::ROOT && self.block.is_none()
}
}
-impl PartialEq<CrateRootModuleId> for ModuleId {
- fn eq(&self, other: &CrateRootModuleId) -> bool {
- other == self
- }
-}
-
-impl From<CrateRootModuleId> for ModuleId {
- fn from(CrateRootModuleId { krate }: CrateRootModuleId) -> Self {
- ModuleId { krate, block: None, local_id: DefMap::ROOT }
- }
-}
-
-impl From<CrateRootModuleId> for ModuleDefId {
- fn from(value: CrateRootModuleId) -> Self {
- ModuleDefId::ModuleId(value.into())
- }
-}
-
/// An ID of a module, **local** to a `DefMap`.
pub type LocalModuleId = Idx<nameres::ModuleData>;
@@ -532,7 +535,7 @@ pub struct TypeOrConstParamId {
pub parent: GenericDefId,
pub local_id: LocalTypeOrConstParamId,
}
-impl_intern_value_trivial!(TypeOrConstParamId);
+impl InternValueTrivial for TypeOrConstParamId {}
/// A TypeOrConstParamId with an invariant that it actually belongs to a type
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -594,7 +597,7 @@ pub struct LifetimeParamId {
pub local_id: LocalLifetimeParamId,
}
pub type LocalLifetimeParamId = Idx<generics::LifetimeParamData>;
-impl_intern_value_trivial!(LifetimeParamId);
+impl InternValueTrivial for LifetimeParamId {}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ItemContainerId {
@@ -920,6 +923,7 @@ pub enum GenericDefId {
ImplId(ImplId),
// enum variants cannot have generics themselves, but their parent enums
// can, and this makes some code easier to write
+ // FIXME: Try to remove this as that will reduce the amount of query slots generated per enum?
EnumVariantId(EnumVariantId),
// consts can have type parameters from their parents (i.e. associated consts of traits)
ConstId(ConstId),
@@ -956,15 +960,14 @@ impl GenericDefId {
match self {
GenericDefId::FunctionId(it) => file_id_and_params_of_item_loc(db, it),
GenericDefId::TypeAliasId(it) => file_id_and_params_of_item_loc(db, it),
- GenericDefId::ConstId(_) => (FileId::BOGUS.into(), None),
GenericDefId::AdtId(AdtId::StructId(it)) => file_id_and_params_of_item_loc(db, it),
GenericDefId::AdtId(AdtId::UnionId(it)) => file_id_and_params_of_item_loc(db, it),
GenericDefId::AdtId(AdtId::EnumId(it)) => file_id_and_params_of_item_loc(db, it),
GenericDefId::TraitId(it) => file_id_and_params_of_item_loc(db, it),
GenericDefId::TraitAliasId(it) => file_id_and_params_of_item_loc(db, it),
GenericDefId::ImplId(it) => file_id_and_params_of_item_loc(db, it),
- // We won't be using this ID anyway
- GenericDefId::EnumVariantId(_) => (FileId::BOGUS.into(), None),
+ GenericDefId::ConstId(it) => (it.lookup(db).id.file_id(), None),
+ GenericDefId::EnumVariantId(it) => (it.lookup(db).id.file_id(), None),
}
}
@@ -1368,7 +1371,7 @@ pub trait AsMacroCall {
&self,
db: &dyn ExpandDatabase,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
+ resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
) -> Option<MacroCallId> {
self.as_call_id_with_errors(db, krate, resolver).ok()?.value
}
@@ -1377,7 +1380,7 @@ pub trait AsMacroCall {
&self,
db: &dyn ExpandDatabase,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
+ resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro>;
}
@@ -1386,7 +1389,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
&self,
db: &dyn ExpandDatabase,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
+ resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
@@ -1406,7 +1409,8 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
macro_call_as_call_id_with_eager(
db,
- &AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
+ ast_id,
+ &path,
call_site.ctx,
expands_to,
krate,
@@ -1420,11 +1424,15 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
#[derive(Clone, Debug, Eq, PartialEq)]
struct AstIdWithPath<T: AstIdNode> {
ast_id: AstId<T>,
- path: path::ModPath,
+ path: Interned<path::ModPath>,
}
impl<T: AstIdNode> AstIdWithPath<T> {
- fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> {
+ fn new(
+ file_id: HirFileId,
+ ast_id: FileAstId<T>,
+ path: Interned<path::ModPath>,
+ ) -> AstIdWithPath<T> {
AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path }
}
}
@@ -1435,30 +1443,39 @@ fn macro_call_as_call_id(
call_site: SyntaxContextId,
expand_to: ExpandTo,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
+ resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<Option<MacroCallId>, UnresolvedMacro> {
- macro_call_as_call_id_with_eager(db, call, call_site, expand_to, krate, resolver, resolver)
- .map(|res| res.value)
+ macro_call_as_call_id_with_eager(
+ db,
+ call.ast_id,
+ &call.path,
+ call_site,
+ expand_to,
+ krate,
+ resolver,
+ resolver,
+ )
+ .map(|res| res.value)
}
fn macro_call_as_call_id_with_eager(
db: &dyn ExpandDatabase,
- call: &AstIdWithPath<ast::MacroCall>,
+ ast_id: AstId<ast::MacroCall>,
+ path: &path::ModPath,
call_site: SyntaxContextId,
expand_to: ExpandTo,
krate: CrateId,
- resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
- eager_resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ resolver: impl FnOnce(&path::ModPath) -> Option<MacroDefId>,
+ eager_resolver: impl Fn(&path::ModPath) -> Option<MacroDefId>,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
- let def =
- resolver(call.path.clone()).ok_or_else(|| UnresolvedMacro { path: call.path.clone() })?;
+ let def = resolver(path).ok_or_else(|| UnresolvedMacro { path: path.clone() })?;
let res = match def.kind {
MacroDefKind::BuiltInEager(..) => expand_eager_macro_input(
db,
krate,
- &call.ast_id.to_node(db),
- call.ast_id,
+ &ast_id.to_node(db),
+ ast_id,
def,
call_site,
&|path| eager_resolver(path).filter(MacroDefId::is_fn_like),
@@ -1467,12 +1484,12 @@ fn macro_call_as_call_id_with_eager(
value: Some(def.make_call(
db,
krate,
- MacroCallKind::FnLike { ast_id: call.ast_id, expand_to, eager: None },
+ MacroCallKind::FnLike { ast_id, expand_to, eager: None },
call_site,
)),
err: None,
},
- _ => return Err(UnresolvedMacro { path: call.path.clone() }),
+ _ => return Err(UnresolvedMacro { path: path.clone() }),
};
Ok(res)
}
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs
index c5c26e26bc..4058159cef 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -1883,3 +1883,41 @@ fn test() {
"#]],
);
}
+
+#[test]
+fn test_pat_fragment_eof_17441() {
+ check(
+ r#"
+macro_rules! matches {
+ ($expression:expr, $pattern:pat $(if $guard:expr)? ) => {
+ match $expression {
+ $pattern $(if $guard)? => true,
+ _ => false
+ }
+ };
+}
+fn f() {
+ matches!(0, 10..);
+ matches!(0, 10.. if true);
+}
+ "#,
+ expect![[r#"
+macro_rules! matches {
+ ($expression:expr, $pattern:pat $(if $guard:expr)? ) => {
+ match $expression {
+ $pattern $(if $guard)? => true,
+ _ => false
+ }
+ };
+}
+fn f() {
+ match 0 {
+ 10.. =>true , _=>false
+ };
+ match 0 {
+ 10..if true =>true , _=>false
+ };
+}
+ "#]],
+ );
+}
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index 8904aca9f2..dc964b3c9a 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -96,7 +96,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
let res = macro_call
.as_call_id_with_errors(&db, krate, |path| {
resolver
- .resolve_path_as_macro(&db, &path, Some(MacroSubNs::Bang))
+ .resolve_path_as_macro(&db, path, Some(MacroSubNs::Bang))
.map(|(it, _)| db.macro_def(it))
})
.unwrap();
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index a3eab22fc4..162b6429c3 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -16,8 +16,8 @@
//!
//! This happens in the `raw` module, which parses a single source file into a
//! set of top-level items. Nested imports are desugared to flat imports in this
-//! phase. Macro calls are represented as a triple of (Path, Option<Name>,
-//! TokenTree).
+//! phase. Macro calls are represented as a triple of `(Path, Option<Name>,
+//! TokenTree)`.
//!
//! ## Collecting Modules
//!
@@ -333,7 +333,7 @@ impl DefMap {
let crate_graph = db.crate_graph();
let krate = &crate_graph[crate_id];
let name = krate.display_name.as_deref().unwrap_or_default();
- let _p = tracing::span!(tracing::Level::INFO, "crate_def_map_query", ?name).entered();
+ let _p = tracing::info_span!("crate_def_map_query", ?name).entered();
let module_data = ModuleData::new(
ModuleOrigin::CrateRoot { definition: krate.root_file_id },
diff --git a/crates/hir-def/src/nameres/attr_resolution.rs b/crates/hir-def/src/nameres/attr_resolution.rs
index 3cb0666edf..f842027d64 100644
--- a/crates/hir-def/src/nameres/attr_resolution.rs
+++ b/crates/hir-def/src/nameres/attr_resolution.rs
@@ -3,6 +3,7 @@
use base_db::CrateId;
use hir_expand::{
attrs::{Attr, AttrId, AttrInput},
+ inert_attr_macro::find_builtin_attr_idx,
MacroCallId, MacroCallKind, MacroDefId,
};
use span::SyntaxContextId;
@@ -10,7 +11,6 @@ use syntax::{ast, SmolStr};
use triomphe::Arc;
use crate::{
- attr::builtin::find_builtin_attr_idx,
db::DefDatabase,
item_scope::BuiltinShadowMode,
nameres::path_resolution::ResolveMode,
@@ -59,7 +59,7 @@ impl DefMap {
return Ok(ResolvedAttr::Other);
}
}
- None => return Err(UnresolvedMacro { path: ast_id.path }),
+ None => return Err(UnresolvedMacro { path: ast_id.path.as_ref().clone() }),
};
Ok(ResolvedAttr::Macro(attr_macro_as_call_id(
@@ -89,9 +89,12 @@ impl DefMap {
}
if segments.len() == 1 {
- let mut registered = self.data.registered_attrs.iter().map(SmolStr::as_str);
- let is_inert = find_builtin_attr_idx(&name).is_some() || registered.any(pred);
- return is_inert;
+ if find_builtin_attr_idx(&name).is_some() {
+ return true;
+ }
+ if self.data.registered_attrs.iter().map(SmolStr::as_str).any(pred) {
+ return true;
+ }
}
}
false
@@ -134,12 +137,12 @@ pub(super) fn derive_macro_as_call_id(
derive_pos: u32,
call_site: SyntaxContextId,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
+ resolver: impl Fn(&path::ModPath) -> Option<(MacroId, MacroDefId)>,
derive_macro_id: MacroCallId,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
- let (macro_id, def_id) = resolver(item_attr.path.clone())
+ let (macro_id, def_id) = resolver(&item_attr.path)
.filter(|(_, def_id)| def_id.is_derive())
- .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
+ .ok_or_else(|| UnresolvedMacro { path: item_attr.path.as_ref().clone() })?;
let call_id = def_id.make_call(
db.upcast(),
krate,
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 587997c473..6d2eb71549 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -10,18 +10,19 @@ use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
attrs::{Attr, AttrId},
- builtin_attr_macro::{find_builtin_attr, BuiltinAttrExpander},
+ builtin_attr_macro::find_builtin_attr,
builtin_derive_macro::find_builtin_derive,
builtin_fn_macro::find_builtin_macro,
name::{name, AsName, Name},
proc_macro::CustomProcMacroExpander,
ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
};
+use intern::Interned;
use itertools::{izip, Itertools};
use la_arena::Idx;
use limit::Limit;
use rustc_hash::{FxHashMap, FxHashSet};
-use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContextId};
+use span::{Edition, ErasedFileAstId, FileAstId, SyntaxContextId};
use syntax::ast;
use triomphe::Arc;
@@ -75,36 +76,23 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
let proc_macros = if krate.is_proc_macro {
match db.proc_macros().get(&def_map.krate) {
- Some(Ok(proc_macros)) => {
- Ok(proc_macros
- .iter()
- .enumerate()
- .map(|(idx, it)| {
- // FIXME: a hacky way to create a Name from string.
- let name = tt::Ident {
- text: it.name.clone(),
- span: Span {
- range: syntax::TextRange::empty(syntax::TextSize::new(0)),
- anchor: span::SpanAnchor {
- file_id: FileId::BOGUS,
- ast_id: span::ROOT_ERASED_FILE_AST_ID,
- },
- ctx: SyntaxContextId::ROOT,
- },
- };
- (
- name.as_name(),
- if it.disabled {
- CustomProcMacroExpander::disabled()
- } else {
- CustomProcMacroExpander::new(
- hir_expand::proc_macro::ProcMacroId::new(idx as u32),
- )
- },
- )
- })
- .collect())
- }
+ Some(Ok(proc_macros)) => Ok(proc_macros
+ .iter()
+ .enumerate()
+ .map(|(idx, it)| {
+ let name = Name::new_text_dont_use(it.name.clone());
+ (
+ name,
+ if it.disabled {
+ CustomProcMacroExpander::disabled()
+ } else {
+ CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId::new(
+ idx as u32,
+ ))
+ },
+ )
+ })
+ .collect()),
Some(Err(e)) => Err(e.clone().into_boxed_str()),
None => Err("No proc-macros present for crate".to_owned().into_boxed_str()),
}
@@ -270,12 +258,13 @@ struct DefCollector<'a> {
///
/// This also stores the attributes to skip when we resolve derive helpers and non-macro
/// non-builtin attributes in general.
+ // FIXME: There has to be a better way to do this
skip_attrs: FxHashMap<InFile<ModItem>, AttrId>,
}
impl DefCollector<'_> {
fn seed_with_top_level(&mut self) {
- let _p = tracing::span!(tracing::Level::INFO, "seed_with_top_level").entered();
+ let _p = tracing::info_span!("seed_with_top_level").entered();
let crate_graph = self.db.crate_graph();
let file_id = crate_graph[self.def_map.krate].root_file_id;
@@ -410,17 +399,17 @@ impl DefCollector<'_> {
}
fn resolution_loop(&mut self) {
- let _p = tracing::span!(tracing::Level::INFO, "DefCollector::resolution_loop").entered();
+ let _p = tracing::info_span!("DefCollector::resolution_loop").entered();
// main name resolution fixed-point loop.
let mut i = 0;
'resolve_attr: loop {
- let _p = tracing::span!(tracing::Level::INFO, "resolve_macros loop").entered();
+ let _p = tracing::info_span!("resolve_macros loop").entered();
'resolve_macros: loop {
self.db.unwind_if_cancelled();
{
- let _p = tracing::span!(tracing::Level::INFO, "resolve_imports loop").entered();
+ let _p = tracing::info_span!("resolve_imports loop").entered();
'resolve_imports: loop {
if self.resolve_imports() == ReachedFixedPoint::Yes {
@@ -446,7 +435,7 @@ impl DefCollector<'_> {
}
fn collect(&mut self) {
- let _p = tracing::span!(tracing::Level::INFO, "DefCollector::collect").entered();
+ let _p = tracing::info_span!("DefCollector::collect").entered();
self.resolution_loop();
@@ -794,7 +783,7 @@ impl DefCollector<'_> {
}
fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport {
- let _p = tracing::span!(tracing::Level::INFO, "resolve_import", import_path = %import.path.display(self.db.upcast()))
+ let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db.upcast()))
.entered();
tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition);
match import.source {
@@ -856,7 +845,7 @@ impl DefCollector<'_> {
}
fn record_resolved_import(&mut self, directive: &ImportDirective) {
- let _p = tracing::span!(tracing::Level::INFO, "record_resolved_import").entered();
+ let _p = tracing::info_span!("record_resolved_import").entered();
let module_id = directive.module_id;
let import = &directive.import;
@@ -1136,18 +1125,18 @@ impl DefCollector<'_> {
MacroSubNs::Attr
}
};
- let resolver = |path| {
+ let resolver = |path: &_| {
let resolved_res = self.def_map.resolve_path_fp_with_macro(
self.db,
ResolveMode::Other,
directive.module_id,
- &path,
+ path,
BuiltinShadowMode::Module,
Some(subns),
);
resolved_res.resolved_def.take_macros().map(|it| (it, self.db.macro_def(it)))
};
- let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
+ let resolver_def_id = |path: &_| resolver(path).map(|(_, it)| it);
match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
@@ -1250,22 +1239,28 @@ impl DefCollector<'_> {
}
}
- let def = match resolver_def_id(path.clone()) {
+ let def = match resolver_def_id(path) {
Some(def) if def.is_attribute() => def,
_ => return Resolved::No,
};
- let call_id =
- attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def);
- if let MacroDefId {
- kind:
- MacroDefKind::BuiltInAttr(
- BuiltinAttrExpander::Derive | BuiltinAttrExpander::DeriveConst,
- _,
- ),
- ..
- } = def
- {
+ // Skip #[test]/#[bench] expansion, which would merely result in more memory usage
+ // due to duplicating functions into macro expansions
+ if matches!(
+ def.kind,
+ MacroDefKind::BuiltInAttr(_, expander)
+ if expander.is_test() || expander.is_bench()
+ ) {
+ return recollect_without(self);
+ }
+
+ let call_id = || {
+ attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def)
+ };
+ if matches!(def,
+ MacroDefId { kind: MacroDefKind::BuiltInAttr(_, exp), .. }
+ if exp.is_derive()
+ ) {
// Resolved to `#[derive]`, we don't actually expand this attribute like
// normal (as that would just be an identity expansion with extra output)
// Instead we treat derive attributes special and apply them separately.
@@ -1290,9 +1285,14 @@ impl DefCollector<'_> {
match attr.parse_path_comma_token_tree(self.db.upcast()) {
Some(derive_macros) => {
+ let call_id = call_id();
let mut len = 0;
for (idx, (path, call_site)) in derive_macros.enumerate() {
- let ast_id = AstIdWithPath::new(file_id, ast_id.value, path);
+ let ast_id = AstIdWithPath::new(
+ file_id,
+ ast_id.value,
+ Interned::new(path),
+ );
self.unresolved_macros.push(MacroDirective {
module_id: directive.module_id,
depth: directive.depth + 1,
@@ -1312,13 +1312,6 @@ impl DefCollector<'_> {
// This is just a trick to be able to resolve the input to derives
// as proper paths in `Semantics`.
// Check the comment in [`builtin_attr_macro`].
- let call_id = attr_macro_as_call_id(
- self.db,
- file_ast_id,
- attr,
- self.def_map.krate,
- def,
- );
self.def_map.modules[directive.module_id]
.scope
.init_derive_attribute(ast_id, attr.id, call_id, len + 1);
@@ -1336,17 +1329,8 @@ impl DefCollector<'_> {
return recollect_without(self);
}
- // Skip #[test]/#[bench] expansion, which would merely result in more memory usage
- // due to duplicating functions into macro expansions
- if matches!(
- def.kind,
- MacroDefKind::BuiltInAttr(expander, _)
- if expander.is_test() || expander.is_bench()
- ) {
- return recollect_without(self);
- }
-
- if let MacroDefKind::ProcMacro(exp, ..) = def.kind {
+ let call_id = call_id();
+ if let MacroDefKind::ProcMacro(_, exp, _) = def.kind {
// If proc attribute macro expansion is disabled, skip expanding it here
if !self.db.expand_proc_attr_macros() {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
@@ -1430,7 +1414,7 @@ impl DefCollector<'_> {
fn finish(mut self) -> DefMap {
// Emit diagnostics for all remaining unexpanded macros.
- let _p = tracing::span!(tracing::Level::INFO, "DefCollector::finish").entered();
+ let _p = tracing::info_span!("DefCollector::finish").entered();
for directive in &self.unresolved_macros {
match &directive.kind {
@@ -1447,7 +1431,7 @@ impl DefCollector<'_> {
self.db,
ResolveMode::Other,
directive.module_id,
- &path,
+ path,
BuiltinShadowMode::Module,
Some(MacroSubNs::Bang),
);
@@ -1481,7 +1465,7 @@ impl DefCollector<'_> {
derive_index: *derive_pos as u32,
derive_macro_id: *derive_macro_id,
},
- ast_id.path.clone(),
+ ast_id.path.as_ref().clone(),
));
}
// These are diagnosed by `reseed_with_unresolved_attribute`, as that function consumes them
@@ -2116,7 +2100,7 @@ impl ModCollector<'_, '_> {
let ast_id = AstIdWithPath::new(
self.file_id(),
mod_item.ast_id(self.item_tree),
- attr.path.as_ref().clone(),
+ attr.path.clone(),
);
self.def_collector.unresolved_macros.push(MacroDirective {
module_id: self.module_id,
@@ -2162,19 +2146,7 @@ impl ModCollector<'_, '_> {
let name;
let name = match attrs.by_key("rustc_builtin_macro").string_value() {
Some(it) => {
- // FIXME: a hacky way to create a Name from string.
- name = tt::Ident {
- text: it.into(),
- span: Span {
- range: syntax::TextRange::empty(syntax::TextSize::new(0)),
- anchor: span::SpanAnchor {
- file_id: FileId::BOGUS,
- ast_id: span::ROOT_ERASED_FILE_AST_ID,
- },
- ctx: SyntaxContextId::ROOT,
- },
- }
- .as_name();
+ name = Name::new_text_dont_use(it.into());
&name
}
None => {
@@ -2310,7 +2282,7 @@ impl ModCollector<'_, '_> {
&MacroCall { ref path, ast_id, expand_to, ctxt }: &MacroCall,
container: ItemContainerId,
) {
- let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(path));
+ let ast_id = AstIdWithPath::new(self.file_id(), ast_id, path.clone());
let db = self.def_collector.db;
// FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define
@@ -2320,7 +2292,8 @@ impl ModCollector<'_, '_> {
// Case 1: try to resolve macro calls with single-segment name and expand macro_rules
if let Ok(res) = macro_call_as_call_id_with_eager(
db.upcast(),
- &ast_id,
+ ast_id.ast_id,
+ &ast_id.path,
ctxt,
expand_to,
self.def_collector.def_map.krate,
@@ -2347,7 +2320,7 @@ impl ModCollector<'_, '_> {
db,
ResolveMode::Other,
self.module_id,
- &path,
+ path,
BuiltinShadowMode::Module,
Some(MacroSubNs::Bang),
);
diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs
index d621f3a360..e797d19223 100644
--- a/crates/hir-def/src/nameres/path_resolution.rs
+++ b/crates/hir-def/src/nameres/path_resolution.rs
@@ -283,7 +283,7 @@ impl DefMap {
// If we have a different `DefMap` from `self` (the original `DefMap` we started
// with), resolve the remaining path segments in that `DefMap`.
let path =
- ModPath::from_segments(PathKind::Super(0), path.segments().iter().cloned());
+ ModPath::from_segments(PathKind::SELF, path.segments().iter().cloned());
return def_map.resolve_path_fp_with_macro(
db,
mode,
@@ -333,7 +333,7 @@ impl DefMap {
ModuleDefId::ModuleId(module) => {
if module.krate != self.krate {
let path = ModPath::from_segments(
- PathKind::Super(0),
+ PathKind::SELF,
path.segments()[i..].iter().cloned(),
);
tracing::debug!("resolving {:?} in other crate", path);
@@ -493,7 +493,12 @@ impl DefMap {
)
})
};
- let prelude = || self.resolve_in_prelude(db, name);
+ let prelude = || {
+ if self.block.is_some() && module == DefMap::ROOT {
+ return PerNs::none();
+ }
+ self.resolve_in_prelude(db, name)
+ };
from_legacy_macro
.or(from_scope_or_builtin)
diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs
index 6af5261411..2b555b3998 100644
--- a/crates/hir-def/src/path/lower.rs
+++ b/crates/hir-def/src/path/lower.rs
@@ -122,7 +122,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path
// don't break out if `self` is the last segment of a path, this mean we got a
// use tree like `foo::{self}` which we want to resolve as `foo`
if !segments.is_empty() {
- kind = PathKind::Super(0);
+ kind = PathKind::SELF;
break;
}
}
@@ -144,7 +144,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path
if segments.is_empty() && kind == PathKind::Plain && type_anchor.is_none() {
// plain empty paths don't exist, this means we got a single `self` segment as our path
- kind = PathKind::Super(0);
+ kind = PathKind::SELF;
}
// handle local_inner_macros :
diff --git a/crates/hir-def/src/per_ns.rs b/crates/hir-def/src/per_ns.rs
index 36ab62d0f7..19485c476f 100644
--- a/crates/hir-def/src/per_ns.rs
+++ b/crates/hir-def/src/per_ns.rs
@@ -86,7 +86,7 @@ impl PerNs {
}
pub fn filter_visibility(self, mut f: impl FnMut(Visibility) -> bool) -> PerNs {
- let _p = tracing::span!(tracing::Level::INFO, "PerNs::filter_visibility").entered();
+ let _p = tracing::info_span!("PerNs::filter_visibility").entered();
PerNs {
types: self.types.filter(|&(_, v, _)| f(v)),
values: self.values.filter(|&(_, v, _)| f(v)),
@@ -119,7 +119,7 @@ impl PerNs {
}
pub fn iter_items(self) -> impl Iterator<Item = (ItemInNs, Option<ImportOrExternCrate>)> {
- let _p = tracing::span!(tracing::Level::INFO, "PerNs::iter_items").entered();
+ let _p = tracing::info_span!("PerNs::iter_items").entered();
self.types
.map(|it| (ItemInNs::Types(it.0), it.2))
.into_iter()
diff --git a/crates/hir-def/src/pretty.rs b/crates/hir-def/src/pretty.rs
index d3135bba96..d08e063976 100644
--- a/crates/hir-def/src/pretty.rs
+++ b/crates/hir-def/src/pretty.rs
@@ -57,7 +57,7 @@ pub(crate) fn print_path(db: &dyn DefDatabase, path: &Path, buf: &mut dyn Write)
}
None => match path.kind() {
PathKind::Plain => {}
- PathKind::Super(0) => write!(buf, "self")?,
+ &PathKind::SELF => write!(buf, "self")?,
PathKind::Super(n) => {
for i in 0..*n {
if i == 0 {
diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs
index 1ef8fa772a..e08718fc83 100644
--- a/crates/hir-def/src/visibility.rs
+++ b/crates/hir-def/src/visibility.rs
@@ -27,10 +27,7 @@ pub enum RawVisibility {
impl RawVisibility {
pub(crate) const fn private() -> RawVisibility {
- RawVisibility::Module(
- ModPath::from_kind(PathKind::Super(0)),
- VisibilityExplicitness::Implicit,
- )
+ RawVisibility::Module(ModPath::from_kind(PathKind::SELF), VisibilityExplicitness::Implicit)
}
pub(crate) fn from_ast(
@@ -60,7 +57,7 @@ impl RawVisibility {
}
ast::VisibilityKind::PubCrate => ModPath::from_kind(PathKind::Crate),
ast::VisibilityKind::PubSuper => ModPath::from_kind(PathKind::Super(1)),
- ast::VisibilityKind::PubSelf => ModPath::from_kind(PathKind::Super(0)),
+ ast::VisibilityKind::PubSelf => ModPath::from_kind(PathKind::SELF),
ast::VisibilityKind::Pub => return RawVisibility::Public,
};
RawVisibility::Module(path, VisibilityExplicitness::Explicit)
diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index 85ec02ae07..db0feb055e 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -54,7 +54,7 @@ impl RawAttrs {
let span = span_map.span_for_range(comment.syntax().text_range());
Attr {
id,
- input: Some(Interned::new(AttrInput::Literal(tt::Literal {
+ input: Some(Box::new(AttrInput::Literal(tt::Literal {
text: SmolStr::new(format_smolstr!("\"{}\"", Self::escape_chars(doc))),
span,
}))),
@@ -199,7 +199,7 @@ impl AttrId {
pub struct Attr {
pub id: AttrId,
pub path: Interned<ModPath>,
- pub input: Option<Interned<AttrInput>>,
+ pub input: Option<Box<AttrInput>>,
pub ctxt: SyntaxContextId,
}
@@ -234,7 +234,7 @@ impl Attr {
})?);
let span = span_map.span_for_range(range);
let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
- Some(Interned::new(AttrInput::Literal(tt::Literal {
+ Some(Box::new(AttrInput::Literal(tt::Literal {
text: lit.token().text().into(),
span,
})))
@@ -245,7 +245,7 @@ impl Attr {
span,
DocCommentDesugarMode::ProcMacro,
);
- Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
+ Some(Box::new(AttrInput::TokenTree(Box::new(tree))))
} else {
None
};
@@ -281,12 +281,12 @@ impl Attr {
let input = match input.first() {
Some(tt::TokenTree::Subtree(tree)) => {
- Some(Interned::new(AttrInput::TokenTree(Box::new(tree.clone()))))
+ Some(Box::new(AttrInput::TokenTree(Box::new(tree.clone()))))
}
Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))) => {
let input = match input.get(1) {
Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => {
- Some(Interned::new(AttrInput::Literal(lit.clone())))
+ Some(Box::new(AttrInput::Literal(lit.clone())))
}
_ => None,
};
diff --git a/crates/hir-expand/src/builtin_attr_macro.rs b/crates/hir-expand/src/builtin_attr_macro.rs
index 9ff29b484d..2e115f4793 100644
--- a/crates/hir-expand/src/builtin_attr_macro.rs
+++ b/crates/hir-expand/src/builtin_attr_macro.rs
@@ -52,8 +52,6 @@ impl BuiltinAttrExpander {
register_builtin! {
(bench, Bench) => dummy_attr_expand,
- (cfg, Cfg) => dummy_attr_expand,
- (cfg_attr, CfgAttr) => dummy_attr_expand,
(cfg_accessible, CfgAccessible) => dummy_attr_expand,
(cfg_eval, CfgEval) => dummy_attr_expand,
(derive, Derive) => derive_expand,
diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs
index ba96ab6cc2..02fd431e4e 100644
--- a/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/crates/hir-expand/src/builtin_fn_macro.rs
@@ -67,6 +67,10 @@ impl BuiltinFnLikeExpander {
let span = span_with_def_site_ctxt(db, span, id);
self.expander()(db, id, tt, span)
}
+
+ pub fn is_asm(&self) -> bool {
+ matches!(self, Self::Asm | Self::GlobalAsm)
+ }
}
impl EagerExpander {
diff --git a/crates/hir-expand/src/cfg_process.rs b/crates/hir-expand/src/cfg_process.rs
index 9dd44262ba..55ae19068f 100644
--- a/crates/hir-expand/src/cfg_process.rs
+++ b/crates/hir-expand/src/cfg_process.rs
@@ -189,8 +189,8 @@ pub(crate) fn process_cfg_attrs(
// FIXME: #[cfg_eval] is not implemented. But it is not stable yet
let is_derive = match loc.def.kind {
MacroDefKind::BuiltInDerive(..)
- | MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _) => true,
- MacroDefKind::BuiltInAttr(expander, _) => expander.is_derive(),
+ | MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive) => true,
+ MacroDefKind::BuiltInAttr(_, expander) => expander.is_derive(),
_ => false,
};
if !is_derive {
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 12421bbe70..ad25a1168c 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -146,13 +146,11 @@ pub fn expand_speculative(
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let loc = db.lookup_intern_macro_call(actual_macro_call);
+ let (_, _, span) = db.macro_arg_considering_derives(actual_macro_call, &loc.kind);
- // FIXME: This BOGUS here is dangerous once the proc-macro server can call back into the database!
- let span_map = RealSpanMap::absolute(FileId::BOGUS);
+ let span_map = RealSpanMap::absolute(span.anchor.file_id);
let span_map = SpanMapRef::RealSpanMap(&span_map);
- let (_, _, span) = db.macro_arg_considering_derives(actual_macro_call, &loc.kind);
-
// Build the subtree and token mapping for the speculative args
let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => (
@@ -252,7 +250,7 @@ pub fn expand_speculative(
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion =
match loc.def.kind {
- MacroDefKind::ProcMacro(expander, _, ast) => {
+ MacroDefKind::ProcMacro(ast, expander, _) => {
let span = db.proc_macro_span(ast);
tt.delimiter = tt::Delimiter::invisible_spanned(span);
expander.expand(
@@ -266,22 +264,22 @@ pub fn expand_speculative(
span_with_mixed_site_ctxt(db, span, actual_macro_call),
)
}
- MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
+ MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
}
MacroDefKind::Declarative(it) => db
.decl_macro_expander(loc.krate, it)
.expand_unhygienic(db, tt, loc.def.krate, span, loc.def.edition),
- MacroDefKind::BuiltIn(it, _) => {
+ MacroDefKind::BuiltIn(_, it) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
- MacroDefKind::BuiltInDerive(it, ..) => {
+ MacroDefKind::BuiltInDerive(_, it) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
- MacroDefKind::BuiltInEager(it, _) => {
+ MacroDefKind::BuiltInEager(_, it) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
- MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt, span),
+ MacroDefKind::BuiltInAttr(_, it) => it.expand(db, actual_macro_call, &tt, span),
};
let expand_to = loc.expand_to();
@@ -334,7 +332,7 @@ fn parse_macro_expansion(
db: &dyn ExpandDatabase,
macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
- let _p = tracing::span!(tracing::Level::INFO, "parse_macro_expansion").entered();
+ let _p = tracing::info_span!("parse_macro_expansion").entered();
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let edition = loc.def.edition;
let expand_to = loc.expand_to();
@@ -493,7 +491,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
.map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()),
);
// If derive attribute we need to censor the derive input
- if matches!(loc.def.kind, MacroDefKind::BuiltInAttr(expander, ..) if expander.is_derive())
+ if matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive())
&& ast::Adt::can_cast(node.syntax().kind())
{
let adt = ast::Adt::cast(node.syntax().clone()).unwrap();
@@ -569,11 +567,11 @@ impl TokenExpander {
MacroDefKind::Declarative(ast_id) => {
TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
}
- MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
- MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
- MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
- MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
- MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
+ MacroDefKind::BuiltIn(_, expander) => TokenExpander::BuiltIn(expander),
+ MacroDefKind::BuiltInAttr(_, expander) => TokenExpander::BuiltInAttr(expander),
+ MacroDefKind::BuiltInDerive(_, expander) => TokenExpander::BuiltInDerive(expander),
+ MacroDefKind::BuiltInEager(_, expander) => TokenExpander::BuiltInEager(expander),
+ MacroDefKind::ProcMacro(_, expander, _) => TokenExpander::ProcMacro(expander),
}
}
}
@@ -588,7 +586,7 @@ fn macro_expand(
macro_call_id: MacroCallId,
loc: MacroCallLoc,
) -> ExpandResult<(CowArc<tt::Subtree>, MatchedArmIndex)> {
- let _p = tracing::span!(tracing::Level::INFO, "macro_expand").entered();
+ let _p = tracing::info_span!("macro_expand").entered();
let (ExpandResult { value: (tt, matched_arm), err }, span) = match loc.def.kind {
MacroDefKind::ProcMacro(..) => {
@@ -604,13 +602,13 @@ fn macro_expand(
MacroDefKind::Declarative(id) => db
.decl_macro_expander(loc.def.krate, id)
.expand(db, arg.clone(), macro_call_id, span),
- MacroDefKind::BuiltIn(it, _) => {
+ MacroDefKind::BuiltIn(_, it) => {
it.expand(db, macro_call_id, arg, span).map_err(Into::into).zip_val(None)
}
- MacroDefKind::BuiltInDerive(it, _) => {
+ MacroDefKind::BuiltInDerive(_, it) => {
it.expand(db, macro_call_id, arg, span).map_err(Into::into).zip_val(None)
}
- MacroDefKind::BuiltInEager(it, _) => {
+ MacroDefKind::BuiltInEager(_, it) => {
// This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
// That kind of expansion uses the ast id map of an eager macros input though which goes through
@@ -634,12 +632,12 @@ fn macro_expand(
}
res.zip_val(None)
}
- MacroDefKind::BuiltInAttr(it, _) => {
+ MacroDefKind::BuiltInAttr(_, it) => {
let mut res = it.expand(db, macro_call_id, arg, span);
fixup::reverse_fixups(&mut res.value, &undo_info);
res.zip_val(None)
}
- _ => unreachable!(),
+ MacroDefKind::ProcMacro(_, _, _) => unreachable!(),
};
(ExpandResult { value: res.value, err: res.err }, span)
}
@@ -678,8 +676,8 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
let loc = db.lookup_intern_macro_call(id);
let (macro_arg, undo_info, span) = db.macro_arg_considering_derives(id, &loc.kind);
- let (expander, ast) = match loc.def.kind {
- MacroDefKind::ProcMacro(expander, _, ast) => (expander, ast),
+ let (ast, expander) = match loc.def.kind {
+ MacroDefKind::ProcMacro(ast, expander, _) => (ast, expander),
_ => unreachable!(),
};
diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs
index 64e04bc08f..3e0d2dfa6c 100644
--- a/crates/hir-expand/src/eager.rs
+++ b/crates/hir-expand/src/eager.rs
@@ -39,7 +39,7 @@ pub fn expand_eager_macro_input(
ast_id: AstId<ast::MacroCall>,
def: MacroDefId,
call_site: SyntaxContextId,
- resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
+ resolver: &dyn Fn(&ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<MacroCallId>> {
let expand_to = ExpandTo::from_call_site(macro_call);
@@ -138,7 +138,7 @@ fn eager_macro_recur(
curr: InFile<SyntaxNode>,
krate: CrateId,
call_site: SyntaxContextId,
- macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
+ macro_resolver: &dyn Fn(&ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
let original = curr.value.clone_for_update();
@@ -172,7 +172,7 @@ fn eager_macro_recur(
let def = match call.path().and_then(|path| {
ModPath::from_src(db, path, &mut |range| span_map.span_at(range.start()).ctx)
}) {
- Some(path) => match macro_resolver(path.clone()) {
+ Some(path) => match macro_resolver(&path) {
Some(def) => def,
None => {
error =
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index 1ba85c5c7e..743fac50f4 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -1,6 +1,4 @@
//! Things to wrap other things in file ids.
-use std::iter;
-
use either::Either;
use span::{
AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr,
@@ -150,27 +148,16 @@ impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
}
}
+impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, &N> {
+ // unfortunately `syntax` collides with the impl above, because `&_` is fundamental
+ pub fn syntax_ref(&self) -> InFileWrapper<FileId, &SyntaxNode> {
+ self.with_value(self.value.syntax())
+ }
+}
+
// region:specific impls
impl InFile<&SyntaxNode> {
- /// Traverse up macro calls and skips the macro invocation node
- pub fn ancestors_with_macros(
- self,
- db: &dyn db::ExpandDatabase,
- ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
- let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
- Some(parent) => Some(node.with_value(parent)),
- None => db
- .lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
- .to_node_item(db)
- .syntax()
- .cloned()
- .map(|node| node.parent())
- .transpose(),
- };
- iter::successors(succ(&self.cloned()), succ)
- }
-
/// Falls back to the macro call range if the node cannot be mapped up fully.
///
/// For attributes and derives, this will point back to the attribute only.
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index 9ec2a83162..9fdf4aa4f7 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -4,7 +4,10 @@
use mbe::DocCommentDesugarMode;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
-use span::{ErasedFileAstId, Span, SpanAnchor, FIXUP_ERASED_FILE_AST_ID_MARKER};
+use span::{
+ ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, FIXUP_ERASED_FILE_AST_ID_MARKER,
+ ROOT_ERASED_FILE_AST_ID,
+};
use stdx::never;
use syntax::{
ast::{self, AstNode, HasLoopBody},
@@ -88,7 +91,6 @@ pub(crate) fn fixup_syntax(
preorder.skip_subtree();
continue;
}
-
// In some other situations, we can fix things by just appending some tokens.
match_ast! {
match node {
@@ -273,6 +275,51 @@
]);
}
},
+ ast::RecordExprField(it) => {
+ if let Some(colon) = it.colon_token() {
+ if it.name_ref().is_some() && it.expr().is_none() {
+ append.insert(colon.into(), vec![
+ Leaf::Ident(Ident {
+ text: "__ra_fixup".into(),
+ span: fake_span(node_range)
+ })
+ ]);
+ }
+ }
+ },
+ ast::Path(it) => {
+ if let Some(colon) = it.coloncolon_token() {
+ if it.segment().is_none() {
+ append.insert(colon.into(), vec![
+ Leaf::Ident(Ident {
+ text: "__ra_fixup".into(),
+ span: fake_span(node_range)
+ })
+ ]);
+ }
+ }
+ },
+ ast::ArgList(it) => {
+ if it.r_paren_token().is_none() {
+ append.insert(node.into(), vec![
+ Leaf::Punct(Punct {
+ span: fake_span(node_range),
+ char: ')',
+ spacing: Spacing::Alone
+ })
+ ]);
+ }
+ },
+ ast::ClosureExpr(it) => {
+ if it.body().is_none() {
+ append.insert(node.into(), vec![
+ Leaf::Ident(Ident {
+ text: "__ra_fixup".into(),
+ span: fake_span(node_range)
+ })
+ ]);
+ }
+ },
_ => (),
}
}
@@ -307,8 +365,13 @@ pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo)
tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
|| tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
) {
- tt.delimiter.close = Span::DUMMY;
- tt.delimiter.open = Span::DUMMY;
+ let span = |file_id| Span {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+ ctx: SyntaxContextId::ROOT,
+ };
+ tt.delimiter.open = span(tt.delimiter.open.anchor.file_id);
+ tt.delimiter.close = span(tt.delimiter.close.anchor.file_id);
}
reverse_fixups_(tt, undo_info);
}
@@ -751,4 +814,84 @@ fn foo () {loop { }}
"#]],
)
}
+
+ #[test]
+ fn fixup_path() {
+ check(
+ r#"
+fn foo() {
+ path::
+}
+"#,
+ expect![[r#"
+fn foo () {path :: __ra_fixup}
+"#]],
+ )
+ }
+
+ #[test]
+ fn fixup_record_ctor_field() {
+ check(
+ r#"
+fn foo() {
+ R { f: }
+}
+"#,
+ expect![[r#"
+fn foo () {R {f : __ra_fixup}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn no_fixup_record_ctor_field() {
+ check(
+ r#"
+fn foo() {
+ R { f: a }
+}
+"#,
+ expect![[r#"
+fn foo () {R {f : a}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn fixup_arg_list() {
+ check(
+ r#"
+fn foo() {
+ foo(a
+}
+"#,
+ expect![[r#"
+fn foo () { foo ( a ) }
+"#]],
+ );
+ check(
+ r#"
+fn foo() {
+ bar.foo(a
+}
+"#,
+ expect![[r#"
+fn foo () { bar . foo ( a ) }
+"#]],
+ );
+ }
+
+ #[test]
+ fn fixup_closure() {
+ check(
+ r#"
+fn foo() {
+ ||
+}
+"#,
+ expect![[r#"
+fn foo () {|| __ra_fixup}
+"#]],
+ );
+ }
}
diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs
index 097e760c70..cc02332207 100644
--- a/crates/hir-expand/src/hygiene.rs
+++ b/crates/hir-expand/src/hygiene.rs
@@ -4,7 +4,7 @@
//! Expansion, and Definition Contexts,” *Journal of Functional Programming* 22, no. 2
//! (March 1, 2012): 181–216, <https://doi.org/10.1017/S0956796812000093>.
//!
-//! Also see https://rustc-dev-guide.rust-lang.org/macro-expansion.html#hygiene-and-hierarchies
+//! Also see <https://rustc-dev-guide.rust-lang.org/macro-expansion.html#hygiene-and-hierarchies>
//!
//! # The Expansion Order Hierarchy
//!
diff --git a/crates/hir-def/src/attr/builtin.rs b/crates/hir-expand/src/inert_attr_macro.rs
index f4564c94bb..35fd85bf45 100644
--- a/crates/hir-def/src/attr/builtin.rs
+++ b/crates/hir-expand/src/inert_attr_macro.rs
@@ -36,11 +36,6 @@ pub fn find_builtin_attr_idx(name: &str) -> Option<usize> {
.copied()
}
-// impl AttributeTemplate {
-// const DEFAULT: AttributeTemplate =
-// AttributeTemplate { word: false, list: None, name_value_str: None };
-// }
-
/// A convenience macro for constructing attribute templates.
/// E.g., `template!(Word, List: "description")` means that the attribute
/// supports forms `#[attr]` and `#[attr(description)]`.
@@ -628,6 +623,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_safe_intrinsic, Normal, template!(Word), WarnFollowing,
"the `#[rustc_safe_intrinsic]` attribute is used internally to mark intrinsics as safe"
),
+ rustc_attr!(
+ rustc_deprecated_safe_2024, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_deprecated_safe_2024]` attribute marks functions as unsafe in Rust 2024",
+ ),
// ==========================================================================
// Internal attributes, Testing:
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 83e92565f4..a7150cf308 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -16,6 +16,7 @@ pub mod declarative;
pub mod eager;
pub mod files;
pub mod hygiene;
+pub mod inert_attr_macro;
pub mod mod_path;
pub mod name;
pub mod proc_macro;
@@ -30,7 +31,7 @@ use triomphe::Arc;
use std::{fmt, hash::Hash};
-use base_db::{salsa::impl_intern_value_trivial, CrateId, FileId};
+use base_db::{salsa::InternValueTrivial, CrateId, FileId};
use either::Either;
use span::{
Edition, ErasedFileAstId, FileAstId, FileRange, HirFileIdRepr, Span, SpanAnchor,
@@ -46,7 +47,7 @@ use crate::{
builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander,
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
- db::{ExpandDatabase, TokenExpander},
+ db::ExpandDatabase,
mod_path::ModPath,
proc_macro::{CustomProcMacroExpander, ProcMacroKind},
span_map::{ExpansionSpanMap, SpanMap},
@@ -172,7 +173,7 @@ pub struct MacroCallLoc {
pub kind: MacroCallKind,
pub ctxt: SyntaxContextId,
}
-impl_intern_value_trivial!(MacroCallLoc);
+impl InternValueTrivial for MacroCallLoc {}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroDefId {
@@ -186,11 +187,11 @@ pub struct MacroDefId {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroDefKind {
Declarative(AstId<ast::Macro>),
- BuiltIn(BuiltinFnLikeExpander, AstId<ast::Macro>),
- BuiltInAttr(BuiltinAttrExpander, AstId<ast::Macro>),
- BuiltInDerive(BuiltinDeriveExpander, AstId<ast::Macro>),
- BuiltInEager(EagerExpander, AstId<ast::Macro>),
- ProcMacro(CustomProcMacroExpander, ProcMacroKind, AstId<ast::Fn>),
+ BuiltIn(AstId<ast::Macro>, BuiltinFnLikeExpander),
+ BuiltInAttr(AstId<ast::Macro>, BuiltinAttrExpander),
+ BuiltInDerive(AstId<ast::Macro>, BuiltinDeriveExpander),
+ BuiltInEager(AstId<ast::Macro>, EagerExpander),
+ ProcMacro(AstId<ast::Fn>, CustomProcMacroExpander, ProcMacroKind),
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -252,9 +253,6 @@ pub trait HirFileIdExt {
/// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;
- /// Return expansion information if it is a macro-expansion file
- fn expansion_info(self, db: &dyn ExpandDatabase) -> Option<ExpansionInfo>;
-
fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>>;
}
@@ -308,11 +306,6 @@ impl HirFileIdExt for HirFileId {
}
}
- /// Return expansion information if it is a macro-expansion file
- fn expansion_info(self, db: &dyn ExpandDatabase) -> Option<ExpansionInfo> {
- Some(ExpansionInfo::new(db, self.macro_file()?))
- }
-
fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@@ -379,7 +372,7 @@ impl MacroFileIdExt for MacroFileId {
fn is_custom_derive(&self, db: &dyn ExpandDatabase) -> bool {
matches!(
db.lookup_intern_macro_call(self.macro_call_id).def.kind,
- MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
+ MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive)
)
}
@@ -416,8 +409,10 @@ impl MacroFileIdExt for MacroFileId {
}
fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool {
- let loc = db.lookup_intern_macro_call(self.macro_call_id);
- matches!(loc.kind, MacroCallKind::Attr { .. })
+ matches!(
+ db.lookup_intern_macro_call(self.macro_call_id).def.kind,
+ MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, _, ProcMacroKind::Attr)
+ )
}
fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool {
@@ -440,13 +435,13 @@ impl MacroDefId {
pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile<TextRange> {
match self.kind {
MacroDefKind::Declarative(id)
- | MacroDefKind::BuiltIn(_, id)
- | MacroDefKind::BuiltInAttr(_, id)
- | MacroDefKind::BuiltInDerive(_, id)
- | MacroDefKind::BuiltInEager(_, id) => {
+ | MacroDefKind::BuiltIn(id, _)
+ | MacroDefKind::BuiltInAttr(id, _)
+ | MacroDefKind::BuiltInDerive(id, _)
+ | MacroDefKind::BuiltInEager(id, _) => {
id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range())
}
- MacroDefKind::ProcMacro(_, _, id) => {
+ MacroDefKind::ProcMacro(id, _, _) => {
id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range())
}
}
@@ -454,12 +449,12 @@ impl MacroDefId {
pub fn ast_id(&self) -> Either<AstId<ast::Macro>, AstId<ast::Fn>> {
match self.kind {
- MacroDefKind::ProcMacro(.., id) => Either::Right(id),
+ MacroDefKind::ProcMacro(id, ..) => Either::Right(id),
MacroDefKind::Declarative(id)
- | MacroDefKind::BuiltIn(_, id)
- | MacroDefKind::BuiltInAttr(_, id)
- | MacroDefKind::BuiltInDerive(_, id)
- | MacroDefKind::BuiltInEager(_, id) => Either::Left(id),
+ | MacroDefKind::BuiltIn(id, _)
+ | MacroDefKind::BuiltInAttr(id, _)
+ | MacroDefKind::BuiltInDerive(id, _)
+ | MacroDefKind::BuiltInEager(id, _) => Either::Left(id),
}
}
@@ -470,7 +465,7 @@ impl MacroDefId {
pub fn is_attribute(&self) -> bool {
matches!(
self.kind,
- MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, ProcMacroKind::Attr, _)
+ MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, _, ProcMacroKind::Attr)
)
}
@@ -478,7 +473,7 @@ impl MacroDefId {
matches!(
self.kind,
MacroDefKind::BuiltInDerive(..)
- | MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
+ | MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive)
)
}
@@ -486,26 +481,26 @@ impl MacroDefId {
matches!(
self.kind,
MacroDefKind::BuiltIn(..)
- | MacroDefKind::ProcMacro(_, ProcMacroKind::Bang, _)
+ | MacroDefKind::ProcMacro(_, _, ProcMacroKind::Bang)
| MacroDefKind::BuiltInEager(..)
| MacroDefKind::Declarative(..)
)
}
pub fn is_attribute_derive(&self) -> bool {
- matches!(self.kind, MacroDefKind::BuiltInAttr(expander, ..) if expander.is_derive())
+ matches!(self.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive())
}
pub fn is_include(&self) -> bool {
- matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_include())
+ matches!(self.kind, MacroDefKind::BuiltInEager(_, expander) if expander.is_include())
}
pub fn is_include_like(&self) -> bool {
- matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_include_like())
+ matches!(self.kind, MacroDefKind::BuiltInEager(_, expander) if expander.is_include_like())
}
pub fn is_env_or_option_env(&self) -> bool {
- matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_env_or_option_env())
+ matches!(self.kind, MacroDefKind::BuiltInEager(_, expander) if expander.is_env_or_option_env())
}
}
@@ -702,16 +697,12 @@ impl MacroCallKind {
// simpler function calls if the map is only used once
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ExpansionInfo {
- pub expanded: InMacroFile<SyntaxNode>,
+ expanded: InMacroFile<SyntaxNode>,
/// The argument TokenTree or item for attributes
arg: InFile<Option<SyntaxNode>>,
- /// The `macro_rules!` or attribute input.
- attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
-
- macro_def: TokenExpander,
- macro_arg: Arc<tt::Subtree>,
- pub exp_map: Arc<ExpansionSpanMap>,
+ exp_map: Arc<ExpansionSpanMap>,
arg_map: SpanMap,
+ loc: MacroCallLoc,
}
impl ExpansionInfo {
@@ -719,14 +710,21 @@ impl ExpansionInfo {
self.expanded.clone()
}
- pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
- Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?))
+ pub fn arg(&self) -> InFile<Option<&SyntaxNode>> {
+ self.arg.as_ref().map(|it| it.as_ref())
}
pub fn call_file(&self) -> HirFileId {
self.arg.file_id
}
+ pub fn is_attr(&self) -> bool {
+ matches!(
+ self.loc.def.kind,
+ MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, _, ProcMacroKind::Attr)
+ )
+ }
+
/// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
///
/// Note this does a linear search through the entire backing vector of the spanmap.
@@ -811,49 +809,16 @@ impl ExpansionInfo {
}
pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let _p = tracing::info_span!("ExpansionInfo::new").entered();
+ let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let arg_tt = loc.kind.arg(db);
let arg_map = db.span_map(arg_tt.file_id);
- let macro_def = db.macro_expander(loc.def);
let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
- let (macro_arg, _, _) =
- db.macro_arg_considering_derives(macro_file.macro_call_id, &loc.kind);
-
- let def = loc.def.ast_id().left().and_then(|id| {
- let def_tt = match id.to_node(db) {
- ast::Macro::MacroRules(mac) => mac.token_tree()?,
- ast::Macro::MacroDef(_) if matches!(macro_def, TokenExpander::BuiltInAttr(_)) => {
- return None
- }
- ast::Macro::MacroDef(mac) => mac.body()?,
- };
- Some(InFile::new(id.file_id, def_tt))
- });
- let attr_input_or_mac_def = def.or_else(|| match loc.kind {
- MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
- // FIXME: handle `cfg_attr`
- let tt = collect_attrs(&ast_id.to_node(db))
- .nth(invoc_attr_index.ast_index())
- .and_then(|x| Either::left(x.1))?
- .token_tree()?;
- Some(InFile::new(ast_id.file_id, tt))
- }
- _ => None,
- });
-
- ExpansionInfo {
- expanded,
- arg: arg_tt,
- attr_input_or_mac_def,
- macro_arg,
- macro_def,
- exp_map,
- arg_map,
- }
+ ExpansionInfo { expanded, loc, arg: arg_tt, exp_map, arg_map }
}
}
diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs
index 46f8c2b9d8..12fdf88a2a 100644
--- a/crates/hir-expand/src/mod_path.rs
+++ b/crates/hir-expand/src/mod_path.rs
@@ -44,6 +44,10 @@ pub enum PathKind {
DollarCrate(CrateId),
}
+impl PathKind {
+ pub const SELF: PathKind = PathKind::Super(0);
+}
+
impl ModPath {
pub fn from_src(
db: &dyn ExpandDatabase,
@@ -96,7 +100,7 @@ impl ModPath {
pub fn textual_len(&self) -> usize {
let base = match self.kind {
PathKind::Plain => 0,
- PathKind::Super(0) => "self".len(),
+ PathKind::SELF => "self".len(),
PathKind::Super(i) => "super".len() * i as usize,
PathKind::Crate => "crate".len(),
PathKind::Abs => 0,
@@ -113,7 +117,7 @@ impl ModPath {
}
pub fn is_self(&self) -> bool {
- self.kind == PathKind::Super(0) && self.segments.is_empty()
+ self.kind == PathKind::SELF && self.segments.is_empty()
}
#[allow(non_snake_case)]
@@ -193,7 +197,7 @@ fn display_fmt_path(
};
match path.kind {
PathKind::Plain => {}
- PathKind::Super(0) => add_segment("self")?,
+ PathKind::SELF => add_segment("self")?,
PathKind::Super(n) => {
for _ in 0..n {
add_segment("super")?;
@@ -316,7 +320,7 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModP
tt::Leaf::Ident(tt::Ident { text, span }) if text == "$crate" => {
resolve_crate_root(db, span.ctx).map(PathKind::DollarCrate).unwrap_or(PathKind::Crate)
}
- tt::Leaf::Ident(tt::Ident { text, .. }) if text == "self" => PathKind::Super(0),
+ tt::Leaf::Ident(tt::Ident { text, .. }) if text == "self" => PathKind::SELF,
tt::Leaf::Ident(tt::Ident { text, .. }) if text == "super" => {
let mut deg = 1;
while let Some(tt::Leaf::Ident(tt::Ident { text, .. })) = leaves.next() {
diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs
index a31a111c91..8f1e32321e 100644
--- a/crates/hir-expand/src/quote.rs
+++ b/crates/hir-expand/src/quote.rs
@@ -231,7 +231,7 @@ mod tests {
const DUMMY: tt::Span = tt::Span {
range: TextRange::empty(TextSize::new(0)),
- anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
+ anchor: SpanAnchor { file_id: FileId::from_raw(0xe4e4e), ast_id: ROOT_ERASED_FILE_AST_ID },
ctx: SyntaxContextId::ROOT,
};
diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs
index e2446c3425..b706cef0b3 100644
--- a/crates/hir-ty/src/autoderef.rs
+++ b/crates/hir-ty/src/autoderef.rs
@@ -143,7 +143,7 @@ pub(crate) fn deref_by_trait(
table @ &mut InferenceTable { db, .. }: &mut InferenceTable<'_>,
ty: Ty,
) -> Option<Ty> {
- let _p = tracing::span!(tracing::Level::INFO, "deref_by_trait").entered();
+ let _p = tracing::info_span!("deref_by_trait").entered();
if table.resolve_ty_shallow(&ty).inference_var(Interner).is_some() {
// don't try to deref unknown variables
return None;
diff --git a/crates/hir-ty/src/builder.rs b/crates/hir-ty/src/builder.rs
index 41acd3555e..52411f94ad 100644
--- a/crates/hir-ty/src/builder.rs
+++ b/crates/hir-ty/src/builder.rs
@@ -14,10 +14,10 @@ use hir_def::{
use smallvec::SmallVec;
use crate::{
- consteval::unknown_const_as_generic, db::HirDatabase, error_lifetime,
- infer::unify::InferenceTable, primitive, to_assoc_type_id, to_chalk_trait_id, utils::generics,
- Binders, BoundVar, CallableSig, GenericArg, GenericArgData, Interner, ProjectionTy,
- Substitution, TraitRef, Ty, TyDefId, TyExt, TyKind,
+ consteval::unknown_const_as_generic, db::HirDatabase, error_lifetime, generics::generics,
+ infer::unify::InferenceTable, primitive, to_assoc_type_id, to_chalk_trait_id, Binders,
+ BoundVar, CallableSig, GenericArg, GenericArgData, Interner, ProjectionTy, Substitution,
+ TraitRef, Ty, TyDefId, TyExt, TyKind,
};
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -246,6 +246,7 @@ impl TyBuilder<()> {
/// - yield type of coroutine ([`Coroutine::Yield`](std::ops::Coroutine::Yield))
/// - return type of coroutine ([`Coroutine::Return`](std::ops::Coroutine::Return))
/// - generic parameters in scope on `parent`
+ ///
/// in this order.
///
/// This method prepopulates the builder with placeholder substitution of `parent`, so you
diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs
index 84ac8740ec..debae1fe12 100644
--- a/crates/hir-ty/src/chalk_db.rs
+++ b/crates/hir-ty/src/chalk_db.rs
@@ -20,13 +20,14 @@ use hir_expand::name::name;
use crate::{
db::{HirDatabase, InternedCoroutine},
display::HirDisplay,
- from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, make_binders,
- make_single_type_binders,
+ from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
+ generics::generics,
+ make_binders, make_single_type_binders,
mapping::{from_chalk, ToChalk, TypeAliasAsValue},
method_resolution::{TraitImpls, TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS},
to_assoc_type_id, to_chalk_trait_id,
traits::ChalkContext,
- utils::{generics, ClosureSubst},
+ utils::ClosureSubst,
wrap_empty_binders, AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId,
Interner, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef,
TraitRefExt, Ty, TyBuilder, TyExt, TyKind, WhereClause,
@@ -603,7 +604,6 @@ pub(crate) fn associated_ty_data_query(
// Lower bounds -- we could/should maybe move this to a separate query in `lower`
let type_alias_data = db.type_alias_data(type_alias);
let generic_params = generics(db.upcast(), type_alias.into());
- // let bound_vars = generic_params.bound_vars_subst(DebruijnIndex::INNERMOST);
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
let ctx = crate::TyLoweringContext::new(db, &resolver, type_alias.into())
.with_type_param_mode(crate::lower::ParamLoweringMode::Variable);
@@ -806,7 +806,7 @@ pub(crate) fn impl_datum_query(
krate: CrateId,
impl_id: ImplId,
) -> Arc<ImplDatum> {
- let _p = tracing::span!(tracing::Level::INFO, "impl_datum_query").entered();
+ let _p = tracing::info_span!("impl_datum_query").entered();
debug!("impl_datum {:?}", impl_id);
let impl_: hir_def::ImplId = from_chalk(db, impl_id);
impl_def_datum(db, krate, impl_id, impl_)
diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs
index d99ef6679e..4279c75651 100644
--- a/crates/hir-ty/src/chalk_ext.rs
+++ b/crates/hir-ty/src/chalk_ext.rs
@@ -12,12 +12,10 @@ use hir_def::{
};
use crate::{
- db::HirDatabase,
- from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
- to_chalk_trait_id,
- utils::{generics, ClosureSubst},
- AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Canonical, CanonicalVarKinds,
- ClosureId, DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy,
+ db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
+ from_placeholder_idx, generics::generics, to_chalk_trait_id, utils::ClosureSubst, AdtId,
+ AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, ClosureId,
+ DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy,
QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause,
};
diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index f09277a92e..8b6cde975f 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -15,10 +15,9 @@ use stdx::never;
use triomphe::Arc;
use crate::{
- db::HirDatabase, infer::InferenceContext, lower::ParamLoweringMode,
- mir::monomorphize_mir_body_bad, to_placeholder_idx, utils::Generics, Const, ConstData,
- ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, TraitEnvironment, Ty,
- TyBuilder,
+ db::HirDatabase, generics::Generics, infer::InferenceContext, lower::ParamLoweringMode,
+ mir::monomorphize_mir_body_bad, to_placeholder_idx, Const, ConstData, ConstScalar, ConstValue,
+ GenericArg, Interner, MemoryMap, Substitution, TraitEnvironment, Ty, TyBuilder,
};
use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError};
@@ -72,12 +71,12 @@ impl From<MirEvalError> for ConstEvalError {
}
}
-pub(crate) fn path_to_const(
+pub(crate) fn path_to_const<'g>(
db: &dyn HirDatabase,
resolver: &Resolver,
path: &Path,
mode: ParamLoweringMode,
- args: impl FnOnce() -> Option<Generics>,
+ args: impl FnOnce() -> Option<&'g Generics>,
debruijn: DebruijnIndex,
expected_ty: Ty,
) -> Option<Const> {
@@ -90,7 +89,7 @@ pub(crate) fn path_to_const(
}
ParamLoweringMode::Variable => {
let args = args();
- match args.as_ref().and_then(|args| args.type_or_const_param_idx(p.into())) {
+ match args.and_then(|args| args.type_or_const_param_idx(p.into())) {
Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)),
None => {
never!(
diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs
index d1ffd5046c..1b4584a18d 100644
--- a/crates/hir-ty/src/consteval/tests.rs
+++ b/crates/hir-ty/src/consteval/tests.rs
@@ -73,7 +73,7 @@ fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
Ok(t) => t,
Err(e) => {
let err = pretty_print_err(e, db);
- panic!("Error in evaluating goal: {}", err);
+ panic!("Error in evaluating goal: {err}");
}
};
match &r.data(Interner).value {
@@ -81,7 +81,7 @@ fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
ConstScalar::Bytes(b, mm) => {
check(b, mm);
}
- x => panic!("Expected number but found {:?}", x),
+ x => panic!("Expected number but found {x:?}"),
},
_ => panic!("result of const eval wasn't a concrete const"),
}
@@ -89,7 +89,7 @@ fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String {
let mut err = String::new();
- let span_formatter = |file, range| format!("{:?} {:?}", file, range);
+ let span_formatter = |file, range| format!("{file:?} {range:?}");
match e {
ConstEvalError::MirLowerError(e) => e.pretty_print(&mut err, &db, span_formatter),
ConstEvalError::MirEvalError(e) => e.pretty_print(&mut err, &db, span_formatter),
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index 90bf46b505..e951048021 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -5,7 +5,7 @@ use std::sync;
use base_db::{
impl_intern_key,
- salsa::{self, impl_intern_value_trivial},
+ salsa::{self, InternValueTrivial},
CrateId, Upcast,
};
use hir_def::{
@@ -21,11 +21,12 @@ use crate::{
chalk_db,
consteval::ConstEvalError,
layout::{Layout, LayoutError},
+ lower::{GenericDefaults, GenericPredicates},
method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
mir::{BorrowckResult, MirBody, MirLowerError},
- Binders, CallableDefId, ClosureId, Const, FnDefId, GenericArg, ImplTraitId, ImplTraits,
- InferenceResult, Interner, PolyFnSig, QuantifiedWhereClause, Substitution, TraitEnvironment,
- TraitRef, Ty, TyDefId, ValueTyDefId,
+ Binders, CallableDefId, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult,
+ Interner, PolyFnSig, QuantifiedWhereClause, Substitution, TraitEnvironment, TraitRef, Ty,
+ TyDefId, ValueTyDefId,
};
use hir_expand::name::Name;
@@ -147,7 +148,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
) -> Arc<[Binders<QuantifiedWhereClause>]>;
#[salsa::invoke(crate::lower::generic_predicates_query)]
- fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<QuantifiedWhereClause>]>;
+ fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates;
#[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent]
@@ -158,7 +159,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::lower::generic_defaults_query)]
#[salsa::cycle(crate::lower::generic_defaults_recover)]
- fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<GenericArg>]>;
+ fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults;
#[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>;
@@ -298,7 +299,8 @@ impl_intern_key!(InternedClosureId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedClosure(pub DefWithBodyId, pub ExprId);
-impl_intern_value_trivial!(InternedClosure);
+
+impl InternValueTrivial for InternedClosure {}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedCoroutineId(salsa::InternId);
@@ -306,7 +308,7 @@ impl_intern_key!(InternedCoroutineId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedCoroutine(pub DefWithBodyId, pub ExprId);
-impl_intern_value_trivial!(InternedCoroutine);
+impl InternValueTrivial for InternedCoroutine {}
/// This exists just for Chalk, because Chalk just has a single `FnDefId` where
/// we have different IDs for struct and enum variant constructors.
diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs
index ecbb1d4c60..15ecf9aafc 100644
--- a/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -43,7 +43,7 @@ mod allow {
}
pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec<IncorrectCase> {
- let _p = tracing::span!(tracing::Level::INFO, "incorrect_case").entered();
+ let _p = tracing::info_span!("incorrect_case").entered();
let mut validator = DeclValidator::new(db);
validator.validate_item(owner);
validator.sink
diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs
index a5a42c52af..ce3fa53f7a 100644
--- a/crates/hir-ty/src/diagnostics/expr.rs
+++ b/crates/hir-ty/src/diagnostics/expr.rs
@@ -65,8 +65,7 @@ impl BodyValidationDiagnostic {
owner: DefWithBodyId,
validate_lints: bool,
) -> Vec<BodyValidationDiagnostic> {
- let _p =
- tracing::span!(tracing::Level::INFO, "BodyValidationDiagnostic::collect").entered();
+ let _p = tracing::info_span!("BodyValidationDiagnostic::collect").entered();
let infer = db.infer(owner);
let body = db.body(owner);
let mut validator =
diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 081b4d83a8..22aa5c69bb 100644
--- a/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -13,7 +13,7 @@ use crate::{
};
pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> {
- let _p = tracing::span!(tracing::Level::INFO, "missing_unsafe").entered();
+ let _p = tracing::info_span!("missing_unsafe").entered();
let mut res = Vec::new();
let is_unsafe = match def {
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index 241690d008..66b5398b88 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -4,7 +4,7 @@
use std::{
fmt::{self, Debug},
- mem::size_of,
+ mem::{self, size_of},
};
use base_db::CrateId;
@@ -36,12 +36,13 @@ use crate::{
consteval::try_const_usize,
db::{HirDatabase, InternedClosure},
from_assoc_type_id, from_foreign_def_id, from_placeholder_idx,
+ generics::generics,
layout::Layout,
lt_from_placeholder_idx,
mapping::from_chalk,
mir::pad16,
primitive, to_assoc_type_id,
- utils::{self, detect_variant_from_bytes, generics, ClosureSubst},
+ utils::{self, detect_variant_from_bytes, ClosureSubst},
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const,
ConstScalar, ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime,
LifetimeData, LifetimeOutlives, MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt,
@@ -74,6 +75,8 @@ pub struct HirFormatter<'a> {
/// When rendering something that has a concept of "children" (like fields in a struct), this limits
/// how many should be rendered.
pub entity_limit: Option<usize>,
+ /// When rendering functions, whether to show the constraint from the container
+ show_container_bounds: bool,
omit_verbose_types: bool,
closure_style: ClosureStyle,
display_target: DisplayTarget,
@@ -101,6 +104,7 @@ pub trait HirDisplay {
omit_verbose_types: bool,
display_target: DisplayTarget,
closure_style: ClosureStyle,
+ show_container_bounds: bool,
) -> HirDisplayWrapper<'a, Self>
where
Self: Sized,
@@ -117,6 +121,7 @@ pub trait HirDisplay {
omit_verbose_types,
display_target,
closure_style,
+ show_container_bounds,
}
}
@@ -134,6 +139,7 @@ pub trait HirDisplay {
omit_verbose_types: false,
closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Diagnostics,
+ show_container_bounds: false,
}
}
@@ -155,6 +161,7 @@ pub trait HirDisplay {
omit_verbose_types: true,
closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Diagnostics,
+ show_container_bounds: false,
}
}
@@ -176,6 +183,7 @@ pub trait HirDisplay {
omit_verbose_types: true,
closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Diagnostics,
+ show_container_bounds: false,
}
}
@@ -198,6 +206,7 @@ pub trait HirDisplay {
omit_verbose_types: false,
closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::SourceCode { module_id, allow_opaque },
+ show_container_bounds: false,
}) {
Ok(()) => {}
Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"),
@@ -219,6 +228,29 @@ pub trait HirDisplay {
omit_verbose_types: false,
closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Test,
+ show_container_bounds: false,
+ }
+ }
+
+ /// Returns a String representation of `self` that shows the constraint from
+ /// the container for functions
+ fn display_with_container_bounds<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ show_container_bounds: bool,
+ ) -> HirDisplayWrapper<'a, Self>
+ where
+ Self: Sized,
+ {
+ HirDisplayWrapper {
+ db,
+ t: self,
+ max_size: None,
+ limited_size: None,
+ omit_verbose_types: false,
+ closure_style: ClosureStyle::ImplFn,
+ display_target: DisplayTarget::Diagnostics,
+ show_container_bounds,
}
}
}
@@ -277,6 +309,10 @@ impl HirFormatter<'_> {
pub fn omit_verbose_types(&self) -> bool {
self.omit_verbose_types
}
+
+ pub fn show_container_bounds(&self) -> bool {
+ self.show_container_bounds
+ }
}
#[derive(Clone, Copy)]
@@ -336,6 +372,7 @@ pub struct HirDisplayWrapper<'a, T> {
omit_verbose_types: bool,
closure_style: ClosureStyle,
display_target: DisplayTarget,
+ show_container_bounds: bool,
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
@@ -365,6 +402,7 @@ impl<T: HirDisplay> HirDisplayWrapper<'_, T> {
omit_verbose_types: self.omit_verbose_types,
display_target: self.display_target,
closure_style: self.closure_style,
+ show_container_bounds: self.show_container_bounds,
})
}
@@ -423,7 +461,7 @@ impl HirDisplay for ProjectionTy {
let proj_params_count =
self.substitution.len(Interner) - trait_ref.substitution.len(Interner);
let proj_params = &self.substitution.as_slice(Interner)[..proj_params_count];
- hir_fmt_generics(f, proj_params, None)
+ hir_fmt_generics(f, proj_params, None, None)
}
}
@@ -456,7 +494,7 @@ impl HirDisplay for Const {
ConstValue::Placeholder(idx) => {
let id = from_placeholder_idx(f.db, *idx);
let generics = generics(f.db.upcast(), id.parent);
- let param_data = &generics.params[id.local_id];
+ let param_data = &generics[id.local_id];
write!(f, "{}", param_data.name().unwrap().display(f.db.upcast()))?;
Ok(())
}
@@ -468,6 +506,7 @@ impl HirDisplay for Const {
f,
parameters.as_slice(Interner),
c.generic_def(f.db.upcast()),
+ None,
)?;
Ok(())
}
@@ -670,7 +709,7 @@ fn render_const_scalar(
TyKind::FnDef(..) => ty.hir_fmt(f),
TyKind::Function(_) | TyKind::Raw(_, _) => {
let it = u128::from_le_bytes(pad16(b, false));
- write!(f, "{:#X} as ", it)?;
+ write!(f, "{it:#X} as ")?;
ty.hir_fmt(f)
}
TyKind::Array(ty, len) => {
@@ -950,7 +989,7 @@ impl HirDisplay for Ty {
if parameters.len(Interner) > 0 {
let generics = generics(db.upcast(), def.into());
- let (parent_len, self_, type_, const_, impl_, lifetime) =
+ let (parent_len, self_param, type_, const_, impl_, lifetime) =
generics.provenance_split();
let parameters = parameters.as_slice(Interner);
// We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
@@ -958,7 +997,7 @@ impl HirDisplay for Ty {
// `parameters` are in the order of fn's params (including impl traits), fn's lifetimes
// parent's params (those from enclosing impl or trait, if any).
let (fn_params, other) =
- parameters.split_at(self_ + type_ + const_ + lifetime);
+ parameters.split_at(self_param as usize + type_ + const_ + lifetime);
let (_impl, parent_params) = other.split_at(impl_);
debug_assert_eq!(parent_params.len(), parent_len);
@@ -967,11 +1006,11 @@ impl HirDisplay for Ty {
let fn_params = generic_args_sans_defaults(f, Some(def.into()), fn_params);
write!(f, "<")?;
- hir_fmt_generic_arguments(f, parent_params)?;
+ hir_fmt_generic_arguments(f, parent_params, None)?;
if !parent_params.is_empty() && !fn_params.is_empty() {
write!(f, ", ")?;
}
- hir_fmt_generic_arguments(f, fn_params)?;
+ hir_fmt_generic_arguments(f, fn_params, None)?;
write!(f, ">")?;
}
}
@@ -1016,7 +1055,7 @@ impl HirDisplay for Ty {
let generic_def = self.as_generic_def(db);
- hir_fmt_generics(f, parameters.as_slice(Interner), generic_def)?;
+ hir_fmt_generics(f, parameters.as_slice(Interner), generic_def, None)?;
}
TyKind::AssociatedType(assoc_type_id, parameters) => {
let type_alias = from_assoc_type_id(*assoc_type_id);
@@ -1039,7 +1078,7 @@ impl HirDisplay for Ty {
f.end_location_link();
// Note that the generic args for the associated type come before those for the
// trait (including the self type).
- hir_fmt_generics(f, parameters.as_slice(Interner), None)
+ hir_fmt_generics(f, parameters.as_slice(Interner), None, None)
} else {
let projection_ty = ProjectionTy {
associated_ty_id: to_assoc_type_id(type_alias),
@@ -1141,7 +1180,7 @@ impl HirDisplay for Ty {
}
ClosureStyle::ClosureWithSubst => {
write!(f, "{{closure#{:?}}}", id.0.as_u32())?;
- return hir_fmt_generics(f, substs.as_slice(Interner), None);
+ return hir_fmt_generics(f, substs.as_slice(Interner), None, None);
}
_ => (),
}
@@ -1177,7 +1216,7 @@ impl HirDisplay for Ty {
TyKind::Placeholder(idx) => {
let id = from_placeholder_idx(db, *idx);
let generics = generics(db.upcast(), id.parent);
- let param_data = &generics.params[id.local_id];
+ let param_data = &generics[id.local_id];
match param_data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
@@ -1329,6 +1368,7 @@ fn hir_fmt_generics(
f: &mut HirFormatter<'_>,
parameters: &[GenericArg],
generic_def: Option<hir_def::GenericDefId>,
+ self_: Option<&Ty>,
) -> Result<(), HirDisplayError> {
if parameters.is_empty() {
return Ok(());
@@ -1348,7 +1388,7 @@ fn hir_fmt_generics(
});
if !parameters_to_write.is_empty() && !only_err_lifetimes {
write!(f, "<")?;
- hir_fmt_generic_arguments(f, parameters_to_write)?;
+ hir_fmt_generic_arguments(f, parameters_to_write, self_)?;
write!(f, ">")?;
}
@@ -1411,6 +1451,7 @@ fn generic_args_sans_defaults<'ga>(
fn hir_fmt_generic_arguments(
f: &mut HirFormatter<'_>,
parameters: &[GenericArg],
+ self_: Option<&Ty>,
) -> Result<(), HirDisplayError> {
let mut first = true;
let lifetime_offset = parameters.iter().position(|arg| arg.lifetime(Interner).is_some());
@@ -1432,11 +1473,13 @@ fn hir_fmt_generic_arguments(
continue;
}
- if !first {
+ if !mem::take(&mut first) {
write!(f, ", ")?;
}
- first = false;
- generic_arg.hir_fmt(f)?;
+ match self_ {
+ self_ @ Some(_) if generic_arg.ty(Interner) == self_ => write!(f, "Self")?,
+ _ => generic_arg.hir_fmt(f)?,
+ }
}
Ok(())
}
@@ -1559,12 +1602,16 @@ fn write_bounds_like_dyn_trait(
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
f.end_location_link();
if is_fn_trait {
- if let [_self, params @ ..] = trait_ref.substitution.as_slice(Interner) {
+ if let [self_, params @ ..] = trait_ref.substitution.as_slice(Interner) {
if let Some(args) =
params.first().and_then(|it| it.assert_ty_ref(Interner).as_tuple())
{
write!(f, "(")?;
- hir_fmt_generic_arguments(f, args.as_slice(Interner))?;
+ hir_fmt_generic_arguments(
+ f,
+ args.as_slice(Interner),
+ self_.ty(Interner),
+ )?;
write!(f, ")")?;
}
}
@@ -1574,10 +1621,10 @@ fn write_bounds_like_dyn_trait(
Some(trait_.into()),
trait_ref.substitution.as_slice(Interner),
);
- if let [_self, params @ ..] = params {
+ if let [self_, params @ ..] = params {
if !params.is_empty() {
write!(f, "<")?;
- hir_fmt_generic_arguments(f, params)?;
+ hir_fmt_generic_arguments(f, params, self_.ty(Interner))?;
// there might be assoc type bindings, so we leave the angle brackets open
angle_open = true;
}
@@ -1635,6 +1682,7 @@ fn write_bounds_like_dyn_trait(
hir_fmt_generic_arguments(
f,
&proj.substitution.as_slice(Interner)[..proj_arg_count],
+ None,
)?;
write!(f, ">")?;
}
@@ -1691,7 +1739,8 @@ fn fmt_trait_ref(
f.start_location_link(trait_.into());
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
f.end_location_link();
- hir_fmt_generics(f, &tr.substitution.as_slice(Interner)[1..], None)
+ let substs = tr.substitution.as_slice(Interner);
+ hir_fmt_generics(f, &substs[1..], None, substs[0].ty(Interner))
}
impl HirDisplay for TraitRef {
@@ -1749,7 +1798,7 @@ impl HirDisplay for LifetimeData {
LifetimeData::Placeholder(idx) => {
let id = lt_from_placeholder_idx(f.db, *idx);
let generics = generics(f.db.upcast(), id.parent);
- let param_data = &generics.params[id.local_id];
+ let param_data = &generics[id.local_id];
write!(f, "{}", param_data.name.display(f.db.upcast()))?;
Ok(())
}
@@ -1943,7 +1992,7 @@ impl HirDisplay for Path {
(_, PathKind::Plain) => {}
(_, PathKind::Abs) => {}
(_, PathKind::Crate) => write!(f, "crate")?,
- (_, PathKind::Super(0)) => write!(f, "self")?,
+ (_, &PathKind::SELF) => write!(f, "self")?,
(_, PathKind::Super(n)) => {
for i in 0..*n {
if i > 0 {
diff --git a/crates/hir-ty/src/generics.rs b/crates/hir-ty/src/generics.rs
new file mode 100644
index 0000000000..ea10e6881e
--- /dev/null
+++ b/crates/hir-ty/src/generics.rs
@@ -0,0 +1,263 @@
+//! Utilities for working with generics.
+//!
+//! The layout for generics as expected by chalk are as follows:
+//! - Optional Self parameter
+//! - Type or Const parameters
+//! - Lifetime parameters
+//! - Parent parameters
+//!
+//! where parent follows the same scheme.
+use std::ops;
+
+use chalk_ir::{cast::Cast as _, BoundVar, DebruijnIndex};
+use hir_def::{
+ db::DefDatabase,
+ generics::{
+ GenericParamDataRef, GenericParams, LifetimeParamData, TypeOrConstParamData,
+ TypeParamProvenance,
+ },
+ ConstParamId, GenericDefId, GenericParamId, ItemContainerId, LifetimeParamId,
+ LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
+};
+use intern::Interned;
+
+use crate::{db::HirDatabase, lt_to_placeholder_idx, to_placeholder_idx, Interner, Substitution};
+
+pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
+ let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
+ Generics { def, params: db.generic_params(def), parent_generics }
+}
+#[derive(Clone, Debug)]
+pub(crate) struct Generics {
+ def: GenericDefId,
+ params: Interned<GenericParams>,
+ parent_generics: Option<Box<Generics>>,
+}
+
+impl<T> ops::Index<T> for Generics
+where
+ GenericParams: ops::Index<T>,
+{
+ type Output = <GenericParams as ops::Index<T>>::Output;
+ fn index(&self, index: T) -> &Self::Output {
+ &self.params[index]
+ }
+}
+
+impl Generics {
+ pub(crate) fn def(&self) -> GenericDefId {
+ self.def
+ }
+
+ pub(crate) fn iter_id(&self) -> impl Iterator<Item = GenericParamId> + '_ {
+ self.iter_self_id().chain(self.iter_parent_id())
+ }
+
+ pub(crate) fn iter_self_id(&self) -> impl Iterator<Item = GenericParamId> + '_ {
+ self.iter_self().map(|(id, _)| id)
+ }
+
+ fn iter_parent_id(&self) -> impl Iterator<Item = GenericParamId> + '_ {
+ self.iter_parent().map(|(id, _)| id)
+ }
+
+ pub(crate) fn iter_self_type_or_consts(
+ &self,
+ ) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> {
+ self.params.iter_type_or_consts()
+ }
+
+ /// Iterate over the params followed by the parent params.
+ pub(crate) fn iter(
+ &self,
+ ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ {
+ self.iter_self().chain(self.iter_parent())
+ }
+
+ /// Iterate over the params without parent params.
+ pub(crate) fn iter_self(
+ &self,
+ ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ {
+ self.params
+ .iter_type_or_consts()
+ .map(from_toc_id(self))
+ .chain(self.params.iter_lt().map(from_lt_id(self)))
+ }
+
+    /// Iterate over the parent's params: types and consts followed by lifetimes.
+ fn iter_parent(
+ &self,
+ ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ {
+ self.parent_generics().into_iter().flat_map(|it| {
+ let lt_iter = it.params.iter_lt().map(from_lt_id(it));
+ it.params.iter_type_or_consts().map(from_toc_id(it)).chain(lt_iter)
+ })
+ }
+
+ /// Returns total number of generic parameters in scope, including those from parent.
+ pub(crate) fn len(&self) -> usize {
+ let parent = self.parent_generics().map_or(0, Generics::len);
+ let child = self.params.len();
+ parent + child
+ }
+
+    /// Returns the number of generic parameters excluding those from the parent.
+ pub(crate) fn len_self(&self) -> usize {
+ self.params.len()
+ }
+
+ /// (parent total, self param, type params, const params, impl trait list, lifetimes)
+ pub(crate) fn provenance_split(&self) -> (usize, bool, usize, usize, usize, usize) {
+ let mut self_param = false;
+ let mut type_params = 0;
+ let mut impl_trait_params = 0;
+ let mut const_params = 0;
+ self.params.iter_type_or_consts().for_each(|(_, data)| match data {
+ TypeOrConstParamData::TypeParamData(p) => match p.provenance {
+ TypeParamProvenance::TypeParamList => type_params += 1,
+ TypeParamProvenance::TraitSelf => self_param |= true,
+ TypeParamProvenance::ArgumentImplTrait => impl_trait_params += 1,
+ },
+ TypeOrConstParamData::ConstParamData(_) => const_params += 1,
+ });
+
+ let lifetime_params = self.params.iter_lt().count();
+
+ let parent_len = self.parent_generics().map_or(0, Generics::len);
+ (parent_len, self_param, type_params, const_params, impl_trait_params, lifetime_params)
+ }
+
+ pub(crate) fn type_or_const_param_idx(&self, param: TypeOrConstParamId) -> Option<usize> {
+ self.find_type_or_const_param(param)
+ }
+
+ fn find_type_or_const_param(&self, param: TypeOrConstParamId) -> Option<usize> {
+ if param.parent == self.def {
+ let idx = param.local_id.into_raw().into_u32() as usize;
+ debug_assert!(idx <= self.params.type_or_consts.len());
+ Some(idx)
+ } else {
+ debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(param.parent));
+ self.parent_generics()
+ .and_then(|g| g.find_type_or_const_param(param))
+ // Remember that parent parameters come after parameters for self.
+ .map(|idx| self.len_self() + idx)
+ }
+ }
+
+ pub(crate) fn lifetime_idx(&self, lifetime: LifetimeParamId) -> Option<usize> {
+ self.find_lifetime(lifetime)
+ }
+
+ fn find_lifetime(&self, lifetime: LifetimeParamId) -> Option<usize> {
+ if lifetime.parent == self.def {
+ let idx = lifetime.local_id.into_raw().into_u32() as usize;
+ debug_assert!(idx <= self.params.lifetimes.len());
+ Some(self.params.type_or_consts.len() + idx)
+ } else {
+ debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(lifetime.parent));
+ self.parent_generics()
+ .and_then(|g| g.find_lifetime(lifetime))
+ .map(|idx| self.len_self() + idx)
+ }
+ }
+
+ pub(crate) fn parent_generics(&self) -> Option<&Generics> {
+ self.parent_generics.as_deref()
+ }
+
+ pub(crate) fn parent_or_self(&self) -> &Generics {
+ self.parent_generics.as_deref().unwrap_or(self)
+ }
+
+ /// Returns a Substitution that replaces each parameter by a bound variable.
+ pub(crate) fn bound_vars_subst(
+ &self,
+ db: &dyn HirDatabase,
+ debruijn: DebruijnIndex,
+ ) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ self.iter_id().enumerate().map(|(idx, id)| match id {
+ GenericParamId::ConstParamId(id) => BoundVar::new(debruijn, idx)
+ .to_const(Interner, db.const_param_ty(id))
+ .cast(Interner),
+ GenericParamId::TypeParamId(_) => {
+ BoundVar::new(debruijn, idx).to_ty(Interner).cast(Interner)
+ }
+ GenericParamId::LifetimeParamId(_) => {
+ BoundVar::new(debruijn, idx).to_lifetime(Interner).cast(Interner)
+ }
+ }),
+ )
+ }
+
+ /// Returns a Substitution that replaces each parameter by itself (i.e. `Ty::Param`).
+ pub(crate) fn placeholder_subst(&self, db: &dyn HirDatabase) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ self.iter_id().map(|id| match id {
+ GenericParamId::TypeParamId(id) => {
+ to_placeholder_idx(db, id.into()).to_ty(Interner).cast(Interner)
+ }
+ GenericParamId::ConstParamId(id) => to_placeholder_idx(db, id.into())
+ .to_const(Interner, db.const_param_ty(id))
+ .cast(Interner),
+ GenericParamId::LifetimeParamId(id) => {
+ lt_to_placeholder_idx(db, id).to_lifetime(Interner).cast(Interner)
+ }
+ }),
+ )
+ }
+}
+
+fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
+ let container = match def {
+ GenericDefId::FunctionId(it) => it.lookup(db).container,
+ GenericDefId::TypeAliasId(it) => it.lookup(db).container,
+ GenericDefId::ConstId(it) => it.lookup(db).container,
+ GenericDefId::EnumVariantId(it) => return Some(it.lookup(db).parent.into()),
+ GenericDefId::AdtId(_)
+ | GenericDefId::TraitId(_)
+ | GenericDefId::ImplId(_)
+ | GenericDefId::TraitAliasId(_) => return None,
+ };
+
+ match container {
+ ItemContainerId::ImplId(it) => Some(it.into()),
+ ItemContainerId::TraitId(it) => Some(it.into()),
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
+ }
+}
+
+fn from_toc_id<'a>(
+ it: &'a Generics,
+) -> impl Fn(
+ (LocalTypeOrConstParamId, &'a TypeOrConstParamData),
+) -> (GenericParamId, GenericParamDataRef<'a>) {
+ move |(local_id, p): (_, _)| {
+ let id = TypeOrConstParamId { parent: it.def, local_id };
+ match p {
+ TypeOrConstParamData::TypeParamData(p) => (
+ GenericParamId::TypeParamId(TypeParamId::from_unchecked(id)),
+ GenericParamDataRef::TypeParamData(p),
+ ),
+ TypeOrConstParamData::ConstParamData(p) => (
+ GenericParamId::ConstParamId(ConstParamId::from_unchecked(id)),
+ GenericParamDataRef::ConstParamData(p),
+ ),
+ }
+ }
+}
+
+fn from_lt_id<'a>(
+ it: &'a Generics,
+) -> impl Fn((LocalLifetimeParamId, &'a LifetimeParamData)) -> (GenericParamId, GenericParamDataRef<'a>)
+{
+ move |(local_id, p): (_, _)| {
+ (
+ GenericParamId::LifetimeParamId(LifetimeParamId { parent: it.def, local_id }),
+ GenericParamDataRef::LifetimeParamData(p),
+ )
+ }
+}
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 6f2f70dd40..96431ba4ce 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -49,6 +49,7 @@ use hir_def::{
use hir_expand::name::{name, Name};
use indexmap::IndexSet;
use la_arena::{ArenaMap, Entry};
+use once_cell::unsync::OnceCell;
use rustc_hash::{FxHashMap, FxHashSet};
use stdx::{always, never};
use triomphe::Arc;
@@ -56,14 +57,15 @@ use triomphe::Arc;
use crate::{
db::HirDatabase,
error_lifetime, fold_tys,
+ generics::Generics,
infer::{coerce::CoerceMany, unify::InferenceTable},
lower::ImplTraitLoweringMode,
to_assoc_type_id,
traits::FnTrait,
- utils::{Generics, InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},
+ utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},
AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId,
- ImplTraitIdx, InEnvironment, Interner, Lifetime, OpaqueTyId, ProjectionTy, Substitution,
- TraitEnvironment, Ty, TyBuilder, TyExt,
+ ImplTraitIdx, InEnvironment, Interner, Lifetime, OpaqueTyId, ParamLoweringMode, ProjectionTy,
+ Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
};
// This lint has a false positive here. See the link below for details.
@@ -79,7 +81,7 @@ pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
/// The entry point of type inference.
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
- let _p = tracing::span!(tracing::Level::INFO, "infer_query").entered();
+ let _p = tracing::info_span!("infer_query").entered();
let resolver = def.resolver(db.upcast());
let body = db.body(def);
let mut ctx = InferenceContext::new(db, def, &body, resolver);
@@ -526,6 +528,7 @@ pub(crate) struct InferenceContext<'a> {
pub(crate) owner: DefWithBodyId,
pub(crate) body: &'a Body,
pub(crate) resolver: Resolver,
+ generics: OnceCell<Option<Generics>>,
table: unify::InferenceTable<'a>,
/// The traits in scope, disregarding block modules. This is used for caching purposes.
traits_in_scope: FxHashSet<TraitId>,
@@ -611,6 +614,7 @@ impl<'a> InferenceContext<'a> {
) -> Self {
let trait_env = db.trait_environment_for_body(owner);
InferenceContext {
+ generics: OnceCell::new(),
result: InferenceResult::default(),
table: unify::InferenceTable::new(db, trait_env),
tuple_field_accesses_rev: Default::default(),
@@ -632,8 +636,14 @@ impl<'a> InferenceContext<'a> {
}
}
- pub(crate) fn generics(&self) -> Option<Generics> {
- Some(crate::utils::generics(self.db.upcast(), self.resolver.generic_def()?))
+ pub(crate) fn generics(&self) -> Option<&Generics> {
+ self.generics
+ .get_or_init(|| {
+ self.resolver
+ .generic_def()
+ .map(|def| crate::generics::generics(self.db.upcast(), def))
+ })
+ .as_ref()
}
// FIXME: This function should be private in module. It is currently only used in the consteval, since we need
@@ -781,7 +791,8 @@ impl<'a> InferenceContext<'a> {
fn collect_fn(&mut self, func: FunctionId) {
let data = self.db.function_data(func);
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, func.into())
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into())
+ .with_type_param_mode(ParamLoweringMode::Placeholder)
.with_impl_trait_mode(ImplTraitLoweringMode::Param);
let mut param_tys =
data.params.iter().map(|type_ref| ctx.lower_ty(type_ref)).collect::<Vec<_>>();
@@ -816,6 +827,7 @@ impl<'a> InferenceContext<'a> {
let return_ty = &*data.ret_type;
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into())
+ .with_type_param_mode(ParamLoweringMode::Placeholder)
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque);
let return_ty = ctx.lower_ty(return_ty);
let return_ty = self.insert_type_vars(return_ty);
@@ -1263,7 +1275,7 @@ impl<'a> InferenceContext<'a> {
forbid_unresolved_segments((ty, Some(var.into())), unresolved)
}
TypeNs::SelfType(impl_id) => {
- let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
+ let generics = crate::generics::generics(self.db.upcast(), impl_id.into());
let substs = generics.placeholder_subst(self.db);
let mut ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index a25498eff3..b7c7b66545 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -22,11 +22,13 @@ use stdx::never;
use crate::{
db::{HirDatabase, InternedClosure},
- error_lifetime, from_chalk_trait_id, from_placeholder_idx, make_binders,
+ error_lifetime, from_chalk_trait_id, from_placeholder_idx,
+ generics::Generics,
+ make_binders,
mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem},
to_chalk_trait_id,
traits::FnTrait,
- utils::{self, elaborate_clause_supertraits, Generics},
+ utils::{self, elaborate_clause_supertraits},
Adjust, Adjustment, AliasEq, AliasTy, Binders, BindingMode, ChalkTraitId, ClosureId, DynTy,
DynTyExt, FnAbi, FnPointer, FnSig, Interner, OpaqueTy, ProjectionTyExt, Substitution, Ty,
TyExt, WhereClause,
@@ -337,7 +339,7 @@ impl CapturedItemWithoutTy {
fn replace_placeholder_with_binder(ctx: &mut InferenceContext<'_>, ty: Ty) -> Binders<Ty> {
struct Filler<'a> {
db: &'a dyn HirDatabase,
- generics: Generics,
+ generics: &'a Generics,
}
impl FallibleTypeFolder<Interner> for Filler<'_> {
type Error = ();
@@ -380,7 +382,7 @@ impl CapturedItemWithoutTy {
};
let filler = &mut Filler { db: ctx.db, generics };
let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty);
- make_binders(ctx.db, &filler.generics, result)
+ make_binders(ctx.db, filler.generics, result)
}
}
}
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index 4c12786362..95f28531ac 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -24,6 +24,7 @@ use crate::{
consteval,
db::{InternedClosure, InternedCoroutine},
error_lifetime,
+ generics::{generics, Generics},
infer::{
coerce::{CoerceMany, CoercionCause},
find_continuable,
@@ -39,7 +40,6 @@ use crate::{
primitive::{self, UintTy},
static_lifetime, to_chalk_trait_id,
traits::FnTrait,
- utils::{generics, Generics},
Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnAbi, FnPointer, FnSig,
FnSubst, Interner, Rawness, Scalar, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder,
TyExt, TyKind,
@@ -1830,13 +1830,13 @@ impl InferenceContext<'_> {
) -> Substitution {
let (
parent_params,
- self_params,
+ has_self_param,
type_params,
const_params,
impl_trait_params,
lifetime_params,
) = def_generics.provenance_split();
- assert_eq!(self_params, 0); // method shouldn't have another Self param
+ assert!(!has_self_param); // method shouldn't have another Self param
let total_len =
parent_params + type_params + const_params + impl_trait_params + lifetime_params;
let mut substs = Vec::with_capacity(total_len);
@@ -1844,13 +1844,11 @@ impl InferenceContext<'_> {
// handle provided arguments
if let Some(generic_args) = generic_args {
// if args are provided, it should be all of them, but we can't rely on that
- for (arg, kind_id) in generic_args
- .args
- .iter()
- .take(type_params + const_params + lifetime_params)
- .zip(def_generics.iter_id())
+ let self_params = type_params + const_params + lifetime_params;
+ for (arg, kind_id) in
+ generic_args.args.iter().zip(def_generics.iter_self_id()).take(self_params)
{
- if let Some(g) = generic_arg_to_chalk(
+ let arg = generic_arg_to_chalk(
self.db,
kind_id,
arg,
@@ -1869,9 +1867,8 @@ impl InferenceContext<'_> {
)
},
|this, lt_ref| this.make_lifetime(lt_ref),
- ) {
- substs.push(g);
- }
+ );
+ substs.push(arg);
}
};
diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs
index 9a1835b625..d876008cd5 100644
--- a/crates/hir-ty/src/infer/path.rs
+++ b/crates/hir-ty/src/infer/path.rs
@@ -12,11 +12,10 @@ use stdx::never;
use crate::{
builder::ParamKind,
consteval, error_lifetime,
+ generics::generics,
method_resolution::{self, VisibleFromModule},
- to_chalk_trait_id,
- utils::generics,
- InferenceDiagnostic, Interner, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt,
- TyKind, ValueTyDefId,
+ to_chalk_trait_id, InferenceDiagnostic, Interner, Substitution, TraitRef, TraitRefExt, Ty,
+ TyBuilder, TyExt, TyKind, ValueTyDefId,
};
use super::{ExprOrPatId, InferenceContext};
@@ -64,7 +63,7 @@ impl InferenceContext<'_> {
it.into()
}
ValueNs::ImplSelf(impl_id) => {
- let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
+ let generics = crate::generics::generics(self.db.upcast(), impl_id.into());
let substs = generics.placeholder_subst(self.db);
let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs
index 36e3a45889..ed4d55d203 100644
--- a/crates/hir-ty/src/infer/unify.rs
+++ b/crates/hir-ty/src/infer/unify.rs
@@ -613,8 +613,7 @@ impl<'a> InferenceTable<'a> {
}
pub(crate) fn resolve_obligations_as_possible(&mut self) {
- let _span =
- tracing::span!(tracing::Level::INFO, "resolve_obligations_as_possible").entered();
+ let _span = tracing::info_span!("resolve_obligations_as_possible").entered();
let mut changed = true;
let mut obligations = mem::take(&mut self.resolve_obligations_buffer);
while mem::take(&mut changed) {
diff --git a/crates/hir-ty/src/inhabitedness.rs b/crates/hir-ty/src/inhabitedness.rs
index 7546369d8d..f5fb2ffd78 100644
--- a/crates/hir-ty/src/inhabitedness.rs
+++ b/crates/hir-ty/src/inhabitedness.rs
@@ -15,7 +15,7 @@ use crate::{
// FIXME: Turn this into a query, it can be quite slow
/// Checks whether a type is visibly uninhabited from a particular module.
pub(crate) fn is_ty_uninhabited_from(db: &dyn HirDatabase, ty: &Ty, target_mod: ModuleId) -> bool {
- let _p = tracing::span!(tracing::Level::INFO, "is_ty_uninhabited_from", ?ty).entered();
+ let _p = tracing::info_span!("is_ty_uninhabited_from", ?ty).entered();
let mut uninhabited_from =
UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
let inhabitedness = ty.visit_with(&mut uninhabited_from, DebruijnIndex::INNERMOST);
@@ -30,7 +30,7 @@ pub(crate) fn is_enum_variant_uninhabited_from(
subst: &Substitution,
target_mod: ModuleId,
) -> bool {
- let _p = tracing::span!(tracing::Level::INFO, "is_enum_variant_uninhabited_from").entered();
+ let _p = tracing::info_span!("is_enum_variant_uninhabited_from").entered();
let mut uninhabited_from =
UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index 26a839f0e9..5e33e1285e 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -22,6 +22,7 @@ extern crate ra_ap_rustc_pattern_analysis as rustc_pattern_analysis;
mod builder;
mod chalk_db;
mod chalk_ext;
+mod generics;
mod infer;
mod inhabitedness;
mod interner;
@@ -52,7 +53,7 @@ use std::{
hash::{BuildHasherDefault, Hash},
};
-use base_db::salsa::impl_intern_value_trivial;
+use base_db::salsa::InternValueTrivial;
use chalk_ir::{
fold::{Shift, TypeFoldable},
interner::HasInterner,
@@ -67,11 +68,10 @@ use rustc_hash::{FxHashMap, FxHashSet};
use syntax::ast::{make, ConstArg};
use traits::FnTrait;
use triomphe::Arc;
-use utils::Generics;
use crate::{
- consteval::unknown_const, db::HirDatabase, display::HirDisplay, infer::unify::InferenceTable,
- utils::generics,
+ consteval::unknown_const, db::HirDatabase, display::HirDisplay, generics::Generics,
+ infer::unify::InferenceTable,
};
pub use autoderef::autoderef;
@@ -289,7 +289,7 @@ impl Hash for ConstScalar {
/// Return an index of a parameter in the generic type parameter list by it's id.
pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
- generics(db.upcast(), id.parent).type_or_const_param_idx(id)
+ generics::generics(db.upcast(), id.parent).type_or_const_param_idx(id)
}
pub(crate) fn wrap_empty_binders<T>(value: T) -> Binders<T>
@@ -330,18 +330,15 @@ pub(crate) fn make_single_type_binders<T: HasInterner<Interner = Interner>>(
)
}
-pub(crate) fn make_binders_with_count<T: HasInterner<Interner = Interner>>(
+pub(crate) fn make_binders<T: HasInterner<Interner = Interner>>(
db: &dyn HirDatabase,
- count: usize,
generics: &Generics,
value: T,
) -> Binders<T> {
- let it = generics.iter_id().take(count);
-
Binders::new(
VariableKinds::from_iter(
Interner,
- it.map(|x| match x {
+ generics.iter_id().map(|x| match x {
hir_def::GenericParamId::ConstParamId(id) => {
chalk_ir::VariableKind::Const(db.const_param_ty(id))
}
@@ -355,14 +352,6 @@ pub(crate) fn make_binders_with_count<T: HasInterner<Interner = Interner>>(
)
}
-pub(crate) fn make_binders<T: HasInterner<Interner = Interner>>(
- db: &dyn HirDatabase,
- generics: &Generics,
- value: T,
-) -> Binders<T> {
- make_binders_with_count(db, usize::MAX, generics, value)
-}
-
// FIXME: get rid of this, just replace it by FnPointer
/// A function signature as seen by type inference: Several parameter types and
/// one return type.
@@ -524,14 +513,16 @@ pub type PolyFnSig = Binders<CallableSig>;
impl CallableSig {
pub fn from_params_and_return(
- mut params: Vec<Ty>,
+ params: impl ExactSizeIterator<Item = Ty>,
ret: Ty,
is_varargs: bool,
safety: Safety,
abi: FnAbi,
) -> CallableSig {
- params.push(ret);
- CallableSig { params_and_return: params.into(), is_varargs, safety, abi }
+ let mut params_and_return = Vec::with_capacity(params.len() + 1);
+ params_and_return.extend(params);
+ params_and_return.push(ret);
+ CallableSig { params_and_return: params_and_return.into(), is_varargs, safety, abi }
}
pub fn from_def(db: &dyn HirDatabase, def: FnDefId, substs: &Substitution) -> CallableSig {
@@ -606,7 +597,7 @@ pub enum ImplTraitId {
AssociatedTypeImplTrait(hir_def::TypeAliasId, ImplTraitIdx),
AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
}
-impl_intern_value_trivial!(ImplTraitId);
+impl InternValueTrivial for ImplTraitId {}
#[derive(PartialEq, Eq, Debug, Hash)]
pub struct ImplTraits {
@@ -946,8 +937,7 @@ pub fn callable_sig_from_fn_trait(
.as_tuple()?
.iter(Interner)
.map(|it| it.assert_ty_ref(Interner))
- .cloned()
- .collect();
+ .cloned();
return Some((
fn_x,
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 04ace38202..96f545415e 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -8,10 +8,11 @@
use std::{
cell::{Cell, RefCell, RefMut},
iter,
+ ops::{self, Not as _},
};
use base_db::{
- salsa::{impl_intern_value_trivial, Cycle},
+ salsa::{Cycle, InternValueTrivial},
CrateId,
};
use chalk_ir::{
@@ -45,7 +46,9 @@ use hir_def::{
use hir_expand::{name::Name, ExpandResult};
use intern::Interned;
use la_arena::{Arena, ArenaMap};
+use once_cell::unsync::OnceCell;
use rustc_hash::FxHashSet;
+use rustc_pattern_analysis::Captures;
use smallvec::SmallVec;
use stdx::{impl_from, never};
use syntax::ast;
@@ -58,12 +61,13 @@ use crate::{
unknown_const_as_generic,
},
db::HirDatabase,
- error_lifetime, make_binders,
+ error_lifetime,
+ generics::{generics, Generics},
+ make_binders,
mapping::{from_chalk_trait_id, lt_to_placeholder_idx, ToChalk},
static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
utils::{
- self, all_super_trait_refs, associated_type_by_name_including_super_traits, generics,
- Generics, InTypeConstIdMetadata,
+ all_super_trait_refs, associated_type_by_name_including_super_traits, InTypeConstIdMetadata,
},
AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy,
FnAbi, FnPointer, FnSig, FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime,
@@ -121,6 +125,7 @@ impl ImplTraitLoweringState {
pub struct TyLoweringContext<'a> {
pub db: &'a dyn HirDatabase,
resolver: &'a Resolver,
+ generics: OnceCell<Option<Generics>>,
in_binders: DebruijnIndex,
// FIXME: Should not be an `Option` but `Resolver` currently does not return owners in all cases
// where expected
@@ -152,6 +157,7 @@ impl<'a> TyLoweringContext<'a> {
Self {
db,
resolver,
+ generics: OnceCell::new(),
owner,
in_binders,
impl_trait_mode,
@@ -174,6 +180,7 @@ impl<'a> TyLoweringContext<'a> {
impl_trait_mode,
expander: RefCell::new(expander),
unsized_types: RefCell::new(unsized_types),
+ generics: self.generics.clone(),
..*self
};
let result = f(&new_ctx);
@@ -245,8 +252,10 @@ impl<'a> TyLoweringContext<'a> {
)
}
- fn generics(&self) -> Option<Generics> {
- Some(generics(self.db.upcast(), self.resolver.generic_def()?))
+ fn generics(&self) -> Option<&Generics> {
+ self.generics
+ .get_or_init(|| self.resolver.generic_def().map(|def| generics(self.db.upcast(), def)))
+ .as_ref()
}
pub fn lower_ty_ext(&self, type_ref: &TypeRef) -> (Ty, Option<TypeNs>) {
@@ -374,7 +383,7 @@ impl<'a> TyLoweringContext<'a> {
counter.set(idx + count_impl_traits(type_ref) as u16);
let (
_parent_params,
- self_params,
+ self_param,
type_params,
const_params,
_impl_trait_params,
@@ -385,7 +394,7 @@ impl<'a> TyLoweringContext<'a> {
.provenance_split();
TyKind::BoundVar(BoundVar::new(
self.in_binders,
- idx as usize + self_params + type_params + const_params,
+ idx as usize + self_param as usize + type_params + const_params,
))
.intern(Interner)
}
@@ -416,9 +425,9 @@ impl<'a> TyLoweringContext<'a> {
};
let ty = {
let macro_call = macro_call.to_node(self.db.upcast());
- let resolver = |path| {
+ let resolver = |path: &_| {
self.resolver
- .resolve_path_as_macro(self.db.upcast(), &path, Some(MacroSubNs::Bang))
+ .resolve_path_as_macro(self.db.upcast(), path, Some(MacroSubNs::Bang))
.map(|(it, _)| it)
};
match expander.enter_expand::<ast::Type>(self.db.upcast(), macro_call, resolver)
@@ -705,7 +714,8 @@ impl<'a> TyLoweringContext<'a> {
None,
);
- let len_self = utils::generics(self.db.upcast(), associated_ty.into()).len_self();
+ let len_self =
+ crate::generics::generics(self.db.upcast(), associated_ty.into()).len_self();
let substs = Substitution::from_iter(
Interner,
@@ -815,14 +825,14 @@ impl<'a> TyLoweringContext<'a> {
let def_generics = generics(self.db.upcast(), def);
let (
parent_params,
- self_params,
+ self_param,
type_params,
const_params,
impl_trait_params,
lifetime_params,
) = def_generics.provenance_split();
let item_len =
- self_params + type_params + const_params + impl_trait_params + lifetime_params;
+ self_param as usize + type_params + const_params + impl_trait_params + lifetime_params;
let total_len = parent_params + item_len;
let ty_error = TyKind::Error.intern(Interner).cast(Interner);
@@ -830,18 +840,16 @@ impl<'a> TyLoweringContext<'a> {
let mut def_generic_iter = def_generics.iter_id();
let fill_self_params = || {
- for x in explicit_self_ty
- .into_iter()
- .map(|x| x.cast(Interner))
- .chain(iter::repeat(ty_error.clone()))
- .take(self_params)
- {
+ if self_param {
+ let self_ty =
+ explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(|| ty_error.clone());
+
if let Some(id) = def_generic_iter.next() {
assert!(matches!(
id,
GenericParamId::TypeParamId(_) | GenericParamId::LifetimeParamId(_)
));
- substs.push(x);
+ substs.push(self_ty);
}
}
};
@@ -852,11 +860,11 @@ impl<'a> TyLoweringContext<'a> {
fill_self_params();
}
let expected_num = if generic_args.has_self_type {
- self_params + type_params + const_params
+ self_param as usize + type_params + const_params
} else {
type_params + const_params
};
- let skip = if generic_args.has_self_type && self_params == 0 { 1 } else { 0 };
+ let skip = if generic_args.has_self_type && !self_param { 1 } else { 0 };
// if args are provided, it should be all of them, but we can't rely on that
for arg in generic_args
.args
@@ -866,7 +874,7 @@ impl<'a> TyLoweringContext<'a> {
.take(expected_num)
{
if let Some(id) = def_generic_iter.next() {
- if let Some(x) = generic_arg_to_chalk(
+ let arg = generic_arg_to_chalk(
self.db,
id,
arg,
@@ -874,13 +882,9 @@ impl<'a> TyLoweringContext<'a> {
|_, type_ref| self.lower_ty(type_ref),
|_, const_ref, ty| self.lower_const(const_ref, ty),
|_, lifetime_ref| self.lower_lifetime(lifetime_ref),
- ) {
- had_explicit_args = true;
- substs.push(x);
- } else {
- // we just filtered them out
- never!("Unexpected lifetime argument");
- }
+ );
+ had_explicit_args = true;
+ substs.push(arg);
}
}
@@ -893,7 +897,7 @@ impl<'a> TyLoweringContext<'a> {
// Taking into the fact that def_generic_iter will always have lifetimes at the end
// Should have some test cases tho to test this behaviour more properly
if let Some(id) = def_generic_iter.next() {
- if let Some(x) = generic_arg_to_chalk(
+ let arg = generic_arg_to_chalk(
self.db,
id,
arg,
@@ -901,13 +905,9 @@ impl<'a> TyLoweringContext<'a> {
|_, type_ref| self.lower_ty(type_ref),
|_, const_ref, ty| self.lower_const(const_ref, ty),
|_, lifetime_ref| self.lower_lifetime(lifetime_ref),
- ) {
- had_explicit_args = true;
- substs.push(x);
- } else {
- // Never return a None explicitly
- never!("Unexpected None by generic_arg_to_chalk");
- }
+ );
+ had_explicit_args = true;
+ substs.push(arg);
}
}
} else {
@@ -1176,7 +1176,7 @@ impl<'a> TyLoweringContext<'a> {
let ty = if let Some(target_param_idx) = target_param_idx {
let mut counter = 0;
let generics = self.generics().expect("generics in scope");
- for (idx, data) in generics.params.type_or_consts.iter() {
+ for (idx, data) in generics.iter_self_type_or_consts() {
// Count the number of `impl Trait` things that appear before
// the target of our `bound`.
// Our counter within `impl_trait_mode` should be that number
@@ -1478,7 +1478,7 @@ fn named_associated_type_shorthand_candidates<R>(
// Handle `Self::Type` referring to own associated type in trait definitions
if let GenericDefId::TraitId(trait_id) = param_id.parent() {
let trait_generics = generics(db.upcast(), trait_id.into());
- if trait_generics.params[param_id.local_id()].is_trait_self() {
+ if trait_generics[param_id.local_id()].is_trait_self() {
let def_generics = generics(db.upcast(), def);
let starting_idx = match def {
GenericDefId::TraitId(_) => 0,
@@ -1596,14 +1596,20 @@ pub(crate) fn generic_predicates_for_param_query(
.collect();
let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
- let explicitly_unsized_tys = ctx.unsized_types.into_inner();
- if let Some(implicitly_sized_predicates) =
- implicitly_sized_clauses(db, param_id.parent, &explicitly_unsized_tys, &subst, &resolver)
- {
- predicates.extend(
- implicitly_sized_predicates
- .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p))),
- );
+ if !subst.is_empty(Interner) {
+ let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+ if let Some(implicitly_sized_predicates) = implicitly_sized_clauses(
+ db,
+ param_id.parent,
+ &explicitly_unsized_tys,
+ &subst,
+ &resolver,
+ ) {
+ predicates.extend(
+ implicitly_sized_predicates
+ .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p))),
+ );
+ };
}
predicates.into()
}
@@ -1666,14 +1672,17 @@ pub(crate) fn trait_environment_query(
}
let subst = generics(db.upcast(), def).placeholder_subst(db);
- let explicitly_unsized_tys = ctx.unsized_types.into_inner();
- if let Some(implicitly_sized_clauses) =
- implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
- {
- clauses.extend(
- implicitly_sized_clauses
- .map(|pred| pred.cast::<ProgramClause>(Interner).into_from_env_clause(Interner)),
- );
+ if !subst.is_empty(Interner) {
+ let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+ if let Some(implicitly_sized_clauses) =
+ implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
+ {
+ clauses.extend(
+ implicitly_sized_clauses.map(|pred| {
+ pred.cast::<ProgramClause>(Interner).into_from_env_clause(Interner)
+ }),
+ );
+ };
}
let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
@@ -1681,20 +1690,32 @@ pub(crate) fn trait_environment_query(
TraitEnvironment::new(resolver.krate(), None, traits_in_scope.into_boxed_slice(), env)
}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericPredicates(Option<Arc<[Binders<QuantifiedWhereClause>]>>);
+
+impl ops::Deref for GenericPredicates {
+ type Target = [Binders<crate::QuantifiedWhereClause>];
+
+ fn deref(&self) -> &Self::Target {
+ self.0.as_deref().unwrap_or(&[])
+ }
+}
+
/// Resolve the where clause(s) of an item with generics.
pub(crate) fn generic_predicates_query(
db: &dyn HirDatabase,
def: GenericDefId,
-) -> Arc<[Binders<QuantifiedWhereClause>]> {
+) -> GenericPredicates {
let resolver = def.resolver(db.upcast());
- let ctx = if let GenericDefId::FunctionId(_) = def {
- TyLoweringContext::new(db, &resolver, def.into())
- .with_impl_trait_mode(ImplTraitLoweringMode::Variable)
- .with_type_param_mode(ParamLoweringMode::Variable)
- } else {
- TyLoweringContext::new(db, &resolver, def.into())
- .with_type_param_mode(ParamLoweringMode::Variable)
+ let (impl_trait_lowering, param_lowering) = match def {
+ GenericDefId::FunctionId(_) => {
+ (ImplTraitLoweringMode::Variable, ParamLoweringMode::Variable)
+ }
+ _ => (ImplTraitLoweringMode::Disallowed, ParamLoweringMode::Variable),
};
+ let ctx = TyLoweringContext::new(db, &resolver, def.into())
+ .with_impl_trait_mode(impl_trait_lowering)
+ .with_type_param_mode(param_lowering);
let generics = generics(db.upcast(), def);
let mut predicates = resolver
@@ -1705,27 +1726,29 @@ pub(crate) fn generic_predicates_query(
.collect::<Vec<_>>();
let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
- let explicitly_unsized_tys = ctx.unsized_types.into_inner();
- if let Some(implicitly_sized_predicates) =
- implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
- {
- predicates.extend(
- implicitly_sized_predicates
- .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p))),
- );
+ if !subst.is_empty(Interner) {
+ let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+ if let Some(implicitly_sized_predicates) =
+ implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
+ {
+ predicates.extend(
+ implicitly_sized_predicates
+ .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p))),
+ );
+ };
}
- predicates.into()
+ GenericPredicates(predicates.is_empty().not().then(|| predicates.into()))
}
/// Generate implicit `: Sized` predicates for all generics that has no `?Sized` bound.
/// Exception is Self of a trait def.
-fn implicitly_sized_clauses<'a>(
+fn implicitly_sized_clauses<'a, 'subst: 'a>(
db: &dyn HirDatabase,
def: GenericDefId,
explicitly_unsized_tys: &'a FxHashSet<Ty>,
- substitution: &'a Substitution,
+ substitution: &'subst Substitution,
resolver: &Resolver,
-) -> Option<impl Iterator<Item = WhereClause> + 'a> {
+) -> Option<impl Iterator<Item = WhereClause> + Captures<'a> + Captures<'subst>> {
let is_trait_def = matches!(def, GenericDefId::TraitId(..));
let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..];
let sized_trait = db
@@ -1746,71 +1769,84 @@ fn implicitly_sized_clauses<'a>(
})
}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericDefaults(Option<Arc<[Binders<crate::GenericArg>]>>);
+
+impl ops::Deref for GenericDefaults {
+ type Target = [Binders<crate::GenericArg>];
+
+ fn deref(&self) -> &Self::Target {
+ self.0.as_deref().unwrap_or(&[])
+ }
+}
+
/// Resolve the default type params from generics
-pub(crate) fn generic_defaults_query(
- db: &dyn HirDatabase,
- def: GenericDefId,
-) -> Arc<[Binders<crate::GenericArg>]> {
- let resolver = def.resolver(db.upcast());
- let ctx = TyLoweringContext::new(db, &resolver, def.into())
- .with_type_param_mode(ParamLoweringMode::Variable);
+pub(crate) fn generic_defaults_query(db: &dyn HirDatabase, def: GenericDefId) -> GenericDefaults {
let generic_params = generics(db.upcast(), def);
+ if generic_params.len() == 0 {
+ return GenericDefaults(None);
+ }
+ let resolver = def.resolver(db.upcast());
let parent_start_idx = generic_params.len_self();
- let defaults = Arc::from_iter(generic_params.iter().enumerate().map(|(idx, (id, p))| {
- match p {
- GenericParamDataRef::TypeParamData(p) => {
- let mut ty =
- p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
- // Each default can only refer to previous parameters.
- // Type variable default referring to parameter coming
- // after it is forbidden (FIXME: report diagnostic)
- ty = fallback_bound_vars(ty, idx, parent_start_idx);
- crate::make_binders(db, &generic_params, ty.cast(Interner))
- }
- GenericParamDataRef::ConstParamData(p) => {
- let GenericParamId::ConstParamId(id) = id else {
- unreachable!("Unexpected lifetime or type argument")
- };
+ let ctx = TyLoweringContext::new(db, &resolver, def.into())
+ .with_impl_trait_mode(ImplTraitLoweringMode::Disallowed)
+ .with_type_param_mode(ParamLoweringMode::Variable);
+ GenericDefaults(Some(Arc::from_iter(generic_params.iter().enumerate().map(
+ |(idx, (id, p))| {
+ match p {
+ GenericParamDataRef::TypeParamData(p) => {
+ let ty = p.default.as_ref().map_or(TyKind::Error.intern(Interner), |ty| {
+ // Each default can only refer to previous parameters.
+ // Type variable default referring to parameter coming
+ // after it is forbidden (FIXME: report diagnostic)
+ fallback_bound_vars(ctx.lower_ty(ty), idx, parent_start_idx)
+ });
+ crate::make_binders(db, &generic_params, ty.cast(Interner))
+ }
+ GenericParamDataRef::ConstParamData(p) => {
+ let GenericParamId::ConstParamId(id) = id else {
+ unreachable!("Unexpected lifetime or type argument")
+ };
- let mut val = p.default.as_ref().map_or_else(
- || unknown_const_as_generic(db.const_param_ty(id)),
- |c| {
- let c = ctx.lower_const(c, ctx.lower_ty(&p.ty));
- c.cast(Interner)
- },
- );
- // Each default can only refer to previous parameters, see above.
- val = fallback_bound_vars(val, idx, parent_start_idx);
- make_binders(db, &generic_params, val)
- }
- GenericParamDataRef::LifetimeParamData(_) => {
- make_binders(db, &generic_params, error_lifetime().cast(Interner))
+ let mut val = p.default.as_ref().map_or_else(
+ || unknown_const_as_generic(db.const_param_ty(id)),
+ |c| {
+ let c = ctx.lower_const(c, ctx.lower_ty(&p.ty));
+ c.cast(Interner)
+ },
+ );
+ // Each default can only refer to previous parameters, see above.
+ val = fallback_bound_vars(val, idx, parent_start_idx);
+ make_binders(db, &generic_params, val)
+ }
+ GenericParamDataRef::LifetimeParamData(_) => {
+ make_binders(db, &generic_params, error_lifetime().cast(Interner))
+ }
}
- }
- }));
-
- defaults
+ },
+ ))))
}
pub(crate) fn generic_defaults_recover(
db: &dyn HirDatabase,
_cycle: &Cycle,
def: &GenericDefId,
-) -> Arc<[Binders<crate::GenericArg>]> {
+) -> GenericDefaults {
let generic_params = generics(db.upcast(), *def);
+ if generic_params.len() == 0 {
+ return GenericDefaults(None);
+ }
// FIXME: this code is not covered in tests.
// we still need one default per parameter
- let defaults = Arc::from_iter(generic_params.iter_id().map(|id| {
+ GenericDefaults(Some(Arc::from_iter(generic_params.iter_id().map(|id| {
let val = match id {
GenericParamId::TypeParamId(_) => TyKind::Error.intern(Interner).cast(Interner),
GenericParamId::ConstParamId(id) => unknown_const_as_generic(db.const_param_ty(id)),
GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner),
};
crate::make_binders(db, &generic_params, val)
- }));
-
- defaults
+ }))))
}
fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
@@ -1819,7 +1855,7 @@ fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
let ctx_params = TyLoweringContext::new(db, &resolver, def.into())
.with_impl_trait_mode(ImplTraitLoweringMode::Variable)
.with_type_param_mode(ParamLoweringMode::Variable);
- let params = data.params.iter().map(|tr| ctx_params.lower_ty(tr)).collect::<Vec<_>>();
+ let params = data.params.iter().map(|tr| ctx_params.lower_ty(tr));
let ctx_ret = TyLoweringContext::new(db, &resolver, def.into())
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
.with_type_param_mode(ParamLoweringMode::Variable);
@@ -1873,7 +1909,7 @@ fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnS
let resolver = def.resolver(db.upcast());
let ctx = TyLoweringContext::new(db, &resolver, AdtId::from(def).into())
.with_type_param_mode(ParamLoweringMode::Variable);
- let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
+ let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref));
let (ret, binders) = type_for_adt(db, def.into()).into_value_and_skipped_binders();
Binders::new(
binders,
@@ -1905,7 +1941,7 @@ fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId)
let resolver = def.resolver(db.upcast());
let ctx = TyLoweringContext::new(db, &resolver, DefWithBodyId::VariantId(def).into())
.with_type_param_mode(ParamLoweringMode::Variable);
- let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
+ let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref));
let (ret, binders) =
type_for_adt(db, def.lookup(db.upcast()).parent.into()).into_value_and_skipped_binders();
Binders::new(
@@ -1965,7 +2001,9 @@ pub enum CallableDefId {
StructId(StructId),
EnumVariantId(EnumVariantId),
}
-impl_intern_value_trivial!(CallableDefId);
+
+impl InternValueTrivial for CallableDefId {}
+
impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
impl From<CallableDefId> for ModuleDefId {
fn from(def: CallableDefId) -> ModuleDefId {
@@ -2166,7 +2204,6 @@ pub(crate) fn lower_to_chalk_mutability(m: hir_def::type_ref::Mutability) -> Mut
/// Checks if the provided generic arg matches its expected kind, then lower them via
/// provided closures. Use unknown if there was kind mismatch.
///
-/// Returns `Some` of the lowered generic arg. `None` if the provided arg is a lifetime.
pub(crate) fn generic_arg_to_chalk<'a, T>(
db: &dyn HirDatabase,
kind_id: GenericParamId,
@@ -2175,7 +2212,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a,
for_const: impl FnOnce(&mut T, &ConstRef, Ty) -> Const + 'a,
for_lifetime: impl FnOnce(&mut T, &LifetimeRef) -> Lifetime + 'a,
-) -> Option<crate::GenericArg> {
+) -> crate::GenericArg {
let kind = match kind_id {
GenericParamId::TypeParamId(_) => ParamKind::Type,
GenericParamId::ConstParamId(id) => {
@@ -2184,7 +2221,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
}
GenericParamId::LifetimeParamId(_) => ParamKind::Lifetime,
};
- Some(match (arg, kind) {
+ match (arg, kind) {
(GenericArg::Type(type_ref), ParamKind::Type) => for_type(this, type_ref).cast(Interner),
(GenericArg::Const(c), ParamKind::Const(c_ty)) => for_const(this, c, c_ty).cast(Interner),
(GenericArg::Lifetime(lifetime_ref), ParamKind::Lifetime) => {
@@ -2197,11 +2234,12 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
// as types. Maybe here is not the best place to do it, but
// it works.
if let TypeRef::Path(p) = t {
- let p = p.mod_path()?;
- if p.kind == PathKind::Plain {
- if let [n] = p.segments() {
- let c = ConstRef::Path(n.clone());
- return Some(for_const(this, &c, c_ty).cast(Interner));
+ if let Some(p) = p.mod_path() {
+ if p.kind == PathKind::Plain {
+ if let [n] = p.segments() {
+ let c = ConstRef::Path(n.clone());
+ return for_const(this, &c, c_ty).cast(Interner);
+ }
}
}
}
@@ -2210,17 +2248,17 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
(GenericArg::Lifetime(_), ParamKind::Const(c_ty)) => unknown_const_as_generic(c_ty),
(GenericArg::Type(_), ParamKind::Lifetime) => error_lifetime().cast(Interner),
(GenericArg::Const(_), ParamKind::Lifetime) => error_lifetime().cast(Interner),
- })
+ }
}
-pub(crate) fn const_or_path_to_chalk(
+pub(crate) fn const_or_path_to_chalk<'g>(
db: &dyn HirDatabase,
resolver: &Resolver,
owner: TypeOwnerId,
expected_ty: Ty,
value: &ConstRef,
mode: ParamLoweringMode,
- args: impl FnOnce() -> Option<Generics>,
+ args: impl FnOnce() -> Option<&'g Generics>,
debruijn: DebruijnIndex,
) -> Const {
match value {
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index cb56a6f0bf..5ce124d6d2 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -144,8 +144,7 @@ pub struct TraitImpls {
impl TraitImpls {
pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
- let _p =
- tracing::span!(tracing::Level::INFO, "trait_impls_in_crate_query", ?krate).entered();
+ let _p = tracing::info_span!("trait_impls_in_crate_query", ?krate).entered();
let mut impls = FxHashMap::default();
Self::collect_def_map(db, &mut impls, &db.crate_def_map(krate));
@@ -157,7 +156,7 @@ impl TraitImpls {
db: &dyn HirDatabase,
block: BlockId,
) -> Option<Arc<Self>> {
- let _p = tracing::span!(tracing::Level::INFO, "trait_impls_in_block_query").entered();
+ let _p = tracing::info_span!("trait_impls_in_block_query").entered();
let mut impls = FxHashMap::default();
Self::collect_def_map(db, &mut impls, &db.block_def_map(block));
@@ -173,8 +172,7 @@ impl TraitImpls {
db: &dyn HirDatabase,
krate: CrateId,
) -> Arc<[Arc<Self>]> {
- let _p =
- tracing::span!(tracing::Level::INFO, "trait_impls_in_deps_query", ?krate).entered();
+ let _p = tracing::info_span!("trait_impls_in_deps_query", ?krate).entered();
let crate_graph = db.crate_graph();
Arc::from_iter(
@@ -280,8 +278,7 @@ pub struct InherentImpls {
impl InherentImpls {
pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
- let _p =
- tracing::span!(tracing::Level::INFO, "inherent_impls_in_crate_query", ?krate).entered();
+ let _p = tracing::info_span!("inherent_impls_in_crate_query", ?krate).entered();
let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
let crate_def_map = db.crate_def_map(krate);
@@ -295,7 +292,7 @@ impl InherentImpls {
db: &dyn HirDatabase,
block: BlockId,
) -> Option<Arc<Self>> {
- let _p = tracing::span!(tracing::Level::INFO, "inherent_impls_in_block_query").entered();
+ let _p = tracing::info_span!("inherent_impls_in_block_query").entered();
let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
let block_def_map = db.block_def_map(block);
@@ -368,7 +365,7 @@ pub(crate) fn incoherent_inherent_impl_crates(
krate: CrateId,
fp: TyFingerprint,
) -> SmallVec<[CrateId; 2]> {
- let _p = tracing::span!(tracing::Level::INFO, "incoherent_inherent_impl_crates").entered();
+ let _p = tracing::info_span!("incoherent_inherent_impl_crates").entered();
let mut res = SmallVec::new();
let crate_graph = db.crate_graph();
@@ -937,8 +934,7 @@ pub fn iterate_method_candidates_dyn(
mode: LookupMode,
callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
) -> ControlFlow<()> {
- let _p = tracing::span!(
- tracing::Level::INFO,
+ let _p = tracing::info_span!(
"iterate_method_candidates_dyn",
?mode,
?name,
@@ -1504,7 +1500,7 @@ fn is_valid_impl_fn_candidate(
}
}
table.run_in_snapshot(|table| {
- let _p = tracing::span!(tracing::Level::INFO, "subst_for_def").entered();
+ let _p = tracing::info_span!("subst_for_def").entered();
let impl_subst =
TyBuilder::subst_for_def(db, impl_id, None).fill_with_inference_vars(table).build();
let expect_self_ty = db.impl_self_ty(impl_id).substitute(Interner, &impl_subst);
@@ -1512,7 +1508,7 @@ fn is_valid_impl_fn_candidate(
check_that!(table.unify(&expect_self_ty, self_ty));
if let Some(receiver_ty) = receiver_ty {
- let _p = tracing::span!(tracing::Level::INFO, "check_receiver_ty").entered();
+ let _p = tracing::info_span!("check_receiver_ty").entered();
check_that!(data.has_self_param());
let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst.clone()))
diff --git a/crates/hir-ty/src/mir.rs b/crates/hir-ty/src/mir.rs
index d513355037..2e106877cb 100644
--- a/crates/hir-ty/src/mir.rs
+++ b/crates/hir-ty/src/mir.rs
@@ -898,20 +898,19 @@ pub enum Rvalue {
Cast(CastKind, Operand, Ty),
// FIXME link to `pointer::offset` when it hits stable.
- /// * `Offset` has the same semantics as `pointer::offset`, except that the second
- /// parameter may be a `usize` as well.
- /// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats,
- /// raw pointers, or function pointers and return a `bool`. The types of the operands must be
- /// matching, up to the usual caveat of the lifetimes in function pointers.
- /// * Left and right shift operations accept signed or unsigned integers not necessarily of the
- /// same type and return a value of the same type as their LHS. Like in Rust, the RHS is
- /// truncated as needed.
- /// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching
- /// types and return a value of that type.
- /// * The remaining operations accept signed integers, unsigned integers, or floats with
- /// matching types and return a value of that type.
+ // /// * `Offset` has the same semantics as `pointer::offset`, except that the second
+ // /// parameter may be a `usize` as well.
+ // /// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats,
+ // /// raw pointers, or function pointers and return a `bool`. The types of the operands must be
+ // /// matching, up to the usual caveat of the lifetimes in function pointers.
+ // /// * Left and right shift operations accept signed or unsigned integers not necessarily of the
+ // /// same type and return a value of the same type as their LHS. Like in Rust, the RHS is
+ // /// truncated as needed.
+ // /// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching
+ // /// types and return a value of that type.
+ // /// * The remaining operations accept signed integers, unsigned integers, or floats with
+ // /// matching types and return a value of that type.
//BinaryOp(BinOp, Box<(Operand, Operand)>),
-
/// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition.
///
/// When overflow checking is disabled and we are generating run-time code, the error condition
diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs
index 8b6936f8bc..878d584a4e 100644
--- a/crates/hir-ty/src/mir/borrowck.rs
+++ b/crates/hir-ty/src/mir/borrowck.rs
@@ -91,7 +91,7 @@ pub fn borrowck_query(
db: &dyn HirDatabase,
def: DefWithBodyId,
) -> Result<Arc<[BorrowckResult]>, MirLowerError> {
- let _p = tracing::span!(tracing::Level::INFO, "borrowck_query").entered();
+ let _p = tracing::info_span!("borrowck_query").entered();
let mut res = vec![];
all_mir_bodies(db, def, |body| {
res.push(BorrowckResult {
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index 2de1aa30c9..4ee96a66a3 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -363,7 +363,7 @@ impl MirEvalError {
)?;
}
Either::Right(closure) => {
- writeln!(f, "In {:?}", closure)?;
+ writeln!(f, "In {closure:?}")?;
}
}
let source_map = db.body_with_source_map(*def).1;
@@ -424,7 +424,7 @@ impl MirEvalError {
| MirEvalError::StackOverflow
| MirEvalError::CoerceUnsizedError(_)
| MirEvalError::InternalError(_)
- | MirEvalError::InvalidVTableId(_) => writeln!(f, "{:?}", err)?,
+ | MirEvalError::InvalidVTableId(_) => writeln!(f, "{err:?}")?,
}
Ok(())
}
diff --git a/crates/hir-ty/src/mir/eval/tests.rs b/crates/hir-ty/src/mir/eval/tests.rs
index 4abbda56cb..c3b35cd553 100644
--- a/crates/hir-ty/src/mir/eval/tests.rs
+++ b/crates/hir-ty/src/mir/eval/tests.rs
@@ -77,7 +77,7 @@ fn check_panic(ra_fixture: &str, expected_panic: &str) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
let file_id = *file_ids.last().unwrap();
let e = eval_main(&db, file_id).unwrap_err();
- assert_eq!(e.is_panic().unwrap_or_else(|| panic!("unexpected error: {:?}", e)), expected_panic);
+ assert_eq!(e.is_panic().unwrap_or_else(|| panic!("unexpected error: {e:?}")), expected_panic);
}
#[test]
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 151f65cfbb..09302846f1 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -28,6 +28,7 @@ use crate::{
db::{HirDatabase, InternedClosure},
display::HirDisplay,
error_lifetime,
+ generics::generics,
infer::{CaptureKind, CapturedItem, TypeMismatch},
inhabitedness::is_ty_uninhabited_from,
layout::LayoutError,
@@ -42,7 +43,7 @@ use crate::{
},
static_lifetime,
traits::FnTrait,
- utils::{generics, ClosureSubst},
+ utils::ClosureSubst,
Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt,
};
@@ -213,7 +214,7 @@ impl MirLowerError {
| MirLowerError::LangItemNotFound(_)
| MirLowerError::MutatingRvalue
| MirLowerError::UnresolvedLabel
- | MirLowerError::UnresolvedUpvar(_) => writeln!(f, "{:?}", self)?,
+ | MirLowerError::UnresolvedUpvar(_) => writeln!(f, "{self:?}")?,
}
Ok(())
}
@@ -2133,7 +2134,7 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<Mi
}
DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
};
- let _p = tracing::span!(tracing::Level::INFO, "mir_body_query", ?detail).entered();
+ let _p = tracing::info_span!("mir_body_query", ?detail).entered();
let body = db.body(def);
let infer = db.infer(def);
let mut result = lower_to_mir(db, def, &body, &infer, body.body_expr)?;
diff --git a/crates/hir-ty/src/mir/monomorphization.rs b/crates/hir-ty/src/mir/monomorphization.rs
index a384c9306e..43afa61504 100644
--- a/crates/hir-ty/src/mir/monomorphization.rs
+++ b/crates/hir-ty/src/mir/monomorphization.rs
@@ -21,8 +21,8 @@ use crate::{
consteval::{intern_const_scalar, unknown_const},
db::{HirDatabase, InternedClosure},
from_placeholder_idx,
+ generics::{generics, Generics},
infer::normalize,
- utils::{generics, Generics},
ClosureId, Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
};
diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs
index 02f2cd7615..4283a94657 100644
--- a/crates/hir-ty/src/traits.rs
+++ b/crates/hir-ty/src/traits.rs
@@ -108,7 +108,7 @@ pub(crate) fn trait_solve_query(
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(),
_ => "??".to_owned(),
};
- let _p = tracing::span!(tracing::Level::INFO, "trait_solve_query", ?detail).entered();
+ let _p = tracing::info_span!("trait_solve_query", ?detail).entered();
tracing::info!("trait_solve_query({:?})", goal.value.goal);
if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq {
@@ -140,7 +140,7 @@ fn solve(
block: Option<BlockId>,
goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>,
) -> Option<chalk_solve::Solution<Interner>> {
- let _p = tracing::span!(tracing::Level::INFO, "solve", ?krate, ?block).entered();
+ let _p = tracing::info_span!("solve", ?krate, ?block).entered();
let context = ChalkContext { db, krate, block };
tracing::debug!("solve goal: {:?}", goal);
let mut solver = create_chalk_solver();
diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs
index 42c7a84032..969999cdb8 100644
--- a/crates/hir-ty/src/utils.rs
+++ b/crates/hir-ty/src/utils.rs
@@ -5,25 +5,19 @@ use std::{hash::Hash, iter};
use base_db::CrateId;
use chalk_ir::{
- cast::Cast,
fold::{FallibleTypeFolder, Shift},
- BoundVar, DebruijnIndex,
+ DebruijnIndex,
};
use hir_def::{
db::DefDatabase,
- generics::{
- GenericParamDataRef, GenericParams, LifetimeParamData, TypeOrConstParamData,
- TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
- },
+ generics::{WherePredicate, WherePredicateTypeTarget},
lang_item::LangItem,
resolver::{HasResolver, TypeNs},
type_ref::{TraitBoundModifier, TypeRef},
- ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId, GenericParamId, ItemContainerId,
- LifetimeParamId, Lookup, OpaqueInternableThing, TraitId, TypeAliasId, TypeOrConstParamId,
- TypeParamId,
+ EnumId, EnumVariantId, FunctionId, Lookup, OpaqueInternableThing, TraitId, TypeAliasId,
+ TypeOrConstParamId,
};
use hir_expand::name::Name;
-use intern::Interned;
use rustc_abi::TargetDataLayout;
use rustc_hash::FxHashSet;
use smallvec::{smallvec, SmallVec};
@@ -161,7 +155,7 @@ impl Iterator for ClauseElaborator<'_> {
fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(TraitId)) {
let resolver = trait_.resolver(db);
let generic_params = db.generic_params(trait_.into());
- let trait_self = generic_params.find_trait_self_param();
+ let trait_self = generic_params.trait_self_param();
generic_params
.where_predicates
.iter()
@@ -194,7 +188,7 @@ fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(Tra
fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef, cb: impl FnMut(TraitRef)) {
let generic_params = db.generic_params(trait_ref.hir_trait_id().into());
- let trait_self = match generic_params.find_trait_self_param() {
+ let trait_self = match generic_params.trait_self_param() {
Some(p) => TypeOrConstParamId { parent: trait_ref.hir_trait_id().into(), local_id: p },
None => return,
};
@@ -226,11 +220,6 @@ pub(super) fn associated_type_by_name_including_super_traits(
})
}
-pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
- let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
- Generics { def, params: db.generic_params(def), parent_generics }
-}
-
/// It is a bit different from the rustc equivalent. Currently it stores:
/// - 0: the function signature, encoded as a function pointer type
/// - 1..n: generics of the parent
@@ -262,278 +251,14 @@ impl<'a> ClosureSubst<'a> {
}
}
-#[derive(Clone, Debug)]
-pub(crate) struct Generics {
- def: GenericDefId,
- pub(crate) params: Interned<GenericParams>,
- parent_generics: Option<Box<Generics>>,
-}
-
-impl Generics {
- pub(crate) fn iter_id(&self) -> impl Iterator<Item = GenericParamId> + '_ {
- self.iter().map(|(id, _)| id)
- }
-
- pub(crate) fn def(&self) -> GenericDefId {
- self.def
- }
-
- /// Iterator over types and const params of self, then parent.
- pub(crate) fn iter<'a>(
- &'a self,
- ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'a>)> + 'a {
- let from_toc_id = |it: &'a Generics| {
- move |(local_id, p): (_, &'a TypeOrConstParamData)| {
- let id = TypeOrConstParamId { parent: it.def, local_id };
- match p {
- TypeOrConstParamData::TypeParamData(p) => (
- GenericParamId::TypeParamId(TypeParamId::from_unchecked(id)),
- GenericParamDataRef::TypeParamData(p),
- ),
- TypeOrConstParamData::ConstParamData(p) => (
- GenericParamId::ConstParamId(ConstParamId::from_unchecked(id)),
- GenericParamDataRef::ConstParamData(p),
- ),
- }
- }
- };
-
- let from_lt_id = |it: &'a Generics| {
- move |(local_id, p): (_, &'a LifetimeParamData)| {
- (
- GenericParamId::LifetimeParamId(LifetimeParamId { parent: it.def, local_id }),
- GenericParamDataRef::LifetimeParamData(p),
- )
- }
- };
-
- let lt_iter = self.params.iter_lt().map(from_lt_id(self));
- self.params
- .iter_type_or_consts()
- .map(from_toc_id(self))
- .chain(lt_iter)
- .chain(self.iter_parent())
- }
-
- /// Iterate over types and const params without parent params.
- pub(crate) fn iter_self<'a>(
- &'a self,
- ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'a>)> + 'a {
- let from_toc_id = |it: &'a Generics| {
- move |(local_id, p): (_, &'a TypeOrConstParamData)| {
- let id = TypeOrConstParamId { parent: it.def, local_id };
- match p {
- TypeOrConstParamData::TypeParamData(p) => (
- GenericParamId::TypeParamId(TypeParamId::from_unchecked(id)),
- GenericParamDataRef::TypeParamData(p),
- ),
- TypeOrConstParamData::ConstParamData(p) => (
- GenericParamId::ConstParamId(ConstParamId::from_unchecked(id)),
- GenericParamDataRef::ConstParamData(p),
- ),
- }
- }
- };
-
- let from_lt_id = |it: &'a Generics| {
- move |(local_id, p): (_, &'a LifetimeParamData)| {
- (
- GenericParamId::LifetimeParamId(LifetimeParamId { parent: it.def, local_id }),
- GenericParamDataRef::LifetimeParamData(p),
- )
- }
- };
-
- self.params
- .iter_type_or_consts()
- .map(from_toc_id(self))
- .chain(self.params.iter_lt().map(from_lt_id(self)))
- }
-
- /// Iterator over types and const params of parent.
- pub(crate) fn iter_parent(
- &self,
- ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ {
- self.parent_generics().into_iter().flat_map(|it| {
- let from_toc_id = move |(local_id, p)| {
- let p: &_ = p;
- let id = TypeOrConstParamId { parent: it.def, local_id };
- match p {
- TypeOrConstParamData::TypeParamData(p) => (
- GenericParamId::TypeParamId(TypeParamId::from_unchecked(id)),
- GenericParamDataRef::TypeParamData(p),
- ),
- TypeOrConstParamData::ConstParamData(p) => (
- GenericParamId::ConstParamId(ConstParamId::from_unchecked(id)),
- GenericParamDataRef::ConstParamData(p),
- ),
- }
- };
-
- let from_lt_id = move |(local_id, p): (_, _)| {
- (
- GenericParamId::LifetimeParamId(LifetimeParamId { parent: it.def, local_id }),
- GenericParamDataRef::LifetimeParamData(p),
- )
- };
- let lt_iter = it.params.iter_lt().map(from_lt_id);
- it.params.iter_type_or_consts().map(from_toc_id).chain(lt_iter)
- })
- }
-
- /// Returns total number of generic parameters in scope, including those from parent.
- pub(crate) fn len(&self) -> usize {
- let parent = self.parent_generics().map_or(0, Generics::len);
- let child = self.params.len();
- parent + child
- }
-
- /// Returns numbers of generic parameters and lifetimes excluding those from parent.
- pub(crate) fn len_self(&self) -> usize {
- self.params.len()
- }
-
- /// Returns number of generic parameter excluding those from parent
- fn len_type_and_const_params(&self) -> usize {
- self.params.type_or_consts.len()
- }
-
- /// (parent total, self param, type params, const params, impl trait list, lifetimes)
- pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize, usize, usize) {
- let mut self_params = 0;
- let mut type_params = 0;
- let mut impl_trait_params = 0;
- let mut const_params = 0;
- let mut lifetime_params = 0;
- self.params.iter_type_or_consts().for_each(|(_, data)| match data {
- TypeOrConstParamData::TypeParamData(p) => match p.provenance {
- TypeParamProvenance::TypeParamList => type_params += 1,
- TypeParamProvenance::TraitSelf => self_params += 1,
- TypeParamProvenance::ArgumentImplTrait => impl_trait_params += 1,
- },
- TypeOrConstParamData::ConstParamData(_) => const_params += 1,
- });
-
- self.params.iter_lt().for_each(|(_, _)| lifetime_params += 1);
-
- let parent_len = self.parent_generics().map_or(0, Generics::len);
- (parent_len, self_params, type_params, const_params, impl_trait_params, lifetime_params)
- }
-
- pub(crate) fn type_or_const_param_idx(&self, param: TypeOrConstParamId) -> Option<usize> {
- Some(self.find_type_or_const_param(param)?.0)
- }
-
- fn find_type_or_const_param(
- &self,
- param: TypeOrConstParamId,
- ) -> Option<(usize, &TypeOrConstParamData)> {
- if param.parent == self.def {
- let idx = param.local_id.into_raw().into_u32() as usize;
- if idx >= self.params.type_or_consts.len() {
- return None;
- }
- Some((idx, &self.params.type_or_consts[param.local_id]))
- } else {
- self.parent_generics()
- .and_then(|g| g.find_type_or_const_param(param))
- // Remember that parent parameters come after parameters for self.
- .map(|(idx, data)| (self.len_self() + idx, data))
- }
- }
-
- pub(crate) fn lifetime_idx(&self, lifetime: LifetimeParamId) -> Option<usize> {
- Some(self.find_lifetime(lifetime)?.0)
- }
-
- fn find_lifetime(&self, lifetime: LifetimeParamId) -> Option<(usize, &LifetimeParamData)> {
- if lifetime.parent == self.def {
- let idx = lifetime.local_id.into_raw().into_u32() as usize;
- if idx >= self.params.lifetimes.len() {
- return None;
- }
- Some((
- self.len_type_and_const_params() + idx,
- &self.params.lifetimes[lifetime.local_id],
- ))
- } else {
- self.parent_generics()
- .and_then(|g| g.find_lifetime(lifetime))
- .map(|(idx, data)| (self.len_self() + idx, data))
- }
- }
-
- pub(crate) fn parent_generics(&self) -> Option<&Generics> {
- self.parent_generics.as_deref()
- }
-
- pub(crate) fn parent_or_self(&self) -> &Generics {
- self.parent_generics.as_deref().unwrap_or(self)
- }
-
- /// Returns a Substitution that replaces each parameter by a bound variable.
- pub(crate) fn bound_vars_subst(
- &self,
- db: &dyn HirDatabase,
- debruijn: DebruijnIndex,
- ) -> Substitution {
- Substitution::from_iter(
- Interner,
- self.iter_id().enumerate().map(|(idx, id)| match id {
- GenericParamId::ConstParamId(id) => BoundVar::new(debruijn, idx)
- .to_const(Interner, db.const_param_ty(id))
- .cast(Interner),
- GenericParamId::TypeParamId(_) => {
- BoundVar::new(debruijn, idx).to_ty(Interner).cast(Interner)
- }
- GenericParamId::LifetimeParamId(_) => {
- BoundVar::new(debruijn, idx).to_lifetime(Interner).cast(Interner)
- }
- }),
- )
- }
-
- /// Returns a Substitution that replaces each parameter by itself (i.e. `Ty::Param`).
- pub(crate) fn placeholder_subst(&self, db: &dyn HirDatabase) -> Substitution {
- Substitution::from_iter(
- Interner,
- self.iter_id().map(|id| match id {
- GenericParamId::TypeParamId(id) => {
- crate::to_placeholder_idx(db, id.into()).to_ty(Interner).cast(Interner)
- }
- GenericParamId::ConstParamId(id) => crate::to_placeholder_idx(db, id.into())
- .to_const(Interner, db.const_param_ty(id))
- .cast(Interner),
- GenericParamId::LifetimeParamId(id) => {
- crate::lt_to_placeholder_idx(db, id).to_lifetime(Interner).cast(Interner)
- }
- }),
- )
- }
-}
-
-fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
- let container = match def {
- GenericDefId::FunctionId(it) => it.lookup(db).container,
- GenericDefId::TypeAliasId(it) => it.lookup(db).container,
- GenericDefId::ConstId(it) => it.lookup(db).container,
- GenericDefId::EnumVariantId(it) => return Some(it.lookup(db).parent.into()),
- GenericDefId::AdtId(_)
- | GenericDefId::TraitId(_)
- | GenericDefId::ImplId(_)
- | GenericDefId::TraitAliasId(_) => return None,
- };
-
- match container {
- ItemContainerId::ImplId(it) => Some(it.into()),
- ItemContainerId::TraitId(it) => Some(it.into()),
- ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
- }
-}
-
pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool {
let data = db.function_data(func);
if data.has_unsafe_kw() {
+ // Functions that are `#[rustc_deprecated_safe_2024]` are safe to call before 2024.
+ if db.attrs(func.into()).by_key("rustc_deprecated_safe_2024").exists() {
+ // FIXME: Properly check the caller span and mark it as unsafe after 2024.
+ return false;
+ }
return true;
}
diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml
index 6d7ecd1e50..edf26a07a7 100644
--- a/crates/hir/Cargo.toml
+++ b/crates/hir/Cargo.toml
@@ -27,6 +27,7 @@ cfg.workspace = true
hir-def.workspace = true
hir-expand.workspace = true
hir-ty.workspace = true
+intern.workspace = true
stdx.workspace = true
syntax.workspace = true
tt.workspace = true
diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs
index c7502890ef..7b3ff7b064 100644
--- a/crates/hir/src/attrs.rs
+++ b/crates/hir/src/attrs.rs
@@ -307,7 +307,7 @@ fn doc_modpath_from_str(link: &str) -> Option<ModPath> {
let kind = match parts.next()? {
"" => PathKind::Abs,
"crate" => PathKind::Crate,
- "self" => PathKind::Super(0),
+ "self" => PathKind::SELF,
"super" => {
let mut deg = 1;
for segment in parts.by_ref() {
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index c276e87786..79069ed66b 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -3,7 +3,8 @@ use either::Either;
use hir_def::{
data::adt::{StructKind, VariantData},
generics::{
- TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
+ GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate,
+ WherePredicateTypeTarget,
},
lang_item::LangItem,
type_ref::{TypeBound, TypeRef},
@@ -16,10 +17,12 @@ use hir_ty::{
},
AliasEq, AliasTy, Interner, ProjectionTyExt, TraitRefExt, TyKind, WhereClause,
};
+use intern::Interned;
+use itertools::Itertools;
use crate::{
Adt, AsAssocItem, AssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl,
- Field, Function, GenericParam, HasCrate, HasVisibility, LifetimeParam, Macro, Module,
+ Field, Function, GenericParam, HasCrate, HasVisibility, Impl, LifetimeParam, Macro, Module,
SelfParam, Static, Struct, Trait, TraitAlias, TupleField, TyBuilder, Type, TypeAlias,
TypeOrConstParam, TypeParam, Union, Variant,
};
@@ -30,12 +33,42 @@ impl HirDisplay for Function {
let data = db.function_data(self.id);
let container = self.as_assoc_item(db).map(|it| it.container(db));
let mut module = self.module(db);
+
+ // Write container (trait or impl)
+ let container_params = match container {
+ Some(AssocItemContainer::Trait(trait_)) => {
+ let params = f.db.generic_params(trait_.id.into());
+ if f.show_container_bounds() && !params.is_empty() {
+ write_trait_header(&trait_, f)?;
+ f.write_char('\n')?;
+ has_disaplayable_predicates(&params).then_some(params)
+ } else {
+ None
+ }
+ }
+ Some(AssocItemContainer::Impl(impl_)) => {
+ let params = f.db.generic_params(impl_.id.into());
+ if f.show_container_bounds() && !params.is_empty() {
+ write_impl_header(&impl_, f)?;
+ f.write_char('\n')?;
+ has_disaplayable_predicates(&params).then_some(params)
+ } else {
+ None
+ }
+ }
+ None => None,
+ };
+
+ // Write signature of the function
+
+ // Block-local impls are "hoisted" to the nearest (non-block) module.
if let Some(AssocItemContainer::Impl(_)) = container {
- // Block-local impls are "hoisted" to the nearest (non-block) module.
module = module.nearest_non_block_module(db);
}
let module_id = module.id;
+
write_visibility(module_id, self.visibility(db), f)?;
+
if data.has_default_kw() {
f.write_str("default ")?;
}
@@ -116,12 +149,41 @@ impl HirDisplay for Function {
}
}
- write_where_clause(GenericDefId::FunctionId(self.id), f)?;
-
+ // Write where clauses
+ let has_written_where = write_where_clause(GenericDefId::FunctionId(self.id), f)?;
+ if let Some(container_params) = container_params {
+ if !has_written_where {
+ f.write_str("\nwhere")?;
+ }
+ let container_name = match container.unwrap() {
+ AssocItemContainer::Trait(_) => "trait",
+ AssocItemContainer::Impl(_) => "impl",
+ };
+ write!(f, "\n // Bounds from {container_name}:",)?;
+ write_where_predicates(&container_params, f)?;
+ }
Ok(())
}
}
+fn write_impl_header(impl_: &Impl, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ let db = f.db;
+
+ f.write_str("impl")?;
+ let def_id = GenericDefId::ImplId(impl_.id);
+ write_generic_params(def_id, f)?;
+
+ if let Some(trait_) = impl_.trait_(db) {
+ let trait_data = db.trait_data(trait_.id);
+ write!(f, " {} for", trait_data.name.display(db.upcast()))?;
+ }
+
+ f.write_char(' ')?;
+ impl_.self_ty(db).hir_fmt(f)?;
+
+ Ok(())
+}
+
impl HirDisplay for SelfParam {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
let data = f.db.function_data(self.func);
@@ -188,7 +250,7 @@ impl HirDisplay for Struct {
StructKind::Record => {
let has_where_clause = write_where_clause(def_id, f)?;
if let Some(limit) = f.entity_limit {
- display_fields(&self.fields(f.db), has_where_clause, limit, false, f)?;
+ write_fields(&self.fields(f.db), has_where_clause, limit, false, f)?;
}
}
StructKind::Unit => _ = write_where_clause(def_id, f)?,
@@ -208,7 +270,7 @@ impl HirDisplay for Enum {
let has_where_clause = write_where_clause(def_id, f)?;
if let Some(limit) = f.entity_limit {
- display_variants(&self.variants(f.db), has_where_clause, limit, f)?;
+ write_variants(&self.variants(f.db), has_where_clause, limit, f)?;
}
Ok(())
@@ -225,13 +287,13 @@ impl HirDisplay for Union {
let has_where_clause = write_where_clause(def_id, f)?;
if let Some(limit) = f.entity_limit {
- display_fields(&self.fields(f.db), has_where_clause, limit, false, f)?;
+ write_fields(&self.fields(f.db), has_where_clause, limit, false, f)?;
}
Ok(())
}
}
-fn display_fields(
+fn write_fields(
fields: &[Field],
has_where_clause: bool,
limit: usize,
@@ -242,11 +304,7 @@ fn display_fields(
let (indent, separator) = if in_line { ("", ' ') } else { (" ", '\n') };
f.write_char(if !has_where_clause { ' ' } else { separator })?;
if count == 0 {
- if fields.is_empty() {
- f.write_str("{}")?;
- } else {
- f.write_str("{ /* … */ }")?;
- }
+ f.write_str(if fields.is_empty() { "{}" } else { "{ /* … */ }" })?;
} else {
f.write_char('{')?;
@@ -255,14 +313,11 @@ fn display_fields(
for field in &fields[..count] {
f.write_str(indent)?;
field.hir_fmt(f)?;
- f.write_char(',')?;
- f.write_char(separator)?;
+ write!(f, ",{separator}")?;
}
if fields.len() > count {
- f.write_str(indent)?;
- f.write_str("/* … */")?;
- f.write_char(separator)?;
+ write!(f, "{indent}/* … */{separator}")?;
}
}
@@ -272,7 +327,7 @@ fn display_fields(
Ok(())
}
-fn display_variants(
+fn write_variants(
variants: &[Variant],
has_where_clause: bool,
limit: usize,
@@ -281,30 +336,22 @@ fn display_variants(
let count = variants.len().min(limit);
f.write_char(if !has_where_clause { ' ' } else { '\n' })?;
if count == 0 {
- if variants.is_empty() {
- f.write_str("{}")?;
- } else {
- f.write_str("{ /* … */ }")?;
- }
+ let variants = if variants.is_empty() { "{}" } else { "{ /* … */ }" };
+ f.write_str(variants)?;
} else {
f.write_str("{\n")?;
for variant in &variants[..count] {
- f.write_str(" ")?;
- write!(f, "{}", variant.name(f.db).display(f.db.upcast()))?;
+ write!(f, " {}", variant.name(f.db).display(f.db.upcast()))?;
match variant.kind(f.db) {
StructKind::Tuple => {
- if variant.fields(f.db).is_empty() {
- f.write_str("()")?;
- } else {
- f.write_str("( /* … */ )")?;
- }
+ let fields_str =
+ if variant.fields(f.db).is_empty() { "()" } else { "( /* … */ )" };
+ f.write_str(fields_str)?;
}
StructKind::Record => {
- if variant.fields(f.db).is_empty() {
- f.write_str(" {}")?;
- } else {
- f.write_str(" { /* … */ }")?;
- }
+ let fields_str =
+ if variant.fields(f.db).is_empty() { " {}" } else { " { /* … */ }" };
+ f.write_str(fields_str)?;
}
StructKind::Unit => {}
}
@@ -357,7 +404,7 @@ impl HirDisplay for Variant {
}
VariantData::Record(_) => {
if let Some(limit) = f.entity_limit {
- display_fields(&self.fields(f.db), false, limit, true, f)?;
+ write_fields(&self.fields(f.db), false, limit, true, f)?;
}
}
}
@@ -554,102 +601,96 @@ fn write_where_clause(
f: &mut HirFormatter<'_>,
) -> Result<bool, HirDisplayError> {
let params = f.db.generic_params(def);
+ if !has_disaplayable_predicates(&params) {
+ return Ok(false);
+ }
- // unnamed type targets are displayed inline with the argument itself, e.g. `f: impl Y`.
- let is_unnamed_type_target = |target: &WherePredicateTypeTarget| match target {
- WherePredicateTypeTarget::TypeRef(_) => false,
- WherePredicateTypeTarget::TypeOrConstParam(id) => {
- params.type_or_consts[*id].name().is_none()
- }
- };
+ f.write_str("\nwhere")?;
+ write_where_predicates(&params, f)?;
- let has_displayable_predicate = params
- .where_predicates
- .iter()
- .any(|pred| {
- !matches!(pred, WherePredicate::TypeBound { target, .. } if is_unnamed_type_target(target))
- });
+ Ok(true)
+}
- if !has_displayable_predicate {
- return Ok(false);
- }
+fn has_disaplayable_predicates(params: &Interned<GenericParams>) -> bool {
+ params.where_predicates.iter().any(|pred| {
+ !matches!(
+ pred,
+ WherePredicate::TypeBound { target: WherePredicateTypeTarget::TypeOrConstParam(id), .. }
+ if params.type_or_consts[*id].name().is_none()
+ )
+ })
+}
+
+fn write_where_predicates(
+ params: &Interned<GenericParams>,
+ f: &mut HirFormatter<'_>,
+) -> Result<(), HirDisplayError> {
+ use WherePredicate::*;
+
+ // unnamed type targets are displayed inline with the argument itself, e.g. `f: impl Y`.
+ let is_unnamed_type_target =
+ |params: &Interned<GenericParams>, target: &WherePredicateTypeTarget| {
+ matches!(target,
+ WherePredicateTypeTarget::TypeOrConstParam(id) if params.type_or_consts[*id].name().is_none()
+ )
+ };
let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f),
- WherePredicateTypeTarget::TypeOrConstParam(id) => {
- match &params.type_or_consts[*id].name() {
- Some(name) => write!(f, "{}", name.display(f.db.upcast())),
- None => f.write_str("{unnamed}"),
- }
- }
+ WherePredicateTypeTarget::TypeOrConstParam(id) => match params.type_or_consts[*id].name() {
+ Some(name) => write!(f, "{}", name.display(f.db.upcast())),
+ None => f.write_str("{unnamed}"),
+ },
};
- f.write_str("\nwhere")?;
-
- for (pred_idx, pred) in params.where_predicates.iter().enumerate() {
- let prev_pred =
- if pred_idx == 0 { None } else { Some(&params.where_predicates[pred_idx - 1]) };
+ let check_same_target = |pred1: &WherePredicate, pred2: &WherePredicate| match (pred1, pred2) {
+ (TypeBound { target: t1, .. }, TypeBound { target: t2, .. }) => t1 == t2,
+ (Lifetime { target: t1, .. }, Lifetime { target: t2, .. }) => t1 == t2,
+ (
+ ForLifetime { lifetimes: l1, target: t1, .. },
+ ForLifetime { lifetimes: l2, target: t2, .. },
+ ) => l1 == l2 && t1 == t2,
+ _ => false,
+ };
- let new_predicate = |f: &mut HirFormatter<'_>| {
- f.write_str(if pred_idx == 0 { "\n " } else { ",\n " })
- };
+ let mut iter = params.where_predicates.iter().peekable();
+ while let Some(pred) = iter.next() {
+ if matches!(pred, TypeBound { target, .. } if is_unnamed_type_target(params, target)) {
+ continue;
+ }
+ f.write_str("\n ")?;
match pred {
- WherePredicate::TypeBound { target, .. } if is_unnamed_type_target(target) => {}
- WherePredicate::TypeBound { target, bound } => {
- if matches!(prev_pred, Some(WherePredicate::TypeBound { target: target_, .. }) if target_ == target)
- {
- f.write_str(" + ")?;
- } else {
- new_predicate(f)?;
- write_target(target, f)?;
- f.write_str(": ")?;
- }
+ TypeBound { target, bound } => {
+ write_target(target, f)?;
+ f.write_str(": ")?;
bound.hir_fmt(f)?;
}
- WherePredicate::Lifetime { target, bound } => {
- if matches!(prev_pred, Some(WherePredicate::Lifetime { target: target_, .. }) if target_ == target)
- {
- write!(f, " + {}", bound.name.display(f.db.upcast()))?;
- } else {
- new_predicate(f)?;
- write!(
- f,
- "{}: {}",
- target.name.display(f.db.upcast()),
- bound.name.display(f.db.upcast())
- )?;
- }
+ Lifetime { target, bound } => {
+ let target = target.name.display(f.db.upcast());
+ let bound = bound.name.display(f.db.upcast());
+ write!(f, "{target}: {bound}")?;
}
- WherePredicate::ForLifetime { lifetimes, target, bound } => {
- if matches!(
- prev_pred,
- Some(WherePredicate::ForLifetime { lifetimes: lifetimes_, target: target_, .. })
- if lifetimes_ == lifetimes && target_ == target,
- ) {
- f.write_str(" + ")?;
- } else {
- new_predicate(f)?;
- f.write_str("for<")?;
- for (idx, lifetime) in lifetimes.iter().enumerate() {
- if idx != 0 {
- f.write_str(", ")?;
- }
- write!(f, "{}", lifetime.display(f.db.upcast()))?;
- }
- f.write_str("> ")?;
- write_target(target, f)?;
- f.write_str(": ")?;
- }
+ ForLifetime { lifetimes, target, bound } => {
+ let lifetimes = lifetimes.iter().map(|it| it.display(f.db.upcast())).join(", ");
+ write!(f, "for<{lifetimes}> ")?;
+ write_target(target, f)?;
+ f.write_str(": ")?;
bound.hir_fmt(f)?;
}
}
- }
- // End of final predicate. There must be at least one predicate here.
- f.write_char(',')?;
+ while let Some(nxt) = iter.next_if(|nxt| check_same_target(pred, nxt)) {
+ f.write_str(" + ")?;
+ match nxt {
+ TypeBound { bound, .. } | ForLifetime { bound, .. } => bound.hir_fmt(f)?,
+ Lifetime { bound, .. } => write!(f, "{}", bound.name.display(f.db.upcast()))?,
+ }
+ }
+ f.write_str(",")?;
+ }
- Ok(true)
+ Ok(())
}
impl HirDisplay for Const {
@@ -689,17 +730,8 @@ impl HirDisplay for Static {
impl HirDisplay for Trait {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
- write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
- let data = f.db.trait_data(self.id);
- if data.is_unsafe {
- f.write_str("unsafe ")?;
- }
- if data.is_auto {
- f.write_str("auto ")?;
- }
- write!(f, "trait {}", data.name.display(f.db.upcast()))?;
+ write_trait_header(self, f)?;
let def_id = GenericDefId::TraitId(self.id);
- write_generic_params(def_id, f)?;
let has_where_clause = write_where_clause(def_id, f)?;
if let Some(limit) = f.entity_limit {
@@ -735,6 +767,20 @@ impl HirDisplay for Trait {
}
}
+fn write_trait_header(trait_: &Trait, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(trait_.module(f.db).id, trait_.visibility(f.db), f)?;
+ let data = f.db.trait_data(trait_.id);
+ if data.is_unsafe {
+ f.write_str("unsafe ")?;
+ }
+ if data.is_auto {
+ f.write_str("auto ")?;
+ }
+ write!(f, "trait {}", data.name.display(f.db.upcast()))?;
+ write_generic_params(GenericDefId::TraitId(trait_.id), f)?;
+ Ok(())
+}
+
impl HirDisplay for TraitAlias {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
diff --git a/crates/hir/src/has_source.rs b/crates/hir/src/has_source.rs
index 7cdcdd76d1..929c8b3c09 100644
--- a/crates/hir/src/has_source.rs
+++ b/crates/hir/src/has_source.rs
@@ -8,13 +8,14 @@ use hir_def::{
Lookup, MacroId, VariantId,
};
use hir_expand::{HirFileId, InFile};
+use hir_ty::{db::InternedClosure, CallableDefId};
use syntax::ast;
use tt::TextRange;
use crate::{
- db::HirDatabase, Adt, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl,
- LifetimeParam, LocalSource, Macro, Module, Static, Struct, Trait, TraitAlias, TypeAlias,
- TypeOrConstParam, Union, Variant,
+ db::HirDatabase, Adt, Callee, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl,
+ Label, LifetimeParam, LocalSource, Macro, Module, Param, SelfParam, Static, Struct, Trait,
+ TraitAlias, TypeAlias, TypeOrConstParam, Union, Variant,
};
pub trait HasSource {
@@ -25,7 +26,7 @@ pub trait HasSource {
///
/// The current some implementations can return `InFile` instead of `Option<InFile>`.
/// But we made this method `Option` to support rlib in the future
- /// by https://github.com/rust-lang/rust-analyzer/issues/6913
+ /// by <https://github.com/rust-lang/rust-analyzer/issues/6913>
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>>;
}
@@ -202,7 +203,7 @@ impl HasSource for TypeOrConstParam {
type Ast = Either<ast::TypeOrConstParam, ast::TraitOrAlias>;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
let child_source = self.id.parent.child_source(db.upcast());
- Some(child_source.map(|it| it[self.id.local_id].clone()))
+ child_source.map(|it| it.get(self.id.local_id).cloned()).transpose()
}
}
@@ -210,7 +211,7 @@ impl HasSource for LifetimeParam {
type Ast = ast::LifetimeParam;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
let child_source = self.id.parent.child_source(db.upcast());
- Some(child_source.map(|it| it[self.id.local_id].clone()))
+ child_source.map(|it| it.get(self.id.local_id).cloned()).transpose()
}
}
@@ -222,6 +223,68 @@ impl HasSource for LocalSource {
}
}
+impl HasSource for Param {
+ type Ast = Either<ast::SelfParam, ast::Param>;
+
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ match self.func {
+ Callee::Def(CallableDefId::FunctionId(func)) => {
+ let InFile { file_id, value } = Function { id: func }.source(db)?;
+ let params = value.param_list()?;
+ if let Some(self_param) = params.self_param() {
+ if let Some(idx) = self.idx.checked_sub(1) {
+ params.params().nth(idx).map(Either::Right)
+ } else {
+ Some(Either::Left(self_param))
+ }
+ } else {
+ params.params().nth(self.idx).map(Either::Right)
+ }
+ .map(|value| InFile { file_id, value })
+ }
+ Callee::Closure(closure, _) => {
+ let InternedClosure(owner, expr_id) = db.lookup_intern_closure(closure.into());
+ let (_, source_map) = db.body_with_source_map(owner);
+ let ast @ InFile { file_id, value } = source_map.expr_syntax(expr_id).ok()?;
+ let root = db.parse_or_expand(file_id);
+ match value.to_node(&root) {
+ ast::Expr::ClosureExpr(it) => it
+ .param_list()?
+ .params()
+ .nth(self.idx)
+ .map(Either::Right)
+ .map(|value| InFile { file_id: ast.file_id, value }),
+ _ => None,
+ }
+ }
+ _ => None,
+ }
+ }
+}
+
+impl HasSource for SelfParam {
+ type Ast = ast::SelfParam;
+
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ let InFile { file_id, value } = Function::from(self.func).source(db)?;
+ value
+ .param_list()
+ .and_then(|params| params.self_param())
+ .map(|value| InFile { file_id, value })
+ }
+}
+
+impl HasSource for Label {
+ type Ast = ast::Label;
+
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ let (_body, source_map) = db.body_with_source_map(self.parent);
+ let src = source_map.label_syntax(self.label_id);
+ let root = src.file_syntax(db.upcast());
+ Some(src.map(|ast| ast.to_node(&root)))
+ }
+}
+
impl HasSource for ExternCrateDecl {
type Ast = ast::ExternCrate;
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 777be711a5..c1fe8a8b31 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -64,7 +64,6 @@ use hir_expand::{
use hir_ty::{
all_super_traits, autoderef, check_orphan_rules,
consteval::{try_const_usize, unknown_const_as_generic, ConstExt},
- db::InternedClosure,
diagnostics::BodyValidationDiagnostic,
error_lifetime, known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
@@ -113,7 +112,7 @@ pub use hir_ty::method_resolution::TyFingerprint;
pub use {
cfg::{CfgAtom, CfgExpr, CfgOptions},
hir_def::{
- attr::{builtin::AttributeTemplate, AttrSourceMap, Attrs, AttrsWithOwner},
+ attr::{AttrSourceMap, Attrs, AttrsWithOwner},
data::adt::StructKind,
find_path::PrefixKind,
import_map,
@@ -132,6 +131,7 @@ pub use {
attrs::{Attr, AttrId},
change::ChangeWithProcMacros,
hygiene::{marks_rev, SyntaxContextExt},
+ inert_attr_macro::AttributeTemplate,
name::{known, Name},
proc_macro::ProcMacros,
tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId,
@@ -242,7 +242,7 @@ impl Crate {
db: &dyn DefDatabase,
query: import_map::Query,
) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
- let _p = tracing::span!(tracing::Level::INFO, "query_external_importables").entered();
+ let _p = tracing::info_span!("query_external_importables").entered();
import_map::search_dependencies(db, self.into(), &query).into_iter().map(|item| {
match ItemInNs::from(item) {
ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
@@ -551,8 +551,7 @@ impl Module {
acc: &mut Vec<AnyDiagnostic>,
style_lints: bool,
) {
- let _p = tracing::span!(tracing::Level::INFO, "Module::diagnostics", name = ?self.name(db))
- .entered();
+ let _p = tracing::info_span!("Module::diagnostics", name = ?self.name(db)).entered();
let def_map = self.id.def_map(db.upcast());
for diag in def_map.diagnostics() {
if diag.in_module != self.id.local_id {
@@ -1099,6 +1098,35 @@ pub enum FieldSource {
Pos(ast::TupleField),
}
+impl AstNode for FieldSource {
+ fn can_cast(kind: syntax::SyntaxKind) -> bool
+ where
+ Self: Sized,
+ {
+ ast::RecordField::can_cast(kind) || ast::TupleField::can_cast(kind)
+ }
+
+ fn cast(syntax: SyntaxNode) -> Option<Self>
+ where
+ Self: Sized,
+ {
+ if ast::RecordField::can_cast(syntax.kind()) {
+ <ast::RecordField as AstNode>::cast(syntax).map(FieldSource::Named)
+ } else if ast::TupleField::can_cast(syntax.kind()) {
+ <ast::TupleField as AstNode>::cast(syntax).map(FieldSource::Pos)
+ } else {
+ None
+ }
+ }
+
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ FieldSource::Named(it) => it.syntax(),
+ FieldSource::Pos(it) => it.syntax(),
+ }
+ }
+}
+
impl Field {
pub fn name(&self, db: &dyn HirDatabase) -> Name {
self.parent.variant_data(db).fields()[self.id].name.clone()
@@ -1884,6 +1912,14 @@ impl Function {
Type::from_value_def(db, self.id)
}
+ pub fn fn_ptr_type(self, db: &dyn HirDatabase) -> Type {
+ let resolver = self.id.resolver(db.upcast());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ let ty = TyKind::Function(callable_sig.to_fn_ptr()).intern(Interner);
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+
/// Get this function's return type
pub fn ret_type(self, db: &dyn HirDatabase) -> Type {
let resolver = self.id.resolver(db.upcast());
@@ -2208,47 +2244,9 @@ impl Param {
}
}
- pub fn pattern_source(&self, db: &dyn HirDatabase) -> Option<ast::Pat> {
+ pub fn pattern_source(self, db: &dyn HirDatabase) -> Option<ast::Pat> {
self.source(db).and_then(|p| p.value.right()?.pat())
}
-
- pub fn source(
- &self,
- db: &dyn HirDatabase,
- ) -> Option<InFile<Either<ast::SelfParam, ast::Param>>> {
- match self.func {
- Callee::Def(CallableDefId::FunctionId(func)) => {
- let InFile { file_id, value } = Function { id: func }.source(db)?;
- let params = value.param_list()?;
- if let Some(self_param) = params.self_param() {
- if let Some(idx) = self.idx.checked_sub(1) {
- params.params().nth(idx).map(Either::Right)
- } else {
- Some(Either::Left(self_param))
- }
- } else {
- params.params().nth(self.idx).map(Either::Right)
- }
- .map(|value| InFile { file_id, value })
- }
- Callee::Closure(closure, _) => {
- let InternedClosure(owner, expr_id) = db.lookup_intern_closure(closure.into());
- let (_, source_map) = db.body_with_source_map(owner);
- let ast @ InFile { file_id, value } = source_map.expr_syntax(expr_id).ok()?;
- let root = db.parse_or_expand(file_id);
- match value.to_node(&root) {
- ast::Expr::ClosureExpr(it) => it
- .param_list()?
- .params()
- .nth(self.idx)
- .map(Either::Right)
- .map(|value| InFile { file_id: ast.file_id, value }),
- _ => None,
- }
- }
- _ => None,
- }
- }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -2272,14 +2270,6 @@ impl SelfParam {
.unwrap_or(Access::Owned)
}
- pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::SelfParam>> {
- let InFile { file_id, value } = Function::from(self.func).source(db)?;
- value
- .param_list()
- .and_then(|params| params.self_param())
- .map(|value| InFile { file_id, value })
- }
-
pub fn parent_fn(&self) -> Function {
Function::from(self.func)
}
@@ -2414,9 +2404,9 @@ impl Const {
let value_signed =
i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_))));
if value >= 10 {
- return Ok(format!("{} ({:#X})", value_signed, value));
+ return Ok(format!("{value_signed} ({value:#X})"));
} else {
- return Ok(format!("{}", value_signed));
+ return Ok(format!("{value_signed}"));
}
}
}
@@ -2746,6 +2736,12 @@ impl Macro {
}
}
+ pub fn is_asm_or_global_asm(&self, db: &dyn HirDatabase) -> bool {
+ matches!(self.id, MacroId::Macro2Id(it) if {
+ matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltIn(m) if m.is_asm())
+ })
+ }
+
pub fn is_attr(&self, db: &dyn HirDatabase) -> bool {
matches!(self.kind(db), MacroKind::Attr)
}
@@ -2788,6 +2784,7 @@ impl From<ModuleDef> for ItemInNs {
ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_) => {
ItemInNs::Values(module_def)
}
+ ModuleDef::Macro(it) => ItemInNs::Macros(it),
_ => ItemInNs::Types(module_def),
}
}
@@ -3381,7 +3378,7 @@ impl BuiltinAttr {
}
fn builtin(name: &str) -> Option<Self> {
- hir_def::attr::builtin::find_builtin_attr_idx(name)
+ hir_expand::inert_attr_macro::find_builtin_attr_idx(name)
.map(|idx| BuiltinAttr { krate: None, idx: idx as u32 })
}
@@ -3389,14 +3386,18 @@ impl BuiltinAttr {
// FIXME: Return a `Name` here
match self.krate {
Some(krate) => db.crate_def_map(krate).registered_attrs()[self.idx as usize].clone(),
- None => SmolStr::new(hir_def::attr::builtin::INERT_ATTRIBUTES[self.idx as usize].name),
+ None => {
+ SmolStr::new(hir_expand::inert_attr_macro::INERT_ATTRIBUTES[self.idx as usize].name)
+ }
}
}
pub fn template(&self, _: &dyn HirDatabase) -> Option<AttributeTemplate> {
match self.krate {
Some(_) => None,
- None => Some(hir_def::attr::builtin::INERT_ATTRIBUTES[self.idx as usize].template),
+ None => {
+ Some(hir_expand::inert_attr_macro::INERT_ATTRIBUTES[self.idx as usize].template)
+ }
}
}
}
@@ -3440,13 +3441,6 @@ impl Label {
let body = db.body(self.parent);
body[self.label_id].name.clone()
}
-
- pub fn source(self, db: &dyn HirDatabase) -> InFile<ast::Label> {
- let (_body, source_map) = db.body_with_source_map(self.parent);
- let src = source_map.label_syntax(self.label_id);
- let root = src.file_syntax(db.upcast());
- src.map(|ast| ast.to_node(&root))
- }
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@@ -4612,8 +4606,7 @@ impl Type {
name: Option<&Name>,
mut callback: impl FnMut(Function) -> Option<T>,
) -> Option<T> {
- let _p =
- tracing::span!(tracing::Level::INFO, "iterate_method_candidates_with_traits").entered();
+ let _p = tracing::info_span!("iterate_method_candidates_with_traits").entered();
let mut slot = None;
self.iterate_method_candidates_dyn(
@@ -4662,8 +4655,7 @@ impl Type {
name: Option<&Name>,
callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
) {
- let _p = tracing::span!(
- tracing::Level::INFO,
+ let _p = tracing::info_span!(
"iterate_method_candidates_dyn",
with_local_impls = traits_in_scope.len(),
traits_in_scope = traits_in_scope.len(),
@@ -4701,7 +4693,7 @@ impl Type {
name: Option<&Name>,
mut callback: impl FnMut(AssocItem) -> Option<T>,
) -> Option<T> {
- let _p = tracing::span!(tracing::Level::INFO, "iterate_path_candidates").entered();
+ let _p = tracing::info_span!("iterate_path_candidates").entered();
let mut slot = None;
self.iterate_path_candidates_dyn(
db,
@@ -4768,7 +4760,7 @@ impl Type {
&'a self,
db: &'a dyn HirDatabase,
) -> impl Iterator<Item = Trait> + 'a {
- let _p = tracing::span!(tracing::Level::INFO, "applicable_inherent_traits").entered();
+ let _p = tracing::info_span!("applicable_inherent_traits").entered();
self.autoderef_(db)
.filter_map(|ty| ty.dyn_trait())
.flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id))
@@ -4776,7 +4768,7 @@ impl Type {
}
pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a {
- let _p = tracing::span!(tracing::Level::INFO, "env_traits").entered();
+ let _p = tracing::info_span!("env_traits").entered();
self.autoderef_(db)
.filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_)))
.flat_map(|ty| {
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 43de2a6ee7..f6c88edbff 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -19,7 +19,11 @@ use hir_def::{
AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
};
use hir_expand::{
- attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo,
+ attrs::collect_attrs,
+ builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
+ db::ExpandDatabase,
+ files::InRealFile,
+ name::AsName,
InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
};
use itertools::Itertools;
@@ -132,9 +136,6 @@ pub struct SemanticsImpl<'db> {
s2d_cache: RefCell<SourceToDefCache>,
/// Rootnode to HirFileId cache
root_to_file_cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
- // These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
- // So we might wanna move them out into something specific for semantic highlighting
- expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
/// MacroCall to its expansion's MacroFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
}
@@ -295,7 +296,6 @@ impl<'db> SemanticsImpl<'db> {
db,
s2d_cache: Default::default(),
root_to_file_cache: Default::default(),
- expansion_info_cache: Default::default(),
macro_call_cache: Default::default(),
}
}
@@ -314,7 +314,58 @@ impl<'db> SemanticsImpl<'db> {
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let sa = self.analyze_no_infer(macro_call.syntax())?;
- let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
+
+ let macro_call = InFile::new(sa.file_id, macro_call);
+ let file_id = if let Some(call) =
+ <ast::MacroCall as crate::semantics::ToDef>::to_def(self, macro_call)
+ {
+ call.as_macro_file()
+ } else {
+ sa.expand(self.db, macro_call)?
+ };
+
+ let node = self.parse_or_expand(file_id.into());
+ Some(node)
+ }
+
+ /// Expands the macro if it isn't one of the built-in ones that expand to custom syntax or dummy
+ /// expansions.
+ pub fn expand_allowed_builtins(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+ let sa = self.analyze_no_infer(macro_call.syntax())?;
+
+ let macro_call = InFile::new(sa.file_id, macro_call);
+ let file_id = if let Some(call) =
+ <ast::MacroCall as crate::semantics::ToDef>::to_def(self, macro_call)
+ {
+ call.as_macro_file()
+ } else {
+ sa.expand(self.db, macro_call)?
+ };
+ let macro_call = self.db.lookup_intern_macro_call(file_id.macro_call_id);
+
+ let skip = matches!(
+ macro_call.def.kind,
+ hir_expand::MacroDefKind::BuiltIn(
+ _,
+ BuiltinFnLikeExpander::Column
+ | BuiltinFnLikeExpander::File
+ | BuiltinFnLikeExpander::ModulePath
+ | BuiltinFnLikeExpander::Asm
+ | BuiltinFnLikeExpander::LlvmAsm
+ | BuiltinFnLikeExpander::GlobalAsm
+ | BuiltinFnLikeExpander::LogSyntax
+ | BuiltinFnLikeExpander::TraceMacros
+ | BuiltinFnLikeExpander::FormatArgs
+ | BuiltinFnLikeExpander::FormatArgsNl
+ | BuiltinFnLikeExpander::ConstFormatArgs,
+ ) | hir_expand::MacroDefKind::BuiltInEager(_, EagerExpander::CompileError)
+ );
+ if skip {
+ // these macros expand to custom builtin syntax and/or dummy things, no point in
+ // showing these to the user
+ return None;
+ }
+
let node = self.parse_or_expand(file_id.into());
Some(node)
}
@@ -322,7 +373,7 @@ impl<'db> SemanticsImpl<'db> {
/// If `item` has an attribute macro attached to it, expands it.
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
let src = self.wrap_node_infile(item.clone());
- let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
+ let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
Some(self.parse_or_expand(macro_call_id.as_file()))
}
@@ -341,9 +392,7 @@ impl<'db> SemanticsImpl<'db> {
Some(
calls
.into_iter()
- .map(|call| {
- macro_call_to_macro_id(ctx, self.db.upcast(), call?).map(|id| Macro { id })
- })
+ .map(|call| macro_call_to_macro_id(ctx, call?).map(|id| Macro { id }))
.collect(),
)
})
@@ -403,7 +452,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
let file_id = self.find_file(item.syntax()).file_id;
- let src = InFile::new(file_id, item.clone());
+ let src = InFile::new(file_id, item);
self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
}
@@ -420,7 +469,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_call = InFile::new(file_id, actual_macro_call);
let krate = resolver.krate();
let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
- resolver.resolve_path_as_macro_def(self.db.upcast(), &path, Some(MacroSubNs::Bang))
+ resolver.resolve_path_as_macro_def(self.db.upcast(), path, Some(MacroSubNs::Bang))
})?;
hir_expand::db::expand_speculative(
self.db.upcast(),
@@ -453,7 +502,7 @@ impl<'db> SemanticsImpl<'db> {
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let macro_call = self.wrap_node_infile(actual_macro_call.clone());
- let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call))?;
+ let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?;
hir_expand::db::expand_speculative(
self.db.upcast(),
macro_call_id,
@@ -705,8 +754,6 @@ impl<'db> SemanticsImpl<'db> {
let parent = token.parent()?;
let file_id = self.find_file(&parent).file_id.file_id()?;
- let mut cache = self.expansion_info_cache.borrow_mut();
-
// iterate related crates and find all include! invocations that include_file_id matches
for (invoc, _) in self
.db
@@ -716,18 +763,32 @@ impl<'db> SemanticsImpl<'db> {
.filter(|&(_, include_file_id)| include_file_id == file_id)
{
let macro_file = invoc.as_macro_file();
- let expansion_info = cache.entry(macro_file).or_insert_with(|| {
- let exp_info = macro_file.expansion_info(self.db.upcast());
-
- let InMacroFile { file_id, value } = exp_info.expanded();
- self.cache(value, file_id.into());
+ let expansion_info = {
+ self.with_ctx(|ctx| {
+ ctx.cache
+ .expansion_info_cache
+ .entry(macro_file)
+ .or_insert_with(|| {
+ let exp_info = macro_file.expansion_info(self.db.upcast());
+
+ let InMacroFile { file_id, value } = exp_info.expanded();
+ if let InFile { file_id, value: Some(value) } = exp_info.arg() {
+ self.cache(value.ancestors().last().unwrap(), file_id);
+ }
+ self.cache(value, file_id.into());
- exp_info
- });
+ exp_info
+ })
+ .clone()
+ })
+ };
// FIXME: uncached parse
// Create the source analyzer for the macro call scope
- let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file()))
+ let Some(sa) = expansion_info
+ .arg()
+ .value
+ .and_then(|it| self.analyze_no_infer(&it.ancestors().last().unwrap()))
else {
continue;
};
@@ -758,7 +819,7 @@ impl<'db> SemanticsImpl<'db> {
mut token: SyntaxToken,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) {
- let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros_impl").entered();
+ let _p = tracing::info_span!("descend_into_macros_impl").entered();
let (sa, span, file_id) =
match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
Some(sa) => match sa.file_id.file_id() {
@@ -785,23 +846,28 @@ impl<'db> SemanticsImpl<'db> {
}
};
- let mut cache = self.expansion_info_cache.borrow_mut();
- let mut mcache = self.macro_call_cache.borrow_mut();
+ let mut m_cache = self.macro_call_cache.borrow_mut();
let def_map = sa.resolver.def_map();
let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
- let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
- let exp_info = cache.entry(macro_file).or_insert_with(|| {
- let exp_info = macro_file.expansion_info(self.db.upcast());
-
- let InMacroFile { file_id, value } = exp_info.expanded();
- self.cache(value, file_id.into());
-
- exp_info
- });
-
- let InMacroFile { file_id, value: mapped_tokens } = exp_info.map_range_down(span)?;
- let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect();
+ let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
+ let InMacroFile { file_id, value: mapped_tokens } = self.with_ctx(|ctx| {
+ Some(
+ ctx.cache
+ .expansion_info_cache
+ .entry(macro_file)
+ .or_insert_with(|| {
+ let exp_info = macro_file.expansion_info(self.db.upcast());
+
+ let InMacroFile { file_id, value } = exp_info.expanded();
+ self.cache(value, file_id.into());
+
+ exp_info
+ })
+ .map_range_down(span)?
+ .map(SmallVec::<[_; 2]>::from_iter),
+ )
+ })?;
// we have found a mapping for the token if the vec is non-empty
let res = mapped_tokens.is_empty().not().then_some(());
@@ -818,10 +884,7 @@ impl<'db> SemanticsImpl<'db> {
token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
// Don't force populate the dyn cache for items that don't have an attribute anyways
item.attrs().next()?;
- Some((
- ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?,
- item,
- ))
+ Some((ctx.item_to_macro_call(InFile::new(file_id, &item))?, item))
})
});
if let Some((call_id, item)) = containing_attribute_macro_call {
@@ -874,13 +937,20 @@ impl<'db> SemanticsImpl<'db> {
return None;
}
let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
- let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
- InFile::new(file_id, macro_call);
- let file_id = match mcache.get(&mcall) {
+ let mcall = InFile::new(file_id, macro_call);
+ let file_id = match m_cache.get(&mcall) {
Some(&it) => it,
None => {
- let it = sa.expand(self.db, mcall.as_ref())?;
- mcache.insert(mcall, it);
+ let it = if let Some(call) =
+ <ast::MacroCall as crate::semantics::ToDef>::to_def(
+ self,
+ mcall.as_ref(),
+ ) {
+ call.as_macro_file()
+ } else {
+ sa.expand(self.db, mcall.as_ref())?
+ };
+ m_cache.insert(mcall, it);
it
}
};
@@ -953,6 +1023,13 @@ impl<'db> SemanticsImpl<'db> {
let helpers =
def_map.derive_helpers_in_scope(InFile::new(file_id, id))?;
+ if !helpers.is_empty() {
+ let text_range = attr.syntax().text_range();
+ // remove any other token in this macro input, all their mappings are the
+ // same as this
+ tokens.retain(|t| !text_range.contains_range(t.text_range()));
+ }
+
let mut res = None;
for (.., derive) in
helpers.iter().filter(|(helper, ..)| *helper == attr_name)
@@ -1056,16 +1133,20 @@ impl<'db> SemanticsImpl<'db> {
node: SyntaxNode,
) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
let node = self.find_file(&node);
- let db = self.db.upcast();
iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| {
match value.parent() {
Some(parent) => Some(InFile::new(file_id, parent)),
None => {
- let call_node = file_id.macro_file()?.call_node(db);
- // cache the node
- // FIXME: uncached parse
- self.parse_or_expand(call_node.file_id);
- Some(call_node)
+ let macro_file = file_id.macro_file()?;
+
+ self.with_ctx(|ctx| {
+ let expansion_info = ctx
+ .cache
+ .expansion_info_cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
+ expansion_info.arg().map(|node| node?.parent()).transpose()
+ })
}
}
})
@@ -1090,7 +1171,7 @@ impl<'db> SemanticsImpl<'db> {
.find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
})?;
let src = self.wrap_node_infile(lifetime_param);
- ToDef::to_def(self, src)
+ ToDef::to_def(self, src.as_ref())
}
pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
@@ -1112,7 +1193,7 @@ impl<'db> SemanticsImpl<'db> {
})
})?;
let src = self.wrap_node_infile(label);
- ToDef::to_def(self, src)
+ ToDef::to_def(self, src.as_ref())
}
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
@@ -1275,9 +1356,15 @@ impl<'db> SemanticsImpl<'db> {
}
pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
- let sa = self.analyze(macro_call.syntax())?;
let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
- sa.resolve_macro_call(self.db, macro_call)
+ self.with_ctx(|ctx| {
+ ctx.macro_call_to_macro_call(macro_call)
+ .and_then(|call| macro_call_to_macro_id(ctx, call))
+ .map(Into::into)
+ })
+ .or_else(|| {
+ self.analyze(macro_call.value.syntax())?.resolve_macro_call(self.db, macro_call)
+ })
}
pub fn is_proc_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
@@ -1297,19 +1384,24 @@ impl<'db> SemanticsImpl<'db> {
}
pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
- let sa = match self.analyze(macro_call.syntax()) {
- Some(it) => it,
- None => return false,
- };
+ let Some(mac) = self.resolve_macro_call(macro_call) else { return false };
+ if mac.is_asm_or_global_asm(self.db) {
+ return true;
+ }
+
+ let Some(sa) = self.analyze(macro_call.syntax()) else { return false };
let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
- sa.is_unsafe_macro_call(self.db, macro_call)
+ match macro_call.map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast)).transpose() {
+ Some(it) => sa.is_unsafe_macro_call_expr(self.db, it.as_ref()),
+ None => false,
+ }
}
pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
let item_in_file = self.wrap_node_infile(item.clone());
let id = self.with_ctx(|ctx| {
- let macro_call_id = ctx.item_to_macro_call(item_in_file)?;
- macro_call_to_macro_id(ctx, self.db.upcast(), macro_call_id)
+ let macro_call_id = ctx.item_to_macro_call(item_in_file.as_ref())?;
+ macro_call_to_macro_id(ctx, macro_call_id)
})?;
Some(Macro { id })
}
@@ -1339,18 +1431,17 @@ impl<'db> SemanticsImpl<'db> {
}
fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
- let mut cache = self.s2d_cache.borrow_mut();
- let mut ctx = SourceToDefCtx { db: self.db, dynmap_cache: &mut cache };
+ let mut ctx = SourceToDefCtx { db: self.db, cache: &mut self.s2d_cache.borrow_mut() };
f(&mut ctx)
}
pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
- let src = self.find_file(src.syntax()).with_value(src).cloned();
+ let src = self.find_file(src.syntax()).with_value(src);
T::to_def(self, src)
}
fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
- self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from)
+ self.with_ctx(|ctx| ctx.file_to_def(file).to_owned()).into_iter().map(Module::from)
}
pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
@@ -1380,6 +1471,7 @@ impl<'db> SemanticsImpl<'db> {
where
Def::Ast: AstNode,
{
+ // FIXME: source call should go through the parse cache
let res = def.source(self.db)?;
self.cache(find_root(res.value.syntax()), res.file_id);
Some(res)
@@ -1409,7 +1501,7 @@ impl<'db> SemanticsImpl<'db> {
offset: Option<TextSize>,
infer_body: bool,
) -> Option<SourceAnalyzer> {
- let _p = tracing::span!(tracing::Level::INFO, "SemanticsImpl::analyze_impl").entered();
+ let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered();
let node = self.find_file(node);
let container = self.with_ctx(|ctx| ctx.find_container(node))?;
@@ -1438,7 +1530,7 @@ impl<'db> SemanticsImpl<'db> {
assert!(root_node.parent().is_none());
let mut cache = self.root_to_file_cache.borrow_mut();
let prev = cache.insert(root_node, file_id);
- assert!(prev.is_none() || prev == Some(file_id))
+ assert!(prev.is_none() || prev == Some(file_id));
}
pub fn assert_contains_node(&self, node: &SyntaxNode) {
@@ -1613,35 +1705,59 @@ impl<'db> SemanticsImpl<'db> {
fn macro_call_to_macro_id(
ctx: &mut SourceToDefCtx<'_, '_>,
- db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
) -> Option<MacroId> {
+ use span::HirFileIdRepr;
+
+ let db: &dyn ExpandDatabase = ctx.db.upcast();
let loc = db.lookup_intern_macro_call(macro_call_id);
- match loc.def.kind {
- hir_expand::MacroDefKind::Declarative(it)
- | hir_expand::MacroDefKind::BuiltIn(_, it)
- | hir_expand::MacroDefKind::BuiltInAttr(_, it)
- | hir_expand::MacroDefKind::BuiltInDerive(_, it)
- | hir_expand::MacroDefKind::BuiltInEager(_, it) => {
- ctx.macro_to_def(InFile::new(it.file_id, it.to_node(db)))
+
+ match loc.def.ast_id() {
+ Either::Left(it) => {
+ let node = match it.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
+ }
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let expansion_info = ctx
+ .cache
+ .expansion_info_cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(ctx.db.upcast()));
+ it.to_ptr(db).to_node(&expansion_info.expanded().value)
+ }
+ };
+ ctx.macro_to_def(InFile::new(it.file_id, &node))
}
- hir_expand::MacroDefKind::ProcMacro(_, _, it) => {
- ctx.proc_macro_to_def(InFile::new(it.file_id, it.to_node(db)))
+ Either::Right(it) => {
+ let node = match it.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
+ }
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let expansion_info = ctx
+ .cache
+ .expansion_info_cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(ctx.db.upcast()));
+ it.to_ptr(db).to_node(&expansion_info.expanded().value)
+ }
+ };
+ ctx.proc_macro_to_def(InFile::new(it.file_id, &node))
}
}
}
pub trait ToDef: AstNode + Clone {
type Def;
-
- fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def>;
+ fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def>;
}
macro_rules! to_def_impls {
($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
impl ToDef for $ast {
type Def = $def;
- fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def> {
+ fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def> {
sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
}
}
@@ -1674,6 +1790,7 @@ to_def_impls![
(crate::Label, ast::Label, label_to_def),
(crate::Adt, ast::Adt, adt_to_def),
(crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
+ (MacroCallId, ast::MacroCall, macro_call_to_macro_call),
];
fn find_root(node: &SyntaxNode) -> SyntaxNode {
diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs
index 77e7cdb58a..74ed2640f4 100644
--- a/crates/hir/src/semantics/source_to_def.rs
+++ b/crates/hir/src/semantics/source_to_def.rs
@@ -26,19 +26,19 @@
//!
//! The actual algorithm to resolve syntax to def is curious in two aspects:
//!
-//! * It is recursive
-//! * It uses the inverse algorithm (what is the syntax for this def?)
+//! * It is recursive
+//! * It uses the inverse algorithm (what is the syntax for this def?)
//!
//! Specifically, the algorithm goes like this:
//!
-//! 1. Find the syntactic container for the syntax. For example, field's
-//! container is the struct, and structs container is a module.
-//! 2. Recursively get the def corresponding to container.
-//! 3. Ask the container def for all child defs. These child defs contain
-//! the answer and answer's siblings.
-//! 4. For each child def, ask for it's source.
-//! 5. The child def whose source is the syntax node we've started with
-//! is the answer.
+//! 1. Find the syntactic container for the syntax. For example, field's
+//! container is the struct, and structs container is a module.
+//! 2. Recursively get the def corresponding to container.
+//! 3. Ask the container def for all child defs. These child defs contain
+//! the answer and answer's siblings.
+//! 4. For each child def, ask for its source.
+//! 5. The child def whose source is the syntax node we've started with
+//! is the answer.
//!
//! It's interesting that both Roslyn and Kotlin contain very similar code
//! shape.
@@ -98,56 +98,68 @@ use hir_def::{
FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId,
StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
};
-use hir_expand::{attrs::AttrId, name::AsName, HirFileId, HirFileIdExt, MacroCallId};
+use hir_expand::{
+ attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, MacroCallId,
+};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
+use span::MacroFileId;
use stdx::impl_from;
use syntax::{
ast::{self, HasName},
- AstNode, SyntaxNode,
+ AstNode, AstPtr, SyntaxNode,
};
use crate::{db::HirDatabase, InFile};
-pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap>;
+#[derive(Default)]
+pub(super) struct SourceToDefCache {
+ pub(super) dynmap_cache: FxHashMap<(ChildContainer, HirFileId), DynMap>,
+ pub(super) expansion_info_cache: FxHashMap<MacroFileId, ExpansionInfo>,
+ pub(super) file_to_def_cache: FxHashMap<FileId, SmallVec<[ModuleId; 1]>>,
+}
-pub(super) struct SourceToDefCtx<'a, 'b> {
- pub(super) db: &'b dyn HirDatabase,
- pub(super) dynmap_cache: &'a mut SourceToDefCache,
+pub(super) struct SourceToDefCtx<'db, 'cache> {
+ pub(super) db: &'db dyn HirDatabase,
+ pub(super) cache: &'cache mut SourceToDefCache,
}
impl SourceToDefCtx<'_, '_> {
- pub(super) fn file_to_def(&self, file: FileId) -> SmallVec<[ModuleId; 1]> {
- let _p = tracing::span!(tracing::Level::INFO, "SourceToDefCtx::file_to_def").entered();
- let mut mods = SmallVec::new();
- for &crate_id in self.db.relevant_crates(file).iter() {
- // Note: `mod` declarations in block modules cannot be supported here
- let crate_def_map = self.db.crate_def_map(crate_id);
- mods.extend(
- crate_def_map
- .modules_for_file(file)
- .map(|local_id| crate_def_map.module_id(local_id)),
- )
- }
- if mods.is_empty() {
- // FIXME: detached file
- }
- mods
+ pub(super) fn file_to_def(&mut self, file: FileId) -> &SmallVec<[ModuleId; 1]> {
+ let _p = tracing::info_span!("SourceToDefCtx::file_to_def").entered();
+ self.cache.file_to_def_cache.entry(file).or_insert_with(|| {
+ let mut mods = SmallVec::new();
+ for &crate_id in self.db.relevant_crates(file).iter() {
+ // Note: `mod` declarations in block modules cannot be supported here
+ let crate_def_map = self.db.crate_def_map(crate_id);
+ mods.extend(
+ crate_def_map
+ .modules_for_file(file)
+ .map(|local_id| crate_def_map.module_id(local_id)),
+ )
+ }
+ if mods.is_empty() {
+ // FIXME: detached file
+ }
+ mods
+ })
}
- pub(super) fn module_to_def(&mut self, src: InFile<ast::Module>) -> Option<ModuleId> {
- let _p = tracing::span!(tracing::Level::INFO, "module_to_def").entered();
- let parent_declaration = src
- .syntax()
- .ancestors_with_macros(self.db.upcast())
- .find_map(|it| it.map(Either::<ast::Module, ast::BlockExpr>::cast).transpose())
+ pub(super) fn module_to_def(&mut self, src: InFile<&ast::Module>) -> Option<ModuleId> {
+ let _p = tracing::info_span!("module_to_def").entered();
+ let parent_declaration = self
+ .ancestors_with_macros(src.syntax_ref(), |_, ancestor| {
+ ancestor.map(Either::<ast::Module, ast::BlockExpr>::cast).transpose()
+ })
.map(|it| it.transpose());
let parent_module = match parent_declaration {
Some(Either::Right(parent_block)) => self
- .block_to_def(parent_block)
+ .block_to_def(parent_block.as_ref())
.map(|block| self.db.block_def_map(block).root_module_id()),
- Some(Either::Left(parent_declaration)) => self.module_to_def(parent_declaration),
+ Some(Either::Left(parent_declaration)) => {
+ self.module_to_def(parent_declaration.as_ref())
+ }
None => {
let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id).first().copied()
@@ -160,73 +172,79 @@ impl SourceToDefCtx<'_, '_> {
Some(def_map.module_id(child_id))
}
- pub(super) fn source_file_to_def(&self, src: InFile<ast::SourceFile>) -> Option<ModuleId> {
- let _p = tracing::span!(tracing::Level::INFO, "source_file_to_def").entered();
+ pub(super) fn source_file_to_def(&mut self, src: InFile<&ast::SourceFile>) -> Option<ModuleId> {
+ let _p = tracing::info_span!("source_file_to_def").entered();
let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id).first().copied()
}
- pub(super) fn trait_to_def(&mut self, src: InFile<ast::Trait>) -> Option<TraitId> {
+ pub(super) fn trait_to_def(&mut self, src: InFile<&ast::Trait>) -> Option<TraitId> {
self.to_def(src, keys::TRAIT)
}
pub(super) fn trait_alias_to_def(
&mut self,
- src: InFile<ast::TraitAlias>,
+ src: InFile<&ast::TraitAlias>,
) -> Option<TraitAliasId> {
self.to_def(src, keys::TRAIT_ALIAS)
}
- pub(super) fn impl_to_def(&mut self, src: InFile<ast::Impl>) -> Option<ImplId> {
+ pub(super) fn impl_to_def(&mut self, src: InFile<&ast::Impl>) -> Option<ImplId> {
self.to_def(src, keys::IMPL)
}
- pub(super) fn fn_to_def(&mut self, src: InFile<ast::Fn>) -> Option<FunctionId> {
+ pub(super) fn fn_to_def(&mut self, src: InFile<&ast::Fn>) -> Option<FunctionId> {
self.to_def(src, keys::FUNCTION)
}
- pub(super) fn struct_to_def(&mut self, src: InFile<ast::Struct>) -> Option<StructId> {
+ pub(super) fn struct_to_def(&mut self, src: InFile<&ast::Struct>) -> Option<StructId> {
self.to_def(src, keys::STRUCT)
}
- pub(super) fn enum_to_def(&mut self, src: InFile<ast::Enum>) -> Option<EnumId> {
+ pub(super) fn enum_to_def(&mut self, src: InFile<&ast::Enum>) -> Option<EnumId> {
self.to_def(src, keys::ENUM)
}
- pub(super) fn union_to_def(&mut self, src: InFile<ast::Union>) -> Option<UnionId> {
+ pub(super) fn union_to_def(&mut self, src: InFile<&ast::Union>) -> Option<UnionId> {
self.to_def(src, keys::UNION)
}
- pub(super) fn static_to_def(&mut self, src: InFile<ast::Static>) -> Option<StaticId> {
+ pub(super) fn static_to_def(&mut self, src: InFile<&ast::Static>) -> Option<StaticId> {
self.to_def(src, keys::STATIC)
}
- pub(super) fn const_to_def(&mut self, src: InFile<ast::Const>) -> Option<ConstId> {
+ pub(super) fn const_to_def(&mut self, src: InFile<&ast::Const>) -> Option<ConstId> {
self.to_def(src, keys::CONST)
}
- pub(super) fn type_alias_to_def(&mut self, src: InFile<ast::TypeAlias>) -> Option<TypeAliasId> {
+ pub(super) fn type_alias_to_def(
+ &mut self,
+ src: InFile<&ast::TypeAlias>,
+ ) -> Option<TypeAliasId> {
self.to_def(src, keys::TYPE_ALIAS)
}
- pub(super) fn record_field_to_def(&mut self, src: InFile<ast::RecordField>) -> Option<FieldId> {
+ pub(super) fn record_field_to_def(
+ &mut self,
+ src: InFile<&ast::RecordField>,
+ ) -> Option<FieldId> {
self.to_def(src, keys::RECORD_FIELD)
}
- pub(super) fn tuple_field_to_def(&mut self, src: InFile<ast::TupleField>) -> Option<FieldId> {
+ pub(super) fn tuple_field_to_def(&mut self, src: InFile<&ast::TupleField>) -> Option<FieldId> {
self.to_def(src, keys::TUPLE_FIELD)
}
- pub(super) fn block_to_def(&mut self, src: InFile<ast::BlockExpr>) -> Option<BlockId> {
+ pub(super) fn block_to_def(&mut self, src: InFile<&ast::BlockExpr>) -> Option<BlockId> {
self.to_def(src, keys::BLOCK)
}
pub(super) fn enum_variant_to_def(
&mut self,
- src: InFile<ast::Variant>,
+ src: InFile<&ast::Variant>,
) -> Option<EnumVariantId> {
self.to_def(src, keys::ENUM_VARIANT)
}
pub(super) fn extern_crate_to_def(
&mut self,
- src: InFile<ast::ExternCrate>,
+ src: InFile<&ast::ExternCrate>,
) -> Option<ExternCrateId> {
self.to_def(src, keys::EXTERN_CRATE)
}
#[allow(dead_code)]
- pub(super) fn use_to_def(&mut self, src: InFile<ast::Use>) -> Option<UseId> {
+ pub(super) fn use_to_def(&mut self, src: InFile<&ast::Use>) -> Option<UseId> {
self.to_def(src, keys::USE)
}
pub(super) fn adt_to_def(
&mut self,
- InFile { file_id, value }: InFile<ast::Adt>,
+ InFile { file_id, value }: InFile<&ast::Adt>,
) -> Option<AdtId> {
match value {
ast::Adt::Enum(it) => self.enum_to_def(InFile::new(file_id, it)).map(AdtId::EnumId),
@@ -238,11 +256,11 @@ impl SourceToDefCtx<'_, '_> {
}
pub(super) fn bind_pat_to_def(
&mut self,
- src: InFile<ast::IdentPat>,
+ src: InFile<&ast::IdentPat>,
) -> Option<(DefWithBodyId, BindingId)> {
- let container = self.find_pat_or_label_container(src.syntax())?;
+ let container = self.find_pat_or_label_container(src.syntax_ref())?;
let (body, source_map) = self.db.body_with_source_map(container);
- let src = src.map(ast::Pat::from);
+ let src = src.cloned().map(ast::Pat::from);
let pat_id = source_map.node_pat(src.as_ref())?;
// the pattern could resolve to a constant, verify that that is not the case
if let crate::Pat::Bind { id, .. } = body[pat_id] {
@@ -253,25 +271,33 @@ impl SourceToDefCtx<'_, '_> {
}
pub(super) fn self_param_to_def(
&mut self,
- src: InFile<ast::SelfParam>,
+ src: InFile<&ast::SelfParam>,
) -> Option<(DefWithBodyId, BindingId)> {
- let container = self.find_pat_or_label_container(src.syntax())?;
+ let container = self.find_pat_or_label_container(src.syntax_ref())?;
let body = self.db.body(container);
Some((container, body.self_param?))
}
pub(super) fn label_to_def(
&mut self,
- src: InFile<ast::Label>,
+ src: InFile<&ast::Label>,
) -> Option<(DefWithBodyId, LabelId)> {
- let container = self.find_pat_or_label_container(src.syntax())?;
+ let container = self.find_pat_or_label_container(src.syntax_ref())?;
let (_body, source_map) = self.db.body_with_source_map(container);
- let label_id = source_map.node_label(src.as_ref())?;
+ let label_id = source_map.node_label(src)?;
Some((container, label_id))
}
- pub(super) fn item_to_macro_call(&mut self, src: InFile<ast::Item>) -> Option<MacroCallId> {
- let map = self.dyn_map(src.as_ref())?;
- map[keys::ATTR_MACRO_CALL].get(&src.value).copied()
+ pub(super) fn item_to_macro_call(&mut self, src: InFile<&ast::Item>) -> Option<MacroCallId> {
+ let map = self.dyn_map(src)?;
+ map[keys::ATTR_MACRO_CALL].get(&AstPtr::new(src.value)).copied()
+ }
+
+ pub(super) fn macro_call_to_macro_call(
+ &mut self,
+ src: InFile<&ast::MacroCall>,
+ ) -> Option<MacroCallId> {
+ let map = self.dyn_map(src)?;
+ map[keys::MACRO_CALL].get(&AstPtr::new(src.value)).copied()
}
/// (AttrId, derive attribute call id, derive call ids)
@@ -282,7 +308,7 @@ impl SourceToDefCtx<'_, '_> {
) -> Option<(AttrId, MacroCallId, &[Option<MacroCallId>])> {
let map = self.dyn_map(item)?;
map[keys::DERIVE_MACRO_CALL]
- .get(&src.value)
+ .get(&AstPtr::new(&src.value))
.map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids))
}
@@ -292,10 +318,10 @@ impl SourceToDefCtx<'_, '_> {
fn to_def<Ast: AstNode + 'static, ID: Copy + 'static>(
&mut self,
- src: InFile<Ast>,
+ src: InFile<&Ast>,
key: Key<Ast, ID>,
) -> Option<ID> {
- self.dyn_map(src.as_ref())?[key].get(&src.value).copied()
+ self.dyn_map(src)?[key].get(&AstPtr::new(src.value)).copied()
}
fn dyn_map<Ast: AstNode + 'static>(&mut self, src: InFile<&Ast>) -> Option<&DynMap> {
@@ -305,38 +331,48 @@ impl SourceToDefCtx<'_, '_> {
fn cache_for(&mut self, container: ChildContainer, file_id: HirFileId) -> &DynMap {
let db = self.db;
- self.dynmap_cache
+ self.cache
+ .dynmap_cache
.entry((container, file_id))
.or_insert_with(|| container.child_by_source(db, file_id))
}
- pub(super) fn type_param_to_def(&mut self, src: InFile<ast::TypeParam>) -> Option<TypeParamId> {
- let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+ pub(super) fn type_param_to_def(
+ &mut self,
+ src: InFile<&ast::TypeParam>,
+ ) -> Option<TypeParamId> {
+ let container: ChildContainer = self.find_generic_param_container(src.syntax_ref())?.into();
let dyn_map = self.cache_for(container, src.file_id);
- dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(TypeParamId::from_unchecked)
+ dyn_map[keys::TYPE_PARAM]
+ .get(&AstPtr::new(src.value))
+ .copied()
+ .map(TypeParamId::from_unchecked)
}
pub(super) fn lifetime_param_to_def(
&mut self,
- src: InFile<ast::LifetimeParam>,
+ src: InFile<&ast::LifetimeParam>,
) -> Option<LifetimeParamId> {
- let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+ let container: ChildContainer = self.find_generic_param_container(src.syntax_ref())?.into();
let dyn_map = self.cache_for(container, src.file_id);
- dyn_map[keys::LIFETIME_PARAM].get(&src.value).copied()
+ dyn_map[keys::LIFETIME_PARAM].get(&AstPtr::new(src.value)).copied()
}
pub(super) fn const_param_to_def(
&mut self,
- src: InFile<ast::ConstParam>,
+ src: InFile<&ast::ConstParam>,
) -> Option<ConstParamId> {
- let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+ let container: ChildContainer = self.find_generic_param_container(src.syntax_ref())?.into();
let dyn_map = self.cache_for(container, src.file_id);
- dyn_map[keys::CONST_PARAM].get(&src.value).copied().map(ConstParamId::from_unchecked)
+ dyn_map[keys::CONST_PARAM]
+ .get(&AstPtr::new(src.value))
+ .copied()
+ .map(ConstParamId::from_unchecked)
}
pub(super) fn generic_param_to_def(
&mut self,
- InFile { file_id, value }: InFile<ast::GenericParam>,
+ InFile { file_id, value }: InFile<&ast::GenericParam>,
) -> Option<GenericParamId> {
match value {
ast::GenericParam::ConstParam(it) => {
@@ -351,34 +387,113 @@ impl SourceToDefCtx<'_, '_> {
}
}
- pub(super) fn macro_to_def(&mut self, src: InFile<ast::Macro>) -> Option<MacroId> {
- self.dyn_map(src.as_ref()).and_then(|it| match &src.value {
+ pub(super) fn macro_to_def(&mut self, src: InFile<&ast::Macro>) -> Option<MacroId> {
+ self.dyn_map(src).and_then(|it| match src.value {
ast::Macro::MacroRules(value) => {
- it[keys::MACRO_RULES].get(value).copied().map(MacroId::from)
+ it[keys::MACRO_RULES].get(&AstPtr::new(value)).copied().map(MacroId::from)
+ }
+ ast::Macro::MacroDef(value) => {
+ it[keys::MACRO2].get(&AstPtr::new(value)).copied().map(MacroId::from)
}
- ast::Macro::MacroDef(value) => it[keys::MACRO2].get(value).copied().map(MacroId::from),
})
}
- pub(super) fn proc_macro_to_def(&mut self, src: InFile<ast::Fn>) -> Option<MacroId> {
- self.dyn_map(src.as_ref())
- .and_then(|it| it[keys::PROC_MACRO].get(&src.value).copied().map(MacroId::from))
+ pub(super) fn proc_macro_to_def(&mut self, src: InFile<&ast::Fn>) -> Option<MacroId> {
+ self.dyn_map(src).and_then(|it| {
+ it[keys::PROC_MACRO].get(&AstPtr::new(src.value)).copied().map(MacroId::from)
+ })
}
pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> {
- for container in src.ancestors_with_macros(self.db.upcast()) {
- if let Some(res) = self.container_to_def(container) {
- return Some(res);
- }
+ let _p = tracing::info_span!("find_container").entered();
+ let def =
+ self.ancestors_with_macros(src, |this, container| this.container_to_def(container));
+ if let Some(def) = def {
+ return Some(def);
}
let def = self.file_to_def(src.file_id.original_file(self.db.upcast())).first().copied()?;
Some(def.into())
}
+ /// Skips the attributed item that caused the macro invocation we are climbing up
+ fn ancestors_with_macros<T>(
+ &mut self,
+ node: InFile<&SyntaxNode>,
+ mut cb: impl FnMut(&mut Self, InFile<SyntaxNode>) -> Option<T>,
+ ) -> Option<T> {
+ use hir_expand::MacroFileIdExt;
+ let parent = |this: &mut Self, node: InFile<&SyntaxNode>| match node.value.parent() {
+ Some(parent) => Some(node.with_value(parent)),
+ None => {
+ let macro_file = node.file_id.macro_file()?;
+
+ let expansion_info = this
+ .cache
+ .expansion_info_cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(this.db.upcast()));
+
+ expansion_info.arg().map(|node| node?.parent()).transpose()
+ }
+ };
+ let mut node = node.cloned();
+ while let Some(parent) = parent(self, node.as_ref()) {
+ if let Some(res) = cb(self, parent.clone()) {
+ return Some(res);
+ }
+ node = parent;
+ }
+ None
+ }
+
+ fn find_generic_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option<GenericDefId> {
+ self.ancestors_with_macros(src, |this, InFile { file_id, value }| {
+ let item = ast::Item::cast(value)?;
+ match &item {
+ ast::Item::Fn(it) => this.fn_to_def(InFile::new(file_id, it)).map(Into::into),
+ ast::Item::Struct(it) => {
+ this.struct_to_def(InFile::new(file_id, it)).map(Into::into)
+ }
+ ast::Item::Enum(it) => this.enum_to_def(InFile::new(file_id, it)).map(Into::into),
+ ast::Item::Trait(it) => this.trait_to_def(InFile::new(file_id, it)).map(Into::into),
+ ast::Item::TraitAlias(it) => {
+ this.trait_alias_to_def(InFile::new(file_id, it)).map(Into::into)
+ }
+ ast::Item::TypeAlias(it) => {
+ this.type_alias_to_def(InFile::new(file_id, it)).map(Into::into)
+ }
+ ast::Item::Impl(it) => this.impl_to_def(InFile::new(file_id, it)).map(Into::into),
+ _ => None,
+ }
+ })
+ }
+
+ fn find_pat_or_label_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> {
+ self.ancestors_with_macros(src, |this, InFile { file_id, value }| {
+ let item = match ast::Item::cast(value.clone()) {
+ Some(it) => it,
+ None => {
+ let variant = ast::Variant::cast(value.clone())?;
+ return this
+ .enum_variant_to_def(InFile::new(file_id, &variant))
+ .map(Into::into);
+ }
+ };
+ match &item {
+ ast::Item::Fn(it) => this.fn_to_def(InFile::new(file_id, it)).map(Into::into),
+ ast::Item::Const(it) => this.const_to_def(InFile::new(file_id, it)).map(Into::into),
+ ast::Item::Static(it) => {
+ this.static_to_def(InFile::new(file_id, it)).map(Into::into)
+ }
+ _ => None,
+ }
+ })
+ }
+
fn container_to_def(&mut self, container: InFile<SyntaxNode>) -> Option<ChildContainer> {
let cont = if let Some(item) = ast::Item::cast(container.value.clone()) {
- match item {
+ match &item {
ast::Item::Module(it) => self.module_to_def(container.with_value(it))?.into(),
ast::Item::Trait(it) => self.trait_to_def(container.with_value(it))?.into(),
ast::Item::TraitAlias(it) => {
@@ -413,63 +528,11 @@ impl SourceToDefCtx<'_, '_> {
}
} else {
let it = ast::Variant::cast(container.value)?;
- let def = self.enum_variant_to_def(InFile::new(container.file_id, it))?;
+ let def = self.enum_variant_to_def(InFile::new(container.file_id, &it))?;
DefWithBodyId::from(def).into()
};
Some(cont)
}
-
- fn find_generic_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option<GenericDefId> {
- let ancestors = src.ancestors_with_macros(self.db.upcast());
- for InFile { file_id, value } in ancestors {
- let item = match ast::Item::cast(value) {
- Some(it) => it,
- None => continue,
- };
- let res: GenericDefId = match item {
- ast::Item::Fn(it) => self.fn_to_def(InFile::new(file_id, it))?.into(),
- ast::Item::Struct(it) => self.struct_to_def(InFile::new(file_id, it))?.into(),
- ast::Item::Union(it) => self.union_to_def(InFile::new(file_id, it))?.into(),
- ast::Item::Enum(it) => self.enum_to_def(InFile::new(file_id, it))?.into(),
- ast::Item::Trait(it) => self.trait_to_def(InFile::new(file_id, it))?.into(),
- ast::Item::TraitAlias(it) => {
- self.trait_alias_to_def(InFile::new(file_id, it))?.into()
- }
- ast::Item::TypeAlias(it) => {
- self.type_alias_to_def(InFile::new(file_id, it))?.into()
- }
- ast::Item::Impl(it) => self.impl_to_def(InFile::new(file_id, it))?.into(),
- _ => continue,
- };
- return Some(res);
- }
- None
- }
-
- fn find_pat_or_label_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> {
- let ancestors = src.ancestors_with_macros(self.db.upcast());
- for InFile { file_id, value } in ancestors {
- let item = match ast::Item::cast(value.clone()) {
- Some(it) => it,
- None => {
- if let Some(variant) = ast::Variant::cast(value.clone()) {
- return self
- .enum_variant_to_def(InFile::new(file_id, variant))
- .map(Into::into);
- }
- continue;
- }
- };
- let res: DefWithBodyId = match item {
- ast::Item::Const(it) => self.const_to_def(InFile::new(file_id, it))?.into(),
- ast::Item::Static(it) => self.static_to_def(InFile::new(file_id, it))?.into(),
- ast::Item::Fn(it) => self.fn_to_def(InFile::new(file_id, it))?.into(),
- _ => continue,
- };
- return Some(res);
- }
- None
- }
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
@@ -501,6 +564,7 @@ impl_from! {
impl ChildContainer {
fn child_by_source(self, db: &dyn HirDatabase, file_id: HirFileId) -> DynMap {
+ let _p = tracing::info_span!("ChildContainer::child_by_source").entered();
let db = db.upcast();
match self {
ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id),
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index d229584064..8e71a54f80 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -24,11 +24,12 @@ use hir_def::{
LocalFieldId, Lookup, ModuleDefId, TraitId, VariantId,
};
use hir_expand::{
- builtin_fn_macro::BuiltinFnLikeExpander,
mod_path::path,
- name,
- name::{AsName, Name},
HirFileId, InFile, InMacroFile, MacroFileId, MacroFileIdExt,
+ {
+ name,
+ name::{AsName, Name},
+ },
};
use hir_ty::{
diagnostics::{
@@ -822,8 +823,10 @@ impl SourceAnalyzer {
macro_call: InFile<&ast::MacroCall>,
) -> Option<MacroFileId> {
let krate = self.resolver.krate();
+ // FIXME: This causes us to parse, generally this is the wrong approach for resolving a
+ // macro call to a macro call id!
let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
- self.resolver.resolve_path_as_macro_def(db.upcast(), &path, Some(MacroSubNs::Bang))
+ self.resolver.resolve_path_as_macro_def(db.upcast(), path, Some(MacroSubNs::Bang))
})?;
// why the 64?
Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
@@ -839,37 +842,13 @@ impl SourceAnalyzer {
infer.variant_resolution_for_expr(expr_id)
}
- pub(crate) fn is_unsafe_macro_call(
+ pub(crate) fn is_unsafe_macro_call_expr(
&self,
db: &dyn HirDatabase,
- macro_call: InFile<&ast::MacroCall>,
+ macro_expr: InFile<&ast::MacroExpr>,
) -> bool {
- // check for asm/global_asm
- if let Some(mac) = self.resolve_macro_call(db, macro_call) {
- let ex = match mac.id {
- hir_def::MacroId::Macro2Id(it) => it.lookup(db.upcast()).expander,
- hir_def::MacroId::MacroRulesId(it) => it.lookup(db.upcast()).expander,
- _ => hir_def::MacroExpander::Declarative,
- };
- match ex {
- hir_def::MacroExpander::BuiltIn(e)
- if e == BuiltinFnLikeExpander::Asm || e == BuiltinFnLikeExpander::GlobalAsm =>
- {
- return true
- }
- _ => (),
- }
- }
- let macro_expr = match macro_call
- .map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast))
- .transpose()
- {
- Some(it) => it,
- None => return false,
- };
-
if let (Some((def, body, sm)), Some(infer)) = (&self.def, &self.infer) {
- if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr.as_ref()) {
+ if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr) {
let mut is_unsafe = false;
unsafe_expressions(
db,
diff --git a/crates/hir/src/term_search.rs b/crates/hir/src/term_search.rs
index 7b70cdf459..aa046b02e2 100644
--- a/crates/hir/src/term_search.rs
+++ b/crates/hir/src/term_search.rs
@@ -325,6 +325,7 @@ pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> {
let mut solutions: Vec<Expr> = tactics::trivial(ctx, &defs, &mut lookup).collect();
// Use well known types tactic before iterations as it does not depend on other tactics
solutions.extend(tactics::famous_types(ctx, &defs, &mut lookup));
+ solutions.extend(tactics::assoc_const(ctx, &defs, &mut lookup));
while should_continue() {
lookup.new_round();
diff --git a/crates/hir/src/term_search/expr.rs b/crates/hir/src/term_search/expr.rs
index 8173427cd9..bb687f5e73 100644
--- a/crates/hir/src/term_search/expr.rs
+++ b/crates/hir/src/term_search/expr.rs
@@ -9,8 +9,8 @@ use hir_ty::{
use itertools::Itertools;
use crate::{
- Adt, AsAssocItem, Const, ConstParam, Field, Function, GenericDef, Local, ModuleDef,
- SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant,
+ Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Field, Function, GenericDef, Local,
+ ModuleDef, SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant,
};
/// Helper function to get path to `ModuleDef`
@@ -138,7 +138,17 @@ impl Expr {
let db = sema_scope.db;
let mod_item_path_str = |s, def| mod_item_path_str(s, def, cfg);
match self {
- Expr::Const(it) => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
+ Expr::Const(it) => match it.as_assoc_item(db).map(|it| it.container(db)) {
+ Some(container) => {
+ let container_name = container_name(container, sema_scope, cfg)?;
+ let const_name = it
+ .name(db)
+ .map(|c| c.display(db.upcast()).to_string())
+ .unwrap_or(String::new());
+ Ok(format!("{container_name}::{const_name}"))
+ }
+ None => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
+ },
Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)),
Expr::Local(it) => Ok(it.name(db).display(db.upcast()).to_string()),
Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast()).to_string()),
@@ -153,22 +163,7 @@ impl Expr {
match func.as_assoc_item(db).map(|it| it.container(db)) {
Some(container) => {
- let container_name = match container {
- crate::AssocItemContainer::Trait(trait_) => {
- mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_))?
- }
- crate::AssocItemContainer::Impl(imp) => {
- let self_ty = imp.self_ty(db);
- // Should it be guaranteed that `mod_item_path` always exists?
- match self_ty
- .as_adt()
- .and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg))
- {
- Some(path) => path.display(sema_scope.db.upcast()).to_string(),
- None => self_ty.display(db).to_string(),
- }
- }
- };
+ let container_name = container_name(container, sema_scope, cfg)?;
let fn_name = func.name(db).display(db.upcast()).to_string();
Ok(format!("{container_name}::{fn_name}({args})"))
}
@@ -414,3 +409,25 @@ impl Expr {
matches!(self, Expr::Many(_))
}
}
+
+/// Helper function to find name of container
+fn container_name(
+ container: AssocItemContainer,
+ sema_scope: &SemanticsScope<'_>,
+ cfg: ImportPathConfig,
+) -> Result<String, DisplaySourceCodeError> {
+ let container_name = match container {
+ crate::AssocItemContainer::Trait(trait_) => {
+ mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_), cfg)?
+ }
+ crate::AssocItemContainer::Impl(imp) => {
+ let self_ty = imp.self_ty(sema_scope.db);
+ // Should it be guaranteed that `mod_item_path` always exists?
+ match self_ty.as_adt().and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg)) {
+ Some(path) => path.display(sema_scope.db.upcast()).to_string(),
+ None => self_ty.display(sema_scope.db).to_string(),
+ }
+ }
+ };
+ Ok(container_name)
+}
diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs
index f95ff1dc0f..b738e6af77 100644
--- a/crates/hir/src/term_search/tactics.rs
+++ b/crates/hir/src/term_search/tactics.rs
@@ -5,6 +5,7 @@
//! * `defs` - Set of items in scope at term search target location
//! * `lookup` - Lookup table for types
//! * `should_continue` - Function that indicates when to stop iterating
+//!
//! And they return iterator that yields type trees that unify with the `goal` type.
use std::iter;
@@ -79,7 +80,10 @@ pub(super) fn trivial<'a, DB: HirDatabase>(
lookup.insert(ty.clone(), std::iter::once(expr.clone()));
// Don't suggest local references as they are not valid for return
- if matches!(expr, Expr::Local(_)) && ty.contains_reference(db) {
+ if matches!(expr, Expr::Local(_))
+ && ty.contains_reference(db)
+ && ctx.config.enable_borrowcheck
+ {
return None;
}
@@ -87,6 +91,52 @@ pub(super) fn trivial<'a, DB: HirDatabase>(
})
}
+/// # Associated constant tactic
+///
+/// Attempts to fulfill the goal by trying constants defined as associated items.
+/// Only considers them on types that are in scope.
+///
+/// # Arguments
+/// * `ctx` - Context for the term search
+/// * `defs` - Set of items in scope at term search target location
+/// * `lookup` - Lookup table for types
+///
+/// Returns an iterator that yields elements that unify with `goal`.
+///
+/// _Note that there is no point in calling this tactic in every iteration, as the output does
+/// not depend on the current state of `lookup`_
+pub(super) fn assoc_const<'a, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'a, DB>,
+ defs: &'a FxHashSet<ScopeDef>,
+ lookup: &'a mut LookupTable,
+) -> impl Iterator<Item = Expr> + 'a {
+ let db = ctx.sema.db;
+ let module = ctx.scope.module();
+
+ defs.iter()
+ .filter_map(|def| match def {
+ ScopeDef::ModuleDef(ModuleDef::Adt(it)) => Some(it),
+ _ => None,
+ })
+ .flat_map(|it| Impl::all_for_type(db, it.ty(db)))
+ .filter(|it| !it.is_unsafe(db))
+ .flat_map(|it| it.items(db))
+ .filter(move |it| it.is_visible_from(db, module))
+ .filter_map(AssocItem::as_const)
+ .filter_map(|it| {
+ let expr = Expr::Const(it);
+ let ty = it.ty(db);
+
+ if ty.contains_unknown() {
+ return None;
+ }
+
+ lookup.insert(ty.clone(), std::iter::once(expr.clone()));
+
+ ty.could_unify_with_deeply(db, &ctx.goal).then_some(expr)
+ })
+}
+
/// # Data constructor tactic
///
/// Attempts different data constructors for enums and structs in scope
diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index 24a32086f3..ebfbb83bb9 100644
--- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -105,7 +105,7 @@ fn add_missing_impl_members_inner(
assist_id: &'static str,
label: &'static str,
) -> Option<()> {
- let _p = tracing::span!(tracing::Level::INFO, "add_missing_impl_members_inner").entered();
+ let _p = tracing::info_span!("add_missing_impl_members_inner").entered();
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
let impl_ = ctx.sema.to_def(&impl_def)?;
diff --git a/crates/ide-assists/src/handlers/auto_import.rs b/crates/ide-assists/src/handlers/auto_import.rs
index 3bd003a267..fe895eb259 100644
--- a/crates/ide-assists/src/handlers/auto_import.rs
+++ b/crates/ide-assists/src/handlers/auto_import.rs
@@ -140,7 +140,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
acc.add_group(
&group_label,
assist_id,
- format!("Import `{}`", import_name),
+ format!("Import `{import_name}`"),
range,
|builder| {
let scope = match scope.clone() {
@@ -165,7 +165,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
acc.add_group(
&group_label,
assist_id,
- format!("Import `{} as _`", import_name),
+ format!("Import `{import_name} as _`"),
range,
|builder| {
let scope = match scope.clone() {
@@ -272,8 +272,10 @@ fn module_distance_heuristic(db: &dyn HirDatabase, current: &Module, item: &Modu
// cost of importing from another crate
let crate_boundary_cost = if current.krate() == item.krate() {
0
- } else if item.krate().is_builtin(db) {
+ } else if item.krate().origin(db).is_local() {
2
+ } else if item.krate().is_builtin(db) {
+ 3
} else {
4
};
@@ -366,6 +368,49 @@ pub struct HashMap;
}
#[test]
+ fn prefer_workspace() {
+ let before = r"
+//- /main.rs crate:main deps:foo,bar
+HashMap$0::new();
+
+//- /lib.rs crate:foo
+pub mod module {
+ pub struct HashMap;
+}
+
+//- /lib.rs crate:bar library
+pub struct HashMap;
+ ";
+
+ check_auto_import_order(before, &["Import `foo::module::HashMap`", "Import `bar::HashMap`"])
+ }
+
+ #[test]
+ fn prefer_non_local_over_long_path() {
+ let before = r"
+//- /main.rs crate:main deps:foo,bar
+HashMap$0::new();
+
+//- /lib.rs crate:foo
+pub mod deeply {
+ pub mod nested {
+ pub mod module {
+ pub struct HashMap;
+ }
+ }
+}
+
+//- /lib.rs crate:bar library
+pub struct HashMap;
+ ";
+
+ check_auto_import_order(
+ before,
+ &["Import `bar::HashMap`", "Import `foo::deeply::nested::module::HashMap`"],
+ )
+ }
+
+ #[test]
fn not_applicable_if_scope_inside_macro() {
check_assist_not_applicable(
auto_import,
diff --git a/crates/ide-assists/src/handlers/bool_to_enum.rs b/crates/ide-assists/src/handlers/bool_to_enum.rs
index 71436e6580..c95e24693d 100644
--- a/crates/ide-assists/src/handlers/bool_to_enum.rs
+++ b/crates/ide-assists/src/handlers/bool_to_enum.rs
@@ -228,7 +228,7 @@ fn replace_usages(
edit.replace(
prefix_expr.syntax().text_range(),
- format!("{} == Bool::False", inner_expr),
+ format!("{inner_expr} == Bool::False"),
);
} else if let Some((record_field, initializer)) = name
.as_name_ref()
@@ -275,7 +275,7 @@ fn replace_usages(
} else if let Some(receiver) = find_method_call_expr_usage(&name) {
edit.replace(
receiver.syntax().text_range(),
- format!("({} == Bool::True)", receiver),
+ format!("({receiver} == Bool::True)"),
);
} else if name.syntax().ancestors().find_map(ast::UseTree::cast).is_none() {
// for any other usage in an expression, replace it with a check that it is the true variant
diff --git a/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/crates/ide-assists/src/handlers/destructure_struct_binding.rs
index 9adbdd220c..7618871552 100644
--- a/crates/ide-assists/src/handlers/destructure_struct_binding.rs
+++ b/crates/ide-assists/src/handlers/destructure_struct_binding.rs
@@ -242,7 +242,7 @@ fn generate_field_names(ctx: &AssistContext<'_>, data: &StructEditData) -> Vec<(
.iter()
.enumerate()
.map(|(index, _)| {
- let new_name = new_field_name((format!("_{}", index)).into(), &data.names_in_scope);
+ let new_name = new_field_name((format!("_{index}")).into(), &data.names_in_scope);
(index.to_string().into(), new_name)
})
.collect(),
diff --git a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
index 2725a97de8..28f645171c 100644
--- a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
+++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
@@ -1,4 +1,4 @@
-use crate::{AssistContext, Assists};
+use crate::{utils, AssistContext, Assists};
use hir::DescendPreference;
use ide_db::{
assists::{AssistId, AssistKind},
@@ -8,8 +8,12 @@ use ide_db::{
},
};
use itertools::Itertools;
-use stdx::format_to;
-use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange};
+use syntax::{
+ ast::{self, make},
+ ted, AstNode, AstToken, NodeOrToken,
+ SyntaxKind::WHITESPACE,
+ T,
+};
// Assist: extract_expressions_from_format_string
//
@@ -34,6 +38,7 @@ pub(crate) fn extract_expressions_from_format_string(
) -> Option<()> {
let fmt_string = ctx.find_token_at_offset::<ast::String>()?;
let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
+ let tt_delimiter = tt.left_delimiter_token()?.kind();
let expanded_t = ast::String::cast(
ctx.sema
@@ -61,72 +66,63 @@ pub(crate) fn extract_expressions_from_format_string(
"Extract format expressions",
tt.syntax().text_range(),
|edit| {
- let fmt_range = fmt_string.syntax().text_range();
-
- // Replace old format string with new format string whose arguments have been extracted
- edit.replace(fmt_range, new_fmt);
-
- // Insert cursor at end of format string
- edit.insert(fmt_range.end(), "$0");
+ let tt = edit.make_mut(tt);
// Extract existing arguments in macro
- let tokens =
- tt.token_trees_and_tokens().collect_vec();
-
- let mut existing_args: Vec<String> = vec![];
+ let tokens = tt.token_trees_and_tokens().collect_vec();
- let mut current_arg = String::new();
- if let [_opening_bracket, NodeOrToken::Token(format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(end_bracket)] =
+ let existing_args = if let [_opening_bracket, NodeOrToken::Token(_format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(_end_bracket)] =
tokens.as_slice()
{
- for t in tokens {
- match t {
- NodeOrToken::Node(n) => {
- format_to!(current_arg, "{n}");
- },
- NodeOrToken::Token(t) if t.kind() == COMMA => {
- existing_args.push(current_arg.trim().into());
- current_arg.clear();
- },
- NodeOrToken::Token(t) => {
- current_arg.push_str(t.text());
- },
- }
- }
- existing_args.push(current_arg.trim().into());
+ let args = tokens.split(|it| matches!(it, NodeOrToken::Token(t) if t.kind() == T![,])).map(|arg| {
+ // Strip off leading and trailing whitespace tokens
+ let arg = match arg.split_first() {
+ Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
+ _ => arg,
+ };
+ let arg = match arg.split_last() {
+ Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
+ _ => arg,
+ };
+ arg
+ });
- // delete everything after the format string till end bracket
- // we're going to insert the new arguments later
- edit.delete(TextRange::new(
- format_string.text_range().end(),
- end_bracket.text_range().start(),
- ));
- }
+ args.collect()
+ } else {
+ vec![]
+ };
// Start building the new args
let mut existing_args = existing_args.into_iter();
- let mut args = String::new();
+ let mut new_tt_bits = vec![NodeOrToken::Token(make::tokens::literal(&new_fmt))];
+ let mut placeholder_indexes = vec![];
- let mut placeholder_idx = 1;
+ for arg in extracted_args {
+ if matches!(arg, Arg::Expr(_) | Arg::Placeholder) {
+ // insert ", " before each arg
+ new_tt_bits.extend_from_slice(&[
+ NodeOrToken::Token(make::token(T![,])),
+ NodeOrToken::Token(make::tokens::single_space()),
+ ]);
+ }
- for extracted_args in extracted_args {
- match extracted_args {
- Arg::Expr(s)=> {
- args.push_str(", ");
+ match arg {
+ Arg::Expr(s) => {
// insert arg
- args.push_str(&s);
+ // FIXME: use the crate's edition for parsing
+ let expr = ast::Expr::parse(&s, syntax::Edition::CURRENT).syntax_node();
+ let mut expr_tt = utils::tt_from_syntax(expr);
+ new_tt_bits.append(&mut expr_tt);
}
Arg::Placeholder => {
- args.push_str(", ");
// try matching with existing argument
match existing_args.next() {
- Some(ea) => {
- args.push_str(&ea);
+ Some(arg) => {
+ new_tt_bits.extend_from_slice(arg);
}
None => {
- // insert placeholder
- args.push_str(&format!("${placeholder_idx}"));
- placeholder_idx += 1;
+ placeholder_indexes.push(new_tt_bits.len());
+ new_tt_bits.push(NodeOrToken::Token(make::token(T![_])));
}
}
}
@@ -134,8 +130,31 @@ pub(crate) fn extract_expressions_from_format_string(
}
}
+
// Insert new args
- edit.insert(fmt_range.end(), args);
+ let new_tt = make::token_tree(tt_delimiter, new_tt_bits).clone_for_update();
+ ted::replace(tt.syntax(), new_tt.syntax());
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ // Add placeholder snippets over placeholder args
+ for pos in placeholder_indexes {
+ // Skip the opening delimiter
+ let Some(NodeOrToken::Token(placeholder)) =
+ new_tt.token_trees_and_tokens().skip(1).nth(pos)
+ else {
+ continue;
+ };
+
+ if stdx::always!(placeholder.kind() == T![_]) {
+ edit.add_placeholder_snippet_token(cap, placeholder);
+ }
+ }
+
+ // Add the final tabstop after the format literal
+ if let Some(NodeOrToken::Token(literal)) = new_tt.token_trees_and_tokens().nth(1) {
+ edit.add_tabstop_after_token(cap, literal);
+ }
+ }
},
);
@@ -145,7 +164,7 @@ pub(crate) fn extract_expressions_from_format_string(
#[cfg(test)]
mod tests {
use super::*;
- use crate::tests::check_assist;
+ use crate::tests::{check_assist, check_assist_no_snippet_cap};
#[test]
fn multiple_middle_arg() {
@@ -195,7 +214,7 @@ fn main() {
"#,
r#"
fn main() {
- print!("{} {:b} {} {}"$0, y + 2, x + 1, 2, $1);
+ print!("{} {:b} {} {}"$0, y + 2, x + 1, 2, ${1:_});
}
"#,
);
@@ -292,4 +311,22 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn without_snippets() {
+ check_assist_no_snippet_cap(
+ extract_expressions_from_format_string,
+ r#"
+//- minicore: fmt
+fn main() {
+ print!("{} {x + 1:b} {} {}$0", y + 2, 2);
+}
+"#,
+ r#"
+fn main() {
+ print!("{} {:b} {} {}", y + 2, x + 1, 2, _);
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
index 748acb46ef..19521b8a4b 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_trait.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
@@ -758,7 +758,7 @@ fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option<Ass
}
fn qualified_path(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Path {
- make::path_from_text(&format!("{}::{}", qual_path_ty, path_expr_seg))
+ make::path_from_text(&format!("{qual_path_ty}::{path_expr_seg}"))
}
#[cfg(test)]
diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs
index 0fc122d623..41693855be 100644
--- a/crates/ide-assists/src/handlers/generate_function.rs
+++ b/crates/ide-assists/src/handlers/generate_function.rs
@@ -393,9 +393,9 @@ impl FunctionBuilder {
/// The rule for whether we focus a return type or not (and thus focus the function body),
/// is rather simple:
/// * If we could *not* infer what the return type should be, focus it (so the user can fill-in
-/// the correct return type).
+/// the correct return type).
/// * If we could infer the return type, don't focus it (and thus focus the function body) so the
-/// user can change the `todo!` function body.
+/// user can change the `todo!` function body.
fn make_return_type(
ctx: &AssistContext<'_>,
expr: &ast::Expr,
@@ -918,9 +918,9 @@ fn filter_generic_params(ctx: &AssistContext<'_>, node: SyntaxNode) -> Option<hi
/// Say we have a trait bound `Struct<T>: Trait<U>`. Given `necessary_params`, when is it relevant
/// and when not? Some observations:
/// - When `necessary_params` contains `T`, it's likely that we want this bound, but now we have
-/// an extra param to consider: `U`.
+/// an extra param to consider: `U`.
/// - On the other hand, when `necessary_params` contains `U` (but not `T`), then it's unlikely
-/// that we want this bound because it doesn't really constrain `U`.
+/// that we want this bound because it doesn't really constrain `U`.
///
/// (FIXME?: The latter clause might be overstating. We may want to include the bound if the self
/// type does *not* include generic params at all - like `Option<i32>: From<U>`)
@@ -928,7 +928,7 @@ fn filter_generic_params(ctx: &AssistContext<'_>, node: SyntaxNode) -> Option<hi
/// Can we make this a bit more formal? Let's define "dependency" between generic parameters and
/// trait bounds:
/// - A generic parameter `T` depends on a trait bound if `T` appears in the self type (i.e. left
-/// part) of the bound.
+/// part) of the bound.
/// - A trait bound depends on a generic parameter `T` if `T` appears in the bound.
///
/// Using the notion, what we want is all the bounds that params in `necessary_params`
diff --git a/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
index e90a032f1c..60214aaaf6 100644
--- a/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
+++ b/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
@@ -47,7 +47,7 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
}
// Prepend set_ to fn names.
- fn_names.iter_mut().for_each(|name| *name = format!("set_{}", name));
+ fn_names.iter_mut().for_each(|name| *name = format!("set_{name}"));
// Return early if we've found an existing fn
let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &fn_names)?;
diff --git a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
index 91eaa96b6c..6aa561ad7f 100644
--- a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
@@ -105,7 +105,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
"Generate `IndexMut` impl from this `Index` trait",
target,
|edit| {
- edit.insert(target.start(), format!("$0{}\n\n", impl_def));
+ edit.insert(target.start(), format!("$0{impl_def}\n\n"));
},
)
}
diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs
index 44307ffd75..88fa6dc745 100644
--- a/crates/ide-assists/src/handlers/inline_call.rs
+++ b/crates/ide-assists/src/handlers/inline_call.rs
@@ -368,7 +368,7 @@ fn inline(
_ => None,
})
.for_each(|usage| {
- ted::replace(usage, &this());
+ ted::replace(usage, this());
});
}
}
@@ -483,7 +483,7 @@ fn inline(
cov_mark::hit!(inline_call_inline_direct_field);
field.replace_expr(replacement.clone_for_update());
} else {
- ted::replace(usage.syntax(), &replacement.syntax().clone_for_update());
+ ted::replace(usage.syntax(), replacement.syntax().clone_for_update());
}
};
diff --git a/crates/ide-assists/src/handlers/into_to_qualified_from.rs b/crates/ide-assists/src/handlers/into_to_qualified_from.rs
index dee74afcbe..e405af5533 100644
--- a/crates/ide-assists/src/handlers/into_to_qualified_from.rs
+++ b/crates/ide-assists/src/handlers/into_to_qualified_from.rs
@@ -67,9 +67,9 @@ pub(crate) fn into_to_qualified_from(acc: &mut Assists, ctx: &AssistContext<'_>)
edit.replace(
method_call.syntax().text_range(),
if sc.chars().all(|c| c.is_alphanumeric() || c == ':') {
- format!("{}::from({})", sc, receiver)
+ format!("{sc}::from({receiver})")
} else {
- format!("<{}>::from({})", sc, receiver)
+ format!("<{sc}>::from({receiver})")
},
);
},
diff --git a/crates/ide-assists/src/handlers/merge_nested_if.rs b/crates/ide-assists/src/handlers/merge_nested_if.rs
index 2f3136f027..7a0037fa20 100644
--- a/crates/ide-assists/src/handlers/merge_nested_if.rs
+++ b/crates/ide-assists/src/handlers/merge_nested_if.rs
@@ -86,7 +86,7 @@ pub(crate) fn merge_nested_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
nested_if_cond.syntax().text().to_string()
};
- let replace_cond = format!("{} && {}", cond_text, nested_if_cond_text);
+ let replace_cond = format!("{cond_text} && {nested_if_cond_text}");
edit.replace(cond_range, replace_cond);
edit.replace(then_branch_range, nested_if_then_branch.syntax().text());
diff --git a/crates/ide-assists/src/handlers/remove_parentheses.rs b/crates/ide-assists/src/handlers/remove_parentheses.rs
index 99c55e9ff7..799d36be93 100644
--- a/crates/ide-assists/src/handlers/remove_parentheses.rs
+++ b/crates/ide-assists/src/handlers/remove_parentheses.rs
@@ -48,7 +48,7 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
}
None => false,
};
- let expr = if need_to_add_ws { format!(" {}", expr) } else { expr.to_string() };
+ let expr = if need_to_add_ws { format!(" {expr}") } else { expr.to_string() };
builder.replace(parens.syntax().text_range(), expr)
},
diff --git a/crates/ide-assists/src/handlers/term_search.rs b/crates/ide-assists/src/handlers/term_search.rs
index 94e0519cba..8a9229c549 100644
--- a/crates/ide-assists/src/handlers/term_search.rs
+++ b/crates/ide-assists/src/handlers/term_search.rs
@@ -290,4 +290,34 @@ fn f() { let a = 1; let b: Foo<i32> = todo$0!(); }"#,
fn f() { let a = 1; let b: Foo<i32> = Foo(a); }"#,
)
}
+
+ #[test]
+ fn test_struct_assoc_item() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+struct Foo;
+impl Foo { const FOO: i32 = 0; }
+fn f() { let a: i32 = todo$0!(); }"#,
+ r#"struct Foo;
+impl Foo { const FOO: i32 = 0; }
+fn f() { let a: i32 = Foo::FOO; }"#,
+ )
+ }
+
+ #[test]
+ fn test_trait_assoc_item() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+struct Foo;
+trait Bar { const BAR: i32; }
+impl Bar for Foo { const BAR: i32 = 0; }
+fn f() { let a: i32 = todo$0!(); }"#,
+ r#"struct Foo;
+trait Bar { const BAR: i32; }
+impl Bar for Foo { const BAR: i32 = 0; }
+fn f() { let a: i32 = Foo::BAR; }"#,
+ )
+ }
}
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index bc0c9b79c7..ba6ef1921a 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -14,9 +14,9 @@ use syntax::{
edit_in_place::{AttrsOwnerEdit, Indent, Removable},
make, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace,
},
- ted, AstNode, AstToken, Direction, SourceFile,
+ ted, AstNode, AstToken, Direction, NodeOrToken, SourceFile,
SyntaxKind::*,
- SyntaxNode, TextRange, TextSize, T,
+ SyntaxNode, SyntaxToken, TextRange, TextSize, T,
};
use crate::assist_context::{AssistContext, SourceChangeBuilder};
@@ -916,3 +916,46 @@ pub(crate) fn replace_record_field_expr(
edit.replace(file_range.range, initializer.syntax().text());
}
}
+
+/// Creates a token tree list from a syntax node, creating the needed delimited sub token trees.
+/// Assumes that the input syntax node is a valid syntax tree.
+pub(crate) fn tt_from_syntax(node: SyntaxNode) -> Vec<NodeOrToken<ast::TokenTree, SyntaxToken>> {
+ let mut tt_stack = vec![(None, vec![])];
+
+ for element in node.descendants_with_tokens() {
+ let NodeOrToken::Token(token) = element else { continue };
+
+ match token.kind() {
+ T!['('] | T!['{'] | T!['['] => {
+ // Found an opening delimiter, start a new sub token tree
+ tt_stack.push((Some(token.kind()), vec![]));
+ }
+ T![')'] | T!['}'] | T![']'] => {
+ // Closing a subtree
+ let (delimiter, tt) = tt_stack.pop().expect("unbalanced delimiters");
+ let (_, parent_tt) = tt_stack
+ .last_mut()
+ .expect("parent token tree was closed before it was completed");
+ let closing_delimiter = delimiter.map(|it| match it {
+ T!['('] => T![')'],
+ T!['{'] => T!['}'],
+ T!['['] => T![']'],
+ _ => unreachable!(),
+ });
+ stdx::always!(
+ closing_delimiter == Some(token.kind()),
+ "mismatched opening and closing delimiters"
+ );
+
+ let sub_tt = make::token_tree(delimiter.expect("unbalanced delimiters"), tt);
+ parent_tt.push(NodeOrToken::Node(sub_tt));
+ }
+ _ => {
+ let (_, current_tt) = tt_stack.last_mut().expect("unmatched delimiters");
+ current_tt.push(NodeOrToken::Token(token))
+ }
+ }
+ }
+
+ tt_stack.pop().expect("parent token tree was closed before it was completed").1
+}
diff --git a/crates/ide-assists/src/utils/suggest_name.rs b/crates/ide-assists/src/utils/suggest_name.rs
index 23a06404f3..f2a097afc8 100644
--- a/crates/ide-assists/src/utils/suggest_name.rs
+++ b/crates/ide-assists/src/utils/suggest_name.rs
@@ -254,7 +254,7 @@ fn from_param(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<St
let (idx, _) = arg_list.args().find_position(|it| it == expr).unwrap();
let param = func.params().into_iter().nth(idx)?;
- let pat = param.source(sema.db)?.value.right()?.pat()?;
+ let pat = sema.source(param)?.value.right()?.pat()?;
let name = var_name_from_pat(&pat)?;
normalize(&name.to_string())
}
diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs
index 4005753773..23d93d3b74 100644
--- a/crates/ide-completion/src/completions/env_vars.rs
+++ b/crates/ide-completion/src/completions/env_vars.rs
@@ -1,4 +1,5 @@
-//! Completes environment variables defined by Cargo (https://doc.rust-lang.org/cargo/reference/environment-variables.html)
+//! Completes environment variables defined by Cargo
+//! (<https://doc.rust-lang.org/cargo/reference/environment-variables.html>)
use hir::MacroFileIdExt;
use ide_db::syntax_helpers::node_ext::macro_call_for_string_token;
use syntax::{
diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs
index ddb1aeb371..7281c607da 100644
--- a/crates/ide-completion/src/completions/expr.rs
+++ b/crates/ide-completion/src/completions/expr.rs
@@ -15,7 +15,7 @@ pub(crate) fn complete_expr_path(
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
expr_ctx: &PathExprCtx,
) {
- let _p = tracing::span!(tracing::Level::INFO, "complete_expr_path").entered();
+ let _p = tracing::info_span!("complete_expr_path").entered();
if !ctx.qualifier_ctx.none() {
return;
}
@@ -334,7 +334,7 @@ pub(crate) fn complete_expr_path(
}
pub(crate) fn complete_expr(acc: &mut Completions, ctx: &CompletionContext<'_>) {
- let _p = tracing::span!(tracing::Level::INFO, "complete_expr").entered();
+ let _p = tracing::info_span!("complete_expr").entered();
if !ctx.config.enable_term_search {
return;
diff --git a/crates/ide-completion/src/completions/field.rs b/crates/ide-completion/src/completions/field.rs
index 53fcb7ca6c..b795bbd872 100644
--- a/crates/ide-completion/src/completions/field.rs
+++ b/crates/ide-completion/src/completions/field.rs
@@ -20,9 +20,9 @@ pub(crate) fn complete_field_list_tuple_variant(
} = path_ctx
{
let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
- add_keyword("pub(crate)", "pub(crate)");
- add_keyword("pub(super)", "pub(super)");
- add_keyword("pub", "pub");
+ add_keyword("pub(crate)", "pub(crate) $0");
+ add_keyword("pub(super)", "pub(super) $0");
+ add_keyword("pub", "pub $0");
}
}
@@ -32,8 +32,8 @@ pub(crate) fn complete_field_list_record_variant(
) {
if ctx.qualifier_ctx.vis_node.is_none() {
let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
- add_keyword("pub(crate)", "pub(crate)");
- add_keyword("pub(super)", "pub(super)");
- add_keyword("pub", "pub");
+ add_keyword("pub(crate)", "pub(crate) $0");
+ add_keyword("pub(super)", "pub(super) $0");
+ add_keyword("pub", "pub $0");
}
}
diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs
index d5a4e9ecdc..71d44a57cb 100644
--- a/crates/ide-completion/src/completions/flyimport.rs
+++ b/crates/ide-completion/src/completions/flyimport.rs
@@ -207,8 +207,7 @@ fn import_on_the_fly(
position: SyntaxNode,
potential_import_name: String,
) -> Option<()> {
- let _p =
- tracing::span!(tracing::Level::INFO, "import_on_the_fly", ?potential_import_name).entered();
+ let _p = tracing::info_span!("import_on_the_fly", ?potential_import_name).entered();
ImportScope::find_insert_use_container(&position, &ctx.sema)?;
@@ -296,8 +295,7 @@ fn import_on_the_fly_pat_(
position: SyntaxNode,
potential_import_name: String,
) -> Option<()> {
- let _p = tracing::span!(tracing::Level::INFO, "import_on_the_fly_pat_", ?potential_import_name)
- .entered();
+ let _p = tracing::info_span!("import_on_the_fly_pat_", ?potential_import_name).entered();
ImportScope::find_insert_use_container(&position, &ctx.sema)?;
@@ -347,9 +345,7 @@ fn import_on_the_fly_method(
position: SyntaxNode,
potential_import_name: String,
) -> Option<()> {
- let _p =
- tracing::span!(tracing::Level::INFO, "import_on_the_fly_method", ?potential_import_name)
- .entered();
+ let _p = tracing::info_span!("import_on_the_fly_method", ?potential_import_name).entered();
ImportScope::find_insert_use_container(&position, &ctx.sema)?;
@@ -397,13 +393,8 @@ fn import_assets_for_path(
potential_import_name: &str,
qualifier: Option<ast::Path>,
) -> Option<ImportAssets> {
- let _p = tracing::span!(
- tracing::Level::INFO,
- "import_assets_for_path",
- ?potential_import_name,
- ?qualifier
- )
- .entered();
+ let _p =
+ tracing::info_span!("import_assets_for_path", ?potential_import_name, ?qualifier).entered();
let fuzzy_name_length = potential_import_name.len();
let mut assets_for_path = ImportAssets::for_fuzzy_path(
diff --git a/crates/ide-completion/src/completions/item_list.rs b/crates/ide-completion/src/completions/item_list.rs
index 0a6a8633a2..02298b1e9b 100644
--- a/crates/ide-completion/src/completions/item_list.rs
+++ b/crates/ide-completion/src/completions/item_list.rs
@@ -28,7 +28,7 @@ pub(crate) fn complete_item_list(
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
kind: &ItemListKind,
) {
- let _p = tracing::span!(tracing::Level::INFO, "complete_item_list").entered();
+ let _p = tracing::info_span!("complete_item_list").entered();
if path_ctx.is_trivial_path() {
add_keywords(acc, ctx, Some(kind));
}
@@ -79,7 +79,7 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option
let in_trait = matches!(kind, Some(ItemListKind::Trait));
let in_trait_impl = matches!(kind, Some(ItemListKind::TraitImpl(_)));
let in_inherent_impl = matches!(kind, Some(ItemListKind::Impl));
- let no_qualifiers = ctx.qualifier_ctx.vis_node.is_none();
+ let no_vis_qualifiers = ctx.qualifier_ctx.vis_node.is_none();
let in_block = kind.is_none();
if !in_trait_impl {
@@ -89,7 +89,7 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option
}
if in_item_list {
add_keyword("trait", "trait $1 {\n $0\n}");
- if no_qualifiers {
+ if no_vis_qualifiers {
add_keyword("impl", "impl $1 {\n $0\n}");
}
}
@@ -100,19 +100,20 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option
add_keyword("enum", "enum $1 {\n $0\n}");
add_keyword("mod", "mod $0");
add_keyword("static", "static $0");
+ add_keyword("async", "async $0");
add_keyword("struct", "struct $0");
add_keyword("trait", "trait $1 {\n $0\n}");
add_keyword("union", "union $1 {\n $0\n}");
add_keyword("use", "use $0");
- if no_qualifiers {
+ if no_vis_qualifiers {
add_keyword("impl", "impl $1 {\n $0\n}");
}
}
- if !in_trait && !in_block && no_qualifiers {
- add_keyword("pub(crate)", "pub(crate)");
- add_keyword("pub(super)", "pub(super)");
- add_keyword("pub", "pub");
+ if !in_trait && !in_block && no_vis_qualifiers {
+ add_keyword("pub(crate)", "pub(crate) $0");
+ add_keyword("pub(super)", "pub(super) $0");
+ add_keyword("pub", "pub $0");
}
if in_extern_block {
@@ -126,7 +127,7 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option
}
add_keyword("fn", "fn $1($2) {\n $0\n}");
- add_keyword("unsafe", "unsafe");
+ add_keyword("unsafe", "unsafe $0");
add_keyword("const", "const $0");
}
}
diff --git a/crates/ide-completion/src/completions/keyword.rs b/crates/ide-completion/src/completions/keyword.rs
index 1322c05e30..d79b539882 100644
--- a/crates/ide-completion/src/completions/keyword.rs
+++ b/crates/ide-completion/src/completions/keyword.rs
@@ -14,9 +14,9 @@ pub(crate) fn complete_for_and_where(
match keyword_item {
Item::Impl(it) => {
if it.for_token().is_none() && it.trait_().is_none() && it.self_ty().is_some() {
- add_keyword("for", "for");
+ add_keyword("for", "for $0");
}
- add_keyword("where", "where");
+ add_keyword("where", "where $0");
}
Item::Enum(_)
| Item::Fn(_)
@@ -24,7 +24,7 @@ pub(crate) fn complete_for_and_where(
| Item::Trait(_)
| Item::TypeAlias(_)
| Item::Union(_) => {
- add_keyword("where", "where");
+ add_keyword("where", "where $0");
}
_ => (),
}
diff --git a/crates/ide-completion/src/completions/mod_.rs b/crates/ide-completion/src/completions/mod_.rs
index c2faa2d939..f307ba9eb3 100644
--- a/crates/ide-completion/src/completions/mod_.rs
+++ b/crates/ide-completion/src/completions/mod_.rs
@@ -21,7 +21,7 @@ pub(crate) fn complete_mod(
return None;
}
- let _p = tracing::span!(tracing::Level::INFO, "completion::complete_mod").entered();
+ let _p = tracing::info_span!("completion::complete_mod").entered();
let mut current_module = ctx.module;
// For `mod $0`, `ctx.module` is its parent, but for `mod f$0`, it's `mod f` itself, but we're
diff --git a/crates/ide-completion/src/completions/pattern.rs b/crates/ide-completion/src/completions/pattern.rs
index 40b2c831a5..60cfb7e5a8 100644
--- a/crates/ide-completion/src/completions/pattern.rs
+++ b/crates/ide-completion/src/completions/pattern.rs
@@ -14,25 +14,27 @@ pub(crate) fn complete_pattern(
ctx: &CompletionContext<'_>,
pattern_ctx: &PatternContext,
) {
+ let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
+
match pattern_ctx.parent_pat.as_ref() {
Some(Pat::RangePat(_) | Pat::BoxPat(_)) => (),
Some(Pat::RefPat(r)) => {
if r.mut_token().is_none() {
- acc.add_keyword(ctx, "mut");
+ add_keyword("mut", "mut $0");
}
}
_ => {
let tok = ctx.token.text_range().start();
match (pattern_ctx.ref_token.as_ref(), pattern_ctx.mut_token.as_ref()) {
(None, None) => {
- acc.add_keyword(ctx, "ref");
- acc.add_keyword(ctx, "mut");
+ add_keyword("ref", "ref $0");
+ add_keyword("mut", "mut $0");
}
(None, Some(m)) if tok < m.text_range().start() => {
- acc.add_keyword(ctx, "ref");
+ add_keyword("ref", "ref $0");
}
(Some(r), None) if tok > r.text_range().end() => {
- acc.add_keyword(ctx, "mut");
+ add_keyword("mut", "mut $0");
}
_ => (),
}
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs
index a230edd39b..5041ef8d8a 100644
--- a/crates/ide-completion/src/completions/postfix.rs
+++ b/crates/ide-completion/src/completions/postfix.rs
@@ -668,7 +668,7 @@ fn main() {
check_edit(
"unsafe",
r#"fn main() { let x = true else {panic!()}.$0}"#,
- r#"fn main() { let x = true else {panic!()}.unsafe}"#,
+ r#"fn main() { let x = true else {panic!()}.unsafe $0}"#,
);
}
diff --git a/crates/ide-completion/src/completions/postfix/format_like.rs b/crates/ide-completion/src/completions/postfix/format_like.rs
index fd50fd4e8c..2755329bb3 100644
--- a/crates/ide-completion/src/completions/postfix/format_like.rs
+++ b/crates/ide-completion/src/completions/postfix/format_like.rs
@@ -65,7 +65,7 @@ pub(crate) fn add_format_like_completions(
let exprs = with_placeholders(exprs);
for (label, macro_name) in KINDS {
let snippet = if exprs.is_empty() {
- format!(r#"{}({})"#, macro_name, out)
+ format!(r#"{macro_name}({out})"#)
} else {
format!(r#"{}({}, {})"#, macro_name, out, exprs.join(", "))
};
@@ -108,7 +108,7 @@ mod tests {
for (kind, input, output) in test_vector {
let (parsed_string, _exprs) = parse_format_exprs(input).unwrap();
- let snippet = format!(r#"{}("{}")"#, kind, parsed_string);
+ let snippet = format!(r#"{kind}("{parsed_string}")"#);
assert_eq!(&snippet, output);
}
}
diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs
index 2361d14aae..b071488093 100644
--- a/crates/ide-completion/src/completions/type.rs
+++ b/crates/ide-completion/src/completions/type.rs
@@ -15,7 +15,7 @@ pub(crate) fn complete_type_path(
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
location: &TypeLocation,
) {
- let _p = tracing::span!(tracing::Level::INFO, "complete_type_path").entered();
+ let _p = tracing::info_span!("complete_type_path").entered();
let scope_def_applicable = |def| {
use hir::{GenericParam::*, ModuleDef::*};
diff --git a/crates/ide-completion/src/completions/vis.rs b/crates/ide-completion/src/completions/vis.rs
index e0a959ad0b..0ea5157fb4 100644
--- a/crates/ide-completion/src/completions/vis.rs
+++ b/crates/ide-completion/src/completions/vis.rs
@@ -33,7 +33,7 @@ pub(crate) fn complete_vis_path(
Qualified::No => {
if !has_in_token {
cov_mark::hit!(kw_completion_in);
- acc.add_keyword(ctx, "in");
+ acc.add_keyword_snippet(ctx, "in", "in $0");
}
acc.add_nameref_keywords(ctx);
}
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index db34beadc0..992ca18bb0 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -452,6 +452,7 @@ pub(crate) struct CompletionContext<'a> {
/// - crate-root
/// - mod foo
/// - mod bar
+ ///
/// Here depth will be 2
pub(crate) depth_from_crate_root: usize,
}
@@ -585,8 +586,7 @@ impl CompletionContext<'_> {
/// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items and
/// passes all doc-aliases along, to funnel it into [`Completions::add_path_resolution`].
pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef, Vec<SmolStr>)) {
- let _p =
- tracing::span!(tracing::Level::INFO, "CompletionContext::process_all_names").entered();
+ let _p = tracing::info_span!("CompletionContext::process_all_names").entered();
self.scope.process_all_names(&mut |name, def| {
if self.is_scope_def_hidden(def) {
return;
@@ -597,8 +597,7 @@ impl CompletionContext<'_> {
}
pub(crate) fn process_all_names_raw(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
- let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::process_all_names_raw")
- .entered();
+ let _p = tracing::info_span!("CompletionContext::process_all_names_raw").entered();
self.scope.process_all_names(f);
}
@@ -656,7 +655,7 @@ impl<'a> CompletionContext<'a> {
position @ FilePosition { file_id, offset }: FilePosition,
config: &'a CompletionConfig,
) -> Option<(CompletionContext<'a>, CompletionAnalysis)> {
- let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::new").entered();
+ let _p = tracing::info_span!("CompletionContext::new").entered();
let sema = Semantics::new(db);
let original_file = sema.parse(file_id);
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index f0c6e7a63b..80ce5bd4cf 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -74,7 +74,7 @@ fn expand(
mut fake_ident_token: SyntaxToken,
relative_offset: TextSize,
) -> ExpansionResult {
- let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::expand").entered();
+ let _p = tracing::info_span!("CompletionContext::expand").entered();
let mut derive_ctx = None;
'expansion: loop {
@@ -278,7 +278,7 @@ fn analyze(
original_token: &SyntaxToken,
self_token: &SyntaxToken,
) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
- let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::analyze").entered();
+ let _p = tracing::info_span!("CompletionContext::analyze").entered();
let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
expansion_result;
diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs
index b9a2c383bd..debfefc480 100644
--- a/crates/ide-completion/src/item.rs
+++ b/crates/ide-completion/src/item.rs
@@ -483,7 +483,7 @@ impl Builder {
}
pub(crate) fn build(self, db: &RootDatabase) -> CompletionItem {
- let _p = tracing::span!(tracing::Level::INFO, "item::Builder::build").entered();
+ let _p = tracing::info_span!("item::Builder::build").entered();
let label = self.label;
let mut label_detail = None;
diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs
index a83fa28b87..8323b8f933 100644
--- a/crates/ide-completion/src/lib.rs
+++ b/crates/ide-completion/src/lib.rs
@@ -238,7 +238,7 @@ pub fn resolve_completion_edits(
FilePosition { file_id, offset }: FilePosition,
imports: impl IntoIterator<Item = (String, String)>,
) -> Option<Vec<TextEdit>> {
- let _p = tracing::span!(tracing::Level::INFO, "resolve_completion_edits").entered();
+ let _p = tracing::info_span!("resolve_completion_edits").entered();
let sema = hir::Semantics::new(db);
let original_file = sema.parse(file_id);
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index 8f56379073..ebdc813f3d 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -353,7 +353,7 @@ fn render_resolution_pat(
import_to_add: Option<LocatedImport>,
resolution: ScopeDef,
) -> Builder {
- let _p = tracing::span!(tracing::Level::INFO, "render_resolution_pat").entered();
+ let _p = tracing::info_span!("render_resolution_pat").entered();
use hir::ModuleDef::*;
if let ScopeDef::ModuleDef(Macro(mac)) = resolution {
@@ -371,7 +371,7 @@ fn render_resolution_path(
import_to_add: Option<LocatedImport>,
resolution: ScopeDef,
) -> Builder {
- let _p = tracing::span!(tracing::Level::INFO, "render_resolution_path").entered();
+ let _p = tracing::info_span!("render_resolution_path").entered();
use hir::ModuleDef::*;
match resolution {
@@ -479,7 +479,7 @@ fn render_resolution_simple_(
import_to_add: Option<LocatedImport>,
resolution: ScopeDef,
) -> Builder {
- let _p = tracing::span!(tracing::Level::INFO, "render_resolution_simple_").entered();
+ let _p = tracing::info_span!("render_resolution_simple_").entered();
let db = ctx.db();
let ctx = ctx.import_to_add(import_to_add);
@@ -1799,6 +1799,7 @@ fn go(world: &WorldSnapshot) { go(w$0) }
"#,
expect![[r#"
lc world [type+name+local]
+ ex world [type]
st WorldSnapshot {…} []
st &WorldSnapshot {…} [type]
st WorldSnapshot []
diff --git a/crates/ide-completion/src/render/const_.rs b/crates/ide-completion/src/render/const_.rs
index 0d24882156..a6a1c79e66 100644
--- a/crates/ide-completion/src/render/const_.rs
+++ b/crates/ide-completion/src/render/const_.rs
@@ -6,7 +6,7 @@ use ide_db::SymbolKind;
use crate::{item::CompletionItem, render::RenderContext};
pub(crate) fn render_const(ctx: RenderContext<'_>, const_: hir::Const) -> Option<CompletionItem> {
- let _p = tracing::span!(tracing::Level::INFO, "render_const").entered();
+ let _p = tracing::info_span!("render_const").entered();
render(ctx, const_)
}
diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs
index 1634b0a920..48c9d624f6 100644
--- a/crates/ide-completion/src/render/function.rs
+++ b/crates/ide-completion/src/render/function.rs
@@ -30,7 +30,7 @@ pub(crate) fn render_fn(
local_name: Option<hir::Name>,
func: hir::Function,
) -> Builder {
- let _p = tracing::span!(tracing::Level::INFO, "render_fn").entered();
+ let _p = tracing::info_span!("render_fn").entered();
render(ctx, local_name, func, FuncKind::Function(path_ctx))
}
@@ -41,7 +41,7 @@ pub(crate) fn render_method(
local_name: Option<hir::Name>,
func: hir::Function,
) -> Builder {
- let _p = tracing::span!(tracing::Level::INFO, "render_method").entered();
+ let _p = tracing::info_span!("render_method").entered();
render(ctx, local_name, func, FuncKind::Method(dot_access, receiver))
}
diff --git a/crates/ide-completion/src/render/literal.rs b/crates/ide-completion/src/render/literal.rs
index 9c5cb1e37d..27435307d5 100644
--- a/crates/ide-completion/src/render/literal.rs
+++ b/crates/ide-completion/src/render/literal.rs
@@ -27,7 +27,7 @@ pub(crate) fn render_variant_lit(
variant: hir::Variant,
path: Option<hir::ModPath>,
) -> Option<Builder> {
- let _p = tracing::span!(tracing::Level::INFO, "render_variant_lit").entered();
+ let _p = tracing::info_span!("render_variant_lit").entered();
let db = ctx.db();
let name = local_name.unwrap_or_else(|| variant.name(db));
@@ -41,7 +41,7 @@ pub(crate) fn render_struct_literal(
path: Option<hir::ModPath>,
local_name: Option<hir::Name>,
) -> Option<Builder> {
- let _p = tracing::span!(tracing::Level::INFO, "render_struct_literal").entered();
+ let _p = tracing::info_span!("render_struct_literal").entered();
let db = ctx.db();
let name = local_name.unwrap_or_else(|| strukt.name(db));
diff --git a/crates/ide-completion/src/render/macro_.rs b/crates/ide-completion/src/render/macro_.rs
index 8b81a95abb..a6c8c0e853 100644
--- a/crates/ide-completion/src/render/macro_.rs
+++ b/crates/ide-completion/src/render/macro_.rs
@@ -17,7 +17,7 @@ pub(crate) fn render_macro(
name: hir::Name,
macro_: hir::Macro,
) -> Builder {
- let _p = tracing::span!(tracing::Level::INFO, "render_macro").entered();
+ let _p = tracing::info_span!("render_macro").entered();
render(ctx, *kind == PathKind::Use, *has_macro_bang, *has_call_parens, name, macro_)
}
@@ -27,7 +27,7 @@ pub(crate) fn render_macro_pat(
name: hir::Name,
macro_: hir::Macro,
) -> Builder {
- let _p = tracing::span!(tracing::Level::INFO, "render_macro_pat").entered();
+ let _p = tracing::info_span!("render_macro_pat").entered();
render(ctx, false, false, false, name, macro_)
}
diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs
index c07966f7a7..942670be2a 100644
--- a/crates/ide-completion/src/render/pattern.rs
+++ b/crates/ide-completion/src/render/pattern.rs
@@ -20,7 +20,7 @@ pub(crate) fn render_struct_pat(
strukt: hir::Struct,
local_name: Option<Name>,
) -> Option<CompletionItem> {
- let _p = tracing::span!(tracing::Level::INFO, "render_struct_pat").entered();
+ let _p = tracing::info_span!("render_struct_pat").entered();
let fields = strukt.fields(ctx.db());
let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, strukt)?;
@@ -50,7 +50,7 @@ pub(crate) fn render_variant_pat(
local_name: Option<Name>,
path: Option<&hir::ModPath>,
) -> Option<CompletionItem> {
- let _p = tracing::span!(tracing::Level::INFO, "render_variant_pat").entered();
+ let _p = tracing::info_span!("render_variant_pat").entered();
let fields = variant.fields(ctx.db());
let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, variant)?;
diff --git a/crates/ide-completion/src/render/type_alias.rs b/crates/ide-completion/src/render/type_alias.rs
index 8f80793dd7..47254e6a18 100644
--- a/crates/ide-completion/src/render/type_alias.rs
+++ b/crates/ide-completion/src/render/type_alias.rs
@@ -10,7 +10,7 @@ pub(crate) fn render_type_alias(
ctx: RenderContext<'_>,
type_alias: hir::TypeAlias,
) -> Option<CompletionItem> {
- let _p = tracing::span!(tracing::Level::INFO, "render_type_alias").entered();
+ let _p = tracing::info_span!("render_type_alias").entered();
render(ctx, type_alias, false)
}
@@ -18,7 +18,7 @@ pub(crate) fn render_type_alias_with_eq(
ctx: RenderContext<'_>,
type_alias: hir::TypeAlias,
) -> Option<CompletionItem> {
- let _p = tracing::span!(tracing::Level::INFO, "render_type_alias_with_eq").entered();
+ let _p = tracing::info_span!("render_type_alias_with_eq").entered();
render(ctx, type_alias, true)
}
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index a653314233..545c2a2a8a 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -141,6 +141,7 @@ impl Unit {
un Union Union
ev TupleV(…) TupleV(u32)
bt u32 u32
+ kw async
kw const
kw crate::
kw enum
@@ -217,6 +218,7 @@ fn complete_in_block() {
expect![[r#"
fn foo() fn()
bt u32 u32
+ kw async
kw const
kw crate::
kw enum
@@ -264,6 +266,7 @@ fn complete_after_if_expr() {
expect![[r#"
fn foo() fn()
bt u32 u32
+ kw async
kw const
kw crate::
kw else
@@ -336,6 +339,7 @@ fn completes_in_loop_ctx() {
expect![[r#"
fn my() fn()
bt u32 u32
+ kw async
kw break
kw const
kw continue
@@ -799,6 +803,7 @@ fn foo() { if foo {} $0 }
expect![[r#"
fn foo() fn()
bt u32 u32
+ kw async
kw const
kw crate::
kw else
@@ -839,6 +844,7 @@ fn foo() { if foo {} el$0 }
expect![[r#"
fn foo() fn()
bt u32 u32
+ kw async
kw const
kw crate::
kw else
@@ -927,6 +933,7 @@ fn foo() { if foo {} $0 let x = 92; }
expect![[r#"
fn foo() fn()
bt u32 u32
+ kw async
kw const
kw crate::
kw else
@@ -967,6 +974,7 @@ fn foo() { if foo {} el$0 let x = 92; }
expect![[r#"
fn foo() fn()
bt u32 u32
+ kw async
kw const
kw crate::
kw else
@@ -1007,6 +1015,7 @@ fn foo() { if foo {} el$0 { let x = 92; } }
expect![[r#"
fn foo() fn()
bt u32 u32
+ kw async
kw const
kw crate::
kw else
@@ -1059,6 +1068,7 @@ pub struct UnstableThisShouldNotBeListed;
fn main() fn()
md std
bt u32 u32
+ kw async
kw const
kw crate::
kw enum
@@ -1111,6 +1121,7 @@ pub struct UnstableButWeAreOnNightlyAnyway;
md std
st UnstableButWeAreOnNightlyAnyway UnstableButWeAreOnNightlyAnyway
bt u32 u32
+ kw async
kw const
kw crate::
kw enum
diff --git a/crates/ide-completion/src/tests/item.rs b/crates/ide-completion/src/tests/item.rs
index de3fd05189..09254aed7c 100644
--- a/crates/ide-completion/src/tests/item.rs
+++ b/crates/ide-completion/src/tests/item.rs
@@ -6,6 +6,8 @@ use expect_test::{expect, Expect};
use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
+use super::check_edit;
+
fn check(ra_fixture: &str, expect: Expect) {
let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}{ra_fixture}"));
expect.assert_eq(&actual)
@@ -152,3 +154,90 @@ struct Foo {
"#]],
)
}
+
+#[test]
+fn add_space_after_vis_kw() {
+ check_edit(
+ "pub(crate)",
+ r"
+$0
+",
+ r#"
+pub(crate) $0
+"#,
+ );
+
+ check_edit(
+ "pub",
+ r"
+$0
+",
+ r#"
+pub $0
+"#,
+ );
+
+ check_edit(
+ "pub(super)",
+ r"
+$0
+",
+ r#"
+pub(super) $0
+"#,
+ );
+
+ check_edit(
+ "in",
+ r"
+pub($0)
+",
+ r#"
+pub(in $0)
+"#,
+ );
+}
+
+#[test]
+fn add_space_after_unsafe_kw() {
+ check_edit(
+ "unsafe",
+ r"
+$0
+",
+ r#"
+unsafe $0
+"#,
+ );
+}
+
+#[test]
+fn add_space_after_for_where_kw() {
+ check_edit(
+ "for",
+ r#"
+struct S {}
+
+impl Copy $0
+"#,
+ r#"
+struct S {}
+
+impl Copy for $0
+"#,
+ );
+
+ check_edit(
+ "where",
+ r#"
+struct S {}
+
+impl Copy for S $0
+"#,
+ r#"
+struct S {}
+
+impl Copy for S where $0
+"#,
+ );
+}
diff --git a/crates/ide-completion/src/tests/item_list.rs b/crates/ide-completion/src/tests/item_list.rs
index 2b5b4dd773..c37900478e 100644
--- a/crates/ide-completion/src/tests/item_list.rs
+++ b/crates/ide-completion/src/tests/item_list.rs
@@ -14,6 +14,7 @@ fn in_mod_item_list() {
r#"mod tests { $0 }"#,
expect![[r#"
ma makro!(…) macro_rules! makro
+ kw async
kw const
kw crate::
kw enum
@@ -47,6 +48,7 @@ fn in_source_file_item_list() {
expect![[r#"
ma makro!(…) macro_rules! makro
md module
+ kw async
kw const
kw crate::
kw enum
@@ -79,6 +81,7 @@ fn in_item_list_after_attr() {
expect![[r#"
ma makro!(…) macro_rules! makro
md module
+ kw async
kw const
kw crate::
kw enum
@@ -132,6 +135,7 @@ fn after_visibility() {
check(
r#"pub $0"#,
expect![[r#"
+ kw async
kw const
kw enum
kw extern
@@ -356,6 +360,7 @@ fn after_unit_struct() {
expect![[r#"
ma makro!(…) macro_rules! makro
md module
+ kw async
kw const
kw crate::
kw enum
diff --git a/crates/ide-completion/src/tests/pattern.rs b/crates/ide-completion/src/tests/pattern.rs
index 67cf551fce..8720cb555a 100644
--- a/crates/ide-completion/src/tests/pattern.rs
+++ b/crates/ide-completion/src/tests/pattern.rs
@@ -819,3 +819,34 @@ pub enum Enum {
"#]],
);
}
+
+#[test]
+fn add_space_after_mut_ref_kw() {
+ check_edit(
+ "mut",
+ r#"
+fn foo() {
+ let $0
+}
+"#,
+ r#"
+fn foo() {
+ let mut $0
+}
+"#,
+ );
+
+ check_edit(
+ "ref",
+ r#"
+fn foo() {
+ let $0
+}
+"#,
+ r#"
+fn foo() {
+ let ref $0
+}
+"#,
+ );
+}
diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs
index e64ec74c61..56162bb57b 100644
--- a/crates/ide-completion/src/tests/record.rs
+++ b/crates/ide-completion/src/tests/record.rs
@@ -2,6 +2,8 @@ use expect_test::{expect, Expect};
use crate::tests::completion_list;
+use super::check_edit;
+
fn check(ra_fixture: &str, expect: Expect) {
let actual = completion_list(ra_fixture);
expect.assert_eq(&actual);
@@ -301,3 +303,48 @@ fn foo() {
expect![[r#""#]],
)
}
+
+#[test]
+fn add_space_after_vis_kw() {
+ check_edit(
+ "pub(crate)",
+ r"
+pub(crate) struct S {
+ $0
+}
+",
+ r#"
+pub(crate) struct S {
+ pub(crate) $0
+}
+"#,
+ );
+
+ check_edit(
+ "pub",
+ r"
+pub struct S {
+ $0
+}
+",
+ r#"
+pub struct S {
+ pub $0
+}
+"#,
+ );
+
+ check_edit(
+ "pub(super)",
+ r"
+pub(super) struct S {
+ $0
+}
+",
+ r#"
+pub(super) struct S {
+ pub(super) $0
+}
+"#,
+ );
+}
diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs
index 69d8fe9104..2ae7d37889 100644
--- a/crates/ide-completion/src/tests/special.rs
+++ b/crates/ide-completion/src/tests/special.rs
@@ -1017,6 +1017,7 @@ fn here_we_go() {
fn here_we_go() fn()
st Foo (alias Bar) Foo
bt u32 u32
+ kw async
kw const
kw crate::
kw enum
@@ -1066,6 +1067,7 @@ fn here_we_go() {
fn here_we_go() fn()
st Foo (alias Bar, Qux, Baz) Foo
bt u32 u32
+ kw async
kw const
kw crate::
kw enum
@@ -1188,6 +1190,7 @@ fn bar() { qu$0 }
fn bar() fn()
fn foo() (alias qux) fn()
bt u32 u32
+ kw async
kw const
kw crate::
kw enum
@@ -1443,6 +1446,7 @@ fn foo() {
expect![[r#"
fn foo() fn()
bt u32 u32
+ kw async
kw const
kw crate::
kw enum
diff --git a/crates/ide-db/src/active_parameter.rs b/crates/ide-db/src/active_parameter.rs
index 98d2e81754..088d2ec5e3 100644
--- a/crates/ide-db/src/active_parameter.rs
+++ b/crates/ide-db/src/active_parameter.rs
@@ -28,7 +28,7 @@ impl ActiveParameter {
return None;
}
let param = params.swap_remove(idx);
- Some(ActiveParameter { ty: param.ty().clone(), src: param.source(sema.db) })
+ Some(ActiveParameter { ty: param.ty().clone(), src: sema.source(param) })
}
pub fn ident(&self) -> Option<ast::Name> {
diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs
index ce9a5f0dd2..74c8fc96d4 100644
--- a/crates/ide-db/src/apply_change.rs
+++ b/crates/ide-db/src/apply_change.rs
@@ -15,13 +15,12 @@ use crate::{symbol_index::SymbolsDatabase, ChangeWithProcMacros, RootDatabase};
impl RootDatabase {
pub fn request_cancellation(&mut self) {
- let _p =
- tracing::span!(tracing::Level::INFO, "RootDatabase::request_cancellation").entered();
+ let _p = tracing::info_span!("RootDatabase::request_cancellation").entered();
self.synthetic_write(Durability::LOW);
}
pub fn apply_change(&mut self, change: ChangeWithProcMacros) {
- let _p = tracing::span!(tracing::Level::INFO, "RootDatabase::apply_change").entered();
+ let _p = tracing::info_span!("RootDatabase::apply_change").entered();
self.request_cancellation();
tracing::trace!("apply_change {:?}", change);
if let Some(roots) = &change.source_change.roots {
diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs
index 634277e869..a75a708d95 100644
--- a/crates/ide-db/src/defs.rs
+++ b/crates/ide-db/src/defs.rs
@@ -413,7 +413,7 @@ impl NameClass {
}
pub fn classify(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<NameClass> {
- let _p = tracing::span!(tracing::Level::INFO, "NameClass::classify").entered();
+ let _p = tracing::info_span!("NameClass::classify").entered();
let parent = name.syntax().parent()?;
@@ -505,8 +505,7 @@ impl NameClass {
sema: &Semantics<'_, RootDatabase>,
lifetime: &ast::Lifetime,
) -> Option<NameClass> {
- let _p = tracing::span!(tracing::Level::INFO, "NameClass::classify_lifetime", ?lifetime)
- .entered();
+ let _p = tracing::info_span!("NameClass::classify_lifetime", ?lifetime).entered();
let parent = lifetime.syntax().parent()?;
if let Some(it) = ast::LifetimeParam::cast(parent.clone()) {
@@ -597,8 +596,7 @@ impl NameRefClass {
sema: &Semantics<'_, RootDatabase>,
name_ref: &ast::NameRef,
) -> Option<NameRefClass> {
- let _p =
- tracing::span!(tracing::Level::INFO, "NameRefClass::classify", ?name_ref).entered();
+ let _p = tracing::info_span!("NameRefClass::classify", ?name_ref).entered();
let parent = name_ref.syntax().parent()?;
@@ -697,8 +695,7 @@ impl NameRefClass {
sema: &Semantics<'_, RootDatabase>,
lifetime: &ast::Lifetime,
) -> Option<NameRefClass> {
- let _p = tracing::span!(tracing::Level::INFO, "NameRefClass::classify_lifetime", ?lifetime)
- .entered();
+ let _p = tracing::info_span!("NameRefClass::classify_lifetime", ?lifetime).entered();
if lifetime.text() == "'static" {
return Some(NameRefClass::Definition(Definition::BuiltinLifetime(StaticLifetime)));
}
diff --git a/crates/ide-db/src/famous_defs.rs b/crates/ide-db/src/famous_defs.rs
index e445e9fb68..51ac0b7191 100644
--- a/crates/ide-db/src/famous_defs.rs
+++ b/crates/ide-db/src/famous_defs.rs
@@ -15,7 +15,7 @@ use crate::RootDatabase;
/// you'd want to include minicore (see `test_utils::MiniCore`) declaration at
/// the start of your tests:
///
-/// ```
+/// ```text
/// //- minicore: iterator, ord, derive
/// ```
pub struct FamousDefs<'a, 'b>(pub &'a Semantics<'b, RootDatabase>, pub Crate);
diff --git a/crates/ide-db/src/helpers.rs b/crates/ide-db/src/helpers.rs
index db44b1e723..c069e1c25b 100644
--- a/crates/ide-db/src/helpers.rs
+++ b/crates/ide-db/src/helpers.rs
@@ -35,13 +35,13 @@ pub fn pick_token<T: AstToken>(mut tokens: TokenAtOffset<SyntaxToken>) -> Option
/// Converts the mod path struct into its ast representation.
pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path {
- let _p = tracing::span!(tracing::Level::INFO, "mod_path_to_ast").entered();
+ let _p = tracing::info_span!("mod_path_to_ast").entered();
let mut segments = Vec::new();
let mut is_abs = false;
match path.kind {
hir::PathKind::Plain => {}
- hir::PathKind::Super(0) => segments.push(make::path_segment_self()),
+ hir::PathKind::SELF => segments.push(make::path_segment_self()),
hir::PathKind::Super(n) => segments.extend((0..n).map(|_| make::path_segment_super())),
hir::PathKind::DollarCrate(_) | hir::PathKind::Crate => {
segments.push(make::path_segment_crate())
diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs
index 4caecb3f2f..088717a66e 100644
--- a/crates/ide-db/src/imports/import_assets.rs
+++ b/crates/ide-db/src/imports/import_assets.rs
@@ -208,7 +208,7 @@ impl ImportAssets {
cfg: ImportPathConfig,
prefix_kind: PrefixKind,
) -> impl Iterator<Item = LocatedImport> {
- let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::search_for_imports").entered();
+ let _p = tracing::info_span!("ImportAssets::search_for_imports").entered();
self.search_for(sema, Some(prefix_kind), cfg)
}
@@ -218,8 +218,7 @@ impl ImportAssets {
sema: &Semantics<'_, RootDatabase>,
cfg: ImportPathConfig,
) -> impl Iterator<Item = LocatedImport> {
- let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::search_for_relative_paths")
- .entered();
+ let _p = tracing::info_span!("ImportAssets::search_for_relative_paths").entered();
self.search_for(sema, None, cfg)
}
@@ -259,7 +258,7 @@ impl ImportAssets {
prefixed: Option<PrefixKind>,
cfg: ImportPathConfig,
) -> impl Iterator<Item = LocatedImport> {
- let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::search_for").entered();
+ let _p = tracing::info_span!("ImportAssets::search_for").entered();
let scope = match sema.scope(&self.candidate_node) {
Some(it) => it,
@@ -303,7 +302,7 @@ impl ImportAssets {
}
fn scope_definitions(&self, sema: &Semantics<'_, RootDatabase>) -> FxHashSet<ScopeDef> {
- let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::scope_definitions").entered();
+ let _p = tracing::info_span!("ImportAssets::scope_definitions").entered();
let mut scope_definitions = FxHashSet::default();
if let Some(scope) = sema.scope(&self.candidate_node) {
scope.process_all_names(&mut |_, scope_def| {
@@ -321,8 +320,7 @@ fn path_applicable_imports(
mod_path: impl Fn(ItemInNs) -> Option<ModPath> + Copy,
scope_filter: impl Fn(ItemInNs) -> bool + Copy,
) -> FxHashSet<LocatedImport> {
- let _p =
- tracing::span!(tracing::Level::INFO, "ImportAssets::path_applicable_imports").entered();
+ let _p = tracing::info_span!("ImportAssets::path_applicable_imports").entered();
match &path_candidate.qualifier {
None => {
@@ -369,7 +367,7 @@ fn import_for_item(
original_item: ItemInNs,
scope_filter: impl Fn(ItemInNs) -> bool,
) -> Option<LocatedImport> {
- let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::import_for_item").entered();
+ let _p = tracing::info_span!("ImportAssets::import_for_item").entered();
let [first_segment, ..] = unresolved_qualifier else { return None };
let item_as_assoc = item_as_assoc(db, original_item);
@@ -503,7 +501,7 @@ fn trait_applicable_items(
mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
scope_filter: impl Fn(hir::Trait) -> bool,
) -> FxHashSet<LocatedImport> {
- let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::trait_applicable_items").entered();
+ let _p = tracing::info_span!("ImportAssets::trait_applicable_items").entered();
let db = sema.db;
diff --git a/crates/ide-db/src/imports/insert_use.rs b/crates/ide-db/src/imports/insert_use.rs
index 026d4e36f9..9102980677 100644
--- a/crates/ide-db/src/imports/insert_use.rs
+++ b/crates/ide-db/src/imports/insert_use.rs
@@ -194,7 +194,7 @@ fn insert_use_with_alias_option(
cfg: &InsertUseConfig,
alias: Option<ast::Rename>,
) {
- let _p = tracing::span!(tracing::Level::INFO, "insert_use_with_alias_option").entered();
+ let _p = tracing::info_span!("insert_use_with_alias_option").entered();
let mut mb = match cfg.granularity {
ImportGranularity::Crate => Some(MergeBehavior::Crate),
ImportGranularity::Module => Some(MergeBehavior::Module),
diff --git a/crates/ide-db/src/items_locator.rs b/crates/ide-db/src/items_locator.rs
index 1b6f650768..47549a1d00 100644
--- a/crates/ide-db/src/items_locator.rs
+++ b/crates/ide-db/src/items_locator.rs
@@ -21,7 +21,7 @@ pub fn items_with_name<'a>(
assoc_item_search: AssocSearchMode,
) -> impl Iterator<Item = ItemInNs> + 'a {
let krate_name = krate.display_name(sema.db).map(|name| name.to_string());
- let _p = tracing::span!(tracing::Level::INFO, "items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate_name)
+ let _p = tracing::info_span!("items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate_name)
.entered();
let prefix = matches!(name, NameToImport::Prefix(..));
@@ -72,7 +72,7 @@ fn find_items<'a>(
local_query: symbol_index::Query,
external_query: import_map::Query,
) -> impl Iterator<Item = ItemInNs> + 'a {
- let _p = tracing::span!(tracing::Level::INFO, "find_items").entered();
+ let _p = tracing::info_span!("find_items").entered();
let db = sema.db;
// NOTE: `external_query` includes `assoc_item_search`, so we don't need to
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs
index 58077f636b..62104fb7dc 100644
--- a/crates/ide-db/src/prime_caches.rs
+++ b/crates/ide-db/src/prime_caches.rs
@@ -11,9 +11,9 @@ use hir::db::DefDatabase;
use crate::{
base_db::{
salsa::{Database, ParallelDatabase, Snapshot},
- Cancelled, CrateGraph, CrateId, SourceDatabase, SourceDatabaseExt,
+ Cancelled, CrateId, SourceDatabase, SourceDatabaseExt,
},
- FxHashSet, FxIndexMap, RootDatabase,
+ FxIndexMap, RootDatabase,
};
/// We're indexing many crates.
@@ -29,26 +29,17 @@ pub struct ParallelPrimeCachesProgress {
pub fn parallel_prime_caches(
db: &RootDatabase,
- num_worker_threads: u8,
+ num_worker_threads: usize,
cb: &(dyn Fn(ParallelPrimeCachesProgress) + Sync),
) {
- let _p = tracing::span!(tracing::Level::INFO, "parallel_prime_caches").entered();
+ let _p = tracing::info_span!("parallel_prime_caches").entered();
let graph = db.crate_graph();
let mut crates_to_prime = {
- let crate_ids = compute_crates_to_prime(db, &graph);
-
let mut builder = topologic_sort::TopologicalSortIter::builder();
- for &crate_id in &crate_ids {
- let crate_data = &graph[crate_id];
- let dependencies = crate_data
- .dependencies
- .iter()
- .map(|d| d.crate_id)
- .filter(|i| crate_ids.contains(i));
-
- builder.add(crate_id, dependencies);
+ for crate_id in graph.iter() {
+ builder.add(crate_id, graph[crate_id].dependencies.iter().map(|d| d.crate_id));
}
builder.build()
@@ -62,13 +53,20 @@ pub fn parallel_prime_caches(
let (work_sender, progress_receiver) = {
let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
let (work_sender, work_receiver) = crossbeam_channel::unbounded();
+ let graph = graph.clone();
let prime_caches_worker = move |db: Snapshot<RootDatabase>| {
while let Ok((crate_id, crate_name)) = work_receiver.recv() {
progress_sender
.send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?;
- // This also computes the DefMap
- db.import_map(crate_id);
+ let file_id = graph[crate_id].root_file_id;
+ let root_id = db.file_source_root(file_id);
+ if db.source_root(root_id).is_library {
+ db.crate_def_map(crate_id);
+ } else {
+ // This also computes the DefMap
+ db.import_map(crate_id);
+ }
progress_sender.send(ParallelPrimeCacheWorkerProgress::EndCrate { crate_id })?;
}
@@ -76,13 +74,13 @@ pub fn parallel_prime_caches(
Ok::<_, crossbeam_channel::SendError<_>>(())
};
- for _ in 0..num_worker_threads {
+ for id in 0..num_worker_threads {
let worker = prime_caches_worker.clone();
let db = db.snapshot();
stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
.allow_leak(true)
- .name("PrimeCaches".to_owned())
+ .name(format!("PrimeCaches#{id}"))
.spawn(move || Cancelled::catch(|| worker(db)))
.expect("failed to spawn thread");
}
@@ -96,7 +94,7 @@ pub fn parallel_prime_caches(
// an index map is used to preserve ordering so we can sort the progress report in order of
// "longest crate to index" first
let mut crates_currently_indexing =
- FxIndexMap::with_capacity_and_hasher(num_worker_threads as _, Default::default());
+ FxIndexMap::with_capacity_and_hasher(num_worker_threads, Default::default());
while crates_done < crates_total {
db.unwind_if_cancelled();
@@ -144,19 +142,3 @@ pub fn parallel_prime_caches(
cb(progress);
}
}
-
-fn compute_crates_to_prime(db: &RootDatabase, graph: &CrateGraph) -> FxHashSet<CrateId> {
- // We're only interested in the workspace crates and the `ImportMap`s of their direct
- // dependencies, though in practice the latter also compute the `DefMap`s.
- // We don't prime transitive dependencies because they're generally not visible in
- // the current workspace.
- graph
- .iter()
- .filter(|&id| {
- let file_id = graph[id].root_file_id;
- let root_id = db.file_source_root(file_id);
- !db.source_root(root_id).is_library
- })
- .flat_map(|id| graph[id].dependencies.iter().map(|krate| krate.crate_id))
- .collect()
-}
diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs
index 288d56b534..484c65c2b0 100644
--- a/crates/ide-db/src/rename.rs
+++ b/crates/ide-db/src/rename.rs
@@ -24,7 +24,7 @@ use std::fmt;
use base_db::{AnchoredPathBuf, FileId, FileRange};
use either::Either;
-use hir::{FieldSource, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics};
+use hir::{FieldSource, HirFileIdExt, InFile, ModuleSource, Semantics};
use span::SyntaxContextId;
use stdx::{never, TupleExt};
use syntax::{
@@ -109,7 +109,7 @@ impl Definition {
let syn_ctx_is_root = |(range, ctx): (_, SyntaxContextId)| ctx.is_root().then_some(range);
let res = match self {
Definition::Macro(mac) => {
- let src = mac.source(sema.db)?;
+ let src = sema.source(mac)?;
let name = match &src.value {
Either::Left(it) => it.name()?,
Either::Right(it) => it.name()?,
@@ -119,7 +119,7 @@ impl Definition {
.and_then(syn_ctx_is_root)
}
Definition::Field(field) => {
- let src = field.source(sema.db)?;
+ let src = sema.source(field)?;
match &src.value {
FieldSource::Named(record_field) => {
let name = record_field.name()?;
@@ -154,18 +154,18 @@ impl Definition {
}
Definition::GenericParam(generic_param) => match generic_param {
hir::GenericParam::LifetimeParam(lifetime_param) => {
- let src = lifetime_param.source(sema.db)?;
+ let src = sema.source(lifetime_param)?;
src.with_value(src.value.lifetime()?.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
}
_ => {
- let x = match generic_param {
+ let param = match generic_param {
hir::GenericParam::TypeParam(it) => it.merge(),
hir::GenericParam::ConstParam(it) => it.merge(),
hir::GenericParam::LifetimeParam(_) => return None,
};
- let src = x.source(sema.db)?;
+ let src = sema.source(param)?;
let name = match &src.value {
Either::Left(x) => x.name()?,
Either::Right(_) => return None,
@@ -176,14 +176,14 @@ impl Definition {
}
},
Definition::Label(label) => {
- let src = label.source(sema.db);
+ let src = sema.source(label)?;
let lifetime = src.value.lifetime()?;
src.with_value(lifetime.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
}
Definition::ExternCrateDecl(it) => {
- let src = it.source(sema.db)?;
+ let src = sema.source(it)?;
if let Some(rename) = src.value.rename() {
let name = rename.name()?;
src.with_value(name.syntax())
@@ -212,10 +212,10 @@ impl Definition {
sema: &Semantics<'_, RootDatabase>,
) -> Option<(FileRange, SyntaxContextId)>
where
- D: HasSource,
+ D: hir::HasSource,
D::Ast: ast::HasName,
{
- let src = def.source(sema.db)?;
+ let src = sema.source(def)?;
let name = src.value.name()?;
src.with_value(name.syntax()).original_file_range_opt(sema.db)
}
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 8f633065f3..b62f34f415 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -266,7 +266,7 @@ impl IntoIterator for SearchScope {
impl Definition {
fn search_scope(&self, db: &RootDatabase) -> SearchScope {
- let _p = tracing::span!(tracing::Level::INFO, "search_scope").entered();
+ let _p = tracing::info_span!("search_scope").entered();
if let Definition::BuiltinType(_) = self {
return SearchScope::crate_graph(db);
@@ -434,7 +434,7 @@ impl<'a> FindUsages<'a> {
}
pub fn search(&self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) {
- let _p = tracing::span!(tracing::Level::INFO, "FindUsages:search").entered();
+ let _p = tracing::info_span!("FindUsages:search").entered();
let sema = self.sema;
let search_scope = {
diff --git a/crates/ide-db/src/source_change.rs b/crates/ide-db/src/source_change.rs
index f59d8d08c8..7ef7b7ae1d 100644
--- a/crates/ide-db/src/source_change.rs
+++ b/crates/ide-db/src/source_change.rs
@@ -338,6 +338,12 @@ impl SourceChangeBuilder {
self.add_snippet(PlaceSnippet::Over(node.syntax().clone().into()))
}
+ /// Adds a snippet to move the cursor selected over `token`
+ pub fn add_placeholder_snippet_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
+ assert!(token.parent().is_some());
+ self.add_snippet(PlaceSnippet::Over(token.into()))
+ }
+
/// Adds a snippet to move the cursor selected over `nodes`
///
/// This allows for renaming newly generated items without having to go
diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs
index 12085f9ebd..c70aed4c43 100644
--- a/crates/ide-db/src/symbol_index.rs
+++ b/crates/ide-db/src/symbol_index.rs
@@ -124,7 +124,7 @@ pub trait SymbolsDatabase: HirDatabase + SourceDatabaseExt + Upcast<dyn HirDatab
}
fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Arc<SymbolIndex> {
- let _p = tracing::span!(tracing::Level::INFO, "library_symbols").entered();
+ let _p = tracing::info_span!("library_symbols").entered();
let mut symbol_collector = SymbolCollector::new(db.upcast());
@@ -142,14 +142,14 @@ fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Ar
}
fn module_symbols(db: &dyn SymbolsDatabase, module: Module) -> Arc<SymbolIndex> {
- let _p = tracing::span!(tracing::Level::INFO, "module_symbols").entered();
+ let _p = tracing::info_span!("module_symbols").entered();
let symbols = SymbolCollector::collect_module(db.upcast(), module);
Arc::new(SymbolIndex::new(symbols))
}
pub fn crate_symbols(db: &dyn SymbolsDatabase, krate: Crate) -> Box<[Arc<SymbolIndex>]> {
- let _p = tracing::span!(tracing::Level::INFO, "crate_symbols").entered();
+ let _p = tracing::info_span!("crate_symbols").entered();
krate.modules(db.upcast()).into_iter().map(|module| db.module_symbols(module)).collect()
}
@@ -192,7 +192,8 @@ impl<DB> std::ops::Deref for Snap<DB> {
// Note that filtering does not currently work in VSCode due to the editor never
// sending the special symbols to the language server. Instead, you can configure
// the filtering via the `rust-analyzer.workspace.symbol.search.scope` and
-// `rust-analyzer.workspace.symbol.search.kind` settings.
+// `rust-analyzer.workspace.symbol.search.kind` settings. Symbols prefixed
+// with `__` are hidden from the search results unless configured otherwise.
//
// |===
// | Editor | Shortcut
@@ -200,7 +201,7 @@ impl<DB> std::ops::Deref for Snap<DB> {
// | VS Code | kbd:[Ctrl+T]
// |===
pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
- let _p = tracing::span!(tracing::Level::INFO, "world_symbols", query = ?query.query).entered();
+ let _p = tracing::info_span!("world_symbols", query = ?query.query).entered();
let indices: Vec<_> = if query.libs {
db.library_roots()
@@ -320,7 +321,7 @@ impl Query {
indices: &'sym [Arc<SymbolIndex>],
cb: impl FnMut(&'sym FileSymbol),
) {
- let _p = tracing::span!(tracing::Level::INFO, "symbol_index::Query::search").entered();
+ let _p = tracing::info_span!("symbol_index::Query::search").entered();
let mut op = fst::map::OpBuilder::new();
match self.mode {
SearchMode::Exact => {
@@ -356,6 +357,7 @@ impl Query {
mut stream: fst::map::Union<'_>,
mut cb: impl FnMut(&'sym FileSymbol),
) {
+ let ignore_underscore_prefixed = !self.query.starts_with("__");
while let Some((_, indexed_values)) = stream.next() {
for &IndexedValue { index, value } in indexed_values {
let symbol_index = &indices[index];
@@ -374,6 +376,10 @@ impl Query {
if non_type_for_type_only_query || !self.matches_assoc_mode(symbol.is_assoc) {
continue;
}
+ // Hide symbols that start with `__` unless the query starts with `__`
+ if ignore_underscore_prefixed && symbol.name.starts_with("__") {
+ continue;
+ }
if self.mode.check(&self.query, self.case_sensitive, &symbol.name) {
cb(symbol);
}
diff --git a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
index 132b93df10..2b8779044f 100644
--- a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
+++ b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
@@ -37,7 +37,7 @@ impl State {
self.names.insert(name.clone(), 1);
1
};
- make::name(&format!("{}{}", name, count))
+ make::name(&format!("{name}{count}"))
}
fn serde_derive(&self) -> String {
diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs
index 6a957ac1c9..f8780fc0da 100644
--- a/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -11,7 +11,6 @@ pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) ->
d.message.clone(),
display_range,
)
- .experimental()
}
// Diagnostic: macro-error
@@ -26,7 +25,6 @@ pub(crate) fn macro_def_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroDefErr
d.message.clone(),
display_range,
)
- .experimental()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index 1b29e0a374..30dd26a118 100644
--- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -183,6 +183,20 @@ fn main() {
}
#[test]
+ fn no_missing_unsafe_diagnostic_with_deprecated_safe_2024() {
+ check_diagnostics(
+ r#"
+#[rustc_deprecated_safe_2024]
+fn set_var() {}
+
+fn main() {
+ set_var();
+}
+"#,
+ );
+ }
+
+ #[test]
fn add_unsafe_block_when_dereferencing_a_raw_pointer() {
check_fix(
r#"
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
index 00710ef507..be1e6ed572 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -27,7 +27,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
hir::AssocItem::Function(id) => {
let function = id;
(
- format!("`fn {}`", redundant_assoc_item_name),
+ format!("`fn {redundant_assoc_item_name}`"),
function
.source(db)
.map(|it| it.syntax().value.text_range())
@@ -38,7 +38,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
hir::AssocItem::Const(id) => {
let constant = id;
(
- format!("`const {}`", redundant_assoc_item_name),
+ format!("`const {redundant_assoc_item_name}`"),
constant
.source(db)
.map(|it| it.syntax().value.text_range())
@@ -49,7 +49,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
hir::AssocItem::TypeAlias(id) => {
let type_alias = id;
(
- format!("`type {}`", redundant_assoc_item_name),
+ format!("`type {redundant_assoc_item_name}`"),
type_alias
.source(db)
.map(|it| it.syntax().value.text_range())
diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index b9327f8556..cbf50d13f5 100644
--- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -26,8 +26,6 @@ pub(crate) fn unlinked_file(
acc: &mut Vec<Diagnostic>,
file_id: FileId,
) {
- // Limit diagnostic to the first few characters in the file. This matches how VS Code
- // renders it with the full span, but on other editors, and is less invasive.
let fixes = fixes(ctx, file_id);
// FIXME: This is a hack for the vscode extension to notice whether there is an autofix or not before having to resolve diagnostics.
// This is to prevent project linking popups from appearing when there is an autofix. https://github.com/rust-lang/rust-analyzer/issues/14523
@@ -37,14 +35,27 @@ pub(crate) fn unlinked_file(
"file not included in module tree"
};
- let range = ctx.sema.db.parse(file_id).syntax_node().text_range();
- let range = FileLoader::file_text(ctx.sema.db, file_id)
- .char_indices()
- .take(3)
- .last()
- .map(|(i, _)| i)
- .map(|i| TextRange::up_to(i.try_into().unwrap()))
- .unwrap_or(range);
+ let mut range = ctx.sema.db.parse(file_id).syntax_node().text_range();
+ let mut unused = true;
+
+ if fixes.is_none() {
+ // If we don't have a fix, the unlinked-file diagnostic is not
+ // actionable. This generally means that rust-analyzer hasn't
+ // finished startup, or we couldn't find the Cargo.toml.
+ //
+ // Only show this diagnostic on the first three characters of
+ // the file, to avoid overwhelming the user during startup.
+ range = FileLoader::file_text(ctx.sema.db, file_id)
+ .char_indices()
+ .take(3)
+ .last()
+ .map(|(i, _)| i)
+ .map(|i| TextRange::up_to(i.try_into().unwrap()))
+ .unwrap_or(range);
+ // Prefer a diagnostic underline over graying out the text,
+ // since we're only highlighting a small region.
+ unused = false;
+ }
acc.push(
Diagnostic::new(
@@ -52,6 +63,7 @@ pub(crate) fn unlinked_file(
message,
FileRange { file_id, range },
)
+ .with_unused(unused)
.with_fixes(fixes),
);
}
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index 0614fdc551..42211cdbe5 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -161,12 +161,11 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
// we could omit generic parameters cause compiler can deduce it automatically
if !need_to_take_receiver_as_first_arg && !generic_parameters.is_empty() {
let generic_parameters = generic_parameters.join(", ");
- receiver_type_adt_name =
- format!("{}::<{}>", receiver_type_adt_name, generic_parameters);
+ receiver_type_adt_name = format!("{receiver_type_adt_name}::<{generic_parameters}>");
}
let method_name = call.name_ref()?;
- let assoc_func_call = format!("{}::{}()", receiver_type_adt_name, method_name);
+ let assoc_func_call = format!("{receiver_type_adt_name}::{method_name}()");
let assoc_func_call = make::expr_path(make::path_from_text(&assoc_func_call));
@@ -184,8 +183,7 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
Some(Assist {
id: AssistId("method_call_to_assoc_func_call_fix", AssistKind::QuickFix),
label: Label::new(format!(
- "Use associated func call instead: `{}`",
- assoc_func_call_expr_string
+ "Use associated func call instead: `{assoc_func_call_expr_string}`"
)),
group: None,
target: range,
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index 15543a5d65..a419f04bfa 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -86,7 +86,7 @@ use ide_db::{
label::Label,
source_change::SourceChange,
syntax_helpers::node_ext::parse_tt_as_comma_sep_paths,
- FxHashMap, FxHashSet, RootDatabase,
+ FxHashMap, FxHashSet, RootDatabase, SnippetCap,
};
use once_cell::sync::Lazy;
use stdx::never;
@@ -229,6 +229,7 @@ pub struct DiagnosticsConfig {
pub expr_fill_default: ExprFillDefaultMode,
pub style_lints: bool,
// FIXME: We may want to include a whole `AssistConfig` here
+ pub snippet_cap: Option<SnippetCap>,
pub insert_use: InsertUseConfig,
pub prefer_no_std: bool,
pub prefer_prelude: bool,
@@ -248,6 +249,7 @@ impl DiagnosticsConfig {
disabled: Default::default(),
expr_fill_default: Default::default(),
style_lints: true,
+ snippet_cap: SnippetCap::new(true),
insert_use: InsertUseConfig {
granularity: ImportGranularity::Preserve,
enforce_granularity: false,
@@ -297,7 +299,7 @@ pub fn diagnostics(
resolve: &AssistResolveStrategy,
file_id: FileId,
) -> Vec<Diagnostic> {
- let _p = tracing::span!(tracing::Level::INFO, "diagnostics").entered();
+ let _p = tracing::info_span!("diagnostics").entered();
let sema = Semantics::new(db);
let mut res = Vec::new();
@@ -482,7 +484,7 @@ fn handle_lint_attributes(
clippy_stack: &mut FxHashMap<String, Vec<Severity>>,
diagnostics_of_range: &mut FxHashMap<InFile<SyntaxNode>, &mut Diagnostic>,
) {
- let _g = tracing::span!(tracing::Level::INFO, "handle_lint_attributes").entered();
+ let _g = tracing::info_span!("handle_lint_attributes").entered();
let file_id = sema.hir_file_for(root);
let preorder = root.preorder();
for ev in preorder {
diff --git a/crates/ide-ssr/src/matching.rs b/crates/ide-ssr/src/matching.rs
index cf7e7e08bc..b29053c0c2 100644
--- a/crates/ide-ssr/src/matching.rs
+++ b/crates/ide-ssr/src/matching.rs
@@ -575,7 +575,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
.resolve_method_call_as_callable(code)
.and_then(|callable| {
let (self_param, _) = callable.receiver_param(self.sema.db)?;
- Some(self_param.source(self.sema.db)?.value.kind())
+ Some(self.sema.source(self_param)?.value.kind())
})
.unwrap_or(ast::SelfParamKind::Owned);
}
diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs
index 654a1cd316..f7e5b40dde 100644
--- a/crates/ide/src/call_hierarchy.rs
+++ b/crates/ide/src/call_hierarchy.rs
@@ -19,13 +19,6 @@ pub struct CallItem {
pub ranges: Vec<TextRange>,
}
-impl CallItem {
- #[cfg(test)]
- pub(crate) fn debug_render(&self) -> String {
- format!("{} : {:?}", self.target.debug_render(), self.ranges)
- }
-}
-
pub(crate) fn call_hierarchy(
db: &RootDatabase,
position: FilePosition,
@@ -159,6 +152,10 @@ mod tests {
expected_incoming: Expect,
expected_outgoing: Expect,
) {
+ fn debug_render(item: crate::CallItem) -> String {
+ format!("{} : {:?}", item.target.debug_render(), item.ranges)
+ }
+
let (analysis, pos) = fixture::position(ra_fixture);
let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info;
@@ -169,12 +166,10 @@ mod tests {
let item_pos =
FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() };
let incoming_calls = analysis.incoming_calls(item_pos).unwrap().unwrap();
- expected_incoming
- .assert_eq(&incoming_calls.into_iter().map(|call| call.debug_render()).join("\n"));
+ expected_incoming.assert_eq(&incoming_calls.into_iter().map(debug_render).join("\n"));
let outgoing_calls = analysis.outgoing_calls(item_pos).unwrap().unwrap();
- expected_outgoing
- .assert_eq(&outgoing_calls.into_iter().map(|call| call.debug_render()).join("\n"));
+ expected_outgoing.assert_eq(&outgoing_calls.into_iter().map(debug_render).join("\n"));
}
#[test]
diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs
index 1ead045788..4b54c057bf 100644
--- a/crates/ide/src/expand_macro.rs
+++ b/crates/ide/src/expand_macro.rs
@@ -111,9 +111,10 @@ fn expand_macro_recur(
macro_call: &ast::Item,
) -> Option<SyntaxNode> {
let expanded = match macro_call {
- item @ ast::Item::MacroCall(macro_call) => {
- sema.expand_attr_macro(item).or_else(|| sema.expand(macro_call))?.clone_for_update()
- }
+ item @ ast::Item::MacroCall(macro_call) => sema
+ .expand_attr_macro(item)
+ .or_else(|| sema.expand_allowed_builtins(macro_call))?
+ .clone_for_update(),
item => sema.expand_attr_macro(item)?.clone_for_update(),
};
expand(sema, expanded)
@@ -229,6 +230,29 @@ mod tests {
}
#[test]
+ fn expand_allowed_builtin_macro() {
+ check(
+ r#"
+//- minicore: concat
+$0concat!("test", 10, 'b', true);"#,
+ expect![[r#"
+ concat!
+ "test10btrue""#]],
+ );
+ }
+
+ #[test]
+ fn do_not_expand_disallowed_macro() {
+ let (analysis, pos) = fixture::position(
+ r#"
+//- minicore: asm
+$0asm!("0x300, x0");"#,
+ );
+ let expansion = analysis.expand_macro(pos).unwrap();
+ assert!(expansion.is_none());
+ }
+
+ #[test]
fn macro_expand_as_keyword() {
check(
r#"
diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs
index b706e959d3..e8d6dc9734 100644
--- a/crates/ide/src/extend_selection.rs
+++ b/crates/ide/src/extend_selection.rs
@@ -210,7 +210,13 @@ fn extend_single_word_in_comment_or_string(
let start_idx = before.rfind(non_word_char)? as u32;
let end_idx = after.find(non_word_char).unwrap_or(after.len()) as u32;
- let from: TextSize = (start_idx + 1).into();
+ // FIXME: use `ceil_char_boundary` from `std::str` when it gets stable
+ // https://github.com/rust-lang/rust/issues/93743
+ fn ceil_char_boundary(text: &str, index: u32) -> u32 {
+ (index..).find(|&index| text.is_char_boundary(index as usize)).unwrap_or(text.len() as u32)
+ }
+
+ let from: TextSize = ceil_char_boundary(text, start_idx + 1).into();
let to: TextSize = (cursor_position + end_idx).into();
let range = TextRange::new(from, to);
@@ -662,4 +668,18 @@ fn main() { let (
],
);
}
+
+ #[test]
+ fn extend_selection_inside_str_with_wide_char() {
+ // should not panic
+ do_check(
+ r#"fn main() { let x = "═$0═══════"; }"#,
+ &[
+ r#""════════""#,
+ r#"let x = "════════";"#,
+ r#"{ let x = "════════"; }"#,
+ r#"fn main() { let x = "════════"; }"#,
+ ],
+ );
+ }
}
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index 76b80fcefa..f57cb1cb73 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -2289,4 +2289,28 @@ macro_rules! baz {
"#,
);
}
+
+ #[test]
+ fn goto_shadowed_preludes_in_block_module() {
+ check(
+ r#"
+//- /main.rs crate:main edition:2021 deps:core
+pub struct S;
+ //^
+
+fn main() {
+ fn f() -> S$0 {
+ fn inner() {} // forces a block def map
+ return S;
+ }
+}
+//- /core.rs crate:core
+pub mod prelude {
+ pub mod rust_2021 {
+ pub enum S;
+ }
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs
index 6f32ce76b2..a5689403ee 100644
--- a/crates/ide/src/highlight_related.rs
+++ b/crates/ide/src/highlight_related.rs
@@ -55,7 +55,7 @@ pub(crate) fn highlight_related(
config: HighlightRelatedConfig,
pos @ FilePosition { offset, file_id }: FilePosition,
) -> Option<Vec<HighlightedRange>> {
- let _p = tracing::span!(tracing::Level::INFO, "highlight_related").entered();
+ let _p = tracing::info_span!("highlight_related").entered();
let syntax = sema.parse(file_id).syntax().clone();
let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index bdb56081c3..2006baa30a 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -375,8 +375,8 @@ pub(crate) fn hover_for_definition(
HoverResult {
markup: render::process_markup(sema.db, def, &markup, config),
actions: [
- show_implementations_action(sema.db, def),
show_fn_references_action(sema.db, def),
+ show_implementations_action(sema.db, def),
runnable_action(sema, def, file_id),
goto_type_action_for_def(sema.db, def, &notable_traits),
]
diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs
index 3bc17f95e7..99568c9922 100644
--- a/crates/ide/src/hover/render.rs
+++ b/crates/ide/src/hover/render.rs
@@ -430,6 +430,7 @@ pub(super) fn definition(
}
label
}
+ Definition::Function(fn_) => fn_.display_with_container_bounds(db, true).to_string(),
_ => def.label(db),
};
let docs = def.docs(db, famous_defs);
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index dda5a005a7..3f10bed511 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -455,6 +455,23 @@ fn ty_to_text_edit(
// * elided lifetimes
// * compiler inserted reborrows
//
+// Note: inlay hints for function argument names are heuristically omitted to reduce noise and will not appear if
+// any of the
+// link:https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L92-L99[following criteria]
+// are met:
+//
+// * the parameter name is a suffix of the function's name
+// * the argument is a qualified constructing or call expression where the qualifier is an ADT
+// * exact argument<->parameter match(ignoring leading underscore) or parameter is a prefix/suffix
+// of argument with _ splitting it off
+// * the parameter name starts with `ra_fixture`
+// * the parameter name is a
+// link:https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L200[well known name]
+// in a unary function
+// * the parameter name is a
+// link:https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L201[single character]
+// in a unary function
+//
// image::https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png[]
pub(crate) fn inlay_hints(
db: &RootDatabase,
@@ -462,7 +479,7 @@ pub(crate) fn inlay_hints(
range_limit: Option<TextRange>,
config: &InlayHintsConfig,
) -> Vec<InlayHint> {
- let _p = tracing::span!(tracing::Level::INFO, "inlay_hints").entered();
+ let _p = tracing::info_span!("inlay_hints").entered();
let sema = Semantics::new(db);
let file = sema.parse(file_id);
let file = file.syntax();
@@ -496,7 +513,7 @@ pub(crate) fn inlay_hints_resolve(
config: &InlayHintsConfig,
hasher: impl Fn(&InlayHint) -> u64,
) -> Option<InlayHint> {
- let _p = tracing::span!(tracing::Level::INFO, "inlay_hints_resolve").entered();
+ let _p = tracing::info_span!("inlay_hints_resolve").entered();
let sema = Semantics::new(db);
let file = sema.parse(file_id);
let file = file.syntax();
diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs
index 20128a286f..7932d8efbc 100644
--- a/crates/ide/src/inlay_hints/adjustment.rs
+++ b/crates/ide/src/inlay_hints/adjustment.rs
@@ -709,4 +709,25 @@ fn main() {
"#,
)
}
+
+ // regression test for a stackoverflow in hir display code
+ #[test]
+ fn adjustment_hints_method_call_on_impl_trait_self() {
+ check_with_config(
+ InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG },
+ r#"
+//- minicore: slice, coerce_unsized
+trait T<RHS = Self> {}
+
+fn hello(it: &&[impl T]) {
+ it.len();
+ //^^(
+ //^^&
+ //^^*
+ //^^*
+ //^^)
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs
index fb50c49a3a..9819d0e3fb 100644
--- a/crates/ide/src/inlay_hints/param_name.rs
+++ b/crates/ide/src/inlay_hints/param_name.rs
@@ -30,7 +30,7 @@ pub(super) fn hints(
.filter_map(|(p, arg)| {
// Only annotate hints for expressions that exist in the original file
let range = sema.original_range_opt(arg.syntax())?;
- let source = p.source(sema.db)?;
+ let source = sema.source(p)?;
let (param_name, name_syntax) = match source.value.as_ref() {
Either::Left(pat) => (pat.name()?, pat.name()),
Either::Right(param) => match param.pat()? {
@@ -38,8 +38,6 @@ pub(super) fn hints(
_ => return None,
},
};
- // make sure the file is cached so we can map out of macros
- sema.parse_or_expand(source.file_id);
Some((name_syntax, param_name, arg, range))
})
.filter(|(_, param_name, arg, _)| {
diff --git a/crates/ide/src/interpret_function.rs b/crates/ide/src/interpret_function.rs
index df444a3f4d..7bd2da9f88 100644
--- a/crates/ide/src/interpret_function.rs
+++ b/crates/ide/src/interpret_function.rs
@@ -43,7 +43,7 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<Strin
let path = path.as_deref().unwrap_or("<unknown file>");
match db.line_index(file_id).try_line_col(text_range.start()) {
Some(line_col) => format!("file://{path}#{}:{}", line_col.line + 1, line_col.col),
- None => format!("file://{path} range {:?}", text_range),
+ None => format!("file://{path} range {text_range:?}"),
}
};
Some(def.eval(db, span_formatter))
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 431aa30e56..a2ac62341d 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -273,11 +273,18 @@ impl Analysis {
self.with_db(|db| status::status(db, file_id))
}
- pub fn source_root(&self, file_id: FileId) -> Cancellable<SourceRootId> {
+ pub fn source_root_id(&self, file_id: FileId) -> Cancellable<SourceRootId> {
self.with_db(|db| db.file_source_root(file_id))
}
- pub fn parallel_prime_caches<F>(&self, num_worker_threads: u8, cb: F) -> Cancellable<()>
+ pub fn is_local_source_root(&self, source_root_id: SourceRootId) -> Cancellable<bool> {
+ self.with_db(|db| {
+ let sr = db.source_root(source_root_id);
+ !sr.is_library
+ })
+ }
+
+ pub fn parallel_prime_caches<F>(&self, num_worker_threads: usize, cb: F) -> Cancellable<()>
where
F: Fn(ParallelPrimeCachesProgress) + Sync + std::panic::UnwindSafe,
{
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs
index fc836d5540..bfd62e7624 100644
--- a/crates/ide/src/navigation_target.rs
+++ b/crates/ide/src/navigation_target.rs
@@ -220,7 +220,7 @@ impl TryToNav for Definition {
fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
match self {
Definition::Local(it) => Some(it.to_nav(db)),
- Definition::Label(it) => Some(it.to_nav(db)),
+ Definition::Label(it) => it.try_to_nav(db),
Definition::Module(it) => Some(it.to_nav(db)),
Definition::Macro(it) => it.try_to_nav(db),
Definition::Field(it) => it.try_to_nav(db),
@@ -562,12 +562,12 @@ impl ToNav for hir::Local {
}
}
-impl ToNav for hir::Label {
- fn to_nav(&self, db: &RootDatabase) -> UpmappingResult<NavigationTarget> {
- let InFile { file_id, value } = self.source(db);
+impl TryToNav for hir::Label {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
+ let InFile { file_id, value } = self.source(db)?;
let name = self.name(db).to_smol_str();
- orig_range_with_focus(db, file_id, value.syntax(), value.lifetime()).map(
+ Some(orig_range_with_focus(db, file_id, value.syntax(), value.lifetime()).map(
|(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget {
file_id,
name: name.clone(),
@@ -579,7 +579,7 @@ impl ToNav for hir::Label {
description: None,
docs: None,
},
- )
+ ))
}
}
@@ -926,4 +926,26 @@ struct Foo;
let navs = analysis.symbol_search(Query::new("foo".to_owned()), !0).unwrap();
assert_eq!(navs.len(), 2)
}
+
+ #[test]
+ fn test_ensure_hidden_symbols_are_not_returned() {
+ let (analysis, _) = fixture::file(
+ r#"
+fn foo() {}
+struct Foo;
+static __FOO_CALLSITE: () = ();
+"#,
+ );
+
+ // It doesn't show the hidden symbol
+ let navs = analysis.symbol_search(Query::new("foo".to_owned()), !0).unwrap();
+ assert_eq!(navs.len(), 2);
+ let navs = analysis.symbol_search(Query::new("_foo".to_owned()), !0).unwrap();
+ assert_eq!(navs.len(), 0);
+
+ // Unless we explicitly search for a `__` prefix
+ let query = Query::new("__foo".to_owned());
+ let navs = analysis.symbol_search(query, !0).unwrap();
+ assert_eq!(navs.len(), 1);
+ }
}
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index 01af864cdf..6f9e1b3740 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -55,7 +55,7 @@ pub(crate) fn find_all_refs(
position: FilePosition,
search_scope: Option<SearchScope>,
) -> Option<Vec<ReferenceSearchResult>> {
- let _p = tracing::span!(tracing::Level::INFO, "find_all_refs").entered();
+ let _p = tracing::info_span!("find_all_refs").entered();
let syntax = sema.parse(position.file_id).syntax().clone();
let make_searcher = |literal_search: bool| {
move |def: Definition| {
diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs
index 8c2ae327c7..3d08e2f371 100644
--- a/crates/ide/src/rename.rs
+++ b/crates/ide/src/rename.rs
@@ -119,9 +119,9 @@ pub(crate) fn rename(
};
let mut source_change = SourceChange::default();
- source_change.extend(usages.iter().map(|(&file_id, refs)| {
- (file_id, source_edit_from_references(refs, def, new_name))
- }));
+ source_change.extend(usages.references.get_mut(&position.file_id).iter().map(
+ |refs| (position.file_id, source_edit_from_references(refs, def, new_name)),
+ ));
Ok(source_change)
})
@@ -361,8 +361,9 @@ fn rename_to_self(
bail!("Parameter type differs from impl block type");
}
- let InFile { file_id, value: param_source } =
- first_param.source(sema.db).ok_or_else(|| format_err!("No source for parameter found"))?;
+ let InFile { file_id, value: param_source } = sema
+ .source(first_param.clone())
+ .ok_or_else(|| format_err!("No source for parameter found"))?;
let def = Definition::Local(local);
let usages = def.usages(sema).all();
@@ -392,7 +393,7 @@ fn rename_self_to_param(
let identifier_kind = IdentifierKind::classify(new_name)?;
let InFile { file_id, value: self_param } =
- self_param.source(sema.db).ok_or_else(|| format_err!("cannot find function source"))?;
+ sema.source(self_param).ok_or_else(|| format_err!("cannot find function source"))?;
let def = Definition::Local(local);
let usages = def.usages(sema).all();
@@ -444,12 +445,8 @@ mod tests {
use super::{RangeInfo, RenameError};
- fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
- check_with_rename_config(new_name, ra_fixture_before, ra_fixture_after);
- }
-
#[track_caller]
- fn check_with_rename_config(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
+ fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
let ra_fixture_after = &trim_indent(ra_fixture_after);
let (analysis, position) = fixture::position(ra_fixture_before);
if !ra_fixture_after.starts_with("error: ") {
@@ -466,7 +463,7 @@ mod tests {
let (&file_id, edit) = match source_change.source_file_edits.len() {
0 => return,
1 => source_change.source_file_edits.iter().next().unwrap(),
- _ => (&position.file_id, &source_change.source_file_edits[&position.file_id]),
+ _ => panic!(),
};
for indel in edit.0.iter() {
text_edit_builder.replace(indel.delete, indel.insert.clone());
@@ -2689,7 +2686,7 @@ use qux as frob;
#[test]
fn disallow_renaming_for_non_local_definition() {
- check_with_rename_config(
+ check(
"Baz",
r#"
//- /lib.rs crate:lib new_source_root:library
@@ -2704,7 +2701,7 @@ fn main() { let _: S$0; }
#[test]
fn disallow_renaming_for_builtin_macros() {
- check_with_rename_config(
+ check(
"Baz",
r#"
//- minicore: derive, hash
@@ -2762,14 +2759,19 @@ fn test() {
check(
"Baz",
r#"
+//- /main.rs crate:main
+mod module;
mod foo { pub struct Foo; }
mod bar { use super::Foo; }
use foo::Foo$0;
fn main() { let _: Foo; }
+//- /module.rs
+use crate::foo::Foo;
"#,
r#"
+mod module;
mod foo { pub struct Foo; }
mod bar { use super::Baz; }
@@ -2779,4 +2781,22 @@ fn main() { let _: Baz; }
"#,
)
}
+
+ #[test]
+ fn rename_path_inside_use_tree_foreign() {
+ check(
+ "Baz",
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub struct S;
+//- /main.rs crate:main deps:lib new_source_root:local
+use lib::S$0;
+fn main() { let _: S; }
+"#,
+ r#"
+use lib::S as Baz;
+fn main() { let _: Baz; }
+"#,
+ );
+ }
}
diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs
index 378a38892c..89c725a6c4 100644
--- a/crates/ide/src/signature_help.rs
+++ b/crates/ide/src/signature_help.rs
@@ -226,7 +226,7 @@ fn signature_help_for_call(
let mut buf = String::new();
for (idx, p) in callable.params().into_iter().enumerate() {
buf.clear();
- if let Some(param) = p.source(sema.db) {
+ if let Some(param) = sema.source(p.clone()) {
match param.value {
Either::Right(param) => match param.pat() {
Some(pat) => format_to!(buf, "{}: ", pat),
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index 6aaf8f8e77..fd8e6f4046 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -186,7 +186,7 @@ pub(crate) fn highlight(
file_id: FileId,
range_to_highlight: Option<TextRange>,
) -> Vec<HlRange> {
- let _p = tracing::span!(tracing::Level::INFO, "highlight").entered();
+ let _p = tracing::info_span!("highlight").entered();
let sema = Semantics::new(db);
// Determine the root based on the given range.
diff --git a/crates/ide/src/syntax_highlighting/escape.rs b/crates/ide/src/syntax_highlighting/escape.rs
index 2f387968c9..552ce9cd8c 100644
--- a/crates/ide/src/syntax_highlighting/escape.rs
+++ b/crates/ide/src/syntax_highlighting/escape.rs
@@ -28,7 +28,7 @@ pub(super) fn highlight_escape_string<T: IsString>(
pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char, start: TextSize) {
if char.value().is_err() {
// We do not emit invalid escapes highlighting here. The lexer would likely be in a bad
- // state and this token contains junks, since `'` is not a reliable delimiter (consider
+ // state and this token contains junk, since `'` is not a reliable delimiter (consider
// lifetimes). Nonetheless, parser errors should already be emitted.
return;
}
diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs
index 3b784ebec3..c73b6acb0d 100644
--- a/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/crates/ide/src/syntax_highlighting/highlight.rs
@@ -584,6 +584,9 @@ fn highlight_method_call(
if func.is_async(sema.db) {
h |= HlMod::Async;
}
+ if func.is_const(sema.db) {
+ h |= HlMod::Const;
+ }
if func
.as_assoc_item(sema.db)
.and_then(|it| it.container_or_implemented_trait(sema.db))
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_block_mod_items.html b/crates/ide/src/syntax_highlighting/test_data/highlight_block_mod_items.html
index eed3968a90..70f2d7203e 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_block_mod_items.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_block_mod_items.html
@@ -48,16 +48,15 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<pre><code><span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">foo</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>foo<span class="colon">:</span>ident<span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
<span class="keyword">mod</span> y <span class="brace">{</span>
- <span class="keyword">struct</span> <span class="punctuation">$</span>foo<span class="semicolon">;</span>
+ <span class="keyword">pub</span> <span class="keyword">struct</span> <span class="punctuation">$</span>foo<span class="semicolon">;</span>
<span class="brace">}</span>
<span class="brace">}</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
- <span class="macro">foo</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="struct declaration macro">Foo</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro">foo</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="struct declaration macro public">Foo</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="keyword">mod</span> <span class="module declaration">module</span> <span class="brace">{</span>
- <span class="comment">// FIXME: IDE layer has this unresolved</span>
- <span class="unresolved_reference">foo</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">Bar</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="keyword">fn</span> <span class="function declaration">func</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="macro">foo</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="struct declaration macro public">Bar</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="keyword">fn</span> <span class="function declaration">func</span><span class="parenthesis">(</span><span class="punctuation">_</span><span class="colon">:</span> <span class="module">y</span><span class="operator">::</span><span class="struct public">Bar</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">mod</span> <span class="module declaration">inner</span> <span class="brace">{</span>
<span class="keyword">struct</span> <span class="struct declaration">Innerest</span><span class="angle">&lt;</span><span class="keyword">const</span> <span class="const_param const declaration">C</span><span class="colon">:</span> <span class="unresolved_reference">usize</span><span class="angle">&gt;</span> <span class="brace">{</span> <span class="field declaration">field</span><span class="colon">:</span> <span class="bracket">[</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="brace">{</span><span class="const_param const">C</span><span class="brace">}</span><span class="bracket">]</span> <span class="brace">}</span>
<span class="brace">}</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_const.html b/crates/ide/src/syntax_highlighting/test_data/highlight_const.html
index cd6ffc2c3a..a14f2cc88c 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_const.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_const.html
@@ -66,10 +66,12 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="operator macro">&</span><span class="keyword macro">raw</span> <span class="keyword macro">const</span> <span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="semicolon macro">;</span>
<span class="keyword macro">const</span>
<span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="operator">.</span><span class="method const consuming trait">assoc_const_method</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">trait</span> <span class="trait declaration">ConstTrait</span> <span class="brace">{</span>
<span class="keyword const">const</span> <span class="constant associated const declaration static trait">ASSOC_CONST</span><span class="colon">:</span> <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword const">const</span> <span class="keyword">fn</span> <span class="function associated const declaration static trait">assoc_const_fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+ <span class="keyword const">const</span> <span class="keyword">fn</span> <span class="method associated const consuming declaration trait">assoc_const_method</span><span class="parenthesis">(</span><span class="self_keyword declaration">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="brace">}</span>
<span class="keyword">impl</span> <span class="keyword const">const</span> <span class="trait">ConstTrait</span> <span class="keyword">for</span> <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword const">const</span> <span class="constant associated const declaration static trait">ASSOC_CONST</span><span class="colon">:</span> <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html b/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
index 893e3c0675..dfad3a6605 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
@@ -49,5 +49,5 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">let</span> <span class="variable declaration">foo</span> <span class="operator">=</span> <span class="enum_variant default_library library">Some</span><span class="parenthesis">(</span><span class="numeric_literal">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
- <span class="keyword">let</span> <span class="variable declaration">nums</span> <span class="operator">=</span> <span class="module default_library library">iter</span><span class="operator">::</span><span class="function default_library library">repeat</span><span class="parenthesis">(</span><span class="variable">foo</span><span class="operator">.</span><span class="method default_library library">unwrap</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">nums</span> <span class="operator">=</span> <span class="module default_library library">iter</span><span class="operator">::</span><span class="function default_library library">repeat</span><span class="parenthesis">(</span><span class="variable">foo</span><span class="operator">.</span><span class="method const default_library library">unwrap</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
index 413edb6d65..9be7c92fc7 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
@@ -218,7 +218,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="bool_literal">true</span>
<span class="brace">}</span>
<span class="brace">}</span>
-<span class="keyword const">const</span> <span class="constant const declaration">USAGE_OF_BOOL</span><span class="colon">:</span> <span class="builtin_type">bool</span> <span class="operator">=</span> <span class="enum public">Bool</span><span class="operator">::</span><span class="enum_variant public">True</span><span class="operator">.</span><span class="method consuming public">to_primitive</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="keyword const">const</span> <span class="constant const declaration">USAGE_OF_BOOL</span><span class="colon">:</span> <span class="builtin_type">bool</span> <span class="operator">=</span> <span class="enum public">Bool</span><span class="operator">::</span><span class="enum_variant public">True</span><span class="operator">.</span><span class="method const consuming public">to_primitive</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">trait</span> <span class="trait declaration">Baz</span> <span class="brace">{</span>
<span class="keyword">type</span> <span class="type_alias associated declaration static trait">Qux</span><span class="semicolon">;</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index 22706dea1f..cb47fc68bc 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -165,7 +165,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro">toho</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"{}fmt"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">i</span><span class="colon">:</span> <span class="builtin_type">u64</span> <span class="operator">=</span> <span class="numeric_literal">3</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">o</span><span class="colon">:</span> <span class="builtin_type">u64</span><span class="semicolon">;</span>
- <span class="macro default_library library unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span>
+ <span class="macro default_library library">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span>
<span class="string_literal macro">"mov </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro">, </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span>
<span class="string_literal macro">"add </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro">, 5"</span><span class="comma macro">,</span>
<span class="none macro">out</span><span class="parenthesis macro">(</span><span class="none macro">reg</span><span class="parenthesis macro">)</span> <span class="none macro">o</span><span class="comma macro">,</span>
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index 901e41d27c..08acfca2cb 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -657,10 +657,12 @@ const fn const_fn<const CONST_PARAM: ()>(const {}: const fn()) where (): const C
&raw const ();
const
);
+ ().assoc_const_method();
}
trait ConstTrait {
const ASSOC_CONST: () = ();
const fn assoc_const_fn() {}
+ const fn assoc_const_method(self) {}
}
impl const ConstTrait for () {
const ASSOC_CONST: () = ();
@@ -1070,16 +1072,15 @@ fn test_block_mod_items() {
macro_rules! foo {
($foo:ident) => {
mod y {
- struct $foo;
+ pub struct $foo;
}
};
}
fn main() {
foo!(Foo);
mod module {
- // FIXME: IDE layer has this unresolved
foo!(Bar);
- fn func() {
+ fn func(_: y::Bar) {
mod inner {
struct Innerest<const C: usize> { field: [(); {C}] }
}
diff --git a/crates/ide/src/view_memory_layout.rs b/crates/ide/src/view_memory_layout.rs
index a229bc87c8..826447d058 100644
--- a/crates/ide/src/view_memory_layout.rs
+++ b/crates/ide/src/view_memory_layout.rs
@@ -40,7 +40,7 @@ impl fmt::Display for RecursiveMemoryLayout {
"{}: {} (size: {}, align: {}, field offset: {})\n",
node.item_name, node.typename, node.size, node.alignment, node.offset
);
- write!(fmt, "{}", out)?;
+ write!(fmt, "{out}")?;
if node.children_start != -1 {
for j in nodes[idx].children_start
..(nodes[idx].children_start + nodes[idx].children_len as i64)
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 76940ab57a..de68b86714 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -15,9 +15,9 @@ use ide_db::{
};
use itertools::Itertools;
use proc_macro_api::{MacroDylib, ProcMacroServer};
-use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace};
+use project_model::{CargoConfig, ManifestPath, PackageRoot, ProjectManifest, ProjectWorkspace};
use span::Span;
-use tracing::{instrument, Level};
+use tracing::instrument;
use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath};
pub struct LoadCargoConfig {
@@ -238,6 +238,19 @@ impl ProjectFolders {
fsc.add_file_set(file_set_roots)
}
+ // register the workspace manifest as well, note that this currently causes duplicates for
+ // non-virtual cargo workspaces! We ought to fix that
+ for manifest in workspaces.iter().filter_map(|ws| ws.manifest().map(ManifestPath::as_ref)) {
+ let file_set_roots: Vec<VfsPath> = vec![VfsPath::from(manifest.to_owned())];
+
+ let entry = vfs::loader::Entry::Files(vec![manifest.to_owned()]);
+
+ res.watch.push(res.load.len());
+ res.load.push(entry);
+ local_filesets.push(fsc.len() as u64);
+ fsc.add_file_set(file_set_roots)
+ }
+
let fsc = fsc.build();
res.source_root_config = SourceRootConfig { fsc, local_filesets };
@@ -272,24 +285,53 @@ impl SourceRootConfig {
/// If a `SourceRoot` doesn't have a parent and is local then it is not contained in this mapping but it can be asserted that it is a root `SourceRoot`.
pub fn source_root_parent_map(&self) -> FxHashMap<SourceRootId, SourceRootId> {
let roots = self.fsc.roots();
- let mut map = FxHashMap::<SourceRootId, SourceRootId>::default();
- roots
- .iter()
- .enumerate()
- .filter(|(_, (_, id))| self.local_filesets.contains(id))
- .filter_map(|(idx, (root, root_id))| {
- // We are interested in parents if they are also local source roots.
- // So instead of a non-local parent we may take a local ancestor as a parent to a node.
- roots.iter().take(idx).find_map(|(root2, root2_id)| {
- if self.local_filesets.contains(root2_id) && root.starts_with(root2) {
- return Some((root_id, root2_id));
+
+ let mut map = FxHashMap::default();
+
+ // See https://github.com/rust-lang/rust-analyzer/issues/17409
+ //
+ // We can view the connections between roots as a graph. The problem is
+ // that this graph may contain cycles, so when adding edges, it is necessary
+ // to check whether it will lead to a cycle.
+ //
+ // Since we ensure that each node has at most one outgoing edge (because
+ // each SourceRoot can have only one parent), we can use a disjoint-set to
+ // maintain the connectivity between nodes. If an edge’s two nodes belong
+ // to the same set, they are already connected.
+ let mut dsu = FxHashMap::default();
+ fn find_parent(dsu: &mut FxHashMap<u64, u64>, id: u64) -> u64 {
+ if let Some(&parent) = dsu.get(&id) {
+ let parent = find_parent(dsu, parent);
+ dsu.insert(id, parent);
+ parent
+ } else {
+ id
+ }
+ }
+
+ for (idx, (root, root_id)) in roots.iter().enumerate() {
+ if !self.local_filesets.contains(root_id)
+ || map.contains_key(&SourceRootId(*root_id as u32))
+ {
+ continue;
+ }
+
+ for (root2, root2_id) in roots[..idx].iter().rev() {
+ if self.local_filesets.contains(root2_id)
+ && root_id != root2_id
+ && root.starts_with(root2)
+ {
+ // check if the edge will create a cycle
+ if find_parent(&mut dsu, *root_id) != find_parent(&mut dsu, *root2_id) {
+ map.insert(SourceRootId(*root_id as u32), SourceRootId(*root2_id as u32));
+ dsu.insert(*root_id, *root2_id);
}
- None
- })
- })
- .for_each(|(child, parent)| {
- map.insert(SourceRootId(*child as u32), SourceRootId(*parent as u32));
- });
+
+ break;
+ }
+ }
+ }
+
map
}
}
@@ -352,8 +394,8 @@ fn load_crate_graph(
}
}
vfs::loader::Message::Loaded { files } | vfs::loader::Message::Changed { files } => {
- let _p = tracing::span!(Level::INFO, "load_cargo::load_crate_craph/LoadedChanged")
- .entered();
+ let _p =
+ tracing::info_span!("load_cargo::load_crate_craph/LoadedChanged").entered();
for (path, contents) in files {
vfs.set_file_contents(path.into(), contents);
}
@@ -560,4 +602,36 @@ mod tests {
assert_eq!(vc, vec![(SourceRootId(3), SourceRootId(1)),])
}
+
+ #[test]
+ fn parents_with_identical_root_id() {
+ let mut builder = FileSetConfigBuilder::default();
+ builder.add_file_set(vec![
+ VfsPath::new_virtual_path("/ROOT/def".to_owned()),
+ VfsPath::new_virtual_path("/ROOT/def/abc/def".to_owned()),
+ ]);
+ builder.add_file_set(vec![VfsPath::new_virtual_path("/ROOT/def/abc/def/ghi".to_owned())]);
+ let fsc = builder.build();
+ let src = SourceRootConfig { fsc, local_filesets: vec![0, 1] };
+ let mut vc = src.source_root_parent_map().into_iter().collect::<Vec<_>>();
+ vc.sort_by(|x, y| x.0 .0.cmp(&y.0 .0));
+
+ assert_eq!(vc, vec![(SourceRootId(1), SourceRootId(0)),])
+ }
+
+ #[test]
+ fn circular_reference() {
+ let mut builder = FileSetConfigBuilder::default();
+ builder.add_file_set(vec![
+ VfsPath::new_virtual_path("/ROOT/def".to_owned()),
+ VfsPath::new_virtual_path("/ROOT/def/abc/def".to_owned()),
+ ]);
+ builder.add_file_set(vec![VfsPath::new_virtual_path("/ROOT/def/abc".to_owned())]);
+ let fsc = builder.build();
+ let src = SourceRootConfig { fsc, local_filesets: vec![0, 1] };
+ let mut vc = src.source_root_parent_map().into_iter().collect::<Vec<_>>();
+ vc.sort_by(|x, y| x.0 .0.cmp(&y.0 .0));
+
+ assert_eq!(vc, vec![(SourceRootId(1), SourceRootId(0)),])
+ }
}
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index f73e188c79..19ba5c7a15 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -215,7 +215,7 @@ fn invocation_fixtures(
token_trees.push(subtree.into());
}
- Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Length { .. } => {}
+ Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Len { .. } => {}
};
// Simple linear congruential generator for deterministic result
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs
index 78d4bfee2a..0cec4e70da 100644
--- a/crates/mbe/src/expander/matcher.rs
+++ b/crates/mbe/src/expander/matcher.rs
@@ -566,7 +566,7 @@ fn match_loop_inner<'t>(
error_items.push(item);
}
OpDelimited::Op(
- Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Length { .. },
+ Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Len { .. },
) => {
stdx::never!("metavariable expression in lhs found");
}
@@ -832,7 +832,7 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate)
Op::Subtree { tokens, .. } => collect_vars(collector_fun, tokens),
Op::Repeat { tokens, .. } => collect_vars(collector_fun, tokens),
Op::Literal(_) | Op::Ident(_) | Op::Punct(_) => {}
- Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Length { .. } => {
+ Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Len { .. } => {
stdx::never!("metavariable expression in lhs found");
}
}
diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs
index 5e6e45f152..0f689a2692 100644
--- a/crates/mbe/src/expander/transcriber.rs
+++ b/crates/mbe/src/expander/transcriber.rs
@@ -242,7 +242,7 @@ fn expand_subtree(
.into(),
);
}
- Op::Length { depth } => {
+ Op::Len { depth } => {
let length = ctx.nesting.get(ctx.nesting.len() - 1 - depth).map_or(0, |_nest| {
// FIXME: to be implemented
0
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 6920832dd2..ed3200964d 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -317,6 +317,12 @@ pub struct ValueResult<T, E> {
pub err: Option<E>,
}
+impl<T: Default, E> Default for ValueResult<T, E> {
+ fn default() -> Self {
+ Self { value: Default::default(), err: Default::default() }
+ }
+}
+
impl<T, E> ValueResult<T, E> {
pub fn new(value: T, err: E) -> Self {
Self { value, err: Some(err) }
diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs
index eaf2fd8c27..bbe00f0afc 100644
--- a/crates/mbe/src/parser.rs
+++ b/crates/mbe/src/parser.rs
@@ -75,7 +75,7 @@ pub(crate) enum Op {
Index {
depth: usize,
},
- Length {
+ Len {
depth: usize,
},
Count {
@@ -345,7 +345,7 @@ fn parse_metavar_expr(new_meta_vars: bool, src: &mut TtIter<'_, Span>) -> Result
Op::Ignore { name: ident.text.clone(), id: ident.span }
}
"index" => Op::Index { depth: parse_depth(&mut args)? },
- "length" => Op::Length { depth: parse_depth(&mut args)? },
+ "len" => Op::Len { depth: parse_depth(&mut args)? },
"count" => {
if new_meta_vars {
args.expect_dollar()?;
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 412e492176..c8ff8c35e9 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -47,7 +47,7 @@ pub(crate) mod dummy_test_span_utils {
pub const DUMMY: Span = Span {
range: TextRange::empty(TextSize::new(0)),
anchor: span::SpanAnchor {
- file_id: span::FileId::BOGUS,
+ file_id: span::FileId::from_raw(0xe4e4e),
ast_id: span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
@@ -60,7 +60,7 @@ pub(crate) mod dummy_test_span_utils {
Span {
range,
anchor: span::SpanAnchor {
- file_id: span::FileId::BOGUS,
+ file_id: span::FileId::from_raw(0xe4e4e),
ast_id: span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
diff --git a/crates/parser/src/grammar.rs b/crates/parser/src/grammar.rs
index 4e5837312f..2930190cb3 100644
--- a/crates/parser/src/grammar.rs
+++ b/crates/parser/src/grammar.rs
@@ -13,7 +13,7 @@
//! Code in this module also contains inline tests, which start with
//! `// test name-of-the-test` comment and look like this:
//!
-//! ```
+//! ```text
//! // test function_with_zero_parameters
//! // fn foo() {}
//! ```
@@ -418,7 +418,7 @@ fn delimited(
}
if !p.eat(delim) {
if p.at_ts(first_set) {
- p.error(format!("expected {:?}", delim));
+ p.error(format!("expected {delim:?}"));
} else {
break;
}
diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs
index eff6b66404..882c243b0c 100644
--- a/crates/parser/src/grammar/patterns.rs
+++ b/crates/parser/src/grammar/patterns.rs
@@ -181,7 +181,7 @@ fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
// ^
if matches!(
p.current(),
- T![=] | T![,] | T![:] | T![')'] | T!['}'] | T![']'] | T![if]
+ T![=] | T![,] | T![:] | T![')'] | T!['}'] | T![']'] | T![if] | EOF
) {
// test half_open_range_pat
// fn f() {
diff --git a/crates/parser/src/lexed_str.rs b/crates/parser/src/lexed_str.rs
index e5fec67de7..52b24b7372 100644
--- a/crates/parser/src/lexed_str.rs
+++ b/crates/parser/src/lexed_str.rs
@@ -31,7 +31,7 @@ struct LexError {
impl<'a> LexedStr<'a> {
pub fn new(text: &'a str) -> LexedStr<'a> {
- let _p = tracing::span!(tracing::Level::INFO, "LexedStr::new").entered();
+ let _p = tracing::info_span!("LexedStr::new").entered();
let mut conv = Converter::new(text);
if let Some(shebang_len) = rustc_lexer::strip_shebang(text) {
conv.res.push(SHEBANG, conv.offset);
diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs
index c7ad025f6b..c2f9ea4772 100644
--- a/crates/parser/src/lib.rs
+++ b/crates/parser/src/lib.rs
@@ -89,7 +89,7 @@ pub enum TopEntryPoint {
impl TopEntryPoint {
pub fn parse(&self, input: &Input, edition: Edition) -> Output {
- let _p = tracing::span!(tracing::Level::INFO, "TopEntryPoint::parse", ?self).entered();
+ let _p = tracing::info_span!("TopEntryPoint::parse", ?self).entered();
let entry_point: fn(&'_ mut parser::Parser<'_>) = match self {
TopEntryPoint::SourceFile => grammar::entry::top::source_file,
TopEntryPoint::MacroStmts => grammar::entry::top::macro_stmts,
diff --git a/crates/parser/src/shortcuts.rs b/crates/parser/src/shortcuts.rs
index cc2b63d1e6..7f49cc087a 100644
--- a/crates/parser/src/shortcuts.rs
+++ b/crates/parser/src/shortcuts.rs
@@ -26,7 +26,7 @@ pub enum StrStep<'a> {
impl LexedStr<'_> {
pub fn to_input(&self) -> crate::Input {
- let _p = tracing::span!(tracing::Level::INFO, "LexedStr::to_input").entered();
+ let _p = tracing::info_span!("LexedStr::to_input").entered();
let mut res = crate::Input::default();
let mut was_joint = false;
for i in 0..self.len() {
diff --git a/crates/paths/src/lib.rs b/crates/paths/src/lib.rs
index 2d3653401d..33c3f83db5 100644
--- a/crates/paths/src/lib.rs
+++ b/crates/paths/src/lib.rs
@@ -106,7 +106,7 @@ impl AbsPathBuf {
/// Panics if `path` is not absolute.
pub fn assert(path: Utf8PathBuf) -> AbsPathBuf {
AbsPathBuf::try_from(path)
- .unwrap_or_else(|path| panic!("expected absolute path, got {}", path))
+ .unwrap_or_else(|path| panic!("expected absolute path, got {path}"))
}
/// Wrap the given absolute path in `AbsPathBuf`
@@ -135,6 +135,24 @@ impl AbsPathBuf {
pub fn pop(&mut self) -> bool {
self.0.pop()
}
+
+ /// Equivalent of [`PathBuf::push`] for `AbsPathBuf`.
+ ///
+ /// Extends `self` with `path`.
+ ///
+ /// If `path` is absolute, it replaces the current path.
+ ///
+ /// On Windows:
+ ///
+ /// * if `path` has a root but no prefix (e.g., `\windows`), it
+ /// replaces everything except for the prefix (if any) of `self`.
+ /// * if `path` has a prefix but no root, it replaces `self`.
+ /// * if `self` has a verbatim prefix (e.g. `\\?\C:\windows`)
+ /// and `path` is not empty, the new path is normalized: all references
+ /// to `.` and `..` are removed.
+ pub fn push<P: AsRef<Utf8Path>>(&mut self, suffix: P) {
+ self.0.push(suffix)
+ }
}
impl fmt::Display for AbsPathBuf {
@@ -197,7 +215,7 @@ impl AbsPath {
///
/// Panics if `path` is not absolute.
pub fn assert(path: &Utf8Path) -> &AbsPath {
- assert!(path.is_absolute());
+ assert!(path.is_absolute(), "{path} is not absolute");
unsafe { &*(path as *const Utf8Path as *const AbsPath) }
}
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index 8749417290..8970bdd012 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -129,7 +129,7 @@ impl ProcMacroServer {
}
pub fn load_dylib(&self, dylib: MacroDylib) -> Result<Vec<ProcMacro>, ServerError> {
- let _p = tracing::span!(tracing::Level::INFO, "ProcMacroServer::load_dylib").entered();
+ let _p = tracing::info_span!("ProcMacroServer::load_dylib").entered();
let macros =
self.process.lock().unwrap_or_else(|e| e.into_inner()).find_proc_macros(&dylib.path)?;
diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs
index dce086d429..718a96dc80 100644
--- a/crates/proc-macro-api/src/process.rs
+++ b/crates/proc-macro-api/src/process.rs
@@ -50,8 +50,7 @@ impl ProcMacroProcessSrv {
Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new(
io::ErrorKind::Other,
format!(
- "proc-macro server's api version ({}) is newer than rust-analyzer's ({})",
- v, CURRENT_API_VERSION
+ "proc-macro server's api version ({v}) is newer than rust-analyzer's ({CURRENT_API_VERSION})"
),
)),
Ok(v) => {
diff --git a/crates/proc-macro-api/src/version.rs b/crates/proc-macro-api/src/version.rs
index f768de3e31..09b8125071 100644
--- a/crates/proc-macro-api/src/version.rs
+++ b/crates/proc-macro-api/src/version.rs
@@ -93,6 +93,7 @@ fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'
/// means bytes from here(including this sequence) are compressed in
/// snappy compression format. Version info is inside here, so decompress
/// this.
+///
/// The bytes you get after decompressing the snappy format portion has
/// following layout:
/// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes(again)
@@ -102,6 +103,7 @@ fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'
/// for the version string's utf8 bytes
/// * [version string bytes encoded in utf8] <- GET THIS BOI
/// * [some more bytes that we don't really care but about still there] :-)
+///
/// Check this issue for more about the bytes layout:
/// <https://github.com/rust-lang/rust-analyzer/issues/6174>
pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs
index 181c07f46b..35643dcc02 100644
--- a/crates/project-model/src/lib.rs
+++ b/crates/project-model/src/lib.rs
@@ -22,7 +22,7 @@ mod cargo_workspace;
mod cfg;
mod env;
mod manifest_path;
-mod project_json;
+pub mod project_json;
mod rustc_cfg;
mod sysroot;
pub mod target_data_layout;
diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs
index 5bee446f61..4a916e570b 100644
--- a/crates/project-model/src/project_json.rs
+++ b/crates/project-model/src/project_json.rs
@@ -33,7 +33,7 @@
//!
//! * file on disk
//! * a field in the config (ie, you can send a JSON request with the contents
-//! of rust-project.json to rust-analyzer, no need to write anything to disk)
+//! of `rust-project.json` to rust-analyzer, no need to write anything to disk)
//!
//! Another possible thing we don't do today, but which would be totally valid,
//! is to add an extension point to VS Code extension to register custom
@@ -55,8 +55,7 @@ use rustc_hash::FxHashMap;
use serde::{de, Deserialize, Serialize};
use span::Edition;
-use crate::cfg::CfgFlag;
-use crate::ManifestPath;
+use crate::{cfg::CfgFlag, ManifestPath, TargetKind};
/// Roots and crates that compose this Rust project.
#[derive(Clone, Debug, Eq, PartialEq)]
@@ -68,6 +67,10 @@ pub struct ProjectJson {
project_root: AbsPathBuf,
manifest: Option<ManifestPath>,
crates: Vec<Crate>,
+ /// Configuration for CLI commands.
+ ///
+ /// Examples include a check build or a test run.
+ runnables: Vec<Runnable>,
}
/// A crate points to the root module of a crate and lists the dependencies of the crate. This is
@@ -88,6 +91,86 @@ pub struct Crate {
pub(crate) exclude: Vec<AbsPathBuf>,
pub(crate) is_proc_macro: bool,
pub(crate) repository: Option<String>,
+ pub build: Option<Build>,
+}
+
+/// Additional, build-specific data about a crate.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct Build {
+ /// The name associated with this crate.
+ ///
+ /// This is determined by the build system that produced
+ /// the `rust-project.json` in question. For instance, if buck were used,
+ /// the label might be something like `//ide/rust/rust-analyzer:rust-analyzer`.
+ ///
+ /// Do not attempt to parse the contents of this string; it is a build system-specific
+ /// identifier similar to [`Crate::display_name`].
+ pub label: String,
+ /// Path corresponding to the build system-specific file defining the crate.
+ ///
+ /// It is roughly analogous to [`ManifestPath`], but it should *not* be used with
+ /// [`crate::ProjectManifest::from_manifest_file`], as the build file may not be
+ /// in the `rust-project.json`.
+ pub build_file: Utf8PathBuf,
+ /// The kind of target.
+ ///
+ /// Examples (non-exhaustively) include [`TargetKind::Bin`], [`TargetKind::Lib`],
+ /// and [`TargetKind::Test`]. This information is used to determine what sort
+ /// of runnable codelens to provide, if any.
+ pub target_kind: TargetKind,
+}
+
+/// A template-like structure for describing runnables.
+///
+/// These are used for running and debugging binaries and tests without encoding
+/// build system-specific knowledge into rust-analyzer.
+///
+/// # Example
+///
+/// Below is an example of a test runnable. `{label}` and `{test_id}`
+/// are explained in [`Runnable::args`]'s documentation.
+///
+/// ```json
+/// {
+/// "program": "buck",
+/// "args": [
+/// "test",
+/// "{label}",
+/// "--",
+/// "{test_id}",
+/// "--print-passing-details"
+/// ],
+/// "cwd": "/home/user/repo-root/",
+/// "kind": "testOne"
+/// }
+/// ```
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Runnable {
+ /// The program invoked by the runnable.
+ ///
+ /// For example, this might be `cargo`, `buck`, or `bazel`.
+ pub program: String,
+ /// The arguments passed to [`Runnable::program`].
+ ///
+ /// The args can contain two template strings: `{label}` and `{test_id}`.
+ /// rust-analyzer will find and replace `{label}` with [`Build::label`] and
+ /// `{test_id}` with the test name.
+ pub args: Vec<String>,
+ /// The current working directory of the runnable.
+ pub cwd: Utf8PathBuf,
+ pub kind: RunnableKind,
+}
+
+/// The kind of runnable.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum RunnableKind {
+ Check,
+
+ /// Can run a binary.
+ Run,
+
+ /// Run a single test.
+ TestOne,
}
impl ProjectJson {
@@ -95,6 +178,7 @@ impl ProjectJson {
///
/// # Arguments
///
+ /// * `manifest` - The path to the `rust-project.json`.
/// * `base` - The path to the workspace root (i.e. the folder containing `rust-project.json`)
/// * `data` - The parsed contents of `rust-project.json`, or project json that's passed via
/// configuration.
@@ -109,6 +193,7 @@ impl ProjectJson {
sysroot_src: data.sysroot_src.map(absolutize_on_base),
project_root: base.to_path_buf(),
manifest,
+ runnables: data.runnables.into_iter().map(Runnable::from).collect(),
crates: data
.crates
.into_iter()
@@ -127,6 +212,15 @@ impl ProjectJson {
None => (vec![root_module.parent().unwrap().to_path_buf()], Vec::new()),
};
+ let build = match crate_data.build {
+ Some(build) => Some(Build {
+ label: build.label,
+ build_file: build.build_file,
+ target_kind: build.target_kind.into(),
+ }),
+ None => None,
+ };
+
Crate {
display_name: crate_data
.display_name
@@ -146,6 +240,7 @@ impl ProjectJson {
exclude,
is_proc_macro: crate_data.is_proc_macro,
repository: crate_data.repository,
+ build,
}
})
.collect(),
@@ -167,10 +262,27 @@ impl ProjectJson {
&self.project_root
}
+ pub fn crate_by_root(&self, root: &AbsPath) -> Option<Crate> {
+ self.crates
+ .iter()
+ .filter(|krate| krate.is_workspace_member)
+ .find(|krate| krate.root_module == root)
+ .cloned()
+ }
+
+ /// Returns the path to the project's manifest, if it exists.
+ pub fn manifest(&self) -> Option<&ManifestPath> {
+ self.manifest.as_ref()
+ }
+
/// Returns the path to the project's manifest or root folder, if no manifest exists.
pub fn manifest_or_root(&self) -> &AbsPath {
self.manifest.as_ref().map_or(&self.project_root, |manifest| manifest.as_ref())
}
+
+ pub fn runnables(&self) -> &[Runnable] {
+ &self.runnables
+ }
}
#[derive(Serialize, Deserialize, Debug, Clone)]
@@ -178,6 +290,8 @@ pub struct ProjectJsonData {
sysroot: Option<Utf8PathBuf>,
sysroot_src: Option<Utf8PathBuf>,
crates: Vec<CrateData>,
+ #[serde(default)]
+ runnables: Vec<RunnableData>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
@@ -200,6 +314,8 @@ struct CrateData {
is_proc_macro: bool,
#[serde(default)]
repository: Option<String>,
+ #[serde(default)]
+ build: Option<BuildData>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
@@ -215,6 +331,48 @@ enum EditionData {
Edition2024,
}
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct BuildData {
+ label: String,
+ build_file: Utf8PathBuf,
+ target_kind: TargetKindData,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+pub struct RunnableData {
+ pub program: String,
+ pub args: Vec<String>,
+ pub cwd: Utf8PathBuf,
+ pub kind: RunnableKindData,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub enum RunnableKindData {
+ Check,
+ Run,
+ TestOne,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub enum TargetKindData {
+ Bin,
+ /// Any kind of Cargo lib crate-type (dylib, rlib, proc-macro, ...).
+ Lib,
+ Test,
+}
+
+impl From<TargetKindData> for TargetKind {
+ fn from(data: TargetKindData) -> Self {
+ match data {
+ TargetKindData::Bin => TargetKind::Bin,
+ TargetKindData::Lib => TargetKind::Lib { is_proc_macro: false },
+ TargetKindData::Test => TargetKind::Test,
+ }
+ }
+}
+
impl From<EditionData> for Edition {
fn from(data: EditionData) -> Self {
match data {
@@ -226,6 +384,22 @@ impl From<EditionData> for Edition {
}
}
+impl From<RunnableData> for Runnable {
+ fn from(data: RunnableData) -> Self {
+ Runnable { program: data.program, args: data.args, cwd: data.cwd, kind: data.kind.into() }
+ }
+}
+
+impl From<RunnableKindData> for RunnableKind {
+ fn from(data: RunnableKindData) -> Self {
+ match data {
+ RunnableKindData::Check => RunnableKind::Check,
+ RunnableKindData::Run => RunnableKind::Run,
+ RunnableKindData::TestOne => RunnableKind::TestOne,
+ }
+ }
+}
+
/// Identifies a crate by position in the crates array.
///
/// This will differ from `CrateId` when multiple `ProjectJson`
diff --git a/crates/project-model/src/rustc_cfg.rs b/crates/project-model/src/rustc_cfg.rs
index 26499308ce..599897f84a 100644
--- a/crates/project-model/src/rustc_cfg.rs
+++ b/crates/project-model/src/rustc_cfg.rs
@@ -21,7 +21,7 @@ pub(crate) fn get(
extra_env: &FxHashMap<String, String>,
config: RustcCfgConfig<'_>,
) -> Vec<CfgFlag> {
- let _p = tracing::span!(tracing::Level::INFO, "rustc_cfg::get").entered();
+ let _p = tracing::info_span!("rustc_cfg::get").entered();
let mut res = Vec::with_capacity(6 * 2 + 1);
// Some nightly-only cfgs, which are required for stdlib
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index 653e7157bc..1eeec4cede 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -219,8 +219,7 @@ impl Sysroot {
", try running `rustup component add rust-src` to possibly fix this"
};
sysroot.error = Some(format!(
- "sysroot at `{}` is missing a `core` library{var_note}",
- src_root,
+ "sysroot at `{src_root}` is missing a `core` library{var_note}",
));
}
}
diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs
index a6730863d6..2762de5997 100644
--- a/crates/project-model/src/tests.rs
+++ b/crates/project-model/src/tests.rs
@@ -126,7 +126,7 @@ fn replace_fake_sys_root(s: &mut String) {
let fake_sysroot_path = get_test_path("fake-sysroot");
let fake_sysroot_path = if cfg!(windows) {
let normalized_path = fake_sysroot_path.as_str().replace('\\', r#"\\"#);
- format!(r#"{}\\"#, normalized_path)
+ format!(r#"{normalized_path}\\"#)
} else {
format!("{}/", fake_sysroot_path.as_str())
};
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index 0d2174073a..17e40e74de 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -76,7 +76,7 @@ pub enum ProjectWorkspaceKind {
/// Environment variables set in the `.cargo/config` file.
cargo_config_extra_env: FxHashMap<String, String>,
},
- /// Project workspace was manually specified using a `rust-project.json` file.
+ /// Project workspace was specified using a `rust-project.json` file.
Json(ProjectJson),
// FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning.
// That's not the end user experience we should strive for.
@@ -527,6 +527,16 @@ impl ProjectWorkspace {
}
}
+ pub fn manifest(&self) -> Option<&ManifestPath> {
+ match &self.kind {
+ ProjectWorkspaceKind::Cargo { cargo, .. } => Some(cargo.manifest_path()),
+ ProjectWorkspaceKind::Json(project) => project.manifest(),
+ ProjectWorkspaceKind::DetachedFile { cargo, .. } => {
+ Some(cargo.as_ref()?.0.manifest_path())
+ }
+ }
+ }
+
pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
self.sysroot.discover_proc_macro_srv()
}
@@ -705,7 +715,7 @@ impl ProjectWorkspace {
load: FileLoader<'_>,
extra_env: &FxHashMap<String, String>,
) -> (CrateGraph, ProcMacroPaths) {
- let _p = tracing::span!(tracing::Level::INFO, "ProjectWorkspace::to_crate_graph").entered();
+ let _p = tracing::info_span!("ProjectWorkspace::to_crate_graph").entered();
let Self { kind, sysroot, cfg_overrides, rustc_cfg, .. } = self;
let ((mut crate_graph, proc_macros), sysroot) = match kind {
@@ -949,7 +959,7 @@ fn cargo_to_crate_graph(
override_cfg: &CfgOverrides,
build_scripts: &WorkspaceBuildScripts,
) -> (CrateGraph, ProcMacroPaths) {
- let _p = tracing::span!(tracing::Level::INFO, "cargo_to_crate_graph").entered();
+ let _p = tracing::info_span!("cargo_to_crate_graph").entered();
let mut res = (CrateGraph::default(), ProcMacroPaths::default());
let crate_graph = &mut res.0;
let proc_macros = &mut res.1;
@@ -1134,7 +1144,7 @@ fn detached_file_to_crate_graph(
sysroot: &Sysroot,
override_cfg: &CfgOverrides,
) -> (CrateGraph, ProcMacroPaths) {
- let _p = tracing::span!(tracing::Level::INFO, "detached_file_to_crate_graph").entered();
+ let _p = tracing::info_span!("detached_file_to_crate_graph").entered();
let mut crate_graph = CrateGraph::default();
let (public_deps, _libproc_macro) =
sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load);
@@ -1365,7 +1375,7 @@ fn sysroot_to_crate_graph(
rustc_cfg: Vec<CfgFlag>,
load: FileLoader<'_>,
) -> (SysrootPublicDeps, Option<CrateId>) {
- let _p = tracing::span!(tracing::Level::INFO, "sysroot_to_crate_graph").entered();
+ let _p = tracing::info_span!("sysroot_to_crate_graph").entered();
match sysroot.mode() {
SysrootMode::Workspace(cargo) => {
let (mut cg, mut pm) = cargo_to_crate_graph(
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 34b3e49314..8ff7235b8f 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -22,6 +22,7 @@ path = "src/bin/main.rs"
[dependencies]
anyhow.workspace = true
crossbeam-channel = "0.5.5"
+dirs = "5.0.1"
dissimilar.workspace = true
itertools.workspace = true
scip = "0.3.3"
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index 9daae914d7..774784f37b 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -15,7 +15,11 @@ use std::{env, fs, path::PathBuf, process::ExitCode, sync::Arc};
use anyhow::Context;
use lsp_server::Connection;
-use rust_analyzer::{cli::flags, config::Config, from_json};
+use rust_analyzer::{
+ cli::flags,
+ config::{Config, ConfigChange, ConfigErrors},
+ from_json,
+};
use semver::Version;
use tracing_subscriber::fmt::writer::BoxMakeWriter;
use vfs::AbsPathBuf;
@@ -220,16 +224,22 @@ fn run_server() -> anyhow::Result<()> {
.filter(|workspaces| !workspaces.is_empty())
.unwrap_or_else(|| vec![root_path.clone()]);
let mut config =
- Config::new(root_path, capabilities, workspace_roots, visual_studio_code_version);
+ Config::new(root_path, capabilities, workspace_roots, visual_studio_code_version, None);
if let Some(json) = initialization_options {
- if let Err(e) = config.update(json) {
+ let mut change = ConfigChange::default();
+ change.change_client_config(json);
+
+ let error_sink: ConfigErrors;
+ (config, error_sink, _) = config.apply_change(change);
+
+ if !error_sink.is_empty() {
use lsp_types::{
notification::{Notification, ShowMessage},
MessageType, ShowMessageParams,
};
let not = lsp_server::Notification::new(
ShowMessage::METHOD.to_owned(),
- ShowMessageParams { typ: MessageType::WARNING, message: e.to_string() },
+ ShowMessageParams { typ: MessageType::WARNING, message: error_sink.to_string() },
);
connection.sender.send(lsp_server::Message::Notification(not)).unwrap();
}
diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs
index a1469c22ab..a207be3cac 100644
--- a/crates/rust-analyzer/src/caps.rs
+++ b/crates/rust-analyzer/src/caps.rs
@@ -223,7 +223,7 @@ fn code_action_capabilities(client_caps: &ClientCapabilities) -> CodeActionProvi
fn more_trigger_character(config: &Config) -> Vec<String> {
let mut res = vec![".".to_owned(), ">".to_owned(), "{".to_owned(), "(".to_owned()];
- if config.snippet_cap() {
+ if config.snippet_cap().is_some() {
res.push("<".to_owned());
}
res
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index bded41932c..90b81d0a80 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -25,7 +25,7 @@ use ide_db::{
salsa::{self, debug::DebugQueryTable, ParallelDatabase},
SourceDatabase, SourceDatabaseExt,
},
- LineIndexDatabase,
+ LineIndexDatabase, SnippetCap,
};
use itertools::Itertools;
use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
@@ -479,7 +479,7 @@ impl flags::AnalysisStats {
.or_insert(1);
} else {
acc.syntax_errors += 1;
- bar.println(format!("Syntax error: \n{}", err));
+ bar.println(format!("Syntax error: \n{err}"));
}
}
}
@@ -982,6 +982,7 @@ impl flags::AnalysisStats {
disable_experimental: false,
disabled: Default::default(),
expr_fill_default: Default::default(),
+ snippet_cap: SnippetCap::new(true),
insert_use: ide_db::imports::insert_use::InsertUseConfig {
granularity: ide_db::imports::insert_use::ImportGranularity::Crate,
enforce_granularity: true,
diff --git a/crates/rust-analyzer/src/cli/parse.rs b/crates/rust-analyzer/src/cli/parse.rs
index ed048aa635..85ec95409a 100644
--- a/crates/rust-analyzer/src/cli/parse.rs
+++ b/crates/rust-analyzer/src/cli/parse.rs
@@ -6,7 +6,7 @@ use crate::cli::{flags, read_stdin};
impl flags::Parse {
pub fn run(self) -> anyhow::Result<()> {
- let _p = tracing::span!(tracing::Level::INFO, "flags::Parse::run").entered();
+ let _p = tracing::info_span!("flags::Parse::run").entered();
let text = read_stdin()?;
let file = SourceFile::parse(&text, Edition::CURRENT).tree();
if !self.no_dump {
diff --git a/crates/rust-analyzer/src/cli/run_tests.rs b/crates/rust-analyzer/src/cli/run_tests.rs
index a2d0dcc599..10cb2d5ad6 100644
--- a/crates/rust-analyzer/src/cli/run_tests.rs
+++ b/crates/rust-analyzer/src/cli/run_tests.rs
@@ -49,7 +49,7 @@ impl flags::RunTests {
let mut sw_all = StopWatch::start();
for test in tests {
let full_name = full_name_of_item(db, test.module(db), test.name(db));
- println!("test {}", full_name);
+ println!("test {full_name}");
if test.is_ignore(db) {
println!("ignored");
ignore_count += 1;
@@ -62,7 +62,7 @@ impl flags::RunTests {
} else {
fail_count += 1;
}
- println!("{}", result);
+ println!("{result}");
eprintln!("{:<20} {}", format!("test {}", full_name), sw_one.elapsed());
}
println!("{pass_count} passed, {fail_count} failed, {ignore_count} ignored");
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index e9a4db7a2b..31565878d8 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -220,8 +220,8 @@ impl Tester {
self.pass_count += 1;
} else {
println!("{p:?} FAIL");
- println!("actual (r-a) = {:?}", actual);
- println!("expected (rustc) = {:?}", expected);
+ println!("actual (r-a) = {actual:?}");
+ println!("expected (rustc) = {expected:?}");
self.fail_count += 1;
}
}
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index aef2c1be22..8f60b17b59 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -10,9 +10,11 @@ use ide_db::LineIndexDatabase;
use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
use rustc_hash::{FxHashMap, FxHashSet};
use scip::types as scip_types;
+use tracing::error;
use crate::{
cli::flags,
+ config::ConfigChange,
line_index::{LineEndings, LineIndex, PositionEncoding},
};
@@ -35,12 +37,20 @@ impl flags::Scip {
lsp_types::ClientCapabilities::default(),
vec![],
None,
+ None,
);
if let Some(p) = self.config_path {
let mut file = std::io::BufReader::new(std::fs::File::open(p)?);
let json = serde_json::from_reader(&mut file)?;
- config.update(json)?;
+ let mut change = ConfigChange::default();
+ change.change_client_config(json);
+
+ let error_sink;
+ (config, error_sink, _) = config.apply_change(change);
+
+ // FIXME @alibektas : What happens to errors without logging?
+ error!(?error_sink, "Config Error(s)");
}
let cargo_config = config.cargo();
let (db, vfs, _) = load_workspace_at(
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index a8d1e72aed..e8504979be 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -1,14 +1,12 @@
//! Config used by the language server.
//!
-//! We currently get this config from `initialize` LSP request, which is not the
-//! best way to do it, but was the simplest thing we could implement.
-//!
//! Of particular interest is the `feature_flags` hash map: while other fields
//! configure the server itself, feature flags are passed into analysis, and
//! tweak things like automatic insertion of `()` in completions.
-use std::{fmt, iter, ops::Not};
+use std::{fmt, iter, ops::Not, sync::OnceLock};
use cfg::{CfgAtom, CfgDiff};
+use dirs::config_dir;
use flycheck::{CargoOptions, FlycheckConfig};
use ide::{
AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,
@@ -29,9 +27,13 @@ use project_model::{
};
use rustc_hash::{FxHashMap, FxHashSet};
use semver::Version;
-use serde::{de::DeserializeOwned, Deserialize, Serialize};
+use serde::{
+ de::{DeserializeOwned, Error},
+ Deserialize, Serialize,
+};
use stdx::format_to_acc;
-use vfs::{AbsPath, AbsPathBuf};
+use triomphe::Arc;
+use vfs::{AbsPath, AbsPathBuf, VfsPath};
use crate::{
caps::completion_item_edit_resolve,
@@ -60,6 +62,7 @@ mod patch_old_style;
// parsing the old name.
config_data! {
/// Configs that apply on a workspace-wide scope. There are 3 levels on which a global configuration can be configured
+ // FIXME: 1. and 3. should be split, some configs do not make sense per project
///
/// 1. `rust-analyzer.toml` file under user's config directory (e.g ~/.config/rust-analyzer.toml)
/// 2. Client's own configurations (e.g `settings.json` on VS Code)
@@ -67,16 +70,10 @@ config_data! {
///
/// A config is searched for by traversing a "config tree" in a bottom up fashion. It is chosen by the nearest first principle.
global: struct GlobalDefaultConfigData <- GlobalConfigInput -> {
- /// Whether to insert #[must_use] when generating `as_` methods
- /// for enum variants.
- assist_emitMustUse: bool = false,
- /// Placeholder expression to use for missing expressions in assists.
- assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo,
-
/// Warm up caches on project load.
cachePriming_enable: bool = true,
/// How many worker threads to handle priming caches. The default `0` means to pick automatically.
- cachePriming_numThreads: ParallelCachePrimingNumThreads = 0u8,
+ cachePriming_numThreads: NumThreads = NumThreads::Physical,
/// Pass `--all-targets` to cargo invocation.
cargo_allTargets: bool = true,
@@ -272,87 +269,12 @@ config_data! {
/// The warnings will be indicated by a blue squiggly underline in code
/// and a blue icon in the `Problems Panel`.
diagnostics_warningsAsInfo: Vec<String> = vec![],
+
/// These directories will be ignored by rust-analyzer. They are
/// relative to the workspace root, and globs are not supported. You may
/// also need to add the folders to Code's `files.watcherExclude`.
files_excludeDirs: Vec<Utf8PathBuf> = vec![],
- /// Controls file watching implementation.
- files_watcher: FilesWatcherDef = FilesWatcherDef::Client,
- /// Whether to show `Debug` action. Only applies when
- /// `#rust-analyzer.hover.actions.enable#` is set.
- hover_actions_debug_enable: bool = true,
- /// Whether to show HoverActions in Rust files.
- hover_actions_enable: bool = true,
- /// Whether to show `Go to Type Definition` action. Only applies when
- /// `#rust-analyzer.hover.actions.enable#` is set.
- hover_actions_gotoTypeDef_enable: bool = true,
- /// Whether to show `Implementations` action. Only applies when
- /// `#rust-analyzer.hover.actions.enable#` is set.
- hover_actions_implementations_enable: bool = true,
- /// Whether to show `References` action. Only applies when
- /// `#rust-analyzer.hover.actions.enable#` is set.
- hover_actions_references_enable: bool = false,
- /// Whether to show `Run` action. Only applies when
- /// `#rust-analyzer.hover.actions.enable#` is set.
- hover_actions_run_enable: bool = true,
-
- /// Whether to show documentation on hover.
- hover_documentation_enable: bool = true,
- /// Whether to show keyword hover popups. Only applies when
- /// `#rust-analyzer.hover.documentation.enable#` is set.
- hover_documentation_keywords_enable: bool = true,
- /// Use markdown syntax for links on hover.
- hover_links_enable: bool = true,
- /// How to render the align information in a memory layout hover.
- hover_memoryLayout_alignment: Option<MemoryLayoutHoverRenderKindDef> = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal),
- /// Whether to show memory layout data on hover.
- hover_memoryLayout_enable: bool = true,
- /// How to render the niche information in a memory layout hover.
- hover_memoryLayout_niches: Option<bool> = Some(false),
- /// How to render the offset information in a memory layout hover.
- hover_memoryLayout_offset: Option<MemoryLayoutHoverRenderKindDef> = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal),
- /// How to render the size information in a memory layout hover.
- hover_memoryLayout_size: Option<MemoryLayoutHoverRenderKindDef> = Some(MemoryLayoutHoverRenderKindDef::Both),
-
- /// How many variants of an enum to display when hovering on. Show none if empty.
- hover_show_enumVariants: Option<usize> = Some(5),
- /// How many fields of a struct, variant or union to display when hovering on. Show none if empty.
- hover_show_fields: Option<usize> = Some(5),
- /// How many associated items of a trait to display when hovering a trait.
- hover_show_traitAssocItems: Option<usize> = None,
-
- /// Enables the experimental support for interpreting tests.
- interpret_tests: bool = false,
-
- /// Whether to show `Debug` lens. Only applies when
- /// `#rust-analyzer.lens.enable#` is set.
- lens_debug_enable: bool = true,
- /// Whether to show CodeLens in Rust files.
- lens_enable: bool = true,
- /// Internal config: use custom client-side commands even when the
- /// client doesn't set the corresponding capability.
- lens_forceCustomCommands: bool = true,
- /// Whether to show `Implementations` lens. Only applies when
- /// `#rust-analyzer.lens.enable#` is set.
- lens_implementations_enable: bool = true,
- /// Where to render annotations.
- lens_location: AnnotationLocation = AnnotationLocation::AboveName,
- /// Whether to show `References` lens for Struct, Enum, and Union.
- /// Only applies when `#rust-analyzer.lens.enable#` is set.
- lens_references_adt_enable: bool = false,
- /// Whether to show `References` lens for Enum Variants.
- /// Only applies when `#rust-analyzer.lens.enable#` is set.
- lens_references_enumVariant_enable: bool = false,
- /// Whether to show `Method References` lens. Only applies when
- /// `#rust-analyzer.lens.enable#` is set.
- lens_references_method_enable: bool = false,
- /// Whether to show `References` lens for Trait.
- /// Only applies when `#rust-analyzer.lens.enable#` is set.
- lens_references_trait_enable: bool = false,
- /// Whether to show `Run` lens. Only applies when
- /// `#rust-analyzer.lens.enable#` is set.
- lens_run_enable: bool = true,
/// Disable project auto-discovery in favor of explicitly specified set
/// of projects.
@@ -367,31 +289,10 @@ config_data! {
/// Sets the LRU capacity of the specified queries.
lru_query_capacities: FxHashMap<Box<str>, usize> = FxHashMap::default(),
- /// Whether to show `can't find Cargo.toml` error message.
- notifications_cargoTomlNotFound: bool = true,
-
- /// Whether to send an UnindexedProject notification to the client.
- notifications_unindexedProject: bool = false,
-
- /// How many worker threads in the main loop. The default `null` means to pick automatically.
- numThreads: Option<usize> = None,
-
- /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
- procMacro_attributes_enable: bool = true,
- /// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
- procMacro_enable: bool = true,
/// These proc-macros will be ignored when trying to expand them.
///
/// This config takes a map of crate names with the exported proc-macro names to ignore as values.
procMacro_ignored: FxHashMap<Box<str>, Box<[Box<str>]>> = FxHashMap::default(),
- /// Internal config, path to proc-macro server executable.
- procMacro_server: Option<Utf8PathBuf> = None,
-
- /// Exclude imports from find-all-references.
- references_excludeImports: bool = false,
-
- /// Exclude tests from find-all-references.
- references_excludeTests: bool = false,
/// Command to be executed instead of 'cargo' for runnables.
runnables_command: Option<String> = None,
@@ -429,34 +330,41 @@ config_data! {
/// `textDocument/rangeFormatting` request. The rustfmt option is unstable and only
/// available on a nightly build.
rustfmt_rangeFormatting_enable: bool = false,
-
-
- /// Show full signature of the callable. Only shows parameters if disabled.
- signatureInfo_detail: SignatureDetail = SignatureDetail::Full,
- /// Show documentation.
- signatureInfo_documentation_enable: bool = true,
-
- /// Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.
- typing_autoClosingAngleBrackets_enable: bool = false,
-
- /// Workspace symbol search kind.
- workspace_symbol_search_kind: WorkspaceSymbolSearchKindDef = WorkspaceSymbolSearchKindDef::OnlyTypes,
- /// Limits the number of items returned from a workspace symbol search (Defaults to 128).
- /// Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.
- /// Other clients requires all results upfront and might require a higher limit.
- workspace_symbol_search_limit: usize = 128,
- /// Workspace symbol search scope.
- workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = WorkspaceSymbolSearchScopeDef::Workspace,
}
}
config_data! {
- /// Local configurations can be overridden for every crate by placing a `rust-analyzer.toml` on crate root.
- /// A config is searched for by traversing a "config tree" in a bottom up fashion. It is chosen by the nearest first principle.
+ /// Local configurations can be defined per `SourceRoot`. This almost always corresponds to a `Crate`.
local: struct LocalDefaultConfigData <- LocalConfigInput -> {
+ /// Whether to insert #[must_use] when generating `as_` methods
+ /// for enum variants.
+ assist_emitMustUse: bool = false,
+ /// Placeholder expression to use for missing expressions in assists.
+ assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo,
/// Term search fuel in "units of work" for assists (Defaults to 400).
assist_termSearch_fuel: usize = 400,
+ /// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
+ imports_granularity_enforce: bool = false,
+ /// How imports should be grouped into use statements.
+ imports_granularity_group: ImportGranularityDef = ImportGranularityDef::Crate,
+ /// Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines.
+ imports_group_enable: bool = true,
+ /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
+ imports_merge_glob: bool = true,
+ /// Prefer to unconditionally use imports of the core and alloc crate, over the std crate.
+ imports_preferNoStd | imports_prefer_no_std: bool = false,
+ /// Whether to prefer import paths containing a `prelude` module.
+ imports_preferPrelude: bool = false,
+ /// The path structure for newly inserted paths to use.
+ imports_prefix: ImportPrefixDef = ImportPrefixDef::Plain,
+ }
+}
+
+config_data! {
+ /// Configs that only make sense when they are set by a client. As such they can only be defined
+ /// by setting them using client's settings (e.g. `settings.json` on VS Code).
+ client: struct ClientDefaultConfigData <- ClientConfigInput -> {
/// Toggles the additional completions that automatically add imports when completed.
/// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
completion_autoimport_enable: bool = true,
@@ -521,6 +429,9 @@ config_data! {
/// Term search fuel in "units of work" for autocompletion (Defaults to 200).
completion_termSearch_fuel: usize = 200,
+ /// Controls file watching implementation.
+ files_watcher: FilesWatcherDef = FilesWatcherDef::Client,
+
/// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
highlightRelated_breakPoints_enable: bool = true,
/// Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure.
@@ -532,21 +443,48 @@ config_data! {
/// Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.
highlightRelated_yieldPoints_enable: bool = true,
- /// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
- imports_granularity_enforce: bool = false,
- /// How imports should be grouped into use statements.
- imports_granularity_group: ImportGranularityDef = ImportGranularityDef::Crate,
- /// Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines.
- imports_group_enable: bool = true,
- /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
- imports_merge_glob: bool = true,
- /// Prefer to unconditionally use imports of the core and alloc crate, over the std crate.
- imports_preferNoStd | imports_prefer_no_std: bool = false,
- /// Whether to prefer import paths containing a `prelude` module.
- imports_preferPrelude: bool = false,
- /// The path structure for newly inserted paths to use.
- imports_prefix: ImportPrefixDef = ImportPrefixDef::Plain,
+ /// Whether to show `Debug` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_debug_enable: bool = true,
+ /// Whether to show HoverActions in Rust files.
+ hover_actions_enable: bool = true,
+ /// Whether to show `Go to Type Definition` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_gotoTypeDef_enable: bool = true,
+ /// Whether to show `Implementations` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_implementations_enable: bool = true,
+ /// Whether to show `References` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_references_enable: bool = false,
+ /// Whether to show `Run` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_run_enable: bool = true,
+ /// Whether to show documentation on hover.
+ hover_documentation_enable: bool = true,
+ /// Whether to show keyword hover popups. Only applies when
+ /// `#rust-analyzer.hover.documentation.enable#` is set.
+ hover_documentation_keywords_enable: bool = true,
+ /// Use markdown syntax for links on hover.
+ hover_links_enable: bool = true,
+ /// How to render the align information in a memory layout hover.
+ hover_memoryLayout_alignment: Option<MemoryLayoutHoverRenderKindDef> = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal),
+ /// Whether to show memory layout data on hover.
+ hover_memoryLayout_enable: bool = true,
+ /// How to render the niche information in a memory layout hover.
+ hover_memoryLayout_niches: Option<bool> = Some(false),
+ /// How to render the offset information in a memory layout hover.
+ hover_memoryLayout_offset: Option<MemoryLayoutHoverRenderKindDef> = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal),
+ /// How to render the size information in a memory layout hover.
+ hover_memoryLayout_size: Option<MemoryLayoutHoverRenderKindDef> = Some(MemoryLayoutHoverRenderKindDef::Both),
+
+ /// How many variants of an enum to display when hovering on. Show none if empty.
+ hover_show_enumVariants: Option<usize> = Some(5),
+ /// How many fields of a struct, variant or union to display when hovering on. Show none if empty.
+ hover_show_fields: Option<usize> = Some(5),
+ /// How many associated items of a trait to display when hovering a trait.
+ hover_show_traitAssocItems: Option<usize> = None,
/// Whether to show inlay type hints for binding modes.
inlayHints_bindingModeHints_enable: bool = false,
@@ -597,6 +535,8 @@ config_data! {
/// Whether to hide inlay type hints for constructors.
inlayHints_typeHints_hideNamedConstructor: bool = false,
+ /// Enables the experimental support for interpreting tests.
+ interpret_tests: bool = false,
/// Join lines merges consecutive declaration and initialization of an assignment.
joinLines_joinAssignments: bool = true,
@@ -607,6 +547,57 @@ config_data! {
/// Join lines unwraps trivial blocks.
joinLines_unwrapTrivialBlock: bool = true,
+ /// Whether to show `Debug` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_debug_enable: bool = true,
+ /// Whether to show CodeLens in Rust files.
+ lens_enable: bool = true,
+ /// Internal config: use custom client-side commands even when the
+ /// client doesn't set the corresponding capability.
+ lens_forceCustomCommands: bool = true,
+ /// Whether to show `Implementations` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_implementations_enable: bool = true,
+ /// Where to render annotations.
+ lens_location: AnnotationLocation = AnnotationLocation::AboveName,
+ /// Whether to show `References` lens for Struct, Enum, and Union.
+ /// Only applies when `#rust-analyzer.lens.enable#` is set.
+ lens_references_adt_enable: bool = false,
+ /// Whether to show `References` lens for Enum Variants.
+ /// Only applies when `#rust-analyzer.lens.enable#` is set.
+ lens_references_enumVariant_enable: bool = false,
+ /// Whether to show `Method References` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_references_method_enable: bool = false,
+ /// Whether to show `References` lens for Trait.
+ /// Only applies when `#rust-analyzer.lens.enable#` is set.
+ lens_references_trait_enable: bool = false,
+ /// Whether to show `Run` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_run_enable: bool = true,
+
+ /// Whether to show `can't find Cargo.toml` error message.
+ notifications_cargoTomlNotFound: bool = true,
+
+ /// Whether to send an UnindexedProject notification to the client.
+ notifications_unindexedProject: bool = false,
+
+ /// How many worker threads in the main loop. The default `null` means to pick automatically.
+ numThreads: Option<NumThreads> = None,
+
+ /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
+ procMacro_attributes_enable: bool = true,
+ /// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
+ procMacro_enable: bool = true,
+ /// Internal config, path to proc-macro server executable.
+ procMacro_server: Option<Utf8PathBuf> = None,
+
+ /// Exclude imports from find-all-references.
+ references_excludeImports: bool = false,
+
+ /// Exclude tests from find-all-references.
+ references_excludeTests: bool = false,
+
/// Inject additional highlighting into doc comments.
///
/// When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
@@ -643,13 +634,24 @@ config_data! {
/// By disabling semantic tokens for strings, other grammars can be used to highlight
/// their contents.
semanticHighlighting_strings_enable: bool = true,
- }
-}
-config_data! {
- /// Configs that only make sense when they are set by a client. As such they can only be defined
- /// by setting them using client's settings (e.g `settings.json` on VS Code).
- client: struct ClientDefaultConfigData <- ClientConfigInput -> {}
+ /// Show full signature of the callable. Only shows parameters if disabled.
+ signatureInfo_detail: SignatureDetail = SignatureDetail::Full,
+ /// Show documentation.
+ signatureInfo_documentation_enable: bool = true,
+
+ /// Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.
+ typing_autoClosingAngleBrackets_enable: bool = false,
+
+ /// Workspace symbol search kind.
+ workspace_symbol_search_kind: WorkspaceSymbolSearchKindDef = WorkspaceSymbolSearchKindDef::OnlyTypes,
+ /// Limits the number of items returned from a workspace symbol search (Defaults to 128).
+ /// Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.
+ /// Other clients require all results upfront and might require a higher limit.
+ workspace_symbol_search_limit: usize = 128,
+ /// Workspace symbol search scope.
+ workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = WorkspaceSymbolSearchScopeDef::Workspace,
+ }
}
#[derive(Debug, Clone)]
@@ -659,23 +661,294 @@ pub struct Config {
workspace_roots: Vec<AbsPathBuf>,
caps: lsp_types::ClientCapabilities,
root_path: AbsPathBuf,
- detached_files: Vec<AbsPathBuf>,
snippets: Vec<Snippet>,
visual_studio_code_version: Option<Version>,
- default_config: DefaultConfigData,
- client_config: FullConfigInput,
- user_config: GlobalLocalConfigInput,
- #[allow(dead_code)]
- ratoml_files: FxHashMap<SourceRootId, RatomlNode>,
+ default_config: &'static DefaultConfigData,
+ /// Config node that obtains its initial value during the server initialization and
+ /// by receiving a `lsp_types::notification::DidChangeConfiguration`.
+ client_config: (FullConfigInput, ConfigErrors),
+
+ /// Path to the root configuration file. This can be seen as a generic way to define what would be `$XDG_CONFIG_HOME/rust-analyzer/rust-analyzer.toml` in Linux.
+ /// If not specified by init of a `Config` object this value defaults to :
+ ///
+ /// |Platform | Value | Example |
+ /// | ------- | ------------------------------------- | ---------------------------------------- |
+ /// | Linux | `$XDG_CONFIG_HOME` or `$HOME`/.config | /home/alice/.config |
+ /// | macOS | `$HOME`/Library/Application Support | /Users/Alice/Library/Application Support |
+ /// | Windows | `{FOLDERID_RoamingAppData}` | C:\Users\Alice\AppData\Roaming |
+ user_config_path: VfsPath,
+
+ /// FIXME @alibektas : Change this to sth better.
+ /// Config node whose values apply to **every** Rust project.
+ user_config: Option<(GlobalLocalConfigInput, ConfigErrors)>,
+
+ /// A special file for this session whose path is set to `self.root_path.join("rust-analyzer.toml")`
+ root_ratoml_path: VfsPath,
+
+ /// This file can be used to make global changes while having only a workspace-wide scope.
+ root_ratoml: Option<(GlobalLocalConfigInput, ConfigErrors)>,
+
+ /// For every `SourceRoot` there can be at most one RATOML file.
+ ratoml_files: FxHashMap<SourceRootId, (LocalConfigInput, ConfigErrors)>,
+
+ /// Clone of the value that is stored inside a `GlobalState`.
+ source_root_parent_map: Arc<FxHashMap<SourceRootId, SourceRootId>>,
+
+ detached_files: Vec<AbsPathBuf>,
}
-#[derive(Clone, Debug)]
-struct RatomlNode {
- #[allow(dead_code)]
- node: GlobalLocalConfigInput,
- #[allow(dead_code)]
- parent: Option<SourceRootId>,
+impl Config {
+ pub fn user_config_path(&self) -> &VfsPath {
+ &self.user_config_path
+ }
+
+ pub fn same_source_root_parent_map(
+ &self,
+ other: &Arc<FxHashMap<SourceRootId, SourceRootId>>,
+ ) -> bool {
+ Arc::ptr_eq(&self.source_root_parent_map, other)
+ }
+
+ // FIXME @alibektas : Server's health uses error sink but in other places it is not used atm.
+ /// Changes made to client and global configurations will partially not be reflected even after `.apply_change()` was called.
+ /// The return tuple's bool component signals whether the `GlobalState` should call its `update_configuration()` method.
+ fn apply_change_with_sink(&self, change: ConfigChange) -> (Config, bool) {
+ let mut config = self.clone();
+
+ let mut should_update = false;
+
+ if let Some(change) = change.user_config_change {
+ if let Ok(table) = toml::from_str(&change) {
+ let mut toml_errors = vec![];
+ validate_toml_table(
+ GlobalLocalConfigInput::FIELDS,
+ &table,
+ &mut String::new(),
+ &mut toml_errors,
+ );
+ config.user_config = Some((
+ GlobalLocalConfigInput::from_toml(table, &mut toml_errors),
+ ConfigErrors(
+ toml_errors
+ .into_iter()
+ .map(|(a, b)| ConfigErrorInner::Toml { config_key: a, error: b })
+ .map(Arc::new)
+ .collect(),
+ ),
+ ));
+ should_update = true;
+ }
+ }
+
+ if let Some(mut json) = change.client_config_change {
+ tracing::info!("updating config from JSON: {:#}", json);
+ if !(json.is_null() || json.as_object().map_or(false, |it| it.is_empty())) {
+ let mut json_errors = vec![];
+ let detached_files = get_field::<Vec<Utf8PathBuf>>(
+ &mut json,
+ &mut json_errors,
+ "detachedFiles",
+ None,
+ )
+ .unwrap_or_default()
+ .into_iter()
+ .map(AbsPathBuf::assert)
+ .collect();
+
+ patch_old_style::patch_json_for_outdated_configs(&mut json);
+
+ config.client_config = (
+ FullConfigInput::from_json(json, &mut json_errors),
+ ConfigErrors(
+ json_errors
+ .into_iter()
+ .map(|(a, b)| ConfigErrorInner::Json { config_key: a, error: b })
+ .map(Arc::new)
+ .collect(),
+ ),
+ );
+ config.detached_files = detached_files;
+ }
+ should_update = true;
+ }
+
+ if let Some(change) = change.root_ratoml_change {
+ tracing::info!("updating root ra-toml config: {:#}", change);
+ #[allow(clippy::single_match)]
+ match toml::from_str(&change) {
+ Ok(table) => {
+ let mut toml_errors = vec![];
+ validate_toml_table(
+ GlobalLocalConfigInput::FIELDS,
+ &table,
+ &mut String::new(),
+ &mut toml_errors,
+ );
+ config.root_ratoml = Some((
+ GlobalLocalConfigInput::from_toml(table, &mut toml_errors),
+ ConfigErrors(
+ toml_errors
+ .into_iter()
+ .map(|(a, b)| ConfigErrorInner::Toml { config_key: a, error: b })
+ .map(Arc::new)
+ .collect(),
+ ),
+ ));
+ should_update = true;
+ }
+ // FIXME
+ Err(_) => (),
+ }
+ }
+
+ if let Some(change) = change.ratoml_file_change {
+ for (source_root_id, (_, text)) in change {
+ if let Some(text) = text {
+ let mut toml_errors = vec![];
+ tracing::info!("updating ra-toml config: {:#}", text);
+ #[allow(clippy::single_match)]
+ match toml::from_str(&text) {
+ Ok(table) => {
+ validate_toml_table(
+ &[LocalConfigInput::FIELDS],
+ &table,
+ &mut String::new(),
+ &mut toml_errors,
+ );
+ config.ratoml_files.insert(
+ source_root_id,
+ (
+ LocalConfigInput::from_toml(&table, &mut toml_errors),
+ ConfigErrors(
+ toml_errors
+ .into_iter()
+ .map(|(a, b)| ConfigErrorInner::Toml {
+ config_key: a,
+ error: b,
+ })
+ .map(Arc::new)
+ .collect(),
+ ),
+ ),
+ );
+ }
+ // FIXME
+ Err(_) => (),
+ }
+ }
+ }
+ }
+
+ if let Some(source_root_map) = change.source_map_change {
+ config.source_root_parent_map = source_root_map;
+ }
+
+ let snips = self.completion_snippets_custom().to_owned();
+
+ for (name, def) in snips.iter() {
+ if def.prefix.is_empty() && def.postfix.is_empty() {
+ continue;
+ }
+ let scope = match def.scope {
+ SnippetScopeDef::Expr => SnippetScope::Expr,
+ SnippetScopeDef::Type => SnippetScope::Type,
+ SnippetScopeDef::Item => SnippetScope::Item,
+ };
+ #[allow(clippy::single_match)]
+ match Snippet::new(
+ &def.prefix,
+ &def.postfix,
+ &def.body,
+ def.description.as_ref().unwrap_or(name),
+ &def.requires,
+ scope,
+ ) {
+ Some(snippet) => config.snippets.push(snippet),
+ // FIXME
+ // None => error_sink.0.push(ConfigErrorInner::Json {
+ // config_key: "".to_owned(),
+ // error: <serde_json::Error as serde::de::Error>::custom(format!(
+ // "snippet {name} is invalid or triggers are missing",
+ // )),
+ // }),
+ None => (),
+ }
+ }
+
+ // FIXME: bring this back
+ // if config.check_command().is_empty() {
+ // error_sink.0.push(ConfigErrorInner::Json {
+ // config_key: "/check/command".to_owned(),
+ // error: serde_json::Error::custom("expected a non-empty string"),
+ // });
+ // }
+ (config, should_update)
+ }
+
+ /// Given `change` this generates a new `Config`, thereby collecting errors of type `ConfigError`.
+ /// If there are changes that have global/client level effect, the last component of the return type
+ /// will be set to `true`, which should be used by the `GlobalState` to update itself.
+ pub fn apply_change(&self, change: ConfigChange) -> (Config, ConfigErrors, bool) {
+ let (config, should_update) = self.apply_change_with_sink(change);
+ let e = ConfigErrors(
+ config
+ .client_config
+ .1
+ .0
+ .iter()
+ .chain(config.root_ratoml.as_ref().into_iter().flat_map(|it| it.1 .0.iter()))
+ .chain(config.user_config.as_ref().into_iter().flat_map(|it| it.1 .0.iter()))
+ .chain(config.ratoml_files.values().flat_map(|it| it.1 .0.iter()))
+ .cloned()
+ .collect(),
+ );
+ (config, e, should_update)
+ }
+}
+
+#[derive(Default, Debug)]
+pub struct ConfigChange {
+ user_config_change: Option<Arc<str>>,
+ root_ratoml_change: Option<Arc<str>>,
+ client_config_change: Option<serde_json::Value>,
+ ratoml_file_change: Option<FxHashMap<SourceRootId, (VfsPath, Option<Arc<str>>)>>,
+ source_map_change: Option<Arc<FxHashMap<SourceRootId, SourceRootId>>>,
+}
+
+impl ConfigChange {
+ pub fn change_ratoml(
+ &mut self,
+ source_root: SourceRootId,
+ vfs_path: VfsPath,
+ content: Option<Arc<str>>,
+ ) -> Option<(VfsPath, Option<Arc<str>>)> {
+ self.ratoml_file_change
+ .get_or_insert_with(Default::default)
+ .insert(source_root, (vfs_path, content))
+ }
+
+ pub fn change_user_config(&mut self, content: Option<Arc<str>>) {
+ assert!(self.user_config_change.is_none()); // Otherwise it is a double write.
+ self.user_config_change = content;
+ }
+
+ pub fn change_root_ratoml(&mut self, content: Option<Arc<str>>) {
+ assert!(self.root_ratoml_change.is_none()); // Otherwise it is a double write.
+ self.root_ratoml_change = content;
+ }
+
+ pub fn change_client_config(&mut self, change: serde_json::Value) {
+ self.client_config_change = Some(change);
+ }
+
+ pub fn change_source_root_parent_map(
+ &mut self,
+ source_root_map: Arc<FxHashMap<SourceRootId, SourceRootId>>,
+ ) {
+ assert!(self.source_map_change.is_none());
+ self.source_map_change = Some(source_root_map.clone());
+ }
}
macro_rules! try_ {
@@ -695,8 +968,6 @@ macro_rules! try_or_def {
};
}
-type ParallelCachePrimingNumThreads = u8;
-
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum LinkedProject {
ProjectManifest(ProjectManifest),
@@ -866,27 +1137,39 @@ pub struct ClientCommandsConfig {
}
#[derive(Debug)]
-pub struct ConfigError {
- errors: Vec<(String, serde_json::Error)>,
+pub enum ConfigErrorInner {
+ Json { config_key: String, error: serde_json::Error },
+ Toml { config_key: String, error: toml::de::Error },
}
-impl fmt::Display for ConfigError {
+#[derive(Clone, Debug)]
+pub struct ConfigErrors(Vec<Arc<ConfigErrorInner>>);
+
+impl ConfigErrors {
+ pub fn is_empty(&self) -> bool {
+ self.0.is_empty()
+ }
+}
+
+impl fmt::Display for ConfigErrors {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let errors = self.errors.iter().format_with("\n", |(key, e), f| {
- f(key)?;
- f(&": ")?;
- f(e)
+ let errors = self.0.iter().format_with("\n", |inner, f| match &**inner {
+ ConfigErrorInner::Json { config_key: key, error: e } => {
+ f(key)?;
+ f(&": ")?;
+ f(e)
+ }
+ ConfigErrorInner::Toml { config_key: key, error: e } => {
+ f(key)?;
+ f(&": ")?;
+ f(e)
+ }
});
- write!(
- f,
- "invalid config value{}:\n{}",
- if self.errors.len() == 1 { "" } else { "s" },
- errors
- )
+ write!(f, "invalid config value{}:\n{}", if self.0.len() == 1 { "" } else { "s" }, errors)
}
}
-impl std::error::Error for ConfigError {}
+impl std::error::Error for ConfigErrors {}
impl Config {
pub fn new(
@@ -894,19 +1177,46 @@ impl Config {
caps: ClientCapabilities,
workspace_roots: Vec<AbsPathBuf>,
visual_studio_code_version: Option<Version>,
+ user_config_path: Option<Utf8PathBuf>,
) -> Self {
+ static DEFAULT_CONFIG_DATA: OnceLock<&'static DefaultConfigData> = OnceLock::new();
+ let user_config_path = if let Some(user_config_path) = user_config_path {
+ user_config_path.join("rust-analyzer").join("rust-analyzer.toml")
+ } else {
+ let p = config_dir()
+ .expect("A config dir is expected to exist on all platforms ra supports.")
+ .join("rust-analyzer")
+ .join("rust-analyzer.toml");
+ Utf8PathBuf::from_path_buf(p).expect("Config dir expected to be abs.")
+ };
+
+ // A user config cannot be a virtual path as rust-analyzer cannot support watching changes in virtual paths.
+ // See `GlobalState::process_changes` to get more info.
+ // FIXME @alibektas : Temporary solution. I don't think this is right as at some point we may allow users to specify
+ // custom USER_CONFIG_PATHs which may also be relative.
+ let user_config_path = VfsPath::from(AbsPathBuf::assert(user_config_path));
+ let root_ratoml_path = {
+ let mut p = root_path.clone();
+ p.push("rust-analyzer.toml");
+ VfsPath::new_real_path(p.to_string())
+ };
+
Config {
caps,
- detached_files: Vec::new(),
discovered_projects: Vec::new(),
root_path,
snippets: Default::default(),
workspace_roots,
visual_studio_code_version,
- client_config: FullConfigInput::default(),
- user_config: GlobalLocalConfigInput::default(),
+ client_config: (FullConfigInput::default(), ConfigErrors(vec![])),
ratoml_files: FxHashMap::default(),
- default_config: DefaultConfigData::default(),
+ default_config: DEFAULT_CONFIG_DATA.get_or_init(|| Box::leak(Box::default())),
+ source_root_parent_map: Arc::new(FxHashMap::default()),
+ user_config: None,
+ user_config_path,
+ root_ratoml: None,
+ root_ratoml_path,
+ detached_files: Default::default(),
}
}
@@ -929,71 +1239,6 @@ impl Config {
self.workspace_roots.extend(paths);
}
- pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigError> {
- tracing::info!("updating config from JSON: {:#}", json);
- if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) {
- return Ok(());
- }
- let mut errors = Vec::new();
- self.detached_files =
- get_field::<Vec<Utf8PathBuf>>(&mut json, &mut errors, "detachedFiles", None)
- .unwrap_or_default()
- .into_iter()
- .map(AbsPathBuf::assert)
- .collect();
- patch_old_style::patch_json_for_outdated_configs(&mut json);
- self.client_config = FullConfigInput::from_json(json, &mut errors);
- tracing::debug!(?self.client_config, "deserialized config data");
- self.snippets.clear();
-
- let snips = self.completion_snippets_custom(None).to_owned();
-
- for (name, def) in snips.iter() {
- if def.prefix.is_empty() && def.postfix.is_empty() {
- continue;
- }
- let scope = match def.scope {
- SnippetScopeDef::Expr => SnippetScope::Expr,
- SnippetScopeDef::Type => SnippetScope::Type,
- SnippetScopeDef::Item => SnippetScope::Item,
- };
- match Snippet::new(
- &def.prefix,
- &def.postfix,
- &def.body,
- def.description.as_ref().unwrap_or(name),
- &def.requires,
- scope,
- ) {
- Some(snippet) => self.snippets.push(snippet),
- None => errors.push((
- format!("snippet {name} is invalid"),
- <serde_json::Error as serde::de::Error>::custom(
- "snippet path is invalid or triggers are missing",
- ),
- )),
- }
- }
-
- self.validate(&mut errors);
-
- if errors.is_empty() {
- Ok(())
- } else {
- Err(ConfigError { errors })
- }
- }
-
- fn validate(&self, error_sink: &mut Vec<(String, serde_json::Error)>) {
- use serde::de::Error;
- if self.check_command().is_empty() {
- error_sink.push((
- "/check/command".to_owned(),
- serde_json::Error::custom("expected a non-empty string"),
- ));
- }
- }
-
pub fn json_schema() -> serde_json::Value {
FullConfigInput::json_schema()
}
@@ -1002,23 +1247,23 @@ impl Config {
&self.root_path
}
- pub fn caps(&self) -> &lsp_types::ClientCapabilities {
- &self.caps
+ pub fn root_ratoml_path(&self) -> &VfsPath {
+ &self.root_ratoml_path
}
- pub fn detached_files(&self) -> &[AbsPathBuf] {
- &self.detached_files
+ pub fn caps(&self) -> &lsp_types::ClientCapabilities {
+ &self.caps
}
}
impl Config {
pub fn assist(&self, source_root: Option<SourceRootId>) -> AssistConfig {
AssistConfig {
- snippet_cap: SnippetCap::new(self.experimental("snippetTextEdit")),
+ snippet_cap: self.snippet_cap(),
allowed: None,
insert_use: self.insert_use_config(source_root),
prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
- assist_emit_must_use: self.assist_emitMustUse().to_owned(),
+ assist_emit_must_use: self.assist_emitMustUse(source_root).to_owned(),
prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64,
}
@@ -1026,17 +1271,13 @@ impl Config {
pub fn completion(&self, source_root: Option<SourceRootId>) -> CompletionConfig {
CompletionConfig {
- enable_postfix_completions: self.completion_postfix_enable(source_root).to_owned(),
- enable_imports_on_the_fly: self.completion_autoimport_enable(source_root).to_owned()
+ enable_postfix_completions: self.completion_postfix_enable().to_owned(),
+ enable_imports_on_the_fly: self.completion_autoimport_enable().to_owned()
&& completion_item_edit_resolve(&self.caps),
- enable_self_on_the_fly: self.completion_autoself_enable(source_root).to_owned(),
- enable_private_editable: self.completion_privateEditable_enable(source_root).to_owned(),
- enable_term_search: self.completion_termSearch_enable(source_root).to_owned(),
- term_search_fuel: self.completion_termSearch_fuel(source_root).to_owned() as u64,
- full_function_signatures: self
- .completion_fullFunctionSignatures_enable(source_root)
- .to_owned(),
- callable: match self.completion_callable_snippets(source_root) {
+ enable_self_on_the_fly: self.completion_autoself_enable().to_owned(),
+ enable_private_editable: self.completion_privateEditable_enable().to_owned(),
+ full_function_signatures: self.completion_fullFunctionSignatures_enable().to_owned(),
+ callable: match self.completion_callable_snippets() {
CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments),
CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses),
CallableCompletionDef::None => None,
@@ -1055,10 +1296,18 @@ impl Config {
prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
snippets: self.snippets.clone().to_vec(),
- limit: self.completion_limit(source_root).to_owned(),
+ limit: self.completion_limit().to_owned(),
+ enable_term_search: self.completion_termSearch_enable().to_owned(),
+ term_search_fuel: self.completion_termSearch_fuel().to_owned() as u64,
}
}
+ pub fn detached_files(&self) -> &Vec<AbsPathBuf> {
+ // FIXME @alibektas : This is the only config that is confusing. If it's a proper configuration
+ // why is it not among the others? If it's client only which I doubt it is current state should be alright
+ &self.detached_files
+ }
+
pub fn diagnostics(&self, source_root: Option<SourceRootId>) -> DiagnosticsConfig {
DiagnosticsConfig {
enabled: *self.diagnostics_enable(),
@@ -1066,10 +1315,11 @@ impl Config {
proc_macros_enabled: *self.procMacro_enable(),
disable_experimental: !self.diagnostics_experimental_enable(),
disabled: self.diagnostics_disabled().clone(),
- expr_fill_default: match self.assist_expressionFillDefault() {
+ expr_fill_default: match self.assist_expressionFillDefault(source_root) {
ExprFillDefaultDef::Todo => ExprFillDefaultMode::Todo,
ExprFillDefaultDef::Default => ExprFillDefaultMode::Default,
},
+ snippet_cap: self.snippet_cap(),
insert_use: self.insert_use_config(source_root),
prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
@@ -1081,13 +1331,13 @@ impl Config {
self.procMacro_enable().to_owned() && self.procMacro_attributes_enable().to_owned()
}
- pub fn highlight_related(&self, source_root: Option<SourceRootId>) -> HighlightRelatedConfig {
+ pub fn highlight_related(&self, _source_root: Option<SourceRootId>) -> HighlightRelatedConfig {
HighlightRelatedConfig {
- references: self.highlightRelated_references_enable(source_root).to_owned(),
- break_points: self.highlightRelated_breakPoints_enable(source_root).to_owned(),
- exit_points: self.highlightRelated_exitPoints_enable(source_root).to_owned(),
- yield_points: self.highlightRelated_yieldPoints_enable(source_root).to_owned(),
- closure_captures: self.highlightRelated_closureCaptures_enable(source_root).to_owned(),
+ references: self.highlightRelated_references_enable().to_owned(),
+ break_points: self.highlightRelated_breakPoints_enable().to_owned(),
+ exit_points: self.highlightRelated_exitPoints_enable().to_owned(),
+ yield_points: self.highlightRelated_yieldPoints_enable().to_owned(),
+ closure_captures: self.highlightRelated_closureCaptures_enable().to_owned(),
}
}
@@ -1141,7 +1391,7 @@ impl Config {
}
}
- pub fn inlay_hints(&self, source_root: Option<SourceRootId>) -> InlayHintsConfig {
+ pub fn inlay_hints(&self) -> InlayHintsConfig {
let client_capability_fields = self
.caps
.text_document
@@ -1155,74 +1405,65 @@ impl Config {
.collect::<FxHashSet<_>>();
InlayHintsConfig {
- render_colons: self.inlayHints_renderColons(source_root).to_owned(),
- type_hints: self.inlayHints_typeHints_enable(source_root).to_owned(),
- parameter_hints: self.inlayHints_parameterHints_enable(source_root).to_owned(),
- chaining_hints: self.inlayHints_chainingHints_enable(source_root).to_owned(),
- discriminant_hints: match self.inlayHints_discriminantHints_enable(source_root) {
+ render_colons: self.inlayHints_renderColons().to_owned(),
+ type_hints: self.inlayHints_typeHints_enable().to_owned(),
+ parameter_hints: self.inlayHints_parameterHints_enable().to_owned(),
+ chaining_hints: self.inlayHints_chainingHints_enable().to_owned(),
+ discriminant_hints: match self.inlayHints_discriminantHints_enable() {
DiscriminantHintsDef::Always => ide::DiscriminantHints::Always,
DiscriminantHintsDef::Never => ide::DiscriminantHints::Never,
DiscriminantHintsDef::Fieldless => ide::DiscriminantHints::Fieldless,
},
- closure_return_type_hints: match self
- .inlayHints_closureReturnTypeHints_enable(source_root)
- {
+ closure_return_type_hints: match self.inlayHints_closureReturnTypeHints_enable() {
ClosureReturnTypeHintsDef::Always => ide::ClosureReturnTypeHints::Always,
ClosureReturnTypeHintsDef::Never => ide::ClosureReturnTypeHints::Never,
ClosureReturnTypeHintsDef::WithBlock => ide::ClosureReturnTypeHints::WithBlock,
},
- lifetime_elision_hints: match self.inlayHints_lifetimeElisionHints_enable(source_root) {
+ lifetime_elision_hints: match self.inlayHints_lifetimeElisionHints_enable() {
LifetimeElisionDef::Always => ide::LifetimeElisionHints::Always,
LifetimeElisionDef::Never => ide::LifetimeElisionHints::Never,
LifetimeElisionDef::SkipTrivial => ide::LifetimeElisionHints::SkipTrivial,
},
hide_named_constructor_hints: self
- .inlayHints_typeHints_hideNamedConstructor(source_root)
+ .inlayHints_typeHints_hideNamedConstructor()
.to_owned(),
hide_closure_initialization_hints: self
- .inlayHints_typeHints_hideClosureInitialization(source_root)
+ .inlayHints_typeHints_hideClosureInitialization()
.to_owned(),
- closure_style: match self.inlayHints_closureStyle(source_root) {
+ closure_style: match self.inlayHints_closureStyle() {
ClosureStyle::ImplFn => hir::ClosureStyle::ImplFn,
ClosureStyle::RustAnalyzer => hir::ClosureStyle::RANotation,
ClosureStyle::WithId => hir::ClosureStyle::ClosureWithId,
ClosureStyle::Hide => hir::ClosureStyle::Hide,
},
- closure_capture_hints: self
- .inlayHints_closureCaptureHints_enable(source_root)
- .to_owned(),
- adjustment_hints: match self.inlayHints_expressionAdjustmentHints_enable(source_root) {
+ closure_capture_hints: self.inlayHints_closureCaptureHints_enable().to_owned(),
+ adjustment_hints: match self.inlayHints_expressionAdjustmentHints_enable() {
AdjustmentHintsDef::Always => ide::AdjustmentHints::Always,
- AdjustmentHintsDef::Never => {
- match self.inlayHints_reborrowHints_enable(source_root) {
- ReborrowHintsDef::Always | ReborrowHintsDef::Mutable => {
- ide::AdjustmentHints::ReborrowOnly
- }
- ReborrowHintsDef::Never => ide::AdjustmentHints::Never,
+ AdjustmentHintsDef::Never => match self.inlayHints_reborrowHints_enable() {
+ ReborrowHintsDef::Always | ReborrowHintsDef::Mutable => {
+ ide::AdjustmentHints::ReborrowOnly
}
- }
+ ReborrowHintsDef::Never => ide::AdjustmentHints::Never,
+ },
AdjustmentHintsDef::Reborrow => ide::AdjustmentHints::ReborrowOnly,
},
- adjustment_hints_mode: match self.inlayHints_expressionAdjustmentHints_mode(source_root)
- {
+ adjustment_hints_mode: match self.inlayHints_expressionAdjustmentHints_mode() {
AdjustmentHintsModeDef::Prefix => ide::AdjustmentHintsMode::Prefix,
AdjustmentHintsModeDef::Postfix => ide::AdjustmentHintsMode::Postfix,
AdjustmentHintsModeDef::PreferPrefix => ide::AdjustmentHintsMode::PreferPrefix,
AdjustmentHintsModeDef::PreferPostfix => ide::AdjustmentHintsMode::PreferPostfix,
},
adjustment_hints_hide_outside_unsafe: self
- .inlayHints_expressionAdjustmentHints_hideOutsideUnsafe(source_root)
+ .inlayHints_expressionAdjustmentHints_hideOutsideUnsafe()
.to_owned(),
- binding_mode_hints: self.inlayHints_bindingModeHints_enable(source_root).to_owned(),
+ binding_mode_hints: self.inlayHints_bindingModeHints_enable().to_owned(),
param_names_for_lifetime_elision_hints: self
- .inlayHints_lifetimeElisionHints_useParameterNames(source_root)
+ .inlayHints_lifetimeElisionHints_useParameterNames()
.to_owned(),
- max_length: self.inlayHints_maxLength(source_root).to_owned(),
- closing_brace_hints_min_lines: if self
- .inlayHints_closingBraceHints_enable(source_root)
- .to_owned()
+ max_length: self.inlayHints_maxLength().to_owned(),
+ closing_brace_hints_min_lines: if self.inlayHints_closingBraceHints_enable().to_owned()
{
- Some(self.inlayHints_closingBraceHints_minLines(source_root).to_owned())
+ Some(self.inlayHints_closingBraceHints_minLines().to_owned())
} else {
None
},
@@ -1233,10 +1474,8 @@ impl Config {
resolve_label_location: client_capability_fields.contains("label.location"),
resolve_label_command: client_capability_fields.contains("label.command"),
},
- implicit_drop_hints: self.inlayHints_implicitDrops_enable(source_root).to_owned(),
- range_exclusive_hints: self
- .inlayHints_rangeExclusiveHints_enable(source_root)
- .to_owned(),
+ implicit_drop_hints: self.inlayHints_implicitDrops_enable().to_owned(),
+ range_exclusive_hints: self.inlayHints_rangeExclusiveHints_enable().to_owned(),
}
}
@@ -1260,36 +1499,32 @@ impl Config {
}
}
- pub fn join_lines(&self, source_root: Option<SourceRootId>) -> JoinLinesConfig {
+ pub fn join_lines(&self) -> JoinLinesConfig {
JoinLinesConfig {
- join_else_if: self.joinLines_joinElseIf(source_root).to_owned(),
- remove_trailing_comma: self.joinLines_removeTrailingComma(source_root).to_owned(),
- unwrap_trivial_blocks: self.joinLines_unwrapTrivialBlock(source_root).to_owned(),
- join_assignments: self.joinLines_joinAssignments(source_root).to_owned(),
+ join_else_if: self.joinLines_joinElseIf().to_owned(),
+ remove_trailing_comma: self.joinLines_removeTrailingComma().to_owned(),
+ unwrap_trivial_blocks: self.joinLines_unwrapTrivialBlock().to_owned(),
+ join_assignments: self.joinLines_joinAssignments().to_owned(),
}
}
- pub fn highlighting_non_standard_tokens(&self, source_root: Option<SourceRootId>) -> bool {
- self.semanticHighlighting_nonStandardTokens(source_root).to_owned()
+ pub fn highlighting_non_standard_tokens(&self) -> bool {
+ self.semanticHighlighting_nonStandardTokens().to_owned()
}
- pub fn highlighting_config(&self, source_root: Option<SourceRootId>) -> HighlightConfig {
+ pub fn highlighting_config(&self) -> HighlightConfig {
HighlightConfig {
- strings: self.semanticHighlighting_strings_enable(source_root).to_owned(),
- punctuation: self.semanticHighlighting_punctuation_enable(source_root).to_owned(),
+ strings: self.semanticHighlighting_strings_enable().to_owned(),
+ punctuation: self.semanticHighlighting_punctuation_enable().to_owned(),
specialize_punctuation: self
- .semanticHighlighting_punctuation_specialization_enable(source_root)
+ .semanticHighlighting_punctuation_specialization_enable()
.to_owned(),
- macro_bang: self
- .semanticHighlighting_punctuation_separate_macro_bang(source_root)
- .to_owned(),
- operator: self.semanticHighlighting_operator_enable(source_root).to_owned(),
+ macro_bang: self.semanticHighlighting_punctuation_separate_macro_bang().to_owned(),
+ operator: self.semanticHighlighting_operator_enable().to_owned(),
specialize_operator: self
- .semanticHighlighting_operator_specialization_enable(source_root)
- .to_owned(),
- inject_doc_comment: self
- .semanticHighlighting_doc_comment_inject_enable(source_root)
+ .semanticHighlighting_operator_specialization_enable()
.to_owned(),
+ inject_doc_comment: self.semanticHighlighting_doc_comment_inject_enable().to_owned(),
syntactic_name_ref_highlighting: false,
}
}
@@ -1771,8 +2006,10 @@ impl Config {
*self.references_excludeTests()
}
- pub fn snippet_cap(&self) -> bool {
- self.experimental("snippetTextEdit")
+ pub fn snippet_cap(&self) -> Option<SnippetCap> {
+ // FIXME: Also detect the proposed lsp version at caps.workspace.workspaceEdit.snippetEditSupport
+ // once lsp-types has it.
+ SnippetCap::new(self.experimental("snippetTextEdit"))
}
pub fn call_info(&self) -> CallInfoConfig {
@@ -1856,15 +2093,22 @@ impl Config {
}
}
- pub fn prime_caches_num_threads(&self) -> u8 {
- match *self.cachePriming_numThreads() {
- 0 => num_cpus::get_physical().try_into().unwrap_or(u8::MAX),
- n => n,
+ pub fn prime_caches_num_threads(&self) -> usize {
+ match self.cachePriming_numThreads() {
+ NumThreads::Concrete(0) | NumThreads::Physical => num_cpus::get_physical(),
+ &NumThreads::Concrete(n) => n,
+ NumThreads::Logical => num_cpus::get(),
}
}
pub fn main_loop_num_threads(&self) -> usize {
- self.numThreads().unwrap_or(num_cpus::get_physical())
+ match self.numThreads() {
+ Some(NumThreads::Concrete(0)) | None | Some(NumThreads::Physical) => {
+ num_cpus::get_physical()
+ }
+ &Some(NumThreads::Concrete(n)) => n,
+ Some(NumThreads::Logical) => num_cpus::get(),
+ }
}
pub fn typing_autoclose_angle(&self) -> bool {
@@ -1959,51 +2203,6 @@ macro_rules! create_bool_or_string_serde {
create_bool_or_string_serde!(true_or_always<true, "always">);
create_bool_or_string_serde!(false_or_never<false, "never">);
-macro_rules! named_unit_variant {
- ($variant:ident) => {
- pub(super) mod $variant {
- pub(in super::super) fn deserialize<'de, D>(deserializer: D) -> Result<(), D::Error>
- where
- D: serde::Deserializer<'de>,
- {
- struct V;
- impl<'de> serde::de::Visitor<'de> for V {
- type Value = ();
- fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- f.write_str(concat!("\"", stringify!($variant), "\""))
- }
- fn visit_str<E: serde::de::Error>(self, value: &str) -> Result<Self::Value, E> {
- if value == stringify!($variant) {
- Ok(())
- } else {
- Err(E::invalid_value(serde::de::Unexpected::Str(value), &self))
- }
- }
- }
- deserializer.deserialize_str(V)
- }
- pub(in super::super) fn serialize<S>(serializer: S) -> Result<S::Ok, S::Error>
- where
- S: serde::Serializer,
- {
- serializer.serialize_str(stringify!($variant))
- }
- }
- };
-}
-
-mod unit_v {
- named_unit_variant!(all);
- named_unit_variant!(skip_trivial);
- named_unit_variant!(mutable);
- named_unit_variant!(reborrow);
- named_unit_variant!(fieldless);
- named_unit_variant!(with_block);
- named_unit_variant!(decimal);
- named_unit_variant!(hexadecimal);
- named_unit_variant!(both);
-}
-
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq)]
#[serde(rename_all = "snake_case")]
#[derive(Default)]
@@ -2016,7 +2215,7 @@ enum SnippetScopeDef {
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
#[serde(default)]
-struct SnippetDef {
+pub(crate) struct SnippetDef {
#[serde(with = "single_or_array")]
#[serde(skip_serializing_if = "Vec::is_empty")]
prefix: Vec<String>,
@@ -2111,17 +2310,17 @@ enum ImportGranularityDef {
#[derive(Serialize, Deserialize, Debug, Copy, Clone)]
#[serde(rename_all = "snake_case")]
-enum CallableCompletionDef {
+pub(crate) enum CallableCompletionDef {
FillArguments,
AddParentheses,
None,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
-#[serde(untagged)]
+#[serde(rename_all = "snake_case")]
enum CargoFeaturesDef {
- #[serde(with = "unit_v::all")]
All,
+ #[serde(untagged)]
Selected(Vec<String>),
}
@@ -2143,25 +2342,27 @@ enum InvocationLocation {
}
#[derive(Serialize, Deserialize, Debug, Clone)]
-#[serde(untagged)]
+#[serde(rename_all = "snake_case")]
enum LifetimeElisionDef {
+ SkipTrivial,
#[serde(with = "true_or_always")]
+ #[serde(untagged)]
Always,
#[serde(with = "false_or_never")]
+ #[serde(untagged)]
Never,
- #[serde(with = "unit_v::skip_trivial")]
- SkipTrivial,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
-#[serde(untagged)]
+#[serde(rename_all = "snake_case")]
enum ClosureReturnTypeHintsDef {
+ WithBlock,
#[serde(with = "true_or_always")]
+ #[serde(untagged)]
Always,
#[serde(with = "false_or_never")]
+ #[serde(untagged)]
Never,
- #[serde(with = "unit_v::with_block")]
- WithBlock,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
@@ -2174,36 +2375,39 @@ enum ClosureStyle {
}
#[derive(Serialize, Deserialize, Debug, Clone)]
-#[serde(untagged)]
+#[serde(rename_all = "snake_case")]
enum ReborrowHintsDef {
+ Mutable,
#[serde(with = "true_or_always")]
+ #[serde(untagged)]
Always,
#[serde(with = "false_or_never")]
+ #[serde(untagged)]
Never,
- #[serde(with = "unit_v::mutable")]
- Mutable,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
-#[serde(untagged)]
+#[serde(rename_all = "snake_case")]
enum AdjustmentHintsDef {
+ Reborrow,
#[serde(with = "true_or_always")]
+ #[serde(untagged)]
Always,
#[serde(with = "false_or_never")]
+ #[serde(untagged)]
Never,
- #[serde(with = "unit_v::reborrow")]
- Reborrow,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
-#[serde(untagged)]
+#[serde(rename_all = "snake_case")]
enum DiscriminantHintsDef {
+ Fieldless,
#[serde(with = "true_or_always")]
+ #[serde(untagged)]
Always,
#[serde(with = "false_or_never")]
+ #[serde(untagged)]
Never,
- #[serde(with = "unit_v::fieldless")]
- Fieldless,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
@@ -2227,9 +2431,11 @@ enum FilesWatcherDef {
#[serde(rename_all = "snake_case")]
enum ImportPrefixDef {
Plain,
- #[serde(alias = "self")]
+ #[serde(rename = "self")]
+ #[serde(alias = "by_self")]
BySelf,
- #[serde(alias = "crate")]
+ #[serde(rename = "crate")]
+ #[serde(alias = "by_crate")]
ByCrate,
}
@@ -2256,13 +2462,9 @@ enum WorkspaceSymbolSearchKindDef {
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq)]
#[serde(rename_all = "snake_case")]
-#[serde(untagged)]
enum MemoryLayoutHoverRenderKindDef {
- #[serde(with = "unit_v::decimal")]
Decimal,
- #[serde(with = "unit_v::hexadecimal")]
Hexadecimal,
- #[serde(with = "unit_v::both")]
Both,
}
@@ -2285,6 +2487,15 @@ pub enum TargetDirectory {
Directory(Utf8PathBuf),
}
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
+#[serde(rename_all = "snake_case")]
+pub enum NumThreads {
+ Physical,
+ Logical,
+ #[serde(untagged)]
+ Concrete(usize),
+}
+
macro_rules! _default_val {
(@verbatim: $s:literal, $ty:ty) => {{
let default_: $ty = serde_json::from_str(&$s).unwrap();
@@ -2318,54 +2529,81 @@ macro_rules! _impl_for_config_data {
$(
$($doc)*
#[allow(non_snake_case)]
- $vis fn $field(&self, _source_root: Option<SourceRootId>) -> &$ty {
- if let Some(v) = self.client_config.local.$field.as_ref() {
- return &v;
+ $vis fn $field(&self, source_root: Option<SourceRootId>) -> &$ty {
+ let mut par: Option<SourceRootId> = source_root;
+ while let Some(source_root_id) = par {
+ par = self.source_root_parent_map.get(&source_root_id).copied();
+ if let Some((config, _)) = self.ratoml_files.get(&source_root_id) {
+ if let Some(value) = config.$field.as_ref() {
+ return value;
+ }
+ }
+ }
+
+ if let Some((root_path_ratoml, _)) = self.root_ratoml.as_ref() {
+ if let Some(v) = root_path_ratoml.local.$field.as_ref() {
+ return &v;
+ }
}
- if let Some(v) = self.user_config.local.$field.as_ref() {
+ if let Some(v) = self.client_config.0.local.$field.as_ref() {
return &v;
}
+ if let Some((user_config, _)) = self.user_config.as_ref() {
+ if let Some(v) = user_config.local.$field.as_ref() {
+ return &v;
+ }
+ }
+
&self.default_config.local.$field
}
)*
}
};
(global, $(
- $(#[doc=$doc:literal])*
- $vis:vis $field:ident : $ty:ty = $default:expr,
- )*
- ) => {
+ $(#[doc=$doc:literal])*
+ $vis:vis $field:ident : $ty:ty = $default:expr,
+ )*
+ ) => {
impl Config {
$(
$($doc)*
#[allow(non_snake_case)]
$vis fn $field(&self) -> &$ty {
- if let Some(v) = self.client_config.global.$field.as_ref() {
- return &v;
+
+ if let Some((root_path_ratoml, _)) = self.root_ratoml.as_ref() {
+ if let Some(v) = root_path_ratoml.global.$field.as_ref() {
+ return &v;
+ }
}
- if let Some(v) = self.user_config.global.$field.as_ref() {
+ if let Some(v) = self.client_config.0.global.$field.as_ref() {
return &v;
}
+ if let Some((user_config, _)) = self.user_config.as_ref() {
+ if let Some(v) = user_config.global.$field.as_ref() {
+ return &v;
+ }
+ }
+
&self.default_config.global.$field
}
)*
}
};
(client, $(
- $(#[doc=$doc:literal])*
- $vis:vis $field:ident : $ty:ty = $default:expr,
- )*
+ $(#[doc=$doc:literal])*
+ $vis:vis $field:ident : $ty:ty = $default:expr,
+ )*
) => {
impl Config {
$(
$($doc)*
#[allow(non_snake_case)]
$vis fn $field(&self) -> &$ty {
- if let Some(v) = self.client_config.global.$field.as_ref() {
+ if let Some(v) = self.client_config.0.client.$field.as_ref() {
return &v;
}
@@ -2387,7 +2625,7 @@ macro_rules! _config_data {
}) => {
/// Default config values for this grouping.
#[allow(non_snake_case)]
- #[derive(Debug, Clone, Serialize)]
+ #[derive(Debug, Clone )]
struct $name { $($field: $ty,)* }
impl_for_config_data!{
@@ -2425,26 +2663,10 @@ macro_rules! _config_data {
}
}
- #[allow(unused)]
- impl $name {
- /// Applies overrides from some more local config blob, to self.
- fn apply_input(&mut self, input: $input) {
- $(
- if let Some(value) = input.$field {
- self.$field = value;
- }
- )*
- }
-
- fn clone_with_overrides(&self, input: $input) -> Self {
- Self {$(
- $field: input.$field.unwrap_or_else(|| self.$field.clone()),
- )*}
- }
- }
-
#[allow(unused, clippy::ptr_arg)]
impl $input {
+ const FIELDS: &'static [&'static str] = &[$(stringify!($field)),*];
+
fn from_json(json: &mut serde_json::Value, error_sink: &mut Vec<(String, serde_json::Error)>) -> Self {
Self {$(
$field: get_field(
@@ -2456,7 +2678,7 @@ macro_rules! _config_data {
)*}
}
- fn from_toml(toml: &mut toml::Table, error_sink: &mut Vec<(String, toml::de::Error)>) -> Self {
+ fn from_toml(toml: &toml::Table, error_sink: &mut Vec<(String, toml::de::Error)>) -> Self {
Self {$(
$field: get_field_toml::<$ty>(
toml,
@@ -2483,8 +2705,7 @@ macro_rules! _config_data {
mod $modname {
#[test]
fn fields_are_sorted() {
- let field_names: &'static [&'static str] = &[$(stringify!($field)),*];
- field_names.windows(2).for_each(|w| assert!(w[0] <= w[1], "{} <= {} does not hold", w[0], w[1]));
+ super::$input::FIELDS.windows(2).for_each(|w| assert!(w[0] <= w[1], "{} <= {} does not hold", w[0], w[1]));
}
}
};
@@ -2495,18 +2716,16 @@ use _config_data as config_data;
struct DefaultConfigData {
global: GlobalDefaultConfigData,
local: LocalDefaultConfigData,
- #[allow(dead_code)]
client: ClientDefaultConfigData,
}
/// All of the config levels, all fields `Option<T>`, to describe fields that are actually set by
/// some rust-analyzer.toml file or JSON blob. An empty rust-analyzer.toml corresponds to
/// all fields being None.
-#[derive(Debug, Clone, Default)]
+#[derive(Debug, Clone, Default, Serialize)]
struct FullConfigInput {
global: GlobalConfigInput,
local: LocalConfigInput,
- #[allow(dead_code)]
client: ClientConfigInput,
}
@@ -2527,9 +2746,12 @@ impl FullConfigInput {
GlobalConfigInput::schema_fields(&mut fields);
LocalConfigInput::schema_fields(&mut fields);
ClientConfigInput::schema_fields(&mut fields);
- // HACK: sort the fields, so the diffs on the generated docs/schema are smaller
fields.sort_by_key(|&(x, ..)| x);
fields
+ .iter()
+ .tuple_windows()
+ .for_each(|(a, b)| assert!(a.0 != b.0, "{a:?} duplicate field"));
+ fields
}
fn json_schema() -> serde_json::Value {
@@ -2545,63 +2767,57 @@ impl FullConfigInput {
/// All of the config levels, all fields `Option<T>`, to describe fields that are actually set by
/// some rust-analyzer.toml file or JSON blob. An empty rust-analyzer.toml corresponds to
/// all fields being None.
-#[derive(Debug, Clone, Default)]
+#[derive(Debug, Clone, Default, Serialize)]
struct GlobalLocalConfigInput {
global: GlobalConfigInput,
local: LocalConfigInput,
}
impl GlobalLocalConfigInput {
- #[allow(dead_code)]
+ const FIELDS: &'static [&'static [&'static str]] =
+ &[GlobalConfigInput::FIELDS, LocalConfigInput::FIELDS];
fn from_toml(
- mut toml: toml::Table,
+ toml: toml::Table,
error_sink: &mut Vec<(String, toml::de::Error)>,
) -> GlobalLocalConfigInput {
GlobalLocalConfigInput {
- global: GlobalConfigInput::from_toml(&mut toml, error_sink),
- local: LocalConfigInput::from_toml(&mut toml, error_sink),
+ global: GlobalConfigInput::from_toml(&toml, error_sink),
+ local: LocalConfigInput::from_toml(&toml, error_sink),
}
}
}
-fn get_field_toml<T: DeserializeOwned>(
- val: &toml::Table,
- error_sink: &mut Vec<(String, toml::de::Error)>,
+fn get_field<T: DeserializeOwned>(
+ json: &mut serde_json::Value,
+ error_sink: &mut Vec<(String, serde_json::Error)>,
field: &'static str,
alias: Option<&'static str>,
) -> Option<T> {
+ // XXX: check alias first, to work around VS Code, which pre-fills the
+ // defaults instead of sending an empty object.
alias
.into_iter()
.chain(iter::once(field))
.filter_map(move |field| {
- let subkeys = field.split('_');
- let mut v = val;
- for subkey in subkeys {
- if let Some(val) = v.get(subkey) {
- if let Some(map) = val.as_table() {
- v = map;
- } else {
- return Some(toml::Value::try_into(val.clone()).map_err(|e| (e, v)));
- }
- } else {
- return None;
- }
- }
- None
+ let mut pointer = field.replace('_', "/");
+ pointer.insert(0, '/');
+ json.pointer_mut(&pointer)
+ .map(|it| serde_json::from_value(it.take()).map_err(|e| (e, pointer)))
})
.find(Result::is_ok)
.and_then(|res| match res {
Ok(it) => Some(it),
Err((e, pointer)) => {
- error_sink.push((pointer.to_string(), e));
+ tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e);
+ error_sink.push((pointer, e));
None
}
})
}
-fn get_field<T: DeserializeOwned>(
- json: &mut serde_json::Value,
- error_sink: &mut Vec<(String, serde_json::Error)>,
+fn get_field_toml<T: DeserializeOwned>(
+ toml: &toml::Table,
+ error_sink: &mut Vec<(String, toml::de::Error)>,
field: &'static str,
alias: Option<&'static str>,
) -> Option<T> {
@@ -2613,8 +2829,8 @@ fn get_field<T: DeserializeOwned>(
.filter_map(move |field| {
let mut pointer = field.replace('_', "/");
pointer.insert(0, '/');
- json.pointer_mut(&pointer)
- .map(|it| serde_json::from_value(it.take()).map_err(|e| (e, pointer)))
+ toml_pointer(toml, &pointer)
+ .map(|it| <_>::deserialize(it.clone()).map_err(|e| (e, pointer)))
})
.find(Result::is_ok)
.and_then(|res| match res {
@@ -2627,6 +2843,32 @@ fn get_field<T: DeserializeOwned>(
})
}
+fn toml_pointer<'a>(toml: &'a toml::Table, pointer: &str) -> Option<&'a toml::Value> {
+ fn parse_index(s: &str) -> Option<usize> {
+ if s.starts_with('+') || (s.starts_with('0') && s.len() != 1) {
+ return None;
+ }
+ s.parse().ok()
+ }
+
+ if pointer.is_empty() {
+ return None;
+ }
+ if !pointer.starts_with('/') {
+ return None;
+ }
+ let mut parts = pointer.split('/').skip(1);
+ let first = parts.next()?;
+ let init = toml.get(first)?;
+ parts.map(|x| x.replace("~1", "/").replace("~0", "~")).try_fold(init, |target, token| {
+ match target {
+ toml::Value::Table(table) => table.get(&token),
+ toml::Value::Array(list) => parse_index(&token).and_then(move |x| list.get(x)),
+ _ => None,
+ }
+ })
+}
+
type SchemaField = (&'static str, &'static str, &'static [&'static str], String);
fn schema(fields: &[SchemaField]) -> serde_json::Value {
@@ -2634,11 +2876,18 @@ fn schema(fields: &[SchemaField]) -> serde_json::Value {
.iter()
.map(|(field, ty, doc, default)| {
let name = field.replace('_', ".");
+ let category =
+ name.find('.').map(|end| String::from(&name[..end])).unwrap_or("general".into());
let name = format!("rust-analyzer.{name}");
let props = field_props(field, ty, doc, default);
- (name, props)
+ serde_json::json!({
+ "title": category,
+ "properties": {
+ name: props
+ }
+ })
})
- .collect::<serde_json::Map<_, _>>();
+ .collect::<Vec<_>>();
map.into()
}
@@ -2761,11 +3010,6 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
"Search for all symbols kinds."
],
},
- "ParallelCachePrimingNumThreads" => set! {
- "type": "number",
- "minimum": 0,
- "maximum": 255
- },
"LifetimeElisionDef" => set! {
"type": "string",
"enum": [
@@ -2987,12 +3231,77 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
},
],
},
- _ => panic!("missing entry for {ty}: {default}"),
+ "NumThreads" => set! {
+ "anyOf": [
+ {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 255
+ },
+ {
+ "type": "string",
+ "enum": ["physical", "logical", ],
+ "enumDescriptions": [
+ "Use the number of physical cores",
+ "Use the number of logical cores",
+ ],
+ },
+ ],
+ },
+ "Option<NumThreads>" => set! {
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 255
+ },
+ {
+ "type": "string",
+ "enum": ["physical", "logical", ],
+ "enumDescriptions": [
+ "Use the number of physical cores",
+ "Use the number of logical cores",
+ ],
+ },
+ ],
+ },
+ _ => panic!("missing entry for {ty}: {default} (field {field})"),
}
map.into()
}
+fn validate_toml_table(
+ known_ptrs: &[&[&'static str]],
+ toml: &toml::Table,
+ ptr: &mut String,
+ error_sink: &mut Vec<(String, toml::de::Error)>,
+) {
+ let verify = |ptr: &String| known_ptrs.iter().any(|ptrs| ptrs.contains(&ptr.as_str()));
+
+ let l = ptr.len();
+ for (k, v) in toml {
+ if !ptr.is_empty() {
+ ptr.push('_');
+ }
+ ptr.push_str(k);
+
+ match v {
+ // This is a table config, so any entry in it is therefore valid
+ toml::Value::Table(_) if verify(ptr) => (),
+ toml::Value::Table(table) => validate_toml_table(known_ptrs, table, ptr, error_sink),
+ _ if !verify(ptr) => error_sink
+ .push((ptr.replace('_', "/"), toml::de::Error::custom("unexpected field"))),
+ _ => (),
+ }
+
+ ptr.truncate(l);
+ }
+}
+
#[cfg(test)]
fn manual(fields: &[SchemaField]) -> String {
fields.iter().fold(String::new(), |mut acc, (field, _ty, doc, default)| {
@@ -3037,10 +3346,10 @@ mod tests {
let s = Config::json_schema();
let schema = format!("{s:#}");
let mut schema = schema
- .trim_start_matches('{')
- .trim_end_matches('}')
+ .trim_start_matches('[')
+ .trim_end_matches(']')
.replace(" ", " ")
- .replace('\n', "\n ")
+ .replace('\n', "\n ")
.trim_start_matches('\n')
.trim_end()
.to_owned();
@@ -3072,8 +3381,10 @@ mod tests {
let package_json_path = project_root().join("editors/code/package.json");
let mut package_json = fs::read_to_string(&package_json_path).unwrap();
- let start_marker = " \"$generated-start\": {},\n";
- let end_marker = " \"$generated-end\": {}\n";
+ let start_marker =
+ " {\n \"title\": \"$generated-start\"\n },\n";
+ let end_marker =
+ " {\n \"title\": \"$generated-end\"\n }\n";
let start = package_json.find(start_marker).unwrap() + start_marker.len();
let end = package_json.find(end_marker).unwrap();
@@ -3104,12 +3415,16 @@ mod tests {
Default::default(),
vec![],
None,
+ None,
);
- config
- .update(serde_json::json!({
- "procMacro_server": null,
- }))
- .unwrap();
+
+ let mut change = ConfigChange::default();
+ change.change_client_config(serde_json::json!({
+ "procMacro" : {
+ "server": null,
+ }}));
+
+ (config, _, _) = config.apply_change(change);
assert_eq!(config.proc_macro_srv(), None);
}
@@ -3120,12 +3435,15 @@ mod tests {
Default::default(),
vec![],
None,
+ None,
);
- config
- .update(serde_json::json!({
- "procMacro": {"server": project_root().display().to_string()}
- }))
- .unwrap();
+ let mut change = ConfigChange::default();
+ change.change_client_config(serde_json::json!({
+ "procMacro" : {
+ "server": project_root().display().to_string(),
+ }}));
+
+ (config, _, _) = config.apply_change(change);
assert_eq!(config.proc_macro_srv(), Some(AbsPathBuf::try_from(project_root()).unwrap()));
}
@@ -3136,12 +3454,18 @@ mod tests {
Default::default(),
vec![],
None,
+ None,
);
- config
- .update(serde_json::json!({
- "procMacro": {"server": "./server"}
- }))
- .unwrap();
+
+ let mut change = ConfigChange::default();
+
+ change.change_client_config(serde_json::json!({
+ "procMacro" : {
+ "server": "./server"
+ }}));
+
+ (config, _, _) = config.apply_change(change);
+
assert_eq!(
config.proc_macro_srv(),
Some(AbsPathBuf::try_from(project_root().join("./server")).unwrap())
@@ -3155,12 +3479,16 @@ mod tests {
Default::default(),
vec![],
None,
+ None,
);
- config
- .update(serde_json::json!({
- "rust": { "analyzerTargetDir": null }
- }))
- .unwrap();
+
+ let mut change = ConfigChange::default();
+
+ change.change_client_config(serde_json::json!({
+ "rust" : { "analyzerTargetDir" : null }
+ }));
+
+ (config, _, _) = config.apply_change(change);
assert_eq!(config.cargo_targetDir(), &None);
assert!(
matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. } if options.target_dir.is_none())
@@ -3174,12 +3502,16 @@ mod tests {
Default::default(),
vec![],
None,
+ None,
);
- config
- .update(serde_json::json!({
- "rust": { "analyzerTargetDir": true }
- }))
- .unwrap();
+
+ let mut change = ConfigChange::default();
+ change.change_client_config(serde_json::json!({
+ "rust" : { "analyzerTargetDir" : true }
+ }));
+
+ (config, _, _) = config.apply_change(change);
+
assert_eq!(config.cargo_targetDir(), &Some(TargetDirectory::UseSubdirectory(true)));
assert!(
matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. } if options.target_dir == Some(Utf8PathBuf::from("target/rust-analyzer")))
@@ -3193,12 +3525,16 @@ mod tests {
Default::default(),
vec![],
None,
+ None,
);
- config
- .update(serde_json::json!({
- "rust": { "analyzerTargetDir": "other_folder" }
- }))
- .unwrap();
+
+ let mut change = ConfigChange::default();
+ change.change_client_config(serde_json::json!({
+ "rust" : { "analyzerTargetDir" : "other_folder" }
+ }));
+
+ (config, _, _) = config.apply_change(change);
+
assert_eq!(
config.cargo_targetDir(),
&Some(TargetDirectory::Directory(Utf8PathBuf::from("other_folder")))
@@ -3207,4 +3543,95 @@ mod tests {
matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. } if options.target_dir == Some(Utf8PathBuf::from("other_folder")))
);
}
+
+ #[test]
+ fn toml_unknown_key() {
+ let config = Config::new(
+ AbsPathBuf::try_from(project_root()).unwrap(),
+ Default::default(),
+ vec![],
+ None,
+ None,
+ );
+
+ let mut change = ConfigChange::default();
+
+ change.change_root_ratoml(Some(
+ toml::toml! {
+ [cargo.cfgs]
+ these = "these"
+ should = "should"
+ be = "be"
+ valid = "valid"
+
+ [invalid.config]
+ err = "error"
+
+ [cargo]
+ target = "ok"
+
+ // FIXME: This should be an error
+ [cargo.sysroot]
+ non-table = "expected"
+ }
+ .to_string()
+ .into(),
+ ));
+
+ let (config, e, _) = config.apply_change(change);
+ expect_test::expect![[r#"
+ ConfigErrors(
+ [
+ Toml {
+ config_key: "invalid/config/err",
+ error: Error {
+ inner: Error {
+ inner: TomlError {
+ message: "unexpected field",
+ raw: None,
+ keys: [],
+ span: None,
+ },
+ },
+ },
+ },
+ ],
+ )
+ "#]]
+ .assert_debug_eq(&e);
+ let mut change = ConfigChange::default();
+
+ change.change_user_config(Some(
+ toml::toml! {
+ [cargo.cfgs]
+ these = "these"
+ should = "should"
+ be = "be"
+ valid = "valid"
+ }
+ .to_string()
+ .into(),
+ ));
+ let (_, e, _) = config.apply_change(change);
+ expect_test::expect![[r#"
+ ConfigErrors(
+ [
+ Toml {
+ config_key: "invalid/config/err",
+ error: Error {
+ inner: Error {
+ inner: TomlError {
+ message: "unexpected field",
+ raw: None,
+ keys: [],
+ span: None,
+ },
+ },
+ },
+ },
+ ],
+ )
+ "#]]
+ .assert_debug_eq(&e);
+ }
}
diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs
index 65a9a49149..b23e7b7e98 100644
--- a/crates/rust-analyzer/src/diagnostics.rs
+++ b/crates/rust-analyzer/src/diagnostics.rs
@@ -8,6 +8,7 @@ use ide_db::FxHashMap;
use itertools::Itertools;
use nohash_hasher::{IntMap, IntSet};
use rustc_hash::FxHashSet;
+use stdx::iter_eq_by;
use triomphe::Arc;
use crate::{global_state::GlobalStateSnapshot, lsp, lsp_ext};
@@ -22,14 +23,21 @@ pub struct DiagnosticsMapConfig {
pub check_ignore: FxHashSet<String>,
}
+pub(crate) type DiagnosticsGeneration = usize;
+
#[derive(Debug, Default, Clone)]
pub(crate) struct DiagnosticCollection {
// FIXME: should be IntMap<FileId, Vec<ra_id::Diagnostic>>
- pub(crate) native: IntMap<FileId, Vec<lsp_types::Diagnostic>>,
+ pub(crate) native: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>,
// FIXME: should be Vec<flycheck::Diagnostic>
pub(crate) check: IntMap<usize, IntMap<FileId, Vec<lsp_types::Diagnostic>>>,
pub(crate) check_fixes: CheckFixes,
changes: IntSet<FileId>,
+ /// Counter for supplying a new generation number for diagnostics.
+ /// This is used to keep track of when to clear the diagnostics for a given file as we compute
+ /// diagnostics on multiple worker threads simultaneously which may result in multiple diagnostics
+ /// updates for the same file in a single generation update (due to macros affecting multiple files).
+ generation: DiagnosticsGeneration,
}
#[derive(Debug, Clone)]
@@ -82,21 +90,31 @@ impl DiagnosticCollection {
pub(crate) fn set_native_diagnostics(
&mut self,
+ generation: DiagnosticsGeneration,
file_id: FileId,
- diagnostics: Vec<lsp_types::Diagnostic>,
+ mut diagnostics: Vec<lsp_types::Diagnostic>,
) {
- if let Some(existing_diagnostics) = self.native.get(&file_id) {
+ diagnostics.sort_by_key(|it| (it.range.start, it.range.end));
+ if let Some((old_gen, existing_diagnostics)) = self.native.get_mut(&file_id) {
if existing_diagnostics.len() == diagnostics.len()
- && diagnostics
- .iter()
- .zip(existing_diagnostics)
- .all(|(new, existing)| are_diagnostics_equal(new, existing))
+ && iter_eq_by(&diagnostics, &*existing_diagnostics, |new, existing| {
+ are_diagnostics_equal(new, existing)
+ })
{
+ // don't signal an update if the diagnostics are the same
return;
}
+ if *old_gen < generation || generation == 0 {
+ self.native.insert(file_id, (generation, diagnostics));
+ } else {
+ existing_diagnostics.extend(diagnostics);
+ // FIXME: Doing the merge step of a merge sort here would be a bit more performant
+ // but eh
+ existing_diagnostics.sort_by_key(|it| (it.range.start, it.range.end))
+ }
+ } else {
+ self.native.insert(file_id, (generation, diagnostics));
}
-
- self.native.insert(file_id, diagnostics);
self.changes.insert(file_id);
}
@@ -104,7 +122,7 @@ impl DiagnosticCollection {
&self,
file_id: FileId,
) -> impl Iterator<Item = &lsp_types::Diagnostic> {
- let native = self.native.get(&file_id).into_iter().flatten();
+ let native = self.native.get(&file_id).into_iter().flat_map(|(_, d)| d);
let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten();
native.chain(check)
}
@@ -115,6 +133,11 @@ impl DiagnosticCollection {
}
Some(mem::take(&mut self.changes))
}
+
+ pub(crate) fn next_generation(&mut self) -> usize {
+ self.generation += 1;
+ self.generation
+ }
}
fn are_diagnostics_equal(left: &lsp_types::Diagnostic, right: &lsp_types::Diagnostic) -> bool {
@@ -126,9 +149,10 @@ fn are_diagnostics_equal(left: &lsp_types::Diagnostic, right: &lsp_types::Diagno
pub(crate) fn fetch_native_diagnostics(
snapshot: GlobalStateSnapshot,
- subscriptions: Vec<FileId>,
+ subscriptions: std::sync::Arc<[FileId]>,
+ slice: std::ops::Range<usize>,
) -> Vec<(FileId, Vec<lsp_types::Diagnostic>)> {
- let _p = tracing::span!(tracing::Level::INFO, "fetch_native_diagnostics").entered();
+ let _p = tracing::info_span!("fetch_native_diagnostics").entered();
let _ctx = stdx::panic_context::enter("fetch_native_diagnostics".to_owned());
let convert_diagnostic =
@@ -149,12 +173,12 @@ pub(crate) fn fetch_native_diagnostics(
// the diagnostics produced may point to different files not requested by the concrete request,
// put those into here and filter later
let mut odd_ones = Vec::new();
- let mut diagnostics = subscriptions
+ let mut diagnostics = subscriptions[slice]
.iter()
.copied()
.filter_map(|file_id| {
let line_index = snapshot.file_line_index(file_id).ok()?;
- let source_root = snapshot.analysis.source_root(file_id).ok()?;
+ let source_root = snapshot.analysis.source_root_id(file_id).ok()?;
let diagnostics = snapshot
.analysis
diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs
index 3d3f944019..4832e8cab4 100644
--- a/crates/rust-analyzer/src/diagnostics/to_proto.rs
+++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs
@@ -547,6 +547,7 @@ mod tests {
ClientCapabilities::default(),
Vec::new(),
None,
+ None,
),
);
let snap = state.snapshot();
diff --git a/crates/rust-analyzer/src/dispatch.rs b/crates/rust-analyzer/src/dispatch.rs
index cf3b8d331d..ebdc196a65 100644
--- a/crates/rust-analyzer/src/dispatch.rs
+++ b/crates/rust-analyzer/src/dispatch.rs
@@ -52,7 +52,8 @@ impl RequestDispatcher<'_> {
Some(it) => it,
None => return self,
};
- let _guard = tracing::span!(tracing::Level::INFO, "request", method = ?req.method, "request_id" = ?req.id).entered();
+ let _guard =
+ tracing::info_span!("request", method = ?req.method, "request_id" = ?req.id).entered();
tracing::debug!(?params);
let result = {
let _pctx = stdx::panic_context::enter(panic_context);
@@ -79,7 +80,8 @@ impl RequestDispatcher<'_> {
Some(it) => it,
None => return self,
};
- let _guard = tracing::span!(tracing::Level::INFO, "request", method = ?req.method, "request_id" = ?req.id).entered();
+ let _guard =
+ tracing::info_span!("request", method = ?req.method, "request_id" = ?req.id).entered();
tracing::debug!(?params);
let global_state_snapshot = self.global_state.snapshot();
@@ -162,7 +164,8 @@ impl RequestDispatcher<'_> {
Some(it) => it,
None => return self,
};
- let _guard = tracing::span!(tracing::Level::INFO, "request", method = ?req.method, "request_id" = ?req.id).entered();
+ let _guard =
+ tracing::info_span!("request", method = ?req.method, "request_id" = ?req.id).entered();
tracing::debug!(?params);
let world = self.global_state.snapshot();
@@ -297,8 +300,7 @@ impl NotificationDispatcher<'_> {
None => return Ok(self),
};
- let _guard =
- tracing::span!(tracing::Level::INFO, "notification", method = ?not.method).entered();
+ let _guard = tracing::info_span!("notification", method = ?not.method).entered();
let params = match not.extract::<N::Params>(N::METHOD) {
Ok(it) => it,
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index f64e66183d..717d8a632c 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -3,13 +3,13 @@
//!
//! Each tick provides an immutable snapshot of the state as `WorldSnapshot`.
-use std::time::Instant;
+use std::{ops::Not as _, time::Instant};
use crossbeam_channel::{unbounded, Receiver, Sender};
use flycheck::FlycheckHandle;
use hir::ChangeWithProcMacros;
use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId};
-use ide_db::base_db::{CrateId, ProcMacroPaths};
+use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabaseExt};
use load_cargo::SourceRootConfig;
use lsp_types::{SemanticTokens, Url};
use nohash_hasher::IntMap;
@@ -18,25 +18,26 @@ use parking_lot::{
RwLockWriteGuard,
};
use proc_macro_api::ProcMacroServer;
-use project_model::{
- CargoWorkspace, ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, Target,
- WorkspaceBuildScripts,
-};
+use project_model::{ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts};
use rustc_hash::{FxHashMap, FxHashSet};
use tracing::{span, Level};
use triomphe::Arc;
-use vfs::{AnchoredPathBuf, Vfs};
+use vfs::{AnchoredPathBuf, ChangeKind, Vfs};
use crate::{
- config::{Config, ConfigError},
+ config::{Config, ConfigChange, ConfigErrors},
diagnostics::{CheckFixes, DiagnosticCollection},
line_index::{LineEndings, LineIndex},
- lsp::{from_proto, to_proto::url_from_abs_path},
+ lsp::{
+ from_proto::{self},
+ to_proto::url_from_abs_path,
+ },
lsp_ext,
main_loop::Task,
mem_docs::MemDocs,
op_queue::OpQueue,
reload,
+ target_spec::{CargoTargetSpec, ProjectJsonTargetSpec, TargetSpec},
task_pool::{TaskPool, TaskQueue},
};
@@ -65,13 +66,13 @@ pub(crate) struct GlobalState {
pub(crate) fmt_pool: Handle<TaskPool<Task>, Receiver<Task>>,
pub(crate) config: Arc<Config>,
- pub(crate) config_errors: Option<ConfigError>,
+ pub(crate) config_errors: Option<ConfigErrors>,
pub(crate) analysis_host: AnalysisHost,
pub(crate) diagnostics: DiagnosticCollection,
pub(crate) mem_docs: MemDocs,
pub(crate) source_root_config: SourceRootConfig,
/// A mapping that maps a local source root's `SourceRootId` to it parent's `SourceRootId`, if it has one.
- pub(crate) local_roots_parent_map: FxHashMap<SourceRootId, SourceRootId>,
+ pub(crate) local_roots_parent_map: Arc<FxHashMap<SourceRootId, SourceRootId>>,
pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
// status
@@ -160,7 +161,9 @@ pub(crate) struct GlobalStateSnapshot {
pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
vfs: Arc<RwLock<(vfs::Vfs, IntMap<FileId, LineEndings>)>>,
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
- // used to signal semantic highlighting to fall back to syntax based highlighting until proc-macros have been loaded
+ // used to signal semantic highlighting to fall back to syntax based highlighting until
+ // proc-macros have been loaded
+ // FIXME: Can we derive this from somewhere else?
pub(crate) proc_macros_loaded: bool,
pub(crate) flycheck: Arc<[FlycheckHandle]>,
}
@@ -213,7 +216,7 @@ impl GlobalState {
shutdown_requested: false,
last_reported_status: None,
source_root_config: SourceRootConfig::default(),
- local_roots_parent_map: FxHashMap::default(),
+ local_roots_parent_map: Arc::new(FxHashMap::default()),
config_errors: Default::default(),
proc_macro_clients: Arc::from_iter([]),
@@ -254,6 +257,14 @@ impl GlobalState {
pub(crate) fn process_changes(&mut self) -> bool {
let _p = span!(Level::INFO, "GlobalState::process_changes").entered();
+
+ // We cannot directly resolve a change in a ratoml file to a format
+ // that can be used by the config module because config talks
+ // in `SourceRootId`s instead of `FileId`s and `FileId` -> `SourceRootId`
+ // mapping is not ready until `AnalysisHost::apply_changes` has been called.
+ let mut modified_ratoml_files: FxHashMap<FileId, (ChangeKind, vfs::VfsPath)> =
+ FxHashMap::default();
+
let (change, modified_rust_files, workspace_structure_change) = {
let mut change = ChangeWithProcMacros::new();
let mut guard = self.vfs.write();
@@ -273,6 +284,11 @@ impl GlobalState {
let mut modified_rust_files = vec![];
for file in changed_files.into_values() {
let vfs_path = vfs.file_path(file.file_id);
+ if let Some(("rust-analyzer", Some("toml"))) = vfs_path.name_and_extension() {
+ // Remember ids to use them after `apply_changes`
+ modified_ratoml_files.insert(file.file_id, (file.kind(), vfs_path.clone()));
+ }
+
if let Some(path) = vfs_path.as_path() {
has_structure_changes |= file.is_created_or_deleted();
@@ -310,12 +326,15 @@ impl GlobalState {
bytes.push((file.file_id, text));
}
let (vfs, line_endings_map) = &mut *RwLockUpgradableReadGuard::upgrade(guard);
- bytes.into_iter().for_each(|(file_id, text)| match text {
- None => change.change_file(file_id, None),
- Some((text, line_endings)) => {
- line_endings_map.insert(file_id, line_endings);
- change.change_file(file_id, Some(text));
- }
+ bytes.into_iter().for_each(|(file_id, text)| {
+ let text = match text {
+ None => None,
+ Some((text, line_endings)) => {
+ line_endings_map.insert(file_id, line_endings);
+ Some(text)
+ }
+ };
+ change.change_file(file_id, text);
});
if has_structure_changes {
let roots = self.source_root_config.partition(vfs);
@@ -326,6 +345,63 @@ impl GlobalState {
let _p = span!(Level::INFO, "GlobalState::process_changes/apply_change").entered();
self.analysis_host.apply_change(change);
+ if !modified_ratoml_files.is_empty()
+ || !self.config.same_source_root_parent_map(&self.local_roots_parent_map)
+ {
+ let config_change = {
+ let user_config_path = self.config.user_config_path();
+ let root_ratoml_path = self.config.root_ratoml_path();
+ let mut change = ConfigChange::default();
+ let db = self.analysis_host.raw_database();
+
+ for (file_id, (_change_kind, vfs_path)) in modified_ratoml_files {
+ if vfs_path == *user_config_path {
+ change.change_user_config(Some(db.file_text(file_id)));
+ continue;
+ }
+
+ if vfs_path == *root_ratoml_path {
+ change.change_root_ratoml(Some(db.file_text(file_id)));
+ continue;
+ }
+
+ // If change has been made to a ratoml file that
+ // belongs to a non-local source root, we will ignore it.
+                        // As it doesn't make sense for users to use external config files.
+ let sr_id = db.file_source_root(file_id);
+ let sr = db.source_root(sr_id);
+ if !sr.is_library {
+ if let Some((old_path, old_text)) = change.change_ratoml(
+ sr_id,
+ vfs_path.clone(),
+ Some(db.file_text(file_id)),
+ ) {
+                            // SourceRoot has more than one RATOML file. In this case the lexicographically smaller path wins.
+ if old_path < vfs_path {
+ span!(Level::ERROR, "Two `rust-analyzer.toml` files were found inside the same crate. {vfs_path} has no effect.");
+ // Put the old one back in.
+ change.change_ratoml(sr_id, old_path, old_text);
+ }
+ }
+ } else {
+                    // NOTE(review): this branch runs when the source root *is* a library, yet the
+                    // message below reports a SourceRootId mapping failure — confirm the intended condition.
+ span!(Level::ERROR, "Mapping to SourceRootId failed.");
+ }
+ }
+ change.change_source_root_parent_map(self.local_roots_parent_map.clone());
+ change
+ };
+
+ let (config, e, should_update) = self.config.apply_change(config_change);
+ self.config_errors = e.is_empty().not().then_some(e);
+
+ if should_update {
+ self.update_configuration(config);
+ } else {
+ // No global or client level config was changed. So we can just naively replace config.
+ self.config = Arc::new(config);
+ }
+ }
{
if !matches!(&workspace_structure_change, Some((.., true))) {
@@ -478,21 +554,52 @@ impl GlobalStateSnapshot {
self.vfs_read().file_path(file_id).clone()
}
- pub(crate) fn cargo_target_for_crate_root(
- &self,
- crate_id: CrateId,
- ) -> Option<(&CargoWorkspace, Target)> {
+ pub(crate) fn target_spec_for_crate(&self, crate_id: CrateId) -> Option<TargetSpec> {
let file_id = self.analysis.crate_root(crate_id).ok()?;
let path = self.vfs_read().file_path(file_id).clone();
let path = path.as_path()?;
- self.workspaces.iter().find_map(|ws| match &ws.kind {
- ProjectWorkspaceKind::Cargo { cargo, .. }
- | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _)), .. } => {
- cargo.target_by_root(path).map(|it| (cargo, it))
- }
- ProjectWorkspaceKind::Json { .. } => None,
- ProjectWorkspaceKind::DetachedFile { .. } => None,
- })
+
+ for workspace in self.workspaces.iter() {
+ match &workspace.kind {
+ ProjectWorkspaceKind::Cargo { cargo, .. }
+ | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _)), .. } => {
+ let Some(target_idx) = cargo.target_by_root(path) else {
+ continue;
+ };
+
+ let target_data = &cargo[target_idx];
+ let package_data = &cargo[target_data.package];
+
+ return Some(TargetSpec::Cargo(CargoTargetSpec {
+ workspace_root: cargo.workspace_root().to_path_buf(),
+ cargo_toml: package_data.manifest.clone(),
+ crate_id,
+ package: cargo.package_flag(package_data),
+ target: target_data.name.clone(),
+ target_kind: target_data.kind,
+ required_features: target_data.required_features.clone(),
+ features: package_data.features.keys().cloned().collect(),
+ }));
+ }
+ ProjectWorkspaceKind::Json(project) => {
+ let Some(krate) = project.crate_by_root(path) else {
+ continue;
+ };
+ let Some(build) = krate.build else {
+ continue;
+ };
+
+ return Some(TargetSpec::ProjectJson(ProjectJsonTargetSpec {
+ label: build.label,
+ target_kind: build.target_kind,
+ shell_runnables: project.runnables().to_owned(),
+ }));
+ }
+ ProjectWorkspaceKind::DetachedFile { .. } => {}
+ };
+ }
+
+ None
}
pub(crate) fn file_exists(&self, file_id: FileId) -> bool {
diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs
index 9d30063ccc..2dbc297ea6 100644
--- a/crates/rust-analyzer/src/handlers/notification.rs
+++ b/crates/rust-analyzer/src/handlers/notification.rs
@@ -1,7 +1,7 @@
//! This module is responsible for implementing handlers for Language Server
//! Protocol. This module specifically handles notifications.
-use std::ops::Deref;
+use std::ops::{Deref, Not as _};
use itertools::Itertools;
use lsp_types::{
@@ -13,7 +13,7 @@ use triomphe::Arc;
use vfs::{AbsPathBuf, ChangeKind, VfsPath};
use crate::{
- config::Config,
+ config::{Config, ConfigChange},
global_state::GlobalState,
lsp::{from_proto, utils::apply_document_changes},
lsp_ext::{self, RunFlycheckParams},
@@ -54,7 +54,7 @@ pub(crate) fn handle_did_open_text_document(
state: &mut GlobalState,
params: DidOpenTextDocumentParams,
) -> anyhow::Result<()> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_did_open_text_document").entered();
+ let _p = tracing::info_span!("handle_did_open_text_document").entered();
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
let already_exists = state
@@ -71,6 +71,7 @@ pub(crate) fn handle_did_open_text_document(
tracing::error!("duplicate DidOpenTextDocument: {}", path);
}
+ tracing::info!("New file content set {:?}", params.text_document.text);
state.vfs.write().0.set_file_contents(path, Some(params.text_document.text.into_bytes()));
if state.config.notifications().unindexed_project {
tracing::debug!("queuing task");
@@ -87,7 +88,7 @@ pub(crate) fn handle_did_change_text_document(
state: &mut GlobalState,
params: DidChangeTextDocumentParams,
) -> anyhow::Result<()> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_did_change_text_document").entered();
+ let _p = tracing::info_span!("handle_did_change_text_document").entered();
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
let Some(DocumentData { version, data }) = state.mem_docs.get_mut(&path) else {
@@ -116,7 +117,7 @@ pub(crate) fn handle_did_close_text_document(
state: &mut GlobalState,
params: DidCloseTextDocumentParams,
) -> anyhow::Result<()> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_did_close_text_document").entered();
+ let _p = tracing::info_span!("handle_did_close_text_document").entered();
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
if state.mem_docs.remove(&path).is_err() {
@@ -196,10 +197,14 @@ pub(crate) fn handle_did_change_configuration(
}
(None, Some(mut configs)) => {
if let Some(json) = configs.get_mut(0) {
- // Note that json can be null according to the spec if the client can't
- // provide a configuration. This is handled in Config::update below.
- let mut config = Config::clone(&*this.config);
- this.config_errors = config.update(json.take()).err();
+ let config = Config::clone(&*this.config);
+ let mut change = ConfigChange::default();
+ change.change_client_config(json.take());
+
+ let (config, e, _) = config.apply_change(change);
+ this.config_errors = e.is_empty().not().then_some(e);
+
+                    // Client config changes necessitate calling the .update_configuration method.
this.update_configuration(config);
}
}
@@ -254,7 +259,7 @@ pub(crate) fn handle_did_change_watched_files(
}
fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
- let _p = tracing::span!(tracing::Level::INFO, "run_flycheck").entered();
+ let _p = tracing::info_span!("run_flycheck").entered();
let file_id = state.vfs.read().0.file_id(&vfs_path);
if let Some(file_id) = file_id {
@@ -349,13 +354,13 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
}
pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_cancel_flycheck").entered();
+ let _p = tracing::info_span!("handle_cancel_flycheck").entered();
state.flycheck.iter().for_each(|flycheck| flycheck.cancel());
Ok(())
}
pub(crate) fn handle_clear_flycheck(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_clear_flycheck").entered();
+ let _p = tracing::info_span!("handle_clear_flycheck").entered();
state.diagnostics.clear_check_all();
Ok(())
}
@@ -364,7 +369,7 @@ pub(crate) fn handle_run_flycheck(
state: &mut GlobalState,
params: RunFlycheckParams,
) -> anyhow::Result<()> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_run_flycheck").entered();
+ let _p = tracing::info_span!("handle_run_flycheck").entered();
if let Some(text_document) = params.text_document {
if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) {
if run_flycheck(state, vfs_path) {
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index 1e24bf3aae..8e39b15da3 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -35,13 +35,13 @@ use triomphe::Arc;
use vfs::{AbsPath, AbsPathBuf, FileId, VfsPath};
use crate::{
- cargo_target_spec::CargoTargetSpec,
config::{Config, RustfmtConfig, WorkspaceSymbolConfig},
diff::diff,
global_state::{GlobalState, GlobalStateSnapshot},
hack_recover_crate_name,
line_index::LineEndings,
lsp::{
+ ext::InternalTestingFetchConfigParams,
from_proto, to_proto,
utils::{all_edits_are_disjoint, invalid_params_error},
LspError,
@@ -50,6 +50,7 @@ use crate::{
self, CrateInfoResult, ExternalDocsPair, ExternalDocsResponse, FetchDependencyListParams,
FetchDependencyListResult, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams,
},
+ target_spec::TargetSpec,
};
pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
@@ -72,7 +73,7 @@ pub(crate) fn handle_analyzer_status(
snap: GlobalStateSnapshot,
params: lsp_ext::AnalyzerStatusParams,
) -> anyhow::Result<String> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_analyzer_status").entered();
+ let _p = tracing::info_span!("handle_analyzer_status").entered();
let mut buf = String::new();
@@ -113,7 +114,7 @@ pub(crate) fn handle_analyzer_status(
}
pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> anyhow::Result<String> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_memory_usage").entered();
+ let _p = tracing::info_span!("handle_memory_usage").entered();
let mem = state.analysis_host.per_query_memory_usage();
let mut out = String::new();
@@ -134,7 +135,7 @@ pub(crate) fn handle_syntax_tree(
snap: GlobalStateSnapshot,
params: lsp_ext::SyntaxTreeParams,
) -> anyhow::Result<String> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_syntax_tree").entered();
+ let _p = tracing::info_span!("handle_syntax_tree").entered();
let id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(id)?;
let text_range = params.range.and_then(|r| from_proto::text_range(&line_index, r).ok());
@@ -146,7 +147,7 @@ pub(crate) fn handle_view_hir(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<String> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_view_hir").entered();
+ let _p = tracing::info_span!("handle_view_hir").entered();
let position = from_proto::file_position(&snap, params)?;
let res = snap.analysis.view_hir(position)?;
Ok(res)
@@ -156,7 +157,7 @@ pub(crate) fn handle_view_mir(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<String> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_view_mir").entered();
+ let _p = tracing::info_span!("handle_view_mir").entered();
let position = from_proto::file_position(&snap, params)?;
let res = snap.analysis.view_mir(position)?;
Ok(res)
@@ -166,7 +167,7 @@ pub(crate) fn handle_interpret_function(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<String> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_interpret_function").entered();
+ let _p = tracing::info_span!("handle_interpret_function").entered();
let position = from_proto::file_position(&snap, params)?;
let res = snap.analysis.interpret_function(position)?;
Ok(res)
@@ -184,7 +185,7 @@ pub(crate) fn handle_view_item_tree(
snap: GlobalStateSnapshot,
params: lsp_ext::ViewItemTreeParams,
) -> anyhow::Result<String> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_view_item_tree").entered();
+ let _p = tracing::info_span!("handle_view_item_tree").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let res = snap.analysis.view_item_tree(file_id)?;
Ok(res)
@@ -245,7 +246,7 @@ pub(crate) fn handle_discover_test(
snap: GlobalStateSnapshot,
params: lsp_ext::DiscoverTestParams,
) -> anyhow::Result<lsp_ext::DiscoverTestResults> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_discover_test").entered();
+ let _p = tracing::info_span!("handle_discover_test").entered();
let (tests, scope) = match params.test_id {
Some(id) => {
let crate_id = id.split_once("::").map(|it| it.0).unwrap_or(&id);
@@ -276,7 +277,7 @@ pub(crate) fn handle_view_crate_graph(
snap: GlobalStateSnapshot,
params: ViewCrateGraphParams,
) -> anyhow::Result<String> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_view_crate_graph").entered();
+ let _p = tracing::info_span!("handle_view_crate_graph").entered();
let dot = snap.analysis.view_crate_graph(params.full)?.map_err(anyhow::Error::msg)?;
Ok(dot)
}
@@ -285,7 +286,7 @@ pub(crate) fn handle_expand_macro(
snap: GlobalStateSnapshot,
params: lsp_ext::ExpandMacroParams,
) -> anyhow::Result<Option<lsp_ext::ExpandedMacro>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_expand_macro").entered();
+ let _p = tracing::info_span!("handle_expand_macro").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, params.position)?;
@@ -298,7 +299,7 @@ pub(crate) fn handle_selection_range(
snap: GlobalStateSnapshot,
params: lsp_types::SelectionRangeParams,
) -> anyhow::Result<Option<Vec<lsp_types::SelectionRange>>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_selection_range").entered();
+ let _p = tracing::info_span!("handle_selection_range").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
let res: anyhow::Result<Vec<lsp_types::SelectionRange>> = params
@@ -341,7 +342,7 @@ pub(crate) fn handle_matching_brace(
snap: GlobalStateSnapshot,
params: lsp_ext::MatchingBraceParams,
) -> anyhow::Result<Vec<Position>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_matching_brace").entered();
+ let _p = tracing::info_span!("handle_matching_brace").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
params
@@ -364,11 +365,10 @@ pub(crate) fn handle_join_lines(
snap: GlobalStateSnapshot,
params: lsp_ext::JoinLinesParams,
) -> anyhow::Result<Vec<lsp_types::TextEdit>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_join_lines").entered();
+ let _p = tracing::info_span!("handle_join_lines").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
- let source_root = snap.analysis.source_root(file_id)?;
- let config = snap.config.join_lines(Some(source_root));
+ let config = snap.config.join_lines();
let line_index = snap.file_line_index(file_id)?;
let mut res = TextEdit::default();
@@ -390,7 +390,7 @@ pub(crate) fn handle_on_enter(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_on_enter").entered();
+ let _p = tracing::info_span!("handle_on_enter").entered();
let position = from_proto::file_position(&snap, params)?;
let edit = match snap.analysis.on_enter(position)? {
None => return Ok(None),
@@ -405,7 +405,7 @@ pub(crate) fn handle_on_type_formatting(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentOnTypeFormattingParams,
) -> anyhow::Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_on_type_formatting").entered();
+ let _p = tracing::info_span!("handle_on_type_formatting").entered();
let mut position = from_proto::file_position(&snap, params.text_document_position)?;
let line_index = snap.file_line_index(position.file_id)?;
@@ -446,7 +446,7 @@ pub(crate) fn handle_document_symbol(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentSymbolParams,
) -> anyhow::Result<Option<lsp_types::DocumentSymbolResponse>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_document_symbol").entered();
+ let _p = tracing::info_span!("handle_document_symbol").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
@@ -535,7 +535,7 @@ pub(crate) fn handle_workspace_symbol(
snap: GlobalStateSnapshot,
params: WorkspaceSymbolParams,
) -> anyhow::Result<Option<lsp_types::WorkspaceSymbolResponse>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_workspace_symbol").entered();
+ let _p = tracing::info_span!("handle_workspace_symbol").entered();
let config = snap.config.workspace_symbol();
let (all_symbols, libs) = decide_search_scope_and_kind(&params, &config);
@@ -627,7 +627,7 @@ pub(crate) fn handle_will_rename_files(
snap: GlobalStateSnapshot,
params: lsp_types::RenameFilesParams,
) -> anyhow::Result<Option<lsp_types::WorkspaceEdit>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_will_rename_files").entered();
+ let _p = tracing::info_span!("handle_will_rename_files").entered();
let source_changes: Vec<SourceChange> = params
.files
@@ -689,7 +689,7 @@ pub(crate) fn handle_goto_definition(
snap: GlobalStateSnapshot,
params: lsp_types::GotoDefinitionParams,
) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_goto_definition").entered();
+ let _p = tracing::info_span!("handle_goto_definition").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_definition(position)? {
None => return Ok(None),
@@ -704,7 +704,7 @@ pub(crate) fn handle_goto_declaration(
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoDeclarationParams,
) -> anyhow::Result<Option<lsp_types::request::GotoDeclarationResponse>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_goto_declaration").entered();
+ let _p = tracing::info_span!("handle_goto_declaration").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params.clone())?;
let nav_info = match snap.analysis.goto_declaration(position)? {
None => return handle_goto_definition(snap, params),
@@ -719,7 +719,7 @@ pub(crate) fn handle_goto_implementation(
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoImplementationParams,
) -> anyhow::Result<Option<lsp_types::request::GotoImplementationResponse>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_goto_implementation").entered();
+ let _p = tracing::info_span!("handle_goto_implementation").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_implementation(position)? {
None => return Ok(None),
@@ -734,7 +734,7 @@ pub(crate) fn handle_goto_type_definition(
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoTypeDefinitionParams,
) -> anyhow::Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_goto_type_definition").entered();
+ let _p = tracing::info_span!("handle_goto_type_definition").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_type_definition(position)? {
None => return Ok(None),
@@ -749,7 +749,7 @@ pub(crate) fn handle_parent_module(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_parent_module").entered();
+ let _p = tracing::info_span!("handle_parent_module").entered();
if let Ok(file_path) = &params.text_document.uri.to_file_path() {
if file_path.file_name().unwrap_or_default() == "Cargo.toml" {
// search workspaces for parent packages or fallback to workspace root
@@ -790,9 +790,9 @@ pub(crate) fn handle_parent_module(
Some(&crate_id) => crate_id,
None => return Ok(None),
};
- let cargo_spec = match CargoTargetSpec::for_file(&snap, file_id)? {
- Some(it) => it,
- None => return Ok(None),
+ let cargo_spec = match TargetSpec::for_file(&snap, file_id)? {
+ Some(TargetSpec::Cargo(it)) => it,
+ Some(TargetSpec::ProjectJson(_)) | None => return Ok(None),
};
if snap.analysis.crate_root(crate_id)? == file_id {
@@ -819,11 +819,11 @@ pub(crate) fn handle_runnables(
snap: GlobalStateSnapshot,
params: lsp_ext::RunnablesParams,
) -> anyhow::Result<Vec<lsp_ext::Runnable>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_runnables").entered();
+ let _p = tracing::info_span!("handle_runnables").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
let offset = params.position.and_then(|it| from_proto::offset(&line_index, it).ok());
- let cargo_spec = CargoTargetSpec::for_file(&snap, file_id)?;
+ let target_spec = TargetSpec::for_file(&snap, file_id)?;
let expect_test = match offset {
Some(offset) => {
@@ -840,21 +840,24 @@ pub(crate) fn handle_runnables(
if should_skip_for_offset(&runnable, offset) {
continue;
}
- if should_skip_target(&runnable, cargo_spec.as_ref()) {
+ if should_skip_target(&runnable, target_spec.as_ref()) {
continue;
}
- let mut runnable = to_proto::runnable(&snap, runnable)?;
- if expect_test {
- runnable.label = format!("{} + expect", runnable.label);
- runnable.args.expect_test = Some(true);
+ if let Some(mut runnable) = to_proto::runnable(&snap, runnable)? {
+ if expect_test {
+ if let lsp_ext::RunnableArgs::Cargo(r) = &mut runnable.args {
+ runnable.label = format!("{} + expect", runnable.label);
+ r.expect_test = Some(true);
+ }
+ }
+ res.push(runnable);
}
- res.push(runnable);
}
// Add `cargo check` and `cargo test` for all targets of the whole package
let config = snap.config.runnables();
- match cargo_spec {
- Some(spec) => {
+ match target_spec {
+ Some(TargetSpec::Cargo(spec)) => {
let is_crate_no_std = snap.analysis.is_crate_no_std(spec.crate_id)?;
for cmd in ["check", "run", "test"] {
if cmd == "run" && spec.target_kind != TargetKind::Bin {
@@ -879,7 +882,7 @@ pub(crate) fn handle_runnables(
),
location: None,
kind: lsp_ext::RunnableKind::Cargo,
- args: lsp_ext::CargoRunnable {
+ args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs {
workspace_root: Some(spec.workspace_root.clone().into()),
cwd: Some(cwd.into()),
override_cargo: config.override_cargo.clone(),
@@ -887,17 +890,18 @@ pub(crate) fn handle_runnables(
cargo_extra_args: config.cargo_extra_args.clone(),
executable_args: Vec::new(),
expect_test: None,
- },
+ }),
})
}
}
+ Some(TargetSpec::ProjectJson(_)) => {}
None => {
if !snap.config.linked_or_discovered_projects().is_empty() {
res.push(lsp_ext::Runnable {
label: "cargo check --workspace".to_owned(),
location: None,
kind: lsp_ext::RunnableKind::Cargo,
- args: lsp_ext::CargoRunnable {
+ args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs {
workspace_root: None,
cwd: None,
override_cargo: config.override_cargo,
@@ -905,7 +909,7 @@ pub(crate) fn handle_runnables(
cargo_extra_args: config.cargo_extra_args,
executable_args: Vec::new(),
expect_test: None,
- },
+ }),
});
}
}
@@ -925,13 +929,13 @@ pub(crate) fn handle_related_tests(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Vec<lsp_ext::TestInfo>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_related_tests").entered();
+ let _p = tracing::info_span!("handle_related_tests").entered();
let position = from_proto::file_position(&snap, params)?;
let tests = snap.analysis.related_tests(position, None)?;
let mut res = Vec::new();
for it in tests {
- if let Ok(runnable) = to_proto::runnable(&snap, it) {
+ if let Ok(Some(runnable)) = to_proto::runnable(&snap, it) {
res.push(lsp_ext::TestInfo { runnable })
}
}
@@ -943,13 +947,13 @@ pub(crate) fn handle_completion(
snap: GlobalStateSnapshot,
lsp_types::CompletionParams { text_document_position, context,.. }: lsp_types::CompletionParams,
) -> anyhow::Result<Option<lsp_types::CompletionResponse>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_completion").entered();
+ let _p = tracing::info_span!("handle_completion").entered();
let mut position = from_proto::file_position(&snap, text_document_position.clone())?;
let line_index = snap.file_line_index(position.file_id)?;
let completion_trigger_character =
context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next());
- let source_root = snap.analysis.source_root(position.file_id)?;
+ let source_root = snap.analysis.source_root_id(position.file_id)?;
let completion_config = &snap.config.completion(Some(source_root));
// FIXME: We should fix up the position when retrying the cancelled request instead
position.offset = position.offset.min(line_index.index.len());
@@ -978,7 +982,7 @@ pub(crate) fn handle_completion_resolve(
snap: GlobalStateSnapshot,
mut original_completion: CompletionItem,
) -> anyhow::Result<CompletionItem> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_completion_resolve").entered();
+ let _p = tracing::info_span!("handle_completion_resolve").entered();
if !all_edits_are_disjoint(&original_completion, &[]) {
return Err(invalid_params_error(
@@ -997,7 +1001,7 @@ pub(crate) fn handle_completion_resolve(
let Ok(offset) = from_proto::offset(&line_index, resolve_data.position.position) else {
return Ok(original_completion);
};
- let source_root = snap.analysis.source_root(file_id)?;
+ let source_root = snap.analysis.source_root_id(file_id)?;
let additional_edits = snap
.analysis
@@ -1035,7 +1039,7 @@ pub(crate) fn handle_folding_range(
snap: GlobalStateSnapshot,
params: FoldingRangeParams,
) -> anyhow::Result<Option<Vec<FoldingRange>>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_folding_range").entered();
+ let _p = tracing::info_span!("handle_folding_range").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let folds = snap.analysis.folding_ranges(file_id)?;
let text = snap.analysis.file_text(file_id)?;
@@ -1052,7 +1056,7 @@ pub(crate) fn handle_signature_help(
snap: GlobalStateSnapshot,
params: lsp_types::SignatureHelpParams,
) -> anyhow::Result<Option<lsp_types::SignatureHelp>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_signature_help").entered();
+ let _p = tracing::info_span!("handle_signature_help").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let help = match snap.analysis.signature_help(position)? {
Some(it) => it,
@@ -1067,7 +1071,7 @@ pub(crate) fn handle_hover(
snap: GlobalStateSnapshot,
params: lsp_ext::HoverParams,
) -> anyhow::Result<Option<lsp_ext::Hover>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_hover").entered();
+ let _p = tracing::info_span!("handle_hover").entered();
let range = match params.position {
PositionOrRange::Position(position) => Range::new(position, position),
PositionOrRange::Range(range) => range,
@@ -1105,7 +1109,7 @@ pub(crate) fn handle_prepare_rename(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Option<PrepareRenameResponse>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_prepare_rename").entered();
+ let _p = tracing::info_span!("handle_prepare_rename").entered();
let position = from_proto::file_position(&snap, params)?;
let change = snap.analysis.prepare_rename(position)?.map_err(to_proto::rename_error)?;
@@ -1119,7 +1123,7 @@ pub(crate) fn handle_rename(
snap: GlobalStateSnapshot,
params: RenameParams,
) -> anyhow::Result<Option<WorkspaceEdit>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_rename").entered();
+ let _p = tracing::info_span!("handle_rename").entered();
let position = from_proto::file_position(&snap, params.text_document_position)?;
let mut change =
@@ -1154,7 +1158,7 @@ pub(crate) fn handle_references(
snap: GlobalStateSnapshot,
params: lsp_types::ReferenceParams,
) -> anyhow::Result<Option<Vec<Location>>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_references").entered();
+ let _p = tracing::info_span!("handle_references").entered();
let position = from_proto::file_position(&snap, params.text_document_position)?;
let exclude_imports = snap.config.find_all_refs_exclude_imports();
@@ -1199,7 +1203,7 @@ pub(crate) fn handle_formatting(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentFormattingParams,
) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_formatting").entered();
+ let _p = tracing::info_span!("handle_formatting").entered();
run_rustfmt(&snap, params.text_document, None)
}
@@ -1208,7 +1212,7 @@ pub(crate) fn handle_range_formatting(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentRangeFormattingParams,
) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_range_formatting").entered();
+ let _p = tracing::info_span!("handle_range_formatting").entered();
run_rustfmt(&snap, params.text_document, Some(params.range))
}
@@ -1217,7 +1221,7 @@ pub(crate) fn handle_code_action(
snap: GlobalStateSnapshot,
params: lsp_types::CodeActionParams,
) -> anyhow::Result<Option<Vec<lsp_ext::CodeAction>>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_code_action").entered();
+ let _p = tracing::info_span!("handle_code_action").entered();
if !snap.config.code_action_literals() {
// We intentionally don't support command-based actions, as those either
@@ -1229,7 +1233,7 @@ pub(crate) fn handle_code_action(
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
- let source_root = snap.analysis.source_root(file_id)?;
+ let source_root = snap.analysis.source_root_id(file_id)?;
let mut assists_config = snap.config.assist(Some(source_root));
assists_config.allowed = params
@@ -1295,7 +1299,7 @@ pub(crate) fn handle_code_action_resolve(
snap: GlobalStateSnapshot,
mut code_action: lsp_ext::CodeAction,
) -> anyhow::Result<lsp_ext::CodeAction> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_code_action_resolve").entered();
+ let _p = tracing::info_span!("handle_code_action_resolve").entered();
let Some(params) = code_action.data.take() else {
return Err(invalid_params_error("code action without data".to_owned()).into());
};
@@ -1307,7 +1311,7 @@ pub(crate) fn handle_code_action_resolve(
let line_index = snap.file_line_index(file_id)?;
let range = from_proto::text_range(&line_index, params.code_action_params.range)?;
let frange = FileRange { file_id, range };
- let source_root = snap.analysis.source_root(file_id)?;
+ let source_root = snap.analysis.source_root_id(file_id)?;
let mut assists_config = snap.config.assist(Some(source_root));
assists_config.allowed = params
@@ -1388,7 +1392,7 @@ pub(crate) fn handle_code_lens(
snap: GlobalStateSnapshot,
params: lsp_types::CodeLensParams,
) -> anyhow::Result<Option<Vec<CodeLens>>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_code_lens").entered();
+ let _p = tracing::info_span!("handle_code_lens").entered();
let lens_config = snap.config.lens();
if lens_config.none() {
@@ -1397,14 +1401,14 @@ pub(crate) fn handle_code_lens(
}
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
- let cargo_target_spec = CargoTargetSpec::for_file(&snap, file_id)?;
+ let target_spec = TargetSpec::for_file(&snap, file_id)?;
let annotations = snap.analysis.annotations(
&AnnotationConfig {
- binary_target: cargo_target_spec
+ binary_target: target_spec
.map(|spec| {
matches!(
- spec.target_kind,
+ spec.target_kind(),
TargetKind::Bin | TargetKind::Example | TargetKind::Test
)
})
@@ -1457,10 +1461,10 @@ pub(crate) fn handle_document_highlight(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentHighlightParams,
) -> anyhow::Result<Option<Vec<lsp_types::DocumentHighlight>>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_document_highlight").entered();
+ let _p = tracing::info_span!("handle_document_highlight").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let line_index = snap.file_line_index(position.file_id)?;
- let source_root = snap.analysis.source_root(position.file_id)?;
+ let source_root = snap.analysis.source_root_id(position.file_id)?;
let refs = match snap
.analysis
@@ -1483,7 +1487,7 @@ pub(crate) fn handle_ssr(
snap: GlobalStateSnapshot,
params: lsp_ext::SsrParams,
) -> anyhow::Result<lsp_types::WorkspaceEdit> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_ssr").entered();
+ let _p = tracing::info_span!("handle_ssr").entered();
let selections = params
.selections
.iter()
@@ -1503,7 +1507,7 @@ pub(crate) fn handle_inlay_hints(
snap: GlobalStateSnapshot,
params: InlayHintParams,
) -> anyhow::Result<Option<Vec<InlayHint>>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_inlay_hints").entered();
+ let _p = tracing::info_span!("handle_inlay_hints").entered();
let document_uri = &params.text_document.uri;
let FileRange { file_id, range } = from_proto::file_range(
&snap,
@@ -1511,13 +1515,12 @@ pub(crate) fn handle_inlay_hints(
params.range,
)?;
let line_index = snap.file_line_index(file_id)?;
- let source_root = snap.analysis.source_root(file_id)?;
let range = TextRange::new(
range.start().min(line_index.index.len()),
range.end().min(line_index.index.len()),
);
- let inlay_hints_config = snap.config.inlay_hints(Some(source_root));
+ let inlay_hints_config = snap.config.inlay_hints();
Ok(Some(
snap.analysis
.inlay_hints(&inlay_hints_config, file_id, Some(range))?
@@ -1539,7 +1542,7 @@ pub(crate) fn handle_inlay_hints_resolve(
snap: GlobalStateSnapshot,
mut original_hint: InlayHint,
) -> anyhow::Result<InlayHint> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_inlay_hints_resolve").entered();
+ let _p = tracing::info_span!("handle_inlay_hints_resolve").entered();
let Some(data) = original_hint.data.take() else { return Ok(original_hint) };
let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?;
@@ -1553,9 +1556,8 @@ pub(crate) fn handle_inlay_hints_resolve(
let line_index = snap.file_line_index(file_id)?;
let hint_position = from_proto::offset(&line_index, original_hint.position)?;
- let source_root = snap.analysis.source_root(file_id)?;
- let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints(Some(source_root));
+ let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints();
forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty();
let resolve_hints = snap.analysis.inlay_hints_resolve(
&forced_resolve_inlay_hints_config,
@@ -1590,7 +1592,7 @@ pub(crate) fn handle_call_hierarchy_prepare(
snap: GlobalStateSnapshot,
params: CallHierarchyPrepareParams,
) -> anyhow::Result<Option<Vec<CallHierarchyItem>>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_call_hierarchy_prepare").entered();
+ let _p = tracing::info_span!("handle_call_hierarchy_prepare").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.call_hierarchy(position)? {
@@ -1612,7 +1614,7 @@ pub(crate) fn handle_call_hierarchy_incoming(
snap: GlobalStateSnapshot,
params: CallHierarchyIncomingCallsParams,
) -> anyhow::Result<Option<Vec<CallHierarchyIncomingCall>>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_call_hierarchy_incoming").entered();
+ let _p = tracing::info_span!("handle_call_hierarchy_incoming").entered();
let item = params.item;
let doc = TextDocumentIdentifier::new(item.uri);
@@ -1647,7 +1649,7 @@ pub(crate) fn handle_call_hierarchy_outgoing(
snap: GlobalStateSnapshot,
params: CallHierarchyOutgoingCallsParams,
) -> anyhow::Result<Option<Vec<CallHierarchyOutgoingCall>>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_call_hierarchy_outgoing").entered();
+ let _p = tracing::info_span!("handle_call_hierarchy_outgoing").entered();
let item = params.item;
let doc = TextDocumentIdentifier::new(item.uri);
@@ -1682,14 +1684,13 @@ pub(crate) fn handle_semantic_tokens_full(
snap: GlobalStateSnapshot,
params: SemanticTokensParams,
) -> anyhow::Result<Option<SemanticTokensResult>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_semantic_tokens_full").entered();
+ let _p = tracing::info_span!("handle_semantic_tokens_full").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;
- let source_root = snap.analysis.source_root(file_id)?;
- let mut highlight_config = snap.config.highlighting_config(Some(source_root));
+ let mut highlight_config = snap.config.highlighting_config();
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
highlight_config.syntactic_name_ref_highlighting =
snap.workspaces.is_empty() || !snap.proc_macros_loaded;
@@ -1700,7 +1701,7 @@ pub(crate) fn handle_semantic_tokens_full(
&line_index,
highlights,
snap.config.semantics_tokens_augments_syntax_tokens(),
- snap.config.highlighting_non_standard_tokens(Some(source_root)),
+ snap.config.highlighting_non_standard_tokens(),
);
// Unconditionally cache the tokens
@@ -1713,14 +1714,13 @@ pub(crate) fn handle_semantic_tokens_full_delta(
snap: GlobalStateSnapshot,
params: SemanticTokensDeltaParams,
) -> anyhow::Result<Option<SemanticTokensFullDeltaResult>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_semantic_tokens_full_delta").entered();
+ let _p = tracing::info_span!("handle_semantic_tokens_full_delta").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;
- let source_root = snap.analysis.source_root(file_id)?;
- let mut highlight_config = snap.config.highlighting_config(Some(source_root));
+ let mut highlight_config = snap.config.highlighting_config();
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
highlight_config.syntactic_name_ref_highlighting =
snap.workspaces.is_empty() || !snap.proc_macros_loaded;
@@ -1731,7 +1731,7 @@ pub(crate) fn handle_semantic_tokens_full_delta(
&line_index,
highlights,
snap.config.semantics_tokens_augments_syntax_tokens(),
- snap.config.highlighting_non_standard_tokens(Some(source_root)),
+ snap.config.highlighting_non_standard_tokens(),
);
let cached_tokens = snap.semantic_tokens_cache.lock().remove(&params.text_document.uri);
@@ -1757,14 +1757,13 @@ pub(crate) fn handle_semantic_tokens_range(
snap: GlobalStateSnapshot,
params: SemanticTokensRangeParams,
) -> anyhow::Result<Option<SemanticTokensRangeResult>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_semantic_tokens_range").entered();
+ let _p = tracing::info_span!("handle_semantic_tokens_range").entered();
let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
let text = snap.analysis.file_text(frange.file_id)?;
let line_index = snap.file_line_index(frange.file_id)?;
- let source_root = snap.analysis.source_root(frange.file_id)?;
- let mut highlight_config = snap.config.highlighting_config(Some(source_root));
+ let mut highlight_config = snap.config.highlighting_config();
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
highlight_config.syntactic_name_ref_highlighting =
snap.workspaces.is_empty() || !snap.proc_macros_loaded;
@@ -1775,7 +1774,7 @@ pub(crate) fn handle_semantic_tokens_range(
&line_index,
highlights,
snap.config.semantics_tokens_augments_syntax_tokens(),
- snap.config.highlighting_non_standard_tokens(Some(source_root)),
+ snap.config.highlighting_non_standard_tokens(),
);
Ok(Some(semantic_tokens.into()))
}
@@ -1784,7 +1783,7 @@ pub(crate) fn handle_open_docs(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<ExternalDocsResponse> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_open_docs").entered();
+ let _p = tracing::info_span!("handle_open_docs").entered();
let position = from_proto::file_position(&snap, params)?;
let ws_and_sysroot = snap.workspaces.iter().find_map(|ws| match &ws.kind {
@@ -1826,12 +1825,12 @@ pub(crate) fn handle_open_cargo_toml(
snap: GlobalStateSnapshot,
params: lsp_ext::OpenCargoTomlParams,
) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_open_cargo_toml").entered();
+ let _p = tracing::info_span!("handle_open_cargo_toml").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
- let cargo_spec = match CargoTargetSpec::for_file(&snap, file_id)? {
- Some(it) => it,
- None => return Ok(None),
+ let cargo_spec = match TargetSpec::for_file(&snap, file_id)? {
+ Some(TargetSpec::Cargo(it)) => it,
+ Some(TargetSpec::ProjectJson(_)) | None => return Ok(None),
};
let cargo_toml_url = to_proto::url_from_abs_path(&cargo_spec.cargo_toml);
@@ -1844,7 +1843,7 @@ pub(crate) fn handle_move_item(
snap: GlobalStateSnapshot,
params: lsp_ext::MoveItemParams,
) -> anyhow::Result<Vec<lsp_ext::SnippetTextEdit>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_move_item").entered();
+ let _p = tracing::info_span!("handle_move_item").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let range = from_proto::file_range(&snap, &params.text_document, params.range)?;
@@ -1866,7 +1865,7 @@ pub(crate) fn handle_view_recursive_memory_layout(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Option<lsp_ext::RecursiveMemoryLayout>> {
- let _p = tracing::span!(tracing::Level::INFO, "handle_view_recursive_memory_layout").entered();
+ let _p = tracing::info_span!("handle_view_recursive_memory_layout").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, params.position)?;
@@ -1959,8 +1958,8 @@ fn runnable_action_links(
return None;
}
- let cargo_spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id).ok()?;
- if should_skip_target(&runnable, cargo_spec.as_ref()) {
+ let target_spec = TargetSpec::for_file(snap, runnable.nav.file_id).ok()?;
+ if should_skip_target(&runnable, target_spec.as_ref()) {
return None;
}
@@ -1970,7 +1969,7 @@ fn runnable_action_links(
}
let title = runnable.title();
- let r = to_proto::runnable(snap, runnable).ok()?;
+ let r = to_proto::runnable(snap, runnable).ok()??;
let mut group = lsp_ext::CommandLinkGroup::default();
@@ -1991,8 +1990,8 @@ fn goto_type_action_links(
snap: &GlobalStateSnapshot,
nav_targets: &[HoverGotoTypeData],
) -> Option<lsp_ext::CommandLinkGroup> {
- if nav_targets.is_empty()
- || !snap.config.hover_actions().goto_type_def
+ if !snap.config.hover_actions().goto_type_def
+ || nav_targets.is_empty()
|| !snap.config.client_commands().goto_location
{
return None;
@@ -2025,13 +2024,13 @@ fn prepare_hover_actions(
.collect()
}
-fn should_skip_target(runnable: &Runnable, cargo_spec: Option<&CargoTargetSpec>) -> bool {
+fn should_skip_target(runnable: &Runnable, cargo_spec: Option<&TargetSpec>) -> bool {
match runnable.kind {
RunnableKind::Bin => {
// Do not suggest binary run on other target than binary
match &cargo_spec {
Some(spec) => !matches!(
- spec.target_kind,
+ spec.target_kind(),
TargetKind::Bin | TargetKind::Example | TargetKind::Test
),
None => true,
@@ -2108,9 +2107,9 @@ fn run_rustfmt(
}
RustfmtConfig::CustomCommand { command, args } => {
let cmd = Utf8PathBuf::from(&command);
- let workspace = CargoTargetSpec::for_file(snap, file_id)?;
- let mut cmd = match workspace {
- Some(spec) => {
+ let target_spec = TargetSpec::for_file(snap, file_id)?;
+ let mut cmd = match target_spec {
+ Some(TargetSpec::Cargo(spec)) => {
// approach: if the command name contains a path separator, join it with the workspace root.
// however, if the path is absolute, joining will result in the absolute path being preserved.
// as a fallback, rely on $PATH-based discovery.
@@ -2123,7 +2122,7 @@ fn run_rustfmt(
};
process::Command::new(cmd_path)
}
- None => process::Command::new(cmd),
+ _ => process::Command::new(cmd),
};
cmd.envs(snap.config.extra_env());
@@ -2237,6 +2236,30 @@ pub(crate) fn fetch_dependency_list(
Ok(FetchDependencyListResult { crates: crate_infos })
}
+pub(crate) fn internal_testing_fetch_config(
+ state: GlobalStateSnapshot,
+ params: InternalTestingFetchConfigParams,
+) -> anyhow::Result<serde_json::Value> {
+ let source_root = params
+ .text_document
+ .map(|it| {
+ state
+ .analysis
+ .source_root_id(from_proto::file_id(&state, &it.uri)?)
+ .map_err(anyhow::Error::from)
+ })
+ .transpose()?;
+ serde_json::to_value(match &*params.config {
+ "local" => state.config.assist(source_root).assist_emit_must_use,
+ "global" => matches!(
+ state.config.rustfmt(),
+ RustfmtConfig::Rustfmt { enable_range_formatting: true, .. }
+ ),
+ _ => return Err(anyhow::anyhow!("Unknown test config key: {}", params.config)),
+ })
+ .map_err(Into::into)
+}
+
/// Searches for the directory of a Rust crate given this crate's root file path.
///
/// # Arguments
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index 5d617780b6..1e2cd4339b 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -74,7 +74,7 @@ fn integrated_highlighting_benchmark() {
host.apply_change(change);
}
- let _g = crate::tracing::hprof::init("*>20");
+ let _g = crate::tracing::hprof::init("*>10");
{
let _it = stdx::timeit("after change");
@@ -160,7 +160,7 @@ fn integrated_completion_benchmark() {
analysis.completions(&config, position, None).unwrap();
}
- let _g = crate::tracing::hprof::init("*");
+ let _g = crate::tracing::hprof::init("*>10");
let completion_offset = {
let _it = stdx::timeit("change");
@@ -175,7 +175,7 @@ fn integrated_completion_benchmark() {
};
{
- let _p = tracing::span!(tracing::Level::INFO, "unqualified path completion").entered();
+ let _p = tracing::info_span!("unqualified path completion").entered();
let _span = profile::cpu_span();
let analysis = host.analysis();
let config = CompletionConfig {
@@ -218,7 +218,7 @@ fn integrated_completion_benchmark() {
};
{
- let _p = tracing::span!(tracing::Level::INFO, "dot completion").entered();
+ let _p = tracing::info_span!("dot completion").entered();
let _span = profile::cpu_span();
let analysis = host.analysis();
let config = CompletionConfig {
@@ -289,6 +289,7 @@ fn integrated_diagnostics_benchmark() {
disabled: Default::default(),
expr_fill_default: Default::default(),
style_lints: false,
+ snippet_cap: SnippetCap::new(true),
insert_use: InsertUseConfig {
granularity: ImportGranularity::Crate,
enforce_granularity: false,
@@ -316,7 +317,7 @@ fn integrated_diagnostics_benchmark() {
};
{
- let _p = tracing::span!(tracing::Level::INFO, "diagnostics").entered();
+ let _p = tracing::info_span!("diagnostics").entered();
let _span = profile::cpu_span();
host.analysis()
.diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id)
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs
index 175ffa622f..a398e98f09 100644
--- a/crates/rust-analyzer/src/lib.rs
+++ b/crates/rust-analyzer/src/lib.rs
@@ -14,17 +14,16 @@
pub mod cli;
mod caps;
-mod cargo_target_spec;
mod diagnostics;
mod diff;
mod dispatch;
-mod global_state;
mod hack_recover_crate_name;
mod line_index;
mod main_loop;
mod mem_docs;
mod op_queue;
mod reload;
+mod target_spec;
mod task_pool;
mod version;
@@ -40,6 +39,7 @@ pub mod tracing {
}
pub mod config;
+mod global_state;
pub mod lsp;
use self::lsp::ext as lsp_ext;
diff --git a/crates/rust-analyzer/src/lsp/ext.rs b/crates/rust-analyzer/src/lsp/ext.rs
index aa75633ac3..b82ba44190 100644
--- a/crates/rust-analyzer/src/lsp/ext.rs
+++ b/crates/rust-analyzer/src/lsp/ext.rs
@@ -3,7 +3,6 @@
#![allow(clippy::disallowed_types)]
use std::ops;
-use std::path::PathBuf;
use ide_db::line_index::WideEncoding;
use lsp_types::request::Request;
@@ -12,11 +11,26 @@ use lsp_types::{
PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams,
};
use lsp_types::{PositionEncodingKind, Url};
+use paths::Utf8PathBuf;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use crate::line_index::PositionEncoding;
+pub enum InternalTestingFetchConfig {}
+
+impl Request for InternalTestingFetchConfig {
+ type Params = InternalTestingFetchConfigParams;
+ type Result = serde_json::Value;
+ const METHOD: &'static str = "rust-analyzer-internal/internalTestingFetchConfig";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct InternalTestingFetchConfigParams {
+ pub text_document: Option<TextDocumentIdentifier>,
+ pub config: String,
+}
pub enum AnalyzerStatus {}
impl Request for AnalyzerStatus {
@@ -425,24 +439,33 @@ pub struct Runnable {
#[serde(skip_serializing_if = "Option::is_none")]
pub location: Option<lsp_types::LocationLink>,
pub kind: RunnableKind,
- pub args: CargoRunnable,
+ pub args: RunnableArgs,
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+#[serde(untagged)]
+pub enum RunnableArgs {
+ Cargo(CargoRunnableArgs),
+ Shell(ShellRunnableArgs),
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "lowercase")]
pub enum RunnableKind {
Cargo,
+ Shell,
}
#[derive(Deserialize, Serialize, Debug)]
#[serde(rename_all = "camelCase")]
-pub struct CargoRunnable {
+pub struct CargoRunnableArgs {
// command to be executed instead of cargo
pub override_cargo: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
- pub workspace_root: Option<PathBuf>,
+ pub workspace_root: Option<Utf8PathBuf>,
#[serde(skip_serializing_if = "Option::is_none")]
- pub cwd: Option<PathBuf>,
+ pub cwd: Option<Utf8PathBuf>,
// command, --package and --lib stuff
pub cargo_args: Vec<String>,
// user-specified additional cargo args, like `--release`.
@@ -453,6 +476,14 @@ pub struct CargoRunnable {
pub expect_test: Option<bool>,
}
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct ShellRunnableArgs {
+ pub program: String,
+ pub args: Vec<String>,
+ pub cwd: Utf8PathBuf,
+}
+
pub enum RelatedTests {}
impl Request for RelatedTests {
diff --git a/crates/rust-analyzer/src/lsp/from_proto.rs b/crates/rust-analyzer/src/lsp/from_proto.rs
index b6b20296d8..60fe847bb7 100644
--- a/crates/rust-analyzer/src/lsp/from_proto.rs
+++ b/crates/rust-analyzer/src/lsp/from_proto.rs
@@ -37,8 +37,9 @@ pub(crate) fn offset(
.ok_or_else(|| format_err!("Invalid wide col offset"))?
}
};
- let text_size =
- line_index.index.offset(line_col).ok_or_else(|| format_err!("Invalid offset"))?;
+ let text_size = line_index.index.offset(line_col).ok_or_else(|| {
+ format_err!("Invalid offset {line_col:?} (line index length: {:?})", line_index.index.len())
+ })?;
Ok(text_size)
}
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index 86368c9eea..db5f666a5b 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -21,16 +21,17 @@ use serde_json::to_value;
use vfs::AbsPath;
use crate::{
- cargo_target_spec::CargoTargetSpec,
config::{CallInfoConfig, Config},
global_state::GlobalStateSnapshot,
line_index::{LineEndings, LineIndex, PositionEncoding},
lsp::{
+ ext::ShellRunnableArgs,
semantic_tokens::{self, standard_fallback_type},
utils::invalid_params_error,
LspError,
},
lsp_ext::{self, SnippetTextEdit},
+ target_spec::{CargoTargetSpec, TargetSpec},
};
pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
@@ -1356,34 +1357,90 @@ pub(crate) fn code_action(
pub(crate) fn runnable(
snap: &GlobalStateSnapshot,
runnable: Runnable,
-) -> Cancellable<lsp_ext::Runnable> {
+) -> Cancellable<Option<lsp_ext::Runnable>> {
let config = snap.config.runnables();
- let spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id)?;
- let workspace_root = spec.as_ref().map(|it| it.workspace_root.clone());
- let cwd = match runnable.kind {
- ide::RunnableKind::Bin { .. } => workspace_root.clone().map(|it| it.into()),
- _ => spec.as_ref().map(|it| it.cargo_toml.parent().into()),
- };
- let target = spec.as_ref().map(|s| s.target.as_str());
- let label = runnable.label(target);
- let (cargo_args, executable_args) =
- CargoTargetSpec::runnable_args(snap, spec, &runnable.kind, &runnable.cfg);
- let location = location_link(snap, None, runnable.nav)?;
+ let target_spec = TargetSpec::for_file(snap, runnable.nav.file_id)?;
- Ok(lsp_ext::Runnable {
- label,
- location: Some(location),
- kind: lsp_ext::RunnableKind::Cargo,
- args: lsp_ext::CargoRunnable {
- workspace_root: workspace_root.map(|it| it.into()),
- cwd,
- override_cargo: config.override_cargo,
- cargo_args,
- cargo_extra_args: config.cargo_extra_args,
- executable_args,
- expect_test: None,
- },
- })
+ match target_spec {
+ Some(TargetSpec::Cargo(spec)) => {
+ let workspace_root = spec.workspace_root.clone();
+
+ let target = spec.target.clone();
+
+ let (cargo_args, executable_args) = CargoTargetSpec::runnable_args(
+ snap,
+ Some(spec.clone()),
+ &runnable.kind,
+ &runnable.cfg,
+ );
+
+ let cwd = match runnable.kind {
+ ide::RunnableKind::Bin { .. } => workspace_root.clone(),
+ _ => spec.cargo_toml.parent().to_owned(),
+ };
+
+ let label = runnable.label(Some(&target));
+ let location = location_link(snap, None, runnable.nav)?;
+
+ Ok(Some(lsp_ext::Runnable {
+ label,
+ location: Some(location),
+ kind: lsp_ext::RunnableKind::Cargo,
+ args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs {
+ workspace_root: Some(workspace_root.into()),
+ override_cargo: config.override_cargo,
+ cargo_args,
+ cwd: Some(cwd.into()),
+ cargo_extra_args: config.cargo_extra_args,
+ executable_args,
+ expect_test: None,
+ }),
+ }))
+ }
+ Some(TargetSpec::ProjectJson(spec)) => {
+ let label = runnable.label(Some(&spec.label));
+ let location = location_link(snap, None, runnable.nav)?;
+
+ match spec.runnable_args(&runnable.kind) {
+ Some(json_shell_runnable_args) => {
+ let runnable_args = ShellRunnableArgs {
+ program: json_shell_runnable_args.program,
+ args: json_shell_runnable_args.args,
+ cwd: json_shell_runnable_args.cwd,
+ };
+ Ok(Some(lsp_ext::Runnable {
+ label,
+ location: Some(location),
+ kind: lsp_ext::RunnableKind::Shell,
+ args: lsp_ext::RunnableArgs::Shell(runnable_args),
+ }))
+ }
+ None => Ok(None),
+ }
+ }
+ None => {
+ let (cargo_args, executable_args) =
+ CargoTargetSpec::runnable_args(snap, None, &runnable.kind, &runnable.cfg);
+
+ let label = runnable.label(None);
+ let location = location_link(snap, None, runnable.nav)?;
+
+ Ok(Some(lsp_ext::Runnable {
+ label,
+ location: Some(location),
+ kind: lsp_ext::RunnableKind::Cargo,
+ args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs {
+ workspace_root: None,
+ override_cargo: config.override_cargo,
+ cargo_args,
+ cwd: None,
+ cargo_extra_args: config.cargo_extra_args,
+ executable_args,
+ expect_test: None,
+ }),
+ }))
+ }
+ }
}
pub(crate) fn code_lens(
@@ -1407,33 +1464,37 @@ pub(crate) fn code_lens(
};
let r = runnable(snap, run)?;
- let lens_config = snap.config.lens();
- if lens_config.run
- && client_commands_config.run_single
- && r.args.workspace_root.is_some()
- {
- let command = command::run_single(&r, &title);
- acc.push(lsp_types::CodeLens {
- range: annotation_range,
- command: Some(command),
- data: None,
- })
- }
- if lens_config.debug && can_debug && client_commands_config.debug_single {
- let command = command::debug_single(&r);
- acc.push(lsp_types::CodeLens {
- range: annotation_range,
- command: Some(command),
- data: None,
- })
- }
- if lens_config.interpret {
- let command = command::interpret_single(&r);
- acc.push(lsp_types::CodeLens {
- range: annotation_range,
- command: Some(command),
- data: None,
- })
+ if let Some(r) = r {
+ let has_root = match &r.args {
+ lsp_ext::RunnableArgs::Cargo(c) => c.workspace_root.is_some(),
+ lsp_ext::RunnableArgs::Shell(_) => true,
+ };
+
+ let lens_config = snap.config.lens();
+ if lens_config.run && client_commands_config.run_single && has_root {
+ let command = command::run_single(&r, &title);
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command: Some(command),
+ data: None,
+ })
+ }
+ if lens_config.debug && can_debug && client_commands_config.debug_single {
+ let command = command::debug_single(&r);
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command: Some(command),
+ data: None,
+ })
+ }
+ if lens_config.interpret {
+ let command = command::interpret_single(&r);
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command: Some(command),
+ data: None,
+ })
+ }
}
}
AnnotationKind::HasImpls { pos, data } => {
@@ -1538,12 +1599,8 @@ pub(crate) fn test_item(
id: test_item.id,
label: test_item.label,
kind: match test_item.kind {
- ide::TestItemKind::Crate(id) => 'b: {
- let Some((cargo_ws, target)) = snap.cargo_target_for_crate_root(id) else {
- break 'b lsp_ext::TestItemKind::Package;
- };
- let target = &cargo_ws[target];
- match target.kind {
+ ide::TestItemKind::Crate(id) => match snap.target_spec_for_crate(id) {
+ Some(target_spec) => match target_spec.target_kind() {
project_model::TargetKind::Bin
| project_model::TargetKind::Lib { .. }
| project_model::TargetKind::Example
@@ -1552,8 +1609,9 @@ pub(crate) fn test_item(
project_model::TargetKind::Test => lsp_ext::TestItemKind::Test,
+                        // benches are not tests that need to be shown in the test explorer
project_model::TargetKind::Bench => return None,
- }
- }
+ },
+ None => lsp_ext::TestItemKind::Package,
+ },
ide::TestItemKind::Module => lsp_ext::TestItemKind::Module,
ide::TestItemKind::Function => lsp_ext::TestItemKind::Test,
},
@@ -1566,7 +1624,7 @@ pub(crate) fn test_item(
.file
.map(|f| lsp_types::TextDocumentIdentifier { uri: url(snap, f) }),
range: line_index.and_then(|l| Some(range(l, test_item.text_range?))),
- runnable: test_item.runnable.and_then(|r| runnable(snap, r).ok()),
+ runnable: test_item.runnable.and_then(|r| runnable(snap, r).ok()).flatten(),
})
}
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 193b3fdd4a..07414a6e49 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -3,6 +3,7 @@
use std::{
fmt,
+ ops::Div as _,
time::{Duration, Instant},
};
@@ -17,7 +18,7 @@ use vfs::FileId;
use crate::{
config::Config,
- diagnostics::fetch_native_diagnostics,
+ diagnostics::{fetch_native_diagnostics, DiagnosticsGeneration},
dispatch::{NotificationDispatcher, RequestDispatcher},
global_state::{file_id_to_url, url_to_file_id, GlobalState},
hack_recover_crate_name,
@@ -87,7 +88,7 @@ pub(crate) enum Task {
Response(lsp_server::Response),
ClientNotification(lsp_ext::UnindexedProjectParams),
Retry(lsp_server::Request),
- Diagnostics(Vec<(FileId, Vec<lsp_types::Diagnostic>)>),
+ Diagnostics(DiagnosticsGeneration, Vec<(FileId, Vec<lsp_types::Diagnostic>)>),
DiscoverTest(lsp_ext::DiscoverTestResults),
PrimeCaches(PrimeCachesProgress),
FetchWorkspace(ProjectWorkspaceProgress),
@@ -186,6 +187,11 @@ impl GlobalState {
scheme: None,
pattern: Some("**/Cargo.lock".into()),
},
+ lsp_types::DocumentFilter {
+ language: None,
+ scheme: None,
+ pattern: Some("**/rust-analyzer.toml".into()),
+ },
]),
},
};
@@ -230,7 +236,7 @@ impl GlobalState {
fn handle_event(&mut self, event: Event) -> anyhow::Result<()> {
let loop_start = Instant::now();
// NOTE: don't count blocking select! call as a loop-turn time
- let _p = tracing::span!(Level::INFO, "GlobalState::handle_event", event = %event).entered();
+ let _p = tracing::info_span!("GlobalState::handle_event", event = %event).entered();
let event_dbg_msg = format!("{event:?}");
tracing::debug!(?loop_start, ?event, "handle_event");
@@ -249,9 +255,7 @@ impl GlobalState {
lsp_server::Message::Response(resp) => self.complete_request(resp),
},
Event::QueuedTask(task) => {
- let _p =
- tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/queued_task")
- .entered();
+ let _p = tracing::info_span!("GlobalState::handle_event/queued_task").entered();
self.handle_queued_task(task);
// Coalesce multiple task events into one loop turn
while let Ok(task) = self.deferred_task_queue.receiver.try_recv() {
@@ -259,8 +263,7 @@ impl GlobalState {
}
}
Event::Task(task) => {
- let _p = tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/task")
- .entered();
+ let _p = tracing::info_span!("GlobalState::handle_event/task").entered();
let mut prime_caches_progress = Vec::new();
self.handle_task(&mut prime_caches_progress, task);
@@ -314,8 +317,7 @@ impl GlobalState {
}
}
Event::Vfs(message) => {
- let _p =
- tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/vfs").entered();
+ let _p = tracing::info_span!("GlobalState::handle_event/vfs").entered();
self.handle_vfs_msg(message);
// Coalesce many VFS event into a single loop turn
while let Ok(message) = self.loader.receiver.try_recv() {
@@ -323,8 +325,7 @@ impl GlobalState {
}
}
Event::Flycheck(message) => {
- let _p = tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/flycheck")
- .entered();
+ let _p = tracing::info_span!("GlobalState::handle_event/flycheck").entered();
self.handle_flycheck_msg(message);
// Coalesce many flycheck updates into a single loop turn
while let Ok(message) = self.flycheck_receiver.try_recv() {
@@ -332,9 +333,7 @@ impl GlobalState {
}
}
Event::TestResult(message) => {
- let _p =
- tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/test_result")
- .entered();
+ let _p = tracing::info_span!("GlobalState::handle_event/test_result").entered();
self.handle_cargo_test_msg(message);
// Coalesce many test result event into a single loop turn
while let Ok(message) = self.test_run_receiver.try_recv() {
@@ -481,6 +480,7 @@ impl GlobalState {
fn update_diagnostics(&mut self) {
let db = self.analysis_host.raw_database();
+ let generation = self.diagnostics.next_generation();
let subscriptions = {
let vfs = &self.vfs.read().0;
self.mem_docs
@@ -495,16 +495,37 @@ impl GlobalState {
// forever if we emitted them here.
!db.source_root(source_root).is_library
})
- .collect::<Vec<_>>()
+ .collect::<std::sync::Arc<_>>()
};
tracing::trace!("updating notifications for {:?}", subscriptions);
-
- // Diagnostics are triggered by the user typing
- // so we run them on a latency sensitive thread.
- self.task_pool.handle.spawn(ThreadIntent::LatencySensitive, {
- let snapshot = self.snapshot();
- move || Task::Diagnostics(fetch_native_diagnostics(snapshot, subscriptions))
- });
+ // Split up the work on multiple threads, but we don't wanna fill the entire task pool with
+ // diagnostic tasks, so we limit the number of tasks to a quarter of the total thread pool.
+ let max_tasks = self.config.main_loop_num_threads().div(4).max(1);
+ let chunk_length = subscriptions.len() / max_tasks;
+ let remainder = subscriptions.len() % max_tasks;
+
+ let mut start = 0;
+ for task_idx in 0..max_tasks {
+ let extra = if task_idx < remainder { 1 } else { 0 };
+ let end = start + chunk_length + extra;
+ let slice = start..end;
+ if slice.is_empty() {
+ break;
+ }
+ // Diagnostics are triggered by the user typing
+ // so we run them on a latency sensitive thread.
+ self.task_pool.handle.spawn(ThreadIntent::LatencySensitive, {
+ let snapshot = self.snapshot();
+ let subscriptions = subscriptions.clone();
+ move || {
+ Task::Diagnostics(
+ generation,
+ fetch_native_diagnostics(snapshot, subscriptions, slice),
+ )
+ }
+ });
+ start = end;
+ }
}
fn update_tests(&mut self) {
@@ -591,9 +612,9 @@ impl GlobalState {
// Only retry requests that haven't been cancelled. Otherwise we do unnecessary work.
Task::Retry(req) if !self.is_completed(&req) => self.on_request(req),
Task::Retry(_) => (),
- Task::Diagnostics(diagnostics_per_file) => {
+ Task::Diagnostics(generation, diagnostics_per_file) => {
for (file_id, diagnostics) in diagnostics_per_file {
- self.diagnostics.set_native_diagnostics(file_id, diagnostics)
+ self.diagnostics.set_native_diagnostics(generation, file_id, diagnostics)
}
}
Task::PrimeCaches(progress) => match progress {
@@ -669,12 +690,11 @@ impl GlobalState {
}
fn handle_vfs_msg(&mut self, message: vfs::loader::Message) {
- let _p = tracing::span!(Level::INFO, "GlobalState::handle_vfs_msg").entered();
+ let _p = tracing::info_span!("GlobalState::handle_vfs_msg").entered();
let is_changed = matches!(message, vfs::loader::Message::Changed { .. });
match message {
vfs::loader::Message::Changed { files } | vfs::loader::Message::Loaded { files } => {
- let _p = tracing::span!(Level::INFO, "GlobalState::handle_vfs_msg{changed/load}")
- .entered();
+ let _p = tracing::info_span!("GlobalState::handle_vfs_msg{changed/load}").entered();
let vfs = &mut self.vfs.write().0;
for (path, contents) in files {
let path = VfsPath::from(path);
@@ -688,8 +708,7 @@ impl GlobalState {
}
}
vfs::loader::Message::Progress { n_total, n_done, dir, config_version } => {
- let _p =
- tracing::span!(Level::INFO, "GlobalState::handle_vfs_mgs/progress").entered();
+ let _p = tracing::info_span!("GlobalState::handle_vfs_mgs/progress").entered();
always!(config_version <= self.vfs_config_version);
let state = match n_done {
@@ -731,8 +750,7 @@ impl GlobalState {
let snap = self.snapshot();
self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| {
- let _p = tracing::span!(tracing::Level::INFO, "GlobalState::check_if_indexed")
- .entered();
+ let _p = tracing::info_span!("GlobalState::check_if_indexed").entered();
tracing::debug!(?uri, "handling uri");
let id = from_proto::file_id(&snap, &uri).expect("unable to get FileId");
if let Ok(crates) = &snap.analysis.crates_for(id) {
@@ -981,6 +999,8 @@ impl GlobalState {
.on::<NO_RETRY, lsp_ext::ExternalDocs>(handlers::handle_open_docs)
.on::<NO_RETRY, lsp_ext::OpenCargoToml>(handlers::handle_open_cargo_toml)
.on::<NO_RETRY, lsp_ext::MoveItem>(handlers::handle_move_item)
+ //
+ .on::<NO_RETRY, lsp_ext::InternalTestingFetchConfig>(handlers::internal_testing_fetch_config)
.finish();
}
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 627be7e951..bd0f733ef3 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -24,6 +24,7 @@ use ide_db::{
};
use itertools::Itertools;
use load_cargo::{load_proc_macro, ProjectFolders};
+use lsp_types::FileSystemWatcher;
use proc_macro_api::ProcMacroServer;
use project_model::{ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts};
use stdx::{format_to, thread::ThreadIntent};
@@ -70,8 +71,7 @@ impl GlobalState {
}
pub(crate) fn update_configuration(&mut self, config: Config) {
- let _p =
- tracing::span!(tracing::Level::INFO, "GlobalState::update_configuration").entered();
+ let _p = tracing::info_span!("GlobalState::update_configuration").entered();
let old_config = mem::replace(&mut self.config, Arc::new(config));
if self.config.lru_parse_query_capacity() != old_config.lru_parse_query_capacity() {
self.analysis_host.update_lru_capacity(self.config.lru_parse_query_capacity());
@@ -372,7 +372,7 @@ impl GlobalState {
}
pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
- let _p = tracing::span!(tracing::Level::INFO, "GlobalState::switch_workspaces").entered();
+ let _p = tracing::info_span!("GlobalState::switch_workspaces").entered();
tracing::info!(%cause, "will switch workspaces");
let Some((workspaces, force_reload_crate_graph)) =
@@ -440,43 +440,67 @@ impl GlobalState {
}
if let FilesWatcher::Client = self.config.files().watcher {
- let filter =
- self.workspaces.iter().flat_map(|ws| ws.to_roots()).filter(|it| it.is_local);
-
- let watchers = if self.config.did_change_watched_files_relative_pattern_support() {
- // When relative patterns are supported by the client, prefer using them
- filter
- .flat_map(|root| {
- root.include.into_iter().flat_map(|base| {
- [(base.clone(), "**/*.rs"), (base, "**/Cargo.{lock,toml}")]
+ let filter = self
+ .workspaces
+ .iter()
+ .flat_map(|ws| ws.to_roots())
+ .filter(|it| it.is_local)
+ .map(|it| it.include);
+
+ let mut watchers: Vec<FileSystemWatcher> =
+ if self.config.did_change_watched_files_relative_pattern_support() {
+ // When relative patterns are supported by the client, prefer using them
+ filter
+ .flat_map(|include| {
+ include.into_iter().flat_map(|base| {
+ [
+ (base.clone(), "**/*.rs"),
+ (base.clone(), "**/Cargo.{lock,toml}"),
+ (base, "**/rust-analyzer.toml"),
+ ]
+ })
})
- })
- .map(|(base, pat)| lsp_types::FileSystemWatcher {
- glob_pattern: lsp_types::GlobPattern::Relative(
- lsp_types::RelativePattern {
- base_uri: lsp_types::OneOf::Right(
- lsp_types::Url::from_file_path(base).unwrap(),
- ),
- pattern: pat.to_owned(),
- },
- ),
- kind: None,
- })
- .collect()
- } else {
- // When they're not, integrate the base to make them into absolute patterns
- filter
- .flat_map(|root| {
- root.include.into_iter().flat_map(|base| {
- [format!("{base}/**/*.rs"), format!("{base}/**/Cargo.{{lock,toml}}")]
+ .map(|(base, pat)| lsp_types::FileSystemWatcher {
+ glob_pattern: lsp_types::GlobPattern::Relative(
+ lsp_types::RelativePattern {
+ base_uri: lsp_types::OneOf::Right(
+ lsp_types::Url::from_file_path(base).unwrap(),
+ ),
+ pattern: pat.to_owned(),
+ },
+ ),
+ kind: None,
})
- })
+ .collect()
+ } else {
+ // When they're not, integrate the base to make them into absolute patterns
+ filter
+ .flat_map(|include| {
+ include.into_iter().flat_map(|base| {
+ [
+ format!("{base}/**/*.rs"),
+ format!("{base}/**/Cargo.{{toml,lock}}"),
+ format!("{base}/**/rust-analyzer.toml"),
+ ]
+ })
+ })
+ .map(|glob_pattern| lsp_types::FileSystemWatcher {
+ glob_pattern: lsp_types::GlobPattern::String(glob_pattern),
+ kind: None,
+ })
+ .collect()
+ };
+
+ watchers.extend(
+ iter::once(self.config.user_config_path().as_path())
+ .chain(iter::once(self.config.root_ratoml_path().as_path()))
+ .chain(self.workspaces.iter().map(|ws| ws.manifest().map(ManifestPath::as_ref)))
+ .flatten()
.map(|glob_pattern| lsp_types::FileSystemWatcher {
- glob_pattern: lsp_types::GlobPattern::String(glob_pattern),
+ glob_pattern: lsp_types::GlobPattern::String(glob_pattern.to_string()),
kind: None,
- })
- .collect()
- };
+ }),
+ );
let registration_options =
lsp_types::DidChangeWatchedFilesRegistrationOptions { watchers };
@@ -548,7 +572,7 @@ impl GlobalState {
version: self.vfs_config_version,
});
self.source_root_config = project_folders.source_root_config;
- self.local_roots_parent_map = self.source_root_config.source_root_parent_map();
+ self.local_roots_parent_map = Arc::new(self.source_root_config.source_root_parent_map());
self.recreate_crate_graph(cause);
@@ -660,7 +684,7 @@ impl GlobalState {
}
fn reload_flycheck(&mut self) {
- let _p = tracing::span!(tracing::Level::INFO, "GlobalState::reload_flycheck").entered();
+ let _p = tracing::info_span!("GlobalState::reload_flycheck").entered();
let config = self.config.flycheck();
let sender = self.flycheck_sender.clone();
let invocation_strategy = match config {
diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/target_spec.rs
index 693a35b91e..6145f7e05f 100644
--- a/crates/rust-analyzer/src/cargo_target_spec.rs
+++ b/crates/rust-analyzer/src/target_spec.rs
@@ -1,20 +1,52 @@
-//! See `CargoTargetSpec`
+//! See `TargetSpec`
use std::mem;
use cfg::{CfgAtom, CfgExpr};
use ide::{Cancellable, CrateId, FileId, RunnableKind, TestId};
+use project_model::project_json::Runnable;
use project_model::{CargoFeatures, ManifestPath, TargetKind};
use rustc_hash::FxHashSet;
use vfs::AbsPathBuf;
use crate::global_state::GlobalStateSnapshot;
+/// A target represents a thing we can build or test.
+///
+/// We use it to calculate the CLI arguments required to build, run or
+/// test the target.
+#[derive(Clone, Debug)]
+pub(crate) enum TargetSpec {
+ Cargo(CargoTargetSpec),
+ ProjectJson(ProjectJsonTargetSpec),
+}
+
+impl TargetSpec {
+ pub(crate) fn for_file(
+ global_state_snapshot: &GlobalStateSnapshot,
+ file_id: FileId,
+ ) -> Cancellable<Option<Self>> {
+ let crate_id = match &*global_state_snapshot.analysis.crates_for(file_id)? {
+ &[crate_id, ..] => crate_id,
+ _ => return Ok(None),
+ };
+
+ Ok(global_state_snapshot.target_spec_for_crate(crate_id))
+ }
+
+ pub(crate) fn target_kind(&self) -> TargetKind {
+ match self {
+ TargetSpec::Cargo(cargo) => cargo.target_kind,
+ TargetSpec::ProjectJson(project_json) => project_json.target_kind,
+ }
+ }
+}
+
/// Abstract representation of Cargo target.
///
/// We use it to cook up the set of cli args we need to pass to Cargo to
/// build/test/run the target.
-#[derive(Clone)]
+#[derive(Clone, Debug)]
pub(crate) struct CargoTargetSpec {
pub(crate) workspace_root: AbsPathBuf,
pub(crate) cargo_toml: ManifestPath,
@@ -26,6 +58,51 @@ pub(crate) struct CargoTargetSpec {
pub(crate) features: FxHashSet<String>,
}
+#[derive(Clone, Debug)]
+pub(crate) struct ProjectJsonTargetSpec {
+ pub(crate) label: String,
+ pub(crate) target_kind: TargetKind,
+ pub(crate) shell_runnables: Vec<Runnable>,
+}
+
+impl ProjectJsonTargetSpec {
+ pub(crate) fn runnable_args(&self, kind: &RunnableKind) -> Option<Runnable> {
+ match kind {
+ RunnableKind::Bin => {
+ for runnable in &self.shell_runnables {
+ if matches!(runnable.kind, project_model::project_json::RunnableKind::Run) {
+ return Some(runnable.clone());
+ }
+ }
+
+ None
+ }
+ RunnableKind::Test { test_id, .. } => {
+ for runnable in &self.shell_runnables {
+ if matches!(runnable.kind, project_model::project_json::RunnableKind::TestOne) {
+ let mut runnable = runnable.clone();
+
+ let replaced_args: Vec<_> = runnable
+ .args
+ .iter()
+ .map(|arg| arg.replace("{test_id}", &test_id.to_string()))
+ .map(|arg| arg.replace("{label}", &self.label))
+ .collect();
+ runnable.args = replaced_args;
+
+ return Some(runnable);
+ }
+ }
+
+ None
+ }
+ RunnableKind::TestMod { .. } => None,
+ RunnableKind::Bench { .. } => None,
+ RunnableKind::DocTest { .. } => None,
+ }
+ }
+}
+
impl CargoTargetSpec {
pub(crate) fn runnable_args(
snap: &GlobalStateSnapshot,
@@ -122,35 +199,6 @@ impl CargoTargetSpec {
(cargo_args, executable_args)
}
- pub(crate) fn for_file(
- global_state_snapshot: &GlobalStateSnapshot,
- file_id: FileId,
- ) -> Cancellable<Option<CargoTargetSpec>> {
- let crate_id = match &*global_state_snapshot.analysis.crates_for(file_id)? {
- &[crate_id, ..] => crate_id,
- _ => return Ok(None),
- };
- let (cargo_ws, target) = match global_state_snapshot.cargo_target_for_crate_root(crate_id) {
- Some(it) => it,
- None => return Ok(None),
- };
-
- let target_data = &cargo_ws[target];
- let package_data = &cargo_ws[target_data.package];
- let res = CargoTargetSpec {
- workspace_root: cargo_ws.workspace_root().to_path_buf(),
- cargo_toml: package_data.manifest.clone(),
- package: cargo_ws.package_flag(package_data),
- target: target_data.name.clone(),
- target_kind: target_data.kind,
- required_features: target_data.required_features.clone(),
- features: package_data.features.keys().cloned().collect(),
- crate_id,
- };
-
- Ok(Some(res))
- }
-
pub(crate) fn push_to(self, buf: &mut Vec<String>, kind: &RunnableKind) {
buf.push("--package".to_owned());
buf.push(self.package);
diff --git a/crates/rust-analyzer/src/tracing/config.rs b/crates/rust-analyzer/src/tracing/config.rs
index f77d989330..fcdbf6c694 100644
--- a/crates/rust-analyzer/src/tracing/config.rs
+++ b/crates/rust-analyzer/src/tracing/config.rs
@@ -13,6 +13,7 @@ use tracing_tree::HierarchicalLayer;
use crate::tracing::hprof;
+#[derive(Debug)]
pub struct Config<T> {
pub writer: T,
pub filter: String,
diff --git a/crates/rust-analyzer/src/tracing/hprof.rs b/crates/rust-analyzer/src/tracing/hprof.rs
index 73f94671f2..2d1604e70b 100644
--- a/crates/rust-analyzer/src/tracing/hprof.rs
+++ b/crates/rust-analyzer/src/tracing/hprof.rs
@@ -1,8 +1,8 @@
//! Consumer of `tracing` data, which prints a hierarchical profile.
//!
-//! Based on https://github.com/davidbarsky/tracing-tree, but does less, while
+//! Based on <https://github.com/davidbarsky/tracing-tree>, but does less, while
//! actually printing timings for spans by default. The code here is vendored from
-//! https://github.com/matklad/tracing-span-tree.
+//! <https://github.com/matklad/tracing-span-tree>.
//!
//! Usage:
//!
@@ -199,7 +199,7 @@ impl Node {
let _ = write!(out, " ({} calls)", self.count);
}
- eprintln!("{}", out);
+ eprintln!("{out}");
for child in &self.children {
child.go(level + 1, filter)
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index 43a8305010..f886df60e6 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -11,6 +11,7 @@
#![warn(rust_2018_idioms, unused_lifetimes)]
#![allow(clippy::disallowed_types)]
+mod ratoml;
#[cfg(not(feature = "in-rust-tree"))]
mod sourcegen;
mod support;
@@ -30,15 +31,15 @@ use lsp_types::{
InlayHint, InlayHintLabel, InlayHintParams, PartialResultParams, Position, Range,
RenameFilesParams, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams,
};
+
use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams, UnindexedProject};
use serde_json::json;
use stdx::format_to_acc;
+
use test_utils::skip_slow_tests;
+use testdir::TestDir;
-use crate::{
- support::{project, Project},
- testdir::TestDir,
-};
+use crate::support::{project, Project};
#[test]
fn completes_items_from_standard_library() {
diff --git a/crates/rust-analyzer/tests/slow-tests/ratoml.rs b/crates/rust-analyzer/tests/slow-tests/ratoml.rs
new file mode 100644
index 0000000000..218a9a32ad
--- /dev/null
+++ b/crates/rust-analyzer/tests/slow-tests/ratoml.rs
@@ -0,0 +1,947 @@
+use crate::support::{Project, Server};
+use crate::testdir::TestDir;
+use lsp_types::{
+ notification::{DidChangeTextDocument, DidOpenTextDocument, DidSaveTextDocument},
+ DidChangeTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams,
+ TextDocumentContentChangeEvent, TextDocumentIdentifier, TextDocumentItem, Url,
+ VersionedTextDocumentIdentifier,
+};
+use paths::Utf8PathBuf;
+
+use rust_analyzer::lsp::ext::{InternalTestingFetchConfig, InternalTestingFetchConfigParams};
+use serde_json::json;
+
+enum QueryType {
+ Local,
+ /// A query whose config key is a part of the global configs, so that
+ /// testing for changes to this config means testing if global changes
+    /// take effect.
+ Global,
+}
+
+struct RatomlTest {
+ urls: Vec<Url>,
+ server: Server,
+ tmp_path: Utf8PathBuf,
+ user_config_dir: Utf8PathBuf,
+}
+
+impl RatomlTest {
+ const EMIT_MUST_USE: &'static str = r#"assist.emitMustUse = true"#;
+ const EMIT_MUST_NOT_USE: &'static str = r#"assist.emitMustUse = false"#;
+
+ const GLOBAL_TRAIT_ASSOC_ITEMS_ZERO: &'static str = r#"hover.show.traitAssocItems = 0"#;
+
+ fn new(
+ fixtures: Vec<&str>,
+ roots: Vec<&str>,
+ client_config: Option<serde_json::Value>,
+ ) -> Self {
+ let tmp_dir = TestDir::new();
+ let tmp_path = tmp_dir.path().to_owned();
+
+ let full_fixture = fixtures.join("\n");
+
+ let user_cnf_dir = TestDir::new();
+ let user_config_dir = user_cnf_dir.path().to_owned();
+
+ let mut project =
+ Project::with_fixture(&full_fixture).tmp_dir(tmp_dir).user_config_dir(user_cnf_dir);
+
+ for root in roots {
+ project = project.root(root);
+ }
+
+ if let Some(client_config) = client_config {
+ project = project.with_config(client_config);
+ }
+
+ let server = project.server().wait_until_workspace_is_loaded();
+
+ let mut case = Self { urls: vec![], server, tmp_path, user_config_dir };
+ let urls = fixtures.iter().map(|fixture| case.fixture_path(fixture)).collect::<Vec<_>>();
+ case.urls = urls;
+ case
+ }
+
+ fn fixture_path(&self, fixture: &str) -> Url {
+ let mut lines = fixture.trim().split('\n');
+
+ let mut path =
+ lines.next().expect("All files in a fixture are expected to have at least one line.");
+
+ if path.starts_with("//- minicore") {
+ path = lines.next().expect("A minicore line must be followed by a path.")
+ }
+
+ path = path.strip_prefix("//- ").expect("Path must be preceded by a //- prefix ");
+
+ let spl = path[1..].split('/');
+ let mut path = self.tmp_path.clone();
+
+ let mut spl = spl.into_iter();
+ if let Some(first) = spl.next() {
+ if first == "$$CONFIG_DIR$$" {
+ path = self.user_config_dir.clone();
+ } else {
+ path = path.join(first);
+ }
+ }
+ for piece in spl {
+ path = path.join(piece);
+ }
+
+ Url::parse(
+ format!(
+ "file://{}",
+ path.into_string().to_owned().replace("C:\\", "/c:/").replace('\\', "/")
+ )
+ .as_str(),
+ )
+ .unwrap()
+ }
+
+ fn create(&mut self, fixture_path: &str, text: String) {
+ let url = self.fixture_path(fixture_path);
+
+ self.server.notification::<DidOpenTextDocument>(DidOpenTextDocumentParams {
+ text_document: TextDocumentItem {
+ uri: url.clone(),
+ language_id: "rust".to_owned(),
+ version: 0,
+ text: String::new(),
+ },
+ });
+
+ self.server.notification::<DidChangeTextDocument>(DidChangeTextDocumentParams {
+ text_document: VersionedTextDocumentIdentifier { uri: url, version: 0 },
+ content_changes: vec![TextDocumentContentChangeEvent {
+ range: None,
+ range_length: None,
+ text,
+ }],
+ });
+ }
+
+ fn delete(&mut self, file_idx: usize) {
+ self.server.notification::<DidOpenTextDocument>(DidOpenTextDocumentParams {
+ text_document: TextDocumentItem {
+ uri: self.urls[file_idx].clone(),
+ language_id: "rust".to_owned(),
+ version: 0,
+ text: "".to_owned(),
+ },
+ });
+
+        // Check whether deleting the ratoml file makes the config of interest return to its default value.
+ self.server.notification::<DidSaveTextDocument>(DidSaveTextDocumentParams {
+ text_document: TextDocumentIdentifier { uri: self.urls[file_idx].clone() },
+ text: Some("".to_owned()),
+ });
+ }
+
+ fn edit(&mut self, file_idx: usize, text: String) {
+ self.server.notification::<DidOpenTextDocument>(DidOpenTextDocumentParams {
+ text_document: TextDocumentItem {
+ uri: self.urls[file_idx].clone(),
+ language_id: "rust".to_owned(),
+ version: 0,
+ text: String::new(),
+ },
+ });
+
+ self.server.notification::<DidChangeTextDocument>(DidChangeTextDocumentParams {
+ text_document: VersionedTextDocumentIdentifier {
+ uri: self.urls[file_idx].clone(),
+ version: 0,
+ },
+ content_changes: vec![TextDocumentContentChangeEvent {
+ range: None,
+ range_length: None,
+ text,
+ }],
+ });
+ }
+
+ fn query(&self, query: QueryType, source_file_idx: usize) -> bool {
+ let config = match query {
+ QueryType::Local => "local".to_owned(),
+ QueryType::Global => "global".to_owned(),
+ };
+ let res = self.server.send_request::<InternalTestingFetchConfig>(
+ InternalTestingFetchConfigParams {
+ text_document: Some(TextDocumentIdentifier {
+ uri: self.urls[source_file_idx].clone(),
+ }),
+ config,
+ },
+ );
+ res.as_bool().unwrap()
+ }
+}
+
+// /// Check if we are listening for changes in the user's config file (e.g. on Linux `~/.config/rust-analyzer/.rust-analyzer.toml`)
+// #[test]
+// #[cfg(target_os = "windows")]
+// fn listen_to_user_config_scenario_windows() {
+// todo!()
+// }
+
+// #[test]
+// #[cfg(target_os = "linux")]
+// fn listen_to_user_config_scenario_linux() {
+// todo!()
+// }
+
+// #[test]
+// #[cfg(target_os = "macos")]
+// fn listen_to_user_config_scenario_macos() {
+// todo!()
+// }
+
+/// Check if made changes have had any effect on
+/// the client config.
+#[test]
+fn ratoml_client_config_basic() {
+ let server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"//- /p1/src/lib.rs
+enum Value {
+ Number(i32),
+ Text(String),
+}"#,
+ ],
+ vec!["p1"],
+ Some(json!({
+ "assist" : {
+ "emitMustUse" : true
+ }
+ })),
+ );
+
+ assert!(server.query(QueryType::Local, 1));
+}
+
+/// Checks if client config can be modified.
+/// FIXME @alibektas : This test is atm not valid.
+/// Asking for client config from the client is a 2 way communication
+/// which we cannot imitate with the current slow-tests infrastructure.
+/// See rust-analyzer::handlers::notifications#197
+// #[test]
+// fn client_config_update() {
+// setup();
+
+// let server = RatomlTest::new(
+// vec![
+// r#"
+// //- /p1/Cargo.toml
+// [package]
+// name = "p1"
+// version = "0.1.0"
+// edition = "2021"
+// "#,
+// r#"
+// //- /p1/src/lib.rs
+// enum Value {
+// Number(i32),
+// Text(String),
+// }"#,
+// ],
+// vec!["p1"],
+// None,
+// );
+
+// assert!(!server.query(QueryType::AssistEmitMustUse, 1));
+
+// // a.notification::<DidChangeConfiguration>(DidChangeConfigurationParams {
+// // settings: json!({
+// // "assists" : {
+// // "emitMustUse" : true
+// // }
+// // }),
+// // });
+
+// assert!(server.query(QueryType::AssistEmitMustUse, 1));
+// }
+
+// #[test]
+// fn ratoml_create_ratoml_basic() {
+// let server = RatomlTest::new(
+// vec![
+// r#"
+// //- /p1/Cargo.toml
+// [package]
+// name = "p1"
+// version = "0.1.0"
+// edition = "2021"
+// "#,
+// r#"
+// //- /p1/rust-analyzer.toml
+// assist.emitMustUse = true
+// "#,
+// r#"
+// //- /p1/src/lib.rs
+// enum Value {
+// Number(i32),
+// Text(String),
+// }
+// "#,
+// ],
+// vec!["p1"],
+// None,
+// );
+
+// assert!(server.query(QueryType::AssistEmitMustUse, 2));
+// }
+
+#[test]
+#[ignore = "the user config is currently not being watched on startup, fix this"]
+fn ratoml_user_config_detected() {
+ let server = RatomlTest::new(
+ vec![
+ r#"
+//- /$$CONFIG_DIR$$/rust-analyzer/rust-analyzer.toml
+assist.emitMustUse = true
+"#,
+ r#"
+//- /p1/Cargo.toml
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"//- /p1/src/lib.rs
+enum Value {
+ Number(i32),
+ Text(String),
+}"#,
+ ],
+ vec!["p1"],
+ None,
+ );
+
+ assert!(server.query(QueryType::Local, 2));
+}
+
+#[test]
+#[ignore = "the user config is currently not being watched on startup, fix this"]
+fn ratoml_create_user_config() {
+ let mut server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/src/lib.rs
+enum Value {
+ Number(i32),
+ Text(String),
+}"#,
+ ],
+ vec!["p1"],
+ None,
+ );
+
+ assert!(!server.query(QueryType::Local, 1));
+ server.create(
+ "//- /$$CONFIG_DIR$$/rust-analyzer/rust-analyzer.toml",
+ RatomlTest::EMIT_MUST_USE.to_owned(),
+ );
+ assert!(server.query(QueryType::Local, 1));
+}
+
+#[test]
+#[ignore = "the user config is currently not being watched on startup, fix this"]
+fn ratoml_modify_user_config() {
+ let mut server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021""#,
+ r#"
+//- /p1/src/lib.rs
+enum Value {
+ Number(i32),
+ Text(String),
+}"#,
+ r#"
+//- /$$CONFIG_DIR$$/rust-analyzer/rust-analyzer.toml
+assist.emitMustUse = true"#,
+ ],
+ vec!["p1"],
+ None,
+ );
+
+ assert!(server.query(QueryType::Local, 1));
+ server.edit(2, String::new());
+ assert!(!server.query(QueryType::Local, 1));
+}
+
+#[test]
+#[ignore = "the user config is currently not being watched on startup, fix this"]
+fn ratoml_delete_user_config() {
+ let mut server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021""#,
+ r#"
+//- /p1/src/lib.rs
+enum Value {
+ Number(i32),
+ Text(String),
+}"#,
+ r#"
+//- /$$CONFIG_DIR$$/rust-analyzer/rust-analyzer.toml
+assist.emitMustUse = true"#,
+ ],
+ vec!["p1"],
+ None,
+ );
+
+ assert!(server.query(QueryType::Local, 1));
+ server.delete(2);
+ assert!(!server.query(QueryType::Local, 1));
+}
+// #[test]
+// fn delete_user_config() {
+// todo!()
+// }
+
+// #[test]
+// fn modify_client_config() {
+// todo!()
+// }
+
+#[test]
+fn ratoml_inherit_config_from_ws_root() {
+ let server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+workspace = { members = ["p2"] }
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/rust-analyzer.toml
+assist.emitMustUse = true
+"#,
+ r#"
+//- /p1/p2/Cargo.toml
+[package]
+name = "p2"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/p2/src/lib.rs
+enum Value {
+ Number(i32),
+ Text(String),
+}"#,
+ r#"
+//- /p1/src/lib.rs
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+"#,
+ ],
+ vec!["p1"],
+ None,
+ );
+
+ assert!(server.query(QueryType::Local, 3));
+}
+
+#[test]
+fn ratoml_modify_ratoml_at_ws_root() {
+ let mut server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+workspace = { members = ["p2"] }
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/rust-analyzer.toml
+assist.emitMustUse = false
+"#,
+ r#"
+//- /p1/p2/Cargo.toml
+[package]
+name = "p2"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/p2/src/lib.rs
+enum Value {
+ Number(i32),
+ Text(String),
+}"#,
+ r#"
+//- /p1/src/lib.rs
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+"#,
+ ],
+ vec!["p1"],
+ None,
+ );
+
+ assert!(!server.query(QueryType::Local, 3));
+ server.edit(1, "assist.emitMustUse = true".to_owned());
+ assert!(server.query(QueryType::Local, 3));
+}
+
+#[test]
+fn ratoml_delete_ratoml_at_ws_root() {
+ let mut server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+workspace = { members = ["p2"] }
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/rust-analyzer.toml
+assist.emitMustUse = true
+"#,
+ r#"
+//- /p1/p2/Cargo.toml
+[package]
+name = "p2"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/p2/src/lib.rs
+enum Value {
+ Number(i32),
+ Text(String),
+}"#,
+ r#"
+//- /p1/src/lib.rs
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+"#,
+ ],
+ vec!["p1"],
+ None,
+ );
+
+ assert!(server.query(QueryType::Local, 3));
+ server.delete(1);
+ assert!(!server.query(QueryType::Local, 3));
+}
+
+#[test]
+fn ratoml_add_immediate_child_to_ws_root() {
+ let mut server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+workspace = { members = ["p2"] }
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/rust-analyzer.toml
+assist.emitMustUse = true
+"#,
+ r#"
+//- /p1/p2/Cargo.toml
+[package]
+name = "p2"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/p2/src/lib.rs
+enum Value {
+ Number(i32),
+ Text(String),
+}"#,
+ r#"
+//- /p1/src/lib.rs
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+"#,
+ ],
+ vec!["p1"],
+ None,
+ );
+
+ assert!(server.query(QueryType::Local, 3));
+ server.create("//- /p1/p2/rust-analyzer.toml", RatomlTest::EMIT_MUST_NOT_USE.to_owned());
+ assert!(!server.query(QueryType::Local, 3));
+}
+
+#[test]
+fn ratoml_rm_ws_root_ratoml_child_has_client_as_parent_now() {
+ let mut server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+workspace = { members = ["p2"] }
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/rust-analyzer.toml
+assist.emitMustUse = true
+"#,
+ r#"
+//- /p1/p2/Cargo.toml
+[package]
+name = "p2"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/p2/src/lib.rs
+enum Value {
+ Number(i32),
+ Text(String),
+}"#,
+ r#"
+//- /p1/src/lib.rs
+pub fn add(left: usize, right: usize) -> usize {
+ left + right
+}
+"#,
+ ],
+ vec!["p1"],
+ None,
+ );
+
+ assert!(server.query(QueryType::Local, 3));
+ server.delete(1);
+ assert!(!server.query(QueryType::Local, 3));
+}
+
+#[test]
+fn ratoml_crates_both_roots() {
+ let server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+workspace = { members = ["p2"] }
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/rust-analyzer.toml
+assist.emitMustUse = true
+"#,
+ r#"
+//- /p1/p2/Cargo.toml
+[package]
+name = "p2"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/p2/src/lib.rs
+enum Value {
+ Number(i32),
+ Text(String),
+}"#,
+ r#"
+//- /p1/src/lib.rs
+enum Value {
+ Number(i32),
+ Text(String),
+}"#,
+ ],
+ vec!["p1", "p2"],
+ None,
+ );
+
+ assert!(server.query(QueryType::Local, 3));
+ assert!(server.query(QueryType::Local, 4));
+}
+
+#[test]
+fn ratoml_multiple_ratoml_in_single_source_root() {
+ let server = RatomlTest::new(
+ vec![
+ r#"
+ //- /p1/Cargo.toml
+ [package]
+ name = "p1"
+ version = "0.1.0"
+ edition = "2021"
+ "#,
+ r#"
+ //- /p1/rust-analyzer.toml
+ assist.emitMustUse = true
+ "#,
+ r#"
+ //- /p1/src/rust-analyzer.toml
+ assist.emitMustUse = false
+ "#,
+ r#"
+ //- /p1/src/lib.rs
+ enum Value {
+ Number(i32),
+ Text(String),
+ }
+ "#,
+ ],
+ vec!["p1"],
+ None,
+ );
+
+ assert!(server.query(QueryType::Local, 3));
+
+ let server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021"
+"#,
+ r#"
+//- /p1/src/rust-analyzer.toml
+assist.emitMustUse = false
+"#,
+ r#"
+//- /p1/rust-analyzer.toml
+assist.emitMustUse = true
+"#,
+ r#"
+//- /p1/src/lib.rs
+enum Value {
+ Number(i32),
+ Text(String),
+}
+"#,
+ ],
+ vec!["p1"],
+ None,
+ );
+
+ assert!(server.query(QueryType::Local, 3));
+}
+
+/// If a root is non-local, we cannot find its parent
+/// in our `config.local_root_parent_map`. So if any config should
+/// apply, it must be looked up starting from the client level.
+/// FIXME @alibektas : "locality" here follows rust-analyzer's notion, which simply means present in the file system.
+/// This doesn't really help us with what we want to achieve here.
+// #[test]
+// fn ratoml_non_local_crates_start_inheriting_from_client() {
+// let server = RatomlTest::new(
+// vec![
+// r#"
+// //- /p1/Cargo.toml
+// [package]
+// name = "p1"
+// version = "0.1.0"
+// edition = "2021"
+
+// [dependencies]
+// p2 = { path = "../p2" }
+// #,
+// r#"
+// //- /p1/src/lib.rs
+// enum Value {
+// Number(i32),
+// Text(String),
+// }
+
+// use p2;
+
+// pub fn add(left: usize, right: usize) -> usize {
+// p2::add(left, right)
+// }
+
+// #[cfg(test)]
+// mod tests {
+// use super::*;
+
+// #[test]
+// fn it_works() {
+// let result = add(2, 2);
+// assert_eq!(result, 4);
+// }
+// }"#,
+// r#"
+// //- /p2/Cargo.toml
+// [package]
+// name = "p2"
+// version = "0.1.0"
+// edition = "2021"
+
+// # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+// [dependencies]
+// "#,
+// r#"
+// //- /p2/rust-analyzer.toml
+// # DEF
+// assist.emitMustUse = true
+// "#,
+// r#"
+// //- /p2/src/lib.rs
+// enum Value {
+// Number(i32),
+// Text(String),
+// }"#,
+// ],
+// vec!["p1", "p2"],
+// None,
+// );
+
+// assert!(!server.query(QueryType::AssistEmitMustUse, 5));
+// }
+
+/// Having a ratoml file at the root of a project enables
+/// configuring global level configurations as well.
+#[test]
+fn ratoml_in_root_is_global() {
+ let server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021"
+ "#,
+ r#"
+//- /rust-analyzer.toml
+hover.show.traitAssocItems = 4
+ "#,
+ r#"
+//- /p1/src/lib.rs
+trait RandomTrait {
+ type B;
+ fn abc() -> i32;
+ fn def() -> i64;
+}
+
+fn main() {
+ let a = RandomTrait;
+}"#,
+ ],
+ vec![],
+ None,
+ );
+
+ server.query(QueryType::Global, 2);
+}
+
+#[allow(unused)]
+// #[test]
+// FIXME: Re-enable this test when we have a global config we can check again
+fn ratoml_root_is_updateable() {
+ let mut server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021"
+ "#,
+ r#"
+//- /rust-analyzer.toml
+hover.show.traitAssocItems = 4
+ "#,
+ r#"
+//- /p1/src/lib.rs
+trait RandomTrait {
+ type B;
+ fn abc() -> i32;
+ fn def() -> i64;
+}
+
+fn main() {
+ let a = RandomTrait;
+}"#,
+ ],
+ vec![],
+ None,
+ );
+
+ assert!(server.query(QueryType::Global, 2));
+ server.edit(1, RatomlTest::GLOBAL_TRAIT_ASSOC_ITEMS_ZERO.to_owned());
+ assert!(!server.query(QueryType::Global, 2));
+}
+
+#[allow(unused)]
+// #[test]
+// FIXME: Re-enable this test when we have a global config we can check again
+fn ratoml_root_is_deletable() {
+ let mut server = RatomlTest::new(
+ vec![
+ r#"
+//- /p1/Cargo.toml
+[package]
+name = "p1"
+version = "0.1.0"
+edition = "2021"
+ "#,
+ r#"
+//- /rust-analyzer.toml
+hover.show.traitAssocItems = 4
+ "#,
+ r#"
+//- /p1/src/lib.rs
+trait RandomTrait {
+ type B;
+ fn abc() -> i32;
+ fn def() -> i64;
+}
+
+fn main() {
+ let a = RandomTrait;
+}"#,
+ ],
+ vec![],
+ None,
+ );
+
+ assert!(server.query(QueryType::Global, 2));
+ server.delete(1);
+ assert!(!server.query(QueryType::Global, 2));
+}
diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs
index cf27cc7eef..c438325532 100644
--- a/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -9,7 +9,10 @@ use crossbeam_channel::{after, select, Receiver};
use lsp_server::{Connection, Message, Notification, Request};
use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url};
use paths::{Utf8Path, Utf8PathBuf};
-use rust_analyzer::{config::Config, lsp, main_loop};
+use rust_analyzer::{
+ config::{Config, ConfigChange, ConfigErrors},
+ lsp, main_loop,
+};
use serde::Serialize;
use serde_json::{json, to_string_pretty, Value};
use test_utils::FixtureWithProjectMeta;
@@ -24,6 +27,7 @@ pub(crate) struct Project<'a> {
roots: Vec<Utf8PathBuf>,
config: serde_json::Value,
root_dir_contains_symlink: bool,
+ user_config_path: Option<Utf8PathBuf>,
}
impl Project<'_> {
@@ -47,9 +51,15 @@ impl Project<'_> {
}
}),
root_dir_contains_symlink: false,
+ user_config_path: None,
}
}
+ pub(crate) fn user_config_dir(mut self, config_path_dir: TestDir) -> Self {
+ self.user_config_path = Some(config_path_dir.path().to_owned());
+ self
+ }
+
pub(crate) fn tmp_dir(mut self, tmp_dir: TestDir) -> Self {
self.tmp_dir = Some(tmp_dir);
self
@@ -111,10 +121,17 @@ impl Project<'_> {
assert!(proc_macro_names.is_empty());
assert!(mini_core.is_none());
assert!(toolchain.is_none());
+
for entry in fixture {
- let path = tmp_dir.path().join(&entry.path['/'.len_utf8()..]);
- fs::create_dir_all(path.parent().unwrap()).unwrap();
- fs::write(path.as_path(), entry.text.as_bytes()).unwrap();
+ if let Some(pth) = entry.path.strip_prefix("/$$CONFIG_DIR$$") {
+ let path = self.user_config_path.clone().unwrap().join(&pth['/'.len_utf8()..]);
+ fs::create_dir_all(path.parent().unwrap()).unwrap();
+ fs::write(path.as_path(), entry.text.as_bytes()).unwrap();
+ } else {
+ let path = tmp_dir.path().join(&entry.path['/'.len_utf8()..]);
+ fs::create_dir_all(path.parent().unwrap()).unwrap();
+ fs::write(path.as_path(), entry.text.as_bytes()).unwrap();
+ }
}
let tmp_dir_path = AbsPathBuf::assert(tmp_dir.path().to_path_buf());
@@ -184,8 +201,16 @@ impl Project<'_> {
},
roots,
None,
+ self.user_config_path,
);
- config.update(self.config).expect("invalid config");
+ let mut change = ConfigChange::default();
+
+ change.change_client_config(self.config);
+
+ let error_sink: ConfigErrors;
+ (config, error_sink, _) = config.apply_change(change);
+ assert!(error_sink.is_empty(), "{error_sink:?}");
+
config.rediscover_workspaces();
Server::new(tmp_dir.keep(), config)
diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs
index 4a7415b016..7dd6382cfa 100644
--- a/crates/rust-analyzer/tests/slow-tests/tidy.rs
+++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs
@@ -185,27 +185,6 @@ Zlib OR Apache-2.0 OR MIT
}
fn check_test_attrs(path: &Path, text: &str) {
- let ignore_rule =
- "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#ignore";
- let need_ignore: &[&str] = &[
- // This file.
- "slow-tests/tidy.rs",
- // Special case to run `#[ignore]` tests.
- "ide/src/runnables.rs",
- // A legit test which needs to be ignored, as it takes too long to run
- // :(
- "hir-def/src/nameres/collector.rs",
- // Long sourcegen test to generate lint completions.
- "ide-db/src/tests/sourcegen_lints.rs",
- // Obviously needs ignore.
- "ide-assists/src/handlers/toggle_ignore.rs",
- // See above.
- "ide-assists/src/tests/generated.rs",
- ];
- if text.contains("#[ignore") && !need_ignore.iter().any(|p| path.ends_with(p)) {
- panic!("\ndon't `#[ignore]` tests, see:\n\n {ignore_rule}\n\n {}\n", path.display(),)
- }
-
let panic_rule =
"https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#should_panic";
let need_panic: &[&str] = &[
diff --git a/crates/salsa/salsa-macros/src/query_group.rs b/crates/salsa/salsa-macros/src/query_group.rs
index 659797d6d4..4e70741239 100644
--- a/crates/salsa/salsa-macros/src/query_group.rs
+++ b/crates/salsa/salsa-macros/src/query_group.rs
@@ -20,7 +20,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
let input_span = input.span();
let (trait_attrs, salsa_attrs) = filter_attrs(input.attrs);
if !salsa_attrs.is_empty() {
- return Error::new(input_span, format!("unsupported attributes: {:?}", salsa_attrs))
+ return Error::new(input_span, format!("unsupported attributes: {salsa_attrs:?}"))
.to_compile_error()
.into();
}
@@ -78,7 +78,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
num_storages += 1;
}
_ => {
- return Error::new(span, format!("unknown salsa attribute `{}`", name))
+ return Error::new(span, format!("unknown salsa attribute `{name}`"))
.to_compile_error()
.into();
}
@@ -111,7 +111,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
_ => {
return Error::new(
sig_span,
- format!("first argument of query `{}` must be `&self`", query_name),
+ format!("first argument of query `{query_name}` must be `&self`"),
)
.to_compile_error()
.into();
@@ -130,7 +130,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
arg => {
return Error::new(
arg.span(),
- format!("unsupported argument `{:?}` of `{}`", arg, query_name,),
+ format!("unsupported argument `{arg:?}` of `{query_name}`",),
)
.to_compile_error()
.into();
@@ -144,7 +144,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
ref ret => {
return Error::new(
ret.span(),
- format!("unsupported return type `{:?}` of `{}`", ret, query_name),
+ format!("unsupported return type `{ret:?}` of `{query_name}`"),
)
.to_compile_error()
.into();
@@ -169,7 +169,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
let lookup_keys = vec![(parse_quote! { key }, value.clone())];
Some(Query {
query_type: lookup_query_type,
- query_name: format!("{}", lookup_fn_name),
+ query_name: format!("{lookup_fn_name}"),
fn_name: lookup_fn_name,
receiver: self_receiver.clone(),
attrs: vec![], // FIXME -- some automatically generated docs on this method?
@@ -238,7 +238,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
let tracing = if let QueryStorage::Memoized = query.storage {
let s = format!("{trait_name}::{fn_name}");
Some(quote! {
- let _p = tracing::span!(tracing::Level::DEBUG, #s, #(#key_names = tracing::field::debug(&#key_names)),*).entered();
+ let _p = tracing::debug_span!(#s, #(#key_names = tracing::field::debug(&#key_names)),*).entered();
})
} else {
None
@@ -274,8 +274,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
*Note:* Setting values will trigger cancellation
of any ongoing queries; this method blocks until
those queries have been cancelled.
- ",
- fn_name = fn_name
+ "
);
let set_constant_fn_docs = format!(
@@ -290,8 +289,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
*Note:* Setting values will trigger cancellation
of any ongoing queries; this method blocks until
those queries have been cancelled.
- ",
- fn_name = fn_name
+ "
);
query_fn_declarations.extend(quote! {
diff --git a/crates/salsa/src/interned.rs b/crates/salsa/src/interned.rs
index bfa9cc0591..eef8bcc814 100644
--- a/crates/salsa/src/interned.rs
+++ b/crates/salsa/src/interned.rs
@@ -98,26 +98,27 @@ impl<A: InternValue + Eq + Hash + Debug + Clone, B: InternValue + Eq + Hash + De
}
}
+pub trait InternValueTrivial
+where
+ Self: Eq + Hash + Debug + Clone,
+{
+}
+
/// Implement [`InternValue`] trivially, that is without actually mapping at all.
-#[macro_export]
-macro_rules! impl_intern_value_trivial {
- ($($ty:ty),*) => {
- $(
- impl $crate::InternValue for $ty {
- type Key = $ty;
- #[inline]
- fn into_key(&self) -> Self::Key {
- self.clone()
- }
- #[inline]
- fn with_key<F: FnOnce(&Self::Key) -> T, T>(&self, f: F) -> T {
- f(self)
- }
- }
- )*
- };
+impl<V: InternValueTrivial> InternValue for V {
+ type Key = Self;
+ #[inline]
+ fn into_key(&self) -> Self::Key {
+ self.clone()
+ }
+ #[inline]
+ fn with_key<F: FnOnce(&Self::Key) -> T, T>(&self, f: F) -> T {
+ f(self)
+ }
}
-impl_intern_value_trivial!(String);
+
+impl InternValueTrivial for String {}
+
#[derive(Debug)]
struct Slot<V> {
/// DatabaseKeyIndex for this slot.
diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs
index 5dde0d560f..e11e6e2e19 100644
--- a/crates/salsa/src/lib.rs
+++ b/crates/salsa/src/lib.rs
@@ -40,7 +40,7 @@ use std::panic::{self, UnwindSafe};
pub use crate::durability::Durability;
pub use crate::intern_id::InternId;
-pub use crate::interned::{InternKey, InternValue};
+pub use crate::interned::{InternKey, InternValue, InternValueTrivial};
pub use crate::runtime::Runtime;
pub use crate::runtime::RuntimeId;
pub use crate::storage::Storage;
@@ -284,7 +284,7 @@ pub trait ParallelDatabase: Database + Send {
/// series of queries in parallel and arranging the results. Using
/// this method for that purpose ensures that those queries will
/// see a consistent view of the database (it is also advisable
- /// for those queries to use the [`Runtime::unwind_if_cancelled`]
+ /// for those queries to use the [`Database::unwind_if_cancelled`]
/// method to check for cancellation).
///
/// # Panics
@@ -513,6 +513,10 @@ where
{
self.storage.purge();
}
+
+ pub fn storage(&self) -> &<Q as Query>::Storage {
+ self.storage
+ }
}
/// Return value from [the `query_mut` method] on `Database`.
diff --git a/crates/salsa/tests/cycles.rs b/crates/salsa/tests/cycles.rs
index ea5d15a250..e9bddfc630 100644
--- a/crates/salsa/tests/cycles.rs
+++ b/crates/salsa/tests/cycles.rs
@@ -162,7 +162,7 @@ fn extract_cycle(f: impl FnOnce() + UnwindSafe) -> salsa::Cycle {
return cycle.clone();
}
}
- panic!("unexpected value: {:?}", v)
+ panic!("unexpected value: {v:?}")
}
#[test]
diff --git a/crates/salsa/tests/incremental/constants.rs b/crates/salsa/tests/incremental/constants.rs
index ea0eb81978..32bfbc4564 100644
--- a/crates/salsa/tests/incremental/constants.rs
+++ b/crates/salsa/tests/incremental/constants.rs
@@ -13,12 +13,12 @@ pub(crate) trait ConstantsDatabase: TestContext {
}
fn add(db: &dyn ConstantsDatabase, key1: char, key2: char) -> usize {
- db.log().add(format!("add({}, {})", key1, key2));
+ db.log().add(format!("add({key1}, {key2})"));
db.input(key1) + db.input(key2)
}
fn add3(db: &dyn ConstantsDatabase, key1: char, key2: char, key3: char) -> usize {
- db.log().add(format!("add3({}, {}, {})", key1, key2, key3));
+ db.log().add(format!("add3({key1}, {key2}, {key3})"));
db.add(key1, key2) + db.input(key3)
}
diff --git a/crates/salsa/tests/incremental/implementation.rs b/crates/salsa/tests/incremental/implementation.rs
index 19752bba00..8434913441 100644
--- a/crates/salsa/tests/incremental/implementation.rs
+++ b/crates/salsa/tests/incremental/implementation.rs
@@ -26,7 +26,7 @@ pub(crate) struct TestContextImpl {
impl TestContextImpl {
#[track_caller]
pub(crate) fn assert_log(&self, expected_log: &[&str]) {
- let expected_text = &format!("{:#?}", expected_log);
+ let expected_text = &format!("{expected_log:#?}");
let actual_text = &format!("{:#?}", self.log().take());
if expected_text == actual_text {
@@ -36,9 +36,9 @@ impl TestContextImpl {
#[allow(clippy::print_stdout)]
for diff in dissimilar::diff(expected_text, actual_text) {
match diff {
- dissimilar::Chunk::Delete(l) => println!("-{}", l),
- dissimilar::Chunk::Equal(l) => println!(" {}", l),
- dissimilar::Chunk::Insert(r) => println!("+{}", r),
+ dissimilar::Chunk::Delete(l) => println!("-{l}"),
+ dissimilar::Chunk::Equal(l) => println!(" {l}"),
+ dissimilar::Chunk::Insert(r) => println!("+{r}"),
}
}
diff --git a/crates/salsa/tests/parallel/parallel_cycle_none_recover.rs b/crates/salsa/tests/parallel/parallel_cycle_none_recover.rs
index 2930c4e379..3c73852eaf 100644
--- a/crates/salsa/tests/parallel/parallel_cycle_none_recover.rs
+++ b/crates/salsa/tests/parallel/parallel_cycle_none_recover.rs
@@ -33,7 +33,7 @@ fn parallel_cycle_none_recover() {
"#]]
.assert_debug_eq(&c.unexpected_participants(&db));
} else {
- panic!("b failed in an unexpected way: {:?}", err_b);
+ panic!("b failed in an unexpected way: {err_b:?}");
}
// We expect A to propagate a panic, which causes us to use the sentinel
diff --git a/crates/salsa/tests/parallel/race.rs b/crates/salsa/tests/parallel/race.rs
index e875de998f..c53d4b464e 100644
--- a/crates/salsa/tests/parallel/race.rs
+++ b/crates/salsa/tests/parallel/race.rs
@@ -28,7 +28,7 @@ fn in_par_get_set_race() {
// cancellation, it'll unwind.
let result1 = thread1.join().unwrap();
if let Ok(value1) = result1 {
- assert!(value1 == 111 || value1 == 1011, "illegal result {}", value1);
+ assert!(value1 == 111 || value1 == 1011, "illegal result {value1}");
}
// thread2 can not observe a cancellation because it performs a
diff --git a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs
index e4b0a26a6f..e8c558355c 100644
--- a/crates/span/src/hygiene.rs
+++ b/crates/span/src/hygiene.rs
@@ -4,7 +4,7 @@
//! Expansion, and Definition Contexts,” *Journal of Functional Programming* 22, no. 2
//! (March 1, 2012): 181–216, <https://doi.org/10.1017/S0956796812000093>.
//!
-//! Also see https://rustc-dev-guide.rust-lang.org/macro-expansion.html#hygiene-and-hierarchies
+//! Also see <https://rustc-dev-guide.rust-lang.org/macro-expansion.html#hygiene-and-hierarchies>
//!
//! # The Expansion Order Hierarchy
//!
diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs
index 8ca7bc2d38..bbaf1b2a6d 100644
--- a/crates/span/src/lib.rs
+++ b/crates/span/src/lib.rs
@@ -86,15 +86,6 @@ impl<Ctx: Copy> SpanData<Ctx> {
}
}
-impl Span {
- #[deprecated = "dummy spans will panic if surfaced incorrectly, as such they should be replaced appropriately"]
- pub const DUMMY: Self = Self {
- range: TextRange::empty(TextSize::new(0)),
- anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
- ctx: SyntaxContextId::ROOT,
- };
-}
-
impl fmt::Display for Span {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.anchor.file_id.index(), f)?;
@@ -178,6 +169,8 @@ impl salsa::InternKey for MacroCallId {
}
impl MacroCallId {
+ pub const MAX_ID: u32 = 0x7fff_ffff;
+
pub fn as_file(self) -> HirFileId {
MacroFileId { macro_call_id: self }.into()
}
@@ -218,7 +211,7 @@ impl From<MacroFileId> for HirFileId {
fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
_ = Self::ASSERT_MAX_FILE_ID_IS_SAME;
let id = id.as_u32();
- assert!(id <= Self::MAX_HIR_FILE_ID, "MacroCallId index {} is too large", id);
+ assert!(id <= Self::MAX_HIR_FILE_ID, "MacroCallId index {id} is too large");
HirFileId(id | Self::MACRO_FILE_TAG_MASK)
}
}
diff --git a/crates/stdx/src/anymap.rs b/crates/stdx/src/anymap.rs
index d47b3d1647..d189b56a46 100644
--- a/crates/stdx/src/anymap.rs
+++ b/crates/stdx/src/anymap.rs
@@ -1,6 +1,6 @@
-//! This file is a port of only the necessary features from https://github.com/chris-morgan/anymap version 1.0.0-beta.2 for use within rust-analyzer.
+//! This file is a port of only the necessary features from <https://github.com/chris-morgan/anymap> version 1.0.0-beta.2 for use within rust-analyzer.
//! Copyright © 2014–2022 Chris Morgan.
-//! COPYING: https://github.com/chris-morgan/anymap/blob/master/COPYING
+//! COPYING: <https://github.com/chris-morgan/anymap/blob/master/COPYING>
//! Note that the license is changed from Blue Oak Model 1.0.0 or MIT or Apache-2.0 to MIT OR Apache-2.0
//!
//! This implementation provides a safe and convenient store for one value of each type.
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index 0e62de5feb..9b43da8341 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -120,7 +120,7 @@ pub struct TreeDiff {
impl TreeDiff {
pub fn into_text_edit(&self, builder: &mut TextEditBuilder) {
- let _p = tracing::span!(tracing::Level::INFO, "into_text_edit").entered();
+ let _p = tracing::info_span!("into_text_edit").entered();
for (anchor, to) in &self.insertions {
let offset = match anchor {
@@ -149,7 +149,7 @@ impl TreeDiff {
///
/// This function tries to find a fine-grained diff.
pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
- let _p = tracing::span!(tracing::Level::INFO, "diff").entered();
+ let _p = tracing::info_span!("diff").entered();
let mut diff = TreeDiff {
replacements: FxHashMap::default(),
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index 186f1b01da..35ec9b1013 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -172,7 +172,7 @@ pub fn ty_alias(
assignment: Option<(ast::Type, Option<ast::WhereClause>)>,
) -> ast::TypeAlias {
let mut s = String::new();
- s.push_str(&format!("type {}", ident));
+ s.push_str(&format!("type {ident}"));
if let Some(list) = generic_param_list {
s.push_str(&list.to_string());
@@ -297,7 +297,7 @@ pub fn impl_trait(
};
let where_clause = merge_where_clause(ty_where_clause, trait_where_clause)
- .map_or_else(|| " ".to_owned(), |wc| format!("\n{}\n", wc));
+ .map_or_else(|| " ".to_owned(), |wc| format!("\n{wc}\n"));
let body = match body {
Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""),
@@ -1159,7 +1159,7 @@ pub mod tokens {
pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| {
SourceFile::parse(
- "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT,
+ "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let _ @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT,
)
});
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index 3a9ebafe87..b5d816b0ce 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -107,14 +107,22 @@ impl<T> Parse<T> {
}
impl<T: AstNode> Parse<T> {
+ /// Converts this parse result into a parse result for an untyped syntax tree.
pub fn to_syntax(self) -> Parse<SyntaxNode> {
Parse { green: self.green, errors: self.errors, _ty: PhantomData }
}
+ /// Gets the parsed syntax tree as a typed ast node.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the root node cannot be casted into the typed ast node
+ /// (e.g. if it's an `ERROR` node).
pub fn tree(&self) -> T {
T::cast(self.syntax_node()).unwrap()
}
+ /// Converts from `Parse<T>` to [`Result<T, Vec<SyntaxError>>`].
pub fn ok(self) -> Result<T, Vec<SyntaxError>> {
match self.errors() {
errors if !errors.is_empty() => Err(errors),
@@ -167,21 +175,40 @@ impl Parse<SourceFile> {
}
}
+impl ast::Expr {
+ /// Parses an `ast::Expr` from `text`.
+ ///
+ /// Note that if the parsed root node is not a valid expression, [`Parse::tree`] will panic.
+ /// For example:
+ /// ```rust,should_panic
+ /// # use syntax::{ast, Edition};
+ /// ast::Expr::parse("let fail = true;", Edition::CURRENT).tree();
+ /// ```
+ pub fn parse(text: &str, edition: Edition) -> Parse<ast::Expr> {
+ let _p = tracing::info_span!("Expr::parse").entered();
+ let (green, errors) = parsing::parse_text_at(text, parser::TopEntryPoint::Expr, edition);
+ let root = SyntaxNode::new_root(green.clone());
+
+ assert!(
+ ast::Expr::can_cast(root.kind()) || root.kind() == SyntaxKind::ERROR,
+ "{:?} isn't an expression",
+ root.kind()
+ );
+ Parse::new(green, errors)
+ }
+}
+
/// `SourceFile` represents a parse tree for a single Rust file.
pub use crate::ast::SourceFile;
impl SourceFile {
pub fn parse(text: &str, edition: Edition) -> Parse<SourceFile> {
- let _p = tracing::span!(tracing::Level::INFO, "SourceFile::parse").entered();
+ let _p = tracing::info_span!("SourceFile::parse").entered();
let (green, errors) = parsing::parse_text(text, edition);
let root = SyntaxNode::new_root(green.clone());
assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
- Parse {
- green,
- errors: if errors.is_empty() { None } else { Some(errors.into()) },
- _ty: PhantomData,
- }
+ Parse::new(green, errors)
}
}
@@ -290,12 +317,7 @@ impl ast::TokenTree {
}
let (green, errors) = builder.finish_raw();
-
- Parse {
- green,
- errors: if errors.is_empty() { None } else { Some(errors.into()) },
- _ty: PhantomData,
- }
+ Parse::new(green, errors)
}
}
@@ -420,7 +442,7 @@ fn api_walkthrough() {
assert!(expr_syntax.siblings_with_tokens(Direction::Next).any(|it| it.kind() == T!['}']));
assert_eq!(
expr_syntax.descendants_with_tokens().count(),
- 8, // 5 tokens `1`, ` `, `+`, ` `, `!`
+ 8, // 5 tokens `1`, ` `, `+`, ` `, `1`
// 2 child literal expressions: `1`, `1`
// 1 the node itself: `1 + 1`
);
diff --git a/crates/syntax/src/parsing.rs b/crates/syntax/src/parsing.rs
index 420f4938e5..4bf2a03279 100644
--- a/crates/syntax/src/parsing.rs
+++ b/crates/syntax/src/parsing.rs
@@ -10,7 +10,7 @@ use crate::{syntax_node::GreenNode, SyntaxError, SyntaxTreeBuilder};
pub(crate) use crate::parsing::reparsing::incremental_reparse;
pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Vec<SyntaxError>) {
- let _p = tracing::span!(tracing::Level::INFO, "parse_text").entered();
+ let _p = tracing::info_span!("parse_text").entered();
let lexed = parser::LexedStr::new(text);
let parser_input = lexed.to_input();
let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input, edition);
@@ -18,11 +18,24 @@ pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Ve
(node, errors)
}
+pub(crate) fn parse_text_at(
+ text: &str,
+ entry: parser::TopEntryPoint,
+ edition: parser::Edition,
+) -> (GreenNode, Vec<SyntaxError>) {
+ let _p = tracing::info_span!("parse_text_at").entered();
+ let lexed = parser::LexedStr::new(text);
+ let parser_input = lexed.to_input();
+ let parser_output = entry.parse(&parser_input, edition);
+ let (node, errors, _eof) = build_tree(lexed, parser_output);
+ (node, errors)
+}
+
pub(crate) fn build_tree(
lexed: parser::LexedStr<'_>,
parser_output: parser::Output,
) -> (GreenNode, Vec<SyntaxError>, bool) {
- let _p = tracing::span!(tracing::Level::INFO, "build_tree").entered();
+ let _p = tracing::info_span!("build_tree").entered();
let mut builder = SyntaxTreeBuilder::default();
let is_eof = lexed.intersperse_trivia(&parser_output, &mut |step| match step {
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index dbfab537fe..13d352d3c6 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -16,7 +16,7 @@ use crate::{
};
pub(crate) fn validate(root: &SyntaxNode, errors: &mut Vec<SyntaxError>) {
- let _p = tracing::span!(tracing::Level::INFO, "parser::validate").entered();
+ let _p = tracing::info_span!("parser::validate").entered();
// FIXME:
// * Add unescape validation of raw string literals and raw byte string literals
// * Add validation of doc comments are being attached to nodes
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs
index be9961120d..e65186d377 100644
--- a/crates/test-fixture/src/lib.rs
+++ b/crates/test-fixture/src/lib.rs
@@ -196,7 +196,7 @@ impl ChangeFixture {
origin,
);
let prev = crates.insert(crate_name.clone(), crate_id);
- assert!(prev.is_none(), "multiple crates with same name: {}", crate_name);
+ assert!(prev.is_none(), "multiple crates with same name: {crate_name}");
for dep in meta.deps {
let prelude = match &meta.extern_prelude {
Some(v) => v.contains(&dep),
diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs
index aafe4fb5b1..54c9db7aac 100644
--- a/crates/test-utils/src/fixture.rs
+++ b/crates/test-utils/src/fixture.rs
@@ -143,14 +143,14 @@ pub struct FixtureWithProjectMeta {
/// Specifies LLVM data layout to be used.
///
/// You probably don't want to manually specify this. See LLVM manual for the
- /// syntax, if you must: https://llvm.org/docs/LangRef.html#data-layout
+ /// syntax, if you must: <https://llvm.org/docs/LangRef.html#data-layout>
pub target_data_layout: String,
}
impl FixtureWithProjectMeta {
/// Parses text which looks like this:
///
- /// ```not_rust
+ /// ```text
/// //- some meta
/// line 1
/// line 2
@@ -159,7 +159,7 @@ impl FixtureWithProjectMeta {
///
/// Fixture can also start with a proc_macros and minicore declaration (in that order):
///
- /// ```
+ /// ```text
/// //- toolchain: nightly
/// //- proc_macros: identity
/// //- minicore: sized
@@ -450,7 +450,7 @@ impl MiniCore {
}
if !active_regions.is_empty() {
- panic!("unclosed regions: {:?} Add an `endregion` comment", active_regions);
+ panic!("unclosed regions: {active_regions:?} Add an `endregion` comment");
}
for flag in &self.valid_flags {
diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs
index b750107803..43f62d0d1e 100644
--- a/crates/test-utils/src/lib.rs
+++ b/crates/test-utils/src/lib.rs
@@ -224,7 +224,7 @@ pub fn add_cursor(text: &str, offset: TextSize) -> String {
/// Annotations point to the last line that actually was long enough for the
/// range, not counting annotations themselves. So overlapping annotations are
/// possible:
-/// ```no_run
+/// ```text
/// // stuff other stuff
/// // ^^ 'st'
/// // ^^^^^ 'stuff'
diff --git a/crates/toolchain/src/lib.rs b/crates/toolchain/src/lib.rs
index b577723612..2591ed1691 100644
--- a/crates/toolchain/src/lib.rs
+++ b/crates/toolchain/src/lib.rs
@@ -23,7 +23,7 @@ impl Tool {
///
/// The current implementation checks three places for an executable to use:
/// 1) `$CARGO_HOME/bin/<executable_name>`
- /// where $CARGO_HOME defaults to ~/.cargo (see https://doc.rust-lang.org/cargo/guide/cargo-home.html)
+ /// where $CARGO_HOME defaults to ~/.cargo (see <https://doc.rust-lang.org/cargo/guide/cargo-home.html>)
/// example: for cargo, this tries $CARGO_HOME/bin/cargo, or ~/.cargo/bin/cargo if $CARGO_HOME is unset.
/// It seems that this is a reasonable place to try for cargo, rustc, and rustup
/// 2) Appropriate environment variable (erroring if this is set but not a usable executable)
@@ -45,7 +45,7 @@ impl Tool {
/// example: for cargo, this tries all paths in $PATH with appended `cargo`, returning the
/// first that exists
/// 3) `$CARGO_HOME/bin/<executable_name>`
- /// where $CARGO_HOME defaults to ~/.cargo (see https://doc.rust-lang.org/cargo/guide/cargo-home.html)
+ /// where $CARGO_HOME defaults to ~/.cargo (see <https://doc.rust-lang.org/cargo/guide/cargo-home.html>)
/// example: for cargo, this tries $CARGO_HOME/bin/cargo, or ~/.cargo/bin/cargo if $CARGO_HOME is unset.
/// It seems that this is a reasonable place to try for cargo, rustc, and rustup
/// 4) If all else fails, we just try to use the executable name directly
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index ab0efff651..e9de3f97b0 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -147,7 +147,7 @@ pub struct Punct<S> {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Spacing {
Alone,
- /// Whether the following token is joint to the current one.
+ /// Whether the following token is joint to this one.
Joint,
}
diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs
index b07e97cd6c..18c8699dd4 100644
--- a/crates/vfs/src/lib.rs
+++ b/crates/vfs/src/lib.rs
@@ -69,9 +69,6 @@ pub struct FileId(u32);
// pub struct FileId(NonMaxU32);
impl FileId {
- /// Think twice about using this outside of tests. If this ends up in a wrong place it will cause panics!
- // FIXME: To be removed once we get rid of all `SpanData::DUMMY` usages.
- pub const BOGUS: FileId = FileId(0xe4e4e);
pub const MAX_FILE_ID: u32 = 0x7fff_ffff;
#[inline]
@@ -282,7 +279,7 @@ impl Vfs {
/// Returns the id associated with `path`
///
/// - If `path` does not exists in the `Vfs`, allocate a new id for it, associated with a
- /// deleted file;
+ /// deleted file;
/// - Else, returns `path`'s id.
///
/// Does not record a change.
diff --git a/crates/vfs/src/vfs_path.rs b/crates/vfs/src/vfs_path.rs
index 2d3fb9d88c..92a49e0793 100644
--- a/crates/vfs/src/vfs_path.rs
+++ b/crates/vfs/src/vfs_path.rs
@@ -384,8 +384,7 @@ impl VirtualPath {
///
/// # Returns
/// - `None` if `self` ends with `"//"`.
- /// - `Some((name, None))` if `self`'s base contains no `.`, or only one `.` at
- /// the start.
+ /// - `Some((name, None))` if `self`'s base contains no `.`, or only one `.` at the start.
/// - `Some((name, Some(extension))` else.
///
/// # Note
diff --git a/docs/dev/README.md b/docs/dev/README.md
index 8897f02e27..002b8ba2a6 100644
--- a/docs/dev/README.md
+++ b/docs/dev/README.md
@@ -145,7 +145,7 @@ To log all communication between the server and the client, there are two choice
```
env RA_LOG=lsp_server=debug code .
```
-* You can log on the client side, by enabling `"rust-analyzer.trace.server": "verbose"` workspace setting.
+* You can log on the client side, by using the `rust-analyzer: Toggle LSP Logs` command or by enabling `"rust-analyzer.trace.server": "verbose"` workspace setting.
These logs are shown in a separate tab in the output and could be used with LSP inspector.
Kudos to [@DJMcNab](https://github.com/DJMcNab) for setting this awesome infra up!
diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md
index 1c91e856e7..695fec7e8e 100644
--- a/docs/dev/lsp-extensions.md
+++ b/docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
<!---
-lsp/ext.rs hash: 1babf76a3c2cef3b
+lsp/ext.rs hash: 8e6e340f2899b5e9
If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:
@@ -372,7 +372,7 @@ interface Runnable {
}
```
-rust-analyzer supports only one `kind`, `"cargo"`. The `args` for `"cargo"` look like this:
+rust-analyzer supports two `kind`s of runnables, `"cargo"` and `"shell"`. The `args` for `"cargo"` look like this:
```typescript
{
@@ -386,6 +386,17 @@ rust-analyzer supports only one `kind`, `"cargo"`. The `args` for `"cargo"` look
}
```
+The `args` for `"shell"` look like this:
+
+```typescript
+{
+ kind: string;
+ program: string;
+ args: string[];
+ cwd: string;
+}
+```
+
## Test explorer
**Experimental Client Capability:** `{ "testExplorer": boolean }`
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc
index 8993a46d2b..14aae91741 100644
--- a/docs/user/generated_config.adoc
+++ b/docs/user/generated_config.adoc
@@ -19,7 +19,7 @@ Term search fuel in "units of work" for assists (Defaults to 400).
--
Warm up caches on project load.
--
-[[rust-analyzer.cachePriming.numThreads]]rust-analyzer.cachePriming.numThreads (default: `0`)::
+[[rust-analyzer.cachePriming.numThreads]]rust-analyzer.cachePriming.numThreads (default: `"physical"`)::
+
--
How many worker threads to handle priming caches. The default `0` means to pick automatically.
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc
index 8e6c53d0c5..e1c1c54ec4 100644
--- a/docs/user/manual.adoc
+++ b/docs/user/manual.adoc
@@ -21,7 +21,7 @@ The LSP allows various code editors, like VS Code, Emacs or Vim, to implement se
To improve this document, send a pull request: +
https://github.com/rust-lang/rust-analyzer/blob/master/docs/user/manual.adoc[https://github.com/rust-analyzer/.../manual.adoc]
-The manual is written in https://asciidoc.org[AsciiDoc] and includes some extra files which are generated from the source code. Run `cargo test` and `cargo test -p xtask` to create these and then `asciidoctor manual.adoc` to create an HTML copy.
+The manual is written in https://asciidoc.org[AsciiDoc] and includes some extra files which are generated from the source code. Run `cargo test` and `cargo xtask codegen` to create these and then `asciidoctor manual.adoc` to create an HTML copy.
====
If you have questions about using rust-analyzer, please ask them in the https://users.rust-lang.org/c/ide/14["`IDEs and Editors`"] topic of Rust users forum.
@@ -591,7 +591,7 @@ The next thing to check would be panic messages in rust-analyzer's log.
Log messages are printed to stderr, in VS Code you can see them in the `Output > Rust Analyzer Language Server` tab of the panel.
To see more logs, set the `RA_LOG=info` environment variable, this can be done either by setting the environment variable manually or by using `rust-analyzer.server.extraEnv`, note that both of these approaches require the server to be restarted.
-To fully capture LSP messages between the editor and the server, set `"rust-analyzer.trace.server": "verbose"` config and check
+To fully capture LSP messages between the editor and the server, run the `rust-analyzer: Toggle LSP Logs` command and check
`Output > Rust Analyzer Language Server Trace`.
The root cause for many "`nothing works`" problems is that rust-analyzer fails to understand the project structure.
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index bd8b0e9c4e..7de7576317 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -1087,12 +1087,12 @@
}
},
"node_modules/braces": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
- "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
+ "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"dependencies": {
- "fill-range": "^7.0.1"
+ "fill-range": "^7.1.1"
},
"engines": {
"node": ">=8"
@@ -2243,9 +2243,9 @@
}
},
"node_modules/fill-range": {
- "version": "7.0.1",
- "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
- "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
+ "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"dependencies": {
"to-regex-range": "^5.0.1"
diff --git a/editors/code/package.json b/editors/code/package.json
index 6e4fedd992..db2a989106 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -300,6 +300,11 @@
"command": "rust-analyzer.toggleCheckOnSave",
"title": "Toggle Check on Save",
"category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.toggleLSPLogs",
+ "title": "Toggle LSP Logs",
+ "category": "rust-analyzer"
}
],
"keybindings": [
@@ -314,1440 +319,2281 @@
"when": "editorTextFocus && editorLangId == rust"
}
],
- "configuration": {
- "type": "object",
- "title": "rust-analyzer",
- "properties": {
- "rust-analyzer.cargoRunner": {
- "type": [
- "null",
- "string"
- ],
- "default": null,
- "description": "Custom cargo runner extension ID."
- },
- "rust-analyzer.runnables.extraEnv": {
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "platform": {
- "type": [
- "null",
- "string",
- "array"
- ],
- "default": null,
- "markdownDescription": "Platform(s) filter like \"win32\" or [\"linux\", \"win32\"]. See [process.platform](https://nodejs.org/api/process.html#processplatform) values."
- },
- "mask": {
- "type": "string",
- "description": "Runnable name mask"
- },
- "env": {
- "type": "object",
- "description": "Variables in form of { \"key\": \"value\"}"
+ "configuration": [
+ {
+ "title": "general",
+ "properties": {
+ "rust-analyzer.cargoRunner": {
+ "type": [
+ "null",
+ "string"
+ ],
+ "default": null,
+ "description": "Custom cargo runner extension ID."
+ },
+ "rust-analyzer.restartServerOnConfigChange": {
+ "markdownDescription": "Whether to restart the server automatically when certain settings that require a restart are changed.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.discoverProjectRunner": {
+ "markdownDescription": "Sets the extension responsible for determining which extension the rust-analyzer extension uses to generate `rust-project.json` files. This should should only be used\n if a build system like Buck or Bazel is also in use.",
+ "default": null,
+ "type": [
+ "null",
+ "string"
+ ]
+ },
+ "rust-analyzer.showUnlinkedFileNotification": {
+ "markdownDescription": "Whether to show a notification for unlinked files asking the user to add the corresponding Cargo.toml to the linked projects setting.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.showRequestFailedErrorNotification": {
+ "markdownDescription": "Whether to show error notifications for failing requests.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.showDependenciesExplorer": {
+ "markdownDescription": "Whether to show the dependencies view.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.testExplorer": {
+ "markdownDescription": "Whether to show the test explorer.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "runnables",
+ "properties": {
+ "rust-analyzer.runnables.extraEnv": {
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "platform": {
+ "type": [
+ "null",
+ "string",
+ "array"
+ ],
+ "default": null,
+ "markdownDescription": "Platform(s) filter like \"win32\" or [\"linux\", \"win32\"]. See [process.platform](https://nodejs.org/api/process.html#processplatform) values."
+ },
+ "mask": {
+ "type": "string",
+ "description": "Runnable name mask"
+ },
+ "env": {
+ "type": "object",
+ "description": "Variables in form of { \"key\": \"value\"}"
+ }
}
}
+ },
+ {
+ "type": "object",
+ "description": "Variables in form of { \"key\": \"value\"}"
}
+ ],
+ "default": null,
+ "markdownDescription": "Environment variables passed to the runnable launched using `Test` or `Debug` lens or `rust-analyzer.run` command."
+ },
+ "rust-analyzer.runnables.problemMatcher": {
+ "type": "array",
+ "items": {
+ "type": "string"
},
- {
- "type": "object",
- "description": "Variables in form of { \"key\": \"value\"}"
- }
- ],
- "default": null,
- "markdownDescription": "Environment variables passed to the runnable launched using `Test` or `Debug` lens or `rust-analyzer.run` command."
- },
- "rust-analyzer.runnables.problemMatcher": {
- "type": "array",
- "items": {
- "type": "string"
+ "default": [
+ "$rustc"
+ ],
+ "markdownDescription": "Problem matchers to use for `rust-analyzer.run` command, eg `[\"$rustc\", \"$rust-panic\"]`."
+ }
+ }
+ },
+ {
+ "title": "statusBar",
+ "properties": {
+ "rust-analyzer.statusBar.clickAction": {
+ "type": "string",
+ "enum": [
+ "stopServer",
+ "openLogs"
+ ],
+ "enumDescriptions": [
+ "Stop Server",
+ "Open Logs"
+ ],
+ "default": "openLogs",
+ "markdownDescription": "Action to run when clicking the extension status bar item."
+ }
+ }
+ },
+ {
+ "title": "server",
+ "properties": {
+ "rust-analyzer.server.path": {
+ "type": [
+ "null",
+ "string"
+ ],
+ "scope": "machine-overridable",
+ "default": null,
+ "markdownDescription": "Path to rust-analyzer executable (points to bundled binary by default)."
},
- "default": [
- "$rustc"
- ],
- "markdownDescription": "Problem matchers to use for `rust-analyzer.run` command, eg `[\"$rustc\", \"$rust-panic\"]`."
- },
- "rust-analyzer.statusBar.clickAction": {
- "type": "string",
- "enum": [
- "stopServer",
- "openLogs"
- ],
- "enumDescriptions": [
- "Stop Server",
- "Open Logs"
- ],
- "default": "openLogs",
- "markdownDescription": "Action to run when clicking the extension status bar item."
- },
- "rust-analyzer.server.path": {
- "type": [
- "null",
- "string"
- ],
- "scope": "machine-overridable",
- "default": null,
- "markdownDescription": "Path to rust-analyzer executable (points to bundled binary by default)."
- },
- "rust-analyzer.server.extraEnv": {
- "type": [
- "null",
- "object"
- ],
- "additionalProperties": {
+ "rust-analyzer.server.extraEnv": {
"type": [
- "string",
- "number"
+ "null",
+ "object"
+ ],
+ "additionalProperties": {
+ "type": [
+ "string",
+ "number"
+ ]
+ },
+ "default": null,
+ "markdownDescription": "Extra environment variables that will be passed to the rust-analyzer executable. Useful for passing e.g. `RA_LOG` for debugging."
+ }
+ }
+ },
+ {
+ "title": "trace",
+ "properties": {
+ "rust-analyzer.trace.server": {
+ "type": "string",
+ "scope": "window",
+ "enum": [
+ "off",
+ "messages",
+ "verbose"
+ ],
+ "enumDescriptions": [
+ "No traces",
+ "Error only",
+ "Full log"
+ ],
+ "default": "off",
+ "description": "Trace requests to the rust-analyzer (this is usually overly verbose and not recommended for regular users)."
+ },
+ "rust-analyzer.trace.extension": {
+ "description": "Enable logging of VS Code extensions itself.",
+ "type": "boolean",
+ "default": false
+ }
+ }
+ },
+ {
+ "title": "debug",
+ "properties": {
+ "rust-analyzer.debug.engine": {
+ "type": "string",
+ "enum": [
+ "auto",
+ "vadimcn.vscode-lldb",
+ "ms-vscode.cpptools"
+ ],
+ "default": "auto",
+ "description": "Preferred debug engine.",
+ "markdownEnumDescriptions": [
+ "First try to use [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb), if it's not installed try to use [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools).",
+ "Use [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb)",
+ "Use [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools)"
]
},
- "default": null,
- "markdownDescription": "Extra environment variables that will be passed to the rust-analyzer executable. Useful for passing e.g. `RA_LOG` for debugging."
- },
- "rust-analyzer.trace.server": {
- "type": "string",
- "scope": "window",
- "enum": [
- "off",
- "messages",
- "verbose"
- ],
- "enumDescriptions": [
- "No traces",
- "Error only",
- "Full log"
- ],
- "default": "off",
- "description": "Trace requests to the rust-analyzer (this is usually overly verbose and not recommended for regular users)."
- },
- "rust-analyzer.trace.extension": {
- "description": "Enable logging of VS Code extensions itself.",
- "type": "boolean",
- "default": false
- },
- "rust-analyzer.debug.engine": {
- "type": "string",
- "enum": [
- "auto",
- "vadimcn.vscode-lldb",
- "ms-vscode.cpptools"
- ],
- "default": "auto",
- "description": "Preferred debug engine.",
- "markdownEnumDescriptions": [
- "First try to use [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb), if it's not installed try to use [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools).",
- "Use [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb)",
- "Use [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools)"
- ]
- },
- "rust-analyzer.debug.sourceFileMap": {
- "type": [
- "object",
- "string"
- ],
- "const": "auto",
- "description": "Optional source file mappings passed to the debug engine.",
- "default": {
- "/rustc/<id>": "${env:USERPROFILE}/.rustup/toolchains/<toolchain-id>/lib/rustlib/src/rust"
+ "rust-analyzer.debug.sourceFileMap": {
+ "type": [
+ "object",
+ "string"
+ ],
+ "const": "auto",
+ "description": "Optional source file mappings passed to the debug engine.",
+ "default": {
+ "/rustc/<id>": "${env:USERPROFILE}/.rustup/toolchains/<toolchain-id>/lib/rustlib/src/rust"
+ }
+ },
+ "rust-analyzer.debug.openDebugPane": {
+ "markdownDescription": "Whether to open up the `Debug Panel` on debugging start.",
+ "type": "boolean",
+ "default": false
+ },
+ "rust-analyzer.debug.engineSettings": {
+ "type": "object",
+ "default": {},
+ "markdownDescription": "Optional settings passed to the debug engine. Example: `{ \"lldb\": { \"terminal\":\"external\"} }`"
}
- },
- "rust-analyzer.debug.openDebugPane": {
- "markdownDescription": "Whether to open up the `Debug Panel` on debugging start.",
- "type": "boolean",
- "default": false
- },
- "rust-analyzer.debug.engineSettings": {
- "type": "object",
- "default": {},
- "markdownDescription": "Optional settings passed to the debug engine. Example: `{ \"lldb\": { \"terminal\":\"external\"} }`"
- },
- "rust-analyzer.restartServerOnConfigChange": {
- "markdownDescription": "Whether to restart the server automatically when certain settings that require a restart are changed.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.typing.continueCommentsOnNewline": {
- "markdownDescription": "Whether to prefix newlines after comments with the corresponding comment prefix.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.diagnostics.previewRustcOutput": {
- "markdownDescription": "Whether to show the main part of the rendered rustc output of a diagnostic message.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.diagnostics.useRustcErrorCode": {
- "markdownDescription": "Whether to show diagnostics using the original rustc error code. If this is false, all rustc diagnostics will have the code 'rustc(Click for full compiler diagnostics)'",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.discoverProjectRunner": {
- "markdownDescription": "Sets the extension responsible for determining which extension the rust-analyzer extension uses to generate `rust-project.json` files. This should should only be used\n if a build system like Buck or Bazel is also in use.",
- "default": null,
- "type": [
- "null",
- "string"
- ]
- },
- "rust-analyzer.showUnlinkedFileNotification": {
- "markdownDescription": "Whether to show a notification for unlinked files asking the user to add the corresponding Cargo.toml to the linked projects setting.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.showRequestFailedErrorNotification": {
- "markdownDescription": "Whether to show error notifications for failing requests.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.showDependenciesExplorer": {
- "markdownDescription": "Whether to show the dependencies view.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.testExplorer": {
- "markdownDescription": "Whether to show the test explorer.",
- "default": false,
- "type": "boolean"
- },
- "$generated-start": {},
- "rust-analyzer.assist.emitMustUse": {
- "markdownDescription": "Whether to insert #[must_use] when generating `as_` methods\nfor enum variants.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.assist.expressionFillDefault": {
- "markdownDescription": "Placeholder expression to use for missing expressions in assists.",
- "default": "todo",
- "type": "string",
- "enum": [
- "todo",
- "default"
- ],
- "enumDescriptions": [
- "Fill missing expressions with the `todo` macro",
- "Fill missing expressions with reasonable defaults, `new` or `default` constructors."
- ]
- },
- "rust-analyzer.assist.termSearch.fuel": {
- "markdownDescription": "Term search fuel in \"units of work\" for assists (Defaults to 400).",
- "default": 400,
- "type": "integer",
- "minimum": 0
- },
- "rust-analyzer.cachePriming.enable": {
- "markdownDescription": "Warm up caches on project load.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.cachePriming.numThreads": {
- "markdownDescription": "How many worker threads to handle priming caches. The default `0` means to pick automatically.",
- "default": 0,
- "type": "number",
- "minimum": 0,
- "maximum": 255
- },
- "rust-analyzer.cargo.allTargets": {
- "markdownDescription": "Pass `--all-targets` to cargo invocation.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.cargo.autoreload": {
- "markdownDescription": "Automatically refresh project info via `cargo metadata` on\n`Cargo.toml` or `.cargo/config.toml` changes.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.cargo.buildScripts.enable": {
- "markdownDescription": "Run build scripts (`build.rs`) for more precise code analysis.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.cargo.buildScripts.invocationLocation": {
- "markdownDescription": "Specifies the working directory for running build scripts.\n- \"workspace\": run build scripts for a workspace in the workspace's root directory.\n This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`.\n- \"root\": run build scripts in the project's root directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.",
- "default": "workspace",
- "type": "string",
- "enum": [
- "workspace",
- "root"
- ],
- "enumDescriptions": [
- "The command will be executed in the corresponding workspace root.",
- "The command will be executed in the project root."
- ]
- },
- "rust-analyzer.cargo.buildScripts.invocationStrategy": {
- "markdownDescription": "Specifies the invocation strategy to use when running the build scripts command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.",
- "default": "per_workspace",
- "type": "string",
- "enum": [
- "per_workspace",
- "once"
- ],
- "enumDescriptions": [
- "The command will be executed for each workspace.",
- "The command will be executed once."
- ]
- },
- "rust-analyzer.cargo.buildScripts.overrideCommand": {
- "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#` and\n`#rust-analyzer.cargo.buildScripts.invocationLocation#`.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets\n```\n.",
- "default": null,
- "type": [
- "null",
- "array"
- ],
- "items": {
- "type": "string"
+ }
+ },
+ {
+ "title": "typing",
+ "properties": {
+ "rust-analyzer.typing.continueCommentsOnNewline": {
+ "markdownDescription": "Whether to prefix newlines after comments with the corresponding comment prefix.",
+ "default": true,
+ "type": "boolean"
}
- },
- "rust-analyzer.cargo.buildScripts.rebuildOnSave": {
- "markdownDescription": "Rerun proc-macros building/build-scripts running when proc-macro\nor build-script sources change and are saved.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.cargo.buildScripts.useRustcWrapper": {
- "markdownDescription": "Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to\navoid checking unnecessary things.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.cargo.cfgs": {
- "markdownDescription": "List of cfg options to enable with the given values.",
- "default": {
- "debug_assertions": null,
- "miri": null
+ }
+ },
+ {
+ "title": "diagnostics",
+ "properties": {
+ "rust-analyzer.diagnostics.previewRustcOutput": {
+ "markdownDescription": "Whether to show the main part of the rendered rustc output of a diagnostic message.",
+ "default": false,
+ "type": "boolean"
},
- "type": "object"
- },
- "rust-analyzer.cargo.extraArgs": {
- "markdownDescription": "Extra arguments that are passed to every cargo invocation.",
- "default": [],
- "type": "array",
- "items": {
- "type": "string"
+ "rust-analyzer.diagnostics.useRustcErrorCode": {
+ "markdownDescription": "Whether to show diagnostics using the original rustc error code. If this is false, all rustc diagnostics will have the code 'rustc(Click for full compiler diagnostics)'",
+ "default": false,
+ "type": "boolean"
}
- },
- "rust-analyzer.cargo.extraEnv": {
- "markdownDescription": "Extra environment variables that will be set when running cargo, rustc\nor other commands within the workspace. Useful for setting RUSTFLAGS.",
- "default": {},
- "type": "object"
- },
- "rust-analyzer.cargo.features": {
- "markdownDescription": "List of features to activate.\n\nSet this to `\"all\"` to pass `--all-features` to cargo.",
- "default": [],
- "anyOf": [
- {
- "type": "string",
- "enum": [
- "all"
- ],
- "enumDescriptions": [
- "Pass `--all-features` to cargo"
- ]
- },
- {
- "type": "array",
- "items": {
- "type": "string"
+ }
+ },
+ {
+ "title": "$generated-start"
+ },
+ {
+ "title": "assist",
+ "properties": {
+ "rust-analyzer.assist.emitMustUse": {
+ "markdownDescription": "Whether to insert #[must_use] when generating `as_` methods\nfor enum variants.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "assist",
+ "properties": {
+ "rust-analyzer.assist.expressionFillDefault": {
+ "markdownDescription": "Placeholder expression to use for missing expressions in assists.",
+ "default": "todo",
+ "type": "string",
+ "enum": [
+ "todo",
+ "default"
+ ],
+ "enumDescriptions": [
+ "Fill missing expressions with the `todo` macro",
+ "Fill missing expressions with reasonable defaults, `new` or `default` constructors."
+ ]
+ }
+ }
+ },
+ {
+ "title": "assist",
+ "properties": {
+ "rust-analyzer.assist.termSearch.fuel": {
+ "markdownDescription": "Term search fuel in \"units of work\" for assists (Defaults to 400).",
+ "default": 400,
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ },
+ {
+ "title": "cachePriming",
+ "properties": {
+ "rust-analyzer.cachePriming.enable": {
+ "markdownDescription": "Warm up caches on project load.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "cachePriming",
+ "properties": {
+ "rust-analyzer.cachePriming.numThreads": {
+                    "markdownDescription": "How many worker threads to handle priming caches. The default `physical` means to use the number of physical cores.",
+ "default": "physical",
+ "anyOf": [
+ {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 255
+ },
+ {
+ "type": "string",
+ "enum": [
+ "physical",
+ "logical"
+ ],
+ "enumDescriptions": [
+ "Use the number of physical cores",
+ "Use the number of logical cores"
+ ]
}
+ ]
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.allTargets": {
+ "markdownDescription": "Pass `--all-targets` to cargo invocation.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.autoreload": {
+ "markdownDescription": "Automatically refresh project info via `cargo metadata` on\n`Cargo.toml` or `.cargo/config.toml` changes.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.buildScripts.enable": {
+ "markdownDescription": "Run build scripts (`build.rs`) for more precise code analysis.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.buildScripts.invocationLocation": {
+ "markdownDescription": "Specifies the working directory for running build scripts.\n- \"workspace\": run build scripts for a workspace in the workspace's root directory.\n This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`.\n- \"root\": run build scripts in the project's root directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.",
+ "default": "workspace",
+ "type": "string",
+ "enum": [
+ "workspace",
+ "root"
+ ],
+ "enumDescriptions": [
+ "The command will be executed in the corresponding workspace root.",
+ "The command will be executed in the project root."
+ ]
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.buildScripts.invocationStrategy": {
+ "markdownDescription": "Specifies the invocation strategy to use when running the build scripts command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.",
+ "default": "per_workspace",
+ "type": "string",
+ "enum": [
+ "per_workspace",
+ "once"
+ ],
+ "enumDescriptions": [
+ "The command will be executed for each workspace.",
+ "The command will be executed once."
+ ]
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.buildScripts.overrideCommand": {
+ "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#` and\n`#rust-analyzer.cargo.buildScripts.invocationLocation#`.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets\n```\n.",
+ "default": null,
+ "type": [
+ "null",
+ "array"
+ ],
+ "items": {
+ "type": "string"
}
- ]
- },
- "rust-analyzer.cargo.noDefaultFeatures": {
- "markdownDescription": "Whether to pass `--no-default-features` to cargo.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.cargo.sysroot": {
- "markdownDescription": "Relative path to the sysroot, or \"discover\" to try to automatically find it via\n\"rustc --print sysroot\".\n\nUnsetting this disables sysroot loading.\n\nThis option does not take effect until rust-analyzer is restarted.",
- "default": "discover",
- "type": [
- "null",
- "string"
- ]
- },
- "rust-analyzer.cargo.sysrootQueryMetadata": {
- "markdownDescription": "Whether to run cargo metadata on the sysroot library allowing rust-analyzer to analyze\nthird-party dependencies of the standard libraries.\n\nThis will cause `cargo` to create a lockfile in your sysroot directory. rust-analyzer\nwill attempt to clean up afterwards, but nevertheless requires the location to be\nwritable to.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.cargo.sysrootSrc": {
- "markdownDescription": "Relative path to the sysroot library sources. If left unset, this will default to\n`{cargo.sysroot}/lib/rustlib/src/rust/library`.\n\nThis option does not take effect until rust-analyzer is restarted.",
- "default": null,
- "type": [
- "null",
- "string"
- ]
- },
- "rust-analyzer.cargo.target": {
- "markdownDescription": "Compilation target override (target triple).",
- "default": null,
- "type": [
- "null",
- "string"
- ]
- },
- "rust-analyzer.cargo.targetDir": {
- "markdownDescription": "Optional path to a rust-analyzer specific target directory.\nThis prevents rust-analyzer's `cargo check` and initial build-script and proc-macro\nbuilding from locking the `Cargo.lock` at the expense of duplicating build artifacts.\n\nSet to `true` to use a subdirectory of the existing target directory or\nset to a path relative to the workspace to use that path.",
- "default": null,
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.buildScripts.rebuildOnSave": {
+ "markdownDescription": "Rerun proc-macros building/build-scripts running when proc-macro\nor build-script sources change and are saved.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.buildScripts.useRustcWrapper": {
+ "markdownDescription": "Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to\navoid checking unnecessary things.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.cfgs": {
+ "markdownDescription": "List of cfg options to enable with the given values.",
+ "default": {
+ "debug_assertions": null,
+ "miri": null
},
- {
+ "type": "object"
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.extraArgs": {
+ "markdownDescription": "Extra arguments that are passed to every cargo invocation.",
+ "default": [],
+ "type": "array",
+ "items": {
"type": "string"
}
- ]
- },
- "rust-analyzer.checkOnSave": {
- "markdownDescription": "Run the check command for diagnostics on save.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.check.allTargets": {
- "markdownDescription": "Check all targets and tests (`--all-targets`). Defaults to\n`#rust-analyzer.cargo.allTargets#`.",
- "default": null,
- "type": [
- "null",
- "boolean"
- ]
- },
- "rust-analyzer.check.command": {
- "markdownDescription": "Cargo command to use for `cargo check`.",
- "default": "check",
- "type": "string"
- },
- "rust-analyzer.check.extraArgs": {
- "markdownDescription": "Extra arguments for `cargo check`.",
- "default": [],
- "type": "array",
- "items": {
- "type": "string"
}
- },
- "rust-analyzer.check.extraEnv": {
- "markdownDescription": "Extra environment variables that will be set when running `cargo check`.\nExtends `#rust-analyzer.cargo.extraEnv#`.",
- "default": {},
- "type": "object"
- },
- "rust-analyzer.check.features": {
- "markdownDescription": "List of features to activate. Defaults to\n`#rust-analyzer.cargo.features#`.\n\nSet to `\"all\"` to pass `--all-features` to Cargo.",
- "default": null,
- "anyOf": [
- {
- "type": "string",
- "enum": [
- "all"
- ],
- "enumDescriptions": [
- "Pass `--all-features` to cargo"
- ]
- },
- {
- "type": "array",
- "items": {
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.extraEnv": {
+ "markdownDescription": "Extra environment variables that will be set when running cargo, rustc\nor other commands within the workspace. Useful for setting RUSTFLAGS.",
+ "default": {},
+ "type": "object"
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.features": {
+ "markdownDescription": "List of features to activate.\n\nSet this to `\"all\"` to pass `--all-features` to cargo.",
+ "default": [],
+ "anyOf": [
+ {
+ "type": "string",
+ "enum": [
+ "all"
+ ],
+ "enumDescriptions": [
+ "Pass `--all-features` to cargo"
+ ]
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.noDefaultFeatures": {
+ "markdownDescription": "Whether to pass `--no-default-features` to cargo.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.sysroot": {
+ "markdownDescription": "Relative path to the sysroot, or \"discover\" to try to automatically find it via\n\"rustc --print sysroot\".\n\nUnsetting this disables sysroot loading.\n\nThis option does not take effect until rust-analyzer is restarted.",
+ "default": "discover",
+ "type": [
+ "null",
+ "string"
+ ]
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.sysrootQueryMetadata": {
+                    "markdownDescription": "Whether to run cargo metadata on the sysroot library allowing rust-analyzer to analyze\nthird-party dependencies of the standard libraries.\n\nThis will cause `cargo` to create a lockfile in your sysroot directory. rust-analyzer\nwill attempt to clean up afterwards, but nevertheless requires the location to be\nwritable.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.sysrootSrc": {
+ "markdownDescription": "Relative path to the sysroot library sources. If left unset, this will default to\n`{cargo.sysroot}/lib/rustlib/src/rust/library`.\n\nThis option does not take effect until rust-analyzer is restarted.",
+ "default": null,
+ "type": [
+ "null",
+ "string"
+ ]
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.target": {
+ "markdownDescription": "Compilation target override (target triple).",
+ "default": null,
+ "type": [
+ "null",
+ "string"
+ ]
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
+ "rust-analyzer.cargo.targetDir": {
+ "markdownDescription": "Optional path to a rust-analyzer specific target directory.\nThis prevents rust-analyzer's `cargo check` and initial build-script and proc-macro\nbuilding from locking the `Cargo.lock` at the expense of duplicating build artifacts.\n\nSet to `true` to use a subdirectory of the existing target directory or\nset to a path relative to the workspace to use that path.",
+ "default": null,
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
"type": "string"
}
- },
- {
- "type": "null"
- }
- ]
- },
- "rust-analyzer.check.ignore": {
- "markdownDescription": "List of `cargo check` (or other command specified in `check.command`) diagnostics to ignore.\n\nFor example for `cargo check`: `dead_code`, `unused_imports`, `unused_variables`,...",
- "default": [],
- "type": "array",
- "items": {
- "type": "string"
- },
- "uniqueItems": true
- },
- "rust-analyzer.check.invocationLocation": {
- "markdownDescription": "Specifies the working directory for running checks.\n- \"workspace\": run checks for workspaces in the corresponding workspaces' root directories.\n This falls back to \"root\" if `#rust-analyzer.check.invocationStrategy#` is set to `once`.\n- \"root\": run checks in the project's root directory.\nThis config only has an effect when `#rust-analyzer.check.overrideCommand#`\nis set.",
- "default": "workspace",
- "type": "string",
- "enum": [
- "workspace",
- "root"
- ],
- "enumDescriptions": [
- "The command will be executed in the corresponding workspace root.",
- "The command will be executed in the project root."
- ]
- },
- "rust-analyzer.check.invocationStrategy": {
- "markdownDescription": "Specifies the invocation strategy to use when running the check command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.check.overrideCommand#`\nis set.",
- "default": "per_workspace",
- "type": "string",
- "enum": [
- "per_workspace",
- "once"
- ],
- "enumDescriptions": [
- "The command will be executed for each workspace.",
- "The command will be executed once."
- ]
- },
- "rust-analyzer.check.noDefaultFeatures": {
- "markdownDescription": "Whether to pass `--no-default-features` to Cargo. Defaults to\n`#rust-analyzer.cargo.noDefaultFeatures#`.",
- "default": null,
- "type": [
- "null",
- "boolean"
- ]
- },
- "rust-analyzer.check.overrideCommand": {
- "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#` and\n`#rust-analyzer.check.invocationLocation#`.\n\nIf `$saved_file` is part of the command, rust-analyzer will pass\nthe absolute path of the saved file to the provided command. This is\nintended to be used with non-Cargo build systems.\nNote that `$saved_file` is experimental and may be removed in the future.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.",
- "default": null,
- "type": [
- "null",
- "array"
- ],
- "items": {
+ ]
+ }
+ }
+ },
+ {
+ "title": "general",
+ "properties": {
+ "rust-analyzer.checkOnSave": {
+ "markdownDescription": "Run the check command for diagnostics on save.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "check",
+ "properties": {
+ "rust-analyzer.check.allTargets": {
+ "markdownDescription": "Check all targets and tests (`--all-targets`). Defaults to\n`#rust-analyzer.cargo.allTargets#`.",
+ "default": null,
+ "type": [
+ "null",
+ "boolean"
+ ]
+ }
+ }
+ },
+ {
+ "title": "check",
+ "properties": {
+ "rust-analyzer.check.command": {
+ "markdownDescription": "Cargo command to use for `cargo check`.",
+ "default": "check",
"type": "string"
}
- },
- "rust-analyzer.check.targets": {
- "markdownDescription": "Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.\n\nCan be a single target, e.g. `\"x86_64-unknown-linux-gnu\"` or a list of targets, e.g.\n`[\"aarch64-apple-darwin\", \"x86_64-apple-darwin\"]`.\n\nAliased as `\"checkOnSave.targets\"`.",
- "default": null,
- "anyOf": [
- {
- "type": "null"
- },
- {
+ }
+ },
+ {
+ "title": "check",
+ "properties": {
+ "rust-analyzer.check.extraArgs": {
+ "markdownDescription": "Extra arguments for `cargo check`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ {
+ "title": "check",
+ "properties": {
+ "rust-analyzer.check.extraEnv": {
+ "markdownDescription": "Extra environment variables that will be set when running `cargo check`.\nExtends `#rust-analyzer.cargo.extraEnv#`.",
+ "default": {},
+ "type": "object"
+ }
+ }
+ },
+ {
+ "title": "check",
+ "properties": {
+ "rust-analyzer.check.features": {
+ "markdownDescription": "List of features to activate. Defaults to\n`#rust-analyzer.cargo.features#`.\n\nSet to `\"all\"` to pass `--all-features` to Cargo.",
+ "default": null,
+ "anyOf": [
+ {
+ "type": "string",
+ "enum": [
+ "all"
+ ],
+ "enumDescriptions": [
+ "Pass `--all-features` to cargo"
+ ]
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ {
+ "type": "null"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "title": "check",
+ "properties": {
+ "rust-analyzer.check.ignore": {
+ "markdownDescription": "List of `cargo check` (or other command specified in `check.command`) diagnostics to ignore.\n\nFor example for `cargo check`: `dead_code`, `unused_imports`, `unused_variables`,...",
+ "default": [],
+ "type": "array",
+ "items": {
"type": "string"
},
- {
- "type": "array",
- "items": {
+ "uniqueItems": true
+ }
+ }
+ },
+ {
+ "title": "check",
+ "properties": {
+ "rust-analyzer.check.invocationLocation": {
+ "markdownDescription": "Specifies the working directory for running checks.\n- \"workspace\": run checks for workspaces in the corresponding workspaces' root directories.\n This falls back to \"root\" if `#rust-analyzer.check.invocationStrategy#` is set to `once`.\n- \"root\": run checks in the project's root directory.\nThis config only has an effect when `#rust-analyzer.check.overrideCommand#`\nis set.",
+ "default": "workspace",
+ "type": "string",
+ "enum": [
+ "workspace",
+ "root"
+ ],
+ "enumDescriptions": [
+ "The command will be executed in the corresponding workspace root.",
+ "The command will be executed in the project root."
+ ]
+ }
+ }
+ },
+ {
+ "title": "check",
+ "properties": {
+ "rust-analyzer.check.invocationStrategy": {
+ "markdownDescription": "Specifies the invocation strategy to use when running the check command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.check.overrideCommand#`\nis set.",
+ "default": "per_workspace",
+ "type": "string",
+ "enum": [
+ "per_workspace",
+ "once"
+ ],
+ "enumDescriptions": [
+ "The command will be executed for each workspace.",
+ "The command will be executed once."
+ ]
+ }
+ }
+ },
+ {
+ "title": "check",
+ "properties": {
+ "rust-analyzer.check.noDefaultFeatures": {
+ "markdownDescription": "Whether to pass `--no-default-features` to Cargo. Defaults to\n`#rust-analyzer.cargo.noDefaultFeatures#`.",
+ "default": null,
+ "type": [
+ "null",
+ "boolean"
+ ]
+ }
+ }
+ },
+ {
+ "title": "check",
+ "properties": {
+ "rust-analyzer.check.overrideCommand": {
+ "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#` and\n`#rust-analyzer.check.invocationLocation#`.\n\nIf `$saved_file` is part of the command, rust-analyzer will pass\nthe absolute path of the saved file to the provided command. This is\nintended to be used with non-Cargo build systems.\nNote that `$saved_file` is experimental and may be removed in the future.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.",
+ "default": null,
+ "type": [
+ "null",
+ "array"
+ ],
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ {
+ "title": "check",
+ "properties": {
+ "rust-analyzer.check.targets": {
+ "markdownDescription": "Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.\n\nCan be a single target, e.g. `\"x86_64-unknown-linux-gnu\"` or a list of targets, e.g.\n`[\"aarch64-apple-darwin\", \"x86_64-apple-darwin\"]`.\n\nAliased as `\"checkOnSave.targets\"`.",
+ "default": null,
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
"type": "string"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ }
+ }
+ },
+ {
+ "title": "check",
+ "properties": {
+ "rust-analyzer.check.workspace": {
+ "markdownDescription": "Whether `--workspace` should be passed to `cargo check`.\nIf false, `-p <package>` will be passed instead.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "completion",
+ "properties": {
+ "rust-analyzer.completion.autoimport.enable": {
+ "markdownDescription": "Toggles the additional completions that automatically add imports when completed.\nNote that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "completion",
+ "properties": {
+ "rust-analyzer.completion.autoself.enable": {
+ "markdownDescription": "Toggles the additional completions that automatically show method calls and field accesses\nwith `self` prefixed to them when inside a method.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "completion",
+ "properties": {
+ "rust-analyzer.completion.callable.snippets": {
+ "markdownDescription": "Whether to add parenthesis and argument snippets when completing function.",
+ "default": "fill_arguments",
+ "type": "string",
+ "enum": [
+ "fill_arguments",
+ "add_parentheses",
+ "none"
+ ],
+ "enumDescriptions": [
+ "Add call parentheses and pre-fill arguments.",
+ "Add call parentheses.",
+ "Do no snippet completions for callables."
+ ]
+ }
+ }
+ },
+ {
+ "title": "completion",
+ "properties": {
+ "rust-analyzer.completion.fullFunctionSignatures.enable": {
+ "markdownDescription": "Whether to show full function/method signatures in completion docs.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "completion",
+ "properties": {
+ "rust-analyzer.completion.limit": {
+ "markdownDescription": "Maximum number of completions to return. If `None`, the limit is infinite.",
+ "default": null,
+ "type": [
+ "null",
+ "integer"
+ ],
+ "minimum": 0
+ }
+ }
+ },
+ {
+ "title": "completion",
+ "properties": {
+ "rust-analyzer.completion.postfix.enable": {
+ "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "completion",
+ "properties": {
+ "rust-analyzer.completion.privateEditable.enable": {
+ "markdownDescription": "Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "completion",
+ "properties": {
+ "rust-analyzer.completion.snippets.custom": {
+ "markdownDescription": "Custom completion snippets.",
+ "default": {
+ "Arc::new": {
+ "postfix": "arc",
+ "body": "Arc::new(${receiver})",
+ "requires": "std::sync::Arc",
+ "description": "Put the expression into an `Arc`",
+ "scope": "expr"
+ },
+ "Rc::new": {
+ "postfix": "rc",
+ "body": "Rc::new(${receiver})",
+ "requires": "std::rc::Rc",
+ "description": "Put the expression into an `Rc`",
+ "scope": "expr"
+ },
+ "Box::pin": {
+ "postfix": "pinbox",
+ "body": "Box::pin(${receiver})",
+ "requires": "std::boxed::Box",
+ "description": "Put the expression into a pinned `Box`",
+ "scope": "expr"
+ },
+ "Ok": {
+ "postfix": "ok",
+ "body": "Ok(${receiver})",
+ "description": "Wrap the expression in a `Result::Ok`",
+ "scope": "expr"
+ },
+ "Err": {
+ "postfix": "err",
+ "body": "Err(${receiver})",
+ "description": "Wrap the expression in a `Result::Err`",
+ "scope": "expr"
+ },
+ "Some": {
+ "postfix": "some",
+ "body": "Some(${receiver})",
+ "description": "Wrap the expression in an `Option::Some`",
+ "scope": "expr"
}
- }
- ]
- },
- "rust-analyzer.check.workspace": {
- "markdownDescription": "Whether `--workspace` should be passed to `cargo check`.\nIf false, `-p <package>` will be passed instead.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.completion.autoimport.enable": {
- "markdownDescription": "Toggles the additional completions that automatically add imports when completed.\nNote that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.completion.autoself.enable": {
- "markdownDescription": "Toggles the additional completions that automatically show method calls and field accesses\nwith `self` prefixed to them when inside a method.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.completion.callable.snippets": {
- "markdownDescription": "Whether to add parenthesis and argument snippets when completing function.",
- "default": "fill_arguments",
- "type": "string",
- "enum": [
- "fill_arguments",
- "add_parentheses",
- "none"
- ],
- "enumDescriptions": [
- "Add call parentheses and pre-fill arguments.",
- "Add call parentheses.",
- "Do no snippet completions for callables."
- ]
- },
- "rust-analyzer.completion.fullFunctionSignatures.enable": {
- "markdownDescription": "Whether to show full function/method signatures in completion docs.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.completion.limit": {
- "markdownDescription": "Maximum number of completions to return. If `None`, the limit is infinite.",
- "default": null,
- "type": [
- "null",
- "integer"
- ],
- "minimum": 0
- },
- "rust-analyzer.completion.postfix.enable": {
- "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.completion.privateEditable.enable": {
- "markdownDescription": "Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.completion.snippets.custom": {
- "markdownDescription": "Custom completion snippets.",
- "default": {
- "Arc::new": {
- "postfix": "arc",
- "body": "Arc::new(${receiver})",
- "requires": "std::sync::Arc",
- "description": "Put the expression into an `Arc`",
- "scope": "expr"
- },
- "Rc::new": {
- "postfix": "rc",
- "body": "Rc::new(${receiver})",
- "requires": "std::rc::Rc",
- "description": "Put the expression into an `Rc`",
- "scope": "expr"
- },
- "Box::pin": {
- "postfix": "pinbox",
- "body": "Box::pin(${receiver})",
- "requires": "std::boxed::Box",
- "description": "Put the expression into a pinned `Box`",
- "scope": "expr"
- },
- "Ok": {
- "postfix": "ok",
- "body": "Ok(${receiver})",
- "description": "Wrap the expression in a `Result::Ok`",
- "scope": "expr"
},
- "Err": {
- "postfix": "err",
- "body": "Err(${receiver})",
- "description": "Wrap the expression in a `Result::Err`",
- "scope": "expr"
+ "type": "object"
+ }
+ }
+ },
+ {
+ "title": "completion",
+ "properties": {
+ "rust-analyzer.completion.termSearch.enable": {
+ "markdownDescription": "Whether to enable term search based snippets like `Some(foo.bar().baz())`.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "completion",
+ "properties": {
+ "rust-analyzer.completion.termSearch.fuel": {
+ "markdownDescription": "Term search fuel in \"units of work\" for autocompletion (Defaults to 200).",
+ "default": 200,
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ },
+ {
+ "title": "diagnostics",
+ "properties": {
+ "rust-analyzer.diagnostics.disabled": {
+ "markdownDescription": "List of rust-analyzer diagnostics to disable.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
},
- "Some": {
- "postfix": "some",
- "body": "Some(${receiver})",
- "description": "Wrap the expression in an `Option::Some`",
- "scope": "expr"
+ "uniqueItems": true
+ }
+ }
+ },
+ {
+ "title": "diagnostics",
+ "properties": {
+ "rust-analyzer.diagnostics.enable": {
+ "markdownDescription": "Whether to show native rust-analyzer diagnostics.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "diagnostics",
+ "properties": {
+ "rust-analyzer.diagnostics.experimental.enable": {
+ "markdownDescription": "Whether to show experimental rust-analyzer diagnostics that might\nhave more false positives than usual.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "diagnostics",
+ "properties": {
+ "rust-analyzer.diagnostics.remapPrefix": {
+ "markdownDescription": "Map of prefixes to be substituted when parsing diagnostic file paths.\nThis should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.",
+ "default": {},
+ "type": "object"
+ }
+ }
+ },
+ {
+ "title": "diagnostics",
+ "properties": {
+ "rust-analyzer.diagnostics.styleLints.enable": {
+ "markdownDescription": "Whether to run additional style lints.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "diagnostics",
+ "properties": {
+ "rust-analyzer.diagnostics.warningsAsHint": {
+ "markdownDescription": "List of warnings that should be displayed with hint severity.\n\nThe warnings will be indicated by faded text or three dots in code\nand will not show up in the `Problems Panel`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
}
- },
- "type": "object"
- },
- "rust-analyzer.completion.termSearch.enable": {
- "markdownDescription": "Whether to enable term search based snippets like `Some(foo.bar().baz())`.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.completion.termSearch.fuel": {
- "markdownDescription": "Term search fuel in \"units of work\" for autocompletion (Defaults to 200).",
- "default": 200,
- "type": "integer",
- "minimum": 0
- },
- "rust-analyzer.diagnostics.disabled": {
- "markdownDescription": "List of rust-analyzer diagnostics to disable.",
- "default": [],
- "type": "array",
- "items": {
- "type": "string"
- },
- "uniqueItems": true
- },
- "rust-analyzer.diagnostics.enable": {
- "markdownDescription": "Whether to show native rust-analyzer diagnostics.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.diagnostics.experimental.enable": {
- "markdownDescription": "Whether to show experimental rust-analyzer diagnostics that might\nhave more false positives than usual.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.diagnostics.remapPrefix": {
- "markdownDescription": "Map of prefixes to be substituted when parsing diagnostic file paths.\nThis should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.",
- "default": {},
- "type": "object"
- },
- "rust-analyzer.diagnostics.styleLints.enable": {
- "markdownDescription": "Whether to run additional style lints.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.diagnostics.warningsAsHint": {
- "markdownDescription": "List of warnings that should be displayed with hint severity.\n\nThe warnings will be indicated by faded text or three dots in code\nand will not show up in the `Problems Panel`.",
- "default": [],
- "type": "array",
- "items": {
- "type": "string"
}
- },
- "rust-analyzer.diagnostics.warningsAsInfo": {
- "markdownDescription": "List of warnings that should be displayed with info severity.\n\nThe warnings will be indicated by a blue squiggly underline in code\nand a blue icon in the `Problems Panel`.",
- "default": [],
- "type": "array",
- "items": {
- "type": "string"
+ }
+ },
+ {
+ "title": "diagnostics",
+ "properties": {
+ "rust-analyzer.diagnostics.warningsAsInfo": {
+ "markdownDescription": "List of warnings that should be displayed with info severity.\n\nThe warnings will be indicated by a blue squiggly underline in code\nand a blue icon in the `Problems Panel`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
}
- },
- "rust-analyzer.files.excludeDirs": {
- "markdownDescription": "These directories will be ignored by rust-analyzer. They are\nrelative to the workspace root, and globs are not supported. You may\nalso need to add the folders to Code's `files.watcherExclude`.",
- "default": [],
- "type": "array",
- "items": {
- "type": "string"
+ }
+ },
+ {
+ "title": "files",
+ "properties": {
+ "rust-analyzer.files.excludeDirs": {
+ "markdownDescription": "These directories will be ignored by rust-analyzer. They are\nrelative to the workspace root, and globs are not supported. You may\nalso need to add the folders to Code's `files.watcherExclude`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
}
- },
- "rust-analyzer.files.watcher": {
- "markdownDescription": "Controls file watching implementation.",
- "default": "client",
- "type": "string",
- "enum": [
- "client",
- "server"
- ],
- "enumDescriptions": [
- "Use the client (editor) to watch files for changes",
- "Use server-side file watching"
- ]
- },
- "rust-analyzer.highlightRelated.breakPoints.enable": {
- "markdownDescription": "Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.highlightRelated.closureCaptures.enable": {
- "markdownDescription": "Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.highlightRelated.exitPoints.enable": {
- "markdownDescription": "Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.highlightRelated.references.enable": {
- "markdownDescription": "Enables highlighting of related references while the cursor is on any identifier.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.highlightRelated.yieldPoints.enable": {
- "markdownDescription": "Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.hover.actions.debug.enable": {
- "markdownDescription": "Whether to show `Debug` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.hover.actions.enable": {
- "markdownDescription": "Whether to show HoverActions in Rust files.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.hover.actions.gotoTypeDef.enable": {
- "markdownDescription": "Whether to show `Go to Type Definition` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.hover.actions.implementations.enable": {
- "markdownDescription": "Whether to show `Implementations` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.hover.actions.references.enable": {
- "markdownDescription": "Whether to show `References` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.hover.actions.run.enable": {
- "markdownDescription": "Whether to show `Run` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.hover.documentation.enable": {
- "markdownDescription": "Whether to show documentation on hover.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.hover.documentation.keywords.enable": {
- "markdownDescription": "Whether to show keyword hover popups. Only applies when\n`#rust-analyzer.hover.documentation.enable#` is set.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.hover.links.enable": {
- "markdownDescription": "Use markdown syntax for links on hover.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.hover.memoryLayout.alignment": {
- "markdownDescription": "How to render the align information in a memory layout hover.",
- "default": "hexadecimal",
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "string",
- "enum": [
- "both",
- "decimal",
- "hexadecimal"
- ],
- "enumDescriptions": [
- "Render as 12 (0xC)",
- "Render as 12",
- "Render as 0xC"
+ }
+ },
+ {
+ "title": "files",
+ "properties": {
+ "rust-analyzer.files.watcher": {
+ "markdownDescription": "Controls file watching implementation.",
+ "default": "client",
+ "type": "string",
+ "enum": [
+ "client",
+ "server"
+ ],
+ "enumDescriptions": [
+ "Use the client (editor) to watch files for changes",
+ "Use server-side file watching"
+ ]
+ }
+ }
+ },
+ {
+ "title": "highlightRelated",
+ "properties": {
+ "rust-analyzer.highlightRelated.breakPoints.enable": {
+ "markdownDescription": "Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "highlightRelated",
+ "properties": {
+ "rust-analyzer.highlightRelated.closureCaptures.enable": {
+ "markdownDescription": "Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "highlightRelated",
+ "properties": {
+ "rust-analyzer.highlightRelated.exitPoints.enable": {
+ "markdownDescription": "Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "highlightRelated",
+ "properties": {
+ "rust-analyzer.highlightRelated.references.enable": {
+ "markdownDescription": "Enables highlighting of related references while the cursor is on any identifier.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "highlightRelated",
+ "properties": {
+ "rust-analyzer.highlightRelated.yieldPoints.enable": {
+ "markdownDescription": "Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.actions.debug.enable": {
+ "markdownDescription": "Whether to show `Debug` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.actions.enable": {
+ "markdownDescription": "Whether to show HoverActions in Rust files.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.actions.gotoTypeDef.enable": {
+ "markdownDescription": "Whether to show `Go to Type Definition` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.actions.implementations.enable": {
+ "markdownDescription": "Whether to show `Implementations` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.actions.references.enable": {
+ "markdownDescription": "Whether to show `References` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.actions.run.enable": {
+ "markdownDescription": "Whether to show `Run` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.documentation.enable": {
+ "markdownDescription": "Whether to show documentation on hover.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.documentation.keywords.enable": {
+ "markdownDescription": "Whether to show keyword hover popups. Only applies when\n`#rust-analyzer.hover.documentation.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.links.enable": {
+ "markdownDescription": "Use markdown syntax for links on hover.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.memoryLayout.alignment": {
+ "markdownDescription": "How to render the align information in a memory layout hover.",
+ "default": "hexadecimal",
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "string",
+ "enum": [
+ "both",
+ "decimal",
+ "hexadecimal"
+ ],
+ "enumDescriptions": [
+ "Render as 12 (0xC)",
+ "Render as 12",
+ "Render as 0xC"
+ ]
+ }
+ ]
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.memoryLayout.enable": {
+ "markdownDescription": "Whether to show memory layout data on hover.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.memoryLayout.niches": {
+ "markdownDescription": "How to render the niche information in a memory layout hover.",
+ "default": false,
+ "type": [
+ "null",
+ "boolean"
+ ]
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.memoryLayout.offset": {
+ "markdownDescription": "How to render the offset information in a memory layout hover.",
+ "default": "hexadecimal",
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "string",
+ "enum": [
+ "both",
+ "decimal",
+ "hexadecimal"
+ ],
+ "enumDescriptions": [
+ "Render as 12 (0xC)",
+ "Render as 12",
+ "Render as 0xC"
+ ]
+ }
+ ]
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.memoryLayout.size": {
+ "markdownDescription": "How to render the size information in a memory layout hover.",
+ "default": "both",
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "string",
+ "enum": [
+ "both",
+ "decimal",
+ "hexadecimal"
+ ],
+ "enumDescriptions": [
+ "Render as 12 (0xC)",
+ "Render as 12",
+ "Render as 0xC"
+ ]
+ }
+ ]
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.show.enumVariants": {
+ "markdownDescription": "How many variants of an enum to display when hovering on. Show none if empty.",
+ "default": 5,
+ "type": [
+ "null",
+ "integer"
+ ],
+ "minimum": 0
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.show.fields": {
+ "markdownDescription": "How many fields of a struct, variant or union to display when hovering on. Show none if empty.",
+ "default": 5,
+ "type": [
+ "null",
+ "integer"
+ ],
+ "minimum": 0
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
+ "rust-analyzer.hover.show.traitAssocItems": {
+ "markdownDescription": "How many associated items of a trait to display when hovering a trait.",
+ "default": null,
+ "type": [
+ "null",
+ "integer"
+ ],
+ "minimum": 0
+ }
+ }
+ },
+ {
+ "title": "imports",
+ "properties": {
+ "rust-analyzer.imports.granularity.enforce": {
+ "markdownDescription": "Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "imports",
+ "properties": {
+ "rust-analyzer.imports.granularity.group": {
+ "markdownDescription": "How imports should be grouped into use statements.",
+ "default": "crate",
+ "type": "string",
+ "enum": [
+ "preserve",
+ "crate",
+ "module",
+ "item",
+ "one"
+ ],
+ "enumDescriptions": [
+ "Do not change the granularity of any imports and preserve the original structure written by the developer.",
+ "Merge imports from the same crate into a single use statement. Conversely, imports from different crates are split into separate statements.",
+ "Merge imports from the same module into a single use statement. Conversely, imports from different modules are split into separate statements.",
+ "Flatten imports so that each has its own use statement.",
+ "Merge all imports into a single use statement as long as they have the same visibility and attributes."
+ ]
+ }
+ }
+ },
+ {
+ "title": "imports",
+ "properties": {
+ "rust-analyzer.imports.group.enable": {
+ "markdownDescription": "Group inserted imports by the [following order](https://rust-analyzer.github.io/manual.html#auto-import). Groups are separated by newlines.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "imports",
+ "properties": {
+ "rust-analyzer.imports.merge.glob": {
+ "markdownDescription": "Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "imports",
+ "properties": {
+ "rust-analyzer.imports.preferNoStd": {
+ "markdownDescription": "Prefer to unconditionally use imports of the core and alloc crate, over the std crate.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "imports",
+ "properties": {
+ "rust-analyzer.imports.preferPrelude": {
+ "markdownDescription": "Whether to prefer import paths containing a `prelude` module.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "imports",
+ "properties": {
+ "rust-analyzer.imports.prefix": {
+ "markdownDescription": "The path structure for newly inserted paths to use.",
+ "default": "plain",
+ "type": "string",
+ "enum": [
+ "plain",
+ "self",
+ "crate"
+ ],
+ "enumDescriptions": [
+ "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.",
+ "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item. Prefixes `self` in front of the path if it starts with a module.",
+ "Force import paths to be absolute by always starting them with `crate` or the extern crate name they come from."
+ ]
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.bindingModeHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for binding modes.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.chainingHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for method chains.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.closingBraceHints.enable": {
+ "markdownDescription": "Whether to show inlay hints after a closing `}` to indicate what item it belongs to.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.closingBraceHints.minLines": {
+ "markdownDescription": "Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1\nto always show them).",
+ "default": 25,
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.closureCaptureHints.enable": {
+ "markdownDescription": "Whether to show inlay hints for closure captures.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.closureReturnTypeHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for return types of closures.",
+ "default": "never",
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "with_block"
+ ],
+ "enumDescriptions": [
+ "Always show type hints for return types of closures.",
+ "Never show type hints for return types of closures.",
+ "Only show type hints for return types of closures with blocks."
+ ]
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.closureStyle": {
+ "markdownDescription": "Closure notation in type and chaining inlay hints.",
+ "default": "impl_fn",
+ "type": "string",
+ "enum": [
+ "impl_fn",
+ "rust_analyzer",
+ "with_id",
+ "hide"
+ ],
+ "enumDescriptions": [
+ "`impl_fn`: `impl FnMut(i32, u64) -> i8`",
+ "`rust_analyzer`: `|i32, u64| -> i8`",
+ "`with_id`: `{closure#14352}`, where that id is the unique number of the closure in r-a internals",
+ "`hide`: Shows `...` for every closure type"
+ ]
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.discriminantHints.enable": {
+ "markdownDescription": "Whether to show enum variant discriminant hints.",
+ "default": "never",
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "fieldless"
+ ],
+ "enumDescriptions": [
+ "Always show all discriminant hints.",
+ "Never show discriminant hints.",
+ "Only show discriminant hints on fieldless enum variants."
+ ]
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.expressionAdjustmentHints.enable": {
+ "markdownDescription": "Whether to show inlay hints for type adjustments.",
+ "default": "never",
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "reborrow"
+ ],
+ "enumDescriptions": [
+ "Always show all adjustment hints.",
+ "Never show adjustment hints.",
+ "Only show auto borrow and dereference adjustment hints."
+ ]
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.expressionAdjustmentHints.hideOutsideUnsafe": {
+ "markdownDescription": "Whether to hide inlay hints for type adjustments outside of `unsafe` blocks.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.expressionAdjustmentHints.mode": {
+ "markdownDescription": "Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).",
+ "default": "prefix",
+ "type": "string",
+ "enum": [
+ "prefix",
+ "postfix",
+ "prefer_prefix",
+ "prefer_postfix"
+ ],
+ "enumDescriptions": [
+ "Always show adjustment hints as prefix (`*expr`).",
+ "Always show adjustment hints as postfix (`expr.*`).",
+ "Show prefix or postfix depending on which uses less parenthesis, preferring prefix.",
+ "Show prefix or postfix depending on which uses less parenthesis, preferring postfix."
+ ]
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.implicitDrops.enable": {
+ "markdownDescription": "Whether to show implicit drop hints.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.lifetimeElisionHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for elided lifetimes in function signatures.",
+ "default": "never",
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "skip_trivial"
+ ],
+ "enumDescriptions": [
+ "Always show lifetime elision hints.",
+ "Never show lifetime elision hints.",
+ "Only show lifetime elision hints if a return type is involved."
+ ]
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames": {
+ "markdownDescription": "Whether to prefer using parameter names as the name for elided lifetime hints if possible.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.maxLength": {
+ "markdownDescription": "Maximum length for inlay hints. Set to null to have an unlimited length.",
+ "default": 25,
+ "type": [
+ "null",
+ "integer"
+ ],
+ "minimum": 0
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.parameterHints.enable": {
+ "markdownDescription": "Whether to show function parameter name inlay hints at the call\nsite.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.rangeExclusiveHints.enable": {
+ "markdownDescription": "Whether to show exclusive range inlay hints.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.reborrowHints.enable": {
+ "markdownDescription": "Whether to show inlay hints for compiler inserted reborrows.\nThis setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.",
+ "default": "never",
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "mutable"
+ ],
+ "enumDescriptions": [
+ "Always show reborrow hints.",
+ "Never show reborrow hints.",
+ "Only show mutable reborrow hints."
+ ]
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.renderColons": {
+ "markdownDescription": "Whether to render leading colons for type hints, and trailing colons for parameter hints.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.typeHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for variables.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.typeHints.hideClosureInitialization": {
+ "markdownDescription": "Whether to hide inlay type hints for `let` statements that initialize to a closure.\nOnly applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "inlayHints",
+ "properties": {
+ "rust-analyzer.inlayHints.typeHints.hideNamedConstructor": {
+ "markdownDescription": "Whether to hide inlay type hints for constructors.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "interpret",
+ "properties": {
+ "rust-analyzer.interpret.tests": {
+ "markdownDescription": "Enables the experimental support for interpreting tests.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "joinLines",
+ "properties": {
+ "rust-analyzer.joinLines.joinAssignments": {
+ "markdownDescription": "Join lines merges consecutive declaration and initialization of an assignment.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "joinLines",
+ "properties": {
+ "rust-analyzer.joinLines.joinElseIf": {
+ "markdownDescription": "Join lines inserts else between consecutive ifs.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "joinLines",
+ "properties": {
+ "rust-analyzer.joinLines.removeTrailingComma": {
+ "markdownDescription": "Join lines removes trailing commas.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "joinLines",
+ "properties": {
+ "rust-analyzer.joinLines.unwrapTrivialBlock": {
+ "markdownDescription": "Join lines unwraps trivial blocks.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "lens",
+ "properties": {
+ "rust-analyzer.lens.debug.enable": {
+ "markdownDescription": "Whether to show `Debug` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "lens",
+ "properties": {
+ "rust-analyzer.lens.enable": {
+ "markdownDescription": "Whether to show CodeLens in Rust files.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "lens",
+ "properties": {
+ "rust-analyzer.lens.forceCustomCommands": {
+ "markdownDescription": "Internal config: use custom client-side commands even when the\nclient doesn't set the corresponding capability.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "lens",
+ "properties": {
+ "rust-analyzer.lens.implementations.enable": {
+ "markdownDescription": "Whether to show `Implementations` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "lens",
+ "properties": {
+ "rust-analyzer.lens.location": {
+ "markdownDescription": "Where to render annotations.",
+ "default": "above_name",
+ "type": "string",
+ "enum": [
+ "above_name",
+ "above_whole_item"
+ ],
+ "enumDescriptions": [
+ "Render annotations above the name of the item.",
+ "Render annotations above the whole item, including documentation comments and attributes."
+ ]
+ }
+ }
+ },
+ {
+ "title": "lens",
+ "properties": {
+ "rust-analyzer.lens.references.adt.enable": {
+ "markdownDescription": "Whether to show `References` lens for Struct, Enum, and Union.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "lens",
+ "properties": {
+ "rust-analyzer.lens.references.enumVariant.enable": {
+ "markdownDescription": "Whether to show `References` lens for Enum Variants.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "lens",
+ "properties": {
+ "rust-analyzer.lens.references.method.enable": {
+ "markdownDescription": "Whether to show `Method References` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "lens",
+ "properties": {
+ "rust-analyzer.lens.references.trait.enable": {
+ "markdownDescription": "Whether to show `References` lens for Trait.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "lens",
+ "properties": {
+ "rust-analyzer.lens.run.enable": {
+ "markdownDescription": "Whether to show `Run` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "general",
+ "properties": {
+ "rust-analyzer.linkedProjects": {
+ "markdownDescription": "Disable project auto-discovery in favor of explicitly specified set\nof projects.\n\nElements must be paths pointing to `Cargo.toml`,\n`rust-project.json`, `.rs` files (which will be treated as standalone files) or JSON\nobjects in `rust-project.json` format.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": [
+ "string",
+ "object"
]
}
- ]
- },
- "rust-analyzer.hover.memoryLayout.enable": {
- "markdownDescription": "Whether to show memory layout data on hover.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.hover.memoryLayout.niches": {
- "markdownDescription": "How to render the niche information in a memory layout hover.",
- "default": false,
- "type": [
- "null",
- "boolean"
- ]
- },
- "rust-analyzer.hover.memoryLayout.offset": {
- "markdownDescription": "How to render the offset information in a memory layout hover.",
- "default": "hexadecimal",
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "string",
- "enum": [
- "both",
- "decimal",
- "hexadecimal"
- ],
- "enumDescriptions": [
- "Render as 12 (0xC)",
- "Render as 12",
- "Render as 0xC"
- ]
+ }
+ }
+ },
+ {
+ "title": "lru",
+ "properties": {
+ "rust-analyzer.lru.capacity": {
+ "markdownDescription": "Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.",
+ "default": null,
+ "type": [
+ "null",
+ "integer"
+ ],
+ "minimum": 0
+ }
+ }
+ },
+ {
+ "title": "lru",
+ "properties": {
+ "rust-analyzer.lru.query.capacities": {
+ "markdownDescription": "Sets the LRU capacity of the specified queries.",
+ "default": {},
+ "type": "object"
+ }
+ }
+ },
+ {
+ "title": "notifications",
+ "properties": {
+ "rust-analyzer.notifications.cargoTomlNotFound": {
+ "markdownDescription": "Whether to show `can't find Cargo.toml` error message.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "notifications",
+ "properties": {
+ "rust-analyzer.notifications.unindexedProject": {
+ "markdownDescription": "Whether to send an UnindexedProject notification to the client.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "general",
+ "properties": {
+ "rust-analyzer.numThreads": {
+ "markdownDescription": "How many worker threads in the main loop. The default `null` means to pick automatically.",
+ "default": null,
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 255
+ },
+ {
+ "type": "string",
+ "enum": [
+ "physical",
+ "logical"
+ ],
+ "enumDescriptions": [
+ "Use the number of physical cores",
+ "Use the number of logical cores"
+ ]
+ }
+ ]
+ }
+ }
+ },
+ {
+ "title": "procMacro",
+ "properties": {
+ "rust-analyzer.procMacro.attributes.enable": {
+ "markdownDescription": "Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "procMacro",
+ "properties": {
+ "rust-analyzer.procMacro.enable": {
+ "markdownDescription": "Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "procMacro",
+ "properties": {
+ "rust-analyzer.procMacro.ignored": {
+ "markdownDescription": "These proc-macros will be ignored when trying to expand them.\n\nThis config takes a map of crate names with the exported proc-macro names to ignore as values.",
+ "default": {},
+ "type": "object"
+ }
+ }
+ },
+ {
+ "title": "procMacro",
+ "properties": {
+ "rust-analyzer.procMacro.server": {
+ "markdownDescription": "Internal config, path to proc-macro server executable.",
+ "default": null,
+ "type": [
+ "null",
+ "string"
+ ]
+ }
+ }
+ },
+ {
+ "title": "references",
+ "properties": {
+ "rust-analyzer.references.excludeImports": {
+ "markdownDescription": "Exclude imports from find-all-references.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "references",
+ "properties": {
+ "rust-analyzer.references.excludeTests": {
+ "markdownDescription": "Exclude tests from find-all-references.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "runnables",
+ "properties": {
+ "rust-analyzer.runnables.command": {
+ "markdownDescription": "Command to be executed instead of 'cargo' for runnables.",
+ "default": null,
+ "type": [
+ "null",
+ "string"
+ ]
+ }
+ }
+ },
+ {
+ "title": "runnables",
+ "properties": {
+ "rust-analyzer.runnables.extraArgs": {
+ "markdownDescription": "Additional arguments to be passed to cargo for runnables such as\ntests or binaries. For example, it may be `--release`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
}
- ]
- },
- "rust-analyzer.hover.memoryLayout.size": {
- "markdownDescription": "How to render the size information in a memory layout hover.",
- "default": "both",
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "string",
- "enum": [
- "both",
- "decimal",
- "hexadecimal"
- ],
- "enumDescriptions": [
- "Render as 12 (0xC)",
- "Render as 12",
- "Render as 0xC"
- ]
+ }
+ }
+ },
+ {
+ "title": "runnables",
+ "properties": {
+ "rust-analyzer.runnables.extraTestBinaryArgs": {
+ "markdownDescription": "Additional arguments to be passed through Cargo to launched tests, benchmarks, or\ndoc-tests.\n\nUnless the launched target uses a\n[custom test harness](https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-harness-field),\nthey will end up being interpreted as options to\n[`rustc`’s built-in test harness (“libtest”)](https://doc.rust-lang.org/rustc/tests/index.html#cli-arguments).",
+ "default": [
+ "--show-output"
+ ],
+ "type": "array",
+ "items": {
+ "type": "string"
}
- ]
- },
- "rust-analyzer.hover.show.enumVariants": {
- "markdownDescription": "How many variants of an enum to display when hovering on. Show none if empty.",
- "default": 5,
- "type": [
- "null",
- "integer"
- ],
- "minimum": 0
- },
- "rust-analyzer.hover.show.fields": {
- "markdownDescription": "How many fields of a struct, variant or union to display when hovering on. Show none if empty.",
- "default": 5,
- "type": [
- "null",
- "integer"
- ],
- "minimum": 0
- },
- "rust-analyzer.hover.show.traitAssocItems": {
- "markdownDescription": "How many associated items of a trait to display when hovering a trait.",
- "default": null,
- "type": [
- "null",
- "integer"
- ],
- "minimum": 0
- },
- "rust-analyzer.imports.granularity.enforce": {
- "markdownDescription": "Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.imports.granularity.group": {
- "markdownDescription": "How imports should be grouped into use statements.",
- "default": "crate",
- "type": "string",
- "enum": [
- "preserve",
- "crate",
- "module",
- "item",
- "one"
- ],
- "enumDescriptions": [
- "Do not change the granularity of any imports and preserve the original structure written by the developer.",
- "Merge imports from the same crate into a single use statement. Conversely, imports from different crates are split into separate statements.",
- "Merge imports from the same module into a single use statement. Conversely, imports from different modules are split into separate statements.",
- "Flatten imports so that each has its own use statement.",
- "Merge all imports into a single use statement as long as they have the same visibility and attributes."
- ]
- },
- "rust-analyzer.imports.group.enable": {
- "markdownDescription": "Group inserted imports by the [following order](https://rust-analyzer.github.io/manual.html#auto-import). Groups are separated by newlines.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.imports.merge.glob": {
- "markdownDescription": "Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.imports.preferNoStd": {
- "markdownDescription": "Prefer to unconditionally use imports of the core and alloc crate, over the std crate.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.imports.preferPrelude": {
- "markdownDescription": "Whether to prefer import paths containing a `prelude` module.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.imports.prefix": {
- "markdownDescription": "The path structure for newly inserted paths to use.",
- "default": "plain",
- "type": "string",
- "enum": [
- "plain",
- "self",
- "crate"
- ],
- "enumDescriptions": [
- "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.",
- "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item. Prefixes `self` in front of the path if it starts with a module.",
- "Force import paths to be absolute by always starting them with `crate` or the extern crate name they come from."
- ]
- },
- "rust-analyzer.inlayHints.bindingModeHints.enable": {
- "markdownDescription": "Whether to show inlay type hints for binding modes.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.inlayHints.chainingHints.enable": {
- "markdownDescription": "Whether to show inlay type hints for method chains.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.inlayHints.closingBraceHints.enable": {
- "markdownDescription": "Whether to show inlay hints after a closing `}` to indicate what item it belongs to.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.inlayHints.closingBraceHints.minLines": {
- "markdownDescription": "Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1\nto always show them).",
- "default": 25,
- "type": "integer",
- "minimum": 0
- },
- "rust-analyzer.inlayHints.closureCaptureHints.enable": {
- "markdownDescription": "Whether to show inlay hints for closure captures.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.inlayHints.closureReturnTypeHints.enable": {
- "markdownDescription": "Whether to show inlay type hints for return types of closures.",
- "default": "never",
- "type": "string",
- "enum": [
- "always",
- "never",
- "with_block"
- ],
- "enumDescriptions": [
- "Always show type hints for return types of closures.",
- "Never show type hints for return types of closures.",
- "Only show type hints for return types of closures with blocks."
- ]
- },
- "rust-analyzer.inlayHints.closureStyle": {
- "markdownDescription": "Closure notation in type and chaining inlay hints.",
- "default": "impl_fn",
- "type": "string",
- "enum": [
- "impl_fn",
- "rust_analyzer",
- "with_id",
- "hide"
- ],
- "enumDescriptions": [
- "`impl_fn`: `impl FnMut(i32, u64) -> i8`",
- "`rust_analyzer`: `|i32, u64| -> i8`",
- "`with_id`: `{closure#14352}`, where that id is the unique number of the closure in r-a internals",
- "`hide`: Shows `...` for every closure type"
- ]
- },
- "rust-analyzer.inlayHints.discriminantHints.enable": {
- "markdownDescription": "Whether to show enum variant discriminant hints.",
- "default": "never",
- "type": "string",
- "enum": [
- "always",
- "never",
- "fieldless"
- ],
- "enumDescriptions": [
- "Always show all discriminant hints.",
- "Never show discriminant hints.",
- "Only show discriminant hints on fieldless enum variants."
- ]
- },
- "rust-analyzer.inlayHints.expressionAdjustmentHints.enable": {
- "markdownDescription": "Whether to show inlay hints for type adjustments.",
- "default": "never",
- "type": "string",
- "enum": [
- "always",
- "never",
- "reborrow"
- ],
- "enumDescriptions": [
- "Always show all adjustment hints.",
- "Never show adjustment hints.",
- "Only show auto borrow and dereference adjustment hints."
- ]
- },
- "rust-analyzer.inlayHints.expressionAdjustmentHints.hideOutsideUnsafe": {
- "markdownDescription": "Whether to hide inlay hints for type adjustments outside of `unsafe` blocks.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.inlayHints.expressionAdjustmentHints.mode": {
- "markdownDescription": "Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).",
- "default": "prefix",
- "type": "string",
- "enum": [
- "prefix",
- "postfix",
- "prefer_prefix",
- "prefer_postfix"
- ],
- "enumDescriptions": [
- "Always show adjustment hints as prefix (`*expr`).",
- "Always show adjustment hints as postfix (`expr.*`).",
- "Show prefix or postfix depending on which uses less parenthesis, preferring prefix.",
- "Show prefix or postfix depending on which uses less parenthesis, preferring postfix."
- ]
- },
- "rust-analyzer.inlayHints.implicitDrops.enable": {
- "markdownDescription": "Whether to show implicit drop hints.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.inlayHints.lifetimeElisionHints.enable": {
- "markdownDescription": "Whether to show inlay type hints for elided lifetimes in function signatures.",
- "default": "never",
- "type": "string",
- "enum": [
- "always",
- "never",
- "skip_trivial"
- ],
- "enumDescriptions": [
- "Always show lifetime elision hints.",
- "Never show lifetime elision hints.",
- "Only show lifetime elision hints if a return type is involved."
- ]
- },
- "rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames": {
- "markdownDescription": "Whether to prefer using parameter names as the name for elided lifetime hints if possible.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.inlayHints.maxLength": {
- "markdownDescription": "Maximum length for inlay hints. Set to null to have an unlimited length.",
- "default": 25,
- "type": [
- "null",
- "integer"
- ],
- "minimum": 0
- },
- "rust-analyzer.inlayHints.parameterHints.enable": {
- "markdownDescription": "Whether to show function parameter name inlay hints at the call\nsite.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.inlayHints.rangeExclusiveHints.enable": {
- "markdownDescription": "Whether to show exclusive range inlay hints.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.inlayHints.reborrowHints.enable": {
- "markdownDescription": "Whether to show inlay hints for compiler inserted reborrows.\nThis setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.",
- "default": "never",
- "type": "string",
- "enum": [
- "always",
- "never",
- "mutable"
- ],
- "enumDescriptions": [
- "Always show reborrow hints.",
- "Never show reborrow hints.",
- "Only show mutable reborrow hints."
- ]
- },
- "rust-analyzer.inlayHints.renderColons": {
- "markdownDescription": "Whether to render leading colons for type hints, and trailing colons for parameter hints.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.inlayHints.typeHints.enable": {
- "markdownDescription": "Whether to show inlay type hints for variables.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.inlayHints.typeHints.hideClosureInitialization": {
- "markdownDescription": "Whether to hide inlay type hints for `let` statements that initialize to a closure.\nOnly applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.inlayHints.typeHints.hideNamedConstructor": {
- "markdownDescription": "Whether to hide inlay type hints for constructors.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.interpret.tests": {
- "markdownDescription": "Enables the experimental support for interpreting tests.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.joinLines.joinAssignments": {
- "markdownDescription": "Join lines merges consecutive declaration and initialization of an assignment.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.joinLines.joinElseIf": {
- "markdownDescription": "Join lines inserts else between consecutive ifs.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.joinLines.removeTrailingComma": {
- "markdownDescription": "Join lines removes trailing commas.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.joinLines.unwrapTrivialBlock": {
- "markdownDescription": "Join lines unwraps trivial blocks.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.lens.debug.enable": {
- "markdownDescription": "Whether to show `Debug` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.lens.enable": {
- "markdownDescription": "Whether to show CodeLens in Rust files.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.lens.forceCustomCommands": {
- "markdownDescription": "Internal config: use custom client-side commands even when the\nclient doesn't set the corresponding capability.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.lens.implementations.enable": {
- "markdownDescription": "Whether to show `Implementations` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.lens.location": {
- "markdownDescription": "Where to render annotations.",
- "default": "above_name",
- "type": "string",
- "enum": [
- "above_name",
- "above_whole_item"
- ],
- "enumDescriptions": [
- "Render annotations above the name of the item.",
- "Render annotations above the whole item, including documentation comments and attributes."
- ]
- },
- "rust-analyzer.lens.references.adt.enable": {
- "markdownDescription": "Whether to show `References` lens for Struct, Enum, and Union.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.lens.references.enumVariant.enable": {
- "markdownDescription": "Whether to show `References` lens for Enum Variants.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.lens.references.method.enable": {
- "markdownDescription": "Whether to show `Method References` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.lens.references.trait.enable": {
- "markdownDescription": "Whether to show `References` lens for Trait.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.lens.run.enable": {
- "markdownDescription": "Whether to show `Run` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.linkedProjects": {
- "markdownDescription": "Disable project auto-discovery in favor of explicitly specified set\nof projects.\n\nElements must be paths pointing to `Cargo.toml`,\n`rust-project.json`, `.rs` files (which will be treated as standalone files) or JSON\nobjects in `rust-project.json` format.",
- "default": [],
- "type": "array",
- "items": {
+ }
+ }
+ },
+ {
+ "title": "rustc",
+ "properties": {
+ "rust-analyzer.rustc.source": {
+ "markdownDescription": "Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private\nprojects, or \"discover\" to try to automatically find it if the `rustc-dev` component\nis installed.\n\nAny project which uses rust-analyzer with the rustcPrivate\ncrates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.\n\nThis option does not take effect until rust-analyzer is restarted.",
+ "default": null,
"type": [
- "string",
- "object"
+ "null",
+ "string"
]
}
- },
- "rust-analyzer.lru.capacity": {
- "markdownDescription": "Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.",
- "default": null,
- "type": [
- "null",
- "integer"
- ],
- "minimum": 0
- },
- "rust-analyzer.lru.query.capacities": {
- "markdownDescription": "Sets the LRU capacity of the specified queries.",
- "default": {},
- "type": "object"
- },
- "rust-analyzer.notifications.cargoTomlNotFound": {
- "markdownDescription": "Whether to show `can't find Cargo.toml` error message.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.notifications.unindexedProject": {
- "markdownDescription": "Whether to send an UnindexedProject notification to the client.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.numThreads": {
- "markdownDescription": "How many worker threads in the main loop. The default `null` means to pick automatically.",
- "default": null,
- "type": [
- "null",
- "integer"
- ],
- "minimum": 0
- },
- "rust-analyzer.procMacro.attributes.enable": {
- "markdownDescription": "Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.procMacro.enable": {
- "markdownDescription": "Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.procMacro.ignored": {
- "markdownDescription": "These proc-macros will be ignored when trying to expand them.\n\nThis config takes a map of crate names with the exported proc-macro names to ignore as values.",
- "default": {},
- "type": "object"
- },
- "rust-analyzer.procMacro.server": {
- "markdownDescription": "Internal config, path to proc-macro server executable.",
- "default": null,
- "type": [
- "null",
- "string"
- ]
- },
- "rust-analyzer.references.excludeImports": {
- "markdownDescription": "Exclude imports from find-all-references.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.references.excludeTests": {
- "markdownDescription": "Exclude tests from find-all-references.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.runnables.command": {
- "markdownDescription": "Command to be executed instead of 'cargo' for runnables.",
- "default": null,
- "type": [
- "null",
- "string"
- ]
- },
- "rust-analyzer.runnables.extraArgs": {
- "markdownDescription": "Additional arguments to be passed to cargo for runnables such as\ntests or binaries. For example, it may be `--release`.",
- "default": [],
- "type": "array",
- "items": {
- "type": "string"
+ }
+ },
+ {
+ "title": "rustfmt",
+ "properties": {
+ "rust-analyzer.rustfmt.extraArgs": {
+ "markdownDescription": "Additional arguments to `rustfmt`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
}
- },
- "rust-analyzer.runnables.extraTestBinaryArgs": {
- "markdownDescription": "Additional arguments to be passed through Cargo to launched tests, benchmarks, or\ndoc-tests.\n\nUnless the launched target uses a\n[custom test harness](https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-harness-field),\nthey will end up being interpreted as options to\n[`rustc`’s built-in test harness (“libtest”)](https://doc.rust-lang.org/rustc/tests/index.html#cli-arguments).",
- "default": [
- "--show-output"
- ],
- "type": "array",
- "items": {
- "type": "string"
+ }
+ },
+ {
+ "title": "rustfmt",
+ "properties": {
+ "rust-analyzer.rustfmt.overrideCommand": {
+ "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for\nformatting. This should be the equivalent of `rustfmt` here, and\nnot that of `cargo fmt`. The file contents will be passed on the\nstandard input and the formatted result will be read from the\nstandard output.",
+ "default": null,
+ "type": [
+ "null",
+ "array"
+ ],
+ "items": {
+ "type": "string"
+ }
}
- },
- "rust-analyzer.rustc.source": {
- "markdownDescription": "Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private\nprojects, or \"discover\" to try to automatically find it if the `rustc-dev` component\nis installed.\n\nAny project which uses rust-analyzer with the rustcPrivate\ncrates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.\n\nThis option does not take effect until rust-analyzer is restarted.",
- "default": null,
- "type": [
- "null",
- "string"
- ]
- },
- "rust-analyzer.rustfmt.extraArgs": {
- "markdownDescription": "Additional arguments to `rustfmt`.",
- "default": [],
- "type": "array",
- "items": {
- "type": "string"
+ }
+ },
+ {
+ "title": "rustfmt",
+ "properties": {
+ "rust-analyzer.rustfmt.rangeFormatting.enable": {
+ "markdownDescription": "Enables the use of rustfmt's unstable range formatting command for the\n`textDocument/rangeFormatting` request. The rustfmt option is unstable and only\navailable on a nightly build.",
+ "default": false,
+ "type": "boolean"
}
- },
- "rust-analyzer.rustfmt.overrideCommand": {
- "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for\nformatting. This should be the equivalent of `rustfmt` here, and\nnot that of `cargo fmt`. The file contents will be passed on the\nstandard input and the formatted result will be read from the\nstandard output.",
- "default": null,
- "type": [
- "null",
- "array"
- ],
- "items": {
- "type": "string"
+ }
+ },
+ {
+ "title": "semanticHighlighting",
+ "properties": {
+ "rust-analyzer.semanticHighlighting.doc.comment.inject.enable": {
+ "markdownDescription": "Inject additional highlighting into doc comments.\n\nWhen enabled, rust-analyzer will highlight rust source in doc comments as well as intra\ndoc links.",
+ "default": true,
+ "type": "boolean"
}
- },
- "rust-analyzer.rustfmt.rangeFormatting.enable": {
- "markdownDescription": "Enables the use of rustfmt's unstable range formatting command for the\n`textDocument/rangeFormatting` request. The rustfmt option is unstable and only\navailable on a nightly build.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.semanticHighlighting.doc.comment.inject.enable": {
- "markdownDescription": "Inject additional highlighting into doc comments.\n\nWhen enabled, rust-analyzer will highlight rust source in doc comments as well as intra\ndoc links.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.semanticHighlighting.nonStandardTokens": {
- "markdownDescription": "Whether the server is allowed to emit non-standard tokens and modifiers.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.semanticHighlighting.operator.enable": {
- "markdownDescription": "Use semantic tokens for operators.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for operator tokens when\nthey are tagged with modifiers.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.semanticHighlighting.operator.specialization.enable": {
- "markdownDescription": "Use specialized semantic tokens for operators.\n\nWhen enabled, rust-analyzer will emit special token types for operator tokens instead\nof the generic `operator` token type.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.semanticHighlighting.punctuation.enable": {
- "markdownDescription": "Use semantic tokens for punctuation.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when\nthey are tagged with modifiers or have a special role.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang": {
- "markdownDescription": "When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro\ncalls.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.semanticHighlighting.punctuation.specialization.enable": {
- "markdownDescription": "Use specialized semantic tokens for punctuation.\n\nWhen enabled, rust-analyzer will emit special token types for punctuation tokens instead\nof the generic `punctuation` token type.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.semanticHighlighting.strings.enable": {
- "markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.signatureInfo.detail": {
- "markdownDescription": "Show full signature of the callable. Only shows parameters if disabled.",
- "default": "full",
- "type": "string",
- "enum": [
- "full",
- "parameters"
- ],
- "enumDescriptions": [
- "Show the entire signature.",
- "Show only the parameters."
- ]
- },
- "rust-analyzer.signatureInfo.documentation.enable": {
- "markdownDescription": "Show documentation.",
- "default": true,
- "type": "boolean"
- },
- "rust-analyzer.typing.autoClosingAngleBrackets.enable": {
- "markdownDescription": "Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.",
- "default": false,
- "type": "boolean"
- },
- "rust-analyzer.workspace.symbol.search.kind": {
- "markdownDescription": "Workspace symbol search kind.",
- "default": "only_types",
- "type": "string",
- "enum": [
- "only_types",
- "all_symbols"
- ],
- "enumDescriptions": [
- "Search for types only.",
- "Search for all symbols kinds."
- ]
- },
- "rust-analyzer.workspace.symbol.search.limit": {
- "markdownDescription": "Limits the number of items returned from a workspace symbol search (Defaults to 128).\nSome clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.\nOther clients requires all results upfront and might require a higher limit.",
- "default": 128,
- "type": "integer",
- "minimum": 0
- },
- "rust-analyzer.workspace.symbol.search.scope": {
- "markdownDescription": "Workspace symbol search scope.",
- "default": "workspace",
- "type": "string",
- "enum": [
- "workspace",
- "workspace_and_dependencies"
- ],
- "enumDescriptions": [
- "Search in current workspace only.",
- "Search in current workspace and dependencies."
- ]
- },
- "$generated-end": {}
+ }
+ },
+ {
+ "title": "semanticHighlighting",
+ "properties": {
+ "rust-analyzer.semanticHighlighting.nonStandardTokens": {
+ "markdownDescription": "Whether the server is allowed to emit non-standard tokens and modifiers.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "semanticHighlighting",
+ "properties": {
+ "rust-analyzer.semanticHighlighting.operator.enable": {
+ "markdownDescription": "Use semantic tokens for operators.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for operator tokens when\nthey are tagged with modifiers.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "semanticHighlighting",
+ "properties": {
+ "rust-analyzer.semanticHighlighting.operator.specialization.enable": {
+ "markdownDescription": "Use specialized semantic tokens for operators.\n\nWhen enabled, rust-analyzer will emit special token types for operator tokens instead\nof the generic `operator` token type.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "semanticHighlighting",
+ "properties": {
+ "rust-analyzer.semanticHighlighting.punctuation.enable": {
+ "markdownDescription": "Use semantic tokens for punctuation.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when\nthey are tagged with modifiers or have a special role.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "semanticHighlighting",
+ "properties": {
+ "rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang": {
+ "markdownDescription": "When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro\ncalls.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "semanticHighlighting",
+ "properties": {
+ "rust-analyzer.semanticHighlighting.punctuation.specialization.enable": {
+ "markdownDescription": "Use specialized semantic tokens for punctuation.\n\nWhen enabled, rust-analyzer will emit special token types for punctuation tokens instead\nof the generic `punctuation` token type.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "semanticHighlighting",
+ "properties": {
+ "rust-analyzer.semanticHighlighting.strings.enable": {
+ "markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "signatureInfo",
+ "properties": {
+ "rust-analyzer.signatureInfo.detail": {
+ "markdownDescription": "Show full signature of the callable. Only shows parameters if disabled.",
+ "default": "full",
+ "type": "string",
+ "enum": [
+ "full",
+ "parameters"
+ ],
+ "enumDescriptions": [
+ "Show the entire signature.",
+ "Show only the parameters."
+ ]
+ }
+ }
+ },
+ {
+ "title": "signatureInfo",
+ "properties": {
+ "rust-analyzer.signatureInfo.documentation.enable": {
+ "markdownDescription": "Show documentation.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "typing",
+ "properties": {
+ "rust-analyzer.typing.autoClosingAngleBrackets.enable": {
+ "markdownDescription": "Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "workspace",
+ "properties": {
+ "rust-analyzer.workspace.symbol.search.kind": {
+ "markdownDescription": "Workspace symbol search kind.",
+ "default": "only_types",
+ "type": "string",
+ "enum": [
+ "only_types",
+ "all_symbols"
+ ],
+ "enumDescriptions": [
+ "Search for types only.",
+ "Search for all symbols kinds."
+ ]
+ }
+ }
+ },
+ {
+ "title": "workspace",
+ "properties": {
+ "rust-analyzer.workspace.symbol.search.limit": {
+ "markdownDescription": "Limits the number of items returned from a workspace symbol search (Defaults to 128).\nSome clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.\nOther clients requires all results upfront and might require a higher limit.",
+ "default": 128,
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ },
+ {
+ "title": "workspace",
+ "properties": {
+ "rust-analyzer.workspace.symbol.search.scope": {
+ "markdownDescription": "Workspace symbol search scope.",
+ "default": "workspace",
+ "type": "string",
+ "enum": [
+ "workspace",
+ "workspace_and_dependencies"
+ ],
+ "enumDescriptions": [
+ "Search in current workspace only.",
+ "Search in current workspace and dependencies."
+ ]
+ }
+ }
+ },
+ {
+ "title": "$generated-end"
}
- },
+ ],
"configurationDefaults": {
"explorer.fileNesting.patterns": {
"Cargo.toml": "Cargo.lock"
@@ -2282,6 +3128,10 @@
{
"command": "rust-analyzer.viewMemoryLayout",
"when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.toggleLSPLogs",
+ "when": "inRustProject"
}
],
"editor/context": [
diff --git a/editors/code/src/ast_inspector.ts b/editors/code/src/ast_inspector.ts
index 688c53a9b1..35b705c477 100644
--- a/editors/code/src/ast_inspector.ts
+++ b/editors/code/src/ast_inspector.ts
@@ -1,8 +1,7 @@
import * as vscode from "vscode";
import type { Ctx, Disposable } from "./ctx";
-import { type RustEditor, isRustEditor } from "./util";
-import { unwrapUndefinable } from "./undefinable";
+import { type RustEditor, isRustEditor, unwrapUndefinable } from "./util";
// FIXME: consider implementing this via the Tree View API?
// https://code.visualstudio.com/api/extension-guides/tree-view
diff --git a/editors/code/src/bootstrap.ts b/editors/code/src/bootstrap.ts
index 6cf399599d..5a92b285ae 100644
--- a/editors/code/src/bootstrap.ts
+++ b/editors/code/src/bootstrap.ts
@@ -1,9 +1,9 @@
import * as vscode from "vscode";
import * as os from "os";
import type { Config } from "./config";
-import { log, isValidExecutable } from "./util";
+import { type Env, log } from "./util";
import type { PersistentState } from "./persistent_state";
-import { exec } from "child_process";
+import { exec, spawnSync } from "child_process";
export async function bootstrap(
context: vscode.ExtensionContext,
@@ -13,7 +13,7 @@ export async function bootstrap(
const path = await getServer(context, config, state);
if (!path) {
throw new Error(
- "Rust Analyzer Language Server is not available. " +
+ "rust-analyzer Language Server is not available. " +
"Please, ensure its [proper installation](https://rust-analyzer.github.io/manual.html#installation).",
);
}
@@ -21,12 +21,12 @@ export async function bootstrap(
log.info("Using server binary at", path);
if (!isValidExecutable(path, config.serverExtraEnv)) {
- if (config.serverPath) {
- throw new Error(`Failed to execute ${path} --version. \`config.server.path\` or \`config.serverPath\` has been set explicitly.\
- Consider removing this config or making a valid server binary available at that path.`);
- } else {
- throw new Error(`Failed to execute ${path} --version`);
- }
+        throw new Error(
+            `Failed to execute ${path} --version.` +
+                (config.serverPath
+                    ? ` \`config.server.path\` or \`config.serverPath\` has been set explicitly. Consider removing this config or making a valid server binary available at that path.`
+                    : ""),
+        );
}
return path;
@@ -54,27 +54,12 @@ async function getServer(
if (bundledExists) {
let server = bundled;
if (await isNixOs()) {
- await vscode.workspace.fs.createDirectory(config.globalStorageUri).then();
- const dest = vscode.Uri.joinPath(config.globalStorageUri, `rust-analyzer${ext}`);
- let exists = await vscode.workspace.fs.stat(dest).then(
- () => true,
- () => false,
- );
- if (exists && config.package.version !== state.serverVersion) {
- await vscode.workspace.fs.delete(dest);
- exists = false;
- }
- if (!exists) {
- await vscode.workspace.fs.copy(bundled, dest);
- await patchelf(dest);
- }
- server = dest;
+ server = await getNixOsServer(config, ext, state, bundled, server);
+ await state.updateServerVersion(config.package.version);
}
- await state.updateServerVersion(config.package.version);
return server.fsPath;
}
- await state.updateServerVersion(undefined);
await vscode.window.showErrorMessage(
"Unfortunately we don't ship binaries for your platform yet. " +
"You need to manually clone the rust-analyzer repository and " +
@@ -86,6 +71,45 @@ async function getServer(
return undefined;
}
+export function isValidExecutable(path: string, extraEnv: Env): boolean {
+ log.debug("Checking availability of a binary at", path);
+
+ const res = spawnSync(path, ["--version"], {
+ encoding: "utf8",
+ env: { ...process.env, ...extraEnv },
+ });
+
+ const printOutput = res.error ? log.warn : log.info;
+ printOutput(path, "--version:", res);
+
+ return res.status === 0;
+}
+
+async function getNixOsServer(
+ config: Config,
+ ext: string,
+ state: PersistentState,
+ bundled: vscode.Uri,
+ server: vscode.Uri,
+) {
+ await vscode.workspace.fs.createDirectory(config.globalStorageUri).then();
+ const dest = vscode.Uri.joinPath(config.globalStorageUri, `rust-analyzer${ext}`);
+ let exists = await vscode.workspace.fs.stat(dest).then(
+ () => true,
+ () => false,
+ );
+ if (exists && config.package.version !== state.serverVersion) {
+ await vscode.workspace.fs.delete(dest);
+ exists = false;
+ }
+ if (!exists) {
+ await vscode.workspace.fs.copy(bundled, dest);
+ await patchelf(dest);
+ }
+ server = dest;
+ return server;
+}
+
async function isNixOs(): Promise<boolean> {
try {
const contents = (
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index 372dc8bedf..1c2a34b484 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -3,73 +3,13 @@ import * as lc from "vscode-languageclient/node";
import * as vscode from "vscode";
import * as ra from "../src/lsp_ext";
import * as Is from "vscode-languageclient/lib/common/utils/is";
-import { assert } from "./util";
+import { assert, unwrapUndefinable } from "./util";
import * as diagnostics from "./diagnostics";
import { WorkspaceEdit } from "vscode";
import { type Config, prepareVSCodeConfig } from "./config";
-import { randomUUID } from "crypto";
import { sep as pathSeparator } from "path";
-import { unwrapUndefinable } from "./undefinable";
import { RaLanguageClient } from "./lang_client";
-export interface Env {
- [name: string]: string;
-}
-
-// Command URIs have a form of command:command-name?arguments, where
-// arguments is a percent-encoded array of data we want to pass along to
-// the command function. For "Show References" this is a list of all file
-// URIs with locations of every reference, and it can get quite long.
-//
-// To work around it we use an intermediary linkToCommand command. When
-// we render a command link, a reference to a command with all its arguments
-// is stored in a map, and instead a linkToCommand link is rendered
-// with the key to that map.
-export const LINKED_COMMANDS = new Map<string, ra.CommandLink>();
-
-// For now the map is cleaned up periodically (I've set it to every
-// 10 minutes). In general case we'll probably need to introduce TTLs or
-// flags to denote ephemeral links (like these in hover popups) and
-// persistent links and clean those separately. But for now simply keeping
-// the last few links in the map should be good enough. Likewise, we could
-// add code to remove a target command from the map after the link is
-// clicked, but assuming most links in hover sheets won't be clicked anyway
-// this code won't change the overall memory use much.
-setInterval(
- function cleanupOlderCommandLinks() {
- // keys are returned in insertion order, we'll keep a few
- // of recent keys available, and clean the rest
- const keys = [...LINKED_COMMANDS.keys()];
- const keysToRemove = keys.slice(0, keys.length - 10);
- for (const key of keysToRemove) {
- LINKED_COMMANDS.delete(key);
- }
- },
- 10 * 60 * 1000,
-);
-
-function renderCommand(cmd: ra.CommandLink): string {
- const commandId = randomUUID();
- LINKED_COMMANDS.set(commandId, cmd);
- return `[${cmd.title}](command:rust-analyzer.linkToCommand?${encodeURIComponent(
- JSON.stringify([commandId]),
- )} '${cmd.tooltip}')`;
-}
-
-function renderHoverActions(actions: ra.CommandLinkGroup[]): vscode.MarkdownString {
- const text = actions
- .map(
- (group) =>
- (group.title ? group.title + " " : "") +
- group.commands.map(renderCommand).join(" | "),
- )
- .join("___");
-
- const result = new vscode.MarkdownString(text);
- result.isTrusted = true;
- return result;
-}
-
export async function createClient(
traceOutputChannel: vscode.OutputChannel,
outputChannel: vscode.OutputChannel,
@@ -450,3 +390,32 @@ function isCodeActionWithoutEditsAndCommands(value: any): boolean {
candidate.command === void 0
);
}
+
+// Command URIs have a form of command:command-name?arguments, where
+// arguments is a percent-encoded array of data we want to pass along to
+// the command function. For "Show References" this is a list of all file
+// URIs with locations of every reference, and it can get quite long.
+// So long in fact that it will fail rendering inside an `a` tag so we need
+// to proxy around that. We store the last hover's reference command link
+// here, as only one hover can be active at a time, and we don't need to
+// keep a history of these.
+export let HOVER_REFERENCE_COMMAND: ra.CommandLink | undefined = undefined;
+
+function renderCommand(cmd: ra.CommandLink): string {
+ HOVER_REFERENCE_COMMAND = cmd;
+ return `[${cmd.title}](command:rust-analyzer.hoverRefCommandProxy '${cmd.tooltip}')`;
+}
+
+function renderHoverActions(actions: ra.CommandLinkGroup[]): vscode.MarkdownString {
+ const text = actions
+ .map(
+ (group) =>
+ (group.title ? group.title + " " : "") +
+ group.commands.map(renderCommand).join(" | "),
+ )
+ .join(" | ");
+
+ const result = new vscode.MarkdownString(text);
+ result.isTrusted = true;
+ return result;
+}
diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts
index 849fae5cf2..f0f9fab1c6 100644
--- a/editors/code/src/commands.ts
+++ b/editors/code/src/commands.ts
@@ -9,22 +9,27 @@ import {
applySnippetTextEdits,
type SnippetTextDocumentEdit,
} from "./snippets";
-import { spawnSync } from "child_process";
-import { type RunnableQuickPick, selectRunnable, createTask, createArgs } from "./run";
+import {
+ type RunnableQuickPick,
+ selectRunnable,
+ createTaskFromRunnable,
+ createCargoArgs,
+} from "./run";
import { AstInspector } from "./ast_inspector";
import {
isRustDocument,
+ isCargoRunnableArgs,
isCargoTomlDocument,
sleep,
isRustEditor,
type RustEditor,
type RustDocument,
+ unwrapUndefinable,
} from "./util";
import { startDebugSession, makeDebugConfig } from "./debug";
import type { LanguageClient } from "vscode-languageclient/node";
-import { LINKED_COMMANDS } from "./client";
+import { HOVER_REFERENCE_COMMAND } from "./client";
import type { DependencyId } from "./dependencies_provider";
-import { unwrapUndefinable } from "./undefinable";
import { log } from "./util";
export * from "./ast_inspector";
@@ -415,10 +420,9 @@ export function serverVersion(ctx: CtxInit): Cmd {
void vscode.window.showWarningMessage(`rust-analyzer server is not running`);
return;
}
- const { stdout } = spawnSync(ctx.serverPath, ["--version"], { encoding: "utf8" });
- const versionString = stdout.slice(`rust-analyzer `.length).trim();
-
- void vscode.window.showInformationMessage(`rust-analyzer version: ${versionString}`);
+ void vscode.window.showInformationMessage(
+ `rust-analyzer version: ${ctx.serverVersion} [${ctx.serverPath}]`,
+ );
};
}
@@ -1097,7 +1101,7 @@ export function run(ctx: CtxInit): Cmd {
item.detail = "rerun";
prevRunnable = item;
- const task = await createTask(item.runnable, ctx.config);
+ const task = await createTaskFromRunnable(item.runnable, ctx.config);
return await vscode.tasks.executeTask(task);
};
}
@@ -1140,7 +1144,7 @@ export function runSingle(ctx: CtxInit): Cmd {
const editor = ctx.activeRustEditor;
if (!editor) return;
- const task = await createTask(runnable, ctx.config);
+ const task = await createTaskFromRunnable(runnable, ctx.config);
task.group = vscode.TaskGroup.Build;
task.presentationOptions = {
reveal: vscode.TaskRevealKind.Always,
@@ -1156,8 +1160,8 @@ export function copyRunCommandLine(ctx: CtxInit) {
let prevRunnable: RunnableQuickPick | undefined;
return async () => {
const item = await selectRunnable(ctx, prevRunnable);
- if (!item) return;
- const args = createArgs(item.runnable);
+ if (!item || !isCargoRunnableArgs(item.runnable.args)) return;
+ const args = createCargoArgs(item.runnable.args);
const commandLine = ["cargo", ...args].join(" ");
await vscode.env.clipboard.writeText(commandLine);
await vscode.window.showInformationMessage("Cargo invocation copied to the clipboard.");
@@ -1192,11 +1196,10 @@ export function newDebugConfig(ctx: CtxInit): Cmd {
};
}
-export function linkToCommand(_: Ctx): Cmd {
- return async (commandId: string) => {
- const link = LINKED_COMMANDS.get(commandId);
- if (link) {
- const { command, arguments: args = [] } = link;
+export function hoverRefCommandProxy(_: Ctx): Cmd {
+ return async () => {
+ if (HOVER_REFERENCE_COMMAND) {
+ const { command, arguments: args = [] } = HOVER_REFERENCE_COMMAND;
await vscode.commands.executeCommand(command, ...args);
}
};
@@ -1486,3 +1489,16 @@ export function toggleCheckOnSave(ctx: Ctx): Cmd {
ctx.refreshServerStatus();
};
}
+
+export function toggleLSPLogs(ctx: Ctx): Cmd {
+ return async () => {
+ const config = vscode.workspace.getConfiguration("rust-analyzer");
+ const targetValue =
+ config.get<string | undefined>("trace.server") === "verbose" ? undefined : "verbose";
+
+ await config.update("trace.server", targetValue, vscode.ConfigurationTarget.Workspace);
+ if (targetValue && ctx.client && ctx.client.traceOutputChannel) {
+ ctx.client.traceOutputChannel.show();
+ }
+ };
+}
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index e676bc0826..1931cfe381 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -2,9 +2,7 @@ import * as Is from "vscode-languageclient/lib/common/utils/is";
import * as os from "os";
import * as path from "path";
import * as vscode from "vscode";
-import type { Env } from "./client";
-import { log } from "./util";
-import { expectNotUndefined, unwrapUndefinable } from "./undefinable";
+import { type Env, log, unwrapUndefinable, expectNotUndefined } from "./util";
import type { JsonProject } from "./rust_project";
export type RunnableEnvCfgItem = {
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index 474e18b722..bf0b84ec35 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -25,6 +25,8 @@ import { bootstrap } from "./bootstrap";
import type { RustAnalyzerExtensionApi } from "./main";
import type { JsonProject } from "./rust_project";
import { prepareTestExplorer } from "./test_explorer";
+import { spawn } from "node:child_process";
+import { text } from "node:stream/consumers";
// We only support local folders, not eg. Live Share (`vlsl:` scheme), so don't activate if
// only those are in use. We use "Empty" to represent these scenarios
@@ -71,6 +73,7 @@ export class Ctx implements RustAnalyzerExtensionApi {
readonly statusBar: vscode.StatusBarItem;
config: Config;
readonly workspace: Workspace;
+ readonly version: string;
private _client: lc.LanguageClient | undefined;
private _serverPath: string | undefined;
@@ -85,6 +88,15 @@ export class Ctx implements RustAnalyzerExtensionApi {
private _dependencies: RustDependenciesProvider | undefined;
private _treeView: vscode.TreeView<Dependency | DependencyFile | DependencyId> | undefined;
private lastStatus: ServerStatusParams | { health: "stopped" } = { health: "stopped" };
+ private _serverVersion: string;
+
+ get serverPath(): string | undefined {
+ return this._serverPath;
+ }
+
+ get serverVersion(): string | undefined {
+ return this._serverVersion;
+ }
get client() {
return this._client;
@@ -104,6 +116,8 @@ export class Ctx implements RustAnalyzerExtensionApi {
workspace: Workspace,
) {
extCtx.subscriptions.push(this);
+ this.version = extCtx.extension.packageJSON.version ?? "<unknown>";
+ this._serverVersion = "<not running>";
this.config = new Config(extCtx);
this.statusBar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left);
if (this.config.testExplorer) {
@@ -186,6 +200,19 @@ export class Ctx implements RustAnalyzerExtensionApi {
throw new Error(message);
},
);
+ text(spawn(this._serverPath, ["--version"]).stdout.setEncoding("utf-8")).then(
+ (data) => {
+ const prefix = `rust-analyzer `;
+ this._serverVersion = data
+ .slice(data.startsWith(prefix) ? prefix.length : 0)
+ .trim();
+ this.refreshServerStatus();
+ },
+ (_) => {
+ this._serverVersion = "<unknown>";
+ this.refreshServerStatus();
+ },
+ );
const newEnv = Object.assign({}, process.env, this.config.serverExtraEnv);
const run: lc.Executable = {
command: this._serverPath,
@@ -372,10 +399,6 @@ export class Ctx implements RustAnalyzerExtensionApi {
return this.extCtx.subscriptions;
}
- get serverPath(): string | undefined {
- return this._serverPath;
- }
-
setWorkspaces(workspaces: JsonProject[]) {
this.config.discoveredWorkspaces = workspaces;
}
@@ -475,23 +498,24 @@ export class Ctx implements RustAnalyzerExtensionApi {
if (statusBar.tooltip.value) {
statusBar.tooltip.appendMarkdown("\n\n---\n\n");
}
- statusBar.tooltip.appendMarkdown("\n\n[Open Logs](command:rust-analyzer.openLogs)");
- statusBar.tooltip.appendMarkdown(
- `\n\n[${
- this.config.checkOnSave ? "Disable" : "Enable"
- } Check on Save](command:rust-analyzer.toggleCheckOnSave)`,
- );
- statusBar.tooltip.appendMarkdown(
- "\n\n[Reload Workspace](command:rust-analyzer.reloadWorkspace)",
- );
- statusBar.tooltip.appendMarkdown(
- "\n\n[Rebuild Proc Macros](command:rust-analyzer.rebuildProcMacros)",
- );
+
+ const toggleCheckOnSave = this.config.checkOnSave ? "Disable" : "Enable";
statusBar.tooltip.appendMarkdown(
- "\n\n[Restart server](command:rust-analyzer.restartServer)",
+            `[Extension Info](command:rust-analyzer.serverVersion "Show version and server binary info"): Version ${this.version}, Server Version ${this._serverVersion}` +
+ "\n\n---\n\n" +
+ '[$(terminal) Open Logs](command:rust-analyzer.openLogs "Open the server logs")' +
+ "\n\n" +
+ `[$(settings) ${toggleCheckOnSave} Check on Save](command:rust-analyzer.toggleCheckOnSave "Temporarily ${toggleCheckOnSave.toLowerCase()} check on save functionality")` +
+ "\n\n" +
+ '[$(refresh) Reload Workspace](command:rust-analyzer.reloadWorkspace "Reload and rediscover workspaces")' +
+ "\n\n" +
+ '[$(symbol-property) Rebuild Build Dependencies](command:rust-analyzer.rebuildProcMacros "Rebuild build scripts and proc-macros")' +
+ "\n\n" +
+ '[$(stop-circle) Stop server](command:rust-analyzer.stopServer "Stop the server")' +
+ "\n\n" +
+ '[$(debug-restart) Restart server](command:rust-analyzer.restartServer "Restart the server")',
);
- statusBar.tooltip.appendMarkdown("\n\n[Stop server](command:rust-analyzer.stopServer)");
- if (!status.quiescent) icon = "$(sync~spin) ";
+ if (!status.quiescent) icon = "$(loading~spin) ";
statusBar.text = `${icon}rust-analyzer`;
}
diff --git a/editors/code/src/debug.ts b/editors/code/src/debug.ts
index 4b96e4d5c8..58fe1df51f 100644
--- a/editors/code/src/debug.ts
+++ b/editors/code/src/debug.ts
@@ -6,11 +6,12 @@ import type * as ra from "./lsp_ext";
import { Cargo, getRustcId, getSysroot } from "./toolchain";
import type { Ctx } from "./ctx";
import { prepareEnv } from "./run";
-import { unwrapUndefinable } from "./undefinable";
+import { isCargoRunnableArgs, unwrapUndefinable } from "./util";
const debugOutput = vscode.window.createOutputChannel("Debug");
type DebugConfigProvider = (
- config: ra.Runnable,
+ runnable: ra.Runnable,
+ runnableArgs: ra.CargoRunnableArgs,
executable: string,
env: Record<string, string>,
sourceFileMap?: Record<string, string>,
@@ -76,6 +77,11 @@ async function getDebugConfiguration(
ctx: Ctx,
runnable: ra.Runnable,
): Promise<vscode.DebugConfiguration | undefined> {
+ if (!isCargoRunnableArgs(runnable.args)) {
+ return;
+ }
+ const runnableArgs: ra.CargoRunnableArgs = runnable.args;
+
const editor = ctx.activeRustEditor;
if (!editor) return;
@@ -119,9 +125,9 @@ async function getDebugConfiguration(
const isMultiFolderWorkspace = workspaceFolders.length > 1;
const firstWorkspace = workspaceFolders[0];
const maybeWorkspace =
- !isMultiFolderWorkspace || !runnable.args.workspaceRoot
+ !isMultiFolderWorkspace || !runnableArgs.workspaceRoot
? firstWorkspace
- : workspaceFolders.find((w) => runnable.args.workspaceRoot?.includes(w.uri.fsPath)) ||
+ : workspaceFolders.find((w) => runnableArgs.workspaceRoot?.includes(w.uri.fsPath)) ||
firstWorkspace;
const workspace = unwrapUndefinable(maybeWorkspace);
@@ -129,11 +135,11 @@ async function getDebugConfiguration(
const workspaceQualifier = isMultiFolderWorkspace ? `:${workspace.name}` : "";
function simplifyPath(p: string): string {
// see https://github.com/rust-lang/rust-analyzer/pull/5513#issuecomment-663458818 for why this is needed
- return path.normalize(p).replace(wsFolder, "${workspaceFolder" + workspaceQualifier + "}");
+ return path.normalize(p).replace(wsFolder, `\${workspaceFolder${workspaceQualifier}}`);
}
- const env = prepareEnv(runnable, ctx.config.runnablesExtraEnv);
- const executable = await getDebugExecutable(runnable, env);
+ const env = prepareEnv(runnable.label, runnableArgs, ctx.config.runnablesExtraEnv);
+ const executable = await getDebugExecutable(runnableArgs, env);
let sourceFileMap = debugOptions.sourceFileMap;
if (sourceFileMap === "auto") {
// let's try to use the default toolchain
@@ -147,7 +153,7 @@ async function getDebugConfiguration(
}
const provider = unwrapUndefinable(knownEngines[debugEngine.id]);
- const debugConfig = provider(runnable, simplifyPath(executable), env, sourceFileMap);
+    const debugConfig = provider(runnable, runnableArgs, simplifyPath(executable), env, sourceFileMap);
if (debugConfig.type in debugOptions.engineSettings) {
const settingsMap = (debugOptions.engineSettings as any)[debugConfig.type];
for (var key in settingsMap) {
@@ -170,11 +176,11 @@ async function getDebugConfiguration(
}
async function getDebugExecutable(
- runnable: ra.Runnable,
+ runnableArgs: ra.CargoRunnableArgs,
env: Record<string, string>,
): Promise<string> {
- const cargo = new Cargo(runnable.args.workspaceRoot || ".", debugOutput, env);
- const executable = await cargo.executableFromArgs(runnable.args.cargoArgs);
+ const cargo = new Cargo(runnableArgs.workspaceRoot || ".", debugOutput, env);
+ const executable = await cargo.executableFromArgs(runnableArgs.cargoArgs);
// if we are here, there were no compilation errors.
return executable;
@@ -182,6 +188,7 @@ async function getDebugExecutable(
function getCCppDebugConfig(
runnable: ra.Runnable,
+ runnableArgs: ra.CargoRunnableArgs,
executable: string,
env: Record<string, string>,
sourceFileMap?: Record<string, string>,
@@ -191,8 +198,8 @@ function getCCppDebugConfig(
request: "launch",
name: runnable.label,
program: executable,
- args: runnable.args.executableArgs,
- cwd: runnable.args.cwd || runnable.args.workspaceRoot || ".",
+ args: runnableArgs.executableArgs,
+        cwd: runnableArgs.cwd || runnableArgs.workspaceRoot || ".",
sourceFileMap,
environment: Object.entries(env).map((entry) => ({
name: entry[0],
@@ -207,6 +214,7 @@ function getCCppDebugConfig(
function getCodeLldbDebugConfig(
runnable: ra.Runnable,
+ runnableArgs: ra.CargoRunnableArgs,
executable: string,
env: Record<string, string>,
sourceFileMap?: Record<string, string>,
@@ -216,8 +224,8 @@ function getCodeLldbDebugConfig(
request: "launch",
name: runnable.label,
program: executable,
- args: runnable.args.executableArgs,
- cwd: runnable.args.cwd || runnable.args.workspaceRoot || ".",
+ args: runnableArgs.executableArgs,
+        cwd: runnableArgs.cwd || runnableArgs.workspaceRoot || ".",
sourceMap: sourceFileMap,
sourceLanguages: ["rust"],
env,
@@ -226,6 +234,7 @@ function getCodeLldbDebugConfig(
function getNativeDebugConfig(
runnable: ra.Runnable,
+ runnableArgs: ra.CargoRunnableArgs,
executable: string,
env: Record<string, string>,
_sourceFileMap?: Record<string, string>,
@@ -236,8 +245,8 @@ function getNativeDebugConfig(
name: runnable.label,
target: executable,
// See https://github.com/WebFreak001/code-debug/issues/359
- arguments: quote(runnable.args.executableArgs),
- cwd: runnable.args.cwd || runnable.args.workspaceRoot || ".",
+ arguments: quote(runnableArgs.executableArgs),
+        cwd: runnableArgs.cwd || runnableArgs.workspaceRoot || ".",
env,
valuesFormatting: "prettyPrinters",
};
diff --git a/editors/code/src/dependencies_provider.ts b/editors/code/src/dependencies_provider.ts
index 863ace0780..203ef5cc85 100644
--- a/editors/code/src/dependencies_provider.ts
+++ b/editors/code/src/dependencies_provider.ts
@@ -4,7 +4,7 @@ import * as fs from "fs";
import type { CtxInit } from "./ctx";
import * as ra from "./lsp_ext";
import type { FetchDependencyListResult } from "./lsp_ext";
-import { unwrapUndefinable } from "./undefinable";
+import { unwrapUndefinable } from "./util";
export class RustDependenciesProvider
implements vscode.TreeDataProvider<Dependency | DependencyFile>
diff --git a/editors/code/src/diagnostics.ts b/editors/code/src/diagnostics.ts
index e31a1cdcef..9fb2993d12 100644
--- a/editors/code/src/diagnostics.ts
+++ b/editors/code/src/diagnostics.ts
@@ -8,7 +8,7 @@ import {
window,
} from "vscode";
import type { Ctx } from "./ctx";
-import { unwrapUndefinable } from "./undefinable";
+import { unwrapUndefinable } from "./util";
export const URI_SCHEME = "rust-analyzer-diagnostics-view";
diff --git a/editors/code/src/lsp_ext.ts b/editors/code/src/lsp_ext.ts
index 8e48aeef15..699052e4d4 100644
--- a/editors/code/src/lsp_ext.ts
+++ b/editors/code/src/lsp_ext.ts
@@ -223,17 +223,35 @@ export type OpenCargoTomlParams = {
export type Runnable = {
label: string;
location?: lc.LocationLink;
+} & (RunnableCargo | RunnableShell);
+
+type RunnableCargo = {
kind: "cargo";
- args: {
- workspaceRoot?: string;
- cwd?: string;
- cargoArgs: string[];
- cargoExtraArgs: string[];
- executableArgs: string[];
- expectTest?: boolean;
- overrideCargo?: string;
- };
+ args: CargoRunnableArgs;
+};
+
+type RunnableShell = {
+ kind: "shell";
+ args: ShellRunnableArgs;
+};
+
+export type ShellRunnableArgs = {
+ kind: string;
+ program: string;
+ args: string[];
+ cwd: string;
+};
+
+export type CargoRunnableArgs = {
+ workspaceRoot?: string;
+ cargoArgs: string[];
+ cwd: string;
+ cargoExtraArgs: string[];
+ executableArgs: string[];
+ expectTest?: boolean;
+ overrideCargo?: string;
};
+
export type RunnablesParams = {
textDocument: lc.TextDocumentIdentifier;
position: lc.Position | null;
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts
index 0af58fd781..ff67bb7bd5 100644
--- a/editors/code/src/main.ts
+++ b/editors/code/src/main.ts
@@ -177,12 +177,13 @@ function createCommands(): Record<string, CommandFactory> {
serverVersion: { enabled: commands.serverVersion },
viewMemoryLayout: { enabled: commands.viewMemoryLayout },
toggleCheckOnSave: { enabled: commands.toggleCheckOnSave },
+ toggleLSPLogs: { enabled: commands.toggleLSPLogs },
// Internal commands which are invoked by the server.
applyActionGroup: { enabled: commands.applyActionGroup },
applySnippetWorkspaceEdit: { enabled: commands.applySnippetWorkspaceEditCommand },
debugSingle: { enabled: commands.debugSingle },
gotoLocation: { enabled: commands.gotoLocation },
- linkToCommand: { enabled: commands.linkToCommand },
+ hoverRefCommandProxy: { enabled: commands.hoverRefCommandProxy },
resolveCodeAction: { enabled: commands.resolveCodeAction },
runSingle: { enabled: commands.runSingle },
showReferences: { enabled: commands.showReferences },
@@ -203,14 +204,4 @@ function checkConflictingExtensions() {
)
.then(() => {}, console.error);
}
-
- if (vscode.extensions.getExtension("panicbit.cargo")) {
- vscode.window
- .showWarningMessage(
- `You have both the rust-analyzer (rust-lang.rust-analyzer) and Cargo (panicbit.cargo) plugins enabled, ` +
- 'you can disable it or set {"cargo.automaticCheck": false} in settings.json to avoid invoking cargo twice',
- "Got it",
- )
- .then(() => {}, console.error);
- }
}
diff --git a/editors/code/src/nullable.ts b/editors/code/src/nullable.ts
deleted file mode 100644
index e973e16290..0000000000
--- a/editors/code/src/nullable.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-export type NotNull<T> = T extends null ? never : T;
-
-export type Nullable<T> = T | null;
-
-function isNotNull<T>(input: Nullable<T>): input is NotNull<T> {
- return input !== null;
-}
-
-function expectNotNull<T>(input: Nullable<T>, msg: string): NotNull<T> {
- if (isNotNull(input)) {
- return input;
- }
-
- throw new TypeError(msg);
-}
-
-export function unwrapNullable<T>(input: Nullable<T>): NotNull<T> {
- return expectNotNull(input, `unwrapping \`null\``);
-}
diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts
index 4470689cd8..7a9049af0d 100644
--- a/editors/code/src/run.ts
+++ b/editors/code/src/run.ts
@@ -6,9 +6,9 @@ import * as tasks from "./tasks";
import type { CtxInit } from "./ctx";
import { makeDebugConfig } from "./debug";
import type { Config, RunnableEnvCfg, RunnableEnvCfgItem } from "./config";
-import { unwrapUndefinable } from "./undefinable";
import type { LanguageClient } from "vscode-languageclient/node";
-import type { RustEditor } from "./util";
+import { unwrapUndefinable, type RustEditor } from "./util";
+import * as toolchain from "./toolchain";
const quickPickButtons = [
{ iconPath: new vscode.ThemeIcon("save"), tooltip: "Save as a launch.json configuration." },
@@ -66,17 +66,23 @@ export class RunnableQuickPick implements vscode.QuickPickItem {
}
}
+export function prepareBaseEnv(): Record<string, string> {
+ const env: Record<string, string> = { RUST_BACKTRACE: "short" };
+ Object.assign(env, process.env as { [key: string]: string });
+ return env;
+}
+
export function prepareEnv(
- runnable: ra.Runnable,
+ label: string,
+ runnableArgs: ra.CargoRunnableArgs,
runnableEnvCfg: RunnableEnvCfg,
): Record<string, string> {
- const env: Record<string, string> = { RUST_BACKTRACE: "short" };
+ const env = prepareBaseEnv();
- if (runnable.args.expectTest) {
+ if (runnableArgs.expectTest) {
env["UPDATE_EXPECT"] = "1";
}
- Object.assign(env, process.env as { [key: string]: string });
const platform = process.platform;
const checkPlatform = (it: RunnableEnvCfgItem) => {
@@ -90,7 +96,7 @@ export function prepareEnv(
if (runnableEnvCfg) {
if (Array.isArray(runnableEnvCfg)) {
for (const it of runnableEnvCfg) {
- const masked = !it.mask || new RegExp(it.mask).test(runnable.label);
+ const masked = !it.mask || new RegExp(it.mask).test(label);
if (masked && checkPlatform(it)) {
Object.assign(env, it.env);
}
@@ -103,34 +109,52 @@ export function prepareEnv(
return env;
}
-export async function createTask(runnable: ra.Runnable, config: Config): Promise<vscode.Task> {
- if (runnable.kind !== "cargo") {
- // rust-analyzer supports only one kind, "cargo"
- // do not use tasks.TASK_TYPE here, these are completely different meanings.
+export async function createTaskFromRunnable(
+ runnable: ra.Runnable,
+ config: Config,
+): Promise<vscode.Task> {
+ let definition: tasks.RustTargetDefinition;
+ if (runnable.kind === "cargo") {
+ const runnableArgs = runnable.args;
+ let args = createCargoArgs(runnableArgs);
+
+ let program: string;
+ if (runnableArgs.overrideCargo) {
+ // Split on spaces to allow overrides like "wrapper cargo".
+ const cargoParts = runnableArgs.overrideCargo.split(" ");
+
+ program = unwrapUndefinable(cargoParts[0]);
+ args = [...cargoParts.slice(1), ...args];
+ } else {
+ program = await toolchain.cargoPath();
+ }
- throw `Unexpected runnable kind: ${runnable.kind}`;
+ definition = {
+ type: tasks.CARGO_TASK_TYPE,
+ command: program,
+ args,
+ cwd: runnableArgs.workspaceRoot || ".",
+ env: prepareEnv(runnable.label, runnableArgs, config.runnablesExtraEnv),
+ };
+ } else {
+ const runnableArgs = runnable.args;
+ definition = {
+ type: tasks.SHELL_TASK_TYPE,
+ command: runnableArgs.program,
+ args: runnableArgs.args,
+ cwd: runnableArgs.cwd,
+ env: prepareBaseEnv(),
+ };
}
- const args = createArgs(runnable);
-
- const definition: tasks.CargoTaskDefinition = {
- type: tasks.TASK_TYPE,
- command: unwrapUndefinable(args[0]), // run, test, etc...
- args: args.slice(1),
- cwd: runnable.args.workspaceRoot || ".",
- env: prepareEnv(runnable, config.runnablesExtraEnv),
- overrideCargo: runnable.args.overrideCargo,
- };
-
- // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
- const target = vscode.workspace.workspaceFolders![0]; // safe, see main activate()
+ const target = vscode.workspace.workspaceFolders?.[0];
+ const exec = await tasks.targetToExecution(definition, config.cargoRunner, true);
const task = await tasks.buildRustTask(
target,
definition,
runnable.label,
config.problemMatcher,
- config.cargoRunner,
- true,
+ exec,
);
task.presentationOptions.clear = true;
@@ -141,13 +165,13 @@ export async function createTask(runnable: ra.Runnable, config: Config): Promise
return task;
}
-export function createArgs(runnable: ra.Runnable): string[] {
- const args = [...runnable.args.cargoArgs]; // should be a copy!
- if (runnable.args.cargoExtraArgs) {
- args.push(...runnable.args.cargoExtraArgs); // Append user-specified cargo options.
+export function createCargoArgs(runnableArgs: ra.CargoRunnableArgs): string[] {
+ const args = [...runnableArgs.cargoArgs]; // should be a copy!
+ if (runnableArgs.cargoExtraArgs) {
+ args.push(...runnableArgs.cargoExtraArgs); // Append user-specified cargo options.
}
- if (runnable.args.executableArgs.length > 0) {
- args.push("--", ...runnable.args.executableArgs);
+ if (runnableArgs.executableArgs.length > 0) {
+ args.push("--", ...runnableArgs.executableArgs);
}
return args;
}
diff --git a/editors/code/src/snippets.ts b/editors/code/src/snippets.ts
index b3982bdf2b..a469a9cd1f 100644
--- a/editors/code/src/snippets.ts
+++ b/editors/code/src/snippets.ts
@@ -1,7 +1,6 @@
import * as vscode from "vscode";
-import { assert } from "./util";
-import { unwrapUndefinable } from "./undefinable";
+import { assert, unwrapUndefinable } from "./util";
export type SnippetTextDocumentEdit = [vscode.Uri, (vscode.TextEdit | vscode.SnippetTextEdit)[]];
diff --git a/editors/code/src/tasks.ts b/editors/code/src/tasks.ts
index 2b3abc5d65..6f4fbf9188 100644
--- a/editors/code/src/tasks.ts
+++ b/editors/code/src/tasks.ts
@@ -1,28 +1,29 @@
import * as vscode from "vscode";
-import * as toolchain from "./toolchain";
import type { Config } from "./config";
-import { log } from "./util";
-import { unwrapUndefinable } from "./undefinable";
+import { log, unwrapUndefinable } from "./util";
+import * as toolchain from "./toolchain";
// This ends up as the `type` key in tasks.json. RLS also uses `cargo` and
// our configuration should be compatible with it so use the same key.
-export const TASK_TYPE = "cargo";
+export const CARGO_TASK_TYPE = "cargo";
+export const SHELL_TASK_TYPE = "shell";
-export const TASK_SOURCE = "rust";
+export const RUST_TASK_SOURCE = "rust";
-export interface CargoTaskDefinition extends vscode.TaskDefinition {
- // The cargo command, such as "run" or "check".
+export type RustTargetDefinition = {
+ readonly type: typeof CARGO_TASK_TYPE | typeof SHELL_TASK_TYPE;
+} & vscode.TaskDefinition &
+ RustTarget;
+export type RustTarget = {
+ // The command to run, usually `cargo`.
command: string;
- // Additional arguments passed to the cargo command.
+ // Additional arguments passed to the command.
args?: string[];
- // The working directory to run the cargo command in.
+ // The working directory to run the command in.
cwd?: string;
// The shell environment.
env?: { [key: string]: string };
- // Override the cargo executable name, such as
- // "my_custom_cargo_bin".
- overrideCargo?: string;
-}
+};
class RustTaskProvider implements vscode.TaskProvider {
private readonly config: Config;
@@ -32,6 +33,10 @@ class RustTaskProvider implements vscode.TaskProvider {
}
async provideTasks(): Promise<vscode.Task[]> {
+ if (!vscode.workspace.workspaceFolders) {
+ return [];
+ }
+
// Detect Rust tasks. Currently we do not do any actual detection
// of tasks (e.g. aliases in .cargo/config) and just return a fixed
// set of tasks that always exist. These tasks cannot be removed in
@@ -46,15 +51,23 @@ class RustTaskProvider implements vscode.TaskProvider {
{ command: "run", group: undefined },
];
+ // FIXME: The server should provide this
+ const cargo = await toolchain.cargoPath();
+
const tasks: vscode.Task[] = [];
- for (const workspaceTarget of vscode.workspace.workspaceFolders || []) {
+ for (const workspaceTarget of vscode.workspace.workspaceFolders) {
for (const def of defs) {
+ const definition = {
+ command: cargo,
+ args: [def.command],
+ };
+ const exec = await targetToExecution(definition, this.config.cargoRunner);
const vscodeTask = await buildRustTask(
workspaceTarget,
- { type: TASK_TYPE, command: def.command },
+ { ...definition, type: CARGO_TASK_TYPE },
`cargo ${def.command}`,
this.config.problemMatcher,
- this.config.cargoRunner,
+ exec,
);
vscodeTask.group = def.group;
tasks.push(vscodeTask);
@@ -68,16 +81,24 @@ class RustTaskProvider implements vscode.TaskProvider {
// VSCode calls this for every cargo task in the user's tasks.json,
// we need to inform VSCode how to execute that command by creating
// a ShellExecution for it.
-
- const definition = task.definition as CargoTaskDefinition;
-
- if (definition.type === TASK_TYPE) {
+ if (task.definition.type === CARGO_TASK_TYPE) {
+ const taskDefinition = task.definition as RustTargetDefinition;
+ const cargo = await toolchain.cargoPath();
+ const exec = await targetToExecution(
+ {
+ command: cargo,
+ args: [taskDefinition.command].concat(taskDefinition.args || []),
+ cwd: taskDefinition.cwd,
+ env: taskDefinition.env,
+ },
+ this.config.cargoRunner,
+ );
return await buildRustTask(
task.scope,
- definition,
+ taskDefinition,
task.name,
this.config.problemMatcher,
- this.config.cargoRunner,
+ exec,
);
}
@@ -87,37 +108,34 @@ class RustTaskProvider implements vscode.TaskProvider {
export async function buildRustTask(
scope: vscode.WorkspaceFolder | vscode.TaskScope | undefined,
- definition: CargoTaskDefinition,
+ definition: RustTargetDefinition,
name: string,
problemMatcher: string[],
- customRunner?: string,
- throwOnError: boolean = false,
+ exec: vscode.ProcessExecution | vscode.ShellExecution,
): Promise<vscode.Task> {
- const exec = await cargoToExecution(definition, customRunner, throwOnError);
-
return new vscode.Task(
definition,
// scope can sometimes be undefined. in these situations we default to the workspace taskscope as
// recommended by the official docs: https://code.visualstudio.com/api/extension-guides/task-provider#task-provider)
scope ?? vscode.TaskScope.Workspace,
name,
- TASK_SOURCE,
+ RUST_TASK_SOURCE,
exec,
problemMatcher,
);
}
-async function cargoToExecution(
- definition: CargoTaskDefinition,
- customRunner: string | undefined,
- throwOnError: boolean,
+export async function targetToExecution(
+ definition: RustTarget,
+ customRunner?: string,
+ throwOnError: boolean = false,
): Promise<vscode.ProcessExecution | vscode.ShellExecution> {
if (customRunner) {
const runnerCommand = `${customRunner}.buildShellExecution`;
try {
const runnerArgs = {
- kind: TASK_TYPE,
+ kind: CARGO_TASK_TYPE,
args: definition.args,
cwd: definition.cwd,
env: definition.env,
@@ -137,18 +155,8 @@ async function cargoToExecution(
// fallback to default processing
}
}
-
- // Check whether we must use a user-defined substitute for cargo.
- // Split on spaces to allow overrides like "wrapper cargo".
- const cargoPath = await toolchain.cargoPath();
- const cargoCommand = definition.overrideCargo?.split(" ") ?? [cargoPath];
-
- const args = [definition.command].concat(definition.args ?? []);
- const fullCommand = [...cargoCommand, ...args];
-
- const processName = unwrapUndefinable(fullCommand[0]);
-
- return new vscode.ProcessExecution(processName, fullCommand.slice(1), {
+ const args = unwrapUndefinable(definition.args);
+ return new vscode.ProcessExecution(definition.command, args, {
cwd: definition.cwd,
env: definition.env,
});
@@ -156,5 +164,5 @@ async function cargoToExecution(
export function activateTaskProvider(config: Config): vscode.Disposable {
const provider = new RustTaskProvider(config);
- return vscode.tasks.registerTaskProvider(TASK_TYPE, provider);
+ return vscode.tasks.registerTaskProvider(CARGO_TASK_TYPE, provider);
}
diff --git a/editors/code/src/toolchain.ts b/editors/code/src/toolchain.ts
index 58e5fc747a..a48d2d90cc 100644
--- a/editors/code/src/toolchain.ts
+++ b/editors/code/src/toolchain.ts
@@ -3,9 +3,7 @@ import * as os from "os";
import * as path from "path";
import * as readline from "readline";
import * as vscode from "vscode";
-import { execute, log, memoizeAsync } from "./util";
-import { unwrapNullable } from "./nullable";
-import { unwrapUndefinable } from "./undefinable";
+import { execute, log, memoizeAsync, unwrapNullable, unwrapUndefinable } from "./util";
interface CompilationArtifact {
fileName: string;
@@ -151,12 +149,13 @@ export async function getRustcId(dir: string): Promise<string> {
}
/** Mirrors `toolchain::cargo()` implementation */
+// FIXME: The server should provide this
export function cargoPath(): Promise<string> {
return getPathForExecutable("cargo");
}
/** Mirrors `toolchain::get_path_for_executable()` implementation */
-export const getPathForExecutable = memoizeAsync(
+const getPathForExecutable = memoizeAsync(
// We apply caching to decrease file-system interactions
async (executableName: "cargo" | "rustc" | "rustup"): Promise<string> => {
{
diff --git a/editors/code/src/undefinable.ts b/editors/code/src/undefinable.ts
deleted file mode 100644
index 813bac5a12..0000000000
--- a/editors/code/src/undefinable.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-export type NotUndefined<T> = T extends undefined ? never : T;
-
-export type Undefinable<T> = T | undefined;
-
-function isNotUndefined<T>(input: Undefinable<T>): input is NotUndefined<T> {
- return input !== undefined;
-}
-
-export function expectNotUndefined<T>(input: Undefinable<T>, msg: string): NotUndefined<T> {
- if (isNotUndefined(input)) {
- return input;
- }
-
- throw new TypeError(msg);
-}
-
-export function unwrapUndefinable<T>(input: Undefinable<T>): NotUndefined<T> {
- return expectNotUndefined(input, `unwrapping \`undefined\``);
-}
diff --git a/editors/code/src/util.ts b/editors/code/src/util.ts
index 51f921a296..dd1cbe38ff 100644
--- a/editors/code/src/util.ts
+++ b/editors/code/src/util.ts
@@ -1,8 +1,8 @@
import * as vscode from "vscode";
import { strict as nativeAssert } from "assert";
-import { exec, type ExecOptions, spawnSync } from "child_process";
+import { exec, type ExecOptions } from "child_process";
import { inspect } from "util";
-import type { Env } from "./client";
+import type { CargoRunnableArgs, ShellRunnableArgs } from "./lsp_ext";
export function assert(condition: boolean, explanation: string): asserts condition {
try {
@@ -13,6 +13,10 @@ export function assert(condition: boolean, explanation: string): asserts conditi
}
}
+export type Env = {
+ [name: string]: string;
+};
+
export const log = new (class {
private enabled = true;
private readonly output = vscode.window.createOutputChannel("Rust Analyzer Client");
@@ -77,6 +81,12 @@ export function isCargoTomlDocument(document: vscode.TextDocument): document is
return document.uri.scheme === "file" && document.fileName.endsWith("Cargo.toml");
}
+export function isCargoRunnableArgs(
+ args: CargoRunnableArgs | ShellRunnableArgs,
+): args is CargoRunnableArgs {
+ return (args as CargoRunnableArgs).executableArgs !== undefined;
+}
+
export function isRustEditor(editor: vscode.TextEditor): editor is RustEditor {
return isRustDocument(editor.document);
}
@@ -94,20 +104,6 @@ export function isDocumentInWorkspace(document: RustDocument): boolean {
return false;
}
-export function isValidExecutable(path: string, extraEnv: Env): boolean {
- log.debug("Checking availability of a binary at", path);
-
- const res = spawnSync(path, ["--version"], {
- encoding: "utf8",
- env: { ...process.env, ...extraEnv },
- });
-
- const printOutput = res.error ? log.warn : log.info;
- printOutput(path, "--version:", res);
-
- return res.status === 0;
-}
-
/** Sets ['when'](https://code.visualstudio.com/docs/getstarted/keybindings#_when-clause-contexts) clause contexts */
export function setContextValue(key: string, value: any): Thenable<void> {
return vscode.commands.executeCommand("setContext", key, value);
@@ -199,3 +195,42 @@ export class LazyOutputChannel implements vscode.OutputChannel {
}
}
}
+
+export type NotNull<T> = T extends null ? never : T;
+
+export type Nullable<T> = T | null;
+
+function isNotNull<T>(input: Nullable<T>): input is NotNull<T> {
+ return input !== null;
+}
+
+function expectNotNull<T>(input: Nullable<T>, msg: string): NotNull<T> {
+ if (isNotNull(input)) {
+ return input;
+ }
+
+ throw new TypeError(msg);
+}
+
+export function unwrapNullable<T>(input: Nullable<T>): NotNull<T> {
+ return expectNotNull(input, `unwrapping \`null\``);
+}
+export type NotUndefined<T> = T extends undefined ? never : T;
+
+export type Undefinable<T> = T | undefined;
+
+function isNotUndefined<T>(input: Undefinable<T>): input is NotUndefined<T> {
+ return input !== undefined;
+}
+
+export function expectNotUndefined<T>(input: Undefinable<T>, msg: string): NotUndefined<T> {
+ if (isNotUndefined(input)) {
+ return input;
+ }
+
+ throw new TypeError(msg);
+}
+
+export function unwrapUndefinable<T>(input: Undefinable<T>): NotUndefined<T> {
+ return expectNotUndefined(input, `unwrapping \`undefined\``);
+}
diff --git a/editors/code/tests/unit/runnable_env.test.ts b/editors/code/tests/unit/runnable_env.test.ts
index b1407ce019..21bdaf5384 100644
--- a/editors/code/tests/unit/runnable_env.test.ts
+++ b/editors/code/tests/unit/runnable_env.test.ts
@@ -10,6 +10,7 @@ function makeRunnable(label: string): ra.Runnable {
kind: "cargo",
args: {
cargoArgs: [],
+ cwd: ".",
executableArgs: [],
cargoExtraArgs: [],
},
@@ -18,7 +19,8 @@ function makeRunnable(label: string): ra.Runnable {
function fakePrepareEnv(runnableName: string, config: RunnableEnvCfg): Record<string, string> {
const runnable = makeRunnable(runnableName);
- return prepareEnv(runnable, config);
+ const runnableArgs = runnable.args as ra.CargoRunnableArgs;
+ return prepareEnv(runnable.label, runnableArgs, config);
}
export async function getTests(ctx: Context) {
diff --git a/lib/lsp-server/src/lib.rs b/lib/lsp-server/src/lib.rs
index 5dc052b587..4069e6f2c0 100644
--- a/lib/lsp-server/src/lib.rs
+++ b/lib/lsp-server/src/lib.rs
@@ -433,8 +433,7 @@ mod tests {
initialize_start_test(TestCase {
test_messages: vec![notification_msg.clone()],
expected_resp: Err(ProtocolError::new(format!(
- "expected initialize request, got {:?}",
- notification_msg
+ "expected initialize request, got {notification_msg:?}"
))),
});
}
diff --git a/rust-version b/rust-version
index 207ef6c5de..c605feb6ee 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-6579ed89f0fcc26da71afdd11d30d63f6f812a0a
+3d5d7a24f76006b391d8a53d903ae64c1b4a52d2
diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs
index 2fe9db98cf..742cf7f609 100644
--- a/xtask/src/dist.rs
+++ b/xtask/src/dist.rs
@@ -70,8 +70,8 @@ fn dist_client(
&format!(r#""version": "{version}""#),
)
.replace(r#""releaseTag": null"#, &format!(r#""releaseTag": "{release_tag}""#))
- .replace(r#""$generated-start": {},"#, "")
- .replace(",\n \"$generated-end\": {}", "")
+ .replace(r#""title": "$generated-start""#, "")
+ .replace(r#""title": "$generated-end""#, "")
.replace(r#""enabledApiProposals": [],"#, r#""#);
patch.commit(sh)?;
diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs
index 285abb9efc..9a7785dd43 100644
--- a/xtask/src/metrics.rs
+++ b/xtask/src/metrics.rs
@@ -64,7 +64,7 @@ impl flags::Metrics {
};
let mut file =
- fs::File::options().write(true).create(true).open(format!("target/{}.json", name))?;
+ fs::File::options().write(true).create(true).open(format!("target/{name}.json"))?;
writeln!(file, "{}", metrics.json())?;
eprintln!("{metrics:#?}");
Ok(())
diff --git a/xtask/src/publish.rs b/xtask/src/publish.rs
index 7faae9b20c..f5d765d7c9 100644
--- a/xtask/src/publish.rs
+++ b/xtask/src/publish.rs
@@ -9,6 +9,15 @@ impl flags::PublishReleaseNotes {
pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
let asciidoc = sh.read_file(&self.changelog)?;
let mut markdown = notes::convert_asciidoc_to_markdown(std::io::Cursor::new(&asciidoc))?;
+ if !markdown.starts_with("# Changelog") {
+ bail!("changelog Markdown should start with `# Changelog`");
+ }
+ const NEWLINES: &str = "\n\n";
+ let Some(idx) = markdown.find(NEWLINES) else {
+ bail!("missing newlines after changelog title");
+ };
+ markdown.replace_range(0..idx + NEWLINES.len(), "");
+
let file_name = check_file_name(self.changelog)?;
let tag_name = &file_name[0..10];
let original_changelog_url = create_original_changelog_url(&file_name);