Unnamed repository; edit this file 'description' to name the repository.
Merge ref 'db3e99bbab28' from rust-lang/rust
Pull recent changes from https://github.com/rust-lang/rust via Josh. Upstream ref: rust-lang/rust@db3e99bbab28c6ca778b13222becdea54533d908 Filtered ref: rust-lang/compiler-builtins@970db0bc6d91c4f4525c69c5a91858bc7e509a14 Upstream diff: https://github.com/rust-lang/rust/compare/44e34e1ac6d7e69b40856cf1403d3da145319c30...db3e99bbab28c6ca778b13222becdea54533d908 This merge was created using https://github.com/rust-lang/josh-sync.
The rustc-josh-sync Cronjob Bot 3 months ago
parent 2377d23 · parent 4b9e642 · commit 533cfab
-rw-r--r--.github/workflows/rustdoc.yaml2
-rw-r--r--Cargo.lock14
-rw-r--r--Cargo.toml4
-rw-r--r--crates/base-db/src/input.rs3
-rw-r--r--crates/hir-def/src/attrs.rs1
-rw-r--r--crates/hir-def/src/builtin_derive.rs22
-rw-r--r--crates/hir-def/src/dyn_map.rs10
-rw-r--r--crates/hir-def/src/expr_store.rs14
-rw-r--r--crates/hir-def/src/expr_store/lower.rs80
-rw-r--r--crates/hir-def/src/expr_store/lower/format_args.rs5
-rw-r--r--crates/hir-def/src/expr_store/pretty.rs20
-rw-r--r--crates/hir-def/src/expr_store/tests/body.rs18
-rw-r--r--crates/hir-def/src/hir.rs9
-rw-r--r--crates/hir-def/src/item_scope.rs19
-rw-r--r--crates/hir-def/src/lang_item.rs59
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mbe/matching.rs20
-rw-r--r--crates/hir-def/src/nameres.rs12
-rw-r--r--crates/hir-def/src/nameres/collector.rs88
-rw-r--r--crates/hir-def/src/nameres/tests.rs2
-rw-r--r--crates/hir-def/src/nameres/tests/imports.rs63
-rw-r--r--crates/hir-def/src/nameres/tests/primitives.rs23
-rw-r--r--crates/hir-def/src/signatures.rs12
-rw-r--r--crates/hir-expand/src/builtin/attr_macro.rs2
-rw-r--r--crates/hir-expand/src/cfg_process.rs2
-rw-r--r--crates/hir-expand/src/declarative.rs4
-rw-r--r--crates/hir-expand/src/mod_path.rs4
-rw-r--r--crates/hir-ty/src/diagnostics/expr.rs46
-rw-r--r--crates/hir-ty/src/infer.rs28
-rw-r--r--crates/hir-ty/src/infer/cast.rs270
-rw-r--r--crates/hir-ty/src/infer/closure.rs2
-rw-r--r--crates/hir-ty/src/infer/closure/analysis.rs4
-rw-r--r--crates/hir-ty/src/infer/expr.rs24
-rw-r--r--crates/hir-ty/src/infer/mutability.rs10
-rw-r--r--crates/hir-ty/src/infer/path.rs1
-rw-r--r--crates/hir-ty/src/infer/unify.rs10
-rw-r--r--crates/hir-ty/src/lower.rs458
-rw-r--r--crates/hir-ty/src/lower/path.rs42
-rw-r--r--crates/hir-ty/src/method_resolution.rs4
-rw-r--r--crates/hir-ty/src/method_resolution/probe.rs31
-rw-r--r--crates/hir-ty/src/mir/lower.rs11
-rw-r--r--crates/hir-ty/src/next_solver/fulfill.rs8
-rw-r--r--crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs4
-rw-r--r--crates/hir-ty/src/next_solver/infer/mod.rs2
-rw-r--r--crates/hir-ty/src/next_solver/infer/traits.rs7
-rw-r--r--crates/hir-ty/src/next_solver/interner.rs7
-rw-r--r--crates/hir-ty/src/next_solver/predicate.rs5
-rw-r--r--crates/hir-ty/src/next_solver/ty.rs19
-rw-r--r--crates/hir-ty/src/next_solver/util.rs15
-rw-r--r--crates/hir-ty/src/tests/closure_captures.rs82
-rw-r--r--crates/hir-ty/src/tests/opaque_types.rs36
-rw-r--r--crates/hir-ty/src/tests/patterns.rs118
-rw-r--r--crates/hir-ty/src/tests/regression.rs104
-rw-r--r--crates/hir-ty/src/tests/regression/new_solver.rs131
-rw-r--r--crates/hir-ty/src/tests/simple.rs99
-rw-r--r--crates/hir-ty/src/tests/traits.rs69
-rw-r--r--crates/hir/src/attrs.rs66
-rw-r--r--crates/hir/src/lib.rs17
-rw-r--r--crates/hir/src/semantics.rs56
-rw-r--r--crates/hir/src/semantics/source_to_def.rs17
-rw-r--r--crates/hir/src/source_analyzer.rs198
-rw-r--r--crates/hir/src/symbols.rs87
-rw-r--r--crates/hir/src/term_search.rs2
-rw-r--r--crates/ide-assists/src/handlers/apply_demorgan.rs16
-rw-r--r--crates/ide-assists/src/handlers/convert_range_for_to_while.rs157
-rw-r--r--crates/ide-assists/src/handlers/convert_to_guarded_return.rs72
-rw-r--r--crates/ide-assists/src/handlers/expand_rest_pattern.rs6
-rw-r--r--crates/ide-assists/src/handlers/extract_function.rs95
-rw-r--r--crates/ide-assists/src/handlers/generate_mut_trait_impl.rs191
-rw-r--r--crates/ide-assists/src/handlers/inline_type_alias.rs6
-rw-r--r--crates/ide-assists/src/handlers/move_guard.rs100
-rw-r--r--crates/ide-assists/src/handlers/remove_parentheses.rs6
-rw-r--r--crates/ide-assists/src/handlers/toggle_macro_delimiter.rs149
-rw-r--r--crates/ide-assists/src/handlers/unwrap_block.rs255
-rw-r--r--crates/ide-assists/src/utils.rs11
-rw-r--r--crates/ide-completion/src/completions/expr.rs2
-rw-r--r--crates/ide-completion/src/completions/extern_crate.rs6
-rw-r--r--crates/ide-completion/src/completions/record.rs14
-rw-r--r--crates/ide-completion/src/context.rs2
-rw-r--r--crates/ide-completion/src/context/analysis.rs22
-rw-r--r--crates/ide-completion/src/item.rs1
-rw-r--r--crates/ide-completion/src/tests/expression.rs51
-rw-r--r--crates/ide-completion/src/tests/flyimport.rs48
-rw-r--r--crates/ide-completion/src/tests/record.rs40
-rw-r--r--crates/ide-completion/src/tests/type_pos.rs29
-rw-r--r--crates/ide-db/src/imports/import_assets.rs14
-rw-r--r--crates/ide-db/src/items_locator.rs4
-rw-r--r--crates/ide-db/src/lib.rs8
-rw-r--r--crates/ide-db/src/symbol_index.rs682
-rw-r--r--crates/ide-db/src/test_data/test_doc_alias.txt84
-rw-r--r--crates/ide-db/src/test_data/test_symbol_index_collection.txt396
-rw-r--r--crates/ide-db/src/test_data/test_symbols_exclude_imports.txt12
-rw-r--r--crates/ide-db/src/test_data/test_symbols_with_imports.txt24
-rw-r--r--crates/ide-diagnostics/src/handlers/invalid_cast.rs10
-rw-r--r--crates/ide-diagnostics/src/handlers/missing_fields.rs77
-rw-r--r--crates/ide-diagnostics/src/handlers/missing_lifetime.rs15
-rw-r--r--crates/ide-diagnostics/src/handlers/mutability_errors.rs6
-rw-r--r--crates/ide/src/expand_macro.rs77
-rw-r--r--crates/ide/src/hover/render.rs8
-rw-r--r--crates/ide/src/hover/tests.rs72
-rw-r--r--crates/ide/src/navigation_target.rs17
-rw-r--r--crates/ide/src/references.rs2
-rw-r--r--crates/ide/src/runnables.rs8
-rw-r--r--crates/ide/src/syntax_highlighting.rs16
-rw-r--r--crates/ide/src/syntax_highlighting/escape.rs43
-rw-r--r--crates/ide/src/syntax_highlighting/highlight.rs26
-rw-r--r--crates/ide/src/syntax_highlighting/highlights.rs8
-rw-r--r--crates/ide/src/syntax_highlighting/inject.rs12
-rw-r--r--crates/ide/src/syntax_highlighting/tags.rs1
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_asm.html8
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html2
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html16
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html2
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_deprecated.html4
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html6
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html14
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_general.html12
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html2
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html2
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2015.html8
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2018.html8
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2021.html8
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2024.html8
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_keywords_macros.html4
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_macros.html2
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_strings.html2
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_strings_disabled.html47
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html4
-rw-r--r--crates/ide/src/syntax_highlighting/tests.rs17
-rw-r--r--crates/intern/src/symbol/symbols.rs7
-rw-r--r--crates/load-cargo/src/lib.rs85
-rw-r--r--crates/mbe/src/expander/matcher.rs5
-rw-r--r--crates/parser/src/grammar.rs2
-rw-r--r--crates/parser/src/grammar/attributes.rs12
-rw-r--r--crates/parser/test_data/parser/err/0002_duplicate_shebang.rast2
-rw-r--r--crates/parser/test_data/parser/err/0005_attribute_recover.rast2
-rw-r--r--crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast6
-rw-r--r--crates/proc-macro-api/src/bidirectional_protocol.rs62
-rw-r--r--crates/proc-macro-api/src/bidirectional_protocol/msg.rs55
-rw-r--r--crates/proc-macro-api/src/legacy_protocol.rs23
-rw-r--r--crates/proc-macro-api/src/legacy_protocol/msg.rs38
-rw-r--r--crates/proc-macro-api/src/lib.rs99
-rw-r--r--crates/proc-macro-api/src/pool.rs91
-rw-r--r--crates/proc-macro-api/src/process.rs251
-rw-r--r--crates/proc-macro-api/src/transport.rs4
-rw-r--r--crates/proc-macro-api/src/transport/codec.rs15
-rw-r--r--crates/proc-macro-api/src/transport/codec/json.rs58
-rw-r--r--crates/proc-macro-api/src/transport/codec/postcard.rs40
-rw-r--r--crates/proc-macro-api/src/transport/framing.rs14
-rw-r--r--crates/proc-macro-api/src/transport/json.rs48
-rw-r--r--crates/proc-macro-api/src/transport/postcard.rs30
-rw-r--r--crates/proc-macro-srv-cli/Cargo.toml15
-rw-r--r--crates/proc-macro-srv-cli/src/lib.rs11
-rw-r--r--crates/proc-macro-srv-cli/src/main.rs54
-rw-r--r--crates/proc-macro-srv-cli/src/main_loop.rs190
-rw-r--r--crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs229
-rw-r--r--crates/proc-macro-srv-cli/tests/common/utils.rs288
-rw-r--r--crates/proc-macro-srv-cli/tests/legacy_json.rs234
-rw-r--r--crates/proc-macro-srv/Cargo.toml1
-rw-r--r--crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs10
-rw-r--r--crates/proc-macro-srv/src/lib.rs62
-rw-r--r--crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs14
-rw-r--r--crates/proc-macro-srv/src/tests/mod.rs15
-rw-r--r--crates/proc-macro-srv/src/tests/utils.rs71
-rw-r--r--crates/project-model/src/cargo_workspace.rs2
-rw-r--r--crates/project-model/src/project_json.rs41
-rw-r--r--crates/project-model/src/workspace.rs6
-rw-r--r--crates/rust-analyzer/src/cli/analysis_stats.rs1
-rw-r--r--crates/rust-analyzer/src/cli/diagnostics.rs1
-rw-r--r--crates/rust-analyzer/src/cli/lsif.rs1
-rw-r--r--crates/rust-analyzer/src/cli/prime_caches.rs1
-rw-r--r--crates/rust-analyzer/src/cli/run_tests.rs1
-rw-r--r--crates/rust-analyzer/src/cli/rustc_tests.rs1
-rw-r--r--crates/rust-analyzer/src/cli/scip.rs1
-rw-r--r--crates/rust-analyzer/src/cli/ssr.rs2
-rw-r--r--crates/rust-analyzer/src/cli/unresolved_references.rs1
-rw-r--r--crates/rust-analyzer/src/command.rs8
-rw-r--r--crates/rust-analyzer/src/config.rs200
-rw-r--r--crates/rust-analyzer/src/config/patch_old_style.rs2
-rw-r--r--crates/rust-analyzer/src/diagnostics.rs76
-rw-r--r--crates/rust-analyzer/src/discover.rs8
-rw-r--r--crates/rust-analyzer/src/flycheck.rs500
-rw-r--r--crates/rust-analyzer/src/global_state.rs60
-rw-r--r--crates/rust-analyzer/src/handlers/dispatch.rs6
-rw-r--r--crates/rust-analyzer/src/handlers/notification.rs85
-rw-r--r--crates/rust-analyzer/src/integrated_benchmarks.rs3
-rw-r--r--crates/rust-analyzer/src/lsp/to_proto.rs8
-rw-r--r--crates/rust-analyzer/src/main_loop.rs83
-rw-r--r--crates/rust-analyzer/src/reload.rs40
-rw-r--r--crates/rust-analyzer/src/target_spec.rs12
-rw-r--r--crates/rust-analyzer/src/task_pool.rs2
-rw-r--r--crates/rust-analyzer/src/test_runner.rs4
-rw-r--r--crates/rust-analyzer/tests/slow-tests/main.rs44
-rw-r--r--crates/span/src/hygiene.rs12
-rw-r--r--crates/stdx/src/process.rs2
-rw-r--r--crates/syntax/src/ast/prec.rs24
-rw-r--r--crates/syntax/src/ast/syntax_factory/constructors.rs38
-rw-r--r--crates/syntax/src/ptr.rs2
-rw-r--r--crates/syntax/src/syntax_editor/mapping.rs2
-rw-r--r--crates/test-fixture/src/lib.rs109
-rw-r--r--crates/test-utils/src/minicore.rs96
-rw-r--r--crates/toolchain/src/lib.rs3
-rw-r--r--crates/vfs-notify/Cargo.toml2
-rw-r--r--docs/book/src/configuration_generated.md147
-rw-r--r--docs/book/src/faq.md2
-rw-r--r--docs/book/src/non_cargo_based_projects.md128
-rw-r--r--editors/code/package.json29
-rw-r--r--lib/line-index/src/lib.rs2
-rw-r--r--lib/smol_str/CHANGELOG.md2
-rw-r--r--lib/smol_str/Cargo.toml2
-rw-r--r--rust-version2
210 files changed, 7485 insertions, 2141 deletions
diff --git a/.github/workflows/rustdoc.yaml b/.github/workflows/rustdoc.yaml
index 9cc18fc69e..0cc7ce77dd 100644
--- a/.github/workflows/rustdoc.yaml
+++ b/.github/workflows/rustdoc.yaml
@@ -24,7 +24,7 @@ jobs:
run: rustup update --no-self-update stable
- name: Build Documentation
- run: cargo doc --all --no-deps
+ run: cargo doc --all --no-deps --document-private-items
- name: Deploy Docs
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
diff --git a/Cargo.lock b/Cargo.lock
index 42eaeb01f1..2cf3e37a43 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -178,9 +178,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "camino"
-version = "1.2.0"
+version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e1de8bc0aa9e9385ceb3bf0c152e3a9b9544f6c4a912c8ae504e80c1f0368603"
+checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48"
dependencies = [
"serde_core",
]
@@ -1864,6 +1864,7 @@ dependencies = [
"intern",
"libc",
"libloading",
+ "line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"memmap2",
"object",
"paths",
@@ -1878,9 +1879,14 @@ name = "proc-macro-srv-cli"
version = "0.0.0"
dependencies = [
"clap",
- "postcard",
+ "expect-test",
+ "intern",
+ "paths",
"proc-macro-api",
"proc-macro-srv",
+ "proc-macro-test",
+ "span",
+ "tt",
]
[[package]]
@@ -2628,7 +2634,7 @@ dependencies = [
[[package]]
name = "smol_str"
-version = "0.3.4"
+version = "0.3.5"
dependencies = [
"arbitrary",
"borsh",
diff --git a/Cargo.toml b/Cargo.toml
index 8003cb2fba..2288933a96 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -42,7 +42,7 @@ debug = 2
# lsp-server = { path = "lib/lsp-server" }
-# ungrammar = { path = "lin/ungrammar" }
+# ungrammar = { path = "lib/ungrammar" }
# salsa = { path = "../salsa" }
# salsa-macros = { path = "../salsa/components/salsa-macros" }
@@ -107,7 +107,7 @@ anyhow = "1.0.98"
arrayvec = "0.7.6"
bitflags = "2.9.1"
cargo_metadata = "0.23.0"
-camino = "1.1.10"
+camino = "1.2.2"
crossbeam-channel = "0.5.15"
dissimilar = "1.0.10"
dot = "0.1.4"
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 240f126491..94793a3618 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -221,6 +221,7 @@ pub enum LangCrateOrigin {
ProcMacro,
Std,
Test,
+ Dependency,
Other,
}
@@ -245,7 +246,7 @@ impl fmt::Display for LangCrateOrigin {
LangCrateOrigin::ProcMacro => "proc_macro",
LangCrateOrigin::Std => "std",
LangCrateOrigin::Test => "test",
- LangCrateOrigin::Other => "other",
+ LangCrateOrigin::Other | LangCrateOrigin::Dependency => "other",
};
f.write_str(text)
}
diff --git a/crates/hir-def/src/attrs.rs b/crates/hir-def/src/attrs.rs
index 83df11f2d2..0b8f656872 100644
--- a/crates/hir-def/src/attrs.rs
+++ b/crates/hir-def/src/attrs.rs
@@ -135,6 +135,7 @@ fn match_attr_flags(attr_flags: &mut AttrFlags, attr: Meta) -> ControlFlow<Infal
match attr {
Meta::NamedKeyValue { name: Some(name), value, .. } => match name.text() {
"deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
+ "ignore" => attr_flags.insert(AttrFlags::IS_IGNORE),
"lang" => attr_flags.insert(AttrFlags::LANG_ITEM),
"path" => attr_flags.insert(AttrFlags::HAS_PATH),
"unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE),
diff --git a/crates/hir-def/src/builtin_derive.rs b/crates/hir-def/src/builtin_derive.rs
index 32385516ab..946f08ec36 100644
--- a/crates/hir-def/src/builtin_derive.rs
+++ b/crates/hir-def/src/builtin_derive.rs
@@ -8,7 +8,8 @@ use intern::{Symbol, sym};
use tt::TextRange;
use crate::{
- AdtId, BuiltinDeriveImplId, BuiltinDeriveImplLoc, FunctionId, HasModule, db::DefDatabase,
+ AdtId, BuiltinDeriveImplId, BuiltinDeriveImplLoc, FunctionId, HasModule, MacroId,
+ db::DefDatabase, lang_item::LangItems,
};
macro_rules! declare_enum {
@@ -86,6 +87,25 @@ declare_enum!(
DispatchFromDyn => [],
);
+impl BuiltinDeriveImplTrait {
+ pub fn derive_macro(self, lang_items: &LangItems) -> Option<MacroId> {
+ match self {
+ BuiltinDeriveImplTrait::Copy => lang_items.CopyDerive,
+ BuiltinDeriveImplTrait::Clone => lang_items.CloneDerive,
+ BuiltinDeriveImplTrait::Default => lang_items.DefaultDerive,
+ BuiltinDeriveImplTrait::Debug => lang_items.DebugDerive,
+ BuiltinDeriveImplTrait::Hash => lang_items.HashDerive,
+ BuiltinDeriveImplTrait::Ord => lang_items.OrdDerive,
+ BuiltinDeriveImplTrait::PartialOrd => lang_items.PartialOrdDerive,
+ BuiltinDeriveImplTrait::Eq => lang_items.EqDerive,
+ BuiltinDeriveImplTrait::PartialEq => lang_items.PartialEqDerive,
+ BuiltinDeriveImplTrait::CoerceUnsized | BuiltinDeriveImplTrait::DispatchFromDyn => {
+ lang_items.CoercePointeeDerive
+ }
+ }
+ }
+}
+
impl BuiltinDeriveImplMethod {
pub fn trait_method(
self,
diff --git a/crates/hir-def/src/dyn_map.rs b/crates/hir-def/src/dyn_map.rs
index 7d3a94b038..4308d0ef1c 100644
--- a/crates/hir-def/src/dyn_map.rs
+++ b/crates/hir-def/src/dyn_map.rs
@@ -27,14 +27,15 @@
pub mod keys {
use std::marker::PhantomData;
+ use either::Either;
use hir_expand::{MacroCallId, attrs::AttrId};
use rustc_hash::FxHashMap;
use syntax::{AstNode, AstPtr, ast};
use crate::{
- BlockId, ConstId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FieldId, FunctionId,
- ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitId,
- TypeAliasId, TypeOrConstParamId, UnionId, UseId,
+ BlockId, BuiltinDeriveImplId, ConstId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId,
+ FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId,
+ StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
dyn_map::{DynMap, Policy},
};
@@ -71,7 +72,8 @@ pub mod keys {
(
AttrId,
/* derive() */ MacroCallId,
- /* actual derive macros */ Box<[Option<MacroCallId>]>,
+ /* actual derive macros */
+ Box<[Option<Either<MacroCallId, BuiltinDeriveImplId>>]>,
),
> = Key::new();
diff --git a/crates/hir-def/src/expr_store.rs b/crates/hir-def/src/expr_store.rs
index 10cd460d1d..1ce4c881e7 100644
--- a/crates/hir-def/src/expr_store.rs
+++ b/crates/hir-def/src/expr_store.rs
@@ -32,7 +32,7 @@ use crate::{
expr_store::path::Path,
hir::{
Array, AsmOperand, Binding, BindingId, Expr, ExprId, ExprOrPatId, Label, LabelId, Pat,
- PatId, RecordFieldPat, Statement,
+ PatId, RecordFieldPat, RecordSpread, Statement,
},
nameres::{DefMap, block_def_map},
type_ref::{LifetimeRef, LifetimeRefId, PathId, TypeRef, TypeRefId},
@@ -474,8 +474,8 @@ impl ExpressionStore {
match expr_only.binding_owners.get(&binding) {
Some(it) => {
// We assign expression ids in a way that outer closures will receive
- // a lower id
- it.into_raw() < relative_to.into_raw()
+ // a higher id (allocated after their body is collected)
+ it.into_raw() > relative_to.into_raw()
}
None => true,
}
@@ -575,8 +575,8 @@ impl ExpressionStore {
for field in fields.iter() {
f(field.expr);
}
- if let &Some(expr) = spread {
- f(expr);
+ if let RecordSpread::Expr(expr) = spread {
+ f(*expr);
}
}
Expr::Closure { body, .. } => {
@@ -706,8 +706,8 @@ impl ExpressionStore {
for field in fields.iter() {
f(field.expr);
}
- if let &Some(expr) = spread {
- f(expr);
+ if let RecordSpread::Expr(expr) = spread {
+ f(*expr);
}
}
Expr::Closure { body, .. } => {
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index 4ae4271b92..4fbf6d9517 100644
--- a/crates/hir-def/src/expr_store/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -47,7 +47,7 @@ use crate::{
hir::{
Array, Binding, BindingAnnotation, BindingId, BindingProblems, CaptureBy, ClosureKind,
Expr, ExprId, Item, Label, LabelId, Literal, MatchArm, Movability, OffsetOf, Pat, PatId,
- RecordFieldPat, RecordLitField, Statement, generics::GenericParams,
+ RecordFieldPat, RecordLitField, RecordSpread, Statement, generics::GenericParams,
},
item_scope::BuiltinShadowMode,
item_tree::FieldsShape,
@@ -150,6 +150,7 @@ pub(super) fn lower_body(
};
let body_expr = collector.collect(
+ &mut params,
body,
if is_async_fn {
Awaitable::Yes
@@ -903,24 +904,57 @@ impl<'db> ExprCollector<'db> {
})
}
- fn collect(&mut self, expr: Option<ast::Expr>, awaitable: Awaitable) -> ExprId {
+ /// An `async fn` needs to capture all parameters in the generated `async` block, even if they have
+ /// non-captured patterns such as wildcards (to ensure consistent drop order).
+ fn lower_async_fn(&mut self, params: &mut Vec<PatId>, body: ExprId) -> ExprId {
+ let mut statements = Vec::new();
+ for param in params {
+ let name = match self.store.pats[*param] {
+ Pat::Bind { id, .. }
+ if matches!(
+ self.store.bindings[id].mode,
+ BindingAnnotation::Unannotated | BindingAnnotation::Mutable
+ ) =>
+ {
+ // If this is a direct binding, we can leave it as-is, as it'll always be captured anyway.
+ continue;
+ }
+ Pat::Bind { id, .. } => {
+ // If this is a `ref` binding, we can't leave it as is but we can at least reuse the name, for better display.
+ self.store.bindings[id].name.clone()
+ }
+ _ => self.generate_new_name(),
+ };
+ let binding_id =
+ self.alloc_binding(name.clone(), BindingAnnotation::Mutable, HygieneId::ROOT);
+ let pat_id = self.alloc_pat_desugared(Pat::Bind { id: binding_id, subpat: None });
+ let expr = self.alloc_expr_desugared(Expr::Path(name.into()));
+ statements.push(Statement::Let {
+ pat: *param,
+ type_ref: None,
+ initializer: Some(expr),
+ else_branch: None,
+ });
+ *param = pat_id;
+ }
+
+ self.alloc_expr_desugared(Expr::Async {
+ id: None,
+ statements: statements.into_boxed_slice(),
+ tail: Some(body),
+ })
+ }
+
+ fn collect(
+ &mut self,
+ params: &mut Vec<PatId>,
+ expr: Option<ast::Expr>,
+ awaitable: Awaitable,
+ ) -> ExprId {
self.awaitable_context.replace(awaitable);
self.with_label_rib(RibKind::Closure, |this| {
- if awaitable == Awaitable::Yes {
- match expr {
- Some(e) => {
- let syntax_ptr = AstPtr::new(&e);
- let expr = this.collect_expr(e);
- this.alloc_expr_desugared_with_ptr(
- Expr::Async { id: None, statements: Box::new([]), tail: Some(expr) },
- syntax_ptr,
- )
- }
- None => this.missing_expr(),
- }
- } else {
- this.collect_expr_opt(expr)
- }
+ let body = this.collect_expr_opt(expr);
+ if awaitable == Awaitable::Yes { this.lower_async_fn(params, body) } else { body }
})
}
@@ -1232,10 +1266,16 @@ impl<'db> ExprCollector<'db> {
Some(RecordLitField { name, expr })
})
.collect();
- let spread = nfl.spread().map(|s| self.collect_expr(s));
+ let spread_expr = nfl.spread().map(|s| self.collect_expr(s));
+ let has_spread_syntax = nfl.dotdot_token().is_some();
+ let spread = match (spread_expr, has_spread_syntax) {
+ (None, false) => RecordSpread::None,
+ (None, true) => RecordSpread::FieldDefaults,
+ (Some(expr), _) => RecordSpread::Expr(expr),
+ };
Expr::RecordLit { path, fields, spread }
} else {
- Expr::RecordLit { path, fields: Box::default(), spread: None }
+ Expr::RecordLit { path, fields: Box::default(), spread: RecordSpread::None }
};
self.alloc_expr(record_lit, syntax_ptr)
@@ -1961,7 +2001,7 @@ impl<'db> ExprCollector<'db> {
}
}
- fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId {
+ pub fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId {
match expr {
Some(expr) => self.collect_expr(expr),
None => self.missing_expr(),
diff --git a/crates/hir-def/src/expr_store/lower/format_args.rs b/crates/hir-def/src/expr_store/lower/format_args.rs
index 7ef84f27f6..51616afb38 100644
--- a/crates/hir-def/src/expr_store/lower/format_args.rs
+++ b/crates/hir-def/src/expr_store/lower/format_args.rs
@@ -10,7 +10,8 @@ use crate::{
builtin_type::BuiltinUint,
expr_store::{HygieneId, lower::ExprCollector, path::Path},
hir::{
- Array, BindingAnnotation, Expr, ExprId, Literal, Pat, RecordLitField, Statement,
+ Array, BindingAnnotation, Expr, ExprId, Literal, Pat, RecordLitField, RecordSpread,
+ Statement,
format_args::{
self, FormatAlignment, FormatArgs, FormatArgsPiece, FormatArgument, FormatArgumentKind,
FormatArgumentsCollector, FormatCount, FormatDebugHex, FormatOptions,
@@ -869,7 +870,7 @@ impl<'db> ExprCollector<'db> {
self.alloc_expr_desugared(Expr::RecordLit {
path: self.lang_path(lang_items.FormatPlaceholder).map(Box::new),
fields: Box::new([position, flags, precision, width]),
- spread: None,
+ spread: RecordSpread::None,
})
} else {
let format_placeholder_new =
diff --git a/crates/hir-def/src/expr_store/pretty.rs b/crates/hir-def/src/expr_store/pretty.rs
index f5ef8e1a35..35f3cd114e 100644
--- a/crates/hir-def/src/expr_store/pretty.rs
+++ b/crates/hir-def/src/expr_store/pretty.rs
@@ -16,7 +16,8 @@ use crate::{
attrs::AttrFlags,
expr_store::path::{GenericArg, GenericArgs},
hir::{
- Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement,
+ Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, RecordSpread,
+ Statement,
generics::{GenericParams, WherePredicate},
},
lang_item::LangItemTarget,
@@ -139,7 +140,7 @@ pub fn print_variant_body_hir(db: &dyn DefDatabase, owner: VariantId, edition: E
}
for (_, data) in fields.fields().iter() {
- let FieldData { name, type_ref, visibility, is_unsafe } = data;
+ let FieldData { name, type_ref, visibility, is_unsafe, default_value: _ } = data;
match visibility {
crate::item_tree::RawVisibility::Module(interned, _visibility_explicitness) => {
w!(p, "pub(in {})", interned.display(db, p.edition))
@@ -679,10 +680,17 @@ impl Printer<'_> {
p.print_expr(field.expr);
wln!(p, ",");
}
- if let Some(spread) = spread {
- w!(p, "..");
- p.print_expr(*spread);
- wln!(p);
+ match spread {
+ RecordSpread::None => {}
+ RecordSpread::FieldDefaults => {
+ w!(p, "..");
+ wln!(p);
+ }
+ RecordSpread::Expr(spread_expr) => {
+ w!(p, "..");
+ p.print_expr(*spread_expr);
+ wln!(p);
+ }
}
});
w!(self, "}}");
diff --git a/crates/hir-def/src/expr_store/tests/body.rs b/crates/hir-def/src/expr_store/tests/body.rs
index 504c310684..8f857aeeff 100644
--- a/crates/hir-def/src/expr_store/tests/body.rs
+++ b/crates/hir-def/src/expr_store/tests/body.rs
@@ -659,3 +659,21 @@ fn main() {
}"#]]
.assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
}
+
+#[test]
+fn async_fn_weird_param_patterns() {
+ let (db, body, def) = lower(
+ r#"
+async fn main(&self, param1: i32, ref mut param2: i32, _: i32, param4 @ _: i32, 123: i32) {}
+"#,
+ );
+
+ expect![[r#"
+ fn main(self, param1, mut param2, mut <ra@gennew>0, param4 @ _, mut <ra@gennew>1) async {
+ let ref mut param2 = param2;
+ let _ = <ra@gennew>0;
+ let 123 = <ra@gennew>1;
+ {}
+ }"#]]
+ .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
+}
diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs
index 53be0de7d9..7781a8fe54 100644
--- a/crates/hir-def/src/hir.rs
+++ b/crates/hir-def/src/hir.rs
@@ -187,6 +187,13 @@ impl From<ast::LiteralKind> for Literal {
}
}
+#[derive(Debug, Clone, Eq, PartialEq, Copy)]
+pub enum RecordSpread {
+ None,
+ FieldDefaults,
+ Expr(ExprId),
+}
+
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Expr {
/// This is produced if the syntax tree does not have a required expression piece.
@@ -259,7 +266,7 @@ pub enum Expr {
RecordLit {
path: Option<Box<Path>>,
fields: Box<[RecordLitField]>,
- spread: Option<ExprId>,
+ spread: RecordSpread,
},
Field {
expr: ExprId,
diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs
index a3278dd76c..9e1efb9777 100644
--- a/crates/hir-def/src/item_scope.rs
+++ b/crates/hir-def/src/item_scope.rs
@@ -4,6 +4,7 @@
use std::{fmt, sync::LazyLock};
use base_db::Crate;
+use either::Either;
use hir_expand::{AstId, MacroCallId, attrs::AttrId, name::Name};
use indexmap::map::Entry;
use itertools::Itertools;
@@ -199,7 +200,7 @@ struct DeriveMacroInvocation {
attr_id: AttrId,
/// The `#[derive]` call
attr_call_id: MacroCallId,
- derive_call_ids: SmallVec<[Option<MacroCallId>; 4]>,
+ derive_call_ids: SmallVec<[Option<Either<MacroCallId, BuiltinDeriveImplId>>; 4]>,
}
pub(crate) static BUILTIN_SCOPE: LazyLock<FxIndexMap<Name, PerNs>> = LazyLock::new(|| {
@@ -345,7 +346,9 @@ impl ItemScope {
pub fn all_macro_calls(&self) -> impl Iterator<Item = MacroCallId> + '_ {
self.macro_invocations.values().copied().chain(self.attr_macros.values().copied()).chain(
self.derive_macros.values().flat_map(|it| {
- it.iter().flat_map(|it| it.derive_call_ids.iter().copied().flatten())
+ it.iter().flat_map(|it| {
+ it.derive_call_ids.iter().copied().flatten().flat_map(|it| it.left())
+ })
}),
)
}
@@ -379,6 +382,10 @@ impl ItemScope {
self.types.get(name).map(|item| (item.def, item.vis))
}
+ pub(crate) fn makro(&self, name: &Name) -> Option<MacroId> {
+ self.macros.get(name).map(|item| item.def)
+ }
+
/// XXX: this is O(N) rather than O(1), try to not introduce new usages.
pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility, /*declared*/ bool)> {
match item {
@@ -519,7 +526,7 @@ impl ItemScope {
pub(crate) fn set_derive_macro_invoc(
&mut self,
adt: AstId<ast::Adt>,
- call: MacroCallId,
+ call: Either<MacroCallId, BuiltinDeriveImplId>,
id: AttrId,
idx: usize,
) {
@@ -539,7 +546,7 @@ impl ItemScope {
adt: AstId<ast::Adt>,
attr_id: AttrId,
attr_call_id: MacroCallId,
- mut derive_call_ids: SmallVec<[Option<MacroCallId>; 4]>,
+ mut derive_call_ids: SmallVec<[Option<Either<MacroCallId, BuiltinDeriveImplId>>; 4]>,
) {
derive_call_ids.shrink_to_fit();
self.derive_macros.entry(adt).or_default().push(DeriveMacroInvocation {
@@ -554,7 +561,9 @@ impl ItemScope {
) -> impl Iterator<
Item = (
AstId<ast::Adt>,
- impl Iterator<Item = (AttrId, MacroCallId, &[Option<MacroCallId>])>,
+ impl Iterator<
+ Item = (AttrId, MacroCallId, &[Option<Either<MacroCallId, BuiltinDeriveImplId>>]),
+ >,
),
> + '_ {
self.derive_macros.iter().map(|(k, v)| {
diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs
index eba4d87ec9..51dd55301f 100644
--- a/crates/hir-def/src/lang_item.rs
+++ b/crates/hir-def/src/lang_item.rs
@@ -7,8 +7,8 @@ use intern::{Symbol, sym};
use stdx::impl_from;
use crate::{
- AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId,
- StaticId, StructId, TraitId, TypeAliasId, UnionId,
+ AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, MacroId,
+ ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, UnionId,
attrs::AttrFlags,
db::DefDatabase,
nameres::{DefMap, assoc::TraitItems, crate_def_map, crate_local_def_map},
@@ -99,7 +99,7 @@ pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangIt
}
if matches!(krate.data(db).origin, base_db::CrateOrigin::Lang(base_db::LangCrateOrigin::Core)) {
- lang_items.fill_non_lang_core_traits(db, crate_def_map);
+ lang_items.fill_non_lang_core_items(db, crate_def_map);
}
if lang_items.is_empty() { None } else { Some(Box::new(lang_items)) }
@@ -169,6 +169,27 @@ fn resolve_core_trait(
Some(trait_)
}
+fn resolve_core_macro(
+ db: &dyn DefDatabase,
+ core_def_map: &DefMap,
+ modules: &[Symbol],
+ name: Symbol,
+) -> Option<MacroId> {
+ let mut current = &core_def_map[core_def_map.root];
+ for module in modules {
+ let Some((ModuleDefId::ModuleId(cur), _)) =
+ current.scope.type_(&Name::new_symbol_root(module.clone()))
+ else {
+ return None;
+ };
+ if cur.krate(db) != core_def_map.krate() || cur.block(db) != core_def_map.block_id() {
+ return None;
+ }
+ current = &core_def_map[cur];
+ }
+ current.scope.makro(&Name::new_symbol_root(name))
+}
+
#[salsa::tracked(returns(as_deref))]
pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option<Box<[TraitId]>> {
let mut traits = Vec::new();
@@ -195,7 +216,11 @@ macro_rules! language_item_table {
@non_lang_core_traits:
- $( core::$($non_lang_module:ident)::*, $non_lang_trait:ident; )*
+ $( core::$($non_lang_trait_module:ident)::*, $non_lang_trait:ident; )*
+
+ @non_lang_core_macros:
+
+ $( core::$($non_lang_macro_module:ident)::*, $non_lang_macro:ident, $non_lang_macro_field:ident; )*
) => {
#[allow(non_snake_case)] // FIXME: Should we remove this?
#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
@@ -207,6 +232,9 @@ macro_rules! language_item_table {
$(
pub $non_lang_trait: Option<TraitId>,
)*
+ $(
+ pub $non_lang_macro_field: Option<MacroId>,
+ )*
}
impl LangItems {
@@ -218,6 +246,7 @@ macro_rules! language_item_table {
fn merge_prefer_self(&mut self, other: &Self) {
$( self.$lang_item = self.$lang_item.or(other.$lang_item); )*
$( self.$non_lang_trait = self.$non_lang_trait.or(other.$non_lang_trait); )*
+ $( self.$non_lang_macro_field = self.$non_lang_macro_field.or(other.$non_lang_macro_field); )*
}
fn assign_lang_item(&mut self, name: Symbol, target: LangItemTarget) {
@@ -233,8 +262,9 @@ macro_rules! language_item_table {
}
}
- fn fill_non_lang_core_traits(&mut self, db: &dyn DefDatabase, core_def_map: &DefMap) {
- $( self.$non_lang_trait = resolve_core_trait(db, core_def_map, &[ $(sym::$non_lang_module),* ], sym::$non_lang_trait); )*
+ fn fill_non_lang_core_items(&mut self, db: &dyn DefDatabase, core_def_map: &DefMap) {
+ $( self.$non_lang_trait = resolve_core_trait(db, core_def_map, &[ $(sym::$non_lang_trait_module),* ], sym::$non_lang_trait); )*
+ $( self.$non_lang_macro_field = resolve_core_macro(db, core_def_map, &[ $(sym::$non_lang_macro_module),* ], sym::$non_lang_macro); )*
}
}
@@ -469,6 +499,11 @@ language_item_table! { LangItems =>
RangeToInclusive, sym::RangeToInclusive, StructId;
RangeTo, sym::RangeTo, StructId;
+ RangeFromCopy, sym::RangeFromCopy, StructId;
+ RangeInclusiveCopy, sym::RangeInclusiveCopy, StructId;
+ RangeCopy, sym::RangeCopy, StructId;
+ RangeToInclusiveCopy, sym::RangeToInclusiveCopy, StructId;
+
String, sym::String, StructId;
CStr, sym::CStr, StructId;
Ordering, sym::Ordering, EnumId;
@@ -479,4 +514,16 @@ language_item_table! { LangItems =>
core::hash, Hash;
core::cmp, Ord;
core::cmp, Eq;
+
+ @non_lang_core_macros:
+ core::default, Default, DefaultDerive;
+ core::fmt, Debug, DebugDerive;
+ core::hash, Hash, HashDerive;
+ core::cmp, PartialOrd, PartialOrdDerive;
+ core::cmp, Ord, OrdDerive;
+ core::cmp, PartialEq, PartialEqDerive;
+ core::cmp, Eq, EqDerive;
+ core::marker, CoercePointee, CoercePointeeDerive;
+ core::marker, Copy, CopyDerive;
+ core::clone, Clone, CloneDerive;
}
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
index e33a366769..bbadcf8794 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
@@ -237,3 +237,23 @@ fn test() {
"#]],
);
}
+
+#[test]
+fn meta_fat_arrow() {
+ check(
+ r#"
+macro_rules! m {
+ ( $m:meta => ) => {};
+}
+
+m! { foo => }
+ "#,
+ expect![[r#"
+macro_rules! m {
+ ( $m:meta => ) => {};
+}
+
+
+ "#]],
+ );
+}
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index 5f05cdb1e2..1e3ea50c5a 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -61,6 +61,7 @@ mod tests;
use std::ops::{Deref, DerefMut, Index, IndexMut};
use base_db::Crate;
+use either::Either;
use hir_expand::{
EditionedFileId, ErasedAstId, HirFileId, InFile, MacroCallId, mod_path::ModPath, name::Name,
proc_macro::ProcMacroKind,
@@ -75,8 +76,8 @@ use triomphe::Arc;
use tt::TextRange;
use crate::{
- AstId, BlockId, BlockLoc, ExternCrateId, FunctionId, FxIndexMap, Lookup, MacroCallStyles,
- MacroExpander, MacroId, ModuleId, ModuleIdLt, ProcMacroId, UseId,
+ AstId, BlockId, BlockLoc, BuiltinDeriveImplId, ExternCrateId, FunctionId, FxIndexMap, Lookup,
+ MacroCallStyles, MacroExpander, MacroId, ModuleId, ModuleIdLt, ProcMacroId, UseId,
db::DefDatabase,
item_scope::{BuiltinShadowMode, ItemScope},
item_tree::TreeId,
@@ -192,7 +193,8 @@ pub struct DefMap {
/// Tracks which custom derives are in scope for an item, to allow resolution of derive helper
/// attributes.
// FIXME: Figure out a better way for the IDE layer to resolve these?
- derive_helpers_in_scope: FxHashMap<AstId<ast::Item>, Vec<(Name, MacroId, MacroCallId)>>,
+ derive_helpers_in_scope:
+ FxHashMap<AstId<ast::Item>, Vec<(Name, MacroId, Either<MacroCallId, BuiltinDeriveImplId>)>>,
/// A mapping from [`hir_expand::MacroDefId`] to [`crate::MacroId`].
pub macro_def_to_macro_id: FxHashMap<ErasedAstId, MacroId>,
@@ -214,7 +216,7 @@ struct DefMapCrateData {
registered_tools: Vec<Symbol>,
/// Unstable features of Rust enabled with `#![feature(A, B)]`.
unstable_features: FxHashSet<Symbol>,
- /// #[rustc_coherence_is_core]
+ /// `#[rustc_coherence_is_core]`
rustc_coherence_is_core: bool,
no_core: bool,
no_std: bool,
@@ -540,7 +542,7 @@ impl DefMap {
pub fn derive_helpers_in_scope(
&self,
id: AstId<ast::Adt>,
- ) -> Option<&[(Name, MacroId, MacroCallId)]> {
+ ) -> Option<&[(Name, MacroId, Either<MacroCallId, BuiltinDeriveImplId>)]> {
self.derive_helpers_in_scope.get(&id.map(|it| it.upcast())).map(Deref::deref)
}
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 87ade06517..323060f61d 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -3,7 +3,7 @@
//! `DefCollector::collect` contains the fixed-point iteration loop which
//! resolves imports and expands macros.
-use std::{iter, mem};
+use std::{iter, mem, ops::Range};
use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin};
use cfg::{CfgAtom, CfgExpr, CfgOptions};
@@ -226,6 +226,7 @@ struct DeferredBuiltinDerive {
container: ItemContainerId,
derive_attr_id: AttrId,
derive_index: u32,
+ helpers_range: Range<usize>,
}
/// Walks the tree of module recursively
@@ -1354,7 +1355,7 @@ impl<'db> DefCollector<'db> {
if let Ok((macro_id, def_id, call_id)) = id {
self.def_map.modules[directive.module_id].scope.set_derive_macro_invoc(
ast_id.ast_id,
- call_id,
+ Either::Left(call_id),
*derive_attr,
*derive_pos,
);
@@ -1369,7 +1370,7 @@ impl<'db> DefCollector<'db> {
.extend(izip!(
helpers.iter().cloned(),
iter::repeat(macro_id),
- iter::repeat(call_id),
+ iter::repeat(Either::Left(call_id)),
));
}
}
@@ -1492,6 +1493,8 @@ impl<'db> DefCollector<'db> {
Interned::new(path),
);
+ derive_call_ids.push(None);
+
// Try to resolve the derive immediately. If we succeed, we can also use the fast path
// for builtin derives. If not, we cannot use it, as it can cause the ADT to become
// interned while the derive is still unresolved, which will cause it to get forgotten.
@@ -1506,23 +1509,42 @@ impl<'db> DefCollector<'db> {
call_id,
);
+ let ast_id_without_path = ast_id.ast_id;
+ let directive = MacroDirective {
+ module_id: directive.module_id,
+ depth: directive.depth + 1,
+ kind: MacroDirectiveKind::Derive {
+ ast_id,
+ derive_attr: *attr_id,
+ derive_pos: idx,
+ ctxt: call_site.ctx,
+ derive_macro_id: call_id,
+ },
+ container: directive.container,
+ };
+
if let Ok((macro_id, def_id, call_id)) = id {
- derive_call_ids.push(Some(call_id));
+ let (mut helpers_start, mut helpers_end) = (0, 0);
// Record its helper attributes.
if def_id.krate != self.def_map.krate {
let def_map = crate_def_map(self.db, def_id.krate);
if let Some(helpers) =
def_map.data.exported_derives.get(&macro_id)
{
- self.def_map
+ let derive_helpers = self
+ .def_map
.derive_helpers_in_scope
- .entry(ast_id.ast_id.map(|it| it.upcast()))
- .or_default()
- .extend(izip!(
- helpers.iter().cloned(),
- iter::repeat(macro_id),
- iter::repeat(call_id),
- ));
+ .entry(
+ ast_id_without_path.map(|it| it.upcast()),
+ )
+ .or_default();
+ helpers_start = derive_helpers.len();
+ derive_helpers.extend(izip!(
+ helpers.iter().cloned(),
+ iter::repeat(macro_id),
+ iter::repeat(Either::Left(call_id)),
+ ));
+ helpers_end = derive_helpers.len();
}
}
@@ -1531,7 +1553,7 @@ impl<'db> DefCollector<'db> {
def_id.kind
{
self.deferred_builtin_derives
- .entry(ast_id.ast_id.upcast())
+ .entry(ast_id_without_path.upcast())
.or_default()
.push(DeferredBuiltinDerive {
call_id,
@@ -1541,24 +1563,15 @@ impl<'db> DefCollector<'db> {
depth: directive.depth,
derive_attr_id: *attr_id,
derive_index: idx as u32,
+ helpers_range: helpers_start..helpers_end,
});
} else {
- push_resolved(&mut resolved, directive, call_id);
+ push_resolved(&mut resolved, &directive, call_id);
+ *derive_call_ids.last_mut().unwrap() =
+ Some(Either::Left(call_id));
}
} else {
- derive_call_ids.push(None);
- self.unresolved_macros.push(MacroDirective {
- module_id: directive.module_id,
- depth: directive.depth + 1,
- kind: MacroDirectiveKind::Derive {
- ast_id,
- derive_attr: *attr_id,
- derive_pos: idx,
- ctxt: call_site.ctx,
- derive_macro_id: call_id,
- },
- container: directive.container,
- });
+ self.unresolved_macros.push(directive);
}
}
@@ -1858,9 +1871,8 @@ impl ModCollector<'_, '_> {
ast_id: FileAstId<ast::Adt>,
id: AdtId,
def_map: &mut DefMap| {
- let Some(deferred_derives) =
- deferred_derives.remove(&InFile::new(file_id, ast_id.upcast()))
- else {
+ let ast_id = InFile::new(file_id, ast_id.upcast());
+ let Some(deferred_derives) = deferred_derives.remove(&ast_id.upcast()) else {
return;
};
let module = &mut def_map.modules[module_id];
@@ -1876,6 +1888,22 @@ impl ModCollector<'_, '_> {
},
);
module.scope.define_builtin_derive_impl(impl_id);
+ module.scope.set_derive_macro_invoc(
+ ast_id,
+ Either::Right(impl_id),
+ deferred_derive.derive_attr_id,
+ deferred_derive.derive_index as usize,
+ );
+ // Change its helper attributes to the new id.
+ if let Some(derive_helpers) =
+ def_map.derive_helpers_in_scope.get_mut(&ast_id.map(|it| it.upcast()))
+ {
+ for (_, _, call_id) in
+ &mut derive_helpers[deferred_derive.helpers_range.clone()]
+ {
+ *call_id = Either::Right(impl_id);
+ }
+ }
});
}
};
diff --git a/crates/hir-def/src/nameres/tests.rs b/crates/hir-def/src/nameres/tests.rs
index 23d60d58f0..fe55252e25 100644
--- a/crates/hir-def/src/nameres/tests.rs
+++ b/crates/hir-def/src/nameres/tests.rs
@@ -1,8 +1,8 @@
mod globs;
+mod imports;
mod incremental;
mod macros;
mod mod_resolution;
-mod primitives;
use base_db::RootQueryDb;
use expect_test::{Expect, expect};
diff --git a/crates/hir-def/src/nameres/tests/imports.rs b/crates/hir-def/src/nameres/tests/imports.rs
new file mode 100644
index 0000000000..b1960b785a
--- /dev/null
+++ b/crates/hir-def/src/nameres/tests/imports.rs
@@ -0,0 +1,63 @@
+use super::*;
+
+#[test]
+fn kw_path_renames() {
+ check(
+ r#"
+macro_rules! m {
+ () => {
+ pub use $crate as dollar_crate;
+ pub use $crate::{self as self_dollar_crate};
+ };
+}
+
+pub use self as this;
+pub use crate as krate;
+
+pub use crate::{self as self_krate};
+m!();
+
+mod foo {
+ pub use super as zuper;
+ pub use super::{self as self_zuper};
+}
+"#,
+ expect![[r#"
+ crate
+ - dollar_crate : type (import)
+ - foo : type
+ - krate : type (import)
+ - self_dollar_crate : type (import)
+ - self_krate : type (import)
+ - this : type (import)
+ - (legacy) m : macro!
+
+ crate::foo
+ - self_zuper : type (import)
+ - zuper : type (import)
+ - (legacy) m : macro!
+ "#]],
+ );
+}
+
+#[test]
+fn primitive_reexport() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::int;
+
+//- /foo.rs
+pub use i32 as int;
+"#,
+ expect![[r#"
+ crate
+ - foo : type
+ - int : type (import)
+
+ crate::foo
+ - int : type (import)
+ "#]],
+ );
+}
diff --git a/crates/hir-def/src/nameres/tests/primitives.rs b/crates/hir-def/src/nameres/tests/primitives.rs
deleted file mode 100644
index 861690238d..0000000000
--- a/crates/hir-def/src/nameres/tests/primitives.rs
+++ /dev/null
@@ -1,23 +0,0 @@
-use super::*;
-
-#[test]
-fn primitive_reexport() {
- check(
- r#"
-//- /lib.rs
-mod foo;
-use foo::int;
-
-//- /foo.rs
-pub use i32 as int;
-"#,
- expect![[r#"
- crate
- - foo : type
- - int : type (import)
-
- crate::foo
- - int : type (import)
- "#]],
- );
-}
diff --git a/crates/hir-def/src/signatures.rs b/crates/hir-def/src/signatures.rs
index 0dd88edbfb..37c8f762fe 100644
--- a/crates/hir-def/src/signatures.rs
+++ b/crates/hir-def/src/signatures.rs
@@ -12,7 +12,7 @@ use intern::{Symbol, sym};
use la_arena::{Arena, Idx};
use rustc_abi::{IntegerType, ReprOptions};
use syntax::{
- NodeOrToken, SyntaxNodePtr, T,
+ AstNode, NodeOrToken, SyntaxNodePtr, T,
ast::{self, HasGenericParams, HasName, HasVisibility, IsString},
};
use thin_vec::ThinVec;
@@ -754,6 +754,7 @@ pub struct FieldData {
pub type_ref: TypeRefId,
pub visibility: RawVisibility,
pub is_unsafe: bool,
+ pub default_value: Option<ExprId>,
}
pub type LocalFieldId = Idx<FieldData>;
@@ -903,7 +904,14 @@ fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>(
.filter_map(NodeOrToken::into_token)
.any(|token| token.kind() == T![unsafe]);
let name = field_name(idx, &field);
- arena.alloc(FieldData { name, type_ref, visibility, is_unsafe });
+
+ // Check if field has default value (only for record fields)
+ let default_value = ast::RecordField::cast(field.syntax().clone())
+ .and_then(|rf| rf.eq_token().is_some().then_some(rf.expr()))
+ .flatten()
+ .map(|expr| col.collect_expr_opt(Some(expr)));
+
+ arena.alloc(FieldData { name, type_ref, visibility, is_unsafe, default_value });
idx += 1;
}
Err(cfg) => {
diff --git a/crates/hir-expand/src/builtin/attr_macro.rs b/crates/hir-expand/src/builtin/attr_macro.rs
index 06b9b5418e..c94663ca0c 100644
--- a/crates/hir-expand/src/builtin/attr_macro.rs
+++ b/crates/hir-expand/src/builtin/attr_macro.rs
@@ -115,7 +115,7 @@ fn dummy_gate_test_expand(
/// wasting a lot of memory, and it would also require some way to use a path in a way that makes it
/// always resolve as a derive without nameres recollecting them.
/// So this hacky approach is a lot more friendly for us, though it does require a bit of support in
-/// [`hir::Semantics`] to make this work.
+/// hir::Semantics to make this work.
fn derive_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
diff --git a/crates/hir-expand/src/cfg_process.rs b/crates/hir-expand/src/cfg_process.rs
index a0de36548e..ccef9168ac 100644
--- a/crates/hir-expand/src/cfg_process.rs
+++ b/crates/hir-expand/src/cfg_process.rs
@@ -1,4 +1,4 @@
-//! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro
+//! Processes out `#[cfg]` and `#[cfg_attr]` attributes from the input for the derive macro
use std::{cell::OnceCell, ops::ControlFlow};
use ::tt::TextRange;
diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs
index d10e122a5d..1726412275 100644
--- a/crates/hir-expand/src/declarative.rs
+++ b/crates/hir-expand/src/declarative.rs
@@ -101,7 +101,9 @@ impl DeclarativeMacroExpander {
match &*value {
"transparent" => ControlFlow::Break(Transparency::Transparent),
// "semitransparent" is for old rustc versions.
- "semiopaque" | "semitransparent" => ControlFlow::Break(Transparency::SemiOpaque),
+ "semiopaque" | "semitransparent" => {
+ ControlFlow::Break(Transparency::SemiOpaque)
+ }
"opaque" => ControlFlow::Break(Transparency::Opaque),
_ => ControlFlow::Continue(()),
}
diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs
index 1712c28aa8..78228cf82e 100644
--- a/crates/hir-expand/src/mod_path.rs
+++ b/crates/hir-expand/src/mod_path.rs
@@ -423,6 +423,10 @@ macro_rules! __known_path {
(core::ops::RangeTo) => {};
(core::ops::RangeToInclusive) => {};
(core::ops::RangeInclusive) => {};
+ (core::range::Range) => {};
+ (core::range::RangeFrom) => {};
+ (core::range::RangeInclusive) => {};
+ (core::range::RangeToInclusive) => {};
(core::future::Future) => {};
(core::future::IntoFuture) => {};
(core::fmt::Debug) => {};
diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs
index dd1fc3b36e..4e1bb6f4c5 100644
--- a/crates/hir-ty/src/diagnostics/expr.rs
+++ b/crates/hir-ty/src/diagnostics/expr.rs
@@ -41,7 +41,7 @@ use crate::{
pub(crate) use hir_def::{
LocalFieldId, VariantId,
expr_store::Body,
- hir::{Expr, ExprId, MatchArm, Pat, PatId, Statement},
+ hir::{Expr, ExprId, MatchArm, Pat, PatId, RecordSpread, Statement},
};
pub enum BodyValidationDiagnostic {
@@ -123,7 +123,7 @@ impl<'db> ExprValidator<'db> {
}
for (id, expr) in body.exprs() {
- if let Some((variant, missed_fields, true)) =
+ if let Some((variant, missed_fields)) =
record_literal_missing_fields(db, self.infer, id, expr)
{
self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
@@ -154,7 +154,7 @@ impl<'db> ExprValidator<'db> {
}
for (id, pat) in body.pats() {
- if let Some((variant, missed_fields, true)) =
+ if let Some((variant, missed_fields)) =
record_pattern_missing_fields(db, self.infer, id, pat)
{
self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
@@ -557,9 +557,9 @@ pub fn record_literal_missing_fields(
infer: &InferenceResult,
id: ExprId,
expr: &Expr,
-) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
- let (fields, exhaustive) = match expr {
- Expr::RecordLit { fields, spread, .. } => (fields, spread.is_none()),
+) -> Option<(VariantId, Vec<LocalFieldId>)> {
+ let (fields, spread) = match expr {
+ Expr::RecordLit { fields, spread, .. } => (fields, spread),
_ => return None,
};
@@ -571,15 +571,28 @@ pub fn record_literal_missing_fields(
let variant_data = variant_def.fields(db);
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+ // don't show missing fields if:
+ // - has ..expr
+ // - or has default value + ..
+ // - or already in code
let missed_fields: Vec<LocalFieldId> = variant_data
.fields()
.iter()
- .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
+ .filter_map(|(f, d)| {
+ if specified_fields.contains(&d.name)
+ || matches!(spread, RecordSpread::Expr(_))
+ || (d.default_value.is_some() && matches!(spread, RecordSpread::FieldDefaults))
+ {
+ None
+ } else {
+ Some(f)
+ }
+ })
.collect();
if missed_fields.is_empty() {
return None;
}
- Some((variant_def, missed_fields, exhaustive))
+ Some((variant_def, missed_fields))
}
pub fn record_pattern_missing_fields(
@@ -587,9 +600,9 @@ pub fn record_pattern_missing_fields(
infer: &InferenceResult,
id: PatId,
pat: &Pat,
-) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
- let (fields, exhaustive) = match pat {
- Pat::Record { path: _, args, ellipsis } => (args, !ellipsis),
+) -> Option<(VariantId, Vec<LocalFieldId>)> {
+ let (fields, ellipsis) = match pat {
+ Pat::Record { path: _, args, ellipsis } => (args, *ellipsis),
_ => return None,
};
@@ -601,15 +614,22 @@ pub fn record_pattern_missing_fields(
let variant_data = variant_def.fields(db);
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+ // don't show missing fields if:
+ // - in code
+ // - or has ..
let missed_fields: Vec<LocalFieldId> = variant_data
.fields()
.iter()
- .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
+ .filter_map(
+ |(f, d)| {
+ if specified_fields.contains(&d.name) || ellipsis { None } else { Some(f) }
+ },
+ )
.collect();
if missed_fields.is_empty() {
return None;
}
- Some((variant_def, missed_fields, exhaustive))
+ Some((variant_def, missed_fields))
}
fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult) -> bool {
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index d527a4ae29..35d744e7d1 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -1815,18 +1815,34 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
Some(struct_.into())
}
+ fn has_new_range_feature(&self) -> bool {
+ self.resolver.top_level_def_map().is_unstable_feature_enabled(&sym::new_range)
+ }
+
fn resolve_range(&self) -> Option<AdtId> {
- let struct_ = self.lang_items.Range?;
+ let struct_ = if self.has_new_range_feature() {
+ self.lang_items.RangeCopy?
+ } else {
+ self.lang_items.Range?
+ };
Some(struct_.into())
}
fn resolve_range_inclusive(&self) -> Option<AdtId> {
- let struct_ = self.lang_items.RangeInclusiveStruct?;
+ let struct_ = if self.has_new_range_feature() {
+ self.lang_items.RangeInclusiveCopy?
+ } else {
+ self.lang_items.RangeInclusiveStruct?
+ };
Some(struct_.into())
}
fn resolve_range_from(&self) -> Option<AdtId> {
- let struct_ = self.lang_items.RangeFrom?;
+ let struct_ = if self.has_new_range_feature() {
+ self.lang_items.RangeFromCopy?
+ } else {
+ self.lang_items.RangeFrom?
+ };
Some(struct_.into())
}
@@ -1836,7 +1852,11 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
}
fn resolve_range_to_inclusive(&self) -> Option<AdtId> {
- let struct_ = self.lang_items.RangeToInclusive?;
+ let struct_ = if self.has_new_range_feature() {
+ self.lang_items.RangeToInclusiveCopy?
+ } else {
+ self.lang_items.RangeToInclusive?
+ };
Some(struct_.into())
}
diff --git a/crates/hir-ty/src/infer/cast.rs b/crates/hir-ty/src/infer/cast.rs
index d073b06ccc..d69b00adb7 100644
--- a/crates/hir-ty/src/infer/cast.rs
+++ b/crates/hir-ty/src/infer/cast.rs
@@ -2,8 +2,10 @@
use hir_def::{AdtId, hir::ExprId, signatures::TraitFlags};
use rustc_ast_ir::Mutability;
+use rustc_hash::FxHashSet;
use rustc_type_ir::{
- Flags, InferTy, TypeFlags, UintTy,
+ InferTy, TypeVisitableExt, UintTy, elaborate,
+ error::TypeError,
inherent::{AdtDef, BoundExistentialPredicates as _, IntoKind, Ty as _},
};
use stdx::never;
@@ -12,7 +14,10 @@ use crate::{
InferenceDiagnostic,
db::HirDatabase,
infer::{AllowTwoPhase, InferenceContext, expr::ExprIsRead},
- next_solver::{BoundExistentialPredicates, DbInterner, ParamTy, Ty, TyKind},
+ next_solver::{
+ BoundExistentialPredicates, ExistentialPredicate, ParamTy, Region, Ty, TyKind,
+ infer::traits::ObligationCause,
+ },
};
#[derive(Debug)]
@@ -66,12 +71,13 @@ pub enum CastError {
DifferingKinds,
SizedUnsizedCast,
IllegalCast,
- IntToFatCast,
+ IntToWideCast,
NeedDeref,
NeedViaPtr,
NeedViaThinPtr,
NeedViaInt,
NonScalar,
+ PtrPtrAddingAutoTraits,
// We don't want to report errors with unknown types currently.
// UnknownCastPtrKind,
// UnknownExprPtrKind,
@@ -137,22 +143,13 @@ impl<'db> CastCheck<'db> {
return Ok(());
}
- if !self.cast_ty.flags().contains(TypeFlags::HAS_TY_INFER)
- && !ctx.table.is_sized(self.cast_ty)
- {
+ if !self.cast_ty.has_infer_types() && !ctx.table.is_sized(self.cast_ty) {
return Err(InferenceDiagnostic::CastToUnsized {
expr: self.expr,
cast_ty: self.cast_ty.store(),
});
}
- // Chalk doesn't support trait upcasting and fails to solve some obvious goals
- // when the trait environment contains some recursive traits (See issue #18047)
- // We skip cast checks for such cases for now, until the next-gen solver.
- if contains_dyn_trait(self.cast_ty) {
- return Ok(());
- }
-
self.do_check(ctx).map_err(|e| e.into_diagnostic(self.expr, self.expr_ty, self.cast_ty))
}
@@ -162,22 +159,23 @@ impl<'db> CastCheck<'db> {
(Some(t_from), Some(t_cast)) => (t_from, t_cast),
(None, Some(t_cast)) => match self.expr_ty.kind() {
TyKind::FnDef(..) => {
- let sig =
- self.expr_ty.callable_sig(ctx.interner()).expect("FnDef had no sig");
- let sig = ctx.table.normalize_associated_types_in(sig);
+ // rustc calls `FnCtxt::normalize` on this but it's a no-op in next-solver
+ let sig = self.expr_ty.fn_sig(ctx.interner());
let fn_ptr = Ty::new_fn_ptr(ctx.interner(), sig);
- if ctx
- .coerce(
- self.source_expr.into(),
- self.expr_ty,
- fn_ptr,
- AllowTwoPhase::No,
- ExprIsRead::Yes,
- )
- .is_ok()
- {
- } else {
- return Err(CastError::IllegalCast);
+ match ctx.coerce(
+ self.source_expr.into(),
+ self.expr_ty,
+ fn_ptr,
+ AllowTwoPhase::No,
+ ExprIsRead::Yes,
+ ) {
+ Ok(_) => {}
+ Err(TypeError::IntrinsicCast) => {
+ return Err(CastError::IllegalCast);
+ }
+ Err(_) => {
+ return Err(CastError::NonScalar);
+ }
}
(CastTy::FnPtr, t_cast)
@@ -213,23 +211,41 @@ impl<'db> CastCheck<'db> {
// rustc checks whether the `expr_ty` is foreign adt with `non_exhaustive` sym
match (t_from, t_cast) {
+ // These types have invariants! can't cast into them.
(_, CastTy::Int(Int::CEnum) | CastTy::FnPtr) => Err(CastError::NonScalar),
+
+ // * -> Bool
(_, CastTy::Int(Int::Bool)) => Err(CastError::CastToBool),
- (CastTy::Int(Int::U(UintTy::U8)), CastTy::Int(Int::Char)) => Ok(()),
+
+ // * -> Char
+ (CastTy::Int(Int::U(UintTy::U8)), CastTy::Int(Int::Char)) => Ok(()), // u8-char-cast
(_, CastTy::Int(Int::Char)) => Err(CastError::CastToChar),
+
+ // prim -> float,ptr
(CastTy::Int(Int::Bool | Int::CEnum | Int::Char), CastTy::Float) => {
Err(CastError::NeedViaInt)
}
+
(CastTy::Int(Int::Bool | Int::CEnum | Int::Char) | CastTy::Float, CastTy::Ptr(..))
| (CastTy::Ptr(..) | CastTy::FnPtr, CastTy::Float) => Err(CastError::IllegalCast),
- (CastTy::Ptr(src, _), CastTy::Ptr(dst, _)) => self.check_ptr_ptr_cast(ctx, src, dst),
+
+ // ptr -> ptr
+ (CastTy::Ptr(src, _), CastTy::Ptr(dst, _)) => self.check_ptr_ptr_cast(ctx, src, dst), // ptr-ptr-cast
+
+ // // ptr-addr-cast
(CastTy::Ptr(src, _), CastTy::Int(_)) => self.check_ptr_addr_cast(ctx, src),
+ (CastTy::FnPtr, CastTy::Int(_)) => Ok(()),
+
+ // addr-ptr-cast
(CastTy::Int(_), CastTy::Ptr(dst, _)) => self.check_addr_ptr_cast(ctx, dst),
+
+ // fn-ptr-cast
(CastTy::FnPtr, CastTy::Ptr(dst, _)) => self.check_fptr_ptr_cast(ctx, dst),
+
+ // prim -> prim
(CastTy::Int(Int::CEnum), CastTy::Int(_)) => Ok(()),
(CastTy::Int(Int::Char | Int::Bool), CastTy::Int(_)) => Ok(()),
(CastTy::Int(_) | CastTy::Float, CastTy::Int(_) | CastTy::Float) => Ok(()),
- (CastTy::FnPtr, CastTy::Int(_)) => Ok(()),
}
}
@@ -241,10 +257,16 @@ impl<'db> CastCheck<'db> {
t_cast: Ty<'db>,
m_cast: Mutability,
) -> Result<(), CastError> {
- // Mutability order is opposite to rustc. `Mut < Not`
- if m_expr <= m_cast
+ let t_expr = ctx.table.try_structurally_resolve_type(t_expr);
+ let t_cast = ctx.table.try_structurally_resolve_type(t_cast);
+
+ if m_expr >= m_cast
&& let TyKind::Array(ety, _) = t_expr.kind()
+ && ctx.infcx().can_eq(ctx.table.param_env, ety, t_cast)
{
+ // Due to historical reasons we allow directly casting references of
+ // arrays into raw pointers of their element type.
+
// Coerce to a raw pointer so that we generate RawPtr in MIR.
let array_ptr_type = Ty::new_ptr(ctx.interner(), t_expr, m_expr);
if ctx
@@ -265,14 +287,9 @@ impl<'db> CastCheck<'db> {
);
}
- // This is a less strict condition than rustc's `demand_eqtype`,
- // but false negative is better than false positive
- if ctx
- .coerce(self.source_expr.into(), ety, t_cast, AllowTwoPhase::No, ExprIsRead::Yes)
- .is_ok()
- {
- return Ok(());
- }
+ // this will report a type mismatch if needed
+ let _ = ctx.demand_eqtype(self.expr.into(), ety, t_cast);
+ return Ok(());
}
Err(CastError::IllegalCast)
@@ -289,30 +306,147 @@ impl<'db> CastCheck<'db> {
match (src_kind, dst_kind) {
(Some(PointerKind::Error), _) | (_, Some(PointerKind::Error)) => Ok(()),
+
// (_, None) => Err(CastError::UnknownCastPtrKind),
// (None, _) => Err(CastError::UnknownExprPtrKind),
(_, None) | (None, _) => Ok(()),
+
+ // Cast to thin pointer is OK
(_, Some(PointerKind::Thin)) => Ok(()),
+
+ // thin -> fat? report invalid cast (don't complain about vtable kinds)
(Some(PointerKind::Thin), _) => Err(CastError::SizedUnsizedCast),
+
+ // trait object -> trait object? need to do additional checks
(Some(PointerKind::VTable(src_tty)), Some(PointerKind::VTable(dst_tty))) => {
match (src_tty.principal_def_id(), dst_tty.principal_def_id()) {
+ // A<dyn Src<...> + SrcAuto> -> B<dyn Dst<...> + DstAuto>. need to make sure
+ // - `Src` and `Dst` traits are the same
+ // - traits have the same generic arguments
+ // - projections are the same
+ // - `SrcAuto` (+auto traits implied by `Src`) is a superset of `DstAuto`
+ //
+ // Note that trait upcasting goes through a different mechanism (`coerce_unsized`)
+ // and is unaffected by this check.
(Some(src_principal), Some(dst_principal)) => {
if src_principal == dst_principal {
return Ok(());
}
- let src_principal = ctx.db.trait_signature(src_principal.0);
- let dst_principal = ctx.db.trait_signature(dst_principal.0);
- if src_principal.flags.contains(TraitFlags::AUTO)
- && dst_principal.flags.contains(TraitFlags::AUTO)
+
+ // We need to reconstruct trait object types.
+ // `m_src` and `m_dst` won't work for us here because they will potentially
+ // contain wrappers, which we do not care about.
+ //
+ // e.g. we want to allow `dyn T -> (dyn T,)`, etc.
+ //
+ // We also need to skip auto traits to emit an FCW and not an error.
+ let src_obj = Ty::new_dynamic(
+ ctx.interner(),
+ BoundExistentialPredicates::new_from_iter(
+ ctx.interner(),
+ src_tty.iter().filter(|pred| {
+ !matches!(
+ pred.skip_binder(),
+ ExistentialPredicate::AutoTrait(_)
+ )
+ }),
+ ),
+ Region::new_erased(ctx.interner()),
+ );
+ let dst_obj = Ty::new_dynamic(
+ ctx.interner(),
+ BoundExistentialPredicates::new_from_iter(
+ ctx.interner(),
+ dst_tty.iter().filter(|pred| {
+ !matches!(
+ pred.skip_binder(),
+ ExistentialPredicate::AutoTrait(_)
+ )
+ }),
+ ),
+ Region::new_erased(ctx.interner()),
+ );
+
+ // `dyn Src = dyn Dst`, this checks for matching traits/generics/projections
+ // This is `fcx.demand_eqtype`, but inlined to give a better error.
+ if ctx
+ .table
+ .at(&ObligationCause::dummy())
+ .eq(src_obj, dst_obj)
+ .map(|infer_ok| ctx.table.register_infer_ok(infer_ok))
+ .is_err()
{
- Ok(())
- } else {
- Err(CastError::DifferingKinds)
+ return Err(CastError::DifferingKinds);
}
+
+ // Check that `SrcAuto` (+auto traits implied by `Src`) is a superset of `DstAuto`.
+ // Emit an FCW otherwise.
+ let src_auto: FxHashSet<_> = src_tty
+ .auto_traits()
+ .into_iter()
+ .chain(
+ elaborate::supertrait_def_ids(ctx.interner(), src_principal)
+ .filter(|trait_| {
+ ctx.db
+ .trait_signature(trait_.0)
+ .flags
+ .contains(TraitFlags::AUTO)
+ }),
+ )
+ .collect();
+
+ let added = dst_tty
+ .auto_traits()
+ .into_iter()
+ .any(|trait_| !src_auto.contains(&trait_));
+
+ if added {
+ return Err(CastError::PtrPtrAddingAutoTraits);
+ }
+
+ Ok(())
}
- _ => Err(CastError::Unknown),
+
+ // dyn Auto -> dyn Auto'? ok.
+ (None, None) => Ok(()),
+
+ // dyn Trait -> dyn Auto? not ok (for now).
+ //
+ // Although dropping the principal is already allowed for unsizing coercions
+ // (e.g. `*const (dyn Trait + Auto)` to `*const dyn Auto`), dropping it is
+ // currently **NOT** allowed for (non-coercion) ptr-to-ptr casts (e.g
+ // `*const Foo` to `*const Bar` where `Foo` has a `dyn Trait + Auto` tail
+ // and `Bar` has a `dyn Auto` tail), because the underlying MIR operations
+ // currently work very differently:
+ //
+ // * A MIR unsizing coercion on raw pointers to trait objects (`*const dyn Src`
+ // to `*const dyn Dst`) is currently equivalent to downcasting the source to
+ // the concrete sized type that it was originally unsized from first (via a
+ // ptr-to-ptr cast from `*const Src` to `*const T` with `T: Sized`) and then
+ // unsizing this thin pointer to the target type (unsizing `*const T` to
+ // `*const Dst`). In particular, this means that the pointer's metadata
+ // (vtable) will semantically change, e.g. for const eval and miri, even
+ // though the vtables will always be merged for codegen.
+ //
+ // * A MIR ptr-to-ptr cast is currently equivalent to a transmute and does not
+ // change the pointer metadata (vtable) at all.
+ //
+ // In addition to this potentially surprising difference between coercion and
+ // non-coercion casts, casting away the principal with a MIR ptr-to-ptr cast
+ // is currently considered undefined behavior:
+ //
+ // As a validity invariant of pointers to trait objects, we currently require
+ // that the principal of the vtable in the pointer metadata exactly matches
+ // the principal of the pointee type, where "no principal" is also considered
+ // a kind of principal.
+ (Some(_), None) => Err(CastError::DifferingKinds),
+
+ // dyn Auto -> dyn Trait? not ok.
+ (None, Some(_)) => Err(CastError::DifferingKinds),
}
}
+
+ // fat -> fat? metadata kinds must match
(Some(src_kind), Some(dst_kind)) if src_kind == dst_kind => Ok(()),
(_, _) => Err(CastError::DifferingKinds),
}
@@ -342,9 +476,9 @@ impl<'db> CastCheck<'db> {
None => Ok(()),
Some(PointerKind::Error) => Ok(()),
Some(PointerKind::Thin) => Ok(()),
- Some(PointerKind::VTable(_)) => Err(CastError::IntToFatCast),
- Some(PointerKind::Length) => Err(CastError::IntToFatCast),
- Some(PointerKind::OfAlias | PointerKind::OfParam(_)) => Err(CastError::IntToFatCast),
+ Some(PointerKind::VTable(_)) => Err(CastError::IntToWideCast),
+ Some(PointerKind::Length) => Err(CastError::IntToWideCast),
+ Some(PointerKind::OfAlias | PointerKind::OfParam(_)) => Err(CastError::IntToWideCast),
}
}
@@ -363,15 +497,20 @@ impl<'db> CastCheck<'db> {
}
}
+/// The kind of pointer and associated metadata (thin, length or vtable) - we
+/// only allow casts between wide pointers if their metadata have the same
+/// kind.
#[derive(Debug, PartialEq, Eq)]
enum PointerKind<'db> {
- // thin pointer
+ /// No metadata attached, i.e. a pointer to a sized type or a foreign type
Thin,
- // trait object
+ /// A trait object
VTable(BoundExistentialPredicates<'db>),
- // slice
+ /// Slice
Length,
+ /// The unsize info of this projection or opaque type
OfAlias,
+ /// The unsize info of this parameter
OfParam(ParamTy),
Error,
}
@@ -439,24 +578,3 @@ fn pointer_kind<'db>(
}
}
}
-
-fn contains_dyn_trait<'db>(ty: Ty<'db>) -> bool {
- use std::ops::ControlFlow;
-
- use rustc_type_ir::{TypeSuperVisitable, TypeVisitable, TypeVisitor};
-
- struct DynTraitVisitor;
-
- impl<'db> TypeVisitor<DbInterner<'db>> for DynTraitVisitor {
- type Result = ControlFlow<()>;
-
- fn visit_ty(&mut self, ty: Ty<'db>) -> ControlFlow<()> {
- match ty.kind() {
- TyKind::Dynamic(..) => ControlFlow::Break(()),
- _ => ty.super_visit_with(self),
- }
- }
- }
-
- ty.visit_with(&mut DynTraitVisitor).is_break()
-}
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index d1391ad24e..ce99016470 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -466,7 +466,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
/// Given an `FnOnce::Output` or `AsyncFn::Output` projection, extract the args
- /// and return type to infer a [`ty::PolyFnSig`] for the closure.
+ /// and return type to infer a `PolyFnSig` for the closure.
fn extract_sig_from_projection(
&self,
projection: PolyProjectionPredicate<'db>,
diff --git a/crates/hir-ty/src/infer/closure/analysis.rs b/crates/hir-ty/src/infer/closure/analysis.rs
index b25901cc3b..5a3eba1a71 100644
--- a/crates/hir-ty/src/infer/closure/analysis.rs
+++ b/crates/hir-ty/src/infer/closure/analysis.rs
@@ -8,7 +8,7 @@ use hir_def::{
expr_store::path::Path,
hir::{
Array, AsmOperand, BinaryOp, BindingId, CaptureBy, Expr, ExprId, ExprOrPatId, Pat, PatId,
- Statement, UnaryOp,
+ RecordSpread, Statement, UnaryOp,
},
item_tree::FieldsShape,
resolver::ValueNs,
@@ -627,7 +627,7 @@ impl<'db> InferenceContext<'_, 'db> {
self.consume_expr(expr);
}
Expr::RecordLit { fields, spread, .. } => {
- if let &Some(expr) = spread {
+ if let RecordSpread::Expr(expr) = *spread {
self.consume_expr(expr);
}
self.consume_exprs(fields.iter().map(|it| it.expr));
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index 226e9f5cd6..9f2d9d25b9 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -8,7 +8,7 @@ use hir_def::{
expr_store::path::{GenericArgs as HirGenericArgs, Path},
hir::{
Array, AsmOperand, AsmOptions, BinaryOp, BindingAnnotation, Expr, ExprId, ExprOrPatId,
- LabelId, Literal, Pat, PatId, Statement, UnaryOp,
+ InlineAsmKind, LabelId, Literal, Pat, PatId, RecordSpread, Statement, UnaryOp,
},
resolver::ValueNs,
};
@@ -657,8 +657,8 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
}
- if let Some(expr) = spread {
- self.infer_expr(*expr, &Expectation::has_type(ty), ExprIsRead::Yes);
+ if let RecordSpread::Expr(expr) = *spread {
+ self.infer_expr(expr, &Expectation::has_type(ty), ExprIsRead::Yes);
}
ty
}
@@ -1037,7 +1037,11 @@ impl<'db> InferenceContext<'_, 'db> {
// FIXME: `sym` should report for things that are not functions or statics.
AsmOperand::Sym(_) => (),
});
- if diverge { self.types.types.never } else { self.types.types.unit }
+ if diverge || asm.kind == InlineAsmKind::NakedAsm {
+ self.types.types.never
+ } else {
+ self.types.types.unit
+ }
}
};
// use a new type variable if we got unknown here
@@ -1704,7 +1708,7 @@ impl<'db> InferenceContext<'_, 'db> {
});
match resolved {
Ok((func, _is_visible)) => {
- self.check_method_call(tgt_expr, &[], func.sig, receiver_ty, expected)
+ self.check_method_call(tgt_expr, &[], func.sig, expected)
}
Err(_) => self.err_ty(),
}
@@ -1844,7 +1848,7 @@ impl<'db> InferenceContext<'_, 'db> {
item: func.def_id.into(),
})
}
- self.check_method_call(tgt_expr, args, func.sig, receiver_ty, expected)
+ self.check_method_call(tgt_expr, args, func.sig, expected)
}
// Failed to resolve, report diagnostic and try to resolve as call to field access or
// assoc function
@@ -1934,16 +1938,14 @@ impl<'db> InferenceContext<'_, 'db> {
tgt_expr: ExprId,
args: &[ExprId],
sig: FnSig<'db>,
- receiver_ty: Ty<'db>,
expected: &Expectation<'db>,
) -> Ty<'db> {
- let (formal_receiver_ty, param_tys) = if !sig.inputs_and_output.inputs().is_empty() {
- (sig.inputs_and_output.as_slice()[0], &sig.inputs_and_output.inputs()[1..])
+ let param_tys = if !sig.inputs_and_output.inputs().is_empty() {
+ &sig.inputs_and_output.inputs()[1..]
} else {
- (self.types.types.error, &[] as _)
+ &[]
};
let ret_ty = sig.output();
- self.table.unify(formal_receiver_ty, receiver_ty);
self.check_call_arguments(tgt_expr, param_tys, ret_ty, expected, args, &[], sig.c_variadic);
ret_ty
diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs
index 729ed214da..45fa141b6d 100644
--- a/crates/hir-ty/src/infer/mutability.rs
+++ b/crates/hir-ty/src/infer/mutability.rs
@@ -2,7 +2,8 @@
//! between `Deref` and `DerefMut` or `Index` and `IndexMut` or similar.
use hir_def::hir::{
- Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Statement, UnaryOp,
+ Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, RecordSpread,
+ Statement, UnaryOp,
};
use rustc_ast_ir::Mutability;
@@ -132,8 +133,11 @@ impl<'db> InferenceContext<'_, 'db> {
Expr::Become { expr } => {
self.infer_mut_expr(*expr, Mutability::Not);
}
- Expr::RecordLit { path: _, fields, spread } => {
- self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread))
+ Expr::RecordLit { path: _, fields, spread, .. } => {
+ self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr));
+ if let RecordSpread::Expr(expr) = *spread {
+ self.infer_mut_expr(expr, Mutability::Not);
+ }
}
&Expr::Index { base, index } => {
if mutability == Mutability::Mut {
diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs
index b11650bbcd..ef1a610a32 100644
--- a/crates/hir-ty/src/infer/path.rs
+++ b/crates/hir-ty/src/infer/path.rs
@@ -93,6 +93,7 @@ impl<'db> InferenceContext<'_, 'db> {
if let GenericDefId::StaticId(_) = generic_def {
// `Static` is the kind of item that can never be generic currently. We can just skip the binders to get its type.
let ty = self.db.value_ty(value_def)?.skip_binder();
+ let ty = self.process_remote_user_written_ty(ty);
return Some(ValuePathResolution::NonGeneric(ty));
};
diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs
index d55fc0ab0d..2057159c46 100644
--- a/crates/hir-ty/src/infer/unify.rs
+++ b/crates/hir-ty/src/infer/unify.rs
@@ -261,16 +261,6 @@ impl<'db> InferenceTable<'db> {
self.infer_ctxt.canonicalize_response(t)
}
- // FIXME: We should get rid of this method. We cannot deeply normalize during inference, only when finishing.
- // Inference should use shallow normalization (`try_structurally_resolve_type()`) only, when needed.
- pub(crate) fn normalize_associated_types_in<T>(&mut self, ty: T) -> T
- where
- T: TypeFoldable<DbInterner<'db>> + Clone,
- {
- let ty = self.resolve_vars_with_obligations(ty);
- self.at(&ObligationCause::new()).deeply_normalize(ty.clone()).unwrap_or(ty)
- }
-
pub(crate) fn normalize_alias_ty(&mut self, alias: Ty<'db>) -> Ty<'db> {
self.infer_ctxt
.at(&ObligationCause::new(), self.param_env)
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 8e1ea9c478..5789bf02a4 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -27,8 +27,8 @@ use hir_def::{
resolver::{HasResolver, LifetimeNs, Resolver, TypeNs, ValueNs},
signatures::{FunctionSignature, TraitFlags, TypeAliasFlags},
type_ref::{
- ConstRef, LifetimeRefId, PathId, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound,
- TypeRef, TypeRefId,
+ ConstRef, FnType, LifetimeRefId, PathId, TraitBoundModifier, TraitRef as HirTraitRef,
+ TypeBound, TypeRef, TypeRefId,
},
};
use hir_expand::name::Name;
@@ -53,7 +53,7 @@ use tracing::debug;
use triomphe::{Arc, ThinArc};
use crate::{
- FnAbi, ImplTraitId, TyLoweringDiagnostic, TyLoweringDiagnosticKind,
+ FnAbi, ImplTraitId, TyLoweringDiagnostic, TyLoweringDiagnosticKind, all_super_traits,
consteval::intern_const_ref,
db::{HirDatabase, InternedOpaqueTyId},
generics::{Generics, generics, trait_self_param_idx},
@@ -77,6 +77,7 @@ pub struct ImplTraits {
#[derive(PartialEq, Eq, Debug, Hash)]
pub struct ImplTrait {
pub(crate) predicates: StoredClauses,
+ pub(crate) assoc_ty_bounds_start: u32,
}
pub type ImplTraitIdx = Idx<ImplTrait>;
@@ -97,7 +98,7 @@ impl ImplTraitLoweringState {
}
}
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Copy)]
pub enum LifetimeElisionKind<'db> {
/// Create a new anonymous lifetime parameter and reference it.
///
@@ -166,6 +167,12 @@ impl<'db> LifetimeElisionKind<'db> {
}
}
+#[derive(Clone, Copy, PartialEq, Debug)]
+pub(crate) enum GenericPredicateSource {
+ SelfOnly,
+ AssocTyBound,
+}
+
#[derive(Debug)]
pub struct TyLoweringContext<'db, 'a> {
pub db: &'db dyn HirDatabase,
@@ -430,26 +437,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
Ty::new_ref(interner, lifetime, inner_ty, lower_mutability(ref_.mutability))
}
TypeRef::Placeholder => Ty::new_error(interner, ErrorGuaranteed),
- TypeRef::Fn(fn_) => {
- let substs = self.with_shifted_in(
- DebruijnIndex::from_u32(1),
- |ctx: &mut TyLoweringContext<'_, '_>| {
- Tys::new_from_iter(
- interner,
- fn_.params.iter().map(|&(_, tr)| ctx.lower_ty(tr)),
- )
- },
- );
- Ty::new_fn_ptr(
- interner,
- Binder::dummy(FnSig {
- abi: fn_.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol),
- safety: if fn_.is_unsafe { Safety::Unsafe } else { Safety::Safe },
- c_variadic: fn_.is_varargs,
- inputs_and_output: substs,
- }),
- )
- }
+ TypeRef::Fn(fn_) => self.lower_fn_ptr(fn_),
TypeRef::DynTrait(bounds) => self.lower_dyn_trait(bounds),
TypeRef::ImplTrait(bounds) => {
match self.impl_trait_mode.mode {
@@ -465,10 +453,10 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
// this dance is to make sure the data is in the right
// place even if we encounter more opaque types while
// lowering the bounds
- let idx = self
- .impl_trait_mode
- .opaque_type_data
- .alloc(ImplTrait { predicates: Clauses::empty(interner).store() });
+ let idx = self.impl_trait_mode.opaque_type_data.alloc(ImplTrait {
+ predicates: Clauses::empty(interner).store(),
+ assoc_ty_bounds_start: 0,
+ });
let impl_trait_id = origin.either(
|f| ImplTraitId::ReturnTypeImplTrait(f, idx),
@@ -510,6 +498,30 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
(ty, res)
}
+ fn lower_fn_ptr(&mut self, fn_: &FnType) -> Ty<'db> {
+ let interner = self.interner;
+ let (params, ret_ty) = fn_.split_params_and_ret();
+ let old_lifetime_elision = self.lifetime_elision;
+ let mut args = Vec::with_capacity(fn_.params.len());
+ self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx: &mut TyLoweringContext<'_, '_>| {
+ ctx.lifetime_elision =
+ LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false };
+ args.extend(params.iter().map(|&(_, tr)| ctx.lower_ty(tr)));
+ ctx.lifetime_elision = LifetimeElisionKind::for_fn_ret(interner);
+ args.push(ctx.lower_ty(ret_ty));
+ });
+ self.lifetime_elision = old_lifetime_elision;
+ Ty::new_fn_ptr(
+ interner,
+ Binder::dummy(FnSig {
+ abi: fn_.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol),
+ safety: if fn_.is_unsafe { Safety::Unsafe } else { Safety::Safe },
+ c_variadic: fn_.is_varargs,
+ inputs_and_output: Tys::new_from_slice(&args),
+ }),
+ )
+ }
+
/// This is only for `generic_predicates_for_param`, where we can't just
/// lower the self types of the predicates since that could lead to cycles.
/// So we just check here if the `type_ref` resolves to a generic param, and which.
@@ -608,7 +620,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
ignore_bindings: bool,
generics: &Generics,
predicate_filter: PredicateFilter,
- ) -> impl Iterator<Item = Clause<'db>> + use<'a, 'b, 'db> {
+ ) -> impl Iterator<Item = (Clause<'db>, GenericPredicateSource)> + use<'a, 'b, 'db> {
match where_predicate {
WherePredicate::ForLifetime { target, bound, .. }
| WherePredicate::TypeBound { target, bound } => {
@@ -634,8 +646,8 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
let self_ty = self.lower_ty(*target);
Either::Left(Either::Right(self.lower_type_bound(bound, self_ty, ignore_bindings)))
}
- &WherePredicate::Lifetime { bound, target } => {
- Either::Right(iter::once(Clause(Predicate::new(
+ &WherePredicate::Lifetime { bound, target } => Either::Right(iter::once((
+ Clause(Predicate::new(
self.interner,
Binder::dummy(rustc_type_ir::PredicateKind::Clause(
rustc_type_ir::ClauseKind::RegionOutlives(OutlivesPredicate(
@@ -643,8 +655,9 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
self.lower_lifetime(target),
)),
)),
- ))))
- }
+ )),
+ GenericPredicateSource::SelfOnly,
+ ))),
}
.into_iter()
}
@@ -654,7 +667,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
bound: &'b TypeBound,
self_ty: Ty<'db>,
ignore_bindings: bool,
- ) -> impl Iterator<Item = Clause<'db>> + use<'b, 'a, 'db> {
+ ) -> impl Iterator<Item = (Clause<'db>, GenericPredicateSource)> + use<'b, 'a, 'db> {
let interner = self.interner;
let meta_sized = self.lang_items.MetaSized;
let pointee_sized = self.lang_items.PointeeSized;
@@ -712,7 +725,10 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
}
TypeBound::Use(_) | TypeBound::Error => {}
}
- clause.into_iter().chain(assoc_bounds.into_iter().flatten())
+ clause
+ .into_iter()
+ .map(|pred| (pred, GenericPredicateSource::SelfOnly))
+ .chain(assoc_bounds.into_iter().flatten())
}
fn lower_dyn_trait(&mut self, bounds: &[TypeBound]) -> Ty<'db> {
@@ -732,7 +748,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
for b in bounds {
let db = ctx.db;
- ctx.lower_type_bound(b, dummy_self_ty, false).for_each(|b| {
+ ctx.lower_type_bound(b, dummy_self_ty, false).for_each(|(b, _)| {
match b.kind().skip_binder() {
rustc_type_ir::ClauseKind::Trait(t) => {
let id = t.def_id();
@@ -990,35 +1006,49 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
rustc_type_ir::AliasTyKind::Opaque,
AliasTy::new_from_args(interner, def_id, args),
);
- let predicates = self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx| {
- let mut predicates = Vec::new();
- for b in bounds {
- predicates.extend(ctx.lower_type_bound(b, self_ty, false));
- }
+ let (predicates, assoc_ty_bounds_start) =
+ self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx| {
+ let mut predicates = Vec::new();
+ let mut assoc_ty_bounds = Vec::new();
+ for b in bounds {
+ for (pred, source) in ctx.lower_type_bound(b, self_ty, false) {
+ match source {
+ GenericPredicateSource::SelfOnly => predicates.push(pred),
+ GenericPredicateSource::AssocTyBound => assoc_ty_bounds.push(pred),
+ }
+ }
+ }
- if !ctx.unsized_types.contains(&self_ty) {
- let sized_trait = self.lang_items.Sized;
- let sized_clause = sized_trait.map(|trait_id| {
- let trait_ref = TraitRef::new_from_args(
- interner,
- trait_id.into(),
- GenericArgs::new_from_slice(&[self_ty.into()]),
- );
- Clause(Predicate::new(
- interner,
- Binder::dummy(rustc_type_ir::PredicateKind::Clause(
- rustc_type_ir::ClauseKind::Trait(TraitPredicate {
- trait_ref,
- polarity: rustc_type_ir::PredicatePolarity::Positive,
- }),
- )),
- ))
- });
- predicates.extend(sized_clause);
- }
- predicates
- });
- ImplTrait { predicates: Clauses::new_from_slice(&predicates).store() }
+ if !ctx.unsized_types.contains(&self_ty) {
+ let sized_trait = self.lang_items.Sized;
+ let sized_clause = sized_trait.map(|trait_id| {
+ let trait_ref = TraitRef::new_from_args(
+ interner,
+ trait_id.into(),
+ GenericArgs::new_from_slice(&[self_ty.into()]),
+ );
+ Clause(Predicate::new(
+ interner,
+ Binder::dummy(rustc_type_ir::PredicateKind::Clause(
+ rustc_type_ir::ClauseKind::Trait(TraitPredicate {
+ trait_ref,
+ polarity: rustc_type_ir::PredicatePolarity::Positive,
+ }),
+ )),
+ ))
+ });
+ predicates.extend(sized_clause);
+ }
+
+ let assoc_ty_bounds_start = predicates.len() as u32;
+ predicates.extend(assoc_ty_bounds);
+ (predicates, assoc_ty_bounds_start)
+ });
+
+ ImplTrait {
+ predicates: Clauses::new_from_slice(&predicates).store(),
+ assoc_ty_bounds_start,
+ }
}
pub(crate) fn lower_lifetime(&mut self, lifetime: LifetimeRefId) -> Region<'db> {
@@ -1139,6 +1169,31 @@ impl ImplTraitId {
.expect("owner should have opaque type")
.get_with(|it| it.impl_traits[idx].predicates.as_ref().as_slice())
}
+
+ #[inline]
+ pub fn self_predicates<'db>(
+ self,
+ db: &'db dyn HirDatabase,
+ ) -> EarlyBinder<'db, &'db [Clause<'db>]> {
+ let (impl_traits, idx) = match self {
+ ImplTraitId::ReturnTypeImplTrait(owner, idx) => {
+ (ImplTraits::return_type_impl_traits(db, owner), idx)
+ }
+ ImplTraitId::TypeAliasImplTrait(owner, idx) => {
+ (ImplTraits::type_alias_impl_traits(db, owner), idx)
+ }
+ };
+ let predicates =
+ impl_traits.as_deref().expect("owner should have opaque type").get_with(|it| {
+ let impl_trait = &it.impl_traits[idx];
+ (
+ impl_trait.predicates.as_ref().as_slice(),
+ impl_trait.assoc_ty_bounds_start as usize,
+ )
+ });
+
+ predicates.map_bound(|(preds, len)| &preds[..len])
+ }
}
impl InternedOpaqueTyId {
@@ -1146,6 +1201,14 @@ impl InternedOpaqueTyId {
pub fn predicates<'db>(self, db: &'db dyn HirDatabase) -> EarlyBinder<'db, &'db [Clause<'db>]> {
self.loc(db).predicates(db)
}
+
+ #[inline]
+ pub fn self_predicates<'db>(
+ self,
+ db: &'db dyn HirDatabase,
+ ) -> EarlyBinder<'db, &'db [Clause<'db>]> {
+ self.loc(db).self_predicates(db)
+ }
}
#[salsa::tracked]
@@ -1561,11 +1624,16 @@ pub(crate) fn field_types_with_diagnostics_query<'db>(
(res, create_diagnostics(ctx.diagnostics))
}
+/// Predicates for `param_id` of the form `P: SomeTrait`. If
+/// `assoc_name` is provided, only return predicates referencing traits
+/// that have an associated type of that name.
+///
/// This query exists only to be used when resolving short-hand associated types
/// like `T::Item`.
///
/// See the analogous query in rustc and its comment:
/// <https://github.com/rust-lang/rust/blob/9150f844e2624eb013ec78ca08c1d416e6644026/src/librustc_typeck/astconv.rs#L46>
+///
/// This is a query mostly to handle cycles somewhat gracefully; e.g. the
/// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but
/// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
@@ -1589,7 +1657,7 @@ pub(crate) fn generic_predicates_for_param<'db>(
);
// we have to filter out all other predicates *first*, before attempting to lower them
- let predicate = |pred: &_, ctx: &mut TyLoweringContext<'_, '_>| match pred {
+ let has_relevant_bound = |pred: &_, ctx: &mut TyLoweringContext<'_, '_>| match pred {
WherePredicate::ForLifetime { target, bound, .. }
| WherePredicate::TypeBound { target, bound, .. } => {
let invalid_target = { ctx.lower_ty_only_param(*target) != Some(param_id) };
@@ -1637,11 +1705,7 @@ pub(crate) fn generic_predicates_for_param<'db>(
return false;
};
- rustc_type_ir::elaborate::supertrait_def_ids(interner, tr.into()).any(|tr| {
- tr.0.trait_items(db).items.iter().any(|(name, item)| {
- matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
- })
- })
+ trait_or_supertrait_has_assoc_type(db, tr, assoc_name)
}
TypeBound::Use(_) | TypeBound::Lifetime(_) | TypeBound::Error => false,
}
@@ -1654,13 +1718,16 @@ pub(crate) fn generic_predicates_for_param<'db>(
{
ctx.store = maybe_parent_generics.store();
for pred in maybe_parent_generics.where_predicates() {
- if predicate(pred, &mut ctx) {
- predicates.extend(ctx.lower_where_predicate(
- pred,
- true,
- maybe_parent_generics,
- PredicateFilter::All,
- ));
+ if has_relevant_bound(pred, &mut ctx) {
+ predicates.extend(
+ ctx.lower_where_predicate(
+ pred,
+ true,
+ maybe_parent_generics,
+ PredicateFilter::All,
+ )
+ .map(|(pred, _)| pred),
+ );
}
}
}
@@ -1691,26 +1758,70 @@ pub(crate) fn generic_predicates_for_param_cycle_result(
StoredEarlyBinder::bind(Clauses::empty(DbInterner::new_no_crate(db)).store())
}
+/// Check if this trait or any of its supertraits define an associated
+/// type with the given name.
+fn trait_or_supertrait_has_assoc_type(
+ db: &dyn HirDatabase,
+ tr: TraitId,
+ assoc_name: &Name,
+) -> bool {
+ for trait_id in all_super_traits(db, tr) {
+ if trait_id
+ .trait_items(db)
+ .items
+ .iter()
+ .any(|(name, item)| matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name)
+ {
+ return true;
+ }
+ }
+
+ false
+}
+
#[inline]
pub(crate) fn type_alias_bounds<'db>(
db: &'db dyn HirDatabase,
type_alias: TypeAliasId,
) -> EarlyBinder<'db, &'db [Clause<'db>]> {
- type_alias_bounds_with_diagnostics(db, type_alias).0.map_bound(|it| it.as_slice())
+ type_alias_bounds_with_diagnostics(db, type_alias).0.predicates.map_bound(|it| it.as_slice())
}
-pub(crate) fn type_alias_bounds_with_diagnostics<'db>(
+#[inline]
+pub(crate) fn type_alias_self_bounds<'db>(
db: &'db dyn HirDatabase,
type_alias: TypeAliasId,
-) -> (EarlyBinder<'db, Clauses<'db>>, Diagnostics) {
- let (bounds, diags) = type_alias_bounds_with_diagnostics_query(db, type_alias);
- return (bounds.get(), diags.clone());
+) -> EarlyBinder<'db, &'db [Clause<'db>]> {
+ let (TypeAliasBounds { predicates, assoc_ty_bounds_start }, _) =
+ type_alias_bounds_with_diagnostics(db, type_alias);
+ predicates.map_bound(|it| &it.as_slice()[..assoc_ty_bounds_start as usize])
+}
+
+#[derive(PartialEq, Eq, Debug, Hash)]
+struct TypeAliasBounds<T> {
+ predicates: T,
+ assoc_ty_bounds_start: u32,
+}
+
+fn type_alias_bounds_with_diagnostics<'db>(
+ db: &'db dyn HirDatabase,
+ type_alias: TypeAliasId,
+) -> (TypeAliasBounds<EarlyBinder<'db, Clauses<'db>>>, Diagnostics) {
+ let (TypeAliasBounds { predicates, assoc_ty_bounds_start }, diags) =
+ type_alias_bounds_with_diagnostics_query(db, type_alias);
+ return (
+ TypeAliasBounds {
+ predicates: predicates.get(),
+ assoc_ty_bounds_start: *assoc_ty_bounds_start,
+ },
+ diags.clone(),
+ );
#[salsa::tracked(returns(ref))]
pub fn type_alias_bounds_with_diagnostics_query<'db>(
db: &'db dyn HirDatabase,
type_alias: TypeAliasId,
- ) -> (StoredEarlyBinder<StoredClauses>, Diagnostics) {
+ ) -> (TypeAliasBounds<StoredEarlyBinder<StoredClauses>>, Diagnostics) {
let type_alias_data = db.type_alias_signature(type_alias);
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db);
let mut ctx = TyLoweringContext::new(
@@ -1727,10 +1838,18 @@ pub(crate) fn type_alias_bounds_with_diagnostics<'db>(
let interner_ty = Ty::new_projection_from_args(interner, def_id, item_args);
let mut bounds = Vec::new();
+ let mut assoc_ty_bounds = Vec::new();
for bound in &type_alias_data.bounds {
- ctx.lower_type_bound(bound, interner_ty, false).for_each(|pred| {
- bounds.push(pred);
- });
+ ctx.lower_type_bound(bound, interner_ty, false).for_each(
+ |(pred, source)| match source {
+ GenericPredicateSource::SelfOnly => {
+ bounds.push(pred);
+ }
+ GenericPredicateSource::AssocTyBound => {
+ assoc_ty_bounds.push(pred);
+ }
+ },
+ );
}
if !ctx.unsized_types.contains(&interner_ty) {
@@ -1745,8 +1864,14 @@ pub(crate) fn type_alias_bounds_with_diagnostics<'db>(
};
}
+ let assoc_ty_bounds_start = bounds.len() as u32;
+ bounds.extend(assoc_ty_bounds);
+
(
- StoredEarlyBinder::bind(Clauses::new_from_slice(&bounds).store()),
+ TypeAliasBounds {
+ predicates: StoredEarlyBinder::bind(Clauses::new_from_slice(&bounds).store()),
+ assoc_ty_bounds_start,
+ },
create_diagnostics(ctx.diagnostics),
)
}
@@ -1754,11 +1879,15 @@ pub(crate) fn type_alias_bounds_with_diagnostics<'db>(
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GenericPredicates {
- // The order is the following: first, if `parent_is_trait == true`, comes the implicit trait predicate for the
- // parent. Then come the explicit predicates for the parent, then the explicit trait predicate for the child,
+ // The order is the following: first, if `parent_is_trait == true`, comes the implicit trait
+ // predicate for the parent. Then come the bounds of the associated types of the parents,
+ // then the explicit, self-only predicates for the parent, then the explicit, self-only trait
+ // predicate for the child, then the bounds of the associated types of the child,
// then the implicit trait predicate for the child, if `is_trait` is `true`.
predicates: StoredEarlyBinder<StoredClauses>,
+ parent_explicit_self_predicates_start: u32,
own_predicates_start: u32,
+ own_assoc_ty_bounds_start: u32,
is_trait: bool,
parent_is_trait: bool,
}
@@ -1782,7 +1911,15 @@ impl GenericPredicates {
pub(crate) fn from_explicit_own_predicates(
predicates: StoredEarlyBinder<StoredClauses>,
) -> Self {
- Self { predicates, own_predicates_start: 0, is_trait: false, parent_is_trait: false }
+ let len = predicates.get().skip_binder().len() as u32;
+ Self {
+ predicates,
+ parent_explicit_self_predicates_start: 0,
+ own_predicates_start: 0,
+ own_assoc_ty_bounds_start: len,
+ is_trait: false,
+ parent_is_trait: false,
+ }
}
#[inline]
@@ -1815,6 +1952,14 @@ impl GenericPredicates {
}
#[inline]
+ pub fn query_explicit_implied<'db>(
+ db: &'db dyn HirDatabase,
+ def: GenericDefId,
+ ) -> EarlyBinder<'db, &'db [Clause<'db>]> {
+ Self::query(db, def).explicit_implied_predicates()
+ }
+
+ #[inline]
pub fn all_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> {
self.predicates.get().map_bound(|it| it.as_slice())
}
@@ -1824,10 +1969,19 @@ impl GenericPredicates {
self.predicates.get().map_bound(|it| &it.as_slice()[self.own_predicates_start as usize..])
}
- /// Returns the predicates, minus the implicit `Self: Trait` predicate for a trait.
+ /// Returns the predicates, minus the implicit `Self: Trait` predicate and bounds of the
+ /// associated types for a trait.
#[inline]
pub fn explicit_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> {
self.predicates.get().map_bound(|it| {
+ &it.as_slice()[self.parent_explicit_self_predicates_start as usize
+ ..self.own_assoc_ty_bounds_start as usize]
+ })
+ }
+
+ #[inline]
+ pub fn explicit_implied_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> {
+ self.predicates.get().map_bound(|it| {
&it.as_slice()[usize::from(self.parent_is_trait)..it.len() - usize::from(self.is_trait)]
})
}
@@ -1902,26 +2056,22 @@ where
);
let sized_trait = ctx.lang_items.Sized;
- let mut predicates = Vec::new();
+ // We need to lower parents and self separately - see the comment below lowering of implicit
+ // `Sized` predicates for why.
+ let mut own_predicates = Vec::new();
+ let mut parent_predicates = Vec::new();
+ let mut own_assoc_ty_bounds = Vec::new();
+ let mut parent_assoc_ty_bounds = Vec::new();
let all_generics =
std::iter::successors(Some(&generics), |generics| generics.parent_generics())
.collect::<ArrayVec<_, 2>>();
- let mut is_trait = false;
- let mut parent_is_trait = false;
- if all_generics.len() > 1 {
- add_implicit_trait_predicate(
- interner,
- all_generics.last().unwrap().def(),
- predicate_filter,
- &mut predicates,
- &mut parent_is_trait,
- );
- }
- // We need to lower parent predicates first - see the comment below lowering of implicit `Sized` predicates
- // for why.
- let mut own_predicates_start = 0;
+ let own_implicit_trait_predicate = implicit_trait_predicate(interner, def, predicate_filter);
+ let parent_implicit_trait_predicate = if all_generics.len() > 1 {
+ implicit_trait_predicate(interner, all_generics.last().unwrap().def(), predicate_filter)
+ } else {
+ None
+ };
for &maybe_parent_generics in all_generics.iter().rev() {
- let current_def_predicates_start = predicates.len();
// Collect only diagnostics from the child, not including parents.
ctx.diagnostics.clear();
@@ -1929,15 +2079,37 @@ where
ctx.store = maybe_parent_generics.store();
for pred in maybe_parent_generics.where_predicates() {
tracing::debug!(?pred);
- predicates.extend(ctx.lower_where_predicate(
- pred,
- false,
- maybe_parent_generics,
- predicate_filter,
- ));
+ for (pred, source) in
+ ctx.lower_where_predicate(pred, false, maybe_parent_generics, predicate_filter)
+ {
+ match source {
+ GenericPredicateSource::SelfOnly => {
+ if maybe_parent_generics.def() == def {
+ own_predicates.push(pred);
+ } else {
+ parent_predicates.push(pred);
+ }
+ }
+ GenericPredicateSource::AssocTyBound => {
+ if maybe_parent_generics.def() == def {
+ own_assoc_ty_bounds.push(pred);
+ } else {
+ parent_assoc_ty_bounds.push(pred);
+ }
+ }
+ }
+ }
}
- push_const_arg_has_type_predicates(db, &mut predicates, maybe_parent_generics);
+ if maybe_parent_generics.def() == def {
+ push_const_arg_has_type_predicates(db, &mut own_predicates, maybe_parent_generics);
+ } else {
+ push_const_arg_has_type_predicates(
+ db,
+ &mut parent_predicates,
+ maybe_parent_generics,
+ );
+ }
if let Some(sized_trait) = sized_trait {
let mut add_sized_clause = |param_idx, param_id, param_data| {
@@ -1971,7 +2143,11 @@ where
}),
)),
));
- predicates.push(clause);
+ if maybe_parent_generics.def() == def {
+ own_predicates.push(clause);
+ } else {
+ parent_predicates.push(clause);
+ }
};
let parent_params_len = maybe_parent_generics.len_parent();
maybe_parent_generics.iter_self().enumerate().for_each(
@@ -1990,30 +2166,55 @@ where
// predicates before lowering the child, as a child cannot define a `?Sized` predicate for its parent.
// But we do have to lower the parent first.
}
-
- if maybe_parent_generics.def() == def {
- own_predicates_start = current_def_predicates_start as u32;
- }
}
- add_implicit_trait_predicate(interner, def, predicate_filter, &mut predicates, &mut is_trait);
-
let diagnostics = create_diagnostics(ctx.diagnostics);
+
+ // The order is:
+ //
+ // 1. parent implicit trait pred
+ // 2. parent assoc bounds
+ // 3. parent self only preds
+ // 4. own self only preds
+ // 5. own assoc ty bounds
+ // 6. own implicit trait pred
+ //
+ // The purpose of this is to index the slice of the followings, without making extra `Vec`s or
+ // iterators:
+ // - explicit self only predicates, of own or own + self
+ // - explicit predicates, of own or own + self
+ let predicates = parent_implicit_trait_predicate
+ .iter()
+ .chain(parent_assoc_ty_bounds.iter())
+ .chain(parent_predicates.iter())
+ .chain(own_predicates.iter())
+ .chain(own_assoc_ty_bounds.iter())
+ .chain(own_implicit_trait_predicate.iter())
+ .copied()
+ .collect::<Vec<_>>();
+ let parent_is_trait = parent_implicit_trait_predicate.is_some();
+ let is_trait = own_implicit_trait_predicate.is_some();
+ let parent_explicit_self_predicates_start =
+ parent_is_trait as u32 + parent_assoc_ty_bounds.len() as u32;
+ let own_predicates_start =
+ parent_explicit_self_predicates_start + parent_predicates.len() as u32;
+ let own_assoc_ty_bounds_start = own_predicates_start + own_predicates.len() as u32;
+
let predicates = GenericPredicates {
+ parent_explicit_self_predicates_start,
own_predicates_start,
+ own_assoc_ty_bounds_start,
is_trait,
parent_is_trait,
predicates: StoredEarlyBinder::bind(Clauses::new_from_slice(&predicates).store()),
};
return (predicates, diagnostics);
- fn add_implicit_trait_predicate<'db>(
+ fn implicit_trait_predicate<'db>(
interner: DbInterner<'db>,
def: GenericDefId,
predicate_filter: PredicateFilter,
- predicates: &mut Vec<Clause<'db>>,
- set_is_trait: &mut bool,
- ) {
+ ) -> Option<Clause<'db>> {
// For traits, add `Self: Trait` predicate. This is
// not part of the predicates that a user writes, but it
// is something that one must prove in order to invoke a
@@ -2029,8 +2230,9 @@ where
if let GenericDefId::TraitId(def_id) = def
&& predicate_filter == PredicateFilter::All
{
- *set_is_trait = true;
- predicates.push(TraitRef::identity(interner, def_id.into()).upcast(interner));
+ Some(TraitRef::identity(interner, def_id.into()).upcast(interner))
+ } else {
+ None
}
}
}
@@ -2327,7 +2529,7 @@ pub(crate) fn associated_ty_item_bounds<'db>(
let mut bounds = Vec::new();
for bound in &type_alias_data.bounds {
- ctx.lower_type_bound(bound, self_ty, false).for_each(|pred| {
+ ctx.lower_type_bound(bound, self_ty, false).for_each(|(pred, _)| {
if let Some(bound) = pred
.kind()
.map_bound(|c| match c {
diff --git a/crates/hir-ty/src/lower/path.rs b/crates/hir-ty/src/lower/path.rs
index a79f547c2a..f3d0de1227 100644
--- a/crates/hir-ty/src/lower/path.rs
+++ b/crates/hir-ty/src/lower/path.rs
@@ -32,7 +32,8 @@ use crate::{
db::HirDatabase,
generics::{Generics, generics},
lower::{
- LifetimeElisionKind, PathDiagnosticCallbackData, named_associated_type_shorthand_candidates,
+ GenericPredicateSource, LifetimeElisionKind, PathDiagnosticCallbackData,
+ named_associated_type_shorthand_candidates,
},
next_solver::{
Binder, Clause, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Predicate,
@@ -598,7 +599,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
explicit_self_ty: Option<Ty<'db>>,
lowering_assoc_type_generics: bool,
) -> GenericArgs<'db> {
- let old_lifetime_elision = self.ctx.lifetime_elision.clone();
+ let old_lifetime_elision = self.ctx.lifetime_elision;
if let Some(args) = self.current_or_prev_segment.args_and_bindings
&& args.parenthesized != GenericArgsParentheses::No
@@ -639,7 +640,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
explicit_self_ty,
PathGenericsSource::Segment(self.current_segment_u32()),
lowering_assoc_type_generics,
- self.ctx.lifetime_elision.clone(),
+ self.ctx.lifetime_elision,
);
self.ctx.lifetime_elision = old_lifetime_elision;
result
@@ -853,7 +854,8 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
pub(super) fn assoc_type_bindings_from_type_bound<'c>(
mut self,
trait_ref: TraitRef<'db>,
- ) -> Option<impl Iterator<Item = Clause<'db>> + use<'a, 'b, 'c, 'db>> {
+ ) -> Option<impl Iterator<Item = (Clause<'db>, GenericPredicateSource)> + use<'a, 'b, 'c, 'db>>
+ {
let interner = self.ctx.interner;
self.current_or_prev_segment.args_and_bindings.map(|args_and_bindings| {
args_and_bindings.bindings.iter().enumerate().flat_map(move |(binding_idx, binding)| {
@@ -882,7 +884,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
assoc_type: binding_idx as u32,
},
false,
- this.ctx.lifetime_elision.clone(),
+ this.ctx.lifetime_elision,
)
});
let args = GenericArgs::new_from_iter(
@@ -900,7 +902,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
// `Fn()`-style generics are elided like functions. This is `Output` (we lower to it in hir-def).
LifetimeElisionKind::for_fn_ret(self.ctx.interner)
} else {
- self.ctx.lifetime_elision.clone()
+ self.ctx.lifetime_elision
};
self.with_lifetime_elision(lifetime_elision, |this| {
match (&this.ctx.store[type_ref], this.ctx.impl_trait_mode.mode) {
@@ -921,21 +923,29 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
),
)),
));
- predicates.push(pred);
+ predicates.push((pred, GenericPredicateSource::SelfOnly));
}
}
})
}
for bound in binding.bounds.iter() {
- predicates.extend(self.ctx.lower_type_bound(
- bound,
- Ty::new_alias(
- self.ctx.interner,
- AliasTyKind::Projection,
- AliasTy::new_from_args(self.ctx.interner, associated_ty.into(), args),
- ),
- false,
- ));
+ predicates.extend(
+ self.ctx
+ .lower_type_bound(
+ bound,
+ Ty::new_alias(
+ self.ctx.interner,
+ AliasTyKind::Projection,
+ AliasTy::new_from_args(
+ self.ctx.interner,
+ associated_ty.into(),
+ args,
+ ),
+ ),
+ false,
+ )
+ .map(|(pred, _)| (pred, GenericPredicateSource::AssocTyBound)),
+ );
}
predicates
})
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index e4681b464f..ad4d79e68a 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -206,11 +206,11 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
}
}
-/// Used by [FnCtxt::lookup_method_for_operator] with `-Znext-solver`.
+/// Used by `FnCtxt::lookup_method_for_operator` with `-Znext-solver`.
///
/// With `AsRigid` we error on `impl Opaque: NotInItemBounds` while
/// `AsInfer` just treats it as ambiguous and succeeds. This is necessary
-/// as we want [FnCtxt::check_expr_call] to treat not-yet-defined opaque
+/// as we want `FnCtxt::check_expr_call` to treat not-yet-defined opaque
/// types as rigid to support `impl Deref<Target = impl FnOnce()>` and
/// `Box<impl FnOnce()>`.
///
diff --git a/crates/hir-ty/src/method_resolution/probe.rs b/crates/hir-ty/src/method_resolution/probe.rs
index 4a7c7d9353..fc2bd87ee4 100644
--- a/crates/hir-ty/src/method_resolution/probe.rs
+++ b/crates/hir-ty/src/method_resolution/probe.rs
@@ -285,11 +285,15 @@ impl<'a, 'db> MethodResolutionContext<'a, 'db> {
let infcx = self.infcx;
let (self_ty, var_values) = infcx.instantiate_canonical(&query_input);
debug!(?self_ty, ?query_input, "probe_op: Mode::Path");
+ let prev_opaque_entries =
+ self.infcx.inner.borrow_mut().opaque_types().num_entries();
MethodAutoderefStepsResult {
steps: smallvec![CandidateStep {
- self_ty: self
- .infcx
- .make_query_response_ignoring_pending_obligations(var_values, self_ty),
+ self_ty: self.infcx.make_query_response_ignoring_pending_obligations(
+ var_values,
+ self_ty,
+ prev_opaque_entries
+ ),
self_ty_is_opaque: false,
autoderefs: 0,
from_unsafe_deref: false,
@@ -376,6 +380,8 @@ impl<'a, 'db> MethodResolutionContext<'a, 'db> {
// infer var is not an opaque.
let infcx = self.infcx;
let (self_ty, inference_vars) = infcx.instantiate_canonical(self_ty);
+ let prev_opaque_entries = infcx.inner.borrow_mut().opaque_types().num_entries();
+
let self_ty_is_opaque = |ty: Ty<'_>| {
if let TyKind::Infer(InferTy::TyVar(vid)) = ty.kind() {
infcx.has_opaques_with_sub_unified_hidden_type(vid)
@@ -414,6 +420,7 @@ impl<'a, 'db> MethodResolutionContext<'a, 'db> {
self_ty: infcx.make_query_response_ignoring_pending_obligations(
inference_vars,
ty,
+ prev_opaque_entries,
),
self_ty_is_opaque: self_ty_is_opaque(ty),
autoderefs: d,
@@ -437,6 +444,7 @@ impl<'a, 'db> MethodResolutionContext<'a, 'db> {
self_ty: infcx.make_query_response_ignoring_pending_obligations(
inference_vars,
ty,
+ prev_opaque_entries,
),
self_ty_is_opaque: self_ty_is_opaque(ty),
autoderefs: d,
@@ -461,13 +469,17 @@ impl<'a, 'db> MethodResolutionContext<'a, 'db> {
ty: infcx.make_query_response_ignoring_pending_obligations(
inference_vars,
final_ty,
+ prev_opaque_entries,
),
})
}
TyKind::Error(_) => Some(MethodAutoderefBadTy {
reached_raw_pointer,
- ty: infcx
- .make_query_response_ignoring_pending_obligations(inference_vars, final_ty),
+ ty: infcx.make_query_response_ignoring_pending_obligations(
+ inference_vars,
+ final_ty,
+ prev_opaque_entries,
+ ),
}),
TyKind::Array(elem_ty, _) => {
let autoderefs = steps.iter().filter(|s| s.reachable_via_deref).count() - 1;
@@ -475,6 +487,7 @@ impl<'a, 'db> MethodResolutionContext<'a, 'db> {
self_ty: infcx.make_query_response_ignoring_pending_obligations(
inference_vars,
Ty::new_slice(infcx.interner, elem_ty),
+ prev_opaque_entries,
),
self_ty_is_opaque: false,
autoderefs,
@@ -1246,9 +1259,9 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> {
.filter(|step| step.reachable_via_deref)
.filter(|step| {
debug!("pick_all_method: step={:?}", step);
- // skip types that are from a type error or that would require dereferencing
- // a raw pointer
- !step.self_ty.value.value.references_non_lt_error() && !step.from_unsafe_deref
+ // Skip types with type errors (but not const/lifetime errors, which are
+ // often spurious due to incomplete const evaluation) and raw pointer derefs.
+ !step.self_ty.value.value.references_only_ty_error() && !step.from_unsafe_deref
})
.try_for_each(|step| {
let InferOk { value: self_ty, obligations: instantiate_self_ty_obligations } = self
@@ -1740,7 +1753,7 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> {
/// We want to only accept trait methods if they were hold even if the
/// opaque types were rigid. To handle this, we both check that for trait
/// candidates the goal were to hold even when treating opaques as rigid,
- /// see [OpaqueTypesJank](rustc_trait_selection::solve::OpaqueTypesJank).
+ /// see `rustc_trait_selection::solve::OpaqueTypesJank`.
///
/// We also check that all opaque types encountered as self types in the
/// autoderef chain don't get constrained when applying the candidate.
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 1579f00e92..199db7a3e7 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -9,7 +9,7 @@ use hir_def::{
expr_store::{Body, ExpressionStore, HygieneId, path::Path},
hir::{
ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm,
- Pat, PatId, RecordFieldPat, RecordLitField,
+ Pat, PatId, RecordFieldPat, RecordLitField, RecordSpread,
},
item_tree::FieldsShape,
lang_item::LangItems,
@@ -867,16 +867,17 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
}
Expr::Become { .. } => not_supported!("tail-calls"),
Expr::Yield { .. } => not_supported!("yield"),
- Expr::RecordLit { fields, path, spread } => {
- let spread_place = match spread {
- &Some(it) => {
+ Expr::RecordLit { fields, path, spread, .. } => {
+ let spread_place = match *spread {
+ RecordSpread::Expr(it) => {
let Some((p, c)) = self.lower_expr_as_place(current, it, true)? else {
return Ok(None);
};
current = c;
Some(p)
}
- None => None,
+ RecordSpread::None => None,
+ RecordSpread::FieldDefaults => not_supported!("empty record spread"),
};
let variant_id =
self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path {
diff --git a/crates/hir-ty/src/next_solver/fulfill.rs b/crates/hir-ty/src/next_solver/fulfill.rs
index 0fe0732972..a8bff44a02 100644
--- a/crates/hir-ty/src/next_solver/fulfill.rs
+++ b/crates/hir-ty/src/next_solver/fulfill.rs
@@ -48,6 +48,7 @@ pub struct FulfillmentCtxt<'db> {
/// use the context in exactly this snapshot.
#[expect(unused)]
usable_in_snapshot: usize,
+ try_evaluate_obligations_scratch: PendingObligations<'db>,
}
#[derive(Default, Debug, Clone)]
@@ -115,6 +116,7 @@ impl<'db> FulfillmentCtxt<'db> {
FulfillmentCtxt {
obligations: Default::default(),
usable_in_snapshot: infcx.num_open_snapshots(),
+ try_evaluate_obligations_scratch: Default::default(),
}
}
}
@@ -162,12 +164,12 @@ impl<'db> FulfillmentCtxt<'db> {
// and select. They should use a different `ObligationCtxt` instead. Then we'll be also able
// to not put the obligations queue in `InferenceTable`'s snapshots.
// assert_eq!(self.usable_in_snapshot, infcx.num_open_snapshots());
+ self.try_evaluate_obligations_scratch.clear();
let mut errors = Vec::new();
- let mut obligations = Vec::new();
loop {
let mut any_changed = false;
- obligations.extend(self.obligations.drain_pending(|_| true));
- for (mut obligation, stalled_on) in obligations.drain(..) {
+ self.try_evaluate_obligations_scratch.extend(self.obligations.drain_pending(|_| true));
+ for (mut obligation, stalled_on) in self.try_evaluate_obligations_scratch.drain(..) {
if obligation.recursion_depth >= infcx.interner.recursion_limit() {
self.obligations.on_fulfillment_overflow(infcx);
// Only return true errors that we have accumulated while processing.
diff --git a/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs b/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs
index b758042e85..61d1e97746 100644
--- a/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs
+++ b/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs
@@ -15,6 +15,7 @@ use crate::next_solver::{
infer::{
InferCtxt, InferOk, InferResult,
canonical::{QueryRegionConstraints, QueryResponse, canonicalizer::OriginalQueryValues},
+ opaque_types::table::OpaqueTypeStorageEntries,
traits::{ObligationCause, PredicateObligations},
},
};
@@ -194,6 +195,7 @@ impl<'db> InferCtxt<'db> {
&self,
inference_vars: CanonicalVarValues<'db>,
answer: T,
+ prev_entries: OpaqueTypeStorageEntries,
) -> Canonical<'db, QueryResponse<'db, T>>
where
T: TypeFoldable<DbInterner<'db>>,
@@ -209,7 +211,7 @@ impl<'db> InferCtxt<'db> {
.inner
.borrow_mut()
.opaque_type_storage
- .iter_opaque_types()
+ .opaque_types_added_since(prev_entries)
.map(|(k, v)| (k, v.ty))
.collect();
diff --git a/crates/hir-ty/src/next_solver/infer/mod.rs b/crates/hir-ty/src/next_solver/infer/mod.rs
index 7d291f7ddb..21baacb116 100644
--- a/crates/hir-ty/src/next_solver/infer/mod.rs
+++ b/crates/hir-ty/src/next_solver/infer/mod.rs
@@ -140,7 +140,7 @@ pub struct InferCtxtInner<'db> {
///
/// Before running `resolve_regions_and_report_errors`, the creator
/// of the inference context is expected to invoke
- /// [`InferCtxt::process_registered_region_obligations`]
+ /// `InferCtxt::process_registered_region_obligations`
/// for each body-id in this map, which will process the
/// obligations within. This is expected to be done 'late enough'
/// that all type inference variables have been bound and so forth.
diff --git a/crates/hir-ty/src/next_solver/infer/traits.rs b/crates/hir-ty/src/next_solver/infer/traits.rs
index 14df42dc2a..dde6234836 100644
--- a/crates/hir-ty/src/next_solver/infer/traits.rs
+++ b/crates/hir-ty/src/next_solver/infer/traits.rs
@@ -55,6 +55,13 @@ impl ObligationCause {
}
}
+impl Default for ObligationCause {
+ #[inline]
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
/// An `Obligation` represents some trait reference (e.g., `i32: Eq`) for
/// which the "impl_source" must be found. The process of finding an "impl_source" is
/// called "resolving" the `Obligation`. This process consists of
diff --git a/crates/hir-ty/src/next_solver/interner.rs b/crates/hir-ty/src/next_solver/interner.rs
index 2a3df1d32a..e17bdac68c 100644
--- a/crates/hir-ty/src/next_solver/interner.rs
+++ b/crates/hir-ty/src/next_solver/interner.rs
@@ -41,7 +41,8 @@ use crate::{
AdtIdWrapper, AnyImplId, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper,
CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, GeneralConstIdWrapper, OpaqueTypeKey,
RegionAssumptions, SimplifiedType, SolverContext, SolverDefIds, TraitIdWrapper,
- TypeAliasIdWrapper, UnevaluatedConst, util::explicit_item_bounds,
+ TypeAliasIdWrapper, UnevaluatedConst,
+ util::{explicit_item_bounds, explicit_item_self_bounds},
},
};
@@ -1421,7 +1422,7 @@ impl<'db> Interner for DbInterner<'db> {
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
- explicit_item_bounds(self, def_id)
+ explicit_item_self_bounds(self, def_id)
.map_bound(|bounds| elaborate(self, bounds).filter_only_self())
}
@@ -1500,7 +1501,7 @@ impl<'db> Interner for DbInterner<'db> {
}
}
- predicates_of(self.db, def_id).explicit_predicates().map_bound(|predicates| {
+ predicates_of(self.db, def_id).explicit_implied_predicates().map_bound(|predicates| {
predicates
.iter()
.copied()
diff --git a/crates/hir-ty/src/next_solver/predicate.rs b/crates/hir-ty/src/next_solver/predicate.rs
index 5758e2dc7e..6f4fae7073 100644
--- a/crates/hir-ty/src/next_solver/predicate.rs
+++ b/crates/hir-ty/src/next_solver/predicate.rs
@@ -273,9 +273,8 @@ impl<'db> std::fmt::Debug for Clauses<'db> {
impl<'db> Clauses<'db> {
#[inline]
- pub fn empty(_interner: DbInterner<'db>) -> Self {
- // FIXME: Get from a static.
- Self::new_from_slice(&[])
+ pub fn empty(interner: DbInterner<'db>) -> Self {
+ interner.default_types().empty.clauses
}
#[inline]
diff --git a/crates/hir-ty/src/next_solver/ty.rs b/crates/hir-ty/src/next_solver/ty.rs
index 66a24d3949..1173028a10 100644
--- a/crates/hir-ty/src/next_solver/ty.rs
+++ b/crates/hir-ty/src/next_solver/ty.rs
@@ -508,6 +508,11 @@ impl<'db> Ty<'db> {
references_non_lt_error(&self)
}
+ /// Whether the type contains a type error (ignoring const and lifetime errors).
+ pub fn references_only_ty_error(self) -> bool {
+ references_only_ty_error(&self)
+ }
+
pub fn callable_sig(self, interner: DbInterner<'db>) -> Option<Binder<'db, FnSig<'db>>> {
match self.kind() {
TyKind::FnDef(callable, args) => {
@@ -777,6 +782,20 @@ impl<'db> TypeVisitor<DbInterner<'db>> for ReferencesNonLifetimeError {
}
}
+pub fn references_only_ty_error<'db, T: TypeVisitableExt<DbInterner<'db>>>(t: &T) -> bool {
+ t.references_error() && t.visit_with(&mut ReferencesOnlyTyError).is_break()
+}
+
+struct ReferencesOnlyTyError;
+
+impl<'db> TypeVisitor<DbInterner<'db>> for ReferencesOnlyTyError {
+ type Result = ControlFlow<()>;
+
+ fn visit_ty(&mut self, ty: Ty<'db>) -> Self::Result {
+ if ty.is_ty_error() { ControlFlow::Break(()) } else { ty.super_visit_with(self) }
+ }
+}
+
impl<'db> std::fmt::Debug for Ty<'db> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.inner().internee.fmt(f)
diff --git a/crates/hir-ty/src/next_solver/util.rs b/crates/hir-ty/src/next_solver/util.rs
index 34ecfed08f..9a1b476976 100644
--- a/crates/hir-ty/src/next_solver/util.rs
+++ b/crates/hir-ty/src/next_solver/util.rs
@@ -455,6 +455,21 @@ pub fn explicit_item_bounds<'db>(
clauses.map_bound(|clauses| clauses.iter().copied())
}
+pub fn explicit_item_self_bounds<'db>(
+ interner: DbInterner<'db>,
+ def_id: SolverDefId,
+) -> EarlyBinder<'db, impl DoubleEndedIterator<Item = Clause<'db>> + ExactSizeIterator> {
+ let db = interner.db();
+ let clauses = match def_id {
+ SolverDefId::TypeAliasId(type_alias) => {
+ crate::lower::type_alias_self_bounds(db, type_alias)
+ }
+ SolverDefId::InternedOpaqueTyId(id) => id.self_predicates(db),
+ _ => panic!("Unexpected GenericDefId"),
+ };
+ clauses.map_bound(|clauses| clauses.iter().copied())
+}
+
pub struct ContainsTypeErrors;
impl<'db> TypeVisitor<DbInterner<'db>> for ContainsTypeErrors {
diff --git a/crates/hir-ty/src/tests/closure_captures.rs b/crates/hir-ty/src/tests/closure_captures.rs
index 8408c0a7bf..f089120cd7 100644
--- a/crates/hir-ty/src/tests/closure_captures.rs
+++ b/crates/hir-ty/src/tests/closure_captures.rs
@@ -135,7 +135,7 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec
fn deref_in_let() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
fn main() {
let a = &mut true;
let closure = || { let b = *a; };
@@ -149,7 +149,7 @@ fn main() {
fn deref_then_ref_pattern() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
fn main() {
let a = &mut true;
let closure = || { let &mut ref b = a; };
@@ -159,7 +159,7 @@ fn main() {
);
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
fn main() {
let a = &mut true;
let closure = || { let &mut ref mut b = a; };
@@ -173,7 +173,7 @@ fn main() {
fn unique_borrow() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
fn main() {
let a = &mut true;
let closure = || { *a = false; };
@@ -187,7 +187,7 @@ fn main() {
fn deref_ref_mut() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
fn main() {
let a = &mut true;
let closure = || { let ref mut b = *a; };
@@ -201,7 +201,7 @@ fn main() {
fn let_else_not_consuming() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
fn main() {
let a = &mut true;
let closure = || { let _ = *a else { return; }; };
@@ -215,7 +215,7 @@ fn main() {
fn consume() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
struct NonCopy;
fn main() {
let a = NonCopy;
@@ -230,7 +230,7 @@ fn main() {
fn ref_to_upvar() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
struct NonCopy;
fn main() {
let mut a = NonCopy;
@@ -248,7 +248,7 @@ fn main() {
fn field() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
struct Foo { a: i32, b: i32 }
fn main() {
let a = Foo { a: 0, b: 0 };
@@ -263,7 +263,7 @@ fn main() {
fn fields_different_mode() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
struct NonCopy;
struct Foo { a: i32, b: i32, c: NonCopy, d: bool }
fn main() {
@@ -286,7 +286,7 @@ fn main() {
fn autoref() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
struct Foo;
impl Foo {
fn imm(&self) {}
@@ -308,7 +308,7 @@ fn main() {
fn captures_priority() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
struct NonCopy;
fn main() {
let mut a = &mut true;
@@ -336,7 +336,7 @@ fn main() {
fn let_underscore() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
fn main() {
let mut a = true;
let closure = || { let _ = a; };
@@ -350,7 +350,7 @@ fn main() {
fn match_wildcard() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
struct NonCopy;
fn main() {
let mut a = NonCopy;
@@ -375,7 +375,7 @@ fn main() {
fn multiple_bindings() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
fn main() {
let mut a = false;
let mut closure = || { let (b | b) = a; };
@@ -389,7 +389,7 @@ fn main() {
fn multiple_usages() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
fn main() {
let mut a = false;
let mut closure = || {
@@ -410,7 +410,7 @@ fn main() {
fn ref_then_deref() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
fn main() {
let mut a = false;
let mut closure = || { let b = *&mut a; };
@@ -424,7 +424,7 @@ fn main() {
fn ref_of_ref() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
fn main() {
let mut a = &false;
let closure = || { let b = &a; };
@@ -446,7 +446,7 @@ fn main() {
fn multiple_capture_usages() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
struct A { a: i32, b: bool }
fn main() {
let mut a = A { a: 123, b: false };
@@ -465,7 +465,7 @@ fn main() {
fn let_binding_is_a_ref_capture_in_ref_binding() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
struct S;
fn main() {
let mut s = S;
@@ -489,7 +489,7 @@ fn main() {
fn let_binding_is_a_value_capture_in_binding() {
check_closure_captures(
r#"
-//- minicore:copy, option
+//- minicore:copy, fn, option
struct Box(i32);
fn main() {
let b = Some(Box(0));
@@ -508,7 +508,7 @@ fn main() {
fn alias_needs_to_be_normalized() {
check_closure_captures(
r#"
-//- minicore:copy
+//- minicore:copy, fn
trait Trait {
type Associated;
}
@@ -528,3 +528,41 @@ fn main() {
expect!["220..257;174..175;245..250 ByRef(Shared) c.b.x &'? i32"],
);
}
+
+#[test]
+fn nested_ref_captures_from_outer() {
+ check_closure_captures(
+ r#"
+//- minicore:copy, fn
+fn f() {
+ let a = 1;
+ let a_closure = || {
+ let b_closure = || {
+ { a };
+ };
+ };
+}
+"#,
+ expect![[r#"
+ 44..113;17..18;92..93 ByRef(Shared) a &'? i32
+ 73..106;17..18;92..93 ByRef(Shared) a &'? i32"#]],
+ );
+}
+
+#[test]
+fn nested_ref_captures() {
+ check_closure_captures(
+ r#"
+//- minicore:copy, fn
+fn f() {
+ let a_closure = || {
+ let b = 2;
+ let b_closure = || {
+ { b };
+ };
+ };
+}
+"#,
+ expect!["77..110;46..47;96..97 ByRef(Shared) b &'? i32"],
+ );
+}
diff --git a/crates/hir-ty/src/tests/opaque_types.rs b/crates/hir-ty/src/tests/opaque_types.rs
index ca986336ff..21d830ed51 100644
--- a/crates/hir-ty/src/tests/opaque_types.rs
+++ b/crates/hir-ty/src/tests/opaque_types.rs
@@ -1,5 +1,7 @@
use expect_test::expect;
+use crate::tests::check_infer;
+
use super::{check_infer_with_mismatches, check_no_mismatches, check_types};
#[test]
@@ -176,3 +178,37 @@ fn main() {
"#,
);
}
+
+#[test]
+fn regression_21455() {
+ check_infer(
+ r#"
+//- minicore: copy
+
+struct Vec<T>(T);
+impl<T> Vec<T> {
+ pub fn new() -> Self { loop {} }
+}
+
+pub struct Miku {}
+
+impl Miku {
+ pub fn all_paths_to(&self) -> impl Copy {
+ Miku {
+ full_paths: Vec::new(),
+ }
+ }
+}
+ "#,
+ expect![[r#"
+ 61..72 '{ loop {} }': Vec<T>
+ 63..70 'loop {}': !
+ 68..70 '{}': ()
+ 133..137 'self': &'? Miku
+ 152..220 '{ ... }': Miku
+ 162..214 'Miku {... }': Miku
+ 193..201 'Vec::new': fn new<{unknown}>() -> Vec<{unknown}>
+ 193..203 'Vec::new()': Vec<{unknown}>
+ "#]],
+ );
+}
diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs
index 0b776938c5..8c7d29f993 100644
--- a/crates/hir-ty/src/tests/patterns.rs
+++ b/crates/hir-ty/src/tests/patterns.rs
@@ -13,11 +13,11 @@ fn infer_pattern() {
let a = z;
let (c, d) = (1, "hello");
- for (e, f) in some_iter {
+ for (e, f) in [(0, 1)] {
let g = e;
}
- if let [val] = opt {
+ if let [val] = [y] {
let h = val;
}
@@ -33,7 +33,7 @@ fn infer_pattern() {
"#,
expect![[r#"
8..9 'x': &'? i32
- 17..400 '{ ...o_x; }': ()
+ 17..399 '{ ...o_x; }': ()
27..28 'y': &'? i32
31..32 'x': &'? i32
42..44 '&z': &'? i32
@@ -47,58 +47,62 @@ fn infer_pattern() {
82..94 '(1, "hello")': (i32, &'? str)
83..84 '1': i32
86..93 '"hello"': &'static str
- 101..151 'for (e... }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
- 101..151 'for (e... }': <{unknown} as IntoIterator>::IntoIter
- 101..151 'for (e... }': !
- 101..151 'for (e... }': {unknown}
- 101..151 'for (e... }': &'? mut {unknown}
- 101..151 'for (e... }': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
- 101..151 'for (e... }': Option<<{unknown} as Iterator>::Item>
- 101..151 'for (e... }': ()
- 101..151 'for (e... }': ()
- 101..151 'for (e... }': ()
- 101..151 'for (e... }': ()
- 105..111 '(e, f)': ({unknown}, {unknown})
- 106..107 'e': {unknown}
- 109..110 'f': {unknown}
- 115..124 'some_iter': {unknown}
- 125..151 '{ ... }': ()
- 139..140 'g': {unknown}
- 143..144 'e': {unknown}
- 157..204 'if let... }': ()
- 160..175 'let [val] = opt': bool
- 164..169 '[val]': [{unknown}]
- 165..168 'val': {unknown}
- 172..175 'opt': [{unknown}]
- 176..204 '{ ... }': ()
- 190..191 'h': {unknown}
- 194..197 'val': {unknown}
- 210..236 'if let...rue {}': ()
- 213..233 'let x ... &true': bool
- 217..225 'x @ true': &'? bool
- 221..225 'true': bool
- 221..225 'true': bool
- 228..233 '&true': &'? bool
- 229..233 'true': bool
- 234..236 '{}': ()
- 246..252 'lambda': impl Fn(u64, u64, i32) -> i32
- 255..287 '|a: u6...b; c }': impl Fn(u64, u64, i32) -> i32
- 256..257 'a': u64
- 264..265 'b': u64
- 267..268 'c': i32
- 275..287 '{ a + b; c }': i32
- 277..278 'a': u64
- 277..282 'a + b': u64
- 281..282 'b': u64
- 284..285 'c': i32
- 298..310 'ref ref_to_x': &'? &'? i32
- 313..314 'x': &'? i32
- 324..333 'mut mut_x': &'? i32
- 336..337 'x': &'? i32
- 347..367 'ref mu...f_to_x': &'? mut &'? i32
- 370..371 'x': &'? i32
- 381..382 'k': &'? mut &'? i32
- 385..397 'mut_ref_to_x': &'? mut &'? i32
+ 101..150 'for (e... }': fn into_iter<[(i32, i32); 1]>([(i32, i32); 1]) -> <[(i32, i32); 1] as IntoIterator>::IntoIter
+ 101..150 'for (e... }': IntoIter<(i32, i32), 1>
+ 101..150 'for (e... }': !
+ 101..150 'for (e... }': IntoIter<(i32, i32), 1>
+ 101..150 'for (e... }': &'? mut IntoIter<(i32, i32), 1>
+ 101..150 'for (e... }': fn next<IntoIter<(i32, i32), 1>>(&'? mut IntoIter<(i32, i32), 1>) -> Option<<IntoIter<(i32, i32), 1> as Iterator>::Item>
+ 101..150 'for (e... }': Option<(i32, i32)>
+ 101..150 'for (e... }': ()
+ 101..150 'for (e... }': ()
+ 101..150 'for (e... }': ()
+ 101..150 'for (e... }': ()
+ 105..111 '(e, f)': (i32, i32)
+ 106..107 'e': i32
+ 109..110 'f': i32
+ 115..123 '[(0, 1)]': [(i32, i32); 1]
+ 116..122 '(0, 1)': (i32, i32)
+ 117..118 '0': i32
+ 120..121 '1': i32
+ 124..150 '{ ... }': ()
+ 138..139 'g': i32
+ 142..143 'e': i32
+ 156..203 'if let... }': ()
+ 159..174 'let [val] = [y]': bool
+ 163..168 '[val]': [&'? i32; 1]
+ 164..167 'val': &'? i32
+ 171..174 '[y]': [&'? i32; 1]
+ 172..173 'y': &'? i32
+ 175..203 '{ ... }': ()
+ 189..190 'h': &'? i32
+ 193..196 'val': &'? i32
+ 209..235 'if let...rue {}': ()
+ 212..232 'let x ... &true': bool
+ 216..224 'x @ true': &'? bool
+ 220..224 'true': bool
+ 220..224 'true': bool
+ 227..232 '&true': &'? bool
+ 228..232 'true': bool
+ 233..235 '{}': ()
+ 245..251 'lambda': impl Fn(u64, u64, i32) -> i32
+ 254..286 '|a: u6...b; c }': impl Fn(u64, u64, i32) -> i32
+ 255..256 'a': u64
+ 263..264 'b': u64
+ 266..267 'c': i32
+ 274..286 '{ a + b; c }': i32
+ 276..277 'a': u64
+ 276..281 'a + b': u64
+ 280..281 'b': u64
+ 283..284 'c': i32
+ 297..309 'ref ref_to_x': &'? &'? i32
+ 312..313 'x': &'? i32
+ 323..332 'mut mut_x': &'? i32
+ 335..336 'x': &'? i32
+ 346..366 'ref mu...f_to_x': &'? mut &'? i32
+ 369..370 'x': &'? i32
+ 380..381 'k': &'? mut &'? i32
+ 384..396 'mut_ref_to_x': &'? mut &'? i32
"#]],
);
}
@@ -380,7 +384,7 @@ fn infer_pattern_match_string_literal() {
fn infer_pattern_match_byte_string_literal() {
check_infer_with_mismatches(
r#"
- //- minicore: index
+ //- minicore: index, range
struct S;
impl<T, const N: usize> core::ops::Index<S> for [T; N] {
type Output = [u8];
@@ -395,7 +399,7 @@ fn infer_pattern_match_byte_string_literal() {
"#,
expect![[r#"
105..109 'self': &'? [T; N]
- 111..116 'index': {unknown}
+ 111..116 'index': RangeFull
157..180 '{ ... }': &'? [u8]
167..174 'loop {}': !
172..174 '{}': ()
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs
index c805f03044..4f1480c393 100644
--- a/crates/hir-ty/src/tests/regression.rs
+++ b/crates/hir-ty/src/tests/regression.rs
@@ -891,13 +891,14 @@ use core::ops::Deref;
struct BufWriter {}
-struct Mutex<T> {}
-struct MutexGuard<'a, T> {}
+struct Mutex<T>(T);
+struct MutexGuard<'a, T>(&'a T);
impl<T> Mutex<T> {
fn lock(&self) -> MutexGuard<'_, T> {}
}
impl<'a, T: 'a> Deref for MutexGuard<'a, T> {
type Target = T;
+ fn deref(&self) -> &Self::Target { loop {} }
}
fn flush(&self) {
let w: &Mutex<BufWriter>;
@@ -905,14 +906,18 @@ fn flush(&self) {
}
"#,
expect![[r#"
- 123..127 'self': &'? Mutex<T>
- 150..152 '{}': MutexGuard<'?, T>
- 234..238 'self': &'? {unknown}
- 240..290 '{ ...()); }': ()
- 250..251 'w': &'? Mutex<BufWriter>
- 276..287 '*(w.lock())': BufWriter
- 278..279 'w': &'? Mutex<BufWriter>
- 278..286 'w.lock()': MutexGuard<'?, BufWriter>
+ 129..133 'self': &'? Mutex<T>
+ 156..158 '{}': MutexGuard<'?, T>
+ 242..246 'self': &'? MutexGuard<'a, T>
+ 265..276 '{ loop {} }': &'? T
+ 267..274 'loop {}': !
+ 272..274 '{}': ()
+ 289..293 'self': &'? {unknown}
+ 295..345 '{ ...()); }': ()
+ 305..306 'w': &'? Mutex<BufWriter>
+ 331..342 '*(w.lock())': BufWriter
+ 333..334 'w': &'? Mutex<BufWriter>
+ 333..341 'w.lock()': MutexGuard<'?, BufWriter>
"#]],
);
}
@@ -2230,7 +2235,6 @@ async fn f<A, B, C>() -> Bar {}
"#,
expect![[r#"
64..66 '{}': ()
- 64..66 '{}': impl Future<Output = ()>
"#]],
);
}
@@ -2563,3 +2567,81 @@ fn main() {
"#,
);
}
+
+#[test]
+fn regression_21429() {
+ check_no_mismatches(
+ r#"
+trait DatabaseLike {
+ type ForeignKey: ForeignKeyLike<DB = Self>;
+}
+
+trait ForeignKeyLike {
+ type DB: DatabaseLike;
+
+ fn host_columns(&self, database: &Self::DB);
+}
+
+trait ColumnLike {
+ type DB: DatabaseLike;
+
+ fn foo() -> &&<<Self as ColumnLike>::DB as DatabaseLike>::ForeignKey {
+ loop {}
+ }
+
+ fn foreign_keys(&self, database: &Self::DB) {
+ let fk = Self::foo();
+ fk.host_columns(database);
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn issue_21006_generic_predicates_for_param_supertrait_cycle() {
+ check_no_mismatches(
+ r#"
+trait VCipherSuite {}
+
+trait CipherSuite
+where
+ OprfHash<Self>: Hash,
+{
+}
+
+type Bar<CS: CipherSuite> = <CS::Baz as VCipherSuite>::Hash;
+
+type OprfHash<CS: CipherSuite> = <CS::Baz as VCipherSuite>::Hash;
+
+impl<CS: CipherSuite> Foo<CS> {
+ fn seal() {}
+}
+ "#,
+ );
+}
+
+#[test]
+fn issue_21006_self_assoc_trait() {
+ check_types(
+ r#"
+trait Baz {
+ fn baz(&self);
+}
+
+trait Foo {
+ type Assoc;
+}
+
+trait Bar: Foo
+where
+ Self::Assoc: Baz,
+{
+ fn bar(v: Self::Assoc) {
+ let _ = v.baz();
+ // ^ ()
+ }
+}
+ "#,
+ );
+}
diff --git a/crates/hir-ty/src/tests/regression/new_solver.rs b/crates/hir-ty/src/tests/regression/new_solver.rs
index a4554673cd..f47a26d429 100644
--- a/crates/hir-ty/src/tests/regression/new_solver.rs
+++ b/crates/hir-ty/src/tests/regression/new_solver.rs
@@ -471,7 +471,76 @@ fn foo() {
244..246 '_x': {unknown}
249..257 'to_bytes': fn to_bytes() -> [u8; _]
249..259 'to_bytes()': [u8; _]
- 249..268 'to_byt..._vec()': {unknown}
+ 249..268 'to_byt..._vec()': Vec<<[u8; _] as Foo>::Item>
+ "#]],
+ );
+}
+
+#[test]
+fn regression_21315() {
+ check_infer(
+ r#"
+struct Consts;
+impl Consts { const MAX: usize = 0; }
+
+struct Between<const M: usize, const N: usize, T>(T);
+
+impl<const M: usize, T> Between<M, { Consts::MAX }, T> {
+ fn sep_once(self, _sep: &str, _other: Self) -> Self {
+ self
+ }
+}
+
+trait Parser: Sized {
+ fn at_least<const M: usize>(self) -> Between<M, { Consts::MAX }, Self> {
+ Between(self)
+ }
+ fn at_most<const N: usize>(self) -> Between<0, N, Self> {
+ Between(self)
+ }
+}
+
+impl Parser for char {}
+
+fn test_at_least() {
+ let num = '9'.at_least::<1>();
+ let _ver = num.sep_once(".", num);
+}
+
+fn test_at_most() {
+ let num = '9'.at_most::<1>();
+}
+ "#,
+ expect![[r#"
+ 48..49 '0': usize
+ 182..186 'self': Between<M, _, T>
+ 188..192 '_sep': &'? str
+ 200..206 '_other': Between<M, _, T>
+ 222..242 '{ ... }': Between<M, _, T>
+ 232..236 'self': Between<M, _, T>
+ 300..304 'self': Self
+ 343..372 '{ ... }': Between<M, _, Self>
+ 353..360 'Between': fn Between<M, _, Self>(Self) -> Between<M, _, Self>
+ 353..366 'Between(self)': Between<M, _, Self>
+ 361..365 'self': Self
+ 404..408 'self': Self
+ 433..462 '{ ... }': Between<0, N, Self>
+ 443..450 'Between': fn Between<0, N, Self>(Self) -> Between<0, N, Self>
+ 443..456 'Between(self)': Between<0, N, Self>
+ 451..455 'self': Self
+ 510..587 '{ ...um); }': ()
+ 520..523 'num': Between<1, _, char>
+ 526..529 ''9'': char
+ 526..545 ''9'.at...:<1>()': Between<1, _, char>
+ 555..559 '_ver': Between<1, _, char>
+ 562..565 'num': Between<1, _, char>
+ 562..584 'num.se..., num)': Between<1, _, char>
+ 575..578 '"."': &'static str
+ 580..583 'num': Between<1, _, char>
+ 607..644 '{ ...>(); }': ()
+ 617..620 'num': Between<0, 1, char>
+ 623..626 ''9'': char
+ 623..641 ''9'.at...:<1>()': Between<0, 1, char>
"#]],
);
}
@@ -750,3 +819,63 @@ fn main() {
"#]],
);
}
+
+#[test]
+fn regression_19339() {
+ check_infer(
+ r#"
+trait Bar {
+ type Baz;
+
+ fn baz(&self) -> Self::Baz;
+}
+
+trait Foo {
+ type Bar;
+
+ fn bar(&self) -> Self::Bar;
+}
+
+trait FooFactory {
+ type Output: Foo<Bar: Bar<Baz = u8>>;
+
+ fn foo(&self) -> Self::Output;
+
+ fn foo_rpit(&self) -> impl Foo<Bar: Bar<Baz = u8>>;
+}
+
+fn test1(foo: impl Foo<Bar: Bar<Baz = u8>>) {
+ let baz = foo.bar().baz();
+}
+
+fn test2<T: FooFactory>(factory: T) {
+ let baz = factory.foo().bar().baz();
+ let baz = factory.foo_rpit().bar().baz();
+}
+"#,
+ expect![[r#"
+ 39..43 'self': &'? Self
+ 101..105 'self': &'? Self
+ 198..202 'self': &'? Self
+ 239..243 'self': &'? Self
+ 290..293 'foo': impl Foo + ?Sized
+ 325..359 '{ ...z(); }': ()
+ 335..338 'baz': u8
+ 341..344 'foo': impl Foo + ?Sized
+ 341..350 'foo.bar()': impl Bar
+ 341..356 'foo.bar().baz()': u8
+ 385..392 'factory': T
+ 397..487 '{ ...z(); }': ()
+ 407..410 'baz': u8
+ 413..420 'factory': T
+ 413..426 'factory.foo()': <T as FooFactory>::Output
+ 413..432 'factor....bar()': <<T as FooFactory>::Output as Foo>::Bar
+ 413..438 'factor....baz()': u8
+ 448..451 'baz': u8
+ 454..461 'factory': T
+ 454..472 'factor...rpit()': impl Foo + Bar<Baz = u8> + ?Sized
+ 454..478 'factor....bar()': <impl Foo + Bar<Baz = u8> + ?Sized as Foo>::Bar
+ 454..484 'factor....baz()': u8
+ "#]],
+ );
+}
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs
index a9a5e96f75..98503452d3 100644
--- a/crates/hir-ty/src/tests/simple.rs
+++ b/crates/hir-ty/src/tests/simple.rs
@@ -64,20 +64,37 @@ fn type_alias_in_struct_lit() {
#[test]
fn infer_ranges() {
- check_types(
+ check_no_mismatches(
r#"
-//- minicore: range
+//- minicore: range, new_range
+
fn test() {
- let a = ..;
- let b = 1..;
- let c = ..2u32;
- let d = 1..2usize;
- let e = ..=10;
- let f = 'a'..='z';
-
- let t = (a, b, c, d, e, f);
- t;
-} //^ (RangeFull, RangeFrom<i32>, RangeTo<u32>, Range<usize>, RangeToInclusive<i32>, RangeInclusive<char>)
+ let _: core::ops::RangeFull = ..;
+ let _: core::ops::RangeFrom<i32> = 1..;
+ let _: core::ops::RangeTo<u32> = ..2u32;
+ let _: core::ops::Range<usize> = 1..2usize;
+ let _: core::ops::RangeToInclusive<i32> = ..=10;
+ let _: core::ops::RangeInclusive<char> = 'a'..='z';
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ranges_new_range() {
+ check_no_mismatches(
+ r#"
+//- minicore: range, new_range
+#![feature(new_range)]
+
+fn test() {
+ let _: core::ops::RangeFull = ..;
+ let _: core::range::RangeFrom<i32> = 1..;
+ let _: core::ops::RangeTo<u32> = ..2u32;
+ let _: core::range::Range<usize> = 1..2usize;
+ let _: core::range::RangeToInclusive<i32> = ..=10;
+ let _: core::range::RangeInclusive<char> = 'a'..='z';
+}
"#,
);
}
@@ -2139,7 +2156,6 @@ async fn main() {
"#,
expect![[r#"
16..193 '{ ...2 }; }': ()
- 16..193 '{ ...2 }; }': impl Future<Output = ()>
26..27 'x': i32
30..43 'unsafe { 92 }': i32
39..41 '92': i32
@@ -3983,3 +3999,60 @@ fn foo() {
"#]],
);
}
+
+#[test]
+fn naked_asm_returns_never() {
+ check_no_mismatches(
+ r#"
+//- minicore: asm
+
+#[unsafe(naked)]
+extern "C" fn foo() -> ! {
+ core::arch::naked_asm!("");
+}
+ "#,
+ );
+}
+
+#[test]
+fn regression_21478() {
+ check_infer(
+ r#"
+//- minicore: unsize, coerce_unsized
+struct LazyLock<T>(T);
+
+impl<T> LazyLock<T> {
+ const fn new() -> Self {
+ loop {}
+ }
+
+ fn force(this: &Self) -> &T {
+ loop {}
+ }
+}
+
+static VALUES_LAZY_LOCK: LazyLock<[u32; { 0 }]> = LazyLock::new();
+
+fn foo() {
+ let _ = LazyLock::force(&VALUES_LAZY_LOCK);
+}
+ "#,
+ expect![[r#"
+ 73..96 '{ ... }': LazyLock<T>
+ 83..90 'loop {}': !
+ 88..90 '{}': ()
+ 111..115 'this': &'? LazyLock<T>
+ 130..153 '{ ... }': &'? T
+ 140..147 'loop {}': !
+ 145..147 '{}': ()
+ 207..220 'LazyLock::new': fn new<[u32; _]>() -> LazyLock<[u32; _]>
+ 207..222 'LazyLock::new()': LazyLock<[u32; _]>
+ 234..285 '{ ...CK); }': ()
+ 244..245 '_': &'? [u32; _]
+ 248..263 'LazyLock::force': fn force<[u32; _]>(&'? LazyLock<[u32; _]>) -> &'? [u32; _]
+ 248..282 'LazyLo..._LOCK)': &'? [u32; _]
+ 264..281 '&VALUE...Y_LOCK': &'? LazyLock<[u32; _]>
+ 265..281 'VALUES...Y_LOCK': LazyLock<[u32; _]>
+ "#]],
+ );
+}
diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs
index 38591f486e..390553c0d7 100644
--- a/crates/hir-ty/src/tests/traits.rs
+++ b/crates/hir-ty/src/tests/traits.rs
@@ -429,7 +429,7 @@ fn associated_type_shorthand_from_method_bound() {
trait Iterable {
type Item;
}
-struct S<T>;
+struct S<T>(T);
impl<T> S<T> {
fn foo(self) -> T::Item where T: Iterable { loop {} }
}
@@ -1103,40 +1103,50 @@ fn test() {
fn argument_impl_trait_type_args_2() {
check_infer_with_mismatches(
r#"
-//- minicore: sized
+//- minicore: sized, phantom_data
+use core::marker::PhantomData;
+
trait Trait {}
struct S;
impl Trait for S {}
-struct F<T>;
+struct F<T>(PhantomData<T>);
impl<T> F<T> {
fn foo<U>(self, x: impl Trait) -> (T, U) { loop {} }
}
fn test() {
- F.foo(S);
- F::<u32>.foo(S);
- F::<u32>.foo::<i32>(S);
- F::<u32>.foo::<i32, u32>(S); // extraneous argument should be ignored
+ F(PhantomData).foo(S);
+ F::<u32>(PhantomData).foo(S);
+ F::<u32>(PhantomData).foo::<i32>(S);
+ F::<u32>(PhantomData).foo::<i32, u32>(S); // extraneous argument should be ignored
}"#,
expect![[r#"
- 87..91 'self': F<T>
- 93..94 'x': impl Trait
- 118..129 '{ loop {} }': (T, U)
- 120..127 'loop {}': !
- 125..127 '{}': ()
- 143..283 '{ ...ored }': ()
- 149..150 'F': F<{unknown}>
- 149..157 'F.foo(S)': ({unknown}, {unknown})
- 155..156 'S': S
- 163..171 'F::<u32>': F<u32>
- 163..178 'F::<u32>.foo(S)': (u32, {unknown})
- 176..177 'S': S
- 184..192 'F::<u32>': F<u32>
- 184..206 'F::<u3...32>(S)': (u32, i32)
- 204..205 'S': S
- 212..220 'F::<u32>': F<u32>
- 212..239 'F::<u3...32>(S)': (u32, i32)
- 237..238 'S': S
+ 135..139 'self': F<T>
+ 141..142 'x': impl Trait
+ 166..177 '{ loop {} }': (T, U)
+ 168..175 'loop {}': !
+ 173..175 '{}': ()
+ 191..383 '{ ...ored }': ()
+ 197..198 'F': fn F<{unknown}>(PhantomData<{unknown}>) -> F<{unknown}>
+ 197..211 'F(PhantomData)': F<{unknown}>
+ 197..218 'F(Phan...foo(S)': ({unknown}, {unknown})
+ 199..210 'PhantomData': PhantomData<{unknown}>
+ 216..217 'S': S
+ 224..232 'F::<u32>': fn F<u32>(PhantomData<u32>) -> F<u32>
+ 224..245 'F::<u3...mData)': F<u32>
+ 224..252 'F::<u3...foo(S)': (u32, {unknown})
+ 233..244 'PhantomData': PhantomData<u32>
+ 250..251 'S': S
+ 258..266 'F::<u32>': fn F<u32>(PhantomData<u32>) -> F<u32>
+ 258..279 'F::<u3...mData)': F<u32>
+ 258..293 'F::<u3...32>(S)': (u32, i32)
+ 267..278 'PhantomData': PhantomData<u32>
+ 291..292 'S': S
+ 299..307 'F::<u32>': fn F<u32>(PhantomData<u32>) -> F<u32>
+ 299..320 'F::<u3...mData)': F<u32>
+ 299..339 'F::<u3...32>(S)': (u32, i32)
+ 308..319 'PhantomData': PhantomData<u32>
+ 337..338 'S': S
"#]],
);
}
@@ -4012,7 +4022,7 @@ fn f<F: Foo>() {
fn dyn_map() {
check_types(
r#"
-pub struct Key<K, V, P = (K, V)> {}
+pub struct Key<K, V, P = (K, V)>(K, V, P);
pub trait Policy {
type K;
@@ -4024,7 +4034,7 @@ impl<K, V> Policy for (K, V) {
type V = V;
}
-pub struct KeyMap<KEY> {}
+pub struct KeyMap<KEY>(KEY);
impl<P: Policy> KeyMap<Key<P::K, P::V, P>> {
pub fn get(&self, key: &P::K) -> P::V {
@@ -4859,7 +4869,6 @@ async fn baz<T: AsyncFnOnce(u32) -> i32>(c: T) {
expect![[r#"
37..38 'a': T
43..83 '{ ...ait; }': ()
- 43..83 '{ ...ait; }': impl Future<Output = ()>
53..57 'fut1': <T as AsyncFnMut<(u32,)>>::CallRefFuture<'?>
60..61 'a': T
60..64 'a(0)': <T as AsyncFnMut<(u32,)>>::CallRefFuture<'?>
@@ -4868,7 +4877,6 @@ async fn baz<T: AsyncFnOnce(u32) -> i32>(c: T) {
70..80 'fut1.await': i32
124..129 'mut b': T
134..174 '{ ...ait; }': ()
- 134..174 '{ ...ait; }': impl Future<Output = ()>
144..148 'fut2': <T as AsyncFnMut<(u32,)>>::CallRefFuture<'?>
151..152 'b': T
151..155 'b(0)': <T as AsyncFnMut<(u32,)>>::CallRefFuture<'?>
@@ -4877,7 +4885,6 @@ async fn baz<T: AsyncFnOnce(u32) -> i32>(c: T) {
161..171 'fut2.await': i32
216..217 'c': T
222..262 '{ ...ait; }': ()
- 222..262 '{ ...ait; }': impl Future<Output = ()>
232..236 'fut3': <T as AsyncFnOnce<(u32,)>>::CallOnceFuture
239..240 'c': T
239..243 'c(0)': <T as AsyncFnOnce<(u32,)>>::CallOnceFuture
@@ -5023,7 +5030,7 @@ fn main() {
278..280 '{}': ()
290..291 '_': Box<dyn Iterator<Item = &'? [u8]> + '?>
294..298 'iter': Box<dyn Iterator<Item = &'? [u8]> + 'static>
- 294..310 'iter.i...iter()': Box<dyn Iterator<Item = &'? [u8]> + 'static>
+ 294..310 'iter.i...iter()': Box<dyn Iterator<Item = &'? [u8]> + '?>
152..156 'self': &'? mut Box<I>
177..208 '{ ... }': Option<<I as Iterator>::Item>
191..198 'loop {}': !
diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs
index cba1b39e52..cfb95e07c3 100644
--- a/crates/hir/src/attrs.rs
+++ b/crates/hir/src/attrs.rs
@@ -3,7 +3,8 @@
use cfg::CfgExpr;
use either::Either;
use hir_def::{
- AssocItemId, AttrDefId, FieldId, LifetimeParamId, ModuleDefId, TypeOrConstParamId,
+ AssocItemId, AttrDefId, FieldId, GenericDefId, ItemContainerId, LifetimeParamId, ModuleDefId,
+ TraitId, TypeOrConstParamId,
attrs::{AttrFlags, Docs, IsInnerDoc},
expr_store::path::Path,
item_scope::ItemInNs,
@@ -22,6 +23,7 @@ use hir_ty::{
next_solver::{DbInterner, TypingMode, infer::DbInternerInferExt},
};
use intern::Symbol;
+use stdx::never;
use crate::{
Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
@@ -357,13 +359,46 @@ fn resolve_assoc_or_field(
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
let path = Path::from_known_path_with_no_generic(path);
- // FIXME: This does not handle `Self` on trait definitions, which we should resolve to the
- // trait itself.
let base_def = resolver.resolve_path_in_type_ns_fully(db, &path)?;
+ let handle_trait = |id: TraitId| {
+ // Doc paths in this context may only resolve to an item of this trait
+ // (i.e. no items of its supertraits), so we need to handle them here
+ // independently of others.
+ id.trait_items(db).items.iter().find(|it| it.0 == name).map(|(_, assoc_id)| {
+ let def = match *assoc_id {
+ AssocItemId::FunctionId(it) => ModuleDef::Function(it.into()),
+ AssocItemId::ConstId(it) => ModuleDef::Const(it.into()),
+ AssocItemId::TypeAliasId(it) => ModuleDef::TypeAlias(it.into()),
+ };
+ DocLinkDef::ModuleDef(def)
+ })
+ };
let ty = match base_def {
TypeNs::SelfType(id) => Impl::from(id).self_ty(db),
- TypeNs::GenericParam(_) => {
+ TypeNs::GenericParam(param) => {
+ let generic_params = db.generic_params(param.parent());
+ if generic_params[param.local_id()].is_trait_self() {
+ // `Self::assoc` in traits should refer to the trait itself.
+ let parent_trait = |container| match container {
+ ItemContainerId::TraitId(trait_) => handle_trait(trait_),
+ _ => {
+ never!("container {container:?} should be a trait");
+ None
+ }
+ };
+ return match param.parent() {
+ GenericDefId::TraitId(trait_) => handle_trait(trait_),
+ GenericDefId::ConstId(it) => parent_trait(it.loc(db).container),
+ GenericDefId::FunctionId(it) => parent_trait(it.loc(db).container),
+ GenericDefId::TypeAliasId(it) => parent_trait(it.loc(db).container),
+ _ => {
+ never!("type param {param:?} should belong to a trait");
+ None
+ }
+ };
+ }
+
// Even if this generic parameter has some trait bounds, rustdoc doesn't
// resolve `name` to trait items.
return None;
@@ -384,19 +419,7 @@ fn resolve_assoc_or_field(
alias.ty(db)
}
TypeNs::BuiltinType(id) => BuiltinType::from(id).ty(db),
- TypeNs::TraitId(id) => {
- // Doc paths in this context may only resolve to an item of this trait
- // (i.e. no items of its supertraits), so we need to handle them here
- // independently of others.
- return id.trait_items(db).items.iter().find(|it| it.0 == name).map(|(_, assoc_id)| {
- let def = match *assoc_id {
- AssocItemId::FunctionId(it) => ModuleDef::Function(it.into()),
- AssocItemId::ConstId(it) => ModuleDef::Const(it.into()),
- AssocItemId::TypeAliasId(it) => ModuleDef::TypeAlias(it.into()),
- };
- DocLinkDef::ModuleDef(def)
- });
- }
+ TypeNs::TraitId(id) => return handle_trait(id),
TypeNs::ModuleId(_) => {
return None;
}
@@ -414,7 +437,14 @@ fn resolve_assoc_or_field(
let variant_def = match ty.as_adt()? {
Adt::Struct(it) => it.into(),
Adt::Union(it) => it.into(),
- Adt::Enum(_) => return None,
+ Adt::Enum(enum_) => {
+ // Can happen on `Self::Variant` (otherwise would be fully resolved by the resolver).
+ return enum_
+ .id
+ .enum_variants(db)
+ .variant(&name)
+ .map(|variant| DocLinkDef::ModuleDef(ModuleDef::Variant(variant.into())));
+ }
};
resolve_field(db, variant_def, name, ns)
}
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 78be5a7e8f..252d71fb80 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -610,6 +610,23 @@ impl Module {
res
}
+ pub fn modules_in_scope(&self, db: &dyn HirDatabase, pub_only: bool) -> Vec<(Name, Module)> {
+ let def_map = self.id.def_map(db);
+ let scope = &def_map[self.id].scope;
+
+ let mut res = Vec::new();
+
+ for (name, item) in scope.types() {
+ if let ModuleDefId::ModuleId(m) = item.def
+ && (!pub_only || item.vis == Visibility::Public)
+ {
+ res.push((name.clone(), Module { id: m }));
+ }
+ }
+
+ res
+ }
+
/// Returns a `ModuleScope`: a set of items, visible in this module.
pub fn scope(
self,
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index f4c42537de..4bc757da44 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -13,7 +13,7 @@ use std::{
use base_db::FxIndexSet;
use either::Either;
use hir_def::{
- DefWithBodyId, MacroId, StructId, TraitId, VariantId,
+ BuiltinDeriveImplId, DefWithBodyId, HasModule, MacroId, StructId, TraitId, VariantId,
attrs::parse_extra_crate_attrs,
expr_store::{Body, ExprOrPatSource, HygieneId, path::Path},
hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
@@ -622,18 +622,34 @@ impl<'db> SemanticsImpl<'db> {
Some(
calls
.into_iter()
- .map(|call| macro_call_to_macro_id(ctx, call?).map(|id| Macro { id }))
+ .map(|call| {
+ let call = call?;
+ match call {
+ Either::Left(call) => {
+ macro_call_to_macro_id(ctx, call).map(|id| Macro { id })
+ }
+ Either::Right(call) => {
+ let call = call.loc(self.db);
+ let krate = call.krate(self.db);
+ let lang_items = hir_def::lang_item::lang_items(self.db, krate);
+ call.trait_.derive_macro(lang_items).map(|id| Macro { id })
+ }
+ }
+ })
.collect(),
)
})
}
- pub fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<ExpandResult<SyntaxNode>>> {
+ pub fn expand_derive_macro(
+ &self,
+ attr: &ast::Attr,
+ ) -> Option<Vec<Option<ExpandResult<SyntaxNode>>>> {
let res: Vec<_> = self
.derive_macro_calls(attr)?
.into_iter()
- .flat_map(|call| {
- let file_id = call?;
+ .map(|call| {
+ let file_id = call?.left()?;
let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
let root_node = value.0.syntax_node();
self.cache(root_node.clone(), file_id.into());
@@ -643,7 +659,10 @@ impl<'db> SemanticsImpl<'db> {
Some(res)
}
- fn derive_macro_calls(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroCallId>>> {
+ fn derive_macro_calls(
+ &self,
+ attr: &ast::Attr,
+ ) -> Option<Vec<Option<Either<MacroCallId, BuiltinDeriveImplId>>>> {
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
let file_id = self.find_file(adt.syntax()).file_id;
let adt = InFile::new(file_id, &adt);
@@ -690,8 +709,9 @@ impl<'db> SemanticsImpl<'db> {
.derive_helpers_in_scope(InFile::new(sa.file_id, id))?
.iter()
.filter(|&(name, _, _)| *name == attr_name)
- .map(|&(_, macro_, call)| (macro_.into(), call))
+ .filter_map(|&(_, macro_, call)| Some((macro_.into(), call.left()?)))
.collect();
+ // FIXME: We filter our builtin derive "fake" expansions, is this correct? Should we still expose them somehow?
res.is_empty().not().then_some(res)
}
@@ -1338,6 +1358,7 @@ impl<'db> SemanticsImpl<'db> {
// FIXME: We need to call `f` for all of them as well though!
process_expansion_for_token(ctx, &mut stack, derive_attr);
for derive in derives.into_iter().flatten() {
+ let Either::Left(derive) = derive else { continue };
process_expansion_for_token(ctx, &mut stack, derive);
}
}
@@ -1467,11 +1488,12 @@ impl<'db> SemanticsImpl<'db> {
for (.., derive) in
helpers.iter().filter(|(helper, ..)| *helper == attr_name)
{
+ let Either::Left(derive) = *derive else { continue };
// as there may be multiple derives registering the same helper
// name, we gotta make sure to call this for all of them!
// FIXME: We need to call `f` for all of them as well though!
res = res
- .or(process_expansion_for_token(ctx, &mut stack, *derive));
+ .or(process_expansion_for_token(ctx, &mut stack, derive));
}
res
})
@@ -1981,6 +2003,15 @@ impl<'db> SemanticsImpl<'db> {
.unwrap_or_default()
}
+ pub fn record_literal_matched_fields(
+ &self,
+ literal: &ast::RecordExpr,
+ ) -> Vec<(Field, Type<'db>)> {
+ self.analyze(literal.syntax())
+ .and_then(|it| it.record_literal_matched_fields(self.db, literal))
+ .unwrap_or_default()
+ }
+
pub fn record_pattern_missing_fields(
&self,
pattern: &ast::RecordPat,
@@ -1990,6 +2021,15 @@ impl<'db> SemanticsImpl<'db> {
.unwrap_or_default()
}
+ pub fn record_pattern_matched_fields(
+ &self,
+ pattern: &ast::RecordPat,
+ ) -> Vec<(Field, Type<'db>)> {
+ self.analyze(pattern.syntax())
+ .and_then(|it| it.record_pattern_matched_fields(self.db, pattern))
+ .unwrap_or_default()
+ }
+
fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
let mut ctx = SourceToDefCtx { db: self.db, cache: &mut self.s2d_cache.borrow_mut() };
f(&mut ctx)
diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs
index 2574059927..d222c3dc7e 100644
--- a/crates/hir/src/semantics/source_to_def.rs
+++ b/crates/hir/src/semantics/source_to_def.rs
@@ -87,10 +87,10 @@
use either::Either;
use hir_def::{
- AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId,
- ExternCrateId, FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId,
- Lookup, MacroId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId,
- UseId, VariantId,
+ AdtId, BlockId, BuiltinDeriveImplId, ConstId, ConstParamId, DefWithBodyId, EnumId,
+ EnumVariantId, ExternBlockId, ExternCrateId, FieldId, FunctionId, GenericDefId, GenericParamId,
+ ImplId, LifetimeParamId, Lookup, MacroId, ModuleId, StaticId, StructId, TraitId, TypeAliasId,
+ TypeParamId, UnionId, UseId, VariantId,
dyn_map::{
DynMap,
keys::{self, Key},
@@ -394,7 +394,7 @@ impl SourceToDefCtx<'_, '_> {
&mut self,
item: InFile<&ast::Adt>,
src: InFile<ast::Attr>,
- ) -> Option<(AttrId, MacroCallId, &[Option<MacroCallId>])> {
+ ) -> Option<(AttrId, MacroCallId, &[Option<Either<MacroCallId, BuiltinDeriveImplId>>])> {
let map = self.dyn_map(item)?;
map[keys::DERIVE_MACRO_CALL]
.get(&AstPtr::new(&src.value))
@@ -409,8 +409,11 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn derive_macro_calls<'slf>(
&'slf mut self,
adt: InFile<&ast::Adt>,
- ) -> Option<impl Iterator<Item = (AttrId, MacroCallId, &'slf [Option<MacroCallId>])> + use<'slf>>
- {
+ ) -> Option<
+ impl Iterator<
+ Item = (AttrId, MacroCallId, &'slf [Option<Either<MacroCallId, BuiltinDeriveImplId>>]),
+ > + use<'slf>,
+ > {
self.dyn_map(adt).as_ref().map(|&map| {
let dyn_map = &map[keys::DERIVE_MACRO_CALL];
adt.value
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 6ba7a42c19..c6f2d151f5 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -17,7 +17,7 @@ use hir_def::{
path::Path,
scope::{ExprScopes, ScopeId},
},
- hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
+ hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat, PatId},
lang_item::LangItems,
nameres::MacroSubNs,
resolver::{HasResolver, Resolver, TypeNs, ValueNs, resolver_for_scope},
@@ -44,6 +44,7 @@ use hir_ty::{
};
use intern::sym;
use itertools::Itertools;
+use rustc_hash::FxHashSet;
use rustc_type_ir::{
AliasTyKind,
inherent::{AdtDef, IntoKind, Ty as _},
@@ -531,18 +532,12 @@ impl<'db> SourceAnalyzer<'db> {
db: &'db dyn HirDatabase,
range_pat: &ast::RangePat,
) -> Option<StructId> {
- let path: ModPath = match (range_pat.op_kind()?, range_pat.start(), range_pat.end()) {
- (RangeOp::Exclusive, None, Some(_)) => path![core::ops::RangeTo],
- (RangeOp::Exclusive, Some(_), None) => path![core::ops::RangeFrom],
- (RangeOp::Exclusive, Some(_), Some(_)) => path![core::ops::Range],
- (RangeOp::Inclusive, None, Some(_)) => path![core::ops::RangeToInclusive],
- (RangeOp::Inclusive, Some(_), Some(_)) => path![core::ops::RangeInclusive],
-
- (RangeOp::Exclusive, None, None) => return None,
- (RangeOp::Inclusive, None, None) => return None,
- (RangeOp::Inclusive, Some(_), None) => return None,
- };
- self.resolver.resolve_known_struct(db, &path)
+ self.resolve_range_struct(
+ db,
+ range_pat.op_kind()?,
+ range_pat.start().is_some(),
+ range_pat.end().is_some(),
+ )
}
pub(crate) fn resolve_range_expr(
@@ -550,19 +545,59 @@ impl<'db> SourceAnalyzer<'db> {
db: &'db dyn HirDatabase,
range_expr: &ast::RangeExpr,
) -> Option<StructId> {
- let path: ModPath = match (range_expr.op_kind()?, range_expr.start(), range_expr.end()) {
- (RangeOp::Exclusive, None, None) => path![core::ops::RangeFull],
- (RangeOp::Exclusive, None, Some(_)) => path![core::ops::RangeTo],
- (RangeOp::Exclusive, Some(_), None) => path![core::ops::RangeFrom],
- (RangeOp::Exclusive, Some(_), Some(_)) => path![core::ops::Range],
- (RangeOp::Inclusive, None, Some(_)) => path![core::ops::RangeToInclusive],
- (RangeOp::Inclusive, Some(_), Some(_)) => path![core::ops::RangeInclusive],
+ self.resolve_range_struct(
+ db,
+ range_expr.op_kind()?,
+ range_expr.start().is_some(),
+ range_expr.end().is_some(),
+ )
+ }
+ fn resolve_range_struct(
+ &self,
+ db: &'db dyn HirDatabase,
+ op_kind: RangeOp,
+ has_start: bool,
+ has_end: bool,
+ ) -> Option<StructId> {
+ let has_new_range =
+ self.resolver.top_level_def_map().is_unstable_feature_enabled(&sym::new_range);
+ let lang_items = self.lang_items(db);
+ match (op_kind, has_start, has_end) {
+ (RangeOp::Exclusive, false, false) => lang_items.RangeFull,
+ (RangeOp::Exclusive, false, true) => lang_items.RangeTo,
+ (RangeOp::Exclusive, true, false) => {
+ if has_new_range {
+ lang_items.RangeFromCopy
+ } else {
+ lang_items.RangeFrom
+ }
+ }
+ (RangeOp::Exclusive, true, true) => {
+ if has_new_range {
+ lang_items.RangeCopy
+ } else {
+ lang_items.Range
+ }
+ }
+ (RangeOp::Inclusive, false, true) => {
+ if has_new_range {
+ lang_items.RangeToInclusiveCopy
+ } else {
+ lang_items.RangeToInclusive
+ }
+ }
+ (RangeOp::Inclusive, true, true) => {
+ if has_new_range {
+ lang_items.RangeInclusiveCopy
+ } else {
+ lang_items.RangeInclusiveStruct
+ }
+ }
// [E0586] inclusive ranges must be bounded at the end
- (RangeOp::Inclusive, None, None) => return None,
- (RangeOp::Inclusive, Some(_), None) => return None,
- };
- self.resolver.resolve_known_struct(db, &path)
+ (RangeOp::Inclusive, false, false) => None,
+ (RangeOp::Inclusive, true, false) => None,
+ }
}
pub(crate) fn resolve_await_to_poll(
@@ -1241,21 +1276,31 @@ impl<'db> SourceAnalyzer<'db> {
let body = self.store()?;
let infer = self.infer()?;
- let expr_id = self.expr_id(literal.clone().into())?;
- let substs = infer.expr_or_pat_ty(expr_id).as_adt()?.1;
-
- let (variant, missing_fields, _exhaustive) = match expr_id {
- ExprOrPatId::ExprId(expr_id) => {
- record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?
- }
- ExprOrPatId::PatId(pat_id) => {
- record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?
- }
- };
+ let expr_id = self.expr_id(literal.clone().into())?.as_expr()?;
+ let substs = infer.expr_ty(expr_id).as_adt()?.1;
+ let (variant, missing_fields) =
+ record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
let res = self.missing_fields(db, substs, variant, missing_fields);
Some(res)
}
+ pub(crate) fn record_literal_matched_fields(
+ &self,
+ db: &'db dyn HirDatabase,
+ literal: &ast::RecordExpr,
+ ) -> Option<Vec<(Field, Type<'db>)>> {
+ let body = self.store()?;
+ let infer = self.infer()?;
+
+ let expr_id = self.expr_id(literal.clone().into())?.as_expr()?;
+ let substs = infer.expr_ty(expr_id).as_adt()?.1;
+ let (variant, matched_fields) =
+ record_literal_matched_fields(db, infer, expr_id, &body[expr_id])?;
+
+ let res = self.missing_fields(db, substs, variant, matched_fields);
+ Some(res)
+ }
+
pub(crate) fn record_pattern_missing_fields(
&self,
db: &'db dyn HirDatabase,
@@ -1267,12 +1312,29 @@ impl<'db> SourceAnalyzer<'db> {
let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
let substs = infer.pat_ty(pat_id).as_adt()?.1;
- let (variant, missing_fields, _exhaustive) =
+ let (variant, missing_fields) =
record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
let res = self.missing_fields(db, substs, variant, missing_fields);
Some(res)
}
+ pub(crate) fn record_pattern_matched_fields(
+ &self,
+ db: &'db dyn HirDatabase,
+ pattern: &ast::RecordPat,
+ ) -> Option<Vec<(Field, Type<'db>)>> {
+ let body = self.store()?;
+ let infer = self.infer()?;
+
+ let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
+ let substs = infer.pat_ty(pat_id).as_adt()?.1;
+
+ let (variant, matched_fields) =
+ record_pattern_matched_fields(db, infer, pat_id, &body[pat_id])?;
+ let res = self.missing_fields(db, substs, variant, matched_fields);
+ Some(res)
+ }
+
fn missing_fields(
&self,
db: &'db dyn HirDatabase,
@@ -1810,3 +1872,67 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H
let ctx = span_map.span_at(name.value.text_range().start()).ctx;
HygieneId::new(ctx.opaque_and_semiopaque(db))
}
+
+fn record_literal_matched_fields(
+ db: &dyn HirDatabase,
+ infer: &InferenceResult,
+ id: ExprId,
+ expr: &Expr,
+) -> Option<(VariantId, Vec<LocalFieldId>)> {
+ let (fields, _spread) = match expr {
+ Expr::RecordLit { fields, spread, .. } => (fields, spread),
+ _ => return None,
+ };
+
+ let variant_def = infer.variant_resolution_for_expr(id)?;
+ if let VariantId::UnionId(_) = variant_def {
+ return None;
+ }
+
+ let variant_data = variant_def.fields(db);
+
+ let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+ // suggest fields if:
+ // - not in code
+ let matched_fields: Vec<LocalFieldId> = variant_data
+ .fields()
+ .iter()
+ .filter_map(|(f, d)| (!specified_fields.contains(&d.name)).then_some(f))
+ .collect();
+ if matched_fields.is_empty() {
+ return None;
+ }
+ Some((variant_def, matched_fields))
+}
+
+fn record_pattern_matched_fields(
+ db: &dyn HirDatabase,
+ infer: &InferenceResult,
+ id: PatId,
+ pat: &Pat,
+) -> Option<(VariantId, Vec<LocalFieldId>)> {
+ let (fields, _ellipsis) = match pat {
+ Pat::Record { path: _, args, ellipsis } => (args, *ellipsis),
+ _ => return None,
+ };
+
+ let variant_def = infer.variant_resolution_for_pat(id)?;
+ if let VariantId::UnionId(_) = variant_def {
+ return None;
+ }
+
+ let variant_data = variant_def.fields(db);
+
+ let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+ // suggest fields if:
+ // - not in code
+ let matched_fields: Vec<LocalFieldId> = variant_data
+ .fields()
+ .iter()
+ .filter_map(|(f, d)| if !specified_fields.contains(&d.name) { Some(f) } else { None })
+ .collect();
+ if matched_fields.is_empty() {
+ return None;
+ }
+ Some((variant_def, matched_fields))
+}
diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs
index 073142670d..c088f3aa0c 100644
--- a/crates/hir/src/symbols.rs
+++ b/crates/hir/src/symbols.rs
@@ -5,10 +5,11 @@ use std::marker::PhantomData;
use base_db::FxIndexSet;
use either::Either;
use hir_def::{
- AdtId, AssocItemId, Complete, DefWithBodyId, ExternCrateId, HasModule, ImplId, Lookup, MacroId,
- ModuleDefId, ModuleId, TraitId,
+ AdtId, AssocItemId, AstIdLoc, Complete, DefWithBodyId, ExternCrateId, HasModule, ImplId,
+ Lookup, MacroId, ModuleDefId, ModuleId, TraitId,
db::DefDatabase,
item_scope::{ImportId, ImportOrExternCrate, ImportOrGlob},
+ nameres::crate_def_map,
per_ns::Item,
src::{HasChildSource, HasSource},
visibility::{Visibility, VisibilityExplicitness},
@@ -22,7 +23,7 @@ use intern::Symbol;
use rustc_hash::FxHashMap;
use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, ToSmolStr, ast::HasName};
-use crate::{HasCrate, Module, ModuleDef, Semantics};
+use crate::{Crate, HasCrate, Module, ModuleDef, Semantics};
/// The actual data that is stored in the index. It should be as compact as
/// possible.
@@ -40,14 +41,14 @@ pub struct FileSymbol<'db> {
_marker: PhantomData<&'db ()>,
}
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct DeclarationLocation {
/// The file id for both the `ptr` and `name_ptr`.
pub hir_file_id: HirFileId,
/// This points to the whole syntax node of the declaration.
pub ptr: SyntaxNodePtr,
/// This points to the [`syntax::ast::Name`] identifier of the declaration.
- pub name_ptr: AstPtr<Either<syntax::ast::Name, syntax::ast::NameRef>>,
+ pub name_ptr: Option<AstPtr<Either<syntax::ast::Name, syntax::ast::NameRef>>>,
}
impl DeclarationLocation {
@@ -108,6 +109,51 @@ impl<'a> SymbolCollector<'a> {
}
}
+ /// Push a symbol for a crate's root module.
+ /// This allows crate roots to appear in the symbol index for queries like `::` or `::foo`.
+ pub fn push_crate_root(&mut self, krate: Crate) {
+ let Some(display_name) = krate.display_name(self.db) else { return };
+ let crate_name = display_name.crate_name();
+ let canonical_name = display_name.canonical_name();
+
+ let def_map = crate_def_map(self.db, krate.into());
+ let module_data = &def_map[def_map.crate_root(self.db)];
+
+ let definition = module_data.origin.definition_source(self.db);
+ let hir_file_id = definition.file_id;
+ let syntax_node = definition.value.node();
+ let ptr = SyntaxNodePtr::new(&syntax_node);
+
+ let loc = DeclarationLocation { hir_file_id, ptr, name_ptr: None };
+ let root_module = krate.root_module(self.db);
+
+ self.symbols.insert(FileSymbol {
+ name: crate_name.symbol().clone(),
+ def: ModuleDef::Module(root_module),
+ loc,
+ container_name: None,
+ is_alias: false,
+ is_assoc: false,
+ is_import: false,
+ do_not_complete: Complete::Yes,
+ _marker: PhantomData,
+ });
+
+ if canonical_name != crate_name.symbol() {
+ self.symbols.insert(FileSymbol {
+ name: canonical_name.clone(),
+ def: ModuleDef::Module(root_module),
+ loc,
+ container_name: None,
+ is_alias: false,
+ is_assoc: false,
+ is_import: false,
+ do_not_complete: Complete::Yes,
+ _marker: PhantomData,
+ });
+ }
+ }
+
pub fn finish(self) -> Box<[FileSymbol<'a>]> {
self.symbols.into_iter().collect()
}
@@ -123,6 +169,7 @@ impl<'a> SymbolCollector<'a> {
fn collect_from_module(&mut self, module_id: ModuleId) {
let collect_pub_only = self.collect_pub_only;
+ let is_block_module = module_id.is_block_module(self.db);
let push_decl = |this: &mut Self, def: ModuleDefId, name, vis| {
if collect_pub_only && vis != Visibility::Public {
return;
@@ -194,6 +241,10 @@ impl<'a> SymbolCollector<'a> {
let source = import_child_source_cache
.entry(i.use_)
.or_insert_with(|| i.use_.child_source(this.db));
+ if is_block_module && source.file_id.is_macro() {
+ // Macros tend to generate a lot of imports, the user really won't care about them
+ return;
+ }
let Some(use_tree_src) = source.value.get(i.idx) else { return };
let rename = use_tree_src.rename().and_then(|rename| rename.name());
let name_syntax = match rename {
@@ -209,7 +260,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(use_tree_src.syntax()),
- name_ptr: AstPtr::new(&name_syntax),
+ name_ptr: Some(AstPtr::new(&name_syntax)),
};
this.symbols.insert(FileSymbol {
name: name.symbol().clone(),
@@ -230,6 +281,12 @@ impl<'a> SymbolCollector<'a> {
return;
}
let loc = i.lookup(this.db);
+ if is_block_module && loc.ast_id().file_id.is_macro() {
+ // Macros (especially derives) tend to generate renamed extern crate items,
+ // the user really won't care about them
+ return;
+ }
+
let source = loc.source(this.db);
let rename = source.value.rename().and_then(|rename| rename.name());
@@ -244,7 +301,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(source.value.syntax()),
- name_ptr: AstPtr::new(&name_syntax),
+ name_ptr: Some(AstPtr::new(&name_syntax)),
};
this.symbols.insert(FileSymbol {
name: name.symbol().clone(),
@@ -409,10 +466,10 @@ impl<'a> SymbolCollector<'a> {
let source = loc.source(self.db);
let Some(name_node) = source.value.name() else { return Complete::Yes };
let def = ModuleDef::from(id.into());
- let dec_loc = DeclarationLocation {
+ let loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(source.value.syntax()),
- name_ptr: AstPtr::new(&name_node).wrap_left(),
+ name_ptr: Some(AstPtr::new(&name_node).wrap_left()),
};
let mut do_not_complete = Complete::Yes;
@@ -427,7 +484,7 @@ impl<'a> SymbolCollector<'a> {
self.symbols.insert(FileSymbol {
name: alias.clone(),
def,
- loc: dec_loc.clone(),
+ loc,
container_name: self.current_container_name.clone(),
is_alias: true,
is_assoc,
@@ -442,7 +499,7 @@ impl<'a> SymbolCollector<'a> {
name: name.symbol().clone(),
def,
container_name: self.current_container_name.clone(),
- loc: dec_loc,
+ loc,
is_alias: false,
is_assoc,
is_import: false,
@@ -459,10 +516,10 @@ impl<'a> SymbolCollector<'a> {
let Some(declaration) = module_data.origin.declaration() else { return };
let module = declaration.to_node(self.db);
let Some(name_node) = module.name() else { return };
- let dec_loc = DeclarationLocation {
+ let loc = DeclarationLocation {
hir_file_id: declaration.file_id,
ptr: SyntaxNodePtr::new(module.syntax()),
- name_ptr: AstPtr::new(&name_node).wrap_left(),
+ name_ptr: Some(AstPtr::new(&name_node).wrap_left()),
};
let def = ModuleDef::Module(module_id.into());
@@ -475,7 +532,7 @@ impl<'a> SymbolCollector<'a> {
self.symbols.insert(FileSymbol {
name: alias.clone(),
def,
- loc: dec_loc.clone(),
+ loc,
container_name: self.current_container_name.clone(),
is_alias: true,
is_assoc: false,
@@ -490,7 +547,7 @@ impl<'a> SymbolCollector<'a> {
name: name.symbol().clone(),
def: ModuleDef::Module(module_id.into()),
container_name: self.current_container_name.clone(),
- loc: dec_loc,
+ loc,
is_alias: false,
is_assoc: false,
is_import: false,
diff --git a/crates/hir/src/term_search.rs b/crates/hir/src/term_search.rs
index e408921830..f2dc1ce798 100644
--- a/crates/hir/src/term_search.rs
+++ b/crates/hir/src/term_search.rs
@@ -172,7 +172,7 @@ impl<'db> LookupTable<'db> {
/// Insert new type trees for type
///
/// Note that the types have to be the same, unification is not enough as unification is not
- /// transitive. For example Vec<i32> and FxHashSet<i32> both unify with Iterator<Item = i32>,
+ /// transitive. For example `Vec<i32>` and `FxHashSet<i32>` both unify with `Iterator<Item = i32>`,
/// but they clearly do not unify themselves.
fn insert(&mut self, ty: Type<'db>, exprs: impl Iterator<Item = Expr<'db>>) {
match self.data.get_mut(&ty) {
diff --git a/crates/ide-assists/src/handlers/apply_demorgan.rs b/crates/ide-assists/src/handlers/apply_demorgan.rs
index d193e8a9d8..80d0a6da12 100644
--- a/crates/ide-assists/src/handlers/apply_demorgan.rs
+++ b/crates/ide-assists/src/handlers/apply_demorgan.rs
@@ -3,7 +3,7 @@ use std::collections::VecDeque;
use ide_db::{
assists::GroupLabel,
famous_defs::FamousDefs,
- syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
+ syntax_helpers::node_ext::{for_each_tail_expr, is_pattern_cond, walk_expr},
};
use syntax::{
NodeOrToken, SyntaxKind, T,
@@ -69,6 +69,10 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
}
}
+ if is_pattern_cond(bin_expr.clone().into()) {
+ return None;
+ }
+
let op = bin_expr.op_kind()?;
let (inv_token, prec) = match op {
ast::BinaryOp::LogicOp(ast::LogicOp::And) => (SyntaxKind::PIPE2, ExprPrecedence::LOr),
@@ -376,6 +380,16 @@ fn f() { !(S <= S || S < S) }
}
#[test]
+ fn demorgan_doesnt_handles_pattern() {
+ check_assist_not_applicable(
+ apply_demorgan,
+ r#"
+fn f() { if let 1 = 1 &&$0 true { } }
+"#,
+ );
+ }
+
+ #[test]
fn demorgan_on_not() {
check_assist(
apply_demorgan,
diff --git a/crates/ide-assists/src/handlers/convert_range_for_to_while.rs b/crates/ide-assists/src/handlers/convert_range_for_to_while.rs
index ba577b217d..2e649f14be 100644
--- a/crates/ide-assists/src/handlers/convert_range_for_to_while.rs
+++ b/crates/ide-assists/src/handlers/convert_range_for_to_while.rs
@@ -1,13 +1,15 @@
use ide_db::assists::AssistId;
use itertools::Itertools;
use syntax::{
- AstNode, T,
+ AstNode, SyntaxElement,
+ SyntaxKind::WHITESPACE,
+ T,
algo::previous_non_trivia_token,
ast::{
self, HasArgList, HasLoopBody, HasName, RangeItem, edit::AstNodeEdit, make,
syntax_factory::SyntaxFactory,
},
- syntax_editor::{Element, Position},
+ syntax_editor::{Element, Position, SyntaxEditor},
};
use crate::assist_context::{AssistContext, Assists};
@@ -40,8 +42,8 @@ pub(crate) fn convert_range_for_to_while(acc: &mut Assists, ctx: &AssistContext<
let iterable = for_.iterable()?;
let (start, end, step, inclusive) = extract_range(&iterable)?;
let name = pat.name()?;
- let body = for_.loop_body()?;
- let last = previous_non_trivia_token(body.stmt_list()?.r_curly_token()?)?;
+ let body = for_.loop_body()?.stmt_list()?;
+ let label = for_.label();
let description = if end.is_some() {
"Replace with while expression"
@@ -90,8 +92,10 @@ pub(crate) fn convert_range_for_to_while(acc: &mut Assists, ctx: &AssistContext<
);
let op = ast::BinaryOp::Assignment { op: Some(ast::ArithOp::Add) };
- edit.insert_all(
- Position::after(last),
+ process_loop_body(
+ body,
+ label,
+ &mut edit,
vec![
make.whitespace(&format!("\n{}", indent + 1)).syntax_element(),
make.expr_bin(var_expr, op, step).syntax().syntax_element(),
@@ -121,6 +125,86 @@ fn extract_range(iterable: &ast::Expr) -> Option<(ast::Expr, Option<ast::Expr>,
})
}
+fn process_loop_body(
+ body: ast::StmtList,
+ label: Option<ast::Label>,
+ edit: &mut SyntaxEditor,
+ incrementer: Vec<SyntaxElement>,
+) -> Option<()> {
+ let last = previous_non_trivia_token(body.r_curly_token()?)?.syntax_element();
+
+ let new_body = body.indent(1.into()).clone_subtree();
+ let mut continues = vec![];
+ collect_continue_to(
+ &mut continues,
+ &label.and_then(|it| it.lifetime()),
+ new_body.syntax(),
+ false,
+ );
+
+ if continues.is_empty() {
+ edit.insert_all(Position::after(last), incrementer);
+ return Some(());
+ }
+
+ let mut children = body
+ .syntax()
+ .children_with_tokens()
+ .filter(|it| !matches!(it.kind(), WHITESPACE | T!['{'] | T!['}']));
+ let first = children.next()?;
+ let block_content = first.clone()..=children.last().unwrap_or(first);
+
+ let continue_label = make::lifetime("'cont");
+ let break_expr = make::expr_break(Some(continue_label.clone()), None).clone_for_update();
+ let mut new_edit = SyntaxEditor::new(new_body.syntax().clone());
+ for continue_expr in &continues {
+ new_edit.replace(continue_expr.syntax(), break_expr.syntax());
+ }
+ let new_body = new_edit.finish().new_root().clone();
+ let elements = itertools::chain(
+ [
+ continue_label.syntax().clone_for_update().syntax_element(),
+ make::token(T![:]).syntax_element(),
+ make::tokens::single_space().syntax_element(),
+ new_body.syntax_element(),
+ ],
+ incrementer,
+ );
+ edit.replace_all(block_content, elements.collect());
+
+ Some(())
+}
+
+fn collect_continue_to(
+ acc: &mut Vec<ast::ContinueExpr>,
+ label: &Option<ast::Lifetime>,
+ node: &syntax::SyntaxNode,
+ only_label: bool,
+) {
+ let match_label = |it: &Option<ast::Lifetime>, label: &Option<ast::Lifetime>| match (it, label)
+ {
+ (None, _) => !only_label,
+ (Some(a), Some(b)) if a.text() == b.text() => true,
+ _ => false,
+ };
+ if let Some(expr) = ast::ContinueExpr::cast(node.clone())
+ && match_label(&expr.lifetime(), label)
+ {
+ acc.push(expr);
+ } else if let Some(any_loop) = ast::AnyHasLoopBody::cast(node.clone()) {
+ if match_label(label, &any_loop.label().and_then(|it| it.lifetime())) {
+ return;
+ }
+ for children in node.children() {
+ collect_continue_to(acc, label, &children, true);
+ }
+ } else {
+ for children in node.children() {
+ collect_continue_to(acc, label, &children, only_label);
+ }
+ }
+}
+
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@@ -220,6 +304,67 @@ fn foo() {
}
#[test]
+ fn test_convert_range_for_to_while_with_continue() {
+ check_assist(
+ convert_range_for_to_while,
+ "
+fn foo() {
+ $0for mut i in 3..7 {
+ foo(i);
+ continue;
+ loop { break; continue }
+ bar(i);
+ }
+}
+ ",
+ "
+fn foo() {
+ let mut i = 3;
+ while i < 7 {
+ 'cont: {
+ foo(i);
+ break 'cont;
+ loop { break; continue }
+ bar(i);
+ }
+ i += 1;
+ }
+}
+ ",
+ );
+
+ check_assist(
+ convert_range_for_to_while,
+ "
+fn foo() {
+ 'x: $0for mut i in 3..7 {
+ foo(i);
+ continue 'x;
+ loop { break; continue 'x }
+ 'x: loop { continue 'x }
+ bar(i);
+ }
+}
+ ",
+ "
+fn foo() {
+ let mut i = 3;
+ 'x: while i < 7 {
+ 'cont: {
+ foo(i);
+ break 'cont;
+ loop { break; break 'cont }
+ 'x: loop { continue 'x }
+ bar(i);
+ }
+ i += 1;
+ }
+}
+ ",
+ );
+ }
+
+ #[test]
fn test_convert_range_for_to_while_step_by() {
check_assist(
convert_range_for_to_while,
diff --git a/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
index 08b114072f..ea5c1637b7 100644
--- a/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
+++ b/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
@@ -95,7 +95,9 @@ fn if_expr_to_guarded_return(
let parent_block = if_expr.syntax().parent()?.ancestors().find_map(ast::BlockExpr::cast)?;
- if parent_block.tail_expr()? != if_expr.clone().into() {
+ if parent_block.tail_expr() != Some(if_expr.clone().into())
+ && !(else_block.is_some() && ast::ExprStmt::can_cast(if_expr.syntax().parent()?.kind()))
+ {
return None;
}
@@ -503,6 +505,36 @@ fn main() {
}
#[test]
+ fn convert_if_let_has_else_block_in_statement() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ some_statements();
+ if$0 let Ok(x) = Err(92) {
+ foo(x);
+ } else {
+ // needless comment
+ return;
+ }
+ some_statements();
+}
+"#,
+ r#"
+fn main() {
+ some_statements();
+ let Ok(x) = Err(92) else {
+ // needless comment
+ return;
+ };
+ foo(x);
+ some_statements();
+}
+"#,
+ );
+ }
+
+ #[test]
fn convert_if_let_result_inside_let() {
check_assist(
convert_to_guarded_return,
@@ -1137,6 +1169,44 @@ fn main() {
}
#[test]
+ fn ignore_else_if() {
+ check_assist_not_applicable(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ some_statements();
+ if cond {
+ ()
+ } else if$0 let Ok(x) = Err(92) {
+ foo(x);
+ } else {
+ return;
+ }
+ some_statements();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_if_inside_let() {
+ check_assist_not_applicable(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ some_statements();
+ let _ = if$0 let Ok(x) = Err(92) {
+ foo(x);
+ } else {
+ return;
+ }
+ some_statements();
+}
+"#,
+ );
+ }
+
+ #[test]
fn ignore_let_else_branch() {
check_assist_not_applicable(
convert_to_guarded_return,
diff --git a/crates/ide-assists/src/handlers/expand_rest_pattern.rs b/crates/ide-assists/src/handlers/expand_rest_pattern.rs
index b746099e72..867ac48518 100644
--- a/crates/ide-assists/src/handlers/expand_rest_pattern.rs
+++ b/crates/ide-assists/src/handlers/expand_rest_pattern.rs
@@ -33,8 +33,8 @@ fn expand_record_rest_pattern(
record_pat: ast::RecordPat,
rest_pat: ast::RestPat,
) -> Option<()> {
- let missing_fields = ctx.sema.record_pattern_missing_fields(&record_pat);
- if missing_fields.is_empty() {
+ let matched_fields = ctx.sema.record_pattern_matched_fields(&record_pat);
+ if matched_fields.is_empty() {
cov_mark::hit!(no_missing_fields);
return None;
}
@@ -53,7 +53,7 @@ fn expand_record_rest_pattern(
|builder| {
let make = SyntaxFactory::with_mappings();
let mut editor = builder.make_editor(rest_pat.syntax());
- let new_fields = old_field_list.fields().chain(missing_fields.iter().map(|(f, _)| {
+ let new_fields = old_field_list.fields().chain(matched_fields.iter().map(|(f, _)| {
make.record_pat_field_shorthand(
make.ident_pat(
false,
diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs
index 231df9b5b3..f2363c6f7b 100644
--- a/crates/ide-assists/src/handlers/extract_function.rs
+++ b/crates/ide-assists/src/handlers/extract_function.rs
@@ -25,7 +25,7 @@ use syntax::{
SyntaxKind::{self, COMMENT},
SyntaxNode, SyntaxToken, T, TextRange, TextSize, TokenAtOffset, WalkEvent,
ast::{
- self, AstNode, AstToken, HasGenericParams, HasName, edit::IndentLevel,
+ self, AstNode, AstToken, HasAttrs, HasGenericParams, HasName, edit::IndentLevel,
edit_in_place::Indent,
},
match_ast, ted,
@@ -120,7 +120,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let params = body.extracted_function_params(ctx, &container_info, locals_used);
- let name = make_function_name(&semantics_scope);
+ let name = make_function_name(&semantics_scope, &body);
let fun = Function {
name,
@@ -241,7 +241,10 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
)
}
-fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef {
+fn make_function_name(
+ semantics_scope: &hir::SemanticsScope<'_>,
+ body: &FunctionBody,
+) -> ast::NameRef {
let mut names_in_scope = vec![];
semantics_scope.process_all_names(&mut |name, _| {
names_in_scope.push(
@@ -252,7 +255,10 @@ fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef
let default_name = "fun_name";
- let mut name = default_name.to_owned();
+ let mut name = body
+ .suggest_name()
+ .filter(|name| name.len() > 2)
+ .unwrap_or_else(|| default_name.to_owned());
let mut counter = 0;
while names_in_scope.contains(&name) {
counter += 1;
@@ -375,6 +381,7 @@ struct ContainerInfo<'db> {
ret_type: Option<hir::Type<'db>>,
generic_param_lists: Vec<ast::GenericParamList>,
where_clauses: Vec<ast::WhereClause>,
+ attrs: Vec<ast::Attr>,
edition: Edition,
}
@@ -778,6 +785,16 @@ impl FunctionBody {
fn contains_node(&self, node: &SyntaxNode) -> bool {
self.contains_range(node.text_range())
}
+
+ fn suggest_name(&self) -> Option<String> {
+ if let Some(ast::Pat::IdentPat(pat)) = self.parent().and_then(ast::LetStmt::cast)?.pat()
+ && let Some(name) = pat.name().and_then(|it| it.ident_token())
+ {
+ Some(name.text().to_owned())
+ } else {
+ None
+ }
+ }
}
impl FunctionBody {
@@ -911,6 +928,7 @@ impl FunctionBody {
let parents = generic_parents(&parent);
let generic_param_lists = parents.iter().filter_map(|it| it.generic_param_list()).collect();
let where_clauses = parents.iter().filter_map(|it| it.where_clause()).collect();
+ let attrs = parents.iter().flat_map(|it| it.attrs()).filter(is_inherit_attr).collect();
Some((
ContainerInfo {
@@ -919,6 +937,7 @@ impl FunctionBody {
ret_type: ty,
generic_param_lists,
where_clauses,
+ attrs,
edition,
},
contains_tail_expr,
@@ -1103,6 +1122,14 @@ impl GenericParent {
GenericParent::Trait(trait_) => trait_.where_clause(),
}
}
+
+ fn attrs(&self) -> impl Iterator<Item = ast::Attr> {
+ match self {
+ GenericParent::Fn(fn_) => fn_.attrs(),
+ GenericParent::Impl(impl_) => impl_.attrs(),
+ GenericParent::Trait(trait_) => trait_.attrs(),
+ }
+ }
}
/// Search `parent`'s ancestors for items with potentially applicable generic parameters
@@ -1578,7 +1605,7 @@ fn format_function(
let (generic_params, where_clause) = make_generic_params_and_where_clause(ctx, fun);
make::fn_(
- None,
+ fun.mods.attrs.clone(),
None,
fun_name,
generic_params,
@@ -1958,6 +1985,11 @@ fn format_type(ty: &hir::Type<'_>, ctx: &AssistContext<'_>, module: hir::Module)
ty.display_source_code(ctx.db(), module.into(), true).ok().unwrap_or_else(|| "_".to_owned())
}
+fn is_inherit_attr(attr: &ast::Attr) -> bool {
+ let Some(name) = attr.simple_name() else { return false };
+ matches!(name.as_str(), "track_caller" | "cfg")
+}
+
fn make_ty(ty: &hir::Type<'_>, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
let ty_str = format_type(ty, ctx, module);
make::ty(&ty_str)
@@ -5414,12 +5446,12 @@ impl Struct {
impl Trait for Struct {
fn bar(&self) -> i32 {
- let three_squared = fun_name();
+ let three_squared = three_squared();
self.0 + three_squared
}
}
-fn $0fun_name() -> i32 {
+fn $0three_squared() -> i32 {
3 * 3
}
"#,
@@ -6375,4 +6407,53 @@ fn $0fun_name(mut a: i32, mut b: i32) {
"#,
);
}
+
+ #[test]
+ fn with_cfg_attr() {
+ check_assist(
+ extract_function,
+ r#"
+//- /main.rs crate:main cfg:test
+#[cfg(test)]
+fn foo() {
+ foo($01 + 1$0);
+}
+"#,
+ r#"
+#[cfg(test)]
+fn foo() {
+ foo(fun_name());
+}
+
+#[cfg(test)]
+fn $0fun_name() -> i32 {
+ 1 + 1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn with_track_caller() {
+ check_assist(
+ extract_function,
+ r#"
+#[track_caller]
+fn foo() {
+ foo($01 + 1$0);
+}
+"#,
+ r#"
+#[track_caller]
+fn foo() {
+ foo(fun_name());
+}
+
+#[track_caller]
+fn $0fun_name() -> i32 {
+ 1 + 1
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
index ae1ae24d1e..53f6f4883f 100644
--- a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
@@ -1,8 +1,8 @@
use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait};
use syntax::{
- AstNode, T,
- ast::{self, edit_in_place::Indent, make},
- ted,
+ AstNode, SyntaxElement, SyntaxNode, T,
+ ast::{self, edit::AstNodeEdit, edit_in_place::Indent, syntax_factory::SyntaxFactory},
+ syntax_editor::{Element, Position, SyntaxEditor},
};
use crate::{AssistContext, AssistId, Assists};
@@ -45,12 +45,13 @@ use crate::{AssistContext, AssistId, Assists};
// }
// ```
pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
- let indent = impl_def.indent_level();
+ let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
+ let indent = Indent::indent_level(&impl_def);
let ast::Type::PathType(path) = impl_def.trait_()? else {
return None;
};
+
let trait_name = path.path()?.segment()?.name_ref()?;
let scope = ctx.sema.scope(impl_def.trait_()?.syntax())?;
@@ -59,75 +60,133 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?;
let trait_new = get_trait_mut(&trait_, famous)?;
- // Index -> IndexMut
- ted::replace(trait_name.syntax(), make::name_ref(trait_new).clone_for_update().syntax());
-
- // index -> index_mut
- let (trait_method_name, new_trait_method_name) = impl_def
- .syntax()
- .descendants()
- .filter_map(ast::Name::cast)
- .find_map(process_method_name)?;
- ted::replace(
- trait_method_name.syntax(),
- make::name(new_trait_method_name).clone_for_update().syntax(),
- );
-
- if let Some(type_alias) = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast) {
- ted::remove(type_alias.syntax());
- }
-
- // &self -> &mut self
- let mut_self_param = make::mut_self_param();
- let self_param: ast::SelfParam =
- impl_def.syntax().descendants().find_map(ast::SelfParam::cast)?;
- ted::replace(self_param.syntax(), mut_self_param.clone_for_update().syntax());
-
- // &Self::Output -> &mut Self::Output
- let ret_type = impl_def.syntax().descendants().find_map(ast::RetType::cast)?;
- let new_ret_type = process_ret_type(&ret_type)?;
- ted::replace(ret_type.syntax(), make::ret_type(new_ret_type).clone_for_update().syntax());
-
- let fn_ = impl_def.assoc_item_list()?.assoc_items().find_map(|it| match it {
- ast::AssocItem::Fn(f) => Some(f),
- _ => None,
- })?;
- let _ = process_ref_mut(&fn_);
-
- let assoc_list = make::assoc_item_list(None).clone_for_update();
- ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax());
- impl_def.get_or_create_assoc_item_list().add_item(syntax::ast::AssocItem::Fn(fn_));
-
let target = impl_def.syntax().text_range();
+
acc.add(
AssistId::generate("generate_mut_trait_impl"),
format!("Generate `{trait_new}` impl from this `{trait_name}` trait"),
target,
|edit| {
- edit.insert(
- target.start(),
- if ctx.config.snippet_cap.is_some() {
- format!("$0{impl_def}\n\n{indent}")
- } else {
- format!("{impl_def}\n\n{indent}")
- },
+ let impl_clone = impl_def.reset_indent().clone_subtree();
+ let mut editor = SyntaxEditor::new(impl_clone.syntax().clone());
+ let factory = SyntaxFactory::without_mappings();
+
+ apply_generate_mut_impl(&mut editor, &factory, &impl_clone, trait_new);
+
+ let new_root = editor.finish();
+ let new_root = new_root.new_root();
+
+ let new_impl = ast::Impl::cast(new_root.clone()).unwrap();
+
+ Indent::indent(&new_impl, indent);
+
+ let mut editor = edit.make_editor(impl_def.syntax());
+ editor.insert_all(
+ Position::before(impl_def.syntax()),
+ vec![
+ new_impl.syntax().syntax_element(),
+ factory.whitespace(&format!("\n\n{indent}")).syntax_element(),
+ ],
);
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ let tabstop_before = edit.make_tabstop_before(cap);
+ editor.add_annotation(new_impl.syntax(), tabstop_before);
+ }
+
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
-fn process_ref_mut(fn_: &ast::Fn) -> Option<()> {
- let expr = fn_.body()?.tail_expr()?;
- match &expr {
- ast::Expr::RefExpr(ref_expr) if ref_expr.mut_token().is_none() => {
- ted::insert_all_raw(
- ted::Position::after(ref_expr.amp_token()?),
- vec![make::token(T![mut]).into(), make::tokens::whitespace(" ").into()],
- );
- }
- _ => {}
+fn delete_with_trivia(editor: &mut SyntaxEditor, node: &SyntaxNode) {
+ let mut end: SyntaxElement = node.clone().into();
+
+ if let Some(next) = node.next_sibling_or_token()
+ && let SyntaxElement::Token(tok) = &next
+ && tok.kind().is_trivia()
+ {
+ end = next.clone();
}
- None
+
+ editor.delete_all(node.clone().into()..=end);
+}
+
+fn apply_generate_mut_impl(
+ editor: &mut SyntaxEditor,
+ factory: &SyntaxFactory,
+ impl_def: &ast::Impl,
+ trait_new: &str,
+) -> Option<()> {
+ let path =
+ impl_def.trait_().and_then(|t| t.syntax().descendants().find_map(ast::Path::cast))?;
+ let seg = path.segment()?;
+ let name_ref = seg.name_ref()?;
+
+ let new_name_ref = factory.name_ref(trait_new);
+ editor.replace(name_ref.syntax(), new_name_ref.syntax());
+
+ if let Some((name, new_name)) =
+ impl_def.syntax().descendants().filter_map(ast::Name::cast).find_map(process_method_name)
+ {
+ let new_name_node = factory.name(new_name);
+ editor.replace(name.syntax(), new_name_node.syntax());
+ }
+
+ if let Some(type_alias) = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast) {
+ delete_with_trivia(editor, type_alias.syntax());
+ }
+
+ if let Some(self_param) = impl_def.syntax().descendants().find_map(ast::SelfParam::cast) {
+ let mut_self = factory.mut_self_param();
+ editor.replace(self_param.syntax(), mut_self.syntax());
+ }
+
+ if let Some(ret_type) = impl_def.syntax().descendants().find_map(ast::RetType::cast)
+ && let Some(new_ty) = process_ret_type(factory, &ret_type)
+ {
+ let new_ret = factory.ret_type(new_ty);
+ editor.replace(ret_type.syntax(), new_ret.syntax())
+ }
+
+ if let Some(fn_) = impl_def.assoc_item_list().and_then(|l| {
+ l.assoc_items().find_map(|it| match it {
+ ast::AssocItem::Fn(f) => Some(f),
+ _ => None,
+ })
+ }) {
+ process_ref_mut(editor, factory, &fn_);
+ }
+
+ Some(())
+}
+
+fn process_ref_mut(editor: &mut SyntaxEditor, factory: &SyntaxFactory, fn_: &ast::Fn) {
+ let Some(expr) = fn_.body().and_then(|b| b.tail_expr()) else { return };
+
+ let ast::Expr::RefExpr(ref_expr) = expr else { return };
+
+ if ref_expr.mut_token().is_some() {
+ return;
+ }
+
+ let Some(amp) = ref_expr.amp_token() else { return };
+
+ let mut_kw = factory.token(T![mut]);
+ let space = factory.whitespace(" ");
+
+ editor.insert(Position::after(amp.clone()), space.syntax_element());
+ editor.insert(Position::after(amp), mut_kw.syntax_element());
+}
+
+fn process_ret_type(factory: &SyntaxFactory, ref_ty: &ast::RetType) -> Option<ast::Type> {
+ let ty = ref_ty.ty()?;
+ let ast::Type::RefType(ref_type) = ty else {
+ return None;
+ };
+
+ let inner = ref_type.ty()?;
+ Some(factory.ty_ref(inner, true))
}
fn get_trait_mut(apply_trait: &hir::Trait, famous: FamousDefs<'_, '_>) -> Option<&'static str> {
@@ -158,14 +217,6 @@ fn process_method_name(name: ast::Name) -> Option<(ast::Name, &'static str)> {
Some((name, new_name))
}
-fn process_ret_type(ref_ty: &ast::RetType) -> Option<ast::Type> {
- let ty = ref_ty.ty()?;
- let ast::Type::RefType(ref_type) = ty else {
- return None;
- };
- Some(make::ty_ref(ref_type.ty()?, true))
-}
-
#[cfg(test)]
mod tests {
use crate::{
diff --git a/crates/ide-assists/src/handlers/inline_type_alias.rs b/crates/ide-assists/src/handlers/inline_type_alias.rs
index ae8d130df2..c7a48f3261 100644
--- a/crates/ide-assists/src/handlers/inline_type_alias.rs
+++ b/crates/ide-assists/src/handlers/inline_type_alias.rs
@@ -290,19 +290,23 @@ impl ConstAndTypeMap {
/// ^ alias generic params
/// let a: A<100>;
/// ^ instance generic args
-/// ```
///
/// generic['a] = '_ due to omission
/// generic[N] = 100 due to the instance arg
/// generic[T] = u64 due to the default param
+/// ```
///
/// 2. Copy the concrete type and substitute in each found mapping:
///
+/// ```ignore
/// &'_ [u64; 100]
+/// ```
///
/// 3. Remove wildcard lifetimes entirely:
///
+/// ```ignore
/// &[u64; 100]
+/// ```
fn create_replacement(
lifetime_map: &LifetimeMap,
const_and_type_map: &ConstAndTypeMap,
diff --git a/crates/ide-assists/src/handlers/move_guard.rs b/crates/ide-assists/src/handlers/move_guard.rs
index 1c0c6e43d5..84f02bdfdb 100644
--- a/crates/ide-assists/src/handlers/move_guard.rs
+++ b/crates/ide-assists/src/handlers/move_guard.rs
@@ -3,7 +3,7 @@ use syntax::{
SyntaxKind::WHITESPACE,
ast::{
AstNode, BlockExpr, ElseBranch, Expr, IfExpr, MatchArm, Pat, edit::AstNodeEdit, make,
- syntax_factory::SyntaxFactory,
+ prec::ExprPrecedence, syntax_factory::SyntaxFactory,
},
syntax_editor::Element,
};
@@ -49,7 +49,7 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext<'_>)
let guard_condition = guard.condition()?.reset_indent();
let arm_expr = match_arm.expr()?;
- let then_branch = make::block_expr(None, Some(arm_expr.reset_indent().indent(1.into())));
+ let then_branch = crate::utils::wrap_block(&arm_expr);
let if_expr = make::expr_if(guard_condition, then_branch, None).indent(arm_expr.indent_level());
let target = guard.syntax().text_range();
@@ -109,6 +109,7 @@ pub(crate) fn move_arm_cond_to_match_guard(
let match_arm: MatchArm = ctx.find_node_at_offset::<MatchArm>()?;
let match_pat = match_arm.pat()?;
let arm_body = match_arm.expr()?;
+ let arm_guard = match_arm.guard().and_then(|it| it.condition());
let mut replace_node = None;
let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone()).or_else(|| {
@@ -149,6 +150,25 @@ pub(crate) fn move_arm_cond_to_match_guard(
0
};
let indent_level = match_arm.indent_level();
+ let make_guard = |cond: Option<Expr>| {
+ let condition = match (arm_guard.clone(), cond) {
+ (None, None) => return None,
+ (None, Some(it)) | (Some(it), None) => it,
+ (Some(lhs), Some(rhs)) => {
+ let op_expr = |expr: Expr| {
+ if expr.precedence().needs_parentheses_in(ExprPrecedence::LAnd) {
+ make.expr_paren(expr).into()
+ } else {
+ expr
+ }
+ };
+ let op = syntax::ast::BinaryOp::LogicOp(syntax::ast::LogicOp::And);
+ let expr_bin = make.expr_bin(op_expr(lhs), op, op_expr(rhs));
+ expr_bin.into()
+ }
+ };
+ Some(make.match_guard(condition))
+ };
for (cond, block) in conds_blocks {
let only_expr = block.statements().next().is_none();
@@ -156,8 +176,7 @@ pub(crate) fn move_arm_cond_to_match_guard(
Some(then_expr) if only_expr => then_expr,
_ => block.dedent(dedent.into()).into(),
};
- let guard = make.match_guard(cond);
- let new_arm = make.match_arm(match_pat.clone(), Some(guard), expr);
+ let new_arm = make.match_arm(match_pat.clone(), make_guard(Some(cond)), expr);
replace_arms.push(new_arm);
}
if let Some(block) = tail {
@@ -170,7 +189,7 @@ pub(crate) fn move_arm_cond_to_match_guard(
}
_ => block.dedent(dedent.into()).into(),
};
- let new_arm = make.match_arm(match_pat, None, expr);
+ let new_arm = make.match_arm(match_pat, make_guard(None), expr);
replace_arms.push(new_arm);
} else {
// There's no else branch. Add a pattern without guard, unless the following match
@@ -185,7 +204,7 @@ pub(crate) fn move_arm_cond_to_match_guard(
}
_ => {
let block_expr = make.expr_empty_block().into();
- replace_arms.push(make.match_arm(match_pat, None, block_expr));
+ replace_arms.push(make.match_arm(match_pat, make_guard(None), block_expr));
}
}
}
@@ -326,6 +345,35 @@ fn main() {
}
#[test]
+ fn move_guard_to_block_arm_body_works() {
+ check_assist(
+ move_guard_to_arm_body,
+ r#"
+fn main() {
+ match 92 {
+ x $0if x > 10 => {
+ let _ = true;
+ false
+ },
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x => if x > 10 {
+ let _ = true;
+ false
+ },
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
fn move_let_guard_to_arm_body_works() {
check_assist(
move_guard_to_arm_body,
@@ -376,9 +424,7 @@ fn main() {
&& true
&& true {
{
- {
- false
- }
+ false
}
},
_ => true
@@ -1084,4 +1130,40 @@ fn main() {
"#,
)
}
+
+ #[test]
+ fn move_arm_cond_to_match_guard_elseif_exist_guard() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ let cond = true;
+ match 92 {
+ 3 => true,
+ x if cond => if x $0> 10 {
+ false
+ } else if x > 5 {
+ true
+ } else if x > 4 || x < -2 {
+ false
+ } else {
+ true
+ },
+ }
+}
+"#,
+ r#"
+fn main() {
+ let cond = true;
+ match 92 {
+ 3 => true,
+ x if cond && x > 10 => false,
+ x if cond && x > 5 => true,
+ x if cond && (x > 4 || x < -2) => false,
+ x if cond => true,
+ }
+}
+"#,
+ )
+ }
}
diff --git a/crates/ide-assists/src/handlers/remove_parentheses.rs b/crates/ide-assists/src/handlers/remove_parentheses.rs
index aa4d2bcadb..f07da489e2 100644
--- a/crates/ide-assists/src/handlers/remove_parentheses.rs
+++ b/crates/ide-assists/src/handlers/remove_parentheses.rs
@@ -322,6 +322,12 @@ mod tests {
}
#[test]
+ fn remove_parens_conflict_cast_before_l_angle() {
+ check_assist_not_applicable(remove_parentheses, r#"fn f() { _ = $0(1 as u32) << 10; }"#);
+ check_assist_not_applicable(remove_parentheses, r#"fn f() { _ = $0(1 as u32) < 10; }"#);
+ }
+
+ #[test]
fn remove_parens_double_paren_stmt() {
check_assist(
remove_parentheses,
diff --git a/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs b/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
index bf1546986e..60b0797f02 100644
--- a/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
+++ b/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
@@ -86,7 +86,14 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>)
}
MacroDelims::LCur | MacroDelims::RCur => {
editor.replace(ltoken, make.token(T!['[']));
- editor.replace(rtoken, make.token(T![']']));
+ if semicolon.is_some() || !needs_semicolon(token_tree) {
+ editor.replace(rtoken, make.token(T![']']));
+ } else {
+ editor.replace_with_many(
+ rtoken,
+ vec![make.token(T![']']).into(), make.token(T![;]).into()],
+ );
+ }
}
}
editor.add_mappings(make.finish_with_mappings());
@@ -103,6 +110,30 @@ fn macro_semicolon(makro: &ast::MacroCall) -> Option<SyntaxToken> {
})
}
+fn needs_semicolon(tt: ast::TokenTree) -> bool {
+ (|| {
+ let call = ast::MacroCall::cast(tt.syntax().parent()?)?;
+ let container = call.syntax().parent()?;
+ let kind = container.kind();
+
+ if call.semicolon_token().is_some() {
+ return Some(false);
+ }
+
+ Some(
+ ast::ItemList::can_cast(kind)
+ || ast::SourceFile::can_cast(kind)
+ || ast::AssocItemList::can_cast(kind)
+ || ast::ExternItemList::can_cast(kind)
+ || ast::MacroItems::can_cast(kind)
+ || ast::MacroExpr::can_cast(kind)
+ && ast::ExprStmt::cast(container.parent()?)
+ .is_some_and(|it| it.semicolon_token().is_none()),
+ )
+ })()
+ .unwrap_or(false)
+}
+
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@@ -161,7 +192,7 @@ macro_rules! sth {
() => {};
}
-sth!$0{ };
+sth!$0{ }
"#,
r#"
macro_rules! sth {
@@ -170,7 +201,117 @@ macro_rules! sth {
sth![ ];
"#,
- )
+ );
+
+ check_assist(
+ toggle_macro_delimiter,
+ r#"
+macro_rules! sth {
+ () => {};
+}
+
+fn foo() -> i32 {
+ sth!$0{ }
+ 2
+}
+ "#,
+ r#"
+macro_rules! sth {
+ () => {};
+}
+
+fn foo() -> i32 {
+ sth![ ];
+ 2
+}
+ "#,
+ );
+
+ check_assist(
+ toggle_macro_delimiter,
+ r#"
+macro_rules! sth {
+ () => {2};
+}
+
+fn foo() {
+ sth!$0{ };
+}
+ "#,
+ r#"
+macro_rules! sth {
+ () => {2};
+}
+
+fn foo() {
+ sth![ ];
+}
+ "#,
+ );
+
+ check_assist(
+ toggle_macro_delimiter,
+ r#"
+macro_rules! sth {
+ () => {2};
+}
+
+fn foo() -> i32 {
+ sth!$0{ }
+}
+ "#,
+ r#"
+macro_rules! sth {
+ () => {2};
+}
+
+fn foo() -> i32 {
+ sth![ ]
+}
+ "#,
+ );
+
+ check_assist(
+ toggle_macro_delimiter,
+ r#"
+macro_rules! sth {
+ () => {};
+}
+impl () {
+ sth!$0{}
+}
+ "#,
+ r#"
+macro_rules! sth {
+ () => {};
+}
+impl () {
+ sth![];
+}
+ "#,
+ );
+
+ check_assist(
+ toggle_macro_delimiter,
+ r#"
+macro_rules! sth {
+ () => {2};
+}
+
+fn foo() -> i32 {
+ bar(sth!$0{ })
+}
+ "#,
+ r#"
+macro_rules! sth {
+ () => {2};
+}
+
+fn foo() -> i32 {
+ bar(sth![ ])
+}
+ "#,
+ );
}
#[test]
@@ -204,7 +345,7 @@ mod abc {
() => {};
}
- sth!$0{ };
+ sth!$0{ }
}
"#,
r#"
diff --git a/crates/ide-assists/src/handlers/unwrap_block.rs b/crates/ide-assists/src/handlers/unwrap_block.rs
index a83f6835ca..e4f5e3523b 100644
--- a/crates/ide-assists/src/handlers/unwrap_block.rs
+++ b/crates/ide-assists/src/handlers/unwrap_block.rs
@@ -1,10 +1,12 @@
use syntax::{
- AstNode, SyntaxKind, T, TextRange,
+ AstNode, SyntaxElement, SyntaxKind, SyntaxNode, T,
ast::{
self,
edit::{AstNodeEdit, IndentLevel},
make,
},
+ match_ast,
+ syntax_editor::{Element, Position, SyntaxEditor},
};
use crate::{AssistContext, AssistId, Assists};
@@ -27,123 +29,108 @@ use crate::{AssistContext, AssistId, Assists};
// }
// ```
pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- let assist_id = AssistId::refactor_rewrite("unwrap_block");
- let assist_label = "Unwrap block";
let l_curly_token = ctx.find_token_syntax_at_offset(T!['{'])?;
- let mut block = ast::BlockExpr::cast(l_curly_token.parent_ancestors().nth(1)?)?;
+ let block = l_curly_token.parent_ancestors().nth(1).and_then(ast::BlockExpr::cast)?;
let target = block.syntax().text_range();
- let mut parent = block.syntax().parent()?;
- if ast::MatchArm::can_cast(parent.kind()) {
- parent = parent.ancestors().find(|it| ast::MatchExpr::can_cast(it.kind()))?
- }
-
- let kind = parent.kind();
- if matches!(kind, SyntaxKind::STMT_LIST | SyntaxKind::EXPR_STMT) {
- acc.add(assist_id, assist_label, target, |builder| {
- builder.replace(block.syntax().text_range(), update_expr_string(block.to_string()));
- })
- } else if matches!(kind, SyntaxKind::LET_STMT) {
- let parent = ast::LetStmt::cast(parent)?;
- let pattern = ast::Pat::cast(parent.syntax().first_child()?)?;
- let ty = parent.ty();
- let list = block.stmt_list()?;
- let replaced = match list.syntax().last_child() {
- Some(last) => {
- let stmts: Vec<ast::Stmt> = list.statements().collect();
- let initializer = ast::Expr::cast(last)?;
- let let_stmt = make::let_stmt(pattern, ty, Some(initializer));
- if !stmts.is_empty() {
- let block = make::block_expr(stmts, None);
- format!("{}\n {}", update_expr_string(block.to_string()), let_stmt)
- } else {
- let_stmt.to_string()
- }
- }
- None => {
- let empty_tuple = make::ext::expr_unit();
- make::let_stmt(pattern, ty, Some(empty_tuple)).to_string()
- }
- };
- acc.add(assist_id, assist_label, target, |builder| {
- builder.replace(parent.syntax().text_range(), replaced);
- })
- } else {
- let parent = ast::Expr::cast(parent)?;
- match parent.clone() {
- ast::Expr::ForExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::LoopExpr(_) => (),
- ast::Expr::MatchExpr(_) => block = block.dedent(IndentLevel(1)),
- ast::Expr::IfExpr(if_expr) => {
- let then_branch = if_expr.then_branch()?;
- if then_branch == block {
- if let Some(ancestor) = if_expr.syntax().parent().and_then(ast::IfExpr::cast) {
- // For `else if` blocks
- let ancestor_then_branch = ancestor.then_branch()?;
-
- return acc.add(assist_id, assist_label, target, |edit| {
- let range_to_del_else_if = TextRange::new(
- ancestor_then_branch.syntax().text_range().end(),
- l_curly_token.text_range().start(),
- );
- let range_to_del_rest = TextRange::new(
- then_branch.syntax().text_range().end(),
- if_expr.syntax().text_range().end(),
- );
-
- edit.delete(range_to_del_rest);
- edit.delete(range_to_del_else_if);
- edit.replace(
- target,
- update_expr_string_without_newline(then_branch.to_string()),
- );
- });
- }
- } else {
- return acc.add(assist_id, assist_label, target, |edit| {
- let range_to_del = TextRange::new(
- then_branch.syntax().text_range().end(),
- l_curly_token.text_range().start(),
- );
-
- edit.delete(range_to_del);
- edit.replace(target, update_expr_string_without_newline(block.to_string()));
+ let mut container = block.syntax().clone();
+ let mut replacement = block.clone();
+ let mut prefer_container = None;
+
+ let from_indent = block.indent_level();
+ let into_indent = loop {
+ let parent = container.parent()?;
+ container = match_ast! {
+ match parent {
+ ast::ForExpr(it) => it.syntax().clone(),
+ ast::LoopExpr(it) => it.syntax().clone(),
+ ast::WhileExpr(it) => it.syntax().clone(),
+ ast::MatchArm(it) => it.parent_match().syntax().clone(),
+ ast::LetStmt(it) => {
+ replacement = wrap_let(&it, replacement);
+ prefer_container = Some(it.syntax().clone());
+ it.syntax().clone()
+ },
+ ast::IfExpr(it) => {
+ prefer_container.get_or_insert_with(|| {
+ if let Some(else_branch) = it.else_branch()
+ && *else_branch.syntax() == container
+ {
+ else_branch.syntax().clone()
+ } else {
+ it.syntax().clone()
+ }
});
- }
+ it.syntax().clone()
+ },
+ ast::ExprStmt(it) => it.syntax().clone(),
+ ast::StmtList(it) => break it.indent_level(),
+ _ => return None,
}
- _ => return None,
};
+ };
+ let replacement = replacement.stmt_list()?;
- acc.add(assist_id, assist_label, target, |builder| {
- builder.replace(parent.syntax().text_range(), update_expr_string(block.to_string()));
- })
- }
-}
+ acc.add(AssistId::refactor_rewrite("unwrap_block"), "Unwrap block", target, |builder| {
+ let mut edit = builder.make_editor(block.syntax());
+ let replacement = replacement.dedent(from_indent).indent(into_indent);
+ let container = prefer_container.unwrap_or(container);
-fn update_expr_string(expr_string: String) -> String {
- update_expr_string_with_pat(expr_string, &[' ', '\n'])
-}
+ edit.replace_with_many(&container, extract_statements(replacement));
+ delete_else_before(container, &mut edit);
-fn update_expr_string_without_newline(expr_string: String) -> String {
- update_expr_string_with_pat(expr_string, &[' '])
+ builder.add_file_edits(ctx.vfs_file_id(), edit);
+ })
}
-fn update_expr_string_with_pat(expr_str: String, whitespace_pat: &[char]) -> String {
- // Remove leading whitespace, index to remove the leading '{',
- // then continue to remove leading whitespace.
- // We cannot assume the `{` is the first character because there are block modifiers
- // (`unsafe`, `async` etc.).
- let after_open_brace_index = expr_str.find('{').map_or(0, |it| it + 1);
- let expr_str = expr_str[after_open_brace_index..].trim_start_matches(whitespace_pat);
-
- // Remove trailing whitespace, index [..expr_str.len() - 1] to remove the trailing '}',
- // then continue to remove trailing whitespace.
- let expr_str = expr_str.trim_end_matches(whitespace_pat);
- let expr_str = expr_str[..expr_str.len() - 1].trim_end_matches(whitespace_pat);
+fn delete_else_before(container: SyntaxNode, edit: &mut SyntaxEditor) {
+ let Some(else_token) = container
+ .siblings_with_tokens(syntax::Direction::Prev)
+ .skip(1)
+ .map_while(|it| it.into_token())
+ .find(|it| it.kind() == T![else])
+ else {
+ return;
+ };
+ itertools::chain(else_token.prev_token(), else_token.next_token())
+ .filter(|it| it.kind() == SyntaxKind::WHITESPACE)
+ .for_each(|it| edit.delete(it));
+ let indent = IndentLevel::from_node(&container);
+ let newline = make::tokens::whitespace(&format!("\n{indent}"));
+ edit.replace(else_token, newline);
+}
+
+fn wrap_let(assign: &ast::LetStmt, replacement: ast::BlockExpr) -> ast::BlockExpr {
+ let try_wrap_assign = || {
+ let initializer = assign.initializer()?.syntax().syntax_element();
+ let replacement = replacement.clone_subtree();
+ let assign = assign.clone_for_update();
+ let tail_expr = replacement.tail_expr()?;
+ let before =
+ assign.syntax().children_with_tokens().take_while(|it| *it != initializer).collect();
+ let after = assign
+ .syntax()
+ .children_with_tokens()
+ .skip_while(|it| *it != initializer)
+ .skip(1)
+ .collect();
+
+ let mut edit = SyntaxEditor::new(replacement.syntax().clone());
+ edit.insert_all(Position::before(tail_expr.syntax()), before);
+ edit.insert_all(Position::after(tail_expr.syntax()), after);
+ ast::BlockExpr::cast(edit.finish().new_root().clone())
+ };
+ try_wrap_assign().unwrap_or(replacement)
+}
- expr_str
- .lines()
- .map(|line| line.replacen(" ", "", 1)) // Delete indentation
- .collect::<Vec<String>>()
- .join("\n")
+fn extract_statements(stmt_list: ast::StmtList) -> Vec<SyntaxElement> {
+ let mut elements = stmt_list
+ .syntax()
+ .children_with_tokens()
+ .filter(|it| !matches!(it.kind(), T!['{'] | T!['}']))
+ .skip_while(|it| it.kind() == SyntaxKind::WHITESPACE)
+ .collect::<Vec<_>>();
+ while elements.pop_if(|it| it.kind() == SyntaxKind::WHITESPACE).is_some() {}
+ elements
}
#[cfg(test)]
@@ -594,6 +581,30 @@ fn main() {
}
#[test]
+ fn unwrap_match_arm_in_let() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ let value = match rel_path {
+ Ok(rel_path) => {$0
+ let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+ Some((*id, rel_path))
+ }
+ Err(_) => None,
+ };
+}
+"#,
+ r#"
+fn main() {
+ let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+ let value = Some((*id, rel_path));
+}
+"#,
+ );
+ }
+
+ #[test]
fn simple_if_in_while_bad_cursor_position() {
check_assist_not_applicable(
unwrap_block,
@@ -751,27 +762,27 @@ fn main() -> i32 {
unwrap_block,
r#"
fn main() {
- let x = {$0};
+ let x = {$0
+ bar
+ };
}
"#,
r#"
fn main() {
- let x = ();
+ let x = bar;
}
"#,
);
check_assist(
unwrap_block,
r#"
-fn main() {
- let x = {$0
- bar
- };
+fn main() -> i32 {
+ let _ = {$01; 2};
}
"#,
r#"
-fn main() {
- let x = bar;
+fn main() -> i32 {
+ 1; let _ = 2;
}
"#,
);
@@ -779,13 +790,12 @@ fn main() {
unwrap_block,
r#"
fn main() -> i32 {
- let _ = {$01; 2};
+ let mut a = {$01; 2};
}
"#,
r#"
fn main() -> i32 {
- 1;
- let _ = 2;
+ 1; let mut a = 2;
}
"#,
);
@@ -793,13 +803,18 @@ fn main() -> i32 {
unwrap_block,
r#"
fn main() -> i32 {
- let mut a = {$01; 2};
+ let mut a = {$0
+ 1;
+ 2;
+ 3
+ };
}
"#,
r#"
fn main() -> i32 {
1;
- let mut a = 2;
+ 2;
+ let mut a = 3;
}
"#,
);
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index 9a96374c00..4b8c193057 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -86,6 +86,17 @@ pub fn extract_trivial_expression(block_expr: &ast::BlockExpr) -> Option<ast::Ex
None
}
+pub(crate) fn wrap_block(expr: &ast::Expr) -> ast::BlockExpr {
+ if let ast::Expr::BlockExpr(block) = expr
+ && let Some(first) = block.syntax().first_token()
+ && first.kind() == T!['{']
+ {
+ block.reset_indent()
+ } else {
+ make::block_expr(None, Some(expr.reset_indent().indent(1.into())))
+ }
+}
+
/// This is a method with a heuristics to support test methods annotated with custom test annotations, such as
/// `#[test_case(...)]`, `#[tokio::test]` and similar.
/// Also a regular `#[test]` annotation is supported.
diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs
index 77734c5d6f..8c532e0f4d 100644
--- a/crates/ide-completion/src/completions/expr.rs
+++ b/crates/ide-completion/src/completions/expr.rs
@@ -340,7 +340,7 @@ pub(crate) fn complete_expr_path(
let missing_fields =
ctx.sema.record_literal_missing_fields(record_expr);
if !missing_fields.is_empty() {
- add_default_update(acc, ctx, ty);
+ add_default_update(acc, ctx, ty.as_ref());
}
}
};
diff --git a/crates/ide-completion/src/completions/extern_crate.rs b/crates/ide-completion/src/completions/extern_crate.rs
index 71a3e4eb4e..91202e8b32 100644
--- a/crates/ide-completion/src/completions/extern_crate.rs
+++ b/crates/ide-completion/src/completions/extern_crate.rs
@@ -17,7 +17,7 @@ pub(crate) fn complete_extern_crate(acc: &mut Completions, ctx: &CompletionConte
}
let mut item = CompletionItem::new(
- CompletionItemKind::SymbolKind(SymbolKind::Module),
+ CompletionItemKind::SymbolKind(SymbolKind::CrateRoot),
ctx.source_range(),
name.display_no_db(ctx.edition).to_smolstr(),
ctx.edition,
@@ -48,7 +48,7 @@ mod other_mod {}
let completion_list = completion_list_no_kw(case);
- assert_eq!("md other_crate_a\n".to_owned(), completion_list);
+ assert_eq!("cr other_crate_a\n".to_owned(), completion_list);
}
#[test]
@@ -68,6 +68,6 @@ mod other_mod {}
let completion_list = completion_list_no_kw(case);
- assert_eq!("md other_crate_a\n".to_owned(), completion_list);
+ assert_eq!("cr other_crate_a\n".to_owned(), completion_list);
}
}
diff --git a/crates/ide-completion/src/completions/record.rs b/crates/ide-completion/src/completions/record.rs
index c5bfdcb8b7..12c564af5c 100644
--- a/crates/ide-completion/src/completions/record.rs
+++ b/crates/ide-completion/src/completions/record.rs
@@ -36,7 +36,7 @@ pub(crate) fn complete_record_pattern_fields(
true => return,
}
}
- _ => ctx.sema.record_pattern_missing_fields(record_pat),
+ _ => ctx.sema.record_pattern_matched_fields(record_pat),
};
complete_fields(acc, ctx, missing_fields);
}
@@ -69,14 +69,14 @@ pub(crate) fn complete_record_expr_fields(
}
}
_ => {
- let missing_fields = ctx.sema.record_literal_missing_fields(record_expr);
+ let suggest_fields = ctx.sema.record_literal_matched_fields(record_expr);
let update_exists = record_expr
.record_expr_field_list()
.is_some_and(|list| list.dotdot_token().is_some());
- if !missing_fields.is_empty() && !update_exists {
+ if !suggest_fields.is_empty() && !update_exists {
cov_mark::hit!(functional_update_field);
- add_default_update(acc, ctx, ty);
+ add_default_update(acc, ctx, ty.as_ref());
}
if dot_prefix {
cov_mark::hit!(functional_update_one_dot);
@@ -90,7 +90,7 @@ pub(crate) fn complete_record_expr_fields(
item.add_to(acc, ctx.db);
return;
}
- missing_fields
+ suggest_fields
}
};
complete_fields(acc, ctx, missing_fields);
@@ -99,11 +99,11 @@ pub(crate) fn complete_record_expr_fields(
pub(crate) fn add_default_update(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- ty: Option<hir::TypeInfo<'_>>,
+ ty: Option<&hir::TypeInfo<'_>>,
) {
let default_trait = ctx.famous_defs().core_default_Default();
let impls_default_trait = default_trait
- .zip(ty.as_ref())
+ .zip(ty)
.is_some_and(|(default_trait, ty)| ty.original.impls_trait(ctx.db, default_trait, &[]));
if impls_default_trait {
// FIXME: This should make use of scope_def like completions so we get all the other goodies
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index d116f665ad..cab8bced88 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -628,7 +628,7 @@ impl CompletionContext<'_> {
}
/// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items and
- /// passes all doc-aliases along, to funnel it into [`Completions::add_path_resolution`].
+ /// passes all doc-aliases along, to funnel it into `Completions::add_path_resolution`.
pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef, Vec<SmolStr>)) {
let _p = tracing::info_span!("CompletionContext::process_all_names").entered();
self.scope.process_all_names(&mut |name, def| {
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index 65bae5b66e..1c8bc656ca 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -1250,6 +1250,11 @@ fn classify_name_ref<'db>(
let original = ast::Const::cast(name.syntax().parent()?)?;
TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
},
+ ast::Static(it) => {
+ let name = find_opt_node_in_file(original_file, it.name())?;
+ let original = ast::Static::cast(name.syntax().parent()?)?;
+ TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
+ },
ast::RetType(it) => {
it.thin_arrow_token()?;
let parent = match ast::Fn::cast(parent.parent()?) {
@@ -1305,14 +1310,14 @@ fn classify_name_ref<'db>(
let make_path_kind_expr = |expr: ast::Expr| {
let it = expr.syntax();
+ let prev_token = iter::successors(it.first_token(), |it| it.prev_token())
+ .skip(1)
+ .find(|it| !it.kind().is_trivia());
let in_block_expr = is_in_block(it);
let (in_loop_body, innermost_breakable) = is_in_breakable(it).unzip();
let after_if_expr = is_after_if_expr(it.clone());
- let ref_expr_parent =
- path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
- let after_amp = non_trivia_sibling(it.clone().into(), Direction::Prev)
- .map(|it| it.kind() == SyntaxKind::AMP)
- .unwrap_or(false);
+ let after_amp = prev_token.as_ref().is_some_and(|it| it.kind() == SyntaxKind::AMP);
+ let ref_expr_parent = prev_token.and_then(|it| it.parent()).and_then(ast::RefExpr::cast);
let (innermost_ret_ty, self_param) = {
let find_ret_ty = |it: SyntaxNode| {
if let Some(item) = ast::Item::cast(it.clone()) {
@@ -2030,9 +2035,10 @@ fn is_after_if_expr(node: SyntaxNode) -> bool {
Some(stmt) => stmt.syntax().clone(),
None => node,
};
- let prev_sibling =
- non_trivia_sibling(node.into(), Direction::Prev).and_then(NodeOrToken::into_node);
- iter::successors(prev_sibling, |it| it.last_child_or_token()?.into_node())
+ let Some(prev_token) = previous_non_trivia_token(node) else { return false };
+ prev_token
+ .parent_ancestors()
+ .take_while(|it| it.text_range().end() == prev_token.text_range().end())
.find_map(ast::IfExpr::cast)
.is_some()
}
diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs
index 71d32da747..1a9139d855 100644
--- a/crates/ide-completion/src/item.rs
+++ b/crates/ide-completion/src/item.rs
@@ -381,6 +381,7 @@ impl CompletionItemKind {
SymbolKind::BuiltinAttr => "ba",
SymbolKind::Const => "ct",
SymbolKind::ConstParam => "cp",
+ SymbolKind::CrateRoot => "cr",
SymbolKind::Derive => "de",
SymbolKind::DeriveHelper => "dh",
SymbolKind::Enum => "en",
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index 78f003dd21..df39591a33 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -706,7 +706,30 @@ fn completes_after_ref_expr() {
kw while
kw while let
"#]],
- )
+ );
+ check(
+ r#"fn main() { let _ = &$0x.foo() }"#,
+ expect![[r#"
+ fn main() fn()
+ bt u32 u32
+ kw const
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw mut
+ kw raw
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ );
}
#[test]
@@ -2159,6 +2182,32 @@ fn foo() { match () { () => if foo {} $0, _ => (), } }
kw ref
"#]],
);
+ check(
+ r#"
+fn foo() -> (i32, i32) { if foo {} el$0 (2, 3) }
+"#,
+ expect![[r#"
+ fn foo fn() -> (i32, i32)
+ bt u32 u32
+ kw const
+ kw crate::
+ kw else
+ kw else if
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ ex foo()
+ "#]],
+ );
// FIXME: support else completion after ast::RecordExprField
}
diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs
index 797df3f163..d7db896679 100644
--- a/crates/ide-completion/src/tests/flyimport.rs
+++ b/crates/ide-completion/src/tests/flyimport.rs
@@ -1976,3 +1976,51 @@ fn main() {
"#]],
);
}
+
+#[test]
+fn trait_method_import_across_multiple_crates() {
+ let fixture = r#"
+ //- /lib.rs crate:test-trait
+ pub trait TestTrait {
+ fn test_function(&self) -> u32;
+ }
+
+ //- /lib.rs crate:test-implementation deps:test-trait
+ pub struct TestStruct(pub usize);
+
+ impl test_trait::TestTrait for TestStruct {
+ fn test_function(&self) -> u32 {
+ 1
+ }
+ }
+
+ //- /main.rs crate:main deps:test-implementation,test-trait
+ use test_implementation::TestStruct;
+
+ fn main() {
+ let test = TestStruct(42);
+ test.test_f$0
+ }
+ "#;
+
+ check(
+ fixture,
+ expect![[r#"
+ me test_function() (use test_trait::TestTrait) fn(&self) -> u32
+ "#]],
+ );
+
+ check_edit(
+ "test_function",
+ fixture,
+ r#"
+use test_implementation::TestStruct;
+use test_trait::TestTrait;
+
+fn main() {
+ let test = TestStruct(42);
+ test.test_function()$0
+}
+"#,
+ );
+}
diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs
index d9be6556fa..045b2d03b0 100644
--- a/crates/ide-completion/src/tests/record.rs
+++ b/crates/ide-completion/src/tests/record.rs
@@ -287,6 +287,24 @@ fn main() {
}
#[test]
+fn functional_update_fields_completion() {
+ // Complete fields before functional update `..`
+ check(
+ r#"
+struct Point { x: i32 = 0, y: i32 = 0 }
+
+fn main() {
+ let p = Point { $0, .. };
+}
+"#,
+ expect![[r#"
+ fd x i32
+ fd y i32
+ "#]],
+ );
+}
+
+#[test]
fn empty_union_literal() {
check(
r#"
@@ -302,7 +320,27 @@ fn foo() {
fd bar f32
fd foo u32
"#]],
- )
+ );
+}
+
+#[test]
+fn record_pattern_field_with_rest_pat() {
+ // When .. is present, complete all unspecified fields (even those with default values)
+ check(
+ r#"
+struct UserInfo { id: i32, age: f32, email: u64 }
+
+fn foo(u1: UserInfo) {
+ let UserInfo { id, $0, .. } = u1;
+}
+"#,
+ expect![[r#"
+ fd age f32
+ fd email u64
+ kw mut
+ kw ref
+ "#]],
+ );
}
#[test]
diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs
index 3bbba18c2b..7c6b7370aa 100644
--- a/crates/ide-completion/src/tests/type_pos.rs
+++ b/crates/ide-completion/src/tests/type_pos.rs
@@ -184,6 +184,35 @@ const FOO: $0 = Foo(2);
}
#[test]
+fn inferred_type_static() {
+ check_with_base_items(
+ r#"
+struct Foo<T>(T);
+static FOO: $0 = Foo(2);
+"#,
+ expect![[r#"
+ en Enum Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo<…> Foo<{unknown}>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
+ tt Trait
+ un Union Union
+ bt u32 u32
+ it Foo<i32>
+ kw crate::
+ kw dyn
+ kw fn
+ kw for
+ kw impl
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
fn inferred_type_closure_param() {
check_with_base_items(
r#"
diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs
index 90e3bb61f4..35579eb259 100644
--- a/crates/ide-db/src/imports/import_assets.rs
+++ b/crates/ide-db/src/imports/import_assets.rs
@@ -600,7 +600,19 @@ fn trait_applicable_items<'db>(
}
deref_chain
.into_iter()
- .filter_map(|ty| Some((ty.krate(db).into(), ty.fingerprint_for_trait_impl()?)))
+ .flat_map(|ty| {
+ let fingerprint = ty.fingerprint_for_trait_impl()?;
+ let mut crates = vec![];
+
+ if let Some(adt) = ty.as_adt() {
+ // Push crate where ADT was defined
+ crates.push((adt.krate(db).into(), fingerprint));
+ }
+ // Always include environment crate
+ crates.push((ty.krate(db).into(), fingerprint));
+ Some(crates)
+ })
+ .flatten()
.unique()
.collect::<Vec<_>>()
};
diff --git a/crates/ide-db/src/items_locator.rs b/crates/ide-db/src/items_locator.rs
index 0d305530d9..af0c69c685 100644
--- a/crates/ide-db/src/items_locator.rs
+++ b/crates/ide-db/src/items_locator.rs
@@ -110,7 +110,7 @@ pub fn items_with_name_in_module<T>(
local_query
}
};
- local_query.search(&[SymbolIndex::module_symbols(db, module)], |local_candidate| {
+ local_query.search(db, &[SymbolIndex::module_symbols(db, module)], |local_candidate| {
cb(match local_candidate.def {
hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
def => ItemInNs::from(def),
@@ -140,7 +140,7 @@ fn find_items(
// Query the local crate using the symbol index.
let mut local_results = Vec::new();
- local_query.search(&symbol_index::crate_symbols(db, krate), |local_candidate| {
+ local_query.search(db, &symbol_index::crate_symbols(db, krate), |local_candidate| {
let def = match local_candidate.def {
hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
def => ItemInNs::from(def),
diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs
index 413b58bf79..023b32b361 100644
--- a/crates/ide-db/src/lib.rs
+++ b/crates/ide-db/src/lib.rs
@@ -65,7 +65,7 @@ use base_db::{
};
use hir::{
FilePositionWrapper, FileRangeWrapper,
- db::{DefDatabase, ExpandDatabase},
+ db::{DefDatabase, ExpandDatabase, HirDatabase},
};
use triomphe::Arc;
@@ -269,6 +269,7 @@ pub enum SymbolKind {
BuiltinAttr,
Const,
ConstParam,
+ CrateRoot,
Derive,
DeriveHelper,
Enum,
@@ -307,14 +308,15 @@ impl From<hir::MacroKind> for SymbolKind {
}
}
-impl From<hir::ModuleDef> for SymbolKind {
- fn from(it: hir::ModuleDef) -> Self {
+impl SymbolKind {
+ pub fn from_module_def(db: &dyn HirDatabase, it: hir::ModuleDef) -> Self {
match it {
hir::ModuleDef::Const(..) => SymbolKind::Const,
hir::ModuleDef::Variant(..) => SymbolKind::Variant,
hir::ModuleDef::Function(..) => SymbolKind::Function,
hir::ModuleDef::Macro(mac) if mac.is_proc_macro() => SymbolKind::ProcMacro,
hir::ModuleDef::Macro(..) => SymbolKind::Macro,
+ hir::ModuleDef::Module(m) if m.is_crate_root(db) => SymbolKind::CrateRoot,
hir::ModuleDef::Module(..) => SymbolKind::Module,
hir::ModuleDef::Static(..) => SymbolKind::Static,
hir::ModuleDef::Adt(hir::Adt::Struct(..)) => SymbolKind::Struct,
diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs
index eb0529d6b5..183f6b6495 100644
--- a/crates/ide-db/src/symbol_index.rs
+++ b/crates/ide-db/src/symbol_index.rs
@@ -27,7 +27,7 @@ use std::{
ops::ControlFlow,
};
-use base_db::{LibraryRoots, LocalRoots, RootQueryDb, SourceRootId};
+use base_db::{CrateOrigin, LangCrateOrigin, LibraryRoots, LocalRoots, RootQueryDb, SourceRootId};
use fst::{Automaton, Streamer, raw::IndexedValue};
use hir::{
Crate, Module,
@@ -35,29 +35,84 @@ use hir::{
import_map::{AssocSearchMode, SearchMode},
symbols::{FileSymbol, SymbolCollector},
};
+use itertools::Itertools;
use rayon::prelude::*;
use salsa::Update;
use crate::RootDatabase;
+/// A query for searching symbols in the workspace or dependencies.
+///
+/// This struct configures how symbol search is performed, including the search text,
+/// matching strategy, and filtering options. It is used by [`world_symbols`] to find
+/// symbols across the codebase.
+///
+/// # Example
+/// ```ignore
+/// let mut query = Query::new("MyStruct".to_string());
+/// query.only_types(); // Only search for type definitions
+/// query.libs(); // Include library dependencies
+/// query.exact(); // Use exact matching instead of fuzzy
+/// ```
#[derive(Debug, Clone)]
pub struct Query {
+ /// The item name to search for (last segment of the path, or full query if no path).
+ /// When empty with a non-empty `path_filter`, returns all items in that module.
query: String,
+ /// Lowercase version of [`Self::query`], pre-computed for efficiency.
+ /// Used to build FST automata for case-insensitive index lookups.
lowercased: String,
+ /// Path segments to filter by (all segments except the last).
+ /// Empty if no `::` in the original query.
+ path_filter: Vec<String>,
+ /// If true, the first path segment must be a crate name (query started with `::`).
+ anchor_to_crate: bool,
+ /// The search strategy to use when matching symbols.
+ /// - [`SearchMode::Exact`]: Symbol name must exactly match the query.
+ /// - [`SearchMode::Fuzzy`]: Symbol name must contain all query characters in order (subsequence match).
+ /// - [`SearchMode::Prefix`]: Symbol name must start with the query string.
+ ///
+ /// Defaults to [`SearchMode::Fuzzy`].
mode: SearchMode,
+ /// Controls filtering of trait-associated items (methods, constants, types).
+ /// - [`AssocSearchMode::Include`]: Include both associated and non-associated items.
+ /// - [`AssocSearchMode::Exclude`]: Exclude trait-associated items from results.
+ /// - [`AssocSearchMode::AssocItemsOnly`]: Only return trait-associated items.
+ ///
+ /// Defaults to [`AssocSearchMode::Include`].
assoc_mode: AssocSearchMode,
+ /// Whether the final symbol name comparison should be case-sensitive.
+ /// When `false`, matching is case-insensitive (e.g., "foo" matches "Foo").
+ ///
+ /// Defaults to `false`.
case_sensitive: bool,
+ /// When `true`, only return type definitions: structs, enums, unions,
+ /// type aliases, built-in types, and traits. Functions, constants, statics,
+ /// and modules are excluded.
+ ///
+ /// Defaults to `false`.
only_types: bool,
+ /// When `true`, search library dependency roots instead of local workspace crates.
+ /// This enables finding symbols in external dependencies including the standard library.
+ ///
+ /// Defaults to `false` (search local workspace only).
libs: bool,
+ /// When `true`, exclude re-exported/imported symbols from results,
+ /// showing only the original definitions.
+ ///
+ /// Defaults to `false`.
exclude_imports: bool,
}
impl Query {
pub fn new(query: String) -> Query {
- let lowercased = query.to_lowercase();
+ let (path_filter, item_query, anchor_to_crate) = Self::parse_path_query(&query);
+ let lowercased = item_query.to_lowercase();
Query {
- query,
+ query: item_query,
lowercased,
+ path_filter,
+ anchor_to_crate,
only_types: false,
libs: false,
mode: SearchMode::Fuzzy,
@@ -67,6 +122,35 @@ impl Query {
}
}
+ /// Parse a query string that may contain path segments.
+ ///
+ /// Returns (path_filter, item_query, anchor_to_crate) where:
+ /// - `path_filter`: Path segments to match (all but the last segment)
+ /// - `item_query`: The item name to search for (last segment)
+ /// - `anchor_to_crate`: Whether the first segment must be a crate name
+ fn parse_path_query(query: &str) -> (Vec<String>, String, bool) {
+ // Check for leading :: (absolute path / crate search)
+ let (query, anchor_to_crate) = match query.strip_prefix("::") {
+ Some(q) => (q, true),
+ None => (query, false),
+ };
+
+ let Some((prefix, query)) = query.rsplit_once("::") else {
+ return (vec![], query.to_owned(), anchor_to_crate);
+ };
+
+ let prefix: Vec<_> =
+ prefix.split("::").filter(|s| !s.is_empty()).map(ToOwned::to_owned).collect();
+
+ (prefix, query.to_owned(), anchor_to_crate)
+ }
+
+ /// Returns true if this query is searching for crates
+ /// (i.e., the query was "::" alone or "::foo" for fuzzy crate search)
+ fn is_crate_search(&self) -> bool {
+ self.anchor_to_crate && self.path_filter.is_empty()
+ }
+
pub fn only_types(&mut self) {
self.only_types = true;
}
@@ -123,19 +207,41 @@ pub fn crate_symbols(db: &dyn HirDatabase, krate: Crate) -> Box<[&SymbolIndex<'_
// That is, `#` switches from "types" to all symbols, `*` switches from the current
// workspace to dependencies.
//
-// Note that filtering does not currently work in VSCode due to the editor never
-// sending the special symbols to the language server. Instead, you can configure
-// the filtering via the `rust-analyzer.workspace.symbol.search.scope` and
-// `rust-analyzer.workspace.symbol.search.kind` settings. Symbols prefixed
-// with `__` are hidden from the search results unless configured otherwise.
+// This also supports general Rust path syntax with the usual rules.
+//
+// Note that paths do not currently work in VSCode due to the editor never
+// sending the special symbols to the language server. Some other editors might not support the # or
+// * search either; instead, you can configure the filtering via the
+// `rust-analyzer.workspace.symbol.search.scope` and `rust-analyzer.workspace.symbol.search.kind`
+// settings. Symbols prefixed with `__` are hidden from the search results unless configured
+// otherwise.
//
// | Editor | Shortcut |
// |---------|-----------|
// | VS Code | <kbd>Ctrl+T</kbd>
-pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol<'_>> {
+pub fn world_symbols(db: &RootDatabase, mut query: Query) -> Vec<FileSymbol<'_>> {
let _p = tracing::info_span!("world_symbols", query = ?query.query).entered();
- let indices: Vec<_> = if query.libs {
+ // Search for crates by name (handles "::" and "::foo" queries)
+ let indices: Vec<_> = if query.is_crate_search() {
+ query.only_types = false;
+ vec![SymbolIndex::extern_prelude_symbols(db)]
+ // If we have a path filter, resolve it to target modules
+ } else if !query.path_filter.is_empty() {
+ query.only_types = false;
+ let target_modules = resolve_path_to_modules(
+ db,
+ &query.path_filter,
+ query.anchor_to_crate,
+ query.case_sensitive,
+ );
+
+ if target_modules.is_empty() {
+ return vec![];
+ }
+
+ target_modules.iter().map(|&module| SymbolIndex::module_symbols(db, module)).collect()
+ } else if query.libs {
LibraryRoots::get(db)
.roots(db)
.par_iter()
@@ -154,17 +260,103 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol<'_>> {
crates
.par_iter()
.for_each_with(db.clone(), |snap, &krate| _ = crate_symbols(snap, krate.into()));
- crates.into_iter().flat_map(|krate| Vec::from(crate_symbols(db, krate.into()))).collect()
+ crates
+ .into_iter()
+ .flat_map(|krate| Vec::from(crate_symbols(db, krate.into())))
+ .chain(std::iter::once(SymbolIndex::extern_prelude_symbols(db)))
+ .collect()
};
let mut res = vec![];
- query.search::<()>(&indices, |f| {
+
+ // Normal search: use FST to match item name
+ query.search::<()>(db, &indices, |f| {
res.push(f.clone());
ControlFlow::Continue(())
});
+
res
}
+/// Resolve a path filter to the target module(s) it points to.
+/// Returns the modules whose symbol indices should be searched.
+///
+/// The path_filter contains segments like ["std", "vec"] for a query like "std::vec::Vec".
+/// We resolve this by:
+/// 1. Finding crates matching the first segment
+/// 2. Walking down the module tree following subsequent segments
+fn resolve_path_to_modules(
+ db: &dyn HirDatabase,
+ path_filter: &[String],
+ anchor_to_crate: bool,
+ case_sensitive: bool,
+) -> Vec<Module> {
+ let [first_segment, rest_segments @ ..] = path_filter else {
+ return vec![];
+ };
+
+ // Helper for name comparison
+ let names_match = |actual: &str, expected: &str| -> bool {
+ if case_sensitive { actual == expected } else { actual.eq_ignore_ascii_case(expected) }
+ };
+
+ // Find crates matching the first segment
+ let matching_crates: Vec<Crate> = Crate::all(db)
+ .into_iter()
+ .filter(|krate| {
+ krate
+ .display_name(db)
+ .is_some_and(|name| names_match(name.crate_name().as_str(), first_segment))
+ })
+ .collect();
+
+ // If anchor_to_crate is true, first segment MUST be a crate name
+ // If anchor_to_crate is false, first segment could be a crate OR a module in local crates
+ let mut candidate_modules: Vec<(Module, bool)> = vec![];
+
+ // Add crate root modules for matching crates
+ for krate in matching_crates {
+ candidate_modules.push((krate.root_module(db), krate.origin(db).is_local()));
+ }
+
+ // If not anchored to crate, also search for modules matching first segment in local crates
+ if !anchor_to_crate {
+ for &root in LocalRoots::get(db).roots(db).iter() {
+ for &krate in db.source_root_crates(root).iter() {
+ let root_module = Crate::from(krate).root_module(db);
+ for child in root_module.children(db) {
+ if let Some(name) = child.name(db)
+ && names_match(name.as_str(), first_segment)
+ {
+ candidate_modules.push((child, true));
+ }
+ }
+ }
+ }
+ }
+
+ // Walk down the module tree for remaining path segments
+ for segment in rest_segments {
+ candidate_modules = candidate_modules
+ .into_iter()
+ .flat_map(|(module, local)| {
+ module
+ .modules_in_scope(db, !local)
+ .into_iter()
+ .filter(|(name, _)| names_match(name.as_str(), segment))
+ .map(move |(_, module)| (module, local))
+ })
+ .unique()
+ .collect();
+
+ if candidate_modules.is_empty() {
+ break;
+ }
+ }
+
+ candidate_modules.into_iter().map(|(module, _)| module).collect()
+}
+
#[derive(Default)]
pub struct SymbolIndex<'db> {
symbols: Box<[FileSymbol<'db>]>,
@@ -236,6 +428,39 @@ impl<'db> SymbolIndex<'db> {
module_symbols(db, InternedModuleId::new(db, hir::ModuleId::from(module)))
}
+
+ /// The symbol index for all extern prelude crates.
+ pub fn extern_prelude_symbols(db: &dyn HirDatabase) -> &SymbolIndex<'_> {
+ #[salsa::tracked(returns(ref))]
+ fn extern_prelude_symbols<'db>(db: &'db dyn HirDatabase) -> SymbolIndex<'db> {
+ let _p = tracing::info_span!("extern_prelude_symbols").entered();
+
+            // Callers may invoke this in parallel without the database attached, so we attach it here ourselves.
+ hir::attach_db(db, || {
+ let mut collector = SymbolCollector::new(db, false);
+
+ for krate in Crate::all(db) {
+ if krate
+ .display_name(db)
+ .is_none_or(|name| name.canonical_name().as_str() == "build-script-build")
+ {
+ continue;
+ }
+ if let CrateOrigin::Lang(LangCrateOrigin::Dependency | LangCrateOrigin::Other) =
+ krate.origin(db)
+ {
+ // don't show dependencies of the sysroot
+ continue;
+ }
+ collector.push_crate_root(krate);
+ }
+
+ SymbolIndex::new(collector.finish())
+ })
+ }
+
+ extern_prelude_symbols(db)
+ }
}
impl fmt::Debug for SymbolIndex<'_> {
@@ -336,12 +561,15 @@ impl<'db> SymbolIndex<'db> {
}
impl Query {
+ /// Search symbols in the given indices.
pub(crate) fn search<'db, T>(
- self,
+ &self,
+ db: &'db RootDatabase,
indices: &[&'db SymbolIndex<'db>],
cb: impl FnMut(&'db FileSymbol<'db>) -> ControlFlow<T>,
) -> Option<T> {
let _p = tracing::info_span!("symbol_index::Query::search").entered();
+
let mut op = fst::map::OpBuilder::new();
match self.mode {
SearchMode::Exact => {
@@ -350,7 +578,7 @@ impl Query {
for index in indices.iter() {
op = op.add(index.map.search(&automaton));
}
- self.search_maps(indices, op.union(), cb)
+ self.search_maps(db, indices, op.union(), cb)
}
SearchMode::Fuzzy => {
let automaton = fst::automaton::Subsequence::new(&self.lowercased);
@@ -358,7 +586,7 @@ impl Query {
for index in indices.iter() {
op = op.add(index.map.search(&automaton));
}
- self.search_maps(indices, op.union(), cb)
+ self.search_maps(db, indices, op.union(), cb)
}
SearchMode::Prefix => {
let automaton = fst::automaton::Str::new(&self.lowercased).starts_with();
@@ -366,13 +594,14 @@ impl Query {
for index in indices.iter() {
op = op.add(index.map.search(&automaton));
}
- self.search_maps(indices, op.union(), cb)
+ self.search_maps(db, indices, op.union(), cb)
}
}
}
fn search_maps<'db, T>(
&self,
+ db: &'db RootDatabase,
indices: &[&'db SymbolIndex<'db>],
mut stream: fst::map::Union<'_>,
mut cb: impl FnMut(&'db FileSymbol<'db>) -> ControlFlow<T>,
@@ -380,18 +609,21 @@ impl Query {
let ignore_underscore_prefixed = !self.query.starts_with("__");
while let Some((_, indexed_values)) = stream.next() {
for &IndexedValue { index, value } in indexed_values {
- let symbol_index = &indices[index];
+ let symbol_index = indices[index];
let (start, end) = SymbolIndex::map_value_to_range(value);
for symbol in &symbol_index.symbols[start..end] {
let non_type_for_type_only_query = self.only_types
- && !matches!(
+ && !(matches!(
symbol.def,
hir::ModuleDef::Adt(..)
| hir::ModuleDef::TypeAlias(..)
| hir::ModuleDef::BuiltinType(..)
| hir::ModuleDef::Trait(..)
- );
+ ) || matches!(
+ symbol.def,
+ hir::ModuleDef::Module(module) if module.is_crate_root(db)
+ ));
if non_type_for_type_only_query || !self.matches_assoc_mode(symbol.is_assoc) {
continue;
}
@@ -576,4 +808,416 @@ pub struct Foo;
let symbols = world_symbols(&db, query);
expect_file!["./test_data/test_symbols_exclude_imports.txt"].assert_debug_eq(&symbols);
}
+
+ #[test]
+ fn test_parse_path_query() {
+ // Plain query - no path
+ let (path, item, anchor) = Query::parse_path_query("Item");
+ assert_eq!(path, Vec::<String>::new());
+ assert_eq!(item, "Item");
+ assert!(!anchor);
+
+ // Path with item
+ let (path, item, anchor) = Query::parse_path_query("foo::Item");
+ assert_eq!(path, vec!["foo"]);
+ assert_eq!(item, "Item");
+ assert!(!anchor);
+
+ // Multi-segment path
+ let (path, item, anchor) = Query::parse_path_query("foo::bar::Item");
+ assert_eq!(path, vec!["foo", "bar"]);
+ assert_eq!(item, "Item");
+ assert!(!anchor);
+
+ // Leading :: (anchor to crate)
+ let (path, item, anchor) = Query::parse_path_query("::std::vec::Vec");
+ assert_eq!(path, vec!["std", "vec"]);
+ assert_eq!(item, "Vec");
+ assert!(anchor);
+
+ // Just "::" - return all crates
+ let (path, item, anchor) = Query::parse_path_query("::");
+ assert_eq!(path, Vec::<String>::new());
+ assert_eq!(item, "");
+ assert!(anchor);
+
+ // "::foo" - fuzzy search crate names
+ let (path, item, anchor) = Query::parse_path_query("::foo");
+ assert_eq!(path, Vec::<String>::new());
+ assert_eq!(item, "foo");
+ assert!(anchor);
+
+ // Trailing ::
+ let (path, item, anchor) = Query::parse_path_query("foo::");
+ assert_eq!(path, vec!["foo"]);
+ assert_eq!(item, "");
+ assert!(!anchor);
+
+ // Full path with trailing ::
+ let (path, item, anchor) = Query::parse_path_query("foo::bar::");
+ assert_eq!(path, vec!["foo", "bar"]);
+ assert_eq!(item, "");
+ assert!(!anchor);
+
+ // Absolute path with trailing ::
+ let (path, item, anchor) = Query::parse_path_query("::std::vec::");
+ assert_eq!(path, vec!["std", "vec"]);
+ assert_eq!(item, "");
+ assert!(anchor);
+
+ // Empty segments should be filtered
+ let (path, item, anchor) = Query::parse_path_query("foo::::bar");
+ assert_eq!(path, vec!["foo"]);
+ assert_eq!(item, "bar");
+ assert!(!anchor);
+ }
+
+ #[test]
+ fn test_path_search() {
+ let (mut db, _) = RootDatabase::with_many_files(
+ r#"
+//- /lib.rs crate:main
+mod inner;
+pub struct RootStruct;
+
+//- /inner.rs
+pub struct InnerStruct;
+pub mod nested {
+ pub struct NestedStruct;
+}
+"#,
+ );
+
+ let mut local_roots = FxHashSet::default();
+ local_roots.insert(WORKSPACE);
+ LocalRoots::get(&db).set_roots(&mut db).to(local_roots);
+
+ // Search for item in specific module
+ let query = Query::new("inner::InnerStruct".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+ assert!(names.contains(&"InnerStruct"), "Expected InnerStruct in {:?}", names);
+
+ // Search for item in nested module
+ let query = Query::new("inner::nested::NestedStruct".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+ assert!(names.contains(&"NestedStruct"), "Expected NestedStruct in {:?}", names);
+
+ // Search with crate prefix
+ let query = Query::new("main::inner::InnerStruct".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+ assert!(names.contains(&"InnerStruct"), "Expected InnerStruct in {:?}", names);
+
+ // Wrong path should return empty
+ let query = Query::new("wrong::InnerStruct".to_owned());
+ let symbols = world_symbols(&db, query);
+ assert!(symbols.is_empty(), "Expected empty results for wrong path");
+ }
+
+ #[test]
+ fn test_path_search_module() {
+ let (mut db, _) = RootDatabase::with_many_files(
+ r#"
+//- /lib.rs crate:main
+mod mymod;
+
+//- /mymod.rs
+pub struct MyStruct;
+pub fn my_func() {}
+pub const MY_CONST: u32 = 1;
+"#,
+ );
+
+ let mut local_roots = FxHashSet::default();
+ local_roots.insert(WORKSPACE);
+ LocalRoots::get(&db).set_roots(&mut db).to(local_roots);
+
+ // Browse all items in module
+ let query = Query::new("main::mymod::".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+
+ assert!(names.contains(&"MyStruct"), "Expected MyStruct in {:?}", names);
+ assert!(names.contains(&"my_func"), "Expected my_func in {:?}", names);
+ assert!(names.contains(&"MY_CONST"), "Expected MY_CONST in {:?}", names);
+ }
+
+ #[test]
+ fn test_fuzzy_item_with_path() {
+ let (mut db, _) = RootDatabase::with_many_files(
+ r#"
+//- /lib.rs crate:main
+mod mymod;
+
+//- /mymod.rs
+pub struct MyLongStructName;
+"#,
+ );
+
+ let mut local_roots = FxHashSet::default();
+ local_roots.insert(WORKSPACE);
+ LocalRoots::get(&db).set_roots(&mut db).to(local_roots);
+
+ // Fuzzy match on item name with exact path
+ let query = Query::new("main::mymod::MyLong".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+ assert!(
+ names.contains(&"MyLongStructName"),
+ "Expected fuzzy match for MyLongStructName in {:?}",
+ names
+ );
+ }
+
+ #[test]
+ fn test_case_insensitive_path() {
+ let (mut db, _) = RootDatabase::with_many_files(
+ r#"
+//- /lib.rs crate:main
+mod MyMod;
+
+//- /MyMod.rs
+pub struct MyStruct;
+"#,
+ );
+
+ let mut local_roots = FxHashSet::default();
+ local_roots.insert(WORKSPACE);
+ LocalRoots::get(&db).set_roots(&mut db).to(local_roots);
+
+ // Case insensitive path matching (default)
+ let query = Query::new("main::mymod::MyStruct".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+ assert!(names.contains(&"MyStruct"), "Expected case-insensitive match in {:?}", names);
+ }
+
+ #[test]
+ fn test_absolute_path_search() {
+ let (mut db, _) = RootDatabase::with_many_files(
+ r#"
+//- /lib.rs crate:mycrate
+mod inner;
+pub struct CrateRoot;
+
+//- /inner.rs
+pub struct InnerItem;
+"#,
+ );
+
+ let mut local_roots = FxHashSet::default();
+ local_roots.insert(WORKSPACE);
+ LocalRoots::get(&db).set_roots(&mut db).to(local_roots);
+
+ // Absolute path with leading ::
+ let query = Query::new("::mycrate::inner::InnerItem".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+ assert!(
+ names.contains(&"InnerItem"),
+ "Expected InnerItem with absolute path in {:?}",
+ names
+ );
+
+ // Absolute path should NOT match if crate name is wrong
+ let query = Query::new("::wrongcrate::inner::InnerItem".to_owned());
+ let symbols = world_symbols(&db, query);
+ assert!(symbols.is_empty(), "Expected empty results for wrong crate name");
+ }
+
+ #[test]
+ fn test_wrong_path_returns_empty() {
+ let (mut db, _) = RootDatabase::with_many_files(
+ r#"
+//- /lib.rs crate:main
+mod existing;
+
+//- /existing.rs
+pub struct MyStruct;
+"#,
+ );
+
+ let mut local_roots = FxHashSet::default();
+ local_roots.insert(WORKSPACE);
+ LocalRoots::get(&db).set_roots(&mut db).to(local_roots);
+
+ // Non-existent module path
+ let query = Query::new("nonexistent::MyStruct".to_owned());
+ let symbols = world_symbols(&db, query);
+ assert!(symbols.is_empty(), "Expected empty results for non-existent path");
+
+ // Correct item, wrong module
+ let query = Query::new("wrongmod::MyStruct".to_owned());
+ let symbols = world_symbols(&db, query);
+ assert!(symbols.is_empty(), "Expected empty results for wrong module");
+ }
+
+ #[test]
+ fn test_root_module_items() {
+ let (mut db, _) = RootDatabase::with_many_files(
+ r#"
+//- /lib.rs crate:mylib
+pub struct RootItem;
+pub fn root_fn() {}
+"#,
+ );
+
+ let mut local_roots = FxHashSet::default();
+ local_roots.insert(WORKSPACE);
+ LocalRoots::get(&db).set_roots(&mut db).to(local_roots);
+
+ // Items at crate root - path is just the crate name
+ let query = Query::new("mylib::RootItem".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+ assert!(names.contains(&"RootItem"), "Expected RootItem at crate root in {:?}", names);
+
+ let query = Query::new("mylib::".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+ assert!(names.contains(&"RootItem"), "Expected RootItem {:?}", names);
+ assert!(names.contains(&"root_fn"), "Expected root_fn {:?}", names);
+ }
+
+ #[test]
+ fn test_crate_search_all() {
+ // Test that sole "::" returns all crates
+ let (mut db, _) = RootDatabase::with_many_files(
+ r#"
+//- /lib.rs crate:alpha
+pub struct AlphaStruct;
+
+//- /beta.rs crate:beta
+pub struct BetaStruct;
+
+//- /gamma.rs crate:gamma
+pub struct GammaStruct;
+"#,
+ );
+
+ let mut local_roots = FxHashSet::default();
+ local_roots.insert(WORKSPACE);
+ LocalRoots::get(&db).set_roots(&mut db).to(local_roots);
+
+ // Sole "::" should return all crates (as module symbols)
+ let query = Query::new("::".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+
+ assert!(names.contains(&"alpha"), "Expected alpha crate in {:?}", names);
+ assert!(names.contains(&"beta"), "Expected beta crate in {:?}", names);
+ assert!(names.contains(&"gamma"), "Expected gamma crate in {:?}", names);
+ assert_eq!(symbols.len(), 3, "Expected exactly 3 crates, got {:?}", names);
+ }
+
+ #[test]
+ fn test_crate_search_fuzzy() {
+ // Test that "::foo" fuzzy-matches crate names
+ let (mut db, _) = RootDatabase::with_many_files(
+ r#"
+//- /lib.rs crate:my_awesome_lib
+pub struct AwesomeStruct;
+
+//- /other.rs crate:another_lib
+pub struct OtherStruct;
+
+//- /foo.rs crate:foobar
+pub struct FooStruct;
+"#,
+ );
+
+ let mut local_roots = FxHashSet::default();
+ local_roots.insert(WORKSPACE);
+ LocalRoots::get(&db).set_roots(&mut db).to(local_roots);
+
+ // "::foo" should fuzzy-match crate names containing "foo"
+ let query = Query::new("::foo".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+
+ assert!(names.contains(&"foobar"), "Expected foobar crate in {:?}", names);
+ assert_eq!(symbols.len(), 1, "Expected only foobar crate, got {:?}", names);
+
+ // "::awesome" should match my_awesome_lib
+ let query = Query::new("::awesome".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+
+ assert!(names.contains(&"my_awesome_lib"), "Expected my_awesome_lib crate in {:?}", names);
+ assert_eq!(symbols.len(), 1, "Expected only my_awesome_lib crate, got {:?}", names);
+
+ // "::lib" should match multiple crates
+ let query = Query::new("::lib".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+
+ assert!(names.contains(&"my_awesome_lib"), "Expected my_awesome_lib in {:?}", names);
+ assert!(names.contains(&"another_lib"), "Expected another_lib in {:?}", names);
+ assert_eq!(symbols.len(), 2, "Expected 2 crates matching 'lib', got {:?}", names);
+
+ // "::nonexistent" should return empty
+ let query = Query::new("::nonexistent".to_owned());
+ let symbols = world_symbols(&db, query);
+ assert!(symbols.is_empty(), "Expected empty results for non-matching crate pattern");
+ }
+
+ #[test]
+ fn test_path_search_with_use_reexport() {
+ // Test that module resolution works for `use` items (re-exports), not just `mod` items
+ let (mut db, _) = RootDatabase::with_many_files(
+ r#"
+//- /lib.rs crate:main
+mod inner;
+pub use inner::nested;
+
+//- /inner.rs
+pub mod nested {
+ pub struct NestedStruct;
+ pub fn nested_fn() {}
+}
+"#,
+ );
+
+ let mut local_roots = FxHashSet::default();
+ local_roots.insert(WORKSPACE);
+ LocalRoots::get(&db).set_roots(&mut db).to(local_roots);
+
+ // Search via the re-exported path (main::nested::NestedStruct)
+ // This should work because `nested` is in scope via `pub use inner::nested`
+ let query = Query::new("main::nested::NestedStruct".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+ assert!(
+ names.contains(&"NestedStruct"),
+ "Expected NestedStruct via re-exported path in {:?}",
+ names
+ );
+
+ // Also verify the original path still works
+ let query = Query::new("main::inner::nested::NestedStruct".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+ assert!(
+ names.contains(&"NestedStruct"),
+ "Expected NestedStruct via original path in {:?}",
+ names
+ );
+
+ // Browse the re-exported module
+ let query = Query::new("main::nested::".to_owned());
+ let symbols = world_symbols(&db, query);
+ let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect();
+ assert!(
+ names.contains(&"NestedStruct"),
+ "Expected NestedStruct when browsing re-exported module in {:?}",
+ names
+ );
+ assert!(
+ names.contains(&"nested_fn"),
+ "Expected nested_fn when browsing re-exported module in {:?}",
+ names
+ );
+ }
}
diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt
index 5783d97564..0c28c312f8 100644
--- a/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -27,11 +27,13 @@
kind: STRUCT,
range: 83..119,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 109..118,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 109..118,
+ },
+ ),
),
},
container_name: None,
@@ -62,11 +64,13 @@
kind: STRUCT,
range: 0..81,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
),
},
container_name: None,
@@ -97,11 +101,13 @@
kind: STRUCT,
range: 0..81,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
),
},
container_name: None,
@@ -132,11 +138,13 @@
kind: STRUCT,
range: 0..81,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
),
},
container_name: None,
@@ -167,11 +175,13 @@
kind: STRUCT,
range: 0..81,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
),
},
container_name: None,
@@ -202,11 +212,13 @@
kind: STRUCT,
range: 83..119,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 109..118,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 109..118,
+ },
+ ),
),
},
container_name: None,
@@ -237,11 +249,13 @@
kind: STRUCT,
range: 0..81,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
),
},
container_name: None,
diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index 7692a7d61a..4b588572d3 100644
--- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -25,11 +25,13 @@
kind: VARIANT,
range: 201..202,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 201..202,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 201..202,
+ },
+ ),
),
},
container_name: Some(
@@ -60,11 +62,13 @@
kind: TYPE_ALIAS,
range: 470..490,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 475..480,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 475..480,
+ },
+ ),
),
},
container_name: None,
@@ -93,11 +97,13 @@
kind: VARIANT,
range: 204..205,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 204..205,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 204..205,
+ },
+ ),
),
},
container_name: Some(
@@ -128,11 +134,13 @@
kind: CONST,
range: 413..434,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 419..424,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 419..424,
+ },
+ ),
),
},
container_name: None,
@@ -161,11 +169,13 @@
kind: CONST,
range: 593..665,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 599..615,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 599..615,
+ },
+ ),
),
},
container_name: None,
@@ -196,11 +206,13 @@
kind: ENUM,
range: 185..207,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 190..194,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 190..194,
+ },
+ ),
),
},
container_name: None,
@@ -231,11 +243,13 @@
kind: USE_TREE,
range: 727..749,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 736..749,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 736..749,
+ },
+ ),
),
},
container_name: None,
@@ -266,11 +280,13 @@
kind: MACRO_DEF,
range: 153..168,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 159..164,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 159..164,
+ },
+ ),
),
},
container_name: None,
@@ -299,11 +315,13 @@
kind: STATIC,
range: 435..469,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 442..448,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 442..448,
+ },
+ ),
),
},
container_name: None,
@@ -334,11 +352,13 @@
kind: STRUCT,
range: 170..184,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 177..183,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 177..183,
+ },
+ ),
),
},
container_name: None,
@@ -369,11 +389,13 @@
kind: STRUCT,
range: 0..22,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 6..21,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 6..21,
+ },
+ ),
),
},
container_name: None,
@@ -404,11 +426,13 @@
kind: STRUCT,
range: 391..409,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 398..408,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 398..408,
+ },
+ ),
),
},
container_name: Some(
@@ -441,11 +465,13 @@
kind: STRUCT,
range: 628..654,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 635..653,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 635..653,
+ },
+ ),
),
},
container_name: Some(
@@ -478,11 +504,13 @@
kind: STRUCT,
range: 552..580,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 559..579,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 559..579,
+ },
+ ),
),
},
container_name: None,
@@ -513,11 +541,13 @@
kind: STRUCT,
range: 261..279,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 268..275,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 268..275,
+ },
+ ),
),
},
container_name: None,
@@ -546,11 +576,13 @@
kind: TRAIT,
range: 334..373,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 340..345,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 340..345,
+ },
+ ),
),
},
container_name: None,
@@ -581,11 +613,13 @@
kind: USE_TREE,
range: 755..769,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 764..769,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 764..769,
+ },
+ ),
),
},
container_name: None,
@@ -616,11 +650,13 @@
kind: UNION,
range: 208..222,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 214..219,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 214..219,
+ },
+ ),
),
},
container_name: None,
@@ -649,11 +685,13 @@
kind: MODULE,
range: 492..530,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 496..501,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 496..501,
+ },
+ ),
),
},
container_name: None,
@@ -682,11 +720,13 @@
kind: MODULE,
range: 667..677,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 671..676,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 671..676,
+ },
+ ),
),
},
container_name: None,
@@ -717,11 +757,13 @@
kind: MACRO_RULES,
range: 51..131,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 64..77,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 64..77,
+ },
+ ),
),
},
container_name: None,
@@ -750,11 +792,13 @@
kind: FN,
range: 307..330,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 310..325,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 310..325,
+ },
+ ),
),
},
container_name: Some(
@@ -785,11 +829,13 @@
kind: FN,
range: 242..257,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 245..252,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 245..252,
+ },
+ ),
),
},
container_name: Some(
@@ -822,11 +868,13 @@
kind: MACRO_RULES,
range: 1..48,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 14..31,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 14..31,
+ },
+ ),
),
},
container_name: None,
@@ -855,11 +903,13 @@
kind: FN,
range: 375..411,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 378..382,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 378..382,
+ },
+ ),
),
},
container_name: None,
@@ -890,11 +940,13 @@
kind: USE_TREE,
range: 684..721,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 701..721,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 701..721,
+ },
+ ),
),
},
container_name: None,
@@ -923,11 +975,13 @@
kind: FN,
range: 352..371,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 355..363,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 355..363,
+ },
+ ),
),
},
container_name: Some(
@@ -969,11 +1023,13 @@
kind: STRUCT,
range: 508..528,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 515..527,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 515..527,
+ },
+ ),
),
},
container_name: None,
@@ -1011,11 +1067,13 @@
kind: USE_TREE,
range: 141..173,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 157..173,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 157..173,
+ },
+ ),
),
},
container_name: None,
@@ -1046,11 +1104,13 @@
kind: USE_TREE,
range: 141..173,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 157..173,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 157..173,
+ },
+ ),
),
},
container_name: None,
@@ -1081,11 +1141,13 @@
kind: STRUCT,
range: 0..20,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 7..19,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 7..19,
+ },
+ ),
),
},
container_name: None,
@@ -1116,11 +1178,13 @@
kind: USE_TREE,
range: 35..69,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 51..69,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 51..69,
+ },
+ ),
),
},
container_name: None,
@@ -1151,11 +1215,13 @@
kind: USE_TREE,
range: 85..125,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 115..125,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 115..125,
+ },
+ ),
),
},
container_name: None,
diff --git a/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt b/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
index 6f5f8f889c..87f0c7d9a8 100644
--- a/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
+++ b/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
@@ -20,11 +20,13 @@
kind: STRUCT,
range: 0..15,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 11..14,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 11..14,
+ },
+ ),
),
},
container_name: None,
diff --git a/crates/ide-db/src/test_data/test_symbols_with_imports.txt b/crates/ide-db/src/test_data/test_symbols_with_imports.txt
index 5d3fe4d265..e96aa889ba 100644
--- a/crates/ide-db/src/test_data/test_symbols_with_imports.txt
+++ b/crates/ide-db/src/test_data/test_symbols_with_imports.txt
@@ -20,11 +20,13 @@
kind: STRUCT,
range: 0..15,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME,
- range: 11..14,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 11..14,
+ },
+ ),
),
},
container_name: None,
@@ -55,11 +57,13 @@
kind: USE_TREE,
range: 17..25,
},
- name_ptr: AstPtr(
- SyntaxNodePtr {
- kind: NAME_REF,
- range: 22..25,
- },
+ name_ptr: Some(
+ AstPtr(
+ SyntaxNodePtr {
+ kind: NAME_REF,
+ range: 22..25,
+ },
+ ),
),
},
container_name: None,
diff --git a/crates/ide-diagnostics/src/handlers/invalid_cast.rs b/crates/ide-diagnostics/src/handlers/invalid_cast.rs
index a59077b757..7479f8147d 100644
--- a/crates/ide-diagnostics/src/handlers/invalid_cast.rs
+++ b/crates/ide-diagnostics/src/handlers/invalid_cast.rs
@@ -51,7 +51,7 @@ pub(crate) fn invalid_cast(ctx: &DiagnosticsContext<'_>, d: &hir::InvalidCast<'_
DiagnosticCode::RustcHardError("E0606"),
format_ty!(ctx, "casting `{}` as `{}` is invalid", d.expr_ty, d.cast_ty),
),
- CastError::IntToFatCast => (
+ CastError::IntToWideCast => (
DiagnosticCode::RustcHardError("E0606"),
format_ty!(ctx, "cannot cast `{}` to a fat pointer `{}`", d.expr_ty, d.cast_ty),
),
@@ -95,6 +95,10 @@ pub(crate) fn invalid_cast(ctx: &DiagnosticsContext<'_>, d: &hir::InvalidCast<'_
DiagnosticCode::RustcHardError("E0605"),
format_ty!(ctx, "non-primitive cast: `{}` as `{}`", d.expr_ty, d.cast_ty),
),
+ CastError::PtrPtrAddingAutoTraits => (
+ DiagnosticCode::RustcHardError("E0804"),
+ "cannot add auto trait to dyn bound via pointer cast".to_owned(),
+ ),
// CastError::UnknownCastPtrKind | CastError::UnknownExprPtrKind => (
// DiagnosticCode::RustcHardError("E0641"),
// "cannot cast to a pointer of an unknown kind".to_owned(),
@@ -444,8 +448,8 @@ fn main() {
q as *const [i32];
//^^^^^^^^^^^^^^^^^ error: cannot cast thin pointer `*const i32` to fat pointer `*const [i32]`
- // FIXME: This should emit diagnostics but disabled to prevent many false positives
let t: *mut (dyn Trait + 'static) = 0 as *mut _;
+ //^^^^^^^^^^^ error: cannot cast `usize` to a fat pointer `*mut (dyn Trait + 'static)`
let mut fail: *const str = 0 as *const str;
//^^^^^^^^^^^^^^^ error: cannot cast `usize` to a fat pointer `*const str`
@@ -543,7 +547,7 @@ fn main() {
fn ptr_to_trait_obj_ok() {
check_diagnostics(
r#"
-//- minicore: pointee
+//- minicore: pointee, send, sync
trait Trait<'a> {}
fn remove_auto<'a>(x: *mut (dyn Trait<'a> + Send)) -> *mut dyn Trait<'a> {
diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 2a251382d4..d5f25dfaf2 100644
--- a/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -857,4 +857,81 @@ pub struct Claims {
"#,
);
}
+
+ #[test]
+ fn test_default_field_values_basic() {
+ // This should work without errors - only field 'b' is required
+ check_diagnostics(
+ r#"
+#![feature(default_field_values)]
+struct Struct {
+ a: usize = 0,
+ b: usize,
+}
+
+fn main() {
+ Struct { b: 1, .. };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_default_field_values_missing_field_error() {
+ // This should report a missing field error because email is required
+ check_diagnostics(
+ r#"
+#![feature(default_field_values)]
+struct UserInfo {
+ id: i32,
+ age: f32 = 1.0,
+ email: String,
+}
+
+fn main() {
+ UserInfo { id: 20, .. };
+// ^^^^^^^^💡 error: missing structure fields:
+// |- email
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_default_field_values_requires_spread_syntax() {
+ // without `..` should report missing fields
+ check_diagnostics(
+ r#"
+#![feature(default_field_values)]
+struct Point {
+ x: i32 = 0,
+ y: i32 = 0,
+}
+
+fn main() {
+ Point { x: 0 };
+// ^^^^^💡 error: missing structure fields:
+// |- y
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_default_field_values_pattern_matching() {
+ check_diagnostics(
+ r#"
+#![feature(default_field_values)]
+struct Point {
+ x: i32 = 0,
+ y: i32 = 0,
+ z: i32,
+}
+
+fn main() {
+ let Point { x, .. } = Point { z: 5, .. };
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/missing_lifetime.rs b/crates/ide-diagnostics/src/handlers/missing_lifetime.rs
index b07f9e68f6..5cb710b66b 100644
--- a/crates/ide-diagnostics/src/handlers/missing_lifetime.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_lifetime.rs
@@ -100,4 +100,19 @@ fn foo<T: Fn(WithLifetime) -> WithLifetime>() {}
"#,
);
}
+
+ #[test]
+ fn regression_21430() {
+ check_diagnostics(
+ r#"
+struct S {
+ f: fn(A<()>),
+}
+
+struct A<'a, T> {
+ a: &'a T,
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs
index 2887a32825..18280a4add 100644
--- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs
+++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs
@@ -995,10 +995,6 @@ fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 {
}
"#,
);
- // FIXME: There should be no "unused variable" here, and there should be a mutability error,
- // but our MIR infra is horribly broken and due to the order in which expressions are lowered
- // there is no `StorageLive` for `x` in the closure (in fact, `x` should not even be a variable
- // of the closure, the environment should be, but as I said, our MIR infra is horribly broken).
check_diagnostics(
r#"
//- minicore: copy, fn
@@ -1007,8 +1003,8 @@ fn f() {
|| {
|| {
let x = 2;
- // ^ 💡 warn: unused variable
|| { || { x = 5; } }
+ //^^^^^ 💡 error: cannot mutate immutable variable `x`
}
}
};
diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs
index 7d02b80918..44285d9315 100644
--- a/crates/ide/src/expand_macro.rs
+++ b/crates/ide/src/expand_macro.rs
@@ -63,7 +63,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
.take_while(|it| it != &token)
.filter(|it| it.kind() == T![,])
.count();
- let ExpandResult { err, value: expansion } = expansions.get(idx)?.clone();
+ let ExpandResult { err, value: expansion } = expansions.get(idx)?.clone()?;
let expansion_file_id = sema.hir_file_for(&expansion).macro_file()?;
let expansion_span_map = db.expansion_span_map(expansion_file_id);
let mut expansion = format(
@@ -583,26 +583,16 @@ fn main() {
fn macro_expand_derive() {
check(
r#"
-//- proc_macros: identity
-//- minicore: clone, derive
+//- proc_macros: identity, derive_identity
+//- minicore: derive
#[proc_macros::identity]
-#[derive(C$0lone)]
+#[derive(proc_macros::DeriveIde$0ntity)]
struct Foo {}
"#,
expect![[r#"
- Clone
- impl <>core::clone::Clone for Foo< >where {
- fn clone(&self) -> Self {
- match self {
- Foo{}
- => Foo{}
- ,
-
- }
- }
-
- }"#]],
+ proc_macros::DeriveIdentity
+ struct Foo{}"#]],
);
}
@@ -610,15 +600,17 @@ struct Foo {}
fn macro_expand_derive2() {
check(
r#"
-//- minicore: copy, clone, derive
+//- proc_macros: derive_identity
+//- minicore: derive
-#[derive(Cop$0y)]
-#[derive(Clone)]
+#[derive(proc_macros::$0DeriveIdentity)]
+#[derive(proc_macros::DeriveIdentity)]
struct Foo {}
"#,
expect![[r#"
- Copy
- impl <>core::marker::Copy for Foo< >where{}"#]],
+ proc_macros::DeriveIdentity
+ #[derive(proc_macros::DeriveIdentity)]
+ struct Foo{}"#]],
);
}
@@ -626,35 +618,27 @@ struct Foo {}
fn macro_expand_derive_multi() {
check(
r#"
-//- minicore: copy, clone, derive
+//- proc_macros: derive_identity
+//- minicore: derive
-#[derive(Cop$0y, Clone)]
+#[derive(proc_macros::DeriveIdent$0ity, proc_macros::DeriveIdentity)]
struct Foo {}
"#,
expect![[r#"
- Copy
- impl <>core::marker::Copy for Foo< >where{}"#]],
+ proc_macros::DeriveIdentity
+ struct Foo{}"#]],
);
check(
r#"
-//- minicore: copy, clone, derive
+//- proc_macros: derive_identity
+//- minicore: derive
-#[derive(Copy, Cl$0one)]
+#[derive(proc_macros::DeriveIdentity, proc_macros::De$0riveIdentity)]
struct Foo {}
"#,
expect![[r#"
- Clone
- impl <>core::clone::Clone for Foo< >where {
- fn clone(&self) -> Self {
- match self {
- Foo{}
- => Foo{}
- ,
-
- }
- }
-
- }"#]],
+ proc_macros::DeriveIdentity
+ struct Foo{}"#]],
);
}
@@ -864,4 +848,19 @@ struct S {
u32"#]],
);
}
+
+ #[test]
+ fn regression_21489() {
+ check(
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive, fmt
+#[derive(Debug, proc_macros::DeriveIdentity$0)]
+struct Foo;
+ "#,
+ expect![[r#"
+ proc_macros::DeriveIdentity
+ struct Foo;"#]],
+ );
+ }
}
diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs
index feac5fff84..15ea92d1c6 100644
--- a/crates/ide/src/hover/render.rs
+++ b/crates/ide/src/hover/render.rs
@@ -272,9 +272,9 @@ pub(super) fn struct_rest_pat(
edition: Edition,
display_target: DisplayTarget,
) -> HoverResult {
- let missing_fields = sema.record_pattern_missing_fields(pattern);
+ let matched_fields = sema.record_pattern_matched_fields(pattern);
- // if there are no missing fields, the end result is a hover that shows ".."
+ // if there are no matched fields, the end result is a hover that shows ".."
// should be left in to indicate that there are no more fields in the pattern
// example, S {a: 1, b: 2, ..} when struct S {a: u32, b: u32}
@@ -285,13 +285,13 @@ pub(super) fn struct_rest_pat(
targets.push(item);
}
};
- for (_, t) in &missing_fields {
+ for (_, t) in &matched_fields {
walk_and_push_ty(sema.db, t, &mut push_new_def);
}
res.markup = {
let mut s = String::from(".., ");
- for (f, _) in &missing_fields {
+ for (f, _) in &matched_fields {
s += f.display(sema.db, display_target).to_string().as_ref();
s += ", ";
}
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index 0b518021e3..7900a0dc99 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -11239,3 +11239,75 @@ impl<T> Foo<i32, u64> for T {
"#]],
);
}
+
+#[test]
+fn doc_link_enum_self_variant() {
+ check(
+ r#"
+/// - [`VariantOne$0`](Self::One)
+pub enum MyEnum {
+ One,
+ Two,
+}
+ "#,
+ expect![[r#"
+ *[`VariantOne`](Self::One)*
+
+ ```rust
+ ra_test_fixture::MyEnum
+ ```
+
+ ```rust
+ One = 0
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn doc_link_trait_self() {
+ check(
+ r#"
+/// - [`do_something$0`](Self::do_something)
+pub trait MyTrait {
+ fn do_something(&self);
+}
+ "#,
+ expect![[r#"
+ *[`do_something`](Self::do_something)*
+
+ ```rust
+ ra_test_fixture::MyTrait
+ ```
+
+ ```rust
+ pub trait MyTrait
+ pub fn do_something(&self)
+ ```
+ "#]],
+ );
+ check(
+ r#"
+pub trait MyTrait {
+ /// - [`do_something$0`](Self::do_something)
+ fn do_something(&self);
+}
+ "#,
+ expect![[r#"
+ *[`do_something`](Self::do_something)*
+
+ ```rust
+ ra_test_fixture::MyTrait
+ ```
+
+ ```rust
+ pub trait MyTrait
+ pub fn do_something(&self)
+ ```
+
+ ---
+
+ * [`do_something`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/trait.MyTrait.html#tymethod.do_something)
+ "#]],
+ );
+}
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs
index a271cac6fc..185df92e2d 100644
--- a/crates/ide/src/navigation_target.rs
+++ b/crates/ide/src/navigation_target.rs
@@ -19,7 +19,7 @@ use ide_db::{
};
use stdx::never;
use syntax::{
- AstNode, SyntaxNode, TextRange,
+ AstNode, AstPtr, SyntaxNode, TextRange,
ast::{self, HasName},
};
@@ -253,7 +253,7 @@ impl<'db> TryToNav for FileSymbol<'db> {
db,
self.loc.hir_file_id,
self.loc.ptr.text_range(),
- Some(self.loc.name_ptr.text_range()),
+ self.loc.name_ptr.map(AstPtr::text_range),
)
.map(|(FileRange { file_id, range: full_range }, focus_range)| {
NavigationTarget {
@@ -264,7 +264,7 @@ impl<'db> TryToNav for FileSymbol<'db> {
.flatten()
.map_or_else(|| self.name.clone(), |it| it.symbol().clone()),
alias: self.is_alias.then(|| self.name.clone()),
- kind: Some(self.def.into()),
+ kind: Some(SymbolKind::from_module_def(db, self.def)),
full_range,
focus_range,
container_name: self.container_name.clone(),
@@ -480,16 +480,11 @@ impl ToNav for hir::Module {
ModuleSource::Module(node) => (node.syntax(), node.name()),
ModuleSource::BlockExpr(node) => (node.syntax(), None),
};
+ let kind = if self.is_crate_root(db) { SymbolKind::CrateRoot } else { SymbolKind::Module };
orig_range_with_focus(db, file_id, syntax, focus).map(
|(FileRange { file_id, range: full_range }, focus_range)| {
- NavigationTarget::from_syntax(
- file_id,
- name.clone(),
- focus_range,
- full_range,
- SymbolKind::Module,
- )
+ NavigationTarget::from_syntax(file_id, name.clone(), focus_range, full_range, kind)
},
)
}
@@ -549,7 +544,7 @@ impl TryToNav for hir::ExternCrateDecl {
self.alias_or_name(db).unwrap_or_else(|| self.name(db)).symbol().clone(),
focus_range,
full_range,
- SymbolKind::Module,
+ SymbolKind::CrateRoot,
);
res.docs = self.docs(db).map(Documentation::into_owned);
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index 4918fe4ff9..5443021988 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -1079,7 +1079,7 @@ use self$0;
use self$0;
"#,
expect![[r#"
- _ Module FileId(0) 0..10
+ _ CrateRoot FileId(0) 0..10
FileId(0) 4..8 import
"#]],
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index 6cec912503..42efa7142b 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -815,7 +815,7 @@ mod not_a_root {
"#,
expect![[r#"
[
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..331, name: \"_\", kind: Module })",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..331, name: \"_\", kind: CrateRoot })",
"(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })",
"(Bin, NavigationTarget { file_id: FileId(0), full_range: 15..76, focus_range: 42..71, name: \"__cortex_m_rt_main_trampoline\", kind: Function })",
"(Bin, NavigationTarget { file_id: FileId(0), full_range: 78..154, focus_range: 113..149, name: \"__cortex_m_rt_main_trampoline_unsafe\", kind: Function })",
@@ -1136,7 +1136,7 @@ fn test_foo1() {}
"#,
expect![[r#"
[
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..51, name: \"_\", kind: Module })",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..51, name: \"_\", kind: CrateRoot })",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 1..50, focus_range: 36..45, name: \"test_foo1\", kind: Function }, Atom(KeyValue { key: \"feature\", value: \"foo\" }))",
]
"#]],
@@ -1155,7 +1155,7 @@ fn test_foo1() {}
"#,
expect![[r#"
[
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..73, name: \"_\", kind: Module })",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..73, name: \"_\", kind: CrateRoot })",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 1..72, focus_range: 58..67, name: \"test_foo1\", kind: Function }, All([Atom(KeyValue { key: \"feature\", value: \"foo\" }), Atom(KeyValue { key: \"feature\", value: \"bar\" })]))",
]
"#]],
@@ -1234,7 +1234,7 @@ generate_main!();
"#,
expect![[r#"
[
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..345, name: \"_\", kind: Module })",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..345, name: \"_\", kind: CrateRoot })",
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 282..312, focus_range: 286..291, name: \"tests\", kind: Module, description: \"mod tests\" })",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 298..307, name: \"foo_test\", kind: Function })",
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index e7c5f95a25..e64fd6488f 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -513,21 +513,21 @@ fn string_injections(
);
if !string.is_raw() {
- highlight_escape_string(hl, &string);
+ highlight_escape_string(hl, config, &string);
}
}
} else if let Some(byte_string) = ast::ByteString::cast(token.clone()) {
if !byte_string.is_raw() {
- highlight_escape_string(hl, &byte_string);
+ highlight_escape_string(hl, config, &byte_string);
}
} else if let Some(c_string) = ast::CString::cast(token.clone()) {
if !c_string.is_raw() {
- highlight_escape_string(hl, &c_string);
+ highlight_escape_string(hl, config, &c_string);
}
} else if let Some(char) = ast::Char::cast(token.clone()) {
- highlight_escape_char(hl, &char)
+ highlight_escape_char(hl, config, &char)
} else if let Some(byte) = ast::Byte::cast(token) {
- highlight_escape_byte(hl, &byte)
+ highlight_escape_byte(hl, config, &byte)
}
ControlFlow::Continue(())
}
@@ -586,7 +586,11 @@ fn descend_token(
fn filter_by_config(highlight: &mut Highlight, config: &HighlightConfig<'_>) -> bool {
match &mut highlight.tag {
- HlTag::StringLiteral if !config.strings => return false,
+ HlTag::StringLiteral | HlTag::EscapeSequence | HlTag::InvalidEscapeSequence
+ if !config.strings =>
+ {
+ return false;
+ }
HlTag::Comment if !config.comments => return false,
// If punctuation is disabled, make the macro bang part of the macro call again.
tag @ HlTag::Punctuation(HlPunct::MacroBang) => {
diff --git a/crates/ide/src/syntax_highlighting/escape.rs b/crates/ide/src/syntax_highlighting/escape.rs
index 094f88f3a8..4da69cc43d 100644
--- a/crates/ide/src/syntax_highlighting/escape.rs
+++ b/crates/ide/src/syntax_highlighting/escape.rs
@@ -1,10 +1,14 @@
//! Syntax highlighting for escape sequences
use crate::syntax_highlighting::highlights::Highlights;
-use crate::{HlRange, HlTag};
+use crate::{HighlightConfig, HlRange, HlTag};
use syntax::ast::{Byte, Char, IsString};
use syntax::{AstToken, TextRange, TextSize};
-pub(super) fn highlight_escape_string<T: IsString>(stack: &mut Highlights, string: &T) {
+pub(super) fn highlight_escape_string<T: IsString>(
+ stack: &mut Highlights,
+ config: &HighlightConfig<'_>,
+ string: &T,
+) {
let text = string.text();
let start = string.syntax().text_range().start();
string.escaped_char_ranges(&mut |piece_range, char| {
@@ -13,16 +17,23 @@ pub(super) fn highlight_escape_string<T: IsString>(stack: &mut Highlights, strin
Ok(_) => HlTag::EscapeSequence,
Err(_) => HlTag::InvalidEscapeSequence,
};
- stack.add(HlRange {
- range: piece_range + start,
- highlight: highlight.into(),
- binding_hash: None,
- });
+ stack.add_with(
+ config,
+ HlRange {
+ range: piece_range + start,
+ highlight: highlight.into(),
+ binding_hash: None,
+ },
+ );
}
});
}
-pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char) {
+pub(super) fn highlight_escape_char(
+ stack: &mut Highlights,
+ config: &HighlightConfig<'_>,
+ char: &Char,
+) {
if char.value().is_err() {
// We do not emit invalid escapes highlighting here. The lexer would likely be in a bad
// state and this token contains junk, since `'` is not a reliable delimiter (consider
@@ -43,10 +54,17 @@ pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char) {
char.syntax().text_range().start() + TextSize::from(1),
TextSize::from(text.len() as u32),
);
- stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None })
+ stack.add_with(
+ config,
+ HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None },
+ )
}
-pub(super) fn highlight_escape_byte(stack: &mut Highlights, byte: &Byte) {
+pub(super) fn highlight_escape_byte(
+ stack: &mut Highlights,
+ config: &HighlightConfig<'_>,
+ byte: &Byte,
+) {
if byte.value().is_err() {
// See `highlight_escape_char` for why no error highlighting here.
return;
@@ -65,5 +83,8 @@ pub(super) fn highlight_escape_byte(stack: &mut Highlights, byte: &Byte) {
byte.syntax().text_range().start() + TextSize::from(2),
TextSize::from(text.len() as u32),
);
- stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None })
+ stack.add_with(
+ config,
+ HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None },
+ )
}
diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs
index 044fd3f5ac..dcc9a8c0d5 100644
--- a/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/crates/ide/src/syntax_highlighting/highlight.rs
@@ -399,7 +399,7 @@ fn highlight_name_ref(
highlight_def(sema, krate, field_ref.into(), edition, true)
}
NameRefClass::ExternCrateShorthand { decl, krate: resolved_krate } => {
- let mut h = HlTag::Symbol(SymbolKind::Module).into();
+ let mut h = HlTag::Symbol(SymbolKind::CrateRoot).into();
if krate.as_ref().is_some_and(|krate| resolved_krate != *krate) {
h |= HlMod::Library;
@@ -417,7 +417,6 @@ fn highlight_name_ref(
if is_deprecated {
h |= HlMod::Deprecated;
}
- h |= HlMod::CrateRoot;
h
}
};
@@ -495,16 +494,15 @@ pub(super) fn highlight_def(
(Highlight::new(HlTag::Symbol(SymbolKind::Field)), Some(field.attrs(sema.db)))
}
Definition::TupleField(_) => (Highlight::new(HlTag::Symbol(SymbolKind::Field)), None),
- Definition::Crate(krate) => (
- Highlight::new(HlTag::Symbol(SymbolKind::Module)) | HlMod::CrateRoot,
- Some(krate.attrs(sema.db)),
- ),
+ Definition::Crate(krate) => {
+ (Highlight::new(HlTag::Symbol(SymbolKind::CrateRoot)), Some(krate.attrs(sema.db)))
+ }
Definition::Module(module) => {
- let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Module));
- if module.is_crate_root(db) {
- h |= HlMod::CrateRoot;
- }
-
+ let h = Highlight::new(HlTag::Symbol(if module.is_crate_root(db) {
+ SymbolKind::CrateRoot
+ } else {
+ SymbolKind::Module
+ }));
(h, Some(module.attrs(sema.db)))
}
Definition::Function(func) => {
@@ -662,8 +660,7 @@ pub(super) fn highlight_def(
(h, None)
}
Definition::ExternCrateDecl(extern_crate) => {
- let mut highlight =
- Highlight::new(HlTag::Symbol(SymbolKind::Module)) | HlMod::CrateRoot;
+ let mut highlight = Highlight::new(HlTag::Symbol(SymbolKind::CrateRoot));
if extern_crate.alias(db).is_none() {
highlight |= HlMod::Library;
}
@@ -805,6 +802,7 @@ fn highlight_name_by_syntax(name: ast::Name) -> Highlight {
TYPE_PARAM => SymbolKind::TypeParam,
RECORD_FIELD => SymbolKind::Field,
MODULE => SymbolKind::Module,
+ EXTERN_CRATE => SymbolKind::CrateRoot,
FN => SymbolKind::Function,
CONST => SymbolKind::Const,
STATIC => SymbolKind::Static,
@@ -835,7 +833,7 @@ fn highlight_name_ref_by_syntax(
};
match parent.kind() {
- EXTERN_CRATE => HlTag::Symbol(SymbolKind::Module) | HlMod::CrateRoot,
+ EXTERN_CRATE => HlTag::Symbol(SymbolKind::CrateRoot).into(),
METHOD_CALL_EXPR => ast::MethodCallExpr::cast(parent)
.and_then(|it| highlight_method_call(sema, krate, &it, is_unsafe_node))
.unwrap_or_else(|| SymbolKind::Method.into()),
diff --git a/crates/ide/src/syntax_highlighting/highlights.rs b/crates/ide/src/syntax_highlighting/highlights.rs
index 340290eafe..6fe4d08443 100644
--- a/crates/ide/src/syntax_highlighting/highlights.rs
+++ b/crates/ide/src/syntax_highlighting/highlights.rs
@@ -4,7 +4,7 @@ use std::iter;
use stdx::equal_range_by;
use syntax::TextRange;
-use crate::{HlRange, HlTag};
+use crate::{HighlightConfig, HlRange, HlTag};
pub(super) struct Highlights {
root: Node,
@@ -26,6 +26,12 @@ impl Highlights {
self.root.add(hl_range);
}
+ pub(super) fn add_with(&mut self, config: &HighlightConfig<'_>, mut hl_range: HlRange) {
+ if super::filter_by_config(&mut hl_range.highlight, config) {
+ self.root.add(hl_range);
+ }
+ }
+
pub(super) fn to_vec(&self) -> Vec<HlRange> {
let mut res = Vec::new();
self.root.flatten(&mut res);
diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs
index 26d2bb5e02..291333f09c 100644
--- a/crates/ide/src/syntax_highlighting/inject.rs
+++ b/crates/ide/src/syntax_highlighting/inject.rs
@@ -1,6 +1,6 @@
//! "Recursive" Syntax highlighting for code in doctests and fixtures.
-use hir::{EditionedFileId, HirFileId, InFile, Semantics};
+use hir::{EditionedFileId, HirFileId, InFile, Semantics, db::HirDatabase};
use ide_db::{
SymbolKind, defs::Definition, documentation::Documentation, range_mapper::RangeMapper,
rust_doc::is_rust_fence,
@@ -109,7 +109,7 @@ pub(super) fn doc_comment(
.for_each(|(range, def)| {
hl.add(HlRange {
range,
- highlight: module_def_to_hl_tag(def)
+ highlight: module_def_to_hl_tag(sema.db, def)
| HlMod::Documentation
| HlMod::Injected
| HlMod::IntraDocLink,
@@ -200,11 +200,11 @@ pub(super) fn doc_comment(
}
}
-fn module_def_to_hl_tag(def: Definition) -> HlTag {
+fn module_def_to_hl_tag(db: &dyn HirDatabase, def: Definition) -> HlTag {
let symbol = match def {
- Definition::Module(_) | Definition::Crate(_) | Definition::ExternCrateDecl(_) => {
- SymbolKind::Module
- }
+ Definition::Crate(_) | Definition::ExternCrateDecl(_) => SymbolKind::CrateRoot,
+ Definition::Module(m) if m.is_crate_root(db) => SymbolKind::CrateRoot,
+ Definition::Module(_) => SymbolKind::Module,
Definition::Function(_) => SymbolKind::Function,
Definition::Adt(hir::Adt::Struct(_)) => SymbolKind::Struct,
Definition::Adt(hir::Adt::Enum(_)) => SymbolKind::Enum,
diff --git a/crates/ide/src/syntax_highlighting/tags.rs b/crates/ide/src/syntax_highlighting/tags.rs
index ca3c3e3aaa..0c64d3de10 100644
--- a/crates/ide/src/syntax_highlighting/tags.rs
+++ b/crates/ide/src/syntax_highlighting/tags.rs
@@ -144,6 +144,7 @@ impl HlTag {
SymbolKind::BuiltinAttr => "builtin_attr",
SymbolKind::Const => "constant",
SymbolKind::ConstParam => "const_param",
+ SymbolKind::CrateRoot => "crate_root",
SymbolKind::Derive => "derive",
SymbolKind::DeriveHelper => "derive_helper",
SymbolKind::Enum => "enum",
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_asm.html b/crates/ide/src/syntax_highlighting/test_data/highlight_asm.html
index 100fdd2155..1228849c5b 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_asm.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_asm.html
@@ -45,7 +45,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword unsafe">unsafe</span> <span class="brace">{</span>
<span class="keyword">let</span> <span class="variable declaration">foo</span> <span class="operator">=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">o</span> <span class="operator">=</span> <span class="numeric_literal">0</span><span class="semicolon">;</span>
- <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang unsafe">!</span><span class="parenthesis unsafe">(</span>
+ <span class="crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang unsafe">!</span><span class="parenthesis unsafe">(</span>
<span class="string_literal macro">"%input = OpLoad _ {</span><span class="variable">0</span><span class="string_literal macro">}"</span><span class="comma macro">,</span>
<span class="macro default_library library macro">concat</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"%result = "</span><span class="comma macro">,</span> <span class="string_literal macro">"bar"</span><span class="comma macro">,</span> <span class="string_literal macro">" _ %input"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span>
<span class="string_literal macro">"OpStore {</span><span class="variable">1</span><span class="string_literal macro">} %result"</span><span class="comma macro">,</span>
@@ -54,7 +54,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="parenthesis unsafe">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">thread_id</span><span class="colon">:</span> <span class="builtin_type">usize</span><span class="semicolon">;</span>
- <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang unsafe">!</span><span class="parenthesis unsafe">(</span><span class="string_literal macro">"
+ <span class="crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang unsafe">!</span><span class="parenthesis unsafe">(</span><span class="string_literal macro">"
mov {</span><span class="variable">0</span><span class="string_literal macro">}, gs:[0x30]
mov {</span><span class="variable">0</span><span class="string_literal macro">}, [{</span><span class="variable">0</span><span class="string_literal macro">}+0x48]
"</span><span class="comma macro">,</span> <span class="keyword macro">out</span><span class="parenthesis macro">(</span><span class="reg library macro">reg</span><span class="parenthesis macro">)</span> <span class="variable macro">thread_id</span><span class="comma macro">,</span> <span class="keyword macro">options</span><span class="parenthesis macro">(</span><span class="keyword macro">pure</span><span class="comma macro">,</span> <span class="keyword macro">readonly</span><span class="comma macro">,</span> <span class="keyword macro">nostack</span><span class="parenthesis macro">)</span><span class="parenthesis unsafe">)</span><span class="semicolon">;</span>
@@ -64,7 +64,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">static</span> <span class="static declaration">VirtualFree</span><span class="colon">:</span> <span class="builtin_type">usize</span><span class="semicolon">;</span>
<span class="keyword const">const</span> <span class="constant const declaration">OffPtr</span><span class="colon">:</span> <span class="builtin_type">usize</span><span class="semicolon">;</span>
<span class="keyword const">const</span> <span class="constant const declaration">OffFn</span><span class="colon">:</span> <span class="builtin_type">usize</span><span class="semicolon">;</span>
- <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang unsafe">!</span><span class="parenthesis unsafe">(</span><span class="string_literal macro">"
+ <span class="crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang unsafe">!</span><span class="parenthesis unsafe">(</span><span class="string_literal macro">"
push {</span><span class="variable">free_type</span><span class="string_literal macro">}
push {</span><span class="variable">free_size</span><span class="string_literal macro">}
push {</span><span class="variable">base</span><span class="string_literal macro">}
@@ -97,7 +97,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="comment">// Ensure thumb mode is set.</span>
<span class="keyword">let</span> <span class="variable declaration">rv</span> <span class="operator">=</span> <span class="parenthesis">(</span><span class="value_param">rv</span> <span class="keyword">as</span> <span class="builtin_type">u32</span><span class="parenthesis">)</span> <span class="bitwise">|</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">msp</span> <span class="operator">=</span> <span class="value_param">msp</span> <span class="keyword">as</span> <span class="builtin_type">u32</span><span class="semicolon">;</span>
- <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang unsafe">!</span><span class="parenthesis unsafe">(</span>
+ <span class="crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang unsafe">!</span><span class="parenthesis unsafe">(</span>
<span class="string_literal macro">"mrs {</span><span class="variable">tmp</span><span class="string_literal macro">}, CONTROL"</span><span class="comma macro">,</span>
<span class="string_literal macro">"bics {</span><span class="variable">tmp</span><span class="string_literal macro">}, {</span><span class="variable">spsel</span><span class="string_literal macro">}"</span><span class="comma macro">,</span>
<span class="string_literal macro">"msr CONTROL, {</span><span class="variable">tmp</span><span class="string_literal macro">}"</span><span class="comma macro">,</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html b/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
index b151ff42fc..fa7f7b1cba 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
@@ -43,7 +43,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
</style>
<pre><code><span class="attribute_bracket attribute macro proc_macro">#</span><span class="attribute_bracket attribute macro proc_macro">[</span><span class="builtin_attr attribute macro proc_macro">allow</span><span class="parenthesis attribute macro proc_macro">(</span><span class="none attribute macro proc_macro">dead_code</span><span class="parenthesis attribute macro proc_macro">)</span><span class="attribute_bracket attribute macro proc_macro">]</span>
<span class="attribute_bracket attribute macro proc_macro">#</span><span class="attribute_bracket attribute macro proc_macro">[</span><span class="tool_module attribute macro proc_macro">rustfmt</span><span class="operator attribute macro proc_macro">::</span><span class="tool_module attribute macro proc_macro">skip</span><span class="attribute_bracket attribute macro proc_macro">]</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="module attribute crate_root library">proc_macros</span><span class="operator attribute">::</span><span class="attribute attribute library">identity</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="crate_root attribute library">proc_macros</span><span class="operator attribute">::</span><span class="attribute attribute library">identity</span><span class="attribute_bracket attribute">]</span>
<span class="attribute_bracket attribute macro proc_macro">#</span><span class="attribute_bracket attribute macro proc_macro">[</span><span class="attribute attribute default_library library macro proc_macro">derive</span><span class="parenthesis attribute macro proc_macro">(</span><span class="derive attribute default_library library macro">Default</span><span class="parenthesis attribute macro proc_macro">)</span><span class="attribute_bracket attribute macro proc_macro">]</span>
<span class="comment documentation">/// This is a doc comment</span>
<span class="comment">// This is a normal comment</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html b/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
index a6e6b16bea..0b32cedca5 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
@@ -41,25 +41,25 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root library">foo</span><span class="semicolon">;</span>
-<span class="keyword">use</span> <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">iter</span><span class="semicolon">;</span>
+<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="crate_root library">foo</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">iter</span><span class="semicolon">;</span>
<span class="keyword">pub</span> <span class="keyword const">const</span> <span class="constant const declaration public">NINETY_TWO</span><span class="colon">:</span> <span class="builtin_type">u8</span> <span class="operator">=</span> <span class="numeric_literal">92</span><span class="semicolon">;</span>
-<span class="keyword">use</span> <span class="module crate_root library">foo</span> <span class="keyword">as</span> <span class="module crate_root declaration library">foooo</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="crate_root library">foo</span> <span class="keyword">as</span> <span class="crate_root declaration library">foooo</span><span class="semicolon">;</span>
-<span class="keyword">pub</span><span class="parenthesis">(</span><span class="keyword crate_root public">crate</span><span class="parenthesis">)</span> <span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+<span class="keyword">pub</span><span class="parenthesis">(</span><span class="keyword public">crate</span><span class="parenthesis">)</span> <span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">let</span> <span class="variable declaration">baz</span> <span class="operator">=</span> <span class="module default_library library">iter</span><span class="operator">::</span><span class="function default_library library">repeat</span><span class="parenthesis">(</span><span class="numeric_literal">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">mod</span> <span class="module declaration">bar</span> <span class="brace">{</span>
- <span class="keyword">pub</span><span class="parenthesis">(</span><span class="keyword control">in</span> <span class="keyword crate_root public">super</span><span class="parenthesis">)</span> <span class="keyword const">const</span> <span class="constant const declaration">FORTY_TWO</span><span class="colon">:</span> <span class="builtin_type">u8</span> <span class="operator">=</span> <span class="numeric_literal">42</span><span class="semicolon">;</span>
+ <span class="keyword">pub</span><span class="parenthesis">(</span><span class="keyword control">in</span> <span class="keyword public">super</span><span class="parenthesis">)</span> <span class="keyword const">const</span> <span class="constant const declaration">FORTY_TWO</span><span class="colon">:</span> <span class="builtin_type">u8</span> <span class="operator">=</span> <span class="numeric_literal">42</span><span class="semicolon">;</span>
<span class="keyword">mod</span> <span class="module declaration">baz</span> <span class="brace">{</span>
- <span class="keyword">use</span> <span class="keyword">super</span><span class="operator">::</span><span class="keyword crate_root public">super</span><span class="operator">::</span><span class="constant const public">NINETY_TWO</span><span class="semicolon">;</span>
- <span class="keyword">use</span> <span class="keyword crate_root public">crate</span><span class="operator">::</span><span class="module crate_root library">foooo</span><span class="operator">::</span><span class="struct library">Point</span><span class="semicolon">;</span>
+ <span class="keyword">use</span> <span class="keyword">super</span><span class="operator">::</span><span class="keyword public">super</span><span class="operator">::</span><span class="constant const public">NINETY_TWO</span><span class="semicolon">;</span>
+ <span class="keyword">use</span> <span class="keyword public">crate</span><span class="operator">::</span><span class="crate_root library">foooo</span><span class="operator">::</span><span class="struct library">Point</span><span class="semicolon">;</span>
- <span class="keyword">pub</span><span class="parenthesis">(</span><span class="keyword control">in</span> <span class="keyword">super</span><span class="operator">::</span><span class="keyword crate_root public">super</span><span class="parenthesis">)</span> <span class="keyword const">const</span> <span class="constant const declaration">TWENTY_NINE</span><span class="colon">:</span> <span class="builtin_type">u8</span> <span class="operator">=</span> <span class="numeric_literal">29</span><span class="semicolon">;</span>
+ <span class="keyword">pub</span><span class="parenthesis">(</span><span class="keyword control">in</span> <span class="keyword">super</span><span class="operator">::</span><span class="keyword public">super</span><span class="parenthesis">)</span> <span class="keyword const">const</span> <span class="constant const declaration">TWENTY_NINE</span><span class="colon">:</span> <span class="builtin_type">u8</span> <span class="operator">=</span> <span class="numeric_literal">29</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="brace">}</span>
</code></pre> \ No newline at end of file
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html b/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
index 2f4a2004f1..29f78959a5 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
@@ -41,7 +41,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="keyword">use</span> <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">iter</span><span class="semicolon">;</span>
+<pre><code><span class="keyword">use</span> <span class="crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">iter</span><span class="semicolon">;</span>
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">let</span> <span class="variable declaration">foo</span> <span class="operator">=</span> <span class="enum_variant default_library library">Some</span><span class="parenthesis">(</span><span class="numeric_literal">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_deprecated.html b/crates/ide/src/syntax_highlighting/test_data/highlight_deprecated.html
index 41d3dff8ed..5287affbfc 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_deprecated.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_deprecated.html
@@ -42,8 +42,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">!</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">deprecated</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">use</span> <span class="keyword crate_root deprecated public">crate</span> <span class="keyword">as</span> <span class="punctuation">_</span><span class="semicolon">;</span>
-<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root deprecated library">bar</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="keyword deprecated public">crate</span> <span class="keyword">as</span> <span class="punctuation">_</span><span class="semicolon">;</span>
+<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="crate_root deprecated library">bar</span><span class="semicolon">;</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">deprecated</span><span class="attribute_bracket attribute">]</span>
<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration deprecated public">macro_</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="operator">&gt;</span> <span class="brace">{</span><span class="brace">}</span><span class="semicolon">;</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index b5c3df6ee4..ce9ec7431a 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -48,9 +48,9 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="comment documentation">//! Syntactic name ref highlighting testing</span>
<span class="comment documentation">//! ```rust</span>
-<span class="comment documentation">//! </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="self_keyword crate_root injected">self</span><span class="semicolon injected">;</span>
-<span class="comment documentation">//! </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">other</span><span class="none injected"> </span><span class="keyword injected">as</span><span class="none injected"> </span><span class="module crate_root declaration injected">otter</span><span class="semicolon injected">;</span>
-<span class="comment documentation">//! </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">core</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//! </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="self_keyword injected">self</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//! </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="crate_root injected">other</span><span class="none injected"> </span><span class="keyword injected">as</span><span class="none injected"> </span><span class="crate_root declaration injected">otter</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//! </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="crate_root injected">core</span><span class="semicolon injected">;</span>
<span class="comment documentation">//! </span><span class="keyword injected">trait</span><span class="none injected"> </span><span class="trait declaration injected">T</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="keyword injected">type</span><span class="none injected"> </span><span class="type_alias associated declaration injected static trait">Assoc</span><span class="semicolon injected">;</span><span class="none injected"> </span><span class="brace injected">}</span>
<span class="comment documentation">//! </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">f</span><span class="angle injected">&lt;</span><span class="type_param declaration injected">Arg</span><span class="angle injected">&gt;</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">-&gt;</span><span class="none injected"> </span><span class="keyword injected">use</span><span class="angle injected">&lt;</span><span class="struct injected">Arg</span><span class="angle injected">&gt;</span><span class="none injected"> </span><span class="keyword injected">where</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="colon injected">:</span><span class="none injected"> </span><span class="trait injected">T</span><span class="comparison injected">&lt;</span><span class="struct injected">Assoc</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="comparison injected">&gt;</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
<span class="comment documentation">//! ```</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html b/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
index 3a45182368..8f7cbddd7f 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
@@ -41,12 +41,12 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword crate_root">self</span> <span class="keyword">as</span> <span class="module crate_root declaration">this</span><span class="semicolon">;</span>
-<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root default_library library">std</span><span class="semicolon">;</span>
-<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root default_library library">alloc</span> <span class="keyword">as</span> <span class="module crate_root declaration">abc</span><span class="semicolon">;</span>
-<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="unresolved_reference">unresolved</span> <span class="keyword">as</span> <span class="module crate_root declaration">definitely_unresolved</span><span class="semicolon">;</span>
+<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword">self</span> <span class="keyword">as</span> <span class="crate_root declaration">this</span><span class="semicolon">;</span>
+<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="crate_root default_library library">std</span><span class="semicolon">;</span>
+<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="crate_root default_library library">alloc</span> <span class="keyword">as</span> <span class="crate_root declaration">abc</span><span class="semicolon">;</span>
+<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="unresolved_reference">unresolved</span> <span class="keyword">as</span> <span class="crate_root declaration">definitely_unresolved</span><span class="semicolon">;</span>
<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="unresolved_reference">unresolved</span> <span class="keyword">as</span> <span class="punctuation">_</span><span class="semicolon">;</span>
-<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root default_library library">test</span> <span class="keyword">as</span> <span class="module crate_root declaration">opt_in_crate</span><span class="semicolon">;</span>
-<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root default_library library">test</span> <span class="keyword">as</span> <span class="punctuation">_</span><span class="semicolon">;</span>
-<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root default_library library">proc_macro</span><span class="semicolon">;</span>
+<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="crate_root default_library library">test</span> <span class="keyword">as</span> <span class="crate_root declaration">opt_in_crate</span><span class="semicolon">;</span>
+<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="crate_root default_library library">test</span> <span class="keyword">as</span> <span class="punctuation">_</span><span class="semicolon">;</span>
+<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="crate_root default_library library">proc_macro</span><span class="semicolon">;</span>
</code></pre> \ No newline at end of file
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
index fd652f444f..c6dbc435c0 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
@@ -72,7 +72,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace">}</span>
<span class="brace">}</span>
-<span class="keyword">use</span> <span class="self_keyword crate_root public">self</span><span class="operator">::</span><span class="struct">FooCopy</span><span class="operator">::</span><span class="brace">{</span><span class="self_keyword">self</span> <span class="keyword">as</span> <span class="struct declaration">BarCopy</span><span class="brace">}</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="self_keyword public">self</span><span class="operator">::</span><span class="struct">FooCopy</span><span class="operator">::</span><span class="brace">{</span><span class="self_keyword">self</span> <span class="keyword">as</span> <span class="struct declaration">BarCopy</span><span class="brace">}</span><span class="semicolon">;</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library macro">Copy</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="keyword">struct</span> <span class="struct declaration">FooCopy</span> <span class="brace">{</span>
@@ -110,7 +110,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="const_param const">FOO</span>
<span class="brace">}</span>
-<span class="keyword">use</span> <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">ops</span><span class="operator">::</span><span class="trait default_library library">Fn</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">ops</span><span class="operator">::</span><span class="trait default_library library">Fn</span><span class="semicolon">;</span>
<span class="keyword">fn</span> <span class="function declaration">baz</span><span class="angle">&lt;</span><span class="type_param declaration">F</span><span class="colon">:</span> <span class="trait default_library library">Fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="value_param callable declaration">f</span><span class="colon">:</span> <span class="type_param">F</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="value_param callable">f</span><span class="parenthesis">(</span><span class="parenthesis">)</span>
<span class="brace">}</span>
@@ -184,15 +184,15 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace">}</span>
<span class="keyword">fn</span> <span class="function declaration">use_foo_items</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
- <span class="keyword">let</span> <span class="variable declaration">bob</span> <span class="operator">=</span> <span class="module crate_root library">foo</span><span class="operator">::</span><span class="struct library">Person</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">bob</span> <span class="operator">=</span> <span class="crate_root library">foo</span><span class="operator">::</span><span class="struct library">Person</span> <span class="brace">{</span>
<span class="field library">name</span><span class="colon">:</span> <span class="string_literal">"Bob"</span><span class="comma">,</span>
- <span class="field library">age</span><span class="colon">:</span> <span class="module crate_root library">foo</span><span class="operator">::</span><span class="module library">consts</span><span class="operator">::</span><span class="constant const library">NUMBER</span><span class="comma">,</span>
+ <span class="field library">age</span><span class="colon">:</span> <span class="crate_root library">foo</span><span class="operator">::</span><span class="module library">consts</span><span class="operator">::</span><span class="constant const library">NUMBER</span><span class="comma">,</span>
<span class="brace">}</span><span class="semicolon">;</span>
- <span class="keyword">let</span> <span class="variable declaration">control_flow</span> <span class="operator">=</span> <span class="module crate_root library">foo</span><span class="operator">::</span><span class="function library">identity</span><span class="parenthesis">(</span><span class="module crate_root library">foo</span><span class="operator">::</span><span class="enum library">ControlFlow</span><span class="operator">::</span><span class="enum_variant library">Continue</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">control_flow</span> <span class="operator">=</span> <span class="crate_root library">foo</span><span class="operator">::</span><span class="function library">identity</span><span class="parenthesis">(</span><span class="crate_root library">foo</span><span class="operator">::</span><span class="enum library">ControlFlow</span><span class="operator">::</span><span class="enum_variant library">Continue</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword control">if</span> <span class="variable">control_flow</span><span class="operator">.</span><span class="method consuming library">should_die</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
- <span class="module crate_root library">foo</span><span class="operator">::</span><span class="unresolved_reference">die</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="crate_root library">foo</span><span class="operator">::</span><span class="unresolved_reference">die</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="brace">}</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html b/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html
index 5a5d9bd1f9..391a46f706 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html
@@ -47,7 +47,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="function">fixture</span><span class="parenthesis">(</span><span class="string_literal">r#"</span>
@@- /main.rs crate:main deps:other_crate
<span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected">
-</span> <span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">x</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="module crate_root injected library">other_crate</span><span class="operator injected">::</span><span class="module injected library">foo</span><span class="operator injected">::</span><span class="struct injected library">S</span><span class="operator injected">::</span><span class="function associated injected library static">thing</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="none injected">
+</span> <span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">x</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="crate_root injected library">other_crate</span><span class="operator injected">::</span><span class="module injected library">foo</span><span class="operator injected">::</span><span class="struct injected library">S</span><span class="operator injected">::</span><span class="function associated injected library static">thing</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="none injected">
</span> <span class="variable injected">x</span><span class="semicolon injected">;</span><span class="none injected">
</span><span class="brace injected">}</span><span class="none injected"> </span><span class="comment injected">//^ i128</span><span class="none injected">
</span><span class="none injected">
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html b/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html
index b28818e679..fccf34083d 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html
@@ -45,5 +45,5 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro public">template</span><span class="macro_bang">!</span><span class="parenthesis">(</span>template<span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="module attribute crate_root library">proc_macros</span><span class="operator attribute">::</span><span class="attribute attribute library">issue_18089</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="crate_root attribute library">proc_macros</span><span class="operator attribute">::</span><span class="attribute attribute library">issue_18089</span><span class="attribute_bracket attribute">]</span>
<span class="keyword macro proc_macro">fn</span> <span class="macro declaration macro proc_macro public">template</span><span class="parenthesis macro proc_macro">(</span><span class="parenthesis macro proc_macro">)</span> <span class="brace macro proc_macro">{</span><span class="brace macro proc_macro">}</span></code></pre> \ No newline at end of file
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2015.html b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2015.html
index d2a53b2ff9..6366cba1bd 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2015.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2015.html
@@ -41,12 +41,12 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword crate_root">self</span><span class="semicolon">;</span>
+<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword">self</span><span class="semicolon">;</span>
-<span class="keyword">use</span> <span class="keyword crate_root public">crate</span><span class="semicolon">;</span>
-<span class="keyword">use</span> <span class="self_keyword crate_root public">self</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="keyword public">crate</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="self_keyword public">self</span><span class="semicolon">;</span>
<span class="keyword">mod</span> <span class="module declaration">__</span> <span class="brace">{</span>
- <span class="keyword">use</span> <span class="keyword crate_root public">super</span><span class="operator">::</span><span class="punctuation">*</span><span class="semicolon">;</span>
+ <span class="keyword">use</span> <span class="keyword public">super</span><span class="operator">::</span><span class="punctuation">*</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration public">void</span> <span class="brace">{</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2018.html b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2018.html
index d309b47232..a89e819083 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2018.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2018.html
@@ -41,12 +41,12 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword crate_root">self</span><span class="semicolon">;</span>
+<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword">self</span><span class="semicolon">;</span>
-<span class="keyword">use</span> <span class="keyword crate_root public">crate</span><span class="semicolon">;</span>
-<span class="keyword">use</span> <span class="self_keyword crate_root public">self</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="keyword public">crate</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="self_keyword public">self</span><span class="semicolon">;</span>
<span class="keyword">mod</span> <span class="module declaration">__</span> <span class="brace">{</span>
- <span class="keyword">use</span> <span class="keyword crate_root public">super</span><span class="operator">::</span><span class="punctuation">*</span><span class="semicolon">;</span>
+ <span class="keyword">use</span> <span class="keyword public">super</span><span class="operator">::</span><span class="punctuation">*</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration public">void</span> <span class="brace">{</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2021.html b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2021.html
index d309b47232..a89e819083 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2021.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2021.html
@@ -41,12 +41,12 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword crate_root">self</span><span class="semicolon">;</span>
+<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword">self</span><span class="semicolon">;</span>
-<span class="keyword">use</span> <span class="keyword crate_root public">crate</span><span class="semicolon">;</span>
-<span class="keyword">use</span> <span class="self_keyword crate_root public">self</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="keyword public">crate</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="self_keyword public">self</span><span class="semicolon">;</span>
<span class="keyword">mod</span> <span class="module declaration">__</span> <span class="brace">{</span>
- <span class="keyword">use</span> <span class="keyword crate_root public">super</span><span class="operator">::</span><span class="punctuation">*</span><span class="semicolon">;</span>
+ <span class="keyword">use</span> <span class="keyword public">super</span><span class="operator">::</span><span class="punctuation">*</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration public">void</span> <span class="brace">{</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2024.html b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2024.html
index 575c9a6b0a..aa1500b8f8 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2024.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2024.html
@@ -41,12 +41,12 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword crate_root">self</span><span class="semicolon">;</span>
+<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword">self</span><span class="semicolon">;</span>
-<span class="keyword">use</span> <span class="keyword crate_root public">crate</span><span class="semicolon">;</span>
-<span class="keyword">use</span> <span class="self_keyword crate_root public">self</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="keyword public">crate</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="self_keyword public">self</span><span class="semicolon">;</span>
<span class="keyword">mod</span> <span class="module declaration">__</span> <span class="brace">{</span>
- <span class="keyword">use</span> <span class="keyword crate_root public">super</span><span class="operator">::</span><span class="punctuation">*</span><span class="semicolon">;</span>
+ <span class="keyword">use</span> <span class="keyword public">super</span><span class="operator">::</span><span class="punctuation">*</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration public">void</span> <span class="brace">{</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_macros.html
index caf66ace7a..484afd81ea 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_macros.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_macros.html
@@ -41,6 +41,6 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="module crate_root library">lib2015</span><span class="operator">::</span><span class="macro library">void_2015</span><span class="macro_bang">!</span><span class="parenthesis">(</span>try async await <span class="none macro">gen</span><span class="parenthesis">)</span><span class="semicolon">;</span>
-<span class="module crate_root library">lib2024</span><span class="operator">::</span><span class="macro library">void_2024</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="keyword macro">try</span> <span class="keyword async macro">async</span> <span class="keyword async control macro">await</span> <span class="none macro">gen</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<pre><code><span class="crate_root library">lib2015</span><span class="operator">::</span><span class="macro library">void_2015</span><span class="macro_bang">!</span><span class="parenthesis">(</span>try async await <span class="none macro">gen</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="crate_root library">lib2024</span><span class="operator">::</span><span class="macro library">void_2024</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="keyword macro">try</span> <span class="keyword async macro">async</span> <span class="keyword async control macro">await</span> <span class="none macro">gen</span><span class="parenthesis">)</span><span class="semicolon">;</span>
</code></pre> \ No newline at end of file
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
index b63d5cedc8..59612634fd 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
@@ -41,7 +41,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="keyword">use</span> <span class="module crate_root library">proc_macros</span><span class="operator">::</span><span class="brace">{</span><span class="function library">mirror</span><span class="comma">,</span> <span class="function library">identity</span><span class="comma">,</span> <span class="derive library">DeriveIdentity</span><span class="brace">}</span><span class="semicolon">;</span>
+<pre><code><span class="keyword">use</span> <span class="crate_root library">proc_macros</span><span class="operator">::</span><span class="brace">{</span><span class="function library">mirror</span><span class="comma">,</span> <span class="function library">identity</span><span class="comma">,</span> <span class="derive library">DeriveIdentity</span><span class="brace">}</span><span class="semicolon">;</span>
<span class="proc_macro library">mirror</span><span class="macro_bang">!</span> <span class="brace">{</span>
<span class="brace macro proc_macro">{</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index e178782c79..4e3822c3d3 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -165,7 +165,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro public">toho</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"{}fmt"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">i</span><span class="colon">:</span> <span class="builtin_type">u64</span> <span class="operator">=</span> <span class="numeric_literal">3</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">o</span><span class="colon">:</span> <span class="builtin_type">u64</span><span class="semicolon">;</span>
- <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang unsafe">!</span><span class="parenthesis unsafe">(</span>
+ <span class="crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang unsafe">!</span><span class="parenthesis unsafe">(</span>
<span class="string_literal macro">"mov {</span><span class="variable">0</span><span class="string_literal macro">}, {</span><span class="variable">1</span><span class="string_literal macro">}"</span><span class="comma macro">,</span>
<span class="string_literal macro">"add {</span><span class="variable">0</span><span class="string_literal macro">}, 5"</span><span class="comma macro">,</span>
<span class="keyword macro">out</span><span class="parenthesis macro">(</span><span class="reg library macro">reg</span><span class="parenthesis macro">)</span> <span class="variable macro">o</span><span class="comma macro">,</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings_disabled.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings_disabled.html
new file mode 100644
index 0000000000..344d0c2ff0
--- /dev/null
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings_disabled.html
@@ -0,0 +1,47 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.proc_macro { color: #94BFF3; text-decoration: underline; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+.const { font-weight: bolder; }
+.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
+
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis">(</span>"foo\nbar"<span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis">(</span>"foo\invalid"<span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html b/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
index 93513f5b57..008987d409 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
@@ -91,7 +91,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="comment">// unsafe fn and method calls</span>
<span class="function unsafe">unsafe_fn</span><span class="parenthesis unsafe">(</span><span class="parenthesis unsafe">)</span><span class="semicolon">;</span>
- <span class="self_keyword crate_root public">self</span><span class="operator">::</span><span class="function unsafe">unsafe_fn</span><span class="parenthesis unsafe">(</span><span class="parenthesis unsafe">)</span><span class="semicolon">;</span>
+ <span class="self_keyword public">self</span><span class="operator">::</span><span class="function unsafe">unsafe_fn</span><span class="parenthesis unsafe">(</span><span class="parenthesis unsafe">)</span><span class="semicolon">;</span>
<span class="parenthesis">(</span><span class="function">unsafe_fn</span> <span class="keyword">as</span> <span class="keyword unsafe">unsafe</span> <span class="keyword">fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="parenthesis unsafe">(</span><span class="parenthesis unsafe">)</span><span class="semicolon">;</span>
<span class="struct">Struct</span> <span class="brace">{</span> <span class="field">field</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span><span class="operator">.</span><span class="method reference unsafe">unsafe_method</span><span class="parenthesis unsafe">(</span><span class="parenthesis unsafe">)</span><span class="semicolon">;</span>
@@ -120,7 +120,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="operator">&</span><span class="static unsafe">EXTERN_STATIC</span><span class="semicolon">;</span>
<span class="operator">&</span><span class="keyword">raw</span> <span class="keyword const">const</span> <span class="static">EXTERN_STATIC</span><span class="semicolon">;</span>
- <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang unsafe">!</span><span class="parenthesis unsafe">(</span>
+ <span class="crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang unsafe">!</span><span class="parenthesis unsafe">(</span>
<span class="string_literal macro">"push {</span><span class="variable">base</span><span class="string_literal macro">}"</span><span class="comma macro">,</span>
<span class="variable declaration macro">base</span> <span class="operator macro">=</span> <span class="keyword const macro">const</span> <span class="numeric_literal macro">0</span>
<span class="parenthesis unsafe">)</span><span class="semicolon">;</span>
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index 89a5e434f9..8b529cf10f 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -1499,6 +1499,23 @@ fn main() {
}
#[test]
+fn test_strings_highlighting_disabled() {
+ // Test that string literals are not highlighted when disabled
+ check_highlighting_with_config(
+ r#"
+//- minicore: fmt
+fn main() {
+ format_args!("foo\nbar");
+ format_args!("foo\invalid");
+}
+"#,
+ HighlightConfig { strings: false, ..HL_CONFIG },
+ expect_file!["./test_data/highlight_strings_disabled.html"],
+ false,
+ );
+}
+
+#[test]
fn regression_20952() {
check_highlighting(
r#"
diff --git a/crates/intern/src/symbol/symbols.rs b/crates/intern/src/symbol/symbols.rs
index cbaac64be4..2be4e41f4f 100644
--- a/crates/intern/src/symbol/symbols.rs
+++ b/crates/intern/src/symbol/symbols.rs
@@ -526,10 +526,17 @@ define_symbols! {
arbitrary_self_types,
arbitrary_self_types_pointers,
supertrait_item_shadowing,
+ new_range,
+ range,
+ RangeCopy,
+ RangeFromCopy,
+ RangeInclusiveCopy,
+ RangeToInclusiveCopy,
hash,
partial_cmp,
cmp,
CoerceUnsized,
DispatchFromDyn,
define_opaque,
+ marker,
}
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index e8d98b1ce6..c2935d94a8 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -45,6 +45,7 @@ pub struct LoadCargoConfig {
pub load_out_dirs_from_check: bool,
pub with_proc_macro_server: ProcMacroServerChoice,
pub prefill_caches: bool,
+ pub proc_macro_processes: usize,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -113,15 +114,25 @@ pub fn load_workspace_into_db(
let proc_macro_server = match &load_config.with_proc_macro_server {
ProcMacroServerChoice::Sysroot => ws.find_sysroot_proc_macro_srv().map(|it| {
it.and_then(|it| {
- ProcMacroClient::spawn(&it, extra_env, ws.toolchain.as_ref()).map_err(Into::into)
+ ProcMacroClient::spawn(
+ &it,
+ extra_env,
+ ws.toolchain.as_ref(),
+ load_config.proc_macro_processes,
+ )
+ .map_err(Into::into)
})
.map_err(|e| ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str()))
}),
- ProcMacroServerChoice::Explicit(path) => {
- Some(ProcMacroClient::spawn(path, extra_env, ws.toolchain.as_ref()).map_err(|e| {
- ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str())
- }))
- }
+ ProcMacroServerChoice::Explicit(path) => Some(
+ ProcMacroClient::spawn(
+ path,
+ extra_env,
+ ws.toolchain.as_ref(),
+ load_config.proc_macro_processes,
+ )
+ .map_err(|e| ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str())),
+ ),
ProcMacroServerChoice::None => Some(Err(ProcMacroLoadingError::Disabled)),
};
match &proc_macro_server {
@@ -435,7 +446,7 @@ pub fn load_proc_macro(
) -> ProcMacroLoadResult {
let res: Result<Vec<_>, _> = (|| {
let dylib = MacroDylib::new(path.to_path_buf());
- let vec = server.load_dylib(dylib, Some(&mut reject_subrequests)).map_err(|e| {
+ let vec = server.load_dylib(dylib, Some(&reject_subrequests)).map_err(|e| {
ProcMacroLoadingError::ProcMacroSrvError(format!("{e}").into_boxed_str())
})?;
if vec.is_empty() {
@@ -541,7 +552,7 @@ impl ProcMacroExpander for Expander {
mixed_site: Span,
current_dir: String,
) -> Result<tt::TopSubtree, ProcMacroExpansionError> {
- let mut cb = |req| match req {
+ let cb = |req| match req {
SubRequest::LocalFilePath { file_id } => {
let file_id = FileId::from_raw(file_id);
let source_root_id = db.file_source_root(file_id).source_root_id(db);
@@ -553,15 +564,14 @@ impl ProcMacroExpander for Expander {
Ok(SubResponse::LocalFilePathResult { name })
}
+ // Not incremental: requires full file text.
SubRequest::SourceText { file_id, ast_id, start, end } => {
- let ast_id = span::ErasedFileAstId::from_raw(ast_id);
- let editioned_file_id = span::EditionedFileId::from_raw(file_id);
- let span = Span {
- range: TextRange::new(TextSize::from(start), TextSize::from(end)),
- anchor: SpanAnchor { file_id: editioned_file_id, ast_id },
- ctx: SyntaxContext::root(editioned_file_id.edition()),
- };
- let range = db.resolve_span(span);
+ let range = resolve_sub_span(
+ db,
+ file_id,
+ ast_id,
+ TextRange::new(TextSize::from(start), TextSize::from(end)),
+ );
let source = db.file_text(range.file_id.file_id(db)).text(db);
let text = source
.get(usize::from(range.range.start())..usize::from(range.range.end()))
@@ -569,6 +579,19 @@ impl ProcMacroExpander for Expander {
Ok(SubResponse::SourceTextResult { text })
}
+ // Not incremental: requires building line index.
+ SubRequest::LineColumn { file_id, ast_id, offset } => {
+ let range =
+ resolve_sub_span(db, file_id, ast_id, TextRange::empty(TextSize::from(offset)));
+ let source = db.file_text(range.file_id.file_id(db)).text(db);
+ let line_index = ide_db::line_index::LineIndex::new(source);
+ let (line, column) = line_index
+ .try_line_col(range.range.start())
+ .map(|lc| (lc.line + 1, lc.col + 1))
+ .unwrap_or((1, 1));
+ // proc_macro::Span line/column are 1-based
+ Ok(SubResponse::LineColumnResult { line, column })
+ }
SubRequest::FilePath { file_id } => {
let file_id = FileId::from_raw(file_id);
let source_root_id = db.file_source_root(file_id).source_root_id(db);
@@ -581,6 +604,17 @@ impl ProcMacroExpander for Expander {
Ok(SubResponse::FilePathResult { name })
}
+ // Not incremental: requires global span resolution.
+ SubRequest::ByteRange { file_id, ast_id, start, end } => {
+ let range = resolve_sub_span(
+ db,
+ file_id,
+ ast_id,
+ TextRange::new(TextSize::from(start), TextSize::from(end)),
+ );
+
+ Ok(SubResponse::ByteRangeResult { range: range.range.into() })
+ }
};
match self.0.expand(
subtree.view(),
@@ -590,7 +624,7 @@ impl ProcMacroExpander for Expander {
call_site,
mixed_site,
current_dir,
- Some(&mut cb),
+ Some(&cb),
) {
Ok(Ok(subtree)) => Ok(subtree),
Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err)),
@@ -603,6 +637,22 @@ impl ProcMacroExpander for Expander {
}
}
+fn resolve_sub_span(
+ db: &dyn ExpandDatabase,
+ file_id: u32,
+ ast_id: u32,
+ range: TextRange,
+) -> hir_expand::FileRange {
+ let ast_id = span::ErasedFileAstId::from_raw(ast_id);
+ let editioned_file_id = span::EditionedFileId::from_raw(file_id);
+ let span = Span {
+ range,
+ anchor: SpanAnchor { file_id: editioned_file_id, ast_id },
+ ctx: SyntaxContext::root(editioned_file_id.edition()),
+ };
+ db.resolve_span(span)
+}
+
#[cfg(test)]
mod tests {
use ide_db::base_db::RootQueryDb;
@@ -618,6 +668,7 @@ mod tests {
load_out_dirs_from_check: false,
with_proc_macro_server: ProcMacroServerChoice::None,
prefill_caches: false,
+ proc_macro_processes: 1,
};
let (db, _vfs, _proc_macro) =
load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {}).unwrap();
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs
index 8f6627a60f..fe01fb1f10 100644
--- a/crates/mbe/src/expander/matcher.rs
+++ b/crates/mbe/src/expander/matcher.rs
@@ -414,8 +414,9 @@ fn match_loop_inner<'t>(
}
// Check if we need a separator.
- if item.sep.is_some() && !item.sep_matched {
- let sep = item.sep.as_ref().unwrap();
+ if let Some(sep) = &item.sep
+ && !item.sep_matched
+ {
let mut fork = src.clone();
if expect_separator(&mut fork, sep) {
// HACK: here we use `meta_result` to pass `TtIter` back to caller because
diff --git a/crates/parser/src/grammar.rs b/crates/parser/src/grammar.rs
index bf84302941..e481bbe9bc 100644
--- a/crates/parser/src/grammar.rs
+++ b/crates/parser/src/grammar.rs
@@ -6,7 +6,7 @@
//! each submodule starts with `use super::*` import and exports
//! "public" productions via `pub(super)`.
//!
-//! See docs for [`Parser`](super::parser::Parser) to learn about API,
+//! See docs for [`Parser`] to learn about API,
//! available to the grammar, and see docs for [`Event`](super::event::Event)
//! to learn how this actually manages to produce parse trees.
//!
diff --git a/crates/parser/src/grammar/attributes.rs b/crates/parser/src/grammar/attributes.rs
index ccb556b2cc..c0cf43a87b 100644
--- a/crates/parser/src/grammar/attributes.rs
+++ b/crates/parser/src/grammar/attributes.rs
@@ -24,15 +24,11 @@ fn attr(p: &mut Parser<'_>, inner: bool) {
p.bump(T![!]);
}
- if p.eat(T!['[']) {
+ if p.expect(T!['[']) {
meta(p);
-
- if !p.eat(T![']']) {
- p.error("expected `]`");
- }
- } else {
- p.error("expected `[`");
+ p.expect(T![']']);
}
+
attr.complete(p, ATTR);
}
@@ -74,7 +70,7 @@ pub(super) fn meta(p: &mut Parser<'_>) {
paths::attr_path(p);
match p.current() {
- T![=] => {
+ T![=] if !p.at(T![=>]) && !p.at(T![==]) => {
p.bump(T![=]);
if expressions::expr(p).is_none() {
p.error("expected expression");
diff --git a/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast b/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast
index 7ee1ecfbb1..60cc690f7c 100644
--- a/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast
+++ b/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast
@@ -28,7 +28,7 @@ SOURCE_FILE
NAME_REF
IDENT "rusti"
WHITESPACE "\n"
-error 23: expected `[`
+error 23: expected L_BRACK
error 23: expected an item
error 27: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
error 28: expected SEMICOLON
diff --git a/crates/parser/test_data/parser/err/0005_attribute_recover.rast b/crates/parser/test_data/parser/err/0005_attribute_recover.rast
index 6ff072e207..77b4d06321 100644
--- a/crates/parser/test_data/parser/err/0005_attribute_recover.rast
+++ b/crates/parser/test_data/parser/err/0005_attribute_recover.rast
@@ -58,5 +58,5 @@ SOURCE_FILE
R_CURLY "}"
WHITESPACE "\n"
error 53: expected R_PAREN
-error 53: expected `]`
+error 53: expected R_BRACK
error 53: expected an item
diff --git a/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast b/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast
index 327bf94a49..b657e98341 100644
--- a/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast
+++ b/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast
@@ -192,14 +192,14 @@ SOURCE_FILE
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n"
-error 52: expected `[`
+error 52: expected L_BRACK
error 52: expected pattern
error 53: expected FAT_ARROW
error 78: expected `,`
-error 161: expected `[`
+error 161: expected L_BRACK
error 161: expected pattern
error 162: expected FAT_ARROW
-error 232: expected `[`
+error 232: expected L_BRACK
error 232: expected pattern
error 233: expected FAT_ARROW
error 250: expected `,`
diff --git a/crates/proc-macro-api/src/bidirectional_protocol.rs b/crates/proc-macro-api/src/bidirectional_protocol.rs
index e44723a6a3..ba59cb219b 100644
--- a/crates/proc-macro-api/src/bidirectional_protocol.rs
+++ b/crates/proc-macro-api/src/bidirectional_protocol.rs
@@ -2,6 +2,7 @@
use std::{
io::{self, BufRead, Write},
+ panic::{AssertUnwindSafe, catch_unwind},
sync::Arc,
};
@@ -9,7 +10,7 @@ use paths::AbsPath;
use span::Span;
use crate::{
- Codec, ProcMacro, ProcMacroKind, ServerError,
+ ProcMacro, ProcMacroKind, ServerError,
bidirectional_protocol::msg::{
BidirectionalMessage, ExpandMacro, ExpandMacroData, ExpnGlobals, Request, Response,
SubRequest, SubResponse,
@@ -22,26 +23,25 @@ use crate::{
},
},
process::ProcMacroServerProcess,
- transport::codec::postcard::PostcardProtocol,
- version,
+ transport::postcard,
};
pub mod msg;
-pub type SubCallback<'a> = &'a mut dyn FnMut(SubRequest) -> Result<SubResponse, ServerError>;
+pub type SubCallback<'a> = &'a dyn Fn(SubRequest) -> Result<SubResponse, ServerError>;
-pub fn run_conversation<C: Codec>(
+pub fn run_conversation(
writer: &mut dyn Write,
reader: &mut dyn BufRead,
- buf: &mut C::Buf,
+ buf: &mut Vec<u8>,
msg: BidirectionalMessage,
callback: SubCallback<'_>,
) -> Result<BidirectionalMessage, ServerError> {
- let encoded = C::encode(&msg).map_err(wrap_encode)?;
- C::write(writer, &encoded).map_err(wrap_io("failed to write initial request"))?;
+ let encoded = postcard::encode(&msg).map_err(wrap_encode)?;
+ postcard::write(writer, &encoded).map_err(wrap_io("failed to write initial request"))?;
loop {
- let maybe_buf = C::read(reader, buf).map_err(wrap_io("failed to read message"))?;
+ let maybe_buf = postcard::read(reader, buf).map_err(wrap_io("failed to read message"))?;
let Some(b) = maybe_buf else {
return Err(ServerError {
message: "proc-macro server closed the stream".into(),
@@ -49,17 +49,28 @@ pub fn run_conversation<C: Codec>(
});
};
- let msg: BidirectionalMessage = C::decode(b).map_err(wrap_decode)?;
+ let msg: BidirectionalMessage = postcard::decode(b).map_err(wrap_decode)?;
match msg {
BidirectionalMessage::Response(response) => {
return Ok(BidirectionalMessage::Response(response));
}
BidirectionalMessage::SubRequest(sr) => {
- let resp = callback(sr)?;
- let reply = BidirectionalMessage::SubResponse(resp);
- let encoded = C::encode(&reply).map_err(wrap_encode)?;
- C::write(writer, &encoded).map_err(wrap_io("failed to write sub-response"))?;
+ // TODO: Avoid `AssertUnwindSafe` by making the callback `UnwindSafe` once `ExpandDatabase`
+ // becomes unwind-safe (currently blocked by `parking_lot::RwLock` in the VFS).
+ let resp = match catch_unwind(AssertUnwindSafe(|| callback(sr))) {
+ Ok(Ok(resp)) => BidirectionalMessage::SubResponse(resp),
+ Ok(Err(err)) => BidirectionalMessage::SubResponse(SubResponse::Cancel {
+ reason: err.to_string(),
+ }),
+ Err(_) => BidirectionalMessage::SubResponse(SubResponse::Cancel {
+ reason: "callback panicked or was cancelled".into(),
+ }),
+ };
+
+ let encoded = postcard::encode(&resp).map_err(wrap_encode)?;
+ postcard::write(writer, &encoded)
+ .map_err(wrap_io("failed to write sub-response"))?;
}
_ => {
return Err(ServerError {
@@ -138,6 +149,7 @@ pub(crate) fn find_proc_macros(
pub(crate) fn expand(
proc_macro: &ProcMacro,
+ process: &ProcMacroServerProcess,
subtree: tt::SubtreeView<'_>,
attr: Option<tt::SubtreeView<'_>>,
env: Vec<(String, String)>,
@@ -147,7 +159,7 @@ pub(crate) fn expand(
current_dir: String,
callback: SubCallback<'_>,
) -> Result<Result<tt::TopSubtree, String>, crate::ServerError> {
- let version = proc_macro.process.version();
+ let version = process.version();
let mut span_data_table = SpanDataIndexMap::default();
let def_site = span_data_table.insert_full(def_site).0;
let call_site = span_data_table.insert_full(call_site).0;
@@ -158,13 +170,8 @@ pub(crate) fn expand(
macro_name: proc_macro.name.to_string(),
attributes: attr
.map(|subtree| FlatTree::from_subtree(subtree, version, &mut span_data_table)),
- has_global_spans: ExpnGlobals {
- serialize: version >= version::HAS_GLOBAL_SPANS,
- def_site,
- call_site,
- mixed_site,
- },
- span_data_table: if proc_macro.process.rust_analyzer_spans() {
+ has_global_spans: ExpnGlobals { def_site, call_site, mixed_site },
+ span_data_table: if process.rust_analyzer_spans() {
serialize_span_data_index_map(&span_data_table)
} else {
Vec::new()
@@ -175,7 +182,7 @@ pub(crate) fn expand(
current_dir: Some(current_dir),
})));
- let response_payload = run_request(&proc_macro.process, task, callback)?;
+ let response_payload = run_request(process, task, callback)?;
match response_payload {
BidirectionalMessage::Response(Response::ExpandMacro(it)) => Ok(it
@@ -212,14 +219,7 @@ fn run_request(
if let Some(err) = srv.exited() {
return Err(err.clone());
}
-
- match srv.use_postcard() {
- true => srv.run_bidirectional::<PostcardProtocol>(msg, callback),
- false => Err(ServerError {
- message: "bidirectional messaging does not support JSON".to_owned(),
- io: None,
- }),
- }
+ srv.run_bidirectional(msg, callback)
}
pub fn reject_subrequests(req: SubRequest) -> Result<SubResponse, ServerError> {
diff --git a/crates/proc-macro-api/src/bidirectional_protocol/msg.rs b/crates/proc-macro-api/src/bidirectional_protocol/msg.rs
index e41f8a5d7d..3f0422dc5b 100644
--- a/crates/proc-macro-api/src/bidirectional_protocol/msg.rs
+++ b/crates/proc-macro-api/src/bidirectional_protocol/msg.rs
@@ -1,11 +1,17 @@
//! Bidirectional protocol messages
+use std::{
+ io::{self, BufRead, Write},
+ ops::Range,
+};
+
use paths::Utf8PathBuf;
use serde::{Deserialize, Serialize};
use crate::{
ProcMacroKind,
legacy_protocol::msg::{FlatTree, Message, PanicMessage, ServerConfig},
+ transport::postcard,
};
#[derive(Debug, Serialize, Deserialize)]
@@ -13,13 +19,32 @@ pub enum SubRequest {
FilePath { file_id: u32 },
SourceText { file_id: u32, ast_id: u32, start: u32, end: u32 },
LocalFilePath { file_id: u32 },
+ LineColumn { file_id: u32, ast_id: u32, offset: u32 },
+ ByteRange { file_id: u32, ast_id: u32, start: u32, end: u32 },
}
#[derive(Debug, Serialize, Deserialize)]
pub enum SubResponse {
- FilePathResult { name: String },
- SourceTextResult { text: Option<String> },
- LocalFilePathResult { name: Option<String> },
+ FilePathResult {
+ name: String,
+ },
+ SourceTextResult {
+ text: Option<String>,
+ },
+ LocalFilePathResult {
+ name: Option<String>,
+ },
+ /// Line and column are 1-based.
+ LineColumnResult {
+ line: u32,
+ column: u32,
+ },
+ ByteRangeResult {
+ range: Range<usize>,
+ },
+ Cancel {
+ reason: String,
+ },
}
#[derive(Debug, Serialize, Deserialize)]
@@ -52,7 +77,6 @@ pub struct ExpandMacro {
pub lib: Utf8PathBuf,
pub env: Vec<(String, String)>,
pub current_dir: Option<String>,
- #[serde(flatten)]
pub data: ExpandMacroData,
}
@@ -67,29 +91,30 @@ pub struct ExpandMacroData {
pub macro_body: FlatTree,
pub macro_name: String,
pub attributes: Option<FlatTree>,
- #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
#[serde(default)]
pub has_global_spans: ExpnGlobals,
-
- #[serde(skip_serializing_if = "Vec::is_empty")]
#[serde(default)]
pub span_data_table: Vec<u32>,
}
#[derive(Clone, Copy, Default, Debug, Serialize, Deserialize)]
pub struct ExpnGlobals {
- #[serde(skip_serializing)]
- #[serde(default)]
- pub serialize: bool,
pub def_site: usize,
pub call_site: usize,
pub mixed_site: usize,
}
-impl ExpnGlobals {
- fn skip_serializing_if(&self) -> bool {
- !self.serialize
+impl Message for BidirectionalMessage {
+ type Buf = Vec<u8>;
+
+ fn read(inp: &mut dyn BufRead, buf: &mut Self::Buf) -> io::Result<Option<Self>> {
+ Ok(match postcard::read(inp, buf)? {
+ None => None,
+ Some(buf) => Some(postcard::decode(buf)?),
+ })
+ }
+ fn write(self, out: &mut dyn Write) -> io::Result<()> {
+ let value = postcard::encode(&self)?;
+ postcard::write(out, &value)
}
}
-
-impl Message for BidirectionalMessage {}
diff --git a/crates/proc-macro-api/src/legacy_protocol.rs b/crates/proc-macro-api/src/legacy_protocol.rs
index 22a7d9868e..ee1795d39c 100644
--- a/crates/proc-macro-api/src/legacy_protocol.rs
+++ b/crates/proc-macro-api/src/legacy_protocol.rs
@@ -18,8 +18,6 @@ use crate::{
flat::serialize_span_data_index_map,
},
process::ProcMacroServerProcess,
- transport::codec::Codec,
- transport::codec::{json::JsonProtocol, postcard::PostcardProtocol},
version,
};
@@ -77,6 +75,7 @@ pub(crate) fn find_proc_macros(
pub(crate) fn expand(
proc_macro: &ProcMacro,
+ process: &ProcMacroServerProcess,
subtree: tt::SubtreeView<'_>,
attr: Option<tt::SubtreeView<'_>>,
env: Vec<(String, String)>,
@@ -85,7 +84,7 @@ pub(crate) fn expand(
mixed_site: Span,
current_dir: String,
) -> Result<Result<tt::TopSubtree, String>, crate::ServerError> {
- let version = proc_macro.process.version();
+ let version = process.version();
let mut span_data_table = SpanDataIndexMap::default();
let def_site = span_data_table.insert_full(def_site).0;
let call_site = span_data_table.insert_full(call_site).0;
@@ -102,7 +101,7 @@ pub(crate) fn expand(
call_site,
mixed_site,
},
- span_data_table: if proc_macro.process.rust_analyzer_spans() {
+ span_data_table: if process.rust_analyzer_spans() {
serialize_span_data_index_map(&span_data_table)
} else {
Vec::new()
@@ -113,7 +112,7 @@ pub(crate) fn expand(
current_dir: Some(current_dir),
};
- let response = send_task(&proc_macro.process, Request::ExpandMacro(Box::new(task)))?;
+ let response = send_task(process, Request::ExpandMacro(Box::new(task)))?;
match response {
Response::ExpandMacro(it) => Ok(it
@@ -148,25 +147,21 @@ fn send_task(srv: &ProcMacroServerProcess, req: Request) -> Result<Response, Ser
return Err(server_error.clone());
}
- if srv.use_postcard() {
- srv.send_task::<_, _, PostcardProtocol>(send_request::<PostcardProtocol>, req)
- } else {
- srv.send_task::<_, _, JsonProtocol>(send_request::<JsonProtocol>, req)
- }
+ srv.send_task_legacy::<_, _>(send_request, req)
}
/// Sends a request to the server and reads the response.
-fn send_request<P: Codec>(
+fn send_request(
mut writer: &mut dyn Write,
mut reader: &mut dyn BufRead,
req: Request,
- buf: &mut P::Buf,
+ buf: &mut String,
) -> Result<Option<Response>, ServerError> {
- req.write::<_, P>(&mut writer).map_err(|err| ServerError {
+ req.write(&mut writer).map_err(|err| ServerError {
message: "failed to write request".into(),
io: Some(Arc::new(err)),
})?;
- let res = Response::read::<_, P>(&mut reader, buf).map_err(|err| ServerError {
+ let res = Response::read(&mut reader, buf).map_err(|err| ServerError {
message: "failed to read response".into(),
io: Some(Arc::new(err)),
})?;
diff --git a/crates/proc-macro-api/src/legacy_protocol/msg.rs b/crates/proc-macro-api/src/legacy_protocol/msg.rs
index 4146b619ec..bb0dde4728 100644
--- a/crates/proc-macro-api/src/legacy_protocol/msg.rs
+++ b/crates/proc-macro-api/src/legacy_protocol/msg.rs
@@ -8,7 +8,7 @@ use paths::Utf8PathBuf;
use serde::de::DeserializeOwned;
use serde_derive::{Deserialize, Serialize};
-use crate::{Codec, ProcMacroKind};
+use crate::{ProcMacroKind, transport::json};
/// Represents requests sent from the client to the proc-macro-srv.
#[derive(Debug, Serialize, Deserialize)]
@@ -155,20 +155,40 @@ impl ExpnGlobals {
}
pub trait Message: serde::Serialize + DeserializeOwned {
- fn read<R: BufRead, C: Codec>(inp: &mut R, buf: &mut C::Buf) -> io::Result<Option<Self>> {
- Ok(match C::read(inp, buf)? {
+ type Buf;
+ fn read(inp: &mut dyn BufRead, buf: &mut Self::Buf) -> io::Result<Option<Self>>;
+ fn write(self, out: &mut dyn Write) -> io::Result<()>;
+}
+
+impl Message for Request {
+ type Buf = String;
+
+ fn read(inp: &mut dyn BufRead, buf: &mut Self::Buf) -> io::Result<Option<Self>> {
+ Ok(match json::read(inp, buf)? {
None => None,
- Some(buf) => Some(C::decode(buf)?),
+ Some(buf) => Some(json::decode(buf)?),
})
}
- fn write<W: Write, C: Codec>(self, out: &mut W) -> io::Result<()> {
- let value = C::encode(&self)?;
- C::write(out, &value)
+ fn write(self, out: &mut dyn Write) -> io::Result<()> {
+ let value = json::encode(&self)?;
+ json::write(out, &value)
}
}
-impl Message for Request {}
-impl Message for Response {}
+impl Message for Response {
+ type Buf = String;
+
+ fn read(inp: &mut dyn BufRead, buf: &mut Self::Buf) -> io::Result<Option<Self>> {
+ Ok(match json::read(inp, buf)? {
+ None => None,
+ Some(buf) => Some(json::decode(buf)?),
+ })
+ }
+ fn write(self, out: &mut dyn Write) -> io::Result<()> {
+ let value = json::encode(&self)?;
+ json::write(out, &value)
+ }
+}
#[cfg(test)]
mod tests {
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index f5fcc99f14..e4b121b033 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -18,7 +18,8 @@ extern crate rustc_driver as _;
pub mod bidirectional_protocol;
pub mod legacy_protocol;
-mod process;
+pub mod pool;
+pub mod process;
pub mod transport;
use paths::{AbsPath, AbsPathBuf};
@@ -26,8 +27,9 @@ use semver::Version;
use span::{ErasedFileAstId, FIXUP_ERASED_FILE_AST_ID_MARKER, Span};
use std::{fmt, io, sync::Arc, time::SystemTime};
-pub use crate::transport::codec::Codec;
-use crate::{bidirectional_protocol::SubCallback, process::ProcMacroServerProcess};
+use crate::{
+ bidirectional_protocol::SubCallback, pool::ProcMacroServerPool, process::ProcMacroServerProcess,
+};
/// The versions of the server protocol
pub mod version {
@@ -44,6 +46,26 @@ pub mod version {
pub const CURRENT_API_VERSION: u32 = HASHED_AST_ID;
}
+/// Protocol format for communication between client and server.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum ProtocolFormat {
+ /// JSON-based legacy protocol (newline-delimited JSON).
+ JsonLegacy,
+ /// Bidirectional postcard protocol with sub-request support.
+ BidirectionalPostcardPrototype,
+}
+
+impl fmt::Display for ProtocolFormat {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ProtocolFormat::JsonLegacy => write!(f, "json-legacy"),
+ ProtocolFormat::BidirectionalPostcardPrototype => {
+ write!(f, "bidirectional-postcard-prototype")
+ }
+ }
+ }
+}
+
/// Represents different kinds of procedural macros that can be expanded by the external server.
#[derive(Copy, Clone, Eq, PartialEq, Debug, serde_derive::Serialize, serde_derive::Deserialize)]
pub enum ProcMacroKind {
@@ -65,7 +87,7 @@ pub struct ProcMacroClient {
///
/// That means that concurrent salsa requests may block each other when expanding proc macros,
/// which is unfortunate, but simple and good enough for the time being.
- process: Arc<ProcMacroServerProcess>,
+ pool: Arc<ProcMacroServerPool>,
path: AbsPathBuf,
}
@@ -87,7 +109,7 @@ impl MacroDylib {
/// we share a single expander process for all macros within a workspace.
#[derive(Debug, Clone)]
pub struct ProcMacro {
- process: Arc<ProcMacroServerProcess>,
+ pool: ProcMacroServerPool,
dylib_path: Arc<AbsPathBuf>,
name: Box<str>,
kind: ProcMacroKind,
@@ -101,7 +123,6 @@ impl PartialEq for ProcMacro {
&& self.kind == other.kind
&& self.dylib_path == other.dylib_path
&& self.dylib_last_modified == other.dylib_last_modified
- && Arc::ptr_eq(&self.process, &other.process)
}
}
@@ -131,9 +152,44 @@ impl ProcMacroClient {
Item = (impl AsRef<std::ffi::OsStr>, &'a Option<impl 'a + AsRef<std::ffi::OsStr>>),
> + Clone,
version: Option<&Version>,
+ num_process: usize,
) -> io::Result<ProcMacroClient> {
- let process = ProcMacroServerProcess::run(process_path, env, version)?;
- Ok(ProcMacroClient { process: Arc::new(process), path: process_path.to_owned() })
+ let pool_size = num_process;
+ let mut workers = Vec::with_capacity(pool_size);
+ for _ in 0..pool_size {
+ let worker = ProcMacroServerProcess::spawn(process_path, env.clone(), version)?;
+ workers.push(worker);
+ }
+
+ let pool = ProcMacroServerPool::new(workers);
+ Ok(ProcMacroClient { pool: Arc::new(pool), path: process_path.to_owned() })
+ }
+
+ /// Invokes `spawn` and returns a client connected to the resulting read and write handles.
+ ///
+ /// The `process_path` is used for `Self::server_path`. This function is mainly used for testing.
+ pub fn with_io_channels(
+ process_path: &AbsPath,
+ spawn: impl Fn(
+ Option<ProtocolFormat>,
+ ) -> io::Result<(
+ Box<dyn process::ProcessExit>,
+ Box<dyn io::Write + Send + Sync>,
+ Box<dyn io::BufRead + Send + Sync>,
+ )> + Clone,
+ version: Option<&Version>,
+ num_process: usize,
+ ) -> io::Result<ProcMacroClient> {
+ let pool_size = num_process;
+ let mut workers = Vec::with_capacity(pool_size);
+ for _ in 0..pool_size {
+ let worker =
+ ProcMacroServerProcess::run(spawn.clone(), version, || "<unknown>".to_owned())?;
+ workers.push(worker);
+ }
+
+ let pool = ProcMacroServerPool::new(workers);
+ Ok(ProcMacroClient { pool: Arc::new(pool), path: process_path.to_owned() })
}
/// Returns the absolute path to the proc-macro server.
@@ -147,31 +203,12 @@ impl ProcMacroClient {
dylib: MacroDylib,
callback: Option<SubCallback<'_>>,
) -> Result<Vec<ProcMacro>, ServerError> {
- let _p = tracing::info_span!("ProcMacroServer::load_dylib").entered();
- let macros = self.process.find_proc_macros(&dylib.path, callback)?;
-
- let dylib_path = Arc::new(dylib.path);
- let dylib_last_modified = std::fs::metadata(dylib_path.as_path())
- .ok()
- .and_then(|metadata| metadata.modified().ok());
- match macros {
- Ok(macros) => Ok(macros
- .into_iter()
- .map(|(name, kind)| ProcMacro {
- process: self.process.clone(),
- name: name.into(),
- kind,
- dylib_path: dylib_path.clone(),
- dylib_last_modified,
- })
- .collect()),
- Err(message) => Err(ServerError { message, io: None }),
- }
+ self.pool.load_dylib(&dylib, callback)
}
/// Checks if the proc-macro server has exited.
pub fn exited(&self) -> Option<&ServerError> {
- self.process.exited()
+ self.pool.exited()
}
}
@@ -187,7 +224,7 @@ impl ProcMacro {
}
fn needs_fixup_change(&self) -> bool {
- let version = self.process.version();
+ let version = self.pool.version();
(version::RUST_ANALYZER_SPAN_SUPPORT..version::HASHED_AST_ID).contains(&version)
}
@@ -231,7 +268,7 @@ impl ProcMacro {
}
}
- self.process.expand(
+ self.pool.pick_process()?.expand(
self,
subtree,
attr,
diff --git a/crates/proc-macro-api/src/pool.rs b/crates/proc-macro-api/src/pool.rs
new file mode 100644
index 0000000000..a637bc0e48
--- /dev/null
+++ b/crates/proc-macro-api/src/pool.rs
@@ -0,0 +1,91 @@
+//! A pool of proc-macro server processes
+use std::sync::Arc;
+
+use crate::{
+ MacroDylib, ProcMacro, ServerError, bidirectional_protocol::SubCallback,
+ process::ProcMacroServerProcess,
+};
+
+#[derive(Debug, Clone)]
+pub(crate) struct ProcMacroServerPool {
+ workers: Arc<[ProcMacroServerProcess]>,
+ version: u32,
+}
+
+impl ProcMacroServerPool {
+ pub(crate) fn new(workers: Vec<ProcMacroServerProcess>) -> Self {
+ let version = workers[0].version();
+ Self { workers: workers.into(), version }
+ }
+}
+
+impl ProcMacroServerPool {
+ pub(crate) fn exited(&self) -> Option<&ServerError> {
+ for worker in &*self.workers {
+ worker.exited()?;
+ }
+ self.workers[0].exited()
+ }
+
+ pub(crate) fn pick_process(&self) -> Result<&ProcMacroServerProcess, ServerError> {
+ let mut best: Option<&ProcMacroServerProcess> = None;
+ let mut best_load = u32::MAX;
+
+ for w in self.workers.iter().filter(|w| w.exited().is_none()) {
+ let load = w.number_of_active_req();
+
+ if load == 0 {
+ return Ok(w);
+ }
+
+ if load < best_load {
+ best = Some(w);
+ best_load = load;
+ }
+ }
+
+ best.ok_or_else(|| ServerError {
+ message: "all proc-macro server workers have exited".into(),
+ io: None,
+ })
+ }
+
+ pub(crate) fn load_dylib(
+ &self,
+ dylib: &MacroDylib,
+ callback: Option<SubCallback<'_>>,
+ ) -> Result<Vec<ProcMacro>, ServerError> {
+ let _span = tracing::info_span!("ProcMacroServer::load_dylib").entered();
+
+ let dylib_path = Arc::new(dylib.path.clone());
+ let dylib_last_modified =
+ std::fs::metadata(dylib_path.as_path()).ok().and_then(|m| m.modified().ok());
+
+ let (first, rest) = self.workers.split_first().expect("worker pool must not be empty");
+
+ let macros = first
+ .find_proc_macros(&dylib.path, callback)?
+ .map_err(|e| ServerError { message: e, io: None })?;
+
+ for worker in rest {
+ worker
+ .find_proc_macros(&dylib.path, callback)?
+ .map_err(|e| ServerError { message: e, io: None })?;
+ }
+
+ Ok(macros
+ .into_iter()
+ .map(|(name, kind)| ProcMacro {
+ pool: self.clone(),
+ name: name.into(),
+ kind,
+ dylib_path: dylib_path.clone(),
+ dylib_last_modified,
+ })
+ .collect())
+ }
+
+ pub(crate) fn version(&self) -> u32 {
+ self.version
+ }
+}
diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs
index f6a656e3ce..9f80880965 100644
--- a/crates/proc-macro-api/src/process.rs
+++ b/crates/proc-macro-api/src/process.rs
@@ -1,10 +1,14 @@
//! Handle process life-time and message passing for proc-macro client
use std::{
+ fmt::Debug,
io::{self, BufRead, BufReader, Read, Write},
panic::AssertUnwindSafe,
process::{Child, ChildStdin, ChildStdout, Command, Stdio},
- sync::{Arc, Mutex, OnceLock},
+ sync::{
+ Arc, Mutex, OnceLock,
+ atomic::{AtomicU32, Ordering},
+ },
};
use paths::AbsPath;
@@ -13,14 +17,13 @@ use span::Span;
use stdx::JodChild;
use crate::{
- Codec, ProcMacro, ProcMacroKind, ServerError,
+ ProcMacro, ProcMacroKind, ProtocolFormat, ServerError,
bidirectional_protocol::{self, SubCallback, msg::BidirectionalMessage, reject_subrequests},
legacy_protocol::{self, SpanMode},
version,
};
/// Represents a process handling proc-macro communication.
-#[derive(Debug)]
pub(crate) struct ProcMacroServerProcess {
/// The state of the proc-macro server process, the protocol is currently strictly sequential
/// hence the lock on the state.
@@ -29,32 +32,103 @@ pub(crate) struct ProcMacroServerProcess {
protocol: Protocol,
/// Populated when the server exits.
exited: OnceLock<AssertUnwindSafe<ServerError>>,
+ active: AtomicU32,
+}
+
+impl std::fmt::Debug for ProcMacroServerProcess {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("ProcMacroServerProcess")
+ .field("version", &self.version)
+ .field("protocol", &self.protocol)
+ .field("exited", &self.exited)
+ .finish()
+ }
}
#[derive(Debug, Clone)]
pub(crate) enum Protocol {
LegacyJson { mode: SpanMode },
- LegacyPostcard { mode: SpanMode },
BidirectionalPostcardPrototype { mode: SpanMode },
}
+pub trait ProcessExit: Send + Sync {
+ fn exit_err(&mut self) -> Option<ServerError>;
+}
+
+impl ProcessExit for Process {
+ fn exit_err(&mut self) -> Option<ServerError> {
+ match self.child.try_wait() {
+ Ok(None) | Err(_) => None,
+ Ok(Some(status)) => {
+ let mut msg = String::new();
+ if !status.success()
+ && let Some(stderr) = self.child.stderr.as_mut()
+ {
+ _ = stderr.read_to_string(&mut msg);
+ }
+ Some(ServerError {
+ message: format!(
+ "proc-macro server exited with {status}{}{msg}",
+ if msg.is_empty() { "" } else { ": " }
+ ),
+ io: None,
+ })
+ }
+ }
+ }
+}
+
/// Maintains the state of the proc-macro server process.
-#[derive(Debug)]
-struct ProcessSrvState {
- process: Process,
- stdin: ChildStdin,
- stdout: BufReader<ChildStdout>,
+pub(crate) struct ProcessSrvState {
+ process: Box<dyn ProcessExit>,
+ stdin: Box<dyn Write + Send + Sync>,
+ stdout: Box<dyn BufRead + Send + Sync>,
}
impl ProcMacroServerProcess {
/// Starts the proc-macro server and performs a version check
- pub(crate) fn run<'a>(
+ pub(crate) fn spawn<'a>(
process_path: &AbsPath,
env: impl IntoIterator<
Item = (impl AsRef<std::ffi::OsStr>, &'a Option<impl 'a + AsRef<std::ffi::OsStr>>),
> + Clone,
version: Option<&Version>,
) -> io::Result<ProcMacroServerProcess> {
+ Self::run(
+ |format| {
+ let mut process = Process::run(
+ process_path,
+ env.clone(),
+ format.map(|format| format.to_string()).as_deref(),
+ )?;
+ let (stdin, stdout) = process.stdio().expect("couldn't access child stdio");
+
+ Ok((Box::new(process), Box::new(stdin), Box::new(stdout)))
+ },
+ version,
+ || {
+ #[expect(clippy::disallowed_methods)]
+ Command::new(process_path)
+ .arg("--version")
+ .output()
+ .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_owned())
+ .unwrap_or_else(|_| "unknown version".to_owned())
+ },
+ )
+ }
+
+ /// Invokes `spawn` and performs a version check.
+ pub(crate) fn run(
+ spawn: impl Fn(
+ Option<ProtocolFormat>,
+ ) -> io::Result<(
+ Box<dyn ProcessExit>,
+ Box<dyn Write + Send + Sync>,
+ Box<dyn BufRead + Send + Sync>,
+ )>,
+ version: Option<&Version>,
+ binary_server_version: impl Fn() -> String,
+ ) -> io::Result<ProcMacroServerProcess> {
const VERSION: Version = Version::new(1, 93, 0);
// we do `>` for nightly as this started working in the middle of the 1.93 nightly release, so we dont want to break on half of the nightlies
let has_working_format_flag = version.map_or(false, |v| {
@@ -65,40 +139,38 @@ impl ProcMacroServerProcess {
&& has_working_format_flag
{
&[
- (
- Some("bidirectional-postcard-prototype"),
- Protocol::BidirectionalPostcardPrototype { mode: SpanMode::Id },
- ),
- (Some("postcard-legacy"), Protocol::LegacyPostcard { mode: SpanMode::Id }),
- (Some("json-legacy"), Protocol::LegacyJson { mode: SpanMode::Id }),
+ Some(ProtocolFormat::BidirectionalPostcardPrototype),
+ Some(ProtocolFormat::JsonLegacy),
]
} else {
- &[(None, Protocol::LegacyJson { mode: SpanMode::Id })]
+ &[None]
};
let mut err = None;
- for &(format, ref protocol) in formats {
+ for &format in formats {
let create_srv = || {
- let mut process = Process::run(process_path, env.clone(), format)?;
- let (stdin, stdout) = process.stdio().expect("couldn't access child stdio");
+ let (process, stdin, stdout) = spawn(format)?;
io::Result::Ok(ProcMacroServerProcess {
state: Mutex::new(ProcessSrvState { process, stdin, stdout }),
version: 0,
- protocol: protocol.clone(),
+ protocol: match format {
+ Some(ProtocolFormat::BidirectionalPostcardPrototype) => {
+ Protocol::BidirectionalPostcardPrototype { mode: SpanMode::Id }
+ }
+ Some(ProtocolFormat::JsonLegacy) | None => {
+ Protocol::LegacyJson { mode: SpanMode::Id }
+ }
+ },
exited: OnceLock::new(),
+ active: AtomicU32::new(0),
})
};
let mut srv = create_srv()?;
tracing::info!("sending proc-macro server version check");
- match srv.version_check(Some(&mut reject_subrequests)) {
+ match srv.version_check(Some(&reject_subrequests)) {
Ok(v) if v > version::CURRENT_API_VERSION => {
- #[allow(clippy::disallowed_methods)]
- let process_version = Command::new(process_path)
- .arg("--version")
- .output()
- .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_owned())
- .unwrap_or_else(|_| "unknown version".to_owned());
+ let process_version = binary_server_version();
err = Some(io::Error::other(format!(
"Your installed proc-macro server is too new for your rust-analyzer. API version: {}, server version: {process_version}. \
This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain.",
@@ -110,11 +182,10 @@ impl ProcMacroServerProcess {
srv.version = v;
if srv.version >= version::RUST_ANALYZER_SPAN_SUPPORT
&& let Ok(new_mode) =
- srv.enable_rust_analyzer_spans(Some(&mut reject_subrequests))
+ srv.enable_rust_analyzer_spans(Some(&reject_subrequests))
{
match &mut srv.protocol {
Protocol::LegacyJson { mode }
- | Protocol::LegacyPostcard { mode }
| Protocol::BidirectionalPostcardPrototype { mode } => *mode = new_mode,
}
}
@@ -132,15 +203,27 @@ impl ProcMacroServerProcess {
Err(err.unwrap())
}
+ /// Finds proc-macros in a given dynamic library.
+ pub(crate) fn find_proc_macros(
+ &self,
+ dylib_path: &AbsPath,
+ callback: Option<SubCallback<'_>>,
+ ) -> Result<Result<Vec<(String, ProcMacroKind)>, String>, ServerError> {
+ match self.protocol {
+ Protocol::LegacyJson { .. } => legacy_protocol::find_proc_macros(self, dylib_path),
+
+ Protocol::BidirectionalPostcardPrototype { .. } => {
+ let cb = callback.expect("callback required for bidirectional protocol");
+ bidirectional_protocol::find_proc_macros(self, dylib_path, cb)
+ }
+ }
+ }
+
/// Returns the server error if the process has exited.
pub(crate) fn exited(&self) -> Option<&ServerError> {
self.exited.get().map(|it| &it.0)
}
- pub(crate) fn use_postcard(&self) -> bool {
- matches!(self.protocol, Protocol::LegacyPostcard { .. })
- }
-
/// Retrieves the API version of the proc-macro server.
pub(crate) fn version(&self) -> u32 {
self.version
@@ -150,7 +233,6 @@ impl ProcMacroServerProcess {
pub(crate) fn rust_analyzer_spans(&self) -> bool {
match self.protocol {
Protocol::LegacyJson { mode } => mode == SpanMode::RustAnalyzer,
- Protocol::LegacyPostcard { mode } => mode == SpanMode::RustAnalyzer,
Protocol::BidirectionalPostcardPrototype { mode } => mode == SpanMode::RustAnalyzer,
}
}
@@ -158,9 +240,7 @@ impl ProcMacroServerProcess {
/// Checks the API version of the running proc-macro server.
fn version_check(&self, callback: Option<SubCallback<'_>>) -> Result<u32, ServerError> {
match self.protocol {
- Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
- legacy_protocol::version_check(self)
- }
+ Protocol::LegacyJson { .. } => legacy_protocol::version_check(self),
Protocol::BidirectionalPostcardPrototype { .. } => {
let cb = callback.expect("callback required for bidirectional protocol");
bidirectional_protocol::version_check(self, cb)
@@ -174,9 +254,7 @@ impl ProcMacroServerProcess {
callback: Option<SubCallback<'_>>,
) -> Result<SpanMode, ServerError> {
match self.protocol {
- Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
- legacy_protocol::enable_rust_analyzer_spans(self)
- }
+ Protocol::LegacyJson { .. } => legacy_protocol::enable_rust_analyzer_spans(self),
Protocol::BidirectionalPostcardPrototype { .. } => {
let cb = callback.expect("callback required for bidirectional protocol");
bidirectional_protocol::enable_rust_analyzer_spans(self, cb)
@@ -184,23 +262,6 @@ impl ProcMacroServerProcess {
}
}
- /// Finds proc-macros in a given dynamic library.
- pub(crate) fn find_proc_macros(
- &self,
- dylib_path: &AbsPath,
- callback: Option<SubCallback<'_>>,
- ) -> Result<Result<Vec<(String, ProcMacroKind)>, String>, ServerError> {
- match self.protocol {
- Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
- legacy_protocol::find_proc_macros(self, dylib_path)
- }
- Protocol::BidirectionalPostcardPrototype { .. } => {
- let cb = callback.expect("callback required for bidirectional protocol");
- bidirectional_protocol::find_proc_macros(self, dylib_path, cb)
- }
- }
- }
-
pub(crate) fn expand(
&self,
proc_macro: &ProcMacro,
@@ -213,21 +274,22 @@ impl ProcMacroServerProcess {
current_dir: String,
callback: Option<SubCallback<'_>>,
) -> Result<Result<tt::TopSubtree, String>, ServerError> {
- match self.protocol {
- Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
- legacy_protocol::expand(
- proc_macro,
- subtree,
- attr,
- env,
- def_site,
- call_site,
- mixed_site,
- current_dir,
- )
- }
+ self.active.fetch_add(1, Ordering::AcqRel);
+ let result = match self.protocol {
+ Protocol::LegacyJson { .. } => legacy_protocol::expand(
+ proc_macro,
+ self,
+ subtree,
+ attr,
+ env,
+ def_site,
+ call_site,
+ mixed_site,
+ current_dir,
+ ),
Protocol::BidirectionalPostcardPrototype { .. } => bidirectional_protocol::expand(
proc_macro,
+ self,
subtree,
attr,
env,
@@ -237,20 +299,23 @@ impl ProcMacroServerProcess {
current_dir,
callback.expect("callback required for bidirectional protocol"),
),
- }
+ };
+
+ self.active.fetch_sub(1, Ordering::AcqRel);
+ result
}
- pub(crate) fn send_task<Request, Response, C: Codec>(
+ pub(crate) fn send_task_legacy<Request, Response>(
&self,
send: impl FnOnce(
&mut dyn Write,
&mut dyn BufRead,
Request,
- &mut C::Buf,
+ &mut String,
) -> Result<Option<Response>, ServerError>,
req: Request,
) -> Result<Response, ServerError> {
- self.with_locked_io::<C, _>(|writer, reader, buf| {
+ self.with_locked_io(String::new(), |writer, reader, buf| {
send(writer, reader, req, buf).and_then(|res| {
res.ok_or_else(|| {
let message = "proc-macro server did not respond with data".to_owned();
@@ -266,31 +331,17 @@ impl ProcMacroServerProcess {
})
}
- pub(crate) fn with_locked_io<C: Codec, R>(
+ fn with_locked_io<R, B>(
&self,
- f: impl FnOnce(&mut dyn Write, &mut dyn BufRead, &mut C::Buf) -> Result<R, ServerError>,
+ mut buf: B,
+ f: impl FnOnce(&mut dyn Write, &mut dyn BufRead, &mut B) -> Result<R, ServerError>,
) -> Result<R, ServerError> {
let state = &mut *self.state.lock().unwrap();
- let mut buf = C::Buf::default();
-
f(&mut state.stdin, &mut state.stdout, &mut buf).map_err(|e| {
if e.io.as_ref().map(|it| it.kind()) == Some(io::ErrorKind::BrokenPipe) {
- match state.process.child.try_wait() {
- Ok(None) | Err(_) => e,
- Ok(Some(status)) => {
- let mut msg = String::new();
- if !status.success()
- && let Some(stderr) = state.process.child.stderr.as_mut()
- {
- _ = stderr.read_to_string(&mut msg);
- }
- let server_error = ServerError {
- message: format!(
- "proc-macro server exited with {status}{}{msg}",
- if msg.is_empty() { "" } else { ": " }
- ),
- io: None,
- };
+ match state.process.exit_err() {
+ None => e,
+ Some(server_error) => {
self.exited.get_or_init(|| AssertUnwindSafe(server_error)).0.clone()
}
}
@@ -300,15 +351,19 @@ impl ProcMacroServerProcess {
})
}
- pub(crate) fn run_bidirectional<C: Codec>(
+ pub(crate) fn run_bidirectional(
&self,
initial: BidirectionalMessage,
callback: SubCallback<'_>,
) -> Result<BidirectionalMessage, ServerError> {
- self.with_locked_io::<C, _>(|writer, reader, buf| {
- bidirectional_protocol::run_conversation::<C>(writer, reader, buf, initial, callback)
+ self.with_locked_io(Vec::new(), |writer, reader, buf| {
+ bidirectional_protocol::run_conversation(writer, reader, buf, initial, callback)
})
}
+
+ pub(crate) fn number_of_active_req(&self) -> u32 {
+ self.active.load(Ordering::Acquire)
+ }
}
/// Manages the execution of the proc-macro server process.
diff --git a/crates/proc-macro-api/src/transport.rs b/crates/proc-macro-api/src/transport.rs
index b7a1d8f732..f383edb0cb 100644
--- a/crates/proc-macro-api/src/transport.rs
+++ b/crates/proc-macro-api/src/transport.rs
@@ -1,3 +1,3 @@
//! Contains construct for transport of messages.
-pub mod codec;
-pub mod framing;
+pub(crate) mod json;
+pub(crate) mod postcard;
diff --git a/crates/proc-macro-api/src/transport/codec.rs b/crates/proc-macro-api/src/transport/codec.rs
deleted file mode 100644
index c9afad260a..0000000000
--- a/crates/proc-macro-api/src/transport/codec.rs
+++ /dev/null
@@ -1,15 +0,0 @@
-//! Protocol codec
-
-use std::io;
-
-use serde::de::DeserializeOwned;
-
-use crate::transport::framing::Framing;
-
-pub mod json;
-pub mod postcard;
-
-pub trait Codec: Framing {
- fn encode<T: serde::Serialize>(msg: &T) -> io::Result<Self::Buf>;
- fn decode<T: DeserializeOwned>(buf: &mut Self::Buf) -> io::Result<T>;
-}
diff --git a/crates/proc-macro-api/src/transport/codec/json.rs b/crates/proc-macro-api/src/transport/codec/json.rs
deleted file mode 100644
index 96db802e0b..0000000000
--- a/crates/proc-macro-api/src/transport/codec/json.rs
+++ /dev/null
@@ -1,58 +0,0 @@
-//! Protocol functions for json.
-use std::io::{self, BufRead, Write};
-
-use serde::{Serialize, de::DeserializeOwned};
-
-use crate::{Codec, transport::framing::Framing};
-
-pub struct JsonProtocol;
-
-impl Framing for JsonProtocol {
- type Buf = String;
-
- fn read<'a, R: BufRead + ?Sized>(
- inp: &mut R,
- buf: &'a mut String,
- ) -> io::Result<Option<&'a mut String>> {
- loop {
- buf.clear();
-
- inp.read_line(buf)?;
- buf.pop(); // Remove trailing '\n'
-
- if buf.is_empty() {
- return Ok(None);
- }
-
- // Some ill behaved macro try to use stdout for debugging
- // We ignore it here
- if !buf.starts_with('{') {
- tracing::error!("proc-macro tried to print : {}", buf);
- continue;
- }
-
- return Ok(Some(buf));
- }
- }
-
- fn write<W: Write + ?Sized>(out: &mut W, buf: &String) -> io::Result<()> {
- tracing::debug!("> {}", buf);
- out.write_all(buf.as_bytes())?;
- out.write_all(b"\n")?;
- out.flush()
- }
-}
-
-impl Codec for JsonProtocol {
- fn encode<T: Serialize>(msg: &T) -> io::Result<String> {
- Ok(serde_json::to_string(msg)?)
- }
-
- fn decode<T: DeserializeOwned>(buf: &mut String) -> io::Result<T> {
- let mut deserializer = serde_json::Deserializer::from_str(buf);
- // Note that some proc-macro generate very deep syntax tree
- // We have to disable the current limit of serde here
- deserializer.disable_recursion_limit();
- Ok(T::deserialize(&mut deserializer)?)
- }
-}
diff --git a/crates/proc-macro-api/src/transport/codec/postcard.rs b/crates/proc-macro-api/src/transport/codec/postcard.rs
deleted file mode 100644
index 6f5319e75b..0000000000
--- a/crates/proc-macro-api/src/transport/codec/postcard.rs
+++ /dev/null
@@ -1,40 +0,0 @@
-//! Postcard encode and decode implementations.
-
-use std::io::{self, BufRead, Write};
-
-use serde::{Serialize, de::DeserializeOwned};
-
-use crate::{Codec, transport::framing::Framing};
-
-pub struct PostcardProtocol;
-
-impl Framing for PostcardProtocol {
- type Buf = Vec<u8>;
-
- fn read<'a, R: BufRead + ?Sized>(
- inp: &mut R,
- buf: &'a mut Vec<u8>,
- ) -> io::Result<Option<&'a mut Vec<u8>>> {
- buf.clear();
- let n = inp.read_until(0, buf)?;
- if n == 0 {
- return Ok(None);
- }
- Ok(Some(buf))
- }
-
- fn write<W: Write + ?Sized>(out: &mut W, buf: &Vec<u8>) -> io::Result<()> {
- out.write_all(buf)?;
- out.flush()
- }
-}
-
-impl Codec for PostcardProtocol {
- fn encode<T: Serialize>(msg: &T) -> io::Result<Vec<u8>> {
- postcard::to_allocvec_cobs(msg).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
- }
-
- fn decode<T: DeserializeOwned>(buf: &mut Self::Buf) -> io::Result<T> {
- postcard::from_bytes_cobs(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
- }
-}
diff --git a/crates/proc-macro-api/src/transport/framing.rs b/crates/proc-macro-api/src/transport/framing.rs
deleted file mode 100644
index 56c3b68e8c..0000000000
--- a/crates/proc-macro-api/src/transport/framing.rs
+++ /dev/null
@@ -1,14 +0,0 @@
-//! Protocol framing
-
-use std::io::{self, BufRead, Write};
-
-pub trait Framing {
- type Buf: Default + Send + Sync;
-
- fn read<'a, R: BufRead + ?Sized>(
- inp: &mut R,
- buf: &'a mut Self::Buf,
- ) -> io::Result<Option<&'a mut Self::Buf>>;
-
- fn write<W: Write + ?Sized>(out: &mut W, buf: &Self::Buf) -> io::Result<()>;
-}
diff --git a/crates/proc-macro-api/src/transport/json.rs b/crates/proc-macro-api/src/transport/json.rs
new file mode 100644
index 0000000000..f433bb7de0
--- /dev/null
+++ b/crates/proc-macro-api/src/transport/json.rs
@@ -0,0 +1,48 @@
+//! Protocol functions for json.
+use std::io::{self, BufRead, Write};
+
+use serde::{Serialize, de::DeserializeOwned};
+
+pub(crate) fn read<'a, R: BufRead + ?Sized>(
+ inp: &mut R,
+ buf: &'a mut String,
+) -> io::Result<Option<&'a mut String>> {
+ loop {
+ buf.clear();
+
+ inp.read_line(buf)?;
+ buf.pop(); // Remove trailing '\n'
+
+ if buf.is_empty() {
+ return Ok(None);
+ }
+
+ // Some ill behaved macro try to use stdout for debugging
+ // We ignore it here
+ if !buf.starts_with('{') {
+ tracing::error!("proc-macro tried to print : {}", buf);
+ continue;
+ }
+
+ return Ok(Some(buf));
+ }
+}
+
+pub(crate) fn write<W: Write + ?Sized>(out: &mut W, buf: &String) -> io::Result<()> {
+ tracing::debug!("> {}", buf);
+ out.write_all(buf.as_bytes())?;
+ out.write_all(b"\n")?;
+ out.flush()
+}
+
+pub(crate) fn encode<T: Serialize>(msg: &T) -> io::Result<String> {
+ Ok(serde_json::to_string(msg)?)
+}
+
+pub(crate) fn decode<T: DeserializeOwned>(buf: &mut str) -> io::Result<T> {
+ let mut deserializer = serde_json::Deserializer::from_str(buf);
+ // Note that some proc-macro generate very deep syntax tree
+ // We have to disable the current limit of serde here
+ deserializer.disable_recursion_limit();
+ Ok(T::deserialize(&mut deserializer)?)
+}
diff --git a/crates/proc-macro-api/src/transport/postcard.rs b/crates/proc-macro-api/src/transport/postcard.rs
new file mode 100644
index 0000000000..75aa90e4c4
--- /dev/null
+++ b/crates/proc-macro-api/src/transport/postcard.rs
@@ -0,0 +1,30 @@
+//! Postcard encode and decode implementations.
+
+use std::io::{self, BufRead, Write};
+
+use serde::{Serialize, de::DeserializeOwned};
+
+pub(crate) fn read<'a, R: BufRead + ?Sized>(
+ inp: &mut R,
+ buf: &'a mut Vec<u8>,
+) -> io::Result<Option<&'a mut Vec<u8>>> {
+ buf.clear();
+ let n = inp.read_until(0, buf)?;
+ if n == 0 {
+ return Ok(None);
+ }
+ Ok(Some(buf))
+}
+
+pub(crate) fn write<W: Write + ?Sized>(out: &mut W, buf: &[u8]) -> io::Result<()> {
+ out.write_all(buf)?;
+ out.flush()
+}
+
+pub(crate) fn encode<T: Serialize>(msg: &T) -> io::Result<Vec<u8>> {
+ postcard::to_allocvec_cobs(msg).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
+}
+
+pub(crate) fn decode<T: DeserializeOwned>(buf: &mut [u8]) -> io::Result<T> {
+ postcard::from_bytes_cobs(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
+}
diff --git a/crates/proc-macro-srv-cli/Cargo.toml b/crates/proc-macro-srv-cli/Cargo.toml
index 6b2db0b269..f586fe7644 100644
--- a/crates/proc-macro-srv-cli/Cargo.toml
+++ b/crates/proc-macro-srv-cli/Cargo.toml
@@ -10,12 +10,25 @@ license.workspace = true
rust-version.workspace = true
publish = false
+[lib]
+doctest = false
+
[dependencies]
proc-macro-srv.workspace = true
proc-macro-api.workspace = true
-postcard.workspace = true
clap = {version = "4.5.42", default-features = false, features = ["std"]}
+[dev-dependencies]
+expect-test.workspace = true
+paths.workspace = true
+# span = {workspace = true, default-features = false} does not work
+span = { path = "../span", default-features = false}
+tt.workspace = true
+intern.workspace = true
+
+# used as proc macro test target
+proc-macro-test.path = "../proc-macro-srv/proc-macro-test"
+
[features]
default = []
# default = ["sysroot-abi"]
diff --git a/crates/proc-macro-srv-cli/src/lib.rs b/crates/proc-macro-srv-cli/src/lib.rs
new file mode 100644
index 0000000000..8475c05ae8
--- /dev/null
+++ b/crates/proc-macro-srv-cli/src/lib.rs
@@ -0,0 +1,11 @@
+//! Library interface for `proc-macro-srv-cli`.
+//!
+//! This module exposes the server main loop and protocol format for integration testing.
+
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_driver as _;
+
+#[cfg(feature = "sysroot-abi")]
+pub mod main_loop;
diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs
index bdfdb50002..928753659f 100644
--- a/crates/proc-macro-srv-cli/src/main.rs
+++ b/crates/proc-macro-srv-cli/src/main.rs
@@ -9,11 +9,11 @@ extern crate rustc_driver as _;
mod version;
-#[cfg(feature = "sysroot-abi")]
-mod main_loop;
use clap::{Command, ValueEnum};
+use proc_macro_api::ProtocolFormat;
+
#[cfg(feature = "sysroot-abi")]
-use main_loop::run;
+use proc_macro_srv_cli::main_loop::run;
fn main() -> std::io::Result<()> {
let v = std::env::var("RUST_ANALYZER_INTERNALS_DO_NOT_USE");
@@ -32,7 +32,7 @@ fn main() -> std::io::Result<()> {
.long("format")
.action(clap::ArgAction::Set)
.default_value("json-legacy")
- .value_parser(clap::builder::EnumValueParser::<ProtocolFormat>::new()),
+ .value_parser(clap::builder::EnumValueParser::<ProtocolFormatArg>::new()),
clap::Arg::new("version")
.long("version")
.action(clap::ArgAction::SetTrue)
@@ -43,44 +43,48 @@ fn main() -> std::io::Result<()> {
println!("rust-analyzer-proc-macro-srv {}", version::version());
return Ok(());
}
- let &format =
- matches.get_one::<ProtocolFormat>("format").expect("format value should always be present");
- run(format)
+ let &format = matches
+ .get_one::<ProtocolFormatArg>("format")
+ .expect("format value should always be present");
+
+ let mut stdin = std::io::BufReader::new(std::io::stdin());
+ let mut stdout = std::io::stdout();
+
+ run(&mut stdin, &mut stdout, format.into())
}
+/// Wrapper for CLI argument parsing that implements `ValueEnum`.
#[derive(Copy, Clone)]
-enum ProtocolFormat {
- JsonLegacy,
- PostcardLegacy,
- BidirectionalPostcardPrototype,
+struct ProtocolFormatArg(ProtocolFormat);
+
+impl From<ProtocolFormatArg> for ProtocolFormat {
+ fn from(arg: ProtocolFormatArg) -> Self {
+ arg.0
+ }
}
-impl ValueEnum for ProtocolFormat {
+impl ValueEnum for ProtocolFormatArg {
fn value_variants<'a>() -> &'a [Self] {
&[
- ProtocolFormat::JsonLegacy,
- ProtocolFormat::PostcardLegacy,
- ProtocolFormat::BidirectionalPostcardPrototype,
+ ProtocolFormatArg(ProtocolFormat::JsonLegacy),
+ ProtocolFormatArg(ProtocolFormat::BidirectionalPostcardPrototype),
]
}
fn to_possible_value(&self) -> Option<clap::builder::PossibleValue> {
- match self {
+ match self.0 {
ProtocolFormat::JsonLegacy => Some(clap::builder::PossibleValue::new("json-legacy")),
- ProtocolFormat::PostcardLegacy => {
- Some(clap::builder::PossibleValue::new("postcard-legacy"))
- }
ProtocolFormat::BidirectionalPostcardPrototype => {
Some(clap::builder::PossibleValue::new("bidirectional-postcard-prototype"))
}
}
}
+
fn from_str(input: &str, _ignore_case: bool) -> Result<Self, String> {
match input {
- "json-legacy" => Ok(ProtocolFormat::JsonLegacy),
- "postcard-legacy" => Ok(ProtocolFormat::PostcardLegacy),
+ "json-legacy" => Ok(ProtocolFormatArg(ProtocolFormat::JsonLegacy)),
"bidirectional-postcard-prototype" => {
- Ok(ProtocolFormat::BidirectionalPostcardPrototype)
+ Ok(ProtocolFormatArg(ProtocolFormat::BidirectionalPostcardPrototype))
}
_ => Err(format!("unknown protocol format: {input}")),
}
@@ -88,7 +92,11 @@ impl ValueEnum for ProtocolFormat {
}
#[cfg(not(feature = "sysroot-abi"))]
-fn run(_: ProtocolFormat) -> std::io::Result<()> {
+fn run(
+ _: &mut std::io::BufReader<std::io::Stdin>,
+ _: &mut std::io::Stdout,
+ _: ProtocolFormat,
+) -> std::io::Result<()> {
Err(std::io::Error::new(
std::io::ErrorKind::Unsupported,
"proc-macro-srv-cli needs to be compiled with the `sysroot-abi` feature to function"
diff --git a/crates/proc-macro-srv-cli/src/main_loop.rs b/crates/proc-macro-srv-cli/src/main_loop.rs
index b2f4b96bd2..9be3199a38 100644
--- a/crates/proc-macro-srv-cli/src/main_loop.rs
+++ b/crates/proc-macro-srv-cli/src/main_loop.rs
@@ -1,18 +1,18 @@
//! The main loop of the proc-macro server.
use proc_macro_api::{
- Codec,
- bidirectional_protocol::msg as bidirectional,
- legacy_protocol::msg as legacy,
- transport::codec::{json::JsonProtocol, postcard::PostcardProtocol},
+ ProtocolFormat, bidirectional_protocol::msg as bidirectional, legacy_protocol::msg as legacy,
version::CURRENT_API_VERSION,
};
-use std::io;
+use std::panic::{panic_any, resume_unwind};
+use std::{
+ io::{self, BufRead, Write},
+ ops::Range,
+};
use legacy::Message;
-use proc_macro_srv::{EnvSnapshot, SpanId};
+use proc_macro_srv::{EnvSnapshot, ProcMacroClientError, ProcMacroPanicMarker, SpanId};
-use crate::ProtocolFormat;
struct SpanTrans;
impl legacy::SpanTransformer for SpanTrans {
@@ -32,15 +32,21 @@ impl legacy::SpanTransformer for SpanTrans {
}
}
-pub(crate) fn run(format: ProtocolFormat) -> io::Result<()> {
+pub fn run(
+ stdin: &mut (dyn BufRead + Send + Sync),
+ stdout: &mut (dyn Write + Send + Sync),
+ format: ProtocolFormat,
+) -> io::Result<()> {
match format {
- ProtocolFormat::JsonLegacy => run_::<JsonProtocol>(),
- ProtocolFormat::PostcardLegacy => run_::<PostcardProtocol>(),
- ProtocolFormat::BidirectionalPostcardPrototype => run_new::<PostcardProtocol>(),
+ ProtocolFormat::JsonLegacy => run_old(stdin, stdout),
+ ProtocolFormat::BidirectionalPostcardPrototype => run_new(stdin, stdout),
}
}
-fn run_new<C: Codec>() -> io::Result<()> {
+fn run_new(
+ stdin: &mut (dyn BufRead + Send + Sync),
+ stdout: &mut (dyn Write + Send + Sync),
+) -> io::Result<()> {
fn macro_kind_to_api(kind: proc_macro_srv::ProcMacroKind) -> proc_macro_api::ProcMacroKind {
match kind {
proc_macro_srv::ProcMacroKind::CustomDerive => {
@@ -51,9 +57,7 @@ fn run_new<C: Codec>() -> io::Result<()> {
}
}
- let mut buf = C::Buf::default();
- let mut stdin = io::stdin();
- let mut stdout = io::stdout();
+ let mut buf = Vec::default();
let env_snapshot = EnvSnapshot::default();
let srv = proc_macro_srv::ProcMacroSrv::new(&env_snapshot);
@@ -61,8 +65,7 @@ fn run_new<C: Codec>() -> io::Result<()> {
let mut span_mode = legacy::SpanMode::Id;
'outer: loop {
- let req_opt =
- bidirectional::BidirectionalMessage::read::<_, C>(&mut stdin.lock(), &mut buf)?;
+ let req_opt = bidirectional::BidirectionalMessage::read(stdin, &mut buf)?;
let Some(req) = req_opt else {
break 'outer;
};
@@ -77,22 +80,22 @@ fn run_new<C: Codec>() -> io::Result<()> {
.collect()
});
- send_response::<C>(&stdout, bidirectional::Response::ListMacros(res))?;
+ send_response(stdout, bidirectional::Response::ListMacros(res))?;
}
bidirectional::Request::ApiVersionCheck {} => {
- send_response::<C>(
- &stdout,
+ send_response(
+ stdout,
bidirectional::Response::ApiVersionCheck(CURRENT_API_VERSION),
)?;
}
bidirectional::Request::SetConfig(config) => {
span_mode = config.span_mode;
- send_response::<C>(&stdout, bidirectional::Response::SetConfig(config))?;
+ send_response(stdout, bidirectional::Response::SetConfig(config))?;
}
bidirectional::Request::ExpandMacro(task) => {
- handle_expand::<C>(&srv, &mut stdin, &mut stdout, &mut buf, span_mode, *task)?;
+ handle_expand(&srv, stdin, stdout, &mut buf, span_mode, *task)?;
}
},
_ => continue,
@@ -102,23 +105,23 @@ fn run_new<C: Codec>() -> io::Result<()> {
Ok(())
}
-fn handle_expand<C: Codec>(
+fn handle_expand(
srv: &proc_macro_srv::ProcMacroSrv<'_>,
- stdin: &io::Stdin,
- stdout: &io::Stdout,
- buf: &mut C::Buf,
+ stdin: &mut (dyn BufRead + Send + Sync),
+ stdout: &mut (dyn Write + Send + Sync),
+ buf: &mut Vec<u8>,
span_mode: legacy::SpanMode,
task: bidirectional::ExpandMacro,
) -> io::Result<()> {
match span_mode {
- legacy::SpanMode::Id => handle_expand_id::<C>(srv, stdout, task),
- legacy::SpanMode::RustAnalyzer => handle_expand_ra::<C>(srv, stdin, stdout, buf, task),
+ legacy::SpanMode::Id => handle_expand_id(srv, stdout, task),
+ legacy::SpanMode::RustAnalyzer => handle_expand_ra(srv, stdin, stdout, buf, task),
}
}
-fn handle_expand_id<C: Codec>(
+fn handle_expand_id(
srv: &proc_macro_srv::ProcMacroSrv<'_>,
- stdout: &io::Stdout,
+ stdout: &mut dyn Write,
task: bidirectional::ExpandMacro,
) -> io::Result<()> {
let bidirectional::ExpandMacro { lib, env, current_dir, data } = task;
@@ -157,40 +160,65 @@ fn handle_expand_id<C: Codec>(
})
.map_err(|e| legacy::PanicMessage(e.into_string().unwrap_or_default()));
- send_response::<C>(&stdout, bidirectional::Response::ExpandMacro(res))
+ send_response(stdout, bidirectional::Response::ExpandMacro(res))
}
-struct ProcMacroClientHandle<'a, C: Codec> {
- stdin: &'a io::Stdin,
- stdout: &'a io::Stdout,
- buf: &'a mut C::Buf,
+struct ProcMacroClientHandle<'a> {
+ stdin: &'a mut (dyn BufRead + Send + Sync),
+ stdout: &'a mut (dyn Write + Send + Sync),
+ buf: &'a mut Vec<u8>,
}
-impl<'a, C: Codec> ProcMacroClientHandle<'a, C> {
+impl<'a> ProcMacroClientHandle<'a> {
fn roundtrip(
&mut self,
req: bidirectional::SubRequest,
- ) -> Option<bidirectional::BidirectionalMessage> {
+ ) -> Result<bidirectional::SubResponse, ProcMacroClientError> {
let msg = bidirectional::BidirectionalMessage::SubRequest(req);
- if msg.write::<_, C>(&mut self.stdout.lock()).is_err() {
- return None;
+ msg.write(&mut *self.stdout).map_err(ProcMacroClientError::Io)?;
+
+ let msg = bidirectional::BidirectionalMessage::read(&mut *self.stdin, self.buf)
+ .map_err(ProcMacroClientError::Io)?
+ .ok_or(ProcMacroClientError::Eof)?;
+
+ match msg {
+ bidirectional::BidirectionalMessage::SubResponse(resp) => match resp {
+ bidirectional::SubResponse::Cancel { reason } => {
+ Err(ProcMacroClientError::Cancelled { reason })
+ }
+ other => Ok(other),
+ },
+ other => {
+ Err(ProcMacroClientError::Protocol(format!("expected SubResponse, got {other:?}")))
+ }
}
+ }
+}
- match bidirectional::BidirectionalMessage::read::<_, C>(&mut self.stdin.lock(), self.buf) {
- Ok(Some(msg)) => Some(msg),
- _ => None,
+fn handle_failure(failure: Result<bidirectional::SubResponse, ProcMacroClientError>) -> ! {
+ match failure {
+ Err(ProcMacroClientError::Cancelled { reason }) => {
+ resume_unwind(Box::new(ProcMacroPanicMarker::Cancelled { reason }));
+ }
+ Err(err) => {
+ panic_any(ProcMacroPanicMarker::Internal {
+ reason: format!("proc-macro IPC error: {err:?}"),
+ });
+ }
+ Ok(other) => {
+ panic_any(ProcMacroPanicMarker::Internal {
+ reason: format!("unexpected SubResponse {other:?}"),
+ });
}
}
}
-impl<C: Codec> proc_macro_srv::ProcMacroClientInterface for ProcMacroClientHandle<'_, C> {
+impl proc_macro_srv::ProcMacroClientInterface for ProcMacroClientHandle<'_> {
fn file(&mut self, file_id: proc_macro_srv::span::FileId) -> String {
match self.roundtrip(bidirectional::SubRequest::FilePath { file_id: file_id.index() }) {
- Some(bidirectional::BidirectionalMessage::SubResponse(
- bidirectional::SubResponse::FilePathResult { name },
- )) => name,
- _ => String::new(),
+ Ok(bidirectional::SubResponse::FilePathResult { name }) => name,
+ other => handle_failure(other),
}
}
@@ -204,29 +232,54 @@ impl<C: Codec> proc_macro_srv::ProcMacroClientInterface for ProcMacroClientHandl
start: range.start().into(),
end: range.end().into(),
}) {
- Some(bidirectional::BidirectionalMessage::SubResponse(
- bidirectional::SubResponse::SourceTextResult { text },
- )) => text,
- _ => None,
+ Ok(bidirectional::SubResponse::SourceTextResult { text }) => text,
+ other => handle_failure(other),
}
}
fn local_file(&mut self, file_id: proc_macro_srv::span::FileId) -> Option<String> {
match self.roundtrip(bidirectional::SubRequest::LocalFilePath { file_id: file_id.index() })
{
- Some(bidirectional::BidirectionalMessage::SubResponse(
- bidirectional::SubResponse::LocalFilePathResult { name },
- )) => name,
- _ => None,
+ Ok(bidirectional::SubResponse::LocalFilePathResult { name }) => name,
+ other => handle_failure(other),
+ }
+ }
+
+ fn line_column(&mut self, span: proc_macro_srv::span::Span) -> Option<(u32, u32)> {
+ let proc_macro_srv::span::Span { range, anchor, ctx: _ } = span;
+ match self.roundtrip(bidirectional::SubRequest::LineColumn {
+ file_id: anchor.file_id.as_u32(),
+ ast_id: anchor.ast_id.into_raw(),
+ offset: range.start().into(),
+ }) {
+ Ok(bidirectional::SubResponse::LineColumnResult { line, column }) => {
+ Some((line, column))
+ }
+ other => handle_failure(other),
+ }
+ }
+
+ fn byte_range(
+ &mut self,
+ proc_macro_srv::span::Span { range, anchor, ctx: _ }: proc_macro_srv::span::Span,
+ ) -> Range<usize> {
+ match self.roundtrip(bidirectional::SubRequest::ByteRange {
+ file_id: anchor.file_id.as_u32(),
+ ast_id: anchor.ast_id.into_raw(),
+ start: range.start().into(),
+ end: range.end().into(),
+ }) {
+ Ok(bidirectional::SubResponse::ByteRangeResult { range }) => range,
+ other => handle_failure(other),
}
}
}
-fn handle_expand_ra<C: Codec>(
+fn handle_expand_ra(
srv: &proc_macro_srv::ProcMacroSrv<'_>,
- stdin: &io::Stdin,
- stdout: &io::Stdout,
- buf: &mut C::Buf,
+ stdin: &mut (dyn BufRead + Send + Sync),
+ stdout: &mut (dyn Write + Send + Sync),
+ buf: &mut Vec<u8>,
task: bidirectional::ExpandMacro,
) -> io::Result<()> {
let bidirectional::ExpandMacro {
@@ -271,7 +324,7 @@ fn handle_expand_ra<C: Codec>(
def_site,
call_site,
mixed_site,
- Some(&mut ProcMacroClientHandle::<C> { stdin, stdout, buf }),
+ Some(&mut ProcMacroClientHandle { stdin, stdout, buf }),
)
.map(|it| {
(
@@ -287,10 +340,13 @@ fn handle_expand_ra<C: Codec>(
.map(|(tree, span_data_table)| bidirectional::ExpandMacroExtended { tree, span_data_table })
.map_err(|e| legacy::PanicMessage(e.into_string().unwrap_or_default()));
- send_response::<C>(&stdout, bidirectional::Response::ExpandMacroExtended(res))
+ send_response(stdout, bidirectional::Response::ExpandMacroExtended(res))
}
-fn run_<C: Codec>() -> io::Result<()> {
+fn run_old(
+ stdin: &mut (dyn BufRead + Send + Sync),
+ stdout: &mut (dyn Write + Send + Sync),
+) -> io::Result<()> {
fn macro_kind_to_api(kind: proc_macro_srv::ProcMacroKind) -> proc_macro_api::ProcMacroKind {
match kind {
proc_macro_srv::ProcMacroKind::CustomDerive => {
@@ -301,9 +357,9 @@ fn run_<C: Codec>() -> io::Result<()> {
}
}
- let mut buf = C::Buf::default();
- let mut read_request = || legacy::Request::read::<_, C>(&mut io::stdin().lock(), &mut buf);
- let write_response = |msg: legacy::Response| msg.write::<_, C>(&mut io::stdout().lock());
+ let mut buf = String::default();
+ let mut read_request = || legacy::Request::read(stdin, &mut buf);
+ let mut write_response = |msg: legacy::Response| msg.write(stdout);
let env = EnvSnapshot::default();
let srv = proc_macro_srv::ProcMacroSrv::new(&env);
@@ -432,7 +488,7 @@ fn run_<C: Codec>() -> io::Result<()> {
Ok(())
}
-fn send_response<C: Codec>(stdout: &io::Stdout, resp: bidirectional::Response) -> io::Result<()> {
+fn send_response(stdout: &mut dyn Write, resp: bidirectional::Response) -> io::Result<()> {
let resp = bidirectional::BidirectionalMessage::Response(resp);
- resp.write::<_, C>(&mut stdout.lock())
+ resp.write(stdout)
}
diff --git a/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs b/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs
new file mode 100644
index 0000000000..ba9657a9bb
--- /dev/null
+++ b/crates/proc-macro-srv-cli/tests/bidirectional_postcard.rs
@@ -0,0 +1,229 @@
+#![cfg(feature = "sysroot-abi")]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_driver as _;
+
+mod common {
+ pub(crate) mod utils;
+}
+
+use common::utils::{
+ create_empty_token_tree, proc_macro_test_dylib_path, request_bidirectional, with_server,
+};
+use expect_test::expect;
+use proc_macro_api::{
+ ProtocolFormat::BidirectionalPostcardPrototype,
+ bidirectional_protocol::{
+ msg::{ExpandMacro, ExpandMacroData, ExpnGlobals, Request, Response},
+ reject_subrequests,
+ },
+ legacy_protocol::msg::{PanicMessage, ServerConfig, SpanDataIndexMap, SpanMode},
+ version::CURRENT_API_VERSION,
+};
+
+#[test]
+fn test_bidi_version_check_bidirectional() {
+ with_server(BidirectionalPostcardPrototype, |writer, reader| {
+ let response =
+ request_bidirectional(writer, reader, Request::ApiVersionCheck {}, reject_subrequests);
+
+ match response {
+ Response::ApiVersionCheck(version) => {
+ assert_eq!(version, CURRENT_API_VERSION);
+ }
+ other => panic!("unexpected response: {other:?}"),
+ }
+ });
+}
+
+#[test]
+fn test_bidi_list_macros() {
+ with_server(BidirectionalPostcardPrototype, |writer, reader| {
+ let dylib_path = proc_macro_test_dylib_path();
+ let response = request_bidirectional(
+ writer,
+ reader,
+ Request::ListMacros { dylib_path },
+ &reject_subrequests,
+ );
+
+ let Response::ListMacros(Ok(macros)) = response else {
+ panic!("expected successful ListMacros response");
+ };
+
+ let mut macro_list: Vec<_> =
+ macros.iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect();
+ macro_list.sort();
+ let macro_list_str = macro_list.join("\n");
+
+ expect![[r#"
+ DeriveEmpty [CustomDerive]
+ DeriveError [CustomDerive]
+ DerivePanic [CustomDerive]
+ DeriveReemit [CustomDerive]
+ attr_error [Attr]
+ attr_noop [Attr]
+ attr_panic [Attr]
+ fn_like_clone_tokens [Bang]
+ fn_like_error [Bang]
+ fn_like_mk_idents [Bang]
+ fn_like_mk_literals [Bang]
+ fn_like_noop [Bang]
+ fn_like_panic [Bang]
+ fn_like_span_join [Bang]
+ fn_like_span_line_column [Bang]
+ fn_like_span_ops [Bang]"#]]
+ .assert_eq(&macro_list_str);
+ });
+}
+
+#[test]
+fn test_bidi_list_macros_invalid_path() {
+ with_server(BidirectionalPostcardPrototype, |writer, reader| {
+ let response = request_bidirectional(
+ writer,
+ reader,
+ Request::ListMacros { dylib_path: "/nonexistent/path/to/dylib.so".into() },
+ reject_subrequests,
+ );
+
+ match response {
+ Response::ListMacros(Err(e)) => assert!(
+ e.starts_with("Cannot create expander for /nonexistent/path/to/dylib.so"),
+ "{e}"
+ ),
+ other => panic!("expected error response, got: {other:?}"),
+ }
+ });
+}
+
+#[test]
+fn test_bidi_set_config() {
+ with_server(BidirectionalPostcardPrototype, |writer, reader| {
+ let config = ServerConfig { span_mode: SpanMode::Id };
+ let response =
+ request_bidirectional(writer, reader, Request::SetConfig(config), reject_subrequests);
+
+ match response {
+ Response::SetConfig(returned_config) => {
+ assert_eq!(returned_config.span_mode, SpanMode::Id);
+ }
+ other => panic!("unexpected response: {other:?}"),
+ }
+ });
+}
+
+#[test]
+fn test_bidi_set_config_rust_analyzer_mode() {
+ with_server(BidirectionalPostcardPrototype, |writer, reader| {
+ let config = ServerConfig { span_mode: SpanMode::RustAnalyzer };
+ let response =
+ request_bidirectional(writer, reader, Request::SetConfig(config), reject_subrequests);
+
+ match response {
+ Response::SetConfig(returned_config) => {
+ assert_eq!(returned_config.span_mode, SpanMode::RustAnalyzer);
+ }
+ other => panic!("unexpected response: {other:?}"),
+ }
+ });
+}
+
+#[test]
+fn test_bidi_expand_macro_panic() {
+ with_server(BidirectionalPostcardPrototype, |writer, reader| {
+ let dylib_path = proc_macro_test_dylib_path();
+
+ let mut span_data_table = SpanDataIndexMap::default();
+ let macro_body =
+ common::utils::create_empty_token_tree(CURRENT_API_VERSION, &mut span_data_table);
+
+ let request1 = Request::ExpandMacro(Box::new(ExpandMacro {
+ lib: dylib_path,
+ env: vec![],
+ current_dir: None,
+ data: ExpandMacroData {
+ macro_body,
+ macro_name: "fn_like_panic".to_owned(),
+ attributes: None,
+ has_global_spans: ExpnGlobals { def_site: 0, call_site: 0, mixed_site: 0 },
+ span_data_table: vec![],
+ },
+ }));
+
+ let response = request_bidirectional(writer, reader, request1, reject_subrequests);
+
+ match response {
+ Response::ExpandMacro(Err(PanicMessage(msg))) => {
+ assert!(msg.contains("fn_like_panic"), "panic message should mention macro name");
+ }
+ other => panic!("expected panic response, got: {other:?}"),
+ }
+ });
+}
+
+#[test]
+fn test_bidi_basic_call_flow() {
+ with_server(BidirectionalPostcardPrototype, |writer, reader| {
+ let dylib_path = proc_macro_test_dylib_path();
+
+ let response1 =
+ request_bidirectional(writer, reader, Request::ApiVersionCheck {}, reject_subrequests);
+ assert!(matches!(response1, Response::ApiVersionCheck(_)));
+
+ let response2 = request_bidirectional(
+ writer,
+ reader,
+ Request::SetConfig(ServerConfig { span_mode: SpanMode::Id }),
+ reject_subrequests,
+ );
+ assert!(matches!(response2, Response::SetConfig(_)));
+
+ let response3 = request_bidirectional(
+ writer,
+ reader,
+ Request::ListMacros { dylib_path: dylib_path.clone() },
+ reject_subrequests,
+ );
+ assert!(matches!(response3, Response::ListMacros(Ok(_))));
+ });
+}
+
+#[test]
+fn test_bidi_expand_nonexistent_macro() {
+ with_server(BidirectionalPostcardPrototype, |writer, reader| {
+ let dylib_path = proc_macro_test_dylib_path();
+
+ let version_response =
+ request_bidirectional(writer, reader, Request::ApiVersionCheck {}, reject_subrequests);
+ let Response::ApiVersionCheck(version) = version_response else {
+ panic!("expected version check response");
+ };
+
+ let mut span_data_table = SpanDataIndexMap::default();
+ let macro_body = create_empty_token_tree(version, &mut span_data_table);
+
+ let expand_request = Request::ExpandMacro(Box::new(ExpandMacro {
+ lib: dylib_path,
+ env: vec![],
+ current_dir: None,
+ data: ExpandMacroData {
+ macro_body,
+ macro_name: "NonexistentMacro".to_owned(),
+ attributes: None,
+ has_global_spans: ExpnGlobals { def_site: 0, call_site: 0, mixed_site: 0 },
+ span_data_table: vec![],
+ },
+ }));
+
+ let response = request_bidirectional(writer, reader, expand_request, reject_subrequests);
+
+ match response {
+ Response::ExpandMacro(Err(PanicMessage(msg))) => {
+ expect!["proc-macro `NonexistentMacro` is missing"].assert_eq(&msg)
+ }
+ other => panic!("expected error for nonexistent macro, got: {other:?}"),
+ }
+ });
+}
diff --git a/crates/proc-macro-srv-cli/tests/common/utils.rs b/crates/proc-macro-srv-cli/tests/common/utils.rs
new file mode 100644
index 0000000000..3049e98004
--- /dev/null
+++ b/crates/proc-macro-srv-cli/tests/common/utils.rs
@@ -0,0 +1,288 @@
+use std::{
+ collections::VecDeque,
+ io::{self, BufRead, Read, Write},
+ sync::{Arc, Condvar, Mutex},
+ thread,
+};
+
+use paths::Utf8PathBuf;
+use proc_macro_api::{
+ ServerError,
+ bidirectional_protocol::msg::{
+ BidirectionalMessage, Request as BiRequest, Response as BiResponse, SubRequest, SubResponse,
+ },
+ legacy_protocol::msg::{FlatTree, Message, Request, Response, SpanDataIndexMap},
+};
+use span::{Edition, EditionedFileId, FileId, Span, SpanAnchor, SyntaxContext, TextRange};
+use tt::{Delimiter, DelimiterKind, TopSubtreeBuilder};
+
+/// Shared state for an in-memory byte channel.
+#[derive(Default)]
+struct ChannelState {
+ buffer: VecDeque<u8>,
+ closed: bool,
+}
+
+type InMemoryChannel = Arc<(Mutex<ChannelState>, Condvar)>;
+
+/// Writer end of an in-memory channel.
+pub(crate) struct ChannelWriter {
+ state: InMemoryChannel,
+}
+
+impl Write for ChannelWriter {
+ fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+ let (lock, cvar) = &*self.state;
+ let mut state = lock.lock().unwrap();
+ if state.closed {
+ return Err(io::Error::new(io::ErrorKind::BrokenPipe, "channel closed"));
+ }
+ state.buffer.extend(buf);
+ cvar.notify_all();
+ Ok(buf.len())
+ }
+
+ fn flush(&mut self) -> io::Result<()> {
+ Ok(())
+ }
+}
+
+impl Drop for ChannelWriter {
+ fn drop(&mut self) {
+ let (lock, cvar) = &*self.state;
+ let mut state = lock.lock().unwrap();
+ state.closed = true;
+ cvar.notify_all();
+ }
+}
+
+/// Reader end of an in-memory channel.
+pub(crate) struct ChannelReader {
+ state: InMemoryChannel,
+ internal_buf: Vec<u8>,
+}
+
+impl Read for ChannelReader {
+ fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+ let (lock, cvar) = &*self.state;
+ let mut state = lock.lock().unwrap();
+
+ while state.buffer.is_empty() && !state.closed {
+ state = cvar.wait(state).unwrap();
+ }
+
+ if state.buffer.is_empty() && state.closed {
+ return Ok(0);
+ }
+
+ let to_read = buf.len().min(state.buffer.len());
+ for (dst, src) in buf.iter_mut().zip(state.buffer.drain(..to_read)) {
+ *dst = src;
+ }
+ Ok(to_read)
+ }
+}
+
+impl BufRead for ChannelReader {
+ fn fill_buf(&mut self) -> io::Result<&[u8]> {
+ let (lock, cvar) = &*self.state;
+ let mut state = lock.lock().unwrap();
+
+ while state.buffer.is_empty() && !state.closed {
+ state = cvar.wait(state).unwrap();
+ }
+
+ self.internal_buf.clear();
+ self.internal_buf.extend(&state.buffer);
+ Ok(&self.internal_buf)
+ }
+
+ fn consume(&mut self, amt: usize) {
+ let (lock, _) = &*self.state;
+ let mut state = lock.lock().unwrap();
+ let to_drain = amt.min(state.buffer.len());
+ drop(state.buffer.drain(..to_drain));
+ }
+}
+
+/// Creates a connected pair of channels for bidirectional communication.
+fn create_channel_pair() -> (ChannelWriter, ChannelReader, ChannelWriter, ChannelReader) {
+ // Channel for client -> server communication
+ let client_to_server = Arc::new((
+ Mutex::new(ChannelState { buffer: VecDeque::new(), closed: false }),
+ Condvar::new(),
+ ));
+ let client_writer = ChannelWriter { state: client_to_server.clone() };
+ let server_reader = ChannelReader { state: client_to_server, internal_buf: Vec::new() };
+
+ // Channel for server -> client communication
+ let server_to_client = Arc::new((
+ Mutex::new(ChannelState { buffer: VecDeque::new(), closed: false }),
+ Condvar::new(),
+ ));
+
+ let server_writer = ChannelWriter { state: server_to_client.clone() };
+ let client_reader = ChannelReader { state: server_to_client, internal_buf: Vec::new() };
+
+ (client_writer, client_reader, server_writer, server_reader)
+}
+
+pub(crate) fn proc_macro_test_dylib_path() -> Utf8PathBuf {
+ let path = proc_macro_test::PROC_MACRO_TEST_LOCATION;
+ if path.is_empty() {
+ panic!("proc-macro-test dylib not available (requires nightly toolchain)");
+ }
+ path.into()
+}
+
+/// Creates a simple empty token tree suitable for testing.
+pub(crate) fn create_empty_token_tree(
+ version: u32,
+ span_data_table: &mut SpanDataIndexMap,
+) -> FlatTree {
+ let anchor = SpanAnchor {
+ file_id: EditionedFileId::new(FileId::from_raw(0), Edition::CURRENT),
+ ast_id: span::ROOT_ERASED_FILE_AST_ID,
+ };
+ let span = Span {
+ range: TextRange::empty(0.into()),
+ anchor,
+ ctx: SyntaxContext::root(Edition::CURRENT),
+ };
+
+ let builder = TopSubtreeBuilder::new(Delimiter {
+ open: span,
+ close: span,
+ kind: DelimiterKind::Invisible,
+ });
+ let tt = builder.build();
+
+ FlatTree::from_subtree(tt.view(), version, span_data_table)
+}
+
+pub(crate) fn with_server<F, R>(format: proc_macro_api::ProtocolFormat, test_fn: F) -> R
+where
+ F: FnOnce(&mut dyn Write, &mut dyn BufRead) -> R,
+{
+ let (mut client_writer, mut client_reader, mut server_writer, mut server_reader) =
+ create_channel_pair();
+
+ let server_handle = thread::spawn(move || {
+ proc_macro_srv_cli::main_loop::run(&mut server_reader, &mut server_writer, format)
+ });
+
+ let result = test_fn(&mut client_writer, &mut client_reader);
+
+ drop(client_writer);
+
+ match server_handle.join() {
+ Ok(Ok(())) => {}
+ Ok(Err(e)) => {
+ if !matches!(
+ e.kind(),
+ io::ErrorKind::BrokenPipe
+ | io::ErrorKind::UnexpectedEof
+ | io::ErrorKind::InvalidData
+ ) {
+ panic!("Server error: {e}");
+ }
+ }
+ Err(e) => std::panic::resume_unwind(e),
+ }
+
+ result
+}
+
+trait TestProtocol {
+ type Request;
+ type Response;
+
+ fn request(&self, writer: &mut dyn Write, req: Self::Request);
+ fn receive(&self, reader: &mut dyn BufRead, writer: &mut dyn Write) -> Self::Response;
+}
+
+#[allow(dead_code)]
+struct JsonLegacy;
+
+impl TestProtocol for JsonLegacy {
+ type Request = Request;
+ type Response = Response;
+
+ fn request(&self, writer: &mut dyn Write, req: Request) {
+ req.write(writer).expect("failed to write request");
+ }
+
+ fn receive(&self, reader: &mut dyn BufRead, _writer: &mut dyn Write) -> Response {
+ let mut buf = String::new();
+ Response::read(reader, &mut buf)
+ .expect("failed to read response")
+ .expect("no response received")
+ }
+}
+
+#[allow(dead_code)]
+struct PostcardBidirectional<F>
+where
+ F: Fn(SubRequest) -> Result<SubResponse, ServerError>,
+{
+ callback: F,
+}
+
+impl<F> TestProtocol for PostcardBidirectional<F>
+where
+ F: Fn(SubRequest) -> Result<SubResponse, ServerError>,
+{
+ type Request = BiRequest;
+ type Response = BiResponse;
+
+ fn request(&self, writer: &mut dyn Write, req: BiRequest) {
+ let msg = BidirectionalMessage::Request(req);
+ msg.write(writer).expect("failed to write request");
+ }
+
+ fn receive(&self, reader: &mut dyn BufRead, writer: &mut dyn Write) -> BiResponse {
+ let mut buf = Vec::new();
+
+ loop {
+ let msg = BidirectionalMessage::read(reader, &mut buf)
+ .expect("failed to read message")
+ .expect("no message received");
+
+ match msg {
+ BidirectionalMessage::Response(resp) => return resp,
+ BidirectionalMessage::SubRequest(sr) => {
+ let reply = (self.callback)(sr).expect("subrequest callback failed");
+ let msg = BidirectionalMessage::SubResponse(reply);
+ msg.write(writer).expect("failed to write subresponse");
+ }
+ other => panic!("unexpected message: {other:?}"),
+ }
+ }
+ }
+}
+
+#[allow(dead_code)]
+pub(crate) fn request_legacy(
+ writer: &mut dyn Write,
+ reader: &mut dyn BufRead,
+ request: Request,
+) -> Response {
+ let protocol = JsonLegacy;
+ protocol.request(writer, request);
+ protocol.receive(reader, writer)
+}
+
+#[allow(dead_code)]
+pub(crate) fn request_bidirectional<F>(
+ writer: &mut dyn Write,
+ reader: &mut dyn BufRead,
+ request: BiRequest,
+ callback: F,
+) -> BiResponse
+where
+ F: Fn(SubRequest) -> Result<SubResponse, ServerError>,
+{
+ let protocol = PostcardBidirectional { callback };
+ protocol.request(writer, request);
+ protocol.receive(reader, writer)
+}
diff --git a/crates/proc-macro-srv-cli/tests/legacy_json.rs b/crates/proc-macro-srv-cli/tests/legacy_json.rs
new file mode 100644
index 0000000000..562cf0c251
--- /dev/null
+++ b/crates/proc-macro-srv-cli/tests/legacy_json.rs
@@ -0,0 +1,234 @@
+//! Integration tests for the proc-macro-srv-cli main loop.
+//!
+//! These tests exercise the full client-server RPC flow over in-memory
+//! channels, without spawning the actual server and client processes.
+
+#![cfg(feature = "sysroot-abi")]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_driver as _;
+
+mod common {
+ pub(crate) mod utils;
+}
+
+use common::utils::{
+ create_empty_token_tree, proc_macro_test_dylib_path, request_legacy, with_server,
+};
+use expect_test::expect;
+use proc_macro_api::{
+ ProtocolFormat::JsonLegacy,
+ legacy_protocol::msg::{
+ ExpandMacro, ExpandMacroData, ExpnGlobals, PanicMessage, Request, Response, ServerConfig,
+ SpanDataIndexMap, SpanMode,
+ },
+ version::CURRENT_API_VERSION,
+};
+
+#[test]
+fn test_version_check() {
+ with_server(JsonLegacy, |writer, reader| {
+ let response = request_legacy(writer, reader, Request::ApiVersionCheck {});
+
+ match response {
+ Response::ApiVersionCheck(version) => {
+ assert_eq!(version, CURRENT_API_VERSION);
+ }
+ other => panic!("unexpected response: {other:?}"),
+ }
+ });
+}
+
+#[test]
+fn test_list_macros() {
+ with_server(JsonLegacy, |writer, reader| {
+ let dylib_path = proc_macro_test_dylib_path();
+ let response = request_legacy(writer, reader, Request::ListMacros { dylib_path });
+
+ let Response::ListMacros(Ok(macros)) = response else {
+ panic!("expected successful ListMacros response");
+ };
+
+ let mut macro_list: Vec<_> =
+ macros.iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect();
+ macro_list.sort();
+ let macro_list_str = macro_list.join("\n");
+
+ expect![[r#"
+ DeriveEmpty [CustomDerive]
+ DeriveError [CustomDerive]
+ DerivePanic [CustomDerive]
+ DeriveReemit [CustomDerive]
+ attr_error [Attr]
+ attr_noop [Attr]
+ attr_panic [Attr]
+ fn_like_clone_tokens [Bang]
+ fn_like_error [Bang]
+ fn_like_mk_idents [Bang]
+ fn_like_mk_literals [Bang]
+ fn_like_noop [Bang]
+ fn_like_panic [Bang]
+ fn_like_span_join [Bang]
+ fn_like_span_line_column [Bang]
+ fn_like_span_ops [Bang]"#]]
+ .assert_eq(&macro_list_str);
+ });
+}
+
+#[test]
+fn test_list_macros_invalid_path() {
+ with_server(JsonLegacy, |writer, reader| {
+ let response = request_legacy(
+ writer,
+ reader,
+ Request::ListMacros { dylib_path: "/nonexistent/path/to/dylib.so".into() },
+ );
+
+ match response {
+ Response::ListMacros(Err(e)) => assert!(
+ e.starts_with("Cannot create expander for /nonexistent/path/to/dylib.so"),
+ "{e}"
+ ),
+ other => panic!("expected error response, got: {other:?}"),
+ }
+ });
+}
+
+#[test]
+fn test_set_config() {
+ with_server(JsonLegacy, |writer, reader| {
+ let config = ServerConfig { span_mode: SpanMode::Id };
+ let response = request_legacy(writer, reader, Request::SetConfig(config));
+
+ match response {
+ Response::SetConfig(returned_config) => {
+ assert_eq!(returned_config.span_mode, SpanMode::Id);
+ }
+ other => panic!("unexpected response: {other:?}"),
+ }
+ });
+}
+
+#[test]
+fn test_set_config_rust_analyzer_mode() {
+ with_server(JsonLegacy, |writer, reader| {
+ let config = ServerConfig { span_mode: SpanMode::RustAnalyzer };
+ let response = request_legacy(writer, reader, Request::SetConfig(config));
+
+ match response {
+ Response::SetConfig(returned_config) => {
+ assert_eq!(returned_config.span_mode, SpanMode::RustAnalyzer);
+ }
+ other => panic!("unexpected response: {other:?}"),
+ }
+ });
+}
+
+#[test]
+fn test_expand_macro_panic() {
+ with_server(JsonLegacy, |writer, reader| {
+ let dylib_path = proc_macro_test_dylib_path();
+
+ let version_response = request_legacy(writer, reader, Request::ApiVersionCheck {});
+ let Response::ApiVersionCheck(version) = version_response else {
+ panic!("expected version check response");
+ };
+
+ let mut span_data_table = SpanDataIndexMap::default();
+ let macro_body = create_empty_token_tree(version, &mut span_data_table);
+
+ let expand_request = Request::ExpandMacro(Box::new(ExpandMacro {
+ lib: dylib_path,
+ env: vec![],
+ current_dir: None,
+ data: ExpandMacroData {
+ macro_body,
+ macro_name: "fn_like_panic".to_owned(),
+ attributes: None,
+ has_global_spans: ExpnGlobals {
+ serialize: version >= 3,
+ def_site: 0,
+ call_site: 0,
+ mixed_site: 0,
+ },
+ span_data_table: vec![],
+ },
+ }));
+
+ let response = request_legacy(writer, reader, expand_request);
+
+ match response {
+ Response::ExpandMacro(Err(PanicMessage(msg))) => {
+ assert!(msg.contains("fn_like_panic"), "panic message should mention the macro");
+ }
+ Response::ExpandMacro(Ok(_)) => {
+ panic!("expected panic, but macro succeeded");
+ }
+ other => panic!("unexpected response: {other:?}"),
+ }
+ });
+}
+
+#[test]
+fn test_basic_call_flow() {
+ with_server(JsonLegacy, |writer, reader| {
+ let dylib_path = proc_macro_test_dylib_path();
+
+ let response1 = request_legacy(writer, reader, Request::ApiVersionCheck {});
+ assert!(matches!(response1, Response::ApiVersionCheck(_)));
+
+ let response2 = request_legacy(
+ writer,
+ reader,
+ Request::SetConfig(ServerConfig { span_mode: SpanMode::Id }),
+ );
+ assert!(matches!(response2, Response::SetConfig(_)));
+
+ let response3 =
+ request_legacy(writer, reader, Request::ListMacros { dylib_path: dylib_path.clone() });
+ assert!(matches!(response3, Response::ListMacros(Ok(_))));
+ });
+}
+
+#[test]
+fn test_expand_nonexistent_macro() {
+ with_server(JsonLegacy, |writer, reader| {
+ let dylib_path = proc_macro_test_dylib_path();
+
+ let version_response = request_legacy(writer, reader, Request::ApiVersionCheck {});
+ let Response::ApiVersionCheck(version) = version_response else {
+ panic!("expected version check response");
+ };
+
+ let mut span_data_table = SpanDataIndexMap::default();
+ let macro_body = create_empty_token_tree(version, &mut span_data_table);
+
+ let expand_request = Request::ExpandMacro(Box::new(ExpandMacro {
+ lib: dylib_path,
+ env: vec![],
+ current_dir: None,
+ data: ExpandMacroData {
+ macro_body,
+ macro_name: "NonexistentMacro".to_owned(),
+ attributes: None,
+ has_global_spans: ExpnGlobals {
+ serialize: version >= 3,
+ def_site: 0,
+ call_site: 0,
+ mixed_site: 0,
+ },
+ span_data_table: vec![],
+ },
+ }));
+
+ let response = request_legacy(writer, reader, expand_request);
+
+ match response {
+ Response::ExpandMacro(Err(PanicMessage(msg))) => {
+ expect!["proc-macro `NonexistentMacro` is missing"].assert_eq(&msg)
+ }
+ other => panic!("expected error for nonexistent macro, got: {other:?}"),
+ }
+ });
+}
diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml
index 3610171784..8e5617f8a2 100644
--- a/crates/proc-macro-srv/Cargo.toml
+++ b/crates/proc-macro-srv/Cargo.toml
@@ -31,6 +31,7 @@ libc.workspace = true
[dev-dependencies]
expect-test.workspace = true
+line-index.workspace = true
# used as proc macro test targets
proc-macro-test.path = "./proc-macro-test"
diff --git a/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
index b4fac26d6e..06c76b6d03 100644
--- a/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
+++ b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
@@ -79,6 +79,16 @@ pub fn fn_like_span_ops(args: TokenStream) -> TokenStream {
TokenStream::from_iter(vec![first, second, third])
}
+/// Returns the line and column of the first token's span as two integer literals.
+#[proc_macro]
+pub fn fn_like_span_line_column(args: TokenStream) -> TokenStream {
+ let first = args.into_iter().next().unwrap();
+ let span = first.span();
+ let line = Literal::usize_unsuffixed(span.line());
+ let column = Literal::usize_unsuffixed(span.column());
+ TokenStream::from_iter(vec![TokenTree::Literal(line), TokenTree::Literal(column)])
+}
+
#[proc_macro_attribute]
pub fn attr_noop(_args: TokenStream, item: TokenStream) -> TokenStream {
item
diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs
index 920d58b4e9..c548dc620a 100644
--- a/crates/proc-macro-srv/src/lib.rs
+++ b/crates/proc-macro-srv/src/lib.rs
@@ -22,8 +22,11 @@
)]
#![deny(deprecated_safe, clippy::undocumented_unsafe_blocks)]
+#[cfg(not(feature = "in-rust-tree"))]
+extern crate proc_macro as rustc_proc_macro;
#[cfg(feature = "in-rust-tree")]
extern crate rustc_driver as _;
+#[cfg(feature = "in-rust-tree")]
extern crate rustc_proc_macro;
#[cfg(not(feature = "in-rust-tree"))]
@@ -41,6 +44,7 @@ use std::{
env,
ffi::OsString,
fs,
+ ops::Range,
path::{Path, PathBuf},
sync::{Arc, Mutex, PoisonError},
thread,
@@ -92,16 +96,50 @@ impl<'env> ProcMacroSrv<'env> {
}
}
+#[derive(Debug)]
+pub enum ProcMacroClientError {
+ Cancelled { reason: String },
+ Io(std::io::Error),
+ Protocol(String),
+ Eof,
+}
+
+#[derive(Debug)]
+pub enum ProcMacroPanicMarker {
+ Cancelled { reason: String },
+ Internal { reason: String },
+}
+
pub type ProcMacroClientHandle<'a> = &'a mut (dyn ProcMacroClientInterface + Sync + Send);
pub trait ProcMacroClientInterface {
fn file(&mut self, file_id: span::FileId) -> String;
fn source_text(&mut self, span: Span) -> Option<String>;
fn local_file(&mut self, file_id: span::FileId) -> Option<String>;
+ /// Line and column are 1-based.
+ fn line_column(&mut self, span: Span) -> Option<(u32, u32)>;
+
+ fn byte_range(&mut self, span: Span) -> Range<usize>;
}
const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;
+pub enum ExpandError {
+ Panic(PanicMessage),
+ Cancelled { reason: Option<String> },
+ Internal { reason: Option<String> },
+}
+
+impl ExpandError {
+ pub fn into_string(self) -> Option<String> {
+ match self {
+ ExpandError::Panic(panic_message) => panic_message.into_string(),
+ ExpandError::Cancelled { reason } => reason,
+ ExpandError::Internal { reason } => reason,
+ }
+ }
+}
+
impl ProcMacroSrv<'_> {
pub fn expand<S: ProcMacroSrvSpan>(
&self,
@@ -115,10 +153,10 @@ impl ProcMacroSrv<'_> {
call_site: S,
mixed_site: S,
callback: Option<ProcMacroClientHandle<'_>>,
- ) -> Result<token_stream::TokenStream<S>, PanicMessage> {
+ ) -> Result<token_stream::TokenStream<S>, ExpandError> {
let snapped_env = self.env;
- let expander = self.expander(lib.as_ref()).map_err(|err| PanicMessage {
- message: Some(format!("failed to load macro: {err}")),
+ let expander = self.expander(lib.as_ref()).map_err(|err| ExpandError::Internal {
+ reason: Some(format!("failed to load macro: {err}")),
})?;
let prev_env = EnvChange::apply(snapped_env, env, current_dir.as_ref().map(<_>::as_ref));
@@ -136,8 +174,22 @@ impl ProcMacroSrv<'_> {
)
});
match thread.unwrap().join() {
- Ok(res) => res,
- Err(e) => std::panic::resume_unwind(e),
+ Ok(res) => res.map_err(ExpandError::Panic),
+
+ Err(payload) => {
+ if let Some(marker) = payload.downcast_ref::<ProcMacroPanicMarker>() {
+ return match marker {
+ ProcMacroPanicMarker::Cancelled { reason } => {
+ Err(ExpandError::Cancelled { reason: Some(reason.clone()) })
+ }
+ ProcMacroPanicMarker::Internal { reason } => {
+ Err(ExpandError::Internal { reason: Some(reason.clone()) })
+ }
+ };
+ }
+
+ std::panic::resume_unwind(payload)
+ }
}
});
prev_env.rollback();
diff --git a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
index eacb100fbc..c114d52ec3 100644
--- a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
+++ b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
@@ -177,7 +177,9 @@ impl server::Server for RaSpanServer<'_> {
span
}
fn span_byte_range(&mut self, span: Self::Span) -> Range<usize> {
- // FIXME requires db to resolve the ast id, THIS IS NOT INCREMENTAL
+ if let Some(cb) = self.callback.as_mut() {
+ return cb.byte_range(span);
+ }
Range { start: span.range.start().into(), end: span.range.end().into() }
}
fn span_join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
@@ -272,14 +274,12 @@ impl server::Server for RaSpanServer<'_> {
Span { range: TextRange::empty(span.range.start()), ..span }
}
- fn span_line(&mut self, _span: Self::Span) -> usize {
- // FIXME requires db to resolve line index, THIS IS NOT INCREMENTAL
- 1
+ fn span_line(&mut self, span: Self::Span) -> usize {
+ self.callback.as_mut().and_then(|cb| cb.line_column(span)).map_or(1, |(l, _)| l as usize)
}
- fn span_column(&mut self, _span: Self::Span) -> usize {
- // FIXME requires db to resolve line index, THIS IS NOT INCREMENTAL
- 1
+ fn span_column(&mut self, span: Self::Span) -> usize {
+ self.callback.as_mut().and_then(|cb| cb.line_column(span)).map_or(1, |(_, c)| c as usize)
}
fn symbol_normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> {
diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs
index 20507a6def..ebef9a9a51 100644
--- a/crates/proc-macro-srv/src/tests/mod.rs
+++ b/crates/proc-macro-srv/src/tests/mod.rs
@@ -703,6 +703,7 @@ fn list_test_macros() {
fn_like_mk_idents [Bang]
fn_like_span_join [Bang]
fn_like_span_ops [Bang]
+ fn_like_span_line_column [Bang]
attr_noop [Attr]
attr_panic [Attr]
attr_error [Attr]
@@ -712,3 +713,17 @@ fn list_test_macros() {
DeriveError [CustomDerive]"#]]
.assert_eq(&res);
}
+
+#[test]
+fn test_fn_like_span_line_column() {
+ assert_expand_with_callback(
+ "fn_like_span_line_column",
+        // Input text with a known position: "hello" begins at byte offset 1, i.e. line 2, column 1 (1-based).
+ "
+hello",
+ expect![[r#"
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer 2
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer 1
+ "#]],
+ );
+}
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index 61fcd810b1..b7c5c4fdd2 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -4,9 +4,11 @@ use expect_test::Expect;
use span::{
EditionedFileId, FileId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext, TextRange,
};
+use std::ops::Range;
use crate::{
- EnvSnapshot, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path, token_stream::TokenStream,
+ EnvSnapshot, ProcMacroClientInterface, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path,
+ token_stream::TokenStream,
};
fn parse_string(call_site: SpanId, src: &str) -> TokenStream<SpanId> {
@@ -109,3 +111,70 @@ pub(crate) fn list() -> Vec<String> {
let res = srv.list_macros(&dylib_path).unwrap();
res.into_iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect()
}
+
+/// A mock callback for testing that computes line/column from the input text.
+struct MockCallback<'a> {
+ text: &'a str,
+}
+
+impl ProcMacroClientInterface for MockCallback<'_> {
+ fn source_text(&mut self, span: Span) -> Option<String> {
+ self.text
+ .get(usize::from(span.range.start())..usize::from(span.range.end()))
+ .map(ToOwned::to_owned)
+ }
+
+ fn file(&mut self, _file_id: FileId) -> String {
+ String::new()
+ }
+
+ fn local_file(&mut self, _file_id: FileId) -> Option<String> {
+ None
+ }
+
+ fn line_column(&mut self, span: Span) -> Option<(u32, u32)> {
+ let line_index = line_index::LineIndex::new(self.text);
+ let line_col = line_index.try_line_col(span.range.start())?;
+ // proc_macro uses 1-based line/column
+ Some((line_col.line as u32 + 1, line_col.col as u32 + 1))
+ }
+
+ fn byte_range(&mut self, span: Span) -> Range<usize> {
+ Range { start: span.range.start().into(), end: span.range.end().into() }
+ }
+}
+
+pub fn assert_expand_with_callback(
+ macro_name: &str,
+ #[rust_analyzer::rust_fixture] ra_fixture: &str,
+ expect_spanned: Expect,
+) {
+ let path = proc_macro_test_dylib_path();
+ let expander = dylib::Expander::new(&temp_dir::TempDir::new().unwrap(), &path).unwrap();
+
+ let def_site = Span {
+ range: TextRange::new(0.into(), 150.into()),
+ anchor: SpanAnchor {
+ file_id: EditionedFileId::current_edition(FileId::from_raw(41)),
+ ast_id: ROOT_ERASED_FILE_AST_ID,
+ },
+ ctx: SyntaxContext::root(span::Edition::CURRENT),
+ };
+ let call_site = Span {
+ range: TextRange::new(0.into(), 100.into()),
+ anchor: SpanAnchor {
+ file_id: EditionedFileId::current_edition(FileId::from_raw(42)),
+ ast_id: ROOT_ERASED_FILE_AST_ID,
+ },
+ ctx: SyntaxContext::root(span::Edition::CURRENT),
+ };
+ let mixed_site = call_site;
+
+ let fixture = parse_string_spanned(call_site.anchor, call_site.ctx, ra_fixture);
+
+ let mut callback = MockCallback { text: ra_fixture };
+ let res = expander
+ .expand(macro_name, fixture, None, def_site, call_site, mixed_site, Some(&mut callback))
+ .unwrap();
+ expect_spanned.assert_eq(&format!("{res:?}"));
+}
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index 6e1a3f37ff..483ab28450 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -640,7 +640,7 @@ impl FetchMetadata {
/// Builds a command to fetch metadata for the given `cargo_toml` manifest.
///
/// Performs a lightweight pre-fetch using the `--no-deps` option,
- /// available via [`FetchMetadata::no_deps_metadata`], to gather basic
+ /// available via `FetchMetadata::no_deps_metadata`, to gather basic
/// information such as the `target-dir`.
///
/// The provided sysroot is used to set the `RUSTUP_TOOLCHAIN`
diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs
index b3478d2cfe..6938010cbd 100644
--- a/crates/project-model/src/project_json.rs
+++ b/crates/project-model/src/project_json.rs
@@ -78,6 +78,13 @@ pub struct ProjectJson {
runnables: Vec<Runnable>,
}
+impl std::ops::Index<CrateArrayIdx> for ProjectJson {
+ type Output = Crate;
+ fn index(&self, index: CrateArrayIdx) -> &Self::Output {
+ &self.crates[index.0]
+ }
+}
+
impl ProjectJson {
/// Create a new ProjectJson instance.
///
@@ -195,12 +202,11 @@ impl ProjectJson {
&self.project_root
}
- pub fn crate_by_root(&self, root: &AbsPath) -> Option<Crate> {
+ pub fn crate_by_root(&self, root: &AbsPath) -> Option<&Crate> {
self.crates
.iter()
.filter(|krate| krate.is_workspace_member)
.find(|krate| krate.root_module == root)
- .cloned()
}
/// Returns the path to the project's manifest, if it exists.
@@ -214,8 +220,17 @@ impl ProjectJson {
self.crates
.iter()
.filter(|krate| krate.is_workspace_member)
- .filter_map(|krate| krate.build.clone())
+ .filter_map(|krate| krate.build.as_ref())
.find(|build| build.build_file.as_std_path() == path)
+ .cloned()
+ }
+
+ pub fn crate_by_label(&self, label: &str) -> Option<&Crate> {
+        // Linear scan over all workspace crates: fast enough for now, but O(crates) per lookup.
+ self.crates
+ .iter()
+ .filter(|krate| krate.is_workspace_member)
+ .find(|krate| krate.build.as_ref().is_some_and(|build| build.label == label))
}
/// Returns the path to the project's manifest or root folder, if no manifest exists.
@@ -231,6 +246,10 @@ impl ProjectJson {
pub fn runnables(&self) -> &[Runnable] {
&self.runnables
}
+
+ pub fn runnable_template(&self, kind: RunnableKind) -> Option<&Runnable> {
+ self.runnables().iter().find(|r| r.kind == kind)
+ }
}
/// A crate points to the root module of a crate and lists the dependencies of the crate. This is
@@ -258,6 +277,12 @@ pub struct Crate {
pub build: Option<Build>,
}
+impl Crate {
+ pub fn iter_deps(&self) -> impl ExactSizeIterator<Item = CrateArrayIdx> {
+ self.deps.iter().map(|dep| dep.krate)
+ }
+}
+
/// Additional, build-specific data about a crate.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Build {
@@ -328,13 +353,21 @@ pub struct Runnable {
/// The kind of runnable.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum RunnableKind {
+ /// `cargo check`, basically, with human-readable output.
Check,
/// Can run a binary.
+ /// May include {label} which will get the label from the `build` section of a crate.
Run,
/// Run a single test.
+ /// May include {label} which will get the label from the `build` section of a crate.
+ /// May include {test_id} which will get the test clicked on by the user.
TestOne,
+
+ /// Template for checking a target, emitting rustc JSON diagnostics.
+ /// May include {label} which will get the label from the `build` section of a crate.
+ Flycheck,
}
#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
@@ -441,6 +474,7 @@ pub struct RunnableData {
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum RunnableKindData {
+ Flycheck,
Check,
Run,
TestOne,
@@ -511,6 +545,7 @@ impl From<RunnableKindData> for RunnableKind {
RunnableKindData::Check => RunnableKind::Check,
RunnableKindData::Run => RunnableKind::Run,
RunnableKindData::TestOne => RunnableKind::TestOne,
+ RunnableKindData::Flycheck => RunnableKind::Flycheck,
}
}
}
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index fa3a79e041..8f15f7e150 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -1161,6 +1161,8 @@ fn project_json_to_crate_graph(
name: Some(name.canonical_name().to_owned()),
}
}
+ } else if is_sysroot {
+ CrateOrigin::Lang(LangCrateOrigin::Dependency)
} else {
CrateOrigin::Local { repo: None, name: None }
},
@@ -1294,6 +1296,8 @@ fn cargo_to_crate_graph(
name: Some(Symbol::intern(&pkg_data.name)),
}
}
+ } else if cargo.is_sysroot() {
+ CrateOrigin::Lang(LangCrateOrigin::Dependency)
} else {
CrateOrigin::Library {
repo: pkg_data.repository.clone(),
@@ -1717,7 +1721,7 @@ fn extend_crate_graph_with_sysroot(
!matches!(lang_crate, LangCrateOrigin::Test | LangCrateOrigin::Alloc),
)),
LangCrateOrigin::ProcMacro => libproc_macro = Some(cid),
- LangCrateOrigin::Other => (),
+ LangCrateOrigin::Other | LangCrateOrigin::Dependency => (),
}
}
}
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index a02d1a7856..1995d38898 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -91,6 +91,7 @@ impl flags::AnalysisStats {
}
},
prefill_caches: false,
+ proc_macro_processes: 1,
};
let build_scripts_time = if self.disable_build_scripts {
diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs
index 776069f155..575c77f842 100644
--- a/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -41,6 +41,7 @@ impl flags::Diagnostics {
load_out_dirs_from_check: !self.disable_build_scripts,
with_proc_macro_server,
prefill_caches: false,
+ proc_macro_processes: 1,
};
let (db, _vfs, _proc_macro) =
load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs
index f3b0699d55..e5e238db63 100644
--- a/crates/rust-analyzer/src/cli/lsif.rs
+++ b/crates/rust-analyzer/src/cli/lsif.rs
@@ -293,6 +293,7 @@ impl flags::Lsif {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: false,
+ proc_macro_processes: 1,
};
let path = AbsPathBuf::assert_utf8(env::current_dir()?.join(self.path));
let root = ProjectManifest::discover_single(&path)?;
diff --git a/crates/rust-analyzer/src/cli/prime_caches.rs b/crates/rust-analyzer/src/cli/prime_caches.rs
index 467d8a5388..d5da679179 100644
--- a/crates/rust-analyzer/src/cli/prime_caches.rs
+++ b/crates/rust-analyzer/src/cli/prime_caches.rs
@@ -38,6 +38,7 @@ impl flags::PrimeCaches {
// we want to ensure that this command, not `load_workspace_at`,
// is responsible for that work.
prefill_caches: false,
+ proc_macro_processes: config.proc_macro_num_processes(),
};
let root = AbsPathBuf::assert_utf8(std::env::current_dir()?.join(root));
diff --git a/crates/rust-analyzer/src/cli/run_tests.rs b/crates/rust-analyzer/src/cli/run_tests.rs
index 82ace8c8b3..d4a56d773e 100644
--- a/crates/rust-analyzer/src/cli/run_tests.rs
+++ b/crates/rust-analyzer/src/cli/run_tests.rs
@@ -23,6 +23,7 @@ impl flags::RunTests {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: false,
+ proc_macro_processes: 1,
};
let (ref db, _vfs, _proc_macro) =
load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index 249566d2ac..e8c6c5f4d4 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -103,6 +103,7 @@ impl Tester {
load_out_dirs_from_check: false,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: false,
+ proc_macro_processes: 1,
};
let (db, _vfs, _proc_macro) =
load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index 271d2507bc..ed0476697c 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -52,6 +52,7 @@ impl flags::Scip {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: true,
+ proc_macro_processes: config.proc_macro_num_processes(),
};
let cargo_config = config.cargo(None);
let (db, vfs, _) = load_workspace_at(
diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs
index 3918683145..5c69bda723 100644
--- a/crates/rust-analyzer/src/cli/ssr.rs
+++ b/crates/rust-analyzer/src/cli/ssr.rs
@@ -20,6 +20,7 @@ impl flags::Ssr {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: false,
+ proc_macro_processes: 1,
};
let (ref db, vfs, _proc_macro) = load_workspace_at(
&std::env::current_dir()?,
@@ -56,6 +57,7 @@ impl flags::Search {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: false,
+ proc_macro_processes: 1,
};
let (ref db, _vfs, _proc_macro) = load_workspace_at(
&std::env::current_dir()?,
diff --git a/crates/rust-analyzer/src/cli/unresolved_references.rs b/crates/rust-analyzer/src/cli/unresolved_references.rs
index 294add682d..49c6fcb91e 100644
--- a/crates/rust-analyzer/src/cli/unresolved_references.rs
+++ b/crates/rust-analyzer/src/cli/unresolved_references.rs
@@ -44,6 +44,7 @@ impl flags::UnresolvedReferences {
load_out_dirs_from_check: !self.disable_build_scripts,
with_proc_macro_server,
prefill_caches: false,
+ proc_macro_processes: config.proc_macro_num_processes(),
};
let (db, vfs, _proc_macro) =
load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
diff --git a/crates/rust-analyzer/src/command.rs b/crates/rust-analyzer/src/command.rs
index 2f052618cd..49ce6db4ea 100644
--- a/crates/rust-analyzer/src/command.rs
+++ b/crates/rust-analyzer/src/command.rs
@@ -10,6 +10,7 @@ use std::{
process::{ChildStderr, ChildStdout, Command, Stdio},
};
+use anyhow::Context;
use crossbeam_channel::Sender;
use paths::Utf8PathBuf;
use process_wrap::std::{StdChildWrapper, StdCommandWrap};
@@ -156,7 +157,7 @@ impl<T: Sized + Send + 'static> CommandHandle<T> {
parser: impl JsonLinesParser<T>,
sender: Sender<T>,
out_file: Option<Utf8PathBuf>,
- ) -> std::io::Result<Self> {
+ ) -> anyhow::Result<Self> {
command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
let program = command.get_program().into();
@@ -168,7 +169,10 @@ impl<T: Sized + Send + 'static> CommandHandle<T> {
child.wrap(process_wrap::std::ProcessSession);
#[cfg(windows)]
child.wrap(process_wrap::std::JobObject);
- let mut child = child.spawn().map(JodGroupChild)?;
+ let mut child = child
+ .spawn()
+ .map(JodGroupChild)
+ .with_context(|| format!("Failed to spawn command: {child:?}"))?;
let stdout = child.0.stdout().take().unwrap();
let stderr = child.0.stderr().take().unwrap();
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index e39569e108..0dda7f3cc2 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -387,6 +387,12 @@ config_data! {
/// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
procMacro_enable: bool = true,
+ /// Number of proc-macro server processes to spawn.
+ ///
+ /// Controls how many independent `proc-macro-srv` processes rust-analyzer
+ /// runs in parallel to handle macro expansion.
+ procMacro_processes: NumProcesses = NumProcesses::Concrete(1),
+
/// Internal config, path to proc-macro server executable.
procMacro_server: Option<Utf8PathBuf> = None,
@@ -478,33 +484,83 @@ config_data! {
typing_triggerChars: Option<String> = Some("=.".to_owned()),
- /// Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].
+ /// Configure a command that rust-analyzer can invoke to
+ /// obtain configuration.
+ ///
+ /// This is an alternative to manually generating
+ /// `rust-project.json`: it enables rust-analyzer to generate
+ /// rust-project.json on the fly, and regenerate it when
+ /// switching or modifying projects.
+ ///
+ /// This is an object with three fields:
+ ///
+ /// * `command`: the shell command to invoke
+ ///
+ /// * `filesToWatch`: which build system-specific files should
+ /// be watched to trigger regenerating the configuration
///
- /// [`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.
- /// `progress_label` is used for the title in progress indicators, whereas `files_to_watch`
- /// is used to determine which build system-specific files should be watched in order to
- /// reload rust-analyzer.
+ /// * `progressLabel`: the name of the command, used in
+ /// progress indicators in the IDE
+ ///
+ /// Here's an example of a valid configuration:
///
- /// Below is an example of a valid configuration:
/// ```json
/// "rust-analyzer.workspace.discoverConfig": {
/// "command": [
/// "rust-project",
- /// "develop-json"
+ /// "develop-json",
+ /// "{arg}"
/// ],
- /// "progressLabel": "rust-analyzer",
+ /// "progressLabel": "buck2/rust-project",
/// "filesToWatch": [
/// "BUCK"
/// ]
/// }
/// ```
///
- /// ## On `DiscoverWorkspaceConfig::command`
+ /// ## Argument Substitutions
+ ///
+ /// If `command` includes the argument `{arg}`, that argument will be substituted
+ /// with the JSON-serialized form of the following enum:
+ ///
+ /// ```norun
+ /// #[derive(PartialEq, Clone, Debug, Serialize)]
+ /// #[serde(rename_all = "camelCase")]
+ /// pub enum DiscoverArgument {
+ /// Path(AbsPathBuf),
+ /// Buildfile(AbsPathBuf),
+ /// }
+ /// ```
+ ///
+ /// rust-analyzer will use the path invocation to find and
+ /// generate a `rust-project.json` and therefore a
+ /// workspace. Example:
+ ///
+ ///
+ /// ```norun
+ /// rust-project develop-json '{ "path": "myproject/src/main.rs" }'
+ /// ```
+ ///
+ /// rust-analyzer will use build file invocations to update an
+ /// existing workspace.
+ ///
+ /// For example, with a build file and the configuration above:
+ ///
+ /// ```norun
+ /// rust-project develop-json '{ "buildfile": "myproject/BUCK" }'
+ /// ```
+ ///
+ /// As a reference for implementors, buck2's `rust-project`
+ /// will likely be useful:
+ /// <https://github.com/facebook/buck2/tree/main/integrations/rust-project>.
+ ///
+ /// ## Discover Command Output
///
/// **Warning**: This format is provisional and subject to change.
///
- /// [`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to
- /// `DiscoverProjectData::Finished`:
+ /// The discover command should output JSON objects, one per
+ /// line (JSONL format). These objects should correspond to
+ /// this Rust data type:
///
/// ```norun
/// #[derive(Debug, Clone, Deserialize, Serialize)]
@@ -517,7 +573,14 @@ config_data! {
/// }
/// ```
///
- /// As JSON, `DiscoverProjectData::Finished` is:
+ /// For example, a progress event:
+ ///
+ /// ```json
+ /// {"kind":"progress","message":"generating rust-project.json"}
+ /// ```
+ ///
+ /// A finished event can look like this (expanded and
+ /// commented for readability):
///
/// ```json
/// {
@@ -525,7 +588,7 @@ config_data! {
/// "kind": "finished",
/// // the file used by a non-Cargo build system to define
/// // a package or target.
- /// "buildfile": "rust-analyzer/BUILD",
+ /// "buildfile": "rust-analyzer/BUCK",
/// // the contents of a rust-project.json, elided for brevity
/// "project": {
/// "sysroot": "foo",
@@ -534,41 +597,9 @@ config_data! {
/// }
/// ```
///
- /// It is encouraged, but not required, to use the other variants on `DiscoverProjectData`
- /// to provide a more polished end-user experience.
- ///
- /// `DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be
- /// substituted with the JSON-serialized form of the following enum:
- ///
- /// ```norun
- /// #[derive(PartialEq, Clone, Debug, Serialize)]
- /// #[serde(rename_all = "camelCase")]
- /// pub enum DiscoverArgument {
- /// Path(AbsPathBuf),
- /// Buildfile(AbsPathBuf),
- /// }
- /// ```
- ///
- /// The JSON representation of `DiscoverArgument::Path` is:
- ///
- /// ```json
- /// {
- /// "path": "src/main.rs"
- /// }
- /// ```
- ///
- /// Similarly, the JSON representation of `DiscoverArgument::Buildfile` is:
- ///
- /// ```json
- /// {
- /// "buildfile": "BUILD"
- /// }
- /// ```
- ///
- /// `DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and
- /// therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to to update an
- /// existing workspace. As a reference for implementors, buck2's `rust-project` will likely
- /// be useful: <https://github.com/facebook/buck2/tree/main/integrations/rust-project>.
+ /// Only the finished event is required, but the other
+ /// variants are encouraged to give users more feedback about
+ /// progress or errors.
workspace_discoverConfig: Option<DiscoverWorkspaceConfig> = None,
}
}
@@ -870,10 +901,18 @@ config_data! {
/// (i.e., the folder containing the `Cargo.toml`). This can be overwritten
/// by changing `#rust-analyzer.check.invocationStrategy#`.
///
- /// If `$saved_file` is part of the command, rust-analyzer will pass
- /// the absolute path of the saved file to the provided command. This is
- /// intended to be used with non-Cargo build systems.
- /// Note that `$saved_file` is experimental and may be removed in the future.
+ /// It supports two interpolation syntaxes, both mainly intended to be used with
+ /// [non-Cargo build systems](./non_cargo_based_projects.md):
+ ///
+ /// - If `{saved_file}` is part of the command, rust-analyzer will pass
+ /// the absolute path of the saved file to the provided command.
+ /// (A previous version, `$saved_file`, also works.)
+ /// - If `{label}` is part of the command, rust-analyzer will pass the
+ /// Cargo package ID, which can be used with `cargo check -p`, or a build label from
+ /// `rust-project.json`. If `{label}` is included, rust-analyzer behaves much like
+ /// [`"rust-analyzer.check.workspace": false`](#check.workspace).
+ ///
+ ///
///
/// An example command would be:
///
@@ -1034,6 +1073,7 @@ pub struct Config {
/// The workspace roots as registered by the LSP client
workspace_roots: Vec<AbsPathBuf>,
caps: ClientCapabilities,
+ /// The LSP root path, deprecated in favor of `workspace_roots`
root_path: AbsPathBuf,
snippets: Vec<Snippet>,
client_info: Option<ClientInfo>,
@@ -1357,6 +1397,10 @@ impl Config {
self.discovered_projects_from_command.push(ProjectJsonFromCommand { data, buildfile });
}
+
+ pub fn workspace_roots(&self) -> &[AbsPathBuf] {
+ &self.workspace_roots
+ }
}
#[derive(Default, Debug)]
@@ -1733,6 +1777,7 @@ impl Config {
}
pub fn root_path(&self) -> &AbsPathBuf {
+ // We should probably use `workspace_roots` here if set
&self.root_path
}
@@ -2431,6 +2476,8 @@ impl Config {
pub(crate) fn cargo_test_options(&self, source_root: Option<SourceRootId>) -> CargoOptions {
CargoOptions {
+ // Might be nice to allow users to specify test_command = "nextest"
+ subcommand: "test".into(),
target_tuples: self.cargo_target(source_root).clone().into_iter().collect(),
all_targets: false,
no_default_features: *self.cargo_noDefaultFeatures(source_root),
@@ -2464,9 +2511,9 @@ impl Config {
},
}
}
- Some(_) | None => FlycheckConfig::CargoCommand {
- command: self.check_command(source_root).clone(),
- options: CargoOptions {
+ Some(_) | None => FlycheckConfig::Automatic {
+ cargo_options: CargoOptions {
+ subcommand: self.check_command(source_root).clone(),
target_tuples: self
.check_targets(source_root)
.clone()
@@ -2630,6 +2677,13 @@ impl Config {
}
}
+ pub fn proc_macro_num_processes(&self) -> usize {
+ match self.procMacro_processes() {
+ NumProcesses::Concrete(0) | NumProcesses::Physical => num_cpus::get_physical(),
+ &NumProcesses::Concrete(n) => n,
+ }
+ }
+
pub fn main_loop_num_threads(&self) -> usize {
match self.numThreads() {
Some(NumThreads::Concrete(0)) | None | Some(NumThreads::Physical) => {
@@ -3066,6 +3120,14 @@ pub enum NumThreads {
Concrete(usize),
}
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
+#[serde(rename_all = "snake_case")]
+pub enum NumProcesses {
+ Physical,
+ #[serde(untagged)]
+ Concrete(usize),
+}
+
macro_rules! _default_val {
($default:expr, $ty:ty) => {{
let default_: $ty = $default;
@@ -3892,6 +3954,22 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
},
],
},
+ "NumProcesses" => set! {
+ "anyOf": [
+ {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 255
+ },
+ {
+ "type": "string",
+ "enum": ["physical"],
+ "enumDescriptions": [
+ "Use the number of physical cores",
+ ],
+ },
+ ],
+ },
"Option<NumThreads>" => set! {
"anyOf": [
{
@@ -4171,8 +4249,8 @@ mod tests {
assert_eq!(config.cargo_targetDir(None), &None);
assert!(matches!(
config.flycheck(None),
- FlycheckConfig::CargoCommand {
- options: CargoOptions { target_dir_config: TargetDirectoryConfig::None, .. },
+ FlycheckConfig::Automatic {
+ cargo_options: CargoOptions { target_dir_config: TargetDirectoryConfig::None, .. },
..
}
));
@@ -4195,8 +4273,8 @@ mod tests {
Utf8PathBuf::from(std::env::var("CARGO_TARGET_DIR").unwrap_or("target".to_owned()));
assert!(matches!(
config.flycheck(None),
- FlycheckConfig::CargoCommand {
- options: CargoOptions { target_dir_config, .. },
+ FlycheckConfig::Automatic {
+ cargo_options: CargoOptions { target_dir_config, .. },
..
} if target_dir_config.target_dir(Some(&ws_target_dir)).map(Cow::into_owned)
== Some(ws_target_dir.join("rust-analyzer"))
@@ -4221,8 +4299,8 @@ mod tests {
);
assert!(matches!(
config.flycheck(None),
- FlycheckConfig::CargoCommand {
- options: CargoOptions { target_dir_config, .. },
+ FlycheckConfig::Automatic {
+ cargo_options: CargoOptions { target_dir_config, .. },
..
} if target_dir_config.target_dir(None).map(Cow::into_owned)
== Some(Utf8PathBuf::from("other_folder"))
diff --git a/crates/rust-analyzer/src/config/patch_old_style.rs b/crates/rust-analyzer/src/config/patch_old_style.rs
index 389bb7848c..5dc463eccc 100644
--- a/crates/rust-analyzer/src/config/patch_old_style.rs
+++ b/crates/rust-analyzer/src/config/patch_old_style.rs
@@ -3,7 +3,7 @@ use serde_json::{Value, json};
/// This function patches the json config to the new expected keys.
/// That is we try to load old known config keys here and convert them to the new ones.
-/// See https://github.com/rust-lang/rust-analyzer/pull/12010
+/// See <https://github.com/rust-lang/rust-analyzer/pull/12010>
///
/// We already have an alias system for simple cases, but if we make structural changes
/// the alias infra fails down.
diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs
index 4a247800af..8d0f52433e 100644
--- a/crates/rust-analyzer/src/diagnostics.rs
+++ b/crates/rust-analyzer/src/diagnostics.rs
@@ -3,7 +3,6 @@ pub(crate) mod flycheck_to_proto;
use std::mem;
-use cargo_metadata::PackageId;
use ide::FileId;
use ide_db::{FxHashMap, base_db::DbPanicContext};
use itertools::Itertools;
@@ -12,10 +11,13 @@ use smallvec::SmallVec;
use stdx::iter_eq_by;
use triomphe::Arc;
-use crate::{global_state::GlobalStateSnapshot, lsp, lsp_ext, main_loop::DiagnosticsTaskKind};
+use crate::{
+ flycheck::PackageSpecifier, global_state::GlobalStateSnapshot, lsp, lsp_ext,
+ main_loop::DiagnosticsTaskKind,
+};
pub(crate) type CheckFixes =
- Arc<Vec<FxHashMap<Option<Arc<PackageId>>, FxHashMap<FileId, Vec<Fix>>>>>;
+ Arc<Vec<FxHashMap<Option<PackageSpecifier>, FxHashMap<FileId, Vec<Fix>>>>>;
#[derive(Debug, Default, Clone)]
pub struct DiagnosticsMapConfig {
@@ -29,7 +31,7 @@ pub(crate) type DiagnosticsGeneration = usize;
#[derive(Debug, Clone, Default)]
pub(crate) struct WorkspaceFlycheckDiagnostic {
- pub(crate) per_package: FxHashMap<Option<Arc<PackageId>>, PackageFlycheckDiagnostic>,
+ pub(crate) per_package: FxHashMap<Option<PackageSpecifier>, PackageFlycheckDiagnostic>,
}
#[derive(Debug, Clone)]
@@ -85,7 +87,7 @@ impl DiagnosticCollection {
pub(crate) fn clear_check_for_package(
&mut self,
flycheck_id: usize,
- package_id: Arc<PackageId>,
+ package_id: PackageSpecifier,
) {
let Some(check) = self.check.get_mut(flycheck_id) else {
return;
@@ -124,7 +126,7 @@ impl DiagnosticCollection {
pub(crate) fn clear_check_older_than_for_package(
&mut self,
flycheck_id: usize,
- package_id: Arc<PackageId>,
+ package_id: PackageSpecifier,
generation: DiagnosticsGeneration,
) {
let Some(check) = self.check.get_mut(flycheck_id) else {
@@ -154,7 +156,7 @@ impl DiagnosticCollection {
&mut self,
flycheck_id: usize,
generation: DiagnosticsGeneration,
- package_id: &Option<Arc<PackageId>>,
+ package_id: &Option<PackageSpecifier>,
file_id: FileId,
diagnostic: lsp_types::Diagnostic,
fix: Option<Box<Fix>>,
@@ -287,34 +289,40 @@ pub(crate) fn fetch_native_diagnostics(
let mut diagnostics = subscriptions[slice]
.iter()
.copied()
- .filter_map(|file_id| {
- let line_index = snapshot.file_line_index(file_id).ok()?;
- let source_root = snapshot.analysis.source_root_id(file_id).ok()?;
-
- let config = &snapshot.config.diagnostics(Some(source_root));
- let diagnostics = match kind {
- NativeDiagnosticsFetchKind::Syntax => {
- snapshot.analysis.syntax_diagnostics(config, file_id).ok()?
- }
-
- NativeDiagnosticsFetchKind::Semantic if config.enabled => snapshot
- .analysis
- .semantic_diagnostics(config, ide::AssistResolveStrategy::None, file_id)
- .ok()?,
- NativeDiagnosticsFetchKind::Semantic => return None,
- };
- let diagnostics = diagnostics
- .into_iter()
- .filter_map(|d| {
- if d.range.file_id == file_id {
- Some(convert_diagnostic(&line_index, d))
- } else {
- odd_ones.push(d);
- None
+ .map(|file_id| {
+ let diagnostics = (|| {
+ let line_index = snapshot.file_line_index(file_id).ok()?;
+ let source_root = snapshot.analysis.source_root_id(file_id).ok()?;
+
+ let config = &snapshot.config.diagnostics(Some(source_root));
+ let diagnostics = match kind {
+ NativeDiagnosticsFetchKind::Syntax => {
+ snapshot.analysis.syntax_diagnostics(config, file_id).ok()?
}
- })
- .collect::<Vec<_>>();
- Some((file_id, diagnostics))
+
+ NativeDiagnosticsFetchKind::Semantic if config.enabled => snapshot
+ .analysis
+ .semantic_diagnostics(config, ide::AssistResolveStrategy::None, file_id)
+ .ok()?,
+ NativeDiagnosticsFetchKind::Semantic => return None,
+ };
+ Some(
+ diagnostics
+ .into_iter()
+ .filter_map(|d| {
+ if d.range.file_id == file_id {
+ Some(convert_diagnostic(&line_index, d))
+ } else {
+ odd_ones.push(d);
+ None
+ }
+ })
+ .collect::<Vec<_>>(),
+ )
+ })()
+ .unwrap_or_default();
+
+ (file_id, diagnostics)
})
.collect::<Vec<_>>();
diff --git a/crates/rust-analyzer/src/discover.rs b/crates/rust-analyzer/src/discover.rs
index 4aef5b0b7f..098b6a4d98 100644
--- a/crates/rust-analyzer/src/discover.rs
+++ b/crates/rust-analyzer/src/discover.rs
@@ -1,6 +1,6 @@
//! Infrastructure for lazy project discovery. Currently only support rust-project.json discovery
//! via a custom discover command.
-use std::{io, path::Path};
+use std::path::Path;
use crossbeam_channel::Sender;
use ide_db::FxHashMap;
@@ -42,12 +42,12 @@ impl DiscoverCommand {
Self { sender, command }
}
- /// Spawn the command inside [Discover] and report progress, if any.
+ /// Spawn the command inside `DiscoverCommand` and report progress, if any.
pub(crate) fn spawn(
&self,
discover_arg: DiscoverArgument,
current_dir: &Path,
- ) -> io::Result<DiscoverHandle> {
+ ) -> anyhow::Result<DiscoverHandle> {
let command = &self.command[0];
let args = &self.command[1..];
@@ -73,7 +73,7 @@ impl DiscoverCommand {
}
}
-/// A handle to a spawned [Discover].
+/// A handle to a spawned `DiscoverCommand`.
#[derive(Debug)]
pub(crate) struct DiscoverHandle {
pub(crate) handle: CommandHandle<DiscoverProjectMessage>,
diff --git a/crates/rust-analyzer/src/flycheck.rs b/crates/rust-analyzer/src/flycheck.rs
index b062641691..512c231990 100644
--- a/crates/rust-analyzer/src/flycheck.rs
+++ b/crates/rust-analyzer/src/flycheck.rs
@@ -14,6 +14,7 @@ use ide_db::FxHashSet;
use itertools::Itertools;
use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf};
use project_model::TargetDirectoryConfig;
+use project_model::project_json;
use rustc_hash::FxHashMap;
use serde::Deserialize as _;
use serde_derive::Deserialize;
@@ -21,6 +22,7 @@ use serde_derive::Deserialize;
pub(crate) use cargo_metadata::diagnostic::{
Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan,
};
+use toolchain::DISPLAY_COMMAND_IGNORE_ENVS;
use toolchain::Tool;
use triomphe::Arc;
@@ -36,8 +38,11 @@ pub(crate) enum InvocationStrategy {
PerWorkspace,
}
+/// Data needed to construct a `cargo` command invocation, e.g. for flycheck or running a test.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct CargoOptions {
+ /// The cargo subcommand to run, e.g. "check" or "clippy"
+ pub(crate) subcommand: String,
pub(crate) target_tuples: Vec<String>,
pub(crate) all_targets: bool,
pub(crate) set_test: bool,
@@ -89,13 +94,36 @@ impl CargoOptions {
}
}
+/// The flycheck config from a rust-project.json file or discoverConfig JSON output.
+#[derive(Debug, Default)]
+pub(crate) struct FlycheckConfigJson {
+ /// The template with [project_json::RunnableKind::Flycheck]
+ pub single_template: Option<project_json::Runnable>,
+}
+
+impl FlycheckConfigJson {
+ pub(crate) fn any_configured(&self) -> bool {
+ // self.workspace_template.is_some() ||
+ self.single_template.is_some()
+ }
+}
+
+/// The flycheck config from rust-analyzer's own configuration.
+///
+/// We rely on this when rust-project.json does not specify a flycheck runnable
+///
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum FlycheckConfig {
- CargoCommand {
- command: String,
- options: CargoOptions,
+ /// Automatically use rust-project.json's flycheck runnable or just use cargo (the common case)
+ ///
+ /// We can't have a variant for ProjectJson because that is configured on the fly during
+ /// discoverConfig. We only know what we can read at config time.
+ Automatic {
+ /// If we do use cargo, how to build the check command
+ cargo_options: CargoOptions,
ansi_color_output: bool,
},
+ /// check_overrideCommand. This overrides both cargo and rust-project.json's flycheck runnable.
CustomCommand {
command: String,
args: Vec<String>,
@@ -107,7 +135,7 @@ pub(crate) enum FlycheckConfig {
impl FlycheckConfig {
pub(crate) fn invocation_strategy(&self) -> InvocationStrategy {
match self {
- FlycheckConfig::CargoCommand { .. } => InvocationStrategy::PerWorkspace,
+ FlycheckConfig::Automatic { .. } => InvocationStrategy::PerWorkspace,
FlycheckConfig::CustomCommand { invocation_strategy, .. } => {
invocation_strategy.clone()
}
@@ -118,7 +146,9 @@ impl FlycheckConfig {
impl fmt::Display for FlycheckConfig {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
- FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {command}"),
+ FlycheckConfig::Automatic { cargo_options, .. } => {
+ write!(f, "cargo {}", cargo_options.subcommand)
+ }
FlycheckConfig::CustomCommand { command, args, .. } => {
// Don't show `my_custom_check --foo $saved_file` literally to the user, as it
// looks like we've forgotten to substitute $saved_file.
@@ -128,7 +158,7 @@ impl fmt::Display for FlycheckConfig {
// in the IDE (e.g. in the VS Code status bar).
let display_args = args
.iter()
- .map(|arg| if arg == SAVED_FILE_PLACEHOLDER { "..." } else { arg })
+ .map(|arg| if arg == SAVED_FILE_PLACEHOLDER_DOLLAR { "..." } else { arg })
.collect::<Vec<_>>();
write!(f, "{command} {}", display_args.join(" "))
@@ -156,6 +186,7 @@ impl FlycheckHandle {
generation: Arc<AtomicUsize>,
sender: Sender<FlycheckMessage>,
config: FlycheckConfig,
+ config_json: FlycheckConfigJson,
sysroot_root: Option<AbsPathBuf>,
workspace_root: AbsPathBuf,
manifest_path: Option<AbsPathBuf>,
@@ -166,6 +197,7 @@ impl FlycheckHandle {
generation.load(Ordering::Relaxed),
sender,
config,
+ config_json,
sysroot_root,
workspace_root,
manifest_path,
@@ -195,16 +227,17 @@ impl FlycheckHandle {
/// Schedule a re-start of the cargo check worker to do a package wide check.
pub(crate) fn restart_for_package(
&self,
- package: Arc<PackageId>,
+ package: PackageSpecifier,
target: Option<Target>,
- workspace_deps: Option<FxHashSet<Arc<PackageId>>>,
+ workspace_deps: Option<FxHashSet<PackageSpecifier>>,
+ saved_file: Option<AbsPathBuf>,
) {
let generation = self.generation.fetch_add(1, Ordering::Relaxed) + 1;
self.sender
.send(StateChange::Restart {
generation,
scope: FlycheckScope::Package { package, workspace_deps },
- saved_file: None,
+ saved_file,
target,
})
.unwrap();
@@ -233,7 +266,7 @@ pub(crate) enum ClearDiagnosticsKind {
#[derive(Debug)]
pub(crate) enum ClearScope {
Workspace,
- Package(Arc<PackageId>),
+ Package(PackageSpecifier),
}
pub(crate) enum FlycheckMessage {
@@ -243,7 +276,7 @@ pub(crate) enum FlycheckMessage {
generation: DiagnosticsGeneration,
workspace_root: Arc<AbsPathBuf>,
diagnostic: Diagnostic,
- package_id: Option<Arc<PackageId>>,
+ package_id: Option<PackageSpecifier>,
},
/// Request clearing all outdated diagnostics.
@@ -286,16 +319,56 @@ impl fmt::Debug for FlycheckMessage {
#[derive(Debug)]
pub(crate) enum Progress {
- DidStart,
+ DidStart {
+ /// The user sees this in VSCode, etc. May be a shortened version of the command we actually
+ /// executed, otherwise it is way too long.
+ user_facing_command: String,
+ },
DidCheckCrate(String),
DidFinish(io::Result<()>),
DidCancel,
DidFailToRestart(String),
}
+#[derive(Debug, Clone)]
enum FlycheckScope {
Workspace,
- Package { package: Arc<PackageId>, workspace_deps: Option<FxHashSet<Arc<PackageId>>> },
+ Package {
+ // Either a cargo package or a $label in rust-project.check.overrideCommand
+ package: PackageSpecifier,
+ workspace_deps: Option<FxHashSet<PackageSpecifier>>,
+ },
+}
+
+#[derive(Debug, Hash, PartialEq, Eq, Clone)]
+pub(crate) enum PackageSpecifier {
+ Cargo {
+ /// The one in Cargo.toml, assumed to work with `cargo check -p {}` etc
+ package_id: Arc<PackageId>,
+ },
+ BuildInfo {
+ /// If a `build` field is present in rust-project.json, its label field
+ label: String,
+ },
+}
+
+impl PackageSpecifier {
+ pub(crate) fn as_str(&self) -> &str {
+ match self {
+ Self::Cargo { package_id } => &package_id.repr,
+ Self::BuildInfo { label } => label,
+ }
+ }
+}
+
+#[derive(Debug)]
+enum FlycheckCommandOrigin {
+ /// Regular cargo invocation
+ Cargo,
+ /// Configured via check_overrideCommand
+ CheckOverrideCommand,
+ /// From a runnable with [project_json::RunnableKind::Flycheck]
+ ProjectJsonRunnable,
}
enum StateChange {
@@ -316,6 +389,8 @@ struct FlycheckActor {
generation: DiagnosticsGeneration,
sender: Sender<FlycheckMessage>,
config: FlycheckConfig,
+ config_json: FlycheckConfigJson,
+
manifest_path: Option<AbsPathBuf>,
ws_target_dir: Option<Utf8PathBuf>,
/// Either the workspace root of the workspace we are flychecking,
@@ -331,7 +406,7 @@ struct FlycheckActor {
command_handle: Option<CommandHandle<CargoCheckMessage>>,
/// The receiver side of the channel mentioned above.
command_receiver: Option<Receiver<CargoCheckMessage>>,
- diagnostics_cleared_for: FxHashSet<Arc<PackageId>>,
+ diagnostics_cleared_for: FxHashSet<PackageSpecifier>,
diagnostics_received: DiagnosticsReceived,
}
@@ -348,7 +423,66 @@ enum Event {
CheckEvent(Option<CargoCheckMessage>),
}
-pub(crate) const SAVED_FILE_PLACEHOLDER: &str = "$saved_file";
+/// This is stable behaviour. Don't change.
+const SAVED_FILE_PLACEHOLDER_DOLLAR: &str = "$saved_file";
+const LABEL_INLINE: &str = "{label}";
+const SAVED_FILE_INLINE: &str = "{saved_file}";
+
+struct Substitutions<'a> {
+ label: Option<&'a str>,
+ saved_file: Option<&'a str>,
+}
+
+impl<'a> Substitutions<'a> {
+ /// If you have a runnable, and it has {label} in it somewhere, treat it as a template that
+ /// may be unsatisfied if you do not provide a label to substitute into it. Returns None in
+ /// that situation. Otherwise performs the requested substitutions.
+ ///
+ /// Same for {saved_file}.
+ ///
+ #[allow(clippy::disallowed_types)] /* generic parameter allows for FxHashMap */
+ fn substitute<H>(
+ self,
+ template: &project_json::Runnable,
+ extra_env: &std::collections::HashMap<String, Option<String>, H>,
+ ) -> Option<Command> {
+ let mut cmd = toolchain::command(&template.program, &template.cwd, extra_env);
+ for arg in &template.args {
+ if let Some(ix) = arg.find(LABEL_INLINE) {
+ if let Some(label) = self.label {
+ let mut arg = arg.to_string();
+ arg.replace_range(ix..ix + LABEL_INLINE.len(), label);
+ cmd.arg(arg);
+ continue;
+ } else {
+ return None;
+ }
+ }
+ if let Some(ix) = arg.find(SAVED_FILE_INLINE) {
+ if let Some(saved_file) = self.saved_file {
+ let mut arg = arg.to_string();
+ arg.replace_range(ix..ix + SAVED_FILE_INLINE.len(), saved_file);
+ cmd.arg(arg);
+ continue;
+ } else {
+ return None;
+ }
+ }
+ // Legacy syntax: full argument match
+ if arg == SAVED_FILE_PLACEHOLDER_DOLLAR {
+ if let Some(saved_file) = self.saved_file {
+ cmd.arg(saved_file);
+ continue;
+ } else {
+ return None;
+ }
+ }
+ cmd.arg(arg);
+ }
+ cmd.current_dir(&template.cwd);
+ Some(cmd)
+ }
+}
impl FlycheckActor {
fn new(
@@ -356,6 +490,7 @@ impl FlycheckActor {
generation: DiagnosticsGeneration,
sender: Sender<FlycheckMessage>,
config: FlycheckConfig,
+ config_json: FlycheckConfigJson,
sysroot_root: Option<AbsPathBuf>,
workspace_root: AbsPathBuf,
manifest_path: Option<AbsPathBuf>,
@@ -367,6 +502,7 @@ impl FlycheckActor {
generation,
sender,
config,
+ config_json,
sysroot_root,
root: Arc::new(workspace_root),
scope: FlycheckScope::Workspace,
@@ -418,27 +554,39 @@ impl FlycheckActor {
}
let command = self.check_command(&scope, saved_file.as_deref(), target);
- self.scope = scope;
+ self.scope = scope.clone();
self.generation = generation;
- let Some(command) = command else {
+ let Some((command, origin)) = command else {
+ tracing::debug!(?scope, "failed to build flycheck command");
continue;
};
- let formatted_command = format!("{command:?}");
+ let debug_command = format!("{command:?}");
+ let user_facing_command = match origin {
+ // Don't show all the --format=json-with-blah-blah args, just the simple
+ // version
+ FlycheckCommandOrigin::Cargo => self.config.to_string(),
+ // show them the full command but pretty printed. advanced user
+ FlycheckCommandOrigin::ProjectJsonRunnable
+ | FlycheckCommandOrigin::CheckOverrideCommand => display_command(
+ &command,
+ Some(std::path::Path::new(self.root.as_path())),
+ ),
+ };
- tracing::debug!(?command, "will restart flycheck");
+ tracing::debug!(?origin, ?command, "will restart flycheck");
let (sender, receiver) = unbounded();
match CommandHandle::spawn(
command,
CargoCheckParser,
sender,
match &self.config {
- FlycheckConfig::CargoCommand { options, .. } => {
+ FlycheckConfig::Automatic { cargo_options, .. } => {
let ws_target_dir =
self.ws_target_dir.as_ref().map(Utf8PathBuf::as_path);
let target_dir =
- options.target_dir_config.target_dir(ws_target_dir);
+ cargo_options.target_dir_config.target_dir(ws_target_dir);
// If `"rust-analyzer.cargo.targetDir": null`, we should use
// workspace's target dir instead of hard-coded fallback.
@@ -464,14 +612,14 @@ impl FlycheckActor {
},
) {
Ok(command_handle) => {
- tracing::debug!(command = formatted_command, "did restart flycheck");
+ tracing::debug!(?origin, command = %debug_command, "did restart flycheck");
self.command_handle = Some(command_handle);
self.command_receiver = Some(receiver);
- self.report_progress(Progress::DidStart);
+ self.report_progress(Progress::DidStart { user_facing_command });
}
Err(error) => {
self.report_progress(Progress::DidFailToRestart(format!(
- "Failed to run the following command: {formatted_command} error={error}"
+ "Failed to run the following command: {debug_command} origin={origin:?} error={error}"
)));
}
}
@@ -564,7 +712,10 @@ impl FlycheckActor {
msg.target.kind.iter().format_with(", ", |kind, f| f(&kind)),
)));
let package_id = Arc::new(msg.package_id);
- if self.diagnostics_cleared_for.insert(package_id.clone()) {
+ if self
+ .diagnostics_cleared_for
+ .insert(PackageSpecifier::Cargo { package_id: package_id.clone() })
+ {
tracing::trace!(
flycheck_id = self.id,
package_id = package_id.repr,
@@ -572,7 +723,9 @@ impl FlycheckActor {
);
self.send(FlycheckMessage::ClearDiagnostics {
id: self.id,
- kind: ClearDiagnosticsKind::All(ClearScope::Package(package_id)),
+ kind: ClearDiagnosticsKind::All(ClearScope::Package(
+ PackageSpecifier::Cargo { package_id },
+ )),
});
}
}
@@ -580,7 +733,7 @@ impl FlycheckActor {
tracing::trace!(
flycheck_id = self.id,
message = diagnostic.message,
- package_id = package_id.as_ref().map(|it| &it.repr),
+ package_id = package_id.as_ref().map(|it| it.as_str()),
"diagnostic received"
);
if self.diagnostics_received == DiagnosticsReceived::No {
@@ -590,7 +743,7 @@ impl FlycheckActor {
if self.diagnostics_cleared_for.insert(package_id.clone()) {
tracing::trace!(
flycheck_id = self.id,
- package_id = package_id.repr,
+ package_id = package_id.as_str(),
"clearing diagnostics"
);
self.send(FlycheckMessage::ClearDiagnostics {
@@ -642,6 +795,29 @@ impl FlycheckActor {
self.diagnostics_received = DiagnosticsReceived::No;
}
+ fn explicit_check_command(
+ &self,
+ scope: &FlycheckScope,
+ saved_file: Option<&AbsPath>,
+ ) -> Option<Command> {
+ let label = match scope {
+ // We could add a runnable like "RunnableKind::FlycheckWorkspace". But generally
+ // if you're not running cargo, it's because your workspace is too big to check
+ // all at once. You can always use `check_overrideCommand` with no {label}.
+ FlycheckScope::Workspace => return None,
+ FlycheckScope::Package { package: PackageSpecifier::BuildInfo { label }, .. } => {
+ label.as_str()
+ }
+ FlycheckScope::Package {
+ package: PackageSpecifier::Cargo { package_id: label },
+ ..
+ } => &label.repr,
+ };
+ let template = self.config_json.single_template.as_ref()?;
+ let subs = Substitutions { label: Some(label), saved_file: saved_file.map(|x| x.as_str()) };
+ subs.substitute(template, &FxHashMap::default())
+ }
+
/// Construct a `Command` object for checking the user's code. If the user
/// has specified a custom command with placeholders that we cannot fill,
/// return None.
@@ -650,23 +826,49 @@ impl FlycheckActor {
scope: &FlycheckScope,
saved_file: Option<&AbsPath>,
target: Option<Target>,
- ) -> Option<Command> {
+ ) -> Option<(Command, FlycheckCommandOrigin)> {
match &self.config {
- FlycheckConfig::CargoCommand { command, options, ansi_color_output } => {
+ FlycheckConfig::Automatic { cargo_options, ansi_color_output } => {
+ // Only use the rust-project.json's flycheck config when no check_overrideCommand
+ // is configured. In the FlycheckConfig::CustomCommand branch we will still do
+ // label substitution, but on the overrideCommand instead.
+ //
+ // There needs to be SOME way to override what your discoverConfig tool says,
+ // because to change the flycheck runnable there you may have to literally
+ // recompile the tool.
+ if self.config_json.any_configured() {
+ // Completely handle according to rust-project.json.
+ // We don't consider this to be "using cargo" so we will not apply any of the
+ // CargoOptions to the command.
+ let cmd = self.explicit_check_command(scope, saved_file)?;
+ return Some((cmd, FlycheckCommandOrigin::ProjectJsonRunnable));
+ }
+
let mut cmd =
- toolchain::command(Tool::Cargo.path(), &*self.root, &options.extra_env);
+ toolchain::command(Tool::Cargo.path(), &*self.root, &cargo_options.extra_env);
if let Some(sysroot_root) = &self.sysroot_root
- && !options.extra_env.contains_key("RUSTUP_TOOLCHAIN")
+ && !cargo_options.extra_env.contains_key("RUSTUP_TOOLCHAIN")
&& std::env::var_os("RUSTUP_TOOLCHAIN").is_none()
{
cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(sysroot_root));
}
cmd.env("CARGO_LOG", "cargo::core::compiler::fingerprint=info");
- cmd.arg(command);
+ cmd.arg(&cargo_options.subcommand);
match scope {
FlycheckScope::Workspace => cmd.arg("--workspace"),
- FlycheckScope::Package { package, .. } => cmd.arg("-p").arg(&package.repr),
+ FlycheckScope::Package {
+ package: PackageSpecifier::Cargo { package_id },
+ ..
+ } => cmd.arg("-p").arg(&package_id.repr),
+ FlycheckScope::Package {
+ package: PackageSpecifier::BuildInfo { .. }, ..
+ } => {
+ // No way to flycheck this single package. All we have is a build label.
+ // There's no way to really say whether this build label happens to be
+ // a cargo canonical name, so we won't try.
+ return None;
+ }
};
if let Some(tgt) = target {
@@ -695,12 +897,12 @@ impl FlycheckActor {
cmd.arg("--keep-going");
- options.apply_on_command(
+ cargo_options.apply_on_command(
&mut cmd,
self.ws_target_dir.as_ref().map(Utf8PathBuf::as_path),
);
- cmd.args(&options.extra_args);
- Some(cmd)
+ cmd.args(&cargo_options.extra_args);
+ Some((cmd, FlycheckCommandOrigin::Cargo))
}
FlycheckConfig::CustomCommand { command, args, extra_env, invocation_strategy } => {
let root = match invocation_strategy {
@@ -710,31 +912,25 @@ impl FlycheckActor {
&*self.root
}
};
- let mut cmd = toolchain::command(command, root, extra_env);
-
- // If the custom command has a $saved_file placeholder, and
- // we're saving a file, replace the placeholder in the arguments.
- if let Some(saved_file) = saved_file {
- for arg in args {
- if arg == SAVED_FILE_PLACEHOLDER {
- cmd.arg(saved_file);
- } else {
- cmd.arg(arg);
- }
- }
- } else {
- for arg in args {
- if arg == SAVED_FILE_PLACEHOLDER {
- // The custom command has a $saved_file placeholder,
- // but we had an IDE event that wasn't a file save. Do nothing.
- return None;
- }
+ let runnable = project_json::Runnable {
+ program: command.clone(),
+ cwd: Utf8Path::to_owned(root.as_ref()),
+ args: args.clone(),
+ kind: project_json::RunnableKind::Flycheck,
+ };
- cmd.arg(arg);
- }
- }
+ let label = match scope {
+ FlycheckScope::Workspace => None,
+ // We support substituting both build labels (e.g. buck, bazel) and cargo package ids.
+ // With cargo package ids, you get `cargo check -p path+file:///path/to/rust-analyzer/crates/hir#0.0.0`.
+ // That does work!
+ FlycheckScope::Package { package, .. } => Some(package.as_str()),
+ };
- Some(cmd)
+ let subs = Substitutions { label, saved_file: saved_file.map(|x| x.as_str()) };
+ let cmd = subs.substitute(&runnable, extra_env)?;
+
+ Some((cmd, FlycheckCommandOrigin::CheckOverrideCommand))
}
}
}
@@ -748,7 +944,7 @@ impl FlycheckActor {
#[allow(clippy::large_enum_variant)]
enum CargoCheckMessage {
CompilerArtifact(cargo_metadata::Artifact),
- Diagnostic { diagnostic: Diagnostic, package_id: Option<Arc<PackageId>> },
+ Diagnostic { diagnostic: Diagnostic, package_id: Option<PackageSpecifier> },
}
struct CargoCheckParser;
@@ -767,7 +963,9 @@ impl JsonLinesParser<CargoCheckMessage> for CargoCheckParser {
cargo_metadata::Message::CompilerMessage(msg) => {
Some(CargoCheckMessage::Diagnostic {
diagnostic: msg.message,
- package_id: Some(Arc::new(msg.package_id)),
+ package_id: Some(PackageSpecifier::Cargo {
+ package_id: Arc::new(msg.package_id),
+ }),
})
}
_ => None,
@@ -794,3 +992,181 @@ enum JsonMessage {
Cargo(cargo_metadata::Message),
Rustc(Diagnostic),
}
+
+/// Not good enough to execute in a shell, but good enough to show the user without all the noisy
+/// quotes
+///
+/// Pass implicit_cwd if there is one regarded as the obvious by the user, so we can skip showing it.
+/// Compactness is the aim of the game, the output typically gets truncated quite a lot.
+fn display_command(c: &Command, implicit_cwd: Option<&std::path::Path>) -> String {
+ let mut o = String::new();
+ use std::fmt::Write;
+ let lossy = std::ffi::OsStr::to_string_lossy;
+ if let Some(dir) = c.get_current_dir() {
+ if Some(dir) == implicit_cwd.map(std::path::Path::new) {
+ // pass
+ } else if dir.to_string_lossy().contains(" ") {
+ write!(o, "cd {:?} && ", dir).unwrap();
+ } else {
+ write!(o, "cd {} && ", dir.display()).unwrap();
+ }
+ }
+ for (env, val) in c.get_envs() {
+ let (env, val) = (lossy(env), val.map(lossy).unwrap_or(std::borrow::Cow::Borrowed("")));
+ if DISPLAY_COMMAND_IGNORE_ENVS.contains(&env.as_ref()) {
+ continue;
+ }
+ if env.contains(" ") {
+ write!(o, "\"{}={}\" ", env, val).unwrap();
+ } else if val.contains(" ") {
+ write!(o, "{}=\"{}\" ", env, val).unwrap();
+ } else {
+ write!(o, "{}={} ", env, val).unwrap();
+ }
+ }
+ let prog = lossy(c.get_program());
+ if prog.contains(" ") {
+ write!(o, "{:?}", prog).unwrap();
+ } else {
+ write!(o, "{}", prog).unwrap();
+ }
+ for arg in c.get_args() {
+ let arg = lossy(arg);
+ if arg.contains(" ") {
+ write!(o, " \"{}\"", arg).unwrap();
+ } else {
+ write!(o, " {}", arg).unwrap();
+ }
+ }
+ o
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::FxHashMap;
+ use itertools::Itertools;
+ use paths::Utf8Path;
+ use project_model::project_json;
+
+ use crate::flycheck::Substitutions;
+ use crate::flycheck::display_command;
+
+ #[test]
+ fn test_substitutions() {
+ let label = ":label";
+ let saved_file = "file.rs";
+
+ // Runnable says it needs both; you need both.
+ assert_eq!(test_substitute(None, None, "{label} {saved_file}").as_deref(), None);
+ assert_eq!(test_substitute(Some(label), None, "{label} {saved_file}").as_deref(), None);
+ assert_eq!(
+ test_substitute(None, Some(saved_file), "{label} {saved_file}").as_deref(),
+ None
+ );
+ assert_eq!(
+ test_substitute(Some(label), Some(saved_file), "{label} {saved_file}").as_deref(),
+ Some("build :label file.rs")
+ );
+
+ // Only need label? only need label.
+ assert_eq!(test_substitute(None, None, "{label}").as_deref(), None);
+ assert_eq!(test_substitute(Some(label), None, "{label}").as_deref(), Some("build :label"),);
+ assert_eq!(test_substitute(None, Some(saved_file), "{label}").as_deref(), None,);
+ assert_eq!(
+ test_substitute(Some(label), Some(saved_file), "{label}").as_deref(),
+ Some("build :label"),
+ );
+
+ // Only need saved_file
+ assert_eq!(test_substitute(None, None, "{saved_file}").as_deref(), None);
+ assert_eq!(test_substitute(Some(label), None, "{saved_file}").as_deref(), None);
+ assert_eq!(
+ test_substitute(None, Some(saved_file), "{saved_file}").as_deref(),
+ Some("build file.rs")
+ );
+ assert_eq!(
+ test_substitute(Some(label), Some(saved_file), "{saved_file}").as_deref(),
+ Some("build file.rs")
+ );
+
+ // Need neither
+ assert_eq!(test_substitute(None, None, "xxx").as_deref(), Some("build xxx"));
+ assert_eq!(test_substitute(Some(label), None, "xxx").as_deref(), Some("build xxx"));
+ assert_eq!(test_substitute(None, Some(saved_file), "xxx").as_deref(), Some("build xxx"));
+ assert_eq!(
+ test_substitute(Some(label), Some(saved_file), "xxx").as_deref(),
+ Some("build xxx")
+ );
+
+ // {label} mid-argument substitution
+ assert_eq!(
+ test_substitute(Some(label), None, "--label={label}").as_deref(),
+ Some("build --label=:label")
+ );
+
+ // {saved_file} mid-argument substitution
+ assert_eq!(
+ test_substitute(None, Some(saved_file), "--saved={saved_file}").as_deref(),
+ Some("build --saved=file.rs")
+ );
+
+ // $saved_file legacy support (no mid-argument substitution, we never supported that)
+ assert_eq!(
+ test_substitute(None, Some(saved_file), "$saved_file").as_deref(),
+ Some("build file.rs")
+ );
+
+ fn test_substitute(
+ label: Option<&str>,
+ saved_file: Option<&str>,
+ args: &str,
+ ) -> Option<String> {
+ Substitutions { label, saved_file }
+ .substitute(
+ &project_json::Runnable {
+ program: "build".to_owned(),
+ args: Vec::from_iter(args.split_whitespace().map(ToOwned::to_owned)),
+ cwd: Utf8Path::new("/path").to_owned(),
+ kind: project_json::RunnableKind::Flycheck,
+ },
+ &FxHashMap::default(),
+ )
+ .map(|command| {
+ command.get_args().map(|x| x.to_string_lossy()).collect_vec().join(" ")
+ })
+ .map(|args| format!("build {}", args))
+ }
+ }
+
+ #[test]
+ fn test_display_command() {
+ use std::path::Path;
+ let workdir = Path::new("workdir");
+ let mut cmd = toolchain::command("command", workdir, &FxHashMap::default());
+ assert_eq!(display_command(cmd.arg("--arg"), Some(workdir)), "command --arg");
+ assert_eq!(
+ display_command(cmd.arg("spaced arg"), Some(workdir)),
+ "command --arg \"spaced arg\""
+ );
+ assert_eq!(
+ display_command(cmd.env("ENVIRON", "yeah"), Some(workdir)),
+ "ENVIRON=yeah command --arg \"spaced arg\""
+ );
+ assert_eq!(
+ display_command(cmd.env("OTHER", "spaced env"), Some(workdir)),
+ "ENVIRON=yeah OTHER=\"spaced env\" command --arg \"spaced arg\""
+ );
+ assert_eq!(
+ display_command(cmd.current_dir("/tmp"), Some(workdir)),
+ "cd /tmp && ENVIRON=yeah OTHER=\"spaced env\" command --arg \"spaced arg\""
+ );
+ assert_eq!(
+ display_command(cmd.current_dir("/tmp and/thing"), Some(workdir)),
+ "cd \"/tmp and/thing\" && ENVIRON=yeah OTHER=\"spaced env\" command --arg \"spaced arg\""
+ );
+ assert_eq!(
+ display_command(cmd.current_dir("/tmp and/thing"), Some(Path::new("/tmp and/thing"))),
+ "ENVIRON=yeah OTHER=\"spaced env\" command --arg \"spaced arg\""
+ );
+ }
+}
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index 9beab3c0e4..afd4162de6 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -9,7 +9,6 @@ use std::{
time::{Duration, Instant},
};
-use cargo_metadata::PackageId;
use crossbeam_channel::{Receiver, Sender, unbounded};
use hir::ChangeWithProcMacros;
use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId};
@@ -36,7 +35,7 @@ use crate::{
config::{Config, ConfigChange, ConfigErrors, RatomlFileKind},
diagnostics::{CheckFixes, DiagnosticCollection},
discover,
- flycheck::{FlycheckHandle, FlycheckMessage},
+ flycheck::{FlycheckHandle, FlycheckMessage, PackageSpecifier},
line_index::{LineEndings, LineIndex},
lsp::{from_proto, to_proto::url_from_abs_path},
lsp_ext,
@@ -113,6 +112,7 @@ pub(crate) struct GlobalState {
pub(crate) flycheck_sender: Sender<FlycheckMessage>,
pub(crate) flycheck_receiver: Receiver<FlycheckMessage>,
pub(crate) last_flycheck_error: Option<String>,
+ pub(crate) flycheck_formatted_commands: Vec<String>,
// Test explorer
pub(crate) test_run_session: Option<Vec<CargoTestHandle>>,
@@ -188,7 +188,7 @@ pub(crate) struct GlobalState {
/// been called.
pub(crate) deferred_task_queue: DeferredTaskQueue,
- /// HACK: Workaround for https://github.com/rust-lang/rust-analyzer/issues/19709
+ /// HACK: Workaround for <https://github.com/rust-lang/rust-analyzer/issues/19709>
/// This is marked true if we failed to load a crate root file at crate graph creation,
/// which will usually end up causing a bunch of incorrect diagnostics on startup.
pub(crate) incomplete_crate_graph: bool,
@@ -289,6 +289,7 @@ impl GlobalState {
flycheck_sender,
flycheck_receiver,
last_flycheck_error: None,
+ flycheck_formatted_commands: vec![],
test_run_session: None,
test_run_sender,
@@ -825,7 +826,7 @@ impl GlobalStateSnapshot {
let Some(krate) = project.crate_by_root(path) else {
continue;
};
- let Some(build) = krate.build else {
+ let Some(build) = krate.build.clone() else {
continue;
};
@@ -833,6 +834,7 @@ impl GlobalStateSnapshot {
label: build.label,
target_kind: build.target_kind,
shell_runnables: project.runnables().to_owned(),
+ project_root: project.project_root().to_owned(),
}));
}
ProjectWorkspaceKind::DetachedFile { .. } => {}
@@ -844,23 +846,43 @@ impl GlobalStateSnapshot {
pub(crate) fn all_workspace_dependencies_for_package(
&self,
- package: &Arc<PackageId>,
- ) -> Option<FxHashSet<Arc<PackageId>>> {
- for workspace in self.workspaces.iter() {
- match &workspace.kind {
- ProjectWorkspaceKind::Cargo { cargo, .. }
- | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _, _)), .. } => {
- let package = cargo.packages().find(|p| cargo[*p].id == *package)?;
-
- return cargo[package]
- .all_member_deps
- .as_ref()
- .map(|deps| deps.iter().map(|dep| cargo[*dep].id.clone()).collect());
- }
- _ => {}
+ package: &PackageSpecifier,
+ ) -> Option<FxHashSet<PackageSpecifier>> {
+ match package {
+ PackageSpecifier::Cargo { package_id } => {
+ self.workspaces.iter().find_map(|workspace| match &workspace.kind {
+ ProjectWorkspaceKind::Cargo { cargo, .. }
+ | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _, _)), .. } => {
+ let package = cargo.packages().find(|p| cargo[*p].id == *package_id)?;
+
+ cargo[package].all_member_deps.as_ref().map(|deps| {
+ deps.iter()
+ .map(|dep| cargo[*dep].id.clone())
+ .map(|p| PackageSpecifier::Cargo { package_id: p })
+ .collect()
+ })
+ }
+ _ => None,
+ })
+ }
+ PackageSpecifier::BuildInfo { label } => {
+ self.workspaces.iter().find_map(|workspace| match &workspace.kind {
+ ProjectWorkspaceKind::Json(p) => {
+ let krate = p.crate_by_label(label)?;
+ Some(
+ krate
+ .iter_deps()
+ .filter_map(|dep| p[dep].build.as_ref())
+ .map(|build| PackageSpecifier::BuildInfo {
+ label: build.label.clone(),
+ })
+ .collect(),
+ )
+ }
+ _ => None,
+ })
}
}
- None
}
pub(crate) fn file_exists(&self, file_id: FileId) -> bool {
diff --git a/crates/rust-analyzer/src/handlers/dispatch.rs b/crates/rust-analyzer/src/handlers/dispatch.rs
index 10bbb0bb31..90deae2d90 100644
--- a/crates/rust-analyzer/src/handlers/dispatch.rs
+++ b/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -101,7 +101,7 @@ impl RequestDispatcher<'_> {
}
/// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not
- /// ready this will return a default constructed [`R::Result`].
+ /// ready this will return a default constructed `R::Result`.
pub(crate) fn on<const ALLOW_RETRYING: bool, R>(
&mut self,
f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
@@ -128,7 +128,7 @@ impl RequestDispatcher<'_> {
}
/// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not
- /// ready this will return a `default` constructed [`R::Result`].
+ /// ready this will return a `default` constructed `R::Result`.
pub(crate) fn on_with_vfs_default<R>(
&mut self,
f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
@@ -176,7 +176,7 @@ impl RequestDispatcher<'_> {
}
/// Dispatches a latency-sensitive request onto the thread pool. When the VFS is marked not
- /// ready this will return a default constructed [`R::Result`].
+ /// ready this will return a default constructed `R::Result`.
pub(crate) fn on_latency_sensitive<const ALLOW_RETRYING: bool, R>(
&mut self,
f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs
index 4a6544508f..138310b78f 100644
--- a/crates/rust-analyzer/src/handlers/notification.rs
+++ b/crates/rust-analyzer/src/handlers/notification.rs
@@ -18,7 +18,7 @@ use vfs::{AbsPathBuf, ChangeKind, VfsPath};
use crate::{
config::{Config, ConfigChange},
- flycheck::{InvocationStrategy, Target},
+ flycheck::{InvocationStrategy, PackageSpecifier, Target},
global_state::{FetchWorkspaceRequest, GlobalState},
lsp::{from_proto, utils::apply_document_changes},
lsp_ext::{self, RunFlycheckParams},
@@ -289,11 +289,24 @@ pub(crate) fn handle_did_change_watched_files(
state: &mut GlobalState,
params: DidChangeWatchedFilesParams,
) -> anyhow::Result<()> {
+ // we want to trigger flycheck if a file outside of our workspaces has changed,
+ // so as to reduce stale diagnostics when outside changes happen
+ let mut trigger_flycheck = false;
for change in params.changes.iter().unique_by(|&it| &it.uri) {
if let Ok(path) = from_proto::abs_path(&change.uri) {
+ if !trigger_flycheck {
+ trigger_flycheck =
+ state.config.workspace_roots().iter().any(|root| !path.starts_with(root));
+ }
state.loader.handle.invalidate(path);
}
}
+
+ if trigger_flycheck && state.config.check_on_save(None) {
+ for flycheck in state.flycheck.iter() {
+ flycheck.restart_workspace(None);
+ }
+ }
Ok(())
}
@@ -328,22 +341,33 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
}
InvocationStrategy::PerWorkspace => {
Box::new(move || {
- let target = TargetSpec::for_file(&world, file_id)?.and_then(|it| {
+ let saved_file = vfs_path.as_path().map(ToOwned::to_owned);
+ let target = TargetSpec::for_file(&world, file_id)?.map(|it| {
let tgt_kind = it.target_kind();
let (tgt_name, root, package) = match it {
- TargetSpec::Cargo(c) => (c.target, c.workspace_root, c.package_id),
- _ => return None,
+ TargetSpec::Cargo(c) => (
+ Some(c.target),
+ c.workspace_root,
+ PackageSpecifier::Cargo { package_id: c.package_id },
+ ),
+ TargetSpec::ProjectJson(p) => (
+ None,
+ p.project_root,
+ PackageSpecifier::BuildInfo { label: p.label.clone() },
+ ),
};
- let tgt = match tgt_kind {
- project_model::TargetKind::Bin => Target::Bin(tgt_name),
- project_model::TargetKind::Example => Target::Example(tgt_name),
- project_model::TargetKind::Test => Target::Test(tgt_name),
- project_model::TargetKind::Bench => Target::Benchmark(tgt_name),
- _ => return Some((None, root, package)),
- };
+ let tgt = tgt_name.and_then(|tgt_name| {
+ Some(match tgt_kind {
+ project_model::TargetKind::Bin => Target::Bin(tgt_name),
+ project_model::TargetKind::Example => Target::Example(tgt_name),
+ project_model::TargetKind::Test => Target::Test(tgt_name),
+ project_model::TargetKind::Bench => Target::Benchmark(tgt_name),
+ _ => return None,
+ })
+ });
- Some((Some(tgt), root, package))
+ (tgt, root, package)
});
tracing::debug!(?target, "flycheck target");
// we have a specific non-library target, attempt to only check that target, nothing
@@ -352,8 +376,10 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
if let Some((target, root, package)) = target {
// trigger a package check if we have a non-library target as that can't affect
// anything else in the workspace OR if we're not allowed to check the workspace as
- // the user opted into package checks then
- let package_check_allowed = target.is_some() || !may_flycheck_workspace;
+ // the user opted into package checks then OR if this is not cargo.
+ let package_check_allowed = target.is_some()
+ || !may_flycheck_workspace
+ || matches!(package, PackageSpecifier::BuildInfo { .. });
if package_check_allowed {
package_workspace_idx =
world.workspaces.iter().position(|ws| match &ws.kind {
@@ -365,16 +391,30 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
cargo: Some((cargo, _, _)),
..
} => *cargo.workspace_root() == root,
- _ => false,
+ project_model::ProjectWorkspaceKind::Json(p) => {
+ *p.project_root() == root
+ }
+ project_model::ProjectWorkspaceKind::DetachedFile {
+ cargo: None,
+ ..
+ } => false,
});
if let Some(idx) = package_workspace_idx {
- let workspace_deps =
- world.all_workspace_dependencies_for_package(&package);
- world.flycheck[idx].restart_for_package(
- package,
- target,
- workspace_deps,
- );
+ // flycheck handles are indexed by their ID (which is the workspace index),
+ // but not all workspaces have flycheck enabled (e.g., JSON projects without
+ // a flycheck template). Find the flycheck handle by its ID.
+ if let Some(flycheck) =
+ world.flycheck.iter().find(|fc| fc.id() == idx)
+ {
+ let workspace_deps =
+ world.all_workspace_dependencies_for_package(&package);
+ flycheck.restart_for_package(
+ package,
+ target,
+ workspace_deps,
+ saved_file.clone(),
+ );
+ }
}
}
}
@@ -444,7 +484,6 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
ws_contains_file && !is_pkg_ws
});
- let saved_file = vfs_path.as_path().map(ToOwned::to_owned);
let mut workspace_check_triggered = false;
// Find and trigger corresponding flychecks
'flychecks: for flycheck in world.flycheck.iter() {
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index c61825b99f..d16ca2fb48 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -53,6 +53,7 @@ fn integrated_highlighting_benchmark() {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: false,
+ proc_macro_processes: 1,
};
let (db, vfs, _proc_macro) = {
@@ -121,6 +122,7 @@ fn integrated_completion_benchmark() {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: true,
+ proc_macro_processes: 1,
};
let (db, vfs, _proc_macro) = {
@@ -322,6 +324,7 @@ fn integrated_diagnostics_benchmark() {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: true,
+ proc_macro_processes: 1,
};
let (db, vfs, _proc_macro) = {
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index 6f0f57725f..e5b983dcbf 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -70,6 +70,7 @@ pub(crate) fn symbol_kind(symbol_kind: SymbolKind) -> lsp_types::SymbolKind {
| SymbolKind::Attribute
| SymbolKind::Derive
| SymbolKind::DeriveHelper => lsp_types::SymbolKind::FUNCTION,
+ SymbolKind::CrateRoot => lsp_types::SymbolKind::PACKAGE,
SymbolKind::Module | SymbolKind::ToolModule => lsp_types::SymbolKind::MODULE,
SymbolKind::TypeAlias | SymbolKind::TypeParam | SymbolKind::SelfType => {
lsp_types::SymbolKind::TYPE_PARAMETER
@@ -141,6 +142,7 @@ pub(crate) fn completion_item_kind(
SymbolKind::Method => lsp_types::CompletionItemKind::METHOD,
SymbolKind::Const => lsp_types::CompletionItemKind::CONSTANT,
SymbolKind::ConstParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
+ SymbolKind::CrateRoot => lsp_types::CompletionItemKind::MODULE,
SymbolKind::Derive => lsp_types::CompletionItemKind::FUNCTION,
SymbolKind::DeriveHelper => lsp_types::CompletionItemKind::FUNCTION,
SymbolKind::Enum => lsp_types::CompletionItemKind::ENUM,
@@ -803,11 +805,16 @@ fn semantic_token_type_and_modifiers(
) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) {
use semantic_tokens::{modifiers as mods, types};
+ let mut mods = semantic_tokens::ModifierSet::default();
let ty = match highlight.tag {
HlTag::Symbol(symbol) => match symbol {
SymbolKind::Attribute => types::DECORATOR,
SymbolKind::Derive => types::DERIVE,
SymbolKind::DeriveHelper => types::DERIVE_HELPER,
+ SymbolKind::CrateRoot => {
+ mods |= mods::CRATE_ROOT;
+ types::NAMESPACE
+ }
SymbolKind::Module => types::NAMESPACE,
SymbolKind::Impl => types::TYPE_ALIAS,
SymbolKind::Field => types::PROPERTY,
@@ -870,7 +877,6 @@ fn semantic_token_type_and_modifiers(
},
};
- let mut mods = semantic_tokens::ModifierSet::default();
for modifier in highlight.mods.iter() {
let modifier = match modifier {
HlMod::Associated => mods::ASSOCIATED,
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index dd0813c144..64decc9e0d 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -309,10 +309,10 @@ impl GlobalState {
let event_dbg_msg = format!("{event:?}");
tracing::debug!(?loop_start, ?event, "handle_event");
- if tracing::enabled!(tracing::Level::INFO) {
+ if tracing::enabled!(tracing::Level::TRACE) {
let task_queue_len = self.task_pool.handle.len();
if task_queue_len > 0 {
- tracing::info!("task queue len: {}", task_queue_len);
+ tracing::trace!("task queue len: {}", task_queue_len);
}
}
@@ -666,31 +666,33 @@ impl GlobalState {
move |sender| {
// We aren't observing the semantics token cache here
let snapshot = AssertUnwindSafe(&snapshot);
- let Ok(diags) = std::panic::catch_unwind(|| {
+ let diags = std::panic::catch_unwind(|| {
fetch_native_diagnostics(
&snapshot,
subscriptions.clone(),
slice.clone(),
NativeDiagnosticsFetchKind::Syntax,
)
- }) else {
- return;
- };
+ })
+ .unwrap_or_else(|_| {
+ subscriptions.iter().map(|&id| (id, Vec::new())).collect::<Vec<_>>()
+ });
sender
.send(Task::Diagnostics(DiagnosticsTaskKind::Syntax(generation, diags)))
.unwrap();
if fetch_semantic {
- let Ok(diags) = std::panic::catch_unwind(|| {
+ let diags = std::panic::catch_unwind(|| {
fetch_native_diagnostics(
&snapshot,
subscriptions.clone(),
slice.clone(),
NativeDiagnosticsFetchKind::Semantic,
)
- }) else {
- return;
- };
+ })
+ .unwrap_or_else(|_| {
+ subscriptions.iter().map(|&id| (id, Vec::new())).collect::<Vec<_>>()
+ });
sender
.send(Task::Diagnostics(DiagnosticsTaskKind::Semantic(
generation, diags,
@@ -825,33 +827,29 @@ impl GlobalState {
}
Task::DiscoverLinkedProjects(arg) => {
if let Some(cfg) = self.config.discover_workspace_config() {
- // the clone is unfortunately necessary to avoid a borrowck error when
- // `self.report_progress` is called later
- let title = &cfg.progress_label.clone();
let command = cfg.command.clone();
let discover = DiscoverCommand::new(self.discover_sender.clone(), command);
- if self.discover_jobs_active == 0 {
- self.report_progress(title, Progress::Begin, None, None, None);
- }
- self.discover_jobs_active += 1;
-
let arg = match arg {
DiscoverProjectParam::Buildfile(it) => DiscoverArgument::Buildfile(it),
DiscoverProjectParam::Path(it) => DiscoverArgument::Path(it),
};
- let handle = discover
- .spawn(
- arg,
- &std::env::current_dir()
- .expect("Failed to get cwd during project discovery"),
- )
- .unwrap_or_else(|e| {
- panic!("Failed to spawn project discovery command: {e}")
- });
-
- self.discover_handles.push(handle);
+ match discover.spawn(arg, self.config.root_path().as_ref()) {
+ Ok(handle) => {
+ if self.discover_jobs_active == 0 {
+ let title = &cfg.progress_label.clone();
+ self.report_progress(title, Progress::Begin, None, None, None);
+ }
+ self.discover_jobs_active += 1;
+ self.discover_handles.push(handle)
+ }
+ Err(e) => self.show_message(
+ lsp_types::MessageType::ERROR,
+ format!("Failed to spawn project discovery command: {e:#}"),
+ false,
+ ),
+ }
}
}
Task::FetchBuildData(progress) => {
@@ -1179,8 +1177,24 @@ impl GlobalState {
kind: ClearDiagnosticsKind::OlderThan(generation, ClearScope::Package(package_id)),
} => self.diagnostics.clear_check_older_than_for_package(id, package_id, generation),
FlycheckMessage::Progress { id, progress } => {
+ let format_with_id = |user_facing_command: String| {
+ if self.flycheck.len() == 1 {
+ user_facing_command
+ } else {
+ format!("{user_facing_command} (#{})", id + 1)
+ }
+ };
+
+ self.flycheck_formatted_commands
+ .resize_with(self.flycheck.len().max(id + 1), || {
+ format_with_id(self.config.flycheck(None).to_string())
+ });
+
let (state, message) = match progress {
- flycheck::Progress::DidStart => (Progress::Begin, None),
+ flycheck::Progress::DidStart { user_facing_command } => {
+ self.flycheck_formatted_commands[id] = format_with_id(user_facing_command);
+ (Progress::Begin, None)
+ }
flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)),
flycheck::Progress::DidCancel => {
self.last_flycheck_error = None;
@@ -1200,13 +1214,8 @@ impl GlobalState {
}
};
- // When we're running multiple flychecks, we have to include a disambiguator in
- // the title, or the editor complains. Note that this is a user-facing string.
- let title = if self.flycheck.len() == 1 {
- format!("{}", self.config.flycheck(None))
- } else {
- format!("{} (#{})", self.config.flycheck(None), id + 1)
- };
+ // Clone because we &mut self for report_progress
+ let title = self.flycheck_formatted_commands[id].clone();
self.report_progress(
&title,
state,
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index e3a5ee2219..83f4a19b39 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -25,7 +25,9 @@ use load_cargo::{ProjectFolders, load_proc_macro};
use lsp_types::FileSystemWatcher;
use paths::Utf8Path;
use proc_macro_api::ProcMacroClient;
-use project_model::{ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts};
+use project_model::{
+ ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts, project_json,
+};
use stdx::{format_to, thread::ThreadIntent};
use triomphe::Arc;
use vfs::{AbsPath, AbsPathBuf, ChangeKind};
@@ -699,15 +701,19 @@ impl GlobalState {
_ => Default::default(),
};
info!("Using proc-macro server at {path}");
+ let num_process = self.config.proc_macro_num_processes();
- Some(ProcMacroClient::spawn(&path, &env, ws.toolchain.as_ref()).map_err(|err| {
- tracing::error!(
- "Failed to run proc-macro server from path {path}, error: {err:?}",
- );
- anyhow::format_err!(
- "Failed to run proc-macro server from path {path}, error: {err:?}",
- )
- }))
+ Some(
+ ProcMacroClient::spawn(&path, &env, ws.toolchain.as_ref(), num_process)
+ .map_err(|err| {
+ tracing::error!(
+ "Failed to run proc-macro server from path {path}, error: {err:?}",
+ );
+ anyhow::format_err!(
+ "Failed to run proc-macro server from path {path}, error: {err:?}",
+ )
+ }),
+ )
}))
}
@@ -875,6 +881,7 @@ impl GlobalState {
generation.clone(),
sender.clone(),
config,
+ crate::flycheck::FlycheckConfigJson::default(),
None,
self.config.root_path().clone(),
None,
@@ -894,16 +901,25 @@ impl GlobalState {
cargo: Some((cargo, _, _)),
..
} => (
+ crate::flycheck::FlycheckConfigJson::default(),
cargo.workspace_root(),
Some(cargo.manifest_path()),
Some(cargo.target_directory()),
),
ProjectWorkspaceKind::Json(project) => {
+ let config_json = crate::flycheck::FlycheckConfigJson {
+ single_template: project
+ .runnable_template(project_json::RunnableKind::Flycheck)
+ .cloned(),
+ };
// Enable flychecks for json projects if a custom flycheck command was supplied
// in the workspace configuration.
match config {
+ _ if config_json.any_configured() => {
+ (config_json, project.path(), None, None)
+ }
FlycheckConfig::CustomCommand { .. } => {
- (project.path(), None, None)
+ (config_json, project.path(), None, None)
}
_ => return None,
}
@@ -913,12 +929,13 @@ impl GlobalState {
ws.sysroot.root().map(ToOwned::to_owned),
))
})
- .map(|(id, (root, manifest_path, target_dir), sysroot_root)| {
+ .map(|(id, (config_json, root, manifest_path, target_dir), sysroot_root)| {
FlycheckHandle::spawn(
id,
generation.clone(),
sender.clone(),
config.clone(),
+ config_json,
sysroot_root,
root.to_path_buf(),
manifest_path.map(|it| it.to_path_buf()),
@@ -929,6 +946,7 @@ impl GlobalState {
}
}
.into();
+ self.flycheck_formatted_commands = vec![];
}
}
diff --git a/crates/rust-analyzer/src/target_spec.rs b/crates/rust-analyzer/src/target_spec.rs
index e0f95a7830..b8d9acc02a 100644
--- a/crates/rust-analyzer/src/target_spec.rs
+++ b/crates/rust-analyzer/src/target_spec.rs
@@ -68,6 +68,7 @@ pub(crate) struct ProjectJsonTargetSpec {
pub(crate) label: String,
pub(crate) target_kind: TargetKind,
pub(crate) shell_runnables: Vec<Runnable>,
+ pub(crate) project_root: AbsPathBuf,
}
impl ProjectJsonTargetSpec {
@@ -76,7 +77,16 @@ impl ProjectJsonTargetSpec {
RunnableKind::Bin => {
for runnable in &self.shell_runnables {
if matches!(runnable.kind, project_model::project_json::RunnableKind::Run) {
- return Some(runnable.clone());
+ let mut runnable = runnable.clone();
+
+ let replaced_args: Vec<_> = runnable
+ .args
+ .iter()
+ .map(|arg| arg.replace("{label}", &self.label))
+ .collect();
+ runnable.args = replaced_args;
+
+ return Some(runnable);
}
}
diff --git a/crates/rust-analyzer/src/task_pool.rs b/crates/rust-analyzer/src/task_pool.rs
index 8b8876b801..104cd3d2ea 100644
--- a/crates/rust-analyzer/src/task_pool.rs
+++ b/crates/rust-analyzer/src/task_pool.rs
@@ -52,7 +52,7 @@ impl<T> TaskPool<T> {
/// `DeferredTaskQueue` holds deferred tasks.
///
/// These are tasks that must be run after
-/// [`GlobalState::process_changes`] has been called.
+/// `GlobalState::process_changes` has been called.
pub(crate) struct DeferredTaskQueue {
pub(crate) sender: crossbeam_channel::Sender<DeferredTask>,
pub(crate) receiver: crossbeam_channel::Receiver<DeferredTask>,
diff --git a/crates/rust-analyzer/src/test_runner.rs b/crates/rust-analyzer/src/test_runner.rs
index 7111a15d02..0d9c8310d8 100644
--- a/crates/rust-analyzer/src/test_runner.rs
+++ b/crates/rust-analyzer/src/test_runner.rs
@@ -101,11 +101,11 @@ impl CargoTestHandle {
ws_target_dir: Option<&Utf8Path>,
test_target: TestTarget,
sender: Sender<CargoTestMessage>,
- ) -> std::io::Result<Self> {
+ ) -> anyhow::Result<Self> {
let mut cmd = toolchain::command(Tool::Cargo.path(), root, &options.extra_env);
cmd.env("RUSTC_BOOTSTRAP", "1");
cmd.arg("--color=always");
- cmd.arg("test");
+ cmd.arg(&options.subcommand); // test, usually
cmd.arg("--package");
cmd.arg(&test_target.package);
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index eb1b8c5dd0..b4a7b44d16 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -1447,7 +1447,27 @@ foo = { path = "../foo" }
.server()
.wait_until_workspace_is_loaded();
- server.request::<WorkspaceSymbolRequest>(Default::default(), json!([]));
+ server.request::<WorkspaceSymbolRequest>(
+ Default::default(),
+ json!([
+ {
+ "name": "bar",
+ "kind": 4,
+ "location": {
+ "uri": "file:///[..]bar/src/lib.rs",
+ "range": {
+ "start": {
+ "line": 0,
+ "character": 0
+ },
+ "end": {
+ "line": 0,
+ "character": 0
+ }
+ }
+ }
+ }]),
+ );
let server = Project::with_fixture(
r#"
@@ -1486,7 +1506,27 @@ version = "0.0.0"
.server()
.wait_until_workspace_is_loaded();
- server.request::<WorkspaceSymbolRequest>(Default::default(), json!([]));
+ server.request::<WorkspaceSymbolRequest>(
+ Default::default(),
+ json!([
+ {
+ "name": "baz",
+ "kind": 4,
+ "location": {
+ "uri": "file:///[..]baz/src/lib.rs",
+ "range": {
+ "start": {
+ "line": 0,
+ "character": 0
+ },
+ "end": {
+ "line": 0,
+ "character": 0
+ }
+ }
+ }
+ }]),
+ );
}
#[test]
diff --git a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs
index fdfa94dfee..fe05ef9465 100644
--- a/crates/span/src/hygiene.rs
+++ b/crates/span/src/hygiene.rs
@@ -8,9 +8,9 @@
//!
//! # The Expansion Order Hierarchy
//!
-//! `ExpnData` in rustc, rust-analyzer's version is [`MacroCallLoc`]. Traversing the hierarchy
-//! upwards can be achieved by walking up [`MacroCallLoc::kind`]'s contained file id, as
-//! [`MacroFile`]s are interned [`MacroCallLoc`]s.
+//! `ExpnData` in rustc, rust-analyzer's version is `MacroCallLoc`. Traversing the hierarchy
+//! upwards can be achieved by walking up `MacroCallLoc::kind`'s contained file id, as
+//! `MacroFile`s are interned `MacroCallLoc`s.
//!
//! # The Macro Definition Hierarchy
//!
@@ -18,7 +18,7 @@
//!
//! # The Call-site Hierarchy
//!
-//! `ExpnData::call_site` in rustc, [`MacroCallLoc::call_site`] in rust-analyzer.
+//! `ExpnData::call_site` in rustc, `MacroCallLoc::call_site` in rust-analyzer.
use crate::Edition;
use std::fmt;
@@ -241,9 +241,7 @@ const _: () = {
edition: zalsa_::interned::Lookup::into_owned(data.2),
parent: zalsa_::interned::Lookup::into_owned(data.3),
opaque: opaque(zalsa_::FromId::from_id(id)),
- opaque_and_semiopaque: opaque_and_semiopaque(
- zalsa_::FromId::from_id(id),
- ),
+ opaque_and_semiopaque: opaque_and_semiopaque(zalsa_::FromId::from_id(id)),
},
)
}
diff --git a/crates/stdx/src/process.rs b/crates/stdx/src/process.rs
index 2efeed45e4..7c4ae978b0 100644
--- a/crates/stdx/src/process.rs
+++ b/crates/stdx/src/process.rs
@@ -76,7 +76,7 @@ pub fn spawn_with_streaming_output(
Ok(Output { status, stdout, stderr })
}
-#[cfg(unix)]
+#[cfg(all(unix, not(target_arch = "wasm32")))]
mod imp {
use std::{
io::{self, prelude::*},
diff --git a/crates/syntax/src/ast/prec.rs b/crates/syntax/src/ast/prec.rs
index 8c88224a76..d99cf49261 100644
--- a/crates/syntax/src/ast/prec.rs
+++ b/crates/syntax/src/ast/prec.rs
@@ -154,6 +154,11 @@ fn check_ancestry(ancestor: &SyntaxNode, descendent: &SyntaxNode) -> bool {
bail()
}
+fn next_token_of(node: &SyntaxNode) -> Option<ast::SyntaxToken> {
+ let last = node.last_token()?;
+ skip_trivia_token(last.next_token()?, Direction::Next)
+}
+
impl Expr {
pub fn precedence(&self) -> ExprPrecedence {
precedence(self)
@@ -197,6 +202,8 @@ impl Expr {
if is_parent_call_expr && is_field_expr {
return true;
}
+ let place_of_parent =
+ || place_of.ancestors().find(|it| it.parent().is_none_or(|p| &p == parent.syntax()));
// Special-case block weirdness
if parent.child_is_followed_by_a_block() {
@@ -226,15 +233,24 @@ impl Expr {
// For `&&`, we avoid introducing `<ret-like> && <expr>` into a binary chain.
if self.precedence() == ExprPrecedence::Jump
- && let Some(node) =
- place_of.ancestors().find(|it| it.parent().is_none_or(|p| &p == parent.syntax()))
- && let Some(next) =
- node.last_token().and_then(|t| skip_trivia_token(t.next_token()?, Direction::Next))
+ && let Some(node) = place_of_parent()
+ && let Some(next) = next_token_of(&node)
&& matches!(next.kind(), T![||] | T![&&])
{
return true;
}
+ // Special-case `2 as x < 3`
+ if let ast::Expr::CastExpr(it) = self
+ && let Some(ty) = it.ty()
+ && ty.syntax().last_token().and_then(|it| ast::NameLike::cast(it.parent()?)).is_some()
+ && let Some(node) = place_of_parent()
+ && let Some(next) = next_token_of(&node)
+ && matches!(next.kind(), T![<] | T![<<])
+ {
+ return true;
+ }
+
if self.is_paren_like()
|| parent.is_paren_like()
|| self.is_prefix()
diff --git a/crates/syntax/src/ast/syntax_factory/constructors.rs b/crates/syntax/src/ast/syntax_factory/constructors.rs
index 7cf9e2bf14..5fe419ad4e 100644
--- a/crates/syntax/src/ast/syntax_factory/constructors.rs
+++ b/crates/syntax/src/ast/syntax_factory/constructors.rs
@@ -1578,6 +1578,44 @@ impl SyntaxFactory {
pub fn ident(&self, text: &str) -> SyntaxToken {
make::tokens::ident(text)
}
+
+ pub fn mut_self_param(&self) -> ast::SelfParam {
+ let ast = make::mut_self_param().clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
+ pub fn ret_type(&self, ty: ast::Type) -> ast::RetType {
+ let ast = make::ret_type(ty.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(ty.syntax().clone(), ast.ty().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+ ast
+ }
+
+ pub fn ty_ref(&self, ty: ast::Type, is_mut: bool) -> ast::Type {
+ let ast = make::ty_ref(ty.clone(), is_mut).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ match &ast {
+ ast::Type::RefType(ref_ty) => {
+ builder.map_node(ty.syntax().clone(), ref_ty.ty().unwrap().syntax().clone());
+ }
+ _ => unreachable!(),
+ }
+ builder.finish(&mut mapping);
+ }
+ ast
+ }
}
// `ext` constructors
diff --git a/crates/syntax/src/ptr.rs b/crates/syntax/src/ptr.rs
index 34c07598d2..c4979b8e3a 100644
--- a/crates/syntax/src/ptr.rs
+++ b/crates/syntax/src/ptr.rs
@@ -68,7 +68,7 @@ impl<N: AstNode> AstPtr<N> {
self.raw
}
- pub fn text_range(&self) -> TextRange {
+ pub fn text_range(self) -> TextRange {
self.raw.text_range()
}
diff --git a/crates/syntax/src/syntax_editor/mapping.rs b/crates/syntax/src/syntax_editor/mapping.rs
index 1eaef03197..6257bf4e57 100644
--- a/crates/syntax/src/syntax_editor/mapping.rs
+++ b/crates/syntax/src/syntax_editor/mapping.rs
@@ -1,6 +1,6 @@
//! Maps syntax elements through disjoint syntax nodes.
//!
-//! [`SyntaxMappingBuilder`] should be used to create mappings to add to a [`SyntaxEditor`]
+//! [`SyntaxMappingBuilder`] should be used to create mappings to add to a `SyntaxEditor`
use itertools::Itertools;
use rustc_hash::FxHashMap;
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs
index d81f27d7c3..ca68edd88c 100644
--- a/crates/test-fixture/src/lib.rs
+++ b/crates/test-fixture/src/lib.rs
@@ -37,7 +37,110 @@ use triomphe::Arc;
pub const WORKSPACE: base_db::SourceRootId = base_db::SourceRootId(0);
+/// A trait for setting up test databases from fixture strings.
+///
+/// Fixtures are strings containing Rust source code with optional metadata that describe
+/// a project setup. This is the primary way to write tests for rust-analyzer without
+/// having to depend on the entire sysroot.
+///
+/// # Fixture Syntax
+///
+/// ## Basic Structure
+///
+/// A fixture without metadata is parsed into a single source file (`/main.rs`).
+/// Metadata is added after a `//-` comment prefix.
+///
+/// ```text
+/// //- /main.rs
+/// fn main() {
+/// println!("Hello");
+/// }
+/// ```
+///
+/// Note that the fixture syntax is optional and can be omitted if the test only requires
+/// a simple single file.
+///
+/// ## File Metadata
+///
+/// Each file can have the following metadata after `//-`:
+///
+/// - **Path** (required): Must start with `/`, e.g., `/main.rs`, `/lib.rs`, `/foo/bar.rs`
+/// - **`crate:<name>`**: Defines a new crate with this file as its root
+/// - Optional version: `crate:foo@0.1.0,https://example.com/repo.git`
+/// - **`deps:<crate1>,<crate2>`**: Dependencies (requires `crate:`)
+/// - **`extern-prelude:<crate1>,<crate2>`**: Limits extern prelude to specified crates
+/// - **`edition:<year>`**: Rust edition (2015, 2018, 2021, 2024). Defaults to current.
+/// - **`cfg:<key>=<value>,<flag>`**: Configuration options, e.g., `cfg:test,feature="foo"`
+/// - **`env:<KEY>=<value>`**: Environment variables
+/// - **`crate-attr:<attr>`**: Crate-level attributes, e.g., `crate-attr:no_std`
+/// - **`new_source_root:local|library`**: Starts a new source root
+/// - **`library`**: Marks crate as external library (not workspace member)
+///
+/// ## Global Meta (must appear at the top, in order)
+///
+/// - **`//- toolchain: nightly|stable`**: Sets the Rust toolchain (default: stable)
+/// - **`//- target_data_layout: <layout>`**: LLVM data layout string
+/// - **`//- target_arch: <arch>`**: Target architecture (default: x86_64)
+/// - **`//- proc_macros: <name1>,<name2>`**: Enables predefined test proc macros
+/// - **`//- minicore: <flag1>, <flag2>`**: Includes subset of libcore
+///
+/// ## Cursor Markers
+///
+/// Use `$0` to mark cursor position(s) in the fixture:
+/// - Single `$0`: marks a position (use with [`with_position`](Self::with_position))
+/// - Two `$0` markers: marks a range (use with [`with_range`](Self::with_range))
+/// - Escape as `\$0` if you need a literal `$0`
+///
+/// # Examples
+///
+/// ## Single file with cursor position
+/// ```text
+/// r#"
+/// fn main() {
+/// let x$0 = 42;
+/// }
+/// "#
+/// ```
+///
+/// ## Multiple crates with dependencies
+/// ```text
+/// r#"
+/// //- /main.rs crate:main deps:helper
+/// use helper::greet;
+/// fn main() { greet(); }
+///
+/// //- /lib.rs crate:helper
+/// pub fn greet() {}
+/// "#
+/// ```
+///
+/// ## Using minicore for lang items
+/// ```text
+/// r#"
+/// //- minicore: option, result, iterator
+/// //- /main.rs
+/// fn foo() -> Option<i32> { Some(42) }
+/// "#
+/// ```
+///
+/// The available minicore flags are listed at the top of `crates/test-utils/src/minicore.rs`.
+///
+/// ## Using test proc macros
+/// ```text
+/// r#"
+/// //- proc_macros: identity, mirror
+/// //- /main.rs crate:main deps:proc_macros
+/// use proc_macros::identity;
+///
+/// #[identity]
+/// fn foo() {}
+/// "#
+/// ```
+///
+/// Available proc macros: `identity` (attr), `DeriveIdentity` (derive), `input_replace` (attr),
+/// `mirror` (bang), `shorten` (bang)
pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
+ /// See the trait documentation for more information on fixtures.
#[track_caller]
fn with_single_file(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
@@ -50,6 +153,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
(db, file)
}
+ /// See the trait documentation for more information on fixtures.
#[track_caller]
fn with_many_files(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
@@ -66,6 +170,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
(db, files)
}
+ /// See the trait documentation for more information on fixtures.
#[track_caller]
fn with_files(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> Self {
let mut db = Self::default();
@@ -75,6 +180,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
db
}
+ /// See the trait documentation for more information on fixtures.
#[track_caller]
fn with_files_extra_proc_macros(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
@@ -88,6 +194,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
db
}
+ /// See the trait documentation for more information on fixtures.
#[track_caller]
fn with_position(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Self, FilePosition) {
let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture);
@@ -95,6 +202,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
(db, FilePosition { file_id, offset })
}
+ /// See the trait documentation for more information on fixtures.
#[track_caller]
fn with_range(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Self, FileRange) {
let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture);
@@ -102,6 +210,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
(db, FileRange { file_id, range })
}
+ /// See the trait documentation for more information on fixtures.
#[track_caller]
fn with_range_or_offset(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index c3429356d9..48c3e89525 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -58,6 +58,7 @@
//! pin:
//! pointee: copy, send, sync, ord, hash, unpin, phantom_data
//! range:
+//! new_range:
//! receiver: deref
//! result:
//! send: sized
@@ -175,7 +176,9 @@ pub mod marker {
// region:clone
impl<T: PointeeSized> Clone for PhantomData<T> {
- fn clone(&self) -> Self { Self }
+ fn clone(&self) -> Self {
+ Self
+ }
}
// endregion:clone
@@ -1128,6 +1131,32 @@ pub mod ops {
// endregion:dispatch_from_dyn
}
+// region:new_range
+pub mod range {
+ #[lang = "RangeCopy"]
+ pub struct Range<Idx> {
+ pub start: Idx,
+ pub end: Idx,
+ }
+
+ #[lang = "RangeFromCopy"]
+ pub struct RangeFrom<Idx> {
+ pub start: Idx,
+ }
+
+ #[lang = "RangeInclusiveCopy"]
+ pub struct RangeInclusive<Idx> {
+ pub start: Idx,
+ pub end: Idx,
+ }
+
+ #[lang = "RangeToInclusiveCopy"]
+ pub struct RangeToInclusive<Idx> {
+ pub end: Idx,
+ }
+}
+// endregion:new_range
+
// region:eq
pub mod cmp {
use crate::marker::PointeeSized;
@@ -1144,7 +1173,9 @@ pub mod cmp {
// region:builtin_impls
impl PartialEq for () {
- fn eq(&self, other: &()) -> bool { true }
+ fn eq(&self, other: &()) -> bool {
+ true
+ }
}
// endregion:builtin_impls
@@ -1567,10 +1598,7 @@ pub mod pin {
}
// endregion:dispatch_from_dyn
// region:coerce_unsized
- impl<Ptr, U> crate::ops::CoerceUnsized<Pin<U>> for Pin<Ptr> where
- Ptr: crate::ops::CoerceUnsized<U>
- {
- }
+ impl<Ptr, U> crate::ops::CoerceUnsized<Pin<U>> for Pin<Ptr> where Ptr: crate::ops::CoerceUnsized<U> {}
// endregion:coerce_unsized
}
// endregion:pin
@@ -1792,9 +1820,9 @@ pub mod iter {
fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> Self;
}
}
- pub use self::collect::{IntoIterator, FromIterator};
+ pub use self::collect::{FromIterator, IntoIterator};
}
- pub use self::traits::{IntoIterator, FromIterator, Iterator};
+ pub use self::traits::{FromIterator, IntoIterator, Iterator};
}
// endregion:iterator
@@ -1880,6 +1908,10 @@ mod arch {
pub macro global_asm("assembly template", $(operands,)* $(options($(option),*))?) {
/* compiler built-in */
}
+ #[rustc_builtin_macro]
+ pub macro naked_asm("assembly template", $(operands,)* $(options($(option),*))?) {
+ /* compiler built-in */
+ }
}
// endregion:asm
@@ -2087,30 +2119,30 @@ macro_rules! column {
pub mod prelude {
pub mod v1 {
pub use crate::{
- clone::Clone, // :clone
- cmp::{Eq, PartialEq}, // :eq
- cmp::{Ord, PartialOrd}, // :ord
- convert::AsMut, // :as_mut
- convert::AsRef, // :as_ref
- convert::{From, Into, TryFrom, TryInto}, // :from
- default::Default, // :default
- iter::{IntoIterator, Iterator, FromIterator}, // :iterator
- macros::builtin::{derive, derive_const}, // :derive
- marker::Copy, // :copy
- marker::Send, // :send
- marker::Sized, // :sized
- marker::Sync, // :sync
- mem::drop, // :drop
- mem::size_of, // :size_of
- ops::Drop, // :drop
- ops::{AsyncFn, AsyncFnMut, AsyncFnOnce}, // :async_fn
- ops::{Fn, FnMut, FnOnce}, // :fn
- option::Option::{self, None, Some}, // :option
- panic, // :panic
- result::Result::{self, Err, Ok}, // :result
- str::FromStr, // :str
- fmt::derive::Debug, // :fmt, derive
- hash::derive::Hash, // :hash, derive
+ clone::Clone, // :clone
+ cmp::{Eq, PartialEq}, // :eq
+ cmp::{Ord, PartialOrd}, // :ord
+ convert::AsMut, // :as_mut
+ convert::AsRef, // :as_ref
+ convert::{From, Into, TryFrom, TryInto}, // :from
+ default::Default, // :default
+ fmt::derive::Debug, // :fmt, derive
+ hash::derive::Hash, // :hash, derive
+ iter::{FromIterator, IntoIterator, Iterator}, // :iterator
+ macros::builtin::{derive, derive_const}, // :derive
+ marker::Copy, // :copy
+ marker::Send, // :send
+ marker::Sized, // :sized
+ marker::Sync, // :sync
+ mem::drop, // :drop
+ mem::size_of, // :size_of
+ ops::Drop, // :drop
+ ops::{AsyncFn, AsyncFnMut, AsyncFnOnce}, // :async_fn
+ ops::{Fn, FnMut, FnOnce}, // :fn
+ option::Option::{self, None, Some}, // :option
+ panic, // :panic
+ result::Result::{self, Err, Ok}, // :result
+ str::FromStr, // :str
};
}
diff --git a/crates/toolchain/src/lib.rs b/crates/toolchain/src/lib.rs
index 39319886cf..1a17269838 100644
--- a/crates/toolchain/src/lib.rs
+++ b/crates/toolchain/src/lib.rs
@@ -74,6 +74,9 @@ impl Tool {
// Prevent rustup from automatically installing toolchains, see https://github.com/rust-lang/rust-analyzer/issues/20719.
pub const NO_RUSTUP_AUTO_INSTALL_ENV: (&str, &str) = ("RUSTUP_AUTO_INSTALL", "0");
+// These get ignored when displaying what command is running in LSP status messages.
+pub const DISPLAY_COMMAND_IGNORE_ENVS: &[&str] = &[NO_RUSTUP_AUTO_INSTALL_ENV.0];
+
#[allow(clippy::disallowed_types)] /* generic parameter allows for FxHashMap */
pub fn command<H>(
cmd: impl AsRef<OsStr>,
diff --git a/crates/vfs-notify/Cargo.toml b/crates/vfs-notify/Cargo.toml
index bd6c8331e6..ce7ea53b53 100644
--- a/crates/vfs-notify/Cargo.toml
+++ b/crates/vfs-notify/Cargo.toml
@@ -16,7 +16,7 @@ doctest = false
tracing.workspace = true
walkdir = "2.5.0"
crossbeam-channel.workspace = true
-notify = "8.0.0"
+notify = "8.2.0"
rayon = "1.10.0"
stdx.workspace = true
diff --git a/docs/book/src/configuration_generated.md b/docs/book/src/configuration_generated.md
index 58b6363345..8460c2c7d0 100644
--- a/docs/book/src/configuration_generated.md
+++ b/docs/book/src/configuration_generated.md
@@ -323,10 +323,18 @@ each of them, with the working directory being the workspace root
(i.e., the folder containing the `Cargo.toml`). This can be overwritten
by changing `#rust-analyzer.check.invocationStrategy#`.
-If `$saved_file` is part of the command, rust-analyzer will pass
-the absolute path of the saved file to the provided command. This is
-intended to be used with non-Cargo build systems.
-Note that `$saved_file` is experimental and may be removed in the future.
+It supports two interpolation syntaxes, both mainly intended to be used with
+[non-Cargo build systems](./non_cargo_based_projects.md):
+
+- If `{saved_file}` is part of the command, rust-analyzer will pass
+ the absolute path of the saved file to the provided command.
+ (A previous version, `$saved_file`, also works.)
+- If `{label}` is part of the command, rust-analyzer will pass the
+ Cargo package ID, which can be used with `cargo check -p`, or a build label from
+ `rust-project.json`. If `{label}` is included, rust-analyzer behaves much like
+ [`"rust-analyzer.check.workspace": false`](#check.workspace).
+
+
An example command would be:
@@ -1310,6 +1318,16 @@ These proc-macros will be ignored when trying to expand them.
This config takes a map of crate names with the exported proc-macro names to ignore as values.
+## rust-analyzer.procMacro.processes {#procMacro.processes}
+
+Default: `1`
+
+Number of proc-macro server processes to spawn.
+
+Controls how many independent `proc-macro-srv` processes rust-analyzer
+runs in parallel to handle macro expansion.
+
+
## rust-analyzer.procMacro.server {#procMacro.server}
Default: `null`
@@ -1611,33 +1629,83 @@ though Cargo might be the eventual consumer.
Default: `null`
-Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].
+Configure a command that rust-analyzer can invoke to
+obtain configuration.
-[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.
-`progress_label` is used for the title in progress indicators, whereas `files_to_watch`
-is used to determine which build system-specific files should be watched in order to
-reload rust-analyzer.
+This is an alternative to manually generating
+`rust-project.json`: it enables rust-analyzer to generate
+rust-project.json on the fly, and regenerate it when
+switching or modifying projects.
+
+This is an object with three fields:
+
+* `command`: the shell command to invoke
+
+* `filesToWatch`: which build system-specific files should
+be watched to trigger regenerating the configuration
+
+* `progressLabel`: the name of the command, used in
+progress indicators in the IDE
+
+Here's an example of a valid configuration:
-Below is an example of a valid configuration:
```json
"rust-analyzer.workspace.discoverConfig": {
"command": [
"rust-project",
- "develop-json"
+ "develop-json",
+ "{arg}"
],
- "progressLabel": "rust-analyzer",
+ "progressLabel": "buck2/rust-project",
"filesToWatch": [
"BUCK"
]
}
```
-## On `DiscoverWorkspaceConfig::command`
+## Argument Substitutions
+
+If `command` includes the argument `{arg}`, that argument will be substituted
+with the JSON-serialized form of the following enum:
+
+```norun
+#[derive(PartialEq, Clone, Debug, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub enum DiscoverArgument {
+ Path(AbsPathBuf),
+ Buildfile(AbsPathBuf),
+}
+```
+
+rust-analyzer will use the path invocation to find and
+generate a `rust-project.json` and therefore a
+workspace. Example:
+
+
+```norun
+rust-project develop-json '{ "path": "myproject/src/main.rs" }'
+```
+
+rust-analyzer will use build file invocations to update an
+existing workspace.
+
+For example, with a build file and the configuration above:
+
+```norun
+rust-project develop-json '{ "buildfile": "myproject/BUCK" }'
+```
+
+As a reference for implementors, buck2's `rust-project`
+will likely be useful:
+<https://github.com/facebook/buck2/tree/main/integrations/rust-project>.
+
+## Discover Command Output
**Warning**: This format is provisional and subject to change.
-[`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to
-`DiscoverProjectData::Finished`:
+The discover command should output JSON objects, one per
+line (JSONL format). These objects should correspond to
+this Rust data type:
```norun
#[derive(Debug, Clone, Deserialize, Serialize)]
@@ -1650,7 +1718,14 @@ enum DiscoverProjectData {
}
```
-As JSON, `DiscoverProjectData::Finished` is:
+For example, a progress event:
+
+```json
+{"kind":"progress","message":"generating rust-project.json"}
+```
+
+A finished event can look like this (expanded and
+commented for readability):
```json
{
@@ -1658,7 +1733,7 @@ As JSON, `DiscoverProjectData::Finished` is:
"kind": "finished",
// the file used by a non-Cargo build system to define
// a package or target.
- "buildfile": "rust-analyzer/BUILD",
+ "buildfile": "rust-analyzer/BUCK",
// the contents of a rust-project.json, elided for brevity
"project": {
"sysroot": "foo",
@@ -1667,41 +1742,9 @@ As JSON, `DiscoverProjectData::Finished` is:
}
```
-It is encouraged, but not required, to use the other variants on `DiscoverProjectData`
-to provide a more polished end-user experience.
-
-`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be
-substituted with the JSON-serialized form of the following enum:
-
-```norun
-#[derive(PartialEq, Clone, Debug, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub enum DiscoverArgument {
- Path(AbsPathBuf),
- Buildfile(AbsPathBuf),
-}
-```
-
-The JSON representation of `DiscoverArgument::Path` is:
-
-```json
-{
- "path": "src/main.rs"
-}
-```
-
-Similarly, the JSON representation of `DiscoverArgument::Buildfile` is:
-
-```json
-{
- "buildfile": "BUILD"
-}
-```
-
-`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and
-therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to to update an
-existing workspace. As a reference for implementors, buck2's `rust-project` will likely
-be useful: <https://github.com/facebook/buck2/tree/main/integrations/rust-project>.
+Only the finished event is required, but the other
+variants are encouraged to give users more feedback about
+progress or errors.
## rust-analyzer.workspace.symbol.search.excludeImports {#workspace.symbol.search.excludeImports}
diff --git a/docs/book/src/faq.md b/docs/book/src/faq.md
index 8c143ab949..9eeb2ae555 100644
--- a/docs/book/src/faq.md
+++ b/docs/book/src/faq.md
@@ -4,7 +4,7 @@
rust-analyzer fails to resolve `None`, and thinks you are binding to a variable
named `None`. That's usually a sign of a corrupted sysroot. Try removing and re-installing
-it: `rustup component remove rust-src` then `rustup component install rust-src`.
+it: `rustup component remove rust-src` then `rustup component add rust-src`.
### Rust Analyzer and Cargo compete over the build lock
diff --git a/docs/book/src/non_cargo_based_projects.md b/docs/book/src/non_cargo_based_projects.md
index e7df4a5d76..f1f10ae336 100644
--- a/docs/book/src/non_cargo_based_projects.md
+++ b/docs/book/src/non_cargo_based_projects.md
@@ -204,23 +204,40 @@ interface Runnable {
args: string[];
/// The current working directory of the runnable.
cwd: string;
- /// Used to decide what code lens to offer.
+ /// Maps a runnable to a piece of rust-analyzer functionality.
///
- /// `testOne`: This runnable will be used when the user clicks the 'Run Test'
- /// CodeLens above a test.
+ /// - `testOne`: This runnable will be used when the user clicks the 'Run Test'
+ /// CodeLens above a test.
+ /// - `run`: This runnable will be used when the user clicks the 'Run' CodeLens
+ /// above a main function or triggers a run command.
+ /// - `flycheck`: This is run to provide check-on-save diagnostics when the user
+ /// saves a file. It must emit rustc JSON diagnostics that rust-analyzer can
+ /// parse. If this runnable is not specified, we may try to use `cargo check -p`.
+ /// This is only run for a single crate that the user saved a file in. The
+ /// {label} syntax is replaced with `BuildInfo::label`.
+ /// Alternatively, you may use `{saved_file}` and figure out which crate
+ /// to produce diagnostics for based on that.
///
/// The args for testOne can contain two template strings:
/// `{label}` and `{test_id}`. `{label}` will be replaced
- /// with the `Build::label` and `{test_id}` will be replaced
+ /// with the `BuildInfo::label` and `{test_id}` will be replaced
/// with the test name.
- kind: 'testOne' | string;
+ kind: 'testOne' | 'run' | 'flycheck' | string;
}
```
This format is provisional and subject to change. Specifically, the
`roots` setup will be different eventually.
-There are three ways to feed `rust-project.json` to rust-analyzer:
+### Providing a JSON project to rust-analyzer
+
+There are four ways to feed `rust-project.json` to rust-analyzer:
+
+- Use
+  [`"rust-analyzer.workspace.discoverConfig": { … }`](./configuration.md#workspace.discoverConfig)
+ to specify a workspace discovery command to generate project descriptions
+ on-the-fly. Please note that the command output is message-oriented and must
+ output JSONL [as described in the configuration docs](./configuration.md#workspace.discoverConfig).
- Place `rust-project.json` file at the root of the project, and
rust-analyzer will discover it.
@@ -240,19 +257,86 @@ location or (for inline JSON) relative to `rootUri`.
You can set the `RA_LOG` environment variable to `rust_analyzer=info` to
inspect how rust-analyzer handles config and project loading.
-Note that calls to `cargo check` are disabled when using
-`rust-project.json` by default, so compilation errors and warnings will
-no longer be sent to your LSP client. To enable these compilation errors
-you will need to specify explicitly what command rust-analyzer should
-run to perform the checks using the
-`rust-analyzer.check.overrideCommand` configuration. As an example, the
-following configuration explicitly sets `cargo check` as the `check`
-command.
-
- { "rust-analyzer.check.overrideCommand": ["cargo", "check", "--message-format=json"] }
-
-`check.overrideCommand` requires the command specified to output json
-error messages for rust-analyzer to consume. The `--message-format=json`
-flag does this for `cargo check` so whichever command you use must also
-output errors in this format. See the [Configuration](#_configuration)
-section for more information.
+### Flycheck support
+
+Rust-analyzer has functionality to run an actual build of a crate when the user saves a file, to
+fill in diagnostics it does not implement natively. This is known as "flycheck".
+
+**Flycheck is disabled when using `rust-project.json` unless explicitly configured**, so compilation
+errors and warnings will no longer be sent to your LSP client by default. To enable these
+compilation errors you will need to specify explicitly what command rust-analyzer should run to
+perform the checks. There are two ways to do this:
+
+- `rust-project.json` may contain a `runnables` field. The `flycheck` runnable may be used to
+ configure a check command. See above for documentation.
+
+- Using the [`rust-analyzer.check.overrideCommand`](./configuration.md#check.overrideCommand)
+ configuration. This will also override anything in `rust-project.json`. As an example, the
+ following configuration explicitly sets `cargo check` as the `check` command.
+
+ ```json
+ { "rust-analyzer.check.overrideCommand": ["cargo", "check", "--message-format=json"] }
+ ```
+
+  Note also that this works with Cargo projects.
+
+Either option requires the command specified to output JSON error messages for rust-analyzer to
+consume. The `--message-format=json` flag does this for `cargo check` so whichever command you use
+must also output errors in this format.
+
+Either option also supports two syntaxes within each argument:
+
+- `{label}` will be replaced with the `BuildInfo::label` of the crate
+ containing a saved file, if `BuildInfo` is provided. In the case of `check.overrideCommand` being
+ used in a Cargo project, this will be the cargo package ID, which can be used with `cargo check -p`.
+- `{saved_file}` will be replaced with an absolute path to the saved file. This can be queried against a
+ build system to find targets that include the file.
+
+For example:
+
+```json
+{ "rust-analyzer.check.overrideCommand": ["custom_crate_checker", "{label}"] }
+```
+
+If you do use `{label}` or `{saved_file}`, the command will not be run unless the relevant value can
+be substituted.
+
+
+#### Flycheck considerations
+
+##### Diagnostic output on error
+
+A flycheck command using a complex build orchestrator like `"bazel", "build", "{label}"`, even with
+a tweak to return JSON messages, is often insufficient. Such a command will typically succeed if
+there are warnings, but if there are errors, the build may fail before any diagnostics are emitted,
+producing no output at all. You must build a package in such a way that the build succeeds even if `rustc`
+exits with an error, and prints the JSON build messages in every case.
+
+##### Diagnostics for upstream crates
+
+`cargo check -p` re-prints any errors and warnings in crates higher up in the dependency graph
+than the one requested. We do clear all diagnostics when flychecking, so if you manage to
+replicate this behaviour, diagnostics for crates other than the one being checked will show up in
+the editor. If you do not, then users may be confused that diagnostics are "stuck" or disappear
+entirely when there is a build error in an upstream crate.
+
+##### Compiler options
+
+`cargo check` invokes rustc differently from `cargo build`. It turns off codegen (with `rustc
+--emit=metadata`), which results in lower latency to get to diagnostics. If your build system can
+configure this, it is recommended.
+
+If your build tool can configure rustc for incremental compiles, this is also recommended.
+
+##### Locking and pre-emption
+
+In any good build system, including Cargo, build commands sometimes block each other. Running a
+flycheck will (by default) frequently block you from running other build commands. Generally this is
+undesirable. Users will have to (unintuitively) press save again in the editor to cancel a
+flycheck, so that some other command may proceed.
+
+If your build system has the ability to isolate any rust-analyzer-driven flychecks and prevent lock
+contention, for example a separate build output directory and/or daemon instance, this is
+recommended. Alternatively, consider using a feature if available that can set the priority of
+various build invocations and automatically cancel lower-priority ones when needed. Flychecks should
+be set to a lower priority than general direct build invocations.
diff --git a/editors/code/package.json b/editors/code/package.json
index 2157cbd486..fc20597e88 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -1213,7 +1213,7 @@
"title": "Check",
"properties": {
"rust-analyzer.check.overrideCommand": {
- "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#`.\n\nIf `$saved_file` is part of the command, rust-analyzer will pass\nthe absolute path of the saved file to the provided command. This is\nintended to be used with non-Cargo build systems.\nNote that `$saved_file` is experimental and may be removed in the future.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n\nNote: The option must be specified as an array of command line arguments, with\nthe first argument being the name of the command to run.",
+ "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#`.\n\nIt supports two interpolation syntaxes, both mainly intended to be used with\n[non-Cargo build systems](./non_cargo_based_projects.md):\n\n- If `{saved_file}` is part of the command, rust-analyzer will pass\n the absolute path of the saved file to the provided command.\n (A previous version, `$saved_file`, also works.)\n- If `{label}` is part of the command, rust-analyzer will pass the\n Cargo package ID, which can be used with `cargo check -p`, or a build label from\n `rust-project.json`. If `{label}` is included, rust-analyzer behaves much like\n [`\"rust-analyzer.check.workspace\": false`](#check.workspace).\n\n\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n\nNote: The option must be specified as an array of command line arguments, with\nthe first argument being the name of the command to run.",
"default": null,
"type": [
"null",
@@ -2773,6 +2773,31 @@
{
"title": "Proc Macro",
"properties": {
+ "rust-analyzer.procMacro.processes": {
+ "markdownDescription": "Number of proc-macro server processes to spawn.\n\nControls how many independent `proc-macro-srv` processes rust-analyzer\nruns in parallel to handle macro expansion.",
+ "default": 1,
+ "anyOf": [
+ {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 255
+ },
+ {
+ "type": "string",
+ "enum": [
+ "physical"
+ ],
+ "enumDescriptions": [
+ "Use the number of physical cores"
+ ]
+ }
+ ]
+ }
+ }
+ },
+ {
+ "title": "Proc Macro",
+ "properties": {
"rust-analyzer.procMacro.server": {
"markdownDescription": "Internal config, path to proc-macro server executable.",
"default": null,
@@ -3135,7 +3160,7 @@
"title": "Workspace",
"properties": {
"rust-analyzer.workspace.discoverConfig": {
- "markdownDescription": "Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].\n\n[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.\n`progress_label` is used for the title in progress indicators, whereas `files_to_watch`\nis used to determine which build system-specific files should be watched in order to\nreload rust-analyzer.\n\nBelow is an example of a valid configuration:\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n \"command\": [\n \"rust-project\",\n \"develop-json\"\n ],\n \"progressLabel\": \"rust-analyzer\",\n \"filesToWatch\": [\n \"BUCK\"\n ]\n}\n```\n\n## On `DiscoverWorkspaceConfig::command`\n\n**Warning**: This format is provisional and subject to change.\n\n[`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to\n`DiscoverProjectData::Finished`:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n Error { error: String, source: Option<String> },\n Progress { message: String },\n}\n```\n\nAs JSON, `DiscoverProjectData::Finished` is:\n\n```json\n{\n // the internally-tagged representation of the enum.\n \"kind\": \"finished\",\n // the file used by a non-Cargo build system to define\n // a package or target.\n \"buildfile\": \"rust-analyzer/BUILD\",\n // the contents of a rust-project.json, elided for brevity\n \"project\": {\n \"sysroot\": \"foo\",\n \"crates\": []\n }\n}\n```\n\nIt is encouraged, but not required, to use the other variants on `DiscoverProjectData`\nto provide a more polished end-user experience.\n\n`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be\nsubstituted with the JSON-serialized form of the following enum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum 
DiscoverArgument {\n Path(AbsPathBuf),\n Buildfile(AbsPathBuf),\n}\n```\n\nThe JSON representation of `DiscoverArgument::Path` is:\n\n```json\n{\n \"path\": \"src/main.rs\"\n}\n```\n\nSimilarly, the JSON representation of `DiscoverArgument::Buildfile` is:\n\n```json\n{\n \"buildfile\": \"BUILD\"\n}\n```\n\n`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and\ntherefore, a workspace, whereas `DiscoverArgument::buildfile` is used to to update an\nexisting workspace. As a reference for implementors, buck2's `rust-project` will likely\nbe useful: <https://github.com/facebook/buck2/tree/main/integrations/rust-project>.",
+ "markdownDescription": "Configure a command that rust-analyzer can invoke to\nobtain configuration.\n\nThis is an alternative to manually generating\n`rust-project.json`: it enables rust-analyzer to generate\nrust-project.json on the fly, and regenerate it when\nswitching or modifying projects.\n\nThis is an object with three fields:\n\n* `command`: the shell command to invoke\n\n* `filesToWatch`: which build system-specific files should\nbe watched to trigger regenerating the configuration\n\n* `progressLabel`: the name of the command, used in\nprogress indicators in the IDE\n\nHere's an example of a valid configuration:\n\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n \"command\": [\n \"rust-project\",\n \"develop-json\",\n \"{arg}\"\n ],\n \"progressLabel\": \"buck2/rust-project\",\n \"filesToWatch\": [\n \"BUCK\"\n ]\n}\n```\n\n## Argument Substitutions\n\nIf `command` includes the argument `{arg}`, that argument will be substituted\nwith the JSON-serialized form of the following enum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n Path(AbsPathBuf),\n Buildfile(AbsPathBuf),\n}\n```\n\nrust-analyzer will use the path invocation to find and\ngenerate a `rust-project.json` and therefore a\nworkspace. Example:\n\n\n```norun\nrust-project develop-json '{ \"path\": \"myproject/src/main.rs\" }'\n```\n\nrust-analyzer will use build file invocations to update an\nexisting workspace. Example:\n\nOr with a build file and the configuration above:\n\n```norun\nrust-project develop-json '{ \"buildfile\": \"myproject/BUCK\" }'\n```\n\nAs a reference for implementors, buck2's `rust-project`\nwill likely be useful:\n<https://github.com/facebook/buck2/tree/main/integrations/rust-project>.\n\n## Discover Command Output\n\n**Warning**: This format is provisional and subject to change.\n\nThe discover command should output JSON objects, one per\nline (JSONL format). 
These objects should correspond to\nthis Rust data type:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n Error { error: String, source: Option<String> },\n Progress { message: String },\n}\n```\n\nFor example, a progress event:\n\n```json\n{\"kind\":\"progress\",\"message\":\"generating rust-project.json\"}\n```\n\nA finished event can look like this (expanded and\ncommented for readability):\n\n```json\n{\n // the internally-tagged representation of the enum.\n \"kind\": \"finished\",\n // the file used by a non-Cargo build system to define\n // a package or target.\n \"buildfile\": \"rust-analyzer/BUCK\",\n // the contents of a rust-project.json, elided for brevity\n \"project\": {\n \"sysroot\": \"foo\",\n \"crates\": []\n }\n}\n```\n\nOnly the finished event is required, but the other\nvariants are encouraged to give users more feedback about\nprogress or errors.",
"default": null,
"anyOf": [
{
diff --git a/lib/line-index/src/lib.rs b/lib/line-index/src/lib.rs
index 905da330e6..d5f0584d98 100644
--- a/lib/line-index/src/lib.rs
+++ b/lib/line-index/src/lib.rs
@@ -207,7 +207,7 @@ impl LineIndex {
}
}
-/// This is adapted from the rustc_span crate, https://github.com/rust-lang/rust/blob/de59844c98f7925242a798a72c59dc3610dd0e2c/compiler/rustc_span/src/analyze_source_file.rs
+/// This is adapted from the rustc_span crate, <https://github.com/rust-lang/rust/blob/de59844c98f7925242a798a72c59dc3610dd0e2c/compiler/rustc_span/src/analyze_source_file.rs>
fn analyze_source_file(src: &str) -> (Vec<TextSize>, IntMap<u32, Box<[WideChar]>>) {
assert!(src.len() < !0u32 as usize);
let mut lines = vec![];
diff --git a/lib/smol_str/CHANGELOG.md b/lib/smol_str/CHANGELOG.md
index b7da6d18a4..4aa25fa134 100644
--- a/lib/smol_str/CHANGELOG.md
+++ b/lib/smol_str/CHANGELOG.md
@@ -1,6 +1,6 @@
# Changelog
-## Unreleased
+## 0.3.5 - 2026-01-08
- Optimise `SmolStr::clone` 4-5x speedup inline, 0.5x heap (slow down).
## 0.3.4 - 2025-10-23
diff --git a/lib/smol_str/Cargo.toml b/lib/smol_str/Cargo.toml
index 118b25993f..4e7844b49e 100644
--- a/lib/smol_str/Cargo.toml
+++ b/lib/smol_str/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "smol_str"
-version = "0.3.4"
+version = "0.3.5"
description = "small-string optimized string type with O(1) clone"
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/smol_str"
diff --git a/rust-version b/rust-version
index 5ffe95a0b5..a1011c4a0a 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-e7d44143a12a526488e4f0c0d7ea8e62a4fe9354
+ba284f468cd2cda48420251efc991758ec13d450