Unnamed repository; edit this file 'description' to name the repository.
Auto merge of #134681 - lnicola:sync-from-ra, r=lnicola
Subtree update of `rust-analyzer` r? `@ghost`
bors 2024-12-23
parent 5401151 · parent 80c97ca · commit 485f5e8
-rw-r--r--Cargo.lock69
-rw-r--r--Cargo.toml15
-rw-r--r--crates/hir-def/src/body.rs52
-rw-r--r--crates/hir-def/src/body/lower.rs20
-rw-r--r--crates/hir-def/src/body/lower/asm.rs10
-rw-r--r--crates/hir-def/src/body/pretty.rs1
-rw-r--r--crates/hir-def/src/body/tests.rs18
-rw-r--r--crates/hir-def/src/hir/format_args.rs38
-rw-r--r--crates/hir-def/src/item_scope.rs124
-rw-r--r--crates/hir-def/src/nameres/collector.rs56
-rw-r--r--crates/hir-def/src/nameres/path_resolution.rs12
-rw-r--r--crates/hir-def/src/nameres/tests/incremental.rs2
-rw-r--r--crates/hir-def/src/per_ns.rs77
-rw-r--r--crates/hir-def/src/resolver.rs24
-rw-r--r--crates/hir-expand/src/db.rs16
-rw-r--r--crates/hir-ty/src/chalk_db.rs11
-rw-r--r--crates/hir-ty/src/diagnostics/unsafe_check.rs13
-rw-r--r--crates/hir/src/diagnostics.rs21
-rw-r--r--crates/hir/src/lib.rs6
-rw-r--r--crates/hir/src/semantics.rs43
-rw-r--r--crates/ide-assists/src/assist_context.rs5
-rw-r--r--crates/ide-assists/src/handlers/destructure_tuple_binding.rs141
-rw-r--r--crates/ide-assists/src/handlers/extract_variable.rs183
-rw-r--r--crates/ide-assists/src/handlers/qualify_method_call.rs2
-rw-r--r--crates/ide-assists/src/handlers/qualify_path.rs10
-rw-r--r--crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs2
-rw-r--r--crates/ide-assists/src/handlers/unnecessary_async.rs43
-rw-r--r--crates/ide-assists/src/handlers/wrap_return_type.rs2
-rw-r--r--crates/ide-assists/src/tests/generated.rs2
-rw-r--r--crates/ide-completion/src/completions/dot.rs50
-rw-r--r--crates/ide-completion/src/completions/item_list/trait_impl.rs30
-rw-r--r--crates/ide-completion/src/completions/keyword.rs78
-rw-r--r--crates/ide-completion/src/completions/postfix.rs110
-rw-r--r--crates/ide-completion/src/context.rs4
-rw-r--r--crates/ide-completion/src/context/analysis.rs726
-rw-r--r--crates/ide-completion/src/item.rs56
-rw-r--r--crates/ide-completion/src/render.rs540
-rw-r--r--crates/ide-completion/src/tests.rs39
-rw-r--r--crates/ide-completion/src/tests/attribute.rs24
-rw-r--r--crates/ide-completion/src/tests/expression.rs462
-rw-r--r--crates/ide-completion/src/tests/flyimport.rs106
-rw-r--r--crates/ide-completion/src/tests/fn_param.rs2
-rw-r--r--crates/ide-completion/src/tests/item.rs24
-rw-r--r--crates/ide-completion/src/tests/item_list.rs18
-rw-r--r--crates/ide-completion/src/tests/pattern.rs70
-rw-r--r--crates/ide-completion/src/tests/predicate.rs42
-rw-r--r--crates/ide-completion/src/tests/proc_macros.rs104
-rw-r--r--crates/ide-completion/src/tests/record.rs48
-rw-r--r--crates/ide-completion/src/tests/special.rs165
-rw-r--r--crates/ide-completion/src/tests/type_pos.rs378
-rw-r--r--crates/ide-completion/src/tests/use_tree.rs14
-rw-r--r--crates/ide-db/src/imports/import_assets.rs213
-rw-r--r--crates/ide-db/src/items_locator.rs59
-rw-r--r--crates/ide-db/src/syntax_helpers/suggest_name.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/missing_unsafe.rs16
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_field.rs19
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_ident.rs16
-rw-r--r--crates/ide/src/runnables.rs19
-rw-r--r--crates/ide/src/typing.rs19
-rw-r--r--crates/load-cargo/src/lib.rs2
-rw-r--r--crates/parser/src/grammar/items.rs18
-rw-r--r--crates/parser/src/tests/top_entries.rs30
-rw-r--r--crates/parser/test_data/generated/runner.rs4
-rw-r--r--crates/parser/test_data/parser/err/0002_duplicate_shebang.rast18
-rw-r--r--crates/parser/test_data/parser/err/0008_item_block_recovery.rast14
-rw-r--r--crates/parser/test_data/parser/err/0013_invalid_type.rast6
-rw-r--r--crates/parser/test_data/parser/err/0044_item_modifiers.rast14
-rw-r--r--crates/parser/test_data/parser/err/0055_impl_use.rast7
-rw-r--r--crates/parser/test_data/parser/inline/err/gen_fn.rast12
-rw-r--r--crates/parser/test_data/parser/inline/err/path_item_without_excl.rast8
-rw-r--r--crates/parser/test_data/parser/inline/err/path_item_without_excl.rs1
-rw-r--r--crates/paths/Cargo.toml3
-rw-r--r--crates/proc-macro-api/Cargo.toml9
-rw-r--r--crates/proc-macro-api/src/lib.rs5
-rw-r--r--crates/proc-macro-api/src/msg.rs5
-rw-r--r--crates/proc-macro-api/src/msg/flat.rs51
-rw-r--r--crates/proc-macro-api/src/process.rs29
-rw-r--r--crates/proc-macro-srv/Cargo.toml4
-rw-r--r--crates/project-model/Cargo.toml1
-rw-r--r--crates/project-model/src/build_dependencies.rs2
-rw-r--r--crates/project-model/src/cargo_workspace.rs2
-rw-r--r--crates/ra-salsa/Cargo.toml2
-rw-r--r--crates/rust-analyzer/Cargo.toml1
-rw-r--r--crates/rust-analyzer/src/config.rs3
-rw-r--r--crates/rust-analyzer/src/diagnostics.rs68
-rw-r--r--crates/rust-analyzer/src/flycheck.rs120
-rw-r--r--crates/rust-analyzer/src/global_state.rs8
-rw-r--r--crates/rust-analyzer/src/handlers/dispatch.rs2
-rw-r--r--crates/rust-analyzer/src/handlers/notification.rs25
-rw-r--r--crates/rust-analyzer/src/handlers/request.rs28
-rw-r--r--crates/rust-analyzer/src/lib.rs7
-rw-r--r--crates/rust-analyzer/src/lsp/ext.rs4
-rw-r--r--crates/rust-analyzer/src/lsp/to_proto.rs56
-rw-r--r--crates/rust-analyzer/src/main_loop.rs26
-rw-r--r--crates/rust-analyzer/src/reload.rs1
-rw-r--r--crates/rust-analyzer/src/test_runner.rs3
-rw-r--r--crates/rust-analyzer/src/tracing/json.rs2
-rw-r--r--crates/rust-analyzer/tests/slow-tests/support.rs39
-rw-r--r--crates/span/Cargo.toml5
-rw-r--r--crates/span/src/hygiene.rs5
-rw-r--r--crates/span/src/lib.rs73
-rw-r--r--crates/syntax-bridge/Cargo.toml3
-rw-r--r--crates/syntax/rust.ungram2
-rw-r--r--crates/syntax/src/ast/generated/nodes.rs2
-rw-r--r--docs/dev/lsp-extensions.md2
-rw-r--r--docs/user/generated_config.adoc3
-rw-r--r--editors/code/package.json2
-rw-r--r--editors/code/src/ctx.ts2
-rw-r--r--lib/lsp-server/Cargo.toml5
-rw-r--r--lib/lsp-server/src/msg.rs9
-rw-r--r--lib/lsp-server/src/req_queue.rs4
-rw-r--r--rust-version2
-rw-r--r--xtask/Cargo.toml3
113 files changed, 3082 insertions, 2151 deletions
diff --git a/Cargo.lock b/Cargo.lock
index ab6580a97a..2323fdf533 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1011,24 +1011,25 @@ checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
[[package]]
name = "lsp-server"
version = "0.7.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "550446e84739dcaf6d48a4a093973850669e13e8a34d8f8d64851041be267cd9"
dependencies = [
"crossbeam-channel",
- "ctrlc",
"log",
- "lsp-types",
"serde",
"serde_json",
]
[[package]]
name = "lsp-server"
-version = "0.7.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "550446e84739dcaf6d48a4a093973850669e13e8a34d8f8d64851041be267cd9"
+version = "0.7.8"
dependencies = [
"crossbeam-channel",
+ "ctrlc",
"log",
+ "lsp-types",
"serde",
+ "serde_derive",
"serde_json",
]
@@ -1289,7 +1290,6 @@ name = "paths"
version = "0.0.0"
dependencies = [
"camino",
- "serde",
]
[[package]]
@@ -1352,12 +1352,12 @@ dependencies = [
name = "proc-macro-api"
version = "0.0.0"
dependencies = [
- "base-db",
"indexmap",
"intern",
"paths",
"rustc-hash 2.0.0",
"serde",
+ "serde_derive",
"serde_json",
"span",
"stdx",
@@ -1369,7 +1369,6 @@ dependencies = [
name = "proc-macro-srv"
version = "0.0.0"
dependencies = [
- "base-db",
"expect-test",
"intern",
"libloading",
@@ -1448,6 +1447,7 @@ dependencies = [
"rustc-hash 2.0.0",
"semver",
"serde",
+ "serde_derive",
"serde_json",
"span",
"stdx",
@@ -1507,9 +1507,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_abi"
-version = "0.85.0"
+version = "0.87.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af462c3a2d524b84a51b6848b439787f01b35c6c1086d3e3086a5f5eea92ed9a"
+checksum = "28b782af0a7a8df16ddf43cd70da9f17bc3b1ce712c9e4992b6edb16f5f53632"
dependencies = [
"bitflags 2.6.0",
"ra-ap-rustc_index",
@@ -1518,9 +1518,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index"
-version = "0.85.0"
+version = "0.87.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be6bb8cb0ab78d94a222f1ffd3e87254cdfb57413382b8d6ebe26a85482f99d1"
+checksum = "ce5742f134960482f543b35ecebec3cacc6d79a9a685713518b4d8d70c5f9aa8"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",
@@ -1528,9 +1528,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index_macros"
-version = "0.85.0"
+version = "0.87.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c24b1641455b46e87435b7321219672077066e678963d239a4a2904732979b16"
+checksum = "d7ea011fcf68309a8835ad01d91c032cb18444617b00e2cab21d45b208164441"
dependencies = [
"proc-macro2",
"quote",
@@ -1539,9 +1539,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_lexer"
-version = "0.85.0"
+version = "0.87.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94daa86974417981fed2f12bd8fb00158dfa6fee561152bed689278c846d0272"
+checksum = "eb76f0a4d4c20859e41f0a23bff0f37ab9ca9171c214a6c7dd72ea69434865dc"
dependencies = [
"unicode-properties",
"unicode-xid",
@@ -1549,9 +1549,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_parse_format"
-version = "0.85.0"
+version = "0.87.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc07f6bd581746f358e39c4b6bfe8d455b3d6ad1a857821016d0d42eeb5e1e3e"
+checksum = "06080bd35078305421a62da77f3c128482d8d44441b6da8ce9d146d1cd9cdb5b"
dependencies = [
"ra-ap-rustc_index",
"ra-ap-rustc_lexer",
@@ -1559,9 +1559,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_pattern_analysis"
-version = "0.85.0"
+version = "0.87.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f49b86e1276c1c3c72898410def29b699415f4e7d1dfb3531daf79794694372"
+checksum = "68a3154fe4c20c177d7b3c678a2d3a97aba0cca156ddef88959915041889daf0"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.0.0",
@@ -1676,7 +1676,7 @@ dependencies = [
"intern",
"itertools",
"load-cargo",
- "lsp-server 0.7.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lsp-server 0.7.7",
"lsp-types",
"memchr",
"mimalloc",
@@ -1695,6 +1695,7 @@ dependencies = [
"scip",
"semver",
"serde",
+ "serde_derive",
"serde_json",
"stdx",
"syntax",
@@ -1822,18 +1823,18 @@ dependencies = [
[[package]]
name = "serde"
-version = "1.0.206"
+version = "1.0.216"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b3e4cd94123dd520a128bcd11e34d9e9e423e7e3e50425cb1b4b1e3549d0284"
+checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.206"
+version = "1.0.216"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fabfb6138d2383ea8208cf98ccf69cdfb1aff4088460681d84189aa259762f97"
+checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e"
dependencies = [
"proc-macro2",
"quote",
@@ -1925,12 +1926,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a"
[[package]]
-name = "stable_deref_trait"
-version = "1.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
-
-[[package]]
name = "stdx"
version = "0.0.0"
dependencies = [
@@ -1946,9 +1941,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.74"
+version = "2.0.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7"
+checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
dependencies = [
"proc-macro2",
"quote",
@@ -2264,13 +2259,9 @@ dependencies = [
[[package]]
name = "triomphe"
-version = "0.1.13"
+version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6631e42e10b40c0690bf92f404ebcfe6e1fdb480391d15f17cc8e96eeed5369"
-dependencies = [
- "serde",
- "stable_deref_trait",
-]
+checksum = "ef8f7726da4807b58ea5c96fdc122f80702030edc33b35aff9190a51148ccc85"
[[package]]
name = "tt"
diff --git a/Cargo.toml b/Cargo.toml
index 8086569a78..7f3abcccc4 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -85,11 +85,11 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
-ra-ap-rustc_lexer = { version = "0.85", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.85", default-features = false }
-ra-ap-rustc_index = { version = "0.85", default-features = false }
-ra-ap-rustc_abi = { version = "0.85", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.85", default-features = false }
+ra-ap-rustc_lexer = { version = "0.87", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.87", default-features = false }
+ra-ap-rustc_index = { version = "0.87", default-features = false }
+ra-ap-rustc_abi = { version = "0.87", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.87", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
test-fixture = { path = "./crates/test-fixture" }
@@ -138,7 +138,8 @@ pulldown-cmark = { version = "0.9.0", default-features = false }
rayon = "1.8.0"
rustc-hash = "2.0.0"
semver = "1.0.14"
-serde = { version = "1.0.192", features = ["derive"] }
+serde = { version = "1.0.192" }
+serde_derive = { version = "1.0.192" }
serde_json = "1.0.108"
smallvec = { version = "1.10.0", features = [
"const_new",
@@ -157,7 +158,7 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features =
"time",
"tracing-log",
] }
-triomphe = { version = "0.1.10", default-features = false, features = ["std"] }
+triomphe = { version = "0.1.14", default-features = false, features = ["std"] }
url = "2.3.1"
xshell = "0.2.5"
diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs
index 867bee95be..433a956ff9 100644
--- a/crates/hir-def/src/body.rs
+++ b/crates/hir-def/src/body.rs
@@ -18,6 +18,7 @@ use smallvec::SmallVec;
use span::{Edition, MacroFileId};
use syntax::{ast, AstPtr, SyntaxNodePtr};
use triomphe::Arc;
+use tt::TextRange;
use crate::{
db::DefDatabase,
@@ -143,15 +144,7 @@ pub struct BodySourceMap {
pub types: TypesSourceMap,
- // FIXME: Make this a sane struct.
- template_map: Option<
- Box<(
- // format_args!
- FxHashMap<ExprId, (HygieneId, Vec<(syntax::TextRange, Name)>)>,
- // asm!
- FxHashMap<ExprId, Vec<Vec<(syntax::TextRange, usize)>>>,
- )>,
- >,
+ template_map: Option<Box<FormatTemplate>>,
expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, MacroFileId>,
@@ -160,6 +153,20 @@ pub struct BodySourceMap {
diagnostics: Vec<BodyDiagnostic>,
}
+#[derive(Default, Debug, Eq, PartialEq)]
+struct FormatTemplate {
+ /// A map from `format_args!()` expressions to their captures.
+ format_args_to_captures: FxHashMap<ExprId, (HygieneId, Vec<(syntax::TextRange, Name)>)>,
+ /// A map from `asm!()` expressions to their captures.
+ asm_to_captures: FxHashMap<ExprId, Vec<Vec<(syntax::TextRange, usize)>>>,
+ /// A map from desugared expressions of implicit captures to their source.
+ ///
+ /// The value stored for each capture is its template literal and offset inside it. The template literal
+ /// is from the `format_args[_nl]!()` macro and so needs to be mapped up once to go to the user-written
+ /// template.
+ implicit_capture_to_source: FxHashMap<ExprId, InFile<(AstPtr<ast::Expr>, TextRange)>>,
+}
+
#[derive(Debug, Eq, PartialEq)]
pub enum BodyDiagnostic {
InactiveCode { node: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions },
@@ -798,18 +805,29 @@ impl BodySourceMap {
node: InFile<&ast::FormatArgsExpr>,
) -> Option<(HygieneId, &[(syntax::TextRange, Name)])> {
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
- let (hygiene, names) =
- self.template_map.as_ref()?.0.get(&self.expr_map.get(&src)?.as_expr()?)?;
+ let (hygiene, names) = self
+ .template_map
+ .as_ref()?
+ .format_args_to_captures
+ .get(&self.expr_map.get(&src)?.as_expr()?)?;
Some((*hygiene, &**names))
}
+ pub fn format_args_implicit_capture(
+ &self,
+ capture_expr: ExprId,
+ ) -> Option<InFile<(AstPtr<ast::Expr>, TextRange)>> {
+ self.template_map.as_ref()?.implicit_capture_to_source.get(&capture_expr).copied()
+ }
+
pub fn asm_template_args(
&self,
node: InFile<&ast::AsmExpr>,
) -> Option<(ExprId, &[Vec<(syntax::TextRange, usize)>])> {
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
let expr = self.expr_map.get(&src)?.as_expr()?;
- Some(expr).zip(self.template_map.as_ref()?.1.get(&expr).map(std::ops::Deref::deref))
+ Some(expr)
+ .zip(self.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref))
}
/// Get a reference to the body source map's diagnostics.
@@ -835,8 +853,14 @@ impl BodySourceMap {
types,
} = self;
if let Some(template_map) = template_map {
- template_map.0.shrink_to_fit();
- template_map.1.shrink_to_fit();
+ let FormatTemplate {
+ format_args_to_captures,
+ asm_to_captures,
+ implicit_capture_to_source,
+ } = &mut **template_map;
+ format_args_to_captures.shrink_to_fit();
+ asm_to_captures.shrink_to_fit();
+ implicit_capture_to_source.shrink_to_fit();
}
expr_map.shrink_to_fit();
expr_map_back.shrink_to_fit();
diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs
index 3b73d40963..eed9f9468f 100644
--- a/crates/hir-def/src/body/lower.rs
+++ b/crates/hir-def/src/body/lower.rs
@@ -1957,8 +1957,10 @@ impl ExprCollector<'_> {
_ => None,
});
let mut mappings = vec![];
- let (fmt, hygiene) = match template.and_then(|it| self.expand_macros_to_string(it)) {
- Some((s, is_direct_literal)) => {
+ let (fmt, hygiene) = match template.and_then(|template| {
+ self.expand_macros_to_string(template.clone()).map(|it| (it, template))
+ }) {
+ Some(((s, is_direct_literal), template)) => {
let call_ctx = self.expander.syntax_context();
let hygiene = self.hygiene_id_for(s.syntax().text_range().start());
let fmt = format_args::parse(
@@ -1966,8 +1968,18 @@ impl ExprCollector<'_> {
fmt_snippet,
args,
is_direct_literal,
- |name| {
+ |name, range| {
let expr_id = self.alloc_expr_desugared(Expr::Path(Path::from(name)));
+ if let Some(range) = range {
+ self.source_map
+ .template_map
+ .get_or_insert_with(Default::default)
+ .implicit_capture_to_source
+ .insert(
+ expr_id,
+ self.expander.in_file((AstPtr::new(&template), range)),
+ );
+ }
if !hygiene.is_root() {
self.body.expr_hygiene.insert(expr_id, hygiene);
}
@@ -2139,7 +2151,7 @@ impl ExprCollector<'_> {
self.source_map
.template_map
.get_or_insert_with(Default::default)
- .0
+ .format_args_to_captures
.insert(idx, (hygiene, mappings));
idx
}
diff --git a/crates/hir-def/src/body/lower/asm.rs b/crates/hir-def/src/body/lower/asm.rs
index c1b58dbdd0..68c7173d1e 100644
--- a/crates/hir-def/src/body/lower/asm.rs
+++ b/crates/hir-def/src/body/lower/asm.rs
@@ -6,7 +6,7 @@ use syntax::{
ast::{self, HasName, IsString},
AstNode, AstPtr, AstToken, T,
};
-use tt::{TextRange, TextSize};
+use tt::TextRange;
use crate::{
body::lower::{ExprCollector, FxIndexSet},
@@ -224,7 +224,7 @@ impl ExprCollector<'_> {
TextRange::new(
inner_span.start.try_into().unwrap(),
inner_span.end.try_into().unwrap(),
- ) - TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1)
+ )
})
};
for piece in unverified_pieces {
@@ -268,7 +268,11 @@ impl ExprCollector<'_> {
Expr::InlineAsm(InlineAsm { operands: operands.into_boxed_slice(), options }),
syntax_ptr,
);
- self.source_map.template_map.get_or_insert_with(Default::default).1.insert(idx, mappings);
+ self.source_map
+ .template_map
+ .get_or_insert_with(Default::default)
+ .asm_to_captures
+ .insert(idx, mappings);
idx
}
}
diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs
index f8b6eef342..52b91b522a 100644
--- a/crates/hir-def/src/body/pretty.rs
+++ b/crates/hir-def/src/body/pretty.rs
@@ -685,6 +685,7 @@ impl Printer<'_> {
self.print_binding(*id);
if let Some(pat) = subpat {
self.whitespace();
+ w!(self, "@ ");
self.print_pat(*pat);
}
}
diff --git a/crates/hir-def/src/body/tests.rs b/crates/hir-def/src/body/tests.rs
index 8f01091584..13ba4db606 100644
--- a/crates/hir-def/src/body/tests.rs
+++ b/crates/hir-def/src/body/tests.rs
@@ -426,3 +426,21 @@ fn f() {
"should have a binding for `B`",
);
}
+
+#[test]
+fn regression_pretty_print_bind_pat() {
+ let (db, body, owner) = lower(
+ r#"
+fn foo() {
+ let v @ u = 123;
+}
+"#,
+ );
+ let printed = body.pretty_print(&db, owner, Edition::CURRENT);
+ assert_eq!(
+ printed,
+ r#"fn foo() -> () {
+ let v @ u = 123;
+}"#
+ );
+}
diff --git a/crates/hir-def/src/hir/format_args.rs b/crates/hir-def/src/hir/format_args.rs
index e1c3bd25bc..e64e498c17 100644
--- a/crates/hir-def/src/hir/format_args.rs
+++ b/crates/hir-def/src/hir/format_args.rs
@@ -1,5 +1,6 @@
//! Parses `format_args` input.
+use either::Either;
use hir_expand::name::Name;
use intern::Symbol;
use rustc_parse_format as parse;
@@ -7,7 +8,7 @@ use span::SyntaxContextId;
use stdx::TupleExt;
use syntax::{
ast::{self, IsString},
- TextRange, TextSize,
+ TextRange,
};
use crate::hir::ExprId;
@@ -33,7 +34,7 @@ pub enum FormatArgsPiece {
Placeholder(FormatPlaceholder),
}
-#[derive(Copy, Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FormatPlaceholder {
/// Index into [`FormatArgs::arguments`].
pub argument: FormatArgPosition,
@@ -45,11 +46,11 @@ pub struct FormatPlaceholder {
pub format_options: FormatOptions,
}
-#[derive(Copy, Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FormatArgPosition {
/// Which argument this position refers to (Ok),
/// or would've referred to if it existed (Err).
- pub index: Result<usize, usize>,
+ pub index: Result<usize, Either<usize, Name>>,
/// What kind of position this is. See [`FormatArgPositionKind`].
pub kind: FormatArgPositionKind,
/// The span of the name or number.
@@ -88,7 +89,7 @@ pub enum FormatTrait {
UpperHex,
}
-#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)]
+#[derive(Clone, Default, Debug, PartialEq, Eq)]
pub struct FormatOptions {
/// The width. E.g. `{:5}` or `{:width$}`.
pub width: Option<FormatCount>,
@@ -133,7 +134,7 @@ pub enum FormatAlignment {
Center,
}
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
pub enum FormatCount {
/// `{:5}` or `{:.5}`
Literal(usize),
@@ -173,7 +174,7 @@ pub(crate) fn parse(
fmt_snippet: Option<String>,
mut args: FormatArgumentsCollector,
is_direct_literal: bool,
- mut synth: impl FnMut(Name) -> ExprId,
+ mut synth: impl FnMut(Name, Option<TextRange>) -> ExprId,
mut record_usage: impl FnMut(Name, Option<TextRange>),
call_ctx: SyntaxContextId,
) -> FormatArgs {
@@ -192,7 +193,6 @@ pub(crate) fn parse(
}
None => None,
};
-
let mut parser =
parse::Parser::new(&text, str_style, fmt_snippet, false, parse::ParseMode::Format);
@@ -217,7 +217,6 @@ pub(crate) fn parse(
let to_span = |inner_span: parse::InnerSpan| {
is_source_literal.then(|| {
TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap())
- - TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1)
})
};
@@ -245,8 +244,8 @@ pub(crate) fn parse(
Ok(index)
} else {
// Doesn't exist as an explicit argument.
- invalid_refs.push((index, span, used_as, kind));
- Err(index)
+ invalid_refs.push((Either::Left(index), span, used_as, kind));
+ Err(Either::Left(index))
}
}
ArgRef::Name(name, span) => {
@@ -265,14 +264,17 @@ pub(crate) fn parse(
// For the moment capturing variables from format strings expanded from macros is
// disabled (see RFC #2795)
// FIXME: Diagnose
+ invalid_refs.push((Either::Right(name.clone()), span, used_as, kind));
+ Err(Either::Right(name))
+ } else {
+ record_usage(name.clone(), span);
+ Ok(args.add(FormatArgument {
+ kind: FormatArgumentKind::Captured(name.clone()),
+ // FIXME: This is problematic, we might want to synthesize a dummy
+ // expression proper and/or desugar these.
+ expr: synth(name, span),
+ }))
}
- record_usage(name.clone(), span);
- Ok(args.add(FormatArgument {
- kind: FormatArgumentKind::Captured(name.clone()),
- // FIXME: This is problematic, we might want to synthesize a dummy
- // expression proper and/or desugar these.
- expr: synth(name),
- }))
}
}
};
diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs
index e96e38ecee..2c3eb5c8e5 100644
--- a/crates/hir-def/src/item_scope.rs
+++ b/crates/hir-def/src/item_scope.rs
@@ -16,7 +16,7 @@ use syntax::ast;
use crate::{
db::DefDatabase,
- per_ns::PerNs,
+ per_ns::{Item, MacrosItem, PerNs, TypesItem, ValuesItem},
visibility::{Visibility, VisibilityExplicitness},
AdtId, BuiltinType, ConstId, ExternCrateId, FxIndexMap, HasModule, ImplId, LocalModuleId,
Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId,
@@ -80,9 +80,9 @@ pub struct ItemScope {
/// Defs visible in this scope. This includes `declarations`, but also
/// imports. The imports belong to this module and can be resolved by using them on
/// the `use_imports_*` fields.
- types: FxIndexMap<Name, (ModuleDefId, Visibility, Option<ImportOrExternCrate>)>,
- values: FxIndexMap<Name, (ModuleDefId, Visibility, Option<ImportId>)>,
- macros: FxIndexMap<Name, (MacroId, Visibility, Option<ImportId>)>,
+ types: FxIndexMap<Name, TypesItem>,
+ values: FxIndexMap<Name, ValuesItem>,
+ macros: FxIndexMap<Name, MacrosItem>,
unresolved: FxHashSet<Name>,
/// The defs declared in this scope. Each def has a single scope where it is
@@ -92,7 +92,7 @@ pub struct ItemScope {
impls: Vec<ImplId>,
unnamed_consts: Vec<ConstId>,
/// Traits imported via `use Trait as _;`.
- unnamed_trait_imports: FxHashMap<TraitId, (Visibility, Option<ImportId>)>,
+ unnamed_trait_imports: FxHashMap<TraitId, Item<()>>,
// the resolutions of the imports of this scope
use_imports_types: FxHashMap<ImportOrExternCrate, ImportOrDef>,
@@ -187,7 +187,7 @@ impl ItemScope {
import = i;
}
ImportOrDef::Def(ModuleDefId::MacroId(def)) => {
- res.macros = Some((def, Visibility::Public, None));
+ res.macros = Some(Item { def, vis: Visibility::Public, import: None });
break;
}
_ => break,
@@ -203,7 +203,7 @@ impl ItemScope {
import = i;
}
ImportOrDef::Def(def) => {
- res.types = Some((def, Visibility::Public, None));
+ res.types = Some(Item { def, vis: Visibility::Public, import: None });
break;
}
_ => break,
@@ -219,7 +219,7 @@ impl ItemScope {
import = i;
}
ImportOrDef::Def(def) => {
- res.values = Some((def, Visibility::Public, None));
+ res.values = Some(Item { def, vis: Visibility::Public, import: None });
break;
}
_ => break,
@@ -253,8 +253,8 @@ impl ItemScope {
}
pub(crate) fn modules_in_scope(&self) -> impl Iterator<Item = (ModuleId, Visibility)> + '_ {
- self.types.values().copied().filter_map(|(def, vis, _)| match def {
- ModuleDefId::ModuleId(module) => Some((module, vis)),
+ self.types.values().filter_map(|ns| match ns.def {
+ ModuleDefId::ModuleId(module) => Some((module, ns.vis)),
_ => None,
})
}
@@ -283,20 +283,20 @@ impl ItemScope {
}
pub(crate) fn type_(&self, name: &Name) -> Option<(ModuleDefId, Visibility)> {
- self.types.get(name).copied().map(|(a, b, _)| (a, b))
+ self.types.get(name).map(|item| (item.def, item.vis))
}
/// XXX: this is O(N) rather than O(1), try to not introduce new usages.
pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility, /*declared*/ bool)> {
match item {
- ItemInNs::Macros(def) => self.macros.iter().find_map(|(name, &(other_def, vis, i))| {
- (other_def == def).then_some((name, vis, i.is_none()))
+ ItemInNs::Macros(def) => self.macros.iter().find_map(|(name, other_def)| {
+ (other_def.def == def).then_some((name, other_def.vis, other_def.import.is_none()))
}),
- ItemInNs::Types(def) => self.types.iter().find_map(|(name, &(other_def, vis, i))| {
- (other_def == def).then_some((name, vis, i.is_none()))
+ ItemInNs::Types(def) => self.types.iter().find_map(|(name, other_def)| {
+ (other_def.def == def).then_some((name, other_def.vis, other_def.import.is_none()))
}),
- ItemInNs::Values(def) => self.values.iter().find_map(|(name, &(other_def, vis, i))| {
- (other_def == def).then_some((name, vis, i.is_none()))
+ ItemInNs::Values(def) => self.values.iter().find_map(|(name, other_def)| {
+ (other_def.def == def).then_some((name, other_def.vis, other_def.import.is_none()))
}),
}
}
@@ -311,22 +311,34 @@ impl ItemScope {
ItemInNs::Macros(def) => self
.macros
.iter()
- .filter_map(|(name, &(other_def, vis, i))| {
- (other_def == def).then_some((name, vis, i.is_none()))
+ .filter_map(|(name, other_def)| {
+ (other_def.def == def).then_some((
+ name,
+ other_def.vis,
+ other_def.import.is_none(),
+ ))
})
.find_map(|(a, b, c)| cb(a, b, c)),
ItemInNs::Types(def) => self
.types
.iter()
- .filter_map(|(name, &(other_def, vis, i))| {
- (other_def == def).then_some((name, vis, i.is_none()))
+ .filter_map(|(name, other_def)| {
+ (other_def.def == def).then_some((
+ name,
+ other_def.vis,
+ other_def.import.is_none(),
+ ))
})
.find_map(|(a, b, c)| cb(a, b, c)),
ItemInNs::Values(def) => self
.values
.iter()
- .filter_map(|(name, &(other_def, vis, i))| {
- (other_def == def).then_some((name, vis, i.is_none()))
+ .filter_map(|(name, other_def)| {
+ (other_def.def == def).then_some((
+ name,
+ other_def.vis,
+ other_def.import.is_none(),
+ ))
})
.find_map(|(a, b, c)| cb(a, b, c)),
}
@@ -335,7 +347,7 @@ impl ItemScope {
pub(crate) fn traits(&self) -> impl Iterator<Item = TraitId> + '_ {
self.types
.values()
- .filter_map(|&(def, _, _)| match def {
+ .filter_map(|def| match def.def {
ModuleDefId::TraitId(t) => Some(t),
_ => None,
})
@@ -344,13 +356,13 @@ impl ItemScope {
pub(crate) fn resolutions(&self) -> impl Iterator<Item = (Option<Name>, PerNs)> + '_ {
self.entries().map(|(name, res)| (Some(name.clone()), res)).chain(
- self.unnamed_trait_imports.iter().map(|(tr, (vis, i))| {
+ self.unnamed_trait_imports.iter().map(|(tr, trait_)| {
(
None,
PerNs::types(
ModuleDefId::TraitId(*tr),
- *vis,
- i.map(ImportOrExternCrate::Import),
+ trait_.vis,
+ trait_.import.map(ImportOrExternCrate::Import),
),
)
}),
@@ -464,12 +476,12 @@ impl ItemScope {
// FIXME: This is only used in collection, we should move the relevant parts of it out of ItemScope
pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> {
- self.unnamed_trait_imports.get(&tr).copied().map(|(a, _)| a)
+ self.unnamed_trait_imports.get(&tr).map(|trait_| trait_.vis)
}
pub(crate) fn push_unnamed_trait(&mut self, tr: TraitId, vis: Visibility) {
// FIXME: import
- self.unnamed_trait_imports.insert(tr, (vis, None));
+ self.unnamed_trait_imports.insert(tr, Item { def: (), vis, import: None });
}
pub(crate) fn push_res_with_import(
@@ -502,7 +514,7 @@ impl ItemScope {
}
None | Some(ImportType::Glob(_)) => None,
};
- let prev = std::mem::replace(&mut fld.2, import);
+ let prev = std::mem::replace(&mut fld.import, import);
if let Some(import) = import {
self.use_imports_types.insert(
import,
@@ -513,7 +525,7 @@ impl ItemScope {
Some(ImportOrExternCrate::ExternCrate(import)) => {
ImportOrDef::ExternCrate(import)
}
- None => ImportOrDef::Def(fld.0),
+ None => ImportOrDef::Def(fld.def),
},
);
}
@@ -540,7 +552,7 @@ impl ItemScope {
}
None | Some(ImportType::Glob(_)) => None,
};
- let prev = std::mem::replace(&mut fld.2, import);
+ let prev = std::mem::replace(&mut fld.import, import);
if let Some(import) = import {
self.use_imports_types.insert(
import,
@@ -551,7 +563,7 @@ impl ItemScope {
Some(ImportOrExternCrate::ExternCrate(import)) => {
ImportOrDef::ExternCrate(import)
}
- None => ImportOrDef::Def(fld.0),
+ None => ImportOrDef::Def(fld.def),
},
);
}
@@ -579,13 +591,13 @@ impl ItemScope {
Some(ImportType::Import(import)) => Some(import),
_ => None,
};
- let prev = std::mem::replace(&mut fld.2, import);
+ let prev = std::mem::replace(&mut fld.import, import);
if let Some(import) = import {
self.use_imports_values.insert(
import,
match prev {
Some(import) => ImportOrDef::Import(import),
- None => ImportOrDef::Def(fld.0),
+ None => ImportOrDef::Def(fld.def),
},
);
}
@@ -599,13 +611,13 @@ impl ItemScope {
Some(ImportType::Import(import)) => Some(import),
_ => None,
};
- let prev = std::mem::replace(&mut fld.2, import);
+ let prev = std::mem::replace(&mut fld.import, import);
if let Some(import) = import {
self.use_imports_values.insert(
import,
match prev {
Some(import) => ImportOrDef::Import(import),
- None => ImportOrDef::Def(fld.0),
+ None => ImportOrDef::Def(fld.def),
},
);
}
@@ -631,13 +643,13 @@ impl ItemScope {
Some(ImportType::Import(import)) => Some(import),
_ => None,
};
- let prev = std::mem::replace(&mut fld.2, import);
+ let prev = std::mem::replace(&mut fld.import, import);
if let Some(import) = import {
self.use_imports_macros.insert(
import,
match prev {
Some(import) => ImportOrDef::Import(import),
- None => ImportOrDef::Def(fld.0.into()),
+ None => ImportOrDef::Def(fld.def.into()),
},
);
}
@@ -651,13 +663,13 @@ impl ItemScope {
Some(ImportType::Import(import)) => Some(import),
_ => None,
};
- let prev = std::mem::replace(&mut fld.2, import);
+ let prev = std::mem::replace(&mut fld.import, import);
if let Some(import) = import {
self.use_imports_macros.insert(
import,
match prev {
Some(import) => ImportOrDef::Import(import),
- None => ImportOrDef::Def(fld.0.into()),
+ None => ImportOrDef::Def(fld.def.into()),
},
);
}
@@ -680,19 +692,19 @@ impl ItemScope {
pub(crate) fn censor_non_proc_macros(&mut self, this_module: ModuleId) {
self.types
.values_mut()
- .map(|(_, vis, _)| vis)
- .chain(self.values.values_mut().map(|(_, vis, _)| vis))
- .chain(self.unnamed_trait_imports.values_mut().map(|(vis, _)| vis))
+ .map(|def| &mut def.vis)
+ .chain(self.values.values_mut().map(|def| &mut def.vis))
+ .chain(self.unnamed_trait_imports.values_mut().map(|def| &mut def.vis))
.for_each(|vis| {
*vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit)
});
- for (mac, vis, import) in self.macros.values_mut() {
- if matches!(mac, MacroId::ProcMacroId(_) if import.is_none()) {
+ for mac in self.macros.values_mut() {
+ if matches!(mac.def, MacroId::ProcMacroId(_) if mac.import.is_none()) {
continue;
}
- *vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit);
+ mac.vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit);
}
}
@@ -707,23 +719,23 @@ impl ItemScope {
name.map_or("_".to_owned(), |name| name.display(db, Edition::LATEST).to_string())
);
- if let Some((.., i)) = def.types {
+ if let Some(Item { import, .. }) = def.types {
buf.push_str(" t");
- match i {
+ match import {
Some(ImportOrExternCrate::Import(_)) => buf.push('i'),
Some(ImportOrExternCrate::ExternCrate(_)) => buf.push('e'),
None => (),
}
}
- if let Some((.., i)) = def.values {
+ if let Some(Item { import, .. }) = def.values {
buf.push_str(" v");
- if i.is_some() {
+ if import.is_some() {
buf.push('i');
}
}
- if let Some((.., i)) = def.macros {
+ if let Some(Item { import, .. }) = def.macros {
buf.push_str(" m");
- if i.is_some() {
+ if import.is_some() {
buf.push('i');
}
}
@@ -781,19 +793,19 @@ impl ItemScope {
pub(crate) fn update_visibility_types(&mut self, name: &Name, vis: Visibility) {
let res =
self.types.get_mut(name).expect("tried to update visibility of non-existent type");
- res.1 = vis;
+ res.vis = vis;
}
pub(crate) fn update_visibility_values(&mut self, name: &Name, vis: Visibility) {
let res =
self.values.get_mut(name).expect("tried to update visibility of non-existent value");
- res.1 = vis;
+ res.vis = vis;
}
pub(crate) fn update_visibility_macros(&mut self, name: &Name, vis: Visibility) {
let res =
self.macros.get_mut(name).expect("tried to update visibility of non-existent macro");
- res.1 = vis;
+ res.vis = vis;
}
}
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 98b08bcf70..f391cc41c1 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -44,7 +44,7 @@ use crate::{
ResolveMode,
},
path::{ImportAlias, ModPath, PathKind},
- per_ns::PerNs,
+ per_ns::{Item, PerNs},
tt,
visibility::{RawVisibility, Visibility},
AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantLoc,
@@ -523,7 +523,7 @@ impl DefCollector<'_> {
self.def_map.resolve_path(self.db, DefMap::ROOT, &path, BuiltinShadowMode::Other, None);
match per_ns.types {
- Some((ModuleDefId::ModuleId(m), _, import)) => {
+ Some(Item { def: ModuleDefId::ModuleId(m), import, .. }) => {
// FIXME: This should specifically look for a glob import somehow and record that here
self.def_map.prelude = Some((
m,
@@ -1069,9 +1069,9 @@ impl DefCollector<'_> {
//
// This has been historically allowed, but may be not allowed in future
// https://github.com/rust-lang/rust/issues/127909
- if let Some((_, v, it)) = defs.types.as_mut() {
+ if let Some(def) = defs.types.as_mut() {
let is_extern_crate_reimport_without_prefix = || {
- let Some(ImportOrExternCrate::ExternCrate(_)) = it else {
+ let Some(ImportOrExternCrate::ExternCrate(_)) = def.import else {
return false;
};
let Some(ImportType::Import(id)) = def_import_type else {
@@ -1086,16 +1086,16 @@ impl DefCollector<'_> {
path.segments().len() < 2
};
if is_extern_crate_reimport_without_prefix() {
- *v = vis;
+ def.vis = vis;
} else {
- *v = v.min(vis, &self.def_map).unwrap_or(vis);
+ def.vis = def.vis.min(vis, &self.def_map).unwrap_or(vis);
}
}
- if let Some((_, v, _)) = defs.values.as_mut() {
- *v = v.min(vis, &self.def_map).unwrap_or(vis);
+ if let Some(def) = defs.values.as_mut() {
+ def.vis = def.vis.min(vis, &self.def_map).unwrap_or(vis);
}
- if let Some((_, v, _)) = defs.macros.as_mut() {
- *v = v.min(vis, &self.def_map).unwrap_or(vis);
+ if let Some(def) = defs.macros.as_mut() {
+ def.vis = def.vis.min(vis, &self.def_map).unwrap_or(vis);
}
let mut changed = false;
@@ -1106,12 +1106,12 @@ impl DefCollector<'_> {
// Multiple globs may import the same item and they may override visibility from
// previously resolved globs. Handle overrides here and leave the rest to
// `ItemScope::push_res_with_import()`.
- if let Some((def, def_vis, _)) = defs.types {
- if let Some((prev_def, prev_vis, _)) = prev_defs.types {
- if def == prev_def
+ if let Some(def) = defs.types {
+ if let Some(prev_def) = prev_defs.types {
+ if def.def == prev_def.def
&& self.from_glob_import.contains_type(module_id, name.clone())
- && def_vis != prev_vis
- && def_vis.max(prev_vis, &self.def_map) == Some(def_vis)
+ && def.vis != prev_def.vis
+ && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis)
{
changed = true;
// This import is being handled here, don't pass it down to
@@ -1119,41 +1119,41 @@ impl DefCollector<'_> {
defs.types = None;
self.def_map.modules[module_id]
.scope
- .update_visibility_types(name, def_vis);
+ .update_visibility_types(name, def.vis);
}
}
}
- if let Some((def, def_vis, _)) = defs.values {
- if let Some((prev_def, prev_vis, _)) = prev_defs.values {
- if def == prev_def
+ if let Some(def) = defs.values {
+ if let Some(prev_def) = prev_defs.values {
+ if def.def == prev_def.def
&& self.from_glob_import.contains_value(module_id, name.clone())
- && def_vis != prev_vis
- && def_vis.max(prev_vis, &self.def_map) == Some(def_vis)
+ && def.vis != prev_def.vis
+ && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis)
{
changed = true;
// See comment above.
defs.values = None;
self.def_map.modules[module_id]
.scope
- .update_visibility_values(name, def_vis);
+ .update_visibility_values(name, def.vis);
}
}
}
- if let Some((def, def_vis, _)) = defs.macros {
- if let Some((prev_def, prev_vis, _)) = prev_defs.macros {
- if def == prev_def
+ if let Some(def) = defs.macros {
+ if let Some(prev_def) = prev_defs.macros {
+ if def.def == prev_def.def
&& self.from_glob_import.contains_macro(module_id, name.clone())
- && def_vis != prev_vis
- && def_vis.max(prev_vis, &self.def_map) == Some(def_vis)
+ && def.vis != prev_def.vis
+ && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis)
{
changed = true;
// See comment above.
defs.macros = None;
self.def_map.modules[module_id]
.scope
- .update_visibility_macros(name, def_vis);
+ .update_visibility_macros(name, def.vis);
}
}
}
diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs
index 29379d0074..8eb195680d 100644
--- a/crates/hir-def/src/nameres/path_resolution.rs
+++ b/crates/hir-def/src/nameres/path_resolution.rs
@@ -67,8 +67,8 @@ impl PerNs {
db: &dyn DefDatabase,
expected: Option<MacroSubNs>,
) -> Self {
- self.macros = self.macros.filter(|&(id, _, _)| {
- let this = MacroSubNs::from_id(db, id);
+ self.macros = self.macros.filter(|def| {
+ let this = MacroSubNs::from_id(db, def.def);
sub_namespace_match(Some(this), expected)
});
@@ -411,7 +411,7 @@ impl DefMap {
original_module: LocalModuleId,
) -> ResolvePathResult {
for (i, segment) in segments {
- let (curr, vis, imp) = match curr_per_ns.take_types_full() {
+ let curr = match curr_per_ns.take_types_full() {
Some(r) => r,
None => {
// we still have path segments left, but the path so far
@@ -424,7 +424,7 @@ impl DefMap {
};
// resolve segment in curr
- curr_per_ns = match curr {
+ curr_per_ns = match curr.def {
ModuleDefId::ModuleId(module) => {
if module.krate != self.krate {
let path = ModPath::from_segments(
@@ -492,7 +492,7 @@ impl DefMap {
Some(res) => res,
None => {
return ResolvePathResult::new(
- PerNs::types(e.into(), vis, imp),
+ PerNs::types(e.into(), curr.vis, curr.import),
ReachedFixedPoint::Yes,
Some(i),
false,
@@ -510,7 +510,7 @@ impl DefMap {
);
return ResolvePathResult::new(
- PerNs::types(s, vis, imp),
+ PerNs::types(s, curr.vis, curr.import),
ReachedFixedPoint::Yes,
Some(i),
false,
diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs
index 1cfbabca28..c8b7ec463a 100644
--- a/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/crates/hir-def/src/nameres/tests/incremental.rs
@@ -331,7 +331,7 @@ pub type Ty = ();
}
for (_, res) in module_data.scope.resolutions() {
- match res.values.map(|(a, _, _)| a).or(res.types.map(|(a, _, _)| a)).unwrap() {
+ match res.values.map(|it| it.def).or(res.types.map(|it| it.def)).unwrap() {
ModuleDefId::FunctionId(f) => _ = db.function_data(f),
ModuleDefId::AdtId(adt) => match adt {
AdtId::StructId(it) => _ = db.struct_data(it),
diff --git a/crates/hir-def/src/per_ns.rs b/crates/hir-def/src/per_ns.rs
index 3f3b98c6b5..899dd4afff 100644
--- a/crates/hir-def/src/per_ns.rs
+++ b/crates/hir-def/src/per_ns.rs
@@ -28,11 +28,22 @@ bitflags! {
}
}
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Item<Def, Import = ImportId> {
+ pub def: Def,
+ pub vis: Visibility,
+ pub import: Option<Import>,
+}
+
+pub type TypesItem = Item<ModuleDefId, ImportOrExternCrate>;
+pub type ValuesItem = Item<ModuleDefId>;
+pub type MacrosItem = Item<MacroId>;
+
#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
pub struct PerNs {
- pub types: Option<(ModuleDefId, Visibility, Option<ImportOrExternCrate>)>,
- pub values: Option<(ModuleDefId, Visibility, Option<ImportId>)>,
- pub macros: Option<(MacroId, Visibility, Option<ImportId>)>,
+ pub types: Option<TypesItem>,
+ pub values: Option<ValuesItem>,
+ pub macros: Option<MacrosItem>,
}
impl PerNs {
@@ -48,29 +59,33 @@ impl PerNs {
PerNs { types: None, values: None, macros: None }
}
- pub fn values(t: ModuleDefId, v: Visibility, i: Option<ImportId>) -> PerNs {
- PerNs { types: None, values: Some((t, v, i)), macros: None }
+ pub fn values(def: ModuleDefId, vis: Visibility, import: Option<ImportId>) -> PerNs {
+ PerNs { types: None, values: Some(Item { def, vis, import }), macros: None }
}
- pub fn types(t: ModuleDefId, v: Visibility, i: Option<ImportOrExternCrate>) -> PerNs {
- PerNs { types: Some((t, v, i)), values: None, macros: None }
+ pub fn types(def: ModuleDefId, vis: Visibility, import: Option<ImportOrExternCrate>) -> PerNs {
+ PerNs { types: Some(Item { def, vis, import }), values: None, macros: None }
}
pub fn both(
types: ModuleDefId,
values: ModuleDefId,
- v: Visibility,
- i: Option<ImportOrExternCrate>,
+ vis: Visibility,
+ import: Option<ImportOrExternCrate>,
) -> PerNs {
PerNs {
- types: Some((types, v, i)),
- values: Some((values, v, i.and_then(ImportOrExternCrate::into_import))),
+ types: Some(Item { def: types, vis, import }),
+ values: Some(Item {
+ def: values,
+ vis,
+ import: import.and_then(ImportOrExternCrate::into_import),
+ }),
macros: None,
}
}
- pub fn macros(macro_: MacroId, v: Visibility, i: Option<ImportId>) -> PerNs {
- PerNs { types: None, values: None, macros: Some((macro_, v, i)) }
+ pub fn macros(def: MacroId, vis: Visibility, import: Option<ImportId>) -> PerNs {
+ PerNs { types: None, values: None, macros: Some(Item { def, vis, import }) }
}
pub fn is_none(&self) -> bool {
@@ -82,43 +97,43 @@ impl PerNs {
}
pub fn take_types(self) -> Option<ModuleDefId> {
- self.types.map(|it| it.0)
+ self.types.map(|it| it.def)
}
- pub fn take_types_full(self) -> Option<(ModuleDefId, Visibility, Option<ImportOrExternCrate>)> {
+ pub fn take_types_full(self) -> Option<TypesItem> {
self.types
}
pub fn take_values(self) -> Option<ModuleDefId> {
- self.values.map(|it| it.0)
+ self.values.map(|it| it.def)
}
pub fn take_values_import(self) -> Option<(ModuleDefId, Option<ImportId>)> {
- self.values.map(|it| (it.0, it.2))
+ self.values.map(|it| (it.def, it.import))
}
pub fn take_macros(self) -> Option<MacroId> {
- self.macros.map(|it| it.0)
+ self.macros.map(|it| it.def)
}
pub fn take_macros_import(self) -> Option<(MacroId, Option<ImportId>)> {
- self.macros.map(|it| (it.0, it.2))
+ self.macros.map(|it| (it.def, it.import))
}
pub fn filter_visibility(self, mut f: impl FnMut(Visibility) -> bool) -> PerNs {
let _p = tracing::info_span!("PerNs::filter_visibility").entered();
PerNs {
- types: self.types.filter(|&(_, v, _)| f(v)),
- values: self.values.filter(|&(_, v, _)| f(v)),
- macros: self.macros.filter(|&(_, v, _)| f(v)),
+ types: self.types.filter(|def| f(def.vis)),
+ values: self.values.filter(|def| f(def.vis)),
+ macros: self.macros.filter(|def| f(def.vis)),
}
}
pub fn with_visibility(self, vis: Visibility) -> PerNs {
PerNs {
- types: self.types.map(|(it, _, c)| (it, vis, c)),
- values: self.values.map(|(it, _, c)| (it, vis, c)),
- macros: self.macros.map(|(it, _, import)| (it, vis, import)),
+ types: self.types.map(|def| Item { vis, ..def }),
+ values: self.values.map(|def| Item { vis, ..def }),
+ macros: self.macros.map(|def| Item { vis, ..def }),
}
}
@@ -141,15 +156,17 @@ impl PerNs {
pub fn iter_items(self) -> impl Iterator<Item = (ItemInNs, Option<ImportOrExternCrate>)> {
let _p = tracing::info_span!("PerNs::iter_items").entered();
self.types
- .map(|it| (ItemInNs::Types(it.0), it.2))
+ .map(|it| (ItemInNs::Types(it.def), it.import))
.into_iter()
.chain(
- self.values
- .map(|it| (ItemInNs::Values(it.0), it.2.map(ImportOrExternCrate::Import))),
+ self.values.map(|it| {
+ (ItemInNs::Values(it.def), it.import.map(ImportOrExternCrate::Import))
+ }),
)
.chain(
- self.macros
- .map(|it| (ItemInNs::Macros(it.0), it.2.map(ImportOrExternCrate::Import))),
+ self.macros.map(|it| {
+ (ItemInNs::Macros(it.def), it.import.map(ImportOrExternCrate::Import))
+ }),
)
}
}
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index 316406c151..f4dfd42a30 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -933,8 +933,8 @@ impl ModuleItemMap {
Some(ResolveValueResult::ValueNs(value, import))
}
Some(idx) => {
- let (def, _, import) = module_def.take_types_full()?;
- let ty = match def {
+ let def = module_def.take_types_full()?;
+ let ty = match def.def {
ModuleDefId::AdtId(it) => TypeNs::AdtId(it),
ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
ModuleDefId::TraitAliasId(it) => TypeNs::TraitAliasId(it),
@@ -948,7 +948,7 @@ impl ModuleItemMap {
| ModuleDefId::MacroId(_)
| ModuleDefId::StaticId(_) => return None,
};
- Some(ResolveValueResult::Partial(ty, idx, import))
+ Some(ResolveValueResult::Partial(ty, idx, def.import))
}
}
}
@@ -986,8 +986,8 @@ fn to_value_ns(per_ns: PerNs) -> Option<(ValueNs, Option<ImportId>)> {
}
fn to_type_ns(per_ns: PerNs) -> Option<(TypeNs, Option<ImportOrExternCrate>)> {
- let (def, _, import) = per_ns.take_types_full()?;
- let res = match def {
+ let def = per_ns.take_types_full()?;
+ let res = match def.def {
ModuleDefId::AdtId(it) => TypeNs::AdtId(it),
ModuleDefId::EnumVariantId(it) => TypeNs::EnumVariantId(it),
@@ -1003,7 +1003,7 @@ fn to_type_ns(per_ns: PerNs) -> Option<(TypeNs, Option<ImportOrExternCrate>)> {
| ModuleDefId::StaticId(_)
| ModuleDefId::ModuleId(_) => return None,
};
- Some((res, import))
+ Some((res, def.import))
}
#[derive(Default)]
@@ -1019,14 +1019,14 @@ impl ScopeNames {
}
}
fn add_per_ns(&mut self, name: &Name, def: PerNs) {
- if let &Some((ty, _, _)) = &def.types {
- self.add(name, ScopeDef::ModuleDef(ty))
+ if let Some(ty) = &def.types {
+ self.add(name, ScopeDef::ModuleDef(ty.def))
}
- if let &Some((def, _, _)) = &def.values {
- self.add(name, ScopeDef::ModuleDef(def))
+ if let Some(def) = &def.values {
+ self.add(name, ScopeDef::ModuleDef(def.def))
}
- if let &Some((mac, _, _)) = &def.macros {
- self.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac)))
+ if let Some(mac) = &def.macros {
+ self.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac.def)))
}
if def.is_none() {
self.add(name, ScopeDef::Unknown)
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 0d19ae202c..fa400378f3 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -153,13 +153,13 @@ fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId) -> SyntaxContextId {
/// This expands the given macro call, but with different arguments. This is
/// used for completion, where we want to see what 'would happen' if we insert a
/// token. The `token_to_map` mapped down into the expansion, with the mapped
-/// token returned.
+/// token(s) returned with their priority.
pub fn expand_speculative(
db: &dyn ExpandDatabase,
actual_macro_call: MacroCallId,
speculative_args: &SyntaxNode,
token_to_map: SyntaxToken,
-) -> Option<(SyntaxNode, SyntaxToken)> {
+) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
let loc = db.lookup_intern_macro_call(actual_macro_call);
let (_, _, span) = db.macro_arg_considering_derives(actual_macro_call, &loc.kind);
@@ -303,17 +303,19 @@ pub fn expand_speculative(
token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition);
let syntax_node = node.syntax_node();
- let (token, _) = rev_tmap
+ let token = rev_tmap
.ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
.filter_map(|(range, ctx)| syntax_node.covering_element(range).into_token().zip(Some(ctx)))
- .min_by_key(|(t, ctx)| {
+ .map(|(t, ctx)| {
// prefer tokens of the same kind and text, as well as non opaque marked ones
// Note the inversion of the score here, as we want to prefer the first token in case
// of all tokens having the same score
- ctx.is_opaque(db) as u8
+ let ranking = ctx.is_opaque(db) as u8
+ 2 * (t.kind() != token_to_map.kind()) as u8
- + 4 * ((t.text() != token_to_map.text()) as u8)
- })?;
+ + 4 * ((t.text() != token_to_map.text()) as u8);
+ (t, ranking)
+ })
+ .collect();
Some((node.syntax_node(), token))
}
diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs
index 53795c0b60..55d0edd5e0 100644
--- a/crates/hir-ty/src/chalk_db.rs
+++ b/crates/hir-ty/src/chalk_db.rs
@@ -22,7 +22,6 @@ use hir_def::{
use crate::{
db::{HirDatabase, InternedCoroutine},
- display::HirDisplay,
from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
generics::generics,
make_binders, make_single_type_binders,
@@ -823,13 +822,12 @@ pub(crate) fn impl_datum_query(
let _p = tracing::info_span!("impl_datum_query").entered();
debug!("impl_datum {:?}", impl_id);
let impl_: hir_def::ImplId = from_chalk(db, impl_id);
- impl_def_datum(db, krate, impl_id, impl_)
+ impl_def_datum(db, krate, impl_)
}
fn impl_def_datum(
db: &dyn HirDatabase,
krate: CrateId,
- chalk_id: ImplId,
impl_id: hir_def::ImplId,
) -> Arc<ImplDatum> {
let trait_ref = db
@@ -850,13 +848,6 @@ fn impl_def_datum(
};
let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars);
let negative = impl_data.is_negative;
- debug!(
- "impl {:?}: {}{} where {:?}",
- chalk_id,
- if negative { "!" } else { "" },
- trait_ref.display(db, db.crate_graph()[krate].edition),
- where_clauses
- );
let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive };
diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 193aaa52c2..6bba83fac9 100644
--- a/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -193,10 +193,19 @@ impl<'a> UnsafeVisitor<'a> {
self.resolver.reset_to_guard(guard);
}
Expr::Ref { expr, rawness: Rawness::RawPtr, mutability: _ } => {
- if let Expr::Path(_) = self.body.exprs[*expr] {
+ match self.body.exprs[*expr] {
// Do not report unsafe for `addr_of[_mut]!(EXTERN_OR_MUT_STATIC)`,
// see https://github.com/rust-lang/rust/pull/125834.
- return;
+ Expr::Path(_) => return,
+ // https://github.com/rust-lang/rust/pull/129248
+ // Taking a raw ref to a deref place expr is always safe.
+ Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
+ self.body
+ .walk_child_exprs_without_pats(expr, |child| self.walk_expr(child));
+
+ return;
+ }
+ _ => (),
}
}
Expr::MethodCall { .. } => {
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index 612c6adb20..cbb1ed95ed 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -262,7 +262,7 @@ pub struct UnresolvedAssocItem {
#[derive(Debug)]
pub struct UnresolvedIdent {
- pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
+ pub node: InFile<(AstPtr<Either<ast::Expr, ast::Pat>>, Option<TextRange>)>,
}
#[derive(Debug)]
@@ -550,11 +550,10 @@ impl AnyDiagnostic {
source_map: &hir_def::body::BodySourceMap,
) -> Option<AnyDiagnostic> {
let expr_syntax = |expr| {
- source_map.expr_syntax(expr).inspect_err(|_| tracing::error!("synthetic syntax")).ok()
- };
- let pat_syntax = |pat| {
- source_map.pat_syntax(pat).inspect_err(|_| tracing::error!("synthetic syntax")).ok()
+ source_map.expr_syntax(expr).inspect_err(|_| stdx::never!("synthetic syntax")).ok()
};
+ let pat_syntax =
+ |pat| source_map.pat_syntax(pat).inspect_err(|_| stdx::never!("synthetic syntax")).ok();
let expr_or_pat_syntax = |id| match id {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(|it| it.map(AstPtr::wrap_left)),
ExprOrPatId::PatId(pat) => pat_syntax(pat),
@@ -626,8 +625,16 @@ impl AnyDiagnostic {
UnresolvedAssocItem { expr_or_pat }.into()
}
&InferenceDiagnostic::UnresolvedIdent { id } => {
- let expr_or_pat = expr_or_pat_syntax(id)?;
- UnresolvedIdent { expr_or_pat }.into()
+ let node = match id {
+ ExprOrPatId::ExprId(id) => match source_map.expr_syntax(id) {
+ Ok(syntax) => syntax.map(|it| (it.wrap_left(), None)),
+ Err(SyntheticSyntax) => source_map
+ .format_args_implicit_capture(id)?
+ .map(|(node, range)| (node.wrap_left(), Some(range))),
+ },
+ ExprOrPatId::PatId(id) => pat_syntax(id)?.map(|it| (it, None)),
+ };
+ UnresolvedIdent { node }.into()
}
&InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => {
let expr = expr_syntax(expr)?;
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 3bc2eee1e7..dfc91c7343 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -3105,10 +3105,10 @@ impl From<ModuleDef> for ItemInNs {
}
impl ItemInNs {
- pub fn as_module_def(self) -> Option<ModuleDef> {
+ pub fn into_module_def(self) -> ModuleDef {
match self {
- ItemInNs::Types(id) | ItemInNs::Values(id) => Some(id),
- ItemInNs::Macros(_) => None,
+ ItemInNs::Types(id) | ItemInNs::Values(id) => id,
+ ItemInNs::Macros(id) => ModuleDef::Macro(id),
}
}
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index b896cda9dd..1cf22b05e7 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -38,9 +38,9 @@ use span::{AstIdMap, EditionedFileId, FileId, HirFileIdRepr, SyntaxContextId};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
- ast::{self, HasAttrs as _, HasGenericParams, IsString as _},
- AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
- TextSize,
+ ast::{self, HasAttrs as _, HasGenericParams},
+ AstNode, AstToken, Direction, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
+ TextRange, TextSize,
};
use triomphe::Arc;
@@ -571,7 +571,7 @@ impl<'db> SemanticsImpl<'db> {
actual_macro_call: &ast::MacroCall,
speculative_args: &ast::TokenTree,
token_to_map: SyntaxToken,
- ) -> Option<(SyntaxNode, SyntaxToken)> {
+ ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
let SourceAnalyzer { file_id, resolver, .. } =
self.analyze_no_infer(actual_macro_call.syntax())?;
let macro_call = InFile::new(file_id, actual_macro_call);
@@ -592,7 +592,7 @@ impl<'db> SemanticsImpl<'db> {
macro_file: MacroFileId,
speculative_args: &SyntaxNode,
token_to_map: SyntaxToken,
- ) -> Option<(SyntaxNode, SyntaxToken)> {
+ ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
hir_expand::db::expand_speculative(
self.db.upcast(),
macro_file.macro_call_id,
@@ -608,7 +608,7 @@ impl<'db> SemanticsImpl<'db> {
actual_macro_call: &ast::Item,
speculative_args: &ast::Item,
token_to_map: SyntaxToken,
- ) -> Option<(SyntaxNode, SyntaxToken)> {
+ ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
let macro_call = self.wrap_node_infile(actual_macro_call.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?;
hir_expand::db::expand_speculative(
@@ -624,7 +624,7 @@ impl<'db> SemanticsImpl<'db> {
actual_macro_call: &ast::Attr,
speculative_args: &ast::Attr,
token_to_map: SyntaxToken,
- ) -> Option<(SyntaxNode, SyntaxToken)> {
+ ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
let attr = self.wrap_node_infile(actual_macro_call.clone());
let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
let macro_call_id = self.with_ctx(|ctx| {
@@ -643,8 +643,7 @@ impl<'db> SemanticsImpl<'db> {
&self,
string: &ast::String,
) -> Option<Vec<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)>> {
- let quote = string.open_quote_text_range()?;
-
+ let string_start = string.syntax().text_range().start();
let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?;
self.descend_into_macros_breakable(token, |token, _| {
(|| {
@@ -658,7 +657,7 @@ impl<'db> SemanticsImpl<'db> {
let format_args = self.wrap_node_infile(format_args);
let res = source_analyzer
.as_format_args_parts(self.db, format_args.as_ref())?
- .map(|(range, res)| (range + quote.end(), res.map(Either::Left)))
+ .map(|(range, res)| (range + string_start, res.map(Either::Left)))
.collect();
Some(res)
} else {
@@ -672,7 +671,7 @@ impl<'db> SemanticsImpl<'db> {
.iter()
.map(|&(range, index)| {
(
- range + quote.end(),
+ range + string_start,
Some(Either::Right(InlineAsmOperand { owner, expr, index })),
)
})
@@ -690,17 +689,16 @@ impl<'db> SemanticsImpl<'db> {
original_token: SyntaxToken,
offset: TextSize,
) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> {
- let original_string = ast::String::cast(original_token.clone())?;
+ let string_start = original_token.text_range().start();
let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?;
- let quote = original_string.open_quote_text_range()?;
self.descend_into_macros_breakable(original_token, |token, _| {
(|| {
let token = token.value;
self.resolve_offset_in_format_args(
ast::String::cast(token)?,
- offset.checked_sub(quote.end())?,
+ offset.checked_sub(string_start)?,
)
- .map(|(range, res)| (range + quote.end(), res))
+ .map(|(range, res)| (range + string_start, res))
})()
.map_or(ControlFlow::Continue(()), ControlFlow::Break)
})
@@ -1542,6 +1540,21 @@ impl<'db> SemanticsImpl<'db> {
Some(items.iter_items().map(|(item, _)| item.into()))
}
+ pub fn resolve_mod_path_relative(
+ &self,
+ to: Module,
+ segments: impl IntoIterator<Item = SmolStr>,
+ ) -> Option<impl Iterator<Item = ItemInNs>> {
+ let items = to.id.resolver(self.db.upcast()).resolve_module_path_in_items(
+ self.db.upcast(),
+ &ModPath::from_segments(
+ hir_def::path::PathKind::Plain,
+ segments.into_iter().map(|it| Name::new(&it, SyntaxContextId::ROOT)),
+ ),
+ );
+ Some(items.iter_items().map(|(item, _)| item.into()))
+ }
+
fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
}
diff --git a/crates/ide-assists/src/assist_context.rs b/crates/ide-assists/src/assist_context.rs
index 0146369f29..074d943719 100644
--- a/crates/ide-assists/src/assist_context.rs
+++ b/crates/ide-assists/src/assist_context.rs
@@ -3,6 +3,7 @@
use hir::{FileRange, Semantics};
use ide_db::EditionedFileId;
use ide_db::{label::Label, FileId, RootDatabase};
+use syntax::Edition;
use syntax::{
algo::{self, find_node_at_offset, find_node_at_range},
AstNode, AstToken, Direction, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange,
@@ -94,6 +95,10 @@ impl<'a> AssistContext<'a> {
self.frange.file_id
}
+ pub(crate) fn edition(&self) -> Edition {
+ self.frange.file_id.edition()
+ }
+
pub(crate) fn has_empty_selection(&self) -> bool {
self.trimmed_range.is_empty()
}
diff --git a/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
index 3f0d5cf152..b9142d0318 100644
--- a/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
+++ b/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
@@ -1,10 +1,12 @@
-use ide_db::text_edit::TextRange;
use ide_db::{
assists::{AssistId, AssistKind},
defs::Definition,
- search::{FileReference, SearchScope, UsageSearchResult},
+ search::{FileReference, SearchScope},
+ syntax_helpers::suggest_name,
+ text_edit::TextRange,
};
use itertools::Itertools;
+use syntax::SmolStr;
use syntax::{
ast::{self, make, AstNode, FieldExpr, HasName, IdentPat},
ted,
@@ -122,33 +124,43 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleDat
return None;
}
- let name = ident_pat.name()?.to_string();
-
- let usages = ctx.sema.to_def(&ident_pat).map(|def| {
+ let usages = ctx.sema.to_def(&ident_pat).and_then(|def| {
Definition::Local(def)
.usages(&ctx.sema)
.in_scope(&SearchScope::single_file(ctx.file_id()))
.all()
+ .iter()
+ .next()
+ .map(|(_, refs)| refs.to_vec())
});
- let field_names = (0..field_types.len())
- .map(|i| generate_name(ctx, i, &name, &ident_pat, &usages))
+ let mut name_generator = {
+ let mut names = vec![];
+ if let Some(scope) = ctx.sema.scope(ident_pat.syntax()) {
+ scope.process_all_names(&mut |name, scope| {
+ if let hir::ScopeDef::Local(_) = scope {
+ names.push(name.as_str().into())
+ }
+ })
+ }
+ suggest_name::NameGenerator::new_with_names(names.iter().map(|s: &SmolStr| s.as_str()))
+ };
+
+ let field_names = field_types
+ .into_iter()
+ .enumerate()
+ .map(|(id, ty)| {
+ match name_generator.for_type(&ty, ctx.db(), ctx.edition()) {
+ Some(name) => name,
+ None => name_generator.suggest_name(&format!("_{}", id)),
+ }
+ .to_string()
+ })
.collect::<Vec<_>>();
Some(TupleData { ident_pat, ref_type, field_names, usages })
}
-fn generate_name(
- _ctx: &AssistContext<'_>,
- index: usize,
- _tuple_name: &str,
- _ident_pat: &IdentPat,
- _usages: &Option<UsageSearchResult>,
-) -> String {
- // FIXME: detect if name already used
- format!("_{index}")
-}
-
enum RefType {
ReadOnly,
Mutable,
@@ -157,7 +169,7 @@ struct TupleData {
ident_pat: IdentPat,
ref_type: Option<RefType>,
field_names: Vec<String>,
- usages: Option<UsageSearchResult>,
+ usages: Option<Vec<FileReference>>,
}
fn edit_tuple_assignment(
ctx: &AssistContext<'_>,
@@ -213,42 +225,23 @@ fn edit_tuple_usages(
ctx: &AssistContext<'_>,
in_sub_pattern: bool,
) -> Option<Vec<EditTupleUsage>> {
- let mut current_file_usages = None;
-
- if let Some(usages) = data.usages.as_ref() {
- // We need to collect edits first before actually applying them
- // as mapping nodes to their mutable node versions requires an
- // unmodified syntax tree.
- //
- // We also defer editing usages in the current file first since
- // tree mutation in the same file breaks when `builder.edit_file`
- // is called
-
- if let Some((_, refs)) = usages.iter().find(|(file_id, _)| *file_id == ctx.file_id()) {
- current_file_usages = Some(
- refs.iter()
- .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern))
- .collect_vec(),
- );
- }
-
- for (file_id, refs) in usages.iter() {
- if file_id == ctx.file_id() {
- continue;
- }
-
- edit.edit_file(file_id.file_id());
-
- let tuple_edits = refs
- .iter()
- .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern))
- .collect_vec();
-
- tuple_edits.into_iter().for_each(|tuple_edit| tuple_edit.apply(edit))
- }
- }
-
- current_file_usages
+ // We need to collect edits first before actually applying them
+ // as mapping nodes to their mutable node versions requires an
+ // unmodified syntax tree.
+ //
+ // We also defer editing usages in the current file first since
+ // tree mutation in the same file breaks when `builder.edit_file`
+ // is called
+
+ let edits = data
+ .usages
+ .as_ref()?
+ .as_slice()
+ .iter()
+ .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern))
+ .collect_vec();
+
+ Some(edits)
}
fn edit_tuple_usage(
ctx: &AssistContext<'_>,
@@ -1769,14 +1762,14 @@ struct S4 {
}
fn foo() -> Option<()> {
- let ($0_0, _1, _2, _3, _4, _5) = &(0, (1,"1"), Some(2), [3;3], S4 { value: 4 }, &5);
+ let ($0_0, _1, _2, _3, s4, _5) = &(0, (1,"1"), Some(2), [3;3], S4 { value: 4 }, &5);
let v: i32 = *_0; // deref, no parens
let v: &i32 = _0; // no deref, no parens, remove `&`
f1(*_0); // deref, no parens
f2(_0); // `&*` -> cancel out -> no deref, no parens
// https://github.com/rust-lang/rust-analyzer/issues/1109#issuecomment-658868639
// let v: i32 = t.1.0; // no deref, no parens
- let v: i32 = _4.value; // no deref, no parens
+ let v: i32 = s4.value; // no deref, no parens
(*_0).do_stuff(); // deref, parens
let v: i32 = (*_2)?; // deref, parens
let v: i32 = _3[0]; // no deref, no parens
@@ -1815,8 +1808,8 @@ impl S {
}
fn main() {
- let ($0_0, _1) = &(S,2);
- let s = _0.f();
+ let ($0s, _1) = &(S,2);
+ let s = s.f();
}
"#,
)
@@ -1845,8 +1838,8 @@ impl S {
}
fn main() {
- let ($0_0, _1) = &(S,2);
- let s = (*_0).f();
+ let ($0s, _1) = &(S,2);
+ let s = (*s).f();
}
"#,
)
@@ -1882,8 +1875,8 @@ impl T for &S {
}
fn main() {
- let ($0_0, _1) = &(S,2);
- let s = (*_0).f();
+ let ($0s, _1) = &(S,2);
+ let s = (*s).f();
}
"#,
)
@@ -1923,8 +1916,8 @@ impl T for &S {
}
fn main() {
- let ($0_0, _1) = &(S,2);
- let s = (*_0).f();
+ let ($0s, _1) = &(S,2);
+ let s = (*s).f();
}
"#,
)
@@ -1951,8 +1944,8 @@ impl S {
fn do_stuff(&self) -> i32 { 42 }
}
fn main() {
- let ($0_0, _1) = &(S,&S);
- let v = _0.do_stuff();
+ let ($0s, s1) = &(S,&S);
+ let v = s.do_stuff();
}
"#,
)
@@ -1973,7 +1966,7 @@ fn main() {
// `t.0` gets auto-refed -> no deref needed -> no parens
let v = t.0.do_stuff(); // no deref, no parens
let v = &t.0.do_stuff(); // `&` is for result -> no deref, no parens
- // deref: `_1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S`
+ // deref: `s1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S`
let v = t.1.do_stuff(); // deref, parens
}
"#,
@@ -1984,13 +1977,13 @@ impl S {
fn do_stuff(&self) -> i32 { 42 }
}
fn main() {
- let ($0_0, _1) = &(S,&S);
- let v = _0.do_stuff(); // no deref, remove parens
+ let ($0s, s1) = &(S,&S);
+ let v = s.do_stuff(); // no deref, remove parens
// `t.0` gets auto-refed -> no deref needed -> no parens
- let v = _0.do_stuff(); // no deref, no parens
- let v = &_0.do_stuff(); // `&` is for result -> no deref, no parens
- // deref: `_1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S`
- let v = (*_1).do_stuff(); // deref, parens
+ let v = s.do_stuff(); // no deref, no parens
+ let v = &s.do_stuff(); // `&` is for result -> no deref, no parens
+ // deref: `s1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S`
+ let v = (*s1).do_stuff(); // deref, parens
}
"#,
)
diff --git a/crates/ide-assists/src/handlers/extract_variable.rs b/crates/ide-assists/src/handlers/extract_variable.rs
index a8d71ed7f4..6735d7dcbe 100644
--- a/crates/ide-assists/src/handlers/extract_variable.rs
+++ b/crates/ide-assists/src/handlers/extract_variable.rs
@@ -1,5 +1,8 @@
use hir::{HirDisplay, TypeInfo};
-use ide_db::{assists::GroupLabel, syntax_helpers::suggest_name};
+use ide_db::{
+ assists::GroupLabel,
+ syntax_helpers::{suggest_name, LexedStr},
+};
use syntax::{
ast::{
self, edit::IndentLevel, edit_in_place::Indent, make, syntax_factory::SyntaxFactory,
@@ -320,24 +323,58 @@ impl ExtractionKind {
ctx: &AssistContext<'_>,
to_extract: &ast::Expr,
) -> (String, SyntaxNode) {
- let field_shorthand = to_extract
- .syntax()
- .parent()
- .and_then(ast::RecordExprField::cast)
- .filter(|field| field.name_ref().is_some());
- let (var_name, expr_replace) = match field_shorthand {
- Some(field) => (field.to_string(), field.syntax().clone()),
- None => {
- (suggest_name::for_variable(to_extract, &ctx.sema), to_extract.syntax().clone())
+ // We only do this sort of extraction for fields because they should have lowercase names
+ if let ExtractionKind::Variable = self {
+ let field_shorthand = to_extract
+ .syntax()
+ .parent()
+ .and_then(ast::RecordExprField::cast)
+ .filter(|field| field.name_ref().is_some());
+
+ if let Some(field) = field_shorthand {
+ return (field.to_string(), field.syntax().clone());
}
+ }
+
+ let var_name = if let Some(literal_name) = get_literal_name(ctx, to_extract) {
+ literal_name
+ } else {
+ suggest_name::for_variable(to_extract, &ctx.sema)
};
let var_name = match self {
- ExtractionKind::Variable => var_name,
+ ExtractionKind::Variable => var_name.to_lowercase(),
ExtractionKind::Constant | ExtractionKind::Static => var_name.to_uppercase(),
};
- (var_name, expr_replace)
+ (var_name, to_extract.syntax().clone())
+ }
+}
+
+fn get_literal_name(ctx: &AssistContext<'_>, expr: &ast::Expr) -> Option<String> {
+ let literal = match expr {
+ ast::Expr::Literal(literal) => literal,
+ _ => return None,
+ };
+ let inner = match literal.kind() {
+ ast::LiteralKind::String(string) => string.value().ok()?.into_owned(),
+ ast::LiteralKind::ByteString(byte_string) => {
+ String::from_utf8(byte_string.value().ok()?.into_owned()).ok()?
+ }
+ ast::LiteralKind::CString(cstring) => {
+ String::from_utf8(cstring.value().ok()?.into_owned()).ok()?
+ }
+ _ => return None,
+ };
+
+ // Entirely arbitrary
+ if inner.len() > 32 {
+ return None;
+ }
+
+ match LexedStr::single_token(ctx.file_id().edition(), &inner) {
+ Some((SyntaxKind::IDENT, None)) => Some(inner),
+ _ => None,
}
}
@@ -493,7 +530,7 @@ fn main() {
"#,
r#"
fn main() {
- let $0var_name = "hello";
+ let $0hello = "hello";
}
"#,
"Extract into variable",
@@ -588,7 +625,7 @@ fn main() {
"#,
r#"
fn main() {
- const $0VAR_NAME: &str = "hello";
+ const $0HELLO: &str = "hello";
}
"#,
"Extract into constant",
@@ -683,7 +720,7 @@ fn main() {
"#,
r#"
fn main() {
- static $0VAR_NAME: &str = "hello";
+ static $0HELLO: &str = "hello";
}
"#,
"Extract into static",
@@ -2479,4 +2516,120 @@ fn foo() {
"Extract into variable",
);
}
+
+ #[test]
+ fn extract_string_literal() {
+ check_assist_by_label(
+ extract_variable,
+ r#"
+struct Entry(&str);
+fn foo() {
+ let entry = Entry($0"Hello"$0);
+}
+"#,
+ r#"
+struct Entry(&str);
+fn foo() {
+ let $0hello = "Hello";
+ let entry = Entry(hello);
+}
+"#,
+ "Extract into variable",
+ );
+
+ check_assist_by_label(
+ extract_variable,
+ r#"
+struct Entry(&str);
+fn foo() {
+ let entry = Entry($0"Hello"$0);
+}
+"#,
+ r#"
+struct Entry(&str);
+fn foo() {
+ const $0HELLO: &str = "Hello";
+ let entry = Entry(HELLO);
+}
+"#,
+ "Extract into constant",
+ );
+
+ check_assist_by_label(
+ extract_variable,
+ r#"
+struct Entry(&str);
+fn foo() {
+ let entry = Entry($0"Hello"$0);
+}
+"#,
+ r#"
+struct Entry(&str);
+fn foo() {
+ static $0HELLO: &str = "Hello";
+ let entry = Entry(HELLO);
+}
+"#,
+ "Extract into static",
+ );
+ }
+
+ #[test]
+ fn extract_variable_string_literal_use_field_shorthand() {
+ // When field shorthand is available, it should
+ // only be used when extracting into a variable
+ check_assist_by_label(
+ extract_variable,
+ r#"
+struct Entry { message: &str }
+fn foo() {
+ let entry = Entry { message: $0"Hello"$0 };
+}
+"#,
+ r#"
+struct Entry { message: &str }
+fn foo() {
+ let $0message = "Hello";
+ let entry = Entry { message };
+}
+"#,
+ "Extract into variable",
+ );
+
+ check_assist_by_label(
+ extract_variable,
+ r#"
+struct Entry { message: &str }
+fn foo() {
+ let entry = Entry { message: $0"Hello"$0 };
+}
+"#,
+ r#"
+struct Entry { message: &str }
+fn foo() {
+ const $0HELLO: &str = "Hello";
+ let entry = Entry { message: HELLO };
+}
+"#,
+ "Extract into constant",
+ );
+
+ check_assist_by_label(
+ extract_variable,
+ r#"
+struct Entry { message: &str }
+fn foo() {
+ let entry = Entry { message: $0"Hello"$0 };
+}
+"#,
+ r#"
+struct Entry { message: &str }
+fn foo() {
+ static $0HELLO: &str = "Hello";
+ let entry = Entry { message: HELLO };
+}
+"#,
+ "Extract into static",
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/qualify_method_call.rs b/crates/ide-assists/src/handlers/qualify_method_call.rs
index 14518c4d2c..c3600af5a6 100644
--- a/crates/ide-assists/src/handlers/qualify_method_call.rs
+++ b/crates/ide-assists/src/handlers/qualify_method_call.rs
@@ -86,7 +86,7 @@ fn item_for_path_search(db: &dyn HirDatabase, item: ItemInNs) -> Option<ItemInNs
}
fn item_as_assoc(db: &dyn HirDatabase, item: ItemInNs) -> Option<AssocItem> {
- item.as_module_def().and_then(|module_def| module_def.as_assoc_item(db))
+ item.into_module_def().as_assoc_item(db)
}
#[cfg(test)]
diff --git a/crates/ide-assists/src/handlers/qualify_path.rs b/crates/ide-assists/src/handlers/qualify_path.rs
index ac88861fe4..849b8a42c6 100644
--- a/crates/ide-assists/src/handlers/qualify_path.rs
+++ b/crates/ide-assists/src/handlers/qualify_path.rs
@@ -51,7 +51,7 @@ pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
let candidate = import_assets.import_candidate();
let qualify_candidate = match syntax_under_caret.clone() {
NodeOrToken::Node(syntax_under_caret) => match candidate {
- ImportCandidate::Path(candidate) if candidate.qualifier.is_some() => {
+ ImportCandidate::Path(candidate) if !candidate.qualifier.is_empty() => {
cov_mark::hit!(qualify_path_qualifier_start);
let path = ast::Path::cast(syntax_under_caret)?;
let (prev_segment, segment) = (path.qualifier()?.segment()?, path.segment()?);
@@ -219,11 +219,9 @@ fn find_trait_method(
}
fn item_as_trait(db: &RootDatabase, item: hir::ItemInNs) -> Option<hir::Trait> {
- let item_module_def = item.as_module_def()?;
-
- match item_module_def {
+ match item.into_module_def() {
hir::ModuleDef::Trait(trait_) => Some(trait_),
- _ => item_module_def.as_assoc_item(db)?.container_trait(db),
+ item_module_def => item_module_def.as_assoc_item(db)?.container_trait(db),
}
}
@@ -247,7 +245,7 @@ fn label(
let import_path = &import.import_path;
match candidate {
- ImportCandidate::Path(candidate) if candidate.qualifier.is_none() => {
+ ImportCandidate::Path(candidate) if candidate.qualifier.is_empty() => {
format!("Qualify as `{}`", import_path.display(db, edition))
}
_ => format!("Qualify with `{}`", import_path.display(db, edition)),
diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index 2dec876215..31e828eae2 100644
--- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -78,7 +78,7 @@ pub(crate) fn replace_derive_with_manual_impl(
NameToImport::exact_case_sensitive(path.segments().last()?.to_string()),
items_locator::AssocSearchMode::Exclude,
)
- .filter_map(|item| match item.as_module_def()? {
+ .filter_map(|item| match item.into_module_def() {
ModuleDef::Trait(trait_) => Some(trait_),
_ => None,
})
diff --git a/crates/ide-assists/src/handlers/unnecessary_async.rs b/crates/ide-assists/src/handlers/unnecessary_async.rs
index a83b27867b..abe7fb132f 100644
--- a/crates/ide-assists/src/handlers/unnecessary_async.rs
+++ b/crates/ide-assists/src/handlers/unnecessary_async.rs
@@ -12,13 +12,15 @@ use syntax::{
use crate::{AssistContext, Assists};
+// FIXME: This ought to be a diagnostic lint.
+
// Assist: unnecessary_async
//
// Removes the `async` mark from functions which have no `.await` in their body.
// Looks for calls to the functions and removes the `.await` on the call site.
//
// ```
-// pub async f$0n foo() {}
+// pub asy$0nc fn foo() {}
// pub async fn bar() { foo().await }
// ```
// ->
@@ -29,15 +31,11 @@ use crate::{AssistContext, Assists};
pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let function: ast::Fn = ctx.find_node_at_offset()?;
- // Do nothing if the cursor is not on the prototype. This is so that the check does not pollute
- // when the user asks us for assists when in the middle of the function body.
- // We consider the prototype to be anything that is before the body of the function.
- let cursor_position = ctx.offset();
- if cursor_position >= function.body()?.syntax().text_range().start() {
+ // Do nothing if the cursor isn't on the async token.
+ let async_token = function.async_token()?;
+ if !async_token.text_range().contains_inclusive(ctx.offset()) {
return None;
}
- // Do nothing if the function isn't async.
- function.async_token()?;
// Do nothing if the function has an `await` expression in its body.
if function.body()?.syntax().descendants().find_map(ast::AwaitExpr::cast).is_some() {
return None;
@@ -138,27 +136,22 @@ mod tests {
#[test]
fn applies_on_empty_function() {
- check_assist(unnecessary_async, "pub async f$0n f() {}", "pub fn f() {}")
+ check_assist(unnecessary_async, "pub asy$0nc fn f() {}", "pub fn f() {}")
}
#[test]
fn applies_and_removes_whitespace() {
- check_assist(unnecessary_async, "pub async f$0n f() {}", "pub fn f() {}")
- }
-
- #[test]
- fn does_not_apply_on_non_async_function() {
- check_assist_not_applicable(unnecessary_async, "pub f$0n f() {}")
+ check_assist(unnecessary_async, "pub async$0 fn f() {}", "pub fn f() {}")
}
#[test]
fn applies_on_function_with_a_non_await_expr() {
- check_assist(unnecessary_async, "pub async f$0n f() { f2() }", "pub fn f() { f2() }")
+ check_assist(unnecessary_async, "pub asy$0nc fn f() { f2() }", "pub fn f() { f2() }")
}
#[test]
fn does_not_apply_on_function_with_an_await_expr() {
- check_assist_not_applicable(unnecessary_async, "pub async f$0n f() { f2().await }")
+ check_assist_not_applicable(unnecessary_async, "pub asy$0nc fn f() { f2().await }")
}
#[test]
@@ -167,7 +160,7 @@ mod tests {
unnecessary_async,
r#"
pub async fn f4() { }
-pub async f$0n f2() { }
+pub asy$0nc fn f2() { }
pub async fn f() { f2().await }
pub async fn f3() { f2().await }"#,
r#"
@@ -184,7 +177,7 @@ pub async fn f3() { f2() }"#,
unnecessary_async,
r#"
pub async fn f4() { }
-mod a { pub async f$0n f2() { } }
+mod a { pub asy$0nc fn f2() { } }
pub async fn f() { a::f2().await }
pub async fn f3() { a::f2().await }"#,
r#"
@@ -202,7 +195,7 @@ pub async fn f3() { a::f2() }"#,
// Ensure that it is the first await on the 3rd line that is removed
r#"
pub async fn f() { f2().await }
-pub async f$0n f2() -> i32 { 1 }
+pub asy$0nc fn f2() -> i32 { 1 }
pub async fn f3() { f4(f2().await).await }
pub async fn f4(i: i32) { }"#,
r#"
@@ -220,7 +213,7 @@ pub async fn f4(i: i32) { }"#,
// Ensure that it is the second await on the 3rd line that is removed
r#"
pub async fn f() { f2().await }
-pub async f$0n f2(i: i32) { }
+pub async$0 fn f2(i: i32) { }
pub async fn f3() { f2(f4().await).await }
pub async fn f4() -> i32 { 1 }"#,
r#"
@@ -237,7 +230,7 @@ pub async fn f4() -> i32 { 1 }"#,
unnecessary_async,
r#"
pub struct S { }
-impl S { pub async f$0n f2(&self) { } }
+impl S { pub async$0 fn f2(&self) { } }
pub async fn f(s: &S) { s.f2().await }"#,
r#"
pub struct S { }
@@ -250,13 +243,13 @@ pub async fn f(s: &S) { s.f2() }"#,
fn does_not_apply_on_function_with_a_nested_await_expr() {
check_assist_not_applicable(
unnecessary_async,
- "async f$0n f() { if true { loop { f2().await } } }",
+ "async$0 fn f() { if true { loop { f2().await } } }",
)
}
#[test]
- fn does_not_apply_when_not_on_prototype() {
- check_assist_not_applicable(unnecessary_async, "pub async fn f() { $0f2() }")
+ fn does_not_apply_when_not_on_async_token() {
+ check_assist_not_applicable(unnecessary_async, "pub async fn$0 f() { f2() }")
}
#[test]
diff --git a/crates/ide-assists/src/handlers/wrap_return_type.rs b/crates/ide-assists/src/handlers/wrap_return_type.rs
index 2d918a5b1c..658600cd2d 100644
--- a/crates/ide-assists/src/handlers/wrap_return_type.rs
+++ b/crates/ide-assists/src/handlers/wrap_return_type.rs
@@ -198,7 +198,7 @@ fn wrapper_alias(
);
ctx.sema.resolve_mod_path(ret_type.syntax(), &wrapper_path).and_then(|def| {
- def.filter_map(|def| match def.as_module_def()? {
+ def.filter_map(|def| match def.into_module_def() {
hir::ModuleDef::TypeAlias(alias) => {
let enum_ty = alias.ty(ctx.db()).as_adt()?.as_enum()?;
(&enum_ty == core_wrapper).then_some(alias)
diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs
index 87c3d166ee..78fdfba6a0 100644
--- a/crates/ide-assists/src/tests/generated.rs
+++ b/crates/ide-assists/src/tests/generated.rs
@@ -3280,7 +3280,7 @@ fn doctest_unnecessary_async() {
check_doc_test(
"unnecessary_async",
r#####"
-pub async f$0n foo() {}
+pub asy$0nc fn foo() {}
pub async fn bar() { foo().await }
"#####,
r#####"
diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs
index f2c360a9d5..229ce7723b 100644
--- a/crates/ide-completion/src/completions/dot.rs
+++ b/crates/ide-completion/src/completions/dot.rs
@@ -205,7 +205,7 @@ impl S {
fn foo(s: S) { s.$0 }
"#,
expect![[r#"
- fd foo u32
+ fd foo u32
me bar() fn(&self)
"#]],
);
@@ -259,7 +259,7 @@ impl S {
"#,
expect![[r#"
fd the_field (u32,)
- me foo() fn(self)
+ me foo() fn(self)
"#]],
)
}
@@ -275,7 +275,7 @@ impl A {
"#,
expect![[r#"
fd the_field (u32, i32)
- me foo() fn(&self)
+ me foo() fn(&self)
"#]],
)
}
@@ -536,7 +536,7 @@ impl A {
}
"#,
expect![[r#"
- fd pub_field u32
+ fd pub_field u32
me pub_method() fn(&self)
"#]],
)
@@ -550,7 +550,7 @@ union U { field: u8, other: u16 }
fn foo(u: U) { u.$0 }
"#,
expect![[r#"
- fd field u8
+ fd field u8
fd other u16
"#]],
);
@@ -725,8 +725,8 @@ fn test(a: A) {
}
"#,
expect![[r#"
- fd another u32
- fd field u8
+ fd another u32
+ fd field u8
me deref() (use core::ops::Deref) fn(&self) -> &<Self as Deref>::Target
"#]],
);
@@ -748,8 +748,8 @@ fn test(a: A) {
}
"#,
expect![[r#"
- fd 0 u8
- fd 1 u32
+ fd 0 u8
+ fd 1 u32
me deref() (use core::ops::Deref) fn(&self) -> &<Self as Deref>::Target
"#]],
);
@@ -770,8 +770,8 @@ fn test(a: A) {
}
"#,
expect![[r#"
- fd 0 u8
- fd 1 u32
+ fd 0 u8
+ fd 1 u32
me deref() (use core::ops::Deref) fn(&self) -> &<Self as Deref>::Target
"#]],
);
@@ -964,12 +964,12 @@ struct Foo { field: i32 }
impl Foo { fn foo(&self) { $0 } }"#,
expect![[r#"
- fd self.field i32
+ fd self.field i32
me self.foo() fn(&self)
- lc self &Foo
- sp Self Foo
- st Foo Foo
- bt u32 u32
+ lc self &Foo
+ sp Self Foo
+ st Foo Foo
+ bt u32 u32
"#]],
);
check(
@@ -978,12 +978,12 @@ struct Foo(i32);
impl Foo { fn foo(&mut self) { $0 } }"#,
expect![[r#"
- fd self.0 i32
+ fd self.0 i32
me self.foo() fn(&mut self)
- lc self &mut Foo
- sp Self Foo
- st Foo Foo
- bt u32 u32
+ lc self &mut Foo
+ sp Self Foo
+ st Foo Foo
+ bt u32 u32
"#]],
);
}
@@ -1106,7 +1106,7 @@ fn test(a: A) {
}
"#,
expect![[r#"
- fd 0 u8
+ fd 0 u8
me deref() (use core::ops::Deref) fn(&self) -> &<Self as Deref>::Target
"#]],
);
@@ -1162,7 +1162,7 @@ impl<F: core::ops::Deref<Target = impl Bar>> Foo<F> {
}
"#,
expect![[r#"
- fd foo &u8
+ fd foo &u8
me foobar() fn(&self)
"#]],
);
@@ -1199,8 +1199,8 @@ impl<B: Bar, F: core::ops::Deref<Target = B>> Foo<F> {
}
"#,
expect![[r#"
- fd foo &u8
- "#]],
+ fd foo &u8
+ "#]],
);
}
diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs
index c38a8ef29b..80d72b460f 100644
--- a/crates/ide-completion/src/completions/item_list/trait_impl.rs
+++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -537,10 +537,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self T
- st T T
+ sp Self T
+ st T T
tt Test
- bt u32 u32
+ bt u32 u32
"#]],
);
@@ -646,10 +646,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self T
- st T T
+ sp Self T
+ st T T
tt Test
- bt u32 u32
+ bt u32 u32
"#]],
);
@@ -663,10 +663,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self T
- st T T
+ sp Self T
+ st T T
tt Test
- bt u32 u32
+ bt u32 u32
"#]],
);
@@ -682,10 +682,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self T
- st T T
+ sp Self T
+ st T T
tt Test
- bt u32 u32
+ bt u32 u32
"#]],
);
@@ -730,10 +730,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self T
- st T T
+ sp Self T
+ st T T
tt Test
- bt u32 u32
+ bt u32 u32
"#]],
);
diff --git a/crates/ide-completion/src/completions/keyword.rs b/crates/ide-completion/src/completions/keyword.rs
index 71ca6e9949..4700ed6c1a 100644
--- a/crates/ide-completion/src/completions/keyword.rs
+++ b/crates/ide-completion/src/completions/keyword.rs
@@ -78,19 +78,19 @@ fn foo(a: A) { a.$0 }
"#,
expect![[r#"
me into_future() (as IntoFuture) fn(self) -> <Self as IntoFuture>::IntoFuture
- kw await expr.await
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn let let
- sn letm let mut
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ kw await expr.await
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
);
@@ -105,19 +105,19 @@ fn foo() {
"#,
expect![[r#"
me into_future() (use core::future::IntoFuture) fn(self) -> <Self as IntoFuture>::IntoFuture
- kw await expr.await
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn let let
- sn letm let mut
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ kw await expr.await
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
);
}
@@ -134,19 +134,19 @@ fn foo(a: A) { a.$0 }
"#,
expect![[r#"
me into_future() (as IntoFuture) fn(self) -> <Self as IntoFuture>::IntoFuture
- kw await expr.await
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn let let
- sn letm let mut
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ kw await expr.await
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
);
}
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs
index 495f82da86..7b57eea052 100644
--- a/crates/ide-completion/src/completions/postfix.rs
+++ b/crates/ide-completion/src/completions/postfix.rs
@@ -423,21 +423,21 @@ fn main() {
}
"#,
expect![[r#"
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn if if expr {}
- sn let let
- sn letm let mut
- sn match match expr {}
- sn not !expr
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
- sn while while expr {}
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn if if expr {}
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn not !expr
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
+ sn while while expr {}
"#]],
);
}
@@ -456,19 +456,19 @@ fn main() {
}
"#,
expect![[r#"
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn if if expr {}
- sn match match expr {}
- sn not !expr
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
- sn while while expr {}
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn if if expr {}
+ sn match match expr {}
+ sn not !expr
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
+ sn while while expr {}
"#]],
);
}
@@ -483,18 +483,18 @@ fn main() {
}
"#,
expect![[r#"
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn let let
- sn letm let mut
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
)
}
@@ -509,21 +509,21 @@ fn main() {
}
"#,
expect![[r#"
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn if if expr {}
- sn let let
- sn letm let mut
- sn match match expr {}
- sn not !expr
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
- sn while while expr {}
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn if if expr {}
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn not !expr
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
+ sn while while expr {}
"#]],
);
}
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 3a66170633..f8d403122d 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -718,7 +718,7 @@ impl<'a> CompletionContext<'a> {
expected: (expected_type, expected_name),
qualifier_ctx,
token,
- offset,
+ original_offset,
} = expand_and_analyze(
&sema,
original_file.syntax().clone(),
@@ -728,7 +728,7 @@ impl<'a> CompletionContext<'a> {
)?;
// adjust for macro input, this still fails if there is no token written yet
- let scope = sema.scope_at_offset(&token.parent()?, offset)?;
+ let scope = sema.scope_at_offset(&token.parent()?, original_offset)?;
let krate = scope.krate();
let module = scope.module();
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index 3b7898b9e8..1c4cbb25b1 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -22,10 +22,14 @@ use crate::context::{
COMPLETION_MARKER,
};
+#[derive(Debug)]
struct ExpansionResult {
original_file: SyntaxNode,
speculative_file: SyntaxNode,
- offset: TextSize,
+ /// The offset in the original file.
+ original_offset: TextSize,
+ /// The offset in the speculatively expanded file.
+ speculative_offset: TextSize,
fake_ident_token: SyntaxToken,
derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
}
@@ -36,7 +40,8 @@ pub(super) struct AnalysisResult {
pub(super) qualifier_ctx: QualifierCtx,
/// the original token of the expanded file
pub(super) token: SyntaxToken,
- pub(super) offset: TextSize,
+ /// The offset in the original file.
+ pub(super) original_offset: TextSize,
}
pub(super) fn expand_and_analyze(
@@ -54,226 +59,344 @@ pub(super) fn expand_and_analyze(
// make the offset point to the start of the original token, as that is what the
// intermediate offsets calculated in expansion always points to
let offset = offset - relative_offset;
- let expansion =
- expand(sema, original_file, speculative_file, offset, fake_ident_token, relative_offset);
+ let expansion = expand(
+ sema,
+ original_file.clone(),
+ speculative_file.clone(),
+ offset,
+ fake_ident_token.clone(),
+ relative_offset,
+ )
+ .unwrap_or(ExpansionResult {
+ original_file,
+ speculative_file,
+ original_offset: offset,
+ speculative_offset: fake_ident_token.text_range().start(),
+ fake_ident_token,
+ derive_ctx: None,
+ });
// add the relative offset back, so that left_biased finds the proper token
- let offset = expansion.offset + relative_offset;
- let token = expansion.original_file.token_at_offset(offset).left_biased()?;
+ let original_offset = expansion.original_offset + relative_offset;
+ let token = expansion.original_file.token_at_offset(original_offset).left_biased()?;
analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| {
- AnalysisResult { analysis, expected, qualifier_ctx, token, offset }
+ AnalysisResult { analysis, expected, qualifier_ctx, token, original_offset }
})
}
/// Expand attributes and macro calls at the current cursor position for both the original file
/// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
/// and speculative states stay in sync.
+///
+/// We do this by recursively expanding all macros and picking the best possible match. We cannot just
+/// choose the first expansion each time because macros can expand to something that does not include
+/// our completion marker, e.g.:
+/// ```
+/// macro_rules! helper { ($v:ident) => {} }
+/// macro_rules! my_macro {
+/// ($v:ident) => {
+/// helper!($v);
+/// $v
+/// };
+/// }
+///
+/// my_macro!(complete_me_here)
+/// ```
+/// If we only expanded the first thing we encountered (which in fact this method used to do), we would
+/// be unable to complete here, because we would be walking directly into the void. So we instead try
+/// *every* possible path.
+///
+/// This can also create discrepancies between the speculative and real expansions: because we insert
+/// tokens, we insert characters, which means if we try the second occurrence it may not be at the same
+/// position in the original and speculative file. We take an educated guess here, and for each token
+/// that we check, we subtract `COMPLETION_MARKER.len()`. This may not be accurate because proc macros
+/// can insert the text of the completion marker in other places while removing the span, but this is
+/// the best we can do.
fn expand(
sema: &Semantics<'_, RootDatabase>,
- mut original_file: SyntaxNode,
- mut speculative_file: SyntaxNode,
- mut offset: TextSize,
- mut fake_ident_token: SyntaxToken,
+ original_file: SyntaxNode,
+ speculative_file: SyntaxNode,
+ original_offset: TextSize,
+ fake_ident_token: SyntaxToken,
relative_offset: TextSize,
-) -> ExpansionResult {
+) -> Option<ExpansionResult> {
let _p = tracing::info_span!("CompletionContext::expand").entered();
- let mut derive_ctx = None;
-
- 'expansion: loop {
- let parent_item =
- |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
- let ancestor_items = iter::successors(
- Option::zip(
- find_node_at_offset::<ast::Item>(&original_file, offset),
- find_node_at_offset::<ast::Item>(&speculative_file, offset),
+
+ if !sema.might_be_inside_macro_call(&fake_ident_token)
+ && original_file
+ .token_at_offset(original_offset + relative_offset)
+ .right_biased()
+ .is_some_and(|original_token| !sema.might_be_inside_macro_call(&original_token))
+ {
+ // Recursion base case.
+ return Some(ExpansionResult {
+ original_file,
+ speculative_file,
+ original_offset,
+ speculative_offset: fake_ident_token.text_range().start(),
+ fake_ident_token,
+ derive_ctx: None,
+ });
+ }
+
+ let parent_item =
+ |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
+ let ancestor_items = iter::successors(
+ Option::zip(
+ find_node_at_offset::<ast::Item>(&original_file, original_offset),
+ find_node_at_offset::<ast::Item>(
+ &speculative_file,
+ fake_ident_token.text_range().start(),
),
- |(a, b)| parent_item(a).zip(parent_item(b)),
- );
-
- // first try to expand attributes as these are always the outermost macro calls
- 'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
- match (
- sema.expand_attr_macro(&actual_item),
- sema.speculative_expand_attr_macro(
- &actual_item,
- &item_with_fake_ident,
- fake_ident_token.clone(),
- ),
- ) {
- // maybe parent items have attributes, so continue walking the ancestors
- (None, None) => continue 'ancestors,
- // successful expansions
- (
- Some(ExpandResult { value: actual_expansion, err: _ }),
- Some((fake_expansion, fake_mapped_token)),
- ) => {
- let new_offset = fake_mapped_token.text_range().start();
- if new_offset + relative_offset > actual_expansion.text_range().end() {
- // offset outside of bounds from the original expansion,
- // stop here to prevent problems from happening
- break 'expansion;
- }
- original_file = actual_expansion;
- speculative_file = fake_expansion;
- fake_ident_token = fake_mapped_token;
- offset = new_offset;
- continue 'expansion;
+ ),
+ |(a, b)| parent_item(a).zip(parent_item(b)),
+ );
+
+ // first try to expand attributes as these are always the outermost macro calls
+ 'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
+ match (
+ sema.expand_attr_macro(&actual_item),
+ sema.speculative_expand_attr_macro(
+ &actual_item,
+ &item_with_fake_ident,
+ fake_ident_token.clone(),
+ ),
+ ) {
+ // maybe parent items have attributes, so continue walking the ancestors
+ (None, None) => continue 'ancestors,
+ // successful expansions
+ (
+ Some(ExpandResult { value: actual_expansion, err: _ }),
+ Some((fake_expansion, fake_mapped_tokens)),
+ ) => {
+ let mut accumulated_offset_from_fake_tokens = 0;
+ let actual_range = actual_expansion.text_range().end();
+ let result = fake_mapped_tokens
+ .into_iter()
+ .filter_map(|(fake_mapped_token, rank)| {
+ let accumulated_offset = accumulated_offset_from_fake_tokens;
+ if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
+ // Proc macros can make the same span with different text, we don't
+ // want them to participate in completion because the macro author probably
+ // didn't intend them to.
+ return None;
+ }
+ accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();
+
+ let new_offset = fake_mapped_token.text_range().start()
+ - TextSize::new(accumulated_offset as u32);
+ if new_offset + relative_offset > actual_range {
+ // offset outside of bounds from the original expansion,
+ // stop here to prevent problems from happening
+ return None;
+ }
+ let result = expand(
+ sema,
+ actual_expansion.clone(),
+ fake_expansion.clone(),
+ new_offset,
+ fake_mapped_token,
+ relative_offset,
+ )?;
+ Some((result, rank))
+ })
+ .min_by_key(|(_, rank)| *rank)
+ .map(|(result, _)| result);
+ if result.is_some() {
+ return result;
}
- // exactly one expansion failed, inconsistent state so stop expanding completely
- _ => break 'expansion,
}
+ // exactly one expansion failed, inconsistent state so stop expanding completely
+ _ => break 'ancestors,
}
+ }
- // No attributes have been expanded, so look for macro_call! token trees or derive token trees
- let orig_tt = match ancestors_at_offset(&original_file, offset)
- .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
- .last()
- {
- Some(it) => it,
- None => break 'expansion,
- };
- let spec_tt = match ancestors_at_offset(&speculative_file, offset)
- .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
- .last()
- {
- Some(it) => it,
- None => break 'expansion,
- };
-
- let (tts, attrs) = match (orig_tt, spec_tt) {
- (Either::Left(orig_tt), Either::Left(spec_tt)) => {
- let attrs = orig_tt
- .syntax()
- .parent()
- .and_then(ast::Meta::cast)
- .and_then(|it| it.parent_attr())
- .zip(
- spec_tt
- .syntax()
- .parent()
- .and_then(ast::Meta::cast)
- .and_then(|it| it.parent_attr()),
- );
- (Some((orig_tt, spec_tt)), attrs)
- }
- (Either::Right(orig_path), Either::Right(spec_path)) => {
- (None, orig_path.parent_attr().zip(spec_path.parent_attr()))
- }
- _ => break 'expansion,
- };
+ // No attributes have been expanded, so look for macro_call! token trees or derive token trees
+ let orig_tt = ancestors_at_offset(&original_file, original_offset)
+ .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
+ .last()?;
+ let spec_tt = ancestors_at_offset(&speculative_file, fake_ident_token.text_range().start())
+ .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
+ .last()?;
+
+ let (tts, attrs) = match (orig_tt, spec_tt) {
+ (Either::Left(orig_tt), Either::Left(spec_tt)) => {
+ let attrs = orig_tt
+ .syntax()
+ .parent()
+ .and_then(ast::Meta::cast)
+ .and_then(|it| it.parent_attr())
+ .zip(
+ spec_tt
+ .syntax()
+ .parent()
+ .and_then(ast::Meta::cast)
+ .and_then(|it| it.parent_attr()),
+ );
+ (Some((orig_tt, spec_tt)), attrs)
+ }
+ (Either::Right(orig_path), Either::Right(spec_path)) => {
+ (None, orig_path.parent_attr().zip(spec_path.parent_attr()))
+ }
+ _ => return None,
+ };
- // Expand pseudo-derive expansion aka `derive(Debug$0)`
- if let Some((orig_attr, spec_attr)) = attrs {
- if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
- sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
- sema.speculative_expand_derive_as_pseudo_attr_macro(
- &orig_attr,
- &spec_attr,
- fake_ident_token.clone(),
- ),
- ) {
- derive_ctx = Some((
- actual_expansion,
- fake_expansion,
- fake_mapped_token.text_range().start(),
- orig_attr,
- ));
- break 'expansion;
+ // Expand pseudo-derive expansion aka `derive(Debug$0)`
+ if let Some((orig_attr, spec_attr)) = attrs {
+ if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) = (
+ sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
+ sema.speculative_expand_derive_as_pseudo_attr_macro(
+ &orig_attr,
+ &spec_attr,
+ fake_ident_token.clone(),
+ ),
+ ) {
+ if let Some((fake_mapped_token, _)) =
+ fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank)
+ {
+ return Some(ExpansionResult {
+ original_file,
+ speculative_file,
+ original_offset,
+ speculative_offset: fake_ident_token.text_range().start(),
+ fake_ident_token,
+ derive_ctx: Some((
+ actual_expansion,
+ fake_expansion,
+ fake_mapped_token.text_range().start(),
+ orig_attr,
+ )),
+ });
}
+ }
- if let Some(spec_adt) =
- spec_attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
- ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
- ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
- ast::Item::Union(it) => Some(ast::Adt::Union(it)),
- _ => None,
- })
- {
- // might be the path of derive helper or a token tree inside of one
- if let Some(helpers) = sema.derive_helper(&orig_attr) {
- for (_mac, file) in helpers {
- if let Some((fake_expansion, fake_mapped_token)) = sema
- .speculative_expand_raw(
- file,
- spec_adt.syntax(),
- fake_ident_token.clone(),
- )
- {
- // we are inside a derive helper token tree, treat this as being inside
- // the derive expansion
- let actual_expansion = sema.parse_or_expand(file.into());
- let new_offset = fake_mapped_token.text_range().start();
- if new_offset + relative_offset > actual_expansion.text_range().end() {
- // offset outside of bounds from the original expansion,
- // stop here to prevent problems from happening
- break 'expansion;
- }
- original_file = actual_expansion;
- speculative_file = fake_expansion;
- fake_ident_token = fake_mapped_token;
- offset = new_offset;
- continue 'expansion;
+ if let Some(spec_adt) =
+ spec_attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
+ ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
+ ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
+ ast::Item::Union(it) => Some(ast::Adt::Union(it)),
+ _ => None,
+ })
+ {
+ // might be the path of derive helper or a token tree inside of one
+ if let Some(helpers) = sema.derive_helper(&orig_attr) {
+ for (_mac, file) in helpers {
+ if let Some((fake_expansion, fake_mapped_tokens)) = sema.speculative_expand_raw(
+ file,
+ spec_adt.syntax(),
+ fake_ident_token.clone(),
+ ) {
+ // we are inside a derive helper token tree, treat this as being inside
+ // the derive expansion
+ let actual_expansion = sema.parse_or_expand(file.into());
+ let mut accumulated_offset_from_fake_tokens = 0;
+ let actual_range = actual_expansion.text_range().end();
+ let result = fake_mapped_tokens
+ .into_iter()
+ .filter_map(|(fake_mapped_token, rank)| {
+ let accumulated_offset = accumulated_offset_from_fake_tokens;
+ if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
+ // Proc macros can make the same span with different text, we don't
+ // want them to participate in completion because the macro author probably
+ // didn't intend them to.
+ return None;
+ }
+ accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();
+
+ let new_offset = fake_mapped_token.text_range().start()
+ - TextSize::new(accumulated_offset as u32);
+ if new_offset + relative_offset > actual_range {
+ // offset outside of bounds from the original expansion,
+ // stop here to prevent problems from happening
+ return None;
+ }
+ let result = expand(
+ sema,
+ actual_expansion.clone(),
+ fake_expansion.clone(),
+ new_offset,
+ fake_mapped_token,
+ relative_offset,
+ )?;
+ Some((result, rank))
+ })
+ .min_by_key(|(_, rank)| *rank)
+ .map(|(result, _)| result);
+ if result.is_some() {
+ return result;
}
}
}
}
- // at this point we won't have any more successful expansions, so stop
- break 'expansion;
}
+ // at this point we won't have any more successful expansions, so stop
+ return None;
+ }
- // Expand fn-like macro calls
- let Some((orig_tt, spec_tt)) = tts else { break 'expansion };
- if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
- orig_tt.syntax().parent().and_then(ast::MacroCall::cast),
- spec_tt.syntax().parent().and_then(ast::MacroCall::cast),
- ) {
- let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
- let mac_call_path1 =
- macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
+ // Expand fn-like macro calls
+ let (orig_tt, spec_tt) = tts?;
+ let (actual_macro_call, macro_call_with_fake_ident) = (
+ orig_tt.syntax().parent().and_then(ast::MacroCall::cast)?,
+ spec_tt.syntax().parent().and_then(ast::MacroCall::cast)?,
+ );
+ let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
+ let mac_call_path1 = macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
- // inconsistent state, stop expanding
- if mac_call_path0 != mac_call_path1 {
- break 'expansion;
- }
- let speculative_args = match macro_call_with_fake_ident.token_tree() {
- Some(tt) => tt,
- None => break 'expansion,
- };
+ // inconsistent state, stop expanding
+ if mac_call_path0 != mac_call_path1 {
+ return None;
+ }
+ let speculative_args = macro_call_with_fake_ident.token_tree()?;
+
+ match (
+ sema.expand_macro_call(&actual_macro_call),
+ sema.speculative_expand_macro_call(
+ &actual_macro_call,
+ &speculative_args,
+ fake_ident_token.clone(),
+ ),
+ ) {
+ // successful expansions
+ (Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) => {
+ let mut accumulated_offset_from_fake_tokens = 0;
+ let actual_range = actual_expansion.text_range().end();
+ fake_mapped_tokens
+ .into_iter()
+ .filter_map(|(fake_mapped_token, rank)| {
+ let accumulated_offset = accumulated_offset_from_fake_tokens;
+ if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
+ // Proc macros can make the same span with different text, we don't
+ // want them to participate in completion because the macro author probably
+ // didn't intend them to.
+ return None;
+ }
+ accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();
- match (
- sema.expand_macro_call(&actual_macro_call),
- sema.speculative_expand_macro_call(
- &actual_macro_call,
- &speculative_args,
- fake_ident_token.clone(),
- ),
- ) {
- // successful expansions
- (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
- let new_offset = fake_mapped_token.text_range().start();
- if new_offset + relative_offset > actual_expansion.text_range().end() {
+ let new_offset = fake_mapped_token.text_range().start()
+ - TextSize::new(accumulated_offset as u32);
+ if new_offset + relative_offset > actual_range {
// offset outside of bounds from the original expansion,
// stop here to prevent problems from happening
- break 'expansion;
+ return None;
}
- original_file = actual_expansion;
- speculative_file = fake_expansion;
- fake_ident_token = fake_mapped_token;
- offset = new_offset;
- continue 'expansion;
- }
- // at least on expansion failed, we won't have anything to expand from this point
- // onwards so break out
- _ => break 'expansion,
- }
+ let result = expand(
+ sema,
+ actual_expansion.clone(),
+ fake_expansion.clone(),
+ new_offset,
+ fake_mapped_token,
+ relative_offset,
+ )?;
+ Some((result, rank))
+ })
+ .min_by_key(|(_, rank)| *rank)
+ .map(|(result, _)| result)
}
-
- // none of our states have changed so stop the loop
- break 'expansion;
+ // at least one expansion failed, we won't have anything to expand from this point
+ // onwards so break out
+ _ => None,
}
-
- ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx }
}
/// Fill the completion context, this is what does semantic reasoning about the surrounding context
@@ -285,8 +408,14 @@ fn analyze(
self_token: &SyntaxToken,
) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
let _p = tracing::info_span!("CompletionContext::analyze").entered();
- let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
- expansion_result;
+ let ExpansionResult {
+ original_file,
+ speculative_file,
+ original_offset: _,
+ speculative_offset,
+ fake_ident_token,
+ derive_ctx,
+ } = expansion_result;
// Overwrite the path kind for derives
if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
@@ -294,7 +423,8 @@ fn analyze(
find_node_at_offset(&file_with_fake_ident, offset)
{
let parent = name_ref.syntax().parent()?;
- let (mut nameref_ctx, _) = classify_name_ref(sema, &original_file, name_ref, parent)?;
+ let (mut nameref_ctx, _) =
+ classify_name_ref(sema, &original_file, name_ref, offset, parent)?;
if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
path_ctx.kind = PathKind::Derive {
existing_derives: sema
@@ -314,7 +444,7 @@ fn analyze(
return None;
}
- let Some(name_like) = find_node_at_offset(&speculative_file, offset) else {
+ let Some(name_like) = find_node_at_offset(&speculative_file, speculative_offset) else {
let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
CompletionAnalysis::String { original, expanded: ast::String::cast(self_token.clone()) }
} else {
@@ -350,8 +480,13 @@ fn analyze(
}
ast::NameLike::NameRef(name_ref) => {
let parent = name_ref.syntax().parent()?;
- let (nameref_ctx, qualifier_ctx) =
- classify_name_ref(sema, &original_file, name_ref, parent)?;
+ let (nameref_ctx, qualifier_ctx) = classify_name_ref(
+ sema,
+ &original_file,
+ name_ref,
+ expansion_result.original_offset,
+ parent,
+ )?;
if let NameRefContext {
kind:
@@ -636,9 +771,10 @@ fn classify_name_ref(
sema: &Semantics<'_, RootDatabase>,
original_file: &SyntaxNode,
name_ref: ast::NameRef,
+ original_offset: TextSize,
parent: SyntaxNode,
) -> Option<(NameRefContext, QualifierCtx)> {
- let nameref = find_node_at_offset(original_file, name_ref.syntax().text_range().start());
+ let nameref = find_node_at_offset(original_file, original_offset);
let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
@@ -760,7 +896,7 @@ fn classify_name_ref(
// We do not want to generate path completions when we are sandwiched between an item decl signature and its body.
// ex. trait Foo $0 {}
// in these cases parser recovery usually kicks in for our inserted identifier, causing it
- // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block
+ // to either be parsed as an ExprStmt or an ItemRecovery, depending on whether it is in a block
// expression or an item list.
// The following code checks if the body is missing, if it is we either cut off the body
// from the item or it was missing in the first place
@@ -1088,15 +1224,10 @@ fn classify_name_ref(
PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
};
- let mut kind_macro_call = |it: ast::MacroCall| {
- path_ctx.has_macro_bang = it.excl_token().is_some();
- let parent = it.syntax().parent()?;
- // Any path in an item list will be treated as a macro call by the parser
+ let kind_item = |it: &SyntaxNode| {
+ let parent = it.parent()?;
let kind = match_ast! {
match parent {
- ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
- ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
- ast::MacroType(ty) => make_path_kind_type(ty.into()),
ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
Some(it) => match_ast! {
@@ -1126,6 +1257,23 @@ fn classify_name_ref(
};
Some(kind)
};
+
+ let mut kind_macro_call = |it: ast::MacroCall| {
+ path_ctx.has_macro_bang = it.excl_token().is_some();
+ let parent = it.syntax().parent()?;
+ if let Some(kind) = kind_item(it.syntax()) {
+ return Some(kind);
+ }
+ let kind = match_ast! {
+ match parent {
+ ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
+ ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
+ ast::MacroType(ty) => make_path_kind_type(ty.into()),
+ _ => return None,
+ }
+ };
+ Some(kind)
+ };
let make_path_kind_attr = |meta: ast::Meta| {
let attr = meta.parent_attr()?;
let kind = attr.kind();
@@ -1153,94 +1301,98 @@ fn classify_name_ref(
// Infer the path kind
let parent = path.syntax().parent()?;
- let kind = match_ast! {
- match parent {
- ast::PathType(it) => make_path_kind_type(it.into()),
- ast::PathExpr(it) => {
- if let Some(p) = it.syntax().parent() {
- let p_kind = p.kind();
- // The syntax node of interest, for which we want to check whether
- // it is sandwiched between an item decl signature and its body.
- let probe = if ast::ExprStmt::can_cast(p_kind) {
- Some(p)
- } else if ast::StmtList::can_cast(p_kind) {
- Some(it.syntax().clone())
- } else {
- None
- };
- if let Some(kind) = probe.and_then(inbetween_body_and_decl_check) {
- return Some(make_res(NameRefKind::Keyword(kind)));
- }
- }
+ let kind = 'find_kind: {
+ if parent.kind() == SyntaxKind::ERROR {
+ if let Some(kind) = inbetween_body_and_decl_check(parent.clone()) {
+ return Some(make_res(NameRefKind::Keyword(kind)));
+ }
- path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
+ break 'find_kind kind_item(&parent)?;
+ }
+ match_ast! {
+ match parent {
+ ast::PathType(it) => make_path_kind_type(it.into()),
+ ast::PathExpr(it) => {
+ if let Some(p) = it.syntax().parent() {
+ let p_kind = p.kind();
+ // The syntax node of interest, for which we want to check whether
+ // it is sandwiched between an item decl signature and its body.
+ let probe = if ast::ExprStmt::can_cast(p_kind) {
+ Some(p)
+ } else if ast::StmtList::can_cast(p_kind) {
+ Some(it.syntax().clone())
+ } else {
+ None
+ };
+ if let Some(kind) = probe.and_then(inbetween_body_and_decl_check) {
+ return Some(make_res(NameRefKind::Keyword(kind)));
+ }
+ }
- make_path_kind_expr(it.into())
- },
- ast::TupleStructPat(it) => {
- path_ctx.has_call_parens = true;
- PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
- },
- ast::RecordPat(it) => {
- path_ctx.has_call_parens = true;
- PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
- },
- ast::PathPat(it) => {
- PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
- },
- ast::MacroCall(it) => {
- // A macro call in this position is usually a result of parsing recovery, so check that
- if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
- return Some(make_res(NameRefKind::Keyword(kind)));
- }
+ path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
- kind_macro_call(it)?
- },
- ast::Meta(meta) => make_path_kind_attr(meta)?,
- ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
- ast::UseTree(_) => PathKind::Use,
- // completing inside a qualifier
- ast::Path(parent) => {
- path_ctx.parent = Some(parent.clone());
- let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
- match_ast! {
- match parent {
- ast::PathType(it) => make_path_kind_type(it.into()),
- ast::PathExpr(it) => {
- path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
+ make_path_kind_expr(it.into())
+ },
+ ast::TupleStructPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::RecordPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::PathPat(it) => {
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
+ },
+ ast::MacroCall(it) => {
+ kind_macro_call(it)?
+ },
+ ast::Meta(meta) => make_path_kind_attr(meta)?,
+ ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
+ ast::UseTree(_) => PathKind::Use,
+ // completing inside a qualifier
+ ast::Path(parent) => {
+ path_ctx.parent = Some(parent.clone());
+ let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
+ match_ast! {
+ match parent {
+ ast::PathType(it) => make_path_kind_type(it.into()),
+ ast::PathExpr(it) => {
+ path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
- make_path_kind_expr(it.into())
- },
- ast::TupleStructPat(it) => {
- path_ctx.has_call_parens = true;
- PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
- },
- ast::RecordPat(it) => {
- path_ctx.has_call_parens = true;
- PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
- },
- ast::PathPat(it) => {
- PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
- },
- ast::MacroCall(it) => {
- kind_macro_call(it)?
- },
- ast::Meta(meta) => make_path_kind_attr(meta)?,
- ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
- ast::UseTree(_) => PathKind::Use,
- ast::RecordExpr(it) => make_path_kind_expr(it.into()),
- _ => return None,
+ make_path_kind_expr(it.into())
+ },
+ ast::TupleStructPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::RecordPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::PathPat(it) => {
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
+ },
+ ast::MacroCall(it) => {
+ kind_macro_call(it)?
+ },
+ ast::Meta(meta) => make_path_kind_attr(meta)?,
+ ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
+ ast::UseTree(_) => PathKind::Use,
+ ast::RecordExpr(it) => make_path_kind_expr(it.into()),
+ _ => return None,
+ }
}
- }
- },
- ast::RecordExpr(it) => {
- // A record expression in this position is usually a result of parsing recovery, so check that
- if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
- return Some(make_res(NameRefKind::Keyword(kind)));
- }
- make_path_kind_expr(it.into())
- },
- _ => return None,
+ },
+ ast::RecordExpr(it) => {
+ // A record expression in this position is usually a result of parsing recovery, so check that
+ if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
+ return Some(make_res(NameRefKind::Keyword(kind)));
+ }
+ make_path_kind_expr(it.into())
+ },
+ _ => return None,
+ }
}
};
@@ -1320,9 +1472,7 @@ fn classify_name_ref(
}
})
}
- PathKind::Item { .. } => {
- parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind()))
- }
+ PathKind::Item { .. } => parent.ancestors().find(|it| it.kind() == SyntaxKind::ERROR),
_ => None,
};
if let Some(top) = top_node {
diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs
index 8878fbbea3..9608eed99d 100644
--- a/crates/ide-completion/src/item.rs
+++ b/crates/ide-completion/src/item.rs
@@ -10,7 +10,7 @@ use ide_db::{
};
use itertools::Itertools;
use smallvec::SmallVec;
-use stdx::{impl_from, never};
+use stdx::{format_to, impl_from, never};
use syntax::{format_smolstr, Edition, SmolStr, TextRange, TextSize};
use crate::{
@@ -27,10 +27,7 @@ use crate::{
#[non_exhaustive]
pub struct CompletionItem {
/// Label in the completion pop up which identifies completion.
- pub label: SmolStr,
- /// Additional label details in the completion pop up that are
- /// displayed and aligned on the right side after the label.
- pub label_detail: Option<SmolStr>,
+ pub label: CompletionItemLabel,
/// Range of identifier that is being completed.
///
@@ -89,11 +86,23 @@ pub struct CompletionItem {
pub import_to_add: SmallVec<[(String, String); 1]>,
}
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
+pub struct CompletionItemLabel {
+ /// The primary label for the completion item.
+ pub primary: SmolStr,
+ /// The left detail for the completion item, usually rendered right next to the primary label.
+ pub detail_left: Option<String>,
+ /// The right detail for the completion item, usually rendered right aligned at the end of the completion item.
+ pub detail_right: Option<String>,
+}
// We use custom debug for CompletionItem to make snapshot tests more readable.
impl fmt::Debug for CompletionItem {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut s = f.debug_struct("CompletionItem");
- s.field("label", &self.label).field("source_range", &self.source_range);
+ s.field("label", &self.label.primary)
+ .field("detail_left", &self.label.detail_left)
+ .field("detail_right", &self.label.detail_right)
+ .field("source_range", &self.source_range);
if self.text_edit.len() == 1 {
let atom = self.text_edit.iter().next().unwrap();
s.field("delete", &atom.delete);
@@ -102,7 +111,7 @@ impl fmt::Debug for CompletionItem {
s.field("text_edit", &self.text_edit);
}
s.field("kind", &self.kind);
- if self.lookup() != self.label {
+ if self.lookup() != self.label.primary {
s.field("lookup", &self.lookup());
}
if let Some(detail) = &self.detail {
@@ -434,7 +443,7 @@ impl CompletionItem {
self.ref_match.map(|(mutability, offset)| {
(
- format!("&{}{}", mutability.as_keyword_for_ref(), self.label),
+ format!("&{}{}", mutability.as_keyword_for_ref(), self.label.primary),
ide_db::text_edit::Indel::insert(
offset,
format!("&{}", mutability.as_keyword_for_ref()),
@@ -488,13 +497,13 @@ impl Builder {
let _p = tracing::info_span!("item::Builder::build").entered();
let label = self.label;
- let mut label_detail = None;
let mut lookup = self.lookup.unwrap_or_else(|| label.clone());
let insert_text = self.insert_text.unwrap_or_else(|| label.to_string());
+ let mut detail_left = None;
if !self.doc_aliases.is_empty() {
let doc_aliases = self.doc_aliases.iter().join(", ");
- label_detail.replace(format_smolstr!(" (alias {doc_aliases})"));
+ detail_left = Some(format!("(alias {doc_aliases})"));
let lookup_doc_aliases = self
.doc_aliases
.iter()
@@ -516,16 +525,20 @@ impl Builder {
}
if let [import_edit] = &*self.imports_to_add {
// snippets can have multiple imports, but normal completions only have up to one
- label_detail.replace(format_smolstr!(
- "{} (use {})",
- label_detail.as_deref().unwrap_or_default(),
+ let detail_left = detail_left.get_or_insert_with(String::new);
+ format_to!(
+ detail_left,
+ "{}(use {})",
+ if detail_left.is_empty() { "" } else { " " },
import_edit.import_path.display(db, self.edition)
- ));
+ );
} else if let Some(trait_name) = self.trait_name {
- label_detail.replace(format_smolstr!(
- "{} (as {trait_name})",
- label_detail.as_deref().unwrap_or_default(),
- ));
+ let detail_left = detail_left.get_or_insert_with(String::new);
+ format_to!(
+ detail_left,
+ "{}(as {trait_name})",
+ if detail_left.is_empty() { "" } else { " " },
+ );
}
let text_edit = match self.text_edit {
@@ -546,8 +559,11 @@ impl Builder {
CompletionItem {
source_range: self.source_range,
- label,
- label_detail,
+ label: CompletionItemLabel {
+ primary: label,
+ detail_left,
+ detail_right: self.detail.clone(),
+ },
text_edit,
is_snippet: self.is_snippet,
detail: self.detail,
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index 0f00ad45f9..baa30b2863 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -748,9 +748,9 @@ mod tests {
let tag = it.kind.tag();
let relevance = display_relevance(it.relevance);
items.push(format!(
- "{tag} {}{} {relevance}\n",
- it.label,
- it.label_detail.clone().unwrap_or_default(),
+ "{tag} {} {} {relevance}\n",
+ it.label.primary,
+ it.label.detail_right.clone().unwrap_or_default(),
));
if let Some((label, _indel, relevance)) = it.ref_match() {
@@ -812,13 +812,13 @@ fn main() {
}
"#,
expect![[r#"
- st dep::test_mod_b::Struct {…} [type_could_unify]
- ex dep::test_mod_b::Struct { } [type_could_unify]
- st Struct (use dep::test_mod_b::Struct) [type_could_unify+requires_import]
- fn main() []
- fn test(…) []
- md dep []
- st Struct (use dep::test_mod_a::Struct) [requires_import]
+ st dep::test_mod_b::Struct {…} dep::test_mod_b::Struct { } [type_could_unify]
+ ex dep::test_mod_b::Struct { } [type_could_unify]
+ st Struct Struct [type_could_unify+requires_import]
+ fn main() fn() []
+ fn test(…) fn(Struct) []
+ md dep []
+ st Struct Struct [requires_import]
"#]],
);
}
@@ -852,11 +852,11 @@ fn main() {
}
"#,
expect![[r#"
- un Union (use dep::test_mod_b::Union) [type_could_unify+requires_import]
- fn main() []
- fn test(…) []
- md dep []
- en Union (use dep::test_mod_a::Union) [requires_import]
+ un Union Union [type_could_unify+requires_import]
+ fn main() fn() []
+ fn test(…) fn(Union) []
+ md dep []
+ en Union Union [requires_import]
"#]],
);
}
@@ -888,13 +888,13 @@ fn main() {
}
"#,
expect![[r#"
- ev dep::test_mod_b::Enum::variant [type_could_unify]
- ex dep::test_mod_b::Enum::variant [type_could_unify]
- en Enum (use dep::test_mod_b::Enum) [type_could_unify+requires_import]
- fn main() []
- fn test(…) []
- md dep []
- en Enum (use dep::test_mod_a::Enum) [requires_import]
+ ev dep::test_mod_b::Enum::variant dep::test_mod_b::Enum::variant [type_could_unify]
+ ex dep::test_mod_b::Enum::variant [type_could_unify]
+ en Enum Enum [type_could_unify+requires_import]
+ fn main() fn() []
+ fn test(…) fn(Enum) []
+ md dep []
+ en Enum Enum [requires_import]
"#]],
);
}
@@ -926,11 +926,11 @@ fn main() {
}
"#,
expect![[r#"
- ev dep::test_mod_b::Enum::Variant [type_could_unify]
- ex dep::test_mod_b::Enum::Variant [type_could_unify]
- fn main() []
- fn test(…) []
- md dep []
+ ev dep::test_mod_b::Enum::Variant dep::test_mod_b::Enum::Variant [type_could_unify]
+ ex dep::test_mod_b::Enum::Variant [type_could_unify]
+ fn main() fn() []
+ fn test(…) fn(Enum) []
+ md dep []
"#]],
);
}
@@ -958,11 +958,11 @@ fn main() {
}
"#,
expect![[r#"
- fn main() []
- fn test(…) []
- md dep []
- fn function (use dep::test_mod_a::function) [requires_import]
- fn function(…) (use dep::test_mod_b::function) [requires_import]
+ fn main() fn() []
+ fn test(…) fn(fn(usize) -> i32) []
+ md dep []
+ fn function fn(usize) -> i32 [requires_import]
+ fn function(…) fn(isize) -> i32 [requires_import]
"#]],
);
}
@@ -990,11 +990,11 @@ fn main() {
}
"#,
expect![[r#"
- ct CONST (use dep::test_mod_b::CONST) [type_could_unify+requires_import]
- fn main() []
- fn test(…) []
- md dep []
- ct CONST (use dep::test_mod_a::CONST) [requires_import]
+ ct CONST i32 [type_could_unify+requires_import]
+ fn main() fn() []
+ fn test(…) fn(i32) []
+ md dep []
+ ct CONST i64 [requires_import]
"#]],
);
}
@@ -1022,11 +1022,11 @@ fn main() {
}
"#,
expect![[r#"
- sc STATIC (use dep::test_mod_b::STATIC) [type_could_unify+requires_import]
- fn main() []
- fn test(…) []
- md dep []
- sc STATIC (use dep::test_mod_a::STATIC) [requires_import]
+ sc STATIC i32 [type_could_unify+requires_import]
+ fn main() fn() []
+ fn test(…) fn(i32) []
+ md dep []
+ sc STATIC i64 [requires_import]
"#]],
);
}
@@ -1058,7 +1058,7 @@ fn main() {
"#,
expect![[r#"
- me Function []
+ me Function fn(&self, i32) -> bool []
"#]],
);
}
@@ -1081,14 +1081,14 @@ fn func(input: Struct) { }
"#,
expect![[r#"
- st Struct [type]
- st Self [type]
- sp Self [type]
- st Struct [type]
- ex Struct [type]
- lc self [local]
- fn func(…) []
- me self.test() []
+ st Struct Struct [type]
+ st Self Self [type]
+ sp Self Struct [type]
+ st Struct Struct [type]
+ ex Struct [type]
+ lc self &Struct [local]
+ fn func(…) fn(Struct) []
+ me self.test() fn(&self) []
"#]],
);
}
@@ -1109,13 +1109,13 @@ fn main() {
}
"#,
expect![[r#"
- lc input [type+name+local]
- ex input [type]
- ex true [type]
- ex false [type]
- lc inputbad [local]
- fn main() []
- fn test(…) []
+ lc input bool [type+name+local]
+ ex input [type]
+ ex true [type]
+ ex false [type]
+ lc inputbad i32 [local]
+ fn main() fn() []
+ fn test(…) fn(bool) []
"#]],
);
}
@@ -1133,6 +1133,10 @@ fn main() { Foo::Fo$0 }
[
CompletionItem {
label: "Foo {…}",
+ detail_left: None,
+ detail_right: Some(
+ "Foo { x: i32, y: i32 }",
+ ),
source_range: 54..56,
delete: 54..56,
insert: "Foo { x: ${1:()}, y: ${2:()} }$0",
@@ -1161,6 +1165,10 @@ fn main() { Foo::Fo$0 }
[
CompletionItem {
label: "Foo(…)",
+ detail_left: None,
+ detail_right: Some(
+ "Foo(i32, i32)",
+ ),
source_range: 46..48,
delete: 46..48,
insert: "Foo(${1:()}, ${2:()})$0",
@@ -1189,6 +1197,10 @@ fn main() { fo$0 }
[
CompletionItem {
label: "foo(…)",
+ detail_left: None,
+ detail_right: Some(
+ "fn(u32, u32, T) -> (u32, T)",
+ ),
source_range: 68..70,
delete: 68..70,
insert: "foo(${1:a}, ${2:b}, ${3:t})$0",
@@ -1201,6 +1213,10 @@ fn main() { fo$0 }
},
CompletionItem {
label: "main()",
+ detail_left: None,
+ detail_right: Some(
+ "fn()",
+ ),
source_range: 68..70,
delete: 68..70,
insert: "main();$0",
@@ -1228,6 +1244,10 @@ fn main() { Foo::Fo$0 }
[
CompletionItem {
label: "Foo",
+ detail_left: None,
+ detail_right: Some(
+ "Foo",
+ ),
source_range: 35..37,
delete: 35..37,
insert: "Foo$0",
@@ -1260,6 +1280,10 @@ fn main() { let _: m::Spam = S$0 }
[
CompletionItem {
label: "main()",
+ detail_left: None,
+ detail_right: Some(
+ "fn()",
+ ),
source_range: 75..76,
delete: 75..76,
insert: "main();$0",
@@ -1271,6 +1295,8 @@ fn main() { let _: m::Spam = S$0 }
},
CompletionItem {
label: "m",
+ detail_left: None,
+ detail_right: None,
source_range: 75..76,
delete: 75..76,
insert: "m",
@@ -1280,6 +1306,10 @@ fn main() { let _: m::Spam = S$0 }
},
CompletionItem {
label: "m::Spam::Bar(…)",
+ detail_left: None,
+ detail_right: Some(
+ "m::Spam::Bar(i32)",
+ ),
source_range: 75..76,
delete: 75..76,
insert: "m::Spam::Bar(${1:()})$0",
@@ -1305,6 +1335,10 @@ fn main() { let _: m::Spam = S$0 }
},
CompletionItem {
label: "m::Spam::Foo",
+ detail_left: None,
+ detail_right: Some(
+ "m::Spam::Foo",
+ ),
source_range: 75..76,
delete: 75..76,
insert: "m::Spam::Foo$0",
@@ -1347,6 +1381,10 @@ fn main() { som$0 }
[
CompletionItem {
label: "main()",
+ detail_left: None,
+ detail_right: Some(
+ "fn()",
+ ),
source_range: 56..59,
delete: 56..59,
insert: "main();$0",
@@ -1358,6 +1396,10 @@ fn main() { som$0 }
},
CompletionItem {
label: "something_deprecated()",
+ detail_left: None,
+ detail_right: Some(
+ "fn()",
+ ),
source_range: 56..59,
delete: 56..59,
insert: "something_deprecated();$0",
@@ -1382,6 +1424,10 @@ fn foo() { A { the$0 } }
[
CompletionItem {
label: "the_field",
+ detail_left: None,
+ detail_right: Some(
+ "u32",
+ ),
source_range: 57..60,
delete: 57..60,
insert: "the_field",
@@ -1429,6 +1475,10 @@ impl S {
[
CompletionItem {
label: "bar()",
+ detail_left: None,
+ detail_right: Some(
+ "fn(self)",
+ ),
source_range: 94..94,
delete: 94..94,
insert: "bar();$0",
@@ -1460,6 +1510,10 @@ impl S {
},
CompletionItem {
label: "foo",
+ detail_left: None,
+ detail_right: Some(
+ "{unknown}",
+ ),
source_range: 94..94,
delete: 94..94,
insert: "foo",
@@ -1498,6 +1552,8 @@ use self::E::*;
[
CompletionItem {
label: "my",
+ detail_left: None,
+ detail_right: None,
source_range: 10..12,
delete: 10..12,
insert: "my",
@@ -1510,6 +1566,10 @@ use self::E::*;
},
CompletionItem {
label: "V",
+ detail_left: None,
+ detail_right: Some(
+ "V",
+ ),
source_range: 10..12,
delete: 10..12,
insert: "V$0",
@@ -1524,6 +1584,10 @@ use self::E::*;
},
CompletionItem {
label: "E",
+ detail_left: None,
+ detail_right: Some(
+ "E",
+ ),
source_range: 10..12,
delete: 10..12,
insert: "E",
@@ -1556,6 +1620,10 @@ fn foo(s: S) { s.$0 }
[
CompletionItem {
label: "the_method()",
+ detail_left: None,
+ detail_right: Some(
+ "fn(&self)",
+ ),
source_range: 81..81,
delete: 81..81,
insert: "the_method();$0",
@@ -1729,9 +1797,9 @@ fn test(bar: u32) { }
fn foo(s: S) { test(s.$0) }
"#,
expect![[r#"
- fd bar [type+name]
- fd baz [type]
- fd foo []
+ fd bar u32 [type+name]
+ fd baz u32 [type]
+ fd foo i64 []
"#]],
);
}
@@ -1745,9 +1813,9 @@ struct B { x: (), y: f32, bar: u32 }
fn foo(a: A) { B { bar: a.$0 }; }
"#,
expect![[r#"
- fd bar [type+name]
- fd baz [type]
- fd foo []
+ fd bar u32 [type+name]
+ fd baz u32 [type]
+ fd foo i64 []
"#]],
)
}
@@ -1768,6 +1836,10 @@ fn f() -> i32 {
[
CompletionItem {
label: "0",
+ detail_left: None,
+ detail_right: Some(
+ "i32",
+ ),
source_range: 56..57,
delete: 56..57,
insert: "0",
@@ -1804,9 +1876,9 @@ fn f(foo: i64) { }
fn foo(a: A) { B { bar: f(a.$0) }; }
"#,
expect![[r#"
- fd foo [type+name]
- fd bar []
- fd baz []
+ fd foo i64 [type+name]
+ fd bar u32 []
+ fd baz u32 []
"#]],
);
check_relevance(
@@ -1817,9 +1889,9 @@ fn f(foo: i64) { }
fn foo(a: A) { f(B { bar: a.$0 }); }
"#,
expect![[r#"
- fd bar [type+name]
- fd baz [type]
- fd foo []
+ fd bar u32 [type+name]
+ fd baz u32 [type]
+ fd foo i64 []
"#]],
);
}
@@ -1832,13 +1904,13 @@ struct WorldSnapshot { _f: () };
fn go(world: &WorldSnapshot) { go(w$0) }
"#,
expect![[r#"
- lc world [type+name+local]
- ex world [type]
- st WorldSnapshot {…} []
+ lc world &WorldSnapshot [type+name+local]
+ ex world [type]
+ st WorldSnapshot {…} WorldSnapshot { _f: () } []
st &WorldSnapshot {…} [type]
- st WorldSnapshot []
+ st WorldSnapshot WorldSnapshot []
st &WorldSnapshot [type]
- fn go(…) []
+ fn go(…) fn(&WorldSnapshot) []
"#]],
);
}
@@ -1852,9 +1924,9 @@ struct Foo;
fn f(foo: &Foo) { f(foo, w$0) }
"#,
expect![[r#"
- lc foo [local]
- st Foo []
- fn f(…) []
+ lc foo &Foo [local]
+ st Foo Foo []
+ fn f(…) fn(&Foo) []
"#]],
);
}
@@ -1869,12 +1941,12 @@ fn bar() -> u8 { 0 }
fn f() { A { bar: b$0 }; }
"#,
expect![[r#"
- fn bar() [type+name]
- fn baz() [type]
- ex bar() [type]
- ex baz() [type]
- st A []
- fn f() []
+ fn bar() fn() -> u8 [type+name]
+ fn baz() fn() -> u8 [type]
+ ex bar() [type]
+ ex baz() [type]
+ st A A []
+ fn f() fn() []
"#]],
);
}
@@ -1895,9 +1967,9 @@ fn f() {
}
"#,
expect![[r#"
- me aaa() [type+name]
- me bbb() [type]
- me ccc() []
+ me aaa() fn(&self) -> u32 [type+name]
+ me bbb() fn(&self) -> u32 [type]
+ me ccc() fn(&self) -> u64 []
"#]],
);
}
@@ -1916,7 +1988,7 @@ fn f() {
}
"#,
expect![[r#"
- me aaa() [name]
+ me aaa() fn(&self) -> u64 [name]
"#]],
);
}
@@ -1934,14 +2006,14 @@ fn main() {
}
"#,
expect![[r#"
- lc s [name+local]
+ lc s S [name+local]
lc &mut s [type+name+local]
- st S []
+ st S S []
st &mut S [type]
- st S []
+ st S S []
st &mut S [type]
- fn foo(…) []
- fn main() []
+ fn foo(…) fn(&mut S) []
+ fn main() fn() []
"#]],
);
check_relevance(
@@ -1954,13 +2026,13 @@ fn main() {
}
"#,
expect![[r#"
- lc s [type+name+local]
- st S [type]
- st S [type]
- ex s [type]
- ex S [type]
- fn foo(…) []
- fn main() []
+ lc s S [type+name+local]
+ st S S [type]
+ st S S [type]
+ ex s [type]
+ ex S [type]
+ fn foo(…) fn(&mut S) []
+ fn main() fn() []
"#]],
);
check_relevance(
@@ -1973,13 +2045,13 @@ fn main() {
}
"#,
expect![[r#"
- lc ssss [type+local]
- st S [type]
- st S [type]
- ex ssss [type]
- ex S [type]
- fn foo(…) []
- fn main() []
+ lc ssss S [type+local]
+ st S S [type]
+ st S S [type]
+ ex ssss [type]
+ ex S [type]
+ fn foo(…) fn(&mut S) []
+ fn main() fn() []
"#]],
);
}
@@ -2010,19 +2082,19 @@ fn main() {
}
"#,
expect![[r#"
- ex core::ops::Deref::deref(&t) (use core::ops::Deref) [type_could_unify]
- lc m [local]
- lc t [local]
+ ex core::ops::Deref::deref(&t) [type_could_unify]
+ lc m i32 [local]
+ lc t T [local]
lc &t [type+local]
- st S []
+ st S S []
st &S [type]
- st S []
+ st S S []
st &S [type]
- st T []
+ st T T []
st &T [type]
- fn foo(…) []
- fn main() []
- md core []
+ fn foo(…) fn(&S) []
+ fn main() fn() []
+ md core []
"#]],
)
}
@@ -2059,19 +2131,19 @@ fn main() {
}
"#,
expect![[r#"
- ex core::ops::DerefMut::deref_mut(&mut t) (use core::ops::DerefMut) [type_could_unify]
- lc m [local]
- lc t [local]
+ ex core::ops::DerefMut::deref_mut(&mut t) [type_could_unify]
+ lc m i32 [local]
+ lc t T [local]
lc &mut t [type+local]
- st S []
+ st S S []
st &mut S [type]
- st S []
+ st S S []
st &mut S [type]
- st T []
+ st T T []
st &mut T [type]
- fn foo(…) []
- fn main() []
- md core []
+ fn foo(…) fn(&mut S) []
+ fn main() fn() []
+ md core []
"#]],
)
}
@@ -2087,9 +2159,9 @@ fn foo(bar: u32) {
}
"#,
expect![[r#"
- lc baz [local]
- lc bar [local]
- fn foo(…) []
+ lc baz i32 [local]
+ lc bar u32 [local]
+ fn foo(…) fn(u32) []
"#]],
);
}
@@ -2105,13 +2177,13 @@ fn foo() {
fn bar(t: Foo) {}
"#,
expect![[r#"
- ev Foo::A [type]
- ev Foo::B [type]
- en Foo [type]
- ex Foo::A [type]
- ex Foo::B [type]
- fn bar(…) []
- fn foo() []
+ ev Foo::A Foo::A [type]
+ ev Foo::B Foo::B [type]
+ en Foo Foo [type]
+ ex Foo::A [type]
+ ex Foo::B [type]
+ fn bar(…) fn(Foo) []
+ fn foo() fn() []
"#]],
);
}
@@ -2127,14 +2199,14 @@ fn foo() {
fn bar(t: &Foo) {}
"#,
expect![[r#"
- ev Foo::A []
+ ev Foo::A Foo::A []
ev &Foo::A [type]
- ev Foo::B []
+ ev Foo::B Foo::B []
ev &Foo::B [type]
- en Foo []
+ en Foo Foo []
en &Foo [type]
- fn bar(…) []
- fn foo() []
+ fn bar(…) fn(&Foo) []
+ fn foo() fn() []
"#]],
);
}
@@ -2163,18 +2235,18 @@ fn main() {
}
"#,
expect![[r#"
- ex core::ops::Deref::deref(&bar()) (use core::ops::Deref) [type_could_unify]
- st S []
+ ex core::ops::Deref::deref(&bar()) [type_could_unify]
+ st S S []
st &S [type]
- st S []
+ st S S []
st &S [type]
- st T []
+ st T T []
st &T [type]
- fn bar() []
+ fn bar() fn() -> T []
fn &bar() [type]
- fn foo(…) []
- fn main() []
- md core []
+ fn foo(…) fn(&S) []
+ fn main() fn() []
+ md core []
"#]],
)
}
@@ -2191,7 +2263,7 @@ impl Sub for u32 {}
fn foo(a: u32) { a.$0 }
"#,
expect![[r#"
- me sub(…) (as Sub) [op_method]
+ me sub(…) fn(self, Self) -> Self [op_method]
"#]],
);
check_relevance(
@@ -2212,9 +2284,9 @@ fn main() {
}
"#,
expect![[r#"
- fn new() []
- me eq(…) (as PartialEq) [op_method]
- me ne(…) (as PartialEq) [op_method]
+ fn new() fn() -> Foo []
+ me eq(…) fn(&self, &Rhs) -> bool [op_method]
+ me ne(…) fn(&self, &Rhs) -> bool [op_method]
"#]],
);
}
@@ -2238,9 +2310,9 @@ fn test() {
}
"#,
expect![[r#"
- fn fn_ctr() [type_could_unify]
- fn fn_ctr_self() [type_could_unify]
- fn fn_another(…) [type_could_unify]
+ fn fn_ctr() fn() -> Foo [type_could_unify]
+ fn fn_ctr_self() fn() -> Option<Foo> [type_could_unify]
+ fn fn_another(…) fn(u32) -> Other [type_could_unify]
"#]],
);
}
@@ -2384,12 +2456,12 @@ fn test() {
// Constructor
// Others
expect![[r#"
- fn fn_direct_ctr() [type_could_unify]
- fn fn_ctr_with_args(…) [type_could_unify]
- fn fn_builder() [type_could_unify]
- fn fn_ctr() [type_could_unify]
- me fn_no_ret(…) [type_could_unify]
- fn fn_other() [type_could_unify]
+ fn fn_direct_ctr() fn() -> Foo [type_could_unify]
+ fn fn_ctr_with_args(…) fn(u32) -> Foo [type_could_unify]
+ fn fn_builder() fn() -> FooBuilder [type_could_unify]
+ fn fn_ctr() fn() -> Result<Foo> [type_could_unify]
+ me fn_no_ret(…) fn(&self) [type_could_unify]
+ fn fn_other() fn() -> Result<u32> [type_could_unify]
"#]],
);
@@ -2420,14 +2492,14 @@ fn test() {
}
"#,
expect![[r#"
- fn fn_direct_ctr() [type_could_unify]
- fn fn_ctr_with_args(…) [type_could_unify]
- fn fn_builder() [type_could_unify]
- fn fn_ctr_wrapped() [type_could_unify]
- fn fn_ctr_wrapped_2() [type_could_unify]
- me fn_returns_unit(…) [type_could_unify]
- fn fn_other() [type_could_unify]
- "#]],
+ fn fn_direct_ctr() fn() -> Foo<T> [type_could_unify]
+ fn fn_ctr_with_args(…) fn(T) -> Foo<T> [type_could_unify]
+ fn fn_builder() fn() -> FooBuilder [type_could_unify]
+ fn fn_ctr_wrapped() fn() -> Option<Foo<T>> [type_could_unify]
+ fn fn_ctr_wrapped_2() fn() -> Result<Foo<T>, u32> [type_could_unify]
+ me fn_returns_unit(…) fn(&self) [type_could_unify]
+ fn fn_other() fn() -> Option<u32> [type_could_unify]
+ "#]],
);
}
@@ -2456,13 +2528,13 @@ fn test() {
}
"#,
expect![[r#"
- fn fn_direct_ctr() [type_could_unify]
- fn fn_ctr_with_args(…) [type_could_unify]
- fn fn_builder() [type_could_unify]
- fn fn_ctr() [type_could_unify]
- fn fn_ctr2() [type_could_unify]
- me fn_no_ret(…) [type_could_unify]
- fn fn_other() [type_could_unify]
+ fn fn_direct_ctr() fn() -> Foo<T> [type_could_unify]
+ fn fn_ctr_with_args(…) fn(T) -> Foo<T> [type_could_unify]
+ fn fn_builder() fn() -> FooBuilder [type_could_unify]
+ fn fn_ctr() fn() -> Option<Foo<T>> [type_could_unify]
+ fn fn_ctr2() fn() -> Result<Foo<T>, u32> [type_could_unify]
+ me fn_no_ret(…) fn(&self) [type_could_unify]
+ fn fn_other() fn() -> Option<u32> [type_could_unify]
"#]],
);
}
@@ -2484,6 +2556,10 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 }
[
CompletionItem {
label: "baz()",
+ detail_left: None,
+ detail_right: Some(
+ "fn(&self) -> u32",
+ ),
source_range: 109..110,
delete: 109..110,
insert: "baz()$0",
@@ -2513,6 +2589,10 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 }
},
CompletionItem {
label: "bar",
+ detail_left: None,
+ detail_right: Some(
+ "u32",
+ ),
source_range: 109..110,
delete: 109..110,
insert: "bar",
@@ -2524,6 +2604,10 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 }
},
CompletionItem {
label: "qux",
+ detail_left: None,
+ detail_right: Some(
+ "fn()",
+ ),
source_range: 109..110,
text_edit: TextEdit {
indels: [
@@ -2562,6 +2646,10 @@ fn foo() {
[
CompletionItem {
label: "field",
+ detail_left: None,
+ detail_right: Some(
+ "fn()",
+ ),
source_range: 76..78,
delete: 76..78,
insert: "field",
@@ -2610,6 +2698,10 @@ fn main() {
[
CompletionItem {
label: "foo()",
+ detail_left: None,
+ detail_right: Some(
+ "fn() -> S",
+ ),
source_range: 95..95,
delete: 95..95,
insert: "foo()$0",
@@ -2661,15 +2753,15 @@ fn foo() {
}
"#,
expect![[r#"
- lc foo [type+local]
- ex foo [type]
- ex Foo::B [type]
- ev Foo::A(…) [type_could_unify]
- ev Foo::B [type_could_unify]
- en Foo [type_could_unify]
- fn foo() []
- fn bar() []
- fn baz() []
+ lc foo Foo<u32> [type+local]
+ ex foo [type]
+ ex Foo::B [type]
+ ev Foo::A(…) Foo::A(T) [type_could_unify]
+ ev Foo::B Foo::B [type_could_unify]
+ en Foo Foo<{unknown}> [type_could_unify]
+ fn foo() fn() []
+ fn bar() fn() -> Foo<u8> []
+ fn baz() fn() -> Foo<T> []
"#]],
);
}
@@ -2697,20 +2789,20 @@ fn main() {
"#,
&[CompletionItemKind::Snippet, CompletionItemKind::SymbolKind(SymbolKind::Method)],
expect![[r#"
- sn not [snippet]
- me not() (use ops::Not) [type_could_unify+requires_import]
- sn if []
- sn while []
- sn ref []
- sn refm []
- sn deref []
- sn unsafe []
- sn match []
- sn box []
- sn dbg []
- sn dbgr []
- sn call []
- sn return []
+ sn not !expr [snippet]
+ me not() fn(self) -> <Self as Not>::Output [type_could_unify+requires_import]
+ sn if if expr {} []
+ sn while while expr {} []
+ sn ref &expr []
+ sn refm &mut expr []
+ sn deref *expr []
+ sn unsafe unsafe {} []
+ sn match match expr {} []
+ sn box Box::new(expr) []
+ sn dbg dbg!(expr) []
+ sn dbgr dbg!(&expr) []
+ sn call function(expr) []
+ sn return return expr []
"#]],
);
}
@@ -2730,19 +2822,19 @@ fn main() {
"#,
&[CompletionItemKind::Snippet, CompletionItemKind::SymbolKind(SymbolKind::Method)],
expect![[r#"
- me f() []
- sn ref []
- sn refm []
- sn deref []
- sn unsafe []
- sn match []
- sn box []
- sn dbg []
- sn dbgr []
- sn call []
- sn let []
- sn letm []
- sn return []
+ me f() fn(&self) []
+ sn ref &expr []
+ sn refm &mut expr []
+ sn deref *expr []
+ sn unsafe unsafe {} []
+ sn match match expr {} []
+ sn box Box::new(expr) []
+ sn dbg dbg!(expr) []
+ sn dbgr dbg!(&expr) []
+ sn call function(expr) []
+ sn let let []
+ sn letm let mut []
+ sn return return expr []
"#]],
);
}
@@ -2765,12 +2857,12 @@ fn f() {
}
"#,
expect![[r#"
- st Buffer []
- fn f() []
- md std []
- tt BufRead (use std::io::BufRead) [requires_import]
- st BufReader (use std::io::BufReader) [requires_import]
- st BufWriter (use std::io::BufWriter) [requires_import]
+ st Buffer Buffer []
+ fn f() fn() []
+ md std []
+ tt BufRead [requires_import]
+ st BufReader BufReader [requires_import]
+ st BufWriter BufWriter [requires_import]
"#]],
);
}
@@ -2979,6 +3071,12 @@ fn main() {
[
CompletionItem {
label: "flush()",
+ detail_left: Some(
+ "(as Write)",
+ ),
+ detail_right: Some(
+ "fn(&self)",
+ ),
source_range: 193..193,
delete: 193..193,
insert: "flush();$0",
@@ -3006,6 +3104,12 @@ fn main() {
},
CompletionItem {
label: "write()",
+ detail_left: Some(
+ "(as Write)",
+ ),
+ detail_right: Some(
+ "fn(&self)",
+ ),
source_range: 193..193,
delete: 193..193,
insert: "write();$0",
diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs
index f371012de3..e01097a910 100644
--- a/crates/ide-completion/src/tests.rs
+++ b/crates/ide-completion/src/tests.rs
@@ -118,10 +118,16 @@ fn completion_list_with_config_raw(
let items = get_all_items(config, ra_fixture, trigger_character);
items
.into_iter()
- .filter(|it| it.kind != CompletionItemKind::BuiltinType || it.label == "u32")
+ .filter(|it| it.kind != CompletionItemKind::BuiltinType || it.label.primary == "u32")
.filter(|it| include_keywords || it.kind != CompletionItemKind::Keyword)
.filter(|it| include_keywords || it.kind != CompletionItemKind::Snippet)
- .sorted_by_key(|it| (it.kind, it.label.clone(), it.detail.as_ref().map(ToOwned::to_owned)))
+ .sorted_by_key(|it| {
+ (
+ it.kind,
+ it.label.primary.clone(),
+ it.label.detail_left.as_ref().map(ToOwned::to_owned),
+ )
+ })
.collect()
}
@@ -173,27 +179,30 @@ fn render_completion_list(completions: Vec<CompletionItem>) -> String {
let label_width = completions
.iter()
.map(|it| {
- monospace_width(&it.label)
- + monospace_width(it.label_detail.as_deref().unwrap_or_default())
+ monospace_width(&it.label.primary)
+ + monospace_width(it.label.detail_left.as_deref().unwrap_or_default())
+ + monospace_width(it.label.detail_right.as_deref().unwrap_or_default())
+ + it.label.detail_left.is_some() as usize
+ + it.label.detail_right.is_some() as usize
})
.max()
- .unwrap_or_default()
- .min(22);
+ .unwrap_or_default();
completions
.into_iter()
.map(|it| {
let tag = it.kind.tag();
- let var_name = format!("{tag} {}", it.label);
- let mut buf = var_name;
- if let Some(ref label_detail) = it.label_detail {
- format_to!(buf, "{label_detail}");
+ let mut buf = format!("{tag} {}", it.label.primary);
+ if let Some(label_detail) = &it.label.detail_left {
+ format_to!(buf, " {label_detail}");
}
- if let Some(detail) = it.detail {
- let width = label_width.saturating_sub(
- monospace_width(&it.label)
- + monospace_width(&it.label_detail.unwrap_or_default()),
+ if let Some(detail_right) = it.label.detail_right {
+ let pad_with = label_width.saturating_sub(
+ monospace_width(&it.label.primary)
+ + monospace_width(it.label.detail_left.as_deref().unwrap_or_default())
+ + monospace_width(&detail_right)
+ + it.label.detail_left.is_some() as usize,
);
- format_to!(buf, "{:width$} {}", "", detail, width = width);
+ format_to!(buf, "{:pad_with$}{detail_right}", "",);
}
if it.deprecated {
format_to!(buf, " DEPRECATED");
diff --git a/crates/ide-completion/src/tests/attribute.rs b/crates/ide-completion/src/tests/attribute.rs
index 1443ebc6c0..acafa6518f 100644
--- a/crates/ide-completion/src/tests/attribute.rs
+++ b/crates/ide-completion/src/tests/attribute.rs
@@ -33,7 +33,7 @@ pub struct Foo(#[m$0] i32);
at cold
at deny(…)
at deprecated
- at derive macro derive
+ at derive macro derive
at derive(…)
at doc = "…"
at doc(alias = "…")
@@ -367,9 +367,9 @@ struct Foo;
at cfg_attr(…)
at deny(…)
at deprecated
- at derive macro derive
+ at derive macro derive
at derive(…)
- at derive_const macro derive_const
+ at derive_const macro derive_const
at doc = "…"
at doc(alias = "…")
at doc(hidden)
@@ -790,10 +790,10 @@ mod derive {
#[derive($0)] struct Test;
"#,
expect![[r#"
- de Clone macro Clone
+ de Clone macro Clone
de Clone, Copy
- de Default macro Default
- de PartialEq macro PartialEq
+ de Default macro Default
+ de PartialEq macro PartialEq
de PartialEq, Eq
de PartialEq, Eq, PartialOrd, Ord
de PartialEq, PartialOrd
@@ -812,9 +812,9 @@ mod derive {
#[derive(serde::Serialize, PartialEq, $0)] struct Test;
"#,
expect![[r#"
- de Clone macro Clone
+ de Clone macro Clone
de Clone, Copy
- de Default macro Default
+ de Default macro Default
de Eq
de Eq, PartialOrd, Ord
de PartialOrd
@@ -833,9 +833,9 @@ mod derive {
#[derive($0 serde::Serialize, PartialEq)] struct Test;
"#,
expect![[r#"
- de Clone macro Clone
+ de Clone macro Clone
de Clone, Copy
- de Default macro Default
+ de Default macro Default
de Eq
de Eq, PartialOrd, Ord
de PartialOrd
@@ -854,9 +854,9 @@ mod derive {
#[derive(PartialEq, Eq, Or$0)] struct Test;
"#,
expect![[r#"
- de Clone macro Clone
+ de Clone macro Clone
de Clone, Copy
- de Default macro Default
+ de Default macro Default
de PartialOrd
de PartialOrd, Ord
md core
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index 545c2a2a8a..ea1b7ad787 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -26,22 +26,22 @@ fn baz() {
"#,
// This should not contain `FooDesc {…}`.
expect![[r#"
- ct CONST Unit
- en Enum Enum
- fn baz() fn()
- fn create_foo(…) fn(&FooDesc)
- fn function() fn()
- ma makro!(…) macro_rules! makro
+ ct CONST Unit
+ en Enum Enum
+ fn baz() fn()
+ fn create_foo(…) fn(&FooDesc)
+ fn function() fn()
+ ma makro!(…) macro_rules! makro
md _69latrick
md module
- sc STATIC Unit
- st FooDesc FooDesc
- st Record Record
- st Tuple Tuple
- st Unit Unit
- un Union Union
- ev TupleV(…) TupleV(u32)
- bt u32 u32
+ sc STATIC Unit
+ st FooDesc FooDesc
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
+ un Union Union
+ ev TupleV(…) TupleV(u32)
+ bt u32 u32
kw crate::
kw false
kw for
@@ -76,14 +76,14 @@ fn func(param0 @ (param1, param2): (i32, i32)) {
}
"#,
expect![[r#"
- fn func(…) fn((i32, i32))
- lc ifletlocal i32
- lc letlocal i32
- lc matcharm i32
- lc param0 (i32, i32)
- lc param1 i32
- lc param2 i32
- bt u32 u32
+ fn func(…) fn((i32, i32))
+ lc ifletlocal i32
+ lc letlocal i32
+ lc matcharm i32
+ lc param0 (i32, i32)
+ lc param1 i32
+ lc param2 i32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -122,25 +122,25 @@ impl Unit {
"#,
// `self` is in here twice, once as the module, once as the local
expect![[r#"
- ct CONST Unit
+ ct CONST Unit
cp CONST_PARAM
- en Enum Enum
- fn function() fn()
- fn local_func() fn()
- me self.foo() fn(self)
- lc self Unit
- ma makro!(…) macro_rules! makro
+ en Enum Enum
+ fn function() fn()
+ fn local_func() fn()
+ me self.foo() fn(self)
+ lc self Unit
+ ma makro!(…) macro_rules! makro
md module
md qualified
- sp Self Unit
- sc STATIC Unit
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ sp Self Unit
+ sc STATIC Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tp TypeParam
- un Union Union
- ev TupleV(…) TupleV(u32)
- bt u32 u32
+ un Union Union
+ ev TupleV(…) TupleV(u32)
+ bt u32 u32
kw async
kw const
kw crate::
@@ -187,19 +187,19 @@ impl Unit {
}
"#,
expect![[r#"
- ct CONST Unit
- en Enum Enum
- fn function() fn()
- ma makro!(…) macro_rules! makro
+ ct CONST Unit
+ en Enum Enum
+ fn function() fn()
+ ma makro!(…) macro_rules! makro
md module
md qualified
- sc STATIC Unit
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ sc STATIC Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
- ev TupleV(…) TupleV(u32)
+ un Union Union
+ ev TupleV(…) TupleV(u32)
?? Unresolved
"#]],
);
@@ -216,8 +216,8 @@ fn complete_in_block() {
}
"#,
expect![[r#"
- fn foo() fn()
- bt u32 u32
+ fn foo() fn()
+ bt u32 u32
kw async
kw const
kw crate::
@@ -264,8 +264,8 @@ fn complete_after_if_expr() {
}
"#,
expect![[r#"
- fn foo() fn()
- bt u32 u32
+ fn foo() fn()
+ bt u32 u32
kw async
kw const
kw crate::
@@ -313,8 +313,8 @@ fn complete_in_match_arm() {
}
"#,
expect![[r#"
- fn foo() fn()
- bt u32 u32
+ fn foo() fn()
+ bt u32 u32
kw crate::
kw false
kw for
@@ -337,8 +337,8 @@ fn completes_in_loop_ctx() {
check_empty(
r"fn my() { loop { $0 } }",
expect![[r#"
- fn my() fn()
- bt u32 u32
+ fn my() fn()
+ bt u32 u32
kw async
kw break
kw const
@@ -376,22 +376,22 @@ fn completes_in_loop_ctx() {
check_empty(
r"fn my() { loop { foo.$0 } }",
expect![[r#"
- sn box Box::new(expr)
- sn break break expr
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn if if expr {}
- sn let let
- sn letm let mut
- sn match match expr {}
- sn not !expr
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
- sn while while expr {}
+ sn box Box::new(expr)
+ sn break break expr
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn if if expr {}
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn not !expr
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
+ sn while while expr {}
"#]],
);
}
@@ -401,8 +401,8 @@ fn completes_in_let_initializer() {
check_empty(
r#"fn main() { let _ = $0 }"#,
expect![[r#"
- fn main() fn()
- bt u32 u32
+ fn main() fn()
+ bt u32 u32
kw crate::
kw false
kw for
@@ -434,9 +434,9 @@ fn foo() {
}
"#,
expect![[r#"
- fn foo() fn()
- st Foo Foo
- bt u32 u32
+ fn foo() fn()
+ st Foo Foo
+ bt u32 u32
kw crate::
kw false
kw for
@@ -469,9 +469,9 @@ fn foo() {
}
"#,
expect![[r#"
- fn foo() fn()
- lc bar i32
- bt u32 u32
+ fn foo() fn()
+ lc bar i32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -499,10 +499,10 @@ fn quux(x: i32) {
}
"#,
expect![[r#"
- fn quux(…) fn(i32)
- lc x i32
- ma m!(…) macro_rules! m
- bt u32 u32
+ fn quux(…) fn(i32)
+ lc x i32
+ ma m!(…) macro_rules! m
+ bt u32 u32
kw crate::
kw false
kw for
@@ -526,10 +526,10 @@ fn quux(x: i32) {
}
",
expect![[r#"
- fn quux(…) fn(i32)
- lc x i32
- ma m!(…) macro_rules! m
- bt u32 u32
+ fn quux(…) fn(i32)
+ lc x i32
+ ma m!(…) macro_rules! m
+ bt u32 u32
kw crate::
kw false
kw for
@@ -554,11 +554,11 @@ fn quux(x: i32) {
}
"#,
expect![[r#"
- fn quux(…) fn(i32)
- lc x i32
- lc y i32
- ma m!(…) macro_rules! m
- bt u32 u32
+ fn quux(…) fn(i32)
+ lc x i32
+ lc y i32
+ ma m!(…) macro_rules! m
+ bt u32 u32
kw crate::
kw false
kw for
@@ -590,12 +590,12 @@ fn func() {
}
"#,
expect![[r#"
- ct ASSOC_CONST const ASSOC_CONST: ()
- fn assoc_fn() fn()
- ta AssocType type AssocType = ()
+ ct ASSOC_CONST const ASSOC_CONST: ()
+ fn assoc_fn() fn()
+ ta AssocType type AssocType = ()
ev RecordV {…} RecordV { field: u32 }
- ev TupleV(…) TupleV(u32)
- ev UnitV UnitV
+ ev TupleV(…) TupleV(u32)
+ ev UnitV UnitV
"#]],
);
}
@@ -633,7 +633,7 @@ fn func() {
"#,
expect![[r#"
fn variant fn() -> Enum
- ev Variant Variant
+ ev Variant Variant
"#]],
);
}
@@ -650,8 +650,8 @@ fn main() {
}
",
expect![[r#"
- fn foo() fn() -> impl Trait<U>
- fn main() fn()
+ fn foo() fn() -> impl Trait<U>
+ fn main() fn()
tt Trait
"#]],
);
@@ -670,9 +670,9 @@ fn main() {
}
"#,
expect![[r#"
- fn bar() async fn() -> impl Trait<U>
- fn foo() async fn() -> u8
- fn main() fn()
+ fn bar() async fn() -> impl Trait<U>
+ fn foo() async fn() -> u8
+ fn main() fn()
tt Trait
"#]],
);
@@ -692,9 +692,9 @@ fn main() {
Foo::$0
}
",
- expect![[r"
+ expect![[r#"
fn bar(…) fn(impl Trait<U>)
- "]],
+ "#]],
);
}
@@ -712,7 +712,7 @@ fn main() {
}
"#,
expect![[r#"
- fn test() fn() -> Zulu
+ fn test() fn() -> Zulu
ex Zulu
ex Zulu::test()
"#]],
@@ -736,11 +736,11 @@ fn brr() {
}
"#,
expect![[r#"
- en HH HH
- fn brr() fn()
- st YoloVariant YoloVariant
+ en HH HH
+ fn brr() fn()
+ st YoloVariant YoloVariant
st YoloVariant {…} YoloVariant { f: usize }
- bt u32 u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -801,8 +801,8 @@ fn else_completion_after_if() {
fn foo() { if foo {} $0 }
"#,
expect![[r#"
- fn foo() fn()
- bt u32 u32
+ fn foo() fn()
+ bt u32 u32
kw async
kw const
kw crate::
@@ -842,8 +842,8 @@ fn foo() { if foo {} $0 }
fn foo() { if foo {} el$0 }
"#,
expect![[r#"
- fn foo() fn()
- bt u32 u32
+ fn foo() fn()
+ bt u32 u32
kw async
kw const
kw crate::
@@ -883,8 +883,8 @@ fn foo() { if foo {} el$0 }
fn foo() { bar(if foo {} $0) }
"#,
expect![[r#"
- fn foo() fn()
- bt u32 u32
+ fn foo() fn()
+ bt u32 u32
kw crate::
kw else
kw else if
@@ -907,8 +907,8 @@ fn foo() { bar(if foo {} $0) }
fn foo() { bar(if foo {} el$0) }
"#,
expect![[r#"
- fn foo() fn()
- bt u32 u32
+ fn foo() fn()
+ bt u32 u32
kw crate::
kw else
kw else if
@@ -931,8 +931,8 @@ fn foo() { bar(if foo {} el$0) }
fn foo() { if foo {} $0 let x = 92; }
"#,
expect![[r#"
- fn foo() fn()
- bt u32 u32
+ fn foo() fn()
+ bt u32 u32
kw async
kw const
kw crate::
@@ -972,8 +972,8 @@ fn foo() { if foo {} $0 let x = 92; }
fn foo() { if foo {} el$0 let x = 92; }
"#,
expect![[r#"
- fn foo() fn()
- bt u32 u32
+ fn foo() fn()
+ bt u32 u32
kw async
kw const
kw crate::
@@ -1013,8 +1013,8 @@ fn foo() { if foo {} el$0 let x = 92; }
fn foo() { if foo {} el$0 { let x = 92; } }
"#,
expect![[r#"
- fn foo() fn()
- bt u32 u32
+ fn foo() fn()
+ bt u32 u32
kw async
kw const
kw crate::
@@ -1065,9 +1065,9 @@ fn main() {
pub struct UnstableThisShouldNotBeListed;
"#,
expect![[r#"
- fn main() fn()
+ fn main() fn()
md std
- bt u32 u32
+ bt u32 u32
kw async
kw const
kw crate::
@@ -1117,10 +1117,10 @@ fn main() {
pub struct UnstableButWeAreOnNightlyAnyway;
"#,
expect![[r#"
- fn main() fn()
+ fn main() fn()
md std
st UnstableButWeAreOnNightlyAnyway UnstableButWeAreOnNightlyAnyway
- bt u32 u32
+ bt u32 u32
kw async
kw const
kw crate::
@@ -1170,17 +1170,17 @@ fn main() {
}
"#,
expect![[r#"
- me foo() fn(&self)
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
);
check_empty(
@@ -1196,17 +1196,17 @@ fn main() {
}
"#,
expect![[r#"
- me foo() fn(&self)
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
);
}
@@ -1226,17 +1226,17 @@ fn main() {
}
"#,
expect![[r#"
- me foo() fn(&self)
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
);
check_empty(
@@ -1252,17 +1252,17 @@ fn main() {
}
"#,
expect![[r#"
- me foo() fn(&self)
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
);
check_empty(
@@ -1278,17 +1278,17 @@ fn main() {
}
"#,
expect![[r#"
- me foo() fn(&self)
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
);
check_empty(
@@ -1304,19 +1304,89 @@ fn main() {
}
"#,
expect![[r#"
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn if if expr {}
- sn match match expr {}
- sn not !expr
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
- sn while while expr {}
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn if if expr {}
+ sn match match expr {}
+ sn not !expr
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
+ sn while while expr {}
+ "#]],
+ );
+}
+
+#[test]
+fn macro_that_ignores_completion_marker() {
+ check(
+ r#"
+macro_rules! helper {
+ ($v:ident) => {};
+}
+
+macro_rules! m {
+ ($v:ident) => {{
+ helper!($v);
+ $v
+ }};
+}
+
+fn main() {
+ let variable = "test";
+ m!(v$0);
+}
+ "#,
+ expect![[r#"
+ ct CONST Unit
+ en Enum Enum
+ fn function() fn()
+ fn main() fn()
+ lc variable &str
+ ma helper!(…) macro_rules! helper
+ ma m!(…) macro_rules! m
+ ma makro!(…) macro_rules! makro
+ md module
+ sc STATIC Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
+ un Union Union
+ ev TupleV(…) TupleV(u32)
+ bt u32 u32
+ kw async
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw false
+ kw fn
+ kw for
+ kw if
+ kw if let
+ kw impl
+ kw let
+ kw loop
+ kw match
+ kw mod
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw true
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ kw while
+ kw while let
+ sn macro_rules
+ sn pd
+ sn ppd
"#]],
);
}
diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs
index 4b949e0d65..447dbc998b 100644
--- a/crates/ide-completion/src/tests/flyimport.rs
+++ b/crates/ide-completion/src/tests/flyimport.rs
@@ -139,9 +139,9 @@ fn main() {
}
"#,
expect![[r#"
- st Rc (use dep::Rc) Rc
- st Rcar (use dep::Rcar) Rcar
- st Rc (use dep::some_module::Rc) Rc
+ st Rc (use dep::Rc) Rc
+ st Rcar (use dep::Rcar) Rcar
+ st Rc (use dep::some_module::Rc) Rc
st Rcar (use dep::some_module::Rcar) Rcar
"#]],
);
@@ -165,11 +165,11 @@ fn main() {
}
"#,
expect![[r#"
- ct RC (use dep::RC) ()
- st Rc (use dep::Rc) Rc
- st Rcar (use dep::Rcar) Rcar
- ct RC (use dep::some_module::RC) ()
- st Rc (use dep::some_module::Rc) Rc
+ ct RC (use dep::RC) ()
+ st Rc (use dep::Rc) Rc
+ st Rcar (use dep::Rcar) Rcar
+ ct RC (use dep::some_module::RC) ()
+ st Rc (use dep::some_module::Rc) Rc
st Rcar (use dep::some_module::Rcar) Rcar
"#]],
);
@@ -193,7 +193,7 @@ fn main() {
}
"#,
expect![[r#"
- ct RC (use dep::RC) ()
+ ct RC (use dep::RC) ()
ct RC (use dep::some_module::RC) ()
"#]],
);
@@ -227,7 +227,7 @@ fn main() {
}
"#,
expect![[r#"
- st ThirdStruct (use dep::some_module::ThirdStruct) ThirdStruct
+ st ThirdStruct (use dep::some_module::ThirdStruct) ThirdStruct
st AfterThirdStruct (use dep::some_module::AfterThirdStruct) AfterThirdStruct
st ThiiiiiirdStruct (use dep::some_module::ThiiiiiirdStruct) ThiiiiiirdStruct
"#]],
@@ -263,8 +263,8 @@ fn trait_function_fuzzy_completion() {
check(
fixture,
expect![[r#"
- fn weird_function() (use dep::test_mod::TestTrait) fn()
- "#]],
+ fn weird_function() (use dep::test_mod::TestTrait) fn()
+ "#]],
);
check_edit(
@@ -356,8 +356,8 @@ fn trait_method_fuzzy_completion() {
check(
fixture,
expect![[r#"
- me random_method() (use dep::test_mod::TestTrait) fn(&self)
- "#]],
+ me random_method() (use dep::test_mod::TestTrait) fn(&self)
+ "#]],
);
check_edit(
@@ -401,8 +401,8 @@ fn main() {
check(
fixture,
expect![[r#"
- me some_method() (use foo::TestTrait) fn(&self)
- "#]],
+ me some_method() (use foo::TestTrait) fn(&self)
+ "#]],
);
check_edit(
@@ -448,8 +448,8 @@ fn main() {
check(
fixture,
expect![[r#"
- me some_method() (use foo::TestTrait) fn(&self)
- "#]],
+ me some_method() (use foo::TestTrait) fn(&self)
+ "#]],
);
check_edit(
@@ -496,8 +496,8 @@ fn completion<T: Wrapper>(whatever: T) {
check(
fixture,
expect![[r#"
- me not_in_scope() (use foo::NotInScope) fn(&self)
- "#]],
+ me not_in_scope() (use foo::NotInScope) fn(&self)
+ "#]],
);
check_edit(
@@ -539,8 +539,8 @@ fn main() {
check(
fixture,
expect![[r#"
- me into() (use test_trait::TestInto) fn(self) -> T
- "#]],
+ me into() (use test_trait::TestInto) fn(self) -> T
+ "#]],
);
}
@@ -568,8 +568,8 @@ fn main() {
check(
fixture,
expect![[r#"
- fn random_method() (use dep::test_mod::TestTrait) fn()
- "#]],
+ fn random_method() (use dep::test_mod::TestTrait) fn()
+ "#]],
);
check_edit(
@@ -737,8 +737,8 @@ fn main() {
}
"#,
expect![[r#"
- me random_method() (use dep::test_mod::TestTrait) fn(&self) DEPRECATED
- "#]],
+ me random_method() (use dep::test_mod::TestTrait) fn(&self) DEPRECATED
+ "#]],
);
check(
@@ -767,8 +767,8 @@ fn main() {
}
"#,
expect![[r#"
- ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
- fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
+ ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
+ fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
me random_method(…) (use dep::test_mod::TestTrait) fn(&self) DEPRECATED
"#]],
);
@@ -1117,7 +1117,7 @@ fn main() {
tes$0
}"#,
expect![[r#"
- ct TEST_CONST (use foo::TEST_CONST) usize
+ ct TEST_CONST (use foo::TEST_CONST) usize
fn test_function() (use foo::test_function) fn() -> i32
"#]],
);
@@ -1175,8 +1175,8 @@ fn main() {
}
"#,
expect![[r#"
- fn some_fn() (use m::some_fn) fn() -> i32
- "#]],
+ fn some_fn() (use m::some_fn) fn() -> i32
+ "#]],
);
}
@@ -1691,7 +1691,7 @@ fn function() {
expect![[r#"
st FooStruct (use outer::FooStruct) BarStruct
md foo (use outer::foo)
- fn foo_fun() (use outer::foo_fun) fn()
+ fn foo_fun() (use outer::foo_fun) fn()
"#]],
);
}
@@ -1720,3 +1720,45 @@ fn function() {
"#]],
);
}
+
+#[test]
+fn intrinsics() {
+ check(
+ r#"
+ //- /core.rs crate:core
+ pub mod intrinsics {
+ extern "rust-intrinsic" {
+ pub fn transmute<Src, Dst>(src: Src) -> Dst;
+ }
+ }
+ pub mod mem {
+ pub use crate::intrinsics::transmute;
+ }
+ //- /main.rs crate:main deps:core
+ fn function() {
+ transmute$0
+ }
+ "#,
+ expect![[r#"
+ fn transmute(…) (use core::mem::transmute) unsafe fn(Src) -> Dst
+ "#]],
+ );
+ check(
+ r#"
+//- /core.rs crate:core
+pub mod intrinsics {
+ extern "rust-intrinsic" {
+ pub fn transmute<Src, Dst>(src: Src) -> Dst;
+ }
+}
+pub mod mem {
+ pub use crate::intrinsics::transmute;
+}
+//- /main.rs crate:main deps:core
+fn function() {
+ mem::transmute$0
+}
+"#,
+ expect![""],
+ );
+}
diff --git a/crates/ide-completion/src/tests/fn_param.rs b/crates/ide-completion/src/tests/fn_param.rs
index cce74604c2..4a89f874e1 100644
--- a/crates/ide-completion/src/tests/fn_param.rs
+++ b/crates/ide-completion/src/tests/fn_param.rs
@@ -140,7 +140,7 @@ fn foo2($0) {}
expect![[r#"
st Bar
bn Bar { bar }: Bar
- bn Bar {…} Bar { bar$1 }: Bar$0
+ bn Bar {…} Bar { bar$1 }: Bar$0
kw mut
kw ref
"#]],
diff --git a/crates/ide-completion/src/tests/item.rs b/crates/ide-completion/src/tests/item.rs
index 09254aed7c..f34f3d0fc2 100644
--- a/crates/ide-completion/src/tests/item.rs
+++ b/crates/ide-completion/src/tests/item.rs
@@ -20,15 +20,15 @@ fn target_type_or_trait_in_impl_block() {
impl Tra$0
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -42,15 +42,15 @@ fn target_type_in_trait_impl_block() {
impl Trait for Str$0
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
diff --git a/crates/ide-completion/src/tests/item_list.rs b/crates/ide-completion/src/tests/item_list.rs
index dfef8fa472..d3d52dc6df 100644
--- a/crates/ide-completion/src/tests/item_list.rs
+++ b/crates/ide-completion/src/tests/item_list.rs
@@ -13,7 +13,7 @@ fn in_mod_item_list() {
check(
r#"mod tests { $0 }"#,
expect![[r#"
- ma makro!(…) macro_rules! makro
+ ma makro!(…) macro_rules! makro
kw async
kw const
kw crate::
@@ -46,7 +46,7 @@ fn in_source_file_item_list() {
check(
r#"$0"#,
expect![[r#"
- ma makro!(…) macro_rules! makro
+ ma makro!(…) macro_rules! makro
md module
kw async
kw const
@@ -79,7 +79,7 @@ fn in_item_list_after_attr() {
check(
r#"#[attr] $0"#,
expect![[r#"
- ma makro!(…) macro_rules! makro
+ ma makro!(…) macro_rules! makro
md module
kw async
kw const
@@ -182,7 +182,7 @@ fn in_impl_assoc_item_list() {
check(
r#"impl Struct { $0 }"#,
expect![[r#"
- ma makro!(…) macro_rules! makro
+ ma makro!(…) macro_rules! makro
md module
kw async
kw const
@@ -202,7 +202,7 @@ fn in_impl_assoc_item_list_after_attr() {
check(
r#"impl Struct { #[attr] $0 }"#,
expect![[r#"
- ma makro!(…) macro_rules! makro
+ ma makro!(…) macro_rules! makro
md module
kw async
kw const
@@ -315,7 +315,7 @@ impl Test for () {
fn async fn function2()
fn fn function1()
fn fn function2()
- ma makro!(…) macro_rules! makro
+ ma makro!(…) macro_rules! makro
md module
ta type Type1 =
kw crate::
@@ -381,7 +381,7 @@ fn after_unit_struct() {
check(
r#"struct S; f$0"#,
expect![[r#"
- ma makro!(…) macro_rules! makro
+ ma makro!(…) macro_rules! makro
md module
kw async
kw const
@@ -503,7 +503,7 @@ fn inside_extern_blocks() {
check(
r#"extern { $0 }"#,
expect![[r#"
- ma makro!(…) macro_rules! makro
+ ma makro!(…) macro_rules! makro
md module
kw crate::
kw fn
@@ -520,7 +520,7 @@ fn inside_extern_blocks() {
check(
r#"unsafe extern { $0 }"#,
expect![[r#"
- ma makro!(…) macro_rules! makro
+ ma makro!(…) macro_rules! makro
md module
kw crate::
kw fn
diff --git a/crates/ide-completion/src/tests/pattern.rs b/crates/ide-completion/src/tests/pattern.rs
index a5eb0369b1..2f1f555e52 100644
--- a/crates/ide-completion/src/tests/pattern.rs
+++ b/crates/ide-completion/src/tests/pattern.rs
@@ -122,15 +122,15 @@ fn foo() {
expect![[r#"
ct CONST
en Enum
- ma makro!(…) macro_rules! makro
+ ma makro!(…) macro_rules! makro
md module
st Record
st Tuple
st Unit
ev TupleV
bn Record {…} Record { field$1 }$0
- bn Tuple(…) Tuple($1)$0
- bn TupleV(…) TupleV($1)$0
+ bn Tuple(…) Tuple($1)$0
+ bn TupleV(…) TupleV($1)$0
kw mut
kw ref
"#]],
@@ -151,15 +151,15 @@ fn foo() {
"#,
expect![[r#"
en SingleVariantEnum
- ma makro!(…) macro_rules! makro
+ ma makro!(…) macro_rules! makro
md module
st Record
st Tuple
st Unit
ev Variant
- bn Record {…} Record { field$1 }$0
- bn Tuple(…) Tuple($1)$0
- bn Variant Variant$0
+ bn Record {…} Record { field$1 }$0
+ bn Tuple(…) Tuple($1)$0
+ bn Variant Variant$0
kw mut
kw ref
"#]],
@@ -174,13 +174,13 @@ fn foo(a$0) {
}
"#,
expect![[r#"
- ma makro!(…) macro_rules! makro
+ ma makro!(…) macro_rules! makro
md module
st Record
st Tuple
st Unit
bn Record {…} Record { field$1 }: Record$0
- bn Tuple(…) Tuple($1): Tuple$0
+ bn Tuple(…) Tuple($1): Tuple$0
kw mut
kw ref
"#]],
@@ -191,13 +191,13 @@ fn foo(a$0: Tuple) {
}
"#,
expect![[r#"
- ma makro!(…) macro_rules! makro
+ ma makro!(…) macro_rules! makro
md module
st Record
st Tuple
st Unit
bn Record {…} Record { field$1 }$0
- bn Tuple(…) Tuple($1)$0
+ bn Tuple(…) Tuple($1)$0
bn tuple
kw mut
kw ref
@@ -240,7 +240,7 @@ fn foo() {
expect![[r#"
en E
ma m!(…) macro_rules! m
- bn E::X E::X$0
+ bn E::X E::X$0
kw mut
kw ref
"#]],
@@ -268,7 +268,7 @@ fn outer() {
st Record
st Tuple
bn Record {…} Record { field$1, .. }$0
- bn Tuple(…) Tuple($1, ..)$0
+ bn Tuple(…) Tuple($1, ..)$0
kw mut
kw ref
"#]],
@@ -291,7 +291,7 @@ impl Foo {
expect![[r#"
sp Self
st Foo
- bn Foo(…) Foo($1)$0
+ bn Foo(…) Foo($1)$0
bn Self(…) Self($1)$0
kw mut
kw ref
@@ -315,8 +315,8 @@ fn func() {
expect![[r#"
ct ASSOC_CONST const ASSOC_CONST: ()
bn RecordV {…} RecordV { field$1 }$0
- bn TupleV(…) TupleV($1)$0
- bn UnitV UnitV$0
+ bn TupleV(…) TupleV($1)$0
+ bn UnitV UnitV$0
"#]],
);
}
@@ -332,7 +332,7 @@ fn outer(Foo { bar: $0 }: Foo) {}
expect![[r#"
st Bar
st Foo
- bn Bar(…) Bar($1)$0
+ bn Bar(…) Bar($1)$0
bn Foo {…} Foo { bar$1 }$0
kw mut
kw ref
@@ -395,7 +395,7 @@ fn foo($0) {}
expect![[r#"
st Bar
st Foo
- bn Bar(…) Bar($1): Bar$0
+ bn Bar(…) Bar($1): Bar$0
bn Foo {…} Foo { bar$1 }: Foo$0
kw mut
kw ref
@@ -416,7 +416,7 @@ fn foo() {
expect![[r#"
st Bar
st Foo
- bn Bar(…) Bar($1)$0
+ bn Bar(…) Bar($1)$0
bn Foo {…} Foo { bar$1 }$0
kw mut
kw ref
@@ -436,7 +436,7 @@ fn foo() {
}
"#,
expect![[r#"
- st Bar Bar
+ st Bar Bar
kw crate::
kw self::
"#]],
@@ -451,7 +451,7 @@ fn foo() {
}
"#,
expect![[r#"
- st Foo Foo
+ st Foo Foo
kw crate::
kw self::
"#]],
@@ -535,10 +535,10 @@ fn foo() {
"#,
expect![[r#"
en Enum
- bn Enum::A Enum::A$0
- bn Enum::B {…} Enum::B { r#type$1 }$0
+ bn Enum::A Enum::A$0
+ bn Enum::B {…} Enum::B { r#type$1 }$0
bn Enum::struct {…} Enum::r#struct { r#type$1 }$0
- bn Enum::type Enum::r#type$0
+ bn Enum::type Enum::r#type$0
kw mut
kw ref
"#]],
@@ -559,10 +559,10 @@ fn foo() {
}
"#,
expect![[r#"
- bn A A$0
- bn B {…} B { r#type$1 }$0
+ bn A A$0
+ bn B {…} B { r#type$1 }$0
bn struct {…} r#struct { r#type$1 }$0
- bn type r#type$0
+ bn type r#type$0
"#]],
);
}
@@ -672,8 +672,8 @@ impl Ty {
st Ty
bn &mut self
bn &self
- bn Self(…) Self($1): Self$0
- bn Ty(…) Ty($1): Ty$0
+ bn Self(…) Self($1): Self$0
+ bn Ty(…) Ty($1): Ty$0
bn mut self
bn self
kw mut
@@ -693,8 +693,8 @@ impl Ty {
st Ty
bn &mut self
bn &self
- bn Self(…) Self($1): Self$0
- bn Ty(…) Ty($1): Ty$0
+ bn Self(…) Self($1): Self$0
+ bn Ty(…) Ty($1): Ty$0
bn mut self
bn self
kw mut
@@ -714,8 +714,8 @@ impl Ty {
st Ty
bn &mut self
bn &self
- bn Self(…) Self($1): Self$0
- bn Ty(…) Ty($1): Ty$0
+ bn Self(…) Self($1): Self$0
+ bn Ty(…) Ty($1): Ty$0
bn mut self
bn self
kw mut
@@ -734,7 +734,7 @@ impl Ty {
sp Self
st Ty
bn Self(…) Self($1): Self$0
- bn Ty(…) Ty($1): Ty$0
+ bn Ty(…) Ty($1): Ty$0
kw mut
kw ref
"#]],
@@ -763,7 +763,7 @@ fn f(x: EnumAlias<u8>) {
"#,
expect![[r#"
bn Tuple(…) Tuple($1)$0
- bn Unit Unit$0
+ bn Unit Unit$0
"#]],
);
}
diff --git a/crates/ide-completion/src/tests/predicate.rs b/crates/ide-completion/src/tests/predicate.rs
index 46a3e97d3e..c1926359ef 100644
--- a/crates/ide-completion/src/tests/predicate.rs
+++ b/crates/ide-completion/src/tests/predicate.rs
@@ -16,16 +16,16 @@ fn predicate_start() {
struct Foo<'lt, T, const C: usize> where $0 {}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo<…> Foo<'_, {unknown}, _>
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Foo<…> Foo<'_, {unknown}, _>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -89,16 +89,16 @@ fn param_list_for_for_pred() {
struct Foo<'lt, T, const C: usize> where for<'a> $0 {}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo<…> Foo<'_, {unknown}, _>
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Foo<…> Foo<'_, {unknown}, _>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -114,16 +114,16 @@ impl Record {
}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- sp Self Record
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ sp Self Record
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
diff --git a/crates/ide-completion/src/tests/proc_macros.rs b/crates/ide-completion/src/tests/proc_macros.rs
index 613f33309f..afc286b6fb 100644
--- a/crates/ide-completion/src/tests/proc_macros.rs
+++ b/crates/ide-completion/src/tests/proc_macros.rs
@@ -24,19 +24,19 @@ fn main() {
}
"#,
expect![[r#"
- me foo() fn(&self)
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn let let
- sn letm let mut
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
)
}
@@ -57,19 +57,19 @@ fn main() {
}
"#,
expect![[r#"
- me foo() fn(&self)
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn let let
- sn letm let mut
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
)
}
@@ -92,19 +92,19 @@ impl Foo {
fn main() {}
"#,
expect![[r#"
- me foo() fn(&self)
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn let let
- sn letm let mut
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
)
}
@@ -127,19 +127,19 @@ impl Foo {
fn main() {}
"#,
expect![[r#"
- me foo() fn(&self)
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn let let
- sn letm let mut
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
)
}
diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs
index 56162bb57b..a9c9f604e0 100644
--- a/crates/ide-completion/src/tests/record.rs
+++ b/crates/ide-completion/src/tests/record.rs
@@ -70,8 +70,8 @@ fn foo(baz: Baz) {
ev Ok
bn Baz::Bar Baz::Bar$0
bn Baz::Foo Baz::Foo$0
- bn Err(…) Err($1)$0
- bn Ok(…) Ok($1)$0
+ bn Err(…) Err($1)$0
+ bn Ok(…) Ok($1)$0
kw mut
kw ref
"#]],
@@ -91,20 +91,20 @@ fn foo(baz: Baz) {
}
"#,
expect![[r#"
- en Baz
- en Result
- md core
- ev Bar
- ev Err
- ev Foo
- ev Ok
- bn Bar Bar$0
- bn Err(…) Err($1)$0
- bn Foo Foo$0
- bn Ok(…) Ok($1)$0
- kw mut
- kw ref
- "#]],
+ en Baz
+ en Result
+ md core
+ ev Bar
+ ev Err
+ ev Foo
+ ev Ok
+ bn Bar Bar$0
+ bn Err(…) Err($1)$0
+ bn Foo Foo$0
+ bn Ok(…) Ok($1)$0
+ kw mut
+ kw ref
+ "#]],
);
}
@@ -184,14 +184,14 @@ fn main() {
"#,
expect![[r#"
fd ..Default::default()
- fn main() fn()
- lc foo Foo
- lc thing i32
+ fn main() fn()
+ lc foo Foo
+ lc thing i32
md core
- st Foo Foo
- st Foo {…} Foo { foo1: u32, foo2: u32 }
+ st Foo Foo
+ st Foo {…} Foo { foo1: u32, foo2: u32 }
tt Default
- bt u32 u32
+ bt u32 u32
kw crate::
kw self::
ex Foo::default()
@@ -238,8 +238,8 @@ fn main() {
"#,
expect![[r#"
fd ..Default::default()
- fd foo1 u32
- fd foo2 u32
+ fd foo1 u32
+ fd foo2 u32
"#]],
);
}
diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs
index 508f6248dd..388af48c68 100644
--- a/crates/ide-completion/src/tests/special.rs
+++ b/crates/ide-completion/src/tests/special.rs
@@ -87,7 +87,7 @@ pub mod prelude {
expect![[r#"
md std
st Option Option
- bt u32 u32
+ bt u32 u32
"#]],
);
}
@@ -113,10 +113,10 @@ mod macros {
}
"#,
expect![[r#"
- fn f() fn()
+ fn f() fn()
ma concat!(…) macro_rules! concat
md std
- bt u32 u32
+ bt u32 u32
"#]],
);
}
@@ -146,7 +146,7 @@ pub mod prelude {
md core
md std
st String String
- bt u32 u32
+ bt u32 u32
"#]],
);
}
@@ -174,7 +174,7 @@ pub mod prelude {
expect![[r#"
fn f() fn()
md std
- bt u32 u32
+ bt u32 u32
"#]],
);
}
@@ -226,9 +226,9 @@ impl S {
fn foo() { let _ = lib::S::$0 }
"#,
expect![[r#"
- ct PUBLIC_CONST pub const PUBLIC_CONST: u32
- fn public_method() fn()
- ta PublicType pub type PublicType = u32
+ ct PUBLIC_CONST pub const PUBLIC_CONST: u32
+ fn public_method() fn()
+ ta PublicType pub type PublicType = u32
"#]],
);
}
@@ -317,14 +317,14 @@ trait Sub: Super {
fn foo<T: Sub>() { T::$0 }
"#,
expect![[r#"
- ct C2 (as Sub) const C2: ()
- ct CONST (as Super) const CONST: u8
- fn func() (as Super) fn()
- fn subfunc() (as Sub) fn()
- me method(…) (as Super) fn(&self)
- me submethod(…) (as Sub) fn(&self)
+ ct C2 (as Sub) const C2: ()
+ ct CONST (as Super) const CONST: u8
+ fn func() (as Super) fn()
+ fn subfunc() (as Sub) fn()
+ me method(…) (as Super) fn(&self)
+ me submethod(…) (as Sub) fn(&self)
ta SubTy (as Sub) type SubTy
- ta Ty (as Super) type Ty
+ ta Ty (as Super) type Ty
"#]],
);
}
@@ -357,14 +357,14 @@ impl<T> Sub for Wrap<T> {
}
"#,
expect![[r#"
- ct C2 (as Sub) const C2: ()
- ct CONST (as Super) const CONST: u8
- fn func() (as Super) fn()
- fn subfunc() (as Sub) fn()
- me method(…) (as Super) fn(&self)
- me submethod(…) (as Sub) fn(&self)
+ ct C2 (as Sub) const C2: ()
+ ct CONST (as Super) const CONST: u8
+ fn func() (as Super) fn()
+ fn subfunc() (as Sub) fn()
+ me method(…) (as Super) fn(&self)
+ me submethod(…) (as Sub) fn(&self)
ta SubTy (as Sub) type SubTy
- ta Ty (as Super) type Ty
+ ta Ty (as Super) type Ty
"#]],
);
}
@@ -381,9 +381,9 @@ impl T { fn bar() {} }
fn main() { T::$0; }
"#,
expect![[r#"
- fn bar() fn()
- fn foo() fn()
- "#]],
+ fn bar() fn()
+ fn foo() fn()
+ "#]],
);
}
@@ -397,7 +397,7 @@ macro_rules! foo { () => {} }
fn main() { let _ = crate::$0 }
"#,
expect![[r#"
- fn main() fn()
+ fn main() fn()
ma foo!(…) macro_rules! foo
"#]],
);
@@ -447,9 +447,9 @@ mod p {
}
"#,
expect![[r#"
- ct RIGHT_CONST u32
- fn right_fn() fn()
- st RightType WrongType
+ ct RIGHT_CONST u32
+ fn right_fn() fn()
+ st RightType WrongType
"#]],
);
@@ -495,9 +495,9 @@ fn main() { m!(self::f$0); }
fn foo() {}
"#,
expect![[r#"
- fn foo() fn()
- fn main() fn()
- "#]],
+ fn foo() fn()
+ fn main() fn()
+ "#]],
);
}
@@ -513,9 +513,9 @@ mod m {
}
"#,
expect![[r#"
- fn z() fn()
- md z
- "#]],
+ fn z() fn()
+ md z
+ "#]],
);
}
@@ -534,8 +534,8 @@ fn foo() {
}
"#,
expect![[r#"
- fn new() fn() -> HashMap<K, V, RandomState>
- "#]],
+ fn new() fn() -> HashMap<K, V, RandomState>
+ "#]],
);
}
@@ -557,8 +557,8 @@ impl Foo {
"#,
expect![[r#"
me foo(…) fn(self)
- ev Bar Bar
- ev Baz Baz
+ ev Bar Bar
+ ev Baz Baz
"#]],
);
}
@@ -578,9 +578,9 @@ fn foo(self) {
}
"#,
expect![[r#"
- ev Bar Bar
- ev Baz Baz
- "#]],
+ ev Bar Bar
+ ev Baz Baz
+ "#]],
);
check_no_kw(
@@ -598,8 +598,8 @@ enum Foo {
}
"#,
expect![[r#"
- ev Baz Baz
- "#]],
+ ev Baz Baz
+ "#]],
);
}
@@ -623,9 +623,9 @@ impl u8 {
}
"#,
expect![[r#"
- ct MAX pub const MAX: Self
- me func(…) fn(self)
- "#]],
+ ct MAX pub const MAX: Self
+ me func(…) fn(self)
+ "#]],
);
}
@@ -643,8 +643,8 @@ fn main() {
}
"#,
expect![[r#"
- ev Bar Bar
- "#]],
+ ev Bar Bar
+ "#]],
);
}
@@ -723,7 +723,7 @@ fn bar() -> Bar {
}
"#,
expect![[r#"
- fn bar() fn()
+ fn bar() fn()
fn foo() (as Foo) fn() -> Self
ex Bar
ex bar()
@@ -787,7 +787,7 @@ fn main() {
}
"#,
expect![[r#"
- me by_macro() (as MyTrait) fn(&self)
+ me by_macro() (as MyTrait) fn(&self)
me not_by_macro() (as MyTrait) fn(&self)
"#]],
)
@@ -827,7 +827,7 @@ fn main() {
}
"#,
expect![[r#"
- me by_macro() (as MyTrait) fn(&self)
+ me by_macro() (as MyTrait) fn(&self)
me not_by_macro() (as MyTrait) fn(&self)
"#]],
)
@@ -885,10 +885,10 @@ fn main() {
}
"#,
expect![[r#"
- fn main() fn()
- lc foobar i32
- ma x!(…) macro_rules! x
- bt u32 u32
+ fn main() fn()
+ lc foobar i32
+ ma x!(…) macro_rules! x
+ bt u32 u32
"#]],
)
}
@@ -1014,7 +1014,7 @@ fn here_we_go() {
}
"#,
expect![[r#"
- fn here_we_go() fn()
+ fn here_we_go() fn()
st Foo (alias Bar) Foo
bt u32 u32
kw async
@@ -1064,9 +1064,9 @@ fn here_we_go() {
}
"#,
expect![[r#"
- fn here_we_go() fn()
+ fn here_we_go() fn()
st Foo (alias Bar, Qux, Baz) Foo
- bt u32 u32
+ bt u32 u32
kw async
kw const
kw crate::
@@ -1160,20 +1160,20 @@ fn here_we_go() {
}
"#,
expect![[r#"
- fd bar u8
+ fd bar u8
me baz() (alias qux) fn(&self) -> u8
- sn box Box::new(expr)
- sn call function(expr)
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn let let
- sn letm let mut
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
);
}
@@ -1189,7 +1189,7 @@ fn bar() { qu$0 }
expect![[r#"
fn bar() fn()
fn foo() (alias qux) fn()
- bt u32 u32
+ bt u32 u32
kw async
kw const
kw crate::
@@ -1277,10 +1277,10 @@ fn here_we_go() {
}
"#,
expect![[r#"
- fn here_we_go() fn()
+ fn here_we_go() fn()
md foo
st Bar (alias Qux) (use foo::Bar) Bar
- bt u32 u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -1315,10 +1315,9 @@ use krate::e;
fn main() {
e::$0
}"#,
- expect![
- "fn i_am_public() fn()
-"
- ],
+ expect![[r#"
+ fn i_am_public() fn()
+ "#]],
)
}
@@ -1444,8 +1443,8 @@ fn foo() {
"#,
Some('_'),
expect![[r#"
- fn foo() fn()
- bt u32 u32
+ fn foo() fn()
+ bt u32 u32
kw async
kw const
kw crate::
@@ -1498,7 +1497,7 @@ fn foo(_: a_$0) { }
"#,
Some('_'),
expect![[r#"
- bt u32 u32
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -1512,7 +1511,7 @@ fn foo<T>() {
Some('_'),
expect![[r#"
tp T
- bt u32 u32
+ bt u32 u32
kw crate::
kw self::
"#]],
diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs
index db4ac9381c..9ea262bcc5 100644
--- a/crates/ide-completion/src/tests/type_pos.rs
+++ b/crates/ide-completion/src/tests/type_pos.rs
@@ -17,18 +17,18 @@ struct Foo<'lt, T, const C: usize> {
}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- sp Self Foo<'_, {unknown}, _>
- st Foo<…> Foo<'_, {unknown}, _>
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ sp Self Foo<'_, {unknown}, _>
+ st Foo<…> Foo<'_, {unknown}, _>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -42,18 +42,18 @@ fn tuple_struct_field() {
struct Foo<'lt, T, const C: usize>(f$0);
"#,
expect![[r#"
- en Enum Enum
- ma makro!(…) macro_rules! makro
+ en Enum Enum
+ ma makro!(…) macro_rules! makro
md module
- sp Self Foo<'_, {unknown}, _>
- st Foo<…> Foo<'_, {unknown}, _>
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ sp Self Foo<'_, {unknown}, _>
+ st Foo<…> Foo<'_, {unknown}, _>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw pub
kw pub(crate)
@@ -70,16 +70,16 @@ fn fn_return_type() {
fn x<'lt, T, const C: usize>() -> $0
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -100,15 +100,15 @@ fn foo() -> B$0 {
}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
it ()
kw crate::
kw self::
@@ -124,16 +124,16 @@ struct Foo<T>(T);
const FOO: $0 = Foo(2);
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo<…> Foo<{unknown}>
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Foo<…> Foo<{unknown}>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
it Foo<i32>
kw crate::
kw self::
@@ -151,15 +151,15 @@ fn f2() {
}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
it i32
kw crate::
kw self::
@@ -179,15 +179,15 @@ fn f2() {
}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
it u64
kw crate::
kw self::
@@ -204,15 +204,15 @@ fn f2(x: u64) -> $0 {
}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
it u64
kw crate::
kw self::
@@ -230,15 +230,15 @@ fn f2(x: $0) {
}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
it i32
kw crate::
kw self::
@@ -262,17 +262,17 @@ fn foo<'lt, T, const C: usize>() {
}
"#,
expect![[r#"
- en Enum Enum
- ma makro!(…) macro_rules! makro
+ en Enum Enum
+ ma makro!(…) macro_rules! makro
md a
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
it a::Foo<a::Foo<i32>>
kw crate::
kw self::
@@ -291,17 +291,17 @@ fn foo<'lt, T, const C: usize>() {
}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo<…> Foo<{unknown}>
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Foo<…> Foo<{unknown}>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
it Foo<i32>
kw crate::
kw self::
@@ -319,16 +319,16 @@ fn foo<'lt, T, const C: usize>() {
}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -341,14 +341,14 @@ fn foo<'lt, T, const C: usize>() {
}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
+ un Union Union
"#]],
);
}
@@ -368,7 +368,7 @@ trait Trait2: Trait1 {
fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {}
"#,
expect![[r#"
- ta Foo = (as Trait2) type Foo
+ ta Foo = (as Trait2) type Foo
ta Super = (as Trait1) type Super
"#]],
);
@@ -384,18 +384,18 @@ trait Trait2<T>: Trait1 {
fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tt Trait1
tt Trait2
tp T
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -409,15 +409,15 @@ trait Trait2<T> {
fn foo<'lt, T: Trait2<self::$0>, const CONST_PARAM: usize>(_: T) {}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tt Trait2
- un Union Union
+ un Union Union
"#]],
);
}
@@ -434,18 +434,18 @@ trait Tr<T> {
impl Tr<$0
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- sp Self dyn Tr<{unknown}>
- st Record Record
- st S S
- st Tuple Tuple
- st Unit Unit
+ sp Self dyn Tr<{unknown}>
+ st Record Record
+ st S S
+ st Tuple Tuple
+ st Unit Unit
tt Tr
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -481,16 +481,16 @@ trait MyTrait<T, U> {
fn f(t: impl MyTrait<u$0
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -506,16 +506,16 @@ trait MyTrait<T, U> {
fn f(t: impl MyTrait<u8, u$0
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -549,16 +549,16 @@ trait MyTrait<T, U = u8> {
fn f(t: impl MyTrait<u$0
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -574,18 +574,18 @@ trait MyTrait<T, U = u8> {
fn f(t: impl MyTrait<u8, u$0
"#,
expect![[r#"
- en Enum Enum
- ma makro!(…) macro_rules! makro
+ en Enum Enum
+ ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
ta Item1 = (as MyTrait) type Item1
ta Item2 = (as MyTrait) type Item2
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -619,16 +619,16 @@ trait MyTrait {
fn f(t: impl MyTrait<Item1 = $0
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -644,16 +644,16 @@ trait MyTrait {
fn f(t: impl MyTrait<Item1 = u8, Item2 = $0
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -668,7 +668,7 @@ trait MyTrait {
fn f(t: impl MyTrait<C = $0
"#,
expect![[r#"
- ct CONST Unit
+ ct CONST Unit
ma makro!(…) macro_rules! makro
kw crate::
kw self::
@@ -691,9 +691,9 @@ pub struct S;
"#,
expect![[r#"
md std
- sp Self Foo
- st Foo Foo
- bt u32 u32
+ sp Self Foo
+ st Foo Foo
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -716,10 +716,10 @@ pub struct S;
"#,
expect![[r#"
md std
- sp Self Foo
- st Foo Foo
- st S S
- bt u32 u32
+ sp Self Foo
+ st Foo Foo
+ st S S
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -739,16 +739,16 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo Foo
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Foo Foo
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -766,8 +766,8 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST Unit
- ct X usize
+ ct CONST Unit
+ ct X usize
ma makro!(…) macro_rules! makro
kw crate::
kw self::
@@ -785,16 +785,16 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo Foo
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Foo Foo
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -809,8 +809,8 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST Unit
- ct X usize
+ ct CONST Unit
+ ct X usize
ma makro!(…) macro_rules! makro
kw crate::
kw self::
@@ -828,17 +828,17 @@ fn completes_const_and_type_generics_separately() {
fn foo(_: impl Bar<Baz<F$0, 0> = ()>) {}
"#,
expect![[r#"
- en Enum Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo Foo
- st Record Record
- st Tuple Tuple
- st Unit Unit
+ st Foo Foo
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Bar
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -853,8 +853,8 @@ fn completes_const_and_type_generics_separately() {
fn foo<T: Bar<Baz<(), $0> = ()>>() {}
"#,
expect![[r#"
- ct CONST Unit
- ct X usize
+ ct CONST Unit
+ ct X usize
ma makro!(…) macro_rules! makro
kw crate::
kw self::
@@ -871,8 +871,8 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST Unit
- ct X usize
+ ct CONST Unit
+ ct X usize
ma makro!(…) macro_rules! makro
kw crate::
kw self::
@@ -890,8 +890,8 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST Unit
- ct X usize
+ ct CONST Unit
+ ct X usize
ma makro!(…) macro_rules! makro
kw crate::
kw self::
@@ -908,8 +908,8 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST Unit
- ct X usize
+ ct CONST Unit
+ ct X usize
ma makro!(…) macro_rules! makro
kw crate::
kw self::
@@ -924,8 +924,8 @@ fn completes_const_and_type_generics_separately() {
impl Foo<(), $0> for () {}
"#,
expect![[r#"
- ct CONST Unit
- ct X usize
+ ct CONST Unit
+ ct X usize
ma makro!(…) macro_rules! makro
kw crate::
kw self::
@@ -942,8 +942,8 @@ fn completes_const_and_type_generics_separately() {
fn foo<T: Bar<X$0, ()>>() {}
"#,
expect![[r#"
- ct CONST Unit
- ct X usize
+ ct CONST Unit
+ ct X usize
ma makro!(…) macro_rules! makro
kw crate::
kw self::
@@ -957,7 +957,7 @@ struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>);
fn foo<'a>() { S::<F$0, _>; }
"#,
expect![[r#"
- ct CONST Unit
+ ct CONST Unit
ma makro!(…) macro_rules! makro
kw crate::
kw self::
@@ -970,7 +970,7 @@ struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>);
fn foo<'a>() { S::<'static, 'static, F$0, _>; }
"#,
expect![[r#"
- ct CONST Unit
+ ct CONST Unit
ma makro!(…) macro_rules! makro
kw crate::
kw self::
diff --git a/crates/ide-completion/src/tests/use_tree.rs b/crates/ide-completion/src/tests/use_tree.rs
index f8b76571ca..2ea2e4e4c9 100644
--- a/crates/ide-completion/src/tests/use_tree.rs
+++ b/crates/ide-completion/src/tests/use_tree.rs
@@ -92,7 +92,7 @@ use self::{foo::*, bar$0};
"#,
expect![[r#"
md foo
- st S S
+ st S S
"#]],
);
}
@@ -179,7 +179,7 @@ struct Bar;
"#,
expect![[r#"
ma foo macro_rules! foo_
- st Foo Foo
+ st Foo Foo
"#]],
);
}
@@ -203,8 +203,8 @@ impl Foo {
"#,
expect![[r#"
ev RecordVariant RecordVariant
- ev TupleVariant TupleVariant
- ev UnitVariant UnitVariant
+ ev TupleVariant TupleVariant
+ ev UnitVariant UnitVariant
"#]],
);
}
@@ -257,7 +257,7 @@ mod a {
}
"#,
expect![[r#"
- ct A usize
+ ct A usize
md b
kw super::
"#]],
@@ -450,9 +450,9 @@ pub fn foo() {}
marco_rules! m { () => {} }
"#,
expect![[r#"
- fn foo fn()
+ fn foo fn()
md simd
- st S S
+ st S S
"#]],
);
}
diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs
index 82a182806a..dab36bf20b 100644
--- a/crates/ide-db/src/imports/import_assets.rs
+++ b/crates/ide-db/src/imports/import_assets.rs
@@ -2,10 +2,10 @@
use hir::{
db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, Crate, HasCrate, ImportPathConfig,
- ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics,
+ ItemInNs, ModPath, Module, ModuleDef, PathResolution, PrefixKind, ScopeDef, Semantics,
SemanticsScope, Trait, TyFingerprint, Type,
};
-use itertools::{EitherOrBoth, Itertools};
+use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{
ast::{self, make, HasName},
@@ -13,7 +13,6 @@ use syntax::{
};
use crate::{
- helpers::item_name,
items_locator::{self, AssocSearchMode, DEFAULT_QUERY_SEARCH_LIMIT},
FxIndexSet, RootDatabase,
};
@@ -52,7 +51,7 @@ pub struct TraitImportCandidate {
#[derive(Debug)]
pub struct PathImportCandidate {
/// Optional qualifier before name.
- pub qualifier: Option<Vec<SmolStr>>,
+ pub qualifier: Vec<SmolStr>,
/// The name the item (struct, trait, enum, etc.) should have.
pub name: NameToImport,
}
@@ -264,7 +263,6 @@ impl ImportAssets {
Some(it) => it,
None => return <FxIndexSet<_>>::default().into_iter(),
};
-
let krate = self.module_with_candidate.krate();
let scope_definitions = self.scope_definitions(sema);
let mod_path = |item| {
@@ -279,11 +277,14 @@ impl ImportAssets {
};
match &self.import_candidate {
- ImportCandidate::Path(path_candidate) => {
- path_applicable_imports(sema, krate, path_candidate, mod_path, |item_to_import| {
- !scope_definitions.contains(&ScopeDef::from(item_to_import))
- })
- }
+ ImportCandidate::Path(path_candidate) => path_applicable_imports(
+ sema,
+ &scope,
+ krate,
+ path_candidate,
+ mod_path,
+ |item_to_import| !scope_definitions.contains(&ScopeDef::from(item_to_import)),
+ ),
ImportCandidate::TraitAssocItem(trait_candidate)
| ImportCandidate::TraitMethod(trait_candidate) => trait_applicable_items(
sema,
@@ -315,6 +316,7 @@ impl ImportAssets {
fn path_applicable_imports(
sema: &Semantics<'_, RootDatabase>,
+ scope: &SemanticsScope<'_>,
current_crate: Crate,
path_candidate: &PathImportCandidate,
mod_path: impl Fn(ItemInNs) -> Option<ModPath> + Copy,
@@ -322,8 +324,8 @@ fn path_applicable_imports(
) -> FxIndexSet<LocatedImport> {
let _p = tracing::info_span!("ImportAssets::path_applicable_imports").entered();
- match &path_candidate.qualifier {
- None => {
+ match &*path_candidate.qualifier {
+ [] => {
items_locator::items_with_name(
sema,
current_crate,
@@ -348,89 +350,107 @@ fn path_applicable_imports(
.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
.collect()
}
- Some(qualifier) => items_locator::items_with_name(
+ [first_qsegment, qualifier_rest @ ..] => items_locator::items_with_name(
sema,
current_crate,
- path_candidate.name.clone(),
- AssocSearchMode::Include,
+ NameToImport::Exact(first_qsegment.to_string(), true),
+ AssocSearchMode::Exclude,
)
- .filter_map(|item| import_for_item(sema.db, mod_path, qualifier, item, scope_filter))
+ .filter_map(|item| {
+ import_for_item(
+ sema,
+ scope,
+ mod_path,
+ &path_candidate.name,
+ item,
+ qualifier_rest,
+ scope_filter,
+ )
+ })
.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
.collect(),
}
}
fn import_for_item(
- db: &RootDatabase,
+ sema: &Semantics<'_, RootDatabase>,
+ scope: &SemanticsScope<'_>,
mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
+ candidate: &NameToImport,
+ resolved_qualifier: ItemInNs,
unresolved_qualifier: &[SmolStr],
- original_item: ItemInNs,
scope_filter: impl Fn(ItemInNs) -> bool,
) -> Option<LocatedImport> {
let _p = tracing::info_span!("ImportAssets::import_for_item").entered();
- let [first_segment, ..] = unresolved_qualifier else { return None };
-
- let item_as_assoc = item_as_assoc(db, original_item);
- let (original_item_candidate, trait_item_to_import) = match item_as_assoc {
- Some(assoc_item) => match assoc_item.container(db) {
- AssocItemContainer::Trait(trait_) => {
- let trait_ = ItemInNs::from(ModuleDef::from(trait_));
- (trait_, Some(trait_))
- }
- AssocItemContainer::Impl(impl_) => {
- (ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?)), None)
+ let qualifier = {
+ let mut adjusted_resolved_qualifier = resolved_qualifier;
+ if !unresolved_qualifier.is_empty() {
+ match resolved_qualifier {
+ ItemInNs::Types(ModuleDef::Module(module)) => {
+ adjusted_resolved_qualifier = sema
+ .resolve_mod_path_relative(module, unresolved_qualifier.iter().cloned())?
+ .next()?;
+ }
+ // can't resolve multiple segments for non-module item path bases
+ _ => return None,
}
- },
- None => (original_item, None),
- };
- let import_path_candidate = mod_path(original_item_candidate)?;
-
- let mut import_path_candidate_segments = import_path_candidate.segments().iter().rev();
- let predicate = |it: EitherOrBoth<&SmolStr, &Name>| match it {
- // segments match, check next one
- EitherOrBoth::Both(a, b) if b.as_str() == &**a => None,
- // segments mismatch / qualifier is longer than the path, bail out
- EitherOrBoth::Both(..) | EitherOrBoth::Left(_) => Some(false),
- // all segments match and we have exhausted the qualifier, proceed
- EitherOrBoth::Right(_) => Some(true),
- };
- if item_as_assoc.is_none() {
- let item_name = item_name(db, original_item)?;
- let last_segment = import_path_candidate_segments.next()?;
- if *last_segment != item_name {
- return None;
}
- }
- let ends_with = unresolved_qualifier
- .iter()
- .rev()
- .zip_longest(import_path_candidate_segments)
- .find_map(predicate)
- .unwrap_or(true);
- if !ends_with {
- return None;
- }
- let segment_import = find_import_for_segment(db, original_item_candidate, first_segment)?;
-
- Some(match (segment_import == original_item_candidate, trait_item_to_import) {
- (true, Some(_)) => {
- // FIXME we should be able to import both the trait and the segment,
- // but it's unclear what to do with overlapping edits (merge imports?)
- // especially in case of lazy completion edit resolutions.
- return None;
+ match adjusted_resolved_qualifier {
+ ItemInNs::Types(def) => def,
+ _ => return None,
}
- (false, Some(trait_to_import)) if scope_filter(trait_to_import) => {
- LocatedImport::new(mod_path(trait_to_import)?, trait_to_import, original_item)
+ };
+ let import_path_candidate = mod_path(resolved_qualifier)?;
+ let ty = match qualifier {
+ ModuleDef::Module(module) => {
+ return items_locator::items_with_name_in_module(
+ sema,
+ module,
+ candidate.clone(),
+ AssocSearchMode::Exclude,
+ )
+ .find(|&it| scope_filter(it))
+ .map(|item| LocatedImport::new(import_path_candidate, resolved_qualifier, item))
}
- (true, None) if scope_filter(original_item_candidate) => {
- LocatedImport::new(import_path_candidate, original_item_candidate, original_item)
+ // FIXME
+ ModuleDef::Trait(_) => return None,
+ // FIXME
+ ModuleDef::TraitAlias(_) => return None,
+ ModuleDef::TypeAlias(alias) => alias.ty(sema.db),
+ ModuleDef::BuiltinType(builtin) => builtin.ty(sema.db),
+ ModuleDef::Adt(adt) => adt.ty(sema.db),
+ _ => return None,
+ };
+ ty.iterate_path_candidates(sema.db, scope, &FxHashSet::default(), None, None, |assoc| {
+ // FIXME: Support extra trait imports
+ if assoc.container_or_implemented_trait(sema.db).is_some() {
+ return None;
}
- (false, None) if scope_filter(segment_import) => {
- LocatedImport::new(mod_path(segment_import)?, segment_import, original_item)
+ let name = assoc.name(sema.db)?;
+ let is_match = match candidate {
+ NameToImport::Prefix(text, true) => name.as_str().starts_with(text),
+ NameToImport::Prefix(text, false) => {
+ name.as_str().chars().zip(text.chars()).all(|(name_char, candidate_char)| {
+ name_char.eq_ignore_ascii_case(&candidate_char)
+ })
+ }
+ NameToImport::Exact(text, true) => name.as_str() == text,
+ NameToImport::Exact(text, false) => name.as_str().eq_ignore_ascii_case(text),
+ NameToImport::Fuzzy(text, true) => text.chars().all(|c| name.as_str().contains(c)),
+ NameToImport::Fuzzy(text, false) => text
+ .chars()
+ .all(|c| name.as_str().chars().any(|name_char| name_char.eq_ignore_ascii_case(&c))),
+ };
+ if !is_match {
+ return None;
}
- _ => return None,
+ Some(LocatedImport::new(
+ import_path_candidate.clone(),
+ resolved_qualifier,
+ assoc_to_item(assoc),
+ ))
})
}
@@ -453,45 +473,6 @@ fn item_for_path_search_assoc(db: &RootDatabase, assoc_item: AssocItem) -> Optio
})
}
-fn find_import_for_segment(
- db: &RootDatabase,
- original_item: ItemInNs,
- unresolved_first_segment: &str,
-) -> Option<ItemInNs> {
- let segment_is_name = item_name(db, original_item)
- .map(|name| name.eq_ident(unresolved_first_segment))
- .unwrap_or(false);
-
- Some(if segment_is_name {
- original_item
- } else {
- let matching_module =
- module_with_segment_name(db, unresolved_first_segment, original_item)?;
- ItemInNs::from(ModuleDef::from(matching_module))
- })
-}
-
-fn module_with_segment_name(
- db: &RootDatabase,
- segment_name: &str,
- candidate: ItemInNs,
-) -> Option<Module> {
- let mut current_module = match candidate {
- ItemInNs::Types(module_def_id) => module_def_id.module(db),
- ItemInNs::Values(module_def_id) => module_def_id.module(db),
- ItemInNs::Macros(macro_def_id) => ModuleDef::from(macro_def_id).module(db),
- };
- while let Some(module) = current_module {
- if let Some(module_name) = module.name(db) {
- if module_name.eq_ident(segment_name) {
- return Some(module);
- }
- }
- current_module = module.parent(db);
- }
- None
-}
-
fn trait_applicable_items(
sema: &Semantics<'_, RootDatabase>,
current_crate: Crate,
@@ -703,7 +684,7 @@ impl ImportCandidate {
return None;
}
Some(ImportCandidate::Path(PathImportCandidate {
- qualifier: None,
+ qualifier: vec![],
name: NameToImport::exact_case_sensitive(name.to_string()),
}))
}
@@ -730,7 +711,7 @@ fn path_import_candidate(
.segments()
.map(|seg| seg.name_ref().map(|name| SmolStr::new(name.text())))
.collect::<Option<Vec<_>>>()?;
- ImportCandidate::Path(PathImportCandidate { qualifier: Some(qualifier), name })
+ ImportCandidate::Path(PathImportCandidate { qualifier, name })
} else {
return None;
}
@@ -754,10 +735,10 @@ fn path_import_candidate(
}
Some(_) => return None,
},
- None => ImportCandidate::Path(PathImportCandidate { qualifier: None, name }),
+ None => ImportCandidate::Path(PathImportCandidate { qualifier: vec![], name }),
})
}
fn item_as_assoc(db: &RootDatabase, item: ItemInNs) -> Option<AssocItem> {
- item.as_module_def().and_then(|module_def| module_def.as_assoc_item(db))
+ item.into_module_def().as_assoc_item(db)
}
diff --git a/crates/ide-db/src/items_locator.rs b/crates/ide-db/src/items_locator.rs
index 47549a1d00..7f66ea0c10 100644
--- a/crates/ide-db/src/items_locator.rs
+++ b/crates/ide-db/src/items_locator.rs
@@ -3,10 +3,14 @@
//! The main reason for this module to exist is the fact that project's items and dependencies' items
//! are located in different caches, with different APIs.
use either::Either;
-use hir::{import_map, Crate, ItemInNs, Semantics};
+use hir::{import_map, Crate, ItemInNs, Module, Semantics};
use limit::Limit;
-use crate::{imports::import_assets::NameToImport, symbol_index, RootDatabase};
+use crate::{
+ imports::import_assets::NameToImport,
+ symbol_index::{self, SymbolsDatabase as _},
+ RootDatabase,
+};
/// A value to use, when uncertain which limit to pick.
pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(100);
@@ -20,8 +24,7 @@ pub fn items_with_name<'a>(
name: NameToImport,
assoc_item_search: AssocSearchMode,
) -> impl Iterator<Item = ItemInNs> + 'a {
- let krate_name = krate.display_name(sema.db).map(|name| name.to_string());
- let _p = tracing::info_span!("items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate_name)
+ let _p = tracing::info_span!("items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate.display_name(sema.db).map(|name| name.to_string()))
.entered();
let prefix = matches!(name, NameToImport::Prefix(..));
@@ -66,6 +69,54 @@ pub fn items_with_name<'a>(
find_items(sema, krate, local_query, external_query)
}
+/// Searches for importable items with the given name in the crate and its dependencies.
+pub fn items_with_name_in_module<'a>(
+ sema: &'a Semantics<'_, RootDatabase>,
+ module: Module,
+ name: NameToImport,
+ assoc_item_search: AssocSearchMode,
+) -> impl Iterator<Item = ItemInNs> + 'a {
+ let _p = tracing::info_span!("items_with_name_in", name = name.text(), assoc_item_search = ?assoc_item_search, ?module)
+ .entered();
+
+ let prefix = matches!(name, NameToImport::Prefix(..));
+ let local_query = match name {
+ NameToImport::Prefix(exact_name, case_sensitive)
+ | NameToImport::Exact(exact_name, case_sensitive) => {
+ let mut local_query = symbol_index::Query::new(exact_name.clone());
+ local_query.assoc_search_mode(assoc_item_search);
+ if prefix {
+ local_query.prefix();
+ } else {
+ local_query.exact();
+ }
+ if case_sensitive {
+ local_query.case_sensitive();
+ }
+ local_query
+ }
+ NameToImport::Fuzzy(fuzzy_search_string, case_sensitive) => {
+ let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone());
+ local_query.fuzzy();
+ local_query.assoc_search_mode(assoc_item_search);
+
+ if case_sensitive {
+ local_query.case_sensitive();
+ }
+
+ local_query
+ }
+ };
+ let mut local_results = Vec::new();
+ local_query.search(&[sema.db.module_symbols(module)], |local_candidate| {
+ local_results.push(match local_candidate.def {
+ hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
+ def => ItemInNs::from(def),
+ })
+ });
+ local_results.into_iter()
+}
+
fn find_items<'a>(
sema: &'a Semantics<'_, RootDatabase>,
krate: Crate,
diff --git a/crates/ide-db/src/syntax_helpers/suggest_name.rs b/crates/ide-db/src/syntax_helpers/suggest_name.rs
index b3ecc26cb2..1e08e8e309 100644
--- a/crates/ide-db/src/syntax_helpers/suggest_name.rs
+++ b/crates/ide-db/src/syntax_helpers/suggest_name.rs
@@ -377,6 +377,8 @@ fn name_of_type(ty: &hir::Type, db: &RootDatabase, edition: Edition) -> Option<S
return None;
}
name
+ } else if let Some(inner_ty) = ty.remove_ref() {
+ return name_of_type(&inner_ty, db, edition);
} else {
return None;
};
diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index dc3dee5c9c..5f38d13570 100644
--- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -778,4 +778,20 @@ fn bar(mut v: Union2) {
"#,
)
}
+
+ #[test]
+ fn raw_ref_reborrow_is_safe() {
+ check_diagnostics(
+ r#"
+fn main() {
+ let ptr: *mut i32;
+ let _addr = &raw const *ptr;
+
+ let local = 1;
+ let ptr = &local as *const i32;
+ let _addr = &raw const *ptr;
+}
+"#,
+ )
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index 656bedff1a..4accd181ca 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -90,7 +90,9 @@ fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<A
make::ty("()")
};
- if !is_editable_crate(target_module.krate(), ctx.sema.db) {
+ if !is_editable_crate(target_module.krate(), ctx.sema.db)
+ || SyntaxKind::from_keyword(field_name, ctx.edition).is_some()
+ {
return None;
}
@@ -501,4 +503,19 @@ fn main() {}
"#,
)
}
+
+ #[test]
+ fn regression_18683() {
+ check_diagnostics(
+ r#"
+struct S;
+impl S {
+ fn f(self) {
+ self.self
+ // ^^^^ error: no field `self` on type `S`
+ }
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_ident.rs b/crates/ide-diagnostics/src/handlers/unresolved_ident.rs
index 68f14a97f5..4f64dabeb5 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_ident.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_ident.rs
@@ -7,20 +7,19 @@ pub(crate) fn unresolved_ident(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedIdent,
) -> Diagnostic {
- Diagnostic::new_with_syntax_node_ptr(
- ctx,
- DiagnosticCode::RustcHardError("E0425"),
- "no such value in this scope",
- d.expr_or_pat.map(Into::into),
- )
- .experimental()
+ let mut range =
+ ctx.sema.diagnostics_display_range(d.node.map(|(node, _)| node.syntax_node_ptr()));
+ if let Some(in_node_range) = d.node.value.1 {
+ range.range = in_node_range + range.range.start();
+ }
+ Diagnostic::new(DiagnosticCode::RustcHardError("E0425"), "no such value in this scope", range)
+ .experimental()
}
#[cfg(test)]
mod tests {
use crate::tests::check_diagnostics;
- // FIXME: This should show a diagnostic
#[test]
fn feature() {
check_diagnostics(
@@ -28,6 +27,7 @@ mod tests {
//- minicore: fmt
fn main() {
format_args!("{unresolved}");
+ // ^^^^^^^^^^ error: no such value in this scope
}
"#,
)
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index 3bbbd36c1b..d385e453e2 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -16,7 +16,7 @@ use ide_db::{
};
use itertools::Itertools;
use span::{Edition, TextSize};
-use stdx::{always, format_to};
+use stdx::format_to;
use syntax::{
ast::{self, AstNode},
SmolStr, SyntaxNode, ToSmolStr,
@@ -130,14 +130,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
// In case an expansion creates multiple runnables we want to name them to avoid emitting a bunch of equally named runnables.
let mut in_macro_expansion = FxHashMap::<hir::HirFileId, Vec<Runnable>>::default();
let mut add_opt = |runnable: Option<Runnable>, def| {
- if let Some(runnable) = runnable.filter(|runnable| {
- always!(
- runnable.nav.file_id == file_id,
- "tried adding a runnable pointing to a different file: {:?} for {:?}",
- runnable.kind,
- file_id
- )
- }) {
+ if let Some(runnable) = runnable.filter(|runnable| runnable.nav.file_id == file_id) {
if let Some(def) = def {
let file_id = match def {
Definition::Module(it) => {
@@ -161,13 +154,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
Definition::SelfType(impl_) => runnable_impl(&sema, &impl_),
_ => None,
};
- add_opt(
- runnable
- .or_else(|| module_def_doctest(sema.db, def))
- // #[macro_export] mbe macros are declared in the root, while their definition may reside in a different module
- .filter(|it| it.nav.file_id == file_id),
- Some(def),
- );
+ add_opt(runnable.or_else(|| module_def_doctest(sema.db, def)), Some(def));
if let Definition::SelfType(impl_) = def {
impl_.items(db).into_iter().for_each(|assoc| {
let runnable = match assoc {
diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs
index 3d9146cc4c..d37318ff45 100644
--- a/crates/ide/src/typing.rs
+++ b/crates/ide/src/typing.rs
@@ -174,6 +174,9 @@ fn on_delimited_node_typed(
kinds: &[fn(SyntaxKind) -> bool],
) -> Option<TextEdit> {
let t = reparsed.syntax().token_at_offset(offset).right_biased()?;
+ if t.prev_token().map_or(false, |t| t.kind().is_any_identifier()) {
+ return None;
+ }
let (filter, node) = t
.parent_ancestors()
.take_while(|n| n.text_range().start() == offset)
@@ -1092,6 +1095,22 @@ fn f() {
}
#[test]
+ fn preceding_whitespace_is_significant_for_closing_brackets() {
+ type_char_noop(
+ '(',
+ r#"
+fn f() { a.b$0if true {} }
+"#,
+ );
+ type_char_noop(
+ '(',
+ r#"
+fn f() { foo$0{} }
+"#,
+ );
+ }
+
+ #[test]
fn adds_closing_parenthesis_for_pat() {
type_char(
'(',
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index aa64f570ed..1b2162dad0 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -487,7 +487,7 @@ impl ProcMacroExpander for Expander {
match self.0.expand(
subtree,
attrs,
- env.clone(),
+ env.clone().into(),
def_site,
call_site,
mixed_site,
diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs
index 8ece5af527..0ac11371c5 100644
--- a/crates/parser/src/grammar/items.rs
+++ b/crates/parser/src/grammar/items.rs
@@ -72,8 +72,19 @@ pub(super) fn item_or_macro(p: &mut Parser<'_>, stop_on_r_curly: bool, is_in_ext
// macro_rules! ()
// macro_rules! []
if paths::is_use_path_start(p) {
- macro_call(p, m);
- return;
+ paths::use_path(p);
+ // Do not create a MACRO_CALL node here if this isn't a macro call, this causes problems with completion.
+
+ // test_err path_item_without_excl
+ // foo
+ if p.at(T![!]) {
+ macro_call(p, m);
+ return;
+ } else {
+ m.complete(p, ERROR);
+ p.error("expected an item");
+ return;
+ }
}
m.abandon(p);
@@ -410,8 +421,7 @@ fn fn_(p: &mut Parser<'_>, m: Marker) {
}
fn macro_call(p: &mut Parser<'_>, m: Marker) {
- assert!(paths::is_use_path_start(p));
- paths::use_path(p);
+ assert!(p.at(T![!]));
match macro_call_after_excl(p) {
BlockLike::Block => (),
BlockLike::NotBlock => {
diff --git a/crates/parser/src/tests/top_entries.rs b/crates/parser/src/tests/top_entries.rs
index 7076e03ba4..6cad71093f 100644
--- a/crates/parser/src/tests/top_entries.rs
+++ b/crates/parser/src/tests/top_entries.rs
@@ -30,22 +30,20 @@ fn source_file() {
TopEntryPoint::SourceFile,
"@error@",
expect![[r#"
- SOURCE_FILE
- ERROR
- AT "@"
- MACRO_CALL
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "error"
- ERROR
- AT "@"
- error 0: expected an item
- error 6: expected BANG
- error 6: expected `{`, `[`, `(`
- error 6: expected SEMICOLON
- error 6: expected an item
- "#]],
+ SOURCE_FILE
+ ERROR
+ AT "@"
+ ERROR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "error"
+ ERROR
+ AT "@"
+ error 0: expected an item
+ error 6: expected an item
+ error 6: expected an item
+ "#]],
);
}
diff --git a/crates/parser/test_data/generated/runner.rs b/crates/parser/test_data/generated/runner.rs
index 003b7fda94..b9f87b6af2 100644
--- a/crates/parser/test_data/generated/runner.rs
+++ b/crates/parser/test_data/generated/runner.rs
@@ -775,6 +775,10 @@ mod err {
run_and_expect_errors("test_data/parser/inline/err/missing_fn_param_type.rs");
}
#[test]
+ fn path_item_without_excl() {
+ run_and_expect_errors("test_data/parser/inline/err/path_item_without_excl.rs");
+ }
+ #[test]
fn pointer_type_no_mutability() {
run_and_expect_errors("test_data/parser/inline/err/pointer_type_no_mutability.rs");
}
diff --git a/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast b/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast
index ec6c315100..3159a15a3b 100644
--- a/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast
+++ b/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast
@@ -10,20 +10,20 @@ SOURCE_FILE
USE_KW "use"
ERROR
SLASH "/"
- MACRO_CALL
+ ERROR
PATH
PATH_SEGMENT
NAME_REF
IDENT "bin"
ERROR
SLASH "/"
- MACRO_CALL
+ ERROR
PATH
PATH_SEGMENT
NAME_REF
IDENT "env"
WHITESPACE " "
- MACRO_CALL
+ ERROR
PATH
PATH_SEGMENT
NAME_REF
@@ -33,13 +33,7 @@ error 23: expected `[`
error 23: expected an item
error 27: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
error 28: expected SEMICOLON
-error 31: expected BANG
-error 31: expected `{`, `[`, `(`
-error 31: expected SEMICOLON
error 31: expected an item
-error 35: expected BANG
-error 35: expected `{`, `[`, `(`
-error 35: expected SEMICOLON
-error 41: expected BANG
-error 41: expected `{`, `[`, `(`
-error 41: expected SEMICOLON
+error 31: expected an item
+error 35: expected an item
+error 41: expected an item
diff --git a/crates/parser/test_data/parser/err/0008_item_block_recovery.rast b/crates/parser/test_data/parser/err/0008_item_block_recovery.rast
index 60b2fe9875..2a296fe4aa 100644
--- a/crates/parser/test_data/parser/err/0008_item_block_recovery.rast
+++ b/crates/parser/test_data/parser/err/0008_item_block_recovery.rast
@@ -14,14 +14,15 @@ SOURCE_FILE
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n\n"
- MACRO_CALL
+ ERROR
PATH
PATH_SEGMENT
NAME_REF
IDENT "bar"
- TOKEN_TREE
- L_PAREN "("
- R_PAREN ")"
+ ERROR
+ L_PAREN "("
+ ERROR
+ R_PAREN ")"
WHITESPACE " "
ERROR
L_CURLY "{"
@@ -75,6 +76,7 @@ SOURCE_FILE
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n"
-error 17: expected BANG
-error 19: expected SEMICOLON
+error 17: expected an item
+error 17: expected an item
+error 18: expected an item
error 20: expected an item
diff --git a/crates/parser/test_data/parser/err/0013_invalid_type.rast b/crates/parser/test_data/parser/err/0013_invalid_type.rast
index b485c71ab3..8c8debb8b0 100644
--- a/crates/parser/test_data/parser/err/0013_invalid_type.rast
+++ b/crates/parser/test_data/parser/err/0013_invalid_type.rast
@@ -46,7 +46,7 @@ SOURCE_FILE
ERROR
AT "@"
WHITESPACE " "
- MACRO_CALL
+ ERROR
PATH
PATH_SEGMENT
NAME_REF
@@ -72,9 +72,7 @@ error 67: expected R_ANGLE
error 67: expected R_PAREN
error 67: expected SEMICOLON
error 67: expected an item
-error 72: expected BANG
-error 72: expected `{`, `[`, `(`
-error 72: expected SEMICOLON
+error 72: expected an item
error 72: expected an item
error 73: expected an item
error 79: expected an item
diff --git a/crates/parser/test_data/parser/err/0044_item_modifiers.rast b/crates/parser/test_data/parser/err/0044_item_modifiers.rast
index 76464bf7cc..d6e3219c39 100644
--- a/crates/parser/test_data/parser/err/0044_item_modifiers.rast
+++ b/crates/parser/test_data/parser/err/0044_item_modifiers.rast
@@ -26,14 +26,15 @@ SOURCE_FILE
ERROR
FN_KW "fn"
WHITESPACE " "
- MACRO_CALL
+ ERROR
PATH
PATH_SEGMENT
NAME_REF
IDENT "bar"
- TOKEN_TREE
- L_PAREN "("
- R_PAREN ")"
+ ERROR
+ L_PAREN "("
+ ERROR
+ R_PAREN ")"
WHITESPACE " "
ERROR
L_CURLY "{"
@@ -43,6 +44,7 @@ error 6: expected fn, trait or impl
error 38: expected a name
error 40: missing type for `const` or `static`
error 40: expected SEMICOLON
-error 44: expected BANG
-error 46: expected SEMICOLON
+error 44: expected an item
+error 44: expected an item
+error 45: expected an item
error 47: expected an item
diff --git a/crates/parser/test_data/parser/err/0055_impl_use.rast b/crates/parser/test_data/parser/err/0055_impl_use.rast
index 751f007df9..87a8b519d7 100644
--- a/crates/parser/test_data/parser/err/0055_impl_use.rast
+++ b/crates/parser/test_data/parser/err/0055_impl_use.rast
@@ -12,15 +12,16 @@ SOURCE_FILE
ERROR
USE_KW "use"
WHITESPACE " "
- MACRO_CALL
+ ERROR
PATH
PATH_SEGMENT
NAME_REF
IDENT "std"
+ ERROR
SEMICOLON ";"
WHITESPACE "\n"
error 8: expected R_ANGLE
error 8: expected type
error 11: expected `{`
-error 15: expected BANG
-error 15: expected `{`, `[`, `(`
+error 15: expected an item
+error 15: expected an item
diff --git a/crates/parser/test_data/parser/inline/err/gen_fn.rast b/crates/parser/test_data/parser/inline/err/gen_fn.rast
index 9609ece77d..f8a7d0e552 100644
--- a/crates/parser/test_data/parser/inline/err/gen_fn.rast
+++ b/crates/parser/test_data/parser/inline/err/gen_fn.rast
@@ -1,5 +1,5 @@
SOURCE_FILE
- MACRO_CALL
+ ERROR
PATH
PATH_SEGMENT
NAME_REF
@@ -22,7 +22,7 @@ SOURCE_FILE
ERROR
ASYNC_KW "async"
WHITESPACE " "
- MACRO_CALL
+ ERROR
PATH
PATH_SEGMENT
NAME_REF
@@ -42,10 +42,6 @@ SOURCE_FILE
L_CURLY "{"
R_CURLY "}"
WHITESPACE "\n"
-error 3: expected BANG
-error 3: expected `{`, `[`, `(`
-error 3: expected SEMICOLON
+error 3: expected an item
error 24: expected fn, trait or impl
-error 28: expected BANG
-error 28: expected `{`, `[`, `(`
-error 28: expected SEMICOLON
+error 28: expected an item
diff --git a/crates/parser/test_data/parser/inline/err/path_item_without_excl.rast b/crates/parser/test_data/parser/inline/err/path_item_without_excl.rast
new file mode 100644
index 0000000000..a22dff1a67
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/err/path_item_without_excl.rast
@@ -0,0 +1,8 @@
+SOURCE_FILE
+ ERROR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ WHITESPACE "\n"
+error 3: expected an item
diff --git a/crates/parser/test_data/parser/inline/err/path_item_without_excl.rs b/crates/parser/test_data/parser/inline/err/path_item_without_excl.rs
new file mode 100644
index 0000000000..257cc5642c
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/err/path_item_without_excl.rs
@@ -0,0 +1 @@
+foo
diff --git a/crates/paths/Cargo.toml b/crates/paths/Cargo.toml
index d4b0a54ed6..f0dafab70c 100644
--- a/crates/paths/Cargo.toml
+++ b/crates/paths/Cargo.toml
@@ -14,10 +14,9 @@ doctest = false
[dependencies]
camino.workspace = true
-serde = { workspace = true, optional = true }
[features]
-serde1 = ["camino/serde1", "dep:serde"]
+serde1 = ["camino/serde1"]
[lints]
workspace = true
diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml
index 84b877f026..dac8e09435 100644
--- a/crates/proc-macro-api/Cargo.toml
+++ b/crates/proc-macro-api/Cargo.toml
@@ -14,6 +14,7 @@ doctest = false
[dependencies]
serde.workspace = true
+serde_derive.workspace = true
serde_json = { workspace = true, features = ["unbounded_depth"] }
tracing.workspace = true
rustc-hash.workspace = true
@@ -23,11 +24,9 @@ indexmap.workspace = true
paths = { workspace = true, features = ["serde1"] }
tt.workspace = true
stdx.workspace = true
-# Ideally this crate would not depend on salsa things, but we need span information here which wraps
-# InternIds for the syntax context
-span.workspace = true
-# only here due to the `Env` newtype :/
-base-db.workspace = true
+# span = {workspace = true, default-features = false} does not work
+span = { path = "../span", version = "0.0.0", default-features = false}
+
intern.workspace = true
[lints]
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index 011baad65f..e54d501b94 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -9,7 +9,6 @@ pub mod json;
pub mod msg;
mod process;
-use base_db::Env;
use paths::{AbsPath, AbsPathBuf};
use span::Span;
use std::{fmt, io, sync::Arc};
@@ -148,7 +147,7 @@ impl ProcMacro {
&self,
subtree: &tt::Subtree<Span>,
attr: Option<&tt::Subtree<Span>>,
- env: Env,
+ env: Vec<(String, String)>,
def_site: Span,
call_site: Span,
mixed_site: Span,
@@ -179,7 +178,7 @@ impl ProcMacro {
},
},
lib: self.dylib_path.to_path_buf().into(),
- env: env.into(),
+ env,
current_dir,
};
diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs
index 883528558d..bbd9f582df 100644
--- a/crates/proc-macro-api/src/msg.rs
+++ b/crates/proc-macro-api/src/msg.rs
@@ -4,7 +4,8 @@ pub(crate) mod flat;
use std::io::{self, BufRead, Write};
use paths::Utf8PathBuf;
-use serde::{de::DeserializeOwned, Deserialize, Serialize};
+use serde::de::DeserializeOwned;
+use serde_derive::{Deserialize, Serialize};
use crate::ProcMacroKind;
@@ -123,7 +124,7 @@ impl ExpnGlobals {
}
}
-pub trait Message: Serialize + DeserializeOwned {
+pub trait Message: serde::Serialize + DeserializeOwned {
fn read<R: BufRead>(
from_proto: ProtocolRead<R>,
inp: &mut R,
diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs
index af3412e90e..ce4b060fca 100644
--- a/crates/proc-macro-api/src/msg/flat.rs
+++ b/crates/proc-macro-api/src/msg/flat.rs
@@ -39,10 +39,10 @@ use std::collections::VecDeque;
use intern::Symbol;
use rustc_hash::FxHashMap;
-use serde::{Deserialize, Serialize};
+use serde_derive::{Deserialize, Serialize};
use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange};
-use crate::msg::EXTENDED_LEAF_DATA;
+use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA};
pub type SpanDataIndexMap =
indexmap::IndexSet<Span, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
@@ -145,7 +145,11 @@ impl FlatTree {
w.write(subtree);
FlatTree {
- subtree: write_vec(w.subtree, SubtreeRepr::write),
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ write_vec(w.subtree, SubtreeRepr::write_with_close_span)
+ } else {
+ write_vec(w.subtree, SubtreeRepr::write)
+ },
literal: if version >= EXTENDED_LEAF_DATA {
write_vec(w.literal, LiteralRepr::write_with_kind)
} else {
@@ -179,7 +183,11 @@ impl FlatTree {
w.write(subtree);
FlatTree {
- subtree: write_vec(w.subtree, SubtreeRepr::write),
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ write_vec(w.subtree, SubtreeRepr::write_with_close_span)
+ } else {
+ write_vec(w.subtree, SubtreeRepr::write)
+ },
literal: if version >= EXTENDED_LEAF_DATA {
write_vec(w.literal, LiteralRepr::write_with_kind)
} else {
@@ -202,7 +210,11 @@ impl FlatTree {
span_data_table: &SpanDataIndexMap,
) -> tt::Subtree<Span> {
Reader {
- subtree: read_vec(self.subtree, SubtreeRepr::read),
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ read_vec(self.subtree, SubtreeRepr::read_with_close_span)
+ } else {
+ read_vec(self.subtree, SubtreeRepr::read)
+ },
literal: if version >= EXTENDED_LEAF_DATA {
read_vec(self.literal, LiteralRepr::read_with_kind)
} else {
@@ -224,7 +236,11 @@ impl FlatTree {
pub fn to_subtree_unresolved(self, version: u32) -> tt::Subtree<TokenId> {
Reader {
- subtree: read_vec(self.subtree, SubtreeRepr::read),
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ read_vec(self.subtree, SubtreeRepr::read_with_close_span)
+ } else {
+ read_vec(self.subtree, SubtreeRepr::read)
+ },
literal: if version >= EXTENDED_LEAF_DATA {
read_vec(self.literal, LiteralRepr::read_with_kind)
} else {
@@ -257,7 +273,26 @@ fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u
}
impl SubtreeRepr {
- fn write(self) -> [u32; 5] {
+ fn write(self) -> [u32; 4] {
+ let kind = match self.kind {
+ tt::DelimiterKind::Invisible => 0,
+ tt::DelimiterKind::Parenthesis => 1,
+ tt::DelimiterKind::Brace => 2,
+ tt::DelimiterKind::Bracket => 3,
+ };
+ [self.open.0, kind, self.tt[0], self.tt[1]]
+ }
+ fn read([open, kind, lo, len]: [u32; 4]) -> SubtreeRepr {
+ let kind = match kind {
+ 0 => tt::DelimiterKind::Invisible,
+ 1 => tt::DelimiterKind::Parenthesis,
+ 2 => tt::DelimiterKind::Brace,
+ 3 => tt::DelimiterKind::Bracket,
+ other => panic!("bad kind {other}"),
+ };
+ SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, tt: [lo, len] }
+ }
+ fn write_with_close_span(self) -> [u32; 5] {
let kind = match self.kind {
tt::DelimiterKind::Invisible => 0,
tt::DelimiterKind::Parenthesis => 1,
@@ -266,7 +301,7 @@ impl SubtreeRepr {
};
[self.open.0, self.close.0, kind, self.tt[0], self.tt[1]]
}
- fn read([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr {
+ fn read_with_close_span([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr {
let kind = match kind {
0 => tt::DelimiterKind::Invisible,
1 => tt::DelimiterKind::Parenthesis,
diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs
index b1e35b7a08..4045e25fdf 100644
--- a/crates/proc-macro-api/src/process.rs
+++ b/crates/proc-macro-api/src/process.rs
@@ -56,25 +56,8 @@ impl ProcMacroProcessSrv {
match srv.version_check() {
Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new(
io::ErrorKind::Other,
- format!(
- "The version of the proc-macro server ({v}) in your Rust toolchain \
- is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}).
-\
- This will prevent proc-macro expansion from working. \
- Please consider updating your rust-analyzer to ensure compatibility with your \
- current toolchain."
- ),
- )),
- Ok(v) if v < RUST_ANALYZER_SPAN_SUPPORT => Err(io::Error::new(
- io::ErrorKind::Other,
- format!(
- "The version of the proc-macro server ({v}) in your Rust toolchain \
- is too old and no longer supported by your rust-analyzer which requires\
- version {RUST_ANALYZER_SPAN_SUPPORT} or higher.
-\
- This will prevent proc-macro expansion from working. \
- Please consider updating your toolchain or downgrading your rust-analyzer \
- to ensure compatibility with your current toolchain."
+ format!( "The version of the proc-macro server ({v}) in your Rust toolchain is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}).
+ This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain."
),
)),
Ok(v) => {
@@ -89,10 +72,10 @@ impl ProcMacroProcessSrv {
tracing::info!("Proc-macro server span mode: {:?}", srv.mode);
Ok(srv)
}
- Err(e) => Err(io::Error::new(
- io::ErrorKind::Other,
- format!("Failed to fetch proc-macro server version: {e}"),
- )),
+ Err(e) => {
+ tracing::info!(%e, "proc-macro version check failed, restarting and assuming version 0");
+ create_srv(false)
+ }
}
}
diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml
index 4fabcc9006..9838596945 100644
--- a/crates/proc-macro-srv/Cargo.toml
+++ b/crates/proc-macro-srv/Cargo.toml
@@ -21,8 +21,8 @@ stdx.workspace = true
tt.workspace = true
syntax-bridge.workspace = true
paths.workspace = true
-base-db.workspace = true
-span.workspace = true
+# span = {workspace = true, default-features = false} does not work
+span = { path = "../span", version = "0.0.0", default-features = false}
proc-macro-api.workspace = true
intern.workspace = true
diff --git a/crates/project-model/Cargo.toml b/crates/project-model/Cargo.toml
index 68e0e1ba55..ed647950e6 100644
--- a/crates/project-model/Cargo.toml
+++ b/crates/project-model/Cargo.toml
@@ -19,6 +19,7 @@ rustc-hash.workspace = true
semver.workspace = true
serde_json.workspace = true
serde.workspace = true
+serde_derive.workspace = true
tracing.workspace = true
triomphe.workspace = true
la-arena.workspace = true
diff --git a/crates/project-model/src/build_dependencies.rs b/crates/project-model/src/build_dependencies.rs
index dc71b13eee..524323b973 100644
--- a/crates/project-model/src/build_dependencies.rs
+++ b/crates/project-model/src/build_dependencies.rs
@@ -15,7 +15,7 @@ use itertools::Itertools;
use la_arena::ArenaMap;
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
-use serde::Deserialize;
+use serde::Deserialize as _;
use toolchain::Tool;
use crate::{
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index 4ae3426ed9..ba4946bf0b 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -8,7 +8,7 @@ use cargo_metadata::{CargoOpt, MetadataCommand};
use la_arena::{Arena, Idx};
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
-use serde::Deserialize;
+use serde_derive::Deserialize;
use serde_json::from_value;
use span::Edition;
use toolchain::Tool;
diff --git a/crates/ra-salsa/Cargo.toml b/crates/ra-salsa/Cargo.toml
index f8a3156fe4..57a20be0ca 100644
--- a/crates/ra-salsa/Cargo.toml
+++ b/crates/ra-salsa/Cargo.toml
@@ -20,7 +20,7 @@ parking_lot = "0.12.1"
rustc-hash = "2.0.0"
smallvec = "1.0.0"
oorandom = "11"
-triomphe = "0.1.11"
+triomphe.workspace = true
itertools.workspace = true
ra-salsa-macros = { version = "0.0.0", path = "ra-salsa-macros", package = "salsa-macros" }
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 7c8610280b..fa9ff6b56d 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -36,6 +36,7 @@ rayon.workspace = true
rustc-hash.workspace = true
serde_json = { workspace = true, features = ["preserve_order"] }
serde.workspace = true
+serde_derive.workspace = true
tenthash = "0.4.0"
num_cpus = "1.15.0"
mimalloc = { version = "0.1.30", default-features = false, optional = true }
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 40fd294e72..b06117f738 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -644,7 +644,8 @@ config_data! {
/// Aliased as `"checkOnSave.targets"`.
check_targets | checkOnSave_targets | checkOnSave_target: Option<CheckOnSaveTargets> = None,
/// Whether `--workspace` should be passed to `cargo check`.
- /// If false, `-p <package>` will be passed instead.
+ /// If false, `-p <package>` will be passed instead if applicable. In case it is not, no
+ /// check will be performed.
check_workspace: bool = true,
/// These proc-macros will be ignored when trying to expand them.
diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs
index 22910ee4c6..0b51dd87fe 100644
--- a/crates/rust-analyzer/src/diagnostics.rs
+++ b/crates/rust-analyzer/src/diagnostics.rs
@@ -3,6 +3,7 @@ pub(crate) mod to_proto;
use std::mem;
+use cargo_metadata::PackageId;
use ide::FileId;
use ide_db::FxHashMap;
use itertools::Itertools;
@@ -13,7 +14,8 @@ use triomphe::Arc;
use crate::{global_state::GlobalStateSnapshot, lsp, lsp_ext, main_loop::DiagnosticsTaskKind};
-pub(crate) type CheckFixes = Arc<IntMap<usize, IntMap<FileId, Vec<Fix>>>>;
+pub(crate) type CheckFixes =
+ Arc<IntMap<usize, FxHashMap<Option<Arc<PackageId>>, IntMap<FileId, Vec<Fix>>>>>;
#[derive(Debug, Default, Clone)]
pub struct DiagnosticsMapConfig {
@@ -31,7 +33,10 @@ pub(crate) struct DiagnosticCollection {
pub(crate) native_syntax: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>,
pub(crate) native_semantic: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>,
// FIXME: should be Vec<flycheck::Diagnostic>
- pub(crate) check: IntMap<usize, IntMap<FileId, Vec<lsp_types::Diagnostic>>>,
+ pub(crate) check: IntMap<
+ usize,
+ FxHashMap<Option<Arc<PackageId>>, IntMap<FileId, Vec<lsp_types::Diagnostic>>>,
+ >,
pub(crate) check_fixes: CheckFixes,
changes: IntSet<FileId>,
/// Counter for supplying a new generation number for diagnostics.
@@ -50,18 +55,37 @@ pub(crate) struct Fix {
impl DiagnosticCollection {
pub(crate) fn clear_check(&mut self, flycheck_id: usize) {
- if let Some(it) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) {
- it.clear();
- }
- if let Some(it) = self.check.get_mut(&flycheck_id) {
- self.changes.extend(it.drain().map(|(key, _value)| key));
+ let Some(check) = self.check.get_mut(&flycheck_id) else {
+ return;
+ };
+ self.changes.extend(check.drain().flat_map(|(_, v)| v.into_keys()));
+ if let Some(fixes) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) {
+ fixes.clear();
}
}
pub(crate) fn clear_check_all(&mut self) {
Arc::make_mut(&mut self.check_fixes).clear();
- self.changes
- .extend(self.check.values_mut().flat_map(|it| it.drain().map(|(key, _value)| key)))
+ self.changes.extend(
+ self.check.values_mut().flat_map(|it| it.drain().flat_map(|(_, v)| v.into_keys())),
+ )
+ }
+
+ pub(crate) fn clear_check_for_package(
+ &mut self,
+ flycheck_id: usize,
+ package_id: Arc<PackageId>,
+ ) {
+ let Some(check) = self.check.get_mut(&flycheck_id) else {
+ return;
+ };
+ let package_id = Some(package_id);
+ if let Some(checks) = check.remove(&package_id) {
+ self.changes.extend(checks.into_keys());
+ }
+ if let Some(fixes) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) {
+ fixes.remove(&package_id);
+ }
}
pub(crate) fn clear_native_for(&mut self, file_id: FileId) {
@@ -73,11 +97,19 @@ impl DiagnosticCollection {
pub(crate) fn add_check_diagnostic(
&mut self,
flycheck_id: usize,
+ package_id: &Option<Arc<PackageId>>,
file_id: FileId,
diagnostic: lsp_types::Diagnostic,
fix: Option<Box<Fix>>,
) {
- let diagnostics = self.check.entry(flycheck_id).or_default().entry(file_id).or_default();
+ let diagnostics = self
+ .check
+ .entry(flycheck_id)
+ .or_default()
+ .entry(package_id.clone())
+ .or_default()
+ .entry(file_id)
+ .or_default();
for existing_diagnostic in diagnostics.iter() {
if are_diagnostics_equal(existing_diagnostic, &diagnostic) {
return;
@@ -86,7 +118,14 @@ impl DiagnosticCollection {
if let Some(fix) = fix {
let check_fixes = Arc::make_mut(&mut self.check_fixes);
- check_fixes.entry(flycheck_id).or_default().entry(file_id).or_default().push(*fix);
+ check_fixes
+ .entry(flycheck_id)
+ .or_default()
+ .entry(package_id.clone())
+ .or_default()
+ .entry(file_id)
+ .or_default()
+ .push(*fix);
}
diagnostics.push(diagnostic);
self.changes.insert(file_id);
@@ -135,7 +174,12 @@ impl DiagnosticCollection {
) -> impl Iterator<Item = &lsp_types::Diagnostic> {
let native_syntax = self.native_syntax.get(&file_id).into_iter().flat_map(|(_, d)| d);
let native_semantic = self.native_semantic.get(&file_id).into_iter().flat_map(|(_, d)| d);
- let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten();
+ let check = self
+ .check
+ .values()
+ .flat_map(|it| it.values())
+ .filter_map(move |it| it.get(&file_id))
+ .flatten();
native_syntax.chain(native_semantic).chain(check)
}
diff --git a/crates/rust-analyzer/src/flycheck.rs b/crates/rust-analyzer/src/flycheck.rs
index b035d779a7..53c145f884 100644
--- a/crates/rust-analyzer/src/flycheck.rs
+++ b/crates/rust-analyzer/src/flycheck.rs
@@ -1,17 +1,20 @@
//! Flycheck provides the functionality needed to run `cargo check` to provide
//! LSP diagnostics based on the output of the command.
-use std::{fmt, io, process::Command, time::Duration};
+use std::{fmt, io, mem, process::Command, time::Duration};
+use cargo_metadata::PackageId;
use crossbeam_channel::{select_biased, unbounded, Receiver, Sender};
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::FxHashMap;
-use serde::Deserialize;
+use serde::Deserialize as _;
+use serde_derive::Deserialize;
pub(crate) use cargo_metadata::diagnostic::{
Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan,
};
use toolchain::Tool;
+use triomphe::Arc;
use crate::command::{CommandHandle, ParseFromLine};
@@ -150,10 +153,19 @@ impl FlycheckHandle {
pub(crate) enum FlycheckMessage {
/// Request adding a diagnostic with fixes included to a file
- AddDiagnostic { id: usize, workspace_root: AbsPathBuf, diagnostic: Diagnostic },
+ AddDiagnostic {
+ id: usize,
+ workspace_root: Arc<AbsPathBuf>,
+ diagnostic: Diagnostic,
+ package_id: Option<Arc<PackageId>>,
+ },
- /// Request clearing all previous diagnostics
- ClearDiagnostics { id: usize },
+ /// Request clearing all outdated diagnostics.
+ ClearDiagnostics {
+ id: usize,
+ /// The package whose diagnostics to clear, or if unspecified, all diagnostics.
+ package_id: Option<Arc<PackageId>>,
+ },
/// Request check progress notification to client
Progress {
@@ -166,15 +178,18 @@ pub(crate) enum FlycheckMessage {
impl fmt::Debug for FlycheckMessage {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
- FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic } => f
+ FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic, package_id } => f
.debug_struct("AddDiagnostic")
.field("id", id)
.field("workspace_root", workspace_root)
+ .field("package_id", package_id)
.field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code))
.finish(),
- FlycheckMessage::ClearDiagnostics { id } => {
- f.debug_struct("ClearDiagnostics").field("id", id).finish()
- }
+ FlycheckMessage::ClearDiagnostics { id, package_id } => f
+ .debug_struct("ClearDiagnostics")
+ .field("id", id)
+ .field("package_id", package_id)
+ .finish(),
FlycheckMessage::Progress { id, progress } => {
f.debug_struct("Progress").field("id", id).field("progress", progress).finish()
}
@@ -200,12 +215,13 @@ enum StateChange {
struct FlycheckActor {
/// The workspace id of this flycheck instance.
id: usize,
+
sender: Sender<FlycheckMessage>,
config: FlycheckConfig,
manifest_path: Option<AbsPathBuf>,
/// Either the workspace root of the workspace we are flychecking,
/// or the project root of the project.
- root: AbsPathBuf,
+ root: Arc<AbsPathBuf>,
sysroot_root: Option<AbsPathBuf>,
/// CargoHandle exists to wrap around the communication needed to be able to
/// run `cargo check` without blocking. Currently the Rust standard library
@@ -215,8 +231,13 @@ struct FlycheckActor {
command_handle: Option<CommandHandle<CargoCheckMessage>>,
/// The receiver side of the channel mentioned above.
command_receiver: Option<Receiver<CargoCheckMessage>>,
+ package_status: FxHashMap<Arc<PackageId>, DiagnosticReceived>,
+}
- status: FlycheckStatus,
+#[derive(PartialEq, Eq, Copy, Clone, Debug)]
+enum DiagnosticReceived {
+ Yes,
+ No,
}
#[allow(clippy::large_enum_variant)]
@@ -225,13 +246,6 @@ enum Event {
CheckEvent(Option<CargoCheckMessage>),
}
-#[derive(PartialEq)]
-enum FlycheckStatus {
- Started,
- DiagnosticSent,
- Finished,
-}
-
pub(crate) const SAVED_FILE_PLACEHOLDER: &str = "$saved_file";
impl FlycheckActor {
@@ -249,11 +263,11 @@ impl FlycheckActor {
sender,
config,
sysroot_root,
- root: workspace_root,
+ root: Arc::new(workspace_root),
manifest_path,
command_handle: None,
command_receiver: None,
- status: FlycheckStatus::Finished,
+ package_status: FxHashMap::default(),
}
}
@@ -306,13 +320,11 @@ impl FlycheckActor {
self.command_handle = Some(command_handle);
self.command_receiver = Some(receiver);
self.report_progress(Progress::DidStart);
- self.status = FlycheckStatus::Started;
}
Err(error) => {
self.report_progress(Progress::DidFailToRestart(format!(
"Failed to run the following command: {formatted_command} error={error}"
)));
- self.status = FlycheckStatus::Finished;
}
}
}
@@ -332,37 +344,62 @@ impl FlycheckActor {
error
);
}
- if self.status == FlycheckStatus::Started {
- self.send(FlycheckMessage::ClearDiagnostics { id: self.id });
+ if self.package_status.is_empty() {
+ // We finished without receiving any diagnostics.
+ // That means all of them are stale.
+ self.send(FlycheckMessage::ClearDiagnostics {
+ id: self.id,
+ package_id: None,
+ });
+ } else {
+ for (package_id, status) in mem::take(&mut self.package_status) {
+ if let DiagnosticReceived::No = status {
+ self.send(FlycheckMessage::ClearDiagnostics {
+ id: self.id,
+ package_id: Some(package_id),
+ });
+ }
+ }
}
+
self.report_progress(Progress::DidFinish(res));
- self.status = FlycheckStatus::Finished;
}
Event::CheckEvent(Some(message)) => match message {
CargoCheckMessage::CompilerArtifact(msg) => {
tracing::trace!(
flycheck_id = self.id,
artifact = msg.target.name,
+ package_id = msg.package_id.repr,
"artifact received"
);
self.report_progress(Progress::DidCheckCrate(msg.target.name));
+ self.package_status
+ .entry(Arc::new(msg.package_id))
+ .or_insert(DiagnosticReceived::No);
}
-
- CargoCheckMessage::Diagnostic(msg) => {
+ CargoCheckMessage::Diagnostic { diagnostic, package_id } => {
tracing::trace!(
flycheck_id = self.id,
- message = msg.message,
+ message = diagnostic.message,
+ package_id = package_id.as_ref().map(|it| &it.repr),
"diagnostic received"
);
- if self.status == FlycheckStatus::Started {
- self.send(FlycheckMessage::ClearDiagnostics { id: self.id });
+ if let Some(package_id) = &package_id {
+ if !self.package_status.contains_key(package_id) {
+ self.package_status
+ .insert(package_id.clone(), DiagnosticReceived::Yes);
+ self.send(FlycheckMessage::ClearDiagnostics {
+ id: self.id,
+ package_id: Some(package_id.clone()),
+ });
+ }
}
self.send(FlycheckMessage::AddDiagnostic {
id: self.id,
+ package_id,
workspace_root: self.root.clone(),
- diagnostic: msg,
+ diagnostic,
});
- self.status = FlycheckStatus::DiagnosticSent;
}
},
}
@@ -380,7 +417,7 @@ impl FlycheckActor {
command_handle.cancel();
self.command_receiver.take();
self.report_progress(Progress::DidCancel);
- self.status = FlycheckStatus::Finished;
+ self.package_status.clear();
}
}
@@ -400,7 +437,7 @@ impl FlycheckActor {
cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(sysroot_root));
}
cmd.arg(command);
- cmd.current_dir(&self.root);
+ cmd.current_dir(&*self.root);
match package {
Some(pkg) => cmd.arg("-p").arg(pkg),
@@ -442,11 +479,11 @@ impl FlycheckActor {
match invocation_strategy {
InvocationStrategy::Once => {
- cmd.current_dir(&self.root);
+ cmd.current_dir(&*self.root);
}
InvocationStrategy::PerWorkspace => {
// FIXME: cmd.current_dir(&affected_workspace);
- cmd.current_dir(&self.root);
+ cmd.current_dir(&*self.root);
}
}
@@ -486,7 +523,7 @@ impl FlycheckActor {
#[allow(clippy::large_enum_variant)]
enum CargoCheckMessage {
CompilerArtifact(cargo_metadata::Artifact),
- Diagnostic(Diagnostic),
+ Diagnostic { diagnostic: Diagnostic, package_id: Option<Arc<PackageId>> },
}
impl ParseFromLine for CargoCheckMessage {
@@ -501,11 +538,16 @@ impl ParseFromLine for CargoCheckMessage {
Some(CargoCheckMessage::CompilerArtifact(artifact))
}
cargo_metadata::Message::CompilerMessage(msg) => {
- Some(CargoCheckMessage::Diagnostic(msg.message))
+ Some(CargoCheckMessage::Diagnostic {
+ diagnostic: msg.message,
+ package_id: Some(Arc::new(msg.package_id)),
+ })
}
_ => None,
},
- JsonMessage::Rustc(message) => Some(CargoCheckMessage::Diagnostic(message)),
+ JsonMessage::Rustc(message) => {
+ Some(CargoCheckMessage::Diagnostic { diagnostic: message, package_id: None })
+ }
};
}
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index 29be53cee1..dd13bdba4c 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -92,7 +92,7 @@ pub(crate) struct GlobalState {
// status
pub(crate) shutdown_requested: bool,
- pub(crate) last_reported_status: Option<lsp_ext::ServerStatusParams>,
+ pub(crate) last_reported_status: lsp_ext::ServerStatusParams,
// proc macros
pub(crate) proc_macro_clients: Arc<[anyhow::Result<ProcMacroServer>]>,
@@ -238,7 +238,11 @@ impl GlobalState {
mem_docs: MemDocs::default(),
semantic_tokens_cache: Arc::new(Default::default()),
shutdown_requested: false,
- last_reported_status: None,
+ last_reported_status: lsp_ext::ServerStatusParams {
+ health: lsp_ext::Health::Ok,
+ quiescent: true,
+ message: None,
+ },
source_root_config: SourceRootConfig::default(),
local_roots_parent_map: Arc::new(FxHashMap::default()),
config_errors: Default::default(),
diff --git a/crates/rust-analyzer/src/handlers/dispatch.rs b/crates/rust-analyzer/src/handlers/dispatch.rs
index 03759b036b..2aa4ffbe1d 100644
--- a/crates/rust-analyzer/src/handlers/dispatch.rs
+++ b/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -126,7 +126,7 @@ impl RequestDispatcher<'_> {
/// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not
/// ready this will return a `default` constructed [`R::Result`].
- pub(crate) fn on_with<R>(
+ pub(crate) fn on_with_vfs_default<R>(
&mut self,
f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
default: impl FnOnce() -> R::Result,
diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs
index 5e7262b14c..c0231fd04e 100644
--- a/crates/rust-analyzer/src/handlers/notification.rs
+++ b/crates/rust-analyzer/src/handlers/notification.rs
@@ -189,7 +189,7 @@ pub(crate) fn handle_did_save_text_document(
if !state.config.check_on_save(Some(sr)) || run_flycheck(state, vfs_path) {
return Ok(());
}
- } else if state.config.check_on_save(None) {
+ } else if state.config.check_on_save(None) && state.config.flycheck_workspace(None) {
// No specific flycheck was triggered, so let's trigger all of them.
for flycheck in state.flycheck.iter() {
flycheck.restart_workspace(None);
@@ -293,7 +293,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
let file_id = state.vfs.read().0.file_id(&vfs_path);
if let Some(file_id) = file_id {
let world = state.snapshot();
- let source_root_id = world.analysis.source_root_id(file_id).ok();
+ let may_flycheck_workspace = state.config.flycheck_workspace(None);
let mut updated = false;
let task = move || -> std::result::Result<(), ide::Cancelled> {
// Is the target binary? If so we let flycheck run only for the workspace that contains the crate.
@@ -375,21 +375,22 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
let saved_file = vfs_path.as_path().map(|p| p.to_owned());
// Find and trigger corresponding flychecks
- for flycheck in world.flycheck.iter() {
+ 'flychecks: for flycheck in world.flycheck.iter() {
for (id, package) in workspace_ids.clone() {
if id == flycheck.id() {
updated = true;
- match package.filter(|_| !world.config.flycheck_workspace(source_root_id)) {
- Some(package) => flycheck
- .restart_for_package(package, target.clone().map(TupleExt::head)),
- None => flycheck.restart_workspace(saved_file.clone()),
+ if may_flycheck_workspace {
+ flycheck.restart_workspace(saved_file.clone())
+ } else if let Some(package) = package {
+ flycheck
+ .restart_for_package(package, target.clone().map(TupleExt::head))
}
- continue;
+ continue 'flychecks;
}
}
}
// No specific flycheck was triggered, so let's trigger all of them.
- if !updated {
+ if !updated && may_flycheck_workspace {
for flycheck in world.flycheck.iter() {
flycheck.restart_workspace(saved_file.clone());
}
@@ -432,8 +433,10 @@ pub(crate) fn handle_run_flycheck(
}
}
// No specific flycheck was triggered, so let's trigger all of them.
- for flycheck in state.flycheck.iter() {
- flycheck.restart_workspace(None);
+ if state.config.flycheck_workspace(None) {
+ for flycheck in state.flycheck.iter() {
+ flycheck.restart_workspace(None);
+ }
}
Ok(())
}
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index fa78be5cb6..8f2bf80ea2 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -481,27 +481,28 @@ pub(crate) fn handle_document_diagnostics(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentDiagnosticParams,
) -> anyhow::Result<lsp_types::DocumentDiagnosticReportResult> {
- const EMPTY: lsp_types::DocumentDiagnosticReportResult =
+ let empty = || {
lsp_types::DocumentDiagnosticReportResult::Report(
lsp_types::DocumentDiagnosticReport::Full(
lsp_types::RelatedFullDocumentDiagnosticReport {
related_documents: None,
full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport {
- result_id: None,
+ result_id: Some("rust-analyzer".to_owned()),
items: vec![],
},
},
),
- );
+ )
+ };
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let source_root = snap.analysis.source_root_id(file_id)?;
if !snap.analysis.is_local_source_root(source_root)? {
- return Ok(EMPTY);
+ return Ok(empty());
}
let config = snap.config.diagnostics(Some(source_root));
if !config.enabled {
- return Ok(EMPTY);
+ return Ok(empty());
}
let line_index = snap.file_line_index(file_id)?;
let supports_related = snap.config.text_document_diagnostic_related_document_support();
@@ -529,7 +530,7 @@ pub(crate) fn handle_document_diagnostics(
Ok(lsp_types::DocumentDiagnosticReportResult::Report(
lsp_types::DocumentDiagnosticReport::Full(lsp_types::RelatedFullDocumentDiagnosticReport {
full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport {
- result_id: None,
+ result_id: Some("rust-analyzer".to_owned()),
items: diagnostics.collect(),
},
related_documents: related_documents.is_empty().not().then(|| {
@@ -539,7 +540,10 @@ pub(crate) fn handle_document_diagnostics(
(
to_proto::url(&snap, id),
lsp_types::DocumentDiagnosticReportKind::Full(
- lsp_types::FullDocumentDiagnosticReport { result_id: None, items },
+ lsp_types::FullDocumentDiagnosticReport {
+ result_id: Some("rust-analyzer".to_owned()),
+ items,
+ },
),
)
})
@@ -1144,7 +1148,7 @@ pub(crate) fn handle_completion_resolve(
let Some(corresponding_completion) = completions.into_iter().find(|completion_item| {
// Avoid computing hashes for items that obviously do not match
// r-a might append a detail-based suffix to the label, so we cannot check for equality
- original_completion.label.starts_with(completion_item.label.as_str())
+ original_completion.label.starts_with(completion_item.label.primary.as_str())
&& resolve_data_hash == completion_item_hash(completion_item, resolve_data.for_ref)
}) else {
return Ok(original_completion);
@@ -1441,7 +1445,13 @@ pub(crate) fn handle_code_action(
}
// Fixes from `cargo check`.
- for fix in snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).flatten() {
+ for fix in snap
+ .check_fixes
+ .values()
+ .flat_map(|it| it.values())
+ .filter_map(|it| it.get(&frange.file_id))
+ .flatten()
+ {
// FIXME: this mapping is awkward and shouldn't exist. Refactor
// `snap.check_fixes` to not convert to LSP prematurely.
let intersect_fix_range = fix
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs
index 15d60c873f..e7f5a7f5e7 100644
--- a/crates/rust-analyzer/src/lib.rs
+++ b/crates/rust-analyzer/src/lib.rs
@@ -114,8 +114,11 @@ fn completion_item_hash(item: &CompletionItem, is_ref_completion: bool) -> [u8;
u8::from(item.deprecated),
u8::from(item.trigger_call_info),
]);
- hasher.update(&item.label);
- if let Some(label_detail) = &item.label_detail {
+ hasher.update(&item.label.primary);
+ if let Some(label_detail) = &item.label.detail_left {
+ hasher.update(label_detail);
+ }
+ if let Some(label_detail) = &item.label.detail_right {
hasher.update(label_detail);
}
// NB: do not hash edits or source range, as those may change between the time the client sends the resolve request
diff --git a/crates/rust-analyzer/src/lsp/ext.rs b/crates/rust-analyzer/src/lsp/ext.rs
index df06270a8b..c0173d9c24 100644
--- a/crates/rust-analyzer/src/lsp/ext.rs
+++ b/crates/rust-analyzer/src/lsp/ext.rs
@@ -823,8 +823,11 @@ impl Request for OnTypeFormatting {
#[derive(Debug, Serialize, Deserialize)]
pub struct CompletionResolveData {
pub position: lsp_types::TextDocumentPositionParams,
+ #[serde(skip_serializing_if = "Vec::is_empty", default)]
pub imports: Vec<CompletionImport>,
+ #[serde(skip_serializing_if = "Option::is_none", default)]
pub version: Option<i32>,
+ #[serde(skip_serializing_if = "Option::is_none", default)]
pub trigger_character: Option<char>,
pub for_ref: bool,
pub hash: String,
@@ -836,6 +839,7 @@ pub struct InlayHintResolveData {
// This is a string instead of a u64 as javascript can't represent u64 fully
pub hash: String,
pub resolve_range: lsp_types::Range,
+ #[serde(skip_serializing_if = "Option::is_none", default)]
pub version: Option<i32>,
}
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index 612cb547b4..05e93b4e6a 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -2,6 +2,7 @@
use std::{
iter::once,
mem,
+ ops::Not as _,
sync::atomic::{AtomicU32, Ordering},
};
@@ -353,14 +354,17 @@ fn completion_item(
};
let mut lsp_item = lsp_types::CompletionItem {
- label: item.label.to_string(),
+ label: item.label.primary.to_string(),
detail,
filter_text,
kind: Some(completion_item_kind(item.kind)),
text_edit,
- additional_text_edits: Some(additional_text_edits),
+ additional_text_edits: additional_text_edits
+ .is_empty()
+ .not()
+ .then_some(additional_text_edits),
documentation,
- deprecated: Some(item.deprecated),
+ deprecated: item.deprecated.then_some(item.deprecated),
tags,
command,
insert_text_format,
@@ -368,15 +372,17 @@ fn completion_item(
};
if config.completion_label_details_support() {
+ let has_label_details =
+ item.label.detail_left.is_some() || item.label.detail_right.is_some();
if fields_to_resolve.resolve_label_details {
- something_to_resolve |= true;
- } else {
+ something_to_resolve |= has_label_details;
+ } else if has_label_details {
lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
- detail: item.label_detail.as_ref().map(ToString::to_string),
- description: item.detail.clone(),
+ detail: item.label.detail_left.clone(),
+ description: item.label.detail_right.clone(),
});
}
- } else if let Some(label_detail) = &item.label_detail {
+ } else if let Some(label_detail) = &item.label.detail_left {
lsp_item.label.push_str(label_detail.as_str());
}
@@ -1578,22 +1584,26 @@ pub(crate) fn code_lens(
};
let lens_config = snap.config.lens();
- if lens_config.run && client_commands_config.run_single && has_root {
- let command = command::run_single(&r, &title);
- acc.push(lsp_types::CodeLens {
- range: annotation_range,
- command: Some(command),
- data: None,
- })
- }
- if lens_config.debug && can_debug && client_commands_config.debug_single {
- let command = command::debug_single(&r);
- acc.push(lsp_types::CodeLens {
- range: annotation_range,
- command: Some(command),
- data: None,
- })
+
+ if has_root {
+ if lens_config.run && client_commands_config.run_single {
+ let command = command::run_single(&r, &title);
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command: Some(command),
+ data: None,
+ })
+ }
+ if lens_config.debug && can_debug && client_commands_config.debug_single {
+ let command = command::debug_single(&r);
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command: Some(command),
+ data: None,
+ })
+ }
}
+
if lens_config.interpret {
let command = command::interpret_single(&r);
acc.push(lsp_types::CodeLens {
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index a34f0a3c92..d97d96d54a 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -408,7 +408,10 @@ impl GlobalState {
if self.is_quiescent() {
let became_quiescent = !was_quiescent;
if became_quiescent {
- if self.config.check_on_save(None) {
+ if self.config.check_on_save(None)
+ && self.config.flycheck_workspace(None)
+ && !self.fetch_build_data_queue.op_requested()
+ {
// Project has loaded properly, kick off initial flycheck
self.flycheck.iter().for_each(|flycheck| flycheck.restart_workspace(None));
}
@@ -656,8 +659,8 @@ impl GlobalState {
fn update_status_or_notify(&mut self) {
let status = self.current_status();
- if self.last_reported_status.as_ref() != Some(&status) {
- self.last_reported_status = Some(status.clone());
+ if self.last_reported_status != status {
+ self.last_reported_status = status.clone();
if self.config.server_status_notification() {
self.send_notification::<lsp_ext::ServerStatusNotification>(status);
@@ -715,6 +718,7 @@ impl GlobalState {
error!("FetchWorkspaceError: {e}");
}
self.wants_to_switch = Some("fetched workspace".to_owned());
+ self.diagnostics.clear_check_all();
(Progress::End, None)
}
};
@@ -956,7 +960,7 @@ impl GlobalState {
fn handle_flycheck_msg(&mut self, message: FlycheckMessage) {
match message {
- FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic } => {
+ FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic, package_id } => {
let snap = self.snapshot();
let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
&self.config.diagnostics_map(None),
@@ -968,6 +972,7 @@ impl GlobalState {
match url_to_file_id(&self.vfs.read().0, &diag.url) {
Ok(file_id) => self.diagnostics.add_check_diagnostic(
id,
+ &package_id,
file_id,
diag.diagnostic,
diag.fix,
@@ -981,9 +986,12 @@ impl GlobalState {
};
}
}
-
- FlycheckMessage::ClearDiagnostics { id } => self.diagnostics.clear_check(id),
-
+ FlycheckMessage::ClearDiagnostics { id, package_id: None } => {
+ self.diagnostics.clear_check(id)
+ }
+ FlycheckMessage::ClearDiagnostics { id, package_id: Some(package_id) } => {
+ self.diagnostics.clear_check_for_package(id, package_id)
+ }
FlycheckMessage::Progress { id, progress } => {
let (state, message) = match progress {
flycheck::Progress::DidStart => (Progress::Begin, None),
@@ -1090,12 +1098,12 @@ impl GlobalState {
.on_latency_sensitive::<NO_RETRY, lsp_request::SemanticTokensRangeRequest>(handlers::handle_semantic_tokens_range)
// FIXME: Some of these NO_RETRY could be retries if the file they are interested didn't change.
// All other request handlers
- .on_with::<lsp_request::DocumentDiagnosticRequest>(handlers::handle_document_diagnostics, || lsp_types::DocumentDiagnosticReportResult::Report(
+ .on_with_vfs_default::<lsp_request::DocumentDiagnosticRequest>(handlers::handle_document_diagnostics, || lsp_types::DocumentDiagnosticReportResult::Report(
lsp_types::DocumentDiagnosticReport::Full(
lsp_types::RelatedFullDocumentDiagnosticReport {
related_documents: None,
full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport {
- result_id: None,
+ result_id: Some("rust-analyzer".to_owned()),
items: vec![],
},
},
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 4549735fef..3444773695 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -70,7 +70,6 @@ impl GlobalState {
/// are ready to do semantic work.
pub(crate) fn is_quiescent(&self) -> bool {
self.vfs_done
- && self.last_reported_status.is_some()
&& !self.fetch_workspaces_queue.op_in_progress()
&& !self.fetch_build_data_queue.op_in_progress()
&& !self.fetch_proc_macros_queue.op_in_progress()
diff --git a/crates/rust-analyzer/src/test_runner.rs b/crates/rust-analyzer/src/test_runner.rs
index 5e43a3c60d..2fd5254733 100644
--- a/crates/rust-analyzer/src/test_runner.rs
+++ b/crates/rust-analyzer/src/test_runner.rs
@@ -5,7 +5,8 @@ use std::process::Command;
use crossbeam_channel::Sender;
use paths::AbsPath;
-use serde::Deserialize;
+use serde::Deserialize as _;
+use serde_derive::Deserialize;
use toolchain::Tool;
use crate::{
diff --git a/crates/rust-analyzer/src/tracing/json.rs b/crates/rust-analyzer/src/tracing/json.rs
index f540a33b45..9e35990a5b 100644
--- a/crates/rust-analyzer/src/tracing/json.rs
+++ b/crates/rust-analyzer/src/tracing/json.rs
@@ -54,7 +54,7 @@ where
fn on_event(&self, _event: &Event<'_>, _ctx: Context<'_, S>) {}
fn on_close(&self, id: Id, ctx: Context<'_, S>) {
- #[derive(serde::Serialize)]
+ #[derive(serde_derive::Serialize)]
struct JsonDataInner {
name: &'static str,
elapsed_ms: u128,
diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs
index 5a88a5515c..1f52f366c5 100644
--- a/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -1,7 +1,7 @@
use std::{
cell::{Cell, RefCell},
env, fs,
- sync::{Once, OnceLock},
+ sync::Once,
time::Duration,
};
@@ -141,34 +141,15 @@ impl Project<'_> {
/// file in the config dir after server is run, something where our naive approach comes short.
/// Using a `prelock` allows us to force a lock when we know we need it.
pub(crate) fn server_with_lock(self, config_lock: bool) -> Server {
- static CONFIG_DIR_LOCK: OnceLock<(Utf8PathBuf, Mutex<()>)> = OnceLock::new();
+ static CONFIG_DIR_LOCK: Mutex<()> = Mutex::new(());
let config_dir_guard = if config_lock {
Some({
- let (path, mutex) = CONFIG_DIR_LOCK.get_or_init(|| {
- let value = TestDir::new().keep().path().to_owned();
- env::set_var("__TEST_RA_USER_CONFIG_DIR", &value);
- (value, Mutex::new(()))
- });
- #[allow(dyn_drop)]
- (mutex.lock(), {
- Box::new({
- struct Dropper(Utf8PathBuf);
- impl Drop for Dropper {
- fn drop(&mut self) {
- for entry in fs::read_dir(&self.0).unwrap() {
- let path = entry.unwrap().path();
- if path.is_file() {
- fs::remove_file(path).unwrap();
- } else if path.is_dir() {
- fs::remove_dir_all(path).unwrap();
- }
- }
- }
- }
- Dropper(path.clone())
- }) as Box<dyn Drop>
- })
+ let guard = CONFIG_DIR_LOCK.lock();
+ let test_dir = TestDir::new();
+ let value = test_dir.path().to_owned();
+ env::set_var("__TEST_RA_USER_CONFIG_DIR", &value);
+ (guard, test_dir)
})
} else {
None
@@ -311,14 +292,12 @@ pub(crate) struct Server {
client: Connection,
/// XXX: remove the tempdir last
dir: TestDir,
- #[allow(dyn_drop)]
- _config_dir_guard: Option<(MutexGuard<'static, ()>, Box<dyn Drop>)>,
+ _config_dir_guard: Option<(MutexGuard<'static, ()>, TestDir)>,
}
impl Server {
- #[allow(dyn_drop)]
fn new(
- config_dir_guard: Option<(MutexGuard<'static, ()>, Box<dyn Drop>)>,
+ config_dir_guard: Option<(MutexGuard<'static, ()>, TestDir)>,
dir: TestDir,
config: Config,
) -> Server {
diff --git a/crates/span/Cargo.toml b/crates/span/Cargo.toml
index 569da8082a..097a056c99 100644
--- a/crates/span/Cargo.toml
+++ b/crates/span/Cargo.toml
@@ -12,7 +12,7 @@ authors.workspace = true
[dependencies]
la-arena.workspace = true
-ra-salsa.workspace = true
+ra-salsa = { workspace = true, optional = true }
rustc-hash.workspace = true
hashbrown.workspace = true
text-size.workspace = true
@@ -22,5 +22,8 @@ vfs.workspace = true
syntax.workspace = true
stdx.workspace = true
+[features]
+default = ["ra-salsa"]
+
[lints]
workspace = true
diff --git a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs
index 67d7bb9a0d..87a948df55 100644
--- a/crates/span/src/hygiene.rs
+++ b/crates/span/src/hygiene.rs
@@ -21,6 +21,9 @@
//! `ExpnData::call_site` in rustc, [`MacroCallLoc::call_site`] in rust-analyzer.
use std::fmt;
+#[cfg(not(feature = "ra-salsa"))]
+use crate::InternId;
+#[cfg(feature = "ra-salsa")]
use ra_salsa::{InternId, InternValue};
use crate::MacroCallId;
@@ -39,6 +42,7 @@ impl fmt::Debug for SyntaxContextId {
}
}
+#[cfg(feature = "ra-salsa")]
impl ra_salsa::InternKey for SyntaxContextId {
fn from_intern_id(v: ra_salsa::InternId) -> Self {
SyntaxContextId(v)
@@ -92,6 +96,7 @@ pub struct SyntaxContextData {
pub opaque_and_semitransparent: SyntaxContextId,
}
+#[cfg(feature = "ra-salsa")]
impl InternValue for SyntaxContextData {
type Key = (SyntaxContextId, Option<MacroCallId>, Transparency);
diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs
index bd270bfe2b..20c3b087af 100644
--- a/crates/span/src/lib.rs
+++ b/crates/span/src/lib.rs
@@ -1,6 +1,7 @@
//! File and span related types.
use std::fmt::{self, Write};
+#[cfg(feature = "ra-salsa")]
use ra_salsa::InternId;
mod ast_id;
@@ -261,8 +262,9 @@ pub struct MacroFileId {
/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct MacroCallId(ra_salsa::InternId);
+pub struct MacroCallId(InternId);
+#[cfg(feature = "ra-salsa")]
impl ra_salsa::InternKey for MacroCallId {
fn from_intern_id(v: ra_salsa::InternId) -> Self {
MacroCallId(v)
@@ -355,3 +357,72 @@ impl HirFileId {
}
}
}
+
+#[cfg(not(feature = "ra-salsa"))]
+mod intern_id_proxy {
+ use std::fmt;
+ use std::num::NonZeroU32;
+
+ #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+ pub(super) struct InternId {
+ value: NonZeroU32,
+ }
+
+ impl InternId {
+ pub(super) const MAX: u32 = 0xFFFF_FF00;
+
+ pub(super) const unsafe fn new_unchecked(value: u32) -> Self {
+ debug_assert!(value < InternId::MAX);
+ let value = unsafe { NonZeroU32::new_unchecked(value + 1) };
+ InternId { value }
+ }
+
+ pub(super) fn as_u32(self) -> u32 {
+ self.value.get() - 1
+ }
+
+ pub(super) fn as_usize(self) -> usize {
+ self.as_u32() as usize
+ }
+ }
+
+ impl From<InternId> for u32 {
+ fn from(raw: InternId) -> u32 {
+ raw.as_u32()
+ }
+ }
+
+ impl From<InternId> for usize {
+ fn from(raw: InternId) -> usize {
+ raw.as_usize()
+ }
+ }
+
+ impl From<u32> for InternId {
+ fn from(id: u32) -> InternId {
+ assert!(id < InternId::MAX);
+ unsafe { InternId::new_unchecked(id) }
+ }
+ }
+
+ impl From<usize> for InternId {
+ fn from(id: usize) -> InternId {
+ assert!(id < (InternId::MAX as usize));
+ unsafe { InternId::new_unchecked(id as u32) }
+ }
+ }
+
+ impl fmt::Debug for InternId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.as_usize().fmt(f)
+ }
+ }
+
+ impl fmt::Display for InternId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.as_usize().fmt(f)
+ }
+ }
+}
+#[cfg(not(feature = "ra-salsa"))]
+use intern_id_proxy::InternId;
diff --git a/crates/syntax-bridge/Cargo.toml b/crates/syntax-bridge/Cargo.toml
index e995ff3b55..f9a9f40541 100644
--- a/crates/syntax-bridge/Cargo.toml
+++ b/crates/syntax-bridge/Cargo.toml
@@ -21,7 +21,8 @@ syntax.workspace = true
parser.workspace = true
tt.workspace = true
stdx.workspace = true
-span.workspace = true
+# span = {workspace = true, default-features = false} does not work
+span = { path = "../span", version = "0.0.0", default-features = false}
intern.workspace = true
[dev-dependencies]
diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram
index 30428329dd..4e2a70d6cd 100644
--- a/crates/syntax/rust.ungram
+++ b/crates/syntax/rust.ungram
@@ -414,7 +414,7 @@ AsmClobberAbi = 'clobber_abi' '(' ('@string' (',' '@string')* ','?) ')'
// option := "pure" / "nomem" / "readonly" / "preserves_flags" / "noreturn" / "nostack" / "att_syntax" / "raw"
AsmOption = 'pure' | 'nomem' | 'readonly' | 'preserves_flags' | 'noreturn' | 'nostack' | 'att_syntax' | 'raw' | 'may_unwind'
// options := "options(" option *("," option) [","] ")"
-AsmOptions = 'options' '(' AsmOption *(',' AsmOption) ','? ')'
+AsmOptions = 'options' '(' (AsmOption (',' AsmOption)*) ','? ')'
AsmLabel = 'label' BlockExpr
AsmSym = 'sym' Path
AsmConst = 'const' Expr
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs
index 01dcb646b3..3876ef71a0 100644
--- a/crates/syntax/src/ast/generated/nodes.rs
+++ b/crates/syntax/src/ast/generated/nodes.rs
@@ -212,8 +212,6 @@ pub struct AsmOptions {
}
impl AsmOptions {
#[inline]
- pub fn asm_option(&self) -> Option<AsmOption> { support::child(&self.syntax) }
- #[inline]
pub fn asm_options(&self) -> AstChildren<AsmOption> { support::children(&self.syntax) }
#[inline]
pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md
index 2aad2cfa36..0e37611a54 100644
--- a/docs/dev/lsp-extensions.md
+++ b/docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
<!---
-lsp/ext.rs hash: 14b7fb1309f5bb00
+lsp/ext.rs hash: 9790509d87670c22
If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc
index 1195a85cf7..142aa22a70 100644
--- a/docs/user/generated_config.adoc
+++ b/docs/user/generated_config.adoc
@@ -270,7 +270,8 @@ Aliased as `"checkOnSave.targets"`.
+
--
Whether `--workspace` should be passed to `cargo check`.
-If false, `-p <package>` will be passed instead.
+If false, `-p <package>` will be passed instead if applicable. In case it is not, no
+check will be performed.
--
[[rust-analyzer.completion.addSemicolonToUnit]]rust-analyzer.completion.addSemicolonToUnit (default: `true`)::
+
diff --git a/editors/code/package.json b/editors/code/package.json
index 469c1b458d..df97efaae7 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -1098,7 +1098,7 @@
"title": "check",
"properties": {
"rust-analyzer.check.workspace": {
- "markdownDescription": "Whether `--workspace` should be passed to `cargo check`.\nIf false, `-p <package>` will be passed instead.",
+ "markdownDescription": "Whether `--workspace` should be passed to `cargo check`.\nIf false, `-p <package>` will be passed instead if applicable. In case it is not, no\ncheck will be performed.",
"default": true,
"type": "boolean"
}
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index 4a3f66b00d..459754b1d1 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -347,6 +347,8 @@ export class Ctx implements RustAnalyzerExtensionApi {
}
log.info("Disposing language client");
this.updateCommands("disable");
+ // we give the server 100ms to stop gracefully
+ await this.client?.stop(100).catch((_) => {});
await this.disposeClient();
}
diff --git a/lib/lsp-server/Cargo.toml b/lib/lsp-server/Cargo.toml
index cce007ae54..2fa3272e65 100644
--- a/lib/lsp-server/Cargo.toml
+++ b/lib/lsp-server/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "lsp-server"
-version = "0.7.7"
+version = "0.7.8"
description = "Generic LSP server scaffold."
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server"
@@ -9,7 +9,8 @@ edition = "2021"
[dependencies]
log = "0.4.17"
serde_json = "1.0.108"
-serde = { version = "1.0.192", features = ["derive"] }
+serde = { version = "1.0.216" }
+serde_derive = { version = "1.0.216" }
crossbeam-channel.workspace = true
[dev-dependencies]
diff --git a/lib/lsp-server/src/msg.rs b/lib/lsp-server/src/msg.rs
index 53c64796f2..11f98f5079 100644
--- a/lib/lsp-server/src/msg.rs
+++ b/lib/lsp-server/src/msg.rs
@@ -3,7 +3,8 @@ use std::{
io::{self, BufRead, Write},
};
-use serde::{de::DeserializeOwned, Deserialize, Serialize};
+use serde::de::DeserializeOwned;
+use serde_derive::{Deserialize, Serialize};
use crate::error::ExtractError;
@@ -196,7 +197,7 @@ impl Message {
}
impl Response {
- pub fn new_ok<R: Serialize>(id: RequestId, result: R) -> Response {
+ pub fn new_ok<R: serde::Serialize>(id: RequestId, result: R) -> Response {
Response { id, result: Some(serde_json::to_value(result).unwrap()), error: None }
}
pub fn new_err(id: RequestId, code: i32, message: String) -> Response {
@@ -206,7 +207,7 @@ impl Response {
}
impl Request {
- pub fn new<P: Serialize>(id: RequestId, method: String, params: P) -> Request {
+ pub fn new<P: serde::Serialize>(id: RequestId, method: String, params: P) -> Request {
Request { id, method, params: serde_json::to_value(params).unwrap() }
}
pub fn extract<P: DeserializeOwned>(
@@ -231,7 +232,7 @@ impl Request {
}
impl Notification {
- pub fn new(method: String, params: impl Serialize) -> Notification {
+ pub fn new(method: String, params: impl serde::Serialize) -> Notification {
Notification { method, params: serde_json::to_value(params).unwrap() }
}
pub fn extract<P: DeserializeOwned>(
diff --git a/lib/lsp-server/src/req_queue.rs b/lib/lsp-server/src/req_queue.rs
index 347a9fb6fb..c216864bee 100644
--- a/lib/lsp-server/src/req_queue.rs
+++ b/lib/lsp-server/src/req_queue.rs
@@ -1,7 +1,5 @@
use std::collections::HashMap;
-use serde::Serialize;
-
use crate::{ErrorCode, Request, RequestId, Response, ResponseError};
/// Manages the set of pending requests, both incoming and outgoing.
@@ -56,7 +54,7 @@ impl<I> Incoming<I> {
}
impl<O> Outgoing<O> {
- pub fn register<P: Serialize>(&mut self, method: String, params: P, data: O) -> Request {
+ pub fn register<P: serde::Serialize>(&mut self, method: String, params: P, data: O) -> Request {
let id = RequestId::from(self.next_id);
self.pending.insert(id.clone(), data);
self.next_id += 1;
diff --git a/rust-version b/rust-version
index 7d60fa6cb7..3be63741c0 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-5a6036a1802262f8cf02192b02026688d396f1d7
+0eca4dd3205a01dba4bd7b7c140ec370aff03440
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml
index 01ad333631..b505ee835b 100644
--- a/xtask/Cargo.toml
+++ b/xtask/Cargo.toml
@@ -16,7 +16,8 @@ xflags = "0.3.0"
time = { version = "0.3", default-features = false }
zip = { version = "0.6", default-features = false, features = ["deflate", "time"] }
stdx.workspace = true
-proc-macro2 = "1.0.47"
+# https://github.com/dtolnay/proc-macro2/issues/475
+proc-macro2 = "=1.0.86"
quote = "1.0.20"
ungrammar = "1.16.1"
either.workspace = true