Unnamed repository; edit this file 'description' to name the repository.
Merge pull request #2508 from rust-lang/rustc-pull
Rustc pull update
Tshepang Mbambo 9 months ago
parent 4c4a25d · parent 3a0c384 · commit ebd6111
-rw-r--r--Cargo.lock52
-rw-r--r--Cargo.toml6
-rw-r--r--crates/base-db/src/input.rs45
-rw-r--r--crates/base-db/src/lib.rs11
-rw-r--r--crates/hir-def/src/expr_store.rs58
-rw-r--r--crates/hir-def/src/expr_store/path.rs4
-rw-r--r--crates/hir-def/src/expr_store/scope.rs2
-rw-r--r--crates/hir-def/src/hir/type_ref.rs2
-rw-r--r--crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs80
-rw-r--r--crates/hir-def/src/nameres/tests/incremental.rs4
-rw-r--r--crates/hir-expand/src/builtin/derive_macro.rs140
-rw-r--r--crates/hir-expand/src/builtin/fn_macro.rs49
-rw-r--r--crates/hir-expand/src/db.rs2
-rw-r--r--crates/hir-expand/src/files.rs20
-rw-r--r--crates/hir-expand/src/lib.rs28
-rw-r--r--crates/hir-expand/src/name.rs3
-rw-r--r--crates/hir-expand/src/prettify_macro_expansion_.rs2
-rw-r--r--crates/hir-expand/src/proc_macro.rs14
-rw-r--r--crates/hir-ty/src/db.rs40
-rw-r--r--crates/hir-ty/src/diagnostics/match_check.rs4
-rw-r--r--crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs84
-rw-r--r--crates/hir-ty/src/display.rs4
-rw-r--r--crates/hir-ty/src/infer.rs48
-rw-r--r--crates/hir-ty/src/infer/closure.rs7
-rw-r--r--crates/hir-ty/src/infer/expr.rs35
-rw-r--r--crates/hir-ty/src/layout.rs4
-rw-r--r--crates/hir-ty/src/layout/target.rs5
-rw-r--r--crates/hir-ty/src/mapping.rs21
-rw-r--r--crates/hir-ty/src/mir/eval.rs2
-rw-r--r--crates/hir-ty/src/test_db.rs9
-rw-r--r--crates/hir-ty/src/tests/closure_captures.rs28
-rw-r--r--crates/hir-ty/src/tests/incremental.rs40
-rw-r--r--crates/hir-ty/src/tests/never_type.rs28
-rw-r--r--crates/hir-ty/src/tests/simple.rs16
-rw-r--r--crates/hir-ty/src/variance.rs18
-rw-r--r--crates/hir/src/diagnostics.rs12
-rw-r--r--crates/hir/src/lib.rs6
-rw-r--r--crates/hir/src/source_analyzer.rs25
-rw-r--r--crates/ide-assists/src/handlers/expand_rest_pattern.rs2
-rw-r--r--crates/ide-assists/src/handlers/promote_local_to_const.rs22
-rw-r--r--crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs21
-rw-r--r--crates/ide-assists/src/handlers/toggle_macro_delimiter.rs26
-rw-r--r--crates/ide-assists/src/handlers/unmerge_match_arm.rs63
-rw-r--r--crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs86
-rw-r--r--crates/ide-assists/src/utils.rs15
-rw-r--r--crates/ide-completion/src/tests/attribute.rs4
-rw-r--r--crates/ide-db/src/famous_defs.rs12
-rw-r--r--crates/ide-db/src/generated/lints.rs4
-rw-r--r--crates/ide-db/src/prime_caches.rs2
-rw-r--r--crates/ide-db/src/search.rs8
-rw-r--r--crates/ide-diagnostics/src/handlers/macro_error.rs4
-rw-r--r--crates/ide-diagnostics/src/handlers/missing_fields.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs18
-rw-r--r--crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs9
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_method.rs13
-rw-r--r--crates/ide/src/call_hierarchy.rs2
-rw-r--r--crates/ide/src/doc_links.rs27
-rw-r--r--crates/ide/src/file_structure.rs50
-rw-r--r--crates/ide/src/goto_definition.rs76
-rw-r--r--crates/ide/src/hover/tests.rs31
-rw-r--r--crates/ide/src/inlay_hints/adjustment.rs108
-rw-r--r--crates/ide/src/inlay_hints/closing_brace.rs2
-rw-r--r--crates/ide/src/navigation_target.rs2
-rw-r--r--crates/ide/src/runnables.rs20
-rw-r--r--crates/ide/src/view_crate_graph.rs2
-rw-r--r--crates/load-cargo/src/lib.rs95
-rw-r--r--crates/parser/src/lexed_str.rs10
-rw-r--r--crates/proc-macro-srv/src/server_impl.rs2
-rw-r--r--crates/project-model/src/build_dependencies.rs43
-rw-r--r--crates/project-model/src/sysroot.rs29
-rw-r--r--crates/project-model/src/tests.rs2
-rw-r--r--crates/project-model/src/workspace.rs16
-rw-r--r--crates/rust-analyzer/src/cli/rustc_tests.rs8
-rw-r--r--crates/rust-analyzer/src/cli/scip.rs2
-rw-r--r--crates/rust-analyzer/src/config.rs6
-rw-r--r--crates/rust-analyzer/src/flycheck.rs7
-rw-r--r--crates/rust-analyzer/src/global_state.rs2
-rw-r--r--crates/rust-analyzer/src/handlers/dispatch.rs5
-rw-r--r--crates/rust-analyzer/src/lsp/capabilities.rs6
-rw-r--r--crates/rust-analyzer/src/main_loop.rs11
-rw-r--r--crates/rust-analyzer/src/reload.rs37
-rw-r--r--crates/span/src/ast_id.rs15
-rw-r--r--crates/span/src/hygiene.rs16
-rw-r--r--crates/syntax/src/ast/make.rs7
-rw-r--r--crates/syntax/src/ast/syntax_factory/constructors.rs41
-rw-r--r--crates/syntax/src/syntax_editor.rs2
-rw-r--r--crates/test-utils/src/fixture.rs22
-rw-r--r--crates/test-utils/src/minicore.rs38
-rw-r--r--crates/tt/src/iter.rs1
-rw-r--r--crates/tt/src/lib.rs2
-rw-r--r--crates/vfs/src/file_set.rs6
-rw-r--r--docs/book/src/contributing/lsp-extensions.md18
92 files changed, 1331 insertions, 707 deletions
diff --git a/Cargo.lock b/Cargo.lock
index caa8f28d8e..7432a82080 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -570,12 +570,6 @@ dependencies = [
]
[[package]]
-name = "heck"
-version = "0.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
-
-[[package]]
name = "hermit-abi"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1021,6 +1015,15 @@ dependencies = [
]
[[package]]
+name = "intrusive-collections"
+version = "0.9.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "189d0897e4cbe8c75efedf3502c18c887b05046e59d28404d4d8e46cbc4d1e86"
+dependencies = [
+ "memoffset",
+]
+
+[[package]]
name = "itertools"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1428,6 +1431,16 @@ dependencies = [
]
[[package]]
+name = "papaya"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f92dd0b07c53a0a0c764db2ace8c541dc47320dad97c2200c2a637ab9dd2328f"
+dependencies = [
+ "equivalent",
+ "seize",
+]
+
+[[package]]
name = "parking_lot"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1955,16 +1968,18 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8fff508e3d6ef42a32607f7538e17171a877a12015e32036f46e99d00c95781"
+checksum = "2e235afdb8e510f38a07138fbe5a0b64691894358a9c0cbd813b1aade110efc9"
dependencies = [
"boxcar",
"crossbeam-queue",
- "dashmap",
+ "crossbeam-utils",
"hashbrown 0.15.4",
"hashlink",
"indexmap",
+ "intrusive-collections",
+ "papaya",
"parking_lot",
"portable-atomic",
"rayon",
@@ -1978,17 +1993,16 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ea72b3c06f2ce6350fe3a0eeb7aaaf842d1d8352b706973c19c4f02e298a87c"
+checksum = "2edb86a7e9c91f6d30c9ce054312721dbe773a162db27bbfae834d16177b30ce"
[[package]]
name = "salsa-macros"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ce92025bc160b27814a207cb78d680973af17f863c7f4fc56cf3a535e22f378"
+checksum = "d0778d6e209051bc4e75acfe83bcd7848601ec3dbe9c3dbb982829020e9128af"
dependencies = [
- "heck",
"proc-macro2",
"quote",
"syn",
@@ -2026,6 +2040,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
+name = "seize"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e4b8d813387d566f627f3ea1b914c068aac94c40ae27ec43f5f33bde65abefe7"
+dependencies = [
+ "libc",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
name = "semver"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml
index 0a8e6feb46..d268ce5b0b 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -49,6 +49,8 @@ debug = 2
# ungrammar = { path = "../ungrammar" }
# salsa = { path = "../salsa" }
+# salsa-macros = { path = "../salsa/components/salsa-macros" }
+# salsa-macro-rules = { path = "../salsa/components/salsa-macro-rules" }
[workspace.dependencies]
# local crates
@@ -136,8 +138,8 @@ rayon = "1.10.0"
rowan = "=0.15.15"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
-salsa = { version = "0.22.0", default-features = true, features = ["rayon","salsa_unstable", "macros"] }
-salsa-macros = "0.22.0"
+salsa = { version = "0.23.0", default-features = true, features = ["rayon","salsa_unstable", "macros"] }
+salsa-macros = "0.23.0"
semver = "1.0.26"
serde = { version = "1.0.219" }
serde_derive = { version = "1.0.219" }
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 2a87b15248..8c9393bcc9 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -6,6 +6,7 @@
//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
//! actual IO is done and lowered to input.
+use std::error::Error;
use std::hash::BuildHasherDefault;
use std::{fmt, mem, ops};
@@ -22,7 +23,49 @@ use vfs::{AbsPathBuf, AnchoredPath, FileId, VfsPath, file_set::FileSet};
use crate::{CrateWorkspaceData, EditionedFileId, FxIndexSet, RootQueryDb};
-pub type ProcMacroPaths = FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), String>>;
+pub type ProcMacroPaths =
+ FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), ProcMacroLoadingError>>;
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ProcMacroLoadingError {
+ Disabled,
+ FailedToBuild,
+ MissingDylibPath,
+ NotYetBuilt,
+ NoProcMacros,
+ ProcMacroSrvError(Box<str>),
+}
+impl ProcMacroLoadingError {
+ pub fn is_hard_error(&self) -> bool {
+ match self {
+ ProcMacroLoadingError::Disabled | ProcMacroLoadingError::NotYetBuilt => false,
+ ProcMacroLoadingError::FailedToBuild
+ | ProcMacroLoadingError::MissingDylibPath
+ | ProcMacroLoadingError::NoProcMacros
+ | ProcMacroLoadingError::ProcMacroSrvError(_) => true,
+ }
+ }
+}
+
+impl Error for ProcMacroLoadingError {}
+impl fmt::Display for ProcMacroLoadingError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ProcMacroLoadingError::Disabled => write!(f, "proc-macro expansion is disabled"),
+ ProcMacroLoadingError::FailedToBuild => write!(f, "proc-macro failed to build"),
+ ProcMacroLoadingError::MissingDylibPath => {
+ write!(f, "proc-macro crate build data is missing a dylib path")
+ }
+ ProcMacroLoadingError::NotYetBuilt => write!(f, "proc-macro not yet built"),
+ ProcMacroLoadingError::NoProcMacros => {
+ write!(f, "proc macro library has no proc macros")
+ }
+ ProcMacroLoadingError::ProcMacroSrvError(msg) => {
+ write!(f, "proc macro server error: {msg}")
+ }
+ }
+ }
+}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 478fae67c8..ad17f1730b 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -14,8 +14,9 @@ pub use crate::{
input::{
BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder,
CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap,
- DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroPaths, ReleaseChannel,
- SourceRoot, SourceRootId, TargetLayoutLoadResult, UniqueCrateData,
+ DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroLoadingError,
+ ProcMacroPaths, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
+ UniqueCrateData,
},
};
use dashmap::{DashMap, mapref::entry::Entry};
@@ -33,7 +34,7 @@ pub type FxIndexSet<T> = indexmap::IndexSet<T, rustc_hash::FxBuildHasher>;
#[macro_export]
macro_rules! impl_intern_key {
($id:ident, $loc:ident) => {
- #[salsa_macros::interned(no_lifetime)]
+ #[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct $id {
pub loc: $loc,
@@ -43,7 +44,7 @@ macro_rules! impl_intern_key {
impl ::std::fmt::Debug for $id {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
f.debug_tuple(stringify!($id))
- .field(&format_args!("{:04x}", self.0.as_u32()))
+ .field(&format_args!("{:04x}", self.0.index()))
.finish()
}
}
@@ -167,7 +168,7 @@ impl Files {
}
}
-#[salsa_macros::interned(no_lifetime, debug, constructor=from_span)]
+#[salsa_macros::interned(no_lifetime, debug, constructor=from_span, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct EditionedFileId {
pub editioned_file_id: span::EditionedFileId,
diff --git a/crates/hir-def/src/expr_store.rs b/crates/hir-def/src/expr_store.rs
index 85bd193223..51612f341a 100644
--- a/crates/hir-def/src/expr_store.rs
+++ b/crates/hir-def/src/expr_store.rs
@@ -93,7 +93,7 @@ pub type TypeSource = InFile<TypePtr>;
pub type LifetimePtr = AstPtr<ast::Lifetime>;
pub type LifetimeSource = InFile<LifetimePtr>;
-#[derive(Debug, Eq, PartialEq)]
+#[derive(Debug, PartialEq, Eq)]
pub struct ExpressionStore {
pub exprs: Arena<Expr>,
pub pats: Arena<Pat>,
@@ -114,7 +114,7 @@ pub struct ExpressionStore {
ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
}
-#[derive(Debug, Eq, PartialEq, Default)]
+#[derive(Debug, Eq, Default)]
pub struct ExpressionStoreSourceMap {
// AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map
// to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected).
@@ -127,19 +127,20 @@ pub struct ExpressionStoreSourceMap {
label_map: FxHashMap<LabelSource, LabelId>,
label_map_back: ArenaMap<LabelId, LabelSource>,
- binding_definitions: FxHashMap<BindingId, SmallVec<[PatId; 4]>>,
-
- /// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
- /// Instead, we use id of expression (`92`) to identify the field.
- field_map_back: FxHashMap<ExprId, FieldSource>,
- pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
-
types_map_back: ArenaMap<TypeRefId, TypeSource>,
types_map: FxHashMap<TypeSource, TypeRefId>,
lifetime_map_back: ArenaMap<LifetimeRefId, LifetimeSource>,
lifetime_map: FxHashMap<LifetimeSource, LifetimeRefId>,
+ binding_definitions:
+ ArenaMap<BindingId, SmallVec<[PatId; 2 * size_of::<usize>() / size_of::<PatId>()]>>,
+
+ /// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
+ /// Instead, we use id of expression (`92`) to identify the field.
+ field_map_back: FxHashMap<ExprId, FieldSource>,
+ pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
+
template_map: Option<Box<FormatTemplate>>,
pub expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
@@ -149,6 +150,43 @@ pub struct ExpressionStoreSourceMap {
pub diagnostics: Vec<ExpressionStoreDiagnostics>,
}
+impl PartialEq for ExpressionStoreSourceMap {
+ fn eq(&self, other: &Self) -> bool {
+ // we only need to compare one of the two mappings
+ // as the other is a reverse mapping and thus will compare
+ // the same as normal mapping
+ let Self {
+ expr_map: _,
+ expr_map_back,
+ pat_map: _,
+ pat_map_back,
+ label_map: _,
+ label_map_back,
+ types_map_back,
+ types_map: _,
+ lifetime_map_back,
+ lifetime_map: _,
+ // If this changed, our pattern data must have changed
+ binding_definitions: _,
+ // If this changed, our expression data must have changed
+ field_map_back: _,
+ // If this changed, our pattern data must have changed
+ pat_field_map_back: _,
+ template_map,
+ expansions,
+ diagnostics,
+ } = self;
+ *expr_map_back == other.expr_map_back
+ && *pat_map_back == other.pat_map_back
+ && *label_map_back == other.label_map_back
+ && *types_map_back == other.types_map_back
+ && *lifetime_map_back == other.lifetime_map_back
+ && *template_map == other.template_map
+ && *expansions == other.expansions
+ && *diagnostics == other.diagnostics
+ }
+}
+
/// The body of an item (function, const etc.).
#[derive(Debug, Eq, PartialEq, Default)]
pub struct ExpressionStoreBuilder {
@@ -698,7 +736,7 @@ impl ExpressionStoreSourceMap {
}
pub fn patterns_for_binding(&self, binding: BindingId) -> &[PatId] {
- self.binding_definitions.get(&binding).map_or(&[], Deref::deref)
+ self.binding_definitions.get(binding).map_or(&[], Deref::deref)
}
pub fn node_label(&self, node: InFile<&ast::Label>) -> Option<LabelId> {
diff --git a/crates/hir-def/src/expr_store/path.rs b/crates/hir-def/src/expr_store/path.rs
index db83e73a0b..19c7ce0ce0 100644
--- a/crates/hir-def/src/expr_store/path.rs
+++ b/crates/hir-def/src/expr_store/path.rs
@@ -29,8 +29,8 @@ pub enum Path {
// This type is being used a lot, make sure it doesn't grow unintentionally.
#[cfg(target_arch = "x86_64")]
const _: () = {
- assert!(size_of::<Path>() == 16);
- assert!(size_of::<Option<Path>>() == 16);
+ assert!(size_of::<Path>() == 24);
+ assert!(size_of::<Option<Path>>() == 24);
};
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
diff --git a/crates/hir-def/src/expr_store/scope.rs b/crates/hir-def/src/expr_store/scope.rs
index a46711c67e..2dd0b9bdb8 100644
--- a/crates/hir-def/src/expr_store/scope.rs
+++ b/crates/hir-def/src/expr_store/scope.rs
@@ -535,7 +535,7 @@ fn foo() {
let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
let pat_src = source_map
- .pat_syntax(*source_map.binding_definitions[&resolved.binding()].first().unwrap())
+ .pat_syntax(*source_map.binding_definitions[resolved.binding()].first().unwrap())
.unwrap();
let local_name = pat_src.value.syntax_node_ptr().to_node(file.syntax());
diff --git a/crates/hir-def/src/hir/type_ref.rs b/crates/hir-def/src/hir/type_ref.rs
index eb3b92d31f..eacc3f3ced 100644
--- a/crates/hir-def/src/hir/type_ref.rs
+++ b/crates/hir-def/src/hir/type_ref.rs
@@ -149,7 +149,7 @@ pub enum TypeRef {
}
#[cfg(target_arch = "x86_64")]
-const _: () = assert!(size_of::<TypeRef>() == 16);
+const _: () = assert!(size_of::<TypeRef>() == 24);
pub type TypeRefId = Idx<TypeRef>;
diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
index 777953d3f2..0013c2a256 100644
--- a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
+++ b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
@@ -746,3 +746,83 @@ struct Struct9<#[pointee] T, U>(T) where T: ?Sized;
623..690: `derive(CoercePointee)` requires `T` to be marked `?Sized`"#]],
);
}
+
+#[test]
+fn union_derive() {
+ check_errors(
+ r#"
+//- minicore: clone, copy, default, fmt, hash, ord, eq, derive
+
+#[derive(Copy)]
+union Foo1 { _v: () }
+#[derive(Clone)]
+union Foo2 { _v: () }
+#[derive(Default)]
+union Foo3 { _v: () }
+#[derive(Debug)]
+union Foo4 { _v: () }
+#[derive(Hash)]
+union Foo5 { _v: () }
+#[derive(Ord)]
+union Foo6 { _v: () }
+#[derive(PartialOrd)]
+union Foo7 { _v: () }
+#[derive(Eq)]
+union Foo8 { _v: () }
+#[derive(PartialEq)]
+union Foo9 { _v: () }
+ "#,
+ expect![[r#"
+ 78..118: this trait cannot be derived for unions
+ 119..157: this trait cannot be derived for unions
+ 158..195: this trait cannot be derived for unions
+ 196..232: this trait cannot be derived for unions
+ 233..276: this trait cannot be derived for unions
+ 313..355: this trait cannot be derived for unions"#]],
+ );
+}
+
+#[test]
+fn default_enum_without_default_attr() {
+ check_errors(
+ r#"
+//- minicore: default, derive
+
+#[derive(Default)]
+enum Foo {
+ Bar,
+}
+ "#,
+ expect!["1..41: `#[derive(Default)]` on enum with no `#[default]`"],
+ );
+}
+
+#[test]
+fn generic_enum_default() {
+ check(
+ r#"
+//- minicore: default, derive
+
+#[derive(Default)]
+enum Foo<T> {
+ Bar(T),
+ #[default]
+ Baz,
+}
+"#,
+ expect![[r#"
+
+#[derive(Default)]
+enum Foo<T> {
+ Bar(T),
+ #[default]
+ Baz,
+}
+
+impl <T, > $crate::default::Default for Foo<T, > where {
+ fn default() -> Self {
+ Foo::Baz
+ }
+}"#]],
+ );
+}
diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs
index ba75dca3d3..338851b715 100644
--- a/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/crates/hir-def/src/nameres/tests/incremental.rs
@@ -172,7 +172,7 @@ fn no() {}
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
- "of_",
+ "EnumVariants::of_",
]
"#]],
expect![[r#"
@@ -181,7 +181,7 @@ fn no() {}
"ast_id_map_shim",
"file_item_tree_query",
"real_span_map_shim",
- "of_",
+ "EnumVariants::of_",
]
"#]],
);
diff --git a/crates/hir-expand/src/builtin/derive_macro.rs b/crates/hir-expand/src/builtin/derive_macro.rs
index d135584a08..15e68ff95c 100644
--- a/crates/hir-expand/src/builtin/derive_macro.rs
+++ b/crates/hir-expand/src/builtin/derive_macro.rs
@@ -458,6 +458,7 @@ fn expand_simple_derive(
invoc_span: Span,
tt: &tt::TopSubtree,
trait_path: tt::TopSubtree,
+ allow_unions: bool,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let info = match parse_adt(db, tt, invoc_span) {
@@ -469,6 +470,12 @@ fn expand_simple_derive(
);
}
};
+ if !allow_unions && matches!(info.shape, AdtShape::Union) {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(invoc_span)),
+ ExpandError::other(invoc_span, "this trait cannot be derived for unions"),
+ );
+ }
ExpandResult::ok(expand_simple_derive_with_parsed(
invoc_span,
info,
@@ -535,7 +542,14 @@ fn copy_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
+ expand_simple_derive(
+ db,
+ span,
+ tt,
+ quote! {span => #krate::marker::Copy },
+ true,
+ |_| quote! {span =>},
+ )
}
fn clone_expand(
@@ -544,7 +558,7 @@ fn clone_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::clone::Clone }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::clone::Clone }, true, |adt| {
if matches!(adt.shape, AdtShape::Union) {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
@@ -599,41 +613,63 @@ fn default_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::default::Default }, |adt| {
- let body = match &adt.shape {
- AdtShape::Struct(fields) => {
- let name = &adt.name;
- fields.as_pattern_map(
- quote!(span =>#name),
+ let adt = match parse_adt(db, tt, span) {
+ Ok(info) => info,
+ Err(e) => {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan { open: span, close: span }),
+ e,
+ );
+ }
+ };
+ let (body, constrain_to_trait) = match &adt.shape {
+ AdtShape::Struct(fields) => {
+ let name = &adt.name;
+ let body = fields.as_pattern_map(
+ quote!(span =>#name),
+ span,
+ |_| quote!(span =>#krate::default::Default::default()),
+ );
+ (body, true)
+ }
+ AdtShape::Enum { default_variant, variants } => {
+ if let Some(d) = default_variant {
+ let (name, fields) = &variants[*d];
+ let adt_name = &adt.name;
+ let body = fields.as_pattern_map(
+ quote!(span =>#adt_name :: #name),
span,
|_| quote!(span =>#krate::default::Default::default()),
- )
+ );
+ (body, false)
+ } else {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ExpandError::other(span, "`#[derive(Default)]` on enum with no `#[default]`"),
+ );
}
- AdtShape::Enum { default_variant, variants } => {
- if let Some(d) = default_variant {
- let (name, fields) = &variants[*d];
- let adt_name = &adt.name;
- fields.as_pattern_map(
- quote!(span =>#adt_name :: #name),
- span,
- |_| quote!(span =>#krate::default::Default::default()),
- )
- } else {
- // FIXME: Return expand error here
- quote!(span =>)
+ }
+ AdtShape::Union => {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ExpandError::other(span, "this trait cannot be derived for unions"),
+ );
+ }
+ };
+ ExpandResult::ok(expand_simple_derive_with_parsed(
+ span,
+ adt,
+ quote! {span => #krate::default::Default },
+ |_adt| {
+ quote! {span =>
+ fn default() -> Self {
+ #body
}
}
- AdtShape::Union => {
- // FIXME: Return expand error here
- quote!(span =>)
- }
- };
- quote! {span =>
- fn default() -> Self {
- #body
- }
- }
- })
+ },
+ constrain_to_trait,
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ))
}
fn debug_expand(
@@ -642,7 +678,7 @@ fn debug_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::fmt::Debug }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::fmt::Debug }, false, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| {
@@ -697,10 +733,7 @@ fn debug_expand(
}
})
.collect(),
- AdtShape::Union => {
- // FIXME: Return expand error here
- vec![]
- }
+ AdtShape::Union => unreachable!(),
};
quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
@@ -718,11 +751,7 @@ fn hash_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::hash::Hash }, |adt| {
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote! {span =>};
- }
+ expand_simple_derive(db, span, tt, quote! {span => #krate::hash::Hash }, false, |adt| {
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
@@ -769,7 +798,14 @@ fn eq_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
+ expand_simple_derive(
+ db,
+ span,
+ tt,
+ quote! {span => #krate::cmp::Eq },
+ true,
+ |_| quote! {span =>},
+ )
}
fn partial_eq_expand(
@@ -778,11 +814,7 @@ fn partial_eq_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialEq }, |adt| {
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote! {span =>};
- }
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialEq }, false, |adt| {
let name = &adt.name;
let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span);
@@ -854,7 +886,7 @@ fn ord_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Ord }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Ord }, false, |adt| {
fn compare(
krate: &tt::Ident,
left: tt::TopSubtree,
@@ -873,10 +905,6 @@ fn ord_expand(
}
}
}
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote!(span =>);
- }
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
@@ -916,7 +944,7 @@ fn partial_ord_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialOrd }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialOrd }, false, |adt| {
fn compare(
krate: &tt::Ident,
left: tt::TopSubtree,
@@ -935,10 +963,6 @@ fn partial_ord_expand(
}
}
}
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote!(span =>);
- }
let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
diff --git a/crates/hir-expand/src/builtin/fn_macro.rs b/crates/hir-expand/src/builtin/fn_macro.rs
index f9abe4f556..800b40a9e7 100644
--- a/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/crates/hir-expand/src/builtin/fn_macro.rs
@@ -7,6 +7,7 @@ use intern::{
Symbol,
sym::{self},
};
+use itertools::Itertools;
use mbe::{DelimiterKind, expect_fragment};
use span::{Edition, FileId, Span};
use stdx::format_to;
@@ -681,11 +682,19 @@ fn relative_file(
}
fn parse_string(tt: &tt::TopSubtree) -> Result<(Symbol, Span), ExpandError> {
- let delimiter = tt.top_subtree().delimiter;
- tt.iter()
- .next()
- .ok_or(delimiter.open.cover(delimiter.close))
- .and_then(|tt| match tt {
+ let mut tt = TtElement::Subtree(tt.top_subtree(), tt.iter());
+ (|| {
+ // FIXME: We wrap expression fragments in parentheses which can break this expectation
+ // here
+ // Remove this once we handle none delims correctly
+ while let TtElement::Subtree(sub, tt_iter) = &mut tt
+ && let DelimiterKind::Parenthesis | DelimiterKind::Invisible = sub.delimiter.kind
+ {
+ tt =
+ tt_iter.exactly_one().map_err(|_| sub.delimiter.open.cover(sub.delimiter.close))?;
+ }
+
+ match tt {
TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
symbol: text,
span,
@@ -698,35 +707,11 @@ fn parse_string(tt: &tt::TopSubtree) -> Result<(Symbol, Span), ExpandError> {
kind: tt::LitKind::StrRaw(_),
suffix: _,
})) => Ok((text.clone(), *span)),
- // FIXME: We wrap expression fragments in parentheses which can break this expectation
- // here
- // Remove this once we handle none delims correctly
- TtElement::Subtree(tt, mut tt_iter)
- if tt.delimiter.kind == DelimiterKind::Parenthesis =>
- {
- tt_iter
- .next()
- .and_then(|tt| match tt {
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span,
- kind: tt::LitKind::Str,
- suffix: _,
- })) => Some((unescape_symbol(text), *span)),
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span,
- kind: tt::LitKind::StrRaw(_),
- suffix: _,
- })) => Some((text.clone(), *span)),
- _ => None,
- })
- .ok_or(delimiter.open.cover(delimiter.close))
- }
TtElement::Leaf(l) => Err(*l.span()),
TtElement::Subtree(tt, _) => Err(tt.delimiter.open.cover(tt.delimiter.close)),
- })
- .map_err(|span| ExpandError::other(span, "expected string literal"))
+ }
+ })()
+ .map_err(|span| ExpandError::other(span, "expected string literal"))
}
fn include_expand(
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 7e9928c41f..888c1405a6 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -145,7 +145,7 @@ pub trait ExpandDatabase: RootQueryDb {
fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContext;
}
-#[salsa_macros::interned(no_lifetime, id = span::SyntaxContext)]
+#[salsa_macros::interned(no_lifetime, id = span::SyntaxContext, revisions = usize::MAX)]
pub struct SyntaxContextWrapper {
pub data: SyntaxContext,
}
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index a73a22370d..6730b337d3 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -315,11 +315,11 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
- pub fn original_file_range_with_macro_call_body(
+ pub fn original_file_range_with_macro_call_input(
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
- self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
+ self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_input(db)
}
pub fn original_syntax_node_rooted(
@@ -465,7 +465,7 @@ impl InFile<TextRange> {
}
}
- pub fn original_node_file_range_with_macro_call_body(
+ pub fn original_node_file_range_with_macro_call_input(
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
@@ -476,7 +476,7 @@ impl InFile<TextRange> {
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file);
- loc.kind.original_call_range_with_body(db)
+ loc.kind.original_call_range_with_input(db)
}
}
}
@@ -497,6 +497,18 @@ impl InFile<TextRange> {
}
}
}
+
+ pub fn original_node_file_range_rooted_opt(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<FileRange> {
+ match self.file_id {
+ HirFileId::FileId(file_id) => Some(FileRange { file_id, range: self.value }),
+ HirFileId::MacroFile(mac_file) => {
+ map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value)
+ }
+ }
+ }
}
impl<N: AstNode> InFile<N> {
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 6ecac1463f..ac61b22009 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -199,9 +199,9 @@ impl ExpandErrorKind {
},
&ExpandErrorKind::MissingProcMacroExpander(def_crate) => {
match db.proc_macros_for_crate(def_crate).as_ref().and_then(|it| it.get_error()) {
- Some((e, hard_err)) => RenderedExpandError {
- message: e.to_owned(),
- error: hard_err,
+ Some(e) => RenderedExpandError {
+ message: e.to_string(),
+ error: e.is_hard_error(),
kind: RenderedExpandError::GENERAL_KIND,
},
None => RenderedExpandError {
@@ -688,8 +688,11 @@ impl MacroCallKind {
/// Returns the original file range that best describes the location of this macro call.
///
- /// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives.
- pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange {
+ /// This spans the entire macro call, including its input. That is for
+ /// - fn_like! {}, it spans the path and token tree
+ /// - #\[derive], it spans the `#[derive(...)]` attribute and the annotated item
+ /// - #\[attr], it spans the `#[attr(...)]` attribute and the annotated item
+ pub fn original_call_range_with_input(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id() {
@@ -712,8 +715,8 @@ impl MacroCallKind {
/// Returns the original file range that best describes the location of this macro call.
///
/// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
- /// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives
- /// get only the specific derive that is being referred to.
+ /// get the macro path (rustc shows the whole `ast::MacroCall`), attribute macros get the
+ /// attribute's range, and derives get only the specific derive that is being referred to.
pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
@@ -726,7 +729,14 @@ impl MacroCallKind {
};
let range = match kind {
- MacroCallKind::FnLike { ast_id, .. } => ast_id.to_ptr(db).text_range(),
+ MacroCallKind::FnLike { ast_id, .. } => {
+ let node = ast_id.to_node(db);
+ node.path()
+ .unwrap()
+ .syntax()
+ .text_range()
+ .cover(node.excl_token().unwrap().text_range())
+ }
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: should be the range of the macro name, not the whole derive
// FIXME: handle `cfg_attr`
@@ -1056,7 +1066,7 @@ impl ExpandTo {
intern::impl_internable!(ModPath, attrs::AttrInput);
-#[salsa_macros::interned(no_lifetime, debug)]
+#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
#[doc(alias = "MacroFileId")]
pub struct MacroCallId {
pub loc: MacroCallLoc,
diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs
index 217d991d11..679f61112a 100644
--- a/crates/hir-expand/src/name.rs
+++ b/crates/hir-expand/src/name.rs
@@ -179,9 +179,10 @@ impl Name {
self.symbol.as_str()
}
+ #[inline]
pub fn display<'a>(
&'a self,
- db: &dyn crate::db::ExpandDatabase,
+ db: &dyn salsa::Database,
edition: Edition,
) -> impl fmt::Display + 'a {
_ = db;
diff --git a/crates/hir-expand/src/prettify_macro_expansion_.rs b/crates/hir-expand/src/prettify_macro_expansion_.rs
index 6134c3a36b..6431d46d39 100644
--- a/crates/hir-expand/src/prettify_macro_expansion_.rs
+++ b/crates/hir-expand/src/prettify_macro_expansion_.rs
@@ -46,7 +46,7 @@ pub fn prettify_macro_expansion(
} else if let Some(crate_name) = &macro_def_crate.extra_data(db).display_name {
make::tokens::ident(crate_name.crate_name().as_str())
} else {
- return dollar_crate.clone();
+ dollar_crate.clone()
}
});
if replacement.text() == "$crate" {
diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs
index 1c8ebb6f53..f97d721dfa 100644
--- a/crates/hir-expand/src/proc_macro.rs
+++ b/crates/hir-expand/src/proc_macro.rs
@@ -4,7 +4,7 @@ use core::fmt;
use std::any::Any;
use std::{panic::RefUnwindSafe, sync};
-use base_db::{Crate, CrateBuilderId, CratesIdMap, Env};
+use base_db::{Crate, CrateBuilderId, CratesIdMap, Env, ProcMacroLoadingError};
use intern::Symbol;
use rustc_hash::FxHashMap;
use span::Span;
@@ -53,8 +53,8 @@ pub enum ProcMacroExpansionError {
System(String),
}
-pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, (String, bool)>;
-type StoredProcMacroLoadResult = Result<Box<[ProcMacro]>, (Box<str>, bool)>;
+pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, ProcMacroLoadingError>;
+type StoredProcMacroLoadResult = Result<Box<[ProcMacro]>, ProcMacroLoadingError>;
#[derive(Default, Debug)]
pub struct ProcMacrosBuilder(FxHashMap<CrateBuilderId, Arc<CrateProcMacros>>);
@@ -77,9 +77,7 @@ impl ProcMacrosBuilder {
proc_macros_crate,
match proc_macro {
Ok(it) => Arc::new(CrateProcMacros(Ok(it.into_boxed_slice()))),
- Err((e, hard_err)) => {
- Arc::new(CrateProcMacros(Err((e.into_boxed_str(), hard_err))))
- }
+ Err(e) => Arc::new(CrateProcMacros(Err(e))),
},
);
}
@@ -139,8 +137,8 @@ impl CrateProcMacros {
)
}
- pub fn get_error(&self) -> Option<(&str, bool)> {
- self.0.as_ref().err().map(|(e, hard_err)| (&**e, *hard_err))
+ pub fn get_error(&self) -> Option<&ProcMacroLoadingError> {
+ self.0.as_ref().err()
}
/// Fetch the [`CustomProcMacroExpander`]s and their corresponding names for the given crate.
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index 1029969992..5d3be07f3d 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -237,15 +237,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
// Interned IDs for Chalk integration
#[salsa::interned]
- fn intern_type_or_const_param_id(
- &self,
- param_id: TypeOrConstParamId,
- ) -> InternedTypeOrConstParamId;
-
- #[salsa::interned]
- fn intern_lifetime_param_id(&self, param_id: LifetimeParamId) -> InternedLifetimeParamId;
-
- #[salsa::interned]
fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
#[salsa::interned]
@@ -282,9 +273,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::variance::variances_of)]
#[salsa::cycle(
- // cycle_fn = crate::variance::variances_of_cycle_fn,
- // cycle_initial = crate::variance::variances_of_cycle_initial,
- cycle_result = crate::variance::variances_of_cycle_initial,
+ cycle_fn = crate::variance::variances_of_cycle_fn,
+ cycle_initial = crate::variance::variances_of_cycle_initial,
)]
fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>;
@@ -329,9 +319,31 @@ fn hir_database_is_dyn_compatible() {
fn _assert_dyn_compatible(_: &dyn HirDatabase) {}
}
-impl_intern_key!(InternedTypeOrConstParamId, TypeOrConstParamId);
+#[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
+#[derive(PartialOrd, Ord)]
+pub struct InternedTypeOrConstParamId {
+ pub loc: TypeOrConstParamId,
+}
+impl ::std::fmt::Debug for InternedTypeOrConstParamId {
+ fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+ f.debug_tuple(stringify!(InternedTypeOrConstParamId))
+ .field(&format_args!("{:04x}", self.0.index()))
+ .finish()
+ }
+}
-impl_intern_key!(InternedLifetimeParamId, LifetimeParamId);
+#[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
+#[derive(PartialOrd, Ord)]
+pub struct InternedLifetimeParamId {
+ pub loc: LifetimeParamId,
+}
+impl ::std::fmt::Debug for InternedLifetimeParamId {
+ fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+ f.debug_tuple(stringify!(InternedLifetimeParamId))
+ .field(&format_args!("{:04x}", self.0.index()))
+ .finish()
+ }
+}
impl_intern_key!(InternedConstParamId, ConstParamId);
diff --git a/crates/hir-ty/src/diagnostics/match_check.rs b/crates/hir-ty/src/diagnostics/match_check.rs
index 0bce32a677..c3ab5aff3d 100644
--- a/crates/hir-ty/src/diagnostics/match_check.rs
+++ b/crates/hir-ty/src/diagnostics/match_check.rs
@@ -25,7 +25,6 @@ use crate::{
db::HirDatabase,
display::{HirDisplay, HirDisplayError, HirFormatter},
infer::BindingMode,
- lang_items::is_box,
};
use self::pat_util::EnumerateAndAdjustIterator;
@@ -77,7 +76,7 @@ pub(crate) enum PatKind {
subpatterns: Vec<FieldPat>,
},
- /// `box P`, `&P`, `&mut P`, etc.
+ /// `&P`, `&mut P`, etc.
Deref {
subpattern: Pat,
},
@@ -406,7 +405,6 @@ impl HirDisplay for Pat {
}
PatKind::Deref { subpattern } => {
match self.ty.kind(Interner) {
- TyKind::Adt(adt, _) if is_box(f.db, adt.0) => write!(f, "box ")?,
&TyKind::Ref(mutbl, ..) => {
write!(f, "&{}", if mutbl == Mutability::Mut { "mut " } else { "" })?
}
diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
index 7cf22c64d0..56fd12e1f2 100644
--- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
+++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -21,7 +21,7 @@ use crate::{
inhabitedness::{is_enum_variant_uninhabited_from, is_ty_uninhabited_from},
};
-use super::{FieldPat, Pat, PatKind, is_box};
+use super::{FieldPat, Pat, PatKind};
use Constructor::*;
@@ -170,8 +170,6 @@ impl<'db> MatchCheckCtx<'db> {
}
PatKind::Deref { subpattern } => {
ctor = match pat.ty.kind(Interner) {
- // This is a box pattern.
- TyKind::Adt(adt, _) if is_box(self.db, adt.0) => Struct,
TyKind::Ref(..) => Ref,
_ => {
never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
@@ -194,23 +192,6 @@ impl<'db> MatchCheckCtx<'db> {
ctor = Struct;
arity = substs.len(Interner);
}
- TyKind::Adt(adt, _) if is_box(self.db, adt.0) => {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
- // _)` or a box pattern. As a hack to avoid an ICE with the former, we
- // ignore other fields than the first one. This will trigger an error later
- // anyway.
- // See https://github.com/rust-lang/rust/issues/82772 ,
- // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
- // The problem is that we can't know from the type whether we'll match
- // normally or through box-patterns. We'll have to figure out a proper
- // solution when we introduce generalized deref patterns. Also need to
- // prevent mixing of those two options.
- fields.retain(|ipat| ipat.idx == 0);
- ctor = Struct;
- arity = 1;
- }
&TyKind::Adt(AdtId(adt), _) => {
ctor = match pat.kind.as_ref() {
PatKind::Leaf { .. } if matches!(adt, hir_def::AdtId::UnionId(_)) => {
@@ -277,12 +258,6 @@ impl<'db> MatchCheckCtx<'db> {
})
.collect(),
},
- TyKind::Adt(adt, _) if is_box(self.db, adt.0) => {
- // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
- // of `std`). So this branch is only reachable when the feature is enabled and
- // the pattern is a box pattern.
- PatKind::Deref { subpattern: subpatterns.next().unwrap() }
- }
TyKind::Adt(adt, substs) => {
let variant = Self::variant_id_for_adt(self.db, pat.ctor(), adt.0).unwrap();
let subpatterns = self
@@ -343,14 +318,8 @@ impl PatCx for MatchCheckCtx<'_> {
Struct | Variant(_) | UnionField => match *ty.kind(Interner) {
TyKind::Tuple(arity, ..) => arity,
TyKind::Adt(AdtId(adt), ..) => {
- if is_box(self.db, adt) {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- 1
- } else {
- let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
- variant.fields(self.db).fields().len()
- }
+ let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
+ variant.fields(self.db).fields().len()
}
_ => {
never!("Unexpected type for `Single` constructor: {:?}", ty);
@@ -383,29 +352,22 @@ impl PatCx for MatchCheckCtx<'_> {
tys.cloned().map(|ty| (ty, PrivateUninhabitedField(false))).collect()
}
TyKind::Ref(.., rty) => single(rty.clone()),
- &TyKind::Adt(AdtId(adt), ref substs) => {
- if is_box(self.db, adt) {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- let subst_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
- single(subst_ty)
- } else {
- let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
-
- let visibilities = LazyCell::new(|| self.db.field_visibilities(variant));
-
- self.list_variant_fields(ty, variant)
- .map(move |(fid, ty)| {
- let is_visible = || {
- matches!(adt, hir_def::AdtId::EnumId(..))
- || visibilities[fid].is_visible_from(self.db, self.module)
- };
- let is_uninhabited = self.is_uninhabited(&ty);
- let private_uninhabited = is_uninhabited && !is_visible();
- (ty, PrivateUninhabitedField(private_uninhabited))
- })
- .collect()
- }
+ &TyKind::Adt(AdtId(adt), ..) => {
+ let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
+
+ let visibilities = LazyCell::new(|| self.db.field_visibilities(variant));
+
+ self.list_variant_fields(ty, variant)
+ .map(move |(fid, ty)| {
+ let is_visible = || {
+ matches!(adt, hir_def::AdtId::EnumId(..))
+ || visibilities[fid].is_visible_from(self.db, self.module)
+ };
+ let is_uninhabited = self.is_uninhabited(&ty);
+ let private_uninhabited = is_uninhabited && !is_visible();
+ (ty, PrivateUninhabitedField(private_uninhabited))
+ })
+ .collect()
}
ty_kind => {
never!("Unexpected type for `{:?}` constructor: {:?}", ctor, ty_kind);
@@ -527,6 +489,14 @@ impl PatCx for MatchCheckCtx<'_> {
fn complexity_exceeded(&self) -> Result<(), Self::Error> {
Err(())
}
+
+ fn report_mixed_deref_pat_ctors(
+ &self,
+ _deref_pat: &DeconstructedPat<'_>,
+ _normal_pat: &DeconstructedPat<'_>,
+ ) {
+ // FIXME(deref_patterns): This could report an error comparable to the one in rustc.
+ }
}
impl fmt::Debug for MatchCheckCtx<'_> {
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index 507bab2920..810fe76f23 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -1432,10 +1432,10 @@ impl HirDisplay for Ty {
match f.closure_style {
ClosureStyle::Hide => return write!(f, "{TYPE_HINT_TRUNCATION}"),
ClosureStyle::ClosureWithId => {
- return write!(f, "{{closure#{:?}}}", id.0.as_u32());
+ return write!(f, "{{closure#{:?}}}", id.0.index());
}
ClosureStyle::ClosureWithSubst => {
- write!(f, "{{closure#{:?}}}", id.0.as_u32())?;
+ write!(f, "{{closure#{:?}}}", id.0.index())?;
return hir_fmt_generics(f, substs.as_slice(Interner), None, None);
}
_ => (),
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index ce53198e96..e880438e3a 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -460,19 +460,17 @@ pub struct InferenceResult {
/// Whenever a tuple field expression access a tuple field, we allocate a tuple id in
/// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of
/// that which allows us to resolve a [`TupleFieldId`]s type.
- pub tuple_field_access_types: FxHashMap<TupleId, Substitution>,
+ tuple_field_access_types: FxHashMap<TupleId, Substitution>,
/// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead.
- pub diagnostics: Vec<InferenceDiagnostic>,
- pub type_of_expr: ArenaMap<ExprId, Ty>,
+ diagnostics: Vec<InferenceDiagnostic>,
+ pub(crate) type_of_expr: ArenaMap<ExprId, Ty>,
/// For each pattern record the type it resolves to.
///
/// **Note**: When a pattern type is resolved it may still contain
/// unresolved or missing subpatterns or subpatterns of mismatched types.
- pub type_of_pat: ArenaMap<PatId, Ty>,
- pub type_of_binding: ArenaMap<BindingId, Ty>,
- pub type_of_rpit: ArenaMap<ImplTraitIdx, Ty>,
- /// Type of the result of `.into_iter()` on the for. `ExprId` is the one of the whole for loop.
- pub type_of_for_iterator: FxHashMap<ExprId, Ty>,
+ pub(crate) type_of_pat: ArenaMap<PatId, Ty>,
+ pub(crate) type_of_binding: ArenaMap<BindingId, Ty>,
+ pub(crate) type_of_rpit: ArenaMap<ImplTraitIdx, Ty>,
type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
/// Whether there are any type-mismatching errors in the result.
// FIXME: This isn't as useful as initially thought due to us falling back placeholders to
@@ -483,7 +481,7 @@ pub struct InferenceResult {
// FIXME: Move this into `InferenceContext`
standard_types: InternedStandardTypes,
/// Stores the types which were implicitly dereferenced in pattern binding modes.
- pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
+ pub(crate) pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
/// Stores the binding mode (`ref` in `let ref x = 2`) of bindings.
///
/// This one is tied to the `PatId` instead of `BindingId`, because in some rare cases, a binding in an
@@ -497,12 +495,12 @@ pub struct InferenceResult {
/// }
/// ```
/// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
- pub binding_modes: ArenaMap<PatId, BindingMode>,
- pub expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
+ pub(crate) binding_modes: ArenaMap<PatId, BindingMode>,
+ pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
pub(crate) closure_info: FxHashMap<ClosureId, (Vec<CapturedItem>, FnTrait)>,
// FIXME: remove this field
pub mutated_bindings_in_closure: FxHashSet<BindingId>,
- pub coercion_casts: FxHashSet<ExprId>,
+ pub(crate) coercion_casts: FxHashSet<ExprId>,
}
impl InferenceResult {
@@ -566,6 +564,26 @@ impl InferenceResult {
pub fn is_erroneous(&self) -> bool {
self.has_errors && self.type_of_expr.iter().count() == 0
}
+
+ pub fn diagnostics(&self) -> &[InferenceDiagnostic] {
+ &self.diagnostics
+ }
+
+ pub fn tuple_field_access_type(&self, id: TupleId) -> &Substitution {
+ &self.tuple_field_access_types[&id]
+ }
+
+ pub fn pat_adjustment(&self, id: PatId) -> Option<&[Ty]> {
+ self.pat_adjustments.get(&id).map(|it| &**it)
+ }
+
+ pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment]> {
+ self.expr_adjustments.get(&id).map(|it| &**it)
+ }
+
+ pub fn binding_mode(&self, id: PatId) -> Option<BindingMode> {
+ self.binding_modes.get(id).copied()
+ }
}
impl Index<ExprId> for InferenceResult {
@@ -772,7 +790,6 @@ impl<'db> InferenceContext<'db> {
type_of_pat,
type_of_binding,
type_of_rpit,
- type_of_for_iterator,
type_mismatches,
has_errors,
standard_types: _,
@@ -832,11 +849,6 @@ impl<'db> InferenceContext<'db> {
*has_errors = *has_errors || ty.contains_unknown();
}
type_of_rpit.shrink_to_fit();
- for ty in type_of_for_iterator.values_mut() {
- *ty = table.resolve_completely(ty.clone());
- *has_errors = *has_errors || ty.contains_unknown();
- }
- type_of_for_iterator.shrink_to_fit();
*has_errors |= !type_mismatches.is_empty();
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index 65a273cdf8..c3029bf2b5 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -1229,10 +1229,11 @@ impl InferenceContext<'_> {
self.select_from_expr(*expr);
}
}
- Expr::Let { pat: _, expr } => {
+ Expr::Let { pat, expr } => {
self.walk_expr(*expr);
- let place = self.place_of_expr(*expr);
- self.ref_expr(*expr, place);
+ if let Some(place) = self.place_of_expr(*expr) {
+ self.consume_with_pat(place, *pat);
+ }
}
Expr::UnaryOp { expr, op: _ }
| Expr::Array(Array::Repeat { initializer: expr, repeat: _ })
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index d40d52c134..d43c99fc28 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -731,9 +731,32 @@ impl InferenceContext<'_> {
&Pat::Expr(expr) => {
Some(self.infer_expr(expr, &Expectation::none(), ExprIsRead::No))
}
- Pat::Path(path) => Some(self.infer_expr_path(path, target.into(), tgt_expr)),
+ Pat::Path(path) => {
+ let resolver_guard =
+ self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
+ let resolution = self.resolver.resolve_path_in_value_ns_fully(
+ self.db,
+ path,
+ self.body.pat_path_hygiene(target),
+ );
+ self.resolver.reset_to_guard(resolver_guard);
+
+ if matches!(
+ resolution,
+ Some(
+ ValueNs::ConstId(_)
+ | ValueNs::StructId(_)
+ | ValueNs::EnumVariantId(_)
+ )
+ ) {
+ None
+ } else {
+ Some(self.infer_expr_path(path, target.into(), tgt_expr))
+ }
+ }
_ => None,
};
+ let is_destructuring_assignment = lhs_ty.is_none();
if let Some(lhs_ty) = lhs_ty {
self.write_pat_ty(target, lhs_ty.clone());
@@ -747,7 +770,15 @@ impl InferenceContext<'_> {
self.inside_assignment = false;
self.resolver.reset_to_guard(resolver_guard);
}
- self.result.standard_types.unit.clone()
+ if is_destructuring_assignment && self.diverges.is_always() {
+ // Ordinary assignments always return `()`, even when they diverge.
+ // However, rustc lowers destructuring assignments into blocks, and blocks return `!` if they have no tail
+ // expression and they diverge. Therefore, we have to do the same here, even though we don't lower destructuring
+ // assignments into blocks.
+ self.table.new_maybe_never_var()
+ } else {
+ self.result.standard_types.unit.clone()
+ }
}
Expr::Range { lhs, rhs, range_type } => {
let lhs_ty =
diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs
index 3fa2bfbd1b..107da6a5af 100644
--- a/crates/hir-ty/src/layout.rs
+++ b/crates/hir-ty/src/layout.rs
@@ -261,7 +261,7 @@ pub fn layout_of_ty_query(
}
// Potentially-wide pointers.
TyKind::Ref(_, _, pointee) | TyKind::Raw(_, pointee) => {
- let mut data_ptr = scalar_unit(dl, Primitive::Pointer(AddressSpace::DATA));
+ let mut data_ptr = scalar_unit(dl, Primitive::Pointer(AddressSpace::ZERO));
if matches!(ty.kind(Interner), TyKind::Ref(..)) {
data_ptr.valid_range_mut().start = 1;
}
@@ -285,7 +285,7 @@ pub fn layout_of_ty_query(
scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false))
}
TyKind::Dyn(..) => {
- let mut vtable = scalar_unit(dl, Primitive::Pointer(AddressSpace::DATA));
+ let mut vtable = scalar_unit(dl, Primitive::Pointer(AddressSpace::ZERO));
vtable.valid_range_mut().start = 1;
vtable
}
diff --git a/crates/hir-ty/src/layout/target.rs b/crates/hir-ty/src/layout/target.rs
index e1e1c44996..88c33eccca 100644
--- a/crates/hir-ty/src/layout/target.rs
+++ b/crates/hir-ty/src/layout/target.rs
@@ -2,7 +2,7 @@
use base_db::Crate;
use hir_def::layout::TargetDataLayout;
-use rustc_abi::{AlignFromBytesError, TargetDataLayoutErrors};
+use rustc_abi::{AlignFromBytesError, TargetDataLayoutErrors, AddressSpace};
use triomphe::Arc;
use crate::db::HirDatabase;
@@ -12,7 +12,7 @@ pub fn target_data_layout_query(
krate: Crate,
) -> Result<Arc<TargetDataLayout>, Arc<str>> {
match &krate.workspace_data(db).data_layout {
- Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) {
+ Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it, AddressSpace::ZERO) {
Ok(it) => Ok(Arc::new(it)),
Err(e) => {
Err(match e {
@@ -39,6 +39,7 @@ pub fn target_data_layout_query(
target,
} => format!(r#"inconsistent target specification: "data-layout" claims pointers are {pointer_size}-bit, while "target-pointer-width" is `{target}`"#),
TargetDataLayoutErrors::InvalidBitsSize { err } => err,
+ TargetDataLayoutErrors::UnknownPointerSpecification { err } => format!(r#"use of unknown pointer specifer in "data-layout": {err}"#),
}.into())
}
},
diff --git a/crates/hir-ty/src/mapping.rs b/crates/hir-ty/src/mapping.rs
index 6936d8193e..9d3d2044c4 100644
--- a/crates/hir-ty/src/mapping.rs
+++ b/crates/hir-ty/src/mapping.rs
@@ -13,7 +13,8 @@ use salsa::{
use crate::{
AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId, Interner, OpaqueTyId,
- PlaceholderIndex, chalk_db, db::HirDatabase,
+ PlaceholderIndex, chalk_db,
+ db::{HirDatabase, InternedLifetimeParamId, InternedTypeOrConstParamId},
};
pub trait ToChalk {
@@ -125,30 +126,32 @@ pub fn from_assoc_type_id(id: AssocTypeId) -> TypeAliasId {
pub fn from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> TypeOrConstParamId {
assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
// SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound.
- let interned_id = FromId::from_id(unsafe { Id::from_u32(idx.idx.try_into().unwrap()) });
- db.lookup_intern_type_or_const_param_id(interned_id)
+ let interned_id =
+ InternedTypeOrConstParamId::from_id(unsafe { Id::from_index(idx.idx.try_into().unwrap()) });
+ interned_id.loc(db)
}
pub fn to_placeholder_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> PlaceholderIndex {
- let interned_id = db.intern_type_or_const_param_id(id);
+ let interned_id = InternedTypeOrConstParamId::new(db, id);
PlaceholderIndex {
ui: chalk_ir::UniverseIndex::ROOT,
- idx: interned_id.as_id().as_u32() as usize,
+ idx: interned_id.as_id().index() as usize,
}
}
pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> LifetimeParamId {
assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
// SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound.
- let interned_id = FromId::from_id(unsafe { Id::from_u32(idx.idx.try_into().unwrap()) });
- db.lookup_intern_lifetime_param_id(interned_id)
+ let interned_id =
+ InternedLifetimeParamId::from_id(unsafe { Id::from_index(idx.idx.try_into().unwrap()) });
+ interned_id.loc(db)
}
pub fn lt_to_placeholder_idx(db: &dyn HirDatabase, id: LifetimeParamId) -> PlaceholderIndex {
- let interned_id = db.intern_lifetime_param_id(id);
+ let interned_id = InternedLifetimeParamId::new(db, id);
PlaceholderIndex {
ui: chalk_ir::UniverseIndex::ROOT,
- idx: interned_id.as_id().as_u32() as usize,
+ idx: interned_id.as_id().index() as usize,
}
}
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index 1ec55a8209..55fada1436 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -630,7 +630,7 @@ impl Evaluator<'_> {
Ok(target_data_layout) => target_data_layout,
Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)),
};
- let cached_ptr_size = target_data_layout.pointer_size.bytes_usize();
+ let cached_ptr_size = target_data_layout.pointer_size().bytes_usize();
Ok(Evaluator {
target_data_layout,
stack: vec![0],
diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs
index d049c678e2..b5de0e52f5 100644
--- a/crates/hir-ty/src/test_db.rs
+++ b/crates/hir-ty/src/test_db.rs
@@ -166,10 +166,10 @@ impl TestDB {
self.events.lock().unwrap().take().unwrap()
}
- pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
+ pub(crate) fn log_executed(&self, f: impl FnOnce()) -> (Vec<String>, Vec<salsa::Event>) {
let events = self.log(f);
- events
- .into_iter()
+ let executed = events
+ .iter()
.filter_map(|e| match e.kind {
// This is pretty horrible, but `Debug` is the only way to inspect
// QueryDescriptor at the moment.
@@ -181,6 +181,7 @@ impl TestDB {
}
_ => None,
})
- .collect()
+ .collect();
+ (executed, events)
}
}
diff --git a/crates/hir-ty/src/tests/closure_captures.rs b/crates/hir-ty/src/tests/closure_captures.rs
index 7fb981752d..dbc68eeba1 100644
--- a/crates/hir-ty/src/tests/closure_captures.rs
+++ b/crates/hir-ty/src/tests/closure_captures.rs
@@ -446,7 +446,7 @@ fn main() {
}
#[test]
-fn let_binding_is_a_ref_capture() {
+fn let_binding_is_a_ref_capture_in_ref_binding() {
check_closure_captures(
r#"
//- minicore:copy
@@ -454,12 +454,36 @@ struct S;
fn main() {
let mut s = S;
let s_ref = &mut s;
+ let mut s2 = S;
+ let s_ref2 = &mut s2;
let closure = || {
if let ref cb = s_ref {
+ } else if let ref mut cb = s_ref2 {
}
};
}
"#,
- expect!["83..135;49..54;112..117 ByRef(Shared) s_ref &'? &'? mut S"],
+ expect![[r#"
+ 129..225;49..54;149..155 ByRef(Shared) s_ref &'? &'? mut S
+ 129..225;93..99;188..198 ByRef(Mut { kind: Default }) s_ref2 &'? mut &'? mut S"#]],
+ );
+}
+
+#[test]
+fn let_binding_is_a_value_capture_in_binding() {
+ check_closure_captures(
+ r#"
+//- minicore:copy, option
+struct Box(i32);
+fn main() {
+ let b = Some(Box(0));
+ let closure = || {
+ if let Some(b) = b {
+ let _move = b;
+ }
+ };
+}
+"#,
+ expect!["73..149;37..38;103..104 ByValue b Option<Box>"],
);
}
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index 0377ce95f1..3159499e86 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -1,6 +1,7 @@
use base_db::SourceDatabase;
use expect_test::Expect;
use hir_def::{DefWithBodyId, ModuleDefId};
+use salsa::EventKind;
use test_fixture::WithFixture;
use crate::{db::HirDatabase, test_db::TestDB};
@@ -567,11 +568,11 @@ fn main() {
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
- "query_with_diagnostics_",
+ "TraitItems::query_with_diagnostics_",
"body_shim",
"body_with_source_map_shim",
"attrs_shim",
- "of_",
+ "ImplItems::of_",
"infer_shim",
"trait_signature_shim",
"trait_signature_with_source_map_shim",
@@ -596,8 +597,8 @@ fn main() {
"struct_signature_with_source_map_shim",
"generic_predicates_shim",
"value_ty_shim",
- "firewall_",
- "query_",
+ "VariantFields::firewall_",
+ "VariantFields::query_",
"lang_item",
"inherent_impls_in_crate_shim",
"impl_signature_shim",
@@ -674,11 +675,11 @@ fn main() {
"file_item_tree_query",
"real_span_map_shim",
"crate_local_def_map",
- "query_with_diagnostics_",
+ "TraitItems::query_with_diagnostics_",
"body_with_source_map_shim",
"attrs_shim",
"body_shim",
- "of_",
+ "ImplItems::of_",
"infer_shim",
"attrs_shim",
"trait_signature_with_source_map_shim",
@@ -697,7 +698,7 @@ fn main() {
"function_signature_with_source_map_shim",
"expr_scopes_shim",
"struct_signature_with_source_map_shim",
- "query_",
+ "VariantFields::query_",
"inherent_impls_in_crate_shim",
"impl_signature_with_source_map_shim",
"impl_signature_shim",
@@ -718,10 +719,23 @@ fn execute_assert_events(
required: &[(&str, usize)],
expect: Expect,
) {
- let events = db.log_executed(f);
- for (event, count) in required {
- let n = events.iter().filter(|it| it.contains(event)).count();
- assert_eq!(n, *count, "Expected {event} to be executed {count} times, but only got {n}");
- }
- expect.assert_debug_eq(&events);
+ let (executed, events) = db.log_executed(f);
+ salsa::attach(db, || {
+ for (event, count) in required {
+ let n = executed.iter().filter(|it| it.contains(event)).count();
+ assert_eq!(
+ n,
+ *count,
+ "Expected {event} to be executed {count} times, but only got {n}:\n \
+ Executed: {executed:#?}\n \
+ Event log: {events:#?}",
+ events = events
+ .iter()
+ .filter(|event| !matches!(event.kind, EventKind::WillCheckCancellation))
+ .map(|event| { format!("{:?}", event.kind) })
+ .collect::<Vec<_>>(),
+ );
+ }
+ expect.assert_debug_eq(&executed);
+ });
}
diff --git a/crates/hir-ty/src/tests/never_type.rs b/crates/hir-ty/src/tests/never_type.rs
index 1ca4c9b2ad..6a9135622d 100644
--- a/crates/hir-ty/src/tests/never_type.rs
+++ b/crates/hir-ty/src/tests/never_type.rs
@@ -785,3 +785,31 @@ fn make_up_a_pointer<T>() -> *const T {
"#]],
)
}
+
+#[test]
+fn diverging_destructuring_assignment() {
+ check_infer_with_mismatches(
+ r#"
+fn foo() {
+ let n = match 42 {
+ 0 => _ = loop {},
+ _ => 0,
+ };
+}
+ "#,
+ expect![[r#"
+ 9..84 '{ ... }; }': ()
+ 19..20 'n': i32
+ 23..81 'match ... }': i32
+ 29..31 '42': i32
+ 42..43 '0': i32
+ 42..43 '0': i32
+ 47..48 '_': !
+ 47..58 '_ = loop {}': i32
+ 51..58 'loop {}': !
+ 56..58 '{}': ()
+ 68..69 '_': i32
+ 73..74 '0': i32
+ "#]],
+ );
+}
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs
index 43e8f3747a..b154e59878 100644
--- a/crates/hir-ty/src/tests/simple.rs
+++ b/crates/hir-ty/src/tests/simple.rs
@@ -3751,7 +3751,7 @@ fn foo() {
}
let v: bool = true;
m!();
- // ^^^^ i32
+ // ^^ i32
}
"#,
);
@@ -3765,39 +3765,39 @@ fn foo() {
let v: bool;
macro_rules! m { () => { v } }
m!();
- // ^^^^ bool
+ // ^^ bool
let v: char;
macro_rules! m { () => { v } }
m!();
- // ^^^^ char
+ // ^^ char
{
let v: u8;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u8
+ // ^^ u8
let v: i8;
macro_rules! m { () => { v } }
m!();
- // ^^^^ i8
+ // ^^ i8
let v: i16;
macro_rules! m { () => { v } }
m!();
- // ^^^^ i16
+ // ^^ i16
{
let v: u32;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u32
+ // ^^ u32
let v: u64;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u64
+ // ^^ u64
}
}
}
diff --git a/crates/hir-ty/src/variance.rs b/crates/hir-ty/src/variance.rs
index 08a215fecf..87d9df611b 100644
--- a/crates/hir-ty/src/variance.rs
+++ b/crates/hir-ty/src/variance.rs
@@ -54,14 +54,14 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option<Ar
variances.is_empty().not().then(|| Arc::from_iter(variances))
}
-// pub(crate) fn variances_of_cycle_fn(
-// _db: &dyn HirDatabase,
-// _result: &Option<Arc<[Variance]>>,
-// _count: u32,
-// _def: GenericDefId,
-// ) -> salsa::CycleRecoveryAction<Option<Arc<[Variance]>>> {
-// salsa::CycleRecoveryAction::Iterate
-// }
+pub(crate) fn variances_of_cycle_fn(
+ _db: &dyn HirDatabase,
+ _result: &Option<Arc<[Variance]>>,
+ _count: u32,
+ _def: GenericDefId,
+) -> salsa::CycleRecoveryAction<Option<Arc<[Variance]>>> {
+ salsa::CycleRecoveryAction::Iterate
+}
pub(crate) fn variances_of_cycle_initial(
db: &dyn HirDatabase,
@@ -965,7 +965,7 @@ struct S3<T>(S<T, T>);
struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
"#,
expect![[r#"
- FixedPoint[T: bivariant, U: bivariant, V: bivariant]
+ FixedPoint[T: covariant, U: covariant, V: covariant]
"#]],
);
}
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index aba2e032b3..c1e814ec22 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -36,16 +36,16 @@ pub use hir_ty::{
};
macro_rules! diagnostics {
- ($($diag:ident $(<$lt:lifetime>)?,)*) => {
+ ($AnyDiagnostic:ident <$db:lifetime> -> $($diag:ident $(<$lt:lifetime>)?,)*) => {
#[derive(Debug)]
- pub enum AnyDiagnostic<'db> {$(
+ pub enum $AnyDiagnostic<$db> {$(
$diag(Box<$diag $(<$lt>)?>),
)*}
$(
- impl<'db> From<$diag $(<$lt>)?> for AnyDiagnostic<'db> {
- fn from(d: $diag $(<$lt>)?) -> AnyDiagnostic<'db> {
- AnyDiagnostic::$diag(Box::new(d))
+ impl<$db> From<$diag $(<$lt>)?> for $AnyDiagnostic<$db> {
+ fn from(d: $diag $(<$lt>)?) -> $AnyDiagnostic<$db> {
+ $AnyDiagnostic::$diag(Box::new(d))
}
}
)*
@@ -66,7 +66,7 @@ macro_rules! diagnostics {
// }, ...
// ]
-diagnostics![
+diagnostics![AnyDiagnostic<'db> ->
AwaitOutsideOfAsync,
BreakOutsideOfLoop,
CastToUnsized<'db>,
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index e8a1816971..5c6f622e6c 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -1260,7 +1260,9 @@ impl TupleField {
}
pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> Type<'db> {
- let ty = db.infer(self.owner).tuple_field_access_types[&self.tuple]
+ let ty = db
+ .infer(self.owner)
+ .tuple_field_access_type(self.tuple)
.as_slice(Interner)
.get(self.index as usize)
.and_then(|arg| arg.ty(Interner))
@@ -1927,7 +1929,7 @@ impl DefWithBody {
expr_store_diagnostics(db, acc, &source_map);
let infer = db.infer(self.into());
- for d in &infer.diagnostics {
+ for d in infer.diagnostics() {
acc.extend(AnyDiagnostic::inference_diagnostic(
db,
self.into(),
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index f18ca7cb20..0662bfddcf 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -254,7 +254,7 @@ impl<'db> SourceAnalyzer<'db> {
// expressions nor patterns).
let expr_id = self.expr_id(expr.clone())?.as_expr()?;
let infer = self.infer()?;
- infer.expr_adjustments.get(&expr_id).map(|v| &**v)
+ infer.expr_adjustment(expr_id)
}
pub(crate) fn type_of_type(
@@ -286,7 +286,7 @@ impl<'db> SourceAnalyzer<'db> {
let infer = self.infer()?;
let coerced = expr_id
.as_expr()
- .and_then(|expr_id| infer.expr_adjustments.get(&expr_id))
+ .and_then(|expr_id| infer.expr_adjustment(expr_id))
.and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.clone()));
let ty = infer[expr_id].clone();
let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
@@ -302,12 +302,11 @@ impl<'db> SourceAnalyzer<'db> {
let infer = self.infer()?;
let coerced = match expr_or_pat_id {
ExprOrPatId::ExprId(idx) => infer
- .expr_adjustments
- .get(&idx)
+ .expr_adjustment(idx)
.and_then(|adjusts| adjusts.last().cloned())
.map(|adjust| adjust.target),
ExprOrPatId::PatId(idx) => {
- infer.pat_adjustments.get(&idx).and_then(|adjusts| adjusts.last().cloned())
+ infer.pat_adjustment(idx).and_then(|adjusts| adjusts.last().cloned())
}
};
@@ -345,7 +344,7 @@ impl<'db> SourceAnalyzer<'db> {
) -> Option<BindingMode> {
let id = self.pat_id(&pat.clone().into())?;
let infer = self.infer()?;
- infer.binding_modes.get(id.as_pat()?).map(|bm| match bm {
+ infer.binding_mode(id.as_pat()?).map(|bm| match bm {
hir_ty::BindingMode::Move => BindingMode::Move,
hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
@@ -362,8 +361,7 @@ impl<'db> SourceAnalyzer<'db> {
let infer = self.infer()?;
Some(
infer
- .pat_adjustments
- .get(&pat_id.as_pat()?)?
+ .pat_adjustment(pat_id.as_pat()?)?
.iter()
.map(|ty| Type::new_with_resolver(db, &self.resolver, ty.clone()))
.collect(),
@@ -736,7 +734,7 @@ impl<'db> SourceAnalyzer<'db> {
let variant = self.infer()?.variant_resolution_for_pat(pat_id.as_pat()?)?;
let variant_data = variant.fields(db);
let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
- let (adt, subst) = self.infer()?.type_of_pat.get(pat_id.as_pat()?)?.as_adt()?;
+ let (adt, subst) = self.infer()?[pat_id.as_pat()?].as_adt()?;
let field_ty =
db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
Some((
@@ -765,7 +763,8 @@ impl<'db> SourceAnalyzer<'db> {
},
};
- let res = resolve_hir_path(db, &self.resolver, path, HygieneId::ROOT, Some(store))?;
+ let body_owner = self.resolver.body_owner();
+ let res = resolve_hir_value_path(db, &self.resolver, body_owner, path, HygieneId::ROOT)?;
match res {
PathResolution::Def(def) => Some(def),
_ => None,
@@ -1249,7 +1248,7 @@ impl<'db> SourceAnalyzer<'db> {
let infer = self.infer()?;
let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
- let substs = infer.type_of_pat[pat_id].as_adt()?.1;
+ let substs = infer[pat_id].as_adt()?.1;
let (variant, missing_fields, _exhaustive) =
record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
@@ -1785,8 +1784,8 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H
}
fn type_of_expr_including_adjust(infer: &InferenceResult, id: ExprId) -> Option<&Ty> {
- match infer.expr_adjustments.get(&id).and_then(|adjustments| adjustments.last()) {
+ match infer.expr_adjustment(id).and_then(|adjustments| adjustments.last()) {
Some(adjustment) => Some(&adjustment.target),
- None => infer.type_of_expr.get(id),
+ None => Some(&infer[id]),
}
}
diff --git a/crates/ide-assists/src/handlers/expand_rest_pattern.rs b/crates/ide-assists/src/handlers/expand_rest_pattern.rs
index b71de5e00c..c80b78fd97 100644
--- a/crates/ide-assists/src/handlers/expand_rest_pattern.rs
+++ b/crates/ide-assists/src/handlers/expand_rest_pattern.rs
@@ -175,7 +175,7 @@ pub(crate) fn expand_rest_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) ->
// ast::TuplePat(it) => (),
// FIXME
// ast::SlicePat(it) => (),
- _ => return None,
+ _ => None,
}
}
}
diff --git a/crates/ide-assists/src/handlers/promote_local_to_const.rs b/crates/ide-assists/src/handlers/promote_local_to_const.rs
index 6316a8f0db..603be4d667 100644
--- a/crates/ide-assists/src/handlers/promote_local_to_const.rs
+++ b/crates/ide-assists/src/handlers/promote_local_to_const.rs
@@ -3,8 +3,7 @@ use ide_db::{assists::AssistId, defs::Definition};
use stdx::to_upper_snake_case;
use syntax::{
AstNode,
- ast::{self, HasName, make},
- ted,
+ ast::{self, HasName, syntax_factory::SyntaxFactory},
};
use crate::{
@@ -69,15 +68,18 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
"Promote local to constant",
let_stmt.syntax().text_range(),
|edit| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(let_stmt.syntax());
let name = to_upper_snake_case(&name.to_string());
let usages = Definition::Local(local).usages(&ctx.sema).all();
if let Some(usages) = usages.references.get(&ctx.file_id()) {
- let name_ref = make::name_ref(&name);
+ let name_ref = make.name_ref(&name);
for usage in usages {
let Some(usage_name) = usage.name.as_name_ref().cloned() else { continue };
if let Some(record_field) = ast::RecordExprField::for_name_ref(&usage_name) {
- let name_expr = make::expr_path(make::path_from_text(&name));
+ let path = make.ident_path(&name);
+ let name_expr = make.expr_path(path);
utils::replace_record_field_expr(ctx, edit, record_field, name_expr);
} else {
let usage_range = usage.range;
@@ -86,15 +88,17 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
}
}
- let item = make::item_const(None, make::name(&name), make::ty(&ty), initializer)
- .clone_for_update();
- let let_stmt = edit.make_mut(let_stmt);
+ let item = make.item_const(None, make.name(&name), make.ty(&ty), initializer);
if let Some((cap, name)) = ctx.config.snippet_cap.zip(item.name()) {
- edit.add_tabstop_before(cap, name);
+ let tabstop = edit.make_tabstop_before(cap);
+ editor.add_annotation(name.syntax().clone(), tabstop);
}
- ted::replace(let_stmt.syntax(), item.syntax());
+ editor.replace(let_stmt.syntax(), item.syntax());
+
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs b/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
index e933bcc40d..62914ee7f3 100644
--- a/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
+++ b/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
@@ -1,8 +1,5 @@
use ide_db::syntax_helpers::suggest_name;
-use syntax::{
- ast::{self, AstNode, make},
- ted,
-};
+use syntax::ast::{self, AstNode, syntax_factory::SyntaxFactory};
use crate::{AssistContext, AssistId, Assists};
@@ -60,21 +57,25 @@ pub(crate) fn replace_is_method_with_if_let_method(
message,
call_expr.syntax().text_range(),
|edit| {
- let call_expr = edit.make_mut(call_expr);
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(call_expr.syntax());
- let var_pat = make::ident_pat(false, false, make::name(&var_name));
- let pat = make::tuple_struct_pat(make::ext::ident_path(text), [var_pat.into()]);
- let let_expr = make::expr_let(pat.into(), receiver).clone_for_update();
+ let var_pat = make.ident_pat(false, false, make.name(&var_name));
+ let pat = make.tuple_struct_pat(make.ident_path(text), [var_pat.into()]);
+ let let_expr = make.expr_let(pat.into(), receiver);
if let Some(cap) = ctx.config.snippet_cap {
if let Some(ast::Pat::TupleStructPat(pat)) = let_expr.pat() {
if let Some(first_var) = pat.fields().next() {
- edit.add_placeholder_snippet(cap, first_var);
+ let placeholder = edit.make_placeholder_snippet(cap);
+ editor.add_annotation(first_var.syntax(), placeholder);
}
}
}
- ted::replace(call_expr.syntax(), let_expr.syntax());
+ editor.replace(call_expr.syntax(), let_expr.syntax());
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs b/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
index 109269bd6e..504e12f93d 100644
--- a/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
+++ b/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
@@ -1,8 +1,7 @@
use ide_db::assists::AssistId;
use syntax::{
AstNode, T,
- ast::{self, make},
- ted,
+ ast::{self, syntax_factory::SyntaxFactory},
};
use crate::{AssistContext, Assists};
@@ -37,8 +36,7 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>)
RCur,
}
- let makro = ctx.find_node_at_offset::<ast::MacroCall>()?.clone_for_update();
- let makro_text_range = makro.syntax().text_range();
+ let makro = ctx.find_node_at_offset::<ast::MacroCall>()?;
let cursor_offset = ctx.offset();
let semicolon = makro.semicolon_token();
@@ -71,24 +69,28 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>)
},
token_tree.syntax().text_range(),
|builder| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = builder.make_editor(makro.syntax());
+
match token {
MacroDelims::LPar | MacroDelims::RPar => {
- ted::replace(ltoken, make::token(T!['{']));
- ted::replace(rtoken, make::token(T!['}']));
+ editor.replace(ltoken, make.token(T!['{']));
+ editor.replace(rtoken, make.token(T!['}']));
if let Some(sc) = semicolon {
- ted::remove(sc);
+ editor.delete(sc);
}
}
MacroDelims::LBra | MacroDelims::RBra => {
- ted::replace(ltoken, make::token(T!['(']));
- ted::replace(rtoken, make::token(T![')']));
+ editor.replace(ltoken, make.token(T!['(']));
+ editor.replace(rtoken, make.token(T![')']));
}
MacroDelims::LCur | MacroDelims::RCur => {
- ted::replace(ltoken, make::token(T!['[']));
- ted::replace(rtoken, make::token(T![']']));
+ editor.replace(ltoken, make.token(T!['[']));
+ editor.replace(rtoken, make.token(T![']']));
}
}
- builder.replace(makro_text_range, makro.syntax().text());
+ editor.add_mappings(make.finish_with_mappings());
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/unmerge_match_arm.rs b/crates/ide-assists/src/handlers/unmerge_match_arm.rs
index 5aedff5cc7..7b0f2dc65a 100644
--- a/crates/ide-assists/src/handlers/unmerge_match_arm.rs
+++ b/crates/ide-assists/src/handlers/unmerge_match_arm.rs
@@ -1,8 +1,7 @@
use syntax::{
Direction, SyntaxKind, T,
- algo::neighbor,
- ast::{self, AstNode, edit::IndentLevel, make},
- ted::{self, Position},
+ ast::{self, AstNode, edit::IndentLevel, syntax_factory::SyntaxFactory},
+ syntax_editor::{Element, Position},
};
use crate::{AssistContext, AssistId, Assists};
@@ -33,7 +32,7 @@ use crate::{AssistContext, AssistId, Assists};
// ```
pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let pipe_token = ctx.find_token_syntax_at_offset(T![|])?;
- let or_pat = ast::OrPat::cast(pipe_token.parent()?)?.clone_for_update();
+ let or_pat = ast::OrPat::cast(pipe_token.parent()?)?;
if or_pat.leading_pipe().is_some_and(|it| it == pipe_token) {
return None;
}
@@ -44,13 +43,14 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
// without `OrPat`.
let new_parent = match_arm.syntax().parent()?;
- let old_parent_range = new_parent.text_range();
acc.add(
AssistId::refactor_rewrite("unmerge_match_arm"),
"Unmerge match arm",
pipe_token.text_range(),
|edit| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(&new_parent);
let pats_after = pipe_token
.siblings_with_tokens(Direction::Next)
.filter_map(|it| ast::Pat::cast(it.into_node()?))
@@ -59,11 +59,9 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
let new_pat = if pats_after.len() == 1 {
pats_after[0].clone()
} else {
- make::or_pat(pats_after, or_pat.leading_pipe().is_some()).into()
+ make.or_pat(pats_after, or_pat.leading_pipe().is_some()).into()
};
- let new_match_arm =
- make::match_arm(new_pat, match_arm.guard(), match_arm_body).clone_for_update();
-
+ let new_match_arm = make.match_arm(new_pat, match_arm.guard(), match_arm_body);
let mut pipe_index = pipe_token.index();
if pipe_token
.prev_sibling_or_token()
@@ -71,10 +69,13 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
{
pipe_index -= 1;
}
- or_pat.syntax().splice_children(
- pipe_index..or_pat.syntax().children_with_tokens().count(),
- Vec::new(),
- );
+ for child in or_pat
+ .syntax()
+ .children_with_tokens()
+ .skip_while(|child| child.index() < pipe_index)
+ {
+ editor.delete(child.syntax_element());
+ }
let mut insert_after_old_arm = Vec::new();
@@ -86,33 +87,19 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
// body is a block, but we don't bother to check that.
// - Missing after the arm with arms after, if the arm body is a block. In this case
// we don't want to insert a comma at all.
- let has_comma_after =
- std::iter::successors(match_arm.syntax().last_child_or_token(), |it| {
- it.prev_sibling_or_token()
- })
- .map(|it| it.kind())
- .find(|it| !it.is_trivia())
- == Some(T![,]);
- let has_arms_after = neighbor(&match_arm, Direction::Next).is_some();
- if !has_comma_after && !has_arms_after {
- insert_after_old_arm.push(make::token(T![,]).into());
+ let has_comma_after = match_arm.comma_token().is_some();
+ if !has_comma_after && !match_arm.expr().unwrap().is_block_like() {
+ insert_after_old_arm.push(make.token(T![,]).into());
}
let indent = IndentLevel::from_node(match_arm.syntax());
- insert_after_old_arm.push(make::tokens::whitespace(&format!("\n{indent}")).into());
+ insert_after_old_arm.push(make.whitespace(&format!("\n{indent}")).into());
insert_after_old_arm.push(new_match_arm.syntax().clone().into());
- ted::insert_all_raw(Position::after(match_arm.syntax()), insert_after_old_arm);
-
- if has_comma_after {
- ted::insert_raw(
- Position::last_child_of(new_match_arm.syntax()),
- make::token(T![,]),
- );
- }
-
- edit.replace(old_parent_range, new_parent.to_string());
+ editor.insert_all(Position::after(match_arm.syntax()), insert_after_old_arm);
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -258,7 +245,7 @@ fn main() {
let x = X::A;
let y = match x {
X::A => 1i32,
- X::B => 1i32
+ X::B => 1i32,
};
}
"#,
@@ -276,7 +263,7 @@ enum X { A, B }
fn main() {
let x = X::A;
match x {
- X::A $0| X::B => {},
+ X::A $0| X::B => {}
}
}
"#,
@@ -287,8 +274,8 @@ enum X { A, B }
fn main() {
let x = X::A;
match x {
- X::A => {},
- X::B => {},
+ X::A => {}
+ X::B => {}
}
}
"#,
diff --git a/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs b/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
index e1b94673e7..5183566d13 100644
--- a/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
+++ b/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
@@ -2,8 +2,7 @@ use ide_db::source_change::SourceChangeBuilder;
use itertools::Itertools;
use syntax::{
NodeOrToken, SyntaxToken, T, TextRange, algo,
- ast::{self, AstNode, make},
- ted::{self, Position},
+ ast::{self, AstNode, make, syntax_factory::SyntaxFactory},
};
use crate::{AssistContext, AssistId, Assists};
@@ -173,40 +172,45 @@ fn wrap_derive(
}
}
let handle_source_change = |edit: &mut SourceChangeBuilder| {
- let new_derive = make::attr_outer(make::meta_token_tree(
- make::ext::ident_path("derive"),
- make::token_tree(T!['('], new_derive),
- ))
- .clone_for_update();
- let meta = make::meta_token_tree(
- make::ext::ident_path("cfg_attr"),
- make::token_tree(
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(attr.syntax());
+ let new_derive = make.attr_outer(
+ make.meta_token_tree(make.ident_path("derive"), make.token_tree(T!['('], new_derive)),
+ );
+ let meta = make.meta_token_tree(
+ make.ident_path("cfg_attr"),
+ make.token_tree(
T!['('],
vec![
- NodeOrToken::Token(make::token(T![,])),
- NodeOrToken::Token(make::tokens::whitespace(" ")),
- NodeOrToken::Token(make::tokens::ident("derive")),
- NodeOrToken::Node(make::token_tree(T!['('], cfg_derive_tokens)),
+ NodeOrToken::Token(make.token(T![,])),
+ NodeOrToken::Token(make.whitespace(" ")),
+ NodeOrToken::Token(make.ident("derive")),
+ NodeOrToken::Node(make.token_tree(T!['('], cfg_derive_tokens)),
],
),
);
- // Remove the derive attribute
- let edit_attr = edit.make_syntax_mut(attr.syntax().clone());
-
- ted::replace(edit_attr, new_derive.syntax().clone());
- let cfg_attr = make::attr_outer(meta).clone_for_update();
- ted::insert_all_raw(
- Position::after(new_derive.syntax().clone()),
- vec![make::tokens::whitespace("\n").into(), cfg_attr.syntax().clone().into()],
+ let cfg_attr = make.attr_outer(meta);
+ editor.replace_with_many(
+ attr.syntax(),
+ vec![
+ new_derive.syntax().clone().into(),
+ make.whitespace("\n").into(),
+ cfg_attr.syntax().clone().into(),
+ ],
);
+
if let Some(snippet_cap) = ctx.config.snippet_cap {
if let Some(first_meta) =
cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
{
- edit.add_tabstop_after_token(snippet_cap, first_meta)
+ let tabstop = edit.make_tabstop_after(snippet_cap);
+ editor.add_annotation(first_meta, tabstop);
}
}
+
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
};
acc.add(
@@ -221,10 +225,10 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) ->
let range = attr.syntax().text_range();
let path = attr.path()?;
let handle_source_change = |edit: &mut SourceChangeBuilder| {
- let mut raw_tokens = vec![
- NodeOrToken::Token(make::token(T![,])),
- NodeOrToken::Token(make::tokens::whitespace(" ")),
- ];
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(attr.syntax());
+ let mut raw_tokens =
+ vec![NodeOrToken::Token(make.token(T![,])), NodeOrToken::Token(make.whitespace(" "))];
path.syntax().descendants_with_tokens().for_each(|it| {
if let NodeOrToken::Token(token) = it {
raw_tokens.push(NodeOrToken::Token(token));
@@ -232,9 +236,9 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) ->
});
if let Some(meta) = attr.meta() {
if let (Some(eq), Some(expr)) = (meta.eq_token(), meta.expr()) {
- raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
+ raw_tokens.push(NodeOrToken::Token(make.whitespace(" ")));
raw_tokens.push(NodeOrToken::Token(eq));
- raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
+ raw_tokens.push(NodeOrToken::Token(make.whitespace(" ")));
expr.syntax().descendants_with_tokens().for_each(|it| {
if let NodeOrToken::Token(token) = it {
@@ -245,26 +249,24 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) ->
raw_tokens.extend(tt.token_trees_and_tokens());
}
}
- let meta = make::meta_token_tree(
- make::ext::ident_path("cfg_attr"),
- make::token_tree(T!['('], raw_tokens),
- );
- let cfg_attr = if attr.excl_token().is_some() {
- make::attr_inner(meta)
- } else {
- make::attr_outer(meta)
- }
- .clone_for_update();
- let attr_syntax = edit.make_syntax_mut(attr.syntax().clone());
- ted::replace(attr_syntax, cfg_attr.syntax());
+ let meta =
+ make.meta_token_tree(make.ident_path("cfg_attr"), make.token_tree(T!['('], raw_tokens));
+ let cfg_attr =
+ if attr.excl_token().is_some() { make.attr_inner(meta) } else { make.attr_outer(meta) };
+
+ editor.replace(attr.syntax(), cfg_attr.syntax());
if let Some(snippet_cap) = ctx.config.snippet_cap {
if let Some(first_meta) =
cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
{
- edit.add_tabstop_after_token(snippet_cap, first_meta)
+ let tabstop = edit.make_tabstop_after(snippet_cap);
+ editor.add_annotation(first_meta, tabstop);
}
}
+
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
};
acc.add(
AssistId::refactor("wrap_unwrap_cfg_attr"),
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index 1a91053f93..87a4c2ef75 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -1,5 +1,7 @@
//! Assorted functions shared by several assists.
+use std::slice;
+
pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
use hir::{
DisplayTarget, HasAttrs as HirHasAttrs, HirDisplay, InFile, ModuleDef, PathResolution,
@@ -912,7 +914,7 @@ fn handle_as_ref_str(
) -> Option<(ReferenceConversionType, bool)> {
let str_type = hir::BuiltinType::str().ty(db);
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type.clone()])
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&str_type))
.then_some((ReferenceConversionType::AsRefStr, could_deref_to_target(ty, &str_type, db)))
}
@@ -924,7 +926,7 @@ fn handle_as_ref_slice(
let type_argument = ty.type_arguments().next()?;
let slice_type = hir::Type::new_slice(type_argument);
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type.clone()]).then_some((
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&slice_type)).then_some((
ReferenceConversionType::AsRefSlice,
could_deref_to_target(ty, &slice_type, db),
))
@@ -937,10 +939,11 @@ fn handle_dereferenced(
) -> Option<(ReferenceConversionType, bool)> {
let type_argument = ty.type_arguments().next()?;
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument.clone()]).then_some((
- ReferenceConversionType::Dereferenced,
- could_deref_to_target(ty, &type_argument, db),
- ))
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&type_argument))
+ .then_some((
+ ReferenceConversionType::Dereferenced,
+ could_deref_to_target(ty, &type_argument, db),
+ ))
}
fn handle_option_as_ref(
diff --git a/crates/ide-completion/src/tests/attribute.rs b/crates/ide-completion/src/tests/attribute.rs
index 411902f111..46a3630045 100644
--- a/crates/ide-completion/src/tests/attribute.rs
+++ b/crates/ide-completion/src/tests/attribute.rs
@@ -878,6 +878,7 @@ mod derive {
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de PartialEq macro PartialEq
de PartialEq, Eq
@@ -900,6 +901,7 @@ mod derive {
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de Eq
de Eq, PartialOrd, Ord
@@ -921,6 +923,7 @@ mod derive {
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de Eq
de Eq, PartialOrd, Ord
@@ -942,6 +945,7 @@ mod derive {
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de PartialOrd
de PartialOrd, Ord
diff --git a/crates/ide-db/src/famous_defs.rs b/crates/ide-db/src/famous_defs.rs
index 994150b1ac..8e68738508 100644
--- a/crates/ide-db/src/famous_defs.rs
+++ b/crates/ide-db/src/famous_defs.rs
@@ -106,6 +106,18 @@ impl FamousDefs<'_, '_> {
self.find_trait("core:convert:AsRef")
}
+ pub fn core_convert_AsMut(&self) -> Option<Trait> {
+ self.find_trait("core:convert:AsMut")
+ }
+
+ pub fn core_borrow_Borrow(&self) -> Option<Trait> {
+ self.find_trait("core:borrow:Borrow")
+ }
+
+ pub fn core_borrow_BorrowMut(&self) -> Option<Trait> {
+ self.find_trait("core:borrow:BorrowMut")
+ }
+
pub fn core_ops_ControlFlow(&self) -> Option<Enum> {
self.find_enum("core:ops:ControlFlow")
}
diff --git a/crates/ide-db/src/generated/lints.rs b/crates/ide-db/src/generated/lints.rs
index de8a42979b..f9eb44d03a 100644
--- a/crates/ide-db/src/generated/lints.rs
+++ b/crates/ide-db/src/generated/lints.rs
@@ -4711,9 +4711,9 @@ The tracking issue for this feature is: [#133668]
label: "const_trait_impl",
description: r##"# `const_trait_impl`
-The tracking issue for this feature is: [#67792]
+The tracking issue for this feature is: [#143874]
-[#67792]: https://github.com/rust-lang/rust/issues/67792
+[#143874]: https://github.com/rust-lang/rust/issues/143874
------------------------
"##,
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs
index 5356614dce..e6618573e0 100644
--- a/crates/ide-db/src/prime_caches.rs
+++ b/crates/ide-db/src/prime_caches.rs
@@ -272,5 +272,5 @@ fn crate_name(db: &RootDatabase, krate: Crate) -> Symbol {
.display_name
.as_deref()
.cloned()
- .unwrap_or_else(|| Symbol::integer(salsa::plumbing::AsId::as_id(&krate).as_u32() as usize))
+ .unwrap_or_else(|| Symbol::integer(salsa::plumbing::AsId::as_id(&krate).index() as usize))
}
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 7d460f7249..4efb83ba32 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -317,7 +317,7 @@ impl Definition {
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -332,7 +332,7 @@ impl Definition {
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -341,7 +341,7 @@ impl Definition {
if let Definition::SelfType(impl_) = self {
return match impl_.source(db).map(|src| src.syntax().cloned()) {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -360,7 +360,7 @@ impl Definition {
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs
index 546512a6cf..c39e00e178 100644
--- a/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -242,8 +242,8 @@ macro_rules! outer {
fn f() {
outer!();
-} //^^^^^^^^ error: leftover tokens
- //^^^^^^^^ error: Syntax Error in Expansion: expected expression
+} //^^^^^^ error: leftover tokens
+ //^^^^^^ error: Syntax Error in Expansion: expected expression
"#,
)
}
diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 8a5d82b48c..7da799e0d4 100644
--- a/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -66,7 +66,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
let current_module =
ctx.sema.scope(d.field_list_parent.to_node(&root).syntax()).map(|it| it.module());
let range = InFile::new(d.file, d.field_list_parent.text_range())
- .original_node_file_range_rooted(ctx.sema.db);
+ .original_node_file_range_rooted_opt(ctx.sema.db)?;
let build_text_edit = |new_syntax: &SyntaxNode, old_syntax| {
let edit = {
diff --git a/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs b/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
index 0928262d22..1e80d02926 100644
--- a/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
+++ b/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
@@ -239,4 +239,22 @@ impl S {
"#,
)
}
+
+ #[test]
+ fn regression_20155() {
+ check_diagnostics(
+ r#"
+//- minicore: copy, option
+struct Box(i32);
+fn test() {
+ let b = Some(Box(0));
+ || {
+ if let Some(b) = b {
+ let _move = b;
+ }
+ };
+}
+"#,
+ )
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
index 4327b12dce..fc2648efb4 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -77,6 +77,7 @@ fn quickfix_for_redundant_assoc_item(
redundant_item_def: String,
range: TextRange,
) -> Option<Vec<Assist>> {
+ let file_id = d.file_id.file_id()?;
let add_assoc_item_def = |builder: &mut SourceChangeBuilder| -> Option<()> {
let db = ctx.sema.db;
let root = db.parse_or_expand(d.file_id);
@@ -90,12 +91,14 @@ fn quickfix_for_redundant_assoc_item(
let trait_def = d.trait_.source(db)?.value;
let l_curly = trait_def.assoc_item_list()?.l_curly_token()?.text_range();
let where_to_insert =
- hir::InFile::new(d.file_id, l_curly).original_node_file_range_rooted(db).range;
+ hir::InFile::new(d.file_id, l_curly).original_node_file_range_rooted_opt(db)?;
+ if where_to_insert.file_id != file_id {
+ return None;
+ }
- builder.insert(where_to_insert.end(), redundant_item_def);
+ builder.insert(where_to_insert.range.end(), redundant_item_def);
Some(())
};
- let file_id = d.file_id.file_id()?;
let mut source_change_builder = SourceChangeBuilder::new(file_id.file_id(ctx.sema.db));
add_assoc_item_def(&mut source_change_builder)?;
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index 1f2d671249..dcca85d4db 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -120,8 +120,7 @@ fn assoc_func_fix(
let call = ast::MethodCallExpr::cast(expr.syntax().clone())?;
let range = InFile::new(expr_ptr.file_id, call.syntax().text_range())
- .original_node_file_range_rooted(db)
- .range;
+ .original_node_file_range_rooted_opt(db)?;
let receiver = call.receiver()?;
let receiver_type = &ctx.sema.type_of_expr(&receiver)?.original;
@@ -174,18 +173,16 @@ fn assoc_func_fix(
let assoc_func_call_expr_string = make::expr_call(assoc_func_path, args).to_string();
- let file_id = ctx.sema.original_range_opt(call.receiver()?.syntax())?.file_id;
-
Some(Assist {
id: AssistId::quick_fix("method_call_to_assoc_func_call_fix"),
label: Label::new(format!(
"Use associated func call instead: `{assoc_func_call_expr_string}`"
)),
group: None,
- target: range,
+ target: range.range,
source_change: Some(SourceChange::from_text_edit(
- file_id.file_id(ctx.sema.db),
- TextEdit::replace(range, assoc_func_call_expr_string),
+ range.file_id.file_id(ctx.sema.db),
+ TextEdit::replace(range.range, assoc_func_call_expr_string),
)),
command: None,
})
@@ -300,7 +297,7 @@ macro_rules! m {
}
fn main() {
m!(());
- // ^^^^^^ error: no method `foo` on type `()`
+ // ^^ error: no method `foo` on type `()`
}
"#,
);
diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs
index 4b8d07a253..7a0405939d 100644
--- a/crates/ide/src/call_hierarchy.rs
+++ b/crates/ide/src/call_hierarchy.rs
@@ -592,7 +592,7 @@ macro_rules! call {
"#,
expect!["callee Function FileId(0) 22..37 30..36"],
expect![[r#"
- caller Function FileId(0) 38..52 : FileId(0):44..50
+ caller Function FileId(0) 38..43 : FileId(0):44..50
caller Function FileId(1) 130..136 130..136 : FileId(0):44..50
callee Function FileId(0) 38..52 44..50 : FileId(0):44..50"#]],
expect![[]],
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index 2c983287d8..f58202a421 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -60,7 +60,7 @@ pub(crate) fn rewrite_links(
let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb))
.into_offset_iter();
- let doc = map_links(doc, |target, title, range| {
+ let doc = map_links(doc, |target, title, range, link_type| {
// This check is imperfect, there's some overlap between valid intra-doc links
// and valid URLs so we choose to be too eager to try to resolve what might be
// a URL.
@@ -78,7 +78,7 @@ pub(crate) fn rewrite_links(
.map(|(_, attr_id)| attr_id.is_inner_attr())
.unwrap_or(false);
if let Some((target, title)) =
- rewrite_intra_doc_link(db, definition, target, title, is_inner_doc)
+ rewrite_intra_doc_link(db, definition, target, title, is_inner_doc, link_type)
{
(None, target, title)
} else if let Some(target) = rewrite_url_link(db, definition, target) {
@@ -417,6 +417,7 @@ fn rewrite_intra_doc_link(
target: &str,
title: &str,
is_inner_doc: bool,
+ link_type: LinkType,
) -> Option<(String, String)> {
let (link, ns) = parse_intra_doc_link(target);
@@ -438,7 +439,21 @@ fn rewrite_intra_doc_link(
url = url.join(&file).ok()?;
url.set_fragment(frag);
- Some((url.into(), strip_prefixes_suffixes(title).to_owned()))
+ // We want to strip the keyword prefix from the title, but only if the target is implicitly the same
+ // as the title.
+ let title = match link_type {
+ LinkType::Email
+ | LinkType::Autolink
+ | LinkType::Shortcut
+ | LinkType::Collapsed
+ | LinkType::Reference
+ | LinkType::Inline => title.to_owned(),
+ LinkType::ShortcutUnknown | LinkType::CollapsedUnknown | LinkType::ReferenceUnknown => {
+ strip_prefixes_suffixes(title).to_owned()
+ }
+ };
+
+ Some((url.into(), title))
}
/// Try to resolve path to local documentation via path-based links (i.e. `../gateway/struct.Shard.html`).
@@ -470,7 +485,7 @@ fn mod_path_of_def(db: &RootDatabase, def: Definition) -> Option<String> {
/// Rewrites a markdown document, applying 'callback' to each link.
fn map_links<'e>(
events: impl Iterator<Item = (Event<'e>, Range<usize>)>,
- callback: impl Fn(&str, &str, Range<usize>) -> (Option<LinkType>, String, String),
+ callback: impl Fn(&str, &str, Range<usize>, LinkType) -> (Option<LinkType>, String, String),
) -> impl Iterator<Item = Event<'e>> {
let mut in_link = false;
// holds the origin link target on start event and the rewritten one on end event
@@ -497,7 +512,7 @@ fn map_links<'e>(
}
Event::Text(s) if in_link => {
let (link_type, link_target_s, link_name) =
- callback(&end_link_target.take().unwrap(), &s, range);
+ callback(&end_link_target.take().unwrap(), &s, range, end_link_type.unwrap());
end_link_target = Some(CowStr::Boxed(link_target_s.into()));
if !matches!(end_link_type, Some(LinkType::Autolink)) {
end_link_type = link_type;
@@ -506,7 +521,7 @@ fn map_links<'e>(
}
Event::Code(s) if in_link => {
let (link_type, link_target_s, link_name) =
- callback(&end_link_target.take().unwrap(), &s, range);
+ callback(&end_link_target.take().unwrap(), &s, range, end_link_type.unwrap());
end_link_target = Some(CowStr::Boxed(link_target_s.into()));
if !matches!(end_link_type, Some(LinkType::Autolink)) {
end_link_type = link_type;
diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs
index 347da4e85b..6820f99fac 100644
--- a/crates/ide/src/file_structure.rs
+++ b/crates/ide/src/file_structure.rs
@@ -329,7 +329,7 @@ macro_rules! mcexp {
#[deprecated]
fn obsolete() {}
-#[deprecated(note = "for awhile")]
+#[deprecated(note = "for a while")]
fn very_obsolete() {}
// region: Some region name
@@ -608,8 +608,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "very_obsolete",
- navigation_range: 511..524,
- node_range: 473..529,
+ navigation_range: 512..525,
+ node_range: 473..530,
kind: SymbolKind(
Function,
),
@@ -621,8 +621,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "Some region name",
- navigation_range: 531..558,
- node_range: 531..558,
+ navigation_range: 532..559,
+ node_range: 532..559,
kind: Region,
detail: None,
deprecated: false,
@@ -630,8 +630,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "m",
- navigation_range: 598..599,
- node_range: 573..636,
+ navigation_range: 599..600,
+ node_range: 574..637,
kind: SymbolKind(
Module,
),
@@ -643,8 +643,8 @@ fn let_statements() {
22,
),
label: "dontpanic",
- navigation_range: 573..593,
- node_range: 573..593,
+ navigation_range: 574..594,
+ node_range: 574..594,
kind: Region,
detail: None,
deprecated: false,
@@ -654,8 +654,8 @@ fn let_statements() {
22,
),
label: "f",
- navigation_range: 605..606,
- node_range: 602..611,
+ navigation_range: 606..607,
+ node_range: 603..612,
kind: SymbolKind(
Function,
),
@@ -669,8 +669,8 @@ fn let_statements() {
22,
),
label: "g",
- navigation_range: 628..629,
- node_range: 612..634,
+ navigation_range: 629..630,
+ node_range: 613..635,
kind: SymbolKind(
Function,
),
@@ -682,8 +682,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "extern \"C\"",
- navigation_range: 638..648,
- node_range: 638..651,
+ navigation_range: 639..649,
+ node_range: 639..652,
kind: ExternBlock,
detail: None,
deprecated: false,
@@ -691,8 +691,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "let_statements",
- navigation_range: 656..670,
- node_range: 653..813,
+ navigation_range: 657..671,
+ node_range: 654..814,
kind: SymbolKind(
Function,
),
@@ -706,8 +706,8 @@ fn let_statements() {
27,
),
label: "x",
- navigation_range: 683..684,
- node_range: 679..690,
+ navigation_range: 684..685,
+ node_range: 680..691,
kind: SymbolKind(
Local,
),
@@ -719,8 +719,8 @@ fn let_statements() {
27,
),
label: "mut y",
- navigation_range: 699..704,
- node_range: 695..709,
+ navigation_range: 700..705,
+ node_range: 696..710,
kind: SymbolKind(
Local,
),
@@ -732,8 +732,8 @@ fn let_statements() {
27,
),
label: "Foo { .. }",
- navigation_range: 718..740,
- node_range: 714..753,
+ navigation_range: 719..741,
+ node_range: 715..754,
kind: SymbolKind(
Local,
),
@@ -745,8 +745,8 @@ fn let_statements() {
27,
),
label: "_",
- navigation_range: 803..804,
- node_range: 799..811,
+ navigation_range: 804..805,
+ node_range: 800..812,
kind: SymbolKind(
Local,
),
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index fd465f31d4..29fc68bb50 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -1082,7 +1082,7 @@ macro_rules! define_fn {
}
define_fn!();
-//^^^^^^^^^^^^^
+//^^^^^^^^^^
fn bar() {
$0foo();
}
@@ -3228,7 +3228,7 @@ mod bar {
use crate::m;
m!();
- // ^^^^^
+ // ^^
fn qux() {
Foo$0;
@@ -3851,4 +3851,76 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn goto_const_from_match_pat_with_tuple_struct() {
+ check(
+ r#"
+struct Tag(u8);
+struct Path {}
+
+const Path: u8 = 0;
+ // ^^^^
+fn main() {
+ match Tag(Path) {
+ Tag(Path$0) => {}
+ _ => {}
+ }
+}
+
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_const_from_match_pat() {
+ check(
+ r#"
+type T1 = u8;
+const T1: u8 = 0;
+ // ^^
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ _ => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_struct_from_match_pat() {
+ check(
+ r#"
+struct T1;
+ // ^^
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ _ => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_goto_trait_from_match_pat() {
+ check(
+ r#"
+trait T1 {}
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ // ^^
+ _ => {}
+ }
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index a281a49152..f63499aa0f 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -10927,3 +10927,34 @@ fn main() {
"#]],
);
}
+
+#[test]
+fn keyword_inside_link() {
+ check(
+ r#"
+enum Foo {
+ MacroExpansion,
+}
+
+/// I return a [macro expansion](Foo::MacroExpansion).
+fn bar$0() -> Foo {
+ Foo::MacroExpansion
+}
+ "#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ fn bar() -> Foo
+ ```
+
+ ---
+
+ I return a [macro expansion](https://docs.rs/ra_test_fixture/*/ra_test_fixture/enum.Foo.html#variant.MacroExpansion).
+ "#]],
+ );
+}
diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs
index f2844a2eaa..49b43fc37f 100644
--- a/crates/ide/src/inlay_hints/adjustment.rs
+++ b/crates/ide/src/inlay_hints/adjustment.rs
@@ -109,50 +109,90 @@ pub(super) fn hints(
}
has_adjustments = true;
- // FIXME: Add some nicer tooltips to each of these
- let (text, coercion) = match kind {
+ let (text, coercion, detailed_tooltip) = match kind {
Adjust::NeverToAny if config.adjustment_hints == AdjustmentHints::Always => {
allow_edit = false;
- ("<never-to-any>", "never to any")
- }
- Adjust::Deref(None) => ("*", "dereference"),
- Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => {
- ("*", "`Deref` dereference")
- }
- Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => {
- ("*", "`DerefMut` dereference")
- }
- Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => ("&", "borrow"),
- Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => ("&mut ", "unique borrow"),
- Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => {
- ("&raw const ", "const pointer borrow")
- }
- Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => {
- ("&raw mut ", "mut pointer borrow")
+ (
+ "<never-to-any>",
+ "never to any",
+ "Coerces the never type `!` into any other type. This happens in code paths that never return, like after `panic!()` or `return`.",
+ )
}
+ Adjust::Deref(None) => (
+ "*",
+ "dereference",
+ "Built-in dereference of a reference to access the underlying value. The compiler inserts `*` to get the value from `&T`.",
+ ),
+ Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => (
+ "*",
+ "`Deref` dereference",
+ "Dereference via the `Deref` trait. Used for types like `Box<T>` or `Rc<T>` so they act like plain `T`.",
+ ),
+ Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => (
+ "*",
+ "`DerefMut` dereference",
+ "Mutable dereference using the `DerefMut` trait. Enables smart pointers to give mutable access to their inner values.",
+ ),
+ Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => (
+ "&",
+ "shared borrow",
+ "Inserts `&` to create a shared reference. Lets you use a value without moving or cloning it.",
+ ),
+ Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => (
+ "&mut ",
+ "mutable borrow",
+ "Inserts `&mut` to create a unique, mutable reference. Lets you modify a value without taking ownership.",
+ ),
+ Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => (
+ "&raw const ",
+ "const raw pointer",
+ "Converts a reference to a raw const pointer `*const T`. Often used when working with FFI or unsafe code.",
+ ),
+ Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => (
+ "&raw mut ",
+ "mut raw pointer",
+ "Converts a mutable reference to a raw mutable pointer `*mut T`. Allows mutation in unsafe contexts.",
+ ),
// some of these could be represented via `as` casts, but that's not too nice and
// handling everything as a prefix expr makes the `(` and `)` insertion easier
Adjust::Pointer(cast) if config.adjustment_hints == AdjustmentHints::Always => {
allow_edit = false;
match cast {
- PointerCast::ReifyFnPointer => {
- ("<fn-item-to-fn-pointer>", "fn item to fn pointer")
- }
+ PointerCast::ReifyFnPointer => (
+ "<fn-item-to-fn-pointer>",
+ "fn item to fn pointer",
+ "Converts a named function to a function pointer `fn()`. Useful when passing functions as values.",
+ ),
PointerCast::UnsafeFnPointer => (
"<safe-fn-pointer-to-unsafe-fn-pointer>",
"safe fn pointer to unsafe fn pointer",
+ "Coerces a safe function pointer to an unsafe one. Allows calling it in an unsafe context.",
+ ),
+ PointerCast::ClosureFnPointer(Safety::Unsafe) => (
+ "<closure-to-unsafe-fn-pointer>",
+ "closure to unsafe fn pointer",
+ "Converts a non-capturing closure to an unsafe function pointer. Required for use in `extern` or unsafe APIs.",
+ ),
+ PointerCast::ClosureFnPointer(Safety::Safe) => (
+ "<closure-to-fn-pointer>",
+ "closure to fn pointer",
+ "Converts a non-capturing closure to a function pointer. Lets closures behave like plain functions.",
+ ),
+ PointerCast::MutToConstPointer => (
+ "<mut-ptr-to-const-ptr>",
+ "mut ptr to const ptr",
+ "Coerces `*mut T` to `*const T`. Safe because const pointers restrict what you can do.",
+ ),
+ PointerCast::ArrayToPointer => (
+ "<array-ptr-to-element-ptr>",
+ "array to pointer",
+ "Converts an array to a pointer to its first element. Similar to how arrays decay to pointers in C.",
+ ),
+ PointerCast::Unsize => (
+ "<unsize>",
+ "unsize coercion",
+ "Converts a sized type to an unsized one. Used for things like turning arrays into slices or concrete types into trait objects.",
),
- PointerCast::ClosureFnPointer(Safety::Unsafe) => {
- ("<closure-to-unsafe-fn-pointer>", "closure to unsafe fn pointer")
- }
- PointerCast::ClosureFnPointer(Safety::Safe) => {
- ("<closure-to-fn-pointer>", "closure to fn pointer")
- }
- PointerCast::MutToConstPointer => {
- ("<mut-ptr-to-const-ptr>", "mut ptr to const ptr")
- }
- PointerCast::ArrayToPointer => ("<array-ptr-to-element-ptr>", ""),
- PointerCast::Unsize => ("<unsize>", "unsize"),
}
}
_ => continue,
@@ -162,9 +202,11 @@ pub(super) fn hints(
linked_location: None,
tooltip: Some(config.lazy_tooltip(|| {
InlayTooltip::Markdown(format!(
- "`{}` → `{}` ({coercion} coercion)",
+ "`{}` → `{}`\n\n**{}**\n\n{}",
source.display(sema.db, display_target),
target.display(sema.db, display_target),
+ coercion,
+ detailed_tooltip
))
})),
};
diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs
index d2216e66cc..05253b6794 100644
--- a/crates/ide/src/inlay_hints/closing_brace.rs
+++ b/crates/ide/src/inlay_hints/closing_brace.rs
@@ -91,8 +91,6 @@ pub(super) fn hints(
match_ast! {
match parent {
ast::Fn(it) => {
- // FIXME: this could include parameters, but `HirDisplay` prints too much info
- // and doesn't respect the max length either, so the hints end up way too long
(format!("fn {}", it.name()?), it.name().map(name))
},
ast::Static(it) => (format!("static {}", it.name()?), it.name().map(name)),
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs
index 4c7c597e68..7dc18141bd 100644
--- a/crates/ide/src/navigation_target.rs
+++ b/crates/ide/src/navigation_target.rs
@@ -844,7 +844,7 @@ pub(crate) fn orig_range_with_focus_r(
// *should* contain the name
_ => {
let kind = call_kind();
- let range = kind.clone().original_call_range_with_body(db);
+ let range = kind.clone().original_call_range_with_input(db);
//If the focus range is in the attribute/derive body, we
// need to point the call site to the entire body, if not, fall back
// to the name range of the attribute/derive call
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index f48150b369..9d1a5bae96 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -351,7 +351,7 @@ pub(crate) fn runnable_fn(
)
.call_site();
- let file_range = fn_source.syntax().original_file_range_with_macro_call_body(sema.db);
+ let file_range = fn_source.syntax().original_file_range_with_macro_call_input(sema.db);
let update_test =
UpdateTest::find_snapshot_macro(sema, &fn_source.file_syntax(sema.db), file_range);
@@ -425,7 +425,7 @@ pub(crate) fn runnable_impl(
let impl_source = sema.source(*def)?;
let impl_syntax = impl_source.syntax();
- let file_range = impl_syntax.original_file_range_with_macro_call_body(sema.db);
+ let file_range = impl_syntax.original_file_range_with_macro_call_input(sema.db);
let update_test =
UpdateTest::find_snapshot_macro(sema, &impl_syntax.file_syntax(sema.db), file_range);
@@ -1241,10 +1241,10 @@ generate_main!();
[
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..345, name: \"\", kind: Module })",
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 282..312, focus_range: 286..291, name: \"tests\", kind: Module, description: \"mod tests\" })",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..310, name: \"foo_test\", kind: Function })",
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..326, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 313..326, name: \"foo_test2\", kind: Function }, true)",
- "(Bin, NavigationTarget { file_id: FileId(0), full_range: 327..344, name: \"main\", kind: Function })",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..307, name: \"foo_test\", kind: Function })",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"foo_test2\", kind: Function }, true)",
+ "(Bin, NavigationTarget { file_id: FileId(0), full_range: 327..341, name: \"main\", kind: Function })",
]
"#]],
);
@@ -1272,10 +1272,10 @@ foo!();
"#,
expect![[r#"
[
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo0\", kind: Function }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo1\", kind: Function }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo2\", kind: Function }, true)",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo0\", kind: Function }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo1\", kind: Function }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo2\", kind: Function }, true)",
]
"#]],
);
diff --git a/crates/ide/src/view_crate_graph.rs b/crates/ide/src/view_crate_graph.rs
index 7985279679..25deffe10e 100644
--- a/crates/ide/src/view_crate_graph.rs
+++ b/crates/ide/src/view_crate_graph.rs
@@ -79,7 +79,7 @@ impl<'a> dot::Labeller<'a, Crate, Edge<'a>> for DotCrateGraph<'_> {
}
fn node_id(&'a self, n: &Crate) -> Id<'a> {
- let id = n.as_id().as_u32();
+ let id = n.as_id().index();
Id::new(format!("_{id:?}")).unwrap()
}
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 52f59679b5..26ee698af0 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -11,7 +11,7 @@ use hir_expand::proc_macro::{
};
use ide_db::{
ChangeWithProcMacros, FxHashMap, RootDatabase,
- base_db::{CrateGraphBuilder, Env, SourceRoot, SourceRootId},
+ base_db::{CrateGraphBuilder, Env, ProcMacroLoadingError, SourceRoot, SourceRootId},
prime_caches,
};
use itertools::Itertools;
@@ -69,6 +69,23 @@ pub fn load_workspace(
extra_env: &FxHashMap<String, Option<String>>,
load_config: &LoadCargoConfig,
) -> anyhow::Result<(RootDatabase, vfs::Vfs, Option<ProcMacroClient>)> {
+ let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<u16>().ok());
+ let mut db = RootDatabase::new(lru_cap);
+
+ let (vfs, proc_macro_server) = load_workspace_into_db(ws, extra_env, load_config, &mut db)?;
+
+ Ok((db, vfs, proc_macro_server))
+}
+
+// This variant of `load_workspace` loads a workspace into an existing database,
+// which is useful in certain third-party scenarios, now that `salsa` supports
+// extending foreign databases (e.g. `RootDatabase`).
+pub fn load_workspace_into_db(
+ ws: ProjectWorkspace,
+ extra_env: &FxHashMap<String, Option<String>>,
+ load_config: &LoadCargoConfig,
+ db: &mut RootDatabase,
+) -> anyhow::Result<(vfs::Vfs, Option<ProcMacroClient>)> {
let (sender, receiver) = unbounded();
let mut vfs = vfs::Vfs::default();
let mut loader = {
@@ -78,23 +95,27 @@ pub fn load_workspace(
tracing::debug!(?load_config, "LoadCargoConfig");
let proc_macro_server = match &load_config.with_proc_macro_server {
- ProcMacroServerChoice::Sysroot => ws
- .find_sysroot_proc_macro_srv()
- .and_then(|it| ProcMacroClient::spawn(&it, extra_env).map_err(Into::into))
- .map_err(|e| (e, true)),
+ ProcMacroServerChoice::Sysroot => ws.find_sysroot_proc_macro_srv().map(|it| {
+ it.and_then(|it| ProcMacroClient::spawn(&it, extra_env).map_err(Into::into)).map_err(
+ |e| ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str()),
+ )
+ }),
ProcMacroServerChoice::Explicit(path) => {
- ProcMacroClient::spawn(path, extra_env).map_err(Into::into).map_err(|e| (e, true))
- }
- ProcMacroServerChoice::None => {
- Err((anyhow::format_err!("proc macro server disabled"), false))
+ Some(ProcMacroClient::spawn(path, extra_env).map_err(|e| {
+ ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str())
+ }))
}
+ ProcMacroServerChoice::None => Some(Err(ProcMacroLoadingError::Disabled)),
};
match &proc_macro_server {
- Ok(server) => {
- tracing::info!(path=%server.server_path(), "Proc-macro server started")
+ Some(Ok(server)) => {
+ tracing::info!(manifest=%ws.manifest_or_root(), path=%server.server_path(), "Proc-macro server started")
}
- Err((e, _)) => {
- tracing::info!(%e, "Failed to start proc-macro server")
+ Some(Err(e)) => {
+ tracing::info!(manifest=%ws.manifest_or_root(), %e, "Failed to start proc-macro server")
+ }
+ None => {
+ tracing::info!(manifest=%ws.manifest_or_root(), "No proc-macro server started")
}
}
@@ -111,22 +132,24 @@ pub fn load_workspace(
);
let proc_macros = {
let proc_macro_server = match &proc_macro_server {
- Ok(it) => Ok(it),
- Err((e, hard_err)) => Err((e.to_string(), *hard_err)),
+ Some(Ok(it)) => Ok(it),
+ Some(Err(e)) => {
+ Err(ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str()))
+ }
+ None => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ "proc-macro-srv is not running, workspace is missing a sysroot".into(),
+ )),
};
proc_macros
.into_iter()
.map(|(crate_id, path)| {
(
crate_id,
- path.map_or_else(
- |e| Err((e, true)),
- |(_, path)| {
- proc_macro_server.as_ref().map_err(Clone::clone).and_then(
- |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
- )
- },
- ),
+ path.map_or_else(Err, |(_, path)| {
+ proc_macro_server.as_ref().map_err(Clone::clone).and_then(
+ |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
+ )
+ }),
)
})
.collect()
@@ -139,18 +162,20 @@ pub fn load_workspace(
version: 0,
});
- let db = load_crate_graph(
+ load_crate_graph_into_db(
crate_graph,
proc_macros,
project_folders.source_root_config,
&mut vfs,
&receiver,
+ db,
);
if load_config.prefill_caches {
- prime_caches::parallel_prime_caches(&db, 1, &|_| ());
+ prime_caches::parallel_prime_caches(db, 1, &|_| ());
}
- Ok((db, vfs, proc_macro_server.ok()))
+
+ Ok((vfs, proc_macro_server.and_then(Result::ok)))
}
#[derive(Default)]
@@ -391,11 +416,13 @@ pub fn load_proc_macro(
path: &AbsPath,
ignored_macros: &[Box<str>],
) -> ProcMacroLoadResult {
- let res: Result<Vec<_>, String> = (|| {
+ let res: Result<Vec<_>, _> = (|| {
let dylib = MacroDylib::new(path.to_path_buf());
- let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
+ let vec = server.load_dylib(dylib).map_err(|e| {
+ ProcMacroLoadingError::ProcMacroSrvError(format!("{e}").into_boxed_str())
+ })?;
if vec.is_empty() {
- return Err("proc macro library returned no proc macros".to_owned());
+ return Err(ProcMacroLoadingError::NoProcMacros);
}
Ok(vec
.into_iter()
@@ -412,20 +439,19 @@ pub fn load_proc_macro(
}
Err(e) => {
tracing::warn!("proc-macro loading for {path} failed: {e}");
- Err((e, true))
+ Err(e)
}
}
}
-fn load_crate_graph(
+fn load_crate_graph_into_db(
crate_graph: CrateGraphBuilder,
proc_macros: ProcMacrosBuilder,
source_root_config: SourceRootConfig,
vfs: &mut vfs::Vfs,
receiver: &Receiver<vfs::loader::Message>,
-) -> RootDatabase {
- let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<u16>().ok());
- let mut db = RootDatabase::new(lru_cap);
+ db: &mut RootDatabase,
+) {
let mut analysis_change = ChangeWithProcMacros::default();
db.enable_proc_attr_macros();
@@ -462,7 +488,6 @@ fn load_crate_graph(
analysis_change.set_proc_macros(proc_macros);
db.apply_change(analysis_change);
- db
}
fn expander_to_proc_macro(
diff --git a/crates/parser/src/lexed_str.rs b/crates/parser/src/lexed_str.rs
index e6c92dec68..bff9acd78f 100644
--- a/crates/parser/src/lexed_str.rs
+++ b/crates/parser/src/lexed_str.rs
@@ -11,8 +11,8 @@
use std::ops;
use rustc_literal_escaper::{
- EscapeError, Mode, unescape_byte, unescape_byte_str, unescape_c_str, unescape_char,
- unescape_str,
+ unescape_byte, unescape_byte_str, unescape_c_str, unescape_char, unescape_str, EscapeError,
+ Mode,
};
use crate::{
@@ -44,7 +44,9 @@ impl<'a> LexedStr<'a> {
// Re-create the tokenizer from scratch every token because `GuardedStrPrefix` is one token in the lexer
// but we want to split it to two in edition <2024.
- while let Some(token) = rustc_lexer::tokenize(&text[conv.offset..]).next() {
+ while let Some(token) =
+ rustc_lexer::tokenize(&text[conv.offset..], rustc_lexer::FrontmatterAllowed::No).next()
+ {
let token_text = &text[conv.offset..][..token.len as usize];
conv.extend_token(&token.kind, token_text);
@@ -58,7 +60,7 @@ impl<'a> LexedStr<'a> {
return None;
}
- let token = rustc_lexer::tokenize(text).next()?;
+ let token = rustc_lexer::tokenize(text, rustc_lexer::FrontmatterAllowed::No).next()?;
if token.len as usize != text.len() {
return None;
}
diff --git a/crates/proc-macro-srv/src/server_impl.rs b/crates/proc-macro-srv/src/server_impl.rs
index dd576f23ae..662f625764 100644
--- a/crates/proc-macro-srv/src/server_impl.rs
+++ b/crates/proc-macro-srv/src/server_impl.rs
@@ -121,7 +121,7 @@ pub(super) fn literal_from_str<Span: Copy>(
use proc_macro::bridge::LitKind;
use rustc_lexer::{LiteralKind, Token, TokenKind};
- let mut tokens = rustc_lexer::tokenize(s);
+ let mut tokens = rustc_lexer::tokenize(s, rustc_lexer::FrontmatterAllowed::No);
let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
let lit = if minus_or_lit.kind == TokenKind::Minus {
diff --git a/crates/project-model/src/build_dependencies.rs b/crates/project-model/src/build_dependencies.rs
index bbaa8f4f29..499caa622c 100644
--- a/crates/project-model/src/build_dependencies.rs
+++ b/crates/project-model/src/build_dependencies.rs
@@ -312,7 +312,9 @@ impl WorkspaceBuildScripts {
match message {
Message::BuildScriptExecuted(mut message) => {
with_output_for(&message.package_id.repr, &mut |name, data| {
- progress(format!("running build-script: {name}"));
+ progress(format!(
+ "building compile-time-deps: build script {name} run"
+ ));
let cfgs = {
let mut acc = Vec::new();
for cfg in &message.cfgs {
@@ -343,7 +345,9 @@ impl WorkspaceBuildScripts {
}
Message::CompilerArtifact(message) => {
with_output_for(&message.package_id.repr, &mut |name, data| {
- progress(format!("building proc-macros: {name}"));
+ progress(format!(
+ "building compile-time-deps: proc-macro {name} built"
+ ));
if message.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
{
// Skip rmeta file
@@ -409,13 +413,6 @@ impl WorkspaceBuildScripts {
cmd.arg("--target-dir").arg(target_dir);
}
- // --all-targets includes tests, benches and examples in addition to the
- // default lib and bins. This is an independent concept from the --target
- // flag below.
- if config.all_targets {
- cmd.arg("--all-targets");
- }
-
if let Some(target) = &config.target {
cmd.args(["--target", target]);
}
@@ -463,14 +460,26 @@ impl WorkspaceBuildScripts {
cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
cmd.arg("-Zunstable-options");
cmd.arg("--compile-time-deps");
- } else if config.wrap_rustc_in_build_scripts {
- // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
- // that to compile only proc macros and build scripts during the initial
- // `cargo check`.
- // We don't need this if we are using `--compile-time-deps` flag.
- let myself = std::env::current_exe()?;
- cmd.env("RUSTC_WRAPPER", myself);
- cmd.env("RA_RUSTC_WRAPPER", "1");
+ // we can pass this unconditionally, because we won't actually build the
+ // binaries, and as such, this will succeed even on targets without libtest
+ cmd.arg("--all-targets");
+ } else {
+ // --all-targets includes tests, benches and examples in addition to the
+ // default lib and bins. This is an independent concept from the --target
+ // flag below.
+ if config.all_targets {
+ cmd.arg("--all-targets");
+ }
+
+ if config.wrap_rustc_in_build_scripts {
+ // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
+ // that to compile only proc macros and build scripts during the initial
+ // `cargo check`.
+ // We don't need this if we are using `--compile-time-deps` flag.
+ let myself = std::env::current_exe()?;
+ cmd.env("RUSTC_WRAPPER", myself);
+ cmd.env("RA_RUSTC_WRAPPER", "1");
+ }
}
Ok(cmd)
}
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index 4b34fc0071..9f19260d30 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -163,18 +163,18 @@ impl Sysroot {
}
}
- pub fn discover_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
- let Some(root) = self.root() else {
- return Err(anyhow::format_err!("no sysroot",));
- };
- ["libexec", "lib"]
- .into_iter()
- .map(|segment| root.join(segment).join("rust-analyzer-proc-macro-srv"))
- .find_map(|server_path| probe_for_binary(server_path.into()))
- .map(AbsPathBuf::assert)
- .ok_or_else(|| {
- anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", root)
- })
+ pub fn discover_proc_macro_srv(&self) -> Option<anyhow::Result<AbsPathBuf>> {
+ let root = self.root()?;
+ Some(
+ ["libexec", "lib"]
+ .into_iter()
+ .map(|segment| root.join(segment).join("rust-analyzer-proc-macro-srv"))
+ .find_map(|server_path| probe_for_binary(server_path.into()))
+ .map(AbsPathBuf::assert)
+ .ok_or_else(|| {
+ anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", root)
+ }),
+ )
}
fn assemble(
@@ -209,6 +209,7 @@ impl Sysroot {
pub fn load_workspace(
&self,
sysroot_source_config: &RustSourceWorkspaceConfig,
+ no_deps: bool,
current_dir: &AbsPath,
progress: &dyn Fn(String),
) -> Option<RustLibSrcWorkspace> {
@@ -224,6 +225,7 @@ impl Sysroot {
&library_manifest,
current_dir,
cargo_config,
+ no_deps,
progress,
) {
Ok(loaded) => return Some(loaded),
@@ -318,6 +320,7 @@ impl Sysroot {
library_manifest: &ManifestPath,
current_dir: &AbsPath,
cargo_config: &CargoMetadataConfig,
+ no_deps: bool,
progress: &dyn Fn(String),
) -> Result<RustLibSrcWorkspace> {
tracing::debug!("Loading library metadata: {library_manifest}");
@@ -333,7 +336,7 @@ impl Sysroot {
current_dir,
&cargo_config,
self,
- false,
+ no_deps,
// Make sure we never attempt to write to the sysroot
true,
progress,
diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs
index 4f11af2d06..f229e9a650 100644
--- a/crates/project-model/src/tests.rs
+++ b/crates/project-model/src/tests.rs
@@ -240,7 +240,7 @@ fn smoke_test_real_sysroot_cargo() {
let cwd = AbsPathBuf::assert_utf8(temp_dir().join("smoke_test_real_sysroot_cargo"));
std::fs::create_dir_all(&cwd).unwrap();
let loaded_sysroot =
- sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo(), &cwd, &|_| ());
+ sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo(), false, &cwd, &|_| ());
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index 5bc64df535..43db84b4fa 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -7,8 +7,8 @@ use std::{collections::VecDeque, fmt, fs, iter, ops::Deref, sync, thread};
use anyhow::Context;
use base_db::{
CrateBuilderId, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin,
- CrateWorkspaceData, DependencyBuilder, Env, LangCrateOrigin, ProcMacroPaths,
- TargetLayoutLoadResult,
+ CrateWorkspaceData, DependencyBuilder, Env, LangCrateOrigin, ProcMacroLoadingError,
+ ProcMacroPaths, TargetLayoutLoadResult,
};
use cfg::{CfgAtom, CfgDiff, CfgOptions};
use intern::{Symbol, sym};
@@ -391,6 +391,7 @@ impl ProjectWorkspace {
toolchain.clone(),
target_dir.clone(),
)),
+ config.no_deps,
workspace_dir,
progress,
)
@@ -499,6 +500,7 @@ impl ProjectWorkspace {
if let Some(sysroot_project) = sysroot_project {
sysroot.load_workspace(
&RustSourceWorkspaceConfig::Json(*sysroot_project),
+ config.no_deps,
project_root,
progress,
)
@@ -510,6 +512,7 @@ impl ProjectWorkspace {
toolchain.clone(),
target_dir,
)),
+ config.no_deps,
project_root,
progress,
)
@@ -570,6 +573,7 @@ impl ProjectWorkspace {
toolchain.clone(),
target_dir.clone(),
)),
+ config.no_deps,
dir,
&|_| (),
);
@@ -744,7 +748,7 @@ impl ProjectWorkspace {
}
}
- pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
+ pub fn find_sysroot_proc_macro_srv(&self) -> Option<anyhow::Result<AbsPathBuf>> {
self.sysroot.discover_proc_macro_srv()
}
@@ -1641,11 +1645,11 @@ fn add_target_crate_root(
Some((BuildScriptOutput { proc_macro_dylib_path, .. }, has_errors)) => {
match proc_macro_dylib_path {
Some(path) => Ok((cargo_name.to_owned(), path.clone())),
- None if has_errors => Err("failed to build proc-macro".to_owned()),
- None => Err("proc-macro crate build data is missing dylib path".to_owned()),
+ None if has_errors => Err(ProcMacroLoadingError::FailedToBuild),
+ None => Err(ProcMacroLoadingError::MissingDylibPath),
}
}
- None => Err("build scripts have not been built".to_owned()),
+ None => Err(ProcMacroLoadingError::NotYetBuilt),
};
proc_macros.insert(crate_id, proc_macro);
}
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index 740fcd81ea..f97bf83244 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -75,8 +75,12 @@ impl Tester {
};
let mut sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env);
- let loaded_sysroot =
- sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo(), &path, &|_| ());
+ let loaded_sysroot = sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::default_cargo(),
+ false,
+ &path,
+ &|_| (),
+ );
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index d258c5d819..37f83f6dee 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -25,7 +25,7 @@ impl flags::Scip {
eprintln!("Generating SCIP start...");
let now = Instant::now();
- let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}"));
+ let no_progress = &|s| eprintln!("rust-analyzer: Loading {s}");
let root =
vfs::AbsPathBuf::assert_utf8(std::env::current_dir()?.join(&self.path)).normalize();
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index e716d14075..51d4c29aa7 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -1526,7 +1526,7 @@ impl Config {
CompletionConfig {
enable_postfix_completions: self.completion_postfix_enable(source_root).to_owned(),
enable_imports_on_the_fly: self.completion_autoimport_enable(source_root).to_owned()
- && self.caps.completion_item_edit_resolve(),
+ && self.caps.has_completion_item_resolve_additionalTextEdits(),
enable_self_on_the_fly: self.completion_autoself_enable(source_root).to_owned(),
enable_auto_iter: *self.completion_autoIter_enable(source_root),
enable_auto_await: *self.completion_autoAwait_enable(source_root),
@@ -2355,10 +2355,6 @@ impl Config {
.and_then(|it| it.version.as_ref())
}
- pub fn client_is_helix(&self) -> bool {
- self.client_info.as_ref().map(|it| it.name == "helix").unwrap_or_default()
- }
-
pub fn client_is_neovim(&self) -> bool {
self.client_info.as_ref().map(|it| it.name == "Neovim").unwrap_or_default()
}
diff --git a/crates/rust-analyzer/src/flycheck.rs b/crates/rust-analyzer/src/flycheck.rs
index 0e418240db..91d37bd7c9 100644
--- a/crates/rust-analyzer/src/flycheck.rs
+++ b/crates/rust-analyzer/src/flycheck.rs
@@ -6,6 +6,7 @@ use std::{fmt, io, process::Command, time::Duration};
use cargo_metadata::PackageId;
use crossbeam_channel::{Receiver, Sender, select_biased, unbounded};
use ide_db::FxHashSet;
+use itertools::Itertools;
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::FxHashMap;
use serde::Deserialize as _;
@@ -379,7 +380,11 @@ impl FlycheckActor {
package_id = msg.package_id.repr,
"artifact received"
);
- self.report_progress(Progress::DidCheckCrate(msg.target.name));
+ self.report_progress(Progress::DidCheckCrate(format!(
+ "{} ({})",
+ msg.target.name,
+ msg.target.kind.iter().format_with(", ", |kind, f| f(&kind)),
+ )));
let package_id = Arc::new(msg.package_id);
if self.diagnostics_cleared_for.insert(package_id.clone()) {
tracing::trace!(
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index a870232d4a..62a28a1a68 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -101,7 +101,7 @@ pub(crate) struct GlobalState {
pub(crate) last_reported_status: lsp_ext::ServerStatusParams,
// proc macros
- pub(crate) proc_macro_clients: Arc<[anyhow::Result<ProcMacroClient>]>,
+ pub(crate) proc_macro_clients: Arc<[Option<anyhow::Result<ProcMacroClient>>]>,
pub(crate) build_deps_changed: bool,
// Flycheck
diff --git a/crates/rust-analyzer/src/handlers/dispatch.rs b/crates/rust-analyzer/src/handlers/dispatch.rs
index 40d05567fc..aea116e647 100644
--- a/crates/rust-analyzer/src/handlers/dispatch.rs
+++ b/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -6,7 +6,7 @@ use std::{
use ide_db::base_db::{
DbPanicContext,
- salsa::{self, Cancelled, UnexpectedCycle},
+ salsa::{self, Cancelled},
};
use lsp_server::{ExtractError, Response, ResponseError};
use serde::{Serialize, de::DeserializeOwned};
@@ -350,9 +350,6 @@ where
if let Some(panic_message) = panic_message {
message.push_str(": ");
message.push_str(panic_message);
- } else if let Some(cycle) = panic.downcast_ref::<UnexpectedCycle>() {
- tracing::error!("{cycle}");
- message.push_str(": unexpected cycle");
} else if let Ok(cancelled) = panic.downcast::<Cancelled>() {
tracing::error!("Cancellation propagated out of salsa! This is a bug");
return Err(HandlerCancelledError::Inner(*cancelled));
diff --git a/crates/rust-analyzer/src/lsp/capabilities.rs b/crates/rust-analyzer/src/lsp/capabilities.rs
index 04e31f37fd..f94e7486ff 100644
--- a/crates/rust-analyzer/src/lsp/capabilities.rs
+++ b/crates/rust-analyzer/src/lsp/capabilities.rs
@@ -42,7 +42,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities {
hover_provider: Some(HoverProviderCapability::Simple(true)),
completion_provider: Some(CompletionOptions {
resolve_provider: if config.client_is_neovim() {
- config.completion_item_edit_resolve().then_some(true)
+ config.has_completion_item_resolve_additionalTextEdits().then_some(true)
} else {
Some(config.caps().completions_resolve_provider())
},
@@ -207,8 +207,8 @@ impl ClientCapabilities {
serde_json::from_value(self.0.experimental.as_ref()?.get(index)?.clone()).ok()
}
- /// Parses client capabilities and returns all completion resolve capabilities rust-analyzer supports.
- pub fn completion_item_edit_resolve(&self) -> bool {
+ #[allow(non_snake_case)]
+ pub fn has_completion_item_resolve_additionalTextEdits(&self) -> bool {
(|| {
Some(
self.0
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 0c0438c4b8..00cf890510 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -783,9 +783,14 @@ impl GlobalState {
DiscoverProjectParam::Path(it) => DiscoverArgument::Path(it),
};
- let handle =
- discover.spawn(arg, &std::env::current_dir().unwrap()).unwrap();
- self.discover_handle = Some(handle);
+ let handle = discover.spawn(
+ arg,
+ &std::env::current_dir()
+ .expect("Failed to get cwd during project discovery"),
+ );
+ self.discover_handle = Some(handle.unwrap_or_else(|e| {
+ panic!("Failed to spawn project discovery command: {e}")
+ }));
}
}
}
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 133d5a6cf7..e798aa6a8a 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -18,7 +18,7 @@ use std::{iter, mem};
use hir::{ChangeWithProcMacros, ProcMacrosBuilder, db::DefDatabase};
use ide_db::{
FxHashMap,
- base_db::{CrateGraphBuilder, ProcMacroPaths, salsa::Durability},
+ base_db::{CrateGraphBuilder, ProcMacroLoadingError, ProcMacroPaths, salsa::Durability},
};
use itertools::Itertools;
use load_cargo::{ProjectFolders, load_proc_macro};
@@ -194,8 +194,7 @@ impl GlobalState {
format_to!(message, "{e}");
});
- let proc_macro_clients =
- self.proc_macro_clients.iter().map(Some).chain(iter::repeat_with(|| None));
+ let proc_macro_clients = self.proc_macro_clients.iter().chain(iter::repeat(&None));
for (ws, proc_macro_client) in self.workspaces.iter().zip(proc_macro_clients) {
if let ProjectWorkspaceKind::Cargo { error: Some(error), .. }
@@ -252,7 +251,8 @@ impl GlobalState {
message.push_str("\n\n");
}
}
- _ => (),
+ // sysroot was explicitly not set so we didn't discover a server
+ None => {}
}
}
}
@@ -419,14 +419,11 @@ impl GlobalState {
};
let mut builder = ProcMacrosBuilder::default();
- let proc_macro_clients = proc_macro_clients
- .iter()
- .map(|res| res.as_ref().map_err(|e| e.to_string()))
- .chain(iter::repeat_with(|| Err("proc-macro-srv is not running".into())));
+ let proc_macro_clients = proc_macro_clients.iter().chain(iter::repeat(&None));
for (client, paths) in proc_macro_clients.zip(paths) {
for (crate_id, res) in paths.iter() {
let expansion_res = match client {
- Ok(client) => match res {
+ Some(Ok(client)) => match res {
Ok((crate_name, path)) => {
progress(format!("loading proc-macros: {path}"));
let ignored_proc_macros = ignored_proc_macros
@@ -438,9 +435,14 @@ impl GlobalState {
load_proc_macro(client, path, ignored_proc_macros)
}
- Err(e) => Err((e.clone(), true)),
+ Err(e) => Err(e.clone()),
},
- Err(ref e) => Err((e.clone(), true)),
+ Some(Err(e)) => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ e.to_string().into_boxed_str(),
+ )),
+ None => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ "proc-macro-srv is not running".into(),
+ )),
};
builder.insert(*crate_id, expansion_res)
}
@@ -655,7 +657,10 @@ impl GlobalState {
self.proc_macro_clients = Arc::from_iter(self.workspaces.iter().map(|ws| {
let path = match self.config.proc_macro_srv() {
Some(path) => path,
- None => ws.find_sysroot_proc_macro_srv()?,
+ None => match ws.find_sysroot_proc_macro_srv()? {
+ Ok(path) => path,
+ Err(e) => return Some(Err(e)),
+ },
};
let env: FxHashMap<_, _> = match &ws.kind {
@@ -682,14 +687,14 @@ impl GlobalState {
};
info!("Using proc-macro server at {path}");
- ProcMacroClient::spawn(&path, &env).map_err(|err| {
+ Some(ProcMacroClient::spawn(&path, &env).map_err(|err| {
tracing::error!(
"Failed to run proc-macro server from path {path}, error: {err:?}",
);
anyhow::format_err!(
"Failed to run proc-macro server from path {path}, error: {err:?}",
)
- })
+ }))
}))
}
@@ -753,14 +758,14 @@ impl GlobalState {
change.set_proc_macros(
crate_graph
.iter()
- .map(|id| (id, Err(("proc-macro has not been built yet".to_owned(), true))))
+ .map(|id| (id, Err(ProcMacroLoadingError::NotYetBuilt)))
.collect(),
);
} else {
change.set_proc_macros(
crate_graph
.iter()
- .map(|id| (id, Err(("proc-macro expansion is disabled".to_owned(), false))))
+ .map(|id| (id, Err(ProcMacroLoadingError::Disabled)))
.collect(),
);
}
diff --git a/crates/span/src/ast_id.rs b/crates/span/src/ast_id.rs
index 8e95971198..121d2e3324 100644
--- a/crates/span/src/ast_id.rs
+++ b/crates/span/src/ast_id.rs
@@ -107,9 +107,10 @@ impl fmt::Debug for ErasedFileAstId {
}
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
+#[repr(u8)]
enum ErasedFileAstIdKind {
/// This needs to not change because it's depended upon by the proc macro server.
- Fixup,
+ Fixup = 0,
// The following are associated with `ErasedHasNameFileAstId`.
Enum,
Struct,
@@ -413,9 +414,9 @@ impl ErasedAstIdNextIndexMap {
}
macro_rules! register_enum_ast_id {
- (impl AstIdNode for $($ident:ident),+ ) => {
+ (impl $AstIdNode:ident for $($ident:ident),+ ) => {
$(
- impl AstIdNode for ast::$ident {}
+ impl $AstIdNode for ast::$ident {}
)+
};
}
@@ -426,9 +427,9 @@ register_enum_ast_id! {
}
macro_rules! register_has_name_ast_id {
- (impl AstIdNode for $($ident:ident = $name_method:ident),+ ) => {
+ (impl $AstIdNode:ident for $($ident:ident = $name_method:ident),+ ) => {
$(
- impl AstIdNode for ast::$ident {}
+ impl $AstIdNode for ast::$ident {}
)+
fn has_name_ast_id(node: &SyntaxNode, index_map: &mut ErasedAstIdNextIndexMap) -> Option<ErasedFileAstId> {
@@ -472,9 +473,9 @@ register_has_name_ast_id! {
}
macro_rules! register_assoc_item_ast_id {
- (impl AstIdNode for $($ident:ident = $name_callback:expr),+ ) => {
+ (impl $AstIdNode:ident for $($ident:ident = $name_callback:expr),+ ) => {
$(
- impl AstIdNode for ast::$ident {}
+ impl $AstIdNode for ast::$ident {}
)+
fn assoc_item_ast_id(
diff --git a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs
index 7bb88ac365..aef3fbf051 100644
--- a/crates/span/src/hygiene.rs
+++ b/crates/span/src/hygiene.rs
@@ -97,6 +97,7 @@ const _: () = {
const LOCATION: salsa::plumbing::Location =
salsa::plumbing::Location { file: file!(), line: line!() };
const DEBUG_NAME: &'static str = "SyntaxContextData";
+ const REVISIONS: std::num::NonZeroUsize = std::num::NonZeroUsize::MAX;
type Fields<'a> = SyntaxContextData;
type Struct<'a> = SyntaxContext;
}
@@ -108,7 +109,9 @@ const _: () = {
static CACHE: zalsa_::IngredientCache<zalsa_struct_::IngredientImpl<SyntaxContext>> =
zalsa_::IngredientCache::new();
CACHE.get_or_create(db.zalsa(), || {
- db.zalsa().add_or_lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ db.zalsa()
+ .lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ .get_or_create()
})
}
}
@@ -130,9 +133,12 @@ const _: () = {
type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex;
fn lookup_or_create_ingredient_index(
- aux: &salsa::plumbing::Zalsa,
+ zalsa: &salsa::plumbing::Zalsa,
) -> salsa::plumbing::IngredientIndices {
- aux.add_or_lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>().into()
+ zalsa
+ .lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ .get_or_create()
+ .into()
}
#[inline]
@@ -326,14 +332,14 @@ impl<'db> SyntaxContext {
None
} else {
// SAFETY: By our invariant, this is either a root (which we verified it's not) or a valid `salsa::Id`.
- unsafe { Some(salsa::Id::from_u32(self.0)) }
+ unsafe { Some(salsa::Id::from_index(self.0)) }
}
}
#[inline]
fn from_salsa_id(id: salsa::Id) -> Self {
// SAFETY: This comes from a Salsa ID.
- unsafe { Self::from_u32(id.as_u32()) }
+ unsafe { Self::from_u32(id.index()) }
}
#[inline]
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index 955aadaa25..309332873c 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -842,9 +842,10 @@ pub fn ref_pat(pat: ast::Pat) -> ast::RefPat {
}
pub fn match_arm(pat: ast::Pat, guard: Option<ast::MatchGuard>, expr: ast::Expr) -> ast::MatchArm {
+ let comma_str = if expr.is_block_like() { "" } else { "," };
return match guard {
- Some(guard) => from_text(&format!("{pat} {guard} => {expr}")),
- None => from_text(&format!("{pat} => {expr}")),
+ Some(guard) => from_text(&format!("{pat} {guard} => {expr}{comma_str}")),
+ None => from_text(&format!("{pat} => {expr}{comma_str}")),
};
fn from_text(text: &str) -> ast::MatchArm {
@@ -877,7 +878,7 @@ pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::Mat
let arms_str = arms.into_iter().fold(String::new(), |mut acc, arm| {
let needs_comma =
arm.comma_token().is_none() && arm.expr().is_none_or(|it| !it.is_block_like());
- let comma = if needs_comma { "," } else { "" };
+ let comma = if needs_comma && arm.comma_token().is_none() { "," } else { "" };
let arm = arm.syntax();
format_to_acc!(acc, " {arm}{comma}\n")
});
diff --git a/crates/syntax/src/ast/syntax_factory/constructors.rs b/crates/syntax/src/ast/syntax_factory/constructors.rs
index 429e51ba36..17cc5f9c05 100644
--- a/crates/syntax/src/ast/syntax_factory/constructors.rs
+++ b/crates/syntax/src/ast/syntax_factory/constructors.rs
@@ -1212,6 +1212,43 @@ impl SyntaxFactory {
ast
}
+ pub fn attr_outer(&self, meta: ast::Meta) -> ast::Attr {
+ let ast = make::attr_outer(meta.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(meta.syntax().clone(), ast.meta().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
+ pub fn attr_inner(&self, meta: ast::Meta) -> ast::Attr {
+ let ast = make::attr_inner(meta.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(meta.syntax().clone(), ast.meta().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
+ pub fn meta_token_tree(&self, path: ast::Path, tt: ast::TokenTree) -> ast::Meta {
+ let ast = make::meta_token_tree(path.clone(), tt.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(path.syntax().clone(), ast.path().unwrap().syntax().clone());
+ builder.map_node(tt.syntax().clone(), ast.token_tree().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
pub fn token_tree(
&self,
delimiter: SyntaxKind,
@@ -1242,6 +1279,10 @@ impl SyntaxFactory {
pub fn whitespace(&self, text: &str) -> SyntaxToken {
make::tokens::whitespace(text)
}
+
+ pub fn ident(&self, text: &str) -> SyntaxToken {
+ make::tokens::ident(text)
+ }
}
// `ext` constructors
diff --git a/crates/syntax/src/syntax_editor.rs b/crates/syntax/src/syntax_editor.rs
index 31caf618be..3fa584850f 100644
--- a/crates/syntax/src/syntax_editor.rs
+++ b/crates/syntax/src/syntax_editor.rs
@@ -435,7 +435,7 @@ mod tests {
_ => {
let var_name = 2 + 2;
(var_name, true)
- }"#]];
+ },"#]];
expect.assert_eq(&edit.new_root.to_string());
assert_eq!(edit.find_annotation(placeholder_snippet).len(), 2);
diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs
index 1d821e96e5..e830c6a7cf 100644
--- a/crates/test-utils/src/fixture.rs
+++ b/crates/test-utils/src/fixture.rs
@@ -435,14 +435,16 @@ impl MiniCore {
continue;
}
- let mut active_line_region = false;
- let mut inactive_line_region = false;
+ let mut active_line_region = 0;
+ let mut inactive_line_region = 0;
if let Some(idx) = trimmed.find("// :!") {
- inactive_line_region = true;
- inactive_regions.push(&trimmed[idx + "// :!".len()..]);
+ let regions = trimmed[idx + "// :!".len()..].split(", ");
+ inactive_line_region += regions.clone().count();
+ inactive_regions.extend(regions);
} else if let Some(idx) = trimmed.find("// :") {
- active_line_region = true;
- active_regions.push(&trimmed[idx + "// :".len()..]);
+ let regions = trimmed[idx + "// :".len()..].split(", ");
+ active_line_region += regions.clone().count();
+ active_regions.extend(regions);
}
let mut keep = true;
@@ -462,11 +464,11 @@ impl MiniCore {
if keep {
buf.push_str(line);
}
- if active_line_region {
- active_regions.pop().unwrap();
+ if active_line_region > 0 {
+ active_regions.drain(active_regions.len() - active_line_region..);
}
- if inactive_line_region {
- inactive_regions.pop().unwrap();
+ if inactive_line_region > 0 {
+ inactive_regions.drain(inactive_regions.len() - active_line_region..);
}
}
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index d48063fb86..dc1eba1a1a 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -11,10 +11,13 @@
//! add:
//! asm:
//! assert:
+//! as_mut: sized
//! as_ref: sized
//! async_fn: fn, tuple, future, copy
//! bool_impl: option, fn
//! builtin_impls:
+//! borrow: sized
+//! borrow_mut: borrow
//! cell: copy, drop
//! clone: sized
//! coerce_pointee: derive, sized, unsize, coerce_unsized, dispatch_from_dyn
@@ -228,8 +231,11 @@ pub mod hash {
}
// region:derive
- #[rustc_builtin_macro]
- pub macro Hash($item:item) {}
+ pub(crate) mod derive {
+ #[rustc_builtin_macro]
+ pub macro Hash($item:item) {}
+ }
+ pub use derive::Hash;
// endregion:derive
}
// endregion:hash
@@ -377,11 +383,30 @@ pub mod convert {
fn as_ref(&self) -> &T;
}
// endregion:as_ref
+ // region:as_mut
+ pub trait AsMut<T: crate::marker::PointeeSized>: crate::marker::PointeeSized {
+ fn as_mut(&mut self) -> &mut T;
+ }
+ // endregion:as_mut
// region:infallible
pub enum Infallible {}
// endregion:infallible
}
+pub mod borrow {
+ // region:borrow
+ pub trait Borrow<Borrowed: ?Sized> {
+ fn borrow(&self) -> &Borrowed;
+ }
+ // endregion:borrow
+
+ // region:borrow_mut
+ pub trait BorrowMut<Borrowed: ?Sized>: Borrow<Borrowed> {
+ fn borrow_mut(&mut self) -> &mut Borrowed;
+ }
+ // endregion:borrow_mut
+}
+
pub mod mem {
// region:manually_drop
use crate::marker::PointeeSized;
@@ -1264,8 +1289,11 @@ pub mod fmt {
}
// region:derive
- #[rustc_builtin_macro]
- pub macro Debug($item:item) {}
+ pub(crate) mod derive {
+ #[rustc_builtin_macro]
+ pub macro Debug($item:item) {}
+ }
+ pub use derive::Debug;
// endregion:derive
// region:builtin_impls
@@ -1931,6 +1959,8 @@ pub mod prelude {
panic, // :panic
result::Result::{self, Err, Ok}, // :result
str::FromStr, // :str
+ fmt::derive::Debug, // :fmt, derive
+ hash::derive::Hash, // :hash, derive
};
}
diff --git a/crates/tt/src/iter.rs b/crates/tt/src/iter.rs
index 0418c00174..3246156f1c 100644
--- a/crates/tt/src/iter.rs
+++ b/crates/tt/src/iter.rs
@@ -211,6 +211,7 @@ impl<'a, S: Copy> TtIter<'a, S> {
}
}
+#[derive(Clone)]
pub enum TtElement<'a, S> {
Leaf(&'a Leaf<S>),
Subtree(&'a Subtree<S>, TtIter<'a, S>),
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index 14574a6456..44123385c8 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -579,7 +579,7 @@ where
{
use rustc_lexer::LiteralKind;
- let token = rustc_lexer::tokenize(text).next_tuple();
+ let token = rustc_lexer::tokenize(text, rustc_lexer::FrontmatterAllowed::No).next_tuple();
let Some((rustc_lexer::Token {
kind: rustc_lexer::TokenKind::Literal { kind, suffix_start },
..
diff --git a/crates/vfs/src/file_set.rs b/crates/vfs/src/file_set.rs
index 1228e2e177..0c41ede5b5 100644
--- a/crates/vfs/src/file_set.rs
+++ b/crates/vfs/src/file_set.rs
@@ -5,8 +5,8 @@
use std::fmt;
use fst::{IntoStreamer, Streamer};
-use nohash_hasher::IntMap;
-use rustc_hash::FxHashMap;
+use indexmap::IndexMap;
+use rustc_hash::{FxBuildHasher, FxHashMap};
use crate::{AnchoredPath, FileId, Vfs, VfsPath};
@@ -14,7 +14,7 @@ use crate::{AnchoredPath, FileId, Vfs, VfsPath};
#[derive(Default, Clone, Eq, PartialEq)]
pub struct FileSet {
files: FxHashMap<VfsPath, FileId>,
- paths: IntMap<FileId, VfsPath>,
+ paths: IndexMap<FileId, VfsPath, FxBuildHasher>,
}
impl FileSet {
diff --git a/docs/book/src/contributing/lsp-extensions.md b/docs/book/src/contributing/lsp-extensions.md
index 1ada1cb24c..8c06f33a9f 100644
--- a/docs/book/src/contributing/lsp-extensions.md
+++ b/docs/book/src/contributing/lsp-extensions.md
@@ -694,24 +694,6 @@ interface CancelFlycheckParams {}
Cancels all running flycheck processes.
-## Syntax Tree
-
-**Method:** `rust-analyzer/syntaxTree`
-
-**Request:**
-
-```typescript
-interface SyntaxTreeParams {
- textDocument: TextDocumentIdentifier,
- range?: Range,
-}
-```
-
-**Response:** `string`
-
-Returns textual representation of a parse tree for the file/selected region.
-Primarily for debugging, but very useful for all people working on rust-analyzer itself.
-
## View Syntax Tree
**Method:** `rust-analyzer/viewSyntaxTree`