Unnamed repository; edit this file 'description' to name the repository.
-rw-r--r--.github/workflows/ci.yaml37
-rw-r--r--Cargo.lock13
-rw-r--r--Cargo.toml4
-rw-r--r--crates/base-db/src/editioned_file_id.rs2
-rw-r--r--crates/hir-def/src/expr_store/lower.rs42
-rw-r--r--crates/hir-def/src/item_scope.rs5
-rw-r--r--crates/hir-def/src/nameres.rs8
-rw-r--r--crates/hir-def/src/nameres/collector.rs12
-rw-r--r--crates/hir-def/src/nameres/tests/macros.rs6
-rw-r--r--crates/hir-def/src/resolver.rs73
-rw-r--r--crates/hir-ty/src/diagnostics/unsafe_check.rs2
-rw-r--r--crates/hir-ty/src/infer.rs4
-rw-r--r--crates/hir-ty/src/infer/expr.rs2
-rw-r--r--crates/hir-ty/src/infer/pat.rs2
-rw-r--r--crates/hir-ty/src/infer/path.rs4
-rw-r--r--crates/hir-ty/src/lower/path.rs10
-rw-r--r--crates/hir-ty/src/mir/eval.rs2
-rw-r--r--crates/hir-ty/src/mir/lower.rs4
-rw-r--r--crates/hir-ty/src/mir/lower/pattern_matching.rs4
-rw-r--r--crates/hir-ty/src/next_solver/predicate.rs7
-rw-r--r--crates/hir-ty/src/tests/never_type.rs42
-rw-r--r--crates/hir-ty/src/tests/regression.rs84
-rw-r--r--crates/hir-ty/src/tests/simple.rs35
-rw-r--r--crates/hir/src/display.rs13
-rw-r--r--crates/hir/src/lib.rs39
-rw-r--r--crates/ide-assists/src/handlers/auto_import.rs33
-rw-r--r--crates/ide-assists/src/handlers/bind_unused_param.rs2
-rw-r--r--crates/ide-assists/src/handlers/convert_bool_to_enum.rs6
-rw-r--r--crates/ide-assists/src/handlers/convert_let_else_to_match.rs6
-rw-r--r--crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs127
-rw-r--r--crates/ide-assists/src/handlers/destructure_tuple_binding.rs85
-rw-r--r--crates/ide-assists/src/handlers/generate_getter_or_setter.rs161
-rw-r--r--crates/ide-assists/src/handlers/generate_impl.rs21
-rw-r--r--crates/ide-assists/src/handlers/generate_mut_trait_impl.rs6
-rw-r--r--crates/ide-assists/src/handlers/generate_new.rs13
-rw-r--r--crates/ide-assists/src/handlers/generate_trait_from_impl.rs2
-rw-r--r--crates/ide-assists/src/handlers/inline_call.rs52
-rw-r--r--crates/ide-assists/src/handlers/introduce_named_lifetime.rs209
-rw-r--r--crates/ide-assists/src/handlers/move_const_to_impl.rs8
-rw-r--r--crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs23
-rw-r--r--crates/ide-assists/src/handlers/replace_if_let_with_match.rs62
-rw-r--r--crates/ide-assists/src/handlers/replace_let_with_if_let.rs8
-rw-r--r--crates/ide-assists/src/utils.rs53
-rw-r--r--crates/ide-assists/src/utils/ref_field_expr.rs18
-rw-r--r--crates/ide-completion/src/context.rs5
-rw-r--r--crates/ide-completion/src/render/function.rs19
-rw-r--r--crates/ide-db/src/imports/import_assets.rs88
-rw-r--r--crates/ide-db/src/imports/insert_use.rs200
-rw-r--r--crates/ide/src/hover/tests.rs93
-rw-r--r--crates/ide/src/lib.rs6
-rw-r--r--crates/ide/src/references.rs2
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_macros.html3
-rw-r--r--crates/ide/src/syntax_highlighting/tests.rs8
-rw-r--r--crates/load-cargo/src/lib.rs7
-rw-r--r--crates/proc-macro-api/Cargo.toml1
-rw-r--r--crates/proc-macro-api/src/lib.rs8
-rw-r--r--crates/proc-macro-api/src/pool.rs28
-rw-r--r--crates/proc-macro-api/src/process.rs16
-rw-r--r--crates/rust-analyzer/src/flycheck.rs88
-rw-r--r--crates/rust-analyzer/src/global_state.rs8
-rw-r--r--crates/rust-analyzer/src/handlers/dispatch.rs17
-rw-r--r--crates/span/src/ast_id.rs2
-rw-r--r--crates/span/src/hygiene.rs51
-rw-r--r--crates/syntax/src/ast/edit_in_place.rs173
-rw-r--r--crates/syntax/src/ast/syntax_factory/constructors.rs77
-rw-r--r--crates/test-utils/src/minicore.rs24
-rw-r--r--crates/tt/src/storage.rs2
-rw-r--r--editors/code/package-lock.json12
-rw-r--r--lib/smol_str/src/borsh.rs5
-rw-r--r--lib/smol_str/tests/test.rs2
-rw-r--r--rust-version2
71 files changed, 1709 insertions, 589 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 1a0deee564..ca7d3058d8 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -96,7 +96,7 @@ jobs:
run: |
rustup update --no-self-update stable
rustup default stable
- rustup component add --toolchain stable rust-src clippy rustfmt
+ rustup component add --toolchain stable rust-src rustfmt
# We also install a nightly rustfmt, because we use `--file-lines` in
# a test.
rustup toolchain install nightly --profile minimal --component rustfmt
@@ -128,10 +128,6 @@ jobs:
- name: Run cargo-machete
run: cargo machete
- - name: Run Clippy
- if: matrix.os == 'macos-latest'
- run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr
-
analysis-stats:
if: github.repository == 'rust-lang/rust-analyzer'
runs-on: ubuntu-latest
@@ -178,6 +174,28 @@ jobs:
- run: cargo fmt -- --check
+ clippy:
+ if: github.repository == 'rust-lang/rust-analyzer'
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ # Note that clippy output is currently dependent on whether rust-src is installed,
+ # https://github.com/rust-lang/rust-clippy/issues/14625
+ - name: Install Rust toolchain
+ run: |
+ rustup update --no-self-update stable
+ rustup default stable
+ rustup component add --toolchain stable rust-src clippy
+
+ # https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/rust.json
+ - name: Install Rust Problem Matcher
+ run: echo "::add-matcher::.github/rust.json"
+
+ - run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr
+
miri:
if: github.repository == 'rust-lang/rust-analyzer'
runs-on: ubuntu-latest
@@ -188,8 +206,11 @@ jobs:
- name: Install Rust toolchain
run: |
- rustup update --no-self-update nightly
- rustup default nightly
+ # FIXME: Pin nightly due to a regression in miri on nightly-2026-02-12.
+ # See https://github.com/rust-lang/miri/issues/4855.
+ # Revert to plain `nightly` once this is fixed upstream.
+ rustup toolchain install nightly-2026-02-10
+ rustup default nightly-2026-02-10
rustup component add miri
# - name: Cache Dependencies
@@ -309,7 +330,7 @@ jobs:
run: typos
conclusion:
- needs: [rust, rust-cross, typescript, typo-check, proc-macro-srv, miri, rustfmt, analysis-stats]
+ needs: [rust, rust-cross, typescript, typo-check, proc-macro-srv, miri, rustfmt, clippy, analysis-stats]
# We need to ensure this job does *not* get skipped if its dependencies fail,
# because a skipped job is considered a success by GitHub. So we have to
# overwrite `if:`. We use `!cancelled()` to ensure the job does still not get run
diff --git a/Cargo.lock b/Cargo.lock
index 755ae55eea..9db4dd7cb1 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1845,6 +1845,7 @@ dependencies = [
"paths",
"postcard",
"proc-macro-srv",
+ "rayon",
"rustc-hash 2.1.1",
"semver",
"serde",
@@ -2453,9 +2454,9 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
-version = "0.26.0"
+version = "0.25.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f77debccd43ba198e9cee23efd7f10330ff445e46a98a2b107fed9094a1ee676"
+checksum = "e2e2aa2fca57727371eeafc975acc8e6f4c52f8166a78035543f6ee1c74c2dcc"
dependencies = [
"boxcar",
"crossbeam-queue",
@@ -2478,15 +2479,15 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
-version = "0.26.0"
+version = "0.25.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea07adbf42d91cc076b7daf3b38bc8168c19eb362c665964118a89bc55ef19a5"
+checksum = "1bfc2a1e7bf06964105515451d728f2422dedc3a112383324a00b191a5c397a3"
[[package]]
name = "salsa-macros"
-version = "0.26.0"
+version = "0.25.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d16d4d8b66451b9c75ddf740b7fc8399bc7b8ba33e854a5d7526d18708f67b05"
+checksum = "3d844c1aa34946da46af683b5c27ec1088a3d9d84a2b837a108223fd830220e1"
dependencies = [
"proc-macro2",
"quote",
diff --git a/Cargo.toml b/Cargo.toml
index 04559f15ed..9f31e1903a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -135,13 +135,13 @@ rayon = "1.10.0"
rowan = "=0.15.17"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
-salsa = { version = "0.26", default-features = false, features = [
+salsa = { version = "0.25.2", default-features = false, features = [
"rayon",
"salsa_unstable",
"macros",
"inventory",
] }
-salsa-macros = "0.26"
+salsa-macros = "0.25.2"
semver = "1.0.26"
serde = { version = "1.0.219" }
serde_derive = { version = "1.0.219" }
diff --git a/crates/base-db/src/editioned_file_id.rs b/crates/base-db/src/editioned_file_id.rs
index dd419f48fc..13fb05d565 100644
--- a/crates/base-db/src/editioned_file_id.rs
+++ b/crates/base-db/src/editioned_file_id.rs
@@ -60,7 +60,7 @@ const _: () = {
}
}
- impl zalsa_::HashEqLike<WithoutCrate> for EditionedFileIdData {
+ impl zalsa_struct_::HashEqLike<WithoutCrate> for EditionedFileIdData {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
Hash::hash(self, state);
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index 701586c258..1cecd1976b 100644
--- a/crates/hir-def/src/expr_store/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -32,8 +32,8 @@ use triomphe::Arc;
use tt::TextRange;
use crate::{
- AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, GenericDefId, ImplId, MacroId,
- ModuleDefId, ModuleId, TraitId, TypeAliasId, UnresolvedMacro,
+ AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, GenericDefId, ImplId, ItemContainerId,
+ MacroId, ModuleDefId, ModuleId, TraitId, TypeAliasId, UnresolvedMacro,
attrs::AttrFlags,
db::DefDatabase,
expr_store::{
@@ -141,9 +141,19 @@ pub(super) fn lower_body(
source_map_self_param = Some(collector.expander.in_file(AstPtr::new(&self_param_syn)));
}
+ let is_extern = matches!(
+ owner,
+ DefWithBodyId::FunctionId(id)
+ if matches!(id.loc(db).container, ItemContainerId::ExternBlockId(_)),
+ );
+
for param in param_list.params() {
if collector.check_cfg(&param) {
- let param_pat = collector.collect_pat_top(param.pat());
+ let param_pat = if is_extern {
+ collector.collect_extern_fn_param(param.pat())
+ } else {
+ collector.collect_pat_top(param.pat())
+ };
params.push(param_pat);
}
}
@@ -2248,6 +2258,32 @@ impl<'db> ExprCollector<'db> {
}
}
+ fn collect_extern_fn_param(&mut self, pat: Option<ast::Pat>) -> PatId {
+ // `extern` functions cannot have pattern-matched parameters, and furthermore, the identifiers
+ // in their parameters are always interpreted as bindings, even if in a normal function they
+ // won't be, because they would refer to a path pattern.
+ let Some(pat) = pat else { return self.missing_pat() };
+
+ match &pat {
+ ast::Pat::IdentPat(bp) => {
+ // FIXME: Emit an error if `!bp.is_simple_ident()`.
+
+ let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
+ let hygiene = bp
+ .name()
+ .map(|name| self.hygiene_id_for(name.syntax().text_range()))
+ .unwrap_or(HygieneId::ROOT);
+ let binding = self.alloc_binding(name, BindingAnnotation::Unannotated, hygiene);
+ let pat =
+ self.alloc_pat(Pat::Bind { id: binding, subpat: None }, AstPtr::new(&pat));
+ self.add_definition_to_binding(binding, pat);
+ pat
+ }
+ // FIXME: Emit an error.
+ _ => self.missing_pat(),
+ }
+ }
+
// region: patterns
fn collect_pat_top(&mut self, pat: Option<ast::Pat>) -> PatId {
diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs
index 1303773b59..b11a8bcd90 100644
--- a/crates/hir-def/src/item_scope.rs
+++ b/crates/hir-def/src/item_scope.rs
@@ -483,6 +483,11 @@ impl ItemScope {
self.declarations.push(def)
}
+ pub(crate) fn remove_from_value_ns(&mut self, name: &Name, def: ModuleDefId) {
+ let entry = self.values.shift_remove(name);
+ assert!(entry.is_some_and(|entry| entry.def == def))
+ }
+
pub(crate) fn get_legacy_macro(&self, name: &Name) -> Option<&[MacroId]> {
self.legacy_macros.get(name).map(|it| &**it)
}
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index 1e3ea50c5a..5fda1beab4 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -211,6 +211,7 @@ struct DefMapCrateData {
/// Side table for resolving derive helpers.
exported_derives: FxHashMap<MacroId, Box<[Name]>>,
fn_proc_macro_mapping: FxHashMap<FunctionId, ProcMacroId>,
+ fn_proc_macro_mapping_back: FxHashMap<ProcMacroId, FunctionId>,
/// Custom tool modules registered with `#![register_tool]`.
registered_tools: Vec<Symbol>,
@@ -230,6 +231,7 @@ impl DefMapCrateData {
Self {
exported_derives: FxHashMap::default(),
fn_proc_macro_mapping: FxHashMap::default(),
+ fn_proc_macro_mapping_back: FxHashMap::default(),
registered_tools: PREDEFINED_TOOLS.iter().map(|it| Symbol::intern(it)).collect(),
unstable_features: FxHashSet::default(),
rustc_coherence_is_core: false,
@@ -244,6 +246,7 @@ impl DefMapCrateData {
let Self {
exported_derives,
fn_proc_macro_mapping,
+ fn_proc_macro_mapping_back,
registered_tools,
unstable_features,
rustc_coherence_is_core: _,
@@ -254,6 +257,7 @@ impl DefMapCrateData {
} = self;
exported_derives.shrink_to_fit();
fn_proc_macro_mapping.shrink_to_fit();
+ fn_proc_macro_mapping_back.shrink_to_fit();
registered_tools.shrink_to_fit();
unstable_features.shrink_to_fit();
}
@@ -570,6 +574,10 @@ impl DefMap {
self.data.fn_proc_macro_mapping.get(&id).copied()
}
+ pub fn proc_macro_as_fn(&self, id: ProcMacroId) -> Option<FunctionId> {
+ self.data.fn_proc_macro_mapping_back.get(&id).copied()
+ }
+
pub fn krate(&self) -> Crate {
self.krate
}
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index f51524c1b5..e672e83f01 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -634,6 +634,7 @@ impl<'db> DefCollector<'db> {
crate_data.exported_derives.insert(proc_macro_id.into(), helpers);
}
crate_data.fn_proc_macro_mapping.insert(fn_id, proc_macro_id);
+ crate_data.fn_proc_macro_mapping_back.insert(proc_macro_id, fn_id);
}
/// Define a macro with `macro_rules`.
@@ -2095,6 +2096,8 @@ impl ModCollector<'_, '_> {
let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
+ update_def(self.def_collector, fn_id.into(), &it.name, vis, false);
+
if self.def_collector.def_map.block.is_none()
&& self.def_collector.is_proc_macro
&& self.module_id == self.def_collector.def_map.root
@@ -2105,9 +2108,14 @@ impl ModCollector<'_, '_> {
InFile::new(self.file_id(), id),
fn_id,
);
- }
- update_def(self.def_collector, fn_id.into(), &it.name, vis, false);
+ // A proc macro is implemented as a function, but it's treated as a macro, not a function.
+ // You cannot call it like a function, for example, except in its defining crate.
+ // So we keep the function definition, but remove it from the scope, leaving only the macro.
+ self.def_collector.def_map[module_id]
+ .scope
+ .remove_from_value_ns(&it.name, fn_id.into());
+ }
}
ModItemId::Struct(id) => {
let it = &self.item_tree[id];
diff --git a/crates/hir-def/src/nameres/tests/macros.rs b/crates/hir-def/src/nameres/tests/macros.rs
index a943f6f0ac..a013f8b2bc 100644
--- a/crates/hir-def/src/nameres/tests/macros.rs
+++ b/crates/hir-def/src/nameres/tests/macros.rs
@@ -1068,10 +1068,8 @@ pub fn derive_macro_2(_item: TokenStream) -> TokenStream {
- AnotherTrait : macro#
- DummyTrait : macro#
- TokenStream : type value
- - attribute_macro : value macro#
- - derive_macro : value
- - derive_macro_2 : value
- - function_like_macro : value macro!
+ - attribute_macro : macro#
+ - function_like_macro : macro!
"#]],
);
}
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index 2ac0f90fb2..d32e53fc6b 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -32,7 +32,7 @@ use crate::{
BindingId, ExprId, LabelId,
generics::{GenericParams, TypeOrConstParamData},
},
- item_scope::{BUILTIN_SCOPE, BuiltinShadowMode, ImportOrExternCrate, ImportOrGlob, ItemScope},
+ item_scope::{BUILTIN_SCOPE, BuiltinShadowMode, ImportOrExternCrate, ItemScope},
lang_item::LangItemTarget,
nameres::{DefMap, LocalDefMap, MacroSubNs, ResolvePathResultPrefixInfo, block_def_map},
per_ns::PerNs,
@@ -111,8 +111,8 @@ pub enum TypeNs {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ResolveValueResult {
- ValueNs(ValueNs, Option<ImportOrGlob>),
- Partial(TypeNs, usize, Option<ImportOrExternCrate>),
+ ValueNs(ValueNs),
+ Partial(TypeNs, usize),
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@@ -332,20 +332,17 @@ impl<'db> Resolver<'db> {
Path::Normal(it) => &it.mod_path,
Path::LangItem(l, None) => {
return Some((
- ResolveValueResult::ValueNs(
- match *l {
- LangItemTarget::FunctionId(it) => ValueNs::FunctionId(it),
- LangItemTarget::StaticId(it) => ValueNs::StaticId(it),
- LangItemTarget::StructId(it) => ValueNs::StructId(it),
- LangItemTarget::EnumVariantId(it) => ValueNs::EnumVariantId(it),
- LangItemTarget::UnionId(_)
- | LangItemTarget::ImplId(_)
- | LangItemTarget::TypeAliasId(_)
- | LangItemTarget::TraitId(_)
- | LangItemTarget::EnumId(_) => return None,
- },
- None,
- ),
+ ResolveValueResult::ValueNs(match *l {
+ LangItemTarget::FunctionId(it) => ValueNs::FunctionId(it),
+ LangItemTarget::StaticId(it) => ValueNs::StaticId(it),
+ LangItemTarget::StructId(it) => ValueNs::StructId(it),
+ LangItemTarget::EnumVariantId(it) => ValueNs::EnumVariantId(it),
+ LangItemTarget::UnionId(_)
+ | LangItemTarget::ImplId(_)
+ | LangItemTarget::TypeAliasId(_)
+ | LangItemTarget::TraitId(_)
+ | LangItemTarget::EnumId(_) => return None,
+ }),
ResolvePathResultPrefixInfo::default(),
));
}
@@ -363,7 +360,7 @@ impl<'db> Resolver<'db> {
};
// Remaining segments start from 0 because lang paths have no segments other than the remaining.
return Some((
- ResolveValueResult::Partial(type_ns, 0, None),
+ ResolveValueResult::Partial(type_ns, 0),
ResolvePathResultPrefixInfo::default(),
));
}
@@ -388,10 +385,7 @@ impl<'db> Resolver<'db> {
if let Some(e) = entry {
return Some((
- ResolveValueResult::ValueNs(
- ValueNs::LocalBinding(e.binding()),
- None,
- ),
+ ResolveValueResult::ValueNs(ValueNs::LocalBinding(e.binding())),
ResolvePathResultPrefixInfo::default(),
));
}
@@ -404,14 +398,14 @@ impl<'db> Resolver<'db> {
&& *first_name == sym::Self_
{
return Some((
- ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_), None),
+ ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_)),
ResolvePathResultPrefixInfo::default(),
));
}
if let Some(id) = params.find_const_by_name(first_name, *def) {
let val = ValueNs::GenericParam(id);
return Some((
- ResolveValueResult::ValueNs(val, None),
+ ResolveValueResult::ValueNs(val),
ResolvePathResultPrefixInfo::default(),
));
}
@@ -431,7 +425,7 @@ impl<'db> Resolver<'db> {
if let &GenericDefId::ImplId(impl_) = def {
if *first_name == sym::Self_ {
return Some((
- ResolveValueResult::Partial(TypeNs::SelfType(impl_), 1, None),
+ ResolveValueResult::Partial(TypeNs::SelfType(impl_), 1),
ResolvePathResultPrefixInfo::default(),
));
}
@@ -440,14 +434,14 @@ impl<'db> Resolver<'db> {
{
let ty = TypeNs::AdtSelfType(adt);
return Some((
- ResolveValueResult::Partial(ty, 1, None),
+ ResolveValueResult::Partial(ty, 1),
ResolvePathResultPrefixInfo::default(),
));
}
if let Some(id) = params.find_type_by_name(first_name, *def) {
let ty = TypeNs::GenericParam(id);
return Some((
- ResolveValueResult::Partial(ty, 1, None),
+ ResolveValueResult::Partial(ty, 1),
ResolvePathResultPrefixInfo::default(),
));
}
@@ -473,7 +467,7 @@ impl<'db> Resolver<'db> {
&& let Some(builtin) = BuiltinType::by_name(first_name)
{
return Some((
- ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1, None),
+ ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1),
ResolvePathResultPrefixInfo::default(),
));
}
@@ -488,7 +482,7 @@ impl<'db> Resolver<'db> {
hygiene: HygieneId,
) -> Option<ValueNs> {
match self.resolve_path_in_value_ns(db, path, hygiene)? {
- ResolveValueResult::ValueNs(it, _) => Some(it),
+ ResolveValueResult::ValueNs(it) => Some(it),
ResolveValueResult::Partial(..) => None,
}
}
@@ -1153,12 +1147,12 @@ impl<'db> ModuleItemMap<'db> {
);
match unresolved_idx {
None => {
- let (value, import) = to_value_ns(module_def)?;
- Some((ResolveValueResult::ValueNs(value, import), prefix_info))
+ let value = to_value_ns(module_def, self.def_map)?;
+ Some((ResolveValueResult::ValueNs(value), prefix_info))
}
Some(unresolved_idx) => {
- let def = module_def.take_types_full()?;
- let ty = match def.def {
+ let def = module_def.take_types()?;
+ let ty = match def {
ModuleDefId::AdtId(it) => TypeNs::AdtId(it),
ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it),
@@ -1171,7 +1165,7 @@ impl<'db> ModuleItemMap<'db> {
| ModuleDefId::MacroId(_)
| ModuleDefId::StaticId(_) => return None,
};
- Some((ResolveValueResult::Partial(ty, unresolved_idx, def.import), prefix_info))
+ Some((ResolveValueResult::Partial(ty, unresolved_idx), prefix_info))
}
}
}
@@ -1194,8 +1188,13 @@ impl<'db> ModuleItemMap<'db> {
}
}
-fn to_value_ns(per_ns: PerNs) -> Option<(ValueNs, Option<ImportOrGlob>)> {
- let (def, import) = per_ns.take_values_import()?;
+fn to_value_ns(per_ns: PerNs, def_map: &DefMap) -> Option<ValueNs> {
+ let def = per_ns.take_values().or_else(|| {
+ let Some(MacroId::ProcMacroId(proc_macro)) = per_ns.take_macros() else { return None };
+ // If we cannot resolve to value ns, but we can resolve to a proc macro, and this is the crate
+ // defining this proc macro - inside this crate, we should treat the macro as a function.
+ def_map.proc_macro_as_fn(proc_macro).map(ModuleDefId::FunctionId)
+ })?;
let res = match def {
ModuleDefId::FunctionId(it) => ValueNs::FunctionId(it),
ModuleDefId::AdtId(AdtId::StructId(it)) => ValueNs::StructId(it),
@@ -1210,7 +1209,7 @@ fn to_value_ns(per_ns: PerNs) -> Option<(ValueNs, Option<ImportOrGlob>)> {
| ModuleDefId::MacroId(_)
| ModuleDefId::ModuleId(_) => return None,
};
- Some((res, import))
+ Some(res)
}
fn to_type_ns(per_ns: PerNs) -> Option<(TypeNs, Option<ImportOrExternCrate>)> {
diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 50d4517d01..21f263723b 100644
--- a/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -430,7 +430,7 @@ impl<'db> UnsafeVisitor<'db> {
fn mark_unsafe_path(&mut self, node: ExprOrPatId, path: &Path) {
let hygiene = self.body.expr_or_pat_path_hygiene(node);
let value_or_partial = self.resolver.resolve_path_in_value_ns(self.db, path, hygiene);
- if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial {
+ if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial {
let static_data = self.db.static_signature(id);
if static_data.flags.contains(StaticFlags::MUTABLE) {
self.on_unsafe_op(node, UnsafetyReason::MutableStatic);
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 35d744e7d1..991acda14b 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -1584,7 +1584,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
return (self.err_ty(), None);
};
match res {
- ResolveValueResult::ValueNs(value, _) => match value {
+ ResolveValueResult::ValueNs(value) => match value {
ValueNs::EnumVariantId(var) => {
let args = path_ctx.substs_from_path(var.into(), true, false);
drop(ctx);
@@ -1608,7 +1608,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
return (self.err_ty(), None);
}
},
- ResolveValueResult::Partial(typens, unresolved, _) => (typens, Some(unresolved)),
+ ResolveValueResult::Partial(typens, unresolved) => (typens, Some(unresolved)),
}
} else {
match path_ctx.resolve_path_in_type_ns() {
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index 9f2d9d25b9..16e7d51e87 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -751,7 +751,7 @@ impl<'db> InferenceContext<'_, 'db> {
if let Some(lhs_ty) = lhs_ty {
self.write_pat_ty(target, lhs_ty);
- self.infer_expr_coerce(value, &Expectation::has_type(lhs_ty), ExprIsRead::No);
+ self.infer_expr_coerce(value, &Expectation::has_type(lhs_ty), ExprIsRead::Yes);
} else {
let rhs_ty = self.infer_expr(value, &Expectation::none(), ExprIsRead::Yes);
let resolver_guard =
diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs
index 1b8ce5ceaf..87fd0dace3 100644
--- a/crates/hir-ty/src/infer/pat.rs
+++ b/crates/hir-ty/src/infer/pat.rs
@@ -673,7 +673,7 @@ impl<'db> InferenceContext<'_, 'db> {
pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool {
let mut res = false;
body.walk_pats(pat_id, &mut |pat| {
- res |= matches!(body[pat], Pat::Bind { id, .. } if body[id].mode == BindingAnnotation::Ref);
+ res |= matches!(body[pat], Pat::Bind { id, .. } if matches!(body[id].mode, BindingAnnotation::Ref | BindingAnnotation::RefMut));
});
res
}
diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs
index ef1a610a32..40c6fdf3cc 100644
--- a/crates/hir-ty/src/infer/path.rs
+++ b/crates/hir-ty/src/infer/path.rs
@@ -164,11 +164,11 @@ impl<'db> InferenceContext<'_, 'db> {
let value_or_partial = path_ctx.resolve_path_in_value_ns(hygiene)?;
match value_or_partial {
- ResolveValueResult::ValueNs(it, _) => {
+ ResolveValueResult::ValueNs(it) => {
drop_ctx(ctx, no_diagnostics);
(it, None)
}
- ResolveValueResult::Partial(def, remaining_index, _) => {
+ ResolveValueResult::Partial(def, remaining_index) => {
// there may be more intermediate segments between the resolved one and
// the end. Only the last segment needs to be resolved to a value; from
// the segments before that, we need to get either a type or a trait ref.
diff --git a/crates/hir-ty/src/lower/path.rs b/crates/hir-ty/src/lower/path.rs
index f3d0de1227..517f67b828 100644
--- a/crates/hir-ty/src/lower/path.rs
+++ b/crates/hir-ty/src/lower/path.rs
@@ -396,12 +396,10 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
}
let (mod_segments, enum_segment, resolved_segment_idx) = match res {
- ResolveValueResult::Partial(_, unresolved_segment, _) => {
+ ResolveValueResult::Partial(_, unresolved_segment) => {
(segments.take(unresolved_segment - 1), None, unresolved_segment - 1)
}
- ResolveValueResult::ValueNs(ValueNs::EnumVariantId(_), _)
- if prefix_info.enum_variant =>
- {
+ ResolveValueResult::ValueNs(ValueNs::EnumVariantId(_)) if prefix_info.enum_variant => {
(segments.strip_last_two(), segments.len().checked_sub(2), segments.len() - 1)
}
ResolveValueResult::ValueNs(..) => (segments.strip_last(), None, segments.len() - 1),
@@ -431,7 +429,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
}
match &res {
- ResolveValueResult::ValueNs(resolution, _) => {
+ ResolveValueResult::ValueNs(resolution) => {
let resolved_segment_idx = self.current_segment_u32();
let resolved_segment = self.current_or_prev_segment;
@@ -469,7 +467,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
| ValueNs::ConstId(_) => {}
}
}
- ResolveValueResult::Partial(resolution, _, _) => {
+ ResolveValueResult::Partial(resolution, _) => {
if !self.handle_type_ns_resolution(resolution) {
return None;
}
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index 5de08313f4..ec0723c3f8 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -1625,7 +1625,7 @@ impl<'db> Evaluator<'db> {
};
match target_ty {
rustc_type_ir::FloatTy::F32 => Owned((value as f32).to_le_bytes().to_vec()),
- rustc_type_ir::FloatTy::F64 => Owned((value as f64).to_le_bytes().to_vec()),
+ rustc_type_ir::FloatTy::F64 => Owned(value.to_le_bytes().to_vec()),
rustc_type_ir::FloatTy::F16 | rustc_type_ir::FloatTy::F128 => {
not_supported!("unstable floating point type f16 and f128");
}
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 199db7a3e7..8d5e5c2e6e 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -1429,7 +1429,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
.resolve_path_in_value_ns(self.db, c, HygieneId::ROOT)
.ok_or_else(unresolved_name)?;
match pr {
- ResolveValueResult::ValueNs(v, _) => {
+ ResolveValueResult::ValueNs(v) => {
if let ValueNs::ConstId(c) = v {
self.lower_const_to_operand(
GenericArgs::empty(self.interner()),
@@ -1439,7 +1439,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
not_supported!("bad path in range pattern");
}
}
- ResolveValueResult::Partial(_, _, _) => {
+ ResolveValueResult::Partial(_, _) => {
not_supported!("associated constants in range pattern")
}
}
diff --git a/crates/hir-ty/src/mir/lower/pattern_matching.rs b/crates/hir-ty/src/mir/lower/pattern_matching.rs
index a8aacbff16..83139821e3 100644
--- a/crates/hir-ty/src/mir/lower/pattern_matching.rs
+++ b/crates/hir-ty/src/mir/lower/pattern_matching.rs
@@ -373,7 +373,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
if let (
MatchingMode::Assign,
- ResolveValueResult::ValueNs(ValueNs::LocalBinding(binding), _),
+ ResolveValueResult::ValueNs(ValueNs::LocalBinding(binding)),
) = (mode, &pr)
{
let local = self.binding_local(*binding)?;
@@ -398,7 +398,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
{
break 'b (c, x.1);
}
- if let ResolveValueResult::ValueNs(ValueNs::ConstId(c), _) = pr {
+ if let ResolveValueResult::ValueNs(ValueNs::ConstId(c)) = pr {
break 'b (c, GenericArgs::empty(self.interner()));
}
not_supported!("path in pattern position that is not const or variant")
diff --git a/crates/hir-ty/src/next_solver/predicate.rs b/crates/hir-ty/src/next_solver/predicate.rs
index 6f4fae7073..8658d03a9e 100644
--- a/crates/hir-ty/src/next_solver/predicate.rs
+++ b/crates/hir-ty/src/next_solver/predicate.rs
@@ -714,9 +714,9 @@ impl<'db> rustc_type_ir::inherent::Predicate<DbInterner<'db>> for Predicate<'db>
fn allow_normalization(self) -> bool {
// TODO: this should probably live in rustc_type_ir
match self.inner().as_ref().skip_binder() {
- PredicateKind::Clause(ClauseKind::WellFormed(_))
- | PredicateKind::AliasRelate(..)
- | PredicateKind::NormalizesTo(..) => false,
+ PredicateKind::Clause(ClauseKind::WellFormed(_)) | PredicateKind::AliasRelate(..) => {
+ false
+ }
PredicateKind::Clause(ClauseKind::Trait(_))
| PredicateKind::Clause(ClauseKind::RegionOutlives(_))
| PredicateKind::Clause(ClauseKind::TypeOutlives(_))
@@ -729,6 +729,7 @@ impl<'db> rustc_type_ir::inherent::Predicate<DbInterner<'db>> for Predicate<'db>
| PredicateKind::Coerce(_)
| PredicateKind::Clause(ClauseKind::ConstEvaluatable(_))
| PredicateKind::ConstEquate(_, _)
+ | PredicateKind::NormalizesTo(..)
| PredicateKind::Ambiguous => true,
}
}
diff --git a/crates/hir-ty/src/tests/never_type.rs b/crates/hir-ty/src/tests/never_type.rs
index 4d68179a88..e4e7b4cc38 100644
--- a/crates/hir-ty/src/tests/never_type.rs
+++ b/crates/hir-ty/src/tests/never_type.rs
@@ -761,6 +761,48 @@ fn coerce_ref_binding() -> ! {
}
#[test]
+fn diverging_place_match_ref_mut() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+fn coerce_ref_mut_binding() -> ! {
+ unsafe {
+ let x: *mut ! = 0 as _;
+ let ref mut _x: () = *x;
+ }
+}
+"#,
+ expect![[r#"
+ 33..120 '{ ... } }': !
+ 39..118 'unsafe... }': !
+ 60..61 'x': *mut !
+ 72..73 '0': i32
+ 72..78 '0 as _': *mut !
+ 92..102 'ref mut _x': &'? mut ()
+ 109..111 '*x': !
+ 110..111 'x': *mut !
+ 109..111: expected (), got !
+ "#]],
+ )
+}
+
+#[test]
+fn assign_never_place_no_mismatch() {
+ check_no_mismatches(
+ r#"
+//- minicore: sized
+fn foo() {
+ unsafe {
+ let p: *mut ! = 0 as _;
+ let mut x: () = ();
+ x = *p;
+ }
+}
+"#,
+ );
+}
+
+#[test]
fn never_place_isnt_diverging() {
check_infer_with_mismatches(
r#"
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs
index 3b5b4e4fa5..5291bf80e8 100644
--- a/crates/hir-ty/src/tests/regression.rs
+++ b/crates/hir-ty/src/tests/regression.rs
@@ -2363,7 +2363,6 @@ fn test() {
}
"#,
expect![[r#"
- 46..49 'Foo': Foo<N>
93..97 'self': Foo<N>
108..125 '{ ... }': usize
118..119 'N': usize
@@ -2688,3 +2687,86 @@ pub trait FilterT<F: FilterT<F, V = Self::V> = Self> {
"#,
);
}
+
+#[test]
+fn regression_21605() {
+ check_infer(
+ r#"
+//- minicore: fn, coerce_unsized, dispatch_from_dyn, iterator, iterators
+pub struct Filter<'a, 'b, T>
+where
+ T: 'b,
+ 'a: 'b,
+{
+ filter_fn: dyn Fn(&'a T) -> bool,
+ t: Option<T>,
+ b: &'b (),
+}
+
+impl<'a, 'b, T> Filter<'a, 'b, T>
+where
+ T: 'b,
+ 'a: 'b,
+{
+ pub fn new(filter_fn: dyn Fn(&T) -> bool) -> Self {
+ Self {
+ filter_fn: filter_fn,
+ t: None,
+ b: &(),
+ }
+ }
+}
+
+pub trait FilterExt<T> {
+ type Output;
+ fn filter(&self, filter: &Filter<T>) -> Self::Output;
+}
+
+impl<const N: usize, T> FilterExt<T> for [T; N]
+where
+ T: IntoIterator,
+{
+ type Output = T;
+ fn filter(&self, filter: &Filter<T>) -> Self::Output {
+ let _ = self.into_iter().filter(filter.filter_fn);
+ loop {}
+ }
+}
+"#,
+ expect![[r#"
+ 214..223 'filter_fn': dyn Fn(&'? T) -> bool + 'static
+ 253..360 '{ ... }': Filter<'a, 'b, T>
+ 263..354 'Self {... }': Filter<'a, 'b, T>
+ 293..302 'filter_fn': dyn Fn(&'? T) -> bool + 'static
+ 319..323 'None': Option<T>
+ 340..343 '&()': &'? ()
+ 341..343 '()': ()
+ 421..425 'self': &'? Self
+ 427..433 'filter': &'? Filter<'?, '?, T>
+ 580..584 'self': &'? [T; N]
+ 586..592 'filter': &'? Filter<'?, '?, T>
+ 622..704 '{ ... }': T
+ 636..637 '_': Filter<Iter<'?, T>, dyn Fn(&'? T) -> bool + '?>
+ 640..644 'self': &'? [T; N]
+ 640..656 'self.i...iter()': Iter<'?, T>
+ 640..681 'self.i...er_fn)': Filter<Iter<'?, T>, dyn Fn(&'? T) -> bool + '?>
+ 664..670 'filter': &'? Filter<'?, '?, T>
+ 664..680 'filter...ter_fn': dyn Fn(&'? T) -> bool + 'static
+ 691..698 'loop {}': !
+ 696..698 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn extern_fns_cannot_have_param_patterns() {
+ check_no_mismatches(
+ r#"
+pub(crate) struct Builder<'a>(&'a ());
+
+unsafe extern "C" {
+ pub(crate) fn foo<'a>(Builder: &Builder<'a>);
+}
+ "#,
+ );
+}
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs
index 7d4f04268a..dab8bdb54b 100644
--- a/crates/hir-ty/src/tests/simple.rs
+++ b/crates/hir-ty/src/tests/simple.rs
@@ -4074,3 +4074,38 @@ static S: &[u8; 158] = include_bytes!("/foo/bar/baz.txt");
"#,
);
}
+
+#[test]
+fn proc_macros_are_functions_inside_defining_crate_and_macros_outside() {
+ check_types(
+ r#"
+//- /pm.rs crate:pm
+#![crate_type = "proc-macro"]
+
+#[proc_macro_attribute]
+pub fn proc_macro() {}
+
+fn foo() {
+ proc_macro;
+ // ^^^^^^^^^^ fn proc_macro()
+}
+
+mod bar {
+ use super::proc_macro;
+
+ fn baz() {
+ super::proc_macro;
+ // ^^^^^^^^^^^^^^^^^ fn proc_macro()
+ proc_macro;
+ // ^^^^^^^^^^ fn proc_macro()
+ }
+}
+
+//- /lib.rs crate:lib deps:pm
+fn foo() {
+ pm::proc_macro;
+ // ^^^^^^^^^^^^^^ {unknown}
+}
+ "#,
+ );
+}
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index b4440dfa18..91fdcb8e63 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -172,8 +172,13 @@ fn write_function<'db>(f: &mut HirFormatter<'_, 'db>, func_id: FunctionId) -> Re
write_generic_params(GenericDefId::FunctionId(func_id), f)?;
+ let too_long_param = data.params.len() > 4;
f.write_char('(')?;
+ if too_long_param {
+ f.write_str("\n ")?;
+ }
+
let mut first = true;
let mut skip_self = 0;
if let Some(self_param) = func.self_param(db) {
@@ -182,11 +187,12 @@ fn write_function<'db>(f: &mut HirFormatter<'_, 'db>, func_id: FunctionId) -> Re
skip_self = 1;
}
+ let comma = if too_long_param { ",\n " } else { ", " };
// FIXME: Use resolved `param.ty` once we no longer discard lifetimes
let body = db.body(func_id.into());
for (type_ref, param) in data.params.iter().zip(func.assoc_fn_params(db)).skip(skip_self) {
if !first {
- f.write_str(", ")?;
+ f.write_str(comma)?;
} else {
first = false;
}
@@ -201,11 +207,14 @@ fn write_function<'db>(f: &mut HirFormatter<'_, 'db>, func_id: FunctionId) -> Re
if data.is_varargs() {
if !first {
- f.write_str(", ")?;
+ f.write_str(comma)?;
}
f.write_str("...")?;
}
+ if too_long_param {
+ f.write_char('\n')?;
+ }
f.write_char(')')?;
// `FunctionData::ret_type` will be `::core::future::Future<Output = ...>` for async fns.
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 4b61566516..5820a6714b 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -2244,6 +2244,39 @@ impl DefWithBody {
acc.push(diag.into())
}
}
+
+ /// Returns an iterator over the inferred types of all expressions in this body.
+ pub fn expression_types<'db>(
+ self,
+ db: &'db dyn HirDatabase,
+ ) -> impl Iterator<Item = Type<'db>> {
+ self.id().into_iter().flat_map(move |def_id| {
+ let infer = InferenceResult::for_body(db, def_id);
+ let resolver = def_id.resolver(db);
+
+ infer.expression_types().map(move |(_, ty)| Type::new_with_resolver(db, &resolver, ty))
+ })
+ }
+
+ /// Returns an iterator over the inferred types of all patterns in this body.
+ pub fn pattern_types<'db>(self, db: &'db dyn HirDatabase) -> impl Iterator<Item = Type<'db>> {
+ self.id().into_iter().flat_map(move |def_id| {
+ let infer = InferenceResult::for_body(db, def_id);
+ let resolver = def_id.resolver(db);
+
+ infer.pattern_types().map(move |(_, ty)| Type::new_with_resolver(db, &resolver, ty))
+ })
+ }
+
+ /// Returns an iterator over the inferred types of all bindings in this body.
+ pub fn binding_types<'db>(self, db: &'db dyn HirDatabase) -> impl Iterator<Item = Type<'db>> {
+ self.id().into_iter().flat_map(move |def_id| {
+ let infer = InferenceResult::for_body(db, def_id);
+ let resolver = def_id.resolver(db);
+
+ infer.binding_types().map(move |(_, ty)| Type::new_with_resolver(db, &resolver, ty))
+ })
+ }
}
fn expr_store_diagnostics<'db>(
@@ -6068,11 +6101,7 @@ impl<'db> Type<'db> {
match name {
Some(name) => {
- match ctx.probe_for_name(
- method_resolution::Mode::MethodCall,
- name.clone(),
- self_ty,
- ) {
+ match ctx.probe_for_name(method_resolution::Mode::Path, name.clone(), self_ty) {
Ok(candidate)
| Err(method_resolution::MethodError::PrivateMatch(candidate)) => {
let id = candidate.item.into();
diff --git a/crates/ide-assists/src/handlers/auto_import.rs b/crates/ide-assists/src/handlers/auto_import.rs
index 2694910aa6..da5c123957 100644
--- a/crates/ide-assists/src/handlers/auto_import.rs
+++ b/crates/ide-assists/src/handlers/auto_import.rs
@@ -352,7 +352,7 @@ mod tests {
let config = TEST_CONFIG;
let ctx = AssistContext::new(sema, &config, frange);
let mut acc = Assists::new(&ctx, AssistResolveStrategy::All);
- auto_import(&mut acc, &ctx);
+ hir::attach_db(&db, || auto_import(&mut acc, &ctx));
let assists = acc.finish();
let labels = assists.iter().map(|assist| assist.label.to_string()).collect::<Vec<_>>();
@@ -1897,4 +1897,35 @@ fn foo(_: S) {}
"#,
);
}
+
+ #[test]
+ fn with_after_segments() {
+ let before = r#"
+mod foo {
+ pub mod wanted {
+ pub fn abc() {}
+ }
+}
+
+mod bar {
+ pub mod wanted {}
+}
+
+mod baz {
+ pub fn wanted() {}
+}
+
+mod quux {
+ pub struct wanted;
+}
+impl quux::wanted {
+ fn abc() {}
+}
+
+fn f() {
+ wanted$0::abc;
+}
+ "#;
+ check_auto_import_order(before, &["Import `foo::wanted`", "Import `quux::wanted`"]);
+ }
}
diff --git a/crates/ide-assists/src/handlers/bind_unused_param.rs b/crates/ide-assists/src/handlers/bind_unused_param.rs
index 771e80bb92..0e85a77822 100644
--- a/crates/ide-assists/src/handlers/bind_unused_param.rs
+++ b/crates/ide-assists/src/handlers/bind_unused_param.rs
@@ -2,7 +2,7 @@ use crate::assist_context::{AssistContext, Assists};
use ide_db::{LineIndexDatabase, assists::AssistId, defs::Definition};
use syntax::{
AstNode,
- ast::{self, HasName, edit_in_place::Indent},
+ ast::{self, HasName, edit::AstNodeEdit},
};
// Assist: bind_unused_param
diff --git a/crates/ide-assists/src/handlers/convert_bool_to_enum.rs b/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
index 1ae5f64917..434fbbae05 100644
--- a/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
+++ b/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
@@ -11,9 +11,10 @@ use ide_db::{
source_change::SourceChangeBuilder,
};
use itertools::Itertools;
+use syntax::ast::edit::AstNodeEdit;
use syntax::{
AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T,
- ast::{self, HasName, edit::IndentLevel, edit_in_place::Indent, make},
+ ast::{self, HasName, edit::IndentLevel, make},
};
use crate::{
@@ -479,10 +480,9 @@ fn add_enum_def(
ctx.sema.scope(name.syntax()).map(|scope| scope.module())
})
.any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);
- let enum_def = make_bool_enum(make_enum_pub);
let indent = IndentLevel::from_node(&insert_before);
- enum_def.reindent_to(indent);
+ let enum_def = make_bool_enum(make_enum_pub).reset_indent().indent(indent);
edit.insert(
insert_before.text_range().start(),
diff --git a/crates/ide-assists/src/handlers/convert_let_else_to_match.rs b/crates/ide-assists/src/handlers/convert_let_else_to_match.rs
index ebfed9f9ca..d2336a4a5d 100644
--- a/crates/ide-assists/src/handlers/convert_let_else_to_match.rs
+++ b/crates/ide-assists/src/handlers/convert_let_else_to_match.rs
@@ -1,7 +1,6 @@
use syntax::T;
use syntax::ast::RangeItem;
-use syntax::ast::edit::IndentLevel;
-use syntax::ast::edit_in_place::Indent;
+use syntax::ast::edit::AstNodeEdit;
use syntax::ast::syntax_factory::SyntaxFactory;
use syntax::ast::{self, AstNode, HasName, LetStmt, Pat};
@@ -93,7 +92,8 @@ pub(crate) fn convert_let_else_to_match(acc: &mut Assists, ctx: &AssistContext<'
);
let else_arm = make.match_arm(make.wildcard_pat().into(), None, else_expr);
let match_ = make.expr_match(init, make.match_arm_list([binding_arm, else_arm]));
- match_.reindent_to(IndentLevel::from_node(let_stmt.syntax()));
+ let match_ = match_.reset_indent();
+ let match_ = match_.indent(let_stmt.indent_level());
if bindings.is_empty() {
editor.replace(let_stmt.syntax(), match_.syntax());
diff --git a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs
index 0e5e6185d0..1740cd024a 100644
--- a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs
+++ b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs
@@ -5,15 +5,20 @@ use ide_db::{
assists::AssistId,
defs::Definition,
helpers::mod_path_to_ast,
- imports::insert_use::{ImportScope, insert_use},
+ imports::insert_use::{ImportScope, insert_use_with_editor},
search::{FileReference, UsageSearchResult},
source_change::SourceChangeBuilder,
syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
};
use syntax::{
AstNode, SyntaxNode,
- ast::{self, HasName, edit::IndentLevel, edit_in_place::Indent, make},
- match_ast, ted,
+ ast::{
+ self, HasName,
+ edit::{AstNodeEdit, IndentLevel},
+ syntax_factory::SyntaxFactory,
+ },
+ match_ast,
+ syntax_editor::SyntaxEditor,
};
use crate::assist_context::{AssistContext, Assists};
@@ -67,14 +72,15 @@ pub(crate) fn convert_tuple_return_type_to_struct(
"Convert tuple return type to tuple struct",
target,
move |edit| {
- let ret_type = edit.make_mut(ret_type);
- let fn_ = edit.make_mut(fn_);
+ let mut syntax_editor = edit.make_editor(ret_type.syntax());
+ let syntax_factory = SyntaxFactory::with_mappings();
let usages = Definition::Function(fn_def).usages(&ctx.sema).all();
let struct_name = format!("{}Result", stdx::to_camel_case(&fn_name.to_string()));
let parent = fn_.syntax().ancestors().find_map(<Either<ast::Impl, ast::Trait>>::cast);
add_tuple_struct_def(
edit,
+ &syntax_factory,
ctx,
&usages,
parent.as_ref().map(|it| it.syntax()).unwrap_or(fn_.syntax()),
@@ -83,15 +89,23 @@ pub(crate) fn convert_tuple_return_type_to_struct(
&target_module,
);
- ted::replace(
+ syntax_editor.replace(
ret_type.syntax(),
- make::ret_type(make::ty(&struct_name)).syntax().clone_for_update(),
+ syntax_factory.ret_type(syntax_factory.ty(&struct_name)).syntax(),
);
if let Some(fn_body) = fn_.body() {
- replace_body_return_values(ast::Expr::BlockExpr(fn_body), &struct_name);
+ replace_body_return_values(
+ &mut syntax_editor,
+ &syntax_factory,
+ ast::Expr::BlockExpr(fn_body),
+ &struct_name,
+ );
}
+ syntax_editor.add_mappings(syntax_factory.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), syntax_editor);
+
replace_usages(edit, ctx, &usages, &struct_name, &target_module);
},
)
@@ -106,24 +120,37 @@ fn replace_usages(
target_module: &hir::Module,
) {
for (file_id, references) in usages.iter() {
- edit.edit_file(file_id.file_id(ctx.db()));
+ let Some(first_ref) = references.first() else { continue };
+
+ let mut editor = edit.make_editor(first_ref.name.syntax().as_node().unwrap());
+ let syntax_factory = SyntaxFactory::with_mappings();
- let refs_with_imports =
- augment_references_with_imports(edit, ctx, references, struct_name, target_module);
+ let refs_with_imports = augment_references_with_imports(
+ &syntax_factory,
+ ctx,
+ references,
+ struct_name,
+ target_module,
+ );
refs_with_imports.into_iter().rev().for_each(|(name, import_data)| {
if let Some(fn_) = name.syntax().parent().and_then(ast::Fn::cast) {
cov_mark::hit!(replace_trait_impl_fns);
if let Some(ret_type) = fn_.ret_type() {
- ted::replace(
+ editor.replace(
ret_type.syntax(),
- make::ret_type(make::ty(struct_name)).syntax().clone_for_update(),
+ syntax_factory.ret_type(syntax_factory.ty(struct_name)).syntax(),
);
}
if let Some(fn_body) = fn_.body() {
- replace_body_return_values(ast::Expr::BlockExpr(fn_body), struct_name);
+ replace_body_return_values(
+ &mut editor,
+ &syntax_factory,
+ ast::Expr::BlockExpr(fn_body),
+ struct_name,
+ );
}
} else {
// replace tuple patterns
@@ -143,22 +170,30 @@ fn replace_usages(
_ => None,
});
for tuple_pat in tuple_pats {
- ted::replace(
+ editor.replace(
tuple_pat.syntax(),
- make::tuple_struct_pat(
- make::path_from_text(struct_name),
- tuple_pat.fields(),
- )
- .clone_for_update()
- .syntax(),
+ syntax_factory
+ .tuple_struct_pat(
+ syntax_factory.path_from_text(struct_name),
+ tuple_pat.fields(),
+ )
+ .syntax(),
);
}
}
- // add imports across modules where needed
if let Some((import_scope, path)) = import_data {
- insert_use(&import_scope, path, &ctx.config.insert_use);
+ insert_use_with_editor(
+ &import_scope,
+ path,
+ &ctx.config.insert_use,
+ &mut editor,
+ &syntax_factory,
+ );
}
- })
+ });
+
+ editor.add_mappings(syntax_factory.finish_with_mappings());
+ edit.add_file_edits(file_id.file_id(ctx.db()), editor);
}
}
@@ -176,7 +211,7 @@ fn node_to_pats(node: SyntaxNode) -> Option<Vec<ast::Pat>> {
}
fn augment_references_with_imports(
- edit: &mut SourceChangeBuilder,
+ syntax_factory: &SyntaxFactory,
ctx: &AssistContext<'_>,
references: &[FileReference],
struct_name: &str,
@@ -191,8 +226,6 @@ fn augment_references_with_imports(
ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module()))
})
.map(|(name, ref_module)| {
- let new_name = edit.make_mut(name);
-
// if the referenced module is not the same as the target one and has not been seen before, add an import
let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module
&& !visited_modules.contains(&ref_module)
@@ -201,8 +234,7 @@ fn augment_references_with_imports(
let cfg =
ctx.config.find_path_config(ctx.sema.is_nightly(ref_module.krate(ctx.sema.db)));
- let import_scope =
- ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema);
+ let import_scope = ImportScope::find_insert_use_container(name.syntax(), &ctx.sema);
let path = ref_module
.find_use_path(
ctx.sema.db,
@@ -211,12 +243,12 @@ fn augment_references_with_imports(
cfg,
)
.map(|mod_path| {
- make::path_concat(
+ syntax_factory.path_concat(
mod_path_to_ast(
&mod_path,
target_module.krate(ctx.db()).edition(ctx.db()),
),
- make::path_from_text(struct_name),
+ syntax_factory.path_from_text(struct_name),
)
});
@@ -225,7 +257,7 @@ fn augment_references_with_imports(
None
};
- (new_name, import_data)
+ (name, import_data)
})
.collect()
}
@@ -233,6 +265,7 @@ fn augment_references_with_imports(
// Adds the definition of the tuple struct before the parent function.
fn add_tuple_struct_def(
edit: &mut SourceChangeBuilder,
+ syntax_factory: &SyntaxFactory,
ctx: &AssistContext<'_>,
usages: &UsageSearchResult,
parent: &SyntaxNode,
@@ -248,22 +281,27 @@ fn add_tuple_struct_def(
ctx.sema.scope(name.syntax()).map(|scope| scope.module())
})
.any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);
- let visibility = if make_struct_pub { Some(make::visibility_pub()) } else { None };
+ let visibility = if make_struct_pub { Some(syntax_factory.visibility_pub()) } else { None };
- let field_list = ast::FieldList::TupleFieldList(make::tuple_field_list(
- tuple_ty.fields().map(|ty| make::tuple_field(visibility.clone(), ty)),
+ let field_list = ast::FieldList::TupleFieldList(syntax_factory.tuple_field_list(
+ tuple_ty.fields().map(|ty| syntax_factory.tuple_field(visibility.clone(), ty)),
));
- let struct_name = make::name(struct_name);
- let struct_def = make::struct_(visibility, struct_name, None, field_list).clone_for_update();
+ let struct_name = syntax_factory.name(struct_name);
+ let struct_def = syntax_factory.struct_(visibility, struct_name, None, field_list);
let indent = IndentLevel::from_node(parent);
- struct_def.reindent_to(indent);
+ let struct_def = struct_def.indent(indent);
edit.insert(parent.text_range().start(), format!("{struct_def}\n\n{indent}"));
}
/// Replaces each returned tuple in `body` with the constructor of the tuple struct named `struct_name`.
-fn replace_body_return_values(body: ast::Expr, struct_name: &str) {
+fn replace_body_return_values(
+ syntax_editor: &mut SyntaxEditor,
+ syntax_factory: &SyntaxFactory,
+ body: ast::Expr,
+ struct_name: &str,
+) {
let mut exprs_to_wrap = Vec::new();
let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e);
@@ -278,12 +316,11 @@ fn replace_body_return_values(body: ast::Expr, struct_name: &str) {
for ret_expr in exprs_to_wrap {
if let ast::Expr::TupleExpr(tuple_expr) = &ret_expr {
- let struct_constructor = make::expr_call(
- make::expr_path(make::ext::ident_path(struct_name)),
- make::arg_list(tuple_expr.fields()),
- )
- .clone_for_update();
- ted::replace(ret_expr.syntax(), struct_constructor.syntax());
+ let struct_constructor = syntax_factory.expr_call(
+ syntax_factory.expr_path(syntax_factory.ident_path(struct_name)),
+ syntax_factory.arg_list(tuple_expr.fields()),
+ );
+ syntax_editor.replace(ret_expr.syntax(), struct_constructor.syntax());
}
}
}
diff --git a/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
index e2afc0bf13..b8dc59f87d 100644
--- a/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
+++ b/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
@@ -8,8 +8,8 @@ use ide_db::{
use itertools::Itertools;
use syntax::{
T,
- ast::{self, AstNode, FieldExpr, HasName, IdentPat, make},
- ted,
+ ast::{self, AstNode, FieldExpr, HasName, IdentPat, syntax_factory::SyntaxFactory},
+ syntax_editor::{Position, SyntaxEditor},
};
use crate::{
@@ -89,13 +89,20 @@ fn destructure_tuple_edit_impl(
data: &TupleData,
in_sub_pattern: bool,
) {
- let assignment_edit = edit_tuple_assignment(ctx, edit, data, in_sub_pattern);
- let current_file_usages_edit = edit_tuple_usages(data, edit, ctx, in_sub_pattern);
+ let mut syntax_editor = edit.make_editor(data.ident_pat.syntax());
+ let syntax_factory = SyntaxFactory::with_mappings();
- assignment_edit.apply();
+ let assignment_edit =
+ edit_tuple_assignment(ctx, edit, &mut syntax_editor, &syntax_factory, data, in_sub_pattern);
+ let current_file_usages_edit = edit_tuple_usages(data, ctx, &syntax_factory, in_sub_pattern);
+
+ assignment_edit.apply(&mut syntax_editor, &syntax_factory);
if let Some(usages_edit) = current_file_usages_edit {
- usages_edit.into_iter().for_each(|usage_edit| usage_edit.apply(edit))
+ usages_edit.into_iter().for_each(|usage_edit| usage_edit.apply(edit, &mut syntax_editor))
}
+
+ syntax_editor.add_mappings(syntax_factory.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), syntax_editor);
}
fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleData> {
@@ -165,11 +172,11 @@ struct TupleData {
fn edit_tuple_assignment(
ctx: &AssistContext<'_>,
edit: &mut SourceChangeBuilder,
+ editor: &mut SyntaxEditor,
+ make: &SyntaxFactory,
data: &TupleData,
in_sub_pattern: bool,
) -> AssignmentEdit {
- let ident_pat = edit.make_mut(data.ident_pat.clone());
-
let tuple_pat = {
let original = &data.ident_pat;
let is_ref = original.ref_token().is_some();
@@ -177,10 +184,11 @@ fn edit_tuple_assignment(
let fields = data
.field_names
.iter()
- .map(|name| ast::Pat::from(make::ident_pat(is_ref, is_mut, make::name(name))));
- make::tuple_pat(fields).clone_for_update()
+ .map(|name| ast::Pat::from(make.ident_pat(is_ref, is_mut, make.name(name))));
+ make.tuple_pat(fields)
};
- let is_shorthand_field = ident_pat
+ let is_shorthand_field = data
+ .ident_pat
.name()
.as_ref()
.and_then(ast::RecordPatField::for_field_name)
@@ -189,14 +197,20 @@ fn edit_tuple_assignment(
if let Some(cap) = ctx.config.snippet_cap {
// place cursor on first tuple name
if let Some(ast::Pat::IdentPat(first_pat)) = tuple_pat.fields().next() {
- edit.add_tabstop_before(
- cap,
- first_pat.name().expect("first ident pattern should have a name"),
- )
+ let annotation = edit.make_tabstop_before(cap);
+ editor.add_annotation(
+ first_pat.name().expect("first ident pattern should have a name").syntax(),
+ annotation,
+ );
}
}
- AssignmentEdit { ident_pat, tuple_pat, in_sub_pattern, is_shorthand_field }
+ AssignmentEdit {
+ ident_pat: data.ident_pat.clone(),
+ tuple_pat,
+ in_sub_pattern,
+ is_shorthand_field,
+ }
}
struct AssignmentEdit {
ident_pat: ast::IdentPat,
@@ -206,23 +220,30 @@ struct AssignmentEdit {
}
impl AssignmentEdit {
- fn apply(self) {
+ fn apply(self, syntax_editor: &mut SyntaxEditor, syntax_mapping: &SyntaxFactory) {
// with sub_pattern: keep original tuple and add subpattern: `tup @ (_0, _1)`
if self.in_sub_pattern {
- self.ident_pat.set_pat(Some(self.tuple_pat.into()))
+ self.ident_pat.set_pat_with_editor(
+ Some(self.tuple_pat.into()),
+ syntax_editor,
+ syntax_mapping,
+ )
} else if self.is_shorthand_field {
- ted::insert(ted::Position::after(self.ident_pat.syntax()), self.tuple_pat.syntax());
- ted::insert_raw(ted::Position::after(self.ident_pat.syntax()), make::token(T![:]));
+ syntax_editor.insert(Position::after(self.ident_pat.syntax()), self.tuple_pat.syntax());
+ syntax_editor
+ .insert(Position::after(self.ident_pat.syntax()), syntax_mapping.whitespace(" "));
+ syntax_editor
+ .insert(Position::after(self.ident_pat.syntax()), syntax_mapping.token(T![:]));
} else {
- ted::replace(self.ident_pat.syntax(), self.tuple_pat.syntax())
+ syntax_editor.replace(self.ident_pat.syntax(), self.tuple_pat.syntax())
}
}
}
fn edit_tuple_usages(
data: &TupleData,
- edit: &mut SourceChangeBuilder,
ctx: &AssistContext<'_>,
+ make: &SyntaxFactory,
in_sub_pattern: bool,
) -> Option<Vec<EditTupleUsage>> {
// We need to collect edits first before actually applying them
@@ -238,20 +259,20 @@ fn edit_tuple_usages(
.as_ref()?
.as_slice()
.iter()
- .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern))
+ .filter_map(|r| edit_tuple_usage(ctx, make, r, data, in_sub_pattern))
.collect_vec();
Some(edits)
}
fn edit_tuple_usage(
ctx: &AssistContext<'_>,
- builder: &mut SourceChangeBuilder,
+ make: &SyntaxFactory,
usage: &FileReference,
data: &TupleData,
in_sub_pattern: bool,
) -> Option<EditTupleUsage> {
match detect_tuple_index(usage, data) {
- Some(index) => Some(edit_tuple_field_usage(ctx, builder, data, index)),
+ Some(index) => Some(edit_tuple_field_usage(ctx, make, data, index)),
None if in_sub_pattern => {
cov_mark::hit!(destructure_tuple_call_with_subpattern);
None
@@ -262,20 +283,18 @@ fn edit_tuple_usage(
fn edit_tuple_field_usage(
ctx: &AssistContext<'_>,
- builder: &mut SourceChangeBuilder,
+ make: &SyntaxFactory,
data: &TupleData,
index: TupleIndex,
) -> EditTupleUsage {
let field_name = &data.field_names[index.index];
- let field_name = make::expr_path(make::ext::ident_path(field_name));
+ let field_name = make.expr_path(make.ident_path(field_name));
if data.ref_type.is_some() {
let (replace_expr, ref_data) = determine_ref_and_parens(ctx, &index.field_expr);
- let replace_expr = builder.make_mut(replace_expr);
- EditTupleUsage::ReplaceExpr(replace_expr, ref_data.wrap_expr(field_name))
+ EditTupleUsage::ReplaceExpr(replace_expr, ref_data.wrap_expr_with_factory(field_name, make))
} else {
- let field_expr = builder.make_mut(index.field_expr);
- EditTupleUsage::ReplaceExpr(field_expr.into(), field_name)
+ EditTupleUsage::ReplaceExpr(index.field_expr.into(), field_name)
}
}
enum EditTupleUsage {
@@ -291,14 +310,14 @@ enum EditTupleUsage {
}
impl EditTupleUsage {
- fn apply(self, edit: &mut SourceChangeBuilder) {
+ fn apply(self, edit: &mut SourceChangeBuilder, syntax_editor: &mut SyntaxEditor) {
match self {
EditTupleUsage::NoIndex(range) => {
edit.insert(range.start(), "/*");
edit.insert(range.end(), "*/");
}
EditTupleUsage::ReplaceExpr(target_expr, replace_with) => {
- ted::replace(target_expr.syntax(), replace_with.clone_for_update().syntax())
+ syntax_editor.replace(target_expr.syntax(), replace_with.syntax())
}
}
}
diff --git a/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
index 73e93a3fbf..51b967437b 100644
--- a/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
+++ b/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
@@ -2,13 +2,16 @@ use ide_db::{famous_defs::FamousDefs, source_change::SourceChangeBuilder};
use stdx::{format_to, to_lower_snake_case};
use syntax::{
TextRange,
- ast::{self, AstNode, HasName, HasVisibility, edit_in_place::Indent, make},
- ted,
+ ast::{
+ self, AstNode, HasGenericParams, HasName, HasVisibility, edit::AstNodeEdit,
+ syntax_factory::SyntaxFactory,
+ },
+ syntax_editor::Position,
};
use crate::{
AssistContext, AssistId, Assists, GroupLabel,
- utils::{convert_reference_type, find_struct_impl, generate_impl},
+ utils::{convert_reference_type, find_struct_impl},
};
// Assist: generate_setter
@@ -215,12 +218,14 @@ fn generate_getter_from_info(
ctx: &AssistContext<'_>,
info: &AssistInfo,
record_field_info: &RecordFieldInfo,
+ syntax_factory: &SyntaxFactory,
) -> ast::Fn {
let (ty, body) = if matches!(info.assist_type, AssistType::MutGet) {
+ let self_expr = syntax_factory.expr_path(syntax_factory.ident_path("self"));
(
- make::ty_ref(record_field_info.field_ty.clone(), true),
- make::expr_ref(
- make::expr_field(make::ext::expr_self(), &record_field_info.field_name.text()),
+ syntax_factory.ty_ref(record_field_info.field_ty.clone(), true),
+ syntax_factory.expr_ref(
+ syntax_factory.expr_field(self_expr, &record_field_info.field_name.text()).into(),
true,
),
)
@@ -241,9 +246,14 @@ fn generate_getter_from_info(
})()
.unwrap_or_else(|| {
(
- make::ty_ref(record_field_info.field_ty.clone(), false),
- make::expr_ref(
- make::expr_field(make::ext::expr_self(), &record_field_info.field_name.text()),
+ syntax_factory.ty_ref(record_field_info.field_ty.clone(), false),
+ syntax_factory.expr_ref(
+ syntax_factory
+ .expr_field(
+ syntax_factory.expr_path(syntax_factory.ident_path("self")),
+ &record_field_info.field_name.text(),
+ )
+ .into(),
false,
),
)
@@ -251,18 +261,18 @@ fn generate_getter_from_info(
};
let self_param = if matches!(info.assist_type, AssistType::MutGet) {
- make::mut_self_param()
+ syntax_factory.mut_self_param()
} else {
- make::self_param()
+ syntax_factory.self_param()
};
let strukt = &info.strukt;
- let fn_name = make::name(&record_field_info.fn_name);
- let params = make::param_list(Some(self_param), []);
- let ret_type = Some(make::ret_type(ty));
- let body = make::block_expr([], Some(body));
+ let fn_name = syntax_factory.name(&record_field_info.fn_name);
+ let params = syntax_factory.param_list(Some(self_param), []);
+ let ret_type = Some(syntax_factory.ret_type(ty));
+ let body = syntax_factory.block_expr([], Some(body));
- make::fn_(
+ syntax_factory.fn_(
None,
strukt.visibility(),
fn_name,
@@ -278,28 +288,35 @@ fn generate_getter_from_info(
)
}
-fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldInfo) -> ast::Fn {
+fn generate_setter_from_info(
+ info: &AssistInfo,
+ record_field_info: &RecordFieldInfo,
+ syntax_factory: &SyntaxFactory,
+) -> ast::Fn {
let strukt = &info.strukt;
let field_name = &record_field_info.fn_name;
- let fn_name = make::name(&format!("set_{field_name}"));
+ let fn_name = syntax_factory.name(&format!("set_{field_name}"));
let field_ty = &record_field_info.field_ty;
// Make the param list
// `(&mut self, $field_name: $field_ty)`
- let field_param =
- make::param(make::ident_pat(false, false, make::name(field_name)).into(), field_ty.clone());
- let params = make::param_list(Some(make::mut_self_param()), [field_param]);
+ let field_param = syntax_factory.param(
+ syntax_factory.ident_pat(false, false, syntax_factory.name(field_name)).into(),
+ field_ty.clone(),
+ );
+ let params = syntax_factory.param_list(Some(syntax_factory.mut_self_param()), [field_param]);
// Make the assignment body
// `self.$field_name = $field_name`
- let self_expr = make::ext::expr_self();
- let lhs = make::expr_field(self_expr, field_name);
- let rhs = make::expr_path(make::ext::ident_path(field_name));
- let assign_stmt = make::expr_stmt(make::expr_assignment(lhs, rhs).into());
- let body = make::block_expr([assign_stmt.into()], None);
+ let self_expr = syntax_factory.expr_path(syntax_factory.ident_path("self"));
+ let lhs = syntax_factory.expr_field(self_expr, field_name);
+ let rhs = syntax_factory.expr_path(syntax_factory.ident_path(field_name));
+ let assign_stmt =
+ syntax_factory.expr_stmt(syntax_factory.expr_assignment(lhs.into(), rhs).into());
+ let body = syntax_factory.block_expr([assign_stmt.into()], None);
// Make the setter fn
- make::fn_(
+ syntax_factory.fn_(
None,
strukt.visibility(),
fn_name,
@@ -403,47 +420,69 @@ fn build_source_change(
info_of_record_fields: Vec<RecordFieldInfo>,
assist_info: AssistInfo,
) {
- let record_fields_count = info_of_record_fields.len();
-
- let impl_def = if let Some(impl_def) = &assist_info.impl_def {
- // We have an existing impl to add to
- builder.make_mut(impl_def.clone())
- } else {
- // Generate a new impl to add the methods to
- let impl_def = generate_impl(&ast::Adt::Struct(assist_info.strukt.clone()));
-
- // Insert it after the adt
- let strukt = builder.make_mut(assist_info.strukt.clone());
-
- ted::insert_all_raw(
- ted::Position::after(strukt.syntax()),
- vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
- );
-
- impl_def
- };
+ let syntax_factory = SyntaxFactory::without_mappings();
- let assoc_item_list = impl_def.get_or_create_assoc_item_list();
+ let items: Vec<ast::AssocItem> = info_of_record_fields
+ .iter()
+ .map(|record_field_info| {
+ let method = match assist_info.assist_type {
+ AssistType::Set => {
+ generate_setter_from_info(&assist_info, record_field_info, &syntax_factory)
+ }
+ _ => {
+ generate_getter_from_info(ctx, &assist_info, record_field_info, &syntax_factory)
+ }
+ };
+ let new_fn = method.clone_for_update();
+ let new_fn = new_fn.indent(1.into());
+ new_fn.into()
+ })
+ .collect();
- for (i, record_field_info) in info_of_record_fields.iter().enumerate() {
- // Make the new getter or setter fn
- let new_fn = match assist_info.assist_type {
- AssistType::Set => generate_setter_from_info(&assist_info, record_field_info),
- _ => generate_getter_from_info(ctx, &assist_info, record_field_info),
- }
- .clone_for_update();
- new_fn.indent(1.into());
+ if let Some(impl_def) = &assist_info.impl_def {
+ // We have an existing impl to add to
+ let mut editor = builder.make_editor(impl_def.syntax());
+ impl_def.assoc_item_list().unwrap().add_items(&mut editor, items.clone());
- // Insert a tabstop only for last method we generate
- if i == record_fields_count - 1
- && let Some(cap) = ctx.config.snippet_cap
- && let Some(name) = new_fn.name()
+ if let Some(cap) = ctx.config.snippet_cap
+ && let Some(ast::AssocItem::Fn(fn_)) = items.last()
+ && let Some(name) = fn_.name()
{
- builder.add_tabstop_before(cap, name);
+ let tabstop = builder.make_tabstop_before(cap);
+ editor.add_annotation(name.syntax(), tabstop);
}
- assoc_item_list.add_item(new_fn.clone().into());
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
+ return;
}
+ let ty_params = assist_info.strukt.generic_param_list();
+ let ty_args = ty_params.as_ref().map(|it| it.to_generic_args());
+ let impl_def = syntax_factory.impl_(
+ None,
+ ty_params,
+ ty_args,
+ syntax_factory
+ .ty_path(syntax_factory.ident_path(&assist_info.strukt.name().unwrap().to_string()))
+ .into(),
+ None,
+ Some(syntax_factory.assoc_item_list(items)),
+ );
+ let mut editor = builder.make_editor(assist_info.strukt.syntax());
+ editor.insert_all(
+ Position::after(assist_info.strukt.syntax()),
+ vec![syntax_factory.whitespace("\n\n").into(), impl_def.syntax().clone().into()],
+ );
+
+ if let Some(cap) = ctx.config.snippet_cap
+ && let Some(assoc_list) = impl_def.assoc_item_list()
+ && let Some(ast::AssocItem::Fn(fn_)) = assoc_list.assoc_items().last()
+ && let Some(name) = fn_.name()
+ {
+ let tabstop = builder.make_tabstop_before(cap);
+ editor.add_annotation(name.syntax().clone(), tabstop);
+ }
+
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
}
#[cfg(test)]
diff --git a/crates/ide-assists/src/handlers/generate_impl.rs b/crates/ide-assists/src/handlers/generate_impl.rs
index 77eb8efc6f..bbd42481ef 100644
--- a/crates/ide-assists/src/handlers/generate_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_impl.rs
@@ -1,5 +1,5 @@
use syntax::{
- ast::{self, AstNode, HasGenericParams, HasName, edit_in_place::Indent, make},
+ ast::{self, AstNode, HasGenericParams, HasName, edit::AstNodeEdit, make},
syntax_editor::{Position, SyntaxEditor},
};
@@ -8,10 +8,14 @@ use crate::{
utils::{self, DefaultMethods, IgnoreAssocItems},
};
-fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &impl Indent) {
+fn insert_impl(
+ editor: &mut SyntaxEditor,
+ impl_: &ast::Impl,
+ nominal: &impl AstNodeEdit,
+) -> ast::Impl {
let indent = nominal.indent_level();
- impl_.indent(indent);
+ let impl_ = impl_.indent(indent);
editor.insert_all(
Position::after(nominal.syntax()),
vec![
@@ -20,6 +24,8 @@ fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &impl Inde
impl_.syntax().clone().into(),
],
);
+
+ impl_
}
// Assist: generate_impl
@@ -57,6 +63,8 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
let impl_ = utils::generate_impl(&nominal);
let mut editor = edit.make_editor(nominal.syntax());
+
+ let impl_ = insert_impl(&mut editor, &impl_, &nominal);
// Add a tabstop after the left curly brace
if let Some(cap) = ctx.config.snippet_cap
&& let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token())
@@ -65,7 +73,6 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
editor.add_annotation(l_curly, tabstop);
}
- insert_impl(&mut editor, &impl_, &nominal);
edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
@@ -106,6 +113,8 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let impl_ = utils::generate_trait_impl_intransitive(&nominal, make::ty_placeholder());
let mut editor = edit.make_editor(nominal.syntax());
+
+ let impl_ = insert_impl(&mut editor, &impl_, &nominal);
// Make the trait type a placeholder snippet
if let Some(cap) = ctx.config.snippet_cap {
if let Some(trait_) = impl_.trait_() {
@@ -119,7 +128,6 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
}
}
- insert_impl(&mut editor, &impl_, &nominal);
edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
@@ -206,6 +214,8 @@ pub(crate) fn generate_impl_trait(acc: &mut Assists, ctx: &AssistContext<'_>) ->
make_impl_(Some(assoc_item_list))
};
+ let impl_ = insert_impl(&mut editor, &impl_, &trait_);
+
if let Some(cap) = ctx.config.snippet_cap {
if let Some(generics) = impl_.trait_().and_then(|it| it.generic_arg_list()) {
for generic in generics.generic_args() {
@@ -232,7 +242,6 @@ pub(crate) fn generate_impl_trait(acc: &mut Assists, ctx: &AssistContext<'_>) ->
}
}
- insert_impl(&mut editor, &impl_, &trait_);
edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
diff --git a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
index 53f6f4883f..3a62a8853e 100644
--- a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
@@ -1,7 +1,7 @@
use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait};
use syntax::{
AstNode, SyntaxElement, SyntaxNode, T,
- ast::{self, edit::AstNodeEdit, edit_in_place::Indent, syntax_factory::SyntaxFactory},
+ ast::{self, edit::AstNodeEdit, syntax_factory::SyntaxFactory},
syntax_editor::{Element, Position, SyntaxEditor},
};
@@ -46,7 +46,7 @@ use crate::{AssistContext, AssistId, Assists};
// ```
pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
- let indent = Indent::indent_level(&impl_def);
+ let indent = impl_def.indent_level();
let ast::Type::PathType(path) = impl_def.trait_()? else {
return None;
@@ -78,7 +78,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
let new_impl = ast::Impl::cast(new_root.clone()).unwrap();
- Indent::indent(&new_impl, indent);
+ let new_impl = new_impl.indent(indent);
let mut editor = edit.make_editor(impl_def.syntax());
editor.insert_all(
diff --git a/crates/ide-assists/src/handlers/generate_new.rs b/crates/ide-assists/src/handlers/generate_new.rs
index 4b923ab556..793211a27b 100644
--- a/crates/ide-assists/src/handlers/generate_new.rs
+++ b/crates/ide-assists/src/handlers/generate_new.rs
@@ -3,7 +3,7 @@ use ide_db::{
use_trivial_constructor::use_trivial_constructor,
};
use syntax::{
- ast::{self, AstNode, HasName, HasVisibility, StructKind, edit_in_place::Indent, make},
+ ast::{self, AstNode, HasName, HasVisibility, StructKind, edit::AstNodeEdit, make},
syntax_editor::Position,
};
@@ -150,14 +150,14 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
false,
false,
)
- .clone_for_update();
- fn_.indent(1.into());
+ .clone_for_update()
+ .indent(1.into());
let mut editor = builder.make_editor(strukt.syntax());
// Get the node for set annotation
let contain_fn = if let Some(impl_def) = impl_def {
- fn_.indent(impl_def.indent_level());
+ let fn_ = fn_.indent(impl_def.indent_level());
if let Some(l_curly) = impl_def.assoc_item_list().and_then(|list| list.l_curly_token())
{
@@ -182,9 +182,8 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
let indent_level = strukt.indent_level();
let body = vec![ast::AssocItem::Fn(fn_)];
let list = make::assoc_item_list(Some(body));
- let impl_def = generate_impl_with_item(&ast::Adt::Struct(strukt.clone()), Some(list));
-
- impl_def.indent(strukt.indent_level());
+ let impl_def = generate_impl_with_item(&ast::Adt::Struct(strukt.clone()), Some(list))
+ .indent(strukt.indent_level());
// Insert it after the adt
editor.insert_all(
diff --git a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
index 56500cf068..8bc4d50cf6 100644
--- a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
@@ -2,7 +2,7 @@ use crate::assist_context::{AssistContext, Assists};
use ide_db::assists::AssistId;
use syntax::{
AstNode, SyntaxKind, T,
- ast::{self, HasGenericParams, HasName, HasVisibility, edit_in_place::Indent, make},
+ ast::{self, HasGenericParams, HasName, HasVisibility, edit::AstNodeEdit, make},
syntax_editor::{Position, SyntaxEditor},
};
diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs
index fa4f2a78c8..21f2249a19 100644
--- a/crates/ide-assists/src/handlers/inline_call.rs
+++ b/crates/ide-assists/src/handlers/inline_call.rs
@@ -403,6 +403,12 @@ fn inline(
.find(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW)
{
let replace_with = t.clone_subtree().syntax().clone_for_update();
+ if !is_in_type_path(&self_tok)
+ && let Some(ty) = ast::Type::cast(replace_with.clone())
+ && let Some(generic_arg_list) = ty.generic_arg_list()
+ {
+ ted::remove(generic_arg_list.syntax());
+ }
ted::replace(self_tok, replace_with);
}
}
@@ -588,6 +594,17 @@ fn inline(
}
}
+fn is_in_type_path(self_tok: &syntax::SyntaxToken) -> bool {
+ self_tok
+ .parent_ancestors()
+ .skip_while(|it| !ast::Path::can_cast(it.kind()))
+ .map_while(ast::Path::cast)
+ .last()
+ .and_then(|it| it.syntax().parent())
+ .and_then(ast::PathType::cast)
+ .is_some()
+}
+
fn path_expr_as_record_field(usage: &PathExpr) -> Option<ast::RecordExprField> {
let path = usage.path()?;
let name_ref = path.as_single_name_ref()?;
@@ -1695,6 +1712,41 @@ fn main() {
}
#[test]
+ fn inline_trait_method_call_with_lifetimes() {
+ check_assist(
+ inline_call,
+ r#"
+trait Trait {
+ fn f() -> Self;
+}
+struct Foo<'a>(&'a ());
+impl<'a> Trait for Foo<'a> {
+ fn f() -> Self { Self(&()) }
+}
+impl Foo<'_> {
+ fn new() -> Self {
+ Self::$0f()
+ }
+}
+"#,
+ r#"
+trait Trait {
+ fn f() -> Self;
+}
+struct Foo<'a>(&'a ());
+impl<'a> Trait for Foo<'a> {
+ fn f() -> Self { Self(&()) }
+}
+impl Foo<'_> {
+ fn new() -> Self {
+ Foo(&())
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
fn method_by_reborrow() {
check_assist(
inline_call,
diff --git a/crates/ide-assists/src/handlers/introduce_named_lifetime.rs b/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
index 264e3767a2..854e9561d2 100644
--- a/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
+++ b/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
@@ -1,11 +1,12 @@
-use ide_db::FxHashSet;
+use ide_db::{FileId, FxHashSet};
use syntax::{
- AstNode, TextRange,
- ast::{self, HasGenericParams, edit_in_place::GenericParamsOwnerEdit, make},
- ted::{self, Position},
+ AstNode, SmolStr, T, TextRange, ToSmolStr,
+ ast::{self, HasGenericParams, HasName, syntax_factory::SyntaxFactory},
+ format_smolstr,
+ syntax_editor::{Element, Position, SyntaxEditor},
};
-use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder};
+use crate::{AssistContext, AssistId, Assists};
static ASSIST_NAME: &str = "introduce_named_lifetime";
static ASSIST_LABEL: &str = "Introduce named lifetime";
@@ -38,100 +39,108 @@ pub(crate) fn introduce_named_lifetime(acc: &mut Assists, ctx: &AssistContext<'_
// FIXME: should also add support for the case fun(f: &Foo) -> &$0Foo
let lifetime =
ctx.find_node_at_offset::<ast::Lifetime>().filter(|lifetime| lifetime.text() == "'_")?;
+ let file_id = ctx.vfs_file_id();
let lifetime_loc = lifetime.lifetime_ident_token()?.text_range();
if let Some(fn_def) = lifetime.syntax().ancestors().find_map(ast::Fn::cast) {
- generate_fn_def_assist(acc, fn_def, lifetime_loc, lifetime)
+ generate_fn_def_assist(acc, fn_def, lifetime_loc, lifetime, file_id)
} else if let Some(impl_def) = lifetime.syntax().ancestors().find_map(ast::Impl::cast) {
- generate_impl_def_assist(acc, impl_def, lifetime_loc, lifetime)
+ generate_impl_def_assist(acc, impl_def, lifetime_loc, lifetime, file_id)
} else {
None
}
}
-/// Generate the assist for the fn def case
+/// Given an optional generic parameter list, generate a unique lifetime
+/// parameter name that is not already used in the list
+fn generate_unique_lifetime_param_name(
+ existing_params: Option<ast::GenericParamList>,
+) -> Option<SmolStr> {
+ let used_lifetime_param: FxHashSet<SmolStr> = existing_params
+ .iter()
+ .flat_map(|params| params.lifetime_params())
+ .map(|p| p.syntax().text().to_smolstr())
+ .collect();
+ ('a'..='z').map(|c| format_smolstr!("'{c}")).find(|lt| !used_lifetime_param.contains(lt))
+}
+
fn generate_fn_def_assist(
acc: &mut Assists,
fn_def: ast::Fn,
lifetime_loc: TextRange,
lifetime: ast::Lifetime,
+ file_id: FileId,
) -> Option<()> {
- let param_list: ast::ParamList = fn_def.param_list()?;
- let new_lifetime_param = generate_unique_lifetime_param_name(fn_def.generic_param_list())?;
+ let param_list = fn_def.param_list()?;
+ let new_lifetime_name = generate_unique_lifetime_param_name(fn_def.generic_param_list())?;
let self_param =
- // use the self if it's a reference and has no explicit lifetime
param_list.self_param().filter(|p| p.lifetime().is_none() && p.amp_token().is_some());
- // compute the location which implicitly has the same lifetime as the anonymous lifetime
+
let loc_needing_lifetime = if let Some(self_param) = self_param {
- // if we have a self reference, use that
Some(NeedsLifetime::SelfParam(self_param))
} else {
- // otherwise, if there's a single reference parameter without a named lifetime, use that
- let fn_params_without_lifetime: Vec<_> = param_list
+ let unnamed_refs: Vec<_> = param_list
.params()
.filter_map(|param| match param.ty() {
- Some(ast::Type::RefType(ascribed_type)) if ascribed_type.lifetime().is_none() => {
- Some(NeedsLifetime::RefType(ascribed_type))
+ Some(ast::Type::RefType(ref_type)) if ref_type.lifetime().is_none() => {
+ Some(NeedsLifetime::RefType(ref_type))
}
_ => None,
})
.collect();
- match fn_params_without_lifetime.len() {
- 1 => Some(fn_params_without_lifetime.into_iter().next()?),
+
+ match unnamed_refs.len() {
+ 1 => Some(unnamed_refs.into_iter().next()?),
0 => None,
- // multiple unnamed is invalid. assist is not applicable
_ => return None,
}
};
- acc.add(AssistId::refactor(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |builder| {
- let fn_def = builder.make_mut(fn_def);
- let lifetime = builder.make_mut(lifetime);
- let loc_needing_lifetime =
- loc_needing_lifetime.and_then(|it| it.make_mut(builder).to_position());
-
- fn_def.get_or_create_generic_param_list().add_generic_param(
- make::lifetime_param(new_lifetime_param.clone()).clone_for_update().into(),
- );
- ted::replace(lifetime.syntax(), new_lifetime_param.clone_for_update().syntax());
- if let Some(position) = loc_needing_lifetime {
- ted::insert(position, new_lifetime_param.clone_for_update().syntax());
+
+ acc.add(AssistId::refactor(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |edit| {
+ let root = fn_def.syntax().ancestors().last().unwrap().clone();
+ let mut editor = SyntaxEditor::new(root);
+ let factory = SyntaxFactory::with_mappings();
+
+ if let Some(generic_list) = fn_def.generic_param_list() {
+ insert_lifetime_param(&mut editor, &factory, &generic_list, &new_lifetime_name);
+ } else {
+ insert_new_generic_param_list_fn(&mut editor, &factory, &fn_def, &new_lifetime_name);
}
+
+ editor.replace(lifetime.syntax(), factory.lifetime(&new_lifetime_name).syntax());
+
+ if let Some(pos) = loc_needing_lifetime.and_then(|l| l.to_position()) {
+ editor.insert_all(
+ pos,
+ vec![
+ factory.lifetime(&new_lifetime_name).syntax().clone().into(),
+ factory.whitespace(" ").into(),
+ ],
+ );
+ }
+
+ edit.add_file_edits(file_id, editor);
})
}
-/// Generate the assist for the impl def case
-fn generate_impl_def_assist(
- acc: &mut Assists,
- impl_def: ast::Impl,
- lifetime_loc: TextRange,
- lifetime: ast::Lifetime,
+fn insert_new_generic_param_list_fn(
+ editor: &mut SyntaxEditor,
+ factory: &SyntaxFactory,
+ fn_def: &ast::Fn,
+ lifetime_name: &str,
) -> Option<()> {
- let new_lifetime_param = generate_unique_lifetime_param_name(impl_def.generic_param_list())?;
- acc.add(AssistId::refactor(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |builder| {
- let impl_def = builder.make_mut(impl_def);
- let lifetime = builder.make_mut(lifetime);
+ let name = fn_def.name()?;
- impl_def.get_or_create_generic_param_list().add_generic_param(
- make::lifetime_param(new_lifetime_param.clone()).clone_for_update().into(),
- );
- ted::replace(lifetime.syntax(), new_lifetime_param.clone_for_update().syntax());
- })
-}
+ editor.insert_all(
+ Position::after(name.syntax()),
+ vec![
+ factory.token(T![<]).syntax_element(),
+ factory.lifetime(lifetime_name).syntax().syntax_element(),
+ factory.token(T![>]).syntax_element(),
+ ],
+ );
-/// Given a type parameter list, generate a unique lifetime parameter name
-/// which is not in the list
-fn generate_unique_lifetime_param_name(
- existing_type_param_list: Option<ast::GenericParamList>,
-) -> Option<ast::Lifetime> {
- match existing_type_param_list {
- Some(type_params) => {
- let used_lifetime_params: FxHashSet<_> =
- type_params.lifetime_params().map(|p| p.syntax().text().to_string()).collect();
- ('a'..='z').map(|it| format!("'{it}")).find(|it| !used_lifetime_params.contains(it))
- }
- None => Some("'a".to_owned()),
- }
- .map(|it| make::lifetime(&it))
+ Some(())
}
enum NeedsLifetime {
@@ -140,13 +149,6 @@ enum NeedsLifetime {
}
impl NeedsLifetime {
- fn make_mut(self, builder: &mut SourceChangeBuilder) -> Self {
- match self {
- Self::SelfParam(it) => Self::SelfParam(builder.make_mut(it)),
- Self::RefType(it) => Self::RefType(builder.make_mut(it)),
- }
- }
-
fn to_position(self) -> Option<Position> {
match self {
Self::SelfParam(it) => Some(Position::after(it.amp_token()?)),
@@ -155,6 +157,75 @@ impl NeedsLifetime {
}
}
+fn generate_impl_def_assist(
+ acc: &mut Assists,
+ impl_def: ast::Impl,
+ lifetime_loc: TextRange,
+ lifetime: ast::Lifetime,
+ file_id: FileId,
+) -> Option<()> {
+ let new_lifetime_name = generate_unique_lifetime_param_name(impl_def.generic_param_list())?;
+
+ acc.add(AssistId::refactor(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |edit| {
+ let root = impl_def.syntax().ancestors().last().unwrap().clone();
+ let mut editor = SyntaxEditor::new(root);
+ let factory = SyntaxFactory::without_mappings();
+
+ if let Some(generic_list) = impl_def.generic_param_list() {
+ insert_lifetime_param(&mut editor, &factory, &generic_list, &new_lifetime_name);
+ } else {
+ insert_new_generic_param_list_imp(&mut editor, &factory, &impl_def, &new_lifetime_name);
+ }
+
+ editor.replace(lifetime.syntax(), factory.lifetime(&new_lifetime_name).syntax());
+
+ edit.add_file_edits(file_id, editor);
+ })
+}
+
+fn insert_new_generic_param_list_imp(
+ editor: &mut SyntaxEditor,
+ factory: &SyntaxFactory,
+ impl_: &ast::Impl,
+ lifetime_name: &str,
+) -> Option<()> {
+ let impl_kw = impl_.impl_token()?;
+
+ editor.insert_all(
+ Position::after(impl_kw),
+ vec![
+ factory.token(T![<]).syntax_element(),
+ factory.lifetime(lifetime_name).syntax().syntax_element(),
+ factory.token(T![>]).syntax_element(),
+ ],
+ );
+
+ Some(())
+}
+
+fn insert_lifetime_param(
+ editor: &mut SyntaxEditor,
+ factory: &SyntaxFactory,
+ generic_list: &ast::GenericParamList,
+ lifetime_name: &str,
+) -> Option<()> {
+ let r_angle = generic_list.r_angle_token()?;
+ let needs_comma = generic_list.generic_params().next().is_some();
+
+ let mut elements = Vec::new();
+
+ if needs_comma {
+ elements.push(factory.token(T![,]).syntax_element());
+ elements.push(factory.whitespace(" ").syntax_element());
+ }
+
+ let lifetime = factory.lifetime(lifetime_name);
+ elements.push(lifetime.syntax().clone().into());
+
+ editor.insert_all(Position::before(r_angle), elements);
+ Some(())
+}
+
#[cfg(test)]
mod tests {
use super::*;
diff --git a/crates/ide-assists/src/handlers/move_const_to_impl.rs b/crates/ide-assists/src/handlers/move_const_to_impl.rs
index 102d7e6d53..b3e79e4663 100644
--- a/crates/ide-assists/src/handlers/move_const_to_impl.rs
+++ b/crates/ide-assists/src/handlers/move_const_to_impl.rs
@@ -2,7 +2,10 @@ use hir::{AsAssocItem, AssocItemContainer, FileRange, HasSource};
use ide_db::{assists::AssistId, defs::Definition, search::SearchScope};
use syntax::{
SyntaxKind,
- ast::{self, AstNode, edit::IndentLevel, edit_in_place::Indent},
+ ast::{
+ self, AstNode,
+ edit::{AstNodeEdit, IndentLevel},
+ },
};
use crate::assist_context::{AssistContext, Assists};
@@ -136,7 +139,8 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let indent = IndentLevel::from_node(parent_fn.syntax());
let const_ = const_.clone_for_update();
- const_.reindent_to(indent);
+ let const_ = const_.reset_indent();
+ let const_ = const_.indent(indent);
builder.insert(insert_offset, format!("\n{indent}{const_}{fixup}"));
},
)
diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index 11b3fd22fa..7c024263b4 100644
--- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -1308,6 +1308,29 @@ impl<T: Clone> Clone for Foo<T> {
}
#[test]
+ fn add_custom_impl_clone_generic_tuple_struct_with_associated() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive, deref
+#[derive(Clo$0ne)]
+struct Foo<T: core::ops::Deref>(T::Target);
+"#,
+ r#"
+struct Foo<T: core::ops::Deref>(T::Target);
+
+impl<T: core::ops::Deref + Clone> Clone for Foo<T>
+where T::Target: Clone
+{
+ $0fn clone(&self) -> Self {
+ Self(self.0.clone())
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
fn test_ignore_derive_macro_without_input() {
check_assist_not_applicable(
replace_derive_with_manual_impl,
diff --git a/crates/ide-assists/src/handlers/replace_if_let_with_match.rs b/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
index b7e5344712..915dd3ffca 100644
--- a/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
+++ b/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
@@ -3,7 +3,11 @@ use std::iter::successors;
use ide_db::{RootDatabase, defs::NameClass, ty_filter::TryEnum};
use syntax::{
AstNode, Edition, SyntaxKind, T, TextRange,
- ast::{self, HasName, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory},
+ ast::{
+ self, HasName,
+ edit::{AstNodeEdit, IndentLevel},
+ syntax_factory::SyntaxFactory,
+ },
syntax_editor::SyntaxEditor,
};
@@ -54,8 +58,7 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
ast::ElseBranch::IfExpr(expr) => Some(expr),
ast::ElseBranch::Block(block) => {
let block = unwrap_trivial_block(block).clone_for_update();
- block.reindent_to(IndentLevel(1));
- else_block = Some(block);
+ else_block = Some(block.reset_indent().indent(IndentLevel(1)));
None
}
});
@@ -82,12 +85,13 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
(Some(pat), guard)
}
};
- if let Some(guard) = &guard {
- guard.dedent(indent);
- guard.indent(IndentLevel(1));
- }
- let body = if_expr.then_branch()?.clone_for_update();
- body.indent(IndentLevel(1));
+ let guard = if let Some(guard) = &guard {
+ Some(guard.dedent(indent).indent(IndentLevel(1)))
+ } else {
+ guard
+ };
+
+ let body = if_expr.then_branch()?.clone_for_update().indent(IndentLevel(1));
cond_bodies.push((cond, guard, body));
}
@@ -109,7 +113,8 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
let else_arm = make_else_arm(ctx, &make, else_block, &cond_bodies);
let make_match_arm =
|(pat, guard, body): (_, Option<ast::Expr>, ast::BlockExpr)| {
- body.reindent_to(IndentLevel::single());
+ // Dedent from original position, then indent for match arm
+ let body = body.dedent(indent).indent(IndentLevel::single());
let body = unwrap_trivial_block(body);
match (pat, guard.map(|it| make.match_guard(it))) {
(Some(pat), guard) => make.match_arm(pat, guard, body),
@@ -122,8 +127,8 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
}
};
let arms = cond_bodies.into_iter().map(make_match_arm).chain([else_arm]);
- let match_expr = make.expr_match(scrutinee_to_be_expr, make.match_arm_list(arms));
- match_expr.indent(indent);
+ let match_expr =
+ make.expr_match(scrutinee_to_be_expr, make.match_arm_list(arms)).indent(indent);
match_expr.into()
};
@@ -131,10 +136,9 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
if_expr.syntax().parent().is_some_and(|it| ast::IfExpr::can_cast(it.kind()));
let expr = if has_preceding_if_expr {
// make sure we replace the `else if let ...` with a block so we don't end up with `else expr`
- match_expr.dedent(indent);
- match_expr.indent(IndentLevel(1));
- let block_expr = make.block_expr([], Some(match_expr));
- block_expr.indent(indent);
+ let block_expr = make
+ .block_expr([], Some(match_expr.dedent(indent).indent(IndentLevel(1))))
+ .indent(indent);
block_expr.into()
} else {
match_expr
@@ -267,10 +271,7 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'
// wrap them in another BlockExpr.
match expr {
ast::Expr::BlockExpr(block) if block.modifier().is_none() => block,
- expr => {
- expr.indent(IndentLevel(1));
- make.block_expr([], Some(expr))
- }
+ expr => make.block_expr([], Some(expr.indent(IndentLevel(1)))),
}
};
@@ -292,18 +293,19 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'
} else {
condition
};
- let then_expr = then_expr.clone_for_update();
- let else_expr = else_expr.clone_for_update();
- then_expr.reindent_to(IndentLevel::single());
- else_expr.reindent_to(IndentLevel::single());
+ let then_expr =
+ then_expr.clone_for_update().reset_indent().indent(IndentLevel::single());
+ let else_expr =
+ else_expr.clone_for_update().reset_indent().indent(IndentLevel::single());
let then_block = make_block_expr(then_expr);
let else_expr = if is_empty_expr(&else_expr) { None } else { Some(else_expr) };
- let if_let_expr = make.expr_if(
- condition,
- then_block,
- else_expr.map(make_block_expr).map(ast::ElseBranch::Block),
- );
- if_let_expr.indent(IndentLevel::from_node(match_expr.syntax()));
+ let if_let_expr = make
+ .expr_if(
+ condition,
+ then_block,
+ else_expr.map(make_block_expr).map(ast::ElseBranch::Block),
+ )
+ .indent(IndentLevel::from_node(match_expr.syntax()));
let mut editor = builder.make_editor(match_expr.syntax());
editor.replace(match_expr.syntax(), if_let_expr.syntax());
diff --git a/crates/ide-assists/src/handlers/replace_let_with_if_let.rs b/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
index b95e9b52b0..15977c420e 100644
--- a/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
+++ b/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
@@ -1,7 +1,11 @@
use ide_db::ty_filter::TryEnum;
use syntax::{
AstNode, T,
- ast::{self, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory},
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ syntax_factory::SyntaxFactory,
+ },
};
use crate::{AssistContext, AssistId, Assists};
@@ -64,7 +68,7 @@ pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_>
if let_expr_needs_paren(&init) { make.expr_paren(init).into() } else { init };
let block = make.block_expr([], None);
- block.indent(IndentLevel::from_node(let_stmt.syntax()));
+ let block = block.indent(IndentLevel::from_node(let_stmt.syntax()));
let if_expr = make.expr_if(
make.expr_let(pat, init_expr).into(),
block,
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index 5e08cba8e2..b4055e77cc 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -14,6 +14,7 @@ use ide_db::{
path_transform::PathTransform,
syntax_helpers::{node_ext::preorder_expr, prettify_macro_expansion},
};
+use itertools::Itertools;
use stdx::format_to;
use syntax::{
AstNode, AstToken, Direction, NodeOrToken, SourceFile,
@@ -765,6 +766,11 @@ fn generate_impl_inner(
});
let generic_args =
generic_params.as_ref().map(|params| params.to_generic_args().clone_for_update());
+ let adt_assoc_bounds = trait_
+ .as_ref()
+ .zip(generic_params.as_ref())
+ .and_then(|(trait_, params)| generic_param_associated_bounds(adt, trait_, params));
+
let ty = make::ty_path(make::ext::ident_path(&adt.name().unwrap().text()));
let cfg_attrs =
@@ -780,7 +786,7 @@ fn generate_impl_inner(
false,
trait_,
ty,
- None,
+ adt_assoc_bounds,
adt.where_clause(),
body,
),
@@ -789,6 +795,51 @@ fn generate_impl_inner(
.clone_for_update()
}
+fn generic_param_associated_bounds(
+ adt: &ast::Adt,
+ trait_: &ast::Type,
+ generic_params: &ast::GenericParamList,
+) -> Option<ast::WhereClause> {
+ let in_type_params = |name: &ast::NameRef| {
+ generic_params
+ .generic_params()
+ .filter_map(|param| match param {
+ ast::GenericParam::TypeParam(type_param) => type_param.name(),
+ _ => None,
+ })
+ .any(|param| param.text() == name.text())
+ };
+ let adt_body = match adt {
+ ast::Adt::Enum(e) => e.variant_list().map(|it| it.syntax().clone()),
+ ast::Adt::Struct(s) => s.field_list().map(|it| it.syntax().clone()),
+ ast::Adt::Union(u) => u.record_field_list().map(|it| it.syntax().clone()),
+ };
+ let mut trait_where_clause = adt_body
+ .into_iter()
+ .flat_map(|it| it.descendants())
+ .filter_map(ast::Path::cast)
+ .filter_map(|path| {
+ let qualifier = path.qualifier()?.as_single_segment()?;
+ let qualifier = qualifier
+ .name_ref()
+ .or_else(|| match qualifier.type_anchor()?.ty()? {
+ ast::Type::PathType(path_type) => path_type.path()?.as_single_name_ref(),
+ _ => None,
+ })
+ .filter(in_type_params)?;
+ Some((qualifier, path.segment()?.name_ref()?))
+ })
+ .map(|(qualifier, assoc_name)| {
+ let segments = [qualifier, assoc_name].map(make::path_segment);
+ let path = make::path_from_segments(segments, false);
+ let bounds = Some(make::type_bound(trait_.clone()));
+ make::where_pred(either::Either::Right(make::ty_path(path)), bounds)
+ })
+ .unique_by(|it| it.syntax().to_string())
+ .peekable();
+ trait_where_clause.peek().is_some().then(|| make::where_clause(trait_where_clause))
+}
+
pub(crate) fn add_method_to_adt(
builder: &mut SourceChangeBuilder,
adt: &ast::Adt,
diff --git a/crates/ide-assists/src/utils/ref_field_expr.rs b/crates/ide-assists/src/utils/ref_field_expr.rs
index 840b26a7ad..df8ad41112 100644
--- a/crates/ide-assists/src/utils/ref_field_expr.rs
+++ b/crates/ide-assists/src/utils/ref_field_expr.rs
@@ -5,7 +5,7 @@
//! based on the parent of the existing expression.
use syntax::{
AstNode, T,
- ast::{self, FieldExpr, MethodCallExpr, make},
+ ast::{self, FieldExpr, MethodCallExpr, make, syntax_factory::SyntaxFactory},
};
use crate::AssistContext;
@@ -130,4 +130,20 @@ impl RefData {
expr
}
+
+ pub(crate) fn wrap_expr_with_factory(
+ &self,
+ mut expr: ast::Expr,
+ syntax_factory: &SyntaxFactory,
+ ) -> ast::Expr {
+ if self.needs_deref {
+ expr = syntax_factory.expr_prefix(T![*], expr).into();
+ }
+
+ if self.needs_parentheses {
+ expr = syntax_factory.expr_paren(expr).into();
+ }
+
+ expr
+ }
}
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index cab8bced88..97afd07b00 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -821,7 +821,10 @@ impl<'db> CompletionContext<'db> {
CompleteSemicolon::DoNotComplete
} else if let Some(term_node) =
sema.token_ancestors_with_macros(token.clone()).find(|node| {
- matches!(node.kind(), BLOCK_EXPR | MATCH_ARM | CLOSURE_EXPR | ARG_LIST | PAREN_EXPR)
+ matches!(
+ node.kind(),
+ BLOCK_EXPR | MATCH_ARM | CLOSURE_EXPR | ARG_LIST | PAREN_EXPR | ARRAY_EXPR
+ )
})
{
let next_token = iter::successors(token.next_token(), |it| it.next_token())
diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs
index 4713b1f1af..475e00dfcf 100644
--- a/crates/ide-completion/src/render/function.rs
+++ b/crates/ide-completion/src/render/function.rs
@@ -908,4 +908,23 @@ fn bar() {
"#,
);
}
+
+ #[test]
+ fn no_semicolon_in_array() {
+ check_edit(
+ r#"foo"#,
+ r#"
+fn foo() {}
+fn bar() {
+ let _ = [fo$0];
+}
+"#,
+ r#"
+fn foo() {}
+fn bar() {
+ let _ = [foo()$0];
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs
index 35579eb259..1c48527027 100644
--- a/crates/ide-db/src/imports/import_assets.rs
+++ b/crates/ide-db/src/imports/import_assets.rs
@@ -9,7 +9,7 @@ use hir::{
};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
-use smallvec::SmallVec;
+use smallvec::{SmallVec, smallvec};
use syntax::{
AstNode, SyntaxNode,
ast::{self, HasName, make},
@@ -68,6 +68,8 @@ pub struct PathImportCandidate {
pub qualifier: Vec<Name>,
/// The name the item (struct, trait, enum, etc.) should have.
pub name: NameToImport,
+ /// Additional trailing path segments that should also resolve against the candidate.
+ pub after: Vec<Name>,
}
/// A name that will be used during item lookups.
@@ -376,7 +378,7 @@ fn path_applicable_imports(
) -> FxIndexSet<LocatedImport> {
let _p = tracing::info_span!("ImportAssets::path_applicable_imports").entered();
- match &*path_candidate.qualifier {
+ let mut result = match &*path_candidate.qualifier {
[] => {
items_locator::items_with_name(
db,
@@ -433,6 +435,75 @@ fn path_applicable_imports(
})
.take(DEFAULT_QUERY_SEARCH_LIMIT)
.collect(),
+ };
+
+ filter_candidates_by_after_path(db, scope, path_candidate, &mut result);
+
+ result
+}
+
+fn filter_candidates_by_after_path(
+ db: &RootDatabase,
+ scope: &SemanticsScope<'_>,
+ path_candidate: &PathImportCandidate,
+ imports: &mut FxIndexSet<LocatedImport>,
+) {
+ if imports.len() <= 1 {
+ // Short-circuit, as even if it doesn't match fully we want it.
+ return;
+ }
+
+ let Some((last_after, after_except_last)) = path_candidate.after.split_last() else {
+ return;
+ };
+
+ let original_imports = imports.clone();
+
+ let traits_in_scope = scope.visible_traits();
+ imports.retain(|import| {
+ let items = if after_except_last.is_empty() {
+ smallvec![import.original_item]
+ } else {
+ let ItemInNs::Types(ModuleDef::Module(item)) = import.original_item else {
+ return false;
+ };
+ // FIXME: This doesn't consider visibilities.
+ item.resolve_mod_path(db, after_except_last.iter().cloned())
+ .into_iter()
+ .flatten()
+ .collect::<SmallVec<[_; 3]>>()
+ };
+ items.into_iter().any(|item| {
+ let has_last_method = |ty: hir::Type<'_>| {
+ ty.iterate_path_candidates(db, scope, &traits_in_scope, Some(last_after), |_| {
+ Some(())
+ })
+ .is_some()
+ };
+ // FIXME: A trait can have an assoc type that has a function/const, that's two segments before last.
+ match item {
+ // A module? Can we resolve one more segment?
+ ItemInNs::Types(ModuleDef::Module(module)) => module
+ .resolve_mod_path(db, [last_after.clone()])
+ .is_some_and(|mut it| it.any(|_| true)),
+ // And ADT/Type Alias? That might be a method.
+ ItemInNs::Types(ModuleDef::Adt(it)) => has_last_method(it.ty(db)),
+ ItemInNs::Types(ModuleDef::BuiltinType(it)) => has_last_method(it.ty(db)),
+ ItemInNs::Types(ModuleDef::TypeAlias(it)) => has_last_method(it.ty(db)),
+ // A trait? Might have an associated item.
+ ItemInNs::Types(ModuleDef::Trait(it)) => it
+ .items(db)
+ .into_iter()
+ .any(|assoc_item| assoc_item.name(db) == Some(last_after.clone())),
+ // Other items? can't resolve one more segment.
+ _ => false,
+ }
+ })
+ });
+
+ if imports.is_empty() {
+ // Better one half-match than zero full matches.
+ *imports = original_imports;
}
}
@@ -759,10 +830,14 @@ impl<'db> ImportCandidate<'db> {
if sema.resolve_path(path).is_some() {
return None;
}
+ let after = std::iter::successors(path.parent_path(), |it| it.parent_path())
+ .map(|seg| seg.segment()?.name_ref().map(|name| Name::new_root(&name.text())))
+ .collect::<Option<_>>()?;
path_import_candidate(
sema,
path.qualifier(),
NameToImport::exact_case_sensitive(path.segment()?.name_ref()?.to_string()),
+ after,
)
}
@@ -777,6 +852,7 @@ impl<'db> ImportCandidate<'db> {
Some(ImportCandidate::Path(PathImportCandidate {
qualifier: vec![],
name: NameToImport::exact_case_sensitive(name.to_string()),
+ after: vec![],
}))
}
@@ -785,7 +861,8 @@ impl<'db> ImportCandidate<'db> {
fuzzy_name: String,
sema: &Semantics<'db, RootDatabase>,
) -> Option<Self> {
- path_import_candidate(sema, qualifier, NameToImport::fuzzy(fuzzy_name))
+ // Assume a fuzzy match does not want the segments after. Because... I guess why not?
+ path_import_candidate(sema, qualifier, NameToImport::fuzzy(fuzzy_name), Vec::new())
}
}
@@ -793,6 +870,7 @@ fn path_import_candidate<'db>(
sema: &Semantics<'db, RootDatabase>,
qualifier: Option<ast::Path>,
name: NameToImport,
+ after: Vec<Name>,
) -> Option<ImportCandidate<'db>> {
Some(match qualifier {
Some(qualifier) => match sema.resolve_path(&qualifier) {
@@ -802,7 +880,7 @@ fn path_import_candidate<'db>(
.segments()
.map(|seg| seg.name_ref().map(|name| Name::new_root(&name.text())))
.collect::<Option<Vec<_>>>()?;
- ImportCandidate::Path(PathImportCandidate { qualifier, name })
+ ImportCandidate::Path(PathImportCandidate { qualifier, name, after })
} else {
return None;
}
@@ -826,7 +904,7 @@ fn path_import_candidate<'db>(
}
Some(_) => return None,
},
- None => ImportCandidate::Path(PathImportCandidate { qualifier: vec![], name }),
+ None => ImportCandidate::Path(PathImportCandidate { qualifier: vec![], name, after }),
})
}
diff --git a/crates/ide-db/src/imports/insert_use.rs b/crates/ide-db/src/imports/insert_use.rs
index db1d599d55..f26952fa15 100644
--- a/crates/ide-db/src/imports/insert_use.rs
+++ b/crates/ide-db/src/imports/insert_use.rs
@@ -9,8 +9,9 @@ use syntax::{
Direction, NodeOrToken, SyntaxKind, SyntaxNode, algo,
ast::{
self, AstNode, HasAttrs, HasModuleItem, HasVisibility, PathSegmentKind,
- edit_in_place::Removable, make,
+ edit_in_place::Removable, make, syntax_factory::SyntaxFactory,
},
+ syntax_editor::{Position, SyntaxEditor},
ted,
};
@@ -146,6 +147,17 @@ pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
insert_use_with_alias_option(scope, path, cfg, None);
}
+/// Insert an import path into the given file/node via the provided syntax editor, merging with existing imports according to the granularity in `cfg`.
+pub fn insert_use_with_editor(
+ scope: &ImportScope,
+ path: ast::Path,
+ cfg: &InsertUseConfig,
+ syntax_editor: &mut SyntaxEditor,
+ syntax_factory: &SyntaxFactory,
+) {
+ insert_use_with_alias_option_with_editor(scope, path, cfg, None, syntax_editor, syntax_factory);
+}
+
pub fn insert_use_as_alias(
scope: &ImportScope,
path: ast::Path,
@@ -229,6 +241,71 @@ fn insert_use_with_alias_option(
insert_use_(scope, use_item, cfg.group);
}
+fn insert_use_with_alias_option_with_editor(
+ scope: &ImportScope,
+ path: ast::Path,
+ cfg: &InsertUseConfig,
+ alias: Option<ast::Rename>,
+ syntax_editor: &mut SyntaxEditor,
+ syntax_factory: &SyntaxFactory,
+) {
+ let _p = tracing::info_span!("insert_use_with_alias_option").entered();
+ let mut mb = match cfg.granularity {
+ ImportGranularity::Crate => Some(MergeBehavior::Crate),
+ ImportGranularity::Module => Some(MergeBehavior::Module),
+ ImportGranularity::One => Some(MergeBehavior::One),
+ ImportGranularity::Item => None,
+ };
+ if !cfg.enforce_granularity {
+ let file_granularity = guess_granularity_from_scope(scope);
+ mb = match file_granularity {
+ ImportGranularityGuess::Unknown => mb,
+ ImportGranularityGuess::Item => None,
+ ImportGranularityGuess::Module => Some(MergeBehavior::Module),
+ // We use the user's setting to infer if this is module or item.
+ ImportGranularityGuess::ModuleOrItem => match mb {
+ Some(MergeBehavior::Module) | None => mb,
+ // There isn't really a way to decide between module or item here, so we just pick one.
+ // FIXME: Maybe it is possible to infer based on semantic analysis?
+ Some(MergeBehavior::One | MergeBehavior::Crate) => Some(MergeBehavior::Module),
+ },
+ ImportGranularityGuess::Crate => Some(MergeBehavior::Crate),
+ ImportGranularityGuess::CrateOrModule => match mb {
+ Some(MergeBehavior::Crate | MergeBehavior::Module) => mb,
+ Some(MergeBehavior::One) | None => Some(MergeBehavior::Crate),
+ },
+ ImportGranularityGuess::One => Some(MergeBehavior::One),
+ };
+ }
+
+ let use_tree = syntax_factory.use_tree(path, None, alias, false);
+ if mb == Some(MergeBehavior::One) && use_tree.path().is_some() {
+ use_tree.wrap_in_tree_list();
+ }
+ let use_item = make::use_(None, None, use_tree).clone_for_update();
+ for attr in
+ scope.required_cfgs.iter().map(|attr| attr.syntax().clone_subtree().clone_for_update())
+ {
+ syntax_editor.insert(Position::first_child_of(use_item.syntax()), attr);
+ }
+
+ // merge into existing imports if possible
+ if let Some(mb) = mb {
+ let filter = |it: &_| !(cfg.skip_glob_imports && ast::Use::is_simple_glob(it));
+ for existing_use in
+ scope.as_syntax_node().children().filter_map(ast::Use::cast).filter(filter)
+ {
+ if let Some(merged) = try_merge_imports(&existing_use, &use_item, mb) {
+ syntax_editor.replace(existing_use.syntax(), merged.syntax());
+ return;
+ }
+ }
+ }
+ // either we weren't allowed to merge or there is no import that fits the merge conditions
+ // so look for the place we have to insert to
+ insert_use_with_editor_(scope, use_item, cfg.group, syntax_editor, syntax_factory);
+}
+
pub fn ast_to_remove_for_path_in_use_stmt(path: &ast::Path) -> Option<Box<dyn Removable>> {
// FIXME: improve this
if path.parent_path().is_some() {
@@ -500,6 +577,127 @@ fn insert_use_(scope: &ImportScope, use_item: ast::Use, group_imports: bool) {
}
}
+fn insert_use_with_editor_(
+ scope: &ImportScope,
+ use_item: ast::Use,
+ group_imports: bool,
+ syntax_editor: &mut SyntaxEditor,
+ syntax_factory: &SyntaxFactory,
+) {
+ let scope_syntax = scope.as_syntax_node();
+ let insert_use_tree =
+ use_item.use_tree().expect("`use_item` should have a use tree for `insert_path`");
+ let group = ImportGroup::new(&insert_use_tree);
+ let path_node_iter = scope_syntax
+ .children()
+ .filter_map(|node| ast::Use::cast(node.clone()).zip(Some(node)))
+ .flat_map(|(use_, node)| {
+ let tree = use_.use_tree()?;
+ Some((tree, node))
+ });
+
+ if group_imports {
+ // Iterator that discards anything that's not in the required grouping
+ // This implementation allows the user to rearrange their import groups as this only takes the first group that fits
+ let group_iter = path_node_iter
+ .clone()
+ .skip_while(|(use_tree, ..)| ImportGroup::new(use_tree) != group)
+ .take_while(|(use_tree, ..)| ImportGroup::new(use_tree) == group);
+
+ // track the last element we iterated over, if this is still None after the iteration then that means we never iterated in the first place
+ let mut last = None;
+ // find the element that would come directly after our new import
+ let post_insert: Option<(_, SyntaxNode)> = group_iter
+ .inspect(|(.., node)| last = Some(node.clone()))
+ .find(|(use_tree, _)| use_tree_cmp(&insert_use_tree, use_tree) != Ordering::Greater);
+
+ if let Some((.., node)) = post_insert {
+ cov_mark::hit!(insert_group);
+ // insert our import before that element
+ return syntax_editor.insert(Position::before(node), use_item.syntax());
+ }
+ if let Some(node) = last {
+ cov_mark::hit!(insert_group_last);
+ // there is no element after our new import, so append it to the end of the group
+ return syntax_editor.insert(Position::after(node), use_item.syntax());
+ }
+
+ // the group we were looking for actually doesn't exist, so insert a new group for it
+
+ let mut last = None;
+ // find the group that comes after where we want to insert
+ let post_group = path_node_iter
+ .inspect(|(.., node)| last = Some(node.clone()))
+ .find(|(use_tree, ..)| ImportGroup::new(use_tree) > group);
+ if let Some((.., node)) = post_group {
+ cov_mark::hit!(insert_group_new_group);
+ syntax_editor.insert(Position::before(&node), use_item.syntax());
+ if let Some(node) = algo::non_trivia_sibling(node.into(), Direction::Prev) {
+ syntax_editor.insert(Position::after(node), syntax_factory.whitespace("\n"));
+ }
+ return;
+ }
+ // there is no such group, so append after the last one
+ if let Some(node) = last {
+ cov_mark::hit!(insert_group_no_group);
+ syntax_editor.insert(Position::after(&node), use_item.syntax());
+ syntax_editor.insert(Position::after(node), syntax_factory.whitespace("\n"));
+ return;
+ }
+ } else {
+ // Grouping is disabled: append after the last existing import, if there is one
+ if let Some((_, node)) = path_node_iter.last() {
+ cov_mark::hit!(insert_no_grouping_last);
+ syntax_editor.insert(Position::after(node), use_item.syntax());
+ return;
+ }
+ }
+
+ let l_curly = match &scope.kind {
+ ImportScopeKind::File(_) => None,
+ // don't insert the imports before the item list/block expr's opening curly brace
+ ImportScopeKind::Module(item_list) => item_list.l_curly_token(),
+ // don't insert the imports before the item list's opening curly brace
+ ImportScopeKind::Block(block) => block.l_curly_token(),
+ };
+ // there are no imports in this file at all
+ // so put the import after all inner module attributes and possible license header comments
+ if let Some(last_inner_element) = scope_syntax
+ .children_with_tokens()
+ // skip the curly brace
+ .skip(l_curly.is_some() as usize)
+ .take_while(|child| match child {
+ NodeOrToken::Node(node) => is_inner_attribute(node.clone()),
+ NodeOrToken::Token(token) => {
+ [SyntaxKind::WHITESPACE, SyntaxKind::COMMENT, SyntaxKind::SHEBANG]
+ .contains(&token.kind())
+ }
+ })
+ .filter(|child| child.as_token().is_none_or(|t| t.kind() != SyntaxKind::WHITESPACE))
+ .last()
+ {
+ cov_mark::hit!(insert_empty_inner_attr);
+ syntax_editor.insert(Position::after(&last_inner_element), use_item.syntax());
+ syntax_editor.insert(Position::after(last_inner_element), syntax_factory.whitespace("\n"));
+ } else {
+ match l_curly {
+ Some(b) => {
+ cov_mark::hit!(insert_empty_module);
+ syntax_editor.insert(Position::after(&b), syntax_factory.whitespace("\n"));
+ syntax_editor.insert(Position::after(&b), use_item.syntax());
+ }
+ None => {
+ cov_mark::hit!(insert_empty_file);
+ syntax_editor.insert(
+ Position::first_child_of(scope_syntax),
+ syntax_factory.whitespace("\n\n"),
+ );
+ syntax_editor.insert(Position::first_child_of(scope_syntax), use_item.syntax());
+ }
+ }
+ }
+}
+
fn is_inner_attribute(node: SyntaxNode) -> bool {
ast::Attr::cast(node).map(|attr| attr.kind()) == Some(ast::AttrKind::Inner)
}
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index 7900a0dc99..7fbbc576dd 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -9720,6 +9720,99 @@ fn test_hover_function_with_pat_param() {
}
#[test]
+fn test_hover_function_with_too_long_param() {
+ check(
+ r#"
+fn fn_$0(
+ attrs: impl IntoIterator<Item = ast::Attr>,
+ visibility: Option<ast::Visibility>,
+ fn_name: ast::Name,
+ type_params: Option<ast::GenericParamList>,
+ where_clause: Option<ast::WhereClause>,
+ params: ast::ParamList,
+ body: ast::BlockExpr,
+ ret_type: Option<ast::RetType>,
+ is_async: bool,
+ is_const: bool,
+ is_unsafe: bool,
+ is_gen: bool,
+) -> ast::Fn {}
+ "#,
+ expect![[r#"
+ *fn_*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ fn fn_(
+ attrs: impl IntoIterator<Item = ast::Attr>,
+ visibility: Option<ast::Visibility>,
+ fn_name: ast::Name,
+ type_params: Option<ast::GenericParamList>,
+ where_clause: Option<ast::WhereClause>,
+ params: ast::ParamList,
+ body: ast::BlockExpr,
+ ret_type: Option<ast::RetType>,
+ is_async: bool,
+ is_const: bool,
+ is_unsafe: bool,
+ is_gen: bool
+ ) -> ast::Fn
+ ```
+ "#]],
+ );
+
+ check(
+ r#"
+fn fn_$0(
+ &self,
+ attrs: impl IntoIterator<Item = ast::Attr>,
+ visibility: Option<ast::Visibility>,
+ fn_name: ast::Name,
+ type_params: Option<ast::GenericParamList>,
+ where_clause: Option<ast::WhereClause>,
+ params: ast::ParamList,
+ body: ast::BlockExpr,
+ ret_type: Option<ast::RetType>,
+ is_async: bool,
+ is_const: bool,
+ is_unsafe: bool,
+ is_gen: bool,
+ ...
+) -> ast::Fn {}
+ "#,
+ expect![[r#"
+ *fn_*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ fn fn_(
+ &self,
+ attrs: impl IntoIterator<Item = ast::Attr>,
+ visibility: Option<ast::Visibility>,
+ fn_name: ast::Name,
+ type_params: Option<ast::GenericParamList>,
+ where_clause: Option<ast::WhereClause>,
+ params: ast::ParamList,
+ body: ast::BlockExpr,
+ ret_type: Option<ast::RetType>,
+ is_async: bool,
+ is_const: bool,
+ is_unsafe: bool,
+ is_gen: bool,
+ ...
+ ) -> ast::Fn
+ ```
+ "#]],
+ );
+}
+
+#[test]
fn hover_path_inside_block_scope() {
check(
r#"
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 2e618550f9..930eaf2262 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -67,7 +67,7 @@ use ide_db::{
FxHashMap, FxIndexSet, LineIndexDatabase,
base_db::{
CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, VfsPath,
- salsa::{CancellationToken, Cancelled, Database},
+ salsa::{Cancelled, Database},
},
prime_caches, symbol_index,
};
@@ -947,10 +947,6 @@ impl Analysis {
// We use `attach_db_allow_change()` and not `attach_db()` because fixture injection can change the database.
hir::attach_db_allow_change(&self.db, || Cancelled::catch(|| f(&self.db)))
}
-
- pub fn cancellation_token(&self) -> CancellationToken {
- self.db.cancellation_token()
- }
}
#[test]
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index 5443021988..38ee097033 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -2073,6 +2073,7 @@ fn func() {}
expect![[r#"
identity Attribute FileId(1) 1..107 32..40
+ FileId(0) 17..25 import
FileId(0) 43..51
"#]],
);
@@ -2103,6 +2104,7 @@ mirror$0! {}
expect![[r#"
mirror ProcMacro FileId(1) 1..77 22..28
+ FileId(0) 17..23 import
FileId(0) 26..32
"#]],
)
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
index 59612634fd..740a6272a7 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
@@ -41,7 +41,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="keyword">use</span> <span class="crate_root library">proc_macros</span><span class="operator">::</span><span class="brace">{</span><span class="function library">mirror</span><span class="comma">,</span> <span class="function library">identity</span><span class="comma">,</span> <span class="derive library">DeriveIdentity</span><span class="brace">}</span><span class="semicolon">;</span>
+<pre><code><span class="keyword">use</span> <span class="crate_root library">proc_macros</span><span class="operator">::</span><span class="brace">{</span><span class="proc_macro library">mirror</span><span class="comma">,</span> <span class="attribute library">identity</span><span class="comma">,</span> <span class="derive library">DeriveIdentity</span><span class="brace">}</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="crate_root library">pm</span><span class="operator">::</span><span class="attribute library">proc_macro</span><span class="semicolon">;</span>
<span class="proc_macro library">mirror</span><span class="macro_bang">!</span> <span class="brace">{</span>
<span class="brace macro proc_macro">{</span>
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index 8b529cf10f..c6aebd0b0c 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -55,8 +55,9 @@ fn macros() {
r#"
//- proc_macros: mirror, identity, derive_identity
//- minicore: fmt, include, concat
-//- /lib.rs crate:lib
+//- /lib.rs crate:lib deps:pm
use proc_macros::{mirror, identity, DeriveIdentity};
+use pm::proc_macro;
mirror! {
{
@@ -126,6 +127,11 @@ fn main() {
//- /foo/foo.rs crate:foo
mod foo {}
use self::foo as bar;
+//- /pm.rs crate:pm
+#![crate_type = "proc-macro"]
+
+#[proc_macro_attribute]
+pub fn proc_macro() {}
"#,
expect_file!["./test_data/highlight_macros.html"],
false,
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index c2935d94a8..70a00cf825 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -26,10 +26,7 @@ use ide_db::{
use itertools::Itertools;
use proc_macro_api::{
MacroDylib, ProcMacroClient,
- bidirectional_protocol::{
- msg::{SubRequest, SubResponse},
- reject_subrequests,
- },
+ bidirectional_protocol::msg::{SubRequest, SubResponse},
};
use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace};
use span::{Span, SpanAnchor, SyntaxContext};
@@ -446,7 +443,7 @@ pub fn load_proc_macro(
) -> ProcMacroLoadResult {
let res: Result<Vec<_>, _> = (|| {
let dylib = MacroDylib::new(path.to_path_buf());
- let vec = server.load_dylib(dylib, Some(&reject_subrequests)).map_err(|e| {
+ let vec = server.load_dylib(dylib).map_err(|e| {
ProcMacroLoadingError::ProcMacroSrvError(format!("{e}").into_boxed_str())
})?;
if vec.is_empty() {
diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml
index 4de1a3e5dd..a135a469e8 100644
--- a/crates/proc-macro-api/Cargo.toml
+++ b/crates/proc-macro-api/Cargo.toml
@@ -31,6 +31,7 @@ span = { path = "../span", version = "0.0.0", default-features = false}
intern.workspace = true
postcard.workspace = true
semver.workspace = true
+rayon.workspace = true
[features]
sysroot-abi = ["proc-macro-srv", "proc-macro-srv/sysroot-abi"]
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index e4b121b033..68b3afc3e8 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -198,12 +198,8 @@ impl ProcMacroClient {
}
/// Loads a proc-macro dylib into the server process returning a list of `ProcMacro`s loaded.
- pub fn load_dylib(
- &self,
- dylib: MacroDylib,
- callback: Option<SubCallback<'_>>,
- ) -> Result<Vec<ProcMacro>, ServerError> {
- self.pool.load_dylib(&dylib, callback)
+ pub fn load_dylib(&self, dylib: MacroDylib) -> Result<Vec<ProcMacro>, ServerError> {
+ self.pool.load_dylib(&dylib)
}
/// Checks if the proc-macro server has exited.
diff --git a/crates/proc-macro-api/src/pool.rs b/crates/proc-macro-api/src/pool.rs
index a637bc0e48..e6541823da 100644
--- a/crates/proc-macro-api/src/pool.rs
+++ b/crates/proc-macro-api/src/pool.rs
@@ -1,10 +1,9 @@
//! A pool of proc-macro server processes
use std::sync::Arc;
-use crate::{
- MacroDylib, ProcMacro, ServerError, bidirectional_protocol::SubCallback,
- process::ProcMacroServerProcess,
-};
+use rayon::iter::{IntoParallelIterator, ParallelIterator};
+
+use crate::{MacroDylib, ProcMacro, ServerError, process::ProcMacroServerProcess};
#[derive(Debug, Clone)]
pub(crate) struct ProcMacroServerPool {
@@ -50,11 +49,7 @@ impl ProcMacroServerPool {
})
}
- pub(crate) fn load_dylib(
- &self,
- dylib: &MacroDylib,
- callback: Option<SubCallback<'_>>,
- ) -> Result<Vec<ProcMacro>, ServerError> {
+ pub(crate) fn load_dylib(&self, dylib: &MacroDylib) -> Result<Vec<ProcMacro>, ServerError> {
let _span = tracing::info_span!("ProcMacroServer::load_dylib").entered();
let dylib_path = Arc::new(dylib.path.clone());
@@ -64,14 +59,17 @@ impl ProcMacroServerPool {
let (first, rest) = self.workers.split_first().expect("worker pool must not be empty");
let macros = first
- .find_proc_macros(&dylib.path, callback)?
+ .find_proc_macros(&dylib.path)?
.map_err(|e| ServerError { message: e, io: None })?;
- for worker in rest {
- worker
- .find_proc_macros(&dylib.path, callback)?
- .map_err(|e| ServerError { message: e, io: None })?;
- }
+ rest.into_par_iter()
+ .map(|worker| {
+ worker
+ .find_proc_macros(&dylib.path)?
+ .map(|_| ())
+ .map_err(|e| ServerError { message: e, io: None })
+ })
+ .collect::<Result<(), _>>()?;
Ok(macros
.into_iter()
diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs
index 9f80880965..80e4ed05c3 100644
--- a/crates/proc-macro-api/src/process.rs
+++ b/crates/proc-macro-api/src/process.rs
@@ -18,7 +18,11 @@ use stdx::JodChild;
use crate::{
ProcMacro, ProcMacroKind, ProtocolFormat, ServerError,
- bidirectional_protocol::{self, SubCallback, msg::BidirectionalMessage, reject_subrequests},
+ bidirectional_protocol::{
+ self, SubCallback,
+ msg::{BidirectionalMessage, SubResponse},
+ reject_subrequests,
+ },
legacy_protocol::{self, SpanMode},
version,
};
@@ -207,14 +211,18 @@ impl ProcMacroServerProcess {
pub(crate) fn find_proc_macros(
&self,
dylib_path: &AbsPath,
- callback: Option<SubCallback<'_>>,
) -> Result<Result<Vec<(String, ProcMacroKind)>, String>, ServerError> {
match self.protocol {
Protocol::LegacyJson { .. } => legacy_protocol::find_proc_macros(self, dylib_path),
Protocol::BidirectionalPostcardPrototype { .. } => {
- let cb = callback.expect("callback required for bidirectional protocol");
- bidirectional_protocol::find_proc_macros(self, dylib_path, cb)
+ bidirectional_protocol::find_proc_macros(self, dylib_path, &|_| {
+ Ok(SubResponse::Cancel {
+ reason: String::from(
+ "Server should not do a sub request when loading proc-macros",
+ ),
+ })
+ })
}
}
}
diff --git a/crates/rust-analyzer/src/flycheck.rs b/crates/rust-analyzer/src/flycheck.rs
index c74f4550fd..47f7a57f72 100644
--- a/crates/rust-analyzer/src/flycheck.rs
+++ b/crates/rust-analyzer/src/flycheck.rs
@@ -741,70 +741,42 @@ impl FlycheckActor {
flycheck_id = self.id,
message = diagnostic.message,
package_id = package_id.as_ref().map(|it| it.as_str()),
- scope = ?self.scope,
"diagnostic received"
);
-
- match &self.scope {
- FlycheckScope::Workspace => {
- if self.diagnostics_received == DiagnosticsReceived::NotYet {
- self.send(FlycheckMessage::ClearDiagnostics {
- id: self.id,
- kind: ClearDiagnosticsKind::All(ClearScope::Workspace),
- });
-
- self.diagnostics_received =
- DiagnosticsReceived::AtLeastOneAndClearedWorkspace;
- }
-
- if let Some(package_id) = package_id {
- tracing::warn!(
- "Ignoring package label {:?} and applying diagnostics to the whole workspace",
- package_id
- );
- }
-
- self.send(FlycheckMessage::AddDiagnostic {
- id: self.id,
- generation: self.generation,
- package_id: None,
- workspace_root: self.root.clone(),
- diagnostic,
- });
- }
- FlycheckScope::Package { package: flycheck_package, .. } => {
- if self.diagnostics_received == DiagnosticsReceived::NotYet {
- self.diagnostics_received = DiagnosticsReceived::AtLeastOne;
- }
-
- // If the package has been set in the diagnostic JSON, respect that. Otherwise, use the
- // package that the current flycheck is scoped to. This is useful when a project is
- // directly using rustc for its checks (e.g. custom check commands in rust-project.json).
- let package_id = package_id.unwrap_or(flycheck_package.clone());
-
- if self.diagnostics_cleared_for.insert(package_id.clone()) {
- tracing::trace!(
- flycheck_id = self.id,
- package_id = package_id.as_str(),
- "clearing diagnostics"
- );
- self.send(FlycheckMessage::ClearDiagnostics {
- id: self.id,
- kind: ClearDiagnosticsKind::All(ClearScope::Package(
- package_id.clone(),
- )),
- });
- }
-
- self.send(FlycheckMessage::AddDiagnostic {
+ if self.diagnostics_received == DiagnosticsReceived::NotYet {
+ self.diagnostics_received = DiagnosticsReceived::AtLeastOne;
+ }
+ if let Some(package_id) = &package_id {
+ if self.diagnostics_cleared_for.insert(package_id.clone()) {
+ tracing::trace!(
+ flycheck_id = self.id,
+ package_id = package_id.as_str(),
+ "clearing diagnostics"
+ );
+ self.send(FlycheckMessage::ClearDiagnostics {
id: self.id,
- generation: self.generation,
- package_id: Some(package_id),
- workspace_root: self.root.clone(),
- diagnostic,
+ kind: ClearDiagnosticsKind::All(ClearScope::Package(
+ package_id.clone(),
+ )),
});
}
+ } else if self.diagnostics_received
+ != DiagnosticsReceived::AtLeastOneAndClearedWorkspace
+ {
+ self.diagnostics_received =
+ DiagnosticsReceived::AtLeastOneAndClearedWorkspace;
+ self.send(FlycheckMessage::ClearDiagnostics {
+ id: self.id,
+ kind: ClearDiagnosticsKind::All(ClearScope::Workspace),
+ });
}
+ self.send(FlycheckMessage::AddDiagnostic {
+ id: self.id,
+ generation: self.generation,
+ package_id,
+ workspace_root: self.root.clone(),
+ diagnostic,
+ });
}
},
}
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index 1462727df4..afd4162de6 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -14,7 +14,7 @@ use hir::ChangeWithProcMacros;
use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId};
use ide_db::{
MiniCore,
- base_db::{Crate, ProcMacroPaths, SourceDatabase, salsa::CancellationToken, salsa::Revision},
+ base_db::{Crate, ProcMacroPaths, SourceDatabase, salsa::Revision},
};
use itertools::Itertools;
use load_cargo::SourceRootConfig;
@@ -88,7 +88,6 @@ pub(crate) struct GlobalState {
pub(crate) task_pool: Handle<TaskPool<Task>, Receiver<Task>>,
pub(crate) fmt_pool: Handle<TaskPool<Task>, Receiver<Task>>,
pub(crate) cancellation_pool: thread::Pool,
- pub(crate) cancellation_tokens: FxHashMap<lsp_server::RequestId, CancellationToken>,
pub(crate) config: Arc<Config>,
pub(crate) config_errors: Option<ConfigErrors>,
@@ -266,7 +265,6 @@ impl GlobalState {
task_pool,
fmt_pool,
cancellation_pool,
- cancellation_tokens: Default::default(),
loader,
config: Arc::new(config.clone()),
analysis_host,
@@ -619,7 +617,6 @@ impl GlobalState {
}
pub(crate) fn respond(&mut self, response: lsp_server::Response) {
- self.cancellation_tokens.remove(&response.id);
if let Some((method, start)) = self.req_queue.incoming.complete(&response.id) {
if let Some(err) = &response.error
&& err.message.starts_with("server panicked")
@@ -634,9 +631,6 @@ impl GlobalState {
}
pub(crate) fn cancel(&mut self, request_id: lsp_server::RequestId) {
- if let Some(token) = self.cancellation_tokens.remove(&request_id) {
- token.cancel();
- }
if let Some(response) = self.req_queue.incoming.cancel(request_id) {
self.send(response.into());
}
diff --git a/crates/rust-analyzer/src/handlers/dispatch.rs b/crates/rust-analyzer/src/handlers/dispatch.rs
index 63b4e6430c..90deae2d90 100644
--- a/crates/rust-analyzer/src/handlers/dispatch.rs
+++ b/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -253,9 +253,6 @@ impl RequestDispatcher<'_> {
tracing::debug!(?params);
let world = self.global_state.snapshot();
- self.global_state
- .cancellation_tokens
- .insert(req.id.clone(), world.analysis.cancellation_token());
if RUSTFMT {
&mut self.global_state.fmt_pool.handle
} else {
@@ -268,19 +265,7 @@ impl RequestDispatcher<'_> {
});
match thread_result_to_response::<R>(req.id.clone(), result) {
Ok(response) => Task::Response(response),
- Err(HandlerCancelledError::Inner(
- Cancelled::PendingWrite | Cancelled::PropagatedPanic,
- )) if ALLOW_RETRYING => Task::Retry(req),
- // Note: Technically the return value here does not matter as we have already responded to the client with this error.
- Err(HandlerCancelledError::Inner(Cancelled::Local)) => Task::Response(Response {
- id: req.id,
- result: None,
- error: Some(ResponseError {
- code: lsp_server::ErrorCode::RequestCanceled as i32,
- message: "canceled by client".to_owned(),
- data: None,
- }),
- }),
+ Err(_cancelled) if ALLOW_RETRYING => Task::Retry(req),
Err(_cancelled) => {
let error = on_cancelled();
Task::Response(Response { id: req.id, result: None, error: Some(error) })
diff --git a/crates/span/src/ast_id.rs b/crates/span/src/ast_id.rs
index 599b3c7175..f52604e139 100644
--- a/crates/span/src/ast_id.rs
+++ b/crates/span/src/ast_id.rs
@@ -88,7 +88,6 @@ impl fmt::Debug for ErasedFileAstId {
Module,
Static,
Trait,
- TraitAlias,
Variant,
Const,
Fn,
@@ -129,7 +128,6 @@ enum ErasedFileAstIdKind {
Module,
Static,
Trait,
- TraitAlias,
// Until here associated with `ErasedHasNameFileAstId`.
// The following are associated with `ErasedAssocItemFileAstId`.
Variant,
diff --git a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs
index 0a81cef52e..fe05ef9465 100644
--- a/crates/span/src/hygiene.rs
+++ b/crates/span/src/hygiene.rs
@@ -81,24 +81,25 @@ const _: () = {
#[derive(Hash)]
struct StructKey<'db, T0, T1, T2, T3>(T0, T1, T2, T3, std::marker::PhantomData<&'db ()>);
- impl<'db, T0, T1, T2, T3> zalsa_::HashEqLike<StructKey<'db, T0, T1, T2, T3>> for SyntaxContextData
+ impl<'db, T0, T1, T2, T3> zalsa_::interned::HashEqLike<StructKey<'db, T0, T1, T2, T3>>
+ for SyntaxContextData
where
- Option<MacroCallId>: zalsa_::HashEqLike<T0>,
- Transparency: zalsa_::HashEqLike<T1>,
- Edition: zalsa_::HashEqLike<T2>,
- SyntaxContext: zalsa_::HashEqLike<T3>,
+ Option<MacroCallId>: zalsa_::interned::HashEqLike<T0>,
+ Transparency: zalsa_::interned::HashEqLike<T1>,
+ Edition: zalsa_::interned::HashEqLike<T2>,
+ SyntaxContext: zalsa_::interned::HashEqLike<T3>,
{
fn hash<H: std::hash::Hasher>(&self, h: &mut H) {
- zalsa_::HashEqLike::<T0>::hash(&self.outer_expn, &mut *h);
- zalsa_::HashEqLike::<T1>::hash(&self.outer_transparency, &mut *h);
- zalsa_::HashEqLike::<T2>::hash(&self.edition, &mut *h);
- zalsa_::HashEqLike::<T3>::hash(&self.parent, &mut *h);
+ zalsa_::interned::HashEqLike::<T0>::hash(&self.outer_expn, &mut *h);
+ zalsa_::interned::HashEqLike::<T1>::hash(&self.outer_transparency, &mut *h);
+ zalsa_::interned::HashEqLike::<T2>::hash(&self.edition, &mut *h);
+ zalsa_::interned::HashEqLike::<T3>::hash(&self.parent, &mut *h);
}
fn eq(&self, data: &StructKey<'db, T0, T1, T2, T3>) -> bool {
- zalsa_::HashEqLike::<T0>::eq(&self.outer_expn, &data.0)
- && zalsa_::HashEqLike::<T1>::eq(&self.outer_transparency, &data.1)
- && zalsa_::HashEqLike::<T2>::eq(&self.edition, &data.2)
- && zalsa_::HashEqLike::<T3>::eq(&self.parent, &data.3)
+ zalsa_::interned::HashEqLike::<T0>::eq(&self.outer_expn, &data.0)
+ && zalsa_::interned::HashEqLike::<T1>::eq(&self.outer_transparency, &data.1)
+ && zalsa_::interned::HashEqLike::<T2>::eq(&self.edition, &data.2)
+ && zalsa_::interned::HashEqLike::<T3>::eq(&self.parent, &data.3)
}
}
impl zalsa_struct_::Configuration for SyntaxContext {
@@ -202,10 +203,10 @@ const _: () = {
impl<'db> SyntaxContext {
pub fn new<
Db,
- T0: zalsa_::Lookup<Option<MacroCallId>> + std::hash::Hash,
- T1: zalsa_::Lookup<Transparency> + std::hash::Hash,
- T2: zalsa_::Lookup<Edition> + std::hash::Hash,
- T3: zalsa_::Lookup<SyntaxContext> + std::hash::Hash,
+ T0: zalsa_::interned::Lookup<Option<MacroCallId>> + std::hash::Hash,
+ T1: zalsa_::interned::Lookup<Transparency> + std::hash::Hash,
+ T2: zalsa_::interned::Lookup<Edition> + std::hash::Hash,
+ T3: zalsa_::interned::Lookup<SyntaxContext> + std::hash::Hash,
>(
db: &'db Db,
outer_expn: T0,
@@ -217,10 +218,10 @@ const _: () = {
) -> Self
where
Db: ?Sized + salsa::Database,
- Option<MacroCallId>: zalsa_::HashEqLike<T0>,
- Transparency: zalsa_::HashEqLike<T1>,
- Edition: zalsa_::HashEqLike<T2>,
- SyntaxContext: zalsa_::HashEqLike<T3>,
+ Option<MacroCallId>: zalsa_::interned::HashEqLike<T0>,
+ Transparency: zalsa_::interned::HashEqLike<T1>,
+ Edition: zalsa_::interned::HashEqLike<T2>,
+ SyntaxContext: zalsa_::interned::HashEqLike<T3>,
{
let (zalsa, zalsa_local) = db.zalsas();
@@ -235,10 +236,10 @@ const _: () = {
std::marker::PhantomData,
),
|id, data| SyntaxContextData {
- outer_expn: zalsa_::Lookup::into_owned(data.0),
- outer_transparency: zalsa_::Lookup::into_owned(data.1),
- edition: zalsa_::Lookup::into_owned(data.2),
- parent: zalsa_::Lookup::into_owned(data.3),
+ outer_expn: zalsa_::interned::Lookup::into_owned(data.0),
+ outer_transparency: zalsa_::interned::Lookup::into_owned(data.1),
+ edition: zalsa_::interned::Lookup::into_owned(data.2),
+ parent: zalsa_::interned::Lookup::into_owned(data.3),
opaque: opaque(zalsa_::FromId::from_id(id)),
opaque_and_semiopaque: opaque_and_semiopaque(zalsa_::FromId::from_id(id)),
},
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs
index 1cd8146f68..2b7dc5cd76 100644
--- a/crates/syntax/src/ast/edit_in_place.rs
+++ b/crates/syntax/src/ast/edit_in_place.rs
@@ -9,8 +9,9 @@ use crate::{
SyntaxKind::{ATTR, COMMENT, WHITESPACE},
SyntaxNode, SyntaxToken,
algo::{self, neighbor},
- ast::{self, HasGenericParams, edit::IndentLevel, make},
- ted::{self, Position},
+ ast::{self, HasGenericParams, edit::IndentLevel, make, syntax_factory::SyntaxFactory},
+ syntax_editor::{Position, SyntaxEditor},
+ ted,
};
use super::{GenericParam, HasName};
@@ -26,13 +27,13 @@ impl GenericParamsOwnerEdit for ast::Fn {
Some(it) => it,
None => {
let position = if let Some(name) = self.name() {
- Position::after(name.syntax)
+ ted::Position::after(name.syntax)
} else if let Some(fn_token) = self.fn_token() {
- Position::after(fn_token)
+ ted::Position::after(fn_token)
} else if let Some(param_list) = self.param_list() {
- Position::before(param_list.syntax)
+ ted::Position::before(param_list.syntax)
} else {
- Position::last_child_of(self.syntax())
+ ted::Position::last_child_of(self.syntax())
};
create_generic_param_list(position)
}
@@ -42,11 +43,11 @@ impl GenericParamsOwnerEdit for ast::Fn {
fn get_or_create_where_clause(&self) -> ast::WhereClause {
if self.where_clause().is_none() {
let position = if let Some(ty) = self.ret_type() {
- Position::after(ty.syntax())
+ ted::Position::after(ty.syntax())
} else if let Some(param_list) = self.param_list() {
- Position::after(param_list.syntax())
+ ted::Position::after(param_list.syntax())
} else {
- Position::last_child_of(self.syntax())
+ ted::Position::last_child_of(self.syntax())
};
create_where_clause(position);
}
@@ -60,8 +61,8 @@ impl GenericParamsOwnerEdit for ast::Impl {
Some(it) => it,
None => {
let position = match self.impl_token() {
- Some(imp_token) => Position::after(imp_token),
- None => Position::last_child_of(self.syntax()),
+ Some(imp_token) => ted::Position::after(imp_token),
+ None => ted::Position::last_child_of(self.syntax()),
};
create_generic_param_list(position)
}
@@ -71,8 +72,8 @@ impl GenericParamsOwnerEdit for ast::Impl {
fn get_or_create_where_clause(&self) -> ast::WhereClause {
if self.where_clause().is_none() {
let position = match self.assoc_item_list() {
- Some(items) => Position::before(items.syntax()),
- None => Position::last_child_of(self.syntax()),
+ Some(items) => ted::Position::before(items.syntax()),
+ None => ted::Position::last_child_of(self.syntax()),
};
create_where_clause(position);
}
@@ -86,11 +87,11 @@ impl GenericParamsOwnerEdit for ast::Trait {
Some(it) => it,
None => {
let position = if let Some(name) = self.name() {
- Position::after(name.syntax)
+ ted::Position::after(name.syntax)
} else if let Some(trait_token) = self.trait_token() {
- Position::after(trait_token)
+ ted::Position::after(trait_token)
} else {
- Position::last_child_of(self.syntax())
+ ted::Position::last_child_of(self.syntax())
};
create_generic_param_list(position)
}
@@ -100,9 +101,9 @@ impl GenericParamsOwnerEdit for ast::Trait {
fn get_or_create_where_clause(&self) -> ast::WhereClause {
if self.where_clause().is_none() {
let position = match (self.assoc_item_list(), self.semicolon_token()) {
- (Some(items), _) => Position::before(items.syntax()),
- (_, Some(tok)) => Position::before(tok),
- (None, None) => Position::last_child_of(self.syntax()),
+ (Some(items), _) => ted::Position::before(items.syntax()),
+ (_, Some(tok)) => ted::Position::before(tok),
+ (None, None) => ted::Position::last_child_of(self.syntax()),
};
create_where_clause(position);
}
@@ -116,11 +117,11 @@ impl GenericParamsOwnerEdit for ast::TypeAlias {
Some(it) => it,
None => {
let position = if let Some(name) = self.name() {
- Position::after(name.syntax)
+ ted::Position::after(name.syntax)
} else if let Some(trait_token) = self.type_token() {
- Position::after(trait_token)
+ ted::Position::after(trait_token)
} else {
- Position::last_child_of(self.syntax())
+ ted::Position::last_child_of(self.syntax())
};
create_generic_param_list(position)
}
@@ -130,10 +131,10 @@ impl GenericParamsOwnerEdit for ast::TypeAlias {
fn get_or_create_where_clause(&self) -> ast::WhereClause {
if self.where_clause().is_none() {
let position = match self.eq_token() {
- Some(tok) => Position::before(tok),
+ Some(tok) => ted::Position::before(tok),
None => match self.semicolon_token() {
- Some(tok) => Position::before(tok),
- None => Position::last_child_of(self.syntax()),
+ Some(tok) => ted::Position::before(tok),
+ None => ted::Position::last_child_of(self.syntax()),
},
};
create_where_clause(position);
@@ -148,11 +149,11 @@ impl GenericParamsOwnerEdit for ast::Struct {
Some(it) => it,
None => {
let position = if let Some(name) = self.name() {
- Position::after(name.syntax)
+ ted::Position::after(name.syntax)
} else if let Some(struct_token) = self.struct_token() {
- Position::after(struct_token)
+ ted::Position::after(struct_token)
} else {
- Position::last_child_of(self.syntax())
+ ted::Position::last_child_of(self.syntax())
};
create_generic_param_list(position)
}
@@ -166,13 +167,13 @@ impl GenericParamsOwnerEdit for ast::Struct {
ast::FieldList::TupleFieldList(it) => Some(it),
});
let position = if let Some(tfl) = tfl {
- Position::after(tfl.syntax())
+ ted::Position::after(tfl.syntax())
} else if let Some(gpl) = self.generic_param_list() {
- Position::after(gpl.syntax())
+ ted::Position::after(gpl.syntax())
} else if let Some(name) = self.name() {
- Position::after(name.syntax())
+ ted::Position::after(name.syntax())
} else {
- Position::last_child_of(self.syntax())
+ ted::Position::last_child_of(self.syntax())
};
create_where_clause(position);
}
@@ -186,11 +187,11 @@ impl GenericParamsOwnerEdit for ast::Enum {
Some(it) => it,
None => {
let position = if let Some(name) = self.name() {
- Position::after(name.syntax)
+ ted::Position::after(name.syntax)
} else if let Some(enum_token) = self.enum_token() {
- Position::after(enum_token)
+ ted::Position::after(enum_token)
} else {
- Position::last_child_of(self.syntax())
+ ted::Position::last_child_of(self.syntax())
};
create_generic_param_list(position)
}
@@ -200,11 +201,11 @@ impl GenericParamsOwnerEdit for ast::Enum {
fn get_or_create_where_clause(&self) -> ast::WhereClause {
if self.where_clause().is_none() {
let position = if let Some(gpl) = self.generic_param_list() {
- Position::after(gpl.syntax())
+ ted::Position::after(gpl.syntax())
} else if let Some(name) = self.name() {
- Position::after(name.syntax())
+ ted::Position::after(name.syntax())
} else {
- Position::last_child_of(self.syntax())
+ ted::Position::last_child_of(self.syntax())
};
create_where_clause(position);
}
@@ -212,12 +213,12 @@ impl GenericParamsOwnerEdit for ast::Enum {
}
}
-fn create_where_clause(position: Position) {
+fn create_where_clause(position: ted::Position) {
let where_clause = make::where_clause(empty()).clone_for_update();
ted::insert(position, where_clause.syntax());
}
-fn create_generic_param_list(position: Position) -> ast::GenericParamList {
+fn create_generic_param_list(position: ted::Position) -> ast::GenericParamList {
let gpl = make::generic_param_list(empty()).clone_for_update();
ted::insert_raw(position, gpl.syntax());
gpl
@@ -253,7 +254,7 @@ impl ast::GenericParamList {
pub fn add_generic_param(&self, generic_param: ast::GenericParam) {
match self.generic_params().last() {
Some(last_param) => {
- let position = Position::after(last_param.syntax());
+ let position = ted::Position::after(last_param.syntax());
let elements = vec![
make::token(T![,]).into(),
make::tokens::single_space().into(),
@@ -262,7 +263,7 @@ impl ast::GenericParamList {
ted::insert_all(position, elements);
}
None => {
- let after_l_angle = Position::after(self.l_angle_token().unwrap());
+ let after_l_angle = ted::Position::after(self.l_angle_token().unwrap());
ted::insert(after_l_angle, generic_param.syntax());
}
}
@@ -412,7 +413,7 @@ impl ast::UseTree {
match self.use_tree_list() {
Some(it) => it,
None => {
- let position = Position::last_child_of(self.syntax());
+ let position = ted::Position::last_child_of(self.syntax());
let use_tree_list = make::use_tree_list(empty()).clone_for_update();
let mut elements = Vec::with_capacity(2);
if self.coloncolon_token().is_none() {
@@ -458,7 +459,7 @@ impl ast::UseTree {
// Next, transform 'suffix' use tree into 'prefix::{suffix}'
let subtree = self.clone_subtree().clone_for_update();
ted::remove_all_iter(self.syntax().children_with_tokens());
- ted::insert(Position::first_child_of(self.syntax()), prefix.syntax());
+ ted::insert(ted::Position::first_child_of(self.syntax()), prefix.syntax());
self.get_or_create_use_tree_list().add_use_tree(subtree);
fn split_path_prefix(prefix: &ast::Path) -> Option<()> {
@@ -507,7 +508,7 @@ impl ast::UseTreeList {
pub fn add_use_tree(&self, use_tree: ast::UseTree) {
let (position, elements) = match self.use_trees().last() {
Some(last_tree) => (
- Position::after(last_tree.syntax()),
+ ted::Position::after(last_tree.syntax()),
vec![
make::token(T![,]).into(),
make::tokens::single_space().into(),
@@ -516,8 +517,8 @@ impl ast::UseTreeList {
),
None => {
let position = match self.l_curly_token() {
- Some(l_curly) => Position::after(l_curly),
- None => Position::last_child_of(self.syntax()),
+ Some(l_curly) => ted::Position::after(l_curly),
+ None => ted::Position::last_child_of(self.syntax()),
};
(position, vec![use_tree.syntax.into()])
}
@@ -582,15 +583,15 @@ impl ast::AssocItemList {
let (indent, position, whitespace) = match self.assoc_items().last() {
Some(last_item) => (
IndentLevel::from_node(last_item.syntax()),
- Position::after(last_item.syntax()),
+ ted::Position::after(last_item.syntax()),
"\n\n",
),
None => match self.l_curly_token() {
Some(l_curly) => {
normalize_ws_between_braces(self.syntax());
- (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n")
+ (IndentLevel::from_token(&l_curly) + 1, ted::Position::after(&l_curly), "\n")
}
- None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"),
+ None => (IndentLevel::single(), ted::Position::last_child_of(self.syntax()), "\n"),
},
};
let elements: Vec<SyntaxElement> = vec![
@@ -618,17 +619,17 @@ impl ast::RecordExprFieldList {
let position = match self.fields().last() {
Some(last_field) => {
let comma = get_or_insert_comma_after(last_field.syntax());
- Position::after(comma)
+ ted::Position::after(comma)
}
None => match self.l_curly_token() {
- Some(it) => Position::after(it),
- None => Position::last_child_of(self.syntax()),
+ Some(it) => ted::Position::after(it),
+ None => ted::Position::last_child_of(self.syntax()),
},
};
ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]);
if is_multiline {
- ted::insert(Position::after(field.syntax()), ast::make::token(T![,]));
+ ted::insert(ted::Position::after(field.syntax()), ast::make::token(T![,]));
}
}
}
@@ -656,7 +657,7 @@ impl ast::RecordExprField {
ast::make::tokens::single_space().into(),
expr.syntax().clone().into(),
];
- ted::insert_all_raw(Position::last_child_of(self.syntax()), children);
+ ted::insert_all_raw(ted::Position::last_child_of(self.syntax()), children);
}
}
}
@@ -679,17 +680,17 @@ impl ast::RecordPatFieldList {
Some(last_field) => {
let syntax = last_field.syntax();
let comma = get_or_insert_comma_after(syntax);
- Position::after(comma)
+ ted::Position::after(comma)
}
None => match self.l_curly_token() {
- Some(it) => Position::after(it),
- None => Position::last_child_of(self.syntax()),
+ Some(it) => ted::Position::after(it),
+ None => ted::Position::last_child_of(self.syntax()),
},
};
ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]);
if is_multiline {
- ted::insert(Position::after(field.syntax()), ast::make::token(T![,]));
+ ted::insert(ted::Position::after(field.syntax()), ast::make::token(T![,]));
}
}
}
@@ -703,7 +704,7 @@ fn get_or_insert_comma_after(syntax: &SyntaxNode) -> SyntaxToken {
Some(it) => it,
None => {
let comma = ast::make::token(T![,]);
- ted::insert(Position::after(syntax), &comma);
+ ted::insert(ted::Position::after(syntax), &comma);
comma
}
}
@@ -728,7 +729,7 @@ fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
}
}
Some(ws) if ws.kind() == T!['}'] => {
- ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{indent}")));
+ ted::insert(ted::Position::after(l), make::tokens::whitespace(&format!("\n{indent}")));
}
_ => (),
}
@@ -780,6 +781,56 @@ impl ast::IdentPat {
}
}
}
+
+ pub fn set_pat_with_editor(
+ &self,
+ pat: Option<ast::Pat>,
+ syntax_editor: &mut SyntaxEditor,
+ syntax_factory: &SyntaxFactory,
+ ) {
+ match pat {
+ None => {
+ if let Some(at_token) = self.at_token() {
+ // Remove `@ Pat`
+ let start = at_token.clone().into();
+ let end = self
+ .pat()
+ .map(|it| it.syntax().clone().into())
+ .unwrap_or_else(|| at_token.into());
+ syntax_editor.delete_all(start..=end);
+
+ // Remove any trailing ws
+ if let Some(last) =
+ self.syntax().last_token().filter(|it| it.kind() == WHITESPACE)
+ {
+ last.detach();
+ }
+ }
+ }
+ Some(pat) => {
+ if let Some(old_pat) = self.pat() {
+ // Replace existing pattern
+ syntax_editor.replace(old_pat.syntax(), pat.syntax())
+ } else if let Some(at_token) = self.at_token() {
+ // Have an `@` token but not a pattern yet
+ syntax_editor.insert(Position::after(at_token), pat.syntax());
+ } else {
+ // Don't have an `@`, should have a name
+ let name = self.name().unwrap();
+
+ syntax_editor.insert_all(
+ Position::after(name.syntax()),
+ vec![
+ syntax_factory.whitespace(" ").into(),
+ syntax_factory.token(T![@]).into(),
+ syntax_factory.whitespace(" ").into(),
+ pat.syntax().clone().into(),
+ ],
+ )
+ }
+ }
+ }
+ }
}
pub trait HasVisibilityEdit: ast::HasVisibility {
diff --git a/crates/syntax/src/ast/syntax_factory/constructors.rs b/crates/syntax/src/ast/syntax_factory/constructors.rs
index 5fe419ad4e..6e17d262a7 100644
--- a/crates/syntax/src/ast/syntax_factory/constructors.rs
+++ b/crates/syntax/src/ast/syntax_factory/constructors.rs
@@ -75,6 +75,24 @@ impl SyntaxFactory {
make::path_from_text(text).clone_for_update()
}
+ pub fn path_concat(&self, first: ast::Path, second: ast::Path) -> ast::Path {
+ make::path_concat(first, second).clone_for_update()
+ }
+
+ pub fn visibility_pub(&self) -> ast::Visibility {
+ make::visibility_pub()
+ }
+
+ pub fn struct_(
+ &self,
+ visibility: Option<ast::Visibility>,
+ strukt_name: ast::Name,
+ generic_param_list: Option<ast::GenericParamList>,
+ field_list: ast::FieldList,
+ ) -> ast::Struct {
+ make::struct_(visibility, strukt_name, generic_param_list, field_list).clone_for_update()
+ }
+
pub fn expr_field(&self, receiver: ast::Expr, field: &str) -> ast::FieldExpr {
let ast::Expr::FieldExpr(ast) =
make::expr_field(receiver.clone(), field).clone_for_update()
@@ -1590,6 +1608,65 @@ impl SyntaxFactory {
ast
}
+ pub fn self_param(&self) -> ast::SelfParam {
+ let ast = make::self_param().clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
+ pub fn impl_(
+ &self,
+ attrs: impl IntoIterator<Item = ast::Attr>,
+ generic_params: Option<ast::GenericParamList>,
+ generic_args: Option<ast::GenericArgList>,
+ path_type: ast::Type,
+ where_clause: Option<ast::WhereClause>,
+ body: Option<ast::AssocItemList>,
+ ) -> ast::Impl {
+ let (attrs, attrs_input) = iterator_input(attrs);
+ let ast = make::impl_(
+ attrs,
+ generic_params.clone(),
+ generic_args.clone(),
+ path_type.clone(),
+ where_clause.clone(),
+ body.clone(),
+ )
+ .clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_children(attrs_input, ast.attrs().map(|attr| attr.syntax().clone()));
+ if let Some(generic_params) = generic_params {
+ builder.map_node(
+ generic_params.syntax().clone(),
+ ast.generic_param_list().unwrap().syntax().clone(),
+ );
+ }
+ builder.map_node(path_type.syntax().clone(), ast.self_ty().unwrap().syntax().clone());
+ if let Some(where_clause) = where_clause {
+ builder.map_node(
+ where_clause.syntax().clone(),
+ ast.where_clause().unwrap().syntax().clone(),
+ );
+ }
+ if let Some(body) = body {
+ builder.map_node(
+ body.syntax().clone(),
+ ast.assoc_item_list().unwrap().syntax().clone(),
+ );
+ }
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
pub fn ret_type(&self, ty: ast::Type) -> ast::RetType {
let ast = make::ret_type(ty.clone()).clone_for_update();
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index 7d95043867..c34475bbdf 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -1689,6 +1689,21 @@ pub mod iter {
}
}
+ pub struct Filter<I, P> {
+ iter: I,
+ predicate: P,
+ }
+ impl<I: Iterator, P> Iterator for Filter<I, P>
+ where
+ P: FnMut(&I::Item) -> bool,
+ {
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<I::Item> {
+ loop {}
+ }
+ }
+
pub struct FilterMap<I, F> {
iter: I,
f: F,
@@ -1705,7 +1720,7 @@ pub mod iter {
}
}
}
- pub use self::adapters::{FilterMap, Take};
+ pub use self::adapters::{Filter, FilterMap, Take};
mod sources {
mod repeat {
@@ -1756,6 +1771,13 @@ pub mod iter {
{
loop {}
}
+ fn filter<P>(self, predicate: P) -> crate::iter::Filter<Self, P>
+ where
+ Self: Sized,
+ P: FnMut(&Self::Item) -> bool,
+ {
+ loop {}
+ }
fn filter_map<B, F>(self, _f: F) -> crate::iter::FilterMap<Self, F>
where
Self: Sized,
diff --git a/crates/tt/src/storage.rs b/crates/tt/src/storage.rs
index 4dd02d875a..50a1106175 100644
--- a/crates/tt/src/storage.rs
+++ b/crates/tt/src/storage.rs
@@ -488,7 +488,7 @@ impl TopSubtree {
unreachable!()
};
*open_span = S::new(span.open.range, 0);
- *close_span = S::new(span.close.range, 0);
+ *close_span = S::new(span.close.range, 1);
}
dispatch! {
match &mut self.repr => tt => do_it(tt, span)
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index 57f6bf69be..84be0a666f 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -1486,7 +1486,6 @@
"integrity": "sha512-4gbs64bnbSzu4FpgMiQ1A+D+urxkoJk/kqlDJ2W//5SygaEiAP2B4GoS7TEdxgwol2el03gckFV9lJ4QOMiiHg==",
"dev": true,
"license": "MIT",
- "peer": true,
"dependencies": {
"@typescript-eslint/scope-manager": "8.25.0",
"@typescript-eslint/types": "8.25.0",
@@ -1870,7 +1869,6 @@
"integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==",
"dev": true,
"license": "MIT",
- "peer": true,
"bin": {
"acorn": "bin/acorn"
},
@@ -2840,7 +2838,6 @@
"resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz",
"integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==",
"license": "ISC",
- "peer": true,
"engines": {
"node": ">=12"
}
@@ -3322,7 +3319,6 @@
"integrity": "sha512-KjeihdFqTPhOMXTt7StsDxriV4n66ueuF/jfPNC3j/lduHwr/ijDwJMsF+wyMJethgiKi5wniIE243vi07d3pg==",
"dev": true,
"license": "MIT",
- "peer": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.12.1",
@@ -4410,7 +4406,6 @@
"resolved": "https://registry.npmjs.org/jiti/-/jiti-2.4.2.tgz",
"integrity": "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==",
"license": "MIT",
- "peer": true,
"bin": {
"jiti": "lib/jiti-cli.mjs"
}
@@ -5584,9 +5579,9 @@
}
},
"node_modules/qs": {
- "version": "6.14.1",
- "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz",
- "integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==",
+ "version": "6.14.2",
+ "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.2.tgz",
+ "integrity": "sha512-V/yCWTTF7VJ9hIh18Ugr2zhJMP01MY7c5kh4J870L7imm6/DIzBsNLTXzMwUA3yZ5b/KBqLx8Kp3uRvd7xSe3Q==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
@@ -6678,7 +6673,6 @@
"integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==",
"dev": true,
"license": "Apache-2.0",
- "peer": true,
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
diff --git a/lib/smol_str/src/borsh.rs b/lib/smol_str/src/borsh.rs
index 527ce85a17..b684a4910c 100644
--- a/lib/smol_str/src/borsh.rs
+++ b/lib/smol_str/src/borsh.rs
@@ -29,8 +29,9 @@ impl BorshDeserialize for SmolStr {
}))
} else {
// u8::vec_from_reader always returns Some on success in current implementation
- let vec = u8::vec_from_reader(len, reader)?
- .ok_or_else(|| Error::other("u8::vec_from_reader unexpectedly returned None"))?;
+ let vec = u8::vec_from_reader(len, reader)?.ok_or_else(|| {
+ Error::new(ErrorKind::Other, "u8::vec_from_reader unexpectedly returned None")
+ })?;
Ok(SmolStr::from(String::from_utf8(vec).map_err(|err| {
let msg = err.to_string();
Error::new(ErrorKind::InvalidData, msg)
diff --git a/lib/smol_str/tests/test.rs b/lib/smol_str/tests/test.rs
index 640e7df681..00fab2ee1c 100644
--- a/lib/smol_str/tests/test.rs
+++ b/lib/smol_str/tests/test.rs
@@ -393,7 +393,7 @@ mod test_str_ext {
}
}
-#[cfg(feature = "borsh")]
+#[cfg(all(feature = "borsh", feature = "std"))]
mod borsh_tests {
use borsh::BorshDeserialize;
use smol_str::{SmolStr, ToSmolStr};
diff --git a/rust-version b/rust-version
index a1011c4a0a..b22c6c3869 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-ba284f468cd2cda48420251efc991758ec13d450
+139651428df86cf88443295542c12ea617cbb587