Unnamed repository; edit this file 'description' to name the repository.
Merge ref '44e34e1ac6d7' from rust-lang/rust
Pull recent changes from https://github.com/rust-lang/rust via Josh. Upstream ref: rust-lang/rust@44e34e1ac6d7e69b40856cf1403d3da145319c30 Filtered ref: rust-lang/compiler-builtins@94ca1bae6b145ee072351f21cfec7fd6c8ee5863 Upstream diff: https://github.com/rust-lang/rust/compare/23d01cd2412583491621ab1ca4f1b01e37d11e39...44e34e1ac6d7e69b40856cf1403d3da145319c30 This merge was created using https://github.com/rust-lang/josh-sync.
The rustc-josh-sync Cronjob Bot 3 months ago
parent ff14b04 · parent 8608137 · commit 2377d23
-rw-r--r--.github/workflows/ci.yaml6
-rw-r--r--CONTRIBUTING.md2
-rw-r--r--Cargo.lock8
-rw-r--r--crates/base-db/src/change.rs16
-rw-r--r--crates/base-db/src/editioned_file_id.rs3
-rw-r--r--crates/base-db/src/lib.rs18
-rw-r--r--crates/cfg/Cargo.toml3
-rw-r--r--crates/cfg/src/cfg_expr.rs30
-rw-r--r--crates/hir-def/Cargo.toml1
-rw-r--r--crates/hir-def/src/attrs.rs21
-rw-r--r--crates/hir-def/src/builtin_derive.rs149
-rw-r--r--crates/hir-def/src/expr_store.rs8
-rw-r--r--crates/hir-def/src/expr_store/expander.rs9
-rw-r--r--crates/hir-def/src/expr_store/lower.rs22
-rw-r--r--crates/hir-def/src/expr_store/lower/format_args.rs14
-rw-r--r--crates/hir-def/src/expr_store/tests/body/block.rs6
-rw-r--r--crates/hir-def/src/hir/generics.rs3
-rw-r--r--crates/hir-def/src/hir/type_ref.rs58
-rw-r--r--crates/hir-def/src/item_scope.rs38
-rw-r--r--crates/hir-def/src/item_tree.rs6
-rw-r--r--crates/hir-def/src/item_tree/attrs.rs5
-rw-r--r--crates/hir-def/src/item_tree/lower.rs2
-rw-r--r--crates/hir-def/src/item_tree/pretty.rs2
-rw-r--r--crates/hir-def/src/lang_item.rs57
-rw-r--r--crates/hir-def/src/lib.rs44
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mbe.rs16
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mod.rs10
-rw-r--r--crates/hir-def/src/macro_expansion_tests/proc_macros.rs8
-rw-r--r--crates/hir-def/src/nameres.rs41
-rw-r--r--crates/hir-def/src/nameres/attr_resolution.rs2
-rw-r--r--crates/hir-def/src/nameres/collector.rs245
-rw-r--r--crates/hir-def/src/nameres/path_resolution.rs2
-rw-r--r--crates/hir-def/src/nameres/proc_macro.rs51
-rw-r--r--crates/hir-def/src/nameres/tests/macros.rs8
-rw-r--r--crates/hir-def/src/resolver.rs4
-rw-r--r--crates/hir-def/src/signatures.rs11
-rw-r--r--crates/hir-def/src/test_db.rs6
-rw-r--r--crates/hir-expand/src/attrs.rs52
-rw-r--r--crates/hir-expand/src/builtin/derive_macro.rs11
-rw-r--r--crates/hir-expand/src/builtin/fn_macro.rs152
-rw-r--r--crates/hir-expand/src/builtin/quote.rs21
-rw-r--r--crates/hir-expand/src/db.rs28
-rw-r--r--crates/hir-expand/src/declarative.rs5
-rw-r--r--crates/hir-expand/src/eager.rs2
-rw-r--r--crates/hir-expand/src/fixup.rs97
-rw-r--r--crates/hir-expand/src/hygiene.rs14
-rw-r--r--crates/hir-expand/src/inert_attr_macro.rs5
-rw-r--r--crates/hir-expand/src/lib.rs44
-rw-r--r--crates/hir-expand/src/mod_path.rs14
-rw-r--r--crates/hir-expand/src/name.rs2
-rw-r--r--crates/hir-expand/src/proc_macro.rs2
-rw-r--r--crates/hir-expand/src/span_map.rs8
-rw-r--r--crates/hir-ty/src/builtin_derive.rs599
-rw-r--r--crates/hir-ty/src/consteval.rs127
-rw-r--r--crates/hir-ty/src/consteval/tests.rs4
-rw-r--r--crates/hir-ty/src/db.rs10
-rw-r--r--crates/hir-ty/src/diagnostics/decl_check.rs37
-rw-r--r--crates/hir-ty/src/display.rs120
-rw-r--r--crates/hir-ty/src/drop.rs2
-rw-r--r--crates/hir-ty/src/infer/closure.rs11
-rw-r--r--crates/hir-ty/src/infer/closure/analysis.rs194
-rw-r--r--crates/hir-ty/src/infer/coerce.rs77
-rw-r--r--crates/hir-ty/src/layout.rs10
-rw-r--r--crates/hir-ty/src/lib.rs5
-rw-r--r--crates/hir-ty/src/lower.rs58
-rw-r--r--crates/hir-ty/src/method_resolution.rs136
-rw-r--r--crates/hir-ty/src/method_resolution/probe.rs6
-rw-r--r--crates/hir-ty/src/mir/borrowck.rs3
-rw-r--r--crates/hir-ty/src/mir/eval.rs13
-rw-r--r--crates/hir-ty/src/mir/eval/shim.rs19
-rw-r--r--crates/hir-ty/src/mir/eval/shim/simd.rs15
-rw-r--r--crates/hir-ty/src/mir/lower.rs45
-rw-r--r--crates/hir-ty/src/next_solver/def_id.rs74
-rw-r--r--crates/hir-ty/src/next_solver/format_proof_tree.rs31
-rw-r--r--crates/hir-ty/src/next_solver/generic_arg.rs58
-rw-r--r--crates/hir-ty/src/next_solver/generics.rs18
-rw-r--r--crates/hir-ty/src/next_solver/infer/mod.rs8
-rw-r--r--crates/hir-ty/src/next_solver/infer/opaque_types/table.rs2
-rw-r--r--crates/hir-ty/src/next_solver/infer/select.rs8
-rw-r--r--crates/hir-ty/src/next_solver/interner.rs113
-rw-r--r--crates/hir-ty/src/next_solver/solver.rs12
-rw-r--r--crates/hir-ty/src/next_solver/ty.rs45
-rw-r--r--crates/hir-ty/src/test_db.rs6
-rw-r--r--crates/hir-ty/src/tests/closure_captures.rs25
-rw-r--r--crates/hir-ty/src/tests/incremental.rs50
-rw-r--r--crates/hir-ty/src/tests/patterns.rs19
-rw-r--r--crates/hir-ty/src/tests/regression.rs1
-rw-r--r--crates/hir-ty/src/tests/regression/new_solver.rs1
-rw-r--r--crates/hir-ty/src/tests/simple.rs61
-rw-r--r--crates/hir-ty/src/tests/traits.rs24
-rw-r--r--crates/hir-ty/src/upvars.rs319
-rw-r--r--crates/hir-ty/src/variance.rs90
-rw-r--r--crates/hir/src/attrs.rs45
-rw-r--r--crates/hir/src/display.rs343
-rw-r--r--crates/hir/src/from_id.rs102
-rw-r--r--crates/hir/src/has_source.rs74
-rw-r--r--crates/hir/src/lib.rs1053
-rw-r--r--crates/hir/src/semantics.rs84
-rw-r--r--crates/hir/src/source_analyzer.rs56
-rw-r--r--crates/ide-assists/src/assist_config.rs6
-rw-r--r--crates/ide-assists/src/handlers/convert_to_guarded_return.rs63
-rw-r--r--crates/ide-assists/src/handlers/move_guard.rs74
-rw-r--r--crates/ide-assists/src/handlers/remove_underscore.rs4
-rw-r--r--crates/ide-assists/src/tests.rs4
-rw-r--r--crates/ide-completion/src/completions.rs1
-rw-r--r--crates/ide-completion/src/completions/dot.rs3
-rw-r--r--crates/ide-completion/src/completions/macro_def.rs31
-rw-r--r--crates/ide-completion/src/completions/record.rs5
-rw-r--r--crates/ide-completion/src/context.rs5
-rw-r--r--crates/ide-completion/src/context/analysis.rs17
-rw-r--r--crates/ide-completion/src/lib.rs3
-rw-r--r--crates/ide-completion/src/render/macro_.rs87
-rw-r--r--crates/ide-completion/src/tests/flyimport.rs3
-rw-r--r--crates/ide-completion/src/tests/record.rs23
-rw-r--r--crates/ide-completion/src/tests/special.rs229
-rw-r--r--crates/ide-db/src/apply_change.rs30
-rw-r--r--crates/ide-db/src/lib.rs6
-rw-r--r--crates/ide-db/src/prime_caches.rs6
-rw-r--r--crates/ide-db/src/rename.rs53
-rw-r--r--crates/ide-db/src/symbol_index.rs20
-rw-r--r--crates/ide-db/src/syntax_helpers/node_ext.rs39
-rw-r--r--crates/ide-db/src/syntax_helpers/suggest_name.rs2
-rw-r--r--crates/ide-db/src/test_data/test_symbol_index_collection.txt24
-rw-r--r--crates/ide-diagnostics/src/handlers/incorrect_case.rs22
-rw-r--r--crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs1
-rw-r--r--crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs18
-rw-r--r--crates/ide-diagnostics/src/handlers/type_mismatch.rs1
-rw-r--r--crates/ide-diagnostics/src/lib.rs8
-rw-r--r--crates/ide-ssr/src/lib.rs2
-rw-r--r--crates/ide-ssr/src/search.rs3
-rw-r--r--crates/ide-ssr/src/tests.rs3
-rw-r--r--crates/ide/src/expand_macro.rs8
-rw-r--r--crates/ide/src/hover/render.rs35
-rw-r--r--crates/ide/src/hover/tests.rs17
-rw-r--r--crates/ide/src/inlay_hints.rs47
-rw-r--r--crates/ide/src/inlay_hints/bind_pat.rs141
-rw-r--r--crates/ide/src/inlay_hints/bounds.rs2
-rw-r--r--crates/ide/src/inlay_hints/implicit_drop.rs5
-rw-r--r--crates/ide/src/inlay_hints/implied_dyn_trait.rs1
-rw-r--r--crates/ide/src/lib.rs14
-rw-r--r--crates/ide/src/navigation_target.rs62
-rw-r--r--crates/ide/src/references.rs29
-rw-r--r--crates/ide/src/rename.rs21
-rw-r--r--crates/ide/src/runnables.rs8
-rw-r--r--crates/ide/src/ssr.rs4
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_strings.html22
-rw-r--r--crates/intern/Cargo.toml1
-rw-r--r--crates/intern/src/gc.rs11
-rw-r--r--crates/intern/src/intern.rs2
-rw-r--r--crates/intern/src/intern_slice.rs7
-rw-r--r--crates/intern/src/symbol/symbols.rs8
-rw-r--r--crates/load-cargo/src/lib.rs73
-rw-r--r--crates/mbe/src/benchmark.rs24
-rw-r--r--crates/mbe/src/expander.rs14
-rw-r--r--crates/mbe/src/expander/matcher.rs46
-rw-r--r--crates/mbe/src/expander/transcriber.rs56
-rw-r--r--crates/mbe/src/lib.rs18
-rw-r--r--crates/mbe/src/parser.rs69
-rw-r--r--crates/parser/src/grammar/expressions/atom.rs10
-rw-r--r--crates/parser/src/input.rs2
-rw-r--r--crates/parser/src/lexed_str.rs7
-rw-r--r--crates/parser/src/parser.rs2
-rw-r--r--crates/parser/src/syntax_kind/generated.rs2
-rw-r--r--crates/parser/test_data/generated/runner.rs4
-rw-r--r--crates/parser/test_data/parser/inline/ok/builtin_expr.rast6
-rw-r--r--crates/parser/test_data/parser/inline/ok/format_args_named_arg_keyword.rast35
-rw-r--r--crates/parser/test_data/parser/inline/ok/format_args_named_arg_keyword.rs3
-rw-r--r--crates/proc-macro-api/src/bidirectional_protocol.rs227
-rw-r--r--crates/proc-macro-api/src/bidirectional_protocol/msg.rs95
-rw-r--r--crates/proc-macro-api/src/legacy_protocol.rs28
-rw-r--r--crates/proc-macro-api/src/legacy_protocol/msg.rs33
-rw-r--r--crates/proc-macro-api/src/legacy_protocol/msg/flat.rs58
-rw-r--r--crates/proc-macro-api/src/lib.rs46
-rw-r--r--crates/proc-macro-api/src/process.rs178
-rw-r--r--crates/proc-macro-api/src/transport.rs3
-rw-r--r--crates/proc-macro-api/src/transport/codec.rs (renamed from crates/proc-macro-api/src/codec.rs)5
-rw-r--r--crates/proc-macro-api/src/transport/codec/json.rs (renamed from crates/proc-macro-api/src/legacy_protocol/json.rs)6
-rw-r--r--crates/proc-macro-api/src/transport/codec/postcard.rs (renamed from crates/proc-macro-api/src/legacy_protocol/postcard.rs)6
-rw-r--r--crates/proc-macro-api/src/transport/framing.rs (renamed from crates/proc-macro-api/src/framing.rs)6
-rw-r--r--crates/proc-macro-srv-cli/Cargo.toml2
-rw-r--r--crates/proc-macro-srv-cli/README.md65
-rw-r--r--crates/proc-macro-srv-cli/src/main.rs13
-rw-r--r--crates/proc-macro-srv-cli/src/main_loop.rs322
-rw-r--r--crates/proc-macro-srv/src/bridge.rs2
-rw-r--r--crates/proc-macro-srv/src/dylib.rs13
-rw-r--r--crates/proc-macro-srv/src/dylib/proc_macros.rs13
-rw-r--r--crates/proc-macro-srv/src/lib.rs54
-rw-r--r--crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs120
-rw-r--r--crates/proc-macro-srv/src/server_impl/token_id.rs111
-rw-r--r--crates/proc-macro-srv/src/tests/utils.rs8
-rw-r--r--crates/proc-macro-srv/src/token_stream.rs42
-rw-r--r--crates/rust-analyzer/src/cli/analysis_stats.rs38
-rw-r--r--crates/rust-analyzer/src/cli/rustc_tests.rs2
-rw-r--r--crates/rust-analyzer/src/cli/ssr.rs2
-rw-r--r--crates/rust-analyzer/src/config.rs57
-rw-r--r--crates/rust-analyzer/src/flycheck.rs8
-rw-r--r--crates/rust-analyzer/src/global_state.rs26
-rw-r--r--crates/rust-analyzer/src/handlers/request.rs6
-rw-r--r--crates/rust-analyzer/src/integrated_benchmarks.rs1
-rw-r--r--crates/rust-analyzer/src/lib.rs3
-rw-r--r--crates/rust-analyzer/src/lsp/to_proto.rs53
-rw-r--r--crates/rust-analyzer/src/main_loop.rs17
-rw-r--r--crates/rust-analyzer/src/reload.rs7
-rw-r--r--crates/rust-analyzer/src/target_spec.rs61
-rw-r--r--crates/rust-analyzer/src/task_pool.rs4
-rw-r--r--crates/span/src/ast_id.rs47
-rw-r--r--crates/span/src/hygiene.rs21
-rw-r--r--crates/span/src/lib.rs18
-rw-r--r--crates/span/src/map.rs79
-rw-r--r--crates/stdx/src/lib.rs14
-rw-r--r--crates/stdx/src/thread/pool.rs3
-rw-r--r--crates/syntax-bridge/src/lib.rs272
-rw-r--r--crates/syntax-bridge/src/tests.rs6
-rw-r--r--crates/syntax-bridge/src/to_parser_input.rs13
-rw-r--r--crates/syntax/fuzz/Cargo.toml1
-rw-r--r--crates/syntax/rust.ungram5
-rw-r--r--crates/syntax/src/ast/generated/nodes.rs50
-rw-r--r--crates/syntax/src/ast/node_ext.rs32
-rw-r--r--crates/syntax/src/lib.rs13
-rw-r--r--crates/test-fixture/Cargo.toml1
-rw-r--r--crates/test-fixture/src/lib.rs57
-rw-r--r--crates/test-utils/src/minicore.rs5
-rw-r--r--crates/tt/Cargo.toml3
-rw-r--r--crates/tt/src/buffer.rs43
-rw-r--r--crates/tt/src/iter.rs151
-rw-r--r--crates/tt/src/lib.rs870
-rw-r--r--crates/tt/src/storage.rs992
-rw-r--r--docs/book/src/configuration_generated.md68
-rw-r--r--docs/book/src/contributing/architecture.md2
-rw-r--r--editors/code/package-lock.json6
-rw-r--r--editors/code/package.json89
-rw-r--r--editors/code/src/bootstrap.ts11
-rw-r--r--editors/code/src/client.ts25
-rw-r--r--editors/code/src/ctx.ts54
-rw-r--r--editors/code/src/util.ts25
-rw-r--r--rust-version2
236 files changed, 8607 insertions, 3329 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 5975272d87..1a0deee564 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -122,6 +122,12 @@ jobs:
- name: Run tests
run: cargo nextest run --no-fail-fast --hide-progress-bar --status-level fail
+ - name: Install cargo-machete
+ uses: taiki-e/install-action@cargo-machete
+
+ - name: Run cargo-machete
+ run: cargo machete
+
- name: Run Clippy
if: matrix.os == 'macos-latest'
run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 35d03780c1..e6ab3d75a0 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -38,6 +38,6 @@ considered accepted feel free to just drop a comment and ask!
AI tool use is not discouraged on the rust-analyzer codebase, as long as it meets our quality standards.
We kindly ask you to disclose usage of AI tools in your contributions.
-If you used them without disclosing it, we may reject your contribution on that basis alone due to the assumption that you likely not reviewed your own submission (so why should we?).
+If you used them without disclosing it, we may reject your contribution on that basis alone due to the assumption that you have, most likely, not reviewed your own submission (so why should we?).
We may still reject AI-assisted contributions if we deem the quality of the contribution to be unsatisfactory as to reduce impact on the team's review budget.
diff --git a/Cargo.lock b/Cargo.lock
index 1e924d92f4..42eaeb01f1 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -234,6 +234,7 @@ dependencies = [
"intern",
"oorandom",
"rustc-hash 2.1.1",
+ "span",
"syntax",
"syntax-bridge",
"tracing",
@@ -821,7 +822,6 @@ dependencies = [
"intern",
"itertools 0.14.0",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "mbe",
"query-group-macro",
"ra-ap-rustc_abi",
"ra-ap-rustc_parse_format",
@@ -1219,7 +1219,6 @@ dependencies = [
"hashbrown 0.14.5",
"rayon",
"rustc-hash 2.1.1",
- "smallvec",
"triomphe",
]
@@ -1882,7 +1881,6 @@ dependencies = [
"postcard",
"proc-macro-api",
"proc-macro-srv",
- "tt",
]
[[package]]
@@ -2782,7 +2780,6 @@ dependencies = [
"hir-expand",
"intern",
"paths",
- "rustc-hash 2.1.1",
"span",
"stdx",
"test-utils",
@@ -3088,8 +3085,11 @@ name = "tt"
version = "0.0.0"
dependencies = [
"arrayvec",
+ "indexmap",
"intern",
"ra-ap-rustc_lexer",
+ "rustc-hash 2.1.1",
+ "span",
"stdx",
"text-size 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
diff --git a/crates/base-db/src/change.rs b/crates/base-db/src/change.rs
index da2fb27571..c728f3e5ca 100644
--- a/crates/base-db/src/change.rs
+++ b/crates/base-db/src/change.rs
@@ -3,11 +3,14 @@
use std::fmt;
-use salsa::Durability;
+use rustc_hash::FxHashSet;
+use salsa::{Durability, Setter as _};
use triomphe::Arc;
use vfs::FileId;
-use crate::{CrateGraphBuilder, CratesIdMap, RootQueryDb, SourceRoot, SourceRootId};
+use crate::{
+ CrateGraphBuilder, CratesIdMap, LibraryRoots, LocalRoots, RootQueryDb, SourceRoot, SourceRootId,
+};
/// Encapsulate a bunch of raw `.set` calls on the database.
#[derive(Default)]
@@ -49,8 +52,15 @@ impl FileChange {
pub fn apply(self, db: &mut dyn RootQueryDb) -> Option<CratesIdMap> {
let _p = tracing::info_span!("FileChange::apply").entered();
if let Some(roots) = self.roots {
+ let mut local_roots = FxHashSet::default();
+ let mut library_roots = FxHashSet::default();
for (idx, root) in roots.into_iter().enumerate() {
let root_id = SourceRootId(idx as u32);
+ if root.is_library {
+ library_roots.insert(root_id);
+ } else {
+ local_roots.insert(root_id);
+ }
let durability = source_root_durability(&root);
for file_id in root.iter() {
db.set_file_source_root_with_durability(file_id, root_id, durability);
@@ -58,6 +68,8 @@ impl FileChange {
db.set_source_root_with_durability(root_id, Arc::new(root), durability);
}
+ LocalRoots::get(db).set_roots(db).to(local_roots);
+ LibraryRoots::get(db).set_roots(db).to(library_roots);
}
for (file_id, text) in self.files_changed {
diff --git a/crates/base-db/src/editioned_file_id.rs b/crates/base-db/src/editioned_file_id.rs
index e2791ffe6f..13fb05d565 100644
--- a/crates/base-db/src/editioned_file_id.rs
+++ b/crates/base-db/src/editioned_file_id.rs
@@ -26,6 +26,9 @@ const _: () = {
krate: Crate,
}
+ // FIXME: This poses an invalidation problem, if one constructs an `EditionedFileId` with a
+ // different crate then whatever the input of a memo used, it will invalidate the memo causing
+ // it to recompute even if the crate is not really used.
/// We like to include the origin crate in an `EditionedFileId` (for use in the item tree),
/// but this poses us a problem.
///
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index aa06cdefe6..24f6dd59a9 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -33,7 +33,7 @@ pub use crate::{
};
use dashmap::{DashMap, mapref::entry::Entry};
pub use query_group::{self};
-use rustc_hash::FxHasher;
+use rustc_hash::{FxHashSet, FxHasher};
use salsa::{Durability, Setter};
pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
use syntax::{Parse, SyntaxError, ast};
@@ -203,6 +203,22 @@ impl Files {
}
}
+/// The set of roots for crates.io libraries.
+/// Files in libraries are assumed to never change.
+#[salsa::input(singleton, debug)]
+pub struct LibraryRoots {
+ #[returns(ref)]
+ pub roots: FxHashSet<SourceRootId>,
+}
+
+/// The set of "local" (that is, from the current workspace) roots.
+/// Files in local roots are assumed to change frequently.
+#[salsa::input(singleton, debug)]
+pub struct LocalRoots {
+ #[returns(ref)]
+ pub roots: FxHashSet<SourceRootId>,
+}
+
#[salsa_macros::input(debug)]
pub struct FileText {
#[returns(ref)]
diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml
index 7207cfcf7d..cf2a7607b0 100644
--- a/crates/cfg/Cargo.toml
+++ b/crates/cfg/Cargo.toml
@@ -19,6 +19,7 @@ tracing.workspace = true
# locals deps
tt = { workspace = true, optional = true }
syntax = { workspace = true, optional = true }
+span = { path = "../span", version = "0.0", optional = true }
intern.workspace = true
[dev-dependencies]
@@ -35,6 +36,8 @@ cfg = { path = ".", default-features = false, features = ["tt"] }
[features]
default = []
+syntax = ["dep:syntax", "dep:span"]
+tt = ["dep:tt"]
in-rust-tree = []
[lints]
diff --git a/crates/cfg/src/cfg_expr.rs b/crates/cfg/src/cfg_expr.rs
index a0e0dc5ff0..d253f6f492 100644
--- a/crates/cfg/src/cfg_expr.rs
+++ b/crates/cfg/src/cfg_expr.rs
@@ -96,12 +96,12 @@ impl CfgExpr {
// FIXME: Parsing from `tt` is only used in a handful of places, reconsider
// if we should switch them to AST.
#[cfg(feature = "tt")]
- pub fn parse<S: Copy>(tt: &tt::TopSubtree<S>) -> CfgExpr {
+ pub fn parse(tt: &tt::TopSubtree) -> CfgExpr {
next_cfg_expr(&mut tt.iter()).unwrap_or(CfgExpr::Invalid)
}
#[cfg(feature = "tt")]
- pub fn parse_from_iter<S: Copy>(tt: &mut tt::iter::TtIter<'_, S>) -> CfgExpr {
+ pub fn parse_from_iter(tt: &mut tt::iter::TtIter<'_>) -> CfgExpr {
next_cfg_expr(tt).unwrap_or(CfgExpr::Invalid)
}
@@ -149,7 +149,16 @@ fn next_cfg_expr_from_ast(
if let Some(NodeOrToken::Token(literal)) = it.peek()
&& matches!(literal.kind(), SyntaxKind::STRING)
{
- let literal = tt::token_to_literal(literal.text(), ()).symbol;
+ let dummy_span = span::Span {
+ range: span::TextRange::empty(span::TextSize::new(0)),
+ anchor: span::SpanAnchor {
+ file_id: span::EditionedFileId::from_raw(0),
+ ast_id: span::FIXUP_ERASED_FILE_AST_ID_MARKER,
+ },
+ ctx: span::SyntaxContext::root(span::Edition::Edition2015),
+ };
+ let literal =
+ Symbol::intern(tt::token_to_literal(literal.text(), dummy_span).text());
it.next();
CfgAtom::KeyValue { key: name, value: literal.clone() }.into()
} else {
@@ -179,7 +188,7 @@ fn next_cfg_expr_from_ast(
}
#[cfg(feature = "tt")]
-fn next_cfg_expr<S: Copy>(it: &mut tt::iter::TtIter<'_, S>) -> Option<CfgExpr> {
+fn next_cfg_expr(it: &mut tt::iter::TtIter<'_>) -> Option<CfgExpr> {
use intern::sym;
use tt::iter::TtElement;
@@ -189,20 +198,21 @@ fn next_cfg_expr<S: Copy>(it: &mut tt::iter::TtIter<'_, S>) -> Option<CfgExpr> {
Some(_) => return Some(CfgExpr::Invalid),
};
- let ret = match it.peek() {
+ let mut it_clone = it.clone();
+ let ret = match it_clone.next() {
Some(TtElement::Leaf(tt::Leaf::Punct(punct)))
// Don't consume on e.g. `=>`.
if punct.char == '='
&& (punct.spacing == tt::Spacing::Alone
- || it.remaining().flat_tokens().get(1).is_none_or(|peek2| {
- !matches!(peek2, tt::TokenTree::Leaf(tt::Leaf::Punct(_)))
+ || it_clone.peek().is_none_or(|peek2| {
+ !matches!(peek2, tt::TtElement::Leaf(tt::Leaf::Punct(_)))
})) =>
{
- match it.remaining().flat_tokens().get(1) {
- Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => {
+ match it_clone.next() {
+ Some(tt::TtElement::Leaf(tt::Leaf::Literal(literal))) => {
it.next();
it.next();
- CfgAtom::KeyValue { key: name, value: literal.symbol.clone() }.into()
+ CfgAtom::KeyValue { key: name, value: Symbol::intern(literal.text()) }.into()
}
_ => return Some(CfgExpr::Invalid),
}
diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml
index a9b51e347d..46acf3de62 100644
--- a/crates/hir-def/Cargo.toml
+++ b/crates/hir-def/Cargo.toml
@@ -40,7 +40,6 @@ intern.workspace = true
base-db.workspace = true
syntax.workspace = true
hir-expand.workspace = true
-mbe.workspace = true
cfg.workspace = true
tt.workspace = true
span.workspace = true
diff --git a/crates/hir-def/src/attrs.rs b/crates/hir-def/src/attrs.rs
index 34a9230794..83df11f2d2 100644
--- a/crates/hir-def/src/attrs.rs
+++ b/crates/hir-def/src/attrs.rs
@@ -99,6 +99,20 @@ fn extract_ra_completions(attr_flags: &mut AttrFlags, tt: ast::TokenTree) {
}
}
+fn extract_ra_macro_style(attr_flags: &mut AttrFlags, tt: ast::TokenTree) {
+ let tt = TokenTreeChildren::new(&tt);
+ if let Ok(NodeOrToken::Token(option)) = Itertools::exactly_one(tt)
+ && option.kind().is_any_identifier()
+ {
+ match option.text() {
+ "braces" => attr_flags.insert(AttrFlags::MACRO_STYLE_BRACES),
+ "brackets" => attr_flags.insert(AttrFlags::MACRO_STYLE_BRACKETS),
+ "parentheses" => attr_flags.insert(AttrFlags::MACRO_STYLE_PARENTHESES),
+ _ => {}
+ }
+ }
+}
+
fn extract_rustc_skip_during_method_dispatch(attr_flags: &mut AttrFlags, tt: ast::TokenTree) {
let iter = TokenTreeChildren::new(&tt);
for kind in iter {
@@ -163,6 +177,7 @@ fn match_attr_flags(attr_flags: &mut AttrFlags, attr: Meta) -> ControlFlow<Infal
2 => match path.segments[0].text() {
"rust_analyzer" => match path.segments[1].text() {
"completions" => extract_ra_completions(attr_flags, tt),
+ "macro_style" => extract_ra_macro_style(attr_flags, tt),
_ => {}
},
_ => {}
@@ -188,6 +203,7 @@ fn match_attr_flags(attr_flags: &mut AttrFlags, attr: Meta) -> ControlFlow<Infal
"deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
"macro_export" => attr_flags.insert(AttrFlags::IS_MACRO_EXPORT),
"no_mangle" => attr_flags.insert(AttrFlags::NO_MANGLE),
+ "pointee" => attr_flags.insert(AttrFlags::IS_POINTEE),
"non_exhaustive" => attr_flags.insert(AttrFlags::NON_EXHAUSTIVE),
"ignore" => attr_flags.insert(AttrFlags::IS_IGNORE),
"bench" => attr_flags.insert(AttrFlags::IS_BENCH),
@@ -289,6 +305,11 @@ bitflags::bitflags! {
const RUSTC_PAREN_SUGAR = 1 << 42;
const RUSTC_COINDUCTIVE = 1 << 43;
const RUSTC_FORCE_INLINE = 1 << 44;
+ const IS_POINTEE = 1 << 45;
+
+ const MACRO_STYLE_BRACES = 1 << 46;
+ const MACRO_STYLE_BRACKETS = 1 << 47;
+ const MACRO_STYLE_PARENTHESES = 1 << 48;
}
}
diff --git a/crates/hir-def/src/builtin_derive.rs b/crates/hir-def/src/builtin_derive.rs
new file mode 100644
index 0000000000..32385516ab
--- /dev/null
+++ b/crates/hir-def/src/builtin_derive.rs
@@ -0,0 +1,149 @@
+//! Definition of builtin derive impls.
+//!
+//! To save time and memory, builtin derives are not really expanded. Instead, we record them
+//! and create their impls based on lowered data, see crates/hir-ty/src/builtin_derive.rs.
+
+use hir_expand::{InFile, builtin::BuiltinDeriveExpander, name::Name};
+use intern::{Symbol, sym};
+use tt::TextRange;
+
+use crate::{
+ AdtId, BuiltinDeriveImplId, BuiltinDeriveImplLoc, FunctionId, HasModule, db::DefDatabase,
+};
+
+macro_rules! declare_enum {
+ ( $( $trait:ident => [ $( $method:ident ),* ] ),* $(,)? ) => {
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+ pub enum BuiltinDeriveImplTrait {
+ $( $trait, )*
+ }
+
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+ #[allow(non_camel_case_types)]
+ pub enum BuiltinDeriveImplMethod {
+ $( $( $method, )* )*
+ }
+
+ impl BuiltinDeriveImplTrait {
+ #[inline]
+ pub fn name(self) -> Symbol {
+ match self {
+ $( Self::$trait => sym::$trait, )*
+ }
+ }
+
+ #[inline]
+ pub fn get_id(self, lang_items: &crate::lang_item::LangItems) -> Option<crate::TraitId> {
+ match self {
+ $( Self::$trait => lang_items.$trait, )*
+ }
+ }
+
+ #[inline]
+ pub fn get_method(self, method_name: &Symbol) -> Option<BuiltinDeriveImplMethod> {
+ match self {
+ $(
+ Self::$trait => {
+ match method_name {
+ $( _ if *method_name == sym::$method => Some(BuiltinDeriveImplMethod::$method), )*
+ _ => None,
+ }
+ }
+ )*
+ }
+ }
+
+ #[inline]
+ pub fn all_methods(self) -> &'static [BuiltinDeriveImplMethod] {
+ match self {
+ $( Self::$trait => &[ $(BuiltinDeriveImplMethod::$method),* ], )*
+ }
+ }
+ }
+
+ impl BuiltinDeriveImplMethod {
+ #[inline]
+ pub fn name(self) -> Symbol {
+ match self {
+ $( $( BuiltinDeriveImplMethod::$method => sym::$method, )* )*
+ }
+ }
+ }
+ };
+}
+
+declare_enum!(
+ Copy => [],
+ Clone => [clone],
+ Default => [default],
+ Debug => [fmt],
+ Hash => [hash],
+ Ord => [cmp],
+ PartialOrd => [partial_cmp],
+ Eq => [],
+ PartialEq => [eq],
+ CoerceUnsized => [],
+ DispatchFromDyn => [],
+);
+
+impl BuiltinDeriveImplMethod {
+ pub fn trait_method(
+ self,
+ db: &dyn DefDatabase,
+ impl_: BuiltinDeriveImplId,
+ ) -> Option<FunctionId> {
+ let loc = impl_.loc(db);
+ let lang_items = crate::lang_item::lang_items(db, loc.krate(db));
+ let trait_ = impl_.loc(db).trait_.get_id(lang_items)?;
+ trait_.trait_items(db).method_by_name(&Name::new_symbol_root(self.name()))
+ }
+}
+
+pub(crate) fn with_derive_traits(
+ derive: BuiltinDeriveExpander,
+ mut f: impl FnMut(BuiltinDeriveImplTrait),
+) {
+ let trait_ = match derive {
+ BuiltinDeriveExpander::Copy => BuiltinDeriveImplTrait::Copy,
+ BuiltinDeriveExpander::Clone => BuiltinDeriveImplTrait::Clone,
+ BuiltinDeriveExpander::Default => BuiltinDeriveImplTrait::Default,
+ BuiltinDeriveExpander::Debug => BuiltinDeriveImplTrait::Debug,
+ BuiltinDeriveExpander::Hash => BuiltinDeriveImplTrait::Hash,
+ BuiltinDeriveExpander::Ord => BuiltinDeriveImplTrait::Ord,
+ BuiltinDeriveExpander::PartialOrd => BuiltinDeriveImplTrait::PartialOrd,
+ BuiltinDeriveExpander::Eq => BuiltinDeriveImplTrait::Eq,
+ BuiltinDeriveExpander::PartialEq => BuiltinDeriveImplTrait::PartialEq,
+ BuiltinDeriveExpander::CoercePointee => {
+ f(BuiltinDeriveImplTrait::CoerceUnsized);
+ f(BuiltinDeriveImplTrait::DispatchFromDyn);
+ return;
+ }
+ };
+ f(trait_);
+}
+
+impl BuiltinDeriveImplLoc {
+ pub fn source(&self, db: &dyn DefDatabase) -> InFile<TextRange> {
+ let (adt_ast_id, module) = match self.adt {
+ AdtId::StructId(adt) => {
+ let adt_loc = adt.loc(db);
+ (adt_loc.id.upcast(), adt_loc.container)
+ }
+ AdtId::UnionId(adt) => {
+ let adt_loc = adt.loc(db);
+ (adt_loc.id.upcast(), adt_loc.container)
+ }
+ AdtId::EnumId(adt) => {
+ let adt_loc = adt.loc(db);
+ (adt_loc.id.upcast(), adt_loc.container)
+ }
+ };
+ let derive_range = self.derive_attr_id.find_derive_range(
+ db,
+ module.krate(db),
+ adt_ast_id,
+ self.derive_index,
+ );
+ adt_ast_id.with_value(derive_range)
+ }
+}
diff --git a/crates/hir-def/src/expr_store.rs b/crates/hir-def/src/expr_store.rs
index 66f7e25ffa..10cd460d1d 100644
--- a/crates/hir-def/src/expr_store.rs
+++ b/crates/hir-def/src/expr_store.rs
@@ -57,8 +57,7 @@ impl HygieneId {
Self(ctx)
}
- // FIXME: Inline this
- pub(crate) fn lookup(self) -> SyntaxContext {
+ pub(crate) fn syntax_context(self) -> SyntaxContext {
self.0
}
@@ -73,7 +72,8 @@ pub type ExprSource = InFile<ExprPtr>;
pub type PatPtr = AstPtr<ast::Pat>;
pub type PatSource = InFile<PatPtr>;
-pub type LabelPtr = AstPtr<ast::Label>;
+/// BlockExpr -> Desugared label from try block
+pub type LabelPtr = AstPtr<Either<ast::Label, ast::BlockExpr>>;
pub type LabelSource = InFile<LabelPtr>;
pub type FieldPtr = AstPtr<ast::RecordExprField>;
@@ -942,7 +942,7 @@ impl ExpressionStoreSourceMap {
}
pub fn node_label(&self, node: InFile<&ast::Label>) -> Option<LabelId> {
- let src = node.map(AstPtr::new);
+ let src = node.map(AstPtr::new).map(AstPtr::wrap_left);
self.expr_only()?.label_map.get(&src).cloned()
}
diff --git a/crates/hir-def/src/expr_store/expander.rs b/crates/hir-def/src/expr_store/expander.rs
index 6a2f06b0a6..d34ec9bbc1 100644
--- a/crates/hir-def/src/expr_store/expander.rs
+++ b/crates/hir-def/src/expr_store/expander.rs
@@ -11,7 +11,7 @@ use hir_expand::{
ExpandError, ExpandErrorKind, ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
eager::EagerCallBackFn, mod_path::ModPath, span_map::SpanMap,
};
-use span::{AstIdMap, Edition, SyntaxContext};
+use span::{AstIdMap, SyntaxContext};
use syntax::ast::HasAttrs;
use syntax::{AstNode, Parse, ast};
use triomphe::Arc;
@@ -61,7 +61,7 @@ impl Expander {
pub(super) fn hygiene_for_range(&self, db: &dyn DefDatabase, range: TextRange) -> HygieneId {
match self.span_map.as_ref() {
hir_expand::span_map::SpanMapRef::ExpansionSpanMap(span_map) => {
- HygieneId::new(span_map.span_at(range.start()).ctx.opaque_and_semitransparent(db))
+ HygieneId::new(span_map.span_at(range.start()).ctx.opaque_and_semiopaque(db))
}
hir_expand::span_map::SpanMapRef::RealSpanMap(_) => HygieneId::ROOT,
}
@@ -75,11 +75,6 @@ impl Expander {
AttrFlags::is_cfg_enabled_for(owner, cfg_options)
}
- pub(super) fn call_syntax_ctx(&self) -> SyntaxContext {
- // FIXME:
- SyntaxContext::root(Edition::CURRENT_FIXME)
- }
-
pub(super) fn enter_expand<T: ast::AstNode>(
&mut self,
db: &dyn DefDatabase,
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index 42b076abb2..4ae4271b92 100644
--- a/crates/hir-def/src/expr_store/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -1096,7 +1096,7 @@ impl<'db> ExprCollector<'db> {
ast::Expr::WhileExpr(e) => self.collect_while_loop(syntax_ptr, e),
ast::Expr::ForExpr(e) => self.collect_for_loop(syntax_ptr, e),
ast::Expr::CallExpr(e) => {
- // FIXME: Remove this once we drop support for <1.86, https://github.com/rust-lang/rust/commit/ac9cb908ac4301dfc25e7a2edee574320022ae2c
+ // FIXME(MINIMUM_SUPPORTED_TOOLCHAIN_VERSION): Remove this once we drop support for <1.86, https://github.com/rust-lang/rust/commit/ac9cb908ac4301dfc25e7a2edee574320022ae2c
let is_rustc_box = {
let attrs = e.attrs();
attrs.filter_map(|it| it.as_simple_atom()).any(|it| it == "rustc_box")
@@ -1649,7 +1649,7 @@ impl<'db> ExprCollector<'db> {
fn desugar_try_block(&mut self, e: BlockExpr) -> ExprId {
let try_from_output = self.lang_path(self.lang_items().TryTraitFromOutput);
let label = self.generate_new_name();
- let label = self.alloc_label_desugared(Label { name: label });
+ let label = self.alloc_label_desugared(Label { name: label }, AstPtr::new(&e).wrap_right());
let old_label = self.current_try_block_label.replace(label);
let ptr = AstPtr::new(&e).upcast();
@@ -2319,7 +2319,6 @@ impl<'db> ExprCollector<'db> {
ast::Pat::SlicePat(p) => {
let SlicePatComponents { prefix, slice, suffix } = p.components();
- // FIXME properly handle `RestPat`
Pat::Slice {
prefix: prefix.into_iter().map(|p| self.collect_pat(p, binding_list)).collect(),
slice: slice.map(|p| self.collect_pat(p, binding_list)),
@@ -2399,7 +2398,6 @@ impl<'db> ExprCollector<'db> {
};
let start = range_part_lower(p.start());
let end = range_part_lower(p.end());
- // FIXME: Exclusive ended pattern range is stabilised
match p.op_kind() {
Some(range_type) => Pat::Range { start, end, range_type },
None => Pat::Missing,
@@ -2519,9 +2517,9 @@ impl<'db> ExprCollector<'db> {
let mut hygiene_info = if hygiene_id.is_root() {
None
} else {
- hygiene_id.lookup().outer_expn(self.db).map(|expansion| {
+ hygiene_id.syntax_context().outer_expn(self.db).map(|expansion| {
let expansion = self.db.lookup_intern_macro_call(expansion.into());
- (hygiene_id.lookup().parent(self.db), expansion.def)
+ (hygiene_id.syntax_context().parent(self.db), expansion.def)
})
};
let name = Name::new_lifetime(&lifetime.text());
@@ -2548,7 +2546,7 @@ impl<'db> ExprCollector<'db> {
// Therefore, if we got to the rib of its declaration, give up its hygiene
// and use its parent expansion.
- hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(self.db));
+ hygiene_id = HygieneId::new(parent_ctx.opaque_and_semiopaque(self.db));
hygiene_info = parent_ctx.outer_expn(self.db).map(|expansion| {
let expansion = self.db.lookup_intern_macro_call(expansion.into());
(parent_ctx.parent(self.db), expansion.def)
@@ -2727,17 +2725,17 @@ impl ExprCollector<'_> {
self.store.pats.alloc(Pat::Missing)
}
- fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId {
+ fn alloc_label(&mut self, label: Label, ptr: AstPtr<ast::Label>) -> LabelId {
+ self.alloc_label_desugared(label, ptr.wrap_left())
+ }
+
+ fn alloc_label_desugared(&mut self, label: Label, ptr: LabelPtr) -> LabelId {
let src = self.expander.in_file(ptr);
let id = self.store.labels.alloc(label);
self.store.label_map_back.insert(id, src);
self.store.label_map.insert(src, id);
id
}
- // FIXME: desugared labels don't have ptr, that's wrong and should be fixed somehow.
- fn alloc_label_desugared(&mut self, label: Label) -> LabelId {
- self.store.labels.alloc(label)
- }
fn is_lowering_awaitable_block(&self) -> &Awaitable {
self.awaitable_context.as_ref().unwrap_or(&Awaitable::No("unknown"))
diff --git a/crates/hir-def/src/expr_store/lower/format_args.rs b/crates/hir-def/src/expr_store/lower/format_args.rs
index 4bbfc5b144..7ef84f27f6 100644
--- a/crates/hir-def/src/expr_store/lower/format_args.rs
+++ b/crates/hir-def/src/expr_store/lower/format_args.rs
@@ -1,12 +1,10 @@
//! Lowering of `format_args!()`.
use base_db::FxIndexSet;
-use hir_expand::name::{AsName, Name};
+use hir_expand::name::Name;
use intern::{Symbol, sym};
-use syntax::{
- AstPtr, AstToken as _,
- ast::{self, HasName},
-};
+use span::SyntaxContext;
+use syntax::{AstPtr, AstToken as _, ast};
use crate::{
builtin_type::BuiltinUint,
@@ -32,8 +30,8 @@ impl<'db> ExprCollector<'db> {
let mut args = FormatArgumentsCollector::default();
f.args().for_each(|arg| {
args.add(FormatArgument {
- kind: match arg.name() {
- Some(name) => FormatArgumentKind::Named(name.as_name()),
+ kind: match arg.arg_name() {
+ Some(name) => FormatArgumentKind::Named(Name::new_root(name.name().text())),
None => FormatArgumentKind::Normal,
},
expr: self.collect_expr_opt(arg.expr()),
@@ -52,7 +50,7 @@ impl<'db> ExprCollector<'db> {
self.expand_macros_to_string(template.clone()).map(|it| (it, template))
}) {
Some(((s, is_direct_literal), template)) => {
- let call_ctx = self.expander.call_syntax_ctx();
+ let call_ctx = SyntaxContext::root(self.def_map.edition());
let hygiene = self.hygiene_id_for(s.syntax().text_range());
let fmt = format_args::parse(
&s,
diff --git a/crates/hir-def/src/expr_store/tests/body/block.rs b/crates/hir-def/src/expr_store/tests/body/block.rs
index 836a079e77..d457a4ca7a 100644
--- a/crates/hir-def/src/expr_store/tests/body/block.rs
+++ b/crates/hir-def/src/expr_store/tests/body/block.rs
@@ -190,13 +190,13 @@ fn f() {
"#,
expect![[r#"
ModuleIdLt {
- [salsa id]: Id(3003),
+ [salsa id]: Id(3803),
krate: Crate(
- Id(1c00),
+ Id(2400),
),
block: Some(
BlockId(
- 3c01,
+ 4401,
),
),
}"#]],
diff --git a/crates/hir-def/src/hir/generics.rs b/crates/hir-def/src/hir/generics.rs
index 1a2d5ebba4..482cf36f95 100644
--- a/crates/hir-def/src/hir/generics.rs
+++ b/crates/hir-def/src/hir/generics.rs
@@ -20,8 +20,7 @@ pub type LocalLifetimeParamId = Idx<LifetimeParamData>;
/// Data about a generic type parameter (to a function, struct, impl, ...).
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TypeParamData {
- /// [`None`] only if the type ref is an [`crate::type_ref::TypeRef::ImplTrait`]. FIXME: Might be better to just
- /// make it always be a value, giving impl trait a special name.
+ /// [`None`] only if the type ref is an [`crate::type_ref::TypeRef::ImplTrait`].
pub name: Option<Name>,
pub default: Option<TypeRefId>,
pub provenance: TypeParamProvenance,
diff --git a/crates/hir-def/src/hir/type_ref.rs b/crates/hir-def/src/hir/type_ref.rs
index ad8535413d..b64199fa26 100644
--- a/crates/hir-def/src/hir/type_ref.rs
+++ b/crates/hir-def/src/hir/type_ref.rs
@@ -1,8 +1,6 @@
//! HIR for references to types. Paths in these are not yet resolved. They can
//! be directly created from an ast::TypeRef, without further queries.
-use std::fmt::Write;
-
use hir_expand::name::Name;
use intern::Symbol;
use la_arena::Idx;
@@ -10,12 +8,11 @@ use thin_vec::ThinVec;
use crate::{
LifetimeParamId, TypeParamId,
- builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
expr_store::{
ExpressionStore,
path::{GenericArg, Path},
},
- hir::{ExprId, Literal},
+ hir::ExprId,
};
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
@@ -275,56 +272,3 @@ impl TypeBound {
pub struct ConstRef {
pub expr: ExprId,
}
-
-/// A literal constant value
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum LiteralConstRef {
- Int(i128),
- UInt(u128),
- Bool(bool),
- Char(char),
-
- /// Case of an unknown value that rustc might know but we don't
- // FIXME: this is a hack to get around chalk not being able to represent unevaluatable
- // constants
- // https://github.com/rust-lang/rust-analyzer/pull/8813#issuecomment-840679177
- // https://rust-lang.zulipchat.com/#narrow/stream/144729-wg-traits/topic/Handling.20non.20evaluatable.20constants'.20equality/near/238386348
- Unknown,
-}
-
-impl LiteralConstRef {
- pub fn builtin_type(&self) -> BuiltinType {
- match self {
- LiteralConstRef::UInt(_) | LiteralConstRef::Unknown => {
- BuiltinType::Uint(BuiltinUint::U128)
- }
- LiteralConstRef::Int(_) => BuiltinType::Int(BuiltinInt::I128),
- LiteralConstRef::Char(_) => BuiltinType::Char,
- LiteralConstRef::Bool(_) => BuiltinType::Bool,
- }
- }
-}
-
-impl From<Literal> for LiteralConstRef {
- fn from(literal: Literal) -> Self {
- match literal {
- Literal::Char(c) => Self::Char(c),
- Literal::Bool(flag) => Self::Bool(flag),
- Literal::Int(num, _) => Self::Int(num),
- Literal::Uint(num, _) => Self::UInt(num),
- _ => Self::Unknown,
- }
- }
-}
-
-impl std::fmt::Display for LiteralConstRef {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
- match self {
- LiteralConstRef::Int(num) => num.fmt(f),
- LiteralConstRef::UInt(num) => num.fmt(f),
- LiteralConstRef::Bool(flag) => flag.fmt(f),
- LiteralConstRef::Char(c) => write!(f, "'{c}'"),
- LiteralConstRef::Unknown => f.write_char('_'),
- }
- }
-}
diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs
index 3ffeebfaf2..a3278dd76c 100644
--- a/crates/hir-def/src/item_scope.rs
+++ b/crates/hir-def/src/item_scope.rs
@@ -9,15 +9,15 @@ use indexmap::map::Entry;
use itertools::Itertools;
use la_arena::Idx;
use rustc_hash::{FxHashMap, FxHashSet};
-use smallvec::{SmallVec, smallvec};
+use smallvec::SmallVec;
use span::Edition;
use stdx::format_to;
use syntax::ast;
use thin_vec::ThinVec;
use crate::{
- AdtId, BuiltinType, ConstId, ExternBlockId, ExternCrateId, FxIndexMap, HasModule, ImplId,
- Lookup, MacroCallStyles, MacroId, ModuleDefId, ModuleId, TraitId, UseId,
+ AdtId, BuiltinDeriveImplId, BuiltinType, ConstId, ExternBlockId, ExternCrateId, FxIndexMap,
+ HasModule, ImplId, Lookup, MacroCallStyles, MacroId, ModuleDefId, ModuleId, TraitId, UseId,
db::DefDatabase,
per_ns::{Item, MacrosItem, PerNs, TypesItem, ValuesItem},
visibility::Visibility,
@@ -158,7 +158,8 @@ pub struct ItemScope {
/// declared.
declarations: ThinVec<ModuleDefId>,
- impls: ThinVec<ImplId>,
+ impls: ThinVec<(ImplId, /* trait impl */ bool)>,
+ builtin_derive_impls: ThinVec<BuiltinDeriveImplId>,
extern_blocks: ThinVec<ExternBlockId>,
unnamed_consts: ThinVec<ConstId>,
/// Traits imported via `use Trait as _;`.
@@ -326,7 +327,19 @@ impl ItemScope {
}
pub fn impls(&self) -> impl ExactSizeIterator<Item = ImplId> + '_ {
- self.impls.iter().copied()
+ self.impls.iter().map(|&(id, _)| id)
+ }
+
+ pub fn trait_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
+ self.impls.iter().filter(|&&(_, is_trait_impl)| is_trait_impl).map(|&(id, _)| id)
+ }
+
+ pub fn inherent_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
+ self.impls.iter().filter(|&&(_, is_trait_impl)| !is_trait_impl).map(|&(id, _)| id)
+ }
+
+ pub fn builtin_derive_impls(&self) -> impl ExactSizeIterator<Item = BuiltinDeriveImplId> + '_ {
+ self.builtin_derive_impls.iter().copied()
}
pub fn all_macro_calls(&self) -> impl Iterator<Item = MacroCallId> + '_ {
@@ -467,8 +480,12 @@ impl ItemScope {
self.legacy_macros.get(name).map(|it| &**it)
}
- pub(crate) fn define_impl(&mut self, imp: ImplId) {
- self.impls.push(imp);
+ pub(crate) fn define_impl(&mut self, imp: ImplId, is_trait_impl: bool) {
+ self.impls.push((imp, is_trait_impl));
+ }
+
+ pub(crate) fn define_builtin_derive_impl(&mut self, imp: BuiltinDeriveImplId) {
+ self.builtin_derive_impls.push(imp);
}
pub(crate) fn define_extern_block(&mut self, extern_block: ExternBlockId) {
@@ -522,12 +539,13 @@ impl ItemScope {
adt: AstId<ast::Adt>,
attr_id: AttrId,
attr_call_id: MacroCallId,
- len: usize,
+ mut derive_call_ids: SmallVec<[Option<MacroCallId>; 4]>,
) {
+ derive_call_ids.shrink_to_fit();
self.derive_macros.entry(adt).or_default().push(DeriveMacroInvocation {
attr_id,
attr_call_id,
- derive_call_ids: smallvec![None; len],
+ derive_call_ids,
});
}
@@ -811,6 +829,7 @@ impl ItemScope {
unresolved,
declarations,
impls,
+ builtin_derive_impls,
unnamed_consts,
unnamed_trait_imports,
legacy_macros,
@@ -834,6 +853,7 @@ impl ItemScope {
unresolved.shrink_to_fit();
declarations.shrink_to_fit();
impls.shrink_to_fit();
+ builtin_derive_impls.shrink_to_fit();
unnamed_consts.shrink_to_fit();
unnamed_trait_imports.shrink_to_fit();
legacy_macros.shrink_to_fit();
diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs
index 6eab8888d9..a1707f17be 100644
--- a/crates/hir-def/src/item_tree.rs
+++ b/crates/hir-def/src/item_tree.rs
@@ -103,7 +103,7 @@ fn lower_extra_crate_attrs<'a>(
struct FakeSpanMap {
file_id: span::EditionedFileId,
}
- impl syntax_bridge::SpanMapper<Span> for FakeSpanMap {
+ impl syntax_bridge::SpanMapper for FakeSpanMap {
fn span_for(&self, range: TextRange) -> Span {
Span {
range,
@@ -614,7 +614,9 @@ pub struct Trait {
}
#[derive(Debug, Clone, Eq, PartialEq)]
-pub struct Impl {}
+pub struct Impl {
+ pub is_trait_impl: bool,
+}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TypeAlias {
diff --git a/crates/hir-def/src/item_tree/attrs.rs b/crates/hir-def/src/item_tree/attrs.rs
index 81a9b28b62..7907611284 100644
--- a/crates/hir-def/src/item_tree/attrs.rs
+++ b/crates/hir-def/src/item_tree/attrs.rs
@@ -18,7 +18,6 @@ use hir_expand::{
name::Name,
};
use intern::{Interned, Symbol, sym};
-use span::Span;
use syntax::{AstNode, T, ast};
use syntax_bridge::DocCommentDesugarMode;
use tt::token_to_literal;
@@ -49,7 +48,7 @@ impl AttrsOrCfg {
span_map: S,
) -> AttrsOrCfg
where
- S: syntax_bridge::SpanMapper<Span> + Copy,
+ S: syntax_bridge::SpanMapper + Copy,
{
let mut attrs = Vec::new();
let result =
@@ -227,7 +226,7 @@ impl<'attr> AttrQuery<'attr> {
}
#[inline]
- pub(crate) fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> {
+ pub(crate) fn string_value_with_span(self) -> Option<(&'attr str, span::Span)> {
self.attrs().find_map(|attr| attr.string_value_with_span())
}
diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs
index d8519f7393..3f19e00154 100644
--- a/crates/hir-def/src/item_tree/lower.rs
+++ b/crates/hir-def/src/item_tree/lower.rs
@@ -271,7 +271,7 @@ impl<'a> Ctx<'a> {
let ast_id = self.source_ast_id_map.ast_id(impl_def);
// Note that trait impls don't get implicit `Self` unlike traits, because here they are a
// type alias rather than a type parameter, so this is handled by the resolver.
- let res = Impl {};
+ let res = Impl { is_trait_impl: impl_def.trait_().is_some() };
self.tree.small_data.insert(ast_id.upcast(), SmallModItem::Impl(res));
ast_id
}
diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs
index c89299e6d8..4113a778ea 100644
--- a/crates/hir-def/src/item_tree/pretty.rs
+++ b/crates/hir-def/src/item_tree/pretty.rs
@@ -258,7 +258,7 @@ impl Printer<'_> {
w!(self, "trait {} {{ ... }}", name.display(self.db, self.edition));
}
ModItemId::Impl(ast_id) => {
- let Impl {} = &self.tree[ast_id];
+ let Impl { is_trait_impl: _ } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
w!(self, "impl {{ ... }}");
}
diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs
index fd693477a4..eba4d87ec9 100644
--- a/crates/hir-def/src/lang_item.rs
+++ b/crates/hir-def/src/lang_item.rs
@@ -2,6 +2,7 @@
//!
//! This attribute to tell the compiler about semi built-in std library
//! features, such as Fn family of traits.
+use hir_expand::name::Name;
use intern::{Symbol, sym};
use stdx::impl_from;
@@ -10,7 +11,7 @@ use crate::{
StaticId, StructId, TraitId, TypeAliasId, UnionId,
attrs::AttrFlags,
db::DefDatabase,
- nameres::{assoc::TraitItems, crate_def_map, crate_local_def_map},
+ nameres::{DefMap, assoc::TraitItems, crate_def_map, crate_local_def_map},
};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -39,8 +40,12 @@ pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangIt
let crate_def_map = crate_def_map(db, krate);
+ if !crate_def_map.is_unstable_feature_enabled(&sym::lang_items) {
+ return None;
+ }
+
for (_, module_data) in crate_def_map.modules() {
- for impl_def in module_data.scope.impls() {
+ for impl_def in module_data.scope.inherent_impls() {
lang_items.collect_lang_item(db, impl_def);
for &(_, assoc) in impl_def.impl_items(db).items.iter() {
match assoc {
@@ -93,6 +98,10 @@ pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangIt
}
}
+ if matches!(krate.data(db).origin, base_db::CrateOrigin::Lang(base_db::LangCrateOrigin::Core)) {
+ lang_items.fill_non_lang_core_traits(db, crate_def_map);
+ }
+
if lang_items.is_empty() { None } else { Some(Box::new(lang_items)) }
}
@@ -135,6 +144,31 @@ impl LangItems {
}
}
+fn resolve_core_trait(
+ db: &dyn DefDatabase,
+ core_def_map: &DefMap,
+ modules: &[Symbol],
+ name: Symbol,
+) -> Option<TraitId> {
+ let mut current = &core_def_map[core_def_map.root];
+ for module in modules {
+ let Some((ModuleDefId::ModuleId(cur), _)) =
+ current.scope.type_(&Name::new_symbol_root(module.clone()))
+ else {
+ return None;
+ };
+ if cur.krate(db) != core_def_map.krate() || cur.block(db) != core_def_map.block_id() {
+ return None;
+ }
+ current = &core_def_map[cur];
+ }
+ let Some((ModuleDefId::TraitId(trait_), _)) = current.scope.type_(&Name::new_symbol_root(name))
+ else {
+ return None;
+ };
+ Some(trait_)
+}
+
#[salsa::tracked(returns(as_deref))]
pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option<Box<[TraitId]>> {
let mut traits = Vec::new();
@@ -158,6 +192,10 @@ macro_rules! language_item_table {
(
$LangItems:ident =>
$( $(#[$attr:meta])* $lang_item:ident, $module:ident :: $name:ident, $target:ident; )*
+
+ @non_lang_core_traits:
+
+ $( core::$($non_lang_module:ident)::*, $non_lang_trait:ident; )*
) => {
#[allow(non_snake_case)] // FIXME: Should we remove this?
#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
@@ -166,6 +204,9 @@ macro_rules! language_item_table {
$(#[$attr])*
pub $lang_item: Option<$target>,
)*
+ $(
+ pub $non_lang_trait: Option<TraitId>,
+ )*
}
impl LangItems {
@@ -176,6 +217,7 @@ macro_rules! language_item_table {
/// Merges `self` with `other`, with preference to `self` items.
fn merge_prefer_self(&mut self, other: &Self) {
$( self.$lang_item = self.$lang_item.or(other.$lang_item); )*
+ $( self.$non_lang_trait = self.$non_lang_trait.or(other.$non_lang_trait); )*
}
fn assign_lang_item(&mut self, name: Symbol, target: LangItemTarget) {
@@ -190,6 +232,10 @@ macro_rules! language_item_table {
_ => {}
}
}
+
+ fn fill_non_lang_core_traits(&mut self, db: &dyn DefDatabase, core_def_map: &DefMap) {
+ $( self.$non_lang_trait = resolve_core_trait(db, core_def_map, &[ $(sym::$non_lang_module),* ], sym::$non_lang_trait); )*
+ }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -426,4 +472,11 @@ language_item_table! { LangItems =>
String, sym::String, StructId;
CStr, sym::CStr, StructId;
Ordering, sym::Ordering, EnumId;
+
+ @non_lang_core_traits:
+ core::default, Default;
+ core::fmt, Debug;
+ core::hash, Hash;
+ core::cmp, Ord;
+ core::cmp, Eq;
}
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index 97af8ad93d..8d6c418d75 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -30,6 +30,7 @@ pub mod dyn_map;
pub mod item_tree;
+pub mod builtin_derive;
pub mod lang_item;
pub mod hir;
@@ -63,6 +64,7 @@ use base_db::{Crate, impl_intern_key};
use hir_expand::{
AstId, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallStyles,
MacroDefId, MacroDefKind,
+ attrs::AttrId,
builtin::{BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
eager::expand_eager_macro_input,
@@ -80,6 +82,7 @@ pub use hir_expand::{Intern, Lookup, tt};
use crate::{
attrs::AttrFlags,
+ builtin_derive::BuiltinDeriveImplTrait,
builtin_type::BuiltinType,
db::DefDatabase,
expr_store::ExpressionStoreSourceMap,
@@ -331,6 +334,21 @@ impl ImplId {
}
}
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct BuiltinDeriveImplLoc {
+ pub adt: AdtId,
+ pub trait_: BuiltinDeriveImplTrait,
+ pub derive_attr_id: AttrId,
+ pub derive_index: u32,
+}
+
+#[salsa::interned(debug, no_lifetime)]
+#[derive(PartialOrd, Ord)]
+pub struct BuiltinDeriveImplId {
+ #[returns(ref)]
+ pub loc: BuiltinDeriveImplLoc,
+}
+
type UseLoc = ItemLoc<ast::Use>;
impl_intern!(UseId, UseLoc, intern_use, lookup_intern_use);
@@ -660,6 +678,18 @@ impl_from!(
for ModuleDefId
);
+impl From<DefWithBodyId> for ModuleDefId {
+ #[inline]
+ fn from(value: DefWithBodyId) -> Self {
+ match value {
+ DefWithBodyId::FunctionId(id) => id.into(),
+ DefWithBodyId::StaticId(id) => id.into(),
+ DefWithBodyId::ConstId(id) => id.into(),
+ DefWithBodyId::VariantId(id) => id.into(),
+ }
+ }
+}
+
/// A constant, which might appears as a const item, an anonymous const block in expressions
/// or patterns, or as a constant in types with const generics.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)]
@@ -1009,6 +1039,20 @@ fn module_for_assoc_item_loc<'db>(
id.lookup(db).container.module(db)
}
+impl HasModule for BuiltinDeriveImplLoc {
+ #[inline]
+ fn module(&self, db: &dyn DefDatabase) -> ModuleId {
+ self.adt.module(db)
+ }
+}
+
+impl HasModule for BuiltinDeriveImplId {
+ #[inline]
+ fn module(&self, db: &dyn DefDatabase) -> ModuleId {
+ self.loc(db).module(db)
+ }
+}
+
impl HasModule for FunctionId {
#[inline]
fn module(&self, db: &dyn DefDatabase) -> ModuleId {
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs
index a12674f353..7b5d0103e6 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -35,9 +35,9 @@ macro_rules! f {
};
}
-struct#0:MacroRules[BE8F, 0]@58..64#15360# MyTraitMap2#0:MacroCall[BE8F, 0]@31..42#ROOT2024# {#0:MacroRules[BE8F, 0]@72..73#15360#
- map#0:MacroRules[BE8F, 0]@86..89#15360#:#0:MacroRules[BE8F, 0]@89..90#15360# #0:MacroRules[BE8F, 0]@89..90#15360#::#0:MacroRules[BE8F, 0]@91..93#15360#std#0:MacroRules[BE8F, 0]@93..96#15360#::#0:MacroRules[BE8F, 0]@96..98#15360#collections#0:MacroRules[BE8F, 0]@98..109#15360#::#0:MacroRules[BE8F, 0]@109..111#15360#HashSet#0:MacroRules[BE8F, 0]@111..118#15360#<#0:MacroRules[BE8F, 0]@118..119#15360#(#0:MacroRules[BE8F, 0]@119..120#15360#)#0:MacroRules[BE8F, 0]@120..121#15360#>#0:MacroRules[BE8F, 0]@121..122#15360#,#0:MacroRules[BE8F, 0]@122..123#15360#
-}#0:MacroRules[BE8F, 0]@132..133#15360#
+struct#0:MacroRules[BE8F, 0]@58..64#17408# MyTraitMap2#0:MacroCall[BE8F, 0]@31..42#ROOT2024# {#0:MacroRules[BE8F, 0]@72..73#17408#
+ map#0:MacroRules[BE8F, 0]@86..89#17408#:#0:MacroRules[BE8F, 0]@89..90#17408# #0:MacroRules[BE8F, 0]@89..90#17408#::#0:MacroRules[BE8F, 0]@91..93#17408#std#0:MacroRules[BE8F, 0]@93..96#17408#::#0:MacroRules[BE8F, 0]@96..98#17408#collections#0:MacroRules[BE8F, 0]@98..109#17408#::#0:MacroRules[BE8F, 0]@109..111#17408#HashSet#0:MacroRules[BE8F, 0]@111..118#17408#<#0:MacroRules[BE8F, 0]@118..119#17408#(#0:MacroRules[BE8F, 0]@119..120#17408#)#0:MacroRules[BE8F, 0]@120..121#17408#>#0:MacroRules[BE8F, 0]@121..122#17408#,#0:MacroRules[BE8F, 0]@122..123#17408#
+}#0:MacroRules[BE8F, 0]@132..133#17408#
"#]],
);
}
@@ -197,7 +197,7 @@ macro_rules! mk_struct {
#[macro_use]
mod foo;
-struct#1:MacroRules[DB0C, 0]@59..65#15360# Foo#0:MacroCall[DB0C, 0]@32..35#ROOT2024#(#1:MacroRules[DB0C, 0]@70..71#15360#u32#0:MacroCall[DB0C, 0]@41..44#ROOT2024#)#1:MacroRules[DB0C, 0]@74..75#15360#;#1:MacroRules[DB0C, 0]@75..76#15360#
+struct#1:MacroRules[DB0C, 0]@59..65#17408# Foo#0:MacroCall[DB0C, 0]@32..35#ROOT2024#(#1:MacroRules[DB0C, 0]@70..71#17408#u32#0:MacroCall[DB0C, 0]@41..44#ROOT2024#)#1:MacroRules[DB0C, 0]@74..75#17408#;#1:MacroRules[DB0C, 0]@75..76#17408#
"#]],
);
}
@@ -423,10 +423,10 @@ m! { foo, bar }
macro_rules! m {
($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
}
-impl#\15360# Bar#\15360# {#\15360#
- fn#\15360# foo#\ROOT2024#(#\15360#)#\15360# {#\15360#}#\15360#
- fn#\15360# bar#\ROOT2024#(#\15360#)#\15360# {#\15360#}#\15360#
-}#\15360#
+impl#\17408# Bar#\17408# {#\17408#
+ fn#\17408# foo#\ROOT2024#(#\17408#)#\17408# {#\17408#}#\17408#
+ fn#\17408# bar#\ROOT2024#(#\17408#)#\17408# {#\17408#}#\17408#
+}#\17408#
"#]],
);
}
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index 59bd9474a9..c63f2c1d78 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -53,6 +53,8 @@ use crate::{
#[track_caller]
fn check_errors(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
+ crate::nameres::ENABLE_BUILTIN_DERIVE_FAST_PATH.set(false);
+
let db = TestDB::with_files(ra_fixture);
let krate = db.fetch_test_crate();
let def_map = crate_def_map(&db, krate);
@@ -80,10 +82,15 @@ fn check_errors(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect)
.sorted_unstable_by_key(|(range, _)| range.start())
.format_with("\n", |(range, err), format| format(&format_args!("{range:?}: {err}")))
.to_string();
+
+ crate::nameres::ENABLE_BUILTIN_DERIVE_FAST_PATH.set(true);
+
expect.assert_eq(&errors);
}
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, mut expect: Expect) {
+ crate::nameres::ENABLE_BUILTIN_DERIVE_FAST_PATH.set(false);
+
let extra_proc_macros = vec![(
r#"
#[proc_macro_attribute]
@@ -246,6 +253,8 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
}
+ crate::nameres::ENABLE_BUILTIN_DERIVE_FAST_PATH.set(true);
+
expect.indent(false);
expect.assert_eq(&expanded_text);
}
@@ -378,6 +387,7 @@ struct IdentityWhenValidProcMacroExpander;
impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
fn expand(
&self,
+ _: &dyn ExpandDatabase,
subtree: &TopSubtree,
_: Option<&TopSubtree>,
_: &base_db::Env,
diff --git a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
index 6f30ca04af..bf04a500a5 100644
--- a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
+++ b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -123,15 +123,15 @@ struct Foo {
}
#[attr1]
+#[derive(Bar)]
+#[attr2] struct S;
+#[attr1]
#[my_cool_derive()] struct Foo {
v1: i32, #[attr3]v2: fn(#[attr4]param2: u32), v3: Foo< {
456
}
>,
-}
-#[attr1]
-#[derive(Bar)]
-#[attr2] struct S;"#]],
+}"#]],
);
}
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index 3f29619bcb..5f05cdb1e2 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -87,6 +87,25 @@ use crate::{
pub use self::path_resolution::ResolvePathResultPrefixInfo;
+#[cfg(test)]
+thread_local! {
+ /// HACK: In order to test builtin derive expansion, we gate their fast path with this atomic when cfg(test).
+ pub(crate) static ENABLE_BUILTIN_DERIVE_FAST_PATH: std::cell::Cell<bool> =
+ const { std::cell::Cell::new(true) };
+}
+
+#[inline]
+#[cfg(test)]
+fn enable_builtin_derive_fast_path() -> bool {
+ ENABLE_BUILTIN_DERIVE_FAST_PATH.get()
+}
+
+#[inline(always)]
+#[cfg(not(test))]
+fn enable_builtin_derive_fast_path() -> bool {
+ true
+}
+
const PREDEFINED_TOOLS: &[SmolStr] = &[
SmolStr::new_static("clippy"),
SmolStr::new_static("rustfmt"),
@@ -483,6 +502,7 @@ impl DefMap {
}
impl DefMap {
+ /// Returns all modules in the crate that are associated with the given file.
pub fn modules_for_file<'a>(
&'a self,
db: &'a dyn DefDatabase,
@@ -490,16 +510,33 @@ impl DefMap {
) -> impl Iterator<Item = ModuleId> + 'a {
self.modules
.iter()
- .filter(move |(_id, data)| {
+ .filter(move |(_, data)| {
data.origin.file_id().map(|file_id| file_id.file_id(db)) == Some(file_id)
})
- .map(|(id, _data)| id)
+ .map(|(id, _)| id)
}
pub fn modules(&self) -> impl Iterator<Item = (ModuleId, &ModuleData)> + '_ {
self.modules.iter()
}
+ /// Returns all inline modules (mod name { ... }) in the crate that are associated with the given macro expansion.
+ pub fn inline_modules_for_macro_file(
+ &self,
+ file_id: MacroCallId,
+ ) -> impl Iterator<Item = ModuleId> + '_ {
+ self.modules
+ .iter()
+ .filter(move |(_, data)| {
+ matches!(
+ data.origin,
+ ModuleOrigin::Inline { definition_tree_id, .. }
+ if definition_tree_id.file_id().macro_file() == Some(file_id)
+ )
+ })
+ .map(|(id, _)| id)
+ }
+
pub fn derive_helpers_in_scope(
&self,
id: AstId<ast::Adt>,
diff --git a/crates/hir-def/src/nameres/attr_resolution.rs b/crates/hir-def/src/nameres/attr_resolution.rs
index 1cbd2c10b5..062b55fcef 100644
--- a/crates/hir-def/src/nameres/attr_resolution.rs
+++ b/crates/hir-def/src/nameres/attr_resolution.rs
@@ -114,7 +114,7 @@ pub(super) fn attr_macro_as_call_id(
let arg = match macro_attr.input.as_deref() {
Some(AttrInput::TokenTree(tt)) => {
let mut tt = tt.clone();
- tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Invisible;
+ tt.set_top_subtree_delimiter_kind(tt::DelimiterKind::Invisible);
Some(tt)
}
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 7e1ec526a7..87ade06517 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -12,26 +12,28 @@ use hir_expand::{
AttrMacroAttrIds, EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId,
MacroCallKind, MacroDefId, MacroDefKind,
attrs::{Attr, AttrId},
- builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro},
+ builtin::{BuiltinDeriveExpander, find_builtin_attr, find_builtin_derive, find_builtin_macro},
mod_path::{ModPath, PathKind},
name::{AsName, Name},
proc_macro::CustomProcMacroExpander,
};
-use intern::{Interned, sym};
+use intern::{Interned, Symbol, sym};
use itertools::izip;
use la_arena::Idx;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
use span::{Edition, FileAstId, SyntaxContext};
+use stdx::always;
use syntax::ast;
use triomphe::Arc;
use crate::{
- AdtId, AssocItemId, AstId, AstIdWithPath, ConstLoc, EnumLoc, ExternBlockLoc, ExternCrateId,
- ExternCrateLoc, FunctionId, FunctionLoc, FxIndexMap, ImplLoc, Intern, ItemContainerId, Lookup,
- Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags,
- ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc,
- UnionLoc, UnresolvedMacro, UseId, UseLoc,
+ AdtId, AssocItemId, AstId, AstIdWithPath, BuiltinDeriveImplId, BuiltinDeriveImplLoc, ConstLoc,
+ EnumLoc, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, FxIndexMap,
+ ImplLoc, Intern, ItemContainerId, Lookup, Macro2Id, Macro2Loc, MacroExpander, MacroId,
+ MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId,
+ ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId,
+ UseLoc,
db::DefDatabase,
item_scope::{GlobId, ImportId, ImportOrExternCrate, PerNsGlobImports},
item_tree::{
@@ -104,6 +106,7 @@ pub(super) fn collect_defs(
prev_active_attrs: Default::default(),
unresolved_extern_crates: Default::default(),
is_proc_macro: krate.is_proc_macro,
+ deferred_builtin_derives: Default::default(),
};
if tree_id.is_block() {
collector.seed_with_inner(tree_id);
@@ -214,6 +217,17 @@ enum MacroDirectiveKind<'db> {
},
}
+#[derive(Debug)]
+struct DeferredBuiltinDerive {
+ call_id: MacroCallId,
+ derive: BuiltinDeriveExpander,
+ module_id: ModuleId,
+ depth: usize,
+ container: ItemContainerId,
+ derive_attr_id: AttrId,
+ derive_index: u32,
+}
+
/// Walks the tree of module recursively
struct DefCollector<'db> {
db: &'db dyn DefDatabase,
@@ -252,6 +266,11 @@ struct DefCollector<'db> {
/// on the same item. Therefore, this holds all active attributes that we already
/// expanded.
prev_active_attrs: FxHashMap<AstId<ast::Item>, SmallVec<[AttrId; 1]>>,
+ /// To save memory, we do not really expand builtin derives. Instead, we save them as a `BuiltinDeriveImplId`.
+ ///
+ /// However, we can only do that when the derive is directly above the item, and there is no attribute in between.
+ /// Otherwise, all sorts of weird things can happen, like the item name resolving to something else.
+ deferred_builtin_derives: FxHashMap<AstId<ast::Item>, Vec<DeferredBuiltinDerive>>,
}
impl<'db> DefCollector<'db> {
@@ -273,13 +292,13 @@ impl<'db> DefCollector<'db> {
match () {
() if *attr_name == sym::recursion_limit => {
if let Some(limit) = attr.string_value()
- && let Ok(limit) = limit.as_str().parse()
+ && let Ok(limit) = limit.parse()
{
crate_data.recursion_limit = Some(limit);
}
}
() if *attr_name == sym::crate_type => {
- if attr.string_value() == Some(&sym::proc_dash_macro) {
+ if attr.string_value() == Some("proc-macro") {
self.is_proc_macro = true;
}
}
@@ -1241,7 +1260,7 @@ impl<'db> DefCollector<'db> {
fn resolve_macros(&mut self) -> ReachedFixedPoint {
let mut macros = mem::take(&mut self.unresolved_macros);
let mut resolved = Vec::new();
- let mut push_resolved = |directive: &MacroDirective<'_>, call_id| {
+ let push_resolved = |resolved: &mut Vec<_>, directive: &MacroDirective<'_>, call_id| {
let attr_macro_item = match &directive.kind {
MacroDirectiveKind::Attr { ast_id, .. } => Some(ast_id.ast_id),
MacroDirectiveKind::FnLike { .. } | MacroDirectiveKind::Derive { .. } => None,
@@ -1271,8 +1290,8 @@ impl<'db> DefCollector<'db> {
MacroSubNs::Attr
}
};
- let resolver = |path: &_| {
- let resolved_res = self.def_map.resolve_path_fp_with_macro(
+ let resolver = |def_map: &DefMap, path: &_| {
+ let resolved_res = def_map.resolve_path_fp_with_macro(
self.crate_local_def_map.unwrap_or(&self.local_def_map),
self.db,
ResolveMode::Other,
@@ -1283,7 +1302,7 @@ impl<'db> DefCollector<'db> {
);
resolved_res.resolved_def.take_macros().map(|it| (it, self.db.macro_def(it)))
};
- let resolver_def_id = |path: &_| resolver(path).map(|(_, it)| it);
+ let resolver_def_id = |path: &_| resolver(&self.def_map, path).map(|(_, it)| it);
match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
@@ -1306,7 +1325,7 @@ impl<'db> DefCollector<'db> {
.scope
.add_macro_invoc(ast_id.ast_id, call_id);
- push_resolved(directive, call_id);
+ push_resolved(&mut resolved, directive, call_id);
res = ReachedFixedPoint::No;
return Resolved::Yes;
@@ -1320,6 +1339,7 @@ impl<'db> DefCollector<'db> {
ctxt: call_site,
derive_macro_id,
} => {
+ // FIXME: This code is almost duplicate below.
let id = derive_macro_as_call_id(
self.db,
ast_id,
@@ -1327,7 +1347,7 @@ impl<'db> DefCollector<'db> {
*derive_pos as u32,
*call_site,
self.def_map.krate,
- resolver,
+ |path| resolver(&self.def_map, path),
*derive_macro_id,
);
@@ -1354,7 +1374,8 @@ impl<'db> DefCollector<'db> {
}
}
- push_resolved(directive, call_id);
+ push_resolved(&mut resolved, directive, call_id);
+
res = ReachedFixedPoint::No;
return Resolved::Yes;
}
@@ -1460,29 +1481,85 @@ impl<'db> DefCollector<'db> {
let ast_id = ast_id.with_value(ast_adt_id);
+ let mut derive_call_ids = SmallVec::new();
match attr.parse_path_comma_token_tree(self.db) {
Some(derive_macros) => {
let call_id = call_id();
- let mut len = 0;
for (idx, (path, call_site, _)) in derive_macros.enumerate() {
let ast_id = AstIdWithPath::new(
file_id,
ast_id.value,
Interned::new(path),
);
- self.unresolved_macros.push(MacroDirective {
- module_id: directive.module_id,
- depth: directive.depth + 1,
- kind: MacroDirectiveKind::Derive {
- ast_id,
- derive_attr: *attr_id,
- derive_pos: idx,
- ctxt: call_site.ctx,
- derive_macro_id: call_id,
- },
- container: directive.container,
- });
- len = idx;
+
+ // Try to resolve the derive immediately. If we succeed, we can also use the fast path
+ // for builtin derives. If not, we cannot use it, as it can cause the ADT to become
+ // interned while the derive is still unresolved, which will cause it to get forgotten.
+ let id = derive_macro_as_call_id(
+ self.db,
+ &ast_id,
+ *attr_id,
+ idx as u32,
+ call_site.ctx,
+ self.def_map.krate,
+ |path| resolver(&self.def_map, path),
+ call_id,
+ );
+
+ if let Ok((macro_id, def_id, call_id)) = id {
+ derive_call_ids.push(Some(call_id));
+ // Record its helper attributes.
+ if def_id.krate != self.def_map.krate {
+ let def_map = crate_def_map(self.db, def_id.krate);
+ if let Some(helpers) =
+ def_map.data.exported_derives.get(&macro_id)
+ {
+ self.def_map
+ .derive_helpers_in_scope
+ .entry(ast_id.ast_id.map(|it| it.upcast()))
+ .or_default()
+ .extend(izip!(
+ helpers.iter().cloned(),
+ iter::repeat(macro_id),
+ iter::repeat(call_id),
+ ));
+ }
+ }
+
+ if super::enable_builtin_derive_fast_path()
+ && let MacroDefKind::BuiltInDerive(_, builtin_derive) =
+ def_id.kind
+ {
+ self.deferred_builtin_derives
+ .entry(ast_id.ast_id.upcast())
+ .or_default()
+ .push(DeferredBuiltinDerive {
+ call_id,
+ derive: builtin_derive,
+ module_id: directive.module_id,
+ container: directive.container,
+ depth: directive.depth,
+ derive_attr_id: *attr_id,
+ derive_index: idx as u32,
+ });
+ } else {
+ push_resolved(&mut resolved, directive, call_id);
+ }
+ } else {
+ derive_call_ids.push(None);
+ self.unresolved_macros.push(MacroDirective {
+ module_id: directive.module_id,
+ depth: directive.depth + 1,
+ kind: MacroDirectiveKind::Derive {
+ ast_id,
+ derive_attr: *attr_id,
+ derive_pos: idx,
+ ctxt: call_site.ctx,
+ derive_macro_id: call_id,
+ },
+ container: directive.container,
+ });
+ }
}
// We treat the #[derive] macro as an attribute call, but we do not resolve it for nameres collection.
@@ -1491,7 +1568,12 @@ impl<'db> DefCollector<'db> {
// Check the comment in [`builtin_attr_macro`].
self.def_map.modules[directive.module_id]
.scope
- .init_derive_attribute(ast_id, *attr_id, call_id, len + 1);
+ .init_derive_attribute(
+ ast_id,
+ *attr_id,
+ call_id,
+ derive_call_ids,
+ );
}
None => {
let diag = DefDiagnostic::malformed_derive(
@@ -1522,12 +1604,25 @@ impl<'db> DefCollector<'db> {
}
}
+ // Clear deferred derives for this item, unfortunately we cannot use them due to the attribute.
+ if let Some(deferred_derives) = self.deferred_builtin_derives.remove(&ast_id) {
+ resolved.extend(deferred_derives.into_iter().map(|derive| {
+ (
+ derive.module_id,
+ derive.depth,
+ derive.container,
+ derive.call_id,
+ Some(ast_id),
+ )
+ }));
+ }
+
let call_id = call_id();
self.def_map.modules[directive.module_id]
.scope
.add_attr_macro_invoc(ast_id, call_id);
- push_resolved(directive, call_id);
+ push_resolved(&mut resolved, directive, call_id);
res = ReachedFixedPoint::No;
return Resolved::Yes;
}
@@ -1709,6 +1804,12 @@ impl<'db> DefCollector<'db> {
));
}
+ always!(
+ self.deferred_builtin_derives.is_empty(),
+ "self.deferred_builtin_derives={:#?}",
+ self.deferred_builtin_derives,
+ );
+
(self.def_map, self.local_def_map)
}
}
@@ -1751,6 +1852,33 @@ impl ModCollector<'_, '_> {
}
let db = self.def_collector.db;
let module_id = self.module_id;
+ let consider_deferred_derives =
+ |file_id: HirFileId,
+ deferred_derives: &mut FxHashMap<_, Vec<DeferredBuiltinDerive>>,
+ ast_id: FileAstId<ast::Adt>,
+ id: AdtId,
+ def_map: &mut DefMap| {
+ let Some(deferred_derives) =
+ deferred_derives.remove(&InFile::new(file_id, ast_id.upcast()))
+ else {
+ return;
+ };
+ let module = &mut def_map.modules[module_id];
+ for deferred_derive in deferred_derives {
+ crate::builtin_derive::with_derive_traits(deferred_derive.derive, |trait_| {
+ let impl_id = BuiltinDeriveImplId::new(
+ db,
+ BuiltinDeriveImplLoc {
+ adt: id,
+ trait_,
+ derive_attr_id: deferred_derive.derive_attr_id,
+ derive_index: deferred_derive.derive_index,
+ },
+ );
+ module.scope.define_builtin_derive_impl(impl_id);
+ });
+ }
+ };
let update_def =
|def_collector: &mut DefCollector<'_>, id, name: &Name, vis, has_constructor| {
def_collector.def_map.modules[module_id].scope.declare(id);
@@ -1900,7 +2028,9 @@ impl ModCollector<'_, '_> {
let impl_id =
ImplLoc { container: module_id, id: InFile::new(self.file_id(), imp) }
.intern(db);
- self.def_collector.def_map.modules[self.module_id].scope.define_impl(impl_id)
+ self.def_collector.def_map.modules[self.module_id]
+ .scope
+ .define_impl(impl_id, self.item_tree[imp].is_trait_impl)
}
ModItemId::Function(id) => {
let it = &self.item_tree[id];
@@ -1928,11 +2058,21 @@ impl ModCollector<'_, '_> {
let it = &self.item_tree[id];
let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
+ let interned = StructLoc {
+ container: module_id,
+ id: InFile::new(self.tree_id.file_id(), id),
+ }
+ .intern(db);
+ consider_deferred_derives(
+ self.tree_id.file_id(),
+ &mut self.def_collector.deferred_builtin_derives,
+ id.upcast(),
+ interned.into(),
+ def_map,
+ );
update_def(
self.def_collector,
- StructLoc { container: module_id, id: InFile::new(self.file_id(), id) }
- .intern(db)
- .into(),
+ interned.into(),
&it.name,
vis,
!matches!(it.shape, FieldsShape::Record),
@@ -1942,15 +2082,19 @@ impl ModCollector<'_, '_> {
let it = &self.item_tree[id];
let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
- update_def(
- self.def_collector,
- UnionLoc { container: module_id, id: InFile::new(self.file_id(), id) }
- .intern(db)
- .into(),
- &it.name,
- vis,
- false,
+ let interned = UnionLoc {
+ container: module_id,
+ id: InFile::new(self.tree_id.file_id(), id),
+ }
+ .intern(db);
+ consider_deferred_derives(
+ self.tree_id.file_id(),
+ &mut self.def_collector.deferred_builtin_derives,
+ id.upcast(),
+ interned.into(),
+ def_map,
);
+ update_def(self.def_collector, interned.into(), &it.name, vis, false);
}
ModItemId::Enum(id) => {
let it = &self.item_tree[id];
@@ -1960,6 +2104,13 @@ impl ModCollector<'_, '_> {
}
.intern(db);
+ consider_deferred_derives(
+ self.tree_id.file_id(),
+ &mut self.def_collector.deferred_builtin_derives,
+ id.upcast(),
+ enum_.into(),
+ def_map,
+ );
let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
update_def(self.def_collector, enum_.into(), &it.name, vis, false);
}
@@ -2311,14 +2462,14 @@ impl ModCollector<'_, '_> {
let name;
let name = match attrs.by_key(sym::rustc_builtin_macro).string_value_with_span() {
Some((it, span)) => {
- name = Name::new_symbol(it.clone(), span.ctx);
+ name = Name::new_symbol(Symbol::intern(it), span.ctx);
&name
}
None => {
let explicit_name =
attrs.by_key(sym::rustc_builtin_macro).tt_values().next().and_then(|tt| {
- match tt.token_trees().flat_tokens().first() {
- Some(tt::TokenTree::Leaf(tt::Leaf::Ident(name))) => Some(name),
+ match tt.token_trees().iter().next() {
+ Some(tt::TtElement::Leaf(tt::Leaf::Ident(name))) => Some(name),
_ => None,
}
});
diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs
index e4b1d2a987..cf33cecf5f 100644
--- a/crates/hir-def/src/nameres/path_resolution.rs
+++ b/crates/hir-def/src/nameres/path_resolution.rs
@@ -267,7 +267,6 @@ impl DefMap {
// plain import or absolute path in 2015: crate-relative with
// fallback to extern prelude (with the simplification in
// rust-lang/rust#57745)
- // FIXME there must be a nicer way to write this condition
PathKind::Plain | PathKind::Abs
if self.data.edition == Edition::Edition2015
&& (path.kind == PathKind::Abs || mode == ResolveMode::Import) =>
@@ -383,7 +382,6 @@ impl DefMap {
// plain import or absolute path in 2015: crate-relative with
// fallback to extern prelude (with the simplification in
// rust-lang/rust#57745)
- // FIXME there must be a nicer way to write this condition
PathKind::Plain | PathKind::Abs
if self.data.edition == Edition::Edition2015
&& (path.kind == PathKind::Abs || mode == ResolveMode::Import) =>
diff --git a/crates/hir-def/src/nameres/proc_macro.rs b/crates/hir-def/src/nameres/proc_macro.rs
index cd45afe57d..91d664938b 100644
--- a/crates/hir-def/src/nameres/proc_macro.rs
+++ b/crates/hir-def/src/nameres/proc_macro.rs
@@ -2,10 +2,11 @@
use hir_expand::name::{AsName, Name};
use intern::sym;
+use itertools::Itertools;
use crate::{
item_tree::Attrs,
- tt::{Leaf, TokenTree, TopSubtree, TtElement},
+ tt::{Leaf, TopSubtree, TtElement},
};
#[derive(Debug, PartialEq, Eq)]
@@ -61,35 +62,35 @@ impl Attrs<'_> {
// This fn is intended for `#[proc_macro_derive(..)]` and `#[rustc_builtin_macro(..)]`, which have
// the same structure.
-#[rustfmt::skip]
pub(crate) fn parse_macro_name_and_helper_attrs(tt: &TopSubtree) -> Option<(Name, Box<[Name]>)> {
- match tt.token_trees().flat_tokens() {
+ if let Some([TtElement::Leaf(Leaf::Ident(trait_name))]) =
+ tt.token_trees().iter().collect_array()
+ {
// `#[proc_macro_derive(Trait)]`
// `#[rustc_builtin_macro(Trait)]`
- [TokenTree::Leaf(Leaf::Ident(trait_name))] => Some((trait_name.as_name(), Box::new([]))),
-
+ Some((trait_name.as_name(), Box::new([])))
+ } else if let Some(
+ [
+ TtElement::Leaf(Leaf::Ident(trait_name)),
+ TtElement::Leaf(Leaf::Punct(comma)),
+ TtElement::Leaf(Leaf::Ident(attributes)),
+ TtElement::Subtree(_, helpers),
+ ],
+ ) = tt.token_trees().iter().collect_array()
+ && comma.char == ','
+ && attributes.sym == sym::attributes
+ {
// `#[proc_macro_derive(Trait, attributes(helper1, helper2, ...))]`
// `#[rustc_builtin_macro(Trait, attributes(helper1, helper2, ...))]`
- [
- TokenTree::Leaf(Leaf::Ident(trait_name)),
- TokenTree::Leaf(Leaf::Punct(comma)),
- TokenTree::Leaf(Leaf::Ident(attributes)),
- TokenTree::Subtree(_),
- ..
- ] if comma.char == ',' && attributes.sym == sym::attributes =>
- {
- let helpers = tt::TokenTreesView::new(&tt.token_trees().flat_tokens()[3..]).try_into_subtree()?;
- let helpers = helpers
- .iter()
- .filter_map(|tt| match tt {
- TtElement::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()),
- _ => None,
- })
- .collect::<Box<[_]>>();
-
- Some((trait_name.as_name(), helpers))
- }
+ let helpers = helpers
+ .filter_map(|tt| match tt {
+ TtElement::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()),
+ _ => None,
+ })
+ .collect::<Box<[_]>>();
- _ => None,
+ Some((trait_name.as_name(), helpers))
+ } else {
+ None
}
}
diff --git a/crates/hir-def/src/nameres/tests/macros.rs b/crates/hir-def/src/nameres/tests/macros.rs
index c8eb968b35..a943f6f0ac 100644
--- a/crates/hir-def/src/nameres/tests/macros.rs
+++ b/crates/hir-def/src/nameres/tests/macros.rs
@@ -784,7 +784,7 @@ macro_rules! foo {
pub use core::clone::Clone;
"#,
- |map| assert_eq!(map.modules[map.root].scope.impls().len(), 1),
+ |map| assert_eq!(map.modules[map.root].scope.builtin_derive_impls().len(), 1),
);
}
@@ -806,7 +806,7 @@ pub macro Copy {}
#[rustc_builtin_macro]
pub macro Clone {}
"#,
- |map| assert_eq!(map.modules[map.root].scope.impls().len(), 2),
+ |map| assert_eq!(map.modules[map.root].scope.builtin_derive_impls().len(), 2),
);
}
@@ -849,7 +849,7 @@ pub macro derive($item:item) {}
#[rustc_builtin_macro]
pub macro Clone {}
"#,
- |map| assert_eq!(map.modules[map.root].scope.impls().len(), 1),
+ |map| assert_eq!(map.modules[map.root].scope.builtin_derive_impls().len(), 1),
);
}
@@ -1609,7 +1609,7 @@ macro_rules! derive { () => {} }
#[derive(Clone)]
struct S;
"#,
- |map| assert_eq!(map.modules[map.root].scope.impls().len(), 1),
+ |map| assert_eq!(map.modules[map.root].scope.builtin_derive_impls().len(), 1),
);
}
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index 263f603a0b..2ac0f90fb2 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -936,7 +936,7 @@ fn handle_macro_def_scope(
// A macro is allowed to refer to variables from before its declaration.
// Therefore, if we got to the rib of its declaration, give up its hygiene
// and use its parent expansion.
- *hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(db));
+ *hygiene_id = HygieneId::new(parent_ctx.opaque_and_semiopaque(db));
*hygiene_info = parent_ctx.outer_expn(db).map(|expansion| {
let expansion = db.lookup_intern_macro_call(expansion.into());
(parent_ctx.parent(db), expansion.def)
@@ -950,7 +950,7 @@ fn hygiene_info(
hygiene_id: HygieneId,
) -> Option<(SyntaxContext, MacroDefId)> {
if !hygiene_id.is_root() {
- let ctx = hygiene_id.lookup();
+ let ctx = hygiene_id.syntax_context();
ctx.outer_expn(db).map(|expansion| {
let expansion = db.lookup_intern_macro_call(expansion.into());
(ctx.parent(db), expansion.def)
diff --git a/crates/hir-def/src/signatures.rs b/crates/hir-def/src/signatures.rs
index a13ef484ba..0dd88edbfb 100644
--- a/crates/hir-def/src/signatures.rs
+++ b/crates/hir-def/src/signatures.rs
@@ -185,6 +185,9 @@ impl UnionSignature {
bitflags! {
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct EnumFlags: u8 {
+ /// Indicates whether this enum has `#[repr]`.
+ const HAS_REPR = 1 << 0;
+ /// Indicates whether the enum has a `#[rustc_has_incoherent_inherent_impls]` attribute.
const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1;
}
}
@@ -205,6 +208,9 @@ impl EnumSignature {
if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags |= EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
}
+ if attrs.contains(AttrFlags::HAS_REPR) {
+ flags |= EnumFlags::HAS_REPR;
+ }
let InFile { file_id, value: source } = loc.source(db);
let (store, generic_params, source_map) = lower_generic_params(
@@ -233,6 +239,11 @@ impl EnumSignature {
_ => IntegerType::Pointer(true),
}
}
+
+ #[inline]
+ pub fn repr(&self, db: &dyn DefDatabase, id: EnumId) -> Option<ReprOptions> {
+ if self.flags.contains(EnumFlags::HAS_REPR) { AttrFlags::repr(db, id.into()) } else { None }
+ }
}
bitflags::bitflags! {
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
diff --git a/crates/hir-def/src/test_db.rs b/crates/hir-def/src/test_db.rs
index cdb49b2970..e8377fde49 100644
--- a/crates/hir-def/src/test_db.rs
+++ b/crates/hir-def/src/test_db.rs
@@ -49,6 +49,12 @@ impl Default for TestDB {
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
// This needs to be here otherwise `CrateGraphBuilder` panics.
this.set_all_crates(Arc::new(Box::new([])));
+ _ = base_db::LibraryRoots::builder(Default::default())
+ .durability(Durability::MEDIUM)
+ .new(&this);
+ _ = base_db::LocalRoots::builder(Default::default())
+ .durability(Durability::MEDIUM)
+ .new(&this);
CrateGraphBuilder::default().set_in_db(&mut this);
this
}
diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index e1807cd2e1..e3f10b2129 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -35,7 +35,8 @@ use arrayvec::ArrayVec;
use base_db::Crate;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
-use intern::{Interned, Symbol};
+use intern::Interned;
+use itertools::Itertools;
use mbe::{DelimiterKind, Punct};
use parser::T;
use smallvec::SmallVec;
@@ -416,47 +417,42 @@ impl fmt::Display for AttrInput {
impl Attr {
/// #[path = "string"]
- pub fn string_value(&self) -> Option<&Symbol> {
+ pub fn string_value(&self) -> Option<&str> {
match self.input.as_deref()? {
- AttrInput::Literal(tt::Literal {
- symbol: text,
- kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
- ..
- }) => Some(text),
+ AttrInput::Literal(
+ lit @ tt::Literal { kind: tt::LitKind::Str | tt::LitKind::StrRaw(_), .. },
+ ) => Some(lit.text()),
_ => None,
}
}
/// #[path = "string"]
- pub fn string_value_with_span(&self) -> Option<(&Symbol, span::Span)> {
+ pub fn string_value_with_span(&self) -> Option<(&str, span::Span)> {
match self.input.as_deref()? {
- AttrInput::Literal(tt::Literal {
- symbol: text,
- kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
- span,
- suffix: _,
- }) => Some((text, *span)),
+ AttrInput::Literal(
+ lit @ tt::Literal { kind: tt::LitKind::Str | tt::LitKind::StrRaw(_), span, .. },
+ ) => Some((lit.text(), *span)),
_ => None,
}
}
pub fn string_value_unescape(&self) -> Option<Cow<'_, str>> {
match self.input.as_deref()? {
- AttrInput::Literal(tt::Literal {
- symbol: text, kind: tt::LitKind::StrRaw(_), ..
- }) => Some(Cow::Borrowed(text.as_str())),
- AttrInput::Literal(tt::Literal { symbol: text, kind: tt::LitKind::Str, .. }) => {
- unescape(text.as_str())
+ AttrInput::Literal(lit @ tt::Literal { kind: tt::LitKind::StrRaw(_), .. }) => {
+ Some(Cow::Borrowed(lit.text()))
+ }
+ AttrInput::Literal(lit @ tt::Literal { kind: tt::LitKind::Str, .. }) => {
+ unescape(lit.text())
}
_ => None,
}
}
/// #[path(ident)]
- pub fn single_ident_value(&self) -> Option<&tt::Ident> {
+ pub fn single_ident_value(&self) -> Option<tt::Ident> {
match self.input.as_deref()? {
- AttrInput::TokenTree(tt) => match tt.token_trees().flat_tokens() {
- [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
+ AttrInput::TokenTree(tt) => match tt.token_trees().iter().collect_array() {
+ Some([tt::TtElement::Leaf(tt::Leaf::Ident(ident))]) => Some(ident),
_ => None,
},
_ => None,
@@ -492,7 +488,7 @@ fn parse_path_comma_token_tree<'a>(
args.token_trees()
.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
.filter_map(move |tts| {
- let span = tts.flat_tokens().first()?.first_span();
+ let span = tts.first_span()?;
Some((ModPath::from_tt(db, tts)?, span, tts))
})
}
@@ -611,16 +607,12 @@ impl AttrId {
else {
return derive_attr_range;
};
- let (Some(first_tt), Some(last_tt)) =
- (derive_tts.flat_tokens().first(), derive_tts.flat_tokens().last())
+ let (Some(first_span), Some(last_span)) = (derive_tts.first_span(), derive_tts.last_span())
else {
return derive_attr_range;
};
- let start = first_tt.first_span().range.start();
- let end = match last_tt {
- tt::TokenTree::Leaf(it) => it.span().range.end(),
- tt::TokenTree::Subtree(it) => it.delimiter.close.range.end(),
- };
+ let start = first_span.range.start();
+ let end = last_span.range.end();
TextRange::new(start, end)
}
}
diff --git a/crates/hir-expand/src/builtin/derive_macro.rs b/crates/hir-expand/src/builtin/derive_macro.rs
index 6582f4b075..f208203c93 100644
--- a/crates/hir-expand/src/builtin/derive_macro.rs
+++ b/crates/hir-expand/src/builtin/derive_macro.rs
@@ -5,7 +5,7 @@ use intern::sym;
use itertools::{Itertools, izip};
use parser::SyntaxKind;
use rustc_hash::FxHashSet;
-use span::{Edition, Span, SyntaxContext};
+use span::{Edition, Span};
use stdx::never;
use syntax_bridge::DocCommentDesugarMode;
use tracing::debug;
@@ -28,7 +28,7 @@ use syntax::{
};
macro_rules! register_builtin {
- ( $($trait:ident => $expand:ident),* ) => {
+ ( $($trait:ident => $expand:ident),* $(,)? ) => {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BuiltinDeriveExpander {
$($trait),*
@@ -48,7 +48,6 @@ macro_rules! register_builtin {
}
}
}
-
};
}
@@ -75,7 +74,7 @@ register_builtin! {
PartialOrd => partial_ord_expand,
Eq => eq_expand,
PartialEq => partial_eq_expand,
- CoercePointee => coerce_pointee_expand
+ CoercePointee => coerce_pointee_expand,
}
pub fn find_builtin_derive(ident: &name::Name) -> Option<BuiltinDeriveExpander> {
@@ -239,7 +238,7 @@ fn parse_adt(
fn parse_adt_from_syntax(
adt: &ast::Adt,
- tm: &span::SpanMap<SyntaxContext>,
+ tm: &span::SpanMap,
call_site: Span,
) -> Result<BasicAdtInfo, ExpandError> {
let (name, generic_param_list, where_clause, shape) = match &adt {
@@ -391,7 +390,7 @@ fn to_adt_syntax(
db: &dyn ExpandDatabase,
tt: &tt::TopSubtree,
call_site: Span,
-) -> Result<(ast::Adt, span::SpanMap<SyntaxContext>), ExpandError> {
+) -> Result<(ast::Adt, span::SpanMap), ExpandError> {
let (parsed, tm) = crate::db::token_tree_to_syntax_node(db, tt, crate::ExpandTo::Items);
let macro_items = ast::MacroItems::cast(parsed.syntax_node())
.ok_or_else(|| ExpandError::other(call_site, "invalid item definition"))?;
diff --git a/crates/hir-expand/src/builtin/fn_macro.rs b/crates/hir-expand/src/builtin/fn_macro.rs
index 3d630cfc1c..6e4b96b050 100644
--- a/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/crates/hir-expand/src/builtin/fn_macro.rs
@@ -1,5 +1,7 @@
//! Builtin macro
+use std::borrow::Cow;
+
use base_db::AnchoredPath;
use cfg::CfgExpr;
use either::Either;
@@ -13,7 +15,7 @@ use span::{Edition, FileId, Span};
use stdx::format_to;
use syntax::{
format_smolstr,
- unescape::{unescape_byte, unescape_char, unescape_str},
+ unescape::{unescape_byte, unescape_char},
};
use syntax_bridge::syntax_node_to_token_tree;
@@ -177,12 +179,7 @@ fn line_expand(
// not incremental
ExpandResult::ok(tt::TopSubtree::invisible_from_leaves(
span,
- [tt::Leaf::Literal(tt::Literal {
- symbol: sym::INTEGER_0,
- span,
- kind: tt::LitKind::Integer,
- suffix: Some(sym::u32),
- })],
+ [tt::Leaf::Literal(tt::Literal::new("0", span, tt::LitKind::Integer, "u32"))],
))
}
@@ -210,7 +207,7 @@ fn stringify_expand(
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
- let pretty = ::tt::pretty(tt.token_trees().flat_tokens());
+ let pretty = ::tt::pretty(tt.token_trees());
let expanded = quote! {span =>
#pretty
@@ -283,7 +280,7 @@ fn format_args_expand(
) -> ExpandResult<tt::TopSubtree> {
let pound = mk_pound(span);
let mut tt = tt.clone();
- tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis;
+ tt.set_top_subtree_delimiter_kind(tt::DelimiterKind::Parenthesis);
ExpandResult::ok(quote! {span =>
builtin #pound format_args #tt
})
@@ -297,14 +294,15 @@ fn format_args_nl_expand(
) -> ExpandResult<tt::TopSubtree> {
let pound = mk_pound(span);
let mut tt = tt.clone();
- tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis;
- if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- kind: tt::LitKind::Str,
- ..
- }))) = tt.0.get_mut(1)
+ tt.set_top_subtree_delimiter_kind(tt::DelimiterKind::Parenthesis);
+ let lit = tt.as_token_trees().iter_flat_tokens().nth(1);
+ if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(
+ mut lit @ tt::Literal { kind: tt::LitKind::Str, .. },
+ ))) = lit
{
- *text = Symbol::intern(&format_smolstr!("{}\\n", text.as_str()));
+ let (text, suffix) = lit.text_and_suffix();
+ lit.text_and_suffix = Symbol::intern(&format_smolstr!("{text}\\n{suffix}"));
+ tt.set_token(1, lit.into());
}
ExpandResult::ok(quote! {span =>
builtin #pound format_args #tt
@@ -318,7 +316,7 @@ fn asm_expand(
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let mut tt = tt.clone();
- tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis;
+ tt.set_top_subtree_delimiter_kind(tt::DelimiterKind::Parenthesis);
let pound = mk_pound(span);
let expanded = quote! {span =>
builtin #pound asm #tt
@@ -333,7 +331,7 @@ fn global_asm_expand(
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let mut tt = tt.clone();
- tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis;
+ tt.set_top_subtree_delimiter_kind(tt::DelimiterKind::Parenthesis);
let pound = mk_pound(span);
let expanded = quote! {span =>
builtin #pound global_asm #tt
@@ -348,7 +346,7 @@ fn naked_asm_expand(
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let mut tt = tt.clone();
- tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis;
+ tt.set_top_subtree_delimiter_kind(tt::DelimiterKind::Parenthesis);
let pound = mk_pound(span);
let expanded = quote! {span =>
builtin #pound naked_asm #tt
@@ -478,11 +476,11 @@ fn unreachable_expand(
// Pass the original arguments
let mut subtree = tt.clone();
- *subtree.top_subtree_delimiter_mut() = tt::Delimiter {
+ subtree.set_top_subtree_delimiter_kind(tt::DelimiterKind::Parenthesis);
+ subtree.set_top_subtree_delimiter_span(tt::DelimSpan {
open: call_site_span,
close: call_site_span,
- kind: tt::DelimiterKind::Parenthesis,
- };
+ });
// Expand to a macro call `$crate::panic::panic_{edition}`
let call = quote!(call_site_span =>#dollar_crate::panic::#mac! #subtree);
@@ -518,16 +516,14 @@ fn compile_error_expand(
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
- let err = match &*tt.0 {
- [
- _,
- tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span: _,
- kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
- suffix: _,
- })),
- ] => ExpandError::other(span, Box::from(unescape_symbol(text).as_str())),
+ let err = match tt.iter().collect_array() {
+ Some(
+ [
+ tt::TtElement::Leaf(tt::Leaf::Literal(
+ lit @ tt::Literal { kind: tt::LitKind::Str | tt::LitKind::StrRaw(_), .. },
+ )),
+ ],
+ ) => ExpandError::other(span, Box::from(unescape_str(lit.text()))),
_ => ExpandError::other(span, "`compile_error!` argument must be a string"),
};
@@ -556,7 +552,7 @@ fn concat_expand(
// to ensure the right parsing order, so skip the parentheses here. Ideally we'd
// implement rustc's model. cc https://github.com/rust-lang/rust-analyzer/pull/10623
if let TtElement::Subtree(subtree, subtree_iter) = &t
- && let [tt::TokenTree::Leaf(tt)] = subtree_iter.remaining().flat_tokens()
+ && let Some([tt::TtElement::Leaf(tt)]) = subtree_iter.clone().collect_array()
&& subtree.delimiter.kind == tt::DelimiterKind::Parenthesis
{
t = TtElement::Leaf(tt);
@@ -568,20 +564,20 @@ fn concat_expand(
// as-is.
match it.kind {
tt::LitKind::Char => {
- if let Ok(c) = unescape_char(it.symbol.as_str()) {
+ if let Ok(c) = unescape_char(it.text()) {
text.push(c);
}
record_span(it.span);
}
tt::LitKind::Integer | tt::LitKind::Float => {
- format_to!(text, "{}", it.symbol.as_str())
+ format_to!(text, "{}", it.text())
}
tt::LitKind::Str => {
- text.push_str(unescape_symbol(&it.symbol).as_str());
+ text.push_str(&unescape_str(it.text()));
record_span(it.span);
}
tt::LitKind::StrRaw(_) => {
- format_to!(text, "{}", it.symbol.as_str());
+ format_to!(text, "{}", it.text());
record_span(it.span);
}
tt::LitKind::Byte
@@ -619,7 +615,7 @@ fn concat_expand(
TtElement::Leaf(tt::Leaf::Literal(it))
if matches!(it.kind, tt::LitKind::Integer | tt::LitKind::Float) =>
{
- format_to!(text, "-{}", it.symbol.as_str());
+ format_to!(text, "-{}", it.text());
record_span(punct.span.cover(it.span));
}
_ => {
@@ -657,29 +653,25 @@ fn concat_bytes_expand(
};
for (i, t) in tt.iter().enumerate() {
match t {
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span,
- kind,
- suffix: _,
- })) => {
- record_span(*span);
+ TtElement::Leaf(tt::Leaf::Literal(lit @ tt::Literal { span, kind, .. })) => {
+ let text = lit.text();
+ record_span(span);
match kind {
tt::LitKind::Byte => {
- if let Ok(b) = unescape_byte(text.as_str()) {
+ if let Ok(b) = unescape_byte(text) {
bytes.extend(
b.escape_ascii().filter_map(|it| char::from_u32(it as u32)),
);
}
}
tt::LitKind::ByteStr => {
- bytes.push_str(text.as_str());
+ bytes.push_str(text);
}
tt::LitKind::ByteStrRaw(_) => {
- bytes.extend(text.as_str().escape_debug());
+ bytes.extend(text.escape_debug());
}
_ => {
- err.get_or_insert(ExpandError::other(*span, "unexpected token"));
+ err.get_or_insert(ExpandError::other(span, "unexpected token"));
break;
}
}
@@ -705,12 +697,7 @@ fn concat_bytes_expand(
ExpandResult {
value: tt::TopSubtree::invisible_from_leaves(
span,
- [tt::Leaf::Literal(tt::Literal {
- symbol: Symbol::intern(&bytes),
- span,
- kind: tt::LitKind::ByteStr,
- suffix: None,
- })],
+ [tt::Leaf::Literal(tt::Literal::new_no_suffix(&bytes, span, tt::LitKind::ByteStr))],
),
err,
}
@@ -724,25 +711,19 @@ fn concat_bytes_expand_subtree(
) -> Result<(), ExpandError> {
for (ti, tt) in tree_iter.enumerate() {
match tt {
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span,
- kind: tt::LitKind::Byte,
- suffix: _,
- })) => {
- if let Ok(b) = unescape_byte(text.as_str()) {
+ TtElement::Leaf(tt::Leaf::Literal(
+ lit @ tt::Literal { span, kind: tt::LitKind::Byte, .. },
+ )) => {
+ if let Ok(b) = unescape_byte(lit.text()) {
bytes.extend(b.escape_ascii().filter_map(|it| char::from_u32(it as u32)));
}
- record_span(*span);
+ record_span(span);
}
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span,
- kind: tt::LitKind::Integer,
- suffix: _,
- })) => {
- record_span(*span);
- if let Ok(b) = text.as_str().parse::<u8>() {
+ TtElement::Leaf(tt::Leaf::Literal(
+ lit @ tt::Literal { span, kind: tt::LitKind::Integer, .. },
+ )) => {
+ record_span(span);
+ if let Ok(b) = lit.text().parse::<u8>() {
bytes.extend(b.escape_ascii().filter_map(|it| char::from_u32(it as u32)));
}
}
@@ -791,18 +772,16 @@ fn parse_string(tt: &tt::TopSubtree) -> Result<(Symbol, Span), ExpandError> {
}
match tt {
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
+ TtElement::Leaf(tt::Leaf::Literal(lit @ tt::Literal {
span,
kind: tt::LitKind::Str,
- suffix: _,
- })) => Ok((unescape_symbol(text), *span)),
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
+ ..
+ })) => Ok((Symbol::intern(&unescape_str(lit.text())), span)),
+ TtElement::Leaf(tt::Leaf::Literal(lit @ tt::Literal {
span,
kind: tt::LitKind::StrRaw(_),
- suffix: _,
- })) => Ok((text.clone(), *span)),
+ ..
+ })) => Ok((Symbol::intern(lit.text()), span)),
TtElement::Leaf(l) => Err(*l.span()),
TtElement::Subtree(tt, _) => Err(tt.delimiter.open.cover(tt.delimiter.close)),
}
@@ -854,10 +833,10 @@ fn include_bytes_expand(
let res = tt::TopSubtree::invisible_from_leaves(
span,
[tt::Leaf::Literal(tt::Literal {
- symbol: Symbol::empty(),
+ text_and_suffix: Symbol::empty(),
span,
kind: tt::LitKind::ByteStrRaw(1),
- suffix: None,
+ suffix_len: 0,
})],
);
ExpandResult::ok(res)
@@ -978,17 +957,16 @@ fn quote_expand(
)
}
-fn unescape_symbol(s: &Symbol) -> Symbol {
- if s.as_str().contains('\\') {
- let s = s.as_str();
+fn unescape_str(s: &str) -> Cow<'_, str> {
+ if s.contains('\\') {
let mut buf = String::with_capacity(s.len());
- unescape_str(s, |_, c| {
+ syntax::unescape::unescape_str(s, |_, c| {
if let Ok(c) = c {
buf.push(c)
}
});
- Symbol::intern(&buf)
+ Cow::Owned(buf)
} else {
- s.clone()
+ Cow::Borrowed(s)
}
}
diff --git a/crates/hir-expand/src/builtin/quote.rs b/crates/hir-expand/src/builtin/quote.rs
index 84dd4a24d9..51c4e22516 100644
--- a/crates/hir-expand/src/builtin/quote.rs
+++ b/crates/hir-expand/src/builtin/quote.rs
@@ -8,7 +8,7 @@ use tt::IdentIsRaw;
use crate::{name::Name, tt::TopSubtreeBuilder};
-pub(crate) fn dollar_crate(span: Span) -> tt::Ident<Span> {
+pub(crate) fn dollar_crate(span: Span) -> tt::Ident {
tt::Ident { sym: sym::dollar_crate, span, is_raw: tt::IdentIsRaw::No }
}
@@ -163,7 +163,7 @@ impl ToTokenTree for crate::tt::SubtreeView<'_> {
impl ToTokenTree for crate::tt::TopSubtree {
fn to_tokens(self, _: Span, builder: &mut TopSubtreeBuilder) {
- builder.extend_tt_dangerous(self.0);
+ builder.extend_with_tt(self.as_token_trees());
}
}
@@ -172,10 +172,9 @@ impl ToTokenTree for crate::tt::TtElement<'_> {
match self {
crate::tt::TtElement::Leaf(leaf) => builder.push(leaf.clone()),
crate::tt::TtElement::Subtree(subtree, subtree_iter) => {
- builder.extend_tt_dangerous(
- std::iter::once(crate::tt::TokenTree::Subtree(subtree.clone()))
- .chain(subtree_iter.remaining().flat_tokens().iter().cloned()),
- );
+ builder.open(subtree.delimiter.kind, subtree.delimiter.open);
+ builder.extend_with_tt(subtree_iter.remaining());
+ builder.close(subtree.delimiter.close);
}
}
}
@@ -200,16 +199,16 @@ impl<T: ToTokenTree + Clone> ToTokenTree for &T {
}
impl_to_to_tokentrees! {
- span: u32 => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } };
- span: usize => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } };
- span: i32 => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } };
+ span: u32 => self { crate::tt::Literal{text_and_suffix: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix_len: 0 } };
+ span: usize => self { crate::tt::Literal{text_and_suffix: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix_len: 0 } };
+ span: i32 => self { crate::tt::Literal{text_and_suffix: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix_len: 0 } };
span: bool => self { crate::tt::Ident{sym: if self { sym::true_ } else { sym::false_ }, span, is_raw: tt::IdentIsRaw::No } };
_span: crate::tt::Leaf => self { self };
_span: crate::tt::Literal => self { self };
_span: crate::tt::Ident => self { self };
_span: crate::tt::Punct => self { self };
- span: &str => self { crate::tt::Literal{symbol: Symbol::intern(&self.escape_default().to_smolstr()), span, kind: tt::LitKind::Str, suffix: None }};
- span: String => self { crate::tt::Literal{symbol: Symbol::intern(&self.escape_default().to_smolstr()), span, kind: tt::LitKind::Str, suffix: None }};
+ span: &str => self { crate::tt::Literal{text_and_suffix: Symbol::intern(&self.escape_default().to_smolstr()), span, kind: tt::LitKind::Str, suffix_len: 0 }};
+ span: String => self { crate::tt::Literal{text_and_suffix: Symbol::intern(&self.escape_default().to_smolstr()), span, kind: tt::LitKind::Str, suffix_len: 0 }};
span: Name => self {
let (is_raw, s) = IdentIsRaw::split_from_symbol(self.as_str());
crate::tt::Ident{sym: Symbol::intern(s), span, is_raw }
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 5c517e671b..51767f87ff 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -9,8 +9,8 @@ use triomphe::Arc;
use crate::{
AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
- EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId,
- MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
+ EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, FileRange, HirFileId,
+ MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
attrs::Meta,
builtin::pseudo_derive_attr_expansion,
cfg_process::attr_macro_input_to_token_tree,
@@ -62,6 +62,9 @@ pub trait ExpandDatabase: RootQueryDb {
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
#[salsa::transparent]
+ fn resolve_span(&self, span: Span) -> FileRange;
+
+ #[salsa::transparent]
fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode;
/// Implementation for the macro case.
@@ -158,6 +161,13 @@ fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId, edition: Edition) ->
}
}
+fn resolve_span(db: &dyn ExpandDatabase, Span { range, anchor, ctx: _ }: Span) -> FileRange {
+ let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id);
+ let anchor_offset =
+ db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
+ FileRange { file_id, range: range + anchor_offset }
+}
+
/// This expands the given macro call, but with different arguments. This is
/// used for completion, where we want to see what 'would happen' if we insert a
/// token. The `token_to_map` mapped down into the expansion, with the mapped
@@ -237,7 +247,8 @@ pub fn expand_speculative(
span,
DocCommentDesugarMode::ProcMacro,
);
- *tree.top_subtree_delimiter_mut() = tt::Delimiter::invisible_spanned(span);
+ tree.set_top_subtree_delimiter_kind(tt::DelimiterKind::Invisible);
+ tree.set_top_subtree_delimiter_span(tt::DelimSpan::from_single(span));
tree
},
)
@@ -255,7 +266,7 @@ pub fn expand_speculative(
span,
DocCommentDesugarMode::ProcMacro,
);
- attr_arg.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Invisible;
+ attr_arg.set_top_subtree_delimiter_kind(tt::DelimiterKind::Invisible);
Some(attr_arg)
}
_ => None,
@@ -270,7 +281,8 @@ pub fn expand_speculative(
let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(ast, expander, _) => {
let span = db.proc_macro_span(ast);
- *tt.top_subtree_delimiter_mut() = tt::Delimiter::invisible_spanned(span);
+ tt.set_top_subtree_delimiter_kind(tt::DelimiterKind::Invisible);
+ tt.set_top_subtree_delimiter_span(tt::DelimSpan::from_single(span));
expander.expand(
db,
loc.def.krate,
@@ -430,7 +442,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
(
Arc::new(tt::TopSubtree::from_token_trees(
tt::Delimiter { open: span, close: span, kind },
- tt::TokenTreesView::new(&[]),
+ tt::TokenTreesView::empty(),
)),
SyntaxFixupUndoInfo::default(),
span,
@@ -478,7 +490,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
);
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
- tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Invisible;
+ tt.set_top_subtree_delimiter_kind(tt::DelimiterKind::Invisible);
}
return (Arc::new(tt), SyntaxFixupUndoInfo::NONE, span);
}
@@ -512,7 +524,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
- tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Invisible;
+ tt.set_top_subtree_delimiter_kind(tt::DelimiterKind::Invisible);
}
(Arc::new(tt), undo_info, span)
diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs
index d2df9a1ff6..d10e122a5d 100644
--- a/crates/hir-expand/src/declarative.rs
+++ b/crates/hir-expand/src/declarative.rs
@@ -100,7 +100,8 @@ impl DeclarativeMacroExpander {
{
match &*value {
"transparent" => ControlFlow::Break(Transparency::Transparent),
- "semitransparent" => ControlFlow::Break(Transparency::SemiTransparent),
+ // "semitransparent" is for old rustc versions.
+ "semiopaque" | "semitransparent" => ControlFlow::Break(Transparency::SemiOpaque),
"opaque" => ControlFlow::Break(Transparency::Opaque),
_ => ControlFlow::Continue(()),
}
@@ -140,7 +141,7 @@ impl DeclarativeMacroExpander {
)),
},
transparency(ast::AnyHasAttrs::from(macro_rules))
- .unwrap_or(Transparency::SemiTransparent),
+ .unwrap_or(Transparency::SemiOpaque),
),
ast::Macro::MacroDef(macro_def) => (
match macro_def.body() {
diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs
index 9b65bdac65..0b6124ebf3 100644
--- a/crates/hir-expand/src/eager.rs
+++ b/crates/hir-expand/src/eager.rs
@@ -96,7 +96,7 @@ pub fn expand_eager_macro_input(
DocCommentDesugarMode::Mbe,
);
- subtree.top_subtree_delimiter_mut().kind = crate::tt::DelimiterKind::Invisible;
+ subtree.set_top_subtree_delimiter_kind(crate::tt::DelimiterKind::Invisible);
let loc = MacroCallLoc {
def,
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index 876d870936..92ddd7fa8b 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -15,7 +15,7 @@ use syntax::{
};
use syntax_bridge::DocCommentDesugarMode;
use triomphe::Arc;
-use tt::Spacing;
+use tt::{Spacing, TransformTtAction, transform_tt};
use crate::{
span_map::SpanMapRef,
@@ -343,93 +343,29 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
pub(crate) fn reverse_fixups(tt: &mut TopSubtree, undo_info: &SyntaxFixupUndoInfo) {
let Some(undo_info) = undo_info.original.as_deref() else { return };
let undo_info = &**undo_info;
- let delimiter = tt.top_subtree_delimiter_mut();
+ let top_subtree = tt.top_subtree();
+ let open_span = top_subtree.delimiter.open;
+ let close_span = top_subtree.delimiter.close;
#[allow(deprecated)]
if never!(
- delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
- || delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
+ close_span.anchor.ast_id == FIXUP_DUMMY_AST_ID
+ || open_span.anchor.ast_id == FIXUP_DUMMY_AST_ID
) {
let span = |file_id| Span {
range: TextRange::empty(TextSize::new(0)),
anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
ctx: SyntaxContext::root(span::Edition::Edition2015),
};
- delimiter.open = span(delimiter.open.anchor.file_id);
- delimiter.close = span(delimiter.close.anchor.file_id);
+ tt.set_top_subtree_delimiter_span(tt::DelimSpan {
+ open: span(open_span.anchor.file_id),
+ close: span(close_span.anchor.file_id),
+ });
}
reverse_fixups_(tt, undo_info);
}
-#[derive(Debug)]
-enum TransformTtAction<'a> {
- Keep,
- ReplaceWith(tt::TokenTreesView<'a>),
-}
-
-impl TransformTtAction<'_> {
- fn remove() -> Self {
- Self::ReplaceWith(tt::TokenTreesView::new(&[]))
- }
-}
-
-/// This function takes a token tree, and calls `callback` with each token tree in it.
-/// Then it does what the callback says: keeps the tt or replaces it with a (possibly empty)
-/// tts view.
-fn transform_tt<'a, 'b>(
- tt: &'a mut Vec<tt::TokenTree>,
- mut callback: impl FnMut(&mut tt::TokenTree) -> TransformTtAction<'b>,
-) {
- // We need to keep a stack of the currently open subtrees, because we need to update
- // them if we change the number of items in them.
- let mut subtrees_stack = Vec::new();
- let mut i = 0;
- while i < tt.len() {
- 'pop_finished_subtrees: while let Some(&subtree_idx) = subtrees_stack.last() {
- let tt::TokenTree::Subtree(subtree) = &tt[subtree_idx] else {
- unreachable!("non-subtree on subtrees stack");
- };
- if i >= subtree_idx + 1 + subtree.usize_len() {
- subtrees_stack.pop();
- } else {
- break 'pop_finished_subtrees;
- }
- }
-
- let action = callback(&mut tt[i]);
- match action {
- TransformTtAction::Keep => {
- // This cannot be shared with the replaced case, because then we may push the same subtree
- // twice, and will update it twice which will lead to errors.
- if let tt::TokenTree::Subtree(_) = &tt[i] {
- subtrees_stack.push(i);
- }
-
- i += 1;
- }
- TransformTtAction::ReplaceWith(replacement) => {
- let old_len = 1 + match &tt[i] {
- tt::TokenTree::Leaf(_) => 0,
- tt::TokenTree::Subtree(subtree) => subtree.usize_len(),
- };
- let len_diff = replacement.len() as i64 - old_len as i64;
- tt.splice(i..i + old_len, replacement.flat_tokens().iter().cloned());
- // Skip the newly inserted replacement, we don't want to visit it.
- i += replacement.len();
-
- for &subtree_idx in &subtrees_stack {
- let tt::TokenTree::Subtree(subtree) = &mut tt[subtree_idx] else {
- unreachable!("non-subtree on subtrees stack");
- };
- subtree.len = (i64::from(subtree.len) + len_diff).try_into().unwrap();
- }
- }
- }
- }
-}
-
fn reverse_fixups_(tt: &mut TopSubtree, undo_info: &[TopSubtree]) {
- let mut tts = std::mem::take(&mut tt.0).into_vec();
- transform_tt(&mut tts, |tt| match tt {
+ transform_tt(tt, |tt| match tt {
tt::TokenTree::Leaf(leaf) => {
let span = leaf.span();
let is_real_leaf = span.anchor.ast_id != FIXUP_DUMMY_AST_ID;
@@ -459,7 +395,6 @@ fn reverse_fixups_(tt: &mut TopSubtree, undo_info: &[TopSubtree]) {
TransformTtAction::Keep
}
});
- tt.0 = tts.into_boxed_slice();
}
#[cfg(test)]
@@ -480,7 +415,7 @@ mod tests {
// `TokenTree`s, see the last assertion in `check()`.
fn check_leaf_eq(a: &tt::Leaf, b: &tt::Leaf) -> bool {
match (a, b) {
- (tt::Leaf::Literal(a), tt::Leaf::Literal(b)) => a.symbol == b.symbol,
+ (tt::Leaf::Literal(a), tt::Leaf::Literal(b)) => a.text_and_suffix == b.text_and_suffix,
(tt::Leaf::Punct(a), tt::Leaf::Punct(b)) => a.char == b.char,
(tt::Leaf::Ident(a), tt::Leaf::Ident(b)) => a.sym == b.sym,
_ => false,
@@ -488,9 +423,9 @@ mod tests {
}
fn check_subtree_eq(a: &tt::TopSubtree, b: &tt::TopSubtree) -> bool {
- let a = a.view().as_token_trees().flat_tokens();
- let b = b.view().as_token_trees().flat_tokens();
- a.len() == b.len() && std::iter::zip(a, b).all(|(a, b)| check_tt_eq(a, b))
+ let a = a.view().as_token_trees().iter_flat_tokens();
+ let b = b.view().as_token_trees().iter_flat_tokens();
+ a.len() == b.len() && std::iter::zip(a, b).all(|(a, b)| check_tt_eq(&a, &b))
}
fn check_tt_eq(a: &tt::TokenTree, b: &tt::TokenTree) -> bool {
@@ -545,7 +480,7 @@ mod tests {
// the fixed-up tree should not contain braces as punct
// FIXME: should probably instead check that it's a valid punctuation character
- for x in tt.token_trees().flat_tokens() {
+ for x in tt.token_trees().iter_flat_tokens() {
match x {
::tt::TokenTree::Leaf(::tt::Leaf::Punct(punct)) => {
assert!(!matches!(punct.char, '{' | '}' | '(' | ')' | '[' | ']'))
diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs
index bd6f7e4f2b..ce7650d077 100644
--- a/crates/hir-expand/src/hygiene.rs
+++ b/crates/hir-expand/src/hygiene.rs
@@ -54,7 +54,7 @@ pub fn span_with_mixed_site_ctxt(
expn_id: MacroCallId,
edition: Edition,
) -> Span {
- span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent, edition)
+ span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiOpaque, edition)
}
fn span_with_ctxt_from_mark(
@@ -82,7 +82,7 @@ pub(super) fn apply_mark(
}
let call_site_ctxt = db.lookup_intern_macro_call(call_id.into()).ctxt;
- let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
+ let mut call_site_ctxt = if transparency == Transparency::SemiOpaque {
call_site_ctxt.normalize_to_macros_2_0(db)
} else {
call_site_ctxt.normalize_to_macro_rules(db)
@@ -117,16 +117,16 @@ fn apply_mark_internal(
let call_id = Some(call_id);
let mut opaque = ctxt.opaque(db);
- let mut opaque_and_semitransparent = ctxt.opaque_and_semitransparent(db);
+ let mut opaque_and_semiopaque = ctxt.opaque_and_semiopaque(db);
if transparency >= Transparency::Opaque {
let parent = opaque;
opaque = SyntaxContext::new(db, call_id, transparency, edition, parent, identity, identity);
}
- if transparency >= Transparency::SemiTransparent {
- let parent = opaque_and_semitransparent;
- opaque_and_semitransparent =
+ if transparency >= Transparency::SemiOpaque {
+ let parent = opaque_and_semiopaque;
+ opaque_and_semiopaque =
SyntaxContext::new(db, call_id, transparency, edition, parent, |_| opaque, identity);
}
@@ -138,6 +138,6 @@ fn apply_mark_internal(
edition,
parent,
|_| opaque,
- |_| opaque_and_semitransparent,
+ |_| opaque_and_semiopaque,
)
}
diff --git a/crates/hir-expand/src/inert_attr_macro.rs b/crates/hir-expand/src/inert_attr_macro.rs
index 385c98ef87..6dec2c5b32 100644
--- a/crates/hir-expand/src/inert_attr_macro.rs
+++ b/crates/hir-expand/src/inert_attr_macro.rs
@@ -429,7 +429,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_attr!(rustc_proc_macro_decls, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE),
rustc_attr!(
rustc_macro_transparency, Normal,
- template!(NameValueStr: "transparent|semitransparent|opaque"), ErrorFollowing,
+ template!(NameValueStr: "transparent|semiopaque|opaque"), ErrorFollowing,
"used internally for testing macro hygiene",
),
@@ -467,9 +467,6 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
// Used by the `rustc::untracked_query_information` lint to warn methods which
// might break incremental compilation.
rustc_attr!(rustc_lint_untracked_query_information, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE),
- // Used by the `rustc::untranslatable_diagnostic` and `rustc::diagnostic_outside_of_impl` lints
- // to assist in changes to diagnostic APIs.
- rustc_attr!(rustc_lint_diagnostics, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE),
// Used by the `rustc::bad_opt_access` lint to identify `DebuggingOptions` and `CodegenOptions`
// types (as well as any others in future).
rustc_attr!(rustc_lint_opt_ty, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE),
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 047996c978..05541e782e 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -66,25 +66,7 @@ pub use crate::{
pub use base_db::EditionedFileId;
pub use mbe::{DeclarativeMacro, MacroCallStyle, MacroCallStyles, ValueResult};
-pub mod tt {
- pub use span::Span;
- pub use tt::{DelimiterKind, IdentIsRaw, LitKind, Spacing, token_to_literal};
-
- pub type Delimiter = ::tt::Delimiter<Span>;
- pub type DelimSpan = ::tt::DelimSpan<Span>;
- pub type Subtree = ::tt::Subtree<Span>;
- pub type Leaf = ::tt::Leaf<Span>;
- pub type Literal = ::tt::Literal<Span>;
- pub type Punct = ::tt::Punct<Span>;
- pub type Ident = ::tt::Ident<Span>;
- pub type TokenTree = ::tt::TokenTree<Span>;
- pub type TopSubtree = ::tt::TopSubtree<Span>;
- pub type TopSubtreeBuilder = ::tt::TopSubtreeBuilder<Span>;
- pub type TokenTreesView<'a> = ::tt::TokenTreesView<'a, Span>;
- pub type SubtreeView<'a> = ::tt::SubtreeView<'a, Span>;
- pub type TtElement<'a> = ::tt::iter::TtElement<'a, Span>;
- pub type TtIter<'a> = ::tt::iter::TtIter<'a, Span>;
-}
+pub use tt;
#[macro_export]
macro_rules! impl_intern_lookup {
@@ -919,11 +901,8 @@ impl ExpansionInfo {
let span = self.exp_map.span_at(token.start());
match &self.arg_map {
SpanMap::RealSpanMap(_) => {
- let file_id =
- EditionedFileId::from_span_guess_origin(db, span.anchor.file_id).into();
- let anchor_offset =
- db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start();
- InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
+ let range = db.resolve_span(span);
+ InFile { file_id: range.file_id.into(), value: smallvec::smallvec![range.range] }
}
SpanMap::ExpansionSpanMap(arg_map) => {
let Some(arg_node) = &self.arg.value else {
@@ -965,7 +944,7 @@ pub fn map_node_range_up_rooted(
range: TextRange,
) -> Option<FileRange> {
let mut spans = exp_map.spans_for_range(range).filter(|span| span.ctx.is_root());
- let Span { range, anchor, ctx: _ } = spans.next()?;
+ let Span { range, anchor, ctx } = spans.next()?;
let mut start = range.start();
let mut end = range.end();
@@ -976,10 +955,7 @@ pub fn map_node_range_up_rooted(
start = start.min(span.range.start());
end = end.max(span.range.end());
}
- let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id);
- let anchor_offset =
- db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
- Some(FileRange { file_id, range: TextRange::new(start, end) + anchor_offset })
+ Some(db.resolve_span(Span { range: TextRange::new(start, end), anchor, ctx }))
}
/// Maps up the text range out of the expansion hierarchy back into the original file its from.
@@ -1002,10 +978,7 @@ pub fn map_node_range_up(
start = start.min(span.range.start());
end = end.max(span.range.end());
}
- let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id);
- let anchor_offset =
- db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
- Some((FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }, ctx))
+ Some((db.resolve_span(Span { range: TextRange::new(start, end), anchor, ctx }), ctx))
}
/// Looks up the span at the given offset.
@@ -1015,10 +988,7 @@ pub fn span_for_offset(
offset: TextSize,
) -> (FileRange, SyntaxContext) {
let span = exp_map.span_at(offset);
- let file_id = EditionedFileId::from_span_guess_origin(db, span.anchor.file_id);
- let anchor_offset =
- db.ast_id_map(file_id.into()).get_erased(span.anchor.ast_id).text_range().start();
- (FileRange { file_id, range: span.range + anchor_offset }, span.ctx)
+ (db.resolve_span(span), span.ctx)
}
/// In Rust, macros expand token trees to token trees. When we want to turn a
diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs
index e9805e3f86..1712c28aa8 100644
--- a/crates/hir-expand/src/mod_path.rs
+++ b/crates/hir-expand/src/mod_path.rs
@@ -355,16 +355,16 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: tt::TokenTreesView<'_>) -> Optio
tt::Leaf::Punct(tt::Punct { char: ':', .. }) => PathKind::Abs,
_ => return None,
},
- tt::Leaf::Ident(tt::Ident { sym: text, span, .. }) if *text == sym::dollar_crate => {
+ tt::Leaf::Ident(tt::Ident { sym: text, span, .. }) if text == sym::dollar_crate => {
resolve_crate_root(db, span.ctx).map(PathKind::DollarCrate).unwrap_or(PathKind::Crate)
}
- tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::self_ => PathKind::SELF,
- tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::super_ => {
+ tt::Leaf::Ident(tt::Ident { sym: text, .. }) if text == sym::self_ => PathKind::SELF,
+ tt::Leaf::Ident(tt::Ident { sym: text, .. }) if text == sym::super_ => {
let mut deg = 1;
while let Some(tt::Leaf::Ident(tt::Ident { sym: text, span, is_raw: _ })) =
leaves.next()
{
- if *text != sym::super_ {
+ if text != sym::super_ {
segments.push(Name::new_symbol(text.clone(), span.ctx));
break;
}
@@ -372,7 +372,7 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: tt::TokenTreesView<'_>) -> Optio
}
PathKind::Super(deg)
}
- tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::crate_ => PathKind::Crate,
+ tt::Leaf::Ident(tt::Ident { sym: text, .. }) if text == sym::crate_ => PathKind::Crate,
tt::Leaf::Ident(ident) => {
segments.push(Name::new_symbol(ident.sym.clone(), ident.span.ctx));
PathKind::Plain
@@ -401,8 +401,8 @@ pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContext) -> O
result_mark = Some(mark);
iter.next();
}
- // Then find the last semi-transparent mark from the end if it exists.
- while let Some((mark, Transparency::SemiTransparent)) = iter.next() {
+ // Then find the last semi-opaque mark from the end if it exists.
+ while let Some((mark, Transparency::SemiOpaque)) = iter.next() {
result_mark = Some(mark);
}
diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs
index 1e5efb6e14..0408a6943d 100644
--- a/crates/hir-expand/src/name.rs
+++ b/crates/hir-expand/src/name.rs
@@ -258,7 +258,7 @@ impl AsName for ast::NameOrNameRef {
}
}
-impl<Span> AsName for tt::Ident<Span> {
+impl AsName for tt::Ident {
fn as_name(&self) -> Name {
Name::new_root(self.sym.as_str())
}
diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs
index f97d721dfa..467eae3122 100644
--- a/crates/hir-expand/src/proc_macro.rs
+++ b/crates/hir-expand/src/proc_macro.rs
@@ -25,6 +25,7 @@ pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe + Any {
/// [`ProcMacroKind::Attr`]), environment variables, and span information.
fn expand(
&self,
+ db: &dyn ExpandDatabase,
subtree: &tt::TopSubtree,
attrs: Option<&tt::TopSubtree>,
env: &Env,
@@ -309,6 +310,7 @@ impl CustomProcMacroExpander {
let current_dir = calling_crate.data(db).proc_macro_cwd.to_string();
match proc_macro.expander.expand(
+ db,
tt,
attr_arg,
env,
diff --git a/crates/hir-expand/src/span_map.rs b/crates/hir-expand/src/span_map.rs
index 8b0c0d72cd..586b815294 100644
--- a/crates/hir-expand/src/span_map.rs
+++ b/crates/hir-expand/src/span_map.rs
@@ -1,6 +1,6 @@
//! Span maps for real files and macro expansions.
-use span::{Span, SyntaxContext};
+use span::Span;
use syntax::{AstNode, TextRange, ast};
use triomphe::Arc;
@@ -8,7 +8,7 @@ pub use span::RealSpanMap;
use crate::{HirFileId, MacroCallId, db::ExpandDatabase};
-pub type ExpansionSpanMap = span::SpanMap<SyntaxContext>;
+pub type ExpansionSpanMap = span::SpanMap;
/// Spanmap for a macro file or a real file
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -27,13 +27,13 @@ pub enum SpanMapRef<'a> {
RealSpanMap(&'a RealSpanMap),
}
-impl syntax_bridge::SpanMapper<Span> for SpanMap {
+impl syntax_bridge::SpanMapper for SpanMap {
fn span_for(&self, range: TextRange) -> Span {
self.span_for_range(range)
}
}
-impl syntax_bridge::SpanMapper<Span> for SpanMapRef<'_> {
+impl syntax_bridge::SpanMapper for SpanMapRef<'_> {
fn span_for(&self, range: TextRange) -> Span {
self.span_for_range(range)
}
diff --git a/crates/hir-ty/src/builtin_derive.rs b/crates/hir-ty/src/builtin_derive.rs
new file mode 100644
index 0000000000..f3e67d01e5
--- /dev/null
+++ b/crates/hir-ty/src/builtin_derive.rs
@@ -0,0 +1,599 @@
+//! Implementation of builtin derive impls.
+
+use std::ops::ControlFlow;
+
+use hir_def::{
+ AdtId, BuiltinDeriveImplId, BuiltinDeriveImplLoc, HasModule, LocalFieldId, TraitId,
+ TypeOrConstParamId, TypeParamId,
+ attrs::AttrFlags,
+ builtin_derive::BuiltinDeriveImplTrait,
+ hir::generics::{GenericParams, TypeOrConstParamData},
+};
+use itertools::Itertools;
+use la_arena::ArenaMap;
+use rustc_type_ir::{
+ AliasTyKind, Interner, TypeFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitor, Upcast,
+ inherent::{GenericArgs as _, IntoKind},
+};
+
+use crate::{
+ GenericPredicates,
+ db::HirDatabase,
+ next_solver::{
+ Clause, Clauses, DbInterner, EarlyBinder, GenericArgs, ParamEnv, StoredEarlyBinder,
+ StoredTy, TraitRef, Ty, TyKind, fold::fold_tys, generics::Generics,
+ },
+};
+
+fn coerce_pointee_new_type_param(trait_id: TraitId) -> TypeParamId {
+ // HACK: Fake the param.
+ // We cannot use a dummy param here, because it can leak into the IDE layer and that'll cause panics
+ // when e.g. trying to display it. So we use an existing param.
+ TypeParamId::from_unchecked(TypeOrConstParamId {
+ parent: trait_id.into(),
+ local_id: la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(1)),
+ })
+}
+
+pub(crate) fn generics_of<'db>(interner: DbInterner<'db>, id: BuiltinDeriveImplId) -> Generics {
+ let db = interner.db;
+ let loc = id.loc(db);
+ match loc.trait_ {
+ BuiltinDeriveImplTrait::Copy
+ | BuiltinDeriveImplTrait::Clone
+ | BuiltinDeriveImplTrait::Default
+ | BuiltinDeriveImplTrait::Debug
+ | BuiltinDeriveImplTrait::Hash
+ | BuiltinDeriveImplTrait::Ord
+ | BuiltinDeriveImplTrait::PartialOrd
+ | BuiltinDeriveImplTrait::Eq
+ | BuiltinDeriveImplTrait::PartialEq => interner.generics_of(loc.adt.into()),
+ BuiltinDeriveImplTrait::CoerceUnsized | BuiltinDeriveImplTrait::DispatchFromDyn => {
+ let mut generics = interner.generics_of(loc.adt.into());
+ let trait_id = loc
+ .trait_
+ .get_id(interner.lang_items())
+ .expect("we don't pass the impl to the solver if we can't resolve the trait");
+ generics.push_param(coerce_pointee_new_type_param(trait_id).into());
+ generics
+ }
+ }
+}
+
+pub fn generic_params_count(db: &dyn HirDatabase, id: BuiltinDeriveImplId) -> usize {
+ let loc = id.loc(db);
+ let adt_params = GenericParams::new(db, loc.adt.into());
+ let extra_params_count = match loc.trait_ {
+ BuiltinDeriveImplTrait::Copy
+ | BuiltinDeriveImplTrait::Clone
+ | BuiltinDeriveImplTrait::Default
+ | BuiltinDeriveImplTrait::Debug
+ | BuiltinDeriveImplTrait::Hash
+ | BuiltinDeriveImplTrait::Ord
+ | BuiltinDeriveImplTrait::PartialOrd
+ | BuiltinDeriveImplTrait::Eq
+ | BuiltinDeriveImplTrait::PartialEq => 0,
+ BuiltinDeriveImplTrait::CoerceUnsized | BuiltinDeriveImplTrait::DispatchFromDyn => 1,
+ };
+ adt_params.len() + extra_params_count
+}
+
+pub fn impl_trait<'db>(
+ interner: DbInterner<'db>,
+ id: BuiltinDeriveImplId,
+) -> EarlyBinder<'db, TraitRef<'db>> {
+ let db = interner.db;
+ let loc = id.loc(db);
+ let trait_id = loc
+ .trait_
+ .get_id(interner.lang_items())
+ .expect("we don't pass the impl to the solver if we can't resolve the trait");
+ match loc.trait_ {
+ BuiltinDeriveImplTrait::Copy
+ | BuiltinDeriveImplTrait::Clone
+ | BuiltinDeriveImplTrait::Default
+ | BuiltinDeriveImplTrait::Debug
+ | BuiltinDeriveImplTrait::Hash
+ | BuiltinDeriveImplTrait::Ord
+ | BuiltinDeriveImplTrait::Eq => {
+ let self_ty = Ty::new_adt(
+ interner,
+ loc.adt,
+ GenericArgs::identity_for_item(interner, loc.adt.into()),
+ );
+ EarlyBinder::bind(TraitRef::new(interner, trait_id.into(), [self_ty]))
+ }
+ BuiltinDeriveImplTrait::PartialOrd | BuiltinDeriveImplTrait::PartialEq => {
+ let self_ty = Ty::new_adt(
+ interner,
+ loc.adt,
+ GenericArgs::identity_for_item(interner, loc.adt.into()),
+ );
+ EarlyBinder::bind(TraitRef::new(interner, trait_id.into(), [self_ty, self_ty]))
+ }
+ BuiltinDeriveImplTrait::CoerceUnsized | BuiltinDeriveImplTrait::DispatchFromDyn => {
+ let generic_params = GenericParams::new(db, loc.adt.into());
+ let interner = DbInterner::new_no_crate(db);
+ let args = GenericArgs::identity_for_item(interner, loc.adt.into());
+ let self_ty = Ty::new_adt(interner, loc.adt, args);
+ let Some((pointee_param_idx, _, new_param_ty)) =
+ coerce_pointee_params(interner, loc, &generic_params, trait_id)
+ else {
+ // Malformed derive.
+ return EarlyBinder::bind(TraitRef::new(
+ interner,
+ trait_id.into(),
+ [self_ty, self_ty],
+ ));
+ };
+ let changed_args = replace_pointee(interner, pointee_param_idx, new_param_ty, args);
+ let changed_self_ty = Ty::new_adt(interner, loc.adt, changed_args);
+ EarlyBinder::bind(TraitRef::new(interner, trait_id.into(), [self_ty, changed_self_ty]))
+ }
+ }
+}
+
+#[salsa::tracked(returns(ref), unsafe(non_update_types))]
+pub fn predicates<'db>(db: &'db dyn HirDatabase, impl_: BuiltinDeriveImplId) -> GenericPredicates {
+ let loc = impl_.loc(db);
+ let generic_params = GenericParams::new(db, loc.adt.into());
+ let interner = DbInterner::new_with(db, loc.module(db).krate(db));
+ let adt_predicates = GenericPredicates::query(db, loc.adt.into());
+ let trait_id = loc
+ .trait_
+ .get_id(interner.lang_items())
+ .expect("we don't pass the impl to the solver if we can't resolve the trait");
+ match loc.trait_ {
+ BuiltinDeriveImplTrait::Copy
+ | BuiltinDeriveImplTrait::Clone
+ | BuiltinDeriveImplTrait::Debug
+ | BuiltinDeriveImplTrait::Hash
+ | BuiltinDeriveImplTrait::Ord
+ | BuiltinDeriveImplTrait::PartialOrd
+ | BuiltinDeriveImplTrait::Eq
+ | BuiltinDeriveImplTrait::PartialEq => {
+ simple_trait_predicates(interner, loc, &generic_params, adt_predicates, trait_id)
+ }
+ BuiltinDeriveImplTrait::Default => {
+ if matches!(loc.adt, AdtId::EnumId(_)) {
+ // Enums don't have extra bounds.
+ GenericPredicates::from_explicit_own_predicates(StoredEarlyBinder::bind(
+ Clauses::new_from_slice(adt_predicates.explicit_predicates().skip_binder())
+ .store(),
+ ))
+ } else {
+ simple_trait_predicates(interner, loc, &generic_params, adt_predicates, trait_id)
+ }
+ }
+ BuiltinDeriveImplTrait::CoerceUnsized | BuiltinDeriveImplTrait::DispatchFromDyn => {
+ let Some((pointee_param_idx, pointee_param_id, new_param_ty)) =
+ coerce_pointee_params(interner, loc, &generic_params, trait_id)
+ else {
+ // Malformed derive.
+ return GenericPredicates::from_explicit_own_predicates(StoredEarlyBinder::bind(
+ Clauses::default().store(),
+ ));
+ };
+ let duplicated_bounds =
+ adt_predicates.explicit_predicates().iter_identity_copied().filter_map(|pred| {
+ let mentions_pointee =
+ pred.visit_with(&mut MentionsPointee { pointee_param_idx }).is_break();
+ if !mentions_pointee {
+ return None;
+ }
+ let transformed =
+ replace_pointee(interner, pointee_param_idx, new_param_ty, pred);
+ Some(transformed)
+ });
+ let unsize_trait = interner.lang_items().Unsize;
+ let unsize_bound = unsize_trait.map(|unsize_trait| {
+ let pointee_param_ty = Ty::new_param(interner, pointee_param_id, pointee_param_idx);
+ TraitRef::new(interner, unsize_trait.into(), [pointee_param_ty, new_param_ty])
+ .upcast(interner)
+ });
+ GenericPredicates::from_explicit_own_predicates(StoredEarlyBinder::bind(
+ Clauses::new_from_iter(
+ interner,
+ adt_predicates
+ .explicit_predicates()
+ .iter_identity_copied()
+ .chain(duplicated_bounds)
+ .chain(unsize_bound),
+ )
+ .store(),
+ ))
+ }
+ }
+}
+
+/// Not cached in a query, currently used in `hir` only. If you need this in `hir-ty` consider introducing a query.
+pub fn param_env<'db>(interner: DbInterner<'db>, id: BuiltinDeriveImplId) -> ParamEnv<'db> {
+ let predicates = predicates(interner.db, id);
+ crate::lower::param_env_from_predicates(interner, predicates)
+}
+
+struct MentionsPointee {
+ pointee_param_idx: u32,
+}
+
+impl<'db> TypeVisitor<DbInterner<'db>> for MentionsPointee {
+ type Result = ControlFlow<()>;
+
+ fn visit_ty(&mut self, t: Ty<'db>) -> Self::Result {
+ if let TyKind::Param(param) = t.kind()
+ && param.index == self.pointee_param_idx
+ {
+ ControlFlow::Break(())
+ } else {
+ t.super_visit_with(self)
+ }
+ }
+}
+
+fn replace_pointee<'db, T: TypeFoldable<DbInterner<'db>>>(
+ interner: DbInterner<'db>,
+ pointee_param_idx: u32,
+ new_param_ty: Ty<'db>,
+ t: T,
+) -> T {
+ fold_tys(interner, t, |ty| match ty.kind() {
+ TyKind::Param(param) if param.index == pointee_param_idx => new_param_ty,
+ _ => ty,
+ })
+}
+
+fn simple_trait_predicates<'db>(
+ interner: DbInterner<'db>,
+ loc: &BuiltinDeriveImplLoc,
+ generic_params: &GenericParams,
+ adt_predicates: &GenericPredicates,
+ trait_id: TraitId,
+) -> GenericPredicates {
+ let extra_predicates = generic_params
+ .iter_type_or_consts()
+ .filter(|(_, data)| matches!(data, TypeOrConstParamData::TypeParamData(_)))
+ .map(|(param_idx, _)| {
+ let param_id = TypeParamId::from_unchecked(TypeOrConstParamId {
+ parent: loc.adt.into(),
+ local_id: param_idx,
+ });
+ let param_idx =
+ param_idx.into_raw().into_u32() + (generic_params.len_lifetimes() as u32);
+ let param_ty = Ty::new_param(interner, param_id, param_idx);
+ let trait_ref = TraitRef::new(interner, trait_id.into(), [param_ty]);
+ trait_ref.upcast(interner)
+ });
+ let mut assoc_type_bounds = Vec::new();
+ match loc.adt {
+ AdtId::StructId(id) => extend_assoc_type_bounds(
+ interner,
+ &mut assoc_type_bounds,
+ interner.db.field_types(id.into()),
+ trait_id,
+ ),
+ AdtId::UnionId(id) => extend_assoc_type_bounds(
+ interner,
+ &mut assoc_type_bounds,
+ interner.db.field_types(id.into()),
+ trait_id,
+ ),
+ AdtId::EnumId(id) => {
+ for &(variant_id, _, _) in &id.enum_variants(interner.db).variants {
+ extend_assoc_type_bounds(
+ interner,
+ &mut assoc_type_bounds,
+ interner.db.field_types(variant_id.into()),
+ trait_id,
+ )
+ }
+ }
+ }
+ GenericPredicates::from_explicit_own_predicates(StoredEarlyBinder::bind(
+ Clauses::new_from_iter(
+ interner,
+ adt_predicates
+ .explicit_predicates()
+ .iter_identity_copied()
+ .chain(extra_predicates)
+ .chain(assoc_type_bounds),
+ )
+ .store(),
+ ))
+}
+
+fn extend_assoc_type_bounds<'db>(
+ interner: DbInterner<'db>,
+ assoc_type_bounds: &mut Vec<Clause<'db>>,
+ fields: &ArenaMap<LocalFieldId, StoredEarlyBinder<StoredTy>>,
+ trait_: TraitId,
+) {
+ struct ProjectionFinder<'a, 'db> {
+ interner: DbInterner<'db>,
+ assoc_type_bounds: &'a mut Vec<Clause<'db>>,
+ trait_: TraitId,
+ }
+
+ impl<'db> TypeVisitor<DbInterner<'db>> for ProjectionFinder<'_, 'db> {
+ type Result = ();
+
+ fn visit_ty(&mut self, t: Ty<'db>) -> Self::Result {
+ if let TyKind::Alias(AliasTyKind::Projection, _) = t.kind() {
+ self.assoc_type_bounds.push(
+ TraitRef::new(self.interner, self.trait_.into(), [t]).upcast(self.interner),
+ );
+ }
+
+ t.super_visit_with(self)
+ }
+ }
+
+ let mut visitor = ProjectionFinder { interner, assoc_type_bounds, trait_ };
+ for (_, field) in fields.iter() {
+ field.get().instantiate_identity().visit_with(&mut visitor);
+ }
+}
+
+fn coerce_pointee_params<'db>(
+ interner: DbInterner<'db>,
+ loc: &BuiltinDeriveImplLoc,
+ generic_params: &GenericParams,
+ trait_id: TraitId,
+) -> Option<(u32, TypeParamId, Ty<'db>)> {
+ let pointee_param = {
+ if let Ok((pointee_param, _)) = generic_params
+ .iter_type_or_consts()
+ .filter(|param| matches!(param.1, TypeOrConstParamData::TypeParamData(_)))
+ .exactly_one()
+ {
+ pointee_param
+ } else {
+ let (_, generic_param_attrs) =
+ AttrFlags::query_generic_params(interner.db, loc.adt.into());
+ generic_param_attrs
+ .iter()
+ .find(|param| param.1.contains(AttrFlags::IS_POINTEE))
+ .map(|(param, _)| param)
+ .or_else(|| {
+ generic_params
+ .iter_type_or_consts()
+ .find(|param| matches!(param.1, TypeOrConstParamData::TypeParamData(_)))
+ .map(|(idx, _)| idx)
+ })?
+ }
+ };
+ let pointee_param_id = TypeParamId::from_unchecked(TypeOrConstParamId {
+ parent: loc.adt.into(),
+ local_id: pointee_param,
+ });
+ let pointee_param_idx =
+ pointee_param.into_raw().into_u32() + (generic_params.len_lifetimes() as u32);
+ let new_param_idx = generic_params.len() as u32;
+ let new_param_id = coerce_pointee_new_type_param(trait_id);
+ let new_param_ty = Ty::new_param(interner, new_param_id, new_param_idx);
+ Some((pointee_param_idx, pointee_param_id, new_param_ty))
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{Expect, expect};
+ use hir_def::nameres::crate_def_map;
+ use itertools::Itertools;
+ use stdx::format_to;
+ use test_fixture::WithFixture;
+
+ use crate::{builtin_derive::impl_trait, next_solver::DbInterner, test_db::TestDB};
+
+ fn check_trait_refs(#[rust_analyzer::rust_fixture] ra_fixture: &str, expectation: Expect) {
+ let db = TestDB::with_files(ra_fixture);
+ let def_map = crate_def_map(&db, db.test_crate());
+
+ let interner = DbInterner::new_with(&db, db.test_crate());
+ crate::attach_db(&db, || {
+ let mut trait_refs = Vec::new();
+ for (_, module) in def_map.modules() {
+ for derive in module.scope.builtin_derive_impls() {
+ let trait_ref = impl_trait(interner, derive).skip_binder();
+ trait_refs.push(format!("{trait_ref:?}"));
+ }
+ }
+
+ expectation.assert_eq(&trait_refs.join("\n"));
+ });
+ }
+
+ fn check_predicates(#[rust_analyzer::rust_fixture] ra_fixture: &str, expectation: Expect) {
+ let db = TestDB::with_files(ra_fixture);
+ let def_map = crate_def_map(&db, db.test_crate());
+
+ crate::attach_db(&db, || {
+ let mut predicates = String::new();
+ for (_, module) in def_map.modules() {
+ for derive in module.scope.builtin_derive_impls() {
+ let preds = super::predicates(&db, derive).all_predicates().skip_binder();
+ format_to!(
+ predicates,
+ "{}\n\n",
+ preds.iter().format_with("\n", |pred, formatter| formatter(&format_args!(
+ "{pred:?}"
+ ))),
+ );
+ }
+ }
+
+ expectation.assert_eq(&predicates);
+ });
+ }
+
+ #[test]
+ fn simple_macros_trait_ref() {
+ check_trait_refs(
+ r#"
+//- minicore: derive, clone, copy, eq, ord, hash, fmt
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+struct Simple;
+
+trait Trait {}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+struct WithGenerics<'a, T: Trait, const N: usize>(&'a [T; N]);
+ "#,
+ expect![[r#"
+ Simple: Debug
+ Simple: Clone
+ Simple: Copy
+ Simple: PartialEq<[Simple]>
+ Simple: Eq
+ Simple: PartialOrd<[Simple]>
+ Simple: Ord
+ Simple: Hash
+ WithGenerics<#0, #1, #2>: Debug
+ WithGenerics<#0, #1, #2>: Clone
+ WithGenerics<#0, #1, #2>: Copy
+ WithGenerics<#0, #1, #2>: PartialEq<[WithGenerics<#0, #1, #2>]>
+ WithGenerics<#0, #1, #2>: Eq
+ WithGenerics<#0, #1, #2>: PartialOrd<[WithGenerics<#0, #1, #2>]>
+ WithGenerics<#0, #1, #2>: Ord
+ WithGenerics<#0, #1, #2>: Hash"#]],
+ );
+ }
+
+ #[test]
+ fn coerce_pointee_trait_ref() {
+ check_trait_refs(
+ r#"
+//- minicore: derive, coerce_pointee
+use core::marker::CoercePointee;
+
+#[derive(CoercePointee)]
+struct Simple<T: ?Sized>(*const T);
+
+#[derive(CoercePointee)]
+struct MultiGenericParams<'a, T, #[pointee] U: ?Sized, const N: usize>(*const U);
+ "#,
+ expect![[r#"
+ Simple<#0>: CoerceUnsized<[Simple<#1>]>
+ Simple<#0>: DispatchFromDyn<[Simple<#1>]>
+ MultiGenericParams<#0, #1, #2, #3>: CoerceUnsized<[MultiGenericParams<#0, #1, #4, #3>]>
+ MultiGenericParams<#0, #1, #2, #3>: DispatchFromDyn<[MultiGenericParams<#0, #1, #4, #3>]>"#]],
+ );
+ }
+
+ #[test]
+ fn simple_macros_predicates() {
+ check_predicates(
+ r#"
+//- minicore: derive, clone, copy, eq, ord, hash, fmt
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+struct Simple;
+
+trait Trait {}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+struct WithGenerics<'a, T: Trait, const N: usize>(&'a [T; N]);
+ "#,
+ expect![[r#"
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Clause(Binder { value: TraitPredicate(#1: Trait, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: ConstArgHasType(#2, usize), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Sized, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Debug, polarity:Positive), bound_vars: [] })
+
+ Clause(Binder { value: TraitPredicate(#1: Trait, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: ConstArgHasType(#2, usize), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Sized, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Clone, polarity:Positive), bound_vars: [] })
+
+ Clause(Binder { value: TraitPredicate(#1: Trait, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: ConstArgHasType(#2, usize), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Sized, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Copy, polarity:Positive), bound_vars: [] })
+
+ Clause(Binder { value: TraitPredicate(#1: Trait, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: ConstArgHasType(#2, usize), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Sized, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: PartialEq, polarity:Positive), bound_vars: [] })
+
+ Clause(Binder { value: TraitPredicate(#1: Trait, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: ConstArgHasType(#2, usize), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Sized, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Eq, polarity:Positive), bound_vars: [] })
+
+ Clause(Binder { value: TraitPredicate(#1: Trait, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: ConstArgHasType(#2, usize), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Sized, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: PartialOrd, polarity:Positive), bound_vars: [] })
+
+ Clause(Binder { value: TraitPredicate(#1: Trait, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: ConstArgHasType(#2, usize), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Sized, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Ord, polarity:Positive), bound_vars: [] })
+
+ Clause(Binder { value: TraitPredicate(#1: Trait, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: ConstArgHasType(#2, usize), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Sized, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Hash, polarity:Positive), bound_vars: [] })
+
+ "#]],
+ );
+ }
+
+ #[test]
+ fn coerce_pointee_predicates() {
+ check_predicates(
+ r#"
+//- minicore: derive, coerce_pointee
+use core::marker::CoercePointee;
+
+#[derive(CoercePointee)]
+struct Simple<T: ?Sized>(*const T);
+
+trait Trait<T> {}
+
+#[derive(CoercePointee)]
+struct MultiGenericParams<'a, T, #[pointee] U: ?Sized, const N: usize>(*const U)
+where
+ T: Trait<U>,
+ U: Trait<U>;
+ "#,
+ expect![[r#"
+ Clause(Binder { value: TraitPredicate(#0: Unsize<[#1]>, polarity:Positive), bound_vars: [] })
+
+ Clause(Binder { value: TraitPredicate(#0: Unsize<[#1]>, polarity:Positive), bound_vars: [] })
+
+ Clause(Binder { value: TraitPredicate(#1: Trait<[#2]>, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#2: Trait<[#2]>, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: ConstArgHasType(#3, usize), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Sized, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Trait<[#4]>, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#4: Trait<[#4]>, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#2: Unsize<[#4]>, polarity:Positive), bound_vars: [] })
+
+ Clause(Binder { value: TraitPredicate(#1: Trait<[#2]>, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#2: Trait<[#2]>, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: ConstArgHasType(#3, usize), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Sized, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#1: Trait<[#4]>, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#4: Trait<[#4]>, polarity:Positive), bound_vars: [] })
+ Clause(Binder { value: TraitPredicate(#2: Unsize<[#4]>, polarity:Positive), bound_vars: [] })
+
+ "#]],
+ );
+ }
+}
diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index f11240e0f7..5bc2446fdd 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -7,9 +7,9 @@ use base_db::Crate;
use hir_def::{
ConstId, EnumVariantId, GeneralConstId, HasModule, StaticId,
attrs::AttrFlags,
+ builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
expr_store::Body,
- hir::{Expr, ExprId},
- type_ref::LiteralConstRef,
+ hir::{Expr, ExprId, Literal},
};
use hir_expand::Lookup;
use rustc_type_ir::inherent::IntoKind;
@@ -23,7 +23,7 @@ use crate::{
mir::{MirEvalError, MirLowerError},
next_solver::{
Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs,
- ParamEnv, StoredConst, StoredGenericArgs, Ty, ValueConst,
+ StoredConst, StoredGenericArgs, Ty, ValueConst,
},
traits::StoredParamEnvAndCrate,
};
@@ -81,47 +81,122 @@ impl From<MirEvalError> for ConstEvalError {
/// Interns a constant scalar with the given type
pub fn intern_const_ref<'a>(
db: &'a dyn HirDatabase,
- value: &LiteralConstRef,
+ value: &Literal,
ty: Ty<'a>,
- krate: Crate,
+ _krate: Crate,
) -> Const<'a> {
let interner = DbInterner::new_no_crate(db);
- let layout = db
- .layout_of_ty(ty.store(), ParamEnvAndCrate { param_env: ParamEnv::empty(), krate }.store());
let kind = match value {
- LiteralConstRef::Int(i) => {
- // FIXME: We should handle failure of layout better.
- let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
+ &Literal::Uint(i, builtin_ty)
+ if builtin_ty.is_none() || ty.as_builtin() == builtin_ty.map(BuiltinType::Uint) =>
+ {
+ let memory = match ty.as_builtin() {
+ Some(BuiltinType::Uint(builtin_uint)) => match builtin_uint {
+ BuiltinUint::U8 => Box::new([i as u8]) as Box<[u8]>,
+ BuiltinUint::U16 => Box::new((i as u16).to_le_bytes()),
+ BuiltinUint::U32 => Box::new((i as u32).to_le_bytes()),
+ BuiltinUint::U64 => Box::new((i as u64).to_le_bytes()),
+ BuiltinUint::U128 => Box::new((i).to_le_bytes()),
+ BuiltinUint::Usize => Box::new((i as usize).to_le_bytes()),
+ },
+ _ => return Const::new(interner, rustc_type_ir::ConstKind::Error(ErrorGuaranteed)),
+ };
rustc_type_ir::ConstKind::Value(ValueConst::new(
ty,
- ConstBytes {
- memory: i.to_le_bytes()[0..size].into(),
- memory_map: MemoryMap::default(),
+ ConstBytes { memory, memory_map: MemoryMap::default() },
+ ))
+ }
+ &Literal::Int(i, None)
+ if ty
+ .as_builtin()
+ .is_some_and(|builtin_ty| matches!(builtin_ty, BuiltinType::Uint(_))) =>
+ {
+ let memory = match ty.as_builtin() {
+ Some(BuiltinType::Uint(builtin_uint)) => match builtin_uint {
+ BuiltinUint::U8 => Box::new([i as u8]) as Box<[u8]>,
+ BuiltinUint::U16 => Box::new((i as u16).to_le_bytes()),
+ BuiltinUint::U32 => Box::new((i as u32).to_le_bytes()),
+ BuiltinUint::U64 => Box::new((i as u64).to_le_bytes()),
+ BuiltinUint::U128 => Box::new((i as u128).to_le_bytes()),
+ BuiltinUint::Usize => Box::new((i as usize).to_le_bytes()),
},
+ _ => return Const::new(interner, rustc_type_ir::ConstKind::Error(ErrorGuaranteed)),
+ };
+ rustc_type_ir::ConstKind::Value(ValueConst::new(
+ ty,
+ ConstBytes { memory, memory_map: MemoryMap::default() },
))
}
- LiteralConstRef::UInt(i) => {
- let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
+ &Literal::Int(i, builtin_ty)
+ if builtin_ty.is_none() || ty.as_builtin() == builtin_ty.map(BuiltinType::Int) =>
+ {
+ let memory = match ty.as_builtin() {
+ Some(BuiltinType::Int(builtin_int)) => match builtin_int {
+ BuiltinInt::I8 => Box::new([i as u8]) as Box<[u8]>,
+ BuiltinInt::I16 => Box::new((i as i16).to_le_bytes()),
+ BuiltinInt::I32 => Box::new((i as i32).to_le_bytes()),
+ BuiltinInt::I64 => Box::new((i as i64).to_le_bytes()),
+ BuiltinInt::I128 => Box::new((i).to_le_bytes()),
+ BuiltinInt::Isize => Box::new((i as isize).to_le_bytes()),
+ },
+ _ => return Const::new(interner, rustc_type_ir::ConstKind::Error(ErrorGuaranteed)),
+ };
rustc_type_ir::ConstKind::Value(ValueConst::new(
ty,
- ConstBytes {
- memory: i.to_le_bytes()[0..size].into(),
- memory_map: MemoryMap::default(),
+ ConstBytes { memory, memory_map: MemoryMap::default() },
+ ))
+ }
+ Literal::Float(float_type_wrapper, builtin_float)
+ if builtin_float.is_none()
+ || ty.as_builtin() == builtin_float.map(BuiltinType::Float) =>
+ {
+ let memory = match ty.as_builtin().unwrap() {
+ BuiltinType::Float(builtin_float) => match builtin_float {
+ // FIXME:
+ hir_def::builtin_type::BuiltinFloat::F16 => Box::new([0u8; 2]) as Box<[u8]>,
+ hir_def::builtin_type::BuiltinFloat::F32 => {
+ Box::new(float_type_wrapper.to_f32().to_le_bytes())
+ }
+ hir_def::builtin_type::BuiltinFloat::F64 => {
+ Box::new(float_type_wrapper.to_f64().to_le_bytes())
+ }
+ // FIXME:
+ hir_def::builtin_type::BuiltinFloat::F128 => Box::new([0; 16]),
},
+ _ => unreachable!(),
+ };
+ rustc_type_ir::ConstKind::Value(ValueConst::new(
+ ty,
+ ConstBytes { memory, memory_map: MemoryMap::default() },
))
}
- LiteralConstRef::Bool(b) => rustc_type_ir::ConstKind::Value(ValueConst::new(
+ Literal::Bool(b) if ty.is_bool() => rustc_type_ir::ConstKind::Value(ValueConst::new(
ty,
ConstBytes { memory: Box::new([*b as u8]), memory_map: MemoryMap::default() },
)),
- LiteralConstRef::Char(c) => rustc_type_ir::ConstKind::Value(ValueConst::new(
+ Literal::Char(c) if ty.is_char() => rustc_type_ir::ConstKind::Value(ValueConst::new(
ty,
ConstBytes {
memory: (*c as u32).to_le_bytes().into(),
memory_map: MemoryMap::default(),
},
)),
- LiteralConstRef::Unknown => rustc_type_ir::ConstKind::Error(ErrorGuaranteed),
+ Literal::String(symbol) if ty.is_str() => rustc_type_ir::ConstKind::Value(ValueConst::new(
+ ty,
+ ConstBytes {
+ memory: symbol.as_str().as_bytes().into(),
+ memory_map: MemoryMap::default(),
+ },
+ )),
+ Literal::ByteString(items) if ty.as_slice().is_some_and(|ty| ty.is_u8()) => {
+ rustc_type_ir::ConstKind::Value(ValueConst::new(
+ ty,
+ ConstBytes { memory: items.clone(), memory_map: MemoryMap::default() },
+ ))
+ }
+ // FIXME
+ Literal::CString(_items) => rustc_type_ir::ConstKind::Error(ErrorGuaranteed),
+ _ => rustc_type_ir::ConstKind::Error(ErrorGuaranteed),
};
Const::new(interner, kind)
}
@@ -130,7 +205,15 @@ pub fn intern_const_ref<'a>(
pub fn usize_const<'db>(db: &'db dyn HirDatabase, value: Option<u128>, krate: Crate) -> Const<'db> {
intern_const_ref(
db,
- &value.map_or(LiteralConstRef::Unknown, LiteralConstRef::UInt),
+ &match value {
+ Some(value) => Literal::Uint(value, Some(BuiltinUint::Usize)),
+ None => {
+ return Const::new(
+ DbInterner::new_no_crate(db),
+ rustc_type_ir::ConstKind::Error(ErrorGuaranteed),
+ );
+ }
+ },
Ty::new_uint(DbInterner::new_no_crate(db), rustc_type_ir::UintTy::Usize),
krate,
)
diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs
index 8816e13ba7..5f6bcb4a60 100644
--- a/crates/hir-ty/src/consteval/tests.rs
+++ b/crates/hir-ty/src/consteval/tests.rs
@@ -1568,6 +1568,7 @@ const GOAL: u8 = {
}
#[test]
+#[ignore = "builtin derive macros are currently not working with MIR eval"]
fn builtin_derive_macro() {
check_number(
r#"
@@ -2208,6 +2209,7 @@ fn boxes() {
check_number(
r#"
//- minicore: coerce_unsized, deref_mut, slice
+#![feature(lang_items)]
use core::ops::{Deref, DerefMut};
use core::{marker::Unsize, ops::CoerceUnsized};
@@ -2345,6 +2347,7 @@ fn c_string() {
check_number(
r#"
//- minicore: index, slice
+#![feature(lang_items)]
#[lang = "CStr"]
pub struct CStr {
inner: [u8]
@@ -2359,6 +2362,7 @@ const GOAL: u8 = {
check_number(
r#"
//- minicore: index, slice
+#![feature(lang_items)]
#[lang = "CStr"]
pub struct CStr {
inner: [u8]
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index f0f65eedbc..70474fc469 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -2,10 +2,12 @@
//! type inference-related queries.
use base_db::{Crate, target::TargetLoadError};
+use either::Either;
use hir_def::{
- AdtId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId,
- GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TraitId, TypeAliasId, VariantId,
- db::DefDatabase, hir::ExprId, layout::TargetDataLayout,
+ AdtId, BuiltinDeriveImplId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumVariantId,
+ FunctionId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TraitId,
+ TypeAliasId, VariantId, builtin_derive::BuiltinDeriveImplMethod, db::DefDatabase, hir::ExprId,
+ layout::TargetDataLayout,
};
use la_arena::ArenaMap;
use salsa::plumbing::AsId;
@@ -83,7 +85,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
env: ParamEnvAndCrate<'db>,
func: FunctionId,
fn_subst: GenericArgs<'db>,
- ) -> (FunctionId, GenericArgs<'db>);
+ ) -> (Either<FunctionId, (BuiltinDeriveImplId, BuiltinDeriveImplMethod)>, GenericArgs<'db>);
// endregion:mir
diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs
index a6852b87f6..29da1b0c51 100644
--- a/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -293,12 +293,18 @@ impl<'a> DeclValidator<'a> {
fn validate_struct(&mut self, struct_id: StructId) {
// Check the structure name.
let data = self.db.struct_signature(struct_id);
- self.create_incorrect_case_diagnostic_for_item_name(
- struct_id,
- &data.name,
- CaseType::UpperCamelCase,
- IdentType::Structure,
- );
+
+ // rustc implementation excuses repr(C) since C structs predominantly don't
+ // use camel case.
+ let has_repr_c = data.repr(self.db, struct_id).is_some_and(|repr| repr.c());
+ if !has_repr_c {
+ self.create_incorrect_case_diagnostic_for_item_name(
+ struct_id,
+ &data.name,
+ CaseType::UpperCamelCase,
+ IdentType::Structure,
+ );
+ }
// Check the field names.
self.validate_struct_fields(struct_id);
@@ -378,15 +384,20 @@ impl<'a> DeclValidator<'a> {
}
fn validate_enum(&mut self, enum_id: EnumId) {
+ // Check the enum name.
let data = self.db.enum_signature(enum_id);
- // Check the enum name.
- self.create_incorrect_case_diagnostic_for_item_name(
- enum_id,
- &data.name,
- CaseType::UpperCamelCase,
- IdentType::Enum,
- );
+ // rustc implementation excuses repr(C) since C structs predominantly don't
+ // use camel case.
+ let has_repr_c = data.repr(self.db, enum_id).is_some_and(|repr| repr.c());
+ if !has_repr_c {
+ self.create_incorrect_case_diagnostic_for_item_name(
+ enum_id,
+ &data.name,
+ CaseType::UpperCamelCase,
+ IdentType::Enum,
+ );
+ }
// Check the variant names.
self.validate_enum_variants(enum_id)
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index b9e23464e9..43b428c3fa 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -7,10 +7,11 @@ use std::{
mem,
};
-use base_db::Crate;
+use base_db::{Crate, FxIndexMap};
use either::Either;
use hir_def::{
- FindPathConfig, GenericDefId, HasModule, LocalFieldId, Lookup, ModuleDefId, ModuleId, TraitId,
+ FindPathConfig, GenericDefId, GenericParamId, HasModule, LocalFieldId, Lookup, ModuleDefId,
+ ModuleId, TraitId,
db::DefDatabase,
expr_store::{ExpressionStore, path::Path},
find_path::{self, PrefixKind},
@@ -66,6 +67,7 @@ pub type Result<T = (), E = HirDisplayError> = std::result::Result<T, E>;
pub trait HirWrite: fmt::Write {
fn start_location_link(&mut self, _location: ModuleDefId) {}
+ fn start_location_link_generic(&mut self, _location: GenericParamId) {}
fn end_location_link(&mut self) {}
}
@@ -143,11 +145,15 @@ impl<'db> BoundsFormattingCtx<'db> {
}
impl<'db> HirFormatter<'_, 'db> {
- fn start_location_link(&mut self, location: ModuleDefId) {
+ pub fn start_location_link(&mut self, location: ModuleDefId) {
self.fmt.start_location_link(location);
}
- fn end_location_link(&mut self) {
+ pub fn start_location_link_generic(&mut self, location: GenericParamId) {
+ self.fmt.start_location_link_generic(location);
+ }
+
+ pub fn end_location_link(&mut self) {
self.fmt.end_location_link();
}
@@ -686,7 +692,9 @@ impl<'db> HirDisplay<'db> for Const<'db> {
ConstKind::Param(param) => {
let generics = generics(f.db, param.id.parent());
let param_data = &generics[param.id.local_id()];
+ f.start_location_link_generic(param.id.into());
write!(f, "{}", param_data.name().unwrap().display(f.db, f.edition()))?;
+ f.end_location_link();
Ok(())
}
ConstKind::Value(const_bytes) => render_const_scalar(
@@ -1383,37 +1391,30 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
}
_ => (),
}
- let sig = substs
- .split_closure_args_untupled()
- .closure_sig_as_fn_ptr_ty
- .callable_sig(interner);
- if let Some(sig) = sig {
- let sig = sig.skip_binder();
- let InternedClosure(def, _) = db.lookup_intern_closure(id);
- let infer = InferenceResult::for_body(db, def);
- let (_, kind) = infer.closure_info(id);
- match f.closure_style {
- ClosureStyle::ImplFn => write!(f, "impl {kind:?}(")?,
- ClosureStyle::RANotation => write!(f, "|")?,
- _ => unreachable!(),
- }
- if sig.inputs().is_empty() {
- } else if f.should_truncate() {
- write!(f, "{TYPE_HINT_TRUNCATION}")?;
- } else {
- f.write_joined(sig.inputs(), ", ")?;
- };
- match f.closure_style {
- ClosureStyle::ImplFn => write!(f, ")")?,
- ClosureStyle::RANotation => write!(f, "|")?,
- _ => unreachable!(),
- }
- if f.closure_style == ClosureStyle::RANotation || !sig.output().is_unit() {
- write!(f, " -> ")?;
- sig.output().hir_fmt(f)?;
- }
+ let sig = interner.signature_unclosure(substs.as_closure().sig(), Safety::Safe);
+ let sig = sig.skip_binder();
+ let InternedClosure(def, _) = db.lookup_intern_closure(id);
+ let infer = InferenceResult::for_body(db, def);
+ let (_, kind) = infer.closure_info(id);
+ match f.closure_style {
+ ClosureStyle::ImplFn => write!(f, "impl {kind:?}(")?,
+ ClosureStyle::RANotation => write!(f, "|")?,
+ _ => unreachable!(),
+ }
+ if sig.inputs().is_empty() {
+ } else if f.should_truncate() {
+ write!(f, "{TYPE_HINT_TRUNCATION}")?;
} else {
- write!(f, "{{closure}}")?;
+ f.write_joined(sig.inputs(), ", ")?;
+ };
+ match f.closure_style {
+ ClosureStyle::ImplFn => write!(f, ")")?,
+ ClosureStyle::RANotation => write!(f, "|")?,
+ _ => unreachable!(),
+ }
+ if f.closure_style == ClosureStyle::RANotation || !sig.output().is_unit() {
+ write!(f, " -> ")?;
+ sig.output().hir_fmt(f)?;
}
}
TyKind::CoroutineClosure(id, args) => {
@@ -1496,6 +1497,7 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
match param_data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
+ f.start_location_link_generic(param.id.into());
write!(
f,
"{}",
@@ -1503,7 +1505,8 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
.clone()
.unwrap_or_else(Name::missing)
.display(f.db, f.edition())
- )?
+ )?;
+ f.end_location_link();
}
TypeParamProvenance::ArgumentImplTrait => {
let bounds = GenericPredicates::query_all(f.db, param.id.parent())
@@ -1526,7 +1529,9 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
}
},
TypeOrConstParamData::ConstParamData(p) => {
+ f.start_location_link_generic(param.id.into());
write!(f, "{}", p.name.display(f.db, f.edition()))?;
+ f.end_location_link();
}
}
}
@@ -1978,6 +1983,49 @@ fn write_bounds_like_dyn_trait<'db>(
Ok(())
}
+pub fn write_params_bounds<'db>(
+ f: &mut HirFormatter<'_, 'db>,
+ predicates: &[Clause<'db>],
+) -> Result {
+ // Use an FxIndexMap to keep user's order, as far as possible.
+ let mut per_type = FxIndexMap::<_, Vec<_>>::default();
+ for &predicate in predicates {
+ let base_ty = match predicate.kind().skip_binder() {
+ ClauseKind::Trait(clause) => Either::Left(clause.self_ty()),
+ ClauseKind::RegionOutlives(clause) => Either::Right(clause.0),
+ ClauseKind::TypeOutlives(clause) => Either::Left(clause.0),
+ ClauseKind::Projection(clause) => Either::Left(clause.self_ty()),
+ ClauseKind::ConstArgHasType(..)
+ | ClauseKind::WellFormed(_)
+ | ClauseKind::ConstEvaluatable(_)
+ | ClauseKind::HostEffect(..)
+ | ClauseKind::UnstableFeature(_) => continue,
+ };
+ per_type.entry(base_ty).or_default().push(predicate);
+ }
+
+ for (base_ty, clauses) in per_type {
+ f.write_str(" ")?;
+ match base_ty {
+ Either::Left(it) => it.hir_fmt(f)?,
+ Either::Right(it) => it.hir_fmt(f)?,
+ }
+ f.write_str(": ")?;
+ // Rudimentary approximation: type params are `Sized` by default, everything else not.
+ // FIXME: This is not correct, really. But I'm not sure how we can from the ty representation
+ // to extract the default sizedness, and if it's possible at all.
+ let default_sized = match base_ty {
+ Either::Left(ty) if matches!(ty.kind(), TyKind::Param(_)) => {
+ SizedByDefault::Sized { anchor: f.krate() }
+ }
+ _ => SizedByDefault::NotSized,
+ };
+ write_bounds_like_dyn_trait(f, base_ty, &clauses, default_sized)?;
+ f.write_str(",\n")?;
+ }
+ Ok(())
+}
+
impl<'db> HirDisplay<'db> for TraitRef<'db> {
fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result {
let trait_ = self.def_id.0;
@@ -1995,7 +2043,9 @@ impl<'db> HirDisplay<'db> for Region<'db> {
RegionKind::ReEarlyParam(param) => {
let generics = generics(f.db, param.id.parent);
let param_data = &generics[param.id.local_id];
+ f.start_location_link_generic(param.id.into());
write!(f, "{}", param_data.name.display(f.db, f.edition()))?;
+ f.end_location_link();
Ok(())
}
RegionKind::ReBound(BoundVarIndexKind::Bound(db), idx) => {
diff --git a/crates/hir-ty/src/drop.rs b/crates/hir-ty/src/drop.rs
index 66692143bc..9d6869eee9 100644
--- a/crates/hir-ty/src/drop.rs
+++ b/crates/hir-ty/src/drop.rs
@@ -32,7 +32,7 @@ fn has_destructor(interner: DbInterner<'_>, adt: AdtId) -> bool {
},
None => TraitImpls::for_crate(db, module.krate(db)),
};
- !impls.for_trait_and_self_ty(drop_trait, &SimplifiedType::Adt(adt.into())).is_empty()
+ !impls.for_trait_and_self_ty(drop_trait, &SimplifiedType::Adt(adt.into())).0.is_empty()
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index 19ffa3a939..d1391ad24e 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -68,7 +68,6 @@ impl<'db> InferenceContext<'_, 'db> {
let ClosureSignatures { bound_sig, liberated_sig } =
self.sig_of_closure(arg_types, ret_type, expected_sig);
let body_ret_ty = bound_sig.output().skip_binder();
- let sig_ty = Ty::new_fn_ptr(interner, bound_sig);
let parent_args = GenericArgs::identity_for_item(interner, self.generic_def.into());
// FIXME: Make this an infer var and infer it later.
@@ -117,6 +116,16 @@ impl<'db> InferenceContext<'_, 'db> {
}
None => {}
};
+ let sig = bound_sig.map_bound(|sig| {
+ interner.mk_fn_sig(
+ [Ty::new_tup(interner, sig.inputs())],
+ sig.output(),
+ sig.c_variadic,
+ sig.safety,
+ sig.abi,
+ )
+ });
+ let sig_ty = Ty::new_fn_ptr(interner, sig);
// FIXME: Infer the kind later if needed.
let parts = ClosureArgsParts {
parent_args: parent_args.as_slice(),
diff --git a/crates/hir-ty/src/infer/closure/analysis.rs b/crates/hir-ty/src/infer/closure/analysis.rs
index 5b0360071d..b25901cc3b 100644
--- a/crates/hir-ty/src/infer/closure/analysis.rs
+++ b/crates/hir-ty/src/infer/closure/analysis.rs
@@ -1,10 +1,10 @@
//! Post-inference closure analysis: captures and closure kind.
-use std::{cmp, convert::Infallible, mem};
+use std::{cmp, mem};
-use either::Either;
+use base_db::Crate;
use hir_def::{
- DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId,
+ DefWithBodyId, FieldId, HasModule, VariantId,
expr_store::path::Path,
hir::{
Array, AsmOperand, BinaryOp, BindingId, CaptureBy, Expr, ExprId, ExprOrPatId, Pat, PatId,
@@ -15,7 +15,7 @@ use hir_def::{
};
use rustc_ast_ir::Mutability;
use rustc_hash::{FxHashMap, FxHashSet};
-use rustc_type_ir::inherent::{IntoKind, Ty as _};
+use rustc_type_ir::inherent::{GenericArgs as _, IntoKind, Ty as _};
use smallvec::{SmallVec, smallvec};
use stdx::{format_to, never};
use syntax::utils::is_raw_identifier;
@@ -23,33 +23,97 @@ use syntax::utils::is_raw_identifier;
use crate::{
Adjust, Adjustment, BindingMode,
db::{HirDatabase, InternedClosure, InternedClosureId},
+ display::{DisplayTarget, HirDisplay as _},
infer::InferenceContext,
- mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem},
- next_solver::{DbInterner, GenericArgs, StoredEarlyBinder, StoredTy, Ty, TyKind},
+ mir::{BorrowKind, MirSpan, MutBorrowKind},
+ next_solver::{
+ DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, StoredEarlyBinder, StoredTy, Ty,
+ TyKind,
+ infer::{InferCtxt, traits::ObligationCause},
+ obligation_ctxt::ObligationCtxt,
+ },
traits::FnTrait,
};
// The below functions handle capture and closure kind (Fn, FnMut, ..)
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub(crate) enum HirPlaceProjection {
+ Deref,
+ Field(FieldId),
+ TupleField(u32),
+}
+
+impl HirPlaceProjection {
+ fn projected_ty<'db>(
+ self,
+ infcx: &InferCtxt<'db>,
+ env: ParamEnv<'db>,
+ mut base: Ty<'db>,
+ krate: Crate,
+ ) -> Ty<'db> {
+ let interner = infcx.interner;
+ let db = interner.db;
+ if base.is_ty_error() {
+ return Ty::new_error(interner, ErrorGuaranteed);
+ }
+
+ if matches!(base.kind(), TyKind::Alias(..)) {
+ let mut ocx = ObligationCtxt::new(infcx);
+ match ocx.structurally_normalize_ty(&ObligationCause::dummy(), env, base) {
+ Ok(it) => base = it,
+ Err(_) => return Ty::new_error(interner, ErrorGuaranteed),
+ }
+ }
+ match self {
+ HirPlaceProjection::Deref => match base.kind() {
+ TyKind::RawPtr(inner, _) | TyKind::Ref(_, inner, _) => inner,
+ TyKind::Adt(adt_def, subst) if adt_def.is_box() => subst.type_at(0),
+ _ => {
+ never!(
+ "Overloaded deref on type {} is not a projection",
+ base.display(db, DisplayTarget::from_crate(db, krate))
+ );
+ Ty::new_error(interner, ErrorGuaranteed)
+ }
+ },
+ HirPlaceProjection::Field(f) => match base.kind() {
+ TyKind::Adt(_, subst) => {
+ db.field_types(f.parent)[f.local_id].get().instantiate(interner, subst)
+ }
+ ty => {
+ never!("Only adt has field, found {:?}", ty);
+ Ty::new_error(interner, ErrorGuaranteed)
+ }
+ },
+ HirPlaceProjection::TupleField(idx) => match base.kind() {
+ TyKind::Tuple(subst) => {
+ subst.as_slice().get(idx as usize).copied().unwrap_or_else(|| {
+ never!("Out of bound tuple field");
+ Ty::new_error(interner, ErrorGuaranteed)
+ })
+ }
+ ty => {
+ never!("Only tuple has tuple field: {:?}", ty);
+ Ty::new_error(interner, ErrorGuaranteed)
+ }
+ },
+ }
+ }
+}
+
#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)]
pub(crate) struct HirPlace {
pub(crate) local: BindingId,
- pub(crate) projections: Vec<ProjectionElem<Infallible>>,
+ pub(crate) projections: Vec<HirPlaceProjection>,
}
impl HirPlace {
fn ty<'db>(&self, ctx: &mut InferenceContext<'_, 'db>) -> Ty<'db> {
+ let krate = ctx.krate();
let mut ty = ctx.table.resolve_completely(ctx.result.binding_ty(self.local));
for p in &self.projections {
- ty = p.projected_ty(
- &ctx.table.infer_ctxt,
- ctx.table.param_env,
- ty,
- |_, _, _| {
- unreachable!("Closure field only happens in MIR");
- },
- ctx.owner.module(ctx.db).krate(ctx.db),
- );
+ ty = p.projected_ty(ctx.infcx(), ctx.table.param_env, ty, krate);
}
ty
}
@@ -62,7 +126,7 @@ impl HirPlace {
if let CaptureKind::ByRef(BorrowKind::Mut {
kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow,
}) = current_capture
- && self.projections[len..].contains(&ProjectionElem::Deref)
+ && self.projections[len..].contains(&HirPlaceProjection::Deref)
{
current_capture =
CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture });
@@ -98,12 +162,12 @@ impl CapturedItem {
/// Returns whether this place has any field (aka. non-deref) projections.
pub fn has_field_projections(&self) -> bool {
- self.place.projections.iter().any(|it| !matches!(it, ProjectionElem::Deref))
+ self.place.projections.iter().any(|it| !matches!(it, HirPlaceProjection::Deref))
}
pub fn ty<'db>(&self, db: &'db dyn HirDatabase, subst: GenericArgs<'db>) -> Ty<'db> {
let interner = DbInterner::new_no_crate(db);
- self.ty.get().instantiate(interner, subst.split_closure_args_untupled().parent_args)
+ self.ty.get().instantiate(interner, subst.as_closure().parent_args())
}
pub fn kind(&self) -> CaptureKind {
@@ -120,8 +184,8 @@ impl CapturedItem {
let mut result = body[self.place.local].name.as_str().to_owned();
for proj in &self.place.projections {
match proj {
- ProjectionElem::Deref => {}
- ProjectionElem::Field(Either::Left(f)) => {
+ HirPlaceProjection::Deref => {}
+ HirPlaceProjection::Field(f) => {
let variant_data = f.parent.fields(db);
match variant_data.shape {
FieldsShape::Record => {
@@ -138,14 +202,8 @@ impl CapturedItem {
FieldsShape::Unit => {}
}
}
- ProjectionElem::Field(Either::Right(f)) => format_to!(result, "_{}", f.index),
- &ProjectionElem::ClosureField(field) => format_to!(result, "_{field}"),
- ProjectionElem::Index(_)
- | ProjectionElem::ConstantIndex { .. }
- | ProjectionElem::Subslice { .. }
- | ProjectionElem::OpaqueCast(_) => {
- never!("Not happen in closure capture");
- continue;
+ HirPlaceProjection::TupleField(idx) => {
+ format_to!(result, "_{idx}")
}
}
}
@@ -163,8 +221,8 @@ impl CapturedItem {
for proj in &self.place.projections {
match proj {
// In source code autoderef kicks in.
- ProjectionElem::Deref => {}
- ProjectionElem::Field(Either::Left(f)) => {
+ HirPlaceProjection::Deref => {}
+ HirPlaceProjection::Field(f) => {
let variant_data = f.parent.fields(db);
match variant_data.shape {
FieldsShape::Record => format_to!(
@@ -184,19 +242,8 @@ impl CapturedItem {
FieldsShape::Unit => {}
}
}
- ProjectionElem::Field(Either::Right(f)) => {
- let field = f.index;
- format_to!(result, ".{field}");
- }
- &ProjectionElem::ClosureField(field) => {
- format_to!(result, ".{field}");
- }
- ProjectionElem::Index(_)
- | ProjectionElem::ConstantIndex { .. }
- | ProjectionElem::Subslice { .. }
- | ProjectionElem::OpaqueCast(_) => {
- never!("Not happen in closure capture");
- continue;
+ HirPlaceProjection::TupleField(idx) => {
+ format_to!(result, ".{idx}")
}
}
}
@@ -205,7 +252,7 @@ impl CapturedItem {
.projections
.iter()
.rev()
- .take_while(|proj| matches!(proj, ProjectionElem::Deref))
+ .take_while(|proj| matches!(proj, HirPlaceProjection::Deref))
.count();
result.insert_str(0, &"*".repeat(final_derefs_count));
result
@@ -219,11 +266,11 @@ impl CapturedItem {
let mut field_need_paren = false;
for proj in &self.place.projections {
match proj {
- ProjectionElem::Deref => {
+ HirPlaceProjection::Deref => {
result = format!("*{result}");
field_need_paren = true;
}
- ProjectionElem::Field(Either::Left(f)) => {
+ HirPlaceProjection::Field(f) => {
if field_need_paren {
result = format!("({result})");
}
@@ -243,28 +290,13 @@ impl CapturedItem {
result = format!("{result}.{field}");
field_need_paren = false;
}
- ProjectionElem::Field(Either::Right(f)) => {
- let field = f.index;
- if field_need_paren {
- result = format!("({result})");
- }
- result = format!("{result}.{field}");
- field_need_paren = false;
- }
- &ProjectionElem::ClosureField(field) => {
+ HirPlaceProjection::TupleField(idx) => {
if field_need_paren {
result = format!("({result})");
}
- result = format!("{result}.{field}");
+ result = format!("{result}.{idx}");
field_need_paren = false;
}
- ProjectionElem::Index(_)
- | ProjectionElem::ConstantIndex { .. }
- | ProjectionElem::Subslice { .. }
- | ProjectionElem::OpaqueCast(_) => {
- never!("Not happen in closure capture");
- continue;
- }
}
}
result
@@ -345,7 +377,9 @@ impl<'db> InferenceContext<'_, 'db> {
let mut place = self.place_of_expr(*expr)?;
let field = self.result.field_resolution(tgt_expr)?;
self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
- place.projections.push(ProjectionElem::Field(field));
+ place.projections.push(field.either(HirPlaceProjection::Field, |f| {
+ HirPlaceProjection::TupleField(f.index)
+ }));
return Some(place);
}
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
@@ -357,7 +391,7 @@ impl<'db> InferenceContext<'_, 'db> {
if is_builtin_deref {
let mut place = self.place_of_expr(*expr)?;
self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
- place.projections.push(ProjectionElem::Deref);
+ place.projections.push(HirPlaceProjection::Deref);
return Some(place);
}
}
@@ -832,9 +866,6 @@ impl<'db> InferenceContext<'_, 'db> {
&self.table.infer_ctxt,
self.table.param_env,
ty,
- |_, _, _| {
- unreachable!("Closure field only happens in MIR");
- },
self.owner.module(self.db).krate(self.db),
);
if ty.is_raw_ptr() || ty.is_union() {
@@ -853,7 +884,7 @@ impl<'db> InferenceContext<'_, 'db> {
let mut current_captures = std::mem::take(&mut self.current_captures);
for capture in &mut current_captures {
if let Some(first_deref) =
- capture.place.projections.iter().position(|proj| *proj == ProjectionElem::Deref)
+ capture.place.projections.iter().position(|proj| *proj == HirPlaceProjection::Deref)
{
self.truncate_capture_spans(capture, first_deref);
capture.place.projections.truncate(first_deref);
@@ -876,7 +907,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
match it.next() {
Some(it) => {
- lookup_place.projections.push(it.clone());
+ lookup_place.projections.push(*it);
}
None => break None,
}
@@ -903,7 +934,7 @@ impl<'db> InferenceContext<'_, 'db> {
fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) {
let adjustments_count =
self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default();
- place.projections.extend((0..adjustments_count).map(|_| ProjectionElem::Deref));
+ place.projections.extend((0..adjustments_count).map(|_| HirPlaceProjection::Deref));
self.current_capture_span_stack
.extend((0..adjustments_count).map(|_| MirSpan::PatId(tgt_pat)));
'reset_span_stack: {
@@ -920,10 +951,7 @@ impl<'db> InferenceContext<'_, 'db> {
for (&arg, i) in it {
let mut p = place.clone();
self.current_capture_span_stack.push(MirSpan::PatId(arg));
- p.projections.push(ProjectionElem::Field(Either::Right(TupleFieldId {
- tuple: TupleId(!0), // dummy this, as its unused anyways
- index: i as u32,
- })));
+ p.projections.push(HirPlaceProjection::TupleField(i as u32));
self.consume_with_pat(p, arg);
self.current_capture_span_stack.pop();
}
@@ -950,10 +978,10 @@ impl<'db> InferenceContext<'_, 'db> {
};
let mut p = place.clone();
self.current_capture_span_stack.push(MirSpan::PatId(arg));
- p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
+ p.projections.push(HirPlaceProjection::Field(FieldId {
parent: variant,
local_id,
- })));
+ }));
self.consume_with_pat(p, arg);
self.current_capture_span_stack.pop();
}
@@ -1005,10 +1033,10 @@ impl<'db> InferenceContext<'_, 'db> {
for (&arg, (i, _)) in it {
let mut p = place.clone();
self.current_capture_span_stack.push(MirSpan::PatId(arg));
- p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
+ p.projections.push(HirPlaceProjection::Field(FieldId {
parent: variant,
local_id: i,
- })));
+ }));
self.consume_with_pat(p, arg);
self.current_capture_span_stack.pop();
}
@@ -1017,7 +1045,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
Pat::Ref { pat, mutability: _ } => {
self.current_capture_span_stack.push(MirSpan::PatId(tgt_pat));
- place.projections.push(ProjectionElem::Deref);
+ place.projections.push(HirPlaceProjection::Deref);
self.consume_with_pat(place, *pat);
self.current_capture_span_stack.pop();
}
@@ -1071,7 +1099,7 @@ impl<'db> InferenceContext<'_, 'db> {
CaptureKind::ByRef(BorrowKind::Mut {
kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow
})
- ) && !item.place.projections.contains(&ProjectionElem::Deref)
+ ) && !item.place.projections.contains(&HirPlaceProjection::Deref)
{
// FIXME: remove the `mutated_bindings_in_closure` completely and add proper fake reads in
// MIR. I didn't do that due duplicate diagnostics.
@@ -1221,7 +1249,7 @@ fn apply_adjusts_to_place(
match &adj.kind {
Adjust::Deref(None) => {
current_capture_span_stack.push(span);
- r.projections.push(ProjectionElem::Deref);
+ r.projections.push(HirPlaceProjection::Deref);
}
_ => return None,
}
diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs
index bb9cb1c1ca..e79868f4ae 100644
--- a/crates/hir-ty/src/infer/coerce.rs
+++ b/crates/hir-ty/src/infer/coerce.rs
@@ -46,7 +46,9 @@ use rustc_type_ir::{
BoundVar, DebruijnIndex, TyVid, TypeAndMut, TypeFoldable, TypeFolder, TypeSuperFoldable,
TypeVisitableExt,
error::TypeError,
- inherent::{Const as _, GenericArg as _, IntoKind, Safety, SliceLike, Ty as _},
+ inherent::{
+ Const as _, GenericArg as _, GenericArgs as _, IntoKind, Safety as _, SliceLike, Ty as _,
+ },
};
use smallvec::{SmallVec, smallvec};
use tracing::{debug, instrument};
@@ -54,7 +56,7 @@ use tracing::{debug, instrument};
use crate::{
Adjust, Adjustment, AutoBorrow, ParamEnvAndCrate, PointerCast, TargetFeatures,
autoderef::Autoderef,
- db::{HirDatabase, InternedClosureId},
+ db::{HirDatabase, InternedClosure, InternedClosureId},
infer::{
AllowTwoPhase, AutoBorrowMutability, InferenceContext, TypeMismatch, expr::ExprIsRead,
},
@@ -63,6 +65,7 @@ use crate::{
Canonical, ClauseKind, CoercePredicate, Const, ConstKind, DbInterner, ErrorGuaranteed,
GenericArgs, ParamEnv, PolyFnSig, PredicateKind, Region, RegionKind, TraitRef, Ty, TyKind,
TypingMode,
+ abi::Safety,
infer::{
DbInternerInferExt, InferCtxt, InferOk, InferResult,
relate::RelateResult,
@@ -71,6 +74,7 @@ use crate::{
},
obligation_ctxt::ObligationCtxt,
},
+ upvars::upvars_mentioned,
utils::TargetFeatureIsSafeInTarget,
};
@@ -893,7 +897,7 @@ where
fn coerce_closure_to_fn(
&mut self,
a: Ty<'db>,
- _closure_def_id_a: InternedClosureId,
+ closure_def_id_a: InternedClosureId,
args_a: GenericArgs<'db>,
b: Ty<'db>,
) -> CoerceResult<'db> {
@@ -901,19 +905,7 @@ where
debug_assert!(self.infcx().shallow_resolve(b) == b);
match b.kind() {
- // FIXME: We need to have an `upvars_mentioned()` query:
- // At this point we haven't done capture analysis, which means
- // that the ClosureArgs just contains an inference variable instead
- // of tuple of captured types.
- //
- // All we care here is if any variable is being captured and not the exact paths,
- // so we check `upvars_mentioned` for root variables being captured.
- TyKind::FnPtr(_, hdr) =>
- // if self
- // .db
- // .upvars_mentioned(closure_def_id_a.expect_local())
- // .is_none_or(|u| u.is_empty()) =>
- {
+ TyKind::FnPtr(_, hdr) if !is_capturing_closure(self.db(), closure_def_id_a) => {
// We coerce the closure, which has fn type
// `extern "rust-call" fn((arg0,arg1,...)) -> _`
// to
@@ -921,10 +913,8 @@ where
// or
// `unsafe fn(arg0,arg1,...) -> _`
let safety = hdr.safety;
- let closure_sig = args_a.closure_sig_untupled().map_bound(|mut sig| {
- sig.safety = hdr.safety;
- sig
- });
+ let closure_sig =
+ self.interner().signature_unclosure(args_a.as_closure().sig(), safety);
let pointer_ty = Ty::new_fn_ptr(self.interner(), closure_sig);
debug!("coerce_closure_to_fn(a={:?}, b={:?}, pty={:?})", a, b, pointer_ty);
self.unify_and(
@@ -1088,14 +1078,12 @@ impl<'db> InferenceContext<'_, 'db> {
// Special-case that coercion alone cannot handle:
// Function items or non-capturing closures of differing IDs or GenericArgs.
let (a_sig, b_sig) = {
- let is_capturing_closure = |_ty: Ty<'db>| {
- // FIXME:
- // if let TyKind::Closure(closure_def_id, _args) = ty.kind() {
- // self.db.upvars_mentioned(closure_def_id.expect_local()).is_some()
- // } else {
- // false
- // }
- false
+ let is_capturing_closure = |ty: Ty<'db>| {
+ if let TyKind::Closure(closure_def_id, _args) = ty.kind() {
+ is_capturing_closure(self.db, closure_def_id.0)
+ } else {
+ false
+ }
};
if is_capturing_closure(prev_ty) || is_capturing_closure(new_ty) {
(None, None)
@@ -1125,23 +1113,28 @@ impl<'db> InferenceContext<'_, 'db> {
}
(TyKind::Closure(_, args), TyKind::FnDef(..)) => {
let b_sig = new_ty.fn_sig(self.table.interner());
- let a_sig = args.closure_sig_untupled().map_bound(|mut sig| {
- sig.safety = b_sig.safety();
- sig
- });
+ let a_sig = self
+ .interner()
+ .signature_unclosure(args.as_closure().sig(), b_sig.safety());
(Some(a_sig), Some(b_sig))
}
(TyKind::FnDef(..), TyKind::Closure(_, args)) => {
let a_sig = prev_ty.fn_sig(self.table.interner());
- let b_sig = args.closure_sig_untupled().map_bound(|mut sig| {
- sig.safety = a_sig.safety();
- sig
- });
+ let b_sig = self
+ .interner()
+ .signature_unclosure(args.as_closure().sig(), a_sig.safety());
(Some(a_sig), Some(b_sig))
}
- (TyKind::Closure(_, args_a), TyKind::Closure(_, args_b)) => {
- (Some(args_a.closure_sig_untupled()), Some(args_b.closure_sig_untupled()))
- }
+ (TyKind::Closure(_, args_a), TyKind::Closure(_, args_b)) => (
+ Some(
+ self.interner()
+ .signature_unclosure(args_a.as_closure().sig(), Safety::Safe),
+ ),
+ Some(
+ self.interner()
+ .signature_unclosure(args_b.as_closure().sig(), Safety::Safe),
+ ),
+ ),
_ => (None, None),
}
}
@@ -1722,3 +1715,9 @@ fn coerce<'db>(
.collect();
Ok((adjustments, ty))
}
+
+fn is_capturing_closure(db: &dyn HirDatabase, closure: InternedClosureId) -> bool {
+ let InternedClosure(owner, expr) = closure.loc(db);
+ upvars_mentioned(db, owner)
+ .is_some_and(|upvars| upvars.get(&expr).is_some_and(|upvars| !upvars.is_empty()))
+}
diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs
index b6ad3624ae..525100439f 100644
--- a/crates/hir-ty/src/layout.rs
+++ b/crates/hir-ty/src/layout.rs
@@ -14,7 +14,10 @@ use rustc_abi::{
TargetDataLayout, WrappingRange,
};
use rustc_index::IndexVec;
-use rustc_type_ir::{FloatTy, IntTy, UintTy, inherent::IntoKind};
+use rustc_type_ir::{
+ FloatTy, IntTy, UintTy,
+ inherent::{GenericArgs as _, IntoKind},
+};
use triomphe::Arc;
use crate::{
@@ -335,10 +338,7 @@ pub fn layout_of_ty_query(
let fields = captures
.iter()
.map(|it| {
- let ty = it
- .ty
- .get()
- .instantiate(interner, args.split_closure_args_untupled().parent_args);
+ let ty = it.ty.get().instantiate(interner, args.as_closure().parent_args());
db.layout_of_ty(ty.store(), trait_env.clone())
})
.collect::<Result<Vec<_>, _>>()?;
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index 1674771413..f8920904f0 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -25,6 +25,7 @@ extern crate ra_ap_rustc_next_trait_solver as rustc_next_trait_solver;
extern crate self as hir_ty;
+pub mod builtin_derive;
mod infer;
mod inhabitedness;
mod lower;
@@ -49,6 +50,7 @@ pub mod method_resolution;
pub mod mir;
pub mod primitive;
pub mod traits;
+pub mod upvars;
#[cfg(test)]
mod test_db;
@@ -228,6 +230,7 @@ pub enum FnAbi {
Win64,
Win64Unwind,
X86Interrupt,
+ RustPreserveNone,
Unknown,
}
@@ -269,6 +272,7 @@ impl FnAbi {
s if *s == sym::riscv_dash_interrupt_dash_s => FnAbi::RiscvInterruptS,
s if *s == sym::rust_dash_call => FnAbi::RustCall,
s if *s == sym::rust_dash_cold => FnAbi::RustCold,
+ s if *s == sym::rust_dash_preserve_dash_none => FnAbi::RustPreserveNone,
s if *s == sym::rust_dash_intrinsic => FnAbi::RustIntrinsic,
s if *s == sym::Rust => FnAbi::Rust,
s if *s == sym::stdcall_dash_unwind => FnAbi::StdcallUnwind,
@@ -312,6 +316,7 @@ impl FnAbi {
FnAbi::Rust => "Rust",
FnAbi::RustCall => "rust-call",
FnAbi::RustCold => "rust-cold",
+ FnAbi::RustPreserveNone => "rust-preserve-none",
FnAbi::RustIntrinsic => "rust-intrinsic",
FnAbi::Stdcall => "stdcall",
FnAbi::StdcallUnwind => "stdcall-unwind",
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 62a5837f34..8e1ea9c478 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -27,8 +27,8 @@ use hir_def::{
resolver::{HasResolver, LifetimeNs, Resolver, TypeNs, ValueNs},
signatures::{FunctionSignature, TraitFlags, TypeAliasFlags},
type_ref::{
- ConstRef, LifetimeRefId, LiteralConstRef, PathId, TraitBoundModifier,
- TraitRef as HirTraitRef, TypeBound, TypeRef, TypeRefId,
+ ConstRef, LifetimeRefId, PathId, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound,
+ TypeRef, TypeRefId,
},
};
use hir_expand::name::Name;
@@ -281,21 +281,9 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
hir_def::hir::Expr::Path(path) => {
self.path_to_const(path).unwrap_or_else(|| unknown_const(const_type))
}
- hir_def::hir::Expr::Literal(literal) => intern_const_ref(
- self.db,
- &match *literal {
- hir_def::hir::Literal::Float(_, _)
- | hir_def::hir::Literal::String(_)
- | hir_def::hir::Literal::ByteString(_)
- | hir_def::hir::Literal::CString(_) => LiteralConstRef::Unknown,
- hir_def::hir::Literal::Char(c) => LiteralConstRef::Char(c),
- hir_def::hir::Literal::Bool(b) => LiteralConstRef::Bool(b),
- hir_def::hir::Literal::Int(val, _) => LiteralConstRef::Int(val),
- hir_def::hir::Literal::Uint(val, _) => LiteralConstRef::UInt(val),
- },
- const_type,
- self.resolver.krate(),
- ),
+ hir_def::hir::Expr::Literal(literal) => {
+ intern_const_ref(self.db, literal, const_type, self.resolver.krate())
+ }
hir_def::hir::Expr::UnaryOp { expr: inner_expr, op: hir_def::hir::UnaryOp::Neg } => {
if let hir_def::hir::Expr::Literal(literal) = &self.store[*inner_expr] {
// Only handle negation for signed integers and floats
@@ -304,7 +292,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
if let Some(negated_literal) = literal.clone().negate() {
intern_const_ref(
self.db,
- &negated_literal.into(),
+ &negated_literal,
const_type,
self.resolver.krate(),
)
@@ -862,7 +850,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
TermKind::Ty(ty) => {
ty.walk().any(|arg| arg == dummy_self_ty.into())
}
- // FIXME(associated_const_equality): We should walk the const instead of not doing anything
+ // FIXME(mgca): We should walk the const instead of not doing anything
TermKind::Const(_) => false,
};
@@ -1319,7 +1307,7 @@ fn type_for_struct_constructor(
db: &dyn HirDatabase,
def: StructId,
) -> Option<StoredEarlyBinder<StoredTy>> {
- let struct_data = def.fields(db);
+ let struct_data = db.struct_signature(def);
match struct_data.shape {
FieldsShape::Record => None,
FieldsShape::Unit => Some(type_for_adt(db, def.into())),
@@ -1791,6 +1779,13 @@ impl<'db> GenericPredicates {
impl GenericPredicates {
#[inline]
+ pub(crate) fn from_explicit_own_predicates(
+ predicates: StoredEarlyBinder<StoredClauses>,
+ ) -> Self {
+ Self { predicates, own_predicates_start: 0, is_trait: false, parent_is_trait: false }
+ }
+
+ #[inline]
pub fn query(db: &dyn HirDatabase, def: GenericDefId) -> &GenericPredicates {
&Self::query_with_diagnostics(db, def).0
}
@@ -1848,6 +1843,20 @@ pub(crate) fn trait_environment_for_body_query(
db.trait_environment(def)
}
+pub(crate) fn param_env_from_predicates<'db>(
+ interner: DbInterner<'db>,
+ predicates: &'db GenericPredicates,
+) -> ParamEnv<'db> {
+ let clauses = rustc_type_ir::elaborate::elaborate(
+ interner,
+ predicates.all_predicates().iter_identity_copied(),
+ );
+ let clauses = Clauses::new_from_iter(interner, clauses);
+
+ // FIXME: We should normalize projections here, like rustc does.
+ ParamEnv { clauses }
+}
+
pub(crate) fn trait_environment<'db>(db: &'db dyn HirDatabase, def: GenericDefId) -> ParamEnv<'db> {
return ParamEnv { clauses: trait_environment_query(db, def).as_ref() };
@@ -1858,13 +1867,8 @@ pub(crate) fn trait_environment<'db>(db: &'db dyn HirDatabase, def: GenericDefId
) -> StoredClauses {
let module = def.module(db);
let interner = DbInterner::new_with(db, module.krate(db));
- let predicates = GenericPredicates::query_all(db, def);
- let clauses =
- rustc_type_ir::elaborate::elaborate(interner, predicates.iter_identity_copied());
- let clauses = Clauses::new_from_iter(interner, clauses);
-
- // FIXME: We should normalize projections here, like rustc does.
- clauses.store()
+ let predicates = GenericPredicates::query(db, def);
+ param_env_from_predicates(interner, predicates).clauses.store()
}
}
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index c370330a87..e4681b464f 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -13,11 +13,13 @@ use tracing::{debug, instrument};
use base_db::Crate;
use hir_def::{
- AssocItemId, BlockId, ConstId, FunctionId, GenericParamId, HasModule, ImplId, ItemContainerId,
- ModuleId, TraitId,
+ AssocItemId, BlockId, BuiltinDeriveImplId, ConstId, FunctionId, GenericParamId, HasModule,
+ ImplId, ItemContainerId, ModuleId, TraitId,
attrs::AttrFlags,
+ builtin_derive::BuiltinDeriveImplMethod,
expr_store::path::GenericArgs as HirGenericArgs,
hir::ExprId,
+ lang_item::LangItems,
nameres::{DefMap, block_def_map, crate_def_map},
resolver::Resolver,
};
@@ -37,7 +39,7 @@ use crate::{
infer::{InferenceContext, unify::InferenceTable},
lower::GenericPredicates,
next_solver::{
- Binder, ClauseKind, DbInterner, FnSig, GenericArgs, ParamEnv, PredicateKind,
+ AnyImplId, Binder, ClauseKind, DbInterner, FnSig, GenericArgs, ParamEnv, PredicateKind,
SimplifiedType, SolverDefId, TraitRef, Ty, TyKind, TypingMode,
infer::{
BoundRegionConversionTime, DbInternerInferExt, InferCtxt, InferOk,
@@ -132,7 +134,7 @@ pub enum MethodError<'db> {
// candidate can arise. Used for error reporting only.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum CandidateSource {
- Impl(ImplId),
+ Impl(AnyImplId),
Trait(TraitId),
}
@@ -371,9 +373,13 @@ pub fn lookup_impl_const<'db>(
};
lookup_impl_assoc_item_for_trait_ref(infcx, trait_ref, env, name)
- .and_then(
- |assoc| if let (AssocItemId::ConstId(id), s) = assoc { Some((id, s)) } else { None },
- )
+ .and_then(|assoc| {
+ if let (Either::Left(AssocItemId::ConstId(id)), s) = assoc {
+ Some((id, s))
+ } else {
+ None
+ }
+ })
.unwrap_or((const_id, subs))
}
@@ -419,12 +425,12 @@ pub(crate) fn lookup_impl_method_query<'db>(
env: ParamEnvAndCrate<'db>,
func: FunctionId,
fn_subst: GenericArgs<'db>,
-) -> (FunctionId, GenericArgs<'db>) {
+) -> (Either<FunctionId, (BuiltinDeriveImplId, BuiltinDeriveImplMethod)>, GenericArgs<'db>) {
let interner = DbInterner::new_with(db, env.krate);
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let ItemContainerId::TraitId(trait_id) = func.loc(db).container else {
- return (func, fn_subst);
+ return (Either::Left(func), fn_subst);
};
let trait_params = db.generic_params(trait_id.into()).len();
let trait_ref = TraitRef::new_from_args(
@@ -434,16 +440,19 @@ pub(crate) fn lookup_impl_method_query<'db>(
);
let name = &db.function_signature(func).name;
- let Some((impl_fn, impl_subst)) = lookup_impl_assoc_item_for_trait_ref(
- &infcx,
- trait_ref,
- env.param_env,
- name,
- )
- .and_then(|assoc| {
- if let (AssocItemId::FunctionId(id), subst) = assoc { Some((id, subst)) } else { None }
- }) else {
- return (func, fn_subst);
+ let Some((impl_fn, impl_subst)) =
+ lookup_impl_assoc_item_for_trait_ref(&infcx, trait_ref, env.param_env, name).and_then(
+ |(assoc, impl_args)| {
+ let assoc = match assoc {
+ Either::Left(AssocItemId::FunctionId(id)) => Either::Left(id),
+ Either::Right(it) => Either::Right(it),
+ _ => return None,
+ };
+ Some((assoc, impl_args))
+ },
+ )
+ else {
+ return (Either::Left(func), fn_subst);
};
(
@@ -460,22 +469,33 @@ fn lookup_impl_assoc_item_for_trait_ref<'db>(
trait_ref: TraitRef<'db>,
env: ParamEnv<'db>,
name: &Name,
-) -> Option<(AssocItemId, GenericArgs<'db>)> {
+) -> Option<(Either<AssocItemId, (BuiltinDeriveImplId, BuiltinDeriveImplMethod)>, GenericArgs<'db>)>
+{
let (impl_id, impl_subst) = find_matching_impl(infcx, env, trait_ref)?;
+ let impl_id = match impl_id {
+ AnyImplId::ImplId(it) => it,
+ AnyImplId::BuiltinDeriveImplId(impl_) => {
+ return impl_
+ .loc(infcx.interner.db)
+ .trait_
+ .get_method(name.symbol())
+ .map(|method| (Either::Right((impl_, method)), impl_subst));
+ }
+ };
let item =
impl_id.impl_items(infcx.interner.db).items.iter().find_map(|(n, it)| match *it {
AssocItemId::FunctionId(f) => (n == name).then_some(AssocItemId::FunctionId(f)),
AssocItemId::ConstId(c) => (n == name).then_some(AssocItemId::ConstId(c)),
AssocItemId::TypeAliasId(_) => None,
})?;
- Some((item, impl_subst))
+ Some((Either::Left(item), impl_subst))
}
pub(crate) fn find_matching_impl<'db>(
infcx: &InferCtxt<'db>,
env: ParamEnv<'db>,
trait_ref: TraitRef<'db>,
-) -> Option<(ImplId, GenericArgs<'db>)> {
+) -> Option<(AnyImplId, GenericArgs<'db>)> {
let trait_ref = infcx.at(&ObligationCause::dummy(), env).deeply_normalize(trait_ref).ok()?;
let obligation = Obligation::new(infcx.interner, ObligationCause::dummy(), env, trait_ref);
@@ -589,12 +609,7 @@ impl InherentImpls {
map: &mut FxHashMap<SimplifiedType, Vec<ImplId>>,
) {
for (_module_id, module_data) in def_map.modules() {
- for impl_id in module_data.scope.impls() {
- let data = db.impl_signature(impl_id);
- if data.target_trait.is_some() {
- continue;
- }
-
+ for impl_id in module_data.scope.inherent_impls() {
let interner = DbInterner::new_no_crate(db);
let self_ty = db.impl_self_ty(impl_id);
let self_ty = self_ty.instantiate_identity();
@@ -635,13 +650,13 @@ impl InherentImpls {
#[derive(Debug, PartialEq)]
struct OneTraitImpls {
- non_blanket_impls: FxHashMap<SimplifiedType, Box<[ImplId]>>,
+ non_blanket_impls: FxHashMap<SimplifiedType, (Box<[ImplId]>, Box<[BuiltinDeriveImplId]>)>,
blanket_impls: Box<[ImplId]>,
}
#[derive(Default)]
struct OneTraitImplsBuilder {
- non_blanket_impls: FxHashMap<SimplifiedType, Vec<ImplId>>,
+ non_blanket_impls: FxHashMap<SimplifiedType, (Vec<ImplId>, Vec<BuiltinDeriveImplId>)>,
blanket_impls: Vec<ImplId>,
}
@@ -650,7 +665,9 @@ impl OneTraitImplsBuilder {
let mut non_blanket_impls = self
.non_blanket_impls
.into_iter()
- .map(|(self_ty, impls)| (self_ty, impls.into_boxed_slice()))
+ .map(|(self_ty, (impls, builtin_derive_impls))| {
+ (self_ty, (impls.into_boxed_slice(), builtin_derive_impls.into_boxed_slice()))
+ })
.collect::<FxHashMap<_, _>>();
non_blanket_impls.shrink_to_fit();
let blanket_impls = self.blanket_impls.into_boxed_slice();
@@ -691,8 +708,9 @@ impl TraitImpls {
impl TraitImpls {
fn collect_def_map(db: &dyn HirDatabase, def_map: &DefMap) -> Self {
+ let lang_items = hir_def::lang_item::lang_items(db, def_map.krate());
let mut map = FxHashMap::default();
- collect(db, def_map, &mut map);
+ collect(db, def_map, lang_items, &mut map);
let mut map = map
.into_iter()
.map(|(trait_id, trait_map)| (trait_id, trait_map.finish()))
@@ -703,10 +721,15 @@ impl TraitImpls {
fn collect(
db: &dyn HirDatabase,
def_map: &DefMap,
+ lang_items: &LangItems,
map: &mut FxHashMap<TraitId, OneTraitImplsBuilder>,
) {
for (_module_id, module_data) in def_map.modules() {
- for impl_id in module_data.scope.impls() {
+ for impl_id in module_data.scope.trait_impls() {
+ let trait_ref = match db.impl_trait(impl_id) {
+ Some(tr) => tr.instantiate_identity(),
+ None => continue,
+ };
// Reservation impls should be ignored during trait resolution, so we never need
// them during type analysis. See rust-lang/rust#64631 for details.
//
@@ -718,27 +741,34 @@ impl TraitImpls {
{
continue;
}
- let trait_ref = match db.impl_trait(impl_id) {
- Some(tr) => tr.instantiate_identity(),
- None => continue,
- };
let self_ty = trait_ref.self_ty();
let interner = DbInterner::new_no_crate(db);
let entry = map.entry(trait_ref.def_id.0).or_default();
match simplify_type(interner, self_ty, TreatParams::InstantiateWithInfer) {
Some(self_ty) => {
- entry.non_blanket_impls.entry(self_ty).or_default().push(impl_id)
+ entry.non_blanket_impls.entry(self_ty).or_default().0.push(impl_id)
}
None => entry.blanket_impls.push(impl_id),
}
}
+ for impl_id in module_data.scope.builtin_derive_impls() {
+ let loc = impl_id.loc(db);
+ let Some(trait_id) = loc.trait_.get_id(lang_items) else { continue };
+ let entry = map.entry(trait_id).or_default();
+ let entry = entry
+ .non_blanket_impls
+ .entry(SimplifiedType::Adt(loc.adt.into()))
+ .or_default();
+ entry.1.push(impl_id);
+ }
+
// To better support custom derives, collect impls in all unnamed const items.
// const _: () = { ... };
for konst in module_data.scope.unnamed_consts() {
let body = db.body(konst.into());
for (_, block_def_map) in body.blocks(db) {
- collect(db, block_def_map, map);
+ collect(db, block_def_map, lang_items, map);
}
}
}
@@ -761,27 +791,41 @@ impl TraitImpls {
})
}
- pub fn for_trait_and_self_ty(&self, trait_: TraitId, self_ty: &SimplifiedType) -> &[ImplId] {
+ pub fn for_trait_and_self_ty(
+ &self,
+ trait_: TraitId,
+ self_ty: &SimplifiedType,
+ ) -> (&[ImplId], &[BuiltinDeriveImplId]) {
self.map
.get(&trait_)
.and_then(|map| map.non_blanket_impls.get(self_ty))
- .map(|it| &**it)
+ .map(|it| (&*it.0, &*it.1))
.unwrap_or_default()
}
- pub fn for_trait(&self, trait_: TraitId, mut callback: impl FnMut(&[ImplId])) {
+ pub fn for_trait(
+ &self,
+ trait_: TraitId,
+ mut callback: impl FnMut(Either<&[ImplId], &[BuiltinDeriveImplId]>),
+ ) {
if let Some(impls) = self.map.get(&trait_) {
- callback(&impls.blanket_impls);
+ callback(Either::Left(&impls.blanket_impls));
for impls in impls.non_blanket_impls.values() {
- callback(impls);
+ callback(Either::Left(&impls.0));
+ callback(Either::Right(&impls.1));
}
}
}
- pub fn for_self_ty(&self, self_ty: &SimplifiedType, mut callback: impl FnMut(&[ImplId])) {
+ pub fn for_self_ty(
+ &self,
+ self_ty: &SimplifiedType,
+ mut callback: impl FnMut(Either<&[ImplId], &[BuiltinDeriveImplId]>),
+ ) {
for for_trait in self.map.values() {
if let Some(for_ty) = for_trait.non_blanket_impls.get(self_ty) {
- callback(for_ty);
+ callback(Either::Left(&for_ty.0));
+ callback(Either::Right(&for_ty.1));
}
}
}
diff --git a/crates/hir-ty/src/method_resolution/probe.rs b/crates/hir-ty/src/method_resolution/probe.rs
index cb9b810686..4a7c7d9353 100644
--- a/crates/hir-ty/src/method_resolution/probe.rs
+++ b/crates/hir-ty/src/method_resolution/probe.rs
@@ -1001,7 +1001,7 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> {
self.with_impl_item(impl_def_id, |this, item| {
if !this.has_applicable_self(item) {
// No receiver declared. Not a candidate.
- this.record_static_candidate(CandidateSource::Impl(impl_def_id));
+ this.record_static_candidate(CandidateSource::Impl(impl_def_id.into()));
return;
}
this.push_candidate(
@@ -1490,7 +1490,7 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> {
/// so do not use to make a decision that may lead to a successful compilation.
fn candidate_source(&self, candidate: &Candidate<'db>, self_ty: Ty<'db>) -> CandidateSource {
match candidate.kind {
- InherentImplCandidate { impl_def_id, .. } => CandidateSource::Impl(impl_def_id),
+ InherentImplCandidate { impl_def_id, .. } => CandidateSource::Impl(impl_def_id.into()),
ObjectCandidate(trait_ref) | WhereClauseCandidate(trait_ref) => {
CandidateSource::Trait(trait_ref.def_id().0)
}
@@ -1524,7 +1524,7 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> {
fn candidate_source_from_pick(&self, pick: &Pick<'db>) -> CandidateSource {
match pick.kind {
- InherentImplPick(impl_) => CandidateSource::Impl(impl_),
+ InherentImplPick(impl_) => CandidateSource::Impl(impl_.into()),
ObjectPick(trait_) | TraitPick(trait_) => CandidateSource::Trait(trait_),
WhereClausePick(trait_ref) => CandidateSource::Trait(trait_ref.skip_binder().def_id.0),
}
diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs
index 941b6c75bf..dece61a57d 100644
--- a/crates/hir-ty/src/mir/borrowck.rs
+++ b/crates/hir-ty/src/mir/borrowck.rs
@@ -8,6 +8,7 @@ use std::iter;
use hir_def::{DefWithBodyId, HasModule};
use la_arena::ArenaMap;
use rustc_hash::FxHashMap;
+use rustc_type_ir::inherent::GenericArgs as _;
use stdx::never;
use triomphe::Arc;
@@ -123,7 +124,7 @@ fn make_fetch_closure_field<'db>(
let InternedClosure(def, _) = db.lookup_intern_closure(c);
let infer = InferenceResult::for_body(db, def);
let (captures, _) = infer.closure_info(c);
- let parent_subst = subst.split_closure_args_untupled().parent_args;
+ let parent_subst = subst.as_closure().parent_args();
let interner = DbInterner::new_no_crate(db);
captures.get(f).expect("broken closure field").ty.get().instantiate(interner, parent_subst)
}
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index c7156bb11e..5de08313f4 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -27,7 +27,7 @@ use rustc_ast_ir::Mutability;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::{
AliasTyKind,
- inherent::{AdtDef, IntoKind, Region as _, SliceLike, Ty as _},
+ inherent::{AdtDef, GenericArgs as _, IntoKind, Region as _, SliceLike, Ty as _},
};
use span::FileId;
use stdx::never;
@@ -77,12 +77,14 @@ macro_rules! from_bytes {
}).into())
};
}
+use from_bytes;
macro_rules! not_supported {
($it: expr) => {
- return Err(MirEvalError::NotSupported(format!($it)))
+ return Err($crate::mir::eval::MirEvalError::NotSupported(format!($it)))
};
}
+use not_supported;
#[derive(Debug, Default, Clone, PartialEq, Eq, GenericTypeVisitable)]
pub struct VTableMap<'db> {
@@ -731,7 +733,7 @@ impl<'db> Evaluator<'db> {
let InternedClosure(def, _) = self.db.lookup_intern_closure(c);
let infer = InferenceResult::for_body(self.db, def);
let (captures, _) = infer.closure_info(c);
- let parent_subst = subst.split_closure_args_untupled().parent_args;
+ let parent_subst = subst.as_closure().parent_args();
captures
.get(f)
.expect("broken closure field")
@@ -2622,6 +2624,9 @@ impl<'db> Evaluator<'db> {
def,
generic_args,
);
+ let Either::Left(imp) = imp else {
+ not_supported!("evaluating builtin derive impls is not supported")
+ };
let mir_body = self
.db
@@ -2771,7 +2776,7 @@ impl<'db> Evaluator<'db> {
TyKind::Closure(closure, subst) => self.exec_closure(
closure.0,
func_data,
- GenericArgs::new_from_slice(subst.split_closure_args_untupled().parent_args),
+ GenericArgs::new_from_slice(subst.as_closure().parent_args()),
destination,
&args[1..],
locals,
diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs
index a47a8c4400..76c8701ea2 100644
--- a/crates/hir-ty/src/mir/eval/shim.rs
+++ b/crates/hir-ty/src/mir/eval/shim.rs
@@ -16,29 +16,14 @@ use crate::{
mir::eval::{
Address, AdtId, Arc, Evaluator, FunctionId, GenericArgs, HasModule, HirDisplay,
InternedClosure, Interval, IntervalAndTy, IntervalOrOwned, ItemContainerId, Layout, Locals,
- Lookup, MirEvalError, MirSpan, Mutability, Result, Ty, TyKind, pad16,
+ Lookup, MirEvalError, MirSpan, Mutability, Result, Ty, TyKind, from_bytes, not_supported,
+ pad16,
},
next_solver::Region,
};
mod simd;
-macro_rules! from_bytes {
- ($ty:tt, $value:expr) => {
- ($ty::from_le_bytes(match ($value).try_into() {
- Ok(it) => it,
- #[allow(unreachable_patterns)]
- Err(_) => return Err(MirEvalError::InternalError("mismatched size".into())),
- }))
- };
-}
-
-macro_rules! not_supported {
- ($it: expr) => {
- return Err(MirEvalError::NotSupported(format!($it)))
- };
-}
-
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum EvalLangItem {
BeginPanic,
diff --git a/crates/hir-ty/src/mir/eval/shim/simd.rs b/crates/hir-ty/src/mir/eval/shim/simd.rs
index 3896917cab..e0b3e571b8 100644
--- a/crates/hir-ty/src/mir/eval/shim/simd.rs
+++ b/crates/hir-ty/src/mir/eval/shim/simd.rs
@@ -6,21 +6,6 @@ use crate::consteval::try_const_usize;
use super::*;
-macro_rules! from_bytes {
- ($ty:tt, $value:expr) => {
- ($ty::from_le_bytes(match ($value).try_into() {
- Ok(it) => it,
- Err(_) => return Err(MirEvalError::InternalError("mismatched size".into())),
- }))
- };
-}
-
-macro_rules! not_supported {
- ($it: expr) => {
- return Err(MirEvalError::NotSupported(format!($it)))
- };
-}
-
impl<'db> Evaluator<'db> {
fn detect_simd_ty(&self, ty: Ty<'db>) -> Result<'db, (usize, Ty<'db>)> {
match ty.kind() {
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index e8d42bed9f..1579f00e92 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -19,7 +19,7 @@ use hir_expand::name::Name;
use la_arena::ArenaMap;
use rustc_apfloat::Float;
use rustc_hash::FxHashMap;
-use rustc_type_ir::inherent::{Const as _, IntoKind, Ty as _};
+use rustc_type_ir::inherent::{Const as _, GenericArgs as _, IntoKind, Ty as _};
use span::{Edition, FileId};
use syntax::TextRange;
use triomphe::Arc;
@@ -30,7 +30,10 @@ use crate::{
db::{HirDatabase, InternedClosure, InternedClosureId},
display::{DisplayTarget, HirDisplay, hir_display_with_store},
generics::generics,
- infer::{CaptureKind, CapturedItem, TypeMismatch, cast::CastTy},
+ infer::{
+ CaptureKind, CapturedItem, TypeMismatch, cast::CastTy,
+ closure::analysis::HirPlaceProjection,
+ },
inhabitedness::is_ty_uninhabited_from,
layout::LayoutError,
method_resolution::CandidateId,
@@ -44,6 +47,7 @@ use crate::{
next_solver::{
Const, DbInterner, ParamConst, ParamEnv, Region, StoredGenericArgs, StoredTy, TyKind,
TypingMode, UnevaluatedConst,
+ abi::Safety,
infer::{DbInternerInferExt, InferCtxt},
},
traits::FnTrait,
@@ -1257,22 +1261,16 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
.clone()
.into_iter()
.map(|it| match it {
- ProjectionElem::Deref => ProjectionElem::Deref,
- ProjectionElem::Field(it) => ProjectionElem::Field(it),
- ProjectionElem::ClosureField(it) => {
- ProjectionElem::ClosureField(it)
- }
- ProjectionElem::ConstantIndex { offset, from_end } => {
- ProjectionElem::ConstantIndex { offset, from_end }
- }
- ProjectionElem::Subslice { from, to } => {
- ProjectionElem::Subslice { from, to }
+ HirPlaceProjection::Deref => ProjectionElem::Deref,
+ HirPlaceProjection::Field(field_id) => {
+ ProjectionElem::Field(Either::Left(field_id))
}
- ProjectionElem::OpaqueCast(it) => {
- ProjectionElem::OpaqueCast(it)
+ HirPlaceProjection::TupleField(idx) => {
+ ProjectionElem::Field(Either::Right(TupleFieldId {
+ tuple: TupleId(!0), // Dummy as it's unused
+ index: idx,
+ }))
}
- #[allow(unreachable_patterns)]
- ProjectionElem::Index(it) => match it {},
})
.collect(),
),
@@ -2138,11 +2136,7 @@ pub fn mir_body_for_closure_query<'db>(
.store(),
});
ctx.result.param_locals.push(closure_local);
- let Some(sig) =
- substs.split_closure_args_untupled().closure_sig_as_fn_ptr_ty.callable_sig(ctx.interner())
- else {
- implementation_error!("closure has not callable sig");
- };
+ let sig = ctx.interner().signature_unclosure(substs.as_closure().sig(), Safety::Safe);
let resolver_guard = ctx.resolver.update_to_inner_scope(db, owner, expr);
let current = ctx.lower_params_and_bindings(
args.iter().zip(sig.skip_binder().inputs().iter()).map(|(it, y)| (*it, *y)),
@@ -2176,10 +2170,13 @@ pub fn mir_body_for_closure_query<'db>(
for (it, y) in p.projection.lookup(store).iter().zip(it.0.place.projections.iter())
{
match (it, y) {
- (ProjectionElem::Deref, ProjectionElem::Deref) => (),
- (ProjectionElem::Field(it), ProjectionElem::Field(y)) if it == y => (),
- (ProjectionElem::ClosureField(it), ProjectionElem::ClosureField(y))
+ (ProjectionElem::Deref, HirPlaceProjection::Deref) => (),
+ (ProjectionElem::Field(Either::Left(it)), HirPlaceProjection::Field(y))
if it == y => {}
+ (
+ ProjectionElem::Field(Either::Right(it)),
+ HirPlaceProjection::TupleField(y),
+ ) if it.index == *y => (),
_ => return false,
}
}
diff --git a/crates/hir-ty/src/next_solver/def_id.rs b/crates/hir-ty/src/next_solver/def_id.rs
index b6167b4a09..aa6caefc4a 100644
--- a/crates/hir-ty/src/next_solver/def_id.rs
+++ b/crates/hir-ty/src/next_solver/def_id.rs
@@ -1,9 +1,9 @@
//! Definition of `SolverDefId`
use hir_def::{
- AdtId, AttrDefId, CallableDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId,
- GeneralConstId, GenericDefId, HasModule, ImplId, ModuleId, StaticId, StructId, TraitId,
- TypeAliasId, UnionId, db::DefDatabase,
+ AdtId, AttrDefId, BuiltinDeriveImplId, CallableDefId, ConstId, DefWithBodyId, EnumId,
+ EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId, StaticId, StructId, TraitId,
+ TypeAliasId, UnionId,
};
use rustc_type_ir::inherent;
use stdx::impl_from;
@@ -24,6 +24,7 @@ pub enum SolverDefId {
ConstId(ConstId),
FunctionId(FunctionId),
ImplId(ImplId),
+ BuiltinDeriveImplId(BuiltinDeriveImplId),
StaticId(StaticId),
TraitId(TraitId),
TypeAliasId(TypeAliasId),
@@ -57,6 +58,7 @@ impl std::fmt::Debug for SolverDefId {
f.debug_tuple("FunctionId").field(&db.function_signature(id).name.as_str()).finish()
}
SolverDefId::ImplId(id) => f.debug_tuple("ImplId").field(&id).finish(),
+ SolverDefId::BuiltinDeriveImplId(id) => f.debug_tuple("ImplId").field(&id).finish(),
SolverDefId::StaticId(id) => {
f.debug_tuple("StaticId").field(&db.static_signature(id).name.as_str()).finish()
}
@@ -108,6 +110,7 @@ impl_from!(
ConstId,
FunctionId,
ImplId,
+ BuiltinDeriveImplId,
StaticId,
TraitId,
TypeAliasId,
@@ -170,7 +173,8 @@ impl TryFrom<SolverDefId> for AttrDefId {
SolverDefId::EnumVariantId(it) => Ok(it.into()),
SolverDefId::Ctor(Ctor::Struct(it)) => Ok(it.into()),
SolverDefId::Ctor(Ctor::Enum(it)) => Ok(it.into()),
- SolverDefId::InternedClosureId(_)
+ SolverDefId::BuiltinDeriveImplId(_)
+ | SolverDefId::InternedClosureId(_)
| SolverDefId::InternedCoroutineId(_)
| SolverDefId::InternedOpaqueTyId(_) => Err(()),
}
@@ -191,6 +195,7 @@ impl TryFrom<SolverDefId> for DefWithBodyId {
| SolverDefId::TraitId(_)
| SolverDefId::TypeAliasId(_)
| SolverDefId::ImplId(_)
+ | SolverDefId::BuiltinDeriveImplId(_)
| SolverDefId::InternedClosureId(_)
| SolverDefId::InternedCoroutineId(_)
| SolverDefId::Ctor(Ctor::Struct(_))
@@ -216,6 +221,7 @@ impl TryFrom<SolverDefId> for GenericDefId {
| SolverDefId::InternedCoroutineId(_)
| SolverDefId::InternedOpaqueTyId(_)
| SolverDefId::EnumVariantId(_)
+ | SolverDefId::BuiltinDeriveImplId(_)
| SolverDefId::Ctor(_) => return Err(()),
})
}
@@ -241,28 +247,6 @@ impl SolverDefId {
}
}
-impl HasModule for SolverDefId {
- fn module(&self, db: &dyn DefDatabase) -> ModuleId {
- match *self {
- SolverDefId::AdtId(id) => id.module(db),
- SolverDefId::ConstId(id) => id.module(db),
- SolverDefId::FunctionId(id) => id.module(db),
- SolverDefId::ImplId(id) => id.module(db),
- SolverDefId::StaticId(id) => id.module(db),
- SolverDefId::TraitId(id) => id.module(db),
- SolverDefId::TypeAliasId(id) => id.module(db),
- SolverDefId::InternedClosureId(id) => id.loc(db).0.module(db),
- SolverDefId::InternedCoroutineId(id) => id.loc(db).0.module(db),
- SolverDefId::InternedOpaqueTyId(id) => match id.loc(db) {
- crate::ImplTraitId::ReturnTypeImplTrait(owner, _) => owner.module(db),
- crate::ImplTraitId::TypeAliasImplTrait(owner, _) => owner.module(db),
- },
- SolverDefId::Ctor(Ctor::Enum(id)) | SolverDefId::EnumVariantId(id) => id.module(db),
- SolverDefId::Ctor(Ctor::Struct(id)) => id.module(db),
- }
- }
-}
-
impl<'db> inherent::DefId<DbInterner<'db>> for SolverDefId {
fn as_local(self) -> Option<SolverDefId> {
Some(self)
@@ -332,7 +316,6 @@ declare_id_wrapper!(TypeAliasIdWrapper, TypeAliasId);
declare_id_wrapper!(ClosureIdWrapper, InternedClosureId);
declare_id_wrapper!(CoroutineIdWrapper, InternedCoroutineId);
declare_id_wrapper!(AdtIdWrapper, AdtId);
-declare_id_wrapper!(ImplIdWrapper, ImplId);
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct GeneralConstIdWrapper(pub GeneralConstId);
@@ -433,3 +416,40 @@ impl<'db> inherent::DefId<DbInterner<'db>> for CallableIdWrapper {
true
}
}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum AnyImplId {
+ ImplId(ImplId),
+ BuiltinDeriveImplId(BuiltinDeriveImplId),
+}
+
+impl_from!(ImplId, BuiltinDeriveImplId for AnyImplId);
+
+impl From<AnyImplId> for SolverDefId {
+ #[inline]
+ fn from(value: AnyImplId) -> SolverDefId {
+ match value {
+ AnyImplId::ImplId(it) => it.into(),
+ AnyImplId::BuiltinDeriveImplId(it) => it.into(),
+ }
+ }
+}
+impl TryFrom<SolverDefId> for AnyImplId {
+ type Error = ();
+ #[inline]
+ fn try_from(value: SolverDefId) -> Result<Self, Self::Error> {
+ match value {
+ SolverDefId::ImplId(it) => Ok(it.into()),
+ SolverDefId::BuiltinDeriveImplId(it) => Ok(it.into()),
+ _ => Err(()),
+ }
+ }
+}
+impl<'db> inherent::DefId<DbInterner<'db>> for AnyImplId {
+ fn as_local(self) -> Option<SolverDefId> {
+ Some(self.into())
+ }
+ fn is_local(self) -> bool {
+ true
+ }
+}
diff --git a/crates/hir-ty/src/next_solver/format_proof_tree.rs b/crates/hir-ty/src/next_solver/format_proof_tree.rs
index 59fb0d65c5..66da6d5400 100644
--- a/crates/hir-ty/src/next_solver/format_proof_tree.rs
+++ b/crates/hir-ty/src/next_solver/format_proof_tree.rs
@@ -1,8 +1,8 @@
use rustc_type_ir::{solve::GoalSource, solve::inspect::GoalEvaluation};
use serde_derive::{Deserialize, Serialize};
-use crate::next_solver::infer::InferCtxt;
use crate::next_solver::inspect::{InspectCandidate, InspectGoal};
+use crate::next_solver::{AnyImplId, infer::InferCtxt};
use crate::next_solver::{DbInterner, Span};
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -76,14 +76,31 @@ impl<'a, 'db> ProofTreeSerializer<'a, 'db> {
use rustc_type_ir::solve::inspect::ProbeKind;
match candidate.kind() {
ProbeKind::TraitCandidate { source, .. } => {
+ use hir_def::{Lookup, src::HasSource};
use rustc_type_ir::solve::CandidateSource;
+ let db = self.infcx.interner.db;
match source {
- CandidateSource::Impl(impl_def_id) => {
- use hir_def::{Lookup, src::HasSource};
- let db = self.infcx.interner.db;
- let impl_src = impl_def_id.0.lookup(db).source(db);
- Some(impl_src.value.to_string())
- }
+ CandidateSource::Impl(impl_def_id) => match impl_def_id {
+ AnyImplId::ImplId(impl_def_id) => {
+ let impl_src = impl_def_id.lookup(db).source(db);
+ Some(impl_src.value.to_string())
+ }
+ AnyImplId::BuiltinDeriveImplId(impl_id) => {
+ let impl_loc = impl_id.loc(db);
+ let adt_src = match impl_loc.adt {
+ hir_def::AdtId::StructId(adt) => {
+ adt.loc(db).source(db).value.to_string()
+ }
+ hir_def::AdtId::UnionId(adt) => {
+ adt.loc(db).source(db).value.to_string()
+ }
+ hir_def::AdtId::EnumId(adt) => {
+ adt.loc(db).source(db).value.to_string()
+ }
+ };
+ Some(format!("#[derive(${})]\n{}", impl_loc.trait_.name(), adt_src))
+ }
+ },
_ => None,
}
}
diff --git a/crates/hir-ty/src/next_solver/generic_arg.rs b/crates/hir-ty/src/next_solver/generic_arg.rs
index 9936e44321..72cf2f9f07 100644
--- a/crates/hir-ty/src/next_solver/generic_arg.rs
+++ b/crates/hir-ty/src/next_solver/generic_arg.rs
@@ -11,9 +11,9 @@ use std::{hint::unreachable_unchecked, marker::PhantomData, ptr::NonNull};
use hir_def::{GenericDefId, GenericParamId};
use intern::InternedRef;
use rustc_type_ir::{
- ClosureArgs, ConstVid, CoroutineArgs, CoroutineClosureArgs, FallibleTypeFolder, FnSigTys,
- GenericTypeVisitable, Interner, TyKind, TyVid, TypeFoldable, TypeFolder, TypeVisitable,
- TypeVisitor, Variance,
+ ClosureArgs, ConstVid, CoroutineArgs, CoroutineClosureArgs, FallibleTypeFolder,
+ GenericTypeVisitable, Interner, TyVid, TypeFoldable, TypeFolder, TypeVisitable, TypeVisitor,
+ Variance,
inherent::{GenericArg as _, GenericsOf, IntoKind, SliceLike, Term as _, Ty as _},
relate::{Relate, VarianceDiagInfo},
walk::TypeWalker,
@@ -21,12 +21,11 @@ use rustc_type_ir::{
use smallvec::SmallVec;
use crate::next_solver::{
- ConstInterned, PolyFnSig, RegionInterned, TyInterned, impl_foldable_for_interned_slice,
- interned_slice,
+ ConstInterned, RegionInterned, TyInterned, impl_foldable_for_interned_slice, interned_slice,
};
use super::{
- Const, DbInterner, EarlyParamRegion, ErrorGuaranteed, ParamConst, Region, SolverDefId, Ty, Tys,
+ Const, DbInterner, EarlyParamRegion, ErrorGuaranteed, ParamConst, Region, SolverDefId, Ty,
generics::Generics,
};
@@ -566,33 +565,6 @@ impl<'db> GenericArgs<'db> {
}
}
- pub fn closure_sig_untupled(self) -> PolyFnSig<'db> {
- let TyKind::FnPtr(inputs_and_output, hdr) =
- self.split_closure_args_untupled().closure_sig_as_fn_ptr_ty.kind()
- else {
- unreachable!("not a function pointer")
- };
- inputs_and_output.with(hdr)
- }
-
- /// A "sensible" `.split_closure_args()`, where the arguments are not in a tuple.
- pub fn split_closure_args_untupled(self) -> rustc_type_ir::ClosureArgsParts<DbInterner<'db>> {
- // FIXME: should use `ClosureSubst` when possible
- match self.as_slice() {
- [parent_args @ .., closure_kind_ty, sig_ty, tupled_upvars_ty] => {
- rustc_type_ir::ClosureArgsParts {
- parent_args,
- closure_sig_as_fn_ptr_ty: sig_ty.expect_ty(),
- closure_kind_ty: closure_kind_ty.expect_ty(),
- tupled_upvars_ty: tupled_upvars_ty.expect_ty(),
- }
- }
- _ => {
- unreachable!("unexpected closure sig");
- }
- }
- }
-
pub fn types(self) -> impl Iterator<Item = Ty<'db>> {
self.iter().filter_map(|it| it.as_type())
}
@@ -688,27 +660,9 @@ impl<'db> rustc_type_ir::inherent::GenericArgs<DbInterner<'db>> for GenericArgs<
// FIXME: should use `ClosureSubst` when possible
match self.as_slice() {
[parent_args @ .., closure_kind_ty, sig_ty, tupled_upvars_ty] => {
- let interner = DbInterner::conjure();
- // This is stupid, but the next solver expects the first input to actually be a tuple
- let sig_ty = match sig_ty.expect_ty().kind() {
- TyKind::FnPtr(sig_tys, header) => Ty::new(
- interner,
- TyKind::FnPtr(
- sig_tys.map_bound(|s| {
- let inputs = Ty::new_tup(interner, s.inputs());
- let output = s.output();
- FnSigTys {
- inputs_and_output: Tys::new_from_slice(&[inputs, output]),
- }
- }),
- header,
- ),
- ),
- _ => unreachable!("sig_ty should be last"),
- };
rustc_type_ir::ClosureArgsParts {
parent_args,
- closure_sig_as_fn_ptr_ty: sig_ty,
+ closure_sig_as_fn_ptr_ty: sig_ty.expect_ty(),
closure_kind_ty: closure_kind_ty.expect_ty(),
tupled_upvars_ty: tupled_upvars_ty.expect_ty(),
}
diff --git a/crates/hir-ty/src/next_solver/generics.rs b/crates/hir-ty/src/next_solver/generics.rs
index 4d164a7e3b..a8288b4e82 100644
--- a/crates/hir-ty/src/next_solver/generics.rs
+++ b/crates/hir-ty/src/next_solver/generics.rs
@@ -4,14 +4,15 @@ use hir_def::{
ConstParamId, GenericDefId, GenericParamId, LifetimeParamId, TypeOrConstParamId, TypeParamId,
hir::generics::{GenericParams, TypeOrConstParamData},
};
+use rustc_type_ir::inherent::GenericsOf;
-use crate::{db::HirDatabase, generics::parent_generic_def};
+use crate::generics::parent_generic_def;
use super::SolverDefId;
use super::DbInterner;
-pub(crate) fn generics(db: &dyn HirDatabase, def: SolverDefId) -> Generics {
+pub(crate) fn generics(interner: DbInterner<'_>, def: SolverDefId) -> Generics {
let mk_lt = |parent, index, local_id| {
let id = GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id });
GenericParamDef { index, id }
@@ -50,6 +51,7 @@ pub(crate) fn generics(db: &dyn HirDatabase, def: SolverDefId) -> Generics {
result
};
+ let db = interner.db;
let (parent, own_params) = match (def.try_into(), def) {
(Ok(def), _) => (
parent_generic_def(db, def),
@@ -66,9 +68,12 @@ pub(crate) fn generics(db: &dyn HirDatabase, def: SolverDefId) -> Generics {
}
}
}
+ (_, SolverDefId::BuiltinDeriveImplId(id)) => {
+ return crate::builtin_derive::generics_of(interner, id);
+ }
_ => panic!("No generics for {def:?}"),
};
- let parent_generics = parent.map(|def| Box::new(generics(db, def.into())));
+ let parent_generics = parent.map(|def| Box::new(generics(interner, def.into())));
Generics {
parent,
@@ -84,6 +89,13 @@ pub struct Generics {
pub own_params: Vec<GenericParamDef>,
}
+impl Generics {
+ pub(crate) fn push_param(&mut self, id: GenericParamId) {
+ let index = self.count() as u32;
+ self.own_params.push(GenericParamDef { index, id });
+ }
+}
+
#[derive(Debug)]
pub struct GenericParamDef {
index: u32,
diff --git a/crates/hir-ty/src/next_solver/infer/mod.rs b/crates/hir-ty/src/next_solver/infer/mod.rs
index 2926dc30de..7d291f7ddb 100644
--- a/crates/hir-ty/src/next_solver/infer/mod.rs
+++ b/crates/hir-ty/src/next_solver/infer/mod.rs
@@ -878,9 +878,11 @@ impl<'db> InferCtxt<'db> {
self.tainted_by_errors.set(Some(e));
}
- #[instrument(level = "debug", skip(self), ret)]
- pub fn take_opaque_types(&self) -> Vec<(OpaqueTypeKey<'db>, OpaqueHiddenType<'db>)> {
- self.inner.borrow_mut().opaque_type_storage.take_opaque_types().collect()
+ #[instrument(level = "debug", skip(self))]
+ pub fn take_opaque_types(
+ &self,
+ ) -> impl IntoIterator<Item = (OpaqueTypeKey<'db>, OpaqueHiddenType<'db>)> + use<'db> {
+ self.inner.borrow_mut().opaque_type_storage.take_opaque_types()
}
#[instrument(level = "debug", skip(self), ret)]
diff --git a/crates/hir-ty/src/next_solver/infer/opaque_types/table.rs b/crates/hir-ty/src/next_solver/infer/opaque_types/table.rs
index 00177d21ac..894fe5eb7b 100644
--- a/crates/hir-ty/src/next_solver/infer/opaque_types/table.rs
+++ b/crates/hir-ty/src/next_solver/infer/opaque_types/table.rs
@@ -61,7 +61,7 @@ impl<'db> OpaqueTypeStorage<'db> {
pub(crate) fn take_opaque_types(
&mut self,
- ) -> impl Iterator<Item = (OpaqueTypeKey<'db>, OpaqueHiddenType<'db>)> {
+ ) -> impl IntoIterator<Item = (OpaqueTypeKey<'db>, OpaqueHiddenType<'db>)> + use<'db> {
let OpaqueTypeStorage { opaque_types, duplicate_entries } = self;
std::mem::take(opaque_types).into_iter().chain(std::mem::take(duplicate_entries))
}
diff --git a/crates/hir-ty/src/next_solver/infer/select.rs b/crates/hir-ty/src/next_solver/infer/select.rs
index 52ad410df6..bd407fd157 100644
--- a/crates/hir-ty/src/next_solver/infer/select.rs
+++ b/crates/hir-ty/src/next_solver/infer/select.rs
@@ -2,7 +2,7 @@
use std::ops::ControlFlow;
-use hir_def::{ImplId, TraitId};
+use hir_def::TraitId;
use macros::{TypeFoldable, TypeVisitable};
use rustc_type_ir::{
Interner,
@@ -12,7 +12,7 @@ use rustc_type_ir::{
use crate::{
db::InternedOpaqueTyId,
next_solver::{
- Const, ErrorGuaranteed, GenericArgs, Goal, TraitRef, Ty, TypeError,
+ AnyImplId, Const, ErrorGuaranteed, GenericArgs, Goal, TraitRef, Ty, TypeError,
infer::{
InferCtxt,
select::EvaluationResult::*,
@@ -249,7 +249,7 @@ impl<'db, N> ImplSource<'db, N> {
pub(crate) struct ImplSourceUserDefinedData<'db, N> {
#[type_visitable(ignore)]
#[type_foldable(identity)]
- pub(crate) impl_def_id: ImplId,
+ pub(crate) impl_def_id: AnyImplId,
pub(crate) args: GenericArgs<'db>,
pub(crate) nested: Vec<N>,
}
@@ -395,7 +395,7 @@ fn to_selection<'db>(cand: InspectCandidate<'_, 'db>) -> Option<Selection<'db>>
// FIXME: Remove this in favor of storing this in the tree
// For impl candidates, we do the rematch manually to compute the args.
ImplSource::UserDefined(ImplSourceUserDefinedData {
- impl_def_id: impl_def_id.0,
+ impl_def_id,
args: cand.instantiate_impl_args(),
nested,
})
diff --git a/crates/hir-ty/src/next_solver/interner.rs b/crates/hir-ty/src/next_solver/interner.rs
index 2ebc5b81ba..2a3df1d32a 100644
--- a/crates/hir-ty/src/next_solver/interner.rs
+++ b/crates/hir-ty/src/next_solver/interner.rs
@@ -38,10 +38,10 @@ use crate::{
lower::GenericPredicates,
method_resolution::TraitImpls,
next_solver::{
- AdtIdWrapper, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper,
- CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, GeneralConstIdWrapper, ImplIdWrapper,
- OpaqueTypeKey, RegionAssumptions, SimplifiedType, SolverContext, SolverDefIds,
- TraitIdWrapper, TypeAliasIdWrapper, UnevaluatedConst, util::explicit_item_bounds,
+ AdtIdWrapper, AnyImplId, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper,
+ CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, GeneralConstIdWrapper, OpaqueTypeKey,
+ RegionAssumptions, SimplifiedType, SolverContext, SolverDefIds, TraitIdWrapper,
+ TypeAliasIdWrapper, UnevaluatedConst, util::explicit_item_bounds,
},
};
@@ -1020,7 +1020,7 @@ impl<'db> Interner for DbInterner<'db> {
type CoroutineClosureId = CoroutineIdWrapper;
type CoroutineId = CoroutineIdWrapper;
type AdtId = AdtIdWrapper;
- type ImplId = ImplIdWrapper;
+ type ImplId = AnyImplId;
type UnevaluatedConstId = GeneralConstIdWrapper;
type Span = Span;
@@ -1164,7 +1164,7 @@ impl<'db> Interner for DbInterner<'db> {
}
fn generics_of(self, def_id: Self::DefId) -> Self::GenericsOf {
- generics(self.db(), def_id)
+ generics(self, def_id)
}
fn variances_of(self, def_id: Self::DefId) -> Self::VariancesOf {
@@ -1190,6 +1190,7 @@ impl<'db> Interner for DbInterner<'db> {
| SolverDefId::TraitId(_)
| SolverDefId::TypeAliasId(_)
| SolverDefId::ImplId(_)
+ | SolverDefId::BuiltinDeriveImplId(_)
| SolverDefId::InternedClosureId(_)
| SolverDefId::InternedCoroutineId(_) => {
return VariancesOf::empty(self);
@@ -1327,6 +1328,7 @@ impl<'db> Interner for DbInterner<'db> {
| SolverDefId::AdtId(_)
| SolverDefId::TraitId(_)
| SolverDefId::ImplId(_)
+ | SolverDefId::BuiltinDeriveImplId(_)
| SolverDefId::EnumVariantId(..)
| SolverDefId::Ctor(..)
| SolverDefId::InternedOpaqueTyId(..) => panic!(),
@@ -1445,8 +1447,7 @@ impl<'db> Interner for DbInterner<'db> {
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
- GenericPredicates::query_all(self.db, def_id.try_into().unwrap())
- .map_bound(|it| it.iter().copied())
+ predicates_of(self.db, def_id).all_predicates().map_bound(|it| it.iter().copied())
}
#[tracing::instrument(level = "debug", skip(self), ret)]
@@ -1454,8 +1455,7 @@ impl<'db> Interner for DbInterner<'db> {
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
- GenericPredicates::query_own(self.db, def_id.try_into().unwrap())
- .map_bound(|it| it.iter().copied())
+ predicates_of(self.db, def_id).own_predicates().map_bound(|it| it.iter().copied())
}
#[tracing::instrument(skip(self), ret)]
@@ -1500,32 +1500,30 @@ impl<'db> Interner for DbInterner<'db> {
}
}
- GenericPredicates::query_explicit(self.db, def_id.try_into().unwrap()).map_bound(
- |predicates| {
- predicates
- .iter()
- .copied()
- .filter(|p| match p.kind().skip_binder() {
- ClauseKind::Trait(it) => is_self_or_assoc(it.self_ty()),
- ClauseKind::TypeOutlives(it) => is_self_or_assoc(it.0),
- ClauseKind::Projection(it) => is_self_or_assoc(it.self_ty()),
- ClauseKind::HostEffect(it) => is_self_or_assoc(it.self_ty()),
- // FIXME: Not sure is this correct to allow other clauses but we might replace
- // `generic_predicates_ns` query here with something closer to rustc's
- // `implied_bounds_with_filter`, which is more granular lowering than this
- // "lower at once and then filter" implementation.
- _ => true,
- })
- .map(|p| (p, Span::dummy()))
- },
- )
+ predicates_of(self.db, def_id).explicit_predicates().map_bound(|predicates| {
+ predicates
+ .iter()
+ .copied()
+ .filter(|p| match p.kind().skip_binder() {
+ ClauseKind::Trait(it) => is_self_or_assoc(it.self_ty()),
+ ClauseKind::TypeOutlives(it) => is_self_or_assoc(it.0),
+ ClauseKind::Projection(it) => is_self_or_assoc(it.self_ty()),
+ ClauseKind::HostEffect(it) => is_self_or_assoc(it.self_ty()),
+ // FIXME: Not sure is this correct to allow other clauses but we might replace
+ // `generic_predicates_ns` query here with something closer to rustc's
+ // `implied_bounds_with_filter`, which is more granular lowering than this
+ // "lower at once and then filter" implementation.
+ _ => true,
+ })
+ .map(|p| (p, Span::dummy()))
+ })
}
fn impl_super_outlives(
self,
impl_id: Self::ImplId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
- let trait_ref = self.db().impl_trait(impl_id.0).expect("expected an impl of trait");
+ let trait_ref = self.impl_trait_ref(impl_id);
trait_ref.map_bound(|trait_ref| {
let clause: Clause<'_> = trait_ref.upcast(self);
elaborate(self, [clause]).filter(|clause| {
@@ -1790,6 +1788,7 @@ impl<'db> Interner for DbInterner<'db> {
SolverDefId::ConstId(_)
| SolverDefId::FunctionId(_)
| SolverDefId::ImplId(_)
+ | SolverDefId::BuiltinDeriveImplId(_)
| SolverDefId::StaticId(_)
| SolverDefId::InternedClosureId(_)
| SolverDefId::InternedCoroutineId(_)
@@ -1805,7 +1804,12 @@ impl<'db> Interner for DbInterner<'db> {
type_block,
trait_block,
&mut |impls| {
- for &impl_ in impls.for_trait_and_self_ty(trait_def_id.0, &simp) {
+ let (regular_impls, builtin_derive_impls) =
+ impls.for_trait_and_self_ty(trait_def_id.0, &simp);
+ for &impl_ in regular_impls {
+ f(impl_.into());
+ }
+ for &impl_ in builtin_derive_impls {
f(impl_.into());
}
},
@@ -1927,7 +1931,10 @@ impl<'db> Interner for DbInterner<'db> {
}
fn impl_is_default(self, impl_def_id: Self::ImplId) -> bool {
- self.db.impl_signature(impl_def_id.0).is_default()
+ match impl_def_id {
+ AnyImplId::ImplId(impl_id) => self.db.impl_signature(impl_id).is_default(),
+ AnyImplId::BuiltinDeriveImplId(_) => false,
+ }
}
#[tracing::instrument(skip(self), ret)]
@@ -1935,14 +1942,24 @@ impl<'db> Interner for DbInterner<'db> {
self,
impl_id: Self::ImplId,
) -> EarlyBinder<Self, rustc_type_ir::TraitRef<Self>> {
- let db = self.db();
- db.impl_trait(impl_id.0)
- // ImplIds for impls where the trait ref can't be resolved should never reach trait solving
- .expect("invalid impl passed to trait solver")
+ match impl_id {
+ AnyImplId::ImplId(impl_id) => {
+ let db = self.db();
+ db.impl_trait(impl_id)
+ // ImplIds for impls where the trait ref can't be resolved should never reach trait solving
+ .expect("invalid impl passed to trait solver")
+ }
+ AnyImplId::BuiltinDeriveImplId(impl_id) => {
+ crate::builtin_derive::impl_trait(self, impl_id)
+ }
+ }
}
fn impl_polarity(self, impl_id: Self::ImplId) -> rustc_type_ir::ImplPolarity {
- let impl_data = self.db().impl_signature(impl_id.0);
+ let AnyImplId::ImplId(impl_id) = impl_id else {
+ return ImplPolarity::Positive;
+ };
+ let impl_data = self.db().impl_signature(impl_id);
if impl_data.flags.contains(ImplFlags::NEGATIVE) {
ImplPolarity::Negative
} else {
@@ -2230,11 +2247,13 @@ impl<'db> Interner for DbInterner<'db> {
specializing_impl_def_id: Self::ImplId,
parent_impl_def_id: Self::ImplId,
) -> bool {
- crate::specialization::specializes(
- self.db,
- specializing_impl_def_id.0,
- parent_impl_def_id.0,
- )
+ let (AnyImplId::ImplId(specializing_impl_def_id), AnyImplId::ImplId(parent_impl_def_id)) =
+ (specializing_impl_def_id, parent_impl_def_id)
+ else {
+ // No builtin derive allows specialization currently.
+ return false;
+ };
+ crate::specialization::specializes(self.db, specializing_impl_def_id, parent_impl_def_id)
}
fn next_trait_solver_globally(self) -> bool {
@@ -2349,6 +2368,14 @@ impl<'db> DbInterner<'db> {
}
}
+fn predicates_of(db: &dyn HirDatabase, def_id: SolverDefId) -> &GenericPredicates {
+ if let SolverDefId::BuiltinDeriveImplId(impl_) = def_id {
+ crate::builtin_derive::predicates(db, impl_)
+ } else {
+ GenericPredicates::query(db, def_id.try_into().unwrap())
+ }
+}
+
macro_rules! TrivialTypeTraversalImpls {
($($ty:ty,)+) => {
$(
@@ -2396,7 +2423,7 @@ TrivialTypeTraversalImpls! {
ClosureIdWrapper,
CoroutineIdWrapper,
AdtIdWrapper,
- ImplIdWrapper,
+ AnyImplId,
GeneralConstIdWrapper,
Safety,
FnAbi,
diff --git a/crates/hir-ty/src/next_solver/solver.rs b/crates/hir-ty/src/next_solver/solver.rs
index d800925ba4..15d6e2e451 100644
--- a/crates/hir-ty/src/next_solver/solver.rs
+++ b/crates/hir-ty/src/next_solver/solver.rs
@@ -12,7 +12,7 @@ use rustc_type_ir::{
use tracing::debug;
use crate::next_solver::{
- AliasTy, CanonicalVarKind, Clause, ClauseKind, CoercePredicate, GenericArgs, ImplIdWrapper,
+ AliasTy, AnyImplId, CanonicalVarKind, Clause, ClauseKind, CoercePredicate, GenericArgs,
ParamEnv, Predicate, PredicateKind, SubtypePredicate, Ty, TyKind, fold::fold_tys,
util::sizedness_fast_path,
};
@@ -174,9 +174,13 @@ impl<'db> SolverDelegate for SolverContext<'db> {
&self,
_goal_trait_ref: rustc_type_ir::TraitRef<Self::Interner>,
trait_assoc_def_id: SolverDefId,
- impl_id: ImplIdWrapper,
+ impl_id: AnyImplId,
) -> Result<Option<SolverDefId>, ErrorGuaranteed> {
- let impl_items = impl_id.0.impl_items(self.0.interner.db());
+ let AnyImplId::ImplId(impl_id) = impl_id else {
+ // Builtin derive traits don't have type/const assoc items.
+ return Ok(None);
+ };
+ let impl_items = impl_id.impl_items(self.0.interner.db());
let id =
match trait_assoc_def_id {
SolverDefId::TypeAliasId(trait_assoc_id) => {
@@ -228,8 +232,8 @@ impl<'db> SolverDelegate for SolverContext<'db> {
fn is_transmutable(
&self,
- _dst: Ty<'db>,
_src: Ty<'db>,
+ _dst: Ty<'db>,
_assume: <Self::Interner as rustc_type_ir::Interner>::Const,
) -> Result<Certainty, NoSolution> {
// It's better to return some value while not fully implement
diff --git a/crates/hir-ty/src/next_solver/ty.rs b/crates/hir-ty/src/next_solver/ty.rs
index c89831bd40..66a24d3949 100644
--- a/crates/hir-ty/src/next_solver/ty.rs
+++ b/crates/hir-ty/src/next_solver/ty.rs
@@ -26,6 +26,7 @@ use rustc_type_ir::{
};
use crate::{
+ FnAbi,
db::{HirDatabase, InternedCoroutine},
lower::GenericPredicates,
next_solver::{
@@ -382,6 +383,11 @@ impl<'db> Ty<'db> {
matches!(self.kind(), TyKind::Bool)
}
+ #[inline]
+ pub fn is_char(self) -> bool {
+ matches!(self.kind(), TyKind::Char)
+ }
+
/// A scalar type is one that denotes an atomic datum, with no sub-components.
/// (A RawPtr is scalar because it represents a non-managed pointer, so its
/// contents are abstract to rustc.)
@@ -422,6 +428,11 @@ impl<'db> Ty<'db> {
}
#[inline]
+ pub fn is_u8(self) -> bool {
+ matches!(self.kind(), TyKind::Uint(UintTy::U8))
+ }
+
+ #[inline]
pub fn is_raw_ptr(self) -> bool {
matches!(self.kind(), TyKind::RawPtr(..))
}
@@ -456,6 +467,14 @@ impl<'db> Ty<'db> {
}
#[inline]
+ pub fn as_slice(self) -> Option<Ty<'db>> {
+ match self.kind() {
+ TyKind::Slice(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ #[inline]
pub fn ty_vid(self) -> Option<TyVid> {
match self.kind() {
TyKind::Infer(rustc_type_ir::TyVar(vid)) => Some(vid),
@@ -495,10 +514,9 @@ impl<'db> Ty<'db> {
Some(interner.fn_sig(callable).instantiate(interner, args))
}
TyKind::FnPtr(sig, hdr) => Some(sig.with(hdr)),
- TyKind::Closure(_, closure_args) => closure_args
- .split_closure_args_untupled()
- .closure_sig_as_fn_ptr_ty
- .callable_sig(interner),
+ TyKind::Closure(_, closure_args) => {
+ Some(interner.signature_unclosure(closure_args.as_closure().sig(), Safety::Safe))
+ }
TyKind::CoroutineClosure(coroutine_id, args) => {
Some(args.as_coroutine_closure().coroutine_closure_sig().map_bound(|sig| {
let unit_ty = Ty::new_unit(interner);
@@ -1426,3 +1444,22 @@ impl<'db> PlaceholderLike<DbInterner<'db>> for PlaceholderTy {
Placeholder { universe: ui, bound: BoundTy { var, kind: BoundTyKind::Anon } }
}
}
+
+impl<'db> DbInterner<'db> {
+ /// Given a closure signature, returns an equivalent fn signature. Detuples
+ /// and so forth -- so e.g., if we have a sig with `Fn<(u32, i32)>` then
+ /// you would get a `fn(u32, i32)`.
+ /// `unsafety` determines the unsafety of the fn signature. If you pass
+ /// `Safety::Unsafe` in the previous example, then you would get
+ /// an `unsafe fn (u32, i32)`.
+ /// It cannot convert a closure that requires unsafe.
+ pub fn signature_unclosure(self, sig: PolyFnSig<'db>, safety: Safety) -> PolyFnSig<'db> {
+ sig.map_bound(|s| {
+ let params = match s.inputs()[0].kind() {
+ TyKind::Tuple(params) => params,
+ _ => panic!(),
+ };
+ self.mk_fn_sig(params, s.output(), s.c_variadic, safety, FnAbi::Rust)
+ })
+ }
+}
diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs
index 7bd314cb8e..243456c85f 100644
--- a/crates/hir-ty/src/test_db.rs
+++ b/crates/hir-ty/src/test_db.rs
@@ -46,6 +46,12 @@ impl Default for TestDB {
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
// This needs to be here otherwise `CrateGraphBuilder` panics.
this.set_all_crates(Arc::new(Box::new([])));
+ _ = base_db::LibraryRoots::builder(Default::default())
+ .durability(Durability::MEDIUM)
+ .new(&this);
+ _ = base_db::LocalRoots::builder(Default::default())
+ .durability(Durability::MEDIUM)
+ .new(&this);
CrateGraphBuilder::default().set_in_db(&mut this);
this
}
diff --git a/crates/hir-ty/src/tests/closure_captures.rs b/crates/hir-ty/src/tests/closure_captures.rs
index 3bdc72d015..8408c0a7bf 100644
--- a/crates/hir-ty/src/tests/closure_captures.rs
+++ b/crates/hir-ty/src/tests/closure_captures.rs
@@ -503,3 +503,28 @@ fn main() {
expect!["73..149;37..38;103..104 ByValue b Option<Box>"],
);
}
+
+#[test]
+fn alias_needs_to_be_normalized() {
+ check_closure_captures(
+ r#"
+//- minicore:copy
+trait Trait {
+ type Associated;
+}
+struct A;
+struct B { x: i32 }
+impl Trait for A {
+ type Associated = B;
+}
+struct C { b: <A as Trait>::Associated }
+fn main() {
+ let c: C = C { b: B { x: 1 } };
+ let closure = || {
+ let _move = c.b.x;
+ };
+}
+"#,
+ expect!["220..257;174..175;245..250 ByRef(Shared) c.b.x &'? i32"],
+ );
+}
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index 6558d2179f..cf7ff6f7ec 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -132,14 +132,13 @@ fn baz() -> i32 {
"trait_environment_query",
"lang_items",
"crate_lang_items",
- "AttrFlags::query_",
- "AttrFlags::query_",
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"expr_scopes_shim",
"InferenceResult::for_body_",
"function_signature_shim",
"function_signature_with_source_map_shim",
+ "AttrFlags::query_",
"body_shim",
"body_with_source_map_shim",
"trait_environment_query",
@@ -149,6 +148,7 @@ fn baz() -> i32 {
"InferenceResult::for_body_",
"function_signature_shim",
"function_signature_with_source_map_shim",
+ "AttrFlags::query_",
"body_shim",
"body_with_source_map_shim",
"trait_environment_query",
@@ -197,13 +197,13 @@ fn baz() -> i32 {
"body_with_source_map_shim",
"body_shim",
"AttrFlags::query_",
- "AttrFlags::query_",
"function_signature_with_source_map_shim",
"function_signature_shim",
"body_with_source_map_shim",
"body_shim",
"InferenceResult::for_body_",
"expr_scopes_shim",
+ "AttrFlags::query_",
"function_signature_with_source_map_shim",
"function_signature_shim",
"body_with_source_map_shim",
@@ -243,6 +243,8 @@ $0",
"parse_shim",
"real_span_map_shim",
"TraitImpls::for_crate_",
+ "lang_items",
+ "crate_lang_items",
]
"#]],
);
@@ -279,6 +281,7 @@ pub struct NewStruct {
"real_span_map_shim",
"crate_local_def_map",
"TraitImpls::for_crate_",
+ "crate_lang_items",
]
"#]],
);
@@ -314,6 +317,8 @@ $0",
"parse_shim",
"real_span_map_shim",
"TraitImpls::for_crate_",
+ "lang_items",
+ "crate_lang_items",
]
"#]],
);
@@ -351,6 +356,7 @@ pub enum SomeEnum {
"real_span_map_shim",
"crate_local_def_map",
"TraitImpls::for_crate_",
+ "crate_lang_items",
]
"#]],
);
@@ -386,6 +392,8 @@ $0",
"parse_shim",
"real_span_map_shim",
"TraitImpls::for_crate_",
+ "lang_items",
+ "crate_lang_items",
]
"#]],
);
@@ -420,6 +428,7 @@ fn bar() -> f32 {
"real_span_map_shim",
"crate_local_def_map",
"TraitImpls::for_crate_",
+ "crate_lang_items",
]
"#]],
);
@@ -459,6 +468,8 @@ $0",
"parse_shim",
"real_span_map_shim",
"TraitImpls::for_crate_",
+ "lang_items",
+ "crate_lang_items",
]
"#]],
);
@@ -501,20 +512,7 @@ impl SomeStruct {
"real_span_map_shim",
"crate_local_def_map",
"TraitImpls::for_crate_",
- "AttrFlags::query_",
- "impl_trait_with_diagnostics_query",
- "impl_signature_shim",
- "impl_signature_with_source_map_shim",
- "lang_items",
"crate_lang_items",
- "ImplItems::of_",
- "AttrFlags::query_",
- "AttrFlags::query_",
- "AttrFlags::query_",
- "AttrFlags::query_",
- "impl_self_ty_with_diagnostics_query",
- "struct_signature_shim",
- "struct_signature_with_source_map_shim",
]
"#]],
);
@@ -586,8 +584,6 @@ fn main() {
"trait_environment_query",
"lang_items",
"crate_lang_items",
- "AttrFlags::query_",
- "AttrFlags::query_",
"GenericPredicates::query_with_diagnostics_",
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
@@ -600,18 +596,18 @@ fn main() {
"expr_scopes_shim",
"struct_signature_shim",
"struct_signature_with_source_map_shim",
+ "AttrFlags::query_",
"GenericPredicates::query_with_diagnostics_",
"value_ty_query",
- "VariantFields::firewall_",
- "VariantFields::query_",
"InherentImpls::for_crate_",
- "impl_signature_shim",
- "impl_signature_with_source_map_shim",
"callable_item_signature_query",
"TraitImpls::for_crate_and_deps_",
"TraitImpls::for_crate_",
"impl_trait_with_diagnostics_query",
+ "impl_signature_shim",
+ "impl_signature_with_source_map_shim",
"impl_self_ty_with_diagnostics_query",
+ "AttrFlags::query_",
"GenericPredicates::query_with_diagnostics_",
]
"#]],
@@ -680,9 +676,6 @@ fn main() {
"body_with_source_map_shim",
"body_shim",
"crate_lang_items",
- "AttrFlags::query_",
- "AttrFlags::query_",
- "AttrFlags::query_",
"GenericPredicates::query_with_diagnostics_",
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
@@ -692,15 +685,16 @@ fn main() {
"ImplTraits::return_type_impl_traits_",
"expr_scopes_shim",
"struct_signature_with_source_map_shim",
+ "AttrFlags::query_",
"GenericPredicates::query_with_diagnostics_",
- "VariantFields::query_",
"InherentImpls::for_crate_",
- "impl_signature_with_source_map_shim",
- "impl_signature_shim",
"callable_item_signature_query",
"TraitImpls::for_crate_",
+ "impl_signature_with_source_map_shim",
+ "impl_signature_shim",
"impl_trait_with_diagnostics_query",
"impl_self_ty_with_diagnostics_query",
+ "AttrFlags::query_",
"GenericPredicates::query_with_diagnostics_",
]
"#]],
diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs
index c312b16759..0b776938c5 100644
--- a/crates/hir-ty/src/tests/patterns.rs
+++ b/crates/hir-ty/src/tests/patterns.rs
@@ -794,6 +794,8 @@ fn slice_tail_pattern() {
fn box_pattern() {
check_infer(
r#"
+ #![feature(lang_items)]
+
pub struct Global;
#[lang = "owned_box"]
pub struct Box<T, A = Global>(T);
@@ -805,13 +807,13 @@ fn box_pattern() {
}
"#,
expect![[r#"
- 83..89 'params': Box<i32, Global>
- 101..155 '{ ... } }': ()
- 107..153 'match ... }': ()
- 113..119 'params': Box<i32, Global>
- 130..141 'box integer': Box<i32, Global>
- 134..141 'integer': i32
- 145..147 '{}': ()
+ 108..114 'params': Box<i32, Global>
+ 126..180 '{ ... } }': ()
+ 132..178 'match ... }': ()
+ 138..144 'params': Box<i32, Global>
+ 155..166 'box integer': Box<i32, Global>
+ 159..166 'integer': i32
+ 170..172 '{}': ()
"#]],
);
check_infer(
@@ -831,7 +833,6 @@ fn box_pattern() {
76..122 'match ... }': ()
82..88 'params': Box<i32>
99..110 'box integer': Box<i32>
- 103..110 'integer': i32
114..116 '{}': ()
"#]],
);
@@ -1142,6 +1143,7 @@ fn my_fn(#[cfg(feature = "feature")] u8: u8, u32: u32) {}
fn var_args() {
check_types(
r#"
+#![feature(lang_items)]
#[lang = "va_list"]
pub struct VaListImpl<'f>;
fn my_fn(foo: ...) {}
@@ -1156,6 +1158,7 @@ fn my_fn2(bar: u32, foo: ...) {}
fn var_args_cond() {
check_types(
r#"
+#![feature(lang_items)]
#[lang = "va_list"]
pub struct VaListImpl<'f>;
fn my_fn(bar: u32, #[cfg(FALSE)] foo: ..., #[cfg(not(FALSE))] foo: u32) {
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs
index f03f8d754f..c805f03044 100644
--- a/crates/hir-ty/src/tests/regression.rs
+++ b/crates/hir-ty/src/tests/regression.rs
@@ -2374,6 +2374,7 @@ fn rust_destruct_option_clone() {
check_types(
r#"
//- minicore: option, drop
+#![feature(lang_items)]
fn test(o: &Option<i32>) {
o.my_clone();
//^^^^^^^^^^^^ Option<i32>
diff --git a/crates/hir-ty/src/tests/regression/new_solver.rs b/crates/hir-ty/src/tests/regression/new_solver.rs
index e11cc85e7f..a4554673cd 100644
--- a/crates/hir-ty/src/tests/regression/new_solver.rs
+++ b/crates/hir-ty/src/tests/regression/new_solver.rs
@@ -234,6 +234,7 @@ fn main() {
// toolchains <= 1.88.0, before sized-hierarchy.
check_no_mismatches(
r#"
+#![feature(lang_items)]
#[lang = "sized"]
pub trait Sized {}
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs
index 2e107b2c59..a9a5e96f75 100644
--- a/crates/hir-ty/src/tests/simple.rs
+++ b/crates/hir-ty/src/tests/simple.rs
@@ -1,5 +1,7 @@
use expect_test::expect;
+use crate::tests::check_infer_with_mismatches;
+
use super::{check, check_infer, check_no_mismatches, check_types};
#[test]
@@ -2700,6 +2702,8 @@ fn box_into_vec() {
check_infer(
r#"
//- /core.rs crate:core
+#![feature(lang_items)]
+
#[lang = "sized"]
pub trait Sized {}
@@ -2743,22 +2747,22 @@ struct Astruct;
impl B for Astruct {}
"#,
expect![[r#"
- 614..618 'self': Box<[T], A>
- 647..679 '{ ... }': Vec<T, A>
- 693..863 '{ ...])); }': ()
- 703..706 'vec': Vec<i32, Global>
- 709..724 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
- 709..755 '<[_]>:...i32]))': Vec<i32, Global>
- 725..754 '#[rust...1i32])': Box<[i32; 1], Global>
- 747..753 '[1i32]': [i32; 1]
- 748..752 '1i32': i32
- 765..766 'v': Vec<Box<dyn B + 'static, Global>, Global>
- 786..803 '<[_]> ...to_vec': fn into_vec<Box<dyn B + '?, Global>, Global>(Box<[Box<dyn B + '?, Global>], Global>) -> Vec<Box<dyn B + '?, Global>, Global>
- 786..860 '<[_]> ...ct)]))': Vec<Box<dyn B + '?, Global>, Global>
- 804..859 '#[rust...uct)])': Box<[Box<dyn B + '?, Global>; 1], Global>
- 826..858 '[#[rus...ruct)]': [Box<dyn B + '?, Global>; 1]
- 827..857 '#[rust...truct)': Box<Astruct, Global>
- 849..856 'Astruct': Astruct
+ 639..643 'self': Box<[T], A>
+ 672..704 '{ ... }': Vec<T, A>
+ 718..888 '{ ...])); }': ()
+ 728..731 'vec': Vec<i32, Global>
+ 734..749 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
+ 734..780 '<[_]>:...i32]))': Vec<i32, Global>
+ 750..779 '#[rust...1i32])': Box<[i32; 1], Global>
+ 772..778 '[1i32]': [i32; 1]
+ 773..777 '1i32': i32
+ 790..791 'v': Vec<Box<dyn B + 'static, Global>, Global>
+ 811..828 '<[_]> ...to_vec': fn into_vec<Box<dyn B + '?, Global>, Global>(Box<[Box<dyn B + '?, Global>], Global>) -> Vec<Box<dyn B + '?, Global>, Global>
+ 811..885 '<[_]> ...ct)]))': Vec<Box<dyn B + '?, Global>, Global>
+ 829..884 '#[rust...uct)])': Box<[Box<dyn B + '?, Global>; 1], Global>
+ 851..883 '[#[rus...ruct)]': [Box<dyn B + '?, Global>; 1]
+ 852..882 '#[rust...truct)': Box<Astruct, Global>
+ 874..881 'Astruct': Astruct
"#]],
)
}
@@ -3645,6 +3649,8 @@ fn main() {
fn cstring_literals() {
check_types(
r#"
+#![feature(lang_items)]
+
#[lang = "CStr"]
pub struct CStr;
@@ -3702,7 +3708,7 @@ fn main() {
}
#[test]
-fn macro_semitransparent_hygiene() {
+fn macro_semiopaque_hygiene() {
check_types(
r#"
macro_rules! m {
@@ -3956,3 +3962,24 @@ fn bar() {
"#,
);
}
+
+#[test]
+fn cannot_coerce_capturing_closure_to_fn_ptr() {
+ check_infer_with_mismatches(
+ r#"
+fn foo() {
+ let a = 1;
+ let _: fn() -> i32 = || a;
+}
+ "#,
+ expect![[r#"
+ 9..58 '{ ...| a; }': ()
+ 19..20 'a': i32
+ 23..24 '1': i32
+ 34..35 '_': fn() -> i32
+ 51..55 '|| a': impl Fn() -> i32
+ 54..55 'a': i32
+ 51..55: expected fn() -> i32, got impl Fn() -> i32
+ "#]],
+ );
+}
diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs
index a54c0a799d..38591f486e 100644
--- a/crates/hir-ty/src/tests/traits.rs
+++ b/crates/hir-ty/src/tests/traits.rs
@@ -851,7 +851,7 @@ struct S;
trait Trait<T> {}
impl Trait<&str> for S {}
-struct O<T>;
+struct O<T>(T);
impl<U, T: Trait<U>> O<T> {
fn foo(&self) -> U { loop {} }
}
@@ -1492,7 +1492,7 @@ fn dyn_trait_in_impl() {
trait Trait<T, U> {
fn foo(&self) -> (T, U);
}
-struct S<T, U> {}
+struct S<T, U>(T, U);
impl<T, U> S<T, U> {
fn bar(&self) -> &dyn Trait<T, U> { loop {} }
}
@@ -1506,16 +1506,16 @@ fn test(s: S<u32, i32>) {
}"#,
expect![[r#"
32..36 'self': &'? Self
- 102..106 'self': &'? S<T, U>
- 128..139 '{ loop {} }': &'? (dyn Trait<T, U> + 'static)
- 130..137 'loop {}': !
- 135..137 '{}': ()
- 175..179 'self': &'? Self
- 251..252 's': S<u32, i32>
- 267..289 '{ ...z(); }': ()
- 273..274 's': S<u32, i32>
- 273..280 's.bar()': &'? (dyn Trait<u32, i32> + 'static)
- 273..286 's.bar().baz()': (u32, i32)
+ 106..110 'self': &'? S<T, U>
+ 132..143 '{ loop {} }': &'? (dyn Trait<T, U> + 'static)
+ 134..141 'loop {}': !
+ 139..141 '{}': ()
+ 179..183 'self': &'? Self
+ 255..256 's': S<u32, i32>
+ 271..293 '{ ...z(); }': ()
+ 277..278 's': S<u32, i32>
+ 277..284 's.bar()': &'? (dyn Trait<u32, i32> + 'static)
+ 277..290 's.bar().baz()': (u32, i32)
"#]],
);
}
diff --git a/crates/hir-ty/src/upvars.rs b/crates/hir-ty/src/upvars.rs
new file mode 100644
index 0000000000..ee864ab068
--- /dev/null
+++ b/crates/hir-ty/src/upvars.rs
@@ -0,0 +1,319 @@
+//! A simple query to collect all locals (upvars) a closure uses.
+
+use hir_def::{
+ DefWithBodyId,
+ expr_store::{Body, path::Path},
+ hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
+ resolver::{HasResolver, Resolver, ValueNs},
+};
+use hir_expand::mod_path::PathKind;
+use rustc_hash::{FxHashMap, FxHashSet};
+
+use crate::db::HirDatabase;
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+// Kept sorted.
+pub struct Upvars(Box<[BindingId]>);
+
+impl Upvars {
+ fn new(upvars: &FxHashSet<BindingId>) -> Upvars {
+ let mut upvars = upvars.iter().copied().collect::<Box<[_]>>();
+ upvars.sort_unstable();
+ Upvars(upvars)
+ }
+
+ #[inline]
+ pub fn contains(&self, local: BindingId) -> bool {
+ self.0.binary_search(&local).is_ok()
+ }
+
+ #[inline]
+ pub fn iter(&self) -> impl ExactSizeIterator<Item = BindingId> {
+ self.0.iter().copied()
+ }
+
+ #[inline]
+ pub fn is_empty(&self) -> bool {
+ self.0.is_empty()
+ }
+}
+
+/// Returns a map from `Expr::Closure` to its upvars.
+#[salsa::tracked(returns(as_deref))]
+pub fn upvars_mentioned(
+ db: &dyn HirDatabase,
+ owner: DefWithBodyId,
+) -> Option<Box<FxHashMap<ExprId, Upvars>>> {
+ let body = db.body(owner);
+ let mut resolver = owner.resolver(db);
+ let mut result = FxHashMap::default();
+ handle_expr_outside_closure(db, &mut resolver, owner, &body, body.body_expr, &mut result);
+ return if result.is_empty() {
+ None
+ } else {
+ result.shrink_to_fit();
+ Some(Box::new(result))
+ };
+
+ fn handle_expr_outside_closure<'db>(
+ db: &'db dyn HirDatabase,
+ resolver: &mut Resolver<'db>,
+ owner: DefWithBodyId,
+ body: &Body,
+ expr: ExprId,
+ closures_map: &mut FxHashMap<ExprId, Upvars>,
+ ) {
+ match &body[expr] {
+ &Expr::Closure { body: body_expr, .. } => {
+ let mut upvars = FxHashSet::default();
+ handle_expr_inside_closure(
+ db,
+ resolver,
+ owner,
+ body,
+ expr,
+ body_expr,
+ &mut upvars,
+ closures_map,
+ );
+ if !upvars.is_empty() {
+ closures_map.insert(expr, Upvars::new(&upvars));
+ }
+ }
+ _ => body.walk_child_exprs(expr, |expr| {
+ handle_expr_outside_closure(db, resolver, owner, body, expr, closures_map)
+ }),
+ }
+ }
+
+ fn handle_expr_inside_closure<'db>(
+ db: &'db dyn HirDatabase,
+ resolver: &mut Resolver<'db>,
+ owner: DefWithBodyId,
+ body: &Body,
+ current_closure: ExprId,
+ expr: ExprId,
+ upvars: &mut FxHashSet<BindingId>,
+ closures_map: &mut FxHashMap<ExprId, Upvars>,
+ ) {
+ match &body[expr] {
+ Expr::Path(path) => {
+ resolve_maybe_upvar(
+ db,
+ resolver,
+ owner,
+ body,
+ current_closure,
+ expr,
+ expr.into(),
+ upvars,
+ path,
+ );
+ }
+ &Expr::Assignment { target, .. } => {
+ body.walk_pats(target, &mut |pat| {
+ let Pat::Path(path) = &body[pat] else { return };
+ resolve_maybe_upvar(
+ db,
+ resolver,
+ owner,
+ body,
+ current_closure,
+ expr,
+ pat.into(),
+ upvars,
+ path,
+ );
+ });
+ }
+ &Expr::Closure { body: body_expr, .. } => {
+ let mut closure_upvars = FxHashSet::default();
+ handle_expr_inside_closure(
+ db,
+ resolver,
+ owner,
+ body,
+ expr,
+ body_expr,
+ &mut closure_upvars,
+ closures_map,
+ );
+ if !closure_upvars.is_empty() {
+ closures_map.insert(expr, Upvars::new(&closure_upvars));
+ // All nested closures' upvars are also upvars of the parent closure.
+ upvars.extend(
+ closure_upvars
+ .iter()
+ .copied()
+ .filter(|local| body.binding_owner(*local) != Some(current_closure)),
+ );
+ }
+ return;
+ }
+ _ => {}
+ }
+ body.walk_child_exprs(expr, |expr| {
+ handle_expr_inside_closure(
+ db,
+ resolver,
+ owner,
+ body,
+ current_closure,
+ expr,
+ upvars,
+ closures_map,
+ )
+ });
+ }
+}
+
+fn resolve_maybe_upvar<'db>(
+ db: &'db dyn HirDatabase,
+ resolver: &mut Resolver<'db>,
+ owner: DefWithBodyId,
+ body: &Body,
+ current_closure: ExprId,
+ expr: ExprId,
+ id: ExprOrPatId,
+ upvars: &mut FxHashSet<BindingId>,
+ path: &Path,
+) {
+ if let Path::BarePath(mod_path) = path
+ && matches!(mod_path.kind, PathKind::Plain)
+ && mod_path.segments().len() == 1
+ {
+ // Could be a variable.
+ let guard = resolver.update_to_inner_scope(db, owner, expr);
+ let resolution =
+ resolver.resolve_path_in_value_ns_fully(db, path, body.expr_or_pat_path_hygiene(id));
+ if let Some(ValueNs::LocalBinding(local)) = resolution
+ && body.binding_owner(local) != Some(current_closure)
+ {
+ upvars.insert(local);
+ }
+ resolver.reset_to_guard(guard);
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{Expect, expect};
+ use hir_def::{ModuleDefId, db::DefDatabase, nameres::crate_def_map};
+ use itertools::Itertools;
+ use span::Edition;
+ use test_fixture::WithFixture;
+
+ use crate::{test_db::TestDB, upvars::upvars_mentioned};
+
+ #[track_caller]
+ fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expectation: Expect) {
+ let db = TestDB::with_files(ra_fixture);
+ crate::attach_db(&db, || {
+ let def_map = crate_def_map(&db, db.test_crate());
+ let func = def_map
+ .modules()
+ .flat_map(|(_, module)| module.scope.declarations())
+ .filter_map(|decl| match decl {
+ ModuleDefId::FunctionId(func) => Some(func),
+ _ => None,
+ })
+ .exactly_one()
+ .unwrap_or_else(|_| panic!("expected one function"));
+ let (body, source_map) = db.body_with_source_map(func.into());
+ let Some(upvars) = upvars_mentioned(&db, func.into()) else {
+ expectation.assert_eq("");
+ return;
+ };
+ let mut closures = Vec::new();
+ for (&closure, upvars) in upvars {
+ let closure_range = source_map.expr_syntax(closure).unwrap().value.text_range();
+ let upvars = upvars
+ .iter()
+ .map(|local| body[local].name.display(&db, Edition::CURRENT))
+ .join(", ");
+ closures.push((closure_range, upvars));
+ }
+ closures.sort_unstable_by_key(|(range, _)| (range.start(), range.end()));
+ let closures = closures
+ .into_iter()
+ .map(|(range, upvars)| format!("{range:?}: {upvars}"))
+ .join("\n");
+ expectation.assert_eq(&closures);
+ });
+ }
+
+ #[test]
+ fn simple() {
+ check(
+ r#"
+struct foo;
+fn foo(param: i32) {
+ let local = "boo";
+ || { param; foo };
+ || local;
+ || { param; local; param; local; };
+ || 0xDEAFBEAF;
+}
+ "#,
+ expect![[r#"
+ 60..77: param
+ 83..91: local
+ 97..131: param, local"#]],
+ );
+ }
+
+ #[test]
+ fn nested() {
+ check(
+ r#"
+fn foo() {
+ let (a, b);
+ || {
+ || a;
+ || b;
+ };
+}
+ "#,
+ expect![[r#"
+ 31..69: a, b
+ 44..48: a
+ 58..62: b"#]],
+ );
+ }
+
+ #[test]
+ fn closure_var() {
+ check(
+ r#"
+fn foo() {
+ let upvar = 1;
+ |closure_param: i32| {
+ let closure_local = closure_param;
+ closure_local + upvar
+ };
+}
+ "#,
+ expect!["34..135: upvar"],
+ );
+ }
+
+ #[test]
+ fn closure_var_nested() {
+ check(
+ r#"
+fn foo() {
+ let a = 1;
+ |b: i32| {
+ || {
+ let c = 123;
+ a + b + c
+ }
+ };
+}
+ "#,
+ expect![[r#"
+ 30..116: a
+ 49..110: a, b"#]],
+ );
+ }
+}
diff --git a/crates/hir-ty/src/variance.rs b/crates/hir-ty/src/variance.rs
index 5b8122a0a5..6f415a5289 100644
--- a/crates/hir-ty/src/variance.rs
+++ b/crates/hir-ty/src/variance.rs
@@ -36,13 +36,11 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Variances
#[salsa::tracked(
returns(ref),
- // cycle_fn = crate::variance::variances_of_cycle_fn,
- // cycle_initial = crate::variance::variances_of_cycle_initial,
- cycle_result = crate::variance::variances_of_cycle_initial,
+ cycle_fn = crate::variance::variances_of_cycle_fn,
+ cycle_initial = crate::variance::variances_of_cycle_initial,
)]
fn variances_of_query(db: &dyn HirDatabase, def: GenericDefId) -> StoredVariancesOf {
tracing::debug!("variances_of(def={:?})", def);
- let interner = DbInterner::new_no_crate(db);
match def {
GenericDefId::FunctionId(_) => (),
GenericDefId::AdtId(adt) => {
@@ -56,43 +54,30 @@ fn variances_of_query(db: &dyn HirDatabase, def: GenericDefId) -> StoredVariance
}
}
}
- _ => return VariancesOf::empty(interner).store(),
+ _ => return VariancesOf::empty(DbInterner::new_no_crate(db)).store(),
}
let generics = generics(db, def);
let count = generics.len();
if count == 0 {
- return VariancesOf::empty(interner).store();
- }
- let mut variances =
- Context { generics, variances: vec![Variance::Bivariant; count], db }.solve();
-
- // FIXME(next-solver): This is *not* the correct behavior. I don't know if it has an actual effect,
- // since bivariance is prohibited in Rust, but rustc definitely does not fallback bivariance.
- // So why do we do this? Because, with the new solver, the effects of bivariance are catastrophic:
- // it leads to not relating types properly, and to very, very hard to debug bugs (speaking from experience).
- // Furthermore, our variance infra is known to not handle cycles properly. Therefore, at least until we fix
- // cycles, and perhaps forever at least for out tests, not allowing bivariance makes sense.
- // Why specifically invariance? I don't have a strong reason, mainly that invariance is a stronger relationship
- // (therefore, less room for mistakes) and that IMO incorrect covariance can be more problematic that incorrect
- // bivariance, at least while we don't handle lifetimes anyway.
- for variance in &mut variances {
- if *variance == Variance::Bivariant {
- *variance = Variance::Invariant;
- }
+ return VariancesOf::empty(DbInterner::new_no_crate(db)).store();
}
+ let variances =
+ Context { generics, variances: vec![Variance::Bivariant; count].into_boxed_slice(), db }
+ .solve();
VariancesOf::new_from_slice(&variances).store()
}
-// pub(crate) fn variances_of_cycle_fn(
-// _db: &dyn HirDatabase,
-// _result: &Option<Arc<[Variance]>>,
-// _count: u32,
-// _def: GenericDefId,
-// ) -> salsa::CycleRecoveryAction<Option<Arc<[Variance]>>> {
-// salsa::CycleRecoveryAction::Iterate
-// }
+pub(crate) fn variances_of_cycle_fn(
+ _db: &dyn HirDatabase,
+ _: &salsa::Cycle<'_>,
+ _last_provisional_value: &StoredVariancesOf,
+ value: StoredVariancesOf,
+ _def: GenericDefId,
+) -> StoredVariancesOf {
+ value
+}
fn glb(v1: Variance, v2: Variance) -> Variance {
// Greatest lower bound of the variance lattice as defined in The Paper:
@@ -123,18 +108,17 @@ pub(crate) fn variances_of_cycle_initial(
let generics = generics(db, def);
let count = generics.len();
- // FIXME(next-solver): Returns `Invariance` and not `Bivariance` here, see the comment in the main query.
- VariancesOf::new_from_iter(interner, std::iter::repeat_n(Variance::Invariant, count)).store()
+ VariancesOf::new_from_iter(interner, std::iter::repeat_n(Variance::Bivariant, count)).store()
}
struct Context<'db> {
db: &'db dyn HirDatabase,
generics: Generics,
- variances: Vec<Variance>,
+ variances: Box<[Variance]>,
}
impl<'db> Context<'db> {
- fn solve(mut self) -> Vec<Variance> {
+ fn solve(mut self) -> Box<[Variance]> {
tracing::debug!("solve(generics={:?})", self.generics);
match self.generics.def() {
GenericDefId::AdtId(adt) => {
@@ -484,8 +468,8 @@ struct Other<'a> {
}
"#,
expect![[r#"
- Hello['a: invariant]
- Other['a: invariant]
+ Hello['a: bivariant]
+ Other['a: bivariant]
"#]],
);
}
@@ -504,7 +488,7 @@ struct Foo<T: Trait> { //~ ERROR [T: o]
}
"#,
expect![[r#"
- Foo[T: invariant]
+ Foo[T: bivariant]
"#]],
);
}
@@ -586,9 +570,9 @@ struct TestBox<U,T:Getter<U>+Setter<U>> { //~ ERROR [U: *, T: +]
get[Self: contravariant, T: covariant]
get[Self: contravariant, T: contravariant]
TestStruct[U: covariant, T: covariant]
- TestEnum[U: invariant, T: covariant]
- TestContraStruct[U: invariant, T: covariant]
- TestBox[U: invariant, T: covariant]
+ TestEnum[U: bivariant, T: covariant]
+ TestContraStruct[U: bivariant, T: covariant]
+ TestBox[U: bivariant, T: covariant]
"#]],
);
}
@@ -708,8 +692,8 @@ enum SomeEnum<'a> { Nothing } //~ ERROR parameter `'a` is never used
trait SomeTrait<'a> { fn foo(&self); } // OK on traits.
"#,
expect![[r#"
- SomeStruct['a: invariant]
- SomeEnum['a: invariant]
+ SomeStruct['a: bivariant]
+ SomeEnum['a: bivariant]
foo[Self: contravariant, 'a: invariant]
"#]],
);
@@ -737,14 +721,14 @@ struct DoubleNothing<T> {
"#,
expect![[r#"
- SomeStruct[A: invariant]
- SomeEnum[A: invariant]
- ListCell[T: invariant]
- SelfTyAlias[T: invariant]
- WithBounds[T: invariant]
- WithWhereBounds[T: invariant]
- WithOutlivesBounds[T: invariant]
- DoubleNothing[T: invariant]
+ SomeStruct[A: bivariant]
+ SomeEnum[A: bivariant]
+ ListCell[T: bivariant]
+ SelfTyAlias[T: bivariant]
+ WithBounds[T: bivariant]
+ WithWhereBounds[T: bivariant]
+ WithOutlivesBounds[T: bivariant]
+ DoubleNothing[T: bivariant]
"#]],
);
}
@@ -855,7 +839,7 @@ struct S3<T>(S<T, T>);
"#,
expect![[r#"
S[T: covariant]
- S2[T: invariant]
+ S2[T: bivariant]
S3[T: covariant]
"#]],
);
@@ -868,7 +852,7 @@ struct S3<T>(S<T, T>);
struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
"#,
expect![[r#"
- FixedPoint[T: invariant, U: invariant, V: invariant]
+ FixedPoint[T: covariant, U: covariant, V: covariant]
"#]],
);
}
diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs
index d1056f31e1..cba1b39e52 100644
--- a/crates/hir/src/attrs.rs
+++ b/crates/hir/src/attrs.rs
@@ -35,6 +35,8 @@ pub enum AttrsOwner {
Field(FieldId),
LifetimeParam(LifetimeParamId),
TypeOrConstParam(TypeOrConstParamId),
+ /// Things that do not have attributes. Used for builtin derives.
+ Dummy,
}
impl AttrsOwner {
@@ -123,7 +125,9 @@ impl AttrsWithOwner {
let owner = match self.owner {
AttrsOwner::AttrDef(it) => Either::Left(it),
AttrsOwner::Field(it) => Either::Right(it),
- AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return &[],
+ AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) | AttrsOwner::Dummy => {
+ return &[];
+ }
};
self.attrs.doc_aliases(db, owner)
}
@@ -133,7 +137,9 @@ impl AttrsWithOwner {
let owner = match self.owner {
AttrsOwner::AttrDef(it) => Either::Left(it),
AttrsOwner::Field(it) => Either::Right(it),
- AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return None,
+ AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) | AttrsOwner::Dummy => {
+ return None;
+ }
};
self.attrs.cfgs(db, owner)
}
@@ -143,7 +149,9 @@ impl AttrsWithOwner {
match self.owner {
AttrsOwner::AttrDef(it) => AttrFlags::docs(db, it).as_deref(),
AttrsOwner::Field(it) => AttrFlags::field_docs(db, it),
- AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => None,
+ AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) | AttrsOwner::Dummy => {
+ None
+ }
}
}
}
@@ -156,6 +164,9 @@ pub trait HasAttrs: Sized {
AttrsOwner::Field(it) => AttrsWithOwner::new_field(db, it),
AttrsOwner::LifetimeParam(it) => AttrsWithOwner::new_lifetime_param(db, it),
AttrsOwner::TypeOrConstParam(it) => AttrsWithOwner::new_type_or_const_param(db, it),
+ AttrsOwner::Dummy => {
+ AttrsWithOwner { attrs: AttrFlags::empty(), owner: AttrsOwner::Dummy }
+ }
}
}
@@ -167,7 +178,9 @@ pub trait HasAttrs: Sized {
match self.attr_id(db) {
AttrsOwner::AttrDef(it) => AttrFlags::docs(db, it).as_deref(),
AttrsOwner::Field(it) => AttrFlags::field_docs(db, it),
- AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => None,
+ AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) | AttrsOwner::Dummy => {
+ None
+ }
}
}
}
@@ -190,12 +203,28 @@ impl_has_attrs![
(Trait, TraitId),
(TypeAlias, TypeAliasId),
(Macro, MacroId),
- (Function, FunctionId),
(Adt, AdtId),
- (Impl, ImplId),
(ExternCrateDecl, ExternCrateId),
];
+impl HasAttrs for Function {
+ fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner {
+ match self.id {
+ crate::AnyFunctionId::FunctionId(id) => AttrsOwner::AttrDef(id.into()),
+ crate::AnyFunctionId::BuiltinDeriveImplMethod { .. } => AttrsOwner::Dummy,
+ }
+ }
+}
+
+impl HasAttrs for Impl {
+ fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner {
+ match self.id {
+ hir_ty::next_solver::AnyImplId::ImplId(id) => AttrsOwner::AttrDef(id.into()),
+ hir_ty::next_solver::AnyImplId::BuiltinDeriveImplId(..) => AttrsOwner::Dummy,
+ }
+ }
+}
+
macro_rules! impl_has_attrs_enum {
($($variant:ident),* for $enum:ident) => {$(
impl HasAttrs for $variant {
@@ -294,7 +323,9 @@ fn resolve_doc_path_on_(
AttrsOwner::AttrDef(AttrDefId::MacroId(it)) => it.resolver(db),
AttrsOwner::AttrDef(AttrDefId::ExternCrateId(it)) => it.resolver(db),
AttrsOwner::Field(it) => it.parent.resolver(db),
- AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return None,
+ AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) | AttrsOwner::Dummy => {
+ return None;
+ }
};
let mut modpath = doc_modpath_from_str(link)?;
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index d0d8c4877d..1f9af564c3 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -2,19 +2,22 @@
use either::Either;
use hir_def::{
- AdtId, GenericDefId,
+ AdtId, BuiltinDeriveImplId, FunctionId, GenericDefId, ImplId, ItemContainerId,
+ builtin_derive::BuiltinDeriveImplMethod,
expr_store::ExpressionStore,
hir::generics::{GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate},
item_tree::FieldsShape,
signatures::{StaticFlags, TraitFlags},
type_ref::{TypeBound, TypeRef, TypeRefId},
};
+use hir_expand::name::Name;
use hir_ty::{
GenericPredicates,
db::HirDatabase,
display::{
HirDisplay, HirDisplayWithExpressionStore, HirFormatter, Result, SizedByDefault,
- hir_display_with_store, write_bounds_like_dyn_trait_with_prefix, write_visibility,
+ hir_display_with_store, write_bounds_like_dyn_trait_with_prefix, write_params_bounds,
+ write_visibility,
},
next_solver::ClauseKind,
};
@@ -22,25 +25,78 @@ use itertools::Itertools;
use rustc_type_ir::inherent::IntoKind;
use crate::{
- Adt, AsAssocItem, AssocItem, AssocItemContainer, Const, ConstParam, Crate, Enum,
+ Adt, AnyFunctionId, AsAssocItem, AssocItem, AssocItemContainer, Const, ConstParam, Crate, Enum,
ExternCrateDecl, Field, Function, GenericParam, HasCrate, HasVisibility, Impl, LifetimeParam,
Macro, Module, SelfParam, Static, Struct, StructKind, Trait, TraitRef, TupleField, Type,
TypeAlias, TypeNs, TypeOrConstParam, TypeParam, Union, Variant,
};
+fn write_builtin_derive_impl_method<'db>(
+ f: &mut HirFormatter<'_, 'db>,
+ impl_: BuiltinDeriveImplId,
+ method: BuiltinDeriveImplMethod,
+) -> Result {
+ let db = f.db;
+ let loc = impl_.loc(db);
+ let (adt_params, _adt_params_store) = db.generic_params_and_store(loc.adt.into());
+
+ if f.show_container_bounds() && !adt_params.is_empty() {
+ f.write_str("impl")?;
+ write_generic_params(loc.adt.into(), f)?;
+ f.write_char(' ')?;
+ let trait_id = loc.trait_.get_id(f.lang_items());
+ if let Some(trait_id) = trait_id {
+ f.start_location_link(trait_id.into());
+ }
+ write!(f, "{}", Name::new_symbol_root(loc.trait_.name()).display(db, f.edition()))?;
+ if trait_id.is_some() {
+ f.end_location_link();
+ }
+ f.write_str(" for ")?;
+ f.start_location_link(loc.adt.into());
+ write!(f, "{}", Adt::from(loc.adt).name(db).display(db, f.edition()))?;
+ f.end_location_link();
+ write_generic_args(loc.adt.into(), f)?;
+ f.write_char('\n')?;
+ }
+
+ let Some(trait_method) = method.trait_method(db, impl_) else {
+ return write!(f, "fn {}(…)", method.name());
+ };
+ let has_written_where = write_function(f, trait_method)?;
+
+ if f.show_container_bounds() && !adt_params.is_empty() {
+ if !has_written_where {
+ f.write_str("\nwhere")?
+ }
+ write!(f, "\n // Bounds from impl:")?;
+
+ let predicates =
+ hir_ty::builtin_derive::predicates(db, impl_).explicit_predicates().skip_binder();
+ write_params_bounds(f, predicates)?;
+ }
+
+ Ok(())
+}
+
impl<'db> HirDisplay<'db> for Function {
fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result {
+ let id = match self.id {
+ AnyFunctionId::FunctionId(id) => id,
+ AnyFunctionId::BuiltinDeriveImplMethod { method, impl_ } => {
+ return write_builtin_derive_impl_method(f, impl_, method);
+ }
+ };
+
let db = f.db;
- let data = db.function_signature(self.id);
- let container = self.as_assoc_item(db).map(|it| it.container(db));
- let mut module = self.module(db);
+ let container = id.loc(db).container;
// Write container (trait or impl)
let container_params = match container {
- Some(AssocItemContainer::Trait(trait_)) => {
- let (params, params_store) = f.db.generic_params_and_store(trait_.id.into());
+ ItemContainerId::TraitId(trait_) => {
+ let (params, params_store) = f.db.generic_params_and_store(trait_.into());
if f.show_container_bounds() && !params.is_empty() {
- write_trait_header(&trait_, f)?;
+ write_trait_header(trait_.into(), f)?;
f.write_char('\n')?;
has_disaplayable_predicates(f.db, &params, &params_store)
.then_some((params, params_store))
@@ -48,10 +104,10 @@ impl<'db> HirDisplay<'db> for Function {
None
}
}
- Some(AssocItemContainer::Impl(impl_)) => {
- let (params, params_store) = f.db.generic_params_and_store(impl_.id.into());
+ ItemContainerId::ImplId(impl_) => {
+ let (params, params_store) = f.db.generic_params_and_store(impl_.into());
if f.show_container_bounds() && !params.is_empty() {
- write_impl_header(&impl_, f)?;
+ write_impl_header(impl_, f)?;
f.write_char('\n')?;
has_disaplayable_predicates(f.db, &params, &params_store)
.then_some((params, params_store))
@@ -59,140 +115,151 @@ impl<'db> HirDisplay<'db> for Function {
None
}
}
- None => None,
+ _ => None,
};
// Write signature of the function
- // Block-local impls are "hoisted" to the nearest (non-block) module.
- if let Some(AssocItemContainer::Impl(_)) = container {
- module = module.nearest_non_block_module(db);
+ let has_written_where = write_function(f, id)?;
+ if let Some((container_params, container_params_store)) = container_params {
+ if !has_written_where {
+ f.write_str("\nwhere")?;
+ }
+ let container_name = match container {
+ ItemContainerId::TraitId(_) => "trait",
+ ItemContainerId::ImplId(_) => "impl",
+ _ => unreachable!(),
+ };
+ write!(f, "\n // Bounds from {container_name}:",)?;
+ write_where_predicates(&container_params, &container_params_store, f)?;
}
- let module_id = module.id;
-
- write_visibility(module_id, self.visibility(db), f)?;
+ Ok(())
+ }
+}
- if data.is_default() {
- f.write_str("default ")?;
- }
- if data.is_const() {
- f.write_str("const ")?;
- }
- if data.is_async() {
- f.write_str("async ")?;
- }
- // FIXME: This will show `unsafe` for functions that are `#[target_feature]` but not unsafe
- // (they are conditionally unsafe to call). We probably should show something else.
- if self.is_unsafe_to_call(db, None, f.edition()) {
- f.write_str("unsafe ")?;
- }
- if let Some(abi) = &data.abi {
- write!(f, "extern \"{}\" ", abi.as_str())?;
- }
- write!(f, "fn {}", data.name.display(f.db, f.edition()))?;
+fn write_function<'db>(f: &mut HirFormatter<'_, 'db>, func_id: FunctionId) -> Result<bool> {
+ let db = f.db;
+ let func = Function::from(func_id);
+ let data = db.function_signature(func_id);
- write_generic_params(GenericDefId::FunctionId(self.id), f)?;
+ let mut module = func.module(db);
+ // Block-local impls are "hoisted" to the nearest (non-block) module.
+ if let ItemContainerId::ImplId(_) = func_id.loc(db).container {
+ module = module.nearest_non_block_module(db);
+ }
+ let module_id = module.id;
- f.write_char('(')?;
+ write_visibility(module_id, func.visibility(db), f)?;
- let mut first = true;
- let mut skip_self = 0;
- if let Some(self_param) = self.self_param(db) {
- self_param.hir_fmt(f)?;
- first = false;
- skip_self = 1;
- }
+ if data.is_default() {
+ f.write_str("default ")?;
+ }
+ if data.is_const() {
+ f.write_str("const ")?;
+ }
+ if data.is_async() {
+ f.write_str("async ")?;
+ }
+ // FIXME: This will show `unsafe` for functions that are `#[target_feature]` but not unsafe
+ // (they are conditionally unsafe to call). We probably should show something else.
+ if func.is_unsafe_to_call(db, None, f.edition()) {
+ f.write_str("unsafe ")?;
+ }
+ if let Some(abi) = &data.abi {
+ write!(f, "extern \"{}\" ", abi.as_str())?;
+ }
+ write!(f, "fn {}", data.name.display(f.db, f.edition()))?;
- // FIXME: Use resolved `param.ty` once we no longer discard lifetimes
- let body = db.body(self.id.into());
- for (type_ref, param) in data.params.iter().zip(self.assoc_fn_params(db)).skip(skip_self) {
- if !first {
- f.write_str(", ")?;
- } else {
- first = false;
- }
+ write_generic_params(GenericDefId::FunctionId(func_id), f)?;
- let pat_id = body.params[param.idx - body.self_param.is_some() as usize];
- let pat_str = body.pretty_print_pat(db, self.id.into(), pat_id, true, f.edition());
- f.write_str(&pat_str)?;
+ f.write_char('(')?;
- f.write_str(": ")?;
- type_ref.hir_fmt(f, &data.store)?;
+ let mut first = true;
+ let mut skip_self = 0;
+ if let Some(self_param) = func.self_param(db) {
+ self_param.hir_fmt(f)?;
+ first = false;
+ skip_self = 1;
+ }
+
+ // FIXME: Use resolved `param.ty` once we no longer discard lifetimes
+ let body = db.body(func_id.into());
+ for (type_ref, param) in data.params.iter().zip(func.assoc_fn_params(db)).skip(skip_self) {
+ if !first {
+ f.write_str(", ")?;
+ } else {
+ first = false;
}
- if data.is_varargs() {
- if !first {
- f.write_str(", ")?;
- }
- f.write_str("...")?;
- }
-
- f.write_char(')')?;
-
- // `FunctionData::ret_type` will be `::core::future::Future<Output = ...>` for async fns.
- // Use ugly pattern match to strip the Future trait.
- // Better way?
- let ret_type = if !data.is_async() {
- data.ret_type
- } else if let Some(ret_type) = data.ret_type {
- match &data.store[ret_type] {
- TypeRef::ImplTrait(bounds) => match &bounds[0] {
- &TypeBound::Path(path, _) => Some(
- *data.store[path]
- .segments()
- .iter()
- .last()
- .unwrap()
- .args_and_bindings
- .unwrap()
- .bindings[0]
- .type_ref
- .as_ref()
- .unwrap(),
- ),
- _ => None,
- },
+ let pat_id = body.params[param.idx - body.self_param.is_some() as usize];
+ let pat_str = body.pretty_print_pat(db, func_id.into(), pat_id, true, f.edition());
+ f.write_str(&pat_str)?;
+
+ f.write_str(": ")?;
+ type_ref.hir_fmt(f, &data.store)?;
+ }
+
+ if data.is_varargs() {
+ if !first {
+ f.write_str(", ")?;
+ }
+ f.write_str("...")?;
+ }
+
+ f.write_char(')')?;
+
+ // `FunctionData::ret_type` will be `::core::future::Future<Output = ...>` for async fns.
+ // Use ugly pattern match to strip the Future trait.
+ // Better way?
+ let ret_type = if !data.is_async() {
+ data.ret_type
+ } else if let Some(ret_type) = data.ret_type {
+ match &data.store[ret_type] {
+ TypeRef::ImplTrait(bounds) => match &bounds[0] {
+ &TypeBound::Path(path, _) => Some(
+ *data.store[path]
+ .segments()
+ .iter()
+ .last()
+ .unwrap()
+ .args_and_bindings
+ .unwrap()
+ .bindings[0]
+ .type_ref
+ .as_ref()
+ .unwrap(),
+ ),
_ => None,
- }
- } else {
- None
- };
-
- if let Some(ret_type) = ret_type {
- match &data.store[ret_type] {
- TypeRef::Tuple(tup) if tup.is_empty() => {}
- _ => {
- f.write_str(" -> ")?;
- ret_type.hir_fmt(f, &data.store)?;
- }
- }
+ },
+ _ => None,
}
+ } else {
+ None
+ };
- // Write where clauses
- let has_written_where = write_where_clause(GenericDefId::FunctionId(self.id), f)?;
- if let Some((container_params, container_params_store)) = container_params {
- if !has_written_where {
- f.write_str("\nwhere")?;
+ if let Some(ret_type) = ret_type {
+ match &data.store[ret_type] {
+ TypeRef::Tuple(tup) if tup.is_empty() => {}
+ _ => {
+ f.write_str(" -> ")?;
+ ret_type.hir_fmt(f, &data.store)?;
}
- let container_name = match container.unwrap() {
- AssocItemContainer::Trait(_) => "trait",
- AssocItemContainer::Impl(_) => "impl",
- };
- write!(f, "\n // Bounds from {container_name}:",)?;
- write_where_predicates(&container_params, &container_params_store, f)?;
}
- Ok(())
}
+
+ // Write where clauses
+ let has_written_where = write_where_clause(GenericDefId::FunctionId(func_id), f)?;
+ Ok(has_written_where)
}
-fn write_impl_header<'db>(impl_: &Impl, f: &mut HirFormatter<'_, 'db>) -> Result {
+fn write_impl_header<'db>(impl_: ImplId, f: &mut HirFormatter<'_, 'db>) -> Result {
let db = f.db;
f.write_str("impl")?;
- let def_id = GenericDefId::ImplId(impl_.id);
+ let def_id = GenericDefId::ImplId(impl_);
write_generic_params(def_id, f)?;
- let impl_data = db.impl_signature(impl_.id);
+ let impl_data = db.impl_signature(impl_);
if let Some(target_trait) = &impl_data.target_trait {
f.write_char(' ')?;
hir_display_with_store(&impl_data.store[target_trait.path], &impl_data.store).hir_fmt(f)?;
@@ -200,14 +267,28 @@ fn write_impl_header<'db>(impl_: &Impl, f: &mut HirFormatter<'_, 'db>) -> Result
}
f.write_char(' ')?;
- impl_.self_ty(db).hir_fmt(f)?;
+ Impl::from(impl_).self_ty(db).hir_fmt(f)?;
Ok(())
}
impl<'db> HirDisplay<'db> for SelfParam {
fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result {
- let data = f.db.function_signature(self.func);
+ let func = match self.func.id {
+ AnyFunctionId::FunctionId(id) => id,
+ AnyFunctionId::BuiltinDeriveImplMethod { method, .. } => match method {
+ BuiltinDeriveImplMethod::clone
+ | BuiltinDeriveImplMethod::fmt
+ | BuiltinDeriveImplMethod::hash
+ | BuiltinDeriveImplMethod::cmp
+ | BuiltinDeriveImplMethod::partial_cmp
+ | BuiltinDeriveImplMethod::eq => return f.write_str("&self"),
+ BuiltinDeriveImplMethod::default => {
+ unreachable!("this trait method does not have a self param")
+ }
+ },
+ };
+ let data = f.db.function_signature(func);
let param = *data.params.first().unwrap();
match &data.store[param] {
TypeRef::Path(p) if p.is_self_type() => f.write_str("self"),
@@ -553,6 +634,18 @@ impl<'db> HirDisplay<'db> for ConstParam {
}
fn write_generic_params<'db>(def: GenericDefId, f: &mut HirFormatter<'_, 'db>) -> Result {
+ write_generic_params_or_args(def, f, true)
+}
+
+fn write_generic_args<'db>(def: GenericDefId, f: &mut HirFormatter<'_, 'db>) -> Result {
+ write_generic_params_or_args(def, f, false)
+}
+
+fn write_generic_params_or_args<'db>(
+ def: GenericDefId,
+ f: &mut HirFormatter<'_, 'db>,
+ include_defaults: bool,
+) -> Result {
let (params, store) = f.db.generic_params_and_store(def);
if params.iter_lt().next().is_none()
&& params.iter_type_or_consts().all(|it| it.1.const_param().is_none())
@@ -587,7 +680,7 @@ fn write_generic_params<'db>(def: GenericDefId, f: &mut HirFormatter<'_, 'db>) -
}
delim(f)?;
write!(f, "{}", name.display(f.db, f.edition()))?;
- if let Some(default) = &ty.default {
+ if include_defaults && let Some(default) = &ty.default {
f.write_str(" = ")?;
default.hir_fmt(f, &store)?;
}
@@ -597,7 +690,7 @@ fn write_generic_params<'db>(def: GenericDefId, f: &mut HirFormatter<'_, 'db>) -
write!(f, "const {}: ", name.display(f.db, f.edition()))?;
c.ty.hir_fmt(f, &store)?;
- if let Some(default) = &c.default {
+ if include_defaults && let Some(default) = &c.default {
f.write_str(" = ")?;
default.hir_fmt(f, &store)?;
}
@@ -746,7 +839,7 @@ impl<'db> HirDisplay<'db> for TraitRef<'db> {
impl<'db> HirDisplay<'db> for Trait {
fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result {
// FIXME(trait-alias) needs special handling to print the equal sign
- write_trait_header(self, f)?;
+ write_trait_header(*self, f)?;
let def_id = GenericDefId::TraitId(self.id);
let has_where_clause = write_where_clause(def_id, f)?;
@@ -783,7 +876,7 @@ impl<'db> HirDisplay<'db> for Trait {
}
}
-fn write_trait_header<'db>(trait_: &Trait, f: &mut HirFormatter<'_, 'db>) -> Result {
+fn write_trait_header<'db>(trait_: Trait, f: &mut HirFormatter<'_, 'db>) -> Result {
write_visibility(trait_.module(f.db).id, trait_.visibility(f.db), f)?;
let data = f.db.trait_signature(trait_.id);
if data.flags.contains(TraitFlags::UNSAFE) {
diff --git a/crates/hir/src/from_id.rs b/crates/hir/src/from_id.rs
index bc025c5ef5..fc20f4b46b 100644
--- a/crates/hir/src/from_id.rs
+++ b/crates/hir/src/from_id.rs
@@ -4,14 +4,15 @@
//! are splitting the hir.
use hir_def::{
- AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, GenericDefId, GenericParamId,
- ModuleDefId, VariantId,
+ AdtId, AssocItemId, BuiltinDeriveImplId, DefWithBodyId, EnumVariantId, FieldId, GenericDefId,
+ GenericParamId, ModuleDefId, VariantId,
hir::{BindingId, LabelId},
};
+use hir_ty::next_solver::AnyImplId;
use crate::{
- Adt, AssocItem, BuiltinType, DefWithBody, Field, GenericDef, GenericParam, ItemInNs, Label,
- Local, ModuleDef, Variant, VariantDef,
+ Adt, AnyFunctionId, AssocItem, BuiltinType, DefWithBody, Field, GenericDef, GenericParam,
+ ItemInNs, Label, Local, ModuleDef, Variant, VariantDef,
};
macro_rules! from_id {
@@ -39,8 +40,8 @@ from_id![
(hir_def::TraitId, crate::Trait),
(hir_def::StaticId, crate::Static),
(hir_def::ConstId, crate::Const),
- (hir_def::FunctionId, crate::Function),
- (hir_def::ImplId, crate::Impl),
+ (crate::AnyFunctionId, crate::Function),
+ (hir_ty::next_solver::AnyImplId, crate::Impl),
(hir_def::TypeOrConstParamId, crate::TypeOrConstParam),
(hir_def::TypeParamId, crate::TypeParam),
(hir_def::ConstParamId, crate::ConstParam),
@@ -119,11 +120,15 @@ impl From<ModuleDefId> for ModuleDef {
}
}
-impl From<ModuleDef> for ModuleDefId {
- fn from(id: ModuleDef) -> Self {
- match id {
+impl TryFrom<ModuleDef> for ModuleDefId {
+ type Error = ();
+ fn try_from(id: ModuleDef) -> Result<Self, Self::Error> {
+ Ok(match id {
ModuleDef::Module(it) => ModuleDefId::ModuleId(it.into()),
- ModuleDef::Function(it) => ModuleDefId::FunctionId(it.into()),
+ ModuleDef::Function(it) => match it.id {
+ AnyFunctionId::FunctionId(it) => it.into(),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => return Err(()),
+ },
ModuleDef::Adt(it) => ModuleDefId::AdtId(it.into()),
ModuleDef::Variant(it) => ModuleDefId::EnumVariantId(it.into()),
ModuleDef::Const(it) => ModuleDefId::ConstId(it.into()),
@@ -132,18 +137,22 @@ impl From<ModuleDef> for ModuleDefId {
ModuleDef::TypeAlias(it) => ModuleDefId::TypeAliasId(it.into()),
ModuleDef::BuiltinType(it) => ModuleDefId::BuiltinType(it.into()),
ModuleDef::Macro(it) => ModuleDefId::MacroId(it.into()),
- }
+ })
}
}
-impl From<DefWithBody> for DefWithBodyId {
- fn from(def: DefWithBody) -> Self {
- match def {
- DefWithBody::Function(it) => DefWithBodyId::FunctionId(it.id),
+impl TryFrom<DefWithBody> for DefWithBodyId {
+ type Error = ();
+ fn try_from(def: DefWithBody) -> Result<Self, ()> {
+ Ok(match def {
+ DefWithBody::Function(it) => match it.id {
+ AnyFunctionId::FunctionId(it) => it.into(),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => return Err(()),
+ },
DefWithBody::Static(it) => DefWithBodyId::StaticId(it.id),
DefWithBody::Const(it) => DefWithBodyId::ConstId(it.id),
DefWithBody::Variant(it) => DefWithBodyId::VariantId(it.into()),
- }
+ })
}
}
@@ -168,17 +177,11 @@ impl From<AssocItemId> for AssocItem {
}
}
-impl From<GenericDef> for GenericDefId {
- fn from(def: GenericDef) -> Self {
- match def {
- GenericDef::Function(it) => GenericDefId::FunctionId(it.id),
- GenericDef::Adt(it) => GenericDefId::AdtId(it.into()),
- GenericDef::Trait(it) => GenericDefId::TraitId(it.id),
- GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id),
- GenericDef::Impl(it) => GenericDefId::ImplId(it.id),
- GenericDef::Const(it) => GenericDefId::ConstId(it.id),
- GenericDef::Static(it) => GenericDefId::StaticId(it.id),
- }
+impl TryFrom<GenericDef> for GenericDefId {
+ type Error = ();
+
+ fn try_from(def: GenericDef) -> Result<Self, Self::Error> {
+ def.id().ok_or(())
}
}
@@ -238,13 +241,17 @@ impl From<FieldId> for Field {
}
}
-impl From<AssocItem> for GenericDefId {
- fn from(item: AssocItem) -> Self {
- match item {
- AssocItem::Function(f) => f.id.into(),
+impl TryFrom<AssocItem> for GenericDefId {
+ type Error = ();
+ fn try_from(item: AssocItem) -> Result<Self, Self::Error> {
+ Ok(match item {
+ AssocItem::Function(f) => match f.id {
+ AnyFunctionId::FunctionId(it) => it.into(),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => return Err(()),
+ },
AssocItem::Const(c) => c.id.into(),
AssocItem::TypeAlias(t) => t.id.into(),
- }
+ })
}
}
@@ -270,13 +277,14 @@ impl From<hir_def::item_scope::ItemInNs> for ItemInNs {
}
}
-impl From<ItemInNs> for hir_def::item_scope::ItemInNs {
- fn from(it: ItemInNs) -> Self {
- match it {
- ItemInNs::Types(it) => Self::Types(it.into()),
- ItemInNs::Values(it) => Self::Values(it.into()),
+impl TryFrom<ItemInNs> for hir_def::item_scope::ItemInNs {
+ type Error = ();
+ fn try_from(it: ItemInNs) -> Result<Self, Self::Error> {
+ Ok(match it {
+ ItemInNs::Types(it) => Self::Types(it.try_into()?),
+ ItemInNs::Values(it) => Self::Values(it.try_into()?),
ItemInNs::Macros(it) => Self::Macros(it.into()),
- }
+ })
}
}
@@ -291,3 +299,21 @@ impl From<BuiltinType> for hir_def::builtin_type::BuiltinType {
it.inner
}
}
+
+impl From<hir_def::ImplId> for crate::Impl {
+ fn from(value: hir_def::ImplId) -> Self {
+ crate::Impl { id: AnyImplId::ImplId(value) }
+ }
+}
+
+impl From<BuiltinDeriveImplId> for crate::Impl {
+ fn from(value: BuiltinDeriveImplId) -> Self {
+ crate::Impl { id: AnyImplId::BuiltinDeriveImplId(value) }
+ }
+}
+
+impl From<hir_def::FunctionId> for crate::Function {
+ fn from(value: hir_def::FunctionId) -> Self {
+ crate::Function { id: AnyFunctionId::FunctionId(value) }
+ }
+}
diff --git a/crates/hir/src/has_source.rs b/crates/hir/src/has_source.rs
index 1aa7994001..e032a16989 100644
--- a/crates/hir/src/has_source.rs
+++ b/crates/hir/src/has_source.rs
@@ -7,18 +7,18 @@ use hir_def::{
src::{HasChildSource, HasSource as _},
};
use hir_expand::{EditionedFileId, HirFileId, InFile};
-use hir_ty::db::InternedClosure;
-use syntax::ast;
+use hir_ty::{db::InternedClosure, next_solver::AnyImplId};
+use syntax::{AstNode, ast};
use tt::TextRange;
use crate::{
- Adt, Callee, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl,
+ Adt, AnyFunctionId, Callee, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl,
InlineAsmOperand, Label, LifetimeParam, LocalSource, Macro, Module, Param, SelfParam, Static,
Struct, Trait, TypeAlias, TypeOrConstParam, Union, Variant, VariantDef, db::HirDatabase,
};
-pub trait HasSource {
- type Ast;
+pub trait HasSource: Sized {
+ type Ast: AstNode;
/// Fetches the definition's source node.
/// Using [`crate::SemanticsImpl::source`] is preferred when working with [`crate::Semantics`],
/// as that caches the parsed file in the semantics' cache.
@@ -27,6 +27,20 @@ pub trait HasSource {
/// But we made this method `Option` to support rlib in the future
/// by <https://github.com/rust-lang/rust-analyzer/issues/6913>
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>>;
+
+ /// Fetches the source node, along with its full range.
+ ///
+ /// The reason for the separate existence of this method is that some things, notably builtin derive impls,
+ /// do not really have a source node, at least not of the correct type. But we still can trace them
+ /// to source code (the derive producing them). So this method will return the range if it is supported,
+ /// and if the node is supported too it will return it as well.
+ fn source_with_range(
+ self,
+ db: &dyn HirDatabase,
+ ) -> Option<InFile<(TextRange, Option<Self::Ast>)>> {
+ let source = self.source(db)?;
+ Some(source.map(|node| (node.syntax().text_range(), Some(node))))
+ }
}
/// NB: Module is !HasSource, because it has two source nodes at the same time:
@@ -146,7 +160,30 @@ impl HasSource for Variant {
impl HasSource for Function {
type Ast = ast::Fn;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
- Some(self.id.lookup(db).source(db))
+ match self.id {
+ AnyFunctionId::FunctionId(id) => Some(id.loc(db).source(db)),
+ // When calling `source()`, we use the trait method source, but when calling `source_with_range()`,
+ // we return `None` as the syntax node source. This is relying on the assumption that if you are calling
+ // `source_with_range()` (e.g. in navigation) you're prepared to deal with no source node, while if
+ // you call `source()` maybe you don't - therefore we fall back to the trait method, so as not to lose features.
+ AnyFunctionId::BuiltinDeriveImplMethod { method, impl_ } => method
+ .trait_method(db, impl_)
+ .and_then(|trait_method| Function::from(trait_method).source(db)),
+ }
+ }
+
+ fn source_with_range(
+ self,
+ db: &dyn HirDatabase,
+ ) -> Option<InFile<(TextRange, Option<Self::Ast>)>> {
+ match self.id {
+ AnyFunctionId::FunctionId(id) => Some(
+ id.loc(db).source(db).map(|source| (source.syntax().text_range(), Some(source))),
+ ),
+ AnyFunctionId::BuiltinDeriveImplMethod { impl_, .. } => {
+ Some(impl_.loc(db).source(db).map(|range| (range, None)))
+ }
+ }
}
}
impl HasSource for Const {
@@ -190,7 +227,24 @@ impl HasSource for Macro {
impl HasSource for Impl {
type Ast = ast::Impl;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
- Some(self.id.lookup(db).source(db))
+ match self.id {
+ AnyImplId::ImplId(id) => Some(id.loc(db).source(db)),
+ AnyImplId::BuiltinDeriveImplId(_) => None,
+ }
+ }
+
+ fn source_with_range(
+ self,
+ db: &dyn HirDatabase,
+ ) -> Option<InFile<(TextRange, Option<Self::Ast>)>> {
+ match self.id {
+ AnyImplId::ImplId(id) => Some(
+ id.loc(db).source(db).map(|source| (source.syntax().text_range(), Some(source))),
+ ),
+ AnyImplId::BuiltinDeriveImplId(impl_) => {
+ Some(impl_.loc(db).source(db).map(|range| (range, None)))
+ }
+ }
}
}
@@ -224,7 +278,7 @@ impl HasSource for Param<'_> {
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
match self.func {
Callee::Def(CallableDefId::FunctionId(func)) => {
- let InFile { file_id, value } = Function { id: func }.source(db)?;
+ let InFile { file_id, value } = Function::from(func).source(db)?;
let params = value.param_list()?;
if let Some(self_param) = params.self_param() {
if let Some(idx) = self.idx.checked_sub(1) {
@@ -261,7 +315,7 @@ impl HasSource for SelfParam {
type Ast = ast::SelfParam;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
- let InFile { file_id, value } = Function::from(self.func).source(db)?;
+ let InFile { file_id, value } = self.func.source(db)?;
value
.param_list()
.and_then(|params| params.self_param())
@@ -276,7 +330,7 @@ impl HasSource for Label {
let (_body, source_map) = db.body_with_source_map(self.parent);
let src = source_map.label_syntax(self.label_id);
let root = src.file_syntax(db);
- Some(src.map(|ast| ast.to_node(&root)))
+ src.map(|ast| ast.to_node(&root).left()).transpose()
}
}
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 9fc29de4a1..78be5a7e8f 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -48,12 +48,13 @@ use arrayvec::ArrayVec;
use base_db::{CrateDisplayName, CrateOrigin, LangCrateOrigin};
use either::Either;
use hir_def::{
- AdtId, AssocItemId, AssocItemLoc, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
- EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId, GenericParamId,
+ AdtId, AssocItemId, AssocItemLoc, BuiltinDeriveImplId, CallableDefId, ConstId, ConstParamId,
+ DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId,
HasModule, ImplId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, MacroExpander,
MacroId, StaticId, StructId, SyntheticSyntax, TupleId, TypeAliasId, TypeOrConstParamId,
TypeParamId, UnionId,
attrs::AttrFlags,
+ builtin_derive::BuiltinDeriveImplMethod,
expr_store::{ExpressionStoreDiagnostics, ExpressionStoreSourceMap},
hir::{
BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat,
@@ -73,7 +74,8 @@ use hir_def::{
visibility::visibility_from_ast,
};
use hir_expand::{
- AstId, MacroCallKind, RenderedExpandError, ValueResult, proc_macro::ProcMacroKind,
+ AstId, MacroCallKind, RenderedExpandError, ValueResult, builtin::BuiltinDeriveExpander,
+ proc_macro::ProcMacroKind,
};
use hir_ty::{
GenericPredicates, InferenceResult, ParamEnvAndCrate, TyDefId, TyLoweringDiagnostic,
@@ -88,8 +90,9 @@ use hir_ty::{
},
mir::{MutBorrowKind, interpret_mir},
next_solver::{
- AliasTy, ClauseKind, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs,
- ParamEnv, PolyFnSig, Region, SolverDefId, Ty, TyKind, TypingMode,
+ AliasTy, AnyImplId, ClauseKind, ConstKind, DbInterner, EarlyBinder, EarlyParamRegion,
+ ErrorGuaranteed, GenericArg, GenericArgs, ParamConst, ParamEnv, PolyFnSig, Region,
+ RegionKind, SolverDefId, Ty, TyKind, TypingMode,
infer::{DbInternerInferExt, InferCtxt},
},
traits::{self, is_inherent_impl_coherent, structurally_normalize_ty},
@@ -97,7 +100,8 @@ use hir_ty::{
use itertools::Itertools;
use rustc_hash::FxHashSet;
use rustc_type_ir::{
- AliasTyKind, TypeSuperVisitable, TypeVisitable, TypeVisitor, fast_reject,
+ AliasTyKind, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable,
+ TypeVisitor, fast_reject,
inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Term as _, Ty as _},
};
use smallvec::SmallVec;
@@ -105,7 +109,7 @@ use span::{AstIdNode, Edition, FileId};
use stdx::{format_to, impl_from, never, variance::PhantomCovariantLifetime};
use syntax::{
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr,
- ast::{self, HasName, HasVisibility as _},
+ ast::{self, HasName as _, HasVisibility as _},
format_smolstr,
};
use triomphe::{Arc, ThinArc};
@@ -146,7 +150,7 @@ pub use {
visibility::Visibility,
// FIXME: This is here since some queries take it as input that are used
// outside of hir.
- {ModuleDefId, TraitId},
+ {GenericParamId, ModuleDefId, TraitId},
},
hir_expand::{
EditionedFileId, ExpandResult, HirFileId, MacroCallId, MacroKind,
@@ -440,7 +444,10 @@ impl ModuleDef {
Adt::Union(it) => it.id.into(),
},
ModuleDef::Trait(it) => it.id.into(),
- ModuleDef::Function(it) => it.id.into(),
+ ModuleDef::Function(it) => match it.id {
+ AnyFunctionId::FunctionId(it) => it.into(),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => return Vec::new(),
+ },
ModuleDef::TypeAlias(it) => it.id.into(),
ModuleDef::Module(it) => it.id.into(),
ModuleDef::Const(it) => it.id.into(),
@@ -504,7 +511,7 @@ impl ModuleDef {
pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
Some(match self {
ModuleDef::Module(it) => it.attrs(db),
- ModuleDef::Function(it) => it.attrs(db),
+ ModuleDef::Function(it) => HasAttrs::attrs(*it, db),
ModuleDef::Adt(it) => it.attrs(db),
ModuleDef::Variant(it) => it.attrs(db),
ModuleDef::Const(it) => it.attrs(db),
@@ -772,8 +779,11 @@ impl Module {
for impl_def in self.impl_defs(db) {
GenericDef::Impl(impl_def).diagnostics(db, acc);
- let loc = impl_def.id.lookup(db);
- let (impl_signature, source_map) = db.impl_signature_with_source_map(impl_def.id);
+ let AnyImplId::ImplId(impl_id) = impl_def.id else {
+ continue;
+ };
+ let loc = impl_id.lookup(db);
+ let (impl_signature, source_map) = db.impl_signature_with_source_map(impl_id);
expr_store_diagnostics(db, acc, &source_map);
let file_id = loc.id.file_id;
@@ -789,26 +799,25 @@ impl Module {
let ast_id_map = db.ast_id_map(file_id);
- for diag in impl_def.id.impl_items_with_diagnostics(db).1.iter() {
+ for diag in impl_id.impl_items_with_diagnostics(db).1.iter() {
emit_def_diagnostic(db, acc, diag, edition, loc.container.krate(db));
}
- if impl_signature.target_trait.is_none()
- && !is_inherent_impl_coherent(db, def_map, impl_def.id)
- {
+ let trait_impl = impl_signature.target_trait.is_some();
+ if !trait_impl && !is_inherent_impl_coherent(db, def_map, impl_id) {
acc.push(IncoherentImpl { impl_: ast_id_map.get(loc.id.value), file_id }.into())
}
- if !impl_def.check_orphan_rules(db) {
+ if trait_impl && !impl_def.check_orphan_rules(db) {
acc.push(TraitImplOrphan { impl_: ast_id_map.get(loc.id.value), file_id }.into())
}
- let trait_ = impl_def.trait_(db);
+ let trait_ = trait_impl.then(|| impl_def.trait_(db)).flatten();
let mut trait_is_unsafe = trait_.is_some_and(|t| t.is_unsafe(db));
let impl_is_negative = impl_def.is_negative(db);
let impl_is_unsafe = impl_def.is_unsafe(db);
- let trait_is_unresolved = trait_.is_none() && impl_signature.target_trait.is_some();
+ let trait_is_unresolved = trait_.is_none() && trait_impl;
if trait_is_unresolved {
// Ignore trait safety errors when the trait is unresolved, as otherwise we'll treat it as safe,
// which may not be correct.
@@ -822,7 +831,7 @@ impl Module {
if drop_trait != trait_.into() {
return None;
}
- let parent = impl_def.id.into();
+ let parent = impl_id.into();
let (lifetimes_attrs, type_and_consts_attrs) =
AttrFlags::query_generic_params(db, parent);
let res = lifetimes_attrs.values().any(|it| it.contains(AttrFlags::MAY_DANGLE))
@@ -851,7 +860,7 @@ impl Module {
AssocItemId::ConstId(id) => !db.const_signature(id).has_body(),
AssocItemId::TypeAliasId(it) => db.type_alias_signature(it).ty.is_none(),
});
- impl_assoc_items_scratch.extend(impl_def.id.impl_items(db).items.iter().cloned());
+ impl_assoc_items_scratch.extend(impl_id.impl_items(db).items.iter().cloned());
let redundant = impl_assoc_items_scratch
.iter()
@@ -883,11 +892,11 @@ impl Module {
.collect();
if !missing.is_empty() {
- let self_ty = db.impl_self_ty(impl_def.id).instantiate_identity();
+ let self_ty = db.impl_self_ty(impl_id).instantiate_identity();
let self_ty = structurally_normalize_ty(
&infcx,
self_ty,
- db.trait_environment(impl_def.id.into()),
+ db.trait_environment(impl_id.into()),
);
let self_ty_is_guaranteed_unsized = matches!(
self_ty.kind(),
@@ -896,7 +905,13 @@ impl Module {
if self_ty_is_guaranteed_unsized {
missing.retain(|(_, assoc_item)| {
let assoc_item = match *assoc_item {
- AssocItem::Function(it) => it.id.into(),
+ AssocItem::Function(it) => match it.id {
+ AnyFunctionId::FunctionId(id) => id.into(),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => {
+ never!("should not have an `AnyFunctionId::BuiltinDeriveImplMethod` here");
+ return false;
+ },
+ },
AssocItem::Const(it) => it.id.into(),
AssocItem::TypeAlias(it) => it.id.into(),
};
@@ -905,6 +920,48 @@ impl Module {
}
}
+ // HACK: When specialization is enabled in the current crate, and there exists
+ // *any* blanket impl that provides a default implementation for the missing item,
+ // suppress the missing associated item diagnostic.
+ // This can lead to false negatives when the impl in question does not actually
+ // specialize that blanket impl, but determining the exact specialization
+ // relationship here would be significantly more expensive.
+ if !missing.is_empty() {
+ let krate = self.krate(db).id;
+ let def_map = crate_def_map(db, krate);
+ if def_map.is_unstable_feature_enabled(&sym::specialization)
+ || def_map.is_unstable_feature_enabled(&sym::min_specialization)
+ {
+ missing.retain(|(assoc_name, assoc_item)| {
+ let AssocItem::Function(_) = assoc_item else {
+ return true;
+ };
+
+ for &impl_ in TraitImpls::for_crate(db, krate).blanket_impls(trait_.id)
+ {
+ if impl_ == impl_id {
+ continue;
+ }
+
+ for (name, item) in &impl_.impl_items(db).items {
+ let AssocItemId::FunctionId(fn_) = item else {
+ continue;
+ };
+ if name != assoc_name {
+ continue;
+ }
+
+ if db.function_signature(*fn_).is_default() {
+ return false;
+ }
+ }
+ }
+
+ true
+ });
+ }
+ }
+
if !missing.is_empty() {
acc.push(
TraitImplMissingAssocItems {
@@ -918,20 +975,15 @@ impl Module {
impl_assoc_items_scratch.clear();
}
+ push_ty_diagnostics(db, acc, db.impl_self_ty_with_diagnostics(impl_id).1, &source_map);
push_ty_diagnostics(
db,
acc,
- db.impl_self_ty_with_diagnostics(impl_def.id).1,
- &source_map,
- );
- push_ty_diagnostics(
- db,
- acc,
- db.impl_trait_with_diagnostics(impl_def.id).and_then(|it| it.1),
+ db.impl_trait_with_diagnostics(impl_id).and_then(|it| it.1),
&source_map,
);
- for &(_, item) in impl_def.id.impl_items(db).items.iter() {
+ for &(_, item) in impl_id.impl_items(db).items.iter() {
AssocItem::from(item).diagnostics(db, acc, style_lints);
}
}
@@ -955,7 +1007,8 @@ impl Module {
pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec<Impl> {
let def_map = self.id.def_map(db);
- def_map[self.id].scope.impls().map(Impl::from).collect()
+ let scope = &def_map[self.id].scope;
+ scope.impls().map(Impl::from).chain(scope.builtin_derive_impls().map(Impl::from)).collect()
}
/// Finds a path that can be used to refer to the given item from within
@@ -968,7 +1021,7 @@ impl Module {
) -> Option<ModPath> {
hir_def::find_path::find_path(
db,
- item.into().into(),
+ item.into().try_into().ok()?,
self.into(),
PrefixKind::Plain,
false,
@@ -985,7 +1038,14 @@ impl Module {
prefix_kind: PrefixKind,
cfg: FindPathConfig,
) -> Option<ModPath> {
- hir_def::find_path::find_path(db, item.into().into(), self.into(), prefix_kind, true, cfg)
+ hir_def::find_path::find_path(
+ db,
+ item.into().try_into().ok()?,
+ self.into(),
+ prefix_kind,
+ true,
+ cfg,
+ )
}
#[inline]
@@ -1863,9 +1923,9 @@ impl VariantDef {
pub fn name(&self, db: &dyn HirDatabase) -> Name {
match self {
- VariantDef::Struct(s) => s.name(db),
- VariantDef::Union(u) => u.name(db),
- VariantDef::Variant(e) => e.name(db),
+ VariantDef::Struct(s) => (*s).name(db),
+ VariantDef::Union(u) => (*u).name(db),
+ VariantDef::Variant(e) => (*e).name(db),
}
}
}
@@ -1909,24 +1969,33 @@ impl DefWithBody {
}
}
- fn id(&self) -> DefWithBodyId {
- match self {
- DefWithBody::Function(it) => it.id.into(),
+ fn id(&self) -> Option<DefWithBodyId> {
+ Some(match self {
+ DefWithBody::Function(it) => match it.id {
+ AnyFunctionId::FunctionId(id) => id.into(),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => return None,
+ },
DefWithBody::Static(it) => it.id.into(),
DefWithBody::Const(it) => it.id.into(),
DefWithBody::Variant(it) => it.into(),
- }
+ })
}
/// A textual representation of the HIR of this def's body for debugging purposes.
pub fn debug_hir(self, db: &dyn HirDatabase) -> String {
- let body = db.body(self.id());
- body.pretty_print(db, self.id(), Edition::CURRENT)
+ let Some(id) = self.id() else {
+ return String::new();
+ };
+ let body = db.body(id);
+ body.pretty_print(db, id, Edition::CURRENT)
}
/// A textual representation of the MIR of this def's body for debugging purposes.
pub fn debug_mir(self, db: &dyn HirDatabase) -> String {
- let body = db.mir_body(self.id());
+ let Some(id) = self.id() else {
+ return String::new();
+ };
+ let body = db.mir_body(id);
match body {
Ok(body) => body.pretty_print(db, self.module(db).krate(db).to_display_target(db)),
Err(e) => format!("error:\n{e:?}"),
@@ -1939,11 +2008,17 @@ impl DefWithBody {
acc: &mut Vec<AnyDiagnostic<'db>>,
style_lints: bool,
) {
+ let Ok(id) = self.try_into() else {
+ return;
+ };
let krate = self.module(db).id.krate(db);
- let (body, source_map) = db.body_with_source_map(self.into());
+ let (body, source_map) = db.body_with_source_map(id);
let sig_source_map = match self {
- DefWithBody::Function(id) => db.function_signature_with_source_map(id.into()).1,
+ DefWithBody::Function(id) => match id.id {
+ AnyFunctionId::FunctionId(id) => db.function_signature_with_source_map(id).1,
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => return,
+ },
DefWithBody::Static(id) => db.static_signature_with_source_map(id.into()).1,
DefWithBody::Const(id) => db.const_signature_with_source_map(id.into()).1,
DefWithBody::Variant(variant) => {
@@ -1958,11 +2033,11 @@ impl DefWithBody {
expr_store_diagnostics(db, acc, &source_map);
- let infer = InferenceResult::for_body(db, self.into());
+ let infer = InferenceResult::for_body(db, id);
for d in infer.diagnostics() {
acc.extend(AnyDiagnostic::inference_diagnostic(
db,
- self.into(),
+ id,
d,
&source_map,
&sig_source_map,
@@ -1989,14 +2064,14 @@ impl DefWithBody {
acc.push(
TypeMismatch {
expr_or_pat,
- expected: Type::new(db, DefWithBodyId::from(self), mismatch.expected.as_ref()),
- actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual.as_ref()),
+ expected: Type::new(db, id, mismatch.expected.as_ref()),
+ actual: Type::new(db, id, mismatch.actual.as_ref()),
}
.into(),
);
}
- let missing_unsafe = hir_ty::diagnostics::missing_unsafe(db, self.into());
+ let missing_unsafe = hir_ty::diagnostics::missing_unsafe(db, id);
for (node, reason) in missing_unsafe.unsafe_exprs {
match source_map.expr_or_pat_syntax(node) {
Ok(node) => acc.push(
@@ -2031,7 +2106,7 @@ impl DefWithBody {
}
}
- if let Ok(borrowck_results) = db.borrowck(self.into()) {
+ if let Ok(borrowck_results) = db.borrowck(id) {
for borrowck_result in borrowck_results.iter() {
let mir_body = &borrowck_result.mir_body;
for moof in &borrowck_result.moved_out_of_ref {
@@ -2088,7 +2163,7 @@ impl DefWithBody {
{
need_mut = &mir::MutabilityReason::Not;
}
- let local = Local { parent: self.into(), binding_id };
+ let local = Local { parent: id, binding_id };
let is_mut = body[binding_id].mode == BindingAnnotation::Mutable;
match (need_mut, is_mut) {
@@ -2144,17 +2219,11 @@ impl DefWithBody {
}
}
- for diagnostic in BodyValidationDiagnostic::collect(db, self.into(), style_lints) {
+ for diagnostic in BodyValidationDiagnostic::collect(db, id, style_lints) {
acc.extend(AnyDiagnostic::body_validation_diagnostic(db, diagnostic, &source_map));
}
- let def: ModuleDef = match self {
- DefWithBody::Function(it) => it.into(),
- DefWithBody::Static(it) => it.into(),
- DefWithBody::Const(it) => it.into(),
- DefWithBody::Variant(it) => it.into(),
- };
- for diag in hir_ty::diagnostics::incorrect_case(db, def.into()) {
+ for diag in hir_ty::diagnostics::incorrect_case(db, id.into()) {
acc.push(diag.into())
}
}
@@ -2192,45 +2261,181 @@ fn expr_store_diagnostics<'db>(
.macro_calls()
.for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc));
}
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+enum AnyFunctionId {
+ FunctionId(FunctionId),
+ BuiltinDeriveImplMethod { method: BuiltinDeriveImplMethod, impl_: BuiltinDeriveImplId },
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Function {
- pub(crate) id: FunctionId,
+ pub(crate) id: AnyFunctionId,
+}
+
+impl fmt::Debug for Function {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&self.id, f)
+ }
}
impl Function {
pub fn module(self, db: &dyn HirDatabase) -> Module {
- self.id.module(db).into()
+ match self.id {
+ AnyFunctionId::FunctionId(id) => id.module(db).into(),
+ AnyFunctionId::BuiltinDeriveImplMethod { impl_, .. } => impl_.module(db).into(),
+ }
}
pub fn name(self, db: &dyn HirDatabase) -> Name {
- db.function_signature(self.id).name.clone()
+ match self.id {
+ AnyFunctionId::FunctionId(id) => db.function_signature(id).name.clone(),
+ AnyFunctionId::BuiltinDeriveImplMethod { method, .. } => {
+ Name::new_symbol_root(method.name())
+ }
+ }
}
pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
- Type::from_value_def(db, self.id)
+ match self.id {
+ AnyFunctionId::FunctionId(id) => Type::from_value_def(db, id),
+ AnyFunctionId::BuiltinDeriveImplMethod { method, impl_ } => {
+ // Get the type for the trait function, as we can't get the type for the impl function
+ // because it has not `CallableDefId`.
+ let krate = impl_.module(db).krate(db);
+ let interner = DbInterner::new_with(db, krate);
+ let param_env = hir_ty::builtin_derive::param_env(interner, impl_);
+ let env = ParamEnvAndCrate { param_env, krate };
+ let Some(trait_method) = method.trait_method(db, impl_) else {
+ return Type { env, ty: Ty::new_error(interner, ErrorGuaranteed) };
+ };
+ Function::from(trait_method).ty(db)
+ }
+ }
}
pub fn fn_ptr_type(self, db: &dyn HirDatabase) -> Type<'_> {
- let resolver = self.id.resolver(db);
- let interner = DbInterner::new_no_crate(db);
- // FIXME: This shouldn't be `instantiate_identity()`, we shouldn't leak `TyKind::Param`s.
- let callable_sig = db.callable_item_signature(self.id.into()).instantiate_identity();
- let ty = Ty::new_fn_ptr(interner, callable_sig);
- Type::new_with_resolver_inner(db, &resolver, ty)
+ match self.id {
+ AnyFunctionId::FunctionId(id) => {
+ let resolver = id.resolver(db);
+ let interner = DbInterner::new_no_crate(db);
+ // FIXME: This shouldn't be `instantiate_identity()`, we shouldn't leak `TyKind::Param`s.
+ let callable_sig = db.callable_item_signature(id.into()).instantiate_identity();
+ let ty = Ty::new_fn_ptr(interner, callable_sig);
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+ AnyFunctionId::BuiltinDeriveImplMethod { method, impl_ } => {
+ struct ParamsShifter<'db> {
+ interner: DbInterner<'db>,
+ shift_by: i32,
+ }
+
+ impl<'db> TypeFolder<DbInterner<'db>> for ParamsShifter<'db> {
+ fn cx(&self) -> DbInterner<'db> {
+ self.interner
+ }
+
+ fn fold_ty(&mut self, ty: Ty<'db>) -> Ty<'db> {
+ if let TyKind::Param(param) = ty.kind() {
+ Ty::new_param(
+ self.interner,
+ param.id,
+ param.index.checked_add_signed(self.shift_by).unwrap(),
+ )
+ } else {
+ ty.super_fold_with(self)
+ }
+ }
+
+ fn fold_const(
+ &mut self,
+ ct: hir_ty::next_solver::Const<'db>,
+ ) -> hir_ty::next_solver::Const<'db> {
+ if let ConstKind::Param(param) = ct.kind() {
+ hir_ty::next_solver::Const::new_param(
+ self.interner,
+ ParamConst {
+ id: param.id,
+ index: param.index.checked_add_signed(self.shift_by).unwrap(),
+ },
+ )
+ } else {
+ ct.super_fold_with(self)
+ }
+ }
+
+ fn fold_region(&mut self, r: Region<'db>) -> Region<'db> {
+ if let RegionKind::ReEarlyParam(param) = r.kind() {
+ Region::new_early_param(
+ self.interner,
+ EarlyParamRegion {
+ id: param.id,
+ index: param.index.checked_add_signed(self.shift_by).unwrap(),
+ },
+ )
+ } else {
+ r
+ }
+ }
+ }
+
+ // Get the type for the trait function, as we can't get the type for the impl function
+ // because it has not `CallableDefId`.
+ let krate = impl_.module(db).krate(db);
+ let interner = DbInterner::new_with(db, krate);
+ let param_env = hir_ty::builtin_derive::param_env(interner, impl_);
+ let env = ParamEnvAndCrate { param_env, krate };
+ let Some(trait_method) = method.trait_method(db, impl_) else {
+ return Type { env, ty: Ty::new_error(interner, ErrorGuaranteed) };
+ };
+ // The procedure works as follows: the method may have additional generic parameters (e.g. `Hash::hash()`),
+ // and we want them to be params of the impl method as well. So we start with the trait method identity
+ // args and extract from them the trait method own args. In parallel, we retrieve the impl trait ref.
+ // Now we can put our args as [...impl_trait_ref.args, ...trait_method_own_args], but we have one problem:
+ // the args in `trait_method_own_args` use indices appropriate for the trait method, which are not necessarily
+ // good for the impl method. So we shift them by `impl_generics_len - trait_generics_len`, which is essentially
+ // `impl_generics_len - impl_trait_ref.args.len()`.
+ let trait_method_fn_ptr = Ty::new_fn_ptr(
+ interner,
+ db.callable_item_signature(trait_method.into()).instantiate_identity(),
+ );
+ let impl_trait_ref =
+ hir_ty::builtin_derive::impl_trait(interner, impl_).instantiate_identity();
+ let trait_method_args =
+ GenericArgs::identity_for_item(interner, trait_method.into());
+ let trait_method_own_args = GenericArgs::new_from_iter(
+ interner,
+ trait_method_args.iter().skip(impl_trait_ref.args.len()),
+ );
+ let impl_params_count = hir_ty::builtin_derive::generic_params_count(db, impl_);
+ let shift_args_by = impl_params_count as i32 - impl_trait_ref.args.len() as i32;
+ let shifted_trait_method_own_args = trait_method_own_args
+ .fold_with(&mut ParamsShifter { interner, shift_by: shift_args_by });
+ let impl_method_args = GenericArgs::new_from_iter(
+ interner,
+ impl_trait_ref.args.iter().chain(shifted_trait_method_own_args),
+ );
+ let impl_method_fn_ptr =
+ EarlyBinder::bind(trait_method_fn_ptr).instantiate(interner, impl_method_args);
+ Type { env, ty: impl_method_fn_ptr }
+ }
+ }
+ }
+
+ fn fn_sig<'db>(self, db: &'db dyn HirDatabase) -> (ParamEnvAndCrate<'db>, PolyFnSig<'db>) {
+ let fn_ptr = self.fn_ptr_type(db);
+ let TyKind::FnPtr(sig_tys, hdr) = fn_ptr.ty.kind() else {
+ unreachable!();
+ };
+ (fn_ptr.env, sig_tys.with(hdr))
}
// FIXME: Find a better API to express all combinations here, perhaps we should have `PreInstantiationType`?
/// Get this function's return type
pub fn ret_type(self, db: &dyn HirDatabase) -> Type<'_> {
- let resolver = self.id.resolver(db);
- // FIXME: This shouldn't be `instantiate_identity()`, we shouldn't leak `TyKind::Param`s.
- let ty = db
- .callable_item_signature(self.id.into())
- .instantiate_identity()
- .skip_binder()
- .output();
- Type::new_with_resolver_inner(db, &resolver, ty)
+ let (env, sig) = self.fn_sig(db);
+ Type { env, ty: sig.skip_binder().output() }
}
// FIXME: Find better API to also handle const generics
@@ -2239,30 +2444,41 @@ impl Function {
db: &'db dyn HirDatabase,
generics: impl Iterator<Item = Type<'db>>,
) -> Type<'db> {
- let resolver = self.id.resolver(db);
+ let ret_type = self.ret_type(db);
let interner = DbInterner::new_no_crate(db);
- let args = generic_args_from_tys(interner, self.id.into(), generics.map(|ty| ty.ty));
+ let args = self.adapt_generic_args(interner, generics);
+ ret_type.derived(EarlyBinder::bind(ret_type.ty).instantiate(interner, args))
+ }
- let interner = DbInterner::new_no_crate(db);
- let ty = db
- .callable_item_signature(self.id.into())
- .instantiate(interner, args)
- .skip_binder()
- .output();
- Type::new_with_resolver_inner(db, &resolver, ty)
+ fn adapt_generic_args<'db>(
+ self,
+ interner: DbInterner<'db>,
+ generics: impl Iterator<Item = Type<'db>>,
+ ) -> GenericArgs<'db> {
+ let generics = generics.map(|ty| ty.ty);
+ match self.id {
+ AnyFunctionId::FunctionId(id) => generic_args_from_tys(interner, id.into(), generics),
+ AnyFunctionId::BuiltinDeriveImplMethod { impl_, .. } => {
+ let impl_args = GenericArgs::identity_for_item(interner, impl_.into());
+ GenericArgs::new_from_iter(
+ interner,
+ impl_args.iter().chain(generics.map(Into::into)),
+ )
+ }
+ }
}
pub fn async_ret_type<'db>(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
+ let AnyFunctionId::FunctionId(id) = self.id else {
+ return None;
+ };
if !self.is_async(db) {
return None;
}
- let resolver = self.id.resolver(db);
+ let resolver = id.resolver(db);
// FIXME: This shouldn't be `instantiate_identity()`, we shouldn't leak `TyKind::Param`s.
- let ret_ty = db
- .callable_item_signature(self.id.into())
- .instantiate_identity()
- .skip_binder()
- .output();
+ let ret_ty =
+ db.callable_item_signature(id.into()).instantiate_identity().skip_binder().output();
for pred in ret_ty.impl_trait_bounds(db).into_iter().flatten() {
if let ClauseKind::Projection(projection) = pred.kind().skip_binder()
&& let Some(output_ty) = projection.term.as_type()
@@ -2274,31 +2490,47 @@ impl Function {
}
pub fn has_self_param(self, db: &dyn HirDatabase) -> bool {
- db.function_signature(self.id).has_self_param()
+ match self.id {
+ AnyFunctionId::FunctionId(id) => db.function_signature(id).has_self_param(),
+ AnyFunctionId::BuiltinDeriveImplMethod { method, .. } => match method {
+ BuiltinDeriveImplMethod::clone
+ | BuiltinDeriveImplMethod::fmt
+ | BuiltinDeriveImplMethod::hash
+ | BuiltinDeriveImplMethod::cmp
+ | BuiltinDeriveImplMethod::partial_cmp
+ | BuiltinDeriveImplMethod::eq => true,
+ BuiltinDeriveImplMethod::default => false,
+ },
+ }
}
pub fn self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
- self.has_self_param(db).then_some(SelfParam { func: self.id })
+ self.has_self_param(db).then_some(SelfParam { func: self })
}
pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param<'_>> {
- let environment = param_env_from_has_crate(db, self.id);
- // FIXME: This shouldn't be `instantiate_identity()`, we shouldn't leak `TyKind::Param`s.
- let callable_sig =
- db.callable_item_signature(self.id.into()).instantiate_identity().skip_binder();
- callable_sig
+ let (env, sig) = self.fn_sig(db);
+ let func = match self.id {
+ AnyFunctionId::FunctionId(id) => Callee::Def(CallableDefId::FunctionId(id)),
+ AnyFunctionId::BuiltinDeriveImplMethod { method, impl_ } => {
+ Callee::BuiltinDeriveImplMethod { method, impl_ }
+ }
+ };
+ sig.skip_binder()
.inputs()
.iter()
.enumerate()
- .map(|(idx, &ty)| {
- let ty = Type { env: environment, ty };
- Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
- })
+ .map(|(idx, &ty)| Param { func: func.clone(), ty: Type { env, ty }, idx })
.collect()
}
pub fn num_params(self, db: &dyn HirDatabase) -> usize {
- db.function_signature(self.id).params.len()
+ match self.id {
+ AnyFunctionId::FunctionId(id) => db.function_signature(id).params.len(),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => {
+ self.fn_sig(db).1.skip_binder().inputs().len()
+ }
+ }
}
pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param<'_>>> {
@@ -2307,21 +2539,11 @@ impl Function {
}
pub fn params_without_self(self, db: &dyn HirDatabase) -> Vec<Param<'_>> {
- let environment = param_env_from_has_crate(db, self.id);
- // FIXME: This shouldn't be `instantiate_identity()`, we shouldn't leak `TyKind::Param`s.
- let callable_sig =
- db.callable_item_signature(self.id.into()).instantiate_identity().skip_binder();
- let skip = if db.function_signature(self.id).has_self_param() { 1 } else { 0 };
- callable_sig
- .inputs()
- .iter()
- .enumerate()
- .skip(skip)
- .map(|(idx, &ty)| {
- let ty = Type { env: environment, ty };
- Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
- })
- .collect()
+ let mut params = self.assoc_fn_params(db);
+ if self.has_self_param(db) {
+ params.remove(0);
+ }
+ params
}
// FIXME: Find better API to also handle const generics
@@ -2330,40 +2552,50 @@ impl Function {
db: &'db dyn HirDatabase,
generics: impl Iterator<Item = Type<'db>>,
) -> Vec<Param<'db>> {
- let environment = param_env_from_has_crate(db, self.id);
let interner = DbInterner::new_no_crate(db);
- let args = generic_args_from_tys(interner, self.id.into(), generics.map(|ty| ty.ty));
- let callable_sig =
- db.callable_item_signature(self.id.into()).instantiate(interner, args).skip_binder();
- let skip = if db.function_signature(self.id).has_self_param() { 1 } else { 0 };
- callable_sig
- .inputs()
- .iter()
- .enumerate()
- .skip(skip)
- .map(|(idx, &ty)| {
- let ty = Type { env: environment, ty };
- Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
+ let args = self.adapt_generic_args(interner, generics);
+ let params = self.params_without_self(db);
+ params
+ .into_iter()
+ .map(|param| Param {
+ func: param.func,
+ idx: param.idx,
+ ty: Type {
+ env: param.ty.env,
+ ty: EarlyBinder::bind(param.ty.ty).instantiate(interner, args),
+ },
})
.collect()
}
pub fn is_const(self, db: &dyn HirDatabase) -> bool {
- db.function_signature(self.id).is_const()
+ match self.id {
+ AnyFunctionId::FunctionId(id) => db.function_signature(id).is_const(),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => false,
+ }
}
pub fn is_async(self, db: &dyn HirDatabase) -> bool {
- db.function_signature(self.id).is_async()
+ match self.id {
+ AnyFunctionId::FunctionId(id) => db.function_signature(id).is_async(),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => false,
+ }
}
pub fn is_varargs(self, db: &dyn HirDatabase) -> bool {
- db.function_signature(self.id).is_varargs()
+ match self.id {
+ AnyFunctionId::FunctionId(id) => db.function_signature(id).is_varargs(),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => false,
+ }
}
pub fn extern_block(self, db: &dyn HirDatabase) -> Option<ExternBlock> {
- match self.id.lookup(db).container {
- ItemContainerId::ExternBlockId(id) => Some(ExternBlock { id }),
- _ => None,
+ match self.id {
+ AnyFunctionId::FunctionId(id) => match id.lookup(db).container {
+ ItemContainerId::ExternBlockId(id) => Some(ExternBlock { id }),
+ _ => None,
+ },
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => None,
}
}
@@ -2396,33 +2628,46 @@ impl Function {
/// Does this function have `#[test]` attribute?
pub fn is_test(self, db: &dyn HirDatabase) -> bool {
- self.attrs(db).is_test()
+ self.attrs(db).contains(AttrFlags::IS_TEST)
}
/// is this a `fn main` or a function with an `export_name` of `main`?
pub fn is_main(self, db: &dyn HirDatabase) -> bool {
- self.exported_main(db)
- || self.module(db).is_crate_root(db) && db.function_signature(self.id).name == sym::main
+ match self.id {
+ AnyFunctionId::FunctionId(id) => {
+ self.exported_main(db)
+ || self.module(db).is_crate_root(db)
+ && db.function_signature(id).name == sym::main
+ }
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => false,
+ }
+ }
+
+ fn attrs(self, db: &dyn HirDatabase) -> AttrFlags {
+ match self.id {
+ AnyFunctionId::FunctionId(id) => AttrFlags::query(db, id.into()),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => AttrFlags::empty(),
+ }
}
/// Is this a function with an `export_name` of `main`?
pub fn exported_main(self, db: &dyn HirDatabase) -> bool {
- AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_EXPORT_NAME_MAIN)
+ self.attrs(db).contains(AttrFlags::IS_EXPORT_NAME_MAIN)
}
/// Does this function have the ignore attribute?
pub fn is_ignore(self, db: &dyn HirDatabase) -> bool {
- AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_IGNORE)
+ self.attrs(db).contains(AttrFlags::IS_IGNORE)
}
/// Does this function have `#[bench]` attribute?
pub fn is_bench(self, db: &dyn HirDatabase) -> bool {
- AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_BENCH)
+ self.attrs(db).contains(AttrFlags::IS_BENCH)
}
/// Is this function marked as unstable with `#[feature]` attribute?
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
- AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
+ self.attrs(db).contains(AttrFlags::IS_UNSTABLE)
}
pub fn is_unsafe_to_call(
@@ -2431,9 +2676,17 @@ impl Function {
caller: Option<Function>,
call_edition: Edition,
) -> bool {
+ let AnyFunctionId::FunctionId(id) = self.id else {
+ return false;
+ };
let (target_features, target_feature_is_safe_in_target) = caller
.map(|caller| {
- let target_features = hir_ty::TargetFeatures::from_fn(db, caller.id);
+ let target_features = match caller.id {
+ AnyFunctionId::FunctionId(id) => hir_ty::TargetFeatures::from_fn(db, id),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => {
+ hir_ty::TargetFeatures::default()
+ }
+ };
let target_feature_is_safe_in_target =
match &caller.krate(db).id.workspace_data(db).target {
Ok(target) => hir_ty::target_feature_is_safe_in_target(target),
@@ -2447,7 +2700,7 @@ impl Function {
matches!(
hir_ty::is_fn_unsafe_to_call(
db,
- self.id,
+ id,
&target_features,
call_edition,
target_feature_is_safe_in_target
@@ -2460,12 +2713,18 @@ impl Function {
///
/// This is false in the case of required (not provided) trait methods.
pub fn has_body(self, db: &dyn HirDatabase) -> bool {
- db.function_signature(self.id).has_body()
+ match self.id {
+ AnyFunctionId::FunctionId(id) => db.function_signature(id).has_body(),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => true,
+ }
}
pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option<Macro> {
- let def_map = crate_def_map(db, HasModule::krate(&self.id, db));
- def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
+ let AnyFunctionId::FunctionId(id) = self.id else {
+ return None;
+ };
+ let def_map = crate_def_map(db, HasModule::krate(&id, db));
+ def_map.fn_as_proc_macro(id).map(|id| Macro { id: id.into() })
}
pub fn eval(
@@ -2473,13 +2732,18 @@ impl Function {
db: &dyn HirDatabase,
span_formatter: impl Fn(FileId, TextRange) -> String,
) -> Result<String, ConstEvalError> {
+ let AnyFunctionId::FunctionId(id) = self.id else {
+ return Err(ConstEvalError::MirEvalError(MirEvalError::NotSupported(
+ "evaluation of builtin derive impl methods is not supported".to_owned(),
+ )));
+ };
let interner = DbInterner::new_no_crate(db);
let body = db.monomorphized_mir_body(
- self.id.into(),
+ id.into(),
GenericArgs::empty(interner).store(),
ParamEnvAndCrate {
- param_env: db.trait_environment(self.id.into()),
- krate: self.id.module(db).krate(db),
+ param_env: db.trait_environment(id.into()),
+ krate: id.module(db).krate(db),
}
.store(),
)?;
@@ -2596,36 +2860,47 @@ impl<'db> Param<'db> {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SelfParam {
- func: FunctionId,
+ func: Function,
}
impl SelfParam {
pub fn access(self, db: &dyn HirDatabase) -> Access {
- let func_data = db.function_signature(self.func);
- func_data
- .params
- .first()
- .map(|&param| match &func_data.store[param] {
- TypeRef::Reference(ref_) => match ref_.mutability {
- hir_def::type_ref::Mutability::Shared => Access::Shared,
- hir_def::type_ref::Mutability::Mut => Access::Exclusive,
- },
- _ => Access::Owned,
- })
- .unwrap_or(Access::Owned)
+ match self.func.id {
+ AnyFunctionId::FunctionId(id) => {
+ let func_data = db.function_signature(id);
+ func_data
+ .params
+ .first()
+ .map(|&param| match &func_data.store[param] {
+ TypeRef::Reference(ref_) => match ref_.mutability {
+ hir_def::type_ref::Mutability::Shared => Access::Shared,
+ hir_def::type_ref::Mutability::Mut => Access::Exclusive,
+ },
+ _ => Access::Owned,
+ })
+ .unwrap_or(Access::Owned)
+ }
+ AnyFunctionId::BuiltinDeriveImplMethod { method, .. } => match method {
+ BuiltinDeriveImplMethod::clone
+ | BuiltinDeriveImplMethod::fmt
+ | BuiltinDeriveImplMethod::hash
+ | BuiltinDeriveImplMethod::cmp
+ | BuiltinDeriveImplMethod::partial_cmp
+ | BuiltinDeriveImplMethod::eq => Access::Shared,
+ BuiltinDeriveImplMethod::default => {
+ unreachable!("this function does not have a self param")
+ }
+ },
+ }
}
pub fn parent_fn(&self) -> Function {
- Function::from(self.func)
+ self.func
}
pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> Type<'db> {
- // FIXME: This shouldn't be `instantiate_identity()`, we shouldn't leak `TyKind::Param`s.
- let callable_sig =
- db.callable_item_signature(self.func.into()).instantiate_identity().skip_binder();
- let environment = param_env_from_has_crate(db, self.func);
- let ty = rustc_type_ir::inherent::SliceLike::as_slice(&callable_sig.inputs())[0];
- Type { env: environment, ty }
+ let (env, sig) = self.func.fn_sig(db);
+ Type { env, ty: sig.skip_binder().inputs()[0] }
}
// FIXME: Find better API to also handle const generics
@@ -2635,18 +2910,18 @@ impl SelfParam {
generics: impl Iterator<Item = Type<'db>>,
) -> Type<'db> {
let interner = DbInterner::new_no_crate(db);
- let args = generic_args_from_tys(interner, self.func.into(), generics.map(|ty| ty.ty));
- let callable_sig =
- db.callable_item_signature(self.func.into()).instantiate(interner, args).skip_binder();
- let environment = param_env_from_has_crate(db, self.func);
- let ty = rustc_type_ir::inherent::SliceLike::as_slice(&callable_sig.inputs())[0];
- Type { env: environment, ty }
+ let args = self.func.adapt_generic_args(interner, generics);
+ let Type { env, ty } = self.ty(db);
+ Type { env, ty: EarlyBinder::bind(ty).instantiate(interner, args) }
}
}
impl HasVisibility for Function {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
- db.assoc_visibility(self.id.into())
+ match self.id {
+ AnyFunctionId::FunctionId(id) => db.assoc_visibility(id.into()),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => Visibility::Public,
+ }
}
}
@@ -2870,7 +3145,7 @@ impl Trait {
pub fn function(self, db: &dyn HirDatabase, name: impl PartialEq<Name>) -> Option<Function> {
self.id.trait_items(db).items.iter().find(|(n, _)| name == *n).and_then(|&(_, it)| match it
{
- AssocItemId::FunctionId(id) => Some(Function { id }),
+ AssocItemId::FunctionId(id) => Some(id.into()),
_ => None,
})
}
@@ -3151,15 +3426,15 @@ impl Macro {
)
}
- pub fn is_builtin_derive(&self, db: &dyn HirDatabase) -> bool {
- match self.id {
- MacroId::Macro2Id(it) => {
- matches!(it.lookup(db).expander, MacroExpander::BuiltInDerive(_))
- }
- MacroId::MacroRulesId(it) => {
- matches!(it.lookup(db).expander, MacroExpander::BuiltInDerive(_))
- }
- MacroId::ProcMacroId(_) => false,
+ pub fn builtin_derive_kind(&self, db: &dyn HirDatabase) -> Option<BuiltinDeriveMacroKind> {
+ let expander = match self.id {
+ MacroId::Macro2Id(it) => it.lookup(db).expander,
+ MacroId::MacroRulesId(it) => it.lookup(db).expander,
+ MacroId::ProcMacroId(_) => return None,
+ };
+ match expander {
+ MacroExpander::BuiltInDerive(kind) => Some(BuiltinDeriveMacroKind(kind)),
+ _ => None,
}
}
@@ -3195,8 +3470,55 @@ impl Macro {
pub fn is_derive(&self, db: &dyn HirDatabase) -> bool {
matches!(self.kind(db), MacroKind::Derive | MacroKind::DeriveBuiltIn)
}
+
+ pub fn preferred_brace_style(&self, db: &dyn HirDatabase) -> Option<MacroBraces> {
+ let attrs = self.attrs(db);
+ MacroBraces::extract(attrs.attrs)
+ }
+}
+
+// Feature: Macro Brace Style Attribute
+// Crate authors can declare the preferred brace style for their macro. This will affect how completion
+// inserts calls to it.
+//
+// This is only supported on function-like macros.
+//
+// To do that, insert the `#[rust_analyzer::macro_style(style)]` attribute on the macro (for proc macros,
+// insert it for the macro's function). `style` can be one of:
+//
+// - `braces` for `{...}` style.
+// - `brackets` for `[...]` style.
+// - `parentheses` for `(...)` style.
+//
+// Malformed attributes will be ignored without warnings.
+//
+// Note that users have no way to override this attribute, so be careful and only include things
+// users definitely do not want to be completed!
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum MacroBraces {
+ Braces,
+ Brackets,
+ Parentheses,
+}
+
+impl MacroBraces {
+ fn extract(attrs: AttrFlags) -> Option<Self> {
+ if attrs.contains(AttrFlags::MACRO_STYLE_BRACES) {
+ Some(Self::Braces)
+ } else if attrs.contains(AttrFlags::MACRO_STYLE_BRACKETS) {
+ Some(Self::Brackets)
+ } else if attrs.contains(AttrFlags::MACRO_STYLE_PARENTHESES) {
+ Some(Self::Parentheses)
+ } else {
+ None
+ }
+ }
}
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub struct BuiltinDeriveMacroKind(BuiltinDeriveExpander);
+
impl HasVisibility for Macro {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
match self.id {
@@ -3275,7 +3597,10 @@ pub trait AsExternAssocItem {
impl AsExternAssocItem for Function {
fn as_extern_assoc_item(self, db: &dyn HirDatabase) -> Option<ExternAssocItem> {
- as_extern_assoc_item(db, ExternAssocItem::Function, self.id)
+ let AnyFunctionId::FunctionId(id) = self.id else {
+ return None;
+ };
+ as_extern_assoc_item(db, ExternAssocItem::Function, id)
}
}
@@ -3303,7 +3628,7 @@ pub enum AssocItem {
impl From<method_resolution::CandidateId> for AssocItem {
fn from(value: method_resolution::CandidateId) -> Self {
match value {
- method_resolution::CandidateId::FunctionId(id) => AssocItem::Function(Function { id }),
+ method_resolution::CandidateId::FunctionId(id) => AssocItem::Function(id.into()),
method_resolution::CandidateId::ConstId(id) => AssocItem::Const(Const { id }),
}
}
@@ -3321,7 +3646,10 @@ pub trait AsAssocItem {
impl AsAssocItem for Function {
fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
- as_assoc_item(db, AssocItem::Function, self.id)
+ match self.id {
+ AnyFunctionId::FunctionId(id) => as_assoc_item(db, AssocItem::Function, id),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => Some(AssocItem::Function(self)),
+ }
}
}
@@ -3450,7 +3778,14 @@ impl AssocItem {
pub fn container(self, db: &dyn HirDatabase) -> AssocItemContainer {
let container = match self {
- AssocItem::Function(it) => it.id.lookup(db).container,
+ AssocItem::Function(it) => match it.id {
+ AnyFunctionId::FunctionId(id) => id.lookup(db).container,
+ AnyFunctionId::BuiltinDeriveImplMethod { impl_, .. } => {
+ return AssocItemContainer::Impl(Impl {
+ id: AnyImplId::BuiltinDeriveImplId(impl_),
+ });
+ }
+ },
AssocItem::Const(it) => it.id.lookup(db).container,
AssocItem::TypeAlias(it) => it.id.lookup(db).container,
};
@@ -3587,9 +3922,13 @@ impl_from!(
impl GenericDef {
pub fn params(self, db: &dyn HirDatabase) -> Vec<GenericParam> {
- let generics = db.generic_params(self.into());
+ let Ok(id) = self.try_into() else {
+ // Let's pretend builtin derive impls don't have generic parameters.
+ return Vec::new();
+ };
+ let generics = db.generic_params(id);
let ty_params = generics.iter_type_or_consts().map(|(local_id, _)| {
- let toc = TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id } };
+ let toc = TypeOrConstParam { id: TypeOrConstParamId { parent: id, local_id } };
match toc.split(db) {
Either::Left(it) => GenericParam::ConstParam(it),
Either::Right(it) => GenericParam::TypeParam(it),
@@ -3603,39 +3942,51 @@ impl GenericDef {
}
pub fn lifetime_params(self, db: &dyn HirDatabase) -> Vec<LifetimeParam> {
- let generics = db.generic_params(self.into());
+ let Ok(id) = self.try_into() else {
+ // Let's pretend builtin derive impls don't have generic parameters.
+ return Vec::new();
+ };
+ let generics = db.generic_params(id);
generics
.iter_lt()
- .map(|(local_id, _)| LifetimeParam {
- id: LifetimeParamId { parent: self.into(), local_id },
- })
+ .map(|(local_id, _)| LifetimeParam { id: LifetimeParamId { parent: id, local_id } })
.collect()
}
pub fn type_or_const_params(self, db: &dyn HirDatabase) -> Vec<TypeOrConstParam> {
- let generics = db.generic_params(self.into());
+ let Ok(id) = self.try_into() else {
+ // Let's pretend builtin derive impls don't have generic parameters.
+ return Vec::new();
+ };
+ let generics = db.generic_params(id);
generics
.iter_type_or_consts()
.map(|(local_id, _)| TypeOrConstParam {
- id: TypeOrConstParamId { parent: self.into(), local_id },
+ id: TypeOrConstParamId { parent: id, local_id },
})
.collect()
}
- fn id(self) -> GenericDefId {
- match self {
- GenericDef::Function(it) => it.id.into(),
+ fn id(self) -> Option<GenericDefId> {
+ Some(match self {
+ GenericDef::Function(it) => match it.id {
+ AnyFunctionId::FunctionId(it) => it.into(),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => return None,
+ },
GenericDef::Adt(it) => it.into(),
GenericDef::Trait(it) => it.id.into(),
GenericDef::TypeAlias(it) => it.id.into(),
- GenericDef::Impl(it) => it.id.into(),
+ GenericDef::Impl(it) => match it.id {
+ AnyImplId::ImplId(it) => it.into(),
+ AnyImplId::BuiltinDeriveImplId(_) => return None,
+ },
GenericDef::Const(it) => it.id.into(),
GenericDef::Static(it) => it.id.into(),
- }
+ })
}
pub fn diagnostics<'db>(self, db: &'db dyn HirDatabase, acc: &mut Vec<AnyDiagnostic<'db>>) {
- let def = self.id();
+ let Some(def) = self.id() else { return };
let generics = db.generic_params(def);
@@ -3708,6 +4059,17 @@ impl<'db> GenericSubstitution<'db> {
Self { def, subst, env }
}
+ fn new_from_fn(
+ def: Function,
+ subst: GenericArgs<'db>,
+ env: ParamEnvAndCrate<'db>,
+ ) -> Option<Self> {
+ match def.id {
+ AnyFunctionId::FunctionId(def) => Some(Self::new(def.into(), subst, env)),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => None,
+ }
+ }
+
pub fn types(&self, db: &'db dyn HirDatabase) -> Vec<(Symbol, Type<'db>)> {
let container = match self.def {
GenericDefId::ConstId(id) => Some(id.lookup(db).container),
@@ -3820,7 +4182,9 @@ impl Local {
pub fn as_self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
match self.parent {
- DefWithBodyId::FunctionId(func) if self.is_self(db) => Some(SelfParam { func }),
+ DefWithBodyId::FunctionId(func) if self.is_self(db) => {
+ Some(SelfParam { func: func.into() })
+ }
_ => None,
}
}
@@ -4308,7 +4672,7 @@ impl TypeOrConstParam {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Impl {
- pub(crate) id: ImplId,
+ pub(crate) id: AnyImplId,
}
impl Impl {
@@ -4320,6 +4684,7 @@ impl Impl {
fn extend_with_def_map(db: &dyn HirDatabase, def_map: &DefMap, result: &mut Vec<Impl>) {
for (_, module) in def_map.modules() {
result.extend(module.scope.impls().map(Impl::from));
+ result.extend(module.scope.builtin_derive_impls().map(Impl::from));
for unnamed_const in module.scope.unnamed_consts() {
for (_, block_def_map) in db.body(unnamed_const.into()).blocks(db) {
@@ -4331,7 +4696,7 @@ impl Impl {
}
pub fn all_in_module(db: &dyn HirDatabase, module: Module) -> Vec<Impl> {
- module.id.def_map(db)[module.id].scope.impls().map(Into::into).collect()
+ module.impl_defs(db)
}
/// **Note:** This is an **approximation** that strives to give the *human-perceived notion* of an "impl for type",
@@ -4347,20 +4712,19 @@ impl Impl {
else {
return Vec::new();
};
- let mut extend_with_impls =
- |impls: &[ImplId]| result.extend(impls.iter().copied().map(Impl::from));
- method_resolution::with_incoherent_inherent_impls(
- db,
- env.krate,
- &simplified_ty,
- &mut extend_with_impls,
- );
+ let mut extend_with_impls = |impls: Either<&[ImplId], &[BuiltinDeriveImplId]>| match impls {
+ Either::Left(impls) => result.extend(impls.iter().copied().map(Impl::from)),
+ Either::Right(impls) => result.extend(impls.iter().copied().map(Impl::from)),
+ };
+ method_resolution::with_incoherent_inherent_impls(db, env.krate, &simplified_ty, |impls| {
+ extend_with_impls(Either::Left(impls))
+ });
if let Some(module) = method_resolution::simplified_type_module(db, &simplified_ty) {
InherentImpls::for_each_crate_and_block(
db,
module.krate(db),
module.block(db),
- &mut |impls| extend_with_impls(impls.for_self_ty(&simplified_ty)),
+ &mut |impls| extend_with_impls(Either::Left(impls.for_self_ty(&simplified_ty))),
);
std::iter::successors(module.block(db), |block| block.loc(db).module.block(db))
.filter_map(|block| TraitImpls::for_block(db, block).as_deref())
@@ -4382,7 +4746,10 @@ impl Impl {
let module = trait_.module(db).id;
let mut all = Vec::new();
let mut handle_impls = |impls: &TraitImpls| {
- impls.for_trait(trait_.id, |impls| all.extend(impls.iter().copied().map(Impl::from)));
+ impls.for_trait(trait_.id, |impls| match impls {
+ Either::Left(impls) => all.extend(impls.iter().copied().map(Impl::from)),
+ Either::Right(impls) => all.extend(impls.iter().copied().map(Impl::from)),
+ });
};
for krate in module.krate(db).transitive_rev_deps(db) {
handle_impls(TraitImpls::for_crate(db, krate));
@@ -4396,75 +4763,118 @@ impl Impl {
}
pub fn trait_(self, db: &dyn HirDatabase) -> Option<Trait> {
- let trait_ref = db.impl_trait(self.id)?;
- let id = trait_ref.skip_binder().def_id;
- Some(Trait { id: id.0 })
+ match self.id {
+ AnyImplId::ImplId(id) => {
+ let trait_ref = db.impl_trait(id)?;
+ let id = trait_ref.skip_binder().def_id;
+ Some(Trait { id: id.0 })
+ }
+ AnyImplId::BuiltinDeriveImplId(id) => {
+ let loc = id.loc(db);
+ let lang_items = hir_def::lang_item::lang_items(db, loc.adt.module(db).krate(db));
+ loc.trait_.get_id(lang_items).map(Trait::from)
+ }
+ }
}
pub fn trait_ref(self, db: &dyn HirDatabase) -> Option<TraitRef<'_>> {
- let trait_ref = db.impl_trait(self.id)?.instantiate_identity();
- let resolver = self.id.resolver(db);
- Some(TraitRef::new_with_resolver(db, &resolver, trait_ref))
+ match self.id {
+ AnyImplId::ImplId(id) => {
+ let trait_ref = db.impl_trait(id)?.instantiate_identity();
+ let resolver = id.resolver(db);
+ Some(TraitRef::new_with_resolver(db, &resolver, trait_ref))
+ }
+ AnyImplId::BuiltinDeriveImplId(id) => {
+ let loc = id.loc(db);
+ let krate = loc.module(db).krate(db);
+ let interner = DbInterner::new_with(db, krate);
+ let env = ParamEnvAndCrate {
+ param_env: hir_ty::builtin_derive::param_env(interner, id),
+ krate,
+ };
+ let trait_ref =
+ hir_ty::builtin_derive::impl_trait(interner, id).instantiate_identity();
+ Some(TraitRef { env, trait_ref })
+ }
+ }
}
pub fn self_ty(self, db: &dyn HirDatabase) -> Type<'_> {
- let resolver = self.id.resolver(db);
- // FIXME: This shouldn't be `instantiate_identity()`, we shouldn't leak `TyKind::Param`s.
- let ty = db.impl_self_ty(self.id).instantiate_identity();
- Type::new_with_resolver_inner(db, &resolver, ty)
+ match self.id {
+ AnyImplId::ImplId(id) => {
+ let resolver = id.resolver(db);
+ // FIXME: This shouldn't be `instantiate_identity()`, we shouldn't leak `TyKind::Param`s.
+ let ty = db.impl_self_ty(id).instantiate_identity();
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+ AnyImplId::BuiltinDeriveImplId(id) => {
+ let loc = id.loc(db);
+ let krate = loc.module(db).krate(db);
+ let interner = DbInterner::new_with(db, krate);
+ let env = ParamEnvAndCrate {
+ param_env: hir_ty::builtin_derive::param_env(interner, id),
+ krate,
+ };
+ let ty = hir_ty::builtin_derive::impl_trait(interner, id)
+ .instantiate_identity()
+ .self_ty();
+ Type { env, ty }
+ }
+ }
}
pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
- self.id.impl_items(db).items.iter().map(|&(_, it)| it.into()).collect()
+ match self.id {
+ AnyImplId::ImplId(id) => {
+ id.impl_items(db).items.iter().map(|&(_, it)| it.into()).collect()
+ }
+ AnyImplId::BuiltinDeriveImplId(impl_) => impl_
+ .loc(db)
+ .trait_
+ .all_methods()
+ .iter()
+ .map(|&method| {
+ AssocItem::Function(Function {
+ id: AnyFunctionId::BuiltinDeriveImplMethod { method, impl_ },
+ })
+ })
+ .collect(),
+ }
}
pub fn is_negative(self, db: &dyn HirDatabase) -> bool {
- db.impl_signature(self.id).flags.contains(ImplFlags::NEGATIVE)
+ match self.id {
+ AnyImplId::ImplId(id) => db.impl_signature(id).flags.contains(ImplFlags::NEGATIVE),
+ AnyImplId::BuiltinDeriveImplId(_) => false,
+ }
}
pub fn is_unsafe(self, db: &dyn HirDatabase) -> bool {
- db.impl_signature(self.id).flags.contains(ImplFlags::UNSAFE)
+ match self.id {
+ AnyImplId::ImplId(id) => db.impl_signature(id).flags.contains(ImplFlags::UNSAFE),
+ AnyImplId::BuiltinDeriveImplId(_) => false,
+ }
}
pub fn module(self, db: &dyn HirDatabase) -> Module {
- self.id.lookup(db).container.into()
- }
-
- pub fn as_builtin_derive_path(self, db: &dyn HirDatabase) -> Option<InMacroFile<ast::Path>> {
- let src = self.source(db)?;
-
- let macro_file = src.file_id.macro_file()?;
- let loc = macro_file.lookup(db);
- let (derive_attr, derive_index) = match loc.kind {
- MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
- let module_id = self.id.lookup(db).container;
- (
- module_id.def_map(db)[module_id]
- .scope
- .derive_macro_invoc(ast_id, derive_attr_index)?,
- derive_index,
- )
- }
- _ => return None,
- };
- let path = db
- .parse_macro_expansion(derive_attr)
- .value
- .0
- .syntax_node()
- .children()
- .nth(derive_index as usize)
- .and_then(<ast::Attr as AstNode>::cast)
- .and_then(|it| it.path())?;
- Some(InMacroFile { file_id: derive_attr, value: path })
+ match self.id {
+ AnyImplId::ImplId(id) => id.module(db).into(),
+ AnyImplId::BuiltinDeriveImplId(id) => id.module(db).into(),
+ }
}
pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool {
- check_orphan_rules(db, self.id)
+ match self.id {
+ AnyImplId::ImplId(id) => check_orphan_rules(db, id),
+ AnyImplId::BuiltinDeriveImplId(_) => true,
+ }
}
fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId<ast::Item>, MacroCallId)]> {
- self.id.impl_items(db).macro_calls.to_vec().into_boxed_slice()
+ match self.id {
+ AnyImplId::ImplId(id) => id.impl_items(db).macro_calls.to_vec().into_boxed_slice(),
+ AnyImplId::BuiltinDeriveImplId(_) => Box::default(),
+ }
}
}
@@ -5540,7 +5950,7 @@ impl<'db> Type<'db> {
else {
unreachable!("`Mode::MethodCall` can only return functions");
};
- let id = Function { id };
+ let id = Function { id: AnyFunctionId::FunctionId(id) };
match candidate.kind {
method_resolution::PickKind::InherentImplPick(_)
| method_resolution::PickKind::ObjectPick(..)
@@ -5564,7 +5974,7 @@ impl<'db> Type<'db> {
else {
unreachable!("`Mode::MethodCall` can only return functions");
};
- let id = Function { id };
+ let id = Function { id: AnyFunctionId::FunctionId(id) };
match candidate.candidate.kind {
method_resolution::CandidateKind::InherentImplCandidate {
..
@@ -5919,6 +6329,7 @@ enum Callee<'db> {
CoroutineClosure(InternedCoroutineId, GenericArgs<'db>),
FnPtr,
FnImpl(traits::FnTrait),
+ BuiltinDeriveImplMethod { method: BuiltinDeriveImplMethod, impl_: BuiltinDeriveImplId },
}
pub enum CallableKind<'db> {
@@ -5934,6 +6345,9 @@ impl<'db> Callable<'db> {
pub fn kind(&self) -> CallableKind<'db> {
match self.callee {
Callee::Def(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()),
+ Callee::BuiltinDeriveImplMethod { method, impl_ } => CallableKind::Function(Function {
+ id: AnyFunctionId::BuiltinDeriveImplMethod { method, impl_ },
+ }),
Callee::Def(CallableDefId::StructId(it)) => CallableKind::TupleStruct(it.into()),
Callee::Def(CallableDefId::EnumVariantId(it)) => {
CallableKind::TupleEnumVariant(it.into())
@@ -5948,12 +6362,22 @@ impl<'db> Callable<'db> {
Callee::FnImpl(fn_) => CallableKind::FnImpl(fn_.into()),
}
}
+
+ fn as_function(&self) -> Option<Function> {
+ match self.callee {
+ Callee::Def(CallableDefId::FunctionId(it)) => Some(it.into()),
+ Callee::BuiltinDeriveImplMethod { method, impl_ } => {
+ Some(Function { id: AnyFunctionId::BuiltinDeriveImplMethod { method, impl_ } })
+ }
+ _ => None,
+ }
+ }
+
pub fn receiver_param(&self, db: &'db dyn HirDatabase) -> Option<(SelfParam, Type<'db>)> {
- let func = match self.callee {
- Callee::Def(CallableDefId::FunctionId(it)) if self.is_bound_method => it,
- _ => return None,
- };
- let func = Function { id: func };
+ if !self.is_bound_method {
+ return None;
+ }
+ let func = self.as_function()?;
Some((
func.self_param(db)?,
self.ty.derived(self.sig.skip_binder().inputs_and_output.inputs()[0]),
@@ -6350,7 +6774,12 @@ impl HasContainer for Module {
impl HasContainer for Function {
fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
- container_id_to_hir(self.id.lookup(db).container)
+ match self.id {
+ AnyFunctionId::FunctionId(id) => container_id_to_hir(id.lookup(db).container),
+ AnyFunctionId::BuiltinDeriveImplMethod { impl_, .. } => {
+ ItemContainer::Impl(Impl { id: AnyImplId::BuiltinDeriveImplId(impl_) })
+ }
+ }
}
}
@@ -6402,11 +6831,79 @@ impl HasContainer for ExternBlock {
}
}
+pub trait HasName {
+ fn name(&self, db: &dyn HirDatabase) -> Option<Name>;
+}
+
+macro_rules! impl_has_name {
+ ( $( $ty:ident ),* $(,)? ) => {
+ $(
+ impl HasName for $ty {
+ fn name(&self, db: &dyn HirDatabase) -> Option<Name> {
+ (*self).name(db).into()
+ }
+ }
+ )*
+ };
+}
+
+impl_has_name!(
+ ModuleDef,
+ Module,
+ Field,
+ Struct,
+ Union,
+ Enum,
+ Variant,
+ Adt,
+ VariantDef,
+ DefWithBody,
+ Function,
+ ExternCrateDecl,
+ Const,
+ Static,
+ Trait,
+ TypeAlias,
+ Macro,
+ ExternAssocItem,
+ AssocItem,
+ Local,
+ DeriveHelper,
+ ToolModule,
+ Label,
+ GenericParam,
+ TypeParam,
+ LifetimeParam,
+ ConstParam,
+ TypeOrConstParam,
+ InlineAsmOperand,
+);
+
+macro_rules! impl_has_name_no_db {
+ ( $( $ty:ident ),* $(,)? ) => {
+ $(
+ impl HasName for $ty {
+ fn name(&self, _db: &dyn HirDatabase) -> Option<Name> {
+ (*self).name().into()
+ }
+ }
+ )*
+ };
+}
+
+impl_has_name_no_db!(TupleField, StaticLifetime, BuiltinType, BuiltinAttr);
+
+impl HasName for Param<'_> {
+ fn name(&self, db: &dyn HirDatabase) -> Option<Name> {
+ self.name(db)
+ }
+}
+
fn container_id_to_hir(c: ItemContainerId) -> ItemContainer {
match c {
ItemContainerId::ExternBlockId(id) => ItemContainer::ExternBlock(ExternBlock { id }),
ItemContainerId::ModuleId(id) => ItemContainer::Module(Module { id }),
- ItemContainerId::ImplId(id) => ItemContainer::Impl(Impl { id }),
+ ItemContainerId::ImplId(id) => ItemContainer::Impl(id.into()),
ItemContainerId::TraitId(id) => ItemContainer::Trait(Trait { id }),
}
}
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index fcb97ab34e..f4c42537de 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -13,7 +13,7 @@ use std::{
use base_db::FxIndexSet;
use either::Either;
use hir_def::{
- DefWithBodyId, FunctionId, MacroId, StructId, TraitId, VariantId,
+ DefWithBodyId, MacroId, StructId, TraitId, VariantId,
attrs::parse_extra_crate_attrs,
expr_store::{Body, ExprOrPatSource, HygieneId, path::Path},
hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
@@ -34,7 +34,7 @@ use hir_ty::{
diagnostics::{unsafe_operations, unsafe_operations_for_body},
infer_query_with_inspect,
next_solver::{
- DbInterner, Span,
+ AnyImplId, DbInterner, Span,
format_proof_tree::{ProofTreeData, dump_proof_tree_structured},
},
};
@@ -53,11 +53,11 @@ use syntax::{
};
use crate::{
- Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam,
- Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource, Impl,
- InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
- Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait, TupleField, Type,
- TypeAlias, TypeParam, Union, Variant, VariantDef,
+ Adjust, Adjustment, Adt, AnyFunctionId, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const,
+ ConstParam, Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution,
+ HasSource, Impl, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro,
+ Module, ModuleDef, Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait,
+ TupleField, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
db::HirDatabase,
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
source_analyzer::{SourceAnalyzer, resolve_hir_path},
@@ -106,7 +106,10 @@ impl PathResolution {
| PathResolution::DeriveHelper(_)
| PathResolution::ConstParam(_) => None,
PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
- PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
+ PathResolution::SelfType(impl_def) => match impl_def.id {
+ AnyImplId::ImplId(id) => Some(TypeNs::SelfType(id)),
+ AnyImplId::BuiltinDeriveImplId(_) => None,
+ },
}
}
}
@@ -345,23 +348,23 @@ impl<DB: HirDatabase + ?Sized> Semantics<'_, DB> {
}
pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
- self.imp.resolve_await_to_poll(await_expr).map(Function::from)
+ self.imp.resolve_await_to_poll(await_expr)
}
pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
- self.imp.resolve_prefix_expr(prefix_expr).map(Function::from)
+ self.imp.resolve_prefix_expr(prefix_expr)
}
pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
- self.imp.resolve_index_expr(index_expr).map(Function::from)
+ self.imp.resolve_index_expr(index_expr)
}
pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
- self.imp.resolve_bin_expr(bin_expr).map(Function::from)
+ self.imp.resolve_bin_expr(bin_expr)
}
pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
- self.imp.resolve_try_expr(try_expr).map(Function::from)
+ self.imp.resolve_try_expr(try_expr)
}
pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
@@ -833,7 +836,7 @@ impl<'db> SemanticsImpl<'db> {
// FIXME: Type the return type
/// Returns the range (pre-expansion) in the string literal corresponding to the resolution,
/// absolute file range (post-expansion)
- /// of the part in the format string, the corresponding string token and the resolution if it
+ /// of the part in the format string (post-expansion), the corresponding string token and the resolution if it
/// exists.
// FIXME: Remove this in favor of `check_for_format_args_template_with_file`
pub fn check_for_format_args_template(
@@ -1749,6 +1752,7 @@ impl<'db> SemanticsImpl<'db> {
func: Function,
subst: impl IntoIterator<Item = Type<'db>>,
) -> Option<Function> {
+ let AnyFunctionId::FunctionId(func) = func.id else { return Some(func) };
let interner = DbInterner::new_no_crate(self.db);
let mut subst = subst.into_iter();
let substs =
@@ -1757,7 +1761,12 @@ impl<'db> SemanticsImpl<'db> {
subst.next().expect("too few subst").ty.into()
});
assert!(subst.next().is_none(), "too many subst");
- Some(self.db.lookup_impl_method(env.env, func.into(), substs).0.into())
+ Some(match self.db.lookup_impl_method(env.env, func, substs).0 {
+ Either::Left(it) => it.into(),
+ Either::Right((impl_, method)) => {
+ Function { id: AnyFunctionId::BuiltinDeriveImplMethod { method, impl_ } }
+ }
+ })
}
fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<StructId> {
@@ -1768,23 +1777,23 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(range_expr.syntax())?.resolve_range_expr(self.db, range_expr)
}
- fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<FunctionId> {
+ fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr)
}
- fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<FunctionId> {
+ fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
self.analyze(prefix_expr.syntax())?.resolve_prefix_expr(self.db, prefix_expr)
}
- fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<FunctionId> {
+ fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
self.analyze(index_expr.syntax())?.resolve_index_expr(self.db, index_expr)
}
- fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<FunctionId> {
+ fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
self.analyze(bin_expr.syntax())?.resolve_bin_expr(self.db, bin_expr)
}
- fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<FunctionId> {
+ fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr)
}
@@ -1861,7 +1870,9 @@ impl<'db> SemanticsImpl<'db> {
}
pub fn get_unsafe_ops(&self, def: DefWithBody) -> FxHashSet<ExprOrPatSource> {
- let def = DefWithBodyId::from(def);
+ let Ok(def) = DefWithBodyId::try_from(def) else {
+ return FxHashSet::default();
+ };
let (body, source_map) = self.db.body_with_source_map(def);
let infer = InferenceResult::for_body(self.db, def);
let mut res = FxHashSet::default();
@@ -1877,7 +1888,9 @@ impl<'db> SemanticsImpl<'db> {
always!(block.unsafe_token().is_some());
let block = self.wrap_node_infile(ast::Expr::from(block));
let Some(def) = self.body_for(block.syntax()) else { return Vec::new() };
- let def = def.into();
+ let Ok(def) = def.try_into() else {
+ return Vec::new();
+ };
let (body, source_map) = self.db.body_with_source_map(def);
let infer = InferenceResult::for_body(self.db, def);
let Some(ExprOrPatId::ExprId(block)) = source_map.node_expr(block.as_ref()) else {
@@ -2023,16 +2036,22 @@ impl<'db> SemanticsImpl<'db> {
}
/// Search for a definition's source and cache its syntax tree
- pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
- where
- Def::Ast: AstNode,
- {
+ pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>> {
// FIXME: source call should go through the parse cache
let res = def.source(self.db)?;
self.cache(find_root(res.value.syntax()), res.file_id);
Some(res)
}
+ pub fn source_with_range<Def: HasSource>(
+ &self,
+ def: Def,
+ ) -> Option<InFile<(TextRange, Option<Def::Ast>)>> {
+ let res = def.source_with_range(self.db)?;
+ self.parse_or_expand(res.file_id);
+ Some(res)
+ }
+
pub fn body_for(&self, node: InFile<&SyntaxNode>) -> Option<DefWithBody> {
let container = self.with_ctx(|ctx| ctx.find_container(node))?;
@@ -2162,9 +2181,10 @@ impl<'db> SemanticsImpl<'db> {
let def = match &enclosing_item {
Either::Left(ast::Item::Fn(it)) if it.unsafe_token().is_some() => return true,
- Either::Left(ast::Item::Fn(it)) => {
- self.to_def(it).map(<_>::into).map(DefWithBodyId::FunctionId)
- }
+ Either::Left(ast::Item::Fn(it)) => (|| match self.to_def(it)?.id {
+ AnyFunctionId::FunctionId(id) => Some(DefWithBodyId::FunctionId(id)),
+ AnyFunctionId::BuiltinDeriveImplMethod { .. } => None,
+ })(),
Either::Left(ast::Item::Const(it)) => {
self.to_def(it).map(<_>::into).map(DefWithBodyId::ConstId)
}
@@ -2201,7 +2221,11 @@ impl<'db> SemanticsImpl<'db> {
}
pub fn impl_generated_from_derive(&self, impl_: Impl) -> Option<Adt> {
- let source = hir_def::src::HasSource::ast_ptr(&impl_.id.loc(self.db), self.db);
+ let id = match impl_.id {
+ AnyImplId::ImplId(id) => id,
+ AnyImplId::BuiltinDeriveImplId(id) => return Some(id.loc(self.db).adt.into()),
+ };
+ let source = hir_def::src::HasSource::ast_ptr(&id.loc(self.db), self.db);
let mut file_id = source.file_id;
let adt_ast_id = loop {
let macro_call = file_id.macro_file()?;
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 848ad33801..6ba7a42c19 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -57,9 +57,9 @@ use syntax::{
use triomphe::Arc;
use crate::{
- Adt, AssocItem, BindingMode, BuiltinAttr, BuiltinType, Callable, Const, DeriveHelper, Field,
- Function, GenericSubstitution, Local, Macro, ModuleDef, Static, Struct, ToolModule, Trait,
- TupleField, Type, TypeAlias, Variant,
+ Adt, AnyFunctionId, AssocItem, BindingMode, BuiltinAttr, BuiltinType, Callable, Const,
+ DeriveHelper, Field, Function, GenericSubstitution, Local, Macro, ModuleDef, Static, Struct,
+ ToolModule, Trait, TupleField, Type, TypeAlias, Variant,
db::HirDatabase,
semantics::{PathResolution, PathResolutionPerNs},
};
@@ -431,7 +431,7 @@ impl<'db> SourceAnalyzer<'db> {
let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
let (f_in_trait, substs) = self.infer()?.method_resolution(expr_id)?;
- Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into())
+ Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs))
}
pub(crate) fn resolve_method_call_fallback(
@@ -446,8 +446,8 @@ impl<'db> SourceAnalyzer<'db> {
let (fn_, subst) =
self.resolve_impl_method_or_trait_def_with_subst(db, f_in_trait, substs);
Some((
- Either::Left(fn_.into()),
- Some(GenericSubstitution::new(fn_.into(), subst, self.trait_environment(db))),
+ Either::Left(fn_),
+ GenericSubstitution::new_from_fn(fn_, subst, self.trait_environment(db)),
))
}
None => {
@@ -519,8 +519,8 @@ impl<'db> SourceAnalyzer<'db> {
None => inference_result.method_resolution(expr_id).map(|(f, substs)| {
let (f, subst) = self.resolve_impl_method_or_trait_def_with_subst(db, f, substs);
(
- Either::Right(f.into()),
- Some(GenericSubstitution::new(f.into(), subst, self.trait_environment(db))),
+ Either::Right(f),
+ GenericSubstitution::new_from_fn(f, subst, self.trait_environment(db)),
)
}),
}
@@ -569,7 +569,7 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
await_expr: &ast::AwaitExpr,
- ) -> Option<FunctionId> {
+ ) -> Option<Function> {
let mut ty = self.ty_of_expr(await_expr.expr()?)?;
let into_future_trait = self
@@ -605,7 +605,7 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
prefix_expr: &ast::PrefixExpr,
- ) -> Option<FunctionId> {
+ ) -> Option<Function> {
let (_op_trait, op_fn) = match prefix_expr.op_kind()? {
ast::UnaryOp::Deref => {
// This can be either `Deref::deref` or `DerefMut::deref_mut`.
@@ -650,7 +650,7 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
index_expr: &ast::IndexExpr,
- ) -> Option<FunctionId> {
+ ) -> Option<Function> {
let base_ty = self.ty_of_expr(index_expr.base()?)?;
let index_ty = self.ty_of_expr(index_expr.index()?)?;
@@ -679,7 +679,7 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
binop_expr: &ast::BinExpr,
- ) -> Option<FunctionId> {
+ ) -> Option<Function> {
let op = binop_expr.op_kind()?;
let lhs = self.ty_of_expr(binop_expr.lhs()?)?;
let rhs = self.ty_of_expr(binop_expr.rhs()?)?;
@@ -699,7 +699,7 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
try_expr: &ast::TryExpr,
- ) -> Option<FunctionId> {
+ ) -> Option<Function> {
let ty = self.ty_of_expr(try_expr.expr()?)?;
let op_fn = self.lang_items(db).TryTraitBranch?;
@@ -905,7 +905,7 @@ impl<'db> SourceAnalyzer<'db> {
subs,
self.trait_environment(db),
);
- (AssocItemId::from(f_in_trait), subst)
+ (AssocItem::Function(f_in_trait.into()), Some(subst))
}
Some(func_ty) => {
if let TyKind::FnDef(_fn_def, subs) = func_ty.kind() {
@@ -913,19 +913,19 @@ impl<'db> SourceAnalyzer<'db> {
.resolve_impl_method_or_trait_def_with_subst(
db, f_in_trait, subs,
);
- let subst = GenericSubstitution::new(
- fn_.into(),
+ let subst = GenericSubstitution::new_from_fn(
+ fn_,
subst,
self.trait_environment(db),
);
- (fn_.into(), subst)
+ (AssocItem::Function(fn_), subst)
} else {
let subst = GenericSubstitution::new(
f_in_trait.into(),
subs,
self.trait_environment(db),
);
- (f_in_trait.into(), subst)
+ (AssocItem::Function(f_in_trait.into()), Some(subst))
}
}
}
@@ -938,11 +938,11 @@ impl<'db> SourceAnalyzer<'db> {
subst,
self.trait_environment(db),
);
- (konst.into(), subst)
+ (AssocItem::Const(konst.into()), Some(subst))
}
};
- return Some((PathResolution::Def(AssocItem::from(assoc).into()), Some(subst)));
+ return Some((PathResolution::Def(assoc.into()), subst));
}
if let Some(VariantId::EnumVariantId(variant)) =
infer.variant_resolution_for_expr_or_pat(expr_id)
@@ -1401,7 +1401,7 @@ impl<'db> SourceAnalyzer<'db> {
db: &'db dyn HirDatabase,
func: FunctionId,
substs: GenericArgs<'db>,
- ) -> FunctionId {
+ ) -> Function {
self.resolve_impl_method_or_trait_def_with_subst(db, func, substs).0
}
@@ -1410,13 +1410,19 @@ impl<'db> SourceAnalyzer<'db> {
db: &'db dyn HirDatabase,
func: FunctionId,
substs: GenericArgs<'db>,
- ) -> (FunctionId, GenericArgs<'db>) {
+ ) -> (Function, GenericArgs<'db>) {
let owner = match self.resolver.body_owner() {
Some(it) => it,
- None => return (func, substs),
+ None => return (func.into(), substs),
};
let env = self.param_and(db.trait_environment_for_body(owner));
- db.lookup_impl_method(env, func, substs)
+ let (func, args) = db.lookup_impl_method(env, func, substs);
+ match func {
+ Either::Left(func) => (func.into(), args),
+ Either::Right((impl_, method)) => {
+ (Function { id: AnyFunctionId::BuiltinDeriveImplMethod { method, impl_ } }, args)
+ }
+ }
}
fn resolve_impl_const_or_trait_def_with_subst(
@@ -1802,5 +1808,5 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H
};
let span_map = db.expansion_span_map(macro_file);
let ctx = span_map.span_at(name.value.text_range().start()).ctx;
- HygieneId::new(ctx.opaque_and_semitransparent(db))
+ HygieneId::new(ctx.opaque_and_semiopaque(db))
}
diff --git a/crates/ide-assists/src/assist_config.rs b/crates/ide-assists/src/assist_config.rs
index 7d5070ace6..636acf3f00 100644
--- a/crates/ide-assists/src/assist_config.rs
+++ b/crates/ide-assists/src/assist_config.rs
@@ -9,6 +9,7 @@ use ide_db::{
SnippetCap,
assists::ExprFillDefaultMode,
imports::{import_assets::ImportPathConfig, insert_use::InsertUseConfig},
+ rename::RenameConfig,
};
use crate::AssistKind;
@@ -27,6 +28,7 @@ pub struct AssistConfig {
pub code_action_grouping: bool,
pub expr_fill_default: ExprFillDefaultMode,
pub prefer_self_ty: bool,
+ pub show_rename_conflicts: bool,
}
impl AssistConfig {
@@ -46,4 +48,8 @@ impl AssistConfig {
allow_unstable,
}
}
+
+ pub fn rename_config(&self) -> RenameConfig {
+ RenameConfig { show_conflicts: self.show_rename_conflicts }
+ }
}
diff --git a/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
index 05ccd5b9bf..08b114072f 100644
--- a/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
+++ b/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
@@ -218,7 +218,7 @@ fn let_stmt_to_guarded_return(
let let_else_stmt = make::let_else_stmt(
happy_pattern,
let_stmt.ty(),
- expr,
+ expr.reset_indent(),
ast::make::tail_only_block_expr(early_expression),
);
let let_else_stmt = let_else_stmt.indent(let_indent_level);
@@ -275,11 +275,11 @@ fn flat_let_chain(mut expr: ast::Expr) -> Vec<ast::Expr> {
&& bin_expr.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And))
&& let (Some(lhs), Some(rhs)) = (bin_expr.lhs(), bin_expr.rhs())
{
- reduce_cond(rhs);
+ reduce_cond(rhs.reset_indent());
expr = lhs;
}
- reduce_cond(expr);
+ reduce_cond(expr.reset_indent());
chains.reverse();
chains
}
@@ -1020,6 +1020,63 @@ fn main() {
}
#[test]
+ fn indentations() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+mod indent {
+ fn main() {
+ $0if let None = Some(
+ 92
+ ) {
+ foo(
+ 93
+ );
+ }
+ }
+}
+"#,
+ r#"
+mod indent {
+ fn main() {
+ let None = Some(
+ 92
+ ) else { return };
+ foo(
+ 93
+ );
+ }
+}
+"#,
+ );
+
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+//- minicore: option
+mod indent {
+ fn foo(_: i32) -> Option<i32> { None }
+ fn main() {
+ $0let x = foo(
+ 2
+ );
+ }
+}
+"#,
+ r#"
+mod indent {
+ fn foo(_: i32) -> Option<i32> { None }
+ fn main() {
+ let Some(x) = foo(
+ 2
+ ) else { return };
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
fn ignore_already_converted_if() {
check_assist_not_applicable(
convert_to_guarded_return,
diff --git a/crates/ide-assists/src/handlers/move_guard.rs b/crates/ide-assists/src/handlers/move_guard.rs
index 8daf86923d..1c0c6e43d5 100644
--- a/crates/ide-assists/src/handlers/move_guard.rs
+++ b/crates/ide-assists/src/handlers/move_guard.rs
@@ -1,6 +1,11 @@
+use itertools::Itertools;
use syntax::{
SyntaxKind::WHITESPACE,
- ast::{AstNode, BlockExpr, ElseBranch, Expr, IfExpr, MatchArm, Pat, edit::AstNodeEdit, make},
+ ast::{
+ AstNode, BlockExpr, ElseBranch, Expr, IfExpr, MatchArm, Pat, edit::AstNodeEdit, make,
+ syntax_factory::SyntaxFactory,
+ },
+ syntax_editor::Element,
};
use crate::{AssistContext, AssistId, Assists};
@@ -131,8 +136,10 @@ pub(crate) fn move_arm_cond_to_match_guard(
AssistId::refactor_rewrite("move_arm_cond_to_match_guard"),
"Move condition to match guard",
replace_node.text_range(),
- |edit| {
- edit.delete(match_arm.syntax().text_range());
+ |builder| {
+ let make = SyntaxFactory::without_mappings();
+ let mut replace_arms = vec![];
+
// Dedent if if_expr is in a BlockExpr
let dedent = if needs_dedent {
cov_mark::hit!(move_guard_ifelse_in_block);
@@ -141,47 +148,30 @@ pub(crate) fn move_arm_cond_to_match_guard(
cov_mark::hit!(move_guard_ifelse_else_block);
0
};
- let then_arm_end = match_arm.syntax().text_range().end();
let indent_level = match_arm.indent_level();
- let spaces = indent_level;
- let mut first = true;
for (cond, block) in conds_blocks {
- if !first {
- edit.insert(then_arm_end, format!("\n{spaces}"));
- } else {
- first = false;
- }
- let guard = format!("{match_pat} if {cond} => ");
- edit.insert(then_arm_end, guard);
let only_expr = block.statements().next().is_none();
- match &block.tail_expr() {
- Some(then_expr) if only_expr => {
- edit.insert(then_arm_end, then_expr.syntax().text());
- edit.insert(then_arm_end, ",");
- }
- _ => {
- let to_insert = block.dedent(dedent.into()).syntax().text();
- edit.insert(then_arm_end, to_insert)
- }
- }
+ let expr = match block.tail_expr() {
+ Some(then_expr) if only_expr => then_expr,
+ _ => block.dedent(dedent.into()).into(),
+ };
+ let guard = make.match_guard(cond);
+ let new_arm = make.match_arm(match_pat.clone(), Some(guard), expr);
+ replace_arms.push(new_arm);
}
- if let Some(e) = tail {
+ if let Some(block) = tail {
cov_mark::hit!(move_guard_ifelse_else_tail);
- let guard = format!("\n{spaces}{match_pat} => ");
- edit.insert(then_arm_end, guard);
- let only_expr = e.statements().next().is_none();
- match &e.tail_expr() {
+ let only_expr = block.statements().next().is_none();
+ let expr = match block.tail_expr() {
Some(expr) if only_expr => {
cov_mark::hit!(move_guard_ifelse_expr_only);
- edit.insert(then_arm_end, expr.syntax().text());
- edit.insert(then_arm_end, ",");
- }
- _ => {
- let to_insert = e.dedent(dedent.into()).syntax().text();
- edit.insert(then_arm_end, to_insert)
+ expr
}
- }
+ _ => block.dedent(dedent.into()).into(),
+ };
+ let new_arm = make.match_arm(match_pat, None, expr);
+ replace_arms.push(new_arm);
} else {
// There's no else branch. Add a pattern without guard, unless the following match
// arm is `_ => ...`
@@ -193,9 +183,21 @@ pub(crate) fn move_arm_cond_to_match_guard(
{
cov_mark::hit!(move_guard_ifelse_has_wildcard);
}
- _ => edit.insert(then_arm_end, format!("\n{spaces}{match_pat} => {{}}")),
+ _ => {
+ let block_expr = make.expr_empty_block().into();
+ replace_arms.push(make.match_arm(match_pat, None, block_expr));
+ }
}
}
+
+ let mut edit = builder.make_editor(match_arm.syntax());
+
+ let newline = make.whitespace(&format!("\n{indent_level}"));
+ let replace_arms = replace_arms.iter().map(|it| it.syntax().syntax_element());
+ let replace_arms = Itertools::intersperse(replace_arms, newline.syntax_element());
+ edit.replace_with_many(match_arm.syntax(), replace_arms.collect());
+
+ builder.add_file_edits(ctx.vfs_file_id(), edit);
},
)
}
diff --git a/crates/ide-assists/src/handlers/remove_underscore.rs b/crates/ide-assists/src/handlers/remove_underscore.rs
index a8e27416d5..1de1c15cf7 100644
--- a/crates/ide-assists/src/handlers/remove_underscore.rs
+++ b/crates/ide-assists/src/handlers/remove_underscore.rs
@@ -62,7 +62,9 @@ pub(crate) fn remove_underscore(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
"Remove underscore from a used variable",
text_range,
|builder| {
- let changes = def.rename(&ctx.sema, new_name, RenameDefinition::Yes).unwrap();
+ let changes = def
+ .rename(&ctx.sema, new_name, RenameDefinition::Yes, &ctx.config.rename_config())
+ .unwrap();
builder.source_change = changes;
},
)
diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs
index c9044fe111..a52bd74d14 100644
--- a/crates/ide-assists/src/tests.rs
+++ b/crates/ide-assists/src/tests.rs
@@ -38,6 +38,7 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
code_action_grouping: true,
expr_fill_default: ExprFillDefaultMode::Todo,
prefer_self_ty: false,
+ show_rename_conflicts: true,
};
pub(crate) const TEST_CONFIG_NO_GROUPING: AssistConfig = AssistConfig {
@@ -59,6 +60,7 @@ pub(crate) const TEST_CONFIG_NO_GROUPING: AssistConfig = AssistConfig {
code_action_grouping: false,
expr_fill_default: ExprFillDefaultMode::Todo,
prefer_self_ty: false,
+ show_rename_conflicts: true,
};
pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
@@ -80,6 +82,7 @@ pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
code_action_grouping: true,
expr_fill_default: ExprFillDefaultMode::Todo,
prefer_self_ty: false,
+ show_rename_conflicts: true,
};
pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
@@ -101,6 +104,7 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
code_action_grouping: true,
expr_fill_default: ExprFillDefaultMode::Todo,
prefer_self_ty: false,
+ show_rename_conflicts: true,
};
fn assists(
diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs
index b28764f6fc..355687b203 100644
--- a/crates/ide-completion/src/completions.rs
+++ b/crates/ide-completion/src/completions.rs
@@ -13,6 +13,7 @@ pub(crate) mod format_string;
pub(crate) mod item_list;
pub(crate) mod keyword;
pub(crate) mod lifetime;
+pub(crate) mod macro_def;
pub(crate) mod mod_;
pub(crate) mod pattern;
pub(crate) mod postfix;
diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs
index 18cfa53f8e..18c1992afa 100644
--- a/crates/ide-completion/src/completions/dot.rs
+++ b/crates/ide-completion/src/completions/dot.rs
@@ -652,7 +652,7 @@ fn foo(u: U) { u.$0 }
fn test_method_completion_only_fitting_impls() {
check_no_kw(
r#"
-struct A<T> {}
+struct A<T>(T);
impl A<u32> {
fn the_method(&self) {}
}
@@ -662,6 +662,7 @@ impl A<i32> {
fn foo(a: A<u32>) { a.$0 }
"#,
expect![[r#"
+ fd 0 u32
me the_method() fn(&self)
"#]],
)
diff --git a/crates/ide-completion/src/completions/macro_def.rs b/crates/ide-completion/src/completions/macro_def.rs
new file mode 100644
index 0000000000..2c8e7a2e62
--- /dev/null
+++ b/crates/ide-completion/src/completions/macro_def.rs
@@ -0,0 +1,31 @@
+//! Completion for macro meta-variable segments
+
+use ide_db::SymbolKind;
+
+use crate::{CompletionItem, Completions, context::CompletionContext};
+
+pub(crate) fn complete_macro_segment(acc: &mut Completions, ctx: &CompletionContext<'_>) {
+ for &label in MACRO_SEGMENTS {
+ let item =
+ CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), label, ctx.edition);
+ item.add_to(acc, ctx.db);
+ }
+}
+
+const MACRO_SEGMENTS: &[&str] = &[
+ "ident",
+ "block",
+ "stmt",
+ "expr",
+ "pat",
+ "ty",
+ "lifetime",
+ "literal",
+ "path",
+ "meta",
+ "tt",
+ "item",
+ "vis",
+ "expr_2021",
+ "pat_param",
+];
diff --git a/crates/ide-completion/src/completions/record.rs b/crates/ide-completion/src/completions/record.rs
index bfa567009c..c5bfdcb8b7 100644
--- a/crates/ide-completion/src/completions/record.rs
+++ b/crates/ide-completion/src/completions/record.rs
@@ -70,8 +70,11 @@ pub(crate) fn complete_record_expr_fields(
}
_ => {
let missing_fields = ctx.sema.record_literal_missing_fields(record_expr);
+ let update_exists = record_expr
+ .record_expr_field_list()
+ .is_some_and(|list| list.dotdot_token().is_some());
- if !missing_fields.is_empty() {
+ if !missing_fields.is_empty() && !update_exists {
cov_mark::hit!(functional_update_field);
add_default_update(acc, ctx, ty);
}
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 963e396704..d116f665ad 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -13,7 +13,7 @@ use hir::{
};
use ide_db::{
FilePosition, FxHashMap, FxHashSet, RootDatabase, famous_defs::FamousDefs,
- helpers::is_editable_crate,
+ helpers::is_editable_crate, syntax_helpers::node_ext::is_in_macro_matcher,
};
use itertools::Either;
use syntax::{
@@ -389,6 +389,7 @@ pub(crate) enum CompletionAnalysis<'db> {
fake_attribute_under_caret: Option<ast::Attr>,
extern_crate: Option<ast::ExternCrate>,
},
+ MacroSegment,
}
/// Information about the field or method access we are completing.
@@ -729,7 +730,7 @@ impl<'db> CompletionContext<'db> {
let prev_token = original_token.prev_token()?;
// only has a single colon
- if prev_token.kind() != T![:] {
+ if prev_token.kind() != T![:] && !is_in_macro_matcher(&original_token) {
return None;
}
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index 49fb36ad04..65bae5b66e 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -5,7 +5,7 @@ use hir::{ExpandResult, InFile, Semantics, Type, TypeInfo, Variant};
use ide_db::{
RootDatabase, active_parameter::ActiveParameter, syntax_helpers::node_ext::find_loops,
};
-use itertools::Either;
+use itertools::{Either, Itertools};
use stdx::always;
use syntax::{
AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
@@ -510,6 +510,21 @@ fn analyze<'db>(
colon_prefix,
extern_crate: p.ancestors().find_map(ast::ExternCrate::cast),
}
+ } else if p.kind() == SyntaxKind::TOKEN_TREE
+ && p.ancestors().any(|it| ast::Macro::can_cast(it.kind()))
+ {
+ if let Some([_ident, colon, _name, dollar]) = fake_ident_token
+ .siblings_with_tokens(Direction::Prev)
+ .filter(|it| !it.kind().is_trivia())
+ .take(4)
+ .collect_array()
+ && dollar.kind() == T![$]
+ && colon.kind() == T![:]
+ {
+ CompletionAnalysis::MacroSegment
+ } else {
+ return None;
+ }
} else {
return None;
}
diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs
index 33ab43fa61..69ca2af772 100644
--- a/crates/ide-completion/src/lib.rs
+++ b/crates/ide-completion/src/lib.rs
@@ -263,6 +263,9 @@ pub fn completions(
extern_crate.as_ref(),
);
}
+ CompletionAnalysis::MacroSegment => {
+ completions::macro_def::complete_macro_segment(acc, ctx);
+ }
CompletionAnalysis::UnexpandedAttrTT { .. } | CompletionAnalysis::String { .. } => (),
}
}
diff --git a/crates/ide-completion/src/render/macro_.rs b/crates/ide-completion/src/render/macro_.rs
index 6efa8a8455..8cdeb8abbf 100644
--- a/crates/ide-completion/src/render/macro_.rs
+++ b/crates/ide-completion/src/render/macro_.rs
@@ -1,6 +1,6 @@
//! Renderer for macro invocations.
-use hir::HirDisplay;
+use hir::{HirDisplay, db::HirDatabase};
use ide_db::{SymbolKind, documentation::Documentation};
use syntax::{SmolStr, ToSmolStr, format_smolstr};
@@ -46,17 +46,15 @@ fn render(
ctx.source_range()
};
- let orig_name = macro_.name(ctx.db());
- let (name, orig_name, escaped_name) = (
- name.as_str(),
- orig_name.as_str(),
- name.display(ctx.db(), completion.edition).to_smolstr(),
- );
+ let (name, escaped_name) =
+ (name.as_str(), name.display(ctx.db(), completion.edition).to_smolstr());
let docs = ctx.docs(macro_);
- let docs_str = docs.as_ref().map(Documentation::as_str).unwrap_or_default();
let is_fn_like = macro_.is_fn_like(completion.db);
- let (bra, ket) =
- if is_fn_like { guess_macro_braces(name, orig_name, docs_str) } else { ("", "") };
+ let (bra, ket) = if is_fn_like {
+ guess_macro_braces(ctx.db(), macro_, name, docs.as_ref())
+ } else {
+ ("", "")
+ };
let needs_bang = is_fn_like && !is_use_path && !has_macro_bang;
@@ -115,12 +113,24 @@ fn banged_name(name: &str) -> SmolStr {
}
fn guess_macro_braces(
+ db: &dyn HirDatabase,
+ macro_: hir::Macro,
macro_name: &str,
- orig_name: &str,
- docs: &str,
+ docs: Option<&Documentation<'_>>,
) -> (&'static str, &'static str) {
+ if let Some(style) = macro_.preferred_brace_style(db) {
+ return match style {
+ hir::MacroBraces::Braces => (" {", "}"),
+ hir::MacroBraces::Brackets => ("[", "]"),
+ hir::MacroBraces::Parentheses => ("(", ")"),
+ };
+ }
+
+ let orig_name = macro_.name(db);
+ let docs = docs.map(Documentation::as_str).unwrap_or_default();
+
let mut votes = [0, 0, 0];
- for (idx, s) in docs.match_indices(macro_name).chain(docs.match_indices(orig_name)) {
+ for (idx, s) in docs.match_indices(macro_name).chain(docs.match_indices(orig_name.as_str())) {
let (before, after) = (&docs[..idx], &docs[idx + s.len()..]);
// Ensure to match the full word
if after.starts_with('!')
@@ -200,6 +210,57 @@ fn main() {
}
#[test]
+ fn preferred_macro_braces() {
+ check_edit(
+ "vec!",
+ r#"
+#[rust_analyzer::macro_style(brackets)]
+macro_rules! vec { () => {} }
+
+fn main() { v$0 }
+"#,
+ r#"
+#[rust_analyzer::macro_style(brackets)]
+macro_rules! vec { () => {} }
+
+fn main() { vec![$0] }
+"#,
+ );
+
+ check_edit(
+ "foo!",
+ r#"
+#[rust_analyzer::macro_style(braces)]
+macro_rules! foo { () => {} }
+fn main() { $0 }
+"#,
+ r#"
+#[rust_analyzer::macro_style(braces)]
+macro_rules! foo { () => {} }
+fn main() { foo! {$0} }
+"#,
+ );
+
+ check_edit(
+ "bar!",
+ r#"
+#[macro_export]
+#[rust_analyzer::macro_style(brackets)]
+macro_rules! foo { () => {} }
+pub use crate::foo as bar;
+fn main() { $0 }
+"#,
+ r#"
+#[macro_export]
+#[rust_analyzer::macro_style(brackets)]
+macro_rules! foo { () => {} }
+pub use crate::foo as bar;
+fn main() { bar![$0] }
+"#,
+ );
+ }
+
+ #[test]
fn guesses_macro_braces() {
check_edit(
"vec!",
diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs
index c9755525a5..797df3f163 100644
--- a/crates/ide-completion/src/tests/flyimport.rs
+++ b/crates/ide-completion/src/tests/flyimport.rs
@@ -79,6 +79,7 @@ fn macro_fuzzy_completion() {
r#"
//- /lib.rs crate:dep
/// Please call me as macro_with_curlies! {}
+#[rust_analyzer::macro_style(braces)]
#[macro_export]
macro_rules! macro_with_curlies {
() => {}
@@ -780,8 +781,8 @@ fn main() {
}
"#,
expect![[r#"
- fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
+ fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
me random_method(…) (use dep::test_mod::TestTrait) fn(&self) DEPRECATED
"#]],
);
diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs
index a1013b8654..d9be6556fa 100644
--- a/crates/ide-completion/src/tests/record.rs
+++ b/crates/ide-completion/src/tests/record.rs
@@ -264,6 +264,29 @@ fn main() {
}
#[test]
+fn functional_update_exist_update() {
+ check(
+ r#"
+//- minicore:default
+struct Foo { foo1: u32, foo2: u32 }
+impl Default for Foo {
+ fn default() -> Self { loop {} }
+}
+
+fn main() {
+ let thing = 1;
+ let foo = Foo { foo1: 0, foo2: 0 };
+ let foo2 = Foo { thing, $0 ..Default::default() }
+}
+"#,
+ expect![[r#"
+ fd foo1 u32
+ fd foo2 u32
+ "#]],
+ );
+}
+
+#[test]
fn empty_union_literal() {
check(
r#"
diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs
index 59a0c144c8..b82b23541c 100644
--- a/crates/ide-completion/src/tests/special.rs
+++ b/crates/ide-completion/src/tests/special.rs
@@ -482,6 +482,226 @@ fn foo() {}
}
#[test]
+fn completes_macro_segment() {
+ check(
+ r#"
+macro_rules! foo {
+ ($x:e$0) => ();
+}
+"#,
+ expect![[r#"
+ ba block
+ ba expr
+ ba expr_2021
+ ba ident
+ ba item
+ ba lifetime
+ ba literal
+ ba meta
+ ba pat
+ ba pat_param
+ ba path
+ ba stmt
+ ba tt
+ ba ty
+ ba vis
+ "#]],
+ );
+
+ check(
+ r#"
+macro_rules! foo {
+ ($x:$0) => ();
+}
+"#,
+ expect![[r#"
+ ba block
+ ba expr
+ ba expr_2021
+ ba ident
+ ba item
+ ba lifetime
+ ba literal
+ ba meta
+ ba pat
+ ba pat_param
+ ba path
+ ba stmt
+ ba tt
+ ba ty
+ ba vis
+ "#]],
+ );
+
+ check(
+ r#"
+macro_rules! foo {
+ ($($x:$0)*) => ();
+}
+"#,
+ expect![[r#"
+ ba block
+ ba expr
+ ba expr_2021
+ ba ident
+ ba item
+ ba lifetime
+ ba literal
+ ba meta
+ ba pat
+ ba pat_param
+ ba path
+ ba stmt
+ ba tt
+ ba ty
+ ba vis
+ "#]],
+ );
+
+ check(
+ r#"
+macro foo {
+ ($($x:$0)*) => ();
+}
+"#,
+ expect![[r#"
+ ba block
+ ba expr
+ ba expr_2021
+ ba ident
+ ba item
+ ba lifetime
+ ba literal
+ ba meta
+ ba pat
+ ba pat_param
+ ba path
+ ba stmt
+ ba tt
+ ba ty
+ ba vis
+ "#]],
+ );
+
+ check(
+ r#"
+macro foo($($x:$0)*) {
+ xxx;
+}
+"#,
+ expect![[r#"
+ ba block
+ ba expr
+ ba expr_2021
+ ba ident
+ ba item
+ ba lifetime
+ ba literal
+ ba meta
+ ba pat
+ ba pat_param
+ ba path
+ ba stmt
+ ba tt
+ ba ty
+ ba vis
+ "#]],
+ );
+
+ check_edit(
+ "expr",
+ r#"
+macro foo($($x:$0)*) {
+ xxx;
+}
+"#,
+ r#"
+macro foo($($x:expr)*) {
+ xxx;
+}
+"#,
+ );
+
+ check(
+ r#"
+macro_rules! foo {
+ ($fn : e$0) => ();
+}
+"#,
+ expect![[r#"
+ ba block
+ ba expr
+ ba expr_2021
+ ba ident
+ ba item
+ ba lifetime
+ ba literal
+ ba meta
+ ba pat
+ ba pat_param
+ ba path
+ ba stmt
+ ba tt
+ ba ty
+ ba vis
+ "#]],
+ );
+
+ check_edit(
+ "expr",
+ r#"
+macro foo($($x:ex$0)*) {
+ xxx;
+}
+"#,
+ r#"
+macro foo($($x:expr)*) {
+ xxx;
+}
+"#,
+ );
+}
+
+#[test]
+fn completes_in_macro_body() {
+ check(
+ r#"
+macro_rules! foo {
+ ($x:expr) => ($y:$0);
+}
+"#,
+ expect![[r#""#]],
+ );
+
+ check(
+ r#"
+macro_rules! foo {
+ ($x:expr) => ({$y:$0});
+}
+"#,
+ expect![[r#""#]],
+ );
+
+ check(
+ r#"
+macro foo {
+ ($x:expr) => ($y:$0);
+}
+"#,
+ expect![[r#""#]],
+ );
+
+ check(
+ r#"
+macro foo($x:expr) {
+ $y:$0
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
fn function_mod_share_name() {
check_no_kw(
r#"
@@ -946,6 +1166,15 @@ fn foo { crate:$0 }
Some(':'),
expect![""],
);
+
+ check_with_trigger_character(
+ r#"
+macro_rules! bar { ($($x:tt)*) => ($($x)*); }
+fn foo { bar!(crate:$0) }
+"#,
+ Some(':'),
+ expect![""],
+ );
}
#[test]
diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs
index 6a85c6e548..b77a18f56e 100644
--- a/crates/ide-db/src/apply_change.rs
+++ b/crates/ide-db/src/apply_change.rs
@@ -1,39 +1,15 @@
//! Applies changes to the IDE state transactionally.
-use base_db::SourceRootId;
use profile::Bytes;
-use rustc_hash::FxHashSet;
-use salsa::{Database as _, Durability, Setter as _};
+use salsa::Database as _;
-use crate::{
- ChangeWithProcMacros, RootDatabase,
- symbol_index::{LibraryRoots, LocalRoots},
-};
+use crate::{ChangeWithProcMacros, RootDatabase};
impl RootDatabase {
- pub fn request_cancellation(&mut self) {
- let _p = tracing::info_span!("RootDatabase::request_cancellation").entered();
- self.synthetic_write(Durability::LOW);
- }
-
pub fn apply_change(&mut self, change: ChangeWithProcMacros) {
let _p = tracing::info_span!("RootDatabase::apply_change").entered();
- self.request_cancellation();
+ self.trigger_cancellation();
tracing::trace!("apply_change {:?}", change);
- if let Some(roots) = &change.source_change.roots {
- let mut local_roots = FxHashSet::default();
- let mut library_roots = FxHashSet::default();
- for (idx, root) in roots.iter().enumerate() {
- let root_id = SourceRootId(idx as u32);
- if root.is_library {
- library_roots.insert(root_id);
- } else {
- local_roots.insert(root_id);
- }
- }
- LocalRoots::get(self).set_roots(self).to(local_roots);
- LibraryRoots::get(self).set_roots(self).to(library_roots);
- }
change.apply(self);
}
diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs
index 338c423254..413b58bf79 100644
--- a/crates/ide-db/src/lib.rs
+++ b/crates/ide-db/src/lib.rs
@@ -75,7 +75,7 @@ pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
pub use ::line_index;
/// `base_db` is normally also needed in places where `ide_db` is used, so this re-export is for convenience.
-pub use base_db::{self, FxIndexMap, FxIndexSet};
+pub use base_db::{self, FxIndexMap, FxIndexSet, LibraryRoots, LocalRoots};
pub use span::{self, FileId};
pub type FilePosition = FilePositionWrapper<FileId>;
@@ -200,10 +200,10 @@ impl RootDatabase {
db.set_all_crates(Arc::new(Box::new([])));
CrateGraphBuilder::default().set_in_db(&mut db);
db.set_proc_macros_with_durability(Default::default(), Durability::MEDIUM);
- _ = crate::symbol_index::LibraryRoots::builder(Default::default())
+ _ = base_db::LibraryRoots::builder(Default::default())
.durability(Durability::MEDIUM)
.new(&db);
- _ = crate::symbol_index::LocalRoots::builder(Default::default())
+ _ = base_db::LocalRoots::builder(Default::default())
.durability(Durability::MEDIUM)
.new(&db);
db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH);
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs
index e8f06a36be..015b06e8e0 100644
--- a/crates/ide-db/src/prime_caches.rs
+++ b/crates/ide-db/src/prime_caches.rs
@@ -108,10 +108,9 @@ pub fn parallel_prime_caches(
hir::attach_db(&db, || {
// method resolution is likely to hit all trait impls at some point
// we pre-populate it here as this will hit a lot of parses ...
- _ = hir::TraitImpls::for_crate(&db, crate_id);
- // we compute the lang items here as the work for them is also highly recursive and will be trigger by the module symbols query
+ // This also computes the lang items, which is what we want as the work for them is also highly recursive and will be trigger by the module symbols query
// slowing down leaf crate analysis tremendously as we go back to being blocked on a single thread
- _ = hir::crate_lang_items(&db, crate_id);
+ _ = hir::TraitImpls::for_crate(&db, crate_id);
})
});
@@ -271,7 +270,6 @@ pub fn parallel_prime_caches(
}
if crate_def_maps_done == crate_def_maps_total {
- // Can we trigger lru-eviction once at this point to reduce peak memory usage?
cb(ParallelPrimeCachesProgress {
crates_currently_indexing: vec![],
crates_done: crate_def_maps_done,
diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs
index 82eee69f0d..b03a5b6efb 100644
--- a/crates/ide-db/src/rename.rs
+++ b/crates/ide-db/src/rename.rs
@@ -45,6 +45,11 @@ use crate::{
traits::convert_to_def_in_trait,
};
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct RenameConfig {
+ pub show_conflicts: bool,
+}
+
pub type Result<T, E = RenameError> = std::result::Result<T, E>;
#[derive(Debug)]
@@ -81,6 +86,7 @@ impl Definition {
sema: &Semantics<'_, RootDatabase>,
new_name: &str,
rename_definition: RenameDefinition,
+ config: &RenameConfig,
) -> Result<SourceChange> {
// self.krate() returns None if
// self is a built-in attr, built-in type or tool module.
@@ -109,10 +115,15 @@ impl Definition {
bail!("Cannot rename a builtin attr.")
}
Definition::SelfType(_) => bail!("Cannot rename `Self`"),
- Definition::Macro(mac) => {
- rename_reference(sema, Definition::Macro(mac), new_name, rename_definition, edition)
- }
- def => rename_reference(sema, def, new_name, rename_definition, edition),
+ Definition::Macro(mac) => rename_reference(
+ sema,
+ Definition::Macro(mac),
+ new_name,
+ rename_definition,
+ edition,
+ config,
+ ),
+ def => rename_reference(sema, def, new_name, rename_definition, edition, config),
}
}
@@ -338,6 +349,7 @@ fn rename_reference(
new_name: &str,
rename_definition: RenameDefinition,
edition: Edition,
+ config: &RenameConfig,
) -> Result<SourceChange> {
let (mut new_name, ident_kind) = IdentifierKind::classify(edition, new_name)?;
@@ -396,7 +408,8 @@ fn rename_reference(
if rename_definition == RenameDefinition::Yes {
// This needs to come after the references edits, because we change the annotation of existing edits
// if a conflict is detected.
- let (file_id, edit) = source_edit_from_def(sema, def, &new_name, &mut source_change)?;
+ let (file_id, edit) =
+ source_edit_from_def(sema, config, def, &new_name, &mut source_change)?;
source_change.insert_source_edit(file_id, edit);
}
Ok(source_change)
@@ -554,6 +567,7 @@ fn source_edit_from_name_ref(
fn source_edit_from_def(
sema: &Semantics<'_, RootDatabase>,
+ config: &RenameConfig,
def: Definition,
new_name: &Name,
source_change: &mut SourceChange,
@@ -562,21 +576,22 @@ fn source_edit_from_def(
if let Definition::Local(local) = def {
let mut file_id = None;
- let conflict_annotation = if !sema.rename_conflicts(&local, new_name).is_empty() {
- Some(
- source_change.insert_annotation(ChangeAnnotation {
- label: "This rename will change the program's meaning".to_owned(),
- needs_confirmation: true,
- description: Some(
- "Some variable(s) will shadow the renamed variable \
+ let conflict_annotation =
+ if config.show_conflicts && !sema.rename_conflicts(&local, new_name).is_empty() {
+ Some(
+ source_change.insert_annotation(ChangeAnnotation {
+ label: "This rename will change the program's meaning".to_owned(),
+ needs_confirmation: true,
+ description: Some(
+ "Some variable(s) will shadow the renamed variable \
or be shadowed by it if the rename is performed"
- .to_owned(),
- ),
- }),
- )
- } else {
- None
- };
+ .to_owned(),
+ ),
+ }),
+ )
+ } else {
+ None
+ };
for source in local.sources(sema.db) {
let source = match source.source.clone().original_ast_node_rooted(sema.db) {
diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs
index e15d0b33bb..eb0529d6b5 100644
--- a/crates/ide-db/src/symbol_index.rs
+++ b/crates/ide-db/src/symbol_index.rs
@@ -27,7 +27,7 @@ use std::{
ops::ControlFlow,
};
-use base_db::{RootQueryDb, SourceRootId};
+use base_db::{LibraryRoots, LocalRoots, RootQueryDb, SourceRootId};
use fst::{Automaton, Streamer, raw::IndexedValue};
use hir::{
Crate, Module,
@@ -36,7 +36,6 @@ use hir::{
symbols::{FileSymbol, SymbolCollector},
};
use rayon::prelude::*;
-use rustc_hash::FxHashSet;
use salsa::Update;
use crate::RootDatabase;
@@ -102,22 +101,6 @@ impl Query {
}
}
-/// The set of roots for crates.io libraries.
-/// Files in libraries are assumed to never change.
-#[salsa::input(singleton, debug)]
-pub struct LibraryRoots {
- #[returns(ref)]
- pub roots: FxHashSet<SourceRootId>,
-}
-
-/// The set of "local" (that is, from the current workspace) roots.
-/// Files in local roots are assumed to change frequently.
-#[salsa::input(singleton, debug)]
-pub struct LocalRoots {
- #[returns(ref)]
- pub roots: FxHashSet<SourceRootId>,
-}
-
/// The symbol indices of modules that make up a given crate.
pub fn crate_symbols(db: &dyn HirDatabase, krate: Crate) -> Box<[&SymbolIndex<'_>]> {
let _p = tracing::info_span!("crate_symbols").entered();
@@ -443,6 +426,7 @@ impl Query {
mod tests {
use expect_test::expect_file;
+ use rustc_hash::FxHashSet;
use salsa::Setter;
use test_fixture::{WORKSPACE, WithFixture};
diff --git a/crates/ide-db/src/syntax_helpers/node_ext.rs b/crates/ide-db/src/syntax_helpers/node_ext.rs
index acce066b83..94ecf6a02d 100644
--- a/crates/ide-db/src/syntax_helpers/node_ext.rs
+++ b/crates/ide-db/src/syntax_helpers/node_ext.rs
@@ -1,12 +1,15 @@
//! Various helper functions to work with SyntaxNodes.
use std::ops::ControlFlow;
+use either::Either;
use itertools::Itertools;
use parser::T;
use span::Edition;
use syntax::{
- AstNode, AstToken, Preorder, RustLanguage, WalkEvent,
+ AstNode, AstToken, Direction, Preorder, RustLanguage, SyntaxToken, WalkEvent,
+ algo::non_trivia_sibling,
ast::{self, HasLoopBody, MacroCall, PathSegmentKind, VisibilityKind},
+ syntax_editor::Element,
};
pub fn expr_as_name_ref(expr: &ast::Expr) -> Option<ast::NameRef> {
@@ -542,3 +545,37 @@ pub fn macro_call_for_string_token(string: &ast::String) -> Option<MacroCall> {
let macro_call = string.syntax().parent_ancestors().find_map(ast::MacroCall::cast)?;
Some(macro_call)
}
+
+pub fn is_in_macro_matcher(token: &SyntaxToken) -> bool {
+ let Some(macro_def) = token
+ .parent_ancestors()
+ .map_while(Either::<ast::TokenTree, ast::Macro>::cast)
+ .find_map(Either::right)
+ else {
+ return false;
+ };
+ let range = token.text_range();
+ let Some(body) = (match macro_def {
+ ast::Macro::MacroDef(macro_def) => {
+ if let Some(args) = macro_def.args() {
+ return args.syntax().text_range().contains_range(range);
+ }
+ macro_def.body()
+ }
+ ast::Macro::MacroRules(macro_rules) => macro_rules.token_tree(),
+ }) else {
+ return false;
+ };
+ if !body.syntax().text_range().contains_range(range) {
+ return false;
+ }
+ body.token_trees_and_tokens().filter_map(|tt| tt.into_node()).any(|tt| {
+ let Some(next) = non_trivia_sibling(tt.syntax().syntax_element(), Direction::Next) else {
+ return false;
+ };
+ let Some(next_next) = next.next_sibling_or_token() else { return false };
+ next.kind() == T![=]
+ && next_next.kind() == T![>]
+ && tt.syntax().text_range().contains_range(range)
+ })
+}
diff --git a/crates/ide-db/src/syntax_helpers/suggest_name.rs b/crates/ide-db/src/syntax_helpers/suggest_name.rs
index 273328a8d2..b8b9a7a768 100644
--- a/crates/ide-db/src/syntax_helpers/suggest_name.rs
+++ b/crates/ide-db/src/syntax_helpers/suggest_name.rs
@@ -44,7 +44,7 @@ const SEQUENCE_TYPES: &[&str] = &["Vec", "VecDeque", "LinkedList"];
/// `vec.as_slice()` -> `slice`
/// `args.into_config()` -> `config`
/// `bytes.to_vec()` -> `vec`
-const USELESS_METHOD_PREFIXES: &[&str] = &["into_", "as_", "to_"];
+const USELESS_METHOD_PREFIXES: &[&str] = &["try_into_", "into_", "as_", "to_"];
/// Useless methods that are stripped from expression
///
diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index 953bc73da9..7692a7d61a 100644
--- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -734,11 +734,11 @@
FileSymbol {
name: "generic_impl_fn",
def: Function(
- Function {
- id: FunctionId(
+ FunctionId(
+ FunctionId(
6402,
),
- },
+ ),
),
loc: DeclarationLocation {
hir_file_id: FileId(
@@ -769,11 +769,11 @@
FileSymbol {
name: "impl_fn",
def: Function(
- Function {
- id: FunctionId(
+ FunctionId(
+ FunctionId(
6401,
),
- },
+ ),
),
loc: DeclarationLocation {
hir_file_id: FileId(
@@ -839,11 +839,11 @@
FileSymbol {
name: "main",
def: Function(
- Function {
- id: FunctionId(
+ FunctionId(
+ FunctionId(
6400,
),
- },
+ ),
),
loc: DeclarationLocation {
hir_file_id: FileId(
@@ -907,11 +907,11 @@
FileSymbol {
name: "trait_fn",
def: Function(
- Function {
- id: FunctionId(
+ FunctionId(
+ FunctionId(
6403,
),
- },
+ ),
),
loc: DeclarationLocation {
hir_file_id: FileId(
diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
index 4a12c5a26d..c47449f259 100644
--- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs
+++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -44,7 +44,12 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option<Vec<Ass
let label = format!("Rename to {}", d.suggested_text);
let mut res = unresolved_fix("change_case", &label, frange.range);
if ctx.resolve.should_resolve(&res.id) {
- let source_change = def.rename(&ctx.sema, &d.suggested_text, RenameDefinition::Yes);
+ let source_change = def.rename(
+ &ctx.sema,
+ &d.suggested_text,
+ RenameDefinition::Yes,
+ &ctx.config.rename_config(),
+ );
res.source_change = Some(source_change.ok().unwrap_or_default());
}
@@ -1054,4 +1059,19 @@ fn foo(_HelloWorld: ()) {}
"#,
);
}
+
+ #[test]
+ fn allow_with_repr_c() {
+ check_diagnostics(
+ r#"
+#[repr(C)]
+struct FFI_Struct;
+
+#[repr(C)]
+enum FFI_Enum {
+ Field,
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs b/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
index 3414e972d5..c5b2f499d3 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
@@ -64,6 +64,7 @@ unsafe trait Unsafe {}
fn drop_may_dangle() {
check_diagnostics(
r#"
+#![feature(lang_items)]
#[lang = "drop"]
trait Drop {}
struct S<T>;
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
index 0e18ce9674..2c05544701 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
@@ -156,4 +156,22 @@ impl Trait for dyn OtherTrait {}
"#,
)
}
+
+ #[test]
+ fn no_false_positive_on_specialization() {
+ check_diagnostics(
+ r#"
+#![feature(specialization)]
+
+pub trait Foo {
+ fn foo();
+}
+
+impl<T> Foo for T {
+ default fn foo() {}
+}
+impl Foo for bool {}
+"#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index e6702ccf13..f443dc08f5 100644
--- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -52,6 +52,7 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch<
),
display_range,
)
+ .stable()
.with_fixes(fixes(ctx, d))
}
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index 0b32144249..0c6953419f 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -100,6 +100,7 @@ use ide_db::{
generated::lints::{CLIPPY_LINT_GROUPS, DEFAULT_LINT_GROUPS, DEFAULT_LINTS, Lint, LintGroup},
imports::insert_use::InsertUseConfig,
label::Label,
+ rename::RenameConfig,
source_change::SourceChange,
};
use syntax::{
@@ -107,7 +108,6 @@ use syntax::{
ast::{self, AstNode},
};
-// FIXME: Make this an enum
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum DiagnosticCode {
RustcHardError(&'static str),
@@ -238,6 +238,7 @@ pub struct DiagnosticsConfig {
pub prefer_absolute: bool,
pub term_search_fuel: u64,
pub term_search_borrowck: bool,
+ pub show_rename_conflicts: bool,
}
impl DiagnosticsConfig {
@@ -266,8 +267,13 @@ impl DiagnosticsConfig {
prefer_absolute: false,
term_search_fuel: 400,
term_search_borrowck: true,
+ show_rename_conflicts: true,
}
}
+
+ pub fn rename_config(&self) -> RenameConfig {
+ RenameConfig { show_conflicts: self.show_rename_conflicts }
+ }
}
struct DiagnosticsContext<'a> {
diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs
index 958a26324f..a614d71c1f 100644
--- a/crates/ide-ssr/src/lib.rs
+++ b/crates/ide-ssr/src/lib.rs
@@ -85,7 +85,7 @@ pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Matc
use crate::{errors::bail, matching::MatchFailureReason};
use hir::{FileRange, Semantics};
-use ide_db::symbol_index::LocalRoots;
+use ide_db::LocalRoots;
use ide_db::text_edit::TextEdit;
use ide_db::{EditionedFileId, FileId, FxHashMap, RootDatabase, base_db::SourceDatabase};
use resolving::ResolvedRule;
diff --git a/crates/ide-ssr/src/search.rs b/crates/ide-ssr/src/search.rs
index 5f54c66d3c..51e4951cf6 100644
--- a/crates/ide-ssr/src/search.rs
+++ b/crates/ide-ssr/src/search.rs
@@ -6,10 +6,9 @@ use crate::{
};
use hir::FileRange;
use ide_db::{
- FileId, FxHashSet,
+ FileId, FxHashSet, LocalRoots,
defs::Definition,
search::{SearchScope, UsageSearchResult},
- symbol_index::LocalRoots,
};
use syntax::{AstNode, SyntaxKind, SyntaxNode, ast};
diff --git a/crates/ide-ssr/src/tests.rs b/crates/ide-ssr/src/tests.rs
index 852033599a..b3d09cac42 100644
--- a/crates/ide-ssr/src/tests.rs
+++ b/crates/ide-ssr/src/tests.rs
@@ -1,9 +1,8 @@
use expect_test::{Expect, expect};
use hir::{FilePosition, FileRange};
use ide_db::{
- EditionedFileId, FxHashSet,
+ EditionedFileId, FxHashSet, LocalRoots,
base_db::{SourceDatabase, salsa::Setter},
- symbol_index::LocalRoots,
};
use test_utils::RangeOrOffset;
diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs
index d43f13b79d..7d02b80918 100644
--- a/crates/ide/src/expand_macro.rs
+++ b/crates/ide/src/expand_macro.rs
@@ -4,7 +4,7 @@ use ide_db::{
FileId, RootDatabase, base_db::Crate, helpers::pick_best_token,
syntax_helpers::prettify_macro_expansion,
};
-use span::{SpanMap, SyntaxContext, TextRange, TextSize};
+use span::{SpanMap, TextRange, TextSize};
use stdx::format_to;
use syntax::{AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, ast, ted};
@@ -142,7 +142,7 @@ fn expand_macro_recur(
sema: &Semantics<'_, RootDatabase>,
macro_call: &ast::Item,
error: &mut String,
- result_span_map: &mut SpanMap<SyntaxContext>,
+ result_span_map: &mut SpanMap,
offset_in_original_node: TextSize,
) -> Option<SyntaxNode> {
let ExpandResult { value: expanded, err } = match macro_call {
@@ -171,7 +171,7 @@ fn expand(
sema: &Semantics<'_, RootDatabase>,
expanded: SyntaxNode,
error: &mut String,
- result_span_map: &mut SpanMap<SyntaxContext>,
+ result_span_map: &mut SpanMap,
mut offset_in_original_node: i32,
) -> SyntaxNode {
let children = expanded.descendants().filter_map(ast::Item::cast);
@@ -208,7 +208,7 @@ fn format(
kind: SyntaxKind,
file_id: FileId,
expanded: SyntaxNode,
- span_map: &SpanMap<SyntaxContext>,
+ span_map: &SpanMap,
krate: Crate,
) -> String {
let expansion = prettify_macro_expansion(db, expanded, span_map, krate).to_string();
diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs
index 9dc72a87af..feac5fff84 100644
--- a/crates/ide/src/hover/render.rs
+++ b/crates/ide/src/hover/render.rs
@@ -228,37 +228,14 @@ pub(super) fn underscore(
return None;
}
let parent = token.parent()?;
- let _it = match_ast! {
+ match_ast! {
match parent {
- ast::InferType(it) => it,
- ast::UnderscoreExpr(it) => return type_info_of(sema, config, &Either::Left(ast::Expr::UnderscoreExpr(it)),edition, display_target),
- ast::WildcardPat(it) => return type_info_of(sema, config, &Either::Right(ast::Pat::WildcardPat(it)),edition, display_target),
- _ => return None,
+ ast::InferType(it) => type_info(sema, config, TypeInfo { original: sema.resolve_type(&ast::Type::InferType(it))?, adjusted: None}, edition, display_target),
+ ast::UnderscoreExpr(it) => type_info(sema, config, sema.type_of_expr(&ast::Expr::UnderscoreExpr(it))?, edition, display_target),
+ ast::WildcardPat(it) => type_info(sema, config, sema.type_of_pat(&ast::Pat::WildcardPat(it))?, edition, display_target),
+ _ => None,
}
- };
- // let it = infer_type.syntax().parent()?;
- // match_ast! {
- // match it {
- // ast::LetStmt(_it) => (),
- // ast::Param(_it) => (),
- // ast::RetType(_it) => (),
- // ast::TypeArg(_it) => (),
-
- // ast::CastExpr(_it) => (),
- // ast::ParenType(_it) => (),
- // ast::TupleType(_it) => (),
- // ast::PtrType(_it) => (),
- // ast::RefType(_it) => (),
- // ast::ArrayType(_it) => (),
- // ast::SliceType(_it) => (),
- // ast::ForType(_it) => (),
- // _ => return None,
- // }
- // }
-
- // FIXME: https://github.com/rust-lang/rust-analyzer/issues/11762, this currently always returns Unknown
- // type_info(sema, config, sema.resolve_type(&ast::Type::InferType(it))?, None)
- None
+ }
}
pub(super) fn keyword(
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index 5330b7eb99..0b518021e3 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -4089,6 +4089,7 @@ fn foo() {
let fo$0o = async { S };
}
//- /core.rs crate:core
+#![feature(lang_items)]
pub mod future {
#[lang = "future_trait"]
pub trait Future {}
@@ -8199,19 +8200,31 @@ fn main() {
#[test]
fn hover_underscore_type() {
- check_hover_no_result(
+ check(
r#"
fn main() {
let x: _$0 = 0;
}
"#,
+ expect![[r#"
+ *_*
+ ```rust
+ i32
+ ```
+ "#]],
);
- check_hover_no_result(
+ check(
r#"
fn main() {
let x: (_$0,) = (0,);
}
"#,
+ expect![[r#"
+ *_*
+ ```rust
+ i32
+ ```
+ "#]],
);
}
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index f57f2883b1..a58dc6f030 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -5,8 +5,8 @@ use std::{
use either::Either;
use hir::{
- ClosureStyle, DisplayTarget, EditionedFileId, HasVisibility, HirDisplay, HirDisplayError,
- HirWrite, InRealFile, ModuleDef, ModuleDefId, Semantics, sym,
+ ClosureStyle, DisplayTarget, EditionedFileId, GenericParam, GenericParamId, HasVisibility,
+ HirDisplay, HirDisplayError, HirWrite, InRealFile, ModuleDef, ModuleDefId, Semantics, sym,
};
use ide_db::{
FileRange, MiniCore, RootDatabase, famous_defs::FamousDefs, text_edit::TextEditBuilder,
@@ -709,6 +709,21 @@ impl HirWrite for InlayHintLabelBuilder<'_> {
});
}
+ fn start_location_link_generic(&mut self, def: GenericParamId) {
+ never!(self.location.is_some(), "location link is already started");
+ self.make_new_part();
+
+ self.location = Some(if self.resolve {
+ LazyProperty::Lazy
+ } else {
+ LazyProperty::Computed({
+ let Some(location) = GenericParam::from(def).try_to_nav(self.sema) else { return };
+ let location = location.call_site();
+ FileRange { file_id: location.file_id, range: location.focus_or_full_range() }
+ })
+ });
+ }
+
fn end_location_link(&mut self) {
self.make_new_part();
}
@@ -767,14 +782,30 @@ fn label_of_ty(
)
});
+ let module_def_location = |label_builder: &mut InlayHintLabelBuilder<'_>,
+ def: ModuleDef,
+ name| {
+ let def = def.try_into();
+ if let Ok(def) = def {
+ label_builder.start_location_link(def);
+ }
+ #[expect(
+ clippy::question_mark,
+ reason = "false positive; replacing with `?` leads to 'type annotations needed' error"
+ )]
+ if let Err(err) = label_builder.write_str(name) {
+ return Err(err);
+ }
+ if def.is_ok() {
+ label_builder.end_location_link();
+ }
+ Ok(())
+ };
+
label_builder.write_str(LABEL_START)?;
- label_builder.start_location_link(ModuleDef::from(iter_trait).into());
- label_builder.write_str(LABEL_ITERATOR)?;
- label_builder.end_location_link();
+ module_def_location(label_builder, ModuleDef::from(iter_trait), LABEL_ITERATOR)?;
label_builder.write_str(LABEL_MIDDLE)?;
- label_builder.start_location_link(ModuleDef::from(item).into());
- label_builder.write_str(LABEL_ITEM)?;
- label_builder.end_location_link();
+ module_def_location(label_builder, ModuleDef::from(item), LABEL_ITEM)?;
label_builder.write_str(LABEL_MIDDLE2)?;
rec(sema, famous_defs, max_length, &ty, label_builder, config, display_target)?;
label_builder.write_str(LABEL_END)?;
diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs
index de207c7821..c74e3104c1 100644
--- a/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/crates/ide/src/inlay_hints/bind_pat.rs
@@ -183,7 +183,8 @@ mod tests {
use crate::{ClosureReturnTypeHints, fixture, inlay_hints::InlayHintsConfig};
use crate::inlay_hints::tests::{
- DISABLED_CONFIG, TEST_CONFIG, check, check_edit, check_no_edit, check_with_config,
+ DISABLED_CONFIG, TEST_CONFIG, check, check_edit, check_expect, check_no_edit,
+ check_with_config,
};
#[track_caller]
@@ -339,14 +340,14 @@ fn main(a: SliceIter<'_, Container>) {
fn lt_hints() {
check_types(
r#"
-struct S<'lt>;
+struct S<'lt>(*mut &'lt ());
fn f<'a>() {
- let x = S::<'static>;
+ let x = S::<'static>(loop {});
//^ S<'static>
- let y = S::<'_>;
+ let y = S::<'_>(loop {});
//^ S<'_>
- let z = S::<'a>;
+ let z = S::<'a>(loop {});
//^ S<'a>
}
@@ -632,10 +633,10 @@ fn main() {
fn multi_dyn_trait_bounds() {
check_types(
r#"
-pub struct Vec<T> {}
+pub struct Vec<T>(*mut T);
impl<T> Vec<T> {
- pub fn new() -> Self { Vec {} }
+ pub fn new() -> Self { Vec(0 as *mut T) }
}
pub struct Box<T> {}
@@ -1255,4 +1256,130 @@ where
"#,
);
}
+
+ #[test]
+ fn type_param_inlay_hint_has_location_link() {
+ check_expect(
+ InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
+ r#"
+fn identity<T>(t: T) -> T {
+ let x = t;
+ x
+}
+"#,
+ expect![[r#"
+ [
+ (
+ 36..37,
+ [
+ InlayHintLabelPart {
+ text: "T",
+ linked_location: Some(
+ Computed(
+ FileRangeWrapper {
+ file_id: FileId(
+ 0,
+ ),
+ range: 12..13,
+ },
+ ),
+ ),
+ tooltip: "",
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn const_param_inlay_hint_has_location_link() {
+ check_expect(
+ InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
+ r#"
+fn f<const N: usize>() {
+ let x = [0; N];
+}
+"#,
+ expect![[r#"
+ [
+ (
+ 33..34,
+ [
+ "[i32; ",
+ InlayHintLabelPart {
+ text: "N",
+ linked_location: Some(
+ Computed(
+ FileRangeWrapper {
+ file_id: FileId(
+ 0,
+ ),
+ range: 11..12,
+ },
+ ),
+ ),
+ tooltip: "",
+ },
+ "]",
+ ],
+ ),
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn lifetime_param_inlay_hint_has_location_link() {
+ check_expect(
+ InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
+ r#"
+struct S<'lt>(*mut &'lt ());
+
+fn f<'a>() {
+ let x = S::<'a>(loop {});
+}
+"#,
+ expect![[r#"
+ [
+ (
+ 51..52,
+ [
+ InlayHintLabelPart {
+ text: "S",
+ linked_location: Some(
+ Computed(
+ FileRangeWrapper {
+ file_id: FileId(
+ 0,
+ ),
+ range: 7..8,
+ },
+ ),
+ ),
+ tooltip: "",
+ },
+ "<",
+ InlayHintLabelPart {
+ text: "'a",
+ linked_location: Some(
+ Computed(
+ FileRangeWrapper {
+ file_id: FileId(
+ 0,
+ ),
+ range: 35..37,
+ },
+ ),
+ ),
+ tooltip: "",
+ },
+ ">",
+ ],
+ ),
+ ]
+ "#]],
+ );
+ }
}
diff --git a/crates/ide/src/inlay_hints/bounds.rs b/crates/ide/src/inlay_hints/bounds.rs
index c9fbdf3ae7..045559fd7f 100644
--- a/crates/ide/src/inlay_hints/bounds.rs
+++ b/crates/ide/src/inlay_hints/bounds.rs
@@ -143,7 +143,7 @@ fn foo<T>() {}
file_id: FileId(
1,
),
- range: 446..451,
+ range: 470..475,
},
),
),
diff --git a/crates/ide/src/inlay_hints/implicit_drop.rs b/crates/ide/src/inlay_hints/implicit_drop.rs
index 1317684a08..e5e4c899ec 100644
--- a/crates/ide/src/inlay_hints/implicit_drop.rs
+++ b/crates/ide/src/inlay_hints/implicit_drop.rs
@@ -34,9 +34,10 @@ pub(super) fn hints(
let def = sema.to_def(node)?;
let def: DefWithBody = def.into();
- let (hir, source_map) = sema.db.body_with_source_map(def.into());
+ let def = def.try_into().ok()?;
+ let (hir, source_map) = sema.db.body_with_source_map(def);
- let mir = sema.db.mir_body(def.into()).ok()?;
+ let mir = sema.db.mir_body(def).ok()?;
let local_to_binding = mir.local_to_binding_map();
diff --git a/crates/ide/src/inlay_hints/implied_dyn_trait.rs b/crates/ide/src/inlay_hints/implied_dyn_trait.rs
index 4fbc88a210..ac91da9a3c 100644
--- a/crates/ide/src/inlay_hints/implied_dyn_trait.rs
+++ b/crates/ide/src/inlay_hints/implied_dyn_trait.rs
@@ -105,6 +105,7 @@ impl T {}
// ^ dyn
impl T for (T) {}
// ^ dyn
+impl T for {}
impl T
"#,
);
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 5e4d930393..930eaf2262 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -67,7 +67,7 @@ use ide_db::{
FxHashMap, FxIndexSet, LineIndexDatabase,
base_db::{
CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, VfsPath,
- salsa::Cancelled,
+ salsa::{Cancelled, Database},
},
prime_caches, symbol_index,
};
@@ -199,8 +199,13 @@ impl AnalysisHost {
pub fn per_query_memory_usage(&mut self) -> Vec<(String, profile::Bytes, usize)> {
self.db.per_query_memory_usage()
}
- pub fn request_cancellation(&mut self) {
- self.db.request_cancellation();
+ pub fn trigger_cancellation(&mut self) {
+ self.db.trigger_cancellation();
+ }
+ pub fn trigger_garbage_collection(&mut self) {
+ self.db.trigger_lru_eviction();
+ // SAFETY: `trigger_lru_eviction` triggers cancellation, so all running queries were canceled.
+ unsafe { hir::collect_ty_garbage() };
}
pub fn raw_database(&self) -> &RootDatabase {
&self.db
@@ -853,8 +858,9 @@ impl Analysis {
&self,
file_id: FileId,
new_name_stem: &str,
+ config: &RenameConfig,
) -> Cancellable<Option<SourceChange>> {
- self.with_db(|db| rename::will_rename_file(db, file_id, new_name_stem))
+ self.with_db(|db| rename::will_rename_file(db, file_id, new_name_stem, config))
}
pub fn structural_search_replace(
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs
index 020f235d3a..a271cac6fc 100644
--- a/crates/ide/src/navigation_target.rs
+++ b/crates/ide/src/navigation_target.rs
@@ -6,7 +6,7 @@ use arrayvec::ArrayVec;
use either::Either;
use hir::{
AssocItem, Crate, FieldSource, HasContainer, HasCrate, HasSource, HirDisplay, HirFileId,
- InFile, LocalSource, ModuleSource, Semantics, Symbol, db::ExpandDatabase, sym,
+ InFile, LocalSource, ModuleSource, Name, Semantics, Symbol, db::ExpandDatabase, sym,
symbols::FileSymbol,
};
use ide_db::{
@@ -204,6 +204,22 @@ impl NavigationTarget {
)
}
+ pub(crate) fn from_named_with_range(
+ db: &RootDatabase,
+ ranges: InFile<(TextRange, Option<TextRange>)>,
+ name: Option<Name>,
+ kind: SymbolKind,
+ ) -> UpmappingResult<NavigationTarget> {
+ let InFile { file_id, value: (full_range, focus_range) } = ranges;
+ let name = name.map(|name| name.symbol().clone()).unwrap_or_else(|| sym::underscore);
+
+ orig_range_with_focus_r(db, file_id, full_range, focus_range).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ NavigationTarget::from_syntax(file_id, name.clone(), focus_range, full_range, kind)
+ },
+ )
+ }
+
pub(crate) fn from_syntax(
file_id: FileId,
name: Symbol,
@@ -414,7 +430,13 @@ impl ToNavFromAst for hir::Trait {
impl<D> TryToNav for D
where
- D: HasSource + ToNavFromAst + Copy + HasDocs + for<'db> HirDisplay<'db> + HasCrate,
+ D: HasSource
+ + ToNavFromAst
+ + Copy
+ + HasDocs
+ + for<'db> HirDisplay<'db>
+ + HasCrate
+ + hir::HasName,
D::Ast: ast::HasName,
{
fn try_to_nav(
@@ -422,11 +444,19 @@ where
sema: &Semantics<'_, RootDatabase>,
) -> Option<UpmappingResult<NavigationTarget>> {
let db = sema.db;
- let src = self.source(db)?;
+ let src = self.source_with_range(db)?;
Some(
- NavigationTarget::from_named(
+ NavigationTarget::from_named_with_range(
db,
- src.as_ref().map(|it| it as &dyn ast::HasName),
+ src.map(|(full_range, node)| {
+ (
+ full_range,
+ node.and_then(|node| {
+ Some(ast::HasName::name(&node)?.syntax().text_range())
+ }),
+ )
+ }),
+ self.name(db),
D::KIND,
)
.map(|mut res| {
@@ -477,16 +507,16 @@ impl TryToNav for hir::Impl {
sema: &Semantics<'_, RootDatabase>,
) -> Option<UpmappingResult<NavigationTarget>> {
let db = sema.db;
- let InFile { file_id, value } = self.source(db)?;
- let derive_path = self.as_builtin_derive_path(db);
-
- let (file_id, focus, syntax) = match &derive_path {
- Some(attr) => (attr.file_id.into(), None, attr.value.syntax()),
- None => (file_id, value.self_ty(), value.syntax()),
- };
+ let InFile { file_id, value: (full_range, source) } = self.source_with_range(db)?;
- Some(orig_range_with_focus(db, file_id, syntax, focus).map(
- |(FileRange { file_id, range: full_range }, focus_range)| {
+ Some(
+ orig_range_with_focus_r(
+ db,
+ file_id,
+ full_range,
+ source.and_then(|source| Some(source.self_ty()?.syntax().text_range())),
+ )
+ .map(|(FileRange { file_id, range: full_range }, focus_range)| {
NavigationTarget::from_syntax(
file_id,
sym::kw_impl,
@@ -494,8 +524,8 @@ impl TryToNav for hir::Impl {
full_range,
SymbolKind::Impl,
)
- },
- ))
+ }),
+ )
}
}
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index 0738b7fadc..4918fe4ff9 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -38,7 +38,8 @@ use syntax::{
};
use crate::{
- Analysis, FilePosition, HighlightedRange, NavigationTarget, TryToNav, highlight_related,
+ Analysis, FilePosition, HighlightedRange, NavigationTarget, TryToNav,
+ doc_links::token_as_doc_comment, highlight_related,
};
/// Result of a reference search operation.
@@ -211,6 +212,13 @@ pub(crate) fn find_defs(
syntax: &SyntaxNode,
offset: TextSize,
) -> Option<Vec<Definition>> {
+ if let Some(token) = syntax.token_at_offset(offset).left_biased()
+ && let Some(doc_comment) = token_as_doc_comment(&token)
+ {
+ return doc_comment
+ .get_definition_with_descend_at(sema, offset, |def, _, _| Some(vec![def]));
+ }
+
let token = syntax.token_at_offset(offset).find(|t| {
matches!(
t.kind(),
@@ -786,6 +794,23 @@ fn main() {
}
#[test]
+ fn test_find_all_refs_in_comments() {
+ check(
+ r#"
+struct Foo;
+
+/// $0[`Foo`] is just above
+struct Bar;
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..11 7..10
+
+ (no references)
+ "#]],
+ );
+ }
+
+ #[test]
fn search_filters_by_range() {
check(
r#"
@@ -2503,7 +2528,7 @@ fn r#fn$0() {}
fn main() { r#fn(); }
"#,
expect![[r#"
- r#fn Function FileId(0) 0..12 3..7
+ fn Function FileId(0) 0..12 3..7
FileId(0) 25..29
"#]],
diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs
index 7955e9b6e7..ae19e77509 100644
--- a/crates/ide/src/rename.rs
+++ b/crates/ide/src/rename.rs
@@ -31,6 +31,7 @@ pub struct RenameConfig {
pub prefer_no_std: bool,
pub prefer_prelude: bool,
pub prefer_absolute: bool,
+ pub show_conflicts: bool,
}
impl RenameConfig {
@@ -42,6 +43,10 @@ impl RenameConfig {
allow_unstable: true,
}
}
+
+ fn ide_db_config(&self) -> ide_db::rename::RenameConfig {
+ ide_db::rename::RenameConfig { show_conflicts: self.show_conflicts }
+ }
}
/// This is similar to `collect::<Result<Vec<_>, _>>`, but unlike it, it succeeds if there is *any* `Ok` item.
@@ -190,7 +195,7 @@ pub(crate) fn rename(
return rename_to_self(&sema, local);
}
}
- def.rename(&sema, new_name.as_str(), rename_def)
+ def.rename(&sema, new_name.as_str(), rename_def, &config.ide_db_config())
})),
};
@@ -205,11 +210,13 @@ pub(crate) fn will_rename_file(
db: &RootDatabase,
file_id: FileId,
new_name_stem: &str,
+ config: &RenameConfig,
) -> Option<SourceChange> {
let sema = Semantics::new(db);
let module = sema.file_to_module_def(file_id)?;
let def = Definition::Module(module);
- let mut change = def.rename(&sema, new_name_stem, RenameDefinition::Yes).ok()?;
+ let mut change =
+ def.rename(&sema, new_name_stem, RenameDefinition::Yes, &config.ide_db_config()).ok()?;
change.file_system_edits.clear();
Some(change)
}
@@ -803,8 +810,12 @@ mod tests {
use super::{RangeInfo, RenameConfig, RenameError};
- const TEST_CONFIG: RenameConfig =
- RenameConfig { prefer_no_std: false, prefer_prelude: true, prefer_absolute: false };
+ const TEST_CONFIG: RenameConfig = RenameConfig {
+ prefer_no_std: false,
+ prefer_prelude: true,
+ prefer_absolute: false,
+ show_conflicts: true,
+ };
#[track_caller]
fn check(
@@ -893,7 +904,7 @@ mod tests {
) {
let (analysis, position) = fixture::position(ra_fixture);
let source_change = analysis
- .will_rename_file(position.file_id, new_name)
+ .will_rename_file(position.file_id, new_name, &TEST_CONFIG)
.unwrap()
.expect("Expect returned a RenameError");
expect.assert_eq(&filter_expect(source_change))
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index c562a9b30b..6cec912503 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -1679,11 +1679,11 @@ mod r#mod {
[
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 1..461, focus_range: 5..10, name: \"mod\", kind: Module, description: \"mod r#mod\" })",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 17..41, focus_range: 32..36, name: \"r#fn\", kind: Function })",
- "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 47..84, name: \"r#for\", container_name: \"mod\" })",
- "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 90..146, name: \"r#struct\", container_name: \"mod\" })",
+ "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 47..84, name: \"for\", container_name: \"mod\" })",
+ "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 90..146, name: \"struct\", container_name: \"mod\" })",
"(DocTest, NavigationTarget { file_id: FileId(0), full_range: 152..266, focus_range: 189..205, name: \"impl\", kind: Impl })",
- "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 216..260, name: \"r#fn\" })",
- "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 323..367, name: \"r#fn\" })",
+ "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 216..260, name: \"fn\" })",
+ "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 323..367, name: \"fn\" })",
"(DocTest, NavigationTarget { file_id: FileId(0), full_range: 401..459, focus_range: 445..456, name: \"impl\", kind: Impl })",
]
"#]],
diff --git a/crates/ide/src/ssr.rs b/crates/ide/src/ssr.rs
index dc8f343207..4dfb5fe816 100644
--- a/crates/ide/src/ssr.rs
+++ b/crates/ide/src/ssr.rs
@@ -58,9 +58,7 @@ pub(crate) fn ssr_assists(
mod tests {
use expect_test::expect;
use ide_assists::{Assist, AssistResolveStrategy};
- use ide_db::{
- FileRange, FxHashSet, RootDatabase, base_db::salsa::Setter as _, symbol_index::LocalRoots,
- };
+ use ide_db::{FileRange, FxHashSet, LocalRoots, RootDatabase, base_db::salsa::Setter as _};
use test_fixture::WithFixture;
use super::ssr_assists;
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index 1b0512977a..e178782c79 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -101,18 +101,18 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="string_literal macro">"world"</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "Hello, world!"</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"The number is </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "The number is 1"</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">?</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="parenthesis macro">(</span><span class="numeric_literal macro">3</span><span class="comma macro">,</span> <span class="numeric_literal macro">4</span><span class="parenthesis macro">)</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "(3, 4)"</span>
- <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">value</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="variable declaration macro">value</span><span class="operator macro">=</span><span class="numeric_literal macro">4</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "4"</span>
+ <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">value</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">value</span><span class="operator macro">=</span><span class="numeric_literal macro">4</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "4"</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="comma macro">,</span> <span class="numeric_literal macro">2</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "1 2"</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="numeric_literal">4</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">42</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "0042" with leading zerosV</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="comma macro">,</span> <span class="numeric_literal macro">2</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "2 1 1 2"</span>
- <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">argument</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="variable declaration macro">argument</span> <span class="operator macro">=</span> <span class="string_literal macro">"test"</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "test"</span>
- <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="comma macro">,</span> <span class="variable declaration macro">name</span> <span class="operator macro">=</span> <span class="numeric_literal macro">2</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "2 1"</span>
- <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">a</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="variable">c</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="variable">b</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="variable declaration macro">a</span><span class="operator macro">=</span><span class="string_literal macro">"a"</span><span class="comma macro">,</span> <span class="variable declaration macro">b</span><span class="operator macro">=</span><span class="char_literal macro">'b'</span><span class="comma macro">,</span> <span class="variable declaration macro">c</span><span class="operator macro">=</span><span class="numeric_literal macro">3</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "a 3 b"</span>
+ <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">argument</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">argument</span> <span class="operator macro">=</span> <span class="string_literal macro">"test"</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "test"</span>
+ <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="comma macro">,</span> <span class="none macro">name</span> <span class="operator macro">=</span> <span class="numeric_literal macro">2</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "2 1"</span>
+ <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">a</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="variable">c</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="variable">b</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">a</span><span class="operator macro">=</span><span class="string_literal macro">"a"</span><span class="comma macro">,</span> <span class="none macro">b</span><span class="operator macro">=</span><span class="char_literal macro">'b'</span><span class="comma macro">,</span> <span class="none macro">c</span><span class="operator macro">=</span><span class="numeric_literal macro">3</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "a 3 b"</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="escape_sequence">{{</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="escape_sequence">}}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">2</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "{2}"</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">1</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="comma macro">,</span> <span class="numeric_literal macro">5</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">5</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
- <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="variable">width</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="comma macro">,</span> <span class="variable declaration macro">width</span> <span class="operator macro">=</span> <span class="numeric_literal macro">5</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="variable">width</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="comma macro">,</span> <span class="none macro">width</span> <span class="operator macro">=</span> <span class="numeric_literal macro">5</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">&lt;</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">-</span><span class="format_specifier">&lt;</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">^</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
@@ -127,10 +127,10 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro"> is </span><span class="format_specifier">{</span><span class="numeric_literal">2</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="numeric_literal">1</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="comma macro">,</span> <span class="numeric_literal macro">5</span><span class="comma macro">,</span> <span class="numeric_literal macro">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> is </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="comma macro">,</span> <span class="numeric_literal macro">5</span><span class="comma macro">,</span> <span class="numeric_literal macro">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> is </span><span class="format_specifier">{</span><span class="numeric_literal">2</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="comma macro">,</span> <span class="numeric_literal macro">5</span><span class="comma macro">,</span> <span class="numeric_literal macro">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
- <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> is </span><span class="format_specifier">{</span><span class="variable">number</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="variable">prec</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="comma macro">,</span> <span class="variable declaration macro">prec</span> <span class="operator macro">=</span> <span class="numeric_literal macro">5</span><span class="comma macro">,</span> <span class="variable declaration macro">number</span> <span class="operator macro">=</span> <span class="numeric_literal macro">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
- <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">, `</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal macro">` has 3 fractional digits"</span><span class="comma macro">,</span> <span class="string_literal macro">"Hello"</span><span class="comma macro">,</span> <span class="numeric_literal macro">3</span><span class="comma macro">,</span> <span class="variable declaration macro">name</span><span class="operator macro">=</span><span class="numeric_literal macro">1234.56</span><span class="parenthesis">)</span><span class="semicolon">;</span>
- <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">, `</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal macro">` has 3 characters"</span><span class="comma macro">,</span> <span class="string_literal macro">"Hello"</span><span class="comma macro">,</span> <span class="numeric_literal macro">3</span><span class="comma macro">,</span> <span class="variable declaration macro">name</span><span class="operator macro">=</span><span class="string_literal macro">"1234.56"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
- <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">, `</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">:</span><span class="format_specifier">&gt;</span><span class="numeric_literal">8</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal macro">` has 3 right-aligned characters"</span><span class="comma macro">,</span> <span class="string_literal macro">"Hello"</span><span class="comma macro">,</span> <span class="numeric_literal macro">3</span><span class="comma macro">,</span> <span class="variable declaration macro">name</span><span class="operator macro">=</span><span class="string_literal macro">"1234.56"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> is </span><span class="format_specifier">{</span><span class="variable">number</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="variable">prec</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="comma macro">,</span> <span class="none macro">prec</span> <span class="operator macro">=</span> <span class="numeric_literal macro">5</span><span class="comma macro">,</span> <span class="none macro">number</span> <span class="operator macro">=</span> <span class="numeric_literal macro">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">, `</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal macro">` has 3 fractional digits"</span><span class="comma macro">,</span> <span class="string_literal macro">"Hello"</span><span class="comma macro">,</span> <span class="numeric_literal macro">3</span><span class="comma macro">,</span> <span class="none macro">name</span><span class="operator macro">=</span><span class="numeric_literal macro">1234.56</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">, `</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal macro">` has 3 characters"</span><span class="comma macro">,</span> <span class="string_literal macro">"Hello"</span><span class="comma macro">,</span> <span class="numeric_literal macro">3</span><span class="comma macro">,</span> <span class="none macro">name</span><span class="operator macro">=</span><span class="string_literal macro">"1234.56"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">, `</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">:</span><span class="format_specifier">&gt;</span><span class="numeric_literal">8</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal macro">` has 3 right-aligned characters"</span><span class="comma macro">,</span> <span class="string_literal macro">"Hello"</span><span class="comma macro">,</span> <span class="numeric_literal macro">3</span><span class="comma macro">,</span> <span class="none macro">name</span><span class="operator macro">=</span><span class="string_literal macro">"1234.56"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">"{}"</span>
<span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">"{{}}"</span><span class="semicolon">;</span>
@@ -154,8 +154,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">c"</span><span class="escape_sequence">\u{FF}</span><span class="escape_sequence">\xFF</span><span class="string_literal">"</span><span class="semicolon">;</span> <span class="comment">// valid bytes, valid unicodes</span>
<span class="keyword">let</span> <span class="variable declaration reference">backslash</span> <span class="operator">=</span> <span class="string_literal">r"\\"</span><span class="semicolon">;</span>
- <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="escape_sequence">\x41</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="variable declaration macro">A</span> <span class="operator macro">=</span> <span class="numeric_literal macro">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
- <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">ничоси</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="variable declaration macro">ничоси</span> <span class="operator macro">=</span> <span class="numeric_literal macro">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="escape_sequence">\x41</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">A</span> <span class="operator macro">=</span> <span class="numeric_literal macro">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">ничоси</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">ничоси</span> <span class="operator macro">=</span> <span class="numeric_literal macro">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="macro public">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="variable">x</span><span class="format_specifier">?</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> "</span><span class="comma macro">,</span> <span class="unresolved_reference macro">thingy</span><span class="comma macro">,</span> <span class="unresolved_reference macro">n2</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="macro default_library library">panic</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"{}"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis">)</span><span class="semicolon">;</span>
diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml
index 6414f09178..ad73c191c0 100644
--- a/crates/intern/Cargo.toml
+++ b/crates/intern/Cargo.toml
@@ -18,7 +18,6 @@ dashmap.workspace = true
hashbrown.workspace = true
rustc-hash.workspace = true
triomphe.workspace = true
-smallvec.workspace = true
rayon.workspace = true
[lints]
diff --git a/crates/intern/src/gc.rs b/crates/intern/src/gc.rs
index 0d500a9714..937de26831 100644
--- a/crates/intern/src/gc.rs
+++ b/crates/intern/src/gc.rs
@@ -87,20 +87,20 @@ pub trait GcInternedSliceVisit: SliceInternable {
#[derive(Default)]
pub struct GarbageCollector {
alive: FxHashSet<usize>,
- storages: Vec<Box<dyn Storage + Send + Sync>>,
+ storages: Vec<&'static (dyn Storage + Send + Sync)>,
}
impl GarbageCollector {
pub fn add_storage<T: Internable + GcInternedVisit>(&mut self) {
const { assert!(T::USE_GC) };
- self.storages.push(Box::new(InternedStorage::<T>(PhantomData)));
+ self.storages.push(&InternedStorage::<T>(PhantomData));
}
pub fn add_slice_storage<T: SliceInternable + GcInternedSliceVisit>(&mut self) {
const { assert!(T::USE_GC) };
- self.storages.push(Box::new(InternedSliceStorage::<T>(PhantomData)));
+ self.storages.push(&InternedSliceStorage::<T>(PhantomData));
}
/// # Safety
@@ -111,11 +111,12 @@ impl GarbageCollector {
/// - [`GcInternedVisit`] and [`GcInternedSliceVisit`] must mark all values reachable from the node.
pub unsafe fn collect(mut self) {
let total_nodes = self.storages.iter().map(|storage| storage.len()).sum();
- self.alive = FxHashSet::with_capacity_and_hasher(total_nodes, FxBuildHasher);
+ self.alive.clear();
+ self.alive.reserve(total_nodes);
let storages = std::mem::take(&mut self.storages);
- for storage in &storages {
+ for &storage in &storages {
storage.mark(&mut self);
}
diff --git a/crates/intern/src/intern.rs b/crates/intern/src/intern.rs
index b7acd6624b..a96dfcfa9f 100644
--- a/crates/intern/src/intern.rs
+++ b/crates/intern/src/intern.rs
@@ -334,7 +334,7 @@ impl<T: ?Sized> InternStorage<T> {
impl<T: Internable + ?Sized> InternStorage<T> {
pub(crate) fn get(&self) -> &InternMap<T> {
- self.map.get_or_init(DashMap::default)
+ self.map.get_or_init(|| DashMap::with_capacity_and_hasher(1024, Default::default()))
}
}
diff --git a/crates/intern/src/intern_slice.rs b/crates/intern/src/intern_slice.rs
index 58de6e17bd..8857771d2e 100644
--- a/crates/intern/src/intern_slice.rs
+++ b/crates/intern/src/intern_slice.rs
@@ -292,7 +292,12 @@ impl<T: SliceInternable> InternSliceStorage<T> {
impl<T: SliceInternable> InternSliceStorage<T> {
pub(crate) fn get(&self) -> &InternMap<T> {
- self.map.get_or_init(DashMap::default)
+ self.map.get_or_init(|| {
+ DashMap::with_capacity_and_hasher(
+ (64 * 1024) / std::mem::size_of::<T::SliceType>(),
+ Default::default(),
+ )
+ })
}
}
diff --git a/crates/intern/src/symbol/symbols.rs b/crates/intern/src/symbol/symbols.rs
index 6e9c6d26b5..cbaac64be4 100644
--- a/crates/intern/src/symbol/symbols.rs
+++ b/crates/intern/src/symbol/symbols.rs
@@ -109,6 +109,7 @@ define_symbols! {
vectorcall_dash_unwind = "vectorcall-unwind",
win64_dash_unwind = "win64-unwind",
x86_dash_interrupt = "x86-interrupt",
+ rust_dash_preserve_dash_none = "preserve-none",
@PLAIN:
__ra_fixup,
@@ -297,6 +298,7 @@ define_symbols! {
iterator,
keyword,
lang,
+ lang_items,
le,
Left,
len,
@@ -441,7 +443,6 @@ define_symbols! {
rustc_skip_array_during_method_dispatch,
rustc_skip_during_method_dispatch,
rustc_force_inline,
- semitransparent,
shl_assign,
shl,
shr_assign,
@@ -525,5 +526,10 @@ define_symbols! {
arbitrary_self_types,
arbitrary_self_types_pointers,
supertrait_item_shadowing,
+ hash,
+ partial_cmp,
+ cmp,
+ CoerceUnsized,
+ DispatchFromDyn,
define_opaque,
}
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 28fbfecfde..e8d98b1ce6 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -11,9 +11,12 @@ extern crate rustc_driver as _;
use std::{any::Any, collections::hash_map::Entry, mem, path::Path, sync};
use crossbeam_channel::{Receiver, unbounded};
-use hir_expand::proc_macro::{
- ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacroLoadResult,
- ProcMacrosBuilder,
+use hir_expand::{
+ db::ExpandDatabase,
+ proc_macro::{
+ ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacroLoadResult,
+ ProcMacrosBuilder,
+ },
};
use ide_db::{
ChangeWithProcMacros, FxHashMap, RootDatabase,
@@ -21,11 +24,18 @@ use ide_db::{
prime_caches,
};
use itertools::Itertools;
-use proc_macro_api::{MacroDylib, ProcMacroClient};
+use proc_macro_api::{
+ MacroDylib, ProcMacroClient,
+ bidirectional_protocol::{
+ msg::{SubRequest, SubResponse},
+ reject_subrequests,
+ },
+};
use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace};
-use span::Span;
+use span::{Span, SpanAnchor, SyntaxContext};
+use tt::{TextRange, TextSize};
use vfs::{
- AbsPath, AbsPathBuf, VfsPath,
+ AbsPath, AbsPathBuf, FileId, VfsPath,
file_set::FileSetConfig,
loader::{Handle, LoadingProgress},
};
@@ -425,7 +435,7 @@ pub fn load_proc_macro(
) -> ProcMacroLoadResult {
let res: Result<Vec<_>, _> = (|| {
let dylib = MacroDylib::new(path.to_path_buf());
- let vec = server.load_dylib(dylib).map_err(|e| {
+ let vec = server.load_dylib(dylib, Some(&mut reject_subrequests)).map_err(|e| {
ProcMacroLoadingError::ProcMacroSrvError(format!("{e}").into_boxed_str())
})?;
if vec.is_empty() {
@@ -522,14 +532,56 @@ struct Expander(proc_macro_api::ProcMacro);
impl ProcMacroExpander for Expander {
fn expand(
&self,
- subtree: &tt::TopSubtree<Span>,
- attrs: Option<&tt::TopSubtree<Span>>,
+ db: &dyn ExpandDatabase,
+ subtree: &tt::TopSubtree,
+ attrs: Option<&tt::TopSubtree>,
env: &Env,
def_site: Span,
call_site: Span,
mixed_site: Span,
current_dir: String,
- ) -> Result<tt::TopSubtree<Span>, ProcMacroExpansionError> {
+ ) -> Result<tt::TopSubtree, ProcMacroExpansionError> {
+ let mut cb = |req| match req {
+ SubRequest::LocalFilePath { file_id } => {
+ let file_id = FileId::from_raw(file_id);
+ let source_root_id = db.file_source_root(file_id).source_root_id(db);
+ let source_root = db.source_root(source_root_id).source_root(db);
+ let name = source_root
+ .path_for_file(&file_id)
+ .and_then(|path| path.as_path())
+ .map(|path| path.to_string());
+
+ Ok(SubResponse::LocalFilePathResult { name })
+ }
+ SubRequest::SourceText { file_id, ast_id, start, end } => {
+ let ast_id = span::ErasedFileAstId::from_raw(ast_id);
+ let editioned_file_id = span::EditionedFileId::from_raw(file_id);
+ let span = Span {
+ range: TextRange::new(TextSize::from(start), TextSize::from(end)),
+ anchor: SpanAnchor { file_id: editioned_file_id, ast_id },
+ ctx: SyntaxContext::root(editioned_file_id.edition()),
+ };
+ let range = db.resolve_span(span);
+ let source = db.file_text(range.file_id.file_id(db)).text(db);
+ let text = source
+ .get(usize::from(range.range.start())..usize::from(range.range.end()))
+ .map(ToOwned::to_owned);
+
+ Ok(SubResponse::SourceTextResult { text })
+ }
+ SubRequest::FilePath { file_id } => {
+ let file_id = FileId::from_raw(file_id);
+ let source_root_id = db.file_source_root(file_id).source_root_id(db);
+ let source_root = db.source_root(source_root_id).source_root(db);
+ let name = source_root
+ .path_for_file(&file_id)
+ .and_then(|path| path.as_path())
+ .map(|path| path.to_string())
+ .unwrap_or_default();
+
+ Ok(SubResponse::FilePathResult { name })
+ }
+ };
match self.0.expand(
subtree.view(),
attrs.map(|attrs| attrs.view()),
@@ -538,6 +590,7 @@ impl ProcMacroExpander for Expander {
call_site,
mixed_site,
current_dir,
+ Some(&mut cb),
) {
Ok(Ok(subtree)) => Ok(subtree),
Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err)),
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index ffe3bdd06c..603fee7306 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -2,7 +2,6 @@
use intern::Symbol;
use rustc_hash::FxHashMap;
-use span::Span;
use stdx::itertools::Itertools;
use syntax::{
AstNode,
@@ -55,7 +54,7 @@ fn benchmark_expand_macro_rules() {
.map(|(id, tt)| {
let res = rules[&id].expand(&db, &tt, |_| (), MacroCallStyle::FnLike, DUMMY);
assert!(res.err.is_none());
- res.value.0.0.len()
+ res.value.0.as_token_trees().len()
})
.sum()
};
@@ -70,7 +69,7 @@ fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
.collect()
}
-fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::TopSubtree<Span>> {
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::TopSubtree> {
let fixture = bench_fixture::numerous_macro_rules();
let source_file = ast::SourceFile::parse(&fixture, span::Edition::CURRENT).ok().unwrap();
@@ -95,7 +94,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::TopSubtree<Span>> {
fn invocation_fixtures(
db: &dyn salsa::Database,
rules: &FxHashMap<String, DeclarativeMacro>,
-) -> Vec<(String, tt::TopSubtree<Span>)> {
+) -> Vec<(String, tt::TopSubtree)> {
let mut seed = 123456789;
let mut res = Vec::new();
@@ -140,7 +139,7 @@ fn invocation_fixtures(
}
return res;
- fn collect_from_op(op: &Op, builder: &mut tt::TopSubtreeBuilder<Span>, seed: &mut usize) {
+ fn collect_from_op(op: &Op, builder: &mut tt::TopSubtreeBuilder, seed: &mut usize) {
return match op {
Op::Var { kind, .. } => match kind.as_ref() {
Some(MetaVarKind::Ident) => builder.push(make_ident("foo")),
@@ -226,25 +225,20 @@ fn invocation_fixtures(
*seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
*seed
}
- fn make_ident(ident: &str) -> tt::Leaf<Span> {
+ fn make_ident(ident: &str) -> tt::Leaf {
tt::Leaf::Ident(tt::Ident {
span: DUMMY,
sym: Symbol::intern(ident),
is_raw: tt::IdentIsRaw::No,
})
}
- fn make_punct(char: char) -> tt::Leaf<Span> {
+ fn make_punct(char: char) -> tt::Leaf {
tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone })
}
- fn make_literal(lit: &str) -> tt::Leaf<Span> {
- tt::Leaf::Literal(tt::Literal {
- span: DUMMY,
- symbol: Symbol::intern(lit),
- kind: tt::LitKind::Str,
- suffix: None,
- })
+ fn make_literal(lit: &str) -> tt::Leaf {
+ tt::Leaf::Literal(tt::Literal::new_no_suffix(lit, DUMMY, tt::LitKind::Str))
}
- fn make_subtree(kind: tt::DelimiterKind, builder: &mut tt::TopSubtreeBuilder<Span>) {
+ fn make_subtree(kind: tt::DelimiterKind, builder: &mut tt::TopSubtreeBuilder) {
builder.open(kind, DUMMY);
builder.close(DUMMY);
}
diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs
index 274b779c1b..25e05df7b7 100644
--- a/crates/mbe/src/expander.rs
+++ b/crates/mbe/src/expander.rs
@@ -17,11 +17,11 @@ use crate::{
pub(crate) fn expand_rules(
db: &dyn salsa::Database,
rules: &[crate::Rule],
- input: &tt::TopSubtree<Span>,
+ input: &tt::TopSubtree,
marker: impl Fn(&mut Span) + Copy,
call_style: MacroCallStyle,
call_site: Span,
-) -> ExpandResult<(tt::TopSubtree<Span>, MatchedArmIndex)> {
+) -> ExpandResult<(tt::TopSubtree, MatchedArmIndex)> {
let mut match_: Option<(matcher::Match<'_>, &crate::Rule, usize)> = None;
for (idx, rule) in rules.iter().enumerate() {
// Skip any rules that aren't relevant to the call style (fn-like/attr/derive).
@@ -129,7 +129,7 @@ enum Fragment<'a> {
Empty,
/// token fragments are just copy-pasted into the output
Tokens {
- tree: tt::TokenTreesView<'a, Span>,
+ tree: tt::TokenTreesView<'a>,
origin: TokensOrigin,
},
/// Expr ast fragments are surrounded with `()` on transcription to preserve precedence.
@@ -141,7 +141,7 @@ enum Fragment<'a> {
/// tricky to handle in the parser, and rustc doesn't handle those either.
///
/// The span of the outer delimiters is marked on transcription.
- Expr(tt::TokenTreesView<'a, Span>),
+ Expr(tt::TokenTreesView<'a>),
/// There are roughly two types of paths: paths in expression context, where a
/// separator `::` between an identifier and its following generic argument list
/// is mandatory, and paths in type context, where `::` can be omitted.
@@ -151,8 +151,8 @@ enum Fragment<'a> {
/// and is trasncribed as an expression-context path, verbatim transcription
/// would cause a syntax error. We need to fix it up just before transcribing;
/// see `transcriber::fix_up_and_push_path_tt()`.
- Path(tt::TokenTreesView<'a, Span>),
- TokensOwned(tt::TopSubtree<Span>),
+ Path(tt::TokenTreesView<'a>),
+ TokensOwned(tt::TopSubtree),
}
impl Fragment<'_> {
@@ -162,7 +162,7 @@ impl Fragment<'_> {
Fragment::Tokens { tree, .. } => tree.len() == 0,
Fragment::Expr(it) => it.len() == 0,
Fragment::Path(it) => it.len() == 0,
- Fragment::TokensOwned(it) => it.0.is_empty(),
+ Fragment::TokensOwned(_) => false, // A `TopSubtree` is never empty
}
}
}
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs
index a21468fbb0..8f6627a60f 100644
--- a/crates/mbe/src/expander/matcher.rs
+++ b/crates/mbe/src/expander/matcher.rs
@@ -63,7 +63,6 @@ use std::{rc::Rc, sync::Arc};
use intern::{Symbol, sym};
use smallvec::{SmallVec, smallvec};
-use span::Span;
use tt::{
DelimSpan,
iter::{TtElement, TtIter},
@@ -114,7 +113,7 @@ impl Match<'_> {
pub(super) fn match_<'t>(
db: &dyn salsa::Database,
pattern: &'t MetaTemplate,
- input: &'t tt::TopSubtree<Span>,
+ input: &'t tt::TopSubtree,
) -> Match<'t> {
let mut res = match_loop(db, pattern, input);
res.bound_count = count(res.bindings.bindings());
@@ -339,7 +338,7 @@ struct MatchState<'t> {
bindings: BindingsIdx,
/// Cached result of meta variable parsing
- meta_result: Option<(TtIter<'t, Span>, ExpandResult<Option<Fragment<'t>>>)>,
+ meta_result: Option<(TtIter<'t>, ExpandResult<Option<Fragment<'t>>>)>,
/// Is error occurred in this state, will `poised` to "parent"
is_error: bool,
@@ -366,8 +365,8 @@ struct MatchState<'t> {
#[inline]
fn match_loop_inner<'t>(
db: &dyn salsa::Database,
- src: TtIter<'t, Span>,
- stack: &[TtIter<'t, Span>],
+ src: TtIter<'t>,
+ stack: &[TtIter<'t>],
res: &mut Match<'t>,
bindings_builder: &mut BindingsBuilder<'t>,
cur_items: &mut SmallVec<[MatchState<'t>; 1]>,
@@ -375,7 +374,7 @@ fn match_loop_inner<'t>(
next_items: &mut Vec<MatchState<'t>>,
eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
error_items: &mut SmallVec<[MatchState<'t>; 1]>,
- delim_span: tt::DelimSpan<Span>,
+ delim_span: tt::DelimSpan,
) {
macro_rules! try_push {
($items: expr, $it:expr) => {
@@ -518,7 +517,8 @@ fn match_loop_inner<'t>(
}
OpDelimited::Op(Op::Literal(lhs)) => {
if let Ok(rhs) = src.clone().expect_leaf() {
- if matches!(rhs, tt::Leaf::Literal(it) if it.symbol == lhs.symbol) {
+ if matches!(&rhs, tt::Leaf::Literal(it) if it.text_and_suffix == lhs.text_and_suffix)
+ {
item.dot.next();
} else {
res.add_err(ExpandError::new(
@@ -538,7 +538,7 @@ fn match_loop_inner<'t>(
}
OpDelimited::Op(Op::Ident(lhs)) => {
if let Ok(rhs) = src.clone().expect_leaf() {
- if matches!(rhs, tt::Leaf::Ident(it) if it.sym == lhs.sym) {
+ if matches!(&rhs, tt::Leaf::Ident(it) if it.sym == lhs.sym) {
item.dot.next();
} else {
res.add_err(ExpandError::new(
@@ -623,11 +623,11 @@ fn match_loop_inner<'t>(
fn match_loop<'t>(
db: &dyn salsa::Database,
pattern: &'t MetaTemplate,
- src: &'t tt::TopSubtree<Span>,
+ src: &'t tt::TopSubtree,
) -> Match<'t> {
let span = src.top_subtree().delimiter.delim_span();
let mut src = src.iter();
- let mut stack: SmallVec<[TtIter<'_, Span>; 1]> = SmallVec::new();
+ let mut stack: SmallVec<[TtIter<'_>; 1]> = SmallVec::new();
let mut res = Match::default();
let mut error_recover_item = None;
@@ -702,7 +702,7 @@ fn match_loop<'t>(
|| !(bb_items.is_empty() || next_items.is_empty())
|| bb_items.len() > 1;
if has_leftover_tokens {
- res.unmatched_tts += src.remaining().flat_tokens().len();
+ res.unmatched_tts += src.remaining().len();
res.add_err(ExpandError::new(span.open, ExpandErrorKind::LeftoverTokens));
if let Some(error_recover_item) = error_recover_item {
@@ -774,8 +774,8 @@ fn match_loop<'t>(
fn match_meta_var<'t>(
db: &dyn salsa::Database,
kind: MetaVarKind,
- input: &mut TtIter<'t, Span>,
- delim_span: DelimSpan<Span>,
+ input: &mut TtIter<'t>,
+ delim_span: DelimSpan,
) -> ExpandResult<Fragment<'t>> {
let fragment = match kind {
MetaVarKind::Path => {
@@ -879,10 +879,10 @@ fn collect_vars(collector_fun: &mut impl FnMut(Symbol), pattern: &MetaTemplate)
}
}
impl MetaTemplate {
- fn iter_delimited_with(&self, delimiter: tt::Delimiter<Span>) -> OpDelimitedIter<'_> {
+ fn iter_delimited_with(&self, delimiter: tt::Delimiter) -> OpDelimitedIter<'_> {
OpDelimitedIter { inner: &self.0, idx: 0, delimited: delimiter }
}
- fn iter_delimited(&self, span: tt::DelimSpan<Span>) -> OpDelimitedIter<'_> {
+ fn iter_delimited(&self, span: tt::DelimSpan) -> OpDelimitedIter<'_> {
OpDelimitedIter {
inner: &self.0,
idx: 0,
@@ -901,7 +901,7 @@ enum OpDelimited<'a> {
#[derive(Debug, Clone, Copy)]
struct OpDelimitedIter<'a> {
inner: &'a [Op],
- delimited: tt::Delimiter<Span>,
+ delimited: tt::Delimiter,
idx: usize,
}
@@ -945,7 +945,7 @@ impl<'a> Iterator for OpDelimitedIter<'a> {
}
}
-fn expect_separator<S: Copy>(iter: &mut TtIter<'_, S>, separator: &Separator) -> bool {
+fn expect_separator(iter: &mut TtIter<'_>, separator: &Separator) -> bool {
let mut fork = iter.clone();
let ok = match separator {
Separator::Ident(lhs) => match fork.expect_ident_or_underscore() {
@@ -954,8 +954,8 @@ fn expect_separator<S: Copy>(iter: &mut TtIter<'_, S>, separator: &Separator) ->
},
Separator::Literal(lhs) => match fork.expect_literal() {
Ok(rhs) => match rhs {
- tt::Leaf::Literal(rhs) => rhs.symbol == lhs.symbol,
- tt::Leaf::Ident(rhs) => rhs.sym == lhs.symbol,
+ tt::Leaf::Literal(rhs) => rhs.text_and_suffix == lhs.text_and_suffix,
+ tt::Leaf::Ident(rhs) => rhs.sym == lhs.text_and_suffix,
tt::Leaf::Punct(_) => false,
},
Err(_) => false,
@@ -979,7 +979,7 @@ fn expect_separator<S: Copy>(iter: &mut TtIter<'_, S>, separator: &Separator) ->
ok
}
-fn expect_tt<S: Copy>(iter: &mut TtIter<'_, S>) -> Result<(), ()> {
+fn expect_tt(iter: &mut TtIter<'_>) -> Result<(), ()> {
if let Some(TtElement::Leaf(tt::Leaf::Punct(punct))) = iter.peek() {
if punct.char == '\'' {
expect_lifetime(iter)?;
@@ -992,7 +992,7 @@ fn expect_tt<S: Copy>(iter: &mut TtIter<'_, S>) -> Result<(), ()> {
Ok(())
}
-fn expect_lifetime<'a, S: Copy>(iter: &mut TtIter<'a, S>) -> Result<&'a tt::Ident<S>, ()> {
+fn expect_lifetime<'a>(iter: &mut TtIter<'a>) -> Result<tt::Ident, ()> {
let punct = iter.expect_single_punct()?;
if punct.char != '\'' {
return Err(());
@@ -1000,8 +1000,8 @@ fn expect_lifetime<'a, S: Copy>(iter: &mut TtIter<'a, S>) -> Result<&'a tt::Iden
iter.expect_ident_or_underscore()
}
-fn eat_char<S: Copy>(iter: &mut TtIter<'_, S>, c: char) {
- if matches!(iter.peek(), Some(TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char, .. }))) if *char == c)
+fn eat_char(iter: &mut TtIter<'_>, c: char) {
+ if matches!(iter.peek(), Some(TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char, .. }))) if char == c)
{
iter.next().expect("already peeked");
}
diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs
index 006ef1af80..e8e7928c26 100644
--- a/crates/mbe/src/expander/transcriber.rs
+++ b/crates/mbe/src/expander/transcriber.rs
@@ -3,6 +3,7 @@
use intern::{Symbol, sym};
use span::{Edition, Span};
+use stdx::itertools::Itertools;
use tt::{Delimiter, TopSubtreeBuilder, iter::TtElement};
use super::TokensOrigin;
@@ -125,7 +126,7 @@ pub(super) fn transcribe(
bindings: &Bindings<'_>,
marker: impl Fn(&mut Span) + Copy,
call_site: Span,
-) -> ExpandResult<tt::TopSubtree<Span>> {
+) -> ExpandResult<tt::TopSubtree> {
let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), call_site };
let mut builder = tt::TopSubtreeBuilder::new(tt::Delimiter::invisible_spanned(ctx.call_site));
expand_subtree(&mut ctx, template, &mut builder, marker).map(|()| builder.build())
@@ -152,8 +153,8 @@ struct ExpandCtx<'a> {
fn expand_subtree_with_delimiter(
ctx: &mut ExpandCtx<'_>,
template: &MetaTemplate,
- builder: &mut tt::TopSubtreeBuilder<Span>,
- delimiter: Option<Delimiter<Span>>,
+ builder: &mut tt::TopSubtreeBuilder,
+ delimiter: Option<Delimiter>,
marker: impl Fn(&mut Span) + Copy,
) -> ExpandResult<()> {
let delimiter = delimiter.unwrap_or_else(|| tt::Delimiter::invisible_spanned(ctx.call_site));
@@ -166,7 +167,7 @@ fn expand_subtree_with_delimiter(
fn expand_subtree(
ctx: &mut ExpandCtx<'_>,
template: &MetaTemplate,
- builder: &mut tt::TopSubtreeBuilder<Span>,
+ builder: &mut tt::TopSubtreeBuilder,
marker: impl Fn(&mut Span) + Copy,
) -> ExpandResult<()> {
let mut err = None;
@@ -221,10 +222,10 @@ fn expand_subtree(
let index =
ctx.nesting.get(ctx.nesting.len() - 1 - depth).map_or(0, |nest| nest.idx);
builder.push(tt::Leaf::Literal(tt::Literal {
- symbol: Symbol::integer(index),
+ text_and_suffix: Symbol::integer(index),
span: ctx.call_site,
kind: tt::LitKind::Integer,
- suffix: None,
+ suffix_len: 0,
}));
}
Op::Len { depth } => {
@@ -233,10 +234,10 @@ fn expand_subtree(
0
});
builder.push(tt::Leaf::Literal(tt::Literal {
- symbol: Symbol::integer(length),
+ text_and_suffix: Symbol::integer(length),
span: ctx.call_site,
kind: tt::LitKind::Integer,
- suffix: None,
+ suffix_len: 0,
}));
}
Op::Count { name, depth } => {
@@ -277,9 +278,9 @@ fn expand_subtree(
let res = count(binding, 0, depth.unwrap_or(0));
builder.push(tt::Leaf::Literal(tt::Literal {
- symbol: Symbol::integer(res),
+ text_and_suffix: Symbol::integer(res),
span: ctx.call_site,
- suffix: None,
+ suffix_len: 0,
kind: tt::LitKind::Integer,
}));
}
@@ -293,7 +294,7 @@ fn expand_subtree(
ConcatMetaVarExprElem::Literal(lit) => {
// FIXME: This isn't really correct wrt. escaping, but that's what rustc does and anyway
// escaping is used most of the times for characters that are invalid in identifiers.
- concatenated.push_str(lit.symbol.as_str())
+ concatenated.push_str(lit.text())
}
ConcatMetaVarExprElem::Var(var) => {
// Handling of repetitions in `${concat}` isn't fleshed out in rustc, so we currently
@@ -324,13 +325,11 @@ fn expand_subtree(
}
_ => (None, None),
};
- let value = match values {
+ let value = match &values {
(Some(TtElement::Leaf(tt::Leaf::Ident(ident))), None) => {
ident.sym.as_str()
}
- (Some(TtElement::Leaf(tt::Leaf::Literal(lit))), None) => {
- lit.symbol.as_str()
- }
+ (Some(TtElement::Leaf(tt::Leaf::Literal(lit))), None) => lit.text(),
_ => {
if err.is_none() {
err = Some(ExpandError::binding_error(
@@ -382,7 +381,7 @@ fn expand_var(
ctx: &mut ExpandCtx<'_>,
v: &Symbol,
id: Span,
- builder: &mut tt::TopSubtreeBuilder<Span>,
+ builder: &mut tt::TopSubtreeBuilder,
marker: impl Fn(&mut Span) + Copy,
) -> ExpandResult<()> {
// We already handle $crate case in mbe parser
@@ -412,15 +411,15 @@ fn expand_var(
// Check if this is a simple negative literal (MINUS + LITERAL)
// that should not be wrapped in parentheses
let is_negative_literal = matches!(
- sub.flat_tokens(),
- [
- tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '-', .. })),
- tt::TokenTree::Leaf(tt::Leaf::Literal(_))
- ]
+ sub.iter().collect_array(),
+ Some([
+ tt::TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char: '-', .. })),
+ tt::TtElement::Leaf(tt::Leaf::Literal(_))
+ ])
);
let wrap_in_parens = !is_negative_literal
- && !matches!(sub.flat_tokens(), [tt::TokenTree::Leaf(_)])
+ && !matches!(sub.iter().collect_array(), Some([tt::TtElement::Leaf(_)]))
&& sub.try_into_subtree().is_none_or(|it| {
it.top_subtree().delimiter.kind == tt::DelimiterKind::Invisible
});
@@ -466,7 +465,7 @@ fn expand_repeat(
template: &MetaTemplate,
kind: RepeatKind,
separator: Option<&Separator>,
- builder: &mut tt::TopSubtreeBuilder<Span>,
+ builder: &mut tt::TopSubtreeBuilder,
marker: impl Fn(&mut Span) + Copy,
) -> ExpandResult<()> {
ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
@@ -546,8 +545,8 @@ fn expand_repeat(
/// we need this fixup.
fn fix_up_and_push_path_tt(
ctx: &ExpandCtx<'_>,
- builder: &mut tt::TopSubtreeBuilder<Span>,
- subtree: tt::TokenTreesView<'_, Span>,
+ builder: &mut tt::TopSubtreeBuilder,
+ subtree: tt::TokenTreesView<'_>,
) {
let mut prev_was_ident = false;
// Note that we only need to fix up the top-level `TokenTree`s because the
@@ -560,8 +559,8 @@ fn fix_up_and_push_path_tt(
// argument list and thus needs `::` between it and `FnOnce`. However in
// today's Rust this type of path *semantically* cannot appear as a
// top-level expression-context path, so we can safely ignore it.
- if let [tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '<', .. }))] =
- tt.flat_tokens()
+ if let Some([tt::TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char: '<', .. }))]) =
+ tt.iter().collect_array()
{
builder.extend([
tt::Leaf::Punct(tt::Punct {
@@ -577,7 +576,8 @@ fn fix_up_and_push_path_tt(
]);
}
}
- prev_was_ident = matches!(tt.flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(_))]);
+ prev_was_ident =
+ matches!(tt.iter().collect_array(), Some([tt::TtElement::Leaf(tt::Leaf::Ident(_))]));
builder.extend_with_tt(tt);
}
}
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 1193c4290c..936cf178f0 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -152,7 +152,7 @@ impl DeclarativeMacro {
/// The old, `macro_rules! m {}` flavor.
pub fn parse_macro_rules(
- tt: &tt::TopSubtree<Span>,
+ tt: &tt::TopSubtree,
ctx_edition: impl Copy + Fn(SyntaxContext) -> Edition,
) -> DeclarativeMacro {
// Note: this parsing can be implemented using mbe machinery itself, by
@@ -191,8 +191,8 @@ impl DeclarativeMacro {
/// The new, unstable `macro m {}` flavor.
pub fn parse_macro2(
- args: Option<&tt::TopSubtree<Span>>,
- body: &tt::TopSubtree<Span>,
+ args: Option<&tt::TopSubtree>,
+ body: &tt::TopSubtree,
ctx_edition: impl Copy + Fn(SyntaxContext) -> Edition,
) -> DeclarativeMacro {
let mut rules = Vec::new();
@@ -276,11 +276,11 @@ impl DeclarativeMacro {
pub fn expand(
&self,
db: &dyn salsa::Database,
- tt: &tt::TopSubtree<Span>,
+ tt: &tt::TopSubtree,
marker: impl Fn(&mut Span) + Copy,
call_style: MacroCallStyle,
call_site: Span,
- ) -> ExpandResult<(tt::TopSubtree<Span>, MatchedArmIndex)> {
+ ) -> ExpandResult<(tt::TopSubtree, MatchedArmIndex)> {
expander::expand_rules(db, &self.rules, tt, marker, call_style, call_site)
}
}
@@ -288,7 +288,7 @@ impl DeclarativeMacro {
impl Rule {
fn parse(
edition: impl Copy + Fn(SyntaxContext) -> Edition,
- src: &mut TtIter<'_, Span>,
+ src: &mut TtIter<'_>,
) -> Result<Self, ParseError> {
// Parse an optional `attr()` or `derive()` prefix before the LHS pattern.
let style = parser::parse_rule_style(src)?;
@@ -391,10 +391,10 @@ impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> {
pub fn expect_fragment<'t>(
db: &dyn salsa::Database,
- tt_iter: &mut TtIter<'t, Span>,
+ tt_iter: &mut TtIter<'t>,
entry_point: ::parser::PrefixEntryPoint,
- delim_span: DelimSpan<Span>,
-) -> ExpandResult<tt::TokenTreesView<'t, Span>> {
+ delim_span: DelimSpan,
+) -> ExpandResult<tt::TokenTreesView<'t>> {
use ::parser;
let buffer = tt_iter.remaining();
let parser_input = to_parser_input(buffer, &mut |ctx| ctx.edition(db));
diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs
index e1cb98abae..796ee62d48 100644
--- a/crates/mbe/src/parser.rs
+++ b/crates/mbe/src/parser.rs
@@ -13,7 +13,7 @@ use tt::{
use crate::{MacroCallStyle, ParseError};
-pub(crate) fn parse_rule_style(src: &mut TtIter<'_, Span>) -> Result<MacroCallStyle, ParseError> {
+pub(crate) fn parse_rule_style(src: &mut TtIter<'_>) -> Result<MacroCallStyle, ParseError> {
// Skip an optional `unsafe`. This is only actually allowed for `attr`
// rules, but we'll let rustc worry about that.
if let Some(TtElement::Leaf(tt::Leaf::Ident(ident))) = src.peek()
@@ -59,14 +59,14 @@ pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>);
impl MetaTemplate {
pub(crate) fn parse_pattern(
edition: impl Copy + Fn(SyntaxContext) -> Edition,
- pattern: TtIter<'_, Span>,
+ pattern: TtIter<'_>,
) -> Result<Self, ParseError> {
MetaTemplate::parse(edition, pattern, Mode::Pattern)
}
pub(crate) fn parse_template(
edition: impl Copy + Fn(SyntaxContext) -> Edition,
- template: TtIter<'_, Span>,
+ template: TtIter<'_>,
) -> Result<Self, ParseError> {
MetaTemplate::parse(edition, template, Mode::Template)
}
@@ -77,7 +77,7 @@ impl MetaTemplate {
fn parse(
edition: impl Copy + Fn(SyntaxContext) -> Edition,
- mut src: TtIter<'_, Span>,
+ mut src: TtIter<'_>,
mode: Mode,
) -> Result<Self, ParseError> {
let mut res = Vec::new();
@@ -123,23 +123,23 @@ pub(crate) enum Op {
},
Subtree {
tokens: MetaTemplate,
- delimiter: tt::Delimiter<Span>,
+ delimiter: tt::Delimiter,
},
- Literal(tt::Literal<Span>),
- Punct(Box<ArrayVec<tt::Punct<Span>, MAX_GLUED_PUNCT_LEN>>),
- Ident(tt::Ident<Span>),
+ Literal(tt::Literal),
+ Punct(Box<ArrayVec<tt::Punct, MAX_GLUED_PUNCT_LEN>>),
+ Ident(tt::Ident),
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum ConcatMetaVarExprElem {
/// There is NO preceding dollar sign, which means that this identifier should be interpreted
/// as a literal.
- Ident(tt::Ident<Span>),
+ Ident(tt::Ident),
/// There is a preceding dollar sign, which means that this identifier should be expanded
/// and interpreted as a variable.
- Var(tt::Ident<Span>),
+ Var(tt::Ident),
/// For example, a number or a string.
- Literal(tt::Literal<Span>),
+ Literal(tt::Literal),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -179,10 +179,10 @@ pub(crate) enum MetaVarKind {
#[derive(Clone, Debug, Eq)]
pub(crate) enum Separator {
- Literal(tt::Literal<Span>),
- Ident(tt::Ident<Span>),
- Puncts(ArrayVec<tt::Punct<Span>, MAX_GLUED_PUNCT_LEN>),
- Lifetime(tt::Punct<Span>, tt::Ident<Span>),
+ Literal(tt::Literal),
+ Ident(tt::Ident),
+ Puncts(ArrayVec<tt::Punct, MAX_GLUED_PUNCT_LEN>),
+ Lifetime(tt::Punct, tt::Ident),
}
// Note that when we compare a Separator, we just care about its textual value.
@@ -192,7 +192,7 @@ impl PartialEq for Separator {
match (self, other) {
(Ident(a), Ident(b)) => a.sym == b.sym,
- (Literal(a), Literal(b)) => a.symbol == b.symbol,
+ (Literal(a), Literal(b)) => a.text_and_suffix == b.text_and_suffix,
(Puncts(a), Puncts(b)) if a.len() == b.len() => {
let a_iter = a.iter().map(|a| a.char);
let b_iter = b.iter().map(|b| b.char);
@@ -212,8 +212,8 @@ enum Mode {
fn next_op(
edition: impl Copy + Fn(SyntaxContext) -> Edition,
- first_peeked: TtElement<'_, Span>,
- src: &mut TtIter<'_, Span>,
+ first_peeked: TtElement<'_>,
+ src: &mut TtIter<'_>,
mode: Mode,
) -> Result<Op, ParseError> {
let res = match first_peeked {
@@ -224,7 +224,7 @@ fn next_op(
None => {
return Ok(Op::Punct({
let mut res = ArrayVec::new();
- res.push(*p);
+ res.push(p);
Box::new(res)
}));
}
@@ -268,9 +268,9 @@ fn next_op(
let id = ident.span;
Op::Var { name, kind, id }
}
- tt::Leaf::Literal(lit) if is_boolean_literal(lit) => {
+ tt::Leaf::Literal(lit) if is_boolean_literal(&lit) => {
let kind = eat_fragment_kind(edition, src, mode)?;
- let name = lit.symbol.clone();
+ let name = lit.text_and_suffix.clone();
let id = lit.span;
Op::Var { name, kind, id }
}
@@ -282,7 +282,7 @@ fn next_op(
}
Mode::Template => Op::Punct({
let mut res = ArrayVec::new();
- res.push(*punct);
+ res.push(punct);
Box::new(res)
}),
},
@@ -320,7 +320,7 @@ fn next_op(
fn eat_fragment_kind(
edition: impl Copy + Fn(SyntaxContext) -> Edition,
- src: &mut TtIter<'_, Span>,
+ src: &mut TtIter<'_>,
mode: Mode,
) -> Result<Option<MetaVarKind>, ParseError> {
if let Mode::Pattern = mode {
@@ -363,11 +363,11 @@ fn eat_fragment_kind(
Ok(None)
}
-fn is_boolean_literal(lit: &tt::Literal<Span>) -> bool {
- matches!(lit.symbol.as_str(), "true" | "false")
+fn is_boolean_literal(lit: &tt::Literal) -> bool {
+ lit.text_and_suffix == sym::true_ || lit.text_and_suffix == sym::false_
}
-fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, RepeatKind), ParseError> {
+fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option<Separator>, RepeatKind), ParseError> {
let mut separator = Separator::Puncts(ArrayVec::new());
for tt in src {
let tt = match tt {
@@ -400,7 +400,7 @@ fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, Repeat
'?' => RepeatKind::ZeroOrOne,
_ => match &mut separator {
Separator::Puncts(puncts) if puncts.len() < 3 => {
- puncts.push(*punct);
+ puncts.push(punct);
continue;
}
_ => return Err(ParseError::InvalidRepeat),
@@ -413,7 +413,7 @@ fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, Repeat
Err(ParseError::InvalidRepeat)
}
-fn parse_metavar_expr(src: &mut TtIter<'_, Span>) -> Result<Op, ()> {
+fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result<Op, ()> {
let func = src.expect_ident()?;
let (args, mut args_iter) = src.expect_subtree()?;
@@ -475,20 +475,21 @@ fn parse_metavar_expr(src: &mut TtIter<'_, Span>) -> Result<Op, ()> {
Ok(op)
}
-fn parse_depth(src: &mut TtIter<'_, Span>) -> Result<usize, ()> {
+fn parse_depth(src: &mut TtIter<'_>) -> Result<usize, ()> {
if src.is_empty() {
Ok(0)
- } else if let tt::Leaf::Literal(tt::Literal { symbol: text, suffix: None, .. }) =
- src.expect_literal()?
+ } else if let tt::Leaf::Literal(lit) = src.expect_literal()?
+ && let (text, suffix) = lit.text_and_suffix()
+ && suffix.is_empty()
{
// Suffixes are not allowed.
- text.as_str().parse().map_err(|_| ())
+ text.parse().map_err(|_| ())
} else {
Err(())
}
}
-fn try_eat_comma(src: &mut TtIter<'_, Span>) -> bool {
+fn try_eat_comma(src: &mut TtIter<'_>) -> bool {
if let Some(TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))) = src.peek() {
let _ = src.next();
return true;
@@ -496,7 +497,7 @@ fn try_eat_comma(src: &mut TtIter<'_, Span>) -> bool {
false
}
-fn try_eat_dollar(src: &mut TtIter<'_, Span>) -> bool {
+fn try_eat_dollar(src: &mut TtIter<'_>) -> bool {
if let Some(TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char: '$', .. }))) = src.peek() {
let _ = src.next();
return true;
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs
index ab18309308..d83e2eb2b4 100644
--- a/crates/parser/src/grammar/expressions/atom.rs
+++ b/crates/parser/src/grammar/expressions/atom.rs
@@ -278,14 +278,20 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> {
}
Some(m.complete(p, OFFSET_OF_EXPR))
} else if p.eat_contextual_kw(T![format_args]) {
+ // test format_args_named_arg_keyword
+ // fn main() {
+ // builtin#format_args("{type}", type=1);
+ // }
p.expect(T!['(']);
expr(p);
if p.eat(T![,]) {
while !p.at(EOF) && !p.at(T![')']) {
let m = p.start();
- if p.at(IDENT) && p.nth_at(1, T![=]) && !p.nth_at(2, T![=]) {
- name(p);
+ if p.current().is_any_identifier() && p.nth_at(1, T![=]) && !p.nth_at(2, T![=]) {
+ let m = p.start();
+ p.bump_any();
p.bump(T![=]);
+ m.complete(p, FORMAT_ARGS_ARG_NAME);
}
if expr(p).is_none() {
m.abandon(p);
diff --git a/crates/parser/src/input.rs b/crates/parser/src/input.rs
index 57eeb431cd..42e8a400ed 100644
--- a/crates/parser/src/input.rs
+++ b/crates/parser/src/input.rs
@@ -97,7 +97,7 @@ impl Input {
let b_idx = n % (bits::BITS as usize);
(idx, b_idx)
}
- fn len(&self) -> usize {
+ pub fn len(&self) -> usize {
self.kind.len()
}
}
diff --git a/crates/parser/src/lexed_str.rs b/crates/parser/src/lexed_str.rs
index 7c78ba8faf..d7eec6cde8 100644
--- a/crates/parser/src/lexed_str.rs
+++ b/crates/parser/src/lexed_str.rs
@@ -150,7 +150,12 @@ struct Converter<'a> {
impl<'a> Converter<'a> {
fn new(edition: Edition, text: &'a str) -> Self {
Self {
- res: LexedStr { text, kind: Vec::new(), start: Vec::new(), error: Vec::new() },
+ res: LexedStr {
+ text,
+ kind: Vec::with_capacity(text.len() / 3),
+ start: Vec::with_capacity(text.len() / 3),
+ error: Vec::new(),
+ },
offset: 0,
edition,
}
diff --git a/crates/parser/src/parser.rs b/crates/parser/src/parser.rs
index c41bd593c6..4557078de9 100644
--- a/crates/parser/src/parser.rs
+++ b/crates/parser/src/parser.rs
@@ -32,7 +32,7 @@ const PARSER_STEP_LIMIT: usize = if cfg!(debug_assertions) { 150_000 } else { 15
impl<'t> Parser<'t> {
pub(super) fn new(inp: &'t Input) -> Parser<'t> {
- Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0) }
+ Parser { inp, pos: 0, events: Vec::with_capacity(2 * inp.len()), steps: Cell::new(0) }
}
pub(crate) fn finish(self) -> Vec<Event> {
diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs
index 6cf2524c16..5d22d966b2 100644
--- a/crates/parser/src/syntax_kind/generated.rs
+++ b/crates/parser/src/syntax_kind/generated.rs
@@ -201,6 +201,7 @@ pub enum SyntaxKind {
FN,
FN_PTR_TYPE,
FORMAT_ARGS_ARG,
+ FORMAT_ARGS_ARG_NAME,
FORMAT_ARGS_EXPR,
FOR_BINDER,
FOR_EXPR,
@@ -373,6 +374,7 @@ impl SyntaxKind {
| FN
| FN_PTR_TYPE
| FORMAT_ARGS_ARG
+ | FORMAT_ARGS_ARG_NAME
| FORMAT_ARGS_EXPR
| FOR_BINDER
| FOR_EXPR
diff --git a/crates/parser/test_data/generated/runner.rs b/crates/parser/test_data/generated/runner.rs
index 7b0d32d9d1..9f919f6cea 100644
--- a/crates/parser/test_data/generated/runner.rs
+++ b/crates/parser/test_data/generated/runner.rs
@@ -265,6 +265,10 @@ mod ok {
#[test]
fn for_type() { run_and_expect_no_errors("test_data/parser/inline/ok/for_type.rs"); }
#[test]
+ fn format_args_named_arg_keyword() {
+ run_and_expect_no_errors("test_data/parser/inline/ok/format_args_named_arg_keyword.rs");
+ }
+ #[test]
fn frontmatter() { run_and_expect_no_errors("test_data/parser/inline/ok/frontmatter.rs"); }
#[test]
fn full_range_expr() {
diff --git a/crates/parser/test_data/parser/inline/ok/builtin_expr.rast b/crates/parser/test_data/parser/inline/ok/builtin_expr.rast
index 19a84ac540..78c3bee4c8 100644
--- a/crates/parser/test_data/parser/inline/ok/builtin_expr.rast
+++ b/crates/parser/test_data/parser/inline/ok/builtin_expr.rast
@@ -44,10 +44,10 @@ SOURCE_FILE
COMMA ","
WHITESPACE " "
FORMAT_ARGS_ARG
- NAME
+ FORMAT_ARGS_ARG_NAME
IDENT "a"
- WHITESPACE " "
- EQ "="
+ WHITESPACE " "
+ EQ "="
WHITESPACE " "
BIN_EXPR
LITERAL
diff --git a/crates/parser/test_data/parser/inline/ok/format_args_named_arg_keyword.rast b/crates/parser/test_data/parser/inline/ok/format_args_named_arg_keyword.rast
new file mode 100644
index 0000000000..03bc2ecf7c
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/format_args_named_arg_keyword.rast
@@ -0,0 +1,35 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ FORMAT_ARGS_EXPR
+ BUILTIN_KW "builtin"
+ POUND "#"
+ FORMAT_ARGS_KW "format_args"
+ L_PAREN "("
+ LITERAL
+ STRING "\"{type}\""
+ COMMA ","
+ WHITESPACE " "
+ FORMAT_ARGS_ARG
+ FORMAT_ARGS_ARG_NAME
+ TYPE_KW "type"
+ EQ "="
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/crates/parser/test_data/parser/inline/ok/format_args_named_arg_keyword.rs b/crates/parser/test_data/parser/inline/ok/format_args_named_arg_keyword.rs
new file mode 100644
index 0000000000..7af45894ed
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/format_args_named_arg_keyword.rs
@@ -0,0 +1,3 @@
+fn main() {
+ builtin#format_args("{type}", type=1);
+}
diff --git a/crates/proc-macro-api/src/bidirectional_protocol.rs b/crates/proc-macro-api/src/bidirectional_protocol.rs
new file mode 100644
index 0000000000..e44723a6a3
--- /dev/null
+++ b/crates/proc-macro-api/src/bidirectional_protocol.rs
@@ -0,0 +1,227 @@
+//! Bidirectional protocol methods
+
+use std::{
+ io::{self, BufRead, Write},
+ sync::Arc,
+};
+
+use paths::AbsPath;
+use span::Span;
+
+use crate::{
+ Codec, ProcMacro, ProcMacroKind, ServerError,
+ bidirectional_protocol::msg::{
+ BidirectionalMessage, ExpandMacro, ExpandMacroData, ExpnGlobals, Request, Response,
+ SubRequest, SubResponse,
+ },
+ legacy_protocol::{
+ SpanMode,
+ msg::{
+ FlatTree, ServerConfig, SpanDataIndexMap, deserialize_span_data_index_map,
+ serialize_span_data_index_map,
+ },
+ },
+ process::ProcMacroServerProcess,
+ transport::codec::postcard::PostcardProtocol,
+ version,
+};
+
+pub mod msg;
+
+pub type SubCallback<'a> = &'a mut dyn FnMut(SubRequest) -> Result<SubResponse, ServerError>;
+
+pub fn run_conversation<C: Codec>(
+ writer: &mut dyn Write,
+ reader: &mut dyn BufRead,
+ buf: &mut C::Buf,
+ msg: BidirectionalMessage,
+ callback: SubCallback<'_>,
+) -> Result<BidirectionalMessage, ServerError> {
+ let encoded = C::encode(&msg).map_err(wrap_encode)?;
+ C::write(writer, &encoded).map_err(wrap_io("failed to write initial request"))?;
+
+ loop {
+ let maybe_buf = C::read(reader, buf).map_err(wrap_io("failed to read message"))?;
+ let Some(b) = maybe_buf else {
+ return Err(ServerError {
+ message: "proc-macro server closed the stream".into(),
+ io: Some(Arc::new(io::Error::new(io::ErrorKind::UnexpectedEof, "closed"))),
+ });
+ };
+
+ let msg: BidirectionalMessage = C::decode(b).map_err(wrap_decode)?;
+
+ match msg {
+ BidirectionalMessage::Response(response) => {
+ return Ok(BidirectionalMessage::Response(response));
+ }
+ BidirectionalMessage::SubRequest(sr) => {
+ let resp = callback(sr)?;
+ let reply = BidirectionalMessage::SubResponse(resp);
+ let encoded = C::encode(&reply).map_err(wrap_encode)?;
+ C::write(writer, &encoded).map_err(wrap_io("failed to write sub-response"))?;
+ }
+ _ => {
+ return Err(ServerError {
+ message: format!("unexpected message {:?}", msg),
+ io: None,
+ });
+ }
+ }
+ }
+}
+
+fn wrap_io(msg: &'static str) -> impl Fn(io::Error) -> ServerError {
+ move |err| ServerError { message: msg.into(), io: Some(Arc::new(err)) }
+}
+
+fn wrap_encode(err: io::Error) -> ServerError {
+ ServerError { message: "failed to encode message".into(), io: Some(Arc::new(err)) }
+}
+
+fn wrap_decode(err: io::Error) -> ServerError {
+ ServerError { message: "failed to decode message".into(), io: Some(Arc::new(err)) }
+}
+
+pub(crate) fn version_check(
+ srv: &ProcMacroServerProcess,
+ callback: SubCallback<'_>,
+) -> Result<u32, ServerError> {
+ let request = BidirectionalMessage::Request(Request::ApiVersionCheck {});
+
+ let response_payload = run_request(srv, request, callback)?;
+
+ match response_payload {
+ BidirectionalMessage::Response(Response::ApiVersionCheck(version)) => Ok(version),
+ other => {
+ Err(ServerError { message: format!("unexpected response: {:?}", other), io: None })
+ }
+ }
+}
+
+/// Enable support for rust-analyzer span mode if the server supports it.
+pub(crate) fn enable_rust_analyzer_spans(
+ srv: &ProcMacroServerProcess,
+ callback: SubCallback<'_>,
+) -> Result<SpanMode, ServerError> {
+ let request = BidirectionalMessage::Request(Request::SetConfig(ServerConfig {
+ span_mode: SpanMode::RustAnalyzer,
+ }));
+
+ let response_payload = run_request(srv, request, callback)?;
+
+ match response_payload {
+ BidirectionalMessage::Response(Response::SetConfig(ServerConfig { span_mode })) => {
+ Ok(span_mode)
+ }
+ _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }),
+ }
+}
+
+/// Finds proc-macros in a given dynamic library.
+pub(crate) fn find_proc_macros(
+ srv: &ProcMacroServerProcess,
+ dylib_path: &AbsPath,
+ callback: SubCallback<'_>,
+) -> Result<Result<Vec<(String, ProcMacroKind)>, String>, ServerError> {
+ let request = BidirectionalMessage::Request(Request::ListMacros {
+ dylib_path: dylib_path.to_path_buf().into(),
+ });
+
+ let response_payload = run_request(srv, request, callback)?;
+
+ match response_payload {
+ BidirectionalMessage::Response(Response::ListMacros(it)) => Ok(it),
+ _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }),
+ }
+}
+
+pub(crate) fn expand(
+ proc_macro: &ProcMacro,
+ subtree: tt::SubtreeView<'_>,
+ attr: Option<tt::SubtreeView<'_>>,
+ env: Vec<(String, String)>,
+ def_site: Span,
+ call_site: Span,
+ mixed_site: Span,
+ current_dir: String,
+ callback: SubCallback<'_>,
+) -> Result<Result<tt::TopSubtree, String>, crate::ServerError> {
+ let version = proc_macro.process.version();
+ let mut span_data_table = SpanDataIndexMap::default();
+ let def_site = span_data_table.insert_full(def_site).0;
+ let call_site = span_data_table.insert_full(call_site).0;
+ let mixed_site = span_data_table.insert_full(mixed_site).0;
+ let task = BidirectionalMessage::Request(Request::ExpandMacro(Box::new(ExpandMacro {
+ data: ExpandMacroData {
+ macro_body: FlatTree::from_subtree(subtree, version, &mut span_data_table),
+ macro_name: proc_macro.name.to_string(),
+ attributes: attr
+ .map(|subtree| FlatTree::from_subtree(subtree, version, &mut span_data_table)),
+ has_global_spans: ExpnGlobals {
+ serialize: version >= version::HAS_GLOBAL_SPANS,
+ def_site,
+ call_site,
+ mixed_site,
+ },
+ span_data_table: if proc_macro.process.rust_analyzer_spans() {
+ serialize_span_data_index_map(&span_data_table)
+ } else {
+ Vec::new()
+ },
+ },
+ lib: proc_macro.dylib_path.to_path_buf().into(),
+ env,
+ current_dir: Some(current_dir),
+ })));
+
+ let response_payload = run_request(&proc_macro.process, task, callback)?;
+
+ match response_payload {
+ BidirectionalMessage::Response(Response::ExpandMacro(it)) => Ok(it
+ .map(|tree| {
+ let mut expanded = FlatTree::to_subtree_resolved(tree, version, &span_data_table);
+ if proc_macro.needs_fixup_change() {
+ proc_macro.change_fixup_to_match_old_server(&mut expanded);
+ }
+ expanded
+ })
+ .map_err(|msg| msg.0)),
+ BidirectionalMessage::Response(Response::ExpandMacroExtended(it)) => Ok(it
+ .map(|resp| {
+ let mut expanded = FlatTree::to_subtree_resolved(
+ resp.tree,
+ version,
+ &deserialize_span_data_index_map(&resp.span_data_table),
+ );
+ if proc_macro.needs_fixup_change() {
+ proc_macro.change_fixup_to_match_old_server(&mut expanded);
+ }
+ expanded
+ })
+ .map_err(|msg| msg.0)),
+ _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }),
+ }
+}
+
+fn run_request(
+ srv: &ProcMacroServerProcess,
+ msg: BidirectionalMessage,
+ callback: SubCallback<'_>,
+) -> Result<BidirectionalMessage, ServerError> {
+ if let Some(err) = srv.exited() {
+ return Err(err.clone());
+ }
+
+ match srv.use_postcard() {
+ true => srv.run_bidirectional::<PostcardProtocol>(msg, callback),
+ false => Err(ServerError {
+ message: "bidirectional messaging does not support JSON".to_owned(),
+ io: None,
+ }),
+ }
+}
+
+pub fn reject_subrequests(req: SubRequest) -> Result<SubResponse, ServerError> {
+ Err(ServerError { message: format!("{req:?} sub-request not supported here"), io: None })
+}
diff --git a/crates/proc-macro-api/src/bidirectional_protocol/msg.rs b/crates/proc-macro-api/src/bidirectional_protocol/msg.rs
new file mode 100644
index 0000000000..e41f8a5d7d
--- /dev/null
+++ b/crates/proc-macro-api/src/bidirectional_protocol/msg.rs
@@ -0,0 +1,95 @@
+//! Bidirectional protocol messages
+
+use paths::Utf8PathBuf;
+use serde::{Deserialize, Serialize};
+
+use crate::{
+ ProcMacroKind,
+ legacy_protocol::msg::{FlatTree, Message, PanicMessage, ServerConfig},
+};
+
+#[derive(Debug, Serialize, Deserialize)]
+pub enum SubRequest {
+ FilePath { file_id: u32 },
+ SourceText { file_id: u32, ast_id: u32, start: u32, end: u32 },
+ LocalFilePath { file_id: u32 },
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub enum SubResponse {
+ FilePathResult { name: String },
+ SourceTextResult { text: Option<String> },
+ LocalFilePathResult { name: Option<String> },
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub enum BidirectionalMessage {
+ Request(Request),
+ Response(Response),
+ SubRequest(SubRequest),
+ SubResponse(SubResponse),
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub enum Request {
+ ListMacros { dylib_path: Utf8PathBuf },
+ ExpandMacro(Box<ExpandMacro>),
+ ApiVersionCheck {},
+ SetConfig(ServerConfig),
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub enum Response {
+ ListMacros(Result<Vec<(String, ProcMacroKind)>, String>),
+ ExpandMacro(Result<FlatTree, PanicMessage>),
+ ApiVersionCheck(u32),
+ SetConfig(ServerConfig),
+ ExpandMacroExtended(Result<ExpandMacroExtended, PanicMessage>),
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ExpandMacro {
+ pub lib: Utf8PathBuf,
+ pub env: Vec<(String, String)>,
+ pub current_dir: Option<String>,
+ #[serde(flatten)]
+ pub data: ExpandMacroData,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ExpandMacroExtended {
+ pub tree: FlatTree,
+ pub span_data_table: Vec<u32>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ExpandMacroData {
+ pub macro_body: FlatTree,
+ pub macro_name: String,
+ pub attributes: Option<FlatTree>,
+ #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
+ #[serde(default)]
+ pub has_global_spans: ExpnGlobals,
+
+ #[serde(skip_serializing_if = "Vec::is_empty")]
+ #[serde(default)]
+ pub span_data_table: Vec<u32>,
+}
+
+#[derive(Clone, Copy, Default, Debug, Serialize, Deserialize)]
+pub struct ExpnGlobals {
+ #[serde(skip_serializing)]
+ #[serde(default)]
+ pub serialize: bool,
+ pub def_site: usize,
+ pub call_site: usize,
+ pub mixed_site: usize,
+}
+
+impl ExpnGlobals {
+ fn skip_serializing_if(&self) -> bool {
+ !self.serialize
+ }
+}
+
+impl Message for BidirectionalMessage {}
diff --git a/crates/proc-macro-api/src/legacy_protocol.rs b/crates/proc-macro-api/src/legacy_protocol.rs
index c2b132ddcc..22a7d9868e 100644
--- a/crates/proc-macro-api/src/legacy_protocol.rs
+++ b/crates/proc-macro-api/src/legacy_protocol.rs
@@ -1,8 +1,6 @@
//! The initial proc-macro-srv protocol, soon to be deprecated.
-pub mod json;
pub mod msg;
-pub mod postcard;
use std::{
io::{BufRead, Write},
@@ -14,17 +12,14 @@ use span::Span;
use crate::{
ProcMacro, ProcMacroKind, ServerError,
- codec::Codec,
- legacy_protocol::{
- json::JsonProtocol,
- msg::{
- ExpandMacro, ExpandMacroData, ExpnGlobals, FlatTree, Message, Request, Response,
- ServerConfig, SpanDataIndexMap, deserialize_span_data_index_map,
- flat::serialize_span_data_index_map,
- },
- postcard::PostcardProtocol,
+ legacy_protocol::msg::{
+ ExpandMacro, ExpandMacroData, ExpnGlobals, FlatTree, Message, Request, Response,
+ ServerConfig, SpanDataIndexMap, deserialize_span_data_index_map,
+ flat::serialize_span_data_index_map,
},
process::ProcMacroServerProcess,
+ transport::codec::Codec,
+ transport::codec::{json::JsonProtocol, postcard::PostcardProtocol},
version,
};
@@ -82,15 +77,14 @@ pub(crate) fn find_proc_macros(
pub(crate) fn expand(
proc_macro: &ProcMacro,
- subtree: tt::SubtreeView<'_, Span>,
- attr: Option<tt::SubtreeView<'_, Span>>,
+ subtree: tt::SubtreeView<'_>,
+ attr: Option<tt::SubtreeView<'_>>,
env: Vec<(String, String)>,
def_site: Span,
call_site: Span,
mixed_site: Span,
current_dir: String,
-) -> Result<Result<tt::TopSubtree<span::SpanData<span::SyntaxContext>>, String>, crate::ServerError>
-{
+) -> Result<Result<tt::TopSubtree, String>, crate::ServerError> {
let version = proc_macro.process.version();
let mut span_data_table = SpanDataIndexMap::default();
let def_site = span_data_table.insert_full(def_site).0;
@@ -155,9 +149,9 @@ fn send_task(srv: &ProcMacroServerProcess, req: Request) -> Result<Response, Ser
}
if srv.use_postcard() {
- srv.send_task(send_request::<PostcardProtocol>, req)
+ srv.send_task::<_, _, PostcardProtocol>(send_request::<PostcardProtocol>, req)
} else {
- srv.send_task(send_request::<JsonProtocol>, req)
+ srv.send_task::<_, _, JsonProtocol>(send_request::<JsonProtocol>, req)
}
}
diff --git a/crates/proc-macro-api/src/legacy_protocol/msg.rs b/crates/proc-macro-api/src/legacy_protocol/msg.rs
index a6e228d977..4146b619ec 100644
--- a/crates/proc-macro-api/src/legacy_protocol/msg.rs
+++ b/crates/proc-macro-api/src/legacy_protocol/msg.rs
@@ -8,7 +8,7 @@ use paths::Utf8PathBuf;
use serde::de::DeserializeOwned;
use serde_derive::{Deserialize, Serialize};
-use crate::{ProcMacroKind, codec::Codec};
+use crate::{Codec, ProcMacroKind};
/// Represents requests sent from the client to the proc-macro-srv.
#[derive(Debug, Serialize, Deserialize)]
@@ -172,7 +172,7 @@ impl Message for Response {}
#[cfg(test)]
mod tests {
- use intern::{Symbol, sym};
+ use intern::Symbol;
use span::{
Edition, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext, TextRange, TextSize,
};
@@ -185,7 +185,7 @@ mod tests {
use super::*;
- fn fixture_token_tree_top_many_none() -> TopSubtree<Span> {
+ fn fixture_token_tree_top_many_none() -> TopSubtree {
let anchor = SpanAnchor {
file_id: span::EditionedFileId::new(
span::FileId::from_raw(0xe4e4e),
@@ -232,16 +232,15 @@ mod tests {
}
.into(),
);
- builder.push(Leaf::Literal(Literal {
- symbol: Symbol::intern("Foo"),
- span: Span {
+ builder.push(Leaf::Literal(Literal::new_no_suffix(
+ "Foo",
+ Span {
range: TextRange::at(TextSize::new(10), TextSize::of("\"Foo\"")),
anchor,
ctx: SyntaxContext::root(Edition::CURRENT),
},
- kind: tt::LitKind::Str,
- suffix: None,
- }));
+ tt::LitKind::Str,
+ )));
builder.push(Leaf::Punct(Punct {
char: '@',
span: Span {
@@ -267,16 +266,16 @@ mod tests {
ctx: SyntaxContext::root(Edition::CURRENT),
},
);
- builder.push(Leaf::Literal(Literal {
- symbol: sym::INTEGER_0,
- span: Span {
+ builder.push(Leaf::Literal(Literal::new(
+ "0",
+ Span {
range: TextRange::at(TextSize::new(16), TextSize::of("0u32")),
anchor,
ctx: SyntaxContext::root(Edition::CURRENT),
},
- kind: tt::LitKind::Integer,
- suffix: Some(sym::u32),
- }));
+ tt::LitKind::Integer,
+ "u32",
+ )));
builder.close(Span {
range: TextRange::at(TextSize::new(20), TextSize::of(']')),
anchor,
@@ -292,7 +291,7 @@ mod tests {
builder.build()
}
- fn fixture_token_tree_top_empty_none() -> TopSubtree<Span> {
+ fn fixture_token_tree_top_empty_none() -> TopSubtree {
let anchor = SpanAnchor {
file_id: span::EditionedFileId::new(
span::FileId::from_raw(0xe4e4e),
@@ -318,7 +317,7 @@ mod tests {
builder.build()
}
- fn fixture_token_tree_top_empty_brace() -> TopSubtree<Span> {
+ fn fixture_token_tree_top_empty_brace() -> TopSubtree {
let anchor = SpanAnchor {
file_id: span::EditionedFileId::new(
span::FileId::from_raw(0xe4e4e),
diff --git a/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs b/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
index 1ac8cd4006..cd8944aa61 100644
--- a/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
+++ b/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
@@ -123,7 +123,7 @@ struct IdentRepr {
impl FlatTree {
pub fn from_subtree(
- subtree: tt::SubtreeView<'_, Span>,
+ subtree: tt::SubtreeView<'_>,
version: u32,
span_data_table: &mut SpanDataIndexMap,
) -> FlatTree {
@@ -168,7 +168,7 @@ impl FlatTree {
self,
version: u32,
span_data_table: &SpanDataIndexMap,
- ) -> tt::TopSubtree<Span> {
+ ) -> tt::TopSubtree {
Reader::<Span> {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
@@ -486,16 +486,16 @@ struct Writer<'a, 'span, S: SpanTransformer, W> {
text: Vec<String>,
}
-impl<'a, T: SpanTransformer> Writer<'a, '_, T, tt::iter::TtIter<'a, T::Span>> {
- fn write_subtree(&mut self, root: tt::SubtreeView<'a, T::Span>) {
+impl<'a, T: SpanTransformer<Span = span::Span>> Writer<'a, '_, T, tt::iter::TtIter<'a>> {
+ fn write_subtree(&mut self, root: tt::SubtreeView<'a>) {
let subtree = root.top_subtree();
- self.enqueue(subtree, root.iter());
+ self.enqueue(&subtree, root.iter());
while let Some((idx, len, subtree)) = self.work.pop_front() {
self.subtree(idx, len, subtree);
}
}
- fn subtree(&mut self, idx: usize, n_tt: usize, subtree: tt::iter::TtIter<'a, T::Span>) {
+ fn subtree(&mut self, idx: usize, n_tt: usize, subtree: tt::iter::TtIter<'a>) {
let mut first_tt = self.token_tree.len();
self.token_tree.resize(first_tt + n_tt, !0);
@@ -504,7 +504,7 @@ impl<'a, T: SpanTransformer> Writer<'a, '_, T, tt::iter::TtIter<'a, T::Span>> {
for child in subtree {
let idx_tag = match child {
tt::iter::TtElement::Subtree(subtree, subtree_iter) => {
- let idx = self.enqueue(subtree, subtree_iter);
+ let idx = self.enqueue(&subtree, subtree_iter);
idx << 2
}
tt::iter::TtElement::Leaf(leaf) => match leaf {
@@ -512,9 +512,14 @@ impl<'a, T: SpanTransformer> Writer<'a, '_, T, tt::iter::TtIter<'a, T::Span>> {
let idx = self.literal.len() as u32;
let id = self.token_id_of(lit.span);
let (text, suffix) = if self.version >= EXTENDED_LEAF_DATA {
+ let (text, suffix) = lit.text_and_suffix();
(
- self.intern(lit.symbol.as_str()),
- lit.suffix.as_ref().map(|s| self.intern(s.as_str())).unwrap_or(!0),
+ self.intern_owned(text.to_owned()),
+ if suffix.is_empty() {
+ !0
+ } else {
+ self.intern_owned(suffix.to_owned())
+ },
)
} else {
(self.intern_owned(format!("{lit}")), !0)
@@ -549,11 +554,11 @@ impl<'a, T: SpanTransformer> Writer<'a, '_, T, tt::iter::TtIter<'a, T::Span>> {
let idx = self.ident.len() as u32;
let id = self.token_id_of(ident.span);
let text = if self.version >= EXTENDED_LEAF_DATA {
- self.intern(ident.sym.as_str())
+ self.intern_owned(ident.sym.as_str().to_owned())
} else if ident.is_raw.yes() {
self.intern_owned(format!("r#{}", ident.sym.as_str(),))
} else {
- self.intern(ident.sym.as_str())
+ self.intern_owned(ident.sym.as_str().to_owned())
};
self.ident.push(IdentRepr { id, text, is_raw: ident.is_raw.yes() });
(idx << 2) | 0b11
@@ -565,11 +570,7 @@ impl<'a, T: SpanTransformer> Writer<'a, '_, T, tt::iter::TtIter<'a, T::Span>> {
}
}
- fn enqueue(
- &mut self,
- subtree: &'a tt::Subtree<T::Span>,
- contents: tt::iter::TtIter<'a, T::Span>,
- ) -> u32 {
+ fn enqueue(&mut self, subtree: &tt::Subtree, contents: tt::iter::TtIter<'a>) -> u32 {
let idx = self.subtree.len();
let open = self.token_id_of(subtree.delimiter.open);
let close = self.token_id_of(subtree.delimiter.close);
@@ -586,6 +587,7 @@ impl<'a, T: SpanTransformer, U> Writer<'a, '_, T, U> {
T::token_id_of(self.span_data_table, span)
}
+ #[cfg(feature = "sysroot-abi")]
pub(crate) fn intern(&mut self, text: &'a str) -> u32 {
let table = &mut self.text;
*self.string_table.entry(text.into()).or_insert_with(|| {
@@ -739,9 +741,9 @@ struct Reader<'span, S: SpanTransformer> {
span_data_table: &'span S::Table,
}
-impl<T: SpanTransformer> Reader<'_, T> {
- pub(crate) fn read_subtree(self) -> tt::TopSubtree<T::Span> {
- let mut res: Vec<Option<(tt::Delimiter<T::Span>, Vec<tt::TokenTree<T::Span>>)>> =
+impl<T: SpanTransformer<Span = span::Span>> Reader<'_, T> {
+ pub(crate) fn read_subtree(self) -> tt::TopSubtree {
+ let mut res: Vec<Option<(tt::Delimiter, Vec<tt::TokenTree>)>> =
vec![None; self.subtree.len()];
let read_span = |id| T::span_for_token_id(self.span_data_table, id);
for i in (0..self.subtree.len()).rev() {
@@ -774,10 +776,10 @@ impl<T: SpanTransformer> Reader<'_, T> {
let span = read_span(repr.id);
s.push(
tt::Leaf::Literal(if self.version >= EXTENDED_LEAF_DATA {
- tt::Literal {
- symbol: Symbol::intern(text),
+ tt::Literal::new(
+ text,
span,
- kind: match u16::to_le_bytes(repr.kind) {
+ match u16::to_le_bytes(repr.kind) {
[0, _] => Err(()),
[1, _] => Byte,
[2, _] => Char,
@@ -791,14 +793,12 @@ impl<T: SpanTransformer> Reader<'_, T> {
[10, r] => CStrRaw(r),
_ => unreachable!(),
},
- suffix: if repr.suffix != !0 {
- Some(Symbol::intern(
- self.text[repr.suffix as usize].as_str(),
- ))
+ if repr.suffix != !0 {
+ self.text[repr.suffix as usize].as_str()
} else {
- None
+ ""
},
- }
+ )
} else {
tt::token_to_literal(text, span)
})
@@ -844,7 +844,7 @@ impl<T: SpanTransformer> Reader<'_, T> {
let (delimiter, mut res) = res[0].take().unwrap();
res.insert(0, tt::TokenTree::Subtree(tt::Subtree { delimiter, len: res.len() as u32 }));
- tt::TopSubtree(res.into_boxed_slice())
+ tt::TopSubtree::from_serialized(res)
}
}
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index 85b250eddf..f5fcc99f14 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -16,18 +16,18 @@
#[cfg(feature = "in-rust-tree")]
extern crate rustc_driver as _;
-mod codec;
-mod framing;
+pub mod bidirectional_protocol;
pub mod legacy_protocol;
mod process;
+pub mod transport;
use paths::{AbsPath, AbsPathBuf};
use semver::Version;
use span::{ErasedFileAstId, FIXUP_ERASED_FILE_AST_ID_MARKER, Span};
use std::{fmt, io, sync::Arc, time::SystemTime};
-pub use crate::codec::Codec;
-use crate::process::ProcMacroServerProcess;
+pub use crate::transport::codec::Codec;
+use crate::{bidirectional_protocol::SubCallback, process::ProcMacroServerProcess};
/// The versions of the server protocol
pub mod version {
@@ -142,9 +142,13 @@ impl ProcMacroClient {
}
/// Loads a proc-macro dylib into the server process returning a list of `ProcMacro`s loaded.
- pub fn load_dylib(&self, dylib: MacroDylib) -> Result<Vec<ProcMacro>, ServerError> {
+ pub fn load_dylib(
+ &self,
+ dylib: MacroDylib,
+ callback: Option<SubCallback<'_>>,
+ ) -> Result<Vec<ProcMacro>, ServerError> {
let _p = tracing::info_span!("ProcMacroServer::load_dylib").entered();
- let macros = self.process.find_proc_macros(&dylib.path)?;
+ let macros = self.process.find_proc_macros(&dylib.path, callback)?;
let dylib_path = Arc::new(dylib.path);
let dylib_last_modified = std::fs::metadata(dylib_path.as_path())
@@ -188,44 +192,31 @@ impl ProcMacro {
}
/// On some server versions, the fixup ast id is different than ours. So change it to match.
- fn change_fixup_to_match_old_server(&self, tt: &mut tt::TopSubtree<Span>) {
+ fn change_fixup_to_match_old_server(&self, tt: &mut tt::TopSubtree) {
const OLD_FIXUP_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(!0 - 1);
- let change_ast_id = |ast_id: &mut ErasedFileAstId| {
+ tt.change_every_ast_id(|ast_id| {
if *ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER {
*ast_id = OLD_FIXUP_AST_ID;
} else if *ast_id == OLD_FIXUP_AST_ID {
// Swap between them, that means no collision plus the change can be reversed by doing itself.
*ast_id = FIXUP_ERASED_FILE_AST_ID_MARKER;
}
- };
-
- for tt in &mut tt.0 {
- match tt {
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { span, .. }))
- | tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { span, .. }))
- | tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { span, .. })) => {
- change_ast_id(&mut span.anchor.ast_id);
- }
- tt::TokenTree::Subtree(subtree) => {
- change_ast_id(&mut subtree.delimiter.open.anchor.ast_id);
- change_ast_id(&mut subtree.delimiter.close.anchor.ast_id);
- }
- }
- }
+ });
}
/// Expands the procedural macro by sending an expansion request to the server.
/// This includes span information and environmental context.
pub fn expand(
&self,
- subtree: tt::SubtreeView<'_, Span>,
- attr: Option<tt::SubtreeView<'_, Span>>,
+ subtree: tt::SubtreeView<'_>,
+ attr: Option<tt::SubtreeView<'_>>,
env: Vec<(String, String)>,
def_site: Span,
call_site: Span,
mixed_site: Span,
current_dir: String,
- ) -> Result<Result<tt::TopSubtree<Span>, String>, ServerError> {
+ callback: Option<SubCallback<'_>>,
+ ) -> Result<Result<tt::TopSubtree, String>, ServerError> {
let (mut subtree, mut attr) = (subtree, attr);
let (mut subtree_changed, mut attr_changed);
if self.needs_fixup_change() {
@@ -240,7 +231,7 @@ impl ProcMacro {
}
}
- legacy_protocol::expand(
+ self.process.expand(
self,
subtree,
attr,
@@ -249,6 +240,7 @@ impl ProcMacro {
call_site,
mixed_site,
current_dir,
+ callback,
)
}
}
diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs
index d6a8d27bfc..f6a656e3ce 100644
--- a/crates/proc-macro-api/src/process.rs
+++ b/crates/proc-macro-api/src/process.rs
@@ -9,10 +9,12 @@ use std::{
use paths::AbsPath;
use semver::Version;
+use span::Span;
use stdx::JodChild;
use crate::{
- ProcMacroKind, ServerError,
+ Codec, ProcMacro, ProcMacroKind, ServerError,
+ bidirectional_protocol::{self, SubCallback, msg::BidirectionalMessage, reject_subrequests},
legacy_protocol::{self, SpanMode},
version,
};
@@ -33,6 +35,7 @@ pub(crate) struct ProcMacroServerProcess {
pub(crate) enum Protocol {
LegacyJson { mode: SpanMode },
LegacyPostcard { mode: SpanMode },
+ BidirectionalPostcardPrototype { mode: SpanMode },
}
/// Maintains the state of the proc-macro server process.
@@ -62,6 +65,10 @@ impl ProcMacroServerProcess {
&& has_working_format_flag
{
&[
+ (
+ Some("bidirectional-postcard-prototype"),
+ Protocol::BidirectionalPostcardPrototype { mode: SpanMode::Id },
+ ),
(Some("postcard-legacy"), Protocol::LegacyPostcard { mode: SpanMode::Id }),
(Some("json-legacy"), Protocol::LegacyJson { mode: SpanMode::Id }),
]
@@ -84,7 +91,7 @@ impl ProcMacroServerProcess {
};
let mut srv = create_srv()?;
tracing::info!("sending proc-macro server version check");
- match srv.version_check() {
+ match srv.version_check(Some(&mut reject_subrequests)) {
Ok(v) if v > version::CURRENT_API_VERSION => {
#[allow(clippy::disallowed_methods)]
let process_version = Command::new(process_path)
@@ -102,12 +109,13 @@ impl ProcMacroServerProcess {
tracing::info!("Proc-macro server version: {v}");
srv.version = v;
if srv.version >= version::RUST_ANALYZER_SPAN_SUPPORT
- && let Ok(new_mode) = srv.enable_rust_analyzer_spans()
+ && let Ok(new_mode) =
+ srv.enable_rust_analyzer_spans(Some(&mut reject_subrequests))
{
match &mut srv.protocol {
- Protocol::LegacyJson { mode } | Protocol::LegacyPostcard { mode } => {
- *mode = new_mode
- }
+ Protocol::LegacyJson { mode }
+ | Protocol::LegacyPostcard { mode }
+ | Protocol::BidirectionalPostcardPrototype { mode } => *mode = new_mode,
}
}
tracing::info!("Proc-macro server protocol: {:?}", srv.protocol);
@@ -143,22 +151,36 @@ impl ProcMacroServerProcess {
match self.protocol {
Protocol::LegacyJson { mode } => mode == SpanMode::RustAnalyzer,
Protocol::LegacyPostcard { mode } => mode == SpanMode::RustAnalyzer,
+ Protocol::BidirectionalPostcardPrototype { mode } => mode == SpanMode::RustAnalyzer,
}
}
/// Checks the API version of the running proc-macro server.
- fn version_check(&self) -> Result<u32, ServerError> {
+ fn version_check(&self, callback: Option<SubCallback<'_>>) -> Result<u32, ServerError> {
match self.protocol {
- Protocol::LegacyJson { .. } => legacy_protocol::version_check(self),
- Protocol::LegacyPostcard { .. } => legacy_protocol::version_check(self),
+ Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
+ legacy_protocol::version_check(self)
+ }
+ Protocol::BidirectionalPostcardPrototype { .. } => {
+ let cb = callback.expect("callback required for bidirectional protocol");
+ bidirectional_protocol::version_check(self, cb)
+ }
}
}
/// Enable support for rust-analyzer span mode if the server supports it.
- fn enable_rust_analyzer_spans(&self) -> Result<SpanMode, ServerError> {
+ fn enable_rust_analyzer_spans(
+ &self,
+ callback: Option<SubCallback<'_>>,
+ ) -> Result<SpanMode, ServerError> {
match self.protocol {
- Protocol::LegacyJson { .. } => legacy_protocol::enable_rust_analyzer_spans(self),
- Protocol::LegacyPostcard { .. } => legacy_protocol::enable_rust_analyzer_spans(self),
+ Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
+ legacy_protocol::enable_rust_analyzer_spans(self)
+ }
+ Protocol::BidirectionalPostcardPrototype { .. } => {
+ let cb = callback.expect("callback required for bidirectional protocol");
+ bidirectional_protocol::enable_rust_analyzer_spans(self, cb)
+ }
}
}
@@ -166,30 +188,70 @@ impl ProcMacroServerProcess {
pub(crate) fn find_proc_macros(
&self,
dylib_path: &AbsPath,
+ callback: Option<SubCallback<'_>>,
) -> Result<Result<Vec<(String, ProcMacroKind)>, String>, ServerError> {
match self.protocol {
- Protocol::LegacyJson { .. } => legacy_protocol::find_proc_macros(self, dylib_path),
- Protocol::LegacyPostcard { .. } => legacy_protocol::find_proc_macros(self, dylib_path),
+ Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
+ legacy_protocol::find_proc_macros(self, dylib_path)
+ }
+ Protocol::BidirectionalPostcardPrototype { .. } => {
+ let cb = callback.expect("callback required for bidirectional protocol");
+ bidirectional_protocol::find_proc_macros(self, dylib_path, cb)
+ }
+ }
+ }
+
+ pub(crate) fn expand(
+ &self,
+ proc_macro: &ProcMacro,
+ subtree: tt::SubtreeView<'_>,
+ attr: Option<tt::SubtreeView<'_>>,
+ env: Vec<(String, String)>,
+ def_site: Span,
+ call_site: Span,
+ mixed_site: Span,
+ current_dir: String,
+ callback: Option<SubCallback<'_>>,
+ ) -> Result<Result<tt::TopSubtree, String>, ServerError> {
+ match self.protocol {
+ Protocol::LegacyJson { .. } | Protocol::LegacyPostcard { .. } => {
+ legacy_protocol::expand(
+ proc_macro,
+ subtree,
+ attr,
+ env,
+ def_site,
+ call_site,
+ mixed_site,
+ current_dir,
+ )
+ }
+ Protocol::BidirectionalPostcardPrototype { .. } => bidirectional_protocol::expand(
+ proc_macro,
+ subtree,
+ attr,
+ env,
+ def_site,
+ call_site,
+ mixed_site,
+ current_dir,
+ callback.expect("callback required for bidirectional protocol"),
+ ),
}
}
- pub(crate) fn send_task<Request, Response, Buf>(
+ pub(crate) fn send_task<Request, Response, C: Codec>(
&self,
- serialize_req: impl FnOnce(
+ send: impl FnOnce(
&mut dyn Write,
&mut dyn BufRead,
Request,
- &mut Buf,
+ &mut C::Buf,
) -> Result<Option<Response>, ServerError>,
req: Request,
- ) -> Result<Response, ServerError>
- where
- Buf: Default,
- {
- let state = &mut *self.state.lock().unwrap();
- let mut buf = Buf::default();
- serialize_req(&mut state.stdin, &mut state.stdout, req, &mut buf)
- .and_then(|res| {
+ ) -> Result<Response, ServerError> {
+ self.with_locked_io::<C, _>(|writer, reader, buf| {
+ send(writer, reader, req, buf).and_then(|res| {
res.ok_or_else(|| {
let message = "proc-macro server did not respond with data".to_owned();
ServerError {
@@ -201,33 +263,51 @@ impl ProcMacroServerProcess {
}
})
})
- .map_err(|e| {
- if e.io.as_ref().map(|it| it.kind()) == Some(io::ErrorKind::BrokenPipe) {
- match state.process.child.try_wait() {
- Ok(None) | Err(_) => e,
- Ok(Some(status)) => {
- let mut msg = String::new();
- if !status.success()
- && let Some(stderr) = state.process.child.stderr.as_mut()
- {
- _ = stderr.read_to_string(&mut msg);
- }
- let server_error = ServerError {
- message: format!(
- "proc-macro server exited with {status}{}{msg}",
- if msg.is_empty() { "" } else { ": " }
- ),
- io: None,
- };
- // `AssertUnwindSafe` is fine here, we already correct initialized
- // server_error at this point.
- self.exited.get_or_init(|| AssertUnwindSafe(server_error)).0.clone()
+ })
+ }
+
+ pub(crate) fn with_locked_io<C: Codec, R>(
+ &self,
+ f: impl FnOnce(&mut dyn Write, &mut dyn BufRead, &mut C::Buf) -> Result<R, ServerError>,
+ ) -> Result<R, ServerError> {
+ let state = &mut *self.state.lock().unwrap();
+ let mut buf = C::Buf::default();
+
+ f(&mut state.stdin, &mut state.stdout, &mut buf).map_err(|e| {
+ if e.io.as_ref().map(|it| it.kind()) == Some(io::ErrorKind::BrokenPipe) {
+ match state.process.child.try_wait() {
+ Ok(None) | Err(_) => e,
+ Ok(Some(status)) => {
+ let mut msg = String::new();
+ if !status.success()
+ && let Some(stderr) = state.process.child.stderr.as_mut()
+ {
+ _ = stderr.read_to_string(&mut msg);
}
+ let server_error = ServerError {
+ message: format!(
+ "proc-macro server exited with {status}{}{msg}",
+ if msg.is_empty() { "" } else { ": " }
+ ),
+ io: None,
+ };
+ self.exited.get_or_init(|| AssertUnwindSafe(server_error)).0.clone()
}
- } else {
- e
}
- })
+ } else {
+ e
+ }
+ })
+ }
+
+ pub(crate) fn run_bidirectional<C: Codec>(
+ &self,
+ initial: BidirectionalMessage,
+ callback: SubCallback<'_>,
+ ) -> Result<BidirectionalMessage, ServerError> {
+ self.with_locked_io::<C, _>(|writer, reader, buf| {
+ bidirectional_protocol::run_conversation::<C>(writer, reader, buf, initial, callback)
+ })
}
}
diff --git a/crates/proc-macro-api/src/transport.rs b/crates/proc-macro-api/src/transport.rs
new file mode 100644
index 0000000000..b7a1d8f732
--- /dev/null
+++ b/crates/proc-macro-api/src/transport.rs
@@ -0,0 +1,3 @@
+//! Contains construct for transport of messages.
+pub mod codec;
+pub mod framing;
diff --git a/crates/proc-macro-api/src/codec.rs b/crates/proc-macro-api/src/transport/codec.rs
index baccaa6be4..c9afad260a 100644
--- a/crates/proc-macro-api/src/codec.rs
+++ b/crates/proc-macro-api/src/transport/codec.rs
@@ -4,7 +4,10 @@ use std::io;
use serde::de::DeserializeOwned;
-use crate::framing::Framing;
+use crate::transport::framing::Framing;
+
+pub mod json;
+pub mod postcard;
pub trait Codec: Framing {
fn encode<T: serde::Serialize>(msg: &T) -> io::Result<Self::Buf>;
diff --git a/crates/proc-macro-api/src/legacy_protocol/json.rs b/crates/proc-macro-api/src/transport/codec/json.rs
index 1359c05684..96db802e0b 100644
--- a/crates/proc-macro-api/src/legacy_protocol/json.rs
+++ b/crates/proc-macro-api/src/transport/codec/json.rs
@@ -3,14 +3,14 @@ use std::io::{self, BufRead, Write};
use serde::{Serialize, de::DeserializeOwned};
-use crate::{codec::Codec, framing::Framing};
+use crate::{Codec, transport::framing::Framing};
pub struct JsonProtocol;
impl Framing for JsonProtocol {
type Buf = String;
- fn read<'a, R: BufRead>(
+ fn read<'a, R: BufRead + ?Sized>(
inp: &mut R,
buf: &'a mut String,
) -> io::Result<Option<&'a mut String>> {
@@ -35,7 +35,7 @@ impl Framing for JsonProtocol {
}
}
- fn write<W: Write>(out: &mut W, buf: &String) -> io::Result<()> {
+ fn write<W: Write + ?Sized>(out: &mut W, buf: &String) -> io::Result<()> {
tracing::debug!("> {}", buf);
out.write_all(buf.as_bytes())?;
out.write_all(b"\n")?;
diff --git a/crates/proc-macro-api/src/legacy_protocol/postcard.rs b/crates/proc-macro-api/src/transport/codec/postcard.rs
index c28a9bfe3a..6f5319e75b 100644
--- a/crates/proc-macro-api/src/legacy_protocol/postcard.rs
+++ b/crates/proc-macro-api/src/transport/codec/postcard.rs
@@ -4,14 +4,14 @@ use std::io::{self, BufRead, Write};
use serde::{Serialize, de::DeserializeOwned};
-use crate::{codec::Codec, framing::Framing};
+use crate::{Codec, transport::framing::Framing};
pub struct PostcardProtocol;
impl Framing for PostcardProtocol {
type Buf = Vec<u8>;
- fn read<'a, R: BufRead>(
+ fn read<'a, R: BufRead + ?Sized>(
inp: &mut R,
buf: &'a mut Vec<u8>,
) -> io::Result<Option<&'a mut Vec<u8>>> {
@@ -23,7 +23,7 @@ impl Framing for PostcardProtocol {
Ok(Some(buf))
}
- fn write<W: Write>(out: &mut W, buf: &Vec<u8>) -> io::Result<()> {
+ fn write<W: Write + ?Sized>(out: &mut W, buf: &Vec<u8>) -> io::Result<()> {
out.write_all(buf)?;
out.flush()
}
diff --git a/crates/proc-macro-api/src/framing.rs b/crates/proc-macro-api/src/transport/framing.rs
index a1e6fc05ca..56c3b68e8c 100644
--- a/crates/proc-macro-api/src/framing.rs
+++ b/crates/proc-macro-api/src/transport/framing.rs
@@ -3,12 +3,12 @@
use std::io::{self, BufRead, Write};
pub trait Framing {
- type Buf: Default;
+ type Buf: Default + Send + Sync;
- fn read<'a, R: BufRead>(
+ fn read<'a, R: BufRead + ?Sized>(
inp: &mut R,
buf: &'a mut Self::Buf,
) -> io::Result<Option<&'a mut Self::Buf>>;
- fn write<W: Write>(out: &mut W, buf: &Self::Buf) -> io::Result<()>;
+ fn write<W: Write + ?Sized>(out: &mut W, buf: &Self::Buf) -> io::Result<()>;
}
diff --git a/crates/proc-macro-srv-cli/Cargo.toml b/crates/proc-macro-srv-cli/Cargo.toml
index aa153897fa..6b2db0b269 100644
--- a/crates/proc-macro-srv-cli/Cargo.toml
+++ b/crates/proc-macro-srv-cli/Cargo.toml
@@ -13,12 +13,12 @@ publish = false
[dependencies]
proc-macro-srv.workspace = true
proc-macro-api.workspace = true
-tt.workspace = true
postcard.workspace = true
clap = {version = "4.5.42", default-features = false, features = ["std"]}
[features]
default = []
+# default = ["sysroot-abi"]
sysroot-abi = ["proc-macro-srv/sysroot-abi", "proc-macro-api/sysroot-abi"]
in-rust-tree = ["proc-macro-srv/in-rust-tree", "sysroot-abi"]
diff --git a/crates/proc-macro-srv-cli/README.md b/crates/proc-macro-srv-cli/README.md
new file mode 100644
index 0000000000..02a67ac3ec
--- /dev/null
+++ b/crates/proc-macro-srv-cli/README.md
@@ -0,0 +1,65 @@
+# proc-macro-srv-cli
+
+A standalone binary for the `proc-macro-srv` crate that provides procedural macro expansion for rust-analyzer.
+
+## Overview
+
+rust-analyzer uses a RPC (via stdio) client-server architecture for procedural macro expansion. This is necessary because:
+
+1. Proc macros are dynamic libraries that can segfault, bringing down the entire process, so running them out of process allows rust-analyzer to recover from fatal errors.
+2. Proc macro dylibs are compiled against a specific rustc version and require matching internal APIs to load and execute, as such having this binary shipped as a rustup component allows us to always match the rustc version irrespective of the rust-analyzer version used.
+
+## The `sysroot-abi` Feature
+
+**The `sysroot-abi` feature is required for the binary to actually function.** Without it, the binary will return an error:
+
+```
+proc-macro-srv-cli needs to be compiled with the `sysroot-abi` feature to function
+```
+
+This feature is necessary because the proc-macro server needs access to unstable rustc internals (`proc_macro_internals`, `proc_macro_diagnostic`, `proc_macro_span`) which are only available on nightly or with `RUSTC_BOOTSTRAP=1`.
+rust-analyzer is a stable toolchain project though, so the feature flag is used to have it remain compilable on stable by default.
+
+### Building
+
+```bash
+# Using nightly toolchain
+cargo build -p proc-macro-srv-cli --features sysroot-abi
+
+# Or with RUSTC_BOOTSTRAP on stable
+RUSTC_BOOTSTRAP=1 cargo build -p proc-macro-srv-cli --features sysroot-abi
+```
+
+### Installing the proc-macro server
+
+For local testing purposes, you can install the proc-macro server using the xtask command:
+
+```bash
+# Recommended: use the xtask command
+cargo xtask install --proc-macro-server
+```
+
+## Testing
+
+```bash
+cargo test --features sysroot-abi -p proc-macro-srv -p proc-macro-srv-cli -p proc-macro-api
+```
+
+The tests use a test proc macro dylib built by the `proc-macro-test` crate, which compiles a small proc macro implementation during build time.
+
+**Note**: Tests only compile on nightly toolchains (or with `RUSTC_BOOTSTRAP=1`).
+
+## Usage
+
+The binary requires the `RUST_ANALYZER_INTERNALS_DO_NOT_USE` environment variable to be set. This is intentional—the binary is an implementation detail of rust-analyzer and its API is still unstable:
+
+```bash
+RUST_ANALYZER_INTERNALS_DO_NOT_USE=1 rust-analyzer-proc-macro-srv --version
+```
+
+## Related Crates
+
+- `proc-macro-srv`: The core server library that handles loading dylibs and expanding macros, but not the RPC protocol.
+- `proc-macro-api`: The client library used by rust-analyzer to communicate with this server as well as the protocol definitions.
+- `proc-macro-test`: Test harness with sample proc macros for testing
+- `proc-macro-srv-cli`: The actual server binary that handles the RPC protocol.
diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs
index 813ac339a9..bdfdb50002 100644
--- a/crates/proc-macro-srv-cli/src/main.rs
+++ b/crates/proc-macro-srv-cli/src/main.rs
@@ -52,11 +52,16 @@ fn main() -> std::io::Result<()> {
enum ProtocolFormat {
JsonLegacy,
PostcardLegacy,
+ BidirectionalPostcardPrototype,
}
impl ValueEnum for ProtocolFormat {
fn value_variants<'a>() -> &'a [Self] {
- &[ProtocolFormat::JsonLegacy, ProtocolFormat::PostcardLegacy]
+ &[
+ ProtocolFormat::JsonLegacy,
+ ProtocolFormat::PostcardLegacy,
+ ProtocolFormat::BidirectionalPostcardPrototype,
+ ]
}
fn to_possible_value(&self) -> Option<clap::builder::PossibleValue> {
@@ -65,12 +70,18 @@ impl ValueEnum for ProtocolFormat {
ProtocolFormat::PostcardLegacy => {
Some(clap::builder::PossibleValue::new("postcard-legacy"))
}
+ ProtocolFormat::BidirectionalPostcardPrototype => {
+ Some(clap::builder::PossibleValue::new("bidirectional-postcard-prototype"))
+ }
}
}
fn from_str(input: &str, _ignore_case: bool) -> Result<Self, String> {
match input {
"json-legacy" => Ok(ProtocolFormat::JsonLegacy),
"postcard-legacy" => Ok(ProtocolFormat::PostcardLegacy),
+ "bidirectional-postcard-prototype" => {
+ Ok(ProtocolFormat::BidirectionalPostcardPrototype)
+ }
_ => Err(format!("unknown protocol format: {input}")),
}
}
diff --git a/crates/proc-macro-srv-cli/src/main_loop.rs b/crates/proc-macro-srv-cli/src/main_loop.rs
index df54f38cbc..b2f4b96bd2 100644
--- a/crates/proc-macro-srv-cli/src/main_loop.rs
+++ b/crates/proc-macro-srv-cli/src/main_loop.rs
@@ -1,24 +1,21 @@
//! The main loop of the proc-macro server.
-use std::io;
-
use proc_macro_api::{
Codec,
- legacy_protocol::{
- json::JsonProtocol,
- msg::{
- self, ExpandMacroData, ExpnGlobals, Message, SpanMode, SpanTransformer,
- deserialize_span_data_index_map, serialize_span_data_index_map,
- },
- postcard::PostcardProtocol,
- },
+ bidirectional_protocol::msg as bidirectional,
+ legacy_protocol::msg as legacy,
+ transport::codec::{json::JsonProtocol, postcard::PostcardProtocol},
version::CURRENT_API_VERSION,
};
+use std::io;
+
+use legacy::Message;
+
use proc_macro_srv::{EnvSnapshot, SpanId};
use crate::ProtocolFormat;
struct SpanTrans;
-impl SpanTransformer for SpanTrans {
+impl legacy::SpanTransformer for SpanTrans {
type Table = ();
type Span = SpanId;
fn token_id_of(
@@ -39,9 +36,260 @@ pub(crate) fn run(format: ProtocolFormat) -> io::Result<()> {
match format {
ProtocolFormat::JsonLegacy => run_::<JsonProtocol>(),
ProtocolFormat::PostcardLegacy => run_::<PostcardProtocol>(),
+ ProtocolFormat::BidirectionalPostcardPrototype => run_new::<PostcardProtocol>(),
+ }
+}
+
+fn run_new<C: Codec>() -> io::Result<()> {
+ fn macro_kind_to_api(kind: proc_macro_srv::ProcMacroKind) -> proc_macro_api::ProcMacroKind {
+ match kind {
+ proc_macro_srv::ProcMacroKind::CustomDerive => {
+ proc_macro_api::ProcMacroKind::CustomDerive
+ }
+ proc_macro_srv::ProcMacroKind::Bang => proc_macro_api::ProcMacroKind::Bang,
+ proc_macro_srv::ProcMacroKind::Attr => proc_macro_api::ProcMacroKind::Attr,
+ }
+ }
+
+ let mut buf = C::Buf::default();
+ let mut stdin = io::stdin();
+ let mut stdout = io::stdout();
+
+ let env_snapshot = EnvSnapshot::default();
+ let srv = proc_macro_srv::ProcMacroSrv::new(&env_snapshot);
+
+ let mut span_mode = legacy::SpanMode::Id;
+
+ 'outer: loop {
+ let req_opt =
+ bidirectional::BidirectionalMessage::read::<_, C>(&mut stdin.lock(), &mut buf)?;
+ let Some(req) = req_opt else {
+ break 'outer;
+ };
+
+ match req {
+ bidirectional::BidirectionalMessage::Request(request) => match request {
+ bidirectional::Request::ListMacros { dylib_path } => {
+ let res = srv.list_macros(&dylib_path).map(|macros| {
+ macros
+ .into_iter()
+ .map(|(name, kind)| (name, macro_kind_to_api(kind)))
+ .collect()
+ });
+
+ send_response::<C>(&stdout, bidirectional::Response::ListMacros(res))?;
+ }
+
+ bidirectional::Request::ApiVersionCheck {} => {
+ send_response::<C>(
+ &stdout,
+ bidirectional::Response::ApiVersionCheck(CURRENT_API_VERSION),
+ )?;
+ }
+
+ bidirectional::Request::SetConfig(config) => {
+ span_mode = config.span_mode;
+ send_response::<C>(&stdout, bidirectional::Response::SetConfig(config))?;
+ }
+ bidirectional::Request::ExpandMacro(task) => {
+ handle_expand::<C>(&srv, &mut stdin, &mut stdout, &mut buf, span_mode, *task)?;
+ }
+ },
+ _ => continue,
+ }
+ }
+
+ Ok(())
+}
+
+fn handle_expand<C: Codec>(
+ srv: &proc_macro_srv::ProcMacroSrv<'_>,
+ stdin: &io::Stdin,
+ stdout: &io::Stdout,
+ buf: &mut C::Buf,
+ span_mode: legacy::SpanMode,
+ task: bidirectional::ExpandMacro,
+) -> io::Result<()> {
+ match span_mode {
+ legacy::SpanMode::Id => handle_expand_id::<C>(srv, stdout, task),
+ legacy::SpanMode::RustAnalyzer => handle_expand_ra::<C>(srv, stdin, stdout, buf, task),
}
}
+fn handle_expand_id<C: Codec>(
+ srv: &proc_macro_srv::ProcMacroSrv<'_>,
+ stdout: &io::Stdout,
+ task: bidirectional::ExpandMacro,
+) -> io::Result<()> {
+ let bidirectional::ExpandMacro { lib, env, current_dir, data } = task;
+ let bidirectional::ExpandMacroData {
+ macro_body,
+ macro_name,
+ attributes,
+ has_global_spans: bidirectional::ExpnGlobals { def_site, call_site, mixed_site, .. },
+ ..
+ } = data;
+
+ let def_site = SpanId(def_site as u32);
+ let call_site = SpanId(call_site as u32);
+ let mixed_site = SpanId(mixed_site as u32);
+
+ let macro_body =
+ macro_body.to_tokenstream_unresolved::<SpanTrans>(CURRENT_API_VERSION, |_, b| b);
+ let attributes = attributes
+ .map(|it| it.to_tokenstream_unresolved::<SpanTrans>(CURRENT_API_VERSION, |_, b| b));
+
+ let res = srv
+ .expand(
+ lib,
+ &env,
+ current_dir,
+ &macro_name,
+ macro_body,
+ attributes,
+ def_site,
+ call_site,
+ mixed_site,
+ None,
+ )
+ .map(|it| {
+ legacy::FlatTree::from_tokenstream_raw::<SpanTrans>(it, call_site, CURRENT_API_VERSION)
+ })
+ .map_err(|e| legacy::PanicMessage(e.into_string().unwrap_or_default()));
+
+ send_response::<C>(&stdout, bidirectional::Response::ExpandMacro(res))
+}
+
+struct ProcMacroClientHandle<'a, C: Codec> {
+ stdin: &'a io::Stdin,
+ stdout: &'a io::Stdout,
+ buf: &'a mut C::Buf,
+}
+
+impl<'a, C: Codec> ProcMacroClientHandle<'a, C> {
+ fn roundtrip(
+ &mut self,
+ req: bidirectional::SubRequest,
+ ) -> Option<bidirectional::BidirectionalMessage> {
+ let msg = bidirectional::BidirectionalMessage::SubRequest(req);
+
+ if msg.write::<_, C>(&mut self.stdout.lock()).is_err() {
+ return None;
+ }
+
+ match bidirectional::BidirectionalMessage::read::<_, C>(&mut self.stdin.lock(), self.buf) {
+ Ok(Some(msg)) => Some(msg),
+ _ => None,
+ }
+ }
+}
+
+impl<C: Codec> proc_macro_srv::ProcMacroClientInterface for ProcMacroClientHandle<'_, C> {
+ fn file(&mut self, file_id: proc_macro_srv::span::FileId) -> String {
+ match self.roundtrip(bidirectional::SubRequest::FilePath { file_id: file_id.index() }) {
+ Some(bidirectional::BidirectionalMessage::SubResponse(
+ bidirectional::SubResponse::FilePathResult { name },
+ )) => name,
+ _ => String::new(),
+ }
+ }
+
+ fn source_text(
+ &mut self,
+ proc_macro_srv::span::Span { range, anchor, ctx: _ }: proc_macro_srv::span::Span,
+ ) -> Option<String> {
+ match self.roundtrip(bidirectional::SubRequest::SourceText {
+ file_id: anchor.file_id.as_u32(),
+ ast_id: anchor.ast_id.into_raw(),
+ start: range.start().into(),
+ end: range.end().into(),
+ }) {
+ Some(bidirectional::BidirectionalMessage::SubResponse(
+ bidirectional::SubResponse::SourceTextResult { text },
+ )) => text,
+ _ => None,
+ }
+ }
+
+ fn local_file(&mut self, file_id: proc_macro_srv::span::FileId) -> Option<String> {
+ match self.roundtrip(bidirectional::SubRequest::LocalFilePath { file_id: file_id.index() })
+ {
+ Some(bidirectional::BidirectionalMessage::SubResponse(
+ bidirectional::SubResponse::LocalFilePathResult { name },
+ )) => name,
+ _ => None,
+ }
+ }
+}
+
+fn handle_expand_ra<C: Codec>(
+ srv: &proc_macro_srv::ProcMacroSrv<'_>,
+ stdin: &io::Stdin,
+ stdout: &io::Stdout,
+ buf: &mut C::Buf,
+ task: bidirectional::ExpandMacro,
+) -> io::Result<()> {
+ let bidirectional::ExpandMacro {
+ lib,
+ env,
+ current_dir,
+ data:
+ bidirectional::ExpandMacroData {
+ macro_body,
+ macro_name,
+ attributes,
+ has_global_spans: bidirectional::ExpnGlobals { def_site, call_site, mixed_site, .. },
+ span_data_table,
+ },
+ } = task;
+
+ let mut span_data_table = legacy::deserialize_span_data_index_map(&span_data_table);
+
+ let def_site = span_data_table[def_site];
+ let call_site = span_data_table[call_site];
+ let mixed_site = span_data_table[mixed_site];
+
+ let macro_body =
+ macro_body.to_tokenstream_resolved(CURRENT_API_VERSION, &span_data_table, |a, b| {
+ srv.join_spans(a, b).unwrap_or(b)
+ });
+
+ let attributes = attributes.map(|it| {
+ it.to_tokenstream_resolved(CURRENT_API_VERSION, &span_data_table, |a, b| {
+ srv.join_spans(a, b).unwrap_or(b)
+ })
+ });
+
+ let res = srv
+ .expand(
+ lib,
+ &env,
+ current_dir,
+ &macro_name,
+ macro_body,
+ attributes,
+ def_site,
+ call_site,
+ mixed_site,
+ Some(&mut ProcMacroClientHandle::<C> { stdin, stdout, buf }),
+ )
+ .map(|it| {
+ (
+ legacy::FlatTree::from_tokenstream(
+ it,
+ CURRENT_API_VERSION,
+ call_site,
+ &mut span_data_table,
+ ),
+ legacy::serialize_span_data_index_map(&span_data_table),
+ )
+ })
+ .map(|(tree, span_data_table)| bidirectional::ExpandMacroExtended { tree, span_data_table })
+ .map_err(|e| legacy::PanicMessage(e.into_string().unwrap_or_default()));
+
+ send_response::<C>(&stdout, bidirectional::Response::ExpandMacroExtended(res))
+}
+
fn run_<C: Codec>() -> io::Result<()> {
fn macro_kind_to_api(kind: proc_macro_srv::ProcMacroKind) -> proc_macro_api::ProcMacroKind {
match kind {
@@ -54,38 +302,38 @@ fn run_<C: Codec>() -> io::Result<()> {
}
let mut buf = C::Buf::default();
- let mut read_request = || msg::Request::read::<_, C>(&mut io::stdin().lock(), &mut buf);
- let write_response = |msg: msg::Response| msg.write::<_, C>(&mut io::stdout().lock());
+ let mut read_request = || legacy::Request::read::<_, C>(&mut io::stdin().lock(), &mut buf);
+ let write_response = |msg: legacy::Response| msg.write::<_, C>(&mut io::stdout().lock());
let env = EnvSnapshot::default();
let srv = proc_macro_srv::ProcMacroSrv::new(&env);
- let mut span_mode = SpanMode::Id;
+ let mut span_mode = legacy::SpanMode::Id;
while let Some(req) = read_request()? {
let res = match req {
- msg::Request::ListMacros { dylib_path } => {
- msg::Response::ListMacros(srv.list_macros(&dylib_path).map(|macros| {
+ legacy::Request::ListMacros { dylib_path } => {
+ legacy::Response::ListMacros(srv.list_macros(&dylib_path).map(|macros| {
macros.into_iter().map(|(name, kind)| (name, macro_kind_to_api(kind))).collect()
}))
}
- msg::Request::ExpandMacro(task) => {
- let msg::ExpandMacro {
+ legacy::Request::ExpandMacro(task) => {
+ let legacy::ExpandMacro {
lib,
env,
current_dir,
data:
- ExpandMacroData {
+ legacy::ExpandMacroData {
macro_body,
macro_name,
attributes,
has_global_spans:
- ExpnGlobals { serialize: _, def_site, call_site, mixed_site },
+ legacy::ExpnGlobals { serialize: _, def_site, call_site, mixed_site },
span_data_table,
},
} = *task;
match span_mode {
- SpanMode::Id => msg::Response::ExpandMacro({
+ legacy::SpanMode::Id => legacy::Response::ExpandMacro({
let def_site = SpanId(def_site as u32);
let call_site = SpanId(call_site as u32);
let mixed_site = SpanId(mixed_site as u32);
@@ -106,19 +354,21 @@ fn run_<C: Codec>() -> io::Result<()> {
def_site,
call_site,
mixed_site,
+ None,
)
.map(|it| {
- msg::FlatTree::from_tokenstream_raw::<SpanTrans>(
+ legacy::FlatTree::from_tokenstream_raw::<SpanTrans>(
it,
call_site,
CURRENT_API_VERSION,
)
})
.map_err(|e| e.into_string().unwrap_or_default())
- .map_err(msg::PanicMessage)
+ .map_err(legacy::PanicMessage)
}),
- SpanMode::RustAnalyzer => msg::Response::ExpandMacroExtended({
- let mut span_data_table = deserialize_span_data_index_map(&span_data_table);
+ legacy::SpanMode::RustAnalyzer => legacy::Response::ExpandMacroExtended({
+ let mut span_data_table =
+ legacy::deserialize_span_data_index_map(&span_data_table);
let def_site = span_data_table[def_site];
let call_site = span_data_table[call_site];
@@ -146,31 +396,34 @@ fn run_<C: Codec>() -> io::Result<()> {
def_site,
call_site,
mixed_site,
+ None,
)
.map(|it| {
(
- msg::FlatTree::from_tokenstream(
+ legacy::FlatTree::from_tokenstream(
it,
CURRENT_API_VERSION,
call_site,
&mut span_data_table,
),
- serialize_span_data_index_map(&span_data_table),
+ legacy::serialize_span_data_index_map(&span_data_table),
)
})
- .map(|(tree, span_data_table)| msg::ExpandMacroExtended {
+ .map(|(tree, span_data_table)| legacy::ExpandMacroExtended {
tree,
span_data_table,
})
.map_err(|e| e.into_string().unwrap_or_default())
- .map_err(msg::PanicMessage)
+ .map_err(legacy::PanicMessage)
}),
}
}
- msg::Request::ApiVersionCheck {} => msg::Response::ApiVersionCheck(CURRENT_API_VERSION),
- msg::Request::SetConfig(config) => {
+ legacy::Request::ApiVersionCheck {} => {
+ legacy::Response::ApiVersionCheck(CURRENT_API_VERSION)
+ }
+ legacy::Request::SetConfig(config) => {
span_mode = config.span_mode;
- msg::Response::SetConfig(config)
+ legacy::Response::SetConfig(config)
}
};
write_response(res)?
@@ -178,3 +431,8 @@ fn run_<C: Codec>() -> io::Result<()> {
Ok(())
}
+
+fn send_response<C: Codec>(stdout: &io::Stdout, resp: bidirectional::Response) -> io::Result<()> {
+ let resp = bidirectional::BidirectionalMessage::Response(resp);
+ resp.write::<_, C>(&mut stdout.lock())
+}
diff --git a/crates/proc-macro-srv/src/bridge.rs b/crates/proc-macro-srv/src/bridge.rs
index fc063a07b5..fc62f9413a 100644
--- a/crates/proc-macro-srv/src/bridge.rs
+++ b/crates/proc-macro-srv/src/bridge.rs
@@ -1,6 +1,6 @@
//! `proc_macro::bridge` newtypes.
-use proc_macro::bridge as pm_bridge;
+use rustc_proc_macro::bridge as pm_bridge;
pub use pm_bridge::{DelimSpan, Diagnostic, ExpnGlobals, LitKind};
diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs
index 03433197b7..9a65538675 100644
--- a/crates/proc-macro-srv/src/dylib.rs
+++ b/crates/proc-macro-srv/src/dylib.rs
@@ -3,7 +3,7 @@
mod proc_macros;
mod version;
-use proc_macro::bridge;
+use rustc_proc_macro::bridge;
use std::{fmt, fs, io, time::SystemTime};
use temp_dir::TempDir;
@@ -12,8 +12,8 @@ use object::Object;
use paths::{Utf8Path, Utf8PathBuf};
use crate::{
- PanicMessage, ProcMacroKind, ProcMacroSrvSpan, dylib::proc_macros::ProcMacros,
- token_stream::TokenStream,
+ PanicMessage, ProcMacroClientHandle, ProcMacroKind, ProcMacroSrvSpan,
+ dylib::proc_macros::ProcMacros, token_stream::TokenStream,
};
pub(crate) struct Expander {
@@ -37,7 +37,7 @@ impl Expander {
Ok(Expander { inner: library, modified_time })
}
- pub(crate) fn expand<S: ProcMacroSrvSpan>(
+ pub(crate) fn expand<'a, S: ProcMacroSrvSpan + 'a>(
&self,
macro_name: &str,
macro_body: TokenStream<S>,
@@ -45,13 +45,14 @@ impl Expander {
def_site: S,
call_site: S,
mixed_site: S,
+ callback: Option<ProcMacroClientHandle<'_>>,
) -> Result<TokenStream<S>, PanicMessage>
where
- <S::Server as bridge::server::Types>::TokenStream: Default,
+ <S::Server<'a> as bridge::server::Server>::TokenStream: Default,
{
self.inner
.proc_macros
- .expand(macro_name, macro_body, attribute, def_site, call_site, mixed_site)
+ .expand(macro_name, macro_body, attribute, def_site, call_site, mixed_site, callback)
}
pub(crate) fn list_macros(&self) -> impl Iterator<Item = (&str, ProcMacroKind)> {
diff --git a/crates/proc-macro-srv/src/dylib/proc_macros.rs b/crates/proc-macro-srv/src/dylib/proc_macros.rs
index c879c7609d..76c5097101 100644
--- a/crates/proc-macro-srv/src/dylib/proc_macros.rs
+++ b/crates/proc-macro-srv/src/dylib/proc_macros.rs
@@ -1,8 +1,6 @@
//! Proc macro ABI
-
-use proc_macro::bridge;
-
-use crate::{ProcMacroKind, ProcMacroSrvSpan, token_stream::TokenStream};
+use crate::{ProcMacroClientHandle, ProcMacroKind, ProcMacroSrvSpan, token_stream::TokenStream};
+use rustc_proc_macro::bridge;
#[repr(transparent)]
pub(crate) struct ProcMacros([bridge::client::ProcMacro]);
@@ -22,6 +20,7 @@ impl ProcMacros {
def_site: S,
call_site: S,
mixed_site: S,
+ callback: Option<ProcMacroClientHandle<'_>>,
) -> Result<TokenStream<S>, crate::PanicMessage> {
let parsed_attributes = attribute.unwrap_or_default();
@@ -32,7 +31,7 @@ impl ProcMacros {
{
let res = client.run(
&bridge::server::SameThread,
- S::make_server(call_site, def_site, mixed_site),
+ S::make_server(call_site, def_site, mixed_site, callback),
macro_body,
cfg!(debug_assertions),
);
@@ -41,7 +40,7 @@ impl ProcMacros {
bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => {
let res = client.run(
&bridge::server::SameThread,
- S::make_server(call_site, def_site, mixed_site),
+ S::make_server(call_site, def_site, mixed_site, callback),
macro_body,
cfg!(debug_assertions),
);
@@ -50,7 +49,7 @@ impl ProcMacros {
bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => {
let res = client.run(
&bridge::server::SameThread,
- S::make_server(call_site, def_site, mixed_site),
+ S::make_server(call_site, def_site, mixed_site, callback),
parsed_attributes,
macro_body,
cfg!(debug_assertions),
diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs
index 93319df824..920d58b4e9 100644
--- a/crates/proc-macro-srv/src/lib.rs
+++ b/crates/proc-macro-srv/src/lib.rs
@@ -22,9 +22,9 @@
)]
#![deny(deprecated_safe, clippy::undocumented_unsafe_blocks)]
-extern crate proc_macro;
#[cfg(feature = "in-rust-tree")]
extern crate rustc_driver as _;
+extern crate rustc_proc_macro;
#[cfg(not(feature = "in-rust-tree"))]
extern crate ra_ap_rustc_lexer as rustc_lexer;
@@ -52,7 +52,8 @@ use temp_dir::TempDir;
pub use crate::server_impl::token_id::SpanId;
-pub use proc_macro::Delimiter;
+pub use rustc_proc_macro::Delimiter;
+pub use span;
pub use crate::bridge::*;
pub use crate::server_impl::literal_from_str;
@@ -91,6 +92,14 @@ impl<'env> ProcMacroSrv<'env> {
}
}
+pub type ProcMacroClientHandle<'a> = &'a mut (dyn ProcMacroClientInterface + Sync + Send);
+
+pub trait ProcMacroClientInterface {
+ fn file(&mut self, file_id: span::FileId) -> String;
+ fn source_text(&mut self, span: Span) -> Option<String>;
+ fn local_file(&mut self, file_id: span::FileId) -> Option<String>;
+}
+
const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;
impl ProcMacroSrv<'_> {
@@ -105,6 +114,7 @@ impl ProcMacroSrv<'_> {
def_site: S,
call_site: S,
mixed_site: S,
+ callback: Option<ProcMacroClientHandle<'_>>,
) -> Result<token_stream::TokenStream<S>, PanicMessage> {
let snapped_env = self.env;
let expander = self.expander(lib.as_ref()).map_err(|err| PanicMessage {
@@ -120,8 +130,10 @@ impl ProcMacroSrv<'_> {
.stack_size(EXPANDER_STACK_SIZE)
.name(macro_name.to_owned())
.spawn_scoped(s, move || {
- expander
- .expand(macro_name, macro_body, attribute, def_site, call_site, mixed_site)
+ expander.expand(
+ macro_name, macro_body, attribute, def_site, call_site, mixed_site,
+ callback,
+ )
});
match thread.unwrap().join() {
Ok(res) => res,
@@ -169,30 +181,50 @@ impl ProcMacroSrv<'_> {
}
pub trait ProcMacroSrvSpan: Copy + Send + Sync {
- type Server: proc_macro::bridge::server::Server<TokenStream = crate::token_stream::TokenStream<Self>>;
- fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server;
+ type Server<'a>: rustc_proc_macro::bridge::server::Server<
+ TokenStream = crate::token_stream::TokenStream<Self>,
+ >;
+ fn make_server<'a>(
+ call_site: Self,
+ def_site: Self,
+ mixed_site: Self,
+ callback: Option<ProcMacroClientHandle<'a>>,
+ ) -> Self::Server<'a>;
}
impl ProcMacroSrvSpan for SpanId {
- type Server = server_impl::token_id::SpanIdServer;
-
- fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
+ type Server<'a> = server_impl::token_id::SpanIdServer<'a>;
+
+ fn make_server<'a>(
+ call_site: Self,
+ def_site: Self,
+ mixed_site: Self,
+ callback: Option<ProcMacroClientHandle<'a>>,
+ ) -> Self::Server<'a> {
Self::Server {
call_site,
def_site,
mixed_site,
+ callback,
tracked_env_vars: Default::default(),
tracked_paths: Default::default(),
}
}
}
+
impl ProcMacroSrvSpan for Span {
- type Server = server_impl::rust_analyzer_span::RaSpanServer;
- fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
+ type Server<'a> = server_impl::rust_analyzer_span::RaSpanServer<'a>;
+ fn make_server<'a>(
+ call_site: Self,
+ def_site: Self,
+ mixed_site: Self,
+ callback: Option<ProcMacroClientHandle<'a>>,
+ ) -> Self::Server<'a> {
Self::Server {
call_site,
def_site,
mixed_site,
+ callback,
tracked_env_vars: Default::default(),
tracked_paths: Default::default(),
}
diff --git a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
index 7c685c2da7..eacb100fbc 100644
--- a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
+++ b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
@@ -10,17 +10,16 @@ use std::{
};
use intern::Symbol;
-use proc_macro::bridge::server;
+use rustc_proc_macro::bridge::server;
use span::{FIXUP_ERASED_FILE_AST_ID_MARKER, Span, TextRange, TextSize};
use crate::{
+ ProcMacroClientHandle,
bridge::{Diagnostic, ExpnGlobals, Literal, TokenTree},
server_impl::literal_from_str,
};
-pub struct FreeFunctions;
-
-pub struct RaSpanServer {
+pub struct RaSpanServer<'a> {
// FIXME: Report this back to the caller to track as dependencies
pub tracked_env_vars: HashMap<Box<str>, Option<Box<str>>>,
// FIXME: Report this back to the caller to track as dependencies
@@ -28,16 +27,30 @@ pub struct RaSpanServer {
pub call_site: Span,
pub def_site: Span,
pub mixed_site: Span,
+ pub callback: Option<ProcMacroClientHandle<'a>>,
}
-impl server::Types for RaSpanServer {
- type FreeFunctions = FreeFunctions;
+impl server::Server for RaSpanServer<'_> {
type TokenStream = crate::token_stream::TokenStream<Span>;
type Span = Span;
type Symbol = Symbol;
-}
-impl server::FreeFunctions for RaSpanServer {
+ fn globals(&mut self) -> ExpnGlobals<Self::Span> {
+ ExpnGlobals {
+ def_site: self.def_site,
+ call_site: self.call_site,
+ mixed_site: self.mixed_site,
+ }
+ }
+
+ fn intern_symbol(ident: &str) -> Self::Symbol {
+ Symbol::intern(ident)
+ }
+
+ fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
+ f(symbol.as_str())
+ }
+
fn injected_env_var(&mut self, _: &str) -> Option<std::string::String> {
None
}
@@ -56,13 +69,19 @@ impl server::FreeFunctions for RaSpanServer {
fn emit_diagnostic(&mut self, _: Diagnostic<Self::Span>) {
// FIXME handle diagnostic
}
-}
-impl server::TokenStream for RaSpanServer {
- fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ fn ts_drop(&mut self, stream: Self::TokenStream) {
+ drop(stream);
+ }
+
+ fn ts_clone(&mut self, stream: &Self::TokenStream) -> Self::TokenStream {
+ stream.clone()
+ }
+
+ fn ts_is_empty(&mut self, stream: &Self::TokenStream) -> bool {
stream.is_empty()
}
- fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ fn ts_from_str(&mut self, src: &str) -> Self::TokenStream {
Self::TokenStream::from_str(src, self.call_site).unwrap_or_else(|e| {
Self::TokenStream::from_str(
&format!("compile_error!(\"failed to parse str to token stream: {e}\")"),
@@ -71,15 +90,15 @@ impl server::TokenStream for RaSpanServer {
.unwrap()
})
}
- fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ fn ts_to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
}
- fn from_token_tree(&mut self, tree: TokenTree<Self::Span>) -> Self::TokenStream {
+ fn ts_from_token_tree(&mut self, tree: TokenTree<Self::Span>) -> Self::TokenStream {
Self::TokenStream::new(vec![tree])
}
- fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ fn ts_expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
// FIXME: requires db, more importantly this requires name resolution so we would need to
// eagerly expand this proc-macro, but we can't know that this proc-macro is eager until we
// expand it ...
@@ -88,7 +107,7 @@ impl server::TokenStream for RaSpanServer {
Ok(self_.clone())
}
- fn concat_trees(
+ fn ts_concat_trees(
&mut self,
base: Option<Self::TokenStream>,
trees: Vec<TokenTree<Self::Span>>,
@@ -104,7 +123,7 @@ impl server::TokenStream for RaSpanServer {
}
}
- fn concat_streams(
+ fn ts_concat_streams(
&mut self,
base: Option<Self::TokenStream>,
streams: Vec<Self::TokenStream>,
@@ -116,30 +135,26 @@ impl server::TokenStream for RaSpanServer {
stream
}
- fn into_trees(&mut self, stream: Self::TokenStream) -> Vec<TokenTree<Self::Span>> {
+ fn ts_into_trees(&mut self, stream: Self::TokenStream) -> Vec<TokenTree<Self::Span>> {
(*stream.0).clone()
}
-}
-impl server::Span for RaSpanServer {
- fn debug(&mut self, span: Self::Span) -> String {
+ fn span_debug(&mut self, span: Self::Span) -> String {
format!("{:?}", span)
}
- fn file(&mut self, _: Self::Span) -> String {
- // FIXME
- String::new()
+ fn span_file(&mut self, span: Self::Span) -> String {
+ self.callback.as_mut().map(|cb| cb.file(span.anchor.file_id.file_id())).unwrap_or_default()
}
- fn local_file(&mut self, _: Self::Span) -> Option<String> {
- // FIXME
- None
+ fn span_local_file(&mut self, span: Self::Span) -> Option<String> {
+ self.callback.as_mut().and_then(|cb| cb.local_file(span.anchor.file_id.file_id()))
}
- fn save_span(&mut self, _span: Self::Span) -> usize {
+ fn span_save_span(&mut self, _span: Self::Span) -> usize {
// FIXME, quote is incompatible with third-party tools
// This is called by the quote proc-macro which is expanded when the proc-macro is compiled
// As such, r-a will never observe this
0
}
- fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ fn span_recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
// FIXME, quote is incompatible with third-party tools
// This is called by the expansion of quote!, r-a will observe this, but we don't have
// access to the spans that were encoded
@@ -149,24 +164,23 @@ impl server::Span for RaSpanServer {
///
/// See PR:
/// https://github.com/rust-lang/rust/pull/55780
- fn source_text(&mut self, _span: Self::Span) -> Option<String> {
- // FIXME requires db, needs special handling wrt fixup spans
- None
+ fn span_source_text(&mut self, span: Self::Span) -> Option<String> {
+ self.callback.as_mut()?.source_text(span)
}
- fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ fn span_parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
// FIXME requires db, looks up the parent call site
None
}
- fn source(&mut self, span: Self::Span) -> Self::Span {
+ fn span_source(&mut self, span: Self::Span) -> Self::Span {
// FIXME requires db, returns the top level call site
span
}
- fn byte_range(&mut self, span: Self::Span) -> Range<usize> {
+ fn span_byte_range(&mut self, span: Self::Span) -> Range<usize> {
// FIXME requires db to resolve the ast id, THIS IS NOT INCREMENTAL
Range { start: span.range.start().into(), end: span.range.end().into() }
}
- fn join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
+ fn span_join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
// We can't modify the span range for fixup spans, those are meaningful to fixup, so just
// prefer the non-fixup span.
if first.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER {
@@ -194,7 +208,7 @@ impl server::Span for RaSpanServer {
ctx: second.ctx,
})
}
- fn subspan(
+ fn span_subspan(
&mut self,
span: Self::Span,
start: Bound<usize>,
@@ -238,11 +252,11 @@ impl server::Span for RaSpanServer {
})
}
- fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span {
+ fn span_resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span {
Span { ctx: at.ctx, ..span }
}
- fn end(&mut self, span: Self::Span) -> Self::Span {
+ fn span_end(&mut self, span: Self::Span) -> Self::Span {
// We can't modify the span range for fixup spans, those are meaningful to fixup.
if span.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER {
return span;
@@ -250,7 +264,7 @@ impl server::Span for RaSpanServer {
Span { range: TextRange::empty(span.range.end()), ..span }
}
- fn start(&mut self, span: Self::Span) -> Self::Span {
+ fn span_start(&mut self, span: Self::Span) -> Self::Span {
// We can't modify the span range for fixup spans, those are meaningful to fixup.
if span.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER {
return span;
@@ -258,38 +272,18 @@ impl server::Span for RaSpanServer {
Span { range: TextRange::empty(span.range.start()), ..span }
}
- fn line(&mut self, _span: Self::Span) -> usize {
+ fn span_line(&mut self, _span: Self::Span) -> usize {
// FIXME requires db to resolve line index, THIS IS NOT INCREMENTAL
1
}
- fn column(&mut self, _span: Self::Span) -> usize {
+ fn span_column(&mut self, _span: Self::Span) -> usize {
// FIXME requires db to resolve line index, THIS IS NOT INCREMENTAL
1
}
-}
-impl server::Symbol for RaSpanServer {
- fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> {
+ fn symbol_normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> {
// FIXME: nfc-normalize and validate idents
Ok(<Self as server::Server>::intern_symbol(string))
}
}
-
-impl server::Server for RaSpanServer {
- fn globals(&mut self) -> ExpnGlobals<Self::Span> {
- ExpnGlobals {
- def_site: self.def_site,
- call_site: self.call_site,
- mixed_site: self.mixed_site,
- }
- }
-
- fn intern_symbol(ident: &str) -> Self::Symbol {
- Symbol::intern(ident)
- }
-
- fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
- f(symbol.as_str())
- }
-}
diff --git a/crates/proc-macro-srv/src/server_impl/token_id.rs b/crates/proc-macro-srv/src/server_impl/token_id.rs
index 5ac263b9d5..70484c4dc2 100644
--- a/crates/proc-macro-srv/src/server_impl/token_id.rs
+++ b/crates/proc-macro-srv/src/server_impl/token_id.rs
@@ -6,9 +6,10 @@ use std::{
};
use intern::Symbol;
-use proc_macro::bridge::server;
+use rustc_proc_macro::bridge::server;
use crate::{
+ ProcMacroClientHandle,
bridge::{Diagnostic, ExpnGlobals, Literal, TokenTree},
server_impl::literal_from_str,
};
@@ -24,9 +25,7 @@ impl std::fmt::Debug for SpanId {
type Span = SpanId;
-pub struct FreeFunctions;
-
-pub struct SpanIdServer {
+pub struct SpanIdServer<'a> {
// FIXME: Report this back to the caller to track as dependencies
pub tracked_env_vars: HashMap<Box<str>, Option<Box<str>>>,
// FIXME: Report this back to the caller to track as dependencies
@@ -34,16 +33,30 @@ pub struct SpanIdServer {
pub call_site: Span,
pub def_site: Span,
pub mixed_site: Span,
+ pub callback: Option<ProcMacroClientHandle<'a>>,
}
-impl server::Types for SpanIdServer {
- type FreeFunctions = FreeFunctions;
+impl server::Server for SpanIdServer<'_> {
type TokenStream = crate::token_stream::TokenStream<Span>;
type Span = Span;
type Symbol = Symbol;
-}
-impl server::FreeFunctions for SpanIdServer {
+ fn globals(&mut self) -> ExpnGlobals<Self::Span> {
+ ExpnGlobals {
+ def_site: self.def_site,
+ call_site: self.call_site,
+ mixed_site: self.mixed_site,
+ }
+ }
+
+ fn intern_symbol(ident: &str) -> Self::Symbol {
+ Symbol::intern(ident)
+ }
+
+ fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
+ f(symbol.as_str())
+ }
+
fn injected_env_var(&mut self, _: &str) -> Option<std::string::String> {
None
}
@@ -59,13 +72,19 @@ impl server::FreeFunctions for SpanIdServer {
}
fn emit_diagnostic(&mut self, _: Diagnostic<Self::Span>) {}
-}
-impl server::TokenStream for SpanIdServer {
- fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ fn ts_drop(&mut self, stream: Self::TokenStream) {
+ drop(stream);
+ }
+
+ fn ts_clone(&mut self, stream: &Self::TokenStream) -> Self::TokenStream {
+ stream.clone()
+ }
+
+ fn ts_is_empty(&mut self, stream: &Self::TokenStream) -> bool {
stream.is_empty()
}
- fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ fn ts_from_str(&mut self, src: &str) -> Self::TokenStream {
Self::TokenStream::from_str(src, self.call_site).unwrap_or_else(|e| {
Self::TokenStream::from_str(
&format!("compile_error!(\"failed to parse str to token stream: {e}\")"),
@@ -74,18 +93,18 @@ impl server::TokenStream for SpanIdServer {
.unwrap()
})
}
- fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ fn ts_to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
}
- fn from_token_tree(&mut self, tree: TokenTree<Self::Span>) -> Self::TokenStream {
+ fn ts_from_token_tree(&mut self, tree: TokenTree<Self::Span>) -> Self::TokenStream {
Self::TokenStream::new(vec![tree])
}
- fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ fn ts_expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
Ok(self_.clone())
}
- fn concat_trees(
+ fn ts_concat_trees(
&mut self,
base: Option<Self::TokenStream>,
trees: Vec<TokenTree<Self::Span>>,
@@ -101,7 +120,7 @@ impl server::TokenStream for SpanIdServer {
}
}
- fn concat_streams(
+ fn ts_concat_streams(
&mut self,
base: Option<Self::TokenStream>,
streams: Vec<Self::TokenStream>,
@@ -113,49 +132,47 @@ impl server::TokenStream for SpanIdServer {
stream
}
- fn into_trees(&mut self, stream: Self::TokenStream) -> Vec<TokenTree<Self::Span>> {
+ fn ts_into_trees(&mut self, stream: Self::TokenStream) -> Vec<TokenTree<Self::Span>> {
(*stream.0).clone()
}
-}
-impl server::Span for SpanIdServer {
- fn debug(&mut self, span: Self::Span) -> String {
+ fn span_debug(&mut self, span: Self::Span) -> String {
format!("{:?}", span.0)
}
- fn file(&mut self, _span: Self::Span) -> String {
+ fn span_file(&mut self, _span: Self::Span) -> String {
String::new()
}
- fn local_file(&mut self, _span: Self::Span) -> Option<String> {
+ fn span_local_file(&mut self, _span: Self::Span) -> Option<String> {
None
}
- fn save_span(&mut self, _span: Self::Span) -> usize {
+ fn span_save_span(&mut self, _span: Self::Span) -> usize {
0
}
- fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ fn span_recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
self.call_site
}
/// Recent feature, not yet in the proc_macro
///
/// See PR:
/// https://github.com/rust-lang/rust/pull/55780
- fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ fn span_source_text(&mut self, _span: Self::Span) -> Option<String> {
None
}
- fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ fn span_parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
None
}
- fn source(&mut self, span: Self::Span) -> Self::Span {
+ fn span_source(&mut self, span: Self::Span) -> Self::Span {
span
}
- fn byte_range(&mut self, _span: Self::Span) -> Range<usize> {
+ fn span_byte_range(&mut self, _span: Self::Span) -> Range<usize> {
Range { start: 0, end: 0 }
}
- fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+ fn span_join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
// Just return the first span again, because some macros will unwrap the result.
Some(first)
}
- fn subspan(
+ fn span_subspan(
&mut self,
span: Self::Span,
_start: Bound<usize>,
@@ -164,48 +181,28 @@ impl server::Span for SpanIdServer {
// Just return the span again, because some macros will unwrap the result.
Some(span)
}
- fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+ fn span_resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
self.call_site
}
- fn end(&mut self, _self_: Self::Span) -> Self::Span {
+ fn span_end(&mut self, _self_: Self::Span) -> Self::Span {
self.call_site
}
- fn start(&mut self, _self_: Self::Span) -> Self::Span {
+ fn span_start(&mut self, _self_: Self::Span) -> Self::Span {
self.call_site
}
- fn line(&mut self, _span: Self::Span) -> usize {
+ fn span_line(&mut self, _span: Self::Span) -> usize {
1
}
- fn column(&mut self, _span: Self::Span) -> usize {
+ fn span_column(&mut self, _span: Self::Span) -> usize {
1
}
-}
-impl server::Symbol for SpanIdServer {
- fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> {
+ fn symbol_normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> {
// FIXME: nfc-normalize and validate idents
Ok(<Self as server::Server>::intern_symbol(string))
}
}
-
-impl server::Server for SpanIdServer {
- fn globals(&mut self) -> ExpnGlobals<Self::Span> {
- ExpnGlobals {
- def_site: self.def_site,
- call_site: self.call_site,
- mixed_site: self.mixed_site,
- }
- }
-
- fn intern_symbol(ident: &str) -> Self::Symbol {
- Symbol::intern(ident)
- }
-
- fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
- f(symbol.as_str())
- }
-}
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index 1b12308ad6..61fcd810b1 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -59,8 +59,9 @@ fn assert_expand_impl(
let input_ts_string = format!("{input_ts:?}");
let attr_ts_string = attr_ts.as_ref().map(|it| format!("{it:?}"));
- let res =
- expander.expand(macro_name, input_ts, attr_ts, def_site, call_site, mixed_site).unwrap();
+ let res = expander
+ .expand(macro_name, input_ts, attr_ts, def_site, call_site, mixed_site, None)
+ .unwrap();
expect.assert_eq(&format!(
"{input_ts_string}{}{}{}",
if attr_ts_string.is_some() { "\n\n" } else { "" },
@@ -91,7 +92,8 @@ fn assert_expand_impl(
let fixture_string = format!("{fixture:?}");
let attr_string = attr.as_ref().map(|it| format!("{it:?}"));
- let res = expander.expand(macro_name, fixture, attr, def_site, call_site, mixed_site).unwrap();
+ let res =
+ expander.expand(macro_name, fixture, attr, def_site, call_site, mixed_site, None).unwrap();
expect_spanned.assert_eq(&format!(
"{fixture_string}{}{}{}",
if attr_string.is_some() { "\n\n" } else { "" },
diff --git a/crates/proc-macro-srv/src/token_stream.rs b/crates/proc-macro-srv/src/token_stream.rs
index 36827d2561..2358f6963c 100644
--- a/crates/proc-macro-srv/src/token_stream.rs
+++ b/crates/proc-macro-srv/src/token_stream.rs
@@ -4,8 +4,8 @@ use core::fmt;
use std::{mem, sync::Arc};
use intern::Symbol;
-use proc_macro::Delimiter;
use rustc_lexer::{DocStyle, LiteralKind};
+use rustc_proc_macro::Delimiter;
use crate::bridge::{DelimSpan, Group, Ident, LitKind, Literal, Punct, TokenTree};
@@ -52,7 +52,7 @@ impl<S> TokenStream<S> {
S: SpanLike + Copy,
{
let mut groups = Vec::new();
- groups.push((proc_macro::Delimiter::None, 0..0, vec![]));
+ groups.push((rustc_proc_macro::Delimiter::None, 0..0, vec![]));
let mut offset = 0;
let mut tokens = rustc_lexer::tokenize(s, rustc_lexer::FrontmatterAllowed::No).peekable();
while let Some(token) = tokens.next() {
@@ -102,7 +102,7 @@ impl<S> TokenStream<S> {
};
match token.kind {
rustc_lexer::TokenKind::OpenParen => {
- groups.push((proc_macro::Delimiter::Parenthesis, range, vec![]))
+ groups.push((rustc_proc_macro::Delimiter::Parenthesis, range, vec![]))
}
rustc_lexer::TokenKind::CloseParen if *open_delim != Delimiter::Parenthesis => {
return if *open_delim == Delimiter::None {
@@ -130,7 +130,7 @@ impl<S> TokenStream<S> {
);
}
rustc_lexer::TokenKind::OpenBrace => {
- groups.push((proc_macro::Delimiter::Brace, range, vec![]))
+ groups.push((rustc_proc_macro::Delimiter::Brace, range, vec![]))
}
rustc_lexer::TokenKind::CloseBrace if *open_delim != Delimiter::Brace => {
return if *open_delim == Delimiter::None {
@@ -158,7 +158,7 @@ impl<S> TokenStream<S> {
);
}
rustc_lexer::TokenKind::OpenBracket => {
- groups.push((proc_macro::Delimiter::Bracket, range, vec![]))
+ groups.push((rustc_proc_macro::Delimiter::Bracket, range, vec![]))
}
rustc_lexer::TokenKind::CloseBracket if *open_delim != Delimiter::Bracket => {
return if *open_delim == Delimiter::None {
@@ -460,10 +460,10 @@ fn display_token_tree<S>(
f,
"{}",
match delimiter {
- proc_macro::Delimiter::Parenthesis => "(",
- proc_macro::Delimiter::Brace => "{",
- proc_macro::Delimiter::Bracket => "[",
- proc_macro::Delimiter::None => "",
+ rustc_proc_macro::Delimiter::Parenthesis => "(",
+ rustc_proc_macro::Delimiter::Brace => "{",
+ rustc_proc_macro::Delimiter::Bracket => "[",
+ rustc_proc_macro::Delimiter::None => "",
}
)?;
if let Some(stream) = stream {
@@ -473,10 +473,10 @@ fn display_token_tree<S>(
f,
"{}",
match delimiter {
- proc_macro::Delimiter::Parenthesis => ")",
- proc_macro::Delimiter::Brace => "}",
- proc_macro::Delimiter::Bracket => "]",
- proc_macro::Delimiter::None => "",
+ rustc_proc_macro::Delimiter::Parenthesis => ")",
+ rustc_proc_macro::Delimiter::Brace => "}",
+ rustc_proc_macro::Delimiter::Bracket => "]",
+ rustc_proc_macro::Delimiter::None => "",
}
)?;
}
@@ -587,16 +587,16 @@ fn debug_token_tree<S: fmt::Debug>(
f,
"GROUP {}{} {:#?} {:#?} {:#?}",
match delimiter {
- proc_macro::Delimiter::Parenthesis => "(",
- proc_macro::Delimiter::Brace => "{",
- proc_macro::Delimiter::Bracket => "[",
- proc_macro::Delimiter::None => "$",
+ rustc_proc_macro::Delimiter::Parenthesis => "(",
+ rustc_proc_macro::Delimiter::Brace => "{",
+ rustc_proc_macro::Delimiter::Bracket => "[",
+ rustc_proc_macro::Delimiter::None => "$",
},
match delimiter {
- proc_macro::Delimiter::Parenthesis => ")",
- proc_macro::Delimiter::Brace => "}",
- proc_macro::Delimiter::Bracket => "]",
- proc_macro::Delimiter::None => "$",
+ rustc_proc_macro::Delimiter::Parenthesis => ")",
+ rustc_proc_macro::Delimiter::Brace => "}",
+ rustc_proc_macro::Delimiter::Bracket => "]",
+ rustc_proc_macro::Delimiter::None => "$",
},
span.open,
span.close,
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 76256b0a22..a02d1a7856 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -354,11 +354,10 @@ impl flags::AnalysisStats {
self.run_term_search(&workspace, db, &vfs, &file_ids, verbosity);
}
- hir::clear_tls_solver_cache();
- unsafe { hir::collect_ty_garbage() };
-
let db = host.raw_database_mut();
db.trigger_lru_eviction();
+ hir::clear_tls_solver_cache();
+ unsafe { hir::collect_ty_garbage() };
let total_span = analysis_sw.elapsed();
eprintln!("{:<20} {total_span}", "Total:");
@@ -693,21 +692,24 @@ impl flags::AnalysisStats {
let mut sw = self.stop_watch();
let mut all = 0;
let mut fail = 0;
- for &body_id in bodies {
+ for &body in bodies {
bar.set_message(move || {
- format!("mir lowering: {}", full_name(db, body_id, body_id.module(db)))
+ format!("mir lowering: {}", full_name(db, body, body.module(db)))
});
bar.inc(1);
- if matches!(body_id, DefWithBody::Variant(_)) {
+ if matches!(body, DefWithBody::Variant(_)) {
continue;
}
- let module = body_id.module(db);
- if !self.should_process(db, body_id, module) {
+ let module = body.module(db);
+ if !self.should_process(db, body, module) {
continue;
}
all += 1;
- let Err(e) = db.mir_body(body_id.into()) else {
+ let Ok(body_id) = body.try_into() else {
+ continue;
+ };
+ let Err(e) = db.mir_body(body_id) else {
continue;
};
if verbosity.is_spammy() {
@@ -716,7 +718,7 @@ impl flags::AnalysisStats {
.into_iter()
.rev()
.filter_map(|it| it.name(db))
- .chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
+ .chain(Some(body.name(db).unwrap_or_else(Name::missing)))
.map(|it| it.display(db, Edition::LATEST).to_string())
.join("::");
bar.println(format!("Mir body for {full_name} failed due {e:?}"));
@@ -747,11 +749,12 @@ impl flags::AnalysisStats {
if self.parallel {
let mut inference_sw = self.stop_watch();
+ let bodies = bodies.iter().filter_map(|&body| body.try_into().ok()).collect::<Vec<_>>();
bodies
.par_iter()
.map_with(db.clone(), |snap, &body| {
- snap.body(body.into());
- InferenceResult::for_body(snap, body.into());
+ snap.body(body);
+ InferenceResult::for_body(snap, body);
})
.count();
eprintln!("{:<20} {}", "Parallel Inference:", inference_sw.elapsed());
@@ -769,6 +772,7 @@ impl flags::AnalysisStats {
let mut num_pat_type_mismatches = 0;
let mut panics = 0;
for &body_id in bodies {
+ let Ok(body_def_id) = body_id.try_into() else { continue };
let name = body_id.name(db).unwrap_or_else(Name::missing);
let module = body_id.module(db);
let display_target = module.krate(db).to_display_target(db);
@@ -807,9 +811,9 @@ impl flags::AnalysisStats {
bar.println(msg());
}
bar.set_message(msg);
- let body = db.body(body_id.into());
+ let body = db.body(body_def_id);
let inference_result =
- catch_unwind(AssertUnwindSafe(|| InferenceResult::for_body(db, body_id.into())));
+ catch_unwind(AssertUnwindSafe(|| InferenceResult::for_body(db, body_def_id)));
let inference_result = match inference_result {
Ok(inference_result) => inference_result,
Err(p) => {
@@ -826,7 +830,7 @@ impl flags::AnalysisStats {
}
};
// This query is LRU'd, so actually calling it will skew the timing results.
- let sm = || db.body_with_source_map(body_id.into()).1;
+ let sm = || db.body_with_source_map(body_def_id).1;
// region:expressions
let (previous_exprs, previous_unknown, previous_partially_unknown) =
@@ -1081,6 +1085,7 @@ impl flags::AnalysisStats {
let mut sw = self.stop_watch();
bar.tick();
for &body_id in bodies {
+ let Ok(body_def_id) = body_id.try_into() else { continue };
let module = body_id.module(db);
if !self.should_process(db, body_id, module) {
continue;
@@ -1114,7 +1119,7 @@ impl flags::AnalysisStats {
bar.println(msg());
}
bar.set_message(msg);
- db.body(body_id.into());
+ db.body(body_def_id);
bar.inc(1);
}
@@ -1188,6 +1193,7 @@ impl flags::AnalysisStats {
style_lints: false,
term_search_fuel: 400,
term_search_borrowck: true,
+ show_rename_conflicts: true,
},
ide::AssistResolveStrategy::All,
analysis.editioned_file_id_to_vfs(file_id),
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index eb28a47ec0..249566d2ac 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -185,7 +185,7 @@ impl Tester {
if !worker.is_finished() {
// attempt to cancel the worker, won't work for chalk hangs unfortunately
- self.host.request_cancellation();
+ self.host.trigger_garbage_collection();
}
worker.join().and_then(identity)
});
diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs
index 529cf12082..3918683145 100644
--- a/crates/rust-analyzer/src/cli/ssr.rs
+++ b/crates/rust-analyzer/src/cli/ssr.rs
@@ -68,7 +68,7 @@ impl flags::Search {
match_finder.add_search_pattern(pattern)?;
}
if let Some(debug_snippet) = &self.debug {
- for &root in ide_db::symbol_index::LocalRoots::get(db).roots(db).iter() {
+ for &root in ide_db::LocalRoots::get(db).roots(db).iter() {
let sr = db.source_root(root).source_root(db);
for file_id in sr.iter() {
for debug_info in match_finder.debug_where_text_equal(
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 2371f7a656..e39569e108 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -98,13 +98,6 @@ config_data! {
/// Code's `files.watcherExclude`.
files_exclude | files_excludeDirs: Vec<Utf8PathBuf> = vec![],
- /// This config controls the frequency in which rust-analyzer will perform its internal Garbage
- /// Collection. It is specified in revisions, roughly equivalent to number of changes. The default
- /// is 1000.
- ///
- /// Setting a smaller value may help limit peak memory usage at the expense of speed.
- gc_frequency: usize = 1000,
-
/// If this is `true`, when "Goto Implementations" and in "Implementations" lens, are triggered on a `struct` or `enum` or `union`, we filter out trait implementations that originate from `derive`s above the type.
gotoImplementations_filterAdjacentDerives: bool = false,
@@ -732,6 +725,9 @@ config_data! {
///
/// E.g. `use ::std::io::Read;`.
imports_prefixExternPrelude: bool = false,
+
+ /// Whether to warn when a rename will cause conflicts (change the meaning of the code).
+ rename_showConflicts: bool = true,
}
}
@@ -908,8 +904,24 @@ config_data! {
/// This config takes a map of crate names with the exported proc-macro names to ignore as values.
procMacro_ignored: FxHashMap<Box<str>, Box<[Box<str>]>> = FxHashMap::default(),
+ /// Subcommand used for bench runnables instead of `bench`.
+ runnables_bench_command: String = "bench".to_owned(),
+ /// Override the command used for bench runnables.
+ /// The first element of the array should be the program to execute (for example, `cargo`).
+ ///
+ /// Use the placeholders `${package}`, `${target_arg}`, `${target}`, `${test_name}` to dynamically
+ /// replace the package name, target option (such as `--bin` or `--example`), the target name and
+ /// the test name (name of test function or test mod path).
+ runnables_bench_overrideCommand: Option<Vec<String>> = None,
/// Command to be executed instead of 'cargo' for runnables.
runnables_command: Option<String> = None,
+ /// Override the command used for bench runnables.
+ /// The first element of the array should be the program to execute (for example, `cargo`).
+ ///
+ /// Use the placeholders `${package}`, `${target_arg}`, `${target}`, `${test_name}` to dynamically
+ /// replace the package name, target option (such as `--bin` or `--example`), the target name and
+ /// the test name (name of test function or test mod path).
+ runnables_doctest_overrideCommand: Option<Vec<String>> = None,
/// Additional arguments to be passed to cargo for runnables such as
/// tests or binaries. For example, it may be `--release`.
runnables_extraArgs: Vec<String> = vec![],
@@ -921,6 +933,15 @@ config_data! {
/// they will end up being interpreted as options to
/// [`rustc`’s built-in test harness (“libtest”)](https://doc.rust-lang.org/rustc/tests/index.html#cli-arguments).
runnables_extraTestBinaryArgs: Vec<String> = vec!["--nocapture".to_owned()],
+ /// Subcommand used for test runnables instead of `test`.
+ runnables_test_command: String = "test".to_owned(),
+ /// Override the command used for test runnables.
+ /// The first element of the array should be the program to execute (for example, `cargo`).
+ ///
+ /// Use the placeholders `${package}`, `${target_arg}`, `${target}`, `${test_name}` to dynamically
+ /// replace the package name, target option (such as `--bin` or `--example`), the target name and
+ /// the test name (name of test function or test mod path).
+ runnables_test_overrideCommand: Option<Vec<String>> = None,
/// Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
/// projects, or "discover" to try to automatically find it if the `rustc-dev` component
@@ -1572,6 +1593,16 @@ pub struct RunnablesConfig {
pub cargo_extra_args: Vec<String>,
/// Additional arguments for the binary being run, if it is a test or benchmark.
pub extra_test_binary_args: Vec<String>,
+ /// Subcommand used for doctest runnables instead of `test`.
+ pub test_command: String,
+ /// Override the command used for test runnables.
+ pub test_override_command: Option<Vec<String>>,
+ /// Subcommand used for doctest runnables instead of `bench`.
+ pub bench_command: String,
+ /// Override the command used for bench runnables.
+ pub bench_override_command: Option<Vec<String>>,
+ /// Override the command used for doctest runnables.
+ pub doc_test_override_command: Option<Vec<String>>,
}
/// Configuration for workspace symbol search requests.
@@ -1709,10 +1740,6 @@ impl Config {
&self.caps
}
- pub fn gc_freq(&self) -> usize {
- *self.gc_frequency()
- }
-
pub fn assist(&self, source_root: Option<SourceRootId>) -> AssistConfig {
AssistConfig {
snippet_cap: self.snippet_cap(),
@@ -1731,6 +1758,7 @@ impl Config {
ExprFillDefaultDef::Underscore => ExprFillDefaultMode::Underscore,
},
prefer_self_ty: *self.assist_preferSelf(source_root),
+ show_rename_conflicts: *self.rename_showConflicts(source_root),
}
}
@@ -1739,6 +1767,7 @@ impl Config {
prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
prefer_absolute: self.imports_prefixExternPrelude(source_root).to_owned(),
+ show_conflicts: *self.rename_showConflicts(source_root),
}
}
@@ -1838,6 +1867,7 @@ impl Config {
style_lints: self.diagnostics_styleLints_enable(source_root).to_owned(),
term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64,
term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(),
+ show_rename_conflicts: *self.rename_showConflicts(source_root),
}
}
@@ -2499,6 +2529,11 @@ impl Config {
override_cargo: self.runnables_command(source_root).clone(),
cargo_extra_args: self.runnables_extraArgs(source_root).clone(),
extra_test_binary_args: self.runnables_extraTestBinaryArgs(source_root).clone(),
+ test_command: self.runnables_test_command(source_root).clone(),
+ test_override_command: self.runnables_test_overrideCommand(source_root).clone(),
+ bench_command: self.runnables_bench_command(source_root).clone(),
+ bench_override_command: self.runnables_bench_overrideCommand(source_root).clone(),
+ doc_test_override_command: self.runnables_doctest_overrideCommand(source_root).clone(),
}
}
diff --git a/crates/rust-analyzer/src/flycheck.rs b/crates/rust-analyzer/src/flycheck.rs
index 14a4a1752f..b062641691 100644
--- a/crates/rust-analyzer/src/flycheck.rs
+++ b/crates/rust-analyzer/src/flycheck.rs
@@ -147,13 +147,13 @@ pub(crate) struct FlycheckHandle {
sender: Sender<StateChange>,
_thread: stdx::thread::JoinHandle,
id: usize,
- generation: AtomicUsize,
+ generation: Arc<AtomicUsize>,
}
impl FlycheckHandle {
pub(crate) fn spawn(
id: usize,
- generation: DiagnosticsGeneration,
+ generation: Arc<AtomicUsize>,
sender: Sender<FlycheckMessage>,
config: FlycheckConfig,
sysroot_root: Option<AbsPathBuf>,
@@ -163,7 +163,7 @@ impl FlycheckHandle {
) -> FlycheckHandle {
let actor = FlycheckActor::new(
id,
- generation,
+ generation.load(Ordering::Relaxed),
sender,
config,
sysroot_root,
@@ -176,7 +176,7 @@ impl FlycheckHandle {
stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker, format!("Flycheck{id}"))
.spawn(move || actor.run(receiver))
.expect("failed to spawn thread");
- FlycheckHandle { id, generation: generation.into(), sender, _thread: thread }
+ FlycheckHandle { id, generation, sender, _thread: thread }
}
/// Schedule a re-start of the cargo check worker to do a workspace wide check.
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index 41783584a9..9beab3c0e4 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -15,7 +15,7 @@ use hir::ChangeWithProcMacros;
use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId};
use ide_db::{
MiniCore,
- base_db::{Crate, ProcMacroPaths, SourceDatabase},
+ base_db::{Crate, ProcMacroPaths, SourceDatabase, salsa::Revision},
};
use itertools::Itertools;
use load_cargo::SourceRootConfig;
@@ -193,15 +193,14 @@ pub(crate) struct GlobalState {
/// which will usually end up causing a bunch of incorrect diagnostics on startup.
pub(crate) incomplete_crate_graph: bool,
- pub(crate) revisions_until_next_gc: usize,
-
pub(crate) minicore: MiniCoreRustAnalyzerInternalOnly,
+ pub(crate) last_gc_revision: Revision,
}
// FIXME: This should move to the VFS once the rewrite is done.
#[derive(Debug, Clone, Default)]
pub(crate) struct MiniCoreRustAnalyzerInternalOnly {
- pub(crate) minicore_text: Option<String>,
+ pub(crate) minicore_text: Option<Arc<str>>,
}
/// An immutable snapshot of the world's state at a point in time.
@@ -258,6 +257,8 @@ impl GlobalState {
let (discover_sender, discover_receiver) = unbounded();
+ let last_gc_revision = analysis_host.raw_database().nonce_and_revision().1;
+
let mut this = GlobalState {
sender,
req_queue: ReqQueue::default(),
@@ -321,8 +322,7 @@ impl GlobalState {
incomplete_crate_graph: false,
minicore: MiniCoreRustAnalyzerInternalOnly::default(),
-
- revisions_until_next_gc: config.gc_freq(),
+ last_gc_revision,
};
// Apply any required database inputs from the config.
this.update_configuration(config);
@@ -347,11 +347,11 @@ impl GlobalState {
let (change, modified_rust_files, workspace_structure_change) =
self.cancellation_pool.scoped(|s| {
- // start cancellation in parallel, this will kick off lru eviction
+ // start cancellation in parallel,
// allowing us to do meaningful work while waiting
let analysis_host = AssertUnwindSafe(&mut self.analysis_host);
s.spawn(thread::ThreadIntent::LatencySensitive, || {
- { analysis_host }.0.request_cancellation()
+ { analysis_host }.0.trigger_cancellation()
});
// downgrade to read lock to allow more readers while we are normalizing text
@@ -440,14 +440,6 @@ impl GlobalState {
self.analysis_host.apply_change(change);
- if self.revisions_until_next_gc == 0 {
- // SAFETY: Just changed some database inputs, all queries were canceled.
- unsafe { hir::collect_ty_garbage() };
- self.revisions_until_next_gc = self.config.gc_freq();
- } else {
- self.revisions_until_next_gc -= 1;
- }
-
if !modified_ratoml_files.is_empty()
|| !self.config.same_source_root_parent_map(&self.local_roots_parent_map)
{
@@ -741,7 +733,7 @@ impl GlobalState {
impl Drop for GlobalState {
fn drop(&mut self) {
- self.analysis_host.request_cancellation();
+ self.analysis_host.trigger_cancellation();
}
}
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index 4d97505768..ad07da7759 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -808,7 +808,11 @@ pub(crate) fn handle_will_rename_files(
}
})
.filter_map(|(file_id, new_name)| {
- snap.analysis.will_rename_file(file_id?, &new_name).ok()?
+ let file_id = file_id?;
+ let source_root = snap.analysis.source_root_id(file_id).ok();
+ snap.analysis
+ .will_rename_file(file_id, &new_name, &snap.config.rename(source_root))
+ .ok()?
})
.collect();
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index 38ee9cbe7f..c61825b99f 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -363,6 +363,7 @@ fn integrated_diagnostics_benchmark() {
prefer_absolute: false,
term_search_fuel: 400,
term_search_borrowck: true,
+ show_rename_conflicts: true,
};
host.analysis()
.full_diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id)
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs
index 971ae2a601..4a37bb34ab 100644
--- a/crates/rust-analyzer/src/lib.rs
+++ b/crates/rust-analyzer/src/lib.rs
@@ -16,6 +16,9 @@ extern crate rustc_driver as _;
extern crate ra_ap_rustc_type_ir as rustc_type_ir;
+/*
+ If you bump this, grep for `FIXME(MINIMUM_SUPPORTED_TOOLCHAIN_VERSION)` to check for old support code we can drop
+*/
/// Any toolchain less than this version will likely not work with rust-analyzer built from this revision.
pub const MINIMUM_SUPPORTED_TOOLCHAIN_VERSION: semver::Version = semver::Version {
major: 1,
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index 86a35c7d11..6f0f57725f 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -1561,6 +1561,9 @@ pub(crate) fn runnable(
let target = spec.target.clone();
+ let override_command =
+ CargoTargetSpec::override_command(snap, Some(spec.clone()), &runnable.kind);
+
let (cargo_args, executable_args) = CargoTargetSpec::runnable_args(
snap,
Some(spec.clone()),
@@ -1576,23 +1579,41 @@ pub(crate) fn runnable(
let label = runnable.label(Some(&target));
let location = location_link(snap, None, runnable.nav)?;
- Ok(Some(lsp_ext::Runnable {
- label,
- location: Some(location),
- kind: lsp_ext::RunnableKind::Cargo,
- args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs {
- workspace_root: Some(workspace_root.into()),
- override_cargo: config.override_cargo,
- cargo_args,
- cwd: cwd.into(),
- executable_args,
- environment: spec
- .sysroot_root
- .map(|root| ("RUSTC_TOOLCHAIN".to_owned(), root.to_string()))
- .into_iter()
- .collect(),
+ let environment = spec
+ .sysroot_root
+ .map(|root| ("RUSTC_TOOLCHAIN".to_owned(), root.to_string()))
+ .into_iter()
+ .collect();
+
+ Ok(match override_command {
+ Some(override_command) => match override_command.split_first() {
+ Some((program, args)) => Some(lsp_ext::Runnable {
+ label,
+ location: Some(location),
+ kind: lsp_ext::RunnableKind::Shell,
+ args: lsp_ext::RunnableArgs::Shell(lsp_ext::ShellRunnableArgs {
+ environment,
+ cwd: cwd.into(),
+ program: program.to_string(),
+ args: args.to_vec(),
+ }),
+ }),
+ _ => None,
+ },
+ None => Some(lsp_ext::Runnable {
+ label,
+ location: Some(location),
+ kind: lsp_ext::RunnableKind::Cargo,
+ args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs {
+ workspace_root: Some(workspace_root.into()),
+ override_cargo: config.override_cargo,
+ cargo_args,
+ cwd: cwd.into(),
+ executable_args,
+ environment,
+ }),
}),
- }))
+ })
}
Some(TargetSpec::ProjectJson(spec)) => {
let label = runnable.label(Some(&spec.label));
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 6e08b7bb88..dd0813c144 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -9,7 +9,7 @@ use std::{
};
use crossbeam_channel::{Receiver, never, select};
-use ide_db::base_db::{SourceDatabase, VfsPath, salsa::Database as _};
+use ide_db::base_db::{SourceDatabase, VfsPath};
use lsp_server::{Connection, Notification, Request};
use lsp_types::{TextDocumentIdentifier, notification::Notification as _};
use stdx::thread::ThreadIntent;
@@ -383,7 +383,7 @@ impl GlobalState {
));
}
PrimeCachesProgress::End { cancelled } => {
- self.analysis_host.raw_database_mut().trigger_lru_eviction();
+ self.analysis_host.trigger_garbage_collection();
self.prime_caches_queue.op_completed(());
if cancelled {
self.prime_caches_queue
@@ -535,6 +535,16 @@ impl GlobalState {
if project_or_mem_docs_changed && self.config.test_explorer() {
self.update_tests();
}
+
+ let current_revision = self.analysis_host.raw_database().nonce_and_revision().1;
+ // no work is currently being done, now we can block a bit and clean up our garbage
+ if self.task_pool.handle.is_empty()
+ && self.fmt_pool.handle.is_empty()
+ && current_revision != self.last_gc_revision
+ {
+ self.analysis_host.trigger_garbage_collection();
+ self.last_gc_revision = current_revision;
+ }
}
self.cleanup_discover_handles();
@@ -907,7 +917,8 @@ impl GlobalState {
// Not a lot of bad can happen from mistakenly identifying `minicore`, so proceed with that.
self.minicore.minicore_text = contents
.as_ref()
- .and_then(|contents| String::from_utf8(contents.clone()).ok());
+ .and_then(|contents| str::from_utf8(contents).ok())
+ .map(triomphe::Arc::from);
}
let path = VfsPath::from(path);
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 317c112365..e3a5ee2219 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -13,7 +13,7 @@
//! project is currently loading and we don't have a full project model, we
//! still want to respond to various requests.
// FIXME: This is a mess that needs some untangling work
-use std::{iter, mem};
+use std::{iter, mem, sync::atomic::AtomicUsize};
use hir::{ChangeWithProcMacros, ProcMacrosBuilder, db::DefDatabase};
use ide_db::{
@@ -866,12 +866,13 @@ impl GlobalState {
let invocation_strategy = config.invocation_strategy();
let next_gen =
self.flycheck.iter().map(FlycheckHandle::generation).max().unwrap_or_default() + 1;
+ let generation = Arc::new(AtomicUsize::new(next_gen));
self.flycheck = match invocation_strategy {
crate::flycheck::InvocationStrategy::Once => {
vec![FlycheckHandle::spawn(
0,
- next_gen,
+ generation.clone(),
sender.clone(),
config,
None,
@@ -915,7 +916,7 @@ impl GlobalState {
.map(|(id, (root, manifest_path, target_dir), sysroot_root)| {
FlycheckHandle::spawn(
id,
- next_gen,
+ generation.clone(),
sender.clone(),
config.clone(),
sysroot_root,
diff --git a/crates/rust-analyzer/src/target_spec.rs b/crates/rust-analyzer/src/target_spec.rs
index e532d15553..e0f95a7830 100644
--- a/crates/rust-analyzer/src/target_spec.rs
+++ b/crates/rust-analyzer/src/target_spec.rs
@@ -123,7 +123,7 @@ impl CargoTargetSpec {
match kind {
RunnableKind::Test { test_id, attr } => {
- cargo_args.push("test".to_owned());
+ cargo_args.push(config.test_command);
executable_args.push(test_id.to_string());
if let TestId::Path(_) = test_id {
executable_args.push("--exact".to_owned());
@@ -134,12 +134,12 @@ impl CargoTargetSpec {
}
}
RunnableKind::TestMod { path } => {
- cargo_args.push("test".to_owned());
+ cargo_args.push(config.test_command);
executable_args.push(path.clone());
executable_args.extend(extra_test_binary_args);
}
RunnableKind::Bench { test_id } => {
- cargo_args.push("bench".to_owned());
+ cargo_args.push(config.bench_command);
executable_args.push(test_id.to_string());
if let TestId::Path(_) = test_id {
executable_args.push("--exact".to_owned());
@@ -154,10 +154,12 @@ impl CargoTargetSpec {
}
RunnableKind::Bin => {
let subcommand = match spec {
- Some(CargoTargetSpec { target_kind: TargetKind::Test, .. }) => "test",
- _ => "run",
+ Some(CargoTargetSpec { target_kind: TargetKind::Test, .. }) => {
+ config.test_command
+ }
+ _ => "run".to_owned(),
};
- cargo_args.push(subcommand.to_owned());
+ cargo_args.push(subcommand);
}
}
@@ -206,6 +208,53 @@ impl CargoTargetSpec {
(cargo_args, executable_args)
}
+ pub(crate) fn override_command(
+ snap: &GlobalStateSnapshot,
+ spec: Option<CargoTargetSpec>,
+ kind: &RunnableKind,
+ ) -> Option<Vec<String>> {
+ let config = snap.config.runnables(None);
+ let (args, test_name) = match kind {
+ RunnableKind::Test { test_id, .. } => {
+ (config.test_override_command, Some(test_id.to_string()))
+ }
+ RunnableKind::TestMod { path } => (config.test_override_command, Some(path.clone())),
+ RunnableKind::Bench { test_id } => {
+ (config.bench_override_command, Some(test_id.to_string()))
+ }
+ RunnableKind::DocTest { test_id } => {
+ (config.doc_test_override_command, Some(test_id.to_string()))
+ }
+ RunnableKind::Bin => match spec {
+ Some(CargoTargetSpec { target_kind: TargetKind::Test, .. }) => {
+ (config.test_override_command, None)
+ }
+ _ => (None, None),
+ },
+ };
+ let test_name = test_name.unwrap_or_default();
+
+ let target_arg = |kind| match kind {
+ TargetKind::Bin => "--bin",
+ TargetKind::Test => "--test",
+ TargetKind::Bench => "--bench",
+ TargetKind::Example => "--example",
+ TargetKind::Lib { .. } => "--lib",
+ TargetKind::BuildScript | TargetKind::Other => "",
+ };
+
+ let replace_placeholders = |arg: String| match &spec {
+ Some(spec) => arg
+ .replace("${package}", &spec.package)
+ .replace("${target_arg}", target_arg(spec.target_kind))
+ .replace("${target}", &spec.target)
+ .replace("${test_name}", &test_name),
+ _ => arg,
+ };
+
+ args.map(|args| args.into_iter().map(replace_placeholders).collect())
+ }
+
pub(crate) fn push_to(self, buf: &mut Vec<String>, kind: &RunnableKind) {
buf.push("--package".to_owned());
buf.push(self.package);
diff --git a/crates/rust-analyzer/src/task_pool.rs b/crates/rust-analyzer/src/task_pool.rs
index ef0feb1796..8b8876b801 100644
--- a/crates/rust-analyzer/src/task_pool.rs
+++ b/crates/rust-analyzer/src/task_pool.rs
@@ -43,6 +43,10 @@ impl<T> TaskPool<T> {
pub(crate) fn len(&self) -> usize {
self.pool.len()
}
+
+ pub(crate) fn is_empty(&self) -> bool {
+ self.pool.is_empty()
+ }
}
/// `DeferredTaskQueue` holds deferred tasks.
diff --git a/crates/span/src/ast_id.rs b/crates/span/src/ast_id.rs
index e54e0bd2fc..599b3c7175 100644
--- a/crates/span/src/ast_id.rs
+++ b/crates/span/src/ast_id.rs
@@ -603,8 +603,9 @@ impl AstIdMap {
// After all, the block will then contain the *outer* item, so we allocate
// an ID for it anyway.
let mut blocks = Vec::new();
- let mut curr_layer = vec![(node.clone(), None)];
- let mut next_layer = vec![];
+ let mut curr_layer = Vec::with_capacity(32);
+ curr_layer.push((node.clone(), None));
+ let mut next_layer = Vec::with_capacity(32);
while !curr_layer.is_empty() {
curr_layer.drain(..).for_each(|(node, parent_idx)| {
let mut preorder = node.preorder();
@@ -776,6 +777,48 @@ impl AstIdMap {
}
}
+#[cfg(not(no_salsa_async_drops))]
+impl Drop for AstIdMap {
+ fn drop(&mut self) {
+ let arena = std::mem::take(&mut self.arena);
+ let ptr_map = std::mem::take(&mut self.ptr_map);
+ let id_map = std::mem::take(&mut self.id_map);
+ static AST_ID_MAP_DROP_THREAD: std::sync::OnceLock<
+ std::sync::mpsc::Sender<(
+ Arena<(SyntaxNodePtr, ErasedFileAstId)>,
+ hashbrown::HashTable<ArenaId>,
+ hashbrown::HashTable<ArenaId>,
+ )>,
+ > = std::sync::OnceLock::new();
+ AST_ID_MAP_DROP_THREAD
+ .get_or_init(|| {
+ let (sender, receiver) = std::sync::mpsc::channel::<(
+ Arena<(SyntaxNodePtr, ErasedFileAstId)>,
+ hashbrown::HashTable<ArenaId>,
+ hashbrown::HashTable<ArenaId>,
+ )>();
+ std::thread::Builder::new()
+ .name("AstIdMapDropper".to_owned())
+ .spawn(move || {
+ loop {
+ // block on a receive
+ _ = receiver.recv();
+ // then drain the entire channel
+ while receiver.try_recv().is_ok() {}
+ // and sleep for a bit
+ std::thread::sleep(std::time::Duration::from_millis(100));
+ }
+ // why do this over just a `receiver.iter().for_each(drop)`? To reduce contention on the channel lock.
+ // otherwise this thread will constantly wake up and sleep again.
+ })
+ .unwrap();
+ sender
+ })
+ .send((arena, ptr_map, id_map))
+ .unwrap();
+ }
+}
+
#[inline]
fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 {
FxBuildHasher.hash_one(ptr)
diff --git a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs
index 6805417177..fdfa94dfee 100644
--- a/crates/span/src/hygiene.rs
+++ b/crates/span/src/hygiene.rs
@@ -19,9 +19,8 @@
//! # The Call-site Hierarchy
//!
//! `ExpnData::call_site` in rustc, [`MacroCallLoc::call_site`] in rust-analyzer.
-use std::fmt;
-
use crate::Edition;
+use std::fmt;
/// A syntax context describes a hierarchy tracking order of macro definitions.
#[cfg(feature = "salsa")]
@@ -47,7 +46,7 @@ const _: () = {
edition: Edition,
parent: SyntaxContext,
opaque: SyntaxContext,
- opaque_and_semitransparent: SyntaxContext,
+ opaque_and_semiopaque: SyntaxContext,
}
impl PartialEq for SyntaxContextData {
@@ -215,7 +214,7 @@ const _: () = {
edition: T2,
parent: T3,
opaque: impl FnOnce(SyntaxContext) -> SyntaxContext,
- opaque_and_semitransparent: impl FnOnce(SyntaxContext) -> SyntaxContext,
+ opaque_and_semiopaque: impl FnOnce(SyntaxContext) -> SyntaxContext,
) -> Self
where
Db: ?Sized + salsa::Database,
@@ -242,7 +241,7 @@ const _: () = {
edition: zalsa_::interned::Lookup::into_owned(data.2),
parent: zalsa_::interned::Lookup::into_owned(data.3),
opaque: opaque(zalsa_::FromId::from_id(id)),
- opaque_and_semitransparent: opaque_and_semitransparent(
+ opaque_and_semiopaque: opaque_and_semiopaque(
zalsa_::FromId::from_id(id),
),
},
@@ -302,7 +301,7 @@ const _: () = {
}
}
- /// This context, but with all transparent and semi-transparent expansions filtered away.
+ /// This context, but with all transparent and semi-opaque expansions filtered away.
pub fn opaque<Db>(self, db: &'db Db) -> SyntaxContext
where
Db: ?Sized + zalsa_::Database,
@@ -318,7 +317,7 @@ const _: () = {
}
/// This context, but with all transparent expansions filtered away.
- pub fn opaque_and_semitransparent<Db>(self, db: &'db Db) -> SyntaxContext
+ pub fn opaque_and_semiopaque<Db>(self, db: &'db Db) -> SyntaxContext
where
Db: ?Sized + zalsa_::Database,
{
@@ -326,7 +325,7 @@ const _: () = {
Some(id) => {
let zalsa = db.zalsa();
let fields = SyntaxContext::ingredient(zalsa).data(zalsa, id);
- fields.opaque_and_semitransparent
+ fields.opaque_and_semiopaque
}
None => self,
}
@@ -406,7 +405,7 @@ impl<'db> SyntaxContext {
#[inline]
pub fn normalize_to_macro_rules(self, db: &'db dyn salsa::Database) -> SyntaxContext {
- self.opaque_and_semitransparent(db)
+ self.opaque_and_semiopaque(db)
}
pub fn is_opaque(self, db: &'db dyn salsa::Database) -> bool {
@@ -477,13 +476,13 @@ pub enum Transparency {
/// Identifier produced by a transparent expansion is always resolved at call-site.
/// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
Transparent,
- /// Identifier produced by a semi-transparent expansion may be resolved
+ /// Identifier produced by a semi-opaque expansion may be resolved
/// either at call-site or at definition-site.
/// If it's a local variable, label or `$crate` then it's resolved at def-site.
/// Otherwise it's resolved at call-site.
/// `macro_rules` macros behave like this, built-in macros currently behave like this too,
/// but that's an implementation detail.
- SemiTransparent,
+ SemiOpaque,
/// Identifier produced by an opaque expansion is always resolved at definition-site.
/// Def-site spans in procedural macros, identifiers from `macro` by default use this.
Opaque,
diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs
index 1a8aaeb715..bfe7b2620d 100644
--- a/crates/span/src/lib.rs
+++ b/crates/span/src/lib.rs
@@ -24,8 +24,6 @@ pub use syntax::Edition;
pub use text_size::{TextRange, TextSize};
pub use vfs::FileId;
-pub type Span = SpanData<SyntaxContext>;
-
impl Span {
pub fn cover(self, other: Span) -> Span {
if self.anchor != other.anchor {
@@ -61,13 +59,17 @@ impl Span {
}
Some(Span { range: self.range.cover(other.range), anchor: other.anchor, ctx: other.ctx })
}
+
+ pub fn eq_ignoring_ctx(self, other: Self) -> bool {
+ self.anchor == other.anchor && self.range == other.range
+ }
}
/// Spans represent a region of code, used by the IDE to be able link macro inputs and outputs
/// together. Positions in spans are relative to some [`SpanAnchor`] to make them more incremental
/// friendly.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
-pub struct SpanData<Ctx> {
+pub struct Span {
/// The text range of this span, relative to the anchor.
/// We need the anchor for incrementality, as storing absolute ranges will require
/// recomputation on every change in a file at all times.
@@ -75,10 +77,10 @@ pub struct SpanData<Ctx> {
/// The anchor this span is relative to.
pub anchor: SpanAnchor,
/// The syntax context of the span.
- pub ctx: Ctx,
+ pub ctx: SyntaxContext,
}
-impl<Ctx: fmt::Debug> fmt::Debug for SpanData<Ctx> {
+impl fmt::Debug for Span {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if f.alternate() {
fmt::Debug::fmt(&self.anchor.file_id.file_id().index(), f)?;
@@ -98,12 +100,6 @@ impl<Ctx: fmt::Debug> fmt::Debug for SpanData<Ctx> {
}
}
-impl<Ctx: Copy> SpanData<Ctx> {
- pub fn eq_ignoring_ctx(self, other: Self) -> bool {
- self.anchor == other.anchor && self.range == other.range
- }
-}
-
impl fmt::Display for Span {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.anchor.file_id.file_id().index(), f)?;
diff --git a/crates/span/src/map.rs b/crates/span/src/map.rs
index d14c497474..dc7d471aa0 100644
--- a/crates/span/src/map.rs
+++ b/crates/span/src/map.rs
@@ -6,24 +6,21 @@ use std::{fmt, hash::Hash};
use stdx::{always, itertools::Itertools};
use crate::{
- EditionedFileId, ErasedFileAstId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SpanData,
- SyntaxContext, TextRange, TextSize,
+ EditionedFileId, ErasedFileAstId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext,
+ TextRange, TextSize,
};
/// Maps absolute text ranges for the corresponding file to the relevant span data.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
-pub struct SpanMap<S> {
+pub struct SpanMap {
/// The offset stored here is the *end* of the node.
- spans: Vec<(TextSize, SpanData<S>)>,
+ spans: Vec<(TextSize, Span)>,
/// Index of the matched macro arm on successful expansion for declarative macros.
// FIXME: Does it make sense to have this here?
pub matched_arm: Option<u32>,
}
-impl<S> SpanMap<S>
-where
- SpanData<S>: Copy,
-{
+impl SpanMap {
/// Creates a new empty [`SpanMap`].
pub fn empty() -> Self {
Self { spans: Vec::new(), matched_arm: None }
@@ -40,7 +37,7 @@ where
}
/// Pushes a new span onto the [`SpanMap`].
- pub fn push(&mut self, offset: TextSize, span: SpanData<S>) {
+ pub fn push(&mut self, offset: TextSize, span: Span) {
if cfg!(debug_assertions)
&& let Some(&(last_offset, _)) = self.spans.last()
{
@@ -57,11 +54,8 @@ where
/// Note this does a linear search through the entire backing vector.
pub fn ranges_with_span_exact(
&self,
- span: SpanData<S>,
- ) -> impl Iterator<Item = (TextRange, S)> + '_
- where
- S: Copy,
- {
+ span: Span,
+ ) -> impl Iterator<Item = (TextRange, SyntaxContext)> + '_ {
self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| {
if !s.eq_ignoring_ctx(span) {
return None;
@@ -74,10 +68,10 @@ where
/// Returns all [`TextRange`]s whose spans contain the given span.
///
/// Note this does a linear search through the entire backing vector.
- pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = (TextRange, S)> + '_
- where
- S: Copy,
- {
+ pub fn ranges_with_span(
+ &self,
+ span: Span,
+ ) -> impl Iterator<Item = (TextRange, SyntaxContext)> + '_ {
self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| {
if s.anchor != span.anchor {
return None;
@@ -91,28 +85,28 @@ where
}
/// Returns the span at the given position.
- pub fn span_at(&self, offset: TextSize) -> SpanData<S> {
+ pub fn span_at(&self, offset: TextSize) -> Span {
let entry = self.spans.partition_point(|&(it, _)| it <= offset);
self.spans[entry].1
}
/// Returns the spans associated with the given range.
/// In other words, this will return all spans that correspond to all offsets within the given range.
- pub fn spans_for_range(&self, range: TextRange) -> impl Iterator<Item = SpanData<S>> + '_ {
+ pub fn spans_for_range(&self, range: TextRange) -> impl Iterator<Item = Span> + '_ {
let (start, end) = (range.start(), range.end());
let start_entry = self.spans.partition_point(|&(it, _)| it <= start);
let end_entry = self.spans[start_entry..].partition_point(|&(it, _)| it <= end); // FIXME: this might be wrong?
self.spans[start_entry..][..end_entry].iter().map(|&(_, s)| s)
}
- pub fn iter(&self) -> impl Iterator<Item = (TextSize, SpanData<S>)> + '_ {
+ pub fn iter(&self) -> impl Iterator<Item = (TextSize, Span)> + '_ {
self.spans.iter().copied()
}
/// Merges this span map with another span map, where `other` is inserted at (and replaces) `other_range`.
///
/// The length of the replacement node needs to be `other_size`.
- pub fn merge(&mut self, other_range: TextRange, other_size: TextSize, other: &SpanMap<S>) {
+ pub fn merge(&mut self, other_range: TextRange, other_size: TextSize, other: &SpanMap) {
// I find the following diagram helpful to illustrate the bounds and why we use `<` or `<=`:
// --------------------------------------------------------------------
// 1 3 5 6 7 10 11 <-- offsets we store
@@ -157,39 +151,34 @@ where
}
#[cfg(not(no_salsa_async_drops))]
-impl<S> Drop for SpanMap<S> {
+impl Drop for SpanMap {
fn drop(&mut self) {
- struct SendPtr(*mut [()]);
- unsafe impl Send for SendPtr {}
+ let spans = std::mem::take(&mut self.spans);
static SPAN_MAP_DROP_THREAD: std::sync::OnceLock<
- std::sync::mpsc::Sender<(SendPtr, fn(SendPtr))>,
+ std::sync::mpsc::Sender<Vec<(TextSize, Span)>>,
> = std::sync::OnceLock::new();
+
SPAN_MAP_DROP_THREAD
.get_or_init(|| {
- let (sender, receiver) = std::sync::mpsc::channel::<(SendPtr, fn(SendPtr))>();
+ let (sender, receiver) = std::sync::mpsc::channel::<Vec<(TextSize, Span)>>();
std::thread::Builder::new()
.name("SpanMapDropper".to_owned())
- .spawn(move || receiver.iter().for_each(|(b, drop)| drop(b)))
+ .spawn(move || {
+ loop {
+ // block on a receive
+ _ = receiver.recv();
+ // then drain the entire channel
+ while receiver.try_recv().is_ok() {}
+ // and sleep for a bit
+ std::thread::sleep(std::time::Duration::from_millis(100));
+ }
+ // why do this over just a `receiver.iter().for_each(drop)`? To reduce contention on the channel lock.
+ // otherwise this thread will constantly wake up and sleep again.
+ })
.unwrap();
sender
})
- .send((
- unsafe {
- SendPtr(std::mem::transmute::<*mut [(TextSize, SpanData<S>)], *mut [()]>(
- Box::<[(TextSize, SpanData<S>)]>::into_raw(
- std::mem::take(&mut self.spans).into_boxed_slice(),
- ),
- ))
- },
- |b: SendPtr| {
- _ = unsafe {
- Box::from_raw(std::mem::transmute::<
- *mut [()],
- *mut [(TextSize, SpanData<S>)],
- >(b.0))
- }
- },
- ))
+ .send(spans)
.unwrap();
}
}
diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs
index 5fa0074163..7ab26b1890 100644
--- a/crates/stdx/src/lib.rs
+++ b/crates/stdx/src/lib.rs
@@ -76,6 +76,20 @@ impl<T, U, V> TupleExt for (T, U, V) {
}
}
+impl<T> TupleExt for &T
+where
+ T: TupleExt + Copy,
+{
+ type Head = T::Head;
+ type Tail = T::Tail;
+ fn head(self) -> Self::Head {
+ (*self).head()
+ }
+ fn tail(self) -> Self::Tail {
+ (*self).tail()
+ }
+}
+
pub fn to_lower_snake_case(s: &str) -> String {
to_snake_case(s, char::to_lowercase)
}
diff --git a/crates/stdx/src/thread/pool.rs b/crates/stdx/src/thread/pool.rs
index 8d76c5fd1f..918b88d960 100644
--- a/crates/stdx/src/thread/pool.rs
+++ b/crates/stdx/src/thread/pool.rs
@@ -66,7 +66,6 @@ impl Pool {
job.requested_intent.apply_to_current_thread();
current_intent = job.requested_intent;
}
- extant_tasks.fetch_add(1, Ordering::SeqCst);
// discard the panic, we should've logged the backtrace already
drop(panic::catch_unwind(job.f));
extant_tasks.fetch_sub(1, Ordering::SeqCst);
@@ -93,6 +92,7 @@ impl Pool {
});
let job = Job { requested_intent: intent, f };
+ self.extant_tasks.fetch_add(1, Ordering::SeqCst);
self.job_sender.send(job).unwrap();
}
@@ -147,6 +147,7 @@ impl<'scope> Scope<'_, 'scope> {
>(f)
},
};
+ self.pool.extant_tasks.fetch_add(1, Ordering::SeqCst);
self.pool.job_sender.send(job).unwrap();
}
}
diff --git a/crates/syntax-bridge/src/lib.rs b/crates/syntax-bridge/src/lib.rs
index 815b4f2799..0dcf18a4ad 100644
--- a/crates/syntax-bridge/src/lib.rs
+++ b/crates/syntax-bridge/src/lib.rs
@@ -9,7 +9,7 @@ use std::{collections::VecDeque, fmt, hash::Hash};
use intern::Symbol;
use rustc_hash::{FxHashMap, FxHashSet};
-use span::{Edition, SpanAnchor, SpanData, SpanMap};
+use span::{Edition, Span, SpanAnchor, SpanMap, SyntaxContext};
use stdx::{format_to, never};
use syntax::{
AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement,
@@ -29,21 +29,18 @@ pub use ::parser::TopEntryPoint;
#[cfg(test)]
mod tests;
-pub trait SpanMapper<S> {
- fn span_for(&self, range: TextRange) -> S;
+pub trait SpanMapper {
+ fn span_for(&self, range: TextRange) -> Span;
}
-impl<S> SpanMapper<SpanData<S>> for SpanMap<S>
-where
- SpanData<S>: Copy,
-{
- fn span_for(&self, range: TextRange) -> SpanData<S> {
+impl SpanMapper for SpanMap {
+ fn span_for(&self, range: TextRange) -> Span {
self.span_at(range.start())
}
}
-impl<S: Copy, SM: SpanMapper<S>> SpanMapper<S> for &SM {
- fn span_for(&self, range: TextRange) -> S {
+impl<SM: SpanMapper> SpanMapper for &SM {
+ fn span_for(&self, range: TextRange) -> Span {
SM::span_for(self, range)
}
}
@@ -69,7 +66,7 @@ pub mod dummy_test_span_utils {
pub struct DummyTestSpanMap;
- impl SpanMapper<Span> for DummyTestSpanMap {
+ impl SpanMapper for DummyTestSpanMap {
fn span_for(&self, range: syntax::TextRange) -> Span {
Span {
range,
@@ -97,15 +94,14 @@ pub enum DocCommentDesugarMode {
/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
/// subtree's spans.
-pub fn syntax_node_to_token_tree<Ctx, SpanMap>(
+pub fn syntax_node_to_token_tree<SpanMap>(
node: &SyntaxNode,
map: SpanMap,
- span: SpanData<Ctx>,
+ span: Span,
mode: DocCommentDesugarMode,
-) -> tt::TopSubtree<SpanData<Ctx>>
+) -> tt::TopSubtree
where
- SpanData<Ctx>: Copy + fmt::Debug,
- SpanMap: SpanMapper<SpanData<Ctx>>,
+ SpanMap: SpanMapper,
{
let mut c =
Converter::new(node, map, Default::default(), Default::default(), span, mode, |_, _| {
@@ -117,22 +113,18 @@ where
/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
/// subtree's spans. Additionally using the append and remove parameters, the additional tokens can
/// be injected or hidden from the output.
-pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap, OnEvent>(
+pub fn syntax_node_to_token_tree_modified<SpanMap, OnEvent>(
node: &SyntaxNode,
map: SpanMap,
- append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Ctx>>>>,
+ append: FxHashMap<SyntaxElement, Vec<tt::Leaf>>,
remove: FxHashSet<SyntaxElement>,
- call_site: SpanData<Ctx>,
+ call_site: Span,
mode: DocCommentDesugarMode,
on_enter: OnEvent,
-) -> tt::TopSubtree<SpanData<Ctx>>
+) -> tt::TopSubtree
where
- SpanMap: SpanMapper<SpanData<Ctx>>,
- SpanData<Ctx>: Copy + fmt::Debug,
- OnEvent: FnMut(
- &mut PreorderWithTokens,
- &WalkEvent<SyntaxElement>,
- ) -> (bool, Vec<tt::Leaf<SpanData<Ctx>>>),
+ SpanMap: SpanMapper,
+ OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf>),
{
let mut c = Converter::new(node, map, append, remove, call_site, mode, on_enter);
convert_tokens(&mut c)
@@ -152,13 +144,13 @@ where
/// Converts a [`tt::Subtree`] back to a [`SyntaxNode`].
/// The produced `SpanMap` contains a mapping from the syntax nodes offsets to the subtree's spans.
-pub fn token_tree_to_syntax_node<Ctx>(
- tt: &tt::TopSubtree<SpanData<Ctx>>,
+pub fn token_tree_to_syntax_node(
+ tt: &tt::TopSubtree,
entry_point: parser::TopEntryPoint,
- span_to_edition: &mut dyn FnMut(Ctx) -> Edition,
-) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
+ span_to_edition: &mut dyn FnMut(SyntaxContext) -> Edition,
+) -> (Parse<SyntaxNode>, SpanMap)
where
- Ctx: Copy + fmt::Debug + PartialEq + PartialEq + Eq + Hash,
+ SyntaxContext: Copy + fmt::Debug + PartialEq + PartialEq + Eq + Hash,
{
let buffer = tt.view().strip_invisible();
let parser_input = to_parser_input(buffer, span_to_edition);
@@ -183,16 +175,12 @@ where
/// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided
/// anchor with the given context.
-pub fn parse_to_token_tree<Ctx>(
+pub fn parse_to_token_tree(
edition: Edition,
anchor: SpanAnchor,
- ctx: Ctx,
+ ctx: SyntaxContext,
text: &str,
-) -> Option<tt::TopSubtree<SpanData<Ctx>>>
-where
- SpanData<Ctx>: Copy + fmt::Debug,
- Ctx: Copy,
-{
+) -> Option<tt::TopSubtree> {
let lexed = parser::LexedStr::new(edition, text);
if lexed.errors().next().is_some() {
return None;
@@ -203,14 +191,11 @@ where
}
/// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree.
-pub fn parse_to_token_tree_static_span<S>(
+pub fn parse_to_token_tree_static_span(
edition: Edition,
- span: S,
+ span: Span,
text: &str,
-) -> Option<tt::TopSubtree<S>>
-where
- S: Copy + fmt::Debug,
-{
+) -> Option<tt::TopSubtree> {
let lexed = parser::LexedStr::new(edition, text);
if lexed.errors().next().is_some() {
return None;
@@ -220,10 +205,9 @@ where
Some(convert_tokens(&mut conv))
}
-fn convert_tokens<S, C>(conv: &mut C) -> tt::TopSubtree<S>
+fn convert_tokens<C>(conv: &mut C) -> tt::TopSubtree
where
- C: TokenConverter<S>,
- S: Copy + fmt::Debug,
+ C: TokenConverter,
C::Token: fmt::Debug,
{
let mut builder =
@@ -239,7 +223,7 @@ where
spacing: _,
})) => {
let found_expected_delimiter =
- builder.expected_delimiters().enumerate().find(|(_, delim)| match delim.kind {
+ builder.expected_delimiters().enumerate().find(|(_, delim)| match delim {
tt::DelimiterKind::Parenthesis => char == ')',
tt::DelimiterKind::Brace => char == '}',
tt::DelimiterKind::Bracket => char == ']',
@@ -273,13 +257,11 @@ where
}
kind if kind.is_punct() && kind != UNDERSCORE => {
let found_expected_delimiter =
- builder.expected_delimiters().enumerate().find(|(_, delim)| {
- match delim.kind {
- tt::DelimiterKind::Parenthesis => kind == T![')'],
- tt::DelimiterKind::Brace => kind == T!['}'],
- tt::DelimiterKind::Bracket => kind == T![']'],
- tt::DelimiterKind::Invisible => false,
- }
+ builder.expected_delimiters().enumerate().find(|(_, delim)| match delim {
+ tt::DelimiterKind::Parenthesis => kind == T![')'],
+ tt::DelimiterKind::Brace => kind == T!['}'],
+ tt::DelimiterKind::Bracket => kind == T![']'],
+ tt::DelimiterKind::Invisible => false,
});
// Current token is a closing delimiter that we expect, fix up the closing span
@@ -327,7 +309,7 @@ where
.into()
};
}
- let leaf: tt::Leaf<_> = match kind {
+ let leaf: tt::Leaf = match kind {
k if k.is_any_identifier() => {
let text = token.to_text(conv);
tt::Ident::new(&text, conv.span_for(abs_range)).into()
@@ -435,11 +417,11 @@ pub fn desugar_doc_comment_text(text: &str, mode: DocCommentDesugarMode) -> (Sym
}
}
-fn convert_doc_comment<S: Copy>(
+fn convert_doc_comment(
token: &syntax::SyntaxToken,
- span: S,
+ span: Span,
mode: DocCommentDesugarMode,
- builder: &mut tt::TopSubtreeBuilder<S>,
+ builder: &mut tt::TopSubtreeBuilder,
) {
let Some(comment) = ast::Comment::cast(token.clone()) else { return };
let Some(doc) = comment.kind().doc else { return };
@@ -460,7 +442,7 @@ fn convert_doc_comment<S: Copy>(
text = &text[0..text.len() - 2];
}
let (text, kind) = desugar_doc_comment_text(text, mode);
- let lit = tt::Literal { symbol: text, span, kind, suffix: None };
+ let lit = tt::Literal { text_and_suffix: text, span, kind, suffix_len: 0 };
tt::Leaf::from(lit)
};
@@ -479,92 +461,84 @@ fn convert_doc_comment<S: Copy>(
}
/// A raw token (straight from lexer) converter
-struct RawConverter<'a, Ctx> {
+struct RawConverter<'a> {
lexed: parser::LexedStr<'a>,
pos: usize,
anchor: SpanAnchor,
- ctx: Ctx,
+ ctx: SyntaxContext,
mode: DocCommentDesugarMode,
}
/// A raw token (straight from lexer) converter that gives every token the same span.
-struct StaticRawConverter<'a, S> {
+struct StaticRawConverter<'a> {
lexed: parser::LexedStr<'a>,
pos: usize,
- span: S,
+ span: Span,
mode: DocCommentDesugarMode,
}
-trait SrcToken<Ctx, S> {
+trait SrcToken<Ctx> {
fn kind(&self, ctx: &Ctx) -> SyntaxKind;
fn to_char(&self, ctx: &Ctx) -> Option<char>;
fn to_text(&self, ctx: &Ctx) -> SmolStr;
- fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
+ fn as_leaf(&self) -> Option<&tt::Leaf> {
None
}
}
-trait TokenConverter<S>: Sized {
- type Token: SrcToken<Self, S>;
+trait TokenConverter: Sized {
+ type Token: SrcToken<Self>;
fn convert_doc_comment(
&self,
token: &Self::Token,
- span: S,
- builder: &mut tt::TopSubtreeBuilder<S>,
+ span: Span,
+ builder: &mut tt::TopSubtreeBuilder,
);
fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
fn peek(&self) -> Option<Self::Token>;
- fn span_for(&self, range: TextRange) -> S;
+ fn span_for(&self, range: TextRange) -> Span;
- fn call_site(&self) -> S;
+ fn call_site(&self) -> Span;
}
-impl<S, Ctx> SrcToken<RawConverter<'_, Ctx>, S> for usize {
- fn kind(&self, ctx: &RawConverter<'_, Ctx>) -> SyntaxKind {
+impl SrcToken<RawConverter<'_>> for usize {
+ fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind {
ctx.lexed.kind(*self)
}
- fn to_char(&self, ctx: &RawConverter<'_, Ctx>) -> Option<char> {
+ fn to_char(&self, ctx: &RawConverter<'_>) -> Option<char> {
ctx.lexed.text(*self).chars().next()
}
- fn to_text(&self, ctx: &RawConverter<'_, Ctx>) -> SmolStr {
+ fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr {
ctx.lexed.text(*self).into()
}
}
-impl<S: Copy> SrcToken<StaticRawConverter<'_, S>, S> for usize {
- fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind {
+impl SrcToken<StaticRawConverter<'_>> for usize {
+ fn kind(&self, ctx: &StaticRawConverter<'_>) -> SyntaxKind {
ctx.lexed.kind(*self)
}
- fn to_char(&self, ctx: &StaticRawConverter<'_, S>) -> Option<char> {
+ fn to_char(&self, ctx: &StaticRawConverter<'_>) -> Option<char> {
ctx.lexed.text(*self).chars().next()
}
- fn to_text(&self, ctx: &StaticRawConverter<'_, S>) -> SmolStr {
+ fn to_text(&self, ctx: &StaticRawConverter<'_>) -> SmolStr {
ctx.lexed.text(*self).into()
}
}
-impl<Ctx: Copy> TokenConverter<SpanData<Ctx>> for RawConverter<'_, Ctx>
-where
- SpanData<Ctx>: Copy,
-{
+impl TokenConverter for RawConverter<'_> {
type Token = usize;
- fn convert_doc_comment(
- &self,
- &token: &usize,
- span: SpanData<Ctx>,
- builder: &mut tt::TopSubtreeBuilder<SpanData<Ctx>>,
- ) {
+ fn convert_doc_comment(&self, &token: &usize, span: Span, builder: &mut tt::TopSubtreeBuilder) {
let text = self.lexed.text(token);
convert_doc_comment(&doc_comment(text), span, self.mode, builder);
}
@@ -588,22 +562,19 @@ where
Some(self.pos)
}
- fn span_for(&self, range: TextRange) -> SpanData<Ctx> {
- SpanData { range, anchor: self.anchor, ctx: self.ctx }
+ fn span_for(&self, range: TextRange) -> Span {
+ Span { range, anchor: self.anchor, ctx: self.ctx }
}
- fn call_site(&self) -> SpanData<Ctx> {
- SpanData { range: TextRange::empty(0.into()), anchor: self.anchor, ctx: self.ctx }
+ fn call_site(&self) -> Span {
+ Span { range: TextRange::empty(0.into()), anchor: self.anchor, ctx: self.ctx }
}
}
-impl<S> TokenConverter<S> for StaticRawConverter<'_, S>
-where
- S: Copy,
-{
+impl TokenConverter for StaticRawConverter<'_> {
type Token = usize;
- fn convert_doc_comment(&self, &token: &usize, span: S, builder: &mut tt::TopSubtreeBuilder<S>) {
+ fn convert_doc_comment(&self, &token: &usize, span: Span, builder: &mut tt::TopSubtreeBuilder) {
let text = self.lexed.text(token);
convert_doc_comment(&doc_comment(text), span, self.mode, builder);
}
@@ -627,40 +598,40 @@ where
Some(self.pos)
}
- fn span_for(&self, _: TextRange) -> S {
+ fn span_for(&self, _: TextRange) -> Span {
self.span
}
- fn call_site(&self) -> S {
+ fn call_site(&self) -> Span {
self.span
}
}
-struct Converter<SpanMap, S, OnEvent> {
+struct Converter<SpanMap, OnEvent> {
current: Option<SyntaxToken>,
- current_leaves: VecDeque<tt::Leaf<S>>,
+ current_leaves: VecDeque<tt::Leaf>,
preorder: PreorderWithTokens,
range: TextRange,
punct_offset: Option<(SyntaxToken, TextSize)>,
/// Used to make the emitted text ranges in the spans relative to the span anchor.
map: SpanMap,
- append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
+ append: FxHashMap<SyntaxElement, Vec<tt::Leaf>>,
remove: FxHashSet<SyntaxElement>,
- call_site: S,
+ call_site: Span,
mode: DocCommentDesugarMode,
on_event: OnEvent,
}
-impl<SpanMap, S, OnEvent> Converter<SpanMap, S, OnEvent>
+impl<SpanMap, OnEvent> Converter<SpanMap, OnEvent>
where
- OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf<S>>),
+ OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf>),
{
fn new(
node: &SyntaxNode,
map: SpanMap,
- append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
+ append: FxHashMap<SyntaxElement, Vec<tt::Leaf>>,
remove: FxHashSet<SyntaxElement>,
- call_site: S,
+ call_site: Span,
mode: DocCommentDesugarMode,
on_enter: OnEvent,
) -> Self {
@@ -720,13 +691,13 @@ where
}
#[derive(Debug)]
-enum SynToken<S> {
+enum SynToken {
Ordinary(SyntaxToken),
Punct { token: SyntaxToken, offset: usize },
- Leaf(tt::Leaf<S>),
+ Leaf(tt::Leaf),
}
-impl<S> SynToken<S> {
+impl SynToken {
fn token(&self) -> &SyntaxToken {
match self {
SynToken::Ordinary(it) | SynToken::Punct { token: it, offset: _ } => it,
@@ -735,8 +706,8 @@ impl<S> SynToken<S> {
}
}
-impl<SpanMap, S, OnEvent> SrcToken<Converter<SpanMap, S, OnEvent>, S> for SynToken<S> {
- fn kind(&self, _ctx: &Converter<SpanMap, S, OnEvent>) -> SyntaxKind {
+impl<SpanMap, OnEvent> SrcToken<Converter<SpanMap, OnEvent>> for SynToken {
+ fn kind(&self, _ctx: &Converter<SpanMap, OnEvent>) -> SyntaxKind {
match self {
SynToken::Ordinary(token) => token.kind(),
SynToken::Punct { token, offset: i } => {
@@ -748,14 +719,14 @@ impl<SpanMap, S, OnEvent> SrcToken<Converter<SpanMap, S, OnEvent>, S> for SynTok
}
}
}
- fn to_char(&self, _ctx: &Converter<SpanMap, S, OnEvent>) -> Option<char> {
+ fn to_char(&self, _ctx: &Converter<SpanMap, OnEvent>) -> Option<char> {
match self {
SynToken::Ordinary(_) => None,
SynToken::Punct { token: it, offset: i } => it.text().chars().nth(*i),
SynToken::Leaf(_) => None,
}
}
- fn to_text(&self, _ctx: &Converter<SpanMap, S, OnEvent>) -> SmolStr {
+ fn to_text(&self, _ctx: &Converter<SpanMap, OnEvent>) -> SmolStr {
match self {
SynToken::Ordinary(token) | SynToken::Punct { token, offset: _ } => token.text().into(),
SynToken::Leaf(_) => {
@@ -764,7 +735,7 @@ impl<SpanMap, S, OnEvent> SrcToken<Converter<SpanMap, S, OnEvent>, S> for SynTok
}
}
}
- fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
+ fn as_leaf(&self) -> Option<&tt::Leaf> {
match self {
SynToken::Ordinary(_) | SynToken::Punct { .. } => None,
SynToken::Leaf(it) => Some(it),
@@ -772,18 +743,17 @@ impl<SpanMap, S, OnEvent> SrcToken<Converter<SpanMap, S, OnEvent>, S> for SynTok
}
}
-impl<S, SpanMap, OnEvent> TokenConverter<S> for Converter<SpanMap, S, OnEvent>
+impl<SpanMap, OnEvent> TokenConverter for Converter<SpanMap, OnEvent>
where
- S: Copy,
- SpanMap: SpanMapper<S>,
- OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf<S>>),
+ SpanMap: SpanMapper,
+ OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf>),
{
- type Token = SynToken<S>;
+ type Token = SynToken;
fn convert_doc_comment(
&self,
token: &Self::Token,
- span: S,
- builder: &mut tt::TopSubtreeBuilder<S>,
+ span: Span,
+ builder: &mut tt::TopSubtreeBuilder,
) {
convert_doc_comment(token.token(), span, self.mode, builder);
}
@@ -847,30 +817,24 @@ where
Some(token)
}
- fn span_for(&self, range: TextRange) -> S {
+ fn span_for(&self, range: TextRange) -> Span {
self.map.span_for(range)
}
- fn call_site(&self) -> S {
+ fn call_site(&self) -> Span {
self.call_site
}
}
-struct TtTreeSink<'a, Ctx>
-where
- SpanData<Ctx>: Copy,
-{
+struct TtTreeSink<'a> {
buf: String,
- cursor: Cursor<'a, SpanData<Ctx>>,
+ cursor: Cursor<'a>,
text_pos: TextSize,
inner: SyntaxTreeBuilder,
- token_map: SpanMap<Ctx>,
+ token_map: SpanMap,
}
-impl<'a, Ctx> TtTreeSink<'a, Ctx>
-where
- SpanData<Ctx>: Copy,
-{
- fn new(cursor: Cursor<'a, SpanData<Ctx>>) -> Self {
+impl<'a> TtTreeSink<'a> {
+ fn new(cursor: Cursor<'a>) -> Self {
TtTreeSink {
buf: String::new(),
cursor,
@@ -880,7 +844,7 @@ where
}
}
- fn finish(mut self) -> (Parse<SyntaxNode>, SpanMap<Ctx>) {
+ fn finish(mut self) -> (Parse<SyntaxNode>, SpanMap) {
self.token_map.finish();
(self.inner.finish(), self.token_map)
}
@@ -898,21 +862,15 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
Some(&texts[idx..texts.len() - (1 - idx)])
}
-impl<Ctx> TtTreeSink<'_, Ctx>
-where
- SpanData<Ctx>: Copy + fmt::Debug,
- Ctx: PartialEq,
-{
+impl TtTreeSink<'_> {
/// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
/// This occurs when a float literal is used as a field access.
fn float_split(&mut self, has_pseudo_dot: bool) {
- let (text, span) = match self.cursor.token_tree() {
- Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span,
- kind: tt::LitKind::Float,
- suffix: _,
- }))) => (text.as_str(), *span),
+ let token_tree = self.cursor.token_tree();
+ let (text, span) = match &token_tree {
+ Some(tt::TokenTree::Leaf(tt::Leaf::Literal(
+ lit @ tt::Literal { span, kind: tt::LitKind::Float, .. },
+ ))) => (lit.text(), *span),
tt => unreachable!("{tt:?}"),
};
// FIXME: Span splitting
@@ -971,9 +929,15 @@ where
self.buf.push_str("r#");
self.text_pos += TextSize::of("r#");
}
- let r = (ident.sym.as_str(), ident.span);
+ let text = ident.sym.as_str();
+ self.buf += text;
+ self.text_pos += TextSize::of(text);
+ combined_span = match combined_span {
+ None => Some(ident.span),
+ Some(prev_span) => Some(Self::merge_spans(prev_span, ident.span)),
+ };
self.cursor.bump();
- r
+ continue 'tokens;
}
tt::Leaf::Punct(punct) => {
assert!(punct.char.is_ascii());
@@ -1053,10 +1017,10 @@ where
self.inner.error(error, self.text_pos)
}
- fn merge_spans(a: SpanData<Ctx>, b: SpanData<Ctx>) -> SpanData<Ctx> {
+ fn merge_spans(a: Span, b: Span) -> Span {
// We don't do what rustc does exactly, rustc does something clever when the spans have different syntax contexts
// but this runs afoul of our separation between `span` and `hir-expand`.
- SpanData {
+ Span {
range: if a.ctx == b.ctx && a.anchor == b.anchor {
TextRange::new(
std::cmp::min(a.range.start(), b.range.start()),
diff --git a/crates/syntax-bridge/src/tests.rs b/crates/syntax-bridge/src/tests.rs
index c8dc3131b5..16f2498bf3 100644
--- a/crates/syntax-bridge/src/tests.rs
+++ b/crates/syntax-bridge/src/tests.rs
@@ -30,15 +30,15 @@ fn check_punct_spacing(fixture: &str) {
})
.collect();
- let mut cursor = Cursor::new(&subtree.0);
+ let mut cursor = Cursor::new(subtree.as_token_trees());
while !cursor.eof() {
while let Some(token_tree) = cursor.token_tree() {
if let tt::TokenTree::Leaf(Leaf::Punct(Punct {
spacing, span: Span { range, .. }, ..
})) = token_tree
- && let Some(expected) = annotations.remove(range)
+ && let Some(expected) = annotations.remove(&range)
{
- assert_eq!(expected, *spacing);
+ assert_eq!(expected, spacing);
}
cursor.bump();
}
diff --git a/crates/syntax-bridge/src/to_parser_input.rs b/crates/syntax-bridge/src/to_parser_input.rs
index 5922994c08..851a4af864 100644
--- a/crates/syntax-bridge/src/to_parser_input.rs
+++ b/crates/syntax-bridge/src/to_parser_input.rs
@@ -1,16 +1,13 @@
//! Convert macro-by-example tokens which are specific to macro expansion into a
//! format that works for our parser.
-use std::fmt;
-use std::hash::Hash;
-
use rustc_hash::FxHashMap;
-use span::{Edition, SpanData};
+use span::{Edition, SyntaxContext};
use syntax::{SyntaxKind, SyntaxKind::*, T};
-pub fn to_parser_input<Ctx: Copy + fmt::Debug + PartialEq + Eq + Hash>(
- buffer: tt::TokenTreesView<'_, SpanData<Ctx>>,
- span_to_edition: &mut dyn FnMut(Ctx) -> Edition,
+pub fn to_parser_input(
+ buffer: tt::TokenTreesView<'_>,
+ span_to_edition: &mut dyn FnMut(SyntaxContext) -> Edition,
) -> parser::Input {
let mut res = parser::Input::with_capacity(buffer.len());
@@ -55,7 +52,7 @@ pub fn to_parser_input<Ctx: Copy + fmt::Debug + PartialEq + Eq + Hash>(
};
res.push(kind, ctx_edition(lit.span.ctx));
- if kind == FLOAT_NUMBER && !lit.symbol.as_str().ends_with('.') {
+ if kind == FLOAT_NUMBER && !lit.text().ends_with('.') {
// Tag the token as joint if it is float with a fractional part
// we use this jointness to inform the parser about what token split
// event to emit when we encounter a float literal in a field access
diff --git a/crates/syntax/fuzz/Cargo.toml b/crates/syntax/fuzz/Cargo.toml
index 8910911ff0..b2f238efc0 100644
--- a/crates/syntax/fuzz/Cargo.toml
+++ b/crates/syntax/fuzz/Cargo.toml
@@ -10,7 +10,6 @@ cargo-fuzz = true
[dependencies]
syntax = { path = "..", version = "0.0.0" }
-text-edit = { path = "../../text-edit", version = "0.0.0" }
libfuzzer-sys = "0.4.5"
# Prevent this from interfering with workspaces
diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram
index d73d60c51f..991fe7d83a 100644
--- a/crates/syntax/rust.ungram
+++ b/crates/syntax/rust.ungram
@@ -438,7 +438,10 @@ FormatArgsExpr =
')'
FormatArgsArg =
- (Name '=')? Expr
+ arg_name:FormatArgsArgName? Expr
+
+FormatArgsArgName =
+ '=' // This also has a name, but it's any token and we can't put it here
MacroExpr =
MacroCall
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs
index 6c1dcf336a..7b9f5b9166 100644
--- a/crates/syntax/src/ast/generated/nodes.rs
+++ b/crates/syntax/src/ast/generated/nodes.rs
@@ -639,10 +639,16 @@ impl ForType {
pub struct FormatArgsArg {
pub(crate) syntax: SyntaxNode,
}
-impl ast::HasName for FormatArgsArg {}
impl FormatArgsArg {
#[inline]
+ pub fn arg_name(&self) -> Option<FormatArgsArgName> { support::child(&self.syntax) }
+ #[inline]
pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+pub struct FormatArgsArgName {
+ pub(crate) syntax: SyntaxNode,
+}
+impl FormatArgsArgName {
#[inline]
pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
}
@@ -3722,6 +3728,38 @@ impl fmt::Debug for FormatArgsArg {
f.debug_struct("FormatArgsArg").field("syntax", &self.syntax).finish()
}
}
+impl AstNode for FormatArgsArgName {
+ #[inline]
+ fn kind() -> SyntaxKind
+ where
+ Self: Sized,
+ {
+ FORMAT_ARGS_ARG_NAME
+ }
+ #[inline]
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FORMAT_ARGS_ARG_NAME }
+ #[inline]
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
+ }
+ #[inline]
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl hash::Hash for FormatArgsArgName {
+ fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for FormatArgsArgName {}
+impl PartialEq for FormatArgsArgName {
+ fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for FormatArgsArgName {
+ fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for FormatArgsArgName {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("FormatArgsArgName").field("syntax", &self.syntax).finish()
+ }
+}
impl AstNode for FormatArgsExpr {
#[inline]
fn kind() -> SyntaxKind
@@ -8947,7 +8985,6 @@ impl AstNode for AnyHasName {
| CONST_PARAM
| ENUM
| FN
- | FORMAT_ARGS_ARG
| IDENT_PAT
| MACRO_DEF
| MACRO_RULES
@@ -9006,10 +9043,6 @@ impl From<Fn> for AnyHasName {
#[inline]
fn from(node: Fn) -> AnyHasName { AnyHasName { syntax: node.syntax } }
}
-impl From<FormatArgsArg> for AnyHasName {
- #[inline]
- fn from(node: FormatArgsArg) -> AnyHasName { AnyHasName { syntax: node.syntax } }
-}
impl From<IdentPat> for AnyHasName {
#[inline]
fn from(node: IdentPat) -> AnyHasName { AnyHasName { syntax: node.syntax } }
@@ -9541,6 +9574,11 @@ impl std::fmt::Display for FormatArgsArg {
std::fmt::Display::fmt(self.syntax(), f)
}
}
+impl std::fmt::Display for FormatArgsArgName {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
impl std::fmt::Display for FormatArgsExpr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 800dd5f4ac..76cfea9d5b 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -447,24 +447,23 @@ impl ast::UseTreeList {
impl ast::Impl {
pub fn self_ty(&self) -> Option<ast::Type> {
- match self.target() {
- (Some(t), None) | (_, Some(t)) => Some(t),
- _ => None,
- }
+ self.target().1
}
pub fn trait_(&self) -> Option<ast::Type> {
- match self.target() {
- (Some(t), Some(_)) => Some(t),
- _ => None,
- }
+ self.target().0
}
fn target(&self) -> (Option<ast::Type>, Option<ast::Type>) {
- let mut types = support::children(self.syntax());
- let first = types.next();
- let second = types.next();
- (first, second)
+ let mut types = support::children(self.syntax()).peekable();
+ let for_kw = self.for_token();
+ let trait_ = types.next_if(|trait_: &ast::Type| {
+ for_kw.is_some_and(|for_kw| {
+ trait_.syntax().text_range().start() < for_kw.text_range().start()
+ })
+ });
+ let self_ty = types.next();
+ (trait_, self_ty)
}
pub fn for_trait_name_ref(name_ref: &ast::NameRef) -> Option<ast::Impl> {
@@ -1118,6 +1117,15 @@ impl From<ast::AssocItem> for ast::AnyHasAttrs {
}
}
+impl ast::FormatArgsArgName {
+ /// This is not a [`ast::Name`], because the name may be a keyword.
+ pub fn name(&self) -> SyntaxToken {
+ let name = self.syntax.first_token().unwrap();
+ assert!(name.kind().is_any_identifier());
+ name
+ }
+}
+
impl ast::OrPat {
pub fn leading_pipe(&self) -> Option<SyntaxToken> {
self.syntax
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index 9e3083066c..c510b2831e 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -218,7 +218,18 @@ impl<T> Drop for Parse<T> {
let (sender, receiver) = std::sync::mpsc::channel::<GreenNode>();
std::thread::Builder::new()
.name("ParseNodeDropper".to_owned())
- .spawn(move || receiver.iter().for_each(drop))
+ .spawn(move || {
+ loop {
+ // block on a receive
+ _ = receiver.recv();
+ // then drain the entire channel
+ while receiver.try_recv().is_ok() {}
+ // and sleep for a bit
+ std::thread::sleep(std::time::Duration::from_millis(100));
+ }
+ // why do this over just a `receiver.iter().for_each(drop)`? To reduce contention on the channel lock.
+ // otherwise this thread will constantly wake up and sleep again.
+ })
.unwrap();
sender
})
diff --git a/crates/test-fixture/Cargo.toml b/crates/test-fixture/Cargo.toml
index 7760ae7aa0..74a4f83632 100644
--- a/crates/test-fixture/Cargo.toml
+++ b/crates/test-fixture/Cargo.toml
@@ -14,7 +14,6 @@ test-utils.workspace = true
tt.workspace = true
cfg.workspace = true
base-db.workspace = true
-rustc-hash.workspace = true
span.workspace = true
stdx.workspace = true
intern.workspace = true
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs
index 01e4215cfb..d81f27d7c3 100644
--- a/crates/test-fixture/src/lib.rs
+++ b/crates/test-fixture/src/lib.rs
@@ -738,6 +738,7 @@ struct IdentityProcMacroExpander;
impl ProcMacroExpander for IdentityProcMacroExpander {
fn expand(
&self,
+ _: &dyn ExpandDatabase,
subtree: &TopSubtree,
_: Option<&TopSubtree>,
_: &Env,
@@ -760,6 +761,7 @@ struct Issue18089ProcMacroExpander;
impl ProcMacroExpander for Issue18089ProcMacroExpander {
fn expand(
&self,
+ _: &dyn ExpandDatabase,
subtree: &TopSubtree,
_: Option<&TopSubtree>,
_: &Env,
@@ -768,7 +770,7 @@ impl ProcMacroExpander for Issue18089ProcMacroExpander {
_: Span,
_: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
- let tt::TokenTree::Leaf(macro_name) = &subtree.0[2] else {
+ let Some(tt::TtElement::Leaf(macro_name)) = subtree.iter().nth(1) else {
return Err(ProcMacroExpansionError::Panic("incorrect input".to_owned()));
};
Ok(quote! { call_site =>
@@ -795,6 +797,7 @@ struct AttributeInputReplaceProcMacroExpander;
impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
fn expand(
&self,
+ _: &dyn ExpandDatabase,
_: &TopSubtree,
attrs: Option<&TopSubtree>,
_: &Env,
@@ -818,6 +821,7 @@ struct Issue18840ProcMacroExpander;
impl ProcMacroExpander for Issue18840ProcMacroExpander {
fn expand(
&self,
+ _: &dyn ExpandDatabase,
fn_: &TopSubtree,
_: Option<&TopSubtree>,
_: &Env,
@@ -833,13 +837,14 @@ impl ProcMacroExpander for Issue18840ProcMacroExpander {
// ```
// The span that was created by the fixup infra.
- let fixed_up_span = fn_.token_trees().flat_tokens()[5].first_span();
+ let mut iter = fn_.iter();
+ iter.nth(2);
+ let (_, mut fn_body) = iter.expect_subtree().unwrap();
+ let fixed_up_span = fn_body.nth(1).unwrap().first_span();
let mut result =
quote! {fixed_up_span => ::core::compile_error! { "my cool compile_error!" } };
// Make it so we won't remove the top subtree when reversing fixups.
- let top_subtree_delimiter_mut = result.top_subtree_delimiter_mut();
- top_subtree_delimiter_mut.open = def_site;
- top_subtree_delimiter_mut.close = def_site;
+ result.set_top_subtree_delimiter_span(tt::DelimSpan::from_single(def_site));
Ok(result)
}
@@ -853,6 +858,7 @@ struct MirrorProcMacroExpander;
impl ProcMacroExpander for MirrorProcMacroExpander {
fn expand(
&self,
+ _: &dyn ExpandDatabase,
input: &TopSubtree,
_: Option<&TopSubtree>,
_: &Env,
@@ -891,6 +897,7 @@ struct ShortenProcMacroExpander;
impl ProcMacroExpander for ShortenProcMacroExpander {
fn expand(
&self,
+ _: &dyn ExpandDatabase,
input: &TopSubtree,
_: Option<&TopSubtree>,
_: &Env,
@@ -899,20 +906,22 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
_: Span,
_: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
- let mut result = input.0.clone();
- for it in &mut result {
- if let TokenTree::Leaf(leaf) = it {
- modify_leaf(leaf)
+ let mut result = input.clone();
+ for (idx, it) in input.as_token_trees().iter_flat_tokens().enumerate() {
+ if let TokenTree::Leaf(mut leaf) = it {
+ modify_leaf(&mut leaf);
+ result.set_token(idx, leaf);
}
}
- return Ok(tt::TopSubtree(result));
+ return Ok(result);
fn modify_leaf(leaf: &mut Leaf) {
match leaf {
Leaf::Literal(it) => {
// XXX Currently replaces any literals with an empty string, but supporting
// "shortening" other literals would be nice.
- it.symbol = Symbol::empty();
+ it.text_and_suffix = Symbol::empty();
+ it.suffix_len = 0;
}
Leaf::Punct(_) => {}
Leaf::Ident(it) => {
@@ -933,6 +942,7 @@ struct Issue17479ProcMacroExpander;
impl ProcMacroExpander for Issue17479ProcMacroExpander {
fn expand(
&self,
+ _: &dyn ExpandDatabase,
subtree: &TopSubtree,
_: Option<&TopSubtree>,
_: &Env,
@@ -941,10 +951,11 @@ impl ProcMacroExpander for Issue17479ProcMacroExpander {
_: Span,
_: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
- let TokenTree::Leaf(Leaf::Literal(lit)) = &subtree.0[1] else {
+ let mut iter = subtree.iter();
+ let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = iter.next() else {
return Err(ProcMacroExpansionError::Panic("incorrect Input".into()));
};
- let symbol = &lit.symbol;
+ let symbol = Symbol::intern(lit.text());
let span = lit.span;
Ok(quote! { span =>
#symbol()
@@ -962,6 +973,7 @@ struct Issue18898ProcMacroExpander;
impl ProcMacroExpander for Issue18898ProcMacroExpander {
fn expand(
&self,
+ _: &dyn ExpandDatabase,
subtree: &TopSubtree,
_: Option<&TopSubtree>,
_: &Env,
@@ -972,10 +984,8 @@ impl ProcMacroExpander for Issue18898ProcMacroExpander {
) -> Result<TopSubtree, ProcMacroExpansionError> {
let span = subtree
.token_trees()
- .flat_tokens()
- .last()
- .ok_or_else(|| ProcMacroExpansionError::Panic("malformed input".to_owned()))?
- .first_span();
+ .last_span()
+ .ok_or_else(|| ProcMacroExpansionError::Panic("malformed input".to_owned()))?;
let overly_long_subtree = quote! {span =>
{
let a = 5;
@@ -1017,6 +1027,7 @@ struct DisallowCfgProcMacroExpander;
impl ProcMacroExpander for DisallowCfgProcMacroExpander {
fn expand(
&self,
+ _: &dyn ExpandDatabase,
subtree: &TopSubtree,
_: Option<&TopSubtree>,
_: &Env,
@@ -1025,7 +1036,7 @@ impl ProcMacroExpander for DisallowCfgProcMacroExpander {
_: Span,
_: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
- for tt in subtree.token_trees().flat_tokens() {
+ for tt in subtree.token_trees().iter_flat_tokens() {
if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = tt
&& (ident.sym == sym::cfg || ident.sym == sym::cfg_attr)
{
@@ -1048,6 +1059,7 @@ struct GenerateSuffixedTypeProcMacroExpander;
impl ProcMacroExpander for GenerateSuffixedTypeProcMacroExpander {
fn expand(
&self,
+ _: &dyn ExpandDatabase,
subtree: &TopSubtree,
_attrs: Option<&TopSubtree>,
_env: &Env,
@@ -1056,20 +1068,23 @@ impl ProcMacroExpander for GenerateSuffixedTypeProcMacroExpander {
_mixed_site: Span,
_current_dir: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
- let TokenTree::Leaf(Leaf::Ident(ident)) = &subtree.0[1] else {
+ let mut iter = subtree.iter();
+ let Some(TtElement::Leaf(tt::Leaf::Ident(ident))) = iter.next() else {
return Err(ProcMacroExpansionError::Panic("incorrect Input".into()));
};
let ident = match ident.sym.as_str() {
"struct" => {
- let TokenTree::Leaf(Leaf::Ident(ident)) = &subtree.0[2] else {
+ let Some(TtElement::Leaf(tt::Leaf::Ident(ident))) = iter.next() else {
return Err(ProcMacroExpansionError::Panic("incorrect Input".into()));
};
ident
}
"enum" => {
- let TokenTree::Leaf(Leaf::Ident(ident)) = &subtree.0[4] else {
+ iter.next();
+ let (_, mut iter) = iter.expect_subtree().unwrap();
+ let Some(TtElement::Leaf(tt::Leaf::Ident(ident))) = iter.next() else {
return Err(ProcMacroExpansionError::Panic("incorrect Input".into()));
};
ident
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index b7c09391ec..c3429356d9 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -80,6 +80,7 @@
//! offset_of:
#![rustc_coherence_is_core]
+#![feature(lang_items)]
pub mod marker {
// region:sized
@@ -545,11 +546,11 @@ pub mod ptr {
// endregion:non_null
// region:addr_of
- #[rustc_macro_transparency = "semitransparent"]
+ #[rustc_macro_transparency = "semiopaque"]
pub macro addr_of($place:expr) {
&raw const $place
}
- #[rustc_macro_transparency = "semitransparent"]
+ #[rustc_macro_transparency = "semiopaque"]
pub macro addr_of_mut($place:expr) {
&raw mut $place
}
diff --git a/crates/tt/Cargo.toml b/crates/tt/Cargo.toml
index 3183b72a66..6cfb76400e 100644
--- a/crates/tt/Cargo.toml
+++ b/crates/tt/Cargo.toml
@@ -15,7 +15,10 @@ doctest = false
[dependencies]
arrayvec.workspace = true
text-size.workspace = true
+rustc-hash.workspace = true
+indexmap.workspace = true
+span = { path = "../span", version = "0.0", default-features = false }
stdx.workspace = true
intern.workspace = true
ra-ap-rustc_lexer.workspace = true
diff --git a/crates/tt/src/buffer.rs b/crates/tt/src/buffer.rs
index 02a722895a..78cf4b956d 100644
--- a/crates/tt/src/buffer.rs
+++ b/crates/tt/src/buffer.rs
@@ -1,16 +1,16 @@
//! Stateful iteration over token trees.
//!
//! We use this as the source of tokens for parser.
-use crate::{Leaf, Subtree, TokenTree, TokenTreesView};
+use crate::{Leaf, Subtree, TokenTree, TokenTreesView, dispatch_ref};
-pub struct Cursor<'a, Span> {
- buffer: &'a [TokenTree<Span>],
+pub struct Cursor<'a> {
+ buffer: TokenTreesView<'a>,
index: usize,
subtrees_stack: Vec<usize>,
}
-impl<'a, Span: Copy> Cursor<'a, Span> {
- pub fn new(buffer: &'a [TokenTree<Span>]) -> Self {
+impl<'a> Cursor<'a> {
+ pub fn new(buffer: TokenTreesView<'a>) -> Self {
Self { buffer, index: 0, subtrees_stack: Vec::new() }
}
@@ -23,16 +23,22 @@ impl<'a, Span: Copy> Cursor<'a, Span> {
self.subtrees_stack.is_empty()
}
- fn last_subtree(&self) -> Option<(usize, &'a Subtree<Span>)> {
+ fn at(&self, idx: usize) -> Option<TokenTree> {
+ dispatch_ref! {
+ match self.buffer.repr => tt => Some(tt.get(idx)?.to_api(self.buffer.span_parts))
+ }
+ }
+
+ fn last_subtree(&self) -> Option<(usize, Subtree)> {
self.subtrees_stack.last().map(|&subtree_idx| {
- let TokenTree::Subtree(subtree) = &self.buffer[subtree_idx] else {
+ let Some(TokenTree::Subtree(subtree)) = self.at(subtree_idx) else {
panic!("subtree pointing to non-subtree");
};
(subtree_idx, subtree)
})
}
- pub fn end(&mut self) -> &'a Subtree<Span> {
+ pub fn end(&mut self) -> Subtree {
let (last_subtree_idx, last_subtree) =
self.last_subtree().expect("called `Cursor::end()` without an open subtree");
// +1 because `Subtree.len` excludes the subtree itself.
@@ -46,14 +52,14 @@ impl<'a, Span: Copy> Cursor<'a, Span> {
}
/// Returns the `TokenTree` at the cursor if it is not at the end of a subtree.
- pub fn token_tree(&self) -> Option<&'a TokenTree<Span>> {
+ pub fn token_tree(&self) -> Option<TokenTree> {
if let Some((last_subtree_idx, last_subtree)) = self.last_subtree() {
// +1 because `Subtree.len` excludes the subtree itself.
if last_subtree_idx + last_subtree.usize_len() + 1 == self.index {
return None;
}
}
- self.buffer.get(self.index)
+ self.at(self.index)
}
/// Bump the cursor, and enters a subtree if it is on one.
@@ -66,7 +72,7 @@ impl<'a, Span: Copy> Cursor<'a, Span> {
"called `Cursor::bump()` when at the end of a subtree"
);
}
- if let TokenTree::Subtree(_) = self.buffer[self.index] {
+ if let Some(TokenTree::Subtree(_)) = self.at(self.index) {
self.subtrees_stack.push(self.index);
}
self.index += 1;
@@ -81,13 +87,13 @@ impl<'a, Span: Copy> Cursor<'a, Span> {
}
}
// +1 because `Subtree.len` excludes the subtree itself.
- if let TokenTree::Subtree(_) = self.buffer[self.index] {
+ if let Some(TokenTree::Subtree(_)) = self.at(self.index) {
self.subtrees_stack.push(self.index);
}
self.index += 1;
}
- pub fn peek_two_leaves(&self) -> Option<[&'a Leaf<Span>; 2]> {
+ pub fn peek_two_leaves(&self) -> Option<[Leaf; 2]> {
if let Some((last_subtree_idx, last_subtree)) = self.last_subtree() {
// +1 because `Subtree.len` excludes the subtree itself.
let last_end = last_subtree_idx + last_subtree.usize_len() + 1;
@@ -95,14 +101,17 @@ impl<'a, Span: Copy> Cursor<'a, Span> {
return None;
}
}
- self.buffer.get(self.index..self.index + 2).and_then(|it| match it {
- [TokenTree::Leaf(a), TokenTree::Leaf(b)] => Some([a, b]),
+ self.at(self.index).zip(self.at(self.index + 1)).and_then(|it| match it {
+ (TokenTree::Leaf(a), TokenTree::Leaf(b)) => Some([a, b]),
_ => None,
})
}
- pub fn crossed(&self) -> TokenTreesView<'a, Span> {
+ pub fn crossed(&self) -> TokenTreesView<'a> {
assert!(self.is_root());
- TokenTreesView::new(&self.buffer[..self.index])
+ TokenTreesView {
+ repr: self.buffer.repr.get(..self.index).unwrap(),
+ span_parts: self.buffer.span_parts,
+ }
}
}
diff --git a/crates/tt/src/iter.rs b/crates/tt/src/iter.rs
index 2e89d762a0..7caacd40dd 100644
--- a/crates/tt/src/iter.rs
+++ b/crates/tt/src/iter.rs
@@ -5,58 +5,62 @@ use std::fmt;
use arrayvec::ArrayVec;
use intern::sym;
+use span::Span;
-use crate::{Ident, Leaf, MAX_GLUED_PUNCT_LEN, Punct, Spacing, Subtree, TokenTree, TokenTreesView};
+use crate::{
+ Ident, Leaf, MAX_GLUED_PUNCT_LEN, Punct, Spacing, Subtree, TokenTree, TokenTreesReprRef,
+ TokenTreesView, dispatch_ref,
+};
#[derive(Clone)]
-pub struct TtIter<'a, S> {
- inner: std::slice::Iter<'a, TokenTree<S>>,
+pub struct TtIter<'a> {
+ inner: TokenTreesView<'a>,
}
-impl<S: Copy + fmt::Debug> fmt::Debug for TtIter<'_, S> {
+impl fmt::Debug for TtIter<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("TtIter").field("remaining", &self.remaining()).finish()
}
}
#[derive(Clone, Copy)]
-pub struct TtIterSavepoint<'a, S>(&'a [TokenTree<S>]);
+pub struct TtIterSavepoint<'a>(TokenTreesView<'a>);
-impl<'a, S: Copy> TtIterSavepoint<'a, S> {
- pub fn remaining(self) -> TokenTreesView<'a, S> {
- TokenTreesView::new(self.0)
+impl<'a> TtIterSavepoint<'a> {
+ pub fn remaining(self) -> TokenTreesView<'a> {
+ self.0
}
}
-impl<'a, S: Copy> TtIter<'a, S> {
- pub(crate) fn new(tt: &'a [TokenTree<S>]) -> TtIter<'a, S> {
- TtIter { inner: tt.iter() }
+impl<'a> TtIter<'a> {
+ pub(crate) fn new(tt: TokenTreesView<'a>) -> TtIter<'a> {
+ TtIter { inner: tt }
}
pub fn expect_char(&mut self, char: char) -> Result<(), ()> {
match self.next() {
- Some(TtElement::Leaf(&Leaf::Punct(Punct { char: c, .. }))) if c == char => Ok(()),
+ Some(TtElement::Leaf(Leaf::Punct(Punct { char: c, .. }))) if c == char => Ok(()),
_ => Err(()),
}
}
pub fn expect_any_char(&mut self, chars: &[char]) -> Result<(), ()> {
match self.next() {
- Some(TtElement::Leaf(Leaf::Punct(Punct { char: c, .. }))) if chars.contains(c) => {
+ Some(TtElement::Leaf(Leaf::Punct(Punct { char: c, .. }))) if chars.contains(&c) => {
Ok(())
}
_ => Err(()),
}
}
- pub fn expect_subtree(&mut self) -> Result<(&'a Subtree<S>, TtIter<'a, S>), ()> {
+ pub fn expect_subtree(&mut self) -> Result<(Subtree, TtIter<'a>), ()> {
match self.next() {
Some(TtElement::Subtree(subtree, iter)) => Ok((subtree, iter)),
_ => Err(()),
}
}
- pub fn expect_leaf(&mut self) -> Result<&'a Leaf<S>, ()> {
+ pub fn expect_leaf(&mut self) -> Result<Leaf, ()> {
match self.next() {
Some(TtElement::Leaf(it)) => Ok(it),
_ => Err(()),
@@ -77,30 +81,30 @@ impl<'a, S: Copy> TtIter<'a, S> {
}
}
- pub fn expect_ident(&mut self) -> Result<&'a Ident<S>, ()> {
+ pub fn expect_ident(&mut self) -> Result<Ident, ()> {
match self.expect_leaf()? {
Leaf::Ident(it) if it.sym != sym::underscore => Ok(it),
_ => Err(()),
}
}
- pub fn expect_ident_or_underscore(&mut self) -> Result<&'a Ident<S>, ()> {
+ pub fn expect_ident_or_underscore(&mut self) -> Result<Ident, ()> {
match self.expect_leaf()? {
Leaf::Ident(it) => Ok(it),
_ => Err(()),
}
}
- pub fn expect_literal(&mut self) -> Result<&'a Leaf<S>, ()> {
+ pub fn expect_literal(&mut self) -> Result<Leaf, ()> {
let it = self.expect_leaf()?;
- match it {
+ match &it {
Leaf::Literal(_) => Ok(it),
Leaf::Ident(ident) if ident.sym == sym::true_ || ident.sym == sym::false_ => Ok(it),
_ => Err(()),
}
}
- pub fn expect_single_punct(&mut self) -> Result<&'a Punct<S>, ()> {
+ pub fn expect_single_punct(&mut self) -> Result<Punct, ()> {
match self.expect_leaf()? {
Leaf::Punct(it) => Ok(it),
_ => Err(()),
@@ -111,8 +115,8 @@ impl<'a, S: Copy> TtIter<'a, S> {
///
/// This method currently may return a single quotation, which is part of lifetime ident and
/// conceptually not a punct in the context of mbe. Callers should handle this.
- pub fn expect_glued_punct(&mut self) -> Result<ArrayVec<Punct<S>, MAX_GLUED_PUNCT_LEN>, ()> {
- let TtElement::Leaf(&Leaf::Punct(first)) = self.next().ok_or(())? else {
+ pub fn expect_glued_punct(&mut self) -> Result<ArrayVec<Punct, MAX_GLUED_PUNCT_LEN>, ()> {
+ let TtElement::Leaf(Leaf::Punct(first)) = self.next().ok_or(())? else {
return Err(());
};
@@ -140,8 +144,8 @@ impl<'a, S: Copy> TtIter<'a, S> {
let _ = self.next().unwrap();
let _ = self.next().unwrap();
res.push(first);
- res.push(*second);
- res.push(*third.unwrap());
+ res.push(second);
+ res.push(third.unwrap());
}
('-' | '!' | '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _)
| ('-' | '=' | '>', '>', _)
@@ -153,7 +157,7 @@ impl<'a, S: Copy> TtIter<'a, S> {
| ('|', '|', _) => {
let _ = self.next().unwrap();
res.push(first);
- res.push(*second);
+ res.push(second);
}
_ => res.push(first),
}
@@ -161,16 +165,20 @@ impl<'a, S: Copy> TtIter<'a, S> {
}
/// This method won't check for subtrees, so the nth token tree may not be the nth sibling of the current tree.
- fn peek_n(&self, n: usize) -> Option<&'a TokenTree<S>> {
- self.inner.as_slice().get(n)
+ fn peek_n(&self, n: usize) -> Option<TokenTree> {
+ dispatch_ref! {
+ match self.inner.repr => tt => Some(tt.get(n)?.to_api(self.inner.span_parts))
+ }
}
- pub fn peek(&self) -> Option<TtElement<'a, S>> {
- match self.inner.as_slice().first()? {
+ pub fn peek(&self) -> Option<TtElement<'a>> {
+ match self.peek_n(0)? {
TokenTree::Leaf(leaf) => Some(TtElement::Leaf(leaf)),
TokenTree::Subtree(subtree) => {
- let nested_iter =
- TtIter { inner: self.inner.as_slice()[1..][..subtree.usize_len()].iter() };
+ let nested_repr = self.inner.repr.get(1..subtree.usize_len() + 1).unwrap();
+ let nested_iter = TtIter {
+ inner: TokenTreesView { repr: nested_repr, span_parts: self.inner.span_parts },
+ };
Some(TtElement::Subtree(subtree, nested_iter))
}
}
@@ -181,30 +189,55 @@ impl<'a, S: Copy> TtIter<'a, S> {
self.inner.len() == 0
}
- pub fn next_span(&self) -> Option<S> {
- Some(self.inner.as_slice().first()?.first_span())
+ pub fn next_span(&self) -> Option<Span> {
+ Some(self.peek()?.first_span())
}
- pub fn remaining(&self) -> TokenTreesView<'a, S> {
- TokenTreesView::new(self.inner.as_slice())
+ pub fn remaining(&self) -> TokenTreesView<'a> {
+ self.inner
}
/// **Warning**: This advances `skip` **flat** token trees, subtrees account for children+1!
pub fn flat_advance(&mut self, skip: usize) {
- self.inner = self.inner.as_slice()[skip..].iter();
+ self.inner.repr = self.inner.repr.get(skip..).unwrap();
}
- pub fn savepoint(&self) -> TtIterSavepoint<'a, S> {
- TtIterSavepoint(self.inner.as_slice())
+ pub fn savepoint(&self) -> TtIterSavepoint<'a> {
+ TtIterSavepoint(self.inner)
}
- pub fn from_savepoint(&self, savepoint: TtIterSavepoint<'a, S>) -> TokenTreesView<'a, S> {
- let len = (self.inner.as_slice().as_ptr() as usize - savepoint.0.as_ptr() as usize)
- / size_of::<TokenTree<S>>();
- TokenTreesView::new(&savepoint.0[..len])
+ pub fn from_savepoint(&self, savepoint: TtIterSavepoint<'a>) -> TokenTreesView<'a> {
+ let len = match (self.inner.repr, savepoint.0.repr) {
+ (
+ TokenTreesReprRef::SpanStorage32(this),
+ TokenTreesReprRef::SpanStorage32(savepoint),
+ ) => {
+ (this.as_ptr() as usize - savepoint.as_ptr() as usize)
+ / size_of::<crate::storage::TokenTree<crate::storage::SpanStorage32>>()
+ }
+ (
+ TokenTreesReprRef::SpanStorage64(this),
+ TokenTreesReprRef::SpanStorage64(savepoint),
+ ) => {
+ (this.as_ptr() as usize - savepoint.as_ptr() as usize)
+ / size_of::<crate::storage::TokenTree<crate::storage::SpanStorage64>>()
+ }
+ (
+ TokenTreesReprRef::SpanStorage96(this),
+ TokenTreesReprRef::SpanStorage96(savepoint),
+ ) => {
+ (this.as_ptr() as usize - savepoint.as_ptr() as usize)
+ / size_of::<crate::storage::TokenTree<crate::storage::SpanStorage96>>()
+ }
+ _ => panic!("savepoint did not originate from this TtIter"),
+ };
+ TokenTreesView {
+ repr: savepoint.0.repr.get(..len).unwrap(),
+ span_parts: savepoint.0.span_parts,
+ }
}
- pub fn next_as_view(&mut self) -> Option<TokenTreesView<'a, S>> {
+ pub fn next_as_view(&mut self) -> Option<TokenTreesView<'a>> {
let savepoint = self.savepoint();
self.next()?;
Some(self.from_savepoint(savepoint))
@@ -212,12 +245,12 @@ impl<'a, S: Copy> TtIter<'a, S> {
}
#[derive(Clone)]
-pub enum TtElement<'a, S> {
- Leaf(&'a Leaf<S>),
- Subtree(&'a Subtree<S>, TtIter<'a, S>),
+pub enum TtElement<'a> {
+ Leaf(Leaf),
+ Subtree(Subtree, TtIter<'a>),
}
-impl<S: Copy + fmt::Debug> fmt::Debug for TtElement<'_, S> {
+impl fmt::Debug for TtElement<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Leaf(leaf) => f.debug_tuple("Leaf").field(leaf).finish(),
@@ -228,9 +261,9 @@ impl<S: Copy + fmt::Debug> fmt::Debug for TtElement<'_, S> {
}
}
-impl<S: Copy> TtElement<'_, S> {
+impl TtElement<'_> {
#[inline]
- pub fn first_span(&self) -> S {
+ pub fn first_span(&self) -> Span {
match self {
TtElement::Leaf(it) => *it.span(),
TtElement::Subtree(it, _) => it.delimiter.open,
@@ -238,17 +271,15 @@ impl<S: Copy> TtElement<'_, S> {
}
}
-impl<'a, S> Iterator for TtIter<'a, S> {
- type Item = TtElement<'a, S>;
+impl<'a> Iterator for TtIter<'a> {
+ type Item = TtElement<'a>;
fn next(&mut self) -> Option<Self::Item> {
- match self.inner.next()? {
- TokenTree::Leaf(leaf) => Some(TtElement::Leaf(leaf)),
- TokenTree::Subtree(subtree) => {
- let nested_iter =
- TtIter { inner: self.inner.as_slice()[..subtree.usize_len()].iter() };
- self.inner = self.inner.as_slice()[subtree.usize_len()..].iter();
- Some(TtElement::Subtree(subtree, nested_iter))
- }
- }
+ let result = self.peek()?;
+ let skip = match &result {
+ TtElement::Leaf(_) => 1,
+ TtElement::Subtree(subtree, _) => subtree.usize_len() + 1,
+ };
+ self.inner.repr = self.inner.repr.get(skip..).unwrap();
+ Some(result)
}
}
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index d6a743c695..72b0d762ef 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -15,16 +15,23 @@ extern crate rustc_lexer;
pub mod buffer;
pub mod iter;
+mod storage;
-use std::fmt;
+use std::{fmt, slice::SliceIndex};
+use arrayvec::ArrayString;
use buffer::Cursor;
use intern::Symbol;
-use iter::{TtElement, TtIter};
use stdx::{impl_from, itertools::Itertools as _};
+pub use span::Span;
pub use text_size::{TextRange, TextSize};
+use crate::storage::{CompressedSpanPart, SpanStorage};
+
+pub use self::iter::{TtElement, TtIter};
+pub use self::storage::{TopSubtree, TopSubtreeBuilder};
+
pub const MAX_GLUED_PUNCT_LEN: usize = 3;
#[derive(Clone, PartialEq, Debug)]
@@ -77,13 +84,13 @@ pub enum LitKind {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum TokenTree<S = u32> {
- Leaf(Leaf<S>),
- Subtree(Subtree<S>),
+pub enum TokenTree {
+ Leaf(Leaf),
+ Subtree(Subtree),
}
-impl_from!(Leaf<S>, Subtree<S> for TokenTree);
-impl<S: Copy> TokenTree<S> {
- pub fn first_span(&self) -> S {
+impl_from!(Leaf, Subtree for TokenTree);
+impl TokenTree {
+ pub fn first_span(&self) -> Span {
match self {
TokenTree::Leaf(l) => *l.span(),
TokenTree::Subtree(s) => s.delimiter.open,
@@ -92,14 +99,14 @@ impl<S: Copy> TokenTree<S> {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum Leaf<S> {
- Literal(Literal<S>),
- Punct(Punct<S>),
- Ident(Ident<S>),
+pub enum Leaf {
+ Literal(Literal),
+ Punct(Punct),
+ Ident(Ident),
}
-impl<S> Leaf<S> {
- pub fn span(&self) -> &S {
+impl Leaf {
+ pub fn span(&self) -> &Span {
match self {
Leaf::Literal(it) => &it.span,
Leaf::Punct(it) => &it.span,
@@ -107,282 +114,120 @@ impl<S> Leaf<S> {
}
}
}
-impl_from!(Literal<S>, Punct<S>, Ident<S> for Leaf);
+impl_from!(Literal, Punct, Ident for Leaf);
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Subtree<S> {
- pub delimiter: Delimiter<S>,
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Subtree {
+ pub delimiter: Delimiter,
/// Number of following token trees that belong to this subtree, excluding this subtree.
pub len: u32,
}
-impl<S> Subtree<S> {
+impl Subtree {
pub fn usize_len(&self) -> usize {
self.len as usize
}
}
-#[derive(Clone, PartialEq, Eq, Hash)]
-pub struct TopSubtree<S>(pub Box<[TokenTree<S>]>);
-
-impl<S: Copy> TopSubtree<S> {
- pub fn empty(span: DelimSpan<S>) -> Self {
- Self(Box::new([TokenTree::Subtree(Subtree {
- delimiter: Delimiter::invisible_delim_spanned(span),
- len: 0,
- })]))
- }
-
- pub fn invisible_from_leaves<const N: usize>(delim_span: S, leaves: [Leaf<S>; N]) -> Self {
- let mut builder = TopSubtreeBuilder::new(Delimiter::invisible_spanned(delim_span));
- builder.extend(leaves);
- builder.build()
- }
-
- pub fn from_token_trees(delimiter: Delimiter<S>, token_trees: TokenTreesView<'_, S>) -> Self {
- let mut builder = TopSubtreeBuilder::new(delimiter);
- builder.extend_with_tt(token_trees);
- builder.build()
- }
-
- pub fn from_subtree(subtree: SubtreeView<'_, S>) -> Self {
- Self(subtree.0.into())
- }
-
- pub fn view(&self) -> SubtreeView<'_, S> {
- SubtreeView::new(&self.0)
- }
-
- pub fn iter(&self) -> TtIter<'_, S> {
- self.view().iter()
- }
-
- pub fn top_subtree(&self) -> &Subtree<S> {
- self.view().top_subtree()
- }
-
- pub fn top_subtree_delimiter_mut(&mut self) -> &mut Delimiter<S> {
- let TokenTree::Subtree(subtree) = &mut self.0[0] else {
- unreachable!("the first token tree is always the top subtree");
- };
- &mut subtree.delimiter
- }
-
- pub fn token_trees(&self) -> TokenTreesView<'_, S> {
- self.view().token_trees()
- }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct TopSubtreeBuilder<S> {
- unclosed_subtree_indices: Vec<usize>,
- token_trees: Vec<TokenTree<S>>,
- last_closed_subtree: Option<usize>,
-}
-
-impl<S: Copy> TopSubtreeBuilder<S> {
- pub fn new(top_delimiter: Delimiter<S>) -> Self {
- let mut result = Self {
- unclosed_subtree_indices: Vec::new(),
- token_trees: Vec::new(),
- last_closed_subtree: None,
- };
- let top_subtree = TokenTree::Subtree(Subtree { delimiter: top_delimiter, len: 0 });
- result.token_trees.push(top_subtree);
- result
- }
-
- pub fn open(&mut self, delimiter_kind: DelimiterKind, open_span: S) {
- self.unclosed_subtree_indices.push(self.token_trees.len());
- self.token_trees.push(TokenTree::Subtree(Subtree {
- delimiter: Delimiter {
- open: open_span,
- close: open_span, // Will be overwritten on close.
- kind: delimiter_kind,
- },
- len: 0,
- }));
- }
-
- pub fn close(&mut self, close_span: S) {
- let last_unclosed_index = self
- .unclosed_subtree_indices
- .pop()
- .expect("attempt to close a `tt::Subtree` when none is open");
- let subtree_len = (self.token_trees.len() - last_unclosed_index - 1) as u32;
- let TokenTree::Subtree(subtree) = &mut self.token_trees[last_unclosed_index] else {
- unreachable!("unclosed token tree is always a subtree");
- };
- subtree.len = subtree_len;
- subtree.delimiter.close = close_span;
- self.last_closed_subtree = Some(last_unclosed_index);
- }
-
- /// You cannot call this consecutively, it will only work once after close.
- pub fn remove_last_subtree_if_invisible(&mut self) {
- let Some(last_subtree_idx) = self.last_closed_subtree else { return };
- if let TokenTree::Subtree(Subtree {
- delimiter: Delimiter { kind: DelimiterKind::Invisible, .. },
- ..
- }) = self.token_trees[last_subtree_idx]
- {
- self.token_trees.remove(last_subtree_idx);
- self.last_closed_subtree = None;
- }
- }
-
- pub fn push(&mut self, leaf: Leaf<S>) {
- self.token_trees.push(TokenTree::Leaf(leaf));
- }
-
- pub fn extend(&mut self, leaves: impl IntoIterator<Item = Leaf<S>>) {
- self.token_trees.extend(leaves.into_iter().map(TokenTree::Leaf));
- }
-
- /// This does not check the token trees are valid, beware!
- pub fn extend_tt_dangerous(&mut self, tt: impl IntoIterator<Item = TokenTree<S>>) {
- self.token_trees.extend(tt);
- }
-
- pub fn extend_with_tt(&mut self, tt: TokenTreesView<'_, S>) {
- self.token_trees.extend(tt.0.iter().cloned());
- }
-
- /// Like [`Self::extend_with_tt()`], but makes sure the new tokens will never be
- /// joint with whatever comes after them.
- pub fn extend_with_tt_alone(&mut self, tt: TokenTreesView<'_, S>) {
- if let Some((last, before_last)) = tt.0.split_last() {
- self.token_trees.reserve(tt.0.len());
- self.token_trees.extend(before_last.iter().cloned());
- let last = if let TokenTree::Leaf(Leaf::Punct(last)) = last {
- let mut last = *last;
- last.spacing = Spacing::Alone;
- TokenTree::Leaf(Leaf::Punct(last))
- } else {
- last.clone()
- };
- self.token_trees.push(last);
+#[rust_analyzer::macro_style(braces)]
+macro_rules! dispatch_ref {
+ (
+ match $scrutinee:expr => $tt:ident => $body:expr
+ ) => {
+ match $scrutinee {
+ $crate::TokenTreesReprRef::SpanStorage32($tt) => $body,
+ $crate::TokenTreesReprRef::SpanStorage64($tt) => $body,
+ $crate::TokenTreesReprRef::SpanStorage96($tt) => $body,
}
- }
-
- pub fn expected_delimiters(&self) -> impl Iterator<Item = &Delimiter<S>> {
- self.unclosed_subtree_indices.iter().rev().map(|&subtree_idx| {
- let TokenTree::Subtree(subtree) = &self.token_trees[subtree_idx] else {
- unreachable!("unclosed token tree is always a subtree")
- };
- &subtree.delimiter
- })
- }
+ };
+}
+use dispatch_ref;
- /// Builds, and remove the top subtree if it has only one subtree child.
- pub fn build_skip_top_subtree(mut self) -> TopSubtree<S> {
- let top_tts = TokenTreesView::new(&self.token_trees[1..]);
- match top_tts.try_into_subtree() {
- Some(_) => {
- assert!(
- self.unclosed_subtree_indices.is_empty(),
- "attempt to build an unbalanced `TopSubtreeBuilder`"
- );
- TopSubtree(self.token_trees.drain(1..).collect())
+#[derive(Clone, Copy)]
+enum TokenTreesReprRef<'a> {
+ SpanStorage32(&'a [crate::storage::TokenTree<crate::storage::SpanStorage32>]),
+ SpanStorage64(&'a [crate::storage::TokenTree<crate::storage::SpanStorage64>]),
+ SpanStorage96(&'a [crate::storage::TokenTree<crate::storage::SpanStorage96>]),
+}
+
+impl<'a> TokenTreesReprRef<'a> {
+ #[inline]
+ fn get<I>(&self, index: I) -> Option<Self>
+ where
+ I: SliceIndex<
+ [crate::storage::TokenTree<crate::storage::SpanStorage32>],
+ Output = [crate::storage::TokenTree<crate::storage::SpanStorage32>],
+ >,
+ I: SliceIndex<
+ [crate::storage::TokenTree<crate::storage::SpanStorage64>],
+ Output = [crate::storage::TokenTree<crate::storage::SpanStorage64>],
+ >,
+ I: SliceIndex<
+ [crate::storage::TokenTree<crate::storage::SpanStorage96>],
+ Output = [crate::storage::TokenTree<crate::storage::SpanStorage96>],
+ >,
+ {
+ Some(match self {
+ TokenTreesReprRef::SpanStorage32(tt) => {
+ TokenTreesReprRef::SpanStorage32(tt.get(index)?)
}
- None => self.build(),
- }
- }
-
- pub fn build(mut self) -> TopSubtree<S> {
- assert!(
- self.unclosed_subtree_indices.is_empty(),
- "attempt to build an unbalanced `TopSubtreeBuilder`"
- );
- let total_len = self.token_trees.len() as u32;
- let TokenTree::Subtree(top_subtree) = &mut self.token_trees[0] else {
- unreachable!("first token tree is always a subtree");
- };
- top_subtree.len = total_len - 1;
- TopSubtree(self.token_trees.into_boxed_slice())
- }
-
- pub fn restore_point(&self) -> SubtreeBuilderRestorePoint {
- SubtreeBuilderRestorePoint {
- unclosed_subtree_indices_len: self.unclosed_subtree_indices.len(),
- token_trees_len: self.token_trees.len(),
- last_closed_subtree: self.last_closed_subtree,
- }
- }
-
- pub fn restore(&mut self, restore_point: SubtreeBuilderRestorePoint) {
- self.unclosed_subtree_indices.truncate(restore_point.unclosed_subtree_indices_len);
- self.token_trees.truncate(restore_point.token_trees_len);
- self.last_closed_subtree = restore_point.last_closed_subtree;
+ TokenTreesReprRef::SpanStorage64(tt) => {
+ TokenTreesReprRef::SpanStorage64(tt.get(index)?)
+ }
+ TokenTreesReprRef::SpanStorage96(tt) => {
+ TokenTreesReprRef::SpanStorage96(tt.get(index)?)
+ }
+ })
}
}
#[derive(Clone, Copy)]
-pub struct SubtreeBuilderRestorePoint {
- unclosed_subtree_indices_len: usize,
- token_trees_len: usize,
- last_closed_subtree: Option<usize>,
+pub struct TokenTreesView<'a> {
+ repr: TokenTreesReprRef<'a>,
+ span_parts: &'a [CompressedSpanPart],
}
-#[derive(Clone, Copy)]
-pub struct TokenTreesView<'a, S>(&'a [TokenTree<S>]);
-
-impl<'a, S: Copy> TokenTreesView<'a, S> {
- pub fn new(tts: &'a [TokenTree<S>]) -> Self {
- if cfg!(debug_assertions) {
- tts.iter().enumerate().for_each(|(idx, tt)| {
- if let TokenTree::Subtree(tt) = &tt {
- // `<` and not `<=` because `Subtree.len` does not include the subtree node itself.
- debug_assert!(
- idx + tt.usize_len() < tts.len(),
- "`TokenTreeView::new()` was given a cut-in-half list"
- );
- }
- });
- }
- Self(tts)
+impl<'a> TokenTreesView<'a> {
+ pub fn empty() -> Self {
+ Self { repr: TokenTreesReprRef::SpanStorage32(&[]), span_parts: &[] }
}
- pub fn iter(&self) -> TtIter<'a, S> {
- TtIter::new(self.0)
+ pub fn iter(&self) -> TtIter<'a> {
+ TtIter::new(*self)
}
- pub fn cursor(&self) -> Cursor<'a, S> {
- Cursor::new(self.0)
+ pub fn cursor(&self) -> Cursor<'a> {
+ Cursor::new(*self)
}
pub fn len(&self) -> usize {
- self.0.len()
+ dispatch_ref! {
+ match self.repr => tt => tt.len()
+ }
}
pub fn is_empty(&self) -> bool {
- self.0.is_empty()
+ self.len() == 0
}
- pub fn try_into_subtree(self) -> Option<SubtreeView<'a, S>> {
- if let Some(TokenTree::Subtree(subtree)) = self.0.first()
- && subtree.usize_len() == (self.0.len() - 1)
- {
- return Some(SubtreeView::new(self.0));
- }
- None
+ pub fn try_into_subtree(self) -> Option<SubtreeView<'a>> {
+ let is_subtree = dispatch_ref! {
+ match self.repr => tt => matches!(
+ tt.first(),
+ Some(crate::storage::TokenTree::Subtree { len, .. }) if (*len as usize) == (tt.len() - 1)
+ )
+ };
+ if is_subtree { Some(SubtreeView(self)) } else { None }
}
- pub fn strip_invisible(self) -> TokenTreesView<'a, S> {
+ pub fn strip_invisible(self) -> TokenTreesView<'a> {
self.try_into_subtree().map(|subtree| subtree.strip_invisible()).unwrap_or(self)
}
- /// This returns a **flat** structure of tokens (subtrees will be represented by a single node
- /// preceding their children), so it isn't suited for most use cases, only for matching leaves
- /// at the beginning/end with no subtrees before them. If you need a structured pass, use [`TtIter`].
- pub fn flat_tokens(&self) -> &'a [TokenTree<S>] {
- self.0
- }
-
pub fn split(
self,
- mut split_fn: impl FnMut(TtElement<'a, S>) -> bool,
- ) -> impl Iterator<Item = TokenTreesView<'a, S>> {
+ mut split_fn: impl FnMut(TtElement<'a>) -> bool,
+ ) -> impl Iterator<Item = TokenTreesView<'a>> {
let mut subtree_iter = self.iter();
let mut need_to_yield_even_if_empty = true;
@@ -404,9 +249,29 @@ impl<'a, S: Copy> TokenTreesView<'a, S> {
Some(result)
})
}
+
+ pub fn first_span(&self) -> Option<Span> {
+ Some(dispatch_ref! {
+ match self.repr => tt => tt.first()?.first_span().span(self.span_parts)
+ })
+ }
+
+ pub fn last_span(&self) -> Option<Span> {
+ Some(dispatch_ref! {
+ match self.repr => tt => tt.last()?.last_span().span(self.span_parts)
+ })
+ }
+
+ pub fn iter_flat_tokens(self) -> impl ExactSizeIterator<Item = TokenTree> + use<'a> {
+ (0..self.len()).map(move |idx| {
+ dispatch_ref! {
+ match self.repr => tt => tt[idx].to_api(self.span_parts)
+ }
+ })
+ }
}
-impl<S: fmt::Debug + Copy> fmt::Debug for TokenTreesView<'_, S> {
+impl fmt::Debug for TokenTreesView<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut iter = self.iter();
while let Some(tt) = iter.next() {
@@ -419,14 +284,14 @@ impl<S: fmt::Debug + Copy> fmt::Debug for TokenTreesView<'_, S> {
}
}
-impl<S: Copy> fmt::Display for TokenTreesView<'_, S> {
+impl fmt::Display for TokenTreesView<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
return token_trees_display(f, self.iter());
- fn subtree_display<S>(
- subtree: &Subtree<S>,
+ fn subtree_display(
+ subtree: &Subtree,
f: &mut fmt::Formatter<'_>,
- iter: TtIter<'_, S>,
+ iter: TtIter<'_>,
) -> fmt::Result {
let (l, r) = match subtree.delimiter.kind {
DelimiterKind::Parenthesis => ("(", ")"),
@@ -440,7 +305,7 @@ impl<S: Copy> fmt::Display for TokenTreesView<'_, S> {
Ok(())
}
- fn token_trees_display<S>(f: &mut fmt::Formatter<'_>, iter: TtIter<'_, S>) -> fmt::Result {
+ fn token_trees_display(f: &mut fmt::Formatter<'_>, iter: TtIter<'_>) -> fmt::Result {
let mut needs_space = false;
for child in iter {
if needs_space {
@@ -451,11 +316,11 @@ impl<S: Copy> fmt::Display for TokenTreesView<'_, S> {
match child {
TtElement::Leaf(Leaf::Punct(p)) => {
needs_space = p.spacing == Spacing::Alone;
- fmt::Display::fmt(p, f)?;
+ fmt::Display::fmt(&p, f)?;
}
- TtElement::Leaf(leaf) => fmt::Display::fmt(leaf, f)?,
+ TtElement::Leaf(leaf) => fmt::Display::fmt(&leaf, f)?,
TtElement::Subtree(subtree, subtree_iter) => {
- subtree_display(subtree, f, subtree_iter)?
+ subtree_display(&subtree, f, subtree_iter)?
}
}
}
@@ -466,70 +331,80 @@ impl<S: Copy> fmt::Display for TokenTreesView<'_, S> {
#[derive(Clone, Copy)]
// Invariant: always starts with `Subtree` that covers the entire thing.
-pub struct SubtreeView<'a, S>(&'a [TokenTree<S>]);
+pub struct SubtreeView<'a>(TokenTreesView<'a>);
-impl<'a, S: Copy> SubtreeView<'a, S> {
- pub fn new(tts: &'a [TokenTree<S>]) -> Self {
- if cfg!(debug_assertions) {
- let TokenTree::Subtree(subtree) = &tts[0] else {
- panic!("first token tree must be a subtree in `SubtreeView`");
- };
- assert_eq!(
- subtree.usize_len(),
- tts.len() - 1,
- "subtree must cover the entire `SubtreeView`"
- );
- }
- Self(tts)
- }
-
- pub fn as_token_trees(self) -> TokenTreesView<'a, S> {
- TokenTreesView::new(self.0)
+impl<'a> SubtreeView<'a> {
+ pub fn as_token_trees(self) -> TokenTreesView<'a> {
+ self.0
}
- pub fn iter(&self) -> TtIter<'a, S> {
- TtIter::new(&self.0[1..])
+ pub fn iter(&self) -> TtIter<'a> {
+ self.token_trees().iter()
}
- pub fn top_subtree(&self) -> &'a Subtree<S> {
- let TokenTree::Subtree(subtree) = &self.0[0] else {
- unreachable!("the first token tree is always the top subtree");
- };
- subtree
+ pub fn top_subtree(&self) -> Subtree {
+ dispatch_ref! {
+ match self.0.repr => tt => {
+ let crate::storage::TokenTree::Subtree { len, delim_kind, open_span, close_span } =
+ &tt[0]
+ else {
+ unreachable!("the first token tree is always the top subtree");
+ };
+ Subtree {
+ delimiter: Delimiter {
+ open: open_span.span(self.0.span_parts),
+ close: close_span.span(self.0.span_parts),
+ kind: *delim_kind,
+ },
+ len: *len,
+ }
+ }
+ }
}
- pub fn strip_invisible(&self) -> TokenTreesView<'a, S> {
+ pub fn strip_invisible(&self) -> TokenTreesView<'a> {
if self.top_subtree().delimiter.kind == DelimiterKind::Invisible {
- TokenTreesView::new(&self.0[1..])
+ self.token_trees()
} else {
- TokenTreesView::new(self.0)
+ self.0
}
}
- pub fn token_trees(&self) -> TokenTreesView<'a, S> {
- TokenTreesView::new(&self.0[1..])
+ pub fn token_trees(&self) -> TokenTreesView<'a> {
+ let repr = match self.0.repr {
+ TokenTreesReprRef::SpanStorage32(token_trees) => {
+ TokenTreesReprRef::SpanStorage32(&token_trees[1..])
+ }
+ TokenTreesReprRef::SpanStorage64(token_trees) => {
+ TokenTreesReprRef::SpanStorage64(&token_trees[1..])
+ }
+ TokenTreesReprRef::SpanStorage96(token_trees) => {
+ TokenTreesReprRef::SpanStorage96(&token_trees[1..])
+ }
+ };
+ TokenTreesView { repr, ..self.0 }
}
}
-impl<S: fmt::Debug + Copy> fmt::Debug for SubtreeView<'_, S> {
+impl fmt::Debug for SubtreeView<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Debug::fmt(&TokenTreesView(self.0), f)
+ fmt::Debug::fmt(&self.0, f)
}
}
-impl<S: Copy> fmt::Display for SubtreeView<'_, S> {
+impl fmt::Display for SubtreeView<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Display::fmt(&TokenTreesView(self.0), f)
+ fmt::Display::fmt(&self.0, f)
}
}
#[derive(Debug, Copy, Clone, PartialEq)]
-pub struct DelimSpan<S> {
- pub open: S,
- pub close: S,
+pub struct DelimSpan {
+ pub open: Span,
+ pub close: Span,
}
-impl<Span: Copy> DelimSpan<Span> {
+impl DelimSpan {
pub fn from_single(sp: Span) -> Self {
DelimSpan { open: sp, close: sp }
}
@@ -539,22 +414,22 @@ impl<Span: Copy> DelimSpan<Span> {
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct Delimiter<S> {
- pub open: S,
- pub close: S,
+pub struct Delimiter {
+ pub open: Span,
+ pub close: Span,
pub kind: DelimiterKind,
}
-impl<S: Copy> Delimiter<S> {
- pub const fn invisible_spanned(span: S) -> Self {
+impl Delimiter {
+ pub const fn invisible_spanned(span: Span) -> Self {
Delimiter { open: span, close: span, kind: DelimiterKind::Invisible }
}
- pub const fn invisible_delim_spanned(span: DelimSpan<S>) -> Self {
+ pub const fn invisible_delim_spanned(span: DelimSpan) -> Self {
Delimiter { open: span.open, close: span.close, kind: DelimiterKind::Invisible }
}
- pub fn delim_span(&self) -> DelimSpan<S> {
+ pub fn delim_span(&self) -> DelimSpan {
DelimSpan { open: self.open, close: self.close }
}
}
@@ -568,18 +443,57 @@ pub enum DelimiterKind {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Literal<S> {
- // escaped
- pub symbol: Symbol,
- pub span: S,
+pub struct Literal {
+ /// Escaped, text then suffix concatenated.
+ pub text_and_suffix: Symbol,
+ pub span: Span,
pub kind: LitKind,
- pub suffix: Option<Symbol>,
+ pub suffix_len: u8,
+}
+
+impl Literal {
+ #[inline]
+ pub fn text_and_suffix(&self) -> (&str, &str) {
+ let text_and_suffix = self.text_and_suffix.as_str();
+ text_and_suffix.split_at(text_and_suffix.len() - usize::from(self.suffix_len))
+ }
+
+ #[inline]
+ pub fn text(&self) -> &str {
+ self.text_and_suffix().0
+ }
+
+ #[inline]
+ pub fn suffix(&self) -> &str {
+ self.text_and_suffix().1
+ }
+
+ pub fn new(text: &str, span: Span, kind: LitKind, suffix: &str) -> Self {
+ const MAX_INLINE_CAPACITY: usize = 30;
+ let text_and_suffix = if suffix.is_empty() {
+ Symbol::intern(text)
+ } else if (text.len() + suffix.len()) < MAX_INLINE_CAPACITY {
+ let mut text_and_suffix = ArrayString::<MAX_INLINE_CAPACITY>::new();
+ text_and_suffix.push_str(text);
+ text_and_suffix.push_str(suffix);
+ Symbol::intern(&text_and_suffix)
+ } else {
+ let mut text_and_suffix = String::with_capacity(text.len() + suffix.len());
+ text_and_suffix.push_str(text);
+ text_and_suffix.push_str(suffix);
+ Symbol::intern(&text_and_suffix)
+ };
+
+ Self { text_and_suffix, span, kind, suffix_len: suffix.len().try_into().unwrap() }
+ }
+
+ #[inline]
+ pub fn new_no_suffix(text: &str, span: Span, kind: LitKind) -> Self {
+ Self { text_and_suffix: Symbol::intern(text), span, kind, suffix_len: 0 }
+ }
}
-pub fn token_to_literal<S>(text: &str, span: S) -> Literal<S>
-where
- S: Copy,
-{
+pub fn token_to_literal(text: &str, span: Span) -> Literal {
use rustc_lexer::LiteralKind;
let token = rustc_lexer::tokenize(text, rustc_lexer::FrontmatterAllowed::No).next_tuple();
@@ -588,12 +502,7 @@ where
..
},)) = token
else {
- return Literal {
- span,
- symbol: Symbol::intern(text),
- kind: LitKind::Err(()),
- suffix: None,
- };
+ return Literal::new_no_suffix(text, span, LitKind::Err(()));
};
let (kind, start_offset, end_offset) = match kind {
@@ -624,27 +533,22 @@ where
let (lit, suffix) = text.split_at(suffix_start as usize);
let lit = &lit[start_offset..lit.len() - end_offset];
let suffix = match suffix {
- "" | "_" => None,
+ "" | "_" => "",
// ill-suffixed literals
_ if !matches!(kind, LitKind::Integer | LitKind::Float | LitKind::Err(_)) => {
- return Literal {
- span,
- symbol: Symbol::intern(text),
- kind: LitKind::Err(()),
- suffix: None,
- };
+ return Literal::new_no_suffix(text, span, LitKind::Err(()));
}
- suffix => Some(Symbol::intern(suffix)),
+ suffix => suffix,
};
- Literal { span, symbol: Symbol::intern(lit), kind, suffix }
+ Literal::new(lit, span, kind, suffix)
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct Punct<S> {
+pub struct Punct {
pub char: char,
pub spacing: Spacing,
- pub span: S,
+ pub span: Span,
}
/// Indicates whether a token can join with the following token to form a
@@ -709,25 +613,25 @@ pub enum Spacing {
/// Identifier or keyword.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Ident<S> {
+pub struct Ident {
pub sym: Symbol,
- pub span: S,
+ pub span: Span,
pub is_raw: IdentIsRaw,
}
-impl<S> Ident<S> {
- pub fn new(text: &str, span: S) -> Self {
+impl Ident {
+ pub fn new(text: &str, span: Span) -> Self {
// let raw_stripped = IdentIsRaw::split_from_symbol(text.as_ref());
let (is_raw, text) = IdentIsRaw::split_from_symbol(text);
Ident { sym: Symbol::intern(text), span, is_raw }
}
}
-fn print_debug_subtree<S: fmt::Debug>(
+fn print_debug_subtree(
f: &mut fmt::Formatter<'_>,
- subtree: &Subtree<S>,
+ subtree: &Subtree,
level: usize,
- iter: TtIter<'_, S>,
+ iter: TtIter<'_>,
) -> fmt::Result {
let align = " ".repeat(level);
@@ -751,25 +655,14 @@ fn print_debug_subtree<S: fmt::Debug>(
Ok(())
}
-fn print_debug_token<S: fmt::Debug>(
- f: &mut fmt::Formatter<'_>,
- level: usize,
- tt: TtElement<'_, S>,
-) -> fmt::Result {
+fn print_debug_token(f: &mut fmt::Formatter<'_>, level: usize, tt: TtElement<'_>) -> fmt::Result {
let align = " ".repeat(level);
match tt {
TtElement::Leaf(leaf) => match leaf {
Leaf::Literal(lit) => {
- write!(
- f,
- "{}LITERAL {:?} {}{} {:#?}",
- align,
- lit.kind,
- lit.symbol,
- lit.suffix.as_ref().map(|it| it.as_str()).unwrap_or(""),
- lit.span
- )?;
+ let (text, suffix) = lit.text_and_suffix();
+ write!(f, "{}LITERAL {:?} {}{} {:#?}", align, lit.kind, text, suffix, lit.span)?;
}
Leaf::Punct(punct) => {
write!(
@@ -793,26 +686,26 @@ fn print_debug_token<S: fmt::Debug>(
}
},
TtElement::Subtree(subtree, subtree_iter) => {
- print_debug_subtree(f, subtree, level, subtree_iter)?;
+ print_debug_subtree(f, &subtree, level, subtree_iter)?;
}
}
Ok(())
}
-impl<S: fmt::Debug + Copy> fmt::Debug for TopSubtree<S> {
+impl fmt::Debug for TopSubtree {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.view(), f)
}
}
-impl<S: fmt::Display + Copy> fmt::Display for TopSubtree<S> {
+impl fmt::Display for TopSubtree {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.view(), f)
}
}
-impl<S> fmt::Display for Leaf<S> {
+impl fmt::Display for Leaf {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Leaf::Ident(it) => fmt::Display::fmt(it, f),
@@ -822,155 +715,88 @@ impl<S> fmt::Display for Leaf<S> {
}
}
-impl<S> fmt::Display for Ident<S> {
+impl fmt::Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.is_raw.as_str(), f)?;
fmt::Display::fmt(&self.sym, f)
}
}
-impl<S> fmt::Display for Literal<S> {
+impl fmt::Display for Literal {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let (text, suffix) = self.text_and_suffix();
match self.kind {
- LitKind::Byte => write!(f, "b'{}'", self.symbol),
- LitKind::Char => write!(f, "'{}'", self.symbol),
- LitKind::Integer | LitKind::Float | LitKind::Err(_) => write!(f, "{}", self.symbol),
- LitKind::Str => write!(f, "\"{}\"", self.symbol),
- LitKind::ByteStr => write!(f, "b\"{}\"", self.symbol),
- LitKind::CStr => write!(f, "c\"{}\"", self.symbol),
+ LitKind::Byte => write!(f, "b'{}'", text),
+ LitKind::Char => write!(f, "'{}'", text),
+ LitKind::Integer | LitKind::Float | LitKind::Err(_) => write!(f, "{}", text),
+ LitKind::Str => write!(f, "\"{}\"", text),
+ LitKind::ByteStr => write!(f, "b\"{}\"", text),
+ LitKind::CStr => write!(f, "c\"{}\"", text),
LitKind::StrRaw(num_of_hashes) => {
let num_of_hashes = num_of_hashes as usize;
- write!(
- f,
- r#"r{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
- "",
- text = self.symbol
- )
+ write!(f, r#"r{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#, "", text = text)
}
LitKind::ByteStrRaw(num_of_hashes) => {
let num_of_hashes = num_of_hashes as usize;
- write!(
- f,
- r#"br{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
- "",
- text = self.symbol
- )
+ write!(f, r#"br{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#, "", text = text)
}
LitKind::CStrRaw(num_of_hashes) => {
let num_of_hashes = num_of_hashes as usize;
- write!(
- f,
- r#"cr{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
- "",
- text = self.symbol
- )
+ write!(f, r#"cr{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#, "", text = text)
}
}?;
- if let Some(suffix) = &self.suffix {
- write!(f, "{suffix}")?;
- }
+ write!(f, "{suffix}")?;
Ok(())
}
}
-impl<S> fmt::Display for Punct<S> {
+impl fmt::Display for Punct {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.char, f)
}
}
-impl<S> Subtree<S> {
+impl Subtree {
/// Count the number of tokens recursively
pub fn count(&self) -> usize {
self.usize_len()
}
}
-impl<S> TopSubtree<S> {
- /// A simple line string used for debugging
- pub fn subtree_as_debug_string(&self, subtree_idx: usize) -> String {
- fn debug_subtree<S>(
- output: &mut String,
- subtree: &Subtree<S>,
- iter: &mut std::slice::Iter<'_, TokenTree<S>>,
- ) {
- let delim = match subtree.delimiter.kind {
- DelimiterKind::Brace => ("{", "}"),
- DelimiterKind::Bracket => ("[", "]"),
- DelimiterKind::Parenthesis => ("(", ")"),
- DelimiterKind::Invisible => ("$", "$"),
- };
-
- output.push_str(delim.0);
- let mut last = None;
- let mut idx = 0;
- while idx < subtree.len {
- let child = iter.next().unwrap();
- debug_token_tree(output, child, last, iter);
- last = Some(child);
- idx += 1;
- }
-
- output.push_str(delim.1);
- }
-
- fn debug_token_tree<S>(
- output: &mut String,
- tt: &TokenTree<S>,
- last: Option<&TokenTree<S>>,
- iter: &mut std::slice::Iter<'_, TokenTree<S>>,
- ) {
- match tt {
- TokenTree::Leaf(it) => {
- let s = match it {
- Leaf::Literal(it) => it.symbol.to_string(),
- Leaf::Punct(it) => it.char.to_string(),
- Leaf::Ident(it) => format!("{}{}", it.is_raw.as_str(), it.sym),
- };
- match (it, last) {
- (Leaf::Ident(_), Some(&TokenTree::Leaf(Leaf::Ident(_)))) => {
- output.push(' ');
- output.push_str(&s);
- }
- (Leaf::Punct(_), Some(TokenTree::Leaf(Leaf::Punct(punct)))) => {
- if punct.spacing == Spacing::Alone {
- output.push(' ');
- output.push_str(&s);
- } else {
- output.push_str(&s);
- }
- }
- _ => output.push_str(&s),
- }
- }
- TokenTree::Subtree(it) => debug_subtree(output, it, iter),
- }
- }
+pub fn pretty(tkns: TokenTreesView<'_>) -> String {
+ return dispatch_ref! {
+ match tkns.repr => tt => pretty_impl(tt)
+ };
- let mut res = String::new();
- debug_token_tree(
- &mut res,
- &self.0[subtree_idx],
- None,
- &mut self.0[subtree_idx + 1..].iter(),
- );
- res
- }
-}
+ use crate::storage::TokenTree;
-pub fn pretty<S>(mut tkns: &[TokenTree<S>]) -> String {
- fn tokentree_to_text<S>(tkn: &TokenTree<S>, tkns: &mut &[TokenTree<S>]) -> String {
+ fn tokentree_to_text<S: SpanStorage>(tkn: &TokenTree<S>, tkns: &mut &[TokenTree<S>]) -> String {
match tkn {
- TokenTree::Leaf(Leaf::Ident(ident)) => {
- format!("{}{}", ident.is_raw.as_str(), ident.sym)
+ TokenTree::Ident { sym, is_raw, .. } => format!("{}{}", is_raw.as_str(), sym),
+ &TokenTree::Literal { ref text_and_suffix, kind, suffix_len, span: _ } => {
+ format!(
+ "{}",
+ Literal {
+ text_and_suffix: text_and_suffix.clone(),
+ span: Span {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor: span::SpanAnchor {
+ file_id: span::EditionedFileId::from_raw(0),
+ ast_id: span::FIXUP_ERASED_FILE_AST_ID_MARKER
+ },
+ ctx: span::SyntaxContext::root(span::Edition::Edition2015)
+ },
+ kind,
+ suffix_len
+ }
+ )
}
- TokenTree::Leaf(Leaf::Literal(literal)) => format!("{literal}"),
- TokenTree::Leaf(Leaf::Punct(punct)) => format!("{}", punct.char),
- TokenTree::Subtree(subtree) => {
- let (subtree_content, rest) = tkns.split_at(subtree.usize_len());
- let content = pretty(subtree_content);
+ TokenTree::Punct { char, .. } => format!("{}", char),
+ TokenTree::Subtree { len, delim_kind, .. } => {
+ let (subtree_content, rest) = tkns.split_at(*len as usize);
+ let content = pretty_impl(subtree_content);
*tkns = rest;
- let (open, close) = match subtree.delimiter.kind {
+ let (open, close) = match *delim_kind {
DelimiterKind::Brace => ("{", "}"),
DelimiterKind::Bracket => ("[", "]"),
DelimiterKind::Parenthesis => ("(", ")"),
@@ -981,18 +807,104 @@ pub fn pretty<S>(mut tkns: &[TokenTree<S>]) -> String {
}
}
- let mut last = String::new();
- let mut last_to_joint = true;
+ fn pretty_impl<S: SpanStorage>(mut tkns: &[TokenTree<S>]) -> String {
+ let mut last = String::new();
+ let mut last_to_joint = true;
+
+ while let Some((tkn, rest)) = tkns.split_first() {
+ tkns = rest;
+ last = [last, tokentree_to_text(tkn, &mut tkns)].join(if last_to_joint {
+ ""
+ } else {
+ " "
+ });
+ last_to_joint = false;
+ if let TokenTree::Punct { spacing, .. } = tkn
+ && *spacing == Spacing::Joint
+ {
+ last_to_joint = true;
+ }
+ }
+ last
+ }
+}
+
+#[derive(Debug)]
+pub enum TransformTtAction<'a> {
+ Keep,
+ ReplaceWith(TokenTreesView<'a>),
+}
+
+impl TransformTtAction<'_> {
+ #[inline]
+ pub fn remove() -> Self {
+ Self::ReplaceWith(TokenTreesView::empty())
+ }
+}
+
+/// This function takes a token tree, and calls `callback` with each token tree in it.
+/// Then it does what the callback says: keeps the tt or replaces it with a (possibly empty)
+/// tts view.
+pub fn transform_tt<'b>(
+ tt: &mut TopSubtree,
+ mut callback: impl FnMut(TokenTree) -> TransformTtAction<'b>,
+) {
+ let mut tt_vec = tt.as_token_trees().iter_flat_tokens().collect::<Vec<_>>();
+
+ // We need to keep a stack of the currently open subtrees, because we need to update
+ // them if we change the number of items in them.
+ let mut subtrees_stack = Vec::new();
+ let mut i = 0;
+ while i < tt_vec.len() {
+ 'pop_finished_subtrees: while let Some(&subtree_idx) = subtrees_stack.last() {
+ let TokenTree::Subtree(subtree) = &tt_vec[subtree_idx] else {
+ unreachable!("non-subtree on subtrees stack");
+ };
+ if i >= subtree_idx + 1 + subtree.usize_len() {
+ subtrees_stack.pop();
+ } else {
+ break 'pop_finished_subtrees;
+ }
+ }
+
+ let current = match &tt_vec[i] {
+ TokenTree::Leaf(leaf) => TokenTree::Leaf(match leaf {
+ Leaf::Literal(leaf) => Leaf::Literal(leaf.clone()),
+ Leaf::Punct(leaf) => Leaf::Punct(*leaf),
+ Leaf::Ident(leaf) => Leaf::Ident(leaf.clone()),
+ }),
+ TokenTree::Subtree(subtree) => TokenTree::Subtree(*subtree),
+ };
+ let action = callback(current);
+ match action {
+ TransformTtAction::Keep => {
+ // This cannot be shared with the replaced case, because then we may push the same subtree
+ // twice, and will update it twice which will lead to errors.
+ if let TokenTree::Subtree(_) = &tt_vec[i] {
+ subtrees_stack.push(i);
+ }
- while let Some((tkn, rest)) = tkns.split_first() {
- tkns = rest;
- last = [last, tokentree_to_text(tkn, &mut tkns)].join(if last_to_joint { "" } else { " " });
- last_to_joint = false;
- if let TokenTree::Leaf(Leaf::Punct(punct)) = tkn
- && punct.spacing == Spacing::Joint
- {
- last_to_joint = true;
+ i += 1;
+ }
+ TransformTtAction::ReplaceWith(replacement) => {
+ let old_len = 1 + match &tt_vec[i] {
+ TokenTree::Leaf(_) => 0,
+ TokenTree::Subtree(subtree) => subtree.usize_len(),
+ };
+ let len_diff = replacement.len() as i64 - old_len as i64;
+ tt_vec.splice(i..i + old_len, replacement.iter_flat_tokens());
+ // Skip the newly inserted replacement, we don't want to visit it.
+ i += replacement.len();
+
+ for &subtree_idx in &subtrees_stack {
+ let TokenTree::Subtree(subtree) = &mut tt_vec[subtree_idx] else {
+ unreachable!("non-subtree on subtrees stack");
+ };
+ subtree.len = (i64::from(subtree.len) + len_diff).try_into().unwrap();
+ }
+ }
}
}
- last
+
+ *tt = TopSubtree::from_serialized(tt_vec);
}
diff --git a/crates/tt/src/storage.rs b/crates/tt/src/storage.rs
new file mode 100644
index 0000000000..4dd02d875a
--- /dev/null
+++ b/crates/tt/src/storage.rs
@@ -0,0 +1,992 @@
+//! Spans are memory heavy, and we have a lot of token trees. Storing them straight
+//! will waste a lot of memory. So instead we implement a clever compression mechanism:
+//!
+//! A `TopSubtree` has a list of [`CompressedSpanPart`], which are the parts of a span
+//! that tend to be shared between tokens - namely, without the range. The main list
+//! of token trees is kept in one of three versions, where we use the smallest version
+//! we can for this tree:
+//!
+//! 1. In the most common version a span is just a `u32`. The bits are divided as follows:
+//! there are 4 bits that index into the [`CompressedSpanPart`] list. 20 bits
+//! store the range start, and 8 bits store the range length. In experiments,
+//! this accounts for 75%-85% of the spans.
+//! 2. In the second version a span is 64 bits. 32 bits for the range start, 16 bits
+//! for the range length, and 16 bits for the span parts index. This is used in
+//! less than 2% of all `TopSubtree`s, but they account for 15%-25% of the spans:
+//! those are mostly token tree munchers, that generate a lot of `SyntaxContext`s
+//! (because they recurse a lot), which is why they can't fit in the first version,
+//! and tend to generate a lot of code.
+//! 3. The third version is practically unused; 65,535 bytes for a token and 65,535
+//! unique span parts is more than enough for everybody. However, someone may still
+//! create a macro that requires more, therefore we have this version as a backup:
+//! it uses 96 bits, 32 for each of the range start, length and span parts index.
+
+use std::fmt;
+
+use intern::Symbol;
+use rustc_hash::FxBuildHasher;
+use span::{Span, SpanAnchor, SyntaxContext, TextRange, TextSize};
+
+use crate::{
+ DelimSpan, DelimiterKind, IdentIsRaw, LitKind, Spacing, SubtreeView, TokenTreesReprRef,
+ TokenTreesView, TtIter, dispatch_ref,
+};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub(crate) struct CompressedSpanPart {
+ pub(crate) anchor: SpanAnchor,
+ pub(crate) ctx: SyntaxContext,
+}
+
+impl CompressedSpanPart {
+ #[inline]
+ fn from_span(span: &Span) -> Self {
+ Self { anchor: span.anchor, ctx: span.ctx }
+ }
+
+ #[inline]
+ fn recombine(&self, range: TextRange) -> Span {
+ Span { range, anchor: self.anchor, ctx: self.ctx }
+ }
+}
+
+pub(crate) trait SpanStorage: Copy {
+ fn can_hold(text_range: TextRange, span_parts_index: usize) -> bool;
+
+ fn new(text_range: TextRange, span_parts_index: usize) -> Self;
+
+ fn text_range(&self) -> TextRange;
+
+ fn span_parts_index(&self) -> usize;
+
+ #[inline]
+ fn span(&self, span_parts: &[CompressedSpanPart]) -> Span {
+ span_parts[self.span_parts_index()].recombine(self.text_range())
+ }
+}
+
+#[inline]
+const fn n_bits_mask(n: u32) -> u32 {
+ (1 << n) - 1
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub(crate) struct SpanStorage32(u32);
+
+impl SpanStorage32 {
+ const SPAN_PARTS_BIT: u32 = 4;
+ const LEN_BITS: u32 = 8;
+ const OFFSET_BITS: u32 = 20;
+}
+
+const _: () = assert!(
+ (SpanStorage32::SPAN_PARTS_BIT + SpanStorage32::LEN_BITS + SpanStorage32::OFFSET_BITS)
+ == u32::BITS
+);
+
+impl SpanStorage for SpanStorage32 {
+ #[inline]
+ fn can_hold(text_range: TextRange, span_parts_index: usize) -> bool {
+ let offset = u32::from(text_range.start());
+ let len = u32::from(text_range.len());
+ let span_parts_index = span_parts_index as u32;
+
+ offset <= n_bits_mask(Self::OFFSET_BITS)
+ && len <= n_bits_mask(Self::LEN_BITS)
+ && span_parts_index <= n_bits_mask(Self::SPAN_PARTS_BIT)
+ }
+
+ #[inline]
+ fn new(text_range: TextRange, span_parts_index: usize) -> Self {
+ let offset = u32::from(text_range.start());
+ let len = u32::from(text_range.len());
+ let span_parts_index = span_parts_index as u32;
+
+ debug_assert!(offset <= n_bits_mask(Self::OFFSET_BITS));
+ debug_assert!(len <= n_bits_mask(Self::LEN_BITS));
+ debug_assert!(span_parts_index <= n_bits_mask(Self::SPAN_PARTS_BIT));
+
+ Self(
+ (offset << (Self::LEN_BITS + Self::SPAN_PARTS_BIT))
+ | (len << Self::SPAN_PARTS_BIT)
+ | span_parts_index,
+ )
+ }
+
+ #[inline]
+ fn text_range(&self) -> TextRange {
+ let offset = TextSize::new(self.0 >> (Self::SPAN_PARTS_BIT + Self::LEN_BITS));
+ let len = TextSize::new((self.0 >> Self::SPAN_PARTS_BIT) & n_bits_mask(Self::LEN_BITS));
+ TextRange::at(offset, len)
+ }
+
+ #[inline]
+ fn span_parts_index(&self) -> usize {
+ (self.0 & n_bits_mask(Self::SPAN_PARTS_BIT)) as usize
+ }
+}
+
+impl fmt::Debug for SpanStorage32 {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("SpanStorage32")
+ .field("text_range", &self.text_range())
+ .field("span_parts_index", &self.span_parts_index())
+ .finish()
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub(crate) struct SpanStorage64 {
+ offset: u32,
+ len_and_parts: u32,
+}
+
+impl SpanStorage64 {
+ const SPAN_PARTS_BIT: u32 = 16;
+ const LEN_BITS: u32 = 16;
+}
+
+const _: () = assert!((SpanStorage64::SPAN_PARTS_BIT + SpanStorage64::LEN_BITS) == u32::BITS);
+
+impl SpanStorage for SpanStorage64 {
+ #[inline]
+ fn can_hold(text_range: TextRange, span_parts_index: usize) -> bool {
+ let len = u32::from(text_range.len());
+ let span_parts_index = span_parts_index as u32;
+
+ len <= n_bits_mask(Self::LEN_BITS) && span_parts_index <= n_bits_mask(Self::SPAN_PARTS_BIT)
+ }
+
+ #[inline]
+ fn new(text_range: TextRange, span_parts_index: usize) -> Self {
+ let offset = u32::from(text_range.start());
+ let len = u32::from(text_range.len());
+ let span_parts_index = span_parts_index as u32;
+
+ debug_assert!(len <= n_bits_mask(Self::LEN_BITS));
+ debug_assert!(span_parts_index <= n_bits_mask(Self::SPAN_PARTS_BIT));
+
+ Self { offset, len_and_parts: (len << Self::SPAN_PARTS_BIT) | span_parts_index }
+ }
+
+ #[inline]
+ fn text_range(&self) -> TextRange {
+ let offset = TextSize::new(self.offset);
+ let len = TextSize::new(self.len_and_parts >> Self::SPAN_PARTS_BIT);
+ TextRange::at(offset, len)
+ }
+
+ #[inline]
+ fn span_parts_index(&self) -> usize {
+ (self.len_and_parts & n_bits_mask(Self::SPAN_PARTS_BIT)) as usize
+ }
+}
+
+impl fmt::Debug for SpanStorage64 {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("SpanStorage64")
+ .field("text_range", &self.text_range())
+ .field("span_parts_index", &self.span_parts_index())
+ .finish()
+ }
+}
+
+impl From<SpanStorage32> for SpanStorage64 {
+ #[inline]
+ fn from(value: SpanStorage32) -> Self {
+ SpanStorage64::new(value.text_range(), value.span_parts_index())
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub(crate) struct SpanStorage96 {
+ offset: u32,
+ len: u32,
+ parts: u32,
+}
+
+impl SpanStorage for SpanStorage96 {
+ #[inline]
+ fn can_hold(_text_range: TextRange, _span_parts_index: usize) -> bool {
+ true
+ }
+
+ #[inline]
+ fn new(text_range: TextRange, span_parts_index: usize) -> Self {
+ let offset = u32::from(text_range.start());
+ let len = u32::from(text_range.len());
+ let span_parts_index = span_parts_index as u32;
+
+ Self { offset, len, parts: span_parts_index }
+ }
+
+ #[inline]
+ fn text_range(&self) -> TextRange {
+ let offset = TextSize::new(self.offset);
+ let len = TextSize::new(self.len);
+ TextRange::at(offset, len)
+ }
+
+ #[inline]
+ fn span_parts_index(&self) -> usize {
+ self.parts as usize
+ }
+}
+
+impl fmt::Debug for SpanStorage96 {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("SpanStorage96")
+ .field("text_range", &self.text_range())
+ .field("span_parts_index", &self.span_parts_index())
+ .finish()
+ }
+}
+
+impl From<SpanStorage32> for SpanStorage96 {
+ #[inline]
+ fn from(value: SpanStorage32) -> Self {
+ SpanStorage96::new(value.text_range(), value.span_parts_index())
+ }
+}
+
+impl From<SpanStorage64> for SpanStorage96 {
+ #[inline]
+ fn from(value: SpanStorage64) -> Self {
+ SpanStorage96::new(value.text_range(), value.span_parts_index())
+ }
+}
+
+// We don't use structs or enum nesting here to save padding.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub(crate) enum TokenTree<S> {
+ Literal { text_and_suffix: Symbol, span: S, kind: LitKind, suffix_len: u8 },
+ Punct { char: char, spacing: Spacing, span: S },
+ Ident { sym: Symbol, span: S, is_raw: IdentIsRaw },
+ Subtree { len: u32, delim_kind: DelimiterKind, open_span: S, close_span: S },
+}
+
+impl<S: SpanStorage> TokenTree<S> {
+ #[inline]
+ pub(crate) fn first_span(&self) -> &S {
+ match self {
+ TokenTree::Literal { span, .. } => span,
+ TokenTree::Punct { span, .. } => span,
+ TokenTree::Ident { span, .. } => span,
+ TokenTree::Subtree { open_span, .. } => open_span,
+ }
+ }
+
+ #[inline]
+ pub(crate) fn last_span(&self) -> &S {
+ match self {
+ TokenTree::Literal { span, .. } => span,
+ TokenTree::Punct { span, .. } => span,
+ TokenTree::Ident { span, .. } => span,
+ TokenTree::Subtree { close_span, .. } => close_span,
+ }
+ }
+
+ #[inline]
+ pub(crate) fn to_api(&self, span_parts: &[CompressedSpanPart]) -> crate::TokenTree {
+ match self {
+ TokenTree::Literal { text_and_suffix, span, kind, suffix_len } => {
+ crate::TokenTree::Leaf(crate::Leaf::Literal(crate::Literal {
+ text_and_suffix: text_and_suffix.clone(),
+ span: span.span(span_parts),
+ kind: *kind,
+ suffix_len: *suffix_len,
+ }))
+ }
+ TokenTree::Punct { char, spacing, span } => {
+ crate::TokenTree::Leaf(crate::Leaf::Punct(crate::Punct {
+ char: *char,
+ spacing: *spacing,
+ span: span.span(span_parts),
+ }))
+ }
+ TokenTree::Ident { sym, span, is_raw } => {
+ crate::TokenTree::Leaf(crate::Leaf::Ident(crate::Ident {
+ sym: sym.clone(),
+ span: span.span(span_parts),
+ is_raw: *is_raw,
+ }))
+ }
+ TokenTree::Subtree { len, delim_kind, open_span, close_span } => {
+ crate::TokenTree::Subtree(crate::Subtree {
+ delimiter: crate::Delimiter {
+ open: open_span.span(span_parts),
+ close: close_span.span(span_parts),
+ kind: *delim_kind,
+ },
+ len: *len,
+ })
+ }
+ }
+ }
+
+ #[inline]
+ fn convert<U: From<S>>(self) -> TokenTree<U> {
+ match self {
+ TokenTree::Literal { text_and_suffix, span, kind, suffix_len } => {
+ TokenTree::Literal { text_and_suffix, span: span.into(), kind, suffix_len }
+ }
+ TokenTree::Punct { char, spacing, span } => {
+ TokenTree::Punct { char, spacing, span: span.into() }
+ }
+ TokenTree::Ident { sym, span, is_raw } => {
+ TokenTree::Ident { sym, span: span.into(), is_raw }
+ }
+ TokenTree::Subtree { len, delim_kind, open_span, close_span } => TokenTree::Subtree {
+ len,
+ delim_kind,
+ open_span: open_span.into(),
+ close_span: close_span.into(),
+ },
+ }
+ }
+}
+
+// This is used a lot, make sure it doesn't grow unintentionally.
+const _: () = {
+ assert!(size_of::<TokenTree<SpanStorage32>>() == 16);
+ assert!(size_of::<TokenTree<SpanStorage64>>() == 24);
+ assert!(size_of::<TokenTree<SpanStorage96>>() == 32);
+};
+
+#[rust_analyzer::macro_style(braces)]
+macro_rules! dispatch {
+ (
+ match $scrutinee:expr => $tt:ident => $body:expr
+ ) => {
+ match $scrutinee {
+ TopSubtreeRepr::SpanStorage32($tt) => $body,
+ TopSubtreeRepr::SpanStorage64($tt) => $body,
+ TopSubtreeRepr::SpanStorage96($tt) => $body,
+ }
+ };
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub(crate) enum TopSubtreeRepr {
+ SpanStorage32(Box<[TokenTree<SpanStorage32>]>),
+ SpanStorage64(Box<[TokenTree<SpanStorage64>]>),
+ SpanStorage96(Box<[TokenTree<SpanStorage96>]>),
+}
+
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct TopSubtree {
+ repr: TopSubtreeRepr,
+ span_parts: Box<[CompressedSpanPart]>,
+}
+
+impl TopSubtree {
+ pub fn empty(span: DelimSpan) -> Self {
+ Self {
+ repr: TopSubtreeRepr::SpanStorage96(Box::new([TokenTree::Subtree {
+ len: 0,
+ delim_kind: DelimiterKind::Invisible,
+ open_span: SpanStorage96::new(span.open.range, 0),
+ close_span: SpanStorage96::new(span.close.range, 1),
+ }])),
+ span_parts: Box::new([
+ CompressedSpanPart::from_span(&span.open),
+ CompressedSpanPart::from_span(&span.close),
+ ]),
+ }
+ }
+
+ pub fn invisible_from_leaves<const N: usize>(
+ delim_span: Span,
+ leaves: [crate::Leaf; N],
+ ) -> Self {
+ let mut builder = TopSubtreeBuilder::new(crate::Delimiter::invisible_spanned(delim_span));
+ builder.extend(leaves);
+ builder.build()
+ }
+
+ pub fn from_token_trees(delimiter: crate::Delimiter, token_trees: TokenTreesView<'_>) -> Self {
+ let mut builder = TopSubtreeBuilder::new(delimiter);
+ builder.extend_with_tt(token_trees);
+ builder.build()
+ }
+
+ pub fn from_serialized(tt: Vec<crate::TokenTree>) -> Self {
+ let mut tt = tt.into_iter();
+ let Some(crate::TokenTree::Subtree(top_subtree)) = tt.next() else {
+ panic!("first must always come the top subtree")
+ };
+ let mut builder = TopSubtreeBuilder::new(top_subtree.delimiter);
+ for tt in tt {
+ builder.push_token_tree(tt);
+ }
+ builder.build()
+ }
+
+ pub fn from_subtree(subtree: SubtreeView<'_>) -> Self {
+ let mut builder = TopSubtreeBuilder::new(subtree.top_subtree().delimiter);
+ builder.extend_with_tt(subtree.token_trees());
+ builder.build()
+ }
+
+ pub fn view(&self) -> SubtreeView<'_> {
+ let repr = match &self.repr {
+ TopSubtreeRepr::SpanStorage32(token_trees) => {
+ TokenTreesReprRef::SpanStorage32(token_trees)
+ }
+ TopSubtreeRepr::SpanStorage64(token_trees) => {
+ TokenTreesReprRef::SpanStorage64(token_trees)
+ }
+ TopSubtreeRepr::SpanStorage96(token_trees) => {
+ TokenTreesReprRef::SpanStorage96(token_trees)
+ }
+ };
+ SubtreeView(TokenTreesView { repr, span_parts: &self.span_parts })
+ }
+
+ pub fn iter(&self) -> TtIter<'_> {
+ self.view().iter()
+ }
+
+ pub fn top_subtree(&self) -> crate::Subtree {
+ self.view().top_subtree()
+ }
+
+ pub fn set_top_subtree_delimiter_kind(&mut self, kind: DelimiterKind) {
+ dispatch! {
+ match &mut self.repr => tt => {
+ let TokenTree::Subtree { delim_kind, .. } = &mut tt[0] else {
+ unreachable!("the first token tree is always the top subtree");
+ };
+ *delim_kind = kind;
+ }
+ }
+ }
+
+ fn ensure_can_hold(&mut self, range: TextRange) {
+ fn can_hold<S: SpanStorage>(_: &[TokenTree<S>], range: TextRange) -> bool {
+ S::can_hold(range, 0)
+ }
+ let can_hold = dispatch! {
+ match &self.repr => tt => can_hold(tt, range)
+ };
+ if can_hold {
+ return;
+ }
+
+ // Otherwise, we do something very junky: recreate the entire tree. Hopefully this should be rare.
+ let mut builder = TopSubtreeBuilder::new(self.top_subtree().delimiter);
+ builder.extend_with_tt(self.token_trees());
+ builder.ensure_can_hold(range, 0);
+ *self = builder.build();
+ }
+
+ pub fn set_top_subtree_delimiter_span(&mut self, span: DelimSpan) {
+ self.ensure_can_hold(span.open.range);
+ self.ensure_can_hold(span.close.range);
+ fn do_it<S: SpanStorage>(tt: &mut [TokenTree<S>], span: DelimSpan) {
+ let TokenTree::Subtree { open_span, close_span, .. } = &mut tt[0] else {
+ unreachable!()
+ };
+ *open_span = S::new(span.open.range, 0);
+ *close_span = S::new(span.close.range, 0);
+ }
+ dispatch! {
+ match &mut self.repr => tt => do_it(tt, span)
+ }
+ self.span_parts[0] = CompressedSpanPart::from_span(&span.open);
+ self.span_parts[1] = CompressedSpanPart::from_span(&span.close);
+ }
+
+ /// Note: this cannot change spans.
+ pub fn set_token(&mut self, idx: usize, leaf: crate::Leaf) {
+ fn do_it<S: SpanStorage>(
+ tt: &mut [TokenTree<S>],
+ idx: usize,
+ span_parts: &[CompressedSpanPart],
+ leaf: crate::Leaf,
+ ) {
+ assert!(
+ !matches!(tt[idx], TokenTree::Subtree { .. }),
+ "`TopSubtree::set_token()` must be called on a leaf"
+ );
+ let existing_span_compressed = *tt[idx].first_span();
+ let existing_span = existing_span_compressed.span(span_parts);
+ assert_eq!(
+ *leaf.span(),
+ existing_span,
+ "`TopSubtree::set_token()` cannot change spans"
+ );
+ match leaf {
+ crate::Leaf::Literal(leaf) => {
+ tt[idx] = TokenTree::Literal {
+ text_and_suffix: leaf.text_and_suffix,
+ span: existing_span_compressed,
+ kind: leaf.kind,
+ suffix_len: leaf.suffix_len,
+ }
+ }
+ crate::Leaf::Punct(leaf) => {
+ tt[idx] = TokenTree::Punct {
+ char: leaf.char,
+ spacing: leaf.spacing,
+ span: existing_span_compressed,
+ }
+ }
+ crate::Leaf::Ident(leaf) => {
+ tt[idx] = TokenTree::Ident {
+ sym: leaf.sym,
+ span: existing_span_compressed,
+ is_raw: leaf.is_raw,
+ }
+ }
+ }
+ }
+ dispatch! {
+ match &mut self.repr => tt => do_it(tt, idx, &self.span_parts, leaf)
+ }
+ }
+
+ pub fn token_trees(&self) -> TokenTreesView<'_> {
+ self.view().token_trees()
+ }
+
+ pub fn as_token_trees(&self) -> TokenTreesView<'_> {
+ self.view().as_token_trees()
+ }
+
+ pub fn change_every_ast_id(&mut self, mut callback: impl FnMut(&mut span::ErasedFileAstId)) {
+ for span_part in &mut self.span_parts {
+ callback(&mut span_part.anchor.ast_id);
+ }
+ }
+}
+
+#[rust_analyzer::macro_style(braces)]
+macro_rules! dispatch_builder {
+ (
+ match $scrutinee:expr => $tt:ident => $body:expr
+ ) => {
+ match $scrutinee {
+ TopSubtreeBuilderRepr::SpanStorage32($tt) => $body,
+ TopSubtreeBuilderRepr::SpanStorage64($tt) => $body,
+ TopSubtreeBuilderRepr::SpanStorage96($tt) => $body,
+ }
+ };
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+enum TopSubtreeBuilderRepr {
+ SpanStorage32(Vec<TokenTree<SpanStorage32>>),
+ SpanStorage64(Vec<TokenTree<SpanStorage64>>),
+ SpanStorage96(Vec<TokenTree<SpanStorage96>>),
+}
+
+type FxIndexSet<K> = indexmap::IndexSet<K, FxBuildHasher>;
+
+/// In any tree, the first two subtree parts are reserved for the top subtree.
+///
+/// We do it because `TopSubtree` exposes an API to modify the top subtree, therefore it's more convenient
+/// this way, and it's unlikely to affect memory usage.
+const RESERVED_SPAN_PARTS_LEN: usize = 2;
+
+#[derive(Debug, Clone)]
+pub struct TopSubtreeBuilder {
+ unclosed_subtree_indices: Vec<usize>,
+ token_trees: TopSubtreeBuilderRepr,
+ span_parts: FxIndexSet<CompressedSpanPart>,
+ last_closed_subtree: Option<usize>,
+ /// We need to keep those because they are not inside `span_parts`, see [`RESERVED_SPAN_PARTS_LEN`].
+ top_subtree_spans: DelimSpan,
+}
+
+impl TopSubtreeBuilder {
+ pub fn new(top_delimiter: crate::Delimiter) -> Self {
+ let mut result = Self {
+ unclosed_subtree_indices: Vec::new(),
+ token_trees: TopSubtreeBuilderRepr::SpanStorage32(Vec::new()),
+ span_parts: FxIndexSet::default(),
+ last_closed_subtree: None,
+ top_subtree_spans: top_delimiter.delim_span(),
+ };
+ result.ensure_can_hold(top_delimiter.open.range, 0);
+ result.ensure_can_hold(top_delimiter.close.range, 1);
+ fn push_first<S: SpanStorage>(tt: &mut Vec<TokenTree<S>>, top_delimiter: crate::Delimiter) {
+ tt.push(TokenTree::Subtree {
+ len: 0,
+ delim_kind: top_delimiter.kind,
+ open_span: S::new(top_delimiter.open.range, 0),
+ close_span: S::new(top_delimiter.close.range, 1),
+ });
+ }
+ dispatch_builder! {
+ match &mut result.token_trees => tt => push_first(tt, top_delimiter)
+ }
+ result
+ }
+
+ fn span_part_index(&mut self, part: CompressedSpanPart) -> usize {
+ self.span_parts.insert_full(part).0 + RESERVED_SPAN_PARTS_LEN
+ }
+
+ fn switch_repr<T: SpanStorage, U: From<T>>(repr: &mut Vec<TokenTree<T>>) -> Vec<TokenTree<U>> {
+ let repr = std::mem::take(repr);
+ repr.into_iter().map(|tt| tt.convert()).collect()
+ }
+
+ /// Ensures we have a representation that can hold these values.
+ fn ensure_can_hold(&mut self, text_range: TextRange, span_parts_index: usize) {
+ match &mut self.token_trees {
+ TopSubtreeBuilderRepr::SpanStorage32(token_trees) => {
+ if SpanStorage32::can_hold(text_range, span_parts_index) {
+ // Can hold.
+ } else if SpanStorage64::can_hold(text_range, span_parts_index) {
+ self.token_trees =
+ TopSubtreeBuilderRepr::SpanStorage64(Self::switch_repr(token_trees));
+ } else {
+ self.token_trees =
+ TopSubtreeBuilderRepr::SpanStorage96(Self::switch_repr(token_trees));
+ }
+ }
+ TopSubtreeBuilderRepr::SpanStorage64(token_trees) => {
+ if SpanStorage64::can_hold(text_range, span_parts_index) {
+ // Can hold.
+ } else {
+ self.token_trees =
+ TopSubtreeBuilderRepr::SpanStorage96(Self::switch_repr(token_trees));
+ }
+ }
+ TopSubtreeBuilderRepr::SpanStorage96(_) => {
+ // Can hold anything.
+ }
+ }
+ }
+
+ /// Not to be exposed, this assumes the subtree's children will be filled in immediately.
+ fn push_subtree(&mut self, subtree: crate::Subtree) {
+ let open_span_parts_index =
+ self.span_part_index(CompressedSpanPart::from_span(&subtree.delimiter.open));
+ self.ensure_can_hold(subtree.delimiter.open.range, open_span_parts_index);
+ let close_span_parts_index =
+ self.span_part_index(CompressedSpanPart::from_span(&subtree.delimiter.close));
+ self.ensure_can_hold(subtree.delimiter.close.range, close_span_parts_index);
+ fn do_it<S: SpanStorage>(
+ tt: &mut Vec<TokenTree<S>>,
+ open_span_parts_index: usize,
+ close_span_parts_index: usize,
+ subtree: crate::Subtree,
+ ) {
+ let open_span = S::new(subtree.delimiter.open.range, open_span_parts_index);
+ let close_span = S::new(subtree.delimiter.close.range, close_span_parts_index);
+ tt.push(TokenTree::Subtree {
+ len: subtree.len,
+ delim_kind: subtree.delimiter.kind,
+ open_span,
+ close_span,
+ });
+ }
+ dispatch_builder! {
+ match &mut self.token_trees => tt => do_it(tt, open_span_parts_index, close_span_parts_index, subtree)
+ }
+ }
+
+ pub fn open(&mut self, delimiter_kind: DelimiterKind, open_span: Span) {
+ let span_parts_index = self.span_part_index(CompressedSpanPart::from_span(&open_span));
+ self.ensure_can_hold(open_span.range, span_parts_index);
+ fn do_it<S: SpanStorage>(
+ token_trees: &mut Vec<TokenTree<S>>,
+ delimiter_kind: DelimiterKind,
+ range: TextRange,
+ span_parts_index: usize,
+ ) -> usize {
+ let open_span = S::new(range, span_parts_index);
+ token_trees.push(TokenTree::Subtree {
+ len: 0,
+ delim_kind: delimiter_kind,
+ open_span,
+ close_span: open_span, // Will be overwritten on close.
+ });
+ token_trees.len() - 1
+ }
+ let subtree_idx = dispatch_builder! {
+ match &mut self.token_trees => tt => do_it(tt, delimiter_kind, open_span.range, span_parts_index)
+ };
+ self.unclosed_subtree_indices.push(subtree_idx);
+ }
+
+ pub fn close(&mut self, close_span: Span) {
+ let span_parts_index = self.span_part_index(CompressedSpanPart::from_span(&close_span));
+ let range = close_span.range;
+ self.ensure_can_hold(range, span_parts_index);
+
+ let last_unclosed_index = self
+ .unclosed_subtree_indices
+ .pop()
+ .expect("attempt to close a `tt::Subtree` when none is open");
+ fn do_it<S: SpanStorage>(
+ token_trees: &mut [TokenTree<S>],
+ last_unclosed_index: usize,
+ range: TextRange,
+ span_parts_index: usize,
+ ) {
+ let token_trees_len = token_trees.len();
+ let TokenTree::Subtree { len, delim_kind: _, open_span: _, close_span } =
+ &mut token_trees[last_unclosed_index]
+ else {
+ unreachable!("unclosed token tree is always a subtree");
+ };
+ *len = (token_trees_len - last_unclosed_index - 1) as u32;
+ *close_span = S::new(range, span_parts_index);
+ }
+ dispatch_builder! {
+ match &mut self.token_trees => tt => do_it(tt, last_unclosed_index, range, span_parts_index)
+ }
+ self.last_closed_subtree = Some(last_unclosed_index);
+ }
+
+ /// You cannot call this consecutively, it will only work once after close.
+ pub fn remove_last_subtree_if_invisible(&mut self) {
+ let Some(last_subtree_idx) = self.last_closed_subtree else { return };
+ fn do_it<S: SpanStorage>(tt: &mut Vec<TokenTree<S>>, last_subtree_idx: usize) {
+ if let TokenTree::Subtree { delim_kind: DelimiterKind::Invisible, .. } =
+ tt[last_subtree_idx]
+ {
+ tt.remove(last_subtree_idx);
+ }
+ }
+ dispatch_builder! {
+ match &mut self.token_trees => tt => do_it(tt, last_subtree_idx)
+ }
+ self.last_closed_subtree = None;
+ }
+
+ fn push_literal(&mut self, leaf: crate::Literal) {
+ let span_parts_index = self.span_part_index(CompressedSpanPart::from_span(&leaf.span));
+ let range = leaf.span.range;
+ self.ensure_can_hold(range, span_parts_index);
+ fn do_it<S: SpanStorage>(
+ tt: &mut Vec<TokenTree<S>>,
+ range: TextRange,
+ span_parts_index: usize,
+ leaf: crate::Literal,
+ ) {
+ tt.push(TokenTree::Literal {
+ text_and_suffix: leaf.text_and_suffix,
+ span: S::new(range, span_parts_index),
+ kind: leaf.kind,
+ suffix_len: leaf.suffix_len,
+ })
+ }
+ dispatch_builder! {
+ match &mut self.token_trees => tt => do_it(tt, range, span_parts_index, leaf)
+ }
+ }
+
+ fn push_punct(&mut self, leaf: crate::Punct) {
+ let span_parts_index = self.span_part_index(CompressedSpanPart::from_span(&leaf.span));
+ let range = leaf.span.range;
+ self.ensure_can_hold(range, span_parts_index);
+ fn do_it<S: SpanStorage>(
+ tt: &mut Vec<TokenTree<S>>,
+ range: TextRange,
+ span_parts_index: usize,
+ leaf: crate::Punct,
+ ) {
+ tt.push(TokenTree::Punct {
+ char: leaf.char,
+ spacing: leaf.spacing,
+ span: S::new(range, span_parts_index),
+ })
+ }
+ dispatch_builder! {
+ match &mut self.token_trees => tt => do_it(tt, range, span_parts_index, leaf)
+ }
+ }
+
+ fn push_ident(&mut self, leaf: crate::Ident) {
+ let span_parts_index = self.span_part_index(CompressedSpanPart::from_span(&leaf.span));
+ let range = leaf.span.range;
+ self.ensure_can_hold(range, span_parts_index);
+ fn do_it<S: SpanStorage>(
+ tt: &mut Vec<TokenTree<S>>,
+ range: TextRange,
+ span_parts_index: usize,
+ leaf: crate::Ident,
+ ) {
+ tt.push(TokenTree::Ident {
+ sym: leaf.sym,
+ span: S::new(range, span_parts_index),
+ is_raw: leaf.is_raw,
+ })
+ }
+ dispatch_builder! {
+ match &mut self.token_trees => tt => do_it(tt, range, span_parts_index, leaf)
+ }
+ }
+
+ pub fn push(&mut self, leaf: crate::Leaf) {
+ match leaf {
+ crate::Leaf::Literal(leaf) => self.push_literal(leaf),
+ crate::Leaf::Punct(leaf) => self.push_punct(leaf),
+ crate::Leaf::Ident(leaf) => self.push_ident(leaf),
+ }
+ }
+
+ fn push_token_tree(&mut self, tt: crate::TokenTree) {
+ match tt {
+ crate::TokenTree::Leaf(leaf) => self.push(leaf),
+ crate::TokenTree::Subtree(subtree) => self.push_subtree(subtree),
+ }
+ }
+
+ pub fn extend(&mut self, leaves: impl IntoIterator<Item = crate::Leaf>) {
+ leaves.into_iter().for_each(|leaf| self.push(leaf));
+ }
+
+ pub fn extend_with_tt(&mut self, tt: TokenTreesView<'_>) {
+ fn do_it<S: SpanStorage>(
+ this: &mut TopSubtreeBuilder,
+ tt: &[TokenTree<S>],
+ span_parts: &[CompressedSpanPart],
+ ) {
+ for tt in tt {
+ this.push_token_tree(tt.to_api(span_parts));
+ }
+ }
+ dispatch_ref! {
+ match tt.repr => tt_repr => do_it(self, tt_repr, tt.span_parts)
+ }
+ }
+
+ /// Like [`Self::extend_with_tt()`], but makes sure the new tokens will never be
+ /// joint with whatever comes after them.
+ pub fn extend_with_tt_alone(&mut self, tt: TokenTreesView<'_>) {
+ self.extend_with_tt(tt);
+ fn do_it<S: SpanStorage>(tt: &mut [TokenTree<S>]) {
+ if let Some(TokenTree::Punct { spacing, .. }) = tt.last_mut() {
+ *spacing = Spacing::Alone;
+ }
+ }
+ if !tt.is_empty() {
+ dispatch_builder! {
+ match &mut self.token_trees => tt => do_it(tt)
+ }
+ }
+ }
+
+ pub fn expected_delimiters(&self) -> impl Iterator<Item = DelimiterKind> {
+ self.unclosed_subtree_indices.iter().rev().map(|&subtree_idx| {
+ dispatch_builder! {
+ match &self.token_trees => tt => {
+ let TokenTree::Subtree { delim_kind, .. } = tt[subtree_idx] else {
+ unreachable!("unclosed token tree is always a subtree")
+ };
+ delim_kind
+ }
+ }
+ })
+ }
+
+ /// Builds, and remove the top subtree if it has only one subtree child.
+ pub fn build_skip_top_subtree(mut self) -> TopSubtree {
+ fn remove_first_if_needed<S: SpanStorage>(
+ tt: &mut Vec<TokenTree<S>>,
+ top_delim_span: &mut DelimSpan,
+ span_parts: &FxIndexSet<CompressedSpanPart>,
+ ) {
+ let tt_len = tt.len();
+ let Some(TokenTree::Subtree { len, open_span, close_span, .. }) = tt.get_mut(1) else {
+ return;
+ };
+ if (*len as usize) != (tt_len - 2) {
+ // Subtree does not cover the whole tree (minus 2; itself, and the top span).
+ return;
+ }
+
+ // Now we need to adjust the spans, because we assume that the first two spans are always reserved.
+ let top_open_span = span_parts
+ .get_index(open_span.span_parts_index() - RESERVED_SPAN_PARTS_LEN)
+ .unwrap()
+ .recombine(open_span.text_range());
+ let top_close_span = span_parts
+ .get_index(close_span.span_parts_index() - RESERVED_SPAN_PARTS_LEN)
+ .unwrap()
+ .recombine(close_span.text_range());
+ *top_delim_span = DelimSpan { open: top_open_span, close: top_close_span };
+ // Can't remove the top spans from the map, as maybe they're used by other things as well.
+ // Now we need to reencode the spans, because their parts index changed:
+ *open_span = S::new(open_span.text_range(), 0);
+ *close_span = S::new(close_span.text_range(), 1);
+
+ tt.remove(0);
+ }
+ dispatch_builder! {
+ match &mut self.token_trees => tt => remove_first_if_needed(tt, &mut self.top_subtree_spans, &self.span_parts)
+ }
+ self.build()
+ }
+
+ pub fn build(mut self) -> TopSubtree {
+ assert!(
+ self.unclosed_subtree_indices.is_empty(),
+ "attempt to build an unbalanced `TopSubtreeBuilder`"
+ );
+ fn finish_top_len<S: SpanStorage>(tt: &mut [TokenTree<S>]) {
+ let total_len = tt.len() as u32;
+ let TokenTree::Subtree { len, .. } = &mut tt[0] else {
+ unreachable!("first token tree is always a subtree");
+ };
+ *len = total_len - 1;
+ }
+ dispatch_builder! {
+ match &mut self.token_trees => tt => finish_top_len(tt)
+ }
+
+ let span_parts = [
+ CompressedSpanPart::from_span(&self.top_subtree_spans.open),
+ CompressedSpanPart::from_span(&self.top_subtree_spans.close),
+ ]
+ .into_iter()
+ .chain(self.span_parts.iter().copied())
+ .collect();
+
+ let repr = match self.token_trees {
+ TopSubtreeBuilderRepr::SpanStorage32(tt) => {
+ TopSubtreeRepr::SpanStorage32(tt.into_boxed_slice())
+ }
+ TopSubtreeBuilderRepr::SpanStorage64(tt) => {
+ TopSubtreeRepr::SpanStorage64(tt.into_boxed_slice())
+ }
+ TopSubtreeBuilderRepr::SpanStorage96(tt) => {
+ TopSubtreeRepr::SpanStorage96(tt.into_boxed_slice())
+ }
+ };
+
+ TopSubtree { repr, span_parts }
+ }
+
+ pub fn restore_point(&self) -> SubtreeBuilderRestorePoint {
+ let token_trees_len = dispatch_builder! {
+ match &self.token_trees => tt => tt.len()
+ };
+ SubtreeBuilderRestorePoint {
+ unclosed_subtree_indices_len: self.unclosed_subtree_indices.len(),
+ token_trees_len,
+ last_closed_subtree: self.last_closed_subtree,
+ }
+ }
+
+ pub fn restore(&mut self, restore_point: SubtreeBuilderRestorePoint) {
+ self.unclosed_subtree_indices.truncate(restore_point.unclosed_subtree_indices_len);
+ dispatch_builder! {
+ match &mut self.token_trees => tt => tt.truncate(restore_point.token_trees_len)
+ }
+ self.last_closed_subtree = restore_point.last_closed_subtree;
+ }
+}
+
+#[derive(Clone, Copy)]
+pub struct SubtreeBuilderRestorePoint {
+ unclosed_subtree_indices_len: usize,
+ token_trees_len: usize,
+ last_closed_subtree: Option<usize>,
+}
diff --git a/docs/book/src/configuration_generated.md b/docs/book/src/configuration_generated.md
index 6b7ef04964..58b6363345 100644
--- a/docs/book/src/configuration_generated.md
+++ b/docs/book/src/configuration_generated.md
@@ -635,17 +635,6 @@ Default: `"client"`
Controls file watching implementation.
-## rust-analyzer.gc.frequency {#gc.frequency}
-
-Default: `1000`
-
-This config controls the frequency in which rust-analyzer will perform its internal Garbage
-Collection. It is specified in revisions, roughly equivalent to number of changes. The default
-is 1000.
-
-Setting a smaller value may help limit peak memory usage at the expense of speed.
-
-
## rust-analyzer.gotoImplementations.filterAdjacentDerives {#gotoImplementations.filterAdjacentDerives}
Default: `false`
@@ -1352,6 +1341,32 @@ Default: `false`
Exclude tests from find-all-references and call-hierarchy.
+## rust-analyzer.rename.showConflicts {#rename.showConflicts}
+
+Default: `true`
+
+Whether to warn when a rename will cause conflicts (change the meaning of the code).
+
+
+## rust-analyzer.runnables.bench.command {#runnables.bench.command}
+
+Default: `"bench"`
+
+Subcommand used for bench runnables instead of `bench`.
+
+
+## rust-analyzer.runnables.bench.overrideCommand {#runnables.bench.overrideCommand}
+
+Default: `null`
+
+Override the command used for bench runnables.
+The first element of the array should be the program to execute (for example, `cargo`).
+
+Use the placeholders `${package}`, `${target_arg}`, `${target}`, `${test_name}` to dynamically
+replace the package name, target option (such as `--bin` or `--example`), the target name and
+the test name (name of test function or test mod path).
+
+
## rust-analyzer.runnables.command {#runnables.command}
Default: `null`
@@ -1359,6 +1374,18 @@ Default: `null`
Command to be executed instead of 'cargo' for runnables.
+## rust-analyzer.runnables.doctest.overrideCommand {#runnables.doctest.overrideCommand}
+
+Default: `null`
+
+Override the command used for doctest runnables.
+The first element of the array should be the program to execute (for example, `cargo`).
+
+Use the placeholders `${package}`, `${target_arg}`, `${target}`, `${test_name}` to dynamically
+replace the package name, target option (such as `--bin` or `--example`), the target name and
+the test name (name of test function or test mod path).
+
+
## rust-analyzer.runnables.extraArgs {#runnables.extraArgs}
Default: `[]`
@@ -1385,6 +1412,25 @@ they will end up being interpreted as options to
[`rustc`’s built-in test harness (“libtest”)](https://doc.rust-lang.org/rustc/tests/index.html#cli-arguments).
+## rust-analyzer.runnables.test.command {#runnables.test.command}
+
+Default: `"test"`
+
+Subcommand used for test runnables instead of `test`.
+
+
+## rust-analyzer.runnables.test.overrideCommand {#runnables.test.overrideCommand}
+
+Default: `null`
+
+Override the command used for test runnables.
+The first element of the array should be the program to execute (for example, `cargo`).
+
+Use the placeholders `${package}`, `${target_arg}`, `${target}`, `${test_name}` to dynamically
+replace the package name, target option (such as `--bin` or `--example`), the target name and
+the test name (name of test function or test mod path).
+
+
## rust-analyzer.rustc.source {#rustc.source}
Default: `null`
diff --git a/docs/book/src/contributing/architecture.md b/docs/book/src/contributing/architecture.md
index 67653ebd74..50f60bcdcc 100644
--- a/docs/book/src/contributing/architecture.md
+++ b/docs/book/src/contributing/architecture.md
@@ -10,7 +10,7 @@ See also these implementation-related blog posts:
* <https://rust-analyzer.github.io/blog/2019/11/13/find-usages.html>
* <https://rust-analyzer.github.io/blog/2020/07/20/three-architectures-for-responsive-ide.html>
-* <https://rust-analyzer.github.io/blog/2020/09/16/challeging-LR-parsing.html>
+* <https://rust-analyzer.github.io/blog/2020/09/16/challenging-LR-parsing.html>
* <https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html>
* <https://rust-analyzer.github.io/blog/2020/10/24/introducing-ungrammar.html>
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index 00d83e9068..57f6bf69be 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -5584,9 +5584,9 @@
}
},
"node_modules/qs": {
- "version": "6.14.0",
- "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz",
- "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==",
+ "version": "6.14.1",
+ "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz",
+ "integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
diff --git a/editors/code/package.json b/editors/code/package.json
index d0410c70da..2157cbd486 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -1633,17 +1633,6 @@
}
},
{
- "title": "Gc",
- "properties": {
- "rust-analyzer.gc.frequency": {
- "markdownDescription": "This config controls the frequency in which rust-analyzer will perform its internal Garbage\nCollection. It is specified in revisions, roughly equivalent to number of changes. The default\nis 1000.\n\nSetting a smaller value may help limit peak memory usage at the expense of speed.",
- "default": 1000,
- "type": "integer",
- "minimum": 0
- }
- }
- },
- {
"title": "Goto Implementations",
"properties": {
"rust-analyzer.gotoImplementations.filterAdjacentDerives": {
@@ -2828,6 +2817,42 @@
}
},
{
+ "title": "Rename",
+ "properties": {
+ "rust-analyzer.rename.showConflicts": {
+ "markdownDescription": "Whether to warn when a rename will cause conflicts (change the meaning of the code).",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "Runnables",
+ "properties": {
+ "rust-analyzer.runnables.bench.command": {
+ "markdownDescription": "Subcommand used for bench runnables instead of `bench`.",
+ "default": "bench",
+ "type": "string"
+ }
+ }
+ },
+ {
+ "title": "Runnables",
+ "properties": {
+ "rust-analyzer.runnables.bench.overrideCommand": {
+ "markdownDescription": "Override the command used for bench runnables.\nThe first element of the array should be the program to execute (for example, `cargo`).\n\nUse the placeholders `${package}`, `${target_arg}`, `${target}`, `${test_name}` to dynamically\nreplace the package name, target option (such as `--bin` or `--example`), the target name and\nthe test name (name of test function or test mod path).",
+ "default": null,
+ "type": [
+ "null",
+ "array"
+ ],
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ {
"title": "Runnables",
"properties": {
"rust-analyzer.runnables.command": {
@@ -2843,6 +2868,22 @@
{
"title": "Runnables",
"properties": {
+ "rust-analyzer.runnables.doctest.overrideCommand": {
+ "markdownDescription": "Override the command used for doctest runnables.\nThe first element of the array should be the program to execute (for example, `cargo`).\n\nUse the placeholders `${package}`, `${target_arg}`, `${target}`, `${test_name}` to dynamically\nreplace the package name, target option (such as `--bin` or `--example`), the target name and\nthe test name (name of test function or test mod path).",
+ "default": null,
+ "type": [
+ "null",
+ "array"
+ ],
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ {
+ "title": "Runnables",
+ "properties": {
"rust-analyzer.runnables.extraArgs": {
"markdownDescription": "Additional arguments to be passed to cargo for runnables such as\ntests or binaries. For example, it may be `--release`.",
"default": [],
@@ -2869,6 +2910,32 @@
}
},
{
+ "title": "Runnables",
+ "properties": {
+ "rust-analyzer.runnables.test.command": {
+ "markdownDescription": "Subcommand used for test runnables instead of `test`.",
+ "default": "test",
+ "type": "string"
+ }
+ }
+ },
+ {
+ "title": "Runnables",
+ "properties": {
+ "rust-analyzer.runnables.test.overrideCommand": {
+ "markdownDescription": "Override the command used for test runnables.\nThe first element of the array should be the program to execute (for example, `cargo`).\n\nUse the placeholders `${package}`, `${target_arg}`, `${target}`, `${test_name}` to dynamically\nreplace the package name, target option (such as `--bin` or `--example`), the target name and\nthe test name (name of test function or test mod path).",
+ "default": null,
+ "type": [
+ "null",
+ "array"
+ ],
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ {
"title": "Rustc",
"properties": {
"rust-analyzer.rustc.source": {
diff --git a/editors/code/src/bootstrap.ts b/editors/code/src/bootstrap.ts
index bddf195803..ca5b7e3ec7 100644
--- a/editors/code/src/bootstrap.ts
+++ b/editors/code/src/bootstrap.ts
@@ -1,7 +1,7 @@
import * as vscode from "vscode";
import * as os from "os";
import type { Config } from "./config";
-import { type Env, log, spawnAsync } from "./util";
+import { type Env, log, RUST_TOOLCHAIN_FILES, spawnAsync } from "./util";
import type { PersistentState } from "./persistent_state";
import { exec } from "child_process";
import { TextDecoder } from "node:util";
@@ -59,8 +59,12 @@ async function getServer(
// otherwise check if there is a toolchain override for the current vscode workspace
// and if the toolchain of this override has a rust-analyzer component
// if so, use the rust-analyzer component
- const toolchainUri = vscode.Uri.joinPath(workspaceFolder.uri, "rust-toolchain.toml");
- if (await hasToolchainFileWithRaDeclared(toolchainUri)) {
+ // Check both rust-toolchain.toml and rust-toolchain files
+ for (const toolchainFile of RUST_TOOLCHAIN_FILES) {
+ const toolchainUri = vscode.Uri.joinPath(workspaceFolder.uri, toolchainFile);
+ if (!(await hasToolchainFileWithRaDeclared(toolchainUri))) {
+ continue;
+ }
const res = await spawnAsync("rustup", ["which", "rust-analyzer"], {
env: { ...process.env },
cwd: workspaceFolder.uri.fsPath,
@@ -71,6 +75,7 @@ async function getServer(
res.stdout.trim(),
raVersionResolver,
);
+ break;
}
}
}
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index cb71a01138..5b358e3211 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -30,17 +30,24 @@ export async function createClient(
},
async configuration(
params: lc.ConfigurationParams,
- token: vscode.CancellationToken,
- next: lc.ConfigurationRequest.HandlerSignature,
+ _token: vscode.CancellationToken,
+ _next: lc.ConfigurationRequest.HandlerSignature,
) {
- const resp = await next(params, token);
- if (resp && Array.isArray(resp)) {
- return resp.map((val) => {
- return prepareVSCodeConfig(val);
- });
- } else {
- return resp;
+ // The rust-analyzer LSP only ever asks for the "rust-analyzer"
+ // section, so we only need to support that. Instead of letting
+ // the vscode-languageclient handle it, use the `cfg` property
+ // in the config.
+ if (
+ params.items.length !== 1 ||
+ params.items[0]?.section !== "rust-analyzer" ||
+ params.items[0]?.scopeUri !== undefined
+ ) {
+ return new lc.ResponseError(
+ lc.ErrorCodes.InvalidParams,
+ 'Only the "rust-analyzer" config section is supported.',
+ );
}
+ return [prepareVSCodeConfig(config.cfg)];
},
},
async handleDiagnostics(
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index a7b7be03b5..e07fbb8bea 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -1,10 +1,11 @@
import * as vscode from "vscode";
-import type * as lc from "vscode-languageclient/node";
+import * as lc from "vscode-languageclient/node";
import * as ra from "./lsp_ext";
import { Config, prepareVSCodeConfig } from "./config";
import { createClient } from "./client";
import {
+ findRustToolchainFiles,
isCargoTomlEditor,
isDocumentInWorkspace,
isRustDocument,
@@ -266,6 +267,17 @@ export class Ctx implements RustAnalyzerExtensionApi {
this.outputChannel!.show();
}),
);
+ this.pushClientCleanup(
+ this._client.onNotification(
+ lc.ShowMessageNotification.type,
+ async (params: lc.ShowMessageParams) => {
+ // When an MSRV warning is detected and a rust-toolchain file exists,
+ // show an additional message with actionable guidance about adding
+ // the rust-analyzer component.
+ await handleMsrvWarning(params.message);
+ },
+ ),
+ );
}
return this._client;
}
@@ -592,3 +604,43 @@ export interface Disposable {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type Cmd = (...args: any[]) => unknown;
+
+/**
+ * Pattern to detect MSRV warning messages from the rust-analyzer server.
+ */
+const MSRV_WARNING_PATTERN = /using an outdated toolchain version.*rust-analyzer only supports/is;
+
+/**
+ * Handles the MSRV warning by checking for rust-toolchain files and showing
+ * an enhanced message if found.
+ */
+export async function handleMsrvWarning(message: string): Promise<boolean> {
+ if (!MSRV_WARNING_PATTERN.test(message)) {
+ return false;
+ }
+
+ const toolchainFiles = await findRustToolchainFiles();
+ if (toolchainFiles.length === 0) {
+ return false;
+ }
+
+ const openFile = "Open rust-toolchain file";
+ const result = await vscode.window.showWarningMessage(
+ "Your workspace uses a rust-toolchain file with a toolchain too old for the extension shipped rust-analyzer to work properly. " +
+ "Consider adding the rust-analyzer component to the toolchain file to use a compatible rust-analyzer version. " +
+ "Add the following to your rust-toolchain file's `[toolchain]` section:\n" +
+ 'components = ["rust-analyzer"]',
+ { modal: true },
+ openFile,
+ );
+
+ if (result === openFile) {
+ const fileToOpen = toolchainFiles[0];
+ if (fileToOpen) {
+ const document = await vscode.workspace.openTextDocument(fileToOpen);
+ await vscode.window.showTextDocument(document);
+ }
+ }
+
+ return true;
+}
diff --git a/editors/code/src/util.ts b/editors/code/src/util.ts
index 410b055100..05b475080c 100644
--- a/editors/code/src/util.ts
+++ b/editors/code/src/util.ts
@@ -328,3 +328,28 @@ export function normalizeDriveLetter(path: string, isWindowsOS: boolean = isWind
return path;
}
+
+export const RUST_TOOLCHAIN_FILES = ["rust-toolchain.toml", "rust-toolchain"] as const;
+
+export async function findRustToolchainFiles(): Promise<vscode.Uri[]> {
+ const found: vscode.Uri[] = [];
+ const workspaceFolders = vscode.workspace.workspaceFolders;
+ if (!workspaceFolders) {
+ return found;
+ }
+
+ for (const folder of workspaceFolders) {
+ for (const filename of RUST_TOOLCHAIN_FILES) {
+ const toolchainUri = vscode.Uri.joinPath(folder.uri, filename);
+ try {
+ await vscode.workspace.fs.stat(toolchainUri);
+ found.push(toolchainUri);
+ // Only add the first toolchain file found per workspace folder
+ break;
+ } catch {
+ // File doesn't exist, continue
+ }
+ }
+ }
+ return found;
+}
diff --git a/rust-version b/rust-version
index dcf82c94aa..5ffe95a0b5 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-0208ee09be465f69005a7a12c28d5eccac7d5f34
+e7d44143a12a526488e4f0c0d7ea8e62a4fe9354