Unnamed repository; edit this file 'description' to name the repository.
Merge remote-tracking branch 'upstream/master' into sync-from-rust
Laurențiu Nicola 2024-02-02
parent 8ba31cc · parent 850ba2f · commit 4f09335
-rw-r--r--.github/workflows/ci.yaml6
-rw-r--r--Cargo.lock30
-rw-r--r--Cargo.toml23
-rw-r--r--crates/base-db/Cargo.toml3
-rw-r--r--crates/base-db/src/change.rs2
-rw-r--r--crates/base-db/src/input.rs2
-rw-r--r--crates/base-db/src/lib.rs4
-rw-r--r--crates/flycheck/src/lib.rs6
-rw-r--r--crates/hir-def/src/attr.rs4
-rw-r--r--crates/hir-def/src/body.rs2
-rw-r--r--crates/hir-def/src/body/pretty.rs2
-rw-r--r--crates/hir-def/src/body/tests.rs4
-rw-r--r--crates/hir-def/src/data.rs4
-rw-r--r--crates/hir-def/src/db.rs21
-rw-r--r--crates/hir-def/src/find_path.rs6
-rw-r--r--crates/hir-def/src/generics.rs2
-rw-r--r--crates/hir-def/src/hir/format_args.rs13
-rw-r--r--crates/hir-def/src/import_map.rs16
-rw-r--r--crates/hir-def/src/item_scope.rs6
-rw-r--r--crates/hir-def/src/item_tree.rs3
-rw-r--r--crates/hir-def/src/lang_item.rs10
-rw-r--r--crates/hir-def/src/lib.rs2
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mbe/regression.rs2
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mod.rs5
-rw-r--r--crates/hir-def/src/nameres.rs7
-rw-r--r--crates/hir-def/src/nameres/collector.rs18
-rw-r--r--crates/hir-def/src/nameres/path_resolution.rs6
-rw-r--r--crates/hir-def/src/per_ns.rs18
-rw-r--r--crates/hir-def/src/test_db.rs4
-rw-r--r--crates/hir-expand/src/attrs.rs4
-rw-r--r--crates/hir-expand/src/db.rs7
-rw-r--r--crates/hir-expand/src/files.rs44
-rw-r--r--crates/hir-expand/src/lib.rs93
-rw-r--r--crates/hir-expand/src/mod_path.rs2
-rw-r--r--crates/hir-expand/src/proc_macro.rs2
-rw-r--r--crates/hir-expand/src/span_map.rs9
-rw-r--r--crates/hir-ty/src/autoderef.rs2
-rw-r--r--crates/hir-ty/src/chalk_db.rs2
-rw-r--r--crates/hir-ty/src/db.rs9
-rw-r--r--crates/hir-ty/src/diagnostics/decl_check.rs2
-rw-r--r--crates/hir-ty/src/diagnostics/expr.rs3
-rw-r--r--crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs5
-rw-r--r--crates/hir-ty/src/display.rs5
-rw-r--r--crates/hir-ty/src/infer.rs2
-rw-r--r--crates/hir-ty/src/infer/unify.rs3
-rw-r--r--crates/hir-ty/src/inhabitedness.rs3
-rw-r--r--crates/hir-ty/src/layout.rs2
-rw-r--r--crates/hir-ty/src/layout/adt.rs2
-rw-r--r--crates/hir-ty/src/layout/target.rs44
-rw-r--r--crates/hir-ty/src/lib.rs3
-rw-r--r--crates/hir-ty/src/method_resolution.rs15
-rw-r--r--crates/hir-ty/src/mir/borrowck.rs4
-rw-r--r--crates/hir-ty/src/mir/eval.rs151
-rw-r--r--crates/hir-ty/src/mir/eval/shim.rs284
-rw-r--r--crates/hir-ty/src/mir/eval/shim/simd.rs42
-rw-r--r--crates/hir-ty/src/mir/lower.rs7
-rw-r--r--crates/hir-ty/src/mir/lower/pattern_matching.rs2
-rw-r--r--crates/hir-ty/src/traits.rs5
-rw-r--r--crates/hir-ty/src/utils.rs14
-rw-r--r--crates/hir/Cargo.toml4
-rw-r--r--crates/hir/src/attrs.rs11
-rw-r--r--crates/hir/src/diagnostics.rs237
-rw-r--r--crates/hir/src/lib.rs250
-rw-r--r--crates/hir/src/semantics.rs107
-rw-r--r--crates/hir/src/semantics/source_to_def.rs6
-rw-r--r--crates/ide-assists/Cargo.toml3
-rw-r--r--crates/ide-assists/src/handlers/add_missing_impl_members.rs14
-rw-r--r--crates/ide-assists/src/handlers/extract_variable.rs2
-rw-r--r--crates/ide-assists/src/handlers/generate_delegate_trait.rs6
-rw-r--r--crates/ide-assists/src/handlers/generate_mut_trait_impl.rs2
-rw-r--r--crates/ide-assists/src/handlers/generate_trait_from_impl.rs2
-rw-r--r--crates/ide-assists/src/handlers/merge_imports.rs157
-rw-r--r--crates/ide-assists/src/handlers/normalize_import.rs219
-rw-r--r--crates/ide-assists/src/lib.rs2
-rw-r--r--crates/ide-assists/src/tests/generated.rs13
-rw-r--r--crates/ide-assists/src/tests/sourcegen.rs7
-rw-r--r--crates/ide-completion/Cargo.toml3
-rw-r--r--crates/ide-completion/src/completions/dot.rs10
-rw-r--r--crates/ide-completion/src/completions/expr.rs2
-rw-r--r--crates/ide-completion/src/completions/extern_abi.rs1
-rw-r--r--crates/ide-completion/src/completions/flyimport.rs10
-rw-r--r--crates/ide-completion/src/completions/item_list.rs2
-rw-r--r--crates/ide-completion/src/completions/mod_.rs2
-rw-r--r--crates/ide-completion/src/completions/postfix.rs37
-rw-r--r--crates/ide-completion/src/completions/type.rs2
-rw-r--r--crates/ide-completion/src/context.rs8
-rw-r--r--crates/ide-completion/src/context/analysis.rs7
-rw-r--r--crates/ide-completion/src/item.rs2
-rw-r--r--crates/ide-completion/src/lib.rs2
-rw-r--r--crates/ide-completion/src/render.rs6
-rw-r--r--crates/ide-completion/src/render/const_.rs2
-rw-r--r--crates/ide-completion/src/render/function.rs4
-rw-r--r--crates/ide-completion/src/render/literal.rs4
-rw-r--r--crates/ide-completion/src/render/macro_.rs4
-rw-r--r--crates/ide-completion/src/render/pattern.rs4
-rw-r--r--crates/ide-completion/src/render/type_alias.rs4
-rw-r--r--crates/ide-completion/src/tests.rs25
-rw-r--r--crates/ide-completion/src/tests/expression.rs4
-rw-r--r--crates/ide-completion/src/tests/flyimport.rs2
-rw-r--r--crates/ide-db/src/apply_change.rs5
-rw-r--r--crates/ide-db/src/defs.rs8
-rw-r--r--crates/ide-db/src/documentation.rs2
-rw-r--r--crates/ide-db/src/generated/lints.rs11
-rw-r--r--crates/ide-db/src/helpers.rs2
-rw-r--r--crates/ide-db/src/imports/import_assets.rs18
-rw-r--r--crates/ide-db/src/imports/insert_use.rs12
-rw-r--r--crates/ide-db/src/imports/insert_use/tests.rs22
-rw-r--r--crates/ide-db/src/imports/merge_imports.rs352
-rw-r--r--crates/ide-db/src/items_locator.rs13
-rw-r--r--crates/ide-db/src/search.rs44
-rw-r--r--crates/ide-db/src/symbol_index.rs10
-rw-r--r--crates/ide-diagnostics/Cargo.toml3
-rw-r--r--crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs4
-rw-r--r--crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/type_mismatch.rs46
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_field.rs4
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_method.rs4
-rw-r--r--crates/ide-diagnostics/src/lib.rs21
-rw-r--r--crates/ide-diagnostics/src/tests.rs2
-rw-r--r--crates/ide/src/goto_definition.rs55
-rw-r--r--crates/ide/src/highlight_related.rs3
-rw-r--r--crates/ide/src/hover.rs1
-rw-r--r--crates/ide/src/hover/tests.rs8
-rw-r--r--crates/ide/src/inlay_hints.rs2
-rw-r--r--crates/ide/src/inlay_hints/implicit_drop.rs2
-rw-r--r--crates/ide/src/prime_caches.rs2
-rw-r--r--crates/ide/src/references.rs50
-rw-r--r--crates/ide/src/syntax_highlighting.rs2
-rw-r--r--crates/mbe/src/expander/transcriber.rs2
-rw-r--r--crates/proc-macro-api/src/lib.rs4
-rw-r--r--crates/proc-macro-api/src/msg.rs2
-rw-r--r--crates/proc-macro-srv-cli/src/main.rs6
-rw-r--r--crates/proc-macro-srv/proc-macro-test/build.rs10
-rw-r--r--crates/proc-macro-srv/src/server/rust_analyzer_span.rs2
-rw-r--r--crates/profile/Cargo.toml3
-rw-r--r--crates/profile/src/hprof.rs326
-rw-r--r--crates/profile/src/lib.rs3
-rw-r--r--crates/profile/src/tree.rs84
-rw-r--r--crates/project-model/src/rustc_cfg.rs2
-rw-r--r--crates/project-model/src/workspace.rs21
-rw-r--r--crates/rust-analyzer/Cargo.toml5
-rw-r--r--crates/rust-analyzer/src/bin/logger.rs137
-rw-r--r--crates/rust-analyzer/src/bin/main.rs25
-rw-r--r--crates/rust-analyzer/src/cli/analysis_stats.rs16
-rw-r--r--crates/rust-analyzer/src/cli/diagnostics.rs4
-rw-r--r--crates/rust-analyzer/src/cli/flags.rs2
-rw-r--r--crates/rust-analyzer/src/cli/lsif.rs4
-rw-r--r--crates/rust-analyzer/src/cli/parse.rs2
-rw-r--r--crates/rust-analyzer/src/cli/run_tests.rs4
-rw-r--r--crates/rust-analyzer/src/cli/rustc_tests.rs4
-rw-r--r--crates/rust-analyzer/src/cli/scip.rs8
-rw-r--r--crates/rust-analyzer/src/cli/ssr.rs4
-rw-r--r--crates/rust-analyzer/src/config.rs40
-rw-r--r--crates/rust-analyzer/src/diagnostics.rs2
-rw-r--r--crates/rust-analyzer/src/dispatch.rs20
-rw-r--r--crates/rust-analyzer/src/global_state.rs2
-rw-r--r--crates/rust-analyzer/src/handlers/notification.rs14
-rw-r--r--crates/rust-analyzer/src/handlers/request.rs106
-rw-r--r--crates/rust-analyzer/src/integrated_benchmarks.rs9
-rw-r--r--crates/rust-analyzer/src/lib.rs11
-rw-r--r--crates/rust-analyzer/src/lsp/to_proto.rs13
-rw-r--r--crates/rust-analyzer/src/main_loop.rs23
-rw-r--r--crates/rust-analyzer/src/reload.rs9
-rw-r--r--crates/rust-analyzer/src/tracing/config.rs108
-rw-r--r--crates/rust-analyzer/src/tracing/hprof.rs272
-rw-r--r--crates/rust-analyzer/tests/slow-tests/main.rs10
-rw-r--r--crates/rust-analyzer/tests/slow-tests/support.rs18
-rw-r--r--crates/stdx/Cargo.toml2
-rw-r--r--crates/stdx/src/macros.rs26
-rw-r--r--crates/syntax/Cargo.toml1
-rw-r--r--crates/syntax/src/algo.rs4
-rw-r--r--crates/syntax/src/ast/edit_in_place.rs17
-rw-r--r--crates/syntax/src/ast/make.rs18
-rw-r--r--crates/syntax/src/ast/node_ext.rs2
-rw-r--r--crates/syntax/src/tests.rs8
-rw-r--r--crates/test-utils/Cargo.toml3
-rw-r--r--crates/test-utils/src/minicore.rs14
-rw-r--r--docs/dev/syntax.md2
-rw-r--r--docs/user/generated_config.adoc5
-rw-r--r--docs/user/manual.adoc2
-rw-r--r--editors/code/language-configuration.json6
-rw-r--r--editors/code/package.json5
-rw-r--r--editors/code/src/main.ts34
-rw-r--r--lib/lsp-server/src/error.rs2
185 files changed, 2613 insertions, 1779 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index be830415f9..b5c5ff0473 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -90,7 +90,7 @@ jobs:
- name: Switch to stable toolchain
run: |
rustup update --no-self-update stable
- rustup component add --toolchain stable rust-src
+ rustup component add --toolchain stable rust-src clippy
rustup default stable
- name: Run analysis-stats on rust-analyzer
@@ -103,6 +103,10 @@ jobs:
RUSTC_BOOTSTRAP: 1
run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std
+ - name: clippy
+ if: matrix.os == 'ubuntu-latest'
+ run: cargo clippy --all-targets
+
# Weird targets to catch non-portable code
rust-cross:
if: github.repository == 'rust-lang/rust-analyzer'
diff --git a/Cargo.lock b/Cargo.lock
index 7513abf17c..1b5efb4bb8 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -19,11 +19,11 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "always-assert"
-version = "0.1.3"
+version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4436e0292ab1bb631b42973c61205e704475fe8126af845c8d923c0996328127"
+checksum = "a1078fa1ce1e34b1872d8611ad921196d76bdd7027e949fbe31231abde201892"
dependencies = [
- "log",
+ "tracing",
]
[[package]]
@@ -78,6 +78,7 @@ dependencies = [
"span",
"stdx",
"syntax",
+ "tracing",
"triomphe",
"vfs",
]
@@ -494,8 +495,10 @@ dependencies = [
"profile",
"rustc-hash",
"smallvec",
+ "span",
"stdx",
"syntax",
+ "tracing",
"triomphe",
"tt",
]
@@ -670,6 +673,7 @@ dependencies = [
"test-fixture",
"test-utils",
"text-edit",
+ "tracing",
]
[[package]]
@@ -690,6 +694,7 @@ dependencies = [
"test-fixture",
"test-utils",
"text-edit",
+ "tracing",
]
[[package]]
@@ -747,6 +752,7 @@ dependencies = [
"test-fixture",
"test-utils",
"text-edit",
+ "tracing",
]
[[package]]
@@ -1342,6 +1348,7 @@ dependencies = [
"once_cell",
"perf-event",
"tikv-jemalloc-ctl",
+ "tracing",
"winapi",
]
@@ -1581,7 +1588,6 @@ dependencies = [
"tikv-jemallocator",
"toolchain",
"tracing",
- "tracing-log",
"tracing-subscriber",
"tracing-tree",
"triomphe",
@@ -1595,26 +1601,26 @@ dependencies = [
[[package]]
name = "rust-analyzer-salsa"
-version = "0.17.0-pre.5"
+version = "0.17.0-pre.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca9d387a9801f4fb9b366789ad1bfc08448cafc49cf148d907cfcd88ab665d7f"
+checksum = "719825638c59fd26a55412a24561c7c5bcf54364c88b9a7a04ba08a6eafaba8d"
dependencies = [
"indexmap",
"lock_api",
- "log",
"oorandom",
"parking_lot",
"rust-analyzer-salsa-macros",
"rustc-hash",
"smallvec",
+ "tracing",
"triomphe",
]
[[package]]
name = "rust-analyzer-salsa-macros"
-version = "0.17.0-pre.5"
+version = "0.17.0-pre.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2035f385d7fae31e9b086f40b272ee1d79c484472f31c9a10348a406e841eaf"
+checksum = "4d96498e9684848c6676c399032ebc37c52da95ecbefa83d71ccc53b9f8a4a8e"
dependencies = [
"heck",
"proc-macro2",
@@ -1661,9 +1667,9 @@ dependencies = [
[[package]]
name = "scip"
-version = "0.3.1"
+version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3e84d21062a3ba08d58870c8c36b0c005b2b2261c6ad1bf7042585427c781883"
+checksum = "e5dc1bd66649133af84ab62436ddd2856c2605182b02dec2cd197f684dfe15ef"
dependencies = [
"protobuf",
]
@@ -1863,6 +1869,7 @@ dependencies = [
"stdx",
"test-utils",
"text-edit",
+ "tracing",
"triomphe",
"ungrammar",
]
@@ -1890,6 +1897,7 @@ dependencies = [
"rustc-hash",
"stdx",
"text-size",
+ "tracing",
]
[[package]]
diff --git a/Cargo.toml b/Cargo.toml
index 56db5a28c0..5a74864811 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -113,7 +113,7 @@ itertools = "0.12.0"
libc = "0.2.150"
nohash-hasher = "0.2.0"
rayon = "1.8.0"
-rust-analyzer-salsa = "0.17.0-pre.5"
+rust-analyzer-salsa = "0.17.0-pre.6"
rustc-hash = "1.1.0"
semver = "1.0.14"
serde = { version = "1.0.192", features = ["derive"] }
@@ -128,9 +128,9 @@ text-size = "1.1.1"
tracing = "0.1.40"
tracing-tree = "0.3.0"
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
- "registry",
- "fmt",
- "tracing-log",
+ "registry",
+ "fmt",
+ "tracing-log",
] }
triomphe = { version = "0.1.10", default-features = false, features = ["std"] }
xshell = "0.2.5"
@@ -167,29 +167,14 @@ new_ret_no_self = "allow"
## Following lints should be tackled at some point
borrowed_box = "allow"
-borrow_deref_ref = "allow"
-derivable_impls = "allow"
derived_hash_with_manual_eq = "allow"
-field_reassign_with_default = "allow"
forget_non_drop = "allow"
-format_collect = "allow"
-large_enum_variant = "allow"
needless_doctest_main = "allow"
-new_without_default = "allow"
non_canonical_clone_impl = "allow"
non_canonical_partial_ord_impl = "allow"
self_named_constructors = "allow"
-skip_while_next = "allow"
too_many_arguments = "allow"
-toplevel_ref_arg = "allow"
type_complexity = "allow"
-unnecessary_cast = "allow"
-unnecessary_filter_map = "allow"
-unnecessary_lazy_evaluations = "allow"
-unnecessary_mut_passed = "allow"
-useless_conversion = "allow"
-useless_format = "allow"
-wildcard_in_or_patterns = "allow"
wrong_self_convention = "allow"
## warn at following lints
diff --git a/crates/base-db/Cargo.toml b/crates/base-db/Cargo.toml
index 1aa43175f9..485ba78846 100644
--- a/crates/base-db/Cargo.toml
+++ b/crates/base-db/Cargo.toml
@@ -17,6 +17,7 @@ rust-analyzer-salsa.workspace = true
rustc-hash.workspace = true
triomphe.workspace = true
semver.workspace = true
+tracing.workspace = true
# local deps
cfg.workspace = true
@@ -27,4 +28,4 @@ vfs.workspace = true
span.workspace = true
[lints]
-workspace = true \ No newline at end of file
+workspace = true
diff --git a/crates/base-db/src/change.rs b/crates/base-db/src/change.rs
index 4332e572e2..003ffb24d9 100644
--- a/crates/base-db/src/change.rs
+++ b/crates/base-db/src/change.rs
@@ -51,7 +51,7 @@ impl FileChange {
}
pub fn apply(self, db: &mut dyn SourceDatabaseExt) {
- let _p = profile::span("RootDatabase::apply_change");
+ let _p = tracing::span!(tracing::Level::INFO, "RootDatabase::apply_change").entered();
if let Some(roots) = self.roots {
for (idx, root) in roots.into_iter().enumerate() {
let root_id = SourceRootId(idx as u32);
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 852f36ea71..51e6fdb951 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -494,7 +494,7 @@ impl CrateGraph {
from: CrateId,
dep: Dependency,
) -> Result<(), CyclicDependenciesError> {
- let _p = profile::span("add_dep");
+ let _p = tracing::span!(tracing::Level::INFO, "add_dep").entered();
self.check_cycle_after_dependency(from, dep.crate_id)?;
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 90da7efd4a..d7fc9d4c95 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -65,7 +65,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
}
fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
- let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
+ let _p = tracing::span!(tracing::Level::INFO, "parse_query", ?file_id).entered();
let text = db.file_text(file_id);
SourceFile::parse(&text)
}
@@ -116,7 +116,7 @@ impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
}
fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
- let _p = profile::span("relevant_crates");
+ let _p = tracing::span!(tracing::Level::INFO, "relevant_crates").entered();
let source_root = self.0.file_source_root(file_id);
self.0.source_root_crates(source_root)
}
diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs
index 68faca51e8..22603842a1 100644
--- a/crates/flycheck/src/lib.rs
+++ b/crates/flycheck/src/lib.rs
@@ -493,7 +493,9 @@ impl CargoActor {
// Skip certain kinds of messages to only spend time on what's useful
JsonMessage::Cargo(message) => match message {
cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => {
- self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
+ self.sender
+ .send(CargoMessage::CompilerArtifact(Box::new(artifact)))
+ .unwrap();
}
cargo_metadata::Message::CompilerMessage(msg) => {
self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
@@ -538,7 +540,7 @@ impl CargoActor {
}
enum CargoMessage {
- CompilerArtifact(cargo_metadata::Artifact),
+ CompilerArtifact(Box<cargo_metadata::Artifact>),
Diagnostic(Diagnostic),
}
diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs
index a8e081705e..bee6f0083b 100644
--- a/crates/hir-def/src/attr.rs
+++ b/crates/hir-def/src/attr.rs
@@ -75,7 +75,7 @@ impl Attrs {
db: &dyn DefDatabase,
v: VariantId,
) -> Arc<ArenaMap<LocalFieldId, Attrs>> {
- let _p = profile::span("fields_attrs_query");
+ let _p = tracing::span!(tracing::Level::INFO, "fields_attrs_query").entered();
// FIXME: There should be some proper form of mapping between item tree field ids and hir field ids
let mut res = ArenaMap::default();
@@ -322,7 +322,7 @@ impl AttrsWithOwner {
}
pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs {
- let _p = profile::span("attrs_query");
+ let _p = tracing::span!(tracing::Level::INFO, "attrs_query").entered();
// FIXME: this should use `Trace` to avoid duplication in `source_map` below
let raw_attrs = match def {
AttrDefId::ModuleId(module) => {
diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs
index e4308c6b7f..ce8a9eab14 100644
--- a/crates/hir-def/src/body.rs
+++ b/crates/hir-def/src/body.rs
@@ -122,7 +122,7 @@ impl Body {
db: &dyn DefDatabase,
def: DefWithBodyId,
) -> (Arc<Body>, Arc<BodySourceMap>) {
- let _p = profile::span("body_with_source_map_query");
+ let _p = tracing::span!(tracing::Level::INFO, "body_with_source_map_query").entered();
let mut params = None;
let mut is_async_fn = false;
diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs
index 0f2b279670..b821b91b89 100644
--- a/crates/hir-def/src/body/pretty.rs
+++ b/crates/hir-def/src/body/pretty.rs
@@ -33,7 +33,7 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
}
)
}),
- DefWithBodyId::InTypeConstId(_) => format!("In type const = "),
+ DefWithBodyId::InTypeConstId(_) => "In type const = ".to_string(),
DefWithBodyId::VariantId(it) => {
let loc = it.lookup(db);
let enum_loc = loc.parent.lookup(db);
diff --git a/crates/hir-def/src/body/tests.rs b/crates/hir-def/src/body/tests.rs
index a76ddffb41..a27ffe2167 100644
--- a/crates/hir-def/src/body/tests.rs
+++ b/crates/hir-def/src/body/tests.rs
@@ -256,7 +256,7 @@ impl SsrError {
"##,
);
- assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]);
+ assert_eq!(db.body_with_source_map(def).1.diagnostics(), &[]);
expect![[r#"
fn main() {
_ = $crate::error::SsrError::new(
@@ -309,7 +309,7 @@ fn f() {
"#,
);
- let (_, source_map) = db.body_with_source_map(def.into());
+ let (_, source_map) = db.body_with_source_map(def);
assert_eq!(source_map.diagnostics(), &[]);
for (_, def_map) in body.blocks(&db) {
diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs
index ca02b5d68e..7ce05b64d0 100644
--- a/crates/hir-def/src/data.rs
+++ b/crates/hir-def/src/data.rs
@@ -340,7 +340,7 @@ impl ImplData {
db: &dyn DefDatabase,
id: ImplId,
) -> (Arc<ImplData>, DefDiagnostics) {
- let _p = profile::span("impl_data_with_diagnostics_query");
+ let _p = tracing::span!(tracing::Level::INFO, "impl_data_with_diagnostics_query").entered();
let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);
let item_tree = tree_id.item_tree(db);
@@ -782,7 +782,7 @@ impl<'a> AssocItemCollector<'a> {
self.diagnostics.push(DefDiagnostic::macro_expansion_parse_error(
self.module_id.local_id,
error_call_kind(),
- errors.into(),
+ errors,
));
}
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index 708abb5369..68f57600ec 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -1,9 +1,10 @@
//! Defines database & queries for name resolution.
-use base_db::{salsa, CrateId, SourceDatabase, Upcast};
+use base_db::{salsa, CrateId, FileId, SourceDatabase, Upcast};
use either::Either;
use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId};
use intern::Interned;
use la_arena::ArenaMap;
+use span::MacroCallId;
use syntax::{ast, AstPtr};
use triomphe::Arc;
@@ -234,10 +235,26 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
fn crate_notable_traits(&self, krate: CrateId) -> Option<Arc<[TraitId]>>;
fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;
+
+ fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, FileId)>;
+}
+
+// return: macro call id and include file id
+fn include_macro_invoc(db: &dyn DefDatabase, krate: CrateId) -> Vec<(MacroCallId, FileId)> {
+ db.crate_def_map(krate)
+ .modules
+ .values()
+ .flat_map(|m| m.scope.iter_macro_invoc())
+ .filter_map(|invoc| {
+ db.lookup_intern_macro_call(*invoc.1)
+ .include_file_id(db.upcast(), *invoc.1)
+ .map(|x| (*invoc.1, x))
+ })
+ .collect()
}
fn crate_def_map_wait(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
- let _p = profile::span("crate_def_map:wait");
+ let _p = tracing::span!(tracing::Level::INFO, "crate_def_map:wait").entered();
db.crate_def_map_query(krate)
}
diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs
index efda8abf4b..515a2cc824 100644
--- a/crates/hir-def/src/find_path.rs
+++ b/crates/hir-def/src/find_path.rs
@@ -26,7 +26,7 @@ pub fn find_path(
prefer_no_std: bool,
prefer_prelude: bool,
) -> Option<ModPath> {
- let _p = profile::span("find_path");
+ let _p = tracing::span!(tracing::Level::INFO, "find_path").entered();
find_path_inner(FindPathCtx { db, prefixed: None, prefer_no_std, prefer_prelude }, item, from)
}
@@ -38,7 +38,7 @@ pub fn find_path_prefixed(
prefer_no_std: bool,
prefer_prelude: bool,
) -> Option<ModPath> {
- let _p = profile::span("find_path_prefixed");
+ let _p = tracing::span!(tracing::Level::INFO, "find_path_prefixed").entered();
find_path_inner(
FindPathCtx { db, prefixed: Some(prefix_kind), prefer_no_std, prefer_prelude },
item,
@@ -497,7 +497,7 @@ fn find_local_import_locations(
item: ItemInNs,
from: ModuleId,
) -> Vec<(ModuleId, Name)> {
- let _p = profile::span("find_local_import_locations");
+ let _p = tracing::span!(tracing::Level::INFO, "find_local_import_locations").entered();
// `from` can import anything below `from` with visibility of at least `from`, and anything
// above `from` with any visibility. That means we do not need to descend into private siblings
diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs
index 7daae821f8..349d327aaa 100644
--- a/crates/hir-def/src/generics.rs
+++ b/crates/hir-def/src/generics.rs
@@ -373,7 +373,7 @@ impl GenericParams {
db: &dyn DefDatabase,
def: GenericDefId,
) -> Interned<GenericParams> {
- let _p = profile::span("generic_params_query");
+ let _p = tracing::span!(tracing::Level::INFO, "generic_params_query").entered();
let krate = def.module(db).krate;
let cfg_options = db.crate_graph();
diff --git a/crates/hir-def/src/hir/format_args.rs b/crates/hir-def/src/hir/format_args.rs
index c0d1738b50..b097a721c7 100644
--- a/crates/hir-def/src/hir/format_args.rs
+++ b/crates/hir-def/src/hir/format_args.rs
@@ -166,6 +166,7 @@ enum PositionUsedAs {
}
use PositionUsedAs::*;
+#[allow(clippy::unnecessary_lazy_evaluations)]
pub(crate) fn parse(
s: &ast::String,
fmt_snippet: Option<String>,
@@ -177,9 +178,9 @@ pub(crate) fn parse(
let text = s.text_without_quotes();
let str_style = match s.quote_offsets() {
Some(offsets) => {
- let raw = u32::from(offsets.quotes.0.len()) - 1;
+ let raw = usize::from(offsets.quotes.0.len()) - 1;
// subtract 1 for the `r` prefix
- (raw != 0).then(|| raw as usize - 1)
+ (raw != 0).then(|| raw - 1)
}
None => None,
};
@@ -214,7 +215,7 @@ pub(crate) fn parse(
let mut used = vec![false; args.explicit_args().len()];
let mut invalid_refs = Vec::new();
- let mut numeric_refences_to_named_arg = Vec::new();
+ let mut numeric_references_to_named_arg = Vec::new();
enum ArgRef<'a> {
Index(usize),
@@ -231,7 +232,7 @@ pub(crate) fn parse(
used[index] = true;
if arg.kind.ident().is_some() {
// This was a named argument, but it was used as a positional argument.
- numeric_refences_to_named_arg.push((index, span, used_as));
+ numeric_references_to_named_arg.push((index, span, used_as));
}
Ok(index)
} else {
@@ -432,7 +433,7 @@ pub(crate) fn parse(
}
}
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct FormatArgumentsCollector {
arguments: Vec<FormatArgument>,
num_unnamed_args: usize,
@@ -451,7 +452,7 @@ impl FormatArgumentsCollector {
}
pub fn new() -> Self {
- Self { arguments: vec![], names: vec![], num_unnamed_args: 0, num_explicit_args: 0 }
+ Default::default()
}
pub fn add(&mut self, arg: FormatArgument) -> usize {
diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs
index 15c127f156..c698510ca9 100644
--- a/crates/hir-def/src/import_map.rs
+++ b/crates/hir-def/src/import_map.rs
@@ -75,7 +75,7 @@ impl ImportMap {
}
pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
- let _p = profile::span("import_map_query");
+ let _p = tracing::span!(tracing::Level::INFO, "import_map_query").entered();
let map = Self::collect_import_map(db, krate);
@@ -126,7 +126,7 @@ impl ImportMap {
}
fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex {
- let _p = profile::span("collect_import_map");
+ let _p = tracing::span!(tracing::Level::INFO, "collect_import_map").entered();
let def_map = db.crate_def_map(krate);
let mut map = FxIndexMap::default();
@@ -216,7 +216,7 @@ impl ImportMap {
is_type_in_ns: bool,
trait_import_info: &ImportInfo,
) {
- let _p = profile::span("collect_trait_assoc_items");
+ let _p = tracing::span!(tracing::Level::INFO, "collect_trait_assoc_items").entered();
for &(ref assoc_item_name, item) in &db.trait_data(tr).items {
let module_def_id = match item {
AssocItemId::FunctionId(f) => ModuleDefId::from(f),
@@ -297,7 +297,7 @@ impl SearchMode {
SearchMode::Exact => candidate.eq_ignore_ascii_case(query),
SearchMode::Prefix => {
query.len() <= candidate.len() && {
- let prefix = &candidate[..query.len() as usize];
+ let prefix = &candidate[..query.len()];
if case_sensitive {
prefix == query
} else {
@@ -396,9 +396,9 @@ impl Query {
pub fn search_dependencies(
db: &dyn DefDatabase,
krate: CrateId,
- ref query: Query,
+ query: &Query,
) -> FxHashSet<ItemInNs> {
- let _p = profile::span("search_dependencies").detail(|| format!("{query:?}"));
+ let _p = tracing::span!(tracing::Level::INFO, "search_dependencies", ?query).entered();
let graph = db.crate_graph();
@@ -446,7 +446,7 @@ fn search_maps(
let end = (value & 0xFFFF_FFFF) as usize;
let start = (value >> 32) as usize;
let ImportMap { item_to_info_map, importables, .. } = &*import_maps[import_map_idx];
- let importables = &importables[start as usize..end];
+ let importables = &importables[start..end];
let iter = importables
.iter()
@@ -516,7 +516,7 @@ mod tests {
})
.expect("could not find crate");
- let actual = search_dependencies(db.upcast(), krate, query)
+ let actual = search_dependencies(db.upcast(), krate, &query)
.into_iter()
.filter_map(|dependency| {
let dependency_krate = dependency.krate(db.upcast())?;
diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs
index 168ee4acff..6237ea7353 100644
--- a/crates/hir-def/src/item_scope.rs
+++ b/crates/hir-def/src/item_scope.rs
@@ -336,6 +336,12 @@ impl ItemScope {
pub(crate) fn macro_invoc(&self, call: AstId<ast::MacroCall>) -> Option<MacroCallId> {
self.macro_invocations.get(&call).copied()
}
+
+ pub(crate) fn iter_macro_invoc(
+ &self,
+ ) -> impl Iterator<Item = (&AstId<ast::MacroCall>, &MacroCallId)> {
+ self.macro_invocations.iter()
+ }
}
impl ItemScope {
diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs
index c37cf52155..299ad33c34 100644
--- a/crates/hir-def/src/item_tree.rs
+++ b/crates/hir-def/src/item_tree.rs
@@ -109,7 +109,8 @@ pub struct ItemTree {
impl ItemTree {
pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
- let _p = profile::span("file_item_tree_query").detail(|| format!("{file_id:?}"));
+ let _p = tracing::span!(tracing::Level::INFO, "file_item_tree_query", ?file_id).entered();
+
let syntax = db.parse_or_expand(file_id);
let ctx = lower::Ctx::new(db, file_id);
diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs
index 60c8baf424..7d98f6cfe8 100644
--- a/crates/hir-def/src/lang_item.rs
+++ b/crates/hir-def/src/lang_item.rs
@@ -91,7 +91,7 @@ impl LangItems {
db: &dyn DefDatabase,
krate: CrateId,
) -> Option<Arc<LangItems>> {
- let _p = profile::span("crate_lang_items_query");
+ let _p = tracing::span!(tracing::Level::INFO, "crate_lang_items_query").entered();
let mut lang_items = LangItems::default();
@@ -163,7 +163,7 @@ impl LangItems {
start_crate: CrateId,
item: LangItem,
) -> Option<LangItemTarget> {
- let _p = profile::span("lang_item_query");
+ let _p = tracing::span!(tracing::Level::INFO, "lang_item_query").entered();
if let Some(target) =
db.crate_lang_items(start_crate).and_then(|it| it.items.get(&item).copied())
{
@@ -183,7 +183,7 @@ impl LangItems {
) where
T: Into<AttrDefId> + Copy,
{
- let _p = profile::span("collect_lang_item");
+ let _p = tracing::span!(tracing::Level::INFO, "collect_lang_item").entered();
if let Some(lang_item) = lang_attr(db, item.into()) {
self.items.entry(lang_item).or_insert_with(|| constructor(item));
}
@@ -199,7 +199,7 @@ pub(crate) fn notable_traits_in_deps(
db: &dyn DefDatabase,
krate: CrateId,
) -> Arc<[Arc<[TraitId]>]> {
- let _p = profile::span("notable_traits_in_deps").detail(|| format!("{krate:?}"));
+ let _p = tracing::span!(tracing::Level::INFO, "notable_traits_in_deps", ?krate).entered();
let crate_graph = db.crate_graph();
Arc::from_iter(
@@ -208,7 +208,7 @@ pub(crate) fn notable_traits_in_deps(
}
pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: CrateId) -> Option<Arc<[TraitId]>> {
- let _p = profile::span("crate_notable_traits").detail(|| format!("{krate:?}"));
+ let _p = tracing::span!(tracing::Level::INFO, "crate_notable_traits", ?krate).entered();
let mut traits = Vec::new();
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index 243de66397..71bc521333 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -745,7 +745,7 @@ impl InTypeConstId {
}
}
-/// A constant, which might appears as a const item, an annonymous const block in expressions
+/// A constant, which might appears as a const item, an anonymous const block in expressions
/// or patterns, or as a constant in types with const generics.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum GeneralConstId {
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
index 71ba497217..226aa01827 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
@@ -72,7 +72,7 @@ fn main() {
}
"#]],
);
- // FIXME we should ahev testing infra for multi level expansion tests
+ // FIXME we should have testing infra for multi level expansion tests
check(
r#"
macro_rules! __rust_force_expr {
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index ec29940538..e315414e9b 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -25,7 +25,7 @@ use hir_expand::{
InFile, MacroFileId, MacroFileIdExt,
};
use span::Span;
-use stdx::format_to;
+use stdx::{format_to, format_to_acc};
use syntax::{
ast::{self, edit::IndentLevel},
AstNode,
@@ -149,8 +149,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
if tree {
let tree = format!("{:#?}", parse.syntax_node())
.split_inclusive('\n')
- .map(|line| format!("// {line}"))
- .collect::<String>();
+ .fold(String::new(), |mut acc, line| format_to_acc!(acc, "// {line}"));
format_to!(expn_text, "\n{}", tree)
}
let range = call.syntax().text_range();
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index 2295df16fd..1fa975789e 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -306,9 +306,10 @@ impl DefMap {
pub const ROOT: LocalModuleId = LocalModuleId::from_raw(la_arena::RawIdx::from_u32(0));
pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
- let _p = profile::span("crate_def_map_query").detail(|| {
- db.crate_graph()[krate].display_name.as_deref().unwrap_or_default().to_string()
- });
+ let crate_graph = db.crate_graph();
+ let krate_name = crate_graph[krate].display_name.as_deref().unwrap_or_default();
+
+ let _p = tracing::span!(tracing::Level::INFO, "crate_def_map_query", ?krate_name).entered();
let crate_graph = db.crate_graph();
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 248d3213d5..fb6fd867a1 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -273,7 +273,7 @@ struct DefCollector<'a> {
impl DefCollector<'_> {
fn seed_with_top_level(&mut self) {
- let _p = profile::span("seed_with_top_level");
+ let _p = tracing::span!(tracing::Level::INFO, "seed_with_top_level").entered();
let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id;
let item_tree = self.db.file_item_tree(file_id.into());
@@ -401,7 +401,7 @@ impl DefCollector<'_> {
}
fn resolution_loop(&mut self) {
- let _p = profile::span("DefCollector::resolution_loop");
+ let _p = tracing::span!(tracing::Level::INFO, "DefCollector::resolution_loop").entered();
// main name resolution fixed-point loop.
let mut i = 0;
@@ -410,7 +410,7 @@ impl DefCollector<'_> {
self.db.unwind_if_cancelled();
{
- let _p = profile::span("resolve_imports loop");
+ let _p = tracing::span!(tracing::Level::INFO, "resolve_imports loop").entered();
'resolve_imports: loop {
if self.resolve_imports() == ReachedFixedPoint::Yes {
@@ -436,7 +436,7 @@ impl DefCollector<'_> {
}
fn collect(&mut self) {
- let _p = profile::span("DefCollector::collect");
+ let _p = tracing::span!(tracing::Level::INFO, "DefCollector::collect").entered();
self.resolution_loop();
@@ -792,8 +792,8 @@ impl DefCollector<'_> {
}
fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport {
- let _p = profile::span("resolve_import")
- .detail(|| format!("{}", import.path.display(self.db.upcast())));
+ let _p = tracing::span!(tracing::Level::INFO, "resolve_import", import_path = %import.path.display(self.db.upcast()))
+ .entered();
tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition);
match import.source {
ImportSource::ExternCrate { .. } => {
@@ -856,7 +856,7 @@ impl DefCollector<'_> {
}
fn record_resolved_import(&mut self, directive: &ImportDirective) {
- let _p = profile::span("record_resolved_import");
+ let _p = tracing::span!(tracing::Level::INFO, "record_resolved_import").entered();
let module_id = directive.module_id;
let import = &directive.import;
@@ -1430,7 +1430,7 @@ impl DefCollector<'_> {
fn finish(mut self) -> DefMap {
// Emit diagnostics for all remaining unexpanded macros.
- let _p = profile::span("DefCollector::finish");
+ let _p = tracing::span!(tracing::Level::INFO, "DefCollector::finish").entered();
for directive in &self.unresolved_macros {
match &directive.kind {
@@ -1924,7 +1924,7 @@ impl ModCollector<'_, '_> {
item_tree: self.item_tree,
mod_dir,
}
- .collect_in_top_module(&*items);
+ .collect_in_top_module(items);
if is_macro_use {
self.import_all_legacy_macros(module_id);
}
diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs
index 01f79f042f..70da0ef8e1 100644
--- a/crates/hir-def/src/nameres/path_resolution.rs
+++ b/crates/hir-def/src/nameres/path_resolution.rs
@@ -269,7 +269,7 @@ impl DefMap {
stdx::never!(module.is_block_module());
if self.block != def_map.block {
- // If we have a different `DefMap` from `self` (the orignal `DefMap` we started
+ // If we have a different `DefMap` from `self` (the original `DefMap` we started
// with), resolve the remaining path segments in that `DefMap`.
let path =
ModPath::from_segments(PathKind::Super(0), path.segments().iter().cloned());
@@ -475,7 +475,7 @@ impl DefMap {
let macro_use_prelude = || {
self.macro_use_prelude.get(name).map_or(PerNs::none(), |&(it, _extern_crate)| {
PerNs::macros(
- it.into(),
+ it,
Visibility::Public,
// FIXME?
None, // extern_crate.map(ImportOrExternCrate::ExternCrate),
@@ -540,7 +540,7 @@ impl DefMap {
}
}
-/// Given a block module, returns its nearest non-block module and the `DefMap` it blongs to.
+/// Given a block module, returns its nearest non-block module and the `DefMap` it belongs to.
fn adjust_to_nearest_non_block_module(
db: &dyn DefDatabase,
def_map: &DefMap,
diff --git a/crates/hir-def/src/per_ns.rs b/crates/hir-def/src/per_ns.rs
index 14890364d0..36ab62d0f7 100644
--- a/crates/hir-def/src/per_ns.rs
+++ b/crates/hir-def/src/per_ns.rs
@@ -16,19 +16,13 @@ pub enum Namespace {
Macros,
}
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
pub struct PerNs {
pub types: Option<(ModuleDefId, Visibility, Option<ImportOrExternCrate>)>,
pub values: Option<(ModuleDefId, Visibility, Option<ImportId>)>,
pub macros: Option<(MacroId, Visibility, Option<ImportId>)>,
}
-impl Default for PerNs {
- fn default() -> Self {
- PerNs { types: None, values: None, macros: None }
- }
-}
-
impl PerNs {
pub fn none() -> PerNs {
PerNs { types: None, values: None, macros: None }
@@ -92,7 +86,7 @@ impl PerNs {
}
pub fn filter_visibility(self, mut f: impl FnMut(Visibility) -> bool) -> PerNs {
- let _p = profile::span("PerNs::filter_visibility");
+ let _p = tracing::span!(tracing::Level::INFO, "PerNs::filter_visibility").entered();
PerNs {
types: self.types.filter(|&(_, v, _)| f(v)),
values: self.values.filter(|&(_, v, _)| f(v)),
@@ -125,19 +119,17 @@ impl PerNs {
}
pub fn iter_items(self) -> impl Iterator<Item = (ItemInNs, Option<ImportOrExternCrate>)> {
- let _p = profile::span("PerNs::iter_items");
+ let _p = tracing::span!(tracing::Level::INFO, "PerNs::iter_items").entered();
self.types
.map(|it| (ItemInNs::Types(it.0), it.2))
.into_iter()
.chain(
self.values
- .map(|it| (ItemInNs::Values(it.0), it.2.map(ImportOrExternCrate::Import)))
- .into_iter(),
+ .map(|it| (ItemInNs::Values(it.0), it.2.map(ImportOrExternCrate::Import))),
)
.chain(
self.macros
- .map(|it| (ItemInNs::Macros(it.0), it.2.map(ImportOrExternCrate::Import)))
- .into_iter(),
+ .map(|it| (ItemInNs::Macros(it.0), it.2.map(ImportOrExternCrate::Import))),
)
}
}
diff --git a/crates/hir-def/src/test_db.rs b/crates/hir-def/src/test_db.rs
index c992c3c920..9edb03c7ca 100644
--- a/crates/hir-def/src/test_db.rs
+++ b/crates/hir-def/src/test_db.rs
@@ -41,13 +41,13 @@ impl Default for TestDB {
impl Upcast<dyn ExpandDatabase> for TestDB {
fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
- &*self
+ self
}
}
impl Upcast<dyn DefDatabase> for TestDB {
fn upcast(&self) -> &(dyn DefDatabase + 'static) {
- &*self
+ self
}
}
diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index 30d38299d9..c20c1639e1 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -230,12 +230,12 @@ impl Attr {
)
)
})
- .unwrap_or_else(|| tt.len());
+ .unwrap_or(tt.len());
let (path, input) = tt.split_at(path_end);
let path = Interned::new(ModPath::from_tt(db, path)?);
- let input = match input.get(0) {
+ let input = match input.first() {
Some(tt::TokenTree::Subtree(tree)) => {
Some(Interned::new(AttrInput::TokenTree(Box::new(tree.clone()))))
}
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 8c43017971..f220284fae 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -80,6 +80,9 @@ pub trait ExpandDatabase: SourceDatabase {
#[salsa::invoke(SpanMap::new)]
fn span_map(&self, file_id: HirFileId) -> SpanMap;
+ #[salsa::transparent]
+ #[salsa::invoke(crate::span_map::expansion_span_map)]
+ fn expansion_span_map(&self, file_id: MacroFileId) -> Arc<ExpansionSpanMap>;
#[salsa::invoke(crate::span_map::real_span_map)]
fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;
@@ -280,7 +283,7 @@ fn parse_macro_expansion(
db: &dyn ExpandDatabase,
macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
- let _p = profile::span("parse_macro_expansion");
+ let _p = tracing::span!(tracing::Level::INFO, "parse_macro_expansion").entered();
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let expand_to = loc.expand_to();
let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
@@ -501,7 +504,7 @@ fn macro_expand(
macro_call_id: MacroCallId,
loc: MacroCallLoc,
) -> ExpandResult<CowArc<tt::Subtree>> {
- let _p = profile::span("macro_expand");
+ let _p = tracing::span!(tracing::Level::INFO, "macro_expand").entered();
let ExpandResult { value: tt, mut err } = match loc.def.kind {
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc),
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index d0a1bef11c..707daf0402 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -5,7 +5,7 @@ use either::Either;
use span::{FileId, FileRange, HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId};
use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize};
-use crate::{db, ExpansionInfo, MacroFileIdExt};
+use crate::{db, map_node_range_up, span_for_offset, MacroFileIdExt};
/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
///
@@ -147,7 +147,7 @@ impl InFile<&SyntaxNode> {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
- ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
@@ -163,12 +163,15 @@ impl InFile<&SyntaxNode> {
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
- pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ pub fn original_file_range_with_macro_call_body(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
- ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
@@ -193,7 +196,7 @@ impl InFile<&SyntaxNode> {
Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
}
HirFileIdRepr::MacroFile(mac_file) => {
- ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
}
}
}
@@ -215,7 +218,7 @@ impl InFile<&SyntaxNode> {
}
let (FileRange { file_id, range }, ctx) =
- ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?;
+ map_node_range_up(db, &db.expansion_span_map(file_id), self.value.text_range())?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
@@ -246,8 +249,11 @@ impl InFile<SyntaxToken> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
- let (range, ctxt) = ExpansionInfo::new(db, mac_file)
- .span_for_offset(db, self.value.text_range().start());
+ let (range, ctxt) = span_for_offset(
+ db,
+ &db.expansion_span_map(mac_file),
+ self.value.text_range().start(),
+ );
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
@@ -269,8 +275,11 @@ impl InFile<SyntaxToken> {
Some(FileRange { file_id, range: self.value.text_range() })
}
HirFileIdRepr::MacroFile(mac_file) => {
- let (range, ctxt) = ExpansionInfo::new(db, mac_file)
- .span_for_offset(db, self.value.text_range().start());
+ let (range, ctxt) = span_for_offset(
+ db,
+ &db.expansion_span_map(mac_file),
+ self.value.text_range().start(),
+ );
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
@@ -286,7 +295,7 @@ impl InFile<SyntaxToken> {
impl InMacroFile<TextSize> {
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) {
- ExpansionInfo::new(db, self.file_id).span_for_offset(db, self.value)
+ span_for_offset(db, &db.expansion_span_map(self.file_id), self.value)
}
}
@@ -300,7 +309,7 @@ impl InFile<TextRange> {
(FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
}
HirFileIdRepr::MacroFile(mac_file) => {
- match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
+ match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
None => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
@@ -315,7 +324,7 @@ impl InFile<TextRange> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => {
- match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
+ match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
Some((it, SyntaxContextId::ROOT)) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
@@ -335,7 +344,7 @@ impl InFile<TextRange> {
Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT))
}
HirFileIdRepr::MacroFile(mac_file) => {
- ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value)
+ map_node_range_up(db, &db.expansion_span_map(mac_file), self.value)
}
}
}
@@ -355,8 +364,11 @@ impl<N: AstNode> InFile<N> {
return None;
}
- let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id)
- .map_node_range_up(db, self.value.syntax().text_range())?;
+ let (FileRange { file_id, range }, ctx) = map_node_range_up(
+ db,
+ &db.expansion_span_map(file_id),
+ self.value.syntax().text_range(),
+ )?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 05f12527a4..bd25052490 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -523,6 +523,24 @@ impl MacroCallLoc {
}
}
}
+
+ pub fn include_file_id(
+ &self,
+ db: &dyn ExpandDatabase,
+ macro_call_id: MacroCallId,
+ ) -> Option<FileId> {
+ if self.def.is_include() {
+ if let Some(eager) = &self.eager {
+ if let Ok(it) =
+ builtin_fn_macro::include_input_to_file_id(db, macro_call_id, &eager.arg)
+ {
+ return Some(it);
+ }
+ }
+ }
+
+ None
+ }
}
impl MacroCallKind {
@@ -659,6 +677,10 @@ impl ExpansionInfo {
Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?))
}
+ pub fn call_file(&self) -> HirFileId {
+ self.arg.file_id
+ }
+
/// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
pub fn map_range_down(
&self,
@@ -679,13 +701,7 @@ impl ExpansionInfo {
offset: TextSize,
) -> (FileRange, SyntaxContextId) {
debug_assert!(self.expanded.value.text_range().contains(offset));
- let span = self.exp_map.span_at(offset);
- let anchor_offset = db
- .ast_id_map(span.anchor.file_id.into())
- .get_erased(span.anchor.ast_id)
- .text_range()
- .start();
- (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
+ span_for_offset(db, &self.exp_map, offset)
}
/// Maps up the text range out of the expansion hierarchy back into the original file its from.
@@ -695,27 +711,7 @@ impl ExpansionInfo {
range: TextRange,
) -> Option<(FileRange, SyntaxContextId)> {
debug_assert!(self.expanded.value.text_range().contains_range(range));
- let mut spans = self.exp_map.spans_for_range(range);
- let Span { range, anchor, ctx } = spans.next()?;
- let mut start = range.start();
- let mut end = range.end();
-
- for span in spans {
- if span.anchor != anchor || span.ctx != ctx {
- return None;
- }
- start = start.min(span.range.start());
- end = end.max(span.range.end());
- }
- let anchor_offset =
- db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
- Some((
- FileRange {
- file_id: anchor.file_id,
- range: TextRange::new(start, end) + anchor_offset,
- },
- ctx,
- ))
+ map_node_range_up(db, &self.exp_map, range)
}
/// Maps up the text range out of the expansion into is macro call.
@@ -804,6 +800,47 @@ impl ExpansionInfo {
}
}
+/// Maps up the text range out of the expansion hierarchy back into the original file it's from.
+pub fn map_node_range_up(
+ db: &dyn ExpandDatabase,
+ exp_map: &ExpansionSpanMap,
+ range: TextRange,
+) -> Option<(FileRange, SyntaxContextId)> {
+ let mut spans = exp_map.spans_for_range(range);
+ let Span { range, anchor, ctx } = spans.next()?;
+ let mut start = range.start();
+ let mut end = range.end();
+
+ for span in spans {
+ if span.anchor != anchor || span.ctx != ctx {
+ return None;
+ }
+ start = start.min(span.range.start());
+ end = end.max(span.range.end());
+ }
+ let anchor_offset =
+ db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
+ Some((
+ FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset },
+ ctx,
+ ))
+}
+
+/// Looks up the span at the given offset.
+pub fn span_for_offset(
+ db: &dyn ExpandDatabase,
+ exp_map: &ExpansionSpanMap,
+ offset: TextSize,
+) -> (FileRange, SyntaxContextId) {
+ let span = exp_map.span_at(offset);
+ let anchor_offset = db
+ .ast_id_map(span.anchor.file_id.into())
+ .get_erased(span.anchor.ast_id)
+ .text_range()
+ .start();
+ (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
+}
+
/// In Rust, macros expand token trees to token trees. When we want to turn a
/// token tree into an AST node, we need to figure out what kind of AST node we
/// want: something like `foo` can be a type, an expression, or a pattern.
diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs
index 0eb1fc1eb5..dd41bcaee2 100644
--- a/crates/hir-expand/src/mod_path.rs
+++ b/crates/hir-expand/src/mod_path.rs
@@ -232,7 +232,7 @@ fn convert_path(
ast::PathSegmentKind::SuperKw => {
let mut deg = 1;
let mut next_segment = None;
- while let Some(segment) = segments.next() {
+ for segment in segments.by_ref() {
match segment.kind()? {
ast::PathSegmentKind::SuperKw => deg += 1,
ast::PathSegmentKind::Name(name) => {
diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs
index 25c78fade8..70b47fc54b 100644
--- a/crates/hir-expand/src/proc_macro.rs
+++ b/crates/hir-expand/src/proc_macro.rs
@@ -1,4 +1,4 @@
-//! Proc Macro Expander stub
+//! Proc Macro Expander stuff
use core::fmt;
use std::{panic::RefUnwindSafe, sync};
diff --git a/crates/hir-expand/src/span_map.rs b/crates/hir-expand/src/span_map.rs
index 8e624f5585..4a60a94856 100644
--- a/crates/hir-expand/src/span_map.rs
+++ b/crates/hir-expand/src/span_map.rs
@@ -1,5 +1,5 @@
//! Span maps for real files and macro expansions.
-use span::{FileId, HirFileId, HirFileIdRepr, Span};
+use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span};
use syntax::{AstNode, TextRange};
use triomphe::Arc;
@@ -94,3 +94,10 @@ pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<Rea
tree.syntax().text_range().end(),
))
}
+
+pub(crate) fn expansion_span_map(
+ db: &dyn ExpandDatabase,
+ file_id: MacroFileId,
+) -> Arc<ExpansionSpanMap> {
+ db.parse_macro_expansion(file_id).value.1
+}
diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs
index 4625a3b01a..991fd2f91d 100644
--- a/crates/hir-ty/src/autoderef.rs
+++ b/crates/hir-ty/src/autoderef.rs
@@ -142,7 +142,7 @@ pub(crate) fn deref_by_trait(
table @ &mut InferenceTable { db, .. }: &mut InferenceTable<'_>,
ty: Ty,
) -> Option<Ty> {
- let _p = profile::span("deref_by_trait");
+ let _p = tracing::span!(tracing::Level::INFO, "deref_by_trait").entered();
if table.resolve_ty_shallow(&ty).inference_var(Interner).is_some() {
// don't try to deref unknown variables
return None;
diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs
index 4d509f20d0..7e460f9f86 100644
--- a/crates/hir-ty/src/chalk_db.rs
+++ b/crates/hir-ty/src/chalk_db.rs
@@ -689,7 +689,7 @@ pub(crate) fn impl_datum_query(
krate: CrateId,
impl_id: ImplId,
) -> Arc<ImplDatum> {
- let _p = profile::span("impl_datum");
+ let _p = tracing::span!(tracing::Level::INFO, "impl_datum").entered();
debug!("impl_datum {:?}", impl_id);
let impl_: hir_def::ImplId = from_chalk(db, impl_id);
impl_def_datum(db, krate, impl_id, impl_)
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index aa7ca48b18..21679150b3 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -118,7 +118,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::target_data_layout_query)]
- fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>;
+ fn target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>;
#[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
fn lookup_impl_method(
@@ -281,7 +281,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
}
fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
- let _p = profile::span("infer:wait").detail(|| match def {
+ let detail = match def {
DefWithBodyId::FunctionId(it) => db.function_data(it).name.display(db.upcast()).to_string(),
DefWithBodyId::StaticId(it) => {
db.static_data(it).name.clone().display(db.upcast()).to_string()
@@ -297,7 +297,8 @@ fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult>
db.enum_variant_data(it).name.display(db.upcast()).to_string()
}
DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
- });
+ };
+ let _p = tracing::span!(tracing::Level::INFO, "infer:wait", ?detail).entered();
db.infer_query(def)
}
@@ -307,7 +308,7 @@ fn trait_solve_wait(
block: Option<BlockId>,
goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
) -> Option<crate::Solution> {
- let _p = profile::span("trait_solve::wait");
+ let _p = tracing::span!(tracing::Level::INFO, "trait_solve::wait").entered();
db.trait_solve_query(krate, block, goal)
}
diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs
index a37dba4805..78f2005e67 100644
--- a/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -45,7 +45,7 @@ mod allow {
}
pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec<IncorrectCase> {
- let _p = profile::span("validate_module_item");
+ let _p = tracing::span!(tracing::Level::INFO, "validate_module_item").entered();
let mut validator = DeclValidator::new(db);
validator.validate_item(owner);
validator.sink
diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs
index 530608292e..eda8f2371c 100644
--- a/crates/hir-ty/src/diagnostics/expr.rs
+++ b/crates/hir-ty/src/diagnostics/expr.rs
@@ -48,7 +48,8 @@ pub enum BodyValidationDiagnostic {
impl BodyValidationDiagnostic {
pub fn collect(db: &dyn HirDatabase, owner: DefWithBodyId) -> Vec<BodyValidationDiagnostic> {
- let _p = profile::span("BodyValidationDiagnostic::collect");
+ let _p =
+ tracing::span!(tracing::Level::INFO, "BodyValidationDiagnostic::collect").entered();
let infer = db.infer(owner);
let mut validator = ExprValidator::new(owner, infer);
validator.validate_body(db);
diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
index cd67ca5993..0b595042cd 100644
--- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
+++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -237,7 +237,7 @@ impl<'p> MatchCheckCtx<'p> {
ctor = Or;
// Collect here because `Arena::alloc_extend` panics on reentrancy.
let subpats: SmallVec<[_; 2]> =
- pats.into_iter().map(|pat| self.lower_pat(pat)).collect();
+ pats.iter().map(|pat| self.lower_pat(pat)).collect();
fields = self.pattern_arena.alloc_extend(subpats);
}
}
@@ -460,7 +460,8 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
_f: &mut fmt::Formatter<'_>,
_pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>,
) -> fmt::Result {
- unimplemented!()
+ // FIXME: implement this, as using `unimplemented!()` causes panics in `tracing`.
+ Ok(())
}
fn bug(&self, fmt: fmt::Arguments<'_>) -> ! {
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index 96c7949e3d..2327c8df1b 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -605,8 +605,11 @@ fn render_const_scalar(
write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast()))
}
hir_def::AdtId::EnumId(e) => {
+ let Ok(target_data_layout) = f.db.target_data_layout(trait_env.krate) else {
+ return f.write_str("<target-layout-not-available>");
+ };
let Some((var_id, var_layout)) =
- detect_variant_from_bytes(&layout, f.db, trait_env, b, e)
+ detect_variant_from_bytes(&layout, f.db, &target_data_layout, b, e)
else {
return f.write_str("<failed-to-detect-variant>");
};
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 0d89269b32..71c3f89716 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -75,7 +75,7 @@ pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
/// The entry point of type inference.
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
- let _p = profile::span("infer_query");
+ let _p = tracing::span!(tracing::Level::INFO, "infer_query").entered();
let resolver = def.resolver(db.upcast());
let body = db.body(def);
let mut ctx = InferenceContext::new(db, def, &body, resolver);
diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs
index 9c41540077..61c8233950 100644
--- a/crates/hir-ty/src/infer/unify.rs
+++ b/crates/hir-ty/src/infer/unify.rs
@@ -509,7 +509,8 @@ impl<'a> InferenceTable<'a> {
}
pub(crate) fn resolve_obligations_as_possible(&mut self) {
- let _span = profile::span("resolve_obligations_as_possible");
+ let _span =
+ tracing::span!(tracing::Level::INFO, "resolve_obligations_as_possible").entered();
let mut changed = true;
let mut obligations = mem::take(&mut self.resolve_obligations_buffer);
while mem::take(&mut changed) {
diff --git a/crates/hir-ty/src/inhabitedness.rs b/crates/hir-ty/src/inhabitedness.rs
index a63556f450..532b650e8f 100644
--- a/crates/hir-ty/src/inhabitedness.rs
+++ b/crates/hir-ty/src/inhabitedness.rs
@@ -84,8 +84,7 @@ impl TypeVisitor<Interner> for UninhabitedFrom<'_> {
Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
Some(1..) => item_ty.super_visit_with(self, outer_binder),
},
-
- TyKind::Ref(..) | _ => CONTINUE_OPAQUELY_INHABITED,
+ _ => CONTINUE_OPAQUELY_INHABITED,
};
self.recursive_ty.remove(ty);
self.max_depth += 1;
diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs
index 2b84cb6b13..310c4cc9ff 100644
--- a/crates/hir-ty/src/layout.rs
+++ b/crates/hir-ty/src/layout.rs
@@ -198,7 +198,7 @@ pub fn layout_of_ty_query(
trait_env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
let krate = trait_env.krate;
- let Some(target) = db.target_data_layout(krate) else {
+ let Ok(target) = db.target_data_layout(krate) else {
return Err(LayoutError::TargetLayoutNotAvailable);
};
let cx = LayoutCx { target: &target };
diff --git a/crates/hir-ty/src/layout/adt.rs b/crates/hir-ty/src/layout/adt.rs
index 47930358a1..4cc7dffc24 100644
--- a/crates/hir-ty/src/layout/adt.rs
+++ b/crates/hir-ty/src/layout/adt.rs
@@ -32,7 +32,7 @@ pub fn layout_of_adt_query(
trait_env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
let krate = trait_env.krate;
- let Some(target) = db.target_data_layout(krate) else {
+ let Ok(target) = db.target_data_layout(krate) else {
return Err(LayoutError::TargetLayoutNotAvailable);
};
let cx = LayoutCx { target: &target };
diff --git a/crates/hir-ty/src/layout/target.rs b/crates/hir-ty/src/layout/target.rs
index b2185a03ea..b67bb6c866 100644
--- a/crates/hir-ty/src/layout/target.rs
+++ b/crates/hir-ty/src/layout/target.rs
@@ -2,6 +2,7 @@
use base_db::CrateId;
use hir_def::layout::TargetDataLayout;
+use ra_ap_rustc_abi::{AlignFromBytesError, TargetDataLayoutErrors};
use triomphe::Arc;
use crate::db::HirDatabase;
@@ -9,15 +10,40 @@ use crate::db::HirDatabase;
pub fn target_data_layout_query(
db: &dyn HirDatabase,
krate: CrateId,
-) -> Option<Arc<TargetDataLayout>> {
+) -> Result<Arc<TargetDataLayout>, Arc<str>> {
let crate_graph = db.crate_graph();
- let target_layout = crate_graph[krate].target_layout.as_ref().ok()?;
- let res = TargetDataLayout::parse_from_llvm_datalayout_string(target_layout);
- if let Err(_e) = &res {
- // FIXME: Print the error here once it implements debug/display
- // also logging here is somewhat wrong, but unfortunately this is the earliest place we can
- // parse that doesn't impose a dependency to the rust-abi crate for project-model
- tracing::error!("Failed to parse target data layout for {krate:?}");
+ let res = crate_graph[krate].target_layout.as_deref();
+ match res {
+ Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) {
+ Ok(it) => Ok(Arc::new(it)),
+ Err(e) => {
+ Err(match e {
+ TargetDataLayoutErrors::InvalidAddressSpace { addr_space, cause, err } => {
+ format!(
+ r#"invalid address space `{addr_space}` for `{cause}` in "data-layout": {err}"#
+ )
+ }
+ TargetDataLayoutErrors::InvalidBits { kind, bit, cause, err } => format!(r#"invalid {kind} `{bit}` for `{cause}` in "data-layout": {err}"#),
+ TargetDataLayoutErrors::MissingAlignment { cause } => format!(r#"missing alignment for `{cause}` in "data-layout""#),
+ TargetDataLayoutErrors::InvalidAlignment { cause, err } => format!(
+ r#"invalid alignment for `{cause}` in "data-layout": `{align}` is {err_kind}"#,
+ align = err.align(),
+ err_kind = match err {
+ AlignFromBytesError::NotPowerOfTwo(_) => "not a power of two",
+ AlignFromBytesError::TooLarge(_) => "too large",
+ }
+ ),
+ TargetDataLayoutErrors::InconsistentTargetArchitecture { dl, target } => {
+ format!(r#"inconsistent target specification: "data-layout" claims architecture is {dl}-endian, while "target-endian" is `{target}`"#)
+ }
+ TargetDataLayoutErrors::InconsistentTargetPointerWidth {
+ pointer_size,
+ target,
+ } => format!(r#"inconsistent target specification: "data-layout" claims pointers are {pointer_size}-bit, while "target-pointer-width" is `{target}`"#),
+ TargetDataLayoutErrors::InvalidBitsSize { err } => err,
+ }.into())
+ }
+ },
+ Err(e) => Err(Arc::from(&**e)),
}
- res.ok().map(Arc::new)
}
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index 54e91e7b29..288c42405d 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -363,7 +363,6 @@ has_interner!(CallableSig);
pub enum FnAbi {
Aapcs,
AapcsUnwind,
- AmdgpuKernel,
AvrInterrupt,
AvrNonBlockingInterrupt,
C,
@@ -422,7 +421,6 @@ impl FnAbi {
match s {
"aapcs-unwind" => FnAbi::AapcsUnwind,
"aapcs" => FnAbi::Aapcs,
- "amdgpu-kernel" => FnAbi::AmdgpuKernel,
"avr-interrupt" => FnAbi::AvrInterrupt,
"avr-non-blocking-interrupt" => FnAbi::AvrNonBlockingInterrupt,
"C-cmse-nonsecure-call" => FnAbi::CCmseNonsecureCall,
@@ -465,7 +463,6 @@ impl FnAbi {
match self {
FnAbi::Aapcs => "aapcs",
FnAbi::AapcsUnwind => "aapcs-unwind",
- FnAbi::AmdgpuKernel => "amdgpu-kernel",
FnAbi::AvrInterrupt => "avr-interrupt",
FnAbi::AvrNonBlockingInterrupt => "avr-non-blocking-interrupt",
FnAbi::C => "C",
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index f8ce3008f1..1c068bf684 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -143,7 +143,8 @@ pub struct TraitImpls {
impl TraitImpls {
pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
- let _p = profile::span("trait_impls_in_crate_query").detail(|| format!("{krate:?}"));
+ let _p =
+ tracing::span!(tracing::Level::INFO, "trait_impls_in_crate_query", ?krate).entered();
let mut impls = FxHashMap::default();
Self::collect_def_map(db, &mut impls, &db.crate_def_map(krate));
@@ -155,7 +156,7 @@ impl TraitImpls {
db: &dyn HirDatabase,
block: BlockId,
) -> Option<Arc<Self>> {
- let _p = profile::span("trait_impls_in_block_query");
+ let _p = tracing::span!(tracing::Level::INFO, "trait_impls_in_block_query").entered();
let mut impls = FxHashMap::default();
Self::collect_def_map(db, &mut impls, &db.block_def_map(block));
@@ -171,7 +172,8 @@ impl TraitImpls {
db: &dyn HirDatabase,
krate: CrateId,
) -> Arc<[Arc<Self>]> {
- let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}"));
+ let _p =
+ tracing::span!(tracing::Level::INFO, "trait_impls_in_deps_query", ?krate).entered();
let crate_graph = db.crate_graph();
Arc::from_iter(
@@ -272,7 +274,8 @@ pub struct InherentImpls {
impl InherentImpls {
pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
- let _p = profile::span("inherent_impls_in_crate_query").detail(|| format!("{krate:?}"));
+ let _p =
+ tracing::span!(tracing::Level::INFO, "inherent_impls_in_crate_query", ?krate).entered();
let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
let crate_def_map = db.crate_def_map(krate);
@@ -286,7 +289,7 @@ impl InherentImpls {
db: &dyn HirDatabase,
block: BlockId,
) -> Option<Arc<Self>> {
- let _p = profile::span("inherent_impls_in_block_query");
+ let _p = tracing::span!(tracing::Level::INFO, "inherent_impls_in_block_query").entered();
let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
let block_def_map = db.block_def_map(block);
@@ -359,7 +362,7 @@ pub(crate) fn incoherent_inherent_impl_crates(
krate: CrateId,
fp: TyFingerprint,
) -> SmallVec<[CrateId; 2]> {
- let _p = profile::span("inherent_impl_crates_query");
+ let _p = tracing::span!(tracing::Level::INFO, "inherent_impl_crates_query").entered();
let mut res = SmallVec::new();
let crate_graph = db.crate_graph();
diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs
index f7d043fc4e..ea4e60cad3 100644
--- a/crates/hir-ty/src/mir/borrowck.rs
+++ b/crates/hir-ty/src/mir/borrowck.rs
@@ -71,7 +71,7 @@ pub fn borrowck_query(
db: &dyn HirDatabase,
def: DefWithBodyId,
) -> Result<Arc<[BorrowckResult]>, MirLowerError> {
- let _p = profile::span("borrowck_query");
+ let _p = tracing::span!(tracing::Level::INFO, "borrowck_query").entered();
let mut res = vec![];
all_mir_bodies(db, def, |body| {
res.push(BorrowckResult {
@@ -444,7 +444,7 @@ fn mutability_of_locals(
}
if destination.projection.lookup(&body.projection_store).is_empty() {
if ever_init_map.get(destination.local).copied().unwrap_or_default() {
- push_mut_span(destination.local, MirSpan::Unknown, &mut result);
+ push_mut_span(destination.local, terminator.span, &mut result);
} else {
ever_init_map.insert(destination.local, true);
}
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index 8143dc05c3..84ee60b56b 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -17,6 +17,7 @@ use hir_def::{
use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile};
use intern::Interned;
use la_arena::ArenaMap;
+use ra_ap_rustc_abi::TargetDataLayout;
use rustc_hash::{FxHashMap, FxHashSet};
use stdx::never;
use syntax::{SyntaxNodePtr, TextRange};
@@ -51,7 +52,7 @@ macro_rules! from_bytes {
($ty:tt, $value:expr) => {
($ty::from_le_bytes(match ($value).try_into() {
Ok(it) => it,
- Err(_) => return Err(MirEvalError::TypeError(stringify!(mismatched size in constructing $ty))),
+ Err(_) => return Err(MirEvalError::InternalError(stringify!(mismatched size in constructing $ty).into())),
}))
};
}
@@ -145,6 +146,7 @@ enum MirOrDynIndex {
pub struct Evaluator<'a> {
db: &'a dyn HirDatabase,
trait_env: Arc<TraitEnvironment>,
+ target_data_layout: Arc<TargetDataLayout>,
stack: Vec<u8>,
heap: Vec<u8>,
code_stack: Vec<StackFrame>,
@@ -316,12 +318,12 @@ impl Address {
pub enum MirEvalError {
ConstEvalError(String, Box<ConstEvalError>),
LayoutError(LayoutError, Ty),
- /// Means that code had type errors (or mismatched args) and we shouldn't generate mir in first place.
- TypeError(&'static str),
+ TargetDataLayoutNotAvailable(Arc<str>),
/// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected
/// then use this type of error.
UndefinedBehavior(String),
Panic(String),
+ // FIXME: This should be folded into ConstEvalError?
MirLowerError(FunctionId, MirLowerError),
MirLowerErrorForClosure(ClosureId, MirLowerError),
TypeIsUnsized(Ty, &'static str),
@@ -330,11 +332,12 @@ pub enum MirEvalError {
InFunction(Box<MirEvalError>, Vec<(Either<FunctionId, ClosureId>, MirSpan, DefWithBodyId)>),
ExecutionLimitExceeded,
StackOverflow,
- TargetDataLayoutNotAvailable,
+ /// FIXME: Fold this into InternalError
InvalidVTableId(usize),
+ /// ?
CoerceUnsizedError(Ty),
- LangItemNotFound(LangItem),
- BrokenLayout(Box<Layout>),
+ /// These should not occur, usually indicates a bug in mir lowering.
+ InternalError(Box<str>),
}
impl MirEvalError {
@@ -359,8 +362,8 @@ impl MirEvalError {
func
)?;
}
- Either::Right(clos) => {
- writeln!(f, "In {:?}", clos)?;
+ Either::Right(closure) => {
+ writeln!(f, "In {:?}", closure)?;
}
}
let source_map = db.body_with_source_map(*def).1;
@@ -406,8 +409,8 @@ impl MirEvalError {
span_formatter,
)?;
}
- MirEvalError::TypeError(_)
- | MirEvalError::UndefinedBehavior(_)
+ MirEvalError::UndefinedBehavior(_)
+ | MirEvalError::TargetDataLayoutNotAvailable(_)
| MirEvalError::Panic(_)
| MirEvalError::MirLowerErrorForClosure(_, _)
| MirEvalError::TypeIsUnsized(_, _)
@@ -415,10 +418,8 @@ impl MirEvalError {
| MirEvalError::InvalidConst(_)
| MirEvalError::ExecutionLimitExceeded
| MirEvalError::StackOverflow
- | MirEvalError::TargetDataLayoutNotAvailable
| MirEvalError::CoerceUnsizedError(_)
- | MirEvalError::LangItemNotFound(_)
- | MirEvalError::BrokenLayout(_)
+ | MirEvalError::InternalError(_)
| MirEvalError::InvalidVTableId(_) => writeln!(f, "{:?}", err)?,
}
Ok(())
@@ -431,16 +432,16 @@ impl std::fmt::Debug for MirEvalError {
Self::ConstEvalError(arg0, arg1) => {
f.debug_tuple("ConstEvalError").field(arg0).field(arg1).finish()
}
- Self::LangItemNotFound(arg0) => f.debug_tuple("LangItemNotFound").field(arg0).finish(),
Self::LayoutError(arg0, arg1) => {
f.debug_tuple("LayoutError").field(arg0).field(arg1).finish()
}
- Self::TypeError(arg0) => f.debug_tuple("TypeError").field(arg0).finish(),
Self::UndefinedBehavior(arg0) => {
f.debug_tuple("UndefinedBehavior").field(arg0).finish()
}
Self::Panic(msg) => write!(f, "Panic with message:\n{msg:?}"),
- Self::TargetDataLayoutNotAvailable => write!(f, "TargetDataLayoutNotAvailable"),
+ Self::TargetDataLayoutNotAvailable(arg0) => {
+ f.debug_tuple("TargetDataLayoutNotAvailable").field(arg0).finish()
+ }
Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. {it} should be sized."),
Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"),
Self::StackOverflow => write!(f, "stack overflow"),
@@ -453,7 +454,7 @@ impl std::fmt::Debug for MirEvalError {
Self::CoerceUnsizedError(arg0) => {
f.debug_tuple("CoerceUnsizedError").field(arg0).finish()
}
- Self::BrokenLayout(arg0) => f.debug_tuple("BrokenLayout").field(arg0).finish(),
+ Self::InternalError(arg0) => f.debug_tuple("InternalError").field(arg0).finish(),
Self::InvalidVTableId(arg0) => f.debug_tuple("InvalidVTableId").field(arg0).finish(),
Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(),
Self::InvalidConst(arg0) => {
@@ -530,7 +531,11 @@ pub fn interpret_mir(
trait_env: Option<Arc<TraitEnvironment>>,
) -> (Result<Const>, MirOutput) {
let ty = body.locals[return_slot()].ty.clone();
- let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env);
+ let mut evaluator =
+ match Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env) {
+ Ok(it) => it,
+ Err(e) => return (Err(e), MirOutput { stdout: vec![], stderr: vec![] }),
+ };
let it: Result<Const> = (|| {
if evaluator.ptr_size() != std::mem::size_of::<usize>() {
not_supported!("targets with different pointer size from host");
@@ -566,9 +571,15 @@ impl Evaluator<'_> {
owner: DefWithBodyId,
assert_placeholder_ty_is_unused: bool,
trait_env: Option<Arc<TraitEnvironment>>,
- ) -> Evaluator<'_> {
+ ) -> Result<Evaluator<'_>> {
let crate_id = owner.module(db.upcast()).krate();
- Evaluator {
+ let target_data_layout = match db.target_data_layout(crate_id) {
+ Ok(target_data_layout) => target_data_layout,
+ Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)),
+ };
+ let cached_ptr_size = target_data_layout.pointer_size.bytes_usize();
+ Ok(Evaluator {
+ target_data_layout,
stack: vec![0],
heap: vec![0],
code_stack: vec![],
@@ -590,10 +601,7 @@ impl Evaluator<'_> {
not_special_fn_cache: RefCell::new(Default::default()),
mir_or_dyn_index_cache: RefCell::new(Default::default()),
unused_locals_store: RefCell::new(Default::default()),
- cached_ptr_size: match db.target_data_layout(crate_id) {
- Some(it) => it.pointer_size.bytes_usize(),
- None => 8,
- },
+ cached_ptr_size,
cached_fn_trait_func: db
.lang_item(crate_id, LangItem::Fn)
.and_then(|x| x.as_trait())
@@ -606,7 +614,7 @@ impl Evaluator<'_> {
.lang_item(crate_id, LangItem::FnOnce)
.and_then(|x| x.as_trait())
.and_then(|x| db.trait_data(x).method_by_name(&name![call_once])),
- }
+ })
}
fn place_addr(&self, p: &Place, locals: &Locals) -> Result<Address> {
@@ -754,8 +762,8 @@ impl Evaluator<'_> {
RustcEnumVariantIdx(it.lookup(self.db.upcast()).index as usize)
}
_ => {
- return Err(MirEvalError::TypeError(
- "Multivariant layout only happens for enums",
+ return Err(MirEvalError::InternalError(
+ "mismatched layout".into(),
))
}
}]
@@ -993,12 +1001,12 @@ impl Evaluator<'_> {
IntervalOrOwned::Borrowed(value) => interval.write_from_interval(self, value)?,
}
if remain_args == 0 {
- return Err(MirEvalError::TypeError("more arguments provided"));
+ return Err(MirEvalError::InternalError("too many arguments".into()));
}
remain_args -= 1;
}
if remain_args > 0 {
- return Err(MirEvalError::TypeError("not enough arguments provided"));
+ return Err(MirEvalError::InternalError("too few arguments".into()));
}
Ok(())
}
@@ -1071,8 +1079,8 @@ impl Evaluator<'_> {
match metadata {
Some(m) => m,
None => {
- return Err(MirEvalError::TypeError(
- "type without metadata is used for Rvalue::Len",
+ return Err(MirEvalError::InternalError(
+ "type without metadata is used for Rvalue::Len".into(),
));
}
}
@@ -1312,7 +1320,7 @@ impl Evaluator<'_> {
}
AggregateKind::Tuple(ty) => {
let layout = self.layout(ty)?;
- Owned(self.make_by_layout(
+ Owned(self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
None,
@@ -1334,7 +1342,7 @@ impl Evaluator<'_> {
AggregateKind::Adt(it, subst) => {
let (size, variant_layout, tag) =
self.layout_of_variant(*it, subst.clone(), locals)?;
- Owned(self.make_by_layout(
+ Owned(self.construct_with_layout(
size,
&variant_layout,
tag,
@@ -1343,7 +1351,7 @@ impl Evaluator<'_> {
}
AggregateKind::Closure(ty) => {
let layout = self.layout(ty)?;
- Owned(self.make_by_layout(
+ Owned(self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
None,
@@ -1415,10 +1423,7 @@ impl Evaluator<'_> {
Ok(r)
}
Variants::Multiple { tag, tag_encoding, variants, .. } => {
- let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else {
- not_supported!("missing target data layout");
- };
- let size = tag.size(&*target_data_layout).bytes_usize();
+ let size = tag.size(&*self.target_data_layout).bytes_usize();
let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
match tag_encoding {
TagEncoding::Direct => {
@@ -1458,9 +1463,8 @@ impl Evaluator<'_> {
if let TyKind::Adt(id, subst) = kind {
if let AdtId::StructId(struct_id) = id.0 {
let field_types = self.db.field_types(struct_id.into());
- let mut field_types = field_types.iter();
if let Some(ty) =
- field_types.next().map(|it| it.1.clone().substitute(Interner, subst))
+ field_types.iter().last().map(|it| it.1.clone().substitute(Interner, subst))
{
return self.coerce_unsized_look_through_fields(&ty, goal);
}
@@ -1578,10 +1582,6 @@ impl Evaluator<'_> {
Ok(match &layout.variants {
Variants::Single { .. } => (layout.size.bytes_usize(), layout, None),
Variants::Multiple { variants, tag, tag_encoding, .. } => {
- let cx = self
- .db
- .target_data_layout(self.crate_id)
- .ok_or(MirEvalError::TargetDataLayoutNotAvailable)?;
let enum_variant_id = match it {
VariantId::EnumVariantId(it) => it,
_ => not_supported!("multi variant layout for non-enums"),
@@ -1612,7 +1612,7 @@ impl Evaluator<'_> {
if have_tag {
Some((
layout.fields.offset(0).bytes_usize(),
- tag.size(&*cx).bytes_usize(),
+ tag.size(&*self.target_data_layout).bytes_usize(),
discriminant,
))
} else {
@@ -1623,7 +1623,7 @@ impl Evaluator<'_> {
})
}
- fn make_by_layout(
+ fn construct_with_layout(
&mut self,
size: usize, // Not necessarily equal to variant_layout.size
variant_layout: &Layout,
@@ -1634,7 +1634,14 @@ impl Evaluator<'_> {
if let Some((offset, size, value)) = tag {
match result.get_mut(offset..offset + size) {
Some(it) => it.copy_from_slice(&value.to_le_bytes()[0..size]),
- None => return Err(MirEvalError::BrokenLayout(Box::new(variant_layout.clone()))),
+ None => {
+ return Err(MirEvalError::InternalError(
+ format!(
+ "encoded tag ({offset}, {size}, {value}) is out of bounds 0..{size}"
+ )
+ .into(),
+ ))
+ }
}
}
for (i, op) in values.enumerate() {
@@ -1642,7 +1649,11 @@ impl Evaluator<'_> {
let op = op.get(self)?;
match result.get_mut(offset..offset + op.len()) {
Some(it) => it.copy_from_slice(op),
- None => return Err(MirEvalError::BrokenLayout(Box::new(variant_layout.clone()))),
+ None => {
+ return Err(MirEvalError::InternalError(
+ format!("field offset ({offset}) is out of bounds 0..{size}").into(),
+ ))
+ }
}
}
Ok(result)
@@ -1695,28 +1706,29 @@ impl Evaluator<'_> {
}
ConstScalar::Unknown => not_supported!("evaluating unknown const"),
};
- let mut v: Cow<'_, [u8]> = Cow::Borrowed(v);
let patch_map = memory_map.transform_addresses(|b, align| {
let addr = self.heap_allocate(b.len(), align)?;
self.write_memory(addr, b)?;
Ok(addr.to_usize())
})?;
let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1));
- if size != v.len() {
+ let v: Cow<'_, [u8]> = if size != v.len() {
// Handle self enum
if size == 16 && v.len() < 16 {
- v = Cow::Owned(pad16(&v, false).to_vec());
+ Cow::Owned(pad16(v, false).to_vec())
} else if size < 16 && v.len() == 16 {
- v = Cow::Owned(v[0..size].to_vec());
+ Cow::Borrowed(&v[0..size])
} else {
return Err(MirEvalError::InvalidConst(konst.clone()));
}
- }
+ } else {
+ Cow::Borrowed(v)
+ };
let addr = self.heap_allocate(size, align)?;
self.write_memory(addr, &v)?;
self.patch_addresses(
&patch_map,
- |bytes| match &memory_map {
+ |bytes| match memory_map {
MemoryMap::Empty | MemoryMap::Simple(_) => {
Err(MirEvalError::InvalidVTableId(from_bytes!(usize, bytes)))
}
@@ -2000,7 +2012,7 @@ impl Evaluator<'_> {
if let Some((v, l)) = detect_variant_from_bytes(
&layout,
this.db,
- this.trait_env.clone(),
+ &this.target_data_layout,
bytes,
e,
) {
@@ -2079,7 +2091,7 @@ impl Evaluator<'_> {
if let Some((ev, layout)) = detect_variant_from_bytes(
&layout,
self.db,
- self.trait_env.clone(),
+ &self.target_data_layout,
self.read_memory(addr, layout.size.bytes_usize())?,
e,
) {
@@ -2153,14 +2165,14 @@ impl Evaluator<'_> {
) -> Result<Option<StackFrame>> {
let id = from_bytes!(usize, bytes.get(self)?);
let next_ty = self.vtable_map.ty(id)?.clone();
- match &next_ty.kind(Interner) {
+ match next_ty.kind(Interner) {
TyKind::FnDef(def, generic_args) => {
self.exec_fn_def(*def, generic_args, destination, args, locals, target_bb, span)
}
TyKind::Closure(id, subst) => {
self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span)
}
- _ => Err(MirEvalError::TypeError("function pointer to non function")),
+ _ => Err(MirEvalError::InternalError("function pointer to non function".into())),
}
}
@@ -2241,7 +2253,7 @@ impl Evaluator<'_> {
CallableDefId::StructId(id) => {
let (size, variant_layout, tag) =
self.layout_of_variant(id.into(), generic_args, locals)?;
- let result = self.make_by_layout(
+ let result = self.construct_with_layout(
size,
&variant_layout,
tag,
@@ -2253,7 +2265,7 @@ impl Evaluator<'_> {
CallableDefId::EnumVariantId(id) => {
let (size, variant_layout, tag) =
self.layout_of_variant(id.into(), generic_args, locals)?;
- let result = self.make_by_layout(
+ let result = self.construct_with_layout(
size,
&variant_layout,
tag,
@@ -2407,7 +2419,9 @@ impl Evaluator<'_> {
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<Option<StackFrame>> {
- let func = args.first().ok_or(MirEvalError::TypeError("fn trait with no arg"))?;
+ let func = args
+ .first()
+ .ok_or_else(|| MirEvalError::InternalError("fn trait with no arg".into()))?;
let mut func_ty = func.ty.clone();
let mut func_data = func.interval;
while let TyKind::Ref(_, _, z) = func_ty.kind(Interner) {
@@ -2450,7 +2464,7 @@ impl Evaluator<'_> {
)
.intern(Interner);
let layout = self.layout(&ty)?;
- let result = self.make_by_layout(
+ let result = self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
None,
@@ -2634,7 +2648,7 @@ pub fn render_const_using_debug_impl(
owner: ConstId,
c: &Const,
) -> Result<String> {
- let mut evaluator = Evaluator::new(db, owner.into(), false, None);
+ let mut evaluator = Evaluator::new(db, owner.into(), false, None)?;
let locals = &Locals {
ptr: ArenaMap::new(),
body: db
@@ -2699,12 +2713,7 @@ pub fn render_const_using_debug_impl(
pub fn pad16(it: &[u8], is_signed: bool) -> [u8; 16] {
let is_negative = is_signed && it.last().unwrap_or(&0) > &127;
- let fill_with = if is_negative { 255 } else { 0 };
- it.iter()
- .copied()
- .chain(iter::repeat(fill_with))
- .take(16)
- .collect::<Vec<u8>>()
- .try_into()
- .expect("iterator take is not working")
+ let mut res = [if is_negative { 255 } else { 0 }; 16];
+ res[..it.len()].copy_from_slice(it);
+ res
}
diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs
index 4336e1e53b..b4fb99acae 100644
--- a/crates/hir-ty/src/mir/eval/shim.rs
+++ b/crates/hir-ty/src/mir/eval/shim.rs
@@ -18,7 +18,7 @@ macro_rules! from_bytes {
($ty:tt, $value:expr) => {
($ty::from_le_bytes(match ($value).try_into() {
Ok(it) => it,
- Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
+ Err(_) => return Err(MirEvalError::InternalError("mismatched size".into())),
}))
};
}
@@ -249,7 +249,9 @@ impl Evaluator<'_> {
match alloc_fn {
"rustc_allocator_zeroed" | "rustc_allocator" => {
let [size, align] = args else {
- return Err(MirEvalError::TypeError("rustc_allocator args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "rustc_allocator args are not provided".into(),
+ ));
};
let size = from_bytes!(usize, size.get(self)?);
let align = from_bytes!(usize, align.get(self)?);
@@ -259,7 +261,9 @@ impl Evaluator<'_> {
"rustc_deallocator" => { /* no-op for now */ }
"rustc_reallocator" => {
let [ptr, old_size, align, new_size] = args else {
- return Err(MirEvalError::TypeError("rustc_allocator args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "rustc_allocator args are not provided".into(),
+ ));
};
let old_size = from_bytes!(usize, old_size.get(self)?);
let new_size = from_bytes!(usize, new_size.get(self)?);
@@ -339,22 +343,22 @@ impl Evaluator<'_> {
Err(MirEvalError::Panic(message))
}
SliceLen => {
- let arg = args
- .next()
- .ok_or(MirEvalError::TypeError("argument of <[T]>::len() is not provided"))?;
+ let arg = args.next().ok_or(MirEvalError::InternalError(
+ "argument of <[T]>::len() is not provided".into(),
+ ))?;
let ptr_size = arg.len() / 2;
Ok(arg[ptr_size..].into())
}
DropInPlace => {
let ty =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)).ok_or(
- MirEvalError::TypeError(
- "generic argument of drop_in_place is not provided",
+ MirEvalError::InternalError(
+ "generic argument of drop_in_place is not provided".into(),
),
)?;
- let arg = args
- .next()
- .ok_or(MirEvalError::TypeError("argument of drop_in_place is not provided"))?;
+ let arg = args.next().ok_or(MirEvalError::InternalError(
+ "argument of drop_in_place is not provided".into(),
+ ))?;
self.run_drop_glue_deep(
ty.clone(),
locals,
@@ -380,7 +384,9 @@ impl Evaluator<'_> {
318 => {
// SYS_getrandom
let [buf, len, _flags] = args else {
- return Err(MirEvalError::TypeError("SYS_getrandom args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "SYS_getrandom args are not provided".into(),
+ ));
};
let addr = Address::from_bytes(buf.get(self)?)?;
let size = from_bytes!(usize, len.get(self)?);
@@ -408,7 +414,7 @@ impl Evaluator<'_> {
match as_str {
"memcmp" => {
let [ptr1, ptr2, size] = args else {
- return Err(MirEvalError::TypeError("memcmp args are not provided"));
+ return Err(MirEvalError::InternalError("memcmp args are not provided".into()));
};
let addr1 = Address::from_bytes(ptr1.get(self)?)?;
let addr2 = Address::from_bytes(ptr2.get(self)?)?;
@@ -424,7 +430,9 @@ impl Evaluator<'_> {
}
"write" => {
let [fd, ptr, len] = args else {
- return Err(MirEvalError::TypeError("libc::write args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "libc::write args are not provided".into(),
+ ));
};
let fd = u128::from_le_bytes(pad16(fd.get(self)?, false));
let interval = Interval {
@@ -446,14 +454,16 @@ impl Evaluator<'_> {
"pthread_key_create" => {
let key = self.thread_local_storage.create_key();
let Some(arg0) = args.first() else {
- return Err(MirEvalError::TypeError("pthread_key_create arg0 is not provided"));
+ return Err(MirEvalError::InternalError(
+ "pthread_key_create arg0 is not provided".into(),
+ ));
};
let arg0_addr = Address::from_bytes(arg0.get(self)?)?;
let key_ty = if let Some((ty, ..)) = arg0.ty.as_reference_or_ptr() {
ty
} else {
- return Err(MirEvalError::TypeError(
- "pthread_key_create arg0 is not a pointer",
+ return Err(MirEvalError::InternalError(
+ "pthread_key_create arg0 is not a pointer".into(),
));
};
let arg0_interval = Interval::new(
@@ -467,8 +477,8 @@ impl Evaluator<'_> {
}
"pthread_getspecific" => {
let Some(arg0) = args.first() else {
- return Err(MirEvalError::TypeError(
- "pthread_getspecific arg0 is not provided",
+ return Err(MirEvalError::InternalError(
+ "pthread_getspecific arg0 is not provided".into(),
));
};
let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
@@ -478,14 +488,14 @@ impl Evaluator<'_> {
}
"pthread_setspecific" => {
let Some(arg0) = args.first() else {
- return Err(MirEvalError::TypeError(
- "pthread_setspecific arg0 is not provided",
+ return Err(MirEvalError::InternalError(
+ "pthread_setspecific arg0 is not provided".into(),
));
};
let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
let Some(arg1) = args.get(1) else {
- return Err(MirEvalError::TypeError(
- "pthread_setspecific arg1 is not provided",
+ return Err(MirEvalError::InternalError(
+ "pthread_setspecific arg1 is not provided".into(),
));
};
let value = from_bytes!(u128, pad16(arg1.get(self)?, false));
@@ -502,14 +512,16 @@ impl Evaluator<'_> {
}
"syscall" => {
let Some((id, rest)) = args.split_first() else {
- return Err(MirEvalError::TypeError("syscall arg1 is not provided"));
+ return Err(MirEvalError::InternalError("syscall arg1 is not provided".into()));
};
let id = from_bytes!(i64, id.get(self)?);
self.exec_syscall(id, rest, destination, locals, span)
}
"sched_getaffinity" => {
let [_pid, _set_size, set] = args else {
- return Err(MirEvalError::TypeError("libc::write args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "libc::write args are not provided".into(),
+ ));
};
let set = Address::from_bytes(set.get(self)?)?;
// Only enable core 0 (we are single threaded anyway), which is bitset 0x0000001
@@ -520,7 +532,9 @@ impl Evaluator<'_> {
}
"getenv" => {
let [name] = args else {
- return Err(MirEvalError::TypeError("libc::write args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "libc::write args are not provided".into(),
+ ));
};
let mut name_buf = vec![];
let name = {
@@ -586,8 +600,8 @@ impl Evaluator<'_> {
"sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
| "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError(
- "f64 intrinsic signature doesn't match fn (f64) -> f64",
+ return Err(MirEvalError::InternalError(
+ "f64 intrinsic signature doesn't match fn (f64) -> f64".into(),
));
};
let arg = from_bytes!(f64, arg.get(self)?);
@@ -614,8 +628,8 @@ impl Evaluator<'_> {
}
"pow" | "minnum" | "maxnum" | "copysign" => {
let [arg1, arg2] = args else {
- return Err(MirEvalError::TypeError(
- "f64 intrinsic signature doesn't match fn (f64, f64) -> f64",
+ return Err(MirEvalError::InternalError(
+ "f64 intrinsic signature doesn't match fn (f64, f64) -> f64".into(),
));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
@@ -630,8 +644,8 @@ impl Evaluator<'_> {
}
"powi" => {
let [arg1, arg2] = args else {
- return Err(MirEvalError::TypeError(
- "powif64 signature doesn't match fn (f64, i32) -> f64",
+ return Err(MirEvalError::InternalError(
+ "powif64 signature doesn't match fn (f64, i32) -> f64".into(),
));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
@@ -640,8 +654,8 @@ impl Evaluator<'_> {
}
"fma" => {
let [arg1, arg2, arg3] = args else {
- return Err(MirEvalError::TypeError(
- "fmaf64 signature doesn't match fn (f64, f64, f64) -> f64",
+ return Err(MirEvalError::InternalError(
+ "fmaf64 signature doesn't match fn (f64, f64, f64) -> f64".into(),
));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
@@ -658,8 +672,8 @@ impl Evaluator<'_> {
"sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
| "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError(
- "f32 intrinsic signature doesn't match fn (f32) -> f32",
+ return Err(MirEvalError::InternalError(
+ "f32 intrinsic signature doesn't match fn (f32) -> f32".into(),
));
};
let arg = from_bytes!(f32, arg.get(self)?);
@@ -686,8 +700,8 @@ impl Evaluator<'_> {
}
"pow" | "minnum" | "maxnum" | "copysign" => {
let [arg1, arg2] = args else {
- return Err(MirEvalError::TypeError(
- "f32 intrinsic signature doesn't match fn (f32, f32) -> f32",
+ return Err(MirEvalError::InternalError(
+ "f32 intrinsic signature doesn't match fn (f32, f32) -> f32".into(),
));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
@@ -702,8 +716,8 @@ impl Evaluator<'_> {
}
"powi" => {
let [arg1, arg2] = args else {
- return Err(MirEvalError::TypeError(
- "powif32 signature doesn't match fn (f32, i32) -> f32",
+ return Err(MirEvalError::InternalError(
+ "powif32 signature doesn't match fn (f32, i32) -> f32".into(),
));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
@@ -712,8 +726,8 @@ impl Evaluator<'_> {
}
"fma" => {
let [arg1, arg2, arg3] = args else {
- return Err(MirEvalError::TypeError(
- "fmaf32 signature doesn't match fn (f32, f32, f32) -> f32",
+ return Err(MirEvalError::InternalError(
+ "fmaf32 signature doesn't match fn (f32, f32, f32) -> f32".into(),
));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
@@ -730,7 +744,9 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
+ return Err(MirEvalError::InternalError(
+ "size_of generic arg is not provided".into(),
+ ));
};
let size = self.size_of_sized(ty, locals, "size_of arg")?;
destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size])
@@ -739,7 +755,9 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError("align_of generic arg is not provided"));
+ return Err(MirEvalError::InternalError(
+ "align_of generic arg is not provided".into(),
+ ));
};
let align = self.layout(ty)?.align.abi.bytes();
destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
@@ -748,10 +766,14 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError("size_of_val generic arg is not provided"));
+ return Err(MirEvalError::InternalError(
+ "size_of_val generic arg is not provided".into(),
+ ));
};
let [arg] = args else {
- return Err(MirEvalError::TypeError("size_of_val args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "size_of_val args are not provided".into(),
+ ));
};
if let Some((size, _)) = self.size_align_of(ty, locals)? {
destination.write_from_bytes(self, &size.to_le_bytes())
@@ -765,12 +787,14 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError(
- "min_align_of_val generic arg is not provided",
+ return Err(MirEvalError::InternalError(
+ "min_align_of_val generic arg is not provided".into(),
));
};
let [arg] = args else {
- return Err(MirEvalError::TypeError("min_align_of_val args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "min_align_of_val args are not provided".into(),
+ ));
};
if let Some((_, align)) = self.size_align_of(ty, locals)? {
destination.write_from_bytes(self, &align.to_le_bytes())
@@ -784,7 +808,9 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError("type_name generic arg is not provided"));
+ return Err(MirEvalError::InternalError(
+ "type_name generic arg is not provided".into(),
+ ));
};
let ty_name = match ty.display_source_code(
self.db,
@@ -808,7 +834,9 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
+ return Err(MirEvalError::InternalError(
+ "size_of generic arg is not provided".into(),
+ ));
};
let result = !ty.clone().is_copy(self.db, locals.body.owner);
destination.write_from_bytes(self, &[u8::from(result)])
@@ -817,14 +845,18 @@ impl Evaluator<'_> {
// FIXME: this is wrong for const eval, it should return 2 in some
// cases.
let [lhs, rhs] = args else {
- return Err(MirEvalError::TypeError("wrapping_add args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "wrapping_add args are not provided".into(),
+ ));
};
let ans = lhs.get(self)? == rhs.get(self)?;
destination.write_from_bytes(self, &[u8::from(ans)])
}
"saturating_add" | "saturating_sub" => {
let [lhs, rhs] = args else {
- return Err(MirEvalError::TypeError("saturating_add args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "saturating_add args are not provided".into(),
+ ));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -844,7 +876,9 @@ impl Evaluator<'_> {
}
"wrapping_add" | "unchecked_add" => {
let [lhs, rhs] = args else {
- return Err(MirEvalError::TypeError("wrapping_add args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "wrapping_add args are not provided".into(),
+ ));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -853,7 +887,9 @@ impl Evaluator<'_> {
}
"ptr_offset_from_unsigned" | "ptr_offset_from" => {
let [lhs, rhs] = args else {
- return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "ptr_offset_from args are not provided".into(),
+ ));
};
let lhs = i128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = i128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -861,8 +897,8 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError(
- "ptr_offset_from generic arg is not provided",
+ return Err(MirEvalError::InternalError(
+ "ptr_offset_from generic arg is not provided".into(),
));
};
let size = self.size_of_sized(ty, locals, "ptr_offset_from arg")? as i128;
@@ -871,7 +907,9 @@ impl Evaluator<'_> {
}
"wrapping_sub" | "unchecked_sub" => {
let [lhs, rhs] = args else {
- return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "wrapping_sub args are not provided".into(),
+ ));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -880,7 +918,9 @@ impl Evaluator<'_> {
}
"wrapping_mul" | "unchecked_mul" => {
let [lhs, rhs] = args else {
- return Err(MirEvalError::TypeError("wrapping_mul args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "wrapping_mul args are not provided".into(),
+ ));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -890,7 +930,9 @@ impl Evaluator<'_> {
"wrapping_shl" | "unchecked_shl" => {
// FIXME: signed
let [lhs, rhs] = args else {
- return Err(MirEvalError::TypeError("unchecked_shl args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "unchecked_shl args are not provided".into(),
+ ));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -900,7 +942,9 @@ impl Evaluator<'_> {
"wrapping_shr" | "unchecked_shr" => {
// FIXME: signed
let [lhs, rhs] = args else {
- return Err(MirEvalError::TypeError("unchecked_shr args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "unchecked_shr args are not provided".into(),
+ ));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -910,7 +954,9 @@ impl Evaluator<'_> {
"unchecked_rem" => {
// FIXME: signed
let [lhs, rhs] = args else {
- return Err(MirEvalError::TypeError("unchecked_rem args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "unchecked_rem args are not provided".into(),
+ ));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -922,7 +968,9 @@ impl Evaluator<'_> {
"unchecked_div" | "exact_div" => {
// FIXME: signed
let [lhs, rhs] = args else {
- return Err(MirEvalError::TypeError("unchecked_div args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "unchecked_div args are not provided".into(),
+ ));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -933,7 +981,9 @@ impl Evaluator<'_> {
}
"add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" => {
let [lhs, rhs] = args else {
- return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "add_with_overflow args are not provided".into(),
+ ));
};
let result_ty = TyKind::Tuple(
2,
@@ -954,7 +1004,7 @@ impl Evaluator<'_> {
|| ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255);
let is_overflow = vec![u8::from(is_overflow)];
let layout = self.layout(&result_ty)?;
- let result = self.make_by_layout(
+ let result = self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
None,
@@ -966,15 +1016,15 @@ impl Evaluator<'_> {
}
"copy" | "copy_nonoverlapping" => {
let [src, dst, offset] = args else {
- return Err(MirEvalError::TypeError(
- "copy_nonoverlapping args are not provided",
+ return Err(MirEvalError::InternalError(
+ "copy_nonoverlapping args are not provided".into(),
));
};
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError(
- "copy_nonoverlapping generic arg is not provided",
+ return Err(MirEvalError::InternalError(
+ "copy_nonoverlapping generic arg is not provided".into(),
));
};
let src = Address::from_bytes(src.get(self)?)?;
@@ -988,18 +1038,22 @@ impl Evaluator<'_> {
}
"offset" | "arith_offset" => {
let [ptr, offset] = args else {
- return Err(MirEvalError::TypeError("offset args are not provided"));
+ return Err(MirEvalError::InternalError("offset args are not provided".into()));
};
let ty = if name == "offset" {
let Some(ty0) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError("offset generic arg is not provided"));
+ return Err(MirEvalError::InternalError(
+ "offset generic arg is not provided".into(),
+ ));
};
let Some(ty1) =
generic_args.as_slice(Interner).get(1).and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError("offset generic arg is not provided"));
+ return Err(MirEvalError::InternalError(
+ "offset generic arg is not provided".into(),
+ ));
};
if !matches!(
ty1.as_builtin(),
@@ -1008,15 +1062,15 @@ impl Evaluator<'_> {
| BuiltinType::Uint(BuiltinUint::Usize)
)
) {
- return Err(MirEvalError::TypeError(
- "offset generic arg is not usize or isize",
+ return Err(MirEvalError::InternalError(
+ "offset generic arg is not usize or isize".into(),
));
}
match ty0.as_raw_ptr() {
Some((ty, _)) => ty,
None => {
- return Err(MirEvalError::TypeError(
- "offset generic arg is not a raw pointer",
+ return Err(MirEvalError::InternalError(
+ "offset generic arg is not a raw pointer".into(),
));
}
}
@@ -1024,8 +1078,8 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError(
- "arith_offset generic arg is not provided",
+ return Err(MirEvalError::InternalError(
+ "arith_offset generic arg is not provided".into(),
));
};
ty
@@ -1046,19 +1100,21 @@ impl Evaluator<'_> {
}
"transmute" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("transmute arg is not provided"));
+ return Err(MirEvalError::InternalError(
+ "transmute arg is not provided".into(),
+ ));
};
destination.write_from_interval(self, arg.interval)
}
"likely" | "unlikely" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("likely arg is not provided"));
+ return Err(MirEvalError::InternalError("likely arg is not provided".into()));
};
destination.write_from_interval(self, arg.interval)
}
"ctpop" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("ctpop arg is not provided"));
+ return Err(MirEvalError::InternalError("ctpop arg is not provided".into()));
};
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).count_ones();
destination
@@ -1066,7 +1122,7 @@ impl Evaluator<'_> {
}
"ctlz" | "ctlz_nonzero" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("ctlz arg is not provided"));
+ return Err(MirEvalError::InternalError("ctlz arg is not provided".into()));
};
let result =
u128::from_le_bytes(pad16(arg.get(self)?, false)).leading_zeros() as usize;
@@ -1076,7 +1132,7 @@ impl Evaluator<'_> {
}
"cttz" | "cttz_nonzero" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("cttz arg is not provided"));
+ return Err(MirEvalError::InternalError("cttz arg is not provided".into()));
};
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).trailing_zeros();
destination
@@ -1084,7 +1140,9 @@ impl Evaluator<'_> {
}
"rotate_left" => {
let [lhs, rhs] = args else {
- return Err(MirEvalError::TypeError("rotate_left args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "rotate_left args are not provided".into(),
+ ));
};
let lhs = &lhs.get(self)?[0..destination.size];
let rhs = rhs.get(self)?[0] as u32;
@@ -1114,7 +1172,9 @@ impl Evaluator<'_> {
}
"rotate_right" => {
let [lhs, rhs] = args else {
- return Err(MirEvalError::TypeError("rotate_right args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "rotate_right args are not provided".into(),
+ ));
};
let lhs = &lhs.get(self)?[0..destination.size];
let rhs = rhs.get(self)?[0] as u32;
@@ -1144,13 +1204,15 @@ impl Evaluator<'_> {
}
"discriminant_value" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("discriminant_value arg is not provided"));
+ return Err(MirEvalError::InternalError(
+ "discriminant_value arg is not provided".into(),
+ ));
};
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError(
- "discriminant_value generic arg is not provided",
+ return Err(MirEvalError::InternalError(
+ "discriminant_value generic arg is not provided".into(),
));
};
let addr = Address::from_bytes(arg.get(self)?)?;
@@ -1161,11 +1223,15 @@ impl Evaluator<'_> {
}
"const_eval_select" => {
let [tuple, const_fn, _] = args else {
- return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "const_eval_select args are not provided".into(),
+ ));
};
let mut args = vec![const_fn.clone()];
let TyKind::Tuple(_, fields) = tuple.ty.kind(Interner) else {
- return Err(MirEvalError::TypeError("const_eval_select arg[0] is not a tuple"));
+ return Err(MirEvalError::InternalError(
+ "const_eval_select arg[0] is not a tuple".into(),
+ ));
};
let layout = self.layout(&tuple.ty)?;
for (i, field) in fields.iter(Interner).enumerate() {
@@ -1196,21 +1262,25 @@ impl Evaluator<'_> {
}
"read_via_copy" | "volatile_load" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("read_via_copy args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "read_via_copy args are not provided".into(),
+ ));
};
let addr = Address::from_bytes(arg.interval.get(self)?)?;
destination.write_from_interval(self, Interval { addr, size: destination.size })
}
"write_via_move" => {
let [ptr, val] = args else {
- return Err(MirEvalError::TypeError("write_via_move args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "write_via_move args are not provided".into(),
+ ));
};
let dst = Address::from_bytes(ptr.get(self)?)?;
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError(
- "write_via_copy generic arg is not provided",
+ return Err(MirEvalError::InternalError(
+ "write_via_move generic arg is not provided".into(),
));
};
let size = self.size_of_sized(ty, locals, "write_via_move ptr type")?;
@@ -1219,14 +1289,18 @@ impl Evaluator<'_> {
}
"write_bytes" => {
let [dst, val, count] = args else {
- return Err(MirEvalError::TypeError("write_bytes args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "write_bytes args are not provided".into(),
+ ));
};
let count = from_bytes!(usize, count.get(self)?);
let val = from_bytes!(u8, val.get(self)?);
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError("write_bytes generic arg is not provided"));
+ return Err(MirEvalError::InternalError(
+ "write_bytes generic arg is not provided".into(),
+ ));
};
let dst = Address::from_bytes(dst.get(self)?)?;
let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?;
@@ -1310,10 +1384,14 @@ impl Evaluator<'_> {
let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided"));
+ return Err(MirEvalError::InternalError(
+ "atomic intrinsic generic arg is not provided".into(),
+ ));
};
let Some(arg0) = args.first() else {
- return Err(MirEvalError::TypeError("atomic intrinsic arg0 is not provided"));
+ return Err(MirEvalError::InternalError(
+ "atomic intrinsic arg0 is not provided".into(),
+ ));
};
let arg0_addr = Address::from_bytes(arg0.get(self)?)?;
let arg0_interval =
@@ -1322,7 +1400,9 @@ impl Evaluator<'_> {
return destination.write_from_interval(self, arg0_interval);
}
let Some(arg1) = args.get(1) else {
- return Err(MirEvalError::TypeError("atomic intrinsic arg1 is not provided"));
+ return Err(MirEvalError::InternalError(
+ "atomic intrinsic arg1 is not provided".into(),
+ ));
};
if name.starts_with("store_") {
return arg0_interval.write_from_interval(self, arg1.interval);
@@ -1374,7 +1454,9 @@ impl Evaluator<'_> {
return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
}
let Some(arg2) = args.get(2) else {
- return Err(MirEvalError::TypeError("atomic intrinsic arg2 is not provided"));
+ return Err(MirEvalError::InternalError(
+ "atomic intrinsic arg2 is not provided".into(),
+ ));
};
if name.starts_with("cxchg_") || name.starts_with("cxchgweak_") {
let dest = if arg1.get(self)? == arg0_interval.get(self)? {
@@ -1389,7 +1471,7 @@ impl Evaluator<'_> {
)
.intern(Interner);
let layout = self.layout(&result_ty)?;
- let result = self.make_by_layout(
+ let result = self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
None,
diff --git a/crates/hir-ty/src/mir/eval/shim/simd.rs b/crates/hir-ty/src/mir/eval/shim/simd.rs
index f9156417f2..eddfd0acfb 100644
--- a/crates/hir-ty/src/mir/eval/shim/simd.rs
+++ b/crates/hir-ty/src/mir/eval/shim/simd.rs
@@ -10,7 +10,7 @@ macro_rules! from_bytes {
($ty:tt, $value:expr) => {
($ty::from_le_bytes(match ($value).try_into() {
Ok(it) => it,
- Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
+ Err(_) => return Err(MirEvalError::InternalError("mismatched size".into())),
}))
};
}
@@ -40,7 +40,9 @@ impl Evaluator<'_> {
.substitute(Interner, subst);
return Ok((fields.len(), field_ty));
}
- return Err(MirEvalError::TypeError("simd type with no len param"));
+ return Err(MirEvalError::InternalError(
+ "simd type with no len param".into(),
+ ));
}
};
match try_const_usize(self.db, len) {
@@ -48,14 +50,18 @@ impl Evaluator<'_> {
let Some(ty) =
subst.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
- return Err(MirEvalError::TypeError("simd type with no ty param"));
+ return Err(MirEvalError::InternalError(
+ "simd type with no ty param".into(),
+ ));
};
Ok((len as usize, ty.clone()))
}
- None => Err(MirEvalError::TypeError("simd type with unevaluatable len param")),
+ None => Err(MirEvalError::InternalError(
+ "simd type with unevaluatable len param".into(),
+ )),
}
}
- _ => Err(MirEvalError::TypeError("simd type which is not a struct")),
+ _ => Err(MirEvalError::InternalError("simd type which is not a struct".into())),
}
}
@@ -71,7 +77,9 @@ impl Evaluator<'_> {
match name {
"and" | "or" | "xor" => {
let [left, right] = args else {
- return Err(MirEvalError::TypeError("simd bit op args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "simd bit op args are not provided".into(),
+ ));
};
let result = left
.get(self)?
@@ -88,7 +96,7 @@ impl Evaluator<'_> {
}
"eq" | "ne" | "lt" | "le" | "gt" | "ge" => {
let [left, right] = args else {
- return Err(MirEvalError::TypeError("simd args are not provided"));
+ return Err(MirEvalError::InternalError("simd args are not provided".into()));
};
let (len, ty) = self.detect_simd_ty(&left.ty)?;
let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_)));
@@ -125,7 +133,9 @@ impl Evaluator<'_> {
}
"bitmask" => {
let [op] = args else {
- return Err(MirEvalError::TypeError("simd_bitmask args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "simd_bitmask args are not provided".into(),
+ ));
};
let (op_len, _) = self.detect_simd_ty(&op.ty)?;
let op_count = op.interval.size / op_len;
@@ -139,18 +149,20 @@ impl Evaluator<'_> {
}
"shuffle" => {
let [left, right, index] = args else {
- return Err(MirEvalError::TypeError("simd_shuffle args are not provided"));
+ return Err(MirEvalError::InternalError(
+ "simd_shuffle args are not provided".into(),
+ ));
};
let TyKind::Array(_, index_len) = index.ty.kind(Interner) else {
- return Err(MirEvalError::TypeError(
- "simd_shuffle index argument has non-array type",
+ return Err(MirEvalError::InternalError(
+ "simd_shuffle index argument has non-array type".into(),
));
};
let index_len = match try_const_usize(self.db, index_len) {
Some(it) => it as usize,
None => {
- return Err(MirEvalError::TypeError(
- "simd type with unevaluatable len param",
+ return Err(MirEvalError::InternalError(
+ "simd type with unevaluatable len param".into(),
))
}
};
@@ -164,8 +176,8 @@ impl Evaluator<'_> {
let val = match vector.clone().nth(index) {
Some(it) => it,
None => {
- return Err(MirEvalError::TypeError(
- "out of bound access in simd shuffle",
+ return Err(MirEvalError::InternalError(
+ "out of bound access in simd shuffle".into(),
))
}
};
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 99930798e8..74bac8cbf1 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -939,7 +939,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(Some(current))
}
Expr::BinaryOp { lhs, rhs, op } => {
- let op = op.ok_or(MirLowerError::IncompleteExpr)?;
+ let op: BinaryOp = op.ok_or(MirLowerError::IncompleteExpr)?;
let is_builtin = 'b: {
// Without adjust here is a hack. We assume that we know every possible adjustment
// for binary operator, and use without adjust to simplify our conditions.
@@ -2068,7 +2068,7 @@ pub fn mir_body_for_closure_query(
}
pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<MirBody>> {
- let _p = profile::span("mir_body_query").detail(|| match def {
+ let detail = match def {
DefWithBodyId::FunctionId(it) => db.function_data(it).name.display(db.upcast()).to_string(),
DefWithBodyId::StaticId(it) => db.static_data(it).name.display(db.upcast()).to_string(),
DefWithBodyId::ConstId(it) => db
@@ -2082,7 +2082,8 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<Mi
db.enum_variant_data(it).name.display(db.upcast()).to_string()
}
DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
- });
+ };
+ let _p = tracing::span!(tracing::Level::INFO, "mir_body_query", ?detail).entered();
let body = db.body(def);
let infer = db.infer(def);
let mut result = lower_to_mir(db, def, &body, &infer, body.body_expr)?;
diff --git a/crates/hir-ty/src/mir/lower/pattern_matching.rs b/crates/hir-ty/src/mir/lower/pattern_matching.rs
index 65ab12929d..8202bac532 100644
--- a/crates/hir-ty/src/mir/lower/pattern_matching.rs
+++ b/crates/hir-ty/src/mir/lower/pattern_matching.rs
@@ -114,7 +114,7 @@ impl MirLowerCtx<'_> {
index: i as u32,
}))
}),
- &mut cond_place,
+ &cond_place,
mode,
)?
}
diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs
index b6bc76bc98..3a1a4e63ea 100644
--- a/crates/hir-ty/src/traits.rs
+++ b/crates/hir-ty/src/traits.rs
@@ -100,13 +100,14 @@ pub(crate) fn trait_solve_query(
block: Option<BlockId>,
goal: Canonical<InEnvironment<Goal>>,
) -> Option<Solution> {
- let _p = profile::span("trait_solve_query").detail(|| match &goal.value.goal.data(Interner) {
+ let detail = match &goal.value.goal.data(Interner) {
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => {
db.trait_data(it.hir_trait_id()).name.display(db.upcast()).to_string()
}
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_string(),
_ => "??".to_string(),
- });
+ };
+ let _p = tracing::span!(tracing::Level::INFO, "trait_solve_query", ?detail).entered();
tracing::info!("trait_solve_query({:?})", goal.value.goal);
if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq {
diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs
index 2cdee5a15a..c0ca9d713c 100644
--- a/crates/hir-ty/src/utils.rs
+++ b/crates/hir-ty/src/utils.rs
@@ -24,18 +24,18 @@ use hir_def::{
};
use hir_expand::name::Name;
use intern::Interned;
+use ra_ap_rustc_abi::TargetDataLayout;
use rustc_hash::FxHashSet;
use smallvec::{smallvec, SmallVec};
use stdx::never;
-use triomphe::Arc;
use crate::{
consteval::unknown_const,
db::HirDatabase,
layout::{Layout, TagEncoding},
mir::pad16,
- ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitEnvironment,
- TraitRef, TraitRefExt, Ty, WhereClause,
+ ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitRef, TraitRefExt,
+ Ty, WhereClause,
};
pub(crate) fn fn_traits(
@@ -192,7 +192,7 @@ pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
/// and it doesn't store the closure types and fields.
///
/// Codes should not assume this ordering, and should always use methods available
-/// on this struct for retriving, and `TyBuilder::substs_for_closure` for creating.
+/// on this struct for retrieving, and `TyBuilder::substs_for_closure` for creating.
pub(crate) struct ClosureSubst<'a>(pub(crate) &'a Substitution);
impl<'a> ClosureSubst<'a> {
@@ -431,18 +431,16 @@ impl FallibleTypeFolder<Interner> for UnevaluatedConstEvaluatorFolder<'_> {
pub(crate) fn detect_variant_from_bytes<'a>(
layout: &'a Layout,
db: &dyn HirDatabase,
- trait_env: Arc<TraitEnvironment>,
+ target_data_layout: &TargetDataLayout,
b: &[u8],
e: EnumId,
) -> Option<(EnumVariantId, &'a Layout)> {
- let krate = trait_env.krate;
let (var_id, var_layout) = match &layout.variants {
hir_def::layout::Variants::Single { index } => {
(db.enum_data(e).variants[index.0].0, layout)
}
hir_def::layout::Variants::Multiple { tag, tag_encoding, variants, .. } => {
- let target_data_layout = db.target_data_layout(krate)?;
- let size = tag.size(&*target_data_layout).bytes_usize();
+ let size = tag.size(target_data_layout).bytes_usize();
let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
let tag = i128::from_le_bytes(pad16(&b[offset..offset + size], false));
match tag_encoding {
diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml
index e4e4bcea61..7fea837287 100644
--- a/crates/hir/Cargo.toml
+++ b/crates/hir/Cargo.toml
@@ -17,6 +17,7 @@ either.workspace = true
arrayvec.workspace = true
itertools.workspace = true
smallvec.workspace = true
+tracing.workspace = true
triomphe.workspace = true
once_cell = "1.17.1"
@@ -30,9 +31,10 @@ profile.workspace = true
stdx.workspace = true
syntax.workspace = true
tt.workspace = true
+span.workspace = true
[features]
in-rust-tree = []
[lints]
-workspace = true \ No newline at end of file
+workspace = true
diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs
index fc0a196df7..5c369f42e6 100644
--- a/crates/hir/src/attrs.rs
+++ b/crates/hir/src/attrs.rs
@@ -239,10 +239,9 @@ fn resolve_impl_trait_item(
) -> Option<DocLinkDef> {
let canonical = ty.canonical();
let krate = ty.krate(db);
- let environment = resolver.generic_def().map_or_else(
- || crate::TraitEnvironment::empty(krate.id).into(),
- |d| db.trait_environment(d),
- );
+ let environment = resolver
+ .generic_def()
+ .map_or_else(|| crate::TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
let traits_in_scope = resolver.traits_in_scope(db.upcast());
let mut result = None;
@@ -297,7 +296,7 @@ fn as_module_def_if_namespace_matches(
AssocItem::TypeAlias(it) => (ModuleDef::TypeAlias(it), Namespace::Types),
};
- (ns.unwrap_or(expected_ns) == expected_ns).then(|| DocLinkDef::ModuleDef(def))
+ (ns.unwrap_or(expected_ns) == expected_ns).then_some(DocLinkDef::ModuleDef(def))
}
fn modpath_from_str(link: &str) -> Option<ModPath> {
@@ -311,7 +310,7 @@ fn modpath_from_str(link: &str) -> Option<ModPath> {
"self" => PathKind::Super(0),
"super" => {
let mut deg = 1;
- while let Some(segment) = parts.next() {
+ for segment in parts.by_ref() {
if segment == "super" {
deg += 1;
} else {
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index bf29a53913..2d8f1dbad5 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -4,11 +4,12 @@
//! This probably isn't the best way to do this -- ideally, diagnostics should
//! be expressed in terms of hir types themselves.
pub use hir_ty::diagnostics::{CaseType, IncorrectCase};
+use hir_ty::{db::HirDatabase, diagnostics::BodyValidationDiagnostic, InferenceDiagnostic};
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
-use hir_def::{path::ModPath, AssocItemId};
+use hir_def::{body::SyntheticSyntax, hir::ExprOrPatId, path::ModPath, AssocItemId, DefWithBodyId};
use hir_expand::{name::Name, HirFileId, InFile};
use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange};
@@ -30,14 +31,28 @@ macro_rules! diagnostics {
)*
};
}
+// FIXME Accept something like the following in the macro call instead
+// diagnostics![
+// pub struct BreakOutsideOfLoop {
+// pub expr: InFile<AstPtr<ast::Expr>>,
+// pub is_break: bool,
+// pub bad_value_break: bool,
+// }, ...
+// or more concisely
+// BreakOutsideOfLoop {
+// expr: InFile<AstPtr<ast::Expr>>,
+// is_break: bool,
+// bad_value_break: bool,
+// }, ...
+// ]
diagnostics![
BreakOutsideOfLoop,
ExpectedFunction,
InactiveCode,
+ IncoherentImpl,
IncorrectCase,
InvalidDeriveTarget,
- IncoherentImpl,
MacroDefError,
MacroError,
MacroExpansionParseError,
@@ -55,8 +70,8 @@ diagnostics![
ReplaceFilterMapNextWithFindMap,
TraitImplIncorrectSafety,
TraitImplMissingAssocItems,
- TraitImplRedundantAssocItems,
TraitImplOrphan,
+ TraitImplRedundantAssocItems,
TypedHole,
TypeMismatch,
UndeclaredLabel,
@@ -326,3 +341,219 @@ pub struct TraitImplRedundantAssocItems {
pub impl_: AstPtr<ast::Impl>,
pub assoc_item: (Name, AssocItem),
}
+
+impl AnyDiagnostic {
+ pub(crate) fn body_validation_diagnostic(
+ db: &dyn HirDatabase,
+ diagnostic: BodyValidationDiagnostic,
+ source_map: &hir_def::body::BodySourceMap,
+ ) -> Option<AnyDiagnostic> {
+ match diagnostic {
+ BodyValidationDiagnostic::RecordMissingFields { record, variant, missed_fields } => {
+ let variant_data = variant.variant_data(db.upcast());
+ let missed_fields = missed_fields
+ .into_iter()
+ .map(|idx| variant_data.fields()[idx].name.clone())
+ .collect();
+
+ match record {
+ Either::Left(record_expr) => match source_map.expr_syntax(record_expr) {
+ Ok(source_ptr) => {
+ let root = source_ptr.file_syntax(db.upcast());
+ if let ast::Expr::RecordExpr(record_expr) =
+ source_ptr.value.to_node(&root)
+ {
+ if record_expr.record_expr_field_list().is_some() {
+ let field_list_parent_path =
+ record_expr.path().map(|path| AstPtr::new(&path));
+ return Some(
+ MissingFields {
+ file: source_ptr.file_id,
+ field_list_parent: AstPtr::new(&Either::Left(
+ record_expr,
+ )),
+ field_list_parent_path,
+ missed_fields,
+ }
+ .into(),
+ );
+ }
+ }
+ }
+ Err(SyntheticSyntax) => (),
+ },
+ Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
+ Ok(source_ptr) => {
+ if let Some(ptr) = source_ptr.value.cast::<ast::RecordPat>() {
+ let root = source_ptr.file_syntax(db.upcast());
+ let record_pat = ptr.to_node(&root);
+ if record_pat.record_pat_field_list().is_some() {
+ let field_list_parent_path =
+ record_pat.path().map(|path| AstPtr::new(&path));
+ return Some(
+ MissingFields {
+ file: source_ptr.file_id,
+ field_list_parent: AstPtr::new(&Either::Right(
+ record_pat,
+ )),
+ field_list_parent_path,
+ missed_fields,
+ }
+ .into(),
+ );
+ }
+ }
+ }
+ Err(SyntheticSyntax) => (),
+ },
+ }
+ }
+ BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { method_call_expr } => {
+ if let Ok(next_source_ptr) = source_map.expr_syntax(method_call_expr) {
+ return Some(
+ ReplaceFilterMapNextWithFindMap {
+ file: next_source_ptr.file_id,
+ next_expr: next_source_ptr.value,
+ }
+ .into(),
+ );
+ }
+ }
+ BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => {
+ match source_map.expr_syntax(match_expr) {
+ Ok(source_ptr) => {
+ let root = source_ptr.file_syntax(db.upcast());
+ if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) {
+ match match_expr.expr() {
+ Some(scrut_expr) if match_expr.match_arm_list().is_some() => {
+ return Some(
+ MissingMatchArms {
+ scrutinee_expr: InFile::new(
+ source_ptr.file_id,
+ AstPtr::new(&scrut_expr),
+ ),
+ uncovered_patterns,
+ }
+ .into(),
+ );
+ }
+ _ => {}
+ }
+ }
+ }
+ Err(SyntheticSyntax) => (),
+ }
+ }
+ }
+ None
+ }
+
+ pub(crate) fn inference_diagnostic(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+ d: &InferenceDiagnostic,
+ source_map: &hir_def::body::BodySourceMap,
+ ) -> Option<AnyDiagnostic> {
+ let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic");
+ let pat_syntax = |pat| source_map.pat_syntax(pat).expect("unexpected synthetic");
+ Some(match d {
+ &InferenceDiagnostic::NoSuchField { field: expr, private } => {
+ let expr_or_pat = match expr {
+ ExprOrPatId::ExprId(expr) => {
+ source_map.field_syntax(expr).map(AstPtr::wrap_left)
+ }
+ ExprOrPatId::PatId(pat) => {
+ source_map.pat_field_syntax(pat).map(AstPtr::wrap_right)
+ }
+ };
+ NoSuchField { field: expr_or_pat, private }.into()
+ }
+ &InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
+ MismatchedArgCount { call_expr: expr_syntax(call_expr), expected, found }.into()
+ }
+ &InferenceDiagnostic::PrivateField { expr, field } => {
+ let expr = expr_syntax(expr);
+ let field = field.into();
+ PrivateField { expr, field }.into()
+ }
+ &InferenceDiagnostic::PrivateAssocItem { id, item } => {
+ let expr_or_pat = match id {
+ ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
+ ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
+ };
+ let item = item.into();
+ PrivateAssocItem { expr_or_pat, item }.into()
+ }
+ InferenceDiagnostic::ExpectedFunction { call_expr, found } => {
+ let call_expr = expr_syntax(*call_expr);
+ ExpectedFunction { call: call_expr, found: Type::new(db, def, found.clone()) }
+ .into()
+ }
+ InferenceDiagnostic::UnresolvedField {
+ expr,
+ receiver,
+ name,
+ method_with_same_name_exists,
+ } => {
+ let expr = expr_syntax(*expr);
+ UnresolvedField {
+ expr,
+ name: name.clone(),
+ receiver: Type::new(db, def, receiver.clone()),
+ method_with_same_name_exists: *method_with_same_name_exists,
+ }
+ .into()
+ }
+ InferenceDiagnostic::UnresolvedMethodCall {
+ expr,
+ receiver,
+ name,
+ field_with_same_name,
+ assoc_func_with_same_name,
+ } => {
+ let expr = expr_syntax(*expr);
+ UnresolvedMethodCall {
+ expr,
+ name: name.clone(),
+ receiver: Type::new(db, def, receiver.clone()),
+ field_with_same_name: field_with_same_name
+ .clone()
+ .map(|ty| Type::new(db, def, ty)),
+ assoc_func_with_same_name: *assoc_func_with_same_name,
+ }
+ .into()
+ }
+ &InferenceDiagnostic::UnresolvedAssocItem { id } => {
+ let expr_or_pat = match id {
+ ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
+ ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
+ };
+ UnresolvedAssocItem { expr_or_pat }.into()
+ }
+ &InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => {
+ let expr = expr_syntax(expr);
+ BreakOutsideOfLoop { expr, is_break, bad_value_break }.into()
+ }
+ InferenceDiagnostic::TypedHole { expr, expected } => {
+ let expr = expr_syntax(*expr);
+ TypedHole { expr, expected: Type::new(db, def, expected.clone()) }.into()
+ }
+ &InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, expected, found } => {
+ let expr_or_pat = match pat {
+ ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
+ ExprOrPatId::PatId(pat) => {
+ let InFile { file_id, value } =
+ source_map.pat_syntax(pat).expect("unexpected synthetic");
+
+ // cast from Either<Pat, SelfParam> -> Either<_, Pat>
+ let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else {
+ return None;
+ };
+ InFile { file_id, value: ptr }
+ }
+ };
+ MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into()
+ }
+ })
+ }
+}
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index c332ab0050..1e21045e98 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -61,7 +61,7 @@ use hir_def::{
use hir_expand::{attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, MacroCallKind};
use hir_ty::{
all_super_traits, autoderef, check_orphan_rules,
- consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
+ consteval::{try_const_usize, unknown_const_as_generic, ConstExt},
diagnostics::BodyValidationDiagnostic,
known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
@@ -70,9 +70,9 @@ use hir_ty::{
primitive::UintTy,
traits::FnTrait,
AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg,
- GenericArgData, InferenceDiagnostic, Interner, ParamKind, QuantifiedWhereClause, Scalar,
- Substitution, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind,
- ValueTyDefId, WhereClause,
+ GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution,
+ TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, ValueTyDefId,
+ WhereClause,
};
use itertools::Itertools;
use nameres::diagnostics::DefDiagnosticKind;
@@ -131,8 +131,10 @@ pub use {
MacroFileIdExt,
},
hir_ty::{
+ consteval::ConstEvalError,
display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
layout::LayoutError,
+ mir::{MirEvalError, MirLowerError},
PointerCast, Safety,
},
// FIXME: Properly encapsulate mir
@@ -233,8 +235,8 @@ impl Crate {
db: &dyn DefDatabase,
query: import_map::Query,
) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
- let _p = profile::span("query_external_importables");
- import_map::search_dependencies(db, self.into(), query).into_iter().map(|item| {
+ let _p = tracing::span!(tracing::Level::INFO, "query_external_importables");
+ import_map::search_dependencies(db, self.into(), &query).into_iter().map(|item| {
match ItemInNs::from(item) {
ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
ItemInNs::Macros(mac_id) => Either::Right(mac_id),
@@ -537,13 +539,8 @@ impl Module {
/// Fills `acc` with the module's diagnostics.
pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
- let _p = profile::span("Module::diagnostics").detail(|| {
- format!(
- "{:?}",
- self.name(db)
- .map_or("<unknown>".into(), |name| name.display(db.upcast()).to_string())
- )
- });
+ let name = self.name(db);
+ let _p = tracing::span!(tracing::Level::INFO, "Module::diagnostics", ?name);
let def_map = self.id.def_map(db.upcast());
for diag in def_map.diagnostics() {
if diag.in_module != self.id.local_id {
@@ -906,7 +903,7 @@ fn emit_def_diagnostic_(
}
DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
let node = ast.to_node(db.upcast());
- let derive = node.attrs().nth(*id as usize);
+ let derive = node.attrs().nth(*id);
match derive {
Some(derive) => {
acc.push(
@@ -921,7 +918,7 @@ fn emit_def_diagnostic_(
}
DefDiagnosticKind::MalformedDerive { ast, id } => {
let node = ast.to_node(db.upcast());
- let derive = node.attrs().nth(*id as usize);
+ let derive = node.attrs().nth(*id);
match derive {
Some(derive) => {
acc.push(
@@ -1626,116 +1623,8 @@ impl DefWithBody {
}
let infer = db.infer(self.into());
- let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic");
- let pat_syntax = |pat| source_map.pat_syntax(pat).expect("unexpected synthetic");
for d in &infer.diagnostics {
- acc.push(match d {
- &InferenceDiagnostic::NoSuchField { field: expr, private } => {
- let expr_or_pat = match expr {
- ExprOrPatId::ExprId(expr) => {
- source_map.field_syntax(expr).map(AstPtr::wrap_left)
- }
- ExprOrPatId::PatId(pat) => {
- source_map.pat_field_syntax(pat).map(AstPtr::wrap_right)
- }
- };
- NoSuchField { field: expr_or_pat, private }.into()
- }
- &InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
- MismatchedArgCount { call_expr: expr_syntax(call_expr), expected, found }.into()
- }
- &InferenceDiagnostic::PrivateField { expr, field } => {
- let expr = expr_syntax(expr);
- let field = field.into();
- PrivateField { expr, field }.into()
- }
- &InferenceDiagnostic::PrivateAssocItem { id, item } => {
- let expr_or_pat = match id {
- ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
- ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
- };
- let item = item.into();
- PrivateAssocItem { expr_or_pat, item }.into()
- }
- InferenceDiagnostic::ExpectedFunction { call_expr, found } => {
- let call_expr = expr_syntax(*call_expr);
- ExpectedFunction {
- call: call_expr,
- found: Type::new(db, DefWithBodyId::from(self), found.clone()),
- }
- .into()
- }
- InferenceDiagnostic::UnresolvedField {
- expr,
- receiver,
- name,
- method_with_same_name_exists,
- } => {
- let expr = expr_syntax(*expr);
- UnresolvedField {
- expr,
- name: name.clone(),
- receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
- method_with_same_name_exists: *method_with_same_name_exists,
- }
- .into()
- }
- InferenceDiagnostic::UnresolvedMethodCall {
- expr,
- receiver,
- name,
- field_with_same_name,
- assoc_func_with_same_name,
- } => {
- let expr = expr_syntax(*expr);
- UnresolvedMethodCall {
- expr,
- name: name.clone(),
- receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
- field_with_same_name: field_with_same_name
- .clone()
- .map(|ty| Type::new(db, DefWithBodyId::from(self), ty)),
- assoc_func_with_same_name: *assoc_func_with_same_name,
- }
- .into()
- }
- &InferenceDiagnostic::UnresolvedAssocItem { id } => {
- let expr_or_pat = match id {
- ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
- ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
- };
- UnresolvedAssocItem { expr_or_pat }.into()
- }
- &InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => {
- let expr = expr_syntax(expr);
- BreakOutsideOfLoop { expr, is_break, bad_value_break }.into()
- }
- InferenceDiagnostic::TypedHole { expr, expected } => {
- let expr = expr_syntax(*expr);
-
- TypedHole {
- expr,
- expected: Type::new(db, DefWithBodyId::from(self), expected.clone()),
- }
- .into()
- }
- &InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, expected, found } => {
- let expr_or_pat = match pat {
- ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
- ExprOrPatId::PatId(pat) => {
- let InFile { file_id, value } =
- source_map.pat_syntax(pat).expect("unexpected synthetic");
-
- // cast from Either<Pat, SelfParam> -> Either<_, Pat>
- let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else {
- continue;
- };
- InFile { file_id, value: ptr }
- }
- };
- MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into()
- }
- });
+ acc.extend(AnyDiagnostic::inference_diagnostic(db, self.into(), d, &source_map));
}
for (pat_or_expr, mismatch) in infer.type_mismatches() {
let expr_or_pat = match pat_or_expr {
@@ -1857,109 +1746,7 @@ impl DefWithBody {
}
for diagnostic in BodyValidationDiagnostic::collect(db, self.into()) {
- match diagnostic {
- BodyValidationDiagnostic::RecordMissingFields {
- record,
- variant,
- missed_fields,
- } => {
- let variant_data = variant.variant_data(db.upcast());
- let missed_fields = missed_fields
- .into_iter()
- .map(|idx| variant_data.fields()[idx].name.clone())
- .collect();
-
- match record {
- Either::Left(record_expr) => match source_map.expr_syntax(record_expr) {
- Ok(source_ptr) => {
- let root = source_ptr.file_syntax(db.upcast());
- if let ast::Expr::RecordExpr(record_expr) =
- source_ptr.value.to_node(&root)
- {
- if record_expr.record_expr_field_list().is_some() {
- let field_list_parent_path =
- record_expr.path().map(|path| AstPtr::new(&path));
- acc.push(
- MissingFields {
- file: source_ptr.file_id,
- field_list_parent: AstPtr::new(&Either::Left(
- record_expr,
- )),
- field_list_parent_path,
- missed_fields,
- }
- .into(),
- )
- }
- }
- }
- Err(SyntheticSyntax) => (),
- },
- Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
- Ok(source_ptr) => {
- if let Some(ptr) = source_ptr.value.cast::<ast::RecordPat>() {
- let root = source_ptr.file_syntax(db.upcast());
- let record_pat = ptr.to_node(&root);
- if record_pat.record_pat_field_list().is_some() {
- let field_list_parent_path =
- record_pat.path().map(|path| AstPtr::new(&path));
- acc.push(
- MissingFields {
- file: source_ptr.file_id,
- field_list_parent: AstPtr::new(&Either::Right(
- record_pat,
- )),
- field_list_parent_path,
- missed_fields,
- }
- .into(),
- )
- }
- }
- }
- Err(SyntheticSyntax) => (),
- },
- }
- }
- BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { method_call_expr } => {
- if let Ok(next_source_ptr) = source_map.expr_syntax(method_call_expr) {
- acc.push(
- ReplaceFilterMapNextWithFindMap {
- file: next_source_ptr.file_id,
- next_expr: next_source_ptr.value,
- }
- .into(),
- );
- }
- }
- BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => {
- match source_map.expr_syntax(match_expr) {
- Ok(source_ptr) => {
- let root = source_ptr.file_syntax(db.upcast());
- if let ast::Expr::MatchExpr(match_expr) =
- &source_ptr.value.to_node(&root)
- {
- match match_expr.expr() {
- Some(scrut_expr) if match_expr.match_arm_list().is_some() => {
- acc.push(
- MissingMatchArms {
- scrutinee_expr: InFile::new(
- source_ptr.file_id,
- AstPtr::new(&scrut_expr),
- ),
- uncovered_patterns,
- }
- .into(),
- );
- }
- _ => {}
- }
- }
- }
- Err(SyntheticSyntax) => (),
- }
- }
- }
+ acc.extend(AnyDiagnostic::body_validation_diagnostic(db, diagnostic, &source_map));
}
let def: ModuleDef = match self {
@@ -1975,7 +1762,6 @@ impl DefWithBody {
}
}
}
-
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Function {
pub(crate) id: FunctionId,
@@ -4266,7 +4052,7 @@ impl Type {
name: Option<&Name>,
mut callback: impl FnMut(Function) -> Option<T>,
) -> Option<T> {
- let _p = profile::span("iterate_method_candidates");
+ let _p = tracing::span!(tracing::Level::INFO, "iterate_method_candidates");
let mut slot = None;
self.iterate_method_candidates_dyn(
@@ -4345,7 +4131,7 @@ impl Type {
name: Option<&Name>,
mut callback: impl FnMut(AssocItem) -> Option<T>,
) -> Option<T> {
- let _p = profile::span("iterate_path_candidates");
+ let _p = tracing::span!(tracing::Level::INFO, "iterate_path_candidates");
let mut slot = None;
self.iterate_path_candidates_dyn(
db,
@@ -4411,7 +4197,7 @@ impl Type {
&'a self,
db: &'a dyn HirDatabase,
) -> impl Iterator<Item = Trait> + 'a {
- let _p = profile::span("applicable_inherent_traits");
+ let _p = tracing::span!(tracing::Level::INFO, "applicable_inherent_traits");
self.autoderef_(db)
.filter_map(|ty| ty.dyn_trait())
.flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id))
@@ -4419,7 +4205,7 @@ impl Type {
}
pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a {
- let _p = profile::span("env_traits");
+ let _p = tracing::span!(tracing::Level::INFO, "env_traits");
self.autoderef_(db)
.filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_)))
.flat_map(|ty| {
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index fdb94a6d5a..a869029d09 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -25,6 +25,7 @@ use hir_expand::{
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
+use span::{Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
@@ -131,6 +132,7 @@ pub struct SemanticsImpl<'db> {
/// Rootnode to HirFileId cache
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
// These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
+ // So we might want to move them out into something specific for semantic highlighting
expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
/// MacroCall to its expansion's MacroFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
@@ -607,29 +609,102 @@ impl<'db> SemanticsImpl<'db> {
res
}
- fn descend_into_macros_impl(
+ // Returns:
+ // - the SourceAnalyzer for the file containing the original include! call
+ // - the macro file id of that include! expansion
+ // - the token inside the include! expansion mapped from the input token
+ // - the span for the mapped token
+ fn is_from_include_file(
&self,
token: SyntaxToken,
- f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
- ) {
- let _p = profile::span("descend_into_macros");
- let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
- Some(it) => it,
- None => return,
- };
+ ) -> Option<(SourceAnalyzer, HirFileId, SyntaxToken, Span)> {
+ let parent = token.parent()?;
+ let file_id = self.find_file(&parent).file_id.file_id()?;
+
+ let mut cache = self.expansion_info_cache.borrow_mut();
+
+ // iterate over the relevant crates and find all include! invocations whose included file id matches this file
+ for (invoc, _) in self
+ .db
+ .relevant_crates(file_id)
+ .iter()
+ .flat_map(|krate| self.db.include_macro_invoc(*krate))
+ .filter(|&(_, include_file_id)| include_file_id == file_id)
+ {
+ let macro_file = invoc.as_macro_file();
+ let expansion_info = cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
- let span = match sa.file_id.file_id() {
- Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()),
- None => {
- stdx::never!();
- return;
+ // Create the source analyzer for the macro call scope
+ let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file()))
+ else {
+ continue;
+ };
+ {
+ let InMacroFile { file_id: macro_file, value } = expansion_info.expanded();
+ self.cache(value, macro_file.into());
}
- };
+
+ // get mapped token in the include! macro file
+ let span = span::SpanData {
+ range: token.text_range(),
+ anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+ ctx: SyntaxContextId::ROOT,
+ };
+ let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
+ expansion_info.map_range_down(span)
+ else {
+ continue;
+ };
+
+ // if we find one, then return
+ if let Some(t) = mapped_tokens.next() {
+ return Some((sa, file_id.into(), t, span));
+ }
+ }
+
+ None
+ }
+
+ fn descend_into_macros_impl(
+ &self,
+ mut token: SyntaxToken,
+ f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
+ ) {
+ let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros");
+ let (sa, span, file_id) =
+ match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
+ Some(sa) => match sa.file_id.file_id() {
+ Some(file_id) => (
+ sa,
+ self.db.real_span_map(file_id).span_for_range(token.text_range()),
+ file_id.into(),
+ ),
+ None => {
+ stdx::never!();
+ return;
+ }
+ },
+ None => {
+ // if we cannot find a source analyzer for this token, then we try to find out
+ // whether this file is an included file and treat that as the include input
+ let Some((it, macro_file_id, mapped_token, s)) =
+ self.is_from_include_file(token)
+ else {
+ return;
+ };
+ token = mapped_token;
+ (it, s, macro_file_id)
+ }
+ };
let mut cache = self.expansion_info_cache.borrow_mut();
let mut mcache = self.macro_call_cache.borrow_mut();
let def_map = sa.resolver.def_map();
+ let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
+
let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
let expansion_info = cache
.entry(macro_file)
@@ -651,8 +726,6 @@ impl<'db> SemanticsImpl<'db> {
res
};
- let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(sa.file_id, smallvec![token])];
-
while let Some((file_id, mut tokens)) = stack.pop() {
while let Some(token) = tokens.pop() {
let was_not_remapped = (|| {
@@ -1222,7 +1295,7 @@ impl<'db> SemanticsImpl<'db> {
offset: Option<TextSize>,
infer_body: bool,
) -> Option<SourceAnalyzer> {
- let _p = profile::span("Semantics::analyze_impl");
+ let _p = tracing::span!(tracing::Level::INFO, "Semantics::analyze_impl");
let node = self.find_file(node);
let container = self.with_ctx(|ctx| ctx.find_container(node))?;
diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs
index f60b3749b0..14dbe69240 100644
--- a/crates/hir/src/semantics/source_to_def.rs
+++ b/crates/hir/src/semantics/source_to_def.rs
@@ -117,7 +117,7 @@ pub(super) struct SourceToDefCtx<'a, 'b> {
impl SourceToDefCtx<'_, '_> {
pub(super) fn file_to_def(&self, file: FileId) -> SmallVec<[ModuleId; 1]> {
- let _p = profile::span("SourceBinder::to_module_def");
+ let _p = tracing::span!(tracing::Level::INFO, "SourceBinder::to_module_def");
let mut mods = SmallVec::new();
for &crate_id in self.db.relevant_crates(file).iter() {
// FIXME: inner items
@@ -132,7 +132,7 @@ impl SourceToDefCtx<'_, '_> {
}
pub(super) fn module_to_def(&self, src: InFile<ast::Module>) -> Option<ModuleId> {
- let _p = profile::span("module_to_def");
+ let _p = tracing::span!(tracing::Level::INFO, "module_to_def");
let parent_declaration = src
.syntax()
.ancestors_with_macros_skip_attr_item(self.db.upcast())
@@ -153,7 +153,7 @@ impl SourceToDefCtx<'_, '_> {
}
pub(super) fn source_file_to_def(&self, src: InFile<ast::SourceFile>) -> Option<ModuleId> {
- let _p = profile::span("source_file_to_def");
+ let _p = tracing::span!(tracing::Level::INFO, "source_file_to_def");
let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id).first().copied()
}
diff --git a/crates/ide-assists/Cargo.toml b/crates/ide-assists/Cargo.toml
index 4d4bac5fb9..98961a18de 100644
--- a/crates/ide-assists/Cargo.toml
+++ b/crates/ide-assists/Cargo.toml
@@ -17,6 +17,7 @@ cov-mark = "2.0.0-pre.1"
itertools.workspace = true
either.workspace = true
smallvec.workspace = true
+tracing.workspace = true
# local deps
stdx.workspace = true
@@ -38,4 +39,4 @@ sourcegen.workspace = true
in-rust-tree = []
[lints]
-workspace = true \ No newline at end of file
+workspace = true
diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index 410c623109..c1b95bb1e2 100644
--- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -105,7 +105,7 @@ fn add_missing_impl_members_inner(
assist_id: &'static str,
label: &'static str,
) -> Option<()> {
- let _p = profile::span("add_missing_impl_members_inner");
+ let _p = tracing::span!(tracing::Level::INFO, "add_missing_impl_members_inner");
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
let impl_ = ctx.sema.to_def(&impl_def)?;
@@ -370,17 +370,17 @@ impl<U> Foo<U> for S {
add_missing_impl_members,
r#"
pub trait Trait<'a, 'b, A, B, C> {
- fn foo(&self, one: &'a A, anoter: &'b B) -> &'a C;
+ fn foo(&self, one: &'a A, another: &'b B) -> &'a C;
}
impl<'x, 'y, T, V, U> Trait<'x, 'y, T, V, U> for () {$0}"#,
r#"
pub trait Trait<'a, 'b, A, B, C> {
- fn foo(&self, one: &'a A, anoter: &'b B) -> &'a C;
+ fn foo(&self, one: &'a A, another: &'b B) -> &'a C;
}
impl<'x, 'y, T, V, U> Trait<'x, 'y, T, V, U> for () {
- fn foo(&self, one: &'x T, anoter: &'y V) -> &'x U {
+ fn foo(&self, one: &'x T, another: &'y V) -> &'x U {
${0:todo!()}
}
}"#,
@@ -393,7 +393,7 @@ impl<'x, 'y, T, V, U> Trait<'x, 'y, T, V, U> for () {
add_missing_default_members,
r#"
pub trait Trait<'a, 'b, A, B, C: Default> {
- fn foo(&self, _one: &'a A, _anoter: &'b B) -> (C, &'a i32) {
+ fn foo(&self, _one: &'a A, _another: &'b B) -> (C, &'a i32) {
let value: &'a i32 = &0;
(C::default(), value)
}
@@ -402,14 +402,14 @@ pub trait Trait<'a, 'b, A, B, C: Default> {
impl<'x, 'y, T, V, U: Default> Trait<'x, 'y, T, V, U> for () {$0}"#,
r#"
pub trait Trait<'a, 'b, A, B, C: Default> {
- fn foo(&self, _one: &'a A, _anoter: &'b B) -> (C, &'a i32) {
+ fn foo(&self, _one: &'a A, _another: &'b B) -> (C, &'a i32) {
let value: &'a i32 = &0;
(C::default(), value)
}
}
impl<'x, 'y, T, V, U: Default> Trait<'x, 'y, T, V, U> for () {
- $0fn foo(&self, _one: &'x T, _anoter: &'y V) -> (U, &'x i32) {
+ $0fn foo(&self, _one: &'x T, _another: &'y V) -> (U, &'x i32) {
let value: &'x i32 = &0;
(<U>::default(), value)
}
diff --git a/crates/ide-assists/src/handlers/extract_variable.rs b/crates/ide-assists/src/handlers/extract_variable.rs
index 0f23b69908..22d16cf6b3 100644
--- a/crates/ide-assists/src/handlers/extract_variable.rs
+++ b/crates/ide-assists/src/handlers/extract_variable.rs
@@ -163,7 +163,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
block
} else {
// `expr_replace` is a descendant of `to_wrap`, so both steps need to be
- // handled seperately, otherwise we wrap the wrong expression
+ // handled separately, otherwise we wrap the wrong expression
let to_wrap = edit.make_mut(to_wrap);
// Replace the target expr first so that we don't need to find where
diff --git a/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
index 154a1f59c7..3964b14f47 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_trait.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
@@ -491,7 +491,7 @@ fn remove_useless_where_clauses(trait_ty: &ast::Type, self_ty: &ast::Type, wc: a
// Generate generic args that should be apply to current impl.
//
-// For exmaple, say we have implementation `impl<A, B, C> Trait for B<A>`,
+// For example, say we have implementation `impl<A, B, C> Trait for B<A>`,
// and `b: B<T>` in struct `S<T>`. Then the `A` should be instantiated to `T`.
// While the last two generic args `B` and `C` doesn't change, it remains
// `<B, C>`. So we apply `<T, B, C>` as generic arguments to impl.
@@ -637,7 +637,7 @@ fn const_assoc_item(item: syntax::ast::Const, qual_path_ty: ast::Path) -> Option
let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str());
// We want rhs of the const assignment to be a qualified path
- // The general case for const assigment can be found [here](`https://doc.rust-lang.org/reference/items/constant-items.html`)
+ // The general case for const assignment can be found [here](`https://doc.rust-lang.org/reference/items/constant-items.html`)
// The qualified will have the following generic syntax :
// <Base as Trait<GenArgs>>::ConstName;
// FIXME : We can't rely on `make::path_qualified` for now but it would be nice to replace the following with it.
@@ -779,7 +779,7 @@ impl Trait for Base {}
#[test]
fn test_self_ty() {
- // trait whith `Self` type cannot be delegated
+ // trait with `Self` type cannot be delegated
//
// See the function `fn f() -> Self`.
// It should be `fn f() -> Base` in `Base`, and `fn f() -> S` in `S`
diff --git a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
index d90d366ffe..91eaa96b6c 100644
--- a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
@@ -6,7 +6,7 @@ use syntax::{
use crate::{AssistContext, AssistId, AssistKind, Assists};
-// FIXME: Generate proper `index_mut` method body refer to `index` method body may impossible due to the unpredicable case [#15581].
+// FIXME: Generate proper `index_mut` method body refer to `index` method body may impossible due to the unpredictable case [#15581].
// Here just leave the `index_mut` method body be same as `index` method body, user can modify it manually to meet their need.
// Assist: generate_mut_trait_impl
diff --git a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
index a8817436ba..8881aa69f2 100644
--- a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
@@ -183,7 +183,7 @@ fn remove_items_visibility(item: &ast::AssocItem) {
fn strip_body(item: &ast::AssocItem) {
if let ast::AssocItem::Fn(f) = item {
if let Some(body) = f.body() {
- // In constrast to function bodies, we want to see no ws before a semicolon.
+ // In contrast to function bodies, we want to see no ws before a semicolon.
// So let's remove them if we see any.
if let Some(prev) = body.syntax().prev_sibling_or_token() {
if prev.kind() == SyntaxKind::WHITESPACE {
diff --git a/crates/ide-assists/src/handlers/merge_imports.rs b/crates/ide-assists/src/handlers/merge_imports.rs
index 2beab26dce..797c5c0653 100644
--- a/crates/ide-assists/src/handlers/merge_imports.rs
+++ b/crates/ide-assists/src/handlers/merge_imports.rs
@@ -1,8 +1,9 @@
use either::Either;
use ide_db::imports::{
insert_use::{ImportGranularity, InsertUseConfig},
- merge_imports::{try_merge_imports, try_merge_trees, MergeBehavior},
+ merge_imports::{try_merge_imports, try_merge_trees, try_normalize_use_tree, MergeBehavior},
};
+use itertools::Itertools;
use syntax::{
algo::neighbor,
ast::{self, edit_in_place::Removable},
@@ -32,24 +33,13 @@ use Edit::*;
pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let (target, edits) = if ctx.has_empty_selection() {
// Merge a neighbor
- let mut tree: ast::UseTree = ctx.find_node_at_offset()?;
- if ctx.config.insert_use.granularity == ImportGranularity::One
- && tree.parent_use_tree_list().is_some()
- {
- cov_mark::hit!(resolve_top_use_tree_for_import_one);
- tree = tree.top_use_tree();
- }
+ cov_mark::hit!(merge_with_use_item_neighbors);
+ let tree = ctx.find_node_at_offset::<ast::UseTree>()?.top_use_tree();
let target = tree.syntax().text_range();
- let edits = if let Some(use_item) = tree.syntax().parent().and_then(ast::Use::cast) {
- cov_mark::hit!(merge_with_use_item_neighbors);
- let mut neighbor = next_prev().find_map(|dir| neighbor(&use_item, dir)).into_iter();
- use_item.try_merge_from(&mut neighbor, &ctx.config.insert_use)
- } else {
- cov_mark::hit!(merge_with_use_tree_neighbors);
- let mut neighbor = next_prev().find_map(|dir| neighbor(&tree, dir)).into_iter();
- tree.clone().try_merge_from(&mut neighbor, &ctx.config.insert_use)
- };
+ let use_item = tree.syntax().parent().and_then(ast::Use::cast)?;
+ let mut neighbor = next_prev().find_map(|dir| neighbor(&use_item, dir)).into_iter();
+ let edits = use_item.try_merge_from(&mut neighbor, &ctx.config.insert_use);
(target, edits?)
} else {
// Merge selected
@@ -94,7 +84,35 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
for edit in edits_mut {
match edit {
Remove(it) => it.as_ref().either(Removable::remove, Removable::remove),
- Replace(old, new) => ted::replace(old, new),
+ Replace(old, new) => {
+ ted::replace(old, &new);
+
+ // If there's a selection and we're replacing a use tree in a tree list,
+ // normalize the parent use tree if it only contains the merged subtree.
+ if !ctx.has_empty_selection() {
+ let normalized_use_tree = ast::UseTree::cast(new)
+ .as_ref()
+ .and_then(ast::UseTree::parent_use_tree_list)
+ .and_then(|use_tree_list| {
+ if use_tree_list.use_trees().collect_tuple::<(_,)>().is_some() {
+ Some(use_tree_list.parent_use_tree())
+ } else {
+ None
+ }
+ })
+ .and_then(|target_tree| {
+ try_normalize_use_tree(
+ &target_tree,
+ ctx.config.insert_use.granularity.into(),
+ )
+ .map(|top_use_tree_flat| (target_tree, top_use_tree_flat))
+ });
+ if let Some((old_tree, new_tree)) = normalized_use_tree {
+ cov_mark::hit!(replace_parent_with_normalized_use_tree);
+ ted::replace(old_tree.syntax(), new_tree.syntax());
+ }
+ }
+ }
}
}
},
@@ -201,20 +219,17 @@ use std::fmt$0::{Display, Debug};
use std::fmt::{Display, Debug};
",
r"
-use std::fmt::{Display, Debug};
+use std::fmt::{Debug, Display};
",
);
// The assist macro below calls `check_assist_import_one` 4 times with different input
- // use item variations based on the first 2 input parameters, but only 2 calls
- // contain `use {std::fmt$0::{Display, Debug}};` for which the top use tree will need
- // to be resolved.
- cov_mark::check_count!(resolve_top_use_tree_for_import_one, 2);
+ // use item variations based on the first 2 input parameters.
cov_mark::check_count!(merge_with_use_item_neighbors, 4);
check_assist_import_one_variations!(
"std::fmt$0::{Display, Debug}",
"std::fmt::{Display, Debug}",
- "use {std::fmt::{Display, Debug}};"
+ "use {std::fmt::{Debug, Display}};"
);
}
@@ -257,7 +272,7 @@ use std::fmt::{Debug, Display};
}
#[test]
- fn merge_self1() {
+ fn merge_self() {
check_assist(
merge_imports,
r"
@@ -276,21 +291,8 @@ use std::fmt::{self, Display};
}
#[test]
- fn merge_self2() {
- check_assist(
- merge_imports,
- r"
-use std::{fmt, $0fmt::Display};
-",
- r"
-use std::{fmt::{self, Display}};
-",
- );
- }
-
- #[test]
- fn not_applicable_to_single_one_style_import() {
- cov_mark::check!(resolve_top_use_tree_for_import_one);
+ fn not_applicable_to_single_import() {
+ check_assist_not_applicable(merge_imports, "use std::{fmt, $0fmt::Display};");
check_assist_not_applicable_for_import_one(
merge_imports,
"use {std::{fmt, $0fmt::Display}};",
@@ -385,14 +387,14 @@ pub(in this::path) use std::fmt::{Debug, Display};
#[test]
fn test_merge_nested() {
- cov_mark::check!(merge_with_use_tree_neighbors);
check_assist(
merge_imports,
r"
-use std::{fmt$0::Debug, fmt::Display};
+use std::{fmt$0::Debug, fmt::Error};
+use std::{fmt::Write, fmt::Display};
",
r"
-use std::{fmt::{Debug, Display}};
+use std::fmt::{Debug, Display, Error, Write};
",
);
}
@@ -402,10 +404,11 @@ use std::{fmt::{Debug, Display}};
check_assist(
merge_imports,
r"
-use std::{fmt::Debug, fmt$0::Display};
+use std::{fmt::Debug, fmt$0::Error};
+use std::{fmt::Write, fmt::Display};
",
r"
-use std::{fmt::{Debug, Display}};
+use std::fmt::{Debug, Display, Error, Write};
",
);
}
@@ -419,13 +422,13 @@ use std$0::{fmt::{Write, Display}};
use std::{fmt::{self, Debug}};
",
r"
-use std::{fmt::{self, Debug, Display, Write}};
+use std::fmt::{self, Debug, Display, Write};
",
);
check_assist_import_one_variations!(
"std$0::{fmt::{Write, Display}}",
"std::{fmt::{self, Debug}}",
- "use {std::{fmt::{self, Debug, Display, Write}}};"
+ "use {std::fmt::{self, Debug, Display, Write}};"
);
}
@@ -438,26 +441,13 @@ use std$0::{fmt::{self, Debug}};
use std::{fmt::{Write, Display}};
",
r"
-use std::{fmt::{self, Debug, Display, Write}};
+use std::fmt::{self, Debug, Display, Write};
",
);
check_assist_import_one_variations!(
"std$0::{fmt::{self, Debug}}",
"std::{fmt::{Write, Display}}",
- "use {std::{fmt::{self, Debug, Display, Write}}};"
- );
- }
-
- #[test]
- fn test_merge_self_with_nested_self_item() {
- check_assist(
- merge_imports,
- r"
-use std::{fmt$0::{self, Debug}, fmt::{Write, Display}};
-",
- r"
-use std::{fmt::{self, Debug, Display, Write}};
-",
+ "use {std::fmt::{self, Debug, Display, Write}};"
);
}
@@ -470,13 +460,13 @@ use foo::$0{bar::{self}};
use foo::{bar};
",
r"
-use foo::{bar::{self}};
+use foo::bar;
",
);
check_assist_import_one_variations!(
"foo::$0{bar::{self}}",
"foo::{bar}",
- "use {foo::{bar::{self}}};"
+ "use {foo::bar};"
);
}
@@ -489,13 +479,13 @@ use foo::$0{bar};
use foo::{bar::{self}};
",
r"
-use foo::{bar::{self}};
+use foo::bar;
",
);
check_assist_import_one_variations!(
"foo::$0{bar}",
"foo::{bar::{self}}",
- "use {foo::{bar::{self}}};"
+ "use {foo::bar};"
);
}
@@ -508,13 +498,13 @@ use std$0::{fmt::*};
use std::{fmt::{self, Display}};
",
r"
-use std::{fmt::{self, Display, *}};
+use std::fmt::{self, Display, *};
",
);
check_assist_import_one_variations!(
"std$0::{fmt::*}",
"std::{fmt::{self, Display}}",
- "use {std::{fmt::{self, Display, *}}};"
+ "use {std::fmt::{self, Display, *}};"
);
}
@@ -579,29 +569,27 @@ use foo::{bar, baz};
check_assist(
merge_imports,
r"
-use {
- foo$0::bar,
- foo::baz,
+use foo$0::{
+ bar, baz,
};
+use foo::qux;
",
r"
-use {
- foo::{bar, baz},
+use foo::{
+ bar, baz, qux,
};
",
);
check_assist(
merge_imports,
r"
-use {
- foo::baz,
- foo$0::bar,
+use foo::{
+ baz, bar,
};
+use foo$0::qux;
",
r"
-use {
- foo::{bar, baz},
-};
+use foo::{bar, baz, qux};
",
);
}
@@ -711,12 +699,19 @@ use std::{
};",
);
- // FIXME: Remove redundant braces. See also unnecessary-braces diagnostic.
cov_mark::check!(merge_with_selected_use_tree_neighbors);
check_assist(
merge_imports,
+ r"use std::{fmt::Result, $0fmt::Display, fmt::Debug$0};",
+ r"use std::{fmt::Result, fmt::{Debug, Display}};",
+ );
+
+ cov_mark::check!(merge_with_selected_use_tree_neighbors);
+ cov_mark::check!(replace_parent_with_normalized_use_tree);
+ check_assist(
+ merge_imports,
r"use std::$0{fmt::Display, fmt::Debug}$0;",
- r"use std::{fmt::{Debug, Display}};",
+ r"use std::fmt::{Debug, Display};",
);
}
}
diff --git a/crates/ide-assists/src/handlers/normalize_import.rs b/crates/ide-assists/src/handlers/normalize_import.rs
new file mode 100644
index 0000000000..7d003efe72
--- /dev/null
+++ b/crates/ide-assists/src/handlers/normalize_import.rs
@@ -0,0 +1,219 @@
+use ide_db::imports::merge_imports::try_normalize_import;
+use syntax::{ast, AstNode};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: normalize_import
+//
+// Normalizes an import.
+//
+// ```
+// use$0 std::{io, {fmt::Formatter}};
+// ```
+// ->
+// ```
+// use std::{fmt::Formatter, io};
+// ```
+pub(crate) fn normalize_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let use_item = if ctx.has_empty_selection() {
+ ctx.find_node_at_offset()?
+ } else {
+ ctx.covering_element().ancestors().find_map(ast::Use::cast)?
+ };
+
+ let target = use_item.syntax().text_range();
+ let normalized_use_item =
+ try_normalize_import(&use_item, ctx.config.insert_use.granularity.into())?;
+
+ acc.add(
+ AssistId("normalize_import", AssistKind::RefactorRewrite),
+ "Normalize import",
+ target,
+ |builder| {
+ builder.replace_ast(use_item, normalized_use_item);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{
+ check_assist, check_assist_import_one, check_assist_not_applicable,
+ check_assist_not_applicable_for_import_one,
+ };
+
+ use super::*;
+
+ macro_rules! check_assist_variations {
+ ($fixture: literal, $expected: literal) => {
+ check_assist(
+ normalize_import,
+ concat!("use $0", $fixture, ";"),
+ concat!("use ", $expected, ";"),
+ );
+ check_assist(
+ normalize_import,
+ concat!("$0use ", $fixture, ";"),
+ concat!("use ", $expected, ";"),
+ );
+
+ check_assist_import_one(
+ normalize_import,
+ concat!("use $0", $fixture, ";"),
+ concat!("use {", $expected, "};"),
+ );
+ check_assist_import_one(
+ normalize_import,
+ concat!("$0use ", $fixture, ";"),
+ concat!("use {", $expected, "};"),
+ );
+
+ check_assist_import_one(
+ normalize_import,
+ concat!("use $0{", $fixture, "};"),
+ concat!("use {", $expected, "};"),
+ );
+ check_assist_import_one(
+ normalize_import,
+ concat!("$0use {", $fixture, "};"),
+ concat!("use {", $expected, "};"),
+ );
+
+ check_assist(
+ normalize_import,
+ concat!("use $0", $fixture, "$0;"),
+ concat!("use ", $expected, ";"),
+ );
+ check_assist(
+ normalize_import,
+ concat!("$0use ", $fixture, ";$0"),
+ concat!("use ", $expected, ";"),
+ );
+ };
+ }
+
+ macro_rules! check_assist_not_applicable_variations {
+ ($fixture: literal) => {
+ check_assist_not_applicable(normalize_import, concat!("use $0", $fixture, ";"));
+ check_assist_not_applicable(normalize_import, concat!("$0use ", $fixture, ";"));
+
+ check_assist_not_applicable_for_import_one(
+ normalize_import,
+ concat!("use $0{", $fixture, "};"),
+ );
+ check_assist_not_applicable_for_import_one(
+ normalize_import,
+ concat!("$0use {", $fixture, "};"),
+ );
+ };
+ }
+
+ #[test]
+ fn test_order() {
+ check_assist_variations!(
+ "foo::{*, Qux, bar::{Quux, Bar}, baz, FOO_BAZ, self, Baz}",
+ "foo::{self, bar::{Bar, Quux}, baz, Baz, Qux, FOO_BAZ, *}"
+ );
+ }
+
+ #[test]
+ fn test_redundant_braces() {
+ check_assist_variations!("foo::{bar::{baz, Qux}}", "foo::bar::{baz, Qux}");
+ check_assist_variations!("foo::{bar::{self}}", "foo::bar");
+ check_assist_variations!("foo::{bar::{*}}", "foo::bar::*");
+ check_assist_variations!("foo::{bar::{Qux as Quux}}", "foo::bar::Qux as Quux");
+ check_assist_variations!(
+ "foo::bar::{{FOO_BAZ, Qux, self}, {*, baz}}",
+ "foo::bar::{self, baz, Qux, FOO_BAZ, *}"
+ );
+ check_assist_variations!(
+ "foo::bar::{{{FOO_BAZ}, {{Qux}, {self}}}, {{*}, {baz}}}",
+ "foo::bar::{self, baz, Qux, FOO_BAZ, *}"
+ );
+ }
+
+ #[test]
+ fn test_merge() {
+ check_assist_variations!(
+ "foo::{*, bar, {FOO_BAZ, qux}, bar::{*, baz}, {Quux}}",
+ "foo::{bar::{self, baz, *}, qux, Quux, FOO_BAZ, *}"
+ );
+ check_assist_variations!(
+ "foo::{*, bar, {FOO_BAZ, qux}, bar::{*, baz}, {Quux, bar::{baz::Foo}}}",
+ "foo::{bar::{self, baz::{self, Foo}, *}, qux, Quux, FOO_BAZ, *}"
+ );
+ }
+
+ #[test]
+ fn test_merge_self() {
+ check_assist_variations!("std::{fmt, fmt::Display}", "std::fmt::{self, Display}");
+ }
+
+ #[test]
+ fn test_merge_nested() {
+ check_assist_variations!("std::{fmt::Debug, fmt::Display}", "std::fmt::{Debug, Display}");
+ }
+
+ #[test]
+ fn test_merge_nested2() {
+ check_assist_variations!("std::{fmt::Debug, fmt::Display}", "std::fmt::{Debug, Display}");
+ }
+
+ #[test]
+ fn test_merge_self_with_nested_self_item() {
+ check_assist_variations!(
+ "std::{fmt::{self, Debug}, fmt::{Write, Display}}",
+ "std::fmt::{self, Debug, Display, Write}"
+ );
+ }
+
+ #[test]
+ fn works_with_trailing_comma() {
+ check_assist(
+ normalize_import,
+ r"
+use $0{
+ foo::bar,
+ foo::baz,
+};
+ ",
+ r"
+use foo::{bar, baz};
+ ",
+ );
+ check_assist_import_one(
+ normalize_import,
+ r"
+use $0{
+ foo::bar,
+ foo::baz,
+};
+",
+ r"
+use {
+ foo::{bar, baz},
+};
+",
+ );
+ }
+
+ #[test]
+ fn not_applicable_to_normalized_import() {
+ check_assist_not_applicable_variations!("foo::bar");
+ check_assist_not_applicable_variations!("foo::bar::*");
+ check_assist_not_applicable_variations!("foo::bar::Qux as Quux");
+ check_assist_not_applicable_variations!("foo::bar::{self, baz, Qux, FOO_BAZ, *}");
+ check_assist_not_applicable_variations!(
+ "foo::{self, bar::{Bar, Quux}, baz, Baz, Qux, FOO_BAZ, *}"
+ );
+ check_assist_not_applicable_variations!(
+ "foo::{bar::{self, baz, *}, qux, Quux, FOO_BAZ, *}"
+ );
+ check_assist_not_applicable_variations!(
+ "foo::{bar::{self, baz::{self, Foo}, *}, qux, Quux, FOO_BAZ, *}"
+ );
+ }
+}
diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs
index edcf52a9b3..2fec104323 100644
--- a/crates/ide-assists/src/lib.rs
+++ b/crates/ide-assists/src/lib.rs
@@ -183,6 +183,7 @@ mod handlers {
mod move_guard;
mod move_module_to_file;
mod move_to_mod_rs;
+ mod normalize_import;
mod number_representation;
mod promote_local_to_const;
mod pull_assignment_up;
@@ -300,6 +301,7 @@ mod handlers {
move_module_to_file::move_module_to_file,
move_to_mod_rs::move_to_mod_rs,
move_from_mod_rs::move_from_mod_rs,
+ normalize_import::normalize_import,
number_representation::reformat_number_literal,
pull_assignment_up::pull_assignment_up,
promote_local_to_const::promote_local_to_const,
diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs
index 0ce89ae0a9..8d7c49d52c 100644
--- a/crates/ide-assists/src/tests/generated.rs
+++ b/crates/ide-assists/src/tests/generated.rs
@@ -2218,6 +2218,19 @@ fn t() {}
}
#[test]
+fn doctest_normalize_import() {
+ check_doc_test(
+ "normalize_import",
+ r#####"
+use$0 std::{io, {fmt::Formatter}};
+"#####,
+ r#####"
+use std::{fmt::Formatter, io};
+"#####,
+ )
+}
+
+#[test]
fn doctest_promote_local_to_const() {
check_doc_test(
"promote_local_to_const",
diff --git a/crates/ide-assists/src/tests/sourcegen.rs b/crates/ide-assists/src/tests/sourcegen.rs
index ad5ec83287..088d93f9a6 100644
--- a/crates/ide-assists/src/tests/sourcegen.rs
+++ b/crates/ide-assists/src/tests/sourcegen.rs
@@ -2,6 +2,7 @@
use std::{fmt, fs, path::Path};
+use stdx::format_to_acc;
use test_utils::project_root;
#[test]
@@ -172,8 +173,7 @@ impl fmt::Display for Assist {
fn hide_hash_comments(text: &str) -> String {
text.split('\n') // want final newline
.filter(|&it| !(it.starts_with("# ") || it == "#"))
- .map(|it| format!("{it}\n"))
- .collect()
+ .fold(String::new(), |mut acc, it| format_to_acc!(acc, "{it}\n"))
}
fn reveal_hash_comments(text: &str) -> String {
@@ -187,6 +187,5 @@ fn reveal_hash_comments(text: &str) -> String {
it
}
})
- .map(|it| format!("{it}\n"))
- .collect()
+ .fold(String::new(), |mut acc, it| format_to_acc!(acc, "{it}\n"))
}
diff --git a/crates/ide-completion/Cargo.toml b/crates/ide-completion/Cargo.toml
index 7fbcf3d19e..f2a11276ba 100644
--- a/crates/ide-completion/Cargo.toml
+++ b/crates/ide-completion/Cargo.toml
@@ -14,6 +14,7 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
itertools.workspace = true
+tracing.workspace = true
once_cell = "1.17.0"
smallvec.workspace = true
@@ -38,4 +39,4 @@ test-utils.workspace = true
test-fixture.workspace = true
[lints]
-workspace = true \ No newline at end of file
+workspace = true
diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs
index e5fdac327c..00135a6d20 100644
--- a/crates/ide-completion/src/completions/dot.rs
+++ b/crates/ide-completion/src/completions/dot.rs
@@ -31,7 +31,7 @@ pub(crate) fn complete_dot(
}
let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. });
- let is_method_acces_with_parens =
+ let is_method_access_with_parens =
matches!(dot_access.kind, DotAccessKind::Method { has_parens: true });
complete_fields(
@@ -41,7 +41,7 @@ pub(crate) fn complete_dot(
|acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
|acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
is_field_access,
- is_method_acces_with_parens,
+ is_method_access_with_parens,
);
complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None));
@@ -114,14 +114,14 @@ fn complete_fields(
mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type),
mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type),
is_field_access: bool,
- is_method_acess_with_parens: bool,
+ is_method_access_with_parens: bool,
) {
let mut seen_names = FxHashSet::default();
for receiver in receiver.autoderef(ctx.db) {
for (field, ty) in receiver.fields(ctx.db) {
if seen_names.insert(field.name(ctx.db))
&& (is_field_access
- || (is_method_acess_with_parens && (ty.is_fn() || ty.is_closure())))
+ || (is_method_access_with_parens && (ty.is_fn() || ty.is_closure())))
{
named_field(acc, field, ty);
}
@@ -131,7 +131,7 @@ fn complete_fields(
// already seen without inserting into the hashset.
if !seen_names.contains(&hir::Name::new_tuple_field(i))
&& (is_field_access
- || (is_method_acess_with_parens && (ty.is_fn() || ty.is_closure())))
+ || (is_method_access_with_parens && (ty.is_fn() || ty.is_closure())))
{
// Tuple fields are always public (tuple struct fields are handled above).
tuple_index(acc, i, ty);
diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs
index d3c817d4b4..1433216d61 100644
--- a/crates/ide-completion/src/completions/expr.rs
+++ b/crates/ide-completion/src/completions/expr.rs
@@ -15,7 +15,7 @@ pub(crate) fn complete_expr_path(
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
expr_ctx: &ExprCtx,
) {
- let _p = profile::span("complete_expr_path");
+ let _p = tracing::span!(tracing::Level::INFO, "complete_expr_path").entered();
if !ctx.qualifier_ctx.none() {
return;
}
diff --git a/crates/ide-completion/src/completions/extern_abi.rs b/crates/ide-completion/src/completions/extern_abi.rs
index 75017cf66f..b5d5604c75 100644
--- a/crates/ide-completion/src/completions/extern_abi.rs
+++ b/crates/ide-completion/src/completions/extern_abi.rs
@@ -26,7 +26,6 @@ const SUPPORTED_CALLING_CONVENTIONS: &[&str] = &[
"ptx-kernel",
"msp430-interrupt",
"x86-interrupt",
- "amdgpu-kernel",
"efiapi",
"avr-interrupt",
"avr-non-blocking-interrupt",
diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs
index e330430d6b..0e04ad35d3 100644
--- a/crates/ide-completion/src/completions/flyimport.rs
+++ b/crates/ide-completion/src/completions/flyimport.rs
@@ -207,7 +207,8 @@ fn import_on_the_fly(
position: SyntaxNode,
potential_import_name: String,
) -> Option<()> {
- let _p = profile::span("import_on_the_fly").detail(|| potential_import_name.clone());
+ let _p =
+ tracing::span!(tracing::Level::INFO, "import_on_the_fly", ?potential_import_name).entered();
ImportScope::find_insert_use_container(&position, &ctx.sema)?;
@@ -293,7 +294,8 @@ fn import_on_the_fly_pat_(
position: SyntaxNode,
potential_import_name: String,
) -> Option<()> {
- let _p = profile::span("import_on_the_fly_pat").detail(|| potential_import_name.clone());
+ let _p = tracing::span!(tracing::Level::INFO, "import_on_the_fly_pat", ?potential_import_name)
+ .entered();
ImportScope::find_insert_use_container(&position, &ctx.sema)?;
@@ -343,7 +345,9 @@ fn import_on_the_fly_method(
position: SyntaxNode,
potential_import_name: String,
) -> Option<()> {
- let _p = profile::span("import_on_the_fly_method").detail(|| potential_import_name.clone());
+ let _p =
+ tracing::span!(tracing::Level::INFO, "import_on_the_fly_method", ?potential_import_name)
+ .entered();
ImportScope::find_insert_use_container(&position, &ctx.sema)?;
diff --git a/crates/ide-completion/src/completions/item_list.rs b/crates/ide-completion/src/completions/item_list.rs
index 4de15ab759..addd9dac1a 100644
--- a/crates/ide-completion/src/completions/item_list.rs
+++ b/crates/ide-completion/src/completions/item_list.rs
@@ -28,7 +28,7 @@ pub(crate) fn complete_item_list(
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
kind: &ItemListKind,
) {
- let _p = profile::span("complete_item_list");
+ let _p = tracing::span!(tracing::Level::INFO, "complete_item_list").entered();
if path_ctx.is_trivial_path() {
add_keywords(acc, ctx, Some(kind));
}
diff --git a/crates/ide-completion/src/completions/mod_.rs b/crates/ide-completion/src/completions/mod_.rs
index 5d138eea46..ecf5b29e2c 100644
--- a/crates/ide-completion/src/completions/mod_.rs
+++ b/crates/ide-completion/src/completions/mod_.rs
@@ -21,7 +21,7 @@ pub(crate) fn complete_mod(
return None;
}
- let _p = profile::span("completion::complete_mod");
+ let _p = tracing::span!(tracing::Level::INFO, "completion::complete_mod").entered();
let mut current_module = ctx.module;
// For `mod $0`, `ctx.module` is its parent, but for `mod f$0`, it's `mod f` itself, but we're
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs
index a846ffe10e..af83d4104f 100644
--- a/crates/ide-completion/src/completions/postfix.rs
+++ b/crates/ide-completion/src/completions/postfix.rs
@@ -85,6 +85,13 @@ pub(crate) fn complete_postfix(
.add_to(acc, ctx.db);
postfix_snippet(
+ "lete",
+ "let Ok else {}",
+ &format!("let Ok($1) = {receiver_text} else {{\n $2\n}};\n$0"),
+ )
+ .add_to(acc, ctx.db);
+
+ postfix_snippet(
"while",
"while let Ok {}",
&format!("while let Ok($1) = {receiver_text} {{\n $0\n}}"),
@@ -100,6 +107,13 @@ pub(crate) fn complete_postfix(
.add_to(acc, ctx.db);
postfix_snippet(
+ "lete",
+ "let Some else {}",
+ &format!("let Some($1) = {receiver_text} else {{\n $2\n}};\n$0"),
+ )
+ .add_to(acc, ctx.db);
+
+ postfix_snippet(
"while",
"while let Some {}",
&format!("while let Some($1) = {receiver_text} {{\n $0\n}}"),
@@ -470,6 +484,29 @@ fn main() {
}
#[test]
+ fn option_letelse() {
+ check_edit(
+ "lete",
+ r#"
+//- minicore: option
+fn main() {
+ let bar = Some(true);
+ bar.$0
+}
+"#,
+ r#"
+fn main() {
+ let bar = Some(true);
+ let Some($1) = bar else {
+ $2
+};
+$0
+}
+"#,
+ );
+ }
+
+ #[test]
fn result_match() {
check_edit(
"match",
diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs
index a30fd13b1d..e6a4335c3f 100644
--- a/crates/ide-completion/src/completions/type.rs
+++ b/crates/ide-completion/src/completions/type.rs
@@ -15,7 +15,7 @@ pub(crate) fn complete_type_path(
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
location: &TypeLocation,
) {
- let _p = profile::span("complete_type_path");
+ let _p = tracing::span!(tracing::Level::INFO, "complete_type_path").entered();
let scope_def_applicable = |def| {
use hir::{GenericParam::*, ModuleDef::*};
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 575f524209..2c0370c58f 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -568,7 +568,8 @@ impl CompletionContext<'_> {
/// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items and
/// passes all doc-aliases along, to funnel it into [`Completions::add_path_resolution`].
pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef, Vec<SmolStr>)) {
- let _p = profile::span("CompletionContext::process_all_names");
+ let _p =
+ tracing::span!(tracing::Level::INFO, "CompletionContext::process_all_names").entered();
self.scope.process_all_names(&mut |name, def| {
if self.is_scope_def_hidden(def) {
return;
@@ -579,7 +580,8 @@ impl CompletionContext<'_> {
}
pub(crate) fn process_all_names_raw(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
- let _p = profile::span("CompletionContext::process_all_names_raw");
+ let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::process_all_names_raw")
+ .entered();
self.scope.process_all_names(f);
}
@@ -637,7 +639,7 @@ impl<'a> CompletionContext<'a> {
position @ FilePosition { file_id, offset }: FilePosition,
config: &'a CompletionConfig,
) -> Option<(CompletionContext<'a>, CompletionAnalysis)> {
- let _p = profile::span("CompletionContext::new");
+ let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::new").entered();
let sema = Semantics::new(db);
let original_file = sema.parse(file_id);
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index 8a4ac00de9..c06b64df1c 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -72,7 +72,7 @@ fn expand(
mut fake_ident_token: SyntaxToken,
relative_offset: TextSize,
) -> ExpansionResult {
- let _p = profile::span("CompletionContext::expand");
+ let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::expand").entered();
let mut derive_ctx = None;
'expansion: loop {
@@ -211,7 +211,7 @@ fn analyze(
original_token: &SyntaxToken,
self_token: &SyntaxToken,
) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
- let _p = profile::span("CompletionContext::analyze");
+ let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::analyze").entered();
let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
expansion_result;
@@ -1267,8 +1267,7 @@ fn pattern_context_for(
pat
.syntax()
.ancestors()
- .skip_while(|it| ast::Pat::can_cast(it.kind()))
- .next()
+ .find(|it| !ast::Pat::can_cast(it.kind()))
.map_or((PatternRefutability::Irrefutable, false), |node| {
let refutability = match_ast! {
match node {
diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs
index 864b993f71..bcf169f465 100644
--- a/crates/ide-completion/src/item.rs
+++ b/crates/ide-completion/src/item.rs
@@ -433,7 +433,7 @@ impl Builder {
}
pub(crate) fn build(self, db: &RootDatabase) -> CompletionItem {
- let _p = profile::span("item::Builder::build");
+ let _p = tracing::span!(tracing::Level::INFO, "item::Builder::build").entered();
let label = self.label;
let mut label_detail = None;
diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs
index d26b6f431b..733523d369 100644
--- a/crates/ide-completion/src/lib.rs
+++ b/crates/ide-completion/src/lib.rs
@@ -236,7 +236,7 @@ pub fn resolve_completion_edits(
FilePosition { file_id, offset }: FilePosition,
imports: impl IntoIterator<Item = (String, String)>,
) -> Option<Vec<TextEdit>> {
- let _p = profile::span("resolve_completion_edits");
+ let _p = tracing::span!(tracing::Level::INFO, "resolve_completion_edits").entered();
let sema = hir::Semantics::new(db);
let original_file = sema.parse(file_id);
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index ad26280ae7..4d49d2f498 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -292,7 +292,7 @@ fn render_resolution_pat(
import_to_add: Option<LocatedImport>,
resolution: ScopeDef,
) -> Builder {
- let _p = profile::span("render_resolution");
+ let _p = tracing::span!(tracing::Level::INFO, "render_resolution").entered();
use hir::ModuleDef::*;
if let ScopeDef::ModuleDef(Macro(mac)) = resolution {
@@ -310,7 +310,7 @@ fn render_resolution_path(
import_to_add: Option<LocatedImport>,
resolution: ScopeDef,
) -> Builder {
- let _p = profile::span("render_resolution");
+ let _p = tracing::span!(tracing::Level::INFO, "render_resolution").entered();
use hir::ModuleDef::*;
match resolution {
@@ -418,7 +418,7 @@ fn render_resolution_simple_(
import_to_add: Option<LocatedImport>,
resolution: ScopeDef,
) -> Builder {
- let _p = profile::span("render_resolution");
+ let _p = tracing::span!(tracing::Level::INFO, "render_resolution").entered();
let db = ctx.db();
let ctx = ctx.import_to_add(import_to_add);
diff --git a/crates/ide-completion/src/render/const_.rs b/crates/ide-completion/src/render/const_.rs
index 3c73983c39..a2bfac994f 100644
--- a/crates/ide-completion/src/render/const_.rs
+++ b/crates/ide-completion/src/render/const_.rs
@@ -6,7 +6,7 @@ use ide_db::SymbolKind;
use crate::{item::CompletionItem, render::RenderContext};
pub(crate) fn render_const(ctx: RenderContext<'_>, const_: hir::Const) -> Option<CompletionItem> {
- let _p = profile::span("render_const");
+ let _p = tracing::span!(tracing::Level::INFO, "render_const").entered();
render(ctx, const_)
}
diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs
index 0f2608d132..4ae7ea861c 100644
--- a/crates/ide-completion/src/render/function.rs
+++ b/crates/ide-completion/src/render/function.rs
@@ -25,7 +25,7 @@ pub(crate) fn render_fn(
local_name: Option<hir::Name>,
func: hir::Function,
) -> Builder {
- let _p = profile::span("render_fn");
+ let _p = tracing::span!(tracing::Level::INFO, "render_fn").entered();
render(ctx, local_name, func, FuncKind::Function(path_ctx))
}
@@ -36,7 +36,7 @@ pub(crate) fn render_method(
local_name: Option<hir::Name>,
func: hir::Function,
) -> Builder {
- let _p = profile::span("render_method");
+ let _p = tracing::span!(tracing::Level::INFO, "render_method").entered();
render(ctx, local_name, func, FuncKind::Method(dot_access, receiver))
}
diff --git a/crates/ide-completion/src/render/literal.rs b/crates/ide-completion/src/render/literal.rs
index f2d67df01d..f52a5f7625 100644
--- a/crates/ide-completion/src/render/literal.rs
+++ b/crates/ide-completion/src/render/literal.rs
@@ -27,7 +27,7 @@ pub(crate) fn render_variant_lit(
variant: hir::Variant,
path: Option<hir::ModPath>,
) -> Option<Builder> {
- let _p = profile::span("render_enum_variant");
+ let _p = tracing::span!(tracing::Level::INFO, "render_enum_variant").entered();
let db = ctx.db();
let name = local_name.unwrap_or_else(|| variant.name(db));
@@ -41,7 +41,7 @@ pub(crate) fn render_struct_literal(
path: Option<hir::ModPath>,
local_name: Option<hir::Name>,
) -> Option<Builder> {
- let _p = profile::span("render_struct_literal");
+ let _p = tracing::span!(tracing::Level::INFO, "render_struct_literal").entered();
let db = ctx.db();
let name = local_name.unwrap_or_else(|| strukt.name(db));
diff --git a/crates/ide-completion/src/render/macro_.rs b/crates/ide-completion/src/render/macro_.rs
index 915a245ab6..540cfd03d6 100644
--- a/crates/ide-completion/src/render/macro_.rs
+++ b/crates/ide-completion/src/render/macro_.rs
@@ -17,7 +17,7 @@ pub(crate) fn render_macro(
name: hir::Name,
macro_: hir::Macro,
) -> Builder {
- let _p = profile::span("render_macro");
+ let _p = tracing::span!(tracing::Level::INFO, "render_macro").entered();
render(ctx, *kind == PathKind::Use, *has_macro_bang, *has_call_parens, name, macro_)
}
@@ -27,7 +27,7 @@ pub(crate) fn render_macro_pat(
name: hir::Name,
macro_: hir::Macro,
) -> Builder {
- let _p = profile::span("render_macro");
+ let _p = tracing::span!(tracing::Level::INFO, "render_macro").entered();
render(ctx, false, false, false, name, macro_)
}
diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs
index 6f998119b7..a5f851566c 100644
--- a/crates/ide-completion/src/render/pattern.rs
+++ b/crates/ide-completion/src/render/pattern.rs
@@ -20,7 +20,7 @@ pub(crate) fn render_struct_pat(
strukt: hir::Struct,
local_name: Option<Name>,
) -> Option<CompletionItem> {
- let _p = profile::span("render_struct_pat");
+ let _p = tracing::span!(tracing::Level::INFO, "render_struct_pat").entered();
let fields = strukt.fields(ctx.db());
let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, strukt)?;
@@ -50,7 +50,7 @@ pub(crate) fn render_variant_pat(
local_name: Option<Name>,
path: Option<&hir::ModPath>,
) -> Option<CompletionItem> {
- let _p = profile::span("render_variant_pat");
+ let _p = tracing::span!(tracing::Level::INFO, "render_variant_pat").entered();
let fields = variant.fields(ctx.db());
let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, variant)?;
diff --git a/crates/ide-completion/src/render/type_alias.rs b/crates/ide-completion/src/render/type_alias.rs
index 343ba7e28d..b192309e93 100644
--- a/crates/ide-completion/src/render/type_alias.rs
+++ b/crates/ide-completion/src/render/type_alias.rs
@@ -10,7 +10,7 @@ pub(crate) fn render_type_alias(
ctx: RenderContext<'_>,
type_alias: hir::TypeAlias,
) -> Option<CompletionItem> {
- let _p = profile::span("render_type_alias");
+ let _p = tracing::span!(tracing::Level::INFO, "render_type_alias").entered();
render(ctx, type_alias, false)
}
@@ -18,7 +18,7 @@ pub(crate) fn render_type_alias_with_eq(
ctx: RenderContext<'_>,
type_alias: hir::TypeAlias,
) -> Option<CompletionItem> {
- let _p = profile::span("render_type_alias_with_eq");
+ let _p = tracing::span!(tracing::Level::INFO, "render_type_alias_with_eq").entered();
render(ctx, type_alias, true)
}
diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs
index c421be51a0..154b69875a 100644
--- a/crates/ide-completion/src/tests.rs
+++ b/crates/ide-completion/src/tests.rs
@@ -210,23 +210,14 @@ pub(crate) fn check_edit_with_config(
let mut combined_edit = completion.text_edit.clone();
- resolve_completion_edits(
- &db,
- &config,
- position,
- completion
- .import_to_add
- .iter()
- .cloned()
- .filter_map(|(import_path, import_name)| Some((import_path, import_name))),
- )
- .into_iter()
- .flatten()
- .for_each(|text_edit| {
- combined_edit.union(text_edit).expect(
- "Failed to apply completion resolve changes: change ranges overlap, but should not",
- )
- });
+ resolve_completion_edits(&db, &config, position, completion.import_to_add.iter().cloned())
+ .into_iter()
+ .flatten()
+ .for_each(|text_edit| {
+ combined_edit.union(text_edit).expect(
+ "Failed to apply completion resolve changes: change ranges overlap, but should not",
+ )
+ });
combined_edit.apply(&mut actual);
assert_eq_text!(&ra_fixture_after, &actual)
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index b4f936b35a..758c254a88 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -105,7 +105,7 @@ fn func(param0 @ (param1, param2): (i32, i32)) {
fn completes_all_the_things_in_fn_body() {
check(
r#"
-use non_existant::Unresolved;
+use non_existent::Unresolved;
mod qualified { pub enum Enum { Variant } }
impl Unit {
@@ -170,7 +170,7 @@ impl Unit {
);
check(
r#"
-use non_existant::Unresolved;
+use non_existent::Unresolved;
mod qualified { pub enum Enum { Variant } }
impl Unit {
diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs
index 1af16ef857..eaa1bebc03 100644
--- a/crates/ide-completion/src/tests/flyimport.rs
+++ b/crates/ide-completion/src/tests/flyimport.rs
@@ -106,7 +106,7 @@ fn main() {
}
"#,
r#"
-use dep::{FirstStruct, some_module::{SecondStruct, ThirdStruct}};
+use dep::{some_module::{SecondStruct, ThirdStruct}, FirstStruct};
fn main() {
ThirdStruct
diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs
index 766d1c1e43..296253aa1e 100644
--- a/crates/ide-db/src/apply_change.rs
+++ b/crates/ide-db/src/apply_change.rs
@@ -15,12 +15,13 @@ use crate::{symbol_index::SymbolsDatabase, Change, RootDatabase};
impl RootDatabase {
pub fn request_cancellation(&mut self) {
- let _p = profile::span("RootDatabase::request_cancellation");
+ let _p =
+ tracing::span!(tracing::Level::INFO, "RootDatabase::request_cancellation").entered();
self.salsa_runtime_mut().synthetic_write(Durability::LOW);
}
pub fn apply_change(&mut self, change: Change) {
- let _p = profile::span("RootDatabase::apply_change");
+ let _p = tracing::span!(tracing::Level::INFO, "RootDatabase::apply_change").entered();
self.request_cancellation();
tracing::trace!("apply_change {:?}", change);
if let Some(roots) = &change.source_change.roots {
diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs
index 5995b318e8..81f2f87d96 100644
--- a/crates/ide-db/src/defs.rs
+++ b/crates/ide-db/src/defs.rs
@@ -404,7 +404,7 @@ impl NameClass {
}
pub fn classify(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<NameClass> {
- let _p = profile::span("classify_name");
+ let _p = tracing::span!(tracing::Level::INFO, "classify_name").entered();
let parent = name.syntax().parent()?;
@@ -496,7 +496,7 @@ impl NameClass {
sema: &Semantics<'_, RootDatabase>,
lifetime: &ast::Lifetime,
) -> Option<NameClass> {
- let _p = profile::span("classify_lifetime").detail(|| lifetime.to_string());
+ let _p = tracing::span!(tracing::Level::INFO, "classify_lifetime", ?lifetime).entered();
let parent = lifetime.syntax().parent()?;
if let Some(it) = ast::LifetimeParam::cast(parent.clone()) {
@@ -587,7 +587,7 @@ impl NameRefClass {
sema: &Semantics<'_, RootDatabase>,
name_ref: &ast::NameRef,
) -> Option<NameRefClass> {
- let _p = profile::span("classify_name_ref").detail(|| name_ref.to_string());
+ let _p = tracing::span!(tracing::Level::INFO, "classify_name_ref", ?name_ref).entered();
let parent = name_ref.syntax().parent()?;
@@ -686,7 +686,7 @@ impl NameRefClass {
sema: &Semantics<'_, RootDatabase>,
lifetime: &ast::Lifetime,
) -> Option<NameRefClass> {
- let _p = profile::span("classify_lifetime_ref").detail(|| lifetime.to_string());
+ let _p = tracing::span!(tracing::Level::INFO, "classify_lifetime_ref", ?lifetime).entered();
let parent = lifetime.syntax().parent()?;
match parent.kind() {
SyntaxKind::BREAK_EXPR | SyntaxKind::CONTINUE_EXPR => {
diff --git a/crates/ide-db/src/documentation.rs b/crates/ide-db/src/documentation.rs
index cc8e843170..72ca354365 100644
--- a/crates/ide-db/src/documentation.rs
+++ b/crates/ide-db/src/documentation.rs
@@ -1,4 +1,4 @@
-//! Documentation attribute related utilties.
+//! Documentation attribute related utilities.
use either::Either;
use hir::{
db::{DefDatabase, HirDatabase},
diff --git a/crates/ide-db/src/generated/lints.rs b/crates/ide-db/src/generated/lints.rs
index f160def0af..be8419686f 100644
--- a/crates/ide-db/src/generated/lints.rs
+++ b/crates/ide-db/src/generated/lints.rs
@@ -967,17 +967,6 @@ The tracking issue for this feature is: [#44839]
"##,
},
Lint {
- label: "abi_amdgpu_kernel",
- description: r##"# `abi_amdgpu_kernel`
-
-The tracking issue for this feature is: [#51575]
-
-[#51575]: https://github.com/rust-lang/rust/issues/51575
-
-------------------------
-"##,
- },
- Lint {
label: "abi_avr_interrupt",
description: r##"# `abi_avr_interrupt`
diff --git a/crates/ide-db/src/helpers.rs b/crates/ide-db/src/helpers.rs
index 9363bdfa14..0b5ad7060e 100644
--- a/crates/ide-db/src/helpers.rs
+++ b/crates/ide-db/src/helpers.rs
@@ -35,7 +35,7 @@ pub fn pick_token<T: AstToken>(mut tokens: TokenAtOffset<SyntaxToken>) -> Option
/// Converts the mod path struct into its ast representation.
pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path {
- let _p = profile::span("mod_path_to_ast");
+ let _p = tracing::span!(tracing::Level::INFO, "mod_path_to_ast").entered();
let mut segments = Vec::new();
let mut is_abs = false;
diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs
index 089bd44c2a..cb3f01f345 100644
--- a/crates/ide-db/src/imports/import_assets.rs
+++ b/crates/ide-db/src/imports/import_assets.rs
@@ -208,7 +208,8 @@ impl ImportAssets {
prefer_no_std: bool,
prefer_prelude: bool,
) -> impl Iterator<Item = LocatedImport> {
- let _p = profile::span("import_assets::search_for_imports");
+ let _p =
+ tracing::span!(tracing::Level::INFO, "import_assets::search_for_imports").entered();
self.search_for(sema, Some(prefix_kind), prefer_no_std, prefer_prelude)
}
@@ -219,7 +220,8 @@ impl ImportAssets {
prefer_no_std: bool,
prefer_prelude: bool,
) -> impl Iterator<Item = LocatedImport> {
- let _p = profile::span("import_assets::search_for_relative_paths");
+ let _p = tracing::span!(tracing::Level::INFO, "import_assets::search_for_relative_paths")
+ .entered();
self.search_for(sema, None, prefer_no_std, prefer_prelude)
}
@@ -260,7 +262,7 @@ impl ImportAssets {
prefer_no_std: bool,
prefer_prelude: bool,
) -> impl Iterator<Item = LocatedImport> {
- let _p = profile::span("import_assets::search_for");
+ let _p = tracing::span!(tracing::Level::INFO, "import_assets::search_for").entered();
let scope = match sema.scope(&self.candidate_node) {
Some(it) => it,
@@ -305,7 +307,7 @@ impl ImportAssets {
}
fn scope_definitions(&self, sema: &Semantics<'_, RootDatabase>) -> FxHashSet<ScopeDef> {
- let _p = profile::span("import_assets::scope_definitions");
+ let _p = tracing::span!(tracing::Level::INFO, "import_assets::scope_definitions").entered();
let mut scope_definitions = FxHashSet::default();
if let Some(scope) = sema.scope(&self.candidate_node) {
scope.process_all_names(&mut |_, scope_def| {
@@ -323,7 +325,8 @@ fn path_applicable_imports(
mod_path: impl Fn(ItemInNs) -> Option<ModPath> + Copy,
scope_filter: impl Fn(ItemInNs) -> bool + Copy,
) -> FxHashSet<LocatedImport> {
- let _p = profile::span("import_assets::path_applicable_imports");
+ let _p =
+ tracing::span!(tracing::Level::INFO, "import_assets::path_applicable_imports").entered();
match &path_candidate.qualifier {
None => {
@@ -370,7 +373,7 @@ fn import_for_item(
original_item: ItemInNs,
scope_filter: impl Fn(ItemInNs) -> bool,
) -> Option<LocatedImport> {
- let _p = profile::span("import_assets::import_for_item");
+ let _p = tracing::span!(tracing::Level::INFO, "import_assets::import_for_item").entered();
let [first_segment, ..] = unresolved_qualifier else { return None };
let item_as_assoc = item_as_assoc(db, original_item);
@@ -504,7 +507,8 @@ fn trait_applicable_items(
mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
scope_filter: impl Fn(hir::Trait) -> bool,
) -> FxHashSet<LocatedImport> {
- let _p = profile::span("import_assets::trait_applicable_items");
+ let _p =
+ tracing::span!(tracing::Level::INFO, "import_assets::trait_applicable_items").entered();
let db = sema.db;
diff --git a/crates/ide-db/src/imports/insert_use.rs b/crates/ide-db/src/imports/insert_use.rs
index 09b4a1c1ba..f29f91eea8 100644
--- a/crates/ide-db/src/imports/insert_use.rs
+++ b/crates/ide-db/src/imports/insert_use.rs
@@ -17,6 +17,7 @@ use syntax::{
use crate::{
imports::merge_imports::{
common_prefix, eq_attrs, eq_visibility, try_merge_imports, use_tree_cmp, MergeBehavior,
+ NormalizationStyle,
},
RootDatabase,
};
@@ -40,6 +41,15 @@ pub enum ImportGranularity {
One,
}
+impl From<ImportGranularity> for NormalizationStyle {
+ fn from(granularity: ImportGranularity) -> Self {
+ match granularity {
+ ImportGranularity::One => NormalizationStyle::One,
+ _ => NormalizationStyle::Default,
+ }
+ }
+}
+
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct InsertUseConfig {
pub granularity: ImportGranularity,
@@ -184,7 +194,7 @@ fn insert_use_with_alias_option(
cfg: &InsertUseConfig,
alias: Option<ast::Rename>,
) {
- let _p = profile::span("insert_use");
+ let _p = tracing::span!(tracing::Level::INFO, "insert_use").entered();
let mut mb = match cfg.granularity {
ImportGranularity::Crate => Some(MergeBehavior::Crate),
ImportGranularity::Module => Some(MergeBehavior::Module),
diff --git a/crates/ide-db/src/imports/insert_use/tests.rs b/crates/ide-db/src/imports/insert_use/tests.rs
index 2ed6069887..6b0fecae26 100644
--- a/crates/ide-db/src/imports/insert_use/tests.rs
+++ b/crates/ide-db/src/imports/insert_use/tests.rs
@@ -635,7 +635,7 @@ use std::io;",
check_one(
"std::io",
r"use {std::fmt::{Result, Display}};",
- r"use {std::{fmt::{Result, Display}, io}};",
+ r"use {std::{fmt::{Display, Result}, io}};",
);
}
@@ -650,12 +650,12 @@ fn merge_groups_full() {
check_crate(
"std::io",
r"use std::fmt::{Result, Display};",
- r"use std::{fmt::{Result, Display}, io};",
+ r"use std::{fmt::{Display, Result}, io};",
);
check_one(
"std::io",
r"use {std::fmt::{Result, Display}};",
- r"use {std::{fmt::{Result, Display}, io}};",
+ r"use {std::{fmt::{Display, Result}, io}};",
);
}
@@ -749,12 +749,12 @@ fn merge_groups_full_nested_deep() {
check_crate(
"std::foo::bar::quux::Baz",
r"use std::foo::bar::{Qux, quux::{Fez, Fizz}};",
- r"use std::foo::bar::{Qux, quux::{Baz, Fez, Fizz}};",
+ r"use std::foo::bar::{quux::{Baz, Fez, Fizz}, Qux};",
);
check_one(
"std::foo::bar::quux::Baz",
r"use {std::foo::bar::{Qux, quux::{Fez, Fizz}}};",
- r"use {std::foo::bar::{Qux, quux::{Baz, Fez, Fizz}}};",
+ r"use {std::foo::bar::{quux::{Baz, Fez, Fizz}, Qux}};",
);
}
@@ -763,7 +763,7 @@ fn merge_groups_full_nested_long() {
check_crate(
"std::foo::bar::Baz",
r"use std::{foo::bar::Qux};",
- r"use std::{foo::bar::{Baz, Qux}};",
+ r"use std::foo::bar::{Baz, Qux};",
);
}
@@ -772,12 +772,12 @@ fn merge_groups_last_nested_long() {
check_crate(
"std::foo::bar::Baz",
r"use std::{foo::bar::Qux};",
- r"use std::{foo::bar::{Baz, Qux}};",
+ r"use std::foo::bar::{Baz, Qux};",
);
check_one(
"std::foo::bar::Baz",
r"use {std::{foo::bar::Qux}};",
- r"use {std::{foo::bar::{Baz, Qux}}};",
+ r"use {std::foo::bar::{Baz, Qux}};",
);
}
@@ -898,7 +898,7 @@ fn merge_glob() {
r"
use syntax::{SyntaxKind::*};",
r"
-use syntax::{SyntaxKind::{self, *}};",
+use syntax::SyntaxKind::{self, *};",
)
}
@@ -907,7 +907,7 @@ fn merge_glob_nested() {
check_crate(
"foo::bar::quux::Fez",
r"use foo::bar::{Baz, quux::*};",
- r"use foo::bar::{Baz, quux::{Fez, *}};",
+ r"use foo::bar::{quux::{Fez, *}, Baz};",
)
}
@@ -1211,7 +1211,7 @@ fn insert_with_renamed_import_complex_use() {
use self::foo::{self, Foo as _, Bar};
"#,
r#"
-use self::foo::{self, Foo, Bar};
+use self::foo::{self, Bar, Foo};
"#,
&InsertUseConfig {
granularity: ImportGranularity::Crate,
diff --git a/crates/ide-db/src/imports/merge_imports.rs b/crates/ide-db/src/imports/merge_imports.rs
index 7ec38c317d..b153aafa0e 100644
--- a/crates/ide-db/src/imports/merge_imports.rs
+++ b/crates/ide-db/src/imports/merge_imports.rs
@@ -1,15 +1,17 @@
//! Handle syntactic aspects of merging UseTrees.
use std::cmp::Ordering;
-use std::iter::empty;
use itertools::{EitherOrBoth, Itertools};
use parser::T;
use stdx::is_upper_snake_case;
use syntax::{
algo,
- ast::{self, make, AstNode, HasAttrs, HasName, HasVisibility, PathSegmentKind},
+ ast::{
+ self, edit_in_place::Removable, make, AstNode, HasAttrs, HasName, HasVisibility,
+ PathSegmentKind,
+ },
ted::{self, Position},
- Direction,
+ Direction, SyntaxElement,
};
use crate::syntax_helpers::node_ext::vis_eq;
@@ -58,6 +60,10 @@ pub fn try_merge_imports(
let lhs_tree = lhs.use_tree()?;
let rhs_tree = rhs.use_tree()?;
try_merge_trees_mut(&lhs_tree, &rhs_tree, merge_behavior)?;
+
+ // Ignore `None` result because normalization should not affect the merge result.
+ try_normalize_use_tree_mut(&lhs_tree, merge_behavior.into());
+
Some(lhs)
}
@@ -71,6 +77,10 @@ pub fn try_merge_trees(
let lhs = lhs.clone_subtree().clone_for_update();
let rhs = rhs.clone_subtree().clone_for_update();
try_merge_trees_mut(&lhs, &rhs, merge)?;
+
+ // Ignore `None` result because normalization should not affect the merge result.
+ try_normalize_use_tree_mut(&lhs, merge.into());
+
Some(lhs)
}
@@ -173,61 +183,301 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior)
}
Err(insert_idx) => {
use_trees.insert(insert_idx, rhs_t.clone());
- match lhs.use_tree_list() {
- // Creates a new use tree list with the item.
- None => lhs.get_or_create_use_tree_list().add_use_tree(rhs_t),
- // Recreates the use tree list with sorted items (see `use_tree_cmp` doc).
- Some(use_tree_list) => {
- if use_tree_list.l_curly_token().is_none() {
- ted::insert_raw(
- Position::first_child_of(use_tree_list.syntax()),
- make::token(T!['{']),
- );
- }
- if use_tree_list.r_curly_token().is_none() {
- ted::insert_raw(
- Position::last_child_of(use_tree_list.syntax()),
- make::token(T!['}']),
- );
- }
+            // We simply add the use tree to the end of the tree list. Ordering of use trees
+ // and imports is done by the `try_normalize_*` functions. The sorted `use_trees`
+ // vec is only used for binary search.
+ lhs.get_or_create_use_tree_list().add_use_tree(rhs_t);
+ }
+ }
+ }
+ Some(())
+}
- let mut elements = Vec::new();
- for (idx, tree) in use_trees.iter().enumerate() {
- if idx > 0 {
- elements.push(make::token(T![,]).into());
- elements.push(make::tokens::single_space().into());
- }
- elements.push(tree.syntax().clone().into());
- }
+/// Style to follow when normalizing a use tree.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum NormalizationStyle {
+ /// Merges all descendant use tree lists with only one child use tree into their parent use tree.
+ ///
+ /// Examples:
+ /// - `foo::{bar::{Qux}}` -> `foo::bar::Qux`
+ /// - `foo::{bar::{self}}` -> `foo::bar`
+ /// - `{foo::bar}` -> `foo::bar`
+ Default,
+ /// Same as default but wraps the root use tree in a use tree list.
+ ///
+ /// Examples:
+ /// - `foo::{bar::{Qux}}` -> `{foo::bar::Qux}`
+ /// - `foo::{bar::{self}}` -> `{foo::bar}`
+ /// - `{foo::bar}` -> `{foo::bar}`
+ One,
+}
+
+impl From<MergeBehavior> for NormalizationStyle {
+ fn from(mb: MergeBehavior) -> Self {
+ match mb {
+ MergeBehavior::One => NormalizationStyle::One,
+ _ => NormalizationStyle::Default,
+ }
+ }
+}
+
+/// Normalizes a use item by:
+/// - Ordering all use trees
+/// - Merging use trees with common prefixes
+/// - Removing redundant braces based on the specified normalization style
+/// (see [`NormalizationStyle`] doc)
+///
+/// Examples:
+///
+/// Using the "Default" normalization style
+///
+/// - `foo::{bar::Qux, bar::{self}}` -> `foo::bar::{self, Qux}`
+/// - `foo::bar::{self}` -> `foo::bar`
+/// - `{foo::bar}` -> `foo::bar`
+///
+/// Using the "One" normalization style
+///
+/// - `foo::{bar::Qux, bar::{self}}` -> `{foo::bar::{self, Qux}}`
+/// - `foo::bar::{self}` -> `{foo::bar}`
+/// - `foo::bar` -> `{foo::bar}`
+pub fn try_normalize_import(use_item: &ast::Use, style: NormalizationStyle) -> Option<ast::Use> {
+ let use_item = use_item.clone_subtree().clone_for_update();
+ try_normalize_use_tree_mut(&use_item.use_tree()?, style)?;
+ Some(use_item)
+}
+
+/// Normalizes a use tree (see [`try_normalize_import`] doc).
+pub fn try_normalize_use_tree(
+ use_tree: &ast::UseTree,
+ style: NormalizationStyle,
+) -> Option<ast::UseTree> {
+ let use_tree = use_tree.clone_subtree().clone_for_update();
+ try_normalize_use_tree_mut(&use_tree, style)?;
+ Some(use_tree)
+}
+
+pub fn try_normalize_use_tree_mut(
+ use_tree: &ast::UseTree,
+ style: NormalizationStyle,
+) -> Option<()> {
+ if style == NormalizationStyle::One {
+ let mut modified = false;
+ modified |= use_tree.wrap_in_tree_list().is_some();
+ modified |= recursive_normalize(use_tree, style).is_some();
+ if !modified {
+            // Either the use tree was already normalized or it is semantically empty.
+ return None;
+ }
+ } else {
+ recursive_normalize(use_tree, NormalizationStyle::Default)?;
+ }
+ Some(())
+}
- let start = use_tree_list
- .l_curly_token()
- .and_then(|l_curly| {
- algo::non_trivia_sibling(l_curly.into(), Direction::Next)
- })
- .filter(|it| it.kind() != T!['}']);
- let end = use_tree_list
- .r_curly_token()
- .and_then(|r_curly| {
- algo::non_trivia_sibling(r_curly.into(), Direction::Prev)
- })
- .filter(|it| it.kind() != T!['{']);
- if let Some((start, end)) = start.zip(end) {
- // Attempt to insert elements while preserving preceding and trailing trivia.
- ted::replace_all(start..=end, elements);
+/// Recursively normalizes a use tree and its subtrees (if any).
+fn recursive_normalize(use_tree: &ast::UseTree, style: NormalizationStyle) -> Option<()> {
+ let use_tree_list = use_tree.use_tree_list()?;
+ let merge_subtree_into_parent_tree = |single_subtree: &ast::UseTree| {
+ let merged_path = match (use_tree.path(), single_subtree.path()) {
+ (None, None) => None,
+ (Some(outer), None) => Some(outer),
+ (None, Some(inner)) if path_is_self(&inner) => None,
+ (None, Some(inner)) => Some(inner),
+ (Some(outer), Some(inner)) if path_is_self(&inner) => Some(outer),
+ (Some(outer), Some(inner)) => Some(make::path_concat(outer, inner).clone_for_update()),
+ };
+ if merged_path.is_some()
+ || single_subtree.use_tree_list().is_some()
+ || single_subtree.star_token().is_some()
+ {
+ ted::remove_all_iter(use_tree.syntax().children_with_tokens());
+ if let Some(path) = merged_path {
+ ted::insert_raw(Position::first_child_of(use_tree.syntax()), path.syntax());
+ if single_subtree.use_tree_list().is_some() || single_subtree.star_token().is_some()
+ {
+ ted::insert_raw(
+ Position::last_child_of(use_tree.syntax()),
+ make::token(T![::]),
+ );
+ }
+ }
+ if let Some(inner_use_tree_list) = single_subtree.use_tree_list() {
+ ted::insert_raw(
+ Position::last_child_of(use_tree.syntax()),
+ inner_use_tree_list.syntax(),
+ );
+ } else if single_subtree.star_token().is_some() {
+ ted::insert_raw(Position::last_child_of(use_tree.syntax()), make::token(T![*]));
+ } else if let Some(rename) = single_subtree.rename() {
+ ted::insert_raw(
+ Position::last_child_of(use_tree.syntax()),
+ make::tokens::single_space(),
+ );
+ ted::insert_raw(Position::last_child_of(use_tree.syntax()), rename.syntax());
+ }
+ Some(())
+ } else {
+ // Bail on semantically empty use trees.
+ None
+ }
+ };
+ let one_style_tree_list = |subtree: &ast::UseTree| match (
+ subtree.path().is_none() && subtree.star_token().is_none() && subtree.rename().is_none(),
+ subtree.use_tree_list(),
+ ) {
+ (true, tree_list) => tree_list,
+ _ => None,
+ };
+ let add_element_to_list = |elem: SyntaxElement, elements: &mut Vec<SyntaxElement>| {
+ if !elements.is_empty() {
+ elements.push(make::token(T![,]).into());
+ elements.push(make::tokens::single_space().into());
+ }
+ elements.push(elem);
+ };
+ if let Some((single_subtree,)) = use_tree_list.use_trees().collect_tuple() {
+ if style == NormalizationStyle::One {
+ // Only normalize descendant subtrees if the normalization style is "one".
+ recursive_normalize(&single_subtree, NormalizationStyle::Default)?;
+ } else {
+            // Otherwise, merge the single subtree into its parent (if possible)
+ // and then normalize the result.
+ merge_subtree_into_parent_tree(&single_subtree)?;
+ recursive_normalize(use_tree, style);
+ }
+ } else {
+ // Tracks whether any changes have been made to the use tree.
+ let mut modified = false;
+
+ // Recursively un-nests (if necessary) and then normalizes each subtree in the tree list.
+ for subtree in use_tree_list.use_trees() {
+ if let Some(one_tree_list) = one_style_tree_list(&subtree) {
+ let mut elements = Vec::new();
+ let mut one_tree_list_iter = one_tree_list.use_trees();
+ let mut prev_skipped = Vec::new();
+ loop {
+ let mut prev_skipped_iter = prev_skipped.into_iter();
+ let mut curr_skipped = Vec::new();
+
+ while let Some(sub_sub_tree) =
+ one_tree_list_iter.next().or(prev_skipped_iter.next())
+ {
+ if let Some(sub_one_tree_list) = one_style_tree_list(&sub_sub_tree) {
+ curr_skipped.extend(sub_one_tree_list.use_trees());
} else {
- let new_use_tree_list = make::use_tree_list(empty()).clone_for_update();
- let trees_pos = match new_use_tree_list.l_curly_token() {
- Some(l_curly) => Position::after(l_curly),
- None => Position::last_child_of(new_use_tree_list.syntax()),
- };
- ted::insert_all_raw(trees_pos, elements);
- ted::replace(use_tree_list.syntax(), new_use_tree_list.syntax());
+ modified |=
+ recursive_normalize(&sub_sub_tree, NormalizationStyle::Default)
+ .is_some();
+ add_element_to_list(
+ sub_sub_tree.syntax().clone().into(),
+ &mut elements,
+ );
}
}
+
+ if curr_skipped.is_empty() {
+ // Un-nesting is complete.
+ break;
+ }
+ prev_skipped = curr_skipped;
+ }
+
+            // Either removes the subtree (if it is semantically empty) or replaces it with
+ // the un-nested elements.
+ if elements.is_empty() {
+ subtree.remove();
+ } else {
+ ted::replace_with_many(subtree.syntax(), elements);
}
+ modified = true;
+ } else {
+ modified |= recursive_normalize(&subtree, NormalizationStyle::Default).is_some();
}
}
+
+ // Merge all merge-able subtrees.
+ let mut tree_list_iter = use_tree_list.use_trees();
+ let mut anchor = tree_list_iter.next()?;
+ let mut prev_skipped = Vec::new();
+ loop {
+ let mut has_merged = false;
+ let mut prev_skipped_iter = prev_skipped.into_iter();
+ let mut next_anchor = None;
+ let mut curr_skipped = Vec::new();
+
+ while let Some(candidate) = tree_list_iter.next().or(prev_skipped_iter.next()) {
+ let result = try_merge_trees_mut(&anchor, &candidate, MergeBehavior::Crate);
+ if result.is_some() {
+ // Remove merged subtree.
+ candidate.remove();
+ has_merged = true;
+ } else if next_anchor.is_none() {
+ next_anchor = Some(candidate);
+ } else {
+ curr_skipped.push(candidate);
+ }
+ }
+
+ if has_merged {
+ // Normalize the merge result.
+ recursive_normalize(&anchor, NormalizationStyle::Default);
+ modified = true;
+ }
+
+ let (Some(next_anchor), true) = (next_anchor, !curr_skipped.is_empty()) else {
+ // Merging is complete.
+ break;
+ };
+
+ // Try to merge the remaining subtrees in the next iteration.
+ anchor = next_anchor;
+ prev_skipped = curr_skipped;
+ }
+
+ let mut subtrees: Vec<_> = use_tree_list.use_trees().collect();
+    // Merge the remaining subtree into its parent, if it is the only one and
+ // the normalization style is not "one".
+ if subtrees.len() == 1 && style != NormalizationStyle::One {
+ modified |= merge_subtree_into_parent_tree(&subtrees[0]).is_some();
+ }
+ // Order the remaining subtrees (if necessary).
+ if subtrees.len() > 1 {
+ let mut did_sort = false;
+ subtrees.sort_unstable_by(|a, b| {
+ let order = use_tree_cmp_bin_search(a, b);
+ if !did_sort && order == Ordering::Less {
+ did_sort = true;
+ }
+ order
+ });
+ if did_sort {
+ let start = use_tree_list
+ .l_curly_token()
+ .and_then(|l_curly| algo::non_trivia_sibling(l_curly.into(), Direction::Next))
+ .filter(|it| it.kind() != T!['}']);
+ let end = use_tree_list
+ .r_curly_token()
+ .and_then(|r_curly| algo::non_trivia_sibling(r_curly.into(), Direction::Prev))
+ .filter(|it| it.kind() != T!['{']);
+ if let Some((start, end)) = start.zip(end) {
+ // Attempt to insert elements while preserving preceding and trailing trivia.
+ let mut elements = Vec::new();
+ for subtree in subtrees {
+ add_element_to_list(subtree.syntax().clone().into(), &mut elements);
+ }
+ ted::replace_all(start..=end, elements);
+ } else {
+ let new_use_tree_list = make::use_tree_list(subtrees).clone_for_update();
+ ted::replace(use_tree_list.syntax(), new_use_tree_list.syntax());
+ }
+ modified = true;
+ }
+ }
+
+ if !modified {
+        // Either the use tree was already normalized or it is semantically empty.
+ return None;
+ }
}
Some(())
}
@@ -280,7 +530,7 @@ fn use_tree_cmp_bin_search(lhs: &ast::UseTree, rhs: &ast::UseTree) -> Ordering {
/// and `crate` first, then identifier imports with lowercase ones first and upper snake case
/// (e.g. UPPER_SNAKE_CASE) ones last, then glob imports, and at last list imports.
///
-/// Example foo::{self, foo, baz, Baz, Qux, FOO_BAZ, *, {Bar}}
+/// Example: `foo::{self, baz, foo, Baz, Qux, FOO_BAZ, *, {Bar}}`
/// Ref: <https://github.com/rust-lang/rustfmt/blob/6356fca675bd756d71f5c123cd053d17b16c573e/src/imports.rs#L83-L86>.
pub(super) fn use_tree_cmp(a: &ast::UseTree, b: &ast::UseTree) -> Ordering {
let a_is_simple_path = a.is_simple_path() && a.rename().is_none();
diff --git a/crates/ide-db/src/items_locator.rs b/crates/ide-db/src/items_locator.rs
index 432f1d745d..1b6f650768 100644
--- a/crates/ide-db/src/items_locator.rs
+++ b/crates/ide-db/src/items_locator.rs
@@ -20,14 +20,9 @@ pub fn items_with_name<'a>(
name: NameToImport,
assoc_item_search: AssocSearchMode,
) -> impl Iterator<Item = ItemInNs> + 'a {
- let _p = profile::span("items_with_name").detail(|| {
- format!(
- "Name: {}, crate: {:?}, assoc items: {:?}",
- name.text(),
- assoc_item_search,
- krate.display_name(sema.db).map(|name| name.to_string()),
- )
- });
+ let krate_name = krate.display_name(sema.db).map(|name| name.to_string());
+ let _p = tracing::span!(tracing::Level::INFO, "items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate_name)
+ .entered();
let prefix = matches!(name, NameToImport::Prefix(..));
let (local_query, external_query) = match name {
@@ -77,7 +72,7 @@ fn find_items<'a>(
local_query: symbol_index::Query,
external_query: import_map::Query,
) -> impl Iterator<Item = ItemInNs> + 'a {
- let _p = profile::span("find_items");
+ let _p = tracing::span!(tracing::Level::INFO, "find_items").entered();
let db = sema.db;
// NOTE: `external_query` includes `assoc_item_search`, so we don't need to
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 7769d8fba1..006d8882c1 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -134,6 +134,7 @@ pub enum ReferenceCategory {
// FIXME: Some day should be able to search in doc comments. Would probably
// need to switch from enum to bitflags then?
// DocComment
+ Test,
}
/// Generally, `search_scope` returns files that might contain references for the element.
@@ -273,7 +274,7 @@ impl IntoIterator for SearchScope {
impl Definition {
fn search_scope(&self, db: &RootDatabase) -> SearchScope {
- let _p = profile::span("search_scope");
+ let _p = tracing::span!(tracing::Level::INFO, "search_scope").entered();
if let Definition::BuiltinType(_) = self {
return SearchScope::crate_graph(db);
@@ -303,14 +304,18 @@ impl Definition {
DefWithBody::InTypeConst(_) => return SearchScope::empty(),
};
return match def {
- Some(def) => SearchScope::file_range(def.as_ref().original_file_range_full(db)),
+ Some(def) => SearchScope::file_range(
+ def.as_ref().original_file_range_with_macro_call_body(db),
+ ),
None => SearchScope::single_file(file_id),
};
}
if let Definition::SelfType(impl_) = self {
return match impl_.source(db).map(|src| src.syntax().cloned()) {
- Some(def) => SearchScope::file_range(def.as_ref().original_file_range_full(db)),
+ Some(def) => SearchScope::file_range(
+ def.as_ref().original_file_range_with_macro_call_body(db),
+ ),
None => SearchScope::single_file(file_id),
};
}
@@ -327,7 +332,9 @@ impl Definition {
hir::GenericDef::Const(it) => it.source(db).map(|src| src.syntax().cloned()),
};
return match def {
- Some(def) => SearchScope::file_range(def.as_ref().original_file_range_full(db)),
+ Some(def) => SearchScope::file_range(
+ def.as_ref().original_file_range_with_macro_call_body(db),
+ ),
None => SearchScope::single_file(file_id),
};
}
@@ -435,7 +442,7 @@ impl<'a> FindUsages<'a> {
}
pub fn search(&self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) {
- let _p = profile::span("FindUsages:search");
+ let _p = tracing::span!(tracing::Level::INFO, "FindUsages:search").entered();
let sema = self.sema;
let search_scope = {
@@ -743,7 +750,7 @@ impl<'a> FindUsages<'a> {
let reference = FileReference {
range,
name: FileReferenceNode::NameRef(name_ref.clone()),
- category: ReferenceCategory::new(&def, name_ref),
+ category: ReferenceCategory::new(self.sema, &def, name_ref),
};
sink(file_id, reference)
}
@@ -759,7 +766,7 @@ impl<'a> FindUsages<'a> {
let reference = FileReference {
range,
name: FileReferenceNode::NameRef(name_ref.clone()),
- category: ReferenceCategory::new(&def, name_ref),
+ category: ReferenceCategory::new(self.sema, &def, name_ref),
};
sink(file_id, reference)
}
@@ -769,7 +776,7 @@ impl<'a> FindUsages<'a> {
let reference = FileReference {
range,
name: FileReferenceNode::NameRef(name_ref.clone()),
- category: ReferenceCategory::new(&def, name_ref),
+ category: ReferenceCategory::new(self.sema, &def, name_ref),
};
sink(file_id, reference)
} else {
@@ -783,10 +790,10 @@ impl<'a> FindUsages<'a> {
let local = Definition::Local(local);
let access = match self.def {
Definition::Field(_) if field == self.def => {
- ReferenceCategory::new(&field, name_ref)
+ ReferenceCategory::new(self.sema, &field, name_ref)
}
Definition::Local(_) if local == self.def => {
- ReferenceCategory::new(&local, name_ref)
+ ReferenceCategory::new(self.sema, &local, name_ref)
}
_ => return false,
};
@@ -871,7 +878,15 @@ fn def_to_ty(sema: &Semantics<'_, RootDatabase>, def: &Definition) -> Option<hir
}
impl ReferenceCategory {
- fn new(def: &Definition, r: &ast::NameRef) -> Option<ReferenceCategory> {
+ fn new(
+ sema: &Semantics<'_, RootDatabase>,
+ def: &Definition,
+ r: &ast::NameRef,
+ ) -> Option<ReferenceCategory> {
+ if is_name_ref_in_test(sema, r) {
+ return Some(ReferenceCategory::Test);
+ }
+
// Only Locals and Fields have accesses for now.
if !matches!(def, Definition::Local(_) | Definition::Field(_)) {
return is_name_ref_in_import(r).then_some(ReferenceCategory::Import);
@@ -910,3 +925,10 @@ fn is_name_ref_in_import(name_ref: &ast::NameRef) -> bool {
.and_then(|it| it.parent_path().top_path().syntax().parent())
.map_or(false, |it| it.kind() == SyntaxKind::USE_TREE)
}
+
+fn is_name_ref_in_test(sema: &Semantics<'_, RootDatabase>, name_ref: &ast::NameRef) -> bool {
+ name_ref.syntax().ancestors().any(|node| match ast::Fn::cast(node) {
+ Some(it) => sema.to_def(&it).map_or(false, |func| func.is_test(sema.db)),
+ None => false,
+ })
+}
diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs
index 7774b0834d..92c09089e1 100644
--- a/crates/ide-db/src/symbol_index.rs
+++ b/crates/ide-db/src/symbol_index.rs
@@ -124,7 +124,7 @@ pub trait SymbolsDatabase: HirDatabase + SourceDatabaseExt + Upcast<dyn HirDatab
}
fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Arc<SymbolIndex> {
- let _p = profile::span("library_symbols");
+ let _p = tracing::span!(tracing::Level::INFO, "library_symbols").entered();
let mut symbol_collector = SymbolCollector::new(db.upcast());
@@ -142,14 +142,14 @@ fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Ar
}
fn module_symbols(db: &dyn SymbolsDatabase, module: Module) -> Arc<SymbolIndex> {
- let _p = profile::span("module_symbols");
+ let _p = tracing::span!(tracing::Level::INFO, "module_symbols").entered();
let symbols = SymbolCollector::collect_module(db.upcast(), module);
Arc::new(SymbolIndex::new(symbols))
}
pub fn crate_symbols(db: &dyn SymbolsDatabase, krate: Crate) -> Box<[Arc<SymbolIndex>]> {
- let _p = profile::span("crate_symbols");
+ let _p = tracing::span!(tracing::Level::INFO, "crate_symbols").entered();
krate.modules(db.upcast()).into_iter().map(|module| db.module_symbols(module)).collect()
}
@@ -200,7 +200,7 @@ impl<DB> std::ops::Deref for Snap<DB> {
// | VS Code | kbd:[Ctrl+T]
// |===
pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
- let _p = profile::span("world_symbols").detail(|| query.query.clone());
+ let _p = tracing::span!(tracing::Level::INFO, "world_symbols", query = ?query.query).entered();
let indices: Vec<_> = if query.libs {
db.library_roots()
@@ -320,7 +320,7 @@ impl Query {
indices: &'sym [Arc<SymbolIndex>],
cb: impl FnMut(&'sym FileSymbol),
) {
- let _p = profile::span("symbol_index::Query::search");
+ let _p = tracing::span!(tracing::Level::INFO, "symbol_index::Query::search").entered();
let mut op = fst::map::OpBuilder::new();
match self.mode {
SearchMode::Exact => {
diff --git a/crates/ide-diagnostics/Cargo.toml b/crates/ide-diagnostics/Cargo.toml
index 3ed48457a2..6976804138 100644
--- a/crates/ide-diagnostics/Cargo.toml
+++ b/crates/ide-diagnostics/Cargo.toml
@@ -16,6 +16,7 @@ cov-mark = "2.0.0-pre.1"
either.workspace = true
itertools.workspace = true
serde_json.workspace = true
+tracing.workspace = true
once_cell = "1.17.0"
# local deps
@@ -39,4 +40,4 @@ sourcegen.workspace = true
in-rust-tree = []
[lints]
-workspace = true \ No newline at end of file
+workspace = true
diff --git a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
index e75d897372..66ebf59350 100644
--- a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
+++ b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
@@ -6,7 +6,7 @@ use syntax::{
AstNode, AstPtr,
};
-use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: mismatched-tuple-struct-pat-arg-count
//
@@ -50,7 +50,7 @@ fn invalid_args_range(
expected: usize,
found: usize,
) -> FileRange {
- adjusted_display_range_new(ctx, source, &|expr| {
+ adjusted_display_range(ctx, source, &|expr| {
let (text_range, r_paren_token, expected_arg) = match expr {
Either::Left(ast::Expr::CallExpr(call)) => {
let arg_list = call.arg_list()?;
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs b/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
index 251a645292..6be2c54e60 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
@@ -19,7 +19,7 @@ pub(crate) fn trait_impl_incorrect_safety(
},
adjusted_display_range::<ast::Impl>(
ctx,
- InFile { file_id: d.file_id, value: d.impl_.syntax_node_ptr() },
+ InFile { file_id: d.file_id, value: d.impl_ },
&|impl_| {
if d.should_be_safe {
Some(match (impl_.unsafe_token(), impl_.impl_token()) {
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
index 56188cddf0..58d1b7f31d 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
@@ -25,7 +25,7 @@ pub(crate) fn trait_impl_missing_assoc_item(
format!("not all trait items implemented, missing: {missing}"),
adjusted_display_range::<ast::Impl>(
ctx,
- InFile { file_id: d.file_id, value: d.impl_.syntax_node_ptr() },
+ InFile { file_id: d.file_id, value: d.impl_ },
&|impl_| impl_.trait_().map(|t| t.syntax().text_range()),
),
)
diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index 23042e222b..750189beec 100644
--- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -1,3 +1,4 @@
+use either::Either;
use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type};
use ide_db::{famous_defs::FamousDefs, source_change::SourceChange};
use syntax::{
@@ -13,33 +14,24 @@ use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticCode, Dia
// This diagnostic is triggered when the type of an expression or pattern does not match
// the expected type.
pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Diagnostic {
- let display_range = match &d.expr_or_pat.value {
- expr if ast::Expr::can_cast(expr.kind()) => adjusted_display_range::<ast::Expr>(
- ctx,
- InFile { file_id: d.expr_or_pat.file_id, value: expr.syntax_node_ptr() },
- &|expr| {
- let salient_token_range = match expr {
- ast::Expr::IfExpr(it) => it.if_token()?.text_range(),
- ast::Expr::LoopExpr(it) => it.loop_token()?.text_range(),
- ast::Expr::ForExpr(it) => it.for_token()?.text_range(),
- ast::Expr::WhileExpr(it) => it.while_token()?.text_range(),
- ast::Expr::BlockExpr(it) => it.stmt_list()?.r_curly_token()?.text_range(),
- ast::Expr::MatchExpr(it) => it.match_token()?.text_range(),
- ast::Expr::MethodCallExpr(it) => it.name_ref()?.ident_token()?.text_range(),
- ast::Expr::FieldExpr(it) => it.name_ref()?.ident_token()?.text_range(),
- ast::Expr::AwaitExpr(it) => it.await_token()?.text_range(),
- _ => return None,
- };
-
- cov_mark::hit!(type_mismatch_range_adjustment);
- Some(salient_token_range)
- },
- ),
- pat => ctx.sema.diagnostics_display_range(InFile {
- file_id: d.expr_or_pat.file_id,
- value: pat.syntax_node_ptr(),
- }),
- };
+ let display_range = adjusted_display_range(ctx, d.expr_or_pat, &|node| {
+ let Either::Left(expr) = node else { return None };
+ let salient_token_range = match expr {
+ ast::Expr::IfExpr(it) => it.if_token()?.text_range(),
+ ast::Expr::LoopExpr(it) => it.loop_token()?.text_range(),
+ ast::Expr::ForExpr(it) => it.for_token()?.text_range(),
+ ast::Expr::WhileExpr(it) => it.while_token()?.text_range(),
+ ast::Expr::BlockExpr(it) => it.stmt_list()?.r_curly_token()?.text_range(),
+ ast::Expr::MatchExpr(it) => it.match_token()?.text_range(),
+ ast::Expr::MethodCallExpr(it) => it.name_ref()?.ident_token()?.text_range(),
+ ast::Expr::FieldExpr(it) => it.name_ref()?.ident_token()?.text_range(),
+ ast::Expr::AwaitExpr(it) => it.await_token()?.text_range(),
+ _ => return None,
+ };
+
+ cov_mark::hit!(type_mismatch_range_adjustment);
+ Some(salient_token_range)
+ });
let mut diag = Diagnostic::new(
DiagnosticCode::RustcHardError("E0308"),
format!(
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index 3214594121..0e7a5720d4 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -8,7 +8,7 @@ use ide_db::{
use syntax::{ast, AstNode, AstPtr};
use text_edit::TextEdit;
-use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-field
//
@@ -29,7 +29,7 @@ pub(crate) fn unresolved_field(
d.name.display(ctx.sema.db),
d.receiver.display(ctx.sema.db)
),
- adjusted_display_range_new(ctx, d.expr, &|expr| {
+ adjusted_display_range(ctx, d.expr, &|expr| {
Some(
match expr {
ast::Expr::MethodCallExpr(it) => it.name_ref(),
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index 41fb672908..9f8fee67f3 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -11,7 +11,7 @@ use syntax::{
};
use text_edit::TextEdit;
-use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-method
//
@@ -34,7 +34,7 @@ pub(crate) fn unresolved_method(
d.name.display(ctx.sema.db),
d.receiver.display(ctx.sema.db)
),
- adjusted_display_range_new(ctx, d.expr, &|expr| {
+ adjusted_display_range(ctx, d.expr, &|expr| {
Some(
match expr {
ast::Expr::MethodCallExpr(it) => it.name_ref(),
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index f35fc5b533..5ad7069e31 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -89,7 +89,6 @@ use ide_db::{
use once_cell::sync::Lazy;
use stdx::never;
use syntax::{
- algo::find_node_at_range,
ast::{self, AstNode},
AstPtr, SyntaxNode, SyntaxNodePtr, TextRange,
};
@@ -293,7 +292,7 @@ pub fn diagnostics(
resolve: &AssistResolveStrategy,
file_id: FileId,
) -> Vec<Diagnostic> {
- let _p = profile::span("diagnostics");
+ let _p = tracing::span!(tracing::Level::INFO, "diagnostics").entered();
let sema = Semantics::new(db);
let parse = db.parse(file_id);
let mut res = Vec::new();
@@ -572,24 +571,6 @@ fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist {
fn adjusted_display_range<N: AstNode>(
ctx: &DiagnosticsContext<'_>,
- diag_ptr: InFile<SyntaxNodePtr>,
- adj: &dyn Fn(N) -> Option<TextRange>,
-) -> FileRange {
- let FileRange { file_id, range } = ctx.sema.diagnostics_display_range(diag_ptr);
-
- let source_file = ctx.sema.db.parse(file_id);
- FileRange {
- file_id,
- range: find_node_at_range::<N>(&source_file.syntax_node(), range)
- .filter(|it| it.syntax().text_range() == range)
- .and_then(adj)
- .unwrap_or(range),
- }
-}
-
-// FIXME Replace the one above with this one?
-fn adjusted_display_range_new<N: AstNode>(
- ctx: &DiagnosticsContext<'_>,
diag_ptr: InFile<AstPtr<N>>,
adj: &dyn Fn(N) -> Option<TextRange>,
) -> FileRange {
diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs
index 742db32564..f394a491b5 100644
--- a/crates/ide-diagnostics/src/tests.rs
+++ b/crates/ide-diagnostics/src/tests.rs
@@ -173,7 +173,7 @@ fn minicore_smoke_test() {
fn check(minicore: MiniCore) {
let source = minicore.source_code();
let mut config = DiagnosticsConfig::test_sample();
- // This should be ignored since we conditionaly remove code which creates single item use with braces
+ // This should be ignored since we conditionally remove code which creates single item use with braces
config.disabled.insert("unused_braces".to_string());
check_diagnostics_with_config(config, &source);
}
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index d64295bdd6..4fed1f9158 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -226,6 +226,7 @@ mod tests {
.map(|(FileRange { file_id, range }, _)| FileRange { file_id, range })
.sorted_by_key(cmp)
.collect::<Vec<_>>();
+
assert_eq!(expected, navs);
}
@@ -237,6 +238,60 @@ mod tests {
}
#[test]
+ fn goto_def_in_included_file() {
+ check(
+ r#"
+//- minicore:include
+//- /main.rs
+
+include!("a.rs");
+
+fn main() {
+ foo();
+}
+
+//- /a.rs
+fn func_in_include() {
+ //^^^^^^^^^^^^^^^
+}
+
+fn foo() {
+ func_in_include$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_in_included_file_nested() {
+ check(
+ r#"
+//- minicore:include
+//- /main.rs
+
+macro_rules! passthrough {
+ ($($tt:tt)*) => { $($tt)* }
+}
+
+passthrough!(include!("a.rs"));
+
+fn main() {
+ foo();
+}
+
+//- /a.rs
+fn func_in_include() {
+ //^^^^^^^^^^^^^^^
+}
+
+fn foo() {
+ func_in_include$0();
+}
+"#,
+ );
+ }
+
+ #[test]
fn goto_def_if_items_same_name() {
check(
r#"
diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs
index c3a403b107..979ca4575d 100644
--- a/crates/ide/src/highlight_related.rs
+++ b/crates/ide/src/highlight_related.rs
@@ -55,7 +55,7 @@ pub(crate) fn highlight_related(
config: HighlightRelatedConfig,
pos @ FilePosition { offset, file_id }: FilePosition,
) -> Option<Vec<HighlightedRange>> {
- let _p = profile::span("highlight_related");
+ let _p = tracing::span!(tracing::Level::INFO, "highlight_related").entered();
let syntax = sema.parse(file_id).syntax().clone();
let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
@@ -519,6 +519,7 @@ mod tests {
ReferenceCategory::Read => "read",
ReferenceCategory::Write => "write",
ReferenceCategory::Import => "import",
+ ReferenceCategory::Test => "test",
}
.to_string()
}),
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index 77a06a97e2..19b181ae3b 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -120,6 +120,7 @@ pub(crate) fn hover(
Some(res)
}
+#[allow(clippy::field_reassign_with_default)]
fn hover_simple(
sema: &Semantics<'_, RootDatabase>,
FilePosition { file_id, offset }: FilePosition,
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index 348308d710..9f4427090e 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -7196,8 +7196,8 @@ impl Iterator for S {
file_id: FileId(
1,
),
- full_range: 6012..6220,
- focus_range: 6077..6083,
+ full_range: 6156..6364,
+ focus_range: 6221..6227,
name: "Future",
kind: Trait,
container_name: "future",
@@ -7210,8 +7210,8 @@ impl Iterator for S {
file_id: FileId(
1,
),
- full_range: 6850..7316,
- focus_range: 6894..6902,
+ full_range: 6994..7460,
+ focus_range: 7038..7046,
name: "Iterator",
kind: Trait,
container_name: "iterator",
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index 46e5901852..8311e770b4 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -454,7 +454,7 @@ pub(crate) fn inlay_hints(
range_limit: Option<RangeLimit>,
config: &InlayHintsConfig,
) -> Vec<InlayHint> {
- let _p = profile::span("inlay_hints");
+ let _p = tracing::span!(tracing::Level::INFO, "inlay_hints").entered();
let sema = Semantics::new(db);
let file = sema.parse(file_id);
let file = file.syntax();
diff --git a/crates/ide/src/inlay_hints/implicit_drop.rs b/crates/ide/src/inlay_hints/implicit_drop.rs
index 5a206643ac..3104b85768 100644
--- a/crates/ide/src/inlay_hints/implicit_drop.rs
+++ b/crates/ide/src/inlay_hints/implicit_drop.rs
@@ -177,7 +177,7 @@ mod tests {
#[test]
fn try_operator() {
- // We currently show drop inlay hint for every `?` operator that may potentialy drop something. We probably need to
+ // We currently show drop inlay hint for every `?` operator that may potentially drop something. We probably need to
// make it configurable as it doesn't seem very useful.
check_with_config(
ONLY_DROP_CONFIG,
diff --git a/crates/ide/src/prime_caches.rs b/crates/ide/src/prime_caches.rs
index d704d12a05..a95d1771ce 100644
--- a/crates/ide/src/prime_caches.rs
+++ b/crates/ide/src/prime_caches.rs
@@ -33,7 +33,7 @@ pub(crate) fn parallel_prime_caches(
num_worker_threads: u8,
cb: &(dyn Fn(ParallelPrimeCachesProgress) + Sync),
) {
- let _p = profile::span("prime_caches");
+ let _p = tracing::span!(tracing::Level::INFO, "prime_caches").entered();
let graph = db.crate_graph();
let mut crates_to_prime = {
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index 78fe84f70d..bdda25a111 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -57,7 +57,7 @@ pub(crate) fn find_all_refs(
position: FilePosition,
search_scope: Option<SearchScope>,
) -> Option<Vec<ReferenceSearchResult>> {
- let _p = profile::span("find_all_refs");
+ let _p = tracing::span!(tracing::Level::INFO, "find_all_refs").entered();
let syntax = sema.parse(position.file_id).syntax().clone();
let make_searcher = |literal_search: bool| {
move |def: Definition| {
@@ -308,6 +308,51 @@ mod tests {
use crate::{fixture, SearchScope};
#[test]
+ fn exclude_tests() {
+ check(
+ r#"
+fn test_func() {}
+
+fn func() {
+ test_func$0();
+}
+
+#[test]
+fn test() {
+ test_func();
+}
+"#,
+ expect![[r#"
+ test_func Function FileId(0) 0..17 3..12
+
+ FileId(0) 35..44
+ FileId(0) 75..84 Test
+ "#]],
+ );
+
+ check(
+ r#"
+fn test_func() {}
+
+fn func() {
+ test_func$0();
+}
+
+#[::core::prelude::v1::test]
+fn test() {
+ test_func();
+}
+"#,
+ expect![[r#"
+ test_func Function FileId(0) 0..17 3..12
+
+ FileId(0) 35..44
+ FileId(0) 96..105 Test
+ "#]],
+ );
+ }
+
+ #[test]
fn test_struct_literal_after_space() {
check(
r#"
@@ -454,6 +499,7 @@ fn main() {
"#]],
);
}
+
#[test]
fn test_variant_tuple_before_paren() {
check(
@@ -1435,7 +1481,7 @@ fn test$0() {
expect![[r#"
test Function FileId(0) 0..33 11..15
- FileId(0) 24..28
+ FileId(0) 24..28 Test
"#]],
);
}
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index 8c6f5e2e9c..dfcbaf54d4 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -186,7 +186,7 @@ pub(crate) fn highlight(
file_id: FileId,
range_to_highlight: Option<TextRange>,
) -> Vec<HlRange> {
- let _p = profile::span("highlight");
+ let _p = tracing::span!(tracing::Level::INFO, "highlight").entered();
let sema = Semantics::new(db);
// Determine the root based on the given range.
diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs
index 5b7a25408a..800bc994ad 100644
--- a/crates/mbe/src/expander/transcriber.rs
+++ b/crates/mbe/src/expander/transcriber.rs
@@ -292,7 +292,7 @@ fn expand_subtree<S: Span>(
Err(e) => {
// XXX: It *might* make sense to emit a dummy integer value like `0` here.
// That would type inference a bit more robust in cases like
- // `v[${count(t)}]` where index doesn't matter, but also coult also lead to
+ // `v[${count(t)}]` where index doesn't matter, but also could lead to
// wrong infefrence for cases like `tup.${count(t)}` where index itself
// does matter.
if err.is_none() {
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index 208051113a..379d184dd6 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -113,7 +113,7 @@ impl ProcMacroServer {
}
pub fn load_dylib(&self, dylib: MacroDylib) -> Result<Vec<ProcMacro>, ServerError> {
- let _p = profile::span("ProcMacroClient::load_dylib");
+ let _p = tracing::span!(tracing::Level::INFO, "ProcMacroClient::load_dylib").entered();
let macros =
self.process.lock().unwrap_or_else(|e| e.into_inner()).find_proc_macros(&dylib.path)?;
@@ -184,7 +184,7 @@ impl ProcMacro {
.process
.lock()
.unwrap_or_else(|e| e.into_inner())
- .send_task(msg::Request::ExpandMacro(task))?;
+ .send_task(msg::Request::ExpandMacro(Box::new(task)))?;
match response {
msg::Response::ExpandMacro(it) => {
diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs
index 557ddba5c7..e28fe387b8 100644
--- a/crates/proc-macro-api/src/msg.rs
+++ b/crates/proc-macro-api/src/msg.rs
@@ -29,7 +29,7 @@ pub enum Request {
/// Since [`NO_VERSION_CHECK_VERSION`]
ListMacros { dylib_path: PathBuf },
/// Since [`NO_VERSION_CHECK_VERSION`]
- ExpandMacro(ExpandMacro),
+ ExpandMacro(Box<ExpandMacro>),
/// Since [`VERSION_CHECK_VERSION`]
ApiVersionCheck {},
/// Since [`RUST_ANALYZER_SPAN_SUPPORT`]
diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs
index af9a03826f..a36200cdb4 100644
--- a/crates/proc-macro-srv-cli/src/main.rs
+++ b/crates/proc-macro-srv-cli/src/main.rs
@@ -45,9 +45,11 @@ fn run() -> io::Result<()> {
msg::Response::ListMacros(srv.list_macros(&dylib_path))
}
msg::Request::ExpandMacro(task) => match srv.span_mode() {
- msg::SpanMode::Id => msg::Response::ExpandMacro(srv.expand(task).map(|(it, _)| it)),
+ msg::SpanMode::Id => {
+ msg::Response::ExpandMacro(srv.expand(*task).map(|(it, _)| it))
+ }
msg::SpanMode::RustAnalyzer => msg::Response::ExpandMacroExtended(
- srv.expand(task).map(|(tree, span_data_table)| msg::ExpandMacroExtended {
+ srv.expand(*task).map(|(tree, span_data_table)| msg::ExpandMacroExtended {
tree,
span_data_table,
}),
diff --git a/crates/proc-macro-srv/proc-macro-test/build.rs b/crates/proc-macro-srv/proc-macro-test/build.rs
index c9b605a808..ff62980e4f 100644
--- a/crates/proc-macro-srv/proc-macro-test/build.rs
+++ b/crates/proc-macro-srv/proc-macro-test/build.rs
@@ -109,11 +109,11 @@ fn main() {
let mut artifact_path = None;
for message in Message::parse_stream(output.stdout.as_slice()) {
if let Message::CompilerArtifact(artifact) = message.unwrap() {
- if artifact.target.kind.contains(&"proc-macro".to_string()) {
- if artifact.package_id.repr.starts_with(&repr) || artifact.package_id.repr == pkgid
- {
- artifact_path = Some(PathBuf::from(&artifact.filenames[0]));
- }
+ if artifact.target.kind.contains(&"proc-macro".to_string())
+ && (artifact.package_id.repr.starts_with(&repr)
+ || artifact.package_id.repr == pkgid)
+ {
+ artifact_path = Some(PathBuf::from(&artifact.filenames[0]));
}
}
}
diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs
index bcf3600d27..b864a5e4fd 100644
--- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs
+++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs
@@ -1,4 +1,4 @@
-//! proc-macro server backend based on rust-analyzer's internal span represention
+//! proc-macro server backend based on rust-analyzer's internal span representation
//! This backend is used solely by rust-analyzer as it ties into rust-analyzer internals.
//!
//! It is an unfortunate result of how the proc-macro API works that we need to look into the
diff --git a/crates/profile/Cargo.toml b/crates/profile/Cargo.toml
index 5350023c88..a87b67f5c6 100644
--- a/crates/profile/Cargo.toml
+++ b/crates/profile/Cargo.toml
@@ -13,6 +13,7 @@ doctest = false
[dependencies]
once_cell = "1.17.0"
+tracing.workspace = true
cfg-if = "1.0.0"
la-arena.workspace = true
libc.workspace = true
@@ -33,4 +34,4 @@ jemalloc = ["jemalloc-ctl"]
# default = [ "cpu_profiler" ]
[lints]
-workspace = true \ No newline at end of file
+workspace = true
diff --git a/crates/profile/src/hprof.rs b/crates/profile/src/hprof.rs
deleted file mode 100644
index ea89a89c5c..0000000000
--- a/crates/profile/src/hprof.rs
+++ /dev/null
@@ -1,326 +0,0 @@
-//! Simple hierarchical profiler
-use std::{
- cell::RefCell,
- collections::{BTreeMap, HashSet},
- env, fmt,
- io::{stderr, Write},
- sync::{
- atomic::{AtomicBool, Ordering},
- RwLock,
- },
- time::{Duration, Instant},
-};
-
-use once_cell::sync::Lazy;
-
-use crate::tree::{Idx, Tree};
-
-/// Filtering syntax
-/// env RA_PROFILE=* // dump everything
-/// env RA_PROFILE=foo|bar|baz // enabled only selected entries
-/// env RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10 ms
-pub fn init() {
- countme::enable(env::var("RA_COUNT").is_ok());
- let spec = env::var("RA_PROFILE").unwrap_or_default();
- init_from(&spec);
-}
-
-pub fn init_from(spec: &str) {
- let filter = if spec.is_empty() { Filter::disabled() } else { Filter::from_spec(spec) };
- filter.install();
-}
-
-type Label = &'static str;
-
-/// This function starts a profiling scope in the current execution stack with a given description.
-/// It returns a `Profile` struct that measures elapsed time between this method invocation and `Profile` struct drop.
-/// It supports nested profiling scopes in case when this function is invoked multiple times at the execution stack.
-/// In this case the profiling information will be nested at the output.
-/// Profiling information is being printed in the stderr.
-///
-/// # Example
-/// ```
-/// profile::init_from("profile1|profile2@2");
-/// profiling_function1();
-///
-/// fn profiling_function1() {
-/// let _p = profile::span("profile1");
-/// profiling_function2();
-/// }
-///
-/// fn profiling_function2() {
-/// let _p = profile::span("profile2");
-/// }
-/// ```
-/// This will print in the stderr the following:
-/// ```text
-/// 0ms - profile
-/// 0ms - profile2
-/// ```
-#[inline]
-pub fn span(label: Label) -> ProfileSpan {
- debug_assert!(!label.is_empty());
-
- let enabled = PROFILING_ENABLED.load(Ordering::Relaxed);
- if enabled && with_profile_stack(|stack| stack.push(label)) {
- ProfileSpan(Some(ProfilerImpl { label, detail: None }))
- } else {
- ProfileSpan(None)
- }
-}
-
-#[inline]
-pub fn heartbeat_span() -> HeartbeatSpan {
- let enabled = PROFILING_ENABLED.load(Ordering::Relaxed);
- HeartbeatSpan::new(enabled)
-}
-
-#[inline]
-pub fn heartbeat() {
- let enabled = PROFILING_ENABLED.load(Ordering::Relaxed);
- if enabled {
- with_profile_stack(|it| it.heartbeat(1));
- }
-}
-
-pub struct ProfileSpan(Option<ProfilerImpl>);
-
-struct ProfilerImpl {
- label: Label,
- detail: Option<String>,
-}
-
-impl ProfileSpan {
- pub fn detail(mut self, detail: impl FnOnce() -> String) -> ProfileSpan {
- if let Some(profiler) = &mut self.0 {
- profiler.detail = Some(detail());
- }
- self
- }
-}
-
-impl Drop for ProfilerImpl {
- #[inline]
- fn drop(&mut self) {
- with_profile_stack(|it| it.pop(self.label, self.detail.take()));
- }
-}
-
-pub struct HeartbeatSpan {
- enabled: bool,
-}
-
-impl HeartbeatSpan {
- #[inline]
- pub fn new(enabled: bool) -> Self {
- if enabled {
- with_profile_stack(|it| it.heartbeats(true));
- }
- Self { enabled }
- }
-}
-
-impl Drop for HeartbeatSpan {
- fn drop(&mut self) {
- if self.enabled {
- with_profile_stack(|it| it.heartbeats(false));
- }
- }
-}
-
-static PROFILING_ENABLED: AtomicBool = AtomicBool::new(false);
-static FILTER: Lazy<RwLock<Filter>> = Lazy::new(Default::default);
-
-fn with_profile_stack<T>(f: impl FnOnce(&mut ProfileStack) -> T) -> T {
- thread_local!(static STACK: RefCell<ProfileStack> = RefCell::new(ProfileStack::new()));
- STACK.with(|it| f(&mut it.borrow_mut()))
-}
-
-#[derive(Default, Clone, Debug)]
-struct Filter {
- depth: usize,
- allowed: HashSet<String>,
- longer_than: Duration,
- heartbeat_longer_than: Duration,
- version: usize,
-}
-
-impl Filter {
- fn disabled() -> Filter {
- Filter::default()
- }
-
- fn from_spec(mut spec: &str) -> Filter {
- let longer_than = if let Some(idx) = spec.rfind('>') {
- let longer_than = spec[idx + 1..].parse().expect("invalid profile longer_than");
- spec = &spec[..idx];
- Duration::from_millis(longer_than)
- } else {
- Duration::new(0, 0)
- };
- let heartbeat_longer_than = longer_than;
-
- let depth = if let Some(idx) = spec.rfind('@') {
- let depth: usize = spec[idx + 1..].parse().expect("invalid profile depth");
- spec = &spec[..idx];
- depth
- } else {
- 999
- };
- let allowed =
- if spec == "*" { HashSet::new() } else { spec.split('|').map(String::from).collect() };
- Filter { depth, allowed, longer_than, heartbeat_longer_than, version: 0 }
- }
-
- fn install(mut self) {
- PROFILING_ENABLED.store(self.depth > 0, Ordering::SeqCst);
- let mut old = FILTER.write().unwrap();
- self.version = old.version + 1;
- *old = self;
- }
-}
-
-struct ProfileStack {
- frames: Vec<Frame>,
- filter: Filter,
- messages: Tree<Message>,
- heartbeats: bool,
-}
-
-struct Frame {
- t: Instant,
- heartbeats: u32,
-}
-
-#[derive(Default)]
-struct Message {
- duration: Duration,
- label: Label,
- detail: Option<String>,
-}
-
-impl ProfileStack {
- fn new() -> ProfileStack {
- ProfileStack {
- frames: Vec::new(),
- messages: Tree::default(),
- filter: Default::default(),
- heartbeats: false,
- }
- }
-
- fn push(&mut self, label: Label) -> bool {
- if self.frames.is_empty() {
- if let Ok(f) = FILTER.try_read() {
- if f.version > self.filter.version {
- self.filter = f.clone();
- }
- };
- }
- if self.frames.len() > self.filter.depth {
- return false;
- }
- let allowed = &self.filter.allowed;
- if self.frames.is_empty() && !allowed.is_empty() && !allowed.contains(label) {
- return false;
- }
-
- self.frames.push(Frame { t: Instant::now(), heartbeats: 0 });
- self.messages.start();
- true
- }
-
- fn pop(&mut self, label: Label, detail: Option<String>) {
- let frame = self.frames.pop().unwrap();
- let duration = frame.t.elapsed();
-
- if self.heartbeats {
- self.heartbeat(frame.heartbeats);
- let avg_span = duration / (frame.heartbeats + 1);
- if avg_span > self.filter.heartbeat_longer_than {
- eprintln!("Too few heartbeats {label} ({}/{duration:?})?", frame.heartbeats);
- }
- }
-
- self.messages.finish(Message { duration, label, detail });
- if self.frames.is_empty() {
- let longer_than = self.filter.longer_than;
- // Convert to millis for comparison to avoid problems with rounding
- // (otherwise we could print `0ms` despite user's `>0` filter when
- // `duration` is just a few nanos).
- if duration.as_millis() > longer_than.as_millis() {
- if let Some(root) = self.messages.root() {
- print(&self.messages, root, 0, longer_than, &mut stderr().lock());
- }
- }
- self.messages.clear();
- }
- }
-
- fn heartbeats(&mut self, yes: bool) {
- self.heartbeats = yes;
- }
- fn heartbeat(&mut self, n: u32) {
- if let Some(frame) = self.frames.last_mut() {
- frame.heartbeats += n;
- }
- }
-}
-
-fn print(
- tree: &Tree<Message>,
- curr: Idx<Message>,
- level: u32,
- longer_than: Duration,
- out: &mut impl Write,
-) {
- let current_indent = " ".repeat(level as usize);
- let detail = tree[curr].detail.as_ref().map(|it| format!(" @ {it}")).unwrap_or_default();
- writeln!(
- out,
- "{}{} - {}{}",
- current_indent,
- ms(tree[curr].duration),
- tree[curr].label,
- detail,
- )
- .expect("printing profiling info");
-
- let mut accounted_for = Duration::default();
- let mut short_children = BTreeMap::new(); // Use `BTreeMap` to get deterministic output.
- for child in tree.children(curr) {
- accounted_for += tree[child].duration;
-
- if tree[child].duration.as_millis() > longer_than.as_millis() {
- print(tree, child, level + 1, longer_than, out);
- } else {
- let (total_duration, cnt) =
- short_children.entry(tree[child].label).or_insert((Duration::default(), 0));
- *total_duration += tree[child].duration;
- *cnt += 1;
- }
- }
-
- for (child_msg, (duration, count)) in &short_children {
- writeln!(out, " {current_indent}{} - {child_msg} ({count} calls)", ms(*duration))
- .expect("printing profiling info");
- }
-
- let unaccounted = tree[curr].duration - accounted_for;
- if tree.children(curr).next().is_some() && unaccounted > longer_than {
- writeln!(out, " {current_indent}{} - ???", ms(unaccounted))
- .expect("printing profiling info");
- }
-}
-
-#[allow(non_camel_case_types)]
-struct ms(Duration);
-
-impl fmt::Display for ms {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self.0.as_millis() {
- 0 => f.write_str(" 0 "),
- n => write!(f, "{n:5}ms"),
- }
- }
-}
diff --git a/crates/profile/src/lib.rs b/crates/profile/src/lib.rs
index d86aa0c414..38c5b3fc9c 100644
--- a/crates/profile/src/lib.rs
+++ b/crates/profile/src/lib.rs
@@ -4,15 +4,12 @@
#[cfg(feature = "cpu_profiler")]
mod google_cpu_profiler;
-mod hprof;
mod memory_usage;
mod stop_watch;
-mod tree;
use std::cell::RefCell;
pub use crate::{
- hprof::{heartbeat, heartbeat_span, init, init_from, span},
memory_usage::{Bytes, MemoryUsage},
stop_watch::{StopWatch, StopWatchSpan},
};
diff --git a/crates/profile/src/tree.rs b/crates/profile/src/tree.rs
deleted file mode 100644
index 1290fba36f..0000000000
--- a/crates/profile/src/tree.rs
+++ /dev/null
@@ -1,84 +0,0 @@
-//! A simple tree implementation which tries to not allocate all over the place.
-use std::ops;
-
-use la_arena::Arena;
-
-#[derive(Default)]
-pub(crate) struct Tree<T> {
- nodes: Arena<Node<T>>,
- current_path: Vec<(Idx<T>, Option<Idx<T>>)>,
-}
-
-pub(crate) type Idx<T> = la_arena::Idx<Node<T>>;
-
-impl<T> Tree<T> {
- pub(crate) fn start(&mut self)
- where
- T: Default,
- {
- let me = self.nodes.alloc(Node::new(T::default()));
- if let Some((parent, last_child)) = self.current_path.last_mut() {
- let slot = match *last_child {
- Some(last_child) => &mut self.nodes[last_child].next_sibling,
- None => &mut self.nodes[*parent].first_child,
- };
- let prev = slot.replace(me);
- assert!(prev.is_none());
- *last_child = Some(me);
- }
-
- self.current_path.push((me, None));
- }
-
- pub(crate) fn finish(&mut self, data: T) {
- let (me, _last_child) = self.current_path.pop().unwrap();
- self.nodes[me].data = data;
- }
-
- pub(crate) fn root(&self) -> Option<Idx<T>> {
- self.nodes.iter().next().map(|(idx, _)| idx)
- }
-
- pub(crate) fn children(&self, idx: Idx<T>) -> impl Iterator<Item = Idx<T>> + '_ {
- NodeIter { nodes: &self.nodes, next: self.nodes[idx].first_child }
- }
- pub(crate) fn clear(&mut self) {
- self.nodes.clear();
- self.current_path.clear();
- }
-}
-
-impl<T> ops::Index<Idx<T>> for Tree<T> {
- type Output = T;
- fn index(&self, index: Idx<T>) -> &T {
- &self.nodes[index].data
- }
-}
-
-pub(crate) struct Node<T> {
- data: T,
- first_child: Option<Idx<T>>,
- next_sibling: Option<Idx<T>>,
-}
-
-impl<T> Node<T> {
- fn new(data: T) -> Node<T> {
- Node { data, first_child: None, next_sibling: None }
- }
-}
-
-struct NodeIter<'a, T> {
- nodes: &'a Arena<Node<T>>,
- next: Option<Idx<T>>,
-}
-
-impl<T> Iterator for NodeIter<'_, T> {
- type Item = Idx<T>;
-
- fn next(&mut self) -> Option<Idx<T>> {
- self.next.map(|next| {
- self.next = self.nodes[next].next_sibling;
- next
- })
- }
-}
diff --git a/crates/project-model/src/rustc_cfg.rs b/crates/project-model/src/rustc_cfg.rs
index c5d55f7d21..cf12d5b71d 100644
--- a/crates/project-model/src/rustc_cfg.rs
+++ b/crates/project-model/src/rustc_cfg.rs
@@ -26,7 +26,7 @@ pub(crate) fn get(
extra_env: &FxHashMap<String, String>,
config: RustcCfgConfig<'_>,
) -> Vec<CfgFlag> {
- let _p = profile::span("rustc_cfg::get");
+ let _p = tracing::span!(tracing::Level::INFO, "rustc_cfg::get").entered();
let mut res = Vec::with_capacity(6 * 2 + 1);
// Some nightly-only cfgs, which are required for stdlib
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index 88974e889e..8c5ea0619a 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -60,7 +60,7 @@ pub enum ProjectWorkspace {
cargo: CargoWorkspace,
build_scripts: WorkspaceBuildScripts,
sysroot: Result<Sysroot, Option<String>>,
- rustc: Result<(CargoWorkspace, WorkspaceBuildScripts), Option<String>>,
+ rustc: Result<Box<(CargoWorkspace, WorkspaceBuildScripts)>, Option<String>>,
/// Holds cfg flags for the current target. We get those by running
/// `rustc --print cfg`.
///
@@ -119,7 +119,7 @@ impl fmt::Debug for ProjectWorkspace {
.field("sysroot", &sysroot.is_ok())
.field(
"n_rustc_compiler_crates",
- &rustc.as_ref().map_or(0, |(rc, _)| rc.packages().len()),
+ &rustc.as_ref().map(|a| a.as_ref()).map_or(0, |(rc, _)| rc.packages().len()),
)
.field("n_rustc_cfg", &rustc_cfg.len())
.field("n_cfg_overrides", &cfg_overrides.len())
@@ -265,7 +265,7 @@ impl ProjectWorkspace {
cargo_toml.parent(),
&config.extra_env,
);
- Ok((workspace, buildscripts))
+ Ok(Box::new((workspace, buildscripts)))
}
Err(e) => {
tracing::error!(
@@ -603,7 +603,7 @@ impl ProjectWorkspace {
PackageRoot { is_local, include, exclude }
})
.chain(mk_sysroot(sysroot.as_ref(), Some(cargo.workspace_root())))
- .chain(rustc.iter().flat_map(|(rustc, _)| {
+ .chain(rustc.iter().map(|a| a.as_ref()).flat_map(|(rustc, _)| {
rustc.packages().map(move |krate| PackageRoot {
is_local: false,
include: vec![rustc[krate].manifest.parent().to_path_buf()],
@@ -631,7 +631,8 @@ impl ProjectWorkspace {
sysroot_package_len + project.n_crates()
}
ProjectWorkspace::Cargo { cargo, sysroot, rustc, .. } => {
- let rustc_package_len = rustc.as_ref().map_or(0, |(it, _)| it.packages().len());
+ let rustc_package_len =
+ rustc.as_ref().map(|a| a.as_ref()).map_or(0, |(it, _)| it.packages().len());
let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.num_packages());
cargo.packages().len() + sysroot_package_len + rustc_package_len
}
@@ -647,7 +648,7 @@ impl ProjectWorkspace {
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
extra_env: &FxHashMap<String, String>,
) -> (CrateGraph, ProcMacroPaths) {
- let _p = profile::span("ProjectWorkspace::to_crate_graph");
+ let _p = tracing::span!(tracing::Level::INFO, "ProjectWorkspace::to_crate_graph").entered();
let (mut crate_graph, proc_macros) = match self {
ProjectWorkspace::Json { project, sysroot, rustc_cfg, toolchain } => {
@@ -672,7 +673,7 @@ impl ProjectWorkspace {
target_layout,
} => cargo_to_crate_graph(
load,
- rustc.as_ref().ok(),
+ rustc.as_ref().map(|a| a.as_ref()).ok(),
cargo,
sysroot.as_ref().ok(),
rustc_cfg.clone(),
@@ -891,7 +892,7 @@ fn cargo_to_crate_graph(
target_layout: TargetLayoutLoadResult,
toolchain: Option<&Version>,
) -> (CrateGraph, ProcMacroPaths) {
- let _p = profile::span("cargo_to_crate_graph");
+ let _p = tracing::span!(tracing::Level::INFO, "cargo_to_crate_graph").entered();
let mut res = (CrateGraph::default(), ProcMacroPaths::default());
let crate_graph = &mut res.0;
let proc_macros = &mut res.1;
@@ -1088,7 +1089,7 @@ fn detached_files_to_crate_graph(
sysroot: Option<&Sysroot>,
target_layout: TargetLayoutLoadResult,
) -> (CrateGraph, ProcMacroPaths) {
- let _p = profile::span("detached_files_to_crate_graph");
+ let _p = tracing::span!(tracing::Level::INFO, "detached_files_to_crate_graph").entered();
let mut crate_graph = CrateGraph::default();
let (public_deps, _libproc_macro) = match sysroot {
Some(sysroot) => sysroot_to_crate_graph(
@@ -1384,7 +1385,7 @@ fn sysroot_to_crate_graph(
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
toolchain: Option<&Version>,
) -> (SysrootPublicDeps, Option<CrateId>) {
- let _p = profile::span("sysroot_to_crate_graph");
+ let _p = tracing::span!(tracing::Level::INFO, "sysroot_to_crate_graph").entered();
match sysroot.mode() {
SysrootMode::Workspace(cargo) => {
let (mut cg, mut pm) = cargo_to_crate_graph(
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index db5cabaf76..a212041e66 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -23,7 +23,7 @@ anyhow.workspace = true
crossbeam-channel = "0.5.5"
dissimilar.workspace = true
itertools.workspace = true
-scip = "0.3.1"
+scip = "0.3.3"
lsp-types = { version = "=0.95.0", features = ["proposed"] }
parking_lot = "0.12.1"
xflags = "0.3.0"
@@ -37,11 +37,10 @@ mimalloc = { version = "0.1.30", default-features = false, optional = true }
lsp-server.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
-tracing-log = "0.2.0"
tracing-tree.workspace = true
triomphe.workspace = true
nohash-hasher.workspace = true
-always-assert = "0.1.2"
+always-assert = "0.2.0"
walkdir = "2.3.2"
cfg.workspace = true
diff --git a/crates/rust-analyzer/src/bin/logger.rs b/crates/rust-analyzer/src/bin/logger.rs
deleted file mode 100644
index 1f923f6cf8..0000000000
--- a/crates/rust-analyzer/src/bin/logger.rs
+++ /dev/null
@@ -1,137 +0,0 @@
-//! Simple logger that logs either to stderr or to a file, using `tracing_subscriber`
-//! filter syntax and `tracing_appender` for non blocking output.
-
-use std::{
- fmt,
- fs::File,
- io::{self, Stderr},
- sync::Arc,
-};
-
-use anyhow::Context;
-use tracing::{level_filters::LevelFilter, Event, Subscriber};
-use tracing_log::NormalizeEvent;
-use tracing_subscriber::{
- filter::Targets,
- fmt::{
- format::Writer, writer::BoxMakeWriter, FmtContext, FormatEvent, FormatFields,
- FormattedFields, MakeWriter,
- },
- layer::SubscriberExt,
- registry::LookupSpan,
- util::SubscriberInitExt,
- Registry,
-};
-use tracing_tree::HierarchicalLayer;
-
-pub(crate) struct LoggerConfig {
- pub(crate) log_file: Option<File>,
- pub(crate) filter: String,
- pub(crate) chalk_filter: Option<String>,
-}
-
-struct MakeWriterStderr;
-
-impl MakeWriter<'_> for MakeWriterStderr {
- type Writer = Stderr;
-
- fn make_writer(&self) -> Self::Writer {
- io::stderr()
- }
-}
-
-impl LoggerConfig {
- pub(crate) fn init(self) -> anyhow::Result<()> {
- let mut filter: Targets = self
- .filter
- .parse()
- .with_context(|| format!("invalid log filter: `{}`", self.filter))?;
-
- let mut chalk_layer = None;
- if let Some(chalk_filter) = self.chalk_filter {
- let level: LevelFilter =
- chalk_filter.parse().with_context(|| "invalid chalk log filter")?;
- chalk_layer = Some(
- HierarchicalLayer::default()
- .with_indent_lines(true)
- .with_ansi(false)
- .with_indent_amount(2)
- .with_writer(io::stderr),
- );
- filter = filter
- .with_target("chalk_solve", level)
- .with_target("chalk_ir", level)
- .with_target("chalk_recursive", level);
- };
-
- let writer = match self.log_file {
- Some(file) => BoxMakeWriter::new(Arc::new(file)),
- None => BoxMakeWriter::new(io::stderr),
- };
- let ra_fmt_layer =
- tracing_subscriber::fmt::layer().event_format(LoggerFormatter).with_writer(writer);
-
- let registry = Registry::default().with(filter).with(ra_fmt_layer);
- match chalk_layer {
- Some(chalk_layer) => registry.with(chalk_layer).init(),
- None => registry.init(),
- }
- Ok(())
- }
-}
-
-#[derive(Debug)]
-struct LoggerFormatter;
-
-impl<S, N> FormatEvent<S, N> for LoggerFormatter
-where
- S: Subscriber + for<'a> LookupSpan<'a>,
- N: for<'a> FormatFields<'a> + 'static,
-{
- fn format_event(
- &self,
- ctx: &FmtContext<'_, S, N>,
- mut writer: Writer<'_>,
- event: &Event<'_>,
- ) -> fmt::Result {
- // Write level and target
- let level = *event.metadata().level();
-
- // If this event is issued from `log` crate, then the value of target is
- // always "log". `tracing-log` has hard coded it for some reason, so we
- // need to extract it using `normalized_metadata` method which is part of
- // `tracing_log::NormalizeEvent`.
- let target = match event.normalized_metadata() {
- // This event is issued from `log` crate
- Some(log) => log.target(),
- None => event.metadata().target(),
- };
- write!(writer, "[{level} {target}] ")?;
-
- // Write spans and fields of each span
- ctx.visit_spans(|span| {
- write!(writer, "{}", span.name())?;
-
- let ext = span.extensions();
-
- // `FormattedFields` is a formatted representation of the span's
- // fields, which is stored in its extensions by the `fmt` layer's
- // `new_span` method. The fields will have been formatted
- // by the same field formatter that's provided to the event
- // formatter in the `FmtContext`.
- let fields = &ext.get::<FormattedFields<N>>().expect("will never be `None`");
-
- if !fields.is_empty() {
- write!(writer, "{{{fields}}}")?;
- }
- write!(writer, ": ")?;
-
- Ok(())
- })?;
-
- // Write fields on the event
- ctx.field_format().format_fields(writer.by_ref(), event)?;
-
- writeln!(writer)
- }
-}
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index 0438729190..66b680571a 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -7,14 +7,14 @@
#[cfg(feature = "in-rust-tree")]
extern crate rustc_driver as _;
-mod logger;
mod rustc_wrapper;
-use std::{env, fs, path::PathBuf, process};
+use std::{env, fs, path::PathBuf, process, sync::Arc};
use anyhow::Context;
use lsp_server::Connection;
use rust_analyzer::{cli::flags, config::Config, from_json};
+use tracing_subscriber::fmt::writer::BoxMakeWriter;
use vfs::AbsPathBuf;
#[cfg(feature = "mimalloc")]
@@ -123,26 +123,21 @@ fn setup_logging(log_file_flag: Option<PathBuf>) -> anyhow::Result<()> {
None => None,
};
- logger::LoggerConfig {
- log_file,
+ let writer = match log_file {
+ Some(file) => BoxMakeWriter::new(Arc::new(file)),
+ None => BoxMakeWriter::new(std::io::stderr),
+ };
+
+ rust_analyzer::tracing::Config {
+ writer,
// Deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually
// useful information in there for debugging.
filter: env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_string()),
- // The meaning of CHALK_DEBUG I suspected is to tell chalk crates
- // (i.e. chalk-solve, chalk-ir, chalk-recursive) how to filter tracing
- // logs. But now we can only have just one filter, which means we have to
- // merge chalk filter to our main filter (from RA_LOG env).
- //
- // The acceptable syntax of CHALK_DEBUG is `target[span{field=value}]=level`.
- // As the value should only affect chalk crates, we'd better manually
- // specify the target. And for simplicity, CHALK_DEBUG only accept the value
- // that specify level.
chalk_filter: env::var("CHALK_DEBUG").ok(),
+ profile_filter: env::var("RA_PROFILE").ok(),
}
.init()?;
- profile::init();
-
Ok(())
}
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index f42e14f2e5..31bdd2a0e8 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -58,12 +58,14 @@ impl flags::AnalysisStats {
Rand32::new(seed)
};
- let mut cargo_config = CargoConfig::default();
- cargo_config.sysroot = match self.no_sysroot {
- true => None,
- false => Some(RustLibSource::Discover),
+ let cargo_config = CargoConfig {
+ sysroot: match self.no_sysroot {
+ true => None,
+ false => Some(RustLibSource::Discover),
+ },
+ sysroot_query_metadata: self.query_sysroot_metadata,
+ ..Default::default()
};
- cargo_config.sysroot_query_metadata = self.query_sysroot_metadata;
let no_progress = &|_| ();
let mut db_load_sw = self.stop_watch();
@@ -302,13 +304,13 @@ impl flags::AnalysisStats {
let mut fail = 0;
for &c in consts {
all += 1;
- let Err(e) = c.render_eval(db) else {
+ let Err(error) = c.render_eval(db) else {
continue;
};
if verbosity.is_spammy() {
let full_name =
full_name_of_item(db, c.module(db), c.name(db).unwrap_or(Name::missing()));
- println!("Const eval for {full_name} failed due {e:?}");
+ println!("Const eval for {full_name} failed due {error:?}");
}
fail += 1;
}
diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs
index 0182cf5402..6d2e97be20 100644
--- a/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -13,8 +13,8 @@ use crate::cli::flags;
impl flags::Diagnostics {
pub fn run(self) -> anyhow::Result<()> {
- let mut cargo_config = CargoConfig::default();
- cargo_config.sysroot = Some(RustLibSource::Discover);
+ let cargo_config =
+ CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
let with_proc_macro_server = if let Some(p) = &self.proc_macro_srv {
let path = vfs::AbsPathBuf::assert(std::env::current_dir()?.join(p));
ProcMacroServerChoice::Explicit(path)
diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs
index cc9e2a7ce2..252b1e1a48 100644
--- a/crates/rust-analyzer/src/cli/flags.rs
+++ b/crates/rust-analyzer/src/cli/flags.rs
@@ -71,7 +71,7 @@ xflags::xflags! {
optional --with-deps
/// Don't load sysroot crates (`std`, `core` & friends).
optional --no-sysroot
- /// Run cargo metadata on the sysroot to analyze its third-pary dependencies.
+ /// Run cargo metadata on the sysroot to analyze its third-party dependencies.
/// Requires --no-sysroot to not be set.
optional --query-sysroot-metadata
diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs
index 2138ecead5..64f965e22a 100644
--- a/crates/rust-analyzer/src/cli/lsif.rs
+++ b/crates/rust-analyzer/src/cli/lsif.rs
@@ -287,8 +287,8 @@ impl flags::Lsif {
pub fn run(self) -> anyhow::Result<()> {
eprintln!("Generating LSIF started...");
let now = Instant::now();
- let mut cargo_config = CargoConfig::default();
- cargo_config.sysroot = Some(RustLibSource::Discover);
+ let cargo_config =
+ CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
let no_progress = &|_| ();
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
diff --git a/crates/rust-analyzer/src/cli/parse.rs b/crates/rust-analyzer/src/cli/parse.rs
index 5ef8cdff4c..757f2dd70c 100644
--- a/crates/rust-analyzer/src/cli/parse.rs
+++ b/crates/rust-analyzer/src/cli/parse.rs
@@ -5,7 +5,7 @@ use crate::cli::{flags, read_stdin};
impl flags::Parse {
pub fn run(self) -> anyhow::Result<()> {
- let _p = profile::span("parsing");
+ let _p = tracing::span!(tracing::Level::INFO, "parsing").entered();
let text = read_stdin()?;
let file = SourceFile::parse(&text).tree();
if !self.no_dump {
diff --git a/crates/rust-analyzer/src/cli/run_tests.rs b/crates/rust-analyzer/src/cli/run_tests.rs
index e170419915..d07dcdec25 100644
--- a/crates/rust-analyzer/src/cli/run_tests.rs
+++ b/crates/rust-analyzer/src/cli/run_tests.rs
@@ -13,8 +13,8 @@ use crate::cli::{flags, full_name_of_item, Result};
impl flags::RunTests {
pub fn run(self) -> Result<()> {
- let mut cargo_config = CargoConfig::default();
- cargo_config.sysroot = Some(RustLibSource::Discover);
+ let cargo_config =
+ CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index 522eb53128..be7e434aca 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -59,8 +59,8 @@ impl Tester {
path.push("ra-rustc-test.rs");
let tmp_file = AbsPathBuf::try_from(path).unwrap();
std::fs::write(&tmp_file, "")?;
- let mut cargo_config = CargoConfig::default();
- cargo_config.sysroot = Some(RustLibSource::Discover);
+ let cargo_config =
+ CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
let workspace = ProjectWorkspace::DetachedFiles {
files: vec![tmp_file.clone()],
sysroot: Ok(Sysroot::discover(
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index c9cf40db3a..81622a4617 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -143,11 +143,14 @@ impl flags::Scip {
.map(|hover| hover.markup.as_str())
.filter(|it| !it.is_empty())
.map(|it| vec![it.to_owned()]);
+ let position_encoding =
+ scip_types::PositionEncoding::UTF8CodeUnitOffsetFromLineStart.into();
let signature_documentation =
token.signature.clone().map(|text| scip_types::Document {
relative_path: relative_path.clone(),
language: "rust".to_string(),
text,
+ position_encoding,
..Default::default()
});
let symbol_info = scip_types::SymbolInformation {
@@ -181,13 +184,16 @@ impl flags::Scip {
continue;
}
+ let position_encoding =
+ scip_types::PositionEncoding::UTF8CodeUnitOffsetFromLineStart.into();
documents.push(scip_types::Document {
relative_path,
language: "rust".to_string(),
occurrences,
symbols,
- special_fields: Default::default(),
text: String::new(),
+ position_encoding,
+ special_fields: Default::default(),
});
}
diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs
index f87dcb889a..8f11d82f8f 100644
--- a/crates/rust-analyzer/src/cli/ssr.rs
+++ b/crates/rust-analyzer/src/cli/ssr.rs
@@ -10,8 +10,8 @@ use crate::cli::flags;
impl flags::Ssr {
pub fn run(self) -> anyhow::Result<()> {
use ide_db::base_db::SourceDatabaseExt;
- let mut cargo_config = CargoConfig::default();
- cargo_config.sysroot = Some(RustLibSource::Discover);
+ let cargo_config =
+ CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 3c1b464c3c..815f6ea12e 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -32,6 +32,7 @@ use project_model::{
};
use rustc_hash::{FxHashMap, FxHashSet};
use serde::{de::DeserializeOwned, Deserialize};
+use stdx::format_to_acc;
use vfs::{AbsPath, AbsPathBuf};
use crate::{
@@ -494,6 +495,9 @@ config_data! {
/// Exclude imports from find-all-references.
references_excludeImports: bool = "false",
+ /// Exclude tests from find-all-references.
+ references_excludeTests: bool = "false",
+
/// Allow renaming of items not belonging to the loaded workspaces.
rename_allowExternalItems: bool = "false",
@@ -1545,6 +1549,10 @@ impl Config {
self.data.references_excludeImports
}
+ pub fn find_all_refs_exclude_tests(&self) -> bool {
+ self.data.references_excludeTests
+ }
+
pub fn snippet_cap(&self) -> bool {
self.experimental("snippetTextEdit")
}
@@ -1737,7 +1745,7 @@ impl Config {
}
pub fn main_loop_num_threads(&self) -> usize {
- self.data.numThreads.unwrap_or(num_cpus::get_physical().try_into().unwrap_or(1))
+ self.data.numThreads.unwrap_or(num_cpus::get_physical())
}
pub fn typing_autoclose_angle(&self) -> bool {
@@ -2556,14 +2564,13 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
#[cfg(test)]
fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
- fields
- .iter()
- .map(|(field, _ty, doc, default)| {
- let name = format!("rust-analyzer.{}", field.replace('_', "."));
- let doc = doc_comment_to_string(doc);
- if default.contains('\n') {
- format!(
- r#"[[{name}]]{name}::
+ fields.iter().fold(String::new(), |mut acc, (field, _ty, doc, default)| {
+ let name = format!("rust-analyzer.{}", field.replace('_', "."));
+ let doc = doc_comment_to_string(doc);
+ if default.contains('\n') {
+ format_to_acc!(
+ acc,
+ r#"[[{name}]]{name}::
+
--
Default:
@@ -2573,16 +2580,17 @@ Default:
{doc}
--
"#
- )
- } else {
- format!("[[{name}]]{name} (default: `{default}`)::\n+\n--\n{doc}--\n")
- }
- })
- .collect::<String>()
+ )
+ } else {
+ format_to_acc!(acc, "[[{name}]]{name} (default: `{default}`)::\n+\n--\n{doc}--\n")
+ }
+ })
}
fn doc_comment_to_string(doc: &[&str]) -> String {
- doc.iter().map(|it| it.strip_prefix(' ').unwrap_or(it)).map(|it| format!("{it}\n")).collect()
+ doc.iter()
+ .map(|it| it.strip_prefix(' ').unwrap_or(it))
+ .fold(String::new(), |mut acc, it| format_to_acc!(acc, "{it}\n"))
}
#[cfg(test)]
diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs
index ab3881f438..c91b22999d 100644
--- a/crates/rust-analyzer/src/diagnostics.rs
+++ b/crates/rust-analyzer/src/diagnostics.rs
@@ -128,7 +128,7 @@ pub(crate) fn fetch_native_diagnostics(
snapshot: GlobalStateSnapshot,
subscriptions: Vec<FileId>,
) -> Vec<(FileId, Vec<lsp_types::Diagnostic>)> {
- let _p = profile::span("fetch_native_diagnostics");
+ let _p = tracing::span!(tracing::Level::INFO, "fetch_native_diagnostics").entered();
let _ctx = stdx::panic_context::enter("fetch_native_diagnostics".to_owned());
let convert_diagnostic =
diff --git a/crates/rust-analyzer/src/dispatch.rs b/crates/rust-analyzer/src/dispatch.rs
index 7da4311888..fa856a796a 100644
--- a/crates/rust-analyzer/src/dispatch.rs
+++ b/crates/rust-analyzer/src/dispatch.rs
@@ -1,5 +1,8 @@
//! See [RequestDispatcher].
-use std::{fmt, panic, thread};
+use std::{
+ fmt::{self, Debug},
+ panic, thread,
+};
use ide::Cancelled;
use lsp_server::ExtractError;
@@ -49,6 +52,8 @@ impl RequestDispatcher<'_> {
Some(it) => it,
None => return self,
};
+ let _guard = tracing::span!(tracing::Level::INFO, "request", method = ?req.method, "request_id" = ?req.id).entered();
+ tracing::debug!(?params);
let result = {
let _pctx = stdx::panic_context::enter(panic_context);
f(self.global_state, params)
@@ -74,6 +79,8 @@ impl RequestDispatcher<'_> {
Some(it) => it,
None => return self,
};
+ let _guard = tracing::span!(tracing::Level::INFO, "request", method = ?req.method, "request_id" = ?req.id).entered();
+ tracing::debug!(?params);
let global_state_snapshot = self.global_state.snapshot();
let result = panic::catch_unwind(move || {
@@ -192,6 +199,8 @@ impl RequestDispatcher<'_> {
Some(it) => it,
None => return self,
};
+ let _guard = tracing::span!(tracing::Level::INFO, "request", method = ?req.method, "request_id" = ?req.id).entered();
+ tracing::debug!(?params);
let world = self.global_state.snapshot();
if MAIN_POOL {
@@ -313,12 +322,16 @@ impl NotificationDispatcher<'_> {
) -> anyhow::Result<&mut Self>
where
N: lsp_types::notification::Notification,
- N::Params: DeserializeOwned + Send,
+ N::Params: DeserializeOwned + Send + Debug,
{
let not = match self.not.take() {
Some(it) => it,
None => return Ok(self),
};
+
+ let _guard =
+ tracing::span!(tracing::Level::INFO, "notification", method = ?not.method).entered();
+
let params = match not.extract::<N::Params>(N::METHOD) {
Ok(it) => it,
Err(ExtractError::JsonError { method, error }) => {
@@ -329,6 +342,9 @@ impl NotificationDispatcher<'_> {
return Ok(self);
}
};
+
+ tracing::debug!(?params);
+
let _pctx = stdx::panic_context::enter(format!(
"\nversion: {}\nnotification: {}",
version(),
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index 232c03ae6c..2f226d0115 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -215,7 +215,7 @@ impl GlobalState {
}
pub(crate) fn process_changes(&mut self) -> bool {
- let _p = profile::span("GlobalState::process_changes");
+ let _p = tracing::span!(tracing::Level::INFO, "GlobalState::process_changes").entered();
let mut file_changes = FxHashMap::<_, (bool, ChangedFile)>::default();
let (change, modified_rust_files, workspace_structure_change) = {
diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs
index c556fdee50..1f24e95010 100644
--- a/crates/rust-analyzer/src/handlers/notification.rs
+++ b/crates/rust-analyzer/src/handlers/notification.rs
@@ -54,7 +54,7 @@ pub(crate) fn handle_did_open_text_document(
state: &mut GlobalState,
params: DidOpenTextDocumentParams,
) -> anyhow::Result<()> {
- let _p = profile::span("handle_did_open_text_document");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_did_open_text_document").entered();
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
let already_exists = state
@@ -79,7 +79,7 @@ pub(crate) fn handle_did_change_text_document(
state: &mut GlobalState,
params: DidChangeTextDocumentParams,
) -> anyhow::Result<()> {
- let _p = profile::span("handle_did_change_text_document");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_did_change_text_document").entered();
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
let data = match state.mem_docs.get_mut(&path) {
@@ -113,7 +113,7 @@ pub(crate) fn handle_did_close_text_document(
state: &mut GlobalState,
params: DidCloseTextDocumentParams,
) -> anyhow::Result<()> {
- let _p = profile::span("handle_did_close_text_document");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_did_close_text_document").entered();
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
if state.mem_docs.remove(&path).is_err() {
@@ -247,7 +247,7 @@ pub(crate) fn handle_did_change_watched_files(
}
fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
- let _p = profile::span("run_flycheck");
+ let _p = tracing::span!(tracing::Level::INFO, "run_flycheck").entered();
let file_id = state.vfs.read().0.file_id(&vfs_path);
if let Some(file_id) = file_id {
@@ -326,13 +326,13 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
}
pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
- let _p = profile::span("handle_stop_flycheck");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_stop_flycheck").entered();
state.flycheck.iter().for_each(|flycheck| flycheck.cancel());
Ok(())
}
pub(crate) fn handle_clear_flycheck(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
- let _p = profile::span("handle_clear_flycheck");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_clear_flycheck").entered();
state.diagnostics.clear_check_all();
Ok(())
}
@@ -341,7 +341,7 @@ pub(crate) fn handle_run_flycheck(
state: &mut GlobalState,
params: RunFlycheckParams,
) -> anyhow::Result<()> {
- let _p = profile::span("handle_run_flycheck");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_run_flycheck").entered();
if let Some(text_document) = params.text_document {
if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) {
if run_flycheck(state, vfs_path) {
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index 1a55dcebc1..2be2ba5c44 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -70,7 +70,7 @@ pub(crate) fn handle_analyzer_status(
snap: GlobalStateSnapshot,
params: lsp_ext::AnalyzerStatusParams,
) -> anyhow::Result<String> {
- let _p = profile::span("handle_analyzer_status");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_analyzer_status").entered();
let mut buf = String::new();
@@ -114,7 +114,7 @@ pub(crate) fn handle_analyzer_status(
}
pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> anyhow::Result<String> {
- let _p = profile::span("handle_memory_usage");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_memory_usage").entered();
let mem = state.analysis_host.per_query_memory_usage();
let mut out = String::new();
@@ -135,7 +135,7 @@ pub(crate) fn handle_syntax_tree(
snap: GlobalStateSnapshot,
params: lsp_ext::SyntaxTreeParams,
) -> anyhow::Result<String> {
- let _p = profile::span("handle_syntax_tree");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_syntax_tree").entered();
let id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(id)?;
let text_range = params.range.and_then(|r| from_proto::text_range(&line_index, r).ok());
@@ -147,7 +147,7 @@ pub(crate) fn handle_view_hir(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<String> {
- let _p = profile::span("handle_view_hir");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_view_hir").entered();
let position = from_proto::file_position(&snap, params)?;
let res = snap.analysis.view_hir(position)?;
Ok(res)
@@ -157,7 +157,7 @@ pub(crate) fn handle_view_mir(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<String> {
- let _p = profile::span("handle_view_mir");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_view_mir").entered();
let position = from_proto::file_position(&snap, params)?;
let res = snap.analysis.view_mir(position)?;
Ok(res)
@@ -167,7 +167,7 @@ pub(crate) fn handle_interpret_function(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<String> {
- let _p = profile::span("handle_interpret_function");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_interpret_function").entered();
let position = from_proto::file_position(&snap, params)?;
let res = snap.analysis.interpret_function(position)?;
Ok(res)
@@ -185,7 +185,7 @@ pub(crate) fn handle_view_item_tree(
snap: GlobalStateSnapshot,
params: lsp_ext::ViewItemTreeParams,
) -> anyhow::Result<String> {
- let _p = profile::span("handle_view_item_tree");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_view_item_tree").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let res = snap.analysis.view_item_tree(file_id)?;
Ok(res)
@@ -195,7 +195,7 @@ pub(crate) fn handle_view_crate_graph(
snap: GlobalStateSnapshot,
params: ViewCrateGraphParams,
) -> anyhow::Result<String> {
- let _p = profile::span("handle_view_crate_graph");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_view_crate_graph").entered();
let dot = snap.analysis.view_crate_graph(params.full)?.map_err(anyhow::Error::msg)?;
Ok(dot)
}
@@ -204,7 +204,7 @@ pub(crate) fn handle_expand_macro(
snap: GlobalStateSnapshot,
params: lsp_ext::ExpandMacroParams,
) -> anyhow::Result<Option<lsp_ext::ExpandedMacro>> {
- let _p = profile::span("handle_expand_macro");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_expand_macro").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, params.position)?;
@@ -217,7 +217,7 @@ pub(crate) fn handle_selection_range(
snap: GlobalStateSnapshot,
params: lsp_types::SelectionRangeParams,
) -> anyhow::Result<Option<Vec<lsp_types::SelectionRange>>> {
- let _p = profile::span("handle_selection_range");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_selection_range").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
let res: anyhow::Result<Vec<lsp_types::SelectionRange>> = params
@@ -260,7 +260,7 @@ pub(crate) fn handle_matching_brace(
snap: GlobalStateSnapshot,
params: lsp_ext::MatchingBraceParams,
) -> anyhow::Result<Vec<Position>> {
- let _p = profile::span("handle_matching_brace");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_matching_brace").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
params
@@ -283,7 +283,7 @@ pub(crate) fn handle_join_lines(
snap: GlobalStateSnapshot,
params: lsp_ext::JoinLinesParams,
) -> anyhow::Result<Vec<lsp_types::TextEdit>> {
- let _p = profile::span("handle_join_lines");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_join_lines").entered();
let config = snap.config.join_lines();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
@@ -308,7 +308,7 @@ pub(crate) fn handle_on_enter(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
- let _p = profile::span("handle_on_enter");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_on_enter").entered();
let position = from_proto::file_position(&snap, params)?;
let edit = match snap.analysis.on_enter(position)? {
None => return Ok(None),
@@ -323,7 +323,7 @@ pub(crate) fn handle_on_type_formatting(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentOnTypeFormattingParams,
) -> anyhow::Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
- let _p = profile::span("handle_on_type_formatting");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_on_type_formatting").entered();
let mut position = from_proto::file_position(&snap, params.text_document_position)?;
let line_index = snap.file_line_index(position.file_id)?;
@@ -364,7 +364,7 @@ pub(crate) fn handle_document_symbol(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentSymbolParams,
) -> anyhow::Result<Option<lsp_types::DocumentSymbolResponse>> {
- let _p = profile::span("handle_document_symbol");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_document_symbol").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
@@ -453,7 +453,7 @@ pub(crate) fn handle_workspace_symbol(
snap: GlobalStateSnapshot,
params: WorkspaceSymbolParams,
) -> anyhow::Result<Option<lsp_types::WorkspaceSymbolResponse>> {
- let _p = profile::span("handle_workspace_symbol");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_workspace_symbol").entered();
let config = snap.config.workspace_symbol();
let (all_symbols, libs) = decide_search_scope_and_kind(&params, &config);
@@ -545,7 +545,7 @@ pub(crate) fn handle_will_rename_files(
snap: GlobalStateSnapshot,
params: lsp_types::RenameFilesParams,
) -> anyhow::Result<Option<lsp_types::WorkspaceEdit>> {
- let _p = profile::span("handle_will_rename_files");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_will_rename_files").entered();
let source_changes: Vec<SourceChange> = params
.files
@@ -607,7 +607,7 @@ pub(crate) fn handle_goto_definition(
snap: GlobalStateSnapshot,
params: lsp_types::GotoDefinitionParams,
) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
- let _p = profile::span("handle_goto_definition");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_goto_definition").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_definition(position)? {
None => return Ok(None),
@@ -622,7 +622,7 @@ pub(crate) fn handle_goto_declaration(
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoDeclarationParams,
) -> anyhow::Result<Option<lsp_types::request::GotoDeclarationResponse>> {
- let _p = profile::span("handle_goto_declaration");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_goto_declaration").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params.clone())?;
let nav_info = match snap.analysis.goto_declaration(position)? {
None => return handle_goto_definition(snap, params),
@@ -637,7 +637,7 @@ pub(crate) fn handle_goto_implementation(
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoImplementationParams,
) -> anyhow::Result<Option<lsp_types::request::GotoImplementationResponse>> {
- let _p = profile::span("handle_goto_implementation");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_goto_implementation").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_implementation(position)? {
None => return Ok(None),
@@ -652,7 +652,7 @@ pub(crate) fn handle_goto_type_definition(
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoTypeDefinitionParams,
) -> anyhow::Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
- let _p = profile::span("handle_goto_type_definition");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_goto_type_definition").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_type_definition(position)? {
None => return Ok(None),
@@ -667,7 +667,7 @@ pub(crate) fn handle_parent_module(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
- let _p = profile::span("handle_parent_module");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_parent_module").entered();
if let Ok(file_path) = &params.text_document.uri.to_file_path() {
if file_path.file_name().unwrap_or_default() == "Cargo.toml" {
// search workspaces for parent packages or fallback to workspace root
@@ -734,7 +734,7 @@ pub(crate) fn handle_runnables(
snap: GlobalStateSnapshot,
params: lsp_ext::RunnablesParams,
) -> anyhow::Result<Vec<lsp_ext::Runnable>> {
- let _p = profile::span("handle_runnables");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_runnables").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
let offset = params.position.and_then(|it| from_proto::offset(&line_index, it).ok());
@@ -829,7 +829,7 @@ pub(crate) fn handle_related_tests(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Vec<lsp_ext::TestInfo>> {
- let _p = profile::span("handle_related_tests");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_related_tests").entered();
let position = from_proto::file_position(&snap, params)?;
let tests = snap.analysis.related_tests(position, None)?;
@@ -847,7 +847,7 @@ pub(crate) fn handle_completion(
snap: GlobalStateSnapshot,
params: lsp_types::CompletionParams,
) -> anyhow::Result<Option<lsp_types::CompletionResponse>> {
- let _p = profile::span("handle_completion");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_completion").entered();
let text_document_position = params.text_document_position.clone();
let position = from_proto::file_position(&snap, params.text_document_position)?;
let completion_trigger_character =
@@ -875,7 +875,7 @@ pub(crate) fn handle_completion_resolve(
snap: GlobalStateSnapshot,
mut original_completion: CompletionItem,
) -> anyhow::Result<CompletionItem> {
- let _p = profile::span("handle_completion_resolve");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_completion_resolve").entered();
if !all_edits_are_disjoint(&original_completion, &[]) {
return Err(invalid_params_error(
@@ -931,7 +931,7 @@ pub(crate) fn handle_folding_range(
snap: GlobalStateSnapshot,
params: FoldingRangeParams,
) -> anyhow::Result<Option<Vec<FoldingRange>>> {
- let _p = profile::span("handle_folding_range");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_folding_range").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let folds = snap.analysis.folding_ranges(file_id)?;
let text = snap.analysis.file_text(file_id)?;
@@ -948,7 +948,7 @@ pub(crate) fn handle_signature_help(
snap: GlobalStateSnapshot,
params: lsp_types::SignatureHelpParams,
) -> anyhow::Result<Option<lsp_types::SignatureHelp>> {
- let _p = profile::span("handle_signature_help");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_signature_help").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let help = match snap.analysis.signature_help(position)? {
Some(it) => it,
@@ -963,7 +963,7 @@ pub(crate) fn handle_hover(
snap: GlobalStateSnapshot,
params: lsp_ext::HoverParams,
) -> anyhow::Result<Option<lsp_ext::Hover>> {
- let _p = profile::span("handle_hover");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_hover").entered();
let range = match params.position {
PositionOrRange::Position(position) => Range::new(position, position),
PositionOrRange::Range(range) => range,
@@ -1000,7 +1000,7 @@ pub(crate) fn handle_prepare_rename(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Option<PrepareRenameResponse>> {
- let _p = profile::span("handle_prepare_rename");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_prepare_rename").entered();
let position = from_proto::file_position(&snap, params)?;
let change = snap.analysis.prepare_rename(position)?.map_err(to_proto::rename_error)?;
@@ -1014,7 +1014,7 @@ pub(crate) fn handle_rename(
snap: GlobalStateSnapshot,
params: RenameParams,
) -> anyhow::Result<Option<WorkspaceEdit>> {
- let _p = profile::span("handle_rename");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_rename").entered();
let position = from_proto::file_position(&snap, params.text_document_position)?;
let mut change = snap
@@ -1051,10 +1051,11 @@ pub(crate) fn handle_references(
snap: GlobalStateSnapshot,
params: lsp_types::ReferenceParams,
) -> anyhow::Result<Option<Vec<Location>>> {
- let _p = profile::span("handle_references");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_references").entered();
let position = from_proto::file_position(&snap, params.text_document_position)?;
let exclude_imports = snap.config.find_all_refs_exclude_imports();
+ let exclude_tests = snap.config.find_all_refs_exclude_tests();
let refs = match snap.analysis.find_all_refs(position, None)? {
None => return Ok(None),
@@ -1078,7 +1079,8 @@ pub(crate) fn handle_references(
.flat_map(|(file_id, refs)| {
refs.into_iter()
.filter(|&(_, category)| {
- !exclude_imports || category != Some(ReferenceCategory::Import)
+ (!exclude_imports || category != Some(ReferenceCategory::Import))
+ && (!exclude_tests || category != Some(ReferenceCategory::Test))
})
.map(move |(range, _)| FileRange { file_id, range })
})
@@ -1094,7 +1096,7 @@ pub(crate) fn handle_formatting(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentFormattingParams,
) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> {
- let _p = profile::span("handle_formatting");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_formatting").entered();
run_rustfmt(&snap, params.text_document, None)
}
@@ -1103,7 +1105,7 @@ pub(crate) fn handle_range_formatting(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentRangeFormattingParams,
) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> {
- let _p = profile::span("handle_range_formatting");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_range_formatting").entered();
run_rustfmt(&snap, params.text_document, Some(params.range))
}
@@ -1112,7 +1114,7 @@ pub(crate) fn handle_code_action(
snap: GlobalStateSnapshot,
params: lsp_types::CodeActionParams,
) -> anyhow::Result<Option<Vec<lsp_ext::CodeAction>>> {
- let _p = profile::span("handle_code_action");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_code_action").entered();
if !snap.config.code_action_literals() {
// We intentionally don't support command-based actions, as those either
@@ -1186,7 +1188,7 @@ pub(crate) fn handle_code_action_resolve(
snap: GlobalStateSnapshot,
mut code_action: lsp_ext::CodeAction,
) -> anyhow::Result<lsp_ext::CodeAction> {
- let _p = profile::span("handle_code_action_resolve");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_code_action_resolve").entered();
let params = match code_action.data.take() {
Some(it) => it,
None => return Err(invalid_params_error("code action without data".to_string()).into()),
@@ -1276,7 +1278,7 @@ pub(crate) fn handle_code_lens(
snap: GlobalStateSnapshot,
params: lsp_types::CodeLensParams,
) -> anyhow::Result<Option<Vec<CodeLens>>> {
- let _p = profile::span("handle_code_lens");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_code_lens").entered();
let lens_config = snap.config.lens();
if lens_config.none() {
@@ -1346,7 +1348,7 @@ pub(crate) fn handle_document_highlight(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentHighlightParams,
) -> anyhow::Result<Option<Vec<lsp_types::DocumentHighlight>>> {
- let _p = profile::span("handle_document_highlight");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_document_highlight").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let line_index = snap.file_line_index(position.file_id)?;
@@ -1368,7 +1370,7 @@ pub(crate) fn handle_ssr(
snap: GlobalStateSnapshot,
params: lsp_ext::SsrParams,
) -> anyhow::Result<lsp_types::WorkspaceEdit> {
- let _p = profile::span("handle_ssr");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_ssr").entered();
let selections = params
.selections
.iter()
@@ -1388,7 +1390,7 @@ pub(crate) fn handle_inlay_hints(
snap: GlobalStateSnapshot,
params: InlayHintParams,
) -> anyhow::Result<Option<Vec<InlayHint>>> {
- let _p = profile::span("handle_inlay_hints");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_inlay_hints").entered();
let document_uri = &params.text_document.uri;
let FileRange { file_id, range } = from_proto::file_range(
&snap,
@@ -1418,7 +1420,7 @@ pub(crate) fn handle_inlay_hints_resolve(
snap: GlobalStateSnapshot,
mut original_hint: InlayHint,
) -> anyhow::Result<InlayHint> {
- let _p = profile::span("handle_inlay_hints_resolve");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_inlay_hints_resolve").entered();
let data = match original_hint.data.take() {
Some(it) => it,
@@ -1465,7 +1467,7 @@ pub(crate) fn handle_call_hierarchy_prepare(
snap: GlobalStateSnapshot,
params: CallHierarchyPrepareParams,
) -> anyhow::Result<Option<Vec<CallHierarchyItem>>> {
- let _p = profile::span("handle_call_hierarchy_prepare");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_call_hierarchy_prepare").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.call_hierarchy(position)? {
@@ -1487,7 +1489,7 @@ pub(crate) fn handle_call_hierarchy_incoming(
snap: GlobalStateSnapshot,
params: CallHierarchyIncomingCallsParams,
) -> anyhow::Result<Option<Vec<CallHierarchyIncomingCall>>> {
- let _p = profile::span("handle_call_hierarchy_incoming");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_call_hierarchy_incoming").entered();
let item = params.item;
let doc = TextDocumentIdentifier::new(item.uri);
@@ -1522,7 +1524,7 @@ pub(crate) fn handle_call_hierarchy_outgoing(
snap: GlobalStateSnapshot,
params: CallHierarchyOutgoingCallsParams,
) -> anyhow::Result<Option<Vec<CallHierarchyOutgoingCall>>> {
- let _p = profile::span("handle_call_hierarchy_outgoing");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_call_hierarchy_outgoing").entered();
let item = params.item;
let doc = TextDocumentIdentifier::new(item.uri);
@@ -1557,7 +1559,7 @@ pub(crate) fn handle_semantic_tokens_full(
snap: GlobalStateSnapshot,
params: SemanticTokensParams,
) -> anyhow::Result<Option<SemanticTokensResult>> {
- let _p = profile::span("handle_semantic_tokens_full");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_semantic_tokens_full").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?;
@@ -1587,7 +1589,7 @@ pub(crate) fn handle_semantic_tokens_full_delta(
snap: GlobalStateSnapshot,
params: SemanticTokensDeltaParams,
) -> anyhow::Result<Option<SemanticTokensFullDeltaResult>> {
- let _p = profile::span("handle_semantic_tokens_full_delta");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_semantic_tokens_full_delta").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?;
@@ -1630,7 +1632,7 @@ pub(crate) fn handle_semantic_tokens_range(
snap: GlobalStateSnapshot,
params: SemanticTokensRangeParams,
) -> anyhow::Result<Option<SemanticTokensRangeResult>> {
- let _p = profile::span("handle_semantic_tokens_range");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_semantic_tokens_range").entered();
let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
let text = snap.analysis.file_text(frange.file_id)?;
@@ -1656,7 +1658,7 @@ pub(crate) fn handle_open_docs(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<ExternalDocsResponse> {
- let _p = profile::span("handle_open_docs");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_open_docs").entered();
let position = from_proto::file_position(&snap, params)?;
let ws_and_sysroot = snap.workspaces.iter().find_map(|ws| match ws {
@@ -1695,7 +1697,7 @@ pub(crate) fn handle_open_cargo_toml(
snap: GlobalStateSnapshot,
params: lsp_ext::OpenCargoTomlParams,
) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
- let _p = profile::span("handle_open_cargo_toml");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_open_cargo_toml").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let cargo_spec = match CargoTargetSpec::for_file(&snap, file_id)? {
@@ -1713,7 +1715,7 @@ pub(crate) fn handle_move_item(
snap: GlobalStateSnapshot,
params: lsp_ext::MoveItemParams,
) -> anyhow::Result<Vec<lsp_ext::SnippetTextEdit>> {
- let _p = profile::span("handle_move_item");
+ let _p = tracing::span!(tracing::Level::INFO, "handle_move_item").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let range = from_proto::file_range(&snap, &params.text_document, params.range)?;
@@ -1735,7 +1737,7 @@ pub(crate) fn handle_view_recursive_memory_layout(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Option<lsp_ext::RecursiveMemoryLayout>> {
- let _p = profile::span("view_recursive_memory_layout");
+ let _p = tracing::span!(tracing::Level::INFO, "view_recursive_memory_layout").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, params.position)?;
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index d94f7cefa6..acc02d6447 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -60,7 +60,7 @@ fn integrated_highlighting_benchmark() {
analysis.highlight_as_html(file_id, false).unwrap();
}
- profile::init_from("*>100");
+ crate::tracing::hprof::init("*>100");
{
let _it = stdx::timeit("change");
@@ -152,8 +152,7 @@ fn integrated_completion_benchmark() {
analysis.completions(&config, position, None).unwrap();
}
- profile::init_from("*>5");
- // let _s = profile::heartbeat_span();
+ crate::tracing::hprof::init("*>5");
let completion_offset = {
let _it = stdx::timeit("change");
@@ -168,7 +167,7 @@ fn integrated_completion_benchmark() {
};
{
- let _p = profile::span("unqualified path completion");
+ let _p = tracing::span!(tracing::Level::INFO, "unqualified path completion").entered();
let _span = profile::cpu_span();
let analysis = host.analysis();
let config = CompletionConfig {
@@ -209,7 +208,7 @@ fn integrated_completion_benchmark() {
};
{
- let _p = profile::span("dot completion");
+ let _p = tracing::span!(tracing::Level::INFO, "dot completion").entered();
let _span = profile::cpu_span();
let analysis = host.analysis();
let config = CompletionConfig {
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs
index 29bc0b80d8..b1809f58ae 100644
--- a/crates/rust-analyzer/src/lib.rs
+++ b/crates/rust-analyzer/src/lib.rs
@@ -13,11 +13,6 @@
pub mod cli;
-#[allow(unused)]
-macro_rules! eprintln {
- ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
-}
-
mod caps;
mod cargo_target_spec;
mod diagnostics;
@@ -37,6 +32,12 @@ mod handlers {
pub(crate) mod request;
}
+pub mod tracing {
+ pub mod config;
+ pub use config::Config;
+ pub mod hprof;
+}
+
pub mod config;
pub mod lsp;
use self::lsp::ext as lsp_ext;
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index fe381fbeb3..d363ac69fd 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -92,6 +92,7 @@ pub(crate) fn document_highlight_kind(
ReferenceCategory::Read => Some(lsp_types::DocumentHighlightKind::READ),
ReferenceCategory::Write => Some(lsp_types::DocumentHighlightKind::WRITE),
ReferenceCategory::Import => None,
+ ReferenceCategory::Test => None,
}
}
@@ -311,16 +312,14 @@ fn completion_item(
set_score(&mut lsp_item, max_relevance, item.relevance);
if config.completion().enable_imports_on_the_fly && !item.import_to_add.is_empty() {
- let imports: Vec<_> = item
+ let imports = item
.import_to_add
.into_iter()
- .filter_map(|(import_path, import_name)| {
- Some(lsp_ext::CompletionImport {
- full_import_path: import_path,
- imported_name: import_name,
- })
+ .map(|(import_path, import_name)| lsp_ext::CompletionImport {
+ full_import_path: import_path,
+ imported_name: import_name,
})
- .collect();
+ .collect::<Vec<_>>();
if !imports.is_empty() {
let data = lsp_ext::CompletionResolveData { position: tdpp.clone(), imports };
lsp_item.data = Some(to_value(data).unwrap());
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 0173805d44..f3ead6d04f 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -60,6 +60,17 @@ enum Event {
Flycheck(flycheck::Message),
}
+impl fmt::Display for Event {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Event::Lsp(_) => write!(f, "Event::Lsp"),
+ Event::Task(_) => write!(f, "Event::Task"),
+ Event::Vfs(_) => write!(f, "Event::Vfs"),
+ Event::Flycheck(_) => write!(f, "Event::Flycheck"),
+ }
+ }
+}
+
#[derive(Debug)]
pub(crate) enum Task {
Response(lsp_server::Response),
@@ -196,7 +207,8 @@ impl GlobalState {
fn handle_event(&mut self, event: Event) -> anyhow::Result<()> {
let loop_start = Instant::now();
// NOTE: don't count blocking select! call as a loop-turn time
- let _p = profile::span("GlobalState::handle_event");
+ let _p = tracing::span!(tracing::Level::INFO, "GlobalState::handle_event", event = %event)
+ .entered();
let event_dbg_msg = format!("{event:?}");
tracing::debug!("{:?} handle_event({})", loop_start, event_dbg_msg);
@@ -215,7 +227,8 @@ impl GlobalState {
lsp_server::Message::Response(resp) => self.complete_request(resp),
},
Event::Task(task) => {
- let _p = profile::span("GlobalState::handle_event/task");
+ let _p = tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/task")
+ .entered();
let mut prime_caches_progress = Vec::new();
self.handle_task(&mut prime_caches_progress, task);
@@ -269,7 +282,8 @@ impl GlobalState {
}
}
Event::Vfs(message) => {
- let _p = profile::span("GlobalState::handle_event/vfs");
+ let _p =
+ tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/vfs").entered();
self.handle_vfs_msg(message);
// Coalesce many VFS event into a single loop turn
while let Ok(message) = self.loader.receiver.try_recv() {
@@ -277,7 +291,8 @@ impl GlobalState {
}
}
Event::Flycheck(message) => {
- let _p = profile::span("GlobalState::handle_event/flycheck");
+ let _p = tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/flycheck")
+ .entered();
self.handle_flycheck_msg(message);
// Coalesce many flycheck updates into a single loop turn
while let Ok(message) = self.flycheck_receiver.try_recv() {
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 969211f440..65c00cc08d 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -70,7 +70,8 @@ impl GlobalState {
}
pub(crate) fn update_configuration(&mut self, config: Config) {
- let _p = profile::span("GlobalState::update_configuration");
+ let _p =
+ tracing::span!(tracing::Level::INFO, "GlobalState::update_configuration").entered();
let old_config = mem::replace(&mut self.config, Arc::new(config));
if self.config.lru_parse_query_capacity() != old_config.lru_parse_query_capacity() {
self.analysis_host.update_lru_capacity(self.config.lru_parse_query_capacity());
@@ -355,7 +356,7 @@ impl GlobalState {
}
pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
- let _p = profile::span("GlobalState::switch_workspaces");
+ let _p = tracing::span!(tracing::Level::INFO, "GlobalState::switch_workspaces").entered();
tracing::info!(%cause, "will switch workspaces");
let Some((workspaces, force_reload_crate_graph)) =
@@ -502,7 +503,7 @@ impl GlobalState {
let mut crate_graph_file_dependencies = FxHashSet::default();
let mut load = |path: &AbsPath| {
- let _p = profile::span("switch_workspaces::load");
+ let _p = tracing::span!(tracing::Level::INFO, "switch_workspaces::load").entered();
let vfs_path = vfs::VfsPath::from(path.to_path_buf());
crate_graph_file_dependencies.insert(vfs_path.clone());
match vfs.file_id(&vfs_path) {
@@ -585,7 +586,7 @@ impl GlobalState {
}
fn reload_flycheck(&mut self) {
- let _p = profile::span("GlobalState::reload_flycheck");
+ let _p = tracing::span!(tracing::Level::INFO, "GlobalState::reload_flycheck").entered();
let config = self.config.flycheck();
let sender = self.flycheck_sender.clone();
let invocation_strategy = match config {
diff --git a/crates/rust-analyzer/src/tracing/config.rs b/crates/rust-analyzer/src/tracing/config.rs
new file mode 100644
index 0000000000..fcdbd1e6d9
--- /dev/null
+++ b/crates/rust-analyzer/src/tracing/config.rs
@@ -0,0 +1,108 @@
+//! Simple logger that logs either to stderr or to a file, using `tracing_subscriber`
+//! filter syntax and `tracing_appender` for non-blocking output.
+
+use std::io;
+
+use anyhow::Context;
+use tracing::{level_filters::LevelFilter, Level};
+use tracing_subscriber::{
+ filter::{self, Targets},
+ fmt::{format::FmtSpan, MakeWriter},
+ layer::SubscriberExt,
+ util::SubscriberInitExt,
+ Layer, Registry,
+};
+use tracing_tree::HierarchicalLayer;
+
+use crate::tracing::hprof;
+
+pub struct Config<T> {
+ pub writer: T,
+ pub filter: String,
+ /// The meaning of CHALK_DEBUG is to tell chalk crates
+ /// (i.e. chalk-solve, chalk-ir, chalk-recursive) how to filter tracing
+ /// logs. But now we can only have one filter, which means we have to
+ /// merge the chalk filter into our main filter (from the RA_LOG env var).
+ ///
+ /// The acceptable syntax of CHALK_DEBUG is `target[span{field=value}]=level`.
+ /// As the value should only affect chalk crates, we'd better manually
+ /// specify the target. And for simplicity, CHALK_DEBUG only accepts values
+ /// that specify a level.
+ pub chalk_filter: Option<String>,
+ /// Filtering syntax, set in a shell:
+ /// ```
+ /// env RA_PROFILE=* // dump everything
+ /// env RA_PROFILE=foo|bar|baz // enable only selected entries
+ /// env RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10
+ /// ```
+ pub profile_filter: Option<String>,
+}
+
+impl<T> Config<T>
+where
+ T: for<'writer> MakeWriter<'writer> + Send + Sync + 'static,
+{
+ pub fn init(self) -> anyhow::Result<()> {
+ let filter: Targets = self
+ .filter
+ .parse()
+ .with_context(|| format!("invalid log filter: `{}`", self.filter))?;
+
+ let writer = self.writer;
+
+ let ra_fmt_layer = tracing_subscriber::fmt::layer()
+ .with_span_events(FmtSpan::CLOSE)
+ .with_writer(writer)
+ .with_filter(filter);
+
+ let mut chalk_layer = None;
+ if let Some(chalk_filter) = self.chalk_filter {
+ let level: LevelFilter =
+ chalk_filter.parse().with_context(|| "invalid chalk log filter")?;
+
+ let chalk_filter = Targets::new()
+ .with_target("chalk_solve", level)
+ .with_target("chalk_ir", level)
+ .with_target("chalk_recursive", level);
+ chalk_layer = Some(
+ HierarchicalLayer::default()
+ .with_indent_lines(true)
+ .with_ansi(false)
+ .with_indent_amount(2)
+ .with_writer(io::stderr)
+ .with_filter(chalk_filter),
+ );
+ };
+
+ let mut profiler_layer = None;
+ if let Some(spec) = self.profile_filter {
+ let (write_filter, allowed_names) = hprof::WriteFilter::from_spec(&spec);
+
+ // this filter is the first pass for `tracing`: these are all the "profiling" spans, but things like
+ // span depth or duration are not filtered here: that only occurs at write time.
+ let profile_filter = filter::filter_fn(move |metadata| {
+ let allowed = match &allowed_names {
+ Some(names) => names.contains(metadata.name()),
+ None => true,
+ };
+
+ metadata.is_span()
+ && allowed
+ && metadata.level() >= &Level::INFO
+ && !metadata.target().starts_with("salsa")
+ && !metadata.target().starts_with("chalk")
+ });
+
+ let layer = hprof::SpanTree::default()
+ .aggregate(true)
+ .spec_filter(write_filter)
+ .with_filter(profile_filter);
+
+ profiler_layer = Some(layer);
+ }
+
+ Registry::default().with(ra_fmt_layer).with(chalk_layer).with(profiler_layer).try_init()?;
+
+ Ok(())
+ }
+}
diff --git a/crates/rust-analyzer/src/tracing/hprof.rs b/crates/rust-analyzer/src/tracing/hprof.rs
new file mode 100644
index 0000000000..c99b551df8
--- /dev/null
+++ b/crates/rust-analyzer/src/tracing/hprof.rs
@@ -0,0 +1,272 @@
+//! Consumer of `tracing` data, which prints a hierarchical profile.
+//!
+//! Based on https://github.com/davidbarsky/tracing-tree, but does less, while
+//! actually printing timings for spans by default. The code here is vendored from
+//! https://github.com/matklad/tracing-span-tree.
+//!
+//! Usage:
+//!
+//! ```rust
+//! let layer = hprof::SpanTree::default();
+//! Registry::default().with(layer).init();
+//! ```
+//!
+//! Example output:
+//!
+//! ```text
+//! 8.37ms top_level
+//! 1.09ms middle
+//! 1.06ms leaf
+//! 1.06ms middle
+//! 3.12ms middle
+//! 1.06ms leaf
+//! 3.06ms middle
+//! ```
+//!
+//! Same data, but with `.aggregate(true)`:
+//!
+//! ```text
+//! 8.39ms top_level
+//! 8.35ms 4 middle
+//! 2.13ms 2 leaf
+//! ```
+
+use std::{
+ fmt::Write,
+ mem,
+ time::{Duration, Instant},
+};
+
+use rustc_hash::FxHashSet;
+use tracing::{
+ field::{Field, Visit},
+ span::Attributes,
+ Event, Id, Level, Subscriber,
+};
+use tracing_subscriber::{
+ filter,
+ layer::{Context, SubscriberExt},
+ registry::LookupSpan,
+ Layer, Registry,
+};
+
+use crate::tracing::hprof;
+
+pub fn init(spec: &str) {
+ let (write_filter, allowed_names) = WriteFilter::from_spec(spec);
+
+ // this filter is the first pass for `tracing`: these are all the "profiling" spans, but things like
+ // span depth or duration are not filtered here: that only occurs at write time.
+ let profile_filter = filter::filter_fn(move |metadata| {
+ let allowed = match &allowed_names {
+ Some(names) => names.contains(metadata.name()),
+ None => true,
+ };
+
+ metadata.is_span()
+ && allowed
+ && metadata.level() >= &Level::INFO
+ && !metadata.target().starts_with("salsa")
+ && !metadata.target().starts_with("chalk")
+ });
+
+ let layer = hprof::SpanTree::default()
+ .aggregate(true)
+ .spec_filter(write_filter)
+ .with_filter(profile_filter);
+
+ let subscriber = Registry::default().with(layer);
+ tracing::subscriber::set_global_default(subscriber).unwrap();
+}
+
+#[derive(Default, Debug)]
+pub(crate) struct SpanTree {
+ aggregate: bool,
+ write_filter: WriteFilter,
+}
+
+impl SpanTree {
+ /// Merge identical sibling spans together.
+ pub(crate) fn aggregate(self, yes: bool) -> SpanTree {
+ SpanTree { aggregate: yes, ..self }
+ }
+
+ /// Add a write-time filter for span duration or tree depth.
+ pub(crate) fn spec_filter(self, write_filter: WriteFilter) -> SpanTree {
+ SpanTree { write_filter, ..self }
+ }
+}
+
+struct Data {
+ start: Instant,
+ children: Vec<Node>,
+ fields: String,
+}
+
+impl Data {
+ fn new(attrs: &Attributes<'_>) -> Self {
+ let mut data = Self { start: Instant::now(), children: Vec::new(), fields: String::new() };
+
+ let mut visitor = DataVisitor { string: &mut data.fields };
+ attrs.record(&mut visitor);
+ data
+ }
+
+ fn into_node(self, name: &'static str) -> Node {
+ Node {
+ name,
+ fields: self.fields,
+ count: 1,
+ duration: self.start.elapsed(),
+ children: self.children,
+ }
+ }
+}
+
+pub struct DataVisitor<'a> {
+ string: &'a mut String,
+}
+
+impl<'a> Visit for DataVisitor<'a> {
+ fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) {
+ write!(self.string, "{} = {:?} ", field.name(), value).unwrap();
+ }
+}
+
+impl<S> Layer<S> for SpanTree
+where
+ S: Subscriber + for<'span> LookupSpan<'span>,
+{
+ fn on_new_span(&self, attrs: &Attributes<'_>, id: &Id, ctx: Context<'_, S>) {
+ let span = ctx.span(id).unwrap();
+
+ let data = Data::new(attrs);
+ span.extensions_mut().insert(data);
+ }
+
+ fn on_event(&self, _event: &Event<'_>, _ctx: Context<'_, S>) {}
+
+ fn on_close(&self, id: Id, ctx: Context<'_, S>) {
+ let span = ctx.span(&id).unwrap();
+ let data = span.extensions_mut().remove::<Data>().unwrap();
+ let mut node = data.into_node(span.name());
+
+ match span.parent() {
+ Some(parent_span) => {
+ parent_span.extensions_mut().get_mut::<Data>().unwrap().children.push(node);
+ }
+ None => {
+ if self.aggregate {
+ node.aggregate()
+ }
+ node.print(&self.write_filter)
+ }
+ }
+ }
+}
+
+#[derive(Default)]
+struct Node {
+ name: &'static str,
+ fields: String,
+ count: u32,
+ duration: Duration,
+ children: Vec<Node>,
+}
+
+impl Node {
+ fn print(&self, filter: &WriteFilter) {
+ self.go(0, filter)
+ }
+
+ fn go(&self, level: usize, filter: &WriteFilter) {
+ if self.duration > filter.longer_than && level < filter.depth {
+ let duration = ms(self.duration);
+ let current_indent = level * 2;
+
+ let mut out = String::new();
+ let _ = write!(out, "{:current_indent$} {duration} {:<6}", "", self.name);
+
+ if !self.fields.is_empty() {
+ let _ = write!(out, " @ {}", self.fields);
+ }
+
+ if self.count > 1 {
+ let _ = write!(out, " ({} calls)", self.count);
+ }
+
+ eprintln!("{}", out);
+
+ for child in &self.children {
+ child.go(level + 1, filter)
+ }
+ }
+ }
+
+ fn aggregate(&mut self) {
+ if self.children.is_empty() {
+ return;
+ }
+
+ self.children.sort_by_key(|it| it.name);
+ let mut idx = 0;
+ for i in 1..self.children.len() {
+ if self.children[idx].name == self.children[i].name {
+ let child = mem::take(&mut self.children[i]);
+ self.children[idx].duration += child.duration;
+ self.children[idx].count += child.count;
+ self.children[idx].children.extend(child.children);
+ } else {
+ idx += 1;
+ assert!(idx <= i);
+ self.children.swap(idx, i);
+ }
+ }
+ self.children.truncate(idx + 1);
+ for child in &mut self.children {
+ child.aggregate()
+ }
+ }
+}
+
+#[derive(Default, Clone, Debug)]
+pub(crate) struct WriteFilter {
+ depth: usize,
+ longer_than: Duration,
+}
+
+impl WriteFilter {
+ pub(crate) fn from_spec(mut spec: &str) -> (WriteFilter, Option<FxHashSet<String>>) {
+ let longer_than = if let Some(idx) = spec.rfind('>') {
+ let longer_than = spec[idx + 1..].parse().expect("invalid profile longer_than");
+ spec = &spec[..idx];
+ Duration::from_millis(longer_than)
+ } else {
+ Duration::new(0, 0)
+ };
+
+ let depth = if let Some(idx) = spec.rfind('@') {
+ let depth: usize = spec[idx + 1..].parse().expect("invalid profile depth");
+ spec = &spec[..idx];
+ depth
+ } else {
+ 999
+ };
+ let allowed = if spec == "*" {
+ None
+ } else {
+ Some(FxHashSet::from_iter(spec.split('|').map(String::from)))
+ };
+ (WriteFilter { depth, longer_than }, allowed)
+ }
+}
+
+#[allow(non_camel_case_types)]
+struct ms(Duration);
+
+impl std::fmt::Display for ms {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let n = self.0.as_millis();
+ write!(f, "{n:5}ms")
+ }
+}
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index 58a99cc447..19890110d5 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -31,6 +31,7 @@ use lsp_types::{
};
use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams};
use serde_json::json;
+use stdx::format_to_acc;
use test_utils::skip_slow_tests;
use crate::{
@@ -38,9 +39,6 @@ use crate::{
testdir::TestDir,
};
-const PROFILE: &str = "";
-// const PROFILE: &'static str = "*@3>100";
-
#[test]
fn completes_items_from_standard_library() {
if skip_slow_tests() {
@@ -594,8 +592,10 @@ fn diagnostics_dont_block_typing() {
return;
}
- let librs: String = (0..10).map(|i| format!("mod m{i};")).collect();
- let libs: String = (0..10).map(|i| format!("//- /src/m{i}.rs\nfn foo() {{}}\n\n")).collect();
+ let librs: String = (0..10).fold(String::new(), |mut acc, i| format_to_acc!(acc, "mod m{i};"));
+ let libs: String = (0..10).fold(String::new(), |mut acc, i| {
+ format_to_acc!(acc, "//- /src/m{i}.rs\nfn foo() {{}}\n\n")
+ });
let server = Project::with_fixture(&format!(
r#"
//- /Cargo.toml
diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs
index e16990eabd..d699374f9c 100644
--- a/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -9,11 +9,11 @@ use std::{
use crossbeam_channel::{after, select, Receiver};
use lsp_server::{Connection, Message, Notification, Request};
use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url};
-use rust_analyzer::{config::Config, lsp, main_loop};
+use rust_analyzer::{config::Config, lsp, main_loop, tracing};
use serde::Serialize;
use serde_json::{json, to_string_pretty, Value};
use test_utils::FixtureWithProjectMeta;
-use tracing_subscriber::{prelude::*, Layer};
+use tracing_subscriber::fmt::TestWriter;
use vfs::AbsPathBuf;
use crate::testdir::TestDir;
@@ -91,12 +91,14 @@ impl Project<'_> {
static INIT: Once = Once::new();
INIT.call_once(|| {
- let filter: tracing_subscriber::filter::Targets =
- std::env::var("RA_LOG").ok().and_then(|it| it.parse().ok()).unwrap_or_default();
- let layer =
- tracing_subscriber::fmt::Layer::new().with_test_writer().with_filter(filter);
- tracing_subscriber::Registry::default().with(layer).init();
- profile::init_from(crate::PROFILE);
+ let _ = tracing::Config {
+ writer: TestWriter::default(),
+ // Deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually
+ // useful information in there for debugging.
+ filter: std::env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_string()),
+ chalk_filter: std::env::var("CHALK_DEBUG").ok(),
+ profile_filter: std::env::var("RA_PROFILE").ok(),
+ };
});
let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } =
diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml
index e6014cf812..2e3f9113b0 100644
--- a/crates/stdx/Cargo.toml
+++ b/crates/stdx/Cargo.toml
@@ -13,7 +13,7 @@ doctest = false
[dependencies]
backtrace = { version = "0.3.67", optional = true }
-always-assert = { version = "0.1.2", features = ["log"] }
+always-assert = { version = "0.2.0", features = ["tracing"] }
jod-thread = "0.1.2"
libc.workspace = true
crossbeam-channel = "0.5.5"
diff --git a/crates/stdx/src/macros.rs b/crates/stdx/src/macros.rs
index d71e418c89..85d9008fe1 100644
--- a/crates/stdx/src/macros.rs
+++ b/crates/stdx/src/macros.rs
@@ -1,15 +1,5 @@
//! Convenience macros.
-#[macro_export]
-macro_rules! eprintln {
- ($($tt:tt)*) => {{
- if $crate::is_ci() {
- panic!("Forgot to remove debug-print?")
- }
- std::eprintln!($($tt)*)
- }}
-}
-
/// Appends formatted string to a `String`.
#[macro_export]
macro_rules! format_to {
@@ -24,6 +14,22 @@ macro_rules! format_to {
};
}
+/// Appends formatted string to a `String` and returns the `String`.
+///
+/// Useful for folding iterators into a `String`.
+#[macro_export]
+macro_rules! format_to_acc {
+ ($buf:expr, $lit:literal $($arg:tt)*) => {
+ {
+ use ::std::fmt::Write as _;
+ // We can't do ::std::fmt::Write::write_fmt($buf, format_args!($lit $($arg)*))
+ // unfortunately, as that loses out on autoref behavior.
+ _ = $buf.write_fmt(format_args!($lit $($arg)*));
+ $buf
+ }
+ };
+}
+
/// Generates `From` impls for `Enum E { Foo(Foo), Bar(Bar) }` enums
///
/// # Example
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 9f78614bba..a0fd73ee13 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -22,6 +22,7 @@ once_cell = "1.17.0"
indexmap.workspace = true
smol_str.workspace = true
triomphe.workspace = true
+tracing.workspace = true
ra-ap-rustc_lexer.workspace = true
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index c4548b1647..01f2af419e 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -120,7 +120,7 @@ pub struct TreeDiff {
impl TreeDiff {
pub fn into_text_edit(&self, builder: &mut TextEditBuilder) {
- let _p = profile::span("into_text_edit");
+ let _p = tracing::span!(tracing::Level::INFO, "into_text_edit").entered();
for (anchor, to) in &self.insertions {
let offset = match anchor {
@@ -149,7 +149,7 @@ impl TreeDiff {
///
/// This function tries to find a fine-grained diff.
pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
- let _p = profile::span("diff");
+ let _p = tracing::span!(tracing::Level::INFO, "diff").entered();
let mut diff = TreeDiff {
replacements: FxHashMap::default(),
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs
index 247dfe0b45..c9944b75b0 100644
--- a/crates/syntax/src/ast/edit_in_place.rs
+++ b/crates/syntax/src/ast/edit_in_place.rs
@@ -538,9 +538,13 @@ impl ast::UseTree {
/// `foo::bar` -> `{foo::bar}`
///
/// `{foo::bar}` -> `{foo::bar}`
- pub fn wrap_in_tree_list(&self) {
- if self.path().is_none() {
- return;
+ pub fn wrap_in_tree_list(&self) -> Option<()> {
+ if self.use_tree_list().is_some()
+ && self.path().is_none()
+ && self.star_token().is_none()
+ && self.rename().is_none()
+ {
+ return None;
}
let subtree = self.clone_subtree().clone_for_update();
ted::remove_all_iter(self.syntax().children_with_tokens());
@@ -548,6 +552,7 @@ impl ast::UseTree {
self.syntax(),
make::use_tree_list(once(subtree)).clone_for_update().syntax(),
);
+ Some(())
}
}
@@ -960,10 +965,10 @@ impl ast::IdentPat {
}
pub trait HasVisibilityEdit: ast::HasVisibility {
- fn set_visibility(&self, visbility: ast::Visibility) {
+ fn set_visibility(&self, visibility: ast::Visibility) {
match self.visibility() {
Some(current_visibility) => {
- ted::replace(current_visibility.syntax(), visbility.syntax())
+ ted::replace(current_visibility.syntax(), visibility.syntax())
}
None => {
let vis_before = self
@@ -972,7 +977,7 @@ pub trait HasVisibilityEdit: ast::HasVisibility {
.find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
.unwrap_or_else(|| self.syntax().first_child_or_token().unwrap());
- ted::insert(ted::Position::before(vis_before), visbility.syntax());
+ ted::insert(ted::Position::before(vis_before), visibility.syntax());
}
}
}
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index 62d64319e3..d5eda8f15e 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -9,10 +9,11 @@
//! API should require to assemble every node piecewise. The trick of
//! `parse(format!())` we use internally is an implementation detail -- long
//! term, it will be replaced with direct tree manipulation.
+
use itertools::Itertools;
use parser::T;
use rowan::NodeOrToken;
-use stdx::{format_to, never};
+use stdx::{format_to, format_to_acc, never};
use crate::{ast, utils::is_raw_identifier, AstNode, SourceFile, SyntaxKind, SyntaxToken};
@@ -759,15 +760,12 @@ pub fn match_arm_with_guard(
}
pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList {
- let arms_str = arms
- .into_iter()
- .map(|arm| {
- let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like());
- let comma = if needs_comma { "," } else { "" };
- let arm = arm.syntax();
- format!(" {arm}{comma}\n")
- })
- .collect::<String>();
+ let arms_str = arms.into_iter().fold(String::new(), |mut acc, arm| {
+ let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like());
+ let comma = if needs_comma { "," } else { "" };
+ let arm = arm.syntax();
+ format_to_acc!(acc, " {arm}{comma}\n")
+ });
return from_text(&arms_str);
fn from_text(text: &str) -> ast::MatchArmList {
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index ce01ee1c35..6e5e4127f4 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -384,7 +384,7 @@ impl ast::UseTreeList {
// the below remove the innermost {}, got `use crate::{{{A}}}`
remove_brace_in_use_tree_list(&self);
- // the below remove othe unnecessary {}, got `use crate::A`
+ // the below remove other unnecessary {}, got `use crate::A`
while let Some(parent_use_tree_list) = self.parent_use_tree().parent_use_tree_list() {
remove_brace_in_use_tree_list(&parent_use_tree_list);
self = parent_use_tree_list;
diff --git a/crates/syntax/src/tests.rs b/crates/syntax/src/tests.rs
index 8ae1242cf7..4c0a538f71 100644
--- a/crates/syntax/src/tests.rs
+++ b/crates/syntax/src/tests.rs
@@ -11,6 +11,7 @@ use std::{
use ast::HasName;
use expect_test::expect_file;
use rayon::prelude::*;
+use stdx::format_to_acc;
use test_utils::{bench, bench_fixture, project_root};
use crate::{ast, fuzz, AstNode, SourceFile, SyntaxError};
@@ -104,10 +105,9 @@ fn self_hosting_parsing() {
.collect::<Vec<_>>();
if !errors.is_empty() {
- let errors = errors
- .into_iter()
- .map(|(path, err)| format!("{}: {:?}\n", path.display(), err[0]))
- .collect::<String>();
+ let errors = errors.into_iter().fold(String::new(), |mut acc, (path, err)| {
+ format_to_acc!(acc, "{}: {:?}\n", path.display(), err[0])
+ });
panic!("Parsing errors:\n{errors}\n");
}
}
diff --git a/crates/test-utils/Cargo.toml b/crates/test-utils/Cargo.toml
index 56067d8341..2ff1fad6c2 100644
--- a/crates/test-utils/Cargo.toml
+++ b/crates/test-utils/Cargo.toml
@@ -15,10 +15,11 @@ doctest = false
# Avoid adding deps here, this crate is widely used in tests it should compile fast!
dissimilar = "1.0.7"
text-size.workspace = true
+tracing.workspace = true
rustc-hash.workspace = true
stdx.workspace = true
profile.workspace = true
[lints]
-workspace = true \ No newline at end of file
+workspace = true
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index b015dd69b5..9c25d88cb8 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -28,7 +28,7 @@
//! env: option
//! eq: sized
//! error: fmt
-//! fmt: result, transmute, coerce_unsized
+//! fmt: option, result, transmute, coerce_unsized
//! fn:
//! from: sized
//! future: pin
@@ -987,6 +987,10 @@ pub mod fmt {
Arguments { pieces, fmt: None, args }
}
+ pub const fn new_const(pieces: &'a [&'static str]) -> Arguments<'a> {
+ Arguments { pieces, fmt: None, args: &[] }
+ }
+
pub fn new_v1_formatted(
pieces: &'a [&'static str],
args: &'a [rt::Argument<'a>],
@@ -1346,6 +1350,9 @@ pub mod iter {
// region:panic
mod panic {
pub macro panic_2021 {
+ () => (
+ $crate::panicking::panic("explicit panic")
+ ),
($($t:tt)+) => (
$crate::panicking::panic_fmt($crate::const_format_args!($($t)+))
),
@@ -1357,6 +1364,11 @@ mod panicking {
pub const fn panic_fmt(_fmt: crate::fmt::Arguments<'_>) -> ! {
loop {}
}
+
+ #[lang = "panic"]
+ pub const fn panic(expr: &'static str) -> ! {
+ panic_fmt(crate::fmt::Arguments::new_const(&[expr]))
+ }
}
// endregion:panic
diff --git a/docs/dev/syntax.md b/docs/dev/syntax.md
index fd6f220f4f..6c4daecc58 100644
--- a/docs/dev/syntax.md
+++ b/docs/dev/syntax.md
@@ -128,7 +128,7 @@ Interior nodes are shared as well (for example in `(1 + 1) * (1 + 1)`).
Note that, the result of the interning is an `Arc<Node>`.
That is, it's not an index into interning table, so you don't have to have the table around to do anything with the tree.
Each tree is fully self-contained (although different trees might share parts).
-Currently, the interner is created per-file, but it will be easy to use a per-thread or per-some-contex one.
+Currently, the interner is created per-file, but it will be easy to use a per-thread or per-some-context one.
We use a `TextSize`, a newtyped `u32`, to store the length of the text.
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc
index f887bb9df3..cfa7503d73 100644
--- a/docs/user/generated_config.adoc
+++ b/docs/user/generated_config.adoc
@@ -777,6 +777,11 @@ Internal config, path to proc-macro server executable.
--
Exclude imports from find-all-references.
--
+[[rust-analyzer.references.excludeTests]]rust-analyzer.references.excludeTests (default: `false`)::
++
+--
+Exclude tests from find-all-references.
+--
[[rust-analyzer.rename.allowExternalItems]]rust-analyzer.rename.allowExternalItems (default: `false`)::
+
--
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc
index 069a62ddbf..9e9ea25779 100644
--- a/docs/user/manual.adoc
+++ b/docs/user/manual.adoc
@@ -369,7 +369,7 @@ EOF
See https://sharksforarms.dev/posts/neovim-rust/ for more tips on getting started.
-Check out https://github.com/simrat39/rust-tools.nvim for a batteries included rust-analyzer setup for Neovim.
+Check out https://github.com/mrcjkb/rustaceanvim for a batteries included rust-analyzer setup for Neovim.
==== vim-lsp
diff --git a/editors/code/language-configuration.json b/editors/code/language-configuration.json
index a2af8b51a9..1c348b63f1 100644
--- a/editors/code/language-configuration.json
+++ b/editors/code/language-configuration.json
@@ -6,9 +6,7 @@
"brackets": [
["{", "}"],
["[", "]"],
- ["(", ")"],
- ["#[", "]"],
- ["#![", "]"]
+ ["(", ")"]
],
"colorizedBracketPairs": [
["{", "}"],
@@ -19,8 +17,6 @@
{ "open": "{", "close": "}" },
{ "open": "[", "close": "]" },
{ "open": "(", "close": ")" },
- { "open": "#[", "close": "]" },
- { "open": "#![", "close": "]" },
{ "open": "\"", "close": "\"", "notIn": ["string"] },
{ "open": "/*", "close": " */" },
{ "open": "`", "close": "`", "notIn": ["string"] }
diff --git a/editors/code/package.json b/editors/code/package.json
index 5ed5146ea1..841e364ed8 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -1505,6 +1505,11 @@
"default": false,
"type": "boolean"
},
+ "rust-analyzer.references.excludeTests": {
+ "markdownDescription": "Exclude tests from find-all-references.",
+ "default": false,
+ "type": "boolean"
+ },
"rust-analyzer.rename.allowExternalItems": {
"markdownDescription": "Allow renaming of items not belonging to the loaded workspaces.",
"default": false,
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts
index 599cfb4ff7..c386b9e5d8 100644
--- a/editors/code/src/main.ts
+++ b/editors/code/src/main.ts
@@ -25,16 +25,7 @@ export async function deactivate() {
export async function activate(
context: vscode.ExtensionContext,
): Promise<RustAnalyzerExtensionApi> {
- if (vscode.extensions.getExtension("rust-lang.rust")) {
- vscode.window
- .showWarningMessage(
- `You have both the rust-analyzer (rust-lang.rust-analyzer) and Rust (rust-lang.rust) ` +
- "plugins enabled. These are known to conflict and cause various functions of " +
- "both plugins to not work correctly. You should disable one of them.",
- "Got it",
- )
- .then(() => {}, console.error);
- }
+ checkConflictingExtensions();
const ctx = new Ctx(context, createCommands(), fetchWorkspace());
// VS Code doesn't show a notification when an extension fails to activate
@@ -200,3 +191,26 @@ function createCommands(): Record<string, CommandFactory> {
revealDependency: { enabled: commands.revealDependency },
};
}
+
+function checkConflictingExtensions() {
+ if (vscode.extensions.getExtension("rust-lang.rust")) {
+ vscode.window
+ .showWarningMessage(
+ `You have both the rust-analyzer (rust-lang.rust-analyzer) and Rust (rust-lang.rust) ` +
+ "plugins enabled. These are known to conflict and cause various functions of " +
+ "both plugins to not work correctly. You should disable one of them.",
+ "Got it",
+ )
+ .then(() => {}, console.error);
+ }
+
+ if (vscode.extensions.getExtension("panicbit.cargo")) {
+ vscode.window
+ .showWarningMessage(
+                `You have both the rust-analyzer (rust-lang.rust-analyzer) and Cargo (panicbit.cargo) plugins enabled. ` +
+                    'You can disable the Cargo plugin or set {"cargo.automaticCheck": false} in settings.json to avoid invoking cargo twice.',
+ "Got it",
+ )
+ .then(() => {}, console.error);
+ }
+}
diff --git a/lib/lsp-server/src/error.rs b/lib/lsp-server/src/error.rs
index ebdd153b5b..da55393339 100644
--- a/lib/lsp-server/src/error.rs
+++ b/lib/lsp-server/src/error.rs
@@ -14,7 +14,7 @@ impl ProtocolError {
ProtocolError("disconnected channel".into(), true)
}
- /// Whether this error occured due to a disconnected channel.
+ /// Whether this error occurred due to a disconnected channel.
pub fn channel_is_disconnected(&self) -> bool {
self.1
}