Unnamed repository; edit this file 'description' to name the repository.
Merge from rustc
The Miri Cronjob Bot 2023-12-19
parent 608974f · parent fedbf63 · commit 018ba21
-rw-r--r--.github/workflows/metrics.yaml9
-rw-r--r--Cargo.lock11
-rw-r--r--Cargo.toml2
-rw-r--r--crates/base-db/src/span.rs11
-rw-r--r--crates/hir-def/src/attr/builtin.rs34
-rw-r--r--crates/hir-def/src/child_by_source.rs7
-rw-r--r--crates/hir-def/src/generics.rs9
-rw-r--r--crates/hir-def/src/import_map.rs72
-rw-r--r--crates/hir-def/src/item_tree.rs8
-rw-r--r--crates/hir-def/src/lib.rs20
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mbe/regression.rs96
-rw-r--r--crates/hir-def/src/resolver.rs11
-rw-r--r--crates/hir-expand/src/db.rs24
-rw-r--r--crates/hir-expand/src/eager.rs2
-rw-r--r--crates/hir-expand/src/fixup.rs39
-rw-r--r--crates/hir-expand/src/lib.rs41
-rw-r--r--crates/hir-expand/src/span.rs29
-rw-r--r--crates/hir-ty/src/infer.rs2
-rw-r--r--crates/hir-ty/src/infer/pat.rs40
-rw-r--r--crates/hir-ty/src/infer/path.rs63
-rw-r--r--crates/hir-ty/src/lower.rs22
-rw-r--r--crates/hir-ty/src/tests/incremental.rs72
-rw-r--r--crates/hir-ty/src/tests/patterns.rs38
-rw-r--r--crates/hir/src/semantics.rs27
-rw-r--r--crates/hir/src/source_analyzer.rs5
-rw-r--r--crates/hir/src/symbols.rs44
-rw-r--r--crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs2
-rw-r--r--crates/ide-completion/src/item.rs12
-rw-r--r--crates/ide-completion/src/snippet.rs2
-rw-r--r--crates/ide-completion/src/tests/flyimport.rs26
-rw-r--r--crates/ide-db/src/imports/import_assets.rs202
-rw-r--r--crates/ide-db/src/items_locator.rs12
-rw-r--r--crates/ide-db/src/symbol_index.rs2
-rw-r--r--crates/ide-db/src/test_data/test_doc_alias.txt7
-rw-r--r--crates/ide-db/src/test_data/test_symbol_index_collection.txt29
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs15
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs12
-rw-r--r--crates/ide-diagnostics/src/lib.rs4
-rw-r--r--crates/ide/src/inlay_hints.rs27
-rw-r--r--crates/ide/src/inlay_hints/bind_pat.rs8
-rw-r--r--crates/ide/src/lib.rs8
-rw-r--r--crates/mbe/src/syntax_bridge.rs21
-rw-r--r--crates/mbe/src/tt_iter.rs11
-rw-r--r--crates/proc-macro-api/src/version.rs2
-rw-r--r--crates/proc-macro-srv/src/server.rs4
-rw-r--r--crates/rust-analyzer/Cargo.toml1
-rw-r--r--crates/rust-analyzer/src/bin/main.rs1
-rw-r--r--crates/rust-analyzer/src/cli.rs1
-rw-r--r--crates/rust-analyzer/src/cli/flags.rs16
-rw-r--r--crates/rust-analyzer/src/cli/rustc_tests.rs236
-rw-r--r--crates/rust-analyzer/src/config.rs1
-rw-r--r--crates/rust-analyzer/src/handlers/request.rs19
-rw-r--r--crates/rust-analyzer/src/integrated_benchmarks.rs54
-rw-r--r--crates/syntax/src/ast/node_ext.rs13
-rw-r--r--crates/syntax/src/ast/traits.rs3
-rw-r--r--crates/syntax/src/utils.rs42
-rw-r--r--crates/vfs/src/lib.rs6
-rw-r--r--lib/line-index/Cargo.toml2
-rw-r--r--xtask/src/flags.rs3
-rw-r--r--xtask/src/metrics.rs17
60 files changed, 1083 insertions, 476 deletions
diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml
index 741e559953..e6a9917a0b 100644
--- a/.github/workflows/metrics.yaml
+++ b/.github/workflows/metrics.yaml
@@ -67,7 +67,7 @@ jobs:
other_metrics:
strategy:
matrix:
- names: [self, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18]
+ names: [self, rustc_tests, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18]
runs-on: ubuntu-latest
needs: [setup_cargo, build_metrics]
@@ -118,6 +118,11 @@ jobs:
with:
name: self-${{ github.sha }}
+ - name: Download rustc_tests metrics
+ uses: actions/download-artifact@v3
+ with:
+ name: rustc_tests-${{ github.sha }}
+
- name: Download ripgrep-13.0.0 metrics
uses: actions/download-artifact@v3
with:
@@ -146,7 +151,7 @@ jobs:
chmod 700 ~/.ssh
git clone --depth 1 [email protected]:rust-analyzer/metrics.git
- jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5]" build.json self.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json
+ jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5] * .[6]" build.json self.json rustc_tests.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json
cd metrics
git add .
git -c user.name=Bot -c [email protected] commit --message 📈
diff --git a/Cargo.lock b/Cargo.lock
index 46efbdd93c..227d1db0ec 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -684,7 +684,7 @@ dependencies = [
"indexmap",
"itertools",
"limit",
- "line-index 0.1.0-pre.1",
+ "line-index 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr",
"nohash-hasher",
"once_cell",
@@ -881,9 +881,7 @@ version = "0.0.0"
[[package]]
name = "line-index"
-version = "0.1.0-pre.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2cad96769710c1745e11d4f940a8ff36000ade4bbada4285b001cb8aa2f745ce"
+version = "0.1.1"
dependencies = [
"nohash-hasher",
"text-size",
@@ -891,7 +889,9 @@ dependencies = [
[[package]]
name = "line-index"
-version = "0.1.0"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67d61795376ae2683928c218fda7d7d7db136fd38c06b7552904667f0d55580a"
dependencies = [
"nohash-hasher",
"text-size",
@@ -1545,6 +1545,7 @@ dependencies = [
"triomphe",
"vfs",
"vfs-notify",
+ "walkdir",
"winapi",
"xflags",
"xshell",
diff --git a/Cargo.toml b/Cargo.toml
index f3f01aab8e..1213979c39 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -85,7 +85,7 @@ rustc-dependencies = { path = "./crates/rustc-dependencies", version = "0.0.0" }
proc-macro-test = { path = "./crates/proc-macro-test" }
# In-tree crates that are published separately and follow semver. See lib/README.md
-line-index = { version = "0.1.0-pre.1" }
+line-index = { version = "0.1.1" }
la-arena = { version = "0.3.1" }
lsp-server = { version = "0.7.4" }
diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs
index 3464f4cb6d..d8990eb7ca 100644
--- a/crates/base-db/src/span.rs
+++ b/crates/base-db/src/span.rs
@@ -151,21 +151,26 @@ impl fmt::Debug for HirFileIdRepr {
impl From<FileId> for HirFileId {
fn from(id: FileId) -> Self {
- assert!(id.index() < Self::MAX_FILE_ID);
+ _ = Self::ASSERT_MAX_FILE_ID_IS_SAME;
+ assert!(id.index() <= Self::MAX_HIR_FILE_ID, "FileId index {} is too large", id.index());
HirFileId(id.index())
}
}
impl From<MacroFileId> for HirFileId {
fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
+ _ = Self::ASSERT_MAX_FILE_ID_IS_SAME;
let id = id.as_u32();
- assert!(id < Self::MAX_FILE_ID);
+ assert!(id <= Self::MAX_HIR_FILE_ID, "MacroCallId index {} is too large", id);
HirFileId(id | Self::MACRO_FILE_TAG_MASK)
}
}
impl HirFileId {
- const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
+ const ASSERT_MAX_FILE_ID_IS_SAME: () =
+ [()][(Self::MAX_HIR_FILE_ID != FileId::MAX_FILE_ID) as usize];
+
+ const MAX_HIR_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
#[inline]
diff --git a/crates/hir-def/src/attr/builtin.rs b/crates/hir-def/src/attr/builtin.rs
index 15dceeb8af..48a596f7f5 100644
--- a/crates/hir-def/src/attr/builtin.rs
+++ b/crates/hir-def/src/attr/builtin.rs
@@ -2,7 +2,7 @@
//!
//! The actual definitions were copied from rustc's `compiler/rustc_feature/src/builtin_attrs.rs`.
//!
-//! It was last synchronized with upstream commit e29821ff85a2a3000d226f99f62f89464028d5d6.
+//! It was last synchronized with upstream commit c3def263a44e07e09ae6d57abfc8650227fb4972.
//!
//! The macros were adjusted to only expand to the attribute name, since that is all we need to do
//! name resolution, and `BUILTIN_ATTRIBUTES` is almost entirely unchanged from the original, to
@@ -240,7 +240,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
template!(List: "address, kcfi, memory, thread"), DuplicatesOk,
experimental!(no_sanitize)
),
- gated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing, experimental!(coverage)),
+ gated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing, coverage_attribute, experimental!(coverage)),
ungated!(
doc, Normal, template!(List: "hidden|inline|...", NameValueStr: "string"), DuplicatesOk
@@ -364,7 +364,6 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
allow_internal_unsafe, Normal, template!(Word), WarnFollowing,
"allow_internal_unsafe side-steps the unsafe_code lint",
),
- ungated!(rustc_safe_intrinsic, Normal, template!(Word), DuplicatesOk),
rustc_attr!(rustc_allowed_through_unstable_modules, Normal, template!(Word), WarnFollowing,
"rustc_allowed_through_unstable_modules special cases accidental stabilizations of stable items \
through unstable paths"),
@@ -453,6 +452,12 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
ErrorFollowing,
INTERNAL_UNSTABLE
),
+ rustc_attr!(
+ rustc_confusables, Normal,
+ template!(List: r#""name1", "name2", ..."#),
+ ErrorFollowing,
+ INTERNAL_UNSTABLE,
+ ),
// Enumerates "identity-like" conversion methods to suggest on type mismatch.
rustc_attr!(
rustc_conversion_suggestion, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
@@ -488,6 +493,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_attr!(
rustc_do_not_const_check, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
),
+ // Ensure the argument to this function is &&str during const-check.
+ rustc_attr!(
+ rustc_const_panic_str, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
+ ),
// ==========================================================================
// Internal attributes, Layout related:
@@ -521,6 +530,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
"#[rustc_pass_by_value] is used to mark types that must be passed by value instead of reference."
),
rustc_attr!(
+ rustc_never_returns_null_ptr, Normal, template!(Word), ErrorFollowing,
+ "#[rustc_never_returns_null_ptr] is used to mark functions returning non-null pointers."
+ ),
+ rustc_attr!(
rustc_coherence_is_core, AttributeType::CrateLevel, template!(Word), ErrorFollowing, @only_local: true,
"#![rustc_coherence_is_core] allows inherent methods on builtin types, only intended to be used in `core`."
),
@@ -533,7 +546,11 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
"#[rustc_allow_incoherent_impl] has to be added to all impl items of an incoherent inherent impl."
),
rustc_attr!(
- rustc_deny_explicit_impl, AttributeType::Normal, template!(Word), ErrorFollowing, @only_local: false,
+ rustc_deny_explicit_impl,
+ AttributeType::Normal,
+ template!(List: "implement_via_object = (true|false)"),
+ ErrorFollowing,
+ @only_local: true,
"#[rustc_deny_explicit_impl] enforces that a trait can have no user-provided impls"
),
rustc_attr!(
@@ -614,6 +631,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_doc_primitive, Normal, template!(NameValueStr: "primitive name"), ErrorFollowing,
r#"`rustc_doc_primitive` is a rustc internal attribute"#,
),
+ rustc_attr!(
+ rustc_safe_intrinsic, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_safe_intrinsic]` attribute is used internally to mark intrinsics as safe"
+ ),
// ==========================================================================
// Internal attributes, Testing:
@@ -625,13 +646,16 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_attr!(TEST, rustc_insignificant_dtor, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_strict_coherence, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_variance, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_variance_of_opaques, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_hidden_type_of_opaques, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_layout, Normal, template!(List: "field1, field2, ..."), WarnFollowing),
+ rustc_attr!(TEST, rustc_abi, Normal, template!(List: "field1, field2, ..."), WarnFollowing),
rustc_attr!(TEST, rustc_regions, Normal, template!(Word), WarnFollowing),
rustc_attr!(
TEST, rustc_error, Normal,
template!(Word, List: "span_delayed_bug_from_inside_query"), WarnFollowingWordOnly
),
- rustc_attr!(TEST, rustc_dump_user_substs, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_dump_user_args, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_evaluate_where_clauses, Normal, template!(Word), WarnFollowing),
rustc_attr!(
TEST, rustc_if_this_changed, Normal, template!(Word, List: "DepNode"), DuplicatesOk
diff --git a/crates/hir-def/src/child_by_source.rs b/crates/hir-def/src/child_by_source.rs
index 4cfd318a43..c82d2347de 100644
--- a/crates/hir-def/src/child_by_source.rs
+++ b/crates/hir-def/src/child_by_source.rs
@@ -5,8 +5,7 @@
//! node for a *child*, and get its hir.
use either::Either;
-use hir_expand::HirFileId;
-use syntax::ast::HasDocComments;
+use hir_expand::{attrs::collect_attrs, HirFileId};
use crate::{
db::DefDatabase,
@@ -118,8 +117,8 @@ impl ChildBySource for ItemScope {
|(ast_id, calls)| {
let adt = ast_id.to_node(db.upcast());
calls.for_each(|(attr_id, call_id, calls)| {
- if let Some(Either::Left(attr)) =
- adt.doc_comments_and_attrs().nth(attr_id.ast_index())
+ if let Some((_, Either::Left(attr))) =
+ collect_attrs(&adt).nth(attr_id.ast_index())
{
res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into()));
}
diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs
index 0d95d916ff..f5324f052e 100644
--- a/crates/hir-def/src/generics.rs
+++ b/crates/hir-def/src/generics.rs
@@ -222,11 +222,10 @@ impl GenericParams {
let module = loc.container.module(db);
let func_data = db.function_data(id);
- // Don't create an `Expander` nor call `loc.source(db)` if not needed since this
- // causes a reparse after the `ItemTree` has been created.
- let mut expander = Lazy::new(|| {
- (module.def_map(db), Expander::new(db, loc.source(db).file_id, module))
- });
+ // Don't create an `Expander` if not needed since this
+ // could cause a reparse after the `ItemTree` has been created due to the spanmap.
+ let mut expander =
+ Lazy::new(|| (module.def_map(db), Expander::new(db, loc.id.file_id(), module)));
for param in func_data.params.iter() {
generic_params.fill_implicit_impl_trait_args(db, &mut expander, param);
}
diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs
index d589fbe347..26d333f9a0 100644
--- a/crates/hir-def/src/import_map.rs
+++ b/crates/hir-def/src/import_map.rs
@@ -9,6 +9,7 @@ use indexmap::IndexMap;
use itertools::Itertools;
use rustc_hash::{FxHashSet, FxHasher};
use smallvec::SmallVec;
+use stdx::format_to;
use triomphe::Arc;
use crate::{
@@ -53,13 +54,25 @@ pub struct ImportMap {
fst: fst::Map<Vec<u8>>,
}
-#[derive(Copy, Clone, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]
enum IsTraitAssocItem {
Yes,
No,
}
impl ImportMap {
+ pub fn dump(&self, db: &dyn DefDatabase) -> String {
+ let mut out = String::new();
+ for (k, v) in self.map.iter() {
+ format_to!(out, "{:?} ({:?}) -> ", k, v.1);
+ for v in &v.0 {
+ format_to!(out, "{}:{:?}, ", v.name.display(db.upcast()), v.container);
+ }
+ format_to!(out, "\n");
+ }
+ out
+ }
+
pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
let _p = profile::span("import_map_query");
@@ -68,26 +81,31 @@ impl ImportMap {
let mut importables: Vec<_> = map
.iter()
// We've only collected items, whose name cannot be tuple field.
- .flat_map(|(&item, (info, _))| {
- info.iter()
- .map(move |info| (item, info.name.as_str().unwrap().to_ascii_lowercase()))
+ .flat_map(|(&item, (info, is_assoc))| {
+ info.iter().map(move |info| {
+ (item, *is_assoc, info.name.as_str().unwrap().to_ascii_lowercase())
+ })
})
.collect();
- importables.sort_by(|(_, lhs_name), (_, rhs_name)| lhs_name.cmp(rhs_name));
+ importables.sort_by(|(_, l_is_assoc, lhs_name), (_, r_is_assoc, rhs_name)| {
+ lhs_name.cmp(rhs_name).then_with(|| l_is_assoc.cmp(r_is_assoc))
+ });
importables.dedup();
// Build the FST, taking care not to insert duplicate values.
let mut builder = fst::MapBuilder::memory();
- let iter =
- importables.iter().enumerate().dedup_by(|(_, (_, lhs)), (_, (_, rhs))| lhs == rhs);
- for (start_idx, (_, name)) in iter {
+ let iter = importables
+ .iter()
+ .enumerate()
+ .dedup_by(|(_, (_, _, lhs)), (_, (_, _, rhs))| lhs == rhs);
+ for (start_idx, (_, _, name)) in iter {
let _ = builder.insert(name, start_idx as u64);
}
Arc::new(ImportMap {
map,
fst: builder.into_map(),
- importables: importables.into_iter().map(|(item, _)| item).collect(),
+ importables: importables.into_iter().map(|(item, _, _)| item).collect(),
})
}
@@ -328,20 +346,20 @@ impl Query {
}
/// Checks whether the import map entry matches the query.
- fn import_matches(
- &self,
- db: &dyn DefDatabase,
- import: &ImportInfo,
- enforce_lowercase: bool,
- ) -> bool {
+ fn import_matches(&self, import: &ImportInfo, enforce_lowercase: bool) -> bool {
let _p = profile::span("import_map::Query::import_matches");
// FIXME: Can we get rid of the alloc here?
- let mut input = import.name.display(db.upcast()).to_string();
+ let input = import.name.to_smol_str();
+ let mut _s_slot;
let case_insensitive = enforce_lowercase || !self.case_sensitive;
- if case_insensitive {
- input.make_ascii_lowercase();
- }
+ let input = if case_insensitive {
+ _s_slot = String::from(input);
+ _s_slot.make_ascii_lowercase();
+ &*_s_slot
+ } else {
+ &*input
+ };
let query_string = if case_insensitive { &self.lowercased } else { &self.query };
@@ -351,7 +369,7 @@ impl Query {
SearchMode::Fuzzy => {
let mut input_chars = input.chars();
for query_char in query_string.chars() {
- if input_chars.find(|&it| it == query_char).is_none() {
+ if !input_chars.any(|it| it == query_char) {
return false;
}
}
@@ -372,6 +390,7 @@ pub fn search_dependencies(
let _p = profile::span("search_dependencies").detail(|| format!("{query:?}"));
let graph = db.crate_graph();
+
let import_maps: Vec<_> =
graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect();
@@ -386,22 +405,23 @@ pub fn search_dependencies(
let mut res = FxHashSet::default();
let mut common_importable_data_scratch = vec![];
+ // FIXME: Improve this, its rather unreadable and does duplicate amount of work
while let Some((_, indexed_values)) = stream.next() {
for &IndexedValue { index, value } in indexed_values {
let import_map = &import_maps[index];
let importables @ [importable, ..] = &import_map.importables[value as usize..] else {
continue;
};
-
let &(ref importable_data, is_trait_assoc_item) = &import_map.map[importable];
if !query.matches_assoc_mode(is_trait_assoc_item) {
continue;
}
+ // Fetch all the known names of this importable item (to handle import aliases/renames)
common_importable_data_scratch.extend(
importable_data
.iter()
- .filter(|&info| query.import_matches(db, info, true))
+ .filter(|&info| query.import_matches(info, true))
// Name shared by the importable items in this group.
.map(|info| info.name.to_smol_str()),
);
@@ -415,6 +435,7 @@ pub fn search_dependencies(
common_importable_data_scratch.drain(..).flat_map(|common_importable_name| {
// Add the items from this name group. Those are all subsequent items in
// `importables` whose name match `common_importable_name`.
+
importables
.iter()
.copied()
@@ -430,11 +451,8 @@ pub fn search_dependencies(
.filter(move |item| {
!query.case_sensitive || {
// we've already checked the common importables name case-insensitively
- let &(ref import_infos, assoc_mode) = &import_map.map[item];
- query.matches_assoc_mode(assoc_mode)
- && import_infos
- .iter()
- .any(|info| query.import_matches(db, info, false))
+ let &(ref import_infos, _) = &import_map.map[item];
+ import_infos.iter().any(|info| query.import_matches(info, false))
}
})
});
diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs
index 16144394e3..3d2cddffa3 100644
--- a/crates/hir-def/src/item_tree.rs
+++ b/crates/hir-def/src/item_tree.rs
@@ -106,11 +106,6 @@ impl ItemTree {
pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
let _p = profile::span("file_item_tree_query").detail(|| format!("{file_id:?}"));
let syntax = db.parse_or_expand(file_id);
- if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax)
- {
- // FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic
- return Default::default();
- }
let ctx = lower::Ctx::new(db, file_id);
let mut top_attrs = None;
@@ -129,6 +124,9 @@ impl ItemTree {
ctx.lower_macro_stmts(stmts)
},
_ => {
+ if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax) {
+ return Default::default();
+ }
panic!("cannot create item tree for file {file_id:?} from {syntax:?} {syntax}");
},
}
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index 7cf13a202e..b5333861cc 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -569,6 +569,8 @@ pub struct ConstBlockLoc {
pub root: hir::ExprId,
}
+/// Something that holds types, required for the current const arg lowering implementation as they
+/// need to be able to query where they are defined.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum TypeOwnerId {
FunctionId(FunctionId),
@@ -581,9 +583,6 @@ pub enum TypeOwnerId {
TypeAliasId(TypeAliasId),
ImplId(ImplId),
EnumVariantId(EnumVariantId),
- // FIXME(const-generic-body): ModuleId should not be a type owner. This needs to be fixed to make `TypeOwnerId` actually
- // useful for assigning ids to in type consts.
- ModuleId(ModuleId),
}
impl TypeOwnerId {
@@ -597,9 +596,7 @@ impl TypeOwnerId {
TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it),
TypeOwnerId::EnumVariantId(it) => GenericDefId::EnumVariantId(it),
- TypeOwnerId::InTypeConstId(_) | TypeOwnerId::ModuleId(_) | TypeOwnerId::StaticId(_) => {
- return None
- }
+ TypeOwnerId::InTypeConstId(_) | TypeOwnerId::StaticId(_) => return None,
})
}
}
@@ -614,8 +611,7 @@ impl_from!(
TraitAliasId,
TypeAliasId,
ImplId,
- EnumVariantId,
- ModuleId
+ EnumVariantId
for TypeOwnerId
);
@@ -713,12 +709,15 @@ pub struct InTypeConstLoc {
pub id: AstId<ast::ConstArg>,
/// The thing this const arg appears in
pub owner: TypeOwnerId,
- pub thing: Box<dyn OpaqueInternableThing>,
+ // FIXME(const-generic-body): The expected type should not be
+ pub expected_ty: Box<dyn OpaqueInternableThing>,
}
impl PartialEq for InTypeConstLoc {
fn eq(&self, other: &Self) -> bool {
- self.id == other.id && self.owner == other.owner && &*self.thing == &*other.thing
+ self.id == other.id
+ && self.owner == other.owner
+ && &*self.expected_ty == &*other.expected_ty
}
}
@@ -1041,7 +1040,6 @@ impl HasModule for TypeOwnerId {
TypeOwnerId::TypeAliasId(it) => it.lookup(db).module(db),
TypeOwnerId::ImplId(it) => it.lookup(db).container,
TypeOwnerId::EnumVariantId(it) => it.parent.lookup(db).container,
- TypeOwnerId::ModuleId(it) => *it,
}
}
}
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
index 9010050ee6..71ba497217 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
@@ -13,37 +13,97 @@ fn test_vec() {
check(
r#"
macro_rules! vec {
- ($($item:expr),*) => {{
- let mut v = Vec::new();
- $( v.push($item); )*
- v
- }};
+ () => (
+ $crate::__rust_force_expr!($crate::vec::Vec::new())
+ );
+ ($elem:expr; $n:expr) => (
+ $crate::__rust_force_expr!($crate::vec::from_elem($elem, $n))
+ );
+ ($($x:expr),+ $(,)?) => (
+ $crate::__rust_force_expr!(<[_]>::into_vec(
+ // This rustc_box is not required, but it produces a dramatic improvement in compile
+ // time when constructing arrays with many elements.
+ #[rustc_box]
+ $crate::boxed::Box::new([$($x),+])
+ ))
+ );
+}
+
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
+ };
}
+
fn main() {
vec!();
vec![1u32,2];
+ vec![a.];
}
"#,
expect![[r#"
macro_rules! vec {
- ($($item:expr),*) => {{
- let mut v = Vec::new();
- $( v.push($item); )*
- v
- }};
+ () => (
+ $crate::__rust_force_expr!($crate::vec::Vec::new())
+ );
+ ($elem:expr; $n:expr) => (
+ $crate::__rust_force_expr!($crate::vec::from_elem($elem, $n))
+ );
+ ($($x:expr),+ $(,)?) => (
+ $crate::__rust_force_expr!(<[_]>::into_vec(
+ // This rustc_box is not required, but it produces a dramatic improvement in compile
+ // time when constructing arrays with many elements.
+ #[rustc_box]
+ $crate::boxed::Box::new([$($x),+])
+ ))
+ );
+}
+
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
+ };
}
+
fn main() {
- {
- let mut v = Vec::new();
- v
+ $crate::__rust_force_expr!($crate:: vec:: Vec:: new());
+ $crate::__rust_force_expr!(<[_]>:: into_vec(#[rustc_box]$crate:: boxed:: Box:: new([1u32, 2])));
+ /* error: expected Expr */$crate::__rust_force_expr!($crate:: vec:: from_elem((a.), $n));
+}
+"#]],
+ );
+ // FIXME we should have testing infra for multi level expansion tests
+ check(
+ r#"
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
};
- {
- let mut v = Vec::new();
- v.push(1u32);
- v.push(2);
- v
+}
+
+fn main() {
+ __rust_force_expr!(crate:: vec:: Vec:: new());
+ __rust_force_expr!(<[_]>:: into_vec(#[rustc_box] crate:: boxed:: Box:: new([1u32, 2])));
+ __rust_force_expr/*+errors*/!(crate:: vec:: from_elem((a.), $n));
+}
+"#,
+ expect![[r#"
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
};
}
+
+fn main() {
+ (crate ::vec::Vec::new());
+ (<[_]>::into_vec(#[rustc_box] crate ::boxed::Box::new([1u32, 2])));
+ /* error: expected Expr *//* parse error: expected field name or number */
+/* parse error: expected expression */
+/* parse error: expected R_PAREN */
+/* parse error: expected COMMA */
+/* parse error: expected expression, item or let statement */
+(crate ::vec::from_elem((a.), $n));
+}
"#]],
);
}
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index ba0a2c0224..2ac1516ec0 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -589,6 +589,16 @@ impl Resolver {
})
}
+ pub fn type_owner(&self) -> Option<TypeOwnerId> {
+ self.scopes().find_map(|scope| match scope {
+ Scope::BlockScope(_) => None,
+ &Scope::GenericParams { def, .. } => Some(def.into()),
+ &Scope::ImplDefScope(id) => Some(id.into()),
+ &Scope::AdtScope(adt) => Some(adt.into()),
+ Scope::ExprScope(it) => Some(it.owner.into()),
+ })
+ }
+
pub fn impl_def(&self) -> Option<ImplId> {
self.scopes().find_map(|scope| match scope {
Scope::ImplDefScope(def) => Some(*def),
@@ -1079,7 +1089,6 @@ impl HasResolver for TypeOwnerId {
TypeOwnerId::TypeAliasId(it) => it.resolver(db),
TypeOwnerId::ImplId(it) => it.resolver(db),
TypeOwnerId::EnumVariantId(it) => it.resolver(db),
- TypeOwnerId::ModuleId(it) => it.resolver(db),
}
}
}
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index d2c6559b06..935669d49b 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -10,17 +10,17 @@ use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet;
use syntax::{
- ast::{self, HasAttrs, HasDocComments},
+ ast::{self, HasAttrs},
AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
};
use triomphe::Arc;
use crate::{
ast_id_map::AstIdMap,
- attrs::RawAttrs,
+ attrs::{collect_attrs, RawAttrs},
builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander,
- fixup::{self, SyntaxFixupUndoInfo},
+ fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
hygiene::{apply_mark, SyntaxContextData, Transparency},
span::{RealSpanMap, SpanMap, SpanMapRef},
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
@@ -216,9 +216,9 @@ pub fn expand_speculative(
// Attributes may have an input token tree, build the subtree and map for this as well
// then try finding a token id for our token if it is inside this input subtree.
let item = ast::Item::cast(speculative_args.clone())?;
- item.doc_comments_and_attrs()
+ collect_attrs(&item)
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)
+ .and_then(|x| Either::left(x.1))
}?;
match attr.token_tree() {
Some(token_tree) => {
@@ -421,6 +421,15 @@ fn macro_arg(
syntax::NodeOrToken::Token(_) => true,
});
fixups.remove.extend(censor);
+ {
+ let mut tt = mbe::syntax_node_to_token_tree_modified(
+ &syntax,
+ map.as_ref(),
+ fixups.append.clone(),
+ fixups.remove.clone(),
+ );
+ reverse_fixups(&mut tt, &fixups.undo_info);
+ }
(
mbe::syntax_node_to_token_tree_modified(
&syntax,
@@ -479,10 +488,9 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => return None,
MacroCallKind::Attr { invoc_attr_index, .. } => {
cov_mark::hit!(attribute_macro_attr_censoring);
- ast::Item::cast(node.clone())?
- .doc_comments_and_attrs()
+ collect_attrs(&ast::Item::cast(node.clone())?)
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)
+ .and_then(|x| Either::left(x.1))
.map(|attr| attr.syntax().clone())
.into_iter()
.collect()
diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs
index 1e2722e846..8d55240aef 100644
--- a/crates/hir-expand/src/eager.rs
+++ b/crates/hir-expand/src/eager.rs
@@ -88,7 +88,7 @@ pub fn expand_eager_macro_input(
let loc = MacroCallLoc {
def,
krate,
- eager: Some(Box::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
+ eager: Some(Arc::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
call_site,
};
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index 11775c531d..346cd39a76 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -8,12 +8,13 @@ use base_db::{
use la_arena::RawIdx;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
+use stdx::never;
use syntax::{
ast::{self, AstNode, HasLoopBody},
match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
use triomphe::Arc;
-use tt::Spacing;
+use tt::{Spacing, Span};
use crate::{
span::SpanMapRef,
@@ -45,19 +46,20 @@ impl SyntaxFixupUndoInfo {
// replacement -> censor + append
// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
// to remove later
+const FIXUP_DUMMY_FILE: FileId = FileId::from_raw(FileId::MAX_FILE_ID);
+const FIXUP_DUMMY_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(RawIdx::from_u32(!0));
+const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0));
+const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0);
pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
let mut append = FxHashMap::<SyntaxElement, _>::default();
let mut remove = FxHashSet::<SyntaxNode>::default();
let mut preorder = node.preorder();
let mut original = Vec::new();
- let dummy_range = TextRange::empty(TextSize::new(0));
+ let dummy_range = FIXUP_DUMMY_RANGE;
// we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as
// the index into the replacement vec but only if the end points to !0
- let dummy_anchor = SpanAnchor {
- file_id: FileId::from_raw(!0),
- ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)),
- };
+ let dummy_anchor = SpanAnchor { file_id: FIXUP_DUMMY_FILE, ast_id: FIXUP_DUMMY_AST_ID };
let fake_span = |range| SpanData {
range: dummy_range,
anchor: dummy_anchor,
@@ -76,7 +78,7 @@ pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> Synta
let replacement = Leaf::Ident(Ident {
text: "__ra_fixup".into(),
span: SpanData {
- range: TextRange::new(TextSize::new(idx), TextSize::new(!0)),
+ range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
anchor: dummy_anchor,
ctx: span_map.span_for_range(node_range).ctx,
},
@@ -299,6 +301,13 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
let Some(undo_info) = undo_info.original.as_deref() else { return };
let undo_info = &**undo_info;
+ if never!(
+ tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
+ || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+ ) {
+ tt.delimiter.close = SpanData::DUMMY;
+ tt.delimiter.open = SpanData::DUMMY;
+ }
reverse_fixups_(tt, undo_info);
}
@@ -310,17 +319,28 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
.filter(|tt| match tt {
tt::TokenTree::Leaf(leaf) => {
let span = leaf.span();
- span.anchor.file_id != FileId::from_raw(!0) || span.range.end() == TextSize::new(!0)
+ let is_real_leaf = span.anchor.file_id != FIXUP_DUMMY_FILE;
+ let is_replaced_node = span.range.end() == FIXUP_DUMMY_RANGE_END;
+ is_real_leaf || is_replaced_node
}
tt::TokenTree::Subtree(_) => true,
})
.flat_map(|tt| match tt {
tt::TokenTree::Subtree(mut tt) => {
+ if tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
+ || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+ {
+ // Even though fixup never creates subtrees with fixup spans, the old proc-macro server
+ // might copy them if the proc-macro asks for it, so we need to filter those out
+ // here as well.
+ return SmallVec::new_const();
+ }
reverse_fixups_(&mut tt, undo_info);
SmallVec::from_const([tt.into()])
}
tt::TokenTree::Leaf(leaf) => {
- if leaf.span().anchor.file_id == FileId::from_raw(!0) {
+ if leaf.span().anchor.file_id == FIXUP_DUMMY_FILE {
+ // we have a fake node here, we need to replace it again with the original
let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
if original.delimiter.kind == tt::DelimiterKind::Invisible {
original.token_trees.into()
@@ -328,6 +348,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
SmallVec::from_const([original.into()])
}
} else {
+ // just a normal leaf
SmallVec::from_const([leaf.into()])
}
}
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 74089593ac..d7819b315c 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -22,6 +22,7 @@ pub mod span;
pub mod files;
mod fixup;
+use attrs::collect_attrs;
use triomphe::Arc;
use std::{fmt, hash::Hash};
@@ -32,7 +33,7 @@ use base_db::{
};
use either::Either;
use syntax::{
- ast::{self, AstNode, HasDocComments},
+ ast::{self, AstNode},
SyntaxNode, SyntaxToken, TextRange, TextSize,
};
@@ -116,7 +117,7 @@ pub struct MacroCallLoc {
pub krate: CrateId,
/// Some if this is a macro call for an eager macro. Note that this is `None`
/// for the eager input macro file.
- eager: Option<Box<EagerCallInfo>>,
+ eager: Option<Arc<EagerCallInfo>>,
pub kind: MacroCallKind,
pub call_site: SyntaxContextId,
}
@@ -438,9 +439,9 @@ impl MacroCallLoc {
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: handle `cfg_attr`
ast_id.with_value(ast_id.to_node(db)).map(|it| {
- it.doc_comments_and_attrs()
+ collect_attrs(&it)
.nth(derive_attr_index.ast_index())
- .and_then(|it| match it {
+ .and_then(|it| match it.1 {
Either::Left(attr) => Some(attr.syntax().clone()),
Either::Right(_) => None,
})
@@ -451,9 +452,9 @@ impl MacroCallLoc {
if self.def.is_attribute_derive() {
// FIXME: handle `cfg_attr`
ast_id.with_value(ast_id.to_node(db)).map(|it| {
- it.doc_comments_and_attrs()
+ collect_attrs(&it)
.nth(invoc_attr_index.ast_index())
- .and_then(|it| match it {
+ .and_then(|it| match it.1 {
Either::Left(attr) => Some(attr.syntax().clone()),
Either::Right(_) => None,
})
@@ -549,24 +550,24 @@ impl MacroCallKind {
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: should be the range of the macro name, not the whole derive
// FIXME: handle `cfg_attr`
- ast_id
- .to_node(db)
- .doc_comments_and_attrs()
+ collect_attrs(&ast_id.to_node(db))
.nth(derive_attr_index.ast_index())
.expect("missing derive")
+ .1
.expect_left("derive is a doc comment?")
.syntax()
.text_range()
}
// FIXME: handle `cfg_attr`
- MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => ast_id
- .to_node(db)
- .doc_comments_and_attrs()
- .nth(invoc_attr_index.ast_index())
- .expect("missing attribute")
- .expect_left("attribute macro is a doc comment?")
- .syntax()
- .text_range(),
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ collect_attrs(&ast_id.to_node(db))
+ .nth(invoc_attr_index.ast_index())
+ .expect("missing attribute")
+ .1
+ .expect_left("attribute macro is a doc comment?")
+ .syntax()
+ .text_range()
+ }
};
FileRange { range, file_id }
@@ -737,11 +738,9 @@ impl ExpansionInfo {
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
// FIXME: handle `cfg_attr`
- let tt = ast_id
- .to_node(db)
- .doc_comments_and_attrs()
+ let tt = collect_attrs(&ast_id.to_node(db))
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)?
+ .and_then(|x| Either::left(x.1))?
.token_tree()?;
Some(InFile::new(ast_id.file_id, tt))
}
diff --git a/crates/hir-expand/src/span.rs b/crates/hir-expand/src/span.rs
index 0a6c22fe42..fe476a40fe 100644
--- a/crates/hir-expand/src/span.rs
+++ b/crates/hir-expand/src/span.rs
@@ -75,27 +75,40 @@ pub struct RealSpanMap {
/// Invariant: Sorted vec over TextSize
// FIXME: SortedVec<(TextSize, ErasedFileAstId)>?
pairs: Box<[(TextSize, ErasedFileAstId)]>,
+ end: TextSize,
}
impl RealSpanMap {
/// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id).
pub fn absolute(file_id: FileId) -> Self {
- RealSpanMap { file_id, pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]) }
+ RealSpanMap {
+ file_id,
+ pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]),
+ end: TextSize::new(!0),
+ }
}
pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self {
let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(file_id.into());
- pairs.extend(
- db.parse(file_id)
- .tree()
- .items()
- .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
- );
- RealSpanMap { file_id, pairs: pairs.into_boxed_slice() }
+ let tree = db.parse(file_id).tree();
+ pairs
+ .extend(tree.items().map(|item| {
+ (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())
+ }));
+ RealSpanMap {
+ file_id,
+ pairs: pairs.into_boxed_slice(),
+ end: tree.syntax().text_range().end(),
+ }
}
pub fn span_for_range(&self, range: TextRange) -> SpanData {
+ assert!(
+ range.end() <= self.end,
+ "range {range:?} goes beyond the end of the file {:?}",
+ self.end
+ );
let start = range.start();
let idx = self
.pairs
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 8262edec22..6f724e4587 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -113,7 +113,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
// FIXME(const-generic-body): We should not get the return type in this way.
ctx.return_ty = c
.lookup(db.upcast())
- .thing
+ .expected_ty
.box_any()
.downcast::<InTypeConstIdMetadata>()
.unwrap()
diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs
index 7ff12e5b7f..acdb540289 100644
--- a/crates/hir-ty/src/infer/pat.rs
+++ b/crates/hir-ty/src/infer/pat.rs
@@ -262,7 +262,7 @@ impl InferenceContext<'_> {
fn infer_pat(&mut self, pat: PatId, expected: &Ty, mut default_bm: BindingMode) -> Ty {
let mut expected = self.resolve_ty_shallow(expected);
- if is_non_ref_pat(self.body, pat) {
+ if self.is_non_ref_pat(self.body, pat) {
let mut pat_adjustments = Vec::new();
while let Some((inner, _lifetime, mutability)) = expected.as_reference() {
pat_adjustments.push(expected.clone());
@@ -496,24 +496,28 @@ impl InferenceContext<'_> {
self.infer_expr(expr, &Expectation::has_type(expected.clone()))
}
-}
-fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
- match &body[pat] {
- Pat::Tuple { .. }
- | Pat::TupleStruct { .. }
- | Pat::Record { .. }
- | Pat::Range { .. }
- | Pat::Slice { .. } => true,
- Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)),
- // FIXME: ConstBlock/Path/Lit might actually evaluate to ref, but inference is unimplemented.
- Pat::Path(..) => true,
- Pat::ConstBlock(..) => true,
- Pat::Lit(expr) => !matches!(
- body[*expr],
- Expr::Literal(Literal::String(..) | Literal::CString(..) | Literal::ByteString(..))
- ),
- Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false,
+ fn is_non_ref_pat(&mut self, body: &hir_def::body::Body, pat: PatId) -> bool {
+ match &body[pat] {
+ Pat::Tuple { .. }
+ | Pat::TupleStruct { .. }
+ | Pat::Record { .. }
+ | Pat::Range { .. }
+ | Pat::Slice { .. } => true,
+ Pat::Or(pats) => pats.iter().all(|p| self.is_non_ref_pat(body, *p)),
+ Pat::Path(p) => {
+ let v = self.resolve_value_path_inner(p, pat.into());
+ v.is_some_and(|x| !matches!(x.0, hir_def::resolver::ValueNs::ConstId(_)))
+ }
+ Pat::ConstBlock(..) => false,
+ Pat::Lit(expr) => !matches!(
+ body[*expr],
+ Expr::Literal(Literal::String(..) | Literal::CString(..) | Literal::ByteString(..))
+ ),
+ Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => {
+ false
+ }
+ }
}
}
diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs
index fcfe1a3b5c..49fb78f67a 100644
--- a/crates/hir-ty/src/infer/path.rs
+++ b/crates/hir-ty/src/infer/path.rs
@@ -40,33 +40,7 @@ impl InferenceContext<'_> {
}
fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<ValuePathResolution> {
- let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
- let last = path.segments().last()?;
-
- // Don't use `self.make_ty()` here as we need `orig_ns`.
- let ctx =
- crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
- let (ty, orig_ns) = ctx.lower_ty_ext(type_ref);
- let ty = self.table.insert_type_vars(ty);
- let ty = self.table.normalize_associated_types_in(ty);
-
- let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
- let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
- let ty = self.table.insert_type_vars(ty);
- let ty = self.table.normalize_associated_types_in(ty);
- self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
- } else {
- // FIXME: report error, unresolved first path segment
- let value_or_partial =
- self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?;
-
- match value_or_partial {
- ResolveValueResult::ValueNs(it, _) => (it, None),
- ResolveValueResult::Partial(def, remaining_index, _) => self
- .resolve_assoc_item(def, path, remaining_index, id)
- .map(|(it, substs)| (it, Some(substs)))?,
- }
- };
+ let (value, self_subst) = self.resolve_value_path_inner(path, id)?;
let value_def = match value {
ValueNs::LocalBinding(pat) => match self.result.type_of_binding.get(pat) {
@@ -144,6 +118,41 @@ impl InferenceContext<'_> {
Some(ValuePathResolution::GenericDef(value_def, generic_def, substs))
}
+ pub(super) fn resolve_value_path_inner(
+ &mut self,
+ path: &Path,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<chalk_ir::Substitution<Interner>>)> {
+ let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
+ let last = path.segments().last()?;
+
+ // Don't use `self.make_ty()` here as we need `orig_ns`.
+ let ctx =
+ crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
+ let (ty, orig_ns) = ctx.lower_ty_ext(type_ref);
+ let ty = self.table.insert_type_vars(ty);
+ let ty = self.table.normalize_associated_types_in(ty);
+
+ let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
+ let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
+ let ty = self.table.insert_type_vars(ty);
+ let ty = self.table.normalize_associated_types_in(ty);
+ self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
+ } else {
+ // FIXME: report error, unresolved first path segment
+ let value_or_partial =
+ self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?;
+
+ match value_or_partial {
+ ResolveValueResult::ValueNs(it, _) => (it, None),
+ ResolveValueResult::Partial(def, remaining_index, _) => self
+ .resolve_assoc_item(def, path, remaining_index, id)
+ .map(|(it, substs)| (it, Some(substs)))?,
+ }
+ };
+ Some((value, self_subst))
+ }
+
fn add_required_obligations_for_value_path(&mut self, def: GenericDefId, subst: &Substitution) {
let predicates = self.db.generic_predicates(def);
for predicate in predicates.iter() {
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index c86fe9adff..97c4a741ff 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -113,7 +113,9 @@ pub struct TyLoweringContext<'a> {
pub db: &'a dyn HirDatabase,
resolver: &'a Resolver,
in_binders: DebruijnIndex,
- owner: TypeOwnerId,
+ // FIXME: Should not be an `Option` but `Resolver` currently does not return owners in all cases
+ // where expected
+ owner: Option<TypeOwnerId>,
/// Note: Conceptually, it's thinkable that we could be in a location where
/// some type params should be represented as placeholders, and others
/// should be converted to variables. I think in practice, this isn't
@@ -127,6 +129,14 @@ pub struct TyLoweringContext<'a> {
impl<'a> TyLoweringContext<'a> {
pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver, owner: TypeOwnerId) -> Self {
+ Self::new_maybe_unowned(db, resolver, Some(owner))
+ }
+
+ pub fn new_maybe_unowned(
+ db: &'a dyn HirDatabase,
+ resolver: &'a Resolver,
+ owner: Option<TypeOwnerId>,
+ ) -> Self {
let impl_trait_mode = ImplTraitLoweringState::Disallowed;
let type_param_mode = ParamLoweringMode::Placeholder;
let in_binders = DebruijnIndex::INNERMOST;
@@ -213,10 +223,11 @@ impl<'a> TyLoweringContext<'a> {
}
pub fn lower_const(&self, const_ref: &ConstRef, const_type: Ty) -> Const {
+ let Some(owner) = self.owner else { return unknown_const(const_type) };
const_or_path_to_chalk(
self.db,
self.resolver,
- self.owner,
+ owner,
const_type,
const_ref,
self.type_param_mode,
@@ -1768,10 +1779,11 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
let resolver = t.resolver(db.upcast());
let ctx = TyLoweringContext::new(db, &resolver, t.into())
.with_type_param_mode(ParamLoweringMode::Variable);
- if db.type_alias_data(t).is_extern {
+ let type_alias_data = db.type_alias_data(t);
+ if type_alias_data.is_extern {
Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner))
} else {
- let type_ref = &db.type_alias_data(t).type_ref;
+ let type_ref = &type_alias_data.type_ref;
let inner = ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error));
make_binders(db, &generics, inner)
}
@@ -2042,7 +2054,7 @@ pub(crate) fn const_or_path_to_chalk(
.intern_in_type_const(InTypeConstLoc {
id: it,
owner,
- thing: Box::new(InTypeConstIdMetadata(expected_ty.clone())),
+ expected_ty: Box::new(InTypeConstIdMetadata(expected_ty.clone())),
})
.into();
intern_const_scalar(
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index bb15ca8c43..28e84e480d 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -9,11 +9,10 @@ use super::visit_module;
fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
let (mut db, pos) = TestDB::with_position(
"
- //- /lib.rs
- fn foo() -> i32 {
- $01 + 1
- }
- ",
+//- /lib.rs
+fn foo() -> i32 {
+ $01 + 1
+}",
);
{
let events = db.log_executed(|| {
@@ -27,12 +26,11 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
}
let new_text = "
- fn foo() -> i32 {
- 1
- +
- 1
- }
- ";
+fn foo() -> i32 {
+ 1
+ +
+ 1
+}";
db.set_file_text(pos.file_id, Arc::from(new_text));
@@ -47,3 +45,55 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
assert!(!format!("{events:?}").contains("infer"), "{events:#?}")
}
}
+
+#[test]
+fn typing_inside_a_function_should_not_invalidate_types_in_another() {
+ let (mut db, pos) = TestDB::with_position(
+ "
+//- /lib.rs
+fn foo() -> f32 {
+ 1.0 + 2.0
+}
+fn bar() -> i32 {
+ $01 + 1
+}
+fn baz() -> i32 {
+ 1 + 1
+}",
+ );
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+ });
+ assert!(format!("{events:?}").contains("infer"))
+ }
+
+ let new_text = "
+fn foo() -> f32 {
+ 1.0 + 2.0
+}
+fn bar() -> i32 {
+ 53
+}
+fn baz() -> i32 {
+ 1 + 1
+}
+";
+
+ db.set_file_text(pos.file_id, Arc::from(new_text));
+
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+ });
+ assert!(format!("{events:?}").matches("infer").count() == 1, "{events:#?}")
+ }
+}
diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs
index 5d7bab09c2..7234af2d68 100644
--- a/crates/hir-ty/src/tests/patterns.rs
+++ b/crates/hir-ty/src/tests/patterns.rs
@@ -1153,3 +1153,41 @@ fn main() {
"#,
);
}
+
+#[test]
+fn type_mismatch_pat_const_reference() {
+ check_no_mismatches(
+ r#"
+const TEST_STR: &'static str = "abcd";
+
+fn main() {
+ let s = "abcd";
+ match s {
+ TEST_STR => (),
+ _ => (),
+ }
+}
+
+ "#,
+ );
+ check(
+ r#"
+struct Foo<T>(T);
+
+impl<T> Foo<T> {
+ const TEST_I32_REF: &'static i32 = &3;
+ const TEST_I32: i32 = 3;
+}
+
+fn main() {
+ match &6 {
+ Foo::<i32>::TEST_I32_REF => (),
+ Foo::<i32>::TEST_I32 => (),
+ //^^^^^^^^^^^^^^^^^^^^ expected &i32, got i32
+ _ => (),
+ }
+}
+
+ "#,
+ );
+}
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 92fa76c96f..a03ff22074 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -20,8 +20,8 @@ use hir_def::{
AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
};
use hir_expand::{
- db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, InMacroFile, MacroCallId,
- MacroFileId, MacroFileIdExt,
+ attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo,
+ InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
@@ -29,7 +29,7 @@ use smallvec::{smallvec, SmallVec};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
- ast::{self, HasAttrs as _, HasDocComments, HasGenericParams, HasLoopBody, IsString as _},
+ ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _},
match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
TextRange, TextSize,
};
@@ -673,11 +673,22 @@ impl<'db> SemanticsImpl<'db> {
}
_ => 0,
};
+ // FIXME: here, the attribute's text range is used to strip away all
+ // entries from the start of the attribute "list" up to the invoking
+ // attribute. But in
+ // ```
+ // mod foo {
+ // #![inner]
+ // }
+ // ```
+ // we don't want to strip away stuff in the `mod foo {` range; that is,
+ // if the id corresponds to an inner attribute we have to strip all
+ // text ranges of the outer ones, and then all of the inner ones up
+ // to the invoking attribute so that the in-between is ignored.
let text_range = item.syntax().text_range();
- let start = item
- .doc_comments_and_attrs()
+ let start = collect_attrs(&item)
.nth(attr_id)
- .map(|attr| match attr {
+ .map(|attr| match attr.1 {
Either::Left(it) => it.syntax().text_range().start(),
Either::Right(it) => it.syntax().text_range().start(),
})
@@ -937,10 +948,10 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
let analyze = self.analyze(ty.syntax())?;
let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id);
- let ty = hir_ty::TyLoweringContext::new(
+ let ty = hir_ty::TyLoweringContext::new_maybe_unowned(
self.db,
&analyze.resolver,
- analyze.resolver.module().into(),
+ analyze.resolver.type_owner(),
)
.lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 73db6f8f0b..d05118bbc2 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -1040,8 +1040,9 @@ fn resolve_hir_path_(
let types = || {
let (ty, unresolved) = match path.type_anchor() {
Some(type_ref) => {
- let (_, res) = TyLoweringContext::new(db, resolver, resolver.module().into())
- .lower_ty_ext(type_ref);
+ let (_, res) =
+ TyLoweringContext::new_maybe_unowned(db, resolver, resolver.type_owner())
+ .lower_ty_ext(type_ref);
res.map(|ty_ns| (ty_ns, path.segments().first()))
}
None => {
diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs
index 03112f6de5..a2a30edeb0 100644
--- a/crates/hir/src/symbols.rs
+++ b/crates/hir/src/symbols.rs
@@ -23,6 +23,7 @@ pub struct FileSymbol {
pub loc: DeclarationLocation,
pub container_name: Option<SmolStr>,
pub is_alias: bool,
+ pub is_assoc: bool,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -121,34 +122,34 @@ impl<'a> SymbolCollector<'a> {
match module_def_id {
ModuleDefId::ModuleId(id) => self.push_module(id),
ModuleDefId::FunctionId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_body(id);
}
- ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id),
- ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id),
- ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id),
+ ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id, false),
+ ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id, false),
+ ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id, false),
ModuleDefId::ConstId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_body(id);
}
ModuleDefId::StaticId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_body(id);
}
ModuleDefId::TraitId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_trait(id);
}
ModuleDefId::TraitAliasId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
}
ModuleDefId::TypeAliasId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
}
ModuleDefId::MacroId(id) => match id {
- MacroId::Macro2Id(id) => self.push_decl(id),
- MacroId::MacroRulesId(id) => self.push_decl(id),
- MacroId::ProcMacroId(id) => self.push_decl(id),
+ MacroId::Macro2Id(id) => self.push_decl(id, false),
+ MacroId::MacroRulesId(id) => self.push_decl(id, false),
+ MacroId::ProcMacroId(id) => self.push_decl(id, false),
},
// Don't index these.
ModuleDefId::BuiltinType(_) => {}
@@ -190,6 +191,7 @@ impl<'a> SymbolCollector<'a> {
container_name: self.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
+ is_assoc: false,
});
});
}
@@ -202,9 +204,9 @@ impl<'a> SymbolCollector<'a> {
for &id in id {
if id.module(self.db.upcast()) == module_id {
match id {
- MacroId::Macro2Id(id) => self.push_decl(id),
- MacroId::MacroRulesId(id) => self.push_decl(id),
- MacroId::ProcMacroId(id) => self.push_decl(id),
+ MacroId::Macro2Id(id) => self.push_decl(id, false),
+ MacroId::MacroRulesId(id) => self.push_decl(id, false),
+ MacroId::ProcMacroId(id) => self.push_decl(id, false),
}
}
}
@@ -266,13 +268,13 @@ impl<'a> SymbolCollector<'a> {
fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
match assoc_item_id {
- AssocItemId::FunctionId(id) => self.push_decl(id),
- AssocItemId::ConstId(id) => self.push_decl(id),
- AssocItemId::TypeAliasId(id) => self.push_decl(id),
+ AssocItemId::FunctionId(id) => self.push_decl(id, true),
+ AssocItemId::ConstId(id) => self.push_decl(id, true),
+ AssocItemId::TypeAliasId(id) => self.push_decl(id, true),
}
}
- fn push_decl<L>(&mut self, id: L)
+ fn push_decl<L>(&mut self, id: L, is_assoc: bool)
where
L: Lookup + Into<ModuleDefId>,
<L as Lookup>::Data: HasSource,
@@ -296,6 +298,7 @@ impl<'a> SymbolCollector<'a> {
loc: dec_loc.clone(),
container_name: self.current_container_name.clone(),
is_alias: true,
+ is_assoc,
});
}
}
@@ -306,6 +309,7 @@ impl<'a> SymbolCollector<'a> {
container_name: self.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
+ is_assoc,
});
}
@@ -331,6 +335,7 @@ impl<'a> SymbolCollector<'a> {
loc: dec_loc.clone(),
container_name: self.current_container_name.clone(),
is_alias: true,
+ is_assoc: false,
});
}
}
@@ -341,6 +346,7 @@ impl<'a> SymbolCollector<'a> {
container_name: self.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
+ is_assoc: false,
});
}
}
diff --git a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
index 017853a4a2..435d7c4a53 100644
--- a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
+++ b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
@@ -216,7 +216,7 @@ fn edit_field_references(
edit.edit_file(file_id);
for r in refs {
if let Some(name_ref) = r.name.as_name_ref() {
- edit.replace(name_ref.syntax().text_range(), name.text());
+ edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text());
}
}
}
diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs
index b982322a73..de41a5bd70 100644
--- a/crates/ide-completion/src/item.rs
+++ b/crates/ide-completion/src/item.rs
@@ -458,13 +458,11 @@ impl Builder {
}
if let [import_edit] = &*self.imports_to_add {
// snippets can have multiple imports, but normal completions only have up to one
- if let Some(original_path) = import_edit.original_path.as_ref() {
- label_detail.replace(SmolStr::from(format!(
- "{} (use {})",
- label_detail.as_deref().unwrap_or_default(),
- original_path.display(db)
- )));
- }
+ label_detail.replace(SmolStr::from(format!(
+ "{} (use {})",
+ label_detail.as_deref().unwrap_or_default(),
+ import_edit.import_path.display(db)
+ )));
} else if let Some(trait_name) = self.trait_name {
label_detail.replace(SmolStr::from(format!(
"{} (as {trait_name})",
diff --git a/crates/ide-completion/src/snippet.rs b/crates/ide-completion/src/snippet.rs
index 50618296ee..e667e2e016 100644
--- a/crates/ide-completion/src/snippet.rs
+++ b/crates/ide-completion/src/snippet.rs
@@ -181,7 +181,7 @@ fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option<V
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?;
- Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item, None)))
+ Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item)))
};
let mut res = Vec::with_capacity(requires.len());
for import in requires {
diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs
index 9a4a94a245..c58374f2e8 100644
--- a/crates/ide-completion/src/tests/flyimport.rs
+++ b/crates/ide-completion/src/tests/flyimport.rs
@@ -597,8 +597,8 @@ fn main() {
}
"#,
expect![[r#"
- ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
+ ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
"#]],
);
}
@@ -717,7 +717,7 @@ fn main() {
check(
fixture,
expect![[r#"
- st Item (use foo::bar::baz::Item) Item
+ st Item (use foo::bar) Item
"#]],
);
@@ -725,19 +725,19 @@ fn main() {
"Item",
fixture,
r#"
- use foo::bar;
+use foo::bar;
- mod foo {
- pub mod bar {
- pub mod baz {
- pub struct Item;
- }
- }
+mod foo {
+ pub mod bar {
+ pub mod baz {
+ pub struct Item;
}
+ }
+}
- fn main() {
- bar::baz::Item
- }"#,
+fn main() {
+ bar::baz::Item
+}"#,
);
}
@@ -803,7 +803,7 @@ fn main() {
check(
fixture,
expect![[r#"
- ct TEST_ASSOC (use foo::bar::Item) usize
+ ct TEST_ASSOC (use foo::bar) usize
"#]],
);
diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs
index 04263d15d0..a4f0a6df78 100644
--- a/crates/ide-db/src/imports/import_assets.rs
+++ b/crates/ide-db/src/imports/import_assets.rs
@@ -1,14 +1,14 @@
//! Look up accessible paths for items.
+
use hir::{
- AsAssocItem, AssocItem, AssocItemContainer, Crate, ItemInNs, ModPath, Module, ModuleDef,
+ AsAssocItem, AssocItem, AssocItemContainer, Crate, ItemInNs, ModPath, Module, ModuleDef, Name,
PathResolution, PrefixKind, ScopeDef, Semantics, SemanticsScope, Type,
};
-use itertools::Itertools;
-use rustc_hash::FxHashSet;
+use itertools::{EitherOrBoth, Itertools};
+use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{
ast::{self, make, HasName},
- utils::path_to_string_stripping_turbo_fish,
- AstNode, SyntaxNode,
+ AstNode, SmolStr, SyntaxNode,
};
use crate::{
@@ -51,18 +51,11 @@ pub struct TraitImportCandidate {
#[derive(Debug)]
pub struct PathImportCandidate {
/// Optional qualifier before name.
- pub qualifier: Option<FirstSegmentUnresolved>,
+ pub qualifier: Option<Vec<SmolStr>>,
/// The name the item (struct, trait, enum, etc.) should have.
pub name: NameToImport,
}
-/// A qualifier that has a first segment and it's unresolved.
-#[derive(Debug)]
-pub struct FirstSegmentUnresolved {
- fist_segment: ast::NameRef,
- full_qualifier: ast::Path,
-}
-
/// A name that will be used during item lookups.
#[derive(Debug, Clone)]
pub enum NameToImport {
@@ -195,18 +188,11 @@ pub struct LocatedImport {
/// the original item is the associated constant, but the import has to be a trait that
/// defines this constant.
pub original_item: ItemInNs,
- /// A path of the original item.
- pub original_path: Option<ModPath>,
}
impl LocatedImport {
- pub fn new(
- import_path: ModPath,
- item_to_import: ItemInNs,
- original_item: ItemInNs,
- original_path: Option<ModPath>,
- ) -> Self {
- Self { import_path, item_to_import, original_item, original_path }
+ pub fn new(import_path: ModPath, item_to_import: ItemInNs, original_item: ItemInNs) -> Self {
+ Self { import_path, item_to_import, original_item }
}
}
@@ -351,64 +337,75 @@ fn path_applicable_imports(
)
.filter_map(|item| {
let mod_path = mod_path(item)?;
- Some(LocatedImport::new(mod_path.clone(), item, item, Some(mod_path)))
- })
- .collect()
- }
- Some(first_segment_unresolved) => {
- let unresolved_qualifier =
- path_to_string_stripping_turbo_fish(&first_segment_unresolved.full_qualifier);
- let unresolved_first_segment = first_segment_unresolved.fist_segment.text();
- items_locator::items_with_name(
- sema,
- current_crate,
- path_candidate.name.clone(),
- AssocSearchMode::Include,
- Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
- )
- .filter_map(|item| {
- import_for_item(
- sema.db,
- mod_path,
- &unresolved_first_segment,
- &unresolved_qualifier,
- item,
- )
+ Some(LocatedImport::new(mod_path, item, item))
})
.collect()
}
+ Some(qualifier) => items_locator::items_with_name(
+ sema,
+ current_crate,
+ path_candidate.name.clone(),
+ AssocSearchMode::Include,
+ Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+ )
+ .filter_map(|item| import_for_item(sema.db, mod_path, &qualifier, item))
+ .collect(),
}
}
fn import_for_item(
db: &RootDatabase,
mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
- unresolved_first_segment: &str,
- unresolved_qualifier: &str,
+ unresolved_qualifier: &[SmolStr],
original_item: ItemInNs,
) -> Option<LocatedImport> {
let _p = profile::span("import_assets::import_for_item");
+ let [first_segment, ..] = unresolved_qualifier else { return None };
- let original_item_candidate = item_for_path_search(db, original_item)?;
+ let item_as_assoc = item_as_assoc(db, original_item);
+
+ let (original_item_candidate, trait_item_to_import) = match item_as_assoc {
+ Some(assoc_item) => match assoc_item.container(db) {
+ AssocItemContainer::Trait(trait_) => {
+ let trait_ = ItemInNs::from(ModuleDef::from(trait_));
+ (trait_, Some(trait_))
+ }
+ AssocItemContainer::Impl(impl_) => {
+ (ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?)), None)
+ }
+ },
+ None => (original_item, None),
+ };
let import_path_candidate = mod_path(original_item_candidate)?;
- let import_path_string = import_path_candidate.display(db).to_string();
- let expected_import_end = if item_as_assoc(db, original_item).is_some() {
- unresolved_qualifier.to_string()
- } else {
- format!("{unresolved_qualifier}::{}", item_name(db, original_item)?.display(db))
+ let mut import_path_candidate_segments = import_path_candidate.segments().iter().rev();
+ let predicate = |it: EitherOrBoth<&SmolStr, &Name>| match it {
+ // segments match, check next one
+ EitherOrBoth::Both(a, b) if b.as_str() == Some(&**a) => None,
+ // segments mismatch / qualifier is longer than the path, bail out
+ EitherOrBoth::Both(..) | EitherOrBoth::Left(_) => Some(false),
+ // all segments match and we have exhausted the qualifier, proceed
+ EitherOrBoth::Right(_) => Some(true),
};
- if !import_path_string.contains(unresolved_first_segment)
- || !import_path_string.ends_with(&expected_import_end)
- {
+ if item_as_assoc.is_none() {
+ let item_name = item_name(db, original_item)?.as_text()?;
+ let last_segment = import_path_candidate_segments.next()?;
+ if last_segment.as_str() != Some(&*item_name) {
+ return None;
+ }
+ }
+ let ends_with = unresolved_qualifier
+ .iter()
+ .rev()
+ .zip_longest(import_path_candidate_segments)
+ .find_map(predicate)
+ .unwrap_or(true);
+ if !ends_with {
return None;
}
- let segment_import =
- find_import_for_segment(db, original_item_candidate, unresolved_first_segment)?;
- let trait_item_to_import = item_as_assoc(db, original_item)
- .and_then(|assoc| assoc.containing_trait(db))
- .map(|trait_| ItemInNs::from(ModuleDef::from(trait_)));
+ let segment_import = find_import_for_segment(db, original_item_candidate, first_segment)?;
+
Some(match (segment_import == original_item_candidate, trait_item_to_import) {
(true, Some(_)) => {
// FIXME we should be able to import both the trait and the segment,
@@ -416,42 +413,37 @@ fn import_for_item(
// especially in case of lazy completion edit resolutions.
return None;
}
- (false, Some(trait_to_import)) => LocatedImport::new(
- mod_path(trait_to_import)?,
- trait_to_import,
- original_item,
- mod_path(original_item),
- ),
- (true, None) => LocatedImport::new(
- import_path_candidate,
- original_item_candidate,
- original_item,
- mod_path(original_item),
- ),
- (false, None) => LocatedImport::new(
- mod_path(segment_import)?,
- segment_import,
- original_item,
- mod_path(original_item),
- ),
+ (false, Some(trait_to_import)) => {
+ LocatedImport::new(mod_path(trait_to_import)?, trait_to_import, original_item)
+ }
+ (true, None) => {
+ LocatedImport::new(import_path_candidate, original_item_candidate, original_item)
+ }
+ (false, None) => {
+ LocatedImport::new(mod_path(segment_import)?, segment_import, original_item)
+ }
})
}
pub fn item_for_path_search(db: &RootDatabase, item: ItemInNs) -> Option<ItemInNs> {
Some(match item {
ItemInNs::Types(_) | ItemInNs::Values(_) => match item_as_assoc(db, item) {
- Some(assoc_item) => match assoc_item.container(db) {
- AssocItemContainer::Trait(trait_) => ItemInNs::from(ModuleDef::from(trait_)),
- AssocItemContainer::Impl(impl_) => {
- ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?))
- }
- },
+ Some(assoc_item) => item_for_path_search_assoc(db, assoc_item)?,
None => item,
},
ItemInNs::Macros(_) => item,
})
}
+fn item_for_path_search_assoc(db: &RootDatabase, assoc_item: AssocItem) -> Option<ItemInNs> {
+ Some(match assoc_item.container(db) {
+ AssocItemContainer::Trait(trait_) => ItemInNs::from(ModuleDef::from(trait_)),
+ AssocItemContainer::Impl(impl_) => {
+ ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?))
+ }
+ })
+}
+
fn find_import_for_segment(
db: &RootDatabase,
original_item: ItemInNs,
@@ -528,6 +520,7 @@ fn trait_applicable_items(
.collect();
let mut located_imports = FxHashSet::default();
+ let mut trait_import_paths = FxHashMap::default();
if trait_assoc_item {
trait_candidate.receiver_ty.iterate_path_candidates(
@@ -545,12 +538,14 @@ fn trait_applicable_items(
}
let located_trait = assoc.containing_trait(db)?;
let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
- let original_item = assoc_to_item(assoc);
+ let import_path = trait_import_paths
+ .entry(trait_item)
+ .or_insert_with(|| mod_path(trait_item))
+ .clone()?;
located_imports.insert(LocatedImport::new(
- mod_path(trait_item)?,
+ import_path,
trait_item,
- original_item,
- mod_path(original_item),
+ assoc_to_item(assoc),
));
}
None::<()>
@@ -568,12 +563,14 @@ fn trait_applicable_items(
if required_assoc_items.contains(&assoc) {
let located_trait = assoc.containing_trait(db)?;
let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
- let original_item = assoc_to_item(assoc);
+ let import_path = trait_import_paths
+ .entry(trait_item)
+ .or_insert_with(|| mod_path(trait_item))
+ .clone()?;
located_imports.insert(LocatedImport::new(
- mod_path(trait_item)?,
+ import_path,
trait_item,
- original_item,
- mod_path(original_item),
+ assoc_to_item(assoc),
));
}
None::<()>
@@ -671,18 +668,13 @@ fn path_import_candidate(
Some(match qualifier {
Some(qualifier) => match sema.resolve_path(&qualifier) {
None => {
- let qualifier_start =
- qualifier.syntax().descendants().find_map(ast::NameRef::cast)?;
- let qualifier_start_path =
- qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?;
- if sema.resolve_path(&qualifier_start_path).is_none() {
- ImportCandidate::Path(PathImportCandidate {
- qualifier: Some(FirstSegmentUnresolved {
- fist_segment: qualifier_start,
- full_qualifier: qualifier,
- }),
- name,
- })
+ if qualifier.first_qualifier().map_or(true, |it| sema.resolve_path(&it).is_none()) {
+ let mut qualifier = qualifier
+ .segments_of_this_path_only_rev()
+ .map(|seg| seg.name_ref().map(|name| SmolStr::new(name.text())))
+ .collect::<Option<Vec<_>>>()?;
+ qualifier.reverse();
+ ImportCandidate::Path(PathImportCandidate { qualifier: Some(qualifier), name })
} else {
return None;
}
diff --git a/crates/ide-db/src/items_locator.rs b/crates/ide-db/src/items_locator.rs
index 67ed44f08b..4a5d234f73 100644
--- a/crates/ide-db/src/items_locator.rs
+++ b/crates/ide-db/src/items_locator.rs
@@ -3,13 +3,13 @@
//! The main reason for this module to exist is the fact that project's items and dependencies' items
//! are located in different caches, with different APIs.
use either::Either;
-use hir::{import_map, AsAssocItem, Crate, ItemInNs, Semantics};
+use hir::{import_map, Crate, ItemInNs, Semantics};
use limit::Limit;
use crate::{imports::import_assets::NameToImport, symbol_index, RootDatabase};
/// A value to use, when uncertain which limit to pick.
-pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(40);
+pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(100);
pub use import_map::AssocSearchMode;
@@ -36,7 +36,9 @@ pub fn items_with_name<'a>(
NameToImport::Prefix(exact_name, case_sensitive)
| NameToImport::Exact(exact_name, case_sensitive) => {
let mut local_query = symbol_index::Query::new(exact_name.clone());
- let mut external_query = import_map::Query::new(exact_name);
+ let mut external_query =
+ // import_map::Query::new(exact_name).assoc_search_mode(assoc_item_search);
+ import_map::Query::new(exact_name);
if prefix {
local_query.prefix();
external_query = external_query.prefix();
@@ -101,8 +103,8 @@ fn find_items<'a>(
.into_iter()
.filter(move |candidate| match assoc_item_search {
AssocSearchMode::Include => true,
- AssocSearchMode::Exclude => candidate.def.as_assoc_item(db).is_none(),
- AssocSearchMode::AssocItemsOnly => candidate.def.as_assoc_item(db).is_some(),
+ AssocSearchMode::Exclude => !candidate.is_assoc,
+ AssocSearchMode::AssocItemsOnly => candidate.is_assoc,
})
.map(|local_candidate| match local_candidate.def {
hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs
index 3e89159c2c..be8566b759 100644
--- a/crates/ide-db/src/symbol_index.rs
+++ b/crates/ide-db/src/symbol_index.rs
@@ -50,7 +50,7 @@ enum SearchMode {
Prefix,
}
-#[derive(Debug)]
+#[derive(Debug, Clone)]
pub struct Query {
query: String,
lowercased: String,
diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt
index 4a72881fe5..7c01ac0693 100644
--- a/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -36,6 +36,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Struct",
@@ -65,6 +66,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "mul1",
@@ -94,6 +96,7 @@
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
FileSymbol {
name: "mul2",
@@ -123,6 +126,7 @@
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
FileSymbol {
name: "s1",
@@ -152,6 +156,7 @@
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
FileSymbol {
name: "s1",
@@ -181,6 +186,7 @@
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
FileSymbol {
name: "s2",
@@ -210,6 +216,7 @@
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
],
),
diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index da1f3167d7..c9875c7f8f 100644
--- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -34,6 +34,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "CONST",
@@ -61,6 +62,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "CONST_WITH_INNER",
@@ -88,6 +90,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Enum",
@@ -117,6 +120,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "ItemLikeMacro",
@@ -146,6 +150,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Macro",
@@ -175,6 +180,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "STATIC",
@@ -202,6 +208,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Struct",
@@ -231,6 +238,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructFromMacro",
@@ -260,6 +268,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructInFn",
@@ -291,6 +300,7 @@
"main",
),
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructInNamedConst",
@@ -322,6 +332,7 @@
"CONST_WITH_INNER",
),
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructInUnnamedConst",
@@ -351,6 +362,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Trait",
@@ -378,6 +390,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Trait",
@@ -407,6 +420,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Union",
@@ -436,6 +450,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "a_mod",
@@ -465,6 +480,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "b_mod",
@@ -494,6 +510,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "define_struct",
@@ -523,6 +540,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "impl_fn",
@@ -550,6 +568,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: true,
},
FileSymbol {
name: "macro_rules_macro",
@@ -579,6 +598,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "main",
@@ -606,6 +626,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "really_define_struct",
@@ -635,6 +656,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "trait_fn",
@@ -664,6 +686,7 @@
"Trait",
),
is_alias: false,
+ is_assoc: true,
},
],
),
@@ -704,6 +727,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
],
),
@@ -744,6 +768,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructInModB",
@@ -773,6 +798,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "SuperItemLikeMacro",
@@ -802,6 +828,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "ThisStruct",
@@ -831,6 +858,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "ThisStruct",
@@ -860,6 +888,7 @@
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
],
),
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs b/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
index 71c501a336..f8265b6327 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
@@ -47,19 +47,4 @@ use foo::Foo as Bar;
"#,
);
}
-
- #[test]
- fn regression_panic_with_inner_attribute_in_presence_of_unresolved_crate() {
- check_diagnostics(
- r#"
-//- /lib.rs
- #[macro_use] extern crate doesnotexist;
-//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: unresolved extern crate
- mod _test_inner {
- #![empty_attr]
- //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr`
- }
-"#,
- );
- }
}
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
index 33e7c2e37c..c8ff54cba3 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
@@ -70,4 +70,16 @@ self::m!(); self::m2!();
"#,
);
}
+
+ #[test]
+ fn regression_panic_with_inner_attribute_in_presence_of_unresolved_crate() {
+ check_diagnostics(
+ r#"
+ mod _test_inner {
+ #![empty_attr]
+ //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr`
+ }
+"#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index 6541bf6057..579386c72e 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -94,7 +94,7 @@ use syntax::{
};
// FIXME: Make this an enum
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum DiagnosticCode {
RustcHardError(&'static str),
RustcLint(&'static str),
@@ -198,7 +198,7 @@ impl Diagnostic {
}
}
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum Severity {
Error,
Warning,
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index ca334e9157..e82d730e4a 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -422,6 +422,11 @@ fn ty_to_text_edit(
Some(builder.finish())
}
+pub enum RangeLimit {
+ Fixed(TextRange),
+ NearestParent(TextSize),
+}
+
// Feature: Inlay Hints
//
// rust-analyzer shows additional information inline with the source code.
@@ -443,7 +448,7 @@ fn ty_to_text_edit(
pub(crate) fn inlay_hints(
db: &RootDatabase,
file_id: FileId,
- range_limit: Option<TextRange>,
+ range_limit: Option<RangeLimit>,
config: &InlayHintsConfig,
) -> Vec<InlayHint> {
let _p = profile::span("inlay_hints");
@@ -458,13 +463,31 @@ pub(crate) fn inlay_hints(
let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node);
match range_limit {
- Some(range) => match file.covering_element(range) {
+ Some(RangeLimit::Fixed(range)) => match file.covering_element(range) {
NodeOrToken::Token(_) => return acc,
NodeOrToken::Node(n) => n
.descendants()
.filter(|descendant| range.intersect(descendant.text_range()).is_some())
.for_each(hints),
},
+ Some(RangeLimit::NearestParent(position)) => {
+ match file.token_at_offset(position).left_biased() {
+ Some(token) => {
+ if let Some(parent_block) =
+ token.parent_ancestors().find_map(ast::BlockExpr::cast)
+ {
+ parent_block.syntax().descendants().for_each(hints)
+ } else if let Some(parent_item) =
+ token.parent_ancestors().find_map(ast::Item::cast)
+ {
+ parent_item.syntax().descendants().for_each(hints)
+ } else {
+ return acc;
+ }
+ }
+ None => return acc,
+ }
+ }
None => file.descendants().for_each(hints),
};
}
diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs
index 680035c721..45b51e3557 100644
--- a/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/crates/ide/src/inlay_hints/bind_pat.rs
@@ -177,7 +177,11 @@ mod tests {
use syntax::{TextRange, TextSize};
use test_utils::extract_annotations;
- use crate::{fixture, inlay_hints::InlayHintsConfig, ClosureReturnTypeHints};
+ use crate::{
+ fixture,
+ inlay_hints::{InlayHintsConfig, RangeLimit},
+ ClosureReturnTypeHints,
+ };
use crate::inlay_hints::tests::{
check, check_edit, check_no_edit, check_with_config, DISABLED_CONFIG, TEST_CONFIG,
@@ -400,7 +404,7 @@ fn main() {
.inlay_hints(
&InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
file_id,
- Some(TextRange::new(TextSize::from(500), TextSize::from(600))),
+ Some(RangeLimit::Fixed(TextRange::new(TextSize::from(500), TextSize::from(600)))),
)
.unwrap();
let actual =
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index d8f6e4e1b1..a19952e4ca 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -94,7 +94,7 @@ pub use crate::{
inlay_hints::{
AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints,
InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayHintPosition,
- InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints,
+ InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, RangeLimit,
},
join_lines::JoinLinesConfig,
markup::Markup,
@@ -133,7 +133,9 @@ pub use ide_db::{
symbol_index::Query,
RootDatabase, SymbolKind,
};
-pub use ide_diagnostics::{Diagnostic, DiagnosticsConfig, ExprFillDefaultMode, Severity};
+pub use ide_diagnostics::{
+ Diagnostic, DiagnosticCode, DiagnosticsConfig, ExprFillDefaultMode, Severity,
+};
pub use ide_ssr::SsrError;
pub use syntax::{TextRange, TextSize};
pub use text_edit::{Indel, TextEdit};
@@ -397,7 +399,7 @@ impl Analysis {
&self,
config: &InlayHintsConfig,
file_id: FileId,
- range: Option<TextRange>,
+ range: Option<RangeLimit>,
) -> Cancellable<Vec<InlayHint>> {
self.with_db(|db| inlay_hints::inlay_hints(db, file_id, range, config))
}
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 1c46471a38..b89bfd74a6 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -311,7 +311,7 @@ where
let ident = tt::Leaf::from(tt::Ident {
text: SmolStr::new(&token.to_text(conv)[1..]),
- span: conv.span_for(TextRange::at(
+ span: conv.span_for(TextRange::new(
abs_range.start() + TextSize::of('\''),
abs_range.end(),
)),
@@ -625,25 +625,6 @@ impl<SpanMap, S> Converter<SpanMap, S> {
}
fn next_token(&mut self) -> Option<SyntaxToken> {
- // while let Some(ev) = self.preorder.next() {
- // match ev {
- // WalkEvent::Enter(SyntaxElement::Token(t)) => {
- // if let Some(leafs) = self.append.remove(&t.clone().into()) {
- // self.current_leafs.extend(leafs);
- // }
- // return Some(t);
- // }
- // WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => {
- // self.preorder.skip_subtree();
- // if let Some(leafs) = self.append.remove(&n.into()) {
- // self.current_leafs.extend(leafs);
- // }
- // }
- // _ => (),
- // }
- // }
- // None;
-
while let Some(ev) = self.preorder.next() {
match ev {
WalkEvent::Enter(SyntaxElement::Token(t)) => return Some(t),
diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs
index 595691b177..40e8a2385f 100644
--- a/crates/mbe/src/tt_iter.rs
+++ b/crates/mbe/src/tt_iter.rs
@@ -131,7 +131,6 @@ impl<'a, S: Span> TtIter<'a, S> {
let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice());
let parser_input = to_parser_input(&buffer);
let tree_traversal = entry_point.parse(&parser_input);
-
let mut cursor = buffer.begin();
let mut error = false;
for step in tree_traversal.iter() {
@@ -163,12 +162,10 @@ impl<'a, S: Span> TtIter<'a, S> {
let mut curr = buffer.begin();
let mut res = vec![];
- if cursor.is_root() {
- while curr != cursor {
- let Some(token) = curr.token_tree() else { break };
- res.push(token.cloned());
- curr = curr.bump();
- }
+ while curr != cursor {
+ let Some(token) = curr.token_tree() else { break };
+ res.push(token.cloned());
+ curr = curr.bump();
}
self.inner = self.inner.as_slice()[res.len()..].iter();
diff --git a/crates/proc-macro-api/src/version.rs b/crates/proc-macro-api/src/version.rs
index 87118a6265..5ff1f36c54 100644
--- a/crates/proc-macro-api/src/version.rs
+++ b/crates/proc-macro-api/src/version.rs
@@ -131,7 +131,7 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
let len_bytes = &dot_rustc[8..16];
let data_len = u64::from_le_bytes(len_bytes.try_into().unwrap()) as usize;
(&dot_rustc[16..data_len + 12], 17)
- }
+ }
_ => {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs
index 54430e0d19..917d8a6e26 100644
--- a/crates/proc-macro-srv/src/server.rs
+++ b/crates/proc-macro-srv/src/server.rs
@@ -58,6 +58,10 @@ impl server::Types for RustAnalyzer {
}
impl server::FreeFunctions for RustAnalyzer {
+ fn injected_env_var(&mut self, _var: &str) -> Option<String> {
+ None
+ }
+
fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
// FIXME: track env var accesses
// https://github.com/rust-lang/rust/pull/71858
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 408c1fb6f3..39ac338aa1 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -42,6 +42,7 @@ tracing-tree.workspace = true
triomphe.workspace = true
nohash-hasher.workspace = true
always-assert = "0.1.2"
+walkdir = "2.3.2"
cfg.workspace = true
flycheck.workspace = true
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index 29bd02f92d..8472e49de9 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -87,6 +87,7 @@ fn main() -> anyhow::Result<()> {
flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?,
flags::RustAnalyzerCmd::Scip(cmd) => cmd.run()?,
flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::RustcTests(cmd) => cmd.run()?,
}
Ok(())
}
diff --git a/crates/rust-analyzer/src/cli.rs b/crates/rust-analyzer/src/cli.rs
index 64646b33ad..de00c4192b 100644
--- a/crates/rust-analyzer/src/cli.rs
+++ b/crates/rust-analyzer/src/cli.rs
@@ -10,6 +10,7 @@ mod ssr;
mod lsif;
mod scip;
mod run_tests;
+mod rustc_tests;
mod progress_report;
diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs
index fe5022f860..5633c0c488 100644
--- a/crates/rust-analyzer/src/cli/flags.rs
+++ b/crates/rust-analyzer/src/cli/flags.rs
@@ -98,6 +98,15 @@ xflags::xflags! {
required path: PathBuf
}
+ /// Run unit tests of the project using mir interpreter
+ cmd rustc-tests {
+ /// Directory with Cargo.toml.
+ required rustc_repo: PathBuf
+
+ /// Only run tests with filter as substring
+ optional --filter path: String
+ }
+
cmd diagnostics {
/// Directory with Cargo.toml.
required path: PathBuf
@@ -159,6 +168,7 @@ pub enum RustAnalyzerCmd {
Highlight(Highlight),
AnalysisStats(AnalysisStats),
RunTests(RunTests),
+ RustcTests(RustcTests),
Diagnostics(Diagnostics),
Ssr(Ssr),
Search(Search),
@@ -212,6 +222,12 @@ pub struct RunTests {
}
#[derive(Debug)]
+pub struct RustcTests {
+ pub rustc_repo: PathBuf,
+ pub filter: Option<String>,
+}
+
+#[derive(Debug)]
pub struct Diagnostics {
pub path: PathBuf,
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
new file mode 100644
index 0000000000..c89b88ac0f
--- /dev/null
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -0,0 +1,236 @@
+//! Run all tests in a project, similar to `cargo test`, but using the mir interpreter.
+
+use std::{
+ cell::RefCell, collections::HashMap, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf,
+};
+
+use hir::Crate;
+use ide::{AnalysisHost, Change, DiagnosticCode, DiagnosticsConfig};
+use profile::StopWatch;
+use project_model::{CargoConfig, ProjectWorkspace, RustLibSource, Sysroot};
+
+use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
+use triomphe::Arc;
+use vfs::{AbsPathBuf, FileId};
+use walkdir::WalkDir;
+
+use crate::cli::{flags, report_metric, Result};
+
+struct Tester {
+ host: AnalysisHost,
+ root_file: FileId,
+ pass_count: u64,
+ ignore_count: u64,
+ fail_count: u64,
+ stopwatch: StopWatch,
+}
+
+fn string_to_diagnostic_code_leaky(code: &str) -> DiagnosticCode {
+ thread_local! {
+ static LEAK_STORE: RefCell<HashMap<String, DiagnosticCode>> = RefCell::new(HashMap::new());
+ }
+ LEAK_STORE.with_borrow_mut(|s| match s.get(code) {
+ Some(c) => *c,
+ None => {
+ let v = DiagnosticCode::RustcHardError(format!("E{code}").leak());
+ s.insert(code.to_owned(), v);
+ v
+ }
+ })
+}
+
+fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> HashMap<DiagnosticCode, usize> {
+ let text = read_to_string(p).unwrap();
+ let mut result = HashMap::new();
+ {
+ let mut text = &*text;
+ while let Some(p) = text.find("error[E") {
+ text = &text[p + 7..];
+ let code = string_to_diagnostic_code_leaky(&text[..4]);
+ *result.entry(code).or_insert(0) += 1;
+ }
+ }
+ result
+}
+
+impl Tester {
+ fn new() -> Result<Self> {
+ let tmp_file = AbsPathBuf::assert("/tmp/ra-rustc-test.rs".into());
+ std::fs::write(&tmp_file, "")?;
+ let mut cargo_config = CargoConfig::default();
+ cargo_config.sysroot = Some(RustLibSource::Discover);
+ let workspace = ProjectWorkspace::DetachedFiles {
+ files: vec![tmp_file.clone()],
+ sysroot: Ok(
+ Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env).unwrap()
+ ),
+ rustc_cfg: vec![],
+ };
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: false,
+ with_proc_macro_server: ProcMacroServerChoice::Sysroot,
+ prefill_caches: false,
+ };
+ let (host, _vfs, _proc_macro) =
+ load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
+ let db = host.raw_database();
+ let krates = Crate::all(db);
+ let root_crate = krates.iter().cloned().find(|krate| krate.origin(db).is_local()).unwrap();
+ let root_file = root_crate.root_file(db);
+ Ok(Self {
+ host,
+ root_file,
+ pass_count: 0,
+ ignore_count: 0,
+ fail_count: 0,
+ stopwatch: StopWatch::start(),
+ })
+ }
+
+ fn test(&mut self, p: PathBuf) {
+ if p.parent().unwrap().file_name().unwrap() == "auxiliary" {
+ // These are not tests
+ return;
+ }
+ if IGNORED_TESTS.iter().any(|ig| p.file_name().is_some_and(|x| x == *ig)) {
+ println!("{p:?} IGNORE");
+ self.ignore_count += 1;
+ return;
+ }
+ let stderr_path = p.with_extension("stderr");
+ let expected = if stderr_path.exists() {
+ detect_errors_from_rustc_stderr_file(stderr_path)
+ } else {
+ HashMap::new()
+ };
+ let text = read_to_string(&p).unwrap();
+ let mut change = Change::new();
+ // Ignore unstable tests, since they move too fast and we do not intend to support all of them.
+ let mut ignore_test = text.contains("#![feature");
+    // Ignore tests with extern crates, as this infra doesn't support them yet.
+ ignore_test |= text.contains("// aux-build:") || text.contains("// aux-crate:");
+    // Ignore tests with extern modules similarly.
+ ignore_test |= text.contains("mod ");
+ // These should work, but they don't, and I don't know why, so ignore them.
+ ignore_test |= text.contains("extern crate proc_macro");
+ let should_have_no_error = text.contains("// check-pass")
+ || text.contains("// build-pass")
+ || text.contains("// run-pass");
+ change.change_file(self.root_file, Some(Arc::from(text)));
+ self.host.apply_change(change);
+ let diagnostic_config = DiagnosticsConfig::test_sample();
+ let diags = self
+ .host
+ .analysis()
+ .diagnostics(&diagnostic_config, ide::AssistResolveStrategy::None, self.root_file)
+ .unwrap();
+ let mut actual = HashMap::new();
+ for diag in diags {
+ if !matches!(diag.code, DiagnosticCode::RustcHardError(_)) {
+ continue;
+ }
+ if !should_have_no_error && !SUPPORTED_DIAGNOSTICS.contains(&diag.code) {
+ continue;
+ }
+ *actual.entry(diag.code).or_insert(0) += 1;
+ }
+ // Ignore tests with diagnostics that we don't emit.
+ ignore_test |= expected.keys().any(|k| !SUPPORTED_DIAGNOSTICS.contains(k));
+ if ignore_test {
+ println!("{p:?} IGNORE");
+ self.ignore_count += 1;
+ } else if actual == expected {
+ println!("{p:?} PASS");
+ self.pass_count += 1;
+ } else {
+ println!("{p:?} FAIL");
+ println!("actual (r-a) = {:?}", actual);
+ println!("expected (rustc) = {:?}", expected);
+ self.fail_count += 1;
+ }
+ }
+
+ fn report(&mut self) {
+ println!(
+ "Pass count = {}, Fail count = {}, Ignore count = {}",
+ self.pass_count, self.fail_count, self.ignore_count
+ );
+ println!("Testing time and memory = {}", self.stopwatch.elapsed());
+ report_metric("rustc failed tests", self.fail_count, "#");
+ report_metric("rustc testing time", self.stopwatch.elapsed().time.as_millis() as u64, "ms");
+ }
+}
+
+/// These tests break rust-analyzer (either by panicking or hanging) so we should ignore them.
+const IGNORED_TESTS: &[&str] = &[
+ "trait-with-missing-associated-type-restriction.rs", // #15646
+ "trait-with-missing-associated-type-restriction-fixable.rs", // #15646
+ "resolve-self-in-impl.rs",
+ "basic.rs", // ../rust/tests/ui/associated-type-bounds/return-type-notation/basic.rs
+ "issue-26056.rs",
+ "float-field.rs",
+ "invalid_operator_trait.rs",
+ "type-alias-impl-trait-assoc-dyn.rs",
+ "deeply-nested_closures.rs", // exponential time
+ "hang-on-deeply-nested-dyn.rs", // exponential time
+ "dyn-rpit-and-let.rs", // unexpected free variable with depth `^1.0` with outer binder ^0
+ "issue-16098.rs", // Huge recursion limit for macros?
+ "issue-83471.rs", // crates/hir-ty/src/builder.rs:78:9: assertion failed: self.remaining() > 0
+];
+
+const SUPPORTED_DIAGNOSTICS: &[DiagnosticCode] = &[
+ DiagnosticCode::RustcHardError("E0023"),
+ DiagnosticCode::RustcHardError("E0046"),
+ DiagnosticCode::RustcHardError("E0063"),
+ DiagnosticCode::RustcHardError("E0107"),
+ DiagnosticCode::RustcHardError("E0117"),
+ DiagnosticCode::RustcHardError("E0133"),
+ DiagnosticCode::RustcHardError("E0210"),
+ DiagnosticCode::RustcHardError("E0268"),
+ DiagnosticCode::RustcHardError("E0308"),
+ DiagnosticCode::RustcHardError("E0384"),
+ DiagnosticCode::RustcHardError("E0407"),
+ DiagnosticCode::RustcHardError("E0432"),
+ DiagnosticCode::RustcHardError("E0451"),
+ DiagnosticCode::RustcHardError("E0507"),
+ DiagnosticCode::RustcHardError("E0583"),
+ DiagnosticCode::RustcHardError("E0559"),
+ DiagnosticCode::RustcHardError("E0616"),
+ DiagnosticCode::RustcHardError("E0618"),
+ DiagnosticCode::RustcHardError("E0624"),
+ DiagnosticCode::RustcHardError("E0774"),
+ DiagnosticCode::RustcHardError("E0767"),
+ DiagnosticCode::RustcHardError("E0777"),
+];
+
+impl flags::RustcTests {
+ pub fn run(self) -> Result<()> {
+ let mut tester = Tester::new()?;
+ let walk_dir = WalkDir::new(self.rustc_repo.join("tests/ui"));
+ for i in walk_dir {
+ let i = i?;
+ let p = i.into_path();
+ if let Some(f) = &self.filter {
+ if !p.as_os_str().to_string_lossy().contains(f) {
+ continue;
+ }
+ }
+ if p.extension().map_or(true, |x| x != "rs") {
+ continue;
+ }
+ if let Err(e) = std::panic::catch_unwind({
+ let tester = AssertUnwindSafe(&mut tester);
+ let p = p.clone();
+ move || {
+ let tester = tester;
+ tester.0.test(p);
+ }
+ }) {
+ println!("panic detected at test {:?}", p);
+ std::panic::resume_unwind(e);
+ }
+ }
+ tester.report();
+ Ok(())
+ }
+}
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 90d1d6b055..258f741063 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -1354,6 +1354,7 @@ impl Config {
}
}
+ // FIXME: This should be an AbsolutePathBuf
fn target_dir_from_config(&self) -> Option<PathBuf> {
self.data.rust_analyzerTargetDir.as_ref().and_then(|target_dir| match target_dir {
TargetDirectory::UseSubdirectory(yes) if *yes => {
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index 57955ebf89..d8a590c808 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -12,8 +12,8 @@ use anyhow::Context;
use ide::{
AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange,
- HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory,
- Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
+ HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, RangeLimit,
+ ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
};
use ide_db::SymbolKind;
use lsp_server::ErrorCode;
@@ -1409,7 +1409,7 @@ pub(crate) fn handle_inlay_hints(
let inlay_hints_config = snap.config.inlay_hints();
Ok(Some(
snap.analysis
- .inlay_hints(&inlay_hints_config, file_id, Some(range))?
+ .inlay_hints(&inlay_hints_config, file_id, Some(RangeLimit::Fixed(range)))?
.into_iter()
.map(|it| {
to_proto::inlay_hint(
@@ -1440,22 +1440,13 @@ pub(crate) fn handle_inlay_hints_resolve(
anyhow::ensure!(snap.file_exists(file_id), "Invalid LSP resolve data");
let line_index = snap.file_line_index(file_id)?;
- let range = from_proto::text_range(
- &line_index,
- lsp_types::Range { start: original_hint.position, end: original_hint.position },
- )?;
- let range_start = range.start();
- let range_end = range.end();
- let large_range = TextRange::new(
- range_start.checked_sub(1.into()).unwrap_or(range_start),
- range_end.checked_add(1.into()).unwrap_or(range_end),
- );
+ let hint_position = from_proto::offset(&line_index, original_hint.position)?;
let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints();
forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty();
let resolve_hints = snap.analysis.inlay_hints(
&forced_resolve_inlay_hints_config,
file_id,
- Some(large_range),
+ Some(RangeLimit::NearestParent(hint_position)),
)?;
let mut resolved_hints = resolve_hints
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index ed2cf07551..41ff17f5e4 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -32,7 +32,10 @@ fn integrated_highlighting_benchmark() {
let workspace_to_load = project_root();
let file = "./crates/rust-analyzer/src/config.rs";
- let cargo_config = CargoConfig::default();
+ let cargo_config = CargoConfig {
+ sysroot: Some(project_model::RustLibSource::Discover),
+ ..CargoConfig::default()
+ };
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::None,
@@ -85,7 +88,10 @@ fn integrated_completion_benchmark() {
let workspace_to_load = project_root();
let file = "./crates/hir/src/lib.rs";
- let cargo_config = CargoConfig::default();
+ let cargo_config = CargoConfig {
+ sysroot: Some(project_model::RustLibSource::Discover),
+ ..CargoConfig::default()
+ };
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::None,
@@ -103,10 +109,46 @@ fn integrated_completion_benchmark() {
vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
};
+ // kick off parsing and index population
+
+ let completion_offset = {
+ let _it = stdx::timeit("change");
+ let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+ let completion_offset =
+ patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
+ + "sel".len();
+ let mut change = Change::new();
+ change.change_file(file_id, Some(Arc::from(text)));
+ host.apply_change(change);
+ completion_offset
+ };
+
{
- let _it = stdx::timeit("initial");
+ let _span = profile::cpu_span();
let analysis = host.analysis();
- analysis.highlight_as_html(file_id, false).unwrap();
+ let config = CompletionConfig {
+ enable_postfix_completions: true,
+ enable_imports_on_the_fly: true,
+ enable_self_on_the_fly: true,
+ enable_private_editable: true,
+ full_function_signatures: false,
+ callable: Some(CallableSnippets::FillArguments),
+ snippet_cap: SnippetCap::new(true),
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::ByCrate,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ snippets: Vec::new(),
+ prefer_no_std: false,
+ prefer_prelude: true,
+ limit: None,
+ };
+ let position =
+ FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
+ analysis.completions(&config, position, None).unwrap();
}
profile::init_from("*>5");
@@ -116,8 +158,8 @@ fn integrated_completion_benchmark() {
let _it = stdx::timeit("change");
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
let completion_offset =
- patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
- + "sel".len();
+ patch(&mut text, "sel;\ndb.struct_data(self.id)", ";sel;\ndb.struct_data(self.id)")
+ + ";sel".len();
let mut change = Change::new();
change.change_file(file_id, Some(Arc::from(text)));
host.apply_change(change);
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index be5b954ad3..f81dff8840 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -275,10 +275,19 @@ impl ast::Path {
successors(Some(self.clone()), ast::Path::qualifier).last().unwrap()
}
+ pub fn first_qualifier(&self) -> Option<ast::Path> {
+ successors(self.qualifier(), ast::Path::qualifier).last()
+ }
+
pub fn first_segment(&self) -> Option<ast::PathSegment> {
self.first_qualifier_or_self().segment()
}
+ // FIXME: Check usages of Self::segments, they might be wrong because of the logic of the below function
+ pub fn segments_of_this_path_only_rev(&self) -> impl Iterator<Item = ast::PathSegment> + Clone {
+ self.qualifiers_and_self().filter_map(|it| it.segment())
+ }
+
pub fn segments(&self) -> impl Iterator<Item = ast::PathSegment> + Clone {
successors(self.first_segment(), |p| {
p.parent_path().parent_path().and_then(|p| p.segment())
@@ -289,6 +298,10 @@ impl ast::Path {
successors(self.qualifier(), |p| p.qualifier())
}
+ pub fn qualifiers_and_self(&self) -> impl Iterator<Item = ast::Path> + Clone {
+ successors(Some(self.clone()), |p| p.qualifier())
+ }
+
pub fn top_path(&self) -> ast::Path {
let mut this = self.clone();
while let Some(path) = this.parent_path() {
diff --git a/crates/syntax/src/ast/traits.rs b/crates/syntax/src/ast/traits.rs
index 3e43df2d0d..16f7356b1e 100644
--- a/crates/syntax/src/ast/traits.rs
+++ b/crates/syntax/src/ast/traits.rs
@@ -76,9 +76,6 @@ pub trait HasDocComments: HasAttrs {
fn doc_comments(&self) -> DocCommentIter {
DocCommentIter { iter: self.syntax().children_with_tokens() }
}
- fn doc_comments_and_attrs(&self) -> AttrDocCommentIter {
- AttrDocCommentIter { iter: self.syntax().children_with_tokens() }
- }
}
impl DocCommentIter {
diff --git a/crates/syntax/src/utils.rs b/crates/syntax/src/utils.rs
index 25f34ea9d3..a38f8b2b55 100644
--- a/crates/syntax/src/utils.rs
+++ b/crates/syntax/src/utils.rs
@@ -1,48 +1,8 @@
//! A set of utils methods to reuse on other abstraction levels
-use itertools::Itertools;
-
-use crate::{ast, match_ast, AstNode, SyntaxKind};
-
-pub fn path_to_string_stripping_turbo_fish(path: &ast::Path) -> String {
- path.syntax()
- .children()
- .filter_map(|node| {
- match_ast! {
- match node {
- ast::PathSegment(it) => {
- Some(it.name_ref()?.to_string())
- },
- ast::Path(it) => {
- Some(path_to_string_stripping_turbo_fish(&it))
- },
- _ => None,
- }
- }
- })
- .join("::")
-}
+use crate::SyntaxKind;
pub fn is_raw_identifier(name: &str) -> bool {
let is_keyword = SyntaxKind::from_keyword(name).is_some();
is_keyword && !matches!(name, "self" | "crate" | "super" | "Self")
}
-
-#[cfg(test)]
-mod tests {
- use super::path_to_string_stripping_turbo_fish;
- use crate::ast::make;
-
- #[test]
- fn turbofishes_are_stripped() {
- assert_eq!("Vec", path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>")),);
- assert_eq!(
- "Vec::new",
- path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>::new")),
- );
- assert_eq!(
- "Vec::new",
- path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::new()")),
- );
- }
-}
diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs
index 8ffda5d78d..ef5b10ee9d 100644
--- a/crates/vfs/src/lib.rs
+++ b/crates/vfs/src/lib.rs
@@ -61,13 +61,17 @@ pub use paths::{AbsPath, AbsPathBuf};
/// Most functions in rust-analyzer use this when they need to refer to a file.
#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct FileId(u32);
+// pub struct FileId(NonMaxU32);
impl FileId {
/// Think twice about using this outside of tests. If this ends up in a wrong place it will cause panics!
+ // FIXME: To be removed once we get rid of all `SpanData::DUMMY` usages.
pub const BOGUS: FileId = FileId(0xe4e4e);
+ pub const MAX_FILE_ID: u32 = 0x7fff_ffff;
#[inline]
- pub fn from_raw(raw: u32) -> FileId {
+ pub const fn from_raw(raw: u32) -> FileId {
+ assert!(raw <= Self::MAX_FILE_ID);
FileId(raw)
}
diff --git a/lib/line-index/Cargo.toml b/lib/line-index/Cargo.toml
index b7b4a01818..494a7fa979 100644
--- a/lib/line-index/Cargo.toml
+++ b/lib/line-index/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "line-index"
-version = "0.1.0"
+version = "0.1.1"
description = "Maps flat `TextSize` offsets to/from `(line, column)` representation."
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/line-index"
diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs
index e52cbfca3e..092ab8c593 100644
--- a/xtask/src/flags.rs
+++ b/xtask/src/flags.rs
@@ -110,6 +110,7 @@ pub struct PublishReleaseNotes {
#[derive(Debug)]
pub enum MeasurementType {
Build,
+ RustcTests,
AnalyzeSelf,
AnalyzeRipgrep,
AnalyzeWebRender,
@@ -122,6 +123,7 @@ impl FromStr for MeasurementType {
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"build" => Ok(Self::Build),
+ "rustc_tests" => Ok(Self::RustcTests),
"self" => Ok(Self::AnalyzeSelf),
"ripgrep-13.0.0" => Ok(Self::AnalyzeRipgrep),
"webrender-2022" => Ok(Self::AnalyzeWebRender),
@@ -135,6 +137,7 @@ impl AsRef<str> for MeasurementType {
fn as_ref(&self) -> &str {
match self {
Self::Build => "build",
+ Self::RustcTests => "rustc_tests",
Self::AnalyzeSelf => "self",
Self::AnalyzeRipgrep => "ripgrep-13.0.0",
Self::AnalyzeWebRender => "webrender-2022",
diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs
index 59d41d8e4b..845928432c 100644
--- a/xtask/src/metrics.rs
+++ b/xtask/src/metrics.rs
@@ -36,6 +36,9 @@ impl flags::Metrics {
MeasurementType::Build => {
metrics.measure_build(sh)?;
}
+ MeasurementType::RustcTests => {
+ metrics.measure_rustc_tests(sh)?;
+ }
MeasurementType::AnalyzeSelf => {
metrics.measure_analysis_stats_self(sh)?;
}
@@ -50,6 +53,7 @@ impl flags::Metrics {
}
None => {
metrics.measure_build(sh)?;
+ metrics.measure_rustc_tests(sh)?;
metrics.measure_analysis_stats_self(sh)?;
metrics.measure_analysis_stats(sh, MeasurementType::AnalyzeRipgrep.as_ref())?;
metrics.measure_analysis_stats(sh, MeasurementType::AnalyzeWebRender.as_ref())?;
@@ -78,6 +82,19 @@ impl Metrics {
self.report("build", time.as_millis() as u64, "ms".into());
Ok(())
}
+
+ fn measure_rustc_tests(&mut self, sh: &Shell) -> anyhow::Result<()> {
+ eprintln!("\nMeasuring rustc tests");
+
+ cmd!(sh, "git clone --depth=1 https://github.com/rust-lang/rust").run()?;
+
+ let output = cmd!(sh, "./target/release/rust-analyzer rustc-tests ./rust").read()?;
+ for (metric, value, unit) in parse_metrics(&output) {
+ self.report(metric, value, unit.into());
+ }
+ Ok(())
+ }
+
fn measure_analysis_stats_self(&mut self, sh: &Shell) -> anyhow::Result<()> {
self.measure_analysis_stats_path(sh, "self", ".")
}