Unnamed repository; edit this file 'description' to name the repository.
-rw-r--r--Cargo.lock6
-rw-r--r--Cargo.toml5
-rw-r--r--crates/base-db/src/editioned_file_id.rs291
-rw-r--r--crates/base-db/src/input.rs7
-rw-r--r--crates/base-db/src/lib.rs39
-rw-r--r--crates/cfg/Cargo.toml1
-rw-r--r--crates/cfg/src/cfg_expr.rs59
-rw-r--r--crates/cfg/src/tests.rs42
-rw-r--r--crates/hir-def/Cargo.toml4
-rw-r--r--crates/hir-def/src/attr.rs900
-rw-r--r--crates/hir-def/src/attrs.rs1610
-rw-r--r--crates/hir-def/src/db.rs70
-rw-r--r--crates/hir-def/src/expr_store/expander.rs14
-rw-r--r--crates/hir-def/src/expr_store/lower.rs22
-rw-r--r--crates/hir-def/src/expr_store/pretty.rs19
-rw-r--r--crates/hir-def/src/expr_store/tests/body/block.rs4
-rw-r--r--crates/hir-def/src/expr_store/tests/signatures.rs14
-rw-r--r--crates/hir-def/src/import_map.rs34
-rw-r--r--crates/hir-def/src/item_tree.rs40
-rw-r--r--crates/hir-def/src/item_tree/attrs.rs220
-rw-r--r--crates/hir-def/src/item_tree/lower.rs35
-rw-r--r--crates/hir-def/src/item_tree/pretty.rs12
-rw-r--r--crates/hir-def/src/item_tree/tests.rs9
-rw-r--r--crates/hir-def/src/lang_item.rs17
-rw-r--r--crates/hir-def/src/lib.rs94
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mbe.rs43
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mod.rs15
-rw-r--r--crates/hir-def/src/macro_expansion_tests/proc_macros.rs96
-rw-r--r--crates/hir-def/src/nameres.rs15
-rw-r--r--crates/hir-def/src/nameres/assoc.rs39
-rw-r--r--crates/hir-def/src/nameres/attr_resolution.rs10
-rw-r--r--crates/hir-def/src/nameres/collector.rs177
-rw-r--r--crates/hir-def/src/nameres/diagnostics.rs14
-rw-r--r--crates/hir-def/src/nameres/mod_resolution.rs5
-rw-r--r--crates/hir-def/src/nameres/proc_macro.rs24
-rw-r--r--crates/hir-def/src/signatures.rs140
-rw-r--r--crates/hir-def/src/src.rs9
-rw-r--r--crates/hir-def/src/test_db.rs33
-rw-r--r--crates/hir-expand/Cargo.toml2
-rw-r--r--crates/hir-expand/src/attrs.rs808
-rw-r--r--crates/hir-expand/src/builtin/fn_macro.rs2
-rw-r--r--crates/hir-expand/src/cfg_process.rs638
-rw-r--r--crates/hir-expand/src/db.rs181
-rw-r--r--crates/hir-expand/src/declarative.rs58
-rw-r--r--crates/hir-expand/src/files.rs33
-rw-r--r--crates/hir-expand/src/fixup.rs5
-rw-r--r--crates/hir-expand/src/lib.rs165
-rw-r--r--crates/hir-expand/src/mod_path.rs59
-rw-r--r--crates/hir-expand/src/span_map.rs13
-rw-r--r--crates/hir-ty/src/consteval.rs3
-rw-r--r--crates/hir-ty/src/diagnostics/decl_check.rs8
-rw-r--r--crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs6
-rw-r--r--crates/hir-ty/src/diagnostics/unsafe_check.rs4
-rw-r--r--crates/hir-ty/src/infer.rs12
-rw-r--r--crates/hir-ty/src/infer/coerce.rs21
-rw-r--r--crates/hir-ty/src/infer/expr.rs14
-rw-r--r--crates/hir-ty/src/layout.rs4
-rw-r--r--crates/hir-ty/src/layout/adt.rs35
-rw-r--r--crates/hir-ty/src/method_resolution.rs10
-rw-r--r--crates/hir-ty/src/mir/eval/shim.rs41
-rw-r--r--crates/hir-ty/src/next_solver/interner.rs59
-rw-r--r--crates/hir-ty/src/target_feature.rs46
-rw-r--r--crates/hir-ty/src/tests/incremental.rs55
-rw-r--r--crates/hir-ty/src/utils.rs8
-rw-r--r--crates/hir/src/attrs.rs260
-rw-r--r--crates/hir/src/diagnostics.rs13
-rw-r--r--crates/hir/src/lib.rs267
-rw-r--r--crates/hir/src/semantics.rs71
-rw-r--r--crates/hir/src/semantics/child_by_source.rs13
-rw-r--r--crates/hir/src/symbols.rs8
-rw-r--r--crates/ide-assists/src/handlers/add_missing_match_arms.rs6
-rw-r--r--crates/ide-assists/src/handlers/destructure_struct_binding.rs4
-rw-r--r--crates/ide-assists/src/handlers/move_module_to_file.rs10
-rw-r--r--crates/ide-assists/src/lib.rs4
-rw-r--r--crates/ide-assists/src/tests.rs4
-rw-r--r--crates/ide-assists/src/utils.rs13
-rw-r--r--crates/ide-completion/src/completions/attribute/lint.rs2
-rw-r--r--crates/ide-completion/src/completions/flyimport.rs4
-rw-r--r--crates/ide-completion/src/completions/postfix.rs2
-rw-r--r--crates/ide-completion/src/completions/snippet.rs2
-rw-r--r--crates/ide-completion/src/context.rs84
-rw-r--r--crates/ide-completion/src/item.rs12
-rw-r--r--crates/ide-completion/src/render.rs13
-rw-r--r--crates/ide-completion/src/render/literal.rs2
-rw-r--r--crates/ide-completion/src/render/pattern.rs2
-rw-r--r--crates/ide-completion/src/render/variant.rs6
-rw-r--r--crates/ide-completion/src/tests.rs4
-rw-r--r--crates/ide-completion/src/tests/flyimport.rs2
-rw-r--r--crates/ide-db/src/defs.rs38
-rw-r--r--crates/ide-db/src/documentation.rs351
-rw-r--r--crates/ide-db/src/ra_fixture.rs12
-rw-r--r--crates/ide-db/src/rust_doc.rs2
-rw-r--r--crates/ide-db/src/search.rs16
-rw-r--r--crates/ide-db/src/test_data/test_doc_alias.txt30
-rw-r--r--crates/ide-db/src/test_data/test_symbol_index_collection.txt134
-rw-r--r--crates/ide-db/src/test_data/test_symbols_exclude_imports.txt2
-rw-r--r--crates/ide-db/src/test_data/test_symbols_with_imports.txt4
-rw-r--r--crates/ide-db/src/traits.rs6
-rw-r--r--crates/ide-diagnostics/src/handlers/inactive_code.rs3
-rw-r--r--crates/ide-diagnostics/src/handlers/invalid_derive_target.rs4
-rw-r--r--crates/ide-diagnostics/src/handlers/macro_error.rs22
-rw-r--r--crates/ide-diagnostics/src/handlers/malformed_derive.rs4
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs5
-rw-r--r--crates/ide-diagnostics/src/lib.rs43
-rw-r--r--crates/ide-ssr/src/from_comment.rs2
-rw-r--r--crates/ide-ssr/src/lib.rs6
-rw-r--r--crates/ide-ssr/src/search.rs8
-rw-r--r--crates/ide/src/doc_links.rs38
-rw-r--r--crates/ide/src/doc_links/tests.rs73
-rw-r--r--crates/ide/src/fixture.rs32
-rw-r--r--crates/ide/src/goto_implementation.rs2
-rw-r--r--crates/ide/src/highlight_related.rs2
-rw-r--r--crates/ide/src/hover/render.rs52
-rw-r--r--crates/ide/src/inlay_hints.rs4
-rw-r--r--crates/ide/src/lib.rs13
-rw-r--r--crates/ide/src/navigation_target.rs38
-rw-r--r--crates/ide/src/references.rs5
-rw-r--r--crates/ide/src/runnables.rs50
-rw-r--r--crates/ide/src/signature_help.rs36
-rw-r--r--crates/ide/src/static_index.rs6
-rw-r--r--crates/ide/src/syntax_highlighting.rs2
-rw-r--r--crates/ide/src/syntax_highlighting/highlight.rs8
-rw-r--r--crates/ide/src/syntax_highlighting/html.rs2
-rw-r--r--crates/ide/src/syntax_highlighting/inject.rs189
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html72
-rw-r--r--crates/ide/src/typing.rs5
-rw-r--r--crates/ide/src/typing/on_enter.rs2
-rw-r--r--crates/ide/src/view_item_tree.rs2
-rw-r--r--crates/rust-analyzer/src/cli/analysis_stats.rs8
-rw-r--r--crates/rust-analyzer/src/cli/scip.rs8
-rw-r--r--crates/rust-analyzer/src/cli/ssr.rs2
-rw-r--r--crates/rust-analyzer/src/cli/unresolved_references.rs2
-rw-r--r--crates/rust-analyzer/src/lsp/to_proto.rs4
-rw-r--r--crates/syntax-bridge/src/lib.rs68
-rw-r--r--crates/syntax/src/ast.rs4
-rw-r--r--crates/syntax/src/ast/node_ext.rs38
-rw-r--r--crates/syntax/src/ast/token_ext.rs6
-rw-r--r--crates/syntax/src/ast/traits.rs67
-rw-r--r--crates/test-fixture/src/lib.rs45
139 files changed, 5008 insertions, 3855 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 7e1bd6320d..efe56cb7f6 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -845,6 +845,7 @@ dependencies = [
name = "hir-expand"
version = "0.0.0"
dependencies = [
+ "arrayvec",
"base-db",
"cfg",
"cov-mark",
@@ -863,6 +864,7 @@ dependencies = [
"stdx",
"syntax",
"syntax-bridge",
+ "thin-vec",
"tracing",
"triomphe",
"tt",
@@ -2272,9 +2274,9 @@ checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"
[[package]]
name = "rowan"
-version = "0.15.15"
+version = "0.15.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49"
+checksum = "d4f1e4a001f863f41ea8d0e6a0c34b356d5b733db50dadab3efef640bafb779b"
dependencies = [
"countme",
"hashbrown 0.14.5",
diff --git a/Cargo.toml b/Cargo.toml
index d263cb121d..6991eeec73 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -52,7 +52,7 @@ debug = 2
# local crates
macros = { path = "./crates/macros", version = "0.0.0" }
base-db = { path = "./crates/base-db", version = "0.0.0" }
-cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt"] }
+cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt", "syntax"] }
hir = { path = "./crates/hir", version = "0.0.0" }
hir-def = { path = "./crates/hir-def", version = "0.0.0" }
hir-expand = { path = "./crates/hir-expand", version = "0.0.0" }
@@ -132,7 +132,7 @@ process-wrap = { version = "8.2.1", features = ["std"] }
pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.6", default-features = false }
rayon = "1.10.0"
-rowan = "=0.15.15"
+rowan = "=0.15.17"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
salsa = { version = "0.24.0", default-features = false, features = [
@@ -170,6 +170,7 @@ tracing-subscriber = { version = "0.3.20", default-features = false, features =
triomphe = { version = "0.1.14", default-features = false, features = ["std"] }
url = "2.5.4"
xshell = "0.2.7"
+thin-vec = "0.2.14"
petgraph = { version = "0.8.2", default-features = false }
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
diff --git a/crates/base-db/src/editioned_file_id.rs b/crates/base-db/src/editioned_file_id.rs
new file mode 100644
index 0000000000..2f8969c0ea
--- /dev/null
+++ b/crates/base-db/src/editioned_file_id.rs
@@ -0,0 +1,291 @@
+//! Defines [`EditionedFileId`], a wrapper around [`span::EditionedFileId`] that
+//! is interned (so queries can take it) and remembers its crate.
+
+use core::fmt;
+use std::hash::{Hash, Hasher};
+
+use span::Edition;
+use vfs::FileId;
+
+use crate::{Crate, RootQueryDb};
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct EditionedFileId(
+ salsa::Id,
+ std::marker::PhantomData<&'static salsa::plumbing::interned::Value<EditionedFileId>>,
+);
+
+const _: () = {
+ use salsa::plumbing as zalsa_;
+ use zalsa_::interned as zalsa_struct_;
+ type Configuration_ = EditionedFileId;
+
+ #[derive(Debug, Clone, PartialEq, Eq)]
+ pub struct EditionedFileIdData {
+ editioned_file_id: span::EditionedFileId,
+ krate: Crate,
+ }
+
+ /// We like to include the origin crate in an `EditionedFileId` (for use in the item tree),
+ /// but this poses us a problem.
+ ///
+ /// Spans contain `EditionedFileId`s, and we don't want to make them store the crate too
+ /// because that will increase their size, which will increase memory usage significantly.
+ /// Furthermore, things using spans do not generally need the crate: they are using the
+ /// file id for queries like `ast_id_map` or `parse`, which do not care about the crate.
+ ///
+    /// To solve this, we hash **only the `span::EditionedFileId`**, but still compare
+    /// the crate in the equality check. This preserves the invariant of `Hash` and `Eq` -
+    /// equal items always have equal hashes, and although different items may share a hash,
+    /// the same file id being used by multiple crates is a rare thing. Then,
+    /// when we only have a `span::EditionedFileId`, we use the `intern()` method to
+    /// reuse existing file ids, and create a new one only if needed. See [`from_span_guess_origin`].
+ ///
+ /// See this for more info: https://rust-lang.zulipchat.com/#narrow/channel/185405-t-compiler.2Frust-analyzer/topic/Letting.20EditionedFileId.20know.20its.20crate/near/530189401
+ ///
+ /// [`from_span_guess_origin`]: EditionedFileId::from_span_guess_origin
+ #[derive(Hash, PartialEq, Eq)]
+ struct WithoutCrate {
+ editioned_file_id: span::EditionedFileId,
+ }
+
+ impl Hash for EditionedFileIdData {
+ #[inline]
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ let EditionedFileIdData { editioned_file_id, krate: _ } = *self;
+ editioned_file_id.hash(state);
+ }
+ }
+
+ impl zalsa_struct_::HashEqLike<WithoutCrate> for EditionedFileIdData {
+ #[inline]
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ Hash::hash(self, state);
+ }
+
+ #[inline]
+ fn eq(&self, data: &WithoutCrate) -> bool {
+ let EditionedFileIdData { editioned_file_id, krate: _ } = *self;
+ editioned_file_id == data.editioned_file_id
+ }
+ }
+
+ impl zalsa_::HasJar for EditionedFileId {
+ type Jar = zalsa_struct_::JarImpl<EditionedFileId>;
+ const KIND: zalsa_::JarKind = zalsa_::JarKind::Struct;
+ }
+
+ zalsa_::register_jar! {
+ zalsa_::ErasedJar::erase::<EditionedFileId>()
+ }
+
+ impl zalsa_struct_::Configuration for EditionedFileId {
+ const LOCATION: salsa::plumbing::Location =
+ salsa::plumbing::Location { file: file!(), line: line!() };
+ const DEBUG_NAME: &'static str = "EditionedFileId";
+ const REVISIONS: std::num::NonZeroUsize = std::num::NonZeroUsize::MAX;
+ const PERSIST: bool = false;
+
+ type Fields<'a> = EditionedFileIdData;
+ type Struct<'db> = EditionedFileId;
+
+ fn serialize<S>(_: &Self::Fields<'_>, _: S) -> Result<S::Ok, S::Error>
+ where
+ S: zalsa_::serde::Serializer,
+ {
+ unimplemented!("attempted to serialize value that set `PERSIST` to false")
+ }
+
+ fn deserialize<'de, D>(_: D) -> Result<Self::Fields<'static>, D::Error>
+ where
+ D: zalsa_::serde::Deserializer<'de>,
+ {
+ unimplemented!("attempted to deserialize value that cannot set `PERSIST` to false");
+ }
+ }
+
+ impl Configuration_ {
+ pub fn ingredient(zalsa: &zalsa_::Zalsa) -> &zalsa_struct_::IngredientImpl<Self> {
+ static CACHE: zalsa_::IngredientCache<zalsa_struct_::IngredientImpl<EditionedFileId>> =
+ zalsa_::IngredientCache::new();
+
+ // SAFETY: `lookup_jar_by_type` returns a valid ingredient index, and the only
+ // ingredient created by our jar is the struct ingredient.
+ unsafe {
+ CACHE.get_or_create(zalsa, || {
+ zalsa.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>()
+ })
+ }
+ }
+ }
+
+ impl zalsa_::AsId for EditionedFileId {
+ fn as_id(&self) -> salsa::Id {
+ self.0.as_id()
+ }
+ }
+ impl zalsa_::FromId for EditionedFileId {
+ fn from_id(id: salsa::Id) -> Self {
+ Self(<salsa::Id>::from_id(id), std::marker::PhantomData)
+ }
+ }
+
+ unsafe impl Send for EditionedFileId {}
+ unsafe impl Sync for EditionedFileId {}
+
+ impl std::fmt::Debug for EditionedFileId {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ Self::default_debug_fmt(*self, f)
+ }
+ }
+
+ impl zalsa_::SalsaStructInDb for EditionedFileId {
+ type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex;
+
+ fn lookup_ingredient_index(aux: &zalsa_::Zalsa) -> salsa::plumbing::IngredientIndices {
+ aux.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>().into()
+ }
+
+ fn entries(zalsa: &zalsa_::Zalsa) -> impl Iterator<Item = zalsa_::DatabaseKeyIndex> + '_ {
+ let _ingredient_index =
+ zalsa.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>();
+ <EditionedFileId>::ingredient(zalsa).entries(zalsa).map(|entry| entry.key())
+ }
+
+ #[inline]
+ fn cast(id: salsa::Id, type_id: std::any::TypeId) -> Option<Self> {
+ if type_id == std::any::TypeId::of::<EditionedFileId>() {
+ Some(<Self as salsa::plumbing::FromId>::from_id(id))
+ } else {
+ None
+ }
+ }
+
+ #[inline]
+ unsafe fn memo_table(
+ zalsa: &zalsa_::Zalsa,
+ id: zalsa_::Id,
+ current_revision: zalsa_::Revision,
+ ) -> zalsa_::MemoTableWithTypes<'_> {
+ // SAFETY: Guaranteed by caller.
+ unsafe {
+ zalsa.table().memos::<zalsa_struct_::Value<EditionedFileId>>(id, current_revision)
+ }
+ }
+ }
+
+ unsafe impl zalsa_::Update for EditionedFileId {
+ unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
+ if unsafe { *old_pointer } != new_value {
+ unsafe { *old_pointer = new_value };
+ true
+ } else {
+ false
+ }
+ }
+ }
+
+ impl EditionedFileId {
+ pub fn from_span(
+ db: &(impl salsa::Database + ?Sized),
+ editioned_file_id: span::EditionedFileId,
+ krate: Crate,
+ ) -> Self {
+ let (zalsa, zalsa_local) = db.zalsas();
+ Configuration_::ingredient(zalsa).intern(
+ zalsa,
+ zalsa_local,
+ EditionedFileIdData { editioned_file_id, krate },
+ |_, data| data,
+ )
+ }
+
+ /// Guesses the crate for the file.
+ ///
+ /// Only use this if you cannot precisely determine the origin. This can happen in one of two cases:
+ ///
+ /// 1. The file is not in the module tree.
+ /// 2. You are latency sensitive and cannot afford calling the def map to precisely compute the origin
+ /// (e.g. on enter feature, folding, etc.).
+ pub fn from_span_guess_origin(
+ db: &dyn RootQueryDb,
+ editioned_file_id: span::EditionedFileId,
+ ) -> Self {
+ let (zalsa, zalsa_local) = db.zalsas();
+ Configuration_::ingredient(zalsa).intern(
+ zalsa,
+ zalsa_local,
+ WithoutCrate { editioned_file_id },
+ |_, _| {
+ // FileId not in the database.
+ let krate = db
+ .relevant_crates(editioned_file_id.file_id())
+ .first()
+ .copied()
+ .unwrap_or_else(|| db.all_crates()[0]);
+ EditionedFileIdData { editioned_file_id, krate }
+ },
+ )
+ }
+
+ pub fn editioned_file_id(self, db: &dyn salsa::Database) -> span::EditionedFileId {
+ let zalsa = db.zalsa();
+ let fields = Configuration_::ingredient(zalsa).fields(zalsa, self);
+ fields.editioned_file_id
+ }
+
+ pub fn krate(self, db: &dyn salsa::Database) -> Crate {
+ let zalsa = db.zalsa();
+ let fields = Configuration_::ingredient(zalsa).fields(zalsa, self);
+ fields.krate
+ }
+
+ /// Default debug formatting for this struct (may be useful if you define your own `Debug` impl)
+ pub fn default_debug_fmt(this: Self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ zalsa_::with_attached_database(|db| {
+ let zalsa = db.zalsa();
+ let fields = Configuration_::ingredient(zalsa).fields(zalsa, this);
+ fmt::Debug::fmt(fields, f)
+ })
+ .unwrap_or_else(|| {
+ f.debug_tuple("EditionedFileId").field(&zalsa_::AsId::as_id(&this)).finish()
+ })
+ }
+ }
+};
+
+impl EditionedFileId {
+ #[inline]
+ pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition, krate: Crate) -> Self {
+ EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition), krate)
+ }
+
+ /// Attaches the current edition and guesses the crate for the file.
+ ///
+ /// Only use this if you cannot precisely determine the origin. This can happen in one of two cases:
+ ///
+ /// 1. The file is not in the module tree.
+ /// 2. You are latency sensitive and cannot afford calling the def map to precisely compute the origin
+ /// (e.g. on enter feature, folding, etc.).
+ #[inline]
+ pub fn current_edition_guess_origin(db: &dyn RootQueryDb, file_id: FileId) -> Self {
+ Self::from_span_guess_origin(db, span::EditionedFileId::current_edition(file_id))
+ }
+
+ #[inline]
+ pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
+ let id = self.editioned_file_id(db);
+ id.file_id()
+ }
+
+ #[inline]
+ pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
+ let id = self.editioned_file_id(db);
+ (id.file_id(), id.edition())
+ }
+
+ #[inline]
+ pub fn edition(self, db: &dyn salsa::Database) -> Edition {
+ self.editioned_file_id(db).edition()
+ }
+}
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 5149d2d005..1b41386adf 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -857,9 +857,10 @@ impl CrateGraphBuilder {
}
}
-impl BuiltCrateData {
- pub fn root_file_id(&self, db: &dyn salsa::Database) -> EditionedFileId {
- EditionedFileId::new(db, self.root_file_id, self.edition)
+impl Crate {
+ pub fn root_file_id(self, db: &dyn salsa::Database) -> EditionedFileId {
+ let data = self.data(db);
+ EditionedFileId::new(db, data.root_file_id, data.edition, self)
}
}
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 3629a001b8..9793892410 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -5,6 +5,7 @@ pub use salsa_macros;
// FIXME: Rename this crate, base db is non descriptive
mod change;
+mod editioned_file_id;
mod input;
pub mod target;
@@ -17,6 +18,7 @@ use std::{
pub use crate::{
change::FileChange,
+ editioned_file_id::EditionedFileId,
input::{
BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder,
CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap,
@@ -29,7 +31,6 @@ pub use query_group::{self};
use rustc_hash::FxHasher;
use salsa::{Durability, Setter};
pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
-use span::Edition;
use syntax::{Parse, SyntaxError, ast};
use triomphe::Arc;
pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet};
@@ -175,42 +176,6 @@ impl Files {
}
}
-#[salsa_macros::interned(no_lifetime, debug, constructor=from_span, revisions = usize::MAX)]
-#[derive(PartialOrd, Ord)]
-pub struct EditionedFileId {
- pub editioned_file_id: span::EditionedFileId,
-}
-
-impl EditionedFileId {
- // Salsa already uses the name `new`...
- #[inline]
- pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition) -> Self {
- EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition))
- }
-
- #[inline]
- pub fn current_edition(db: &dyn salsa::Database, file_id: FileId) -> Self {
- EditionedFileId::new(db, file_id, Edition::CURRENT)
- }
-
- #[inline]
- pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
- let id = self.editioned_file_id(db);
- id.file_id()
- }
-
- #[inline]
- pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
- let id = self.editioned_file_id(db);
- (id.file_id(), id.edition())
- }
-
- #[inline]
- pub fn edition(self, db: &dyn SourceDatabase) -> Edition {
- self.editioned_file_id(db).edition()
- }
-}
-
#[salsa_macros::input(debug)]
pub struct FileText {
#[returns(ref)]
diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml
index e17969bd82..9e2a95dbf3 100644
--- a/crates/cfg/Cargo.toml
+++ b/crates/cfg/Cargo.toml
@@ -18,6 +18,7 @@ tracing.workspace = true
# locals deps
tt = { workspace = true, optional = true }
+syntax = { workspace = true, optional = true }
intern.workspace = true
[dev-dependencies]
diff --git a/crates/cfg/src/cfg_expr.rs b/crates/cfg/src/cfg_expr.rs
index 7a21015e14..76e0aba859 100644
--- a/crates/cfg/src/cfg_expr.rs
+++ b/crates/cfg/src/cfg_expr.rs
@@ -63,6 +63,8 @@ impl From<CfgAtom> for CfgExpr {
}
impl CfgExpr {
+ // FIXME: Parsing from `tt` is only used in a handful of places, reconsider
+ // if we should switch them to AST.
#[cfg(feature = "tt")]
pub fn parse<S: Copy>(tt: &tt::TopSubtree<S>) -> CfgExpr {
next_cfg_expr(&mut tt.iter()).unwrap_or(CfgExpr::Invalid)
@@ -73,6 +75,13 @@ impl CfgExpr {
next_cfg_expr(tt).unwrap_or(CfgExpr::Invalid)
}
+ #[cfg(feature = "syntax")]
+ pub fn parse_from_ast(
+ ast: &mut std::iter::Peekable<syntax::ast::TokenTreeChildren>,
+ ) -> CfgExpr {
+ next_cfg_expr_from_ast(ast).unwrap_or(CfgExpr::Invalid)
+ }
+
/// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates.
pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option<bool> {
match self {
@@ -89,6 +98,56 @@ impl CfgExpr {
}
}
+#[cfg(feature = "syntax")]
+fn next_cfg_expr_from_ast(
+ it: &mut std::iter::Peekable<syntax::ast::TokenTreeChildren>,
+) -> Option<CfgExpr> {
+ use intern::sym;
+ use syntax::{NodeOrToken, SyntaxKind, T, ast};
+
+ let name = match it.next() {
+ None => return None,
+ Some(NodeOrToken::Token(ident)) if ident.kind().is_any_identifier() => {
+ Symbol::intern(ident.text())
+ }
+ Some(_) => return Some(CfgExpr::Invalid),
+ };
+
+ let ret = match it.peek() {
+ Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
+ it.next();
+ if let Some(NodeOrToken::Token(literal)) = it.peek()
+ && matches!(literal.kind(), SyntaxKind::STRING)
+ {
+ let literal = tt::token_to_literal(literal.text(), ()).symbol;
+ it.next();
+ CfgAtom::KeyValue { key: name, value: literal.clone() }.into()
+ } else {
+ return Some(CfgExpr::Invalid);
+ }
+ }
+ Some(NodeOrToken::Node(subtree)) => {
+ let mut subtree_iter = ast::TokenTreeChildren::new(subtree).peekable();
+ it.next();
+ let mut subs = std::iter::from_fn(|| next_cfg_expr_from_ast(&mut subtree_iter));
+ match name {
+ s if s == sym::all => CfgExpr::All(subs.collect()),
+ s if s == sym::any => CfgExpr::Any(subs.collect()),
+ s if s == sym::not => {
+ CfgExpr::Not(Box::new(subs.next().unwrap_or(CfgExpr::Invalid)))
+ }
+ _ => CfgExpr::Invalid,
+ }
+ }
+ _ => CfgAtom::Flag(name).into(),
+ };
+
+ // Eat comma separator
+ while it.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {}
+
+ Some(ret)
+}
+
#[cfg(feature = "tt")]
fn next_cfg_expr<S: Copy>(it: &mut tt::iter::TtIter<'_, S>) -> Option<CfgExpr> {
use intern::sym;
diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs
index 6766748097..52c581dbbd 100644
--- a/crates/cfg/src/tests.rs
+++ b/crates/cfg/src/tests.rs
@@ -1,7 +1,10 @@
use arbitrary::{Arbitrary, Unstructured};
use expect_test::{Expect, expect};
use intern::Symbol;
-use syntax::{AstNode, Edition, ast};
+use syntax::{
+ AstNode, Edition,
+ ast::{self, TokenTreeChildren},
+};
use syntax_bridge::{
DocCommentDesugarMode,
dummy_test_span_utils::{DUMMY, DummyTestSpanMap},
@@ -10,24 +13,33 @@ use syntax_bridge::{
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
+#[track_caller]
+fn parse_ast_cfg(tt: &ast::TokenTree) -> CfgExpr {
+ CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable())
+}
+
+#[track_caller]
fn assert_parse_result(input: &str, expected: CfgExpr) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
- tt.syntax(),
+ tt_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
);
let cfg = CfgExpr::parse(&tt);
assert_eq!(cfg, expected);
+ let cfg = parse_ast_cfg(&tt_ast);
+ assert_eq!(cfg, expected);
}
+#[track_caller]
fn check_dnf(input: &str, expect: Expect) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
- tt.syntax(),
+ tt_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
@@ -35,13 +47,17 @@ fn check_dnf(input: &str, expect: Expect) {
let cfg = CfgExpr::parse(&tt);
let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
expect.assert_eq(&actual);
+ let cfg = parse_ast_cfg(&tt_ast);
+ let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
+ expect.assert_eq(&actual);
}
+#[track_caller]
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
- tt.syntax(),
+ tt_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
@@ -50,14 +66,18 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let dnf = DnfExpr::new(&cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
expect.assert_eq(&why_inactive);
+ let cfg = parse_ast_cfg(&tt_ast);
+ let dnf = DnfExpr::new(&cfg);
+ let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
+ expect.assert_eq(&why_inactive);
}
#[track_caller]
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
- tt.syntax(),
+ tt_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
@@ -66,6 +86,10 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let dnf = DnfExpr::new(&cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
assert_eq!(hints, expected_hints);
+ let cfg = parse_ast_cfg(&tt_ast);
+ let dnf = DnfExpr::new(&cfg);
+ let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
+ assert_eq!(hints, expected_hints);
}
#[test]
diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml
index e174ca5a3b..a9b51e347d 100644
--- a/crates/hir-def/Cargo.toml
+++ b/crates/hir-def/Cargo.toml
@@ -44,7 +44,8 @@ mbe.workspace = true
cfg.workspace = true
tt.workspace = true
span.workspace = true
-thin-vec = "0.2.14"
+thin-vec.workspace = true
+syntax-bridge.workspace = true
[dev-dependencies]
expect-test.workspace = true
@@ -52,7 +53,6 @@ expect-test.workspace = true
# local deps
test-utils.workspace = true
test-fixture.workspace = true
-syntax-bridge.workspace = true
[features]
in-rust-tree = ["hir-expand/in-rust-tree"]
diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs
deleted file mode 100644
index 6d3005e01c..0000000000
--- a/crates/hir-def/src/attr.rs
+++ /dev/null
@@ -1,900 +0,0 @@
-//! A higher level attributes based on TokenTree, with also some shortcuts.
-
-use std::{borrow::Cow, convert::identity, hash::Hash, ops};
-
-use base_db::Crate;
-use cfg::{CfgExpr, CfgOptions};
-use either::Either;
-use hir_expand::{
- HirFileId, InFile,
- attrs::{Attr, AttrId, RawAttrs, collect_attrs},
- span_map::SpanMapRef,
-};
-use intern::{Symbol, sym};
-use la_arena::{ArenaMap, Idx, RawIdx};
-use mbe::DelimiterKind;
-use rustc_abi::ReprOptions;
-use span::AstIdNode;
-use syntax::{
- AstPtr,
- ast::{self, HasAttrs},
-};
-use triomphe::Arc;
-use tt::iter::{TtElement, TtIter};
-
-use crate::{
- AdtId, AstIdLoc, AttrDefId, GenericParamId, HasModule, LocalFieldId, Lookup, MacroId,
- VariantId,
- db::DefDatabase,
- item_tree::block_item_tree_query,
- nameres::{ModuleOrigin, ModuleSource},
- src::{HasChildSource, HasSource},
-};
-
-/// Desugared attributes of an item post `cfg_attr` expansion.
-#[derive(Default, Debug, Clone, PartialEq, Eq)]
-pub struct Attrs(RawAttrs);
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct AttrsWithOwner {
- attrs: Attrs,
- owner: AttrDefId,
-}
-
-impl Attrs {
- pub fn new(
- db: &dyn DefDatabase,
- owner: &dyn ast::HasAttrs,
- span_map: SpanMapRef<'_>,
- cfg_options: &CfgOptions,
- ) -> Self {
- Attrs(RawAttrs::new_expanded(db, owner, span_map, cfg_options))
- }
-
- pub fn get(&self, id: AttrId) -> Option<&Attr> {
- (**self).iter().find(|attr| attr.id == id)
- }
-
- pub(crate) fn expand_cfg_attr(
- db: &dyn DefDatabase,
- krate: Crate,
- raw_attrs: RawAttrs,
- ) -> Attrs {
- Attrs(raw_attrs.expand_cfg_attr(db, krate))
- }
-
- pub(crate) fn is_cfg_enabled_for(
- db: &dyn DefDatabase,
- owner: &dyn ast::HasAttrs,
- span_map: SpanMapRef<'_>,
- cfg_options: &CfgOptions,
- ) -> Result<(), CfgExpr> {
- RawAttrs::attrs_iter_expanded::<false>(db, owner, span_map, cfg_options)
- .filter_map(|attr| attr.cfg())
- .find_map(|cfg| match cfg_options.check(&cfg).is_none_or(identity) {
- true => None,
- false => Some(cfg),
- })
- .map_or(Ok(()), Err)
- }
-}
-
-impl ops::Deref for Attrs {
- type Target = [Attr];
-
- fn deref(&self) -> &[Attr] {
- &self.0
- }
-}
-
-impl ops::Deref for AttrsWithOwner {
- type Target = Attrs;
-
- fn deref(&self) -> &Attrs {
- &self.attrs
- }
-}
-
-impl Attrs {
- pub const EMPTY: Self = Self(RawAttrs::EMPTY);
-
- pub(crate) fn fields_attrs_query(
- db: &dyn DefDatabase,
- v: VariantId,
- ) -> Arc<ArenaMap<LocalFieldId, Attrs>> {
- let _p = tracing::info_span!("fields_attrs_query").entered();
- let mut res = ArenaMap::default();
- let (fields, file_id, krate) = match v {
- VariantId::EnumVariantId(it) => {
- let loc = it.lookup(db);
- let krate = loc.parent.lookup(db).container.krate;
- let source = loc.source(db);
- (source.value.field_list(), source.file_id, krate)
- }
- VariantId::StructId(it) => {
- let loc = it.lookup(db);
- let krate = loc.container.krate;
- let source = loc.source(db);
- (source.value.field_list(), source.file_id, krate)
- }
- VariantId::UnionId(it) => {
- let loc = it.lookup(db);
- let krate = loc.container.krate;
- let source = loc.source(db);
- (
- source.value.record_field_list().map(ast::FieldList::RecordFieldList),
- source.file_id,
- krate,
- )
- }
- };
- let Some(fields) = fields else {
- return Arc::new(res);
- };
-
- let cfg_options = krate.cfg_options(db);
- let span_map = db.span_map(file_id);
-
- match fields {
- ast::FieldList::RecordFieldList(fields) => {
- let mut idx = 0;
- for field in fields.fields() {
- let attrs =
- Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options));
- if attrs.is_cfg_enabled(cfg_options).is_ok() {
- res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
- idx += 1;
- }
- }
- }
- ast::FieldList::TupleFieldList(fields) => {
- let mut idx = 0;
- for field in fields.fields() {
- let attrs =
- Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options));
- if attrs.is_cfg_enabled(cfg_options).is_ok() {
- res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
- idx += 1;
- }
- }
- }
- }
-
- res.shrink_to_fit();
- Arc::new(res)
- }
-}
-
-impl Attrs {
- #[inline]
- pub fn by_key(&self, key: Symbol) -> AttrQuery<'_> {
- AttrQuery { attrs: self, key }
- }
-
- #[inline]
- pub fn rust_analyzer_tool(&self) -> impl Iterator<Item = &Attr> {
- self.iter()
- .filter(|&attr| attr.path.segments().first().is_some_and(|s| *s == sym::rust_analyzer))
- }
-
- #[inline]
- pub fn cfg(&self) -> Option<CfgExpr> {
- let mut cfgs = self.by_key(sym::cfg).tt_values().map(CfgExpr::parse);
- let first = cfgs.next()?;
- match cfgs.next() {
- Some(second) => {
- let cfgs = [first, second].into_iter().chain(cfgs);
- Some(CfgExpr::All(cfgs.collect()))
- }
- None => Some(first),
- }
- }
-
- #[inline]
- pub fn cfgs(&self) -> impl Iterator<Item = CfgExpr> + '_ {
- self.by_key(sym::cfg).tt_values().map(CfgExpr::parse)
- }
-
- #[inline]
- pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> Result<(), CfgExpr> {
- self.cfgs().try_for_each(|cfg| {
- if cfg_options.check(&cfg) != Some(false) { Ok(()) } else { Err(cfg) }
- })
- }
-
- #[inline]
- pub fn lang(&self) -> Option<&Symbol> {
- self.by_key(sym::lang).string_value()
- }
-
- #[inline]
- pub fn lang_item(&self) -> Option<&Symbol> {
- self.by_key(sym::lang).string_value()
- }
-
- #[inline]
- pub fn has_doc_hidden(&self) -> bool {
- self.by_key(sym::doc).tt_values().any(|tt| {
- tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis &&
- matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::hidden)
- })
- }
-
- #[inline]
- pub fn has_doc_notable_trait(&self) -> bool {
- self.by_key(sym::doc).tt_values().any(|tt| {
- tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis &&
- matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::notable_trait)
- })
- }
-
- #[inline]
- pub fn doc_exprs(&self) -> impl Iterator<Item = DocExpr> + '_ {
- self.by_key(sym::doc).tt_values().map(DocExpr::parse)
- }
-
- #[inline]
- pub fn doc_aliases(&self) -> impl Iterator<Item = Symbol> + '_ {
- self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec())
- }
-
- #[inline]
- pub fn export_name(&self) -> Option<&Symbol> {
- self.by_key(sym::export_name).string_value()
- }
-
- #[inline]
- pub fn is_proc_macro(&self) -> bool {
- self.by_key(sym::proc_macro).exists()
- }
-
- #[inline]
- pub fn is_proc_macro_attribute(&self) -> bool {
- self.by_key(sym::proc_macro_attribute).exists()
- }
-
- #[inline]
- pub fn is_proc_macro_derive(&self) -> bool {
- self.by_key(sym::proc_macro_derive).exists()
- }
-
- #[inline]
- pub fn is_test(&self) -> bool {
- self.iter().any(|it| {
- it.path()
- .segments()
- .iter()
- .rev()
- .zip([sym::core, sym::prelude, sym::v1, sym::test].iter().rev())
- .all(|it| it.0 == it.1)
- })
- }
-
- #[inline]
- pub fn is_ignore(&self) -> bool {
- self.by_key(sym::ignore).exists()
- }
-
- #[inline]
- pub fn is_bench(&self) -> bool {
- self.by_key(sym::bench).exists()
- }
-
- #[inline]
- pub fn is_unstable(&self) -> bool {
- self.by_key(sym::unstable).exists()
- }
-
- #[inline]
- pub fn rustc_legacy_const_generics(&self) -> Option<Box<Box<[u32]>>> {
- self.by_key(sym::rustc_legacy_const_generics)
- .tt_values()
- .next()
- .map(parse_rustc_legacy_const_generics)
- .filter(|it| !it.is_empty())
- .map(Box::new)
- }
-
- #[inline]
- pub fn repr(&self) -> Option<ReprOptions> {
- self.by_key(sym::repr).tt_values().filter_map(parse_repr_tt).fold(None, |acc, repr| {
- acc.map_or(Some(repr), |mut acc| {
- merge_repr(&mut acc, repr);
- Some(acc)
- })
- })
- }
-}
-
-fn parse_rustc_legacy_const_generics(tt: &crate::tt::TopSubtree) -> Box<[u32]> {
- let mut indices = Vec::new();
- let mut iter = tt.iter();
- while let (Some(first), second) = (iter.next(), iter.next()) {
- match first {
- TtElement::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() {
- Ok(index) => indices.push(index),
- Err(_) => break,
- },
- _ => break,
- }
-
- if let Some(comma) = second {
- match comma {
- TtElement::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {}
- _ => break,
- }
- }
- }
-
- indices.into_boxed_slice()
-}
-
-fn merge_repr(this: &mut ReprOptions, other: ReprOptions) {
- let ReprOptions { int, align, pack, flags, field_shuffle_seed: _ } = this;
- flags.insert(other.flags);
- *align = (*align).max(other.align);
- *pack = match (*pack, other.pack) {
- (Some(pack), None) | (None, Some(pack)) => Some(pack),
- _ => (*pack).min(other.pack),
- };
- if other.int.is_some() {
- *int = other.int;
- }
-}
-
-fn parse_repr_tt(tt: &crate::tt::TopSubtree) -> Option<ReprOptions> {
- use crate::builtin_type::{BuiltinInt, BuiltinUint};
- use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
-
- match tt.top_subtree().delimiter {
- tt::Delimiter { kind: DelimiterKind::Parenthesis, .. } => {}
- _ => return None,
- }
-
- let mut acc = ReprOptions::default();
- let mut tts = tt.iter();
- while let Some(tt) = tts.next() {
- let TtElement::Leaf(tt::Leaf::Ident(ident)) = tt else {
- continue;
- };
- let repr = match &ident.sym {
- s if *s == sym::packed => {
- let pack = if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
- tts.next();
- if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() {
- lit.symbol.as_str().parse().unwrap_or_default()
- } else {
- 0
- }
- } else {
- 0
- };
- let pack = Some(Align::from_bytes(pack).unwrap_or(Align::ONE));
- ReprOptions { pack, ..Default::default() }
- }
- s if *s == sym::align => {
- let mut align = None;
- if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
- tts.next();
- if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next()
- && let Ok(a) = lit.symbol.as_str().parse()
- {
- align = Align::from_bytes(a).ok();
- }
- }
- ReprOptions { align, ..Default::default() }
- }
- s if *s == sym::C => ReprOptions { flags: ReprFlags::IS_C, ..Default::default() },
- s if *s == sym::transparent => {
- ReprOptions { flags: ReprFlags::IS_TRANSPARENT, ..Default::default() }
- }
- s if *s == sym::simd => ReprOptions { flags: ReprFlags::IS_SIMD, ..Default::default() },
- repr => {
- let mut int = None;
- if let Some(builtin) = BuiltinInt::from_suffix_sym(repr)
- .map(Either::Left)
- .or_else(|| BuiltinUint::from_suffix_sym(repr).map(Either::Right))
- {
- int = Some(match builtin {
- Either::Left(bi) => match bi {
- BuiltinInt::Isize => IntegerType::Pointer(true),
- BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true),
- BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true),
- BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true),
- BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true),
- BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true),
- },
- Either::Right(bu) => match bu {
- BuiltinUint::Usize => IntegerType::Pointer(false),
- BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false),
- BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false),
- BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false),
- BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false),
- BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false),
- },
- });
- }
- ReprOptions { int, ..Default::default() }
- }
- };
- merge_repr(&mut acc, repr);
- }
-
- Some(acc)
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum DocAtom {
- /// eg. `#[doc(hidden)]`
- Flag(Symbol),
- /// eg. `#[doc(alias = "it")]`
- ///
- /// Note that a key can have multiple values that are all considered "active" at the same time.
- /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`.
- KeyValue { key: Symbol, value: Symbol },
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum DocExpr {
- Invalid,
- /// eg. `#[doc(hidden)]`, `#[doc(alias = "x")]`
- Atom(DocAtom),
- /// eg. `#[doc(alias("x", "y"))]`
- Alias(Vec<Symbol>),
-}
-
-impl From<DocAtom> for DocExpr {
- fn from(atom: DocAtom) -> Self {
- DocExpr::Atom(atom)
- }
-}
-
-impl DocExpr {
- fn parse<S: Copy>(tt: &tt::TopSubtree<S>) -> DocExpr {
- next_doc_expr(tt.iter()).unwrap_or(DocExpr::Invalid)
- }
-
- pub fn aliases(&self) -> &[Symbol] {
- match self {
- DocExpr::Atom(DocAtom::KeyValue { key, value }) if *key == sym::alias => {
- std::slice::from_ref(value)
- }
- DocExpr::Alias(aliases) => aliases,
- _ => &[],
- }
- }
-}
-
-fn next_doc_expr<S: Copy>(mut it: TtIter<'_, S>) -> Option<DocExpr> {
- let name = match it.next() {
- None => return None,
- Some(TtElement::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(),
- Some(_) => return Some(DocExpr::Invalid),
- };
-
- // Peek
- let ret = match it.peek() {
- Some(TtElement::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => {
- it.next();
- match it.next() {
- Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- kind: tt::LitKind::Str,
- ..
- }))) => DocAtom::KeyValue { key: name, value: text.clone() }.into(),
- _ => return Some(DocExpr::Invalid),
- }
- }
- Some(TtElement::Subtree(_, subtree_iter)) => {
- it.next();
- let subs = parse_comma_sep(subtree_iter);
- match &name {
- s if *s == sym::alias => DocExpr::Alias(subs),
- _ => DocExpr::Invalid,
- }
- }
- _ => DocAtom::Flag(name).into(),
- };
- Some(ret)
-}
-
-fn parse_comma_sep<S>(iter: TtIter<'_, S>) -> Vec<Symbol> {
- iter.filter_map(|tt| match tt {
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- kind: tt::LitKind::Str, symbol, ..
- })) => Some(symbol.clone()),
- _ => None,
- })
- .collect()
-}
-
-impl AttrsWithOwner {
- pub fn new(db: &dyn DefDatabase, owner: AttrDefId) -> Self {
- Self { attrs: db.attrs(owner), owner }
- }
-
- pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs {
- let _p = tracing::info_span!("attrs_query").entered();
- // FIXME: this should use `Trace` to avoid duplication in `source_map` below
- match def {
- AttrDefId::ModuleId(module) => {
- let def_map = module.def_map(db);
- let mod_data = &def_map[module.local_id];
-
- let raw_attrs = match mod_data.origin {
- ModuleOrigin::File { definition, declaration_tree_id, declaration, .. } => {
- let decl_attrs = declaration_tree_id
- .item_tree(db)
- .raw_attrs(declaration.upcast())
- .clone();
- let tree = db.file_item_tree(definition.into());
- let def_attrs = tree.top_level_raw_attrs().clone();
- decl_attrs.merge(def_attrs)
- }
- ModuleOrigin::CrateRoot { definition } => {
- let tree = db.file_item_tree(definition.into());
- tree.top_level_raw_attrs().clone()
- }
- ModuleOrigin::Inline { definition_tree_id, definition } => {
- definition_tree_id.item_tree(db).raw_attrs(definition.upcast()).clone()
- }
- ModuleOrigin::BlockExpr { id, .. } => {
- let tree = block_item_tree_query(db, id);
- tree.top_level_raw_attrs().clone()
- }
- };
- Attrs::expand_cfg_attr(db, module.krate, raw_attrs)
- }
- AttrDefId::FieldId(it) => db.fields_attrs(it.parent)[it.local_id].clone(),
- AttrDefId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it),
- AttrDefId::AdtId(it) => match it {
- AdtId::StructId(it) => attrs_from_ast_id_loc(db, it),
- AdtId::EnumId(it) => attrs_from_ast_id_loc(db, it),
- AdtId::UnionId(it) => attrs_from_ast_id_loc(db, it),
- },
- AttrDefId::TraitId(it) => attrs_from_ast_id_loc(db, it),
- AttrDefId::MacroId(it) => match it {
- MacroId::Macro2Id(it) => attrs_from_ast_id_loc(db, it),
- MacroId::MacroRulesId(it) => attrs_from_ast_id_loc(db, it),
- MacroId::ProcMacroId(it) => attrs_from_ast_id_loc(db, it),
- },
- AttrDefId::ImplId(it) => attrs_from_ast_id_loc(db, it),
- AttrDefId::ConstId(it) => attrs_from_ast_id_loc(db, it),
- AttrDefId::StaticId(it) => attrs_from_ast_id_loc(db, it),
- AttrDefId::FunctionId(it) => attrs_from_ast_id_loc(db, it),
- AttrDefId::TypeAliasId(it) => attrs_from_ast_id_loc(db, it),
- AttrDefId::GenericParamId(it) => match it {
- GenericParamId::ConstParamId(it) => {
- let src = it.parent().child_source(db);
- // FIXME: We should be never getting `None` here.
- Attrs(match src.value.get(it.local_id()) {
- Some(val) => RawAttrs::new_expanded(
- db,
- val,
- db.span_map(src.file_id).as_ref(),
- def.krate(db).cfg_options(db),
- ),
- None => RawAttrs::EMPTY,
- })
- }
- GenericParamId::TypeParamId(it) => {
- let src = it.parent().child_source(db);
- // FIXME: We should be never getting `None` here.
- Attrs(match src.value.get(it.local_id()) {
- Some(val) => RawAttrs::new_expanded(
- db,
- val,
- db.span_map(src.file_id).as_ref(),
- def.krate(db).cfg_options(db),
- ),
- None => RawAttrs::EMPTY,
- })
- }
- GenericParamId::LifetimeParamId(it) => {
- let src = it.parent.child_source(db);
- // FIXME: We should be never getting `None` here.
- Attrs(match src.value.get(it.local_id) {
- Some(val) => RawAttrs::new_expanded(
- db,
- val,
- db.span_map(src.file_id).as_ref(),
- def.krate(db).cfg_options(db),
- ),
- None => RawAttrs::EMPTY,
- })
- }
- },
- AttrDefId::ExternBlockId(it) => attrs_from_ast_id_loc(db, it),
- AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it),
- AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it),
- }
- }
-
- pub fn source_map(&self, db: &dyn DefDatabase) -> AttrSourceMap {
- let owner = match self.owner {
- AttrDefId::ModuleId(module) => {
- // Modules can have 2 attribute owners (the `mod x;` item, and the module file itself).
-
- let def_map = module.def_map(db);
- let mod_data = &def_map[module.local_id];
- match mod_data.declaration_source(db) {
- Some(it) => {
- let mut map = AttrSourceMap::new(InFile::new(it.file_id, &it.value));
- if let InFile { file_id, value: ModuleSource::SourceFile(file) } =
- mod_data.definition_source(db)
- {
- map.append_module_inline_attrs(AttrSourceMap::new(InFile::new(
- file_id, &file,
- )));
- }
- return map;
- }
- None => {
- let InFile { file_id, value } = mod_data.definition_source(db);
- let attrs_owner = match &value {
- ModuleSource::SourceFile(file) => file as &dyn ast::HasAttrs,
- ModuleSource::Module(module) => module as &dyn ast::HasAttrs,
- ModuleSource::BlockExpr(block) => block as &dyn ast::HasAttrs,
- };
- return AttrSourceMap::new(InFile::new(file_id, attrs_owner));
- }
- }
- }
- AttrDefId::FieldId(id) => {
- let map = db.fields_attrs_source_map(id.parent);
- let file_id = id.parent.file_id(db);
- let root = db.parse_or_expand(file_id);
- let owner = ast::AnyHasAttrs::new(map[id.local_id].to_node(&root));
- InFile::new(file_id, owner)
- }
- AttrDefId::AdtId(adt) => match adt {
- AdtId::StructId(id) => any_has_attrs(db, id),
- AdtId::UnionId(id) => any_has_attrs(db, id),
- AdtId::EnumId(id) => any_has_attrs(db, id),
- },
- AttrDefId::FunctionId(id) => any_has_attrs(db, id),
- AttrDefId::EnumVariantId(id) => any_has_attrs(db, id),
- AttrDefId::StaticId(id) => any_has_attrs(db, id),
- AttrDefId::ConstId(id) => any_has_attrs(db, id),
- AttrDefId::TraitId(id) => any_has_attrs(db, id),
- AttrDefId::TypeAliasId(id) => any_has_attrs(db, id),
- AttrDefId::MacroId(id) => match id {
- MacroId::Macro2Id(id) => any_has_attrs(db, id),
- MacroId::MacroRulesId(id) => any_has_attrs(db, id),
- MacroId::ProcMacroId(id) => any_has_attrs(db, id),
- },
- AttrDefId::ImplId(id) => any_has_attrs(db, id),
- AttrDefId::GenericParamId(id) => match id {
- GenericParamId::ConstParamId(id) => id
- .parent()
- .child_source(db)
- .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())),
- GenericParamId::TypeParamId(id) => id
- .parent()
- .child_source(db)
- .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())),
- GenericParamId::LifetimeParamId(id) => id
- .parent
- .child_source(db)
- .map(|source| ast::AnyHasAttrs::new(source[id.local_id].clone())),
- },
- AttrDefId::ExternBlockId(id) => any_has_attrs(db, id),
- AttrDefId::ExternCrateId(id) => any_has_attrs(db, id),
- AttrDefId::UseId(id) => any_has_attrs(db, id),
- };
-
- AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs))
- }
-}
-
-#[derive(Debug)]
-pub struct AttrSourceMap {
- source: Vec<Either<ast::Attr, ast::Comment>>,
- file_id: HirFileId,
- /// If this map is for a module, this will be the [`HirFileId`] of the module's definition site,
- /// while `file_id` will be the one of the module declaration site.
- /// The usize is the index into `source` from which point on the entries reside in the def site
- /// file.
- mod_def_site_file_id: Option<(HirFileId, usize)>,
-}
-
-impl AttrSourceMap {
- fn new(owner: InFile<&dyn ast::HasAttrs>) -> Self {
- Self {
- source: collect_attrs(owner.value).map(|(_, it)| it).collect(),
- file_id: owner.file_id,
- mod_def_site_file_id: None,
- }
- }
-
- /// Append a second source map to this one, this is required for modules, whose outline and inline
- /// attributes can reside in different files
- fn append_module_inline_attrs(&mut self, other: Self) {
- assert!(self.mod_def_site_file_id.is_none() && other.mod_def_site_file_id.is_none());
- let len = self.source.len();
- self.source.extend(other.source);
- if other.file_id != self.file_id {
- self.mod_def_site_file_id = Some((other.file_id, len));
- }
- }
-
- /// Maps the lowered `Attr` back to its original syntax node.
- ///
- /// `attr` must come from the `owner` used for AttrSourceMap
- ///
- /// Note that the returned syntax node might be a `#[cfg_attr]`, or a doc comment, instead of
- /// the attribute represented by `Attr`.
- pub fn source_of(&self, attr: &Attr) -> InFile<&Either<ast::Attr, ast::Comment>> {
- self.source_of_id(attr.id)
- }
-
- pub fn source_of_id(&self, id: AttrId) -> InFile<&Either<ast::Attr, ast::Comment>> {
- let ast_idx = id.ast_index();
- let file_id = match self.mod_def_site_file_id {
- Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id,
- _ => self.file_id,
- };
-
- self.source
- .get(ast_idx)
- .map(|it| InFile::new(file_id, it))
- .unwrap_or_else(|| panic!("cannot find attr at index {id:?}"))
- }
-}
-
-#[derive(Debug, Clone)]
-pub struct AttrQuery<'attr> {
- attrs: &'attr Attrs,
- key: Symbol,
-}
-
-impl<'attr> AttrQuery<'attr> {
- #[inline]
- pub fn tt_values(self) -> impl Iterator<Item = &'attr crate::tt::TopSubtree> {
- self.attrs().filter_map(|attr| attr.token_tree_value())
- }
-
- #[inline]
- pub fn string_value(self) -> Option<&'attr Symbol> {
- self.attrs().find_map(|attr| attr.string_value())
- }
-
- #[inline]
- pub fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> {
- self.attrs().find_map(|attr| attr.string_value_with_span())
- }
-
- #[inline]
- pub fn string_value_unescape(self) -> Option<Cow<'attr, str>> {
- self.attrs().find_map(|attr| attr.string_value_unescape())
- }
-
- #[inline]
- pub fn exists(self) -> bool {
- self.attrs().next().is_some()
- }
-
- #[inline]
- pub fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone {
- let key = self.key;
- self.attrs.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == key))
- }
-
- /// Find string value for a specific key inside token tree
- ///
- /// ```ignore
- /// #[doc(html_root_url = "url")]
- /// ^^^^^^^^^^^^^ key
- /// ```
- #[inline]
- pub fn find_string_value_in_tt(self, key: Symbol) -> Option<&'attr str> {
- self.tt_values().find_map(|tt| {
- let name = tt.iter()
- .skip_while(|tt| !matches!(tt, TtElement::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if *sym == key))
- .nth(2);
-
- match name {
- Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal{ symbol: text, kind: tt::LitKind::Str | tt::LitKind::StrRaw(_) , ..}))) => Some(text.as_str()),
- _ => None
- }
- })
- }
-}
-
-fn any_has_attrs<'db>(
- db: &(dyn DefDatabase + 'db),
- id: impl Lookup<Database = dyn DefDatabase, Data = impl HasSource<Value = impl ast::HasAttrs>>,
-) -> InFile<ast::AnyHasAttrs> {
- id.lookup(db).source(db).map(ast::AnyHasAttrs::new)
-}
-
-fn attrs_from_ast_id_loc<'db, N: AstIdNode + HasAttrs>(
- db: &(dyn DefDatabase + 'db),
- lookup: impl Lookup<Database = dyn DefDatabase, Data = impl AstIdLoc<Ast = N> + HasModule>,
-) -> Attrs {
- let loc = lookup.lookup(db);
- let source = loc.source(db);
- let span_map = db.span_map(source.file_id);
- let cfg_options = loc.krate(db).cfg_options(db);
- Attrs(RawAttrs::new_expanded(db, &source.value, span_map.as_ref(), cfg_options))
-}
-
-pub(crate) fn fields_attrs_source_map(
- db: &dyn DefDatabase,
- def: VariantId,
-) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>> {
- let mut res = ArenaMap::default();
- let child_source = def.child_source(db);
-
- for (idx, variant) in child_source.value.iter() {
- res.insert(
- idx,
- variant
- .as_ref()
- .either(|l| AstPtr::new(l).wrap_left(), |r| AstPtr::new(r).wrap_right()),
- );
- }
-
- Arc::new(res)
-}
-
-#[cfg(test)]
-mod tests {
- //! This module contains tests for doc-expression parsing.
- //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
-
- use intern::Symbol;
- use span::EditionedFileId;
- use triomphe::Arc;
-
- use hir_expand::span_map::{RealSpanMap, SpanMap};
- use span::FileId;
- use syntax::{AstNode, TextRange, ast};
- use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
-
- use crate::attr::{DocAtom, DocExpr};
-
- fn assert_parse_result(input: &str, expected: DocExpr) {
- let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(
- EditionedFileId::current_edition(FileId::from_raw(0)),
- )));
- let tt = syntax_node_to_token_tree(
- tt.syntax(),
- map.as_ref(),
- map.span_for_range(TextRange::empty(0.into())),
- DocCommentDesugarMode::ProcMacro,
- );
- let cfg = DocExpr::parse(&tt);
- assert_eq!(cfg, expected);
- }
-
- #[test]
- fn test_doc_expr_parser() {
- assert_parse_result("#![doc(hidden)]", DocAtom::Flag(Symbol::intern("hidden")).into());
-
- assert_parse_result(
- r#"#![doc(alias = "foo")]"#,
- DocAtom::KeyValue { key: Symbol::intern("alias"), value: Symbol::intern("foo") }.into(),
- );
-
- assert_parse_result(
- r#"#![doc(alias("foo"))]"#,
- DocExpr::Alias([Symbol::intern("foo")].into()),
- );
- assert_parse_result(
- r#"#![doc(alias("foo", "bar", "baz"))]"#,
- DocExpr::Alias(
- [Symbol::intern("foo"), Symbol::intern("bar"), Symbol::intern("baz")].into(),
- ),
- );
-
- assert_parse_result(
- r#"
- #[doc(alias("Bar", "Qux"))]
- struct Foo;"#,
- DocExpr::Alias([Symbol::intern("Bar"), Symbol::intern("Qux")].into()),
- );
- }
-}
diff --git a/crates/hir-def/src/attrs.rs b/crates/hir-def/src/attrs.rs
new file mode 100644
index 0000000000..f4a1a3130e
--- /dev/null
+++ b/crates/hir-def/src/attrs.rs
@@ -0,0 +1,1610 @@
+//! Attributes for anything that is not name resolution.
+//!
+//! The fundamental idea of this module stems from the observation that most "interesting"
+//! attributes have a more memory-compact form than storing their full syntax, and
+//! that most of the attributes are flags, and those that are not are rare. Therefore,
+//! this module defines [`AttrFlags`], which is a bitflag enum that contains only a yes/no
+//! answer to whether an attribute is present on an item. For most attributes, that's all
+//! that interests us; for the rest of them, we define another query that extracts
+//! their data. A key part is that every one of those queries will have a wrapper method
+//! that queries (or is given) the `AttrFlags` and checks for the presence of the attribute;
+//! if it is not present, we do not call the query, to prevent Salsa from needing to record
+//! its value. This way, queries are only called on items that have the attribute, which is
+//! usually only a few.
+//!
+//! An exception to this model that is also defined in this module is documentation (doc
+//! comments and `#[doc = "..."]` attributes). But it also has a more compact form than
+//! the attribute: a concatenated string of the full docs as well as a source map
+//! to map it back to AST (which is needed for things like resolving links in doc comments
+//! and highlight injection). The lowering and upmapping of doc comments is a bit complicated,
+//! but it is encapsulated in the [`Docs`] struct.
+
+use std::{
+ convert::Infallible,
+ iter::Peekable,
+ ops::{ControlFlow, Range},
+};
+
+use base_db::Crate;
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_expand::{
+ HirFileId, InFile, Lookup,
+ attrs::{Meta, expand_cfg_attr, expand_cfg_attr_with_doc_comments},
+};
+use intern::Symbol;
+use itertools::Itertools;
+use la_arena::ArenaMap;
+use rustc_abi::ReprOptions;
+use rustc_hash::FxHashSet;
+use smallvec::SmallVec;
+use syntax::{
+ AstNode, AstToken, NodeOrToken, SmolStr, SyntaxNode, SyntaxToken, T,
+ ast::{self, AttrDocCommentIter, HasAttrs, IsString, TokenTreeChildren},
+};
+use tt::{TextRange, TextSize};
+
+use crate::{
+ AdtId, AstIdLoc, AttrDefId, FieldId, FunctionId, GenericDefId, HasModule, InternedModuleId,
+ LifetimeParamId, LocalFieldId, MacroId, TypeOrConstParamId, VariantId,
+ db::DefDatabase,
+ hir::generics::{GenericParams, LocalLifetimeParamId, LocalTypeOrConstParamId},
+ nameres::ModuleOrigin,
+ src::{HasChildSource, HasSource},
+};
+
+#[inline]
+fn attrs_from_ast_id_loc<N: AstNode + Into<ast::AnyHasAttrs>>(
+ db: &dyn DefDatabase,
+ lookup: impl Lookup<Database = dyn DefDatabase, Data = impl AstIdLoc<Ast = N> + HasModule>,
+) -> (InFile<ast::AnyHasAttrs>, Crate) {
+ let loc = lookup.lookup(db);
+ let source = loc.source(db);
+ let krate = loc.krate(db);
+ (source.map(|it| it.into()), krate)
+}
+
+#[inline]
+fn extract_doc_tt_attr(attr_flags: &mut AttrFlags, tt: ast::TokenTree) {
+ for atom in DocAtom::parse(tt) {
+ match atom {
+ DocAtom::Flag(flag) => match &*flag {
+ "notable_trait" => attr_flags.insert(AttrFlags::IS_DOC_NOTABLE_TRAIT),
+ "hidden" => attr_flags.insert(AttrFlags::IS_DOC_HIDDEN),
+ _ => {}
+ },
+ DocAtom::KeyValue { key, value: _ } => match &*key {
+ "alias" => attr_flags.insert(AttrFlags::HAS_DOC_ALIASES),
+ "keyword" => attr_flags.insert(AttrFlags::HAS_DOC_KEYWORD),
+ _ => {}
+ },
+ DocAtom::Alias(_) => attr_flags.insert(AttrFlags::HAS_DOC_ALIASES),
+ }
+ }
+}
+
+fn extract_ra_completions(attr_flags: &mut AttrFlags, tt: ast::TokenTree) {
+ let tt = TokenTreeChildren::new(&tt);
+ if let Ok(NodeOrToken::Token(option)) = tt.exactly_one()
+ && option.kind().is_any_identifier()
+ {
+ match option.text() {
+ "ignore_flyimport" => attr_flags.insert(AttrFlags::COMPLETE_IGNORE_FLYIMPORT),
+ "ignore_methods" => attr_flags.insert(AttrFlags::COMPLETE_IGNORE_METHODS),
+ "ignore_flyimport_methods" => {
+ attr_flags.insert(AttrFlags::COMPLETE_IGNORE_FLYIMPORT_METHODS)
+ }
+ _ => {}
+ }
+ }
+}
+
+fn extract_rustc_skip_during_method_dispatch(attr_flags: &mut AttrFlags, tt: ast::TokenTree) {
+ let iter = TokenTreeChildren::new(&tt);
+ for kind in iter {
+ if let NodeOrToken::Token(kind) = kind
+ && kind.kind().is_any_identifier()
+ {
+ match kind.text() {
+ "array" => attr_flags.insert(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH),
+ "boxed_slice" => {
+ attr_flags.insert(AttrFlags::RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH)
+ }
+ _ => {}
+ }
+ }
+ }
+}
+
+#[inline]
+fn match_attr_flags(attr_flags: &mut AttrFlags, attr: Meta) -> ControlFlow<Infallible> {
+ match attr {
+ Meta::NamedKeyValue { name: Some(name), value, .. } => match name.text() {
+ "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
+ "lang" => attr_flags.insert(AttrFlags::LANG_ITEM),
+ "path" => attr_flags.insert(AttrFlags::HAS_PATH),
+ "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE),
+ "export_name" => {
+ if let Some(value) = value
+ && let Some(value) = ast::String::cast(value)
+ && let Ok(value) = value.value()
+ && *value == *"main"
+ {
+ attr_flags.insert(AttrFlags::IS_EXPORT_NAME_MAIN);
+ }
+ }
+ _ => {}
+ },
+ Meta::TokenTree { path, tt } => match path.segments.len() {
+ 1 => match path.segments[0].text() {
+ "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
+ "cfg" => attr_flags.insert(AttrFlags::HAS_CFG),
+ "doc" => extract_doc_tt_attr(attr_flags, tt),
+ "repr" => attr_flags.insert(AttrFlags::HAS_REPR),
+ "target_feature" => attr_flags.insert(AttrFlags::HAS_TARGET_FEATURE),
+ "proc_macro_derive" | "rustc_builtin_macro" => {
+ attr_flags.insert(AttrFlags::IS_DERIVE_OR_BUILTIN_MACRO)
+ }
+ "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE),
+ "rustc_layout_scalar_valid_range_start" | "rustc_layout_scalar_valid_range_end" => {
+ attr_flags.insert(AttrFlags::RUSTC_LAYOUT_SCALAR_VALID_RANGE)
+ }
+ "rustc_legacy_const_generics" => {
+ attr_flags.insert(AttrFlags::HAS_LEGACY_CONST_GENERICS)
+ }
+ "rustc_skip_during_method_dispatch" => {
+ extract_rustc_skip_during_method_dispatch(attr_flags, tt)
+ }
+ _ => {}
+ },
+ 2 => match path.segments[0].text() {
+ "rust_analyzer" => match path.segments[1].text() {
+ "completions" => extract_ra_completions(attr_flags, tt),
+ _ => {}
+ },
+ _ => {}
+ },
+ _ => {}
+ },
+ Meta::Path { path } => {
+ match path.segments.len() {
+ 1 => match path.segments[0].text() {
+ "rustc_has_incoherent_inherent_impls" => {
+ attr_flags.insert(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS)
+ }
+ "rustc_allow_incoherent_impl" => {
+ attr_flags.insert(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL)
+ }
+ "fundamental" => attr_flags.insert(AttrFlags::FUNDAMENTAL),
+ "no_std" => attr_flags.insert(AttrFlags::IS_NO_STD),
+ "may_dangle" => attr_flags.insert(AttrFlags::MAY_DANGLE),
+ "rustc_paren_sugar" => attr_flags.insert(AttrFlags::RUSTC_PAREN_SUGAR),
+ "rustc_coinductive" => attr_flags.insert(AttrFlags::RUSTC_COINDUCTIVE),
+ "rustc_force_inline" => attr_flags.insert(AttrFlags::RUSTC_FORCE_INLINE),
+ "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE),
+ "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
+ "macro_export" => attr_flags.insert(AttrFlags::IS_MACRO_EXPORT),
+ "no_mangle" => attr_flags.insert(AttrFlags::NO_MANGLE),
+ "non_exhaustive" => attr_flags.insert(AttrFlags::NON_EXHAUSTIVE),
+ "ignore" => attr_flags.insert(AttrFlags::IS_IGNORE),
+ "bench" => attr_flags.insert(AttrFlags::IS_BENCH),
+ "rustc_const_panic_str" => attr_flags.insert(AttrFlags::RUSTC_CONST_PANIC_STR),
+ "rustc_intrinsic" => attr_flags.insert(AttrFlags::RUSTC_INTRINSIC),
+ "rustc_safe_intrinsic" => attr_flags.insert(AttrFlags::RUSTC_SAFE_INTRINSIC),
+ "rustc_intrinsic_must_be_overridden" => {
+ attr_flags.insert(AttrFlags::RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN)
+ }
+ "rustc_allocator" => attr_flags.insert(AttrFlags::RUSTC_ALLOCATOR),
+ "rustc_deallocator" => attr_flags.insert(AttrFlags::RUSTC_DEALLOCATOR),
+ "rustc_reallocator" => attr_flags.insert(AttrFlags::RUSTC_REALLOCATOR),
+ "rustc_allocator_zeroed" => {
+ attr_flags.insert(AttrFlags::RUSTC_ALLOCATOR_ZEROED)
+ }
+ "rustc_reservation_impl" => {
+ attr_flags.insert(AttrFlags::RUSTC_RESERVATION_IMPL)
+ }
+ "rustc_deprecated_safe_2024" => {
+ attr_flags.insert(AttrFlags::RUSTC_DEPRECATED_SAFE_2024)
+ }
+ "rustc_skip_array_during_method_dispatch" => {
+ attr_flags.insert(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH)
+ }
+ _ => {}
+ },
+ 2 => match path.segments[0].text() {
+ "rust_analyzer" => match path.segments[1].text() {
+ "skip" => attr_flags.insert(AttrFlags::RUST_ANALYZER_SKIP),
+ _ => {}
+ },
+ _ => {}
+ },
+ _ => {}
+ }
+
+ if path.is_test {
+ attr_flags.insert(AttrFlags::IS_TEST);
+ }
+ }
+ _ => {}
+ };
+ ControlFlow::Continue(())
+}
+
+bitflags::bitflags! {
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+ pub struct AttrFlags: u64 {
+ const RUST_ANALYZER_SKIP = 1 << 0;
+
+ const LANG_ITEM = 1 << 1;
+
+ const HAS_DOC_ALIASES = 1 << 2;
+ const HAS_DOC_KEYWORD = 1 << 3;
+ const IS_DOC_NOTABLE_TRAIT = 1 << 4;
+ const IS_DOC_HIDDEN = 1 << 5;
+
+ const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 6;
+ const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 7;
+ const FUNDAMENTAL = 1 << 8;
+ const RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH = 1 << 9;
+ const RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH = 1 << 10;
+ const HAS_REPR = 1 << 11;
+ const HAS_TARGET_FEATURE = 1 << 12;
+ const RUSTC_DEPRECATED_SAFE_2024 = 1 << 13;
+ const HAS_LEGACY_CONST_GENERICS = 1 << 14;
+ const NO_MANGLE = 1 << 15;
+ const NON_EXHAUSTIVE = 1 << 16;
+ const RUSTC_RESERVATION_IMPL = 1 << 17;
+ const RUSTC_CONST_PANIC_STR = 1 << 18;
+ const MAY_DANGLE = 1 << 19;
+
+ const RUSTC_INTRINSIC = 1 << 20;
+ const RUSTC_SAFE_INTRINSIC = 1 << 21;
+ const RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN = 1 << 22;
+ const RUSTC_ALLOCATOR = 1 << 23;
+ const RUSTC_DEALLOCATOR = 1 << 24;
+ const RUSTC_REALLOCATOR = 1 << 25;
+ const RUSTC_ALLOCATOR_ZEROED = 1 << 26;
+
+ const IS_UNSTABLE = 1 << 27;
+ const IS_IGNORE = 1 << 28;
+ // FIXME: `IS_TEST` and `IS_BENCH` should be based on semantic information, not textual match.
+ const IS_BENCH = 1 << 29;
+ const IS_TEST = 1 << 30;
+ const IS_EXPORT_NAME_MAIN = 1 << 31;
+ const IS_MACRO_EXPORT = 1 << 32;
+ const IS_NO_STD = 1 << 33;
+ const IS_DERIVE_OR_BUILTIN_MACRO = 1 << 34;
+ const IS_DEPRECATED = 1 << 35;
+ const HAS_PATH = 1 << 36;
+ const HAS_CFG = 1 << 37;
+
+ const COMPLETE_IGNORE_FLYIMPORT = 1 << 38;
+ const COMPLETE_IGNORE_FLYIMPORT_METHODS = 1 << 39;
+ const COMPLETE_IGNORE_METHODS = 1 << 40;
+
+ const RUSTC_LAYOUT_SCALAR_VALID_RANGE = 1 << 41;
+ const RUSTC_PAREN_SUGAR = 1 << 42;
+ const RUSTC_COINDUCTIVE = 1 << 43;
+ const RUSTC_FORCE_INLINE = 1 << 44;
+ }
+}
+
+fn attrs_source(
+ db: &dyn DefDatabase,
+ owner: AttrDefId,
+) -> (InFile<ast::AnyHasAttrs>, Option<InFile<ast::Module>>, Crate) {
+ let (owner, krate) = match owner {
+ AttrDefId::ModuleId(id) => {
+ let id = id.loc(db);
+ let def_map = id.def_map(db);
+ let (definition, declaration) = match def_map[id.local_id].origin {
+ ModuleOrigin::CrateRoot { definition } => {
+ let file = db.parse(definition).tree();
+ (InFile::new(definition.into(), ast::AnyHasAttrs::from(file)), None)
+ }
+ ModuleOrigin::File { declaration, declaration_tree_id, definition, .. } => {
+ let declaration = InFile::new(declaration_tree_id.file_id(), declaration);
+ let declaration = declaration.with_value(declaration.to_node(db));
+ let definition_source = db.parse(definition).tree();
+ (InFile::new(definition.into(), definition_source.into()), Some(declaration))
+ }
+ ModuleOrigin::Inline { definition_tree_id, definition } => {
+ let definition = InFile::new(definition_tree_id.file_id(), definition);
+ let definition = definition.with_value(definition.to_node(db).into());
+ (definition, None)
+ }
+ ModuleOrigin::BlockExpr { block, .. } => {
+ let definition = block.to_node(db);
+ (block.with_value(definition.into()), None)
+ }
+ };
+ return (definition, declaration, id.krate);
+ }
+ AttrDefId::AdtId(AdtId::StructId(it)) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::AdtId(AdtId::UnionId(it)) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::AdtId(AdtId::EnumId(it)) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::FunctionId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::StaticId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::ConstId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::TraitId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::TypeAliasId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::MacroId(MacroId::MacroRulesId(it)) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::MacroId(MacroId::Macro2Id(it)) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::MacroId(MacroId::ProcMacroId(it)) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::ImplId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::ExternBlockId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it),
+ };
+ (owner, None, krate)
+}
+
+fn collect_attrs<BreakValue>(
+ db: &dyn DefDatabase,
+ owner: AttrDefId,
+ mut callback: impl FnMut(Meta) -> ControlFlow<BreakValue>,
+) -> Option<BreakValue> {
+ let (source, outer_mod_decl, krate) = attrs_source(db, owner);
+
+ let mut cfg_options = None;
+ expand_cfg_attr(
+ outer_mod_decl
+ .into_iter()
+ .flat_map(|it| it.value.attrs())
+ .chain(ast::attrs_including_inner(&source.value)),
+ || cfg_options.get_or_insert_with(|| krate.cfg_options(db)),
+ move |meta, _, _, _| callback(meta),
+ )
+}
+
+fn collect_field_attrs<T>(
+ db: &dyn DefDatabase,
+ variant: VariantId,
+ mut field_attrs: impl FnMut(&CfgOptions, InFile<ast::AnyHasAttrs>) -> T,
+) -> ArenaMap<LocalFieldId, T> {
+ let (variant_syntax, krate) = match variant {
+ VariantId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it),
+ VariantId::StructId(it) => attrs_from_ast_id_loc(db, it),
+ VariantId::UnionId(it) => attrs_from_ast_id_loc(db, it),
+ };
+ let cfg_options = krate.cfg_options(db);
+ let variant_syntax = variant_syntax
+ .with_value(ast::VariantDef::cast(variant_syntax.value.syntax().clone()).unwrap());
+ let fields = match &variant_syntax.value {
+ ast::VariantDef::Struct(it) => it.field_list(),
+ ast::VariantDef::Union(it) => it.record_field_list().map(ast::FieldList::RecordFieldList),
+ ast::VariantDef::Variant(it) => it.field_list(),
+ };
+ let Some(fields) = fields else {
+ return ArenaMap::new();
+ };
+
+ let mut result = ArenaMap::new();
+ let mut idx = 0;
+ match fields {
+ ast::FieldList::RecordFieldList(fields) => {
+ for field in fields.fields() {
+ if AttrFlags::is_cfg_enabled_for(&field, cfg_options).is_ok() {
+ result.insert(
+ la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(idx)),
+ field_attrs(cfg_options, variant_syntax.with_value(field.into())),
+ );
+ idx += 1;
+ }
+ }
+ }
+ ast::FieldList::TupleFieldList(fields) => {
+ for field in fields.fields() {
+ if AttrFlags::is_cfg_enabled_for(&field, cfg_options).is_ok() {
+ result.insert(
+ la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(idx)),
+ field_attrs(cfg_options, variant_syntax.with_value(field.into())),
+ );
+ idx += 1;
+ }
+ }
+ }
+ }
+ result.shrink_to_fit();
+ result
+}
+
+#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct RustcLayoutScalarValidRange {
+ pub start: Option<u128>,
+ pub end: Option<u128>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+struct DocsSourceMapLine {
+ /// The offset in [`Docs::docs`].
+ string_offset: TextSize,
+ /// The offset in the AST of the text.
+ ast_offset: TextSize,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Docs {
+ /// The concatenated string of all `#[doc = "..."]` attributes and documentation comments.
+ docs: String,
+ /// A sorted map from an offset in `docs` to an offset in the source code.
+ docs_source_map: Vec<DocsSourceMapLine>,
+    /// If the item is an outlined module (`mod foo;`), `docs_source_map` stores the concatenated
+ /// list of the outline and inline docs (outline first). Then, this field contains the [`HirFileId`]
+ /// of the outline declaration, and the index in `docs` from which the inline docs
+ /// begin.
+ outline_mod: Option<(HirFileId, usize)>,
+ inline_file: HirFileId,
+    /// The size of the prepended prefix, which does not map to real doc comments.
+ prefix_len: TextSize,
+ /// The offset in `docs` from which the docs are inner attributes/comments.
+ inline_inner_docs_start: Option<TextSize>,
+ /// Like `inline_inner_docs_start`, but for `outline_mod`. This can happen only when merging `Docs`
+ /// (as outline modules don't have inner attributes).
+ outline_inner_docs_start: Option<TextSize>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum IsInnerDoc {
+ No,
+ Yes,
+}
+
+impl IsInnerDoc {
+ #[inline]
+ pub fn yes(self) -> bool {
+ self == IsInnerDoc::Yes
+ }
+}
+
+impl Docs {
+ #[inline]
+ pub fn docs(&self) -> &str {
+ &self.docs
+ }
+
+ #[inline]
+ pub fn into_docs(self) -> String {
+ self.docs
+ }
+
+ pub fn find_ast_range(
+ &self,
+ mut string_range: TextRange,
+ ) -> Option<(InFile<TextRange>, IsInnerDoc)> {
+ if string_range.start() < self.prefix_len {
+ return None;
+ }
+ string_range -= self.prefix_len;
+
+ let mut file = self.inline_file;
+ let mut inner_docs_start = self.inline_inner_docs_start;
+ // Check whether the range is from the outline, the inline, or both.
+ let source_map = if let Some((outline_mod_file, outline_mod_end)) = self.outline_mod {
+ if let Some(first_inline) = self.docs_source_map.get(outline_mod_end) {
+ if string_range.end() <= first_inline.string_offset {
+ // The range is completely in the outline.
+ file = outline_mod_file;
+ inner_docs_start = self.outline_inner_docs_start;
+ &self.docs_source_map[..outline_mod_end]
+ } else if string_range.start() >= first_inline.string_offset {
+ // The range is completely in the inline.
+ &self.docs_source_map[outline_mod_end..]
+ } else {
+ // The range is combined from the outline and the inline - cannot map it back.
+ return None;
+ }
+ } else {
+ // There is no inline.
+ file = outline_mod_file;
+ inner_docs_start = self.outline_inner_docs_start;
+ &self.docs_source_map
+ }
+ } else {
+ // There is no outline.
+ &self.docs_source_map
+ };
+
+ let after_range =
+ source_map.partition_point(|line| line.string_offset <= string_range.start()) - 1;
+ let after_range = &source_map[after_range..];
+ let line = after_range.first()?;
+ if after_range.get(1).is_some_and(|next_line| next_line.string_offset < string_range.end())
+ {
+ // The range is combined from two lines - cannot map it back.
+ return None;
+ }
+ let ast_range = string_range - line.string_offset + line.ast_offset;
+ let is_inner = if inner_docs_start
+ .is_some_and(|inner_docs_start| string_range.start() >= inner_docs_start)
+ {
+ IsInnerDoc::Yes
+ } else {
+ IsInnerDoc::No
+ };
+ Some((InFile::new(file, ast_range), is_inner))
+ }
+
+ #[inline]
+ pub fn shift_by(&mut self, offset: TextSize) {
+ self.prefix_len += offset;
+ }
+
+ pub fn prepend_str(&mut self, s: &str) {
+ self.prefix_len += TextSize::of(s);
+ self.docs.insert_str(0, s);
+ }
+
+ pub fn append_str(&mut self, s: &str) {
+ self.docs.push_str(s);
+ }
+
+ pub fn append(&mut self, other: &Docs) {
+ let other_offset = TextSize::of(&self.docs);
+
+ assert!(
+ self.outline_mod.is_none() && other.outline_mod.is_none(),
+ "cannot merge `Docs` that have `outline_mod` set"
+ );
+ self.outline_mod = Some((self.inline_file, self.docs_source_map.len()));
+ self.inline_file = other.inline_file;
+ self.outline_inner_docs_start = self.inline_inner_docs_start;
+ self.inline_inner_docs_start = other.inline_inner_docs_start.map(|it| it + other_offset);
+
+ self.docs.push_str(&other.docs);
+ self.docs_source_map.extend(other.docs_source_map.iter().map(
+ |&DocsSourceMapLine { string_offset, ast_offset }| DocsSourceMapLine {
+ ast_offset,
+ string_offset: string_offset + other_offset,
+ },
+ ));
+ }
+
+ fn extend_with_doc_comment(&mut self, comment: ast::Comment, indent: &mut usize) {
+ let Some((doc, offset)) = comment.doc_comment() else { return };
+ self.extend_with_doc_str(doc, comment.syntax().text_range().start() + offset, indent);
+ }
+
+ fn extend_with_doc_attr(&mut self, value: SyntaxToken, indent: &mut usize) {
+ let Some(value) = ast::String::cast(value) else { return };
+ let Some(value_offset) = value.text_range_between_quotes() else { return };
+ let value_offset = value_offset.start();
+ let Ok(value) = value.value() else { return };
+ // FIXME: Handle source maps for escaped text.
+ self.extend_with_doc_str(&value, value_offset, indent);
+ }
+
+ fn extend_with_doc_str(&mut self, doc: &str, mut offset_in_ast: TextSize, indent: &mut usize) {
+ for line in doc.split('\n') {
+ self.docs_source_map.push(DocsSourceMapLine {
+ string_offset: TextSize::of(&self.docs),
+ ast_offset: offset_in_ast,
+ });
+ offset_in_ast += TextSize::of(line) + TextSize::of("\n");
+
+ let line = line.trim_end();
+ if let Some(line_indent) = line.chars().position(|ch| !ch.is_whitespace()) {
+ // Empty lines are handled because `position()` returns `None` for them.
+ *indent = std::cmp::min(*indent, line_indent);
+ }
+ self.docs.push_str(line);
+ self.docs.push('\n');
+ }
+ }
+
+ fn remove_indent(&mut self, indent: usize, start_source_map_index: usize) {
+ /// In case of panics, we want to avoid corrupted UTF-8 in `self.docs`, so we clear it.
+ struct Guard<'a>(&'a mut Docs);
+ impl Drop for Guard<'_> {
+ fn drop(&mut self) {
+ let Docs {
+ docs,
+ docs_source_map,
+ outline_mod,
+ inline_file: _,
+ prefix_len: _,
+ inline_inner_docs_start: _,
+ outline_inner_docs_start: _,
+ } = self.0;
+ // Don't use `String::clear()` here because it's not guaranteed to not do UTF-8-dependent things,
+ // and we may have temporarily broken the string's encoding.
+ unsafe { docs.as_mut_vec() }.clear();
+ // This is just to avoid panics down the road.
+ docs_source_map.clear();
+ *outline_mod = None;
+ }
+ }
+
+ if self.docs.is_empty() {
+ return;
+ }
+
+ let guard = Guard(self);
+ let source_map = &mut guard.0.docs_source_map[start_source_map_index..];
+ let Some(&DocsSourceMapLine { string_offset: mut copy_into, .. }) = source_map.first()
+ else {
+ return;
+ };
+ // We basically want to remove multiple ranges from a string. Doing this efficiently (without O(N^2)
+ // or allocations) requires unsafe. Basically, for each line, we copy the line minus the indent into
+ // consecutive to the previous line (which may have moved). Then at the end we truncate.
+ let mut accumulated_offset = TextSize::new(0);
+ for idx in 0..source_map.len() {
+ let string_end_offset = source_map
+ .get(idx + 1)
+ .map_or_else(|| TextSize::of(&guard.0.docs), |next_attr| next_attr.string_offset);
+ let line_source = &mut source_map[idx];
+ let line_docs =
+ &guard.0.docs[TextRange::new(line_source.string_offset, string_end_offset)];
+ let line_docs_len = TextSize::of(line_docs);
+ let indent_size = line_docs.char_indices().nth(indent).map_or_else(
+ || TextSize::of(line_docs) - TextSize::of("\n"),
+ |(offset, _)| TextSize::new(offset as u32),
+ );
+ unsafe { guard.0.docs.as_bytes_mut() }.copy_within(
+ Range::<usize>::from(TextRange::new(
+ line_source.string_offset + indent_size,
+ string_end_offset,
+ )),
+ copy_into.into(),
+ );
+ copy_into += line_docs_len - indent_size;
+
+ if let Some(inner_attrs_start) = &mut guard.0.inline_inner_docs_start
+ && *inner_attrs_start == line_source.string_offset
+ {
+ *inner_attrs_start -= accumulated_offset;
+ }
+        // The removals in the string accumulate, but not in the AST, because it already points
+ // to the beginning of each attribute.
+ // Also, we need to shift the AST offset of every line, but the string offset of the first
+ // line should not get shifted (in general, the shift for the string offset is by the
+ // number of lines until the current one, excluding the current one).
+ line_source.string_offset -= accumulated_offset;
+ line_source.ast_offset += indent_size;
+
+ accumulated_offset += indent_size;
+ }
+ // Don't use `String::truncate()` here because it's not guaranteed to not do UTF-8-dependent things,
+ // and we may have temporarily broken the string's encoding.
+ unsafe { guard.0.docs.as_mut_vec() }.truncate(copy_into.into());
+
+ std::mem::forget(guard);
+ }
+
+ fn remove_last_newline(&mut self) {
+ self.docs.truncate(self.docs.len().saturating_sub(1));
+ }
+
+ fn shrink_to_fit(&mut self) {
+ let Docs {
+ docs,
+ docs_source_map,
+ outline_mod: _,
+ inline_file: _,
+ prefix_len: _,
+ inline_inner_docs_start: _,
+ outline_inner_docs_start: _,
+ } = self;
+ docs.shrink_to_fit();
+ docs_source_map.shrink_to_fit();
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Hash)]
+pub struct DeriveInfo {
+ pub trait_name: Symbol,
+ pub helpers: Box<[Symbol]>,
+}
+
+fn extract_doc_aliases(result: &mut Vec<Symbol>, attr: Meta) -> ControlFlow<Infallible> {
+ if let Meta::TokenTree { path, tt } = attr
+ && path.is1("doc")
+ {
+ for atom in DocAtom::parse(tt) {
+ match atom {
+ DocAtom::Alias(aliases) => {
+ result.extend(aliases.into_iter().map(|alias| Symbol::intern(&alias)))
+ }
+ DocAtom::KeyValue { key, value } if key == "alias" => {
+ result.push(Symbol::intern(&value))
+ }
+ _ => {}
+ }
+ }
+ }
+ ControlFlow::Continue(())
+}
+
+fn extract_cfgs(result: &mut Vec<CfgExpr>, attr: Meta) -> ControlFlow<Infallible> {
+ if let Meta::TokenTree { path, tt } = attr
+ && path.is1("cfg")
+ {
+ result.push(CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(&tt).peekable()));
+ }
+ ControlFlow::Continue(())
+}
+
+fn extract_docs<'a>(
+ get_cfg_options: &dyn Fn() -> &'a CfgOptions,
+ source: InFile<ast::AnyHasAttrs>,
+ outer_mod_decl: Option<InFile<ast::Module>>,
+ inner_attrs_node: Option<SyntaxNode>,
+) -> Option<Box<Docs>> {
+ let mut result = Docs {
+ docs: String::new(),
+ docs_source_map: Vec::new(),
+ outline_mod: None,
+ inline_file: source.file_id,
+ prefix_len: TextSize::new(0),
+ inline_inner_docs_start: None,
+ outline_inner_docs_start: None,
+ };
+
+ let mut cfg_options = None;
+ let mut extend_with_attrs =
+ |result: &mut Docs, node: &SyntaxNode, expect_inner_attrs, indent: &mut usize| {
+ expand_cfg_attr_with_doc_comments::<_, Infallible>(
+ AttrDocCommentIter::from_syntax_node(node).filter(|attr| match attr {
+ Either::Left(attr) => attr.kind().is_inner() == expect_inner_attrs,
+ Either::Right(comment) => comment.kind().doc.is_some_and(|kind| {
+ (kind == ast::CommentPlacement::Inner) == expect_inner_attrs
+ }),
+ }),
+ || cfg_options.get_or_insert_with(get_cfg_options),
+ |attr| {
+ match attr {
+ Either::Right(doc_comment) => {
+ result.extend_with_doc_comment(doc_comment, indent)
+ }
+ Either::Left((attr, _, _, _)) => match attr {
+ // FIXME: Handle macros: `#[doc = concat!("foo", "bar")]`.
+ Meta::NamedKeyValue {
+ name: Some(name), value: Some(value), ..
+ } if name.text() == "doc" => {
+ result.extend_with_doc_attr(value, indent);
+ }
+ _ => {}
+ },
+ }
+ ControlFlow::Continue(())
+ },
+ );
+ };
+
+ if let Some(outer_mod_decl) = outer_mod_decl {
+ let mut indent = usize::MAX;
+ extend_with_attrs(&mut result, outer_mod_decl.value.syntax(), false, &mut indent);
+ result.remove_indent(indent, 0);
+ result.outline_mod = Some((outer_mod_decl.file_id, result.docs_source_map.len()));
+ }
+
+ let inline_source_map_start = result.docs_source_map.len();
+ let mut indent = usize::MAX;
+ extend_with_attrs(&mut result, source.value.syntax(), false, &mut indent);
+ if let Some(inner_attrs_node) = &inner_attrs_node {
+ result.inline_inner_docs_start = Some(TextSize::of(&result.docs));
+ extend_with_attrs(&mut result, inner_attrs_node, true, &mut indent);
+ }
+ result.remove_indent(indent, inline_source_map_start);
+
+ result.remove_last_newline();
+
+ result.shrink_to_fit();
+
+ if result.docs.is_empty() { None } else { Some(Box::new(result)) }
+}
+
+#[salsa::tracked]
+impl AttrFlags {
+ #[salsa::tracked]
+ pub fn query(db: &dyn DefDatabase, owner: AttrDefId) -> AttrFlags {
+ let mut attr_flags = AttrFlags::empty();
+ collect_attrs(db, owner, |attr| match_attr_flags(&mut attr_flags, attr));
+ attr_flags
+ }
+
+ #[inline]
+ pub fn query_field(db: &dyn DefDatabase, field: FieldId) -> AttrFlags {
+ return field_attr_flags(db, field.parent)
+ .get(field.local_id)
+ .copied()
+ .unwrap_or_else(AttrFlags::empty);
+
+ #[salsa::tracked(returns(ref))]
+ fn field_attr_flags(
+ db: &dyn DefDatabase,
+ variant: VariantId,
+ ) -> ArenaMap<LocalFieldId, AttrFlags> {
+ collect_field_attrs(db, variant, |cfg_options, field| {
+ let mut attr_flags = AttrFlags::empty();
+ expand_cfg_attr(
+ field.value.attrs(),
+ || cfg_options,
+ |attr, _, _, _| match_attr_flags(&mut attr_flags, attr),
+ );
+ attr_flags
+ })
+ }
+ }
+
+ #[inline]
+ pub fn query_generic_params(
+ db: &dyn DefDatabase,
+ def: GenericDefId,
+ ) -> &(ArenaMap<LocalLifetimeParamId, AttrFlags>, ArenaMap<LocalTypeOrConstParamId, AttrFlags>)
+ {
+ let generic_params = GenericParams::new(db, def);
+ let params_count_excluding_self =
+ generic_params.len() - usize::from(generic_params.trait_self_param().is_some());
+ if params_count_excluding_self == 0 {
+ return const { &(ArenaMap::new(), ArenaMap::new()) };
+ }
+ return generic_params_attr_flags(db, def);
+
+ #[salsa::tracked(returns(ref))]
+ fn generic_params_attr_flags(
+ db: &dyn DefDatabase,
+ def: GenericDefId,
+ ) -> (ArenaMap<LocalLifetimeParamId, AttrFlags>, ArenaMap<LocalTypeOrConstParamId, AttrFlags>)
+ {
+ let mut lifetimes = ArenaMap::new();
+ let mut type_and_consts = ArenaMap::new();
+
+ let mut cfg_options = None;
+ let mut cfg_options =
+ || *cfg_options.get_or_insert_with(|| def.krate(db).cfg_options(db));
+
+ let lifetimes_source = HasChildSource::<LocalLifetimeParamId>::child_source(&def, db);
+ for (lifetime_id, lifetime) in lifetimes_source.value.iter() {
+ let mut attr_flags = AttrFlags::empty();
+ expand_cfg_attr(lifetime.attrs(), &mut cfg_options, |attr, _, _, _| {
+ match_attr_flags(&mut attr_flags, attr)
+ });
+ if !attr_flags.is_empty() {
+ lifetimes.insert(lifetime_id, attr_flags);
+ }
+ }
+
+ let type_and_consts_source =
+ HasChildSource::<LocalTypeOrConstParamId>::child_source(&def, db);
+ for (type_or_const_id, type_or_const) in type_and_consts_source.value.iter() {
+ let mut attr_flags = AttrFlags::empty();
+ expand_cfg_attr(type_or_const.attrs(), &mut cfg_options, |attr, _, _, _| {
+ match_attr_flags(&mut attr_flags, attr)
+ });
+ if !attr_flags.is_empty() {
+ type_and_consts.insert(type_or_const_id, attr_flags);
+ }
+ }
+
+ lifetimes.shrink_to_fit();
+ type_and_consts.shrink_to_fit();
+ (lifetimes, type_and_consts)
+ }
+ }
+
+ #[inline]
+ pub fn query_lifetime_param(db: &dyn DefDatabase, owner: LifetimeParamId) -> AttrFlags {
+ AttrFlags::query_generic_params(db, owner.parent)
+ .0
+ .get(owner.local_id)
+ .copied()
+ .unwrap_or_else(AttrFlags::empty)
+ }
+ #[inline]
+ pub fn query_type_or_const_param(db: &dyn DefDatabase, owner: TypeOrConstParamId) -> AttrFlags {
+ AttrFlags::query_generic_params(db, owner.parent)
+ .1
+ .get(owner.local_id)
+ .copied()
+ .unwrap_or_else(AttrFlags::empty)
+ }
+
+ pub(crate) fn is_cfg_enabled_for(
+ owner: &dyn HasAttrs,
+ cfg_options: &CfgOptions,
+ ) -> Result<(), CfgExpr> {
+ let attrs = ast::attrs_including_inner(owner);
+ let result = expand_cfg_attr(
+ attrs,
+ || cfg_options,
+ |attr, _, _, _| {
+ if let Meta::TokenTree { path, tt } = attr
+ && path.is1("cfg")
+ && let cfg =
+ CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(&tt).peekable())
+ && cfg_options.check(&cfg) == Some(false)
+ {
+ ControlFlow::Break(cfg)
+ } else {
+ ControlFlow::Continue(())
+ }
+ },
+ );
+ match result {
+ Some(cfg) => Err(cfg),
+ None => Ok(()),
+ }
+ }
+
+ #[inline]
+ pub fn lang_item(db: &dyn DefDatabase, owner: AttrDefId) -> Option<Symbol> {
+ AttrFlags::query(db, owner).lang_item_with_attrs(db, owner)
+ }
+
+ #[inline]
+ pub fn lang_item_with_attrs(self, db: &dyn DefDatabase, owner: AttrDefId) -> Option<Symbol> {
+ if !self.contains(AttrFlags::LANG_ITEM) {
+ // Don't create the query in case this is not a lang item, this wastes memory.
+ return None;
+ }
+
+ return lang_item(db, owner);
+
+ #[salsa::tracked]
+ fn lang_item(db: &dyn DefDatabase, owner: AttrDefId) -> Option<Symbol> {
+ collect_attrs(db, owner, |attr| {
+ if let Meta::NamedKeyValue { name: Some(name), value: Some(value), .. } = attr
+ && name.text() == "lang"
+ && let Some(value) = ast::String::cast(value)
+ && let Ok(value) = value.value()
+ {
+ ControlFlow::Break(Symbol::intern(&value))
+ } else {
+ ControlFlow::Continue(())
+ }
+ })
+ }
+ }
+
+ #[inline]
+ pub fn repr(db: &dyn DefDatabase, owner: AdtId) -> Option<ReprOptions> {
+ if !AttrFlags::query(db, owner.into()).contains(AttrFlags::HAS_REPR) {
+            // Don't create the query if this has no repr, to avoid wasting memory.
+ return None;
+ }
+
+ return repr(db, owner);
+
+ #[salsa::tracked]
+ fn repr(db: &dyn DefDatabase, owner: AdtId) -> Option<ReprOptions> {
+ let mut result = None;
+ collect_attrs::<Infallible>(db, owner.into(), |attr| {
+ if let Meta::TokenTree { path, tt } = attr
+ && path.is1("repr")
+ && let Some(repr) = parse_repr_tt(&tt)
+ {
+ match &mut result {
+ Some(existing) => merge_repr(existing, repr),
+ None => result = Some(repr),
+ }
+ }
+ ControlFlow::Continue(())
+ });
+ result
+ }
+ }
+
+ /// Call this only if there are legacy const generics, to save memory.
+ #[salsa::tracked(returns(ref))]
+ pub(crate) fn legacy_const_generic_indices(
+ db: &dyn DefDatabase,
+ owner: FunctionId,
+ ) -> Option<Box<[u32]>> {
+ let result = collect_attrs(db, owner.into(), |attr| {
+ if let Meta::TokenTree { path, tt } = attr
+ && path.is1("rustc_legacy_const_generics")
+ {
+ let result = parse_rustc_legacy_const_generics(tt);
+ ControlFlow::Break(result)
+ } else {
+ ControlFlow::Continue(())
+ }
+ });
+ result.filter(|it| !it.is_empty())
+ }
+
+ // There aren't typically many crates, so it's okay to always make this a query without a flag.
+ #[salsa::tracked(returns(ref))]
+ pub fn doc_html_root_url(db: &dyn DefDatabase, krate: Crate) -> Option<SmolStr> {
+ let root_file_id = krate.root_file_id(db);
+ let syntax = db.parse(root_file_id).tree();
+
+ let mut cfg_options = None;
+ expand_cfg_attr(
+ syntax.attrs(),
+ || cfg_options.get_or_insert(krate.cfg_options(db)),
+ |attr, _, _, _| {
+ if let Meta::TokenTree { path, tt } = attr
+ && path.is1("doc")
+ && let Some(result) = DocAtom::parse(tt).into_iter().find_map(|atom| {
+ if let DocAtom::KeyValue { key, value } = atom
+ && key == "html_root_url"
+ {
+ Some(value)
+ } else {
+ None
+ }
+ })
+ {
+ ControlFlow::Break(result)
+ } else {
+ ControlFlow::Continue(())
+ }
+ },
+ )
+ }
+
+ #[inline]
+ pub fn target_features(db: &dyn DefDatabase, owner: FunctionId) -> &FxHashSet<Symbol> {
+ if !AttrFlags::query(db, owner.into()).contains(AttrFlags::HAS_TARGET_FEATURE) {
+ return const { &FxHashSet::with_hasher(rustc_hash::FxBuildHasher) };
+ }
+
+ return target_features(db, owner);
+
+ #[salsa::tracked(returns(ref))]
+ fn target_features(db: &dyn DefDatabase, owner: FunctionId) -> FxHashSet<Symbol> {
+ let mut result = FxHashSet::default();
+ collect_attrs::<Infallible>(db, owner.into(), |attr| {
+ if let Meta::TokenTree { path, tt } = attr
+ && path.is1("target_feature")
+ && let mut tt = TokenTreeChildren::new(&tt)
+ && let Some(NodeOrToken::Token(enable_ident)) = tt.next()
+ && enable_ident.text() == "enable"
+ && let Some(NodeOrToken::Token(eq_token)) = tt.next()
+ && eq_token.kind() == T![=]
+ && let Some(NodeOrToken::Token(features)) = tt.next()
+ && let Some(features) = ast::String::cast(features)
+ && let Ok(features) = features.value()
+ && tt.next().is_none()
+ {
+ result.extend(features.split(',').map(Symbol::intern));
+ }
+ ControlFlow::Continue(())
+ });
+ result.shrink_to_fit();
+ result
+ }
+ }
+
+ #[inline]
+ pub fn rustc_layout_scalar_valid_range(
+ db: &dyn DefDatabase,
+ owner: AdtId,
+ ) -> RustcLayoutScalarValidRange {
+ if !AttrFlags::query(db, owner.into()).contains(AttrFlags::RUSTC_LAYOUT_SCALAR_VALID_RANGE)
+ {
+ return RustcLayoutScalarValidRange::default();
+ }
+
+ return rustc_layout_scalar_valid_range(db, owner);
+
+ #[salsa::tracked]
+ fn rustc_layout_scalar_valid_range(
+ db: &dyn DefDatabase,
+ owner: AdtId,
+ ) -> RustcLayoutScalarValidRange {
+ let mut result = RustcLayoutScalarValidRange::default();
+ collect_attrs::<Infallible>(db, owner.into(), |attr| {
+ if let Meta::TokenTree { path, tt } = attr
+ && (path.is1("rustc_layout_scalar_valid_range_start")
+ || path.is1("rustc_layout_scalar_valid_range_end"))
+ && let tt = TokenTreeChildren::new(&tt)
+ && let Ok(NodeOrToken::Token(value)) = tt.exactly_one()
+ && let Some(value) = ast::IntNumber::cast(value)
+ && let Ok(value) = value.value()
+ {
+ if path.is1("rustc_layout_scalar_valid_range_start") {
+ result.start = Some(value)
+ } else {
+ result.end = Some(value);
+ }
+ }
+ ControlFlow::Continue(())
+ });
+ result
+ }
+ }
+
+ #[inline]
+ pub fn doc_aliases(self, db: &dyn DefDatabase, owner: Either<AttrDefId, FieldId>) -> &[Symbol] {
+ if !self.contains(AttrFlags::HAS_DOC_ALIASES) {
+ return &[];
+ }
+ return match owner {
+ Either::Left(it) => doc_aliases(db, it),
+ Either::Right(field) => fields_doc_aliases(db, field.parent)
+ .get(field.local_id)
+ .map(|it| &**it)
+ .unwrap_or_default(),
+ };
+
+ #[salsa::tracked(returns(ref))]
+ fn doc_aliases(db: &dyn DefDatabase, owner: AttrDefId) -> Box<[Symbol]> {
+ let mut result = Vec::new();
+ collect_attrs::<Infallible>(db, owner, |attr| extract_doc_aliases(&mut result, attr));
+ result.into_boxed_slice()
+ }
+
+ #[salsa::tracked(returns(ref))]
+ fn fields_doc_aliases(
+ db: &dyn DefDatabase,
+ variant: VariantId,
+ ) -> ArenaMap<LocalFieldId, Box<[Symbol]>> {
+ collect_field_attrs(db, variant, |cfg_options, field| {
+ let mut result = Vec::new();
+ expand_cfg_attr(
+ field.value.attrs(),
+ || cfg_options,
+ |attr, _, _, _| extract_doc_aliases(&mut result, attr),
+ );
+ result.into_boxed_slice()
+ })
+ }
+ }
+
+ #[inline]
+ pub fn cfgs(self, db: &dyn DefDatabase, owner: Either<AttrDefId, FieldId>) -> Option<&CfgExpr> {
+ if !self.contains(AttrFlags::HAS_CFG) {
+ return None;
+ }
+ return match owner {
+ Either::Left(it) => cfgs(db, it).as_ref(),
+ Either::Right(field) => {
+ fields_cfgs(db, field.parent).get(field.local_id).and_then(|it| it.as_ref())
+ }
+ };
+
+ // We LRU this query because it is only used by IDE.
+ #[salsa::tracked(returns(ref), lru = 250)]
+ fn cfgs(db: &dyn DefDatabase, owner: AttrDefId) -> Option<CfgExpr> {
+ let mut result = Vec::new();
+ collect_attrs::<Infallible>(db, owner, |attr| extract_cfgs(&mut result, attr));
+ match result.len() {
+ 0 => None,
+ 1 => result.into_iter().next(),
+ _ => Some(CfgExpr::All(result.into_boxed_slice())),
+ }
+ }
+
+ // We LRU this query because it is only used by IDE.
+ #[salsa::tracked(returns(ref), lru = 50)]
+ fn fields_cfgs(
+ db: &dyn DefDatabase,
+ variant: VariantId,
+ ) -> ArenaMap<LocalFieldId, Option<CfgExpr>> {
+ collect_field_attrs(db, variant, |cfg_options, field| {
+ let mut result = Vec::new();
+ expand_cfg_attr(
+ field.value.attrs(),
+ || cfg_options,
+ |attr, _, _, _| extract_cfgs(&mut result, attr),
+ );
+ match result.len() {
+ 0 => None,
+ 1 => result.into_iter().next(),
+ _ => Some(CfgExpr::All(result.into_boxed_slice())),
+ }
+ })
+ }
+ }
+
+ #[inline]
+ pub fn doc_keyword(db: &dyn DefDatabase, owner: InternedModuleId) -> Option<Symbol> {
+ if !AttrFlags::query(db, AttrDefId::ModuleId(owner)).contains(AttrFlags::HAS_DOC_KEYWORD) {
+ return None;
+ }
+ return doc_keyword(db, owner);
+
+ #[salsa::tracked]
+ fn doc_keyword(db: &dyn DefDatabase, owner: InternedModuleId) -> Option<Symbol> {
+ collect_attrs(db, AttrDefId::ModuleId(owner), |attr| {
+ if let Meta::TokenTree { path, tt } = attr
+ && path.is1("doc")
+ {
+ for atom in DocAtom::parse(tt) {
+ if let DocAtom::KeyValue { key, value } = atom
+ && key == "keyword"
+ {
+ return ControlFlow::Break(Symbol::intern(&value));
+ }
+ }
+ }
+ ControlFlow::Continue(())
+ })
+ }
+ }
+
+ // We LRU this query because it is only used by IDE.
+ #[salsa::tracked(returns(ref), lru = 250)]
+ pub fn docs(db: &dyn DefDatabase, owner: AttrDefId) -> Option<Box<Docs>> {
+ let (source, outer_mod_decl, krate) = attrs_source(db, owner);
+ let inner_attrs_node = source.value.inner_attributes_node();
+ extract_docs(&|| krate.cfg_options(db), source, outer_mod_decl, inner_attrs_node)
+ }
+
+ #[inline]
+ pub fn field_docs(db: &dyn DefDatabase, field: FieldId) -> Option<&Docs> {
+ return fields_docs(db, field.parent).get(field.local_id).and_then(|it| it.as_deref());
+
+ // We LRU this query because it is only used by IDE.
+ #[salsa::tracked(returns(ref), lru = 50)]
+ pub fn fields_docs(
+ db: &dyn DefDatabase,
+ variant: VariantId,
+ ) -> ArenaMap<LocalFieldId, Option<Box<Docs>>> {
+ collect_field_attrs(db, variant, |cfg_options, field| {
+ extract_docs(&|| cfg_options, field, None, None)
+ })
+ }
+ }
+
+ #[inline]
+ pub fn derive_info(db: &dyn DefDatabase, owner: MacroId) -> Option<&DeriveInfo> {
+ if !AttrFlags::query(db, owner.into()).contains(AttrFlags::IS_DERIVE_OR_BUILTIN_MACRO) {
+ return None;
+ }
+
+ return derive_info(db, owner).as_ref();
+
+ #[salsa::tracked(returns(ref))]
+ fn derive_info(db: &dyn DefDatabase, owner: MacroId) -> Option<DeriveInfo> {
+ collect_attrs(db, owner.into(), |attr| {
+ if let Meta::TokenTree { path, tt } = attr
+ && path.segments.len() == 1
+ && matches!(
+ path.segments[0].text(),
+ "proc_macro_derive" | "rustc_builtin_macro"
+ )
+ && let mut tt = TokenTreeChildren::new(&tt)
+ && let Some(NodeOrToken::Token(trait_name)) = tt.next()
+ && trait_name.kind().is_any_identifier()
+ {
+ let trait_name = Symbol::intern(trait_name.text());
+
+ let helpers = if let Some(NodeOrToken::Token(comma)) = tt.next()
+ && comma.kind() == T![,]
+ && let Some(NodeOrToken::Token(attributes)) = tt.next()
+ && attributes.text() == "attributes"
+ && let Some(NodeOrToken::Node(attributes)) = tt.next()
+ {
+ attributes
+ .syntax()
+ .children_with_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .filter(|it| it.kind().is_any_identifier())
+ .map(|it| Symbol::intern(it.text()))
+ .collect::<Box<[_]>>()
+ } else {
+ Box::new([])
+ };
+
+ ControlFlow::Break(DeriveInfo { trait_name, helpers })
+ } else {
+ ControlFlow::Continue(())
+ }
+ })
+ }
+ }
+}
+
+fn merge_repr(this: &mut ReprOptions, other: ReprOptions) {
+ let ReprOptions { int, align, pack, flags, field_shuffle_seed: _ } = this;
+ flags.insert(other.flags);
+ *align = (*align).max(other.align);
+ *pack = match (*pack, other.pack) {
+ (Some(pack), None) | (None, Some(pack)) => Some(pack),
+ _ => (*pack).min(other.pack),
+ };
+ if other.int.is_some() {
+ *int = other.int;
+ }
+}
+
+fn parse_repr_tt(tt: &ast::TokenTree) -> Option<ReprOptions> {
+ use crate::builtin_type::{BuiltinInt, BuiltinUint};
+ use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
+
+ let mut tts = TokenTreeChildren::new(tt).peekable();
+
+ let mut acc = ReprOptions::default();
+ while let Some(tt) = tts.next() {
+ let NodeOrToken::Token(ident) = tt else {
+ continue;
+ };
+ if !ident.kind().is_any_identifier() {
+ continue;
+ }
+ let repr = match ident.text() {
+ "packed" => {
+ let pack = if let Some(NodeOrToken::Node(tt)) = tts.peek() {
+ let tt = tt.clone();
+ tts.next();
+ let mut tt_iter = TokenTreeChildren::new(&tt);
+ if let Some(NodeOrToken::Token(lit)) = tt_iter.next()
+ && let Some(lit) = ast::IntNumber::cast(lit)
+ && let Ok(lit) = lit.value()
+ && let Ok(lit) = lit.try_into()
+ {
+ lit
+ } else {
+ 0
+ }
+ } else {
+ 0
+ };
+ let pack = Some(Align::from_bytes(pack).unwrap_or(Align::ONE));
+ ReprOptions { pack, ..Default::default() }
+ }
+ "align" => {
+ let mut align = None;
+ if let Some(NodeOrToken::Node(tt)) = tts.peek() {
+ let tt = tt.clone();
+ tts.next();
+ let mut tt_iter = TokenTreeChildren::new(&tt);
+ if let Some(NodeOrToken::Token(lit)) = tt_iter.next()
+ && let Some(lit) = ast::IntNumber::cast(lit)
+ && let Ok(lit) = lit.value()
+ && let Ok(lit) = lit.try_into()
+ {
+ align = Align::from_bytes(lit).ok();
+ }
+ }
+ ReprOptions { align, ..Default::default() }
+ }
+ "C" => ReprOptions { flags: ReprFlags::IS_C, ..Default::default() },
+ "transparent" => ReprOptions { flags: ReprFlags::IS_TRANSPARENT, ..Default::default() },
+ "simd" => ReprOptions { flags: ReprFlags::IS_SIMD, ..Default::default() },
+ repr => {
+ let mut int = None;
+ if let Some(builtin) = BuiltinInt::from_suffix(repr)
+ .map(Either::Left)
+ .or_else(|| BuiltinUint::from_suffix(repr).map(Either::Right))
+ {
+ int = Some(match builtin {
+ Either::Left(bi) => match bi {
+ BuiltinInt::Isize => IntegerType::Pointer(true),
+ BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true),
+ BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true),
+ BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true),
+ BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true),
+ BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true),
+ },
+ Either::Right(bu) => match bu {
+ BuiltinUint::Usize => IntegerType::Pointer(false),
+ BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false),
+ BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false),
+ BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false),
+ BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false),
+ BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false),
+ },
+ });
+ }
+ ReprOptions { int, ..Default::default() }
+ }
+ };
+ merge_repr(&mut acc, repr);
+ }
+
+ Some(acc)
+}
+
+fn parse_rustc_legacy_const_generics(tt: ast::TokenTree) -> Box<[u32]> {
+ TokenTreeChildren::new(&tt)
+ .filter_map(|param| {
+ ast::IntNumber::cast(param.into_token()?)?.value().ok()?.try_into().ok()
+ })
+ .collect()
+}
+
+#[derive(Debug)]
+enum DocAtom {
+    /// e.g. `#[doc(hidden)]`
+ Flag(SmolStr),
+    /// e.g. `#[doc(alias = "it")]`
+ ///
+ /// Note that a key can have multiple values that are all considered "active" at the same time.
+ /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`.
+ KeyValue { key: SmolStr, value: SmolStr },
+    /// e.g. `#[doc(alias("x", "y"))]`
+ Alias(Vec<SmolStr>),
+}
+
+impl DocAtom {
+ fn parse(tt: ast::TokenTree) -> SmallVec<[DocAtom; 1]> {
+ let mut iter = TokenTreeChildren::new(&tt).peekable();
+ let mut result = SmallVec::new();
+ while iter.peek().is_some() {
+ if let Some(expr) = next_doc_expr(&mut iter) {
+ result.push(expr);
+ }
+ }
+ result
+ }
+}
+
+fn next_doc_expr(it: &mut Peekable<TokenTreeChildren>) -> Option<DocAtom> {
+ let name = match it.next() {
+ Some(NodeOrToken::Token(token)) if token.kind().is_any_identifier() => {
+ SmolStr::new(token.text())
+ }
+ _ => return None,
+ };
+
+ let ret = match it.peek() {
+ Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
+ it.next();
+ if let Some(NodeOrToken::Token(value)) = it.next()
+ && let Some(value) = ast::String::cast(value)
+ && let Ok(value) = value.value()
+ {
+ DocAtom::KeyValue { key: name, value: SmolStr::new(&*value) }
+ } else {
+ return None;
+ }
+ }
+ Some(NodeOrToken::Node(subtree)) => {
+ if name != "alias" {
+ return None;
+ }
+ let aliases = TokenTreeChildren::new(subtree)
+ .filter_map(|alias| {
+ Some(SmolStr::new(&*ast::String::cast(alias.into_token()?)?.value().ok()?))
+ })
+ .collect();
+ it.next();
+ DocAtom::Alias(aliases)
+ }
+ _ => DocAtom::Flag(name),
+ };
+ Some(ret)
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::expect;
+ use hir_expand::InFile;
+ use test_fixture::WithFixture;
+ use tt::{TextRange, TextSize};
+
+ use crate::attrs::IsInnerDoc;
+ use crate::{attrs::Docs, test_db::TestDB};
+
+ #[test]
+ fn docs() {
+ let (_db, file_id) = TestDB::with_single_file("");
+ let mut docs = Docs {
+ docs: String::new(),
+ docs_source_map: Vec::new(),
+ outline_mod: None,
+ inline_file: file_id.into(),
+ prefix_len: TextSize::new(0),
+ inline_inner_docs_start: None,
+ outline_inner_docs_start: None,
+ };
+ let mut indent = usize::MAX;
+
+ let outer = " foo\n\tbar baz";
+ let mut ast_offset = TextSize::new(123);
+ for line in outer.split('\n') {
+ docs.extend_with_doc_str(line, ast_offset, &mut indent);
+ ast_offset += TextSize::of(line) + TextSize::of("\n");
+ }
+
+ docs.inline_inner_docs_start = Some(TextSize::of(&docs.docs));
+ ast_offset += TextSize::new(123);
+ let inner = " bar \n baz";
+ for line in inner.split('\n') {
+ docs.extend_with_doc_str(line, ast_offset, &mut indent);
+ ast_offset += TextSize::of(line) + TextSize::of("\n");
+ }
+
+ assert_eq!(indent, 1);
+ expect![[r#"
+ [
+ DocsSourceMapLine {
+ string_offset: 0,
+ ast_offset: 123,
+ },
+ DocsSourceMapLine {
+ string_offset: 5,
+ ast_offset: 128,
+ },
+ DocsSourceMapLine {
+ string_offset: 15,
+ ast_offset: 261,
+ },
+ DocsSourceMapLine {
+ string_offset: 20,
+ ast_offset: 267,
+ },
+ ]
+ "#]]
+ .assert_debug_eq(&docs.docs_source_map);
+
+ docs.remove_indent(indent, 0);
+
+ assert_eq!(docs.inline_inner_docs_start, Some(TextSize::new(13)));
+
+ assert_eq!(docs.docs, "foo\nbar baz\nbar\nbaz\n");
+ expect![[r#"
+ [
+ DocsSourceMapLine {
+ string_offset: 0,
+ ast_offset: 124,
+ },
+ DocsSourceMapLine {
+ string_offset: 4,
+ ast_offset: 129,
+ },
+ DocsSourceMapLine {
+ string_offset: 13,
+ ast_offset: 262,
+ },
+ DocsSourceMapLine {
+ string_offset: 17,
+ ast_offset: 268,
+ },
+ ]
+ "#]]
+ .assert_debug_eq(&docs.docs_source_map);
+
+ docs.append(&docs.clone());
+ docs.prepend_str("prefix---");
+ assert_eq!(docs.docs, "prefix---foo\nbar baz\nbar\nbaz\nfoo\nbar baz\nbar\nbaz\n");
+ expect![[r#"
+ [
+ DocsSourceMapLine {
+ string_offset: 0,
+ ast_offset: 124,
+ },
+ DocsSourceMapLine {
+ string_offset: 4,
+ ast_offset: 129,
+ },
+ DocsSourceMapLine {
+ string_offset: 13,
+ ast_offset: 262,
+ },
+ DocsSourceMapLine {
+ string_offset: 17,
+ ast_offset: 268,
+ },
+ DocsSourceMapLine {
+ string_offset: 21,
+ ast_offset: 124,
+ },
+ DocsSourceMapLine {
+ string_offset: 25,
+ ast_offset: 129,
+ },
+ DocsSourceMapLine {
+ string_offset: 34,
+ ast_offset: 262,
+ },
+ DocsSourceMapLine {
+ string_offset: 38,
+ ast_offset: 268,
+ },
+ ]
+ "#]]
+ .assert_debug_eq(&docs.docs_source_map);
+
+ let range = |start, end| TextRange::new(TextSize::new(start), TextSize::new(end));
+ let in_file = |range| InFile::new(file_id.into(), range);
+ assert_eq!(docs.find_ast_range(range(0, 2)), None);
+ assert_eq!(docs.find_ast_range(range(8, 10)), None);
+ assert_eq!(
+ docs.find_ast_range(range(9, 10)),
+ Some((in_file(range(124, 125)), IsInnerDoc::No))
+ );
+ assert_eq!(docs.find_ast_range(range(20, 23)), None);
+ assert_eq!(
+ docs.find_ast_range(range(23, 25)),
+ Some((in_file(range(263, 265)), IsInnerDoc::Yes))
+ );
+ }
+}
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index d6bba6306b..cc311a41db 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -1,23 +1,21 @@
//! Defines database & queries for name resolution.
use base_db::{Crate, RootQueryDb, SourceDatabase};
-use either::Either;
use hir_expand::{
EditionedFileId, HirFileId, InFile, Lookup, MacroCallId, MacroDefId, MacroDefKind,
db::ExpandDatabase,
};
-use intern::sym;
use la_arena::ArenaMap;
-use syntax::{AstPtr, ast};
use triomphe::Arc;
use crate::{
- AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc,
- EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc,
- FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalFieldId, Macro2Id, Macro2Loc,
- MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ProcMacroId,
- ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitId, TraitLoc, TypeAliasId,
- TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId,
- attr::{Attrs, AttrsWithOwner},
+ AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, CrateRootModuleId, DefWithBodyId,
+ EnumId, EnumLoc, EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId,
+ ExternCrateLoc, FunctionId, FunctionLoc, GenericDefId, HasModule, ImplId, ImplLoc,
+ InternedModuleId, LocalFieldId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId,
+ MacroRulesLoc, MacroRulesLocFlags, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId,
+ StructLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc,
+ VariantId,
+ attrs::AttrFlags,
expr_store::{
Body, BodySourceMap, ExpressionStore, ExpressionStoreSourceMap, scope::ExprScopes,
},
@@ -29,7 +27,6 @@ use crate::{
ConstSignature, EnumSignature, FunctionSignature, ImplSignature, StaticSignature,
StructSignature, TraitSignature, TypeAliasSignature, UnionSignature,
},
- tt,
visibility::{self, Visibility},
};
@@ -237,24 +234,6 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
def: GenericDefId,
) -> (Arc<GenericParams>, Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>);
- // region:attrs
-
- #[salsa::invoke(Attrs::fields_attrs_query)]
- fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;
-
- // should this really be a query?
- #[salsa::invoke(crate::attr::fields_attrs_source_map)]
- fn fields_attrs_source_map(
- &self,
- def: VariantId,
- ) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;
-
- // FIXME: Make this a non-interned query.
- #[salsa::invoke_interned(AttrsWithOwner::attrs_query)]
- fn attrs(&self, def: AttrDefId) -> Attrs;
-
- // endregion:attrs
-
#[salsa::invoke(ImportMap::import_map_query)]
fn import_map(&self, krate: Crate) -> Arc<ImportMap>;
@@ -297,36 +276,9 @@ fn include_macro_invoc(
}
fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: Crate) -> bool {
- let file = crate_id.data(db).root_file_id(db);
- let item_tree = db.file_item_tree(file.into());
- let attrs = item_tree.top_level_raw_attrs();
- for attr in &**attrs {
- match attr.path().as_ident() {
- Some(ident) if *ident == sym::no_std => return true,
- Some(ident) if *ident == sym::cfg_attr => {}
- _ => continue,
- }
-
- // This is a `cfg_attr`; check if it could possibly expand to `no_std`.
- // Syntax is: `#[cfg_attr(condition(cfg, style), attr0, attr1, <...>)]`
- let tt = match attr.token_tree_value() {
- Some(tt) => tt.token_trees(),
- None => continue,
- };
-
- let segments =
- tt.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(p)) if p.char == ','));
- for output in segments.skip(1) {
- match output.flat_tokens() {
- [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::no_std => {
- return true;
- }
- _ => {}
- }
- }
- }
-
- false
+ let root_module = CrateRootModuleId::from(crate_id).module(db);
+ let attrs = AttrFlags::query(db, AttrDefId::ModuleId(InternedModuleId::new(db, root_module)));
+ attrs.contains(AttrFlags::IS_NO_STD)
}
fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
diff --git a/crates/hir-def/src/expr_store/expander.rs b/crates/hir-def/src/expr_store/expander.rs
index 23b9712d1e..6a2f06b0a6 100644
--- a/crates/hir-def/src/expr_store/expander.rs
+++ b/crates/hir-def/src/expr_store/expander.rs
@@ -17,11 +17,10 @@ use syntax::{AstNode, Parse, ast};
use triomphe::Arc;
use tt::TextRange;
-use crate::attr::Attrs;
-use crate::expr_store::HygieneId;
-use crate::macro_call_as_call_id;
-use crate::nameres::DefMap;
-use crate::{MacroId, UnresolvedMacro, db::DefDatabase};
+use crate::{
+ MacroId, UnresolvedMacro, attrs::AttrFlags, db::DefDatabase, expr_store::HygieneId,
+ macro_call_as_call_id, nameres::DefMap,
+};
#[derive(Debug)]
pub(super) struct Expander {
@@ -70,11 +69,10 @@ impl Expander {
pub(super) fn is_cfg_enabled(
&self,
- db: &dyn DefDatabase,
- has_attrs: &dyn HasAttrs,
+ owner: &dyn HasAttrs,
cfg_options: &CfgOptions,
) -> Result<(), cfg::CfgExpr> {
- Attrs::is_cfg_enabled_for(db, has_attrs, self.span_map.as_ref(), cfg_options)
+ AttrFlags::is_cfg_enabled_for(owner, cfg_options)
}
pub(super) fn call_syntax_ctx(&self) -> SyntaxContext {
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index 238538dc58..26a50b5325 100644
--- a/crates/hir-def/src/expr_store/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -12,7 +12,6 @@ use cfg::CfgOptions;
use either::Either;
use hir_expand::{
HirFileId, InFile, MacroDefId,
- mod_path::tool_path,
name::{AsName, Name},
span_map::SpanMapRef,
};
@@ -34,6 +33,7 @@ use tt::TextRange;
use crate::{
AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, GenericDefId, ImplId, MacroId,
ModuleDefId, ModuleId, TraitId, TypeAliasId, UnresolvedMacro,
+ attrs::AttrFlags,
builtin_type::BuiltinUint,
db::DefDatabase,
expr_store::{
@@ -87,14 +87,16 @@ pub(super) fn lower_body(
let mut params = vec![];
let mut collector = ExprCollector::new(db, module, current_file_id);
- let skip_body = match owner {
- DefWithBodyId::FunctionId(it) => db.attrs(it.into()),
- DefWithBodyId::StaticId(it) => db.attrs(it.into()),
- DefWithBodyId::ConstId(it) => db.attrs(it.into()),
- DefWithBodyId::VariantId(it) => db.attrs(it.into()),
- }
- .rust_analyzer_tool()
- .any(|attr| *attr.path() == tool_path![skip]);
+ let skip_body = AttrFlags::query(
+ db,
+ match owner {
+ DefWithBodyId::FunctionId(it) => it.into(),
+ DefWithBodyId::StaticId(it) => it.into(),
+ DefWithBodyId::ConstId(it) => it.into(),
+ DefWithBodyId::VariantId(it) => it.into(),
+ },
+ )
+ .contains(AttrFlags::RUST_ANALYZER_SKIP);
// If #[rust_analyzer::skip] annotated, only construct enough information for the signature
// and skip the body.
if skip_body {
@@ -2498,7 +2500,7 @@ impl<'db> ExprCollector<'db> {
/// Returns `None` (and emits diagnostics) when `owner` if `#[cfg]`d out, and `Some(())` when
/// not.
fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> bool {
- let enabled = self.expander.is_cfg_enabled(self.db, owner, self.cfg_options);
+ let enabled = self.expander.is_cfg_enabled(owner, self.cfg_options);
match enabled {
Ok(()) => true,
Err(cfg) => {
diff --git a/crates/hir-def/src/expr_store/pretty.rs b/crates/hir-def/src/expr_store/pretty.rs
index 42d3e07d9c..f5ef8e1a35 100644
--- a/crates/hir-def/src/expr_store/pretty.rs
+++ b/crates/hir-def/src/expr_store/pretty.rs
@@ -12,7 +12,8 @@ use span::Edition;
use syntax::ast::{HasName, RangeOp};
use crate::{
- AdtId, DefWithBodyId, GenericDefId, TypeParamId, VariantId,
+ AdtId, DefWithBodyId, FunctionId, GenericDefId, StructId, TypeParamId, VariantId,
+ attrs::AttrFlags,
expr_store::path::{GenericArg, GenericArgs},
hir::{
Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement,
@@ -167,7 +168,7 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi
GenericDefId::AdtId(id) => match id {
AdtId::StructId(id) => {
let signature = db.struct_signature(id);
- print_struct(db, &signature, edition)
+ print_struct(db, id, &signature, edition)
}
AdtId::UnionId(id) => {
format!("unimplemented {id:?}")
@@ -179,7 +180,7 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi
GenericDefId::ConstId(id) => format!("unimplemented {id:?}"),
GenericDefId::FunctionId(id) => {
let signature = db.function_signature(id);
- print_function(db, &signature, edition)
+ print_function(db, id, &signature, edition)
}
GenericDefId::ImplId(id) => format!("unimplemented {id:?}"),
GenericDefId::StaticId(id) => format!("unimplemented {id:?}"),
@@ -208,7 +209,8 @@ pub fn print_path(
pub fn print_struct(
db: &dyn DefDatabase,
- StructSignature { name, generic_params, store, flags, shape, repr }: &StructSignature,
+ id: StructId,
+ StructSignature { name, generic_params, store, flags, shape }: &StructSignature,
edition: Edition,
) -> String {
let mut p = Printer {
@@ -219,7 +221,7 @@ pub fn print_struct(
line_format: LineFormat::Newline,
edition,
};
- if let Some(repr) = repr {
+ if let Some(repr) = AttrFlags::repr(db, id.into()) {
if repr.c() {
wln!(p, "#[repr(C)]");
}
@@ -255,7 +257,8 @@ pub fn print_struct(
pub fn print_function(
db: &dyn DefDatabase,
- FunctionSignature {
+ id: FunctionId,
+ signature @ FunctionSignature {
name,
generic_params,
store,
@@ -263,10 +266,10 @@ pub fn print_function(
ret_type,
abi,
flags,
- legacy_const_generics_indices,
}: &FunctionSignature,
edition: Edition,
) -> String {
+ let legacy_const_generics_indices = signature.legacy_const_generics_indices(db, id);
let mut p = Printer {
db,
store,
@@ -298,7 +301,7 @@ pub fn print_function(
if i != 0 {
w!(p, ", ");
}
- if legacy_const_generics_indices.as_ref().is_some_and(|idx| idx.contains(&(i as u32))) {
+ if legacy_const_generics_indices.is_some_and(|idx| idx.contains(&(i as u32))) {
w!(p, "const: ");
}
p.print_type_ref(*param);
diff --git a/crates/hir-def/src/expr_store/tests/body/block.rs b/crates/hir-def/src/expr_store/tests/body/block.rs
index e8334cd973..4501ff4df5 100644
--- a/crates/hir-def/src/expr_store/tests/body/block.rs
+++ b/crates/hir-def/src/expr_store/tests/body/block.rs
@@ -189,8 +189,8 @@ fn f() {
}
"#,
expect![[r#"
- BlockId(3c01) in BlockRelativeModuleId { block: Some(BlockId(3c00)), local_id: Idx::<ModuleData>(1) }
- BlockId(3c00) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
+ BlockId(3801) in BlockRelativeModuleId { block: Some(BlockId(3800)), local_id: Idx::<ModuleData>(1) }
+ BlockId(3800) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
crate scope
"#]],
);
diff --git a/crates/hir-def/src/expr_store/tests/signatures.rs b/crates/hir-def/src/expr_store/tests/signatures.rs
index b68674c7a7..2dac4e7fc8 100644
--- a/crates/hir-def/src/expr_store/tests/signatures.rs
+++ b/crates/hir-def/src/expr_store/tests/signatures.rs
@@ -38,14 +38,24 @@ fn lower_and_print(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expe
match def {
GenericDefId::AdtId(adt_id) => match adt_id {
crate::AdtId::StructId(struct_id) => {
- out += &print_struct(&db, &db.struct_signature(struct_id), Edition::CURRENT);
+ out += &print_struct(
+ &db,
+ struct_id,
+ &db.struct_signature(struct_id),
+ Edition::CURRENT,
+ );
}
crate::AdtId::UnionId(_id) => (),
crate::AdtId::EnumId(_id) => (),
},
GenericDefId::ConstId(_id) => (),
GenericDefId::FunctionId(function_id) => {
- out += &print_function(&db, &db.function_signature(function_id), Edition::CURRENT)
+ out += &print_function(
+ &db,
+ function_id,
+ &db.function_signature(function_id),
+ Edition::CURRENT,
+ )
}
GenericDefId::ImplId(_id) => (),
diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs
index f31f355cfa..67cf466276 100644
--- a/crates/hir-def/src/import_map.rs
+++ b/crates/hir-def/src/import_map.rs
@@ -13,7 +13,8 @@ use stdx::format_to;
use triomphe::Arc;
use crate::{
- AssocItemId, AttrDefId, Complete, FxIndexMap, ModuleDefId, ModuleId, TraitId,
+ AssocItemId, AttrDefId, Complete, FxIndexMap, InternedModuleId, ModuleDefId, ModuleId, TraitId,
+ attrs::AttrFlags,
db::DefDatabase,
item_scope::{ImportOrExternCrate, ItemInNs},
nameres::{DefMap, assoc::TraitItems, crate_def_map},
@@ -165,17 +166,34 @@ impl ImportMap {
}
} else {
match item {
- ItemInNs::Types(id) | ItemInNs::Values(id) => id.try_into().ok(),
+ ItemInNs::Types(id) | ItemInNs::Values(id) => match id {
+ ModuleDefId::ModuleId(it) => {
+ Some(AttrDefId::ModuleId(InternedModuleId::new(db, it)))
+ }
+ ModuleDefId::FunctionId(it) => Some(it.into()),
+ ModuleDefId::AdtId(it) => Some(it.into()),
+ ModuleDefId::EnumVariantId(it) => Some(it.into()),
+ ModuleDefId::ConstId(it) => Some(it.into()),
+ ModuleDefId::StaticId(it) => Some(it.into()),
+ ModuleDefId::TraitId(it) => Some(it.into()),
+ ModuleDefId::TypeAliasId(it) => Some(it.into()),
+ ModuleDefId::MacroId(it) => Some(it.into()),
+ ModuleDefId::BuiltinType(_) => None,
+ },
ItemInNs::Macros(id) => Some(id.into()),
}
};
let (is_doc_hidden, is_unstable, do_not_complete) = match attr_id {
None => (false, false, Complete::Yes),
Some(attr_id) => {
- let attrs = db.attrs(attr_id);
+ let attrs = AttrFlags::query(db, attr_id);
let do_not_complete =
- Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), &attrs);
- (attrs.has_doc_hidden(), attrs.is_unstable(), do_not_complete)
+ Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), attrs);
+ (
+ attrs.contains(AttrFlags::IS_DOC_HIDDEN),
+ attrs.contains(AttrFlags::IS_UNSTABLE),
+ do_not_complete,
+ )
}
};
@@ -239,15 +257,15 @@ impl ImportMap {
};
let attr_id = item.into();
- let attrs = &db.attrs(attr_id);
+ let attrs = AttrFlags::query(db, attr_id);
let item_do_not_complete = Complete::extract(false, attrs);
let do_not_complete =
Complete::for_trait_item(trait_import_info.complete, item_do_not_complete);
let assoc_item_info = ImportInfo {
container: trait_import_info.container,
name: assoc_item_name.clone(),
- is_doc_hidden: attrs.has_doc_hidden(),
- is_unstable: attrs.is_unstable(),
+ is_doc_hidden: attrs.contains(AttrFlags::IS_DOC_HIDDEN),
+ is_unstable: attrs.contains(AttrFlags::IS_UNSTABLE),
complete: do_not_complete,
};
diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs
index f35df8d3a7..2a104fff2b 100644
--- a/crates/hir-def/src/item_tree.rs
+++ b/crates/hir-def/src/item_tree.rs
@@ -30,6 +30,7 @@
//! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its
//! surface syntax.
+mod attrs;
mod lower;
mod pretty;
#[cfg(test)]
@@ -43,10 +44,8 @@ use std::{
};
use ast::{AstNode, StructKind};
-use base_db::Crate;
use hir_expand::{
ExpandTo, HirFileId,
- attrs::RawAttrs,
mod_path::{ModPath, PathKind},
name::Name,
};
@@ -59,9 +58,12 @@ use syntax::{SyntaxKind, ast, match_ast};
use thin_vec::ThinVec;
use triomphe::Arc;
-use crate::{BlockId, Lookup, attr::Attrs, db::DefDatabase};
+use crate::{BlockId, Lookup, db::DefDatabase};
-pub(crate) use crate::item_tree::lower::{lower_use_tree, visibility_from_ast};
+pub(crate) use crate::item_tree::{
+ attrs::*,
+ lower::{lower_use_tree, visibility_from_ast},
+};
#[derive(Copy, Clone, Eq, PartialEq)]
pub(crate) struct RawVisibilityId(u32);
@@ -96,7 +98,7 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) ->
let mut item_tree = match_ast! {
match syntax {
ast::SourceFile(file) => {
- let top_attrs = RawAttrs::new(db, &file, ctx.span_map());
+ let top_attrs = ctx.lower_attrs(&file);
let mut item_tree = ctx.lower_module_items(&file);
item_tree.top_attrs = top_attrs;
item_tree
@@ -132,7 +134,7 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) ->
attrs: FxHashMap::default(),
small_data: FxHashMap::default(),
big_data: FxHashMap::default(),
- top_attrs: RawAttrs::EMPTY,
+ top_attrs: AttrsOrCfg::empty(),
vis: ItemVisibilities { arena: ThinVec::new() },
})
})
@@ -168,7 +170,7 @@ pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc
attrs: FxHashMap::default(),
small_data: FxHashMap::default(),
big_data: FxHashMap::default(),
- top_attrs: RawAttrs::EMPTY,
+ top_attrs: AttrsOrCfg::empty(),
vis: ItemVisibilities { arena: ThinVec::new() },
})
})
@@ -182,8 +184,8 @@ pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc
#[derive(Debug, Default, Eq, PartialEq)]
pub struct ItemTree {
top_level: Box<[ModItemId]>,
- top_attrs: RawAttrs,
- attrs: FxHashMap<FileAstId<ast::Item>, RawAttrs>,
+ top_attrs: AttrsOrCfg,
+ attrs: FxHashMap<FileAstId<ast::Item>, AttrsOrCfg>,
vis: ItemVisibilities,
big_data: FxHashMap<FileAstId<ast::Item>, BigModItem>,
small_data: FxHashMap<FileAstId<ast::Item>, SmallModItem>,
@@ -197,26 +199,12 @@ impl ItemTree {
}
/// Returns the inner attributes of the source file.
- pub(crate) fn top_level_raw_attrs(&self) -> &RawAttrs {
+ pub(crate) fn top_level_attrs(&self) -> &AttrsOrCfg {
&self.top_attrs
}
- /// Returns the inner attributes of the source file.
- pub(crate) fn top_level_attrs(&self, db: &dyn DefDatabase, krate: Crate) -> Attrs {
- Attrs::expand_cfg_attr(db, krate, self.top_attrs.clone())
- }
-
- pub(crate) fn raw_attrs(&self, of: FileAstId<ast::Item>) -> &RawAttrs {
- self.attrs.get(&of).unwrap_or(&RawAttrs::EMPTY)
- }
-
- pub(crate) fn attrs(
- &self,
- db: &dyn DefDatabase,
- krate: Crate,
- of: FileAstId<ast::Item>,
- ) -> Attrs {
- Attrs::expand_cfg_attr(db, krate, self.raw_attrs(of).clone())
+ pub(crate) fn attrs(&self, of: FileAstId<ast::Item>) -> Option<&AttrsOrCfg> {
+ self.attrs.get(&of)
}
/// Returns a count of a few, expensive items.
diff --git a/crates/hir-def/src/item_tree/attrs.rs b/crates/hir-def/src/item_tree/attrs.rs
new file mode 100644
index 0000000000..5c635a4b38
--- /dev/null
+++ b/crates/hir-def/src/item_tree/attrs.rs
@@ -0,0 +1,220 @@
+//! Defines attribute helpers for name resolution.
+//!
+//! Notice we don't preserve all attributes for name resolution, to save space:
+//! for example, we skip doc comments (desugared to `#[doc = "..."]` attributes)
+//! and `#[inline]`. The filtered attributes are listed in [`hir_expand::attrs`].
+
+use std::{
+ borrow::Cow,
+ convert::Infallible,
+ ops::{self, ControlFlow},
+};
+
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_expand::{
+ attrs::{Attr, AttrId, AttrInput, Meta, collect_item_tree_attrs},
+ mod_path::ModPath,
+ name::Name,
+ span_map::SpanMapRef,
+};
+use intern::{Interned, Symbol, sym};
+use syntax::{AstNode, T, ast};
+use syntax_bridge::DocCommentDesugarMode;
+use tt::token_to_literal;
+
+use crate::{db::DefDatabase, item_tree::lower::Ctx};
+
+#[derive(Debug, PartialEq, Eq)]
+pub(crate) enum AttrsOrCfg {
+ Enabled {
+ attrs: AttrsOwned,
+ },
+ /// This only collects the attributes up to the disabled `cfg` (this is what needed for crate-level attributes.)
+ CfgDisabled(Box<(CfgExpr, AttrsOwned)>),
+}
+
+impl Default for AttrsOrCfg {
+ #[inline]
+ fn default() -> Self {
+ AttrsOrCfg::Enabled { attrs: AttrsOwned(Box::new([])) }
+ }
+}
+
+impl AttrsOrCfg {
+ pub(crate) fn lower<'a>(
+ db: &dyn DefDatabase,
+ owner: &dyn ast::HasAttrs,
+ cfg_options: &dyn Fn() -> &'a CfgOptions,
+ span_map: SpanMapRef<'_>,
+ ) -> AttrsOrCfg {
+ let mut attrs = Vec::new();
+ let result =
+ collect_item_tree_attrs::<Infallible>(owner, cfg_options, |meta, container, _, _| {
+ // NOTE: We cannot early return from this function, *every* attribute must be pushed, otherwise we'll mess the `AttrId`
+ // tracking.
+ let (span, path_range, input) = match meta {
+ Meta::NamedKeyValue { path_range, name: _, value } => {
+ let span = span_map.span_for_range(path_range);
+ let input = value.map(|value| {
+ Box::new(AttrInput::Literal(token_to_literal(
+ value.text(),
+ span_map.span_for_range(value.text_range()),
+ )))
+ });
+ (span, path_range, input)
+ }
+ Meta::TokenTree { path, tt } => {
+ let span = span_map.span_for_range(path.range);
+ let tt = syntax_bridge::syntax_node_to_token_tree(
+ tt.syntax(),
+ span_map,
+ span,
+ DocCommentDesugarMode::ProcMacro,
+ );
+ let input = Some(Box::new(AttrInput::TokenTree(tt)));
+ (span, path.range, input)
+ }
+ Meta::Path { path } => {
+ let span = span_map.span_for_range(path.range);
+ (span, path.range, None)
+ }
+ };
+
+ let path = container.token_at_offset(path_range.start()).right_biased().and_then(
+ |first_path_token| {
+ let is_abs = matches!(first_path_token.kind(), T![:] | T![::]);
+ let segments =
+ std::iter::successors(Some(first_path_token), |it| it.next_token())
+ .take_while(|it| it.text_range().end() <= path_range.end())
+ .filter(|it| it.kind().is_any_identifier());
+ ModPath::from_tokens(
+ db,
+ &mut |range| span_map.span_for_range(range).ctx,
+ is_abs,
+ segments,
+ )
+ },
+ );
+ let path = path.unwrap_or_else(|| Name::missing().into());
+
+ attrs.push(Attr { path: Interned::new(path), input, ctxt: span.ctx });
+ ControlFlow::Continue(())
+ });
+ let attrs = AttrsOwned(attrs.into_boxed_slice());
+ match result {
+ Some(Either::Right(cfg)) => AttrsOrCfg::CfgDisabled(Box::new((cfg, attrs))),
+ None => AttrsOrCfg::Enabled { attrs },
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub(crate) struct AttrsOwned(Box<[Attr]>);
+
+#[derive(Debug, Clone, Copy)]
+pub(crate) struct Attrs<'a>(&'a [Attr]);
+
+impl ops::Deref for Attrs<'_> {
+ type Target = [Attr];
+
+ #[inline]
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+
+impl Ctx<'_> {
+ #[inline]
+ pub(super) fn lower_attrs(&self, owner: &dyn ast::HasAttrs) -> AttrsOrCfg {
+ AttrsOrCfg::lower(self.db, owner, &|| self.cfg_options(), self.span_map())
+ }
+}
+
+impl AttrsOwned {
+ #[inline]
+ pub(crate) fn as_ref(&self) -> Attrs<'_> {
+ Attrs(&self.0)
+ }
+}
+
+impl<'a> Attrs<'a> {
+ pub(crate) const EMPTY: Self = Attrs(&[]);
+
+ #[inline]
+ pub(crate) fn by_key(self, key: Symbol) -> AttrQuery<'a> {
+ AttrQuery { attrs: self, key }
+ }
+
+ #[inline]
+ pub(crate) fn iter(self) -> impl Iterator<Item = (AttrId, &'a Attr)> {
+ self.0.iter().enumerate().map(|(id, attr)| (AttrId::from_item_tree_index(id as u32), attr))
+ }
+
+ #[inline]
+ pub(crate) fn iter_after(
+ self,
+ after: Option<AttrId>,
+ ) -> impl Iterator<Item = (AttrId, &'a Attr)> {
+ let skip = after.map_or(0, |after| after.item_tree_index() + 1);
+ self.0[skip as usize..]
+ .iter()
+ .enumerate()
+ .map(move |(id, attr)| (AttrId::from_item_tree_index(id as u32 + skip), attr))
+ }
+
+ #[inline]
+ pub(crate) fn is_proc_macro(&self) -> bool {
+ self.by_key(sym::proc_macro).exists()
+ }
+
+ #[inline]
+ pub(crate) fn is_proc_macro_attribute(&self) -> bool {
+ self.by_key(sym::proc_macro_attribute).exists()
+ }
+}
+#[derive(Debug, Clone)]
+pub(crate) struct AttrQuery<'attr> {
+ attrs: Attrs<'attr>,
+ key: Symbol,
+}
+
+impl<'attr> AttrQuery<'attr> {
+ #[inline]
+ pub(crate) fn tt_values(self) -> impl Iterator<Item = &'attr crate::tt::TopSubtree> {
+ self.attrs().filter_map(|attr| attr.token_tree_value())
+ }
+
+ #[inline]
+ pub(crate) fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> {
+ self.attrs().find_map(|attr| attr.string_value_with_span())
+ }
+
+ #[inline]
+ pub(crate) fn string_value_unescape(self) -> Option<Cow<'attr, str>> {
+ self.attrs().find_map(|attr| attr.string_value_unescape())
+ }
+
+ #[inline]
+ pub(crate) fn exists(self) -> bool {
+ self.attrs().next().is_some()
+ }
+
+ #[inline]
+ pub(crate) fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone {
+ let key = self.key;
+ self.attrs.0.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == key))
+ }
+}
+
+impl AttrsOrCfg {
+ #[inline]
+ pub(super) fn empty() -> Self {
+ AttrsOrCfg::Enabled { attrs: AttrsOwned(Box::new([])) }
+ }
+
+ #[inline]
+ pub(super) fn is_empty(&self) -> bool {
+ matches!(self, AttrsOrCfg::Enabled { attrs } if attrs.as_ref().is_empty())
+ }
+}
diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs
index db50e6585d..d8519f7393 100644
--- a/crates/hir-def/src/item_tree/lower.rs
+++ b/crates/hir-def/src/item_tree/lower.rs
@@ -1,8 +1,9 @@
//! AST -> `ItemTree` lowering code.
-use std::{cell::OnceCell, collections::hash_map::Entry};
+use std::cell::OnceCell;
use base_db::FxIndexSet;
+use cfg::CfgOptions;
use hir_expand::{
HirFileId,
mod_path::PathKind,
@@ -22,18 +23,19 @@ use crate::{
item_tree::{
BigModItem, Const, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl,
ImportAlias, Interned, ItemTree, ItemTreeAstId, Macro2, MacroCall, MacroRules, Mod,
- ModItemId, ModKind, ModPath, RawAttrs, RawVisibility, RawVisibilityId, SmallModItem,
- Static, Struct, StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind,
- VisibilityExplicitness,
+ ModItemId, ModKind, ModPath, RawVisibility, RawVisibilityId, SmallModItem, Static, Struct,
+ StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, VisibilityExplicitness,
+ attrs::AttrsOrCfg,
},
};
pub(super) struct Ctx<'a> {
- db: &'a dyn DefDatabase,
+ pub(super) db: &'a dyn DefDatabase,
tree: ItemTree,
source_ast_id_map: Arc<AstIdMap>,
span_map: OnceCell<SpanMap>,
file: HirFileId,
+ cfg_options: OnceCell<&'a CfgOptions>,
top_level: Vec<ModItemId>,
visibilities: FxIndexSet<RawVisibility>,
}
@@ -45,12 +47,18 @@ impl<'a> Ctx<'a> {
tree: ItemTree::default(),
source_ast_id_map: db.ast_id_map(file),
file,
+ cfg_options: OnceCell::new(),
span_map: OnceCell::new(),
visibilities: FxIndexSet::default(),
top_level: Vec::new(),
}
}
+ #[inline]
+ pub(super) fn cfg_options(&self) -> &'a CfgOptions {
+ self.cfg_options.get_or_init(|| self.file.krate(self.db).cfg_options(self.db))
+ }
+
pub(super) fn span_map(&self) -> SpanMapRef<'_> {
self.span_map.get_or_init(|| self.db.span_map(self.file)).as_ref()
}
@@ -98,7 +106,7 @@ impl<'a> Ctx<'a> {
}
pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
- self.tree.top_attrs = RawAttrs::new(self.db, block, self.span_map());
+ self.tree.top_attrs = self.lower_attrs(block);
self.top_level = block
.statements()
.filter_map(|stmt| match stmt {
@@ -144,22 +152,15 @@ impl<'a> Ctx<'a> {
// FIXME: Handle `global_asm!()`.
ast::Item::AsmExpr(_) => return None,
};
- let attrs = RawAttrs::new(self.db, item, self.span_map());
+ let attrs = self.lower_attrs(item);
self.add_attrs(mod_item.ast_id(), attrs);
Some(mod_item)
}
- fn add_attrs(&mut self, item: FileAstId<ast::Item>, attrs: RawAttrs) {
+ fn add_attrs(&mut self, item: FileAstId<ast::Item>, attrs: AttrsOrCfg) {
if !attrs.is_empty() {
- match self.tree.attrs.entry(item) {
- Entry::Occupied(mut entry) => {
- *entry.get_mut() = entry.get().merge(attrs);
- }
- Entry::Vacant(entry) => {
- entry.insert(attrs);
- }
- }
+ self.tree.attrs.insert(item, attrs);
}
}
@@ -352,7 +353,7 @@ impl<'a> Ctx<'a> {
ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
};
- let attrs = RawAttrs::new(self.db, &item, self.span_map());
+ let attrs = self.lower_attrs(&item);
self.add_attrs(mod_item.ast_id(), attrs);
Some(mod_item)
})
diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs
index 94a6cce3ce..66a2d14a73 100644
--- a/crates/hir-def/src/item_tree/pretty.rs
+++ b/crates/hir-def/src/item_tree/pretty.rs
@@ -7,8 +7,8 @@ use span::{Edition, ErasedFileAstId};
use crate::{
item_tree::{
Const, DefDatabase, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl, ItemTree,
- Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawAttrs, RawVisibilityId, Static,
- Struct, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind,
+ Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawVisibilityId, Static, Struct,
+ Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, attrs::AttrsOrCfg,
},
visibility::RawVisibility,
};
@@ -85,9 +85,13 @@ impl Printer<'_> {
}
}
- fn print_attrs(&mut self, attrs: &RawAttrs, inner: bool, separated_by: &str) {
+ fn print_attrs(&mut self, attrs: &AttrsOrCfg, inner: bool, separated_by: &str) {
+ let AttrsOrCfg::Enabled { attrs } = attrs else {
+ w!(self, "#[cfg(false)]{separated_by}");
+ return;
+ };
let inner = if inner { "!" } else { "" };
- for attr in &**attrs {
+ for attr in &*attrs.as_ref() {
w!(
self,
"#{}[{}{}]{}",
diff --git a/crates/hir-def/src/item_tree/tests.rs b/crates/hir-def/src/item_tree/tests.rs
index 91b42bef8f..a57432f33c 100644
--- a/crates/hir-def/src/item_tree/tests.rs
+++ b/crates/hir-def/src/item_tree/tests.rs
@@ -30,10 +30,8 @@ use crate::{A, B};
use a::{c, d::{e}};
"#,
- expect![[r##"
- #![doc = " file comment"]
+ expect![[r#"
#![no_std]
- #![doc = " another file comment"]
// AstId: ExternCrate[070B, 0]
pub(self) extern crate self as renamed;
@@ -47,13 +45,12 @@ use a::{c, d::{e}};
// AstId: Use[0000, 1]
pub(self) use globs::*;
- #[doc = " docs on import"]
// AstId: Use[0000, 2]
pub(self) use crate::{A, B};
// AstId: Use[0000, 3]
pub(self) use a::{c, d::{e}};
- "##]],
+ "#]],
);
}
@@ -195,8 +192,6 @@ mod inline {
mod outline;
"#,
expect![[r##"
- #[doc = " outer"]
- #[doc = " inner"]
// AstId: Module[03AE, 0]
pub(self) mod inline {
// AstId: Use[0000, 0]
diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs
index e634a95b67..31e9d8749d 100644
--- a/crates/hir-def/src/lang_item.rs
+++ b/crates/hir-def/src/lang_item.rs
@@ -8,6 +8,7 @@ use stdx::impl_from;
use crate::{
AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId,
StaticId, StructId, TraitId, TypeAliasId, UnionId,
+ attrs::AttrFlags,
db::DefDatabase,
nameres::{assoc::TraitItems, crate_def_map, crate_local_def_map},
};
@@ -127,7 +128,7 @@ impl LangItems {
T: Into<AttrDefId> + Into<LangItemTarget> + Copy,
{
let _p = tracing::info_span!("collect_lang_item").entered();
- if let Some(lang_item) = db.attrs(item.into()).lang_item() {
+ if let Some(lang_item) = AttrFlags::lang_item(db, item.into()) {
self.assign_lang_item(lang_item, item.into());
}
}
@@ -142,7 +143,7 @@ pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option
for (_, module_data) in crate_def_map.modules() {
for def in module_data.scope.declarations() {
if let ModuleDefId::TraitId(trait_) = def
- && db.attrs(trait_.into()).has_doc_notable_trait()
+ && AttrFlags::query(db, trait_.into()).contains(AttrFlags::IS_DOC_NOTABLE_TRAIT)
{
traits.push(trait_);
}
@@ -177,10 +178,10 @@ macro_rules! language_item_table {
$( self.$lang_item = self.$lang_item.or(other.$lang_item); )*
}
- fn assign_lang_item(&mut self, name: &Symbol, target: LangItemTarget) {
+ fn assign_lang_item(&mut self, name: Symbol, target: LangItemTarget) {
match name {
$(
- _ if *name == $module::$name => {
+ _ if name == $module::$name => {
if let LangItemTarget::$target(target) = target {
self.$lang_item = Some(target);
}
@@ -206,6 +207,14 @@ macro_rules! language_item_table {
$( LangItemEnum::$lang_item => lang_items.$lang_item.map(Into::into), )*
}
}
+
+ #[inline]
+ pub fn from_symbol(symbol: &Symbol) -> Option<Self> {
+ match symbol {
+ $( _ if *symbol == $module::$name => Some(Self::$lang_item), )*
+ _ => None,
+ }
+ }
}
}
}
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index 52d99911ac..eceadd6742 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -19,7 +19,7 @@ extern crate ra_ap_rustc_abi as rustc_abi;
pub mod db;
-pub mod attr;
+pub mod attrs;
pub mod builtin_type;
pub mod item_scope;
pub mod per_ns;
@@ -45,7 +45,7 @@ pub mod find_path;
pub mod import_map;
pub mod visibility;
-use intern::{Interned, Symbol, sym};
+use intern::{Interned, Symbol};
pub use rustc_abi as layout;
use thin_vec::ThinVec;
use triomphe::Arc;
@@ -80,7 +80,7 @@ use syntax::{AstNode, ast};
pub use hir_expand::{Intern, Lookup, tt};
use crate::{
- attr::Attrs,
+ attrs::AttrFlags,
builtin_type::BuiltinType,
db::DefDatabase,
expr_store::ExpressionStoreSourceMap,
@@ -956,10 +956,16 @@ impl CallableDefId {
}
}
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+// FIXME: We probably should use this in more places.
+/// This is used to avoid interning the whole `AttrDefId`, so we intern just modules and not everything.
+#[salsa_macros::interned(debug, no_lifetime)]
+pub struct InternedModuleId {
+ pub loc: ModuleId,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, salsa_macros::Supertype)]
pub enum AttrDefId {
- ModuleId(ModuleId),
- FieldId(FieldId),
+ ModuleId(InternedModuleId),
AdtId(AdtId),
FunctionId(FunctionId),
EnumVariantId(EnumVariantId),
@@ -969,15 +975,12 @@ pub enum AttrDefId {
TypeAliasId(TypeAliasId),
MacroId(MacroId),
ImplId(ImplId),
- GenericParamId(GenericParamId),
ExternBlockId(ExternBlockId),
ExternCrateId(ExternCrateId),
UseId(UseId),
}
impl_from!(
- ModuleId,
- FieldId,
AdtId(StructId, EnumId, UnionId),
EnumVariantId,
StaticId,
@@ -987,41 +990,11 @@ impl_from!(
TypeAliasId,
MacroId(Macro2Id, MacroRulesId, ProcMacroId),
ImplId,
- GenericParamId,
ExternCrateId,
UseId
for AttrDefId
);
-impl TryFrom<ModuleDefId> for AttrDefId {
- type Error = ();
-
- fn try_from(value: ModuleDefId) -> Result<Self, Self::Error> {
- match value {
- ModuleDefId::ModuleId(it) => Ok(it.into()),
- ModuleDefId::FunctionId(it) => Ok(it.into()),
- ModuleDefId::AdtId(it) => Ok(it.into()),
- ModuleDefId::EnumVariantId(it) => Ok(it.into()),
- ModuleDefId::ConstId(it) => Ok(it.into()),
- ModuleDefId::StaticId(it) => Ok(it.into()),
- ModuleDefId::TraitId(it) => Ok(it.into()),
- ModuleDefId::TypeAliasId(it) => Ok(it.into()),
- ModuleDefId::MacroId(id) => Ok(id.into()),
- ModuleDefId::BuiltinType(_) => Err(()),
- }
- }
-}
-
-impl From<ItemContainerId> for AttrDefId {
- fn from(acid: ItemContainerId) -> Self {
- match acid {
- ItemContainerId::ModuleId(mid) => AttrDefId::ModuleId(mid),
- ItemContainerId::ImplId(iid) => AttrDefId::ImplId(iid),
- ItemContainerId::TraitId(tid) => AttrDefId::TraitId(tid),
- ItemContainerId::ExternBlockId(id) => AttrDefId::ExternBlockId(id),
- }
- }
-}
impl From<AssocItemId> for AttrDefId {
fn from(assoc: AssocItemId) -> Self {
match assoc {
@@ -1262,8 +1235,7 @@ impl HasModule for GenericDefId {
impl HasModule for AttrDefId {
fn module(&self, db: &dyn DefDatabase) -> ModuleId {
match self {
- AttrDefId::ModuleId(it) => *it,
- AttrDefId::FieldId(it) => it.parent.module(db),
+ AttrDefId::ModuleId(it) => it.loc(db),
AttrDefId::AdtId(it) => it.module(db),
AttrDefId::FunctionId(it) => it.module(db),
AttrDefId::EnumVariantId(it) => it.module(db),
@@ -1273,12 +1245,6 @@ impl HasModule for AttrDefId {
AttrDefId::TypeAliasId(it) => it.module(db),
AttrDefId::ImplId(it) => it.module(db),
AttrDefId::ExternBlockId(it) => it.module(db),
- AttrDefId::GenericParamId(it) => match it {
- GenericParamId::TypeParamId(it) => it.parent(),
- GenericParamId::ConstParamId(it) => it.parent(),
- GenericParamId::LifetimeParamId(it) => it.parent,
- }
- .module(db),
AttrDefId::MacroId(it) => it.module(db),
AttrDefId::ExternCrateId(it) => it.module(db),
AttrDefId::UseId(it) => it.module(db),
@@ -1402,32 +1368,18 @@ pub enum Complete {
}
impl Complete {
- pub fn extract(is_trait: bool, attrs: &Attrs) -> Complete {
- let mut do_not_complete = Complete::Yes;
- for ra_attr in attrs.rust_analyzer_tool() {
- let segments = ra_attr.path.segments();
- if segments.len() != 2 {
- continue;
- }
- let action = segments[1].symbol();
- if *action == sym::completions {
- match ra_attr.token_tree_value().map(|tt| tt.token_trees().flat_tokens()) {
- Some([tt::TokenTree::Leaf(tt::Leaf::Ident(ident))]) => {
- if ident.sym == sym::ignore_flyimport {
- do_not_complete = Complete::IgnoreFlyimport;
- } else if is_trait {
- if ident.sym == sym::ignore_methods {
- do_not_complete = Complete::IgnoreMethods;
- } else if ident.sym == sym::ignore_flyimport_methods {
- do_not_complete = Complete::IgnoreFlyimportMethods;
- }
- }
- }
- _ => {}
- }
+ #[inline]
+ pub fn extract(is_trait: bool, attrs: AttrFlags) -> Complete {
+ if attrs.contains(AttrFlags::COMPLETE_IGNORE_FLYIMPORT) {
+ return Complete::IgnoreFlyimport;
+ } else if is_trait {
+ if attrs.contains(AttrFlags::COMPLETE_IGNORE_METHODS) {
+ return Complete::IgnoreMethods;
+ } else if attrs.contains(AttrFlags::COMPLETE_IGNORE_FLYIMPORT_METHODS) {
+ return Complete::IgnoreFlyimportMethods;
}
}
- do_not_complete
+ Complete::Yes
}
#[inline]
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs
index 445caef85f..947a54f888 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -300,21 +300,21 @@ fn match_by_first_token_literally() {
check(
r#"
macro_rules! m {
- ($i:ident) => ( mod $i {} );
+ ($i:ident) => ( enum $i {} );
(= $i:ident) => ( fn $i() {} );
(+ $i:ident) => ( struct $i; )
}
-m! { foo }
+m! { Foo }
m! { = bar }
m! { + Baz }
"#,
expect![[r#"
macro_rules! m {
- ($i:ident) => ( mod $i {} );
+ ($i:ident) => ( enum $i {} );
(= $i:ident) => ( fn $i() {} );
(+ $i:ident) => ( struct $i; )
}
-mod foo {}
+enum Foo {}
fn bar() {}
struct Baz;
"#]],
@@ -326,21 +326,21 @@ fn match_by_last_token_literally() {
check(
r#"
macro_rules! m {
- ($i:ident) => ( mod $i {} );
+ ($i:ident) => ( enum $i {} );
($i:ident =) => ( fn $i() {} );
($i:ident +) => ( struct $i; )
}
-m! { foo }
+m! { Foo }
m! { bar = }
m! { Baz + }
"#,
expect![[r#"
macro_rules! m {
- ($i:ident) => ( mod $i {} );
+ ($i:ident) => ( enum $i {} );
($i:ident =) => ( fn $i() {} );
($i:ident +) => ( struct $i; )
}
-mod foo {}
+enum Foo {}
fn bar() {}
struct Baz;
"#]],
@@ -352,21 +352,21 @@ fn match_by_ident() {
check(
r#"
macro_rules! m {
- ($i:ident) => ( mod $i {} );
+ ($i:ident) => ( enum $i {} );
(spam $i:ident) => ( fn $i() {} );
(eggs $i:ident) => ( struct $i; )
}
-m! { foo }
+m! { Foo }
m! { spam bar }
m! { eggs Baz }
"#,
expect![[r#"
macro_rules! m {
- ($i:ident) => ( mod $i {} );
+ ($i:ident) => ( enum $i {} );
(spam $i:ident) => ( fn $i() {} );
(eggs $i:ident) => ( struct $i; )
}
-mod foo {}
+enum Foo {}
fn bar() {}
struct Baz;
"#]],
@@ -378,12 +378,12 @@ fn match_by_separator_token() {
check(
r#"
macro_rules! m {
- ($($i:ident),*) => ($(mod $i {} )*);
+ ($($i:ident),*) => ($(enum $i {} )*);
($($i:ident)#*) => ($(fn $i() {} )*);
($i:ident ,# $ j:ident) => ( struct $i; struct $ j; )
}
-m! { foo, bar }
+m! { Baz, Qux }
m! { foo# bar }
@@ -391,13 +391,13 @@ m! { Foo,# Bar }
"#,
expect![[r#"
macro_rules! m {
- ($($i:ident),*) => ($(mod $i {} )*);
+ ($($i:ident),*) => ($(enum $i {} )*);
($($i:ident)#*) => ($(fn $i() {} )*);
($i:ident ,# $ j:ident) => ( struct $i; struct $ j; )
}
-mod foo {}
-mod bar {}
+enum Baz {}
+enum Qux {}
fn foo() {}
fn bar() {}
@@ -1114,11 +1114,11 @@ fn test_single_item() {
check(
r#"
macro_rules! m { ($i:item) => ( $i ) }
-m! { mod c {} }
+m! { struct C {} }
"#,
expect![[r#"
macro_rules! m { ($i:item) => ( $i ) }
-mod c {}
+struct C {}
"#]],
)
}
@@ -1144,6 +1144,7 @@ m! {
type T = u8;
}
"#,
+ // The modules are counted twice, once because of the module and once because of the macro call.
expect![[r#"
macro_rules! m { ($($i:item)*) => ($($i )*) }
extern crate a;
@@ -1161,7 +1162,9 @@ trait J {}
fn h() {}
extern {}
type T = u8;
-"#]],
+
+mod b;
+mod c {}"#]],
);
}
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index ffefa8365f..98b3115814 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -245,6 +245,21 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
}
+ for (_, module) in def_map.modules() {
+ let Some(src) = module.declaration_source(&db) else {
+ continue;
+ };
+ if let Some(macro_file) = src.file_id.macro_file() {
+ let pp = pretty_print_macro_expansion(
+ src.value.syntax().clone(),
+ db.span_map(macro_file.into()).as_ref(),
+ false,
+ false,
+ );
+ format_to!(expanded_text, "\n{}", pp)
+ }
+ }
+
for impl_id in def_map[local_id].scope.impls() {
let src = impl_id.lookup(&db).source(&db);
if let Some(macro_file) = src.file_id.macro_file()
diff --git a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
index 6952a9da10..3f0afe61e0 100644
--- a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
+++ b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -9,37 +9,93 @@ use crate::macro_expansion_tests::{check, check_errors};
#[test]
fn attribute_macro_attr_censoring() {
- cov_mark::check!(attribute_macro_attr_censoring);
check(
r#"
//- proc_macros: identity
-#[attr1] #[proc_macros::identity] #[attr2]
+//- minicore: derive
+#[attr1] #[derive()] #[proc_macros::identity] #[attr2]
struct S;
+
+/// Foo
+#[cfg_attr(false, doc = "abc...", attr1)]
+mod foo {
+ #![cfg_attr(true, cfg_attr(true, foo, cfg_attr(false, bar), proc_macros::identity))]
+ #![cfg_attr(true, doc = "123...", attr2)]
+ #![attr3]
+
+ #[cfg_attr(true, cfg(false))]
+ fn foo() {}
+
+ #[cfg(true)]
+ fn bar() {}
+}
"#,
- expect![[r#"
-#[attr1] #[proc_macros::identity] #[attr2]
+ expect![[r##"
+#[attr1] #[derive()] #[proc_macros::identity] #[attr2]
struct S;
+/// Foo
+#[cfg_attr(false, doc = "abc...", attr1)]
+mod foo {
+ #![cfg_attr(true, cfg_attr(true, foo, cfg_attr(false, bar), proc_macros::identity))]
+ #![cfg_attr(true, doc = "123...", attr2)]
+ #![attr3]
+
+ #[cfg_attr(true, cfg(false))]
+ fn foo() {}
+
+ #[cfg(true)]
+ fn bar() {}
+}
+
#[attr1]
-#[attr2] struct S;"#]],
+#[attr2] struct S;
+#[doc = " Foo"] mod foo {
+ # ![foo]
+ # ![doc = "123..."]
+ # ![attr2]
+ # ![attr3]
+ #[cfg_attr(true , cfg(false ))] fn foo() {}
+ #[cfg(true )] fn bar() {}
+}"##]],
);
}
#[test]
fn derive_censoring() {
- cov_mark::check!(derive_censoring);
check(
r#"
//- proc_macros: derive_identity
//- minicore:derive
+use derive as my_cool_derive;
#[attr1]
#[derive(Foo)]
#[derive(proc_macros::DeriveIdentity)]
#[derive(Bar)]
#[attr2]
struct S;
+
+#[my_cool_derive()]
+#[cfg_attr(true, derive(), attr1, derive(proc_macros::DeriveIdentity))]
+#[my_cool_derive()]
+struct Foo {
+ #[cfg_attr(false, cfg(false), attr2)]
+ v1: i32,
+ #[cfg_attr(true, cfg(false), attr2)]
+ v1: i32,
+ #[cfg_attr(true, attr3)]
+ v2: fn(#[cfg(false)] param: i32, #[cfg_attr(true, attr4)] param2: u32),
+ v3: Foo<{
+ #[cfg(false)]
+ let foo = 123;
+ 456
+ }>,
+ #[cfg(false)]
+ v4: bool // No comma here
+}
"#,
expect![[r#"
+use derive as my_cool_derive;
#[attr1]
#[derive(Foo)]
#[derive(proc_macros::DeriveIdentity)]
@@ -47,6 +103,32 @@ struct S;
#[attr2]
struct S;
+#[my_cool_derive()]
+#[cfg_attr(true, derive(), attr1, derive(proc_macros::DeriveIdentity))]
+#[my_cool_derive()]
+struct Foo {
+ #[cfg_attr(false, cfg(false), attr2)]
+ v1: i32,
+ #[cfg_attr(true, cfg(false), attr2)]
+ v1: i32,
+ #[cfg_attr(true, attr3)]
+ v2: fn(#[cfg(false)] param: i32, #[cfg_attr(true, attr4)] param2: u32),
+ v3: Foo<{
+ #[cfg(false)]
+ let foo = 123;
+ 456
+ }>,
+ #[cfg(false)]
+ v4: bool // No comma here
+}
+
+#[attr1]
+#[my_cool_derive()] struct Foo {
+ v1: i32, #[attr3]v2: fn(#[attr4]param2: u32), v3: Foo< {
+ 456
+ }
+ >,
+}
#[attr1]
#[derive(Bar)]
#[attr2] struct S;"#]],
@@ -87,7 +169,7 @@ fn foo() { bar.; blub }
fn foo() { bar.; blub }
fn foo() {
- bar. ;
+ bar.;
blub
}"#]],
);
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index f910008833..5f45e188e0 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -391,19 +391,14 @@ pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefM
)
.entered();
- let module_data = ModuleData::new(
- ModuleOrigin::CrateRoot { definition: krate.root_file_id(db) },
- Visibility::Public,
- );
+ let root_file_id = crate_id.root_file_id(db);
+ let module_data =
+ ModuleData::new(ModuleOrigin::CrateRoot { definition: root_file_id }, Visibility::Public);
let def_map =
DefMap::empty(crate_id, Arc::new(DefMapCrateData::new(krate.edition)), module_data, None);
- let (def_map, local_def_map) = collector::collect_defs(
- db,
- def_map,
- TreeId::new(krate.root_file_id(db).into(), None),
- None,
- );
+ let (def_map, local_def_map) =
+ collector::collect_defs(db, def_map, TreeId::new(root_file_id.into(), None), None);
DefMapPair::new(db, def_map, local_def_map)
}
diff --git a/crates/hir-def/src/nameres/assoc.rs b/crates/hir-def/src/nameres/assoc.rs
index 8d2a386de8..b67853347b 100644
--- a/crates/hir-def/src/nameres/assoc.rs
+++ b/crates/hir-def/src/nameres/assoc.rs
@@ -4,7 +4,8 @@ use std::mem;
use cfg::CfgOptions;
use hir_expand::{
- AstId, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind, MacroDefKind,
+ AstId, AttrMacroAttrIds, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind,
+ MacroDefKind,
mod_path::ModPath,
name::{AsName, Name},
span_map::SpanMap,
@@ -21,8 +22,8 @@ use triomphe::Arc;
use crate::{
AssocItemId, AstIdWithPath, ConstLoc, FunctionId, FunctionLoc, ImplId, ItemContainerId,
ItemLoc, MacroCallId, ModuleId, TraitId, TypeAliasId, TypeAliasLoc,
- attr::Attrs,
db::DefDatabase,
+ item_tree::AttrsOrCfg,
macro_call_as_call_id,
nameres::{
DefMap, LocalDefMap, MacroSubNs,
@@ -191,19 +192,22 @@ impl<'a> AssocItemCollector<'a> {
fn collect_item(&mut self, item: ast::AssocItem) {
let ast_id = self.ast_id_map.ast_id(&item);
- let attrs = Attrs::new(self.db, &item, self.span_map.as_ref(), self.cfg_options);
- if let Err(cfg) = attrs.is_cfg_enabled(self.cfg_options) {
- self.diagnostics.push(DefDiagnostic::unconfigured_code(
- self.module_id.local_id,
- InFile::new(self.file_id, ast_id.erase()),
- cfg,
- self.cfg_options.clone(),
- ));
- return;
- }
+ let attrs =
+ match AttrsOrCfg::lower(self.db, &item, &|| self.cfg_options, self.span_map.as_ref()) {
+ AttrsOrCfg::Enabled { attrs } => attrs,
+ AttrsOrCfg::CfgDisabled(cfg) => {
+ self.diagnostics.push(DefDiagnostic::unconfigured_code(
+ self.module_id.local_id,
+ InFile::new(self.file_id, ast_id.erase()),
+ cfg.0,
+ self.cfg_options.clone(),
+ ));
+ return;
+ }
+ };
let ast_id = InFile::new(self.file_id, ast_id.upcast());
- 'attrs: for attr in &*attrs {
+ 'attrs: for (attr_id, attr) in attrs.as_ref().iter() {
let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id };
match self.def_map.resolve_attr_macro(
@@ -212,6 +216,7 @@ impl<'a> AssocItemCollector<'a> {
self.module_id.local_id,
ast_id_with_path,
attr,
+ attr_id,
) {
Ok(ResolvedAttr::Macro(call_id)) => {
let loc = self.db.lookup_intern_macro_call(call_id);
@@ -240,8 +245,12 @@ impl<'a> AssocItemCollector<'a> {
Err(_) => {
self.diagnostics.push(DefDiagnostic::unresolved_macro_call(
self.module_id.local_id,
- MacroCallKind::Attr { ast_id, attr_args: None, invoc_attr_index: attr.id },
- attr.path().clone(),
+ MacroCallKind::Attr {
+ ast_id,
+ attr_args: None,
+ censored_attr_ids: AttrMacroAttrIds::from_one(attr_id),
+ },
+ (*attr.path).clone(),
));
}
}
diff --git a/crates/hir-def/src/nameres/attr_resolution.rs b/crates/hir-def/src/nameres/attr_resolution.rs
index 2f56d608fc..fb755026c3 100644
--- a/crates/hir-def/src/nameres/attr_resolution.rs
+++ b/crates/hir-def/src/nameres/attr_resolution.rs
@@ -2,7 +2,7 @@
use base_db::Crate;
use hir_expand::{
- MacroCallId, MacroCallKind, MacroDefId,
+ AttrMacroAttrIds, MacroCallId, MacroCallKind, MacroDefId,
attrs::{Attr, AttrId, AttrInput},
inert_attr_macro::find_builtin_attr_idx,
mod_path::{ModPath, PathKind},
@@ -28,6 +28,7 @@ pub enum ResolvedAttr {
}
impl DefMap {
+ /// This cannot be used to resolve items that allow derives.
pub(crate) fn resolve_attr_macro(
&self,
local_def_map: &LocalDefMap,
@@ -35,6 +36,7 @@ impl DefMap {
original_module: LocalModuleId,
ast_id: AstIdWithPath<ast::Item>,
attr: &Attr,
+ attr_id: AttrId,
) -> Result<ResolvedAttr, UnresolvedMacro> {
// NB: does not currently work for derive helpers as they aren't recorded in the `DefMap`
@@ -68,6 +70,9 @@ impl DefMap {
db,
&ast_id,
attr,
+ // There aren't any active attributes before this one, because attribute macros
+ // replace their input, and derive macros are not allowed in this function.
+ AttrMacroAttrIds::from_one(attr_id),
self.krate,
db.macro_def(def),
)))
@@ -102,6 +107,7 @@ pub(super) fn attr_macro_as_call_id(
db: &dyn DefDatabase,
item_attr: &AstIdWithPath<ast::Item>,
macro_attr: &Attr,
+ censored_attr_ids: AttrMacroAttrIds,
krate: Crate,
def: MacroDefId,
) -> MacroCallId {
@@ -121,7 +127,7 @@ pub(super) fn attr_macro_as_call_id(
MacroCallKind::Attr {
ast_id: item_attr.ast_id,
attr_args: arg.map(Arc::new),
- invoc_attr_index: macro_attr.id,
+ censored_attr_ids,
},
macro_attr.ctxt,
)
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index a030ed1e0d..9ac8a43999 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -3,14 +3,14 @@
//! `DefCollector::collect` contains the fixed-point iteration loop which
//! resolves imports and expands macros.
-use std::{cmp::Ordering, iter, mem, ops::Not};
+use std::{cmp::Ordering, iter, mem};
use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin};
use cfg::{CfgAtom, CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
- EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
- MacroDefId, MacroDefKind,
+ AttrMacroAttrIds, EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId,
+ MacroCallKind, MacroDefId, MacroDefKind,
attrs::{Attr, AttrId},
builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro},
mod_path::{ModPath, PathKind},
@@ -18,9 +18,10 @@ use hir_expand::{
proc_macro::CustomProcMacroExpander,
};
use intern::{Interned, sym};
-use itertools::{Itertools, izip};
+use itertools::izip;
use la_arena::Idx;
use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::SmallVec;
use span::{Edition, FileAstId, SyntaxContext};
use syntax::ast;
use triomphe::Arc;
@@ -32,12 +33,11 @@ use crate::{
MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId,
ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId,
UseLoc,
- attr::Attrs,
db::DefDatabase,
item_scope::{GlobId, ImportId, ImportOrExternCrate, PerNsGlobImports},
item_tree::{
- self, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId, Macro2, MacroCall,
- MacroRules, Mod, ModItemId, ModKind, TreeId,
+ self, Attrs, AttrsOrCfg, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId,
+ Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, TreeId,
},
macro_call_as_call_id,
nameres::{
@@ -102,6 +102,7 @@ pub(super) fn collect_defs(
proc_macros,
from_glob_import: Default::default(),
skip_attrs: Default::default(),
+ prev_active_attrs: Default::default(),
unresolved_extern_crates: Default::default(),
is_proc_macro: krate.is_proc_macro,
};
@@ -206,6 +207,7 @@ enum MacroDirectiveKind<'db> {
},
Attr {
ast_id: AstIdWithPath<ast::Item>,
+ attr_id: AttrId,
attr: Attr,
mod_item: ModItemId,
/* is this needed? */ tree: TreeId,
@@ -246,28 +248,27 @@ struct DefCollector<'db> {
/// This also stores the attributes to skip when we resolve derive helpers and non-macro
/// non-builtin attributes in general.
// FIXME: There has to be a better way to do this
- skip_attrs: FxHashMap<InFile<FileAstId<ast::Item>>, AttrId>,
+ skip_attrs: FxHashMap<AstId<ast::Item>, AttrId>,
+ /// When we expand attributes, we need to censor all previous active attributes
+ /// on the same item. Therefore, this holds all active attributes that we already
+ /// expanded.
+ prev_active_attrs: FxHashMap<AstId<ast::Item>, SmallVec<[AttrId; 1]>>,
}
impl<'db> DefCollector<'db> {
fn seed_with_top_level(&mut self) {
let _p = tracing::info_span!("seed_with_top_level").entered();
- let file_id = self.def_map.krate.data(self.db).root_file_id(self.db);
+ let file_id = self.def_map.krate.root_file_id(self.db);
let item_tree = self.db.file_item_tree(file_id.into());
- let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
+ let attrs = match item_tree.top_level_attrs() {
+ AttrsOrCfg::Enabled { attrs } => attrs.as_ref(),
+ AttrsOrCfg::CfgDisabled(it) => it.1.as_ref(),
+ };
let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
- let mut process = true;
-
// Process other crate-level attributes.
for attr in &*attrs {
- if let Some(cfg) = attr.cfg()
- && self.cfg_options.check(&cfg) == Some(false)
- {
- process = false;
- break;
- }
let Some(attr_name) = attr.path.as_ident() else { continue };
match () {
@@ -291,7 +292,7 @@ impl<'db> DefCollector<'db> {
() if *attr_name == sym::feature => {
let features =
attr.parse_path_comma_token_tree(self.db).into_iter().flatten().filter_map(
- |(feat, _)| match feat.segments() {
+ |(feat, _, _)| match feat.segments() {
[name] => Some(name.symbol().clone()),
_ => None,
},
@@ -344,7 +345,7 @@ impl<'db> DefCollector<'db> {
self.inject_prelude();
- if !process {
+ if matches!(item_tree.top_level_attrs(), AttrsOrCfg::CfgDisabled(_)) {
return;
}
@@ -362,10 +363,7 @@ impl<'db> DefCollector<'db> {
fn seed_with_inner(&mut self, tree_id: TreeId) {
let item_tree = tree_id.item_tree(self.db);
- let is_cfg_enabled = item_tree
- .top_level_attrs(self.db, self.def_map.krate)
- .cfg()
- .is_none_or(|cfg| self.cfg_options.check(&cfg) != Some(false));
+ let is_cfg_enabled = matches!(item_tree.top_level_attrs(), AttrsOrCfg::Enabled { .. });
if is_cfg_enabled {
self.inject_prelude();
@@ -456,18 +454,18 @@ impl<'db> DefCollector<'db> {
self.unresolved_macros.iter().enumerate().find_map(|(idx, directive)| match &directive
.kind
{
- MacroDirectiveKind::Attr { ast_id, mod_item, attr, tree, item_tree } => {
+ MacroDirectiveKind::Attr { ast_id, mod_item, attr_id, attr, tree, item_tree } => {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
directive.module_id,
MacroCallKind::Attr {
ast_id: ast_id.ast_id,
attr_args: None,
- invoc_attr_index: attr.id,
+ censored_attr_ids: AttrMacroAttrIds::from_one(*attr_id),
},
- attr.path().clone(),
+ (*attr.path).clone(),
));
- self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), attr.id);
+ self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), *attr_id);
Some((idx, directive, *mod_item, *tree, *item_tree))
}
@@ -1350,6 +1348,7 @@ impl<'db> DefCollector<'db> {
MacroDirectiveKind::Attr {
ast_id: file_ast_id,
mod_item,
+ attr_id,
attr,
tree,
item_tree,
@@ -1362,7 +1361,7 @@ impl<'db> DefCollector<'db> {
let mod_dir = collector.mod_dirs[&directive.module_id].clone();
collector
.skip_attrs
- .insert(InFile::new(file_id, mod_item.ast_id()), attr.id);
+ .insert(InFile::new(file_id, mod_item.ast_id()), *attr_id);
ModCollector {
def_collector: collector,
@@ -1398,7 +1397,6 @@ impl<'db> DefCollector<'db> {
// being cfg'ed out).
// Ideally we will just expand them to nothing here. But we are only collecting macro calls,
// not expanding them, so we have no way to do that.
- // If you add an ignored attribute here, also add it to `Semantics::might_be_inside_macro_call()`.
if matches!(
def.kind,
MacroDefKind::BuiltInAttr(_, expander)
@@ -1410,8 +1408,18 @@ impl<'db> DefCollector<'db> {
}
}
- let call_id = || {
- attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def)
+ let mut call_id = || {
+ let active_attrs = self.prev_active_attrs.entry(ast_id).or_default();
+ active_attrs.push(*attr_id);
+
+ attr_macro_as_call_id(
+ self.db,
+ file_ast_id,
+ attr,
+ AttrMacroAttrIds::from_many(active_attrs),
+ self.def_map.krate,
+ def,
+ )
};
if matches!(def,
MacroDefId { kind: MacroDefKind::BuiltInAttr(_, exp), .. }
@@ -1429,7 +1437,7 @@ impl<'db> DefCollector<'db> {
let diag = DefDiagnostic::invalid_derive_target(
directive.module_id,
ast_id,
- attr.id,
+ *attr_id,
);
self.def_map.diagnostics.push(diag);
return recollect_without(self);
@@ -1442,7 +1450,7 @@ impl<'db> DefCollector<'db> {
Some(derive_macros) => {
let call_id = call_id();
let mut len = 0;
- for (idx, (path, call_site)) in derive_macros.enumerate() {
+ for (idx, (path, call_site, _)) in derive_macros.enumerate() {
let ast_id = AstIdWithPath::new(
file_id,
ast_id.value,
@@ -1453,7 +1461,7 @@ impl<'db> DefCollector<'db> {
depth: directive.depth + 1,
kind: MacroDirectiveKind::Derive {
ast_id,
- derive_attr: attr.id,
+ derive_attr: *attr_id,
derive_pos: idx,
ctxt: call_site.ctx,
derive_macro_id: call_id,
@@ -1469,13 +1477,13 @@ impl<'db> DefCollector<'db> {
// Check the comment in [`builtin_attr_macro`].
self.def_map.modules[directive.module_id]
.scope
- .init_derive_attribute(ast_id, attr.id, call_id, len + 1);
+ .init_derive_attribute(ast_id, *attr_id, call_id, len + 1);
}
None => {
let diag = DefDiagnostic::malformed_derive(
directive.module_id,
ast_id,
- attr.id,
+ *attr_id,
);
self.def_map.diagnostics.push(diag);
}
@@ -1712,16 +1720,17 @@ impl ModCollector<'_, '_> {
};
let mut process_mod_item = |item: ModItemId| {
- let attrs = self.item_tree.attrs(db, krate, item.ast_id());
- if let Some(cfg) = attrs.cfg()
- && !self.is_cfg_enabled(&cfg)
- {
- let ast_id = item.ast_id().erase();
- self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg);
- return;
- }
+ let attrs = match self.item_tree.attrs(item.ast_id()) {
+ Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
+ None => Attrs::EMPTY,
+ Some(AttrsOrCfg::CfgDisabled(cfg)) => {
+ let ast_id = item.ast_id().erase();
+ self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg.0);
+ return;
+ }
+ };
- if let Err(()) = self.resolve_attributes(&attrs, item, container) {
+ if let Err(()) = self.resolve_attributes(attrs, item, container) {
// Do not process the item. It has at least one non-builtin attribute, so the
// fixed-point algorithm is required to resolve the rest of them.
return;
@@ -1733,7 +1742,7 @@ impl ModCollector<'_, '_> {
self.def_collector.crate_local_def_map.unwrap_or(&self.def_collector.local_def_map);
match item {
- ModItemId::Mod(m) => self.collect_module(m, &attrs),
+ ModItemId::Mod(m) => self.collect_module(m, attrs),
ModItemId::Use(item_tree_id) => {
let id =
UseLoc { container: module, id: InFile::new(self.file_id(), item_tree_id) }
@@ -2006,7 +2015,7 @@ impl ModCollector<'_, '_> {
);
return;
};
- for (path, _) in paths {
+ for (path, _, _) in paths {
if let Some(name) = path.as_ident() {
single_imports.push(name.clone());
}
@@ -2020,7 +2029,7 @@ impl ModCollector<'_, '_> {
);
}
- fn collect_module(&mut self, module_ast_id: ItemTreeAstId<Mod>, attrs: &Attrs) {
+ fn collect_module(&mut self, module_ast_id: ItemTreeAstId<Mod>, attrs: Attrs<'_>) {
let path_attr = attrs.by_key(sym::path).string_value_unescape();
let is_macro_use = attrs.by_key(sym::macro_use).exists();
let module = &self.item_tree[module_ast_id];
@@ -2061,23 +2070,18 @@ impl ModCollector<'_, '_> {
self.file_id(),
&module.name,
path_attr.as_deref(),
+ self.def_collector.def_map.krate,
) {
Ok((file_id, is_mod_rs, mod_dir)) => {
let item_tree = db.file_item_tree(file_id.into());
- let krate = self.def_collector.def_map.krate;
- let is_enabled = item_tree
- .top_level_attrs(db, krate)
- .cfg()
- .and_then(|cfg| self.is_cfg_enabled(&cfg).not().then_some(cfg))
- .map_or(Ok(()), Err);
- match is_enabled {
- Err(cfg) => {
+ match item_tree.top_level_attrs() {
+ AttrsOrCfg::CfgDisabled(cfg) => {
self.emit_unconfigured_diagnostic(
InFile::new(self.file_id(), module_ast_id.erase()),
- &cfg,
+ &cfg.0,
);
}
- Ok(()) => {
+ AttrsOrCfg::Enabled { attrs } => {
let module_id = self.push_child_module(
module.name.clone(),
ast_id.value,
@@ -2093,11 +2097,8 @@ impl ModCollector<'_, '_> {
mod_dir,
}
.collect_in_top_module(item_tree.top_level_items());
- let is_macro_use = is_macro_use
- || item_tree
- .top_level_attrs(db, krate)
- .by_key(sym::macro_use)
- .exists();
+ let is_macro_use =
+ is_macro_use || attrs.as_ref().by_key(sym::macro_use).exists();
if is_macro_use {
self.import_all_legacy_macros(module_id);
}
@@ -2185,36 +2186,16 @@ impl ModCollector<'_, '_> {
/// assumed to be resolved already.
fn resolve_attributes(
&mut self,
- attrs: &Attrs,
+ attrs: Attrs<'_>,
mod_item: ModItemId,
container: ItemContainerId,
) -> Result<(), ()> {
- let mut ignore_up_to = self
+ let ignore_up_to = self
.def_collector
.skip_attrs
.get(&InFile::new(self.file_id(), mod_item.ast_id()))
.copied();
- let iter = attrs
- .iter()
- .dedup_by(|a, b| {
- // FIXME: this should not be required, all attributes on an item should have a
- // unique ID!
- // Still, this occurs because `#[cfg_attr]` can "expand" to multiple attributes:
- // #[cfg_attr(not(off), unresolved, unresolved)]
- // struct S;
- // We should come up with a different way to ID attributes.
- a.id == b.id
- })
- .skip_while(|attr| match ignore_up_to {
- Some(id) if attr.id == id => {
- ignore_up_to = None;
- true
- }
- Some(_) => true,
- None => false,
- });
-
- for attr in iter {
+ for (attr_id, attr) in attrs.iter_after(ignore_up_to) {
if self.def_collector.def_map.is_builtin_or_registered_attr(&attr.path) {
continue;
}
@@ -2229,6 +2210,7 @@ impl ModCollector<'_, '_> {
depth: self.macro_depth + 1,
kind: MacroDirectiveKind::Attr {
ast_id,
+ attr_id,
attr: attr.clone(),
mod_item,
tree: self.tree_id,
@@ -2246,7 +2228,13 @@ impl ModCollector<'_, '_> {
fn collect_macro_rules(&mut self, ast_id: ItemTreeAstId<MacroRules>, module: ModuleId) {
let krate = self.def_collector.def_map.krate;
let mac = &self.item_tree[ast_id];
- let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast());
+ let attrs = match self.item_tree.attrs(ast_id.upcast()) {
+ Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
+ None => Attrs::EMPTY,
+ Some(AttrsOrCfg::CfgDisabled(_)) => {
+ unreachable!("we only get here if the macro is not cfg'ed out")
+ }
+ };
let f_ast_id = InFile::new(self.file_id(), ast_id.upcast());
let export_attr = || attrs.by_key(sym::macro_export);
@@ -2331,7 +2319,13 @@ impl ModCollector<'_, '_> {
fn collect_macro_def(&mut self, ast_id: ItemTreeAstId<Macro2>, module: ModuleId) {
let krate = self.def_collector.def_map.krate;
let mac = &self.item_tree[ast_id];
- let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast());
+ let attrs = match self.item_tree.attrs(ast_id.upcast()) {
+ Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
+ None => Attrs::EMPTY,
+ Some(AttrsOrCfg::CfgDisabled(_)) => {
+ unreachable!("we only get here if the macro is not cfg'ed out")
+ }
+ };
let f_ast_id = InFile::new(self.file_id(), ast_id.upcast());
// Case 1: builtin macros
@@ -2515,10 +2509,6 @@ impl ModCollector<'_, '_> {
Some((a, b))
}
- fn is_cfg_enabled(&self, cfg: &CfgExpr) -> bool {
- self.def_collector.cfg_options.check(cfg) != Some(false)
- }
-
fn emit_unconfigured_diagnostic(&mut self, ast_id: ErasedAstId, cfg: &CfgExpr) {
self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id,
@@ -2558,6 +2548,7 @@ mod tests {
proc_macros: Default::default(),
from_glob_import: Default::default(),
skip_attrs: Default::default(),
+ prev_active_attrs: Default::default(),
is_proc_macro: false,
unresolved_extern_crates: Default::default(),
};
diff --git a/crates/hir-def/src/nameres/diagnostics.rs b/crates/hir-def/src/nameres/diagnostics.rs
index c495a07449..6a07c56aee 100644
--- a/crates/hir-def/src/nameres/diagnostics.rs
+++ b/crates/hir-def/src/nameres/diagnostics.rs
@@ -17,8 +17,8 @@ pub enum DefDiagnosticKind {
UnconfiguredCode { ast_id: ErasedAstId, cfg: CfgExpr, opts: CfgOptions },
UnresolvedMacroCall { ast: MacroCallKind, path: ModPath },
UnimplementedBuiltinMacro { ast: AstId<ast::Macro> },
- InvalidDeriveTarget { ast: AstId<ast::Item>, id: usize },
- MalformedDerive { ast: AstId<ast::Adt>, id: usize },
+ InvalidDeriveTarget { ast: AstId<ast::Item>, id: AttrId },
+ MalformedDerive { ast: AstId<ast::Adt>, id: AttrId },
MacroDefError { ast: AstId<ast::Macro>, message: String },
MacroError { ast: AstId<ast::Item>, path: ModPath, err: ExpandErrorKind },
}
@@ -119,10 +119,7 @@ impl DefDiagnostic {
ast: AstId<ast::Item>,
id: AttrId,
) -> Self {
- Self {
- in_module: container,
- kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index() },
- }
+ Self { in_module: container, kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id } }
}
pub(super) fn malformed_derive(
@@ -130,9 +127,6 @@ impl DefDiagnostic {
ast: AstId<ast::Adt>,
id: AttrId,
) -> Self {
- Self {
- in_module: container,
- kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index() },
- }
+ Self { in_module: container, kind: DefDiagnosticKind::MalformedDerive { ast, id } }
}
}
diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs
index 0c50f13edf..140b77ac00 100644
--- a/crates/hir-def/src/nameres/mod_resolution.rs
+++ b/crates/hir-def/src/nameres/mod_resolution.rs
@@ -1,6 +1,6 @@
//! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec;
-use base_db::AnchoredPath;
+use base_db::{AnchoredPath, Crate};
use hir_expand::{EditionedFileId, name::Name};
use crate::{HirFileId, db::DefDatabase};
@@ -62,6 +62,7 @@ impl ModDir {
file_id: HirFileId,
name: &Name,
attr_path: Option<&str>,
+ krate: Crate,
) -> Result<(EditionedFileId, bool, ModDir), Box<[String]>> {
let name = name.as_str();
@@ -91,7 +92,7 @@ impl ModDir {
if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) {
return Ok((
// FIXME: Edition, is this rightr?
- EditionedFileId::new(db, file_id, orig_file_id.edition(db)),
+ EditionedFileId::new(db, file_id, orig_file_id.edition(db), krate),
is_mod_rs,
mod_dir,
));
diff --git a/crates/hir-def/src/nameres/proc_macro.rs b/crates/hir-def/src/nameres/proc_macro.rs
index cd8882183b..cd45afe57d 100644
--- a/crates/hir-def/src/nameres/proc_macro.rs
+++ b/crates/hir-def/src/nameres/proc_macro.rs
@@ -3,8 +3,10 @@
use hir_expand::name::{AsName, Name};
use intern::sym;
-use crate::attr::Attrs;
-use crate::tt::{Leaf, TokenTree, TopSubtree, TtElement};
+use crate::{
+ item_tree::Attrs,
+ tt::{Leaf, TokenTree, TopSubtree, TtElement},
+};
#[derive(Debug, PartialEq, Eq)]
pub struct ProcMacroDef {
@@ -29,8 +31,8 @@ impl ProcMacroKind {
}
}
-impl Attrs {
- pub fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
+impl Attrs<'_> {
+ pub(crate) fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
if self.is_proc_macro() {
Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Bang })
} else if self.is_proc_macro_attribute() {
@@ -51,15 +53,10 @@ impl Attrs {
}
}
- pub fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> {
+ pub(crate) fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> {
let derive = self.by_key(sym::proc_macro_derive).tt_values().next()?;
parse_macro_name_and_helper_attrs(derive)
}
-
- pub fn parse_rustc_builtin_macro(&self) -> Option<(Name, Box<[Name]>)> {
- let derive = self.by_key(sym::rustc_builtin_macro).tt_values().next()?;
- parse_macro_name_and_helper_attrs(derive)
- }
}
// This fn is intended for `#[proc_macro_derive(..)]` and `#[rustc_builtin_macro(..)]`, which have
@@ -84,14 +81,11 @@ pub(crate) fn parse_macro_name_and_helper_attrs(tt: &TopSubtree) -> Option<(Name
let helpers = tt::TokenTreesView::new(&tt.token_trees().flat_tokens()[3..]).try_into_subtree()?;
let helpers = helpers
.iter()
- .filter(
- |tt| !matches!(tt, TtElement::Leaf(Leaf::Punct(comma)) if comma.char == ','),
- )
- .map(|tt| match tt {
+ .filter_map(|tt| match tt {
TtElement::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()),
_ => None,
})
- .collect::<Option<Box<[_]>>>()?;
+ .collect::<Box<[_]>>();
Some((trait_name.as_name(), helpers))
}
diff --git a/crates/hir-def/src/signatures.rs b/crates/hir-def/src/signatures.rs
index 405bb44559..e8ccf56059 100644
--- a/crates/hir-def/src/signatures.rs
+++ b/crates/hir-def/src/signatures.rs
@@ -21,7 +21,7 @@ use triomphe::Arc;
use crate::{
ConstId, EnumId, EnumVariantId, EnumVariantLoc, ExternBlockId, FunctionId, HasModule, ImplId,
ItemContainerId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, UnionId, VariantId,
- attr::Attrs,
+ attrs::AttrFlags,
db::DefDatabase,
expr_store::{
ExpressionStore, ExpressionStoreSourceMap,
@@ -47,12 +47,13 @@ pub struct StructSignature {
pub store: Arc<ExpressionStore>,
pub flags: StructFlags,
pub shape: FieldsShape,
- pub repr: Option<ReprOptions>,
}
bitflags! {
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct StructFlags: u8 {
+ /// Indicates whether this struct has `#[repr]`.
+ const HAS_REPR = 1 << 0;
/// Indicates whether the struct has a `#[rustc_has_incoherent_inherent_impls]` attribute.
const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1;
/// Indicates whether the struct has a `#[fundamental]` attribute.
@@ -74,26 +75,28 @@ impl StructSignature {
pub fn query(db: &dyn DefDatabase, id: StructId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
let InFile { file_id, value: source } = loc.source(db);
- let attrs = db.attrs(id.into());
+ let attrs = AttrFlags::query(db, id.into());
let mut flags = StructFlags::empty();
- if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+ if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
}
- if attrs.by_key(sym::fundamental).exists() {
+ if attrs.contains(AttrFlags::FUNDAMENTAL) {
flags |= StructFlags::FUNDAMENTAL;
}
- if let Some(lang) = attrs.lang_item() {
+ if attrs.contains(AttrFlags::HAS_REPR) {
+ flags |= StructFlags::HAS_REPR;
+ }
+ if let Some(lang) = attrs.lang_item_with_attrs(db, id.into()) {
match lang {
- _ if *lang == sym::phantom_data => flags |= StructFlags::IS_PHANTOM_DATA,
- _ if *lang == sym::owned_box => flags |= StructFlags::IS_BOX,
- _ if *lang == sym::manually_drop => flags |= StructFlags::IS_MANUALLY_DROP,
- _ if *lang == sym::unsafe_cell => flags |= StructFlags::IS_UNSAFE_CELL,
- _ if *lang == sym::unsafe_pinned => flags |= StructFlags::IS_UNSAFE_PINNED,
+ _ if lang == sym::phantom_data => flags |= StructFlags::IS_PHANTOM_DATA,
+ _ if lang == sym::owned_box => flags |= StructFlags::IS_BOX,
+ _ if lang == sym::manually_drop => flags |= StructFlags::IS_MANUALLY_DROP,
+ _ if lang == sym::unsafe_cell => flags |= StructFlags::IS_UNSAFE_CELL,
+ _ if lang == sym::unsafe_pinned => flags |= StructFlags::IS_UNSAFE_PINNED,
_ => (),
}
}
- let repr = attrs.repr();
let shape = adt_shape(source.kind());
let (store, generic_params, source_map) = lower_generic_params(
@@ -111,11 +114,19 @@ impl StructSignature {
flags,
shape,
name: as_name_opt(source.name()),
- repr,
}),
Arc::new(source_map),
)
}
+
+ #[inline]
+ pub fn repr(&self, db: &dyn DefDatabase, id: StructId) -> Option<ReprOptions> {
+ if self.flags.contains(StructFlags::HAS_REPR) {
+ AttrFlags::repr(db, id.into())
+ } else {
+ None
+ }
+ }
}
#[inline]
@@ -133,22 +144,22 @@ pub struct UnionSignature {
pub generic_params: Arc<GenericParams>,
pub store: Arc<ExpressionStore>,
pub flags: StructFlags,
- pub repr: Option<ReprOptions>,
}
impl UnionSignature {
pub fn query(db: &dyn DefDatabase, id: UnionId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
- let attrs = db.attrs(id.into());
+ let attrs = AttrFlags::query(db, id.into());
let mut flags = StructFlags::empty();
- if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+ if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
}
- if attrs.by_key(sym::fundamental).exists() {
+ if attrs.contains(AttrFlags::FUNDAMENTAL) {
flags |= StructFlags::FUNDAMENTAL;
}
-
- let repr = attrs.repr();
+ if attrs.contains(AttrFlags::HAS_REPR) {
+ flags |= StructFlags::HAS_REPR;
+ }
let InFile { file_id, value: source } = loc.source(db);
let (store, generic_params, source_map) = lower_generic_params(
@@ -164,7 +175,6 @@ impl UnionSignature {
generic_params,
store,
flags,
- repr,
name: as_name_opt(source.name()),
}),
Arc::new(source_map),
@@ -185,20 +195,17 @@ pub struct EnumSignature {
pub generic_params: Arc<GenericParams>,
pub store: Arc<ExpressionStore>,
pub flags: EnumFlags,
- pub repr: Option<ReprOptions>,
}
impl EnumSignature {
pub fn query(db: &dyn DefDatabase, id: EnumId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
- let attrs = db.attrs(id.into());
+ let attrs = AttrFlags::query(db, id.into());
let mut flags = EnumFlags::empty();
- if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+ if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags |= EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
}
- let repr = attrs.repr();
-
let InFile { file_id, value: source } = loc.source(db);
let (store, generic_params, source_map) = lower_generic_params(
db,
@@ -214,15 +221,14 @@ impl EnumSignature {
generic_params,
store,
flags,
- repr,
name: as_name_opt(source.name()),
}),
Arc::new(source_map),
)
}
- pub fn variant_body_type(&self) -> IntegerType {
- match self.repr {
+ pub fn variant_body_type(db: &dyn DefDatabase, id: EnumId) -> IntegerType {
+ match AttrFlags::repr(db, id.into()) {
Some(ReprOptions { int: Some(builtin), .. }) => builtin,
_ => IntegerType::Pointer(true),
}
@@ -250,9 +256,9 @@ impl ConstSignature {
let loc = id.lookup(db);
let module = loc.container.module(db);
- let attrs = db.attrs(id.into());
+ let attrs = AttrFlags::query(db, id.into());
let mut flags = ConstFlags::empty();
- if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
+ if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
flags |= ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
}
let source = loc.source(db);
@@ -305,9 +311,9 @@ impl StaticSignature {
let loc = id.lookup(db);
let module = loc.container.module(db);
- let attrs = db.attrs(id.into());
+ let attrs = AttrFlags::query(db, id.into());
let mut flags = StaticFlags::empty();
- if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
+ if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
flags |= StaticFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
}
@@ -432,7 +438,7 @@ impl TraitSignature {
let loc = id.lookup(db);
let mut flags = TraitFlags::empty();
- let attrs = db.attrs(id.into());
+ let attrs = AttrFlags::query(db, id.into());
let source = loc.source(db);
if source.value.auto_token().is_some() {
flags.insert(TraitFlags::AUTO);
@@ -443,34 +449,23 @@ impl TraitSignature {
if source.value.eq_token().is_some() {
flags.insert(TraitFlags::ALIAS);
}
- if attrs.by_key(sym::fundamental).exists() {
+ if attrs.contains(AttrFlags::FUNDAMENTAL) {
flags |= TraitFlags::FUNDAMENTAL;
}
- if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+ if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags |= TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
}
- if attrs.by_key(sym::rustc_paren_sugar).exists() {
+ if attrs.contains(AttrFlags::RUSTC_PAREN_SUGAR) {
flags |= TraitFlags::RUSTC_PAREN_SUGAR;
}
- if attrs.by_key(sym::rustc_coinductive).exists() {
+ if attrs.contains(AttrFlags::RUSTC_COINDUCTIVE) {
flags |= TraitFlags::COINDUCTIVE;
}
- let mut skip_array_during_method_dispatch =
- attrs.by_key(sym::rustc_skip_array_during_method_dispatch).exists();
- let mut skip_boxed_slice_during_method_dispatch = false;
- for tt in attrs.by_key(sym::rustc_skip_during_method_dispatch).tt_values() {
- for tt in tt.iter() {
- if let tt::iter::TtElement::Leaf(tt::Leaf::Ident(ident)) = tt {
- skip_array_during_method_dispatch |= ident.sym == sym::array;
- skip_boxed_slice_during_method_dispatch |= ident.sym == sym::boxed_slice;
- }
- }
- }
- if skip_array_during_method_dispatch {
+ if attrs.contains(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH) {
flags |= TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH;
}
- if skip_boxed_slice_during_method_dispatch {
+ if attrs.contains(AttrFlags::RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH) {
flags |= TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH;
}
@@ -502,7 +497,8 @@ bitflags! {
const HAS_TARGET_FEATURE = 1 << 9;
const DEPRECATED_SAFE_2024 = 1 << 10;
const EXPLICIT_SAFE = 1 << 11;
- const RUSTC_INTRINSIC = 1 << 12;
+ const HAS_LEGACY_CONST_GENERICS = 1 << 12;
+ const RUSTC_INTRINSIC = 1 << 13;
}
}
@@ -515,8 +511,6 @@ pub struct FunctionSignature {
pub ret_type: Option<TypeRefId>,
pub abi: Option<Symbol>,
pub flags: FnFlags,
- // FIXME: we should put this behind a fn flags + query to avoid bloating the struct
- pub legacy_const_generics_indices: Option<Box<Box<[u32]>>>,
}
impl FunctionSignature {
@@ -528,23 +522,26 @@ impl FunctionSignature {
let module = loc.container.module(db);
let mut flags = FnFlags::empty();
- let attrs = db.attrs(id.into());
- if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
+ let attrs = AttrFlags::query(db, id.into());
+ if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
flags.insert(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL);
}
- if attrs.by_key(sym::target_feature).exists() {
+ if attrs.contains(AttrFlags::HAS_TARGET_FEATURE) {
flags.insert(FnFlags::HAS_TARGET_FEATURE);
}
- if attrs.by_key(sym::rustc_intrinsic).exists() {
+
+ if attrs.contains(AttrFlags::RUSTC_INTRINSIC) {
flags.insert(FnFlags::RUSTC_INTRINSIC);
}
- let legacy_const_generics_indices = attrs.rustc_legacy_const_generics();
+ if attrs.contains(AttrFlags::HAS_LEGACY_CONST_GENERICS) {
+ flags.insert(FnFlags::HAS_LEGACY_CONST_GENERICS);
+ }
let source = loc.source(db);
if source.value.unsafe_token().is_some() {
- if attrs.by_key(sym::rustc_deprecated_safe_2024).exists() {
+ if attrs.contains(AttrFlags::RUSTC_DEPRECATED_SAFE_2024) {
flags.insert(FnFlags::DEPRECATED_SAFE_2024);
} else {
flags.insert(FnFlags::UNSAFE);
@@ -586,7 +583,6 @@ impl FunctionSignature {
ret_type,
abi,
flags,
- legacy_const_generics_indices,
name,
}),
Arc::new(source_map),
@@ -635,6 +631,19 @@ impl FunctionSignature {
self.flags.contains(FnFlags::HAS_TARGET_FEATURE)
}
+ #[inline]
+ pub fn legacy_const_generics_indices<'db>(
+ &self,
+ db: &'db dyn DefDatabase,
+ id: FunctionId,
+ ) -> Option<&'db [u32]> {
+ if !self.flags.contains(FnFlags::HAS_LEGACY_CONST_GENERICS) {
+ return None;
+ }
+
+ AttrFlags::legacy_const_generic_indices(db, id).as_deref()
+ }
+
pub fn is_intrinsic(db: &dyn DefDatabase, id: FunctionId) -> bool {
let data = db.function_signature(id);
data.flags.contains(FnFlags::RUSTC_INTRINSIC)
@@ -678,11 +687,11 @@ impl TypeAliasSignature {
let loc = id.lookup(db);
let mut flags = TypeAliasFlags::empty();
- let attrs = db.attrs(id.into());
- if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+ let attrs = AttrFlags::query(db, id.into());
+ if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
flags.insert(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPL);
}
- if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
+ if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
flags.insert(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL);
}
if matches!(loc.container, ItemContainerId::ExternBlockId(_)) {
@@ -865,7 +874,7 @@ fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>(
let mut has_fields = false;
for (ty, field) in fields.value {
has_fields = true;
- match Attrs::is_cfg_enabled_for(db, &field, col.span_map(), cfg_options) {
+ match AttrFlags::is_cfg_enabled_for(&field, cfg_options) {
Ok(()) => {
let type_ref =
col.lower_type_ref_opt(ty, &mut ExprCollector::impl_trait_error_allocator);
@@ -927,7 +936,6 @@ impl EnumVariants {
let loc = e.lookup(db);
let source = loc.source(db);
let ast_id_map = db.ast_id_map(source.file_id);
- let span_map = db.span_map(source.file_id);
let mut diagnostics = ThinVec::new();
let cfg_options = loc.container.krate.cfg_options(db);
@@ -939,7 +947,7 @@ impl EnumVariants {
.variants()
.filter_map(|variant| {
let ast_id = ast_id_map.ast_id(&variant);
- match Attrs::is_cfg_enabled_for(db, &variant, span_map.as_ref(), cfg_options) {
+ match AttrFlags::is_cfg_enabled_for(&variant, cfg_options) {
Ok(()) => {
let enum_variant =
EnumVariantLoc { id: source.with_value(ast_id), parent: e, index }
diff --git a/crates/hir-def/src/src.rs b/crates/hir-def/src/src.rs
index 367b543cf9..153fd195f0 100644
--- a/crates/hir-def/src/src.rs
+++ b/crates/hir-def/src/src.rs
@@ -7,7 +7,7 @@ use syntax::{AstNode, AstPtr, ast};
use crate::{
AstIdLoc, GenericDefId, LocalFieldId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup,
- UseId, VariantId, attr::Attrs, db::DefDatabase,
+ UseId, VariantId, attrs::AttrFlags, db::DefDatabase,
};
pub trait HasSource {
@@ -145,15 +145,13 @@ impl HasChildSource<LocalFieldId> for VariantId {
(lookup.source(db).map(|it| it.kind()), lookup.container)
}
};
- let span_map = db.span_map(src.file_id);
let mut map = ArenaMap::new();
match &src.value {
ast::StructKind::Tuple(fl) => {
let cfg_options = container.krate.cfg_options(db);
let mut idx = 0;
for fd in fl.fields() {
- let enabled =
- Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok();
+ let enabled = AttrFlags::is_cfg_enabled_for(&fd, cfg_options).is_ok();
if !enabled {
continue;
}
@@ -168,8 +166,7 @@ impl HasChildSource<LocalFieldId> for VariantId {
let cfg_options = container.krate.cfg_options(db);
let mut idx = 0;
for fd in fl.fields() {
- let enabled =
- Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok();
+ let enabled = AttrFlags::is_cfg_enabled_for(&fd, cfg_options).is_ok();
if !enabled {
continue;
}
diff --git a/crates/hir-def/src/test_db.rs b/crates/hir-def/src/test_db.rs
index 12a1c1554c..3bb9c361b3 100644
--- a/crates/hir-def/src/test_db.rs
+++ b/crates/hir-def/src/test_db.rs
@@ -190,7 +190,15 @@ impl TestDB {
let mut res = DefMap::ROOT;
for (module, data) in def_map.modules() {
let src = data.definition_source(self);
- if src.file_id != position.file_id {
+ // We're not comparing the `base_db::EditionedFileId`, but rather the VFS `FileId`, because
+ // `position.file_id` is created before the def map, causing it to have the wrong crate
+ // attached often, which means it won't compare equal. This should not be a problem in a real
+ // r-a session, only in tests, because in real r-a we only guess the crate on syntactic-only
+ // (e.g. on-enter) handlers. The rest pick the `EditionedFileId` from the def map.
+ let Some(file_id) = src.file_id.file_id() else {
+ continue;
+ };
+ if file_id.file_id(self) != position.file_id.file_id(self) {
continue;
}
@@ -230,7 +238,15 @@ impl TestDB {
let mut fn_def = None;
for (_, module) in def_map.modules() {
let file_id = module.definition_source(self).file_id;
- if file_id != position.file_id {
+ // We're not comparing the `base_db::EditionedFileId`, but rather the VFS `FileId`, because
+ // `position.file_id` is created before the def map, causing it to have the wrong crate
+ // attached often, which means it won't compare equal. This should not be a problem in a real
+ // r-a session, only in tests, because in real r-a we only guess the crate on syntactic-only
+ // (e.g. on-enter) handlers. The rest pick the `EditionedFileId` from the def map.
+ let Some(file_id) = file_id.file_id() else {
+ continue;
+ };
+ if file_id.file_id(self) != position.file_id.file_id(self) {
continue;
}
for decl in module.scope.declarations() {
@@ -253,26 +269,25 @@ impl TestDB {
};
if size != Some(new_size) {
size = Some(new_size);
- fn_def = Some(it);
+ fn_def = Some((it, file_id));
}
}
}
}
// Find the innermost block expression that has a `DefMap`.
- let def_with_body = fn_def?.into();
+ let (def_with_body, file_id) = fn_def?;
+ let def_with_body = def_with_body.into();
let source_map = self.body_with_source_map(def_with_body).1;
let scopes = self.expr_scopes(def_with_body);
- let root_syntax_node = self.parse(position.file_id).syntax_node();
+ let root_syntax_node = self.parse(file_id).syntax_node();
let scope_iter =
algo::ancestors_at_offset(&root_syntax_node, position.offset).filter_map(|node| {
let block = ast::BlockExpr::cast(node)?;
let expr = ast::Expr::from(block);
- let expr_id = source_map
- .node_expr(InFile::new(position.file_id.into(), &expr))?
- .as_expr()
- .unwrap();
+ let expr_id =
+ source_map.node_expr(InFile::new(file_id.into(), &expr))?.as_expr().unwrap();
let scope = scopes.scope_for(expr_id).unwrap();
Some(scope)
});
diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml
index 80a3c08486..4fa476afb6 100644
--- a/crates/hir-expand/Cargo.toml
+++ b/crates/hir-expand/Cargo.toml
@@ -23,6 +23,8 @@ triomphe.workspace = true
query-group.workspace = true
salsa.workspace = true
salsa-macros.workspace = true
+arrayvec.workspace = true
+thin-vec.workspace = true
# local deps
stdx.workspace = true
diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index 986f8764f5..e1807cd2e1 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -1,200 +1,397 @@
-//! A higher level attributes based on TokenTree, with also some shortcuts.
-use std::iter;
-use std::{borrow::Cow, fmt, ops};
+//! Defines the basics of attributes lowering.
+//!
+//! The heart and soul of this module is [`expand_cfg_attr()`], alongside its sibling
+//! [`expand_cfg_attr_with_doc_comments()`]. It is used to implement all attribute lowering
+//! in r-a. Its basic job is to list attributes; however, attributes do not necessarily map
+//! into [`ast::Attr`], because `cfg_attr` can map to zero, one, or more attributes
+//! (`#[cfg_attr(predicate, attr1, attr2, ...)]`). To bridge this gap, this module defines
+//! [`Meta`], which represents a desugared attribute. Various bits of r-a need different
+//! things from [`Meta`], therefore it contains many parts. The basic idea is:
+//!
+//! - There are three kinds of attributes, `path = value`, `path`, and `path(token_tree)`.
+//! - Most bits of rust-analyzer only need to deal with some paths. Therefore, we keep
+//! the path only if it has up to 2 segments, or one segment for `path = value`.
+//! We also only keep the value in `path = value` if it is a literal. However, we always
+//! save all the relevant ranges of attributes (the path range, and the full attribute range)
+//! for parts of r-a (e.g. name resolution) that need a faithful representation of the
+//! attribute.
+//!
+//! [`expand_cfg_attr()`] expands `cfg_attr`s as it goes (as its name implies), to list
+//! all attributes.
+//!
+//! Another thing to note is that we need to be able to map an attribute back to a range
+//! (for diagnostic purposes etc.). This is only ever needed for attributes that participate
+//! in name resolution. An attribute is mapped back by its [`AttrId`], which is just an
+//! index into the item tree attributes list. To minimize the risk of bugs, we have one
+//! place (here) and one function ([`is_item_tree_filtered_attr()`]) that decides whether
+//! an attribute participates in name resolution.
+
+use std::{
+ borrow::Cow, cell::OnceCell, convert::Infallible, fmt, iter::Peekable, ops::ControlFlow,
+};
+use ::tt::{TextRange, TextSize};
+use arrayvec::ArrayVec;
use base_db::Crate;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
-use intern::{Interned, Symbol, sym};
-
+use intern::{Interned, Symbol};
use mbe::{DelimiterKind, Punct};
-use smallvec::{SmallVec, smallvec};
-use span::{Span, SyntaxContext};
-use syntax::unescape;
-use syntax::{AstNode, AstToken, SyntaxNode, ast, match_ast};
-use syntax_bridge::{DocCommentDesugarMode, desugar_doc_comment_text, syntax_node_to_token_tree};
-use triomphe::ThinArc;
+use parser::T;
+use smallvec::SmallVec;
+use span::{RealSpanMap, Span, SyntaxContext};
+use syntax::{
+ AstNode, NodeOrToken, SyntaxNode, SyntaxToken,
+ ast::{self, TokenTreeChildren},
+ unescape,
+};
+use syntax_bridge::DocCommentDesugarMode;
use crate::{
+ AstId,
db::ExpandDatabase,
mod_path::ModPath,
- name::Name,
span_map::SpanMapRef,
- tt::{self, TopSubtree, token_to_literal},
+ tt::{self, TopSubtree},
};
-/// Syntactical attributes, without filtering of `cfg_attr`s.
-#[derive(Default, Debug, Clone, PartialEq, Eq)]
-pub struct RawAttrs {
- // FIXME: This can become `Box<[Attr]>` if https://internals.rust-lang.org/t/layout-of-dst-box/21728?u=chrefr is accepted.
- entries: Option<ThinArc<(), Attr>>,
-}
-
-impl ops::Deref for RawAttrs {
- type Target = [Attr];
-
- fn deref(&self) -> &[Attr] {
- match &self.entries {
- Some(it) => &it.slice,
- None => &[],
- }
- }
+#[derive(Debug)]
+pub struct AttrPath {
+ /// This can be empty if the path is not of 1 or 2 segments exactly.
+ pub segments: ArrayVec<SyntaxToken, 2>,
+ pub range: TextRange,
+ // FIXME: This shouldn't be textual, `#[test]` needs name resolution.
+ // And if textual, it shouldn't be here, it should be in hir-def/src/attrs.rs. But some macros
+ // fully qualify `test` as `core::prelude::vX::test`, and this is more than 2 segments, so hir-def
+ // attrs can't find it. But this will mean we have to push every up-to-4-segments path, which
+ // may impact perf. So it was easier to just hack it here.
+ pub is_test: bool,
}
-impl RawAttrs {
- pub const EMPTY: Self = Self { entries: None };
-
- pub fn new(
- db: &dyn ExpandDatabase,
- owner: &dyn ast::HasAttrs,
- span_map: SpanMapRef<'_>,
- ) -> Self {
- let entries: Vec<_> = Self::attrs_iter::<true>(db, owner, span_map).collect();
-
- let entries = if entries.is_empty() {
- None
- } else {
- Some(ThinArc::from_header_and_iter((), entries.into_iter()))
- };
-
- RawAttrs { entries }
- }
-
- /// A [`RawAttrs`] that has its `#[cfg_attr(...)]` attributes expanded.
- pub fn new_expanded(
- db: &dyn ExpandDatabase,
- owner: &dyn ast::HasAttrs,
- span_map: SpanMapRef<'_>,
- cfg_options: &CfgOptions,
- ) -> Self {
- let entries: Vec<_> =
- Self::attrs_iter_expanded::<true>(db, owner, span_map, cfg_options).collect();
-
- let entries = if entries.is_empty() {
- None
- } else {
- Some(ThinArc::from_header_and_iter((), entries.into_iter()))
- };
-
- RawAttrs { entries }
- }
-
- pub fn attrs_iter<const DESUGAR_COMMENTS: bool>(
- db: &dyn ExpandDatabase,
- owner: &dyn ast::HasAttrs,
- span_map: SpanMapRef<'_>,
- ) -> impl Iterator<Item = Attr> {
- collect_attrs(owner).filter_map(move |(id, attr)| match attr {
- Either::Left(attr) => {
- attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
+impl AttrPath {
+ #[inline]
+ fn extract(path: &ast::Path) -> Self {
+ let mut is_test = false;
+ let segments = (|| {
+ let mut segments = ArrayVec::new();
+ let segment2 = path.segment()?.name_ref()?.syntax().first_token()?;
+ if segment2.text() == "test" {
+ // `#[test]` or `#[core::prelude::vX::test]`.
+ is_test = true;
}
- Either::Right(comment) if DESUGAR_COMMENTS => comment.doc_comment().map(|doc| {
- let span = span_map.span_for_range(comment.syntax().text_range());
- let (text, kind) = desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro);
- Attr {
- id,
- input: Some(Box::new(AttrInput::Literal(tt::Literal {
- symbol: text,
- span,
- kind,
- suffix: None,
- }))),
- path: Interned::new(ModPath::from(Name::new_symbol(sym::doc, span.ctx))),
- ctxt: span.ctx,
+ let segment1 = path.qualifier();
+ if let Some(segment1) = segment1 {
+ if segment1.qualifier().is_some() {
+ None
+ } else {
+ let segment1 = segment1.segment()?.name_ref()?.syntax().first_token()?;
+ segments.push(segment1);
+ segments.push(segment2);
+ Some(segments)
}
- }),
- Either::Right(_) => None,
- })
+ } else {
+ segments.push(segment2);
+ Some(segments)
+ }
+ })();
+ AttrPath {
+ segments: segments.unwrap_or(ArrayVec::new()),
+ range: path.syntax().text_range(),
+ is_test,
+ }
}
- pub fn attrs_iter_expanded<const DESUGAR_COMMENTS: bool>(
- db: &dyn ExpandDatabase,
- owner: &dyn ast::HasAttrs,
- span_map: SpanMapRef<'_>,
- cfg_options: &CfgOptions,
- ) -> impl Iterator<Item = Attr> {
- Self::attrs_iter::<DESUGAR_COMMENTS>(db, owner, span_map)
- .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
+ #[inline]
+ pub fn is1(&self, segment: &str) -> bool {
+ self.segments.len() == 1 && self.segments[0].text() == segment
}
+}
- pub fn merge(&self, other: Self) -> Self {
- match (&self.entries, other.entries) {
- (None, None) => Self::EMPTY,
- (None, entries @ Some(_)) => Self { entries },
- (Some(entries), None) => Self { entries: Some(entries.clone()) },
- (Some(a), Some(b)) => {
- let last_ast_index = a.slice.last().map_or(0, |it| it.id.ast_index() + 1);
- let items = a
- .slice
- .iter()
- .cloned()
- .chain(b.slice.iter().map(|it| {
- let mut it = it.clone();
- let id = it.id.ast_index() + last_ast_index;
- it.id = AttrId::new(id, it.id.is_inner_attr());
- it
- }))
- .collect::<Vec<_>>();
- Self { entries: Some(ThinArc::from_header_and_iter((), items.into_iter())) }
- }
+#[derive(Debug)]
+pub enum Meta {
+ /// `name` is `None` if not a single token. `value` is a literal or `None`.
+ NamedKeyValue {
+ path_range: TextRange,
+ name: Option<SyntaxToken>,
+ value: Option<SyntaxToken>,
+ },
+ TokenTree {
+ path: AttrPath,
+ tt: ast::TokenTree,
+ },
+ Path {
+ path: AttrPath,
+ },
+}
+
+impl Meta {
+ #[inline]
+ pub fn path_range(&self) -> TextRange {
+ match self {
+ Meta::NamedKeyValue { path_range, .. } => *path_range,
+ Meta::TokenTree { path, .. } | Meta::Path { path } => path.range,
}
}
- /// Processes `cfg_attr`s
- pub fn expand_cfg_attr(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs {
- let has_cfg_attrs =
- self.iter().any(|attr| attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr));
- if !has_cfg_attrs {
- return self;
+ fn extract(iter: &mut Peekable<TokenTreeChildren>) -> Option<(Self, TextSize)> {
+ let mut start_offset = None;
+ if let Some(NodeOrToken::Token(colon1)) = iter.peek()
+ && colon1.kind() == T![:]
+ {
+ start_offset = Some(colon1.text_range().start());
+ iter.next();
+ iter.next_if(|it| it.as_token().is_some_and(|it| it.kind() == T![:]));
+ }
+ let first_segment = iter
+ .next_if(|it| it.as_token().is_some_and(|it| it.kind().is_any_identifier()))?
+ .into_token()?;
+ let mut is_test = first_segment.text() == "test";
+ let start_offset = start_offset.unwrap_or_else(|| first_segment.text_range().start());
+
+ let mut segments_len = 1;
+ let mut second_segment = None;
+ let mut path_range = first_segment.text_range();
+ while iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:])
+ && let _ = iter.next()
+ && iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:])
+ && let _ = iter.next()
+ && let Some(NodeOrToken::Token(segment)) = iter.peek()
+ && segment.kind().is_any_identifier()
+ {
+ segments_len += 1;
+ is_test = segment.text() == "test";
+ second_segment = Some(segment.clone());
+ path_range = TextRange::new(path_range.start(), segment.text_range().end());
+ iter.next();
}
- let cfg_options = krate.cfg_options(db);
- let new_attrs = self
- .iter()
- .cloned()
- .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
- .collect::<Vec<_>>();
- let entries = if new_attrs.is_empty() {
- None
- } else {
- Some(ThinArc::from_header_and_iter((), new_attrs.into_iter()))
+ let segments = |first, second| {
+ let mut segments = ArrayVec::new();
+ if segments_len <= 2 {
+ segments.push(first);
+ if let Some(second) = second {
+ segments.push(second);
+ }
+ }
+ segments
+ };
+ let meta = match iter.peek() {
+ Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
+ iter.next();
+ let value = match iter.peek() {
+ Some(NodeOrToken::Token(token)) if token.kind().is_literal() => {
+ // No need to consume it, it will be consumed by `extract_and_eat_comma()`.
+ Some(token.clone())
+ }
+ _ => None,
+ };
+ let name = if second_segment.is_none() { Some(first_segment) } else { None };
+ Meta::NamedKeyValue { path_range, name, value }
+ }
+ Some(NodeOrToken::Node(tt)) => Meta::TokenTree {
+ path: AttrPath {
+ segments: segments(first_segment, second_segment),
+ range: path_range,
+ is_test,
+ },
+ tt: tt.clone(),
+ },
+ _ => Meta::Path {
+ path: AttrPath {
+ segments: segments(first_segment, second_segment),
+ range: path_range,
+ is_test,
+ },
+ },
};
- RawAttrs { entries }
+ Some((meta, start_offset))
}
- pub fn is_empty(&self) -> bool {
- self.entries.is_none()
+ fn extract_possibly_unsafe(
+ iter: &mut Peekable<TokenTreeChildren>,
+ container: &ast::TokenTree,
+ ) -> Option<(Self, TextRange)> {
+ if iter.peek().is_some_and(|it| it.as_token().is_some_and(|it| it.kind() == T![unsafe])) {
+ iter.next();
+ let tt = iter.next()?.into_node()?;
+ let result = Self::extract(&mut TokenTreeChildren::new(&tt).peekable()).map(
+ |(meta, start_offset)| (meta, TextRange::new(start_offset, tt_end_offset(&tt))),
+ );
+ while iter.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {}
+ result
+ } else {
+ Self::extract(iter).map(|(meta, start_offset)| {
+ let end_offset = 'find_end_offset: {
+ for it in iter {
+ if let NodeOrToken::Token(it) = it
+ && it.kind() == T![,]
+ {
+ break 'find_end_offset it.text_range().start();
+ }
+ }
+ tt_end_offset(container)
+ };
+ (meta, TextRange::new(start_offset, end_offset))
+ })
+ }
}
}
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct AttrId {
- id: u32,
+fn tt_end_offset(tt: &ast::TokenTree) -> TextSize {
+ tt.syntax().last_token().unwrap().text_range().start()
}
-// FIXME: This only handles a single level of cfg_attr nesting
-// that is `#[cfg_attr(all(), cfg_attr(all(), cfg(any())))]` breaks again
-impl AttrId {
- const INNER_ATTR_SET_BIT: u32 = 1 << 31;
+/// The callback is passed a desugared form of the attribute ([`Meta`]), a [`SyntaxNode`] fully containing it
+/// (note: it may not be the direct parent), the range within the [`SyntaxNode`] bounding the attribute,
+/// and the outermost `ast::Attr`. Note that one node may map to multiple [`Meta`]s due to `cfg_attr`.
+#[inline]
+pub fn expand_cfg_attr<'a, BreakValue>(
+ attrs: impl Iterator<Item = ast::Attr>,
+ cfg_options: impl FnMut() -> &'a CfgOptions,
+ mut callback: impl FnMut(Meta, &SyntaxNode, TextRange, &ast::Attr) -> ControlFlow<BreakValue>,
+) -> Option<BreakValue> {
+ expand_cfg_attr_with_doc_comments::<Infallible, _>(
+ attrs.map(Either::Left),
+ cfg_options,
+ move |Either::Left((meta, container, range, top_attr))| {
+ callback(meta, container, range, top_attr)
+ },
+ )
+}
- pub fn new(id: usize, is_inner: bool) -> Self {
- assert!(id <= !Self::INNER_ATTR_SET_BIT as usize);
- let id = id as u32;
- Self { id: if is_inner { id | Self::INNER_ATTR_SET_BIT } else { id } }
- }
+#[inline]
+pub fn expand_cfg_attr_with_doc_comments<'a, DocComment, BreakValue>(
+ mut attrs: impl Iterator<Item = Either<ast::Attr, DocComment>>,
+ mut cfg_options: impl FnMut() -> &'a CfgOptions,
+ mut callback: impl FnMut(
+ Either<(Meta, &SyntaxNode, TextRange, &ast::Attr), DocComment>,
+ ) -> ControlFlow<BreakValue>,
+) -> Option<BreakValue> {
+ let mut stack = SmallVec::<[_; 1]>::new();
+ let result = attrs.try_for_each(|top_attr| {
+ let top_attr = match top_attr {
+ Either::Left(it) => it,
+ Either::Right(comment) => return callback(Either::Right(comment)),
+ };
+ if let Some((attr_name, tt)) = top_attr.as_simple_call()
+ && attr_name == "cfg_attr"
+ {
+ let mut tt_iter = TokenTreeChildren::new(&tt).peekable();
+ let cfg = cfg::CfgExpr::parse_from_ast(&mut tt_iter);
+ if cfg_options().check(&cfg) != Some(false) {
+ stack.push((tt_iter, tt));
+ while let Some((tt_iter, tt)) = stack.last_mut() {
+ let Some((attr, range)) = Meta::extract_possibly_unsafe(tt_iter, tt) else {
+ stack.pop();
+ continue;
+ };
+ if let Meta::TokenTree { path, tt: nested_tt } = &attr
+ && path.is1("cfg_attr")
+ {
+ let mut nested_tt_iter = TokenTreeChildren::new(nested_tt).peekable();
+ let cfg = cfg::CfgExpr::parse_from_ast(&mut nested_tt_iter);
+ if cfg_options().check(&cfg) != Some(false) {
+ stack.push((nested_tt_iter, nested_tt.clone()));
+ }
+ } else {
+ callback(Either::Left((attr, tt.syntax(), range, &top_attr)))?;
+ }
+ }
+ }
+ } else if let Some(ast_meta) = top_attr.meta()
+ && let Some(path) = ast_meta.path()
+ {
+ let path = AttrPath::extract(&path);
+ let meta = if let Some(tt) = ast_meta.token_tree() {
+ Meta::TokenTree { path, tt }
+ } else if let Some(value) = ast_meta.expr() {
+ let value =
+ if let ast::Expr::Literal(value) = value { Some(value.token()) } else { None };
+ let name =
+ if path.segments.len() == 1 { Some(path.segments[0].clone()) } else { None };
+ Meta::NamedKeyValue { name, value, path_range: path.range }
+ } else {
+ Meta::Path { path }
+ };
+ callback(Either::Left((
+ meta,
+ ast_meta.syntax(),
+ ast_meta.syntax().text_range(),
+ &top_attr,
+ )))?;
+ }
+ ControlFlow::Continue(())
+ });
+ result.break_value()
+}
- pub fn ast_index(&self) -> usize {
- (self.id & !Self::INNER_ATTR_SET_BIT) as usize
- }
+#[inline]
+pub(crate) fn is_item_tree_filtered_attr(name: &str) -> bool {
+ matches!(
+ name,
+ "doc"
+ | "stable"
+ | "unstable"
+ | "target_feature"
+ | "allow"
+ | "expect"
+ | "warn"
+ | "deny"
+ | "forbid"
+ | "repr"
+ | "inline"
+ | "track_caller"
+ | "must_use"
+ )
+}
- pub fn is_inner_attr(&self) -> bool {
- self.id & Self::INNER_ATTR_SET_BIT != 0
- }
+/// This collects attributes exactly as the item tree needs them. This is used for the item tree,
+/// as well as for resolving [`AttrId`]s.
+pub fn collect_item_tree_attrs<'a, BreakValue>(
+ owner: &dyn ast::HasAttrs,
+ cfg_options: impl Fn() -> &'a CfgOptions,
+ mut on_attr: impl FnMut(Meta, &SyntaxNode, &ast::Attr, TextRange) -> ControlFlow<BreakValue>,
+) -> Option<Either<BreakValue, CfgExpr>> {
+ let attrs = ast::attrs_including_inner(owner);
+ expand_cfg_attr(
+ attrs,
+ || cfg_options(),
+ |attr, container, range, top_attr| {
+ // We filter builtin attributes that we don't need for nameres, because this saves memory.
+ // Only the most common attributes are listed here; if another attribute becomes common, feel free to add it.
+ // Notice, however: for an attribute to be filtered out, it *must* not be shadowable with a macro!
+ let filter = match &attr {
+ Meta::NamedKeyValue { name: Some(name), .. } => {
+ is_item_tree_filtered_attr(name.text())
+ }
+ Meta::TokenTree { path, tt } if path.segments.len() == 1 => {
+ let name = path.segments[0].text();
+ if name == "cfg" {
+ let cfg =
+ CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable());
+ if cfg_options().check(&cfg) == Some(false) {
+ return ControlFlow::Break(Either::Right(cfg));
+ }
+ true
+ } else {
+ is_item_tree_filtered_attr(name)
+ }
+ }
+ Meta::Path { path } => {
+ path.segments.len() == 1 && is_item_tree_filtered_attr(path.segments[0].text())
+ }
+ _ => false,
+ };
+ if !filter && let ControlFlow::Break(v) = on_attr(attr, container, top_attr, range) {
+ return ControlFlow::Break(Either::Left(v));
+ }
+ ControlFlow::Continue(())
+ },
+ )
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Attr {
- pub id: AttrId,
pub path: Interned<ModPath>,
pub input: Option<Box<AttrInput>>,
pub ctxt: SyntaxContext,
@@ -218,131 +415,6 @@ impl fmt::Display for AttrInput {
}
impl Attr {
- fn from_src(
- db: &dyn ExpandDatabase,
- ast: ast::Meta,
- span_map: SpanMapRef<'_>,
- id: AttrId,
- ) -> Option<Attr> {
- let path = ast.path()?;
- let range = path.syntax().text_range();
- let path = Interned::new(ModPath::from_src(db, path, &mut |range| {
- span_map.span_for_range(range).ctx
- })?);
- let span = span_map.span_for_range(range);
- let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
- let token = lit.token();
- Some(Box::new(AttrInput::Literal(token_to_literal(token.text(), span))))
- } else if let Some(tt) = ast.token_tree() {
- let tree = syntax_node_to_token_tree(
- tt.syntax(),
- span_map,
- span,
- DocCommentDesugarMode::ProcMacro,
- );
- Some(Box::new(AttrInput::TokenTree(tree)))
- } else {
- None
- };
- Some(Attr { id, path, input, ctxt: span.ctx })
- }
-
- fn from_tt(
- db: &dyn ExpandDatabase,
- mut tt: tt::TokenTreesView<'_>,
- id: AttrId,
- ) -> Option<Attr> {
- if matches!(tt.flat_tokens(),
- [tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, .. })), ..]
- if *sym == sym::unsafe_
- ) {
- match tt.iter().nth(1) {
- Some(tt::TtElement::Subtree(_, iter)) => tt = iter.remaining(),
- _ => return None,
- }
- }
- let first = tt.flat_tokens().first()?;
- let ctxt = first.first_span().ctx;
- let (path, input) = {
- let mut iter = tt.iter();
- let start = iter.savepoint();
- let mut input = tt::TokenTreesView::new(&[]);
- let mut path = iter.from_savepoint(start);
- let mut path_split_savepoint = iter.savepoint();
- while let Some(tt) = iter.next() {
- path = iter.from_savepoint(start);
- if !matches!(
- tt,
- tt::TtElement::Leaf(
- tt::Leaf::Punct(tt::Punct { char: ':' | '$', .. }) | tt::Leaf::Ident(_),
- )
- ) {
- input = path_split_savepoint.remaining();
- break;
- }
- path_split_savepoint = iter.savepoint();
- }
- (path, input)
- };
-
- let path = Interned::new(ModPath::from_tt(db, path)?);
-
- let input = match (input.flat_tokens().first(), input.try_into_subtree()) {
- (_, Some(tree)) => {
- Some(Box::new(AttrInput::TokenTree(tt::TopSubtree::from_subtree(tree))))
- }
- (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))), _) => {
- match input.flat_tokens().get(1) {
- Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => {
- Some(Box::new(AttrInput::Literal(lit.clone())))
- }
- _ => None,
- }
- }
- _ => None,
- };
- Some(Attr { id, path, input, ctxt })
- }
-
- pub fn path(&self) -> &ModPath {
- &self.path
- }
-
- pub fn expand_cfg_attr(
- self,
- db: &dyn ExpandDatabase,
- cfg_options: &CfgOptions,
- ) -> impl IntoIterator<Item = Self> {
- let is_cfg_attr = self.path.as_ident().is_some_and(|name| *name == sym::cfg_attr);
- if !is_cfg_attr {
- return smallvec![self];
- }
-
- let subtree = match self.token_tree_value() {
- Some(it) => it,
- _ => return smallvec![self.clone()],
- };
-
- let (cfg, parts) = match parse_cfg_attr_input(subtree) {
- Some(it) => it,
- None => return smallvec![self.clone()],
- };
- let index = self.id;
- let attrs = parts.filter_map(|attr| Attr::from_tt(db, attr, index));
-
- let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
- let cfg = CfgExpr::parse(&cfg);
- if cfg_options.check(&cfg) == Some(false) {
- smallvec![]
- } else {
- cov_mark::hit!(cfg_attr_active);
-
- attrs.collect::<SmallVec<[_; 1]>>()
- }
- }
-}
-
-impl Attr {
/// #[path = "string"]
pub fn string_value(&self) -> Option<&Symbol> {
match self.input.as_deref()? {
@@ -403,30 +475,26 @@ impl Attr {
pub fn parse_path_comma_token_tree<'a>(
&'a self,
db: &'a dyn ExpandDatabase,
- ) -> Option<impl Iterator<Item = (ModPath, Span)> + 'a> {
+ ) -> Option<impl Iterator<Item = (ModPath, Span, tt::TokenTreesView<'a>)> + 'a> {
let args = self.token_tree_value()?;
if args.top_subtree().delimiter.kind != DelimiterKind::Parenthesis {
return None;
}
- let paths = args
- .token_trees()
- .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
- .filter_map(move |tts| {
- let span = tts.flat_tokens().first()?.first_span();
- Some((ModPath::from_tt(db, tts)?, span))
- });
-
- Some(paths)
+ Some(parse_path_comma_token_tree(db, args))
}
+}
- pub fn cfg(&self) -> Option<CfgExpr> {
- if *self.path.as_ident()? == sym::cfg {
- self.token_tree_value().map(CfgExpr::parse)
- } else {
- None
- }
- }
+fn parse_path_comma_token_tree<'a>(
+ db: &'a dyn ExpandDatabase,
+ args: &'a tt::TopSubtree,
+) -> impl Iterator<Item = (ModPath, Span, tt::TokenTreesView<'a>)> {
+ args.token_trees()
+ .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
+ .filter_map(move |tts| {
+ let span = tts.flat_tokens().first()?.first_span();
+ Some((ModPath::from_tt(db, tts)?, span, tts))
+ })
}
fn unescape(s: &str) -> Option<Cow<'_, str>> {
@@ -455,58 +523,104 @@ fn unescape(s: &str) -> Option<Cow<'_, str>> {
}
}
-pub fn collect_attrs(
- owner: &dyn ast::HasAttrs,
-) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {
- let inner_attrs =
- inner_attributes(owner.syntax()).into_iter().flatten().zip(iter::repeat(true));
- let outer_attrs = ast::AttrDocCommentIter::from_syntax_node(owner.syntax())
- .filter(|el| match el {
- Either::Left(attr) => attr.kind().is_outer(),
- Either::Right(comment) => comment.is_outer(),
- })
- .zip(iter::repeat(false));
- outer_attrs
- .chain(inner_attrs)
- .enumerate()
- .map(|(id, (attr, is_inner))| (AttrId::new(id, is_inner), attr))
+/// This is an index of an attribute *that always points to the item tree attributes*.
+///
+/// Outer attributes are counted first, then inner attributes. This does not support
+/// out-of-line modules, which may have attributes spread across 2 files!
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct AttrId {
+ id: u32,
}
-fn inner_attributes(
- syntax: &SyntaxNode,
-) -> Option<impl Iterator<Item = Either<ast::Attr, ast::Comment>>> {
- let node = match_ast! {
- match syntax {
- ast::SourceFile(_) => syntax.clone(),
- ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
- ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
- ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
- ast::Module(it) => it.item_list()?.syntax().clone(),
- ast::BlockExpr(it) => {
- if !it.may_carry_attributes() {
- return None
+impl AttrId {
+ #[inline]
+ pub fn from_item_tree_index(id: u32) -> Self {
+ Self { id }
+ }
+
+ #[inline]
+ pub fn item_tree_index(self) -> u32 {
+ self.id
+ }
+
+ /// Returns the containing `ast::Attr` (note that it may contain other attributes as well due
+ /// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the
+ /// attribute, and its desugared [`Meta`].
+ pub fn find_attr_range<N: ast::HasAttrs>(
+ self,
+ db: &dyn ExpandDatabase,
+ krate: Crate,
+ owner: AstId<N>,
+ ) -> (ast::Attr, SyntaxNode, TextRange, Meta) {
+ self.find_attr_range_with_source(db, krate, &owner.to_node(db))
+ }
+
+ /// Returns the containing `ast::Attr` (note that it may contain other attributes as well due
+ /// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the
+ /// attribute, and its desugared [`Meta`].
+ pub fn find_attr_range_with_source(
+ self,
+ db: &dyn ExpandDatabase,
+ krate: Crate,
+ owner: &dyn ast::HasAttrs,
+ ) -> (ast::Attr, SyntaxNode, TextRange, Meta) {
+ let cfg_options = OnceCell::new();
+ let mut index = 0;
+ let result = collect_item_tree_attrs(
+ owner,
+ || cfg_options.get_or_init(|| krate.cfg_options(db)),
+ |meta, container, top_attr, range| {
+ if index == self.id {
+ return ControlFlow::Break((top_attr.clone(), container.clone(), range, meta));
}
- syntax.clone()
+ index += 1;
+ ControlFlow::Continue(())
},
- _ => return None,
+ );
+ match result {
+ Some(Either::Left(it)) => it,
+ _ => {
+ panic!("used an incorrect `AttrId`; crate={krate:?}, attr_id={self:?}");
+ }
}
- };
-
- let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el {
- Either::Left(attr) => attr.kind().is_inner(),
- Either::Right(comment) => comment.is_inner(),
- });
- Some(attrs)
-}
+ }
-// Input subtree is: `(cfg, $(attr),+)`
-// Split it up into a `cfg` subtree and the `attr` subtrees.
-fn parse_cfg_attr_input(
- subtree: &TopSubtree,
-) -> Option<(tt::TokenTreesView<'_>, impl Iterator<Item = tt::TokenTreesView<'_>>)> {
- let mut parts = subtree
- .token_trees()
- .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))));
- let cfg = parts.next()?;
- Some((cfg, parts.filter(|it| !it.is_empty())))
+ pub fn find_derive_range(
+ self,
+ db: &dyn ExpandDatabase,
+ krate: Crate,
+ owner: AstId<ast::Adt>,
+ derive_index: u32,
+ ) -> TextRange {
+ let (_, _, derive_attr_range, derive_attr) = self.find_attr_range(db, krate, owner);
+ let Meta::TokenTree { tt, .. } = derive_attr else {
+ return derive_attr_range;
+ };
+ // Fake the span map, as we don't really need spans here, just the offsets of the node in the file.
+ let span_map = RealSpanMap::absolute(span::EditionedFileId::current_edition(
+ span::FileId::from_raw(0),
+ ));
+ let tt = syntax_bridge::syntax_node_to_token_tree(
+ tt.syntax(),
+ SpanMapRef::RealSpanMap(&span_map),
+ span_map.span_for_range(tt.syntax().text_range()),
+ DocCommentDesugarMode::ProcMacro,
+ );
+ let Some((_, _, derive_tts)) =
+ parse_path_comma_token_tree(db, &tt).nth(derive_index as usize)
+ else {
+ return derive_attr_range;
+ };
+ let (Some(first_tt), Some(last_tt)) =
+ (derive_tts.flat_tokens().first(), derive_tts.flat_tokens().last())
+ else {
+ return derive_attr_range;
+ };
+ let start = first_tt.first_span().range.start();
+ let end = match last_tt {
+ tt::TokenTree::Leaf(it) => it.span().range.end(),
+ tt::TokenTree::Subtree(it) => it.delimiter.close.range.end(),
+ };
+ TextRange::new(start, end)
+ }
}
diff --git a/crates/hir-expand/src/builtin/fn_macro.rs b/crates/hir-expand/src/builtin/fn_macro.rs
index 56f68d8277..9ee9d33746 100644
--- a/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/crates/hir-expand/src/builtin/fn_macro.rs
@@ -772,7 +772,7 @@ fn relative_file(
if res == call_site && !allow_recursion {
Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`")))
} else {
- Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition))
+ Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition, lookup.krate))
}
}
diff --git a/crates/hir-expand/src/cfg_process.rs b/crates/hir-expand/src/cfg_process.rs
index d5ebd6ee19..8b82671ed4 100644
--- a/crates/hir-expand/src/cfg_process.rs
+++ b/crates/hir-expand/src/cfg_process.rs
@@ -1,373 +1,343 @@
//! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro
-use std::iter::Peekable;
+use std::{cell::OnceCell, ops::ControlFlow};
+use ::tt::TextRange;
use base_db::Crate;
-use cfg::{CfgAtom, CfgExpr};
-use intern::{Symbol, sym};
-use rustc_hash::FxHashSet;
+use cfg::CfgExpr;
+use parser::T;
+use smallvec::SmallVec;
use syntax::{
- AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T,
- ast::{self, Attr, HasAttrs, Meta, TokenTree, VariantList},
+ AstNode, PreorderWithTokens, SyntaxElement, SyntaxNode, SyntaxToken, WalkEvent,
+ ast::{self, HasAttrs, TokenTreeChildren},
};
-use tracing::{debug, warn};
+use syntax_bridge::DocCommentDesugarMode;
-use crate::{MacroCallLoc, MacroDefKind, db::ExpandDatabase, proc_macro::ProcMacroKind};
+use crate::{
+ attrs::{AttrId, Meta, expand_cfg_attr, is_item_tree_filtered_attr},
+ db::ExpandDatabase,
+ fixup::{self, SyntaxFixupUndoInfo},
+ span_map::SpanMapRef,
+ tt::{self, DelimSpan, Span},
+};
-fn check_cfg(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option<bool> {
- if !attr.simple_name().as_deref().map(|v| v == "cfg")? {
- return None;
- }
- let cfg = parse_from_attr_token_tree(&attr.meta()?.token_tree()?)?;
- let enabled = krate.cfg_options(db).check(&cfg) != Some(false);
- Some(enabled)
+struct ItemIsCfgedOut;
+
+#[derive(Debug)]
+struct ExpandedAttrToProcess {
+ range: TextRange,
}
-fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option<bool> {
- if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? {
- return None;
- }
- check_cfg_attr_value(db, &attr.token_tree()?, krate)
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum NextExpandedAttrState {
+ NotStarted,
+ InTheMiddle,
}
-pub fn check_cfg_attr_value(
- db: &dyn ExpandDatabase,
- attr: &TokenTree,
- krate: Crate,
-) -> Option<bool> {
- let cfg_expr = parse_from_attr_token_tree(attr)?;
- let enabled = krate.cfg_options(db).check(&cfg_expr) != Some(false);
- Some(enabled)
+#[derive(Debug)]
+struct AstAttrToProcess {
+ range: TextRange,
+ expanded_attrs: SmallVec<[ExpandedAttrToProcess; 1]>,
+ expanded_attrs_idx: usize,
+ next_expanded_attr: NextExpandedAttrState,
+ pound_span: Span,
+ brackets_span: DelimSpan,
+ /// If `Some`, this is an inner attribute.
+ excl_span: Option<Span>,
}
-fn process_has_attrs_with_possible_comma<I: HasAttrs>(
+fn macro_input_callback(
db: &dyn ExpandDatabase,
- items: impl Iterator<Item = I>,
+ is_derive: bool,
+ censor_item_tree_attr_ids: &[AttrId],
krate: Crate,
- remove: &mut FxHashSet<SyntaxElement>,
-) -> Option<()> {
- for item in items {
- let field_attrs = item.attrs();
- 'attrs: for attr in field_attrs {
- if let Some(enabled) = check_cfg(db, &attr, krate) {
- if enabled {
- debug!("censoring {:?}", attr.syntax());
- remove.insert(attr.syntax().clone().into());
- } else {
- debug!("censoring {:?}", item.syntax());
- remove.insert(item.syntax().clone().into());
- // We need to remove the , as well
- remove_possible_comma(&item, remove);
- break 'attrs;
- }
- }
+ default_span: Span,
+ span_map: SpanMapRef<'_>,
+) -> impl FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf>) {
+ let cfg_options = OnceCell::new();
+ let cfg_options = move || *cfg_options.get_or_init(|| krate.cfg_options(db));
- if let Some(enabled) = check_cfg_attr(db, &attr, krate) {
- if enabled {
- debug!("Removing cfg_attr tokens {:?}", attr);
- let meta = attr.meta()?;
- let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
- remove.extend(removes_from_cfg_attr);
- } else {
- debug!("censoring type cfg_attr {:?}", item.syntax());
- remove.insert(attr.syntax().clone().into());
- }
+ let mut should_strip_attr = {
+ let mut item_tree_attr_id = 0;
+ let mut censor_item_tree_attr_ids_index = 0;
+ move || {
+ let mut result = false;
+ if let Some(&next_censor_attr_id) =
+ censor_item_tree_attr_ids.get(censor_item_tree_attr_ids_index)
+ && next_censor_attr_id.item_tree_index() == item_tree_attr_id
+ {
+ censor_item_tree_attr_ids_index += 1;
+ result = true;
}
+ item_tree_attr_id += 1;
+ result
}
- }
- Some(())
-}
+ };
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-enum CfgExprStage {
- /// Stripping the CFGExpr part of the attribute
- StrippigCfgExpr,
- /// Found the comma after the CFGExpr. Will keep all tokens until the next comma or the end of the attribute
- FoundComma,
- /// Everything following the attribute. This could be another attribute or the end of the attribute.
- // FIXME: cfg_attr with multiple attributes will not be handled correctly. We will only keep the first attribute
- // Related Issue: https://github.com/rust-lang/rust-analyzer/issues/10110
- EverythingElse,
-}
+ let mut attrs = Vec::new();
+ let mut attrs_idx = 0;
+ let mut has_inner_attrs_owner = false;
+ let mut in_attr = false;
+ let mut done_with_attrs = false;
+ let mut did_top_attrs = false;
+ move |preorder, event| {
+ match event {
+ WalkEvent::Enter(SyntaxElement::Node(node)) => {
+ if done_with_attrs {
+ return (true, Vec::new());
+ }
-/// This function creates its own set of tokens to remove. To help prevent malformed syntax as input.
-fn remove_tokens_within_cfg_attr(meta: Meta) -> Option<FxHashSet<SyntaxElement>> {
- let mut remove: FxHashSet<SyntaxElement> = FxHashSet::default();
- debug!("Enabling attribute {}", meta);
- let meta_path = meta.path()?;
- debug!("Removing {:?}", meta_path.syntax());
- remove.insert(meta_path.syntax().clone().into());
+ if ast::Attr::can_cast(node.kind()) {
+ in_attr = true;
+ let node_range = node.text_range();
+ while attrs
+ .get(attrs_idx)
+ .is_some_and(|it: &AstAttrToProcess| it.range != node_range)
+ {
+ attrs_idx += 1;
+ }
+ } else if let Some(has_attrs) = ast::AnyHasAttrs::cast(node.clone()) {
+ if has_inner_attrs_owner {
+ has_inner_attrs_owner = false;
+ return (true, Vec::new());
+ }
- let meta_tt = meta.token_tree()?;
- debug!("meta_tt {}", meta_tt);
- let mut stage = CfgExprStage::StrippigCfgExpr;
- for tt in meta_tt.token_trees_and_tokens() {
- debug!("Checking {:?}. Stage: {:?}", tt, stage);
- match (stage, tt) {
- (CfgExprStage::StrippigCfgExpr, syntax::NodeOrToken::Node(node)) => {
- remove.insert(node.syntax().clone().into());
- }
- (CfgExprStage::StrippigCfgExpr, syntax::NodeOrToken::Token(token)) => {
- if token.kind() == T![,] {
- stage = CfgExprStage::FoundComma;
- }
- remove.insert(token.into());
- }
- (CfgExprStage::FoundComma, syntax::NodeOrToken::Token(token))
- if (token.kind() == T![,] || token.kind() == T![')']) =>
- {
- // The end of the attribute or separator for the next attribute
- stage = CfgExprStage::EverythingElse;
- remove.insert(token.into());
- }
- (CfgExprStage::EverythingElse, syntax::NodeOrToken::Node(node)) => {
- remove.insert(node.syntax().clone().into());
- }
- (CfgExprStage::EverythingElse, syntax::NodeOrToken::Token(token)) => {
- remove.insert(token.into());
- }
- // This is an actual attribute
- _ => {}
- }
- }
- if stage != CfgExprStage::EverythingElse {
- warn!("Invalid cfg_attr attribute. {:?}", meta_tt);
- return None;
- }
- Some(remove)
-}
-/// Removes a possible comma after the [AstNode]
-fn remove_possible_comma(item: &impl AstNode, res: &mut FxHashSet<SyntaxElement>) {
- if let Some(comma) = item.syntax().next_sibling_or_token().filter(|it| it.kind() == T![,]) {
- res.insert(comma);
- }
-}
-fn process_enum(
- db: &dyn ExpandDatabase,
- variants: VariantList,
- krate: Crate,
- remove: &mut FxHashSet<SyntaxElement>,
-) -> Option<()> {
- 'variant: for variant in variants.variants() {
- for attr in variant.attrs() {
- if let Some(enabled) = check_cfg(db, &attr, krate) {
- if enabled {
- debug!("censoring {:?}", attr.syntax());
- remove.insert(attr.syntax().clone().into());
- } else {
- // Rustc does not strip the attribute if it is enabled. So we will leave it
- debug!("censoring type {:?}", variant.syntax());
- remove.insert(variant.syntax().clone().into());
- // We need to remove the , as well
- remove_possible_comma(&variant, remove);
- continue 'variant;
- }
- }
+ if did_top_attrs && !is_derive {
+ // Derives need all attributes handled, but attribute macros need only the top attributes handled.
+ done_with_attrs = true;
+ return (true, Vec::new());
+ }
+ did_top_attrs = true;
+
+ if let Some(inner_attrs_node) = has_attrs.inner_attributes_node()
+ && inner_attrs_node != *node
+ {
+ has_inner_attrs_owner = true;
+ }
+
+ let node_attrs = ast::attrs_including_inner(&has_attrs);
+
+ attrs.clear();
+ node_attrs.clone().for_each(|attr| {
+ let span_for = |token: Option<SyntaxToken>| {
+ token
+ .map(|token| span_map.span_for_range(token.text_range()))
+ .unwrap_or(default_span)
+ };
+ attrs.push(AstAttrToProcess {
+ range: attr.syntax().text_range(),
+ pound_span: span_for(attr.pound_token()),
+ brackets_span: DelimSpan {
+ open: span_for(attr.l_brack_token()),
+ close: span_for(attr.r_brack_token()),
+ },
+ excl_span: attr
+ .excl_token()
+ .map(|token| span_map.span_for_range(token.text_range())),
+ expanded_attrs: SmallVec::new(),
+ expanded_attrs_idx: 0,
+ next_expanded_attr: NextExpandedAttrState::NotStarted,
+ });
+ });
+
+ attrs_idx = 0;
+ let strip_current_item = expand_cfg_attr(
+ node_attrs,
+ &cfg_options,
+ |attr, _container, range, top_attr| {
+ // Find the attr.
+ while attrs[attrs_idx].range != top_attr.syntax().text_range() {
+ attrs_idx += 1;
+ }
+
+ let mut strip_current_attr = false;
+ match attr {
+ Meta::NamedKeyValue { name, .. } => {
+ if name
+ .is_none_or(|name| !is_item_tree_filtered_attr(name.text()))
+ {
+ strip_current_attr = should_strip_attr();
+ }
+ }
+ Meta::TokenTree { path, tt } => {
+ if path.segments.len() != 1
+ || !is_item_tree_filtered_attr(path.segments[0].text())
+ {
+ strip_current_attr = should_strip_attr();
+ }
+
+ if path.segments.len() == 1 {
+ let name = path.segments[0].text();
+
+ if name == "cfg" {
+ let cfg_expr = CfgExpr::parse_from_ast(
+ &mut TokenTreeChildren::new(&tt).peekable(),
+ );
+ if cfg_options().check(&cfg_expr) == Some(false) {
+ return ControlFlow::Break(ItemIsCfgedOut);
+ }
+ strip_current_attr = true;
+ }
+ }
+ }
+ Meta::Path { path } => {
+ if path.segments.len() != 1
+ || !is_item_tree_filtered_attr(path.segments[0].text())
+ {
+ strip_current_attr = should_strip_attr();
+ }
+ }
+ }
+
+ if !strip_current_attr {
+ attrs[attrs_idx]
+ .expanded_attrs
+ .push(ExpandedAttrToProcess { range });
+ }
- if let Some(enabled) = check_cfg_attr(db, &attr, krate) {
- if enabled {
- debug!("Removing cfg_attr tokens {:?}", attr);
- let meta = attr.meta()?;
- let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
- remove.extend(removes_from_cfg_attr);
- } else {
- debug!("censoring type cfg_attr {:?}", variant.syntax());
- remove.insert(attr.syntax().clone().into());
+ ControlFlow::Continue(())
+ },
+ );
+ attrs_idx = 0;
+
+ if strip_current_item.is_some() {
+ preorder.skip_subtree();
+ attrs.clear();
+
+ 'eat_comma: {
+ // If there is a comma after this node, eat it too.
+ let mut events_until_comma = 0;
+ for event in preorder.clone() {
+ match event {
+ WalkEvent::Enter(SyntaxElement::Node(_))
+ | WalkEvent::Leave(_) => {}
+ WalkEvent::Enter(SyntaxElement::Token(token)) => {
+ let kind = token.kind();
+ if kind == T![,] {
+ break;
+ } else if !kind.is_trivia() {
+ break 'eat_comma;
+ }
+ }
+ }
+ events_until_comma += 1;
+ }
+ preorder.nth(events_until_comma);
+ }
+
+ return (false, Vec::new());
+ }
}
}
- }
- if let Some(fields) = variant.field_list() {
- match fields {
- ast::FieldList::RecordFieldList(fields) => {
- process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?;
- }
- ast::FieldList::TupleFieldList(fields) => {
- process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?;
+ WalkEvent::Leave(SyntaxElement::Node(node)) => {
+ if ast::Attr::can_cast(node.kind()) {
+ in_attr = false;
+ attrs_idx += 1;
}
}
- }
- }
- Some(())
-}
+ WalkEvent::Enter(SyntaxElement::Token(token)) => {
+ if !in_attr {
+ return (true, Vec::new());
+ }
-pub(crate) fn process_cfg_attrs(
- db: &dyn ExpandDatabase,
- node: &SyntaxNode,
- loc: &MacroCallLoc,
-) -> Option<FxHashSet<SyntaxElement>> {
- // FIXME: #[cfg_eval] is not implemented. But it is not stable yet
- let is_derive = match loc.def.kind {
- MacroDefKind::BuiltInDerive(..)
- | MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive) => true,
- MacroDefKind::BuiltInAttr(_, expander) => expander.is_derive(),
- _ => false,
- };
- let mut remove = FxHashSet::default();
+ let Some(ast_attr) = attrs.get_mut(attrs_idx) else {
+ return (true, Vec::new());
+ };
+ let token_range = token.text_range();
+ let Some(expanded_attr) = ast_attr.expanded_attrs.get(ast_attr.expanded_attrs_idx)
+ else {
+ // No expanded attributes in this `ast::Attr`, or we finished them all already, either way
+ // the remaining tokens should be discarded.
+ return (false, Vec::new());
+ };
+ match ast_attr.next_expanded_attr {
+ NextExpandedAttrState::NotStarted => {
+ if token_range.start() >= expanded_attr.range.start() {
+ // We started the next attribute.
+ let mut insert_tokens = Vec::with_capacity(3);
+ insert_tokens.push(tt::Leaf::Punct(tt::Punct {
+ char: '#',
+ spacing: tt::Spacing::Alone,
+ span: ast_attr.pound_span,
+ }));
+ if let Some(span) = ast_attr.excl_span {
+ insert_tokens.push(tt::Leaf::Punct(tt::Punct {
+ char: '!',
+ spacing: tt::Spacing::Alone,
+ span,
+ }));
+ }
+ insert_tokens.push(tt::Leaf::Punct(tt::Punct {
+ char: '[',
+ spacing: tt::Spacing::Alone,
+ span: ast_attr.brackets_span.open,
+ }));
- let item = ast::Item::cast(node.clone())?;
- for attr in item.attrs() {
- if let Some(enabled) = check_cfg_attr(db, &attr, loc.krate) {
- if enabled {
- debug!("Removing cfg_attr tokens {:?}", attr);
- let meta = attr.meta()?;
- let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
- remove.extend(removes_from_cfg_attr);
- } else {
- debug!("Removing type cfg_attr {:?}", item.syntax());
- remove.insert(attr.syntax().clone().into());
- }
- }
- }
+ ast_attr.next_expanded_attr = NextExpandedAttrState::InTheMiddle;
- if is_derive {
- // Only derives get their code cfg-clean, normal attribute macros process only the cfg at their level
- // (cfg_attr is handled above, cfg is handled in the def map).
- match item {
- ast::Item::Struct(it) => match it.field_list()? {
- ast::FieldList::RecordFieldList(fields) => {
- process_has_attrs_with_possible_comma(
- db,
- fields.fields(),
- loc.krate,
- &mut remove,
- )?;
- }
- ast::FieldList::TupleFieldList(fields) => {
- process_has_attrs_with_possible_comma(
- db,
- fields.fields(),
- loc.krate,
- &mut remove,
- )?;
- }
- },
- ast::Item::Enum(it) => {
- process_enum(db, it.variant_list()?, loc.krate, &mut remove)?;
- }
- ast::Item::Union(it) => {
- process_has_attrs_with_possible_comma(
- db,
- it.record_field_list()?.fields(),
- loc.krate,
- &mut remove,
- )?;
- }
- // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now
- _ => {}
- }
- }
- Some(remove)
-}
-/// Parses a `cfg` attribute from the meta
-fn parse_from_attr_token_tree(tt: &TokenTree) -> Option<CfgExpr> {
- let mut iter = tt
- .token_trees_and_tokens()
- .filter(is_not_whitespace)
- .skip(1)
- .take_while(is_not_closing_paren)
- .peekable();
- next_cfg_expr_from_syntax(&mut iter)
-}
+ return (true, insert_tokens);
+ } else {
+ // Before any attribute or between the attributes.
+ return (false, Vec::new());
+ }
+ }
+ NextExpandedAttrState::InTheMiddle => {
+ if token_range.start() >= expanded_attr.range.end() {
+ // Finished the current attribute.
+ let insert_tokens = vec![tt::Leaf::Punct(tt::Punct {
+ char: ']',
+ spacing: tt::Spacing::Alone,
+ span: ast_attr.brackets_span.close,
+ })];
-fn is_not_closing_paren(element: &NodeOrToken<ast::TokenTree, syntax::SyntaxToken>) -> bool {
- !matches!(element, NodeOrToken::Token(token) if (token.kind() == syntax::T![')']))
-}
-fn is_not_whitespace(element: &NodeOrToken<ast::TokenTree, syntax::SyntaxToken>) -> bool {
- !matches!(element, NodeOrToken::Token(token) if (token.kind() == SyntaxKind::WHITESPACE))
-}
+ ast_attr.next_expanded_attr = NextExpandedAttrState::NotStarted;
+ ast_attr.expanded_attrs_idx += 1;
-fn next_cfg_expr_from_syntax<I>(iter: &mut Peekable<I>) -> Option<CfgExpr>
-where
- I: Iterator<Item = NodeOrToken<ast::TokenTree, syntax::SyntaxToken>>,
-{
- let name = match iter.next() {
- None => return None,
- Some(NodeOrToken::Token(element)) => match element.kind() {
- syntax::T![ident] => Symbol::intern(element.text()),
- _ => return Some(CfgExpr::Invalid),
- },
- Some(_) => return Some(CfgExpr::Invalid),
- };
- let result = match &name {
- s if [&sym::all, &sym::any, &sym::not].contains(&s) => {
- let mut preds = Vec::new();
- let Some(NodeOrToken::Node(tree)) = iter.next() else {
- return Some(CfgExpr::Invalid);
- };
- let mut tree_iter = tree
- .token_trees_and_tokens()
- .filter(is_not_whitespace)
- .skip(1)
- .take_while(is_not_closing_paren)
- .peekable();
- while tree_iter.peek().is_some() {
- let pred = next_cfg_expr_from_syntax(&mut tree_iter);
- if let Some(pred) = pred {
- preds.push(pred);
- }
- }
- let group = match &name {
- s if *s == sym::all => CfgExpr::All(preds.into_boxed_slice()),
- s if *s == sym::any => CfgExpr::Any(preds.into_boxed_slice()),
- s if *s == sym::not => {
- CfgExpr::Not(Box::new(preds.pop().unwrap_or(CfgExpr::Invalid)))
- }
- _ => unreachable!(),
- };
- Some(group)
- }
- _ => match iter.peek() {
- Some(NodeOrToken::Token(element)) if (element.kind() == syntax::T![=]) => {
- iter.next();
- match iter.next() {
- Some(NodeOrToken::Token(value_token))
- if (value_token.kind() == syntax::SyntaxKind::STRING) =>
- {
- let value = value_token.text();
- Some(CfgExpr::Atom(CfgAtom::KeyValue {
- key: name,
- value: Symbol::intern(value.trim_matches('"')),
- }))
+ // It's safe to ignore the current token because between attributes
+ // there is always at least one token we skip - either the closing bracket
+ // in `#[]` or the comma in case of multiple attrs in `cfg_attr` expansion.
+ return (false, insert_tokens);
+ } else {
+ // Still in the middle.
+ return (true, Vec::new());
+ }
}
- _ => None,
}
}
- _ => Some(CfgExpr::Atom(CfgAtom::Flag(name))),
- },
- };
- if let Some(NodeOrToken::Token(element)) = iter.peek()
- && element.kind() == syntax::T![,]
- {
- iter.next();
+ WalkEvent::Leave(SyntaxElement::Token(_)) => {}
+ }
+ (true, Vec::new())
}
- result
}
-#[cfg(test)]
-mod tests {
- use cfg::DnfExpr;
- use expect_test::{Expect, expect};
- use syntax::{AstNode, SourceFile, ast::Attr};
-
- use crate::cfg_process::parse_from_attr_token_tree;
- fn check_dnf_from_syntax(input: &str, expect: Expect) {
- let parse = SourceFile::parse(input, span::Edition::CURRENT);
- let node = match parse.tree().syntax().descendants().find_map(Attr::cast) {
- Some(it) => it,
- None => {
- let node = std::any::type_name::<Attr>();
- panic!("Failed to make ast node `{node}` from text {input}")
- }
- };
- let node = node.clone_subtree();
- assert_eq!(node.syntax().text_range().start(), 0.into());
+pub(crate) fn attr_macro_input_to_token_tree(
+ db: &dyn ExpandDatabase,
+ node: &SyntaxNode,
+ span_map: SpanMapRef<'_>,
+ span: Span,
+ is_derive: bool,
+ censor_item_tree_attr_ids: &[AttrId],
+ krate: Crate,
+) -> (tt::TopSubtree, SyntaxFixupUndoInfo) {
+ let fixups = fixup::fixup_syntax(span_map, node, span, DocCommentDesugarMode::ProcMacro);
+ (
+ syntax_bridge::syntax_node_to_token_tree_modified(
+ node,
+ span_map,
+ fixups.append,
+ fixups.remove,
+ span,
+ DocCommentDesugarMode::ProcMacro,
+ macro_input_callback(db, is_derive, censor_item_tree_attr_ids, krate, span, span_map),
+ ),
+ fixups.undo_info,
+ )
+}
- let cfg = parse_from_attr_token_tree(&node.meta().unwrap().token_tree().unwrap()).unwrap();
- let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
- expect.assert_eq(&actual);
- }
- #[test]
- fn cfg_from_attr() {
- check_dnf_from_syntax(r#"#[cfg(test)]"#, expect![[r#"#![cfg(test)]"#]]);
- check_dnf_from_syntax(r#"#[cfg(not(never))]"#, expect![[r#"#![cfg(not(never))]"#]]);
- }
+pub fn check_cfg_attr_value(
+ db: &dyn ExpandDatabase,
+ attr: &ast::TokenTree,
+ krate: Crate,
+) -> Option<bool> {
+ let cfg_expr = CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(attr).peekable());
+ krate.cfg_options(db).check(&cfg_expr)
}
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 944161ff00..5c517e671b 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -1,11 +1,9 @@
//! Defines database & queries for macro expansion.
use base_db::{Crate, RootQueryDb};
-use either::Either;
use mbe::MatchedArmIndex;
-use rustc_hash::FxHashSet;
use span::{AstIdMap, Edition, Span, SyntaxContext};
-use syntax::{AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
+use syntax::{AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
use triomphe::Arc;
@@ -13,9 +11,9 @@ use crate::{
AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
- attrs::{AttrId, AttrInput, RawAttrs, collect_attrs},
+ attrs::Meta,
builtin::pseudo_derive_attr_expansion,
- cfg_process,
+ cfg_process::attr_macro_input_to_token_tree,
declarative::DeclarativeMacroExpander,
fixup::{self, SyntaxFixupUndoInfo},
hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
@@ -177,7 +175,7 @@ pub fn expand_speculative(
let span_map = SpanMapRef::RealSpanMap(&span_map);
// Build the subtree and token mapping for the speculative args
- let (mut tt, undo_info) = match loc.kind {
+ let (mut tt, undo_info) = match &loc.kind {
MacroCallKind::FnLike { .. } => (
syntax_bridge::syntax_node_to_token_tree(
speculative_args,
@@ -200,48 +198,35 @@ pub fn expand_speculative(
),
SyntaxFixupUndoInfo::NONE,
),
- MacroCallKind::Derive { derive_attr_index: index, .. }
- | MacroCallKind::Attr { invoc_attr_index: index, .. } => {
- let censor = if let MacroCallKind::Derive { .. } = loc.kind {
- censor_derive_input(index, &ast::Adt::cast(speculative_args.clone())?)
- } else {
- attr_source(index, &ast::Item::cast(speculative_args.clone())?)
- .into_iter()
- .map(|it| it.syntax().clone().into())
- .collect()
+ MacroCallKind::Derive { derive_macro_id, .. } => {
+ let MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } =
+ &derive_macro_id.loc(db).kind
+ else {
+ unreachable!("`derive_macro_id` should be `MacroCallKind::Attr`");
};
-
- let censor_cfg =
- cfg_process::process_cfg_attrs(db, speculative_args, &loc).unwrap_or_default();
- let mut fixups = fixup::fixup_syntax(
- span_map,
+ attr_macro_input_to_token_tree(
+ db,
speculative_args,
+ span_map,
span,
- DocCommentDesugarMode::ProcMacro,
- );
- fixups.append.retain(|it, _| match it {
- syntax::NodeOrToken::Token(_) => true,
- it => !censor.contains(it) && !censor_cfg.contains(it),
- });
- fixups.remove.extend(censor);
- fixups.remove.extend(censor_cfg);
-
- (
- syntax_bridge::syntax_node_to_token_tree_modified(
- speculative_args,
- span_map,
- fixups.append,
- fixups.remove,
- span,
- DocCommentDesugarMode::ProcMacro,
- ),
- fixups.undo_info,
+ true,
+ attr_ids,
+ loc.krate,
)
}
+ MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } => attr_macro_input_to_token_tree(
+ db,
+ speculative_args,
+ span_map,
+ span,
+ false,
+ attr_ids,
+ loc.krate,
+ ),
};
- let attr_arg = match loc.kind {
- MacroCallKind::Attr { invoc_attr_index, .. } => {
+ let attr_arg = match &loc.kind {
+ MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } => {
if loc.def.is_attribute_derive() {
// for pseudo-derive expansion we actually pass the attribute itself only
ast::Attr::cast(speculative_args.clone()).and_then(|attr| attr.token_tree()).map(
@@ -260,18 +245,21 @@ pub fn expand_speculative(
// Attributes may have an input token tree, build the subtree and map for this as well
// then try finding a token id for our token if it is inside this input subtree.
let item = ast::Item::cast(speculative_args.clone())?;
- let attrs = RawAttrs::new_expanded(db, &item, span_map, loc.krate.cfg_options(db));
- attrs.iter().find(|attr| attr.id == invoc_attr_index).and_then(|attr| {
- match attr.input.as_deref()? {
- AttrInput::TokenTree(tt) => {
- let mut attr_arg = tt.clone();
- attr_arg.top_subtree_delimiter_mut().kind =
- tt::DelimiterKind::Invisible;
- Some(attr_arg)
- }
- AttrInput::Literal(_) => None,
+ let (_, _, _, meta) =
+ attr_ids.invoc_attr().find_attr_range_with_source(db, loc.krate, &item);
+ match meta {
+ Meta::TokenTree { tt, .. } => {
+ let mut attr_arg = syntax_bridge::syntax_node_to_token_tree(
+ tt.syntax(),
+ span_map,
+ span,
+ DocCommentDesugarMode::ProcMacro,
+ );
+ attr_arg.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Invisible;
+ Some(attr_arg)
}
- })
+ _ => None,
+ }
}
}
_ => None,
@@ -430,7 +418,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
let (parse, map) = parse_with_map(db, loc.kind.file_id());
let root = parse.syntax_node();
- let (censor, item_node, span) = match loc.kind {
+ let (is_derive, censor_item_tree_attr_ids, item_node, span) = match &loc.kind {
MacroCallKind::FnLike { ast_id, .. } => {
let node = &ast_id.to_ptr(db).to_node(&root);
let path_range = node
@@ -498,53 +486,29 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
MacroCallKind::Derive { .. } => {
unreachable!("`ExpandDatabase::macro_arg` called with `MacroCallKind::Derive`")
}
- MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
let node = ast_id.to_ptr(db).to_node(&root);
- let attr_source = attr_source(invoc_attr_index, &node);
-
- let span = map.span_for_range(
- attr_source
- .as_ref()
- .and_then(|it| it.path())
- .map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()),
- );
- // If derive attribute we need to censor the derive input
- if matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive())
- && ast::Adt::can_cast(node.syntax().kind())
- {
- let adt = ast::Adt::cast(node.syntax().clone()).unwrap();
- let censor_derive_input = censor_derive_input(invoc_attr_index, &adt);
- (censor_derive_input, node, span)
- } else {
- (attr_source.into_iter().map(|it| it.syntax().clone().into()).collect(), node, span)
- }
+ let range = attr_ids
+ .invoc_attr()
+ .find_attr_range_with_source(db, loc.krate, &node)
+ .3
+ .path_range();
+ let span = map.span_for_range(range);
+
+ let is_derive = matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive());
+ (is_derive, &**attr_ids, node, span)
}
};
- let (mut tt, undo_info) = {
- let syntax = item_node.syntax();
- let censor_cfg = cfg_process::process_cfg_attrs(db, syntax, &loc).unwrap_or_default();
- let mut fixups =
- fixup::fixup_syntax(map.as_ref(), syntax, span, DocCommentDesugarMode::ProcMacro);
- fixups.append.retain(|it, _| match it {
- syntax::NodeOrToken::Token(_) => true,
- it => !censor.contains(it) && !censor_cfg.contains(it),
- });
- fixups.remove.extend(censor);
- fixups.remove.extend(censor_cfg);
-
- (
- syntax_bridge::syntax_node_to_token_tree_modified(
- syntax,
- map,
- fixups.append,
- fixups.remove,
- span,
- DocCommentDesugarMode::ProcMacro,
- ),
- fixups.undo_info,
- )
- };
+ let (mut tt, undo_info) = attr_macro_input_to_token_tree(
+ db,
+ item_node.syntax(),
+ map.as_ref(),
+ span,
+ is_derive,
+ censor_item_tree_attr_ids,
+ loc.krate,
+ );
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
@@ -554,31 +518,6 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
(Arc::new(tt), undo_info, span)
}
-// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
-/// Derives expect all `#[derive(..)]` invocations up to (and including) the currently invoked one to be stripped
-fn censor_derive_input(derive_attr_index: AttrId, node: &ast::Adt) -> FxHashSet<SyntaxElement> {
- // FIXME: handle `cfg_attr`
- cov_mark::hit!(derive_censoring);
- collect_attrs(node)
- .take(derive_attr_index.ast_index() + 1)
- .filter_map(|(_, attr)| Either::left(attr))
- // FIXME, this resolution should not be done syntactically
- // derive is a proper macro now, no longer builtin
- // But we do not have resolution at this stage, this means
- // we need to know about all macro calls for the given ast item here
- // so we require some kind of mapping...
- .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
- .map(|it| it.syntax().clone().into())
- .collect()
-}
-
-/// Attributes expect the invoking attribute to be stripped
-fn attr_source(invoc_attr_index: AttrId, node: &ast::Item) -> Option<ast::Attr> {
- // FIXME: handle `cfg_attr`
- cov_mark::hit!(attribute_macro_attr_censoring);
- collect_attrs(node).nth(invoc_attr_index.ast_index()).and_then(|(_, attr)| Either::left(attr))
-}
-
impl TokenExpander {
fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
match id.kind {
diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs
index d36d85a141..d2df9a1ff6 100644
--- a/crates/hir-expand/src/declarative.rs
+++ b/crates/hir-expand/src/declarative.rs
@@ -1,17 +1,21 @@
//! Compiled declarative macro expanders (`macro_rules!` and `macro`)
+use std::{cell::OnceCell, ops::ControlFlow};
+
use base_db::Crate;
-use intern::sym;
use span::{Edition, Span, SyntaxContext};
use stdx::TupleExt;
-use syntax::{AstNode, ast};
+use syntax::{
+ AstNode, AstToken,
+ ast::{self, HasAttrs},
+};
use syntax_bridge::DocCommentDesugarMode;
use triomphe::Arc;
use crate::{
AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId,
MacroCallStyle,
- attrs::RawAttrs,
+ attrs::{Meta, expand_cfg_attr},
db::ExpandDatabase,
hygiene::{Transparency, apply_mark},
tt,
@@ -83,29 +87,28 @@ impl DeclarativeMacroExpander {
let (root, map) = crate::db::parse_with_map(db, id.file_id);
let root = root.syntax_node();
- let transparency = |node| {
- // ... would be nice to have the item tree here
- let attrs = RawAttrs::new_expanded(db, node, map.as_ref(), def_crate.cfg_options(db));
- match attrs
- .iter()
- .find(|it| {
- it.path
- .as_ident()
- .map(|it| *it == sym::rustc_macro_transparency)
- .unwrap_or(false)
- })?
- .token_tree_value()?
- .token_trees()
- .flat_tokens()
- {
- [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &i.sym {
- s if *s == sym::transparent => Some(Transparency::Transparent),
- s if *s == sym::semitransparent => Some(Transparency::SemiTransparent),
- s if *s == sym::opaque => Some(Transparency::Opaque),
- _ => None,
+ let transparency = |node: ast::AnyHasAttrs| {
+ let cfg_options = OnceCell::new();
+ expand_cfg_attr(
+ node.attrs(),
+ || cfg_options.get_or_init(|| def_crate.cfg_options(db)),
+ |attr, _, _, _| {
+ if let Meta::NamedKeyValue { name: Some(name), value, .. } = attr
+ && name.text() == "rustc_macro_transparency"
+ && let Some(value) = value.and_then(ast::String::cast)
+ && let Ok(value) = value.value()
+ {
+ match &*value {
+ "transparent" => ControlFlow::Break(Transparency::Transparent),
+ "semitransparent" => ControlFlow::Break(Transparency::SemiTransparent),
+ "opaque" => ControlFlow::Break(Transparency::Opaque),
+ _ => ControlFlow::Continue(()),
+ }
+ } else {
+ ControlFlow::Continue(())
+ }
},
- _ => None,
- }
+ )
};
let ctx_edition = |ctx: SyntaxContext| {
if ctx.is_root() {
@@ -136,7 +139,8 @@ impl DeclarativeMacroExpander {
"expected a token tree".into(),
)),
},
- transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
+ transparency(ast::AnyHasAttrs::from(macro_rules))
+ .unwrap_or(Transparency::SemiTransparent),
),
ast::Macro::MacroDef(macro_def) => (
match macro_def.body() {
@@ -164,7 +168,7 @@ impl DeclarativeMacroExpander {
"expected a token tree".into(),
)),
},
- transparency(&macro_def).unwrap_or(Transparency::Opaque),
+ transparency(macro_def.into()).unwrap_or(Transparency::Opaque),
),
};
let edition = ctx_edition(match id.file_id {
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index a7f3e27a45..fe557d6802 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -55,30 +55,6 @@ impl From<FilePosition> for HirFilePosition {
}
}
-impl FilePositionWrapper<span::FileId> {
- pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FilePosition {
- FilePositionWrapper {
- file_id: EditionedFileId::new(db, self.file_id, edition),
- offset: self.offset,
- }
- }
-}
-
-impl FileRangeWrapper<span::FileId> {
- pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FileRange {
- FileRangeWrapper {
- file_id: EditionedFileId::new(db, self.file_id, edition),
- range: self.range,
- }
- }
-}
-
-impl<T> InFileWrapper<span::FileId, T> {
- pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> InRealFile<T> {
- InRealFile { file_id: EditionedFileId::new(db, self.file_id, edition), value: self.value }
- }
-}
-
impl HirFileRange {
pub fn file_range(self) -> Option<FileRange> {
Some(FileRange { file_id: self.file_id.file_id()?, range: self.range })
@@ -407,7 +383,7 @@ impl InFile<SyntaxToken> {
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file);
- loc.kind.original_call_range(db)
+ loc.kind.original_call_range(db, loc.krate)
}
}
}
@@ -453,7 +429,10 @@ impl InFile<TextRange> {
Some(it) => it,
None => {
let loc = db.lookup_intern_macro_call(mac_file);
- (loc.kind.original_call_range(db), SyntaxContext::root(loc.def.edition))
+ (
+ loc.kind.original_call_range(db, loc.krate),
+ SyntaxContext::root(loc.def.edition),
+ )
}
}
}
@@ -468,7 +447,7 @@ impl InFile<TextRange> {
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file);
- loc.kind.original_call_range(db)
+ loc.kind.original_call_range(db, loc.krate)
}
}
}
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index 094a5f0677..876d870936 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -523,6 +523,7 @@ mod tests {
fixups.remove,
span_map.span_for_range(TextRange::empty(0.into())),
DocCommentDesugarMode::Mbe,
+ |_, _| (true, Vec::new()),
);
let actual = format!("{tt}\n");
@@ -697,7 +698,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {a . __ra_fixup ;}
+fn foo () {a .__ra_fixup ;}
"#]],
)
}
@@ -712,7 +713,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {a . __ra_fixup ; bar () ;}
+fn foo () {a .__ra_fixup ; bar () ;}
"#]],
)
}
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 77f61dd830..157a5310bd 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -25,18 +25,17 @@ mod cfg_process;
mod fixup;
mod prettify_macro_expansion_;
-use attrs::collect_attrs;
-use rustc_hash::FxHashMap;
use salsa::plumbing::{AsId, FromId};
use stdx::TupleExt;
+use thin_vec::ThinVec;
use triomphe::Arc;
use core::fmt;
-use std::hash::Hash;
+use std::{hash::Hash, ops};
use base_db::Crate;
use either::Either;
-use span::{Edition, ErasedFileAstId, FileAstId, Span, SpanAnchor, SyntaxContext};
+use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContext};
use syntax::{
SyntaxNode, SyntaxToken, TextRange, TextSize,
ast::{self, AstNode},
@@ -317,9 +316,6 @@ pub enum MacroCallKind {
Derive {
ast_id: AstId<ast::Adt>,
/// Syntactical index of the invoking `#[derive]` attribute.
- ///
- /// Outer attributes are counted first, then inner attributes. This does not support
- /// out-of-line modules, which may have attributes spread across 2 files!
derive_attr_index: AttrId,
/// Index of the derive macro in the derive attribute
derive_index: u32,
@@ -329,17 +325,68 @@ pub enum MacroCallKind {
},
Attr {
ast_id: AstId<ast::Item>,
- // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index`
- // but we need to fix the `cfg_attr` handling first.
+ // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index`.
attr_args: Option<Arc<tt::TopSubtree>>,
- /// Syntactical index of the invoking `#[attribute]`.
+ /// This contains the list of all *active* attributes (derives and attr macros) preceding this
+ /// attribute, including this attribute. You can retrieve the [`AttrId`] of the current attribute
+ /// by calling [`invoc_attr()`] on this.
+ ///
+ /// The macro should not see the attributes here.
///
- /// Outer attributes are counted first, then inner attributes. This does not support
- /// out-of-line modules, which may have attributes spread across 2 files!
- invoc_attr_index: AttrId,
+ /// [`invoc_attr()`]: AttrMacroAttrIds::invoc_attr
+ censored_attr_ids: AttrMacroAttrIds,
},
}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AttrMacroAttrIds(AttrMacroAttrIdsRepr);
+
+impl AttrMacroAttrIds {
+ #[inline]
+ pub fn from_one(id: AttrId) -> Self {
+ Self(AttrMacroAttrIdsRepr::One(id))
+ }
+
+ #[inline]
+ pub fn from_many(ids: &[AttrId]) -> Self {
+ if let &[id] = ids {
+ Self(AttrMacroAttrIdsRepr::One(id))
+ } else {
+ Self(AttrMacroAttrIdsRepr::ManyDerives(ids.iter().copied().collect()))
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+enum AttrMacroAttrIdsRepr {
+ One(AttrId),
+ ManyDerives(ThinVec<AttrId>),
+}
+
+impl ops::Deref for AttrMacroAttrIds {
+ type Target = [AttrId];
+
+ #[inline]
+ fn deref(&self) -> &Self::Target {
+ match &self.0 {
+ AttrMacroAttrIdsRepr::One(one) => std::slice::from_ref(one),
+ AttrMacroAttrIdsRepr::ManyDerives(many) => many,
+ }
+ }
+}
+
+impl AttrMacroAttrIds {
+ #[inline]
+ pub fn invoc_attr(&self) -> AttrId {
+ match &self.0 {
+ AttrMacroAttrIdsRepr::One(it) => *it,
+ AttrMacroAttrIdsRepr::ManyDerives(it) => {
+ *it.last().expect("should always have at least one `AttrId`")
+ }
+ }
+ }
+}
+
impl MacroCallKind {
pub(crate) fn call_style(&self) -> MacroCallStyle {
match self {
@@ -597,34 +644,20 @@ impl MacroDefId {
impl MacroCallLoc {
pub fn to_node(&self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
- match self.kind {
+ match &self.kind {
MacroCallKind::FnLike { ast_id, .. } => {
ast_id.with_value(ast_id.to_node(db).syntax().clone())
}
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: handle `cfg_attr`
- ast_id.with_value(ast_id.to_node(db)).map(|it| {
- collect_attrs(&it)
- .nth(derive_attr_index.ast_index())
- .and_then(|it| match it.1 {
- Either::Left(attr) => Some(attr.syntax().clone()),
- Either::Right(_) => None,
- })
- .unwrap_or_else(|| it.syntax().clone())
- })
+ let (attr, _, _, _) = derive_attr_index.find_attr_range(db, self.krate, *ast_id);
+ ast_id.with_value(attr.syntax().clone())
}
- MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
if self.def.is_attribute_derive() {
- // FIXME: handle `cfg_attr`
- ast_id.with_value(ast_id.to_node(db)).map(|it| {
- collect_attrs(&it)
- .nth(invoc_attr_index.ast_index())
- .and_then(|it| match it.1 {
- Either::Left(attr) => Some(attr.syntax().clone()),
- Either::Right(_) => None,
- })
- .unwrap_or_else(|| it.syntax().clone())
- })
+ let (attr, _, _, _) =
+ attr_ids.invoc_attr().find_attr_range(db, self.krate, *ast_id);
+ ast_id.with_value(attr.syntax().clone())
} else {
ast_id.with_value(ast_id.to_node(db).syntax().clone())
}
@@ -729,7 +762,7 @@ impl MacroCallKind {
/// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
/// get the macro path (rustc shows the whole `ast::MacroCall`), attribute macros get the
/// attribute's range, and derives get only the specific derive that is being referred to.
- pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
+ pub fn original_call_range(self, db: &dyn ExpandDatabase, krate: Crate) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id() {
@@ -751,24 +784,11 @@ impl MacroCallKind {
}
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: should be the range of the macro name, not the whole derive
- // FIXME: handle `cfg_attr`
- collect_attrs(&ast_id.to_node(db))
- .nth(derive_attr_index.ast_index())
- .expect("missing derive")
- .1
- .expect_left("derive is a doc comment?")
- .syntax()
- .text_range()
+ derive_attr_index.find_attr_range(db, krate, ast_id).2
}
// FIXME: handle `cfg_attr`
- MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
- collect_attrs(&ast_id.to_node(db))
- .nth(invoc_attr_index.ast_index())
- .expect("missing attribute")
- .1
- .expect_left("attribute macro is a doc comment?")
- .syntax()
- .text_range()
+ MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
+ attr_ids.invoc_attr().find_attr_range(db, krate, ast_id).2
}
};
@@ -887,7 +907,8 @@ impl ExpansionInfo {
let span = self.exp_map.span_at(token.start());
match &self.arg_map {
SpanMap::RealSpanMap(_) => {
- let file_id = EditionedFileId::from_span(db, span.anchor.file_id).into();
+ let file_id =
+ EditionedFileId::from_span_guess_origin(db, span.anchor.file_id).into();
let anchor_offset =
db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start();
InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
@@ -943,7 +964,7 @@ pub fn map_node_range_up_rooted(
start = start.min(span.range.start());
end = end.max(span.range.end());
}
- let file_id = EditionedFileId::from_span(db, anchor.file_id);
+ let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some(FileRange { file_id, range: TextRange::new(start, end) + anchor_offset })
@@ -969,36 +990,12 @@ pub fn map_node_range_up(
start = start.min(span.range.start());
end = end.max(span.range.end());
}
- let file_id = EditionedFileId::from_span(db, anchor.file_id);
+ let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some((FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }, ctx))
}
-/// Maps up the text range out of the expansion hierarchy back into the original file its from.
-/// This version will aggregate the ranges of all spans with the same anchor and syntax context.
-pub fn map_node_range_up_aggregated(
- db: &dyn ExpandDatabase,
- exp_map: &ExpansionSpanMap,
- range: TextRange,
-) -> FxHashMap<(SpanAnchor, SyntaxContext), TextRange> {
- let mut map = FxHashMap::default();
- for span in exp_map.spans_for_range(range) {
- let range = map.entry((span.anchor, span.ctx)).or_insert_with(|| span.range);
- *range = TextRange::new(
- range.start().min(span.range.start()),
- range.end().max(span.range.end()),
- );
- }
- for ((anchor, _), range) in &mut map {
- let file_id = EditionedFileId::from_span(db, anchor.file_id);
- let anchor_offset =
- db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
- *range += anchor_offset;
- }
- map
-}
-
/// Looks up the span at the given offset.
pub fn span_for_offset(
db: &dyn ExpandDatabase,
@@ -1006,7 +1003,7 @@ pub fn span_for_offset(
offset: TextSize,
) -> (FileRange, SyntaxContext) {
let span = exp_map.span_at(offset);
- let file_id = EditionedFileId::from_span(db, span.anchor.file_id);
+ let file_id = EditionedFileId::from_span_guess_origin(db, span.anchor.file_id);
let anchor_offset =
db.ast_id_map(file_id.into()).get_erased(span.anchor.ast_id).text_range().start();
(FileRange { file_id, range: span.range + anchor_offset }, span.ctx)
@@ -1076,7 +1073,7 @@ impl ExpandTo {
}
}
-intern::impl_internable!(ModPath, attrs::AttrInput);
+intern::impl_internable!(ModPath);
#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
#[doc(alias = "MacroFileId")]
@@ -1139,6 +1136,14 @@ impl HirFileId {
HirFileId::MacroFile(_) => None,
}
}
+
+ #[inline]
+ pub fn krate(self, db: &dyn ExpandDatabase) -> Crate {
+ match self {
+ HirFileId::FileId(it) => it.krate(db),
+ HirFileId::MacroFile(it) => it.loc(db).krate,
+ }
+ }
}
impl PartialEq<EditionedFileId> for HirFileId {
diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs
index d84d978cdb..e9805e3f86 100644
--- a/crates/hir-expand/src/mod_path.rs
+++ b/crates/hir-expand/src/mod_path.rs
@@ -2,7 +2,7 @@
use std::{
fmt::{self, Display as _},
- iter,
+ iter::{self, Peekable},
};
use crate::{
@@ -12,10 +12,11 @@ use crate::{
tt,
};
use base_db::Crate;
-use intern::sym;
+use intern::{Symbol, sym};
+use parser::T;
use smallvec::SmallVec;
use span::{Edition, SyntaxContext};
-use syntax::{AstNode, ast};
+use syntax::{AstNode, SyntaxToken, ast};
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ModPath {
@@ -64,6 +65,58 @@ impl ModPath {
ModPath { kind, segments: SmallVec::new_const() }
}
+ pub fn from_tokens(
+ db: &dyn ExpandDatabase,
+ span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext,
+ is_abs: bool,
+ segments: impl Iterator<Item = SyntaxToken>,
+ ) -> Option<ModPath> {
+ let mut segments = segments.peekable();
+ let mut result = SmallVec::new_const();
+ let path_kind = if is_abs {
+ PathKind::Abs
+ } else {
+ let first = segments.next()?;
+ match first.kind() {
+ T![crate] => PathKind::Crate,
+ T![self] => PathKind::Super(handle_super(&mut segments)),
+ T![super] => PathKind::Super(1 + handle_super(&mut segments)),
+ T![ident] => {
+ let first_text = first.text();
+ if first_text == "$crate" {
+ let ctxt = span_for_range(first.text_range());
+ resolve_crate_root(db, ctxt)
+ .map(PathKind::DollarCrate)
+ .unwrap_or(PathKind::Crate)
+ } else {
+ result.push(Name::new_symbol_root(Symbol::intern(first_text)));
+ PathKind::Plain
+ }
+ }
+ _ => return None,
+ }
+ };
+ for segment in segments {
+ if segment.kind() != T![ident] {
+ return None;
+ }
+ result.push(Name::new_symbol_root(Symbol::intern(segment.text())));
+ }
+ if result.is_empty() {
+ return None;
+ }
+ result.shrink_to_fit();
+ return Some(ModPath { kind: path_kind, segments: result });
+
+ fn handle_super(segments: &mut Peekable<impl Iterator<Item = SyntaxToken>>) -> u8 {
+ let mut result = 0;
+ while segments.next_if(|it| it.kind() == T![super]).is_some() {
+ result += 1;
+ }
+ result
+ }
+ }
+
pub fn segments(&self) -> &[Name] {
&self.segments
}
diff --git a/crates/hir-expand/src/span_map.rs b/crates/hir-expand/src/span_map.rs
index e5a778a95c..8b0c0d72cd 100644
--- a/crates/hir-expand/src/span_map.rs
+++ b/crates/hir-expand/src/span_map.rs
@@ -1,13 +1,12 @@
//! Span maps for real files and macro expansions.
use span::{Span, SyntaxContext};
-use stdx::TupleExt;
use syntax::{AstNode, TextRange, ast};
use triomphe::Arc;
pub use span::RealSpanMap;
-use crate::{HirFileId, MacroCallId, attrs::collect_attrs, db::ExpandDatabase};
+use crate::{HirFileId, MacroCallId, db::ExpandDatabase};
pub type ExpansionSpanMap = span::SpanMap<SyntaxContext>;
@@ -110,26 +109,24 @@ pub(crate) fn real_span_map(
// them anchors too, but only if they have no attributes attached, as those might be proc-macros
// and using different anchors inside of them will prevent spans from being joinable.
tree.items().for_each(|item| match &item {
- ast::Item::ExternBlock(it)
- if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) =>
- {
+ ast::Item::ExternBlock(it) if ast::attrs_including_inner(it).next().is_none() => {
if let Some(extern_item_list) = it.extern_item_list() {
pairs.extend(
extern_item_list.extern_items().map(ast::Item::from).map(item_to_entry),
);
}
}
- ast::Item::Impl(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
+ ast::Item::Impl(it) if ast::attrs_including_inner(it).next().is_none() => {
if let Some(assoc_item_list) = it.assoc_item_list() {
pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry));
}
}
- ast::Item::Module(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
+ ast::Item::Module(it) if ast::attrs_including_inner(it).next().is_none() => {
if let Some(item_list) = it.item_list() {
pairs.extend(item_list.items().map(item_to_entry));
}
}
- ast::Item::Trait(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
+ ast::Item::Trait(it) if ast::attrs_including_inner(it).next().is_none() => {
if let Some(assoc_item_list) = it.assoc_item_list() {
pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry));
}
diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index 8430c3a41c..65250f94c2 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -6,6 +6,7 @@ mod tests;
use base_db::Crate;
use hir_def::{
ConstId, EnumVariantId, GeneralConstId, StaticId,
+ attrs::AttrFlags,
expr_store::Body,
hir::{Expr, ExprId},
type_ref::LiteralConstRef,
@@ -200,7 +201,7 @@ pub(crate) fn const_eval_discriminant_variant<'db>(
return Ok(value);
}
- let repr = db.enum_signature(loc.parent).repr;
+ let repr = AttrFlags::repr(db, loc.parent.into());
let is_signed = repr.and_then(|repr| repr.int).is_none_or(|int| int.is_signed());
let mir_body = db.monomorphized_mir_body(
diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs
index f6992dfc9f..79dc6e3672 100644
--- a/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -17,8 +17,8 @@ use std::fmt;
use hir_def::{
AdtId, ConstId, EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
- ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, db::DefDatabase, hir::Pat,
- item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
+ ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, attrs::AttrFlags,
+ db::DefDatabase, hir::Pat, item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
};
use hir_expand::{
HirFileId,
@@ -201,7 +201,7 @@ impl<'a> DeclValidator<'a> {
// Don't run the lint on extern "[not Rust]" fn items with the
// #[no_mangle] attribute.
- let no_mangle = self.db.attrs(func.into()).by_key(sym::no_mangle).exists();
+ let no_mangle = AttrFlags::query(self.db, func.into()).contains(AttrFlags::NO_MANGLE);
if no_mangle && data.abi.as_ref().is_some_and(|abi| *abi != sym::Rust) {
cov_mark::hit!(extern_func_no_mangle_ignored);
} else {
@@ -563,7 +563,7 @@ impl<'a> DeclValidator<'a> {
cov_mark::hit!(extern_static_incorrect_case_ignored);
return;
}
- if self.db.attrs(static_id.into()).by_key(sym::no_mangle).exists() {
+ if AttrFlags::query(self.db, static_id.into()).contains(AttrFlags::NO_MANGLE) {
cov_mark::hit!(no_mangle_static_incorrect_case_ignored);
return;
}
diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
index fb942e336e..c70c6b6119 100644
--- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
+++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -2,7 +2,9 @@
use std::{cell::LazyCell, fmt};
-use hir_def::{EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
+use hir_def::{
+ EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId, attrs::AttrFlags,
+};
use intern::sym;
use rustc_pattern_analysis::{
IndexVec, PatCx, PrivateUninhabitedField,
@@ -118,7 +120,7 @@ impl<'a, 'db> MatchCheckCtx<'a, 'db> {
/// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`.
fn is_foreign_non_exhaustive(&self, adt: hir_def::AdtId) -> bool {
let is_local = adt.krate(self.db) == self.module.krate();
- !is_local && self.db.attrs(adt.into()).by_key(sym::non_exhaustive).exists()
+ !is_local && AttrFlags::query(self.db, adt.into()).contains(AttrFlags::NON_EXHAUSTIVE)
}
fn variant_id_for_adt(
diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 53524d66a3..8ac7ab19cd 100644
--- a/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -144,7 +144,7 @@ struct UnsafeVisitor<'db> {
inside_assignment: bool,
inside_union_destructure: bool,
callback: &'db mut dyn FnMut(UnsafeDiagnostic),
- def_target_features: TargetFeatures,
+ def_target_features: TargetFeatures<'db>,
// FIXME: This needs to be the edition of the span of each call.
edition: Edition,
/// On some targets (WASM), calling safe functions with `#[target_feature]` is always safe, even when
@@ -162,7 +162,7 @@ impl<'db> UnsafeVisitor<'db> {
) -> Self {
let resolver = def.resolver(db);
let def_target_features = match def {
- DefWithBodyId::FunctionId(func) => TargetFeatures::from_attrs(&db.attrs(func.into())),
+ DefWithBodyId::FunctionId(func) => TargetFeatures::from_fn(db, func),
_ => TargetFeatures::default(),
};
let krate = resolver.module().krate();
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 411e8b8dc7..36b7015817 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -40,7 +40,7 @@ use hir_def::{
lang_item::LangItems,
layout::Integer,
resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
- signatures::{ConstSignature, StaticSignature},
+ signatures::{ConstSignature, EnumSignature, StaticSignature},
type_ref::{ConstRef, LifetimeRefId, TypeRef, TypeRefId},
};
use hir_expand::{mod_path::ModPath, name::Name};
@@ -108,7 +108,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
DefWithBodyId::ConstId(c) => ctx.collect_const(c, &db.const_signature(c)),
DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_signature(s)),
DefWithBodyId::VariantId(v) => {
- ctx.return_ty = match db.enum_signature(v.lookup(db).parent).variant_body_type() {
+ ctx.return_ty = match EnumSignature::variant_body_type(db, v.lookup(db).parent) {
hir_def::layout::IntegerType::Pointer(signed) => match signed {
true => ctx.types.isize,
false => ctx.types.usize,
@@ -829,7 +829,7 @@ pub(crate) struct InferenceContext<'body, 'db> {
/// Generally you should not resolve things via this resolver. Instead create a TyLoweringContext
/// and resolve the path via its methods. This will ensure proper error reporting.
pub(crate) resolver: Resolver<'db>,
- target_features: OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>,
+ target_features: OnceCell<(TargetFeatures<'db>, TargetFeatureIsSafeInTarget)>,
pub(crate) unstable_features: MethodResolutionUnstableFeatures,
pub(crate) edition: Edition,
pub(crate) generic_def: GenericDefId,
@@ -975,12 +975,10 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.resolver.krate()
}
- fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
+ fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget) {
let (target_features, target_feature_is_safe) = self.target_features.get_or_init(|| {
let target_features = match self.owner {
- DefWithBodyId::FunctionId(id) => {
- TargetFeatures::from_attrs(&self.db.attrs(id.into()))
- }
+ DefWithBodyId::FunctionId(id) => TargetFeatures::from_fn(self.db, id),
_ => TargetFeatures::default(),
};
let target_feature_is_safe = match &self.krate().workspace_data(self.db).target {
diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs
index 773a02c5f6..3e99ec22ed 100644
--- a/crates/hir-ty/src/infer/coerce.rs
+++ b/crates/hir-ty/src/infer/coerce.rs
@@ -37,10 +37,10 @@
use hir_def::{
CallableDefId,
+ attrs::AttrFlags,
hir::{ExprId, ExprOrPatId},
signatures::FunctionSignature,
};
-use intern::sym;
use rustc_ast_ir::Mutability;
use rustc_type_ir::{
BoundVar, DebruijnIndex, TyVid, TypeAndMut, TypeFoldable, TypeFolder, TypeSuperFoldable,
@@ -78,7 +78,7 @@ use crate::{
trait CoerceDelegate<'db> {
fn infcx(&self) -> &InferCtxt<'db>;
fn env(&self) -> &TraitEnvironment<'db>;
- fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget);
+ fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget);
fn set_diverging(&mut self, diverging_ty: Ty<'db>);
@@ -852,14 +852,14 @@ where
return Err(TypeError::IntrinsicCast);
}
- let attrs = self.db().attrs(def_id.into());
- if attrs.by_key(sym::rustc_force_inline).exists() {
+ let attrs = AttrFlags::query(self.db(), def_id.into());
+ if attrs.contains(AttrFlags::RUSTC_FORCE_INLINE) {
return Err(TypeError::ForceInlineCast);
}
- if b_hdr.safety.is_safe() && attrs.by_key(sym::target_feature).exists() {
+ if b_hdr.safety.is_safe() && attrs.contains(AttrFlags::HAS_TARGET_FEATURE) {
let fn_target_features =
- TargetFeatures::from_attrs_no_implications(&attrs);
+ TargetFeatures::from_fn_no_implications(self.db(), def_id);
// Allow the coercion if the current function has all the features that would be
// needed to call the coercee safely.
let (target_features, target_feature_is_safe) =
@@ -978,8 +978,9 @@ impl<'db> CoerceDelegate<'db> for InferenceCoercionDelegate<'_, '_, 'db> {
fn env(&self) -> &TraitEnvironment<'db> {
&self.0.table.trait_env
}
+
#[inline]
- fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
+ fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget) {
self.0.target_features()
}
@@ -1072,7 +1073,7 @@ impl<'db> InferenceContext<'_, 'db> {
let is_force_inline = |ty: Ty<'db>| {
if let TyKind::FnDef(CallableIdWrapper(CallableDefId::FunctionId(did)), _) = ty.kind() {
- self.db.attrs(did.into()).by_key(sym::rustc_force_inline).exists()
+ AttrFlags::query(self.db, did.into()).contains(AttrFlags::RUSTC_FORCE_INLINE)
} else {
false
}
@@ -1548,7 +1549,7 @@ pub fn could_coerce<'db>(
struct HirCoercionDelegate<'a, 'db> {
infcx: &'a InferCtxt<'db>,
env: &'a TraitEnvironment<'db>,
- target_features: &'a TargetFeatures,
+ target_features: &'a TargetFeatures<'db>,
}
impl<'db> CoerceDelegate<'db> for HirCoercionDelegate<'_, 'db> {
@@ -1560,7 +1561,7 @@ impl<'db> CoerceDelegate<'db> for HirCoercionDelegate<'_, 'db> {
fn env(&self) -> &TraitEnvironment<'db> {
self.env
}
- fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
+ fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget) {
(self.target_features, TargetFeatureIsSafeInTarget::No)
}
fn set_diverging(&mut self, _diverging_ty: Ty<'db>) {}
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index 200b3d71f8..3d4d6cecef 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -2188,9 +2188,11 @@ impl<'db> InferenceContext<'_, 'db> {
};
let data = self.db.function_signature(func);
- let Some(legacy_const_generics_indices) = &data.legacy_const_generics_indices else {
+ let Some(legacy_const_generics_indices) = data.legacy_const_generics_indices(self.db, func)
+ else {
return Default::default();
};
+ let mut legacy_const_generics_indices = Box::<[u32]>::from(legacy_const_generics_indices);
// only use legacy const generics if the param count matches with them
if data.params.len() + legacy_const_generics_indices.len() != args.len() {
@@ -2199,9 +2201,8 @@ impl<'db> InferenceContext<'_, 'db> {
} else {
// there are more parameters than there should be without legacy
// const params; use them
- let mut indices = legacy_const_generics_indices.as_ref().clone();
- indices.sort();
- return indices;
+ legacy_const_generics_indices.sort_unstable();
+ return legacy_const_generics_indices;
}
}
@@ -2214,9 +2215,8 @@ impl<'db> InferenceContext<'_, 'db> {
self.infer_expr(args[arg_idx as usize], &expected, ExprIsRead::Yes);
// FIXME: evaluate and unify with the const
}
- let mut indices = legacy_const_generics_indices.as_ref().clone();
- indices.sort();
- indices
+ legacy_const_generics_indices.sort_unstable();
+ legacy_const_generics_indices
}
pub(super) fn with_breakable_ctx<T>(
diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs
index 908b4dc5d7..658172d4b0 100644
--- a/crates/hir-ty/src/layout.rs
+++ b/crates/hir-ty/src/layout.rs
@@ -4,6 +4,7 @@ use std::fmt;
use hir_def::{
AdtId, LocalFieldId, StructId,
+ attrs::AttrFlags,
layout::{LayoutCalculatorError, LayoutData},
};
use la_arena::{Idx, RawIdx};
@@ -174,8 +175,7 @@ pub fn layout_of_ty_query<'db>(
TyKind::Adt(def, args) => {
match def.inner().id {
hir_def::AdtId::StructId(s) => {
- let data = db.struct_signature(s);
- let repr = data.repr.unwrap_or_default();
+ let repr = AttrFlags::repr(db, s.into()).unwrap_or_default();
if repr.simd() {
return layout_of_simd_ty(db, s, repr.packed(), &args, trait_env, &target);
}
diff --git a/crates/hir-ty/src/layout/adt.rs b/crates/hir-ty/src/layout/adt.rs
index a8f04bf8c1..ecebf7935d 100644
--- a/crates/hir-ty/src/layout/adt.rs
+++ b/crates/hir-ty/src/layout/adt.rs
@@ -4,9 +4,9 @@ use std::{cmp, ops::Bound};
use hir_def::{
AdtId, VariantId,
+ attrs::AttrFlags,
signatures::{StructFlags, VariantFields},
};
-use intern::sym;
use rustc_abi::{Integer, ReprOptions, TargetDataLayout};
use rustc_index::IndexVec;
use smallvec::SmallVec;
@@ -44,15 +44,15 @@ pub fn layout_of_adt_query<'db>(
r.push(handle_variant(s.into(), s.fields(db))?);
(
r,
- sig.repr.unwrap_or_default(),
+ AttrFlags::repr(db, s.into()).unwrap_or_default(),
sig.flags.intersects(StructFlags::IS_UNSAFE_CELL | StructFlags::IS_UNSAFE_PINNED),
)
}
AdtId::UnionId(id) => {
- let data = db.union_signature(id);
+ let repr = AttrFlags::repr(db, id.into());
let mut r = SmallVec::new();
r.push(handle_variant(id.into(), id.fields(db))?);
- (r, data.repr.unwrap_or_default(), false)
+ (r, repr.unwrap_or_default(), false)
}
AdtId::EnumId(e) => {
let variants = e.enum_variants(db);
@@ -61,7 +61,7 @@ pub fn layout_of_adt_query<'db>(
.iter()
.map(|&(v, _, _)| handle_variant(v.into(), v.fields(db)))
.collect::<Result<SmallVec<_>, _>>()?;
- (r, db.enum_signature(e).repr.unwrap_or_default(), false)
+ (r, AttrFlags::repr(db, e.into()).unwrap_or_default(), false)
}
};
let variants = variants
@@ -105,27 +105,12 @@ pub(crate) fn layout_of_adt_cycle_result<'db>(
}
fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>, Bound<u128>) {
- let attrs = db.attrs(def.into());
- let get = |name| {
- let attr = attrs.by_key(name).tt_values();
- for tree in attr {
- if let Some(it) = tree.iter().next_as_view() {
- let text = it.to_string().replace('_', "");
- let (text, base) = match text.as_bytes() {
- [b'0', b'x', ..] => (&text[2..], 16),
- [b'0', b'o', ..] => (&text[2..], 8),
- [b'0', b'b', ..] => (&text[2..], 2),
- _ => (&*text, 10),
- };
-
- if let Ok(it) = u128::from_str_radix(text, base) {
- return Bound::Included(it);
- }
- }
- }
- Bound::Unbounded
+ let range = AttrFlags::rustc_layout_scalar_valid_range(db, def);
+ let get = |value| match value {
+ Some(it) => Bound::Included(it),
+ None => Bound::Unbounded,
};
- (get(sym::rustc_layout_scalar_valid_range_start), get(sym::rustc_layout_scalar_valid_range_end))
+ (get(range.start), get(range.end))
}
/// Finds the appropriate Integer type and signedness for the given
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index 4ec01e61b8..8da6baba5d 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -15,6 +15,7 @@ use base_db::Crate;
use hir_def::{
AssocItemId, BlockId, ConstId, FunctionId, GenericParamId, HasModule, ImplId, ItemContainerId,
ModuleId, TraitId,
+ attrs::AttrFlags,
expr_store::path::GenericArgs as HirGenericArgs,
hir::ExprId,
nameres::{DefMap, block_def_map, crate_def_map},
@@ -509,9 +510,8 @@ fn crates_containing_incoherent_inherent_impls(db: &dyn HirDatabase) -> Box<[Cra
pub fn incoherent_inherent_impls(db: &dyn HirDatabase, self_ty: SimplifiedType) -> &[ImplId] {
let has_incoherent_impls = match self_ty.def() {
Some(def_id) => match def_id.try_into() {
- Ok(def_id) => {
- db.attrs(def_id).by_key(sym::rustc_has_incoherent_inherent_impls).exists()
- }
+ Ok(def_id) => AttrFlags::query(db, def_id)
+ .contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
Err(()) => true,
},
_ => true,
@@ -715,7 +715,9 @@ impl TraitImpls {
// FIXME: Reservation impls should be considered during coherence checks. If we are
// (ever) to implement coherence checks, this filtering should be done by the trait
// solver.
- if db.attrs(impl_id.into()).by_key(sym::rustc_reservation_impl).exists() {
+ if AttrFlags::query(db, impl_id.into())
+ .contains(AttrFlags::RUSTC_RESERVATION_IMPL)
+ {
continue;
}
let trait_ref = match db.impl_trait(impl_id) {
diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs
index c61afd62c4..cf08c82408 100644
--- a/crates/hir-ty/src/mir/eval/shim.rs
+++ b/crates/hir-ty/src/mir/eval/shim.rs
@@ -3,9 +3,9 @@
//!
use std::cmp::{self, Ordering};
-use hir_def::signatures::FunctionSignature;
+use hir_def::{attrs::AttrFlags, signatures::FunctionSignature};
use hir_expand::name::Name;
-use intern::{Symbol, sym};
+use intern::sym;
use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike, Ty as _};
use stdx::never;
@@ -60,7 +60,7 @@ impl<'db> Evaluator<'db> {
}
let function_data = self.db.function_signature(def);
- let attrs = self.db.attrs(def.into());
+ let attrs = AttrFlags::query(self.db, def.into());
let is_intrinsic = FunctionSignature::is_intrinsic(self.db, def);
if is_intrinsic {
@@ -72,7 +72,7 @@ impl<'db> Evaluator<'db> {
locals,
span,
!function_data.has_body()
- || attrs.by_key(sym::rustc_intrinsic_must_be_overridden).exists(),
+ || attrs.contains(AttrFlags::RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN),
);
}
let is_extern_c = match def.lookup(self.db).container {
@@ -92,18 +92,13 @@ impl<'db> Evaluator<'db> {
.map(|()| true);
}
- let alloc_fn =
- attrs.iter().filter_map(|it| it.path().as_ident()).map(|it| it.symbol()).find(|it| {
- [
- &sym::rustc_allocator,
- &sym::rustc_deallocator,
- &sym::rustc_reallocator,
- &sym::rustc_allocator_zeroed,
- ]
- .contains(it)
- });
- if let Some(alloc_fn) = alloc_fn {
- self.exec_alloc_fn(alloc_fn, args, destination)?;
+ if attrs.intersects(
+ AttrFlags::RUSTC_ALLOCATOR
+ | AttrFlags::RUSTC_DEALLOCATOR
+ | AttrFlags::RUSTC_REALLOCATOR
+ | AttrFlags::RUSTC_ALLOCATOR_ZEROED,
+ ) {
+ self.exec_alloc_fn(attrs, args, destination)?;
return Ok(true);
}
if let Some(it) = self.detect_lang_function(def) {
@@ -248,12 +243,14 @@ impl<'db> Evaluator<'db> {
fn exec_alloc_fn(
&mut self,
- alloc_fn: &Symbol,
+ alloc_fn: AttrFlags,
args: &[IntervalAndTy<'db>],
destination: Interval,
) -> Result<'db, ()> {
match alloc_fn {
- _ if *alloc_fn == sym::rustc_allocator_zeroed || *alloc_fn == sym::rustc_allocator => {
+ _ if alloc_fn
+ .intersects(AttrFlags::RUSTC_ALLOCATOR_ZEROED | AttrFlags::RUSTC_ALLOCATOR) =>
+ {
let [size, align] = args else {
return Err(MirEvalError::InternalError(
"rustc_allocator args are not provided".into(),
@@ -264,8 +261,8 @@ impl<'db> Evaluator<'db> {
let result = self.heap_allocate(size, align)?;
destination.write_from_bytes(self, &result.to_bytes())?;
}
- _ if *alloc_fn == sym::rustc_deallocator => { /* no-op for now */ }
- _ if *alloc_fn == sym::rustc_reallocator => {
+ _ if alloc_fn.contains(AttrFlags::RUSTC_DEALLOCATOR) => { /* no-op for now */ }
+ _ if alloc_fn.contains(AttrFlags::RUSTC_REALLOCATOR) => {
let [ptr, old_size, align, new_size] = args else {
return Err(MirEvalError::InternalError(
"rustc_allocator args are not provided".into(),
@@ -292,9 +289,9 @@ impl<'db> Evaluator<'db> {
fn detect_lang_function(&self, def: FunctionId) -> Option<EvalLangItem> {
use EvalLangItem::*;
let lang_items = self.lang_items();
- let attrs = self.db.attrs(def.into());
+ let attrs = AttrFlags::query(self.db, def.into());
- if attrs.by_key(sym::rustc_const_panic_str).exists() {
+ if attrs.contains(AttrFlags::RUSTC_CONST_PANIC_STR) {
// `#[rustc_const_panic_str]` is treated like `lang = "begin_panic"` by rustc CTFE.
return Some(BeginPanic);
}
diff --git a/crates/hir-ty/src/next_solver/interner.rs b/crates/hir-ty/src/next_solver/interner.rs
index 82c4235b2f..75b5c618d0 100644
--- a/crates/hir-ty/src/next_solver/interner.rs
+++ b/crates/hir-ty/src/next_solver/interner.rs
@@ -10,6 +10,7 @@ use base_db::Crate;
use hir_def::{
AdtId, BlockId, CallableDefId, DefWithBodyId, EnumVariantId, HasModule, ItemContainerId,
StructId, UnionId, VariantId,
+ attrs::AttrFlags,
lang_item::LangItems,
signatures::{FieldData, FnFlags, ImplFlags, StructFlags, TraitFlags},
};
@@ -507,28 +508,28 @@ impl AdtDef {
let variants = vec![(VariantIdx(0), VariantDef::Struct(struct_id))];
- let mut repr = ReprOptions::default();
- repr.align = data.repr.and_then(|r| r.align);
- repr.pack = data.repr.and_then(|r| r.pack);
- repr.int = data.repr.and_then(|r| r.int);
-
+ let data_repr = data.repr(db, struct_id);
let mut repr_flags = ReprFlags::empty();
if flags.is_box {
repr_flags.insert(ReprFlags::IS_LINEAR);
}
- if data.repr.is_some_and(|r| r.c()) {
+ if data_repr.is_some_and(|r| r.c()) {
repr_flags.insert(ReprFlags::IS_C);
}
- if data.repr.is_some_and(|r| r.simd()) {
+ if data_repr.is_some_and(|r| r.simd()) {
repr_flags.insert(ReprFlags::IS_SIMD);
}
- repr.flags = repr_flags;
+ let repr = ReprOptions {
+ align: data_repr.and_then(|r| r.align),
+ pack: data_repr.and_then(|r| r.pack),
+ int: data_repr.and_then(|r| r.int),
+ flags: repr_flags,
+ ..ReprOptions::default()
+ };
(flags, variants, repr)
}
AdtId::UnionId(union_id) => {
- let data = db.union_signature(union_id);
-
let flags = AdtFlags {
is_enum: false,
is_union: true,
@@ -541,22 +542,24 @@ impl AdtDef {
let variants = vec![(VariantIdx(0), VariantDef::Union(union_id))];
- let mut repr = ReprOptions::default();
- repr.align = data.repr.and_then(|r| r.align);
- repr.pack = data.repr.and_then(|r| r.pack);
- repr.int = data.repr.and_then(|r| r.int);
-
+ let data_repr = AttrFlags::repr(db, union_id.into());
let mut repr_flags = ReprFlags::empty();
if flags.is_box {
repr_flags.insert(ReprFlags::IS_LINEAR);
}
- if data.repr.is_some_and(|r| r.c()) {
+ if data_repr.is_some_and(|r| r.c()) {
repr_flags.insert(ReprFlags::IS_C);
}
- if data.repr.is_some_and(|r| r.simd()) {
+ if data_repr.is_some_and(|r| r.simd()) {
repr_flags.insert(ReprFlags::IS_SIMD);
}
- repr.flags = repr_flags;
+ let repr = ReprOptions {
+ align: data_repr.and_then(|r| r.align),
+ pack: data_repr.and_then(|r| r.pack),
+ int: data_repr.and_then(|r| r.int),
+ flags: repr_flags,
+ ..ReprOptions::default()
+ };
(flags, variants, repr)
}
@@ -580,24 +583,26 @@ impl AdtDef {
.map(|(idx, v)| (idx, VariantDef::Enum(v.0)))
.collect();
- let data = db.enum_signature(enum_id);
-
- let mut repr = ReprOptions::default();
- repr.align = data.repr.and_then(|r| r.align);
- repr.pack = data.repr.and_then(|r| r.pack);
- repr.int = data.repr.and_then(|r| r.int);
+ let data_repr = AttrFlags::repr(db, enum_id.into());
let mut repr_flags = ReprFlags::empty();
if flags.is_box {
repr_flags.insert(ReprFlags::IS_LINEAR);
}
- if data.repr.is_some_and(|r| r.c()) {
+ if data_repr.is_some_and(|r| r.c()) {
repr_flags.insert(ReprFlags::IS_C);
}
- if data.repr.is_some_and(|r| r.simd()) {
+ if data_repr.is_some_and(|r| r.simd()) {
repr_flags.insert(ReprFlags::IS_SIMD);
}
- repr.flags = repr_flags;
+
+ let repr = ReprOptions {
+ align: data_repr.and_then(|r| r.align),
+ pack: data_repr.and_then(|r| r.pack),
+ int: data_repr.and_then(|r| r.int),
+ flags: repr_flags,
+ ..ReprOptions::default()
+ };
(flags, variants, repr)
}
diff --git a/crates/hir-ty/src/target_feature.rs b/crates/hir-ty/src/target_feature.rs
index 0a8ed2cf0c..2bd675ba12 100644
--- a/crates/hir-ty/src/target_feature.rs
+++ b/crates/hir-ty/src/target_feature.rs
@@ -1,31 +1,35 @@
//! Stuff for handling `#[target_feature]` (needed for unsafe check).
+use std::borrow::Cow;
use std::sync::LazyLock;
-use hir_def::attr::Attrs;
-use hir_def::tt;
-use intern::{Symbol, sym};
+use hir_def::FunctionId;
+use hir_def::attrs::AttrFlags;
+use intern::Symbol;
use rustc_hash::{FxHashMap, FxHashSet};
+use crate::db::HirDatabase;
+
#[derive(Debug, Default, Clone)]
-pub struct TargetFeatures {
- pub(crate) enabled: FxHashSet<Symbol>,
+pub struct TargetFeatures<'db> {
+ pub(crate) enabled: Cow<'db, FxHashSet<Symbol>>,
}
-impl TargetFeatures {
- pub fn from_attrs(attrs: &Attrs) -> Self {
- let mut result = TargetFeatures::from_attrs_no_implications(attrs);
+impl<'db> TargetFeatures<'db> {
+ pub fn from_fn(db: &'db dyn HirDatabase, owner: FunctionId) -> Self {
+ let mut result = TargetFeatures::from_fn_no_implications(db, owner);
result.expand_implications();
result
}
fn expand_implications(&mut self) {
let all_implications = LazyLock::force(&TARGET_FEATURE_IMPLICATIONS);
- let mut queue = self.enabled.iter().cloned().collect::<Vec<_>>();
+ let enabled = self.enabled.to_mut();
+ let mut queue = enabled.iter().cloned().collect::<Vec<_>>();
while let Some(feature) = queue.pop() {
if let Some(implications) = all_implications.get(&feature) {
for implication in implications {
- if self.enabled.insert(implication.clone()) {
+ if enabled.insert(implication.clone()) {
queue.push(implication.clone());
}
}
@@ -34,25 +38,9 @@ impl TargetFeatures {
}
/// Retrieves the target features from the attributes, and does not expand the target features implied by them.
- pub(crate) fn from_attrs_no_implications(attrs: &Attrs) -> Self {
- let enabled = attrs
- .by_key(sym::target_feature)
- .tt_values()
- .filter_map(|tt| match tt.token_trees().flat_tokens() {
- [
- tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)),
- tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })),
- tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
- kind: tt::LitKind::Str,
- symbol: features,
- ..
- })),
- ] if enable_ident.sym == sym::enable => Some(features),
- _ => None,
- })
- .flat_map(|features| features.as_str().split(',').map(Symbol::intern))
- .collect();
- Self { enabled }
+ pub(crate) fn from_fn_no_implications(db: &'db dyn HirDatabase, owner: FunctionId) -> Self {
+ let enabled = AttrFlags::target_features(db, owner);
+ Self { enabled: Cow::Borrowed(enabled) }
}
}
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index 62ca2a0592..87291b619a 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -31,7 +31,6 @@ fn foo() -> i32 {
&[("infer_shim", 1)],
expect_test::expect![[r#"
[
- "source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@@ -40,7 +39,7 @@ fn foo() -> i32 {
"infer_shim",
"function_signature_shim",
"function_signature_with_source_map_shim",
- "attrs_shim",
+ "AttrFlags::query_",
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
@@ -80,7 +79,7 @@ fn foo() -> i32 {
"ast_id_map_shim",
"file_item_tree_query",
"real_span_map_shim",
- "attrs_shim",
+ "AttrFlags::query_",
"function_signature_with_source_map_shim",
"function_signature_shim",
"body_with_source_map_shim",
@@ -119,7 +118,6 @@ fn baz() -> i32 {
&[("infer_shim", 3)],
expect_test::expect![[r#"
[
- "source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@@ -128,14 +126,14 @@ fn baz() -> i32 {
"infer_shim",
"function_signature_shim",
"function_signature_with_source_map_shim",
- "attrs_shim",
+ "AttrFlags::query_",
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"lang_items",
"crate_lang_items",
- "attrs_shim",
- "attrs_shim",
+ "AttrFlags::query_",
+ "AttrFlags::query_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
"expr_scopes_shim",
@@ -193,13 +191,13 @@ fn baz() -> i32 {
"ast_id_map_shim",
"file_item_tree_query",
"real_span_map_shim",
- "attrs_shim",
+ "AttrFlags::query_",
"function_signature_with_source_map_shim",
"function_signature_shim",
"body_with_source_map_shim",
"body_shim",
- "attrs_shim",
- "attrs_shim",
+ "AttrFlags::query_",
+ "AttrFlags::query_",
"function_signature_with_source_map_shim",
"function_signature_shim",
"body_with_source_map_shim",
@@ -239,7 +237,6 @@ $0",
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
- "source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@@ -311,7 +308,6 @@ $0",
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
- "source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@@ -384,7 +380,6 @@ $0",
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
- "source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@@ -458,7 +453,6 @@ $0",
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
- "source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@@ -507,17 +501,17 @@ impl SomeStruct {
"real_span_map_shim",
"crate_local_def_map",
"TraitImpls::for_crate_",
- "attrs_shim",
+ "AttrFlags::query_",
"impl_trait_with_diagnostics_shim",
"impl_signature_shim",
"impl_signature_with_source_map_shim",
"lang_items",
"crate_lang_items",
"ImplItems::of_",
- "attrs_shim",
- "attrs_shim",
- "attrs_shim",
- "attrs_shim",
+ "AttrFlags::query_",
+ "AttrFlags::query_",
+ "AttrFlags::query_",
+ "AttrFlags::query_",
"impl_self_ty_with_diagnostics_shim",
"struct_signature_shim",
"struct_signature_with_source_map_shim",
@@ -570,7 +564,6 @@ fn main() {
&[("trait_solve_shim", 0)],
expect_test::expect![[r#"
[
- "source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@@ -579,22 +572,22 @@ fn main() {
"TraitItems::query_with_diagnostics_",
"body_shim",
"body_with_source_map_shim",
- "attrs_shim",
+ "AttrFlags::query_",
"ImplItems::of_",
"infer_shim",
"trait_signature_shim",
"trait_signature_with_source_map_shim",
- "attrs_shim",
+ "AttrFlags::query_",
"function_signature_shim",
"function_signature_with_source_map_shim",
- "attrs_shim",
+ "AttrFlags::query_",
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"lang_items",
"crate_lang_items",
- "attrs_shim",
- "attrs_shim",
+ "AttrFlags::query_",
+ "AttrFlags::query_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
@@ -675,21 +668,21 @@ fn main() {
"crate_local_def_map",
"TraitItems::query_with_diagnostics_",
"body_with_source_map_shim",
- "attrs_shim",
+ "AttrFlags::query_",
"body_shim",
"ImplItems::of_",
"infer_shim",
- "attrs_shim",
+ "AttrFlags::query_",
"trait_signature_with_source_map_shim",
- "attrs_shim",
+ "AttrFlags::query_",
"function_signature_with_source_map_shim",
"function_signature_shim",
"body_with_source_map_shim",
"body_shim",
"crate_lang_items",
- "attrs_shim",
- "attrs_shim",
- "attrs_shim",
+ "AttrFlags::query_",
+ "AttrFlags::query_",
+ "AttrFlags::query_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs
index 820ccd9908..efc0ac2bf8 100644
--- a/crates/hir-ty/src/utils.rs
+++ b/crates/hir-ty/src/utils.rs
@@ -6,6 +6,7 @@ use std::cell::LazyCell;
use base_db::target::{self, TargetData};
use hir_def::{
EnumId, EnumVariantId, FunctionId, Lookup, TraitId,
+ attrs::AttrFlags,
db::DefDatabase,
hir::generics::WherePredicate,
lang_item::LangItems,
@@ -114,7 +115,7 @@ pub fn target_feature_is_safe_in_target(target: &TargetData) -> TargetFeatureIsS
pub fn is_fn_unsafe_to_call(
db: &dyn HirDatabase,
func: FunctionId,
- caller_target_features: &TargetFeatures,
+ caller_target_features: &TargetFeatures<'_>,
call_edition: Edition,
target_feature_is_safe: TargetFeatureIsSafeInTarget,
) -> Unsafety {
@@ -125,8 +126,7 @@ pub fn is_fn_unsafe_to_call(
if data.has_target_feature() && target_feature_is_safe == TargetFeatureIsSafeInTarget::No {
// RFC 2396 <https://rust-lang.github.io/rfcs/2396-target-feature-1.1.html>.
- let callee_target_features =
- TargetFeatures::from_attrs_no_implications(&db.attrs(func.into()));
+ let callee_target_features = TargetFeatures::from_fn_no_implications(db, func);
if !caller_target_features.enabled.is_superset(&callee_target_features.enabled) {
return Unsafety::Unsafe;
}
@@ -147,7 +147,7 @@ pub fn is_fn_unsafe_to_call(
if is_intrinsic_block {
// legacy intrinsics
// extern "rust-intrinsic" intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute
- if db.attrs(func.into()).by_key(sym::rustc_safe_intrinsic).exists() {
+ if AttrFlags::query(db, func.into()).contains(AttrFlags::RUSTC_SAFE_INTRINSIC) {
Unsafety::Safe
} else {
Unsafety::Unsafe
diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs
index f1ca6cc4a9..b57ca9a120 100644
--- a/crates/hir/src/attrs.rs
+++ b/crates/hir/src/attrs.rs
@@ -1,8 +1,11 @@
//! Attributes & documentation for hir types.
+use cfg::CfgExpr;
+use either::Either;
use hir_def::{
- AssocItemId, AttrDefId, ModuleDefId,
- attr::AttrsWithOwner,
+ AssocItemId, AttrDefId, FieldId, InternedModuleId, LifetimeParamId, ModuleDefId,
+ TypeOrConstParamId,
+ attrs::{AttrFlags, Docs, IsInnerDoc},
expr_store::path::Path,
item_scope::ItemInNs,
per_ns::Namespace,
@@ -19,35 +22,169 @@ use hir_ty::{
},
next_solver::{DbInterner, TypingMode, infer::DbInternerInferExt},
};
+use intern::Symbol;
use crate::{
Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
- Field, Function, GenericParam, HasCrate, Impl, LifetimeParam, Macro, Module, ModuleDef, Static,
- Struct, Trait, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
+ Field, Function, GenericParam, HasCrate, Impl, LangItem, LifetimeParam, Macro, Module,
+ ModuleDef, Static, Struct, Trait, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
};
-pub trait HasAttrs {
- fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner;
+#[derive(Debug, Clone, Copy)]
+pub enum AttrsOwner {
+ AttrDef(AttrDefId),
+ Field(FieldId),
+ LifetimeParam(LifetimeParamId),
+ TypeOrConstParam(TypeOrConstParamId),
+}
+
+impl AttrsOwner {
+ #[inline]
+ fn attr_def(&self) -> Option<AttrDefId> {
+ match self {
+ AttrsOwner::AttrDef(it) => Some(*it),
+ _ => None,
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct AttrsWithOwner {
+ pub(crate) attrs: AttrFlags,
+ owner: AttrsOwner,
+}
+
+impl AttrsWithOwner {
+ fn new(db: &dyn HirDatabase, owner: AttrDefId) -> Self {
+ Self { attrs: AttrFlags::query(db, owner), owner: AttrsOwner::AttrDef(owner) }
+ }
+
+ fn new_field(db: &dyn HirDatabase, owner: FieldId) -> Self {
+ Self { attrs: AttrFlags::query_field(db, owner), owner: AttrsOwner::Field(owner) }
+ }
+
+ fn new_lifetime_param(db: &dyn HirDatabase, owner: LifetimeParamId) -> Self {
+ Self {
+ attrs: AttrFlags::query_lifetime_param(db, owner),
+ owner: AttrsOwner::LifetimeParam(owner),
+ }
+ }
+ fn new_type_or_const_param(db: &dyn HirDatabase, owner: TypeOrConstParamId) -> Self {
+ Self {
+ attrs: AttrFlags::query_type_or_const_param(db, owner),
+ owner: AttrsOwner::TypeOrConstParam(owner),
+ }
+ }
+
+ #[inline]
+ pub fn is_unstable(&self) -> bool {
+ self.attrs.contains(AttrFlags::IS_UNSTABLE)
+ }
+
+ #[inline]
+ pub fn is_macro_export(&self) -> bool {
+ self.attrs.contains(AttrFlags::IS_MACRO_EXPORT)
+ }
+
+ #[inline]
+ pub fn is_doc_notable_trait(&self) -> bool {
+ self.attrs.contains(AttrFlags::IS_DOC_NOTABLE_TRAIT)
+ }
+
+ #[inline]
+ pub fn is_doc_hidden(&self) -> bool {
+ self.attrs.contains(AttrFlags::IS_DOC_HIDDEN)
+ }
+
+ #[inline]
+ pub fn is_deprecated(&self) -> bool {
+ self.attrs.contains(AttrFlags::IS_DEPRECATED)
+ }
+
+ #[inline]
+ pub fn is_non_exhaustive(&self) -> bool {
+ self.attrs.contains(AttrFlags::NON_EXHAUSTIVE)
+ }
+
+ #[inline]
+ pub fn is_test(&self) -> bool {
+ self.attrs.contains(AttrFlags::IS_TEST)
+ }
+
+ #[inline]
+ pub fn lang(&self, db: &dyn HirDatabase) -> Option<LangItem> {
+ self.owner
+ .attr_def()
+ .and_then(|owner| self.attrs.lang_item_with_attrs(db, owner))
+ .and_then(|lang| LangItem::from_symbol(&lang))
+ }
+
+ #[inline]
+ pub fn doc_aliases<'db>(&self, db: &'db dyn HirDatabase) -> &'db [Symbol] {
+ let owner = match self.owner {
+ AttrsOwner::AttrDef(it) => Either::Left(it),
+ AttrsOwner::Field(it) => Either::Right(it),
+ AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return &[],
+ };
+ self.attrs.doc_aliases(db, owner)
+ }
+
+ #[inline]
+ pub fn cfgs<'db>(&self, db: &'db dyn HirDatabase) -> Option<&'db CfgExpr> {
+ let owner = match self.owner {
+ AttrsOwner::AttrDef(it) => Either::Left(it),
+ AttrsOwner::Field(it) => Either::Right(it),
+ AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return None,
+ };
+ self.attrs.cfgs(db, owner)
+ }
+
+ #[inline]
+ pub fn hir_docs<'db>(&self, db: &'db dyn HirDatabase) -> Option<&'db Docs> {
+ match self.owner {
+ AttrsOwner::AttrDef(it) => AttrFlags::docs(db, it).as_deref(),
+ AttrsOwner::Field(it) => AttrFlags::field_docs(db, it),
+ AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => None,
+ }
+ }
+}
+
+pub trait HasAttrs: Sized {
+ #[inline]
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+ match self.attr_id(db) {
+ AttrsOwner::AttrDef(it) => AttrsWithOwner::new(db, it),
+ AttrsOwner::Field(it) => AttrsWithOwner::new_field(db, it),
+ AttrsOwner::LifetimeParam(it) => AttrsWithOwner::new_lifetime_param(db, it),
+ AttrsOwner::TypeOrConstParam(it) => AttrsWithOwner::new_type_or_const_param(db, it),
+ }
+ }
+
#[doc(hidden)]
- fn attr_id(self) -> AttrDefId;
+ fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner;
+
+ #[inline]
+ fn hir_docs(self, db: &dyn HirDatabase) -> Option<&Docs> {
+ match self.attr_id(db) {
+ AttrsOwner::AttrDef(it) => AttrFlags::docs(db, it).as_deref(),
+ AttrsOwner::Field(it) => AttrFlags::field_docs(db, it),
+ AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => None,
+ }
+ }
}
macro_rules! impl_has_attrs {
($(($def:ident, $def_id:ident),)*) => {$(
impl HasAttrs for $def {
- fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
- let def = AttrDefId::$def_id(self.into());
- AttrsWithOwner::new(db, def)
- }
- fn attr_id(self) -> AttrDefId {
- AttrDefId::$def_id(self.into())
+ #[inline]
+ fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner {
+ AttrsOwner::AttrDef(AttrDefId::$def_id(self.into()))
}
}
)*};
}
impl_has_attrs![
- (Field, FieldId),
(Variant, EnumVariantId),
(Static, StaticId),
(Const, ConstId),
@@ -56,8 +193,6 @@ impl_has_attrs![
(Macro, MacroId),
(Function, FunctionId),
(Adt, AdtId),
- (Module, ModuleId),
- (GenericParam, GenericParamId),
(Impl, ImplId),
(ExternCrateDecl, ExternCrateId),
];
@@ -65,11 +200,9 @@ impl_has_attrs![
macro_rules! impl_has_attrs_enum {
($($variant:ident),* for $enum:ident) => {$(
impl HasAttrs for $variant {
- fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
- $enum::$variant(self).attrs(db)
- }
- fn attr_id(self) -> AttrDefId {
- $enum::$variant(self).attr_id()
+ #[inline]
+ fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
+ $enum::$variant(self).attr_id(db)
}
}
)*};
@@ -78,30 +211,46 @@ macro_rules! impl_has_attrs_enum {
impl_has_attrs_enum![Struct, Union, Enum for Adt];
impl_has_attrs_enum![TypeParam, ConstParam, LifetimeParam for GenericParam];
-impl HasAttrs for AssocItem {
- fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+impl HasAttrs for Module {
+ #[inline]
+ fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
+ AttrsOwner::AttrDef(AttrDefId::ModuleId(InternedModuleId::new(db, self.id)))
+ }
+}
+
+impl HasAttrs for GenericParam {
+ #[inline]
+ fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner {
match self {
- AssocItem::Function(it) => it.attrs(db),
- AssocItem::Const(it) => it.attrs(db),
- AssocItem::TypeAlias(it) => it.attrs(db),
+ GenericParam::TypeParam(it) => AttrsOwner::TypeOrConstParam(it.merge().into()),
+ GenericParam::ConstParam(it) => AttrsOwner::TypeOrConstParam(it.merge().into()),
+ GenericParam::LifetimeParam(it) => AttrsOwner::LifetimeParam(it.into()),
}
}
- fn attr_id(self) -> AttrDefId {
+}
+
+impl HasAttrs for AssocItem {
+ #[inline]
+ fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
match self {
- AssocItem::Function(it) => it.attr_id(),
- AssocItem::Const(it) => it.attr_id(),
- AssocItem::TypeAlias(it) => it.attr_id(),
+ AssocItem::Function(it) => it.attr_id(db),
+ AssocItem::Const(it) => it.attr_id(db),
+ AssocItem::TypeAlias(it) => it.attr_id(db),
}
}
}
impl HasAttrs for crate::Crate {
- fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
- let def = AttrDefId::ModuleId(self.root_module().id);
- AttrsWithOwner::new(db, def)
+ #[inline]
+ fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
+ self.root_module().attr_id(db)
}
- fn attr_id(self) -> AttrDefId {
- AttrDefId::ModuleId(self.root_module().id)
+}
+
+impl HasAttrs for Field {
+ #[inline]
+ fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner {
+ AttrsOwner::Field(self.into())
}
}
@@ -111,21 +260,22 @@ pub fn resolve_doc_path_on(
def: impl HasAttrs + Copy,
link: &str,
ns: Option<Namespace>,
- is_inner_doc: bool,
+ is_inner_doc: IsInnerDoc,
) -> Option<DocLinkDef> {
- resolve_doc_path_on_(db, link, def.attr_id(), ns, is_inner_doc)
+ resolve_doc_path_on_(db, link, def.attr_id(db), ns, is_inner_doc)
}
fn resolve_doc_path_on_(
db: &dyn HirDatabase,
link: &str,
- attr_id: AttrDefId,
+ attr_id: AttrsOwner,
ns: Option<Namespace>,
- is_inner_doc: bool,
+ is_inner_doc: IsInnerDoc,
) -> Option<DocLinkDef> {
let resolver = match attr_id {
- AttrDefId::ModuleId(it) => {
- if is_inner_doc {
+ AttrsOwner::AttrDef(AttrDefId::ModuleId(it)) => {
+ let it = it.loc(db);
+ if is_inner_doc.yes() {
it.resolver(db)
} else if let Some(parent) = Module::from(it).parent(db) {
parent.id.resolver(db)
@@ -133,20 +283,20 @@ fn resolve_doc_path_on_(
it.resolver(db)
}
}
- AttrDefId::FieldId(it) => it.parent.resolver(db),
- AttrDefId::AdtId(it) => it.resolver(db),
- AttrDefId::FunctionId(it) => it.resolver(db),
- AttrDefId::EnumVariantId(it) => it.resolver(db),
- AttrDefId::StaticId(it) => it.resolver(db),
- AttrDefId::ConstId(it) => it.resolver(db),
- AttrDefId::TraitId(it) => it.resolver(db),
- AttrDefId::TypeAliasId(it) => it.resolver(db),
- AttrDefId::ImplId(it) => it.resolver(db),
- AttrDefId::ExternBlockId(it) => it.resolver(db),
- AttrDefId::UseId(it) => it.resolver(db),
- AttrDefId::MacroId(it) => it.resolver(db),
- AttrDefId::ExternCrateId(it) => it.resolver(db),
- AttrDefId::GenericParamId(_) => return None,
+ AttrsOwner::AttrDef(AttrDefId::AdtId(it)) => it.resolver(db),
+ AttrsOwner::AttrDef(AttrDefId::FunctionId(it)) => it.resolver(db),
+ AttrsOwner::AttrDef(AttrDefId::EnumVariantId(it)) => it.resolver(db),
+ AttrsOwner::AttrDef(AttrDefId::StaticId(it)) => it.resolver(db),
+ AttrsOwner::AttrDef(AttrDefId::ConstId(it)) => it.resolver(db),
+ AttrsOwner::AttrDef(AttrDefId::TraitId(it)) => it.resolver(db),
+ AttrsOwner::AttrDef(AttrDefId::TypeAliasId(it)) => it.resolver(db),
+ AttrsOwner::AttrDef(AttrDefId::ImplId(it)) => it.resolver(db),
+ AttrsOwner::AttrDef(AttrDefId::ExternBlockId(it)) => it.resolver(db),
+ AttrsOwner::AttrDef(AttrDefId::UseId(it)) => it.resolver(db),
+ AttrsOwner::AttrDef(AttrDefId::MacroId(it)) => it.resolver(db),
+ AttrsOwner::AttrDef(AttrDefId::ExternCrateId(it)) => it.resolver(db),
+ AttrsOwner::Field(it) => it.parent.resolver(db),
+ AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return None,
};
let mut modpath = doc_modpath_from_str(link)?;
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index a6d67e8fb4..6ef6ea272e 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -153,8 +153,7 @@ pub struct UnresolvedImport {
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct UnresolvedMacroCall {
- pub macro_call: InFile<SyntaxNodePtr>,
- pub precise_location: Option<TextRange>,
+ pub range: InFile<TextRange>,
pub path: ModPath,
pub is_bang: bool,
}
@@ -185,8 +184,7 @@ pub struct InactiveCode {
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct MacroError {
- pub node: InFile<SyntaxNodePtr>,
- pub precise_location: Option<TextRange>,
+ pub range: InFile<TextRange>,
pub message: String,
pub error: bool,
pub kind: &'static str,
@@ -194,8 +192,7 @@ pub struct MacroError {
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct MacroExpansionParseError {
- pub node: InFile<SyntaxNodePtr>,
- pub precise_location: Option<TextRange>,
+ pub range: InFile<TextRange>,
pub errors: Arc<[SyntaxError]>,
}
@@ -213,12 +210,12 @@ pub struct UnimplementedBuiltinMacro {
#[derive(Debug)]
pub struct InvalidDeriveTarget {
- pub node: InFile<SyntaxNodePtr>,
+ pub range: InFile<TextRange>,
}
#[derive(Debug)]
pub struct MalformedDerive {
- pub node: InFile<SyntaxNodePtr>,
+ pub range: InFile<TextRange>,
}
#[derive(Debug)]
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 4c21b98066..a79dea1949 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -48,11 +48,12 @@ use arrayvec::ArrayVec;
use base_db::{CrateDisplayName, CrateOrigin, LangCrateOrigin};
use either::Either;
use hir_def::{
- AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId,
- CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId,
- FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
+ AdtId, AssocItemId, AssocItemLoc, CallableDefId, ConstId, ConstParamId, CrateRootModuleId,
+ DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId,
+ GenericParamId, HasModule, ImplId, InternedModuleId, ItemContainerId, LifetimeParamId,
LocalFieldId, Lookup, MacroExpander, MacroId, StaticId, StructId, SyntheticSyntax, TupleId,
TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
+ attrs::AttrFlags,
expr_store::{ExpressionStoreDiagnostics, ExpressionStoreSourceMap},
hir::{
BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat,
@@ -67,13 +68,12 @@ use hir_def::{
},
per_ns::PerNs,
resolver::{HasResolver, Resolver},
- signatures::{ImplFlags, StaticFlags, StructFlags, TraitFlags, VariantFields},
+ signatures::{EnumSignature, ImplFlags, StaticFlags, StructFlags, TraitFlags, VariantFields},
src::HasSource as _,
visibility::visibility_from_ast,
};
use hir_expand::{
- AstId, MacroCallKind, RenderedExpandError, ValueResult, attrs::collect_attrs,
- proc_macro::ProcMacroKind,
+ AstId, MacroCallKind, RenderedExpandError, ValueResult, proc_macro::ProcMacroKind,
};
use hir_ty::{
GenericPredicates, TraitEnvironment, TyDefId, TyLoweringDiagnostic, ValueTyDefId,
@@ -104,8 +104,8 @@ use smallvec::SmallVec;
use span::{AstIdNode, Edition, FileId};
use stdx::{format_to, impl_from, never};
use syntax::{
- AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr,
- ast::{self, HasAttrs as _, HasName, HasVisibility as _},
+ AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr,
+ ast::{self, HasName, HasVisibility as _},
format_smolstr,
};
use triomphe::{Arc, ThinArc};
@@ -113,7 +113,7 @@ use triomphe::{Arc, ThinArc};
use crate::db::{DefDatabase, HirDatabase};
pub use crate::{
- attrs::{HasAttrs, resolve_doc_path_on},
+ attrs::{AttrsWithOwner, HasAttrs, resolve_doc_path_on},
diagnostics::*,
has_source::HasSource,
semantics::{
@@ -136,7 +136,7 @@ pub use {
hir_def::{
Complete,
FindPathConfig,
- attr::{AttrSourceMap, Attrs, AttrsWithOwner},
+ attrs::{Docs, IsInnerDoc},
find_path::PrefixKind,
import_map,
lang_item::{LangItemEnum as LangItem, crate_lang_items},
@@ -150,7 +150,6 @@ pub use {
},
hir_expand::{
EditionedFileId, ExpandResult, HirFileId, MacroCallId, MacroKind,
- attrs::{Attr, AttrId},
change::ChangeWithProcMacros,
files::{
FilePosition, FilePositionWrapper, FileRange, FileRangeWrapper, HirFilePosition,
@@ -306,11 +305,10 @@ impl Crate {
}
/// Try to get the root URL of the documentation of a crate.
- pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option<String> {
+ pub fn get_html_root_url(self, db: &dyn HirDatabase) -> Option<String> {
// Look for #![doc(html_root_url = "...")]
- let attrs = db.attrs(AttrDefId::ModuleId(self.root_module().into()));
- let doc_url = attrs.by_key(sym::doc).find_string_value_in_tt(sym::html_root_url);
- doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
+ let doc_url = AttrFlags::doc_html_root_url(db, self.id);
+ doc_url.as_ref().map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
}
pub fn cfg<'db>(&self, db: &'db dyn HirDatabase) -> &'db CfgOptions {
@@ -655,7 +653,7 @@ impl Module {
// FIXME: This is accidentally quadratic.
continue;
}
- emit_def_diagnostic(db, acc, diag, edition);
+ emit_def_diagnostic(db, acc, diag, edition, def_map.krate());
}
if !self.id.is_block_module() {
@@ -674,8 +672,9 @@ impl Module {
acc.extend(def.diagnostics(db, style_lints))
}
ModuleDef::Trait(t) => {
+ let krate = t.krate(db);
for diag in TraitItems::query_with_diagnostics(db, t.id).1.iter() {
- emit_def_diagnostic(db, acc, diag, edition);
+ emit_def_diagnostic(db, acc, diag, edition, krate.id);
}
for item in t.items(db) {
@@ -791,7 +790,7 @@ impl Module {
let ast_id_map = db.ast_id_map(file_id);
for diag in impl_def.id.impl_items_with_diagnostics(db).1.iter() {
- emit_def_diagnostic(db, acc, diag, edition);
+ emit_def_diagnostic(db, acc, diag, edition, loc.container.krate());
}
if impl_signature.target_trait.is_none()
@@ -824,21 +823,10 @@ impl Module {
return None;
}
let parent = impl_def.id.into();
- let generic_params = db.generic_params(parent);
- let lifetime_params = generic_params.iter_lt().map(|(local_id, _)| {
- GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id })
- });
- let type_params = generic_params
- .iter_type_or_consts()
- .filter(|(_, it)| it.type_param().is_some())
- .map(|(local_id, _)| {
- GenericParamId::TypeParamId(TypeParamId::from_unchecked(
- TypeOrConstParamId { parent, local_id },
- ))
- });
- let res = type_params.chain(lifetime_params).any(|p| {
- db.attrs(AttrDefId::GenericParamId(p)).by_key(sym::may_dangle).exists()
- });
+ let (lifetimes_attrs, type_and_consts_attrs) =
+ AttrFlags::query_generic_params(db, parent);
+ let res = lifetimes_attrs.values().any(|it| it.contains(AttrFlags::MAY_DANGLE))
+ || type_and_consts_attrs.values().any(|it| it.contains(AttrFlags::MAY_DANGLE));
Some(res)
})()
.unwrap_or(false);
@@ -999,6 +987,17 @@ impl Module {
) -> Option<ModPath> {
hir_def::find_path::find_path(db, item.into().into(), self.into(), prefix_kind, true, cfg)
}
+
+ #[inline]
+ pub fn doc_keyword(self, db: &dyn HirDatabase) -> Option<Symbol> {
+ AttrFlags::doc_keyword(db, InternedModuleId::new(db, self.id))
+ }
+
+ /// Whether it has `#[path = "..."]` attribute.
+ #[inline]
+ pub fn has_path(&self, db: &dyn HirDatabase) -> bool {
+ self.attrs(db).attrs.contains(AttrFlags::HAS_PATH)
+ }
}
fn macro_call_diagnostics<'db>(
@@ -1013,31 +1012,19 @@ fn macro_call_diagnostics<'db>(
if let Some(err) = err {
let loc = db.lookup_intern_macro_call(macro_call_id);
let file_id = loc.kind.file_id();
- let node =
- InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id()));
+ let mut range = precise_macro_call_location(&loc.kind, db, loc.krate);
let RenderedExpandError { message, error, kind } = err.render_to_string(db);
- let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
- let precise_location = if editioned_file_id == file_id {
- Some(
- err.span().range
- + db.ast_id_map(editioned_file_id.into())
- .get_erased(err.span().anchor.ast_id)
- .text_range()
- .start(),
- )
- } else {
- None
- };
- acc.push(MacroError { node, precise_location, message, error, kind }.into());
+ if Some(err.span().anchor.file_id) == file_id.file_id().map(|it| it.editioned_file_id(db)) {
+ range.value = err.span().range
+ + db.ast_id_map(file_id).get_erased(err.span().anchor.ast_id).text_range().start();
+ }
+ acc.push(MacroError { range, message, error, kind }.into());
}
if !parse_errors.is_empty() {
let loc = db.lookup_intern_macro_call(macro_call_id);
- let (node, precise_location) = precise_macro_call_location(&loc.kind, db);
- acc.push(
- MacroExpansionParseError { node, precise_location, errors: parse_errors.clone() }
- .into(),
- )
+ let range = precise_macro_call_location(&loc.kind, db, loc.krate);
+ acc.push(MacroExpansionParseError { range, errors: parse_errors.clone() }.into())
}
}
@@ -1061,6 +1048,7 @@ fn emit_macro_def_diagnostics<'db>(
acc,
&DefDiagnosticKind::MacroDefError { ast, message: e.to_string() },
edition,
+ m.krate(db).id,
);
}
}
@@ -1070,8 +1058,9 @@ fn emit_def_diagnostic<'db>(
acc: &mut Vec<AnyDiagnostic<'db>>,
diag: &DefDiagnostic,
edition: Edition,
+ krate: base_db::Crate,
) {
- emit_def_diagnostic_(db, acc, &diag.kind, edition)
+ emit_def_diagnostic_(db, acc, &diag.kind, edition, krate)
}
fn emit_def_diagnostic_<'db>(
@@ -1079,6 +1068,7 @@ fn emit_def_diagnostic_<'db>(
acc: &mut Vec<AnyDiagnostic<'db>>,
diag: &DefDiagnosticKind,
edition: Edition,
+ krate: base_db::Crate,
) {
match diag {
DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
@@ -1101,8 +1091,7 @@ fn emit_def_diagnostic_<'db>(
let RenderedExpandError { message, error, kind } = err.render_to_string(db);
acc.push(
MacroError {
- node: InFile::new(ast.file_id, item.syntax_node_ptr()),
- precise_location: None,
+ range: InFile::new(ast.file_id, item.text_range()),
message: format!("{}: {message}", path.display(db, edition)),
error,
kind,
@@ -1132,11 +1121,10 @@ fn emit_def_diagnostic_<'db>(
);
}
DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
- let (node, precise_location) = precise_macro_call_location(ast, db);
+ let location = precise_macro_call_location(ast, db, krate);
acc.push(
UnresolvedMacroCall {
- macro_call: node,
- precise_location,
+ range: location,
path: path.clone(),
is_bang: matches!(ast, MacroCallKind::FnLike { .. }),
}
@@ -1155,34 +1143,12 @@ fn emit_def_diagnostic_<'db>(
);
}
DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
- let node = ast.to_node(db);
- let derive = node.attrs().nth(*id);
- match derive {
- Some(derive) => {
- acc.push(
- InvalidDeriveTarget {
- node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
- }
- .into(),
- );
- }
- None => stdx::never!("derive diagnostic on item without derive attribute"),
- }
+ let derive = id.find_attr_range(db, krate, *ast).3.path_range();
+ acc.push(InvalidDeriveTarget { range: ast.with_value(derive) }.into());
}
DefDiagnosticKind::MalformedDerive { ast, id } => {
- let node = ast.to_node(db);
- let derive = node.attrs().nth(*id);
- match derive {
- Some(derive) => {
- acc.push(
- MalformedDerive {
- node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
- }
- .into(),
- );
- }
- None => stdx::never!("derive diagnostic on item without derive attribute"),
- }
+ let derive = id.find_attr_range(db, krate, *ast).2;
+ acc.push(MalformedDerive { range: ast.with_value(derive) }.into());
}
DefDiagnosticKind::MacroDefError { ast, message } => {
let node = ast.to_node(db);
@@ -1201,61 +1167,28 @@ fn emit_def_diagnostic_<'db>(
fn precise_macro_call_location(
ast: &MacroCallKind,
db: &dyn HirDatabase,
-) -> (InFile<SyntaxNodePtr>, Option<TextRange>) {
+ krate: base_db::Crate,
+) -> InFile<TextRange> {
// FIXME: maybe we actually want slightly different ranges for the different macro diagnostics
// - e.g. the full attribute for macro errors, but only the name for name resolution
match ast {
MacroCallKind::FnLike { ast_id, .. } => {
let node = ast_id.to_node(db);
- (
- ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
- node.path()
- .and_then(|it| it.segment())
- .and_then(|it| it.name_ref())
- .map(|it| it.syntax().text_range()),
- )
+ let range = node
+ .path()
+ .and_then(|it| it.segment())
+ .and_then(|it| it.name_ref())
+ .map(|it| it.syntax().text_range());
+ let range = range.unwrap_or_else(|| node.syntax().text_range());
+ ast_id.with_value(range)
}
MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
- let node = ast_id.to_node(db);
- // Compute the precise location of the macro name's token in the derive
- // list.
- let token = (|| {
- let derive_attr = collect_attrs(&node)
- .nth(derive_attr_index.ast_index())
- .and_then(|x| Either::left(x.1))?;
- let token_tree = derive_attr.meta()?.token_tree()?;
- let chunk_by = token_tree
- .syntax()
- .children_with_tokens()
- .filter_map(|elem| match elem {
- syntax::NodeOrToken::Token(tok) => Some(tok),
- _ => None,
- })
- .chunk_by(|t| t.kind() == T![,]);
- let (_, mut group) = chunk_by
- .into_iter()
- .filter(|&(comma, _)| !comma)
- .nth(*derive_index as usize)?;
- group.find(|t| t.kind() == T![ident])
- })();
- (
- ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
- token.as_ref().map(|tok| tok.text_range()),
- )
+ let range = derive_attr_index.find_derive_range(db, krate, *ast_id, *derive_index);
+ ast_id.with_value(range)
}
- MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
- let node = ast_id.to_node(db);
- let attr = collect_attrs(&node)
- .nth(invoc_attr_index.ast_index())
- .and_then(|x| Either::left(x.1))
- .unwrap_or_else(|| {
- panic!("cannot find attribute #{}", invoc_attr_index.ast_index())
- });
-
- (
- ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
- Some(attr.syntax().text_range()),
- )
+ MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
+ let attr_range = attr_ids.invoc_attr().find_attr_range(db, krate, *ast_id).2;
+ ast_id.with_value(attr_range)
}
}
}
@@ -1452,7 +1385,7 @@ impl Struct {
}
pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprOptions> {
- db.struct_signature(self.id).repr
+ AttrFlags::repr(db, self.id.into())
}
pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
@@ -1468,7 +1401,7 @@ impl Struct {
}
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
- db.attrs(self.id.into()).is_unstable()
+ AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
}
pub fn instantiate_infer<'db>(self, infer_ctxt: &InferCtxt<'db>) -> InstantiatedStruct<'db> {
@@ -1556,7 +1489,7 @@ impl Union {
.collect()
}
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
- db.attrs(self.id.into()).is_unstable()
+ AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
}
}
@@ -1591,7 +1524,7 @@ impl Enum {
}
pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprOptions> {
- db.enum_signature(self.id).repr
+ AttrFlags::repr(db, self.id.into())
}
pub fn ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
@@ -1607,7 +1540,7 @@ impl Enum {
let interner = DbInterner::new_no_crate(db);
Type::new_for_crate(
self.id.lookup(db).container.krate(),
- match db.enum_signature(self.id).variant_body_type() {
+ match EnumSignature::variant_body_type(db, self.id) {
layout::IntegerType::Pointer(sign) => match sign {
true => Ty::new_int(interner, rustc_type_ir::IntTy::Isize),
false => Ty::new_uint(interner, rustc_type_ir::UintTy::Usize),
@@ -1648,7 +1581,7 @@ impl Enum {
}
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
- db.attrs(self.id.into()).is_unstable()
+ AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
}
}
@@ -1748,7 +1681,7 @@ impl Variant {
}
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
- db.attrs(self.id.into()).is_unstable()
+ AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
}
pub fn instantiate_infer<'db>(self, infer_ctxt: &InferCtxt<'db>) -> InstantiatedVariant<'db> {
@@ -2233,8 +2166,7 @@ fn expr_store_diagnostics<'db>(
InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into()
}
ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => UnresolvedMacroCall {
- macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
- precise_location: None,
+ range: node.map(|ptr| ptr.text_range()),
path: path.clone(),
is_bang: true,
}
@@ -2459,33 +2391,33 @@ impl Function {
/// Does this function have `#[test]` attribute?
pub fn is_test(self, db: &dyn HirDatabase) -> bool {
- db.attrs(self.id.into()).is_test()
+ self.attrs(db).is_test()
}
/// is this a `fn main` or a function with an `export_name` of `main`?
pub fn is_main(self, db: &dyn HirDatabase) -> bool {
- db.attrs(self.id.into()).export_name() == Some(&sym::main)
+ self.exported_main(db)
|| self.module(db).is_crate_root() && db.function_signature(self.id).name == sym::main
}
/// Is this a function with an `export_name` of `main`?
pub fn exported_main(self, db: &dyn HirDatabase) -> bool {
- db.attrs(self.id.into()).export_name() == Some(&sym::main)
+ AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_EXPORT_NAME_MAIN)
}
/// Does this function have the ignore attribute?
pub fn is_ignore(self, db: &dyn HirDatabase) -> bool {
- db.attrs(self.id.into()).is_ignore()
+ AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_IGNORE)
}
/// Does this function have `#[bench]` attribute?
pub fn is_bench(self, db: &dyn HirDatabase) -> bool {
- db.attrs(self.id.into()).is_bench()
+ AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_BENCH)
}
/// Is this function marked as unstable with `#[feature]` attribute?
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
- db.attrs(self.id.into()).is_unstable()
+ AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
}
pub fn is_unsafe_to_call(
@@ -2496,8 +2428,7 @@ impl Function {
) -> bool {
let (target_features, target_feature_is_safe_in_target) = caller
.map(|caller| {
- let target_features =
- hir_ty::TargetFeatures::from_attrs(&db.attrs(caller.id.into()));
+ let target_features = hir_ty::TargetFeatures::from_fn(db, caller.id);
let target_feature_is_safe_in_target =
match &caller.krate(db).id.workspace_data(db).target {
Ok(target) => hir_ty::target_feature_is_safe_in_target(target),
@@ -2528,14 +2459,6 @@ impl Function {
}
pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option<Macro> {
- let attrs = db.attrs(self.id.into());
- // FIXME: Store this in FunctionData flags?
- if !(attrs.is_proc_macro()
- || attrs.is_proc_macro_attribute()
- || attrs.is_proc_macro_derive())
- {
- return None;
- }
let def_map = crate_def_map(db, HasModule::krate(&self.id, db));
def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
}
@@ -2994,7 +2917,7 @@ impl Trait {
/// `#[rust_analyzer::completions(...)]` mode.
pub fn complete(self, db: &dyn HirDatabase) -> Complete {
- Complete::extract(true, &self.attrs(db))
+ Complete::extract(true, self.attrs(db).attrs)
}
}
@@ -3165,10 +3088,10 @@ impl Macro {
let loc = id.lookup(db);
let source = loc.source(db);
match loc.kind {
- ProcMacroKind::CustomDerive => db
- .attrs(id.into())
- .parse_proc_macro_derive()
- .map_or_else(|| as_name_opt(source.value.name()), |(it, _)| it),
+ ProcMacroKind::CustomDerive => AttrFlags::derive_info(db, self.id).map_or_else(
+ || as_name_opt(source.value.name()),
+ |info| Name::new_symbol_root(info.trait_name.clone()),
+ ),
ProcMacroKind::Bang | ProcMacroKind::Attr => as_name_opt(source.value.name()),
}
}
@@ -3176,7 +3099,7 @@ impl Macro {
}
pub fn is_macro_export(self, db: &dyn HirDatabase) -> bool {
- matches!(self.id, MacroId::MacroRulesId(_) if db.attrs(self.id.into()).by_key(sym::macro_export).exists())
+ matches!(self.id, MacroId::MacroRulesId(_) if AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_MACRO_EXPORT))
}
pub fn is_proc_macro(self) -> bool {
@@ -4009,18 +3932,10 @@ impl DeriveHelper {
}
pub fn name(&self, db: &dyn HirDatabase) -> Name {
- match self.derive {
- makro @ MacroId::Macro2Id(_) => db
- .attrs(makro.into())
- .parse_rustc_builtin_macro()
- .and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()),
- MacroId::MacroRulesId(_) => None,
- makro @ MacroId::ProcMacroId(_) => db
- .attrs(makro.into())
- .parse_proc_macro_derive()
- .and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()),
- }
- .unwrap_or_else(Name::missing)
+ AttrFlags::derive_info(db, self.derive)
+ .and_then(|it| it.helpers.get(self.idx as usize))
+ .map(|helper| Name::new_symbol_root(helper.clone()))
+ .unwrap_or_else(Name::missing)
}
}
@@ -4244,7 +4159,7 @@ impl TypeParam {
}
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
- db.attrs(GenericParamId::from(self.id).into()).is_unstable()
+ self.attrs(db).is_unstable()
}
}
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 220a3bf432..8cd218f04e 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -21,7 +21,6 @@ use hir_def::{
};
use hir_expand::{
EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
- attrs::collect_attrs,
builtin::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
files::{FileRangeWrapper, HirFileRange, InRealFile},
@@ -36,7 +35,7 @@ use intern::{Interned, Symbol, sym};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{SmallVec, smallvec};
-use span::{Edition, FileId, SyntaxContext};
+use span::{FileId, SyntaxContext};
use stdx::{TupleExt, always};
use syntax::{
AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
@@ -386,17 +385,14 @@ impl<'db> SemanticsImpl<'db> {
}
pub fn attach_first_edition(&self, file: FileId) -> Option<EditionedFileId> {
- Some(EditionedFileId::new(
- self.db,
- file,
- self.file_to_module_defs(file).next()?.krate().edition(self.db),
- ))
+ let krate = self.file_to_module_defs(file).next()?.krate();
+ Some(EditionedFileId::new(self.db, file, krate.edition(self.db), krate.id))
}
pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
let file_id = self
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::new(self.db, file_id, Edition::CURRENT));
+ .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(self.db, file_id));
let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), file_id.into());
@@ -1197,33 +1193,34 @@ impl<'db> SemanticsImpl<'db> {
.zip(Some(item))
})
.map(|(call_id, item)| {
- let attr_id = match db.lookup_intern_macro_call(call_id).kind {
+ let item_range = item.syntax().text_range();
+ let loc = db.lookup_intern_macro_call(call_id);
+ let text_range = match loc.kind {
hir_expand::MacroCallKind::Attr {
- invoc_attr_index, ..
- } => invoc_attr_index.ast_index(),
- _ => 0,
+ censored_attr_ids: attr_ids,
+ ..
+ } => {
+ // FIXME: here, the attribute's text range is used to strip away all
+ // entries from the start of the attribute "list" up the invoking
+ // attribute. But in
+ // ```
+ // mod foo {
+ // #![inner]
+ // }
+ // ```
+ // we don't wanna strip away stuff in the `mod foo {` range, that is
+ // here if the id corresponds to an inner attribute we got strip all
+ // text ranges of the outer ones, and then all of the inner ones up
+ // to the invoking attribute so that the inbetween is ignored.
+ // FIXME: Should cfg_attr be handled differently?
+ let (attr, _, _, _) = attr_ids
+ .invoc_attr()
+ .find_attr_range_with_source(db, loc.krate, &item);
+ let start = attr.syntax().text_range().start();
+ TextRange::new(start, item_range.end())
+ }
+ _ => item_range,
};
- // FIXME: here, the attribute's text range is used to strip away all
- // entries from the start of the attribute "list" up the invoking
- // attribute. But in
- // ```
- // mod foo {
- // #![inner]
- // }
- // ```
- // we don't wanna strip away stuff in the `mod foo {` range, that is
- // here if the id corresponds to an inner attribute we got strip all
- // text ranges of the outer ones, and then all of the inner ones up
- // to the invoking attribute so that the inbetween is ignored.
- let text_range = item.syntax().text_range();
- let start = collect_attrs(&item)
- .nth(attr_id)
- .map(|attr| match attr.1 {
- Either::Left(it) => it.syntax().text_range().start(),
- Either::Right(it) => it.syntax().text_range().start(),
- })
- .unwrap_or_else(|| text_range.start());
- let text_range = TextRange::new(start, text_range.end());
filter_duplicates(tokens, text_range);
process_expansion_for_token(ctx, &mut stack, call_id)
})
@@ -1473,6 +1470,14 @@ impl<'db> SemanticsImpl<'db> {
FileRangeWrapper { file_id: file_id.file_id(self.db), range }
}
+ pub fn diagnostics_display_range_for_range(
+ &self,
+ src: InFile<TextRange>,
+ ) -> FileRangeWrapper<FileId> {
+ let FileRange { file_id, range } = src.original_node_file_range_rooted(self.db);
+ FileRangeWrapper { file_id: file_id.file_id(self.db), range }
+ }
+
fn token_ancestors_with_macros(
&self,
token: SyntaxToken,
diff --git a/crates/hir/src/semantics/child_by_source.rs b/crates/hir/src/semantics/child_by_source.rs
index 5019a5987e..165ac7e4a0 100644
--- a/crates/hir/src/semantics/child_by_source.rs
+++ b/crates/hir/src/semantics/child_by_source.rs
@@ -5,7 +5,7 @@
//! node for a *child*, and get its hir.
use either::Either;
-use hir_expand::{HirFileId, attrs::collect_attrs};
+use hir_expand::HirFileId;
use span::AstIdNode;
use syntax::{AstPtr, ast};
@@ -94,6 +94,7 @@ impl ChildBySource for ModuleId {
impl ChildBySource for ItemScope {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+ let krate = file_id.krate(db);
self.declarations().for_each(|item| add_module_def(db, res, file_id, item));
self.impls().for_each(|imp| insert_item_loc(db, res, file_id, imp, keys::IMPL));
self.extern_blocks().for_each(|extern_block| {
@@ -123,12 +124,10 @@ impl ChildBySource for ItemScope {
|(ast_id, calls)| {
let adt = ast_id.to_node(db);
calls.for_each(|(attr_id, call_id, calls)| {
- if let Some((_, Either::Left(attr))) =
- collect_attrs(&adt).nth(attr_id.ast_index())
- {
- res[keys::DERIVE_MACRO_CALL]
- .insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
- }
+ // FIXME: Fix cfg_attr handling.
+ let (attr, _, _, _) = attr_id.find_attr_range_with_source(db, krate, &adt);
+ res[keys::DERIVE_MACRO_CALL]
+ .insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
});
},
);
diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs
index bd4cff50b2..1530e697a3 100644
--- a/crates/hir/src/symbols.rs
+++ b/crates/hir/src/symbols.rs
@@ -416,12 +416,12 @@ impl<'a> SymbolCollector<'a> {
let mut do_not_complete = Complete::Yes;
if let Some(attrs) = def.attrs(self.db) {
- do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs);
+ do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), attrs.attrs);
if let Some(trait_do_not_complete) = trait_do_not_complete {
do_not_complete = Complete::for_trait_item(trait_do_not_complete, do_not_complete);
}
- for alias in attrs.doc_aliases() {
+ for alias in attrs.doc_aliases(self.db) {
self.symbols.insert(FileSymbol {
name: alias.clone(),
def,
@@ -465,9 +465,9 @@ impl<'a> SymbolCollector<'a> {
let mut do_not_complete = Complete::Yes;
if let Some(attrs) = def.attrs(self.db) {
- do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs);
+ do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), attrs.attrs);
- for alias in attrs.doc_aliases() {
+ for alias in attrs.doc_aliases(self.db) {
self.symbols.insert(FileSymbol {
name: alias.clone(),
def,
diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 3eeff2ad60..248ce2ad61 100644
--- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -1,7 +1,7 @@
use std::iter::{self, Peekable};
use either::Either;
-use hir::{Adt, AsAssocItem, Crate, FindPathConfig, HasAttrs, ModuleDef, Semantics, sym};
+use hir::{Adt, AsAssocItem, Crate, FindPathConfig, HasAttrs, ModuleDef, Semantics};
use ide_db::RootDatabase;
use ide_db::assists::ExprFillDefaultMode;
use ide_db::syntax_helpers::suggest_name;
@@ -401,7 +401,7 @@ impl ExtendedVariant {
fn should_be_hidden(self, db: &RootDatabase, krate: Crate) -> bool {
match self {
ExtendedVariant::Variant { variant: var, .. } => {
- var.attrs(db).has_doc_hidden() && var.module(db).krate() != krate
+ var.attrs(db).is_doc_hidden() && var.module(db).krate() != krate
}
_ => false,
}
@@ -424,7 +424,7 @@ impl ExtendedEnum {
fn is_non_exhaustive(&self, db: &RootDatabase, krate: Crate) -> bool {
match self {
ExtendedEnum::Enum { enum_: e, .. } => {
- e.attrs(db).by_key(sym::non_exhaustive).exists() && e.module(db).krate() != krate
+ e.attrs(db).is_non_exhaustive() && e.module(db).krate() != krate
}
_ => false,
}
diff --git a/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/crates/ide-assists/src/handlers/destructure_struct_binding.rs
index 8b24d33bf9..46f210804d 100644
--- a/crates/ide-assists/src/handlers/destructure_struct_binding.rs
+++ b/crates/ide-assists/src/handlers/destructure_struct_binding.rs
@@ -1,4 +1,4 @@
-use hir::{HasVisibility, sym};
+use hir::HasVisibility;
use ide_db::{
FxHashMap, FxHashSet,
assists::AssistId,
@@ -93,7 +93,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str
let kind = struct_type.kind(ctx.db());
let struct_def_path = module.find_path(ctx.db(), struct_def, cfg)?;
- let is_non_exhaustive = struct_def.attrs(ctx.db())?.by_key(sym::non_exhaustive).exists();
+ let is_non_exhaustive = struct_def.attrs(ctx.db())?.is_non_exhaustive();
let is_foreign_crate = struct_def.module(ctx.db()).is_some_and(|m| m.krate() != module.krate());
let fields = struct_type.fields(ctx.db());
diff --git a/crates/ide-assists/src/handlers/move_module_to_file.rs b/crates/ide-assists/src/handlers/move_module_to_file.rs
index da62b817fc..503003bc6b 100644
--- a/crates/ide-assists/src/handlers/move_module_to_file.rs
+++ b/crates/ide-assists/src/handlers/move_module_to_file.rs
@@ -1,7 +1,6 @@
use std::iter;
use ast::edit::IndentLevel;
-use hir::{HasAttrs, sym};
use ide_db::base_db::AnchoredPathBuf;
use itertools::Itertools;
use stdx::format_to;
@@ -53,14 +52,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let mut buf = String::from("./");
let db = ctx.db();
match parent_module.name(db) {
- Some(name)
- if !parent_module.is_mod_rs(db)
- && parent_module
- .attrs(db)
- .by_key(sym::path)
- .string_value_unescape()
- .is_none() =>
- {
+ Some(name) if !parent_module.is_mod_rs(db) && !parent_module.has_path(db) => {
format_to!(buf, "{}/", name.as_str())
}
_ => (),
diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs
index ca468905fb..8f754bcf21 100644
--- a/crates/ide-assists/src/lib.rs
+++ b/crates/ide-assists/src/lib.rs
@@ -68,7 +68,7 @@ pub mod utils;
use hir::Semantics;
use ide_db::{EditionedFileId, RootDatabase};
-use syntax::{Edition, TextRange};
+use syntax::TextRange;
pub(crate) use crate::assist_context::{AssistContext, Assists};
@@ -90,7 +90,7 @@ pub fn assists(
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(range.file_id)
- .unwrap_or_else(|| EditionedFileId::new(db, range.file_id, Edition::CURRENT));
+ .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, range.file_id));
let ctx = AssistContext::new(sema, config, hir::FileRange { file_id, range: range.range });
let mut acc = Assists::new(&ctx, resolve);
handlers::all().iter().for_each(|handler| {
diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs
index ade60691b5..2e220b129f 100644
--- a/crates/ide-assists/src/tests.rs
+++ b/crates/ide-assists/src/tests.rs
@@ -321,11 +321,13 @@ fn check_with_config(
let _tracing = setup_tracing();
let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
db.enable_proc_attr_macros();
+ let sema = Semantics::new(&db);
+ let file_with_caret_id =
+ sema.attach_first_edition(file_with_caret_id.file_id(&db)).unwrap_or(file_with_caret_id);
let text_without_caret = db.file_text(file_with_caret_id.file_id(&db)).text(&db).to_string();
let frange = hir::FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
- let sema = Semantics::new(&db);
let ctx = AssistContext::new(sema, &config, frange);
let resolve = match expected {
ExpectedResult::Unresolved => AssistResolveStrategy::None,
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index a00af92626..de8c4b6bca 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -101,16 +101,7 @@ pub fn test_related_attribute_syn(fn_def: &ast::Fn) -> Option<ast::Attr> {
}
pub fn has_test_related_attribute(attrs: &hir::AttrsWithOwner) -> bool {
- attrs.iter().any(|attr| {
- let path = attr.path();
- (|| {
- Some(
- path.segments().first()?.as_str().starts_with("test")
- || path.segments().last()?.as_str().ends_with("test"),
- )
- })()
- .unwrap_or_default()
- })
+ attrs.is_test()
}
#[derive(Clone, Copy, PartialEq)]
@@ -136,7 +127,7 @@ pub fn filter_assoc_items(
.copied()
.filter(|assoc_item| {
if ignore_items == IgnoreAssocItems::DocHiddenAttrPresent
- && assoc_item.attrs(sema.db).has_doc_hidden()
+ && assoc_item.attrs(sema.db).is_doc_hidden()
{
if let hir::AssocItem::Function(f) = assoc_item
&& !f.has_body(sema.db)
diff --git a/crates/ide-completion/src/completions/attribute/lint.rs b/crates/ide-completion/src/completions/attribute/lint.rs
index c87c46d981..df577b8ed0 100644
--- a/crates/ide-completion/src/completions/attribute/lint.rs
+++ b/crates/ide-completion/src/completions/attribute/lint.rs
@@ -56,7 +56,7 @@ pub(super) fn complete_lint(
};
let mut item =
CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label, ctx.edition);
- item.documentation(Documentation::new(description.to_owned()));
+ item.documentation(Documentation::new_owned(description.to_owned()));
item.add_to(acc, ctx.db)
}
}
diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs
index d1e05a4359..20d01485a4 100644
--- a/crates/ide-completion/src/completions/flyimport.rs
+++ b/crates/ide-completion/src/completions/flyimport.rs
@@ -266,7 +266,7 @@ fn import_on_the_fly(
let original_item = &import.original_item;
!ctx.is_item_hidden(&import.item_to_import)
&& !ctx.is_item_hidden(original_item)
- && ctx.check_stability(original_item.attrs(ctx.db).as_deref())
+ && ctx.check_stability(original_item.attrs(ctx.db).as_ref())
})
.filter(|import| filter_excluded_flyimport(ctx, import))
.sorted_by(|a, b| {
@@ -313,7 +313,7 @@ fn import_on_the_fly_pat_(
let original_item = &import.original_item;
!ctx.is_item_hidden(&import.item_to_import)
&& !ctx.is_item_hidden(original_item)
- && ctx.check_stability(original_item.attrs(ctx.db).as_deref())
+ && ctx.check_stability(original_item.attrs(ctx.db).as_ref())
})
.sorted_by(|a, b| {
let key = |import_path| {
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs
index ba1fe649dc..4dd84daf06 100644
--- a/crates/ide-completion/src/completions/postfix.rs
+++ b/crates/ide-completion/src/completions/postfix.rs
@@ -446,7 +446,7 @@ fn add_custom_postfix_completions(
let body = snippet.postfix_snippet(receiver_text);
let mut builder =
postfix_snippet(trigger, snippet.description.as_deref().unwrap_or_default(), &body);
- builder.documentation(Documentation::new(format!("```rust\n{body}\n```")));
+ builder.documentation(Documentation::new_owned(format!("```rust\n{body}\n```")));
for import in imports.into_iter() {
builder.add_import(import);
}
diff --git a/crates/ide-completion/src/completions/snippet.rs b/crates/ide-completion/src/completions/snippet.rs
index ead9852eff..04450aea75 100644
--- a/crates/ide-completion/src/completions/snippet.rs
+++ b/crates/ide-completion/src/completions/snippet.rs
@@ -139,7 +139,7 @@ fn add_custom_completions(
};
let body = snip.snippet();
let mut builder = snippet(ctx, cap, trigger, &body);
- builder.documentation(Documentation::new(format!("```rust\n{body}\n```")));
+ builder.documentation(Documentation::new_owned(format!("```rust\n{body}\n```")));
for import in imports.into_iter() {
builder.add_import(import);
}
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 23318e1d19..971067d4a2 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -559,7 +559,7 @@ impl CompletionContext<'_> {
I: hir::HasAttrs + Copy,
{
let attrs = item.attrs(self.db);
- attrs.doc_aliases().map(|it| it.as_str().into()).collect()
+ attrs.doc_aliases(self.db).iter().map(|it| it.as_str().into()).collect()
}
/// Check if an item is `#[doc(hidden)]`.
@@ -573,7 +573,7 @@ impl CompletionContext<'_> {
}
/// Checks whether this item should be listed in regards to stability. Returns `true` if we should.
- pub(crate) fn check_stability(&self, attrs: Option<&hir::Attrs>) -> bool {
+ pub(crate) fn check_stability(&self, attrs: Option<&hir::AttrsWithOwner>) -> bool {
let Some(attrs) = attrs else {
return true;
};
@@ -591,15 +591,15 @@ impl CompletionContext<'_> {
/// Whether the given trait is an operator trait or not.
pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool {
- match trait_.attrs(self.db).lang() {
- Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()),
+ match trait_.attrs(self.db).lang(self.db) {
+ Some(lang) => OP_TRAIT_LANG.contains(&lang),
None => false,
}
}
/// Whether the given trait has `#[doc(notable_trait)]`
pub(crate) fn is_doc_notable_trait(&self, trait_: hir::Trait) -> bool {
- trait_.attrs(self.db).has_doc_notable_trait()
+ trait_.attrs(self.db).is_doc_notable_trait()
}
/// Returns the traits in scope, with the [`Drop`] trait removed.
@@ -656,7 +656,7 @@ impl CompletionContext<'_> {
fn is_visible_impl(
&self,
vis: &hir::Visibility,
- attrs: &hir::Attrs,
+ attrs: &hir::AttrsWithOwner,
defining_crate: hir::Crate,
) -> Visible {
if !self.check_stability(Some(attrs)) {
@@ -678,14 +678,18 @@ impl CompletionContext<'_> {
if self.is_doc_hidden(attrs, defining_crate) { Visible::No } else { Visible::Yes }
}
- pub(crate) fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool {
+ pub(crate) fn is_doc_hidden(
+ &self,
+ attrs: &hir::AttrsWithOwner,
+ defining_crate: hir::Crate,
+ ) -> bool {
// `doc(hidden)` items are only completed within the defining crate.
- self.krate != defining_crate && attrs.has_doc_hidden()
+ self.krate != defining_crate && attrs.is_doc_hidden()
}
pub(crate) fn doc_aliases_in_scope(&self, scope_def: ScopeDef) -> Vec<SmolStr> {
if let Some(attrs) = scope_def.attrs(self.db) {
- attrs.doc_aliases().map(|it| it.as_str().into()).collect()
+ attrs.doc_aliases(self.db).iter().map(|it| it.as_str().into()).collect()
} else {
vec![]
}
@@ -889,35 +893,35 @@ impl<'db> CompletionContext<'db> {
}
}
-const OP_TRAIT_LANG_NAMES: &[&str] = &[
- "add_assign",
- "add",
- "bitand_assign",
- "bitand",
- "bitor_assign",
- "bitor",
- "bitxor_assign",
- "bitxor",
- "deref_mut",
- "deref",
- "div_assign",
- "div",
- "eq",
- "fn_mut",
- "fn_once",
- "fn",
- "index_mut",
- "index",
- "mul_assign",
- "mul",
- "neg",
- "not",
- "partial_ord",
- "rem_assign",
- "rem",
- "shl_assign",
- "shl",
- "shr_assign",
- "shr",
- "sub",
+const OP_TRAIT_LANG: &[hir::LangItem] = &[
+ hir::LangItem::AddAssign,
+ hir::LangItem::Add,
+ hir::LangItem::BitAndAssign,
+ hir::LangItem::BitAnd,
+ hir::LangItem::BitOrAssign,
+ hir::LangItem::BitOr,
+ hir::LangItem::BitXorAssign,
+ hir::LangItem::BitXor,
+ hir::LangItem::DerefMut,
+ hir::LangItem::Deref,
+ hir::LangItem::DivAssign,
+ hir::LangItem::Div,
+ hir::LangItem::PartialEq,
+ hir::LangItem::FnMut,
+ hir::LangItem::FnOnce,
+ hir::LangItem::Fn,
+ hir::LangItem::IndexMut,
+ hir::LangItem::Index,
+ hir::LangItem::MulAssign,
+ hir::LangItem::Mul,
+ hir::LangItem::Neg,
+ hir::LangItem::Not,
+ hir::LangItem::PartialOrd,
+ hir::LangItem::RemAssign,
+ hir::LangItem::Rem,
+ hir::LangItem::ShlAssign,
+ hir::LangItem::Shl,
+ hir::LangItem::ShrAssign,
+ hir::LangItem::Shr,
+ hir::LangItem::Sub,
];
diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs
index 303c71230d..c526c7f070 100644
--- a/crates/ide-completion/src/item.rs
+++ b/crates/ide-completion/src/item.rs
@@ -57,7 +57,8 @@ pub struct CompletionItem {
/// Additional info to show in the UI pop up.
pub detail: Option<String>,
- pub documentation: Option<Documentation>,
+ // FIXME: Make this with `'db` lifetime.
+ pub documentation: Option<Documentation<'static>>,
/// Whether this item is marked as deprecated
pub deprecated: bool,
@@ -488,7 +489,8 @@ pub(crate) struct Builder {
insert_text: Option<String>,
is_snippet: bool,
detail: Option<String>,
- documentation: Option<Documentation>,
+ // FIXME: Make this with `'db` lifetime.
+ documentation: Option<Documentation<'static>>,
lookup: Option<SmolStr>,
kind: CompletionItemKind,
text_edit: Option<TextEdit>,
@@ -644,11 +646,11 @@ impl Builder {
self
}
#[allow(unused)]
- pub(crate) fn documentation(&mut self, docs: Documentation) -> &mut Builder {
+ pub(crate) fn documentation(&mut self, docs: Documentation<'_>) -> &mut Builder {
self.set_documentation(Some(docs))
}
- pub(crate) fn set_documentation(&mut self, docs: Option<Documentation>) -> &mut Builder {
- self.documentation = docs;
+ pub(crate) fn set_documentation(&mut self, docs: Option<Documentation<'_>>) -> &mut Builder {
+ self.documentation = docs.map(Documentation::into_owned);
self
}
pub(crate) fn set_deprecated(&mut self, deprecated: bool) -> &mut Builder {
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index c0f09e1d95..bc5589a645 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -10,7 +10,7 @@ pub(crate) mod type_alias;
pub(crate) mod union_literal;
pub(crate) mod variant;
-use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type, sym};
+use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
use ide_db::text_edit::TextEdit;
use ide_db::{
RootDatabase, SnippetCap, SymbolKind,
@@ -91,8 +91,7 @@ impl<'a> RenderContext<'a> {
}
fn is_deprecated(&self, def: impl HasAttrs) -> bool {
- let attrs = def.attrs(self.db());
- attrs.by_key(sym::deprecated).exists()
+ def.attrs(self.db()).is_deprecated()
}
fn is_deprecated_assoc_item(&self, as_assoc_item: impl AsAssocItem) -> bool {
@@ -115,7 +114,7 @@ impl<'a> RenderContext<'a> {
}
// FIXME: remove this
- fn docs(&self, def: impl HasDocs) -> Option<Documentation> {
+ fn docs(&self, def: impl HasDocs) -> Option<Documentation<'a>> {
def.docs(self.db())
}
}
@@ -320,7 +319,9 @@ pub(crate) fn render_expr(
);
let edit = TextEdit::replace(source_range, snippet);
item.snippet_edit(ctx.config.snippet_cap?, edit);
- item.documentation(Documentation::new(String::from("Autogenerated expression by term search")));
+ item.documentation(Documentation::new_owned(String::from(
+ "Autogenerated expression by term search",
+ )));
item.set_relevance(crate::CompletionRelevance {
type_match: compute_type_match(ctx, &expr.ty(ctx.db)),
..Default::default()
@@ -553,7 +554,7 @@ fn res_to_kind(resolution: ScopeDef) -> CompletionItemKind {
}
}
-fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<Documentation> {
+fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<Documentation<'_>> {
use hir::ModuleDef::*;
match resolution {
ScopeDef::ModuleDef(Module(it)) => it.docs(db),
diff --git a/crates/ide-completion/src/render/literal.rs b/crates/ide-completion/src/render/literal.rs
index 6c89e49f94..8b14f05b72 100644
--- a/crates/ide-completion/src/render/literal.rs
+++ b/crates/ide-completion/src/render/literal.rs
@@ -180,7 +180,7 @@ impl Variant {
}
}
- fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation<'_>> {
match self {
Variant::Struct(it) => it.docs(db),
Variant::EnumVariant(it) => it.docs(db),
diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs
index 312d3bd426..60474a31b4 100644
--- a/crates/ide-completion/src/render/pattern.rs
+++ b/crates/ide-completion/src/render/pattern.rs
@@ -108,7 +108,7 @@ fn build_completion(
label: SmolStr,
lookup: SmolStr,
pat: String,
- def: impl HasDocs + Copy,
+ def: impl HasDocs,
adt_ty: hir::Type<'_>,
// Missing in context of match statement completions
is_variant_missing: bool,
diff --git a/crates/ide-completion/src/render/variant.rs b/crates/ide-completion/src/render/variant.rs
index 37d0fa18c4..cfd6340f1e 100644
--- a/crates/ide-completion/src/render/variant.rs
+++ b/crates/ide-completion/src/render/variant.rs
@@ -1,7 +1,7 @@
//! Code common to structs, unions, and enum variants.
use crate::context::CompletionContext;
-use hir::{HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind, sym};
+use hir::{HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind};
use ide_db::SnippetCap;
use itertools::Itertools;
use syntax::SmolStr;
@@ -105,8 +105,8 @@ pub(crate) fn visible_fields(
.copied()
.collect::<Vec<_>>();
let has_invisible_field = n_fields - fields.len() > 0;
- let is_foreign_non_exhaustive = item.attrs(ctx.db).by_key(sym::non_exhaustive).exists()
- && item.krate(ctx.db) != module.krate();
+ let is_foreign_non_exhaustive =
+ item.attrs(ctx.db).is_non_exhaustive() && item.krate(ctx.db) != module.krate();
let fields_omitted = has_invisible_field || is_foreign_non_exhaustive;
Some((fields, fields_omitted))
}
diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs
index 83606d21f5..cb1adfcfb6 100644
--- a/crates/ide-completion/src/tests.rs
+++ b/crates/ide-completion/src/tests.rs
@@ -160,12 +160,12 @@ pub(crate) fn position(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (RootDatabase, FilePosition) {
let mut database = RootDatabase::default();
- let change_fixture = ChangeFixture::parse(&database, ra_fixture);
+ let change_fixture = ChangeFixture::parse(ra_fixture);
database.enable_proc_attr_macros();
database.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
- let position = FilePosition { file_id: file_id.file_id(&database), offset };
+ let position = FilePosition { file_id: file_id.file_id(), offset };
(database, position)
}
diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs
index 155f0b5a98..94530bf51d 100644
--- a/crates/ide-completion/src/tests/flyimport.rs
+++ b/crates/ide-completion/src/tests/flyimport.rs
@@ -781,8 +781,8 @@ fn main() {
"#,
expect![[r#"
me random_method(…) (use dep::test_mod::TestTrait) fn(&self) DEPRECATED
- ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
+ ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
"#]],
);
}
diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs
index c051fd863d..9ce85b2bf3 100644
--- a/crates/ide-db/src/defs.rs
+++ b/crates/ide-db/src/defs.rs
@@ -5,8 +5,10 @@
// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
+use std::borrow::Cow;
+
use crate::RootDatabase;
-use crate::documentation::{DocsRangeMap, Documentation, HasDocs};
+use crate::documentation::{Documentation, HasDocs};
use crate::famous_defs::FamousDefs;
use arrayvec::ArrayVec;
use either::Either;
@@ -21,7 +23,7 @@ use hir::{
use span::Edition;
use stdx::{format_to, impl_from};
use syntax::{
- SyntaxKind, SyntaxNode, SyntaxToken, TextSize,
+ SyntaxKind, SyntaxNode, SyntaxToken,
ast::{self, AstNode},
match_ast,
};
@@ -199,21 +201,25 @@ impl Definition {
Some(name)
}
- pub fn docs(
+ pub fn docs<'db>(
&self,
- db: &RootDatabase,
+ db: &'db RootDatabase,
famous_defs: Option<&FamousDefs<'_, '_>>,
display_target: DisplayTarget,
- ) -> Option<Documentation> {
- self.docs_with_rangemap(db, famous_defs, display_target).map(|(docs, _)| docs)
+ ) -> Option<Documentation<'db>> {
+ self.docs_with_rangemap(db, famous_defs, display_target).map(|docs| match docs {
+ Either::Left(Cow::Borrowed(docs)) => Documentation::new_borrowed(docs.docs()),
+ Either::Left(Cow::Owned(docs)) => Documentation::new_owned(docs.into_docs()),
+ Either::Right(docs) => docs,
+ })
}
- pub fn docs_with_rangemap(
+ pub fn docs_with_rangemap<'db>(
&self,
- db: &RootDatabase,
+ db: &'db RootDatabase,
famous_defs: Option<&FamousDefs<'_, '_>>,
display_target: DisplayTarget,
- ) -> Option<(Documentation, Option<DocsRangeMap>)> {
+ ) -> Option<Either<Cow<'db, hir::Docs>, Documentation<'db>>> {
let docs = match self {
Definition::Macro(it) => it.docs_with_rangemap(db),
Definition::Field(it) => it.docs_with_rangemap(db),
@@ -229,15 +235,13 @@ impl Definition {
it.docs_with_rangemap(db).or_else(|| {
// docs are missing, try to fall back to the docs of the aliased item.
let adt = it.ty(db).as_adt()?;
- let (docs, range_map) = adt.docs_with_rangemap(db)?;
+ let mut docs = adt.docs_with_rangemap(db)?.into_owned();
let header_docs = format!(
"*This is the documentation for* `{}`\n\n",
adt.display(db, display_target)
);
- let offset = TextSize::new(header_docs.len() as u32);
- let range_map = range_map.shift_docstring_line_range(offset);
- let docs = header_docs + docs.as_str();
- Some((Documentation::new(docs), range_map))
+ docs.prepend_str(&header_docs);
+ Some(Cow::Owned(docs))
})
}
Definition::BuiltinType(it) => {
@@ -246,7 +250,7 @@ impl Definition {
let primitive_mod =
format!("prim_{}", it.name().display(fd.0.db, display_target.edition));
let doc_owner = find_std_module(fd, &primitive_mod, display_target.edition)?;
- doc_owner.docs_with_rangemap(fd.0.db)
+ doc_owner.docs_with_rangemap(db)
})
}
Definition::BuiltinLifetime(StaticLifetime) => None,
@@ -282,7 +286,7 @@ impl Definition {
);
}
- return Some((Documentation::new(docs.replace('*', "\\*")), None));
+ return Some(Either::Right(Documentation::new_owned(docs.replace('*', "\\*"))));
}
Definition::ToolModule(_) => None,
Definition::DeriveHelper(_) => None,
@@ -299,7 +303,7 @@ impl Definition {
let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?;
item.docs_with_rangemap(db)
})
- .map(|(docs, range_map)| (docs, Some(range_map)))
+ .map(Either::Left)
}
pub fn label(&self, db: &RootDatabase, display_target: DisplayTarget) -> String {
diff --git a/crates/ide-db/src/documentation.rs b/crates/ide-db/src/documentation.rs
index cab19aadfd..4c4691cca2 100644
--- a/crates/ide-db/src/documentation.rs
+++ b/crates/ide-db/src/documentation.rs
@@ -1,337 +1,100 @@
//! Documentation attribute related utilities.
-use either::Either;
-use hir::{
- AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile,
- db::{DefDatabase, HirDatabase},
- resolve_doc_path_on, sym,
-};
-use itertools::Itertools;
-use span::{TextRange, TextSize};
-use syntax::{
- AstToken,
- ast::{self, IsString},
-};
+use std::borrow::Cow;
+
+use hir::{HasAttrs, db::HirDatabase, resolve_doc_path_on};
/// Holds documentation
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Documentation(String);
+pub struct Documentation<'db>(Cow<'db, str>);
+
+impl<'db> Documentation<'db> {
+ #[inline]
+ pub fn new_owned(s: String) -> Self {
+ Documentation(Cow::Owned(s))
+ }
-impl Documentation {
- pub fn new(s: String) -> Self {
- Documentation(s)
+ #[inline]
+ pub fn new_borrowed(s: &'db str) -> Self {
+ Documentation(Cow::Borrowed(s))
}
+ #[inline]
+ pub fn into_owned(self) -> Documentation<'static> {
+ Documentation::new_owned(self.0.into_owned())
+ }
+
+ #[inline]
pub fn as_str(&self) -> &str {
&self.0
}
}
-impl From<Documentation> for String {
- fn from(Documentation(string): Documentation) -> Self {
- string
+pub trait HasDocs: HasAttrs + Copy {
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation<'_>> {
+ let docs = match self.docs_with_rangemap(db)? {
+ Cow::Borrowed(docs) => Documentation::new_borrowed(docs.docs()),
+ Cow::Owned(docs) => Documentation::new_owned(docs.into_docs()),
+ };
+ Some(docs)
+ }
+ fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<Cow<'_, hir::Docs>> {
+ self.hir_docs(db).map(Cow::Borrowed)
}
-}
-
-pub trait HasDocs: HasAttrs {
- fn docs(self, db: &dyn HirDatabase) -> Option<Documentation>;
- fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)>;
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<hir::Namespace>,
- is_inner_doc: bool,
- ) -> Option<hir::DocLinkDef>;
-}
-/// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree.
-#[derive(Debug)]
-pub struct DocsRangeMap {
- source_map: AttrSourceMap,
- // (docstring-line-range, attr_index, attr-string-range)
- // a mapping from the text range of a line of the [`Documentation`] to the attribute index and
- // the original (untrimmed) syntax doc line
- mapping: Vec<(TextRange, AttrId, TextRange)>,
-}
-
-impl DocsRangeMap {
- /// Maps a [`TextRange`] relative to the documentation string back to its AST range
- pub fn map(&self, range: TextRange) -> Option<(InFile<TextRange>, AttrId)> {
- let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?;
- let (line_docs_range, idx, original_line_src_range) = self.mapping[found];
- if !line_docs_range.contains_range(range) {
- return None;
- }
-
- let relative_range = range - line_docs_range.start();
-
- let InFile { file_id, value: source } = self.source_map.source_of_id(idx);
- match source {
- Either::Left(attr) => {
- let string = get_doc_string_in_attr(attr)?;
- let text_range = string.open_quote_text_range()?;
- let range = TextRange::at(
- text_range.end() + original_line_src_range.start() + relative_range.start(),
- string.syntax().text_range().len().min(range.len()),
- );
- Some((InFile { file_id, value: range }, idx))
- }
- Either::Right(comment) => {
- let text_range = comment.syntax().text_range();
- let range = TextRange::at(
- text_range.start()
- + TextSize::try_from(comment.prefix().len()).ok()?
- + original_line_src_range.start()
- + relative_range.start(),
- text_range.len().min(range.len()),
- );
- Some((InFile { file_id, value: range }, idx))
- }
- }
- }
-
- pub fn shift_docstring_line_range(self, offset: TextSize) -> DocsRangeMap {
- let mapping = self
- .mapping
- .into_iter()
- .map(|(buf_offset, id, base_offset)| {
- let buf_offset = buf_offset.checked_add(offset).unwrap();
- (buf_offset, id, base_offset)
- })
- .collect_vec();
- DocsRangeMap { source_map: self.source_map, mapping }
- }
-}
-
-pub fn docs_with_rangemap(
- db: &dyn DefDatabase,
- attrs: &AttrsWithOwner,
-) -> Option<(Documentation, DocsRangeMap)> {
- let docs = attrs
- .by_key(sym::doc)
- .attrs()
- .filter_map(|attr| attr.string_value_unescape().map(|s| (s, attr.id)));
- let indent = doc_indent(attrs);
- let mut buf = String::new();
- let mut mapping = Vec::new();
- for (doc, idx) in docs {
- if !doc.is_empty() {
- let mut base_offset = 0;
- for raw_line in doc.split('\n') {
- let line = raw_line.trim_end();
- let line_len = line.len();
- let (offset, line) = match line.char_indices().nth(indent) {
- Some((offset, _)) => (offset, &line[offset..]),
- None => (0, line),
- };
- let buf_offset = buf.len();
- buf.push_str(line);
- mapping.push((
- TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?),
- idx,
- TextRange::at(
- (base_offset + offset).try_into().ok()?,
- line_len.try_into().ok()?,
- ),
- ));
- buf.push('\n');
- base_offset += raw_line.len() + 1;
- }
- } else {
- buf.push('\n');
- }
- }
- buf.pop();
- if buf.is_empty() {
- None
- } else {
- Some((Documentation(buf), DocsRangeMap { mapping, source_map: attrs.source_map(db) }))
- }
-}
-
-pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option<String> {
- let docs = attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape());
- let indent = doc_indent(attrs);
- let mut buf = String::new();
- for doc in docs {
- // str::lines doesn't yield anything for the empty string
- if !doc.is_empty() {
- // We don't trim trailing whitespace from doc comments as multiple trailing spaces
- // indicates a hard line break in Markdown.
- let lines = doc.lines().map(|line| {
- line.char_indices().nth(indent).map_or(line, |(offset, _)| &line[offset..])
- });
-
- buf.extend(Itertools::intersperse(lines, "\n"));
- }
- buf.push('\n');
+ is_inner_doc: hir::IsInnerDoc,
+ ) -> Option<hir::DocLinkDef> {
+ resolve_doc_path_on(db, self, link, ns, is_inner_doc)
}
- buf.pop();
- if buf.is_empty() { None } else { Some(buf) }
}
macro_rules! impl_has_docs {
($($def:ident,)*) => {$(
- impl HasDocs for hir::$def {
- fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
- docs_from_attrs(&self.attrs(db)).map(Documentation)
- }
- fn docs_with_rangemap(
- self,
- db: &dyn HirDatabase,
- ) -> Option<(Documentation, DocsRangeMap)> {
- docs_with_rangemap(db, &self.attrs(db))
- }
- fn resolve_doc_path(
- self,
- db: &dyn HirDatabase,
- link: &str,
- ns: Option<hir::Namespace>,
- is_inner_doc: bool,
- ) -> Option<hir::DocLinkDef> {
- resolve_doc_path_on(db, self, link, ns, is_inner_doc)
- }
- }
+ impl HasDocs for hir::$def {}
)*};
}
impl_has_docs![
Variant, Field, Static, Const, Trait, TypeAlias, Macro, Function, Adt, Module, Impl, Crate,
+ AssocItem, Struct, Union, Enum,
];
-macro_rules! impl_has_docs_enum {
- ($($variant:ident),* for $enum:ident) => {$(
- impl HasDocs for hir::$variant {
- fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
- hir::$enum::$variant(self).docs(db)
- }
-
- fn docs_with_rangemap(
- self,
- db: &dyn HirDatabase,
- ) -> Option<(Documentation, DocsRangeMap)> {
- hir::$enum::$variant(self).docs_with_rangemap(db)
- }
- fn resolve_doc_path(
- self,
- db: &dyn HirDatabase,
- link: &str,
- ns: Option<hir::Namespace>,
- is_inner_doc: bool,
- ) -> Option<hir::DocLinkDef> {
- hir::$enum::$variant(self).resolve_doc_path(db, link, ns, is_inner_doc)
- }
- }
- )*};
-}
-
-impl_has_docs_enum![Struct, Union, Enum for Adt];
-
-impl HasDocs for hir::AssocItem {
- fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
- match self {
- hir::AssocItem::Function(it) => it.docs(db),
- hir::AssocItem::Const(it) => it.docs(db),
- hir::AssocItem::TypeAlias(it) => it.docs(db),
- }
- }
-
- fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> {
- match self {
- hir::AssocItem::Function(it) => it.docs_with_rangemap(db),
- hir::AssocItem::Const(it) => it.docs_with_rangemap(db),
- hir::AssocItem::TypeAlias(it) => it.docs_with_rangemap(db),
- }
- }
-
- fn resolve_doc_path(
- self,
- db: &dyn HirDatabase,
- link: &str,
- ns: Option<hir::Namespace>,
- is_inner_doc: bool,
- ) -> Option<hir::DocLinkDef> {
- match self {
- hir::AssocItem::Function(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
- hir::AssocItem::Const(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
- hir::AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
- }
- }
-}
-
impl HasDocs for hir::ExternCrateDecl {
- fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
- let crate_docs = docs_from_attrs(&self.resolved_crate(db)?.root_module().attrs(db));
- let decl_docs = docs_from_attrs(&self.attrs(db));
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation<'_>> {
+ let crate_docs = self.resolved_crate(db)?.hir_docs(db);
+ let decl_docs = self.hir_docs(db);
match (decl_docs, crate_docs) {
(None, None) => None,
- (Some(decl_docs), None) => Some(decl_docs),
- (None, Some(crate_docs)) => Some(crate_docs),
- (Some(mut decl_docs), Some(crate_docs)) => {
- decl_docs.push('\n');
- decl_docs.push('\n');
- decl_docs += &crate_docs;
- Some(decl_docs)
+ (Some(docs), None) | (None, Some(docs)) => {
+ Some(Documentation::new_borrowed(docs.docs()))
+ }
+ (Some(decl_docs), Some(crate_docs)) => {
+ let mut docs = String::with_capacity(
+ decl_docs.docs().len() + "\n\n".len() + crate_docs.docs().len(),
+ );
+ docs.push_str(decl_docs.docs());
+ docs.push_str("\n\n");
+ docs.push_str(crate_docs.docs());
+ Some(Documentation::new_owned(docs))
}
}
- .map(Documentation::new)
}
- fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> {
- let crate_docs = docs_with_rangemap(db, &self.resolved_crate(db)?.root_module().attrs(db));
- let decl_docs = docs_with_rangemap(db, &self.attrs(db));
+ fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<Cow<'_, hir::Docs>> {
+ let crate_docs = self.resolved_crate(db)?.hir_docs(db);
+ let decl_docs = self.hir_docs(db);
match (decl_docs, crate_docs) {
(None, None) => None,
- (Some(decl_docs), None) => Some(decl_docs),
- (None, Some(crate_docs)) => Some(crate_docs),
- (
- Some((Documentation(mut decl_docs), mut decl_range_map)),
- Some((Documentation(crate_docs), crate_range_map)),
- ) => {
- decl_docs.push('\n');
- decl_docs.push('\n');
- let offset = TextSize::new(decl_docs.len() as u32);
- decl_docs += &crate_docs;
- let crate_range_map = crate_range_map.shift_docstring_line_range(offset);
- decl_range_map.mapping.extend(crate_range_map.mapping);
- Some((Documentation(decl_docs), decl_range_map))
+ (Some(docs), None) | (None, Some(docs)) => Some(Cow::Borrowed(docs)),
+ (Some(decl_docs), Some(crate_docs)) => {
+ let mut docs = decl_docs.clone();
+ docs.append_str("\n\n");
+ docs.append(crate_docs);
+ Some(Cow::Owned(docs))
}
}
}
- fn resolve_doc_path(
- self,
- db: &dyn HirDatabase,
- link: &str,
- ns: Option<hir::Namespace>,
- is_inner_doc: bool,
- ) -> Option<hir::DocLinkDef> {
- resolve_doc_path_on(db, self, link, ns, is_inner_doc)
- }
-}
-
-fn get_doc_string_in_attr(it: &ast::Attr) -> Option<ast::String> {
- match it.expr() {
- // #[doc = lit]
- Some(ast::Expr::Literal(lit)) => match lit.kind() {
- ast::LiteralKind::String(it) => Some(it),
- _ => None,
- },
- // #[cfg_attr(..., doc = "", ...)]
- None => {
- // FIXME: See highlight injection for what to do here
- None
- }
- _ => None,
- }
-}
-
-fn doc_indent(attrs: &hir::Attrs) -> usize {
- let mut min = !0;
- for val in attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()) {
- if let Some(m) =
- val.lines().filter_map(|line| line.chars().position(|c| !c.is_whitespace())).min()
- {
- min = min.min(m);
- }
- }
- min
}
diff --git a/crates/ide-db/src/ra_fixture.rs b/crates/ide-db/src/ra_fixture.rs
index a9d596d8f5..c9a670b2d1 100644
--- a/crates/ide-db/src/ra_fixture.rs
+++ b/crates/ide-db/src/ra_fixture.rs
@@ -25,18 +25,14 @@ impl RootDatabase {
// We don't want a mistake in the fixture to crash r-a, so we wrap this in `catch_unwind()`.
std::panic::catch_unwind(|| {
let mut db = RootDatabase::default();
- let fixture = test_fixture::ChangeFixture::parse_with_proc_macros(
- &db,
- text,
- minicore.0,
- Vec::new(),
- );
+ let fixture =
+ test_fixture::ChangeFixture::parse_with_proc_macros(text, minicore.0, Vec::new());
db.apply_change(fixture.change);
let files = fixture
.files
.into_iter()
.zip(fixture.file_lines)
- .map(|(file_id, range)| (file_id.file_id(&db), range))
+ .map(|(file_id, range)| (file_id.file_id(), range))
.collect();
(db, files, fixture.sysroot_files)
})
@@ -526,7 +522,7 @@ impl_empty_upmap_from_ra_fixture!(
String,
Symbol,
SmolStr,
- Documentation,
+ Documentation<'_>,
SymbolKind,
CfgExpr,
ReferenceCategory,
diff --git a/crates/ide-db/src/rust_doc.rs b/crates/ide-db/src/rust_doc.rs
index eacd9b9b4d..36a6938af6 100644
--- a/crates/ide-db/src/rust_doc.rs
+++ b/crates/ide-db/src/rust_doc.rs
@@ -33,7 +33,7 @@ pub fn is_rust_fence(s: &str) -> bool {
const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
-pub fn format_docs(src: &Documentation) -> String {
+pub fn format_docs(src: &Documentation<'_>) -> String {
format_docs_(src.as_str())
}
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 018c841897..f52b345007 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -12,7 +12,7 @@ use either::Either;
use hir::{
Adt, AsAssocItem, DefWithBody, EditionedFileId, FileRange, FileRangeWrapper, HasAttrs,
HasContainer, HasSource, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer,
- ModuleSource, PathResolution, Semantics, Visibility, sym,
+ ModuleSource, PathResolution, Semantics, Visibility,
};
use memchr::memmem::Finder;
use parser::SyntaxKind;
@@ -169,7 +169,7 @@ impl SearchScope {
entries.extend(
source_root
.iter()
- .map(|id| (EditionedFileId::new(db, id, crate_data.edition), None)),
+ .map(|id| (EditionedFileId::new(db, id, crate_data.edition, krate), None)),
);
}
SearchScope { entries }
@@ -183,11 +183,9 @@ impl SearchScope {
let source_root = db.file_source_root(root_file).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
- entries.extend(
- source_root
- .iter()
- .map(|id| (EditionedFileId::new(db, id, rev_dep.edition(db)), None)),
- );
+ entries.extend(source_root.iter().map(|id| {
+ (EditionedFileId::new(db, id, rev_dep.edition(db), rev_dep.into()), None)
+ }));
}
SearchScope { entries }
}
@@ -201,7 +199,7 @@ impl SearchScope {
SearchScope {
entries: source_root
.iter()
- .map(|id| (EditionedFileId::new(db, id, of.edition(db)), None))
+ .map(|id| (EditionedFileId::new(db, id, of.edition(db), of.into()), None))
.collect(),
}
}
@@ -368,7 +366,7 @@ impl Definition {
if let Definition::Macro(macro_def) = self {
return match macro_def.kind(db) {
hir::MacroKind::Declarative => {
- if macro_def.attrs(db).by_key(sym::macro_export).exists() {
+ if macro_def.attrs(db).is_macro_export() {
SearchScope::reverse_dependencies(db, module.krate())
} else {
SearchScope::krate(db, module.krate())
diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt
index 30d1df4f8e..427a510559 100644
--- a/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -3,7 +3,7 @@
Module {
id: ModuleId {
krate: Crate(
- Id(3000),
+ Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(0),
@@ -16,7 +16,7 @@
Struct(
Struct {
id: StructId(
- 3401,
+ 3801,
),
},
),
@@ -24,7 +24,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -50,7 +50,7 @@
Struct(
Struct {
id: StructId(
- 3400,
+ 3800,
),
},
),
@@ -58,7 +58,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -84,7 +84,7 @@
Struct(
Struct {
id: StructId(
- 3400,
+ 3800,
),
},
),
@@ -92,7 +92,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -118,7 +118,7 @@
Struct(
Struct {
id: StructId(
- 3400,
+ 3800,
),
},
),
@@ -126,7 +126,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -152,7 +152,7 @@
Struct(
Struct {
id: StructId(
- 3400,
+ 3800,
),
},
),
@@ -160,7 +160,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -186,7 +186,7 @@
Struct(
Struct {
id: StructId(
- 3401,
+ 3801,
),
},
),
@@ -194,7 +194,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -220,7 +220,7 @@
Struct(
Struct {
id: StructId(
- 3400,
+ 3800,
),
},
),
@@ -228,7 +228,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index 5ef0ecbcf8..cc130194cb 100644
--- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -3,7 +3,7 @@
Module {
id: ModuleId {
krate: Crate(
- Id(3000),
+ Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(0),
@@ -22,7 +22,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -49,14 +49,14 @@
def: TypeAlias(
TypeAlias {
id: TypeAliasId(
- 6800,
+ 6c00,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -88,7 +88,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -115,14 +115,14 @@
def: Const(
Const {
id: ConstId(
- 6000,
+ 6400,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -147,14 +147,14 @@
def: Const(
Const {
id: ConstId(
- 6002,
+ 6402,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -180,7 +180,7 @@
Enum(
Enum {
id: EnumId(
- 4c00,
+ 5000,
),
},
),
@@ -188,7 +188,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -214,7 +214,7 @@
Macro {
id: Macro2Id(
Macro2Id(
- 4800,
+ 4c00,
),
),
},
@@ -222,7 +222,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -248,7 +248,7 @@
Macro {
id: Macro2Id(
Macro2Id(
- 4800,
+ 4c00,
),
),
},
@@ -256,7 +256,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -281,14 +281,14 @@
def: Static(
Static {
id: StaticId(
- 6400,
+ 6800,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -314,7 +314,7 @@
Struct(
Struct {
id: StructId(
- 4401,
+ 4801,
),
},
),
@@ -322,7 +322,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -348,7 +348,7 @@
Struct(
Struct {
id: StructId(
- 4400,
+ 4800,
),
},
),
@@ -356,7 +356,7 @@
loc: DeclarationLocation {
hir_file_id: MacroFile(
MacroCallId(
- Id(3c00),
+ Id(4000),
),
),
ptr: SyntaxNodePtr {
@@ -382,7 +382,7 @@
Struct(
Struct {
id: StructId(
- 4405,
+ 4805,
),
},
),
@@ -390,7 +390,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -418,7 +418,7 @@
Struct(
Struct {
id: StructId(
- 4406,
+ 4806,
),
},
),
@@ -426,7 +426,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -454,7 +454,7 @@
Struct(
Struct {
id: StructId(
- 4407,
+ 4807,
),
},
),
@@ -462,7 +462,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -488,7 +488,7 @@
Struct(
Struct {
id: StructId(
- 4402,
+ 4802,
),
},
),
@@ -496,7 +496,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -521,14 +521,14 @@
def: Trait(
Trait {
id: TraitId(
- 5800,
+ 5c00,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -554,7 +554,7 @@
Macro {
id: Macro2Id(
Macro2Id(
- 4800,
+ 4c00,
),
),
},
@@ -562,7 +562,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -588,7 +588,7 @@
Union(
Union {
id: UnionId(
- 5000,
+ 5400,
),
},
),
@@ -596,7 +596,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -622,7 +622,7 @@
Module {
id: ModuleId {
krate: Crate(
- Id(3000),
+ Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(1),
@@ -632,7 +632,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -658,7 +658,7 @@
Module {
id: ModuleId {
krate: Crate(
- Id(3000),
+ Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(2),
@@ -668,7 +668,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -694,7 +694,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
- 3801,
+ 3c01,
),
),
},
@@ -702,7 +702,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -727,14 +727,14 @@
def: Function(
Function {
id: FunctionId(
- 5c02,
+ 6002,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -761,14 +761,14 @@
def: Function(
Function {
id: FunctionId(
- 5c01,
+ 6001,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -796,7 +796,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
- 3800,
+ 3c00,
),
),
},
@@ -804,7 +804,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -829,14 +829,14 @@
def: Function(
Function {
id: FunctionId(
- 5c00,
+ 6000,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -862,7 +862,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
- 3801,
+ 3c01,
),
),
},
@@ -870,7 +870,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -895,14 +895,14 @@
def: Function(
Function {
id: FunctionId(
- 5c03,
+ 6003,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -930,7 +930,7 @@
Module {
id: ModuleId {
krate: Crate(
- Id(3000),
+ Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(1),
@@ -943,7 +943,7 @@
Struct(
Struct {
id: StructId(
- 4403,
+ 4803,
),
},
),
@@ -951,7 +951,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
@@ -977,7 +977,7 @@
Module {
id: ModuleId {
krate: Crate(
- Id(3000),
+ Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(2),
@@ -989,14 +989,14 @@
def: Trait(
Trait {
id: TraitId(
- 5800,
+ 5c00,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2001),
+ Id(3001),
),
),
ptr: SyntaxNodePtr {
@@ -1022,7 +1022,7 @@
Macro {
id: Macro2Id(
Macro2Id(
- 4800,
+ 4c00,
),
),
},
@@ -1030,7 +1030,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2001),
+ Id(3001),
),
),
ptr: SyntaxNodePtr {
@@ -1056,7 +1056,7 @@
Struct(
Struct {
id: StructId(
- 4404,
+ 4804,
),
},
),
@@ -1064,7 +1064,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2001),
+ Id(3001),
),
),
ptr: SyntaxNodePtr {
@@ -1090,7 +1090,7 @@
Macro {
id: Macro2Id(
Macro2Id(
- 4800,
+ 4c00,
),
),
},
@@ -1098,7 +1098,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2001),
+ Id(3001),
),
),
ptr: SyntaxNodePtr {
@@ -1124,7 +1124,7 @@
Struct(
Struct {
id: StructId(
- 4404,
+ 4804,
),
},
),
@@ -1132,7 +1132,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2001),
+ Id(3001),
),
),
ptr: SyntaxNodePtr {
diff --git a/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt b/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
index 22872b577f..3ab837aa61 100644
--- a/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
+++ b/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
@@ -13,7 +13,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2001),
+ Id(3001),
),
),
ptr: SyntaxNodePtr {
diff --git a/crates/ide-db/src/test_data/test_symbols_with_imports.txt b/crates/ide-db/src/test_data/test_symbols_with_imports.txt
index 9f98bf87e2..a6a808d616 100644
--- a/crates/ide-db/src/test_data/test_symbols_with_imports.txt
+++ b/crates/ide-db/src/test_data/test_symbols_with_imports.txt
@@ -13,7 +13,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2001),
+ Id(3001),
),
),
ptr: SyntaxNodePtr {
@@ -47,7 +47,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
- Id(2000),
+ Id(3000),
),
),
ptr: SyntaxNodePtr {
diff --git a/crates/ide-db/src/traits.rs b/crates/ide-db/src/traits.rs
index 61e28386d0..7b9fdb1e1c 100644
--- a/crates/ide-db/src/traits.rs
+++ b/crates/ide-db/src/traits.rs
@@ -114,8 +114,7 @@ fn assoc_item_of_trait(
#[cfg(test)]
mod tests {
use expect_test::{Expect, expect};
- use hir::FilePosition;
- use hir::Semantics;
+ use hir::{EditionedFileId, FilePosition, Semantics};
use span::Edition;
use syntax::ast::{self, AstNode};
use test_fixture::ChangeFixture;
@@ -127,10 +126,11 @@ mod tests {
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (RootDatabase, FilePosition) {
let mut database = RootDatabase::default();
- let change_fixture = ChangeFixture::parse(&database, ra_fixture);
+ let change_fixture = ChangeFixture::parse(ra_fixture);
database.apply_change(change_fixture.change);
let (file_id, range_or_offset) =
change_fixture.file_position.expect("expected a marker ($0)");
+ let file_id = EditionedFileId::from_span_guess_origin(&database, file_id);
let offset = range_or_offset.expect_offset();
(database, FilePosition { file_id, offset })
}
diff --git a/crates/ide-diagnostics/src/handlers/inactive_code.rs b/crates/ide-diagnostics/src/handlers/inactive_code.rs
index 8611ef653b..dfa9639f6e 100644
--- a/crates/ide-diagnostics/src/handlers/inactive_code.rs
+++ b/crates/ide-diagnostics/src/handlers/inactive_code.rs
@@ -95,7 +95,7 @@ fn f() {
//^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
#[cfg(no)] #[cfg(no2)] mod m;
- //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no and no2 are disabled
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
#[cfg(all(not(a), b))] enum E {}
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: b is disabled
@@ -130,7 +130,6 @@ trait Bar {
/// Tests that `cfg` attributes behind `cfg_attr` is handled properly.
#[test]
fn inactive_via_cfg_attr() {
- cov_mark::check!(cfg_attr_active);
check(
r#"
#[cfg_attr(not(never), cfg(no))] fn f() {}
diff --git a/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
index 8b708f229d..9aa7aed169 100644
--- a/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
+++ b/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
@@ -8,7 +8,7 @@ pub(crate) fn invalid_derive_target(
ctx: &DiagnosticsContext<'_>,
d: &hir::InvalidDeriveTarget,
) -> Diagnostic {
- let display_range = ctx.sema.diagnostics_display_range(d.node);
+ let display_range = ctx.sema.diagnostics_display_range_for_range(d.range);
Diagnostic::new(
DiagnosticCode::RustcHardError("E0774"),
@@ -29,7 +29,7 @@ mod tests {
//- minicore:derive
mod __ {
#[derive()]
- //^^^^^^^^^^^ error: `derive` may only be applied to `struct`s, `enum`s and `union`s
+ // ^^^^^^ error: `derive` may only be applied to `struct`s, `enum`s and `union`s
fn main() {}
}
"#,
diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs
index 6a1ecae651..a44b043f43 100644
--- a/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -13,7 +13,7 @@ use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
// This diagnostic is shown for proc macros that have been specifically disabled via `rust-analyzer.procMacro.ignored`.
pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> Diagnostic {
// Use more accurate position if available.
- let display_range = ctx.resolve_precise_location(&d.node, d.precise_location);
+ let display_range = ctx.sema.diagnostics_display_range_for_range(d.range);
Diagnostic::new(
DiagnosticCode::Ra(d.kind, if d.error { Severity::Error } else { Severity::WeakWarning }),
d.message.clone(),
@@ -27,8 +27,10 @@ pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) ->
// This diagnostic is shown for macro expansion errors.
pub(crate) fn macro_def_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroDefError) -> Diagnostic {
// Use more accurate position if available.
- let display_range =
- ctx.resolve_precise_location(&d.node.map(|it| it.syntax_node_ptr()), d.name);
+ let display_range = match d.name {
+ Some(name) => ctx.sema.diagnostics_display_range_for_range(d.node.with_value(name)),
+ None => ctx.sema.diagnostics_display_range(d.node.map(|it| it.syntax_node_ptr())),
+ };
Diagnostic::new(
DiagnosticCode::Ra("macro-def-error", Severity::Error),
d.message.clone(),
@@ -135,10 +137,12 @@ macro_rules! env { () => {} }
#[rustc_builtin_macro]
macro_rules! concat { () => {} }
- include!(concat!(env!("OUT_DIR"), "/out.rs"));
- //^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
- //^^^^^^^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
- //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
+ include!(concat!(
+ // ^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
+ env!(
+ //^^^ error: `OUT_DIR` not set, build scripts may have failed to run
+ "OUT_DIR"), "/out.rs"));
+ //^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
"#,
);
}
@@ -182,7 +186,7 @@ fn main() {
//^^^^^^^^^^^^^^^^ error: failed to load file `does not exist`
include!(concat!("does ", "not ", "exist"));
- //^^^^^^^^^^^^^^^^^^^^^^^^^^ error: failed to load file `does not exist`
+ // ^^^^^^^^^^^^^^^^^^^^^^^^ error: failed to load file `does not exist`
env!(invalid);
//^^^^^^^ error: expected string literal
@@ -289,7 +293,7 @@ include!("include-me.rs");
//- /include-me.rs
/// long doc that pushes the diagnostic range beyond the first file's text length
#[err]
-//^^^^^^error: unresolved macro `err`
+ // ^^^ error: unresolved macro `err`
mod prim_never {}
"#,
);
diff --git a/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/crates/ide-diagnostics/src/handlers/malformed_derive.rs
index 701b30b9b5..7d0c71f4fa 100644
--- a/crates/ide-diagnostics/src/handlers/malformed_derive.rs
+++ b/crates/ide-diagnostics/src/handlers/malformed_derive.rs
@@ -7,7 +7,7 @@ pub(crate) fn malformed_derive(
ctx: &DiagnosticsContext<'_>,
d: &hir::MalformedDerive,
) -> Diagnostic {
- let display_range = ctx.sema.diagnostics_display_range(d.node);
+ let display_range = ctx.sema.diagnostics_display_range_for_range(d.range);
Diagnostic::new(
DiagnosticCode::RustcHardError("E0777"),
@@ -28,7 +28,7 @@ mod tests {
//- minicore:derive
mod __ {
#[derive = "aaaa"]
- //^^^^^^^^^^^^^^^^^^ error: malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]`
+ // ^^^^^^^^^^^^^^^ error: malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]`
struct Foo;
}
"#,
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
index a87b8c42ac..030c82ca0b 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
@@ -8,8 +8,7 @@ pub(crate) fn unresolved_macro_call(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedMacroCall,
) -> Diagnostic {
- // Use more accurate position if available.
- let display_range = ctx.resolve_precise_location(&d.macro_call, d.precise_location);
+ let display_range = ctx.sema.diagnostics_display_range_for_range(d.range);
let bang = if d.is_bang { "!" } else { "" };
Diagnostic::new(
DiagnosticCode::RustcHardError("unresolved-macro-call"),
@@ -76,7 +75,7 @@ self::m!(); self::m2!();
r#"
mod _test_inner {
#![empty_attr]
- //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr`
+ // ^^^^^^^^^^ error: unresolved macro `empty_attr`
}
"#,
);
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index 1530e64652..5c8f030de4 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -102,7 +102,7 @@ use ide_db::{
use itertools::Itertools;
use syntax::{
AstPtr, Edition, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, T, TextRange,
- ast::{self, AstNode, HasAttrs},
+ ast::{self, AstNode},
};
// FIXME: Make this an enum
@@ -277,31 +277,6 @@ struct DiagnosticsContext<'a> {
is_nightly: bool,
}
-impl DiagnosticsContext<'_> {
- fn resolve_precise_location(
- &self,
- node: &InFile<SyntaxNodePtr>,
- precise_location: Option<TextRange>,
- ) -> FileRange {
- let sema = &self.sema;
- (|| {
- let precise_location = precise_location?;
- let root = sema.parse_or_expand(node.file_id);
- match root.covering_element(precise_location) {
- syntax::NodeOrToken::Node(it) => Some(sema.original_range(&it)),
- syntax::NodeOrToken::Token(it) => {
- node.with_value(it).original_file_range_opt(sema.db)
- }
- }
- })()
- .map(|frange| ide_db::FileRange {
- file_id: frange.file_id.file_id(self.sema.db),
- range: frange.range,
- })
- .unwrap_or_else(|| sema.diagnostics_display_range(*node))
- }
-}
-
/// Request parser level diagnostics for the given [`FileId`].
pub fn syntax_diagnostics(
db: &RootDatabase,
@@ -317,7 +292,7 @@ pub fn syntax_diagnostics(
let sema = Semantics::new(db);
let editioned_file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+ .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
let (file_id, _) = editioned_file_id.unpack(db);
@@ -348,7 +323,7 @@ pub fn semantic_diagnostics(
let sema = Semantics::new(db);
let editioned_file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+ .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
let (file_id, edition) = editioned_file_id.unpack(db);
let mut res = Vec::new();
@@ -426,7 +401,7 @@ pub fn semantic_diagnostics(
Diagnostic::new(
DiagnosticCode::SyntaxError,
format!("Syntax Error in Expansion: {err}"),
- ctx.resolve_precise_location(&d.node.clone(), d.precise_location),
+ ctx.sema.diagnostics_display_range_for_range(d.range),
)
}));
continue;
@@ -677,7 +652,7 @@ fn find_outline_mod_lint_severity(
let lint_groups = lint_groups(&diag.code, edition);
lint_attrs(
sema,
- ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"),
+ &ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"),
edition,
)
.for_each(|(lint, severity)| {
@@ -698,7 +673,7 @@ fn lint_severity_at(
.ancestors()
.filter_map(ast::AnyHasAttrs::cast)
.find_map(|ancestor| {
- lint_attrs(sema, ancestor, edition)
+ lint_attrs(sema, &ancestor, edition)
.find_map(|(lint, severity)| lint_groups.contains(&lint).then_some(severity))
})
.or_else(|| {
@@ -706,13 +681,13 @@ fn lint_severity_at(
})
}
+// FIXME: Switch this to analysis' `expand_cfg_attr`.
fn lint_attrs<'a>(
sema: &'a Semantics<'a, RootDatabase>,
- ancestor: ast::AnyHasAttrs,
+ ancestor: &'a ast::AnyHasAttrs,
edition: Edition,
) -> impl Iterator<Item = (SmolStr, Severity)> + 'a {
- ancestor
- .attrs_including_inner()
+ ast::attrs_including_inner(ancestor)
.filter_map(|attr| {
attr.as_simple_call().and_then(|(name, value)| match &*name {
"allow" | "expect" => Some(Either::Left(iter::once((Severity::Allow, value)))),
diff --git a/crates/ide-ssr/src/from_comment.rs b/crates/ide-ssr/src/from_comment.rs
index 181cc74a51..de26879c29 100644
--- a/crates/ide-ssr/src/from_comment.rs
+++ b/crates/ide-ssr/src/from_comment.rs
@@ -17,7 +17,7 @@ pub fn ssr_from_comment(
frange: FileRange,
) -> Option<(MatchFinder<'_>, TextRange)> {
let comment = {
- let file_id = EditionedFileId::current_edition(db, frange.file_id);
+ let file_id = EditionedFileId::current_edition_guess_origin(db, frange.file_id);
let file = db.parse(file_id);
file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast)
diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs
index 66ece4e4f0..7086e978dd 100644
--- a/crates/ide-ssr/src/lib.rs
+++ b/crates/ide-ssr/src/lib.rs
@@ -125,9 +125,9 @@ impl<'db> MatchFinder<'db> {
) -> Result<MatchFinder<'db>, SsrError> {
restrict_ranges.retain(|range| !range.range.is_empty());
let sema = Semantics::new(db);
- let file_id = sema
- .attach_first_edition(lookup_context.file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(db, lookup_context.file_id));
+ let file_id = sema.attach_first_edition(lookup_context.file_id).unwrap_or_else(|| {
+ EditionedFileId::current_edition_guess_origin(db, lookup_context.file_id)
+ });
let resolution_scope = resolving::ResolutionScope::new(
&sema,
hir::FilePosition { file_id, offset: lookup_context.offset },
diff --git a/crates/ide-ssr/src/search.rs b/crates/ide-ssr/src/search.rs
index 56484ae7a6..5700428b81 100644
--- a/crates/ide-ssr/src/search.rs
+++ b/crates/ide-ssr/src/search.rs
@@ -136,11 +136,9 @@ impl<'db> MatchFinder<'db> {
// seems to get put into a single source root.
let mut files = Vec::new();
self.search_files_do(|file_id| {
- files.push(
- self.sema
- .attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(self.sema.db, file_id)),
- );
+ files.push(self.sema.attach_first_edition(file_id).unwrap_or_else(|| {
+ EditionedFileId::current_edition_guess_origin(self.sema.db, file_id)
+ }));
});
SearchScope::files(&files)
}
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index c197d559aa..0ed91cf7f5 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -13,13 +13,13 @@ use stdx::format_to;
use url::Url;
use hir::{
- Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrsWithOwner, HasAttrs, db::HirDatabase, sym,
+ Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrsWithOwner, HasAttrs, db::HirDatabase,
};
use ide_db::{
RootDatabase,
base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, RootQueryDb},
defs::{Definition, NameClass, NameRefClass},
- documentation::{DocsRangeMap, Documentation, HasDocs, docs_with_rangemap},
+ documentation::{Documentation, HasDocs},
helpers::pick_best_token,
};
use syntax::{
@@ -54,7 +54,7 @@ pub(crate) fn rewrite_links(
db: &RootDatabase,
markdown: &str,
definition: Definition,
- range_map: Option<DocsRangeMap>,
+ range_map: Option<&hir::Docs>,
) -> String {
let mut cb = broken_link_clone_cb;
let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb))
@@ -74,9 +74,9 @@ pub(crate) fn rewrite_links(
TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
let is_inner_doc = range_map
.as_ref()
- .and_then(|range_map| range_map.map(text_range))
- .map(|(_, attr_id)| attr_id.is_inner_attr())
- .unwrap_or(false);
+ .and_then(|range_map| range_map.find_ast_range(text_range))
+ .map(|(_, is_inner)| is_inner)
+ .unwrap_or(hir::IsInnerDoc::No);
if let Some((target, title)) =
rewrite_intra_doc_link(db, definition, target, title, is_inner_doc, link_type)
{
@@ -187,7 +187,7 @@ pub(crate) fn external_docs(
/// Extracts all links from a given markdown text returning the definition text range, link-text
/// and the namespace if known.
pub(crate) fn extract_definitions_from_docs(
- docs: &Documentation,
+ docs: &Documentation<'_>,
) -> Vec<(TextRange, String, Option<hir::Namespace>)> {
Parser::new_with_broken_link_callback(
docs.as_str(),
@@ -214,7 +214,7 @@ pub(crate) fn resolve_doc_path_for_def(
def: Definition,
link: &str,
ns: Option<hir::Namespace>,
- is_inner_doc: bool,
+ is_inner_doc: hir::IsInnerDoc,
) -> Option<Definition> {
match def {
Definition::Module(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
@@ -324,11 +324,11 @@ impl DocCommentToken {
let token_start = t.text_range().start();
let abs_in_expansion_offset = token_start + relative_comment_offset + descended_prefix_len;
let (attributes, def) = Self::doc_attributes(sema, &node, is_inner)?;
- let (docs, doc_mapping) = docs_with_rangemap(sema.db, &attributes)?;
+ let doc_mapping = attributes.hir_docs(sema.db)?;
let (in_expansion_range, link, ns, is_inner) =
- extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| {
- let (mapped, idx) = doc_mapping.map(range)?;
- (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns, idx.is_inner_attr()))
+ extract_definitions_from_docs(&Documentation::new_borrowed(doc_mapping.docs())).into_iter().find_map(|(range, link, ns)| {
+ let (mapped, is_inner) = doc_mapping.find_ast_range(range)?;
+ (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns, is_inner))
})?;
// get the relative range to the doc/attribute in the expansion
let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start;
@@ -416,7 +416,7 @@ fn rewrite_intra_doc_link(
def: Definition,
target: &str,
title: &str,
- is_inner_doc: bool,
+ is_inner_doc: hir::IsInnerDoc,
link_type: LinkType,
) -> Option<(String, String)> {
let (link, ns) = parse_intra_doc_link(target);
@@ -659,14 +659,12 @@ fn filename_and_frag_for_def(
Definition::Crate(_) => String::from("index.html"),
Definition::Module(m) => match m.name(db) {
// `#[doc(keyword = "...")]` is internal used only by rust compiler
- Some(name) => {
- match m.attrs(db).by_key(sym::doc).find_string_value_in_tt(sym::keyword) {
- Some(kw) => {
- format!("keyword.{kw}.html")
- }
- None => format!("{}/index.html", name.as_str()),
+ Some(name) => match m.doc_keyword(db) {
+ Some(kw) => {
+ format!("keyword.{kw}.html")
}
- }
+ None => format!("{}/index.html", name.as_str()),
+ },
None => String::from("index.html"),
},
Definition::Trait(t) => {
diff --git a/crates/ide/src/doc_links/tests.rs b/crates/ide/src/doc_links/tests.rs
index 3fd885535a..34ffc11c4b 100644
--- a/crates/ide/src/doc_links/tests.rs
+++ b/crates/ide/src/doc_links/tests.rs
@@ -1,11 +1,11 @@
-use std::iter;
+use std::{borrow::Cow, iter};
use expect_test::{Expect, expect};
use hir::Semantics;
use ide_db::{
FilePosition, FileRange, RootDatabase,
defs::Definition,
- documentation::{DocsRangeMap, Documentation, HasDocs},
+ documentation::{Documentation, HasDocs},
};
use itertools::Itertools;
use syntax::{AstNode, SyntaxNode, ast, match_ast};
@@ -45,9 +45,9 @@ fn check_external_docs(
fn check_rewrite(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let (analysis, position) = fixture::position(ra_fixture);
let sema = &Semantics::new(&analysis.db);
- let (cursor_def, docs, range) = def_under_cursor(sema, &position);
+ let (cursor_def, docs) = def_under_cursor(sema, &position);
let res =
- hir::attach_db(sema.db, || rewrite_links(sema.db, docs.as_str(), cursor_def, Some(range)));
+ hir::attach_db(sema.db, || rewrite_links(sema.db, docs.docs(), cursor_def, Some(&docs)));
expect.assert_eq(&res)
}
@@ -57,33 +57,36 @@ fn check_doc_links(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
let (analysis, position, mut expected) = fixture::annotations(ra_fixture);
expected.sort_by_key(key_fn);
let sema = &Semantics::new(&analysis.db);
- let (cursor_def, docs, range) = def_under_cursor(sema, &position);
- let defs = extract_definitions_from_docs(&docs);
- let actual: Vec<_> = defs
- .into_iter()
- .flat_map(|(text_range, link, ns)| {
- let attr = range.map(text_range);
- let is_inner_attr = attr.map(|(_file, attr)| attr.is_inner_attr()).unwrap_or(false);
- let def = hir::attach_db(sema.db, || {
- resolve_doc_path_for_def(sema.db, cursor_def, &link, ns, is_inner_attr)
- .unwrap_or_else(|| panic!("Failed to resolve {link}"))
- });
- def.try_to_nav(sema).unwrap().into_iter().zip(iter::repeat(link))
- })
- .map(|(nav_target, link)| {
- let range =
- FileRange { file_id: nav_target.file_id, range: nav_target.focus_or_full_range() };
- (range, link)
- })
- .sorted_by_key(key_fn)
- .collect();
- assert_eq!(expected, actual);
-}
-
-fn def_under_cursor(
- sema: &Semantics<'_, RootDatabase>,
+ hir::attach_db(sema.db, || {
+ let (cursor_def, docs) = def_under_cursor(sema, &position);
+ let defs = extract_definitions_from_docs(&Documentation::new_borrowed(docs.docs()));
+ let actual: Vec<_> = defs
+ .into_iter()
+ .flat_map(|(text_range, link, ns)| {
+ let attr = docs.find_ast_range(text_range);
+ let is_inner_attr =
+ attr.map(|(_file, is_inner)| is_inner).unwrap_or(hir::IsInnerDoc::No);
+ let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns, is_inner_attr)
+ .unwrap_or_else(|| panic!("Failed to resolve {link}"));
+ def.try_to_nav(sema).unwrap().into_iter().zip(iter::repeat(link))
+ })
+ .map(|(nav_target, link)| {
+ let range = FileRange {
+ file_id: nav_target.file_id,
+ range: nav_target.focus_or_full_range(),
+ };
+ (range, link)
+ })
+ .sorted_by_key(key_fn)
+ .collect();
+ assert_eq!(expected, actual);
+ });
+}
+
+fn def_under_cursor<'db>(
+ sema: &Semantics<'db, RootDatabase>,
position: &FilePosition,
-) -> (Definition, Documentation, DocsRangeMap) {
+) -> (Definition, Cow<'db, hir::Docs>) {
let (docs, def) = sema
.parse_guess_edition(position.file_id)
.syntax()
@@ -94,14 +97,14 @@ fn def_under_cursor(
.find_map(|it| node_to_def(sema, &it))
.expect("no def found")
.unwrap();
- let (docs, range) = docs.expect("no docs found for cursor def");
- (def, docs, range)
+ let docs = docs.expect("no docs found for cursor def");
+ (def, docs)
}
-fn node_to_def(
- sema: &Semantics<'_, RootDatabase>,
+fn node_to_def<'db>(
+ sema: &Semantics<'db, RootDatabase>,
node: &SyntaxNode,
-) -> Option<Option<(Option<(Documentation, DocsRangeMap)>, Definition)>> {
+) -> Option<Option<(Option<Cow<'db, hir::Docs>>, Definition)>> {
Some(match_ast! {
match node {
ast::SourceFile(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Module(def))),
diff --git a/crates/ide/src/fixture.rs b/crates/ide/src/fixture.rs
index fbf89042fa..1a8591d25d 100644
--- a/crates/ide/src/fixture.rs
+++ b/crates/ide/src/fixture.rs
@@ -7,10 +7,10 @@ use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange};
/// Creates analysis for a single file.
pub(crate) fn file(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileId) {
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
+ let change_fixture = ChangeFixture::parse(ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
- (host.analysis(), change_fixture.files[0].file_id(&host.db))
+ (host.analysis(), change_fixture.files[0].file_id())
}
/// Creates analysis from a multi-file fixture, returns positions marked with $0.
@@ -18,23 +18,23 @@ pub(crate) fn position(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Analysis, FilePosition) {
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
+ let change_fixture = ChangeFixture::parse(ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
- (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset })
+ (host.analysis(), FilePosition { file_id: file_id.file_id(), offset })
}
/// Creates analysis for a single file, returns range marked with a pair of $0.
pub(crate) fn range(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileRange) {
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
+ let change_fixture = ChangeFixture::parse(ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let range = range_or_offset.expect_range();
- (host.analysis(), FileRange { file_id: file_id.file_id(&host.db), range })
+ (host.analysis(), FileRange { file_id: file_id.file_id(), range })
}
/// Creates analysis for a single file, returns range marked with a pair of $0 or a position marked with $0.
@@ -42,11 +42,11 @@ pub(crate) fn range_or_position(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Analysis, FileId, RangeOrOffset) {
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
+ let change_fixture = ChangeFixture::parse(ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
- (host.analysis(), file_id.file_id(&host.db), range_or_offset)
+ (host.analysis(), file_id.file_id(), range_or_offset)
}
/// Creates analysis from a multi-file fixture, returns positions marked with $0.
@@ -54,25 +54,24 @@ pub(crate) fn annotations(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Analysis, FilePosition, Vec<(FileRange, String)>) {
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
+ let change_fixture = ChangeFixture::parse(ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
- let db = &host.db;
let annotations = change_fixture
.files
.iter()
.flat_map(|&file_id| {
- let file_text = host.analysis().file_text(file_id.file_id(&host.db)).unwrap();
+ let file_text = host.analysis().file_text(file_id.file_id()).unwrap();
let annotations = extract_annotations(&file_text);
annotations
.into_iter()
- .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data))
+ .map(move |(range, data)| (FileRange { file_id: file_id.file_id(), range }, data))
})
.collect();
- (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset }, annotations)
+ (host.analysis(), FilePosition { file_id: file_id.file_id(), offset }, annotations)
}
/// Creates analysis from a multi-file fixture with annotations without $0
@@ -80,20 +79,19 @@ pub(crate) fn annotations_without_marker(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Analysis, Vec<(FileRange, String)>) {
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
+ let change_fixture = ChangeFixture::parse(ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
- let db = &host.db;
let annotations = change_fixture
.files
.iter()
.flat_map(|&file_id| {
- let file_text = host.analysis().file_text(file_id.file_id(db)).unwrap();
+ let file_text = host.analysis().file_text(file_id.file_id()).unwrap();
let annotations = extract_annotations(&file_text);
annotations
.into_iter()
- .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data))
+ .map(move |(range, data)| (FileRange { file_id: file_id.file_id(), range }, data))
})
.collect();
(host.analysis(), annotations)
diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs
index 0572bca445..a29da4f258 100644
--- a/crates/ide/src/goto_implementation.rs
+++ b/crates/ide/src/goto_implementation.rs
@@ -384,7 +384,7 @@ trait Bar {}
fn test() {
#[derive(Copy)]
- //^^^^^^^^^^^^^^^
+ // ^^^^^^^^^^^^
struct Foo$0;
impl Foo {}
diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs
index 04ce5a7567..f7870032ea 100644
--- a/crates/ide/src/highlight_related.rs
+++ b/crates/ide/src/highlight_related.rs
@@ -62,7 +62,7 @@ pub(crate) fn highlight_related(
let _p = tracing::info_span!("highlight_related").entered();
let file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
+ .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(sema.db, file_id));
let syntax = sema.parse(file_id).syntax().clone();
let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs
index 8c2d3f0e5f..a8929859bc 100644
--- a/crates/ide/src/hover/render.rs
+++ b/crates/ide/src/hover/render.rs
@@ -1,5 +1,5 @@
//! Logic for rendering the different hover messages
-use std::{env, mem, ops::Not};
+use std::{borrow::Cow, env, mem, ops::Not};
use either::Either;
use hir::{
@@ -11,7 +11,7 @@ use hir::{
use ide_db::{
RootDatabase,
defs::{Definition, find_std_module},
- documentation::{DocsRangeMap, HasDocs},
+ documentation::{Documentation, HasDocs},
famous_defs::FamousDefs,
generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES},
syntax_helpers::prettify_macro_expansion,
@@ -278,9 +278,9 @@ pub(super) fn keyword(
keyword_hints(sema, token, parent, edition, display_target);
let doc_owner = find_std_module(&famous_defs, &keyword_mod, edition)?;
- let (docs, range_map) = doc_owner.docs_with_rangemap(sema.db)?;
+ let docs = doc_owner.docs_with_rangemap(sema.db)?;
let (markup, range_map) =
- markup(Some(docs.into()), Some(range_map), description, None, None, String::new());
+ markup(Some(Either::Left(docs)), description, None, None, String::new());
let markup = process_markup(sema.db, Definition::Module(doc_owner), &markup, range_map, config);
Some(HoverResult { markup, actions })
}
@@ -370,12 +370,12 @@ pub(super) fn process_markup(
db: &RootDatabase,
def: Definition,
markup: &Markup,
- markup_range_map: Option<DocsRangeMap>,
+ markup_range_map: Option<hir::Docs>,
config: &HoverConfig<'_>,
) -> Markup {
let markup = markup.as_str();
let markup = if config.links_in_hover {
- rewrite_links(db, markup, def, markup_range_map)
+ rewrite_links(db, markup, def, markup_range_map.as_ref())
} else {
remove_links(markup)
};
@@ -484,7 +484,7 @@ pub(super) fn definition(
config: &HoverConfig<'_>,
edition: Edition,
display_target: DisplayTarget,
-) -> (Markup, Option<DocsRangeMap>) {
+) -> (Markup, Option<hir::Docs>) {
let mod_path = definition_path(db, &def, edition);
let label = match def {
Definition::Trait(trait_) => trait_
@@ -520,12 +520,7 @@ pub(super) fn definition(
}
_ => def.label(db, display_target),
};
- let (docs, range_map) =
- if let Some((docs, doc_range)) = def.docs_with_rangemap(db, famous_defs, display_target) {
- (Some(docs), doc_range)
- } else {
- (None, None)
- };
+ let docs = def.docs_with_rangemap(db, famous_defs, display_target);
let value = || match def {
Definition::Variant(it) => {
if !it.parent_enum(db).is_data_carrying(db) {
@@ -842,14 +837,7 @@ pub(super) fn definition(
}
};
- markup(
- docs.map(Into::into),
- range_map,
- desc,
- extra.is_empty().not().then_some(extra),
- mod_path,
- subst_types,
- )
+ markup(docs, desc, extra.is_empty().not().then_some(extra), mod_path, subst_types)
}
#[derive(Debug)]
@@ -1124,13 +1112,12 @@ fn definition_path(db: &RootDatabase, &def: &Definition, edition: Edition) -> Op
}
fn markup(
- docs: Option<String>,
- range_map: Option<DocsRangeMap>,
+ docs: Option<Either<Cow<'_, hir::Docs>, Documentation<'_>>>,
rust: String,
extra: Option<String>,
mod_path: Option<String>,
subst_types: String,
-) -> (Markup, Option<DocsRangeMap>) {
+) -> (Markup, Option<hir::Docs>) {
let mut buf = String::new();
if let Some(mod_path) = mod_path
@@ -1151,10 +1138,21 @@ fn markup(
if let Some(doc) = docs {
format_to!(buf, "\n___\n\n");
let offset = TextSize::new(buf.len() as u32);
- let buf_range_map = range_map.map(|range_map| range_map.shift_docstring_line_range(offset));
- format_to!(buf, "{}", doc);
+ let docs_str = match &doc {
+ Either::Left(docs) => docs.docs(),
+ Either::Right(docs) => docs.as_str(),
+ };
+ format_to!(buf, "{}", docs_str);
+ let range_map = match doc {
+ Either::Left(range_map) => {
+ let mut range_map = range_map.into_owned();
+ range_map.shift_by(offset);
+ Some(range_map)
+ }
+ Either::Right(_) => None,
+ };
- (buf.into(), buf_range_map)
+ (buf.into(), range_map)
} else {
(buf.into(), None)
}
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index 6dd9e84a57..155d6b52f1 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -91,7 +91,7 @@ pub(crate) fn inlay_hints(
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+ .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
let file = sema.parse(file_id);
let file = file.syntax();
@@ -144,7 +144,7 @@ pub(crate) fn inlay_hints_resolve(
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+ .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
let file = sema.parse(file_id);
let file = file.syntax();
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 113cb83d17..9436264904 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -332,7 +332,8 @@ impl Analysis {
pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
// FIXME edition
self.with_db(|db| {
- let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
+ let editioned_file_id_wrapper =
+ EditionedFileId::current_edition_guess_origin(&self.db, file_id);
db.parse(editioned_file_id_wrapper).tree()
})
@@ -361,7 +362,7 @@ impl Analysis {
/// supported).
pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
self.with_db(|db| {
- let file_id = EditionedFileId::current_edition(&self.db, position.file_id);
+ let file_id = EditionedFileId::current_edition_guess_origin(&self.db, position.file_id);
let parse = db.parse(file_id);
let file = parse.tree();
matching_brace::matching_brace(&file, position.offset)
@@ -422,7 +423,7 @@ impl Analysis {
pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> {
self.with_db(|db| {
let editioned_file_id_wrapper =
- EditionedFileId::current_edition(&self.db, frange.file_id);
+ EditionedFileId::current_edition_guess_origin(&self.db, frange.file_id);
let parse = db.parse(editioned_file_id_wrapper);
join_lines::join_lines(config, &parse.tree(), frange.range)
})
@@ -463,7 +464,8 @@ impl Analysis {
) -> Cancellable<Vec<StructureNode>> {
// FIXME: Edition
self.with_db(|db| {
- let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
+ let editioned_file_id_wrapper =
+ EditionedFileId::current_edition_guess_origin(&self.db, file_id);
let source_file = db.parse(editioned_file_id_wrapper).tree();
file_structure::file_structure(&source_file, config)
})
@@ -494,7 +496,8 @@ impl Analysis {
/// Returns the set of folding ranges.
pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
self.with_db(|db| {
- let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
+ let editioned_file_id_wrapper =
+ EditionedFileId::current_edition_guess_origin(&self.db, file_id);
folding_ranges::folding_ranges(&db.parse(editioned_file_id_wrapper).tree())
})
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs
index 7d5d9057cb..4c4d57f0f4 100644
--- a/crates/ide/src/navigation_target.rs
+++ b/crates/ide/src/navigation_target.rs
@@ -51,7 +51,8 @@ pub struct NavigationTarget {
pub kind: Option<SymbolKind>,
pub container_name: Option<Symbol>,
pub description: Option<String>,
- pub docs: Option<Documentation>,
+ // FIXME: Use the database lifetime here.
+ pub docs: Option<Documentation<'static>>,
/// In addition to a `name` field, a `NavigationTarget` may also be aliased
/// In such cases we want a `NavigationTarget` to be accessible by its alias
pub alias: Option<Symbol>,
@@ -157,7 +158,7 @@ impl NavigationTarget {
full_range,
SymbolKind::Module,
);
- res.docs = module.docs(db);
+ res.docs = module.docs(db).map(Documentation::into_owned);
res.description = Some(
module.display(db, module.krate().to_display_target(db)).to_string(),
);
@@ -429,7 +430,7 @@ where
D::KIND,
)
.map(|mut res| {
- res.docs = self.docs(db);
+ res.docs = self.docs(db).map(Documentation::into_owned);
res.description = hir::attach_db(db, || {
Some(self.display(db, self.krate(db).to_display_target(db)).to_string())
});
@@ -522,7 +523,7 @@ impl TryToNav for hir::ExternCrateDecl {
SymbolKind::Module,
);
- res.docs = self.docs(db);
+ res.docs = self.docs(db).map(Documentation::into_owned);
res.description = Some(self.display(db, krate.to_display_target(db)).to_string());
res.container_name = container_name(db, *self);
res
@@ -544,10 +545,9 @@ impl TryToNav for hir::Field {
FieldSource::Named(it) => {
NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field).map(
|mut res| {
- res.docs = self.docs(db);
- res.description = hir::attach_db(db, || {
- Some(self.display(db, krate.to_display_target(db)).to_string())
- });
+ res.docs = self.docs(db).map(Documentation::into_owned);
+ res.description =
+ Some(self.display(db, krate.to_display_target(db)).to_string());
res
},
)
@@ -586,7 +586,7 @@ impl TryToNav for hir::Macro {
self.kind(db).into(),
)
.map(|mut res| {
- res.docs = self.docs(db);
+ res.docs = self.docs(db).map(Documentation::into_owned);
res
}),
)
@@ -916,7 +916,7 @@ pub(crate) fn orig_range_with_focus_r(
) -> UpmappingResult<(FileRange, Option<TextRange>)> {
let Some(name) = focus_range else { return orig_range_r(db, hir_file, value) };
- let call_kind = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).kind;
+ let call = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap());
let def_range =
|| db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).def.definition_range(db);
@@ -942,7 +942,8 @@ pub(crate) fn orig_range_with_focus_r(
// name lies outside the node, so instead point to the macro call which
// *should* contain the name
_ => {
- let kind = call_kind();
+ let call = call();
+ let kind = call.kind;
let range = kind.clone().original_call_range_with_input(db);
//If the focus range is in the attribute/derive body, we
// need to point the call site to the entire body, if not, fall back
@@ -954,7 +955,7 @@ pub(crate) fn orig_range_with_focus_r(
{
range
} else {
- kind.original_call_range(db)
+ kind.original_call_range(db, call.krate)
}
}
},
@@ -983,11 +984,14 @@ pub(crate) fn orig_range_with_focus_r(
},
),
// node is in macro def, just show the focus
- _ => (
- // show the macro call
- (call_kind().original_call_range(db), None),
- Some((focus_range, Some(focus_range))),
- ),
+ _ => {
+ let call = call();
+ (
+ // show the macro call
+ (call.kind.original_call_range(db, call.krate), None),
+ Some((focus_range, Some(focus_range))),
+ )
+ }
}
}
// lost name? can't happen for single tokens
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index 516cc7fe60..6bd60e2a13 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -1124,7 +1124,10 @@ pub(super) struct Foo$0 {
check_with_scope(
code,
Some(&mut |db| {
- SearchScope::single_file(EditionedFileId::current_edition(db, FileId::from_raw(2)))
+ SearchScope::single_file(EditionedFileId::current_edition_guess_origin(
+ db,
+ FileId::from_raw(2),
+ ))
}),
expect![[r#"
quux Function FileId(0) 19..35 26..30
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index 2086a199b8..9e17ab2bab 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -3,17 +3,13 @@ use std::{fmt, sync::OnceLock};
use arrayvec::ArrayVec;
use ast::HasName;
use cfg::{CfgAtom, CfgExpr};
-use hir::{
- AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, Semantics, Symbol, db::HirDatabase,
- sym,
-};
+use hir::{AsAssocItem, HasAttrs, HasCrate, HasSource, Semantics, Symbol, db::HirDatabase, sym};
use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
use ide_db::impl_empty_upmap_from_ra_fixture;
use ide_db::{
FilePosition, FxHashMap, FxIndexMap, FxIndexSet, RootDatabase, SymbolKind,
base_db::RootQueryDb,
defs::Definition,
- documentation::docs_from_attrs,
helpers::visit_file_defs,
search::{FileReferenceNode, SearchScope},
};
@@ -323,7 +319,7 @@ pub(crate) fn runnable_fn(
def: hir::Function,
) -> Option<Runnable> {
let edition = def.krate(sema.db).edition(sema.db);
- let under_cfg_test = has_cfg_test(def.module(sema.db).attrs(sema.db));
+ let under_cfg_test = has_cfg_test(def.module(sema.db).attrs(sema.db).cfgs(sema.db));
let kind = if !under_cfg_test && def.is_main(sema.db) {
RunnableKind::Bin
} else {
@@ -358,7 +354,7 @@ pub(crate) fn runnable_fn(
let file_range = fn_source.syntax().original_file_range_with_macro_call_input(sema.db);
let update_test = UpdateTest::find_snapshot_macro(sema, file_range);
- let cfg = def.attrs(sema.db).cfg();
+ let cfg = def.attrs(sema.db).cfgs(sema.db).cloned();
Some(Runnable { use_name_in_title: false, nav, kind, cfg, update_test })
}
@@ -366,8 +362,8 @@ pub(crate) fn runnable_mod(
sema: &Semantics<'_, RootDatabase>,
def: hir::Module,
) -> Option<Runnable> {
- if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(def.attrs(sema.db)))
- {
+ let cfg = def.attrs(sema.db).cfgs(sema.db);
+ if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(cfg)) {
return None;
}
let path = def
@@ -381,8 +377,7 @@ pub(crate) fn runnable_mod(
})
.join("::");
- let attrs = def.attrs(sema.db);
- let cfg = attrs.cfg();
+ let cfg = cfg.cloned();
let nav = NavigationTarget::from_module_to_decl(sema.db, def).call_site();
let module_source = sema.module_definition_node(def);
@@ -409,10 +404,10 @@ pub(crate) fn runnable_impl(
let display_target = def.module(sema.db).krate().to_display_target(sema.db);
let edition = display_target.edition;
let attrs = def.attrs(sema.db);
- if !has_runnable_doc_test(&attrs) {
+ if !has_runnable_doc_test(sema.db, &attrs) {
return None;
}
- let cfg = attrs.cfg();
+ let cfg = attrs.cfgs(sema.db).cloned();
let nav = def.try_to_nav(sema)?.call_site();
let ty = def.self_ty(sema.db);
let adt_name = ty.as_adt()?.name(sema.db);
@@ -442,8 +437,16 @@ pub(crate) fn runnable_impl(
})
}
-fn has_cfg_test(attrs: AttrsWithOwner) -> bool {
- attrs.cfgs().any(|cfg| matches!(&cfg, CfgExpr::Atom(CfgAtom::Flag(s)) if *s == sym::test))
+fn has_cfg_test(cfg: Option<&CfgExpr>) -> bool {
+ return cfg.is_some_and(has_cfg_test_impl);
+
+ fn has_cfg_test_impl(cfg: &CfgExpr) -> bool {
+ match cfg {
+ CfgExpr::Atom(CfgAtom::Flag(s)) => *s == sym::test,
+ CfgExpr::Any(cfgs) | CfgExpr::All(cfgs) => cfgs.iter().any(has_cfg_test_impl),
+ _ => false,
+ }
+ }
}
/// Creates a test mod runnable for outline modules at the top of their definition.
@@ -453,8 +456,8 @@ fn runnable_mod_outline_definition(
) -> Option<Runnable> {
def.as_source_file_id(sema.db)?;
- if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(def.attrs(sema.db)))
- {
+ let cfg = def.attrs(sema.db).cfgs(sema.db);
+ if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(cfg)) {
return None;
}
let path = def
@@ -468,8 +471,7 @@ fn runnable_mod_outline_definition(
})
.join("::");
- let attrs = def.attrs(sema.db);
- let cfg = attrs.cfg();
+ let cfg = cfg.cloned();
let mod_source = sema.module_definition_node(def);
let mod_syntax = mod_source.file_syntax(sema.db);
@@ -508,7 +510,7 @@ fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Op
let display_target = krate
.unwrap_or_else(|| (*db.all_crates().last().expect("no crate graph present")).into())
.to_display_target(db);
- if !has_runnable_doc_test(&attrs) {
+ if !has_runnable_doc_test(db, &attrs) {
return None;
}
let def_name = def.name(db)?;
@@ -554,7 +556,7 @@ fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Op
use_name_in_title: false,
nav,
kind: RunnableKind::DocTest { test_id },
- cfg: attrs.cfg(),
+ cfg: attrs.cfgs(db).cloned(),
update_test: UpdateTest::default(),
};
Some(res)
@@ -571,15 +573,15 @@ impl TestAttr {
}
}
-fn has_runnable_doc_test(attrs: &hir::Attrs) -> bool {
+fn has_runnable_doc_test(db: &RootDatabase, attrs: &hir::AttrsWithOwner) -> bool {
const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] =
&["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"];
- docs_from_attrs(attrs).is_some_and(|doc| {
+ attrs.hir_docs(db).is_some_and(|doc| {
let mut in_code_block = false;
- for line in doc.lines() {
+ for line in doc.docs().lines() {
if let Some(header) =
RUSTDOC_FENCES.into_iter().find_map(|fence| line.strip_prefix(fence))
{
diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs
index f9ec44813a..9c3204c199 100644
--- a/crates/ide/src/signature_help.rs
+++ b/crates/ide/src/signature_help.rs
@@ -31,7 +31,7 @@ use crate::RootDatabase;
/// edited.
#[derive(Debug)]
pub struct SignatureHelp {
- pub doc: Option<Documentation>,
+ pub doc: Option<Documentation<'static>>,
pub signature: String,
pub active_parameter: Option<usize>,
parameters: Vec<TextRange>,
@@ -174,7 +174,7 @@ fn signature_help_for_call(
let mut fn_params = None;
match callable.kind() {
hir::CallableKind::Function(func) => {
- res.doc = func.docs(db);
+ res.doc = func.docs(db).map(Documentation::into_owned);
if func.is_async(db) {
format_to!(res.signature, "async ");
}
@@ -199,7 +199,7 @@ fn signature_help_for_call(
});
}
hir::CallableKind::TupleStruct(strukt) => {
- res.doc = strukt.docs(db);
+ res.doc = strukt.docs(db).map(Documentation::into_owned);
format_to!(res.signature, "struct {}", strukt.name(db).display(db, edition));
let generic_params = GenericDef::Adt(strukt.into())
@@ -212,7 +212,7 @@ fn signature_help_for_call(
}
}
hir::CallableKind::TupleEnumVariant(variant) => {
- res.doc = variant.docs(db);
+ res.doc = variant.docs(db).map(Documentation::into_owned);
format_to!(
res.signature,
"enum {}",
@@ -320,33 +320,33 @@ fn signature_help_for_generics(
let db = sema.db;
match generics_def {
hir::GenericDef::Function(it) => {
- res.doc = it.docs(db);
+ res.doc = it.docs(db).map(Documentation::into_owned);
format_to!(res.signature, "fn {}", it.name(db).display(db, edition));
}
hir::GenericDef::Adt(hir::Adt::Enum(it)) => {
- res.doc = it.docs(db);
+ res.doc = it.docs(db).map(Documentation::into_owned);
format_to!(res.signature, "enum {}", it.name(db).display(db, edition));
if let Some(variant) = variant {
// In paths, generics of an enum can be specified *after* one of its variants.
// eg. `None::<u8>`
// We'll use the signature of the enum, but include the docs of the variant.
- res.doc = variant.docs(db);
+ res.doc = variant.docs(db).map(Documentation::into_owned);
}
}
hir::GenericDef::Adt(hir::Adt::Struct(it)) => {
- res.doc = it.docs(db);
+ res.doc = it.docs(db).map(Documentation::into_owned);
format_to!(res.signature, "struct {}", it.name(db).display(db, edition));
}
hir::GenericDef::Adt(hir::Adt::Union(it)) => {
- res.doc = it.docs(db);
+ res.doc = it.docs(db).map(Documentation::into_owned);
format_to!(res.signature, "union {}", it.name(db).display(db, edition));
}
hir::GenericDef::Trait(it) => {
- res.doc = it.docs(db);
+ res.doc = it.docs(db).map(Documentation::into_owned);
format_to!(res.signature, "trait {}", it.name(db).display(db, edition));
}
hir::GenericDef::TypeAlias(it) => {
- res.doc = it.docs(db);
+ res.doc = it.docs(db).map(Documentation::into_owned);
format_to!(res.signature, "type {}", it.name(db).display(db, edition));
}
// These don't have generic args that can be specified
@@ -501,7 +501,7 @@ fn signature_help_for_tuple_struct_pat(
let fields: Vec<_> = if let PathResolution::Def(ModuleDef::Variant(variant)) = path_res {
let en = variant.parent_enum(db);
- res.doc = en.docs(db);
+ res.doc = en.docs(db).map(Documentation::into_owned);
format_to!(
res.signature,
"enum {}::{} (",
@@ -518,7 +518,7 @@ fn signature_help_for_tuple_struct_pat(
match adt {
hir::Adt::Struct(it) => {
- res.doc = it.docs(db);
+ res.doc = it.docs(db).map(Documentation::into_owned);
format_to!(res.signature, "struct {} (", it.name(db).display(db, edition));
it.fields(db)
}
@@ -628,7 +628,7 @@ fn signature_help_for_record_<'db>(
fields = variant.fields(db);
let en = variant.parent_enum(db);
- res.doc = en.docs(db);
+ res.doc = en.docs(db).map(Documentation::into_owned);
format_to!(
res.signature,
"enum {}::{} {{ ",
@@ -645,12 +645,12 @@ fn signature_help_for_record_<'db>(
match adt {
hir::Adt::Struct(it) => {
fields = it.fields(db);
- res.doc = it.docs(db);
+ res.doc = it.docs(db).map(Documentation::into_owned);
format_to!(res.signature, "struct {} {{ ", it.name(db).display(db, edition));
}
hir::Adt::Union(it) => {
fields = it.fields(db);
- res.doc = it.docs(db);
+ res.doc = it.docs(db).map(Documentation::into_owned);
format_to!(res.signature, "union {} {{ ", it.name(db).display(db, edition));
}
_ => return None,
@@ -746,12 +746,12 @@ mod tests {
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (RootDatabase, FilePosition) {
let mut database = RootDatabase::default();
- let change_fixture = ChangeFixture::parse(&database, ra_fixture);
+ let change_fixture = ChangeFixture::parse(ra_fixture);
database.apply_change(change_fixture.change);
let (file_id, range_or_offset) =
change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
- let position = FilePosition { file_id: file_id.file_id(&database), offset };
+ let position = FilePosition { file_id: file_id.file_id(), offset };
(database, position)
}
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index 52b201df96..7dc91fbc9e 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -42,7 +42,8 @@ pub struct ReferenceData {
#[derive(Debug)]
pub struct TokenStaticData {
- pub documentation: Option<Documentation>,
+ // FIXME: Make this have the lifetime of the database.
+ pub documentation: Option<Documentation<'static>>,
pub hover: Option<HoverResult>,
/// The position of the token itself.
///
@@ -117,7 +118,7 @@ fn documentation_for_definition(
sema: &Semantics<'_, RootDatabase>,
def: Definition,
scope_node: &SyntaxNode,
-) -> Option<Documentation> {
+) -> Option<Documentation<'static>> {
let famous_defs = match &def {
Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())),
_ => None,
@@ -132,6 +133,7 @@ fn documentation_for_definition(
})
.to_display_target(sema.db),
)
+ .map(Documentation::into_owned)
}
// FIXME: This is a weird function
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index 531c7e1f4d..8d0f0c3e68 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -199,7 +199,7 @@ pub(crate) fn highlight(
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+ .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
// Determine the root based on the given range.
let (root, range_to_highlight) = {
diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs
index 33df4a8a13..8937f8249c 100644
--- a/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/crates/ide/src/syntax_highlighting/highlight.rs
@@ -3,7 +3,7 @@
use std::ops::ControlFlow;
use either::Either;
-use hir::{AsAssocItem, HasAttrs, HasVisibility, Semantics, sym};
+use hir::{AsAssocItem, HasAttrs, HasVisibility, Semantics};
use ide_db::{
FxHashMap, RootDatabase, SymbolKind,
defs::{Definition, IdentClass, NameClass, NameRefClass},
@@ -413,7 +413,7 @@ fn highlight_name_ref(
if is_from_builtin_crate {
h |= HlMod::DefaultLibrary;
}
- let is_deprecated = resolved_krate.attrs(sema.db).by_key(sym::deprecated).exists();
+ let is_deprecated = resolved_krate.attrs(sema.db).is_deprecated();
if is_deprecated {
h |= HlMod::Deprecated;
}
@@ -701,7 +701,7 @@ pub(super) fn highlight_def(
}
if let Some(attrs) = attrs
- && attrs.by_key(sym::deprecated).exists()
+ && attrs.is_deprecated()
{
h |= HlMod::Deprecated;
}
@@ -751,7 +751,7 @@ fn highlight_method_call(
let is_from_other_crate = krate.as_ref().map_or(false, |krate| def_crate != *krate);
let is_from_builtin_crate = def_crate.is_builtin(sema.db);
let is_public = func.visibility(sema.db) == hir::Visibility::Public;
- let is_deprecated = func.attrs(sema.db).by_key(sym::deprecated).exists();
+ let is_deprecated = func.attrs(sema.db).is_deprecated();
if is_from_other_crate {
h |= HlMod::Library;
diff --git a/crates/ide/src/syntax_highlighting/html.rs b/crates/ide/src/syntax_highlighting/html.rs
index ff617b3408..dae3e69fee 100644
--- a/crates/ide/src/syntax_highlighting/html.rs
+++ b/crates/ide/src/syntax_highlighting/html.rs
@@ -20,7 +20,7 @@ pub(crate) fn highlight_as_html_with_config(
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+ .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
let file = sema.parse(file_id);
let file = file.syntax();
fn rainbowify(seed: u64) -> String {
diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs
index 7955f5ac0d..26d2bb5e02 100644
--- a/crates/ide/src/syntax_highlighting/inject.rs
+++ b/crates/ide/src/syntax_highlighting/inject.rs
@@ -1,16 +1,13 @@
//! "Recursive" Syntax highlighting for code in doctests and fixtures.
-use std::mem;
-
-use either::Either;
-use hir::{EditionedFileId, HirFileId, InFile, Semantics, sym};
-use ide_db::range_mapper::RangeMapper;
+use hir::{EditionedFileId, HirFileId, InFile, Semantics};
use ide_db::{
- SymbolKind, defs::Definition, documentation::docs_with_rangemap, rust_doc::is_rust_fence,
+ SymbolKind, defs::Definition, documentation::Documentation, range_mapper::RangeMapper,
+ rust_doc::is_rust_fence,
};
use syntax::{
- AstToken, NodeOrToken, SyntaxNode, TextRange, TextSize,
- ast::{self, AstNode, IsString, QuoteOffsets},
+ SyntaxNode, TextRange, TextSize,
+ ast::{self, IsString},
};
use crate::{
@@ -96,118 +93,79 @@ pub(super) fn doc_comment(
None => return,
};
let src_file_id: HirFileId = src_file_id.into();
+ let Some(docs) = attributes.hir_docs(sema.db) else { return };
// Extract intra-doc links and emit highlights for them.
- if let Some((docs, doc_mapping)) = docs_with_rangemap(sema.db, &attributes) {
- extract_definitions_from_docs(&docs)
- .into_iter()
- .filter_map(|(range, link, ns)| {
- doc_mapping
- .map(range)
- .filter(|(mapping, _)| mapping.file_id == src_file_id)
- .and_then(|(InFile { value: mapped_range, .. }, attr_id)| {
- Some(mapped_range).zip(resolve_doc_path_for_def(
- sema.db,
- def,
- &link,
- ns,
- attr_id.is_inner_attr(),
- ))
- })
- })
- .for_each(|(range, def)| {
- hl.add(HlRange {
- range,
- highlight: module_def_to_hl_tag(def)
- | HlMod::Documentation
- | HlMod::Injected
- | HlMod::IntraDocLink,
- binding_hash: None,
+ extract_definitions_from_docs(&Documentation::new_borrowed(docs.docs()))
+ .into_iter()
+ .filter_map(|(range, link, ns)| {
+ docs.find_ast_range(range)
+ .filter(|(mapping, _)| mapping.file_id == src_file_id)
+ .and_then(|(InFile { value: mapped_range, .. }, is_inner)| {
+ Some(mapped_range)
+ .zip(resolve_doc_path_for_def(sema.db, def, &link, ns, is_inner))
})
+ })
+ .for_each(|(range, def)| {
+ hl.add(HlRange {
+ range,
+ highlight: module_def_to_hl_tag(def)
+ | HlMod::Documentation
+ | HlMod::Injected
+ | HlMod::IntraDocLink,
+ binding_hash: None,
})
- }
+ });
// Extract doc-test sources from the docs and calculate highlighting for them.
let mut inj = RangeMapper::default();
inj.add_unmapped("fn doctest() {\n");
- let attrs_source_map = attributes.source_map(sema.db);
-
let mut is_codeblock = false;
let mut is_doctest = false;
- let mut new_comments = Vec::new();
- let mut string;
+ let mut has_doctests = false;
+
+ let mut docs_offset = TextSize::new(0);
+ for mut line in docs.docs().split('\n') {
+ let mut line_docs_offset = docs_offset;
+ docs_offset += TextSize::of(line) + TextSize::of("\n");
+
+ match RUSTDOC_FENCES.into_iter().find_map(|fence| line.find(fence)) {
+ Some(idx) => {
+ is_codeblock = !is_codeblock;
+ // Check whether code is rust by inspecting fence guards
+ let guards = &line[idx + RUSTDOC_FENCE_LENGTH..];
+ let is_rust = is_rust_fence(guards);
+ is_doctest = is_codeblock && is_rust;
+ continue;
+ }
+ None if !is_doctest => continue,
+ None => (),
+ }
+
+ // lines marked with `#` should be ignored in output, we skip the `#` char
+ if line.starts_with('#') {
+ line_docs_offset += TextSize::of("#");
+ line = &line["#".len()..];
+ }
- for attr in attributes.by_key(sym::doc).attrs() {
- let InFile { file_id, value: src } = attrs_source_map.source_of(attr);
+ let Some((InFile { file_id, value: mapped_range }, _)) =
+ docs.find_ast_range(TextRange::at(line_docs_offset, TextSize::of(line)))
+ else {
+ continue;
+ };
if file_id != src_file_id {
continue;
}
- let (line, range) = match &src {
- Either::Left(it) => {
- string = match find_doc_string_in_attr(attr, it) {
- Some(it) => it,
- None => continue,
- };
- let text = string.text();
- let text_range = string.syntax().text_range();
- match string.quote_offsets() {
- Some(QuoteOffsets { contents, .. }) => {
- (&text[contents - text_range.start()], contents)
- }
- None => (text, text_range),
- }
- }
- Either::Right(comment) => {
- let value = comment.prefix().len();
- let range = comment.syntax().text_range();
- (
- &comment.text()[value..],
- TextRange::new(range.start() + TextSize::try_from(value).unwrap(), range.end()),
- )
- }
- };
-
- let mut range_start = range.start();
- for line in line.split('\n') {
- let line_len = TextSize::from(line.len() as u32);
- let prev_range_start = {
- let next_range_start = range_start + line_len + TextSize::from(1);
- mem::replace(&mut range_start, next_range_start)
- };
- let mut pos = TextSize::from(0);
-
- match RUSTDOC_FENCES.into_iter().find_map(|fence| line.find(fence)) {
- Some(idx) => {
- is_codeblock = !is_codeblock;
- // Check whether code is rust by inspecting fence guards
- let guards = &line[idx + RUSTDOC_FENCE_LENGTH..];
- let is_rust = is_rust_fence(guards);
- is_doctest = is_codeblock && is_rust;
- continue;
- }
- None if !is_doctest => continue,
- None => (),
- }
- // whitespace after comment is ignored
- if let Some(ws) = line[pos.into()..].chars().next().filter(|c| c.is_whitespace()) {
- pos += TextSize::of(ws);
- }
- // lines marked with `#` should be ignored in output, we skip the `#` char
- if line[pos.into()..].starts_with('#') {
- pos += TextSize::of('#');
- }
-
- new_comments.push(TextRange::at(prev_range_start, pos));
- inj.add(&line[pos.into()..], TextRange::new(pos, line_len) + prev_range_start);
- inj.add_unmapped("\n");
- }
+ has_doctests = true;
+ inj.add(line, mapped_range);
+ inj.add_unmapped("\n");
}
- if new_comments.is_empty() {
+ if !has_doctests {
return; // no need to run an analysis on an empty file
}
@@ -240,37 +198,6 @@ pub(super) fn doc_comment(
}
}
}
-
- for range in new_comments {
- hl.add(HlRange {
- range,
- highlight: HlTag::Comment | HlMod::Documentation,
- binding_hash: None,
- });
- }
-}
-
-fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option<ast::String> {
- match it.expr() {
- // #[doc = lit]
- Some(ast::Expr::Literal(lit)) => match lit.kind() {
- ast::LiteralKind::String(it) => Some(it),
- _ => None,
- },
- // #[cfg_attr(..., doc = "", ...)]
- None => {
- // We gotta hunt the string token manually here
- let text = attr.string_value()?.as_str();
- // FIXME: We just pick the first string literal that has the same text as the doc attribute
- // This means technically we might highlight the wrong one
- it.syntax()
- .descendants_with_tokens()
- .filter_map(NodeOrToken::into_token)
- .filter_map(ast::String::cast)
- .find(|string| string.text().get(1..string.text().len() - 1) == Some(text))
- }
- _ => None,
- }
}
fn module_def_to_hl_tag(def: Definition) -> HlTag {
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index e1c45e96b1..b5c3df6ee4 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -43,21 +43,21 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
</style>
<pre><code><span class="comment documentation">//! This is a module to test doc injection.</span>
<span class="comment documentation">//! ```</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
+<span class="comment documentation">//! </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
<span class="comment documentation">//! ```</span>
<span class="comment documentation">//! Syntactic name ref highlighting testing</span>
<span class="comment documentation">//! ```rust</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="self_keyword crate_root injected">self</span><span class="semicolon injected">;</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">other</span><span class="none injected"> </span><span class="keyword injected">as</span><span class="none injected"> </span><span class="module crate_root declaration injected">otter</span><span class="semicolon injected">;</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">core</span><span class="semicolon injected">;</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">trait</span><span class="none injected"> </span><span class="trait declaration injected">T</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="keyword injected">type</span><span class="none injected"> </span><span class="type_alias associated declaration injected static trait">Assoc</span><span class="semicolon injected">;</span><span class="none injected"> </span><span class="brace injected">}</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">f</span><span class="angle injected">&lt;</span><span class="type_param declaration injected">Arg</span><span class="angle injected">&gt;</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">-&gt;</span><span class="none injected"> </span><span class="keyword injected">use</span><span class="angle injected">&lt;</span><span class="struct injected">Arg</span><span class="angle injected">&gt;</span><span class="none injected"> </span><span class="keyword injected">where</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="colon injected">:</span><span class="none injected"> </span><span class="trait injected">T</span><span class="comparison injected">&lt;</span><span class="struct injected">Assoc</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="comparison injected">&gt;</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
+<span class="comment documentation">//! </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="self_keyword crate_root injected">self</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//! </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">other</span><span class="none injected"> </span><span class="keyword injected">as</span><span class="none injected"> </span><span class="module crate_root declaration injected">otter</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//! </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">core</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//! </span><span class="keyword injected">trait</span><span class="none injected"> </span><span class="trait declaration injected">T</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="keyword injected">type</span><span class="none injected"> </span><span class="type_alias associated declaration injected static trait">Assoc</span><span class="semicolon injected">;</span><span class="none injected"> </span><span class="brace injected">}</span>
+<span class="comment documentation">//! </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">f</span><span class="angle injected">&lt;</span><span class="type_param declaration injected">Arg</span><span class="angle injected">&gt;</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">-&gt;</span><span class="none injected"> </span><span class="keyword injected">use</span><span class="angle injected">&lt;</span><span class="struct injected">Arg</span><span class="angle injected">&gt;</span><span class="none injected"> </span><span class="keyword injected">where</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="colon injected">:</span><span class="none injected"> </span><span class="trait injected">T</span><span class="comparison injected">&lt;</span><span class="struct injected">Assoc</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="comparison injected">&gt;</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
<span class="comment documentation">//! ```</span>
<span class="keyword">mod</span> <span class="module declaration">outline_module</span><span class="semicolon">;</span>
<span class="comment documentation">/// ```</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"early doctests should not go boom"</span><span class="semicolon injected">;</span>
+<span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"early doctests should not go boom"</span><span class="semicolon injected">;</span>
<span class="comment documentation">/// ```</span>
<span class="keyword">struct</span> <span class="struct declaration">Foo</span> <span class="brace">{</span>
<span class="field declaration">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span><span class="comma">,</span>
@@ -66,15 +66,15 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="comment documentation">/// This is an impl of </span><span class="struct documentation injected intra_doc_link">[`Foo`]</span><span class="comment documentation"> with a code block.</span>
<span class="comment documentation">///</span>
<span class="comment documentation">/// ```</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
+<span class="comment documentation">/// </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
<span class="comment documentation">///</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="brace injected">}</span>
+<span class="comment documentation">/// </span><span class="brace injected">}</span>
<span class="comment documentation">/// ```</span>
<span class="keyword">impl</span> <span class="struct">Foo</span> <span class="brace">{</span>
<span class="comment documentation">/// ```</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Call me</span>
+ <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Call me</span>
<span class="comment">// KILLER WHALE</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected"> Ishmael."</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// </span><span class="string_literal injected"> Ishmael."</span><span class="semicolon injected">;</span>
<span class="comment documentation">/// ```</span>
<span class="keyword">pub</span> <span class="keyword const">const</span> <span class="constant associated const declaration public static">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span> <span class="operator">=</span> <span class="bool_literal">true</span><span class="semicolon">;</span>
@@ -83,8 +83,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="comment documentation">/// # Examples</span>
<span class="comment documentation">///</span>
<span class="comment documentation">/// ```</span>
- <span class="comment documentation">///</span><span class="comment documentation"> #</span><span class="none injected"> </span><span class="attribute_bracket attribute injected">#</span><span class="attribute_bracket attribute injected">!</span><span class="attribute_bracket attribute injected">[</span><span class="builtin_attr attribute injected">allow</span><span class="parenthesis attribute injected">(</span><span class="none attribute injected">unused_mut</span><span class="parenthesis attribute injected">)</span><span class="attribute_bracket attribute injected">]</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="keyword injected">mut</span><span class="none injected"> </span><span class="variable declaration injected mutable">foo</span><span class="colon injected">:</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// #</span><span class="none injected"> </span><span class="attribute_bracket attribute injected">#</span><span class="attribute_bracket attribute injected">!</span><span class="attribute_bracket attribute injected">[</span><span class="builtin_attr attribute injected">allow</span><span class="parenthesis attribute injected">(</span><span class="none attribute injected">unused_mut</span><span class="parenthesis attribute injected">)</span><span class="attribute_bracket attribute injected">]</span>
+ <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="keyword injected">mut</span><span class="none injected"> </span><span class="variable declaration injected mutable">foo</span><span class="colon injected">:</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">/// ```</span>
<span class="keyword">pub</span> <span class="keyword const">const</span> <span class="keyword">fn</span> <span class="function associated const declaration public static">new</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="struct">Foo</span> <span class="brace">{</span>
<span class="struct">Foo</span> <span class="brace">{</span> <span class="field">bar</span><span class="colon">:</span> <span class="bool_literal">true</span> <span class="brace">}</span>
@@ -95,38 +95,38 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="comment documentation">/// # Examples</span>
<span class="comment documentation">///</span>
<span class="comment documentation">/// ```</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">use</span><span class="none injected"> </span><span class="module injected">x</span><span class="operator injected">::</span><span class="module injected">y</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// </span><span class="keyword injected">use</span><span class="none injected"> </span><span class="module injected">x</span><span class="operator injected">::</span><span class="module injected">y</span><span class="semicolon injected">;</span>
<span class="comment documentation">///</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">///</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// calls bar on foo</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="macro injected">assert</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="none injected">foo</span><span class="operator injected">.</span><span class="none injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// </span><span class="comment injected">// calls bar on foo</span>
+ <span class="comment documentation">/// </span><span class="macro injected">assert</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="none injected">foo</span><span class="operator injected">.</span><span class="none injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">///</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">bar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="variable injected">foo</span><span class="operator injected">.</span><span class="field injected">bar</span><span class="none injected"> </span><span class="logical injected">||</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="constant injected">bar</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">bar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="variable injected">foo</span><span class="operator injected">.</span><span class="field injected">bar</span><span class="none injected"> </span><span class="logical injected">||</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="constant injected">bar</span><span class="semicolon injected">;</span>
<span class="comment documentation">///</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">/* multi-line</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected"> comment */</span>
+ <span class="comment documentation">/// </span><span class="comment injected">/* multi-line</span>
+ <span class="comment documentation">/// </span><span class="comment injected"> comment */</span>
<span class="comment documentation">///</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected reference">multi_line_string</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Foo</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected"> bar</span><span class="escape_sequence injected">\n</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected"> "</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected reference">multi_line_string</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Foo</span>
+ <span class="comment documentation">/// </span><span class="string_literal injected"> bar</span><span class="escape_sequence injected">\n</span>
+ <span class="comment documentation">/// </span><span class="string_literal injected"> "</span><span class="semicolon injected">;</span>
<span class="comment documentation">///</span>
<span class="comment documentation">/// ```</span>
<span class="comment documentation">///</span>
<span class="comment documentation">/// ```rust,no_run</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foobar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="operator injected">.</span><span class="method injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foobar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="operator injected">.</span><span class="method injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">/// ```</span>
<span class="comment documentation">///</span>
<span class="comment documentation">/// ~~~rust,no_run</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// code block with tilde.</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foobar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="operator injected">.</span><span class="method injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// </span><span class="comment injected">// code block with tilde.</span>
+ <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foobar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="operator injected">.</span><span class="method injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">/// ~~~</span>
<span class="comment documentation">///</span>
<span class="comment documentation">/// ```</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// functions</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="angle injected">&lt;</span><span class="type_param declaration injected">T</span><span class="comma injected">,</span><span class="none injected"> </span><span class="keyword const injected">const</span><span class="none injected"> </span><span class="const_param const declaration injected">X</span><span class="colon injected">:</span><span class="none injected"> </span><span class="builtin_type injected">usize</span><span class="angle injected">&gt;</span><span class="parenthesis injected">(</span><span class="value_param declaration injected">arg</span><span class="colon injected">:</span><span class="none injected"> </span><span class="builtin_type injected">i32</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">x</span><span class="colon injected">:</span><span class="none injected"> </span><span class="type_param injected">T</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="const_param const injected">X</span><span class="semicolon injected">;</span>
- <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="brace injected">}</span>
+ <span class="comment documentation">/// </span><span class="comment injected">// functions</span>
+ <span class="comment documentation">/// </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="angle injected">&lt;</span><span class="type_param declaration injected">T</span><span class="comma injected">,</span><span class="none injected"> </span><span class="keyword const injected">const</span><span class="none injected"> </span><span class="const_param const declaration injected">X</span><span class="colon injected">:</span><span class="none injected"> </span><span class="builtin_type injected">usize</span><span class="angle injected">&gt;</span><span class="parenthesis injected">(</span><span class="value_param declaration injected">arg</span><span class="colon injected">:</span><span class="none injected"> </span><span class="builtin_type injected">i32</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
+ <span class="comment documentation">/// </span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">x</span><span class="colon injected">:</span><span class="none injected"> </span><span class="type_param injected">T</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="const_param const injected">X</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// </span><span class="brace injected">}</span>
<span class="comment documentation">/// ```</span>
<span class="comment documentation">///</span>
<span class="comment documentation">/// ```sh</span>
@@ -151,8 +151,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace">}</span>
<span class="comment documentation">/// ```</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">macro_rules</span><span class="macro_bang injected">!</span><span class="none injected"> </span><span class="macro declaration injected public">noop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="punctuation injected">$</span><span class="none injected">expr</span><span class="colon injected">:</span><span class="none injected">expr</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">=</span><span class="operator injected">&gt;</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="punctuation injected">$</span><span class="none injected">expr</span><span class="none injected"> </span><span class="brace injected">}</span><span class="brace injected">}</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="macro injected public">noop</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="numeric_literal injected macro">1</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+<span class="comment documentation">/// </span><span class="keyword injected">macro_rules</span><span class="macro_bang injected">!</span><span class="none injected"> </span><span class="macro declaration injected public">noop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="punctuation injected">$</span><span class="none injected">expr</span><span class="colon injected">:</span><span class="none injected">expr</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">=</span><span class="operator injected">&gt;</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="punctuation injected">$</span><span class="none injected">expr</span><span class="none injected"> </span><span class="brace injected">}</span><span class="brace injected">}</span>
+<span class="comment documentation">/// </span><span class="macro injected public">noop</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="numeric_literal injected macro">1</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">/// ```</span>
<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration public">noop</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>expr<span class="colon">:</span>expr<span class="parenthesis">)</span> <span class="operator">=</span><span class="operator">&gt;</span> <span class="brace">{</span>
@@ -161,18 +161,18 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace">}</span>
<span class="comment documentation">/// ```rust</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+<span class="comment documentation">///</span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">/// ```</span>
<span class="comment documentation">///</span>
<span class="comment documentation">/// ```</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
+<span class="comment documentation">///</span><span class="none injected"> </span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">not</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"false"</span><span class="parenthesis attribute">)</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"</span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span><span class="string_literal attribute">"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"</span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span><span class="string_literal attribute">"</span><span class="attribute_bracket attribute">]</span>
<span class="comment documentation">/// ```</span>
<span class="comment documentation">///</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"alloc"</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"```rust"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">not</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"alloc"</span><span class="parenthesis attribute">)</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"```ignore"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="module injected">alloc</span><span class="operator injected">::</span><span class="macro injected">vec</span><span class="macro_bang injected">!</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+<span class="comment documentation">///</span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="module injected">alloc</span><span class="operator injected">::</span><span class="macro injected">vec</span><span class="macro_bang injected">!</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">/// ```</span>
<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">mix_and_match</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
@@ -188,7 +188,7 @@ It is beyond me why you'd use these when you got ///
<span class="comment documentation">/**
Really, I don't get it
```rust
-</span><span class="comment documentation"> </span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation">
+ </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation">
```
</span><span class="function documentation injected intra_doc_link">[`block_comments`]</span><span class="comment documentation"> tests these without indentation
*/</span>
diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs
index ed55ac5bf0..0381865fed 100644
--- a/crates/ide/src/typing.rs
+++ b/crates/ide/src/typing.rs
@@ -75,7 +75,10 @@ pub(crate) fn on_char_typed(
// FIXME: We are hitting the database here, if we are unlucky this call might block momentarily
// causing the editor to feel sluggish!
let edition = Edition::CURRENT_FIXME;
- let editioned_file_id_wrapper = EditionedFileId::new(db, position.file_id, edition);
+ let editioned_file_id_wrapper = EditionedFileId::from_span_guess_origin(
+ db,
+ span::EditionedFileId::new(position.file_id, edition),
+ );
let file = &db.parse(editioned_file_id_wrapper);
let char_matches_position =
file.tree().syntax().text().char_at(position.offset) == Some(char_typed);
diff --git a/crates/ide/src/typing/on_enter.rs b/crates/ide/src/typing/on_enter.rs
index fdc583a15c..76a2802d29 100644
--- a/crates/ide/src/typing/on_enter.rs
+++ b/crates/ide/src/typing/on_enter.rs
@@ -51,7 +51,7 @@ use ide_db::text_edit::TextEdit;
// ![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif)
pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> {
let editioned_file_id_wrapper =
- ide_db::base_db::EditionedFileId::current_edition(db, position.file_id);
+ ide_db::base_db::EditionedFileId::current_edition_guess_origin(db, position.file_id);
let parse = db.parse(editioned_file_id_wrapper);
let file = parse.tree();
let token = file.syntax().token_at_offset(position.offset).left_biased()?;
diff --git a/crates/ide/src/view_item_tree.rs b/crates/ide/src/view_item_tree.rs
index 2cd751463b..c9a2f31696 100644
--- a/crates/ide/src/view_item_tree.rs
+++ b/crates/ide/src/view_item_tree.rs
@@ -12,6 +12,6 @@ pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+ .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition(db))
}
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 219fc9eb0b..ef73b00db4 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -145,7 +145,9 @@ impl flags::AnalysisStats {
if !source_root.is_library || self.with_deps {
let length = db.file_text(file_id).text(db).lines().count();
let item_stats = db
- .file_item_tree(EditionedFileId::current_edition(db, file_id).into())
+ .file_item_tree(
+ EditionedFileId::current_edition_guess_origin(db, file_id).into(),
+ )
.item_tree_stats()
.into();
@@ -155,7 +157,9 @@ impl flags::AnalysisStats {
} else {
let length = db.file_text(file_id).text(db).lines().count();
let item_stats = db
- .file_item_tree(EditionedFileId::current_edition(db, file_id).into())
+ .file_item_tree(
+ EditionedFileId::current_edition_guess_origin(db, file_id).into(),
+ )
.item_tree_stats()
.into();
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index f822efb53d..fbf3082e1b 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -522,12 +522,12 @@ mod test {
fn position(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (AnalysisHost, FilePosition) {
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(host.raw_database(), ra_fixture);
+ let change_fixture = ChangeFixture::parse(ra_fixture);
host.raw_database_mut().apply_change(change_fixture.change);
let (file_id, range_or_offset) =
change_fixture.file_position.expect("expected a marker ()");
let offset = range_or_offset.expect_offset();
- let position = FilePosition { file_id: file_id.file_id(host.raw_database()), offset };
+ let position = FilePosition { file_id: file_id.file_id(), offset };
(host, position)
}
@@ -878,7 +878,7 @@ pub mod example_mod {
let s = "/// foo\nfn bar() {}";
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(host.raw_database(), s);
+ let change_fixture = ChangeFixture::parse(s);
host.raw_database_mut().apply_change(change_fixture.change);
let analysis = host.analysis();
@@ -901,7 +901,7 @@ pub mod example_mod {
let s = "fn foo() {}";
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(host.raw_database(), s);
+ let change_fixture = ChangeFixture::parse(s);
host.raw_database_mut().apply_change(change_fixture.change);
let analysis = host.analysis();
diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs
index 975e81a4af..529cf12082 100644
--- a/crates/rust-analyzer/src/cli/ssr.rs
+++ b/crates/rust-analyzer/src/cli/ssr.rs
@@ -72,7 +72,7 @@ impl flags::Search {
let sr = db.source_root(root).source_root(db);
for file_id in sr.iter() {
for debug_info in match_finder.debug_where_text_equal(
- EditionedFileId::current_edition(db, file_id),
+ EditionedFileId::current_edition_guess_origin(db, file_id),
debug_snippet,
) {
println!("{debug_info:#?}");
diff --git a/crates/rust-analyzer/src/cli/unresolved_references.rs b/crates/rust-analyzer/src/cli/unresolved_references.rs
index 0362e13b88..2cb0fe9eef 100644
--- a/crates/rust-analyzer/src/cli/unresolved_references.rs
+++ b/crates/rust-analyzer/src/cli/unresolved_references.rs
@@ -141,7 +141,7 @@ fn all_unresolved_references(
) -> Vec<TextRange> {
let file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
+ .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(sema.db, file_id));
let file = sema.parse(file_id);
let root = file.syntax();
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index e585c3f638..86a35c7d11 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -119,7 +119,7 @@ pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSe
}
}
-pub(crate) fn documentation(documentation: Documentation) -> lsp_types::Documentation {
+pub(crate) fn documentation(documentation: Documentation<'_>) -> lsp_types::Documentation {
let value = format_docs(&documentation);
let markup_content = lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value };
lsp_types::Documentation::MarkupContent(markup_content)
@@ -1975,7 +1975,7 @@ pub(crate) fn markup_content(
ide::HoverDocFormat::Markdown => lsp_types::MarkupKind::Markdown,
ide::HoverDocFormat::PlainText => lsp_types::MarkupKind::PlainText,
};
- let value = format_docs(&Documentation::new(markup.into()));
+ let value = format_docs(&Documentation::new_owned(markup.into()));
lsp_types::MarkupContent { kind, value }
}
diff --git a/crates/syntax-bridge/src/lib.rs b/crates/syntax-bridge/src/lib.rs
index 7d858ac8c1..1ded2b4113 100644
--- a/crates/syntax-bridge/src/lib.rs
+++ b/crates/syntax-bridge/src/lib.rs
@@ -1,6 +1,6 @@
//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
-use std::{fmt, hash::Hash};
+use std::{collections::VecDeque, fmt, hash::Hash};
use intern::Symbol;
use rustc_hash::{FxHashMap, FxHashSet};
@@ -102,26 +102,34 @@ where
SpanData<Ctx>: Copy + fmt::Debug,
SpanMap: SpanMapper<SpanData<Ctx>>,
{
- let mut c = Converter::new(node, map, Default::default(), Default::default(), span, mode);
+ let mut c =
+ Converter::new(node, map, Default::default(), Default::default(), span, mode, |_, _| {
+ (true, Vec::new())
+ });
convert_tokens(&mut c)
}
/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
/// subtree's spans. Additionally using the append and remove parameters, the additional tokens can
/// be injected or hidden from the output.
-pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap>(
+pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap, OnEvent>(
node: &SyntaxNode,
map: SpanMap,
append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Ctx>>>>,
remove: FxHashSet<SyntaxElement>,
call_site: SpanData<Ctx>,
mode: DocCommentDesugarMode,
+ on_enter: OnEvent,
) -> tt::TopSubtree<SpanData<Ctx>>
where
SpanMap: SpanMapper<SpanData<Ctx>>,
SpanData<Ctx>: Copy + fmt::Debug,
+ OnEvent: FnMut(
+ &mut PreorderWithTokens,
+ &WalkEvent<SyntaxElement>,
+ ) -> (bool, Vec<tt::Leaf<SpanData<Ctx>>>),
{
- let mut c = Converter::new(node, map, append, remove, call_site, mode);
+ let mut c = Converter::new(node, map, append, remove, call_site, mode, on_enter);
convert_tokens(&mut c)
}
@@ -623,9 +631,9 @@ where
}
}
-struct Converter<SpanMap, S> {
+struct Converter<SpanMap, S, OnEvent> {
current: Option<SyntaxToken>,
- current_leaves: Vec<tt::Leaf<S>>,
+ current_leaves: VecDeque<tt::Leaf<S>>,
preorder: PreorderWithTokens,
range: TextRange,
punct_offset: Option<(SyntaxToken, TextSize)>,
@@ -635,9 +643,13 @@ struct Converter<SpanMap, S> {
remove: FxHashSet<SyntaxElement>,
call_site: S,
mode: DocCommentDesugarMode,
+ on_event: OnEvent,
}
-impl<SpanMap, S> Converter<SpanMap, S> {
+impl<SpanMap, S, OnEvent> Converter<SpanMap, S, OnEvent>
+where
+ OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf<S>>),
+{
fn new(
node: &SyntaxNode,
map: SpanMap,
@@ -645,8 +657,9 @@ impl<SpanMap, S> Converter<SpanMap, S> {
remove: FxHashSet<SyntaxElement>,
call_site: S,
mode: DocCommentDesugarMode,
+ on_enter: OnEvent,
) -> Self {
- let mut this = Converter {
+ let mut converter = Converter {
current: None,
preorder: node.preorder_with_tokens(),
range: node.text_range(),
@@ -655,16 +668,21 @@ impl<SpanMap, S> Converter<SpanMap, S> {
append,
remove,
call_site,
- current_leaves: vec![],
+ current_leaves: VecDeque::new(),
mode,
+ on_event: on_enter,
};
- let first = this.next_token();
- this.current = first;
- this
+ converter.current = converter.next_token();
+ converter
}
fn next_token(&mut self) -> Option<SyntaxToken> {
while let Some(ev) = self.preorder.next() {
+ let (keep_event, insert_leaves) = (self.on_event)(&mut self.preorder, &ev);
+ self.current_leaves.extend(insert_leaves);
+ if !keep_event {
+ continue;
+ }
match ev {
WalkEvent::Enter(token) => {
if self.remove.contains(&token) {
@@ -674,10 +692,9 @@ impl<SpanMap, S> Converter<SpanMap, S> {
}
node => {
self.preorder.skip_subtree();
- if let Some(mut v) = self.append.remove(&node) {
- v.reverse();
+ if let Some(v) = self.append.remove(&node) {
self.current_leaves.extend(v);
- return None;
+ continue;
}
}
}
@@ -686,10 +703,9 @@ impl<SpanMap, S> Converter<SpanMap, S> {
}
}
WalkEvent::Leave(ele) => {
- if let Some(mut v) = self.append.remove(&ele) {
- v.reverse();
+ if let Some(v) = self.append.remove(&ele) {
self.current_leaves.extend(v);
- return None;
+ continue;
}
}
}
@@ -714,8 +730,8 @@ impl<S> SynToken<S> {
}
}
-impl<SpanMap, S> SrcToken<Converter<SpanMap, S>, S> for SynToken<S> {
- fn kind(&self, _ctx: &Converter<SpanMap, S>) -> SyntaxKind {
+impl<SpanMap, S, OnEvent> SrcToken<Converter<SpanMap, S, OnEvent>, S> for SynToken<S> {
+ fn kind(&self, _ctx: &Converter<SpanMap, S, OnEvent>) -> SyntaxKind {
match self {
SynToken::Ordinary(token) => token.kind(),
SynToken::Punct { token, offset: i } => {
@@ -727,14 +743,14 @@ impl<SpanMap, S> SrcToken<Converter<SpanMap, S>, S> for SynToken<S> {
}
}
}
- fn to_char(&self, _ctx: &Converter<SpanMap, S>) -> Option<char> {
+ fn to_char(&self, _ctx: &Converter<SpanMap, S, OnEvent>) -> Option<char> {
match self {
SynToken::Ordinary(_) => None,
SynToken::Punct { token: it, offset: i } => it.text().chars().nth(*i),
SynToken::Leaf(_) => None,
}
}
- fn to_text(&self, _ctx: &Converter<SpanMap, S>) -> SmolStr {
+ fn to_text(&self, _ctx: &Converter<SpanMap, S, OnEvent>) -> SmolStr {
match self {
SynToken::Ordinary(token) | SynToken::Punct { token, offset: _ } => token.text().into(),
SynToken::Leaf(_) => {
@@ -751,10 +767,11 @@ impl<SpanMap, S> SrcToken<Converter<SpanMap, S>, S> for SynToken<S> {
}
}
-impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap, S>
+impl<S, SpanMap, OnEvent> TokenConverter<S> for Converter<SpanMap, S, OnEvent>
where
S: Copy,
SpanMap: SpanMapper<S>,
+ OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf<S>>),
{
type Token = SynToken<S>;
fn convert_doc_comment(
@@ -780,10 +797,7 @@ where
));
}
- if let Some(leaf) = self.current_leaves.pop() {
- if self.current_leaves.is_empty() {
- self.current = self.next_token();
- }
+ if let Some(leaf) = self.current_leaves.pop_front() {
return Some((SynToken::Leaf(leaf), TextRange::empty(TextSize::new(0))));
}
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index aea99a4389..5d67fd4491 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -26,7 +26,8 @@ pub use self::{
generated::{nodes::*, tokens::*},
node_ext::{
AttrKind, FieldKind, Macro, NameLike, NameOrNameRef, PathSegmentKind, SelfParamKind,
- SlicePatComponents, StructKind, TypeBoundKind, TypeOrConstParam, VisibilityKind,
+ SlicePatComponents, StructKind, TokenTreeChildren, TypeBoundKind, TypeOrConstParam,
+ VisibilityKind,
},
operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
token_ext::{
@@ -35,6 +36,7 @@ pub use self::{
traits::{
AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericArgs,
HasGenericParams, HasLoopBody, HasModuleItem, HasName, HasTypeBounds, HasVisibility,
+ attrs_including_inner,
},
};
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index af741d100f..901d17bb14 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -10,7 +10,7 @@ use parser::SyntaxKind;
use rowan::{GreenNodeData, GreenTokenData};
use crate::{
- NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, T, TokenText,
+ NodeOrToken, SmolStr, SyntaxElement, SyntaxElementChildren, SyntaxToken, T, TokenText,
ast::{
self, AstNode, AstToken, HasAttrs, HasGenericArgs, HasGenericParams, HasName,
HasTypeBounds, SyntaxNode, support,
@@ -1114,3 +1114,39 @@ impl ast::OrPat {
.filter(|it| it.kind() == T![|])
}
}
+
+/// An iterator over the elements in an [`ast::TokenTree`].
+///
+/// Does not yield trivia or the delimiters.
+#[derive(Clone)]
+pub struct TokenTreeChildren {
+ iter: SyntaxElementChildren,
+}
+
+impl TokenTreeChildren {
+ #[inline]
+ pub fn new(tt: &ast::TokenTree) -> Self {
+ let mut iter = tt.syntax.children_with_tokens();
+ iter.next(); // Bump the opening delimiter.
+ Self { iter }
+ }
+}
+
+impl Iterator for TokenTreeChildren {
+ type Item = NodeOrToken<ast::TokenTree, SyntaxToken>;
+
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ self.iter.find_map(|item| match item {
+ NodeOrToken::Node(node) => ast::TokenTree::cast(node).map(NodeOrToken::Node),
+ NodeOrToken::Token(token) => {
+ let kind = token.kind();
+ (!matches!(
+ kind,
+ SyntaxKind::WHITESPACE | SyntaxKind::COMMENT | T![')'] | T![']'] | T!['}']
+ ))
+ .then_some(NodeOrToken::Token(token))
+ }
+ })
+ }
+}
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index e1a9f3ac03..83ab87c1c6 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -40,8 +40,8 @@ impl ast::Comment {
}
/// Returns the textual content of a doc comment node as a single string with prefix and suffix
- /// removed.
- pub fn doc_comment(&self) -> Option<&str> {
+ /// removed, plus the offset of the returned string from the beginning of the comment.
+ pub fn doc_comment(&self) -> Option<(&str, TextSize)> {
let kind = self.kind();
match kind {
CommentKind { shape, doc: Some(_) } => {
@@ -52,7 +52,7 @@ impl ast::Comment {
} else {
text
};
- Some(text)
+ Some((text, TextSize::of(prefix)))
}
_ => None,
}
diff --git a/crates/syntax/src/ast/traits.rs b/crates/syntax/src/ast/traits.rs
index 5290f32dd2..2f4109a2c9 100644
--- a/crates/syntax/src/ast/traits.rs
+++ b/crates/syntax/src/ast/traits.rs
@@ -4,8 +4,9 @@
use either::Either;
use crate::{
- SyntaxElement, SyntaxToken, T,
+ SyntaxElement, SyntaxNode, SyntaxToken, T,
ast::{self, AstChildren, AstNode, AstToken, support},
+ match_ast,
syntax_node::SyntaxElementChildren,
};
@@ -76,34 +77,44 @@ pub trait HasAttrs: AstNode {
self.attrs().filter_map(|x| x.as_simple_atom()).any(|x| x == atom)
}
- /// Returns all attributes of this node, including inner attributes that may not be directly under this node
- /// but under a child.
- fn attrs_including_inner(self) -> impl Iterator<Item = ast::Attr>
- where
- Self: Sized,
- {
- let inner_attrs_node = if let Some(it) =
- support::child::<ast::BlockExpr>(self.syntax()).and_then(|it| it.stmt_list())
- {
- Some(it.syntax)
- } else if let Some(it) = support::child::<ast::MatchArmList>(self.syntax()) {
- Some(it.syntax)
- } else if let Some(it) = support::child::<ast::AssocItemList>(self.syntax()) {
- Some(it.syntax)
- } else if let Some(it) = support::child::<ast::ItemList>(self.syntax()) {
- Some(it.syntax)
- } else if let Some(it) = support::child::<ast::ExternItemList>(self.syntax()) {
- Some(it.syntax)
- } else if let Some(it) = support::child::<ast::MacroItems>(self.syntax()) {
- Some(it.syntax)
- } else {
- None
- };
-
- self.attrs().chain(inner_attrs_node.into_iter().flat_map(|it| support::children(&it)))
+ /// This may return the same node as called with (with `SourceFile`). The caller has the responsibility
+ /// to avoid duplicate attributes.
+ fn inner_attributes_node(&self) -> Option<SyntaxNode> {
+ let syntax = self.syntax();
+ Some(match_ast! {
+ match syntax {
+ // A `SourceFile` contains the inner attributes of itself.
+ ast::SourceFile(_) => syntax.clone(),
+ ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
+ ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
+ ast::MatchExpr(it) => it.match_arm_list()?.syntax().clone(),
+ ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
+ ast::Trait(it) => it.assoc_item_list()?.syntax().clone(),
+ ast::Module(it) => it.item_list()?.syntax().clone(),
+ ast::BlockExpr(it) => {
+ if !it.may_carry_attributes() {
+ return None;
+ }
+ syntax.clone()
+ },
+ _ => return None,
+ }
+ })
}
}
+/// Returns all attributes of this node, including inner attributes that may not be directly under this node
+/// but under a child.
+pub fn attrs_including_inner(owner: &dyn HasAttrs) -> impl Iterator<Item = ast::Attr> + Clone {
+ owner.attrs().filter(|attr| attr.kind().is_outer()).chain(
+ owner
+ .inner_attributes_node()
+ .into_iter()
+ .flat_map(|node| support::children::<ast::Attr>(&node))
+ .filter(|attr| attr.kind().is_inner()),
+ )
+}
+
pub trait HasDocComments: HasAttrs {
fn doc_comments(&self) -> DocCommentIter {
DocCommentIter { iter: self.syntax().children_with_tokens() }
@@ -118,7 +129,7 @@ impl DocCommentIter {
#[cfg(test)]
pub fn doc_comment_text(self) -> Option<String> {
let docs = itertools::Itertools::join(
- &mut self.filter_map(|comment| comment.doc_comment().map(ToOwned::to_owned)),
+ &mut self.filter_map(|comment| comment.doc_comment().map(|it| it.0.to_owned())),
"\n",
);
if docs.is_empty() { None } else { Some(docs) }
@@ -151,7 +162,7 @@ impl AttrDocCommentIter {
impl Iterator for AttrDocCommentIter {
type Item = Either<ast::Attr, ast::Comment>;
fn next(&mut self) -> Option<Self::Item> {
- self.iter.by_ref().find_map(|el| match el {
+ self.iter.find_map(|el| match el {
SyntaxElement::Node(node) => ast::Attr::cast(node).map(Either::Left),
SyntaxElement::Token(tok) => {
ast::Comment::cast(tok).filter(ast::Comment::is_doc).map(Either::Right)
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs
index a718b96a82..457cd3ac85 100644
--- a/crates/test-fixture/src/lib.rs
+++ b/crates/test-fixture/src/lib.rs
@@ -5,7 +5,7 @@ use base_db::target::TargetData;
use base_db::{
Crate, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData,
DependencyBuilder, Env, FileChange, FileSet, FxIndexMap, LangCrateOrigin, SourceDatabase,
- SourceRoot, Version, VfsPath, salsa,
+ SourceRoot, Version, VfsPath,
};
use cfg::CfgOptions;
use hir_expand::{
@@ -37,10 +37,11 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Self, EditionedFileId) {
let mut db = Self::default();
- let fixture = ChangeFixture::parse(&db, ra_fixture);
+ let fixture = ChangeFixture::parse(ra_fixture);
fixture.change.apply(&mut db);
assert_eq!(fixture.files.len(), 1, "Multiple file found in the fixture");
- (db, fixture.files[0])
+ let file = EditionedFileId::from_span_guess_origin(&db, fixture.files[0]);
+ (db, file)
}
#[track_caller]
@@ -48,16 +49,21 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Self, Vec<EditionedFileId>) {
let mut db = Self::default();
- let fixture = ChangeFixture::parse(&db, ra_fixture);
+ let fixture = ChangeFixture::parse(ra_fixture);
fixture.change.apply(&mut db);
assert!(fixture.file_position.is_none());
- (db, fixture.files)
+ let files = fixture
+ .files
+ .into_iter()
+ .map(|file| EditionedFileId::from_span_guess_origin(&db, file))
+ .collect();
+ (db, files)
}
#[track_caller]
fn with_files(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> Self {
let mut db = Self::default();
- let fixture = ChangeFixture::parse(&db, ra_fixture);
+ let fixture = ChangeFixture::parse(ra_fixture);
fixture.change.apply(&mut db);
assert!(fixture.file_position.is_none());
db
@@ -69,12 +75,8 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
proc_macros: Vec<(String, ProcMacro)>,
) -> Self {
let mut db = Self::default();
- let fixture = ChangeFixture::parse_with_proc_macros(
- &db,
- ra_fixture,
- MiniCore::RAW_SOURCE,
- proc_macros,
- );
+ let fixture =
+ ChangeFixture::parse_with_proc_macros(ra_fixture, MiniCore::RAW_SOURCE, proc_macros);
fixture.change.apply(&mut db);
assert!(fixture.file_position.is_none());
db
@@ -99,12 +101,13 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Self, EditionedFileId, RangeOrOffset) {
let mut db = Self::default();
- let fixture = ChangeFixture::parse(&db, ra_fixture);
+ let fixture = ChangeFixture::parse(ra_fixture);
fixture.change.apply(&mut db);
let (file_id, range_or_offset) = fixture
.file_position
.expect("Could not find file position in fixture. Did you forget to add an `$0`?");
+ let file_id = EditionedFileId::from_span_guess_origin(&db, file_id);
(db, file_id, range_or_offset)
}
@@ -116,9 +119,9 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
impl<DB: ExpandDatabase + SourceDatabase + Default + 'static> WithFixture for DB {}
pub struct ChangeFixture {
- pub file_position: Option<(EditionedFileId, RangeOrOffset)>,
+ pub file_position: Option<(span::EditionedFileId, RangeOrOffset)>,
pub file_lines: Vec<usize>,
- pub files: Vec<EditionedFileId>,
+ pub files: Vec<span::EditionedFileId>,
pub change: ChangeWithProcMacros,
pub sysroot_files: Vec<FileId>,
}
@@ -126,15 +129,11 @@ pub struct ChangeFixture {
const SOURCE_ROOT_PREFIX: &str = "/";
impl ChangeFixture {
- pub fn parse(
- db: &dyn salsa::Database,
- #[rust_analyzer::rust_fixture] ra_fixture: &str,
- ) -> ChangeFixture {
- Self::parse_with_proc_macros(db, ra_fixture, MiniCore::RAW_SOURCE, Vec::new())
+ pub fn parse(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> ChangeFixture {
+ Self::parse_with_proc_macros(ra_fixture, MiniCore::RAW_SOURCE, Vec::new())
}
pub fn parse_with_proc_macros(
- db: &dyn salsa::Database,
#[rust_analyzer::rust_fixture] ra_fixture: &str,
minicore_raw: &str,
mut proc_macro_defs: Vec<(String, ProcMacro)>,
@@ -202,7 +201,7 @@ impl ChangeFixture {
let meta = FileMeta::from_fixture(entry, current_source_root_kind);
if let Some(range_or_offset) = range_or_offset {
file_position =
- Some((EditionedFileId::new(db, file_id, meta.edition), range_or_offset));
+ Some((span::EditionedFileId::new(file_id, meta.edition), range_or_offset));
}
assert!(meta.path.starts_with(SOURCE_ROOT_PREFIX));
@@ -259,7 +258,7 @@ impl ChangeFixture {
source_change.change_file(file_id, Some(text));
let path = VfsPath::new_virtual_path(meta.path);
file_set.insert(file_id, path);
- files.push(EditionedFileId::new(db, file_id, meta.edition));
+ files.push(span::EditionedFileId::new(file_id, meta.edition));
file_id = FileId::from_raw(file_id.index() + 1);
}