Unnamed repository; edit this file 'description' to name the repository.
Merge pull request #19617 from ChayimFriedman2/more-actual
internal: Make `HirFileId`, `EditionedFileId` and macro files Salsa struct
Chayim Refael Friedman 2025-04-20
parent 6568e8f · parent c58ddaf · commit 150bb4a
-rw-r--r--crates/base-db/src/input.rs8
-rw-r--r--crates/base-db/src/lib.rs28
-rw-r--r--crates/hir-def/src/db.rs76
-rw-r--r--crates/hir-def/src/expr_store.rs14
-rw-r--r--crates/hir-def/src/expr_store/expander.rs9
-rw-r--r--crates/hir-def/src/expr_store/lower.rs12
-rw-r--r--crates/hir-def/src/expr_store/lower/path.rs2
-rw-r--r--crates/hir-def/src/expr_store/scope.rs15
-rw-r--r--crates/hir-def/src/expr_store/tests/body/block.rs4
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mbe.rs16
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mod.rs8
-rw-r--r--crates/hir-def/src/nameres.rs22
-rw-r--r--crates/hir-def/src/nameres/assoc.rs8
-rw-r--r--crates/hir-def/src/nameres/collector.rs12
-rw-r--r--crates/hir-def/src/nameres/mod_resolution.rs7
-rw-r--r--crates/hir-def/src/nameres/tests/incremental.rs6
-rw-r--r--crates/hir-def/src/resolver.rs4
-rw-r--r--crates/hir-def/src/test_db.rs11
-rw-r--r--crates/hir-expand/src/builtin/attr_macro.rs4
-rw-r--r--crates/hir-expand/src/builtin/derive_macro.rs6
-rw-r--r--crates/hir-expand/src/builtin/fn_macro.rs30
-rw-r--r--crates/hir-expand/src/db.rs95
-rw-r--r--crates/hir-expand/src/declarative.rs18
-rw-r--r--crates/hir-expand/src/eager.rs10
-rw-r--r--crates/hir-expand/src/files.rs111
-rw-r--r--crates/hir-expand/src/hygiene.rs2
-rw-r--r--crates/hir-expand/src/lib.rs267
-rw-r--r--crates/hir-expand/src/mod_path.rs4
-rw-r--r--crates/hir-expand/src/prettify_macro_expansion_.rs2
-rw-r--r--crates/hir-expand/src/span_map.rs20
-rw-r--r--crates/hir-ty/src/consteval.rs10
-rw-r--r--crates/hir-ty/src/consteval/tests.rs6
-rw-r--r--crates/hir-ty/src/db.rs75
-rw-r--r--crates/hir-ty/src/diagnostics/decl_check.rs4
-rw-r--r--crates/hir-ty/src/drop.rs8
-rw-r--r--crates/hir-ty/src/dyn_compatibility/tests.rs4
-rw-r--r--crates/hir-ty/src/layout.rs3
-rw-r--r--crates/hir-ty/src/layout/adt.rs3
-rw-r--r--crates/hir-ty/src/layout/tests.rs28
-rw-r--r--crates/hir-ty/src/lower.rs3
-rw-r--r--crates/hir-ty/src/mir/eval.rs4
-rw-r--r--crates/hir-ty/src/mir/eval/tests.rs7
-rw-r--r--crates/hir-ty/src/mir/monomorphization.rs3
-rw-r--r--crates/hir-ty/src/test_db.rs7
-rw-r--r--crates/hir-ty/src/tests.rs10
-rw-r--r--crates/hir-ty/src/tests/closure_captures.rs6
-rw-r--r--crates/hir-ty/src/tests/incremental.rs12
-rw-r--r--crates/hir-ty/src/variance.rs2
-rw-r--r--crates/hir/src/has_source.rs3
-rw-r--r--crates/hir/src/lib.rs23
-rw-r--r--crates/hir/src/semantics.rs118
-rw-r--r--crates/hir/src/semantics/child_by_source.rs2
-rw-r--r--crates/hir/src/semantics/source_to_def.rs38
-rw-r--r--crates/hir/src/source_analyzer.rs21
-rw-r--r--crates/ide-assists/src/assist_context.rs17
-rw-r--r--crates/ide-assists/src/handlers/add_braces.rs2
-rw-r--r--crates/ide-assists/src/handlers/add_missing_match_arms.rs10
-rw-r--r--crates/ide-assists/src/handlers/add_turbo_fish.rs4
-rw-r--r--crates/ide-assists/src/handlers/apply_demorgan.rs4
-rw-r--r--crates/ide-assists/src/handlers/bind_unused_param.rs2
-rw-r--r--crates/ide-assists/src/handlers/convert_bool_then.rs4
-rw-r--r--crates/ide-assists/src/handlers/convert_bool_to_enum.rs2
-rw-r--r--crates/ide-assists/src/handlers/convert_closure_to_fn.rs11
-rw-r--r--crates/ide-assists/src/handlers/convert_for_to_while_let.rs2
-rw-r--r--crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs6
-rw-r--r--crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs2
-rw-r--r--crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs8
-rw-r--r--crates/ide-assists/src/handlers/destructure_struct_binding.rs2
-rw-r--r--crates/ide-assists/src/handlers/expand_rest_pattern.rs4
-rw-r--r--crates/ide-assists/src/handlers/extract_module.rs27
-rw-r--r--crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs4
-rw-r--r--crates/ide-assists/src/handlers/extract_type_alias.rs2
-rw-r--r--crates/ide-assists/src/handlers/extract_variable.rs4
-rw-r--r--crates/ide-assists/src/handlers/fix_visibility.rs14
-rw-r--r--crates/ide-assists/src/handlers/flip_binexpr.rs2
-rw-r--r--crates/ide-assists/src/handlers/flip_comma.rs2
-rw-r--r--crates/ide-assists/src/handlers/flip_or_pattern.rs2
-rw-r--r--crates/ide-assists/src/handlers/flip_trait_bound.rs2
-rw-r--r--crates/ide-assists/src/handlers/generate_constant.rs6
-rw-r--r--crates/ide-assists/src/handlers/generate_delegate_trait.rs2
-rw-r--r--crates/ide-assists/src/handlers/generate_enum_variant.rs2
-rw-r--r--crates/ide-assists/src/handlers/generate_fn_type_alias.rs2
-rw-r--r--crates/ide-assists/src/handlers/generate_function.rs16
-rw-r--r--crates/ide-assists/src/handlers/inline_call.rs8
-rw-r--r--crates/ide-assists/src/handlers/inline_local_variable.rs2
-rw-r--r--crates/ide-assists/src/handlers/inline_macro.rs6
-rw-r--r--crates/ide-assists/src/handlers/inline_type_alias.rs6
-rw-r--r--crates/ide-assists/src/handlers/introduce_named_type_parameter.rs2
-rw-r--r--crates/ide-assists/src/handlers/move_const_to_impl.rs2
-rw-r--r--crates/ide-assists/src/handlers/move_from_mod_rs.rs6
-rw-r--r--crates/ide-assists/src/handlers/move_module_to_file.rs2
-rw-r--r--crates/ide-assists/src/handlers/move_to_mod_rs.rs6
-rw-r--r--crates/ide-assists/src/handlers/remove_mut.rs2
-rw-r--r--crates/ide-assists/src/handlers/remove_parentheses.rs2
-rw-r--r--crates/ide-assists/src/handlers/remove_unused_imports.rs2
-rw-r--r--crates/ide-assists/src/handlers/remove_unused_param.rs14
-rw-r--r--crates/ide-assists/src/handlers/reorder_fields.rs2
-rw-r--r--crates/ide-assists/src/handlers/reorder_impl_items.rs2
-rw-r--r--crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs2
-rw-r--r--crates/ide-assists/src/handlers/replace_if_let_with_match.rs4
-rw-r--r--crates/ide-assists/src/handlers/replace_let_with_if_let.rs2
-rw-r--r--crates/ide-assists/src/handlers/unwrap_return_type.rs2
-rw-r--r--crates/ide-assists/src/handlers/wrap_return_type.rs2
-rw-r--r--crates/ide-assists/src/lib.rs4
-rw-r--r--crates/ide-assists/src/tests.rs69
-rw-r--r--crates/ide-completion/src/completions/env_vars.rs1
-rw-r--r--crates/ide-completion/src/completions/item_list/trait_impl.rs6
-rw-r--r--crates/ide-completion/src/completions/mod_.rs12
-rw-r--r--crates/ide-completion/src/completions/postfix.rs2
-rw-r--r--crates/ide-completion/src/context.rs11
-rw-r--r--crates/ide-completion/src/lib.rs3
-rw-r--r--crates/ide-completion/src/tests.rs5
-rw-r--r--crates/ide-db/src/imports/insert_use/tests.rs5
-rw-r--r--crates/ide-db/src/lib.rs5
-rw-r--r--crates/ide-db/src/rename.rs24
-rw-r--r--crates/ide-db/src/search.rs41
-rw-r--r--crates/ide-db/src/symbol_index.rs4
-rw-r--r--crates/ide-db/src/syntax_helpers/suggest_name.rs3
-rw-r--r--crates/ide-db/src/test_data/test_doc_alias.txt65
-rw-r--r--crates/ide-db/src/test_data/test_symbol_index_collection.txt282
-rw-r--r--crates/ide-db/src/traits.rs13
-rw-r--r--crates/ide-diagnostics/src/handlers/field_shorthand.rs18
-rw-r--r--crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/json_is_not_rust.rs7
-rw-r--r--crates/ide-diagnostics/src/handlers/missing_fields.rs9
-rw-r--r--crates/ide-diagnostics/src/handlers/missing_unsafe.rs8
-rw-r--r--crates/ide-diagnostics/src/handlers/mutability_errors.rs4
-rw-r--r--crates/ide-diagnostics/src/handlers/no_such_field.rs4
-rw-r--r--crates/ide-diagnostics/src/handlers/remove_trailing_return.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs8
-rw-r--r--crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs5
-rw-r--r--crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs6
-rw-r--r--crates/ide-diagnostics/src/handlers/type_mismatch.rs28
-rw-r--r--crates/ide-diagnostics/src/handlers/typed_hole.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/unlinked_file.rs20
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_field.rs10
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_method.rs6
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_module.rs4
-rw-r--r--crates/ide-diagnostics/src/handlers/unused_variables.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/useless_braces.rs6
-rw-r--r--crates/ide-diagnostics/src/lib.rs45
-rw-r--r--crates/ide-diagnostics/src/tests.rs14
-rw-r--r--crates/ide-ssr/src/from_comment.rs9
-rw-r--r--crates/ide-ssr/src/lib.rs16
-rw-r--r--crates/ide-ssr/src/matching.rs16
-rw-r--r--crates/ide-ssr/src/replacing.rs2
-rw-r--r--crates/ide-ssr/src/resolving.rs9
-rw-r--r--crates/ide-ssr/src/search.rs14
-rw-r--r--crates/ide-ssr/src/tests.rs46
-rw-r--r--crates/ide/src/annotations.rs2
-rw-r--r--crates/ide/src/call_hierarchy.rs6
-rw-r--r--crates/ide/src/expand_macro.rs4
-rw-r--r--crates/ide/src/fixture.rs32
-rw-r--r--crates/ide/src/goto_definition.rs7
-rw-r--r--crates/ide/src/highlight_related.rs38
-rw-r--r--crates/ide/src/hover.rs4
-rw-r--r--crates/ide/src/inlay_hints.rs26
-rw-r--r--crates/ide/src/inlay_hints/closure_captures.rs12
-rw-r--r--crates/ide/src/inlay_hints/generic_param.rs5
-rw-r--r--crates/ide/src/inlay_hints/implicit_drop.rs2
-rw-r--r--crates/ide/src/inlay_hints/param_name.rs7
-rw-r--r--crates/ide/src/lib.rs49
-rw-r--r--crates/ide/src/navigation_target.rs24
-rw-r--r--crates/ide/src/parent_module.rs4
-rw-r--r--crates/ide/src/references.rs23
-rw-r--r--crates/ide/src/rename.rs25
-rw-r--r--crates/ide/src/runnables.rs10
-rw-r--r--crates/ide/src/signature_help.rs7
-rw-r--r--crates/ide/src/ssr.rs2
-rw-r--r--crates/ide/src/static_index.rs14
-rw-r--r--crates/ide/src/syntax_highlighting.rs26
-rw-r--r--crates/ide/src/syntax_highlighting/highlight.rs2
-rw-r--r--crates/ide/src/syntax_highlighting/html.rs12
-rw-r--r--crates/ide/src/syntax_highlighting/inject.rs3
-rw-r--r--crates/ide/src/typing.rs13
-rw-r--r--crates/ide/src/typing/on_enter.rs9
-rw-r--r--crates/ide/src/view_item_tree.rs7
-rw-r--r--crates/query-group-macro/src/lib.rs15
-rw-r--r--crates/query-group-macro/tests/arity.rs4
-rw-r--r--crates/query-group-macro/tests/cycle.rs40
-rw-r--r--crates/query-group-macro/tests/hello_world.rs8
-rw-r--r--crates/query-group-macro/tests/interned.rs4
-rw-r--r--crates/query-group-macro/tests/lru.rs3
-rw-r--r--crates/query-group-macro/tests/multiple_dbs.rs2
-rw-r--r--crates/query-group-macro/tests/old_and_new.rs12
-rw-r--r--crates/query-group-macro/tests/result.rs2
-rw-r--r--crates/query-group-macro/tests/supertrait.rs1
-rw-r--r--crates/query-group-macro/tests/tuples.rs1
-rw-r--r--crates/rust-analyzer/src/cli/analysis_stats.rs40
-rw-r--r--crates/rust-analyzer/src/cli/diagnostics.rs8
-rw-r--r--crates/rust-analyzer/src/cli/scip.rs7
-rw-r--r--crates/rust-analyzer/src/cli/ssr.rs2
-rw-r--r--crates/rust-analyzer/src/cli/unresolved_references.rs25
-rw-r--r--crates/span/src/lib.rs66
-rw-r--r--crates/test-fixture/src/lib.rs28
195 files changed, 1471 insertions, 1523 deletions
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index cfc22b7246..3f6766b332 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -16,11 +16,11 @@ use intern::Symbol;
use la_arena::{Arena, Idx, RawIdx};
use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
use salsa::{Durability, Setter};
-use span::{Edition, EditionedFileId};
+use span::Edition;
use triomphe::Arc;
use vfs::{AbsPathBuf, AnchoredPath, FileId, VfsPath, file_set::FileSet};
-use crate::{CrateWorkspaceData, RootQueryDb};
+use crate::{CrateWorkspaceData, EditionedFileId, RootQueryDb};
pub type ProcMacroPaths = FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), String>>;
@@ -773,8 +773,8 @@ pub(crate) fn transitive_rev_deps(db: &dyn RootQueryDb, of: Crate) -> FxHashSet<
}
impl BuiltCrateData {
- pub fn root_file_id(&self) -> EditionedFileId {
- EditionedFileId::new(self.root_file_id, self.edition)
+ pub fn root_file_id(&self, db: &dyn salsa::Database) -> EditionedFileId {
+ EditionedFileId::new(db, self.root_file_id, self.edition)
}
}
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 15ce934dc9..55010a0349 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -20,6 +20,7 @@ use rustc_hash::{FxHashSet, FxHasher};
pub use salsa::{self};
use salsa::{Durability, Setter};
pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
+use span::Edition;
use syntax::{Parse, SyntaxError, ast};
use triomphe::Arc;
pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet};
@@ -151,21 +152,39 @@ impl Files {
}
}
-#[salsa::interned(no_lifetime)]
+#[salsa::interned(no_lifetime, constructor=from_span)]
pub struct EditionedFileId {
pub editioned_file_id: span::EditionedFileId,
}
impl EditionedFileId {
- pub fn file_id(&self, db: &dyn salsa::Database) -> vfs::FileId {
+ // Salsa already uses the name `new`...
+ #[inline]
+ pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition) -> Self {
+ EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition))
+ }
+
+ #[inline]
+ pub fn current_edition(db: &dyn salsa::Database, file_id: FileId) -> Self {
+ EditionedFileId::new(db, file_id, Edition::CURRENT)
+ }
+
+ #[inline]
+ pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
let id = self.editioned_file_id(db);
id.file_id()
}
- fn unpack(&self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
+ #[inline]
+ pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
let id = self.editioned_file_id(db);
(id.file_id(), id.edition())
}
+
+ #[inline]
+ pub fn edition(self, db: &dyn SourceDatabase) -> Edition {
+ self.editioned_file_id(db).edition()
+ }
}
#[salsa::input]
@@ -189,7 +208,7 @@ pub struct SourceRootInput {
#[query_group::query_group]
pub trait RootQueryDb: SourceDatabase + salsa::Database {
/// Parses the file into the syntax tree.
- #[salsa::invoke_actual(parse)]
+ #[salsa::invoke(parse)]
#[salsa::lru(128)]
fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;
@@ -201,6 +220,7 @@ pub trait RootQueryDb: SourceDatabase + salsa::Database {
fn toolchain_channel(&self, krate: Crate) -> Option<ReleaseChannel>;
/// Crates whose root file is in `id`.
+ #[salsa::invoke_interned(source_root_crates)]
fn source_root_crates(&self, id: SourceRootId) -> Arc<[Crate]>;
#[salsa::transparent]
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index a50b94a139..a5e58ec350 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -1,10 +1,9 @@
//! Defines database & queries for name resolution.
use base_db::{Crate, RootQueryDb, SourceDatabase};
use either::Either;
-use hir_expand::{HirFileId, MacroDefId, db::ExpandDatabase};
+use hir_expand::{EditionedFileId, HirFileId, MacroCallId, MacroDefId, db::ExpandDatabase};
use intern::sym;
use la_arena::ArenaMap;
-use span::{EditionedFileId, MacroCallId};
use syntax::{AstPtr, ast};
use thin_vec::ThinVec;
use triomphe::Arc;
@@ -109,26 +108,26 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
#[salsa::invoke(ItemTree::file_item_tree_query)]
fn file_item_tree(&self, file_id: HirFileId) -> Arc<ItemTree>;
- #[salsa::invoke_actual(ItemTree::block_item_tree_query)]
+ #[salsa::invoke(ItemTree::block_item_tree_query)]
fn block_item_tree(&self, block_id: BlockId) -> Arc<ItemTree>;
- #[salsa::invoke_actual(DefMap::crate_local_def_map_query)]
+ #[salsa::invoke(DefMap::crate_local_def_map_query)]
fn crate_local_def_map(&self, krate: Crate) -> (Arc<DefMap>, Arc<LocalDefMap>);
- #[salsa::invoke_actual(DefMap::crate_def_map_query)]
+ #[salsa::invoke(DefMap::crate_def_map_query)]
fn crate_def_map(&self, krate: Crate) -> Arc<DefMap>;
/// Computes the block-level `DefMap`.
- #[salsa::invoke_actual(DefMap::block_def_map_query)]
+ #[salsa::invoke(DefMap::block_def_map_query)]
fn block_def_map(&self, block: BlockId) -> Arc<DefMap>;
/// Turns a MacroId into a MacroDefId, describing the macro's definition post name resolution.
- #[salsa::invoke_actual(macro_def)]
+ #[salsa::invoke(macro_def)]
fn macro_def(&self, m: MacroId) -> MacroDefId;
// region:data
- #[salsa::invoke_actual(VariantFields::query)]
+ #[salsa::invoke(VariantFields::query)]
fn variant_fields_with_source_map(
&self,
id: VariantId,
@@ -139,24 +138,24 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
self.enum_variants_with_diagnostics(id).0
}
- #[salsa::invoke_actual(EnumVariants::enum_variants_query)]
+ #[salsa::invoke(EnumVariants::enum_variants_query)]
fn enum_variants_with_diagnostics(
&self,
id: EnumId,
) -> (Arc<EnumVariants>, Option<Arc<ThinVec<InactiveEnumVariantCode>>>);
#[salsa::transparent]
- #[salsa::invoke_actual(ImplItems::impl_items_query)]
+ #[salsa::invoke(ImplItems::impl_items_query)]
fn impl_items(&self, e: ImplId) -> Arc<ImplItems>;
- #[salsa::invoke_actual(ImplItems::impl_items_with_diagnostics_query)]
+ #[salsa::invoke(ImplItems::impl_items_with_diagnostics_query)]
fn impl_items_with_diagnostics(&self, e: ImplId) -> (Arc<ImplItems>, DefDiagnostics);
#[salsa::transparent]
- #[salsa::invoke_actual(TraitItems::trait_items_query)]
+ #[salsa::invoke(TraitItems::trait_items_query)]
fn trait_items(&self, e: TraitId) -> Arc<TraitItems>;
- #[salsa::invoke_actual(TraitItems::trait_items_with_diagnostics_query)]
+ #[salsa::invoke(TraitItems::trait_items_with_diagnostics_query)]
fn trait_items_with_diagnostics(&self, tr: TraitId) -> (Arc<TraitItems>, DefDiagnostics);
#[salsa::tracked]
@@ -214,61 +213,61 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
self.type_alias_signature_with_source_map(e).0
}
- #[salsa::invoke_actual(TraitSignature::query)]
+ #[salsa::invoke(TraitSignature::query)]
fn trait_signature_with_source_map(
&self,
trait_: TraitId,
) -> (Arc<TraitSignature>, Arc<ExpressionStoreSourceMap>);
- #[salsa::invoke_actual(ImplSignature::query)]
+ #[salsa::invoke(ImplSignature::query)]
fn impl_signature_with_source_map(
&self,
impl_: ImplId,
) -> (Arc<ImplSignature>, Arc<ExpressionStoreSourceMap>);
- #[salsa::invoke_actual(StructSignature::query)]
+ #[salsa::invoke(StructSignature::query)]
fn struct_signature_with_source_map(
&self,
struct_: StructId,
) -> (Arc<StructSignature>, Arc<ExpressionStoreSourceMap>);
- #[salsa::invoke_actual(UnionSignature::query)]
+ #[salsa::invoke(UnionSignature::query)]
fn union_signature_with_source_map(
&self,
union_: UnionId,
) -> (Arc<UnionSignature>, Arc<ExpressionStoreSourceMap>);
- #[salsa::invoke_actual(EnumSignature::query)]
+ #[salsa::invoke(EnumSignature::query)]
fn enum_signature_with_source_map(
&self,
e: EnumId,
) -> (Arc<EnumSignature>, Arc<ExpressionStoreSourceMap>);
- #[salsa::invoke_actual(ConstSignature::query)]
+ #[salsa::invoke(ConstSignature::query)]
fn const_signature_with_source_map(
&self,
e: ConstId,
) -> (Arc<ConstSignature>, Arc<ExpressionStoreSourceMap>);
- #[salsa::invoke_actual(StaticSignature::query)]
+ #[salsa::invoke(StaticSignature::query)]
fn static_signature_with_source_map(
&self,
e: StaticId,
) -> (Arc<StaticSignature>, Arc<ExpressionStoreSourceMap>);
- #[salsa::invoke_actual(FunctionSignature::query)]
+ #[salsa::invoke(FunctionSignature::query)]
fn function_signature_with_source_map(
&self,
e: FunctionId,
) -> (Arc<FunctionSignature>, Arc<ExpressionStoreSourceMap>);
- #[salsa::invoke_actual(TraitAliasSignature::query)]
+ #[salsa::invoke(TraitAliasSignature::query)]
fn trait_alias_signature_with_source_map(
&self,
e: TraitAliasId,
) -> (Arc<TraitAliasSignature>, Arc<ExpressionStoreSourceMap>);
- #[salsa::invoke_actual(TypeAliasSignature::query)]
+ #[salsa::invoke(TypeAliasSignature::query)]
fn type_alias_signature_with_source_map(
&self,
e: TypeAliasId,
@@ -283,7 +282,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
#[salsa::invoke(Body::body_query)]
fn body(&self, def: DefWithBodyId) -> Arc<Body>;
- #[salsa::invoke_actual(ExprScopes::expr_scopes_query)]
+ #[salsa::invoke(ExprScopes::expr_scopes_query)]
fn expr_scopes(&self, def: DefWithBodyId) -> Arc<ExprScopes>;
#[salsa::transparent]
@@ -306,17 +305,18 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
// region:attrs
- #[salsa::invoke_actual(Attrs::fields_attrs_query)]
+ #[salsa::invoke(Attrs::fields_attrs_query)]
fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;
// should this really be a query?
- #[salsa::invoke_actual(crate::attr::fields_attrs_source_map)]
+ #[salsa::invoke(crate::attr::fields_attrs_source_map)]
fn fields_attrs_source_map(
&self,
def: VariantId,
) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;
- #[salsa::invoke(AttrsWithOwner::attrs_query)]
+ // FIXME: Make this a non-interned query.
+ #[salsa::invoke_interned(AttrsWithOwner::attrs_query)]
fn attrs(&self, def: AttrDefId) -> Attrs;
#[salsa::transparent]
@@ -328,39 +328,39 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
#[salsa::invoke(LangItems::lang_item_query)]
fn lang_item(&self, start_crate: Crate, item: LangItem) -> Option<LangItemTarget>;
- #[salsa::invoke_actual(ImportMap::import_map_query)]
+ #[salsa::invoke(ImportMap::import_map_query)]
fn import_map(&self, krate: Crate) -> Arc<ImportMap>;
// region:visibilities
- #[salsa::invoke_actual(visibility::field_visibilities_query)]
+ #[salsa::invoke(visibility::field_visibilities_query)]
fn field_visibilities(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Visibility>>;
// FIXME: unify function_visibility and const_visibility?
- #[salsa::invoke_actual(visibility::function_visibility_query)]
+ #[salsa::invoke(visibility::function_visibility_query)]
fn function_visibility(&self, def: FunctionId) -> Visibility;
- #[salsa::invoke_actual(visibility::const_visibility_query)]
+ #[salsa::invoke(visibility::const_visibility_query)]
fn const_visibility(&self, def: ConstId) -> Visibility;
- #[salsa::invoke_actual(visibility::type_alias_visibility_query)]
+ #[salsa::invoke(visibility::type_alias_visibility_query)]
fn type_alias_visibility(&self, def: TypeAliasId) -> Visibility;
// endregion:visibilities
- #[salsa::invoke_actual(LangItems::crate_lang_items_query)]
+ #[salsa::invoke(LangItems::crate_lang_items_query)]
fn crate_lang_items(&self, krate: Crate) -> Option<Arc<LangItems>>;
- #[salsa::invoke_actual(crate::lang_item::notable_traits_in_deps)]
+ #[salsa::invoke(crate::lang_item::notable_traits_in_deps)]
fn notable_traits_in_deps(&self, krate: Crate) -> Arc<[Arc<[TraitId]>]>;
- #[salsa::invoke_actual(crate::lang_item::crate_notable_traits)]
+ #[salsa::invoke(crate::lang_item::crate_notable_traits)]
fn crate_notable_traits(&self, krate: Crate) -> Option<Arc<[TraitId]>>;
- #[salsa::invoke_actual(crate_supports_no_std)]
+ #[salsa::invoke(crate_supports_no_std)]
fn crate_supports_no_std(&self, crate_id: Crate) -> bool;
- #[salsa::invoke_actual(include_macro_invoc)]
+ #[salsa::invoke(include_macro_invoc)]
fn include_macro_invoc(&self, crate_id: Crate) -> Arc<[(MacroCallId, EditionedFileId)]>;
}
@@ -382,7 +382,7 @@ fn include_macro_invoc(
}
fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: Crate) -> bool {
- let file = crate_id.data(db).root_file_id();
+ let file = crate_id.data(db).root_file_id(db);
let item_tree = db.file_item_tree(file.into());
let attrs = item_tree.raw_attrs(AttrOwner::TopLevel);
for attr in &**attrs {
diff --git a/crates/hir-def/src/expr_store.rs b/crates/hir-def/src/expr_store.rs
index 9349ee7740..3bc43666fe 100644
--- a/crates/hir-def/src/expr_store.rs
+++ b/crates/hir-def/src/expr_store.rs
@@ -13,11 +13,11 @@ use std::ops::{Deref, Index};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
-use hir_expand::{ExpandError, InFile, mod_path::ModPath, name::Name};
+use hir_expand::{ExpandError, InFile, MacroCallId, mod_path::ModPath, name::Name};
use la_arena::{Arena, ArenaMap};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
-use span::{Edition, MacroFileId, SyntaxContext};
+use span::{Edition, SyntaxContext};
use syntax::{AstPtr, SyntaxNodePtr, ast};
use triomphe::Arc;
use tt::TextRange;
@@ -138,7 +138,7 @@ pub struct ExpressionStoreSourceMap {
template_map: Option<Box<FormatTemplate>>,
- pub expansions: FxHashMap<InFile<MacroCallPtr>, MacroFileId>,
+ pub expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
/// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
/// the source map (since they're just as volatile).
@@ -645,12 +645,12 @@ impl ExpressionStoreSourceMap {
self.expr_map.get(&src).cloned()
}
- pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<MacroFileId> {
+ pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
let src = node.map(AstPtr::new);
self.expansions.get(&src).cloned()
}
- pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroFileId)> + '_ {
+ pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroCallId)> + '_ {
self.expansions.iter().map(|(&a, &b)| (a, b))
}
@@ -696,11 +696,11 @@ impl ExpressionStoreSourceMap {
self.expr_map.get(&src).copied()
}
- pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroFileId)> {
+ pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroCallId)> {
self.expansions.iter()
}
- pub fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroFileId> {
+ pub fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
self.expansions.get(&node.map(AstPtr::new)).copied()
}
diff --git a/crates/hir-def/src/expr_store/expander.rs b/crates/hir-def/src/expr_store/expander.rs
index 836b58523c..7eec913dd6 100644
--- a/crates/hir-def/src/expr_store/expander.rs
+++ b/crates/hir-def/src/expr_store/expander.rs
@@ -4,11 +4,9 @@ use std::mem;
use base_db::Crate;
use drop_bomb::DropBomb;
-use hir_expand::attrs::RawAttrs;
-use hir_expand::eager::EagerCallBackFn;
use hir_expand::{
ExpandError, ExpandErrorKind, ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
- mod_path::ModPath, span_map::SpanMap,
+ attrs::RawAttrs, eager::EagerCallBackFn, mod_path::ModPath, span_map::SpanMap,
};
use span::{AstIdMap, Edition, SyntaxContext};
use syntax::ast::HasAttrs;
@@ -183,8 +181,7 @@ impl Expander {
));
}
- let macro_file = call_id.as_macro_file();
- let res = db.parse_macro_expansion(macro_file);
+ let res = db.parse_macro_expansion(call_id);
let err = err.or(res.err);
ExpandResult {
@@ -192,7 +189,7 @@ impl Expander {
let parse = res.value.0.cast::<T>();
self.recursion_depth += 1;
- let old_file_id = std::mem::replace(&mut self.current_file_id, macro_file.into());
+ let old_file_id = std::mem::replace(&mut self.current_file_id, call_id.into());
let old_span_map =
std::mem::replace(&mut self.span_map, db.span_map(self.current_file_id));
let prev_ast_id_map =
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index 68bc44048b..cda8605728 100644
--- a/crates/hir-def/src/expr_store/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -9,13 +9,12 @@ use std::mem;
use either::Either;
use hir_expand::{
- InFile, Lookup, MacroDefId,
+ HirFileId, InFile, Lookup, MacroDefId,
mod_path::tool_path,
name::{AsName, Name},
};
use intern::{Symbol, sym};
use rustc_hash::FxHashMap;
-use span::HirFileId;
use stdx::never;
use syntax::{
AstNode, AstPtr, AstToken as _, SyntaxNodePtr,
@@ -1887,10 +1886,7 @@ impl ExprCollector<'_> {
self.module.krate(),
resolver,
&mut |ptr, call| {
- _ = self
- .source_map
- .expansions
- .insert(ptr.map(|(it, _)| it), call.as_macro_file());
+ _ = self.source_map.expansions.insert(ptr.map(|(it, _)| it), call);
},
)
}
@@ -2516,7 +2512,7 @@ impl ExprCollector<'_> {
None
} else {
hygiene_id.lookup().outer_expn(self.db).map(|expansion| {
- let expansion = self.db.lookup_intern_macro_call(expansion);
+ let expansion = self.db.lookup_intern_macro_call(expansion.into());
(hygiene_id.lookup().parent(self.db), expansion.def)
})
};
@@ -2546,7 +2542,7 @@ impl ExprCollector<'_> {
hygiene_id =
HygieneId::new(parent_ctx.opaque_and_semitransparent(self.db));
hygiene_info = parent_ctx.outer_expn(self.db).map(|expansion| {
- let expansion = self.db.lookup_intern_macro_call(expansion);
+ let expansion = self.db.lookup_intern_macro_call(expansion.into());
(parent_ctx.parent(self.db), expansion.def)
});
}
diff --git a/crates/hir-def/src/expr_store/lower/path.rs b/crates/hir-def/src/expr_store/lower/path.rs
index a428801e62..5f847b6752 100644
--- a/crates/hir-def/src/expr_store/lower/path.rs
+++ b/crates/hir-def/src/expr_store/lower/path.rs
@@ -215,7 +215,7 @@ pub(super) fn lower_path(
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
let syn_ctxt = collector.expander.ctx_for_range(path.segment()?.syntax().text_range());
if let Some(macro_call_id) = syn_ctxt.outer_expn(collector.db) {
- if collector.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
+ if collector.db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner {
kind = match resolve_crate_root(collector.db, syn_ctxt) {
Some(crate_root) => PathKind::DollarCrate(crate_root),
None => PathKind::Crate,
diff --git a/crates/hir-def/src/expr_store/scope.rs b/crates/hir-def/src/expr_store/scope.rs
index 62a1e3b30a..431ea9eb1d 100644
--- a/crates/hir-def/src/expr_store/scope.rs
+++ b/crates/hir-def/src/expr_store/scope.rs
@@ -319,7 +319,6 @@ fn compute_expr_scopes(
mod tests {
use base_db::RootQueryDb;
use hir_expand::{InFile, name::AsName};
- use salsa::AsDynDatabase;
use span::FileId;
use syntax::{AstNode, algo::find_node_at_offset, ast};
use test_fixture::WithFixture;
@@ -331,7 +330,7 @@ mod tests {
let krate = db.test_crate();
let crate_def_map = db.crate_def_map(krate);
- let module = crate_def_map.modules_for_file(file_id).next().unwrap();
+ let module = crate_def_map.modules_for_file(db, file_id).next().unwrap();
let (_, def) = crate_def_map[module].scope.entries().next().unwrap();
match def.take_values().unwrap() {
ModuleDefId::FunctionId(it) => it,
@@ -354,11 +353,9 @@ mod tests {
let editioned_file_id = position.file_id;
let offset = position.offset;
- let (file_id, _) = editioned_file_id.unpack();
- let editioned_file_id_wrapper =
- base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
+ let (file_id, _) = editioned_file_id.unpack(&db);
- let file_syntax = db.parse(editioned_file_id_wrapper).syntax_node();
+ let file_syntax = db.parse(editioned_file_id).syntax_node();
let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap();
let function = find_function(&db, file_id);
@@ -512,11 +509,9 @@ fn foo() {
let editioned_file_id = position.file_id;
let offset = position.offset;
- let (file_id, _) = editioned_file_id.unpack();
- let file_id_wrapper =
- base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
+ let (file_id, _) = editioned_file_id.unpack(&db);
- let file = db.parse(file_id_wrapper).ok().unwrap();
+ let file = db.parse(editioned_file_id).ok().unwrap();
let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
.expect("failed to find a name at the target offset");
let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap();
diff --git a/crates/hir-def/src/expr_store/tests/body/block.rs b/crates/hir-def/src/expr_store/tests/body/block.rs
index c908f7d54f..da3b65d420 100644
--- a/crates/hir-def/src/expr_store/tests/body/block.rs
+++ b/crates/hir-def/src/expr_store/tests/body/block.rs
@@ -189,8 +189,8 @@ fn f() {
}
"#,
expect![[r#"
- BlockId(4801) in BlockRelativeModuleId { block: Some(BlockId(4800)), local_id: Idx::<ModuleData>(1) }
- BlockId(4800) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
+ BlockId(3801) in BlockRelativeModuleId { block: Some(BlockId(3800)), local_id: Idx::<ModuleData>(1) }
+ BlockId(3800) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
crate scope
"#]],
);
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs
index 95edeb70fe..abb5bd5ed7 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -35,9 +35,9 @@ macro_rules! f {
};
}
-struct#0:[email protected]#19456# MyTraitMap2#0:[email protected]#ROOT2024# {#0:[email protected]#19456#
- map#0:[email protected]#19456#:#0:[email protected]#19456# #0:[email protected]#19456#::#0:[email protected]#19456#std#0:[email protected]#19456#::#0:[email protected]#19456#collections#0:[email protected]#19456#::#0:[email protected]#19456#HashSet#0:[email protected]#19456#<#0:[email protected]#19456#(#0:[email protected]#19456#)#0:[email protected]#19456#>#0:[email protected]#19456#,#0:[email protected]#19456#
-}#0:[email protected]#19456#
+struct#0:[email protected]#14336# MyTraitMap2#0:[email protected]#ROOT2024# {#0:[email protected]#14336#
+ map#0:[email protected]#14336#:#0:[email protected]#14336# #0:[email protected]#14336#::#0:[email protected]#14336#std#0:[email protected]#14336#::#0:[email protected]#14336#collections#0:[email protected]#14336#::#0:[email protected]#14336#HashSet#0:[email protected]#14336#<#0:[email protected]#14336#(#0:[email protected]#14336#)#0:[email protected]#14336#>#0:[email protected]#14336#,#0:[email protected]#14336#
+}#0:[email protected]#14336#
"#]],
);
}
@@ -197,7 +197,7 @@ macro_rules! mk_struct {
#[macro_use]
mod foo;
-struct#1:[email protected]#19456# Foo#0:[email protected]#ROOT2024#(#1:[email protected]#19456#u32#0:[email protected]#ROOT2024#)#1:[email protected]#19456#;#1:[email protected]#19456#
+struct#1:[email protected]#14336# Foo#0:[email protected]#ROOT2024#(#1:[email protected]#14336#u32#0:[email protected]#ROOT2024#)#1:[email protected]#14336#;#1:[email protected]#14336#
"#]],
);
}
@@ -423,10 +423,10 @@ m! { foo, bar }
macro_rules! m {
($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
}
-impl#\19456# Bar#\19456# {#\19456#
- fn#\19456# foo#\ROOT2024#(#\19456#)#\19456# {#\19456#}#\19456#
- fn#\19456# bar#\ROOT2024#(#\19456#)#\19456# {#\19456#}#\19456#
-}#\19456#
+impl#\14336# Bar#\14336# {#\14336#
+ fn#\14336# foo#\ROOT2024#(#\14336#)#\14336# {#\14336#}#\14336#
+ fn#\14336# bar#\ROOT2024#(#\14336#)#\14336# {#\14336#}#\14336#
+}#\14336#
"#]],
);
}
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index 2368b5f9ca..143b5df773 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -19,14 +19,13 @@ use std::{iter, ops::Range, sync};
use base_db::RootQueryDb;
use expect_test::Expect;
use hir_expand::{
- InFile, MacroCallKind, MacroFileId, MacroFileIdExt, MacroKind,
+ InFile, MacroCallKind, MacroKind,
db::ExpandDatabase,
proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind},
span_map::SpanMapRef,
};
use intern::Symbol;
use itertools::Itertools;
-use salsa::AsDynDatabase;
use span::{Edition, Span};
use stdx::{format_to, format_to_acc};
use syntax::{
@@ -67,8 +66,6 @@ fn check_errors(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect)
let editioned_file_id =
ast_id.file_id.file_id().expect("macros inside macros are not supported");
- let editioned_file_id =
- base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
let ast = db.parse(editioned_file_id).syntax_node();
let ast_id_map = db.ast_id_map(ast_id.file_id);
@@ -143,8 +140,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
)
.unwrap();
let macro_call_id = res.value.unwrap();
- let macro_file = MacroFileId { macro_call_id };
- let mut expansion_result = db.parse_macro_expansion(macro_file);
+ let mut expansion_result = db.parse_macro_expansion(macro_call_id);
expansion_result.err = expansion_result.err.or(res.err);
expansions.push((macro_call.value.clone(), expansion_result));
}
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index a31244b1e1..fc66d8e28d 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -62,14 +62,14 @@ use std::ops::Deref;
use base_db::Crate;
use hir_expand::{
- ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, mod_path::ModPath, name::Name,
- proc_macro::ProcMacroKind,
+ EditionedFileId, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, mod_path::ModPath,
+ name::Name, proc_macro::ProcMacroKind,
};
use intern::Symbol;
use itertools::Itertools;
use la_arena::Arena;
use rustc_hash::{FxHashMap, FxHashSet};
-use span::{Edition, EditionedFileId, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID};
+use span::{Edition, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID};
use stdx::format_to;
use syntax::{AstNode, SmolStr, SyntaxNode, ToSmolStr, ast};
use triomphe::Arc;
@@ -328,9 +328,7 @@ impl ModuleOrigin {
match self {
&ModuleOrigin::File { definition: editioned_file_id, .. }
| &ModuleOrigin::CrateRoot { definition: editioned_file_id } => {
- let definition = base_db::EditionedFileId::new(db, editioned_file_id);
-
- let sf = db.parse(definition).tree();
+ let sf = db.parse(editioned_file_id).tree();
InFile::new(editioned_file_id.into(), ModuleSource::SourceFile(sf))
}
&ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new(
@@ -389,7 +387,7 @@ impl DefMap {
.entered();
let module_data = ModuleData::new(
- ModuleOrigin::CrateRoot { definition: krate.root_file_id() },
+ ModuleOrigin::CrateRoot { definition: krate.root_file_id(db) },
Visibility::Public,
);
@@ -402,7 +400,7 @@ impl DefMap {
let (def_map, local_def_map) = collector::collect_defs(
db,
def_map,
- TreeId::new(krate.root_file_id().into(), None),
+ TreeId::new(krate.root_file_id(db).into(), None),
None,
);
@@ -488,11 +486,15 @@ impl DefMap {
}
impl DefMap {
- pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ {
+ pub fn modules_for_file<'a>(
+ &'a self,
+ db: &'a dyn DefDatabase,
+ file_id: FileId,
+ ) -> impl Iterator<Item = LocalModuleId> + 'a {
self.modules
.iter()
.filter(move |(_id, data)| {
- data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id)
+ data.origin.file_id().map(|file_id| file_id.file_id(db)) == Some(file_id)
})
.map(|(id, _data)| id)
}
diff --git a/crates/hir-def/src/nameres/assoc.rs b/crates/hir-def/src/nameres/assoc.rs
index 7d3961e383..b097065529 100644
--- a/crates/hir-def/src/nameres/assoc.rs
+++ b/crates/hir-def/src/nameres/assoc.rs
@@ -1,13 +1,12 @@
//! Expansion of associated items
use hir_expand::{AstId, InFile, Intern, Lookup, MacroCallKind, MacroDefKind, name::Name};
-use span::MacroCallId;
use syntax::ast;
use triomphe::Arc;
use crate::{
AssocItemId, AstIdWithPath, ConstLoc, FunctionId, FunctionLoc, ImplId, ItemContainerId,
- ItemLoc, ModuleId, TraitId, TypeAliasId, TypeAliasLoc,
+ ItemLoc, MacroCallId, ModuleId, TraitId, TypeAliasId, TypeAliasLoc,
db::DefDatabase,
item_tree::{AssocItem, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId},
macro_call_as_call_id,
@@ -296,9 +295,8 @@ impl<'a> AssocItemCollector<'a> {
tracing::warn!("macro expansion is too deep");
return;
}
- let file_id = macro_call_id.as_file();
- let tree_id = TreeId::new(file_id, None);
- let item_tree = self.db.file_item_tree(file_id);
+ let tree_id = TreeId::new(macro_call_id.into(), None);
+ let item_tree = self.db.file_item_tree(macro_call_id.into());
self.depth += 1;
for item in item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item) {
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index a106fb5e98..0ea7fe6b86 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -9,8 +9,8 @@ use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin};
use cfg::{CfgAtom, CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
- ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
- MacroFileIdExt,
+ EditionedFileId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId,
+ MacroDefKind,
attrs::{Attr, AttrId},
builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro},
mod_path::{ModPath, PathKind},
@@ -21,7 +21,7 @@ use intern::{Interned, sym};
use itertools::{Itertools, izip};
use la_arena::Idx;
use rustc_hash::{FxHashMap, FxHashSet};
-use span::{Edition, EditionedFileId, FileAstId, SyntaxContext};
+use span::{Edition, FileAstId, SyntaxContext};
use syntax::ast;
use triomphe::Arc;
@@ -250,7 +250,7 @@ impl DefCollector<'_> {
fn seed_with_top_level(&mut self) {
let _p = tracing::info_span!("seed_with_top_level").entered();
- let file_id = self.def_map.krate.data(self.db).root_file_id();
+ let file_id = self.def_map.krate.data(self.db).root_file_id(self.db);
let item_tree = self.db.file_item_tree(file_id.into());
let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
@@ -1512,11 +1512,11 @@ impl DefCollector<'_> {
tracing::warn!("macro expansion is too deep");
return;
}
- let file_id = macro_call_id.as_file();
+ let file_id = macro_call_id.into();
let item_tree = self.db.file_item_tree(file_id);
- let mod_dir = if macro_call_id.as_macro_file().is_include_macro(self.db) {
+ let mod_dir = if macro_call_id.is_include_macro(self.db) {
ModDir::root()
} else {
self.mod_dirs[&module_id].clone()
diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs
index 49cfa83abd..d6c9f5a00c 100644
--- a/crates/hir-def/src/nameres/mod_resolution.rs
+++ b/crates/hir-def/src/nameres/mod_resolution.rs
@@ -1,8 +1,7 @@
//! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec;
use base_db::AnchoredPath;
-use hir_expand::{HirFileIdExt, name::Name};
-use span::EditionedFileId;
+use hir_expand::{EditionedFileId, name::Name};
use crate::{HirFileId, db::DefDatabase};
@@ -79,7 +78,7 @@ impl ModDir {
let orig_file_id = file_id.original_file_respecting_includes(db);
for candidate in candidate_files.iter() {
- let path = AnchoredPath { anchor: orig_file_id.file_id(), path: candidate.as_str() };
+ let path = AnchoredPath { anchor: orig_file_id.file_id(db), path: candidate.as_str() };
if let Some(file_id) = db.resolve_path(path) {
let is_mod_rs = candidate.ends_with("/mod.rs");
@@ -92,7 +91,7 @@ impl ModDir {
if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) {
return Ok((
// FIXME: Edition, is this right?
- EditionedFileId::new(file_id, orig_file_id.edition()),
+ EditionedFileId::new(db, file_id, orig_file_id.edition(db)),
is_mod_rs,
mod_dir,
));
diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs
index e9f0c22217..179a9c8fec 100644
--- a/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/crates/hir-def/src/nameres/tests/incremental.rs
@@ -18,7 +18,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
});
assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
}
- db.set_file_text(pos.file_id.file_id(), ra_fixture_change);
+ db.set_file_text(pos.file_id.file_id(&db), ra_fixture_change);
{
let events = db.log_executed(|| {
@@ -55,7 +55,7 @@ pub const BAZ: u32 = 0;
let mut add_crate = |crate_name, root_file_idx: usize| {
new_crate_graph.add_crate_root(
- files[root_file_idx].file_id(),
+ files[root_file_idx].file_id(&db),
Edition::CURRENT,
Some(CrateDisplayName::from_canonical_name(crate_name)),
None,
@@ -348,7 +348,7 @@ fn quux() { 92 }
m!(Y);
m!(Z);
"#;
- db.set_file_text(pos.file_id.file_id(), new_text);
+ db.set_file_text(pos.file_id.file_id(&db), new_text);
{
let events = db.log_executed(|| {
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index 7e941854a2..d2048c8757 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -917,7 +917,7 @@ fn handle_macro_def_scope(
// and use its parent expansion.
*hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(db));
*hygiene_info = parent_ctx.outer_expn(db).map(|expansion| {
- let expansion = db.lookup_intern_macro_call(expansion);
+ let expansion = db.lookup_intern_macro_call(expansion.into());
(parent_ctx.parent(db), expansion.def)
});
}
@@ -932,7 +932,7 @@ fn hygiene_info(
if !hygiene_id.is_root() {
let ctx = hygiene_id.lookup();
ctx.outer_expn(db).map(|expansion| {
- let expansion = db.lookup_intern_macro_call(expansion);
+ let expansion = db.lookup_intern_macro_call(expansion.into());
(ctx.parent(db), expansion.def)
})
} else {
diff --git a/crates/hir-def/src/test_db.rs b/crates/hir-def/src/test_db.rs
index a5f295179e..2f7675134c 100644
--- a/crates/hir-def/src/test_db.rs
+++ b/crates/hir-def/src/test_db.rs
@@ -8,7 +8,7 @@ use base_db::{
};
use hir_expand::{InFile, files::FilePosition};
use salsa::{AsDynDatabase, Durability};
-use span::{EditionedFileId, FileId};
+use span::FileId;
use syntax::{AstNode, algo, ast};
use triomphe::Arc;
@@ -135,7 +135,7 @@ impl TestDB {
for &krate in self.relevant_crates(file_id).iter() {
let crate_def_map = self.crate_def_map(krate);
for (local_id, data) in crate_def_map.modules() {
- if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) {
+ if data.origin.file_id().map(|file_id| file_id.file_id(self)) == Some(file_id) {
return crate_def_map.module_id(local_id);
}
}
@@ -144,7 +144,7 @@ impl TestDB {
}
pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId {
- let file_module = self.module_for_file(position.file_id.file_id());
+ let file_module = self.module_for_file(position.file_id.file_id(self));
let mut def_map = file_module.def_map(self);
let module = self.mod_at_position(&def_map, position);
@@ -246,10 +246,7 @@ impl TestDB {
let source_map = self.body_with_source_map(def_with_body).1;
let scopes = self.expr_scopes(def_with_body);
- let editioned_file_id_wrapper =
- base_db::EditionedFileId::new(self.as_dyn_database(), position.file_id);
-
- let root_syntax_node = self.parse(editioned_file_id_wrapper).syntax_node();
+ let root_syntax_node = self.parse(position.file_id).syntax_node();
let scope_iter =
algo::ancestors_at_offset(&root_syntax_node, position.offset).filter_map(|node| {
let block = ast::BlockExpr::cast(node)?;
diff --git a/crates/hir-expand/src/builtin/attr_macro.rs b/crates/hir-expand/src/builtin/attr_macro.rs
index 0463ce11f6..25dd933f26 100644
--- a/crates/hir-expand/src/builtin/attr_macro.rs
+++ b/crates/hir-expand/src/builtin/attr_macro.rs
@@ -1,8 +1,8 @@
//! Builtin attributes.
use intern::sym;
-use span::{MacroCallId, Span};
+use span::Span;
-use crate::{ExpandResult, MacroCallKind, db::ExpandDatabase, name, tt};
+use crate::{ExpandResult, MacroCallId, MacroCallKind, db::ExpandDatabase, name, tt};
use super::quote;
diff --git a/crates/hir-expand/src/builtin/derive_macro.rs b/crates/hir-expand/src/builtin/derive_macro.rs
index 428bed89b5..68283b916d 100644
--- a/crates/hir-expand/src/builtin/derive_macro.rs
+++ b/crates/hir-expand/src/builtin/derive_macro.rs
@@ -4,13 +4,13 @@ use intern::sym;
use itertools::{Itertools, izip};
use parser::SyntaxKind;
use rustc_hash::FxHashSet;
-use span::{Edition, MacroCallId, Span, SyntaxContext};
+use span::{Edition, Span, SyntaxContext};
use stdx::never;
use syntax_bridge::DocCommentDesugarMode;
use tracing::debug;
use crate::{
- ExpandError, ExpandResult,
+ ExpandError, ExpandResult, MacroCallId,
builtin::quote::{dollar_crate, quote},
db::ExpandDatabase,
hygiene::span_with_def_site_ctxt,
@@ -59,7 +59,7 @@ impl BuiltinDeriveExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
- let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
+ let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
self.expander()(db, span, tt)
}
}
diff --git a/crates/hir-expand/src/builtin/fn_macro.rs b/crates/hir-expand/src/builtin/fn_macro.rs
index ca06979190..b99eac7aea 100644
--- a/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/crates/hir-expand/src/builtin/fn_macro.rs
@@ -8,7 +8,7 @@ use intern::{
sym::{self},
};
use mbe::{DelimiterKind, expect_fragment};
-use span::{Edition, EditionedFileId, FileId, Span};
+use span::{Edition, FileId, Span};
use stdx::format_to;
use syntax::{
format_smolstr,
@@ -17,7 +17,7 @@ use syntax::{
use syntax_bridge::syntax_node_to_token_tree;
use crate::{
- ExpandError, ExpandResult, HirFileIdExt, Lookup as _, MacroCallId,
+ EditionedFileId, ExpandError, ExpandResult, Lookup as _, MacroCallId,
builtin::quote::{WithDelimiter, dollar_crate, quote},
db::ExpandDatabase,
hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt},
@@ -72,7 +72,7 @@ impl BuiltinFnLikeExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
- let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
+ let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
self.expander()(db, id, tt, span)
}
@@ -89,7 +89,7 @@ impl EagerExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
- let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
+ let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
self.expander()(db, id, tt, span)
}
@@ -224,7 +224,7 @@ fn assert_expand(
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
- let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
+ let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);
let mut iter = tt.iter();
@@ -345,7 +345,7 @@ fn panic_expand(
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let dollar_crate = dollar_crate(span);
- let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
+ let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);
let mac = if use_panic_2021(db, call_site_span) {
sym::panic_2021.clone()
@@ -376,7 +376,7 @@ fn unreachable_expand(
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let dollar_crate = dollar_crate(span);
- let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
+ let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);
let mac = if use_panic_2021(db, call_site_span) {
sym::unreachable_2021.clone()
@@ -407,7 +407,7 @@ fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool {
let Some(expn) = span.ctx.outer_expn(db) else {
break false;
};
- let expn = db.lookup_intern_macro_call(expn);
+ let expn = db.lookup_intern_macro_call(expn.into());
// FIXME: Record allow_internal_unstable in the macro def (not been done yet because it
// would consume quite a bit extra memory for all call locs...)
// if let Some(features) = expn.def.allow_internal_unstable {
@@ -663,7 +663,7 @@ fn relative_file(
err_span: Span,
) -> Result<EditionedFileId, ExpandError> {
let lookup = db.lookup_intern_macro_call(call_id);
- let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id();
+ let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id(db);
let path = AnchoredPath { anchor: call_site, path: path_str };
let res: FileId = db
.resolve_path(path)
@@ -672,7 +672,7 @@ fn relative_file(
if res == call_site && !allow_recursion {
Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`")))
} else {
- Ok(EditionedFileId::new(res, lookup.krate.data(db).edition))
+ Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition))
}
}
@@ -731,10 +731,8 @@ fn include_expand(
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
- let (file_id_wrapper, editioned_file_id) = match include_input_to_file_id(db, arg_id, tt) {
- Ok(editioned_file_id) => {
- (base_db::EditionedFileId::new(db, editioned_file_id), editioned_file_id)
- }
+ let editioned_file_id = match include_input_to_file_id(db, arg_id, tt) {
+ Ok(editioned_file_id) => editioned_file_id,
Err(e) => {
return ExpandResult::new(
tt::TopSubtree::empty(DelimSpan { open: span, close: span }),
@@ -745,7 +743,7 @@ fn include_expand(
let span_map = db.real_span_map(editioned_file_id);
// FIXME: Parse errors
ExpandResult::ok(syntax_node_to_token_tree(
- &db.parse(file_id_wrapper).syntax_node(),
+ &db.parse(editioned_file_id).syntax_node(),
SpanMap::RealSpanMap(span_map),
span,
syntax_bridge::DocCommentDesugarMode::ProcMacro,
@@ -807,7 +805,7 @@ fn include_str_expand(
}
};
- let text = db.file_text(file_id.file_id());
+ let text = db.file_text(file_id.file_id(db));
let text = &*text.text(db);
ExpandResult::ok(quote!(call_site =>#text))
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 2fe251d298..29b7b33fd0 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -4,19 +4,15 @@ use base_db::{Crate, RootQueryDb};
use either::Either;
use mbe::MatchedArmIndex;
use rustc_hash::FxHashSet;
-use salsa::plumbing::AsId;
-use span::{
- AstIdMap, Edition, EditionedFileId, HirFileId, HirFileIdRepr, MacroCallId, MacroFileId, Span,
- SyntaxContext,
-};
+use span::{AstIdMap, Edition, Span, SyntaxContext};
use syntax::{AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
use triomphe::Arc;
use crate::{
AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
- EagerExpander, ExpandError, ExpandResult, ExpandTo, MacroCallKind, MacroCallLoc, MacroDefId,
- MacroDefKind,
+ EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId,
+ MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
attrs::{AttrId, collect_attrs},
builtin::pseudo_derive_attr_expansion,
cfg_process,
@@ -63,9 +59,10 @@ pub trait ExpandDatabase: RootQueryDb {
fn proc_macros(&self) -> Arc<ProcMacros>;
/// Incrementality query to prevent queries from directly depending on `ExpandDatabase::proc_macros`.
- #[salsa::invoke_actual(crate::proc_macro::proc_macros_for_crate)]
+ #[salsa::invoke(crate::proc_macro::proc_macros_for_crate)]
fn proc_macros_for_crate(&self, krate: Crate) -> Option<Arc<CrateProcMacros>>;
+ #[salsa::invoke(ast_id_map)]
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
#[salsa::transparent]
@@ -75,7 +72,7 @@ pub trait ExpandDatabase: RootQueryDb {
#[salsa::lru(512)]
fn parse_macro_expansion(
&self,
- macro_file: span::MacroFileId,
+ macro_file: MacroCallId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
#[salsa::transparent]
@@ -84,7 +81,7 @@ pub trait ExpandDatabase: RootQueryDb {
#[salsa::transparent]
#[salsa::invoke(crate::span_map::expansion_span_map)]
- fn expansion_span_map(&self, file_id: MacroFileId) -> Arc<ExpansionSpanMap>;
+ fn expansion_span_map(&self, file_id: MacroCallId) -> Arc<ExpansionSpanMap>;
#[salsa::invoke(crate::span_map::real_span_map)]
fn real_span_map(&self, file_id: EditionedFileId) -> Arc<RealSpanMap>;
@@ -136,6 +133,7 @@ pub trait ExpandDatabase: RootQueryDb {
/// directly depend on as that would cause to frequent invalidations, mainly because of the
/// parse queries being LRU cached. If they weren't the invalidations would only happen if the
/// user wrote in the file that defines the proc-macro.
+ #[salsa::invoke_interned(proc_macro_span)]
fn proc_macro_span(&self, fun: AstId<ast::Fn>) -> Span;
/// Firewall query that returns the errors from the `parse_macro_expansion` query.
@@ -149,30 +147,17 @@ pub trait ExpandDatabase: RootQueryDb {
fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContext;
}
-#[salsa::interned(no_lifetime, id = span::MacroCallId)]
-pub struct MacroCallWrapper {
- pub loc: MacroCallLoc,
-}
-
-fn intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallLoc) -> MacroCallId {
- MacroCallWrapper::new(db, macro_call).0
-}
-
-fn lookup_intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> MacroCallLoc {
- MacroCallWrapper::ingredient(db).data(db.as_dyn_database(), macro_call.as_id()).0.clone()
-}
-
#[salsa::interned(no_lifetime, id = span::SyntaxContext)]
pub struct SyntaxContextWrapper {
pub data: SyntaxContext,
}
fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId, edition: Edition) -> SyntaxContext {
- match file.repr() {
- HirFileIdRepr::FileId(_) => SyntaxContext::root(edition),
- HirFileIdRepr::MacroFile(m) => {
- let kind = db.lookup_intern_macro_call(m.macro_call_id).kind;
- db.macro_arg_considering_derives(m.macro_call_id, &kind).2.ctx
+ match file {
+ HirFileId::FileId(_) => SyntaxContext::root(edition),
+ HirFileId::MacroFile(m) => {
+ let kind = db.lookup_intern_macro_call(m).kind;
+ db.macro_arg_considering_derives(m, &kind).2.ctx
}
}
}
@@ -300,9 +285,9 @@ pub fn expand_speculative(
loc.krate,
&tt,
attr_arg.as_ref(),
- span_with_def_site_ctxt(db, span, actual_macro_call, loc.def.edition),
- span_with_call_site_ctxt(db, span, actual_macro_call, loc.def.edition),
- span_with_mixed_site_ctxt(db, span, actual_macro_call, loc.def.edition),
+ span_with_def_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
+ span_with_call_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
+ span_with_mixed_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
)
}
MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
@@ -346,20 +331,16 @@ pub fn expand_speculative(
Some((node.syntax_node(), token))
}
-fn ast_id_map(db: &dyn ExpandDatabase, file_id: span::HirFileId) -> triomphe::Arc<AstIdMap> {
+fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> triomphe::Arc<AstIdMap> {
triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
}
/// Main public API -- parses a hir file, not caring whether it's a real
/// file or a macro expansion.
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
- match file_id.repr() {
- HirFileIdRepr::FileId(editioned_file_id) => {
- let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
- db.parse(file_id).syntax_node()
- }
-
- HirFileIdRepr::MacroFile(macro_file) => {
+ match file_id {
+ HirFileId::FileId(file_id) => db.parse(file_id).syntax_node(),
+ HirFileId::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node()
}
}
@@ -369,14 +350,13 @@ fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
// instead of having it be untyped
fn parse_macro_expansion(
db: &dyn ExpandDatabase,
- macro_file: MacroFileId,
+ macro_file: MacroCallId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = tracing::info_span!("parse_macro_expansion").entered();
- let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let loc = db.lookup_intern_macro_call(macro_file);
let def_edition = loc.def.edition;
let expand_to = loc.expand_to();
- let mbe::ValueResult { value: (tt, matched_arm), err } =
- macro_expand(db, macro_file.macro_call_id, loc);
+ let mbe::ValueResult { value: (tt, matched_arm), err } = macro_expand(db, macro_file, loc);
let (parse, mut rev_token_map) = token_tree_to_syntax_node(
db,
@@ -397,7 +377,7 @@ fn parse_macro_expansion_error(
macro_call_id: MacroCallId,
) -> Option<Arc<ExpandResult<Arc<[SyntaxError]>>>> {
let e: ExpandResult<Arc<[SyntaxError]>> =
- db.parse_macro_expansion(MacroFileId { macro_call_id }).map(|it| Arc::from(it.0.errors()));
+ db.parse_macro_expansion(macro_call_id).map(|it| Arc::from(it.0.errors()));
if e.value.is_empty() && e.err.is_none() { None } else { Some(Arc::new(e)) }
}
@@ -405,16 +385,11 @@ pub(crate) fn parse_with_map(
db: &dyn ExpandDatabase,
file_id: HirFileId,
) -> (Parse<SyntaxNode>, SpanMap) {
- match file_id.repr() {
- HirFileIdRepr::FileId(editioned_file_id) => {
- let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
-
- (
- db.parse(file_id).to_syntax(),
- SpanMap::RealSpanMap(db.real_span_map(editioned_file_id)),
- )
+ match file_id {
+ HirFileId::FileId(file_id) => {
+ (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
}
- HirFileIdRepr::MacroFile(macro_file) => {
+ HirFileId::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse, SpanMap::ExpansionSpanMap(map))
}
@@ -734,9 +709,9 @@ fn expand_proc_macro(
loc.krate,
&macro_arg,
attr_arg,
- span_with_def_site_ctxt(db, span, id, loc.def.edition),
- span_with_call_site_ctxt(db, span, id, loc.def.edition),
- span_with_mixed_site_ctxt(db, span, id, loc.def.edition),
+ span_with_def_site_ctxt(db, span, id.into(), loc.def.edition),
+ span_with_call_site_ctxt(db, span, id.into(), loc.def.edition),
+ span_with_mixed_site_ctxt(db, span, id.into(), loc.def.edition),
)
};
@@ -784,3 +759,11 @@ fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
})
}
}
+
+fn intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallLoc) -> MacroCallId {
+ MacroCallId::new(db, macro_call)
+}
+
+fn lookup_intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> MacroCallLoc {
+ macro_call.loc(db)
+}
diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs
index 063410230d..50ef17c2a5 100644
--- a/crates/hir-expand/src/declarative.rs
+++ b/crates/hir-expand/src/declarative.rs
@@ -2,14 +2,14 @@
use base_db::Crate;
use intern::sym;
-use span::{Edition, HirFileIdRepr, MacroCallId, Span, SyntaxContext};
+use span::{Edition, Span, SyntaxContext};
use stdx::TupleExt;
use syntax::{AstNode, ast};
use syntax_bridge::DocCommentDesugarMode;
use triomphe::Arc;
use crate::{
- AstId, ExpandError, ExpandErrorKind, ExpandResult, Lookup,
+ AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId,
attrs::RawAttrs,
db::ExpandDatabase,
hygiene::{Transparency, apply_mark},
@@ -42,7 +42,10 @@ impl DeclarativeMacroExpander {
.mac
.expand(
&tt,
- |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency, self.edition),
+ |s| {
+ s.ctx =
+ apply_mark(db, s.ctx, call_id.into(), self.transparency, self.edition)
+ },
span,
loc.def.edition,
)
@@ -106,7 +109,8 @@ impl DeclarativeMacroExpander {
def_crate.data(db).edition
} else {
// UNWRAP-SAFETY: Only the root context has no outer expansion
- let krate = db.lookup_intern_macro_call(ctx.outer_expn(db).unwrap()).def.krate;
+ let krate =
+ db.lookup_intern_macro_call(ctx.outer_expn(db).unwrap().into()).def.krate;
krate.data(db).edition
}
};
@@ -160,9 +164,9 @@ impl DeclarativeMacroExpander {
transparency(&macro_def).unwrap_or(Transparency::Opaque),
),
};
- let edition = ctx_edition(match id.file_id.repr() {
- HirFileIdRepr::MacroFile(macro_file) => macro_file.macro_call_id.lookup(db).ctxt,
- HirFileIdRepr::FileId(file) => SyntaxContext::root(file.edition()),
+ let edition = ctx_edition(match id.file_id {
+ HirFileId::MacroFile(macro_file) => macro_file.lookup(db).ctxt,
+ HirFileId::FileId(file) => SyntaxContext::root(file.edition(db)),
});
Arc::new(DeclarativeMacroExpander { mac, transparency, edition })
}
diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs
index 0cfd5c457a..28d3fcdab9 100644
--- a/crates/hir-expand/src/eager.rs
+++ b/crates/hir-expand/src/eager.rs
@@ -63,7 +63,7 @@ pub fn expand_eager_macro_input(
#[allow(deprecated)] // builtin eager macros are never derives
let (_, _, span) = db.macro_arg(arg_id);
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
- db.parse_macro_expansion(arg_id.as_macro_file());
+ db.parse_macro_expansion(arg_id);
let mut arg_map = ExpansionSpanMap::empty();
@@ -73,7 +73,7 @@ pub fn expand_eager_macro_input(
&arg_exp_map,
&mut arg_map,
TextSize::new(0),
- InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
+ InFile::new(arg_id.into(), arg_exp.syntax_node()),
krate,
call_site,
resolver,
@@ -134,10 +134,8 @@ fn lazy_expand(
call_site,
);
eager_callback(ast_id.map(|ast_id| (AstPtr::new(macro_call), ast_id)), id);
- let macro_file = id.as_macro_file();
- db.parse_macro_expansion(macro_file)
- .map(|parse| (InFile::new(macro_file.into(), parse.0), parse.1))
+ db.parse_macro_expansion(id).map(|parse| (InFile::new(id.into(), parse.0), parse.1))
}
fn eager_macro_recur(
@@ -224,7 +222,7 @@ fn eager_macro_recur(
call_id,
);
let ExpandResult { value: (parse, map), err: err2 } =
- db.parse_macro_expansion(call_id.as_macro_file());
+ db.parse_macro_expansion(call_id);
map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span));
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index e6c5bcd1c9..321ee8feb9 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -2,14 +2,11 @@
use std::borrow::Borrow;
use either::Either;
-use span::{
- AstIdNode, EditionedFileId, ErasedFileAstId, FileAstId, HirFileId, HirFileIdRepr, MacroFileId,
- SyntaxContext,
-};
+use span::{AstIdNode, ErasedFileAstId, FileAstId, FileId, SyntaxContext};
use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize};
use crate::{
- MacroFileIdExt, MacroKind,
+ EditionedFileId, HirFileId, MacroCallId, MacroKind,
db::{self, ExpandDatabase},
map_node_range_up, map_node_range_up_rooted, span_for_offset,
};
@@ -27,7 +24,7 @@ pub struct InFileWrapper<FileKind, T> {
pub value: T,
}
pub type InFile<T> = InFileWrapper<HirFileId, T>;
-pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>;
+pub type InMacroFile<T> = InFileWrapper<MacroCallId, T>;
pub type InRealFile<T> = InFileWrapper<EditionedFileId, T>;
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
@@ -36,12 +33,13 @@ pub struct FilePositionWrapper<FileKind> {
pub offset: TextSize,
}
pub type HirFilePosition = FilePositionWrapper<HirFileId>;
-pub type MacroFilePosition = FilePositionWrapper<MacroFileId>;
+pub type MacroFilePosition = FilePositionWrapper<MacroCallId>;
pub type FilePosition = FilePositionWrapper<EditionedFileId>;
-impl From<FilePositionWrapper<EditionedFileId>> for FilePositionWrapper<span::FileId> {
- fn from(value: FilePositionWrapper<EditionedFileId>) -> Self {
- FilePositionWrapper { file_id: value.file_id.into(), offset: value.offset }
+impl FilePosition {
+ #[inline]
+ pub fn into_file_id(self, db: &dyn ExpandDatabase) -> FilePositionWrapper<FileId> {
+ FilePositionWrapper { file_id: self.file_id.file_id(db), offset: self.offset }
}
}
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
@@ -50,12 +48,13 @@ pub struct FileRangeWrapper<FileKind> {
pub range: TextRange,
}
pub type HirFileRange = FileRangeWrapper<HirFileId>;
-pub type MacroFileRange = FileRangeWrapper<MacroFileId>;
+pub type MacroFileRange = FileRangeWrapper<MacroCallId>;
pub type FileRange = FileRangeWrapper<EditionedFileId>;
-impl From<FileRangeWrapper<EditionedFileId>> for FileRangeWrapper<span::FileId> {
- fn from(value: FileRangeWrapper<EditionedFileId>) -> Self {
- FileRangeWrapper { file_id: value.file_id.into(), range: value.range }
+impl FileRange {
+ #[inline]
+ pub fn into_file_id(self, db: &dyn ExpandDatabase) -> FileRangeWrapper<FileId> {
+ FileRangeWrapper { file_id: self.file_id.file_id(db), range: self.range }
}
}
@@ -163,12 +162,10 @@ trait FileIdToSyntax: Copy {
impl FileIdToSyntax for EditionedFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
- let file_id = base_db::EditionedFileId::new(db, self);
-
- db.parse(file_id).syntax_node()
+ db.parse(self).syntax_node()
}
}
-impl FileIdToSyntax for MacroFileId {
+impl FileIdToSyntax for MacroCallId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_macro_expansion(self).value.0.syntax_node()
}
@@ -221,7 +218,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => db
- .lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
+ .lookup_intern_macro_call(node.file_id.macro_file()?)
.to_node_item(db)
.syntax()
.cloned()
@@ -238,7 +235,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => db
- .lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
+ .lookup_intern_macro_call(node.file_id.macro_file()?)
.to_node_item(db)
.syntax()
.cloned()
@@ -278,11 +275,11 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
) -> Option<InRealFile<SyntaxNode>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
- let file_id = match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => {
+ let file_id = match self.file_id {
+ HirFileId::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value.borrow().clone() });
}
- HirFileIdRepr::MacroFile(m)
+ HirFileId::MacroFile(m)
if matches!(m.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) =>
{
m
@@ -296,11 +293,9 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
self.value.borrow().text_range(),
)?;
- let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
-
let kind = self.kind();
let value = db
- .parse(file_id)
+ .parse(editioned_file_id)
.syntax_node()
.covering_element(range)
.ancestors()
@@ -332,9 +327,9 @@ impl InMacroFile<SyntaxToken> {
impl InFile<SyntaxToken> {
/// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
- HirFileIdRepr::MacroFile(mac_file) => {
+ match self.file_id {
+ HirFileId::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileId::MacroFile(mac_file) => {
let (range, ctxt) = span_for_offset(
db,
&db.expansion_span_map(mac_file),
@@ -348,7 +343,7 @@ impl InFile<SyntaxToken> {
}
// Fall back to whole macro call.
- let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range(db)
}
}
@@ -356,11 +351,11 @@ impl InFile<SyntaxToken> {
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => {
+ match self.file_id {
+ HirFileId::FileId(file_id) => {
Some(FileRange { file_id, range: self.value.text_range() })
}
- HirFileIdRepr::MacroFile(mac_file) => {
+ HirFileId::MacroFile(mac_file) => {
let (range, ctxt) = span_for_offset(
db,
&db.expansion_span_map(mac_file),
@@ -386,15 +381,15 @@ impl InFile<TextRange> {
self,
db: &dyn db::ExpandDatabase,
) -> (FileRange, SyntaxContext) {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => {
- (FileRange { file_id, range: self.value }, SyntaxContext::root(file_id.edition()))
+ match self.file_id {
+ HirFileId::FileId(file_id) => {
+ (FileRange { file_id, range: self.value }, SyntaxContext::root(file_id.edition(db)))
}
- HirFileIdRepr::MacroFile(mac_file) => {
+ HirFileId::MacroFile(mac_file) => {
match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
None => {
- let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ let loc = db.lookup_intern_macro_call(mac_file);
(loc.kind.original_call_range(db), SyntaxContext::root(loc.def.edition))
}
}
@@ -403,13 +398,13 @@ impl InFile<TextRange> {
}
pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
- HirFileIdRepr::MacroFile(mac_file) => {
+ match self.file_id {
+ HirFileId::FileId(file_id) => FileRange { file_id, range: self.value },
+ HirFileId::MacroFile(mac_file) => {
match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
_ => {
- let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range(db)
}
}
@@ -421,13 +416,13 @@ impl InFile<TextRange> {
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
- HirFileIdRepr::MacroFile(mac_file) => {
+ match self.file_id {
+ HirFileId::FileId(file_id) => FileRange { file_id, range: self.value },
+ HirFileId::MacroFile(mac_file) => {
match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
_ => {
- let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ let loc = db.lookup_intern_macro_call(mac_file);
loc.kind.original_call_range_with_body(db)
}
}
@@ -439,12 +434,12 @@ impl InFile<TextRange> {
self,
db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContext)> {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => Some((
+ match self.file_id {
+ HirFileId::FileId(file_id) => Some((
FileRange { file_id, range: self.value },
- SyntaxContext::root(file_id.edition()),
+ SyntaxContext::root(file_id.edition(db)),
)),
- HirFileIdRepr::MacroFile(mac_file) => {
+ HirFileId::MacroFile(mac_file) => {
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value)
}
}
@@ -455,11 +450,11 @@ impl<N: AstNode> InFile<N> {
pub fn original_ast_node_rooted(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
- let file_id = match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => {
+ let file_id = match self.file_id {
+ HirFileId::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value });
}
- HirFileIdRepr::MacroFile(m) => m,
+ HirFileId::MacroFile(m) => m,
};
if !matches!(file_id.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) {
return None;
@@ -471,10 +466,8 @@ impl<N: AstNode> InFile<N> {
self.value.syntax().text_range(),
)?;
- let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
-
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
- let anc = db.parse(file_id).syntax_node().covering_element(range);
+ let anc = db.parse(editioned_file_id).syntax_node().covering_element(range);
let value = anc.ancestors().find_map(N::cast)?;
Some(InRealFile::new(editioned_file_id, value))
}
@@ -482,9 +475,9 @@ impl<N: AstNode> InFile<N> {
impl<T> InFile<T> {
pub fn into_real_file(self) -> Result<InRealFile<T>, InFile<T>> {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }),
- HirFileIdRepr::MacroFile(_) => Err(self),
+ match self.file_id {
+ HirFileId::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }),
+ HirFileId::MacroFile(_) => Err(self),
}
}
}
diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs
index d684a2f9ce..e7856920bc 100644
--- a/crates/hir-expand/src/hygiene.rs
+++ b/crates/hir-expand/src/hygiene.rs
@@ -81,7 +81,7 @@ pub(super) fn apply_mark(
return apply_mark_internal(db, ctxt, call_id, transparency, edition);
}
- let call_site_ctxt = db.lookup_intern_macro_call(call_id).ctxt;
+ let call_site_ctxt = db.lookup_intern_macro_call(call_id.into()).ctxt;
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
call_site_ctxt.normalize_to_macros_2_0(db)
} else {
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 7a993ed509..cd2448bad4 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -27,6 +27,7 @@ mod prettify_macro_expansion_;
use attrs::collect_attrs;
use rustc_hash::FxHashMap;
+use salsa::plumbing::{AsId, FromId};
use stdx::TupleExt;
use triomphe::Arc;
@@ -35,10 +36,7 @@ use std::hash::Hash;
use base_db::Crate;
use either::Either;
-use span::{
- Edition, EditionedFileId, ErasedFileAstId, FileAstId, HirFileIdRepr, Span, SpanAnchor,
- SyntaxContext,
-};
+use span::{Edition, ErasedFileAstId, FileAstId, Span, SpanAnchor, SyntaxContext};
use syntax::{
SyntaxNode, SyntaxToken, TextRange, TextSize,
ast::{self, AstNode},
@@ -62,8 +60,8 @@ pub use crate::{
prettify_macro_expansion_::prettify_macro_expansion,
};
+pub use base_db::EditionedFileId;
pub use mbe::{DeclarativeMacro, ValueResult};
-pub use span::{HirFileId, MacroCallId, MacroFileId};
pub mod tt {
pub use span::Span;
@@ -343,51 +341,34 @@ pub enum MacroCallKind {
},
}
-pub trait HirFileIdExt {
- fn edition(self, db: &dyn ExpandDatabase) -> Edition;
- /// Returns the original file of this macro call hierarchy.
- fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId;
-
- /// Returns the original file of this macro call hierarchy while going into the included file if
- /// one of the calls comes from an `include!``.
- fn original_file_respecting_includes(self, db: &dyn ExpandDatabase) -> EditionedFileId;
-
- /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
- fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;
-
- fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>>;
-}
-
-impl HirFileIdExt for HirFileId {
- fn edition(self, db: &dyn ExpandDatabase) -> Edition {
- match self.repr() {
- HirFileIdRepr::FileId(file_id) => file_id.edition(),
- HirFileIdRepr::MacroFile(m) => db.lookup_intern_macro_call(m.macro_call_id).def.edition,
+impl HirFileId {
+ pub fn edition(self, db: &dyn ExpandDatabase) -> Edition {
+ match self {
+ HirFileId::FileId(file_id) => file_id.editioned_file_id(db).edition(),
+ HirFileId::MacroFile(m) => db.lookup_intern_macro_call(m).def.edition,
}
}
- fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId {
+ pub fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId {
let mut file_id = self;
loop {
- match file_id.repr() {
- HirFileIdRepr::FileId(id) => break id,
- HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
+ match file_id {
+ HirFileId::FileId(id) => break id,
+ HirFileId::MacroFile(macro_call_id) => {
file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id()
}
}
}
}
- fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> EditionedFileId {
+ pub fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> EditionedFileId {
loop {
- match self.repr() {
- HirFileIdRepr::FileId(id) => break id,
- HirFileIdRepr::MacroFile(file) => {
- let loc = db.lookup_intern_macro_call(file.macro_call_id);
+ match self {
+ HirFileId::FileId(id) => break id,
+ HirFileId::MacroFile(file) => {
+ let loc = db.lookup_intern_macro_call(file);
if loc.def.is_include() {
if let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind {
- if let Ok(it) =
- include_input_to_file_id(db, file.macro_call_id, &eager.arg)
- {
+ if let Ok(it) = include_input_to_file_id(db, file, &eager.arg) {
break it;
}
}
@@ -398,23 +379,26 @@ impl HirFileIdExt for HirFileId {
}
}
- fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>> {
- let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db);
+ pub fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>> {
+ let mut call = db.lookup_intern_macro_call(self.macro_file()?).to_node(db);
loop {
- match call.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => {
+ match call.file_id {
+ HirFileId::FileId(file_id) => {
break Some(InRealFile { file_id, value: call.value });
}
- HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
+ HirFileId::MacroFile(macro_call_id) => {
call = db.lookup_intern_macro_call(macro_call_id).to_node(db);
}
}
}
}
- fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> {
+ pub fn as_builtin_derive_attr_node(
+ &self,
+ db: &dyn ExpandDatabase,
+ ) -> Option<InFile<ast::Attr>> {
let macro_file = self.macro_file()?;
- let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let loc = db.lookup_intern_macro_call(macro_file);
let attr = match loc.def.kind {
MacroDefKind::BuiltInDerive(..) => loc.to_node(db),
_ => return None,
@@ -441,57 +425,34 @@ pub enum MacroKind {
ProcMacro,
}
-pub trait MacroFileIdExt {
- fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool;
- fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool;
- fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId>;
- fn expansion_level(self, db: &dyn ExpandDatabase) -> u32;
- /// If this is a macro call, returns the syntax node of the call.
- fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode>;
- fn parent(self, db: &dyn ExpandDatabase) -> HirFileId;
-
- fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo;
-
- fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind;
-
- /// Return whether this file is an include macro
- fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool;
-
- fn is_eager(&self, db: &dyn ExpandDatabase) -> bool;
-
- /// Return whether this file is the pseudo expansion of the derive attribute.
- /// See [`crate::builtin_attr_macro::derive_attr_expand`].
- fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool;
-}
-
-impl MacroFileIdExt for MacroFileId {
- fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
- db.lookup_intern_macro_call(self.macro_call_id).to_node(db)
+impl MacroCallId {
+ pub fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
+ db.lookup_intern_macro_call(self).to_node(db)
}
- fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 {
+ pub fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 {
let mut level = 0;
let mut macro_file = self;
loop {
- let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let loc = db.lookup_intern_macro_call(macro_file);
level += 1;
- macro_file = match loc.kind.file_id().repr() {
- HirFileIdRepr::FileId(_) => break level,
- HirFileIdRepr::MacroFile(it) => it,
+ macro_file = match loc.kind.file_id() {
+ HirFileId::FileId(_) => break level,
+ HirFileId::MacroFile(it) => it,
};
}
}
- fn parent(self, db: &dyn ExpandDatabase) -> HirFileId {
- db.lookup_intern_macro_call(self.macro_call_id).kind.file_id()
+ pub fn parent(self, db: &dyn ExpandDatabase) -> HirFileId {
+ db.lookup_intern_macro_call(self).kind.file_id()
}
/// Return expansion information if it is a macro-expansion file
- fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo {
+ pub fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo {
ExpansionInfo::new(db, self)
}
- fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind {
- match db.lookup_intern_macro_call(self.macro_call_id).def.kind {
+ pub fn kind(self, db: &dyn ExpandDatabase) -> MacroKind {
+ match db.lookup_intern_macro_call(self).def.kind {
MacroDefKind::Declarative(..) => MacroKind::Declarative,
MacroDefKind::BuiltIn(..) | MacroDefKind::BuiltInEager(..) => {
MacroKind::DeclarativeBuiltIn
@@ -504,33 +465,33 @@ impl MacroFileIdExt for MacroFileId {
}
}
- fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool {
- db.lookup_intern_macro_call(self.macro_call_id).def.is_include()
+ pub fn is_include_macro(self, db: &dyn ExpandDatabase) -> bool {
+ db.lookup_intern_macro_call(self).def.is_include()
}
- fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool {
- db.lookup_intern_macro_call(self.macro_call_id).def.is_include_like()
+ pub fn is_include_like_macro(self, db: &dyn ExpandDatabase) -> bool {
+ db.lookup_intern_macro_call(self).def.is_include_like()
}
- fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool {
- db.lookup_intern_macro_call(self.macro_call_id).def.is_env_or_option_env()
+ pub fn is_env_or_option_env(self, db: &dyn ExpandDatabase) -> bool {
+ db.lookup_intern_macro_call(self).def.is_env_or_option_env()
}
- fn is_eager(&self, db: &dyn ExpandDatabase) -> bool {
- let loc = db.lookup_intern_macro_call(self.macro_call_id);
+ pub fn is_eager(self, db: &dyn ExpandDatabase) -> bool {
+ let loc = db.lookup_intern_macro_call(self);
matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
}
- fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId> {
- let loc = db.lookup_intern_macro_call(self.macro_call_id);
+ pub fn eager_arg(self, db: &dyn ExpandDatabase) -> Option<MacroCallId> {
+ let loc = db.lookup_intern_macro_call(self);
match &loc.kind {
MacroCallKind::FnLike { eager, .. } => eager.as_ref().map(|it| it.arg_id),
_ => None,
}
}
- fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool {
- let loc = db.lookup_intern_macro_call(self.macro_call_id);
+ pub fn is_derive_attr_pseudo_expansion(self, db: &dyn ExpandDatabase) -> bool {
+ let loc = db.lookup_intern_macro_call(self);
loc.def.is_attribute_derive()
}
}
@@ -728,11 +689,11 @@ impl MacroCallKind {
pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
- match kind.file_id().repr() {
- HirFileIdRepr::MacroFile(file) => {
- kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
+ match kind.file_id() {
+ HirFileId::MacroFile(file) => {
+ kind = db.lookup_intern_macro_call(file).kind;
}
- HirFileIdRepr::FileId(file_id) => break file_id,
+ HirFileId::FileId(file_id) => break file_id,
}
};
@@ -753,11 +714,11 @@ impl MacroCallKind {
pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
- match kind.file_id().repr() {
- HirFileIdRepr::MacroFile(file) => {
- kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
+ match kind.file_id() {
+ HirFileId::MacroFile(file) => {
+ kind = db.lookup_intern_macro_call(file).kind;
}
- HirFileIdRepr::FileId(file_id) => break file_id,
+ HirFileId::FileId(file_id) => break file_id,
}
};
@@ -898,7 +859,7 @@ impl ExpansionInfo {
let span = self.exp_map.span_at(token.start());
match &self.arg_map {
SpanMap::RealSpanMap(_) => {
- let file_id = span.anchor.file_id.into();
+ let file_id = EditionedFileId::from_span(db, span.anchor.file_id).into();
let anchor_offset =
db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start();
InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
@@ -921,9 +882,9 @@ impl ExpansionInfo {
}
}
- pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo {
+ pub fn new(db: &dyn ExpandDatabase, macro_file: MacroCallId) -> ExpansionInfo {
let _p = tracing::info_span!("ExpansionInfo::new").entered();
- let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let loc = db.lookup_intern_macro_call(macro_file);
let arg_tt = loc.kind.arg(db);
let arg_map = db.span_map(arg_tt.file_id);
@@ -955,9 +916,10 @@ pub fn map_node_range_up_rooted(
start = start.min(span.range.start());
end = end.max(span.range.end());
}
+ let file_id = EditionedFileId::from_span(db, anchor.file_id);
let anchor_offset =
- db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
- Some(FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset })
+ db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
+ Some(FileRange { file_id, range: TextRange::new(start, end) + anchor_offset })
}
/// Maps up the text range out of the expansion hierarchy back into the original file its from.
@@ -980,12 +942,10 @@ pub fn map_node_range_up(
start = start.min(span.range.start());
end = end.max(span.range.end());
}
+ let file_id = EditionedFileId::from_span(db, anchor.file_id);
let anchor_offset =
- db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
- Some((
- FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset },
- ctx,
- ))
+ db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
+ Some((FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }, ctx))
}
/// Maps up the text range out of the expansion hierarchy back into the original file its from.
@@ -1004,8 +964,9 @@ pub fn map_node_range_up_aggregated(
);
}
for ((anchor, _), range) in &mut map {
+ let file_id = EditionedFileId::from_span(db, anchor.file_id);
let anchor_offset =
- db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
+ db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
*range += anchor_offset;
}
map
@@ -1018,12 +979,10 @@ pub fn span_for_offset(
offset: TextSize,
) -> (FileRange, SyntaxContext) {
let span = exp_map.span_at(offset);
- let anchor_offset = db
- .ast_id_map(span.anchor.file_id.into())
- .get_erased(span.anchor.ast_id)
- .text_range()
- .start();
- (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
+ let file_id = EditionedFileId::from_span(db, span.anchor.file_id);
+ let anchor_offset =
+ db.ast_id_map(file_id.into()).get_erased(span.anchor.ast_id).text_range().start();
+ (FileRange { file_id, range: span.range + anchor_offset }, span.ctx)
}
/// In Rust, macros expand token trees to token trees. When we want to turn a
@@ -1091,3 +1050,77 @@ impl ExpandTo {
}
intern::impl_internable!(ModPath, attrs::AttrInput);
+
+#[salsa::interned(no_lifetime)]
+#[doc(alias = "MacroFileId")]
+pub struct MacroCallId {
+ pub loc: MacroCallLoc,
+}
+
+impl From<span::MacroCallId> for MacroCallId {
+ #[inline]
+ fn from(value: span::MacroCallId) -> Self {
+ MacroCallId::from_id(value.0)
+ }
+}
+
+impl From<MacroCallId> for span::MacroCallId {
+ #[inline]
+ fn from(value: MacroCallId) -> span::MacroCallId {
+ span::MacroCallId(value.as_id())
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
+pub enum HirFileId {
+ FileId(EditionedFileId),
+ MacroFile(MacroCallId),
+}
+
+impl From<EditionedFileId> for HirFileId {
+ #[inline]
+ fn from(file_id: EditionedFileId) -> Self {
+ HirFileId::FileId(file_id)
+ }
+}
+
+impl From<MacroCallId> for HirFileId {
+ #[inline]
+ fn from(file_id: MacroCallId) -> Self {
+ HirFileId::MacroFile(file_id)
+ }
+}
+
+impl HirFileId {
+ #[inline]
+ pub fn macro_file(self) -> Option<MacroCallId> {
+ match self {
+ HirFileId::FileId(_) => None,
+ HirFileId::MacroFile(it) => Some(it),
+ }
+ }
+
+ #[inline]
+ pub fn is_macro(self) -> bool {
+ matches!(self, HirFileId::MacroFile(_))
+ }
+
+ #[inline]
+ pub fn file_id(self) -> Option<EditionedFileId> {
+ match self {
+ HirFileId::FileId(it) => Some(it),
+ HirFileId::MacroFile(_) => None,
+ }
+ }
+}
+
+impl PartialEq<EditionedFileId> for HirFileId {
+ fn eq(&self, &other: &EditionedFileId) -> bool {
+ *self == HirFileId::from(other)
+ }
+}
+impl PartialEq<HirFileId> for EditionedFileId {
+ fn eq(&self, &other: &HirFileId) -> bool {
+ other == HirFileId::from(*self)
+ }
+}
diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs
index 7f8dc0dba6..79a2e58b04 100644
--- a/crates/hir-expand/src/mod_path.rs
+++ b/crates/hir-expand/src/mod_path.rs
@@ -278,7 +278,7 @@ fn convert_path(
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
let syn_ctx = span_for_range(segment.syntax().text_range());
if let Some(macro_call_id) = syn_ctx.outer_expn(db) {
- if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
+ if db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner {
mod_path.kind = match resolve_crate_root(db, syn_ctx) {
Some(crate_root) => PathKind::DollarCrate(crate_root),
None => PathKind::Crate,
@@ -353,7 +353,7 @@ pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContext) -> O
result_mark = Some(mark);
}
- result_mark.map(|call| db.lookup_intern_macro_call(call).def.krate)
+ result_mark.map(|call| db.lookup_intern_macro_call(call.into()).def.krate)
}
pub use crate::name as __name;
diff --git a/crates/hir-expand/src/prettify_macro_expansion_.rs b/crates/hir-expand/src/prettify_macro_expansion_.rs
index 953aea65b2..11cc434c2d 100644
--- a/crates/hir-expand/src/prettify_macro_expansion_.rs
+++ b/crates/hir-expand/src/prettify_macro_expansion_.rs
@@ -26,7 +26,7 @@ pub fn prettify_macro_expansion(
syntax_ctx_id_to_dollar_crate_replacement.entry(ctx).or_insert_with(|| {
let macro_call_id =
ctx.outer_expn(db).expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
- let macro_call = db.lookup_intern_macro_call(macro_call_id);
+ let macro_call = db.lookup_intern_macro_call(macro_call_id.into());
let macro_def_crate = macro_call.def.krate;
// First, if this is the same crate as the macro, nothing will work but `crate`.
// If not, if the target trait has the macro's crate as a dependency, using the dependency name
diff --git a/crates/hir-expand/src/span_map.rs b/crates/hir-expand/src/span_map.rs
index 1965e76707..e5a778a95c 100644
--- a/crates/hir-expand/src/span_map.rs
+++ b/crates/hir-expand/src/span_map.rs
@@ -1,13 +1,13 @@
//! Span maps for real files and macro expansions.
-use span::{EditionedFileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContext};
+use span::{Span, SyntaxContext};
use stdx::TupleExt;
use syntax::{AstNode, TextRange, ast};
use triomphe::Arc;
pub use span::RealSpanMap;
-use crate::{attrs::collect_attrs, db::ExpandDatabase};
+use crate::{HirFileId, MacroCallId, attrs::collect_attrs, db::ExpandDatabase};
pub type ExpansionSpanMap = span::SpanMap<SyntaxContext>;
@@ -61,9 +61,9 @@ impl SpanMap {
#[inline]
pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
- match file_id.repr() {
- HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
- HirFileIdRepr::MacroFile(m) => {
+ match file_id {
+ HirFileId::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
+ HirFileId::MacroFile(m) => {
SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
}
}
@@ -81,15 +81,13 @@ impl SpanMapRef<'_> {
pub(crate) fn real_span_map(
db: &dyn ExpandDatabase,
- editioned_file_id: EditionedFileId,
+ editioned_file_id: base_db::EditionedFileId,
) -> Arc<RealSpanMap> {
use syntax::ast::HasModuleItem;
let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(editioned_file_id.into());
- let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
-
- let tree = db.parse(file_id).tree();
+ let tree = db.parse(editioned_file_id).tree();
// This is an incrementality layer. Basically we can't use absolute ranges for our spans as that
// would mean we'd invalidate everything whenever we type. So instead we make the text ranges
// relative to some AstIds reducing the risk of invalidation as typing somewhere no longer
@@ -140,7 +138,7 @@ pub(crate) fn real_span_map(
});
Arc::new(RealSpanMap::from_file(
- editioned_file_id,
+ editioned_file_id.editioned_file_id(db),
pairs.into_boxed_slice(),
tree.syntax().text_range().end(),
))
@@ -148,7 +146,7 @@ pub(crate) fn real_span_map(
pub(crate) fn expansion_span_map(
db: &dyn ExpandDatabase,
- file_id: MacroFileId,
+ file_id: MacroCallId,
) -> Arc<ExpansionSpanMap> {
db.parse_macro_expansion(file_id).value.1
}
diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index 6c1b35a68c..8d66d47334 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -16,13 +16,8 @@ use triomphe::Arc;
use crate::{
Const, ConstData, ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution,
- TraitEnvironment, Ty, TyBuilder,
- db::{HirDatabase, HirDatabaseData},
- display::DisplayTarget,
- generics::Generics,
- infer::InferenceContext,
- lower::ParamLoweringMode,
- to_placeholder_idx,
+ TraitEnvironment, Ty, TyBuilder, db::HirDatabase, display::DisplayTarget, generics::Generics,
+ infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx,
};
use super::mir::{MirEvalError, MirLowerError, interpret_mir, lower_to_mir, pad16};
@@ -228,7 +223,6 @@ pub fn try_const_isize(db: &dyn HirDatabase, c: &Const) -> Option<i128> {
pub(crate) fn const_eval_recover(
_: &dyn HirDatabase,
_: &Cycle,
- _: HirDatabaseData,
_: GeneralConstId,
_: Substitution,
_: Option<Arc<TraitEnvironment>>,
diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs
index f9c30b1c5c..6449a4dc7e 100644
--- a/crates/hir-ty/src/consteval/tests.rs
+++ b/crates/hir-ty/src/consteval/tests.rs
@@ -1,11 +1,11 @@
use base_db::RootQueryDb;
use chalk_ir::Substitution;
use hir_def::db::DefDatabase;
+use hir_expand::EditionedFileId;
use rustc_apfloat::{
Float,
ieee::{Half as f16, Quad as f128},
};
-use span::EditionedFileId;
use test_fixture::WithFixture;
use test_utils::skip_slow_tests;
@@ -116,14 +116,14 @@ fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String {
}
fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const, ConstEvalError> {
- let module_id = db.module_for_file(file_id.file_id());
+ let module_id = db.module_for_file(file_id.file_id(db));
let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope;
let const_id = scope
.declarations()
.find_map(|x| match x {
hir_def::ModuleDefId::ConstId(x) => {
- if db.const_signature(x).name.as_ref()?.display(db, file_id.edition()).to_string()
+ if db.const_signature(x).name.as_ref()?.display(db, file_id.edition(db)).to_string()
== "GOAL"
{
Some(x)
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index b4039672be..563d3c7141 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -30,16 +30,16 @@ use crate::{
#[query_group::query_group]
pub trait HirDatabase: DefDatabase + std::fmt::Debug {
- #[salsa::invoke_actual(crate::infer::infer_query)]
+ #[salsa::invoke(crate::infer::infer_query)]
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
// region:mir
- #[salsa::invoke_actual(crate::mir::mir_body_query)]
+ #[salsa::invoke(crate::mir::mir_body_query)]
#[salsa::cycle(crate::mir::mir_body_recover)]
fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>;
- #[salsa::invoke_actual(crate::mir::mir_body_for_closure_query)]
+ #[salsa::invoke(crate::mir::mir_body_for_closure_query)]
fn mir_body_for_closure(&self, def: InternedClosureId) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_query)]
@@ -59,7 +59,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
env: Arc<TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>;
- #[salsa::invoke_actual(crate::mir::borrowck_query)]
+ #[salsa::invoke(crate::mir::borrowck_query)]
#[salsa::lru(2024)]
fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<[BorrowckResult]>, MirLowerError>;
@@ -72,11 +72,11 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
trait_env: Option<Arc<TraitEnvironment>>,
) -> Result<Const, ConstEvalError>;
- #[salsa::invoke_actual(crate::consteval::const_eval_static_query)]
+ #[salsa::invoke(crate::consteval::const_eval_static_query)]
#[salsa::cycle(crate::consteval::const_eval_static_recover)]
fn const_eval_static(&self, def: StaticId) -> Result<Const, ConstEvalError>;
- #[salsa::invoke_actual(crate::consteval::const_eval_discriminant_variant)]
+ #[salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
#[salsa::cycle(crate::consteval::const_eval_discriminant_recover)]
fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>;
@@ -103,10 +103,10 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::cycle(crate::layout::layout_of_ty_recover)]
fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
- #[salsa::invoke_actual(crate::layout::target_data_layout_query)]
+ #[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: Crate) -> Result<Arc<TargetDataLayout>, Arc<str>>;
- #[salsa::invoke_actual(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
+ #[salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option<DynCompatibilityViolation>;
#[salsa::invoke(crate::lower::ty_query)]
@@ -114,53 +114,54 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fn ty(&self, def: TyDefId) -> Binders<Ty>;
#[salsa::cycle(crate::lower::type_for_type_alias_with_diagnostics_query_recover)]
- #[salsa::invoke_actual(crate::lower::type_for_type_alias_with_diagnostics_query)]
+ #[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)]
fn type_for_type_alias_with_diagnostics(&self, def: TypeAliasId) -> (Binders<Ty>, Diagnostics);
/// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is
/// a `StructId` or `EnumVariantId` with a record constructor.
- #[salsa::invoke_actual(crate::lower::value_ty_query)]
+ #[salsa::invoke(crate::lower::value_ty_query)]
fn value_ty(&self, def: ValueTyDefId) -> Option<Binders<Ty>>;
- #[salsa::invoke_actual(crate::lower::impl_self_ty_with_diagnostics_query)]
+ #[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)]
#[salsa::cycle(crate::lower::impl_self_ty_with_diagnostics_recover)]
fn impl_self_ty_with_diagnostics(&self, def: ImplId) -> (Binders<Ty>, Diagnostics);
- #[salsa::invoke_actual(crate::lower::impl_self_ty_query)]
+ #[salsa::invoke(crate::lower::impl_self_ty_query)]
#[salsa::transparent]
fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
- #[salsa::invoke(crate::lower::const_param_ty_with_diagnostics_query)]
+ // FIXME: Make this a non-interned query.
+ #[salsa::invoke_interned(crate::lower::const_param_ty_with_diagnostics_query)]
fn const_param_ty_with_diagnostics(&self, def: ConstParamId) -> (Ty, Diagnostics);
#[salsa::invoke(crate::lower::const_param_ty_query)]
#[salsa::transparent]
fn const_param_ty(&self, def: ConstParamId) -> Ty;
- #[salsa::invoke_actual(crate::lower::impl_trait_with_diagnostics_query)]
+ #[salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)]
fn impl_trait_with_diagnostics(&self, def: ImplId) -> Option<(Binders<TraitRef>, Diagnostics)>;
- #[salsa::invoke_actual(crate::lower::impl_trait_query)]
+ #[salsa::invoke(crate::lower::impl_trait_query)]
#[salsa::transparent]
fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
- #[salsa::invoke_actual(crate::lower::field_types_with_diagnostics_query)]
+ #[salsa::invoke(crate::lower::field_types_with_diagnostics_query)]
fn field_types_with_diagnostics(
&self,
var: VariantId,
) -> (Arc<ArenaMap<LocalFieldId, Binders<Ty>>>, Diagnostics);
- #[salsa::invoke_actual(crate::lower::field_types_query)]
+ #[salsa::invoke(crate::lower::field_types_query)]
#[salsa::transparent]
fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;
- #[salsa::invoke_actual(crate::lower::callable_item_signature_query)]
+ #[salsa::invoke(crate::lower::callable_item_signature_query)]
fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
- #[salsa::invoke_actual(crate::lower::return_type_impl_traits)]
+ #[salsa::invoke(crate::lower::return_type_impl_traits)]
fn return_type_impl_traits(&self, def: FunctionId) -> Option<Arc<Binders<ImplTraits>>>;
- #[salsa::invoke_actual(crate::lower::type_alias_impl_traits)]
+ #[salsa::invoke(crate::lower::type_alias_impl_traits)]
fn type_alias_impl_traits(&self, def: TypeAliasId) -> Option<Arc<Binders<ImplTraits>>>;
#[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
@@ -172,41 +173,41 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
assoc_name: Option<Name>,
) -> GenericPredicates;
- #[salsa::invoke_actual(crate::lower::generic_predicates_query)]
+ #[salsa::invoke(crate::lower::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates;
- #[salsa::invoke_actual(crate::lower::generic_predicates_without_parent_with_diagnostics_query)]
+ #[salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)]
fn generic_predicates_without_parent_with_diagnostics(
&self,
def: GenericDefId,
) -> (GenericPredicates, Diagnostics);
- #[salsa::invoke_actual(crate::lower::generic_predicates_without_parent_query)]
+ #[salsa::invoke(crate::lower::generic_predicates_without_parent_query)]
#[salsa::transparent]
fn generic_predicates_without_parent(&self, def: GenericDefId) -> GenericPredicates;
- #[salsa::invoke_actual(crate::lower::trait_environment_for_body_query)]
+ #[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent]
fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<TraitEnvironment>;
- #[salsa::invoke_actual(crate::lower::trait_environment_query)]
+ #[salsa::invoke(crate::lower::trait_environment_query)]
fn trait_environment(&self, def: GenericDefId) -> Arc<TraitEnvironment>;
- #[salsa::invoke_actual(crate::lower::generic_defaults_with_diagnostics_query)]
+ #[salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)]
#[salsa::cycle(crate::lower::generic_defaults_with_diagnostics_recover)]
fn generic_defaults_with_diagnostics(
&self,
def: GenericDefId,
) -> (GenericDefaults, Diagnostics);
- #[salsa::invoke_actual(crate::lower::generic_defaults_query)]
+ #[salsa::invoke(crate::lower::generic_defaults_query)]
#[salsa::transparent]
fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults;
- #[salsa::invoke_actual(InherentImpls::inherent_impls_in_crate_query)]
+ #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
fn inherent_impls_in_crate(&self, krate: Crate) -> Arc<InherentImpls>;
- #[salsa::invoke_actual(InherentImpls::inherent_impls_in_block_query)]
+ #[salsa::invoke(InherentImpls::inherent_impls_in_block_query)]
fn inherent_impls_in_block(&self, block: BlockId) -> Option<Arc<InherentImpls>>;
/// Collects all crates in the dependency graph that have impls for the
@@ -220,13 +221,13 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fp: TyFingerprint,
) -> SmallVec<[Crate; 2]>;
- #[salsa::invoke_actual(TraitImpls::trait_impls_in_crate_query)]
+ #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
fn trait_impls_in_crate(&self, krate: Crate) -> Arc<TraitImpls>;
- #[salsa::invoke_actual(TraitImpls::trait_impls_in_block_query)]
+ #[salsa::invoke(TraitImpls::trait_impls_in_block_query)]
fn trait_impls_in_block(&self, block: BlockId) -> Option<Arc<TraitImpls>>;
- #[salsa::invoke_actual(TraitImpls::trait_impls_in_deps_query)]
+ #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
fn trait_impls_in_deps(&self, krate: Crate) -> Arc<[Arc<TraitImpls>]>;
// Interned IDs for Chalk integration
@@ -251,7 +252,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::interned]
fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId;
- #[salsa::invoke_actual(chalk_db::associated_ty_data_query)]
+ #[salsa::invoke(chalk_db::associated_ty_data_query)]
fn associated_ty_data(&self, id: TypeAliasId) -> sync::Arc<chalk_db::AssociatedTyDatum>;
#[salsa::invoke(chalk_db::trait_datum_query)]
@@ -268,16 +269,16 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fn impl_datum(&self, krate: Crate, impl_id: chalk_db::ImplId)
-> sync::Arc<chalk_db::ImplDatum>;
- #[salsa::invoke_actual(chalk_db::fn_def_datum_query)]
+ #[salsa::invoke(chalk_db::fn_def_datum_query)]
fn fn_def_datum(&self, fn_def_id: CallableDefId) -> sync::Arc<chalk_db::FnDefDatum>;
- #[salsa::invoke_actual(chalk_db::fn_def_variance_query)]
+ #[salsa::invoke(chalk_db::fn_def_variance_query)]
fn fn_def_variance(&self, fn_def_id: CallableDefId) -> chalk_db::Variances;
- #[salsa::invoke_actual(chalk_db::adt_variance_query)]
+ #[salsa::invoke(chalk_db::adt_variance_query)]
fn adt_variance(&self, adt_id: AdtId) -> chalk_db::Variances;
- #[salsa::invoke_actual(crate::variance::variances_of)]
+ #[salsa::invoke(crate::variance::variances_of)]
#[salsa::cycle(crate::variance::variances_of_cycle)]
fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>;
diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs
index 3cba71c607..c812b69839 100644
--- a/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -21,7 +21,7 @@ use hir_def::{
item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
};
use hir_expand::{
- HirFileId, HirFileIdExt,
+ HirFileId,
name::{AsName, Name},
};
use intern::sym;
@@ -644,7 +644,7 @@ impl<'a> DeclValidator<'a> {
return;
};
- let edition = file_id.original_file(self.db).edition();
+ let edition = file_id.original_file(self.db).edition(self.db);
let diagnostic = IncorrectCase {
file: file_id,
ident_type,
diff --git a/crates/hir-ty/src/drop.rs b/crates/hir-ty/src/drop.rs
index cd46f87008..9ea0b58559 100644
--- a/crates/hir-ty/src/drop.rs
+++ b/crates/hir-ty/src/drop.rs
@@ -7,12 +7,11 @@ use hir_def::signatures::StructFlags;
use stdx::never;
use triomphe::Arc;
-use crate::db::HirDatabaseData;
use crate::{
- AliasTy, Canonical, CanonicalVarKinds, InEnvironment, Interner, ProjectionTy, TraitEnvironment,
- Ty, TyBuilder, TyKind, db::HirDatabase, method_resolution::TyFingerprint,
+ AliasTy, Canonical, CanonicalVarKinds, ConcreteConst, ConstScalar, ConstValue, InEnvironment,
+ Interner, ProjectionTy, TraitEnvironment, Ty, TyBuilder, TyKind, db::HirDatabase,
+ method_resolution::TyFingerprint,
};
-use crate::{ConcreteConst, ConstScalar, ConstValue};
fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool {
let module = match adt {
@@ -197,7 +196,6 @@ fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment>) -> bool {
pub(crate) fn has_drop_glue_recover(
_db: &dyn HirDatabase,
_cycle: &salsa::Cycle,
- _: HirDatabaseData,
_ty: Ty,
_env: Arc<TraitEnvironment>,
) -> DropGlue {
diff --git a/crates/hir-ty/src/dyn_compatibility/tests.rs b/crates/hir-ty/src/dyn_compatibility/tests.rs
index 4c63214eaa..5078e8cfaa 100644
--- a/crates/hir-ty/src/dyn_compatibility/tests.rs
+++ b/crates/hir-ty/src/dyn_compatibility/tests.rs
@@ -33,7 +33,7 @@ fn check_dyn_compatibility<'a>(
expected.into_iter().map(|(id, osvs)| (id, FxHashSet::from_iter(osvs))).collect();
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
for (trait_id, name) in file_ids.into_iter().flat_map(|file_id| {
- let module_id = db.module_for_file(file_id);
+ let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
scope
@@ -43,7 +43,7 @@ fn check_dyn_compatibility<'a>(
let name = db
.trait_signature(trait_id)
.name
- .display_no_db(file_id.edition())
+ .display_no_db(file_id.edition(&db))
.to_smolstr();
Some((trait_id, name))
} else {
diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs
index 6dfc095df0..a7a4e8d404 100644
--- a/crates/hir-ty/src/layout.rs
+++ b/crates/hir-ty/src/layout.rs
@@ -20,7 +20,7 @@ use triomphe::Arc;
use crate::{
Interner, ProjectionTy, Substitution, TraitEnvironment, Ty,
consteval::try_const_usize,
- db::{HirDatabase, HirDatabaseData, InternedClosure},
+ db::{HirDatabase, InternedClosure},
infer::normalize,
utils::ClosureSubst,
};
@@ -368,7 +368,6 @@ pub fn layout_of_ty_query(
pub(crate) fn layout_of_ty_recover(
_: &dyn HirDatabase,
_: &Cycle,
- _: HirDatabaseData,
_: Ty,
_: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
diff --git a/crates/hir-ty/src/layout/adt.rs b/crates/hir-ty/src/layout/adt.rs
index d81d3dc1b5..de6a82a098 100644
--- a/crates/hir-ty/src/layout/adt.rs
+++ b/crates/hir-ty/src/layout/adt.rs
@@ -20,7 +20,7 @@ use crate::{
layout::{Layout, LayoutError, field_ty},
};
-use super::{HirDatabaseData, LayoutCx};
+use super::LayoutCx;
pub fn layout_of_adt_query(
db: &dyn HirDatabase,
@@ -134,7 +134,6 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
pub(crate) fn layout_of_adt_recover(
_: &dyn HirDatabase,
_: &Cycle,
- _: HirDatabaseData,
_: AdtId,
_: Substitution,
_: Arc<TraitEnvironment>,
diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs
index 7edd0fb6cc..cc7d74f4fb 100644
--- a/crates/hir-ty/src/layout/tests.rs
+++ b/crates/hir-ty/src/layout/tests.rs
@@ -38,7 +38,7 @@ fn eval_goal(
let adt_or_type_alias_id = file_ids
.into_iter()
.find_map(|file_id| {
- let module_id = db.module_for_file(file_id.file_id());
+ let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let adt_or_type_alias_id = scope.declarations().find_map(|x| match x {
@@ -47,14 +47,18 @@ fn eval_goal(
hir_def::AdtId::StructId(x) => db
.struct_signature(x)
.name
- .display_no_db(file_id.edition())
+ .display_no_db(file_id.edition(&db))
+ .to_smolstr(),
+ hir_def::AdtId::UnionId(x) => db
+ .union_signature(x)
+ .name
+ .display_no_db(file_id.edition(&db))
+ .to_smolstr(),
+ hir_def::AdtId::EnumId(x) => db
+ .enum_signature(x)
+ .name
+ .display_no_db(file_id.edition(&db))
.to_smolstr(),
- hir_def::AdtId::UnionId(x) => {
- db.union_signature(x).name.display_no_db(file_id.edition()).to_smolstr()
- }
- hir_def::AdtId::EnumId(x) => {
- db.enum_signature(x).name.display_no_db(file_id.edition()).to_smolstr()
- }
};
(name == "Goal").then_some(Either::Left(x))
}
@@ -62,7 +66,7 @@ fn eval_goal(
let name = db
.type_alias_signature(x)
.name
- .display_no_db(file_id.edition())
+ .display_no_db(file_id.edition(&db))
.to_smolstr();
(name == "Goal").then_some(Either::Right(x))
}
@@ -99,7 +103,7 @@ fn eval_expr(
);
let (db, file_id) = TestDB::with_single_file(&ra_fixture);
- let module_id = db.module_for_file(file_id.file_id());
+ let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let function_id = scope
@@ -107,7 +111,7 @@ fn eval_expr(
.find_map(|x| match x {
hir_def::ModuleDefId::FunctionId(x) => {
let name =
- db.function_signature(x).name.display_no_db(file_id.edition()).to_smolstr();
+ db.function_signature(x).name.display_no_db(file_id.edition(&db)).to_smolstr();
(name == "main").then_some(x)
}
_ => None,
@@ -117,7 +121,7 @@ fn eval_expr(
let b = hir_body
.bindings
.iter()
- .find(|x| x.1.name.display_no_db(file_id.edition()).to_smolstr() == "goal")
+ .find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal")
.unwrap()
.0;
let infer = db.infer(function_id.into());
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index a0b0afb0ee..2d2ec2866e 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -59,7 +59,7 @@ use crate::{
QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder,
TyKind, WhereClause, all_super_traits,
consteval::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic},
- db::{HirDatabase, HirDatabaseData},
+ db::HirDatabase,
error_lifetime,
generics::{Generics, generics, trait_self_param_idx},
lower::{
@@ -957,7 +957,6 @@ pub(crate) fn generic_predicates_for_param_query(
pub(crate) fn generic_predicates_for_param_recover(
_db: &dyn HirDatabase,
_cycle: &salsa::Cycle,
- _: HirDatabaseData,
_def: GenericDefId,
_param_id: TypeOrConstParamId,
_assoc_name: Option<Name>,
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index 29b468fef7..dfcfa209c5 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -16,7 +16,7 @@ use hir_def::{
resolver::{HasResolver, TypeNs, ValueNs},
signatures::{StaticFlags, StructFlags},
};
-use hir_expand::{HirFileIdExt, InFile, mod_path::path, name::Name};
+use hir_expand::{InFile, mod_path::path, name::Name};
use intern::sym;
use la_arena::ArenaMap;
use rustc_abi::TargetDataLayout;
@@ -409,7 +409,7 @@ impl MirEvalError {
};
let file_id = span.file_id.original_file(db);
let text_range = span.value.text_range();
- writeln!(f, "{}", span_formatter(file_id.file_id(), text_range))?;
+ writeln!(f, "{}", span_formatter(file_id.file_id(db), text_range))?;
}
}
match err {
diff --git a/crates/hir-ty/src/mir/eval/tests.rs b/crates/hir-ty/src/mir/eval/tests.rs
index b865dd7af0..3abbbe45e6 100644
--- a/crates/hir-ty/src/mir/eval/tests.rs
+++ b/crates/hir-ty/src/mir/eval/tests.rs
@@ -1,5 +1,6 @@
use hir_def::db::DefDatabase;
-use span::{Edition, EditionedFileId};
+use hir_expand::EditionedFileId;
+use span::Edition;
use syntax::{TextRange, TextSize};
use test_fixture::WithFixture;
@@ -9,7 +10,7 @@ use crate::{Interner, Substitution, db::HirDatabase, mir::MirLowerError, test_db
use super::{MirEvalError, interpret_mir};
fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> {
- let module_id = db.module_for_file(file_id);
+ let module_id = db.module_for_file(file_id.file_id(db));
let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope;
let func_id = scope
@@ -69,7 +70,7 @@ fn check_pass_and_stdio(
let span_formatter = |file, range: TextRange| {
format!("{:?} {:?}..{:?}", file, line_index(range.start()), line_index(range.end()))
};
- let krate = db.module_for_file(file_id).krate();
+ let krate = db.module_for_file(file_id.file_id(&db)).krate();
e.pretty_print(&mut err, &db, span_formatter, DisplayTarget::from_crate(&db, krate))
.unwrap();
panic!("Error in interpreting: {err}");
diff --git a/crates/hir-ty/src/mir/monomorphization.rs b/crates/hir-ty/src/mir/monomorphization.rs
index 0467ce4bea..91081d70cc 100644
--- a/crates/hir-ty/src/mir/monomorphization.rs
+++ b/crates/hir-ty/src/mir/monomorphization.rs
@@ -19,7 +19,7 @@ use triomphe::Arc;
use crate::{
Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
consteval::{intern_const_scalar, unknown_const},
- db::{HirDatabase, HirDatabaseData, InternedClosure, InternedClosureId},
+ db::{HirDatabase, InternedClosure, InternedClosureId},
from_placeholder_idx,
generics::{Generics, generics},
infer::normalize,
@@ -316,7 +316,6 @@ pub fn monomorphized_mir_body_query(
pub(crate) fn monomorphized_mir_body_recover(
_: &dyn HirDatabase,
_: &salsa::Cycle,
- _: HirDatabaseData,
_: DefWithBodyId,
_: Substitution,
_: Arc<crate::TraitEnvironment>,
diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs
index 693d39f596..d2bba120b6 100644
--- a/crates/hir-ty/src/test_db.rs
+++ b/crates/hir-ty/src/test_db.rs
@@ -8,9 +8,10 @@ use base_db::{
};
use hir_def::{ModuleId, db::DefDatabase};
+use hir_expand::EditionedFileId;
use rustc_hash::FxHashMap;
use salsa::{AsDynDatabase, Durability};
-use span::{EditionedFileId, FileId};
+use span::FileId;
use syntax::TextRange;
use test_utils::extract_annotations;
use triomphe::Arc;
@@ -119,7 +120,7 @@ impl TestDB {
for &krate in self.relevant_crates(file_id).iter() {
let crate_def_map = self.crate_def_map(krate);
for (local_id, data) in crate_def_map.modules() {
- if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) {
+ if data.origin.file_id().map(|file_id| file_id.file_id(self)) == Some(file_id) {
return Some(crate_def_map.module_id(local_id));
}
}
@@ -145,7 +146,7 @@ impl TestDB {
files
.into_iter()
.filter_map(|file_id| {
- let text = self.file_text(file_id.file_id());
+ let text = self.file_text(file_id.file_id(self));
let annotations = extract_annotations(&text.text(self));
if annotations.is_empty() {
return None;
diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs
index 1c07b5d078..cc37f65c26 100644
--- a/crates/hir-ty/src/tests.rs
+++ b/crates/hir-ty/src/tests.rs
@@ -126,7 +126,7 @@ fn check_impl(
let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new();
for file_id in files {
- let module = db.module_for_file_opt(file_id);
+ let module = db.module_for_file_opt(file_id.file_id(&db));
let module = match module {
Some(m) => m,
None => continue,
@@ -387,7 +387,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
}
};
- let module = db.module_for_file(file_id);
+ let module = db.module_for_file(file_id.file_id(&db));
let def_map = module.def_map(&db);
let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new();
@@ -568,7 +568,7 @@ fn salsa_bug() {
",
);
- let module = db.module_for_file(pos.file_id);
+ let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
@@ -605,9 +605,9 @@ fn salsa_bug() {
}
";
- db.set_file_text(pos.file_id.file_id(), new_text);
+ db.set_file_text(pos.file_id.file_id(&db), new_text);
- let module = db.module_for_file(pos.file_id);
+ let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
diff --git a/crates/hir-ty/src/tests/closure_captures.rs b/crates/hir-ty/src/tests/closure_captures.rs
index 7d4218939e..73f1ae5645 100644
--- a/crates/hir-ty/src/tests/closure_captures.rs
+++ b/crates/hir-ty/src/tests/closure_captures.rs
@@ -1,9 +1,9 @@
use expect_test::{Expect, expect};
use hir_def::db::DefDatabase;
-use hir_expand::files::InFileWrapper;
+use hir_expand::{HirFileId, files::InFileWrapper};
use itertools::Itertools;
use salsa::plumbing::FromId;
-use span::{HirFileId, TextRange};
+use span::TextRange;
use syntax::{AstNode, AstPtr};
use test_fixture::WithFixture;
@@ -16,7 +16,7 @@ use super::visit_module;
fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
- let module = db.module_for_file(file_id);
+ let module = db.module_for_file(file_id.file_id(&db));
let def_map = module.def_map(&db);
let mut defs = Vec::new();
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index 7ed8d47bd5..0542be0ba8 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -17,7 +17,7 @@ fn foo() -> i32 {
);
{
let events = db.log_executed(|| {
- let module = db.module_for_file(pos.file_id.file_id());
+ let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def {
@@ -35,11 +35,11 @@ fn foo() -> i32 {
1
}";
- db.set_file_text(pos.file_id.file_id(), new_text);
+ db.set_file_text(pos.file_id.file_id(&db), new_text);
{
let events = db.log_executed(|| {
- let module = db.module_for_file(pos.file_id.file_id());
+ let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def {
@@ -68,7 +68,7 @@ fn baz() -> i32 {
);
{
let events = db.log_executed(|| {
- let module = db.module_for_file(pos.file_id.file_id());
+ let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def {
@@ -91,11 +91,11 @@ fn baz() -> i32 {
}
";
- db.set_file_text(pos.file_id.file_id(), new_text);
+ db.set_file_text(pos.file_id.file_id(&db), new_text);
{
let events = db.log_executed(|| {
- let module = db.module_for_file(pos.file_id.file_id());
+ let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def {
diff --git a/crates/hir-ty/src/variance.rs b/crates/hir-ty/src/variance.rs
index 11dd2a1bbc..d711f2e57b 100644
--- a/crates/hir-ty/src/variance.rs
+++ b/crates/hir-ty/src/variance.rs
@@ -979,7 +979,7 @@ struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let mut defs: Vec<GenericDefId> = Vec::new();
- let module = db.module_for_file_opt(file_id).unwrap();
+ let module = db.module_for_file_opt(file_id.file_id(&db)).unwrap();
let def_map = module.def_map(&db);
crate::tests::visit_module(&db, &def_map, module.local_id, &mut |it| {
defs.push(match it {
diff --git a/crates/hir/src/has_source.rs b/crates/hir/src/has_source.rs
index a7b7f42e5e..fe7429c867 100644
--- a/crates/hir/src/has_source.rs
+++ b/crates/hir/src/has_source.rs
@@ -6,9 +6,8 @@ use hir_def::{
nameres::{ModuleOrigin, ModuleSource},
src::{HasChildSource, HasSource as _},
};
-use hir_expand::{HirFileId, InFile};
+use hir_expand::{EditionedFileId, HirFileId, InFile};
use hir_ty::db::InternedClosure;
-use span::EditionedFileId;
use syntax::ast;
use tt::TextRange;
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 65c8ec0c25..4414c79f8b 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -82,7 +82,7 @@ use itertools::Itertools;
use nameres::diagnostics::DefDiagnosticKind;
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
-use span::{Edition, EditionedFileId, FileId, MacroCallId};
+use span::{Edition, FileId};
use stdx::{format_to, impl_from, never};
use syntax::{
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr,
@@ -129,7 +129,7 @@ pub use {
{ModuleDefId, TraitId},
},
hir_expand::{
- ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt, MacroKind,
+ EditionedFileId, ExpandResult, HirFileId, MacroCallId, MacroKind,
attrs::{Attr, AttrId},
change::ChangeWithProcMacros,
files::{
@@ -954,10 +954,11 @@ fn macro_call_diagnostics(
let node =
InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id()));
let RenderedExpandError { message, error, kind } = err.render_to_string(db);
- let precise_location = if err.span().anchor.file_id == file_id {
+ let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
+ let precise_location = if editioned_file_id == file_id {
Some(
err.span().range
- + db.ast_id_map(err.span().anchor.file_id.into())
+ + db.ast_id_map(editioned_file_id.into())
.get_erased(err.span().anchor.ast_id)
.text_range()
.start(),
@@ -1926,7 +1927,7 @@ impl DefWithBody {
source_map
.macro_calls()
- .for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id.macro_call_id, acc));
+ .for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc));
expr_store_diagnostics(db, acc, &source_map);
@@ -2145,10 +2146,11 @@ fn expr_store_diagnostics(
ExpressionStoreDiagnostics::MacroError { node, err } => {
let RenderedExpandError { message, error, kind } = err.render_to_string(db);
- let precise_location = if err.span().anchor.file_id == node.file_id {
+ let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
+ let precise_location = if editioned_file_id == node.file_id {
Some(
err.span().range
- + db.ast_id_map(err.span().anchor.file_id.into())
+ + db.ast_id_map(editioned_file_id.into())
.get_erased(err.span().anchor.ast_id)
.text_range()
.start(),
@@ -4475,7 +4477,7 @@ impl Impl {
let src = self.source(db)?;
let macro_file = src.file_id.macro_file()?;
- let loc = macro_file.macro_call_id.lookup(db);
+ let loc = macro_file.lookup(db);
let (derive_attr, derive_index) = match loc.kind {
MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
let module_id = self.id.lookup(db).container;
@@ -4488,9 +4490,8 @@ impl Impl {
}
_ => return None,
};
- let file_id = MacroFileId { macro_call_id: derive_attr };
let path = db
- .parse_macro_expansion(file_id)
+ .parse_macro_expansion(derive_attr)
.value
.0
.syntax_node()
@@ -4498,7 +4499,7 @@ impl Impl {
.nth(derive_index as usize)
.and_then(<ast::Attr as AstNode>::cast)
.and_then(|it| it.path())?;
- Some(InMacroFile { file_id, value: path })
+ Some(InMacroFile { file_id: derive_attr, value: path })
}
pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool {
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index c0f99e09e3..0e5da85606 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -20,11 +20,11 @@ use hir_def::{
type_ref::Mutability,
};
use hir_expand::{
- ExpandResult, FileRange, HirFileIdExt, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
+ EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
attrs::collect_attrs,
builtin::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
- files::InRealFile,
+ files::{FileRangeWrapper, InRealFile},
hygiene::SyntaxContextExt as _,
inert_attr_macro::find_builtin_attr_idx,
mod_path::{ModPath, PathKind},
@@ -35,7 +35,7 @@ use intern::{Interned, Symbol, sym};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{SmallVec, smallvec};
-use span::{EditionedFileId, FileId, HirFileIdRepr, SyntaxContext};
+use span::{Edition, FileId, SyntaxContext};
use stdx::TupleExt;
use syntax::{
AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
@@ -46,10 +46,10 @@ use syntax::{
use crate::{
Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam,
- Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource,
- HirFileId, Impl, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro,
- Module, ModuleDef, Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait,
- TraitAlias, TupleField, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
+ Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource, Impl,
+ InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
+ Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait, TraitAlias, TupleField,
+ Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
db::HirDatabase,
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
source_analyzer::{SourceAnalyzer, name_hygiene, resolve_hir_path},
@@ -136,8 +136,8 @@ pub struct Semantics<'db, DB> {
pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase,
s2d_cache: RefCell<SourceToDefCache>,
- /// MacroCall to its expansion's MacroFileId cache
- macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
+ /// MacroCall to its expansion's MacroCallId cache
+ macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroCallId>>,
}
impl<DB> fmt::Debug for Semantics<'_, DB> {
@@ -305,8 +305,8 @@ impl<'db> SemanticsImpl<'db> {
SemanticsImpl { db, s2d_cache: Default::default(), macro_call_cache: Default::default() }
}
- pub fn parse(&self, file_id: base_db::EditionedFileId) -> ast::SourceFile {
- let hir_file_id = file_id.editioned_file_id(self.db).into();
+ pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
+ let hir_file_id = file_id.into();
let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), hir_file_id);
tree
@@ -322,27 +322,26 @@ impl<'db> SemanticsImpl<'db> {
pub fn attach_first_edition(&self, file: FileId) -> Option<EditionedFileId> {
Some(EditionedFileId::new(
+ self.db,
file,
self.file_to_module_defs(file).next()?.krate().edition(self.db),
))
}
pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
- let editioned_file_id = self
+ let file_id = self
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
-
- let file_id = base_db::EditionedFileId::new(self.db, editioned_file_id);
+ .unwrap_or_else(|| EditionedFileId::new(self.db, file_id, Edition::CURRENT));
let tree = self.db.parse(file_id).tree();
- self.cache(tree.syntax().clone(), editioned_file_id.into());
+ self.cache(tree.syntax().clone(), file_id.into());
tree
}
pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
- match file_id.repr() {
- HirFileIdRepr::FileId(file_id) => {
- let module = self.file_to_module_defs(file_id.file_id()).next()?;
+ match file_id {
+ HirFileId::FileId(file_id) => {
+ let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?;
let def_map = self.db.crate_def_map(module.krate().id);
match def_map[module.id.local_id].origin {
ModuleOrigin::CrateRoot { .. } => None,
@@ -357,9 +356,8 @@ impl<'db> SemanticsImpl<'db> {
_ => unreachable!("FileId can only belong to a file module"),
}
}
- HirFileIdRepr::MacroFile(macro_file) => {
- let node =
- self.db.lookup_intern_macro_call(macro_file.macro_call_id).to_node(self.db);
+ HirFileId::MacroFile(macro_file) => {
+ let node = self.db.lookup_intern_macro_call(macro_file).to_node(self.db);
let root = find_root(&node.value);
self.cache(root, node.file_id);
Some(node)
@@ -384,7 +382,7 @@ impl<'db> SemanticsImpl<'db> {
node
}
- pub fn expand(&self, file_id: MacroFileId) -> ExpandResult<SyntaxNode> {
+ pub fn expand(&self, file_id: MacroCallId) -> ExpandResult<SyntaxNode> {
let res = self.db.parse_macro_expansion(file_id).map(|it| it.0.syntax_node());
self.cache(res.value.clone(), file_id.into());
res
@@ -402,13 +400,11 @@ impl<'db> SemanticsImpl<'db> {
pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
let file_id = self.find_file(attr.syntax()).file_id;
- let krate = match file_id.repr() {
- HirFileIdRepr::FileId(file_id) => {
- self.file_to_module_defs(file_id.file_id()).next()?.krate().id
- }
- HirFileIdRepr::MacroFile(macro_file) => {
- self.db.lookup_intern_macro_call(macro_file.macro_call_id).krate
+ let krate = match file_id {
+ HirFileId::FileId(file_id) => {
+ self.file_to_module_defs(file_id.file_id(self.db)).next()?.krate().id
}
+ HirFileId::MacroFile(macro_file) => self.db.lookup_intern_macro_call(macro_file).krate,
};
hir_expand::check_cfg_attr_value(self.db, attr, krate)
}
@@ -423,7 +419,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_call = InFile::new(sa.file_id, macro_call);
let file_id = sa.expand(self.db, macro_call)?;
- let macro_call = self.db.lookup_intern_macro_call(file_id.macro_call_id);
+ let macro_call = self.db.lookup_intern_macro_call(file_id);
let skip = matches!(
macro_call.def.kind,
@@ -456,7 +452,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<SyntaxNode>> {
let src = self.wrap_node_infile(item.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
- Some(self.expand(macro_call_id.as_macro_file()))
+ Some(self.expand(macro_call_id))
}
pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
@@ -465,7 +461,7 @@ impl<'db> SemanticsImpl<'db> {
let call_id = self.with_ctx(|ctx| {
ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
})?;
- Some(self.parse_or_expand(call_id.as_file()))
+ Some(self.parse_or_expand(call_id.into()))
}
pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
@@ -485,7 +481,7 @@ impl<'db> SemanticsImpl<'db> {
.derive_macro_calls(attr)?
.into_iter()
.flat_map(|call| {
- let file_id = call?.as_macro_file();
+ let file_id = call?;
let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
let root_node = value.0.syntax_node();
self.cache(root_node.clone(), file_id.into());
@@ -526,7 +522,7 @@ impl<'db> SemanticsImpl<'db> {
Some(result)
}
- pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroFileId)>> {
+ pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroCallId)>> {
let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
@@ -542,7 +538,7 @@ impl<'db> SemanticsImpl<'db> {
.derive_helpers_in_scope(InFile::new(sa.file_id, id))?
.iter()
.filter(|&(name, _, _)| *name == attr_name)
- .map(|&(_, macro_, call)| (macro_.into(), call.as_macro_file()))
+ .map(|&(_, macro_, call)| (macro_.into(), call))
.collect();
res.is_empty().not().then_some(res)
}
@@ -564,7 +560,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_file = analyzer.expansion(macro_call)?;
hir_expand::db::expand_speculative(
self.db,
- macro_file.macro_call_id,
+ macro_file,
speculative_args.syntax(),
token_to_map,
)
@@ -572,16 +568,11 @@ impl<'db> SemanticsImpl<'db> {
pub fn speculative_expand_raw(
&self,
- macro_file: MacroFileId,
+ macro_file: MacroCallId,
speculative_args: &SyntaxNode,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
- hir_expand::db::expand_speculative(
- self.db,
- macro_file.macro_call_id,
- speculative_args,
- token_to_map,
- )
+ hir_expand::db::expand_speculative(self.db, macro_file, speculative_args, token_to_map)
}
/// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
@@ -1025,7 +1016,7 @@ impl<'db> SemanticsImpl<'db> {
None => {
stack.push((
file_id.into(),
- smallvec![(token, SyntaxContext::root(file_id.edition()))],
+ smallvec![(token, SyntaxContext::root(file_id.edition(self.db)))],
));
}
}
@@ -1054,7 +1045,6 @@ impl<'db> SemanticsImpl<'db> {
})
});
if let Some((call_id, item)) = containing_attribute_macro_call {
- let file_id = call_id.as_macro_file();
let attr_id = match self.db.lookup_intern_macro_call(call_id).kind {
hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => {
invoc_attr_index.ast_index()
@@ -1083,7 +1073,7 @@ impl<'db> SemanticsImpl<'db> {
.unwrap_or_else(|| text_range.start());
let text_range = TextRange::new(start, text_range.end());
filter_duplicates(tokens, text_range);
- return process_expansion_for_token(&mut stack, file_id);
+ return process_expansion_for_token(&mut stack, call_id);
}
// Then check for token trees, that means we are either in a function-like macro or
@@ -1127,7 +1117,7 @@ impl<'db> SemanticsImpl<'db> {
.eager_arg(self.db)
.and_then(|arg| {
// also descend into eager expansions
- process_expansion_for_token(&mut stack, arg.as_macro_file())
+ process_expansion_for_token(&mut stack, arg)
}))
}
// derive or derive helper
@@ -1151,7 +1141,6 @@ impl<'db> SemanticsImpl<'db> {
match derive_call {
Some(call_id) => {
// resolved to a derive
- let file_id = call_id.as_macro_file();
let text_range = attr.syntax().text_range();
// remove any other token in this macro input, all their mappings are the
// same as this
@@ -1159,7 +1148,7 @@ impl<'db> SemanticsImpl<'db> {
!text_range.contains_range(t.text_range())
});
return process_expansion_for_token(
- &mut stack, file_id,
+ &mut stack, call_id,
);
}
None => Some(adt),
@@ -1207,10 +1196,7 @@ impl<'db> SemanticsImpl<'db> {
// as there may be multiple derives registering the same helper
// name, we gotta make sure to call this for all of them!
// FIXME: We need to call `f` for all of them as well though!
- res = res.or(process_expansion_for_token(
- &mut stack,
- derive.as_macro_file(),
- ));
+ res = res.or(process_expansion_for_token(&mut stack, *derive));
}
res
}
@@ -1288,10 +1274,14 @@ impl<'db> SemanticsImpl<'db> {
)
}
- pub fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
+ pub fn diagnostics_display_range(
+ &self,
+ src: InFile<SyntaxNodePtr>,
+ ) -> FileRangeWrapper<FileId> {
let root = self.parse_or_expand(src.file_id);
let node = src.map(|it| it.to_node(&root));
- node.as_ref().original_file_range_rooted(self.db)
+ let FileRange { file_id, range } = node.as_ref().original_file_range_rooted(self.db);
+ FileRangeWrapper { file_id: file_id.file_id(self.db), range }
}
fn token_ancestors_with_macros(
@@ -1890,20 +1880,16 @@ fn macro_call_to_macro_id(
ctx: &mut SourceToDefCtx<'_, '_>,
macro_call_id: MacroCallId,
) -> Option<MacroId> {
- use span::HirFileIdRepr;
-
let db: &dyn ExpandDatabase = ctx.db;
let loc = db.lookup_intern_macro_call(macro_call_id);
match loc.def.ast_id() {
Either::Left(it) => {
- let node = match it.file_id.repr() {
- HirFileIdRepr::FileId(editioned_file_id) => {
- let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
-
+ let node = match it.file_id {
+ HirFileId::FileId(file_id) => {
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
- HirFileIdRepr::MacroFile(macro_file) => {
+ HirFileId::MacroFile(macro_file) => {
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
it.to_ptr(db).to_node(&expansion_info.expanded().value)
}
@@ -1911,13 +1897,11 @@ fn macro_call_to_macro_id(
ctx.macro_to_def(InFile::new(it.file_id, &node))
}
Either::Right(it) => {
- let node = match it.file_id.repr() {
- HirFileIdRepr::FileId(editioned_file_id) => {
- let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
-
+ let node = match it.file_id {
+ HirFileId::FileId(file_id) => {
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
- HirFileIdRepr::MacroFile(macro_file) => {
+ HirFileId::MacroFile(macro_file) => {
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
it.to_ptr(db).to_node(&expansion_info.expanded().value)
}
diff --git a/crates/hir/src/semantics/child_by_source.rs b/crates/hir/src/semantics/child_by_source.rs
index 22d683cb23..9393d08ad3 100644
--- a/crates/hir/src/semantics/child_by_source.rs
+++ b/crates/hir/src/semantics/child_by_source.rs
@@ -197,7 +197,7 @@ impl ChildBySource for DefWithBodyId {
}
sm.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(|(ast, &exp_id)| {
- res[keys::MACRO_CALL].insert(ast.value, exp_id.macro_call_id);
+ res[keys::MACRO_CALL].insert(ast.value, exp_id);
});
for (block, def_map) in body.blocks(db) {
diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs
index 2f06a1dc68..466bf7f6c8 100644
--- a/crates/hir/src/semantics/source_to_def.rs
+++ b/crates/hir/src/semantics/source_to_def.rs
@@ -98,12 +98,12 @@ use hir_def::{
hir::{BindingId, Expr, LabelId},
};
use hir_expand::{
- ExpansionInfo, HirFileId, HirFileIdExt, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
- attrs::AttrId, name::AsName,
+ EditionedFileId, ExpansionInfo, HirFileId, InMacroFile, MacroCallId, attrs::AttrId,
+ name::AsName,
};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
-use span::{EditionedFileId, FileId};
+use span::FileId;
use stdx::impl_from;
use syntax::{
AstNode, AstPtr, SyntaxNode,
@@ -116,9 +116,9 @@ use crate::{InFile, InlineAsmOperand, db::HirDatabase, semantics::child_by_sourc
#[derive(Default)]
pub(super) struct SourceToDefCache {
pub(super) dynmap_cache: FxHashMap<(ChildContainer, HirFileId), DynMap>,
- expansion_info_cache: FxHashMap<MacroFileId, ExpansionInfo>,
+ expansion_info_cache: FxHashMap<MacroCallId, ExpansionInfo>,
pub(super) file_to_def_cache: FxHashMap<FileId, SmallVec<[ModuleId; 1]>>,
- pub(super) included_file_cache: FxHashMap<EditionedFileId, Option<MacroFileId>>,
+ pub(super) included_file_cache: FxHashMap<EditionedFileId, Option<MacroCallId>>,
/// Rootnode to HirFileId cache
pub(super) root_to_file_cache: FxHashMap<SyntaxNode, HirFileId>,
}
@@ -138,14 +138,14 @@ impl SourceToDefCache {
&mut self,
db: &dyn HirDatabase,
file: EditionedFileId,
- ) -> Option<MacroFileId> {
+ ) -> Option<MacroCallId> {
if let Some(&m) = self.included_file_cache.get(&file) {
return m;
}
self.included_file_cache.insert(file, None);
- for &crate_id in db.relevant_crates(file.into()).iter() {
+ for &crate_id in db.relevant_crates(file.file_id(db)).iter() {
db.include_macro_invoc(crate_id).iter().for_each(|&(macro_call_id, file_id)| {
- self.included_file_cache.insert(file_id, Some(MacroFileId { macro_call_id }));
+ self.included_file_cache.insert(file_id, Some(macro_call_id));
});
}
self.included_file_cache.get(&file).copied().flatten()
@@ -154,7 +154,7 @@ impl SourceToDefCache {
pub(super) fn get_or_insert_expansion(
&mut self,
db: &dyn HirDatabase,
- macro_file: MacroFileId,
+ macro_file: MacroCallId,
) -> &ExpansionInfo {
self.expansion_info_cache.entry(macro_file).or_insert_with(|| {
let exp_info = macro_file.expansion_info(db);
@@ -184,7 +184,7 @@ impl SourceToDefCtx<'_, '_> {
let n_mods = mods.len();
let modules = |file| {
crate_def_map
- .modules_for_file(file)
+ .modules_for_file(self.db, file)
.map(|local_id| crate_def_map.module_id(local_id))
};
mods.extend(modules(file));
@@ -193,18 +193,16 @@ impl SourceToDefCtx<'_, '_> {
self.db
.include_macro_invoc(crate_id)
.iter()
- .filter(|&&(_, file_id)| file_id == file)
+ .filter(|&&(_, file_id)| file_id.file_id(self.db) == file)
.flat_map(|&(macro_call_id, file_id)| {
- self.cache
- .included_file_cache
- .insert(file_id, Some(MacroFileId { macro_call_id }));
+ self.cache.included_file_cache.insert(file_id, Some(macro_call_id));
modules(
macro_call_id
.lookup(self.db)
.kind
.file_id()
.original_file(self.db)
- .file_id(),
+ .file_id(self.db),
)
}),
);
@@ -234,7 +232,7 @@ impl SourceToDefCtx<'_, '_> {
}
None => {
let file_id = src.file_id.original_file(self.db);
- self.file_to_def(file_id.file_id()).first().copied()
+ self.file_to_def(file_id.file_id(self.db)).first().copied()
}
}?;
@@ -247,7 +245,7 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn source_file_to_def(&mut self, src: InFile<&ast::SourceFile>) -> Option<ModuleId> {
let _p = tracing::info_span!("source_file_to_def").entered();
let file_id = src.file_id.original_file(self.db);
- self.file_to_def(file_id.file_id()).first().copied()
+ self.file_to_def(file_id.file_id(self.db)).first().copied()
}
pub(super) fn trait_to_def(&mut self, src: InFile<&ast::Trait>) -> Option<TraitId> {
@@ -526,8 +524,10 @@ impl SourceToDefCtx<'_, '_> {
return Some(def);
}
- let def =
- self.file_to_def(src.file_id.original_file(self.db).file_id()).first().copied()?;
+ let def = self
+ .file_to_def(src.file_id.original_file(self.db).file_id(self.db))
+ .first()
+ .copied()?;
Some(def.into())
}
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index b86fe1fa39..b2ca2c6c4d 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -29,7 +29,7 @@ use hir_def::{
type_ref::{Mutability, TypeRef, TypeRefId},
};
use hir_expand::{
- HirFileId, InFile, MacroFileId, MacroFileIdExt,
+ HirFileId, InFile, MacroCallId,
mod_path::{ModPath, PathKind, path},
name::{AsName, Name},
};
@@ -47,10 +47,9 @@ use hir_ty::{
use intern::sym;
use itertools::Itertools;
use smallvec::SmallVec;
-use syntax::ast::{RangeItem, RangeOp};
use syntax::{
SyntaxKind, SyntaxNode, TextRange, TextSize,
- ast::{self, AstNode},
+ ast::{self, AstNode, RangeItem, RangeOp},
};
use triomphe::Arc;
@@ -216,7 +215,7 @@ impl SourceAnalyzer {
})
}
- pub(crate) fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroFileId> {
+ pub(crate) fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
self.store_sm()?.expansion(node)
}
@@ -750,7 +749,7 @@ impl SourceAnalyzer {
let bs = self.store_sm()?;
bs.expansion(macro_call).and_then(|it| {
// FIXME: Block def maps
- let def = it.macro_call_id.lookup(db).def;
+ let def = it.lookup(db).def;
db.crate_def_map(def.krate)
.macro_def_to_macro_id
.get(&def.kind.erased_ast_id())
@@ -1197,15 +1196,11 @@ impl SourceAnalyzer {
&self,
db: &dyn HirDatabase,
macro_call: InFile<&ast::MacroCall>,
- ) -> Option<MacroFileId> {
+ ) -> Option<MacroCallId> {
self.store_sm().and_then(|bs| bs.expansion(macro_call)).or_else(|| {
- self.resolver
- .item_scope()
- .macro_invoc(
- macro_call
- .with_value(db.ast_id_map(macro_call.file_id).ast_id(macro_call.value)),
- )
- .map(|it| it.as_macro_file())
+ self.resolver.item_scope().macro_invoc(
+ macro_call.with_value(db.ast_id_map(macro_call.file_id).ast_id(macro_call.value)),
+ )
})
}
diff --git a/crates/ide-assists/src/assist_context.rs b/crates/ide-assists/src/assist_context.rs
index afb2229d3e..9eb9452a2b 100644
--- a/crates/ide-assists/src/assist_context.rs
+++ b/crates/ide-assists/src/assist_context.rs
@@ -1,8 +1,6 @@
//! See [`AssistContext`].
-use hir::{FileRange, Semantics};
-use ide_db::EditionedFileId;
-use ide_db::base_db::salsa::AsDynDatabase;
+use hir::{EditionedFileId, FileRange, Semantics};
use ide_db::{FileId, RootDatabase, label::Label};
use syntax::Edition;
use syntax::{
@@ -65,10 +63,7 @@ impl<'a> AssistContext<'a> {
config: &'a AssistConfig,
frange: FileRange,
) -> AssistContext<'a> {
- let editioned_file_id =
- ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), frange.file_id);
-
- let source_file = sema.parse(editioned_file_id);
+ let source_file = sema.parse(frange.file_id);
let start = frange.range.start();
let end = frange.range.end();
@@ -109,12 +104,16 @@ impl<'a> AssistContext<'a> {
self.frange.range.start()
}
+ pub(crate) fn vfs_file_id(&self) -> FileId {
+ self.frange.file_id.file_id(self.db())
+ }
+
pub(crate) fn file_id(&self) -> EditionedFileId {
self.frange.file_id
}
pub(crate) fn edition(&self) -> Edition {
- self.frange.file_id.edition()
+ self.frange.file_id.edition(self.db())
}
pub(crate) fn has_empty_selection(&self) -> bool {
@@ -169,7 +168,7 @@ impl Assists {
pub(crate) fn new(ctx: &AssistContext<'_>, resolve: AssistResolveStrategy) -> Assists {
Assists {
resolve,
- file: ctx.frange.file_id.file_id(),
+ file: ctx.frange.file_id.file_id(ctx.db()),
buf: Vec::new(),
allowed: ctx.config.allowed.clone(),
}
diff --git a/crates/ide-assists/src/handlers/add_braces.rs b/crates/ide-assists/src/handlers/add_braces.rs
index b6e3b0a977..38d80ca606 100644
--- a/crates/ide-assists/src/handlers/add_braces.rs
+++ b/crates/ide-assists/src/handlers/add_braces.rs
@@ -48,7 +48,7 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
editor.replace(expr.syntax(), block_expr.syntax());
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 777e40eb44..8600121bef 100644
--- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -3,7 +3,6 @@ use std::iter::{self, Peekable};
use either::Either;
use hir::{Adt, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics, sym};
use ide_db::RootDatabase;
-use ide_db::base_db::salsa::AsDynDatabase;
use ide_db::syntax_helpers::suggest_name;
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
use itertools::Itertools;
@@ -257,12 +256,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
// Just replace the element that the original range came from
let old_place = {
// Find the original element
- let editioned_file_id = ide_db::base_db::EditionedFileId::new(
- ctx.sema.db.as_dyn_database(),
- arm_list_range.file_id,
- );
-
- let file = ctx.sema.parse(editioned_file_id);
+ let file = ctx.sema.parse(arm_list_range.file_id);
let old_place = file.syntax().covering_element(arm_list_range.range);
match old_place {
@@ -300,7 +294,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
}
editor.add_mappings(make.take());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/add_turbo_fish.rs b/crates/ide-assists/src/handlers/add_turbo_fish.rs
index 245aa3ae21..be13b04873 100644
--- a/crates/ide-assists/src/handlers/add_turbo_fish.rs
+++ b/crates/ide-assists/src/handlers/add_turbo_fish.rs
@@ -119,7 +119,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
}
}
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)?
} else {
@@ -181,7 +181,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
}
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/apply_demorgan.rs b/crates/ide-assists/src/handlers/apply_demorgan.rs
index e03b4abd8f..3b447d1f6d 100644
--- a/crates/ide-assists/src/handlers/apply_demorgan.rs
+++ b/crates/ide-assists/src/handlers/apply_demorgan.rs
@@ -147,7 +147,7 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
}
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -230,7 +230,7 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>
}
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/bind_unused_param.rs b/crates/ide-assists/src/handlers/bind_unused_param.rs
index dadc38e6f9..00c7d25b25 100644
--- a/crates/ide-assists/src/handlers/bind_unused_param.rs
+++ b/crates/ide-assists/src/handlers/bind_unused_param.rs
@@ -42,7 +42,7 @@ pub(crate) fn bind_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
format!("Bind as `let _ = {ident_pat};`"),
param.syntax().text_range(),
|builder| {
- let line_index = ctx.db().line_index(ctx.file_id().into());
+ let line_index = ctx.db().line_index(ctx.vfs_file_id());
let indent = func.indent_level();
let text_indent = indent + 1;
diff --git a/crates/ide-assists/src/handlers/convert_bool_then.rs b/crates/ide-assists/src/handlers/convert_bool_then.rs
index ba5488ea0e..0a2eab201b 100644
--- a/crates/ide-assists/src/handlers/convert_bool_then.rs
+++ b/crates/ide-assists/src/handlers/convert_bool_then.rs
@@ -135,7 +135,7 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
editor.replace(expr.syntax(), mcall.syntax());
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -233,7 +233,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
editor.replace(mcall.syntax().clone(), if_expr.syntax().clone());
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/convert_bool_to_enum.rs b/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
index e3ca28d64d..00e9fdf124 100644
--- a/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
+++ b/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
@@ -209,7 +209,7 @@ fn replace_usages(
delayed_mutations: &mut Vec<(ImportScope, ast::Path)>,
) {
for (file_id, references) in usages {
- edit.edit_file(file_id.file_id());
+ edit.edit_file(file_id.file_id(ctx.db()));
let refs_with_imports = augment_references_with_imports(ctx, references, target_module);
diff --git a/crates/ide-assists/src/handlers/convert_closure_to_fn.rs b/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
index fbd925e55e..1d3a2db335 100644
--- a/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
+++ b/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
@@ -249,7 +249,7 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>)
);
fn_ = fn_.dedent(IndentLevel::from_token(&fn_.syntax().last_token().unwrap()));
- builder.edit_file(ctx.file_id());
+ builder.edit_file(ctx.vfs_file_id());
match &closure_name {
Some((closure_decl, _, _)) => {
fn_ = fn_.indent(closure_decl.indent_level());
@@ -506,9 +506,8 @@ fn wrap_capture_in_deref_if_needed(
}
fn capture_as_arg(ctx: &AssistContext<'_>, capture: &ClosureCapture) -> ast::Expr {
- let place =
- parse_expr_from_str(&capture.display_place_source_code(ctx.db()), ctx.file_id().edition())
- .expect("`display_place_source_code()` produced an invalid expr");
+ let place = parse_expr_from_str(&capture.display_place_source_code(ctx.db()), ctx.edition())
+ .expect("`display_place_source_code()` produced an invalid expr");
let needs_mut = match capture.kind() {
CaptureKind::SharedRef => false,
CaptureKind::MutableRef | CaptureKind::UniqueSharedRef => true,
@@ -587,7 +586,7 @@ fn handle_call(
let indent =
if insert_newlines { first_arg_indent.unwrap().to_string() } else { String::new() };
// FIXME: This text manipulation seems risky.
- let text = ctx.db().file_text(file_id.file_id()).text(ctx.db());
+ let text = ctx.db().file_text(file_id.file_id(ctx.db())).text(ctx.db());
let mut text = text[..u32::from(range.end()).try_into().unwrap()].trim_end();
if !text.ends_with(')') {
return None;
@@ -630,7 +629,7 @@ fn handle_call(
to_insert.push(',');
}
- builder.edit_file(file_id);
+ builder.edit_file(file_id.file_id(ctx.db()));
builder.insert(offset, to_insert);
Some(())
diff --git a/crates/ide-assists/src/handlers/convert_for_to_while_let.rs b/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
index 0ba292f5fc..7c5c9830f7 100644
--- a/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
+++ b/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
@@ -101,7 +101,7 @@ pub(crate) fn convert_for_loop_to_while_let(
editor.replace(for_loop.syntax(), while_loop.syntax());
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
index 53a5a9465e..ed8aad7b2c 100644
--- a/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
+++ b/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
@@ -99,7 +99,7 @@ fn edit_struct_def(
let tuple_fields = ast::make::tuple_field_list(tuple_fields);
let record_fields_text_range = record_fields.syntax().text_range();
- edit.edit_file(ctx.file_id());
+ edit.edit_file(ctx.vfs_file_id());
edit.replace(record_fields_text_range, tuple_fields.syntax().text());
if let Either::Left(strukt) = strukt {
@@ -149,7 +149,7 @@ fn edit_struct_references(
let usages = strukt_def.usages(&ctx.sema).include_self_refs().all();
for (file_id, refs) in usages {
- edit.edit_file(file_id.file_id());
+ edit.edit_file(file_id.file_id(ctx.db()));
for r in refs {
process_struct_name_reference(ctx, r, edit);
}
@@ -227,7 +227,7 @@ fn edit_field_references(
let def = Definition::Field(field);
let usages = def.usages(&ctx.sema).all();
for (file_id, refs) in usages {
- edit.edit_file(file_id.file_id());
+ edit.edit_file(file_id.file_id(ctx.db()));
for r in refs {
if let Some(name_ref) = r.name.as_name_ref() {
// Only edit the field reference if it's part of a `.field` access
diff --git a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs
index 87eafc39f8..cca4cb9d8f 100644
--- a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs
+++ b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs
@@ -106,7 +106,7 @@ fn replace_usages(
target_module: &hir::Module,
) {
for (file_id, references) in usages.iter() {
- edit.edit_file(file_id.file_id());
+ edit.edit_file(file_id.file_id(ctx.db()));
let refs_with_imports =
augment_references_with_imports(edit, ctx, references, struct_name, target_module);
diff --git a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
index ae23fa05f9..777e366da9 100644
--- a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
+++ b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
@@ -95,7 +95,7 @@ fn edit_struct_def(
let record_fields = ast::make::record_field_list(record_fields);
let tuple_fields_text_range = tuple_fields.syntax().text_range();
- edit.edit_file(ctx.file_id());
+ edit.edit_file(ctx.vfs_file_id());
if let Either::Left(strukt) = strukt {
if let Some(w) = strukt.where_clause() {
@@ -142,7 +142,7 @@ fn edit_struct_references(
match node {
ast::TupleStructPat(tuple_struct_pat) => {
let file_range = ctx.sema.original_range_opt(&node)?;
- edit.edit_file(file_range.file_id);
+ edit.edit_file(file_range.file_id.file_id(ctx.db()));
edit.replace(
file_range.range,
ast::make::record_pat_with_fields(
@@ -197,7 +197,7 @@ fn edit_struct_references(
};
for (file_id, refs) in usages {
- edit.edit_file(file_id.file_id());
+ edit.edit_file(file_id.file_id(ctx.db()));
for r in refs {
for node in r.name.syntax().ancestors() {
if edit_node(edit, node).is_some() {
@@ -222,7 +222,7 @@ fn edit_field_references(
let def = Definition::Field(field);
let usages = def.usages(&ctx.sema).all();
for (file_id, refs) in usages {
- edit.edit_file(file_id.file_id());
+ edit.edit_file(file_id.file_id(ctx.db()));
for r in refs {
if let Some(name_ref) = r.name.as_name_ref() {
edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text());
diff --git a/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/crates/ide-assists/src/handlers/destructure_struct_binding.rs
index 0bb96b5d9c..22416d9a9f 100644
--- a/crates/ide-assists/src/handlers/destructure_struct_binding.rs
+++ b/crates/ide-assists/src/handlers/destructure_struct_binding.rs
@@ -66,7 +66,7 @@ fn destructure_struct_binding_impl(
let mut editor = builder.make_editor(data.ident_pat.syntax());
destructure_pat(ctx, &mut editor, data, &field_names);
update_usages(ctx, &mut editor, data, &field_names.into_iter().collect());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
}
struct StructEditData {
diff --git a/crates/ide-assists/src/handlers/expand_rest_pattern.rs b/crates/ide-assists/src/handlers/expand_rest_pattern.rs
index 1b5fc63e52..4e487e2162 100644
--- a/crates/ide-assists/src/handlers/expand_rest_pattern.rs
+++ b/crates/ide-assists/src/handlers/expand_rest_pattern.rs
@@ -64,7 +64,7 @@ fn expand_record_rest_pattern(
editor.replace(old_field_list.syntax(), new_field_list.syntax());
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -154,7 +154,7 @@ fn expand_tuple_struct_rest_pattern(
editor.replace(pat.syntax(), new_pat.syntax());
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/extract_module.rs b/crates/ide-assists/src/handlers/extract_module.rs
index a77b5e4a1a..b82b7984d4 100644
--- a/crates/ide-assists/src/handlers/extract_module.rs
+++ b/crates/ide-assists/src/handlers/extract_module.rs
@@ -1,8 +1,7 @@
use std::iter;
use either::Either;
-use hir::{HasSource, HirFileIdExt, ModuleSource};
-use ide_db::base_db::salsa::AsDynDatabase;
+use hir::{HasSource, ModuleSource};
use ide_db::{
FileId, FxHashMap, FxHashSet,
assists::AssistId,
@@ -114,7 +113,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let (usages_to_be_processed, record_fields, use_stmts_to_be_inserted) =
module.get_usages_and_record_fields(ctx);
- builder.edit_file(ctx.file_id());
+ builder.edit_file(ctx.vfs_file_id());
use_stmts_to_be_inserted.into_iter().for_each(|(_, use_stmt)| {
builder.insert(ctx.selection_trimmed().end(), format!("\n{use_stmt}"));
});
@@ -126,7 +125,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let mut usages_to_be_processed_for_cur_file = vec![];
for (file_id, usages) in usages_to_be_processed {
- if file_id == ctx.file_id() {
+ if file_id == ctx.vfs_file_id() {
usages_to_be_processed_for_cur_file = usages;
continue;
}
@@ -136,7 +135,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
}
}
- builder.edit_file(ctx.file_id());
+ builder.edit_file(ctx.vfs_file_id());
for (text_range, usage) in usages_to_be_processed_for_cur_file {
builder.replace(text_range, usage);
}
@@ -333,10 +332,7 @@ impl Module {
let mut use_stmts_set = FxHashSet::default();
for (file_id, refs) in node_def.usages(&ctx.sema).all() {
- let editioned_file_id =
- ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
-
- let source_file = ctx.sema.parse(editioned_file_id);
+ let source_file = ctx.sema.parse(file_id);
let usages = refs.into_iter().filter_map(|FileReference { range, .. }| {
// handle normal usages
let name_ref = find_node_at_range::<ast::NameRef>(source_file.syntax(), range)?;
@@ -368,7 +364,7 @@ impl Module {
None
});
- refs_in_files.entry(file_id.file_id()).or_default().extend(usages);
+ refs_in_files.entry(file_id.file_id(ctx.db())).or_default().extend(usages);
}
}
@@ -463,10 +459,7 @@ impl Module {
let file_id = ctx.file_id();
let usage_res = def.usages(&ctx.sema).in_scope(&SearchScope::single_file(file_id)).all();
- let editioned_file_id =
- ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
-
- let file = ctx.sema.parse(editioned_file_id);
+ let file = ctx.sema.parse(file_id);
// track uses which does not exists in `Use`
let mut uses_exist_in_sel = false;
@@ -492,7 +485,7 @@ impl Module {
ctx,
curr_parent_module,
selection_range,
- file_id.file_id(),
+ file_id.file_id(ctx.db()),
);
// Find use stmt that use def in current file
@@ -679,7 +672,7 @@ fn check_def_in_mod_and_out_sel(
let have_same_parent = if let Some(ast_module) = &curr_parent_module {
ctx.sema.to_module_def(ast_module).is_some_and(|it| it == $x.module(ctx.db()))
} else {
- source.file_id.original_file(ctx.db()) == curr_file_id
+ source.file_id.original_file(ctx.db()).file_id(ctx.db()) == curr_file_id
};
let in_sel = !selection_range.contains_range(source.value.syntax().text_range());
@@ -695,7 +688,7 @@ fn check_def_in_mod_and_out_sel(
(Some(ast_module), Some(hir_module)) => {
ctx.sema.to_module_def(ast_module).is_some_and(|it| it == hir_module)
}
- _ => source.file_id.original_file(ctx.db()) == curr_file_id,
+ _ => source.file_id.original_file(ctx.db()).file_id(ctx.db()) == curr_file_id,
};
if have_same_parent {
diff --git a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
index 72fdae271d..b9c42285d2 100644
--- a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -74,7 +74,7 @@ pub(crate) fn extract_struct_from_enum_variant(
def_file_references = Some(references);
continue;
}
- builder.edit_file(file_id.file_id());
+ builder.edit_file(file_id.file_id(ctx.db()));
let processed = process_references(
ctx,
builder,
@@ -87,7 +87,7 @@ pub(crate) fn extract_struct_from_enum_variant(
apply_references(ctx.config.insert_use, path, node, import, edition)
});
}
- builder.edit_file(ctx.file_id());
+ builder.edit_file(ctx.vfs_file_id());
let variant = builder.make_mut(variant.clone());
if let Some(references) = def_file_references {
diff --git a/crates/ide-assists/src/handlers/extract_type_alias.rs b/crates/ide-assists/src/handlers/extract_type_alias.rs
index c1c9a82dd1..d843ac6456 100644
--- a/crates/ide-assists/src/handlers/extract_type_alias.rs
+++ b/crates/ide-assists/src/handlers/extract_type_alias.rs
@@ -87,7 +87,7 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
],
);
- builder.add_file_edits(ctx.file_id(), edit);
+ builder.add_file_edits(ctx.vfs_file_id(), edit);
},
)
}
diff --git a/crates/ide-assists/src/handlers/extract_variable.rs b/crates/ide-assists/src/handlers/extract_variable.rs
index f44f4bab9b..3971b60f25 100644
--- a/crates/ide-assists/src/handlers/extract_variable.rs
+++ b/crates/ide-assists/src/handlers/extract_variable.rs
@@ -263,7 +263,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
}
editor.add_mappings(make.finish_with_mappings());
- edit.add_file_edits(ctx.file_id(), editor);
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
edit.rename();
},
);
@@ -378,7 +378,7 @@ fn get_literal_name(ctx: &AssistContext<'_>, expr: &ast::Expr) -> Option<String>
return None;
}
- match LexedStr::single_token(ctx.file_id().edition(), &inner) {
+ match LexedStr::single_token(ctx.edition(), &inner) {
Some((SyntaxKind::IDENT, None)) => Some(inner),
_ => None,
}
diff --git a/crates/ide-assists/src/handlers/fix_visibility.rs b/crates/ide-assists/src/handlers/fix_visibility.rs
index c24c784683..19e0a73f33 100644
--- a/crates/ide-assists/src/handlers/fix_visibility.rs
+++ b/crates/ide-assists/src/handlers/fix_visibility.rs
@@ -1,6 +1,4 @@
-use hir::{
- HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef, db::HirDatabase,
-};
+use hir::{HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef, db::HirDatabase};
use ide_db::FileId;
use syntax::{
AstNode, TextRange,
@@ -132,7 +130,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
);
acc.add(AssistId::quick_fix("fix_visibility"), assist_label, target, |edit| {
- edit.edit_file(target_file.file_id());
+ edit.edit_file(target_file.file_id(ctx.db()));
let vis_owner = edit.make_mut(vis_owner);
vis_owner.set_visibility(Some(missing_visibility.clone_for_update()));
@@ -159,7 +157,11 @@ fn target_data_for_def(
let in_file_syntax = source.syntax();
let file_id = in_file_syntax.file_id;
let range = in_file_syntax.value.text_range();
- Some((ast::AnyHasVisibility::new(source.value), range, file_id.original_file(db).file_id()))
+ Some((
+ ast::AnyHasVisibility::new(source.value),
+ range,
+ file_id.original_file(db).file_id(db),
+ ))
}
let target_name;
@@ -201,7 +203,7 @@ fn target_data_for_def(
let in_file_source = m.declaration_source(db)?;
let file_id = in_file_source.file_id.original_file(db);
let range = in_file_source.value.syntax().text_range();
- (ast::AnyHasVisibility::new(in_file_source.value), range, file_id.file_id())
+ (ast::AnyHasVisibility::new(in_file_source.value), range, file_id.file_id(db))
}
// FIXME
hir::ModuleDef::Macro(_) => return None,
diff --git a/crates/ide-assists/src/handlers/flip_binexpr.rs b/crates/ide-assists/src/handlers/flip_binexpr.rs
index 2ac9fd8cb4..247e8109ab 100644
--- a/crates/ide-assists/src/handlers/flip_binexpr.rs
+++ b/crates/ide-assists/src/handlers/flip_binexpr.rs
@@ -55,7 +55,7 @@ pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
editor.replace(lhs.syntax(), rhs.syntax());
editor.replace(rhs.syntax(), lhs.syntax());
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/flip_comma.rs b/crates/ide-assists/src/handlers/flip_comma.rs
index 7045e4bbc0..1e95d47723 100644
--- a/crates/ide-assists/src/handlers/flip_comma.rs
+++ b/crates/ide-assists/src/handlers/flip_comma.rs
@@ -55,7 +55,7 @@ pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
editor.replace(next.clone(), prev.clone());
}
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}
diff --git a/crates/ide-assists/src/handlers/flip_or_pattern.rs b/crates/ide-assists/src/handlers/flip_or_pattern.rs
index c84d23ffd0..4829f5bec2 100644
--- a/crates/ide-assists/src/handlers/flip_or_pattern.rs
+++ b/crates/ide-assists/src/handlers/flip_or_pattern.rs
@@ -35,7 +35,7 @@ pub(crate) fn flip_or_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
let mut editor = builder.make_editor(parent.syntax());
editor.replace(before.clone(), after.clone());
editor.replace(after, before);
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}
diff --git a/crates/ide-assists/src/handlers/flip_trait_bound.rs b/crates/ide-assists/src/handlers/flip_trait_bound.rs
index 2f3b047b27..9756268c7c 100644
--- a/crates/ide-assists/src/handlers/flip_trait_bound.rs
+++ b/crates/ide-assists/src/handlers/flip_trait_bound.rs
@@ -36,7 +36,7 @@ pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let mut editor = builder.make_editor(parent.syntax());
editor.replace(before.clone(), after.clone());
editor.replace(after, before);
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/generate_constant.rs b/crates/ide-assists/src/handlers/generate_constant.rs
index b31805ace2..fce0ce3994 100644
--- a/crates/ide-assists/src/handlers/generate_constant.rs
+++ b/crates/ide-assists/src/handlers/generate_constant.rs
@@ -1,5 +1,5 @@
use crate::assist_context::{AssistContext, Assists};
-use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module};
+use hir::{HasVisibility, HirDisplay, Module};
use ide_db::{
FileId,
assists::AssistId,
@@ -134,9 +134,9 @@ fn target_data_for_generate_constant(
.any(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains('\n'));
let post_string =
if siblings_has_newline { format!("{indent}") } else { format!("\n{indent}") };
- Some((offset, indent + 1, Some(file_id.file_id()), post_string))
+ Some((offset, indent + 1, Some(file_id.file_id(ctx.db())), post_string))
}
- _ => Some((TextSize::from(0), 0.into(), Some(file_id.file_id()), "\n".into())),
+ _ => Some((TextSize::from(0), 0.into(), Some(file_id.file_id(ctx.db())), "\n".into())),
}
}
diff --git a/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
index be61173521..848c63810a 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_trait.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
@@ -124,7 +124,7 @@ impl Field {
) -> Option<Field> {
let db = ctx.sema.db;
- let module = ctx.sema.file_to_module_def(ctx.file_id())?;
+ let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?;
let edition = module.krate().edition(ctx.db());
let (name, range, ty) = match f {
diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs
index 4c454124c7..3514ebb811 100644
--- a/crates/ide-assists/src/handlers/generate_enum_variant.rs
+++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs
@@ -66,7 +66,7 @@ pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>)
if let Some(it) = enum_node.variant_list() {
it.add_variant(&mut editor, &variant);
}
- builder.add_file_edits(file_id, editor);
+ builder.add_file_edits(file_id.file_id(ctx.db()), editor);
})
}
diff --git a/crates/ide-assists/src/handlers/generate_fn_type_alias.rs b/crates/ide-assists/src/handlers/generate_fn_type_alias.rs
index 7ece5f8ea5..b63baa696d 100644
--- a/crates/ide-assists/src/handlers/generate_fn_type_alias.rs
+++ b/crates/ide-assists/src/handlers/generate_fn_type_alias.rs
@@ -117,7 +117,7 @@ pub(crate) fn generate_fn_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>)
}
}
- builder.add_file_edits(ctx.file_id(), edit);
+ builder.add_file_edits(ctx.vfs_file_id(), edit);
},
);
}
diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs
index 0e9acd1809..824380253a 100644
--- a/crates/ide-assists/src/handlers/generate_function.rs
+++ b/crates/ide-assists/src/handlers/generate_function.rs
@@ -1,8 +1,7 @@
use hir::{
- Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics,
- StructKind, Type, TypeInfo,
+ Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, StructKind, Type,
+ TypeInfo,
};
-use ide_db::base_db::salsa::AsDynDatabase;
use ide_db::{
FileId, FxHashMap, FxHashSet, RootDatabase, SnippetCap,
defs::{Definition, NameRefClass},
@@ -207,14 +206,11 @@ fn get_adt_source(
) -> Option<(Option<ast::Impl>, FileId)> {
let range = adt.source(ctx.sema.db)?.syntax().original_file_range_rooted(ctx.sema.db);
- let editioned_file_id =
- ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), range.file_id);
-
- let file = ctx.sema.parse(editioned_file_id);
+ let file = ctx.sema.parse(range.file_id);
let adt_source =
ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;
find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()])
- .map(|impl_| (impl_, range.file_id.file_id()))
+ .map(|impl_| (impl_, range.file_id.file_id(ctx.db())))
}
struct FunctionBuilder {
@@ -501,7 +497,7 @@ fn get_fn_target(
target_module: Option<Module>,
call: CallExpr,
) -> Option<(GeneratedFunctionTarget, FileId)> {
- let mut file = ctx.file_id().into();
+ let mut file = ctx.vfs_file_id();
let target = match target_module {
Some(target_module) => {
let (in_file, target) = next_space_for_fn_in_module(ctx.db(), target_module);
@@ -1191,7 +1187,7 @@ fn next_space_for_fn_in_module(
}
};
- (file.file_id(), assist_item)
+ (file.file_id(db), assist_item)
}
#[derive(Clone, Copy)]
diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs
index 4df209b22a..c02983029e 100644
--- a/crates/ide-assists/src/handlers/inline_call.rs
+++ b/crates/ide-assists/src/handlers/inline_call.rs
@@ -71,6 +71,7 @@ use crate::{
// ```
pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let def_file = ctx.file_id();
+ let vfs_def_file = ctx.vfs_file_id();
let name = ctx.find_node_at_offset::<ast::Name>()?;
let ast_func = name.syntax().parent().and_then(ast::Fn::cast)?;
let func_body = ast_func.body()?;
@@ -106,7 +107,8 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let current_file_usage = usages.references.remove(&def_file);
let mut remove_def = true;
- let mut inline_refs_for_file = |file_id, refs: Vec<FileReference>| {
+ let mut inline_refs_for_file = |file_id: EditionedFileId, refs: Vec<FileReference>| {
+ let file_id = file_id.file_id(ctx.db());
builder.edit_file(file_id);
let call_krate = ctx.sema.file_to_module_def(file_id).map(|it| it.krate());
let count = refs.len();
@@ -143,7 +145,7 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
}
match current_file_usage {
Some(refs) => inline_refs_for_file(def_file, refs),
- None => builder.edit_file(def_file),
+ None => builder.edit_file(vfs_def_file),
}
if remove_def {
builder.delete(ast_func.syntax().text_range());
@@ -194,7 +196,7 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
let name_ref: ast::NameRef = ctx.find_node_at_offset()?;
let call_info = CallInfo::from_name_ref(
name_ref.clone(),
- ctx.sema.file_to_module_def(ctx.file_id())?.krate().into(),
+ ctx.sema.file_to_module_def(ctx.vfs_file_id())?.krate().into(),
)?;
let (function, label) = match &call_info.node {
ast::CallableExpr::Call(call) => {
diff --git a/crates/ide-assists/src/handlers/inline_local_variable.rs b/crates/ide-assists/src/handlers/inline_local_variable.rs
index 297a53a78a..5d4bdc6ec7 100644
--- a/crates/ide-assists/src/handlers/inline_local_variable.rs
+++ b/crates/ide-assists/src/handlers/inline_local_variable.rs
@@ -110,7 +110,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
}
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/inline_macro.rs b/crates/ide-assists/src/handlers/inline_macro.rs
index 37d9b340d0..b09bef36ae 100644
--- a/crates/ide-assists/src/handlers/inline_macro.rs
+++ b/crates/ide-assists/src/handlers/inline_macro.rs
@@ -38,7 +38,7 @@ use crate::{AssistContext, AssistId, Assists};
pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let unexpanded = ctx.find_node_at_offset::<ast::MacroCall>()?;
let macro_call = ctx.sema.to_def(&unexpanded)?;
- let target_crate_id = ctx.sema.file_to_module_def(ctx.file_id())?.krate().into();
+ let target_crate_id = ctx.sema.file_to_module_def(ctx.vfs_file_id())?.krate().into();
let text_range = unexpanded.syntax().text_range();
acc.add(
@@ -46,8 +46,8 @@ pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
"Inline macro".to_owned(),
text_range,
|builder| {
- let expanded = ctx.sema.parse_or_expand(macro_call.as_file());
- let span_map = ctx.sema.db.expansion_span_map(macro_call.as_macro_file());
+ let expanded = ctx.sema.parse_or_expand(macro_call.into());
+ let span_map = ctx.sema.db.expansion_span_map(macro_call);
// Don't call `prettify_macro_expansion()` outside the actual assist action; it does some heavy rowan tree manipulation,
// which can be very costly for big macros when it is done *even without the assist being invoked*.
let expanded = prettify_macro_expansion(ctx.db(), expanded, &span_map, target_crate_id);
diff --git a/crates/ide-assists/src/handlers/inline_type_alias.rs b/crates/ide-assists/src/handlers/inline_type_alias.rs
index 6a132e119e..4511072b04 100644
--- a/crates/ide-assists/src/handlers/inline_type_alias.rs
+++ b/crates/ide-assists/src/handlers/inline_type_alias.rs
@@ -87,17 +87,17 @@ pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>)
builder.replace(target, replacement);
}
- if file_id == ctx.file_id() {
+ if file_id == ctx.vfs_file_id() {
builder.delete(ast_alias.syntax().text_range());
definition_deleted = true;
}
};
for (file_id, refs) in usages.into_iter() {
- inline_refs_for_file(file_id.file_id(), refs);
+ inline_refs_for_file(file_id.file_id(ctx.db()), refs);
}
if !definition_deleted {
- builder.edit_file(ctx.file_id());
+ builder.edit_file(ctx.vfs_file_id());
builder.delete(ast_alias.syntax().text_range());
}
},
diff --git a/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs b/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs
index 37dc92b2a7..db51070a64 100644
--- a/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs
+++ b/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs
@@ -59,7 +59,7 @@ pub(crate) fn introduce_named_type_parameter(
}
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/move_const_to_impl.rs b/crates/ide-assists/src/handlers/move_const_to_impl.rs
index 5c9318f5cf..0c1dc9eb93 100644
--- a/crates/ide-assists/src/handlers/move_const_to_impl.rs
+++ b/crates/ide-assists/src/handlers/move_const_to_impl.rs
@@ -105,7 +105,7 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
builder.delete(range_to_delete);
let usages = usages.iter().flat_map(|(file_id, usages)| {
- let edition = file_id.edition();
+ let edition = file_id.edition(ctx.db());
usages.iter().map(move |usage| (edition, usage.range))
});
for (edition, range) in usages {
diff --git a/crates/ide-assists/src/handlers/move_from_mod_rs.rs b/crates/ide-assists/src/handlers/move_from_mod_rs.rs
index 00469f07ba..a36d3136a1 100644
--- a/crates/ide-assists/src/handlers/move_from_mod_rs.rs
+++ b/crates/ide-assists/src/handlers/move_from_mod_rs.rs
@@ -22,7 +22,7 @@ use crate::{
// ```
pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?;
- let module = ctx.sema.file_to_module_def(ctx.file_id())?;
+ let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?;
// Enable this assist if the user select all "meaningful" content in the source file
let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed());
let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range());
@@ -38,13 +38,13 @@ pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let target = source_file.syntax().text_range();
let module_name = module.name(ctx.db())?.as_str().to_smolstr();
let path = format!("../{module_name}.rs");
- let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path };
+ let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path };
acc.add(
AssistId::refactor("move_from_mod_rs"),
format!("Convert {module_name}/mod.rs to {module_name}.rs"),
target,
|builder| {
- builder.move_file(ctx.file_id(), dst);
+ builder.move_file(ctx.vfs_file_id(), dst);
},
)
}
diff --git a/crates/ide-assists/src/handlers/move_module_to_file.rs b/crates/ide-assists/src/handlers/move_module_to_file.rs
index cfca89fce4..33792896c4 100644
--- a/crates/ide-assists/src/handlers/move_module_to_file.rs
+++ b/crates/ide-assists/src/handlers/move_module_to_file.rs
@@ -104,7 +104,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) ->
buf,
);
- let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path };
+ let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path };
builder.create_file(dst, contents);
},
)
diff --git a/crates/ide-assists/src/handlers/move_to_mod_rs.rs b/crates/ide-assists/src/handlers/move_to_mod_rs.rs
index ffa3894fe5..5e95b264fc 100644
--- a/crates/ide-assists/src/handlers/move_to_mod_rs.rs
+++ b/crates/ide-assists/src/handlers/move_to_mod_rs.rs
@@ -22,7 +22,7 @@ use crate::{
// ```
pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?;
- let module = ctx.sema.file_to_module_def(ctx.file_id())?;
+ let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?;
// Enable this assist if the user select all "meaningful" content in the source file
let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed());
let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range());
@@ -38,13 +38,13 @@ pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let target = source_file.syntax().text_range();
let module_name = module.name(ctx.db())?.as_str().to_smolstr();
let path = format!("./{module_name}/mod.rs");
- let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path };
+ let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path };
acc.add(
AssistId::refactor("move_to_mod_rs"),
format!("Convert {module_name}.rs to {module_name}/mod.rs"),
target,
|builder| {
- builder.move_file(ctx.file_id(), dst);
+ builder.move_file(ctx.vfs_file_id(), dst);
},
)
}
diff --git a/crates/ide-assists/src/handlers/remove_mut.rs b/crates/ide-assists/src/handlers/remove_mut.rs
index 1b2a8336a8..b07a361adf 100644
--- a/crates/ide-assists/src/handlers/remove_mut.rs
+++ b/crates/ide-assists/src/handlers/remove_mut.rs
@@ -28,6 +28,6 @@ pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
_ => (),
}
editor.delete(mut_token);
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}
diff --git a/crates/ide-assists/src/handlers/remove_parentheses.rs b/crates/ide-assists/src/handlers/remove_parentheses.rs
index 9349c53cc9..d514c1c291 100644
--- a/crates/ide-assists/src/handlers/remove_parentheses.rs
+++ b/crates/ide-assists/src/handlers/remove_parentheses.rs
@@ -59,7 +59,7 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
editor.add_mappings(make.finish_with_mappings());
}
editor.replace(parens.syntax(), expr.syntax());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/remove_unused_imports.rs b/crates/ide-assists/src/handlers/remove_unused_imports.rs
index 09697eb9b2..1baf814ca6 100644
--- a/crates/ide-assists/src/handlers/remove_unused_imports.rs
+++ b/crates/ide-assists/src/handlers/remove_unused_imports.rs
@@ -1,6 +1,6 @@
use std::collections::hash_map::Entry;
-use hir::{FileRange, HirFileIdExt, InFile, InRealFile, Module, ModuleSource};
+use hir::{FileRange, InFile, InRealFile, Module, ModuleSource};
use ide_db::text_edit::TextRange;
use ide_db::{
FxHashMap, RootDatabase,
diff --git a/crates/ide-assists/src/handlers/remove_unused_param.rs b/crates/ide-assists/src/handlers/remove_unused_param.rs
index 5ed1efe614..8b824c7c7f 100644
--- a/crates/ide-assists/src/handlers/remove_unused_param.rs
+++ b/crates/ide-assists/src/handlers/remove_unused_param.rs
@@ -1,6 +1,4 @@
-use ide_db::{
- EditionedFileId, base_db::salsa::AsDynDatabase, defs::Definition, search::FileReference,
-};
+use ide_db::{EditionedFileId, defs::Definition, search::FileReference};
use syntax::{
AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, T, TextRange,
algo::{find_node_at_range, least_common_ancestor_element},
@@ -90,7 +88,7 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) ->
for (file_id, references) in fn_def.usages(&ctx.sema).all() {
process_usages(ctx, builder, file_id, references, param_position, is_self_present);
}
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -98,15 +96,13 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) ->
fn process_usages(
ctx: &AssistContext<'_>,
builder: &mut SourceChangeBuilder,
- file_id: EditionedFileId,
+ editioned_file_id: EditionedFileId,
references: Vec<FileReference>,
arg_to_remove: usize,
is_self_present: bool,
) {
- let editioned_file_id_wrapper =
- ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
-
- let source_file = ctx.sema.parse(editioned_file_id_wrapper);
+ let source_file = ctx.sema.parse(editioned_file_id);
+ let file_id = editioned_file_id.file_id(ctx.db());
builder.edit_file(file_id);
let possible_ranges = references
.into_iter()
diff --git a/crates/ide-assists/src/handlers/reorder_fields.rs b/crates/ide-assists/src/handlers/reorder_fields.rs
index 1951d007ca..990677d372 100644
--- a/crates/ide-assists/src/handlers/reorder_fields.rs
+++ b/crates/ide-assists/src/handlers/reorder_fields.rs
@@ -82,7 +82,7 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
}
}
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/reorder_impl_items.rs b/crates/ide-assists/src/handlers/reorder_impl_items.rs
index 1222ba928c..0ad5ec9d44 100644
--- a/crates/ide-assists/src/handlers/reorder_impl_items.rs
+++ b/crates/ide-assists/src/handlers/reorder_impl_items.rs
@@ -106,7 +106,7 @@ pub(crate) fn reorder_impl_items(acc: &mut Assists, ctx: &AssistContext<'_>) ->
.zip(sorted)
.for_each(|(old, new)| editor.replace(old.syntax(), new.syntax()));
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index feeea8872a..6dcdf5edbd 100644
--- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -1,4 +1,4 @@
-use hir::{InFile, MacroFileIdExt, ModuleDef};
+use hir::{InFile, ModuleDef};
use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator};
use itertools::Itertools;
use syntax::{
diff --git a/crates/ide-assists/src/handlers/replace_if_let_with_match.rs b/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
index 2b356a112a..15d3db5e74 100644
--- a/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
+++ b/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
@@ -142,7 +142,7 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
let mut editor = builder.make_editor(if_expr.syntax());
editor.replace(if_expr.syntax(), expr.syntax());
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -291,7 +291,7 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'
let mut editor = builder.make_editor(match_expr.syntax());
editor.replace(match_expr.syntax(), if_let_expr.syntax());
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/replace_let_with_if_let.rs b/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
index c92a494c2c..90f4ff7ad2 100644
--- a/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
+++ b/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
@@ -74,7 +74,7 @@ pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_>
editor.replace(let_stmt.syntax(), if_stmt.syntax());
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/unwrap_return_type.rs b/crates/ide-assists/src/handlers/unwrap_return_type.rs
index 1c4c3732d7..cf38262fbf 100644
--- a/crates/ide-assists/src/handlers/unwrap_return_type.rs
+++ b/crates/ide-assists/src/handlers/unwrap_return_type.rs
@@ -169,7 +169,7 @@ pub(crate) fn unwrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) ->
}
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}
diff --git a/crates/ide-assists/src/handlers/wrap_return_type.rs b/crates/ide-assists/src/handlers/wrap_return_type.rs
index 64251edd78..e4abf02364 100644
--- a/crates/ide-assists/src/handlers/wrap_return_type.rs
+++ b/crates/ide-assists/src/handlers/wrap_return_type.rs
@@ -133,7 +133,7 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
}
editor.add_mappings(make.finish_with_mappings());
- builder.add_file_edits(ctx.file_id(), editor);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
);
}
diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs
index 7e9d596614..fee5a9dd73 100644
--- a/crates/ide-assists/src/lib.rs
+++ b/crates/ide-assists/src/lib.rs
@@ -68,7 +68,7 @@ pub mod utils;
use hir::Semantics;
use ide_db::{EditionedFileId, RootDatabase};
-use syntax::TextRange;
+use syntax::{Edition, TextRange};
pub(crate) use crate::assist_context::{AssistContext, Assists};
@@ -90,7 +90,7 @@ pub fn assists(
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(range.file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(range.file_id));
+ .unwrap_or_else(|| EditionedFileId::new(db, range.file_id, Edition::CURRENT));
let ctx = AssistContext::new(sema, config, hir::FileRange { file_id, range: range.range });
let mut acc = Assists::new(&ctx, resolve);
handlers::all().iter().for_each(|handler| {
diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs
index ef7b09f3c5..0593e6930d 100644
--- a/crates/ide-assists/src/tests.rs
+++ b/crates/ide-assists/src/tests.rs
@@ -1,9 +1,9 @@
mod generated;
use expect_test::expect;
-use hir::{FileRange, Semantics};
+use hir::Semantics;
use ide_db::{
- EditionedFileId, RootDatabase, SnippetCap,
+ EditionedFileId, FileRange, RootDatabase, SnippetCap,
base_db::SourceDatabase,
imports::insert_use::{ImportGranularity, InsertUseConfig},
source_change::FileSystemEdit,
@@ -222,17 +222,17 @@ pub(crate) fn check_assist_unresolved(
fn check_doc_test(assist_id: &str, before: &str, after: &str) {
let after = trim_indent(after);
let (db, file_id, selection) = RootDatabase::with_range_or_offset(before);
- let before = db.file_text(file_id.file_id()).text(&db).to_string();
- let frange = FileRange { file_id, range: selection.into() };
+ let before = db.file_text(file_id.file_id(&db)).text(&db).to_string();
+ let frange = ide_db::FileRange { file_id: file_id.file_id(&db), range: selection.into() };
- let assist = assists(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange.into())
+ let assist = assists(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange)
.into_iter()
.find(|assist| assist.id.0 == assist_id)
.unwrap_or_else(|| {
panic!(
"\n\nAssist is not applicable: {}\nAvailable assists: {}",
assist_id,
- assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into())
+ assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange)
.into_iter()
.map(|assist| assist.id.0)
.collect::<Vec<_>>()
@@ -247,7 +247,7 @@ fn check_doc_test(assist_id: &str, before: &str, after: &str) {
.expect("Assist did not contain any source changes");
let mut actual = before;
if let Some((source_file_edit, snippet_edit)) =
- source_change.get_source_and_snippet_edit(file_id.file_id())
+ source_change.get_source_and_snippet_edit(file_id.file_id(&db))
{
source_file_edit.apply(&mut actual);
if let Some(snippet_edit) = snippet_edit {
@@ -281,9 +281,9 @@ fn check_with_config(
) {
let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
db.enable_proc_attr_macros();
- let text_without_caret = db.file_text(file_with_caret_id.into()).text(&db).to_string();
+ let text_without_caret = db.file_text(file_with_caret_id.file_id(&db)).text(&db).to_string();
- let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
+ let frange = hir::FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
let sema = Semantics::new(&db);
let ctx = AssistContext::new(sema, &config, frange);
@@ -390,8 +390,9 @@ fn assist_order_field_struct() {
let before = "struct Foo { $0bar: u32 }";
let (before_cursor_pos, before) = extract_offset(before);
let (db, file_id) = with_single_file(&before);
- let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) };
- let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into());
+ let frange =
+ FileRange { file_id: file_id.file_id(&db), range: TextRange::empty(before_cursor_pos) };
+ let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange);
let mut assists = assists.iter();
assert_eq!(assists.next().expect("expected assist").label, "Change visibility to pub(crate)");
@@ -417,7 +418,12 @@ pub fn test_some_range(a: int) -> bool {
"#,
);
- let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into());
+ let assists = assists(
+ &db,
+ &TEST_CONFIG,
+ AssistResolveStrategy::None,
+ FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
+ );
let expected = labels(&assists);
expect![[r#"
@@ -445,7 +451,12 @@ pub fn test_some_range(a: int) -> bool {
let mut cfg = TEST_CONFIG;
cfg.allowed = Some(vec![AssistKind::Refactor]);
- let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into());
+ let assists = assists(
+ &db,
+ &cfg,
+ AssistResolveStrategy::None,
+ FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
+ );
let expected = labels(&assists);
expect![[r#"
@@ -459,7 +470,12 @@ pub fn test_some_range(a: int) -> bool {
{
let mut cfg = TEST_CONFIG;
cfg.allowed = Some(vec![AssistKind::RefactorExtract]);
- let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into());
+ let assists = assists(
+ &db,
+ &cfg,
+ AssistResolveStrategy::None,
+ FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
+ );
let expected = labels(&assists);
expect![[r#"
@@ -471,7 +487,12 @@ pub fn test_some_range(a: int) -> bool {
{
let mut cfg = TEST_CONFIG;
cfg.allowed = Some(vec![AssistKind::QuickFix]);
- let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into());
+ let assists = assists(
+ &db,
+ &cfg,
+ AssistResolveStrategy::None,
+ FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
+ );
let expected = labels(&assists);
expect![[r#""#]].assert_eq(&expected);
@@ -496,7 +517,12 @@ pub fn test_some_range(a: int) -> bool {
cfg.allowed = Some(vec![AssistKind::RefactorExtract]);
{
- let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into());
+ let assists = assists(
+ &db,
+ &cfg,
+ AssistResolveStrategy::None,
+ FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
+ );
assert_eq!(4, assists.len());
let mut assists = assists.into_iter();
@@ -594,7 +620,7 @@ pub fn test_some_range(a: int) -> bool {
assist_kind: AssistKind::RefactorExtract,
assist_subtype: None,
}),
- frange.into(),
+ FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
);
assert_eq!(4, assists.len());
let mut assists = assists.into_iter();
@@ -693,7 +719,7 @@ pub fn test_some_range(a: int) -> bool {
assist_kind: AssistKind::RefactorExtract,
assist_subtype: None,
}),
- frange.into(),
+ FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
);
assert_eq!(4, assists.len());
let mut assists = assists.into_iter();
@@ -830,7 +856,12 @@ pub fn test_some_range(a: int) -> bool {
}
{
- let assists = assists(&db, &cfg, AssistResolveStrategy::All, frange.into());
+ let assists = assists(
+ &db,
+ &cfg,
+ AssistResolveStrategy::All,
+ FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
+ );
assert_eq!(4, assists.len());
let mut assists = assists.into_iter();
diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs
index ab8d8a6169..cd18b3dcfd 100644
--- a/crates/ide-completion/src/completions/env_vars.rs
+++ b/crates/ide-completion/src/completions/env_vars.rs
@@ -1,6 +1,5 @@
//! Completes environment variables defined by Cargo
//! (<https://doc.rust-lang.org/cargo/reference/environment-variables.html>)
-use hir::MacroFileIdExt;
use ide_db::syntax_helpers::node_ext::macro_call_for_string_token;
use syntax::{
AstToken,
diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs
index e08271f39a..58aead73fd 100644
--- a/crates/ide-completion/src/completions/item_list/trait_impl.rs
+++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -31,7 +31,7 @@
//! }
//! ```
-use hir::{MacroFileId, Name, db::ExpandDatabase};
+use hir::{MacroCallId, Name, db::ExpandDatabase};
use ide_db::text_edit::TextEdit;
use ide_db::{
SymbolKind, documentation::HasDocs, path_transform::PathTransform,
@@ -465,7 +465,7 @@ fn add_const_impl(
fn make_const_compl_syntax(
ctx: &CompletionContext<'_>,
const_: &ast::Const,
- macro_file: Option<MacroFileId>,
+ macro_file: Option<MacroCallId>,
) -> SmolStr {
let const_ = if let Some(macro_file) = macro_file {
let span_map = ctx.db.expansion_span_map(macro_file);
@@ -493,7 +493,7 @@ fn make_const_compl_syntax(
fn function_declaration(
ctx: &CompletionContext<'_>,
node: &ast::Fn,
- macro_file: Option<MacroFileId>,
+ macro_file: Option<MacroCallId>,
) -> String {
let node = if let Some(macro_file) = macro_file {
let span_map = ctx.db.expansion_span_map(macro_file);
diff --git a/crates/ide-completion/src/completions/mod_.rs b/crates/ide-completion/src/completions/mod_.rs
index dc81e49c53..013747e4d0 100644
--- a/crates/ide-completion/src/completions/mod_.rs
+++ b/crates/ide-completion/src/completions/mod_.rs
@@ -2,7 +2,7 @@
use std::iter;
-use hir::{HirFileIdExt, Module};
+use hir::Module;
use ide_db::{
FxHashSet, RootDatabase, SymbolKind,
base_db::{SourceDatabase, VfsPath},
@@ -44,13 +44,13 @@ pub(crate) fn complete_mod(
let module_definition_file =
current_module.definition_source_file_id(ctx.db).original_file(ctx.db);
let source_root_id =
- ctx.db.file_source_root(module_definition_file.file_id()).source_root_id(ctx.db);
+ ctx.db.file_source_root(module_definition_file.file_id(ctx.db)).source_root_id(ctx.db);
let source_root = ctx.db.source_root(source_root_id).source_root(ctx.db);
let directory_to_look_for_submodules = directory_to_look_for_submodules(
current_module,
ctx.db,
- source_root.path_for_file(&module_definition_file.file_id())?,
+ source_root.path_for_file(&module_definition_file.file_id(ctx.db))?,
)?;
let existing_mod_declarations = current_module
@@ -66,9 +66,11 @@ pub(crate) fn complete_mod(
source_root
.iter()
- .filter(|&submodule_candidate_file| submodule_candidate_file != module_definition_file)
.filter(|&submodule_candidate_file| {
- module_declaration_file.is_none_or(|it| it != submodule_candidate_file)
+ submodule_candidate_file != module_definition_file.file_id(ctx.db)
+ })
+ .filter(|&submodule_candidate_file| {
+ module_declaration_file.is_none_or(|it| it.file_id(ctx.db) != submodule_candidate_file)
})
.filter_map(|submodule_file| {
let submodule_path = source_root.path_for_file(&submodule_file)?;
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs
index 3a55e83026..54be7d2fbc 100644
--- a/crates/ide-completion/src/completions/postfix.rs
+++ b/crates/ide-completion/src/completions/postfix.rs
@@ -283,7 +283,7 @@ fn get_receiver_text(
if receiver_is_ambiguous_float_literal {
range.range = TextRange::at(range.range.start(), range.range.len() - TextSize::of('.'))
}
- let file_text = sema.db.file_text(range.file_id.file_id());
+ let file_text = sema.db.file_text(range.file_id.file_id(sema.db));
let mut text = file_text.text(sema.db)[range.range].to_owned();
// The receiver texts should be interpreted as-is, as they are expected to be
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index fd25ee05e0..3baf1f3de6 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -6,7 +6,7 @@ mod tests;
use std::{iter, ops::ControlFlow};
-use base_db::{RootQueryDb as _, salsa::AsDynDatabase};
+use base_db::RootQueryDb as _;
use hir::{
DisplayTarget, HasAttrs, Local, ModuleDef, ModuleSource, Name, PathResolution, ScopeDef,
Semantics, SemanticsScope, Symbol, Type, TypeInfo,
@@ -705,17 +705,14 @@ impl<'a> CompletionContext<'a> {
let sema = Semantics::new(db);
let editioned_file_id = sema.attach_first_edition(file_id)?;
- let editioned_file_id_wrapper =
- ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), editioned_file_id);
-
- let original_file = sema.parse(editioned_file_id_wrapper);
+ let original_file = sema.parse(editioned_file_id);
// Insert a fake ident to get a valid parse tree. We will use this file
// to determine context, though the original_file will be used for
// actual completion.
let file_with_fake_ident = {
- let (_, edition) = editioned_file_id.unpack();
- let parse = db.parse(editioned_file_id_wrapper);
+ let (_, edition) = editioned_file_id.unpack(db);
+ let parse = db.parse(editioned_file_id);
parse.reparse(TextRange::empty(offset), COMPLETION_MARKER, edition).tree()
};
diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs
index 8d8dfb083e..1fdd4cdb1c 100644
--- a/crates/ide-completion/src/lib.rs
+++ b/crates/ide-completion/src/lib.rs
@@ -12,7 +12,6 @@ mod tests;
use ide_db::{
FilePosition, FxHashSet, RootDatabase,
- base_db::salsa::AsDynDatabase,
imports::insert_use::{self, ImportScope},
syntax_helpers::tree_diff::diff,
text_edit::TextEdit,
@@ -277,8 +276,6 @@ pub fn resolve_completion_edits(
let sema = hir::Semantics::new(db);
let editioned_file_id = sema.attach_first_edition(file_id)?;
- let editioned_file_id =
- ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
let original_file = sema.parse(editioned_file_id);
let original_token =
diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs
index 85987f0de5..fdc3d9a13b 100644
--- a/crates/ide-completion/src/tests.rs
+++ b/crates/ide-completion/src/tests.rs
@@ -155,13 +155,14 @@ fn completion_list_with_config(
pub(crate) fn position(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (RootDatabase, FilePosition) {
- let change_fixture = ChangeFixture::parse(ra_fixture);
let mut database = RootDatabase::default();
+ let change_fixture = ChangeFixture::parse(&database, ra_fixture);
database.enable_proc_attr_macros();
database.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
- (database, FilePosition { file_id: file_id.file_id(), offset })
+ let position = FilePosition { file_id: file_id.file_id(&database), offset };
+ (database, position)
}
pub(crate) fn do_completion(code: &str, kind: CompletionItemKind) -> Vec<CompletionItem> {
diff --git a/crates/ide-db/src/imports/insert_use/tests.rs b/crates/ide-db/src/imports/insert_use/tests.rs
index 541f6c379e..428ba1d511 100644
--- a/crates/ide-db/src/imports/insert_use/tests.rs
+++ b/crates/ide-db/src/imports/insert_use/tests.rs
@@ -1,4 +1,3 @@
-use salsa::AsDynDatabase;
use stdx::trim_indent;
use test_fixture::WithFixture;
use test_utils::{CURSOR_MARKER, assert_eq_text};
@@ -1252,14 +1251,10 @@ fn check_with_config(
let (db, file_id, pos) = if ra_fixture_before.contains(CURSOR_MARKER) {
let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture_before);
- let file_id = crate::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
-
(db, file_id, Some(range_or_offset))
} else {
let (db, file_id) = RootDatabase::with_single_file(ra_fixture_before);
- let file_id = crate::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
-
(db, file_id, None)
};
let sema = &Semantics::new(&db);
diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs
index 0fb4bdfa88..d3934e14ab 100644
--- a/crates/ide-db/src/lib.rs
+++ b/crates/ide-db/src/lib.rs
@@ -45,7 +45,7 @@ pub mod syntax_helpers {
pub use parser::LexedStr;
}
-pub use hir::ChangeWithProcMacros;
+pub use hir::{ChangeWithProcMacros, EditionedFileId};
use salsa::Durability;
use std::{fmt, mem::ManuallyDrop};
@@ -67,7 +67,7 @@ pub use ::line_index;
/// `base_db` is normally also needed in places where `ide_db` is used, so this re-export is for convenience.
pub use base_db;
-pub use span::{EditionedFileId, FileId};
+pub use span::{self, FileId};
pub type FxIndexSet<T> = indexmap::IndexSet<T, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
pub type FxIndexMap<K, V> =
@@ -246,6 +246,7 @@ impl RootDatabase {
#[query_group::query_group]
pub trait LineIndexDatabase: base_db::RootQueryDb {
+ #[salsa::invoke_interned(line_index)]
fn line_index(&self, file_id: FileId) -> Arc<LineIndex>;
}
diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs
index 4a5a28e824..b8119e1aab 100644
--- a/crates/ide-db/src/rename.rs
+++ b/crates/ide-db/src/rename.rs
@@ -28,8 +28,8 @@ use crate::{
};
use base_db::AnchoredPathBuf;
use either::Either;
-use hir::{FieldSource, FileRange, HirFileIdExt, InFile, ModuleSource, Semantics};
-use span::{Edition, EditionedFileId, FileId, SyntaxContext};
+use hir::{EditionedFileId, FieldSource, FileRange, InFile, ModuleSource, Semantics};
+use span::{Edition, FileId, SyntaxContext};
use stdx::{TupleExt, never};
use syntax::{
AstNode, SyntaxKind, T, TextRange,
@@ -249,7 +249,7 @@ fn rename_mod(
let InFile { file_id, value: def_source } = module.definition_source(sema.db);
if let ModuleSource::SourceFile(..) = def_source {
- let anchor = file_id.original_file(sema.db).file_id();
+ let anchor = file_id.original_file(sema.db).file_id(sema.db);
let is_mod_rs = module.is_mod_rs(sema.db);
let has_detached_child = module.children(sema.db).any(|child| !child.is_inline(sema.db));
@@ -296,13 +296,13 @@ fn rename_mod(
.original_file_range_opt(sema.db)
.map(TupleExt::head)
{
- let new_name = if is_raw_identifier(new_name, file_id.edition()) {
+ let new_name = if is_raw_identifier(new_name, file_id.edition(sema.db)) {
format!("r#{new_name}")
} else {
new_name.to_owned()
};
source_change.insert_source_edit(
- file_id.file_id(),
+ file_id.file_id(sema.db),
TextEdit::replace(file_range.range, new_name),
)
};
@@ -315,8 +315,8 @@ fn rename_mod(
let usages = def.usages(sema).all();
let ref_edits = usages.iter().map(|(file_id, references)| {
(
- EditionedFileId::file_id(file_id),
- source_edit_from_references(references, def, new_name, file_id.edition()),
+ file_id.file_id(sema.db),
+ source_edit_from_references(references, def, new_name, file_id.edition(sema.db)),
)
});
source_change.extend(ref_edits);
@@ -362,8 +362,8 @@ fn rename_reference(
let mut source_change = SourceChange::default();
source_change.extend(usages.iter().map(|(file_id, references)| {
(
- EditionedFileId::file_id(file_id),
- source_edit_from_references(references, def, new_name, file_id.edition()),
+ file_id.file_id(sema.db),
+ source_edit_from_references(references, def, new_name, file_id.edition(sema.db)),
)
}));
@@ -541,7 +541,7 @@ fn source_edit_from_def(
source_change: &mut SourceChange,
) -> Result<(FileId, TextEdit)> {
let new_name_edition_aware = |new_name: &str, file_id: EditionedFileId| {
- if is_raw_identifier(new_name, file_id.edition()) {
+ if is_raw_identifier(new_name, file_id.edition(sema.db)) {
format!("r#{new_name}")
} else {
new_name.to_owned()
@@ -638,7 +638,7 @@ fn source_edit_from_def(
edit.set_annotation(conflict_annotation);
let Some(file_id) = file_id else { bail!("No file available to rename") };
- return Ok((EditionedFileId::file_id(file_id), edit));
+ return Ok((file_id.file_id(sema.db), edit));
}
let FileRange { file_id, range } = def
.range_for_rename(sema)
@@ -654,7 +654,7 @@ fn source_edit_from_def(
_ => (range, new_name.to_owned()),
};
edit.replace(range, new_name_edition_aware(&new_name, file_id));
- Ok((file_id.file_id(), edit.finish()))
+ Ok((file_id.file_id(sema.db), edit.finish()))
}
#[derive(Copy, Clone, Debug, PartialEq)]
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 6e1374f024..0107fb2329 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -10,15 +10,14 @@ use std::{cell::LazyCell, cmp::Reverse};
use base_db::{RootQueryDb, SourceDatabase};
use either::Either;
use hir::{
- Adt, AsAssocItem, DefWithBody, FileRange, FileRangeWrapper, HasAttrs, HasContainer, HasSource,
- HirFileIdExt, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer, ModuleSource,
- PathResolution, Semantics, Visibility, sym,
+ Adt, AsAssocItem, DefWithBody, EditionedFileId, FileRange, FileRangeWrapper, HasAttrs,
+ HasContainer, HasSource, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer,
+ ModuleSource, PathResolution, Semantics, Visibility, sym,
};
use memchr::memmem::Finder;
use parser::SyntaxKind;
use rustc_hash::{FxHashMap, FxHashSet};
use salsa::Database;
-use span::EditionedFileId;
use syntax::{
AstNode, AstToken, SmolStr, SyntaxElement, SyntaxNode, TextRange, TextSize, ToSmolStr,
ast::{self, HasName, Rename},
@@ -168,7 +167,9 @@ impl SearchScope {
let source_root = db.file_source_root(crate_data.root_file_id).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
entries.extend(
- source_root.iter().map(|id| (EditionedFileId::new(id, crate_data.edition), None)),
+ source_root
+ .iter()
+ .map(|id| (EditionedFileId::new(db, id, crate_data.edition), None)),
);
}
SearchScope { entries }
@@ -183,7 +184,9 @@ impl SearchScope {
let source_root = db.file_source_root(root_file).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
entries.extend(
- source_root.iter().map(|id| (EditionedFileId::new(id, rev_dep.edition(db)), None)),
+ source_root
+ .iter()
+ .map(|id| (EditionedFileId::new(db, id, rev_dep.edition(db)), None)),
);
}
SearchScope { entries }
@@ -198,7 +201,7 @@ impl SearchScope {
SearchScope {
entries: source_root
.iter()
- .map(|id| (EditionedFileId::new(id, of.edition(db)), None))
+ .map(|id| (EditionedFileId::new(db, id, of.edition(db)), None))
.collect(),
}
}
@@ -482,7 +485,7 @@ impl<'a> FindUsages<'a> {
scope: &'b SearchScope,
) -> impl Iterator<Item = (Arc<str>, EditionedFileId, TextRange)> + 'b {
scope.entries.iter().map(|(&file_id, &search_range)| {
- let text = db.file_text(file_id.file_id()).text(db);
+ let text = db.file_text(file_id.file_id(db)).text(db);
let search_range =
search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text)));
@@ -648,8 +651,6 @@ impl<'a> FindUsages<'a> {
for (file_text, file_id, search_range) in
FindUsages::scope_files(db, &current_to_process_search_scope)
{
- let file_id = crate::base_db::EditionedFileId::new(db, file_id);
-
let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
for offset in FindUsages::match_indices(&file_text, &finder, search_range) {
@@ -809,9 +810,7 @@ impl<'a> FindUsages<'a> {
sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool,
) {
for (file_text, file_id, search_range) in files {
- let file_id_wrapper = crate::base_db::EditionedFileId::new(this.sema.db, file_id);
-
- let tree = LazyCell::new(move || this.sema.parse(file_id_wrapper).syntax().clone());
+ let tree = LazyCell::new(move || this.sema.parse(file_id).syntax().clone());
for offset in FindUsages::match_indices(&file_text, finder, search_range) {
let usages = FindUsages::find_nodes(this.sema, name, &tree, offset)
@@ -856,7 +855,10 @@ impl<'a> FindUsages<'a> {
name,
is_possibly_self.into_iter().map(|position| {
(
- self.sema.db.file_text(position.file_id.file_id()).text(self.sema.db),
+ self.sema
+ .db
+ .file_text(position.file_id.file_id(self.sema.db))
+ .text(self.sema.db),
position.file_id,
position.range,
)
@@ -950,9 +952,7 @@ impl<'a> FindUsages<'a> {
let include_self_kw_refs =
self.include_self_kw_refs.as_ref().map(|ty| (ty, Finder::new("Self")));
for (text, file_id, search_range) in Self::scope_files(sema.db, &search_scope) {
- let file_id_wrapper = crate::base_db::EditionedFileId::new(sema.db, file_id);
-
- let tree = LazyCell::new(move || sema.parse(file_id_wrapper).syntax().clone());
+ let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
// Search for occurrences of the items name
for offset in Self::match_indices(&text, finder, search_range) {
@@ -1007,8 +1007,7 @@ impl<'a> FindUsages<'a> {
for (text, file_id, search_range) in Self::scope_files(sema.db, &scope) {
self.sema.db.unwind_if_revision_cancelled();
- let file_id_wrapper = crate::base_db::EditionedFileId::new(sema.db, file_id);
- let tree = LazyCell::new(move || sema.parse(file_id_wrapper).syntax().clone());
+ let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
for offset in Self::match_indices(&text, finder, search_range) {
for name_ref in Self::find_nodes(sema, "super", &tree, offset)
@@ -1056,13 +1055,11 @@ impl<'a> FindUsages<'a> {
return;
};
- let file_text = sema.db.file_text(file_id.file_id());
+ let file_text = sema.db.file_text(file_id.file_id(self.sema.db));
let text = file_text.text(sema.db);
let search_range =
search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text)));
- let file_id = crate::base_db::EditionedFileId::new(sema.db, file_id);
-
let tree = LazyCell::new(|| sema.parse(file_id).syntax().clone());
let finder = &Finder::new("self");
diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs
index 8ba398c196..d1ba79e8c7 100644
--- a/crates/ide-db/src/symbol_index.rs
+++ b/crates/ide-db/src/symbol_index.rs
@@ -100,9 +100,13 @@ impl Query {
pub trait SymbolsDatabase: HirDatabase + SourceDatabase {
/// The symbol index for a given module. These modules should only be in source roots that
/// are inside local_roots.
+ // FIXME: Is it worth breaking the encapsulation boundary of `hir`, and make this take a `ModuleId`,
+ // in order for it to be a non-interned query?
+ #[salsa::invoke_interned(module_symbols)]
fn module_symbols(&self, module: Module) -> Arc<SymbolIndex>;
/// The symbol index for a given source root within library_roots.
+ #[salsa::invoke_interned(library_symbols)]
fn library_symbols(&self, source_root_id: SourceRootId) -> Arc<SymbolIndex>;
#[salsa::transparent]
diff --git a/crates/ide-db/src/syntax_helpers/suggest_name.rs b/crates/ide-db/src/syntax_helpers/suggest_name.rs
index 5e68ccd3d3..9b9f450bc7 100644
--- a/crates/ide-db/src/syntax_helpers/suggest_name.rs
+++ b/crates/ide-db/src/syntax_helpers/suggest_name.rs
@@ -455,8 +455,7 @@ mod tests {
let frange = FileRange { file_id, range: range_or_offset.into() };
let sema = Semantics::new(&db);
- let file_id = crate::base_db::EditionedFileId::new(sema.db, frange.file_id);
- let source_file = sema.parse(file_id);
+ let source_file = sema.parse(frange.file_id);
let element = source_file.syntax().covering_element(frange.range);
let expr =
diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt
index c96e428fd5..455a680590 100644
--- a/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -3,7 +3,7 @@
Module {
id: ModuleId {
krate: Crate(
- Id(2c00),
+ Id(3000),
),
block: None,
local_id: Idx::<ModuleData>(0),
@@ -16,17 +16,16 @@
Struct(
Struct {
id: StructId(
- 4001,
+ 3401,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -50,17 +49,16 @@
Struct(
Struct {
id: StructId(
- 4000,
+ 3400,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -84,17 +82,16 @@
Struct(
Struct {
id: StructId(
- 4000,
+ 3400,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -118,17 +115,16 @@
Struct(
Struct {
id: StructId(
- 4000,
+ 3400,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -152,17 +148,16 @@
Struct(
Struct {
id: StructId(
- 4000,
+ 3400,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -186,17 +181,16 @@
Struct(
Struct {
id: StructId(
- 4001,
+ 3401,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -220,17 +214,16 @@
Struct(
Struct {
id: StructId(
- 4000,
+ 3400,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index 944f828081..5e5ae1d168 100644
--- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -3,7 +3,7 @@
Module {
id: ModuleId {
krate: Crate(
- Id(2c00),
+ Id(3000),
),
block: None,
local_id: Idx::<ModuleData>(0),
@@ -15,16 +15,15 @@
def: TypeAlias(
TypeAlias {
id: TypeAliasId(
- 7c00,
+ 6800,
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: TYPE_ALIAS,
@@ -47,16 +46,15 @@
def: Const(
Const {
id: ConstId(
- 7400,
+ 6000,
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: CONST,
@@ -79,16 +77,15 @@
def: Const(
Const {
id: ConstId(
- 7402,
+ 6002,
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: CONST,
@@ -112,17 +109,16 @@
Enum(
Enum {
id: EnumId(
- 6000,
+ 4c00,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: ENUM,
@@ -146,17 +142,16 @@
Macro {
id: Macro2Id(
Macro2Id(
- 5c00,
+ 4800,
),
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
@@ -180,17 +175,16 @@
Macro {
id: Macro2Id(
Macro2Id(
- 5c00,
+ 4800,
),
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: MACRO_DEF,
@@ -213,16 +207,15 @@
def: Static(
Static {
id: StaticId(
- 7800,
+ 6400,
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STATIC,
@@ -246,17 +239,16 @@
Struct(
Struct {
id: StructId(
- 5801,
+ 4401,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -280,14 +272,16 @@
Struct(
Struct {
id: StructId(
- 5800,
+ 4400,
),
},
),
),
loc: DeclarationLocation {
hir_file_id: MacroFile(
- Id(4400),
+ MacroCallId(
+ Id(3800),
+ ),
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -311,17 +305,16 @@
Struct(
Struct {
id: StructId(
- 5805,
+ 4405,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -347,17 +340,16 @@
Struct(
Struct {
id: StructId(
- 5806,
+ 4406,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -383,17 +375,16 @@
Struct(
Struct {
id: StructId(
- 5807,
+ 4407,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -417,17 +408,16 @@
Struct(
Struct {
id: StructId(
- 5802,
+ 4402,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -450,16 +440,15 @@
def: Trait(
Trait {
id: TraitId(
- 6c00,
+ 5800,
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: TRAIT,
@@ -483,17 +472,16 @@
Macro {
id: Macro2Id(
Macro2Id(
- 5c00,
+ 4800,
),
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
@@ -517,17 +505,16 @@
Union(
Union {
id: UnionId(
- 6400,
+ 5000,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: UNION,
@@ -551,7 +538,7 @@
Module {
id: ModuleId {
krate: Crate(
- Id(2c00),
+ Id(3000),
),
block: None,
local_id: Idx::<ModuleData>(1),
@@ -559,11 +546,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: MODULE,
@@ -587,7 +573,7 @@
Module {
id: ModuleId {
krate: Crate(
- Id(2c00),
+ Id(3000),
),
block: None,
local_id: Idx::<ModuleData>(2),
@@ -595,11 +581,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: MODULE,
@@ -623,17 +608,16 @@
Macro {
id: MacroRulesId(
MacroRulesId(
- 4001,
+ 3401,
),
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: MACRO_RULES,
@@ -656,16 +640,15 @@
def: Function(
Function {
id: FunctionId(
- 7002,
+ 5c02,
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: FN,
@@ -690,16 +673,15 @@
def: Function(
Function {
id: FunctionId(
- 7001,
+ 5c01,
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: FN,
@@ -725,17 +707,16 @@
Macro {
id: MacroRulesId(
MacroRulesId(
- 4000,
+ 3400,
),
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: MACRO_RULES,
@@ -758,16 +739,15 @@
def: Function(
Function {
id: FunctionId(
- 7000,
+ 5c00,
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: FN,
@@ -791,17 +771,16 @@
Macro {
id: MacroRulesId(
MacroRulesId(
- 4001,
+ 3401,
),
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
@@ -824,16 +803,15 @@
def: Function(
Function {
id: FunctionId(
- 7003,
+ 5c03,
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: FN,
@@ -859,7 +837,7 @@
Module {
id: ModuleId {
krate: Crate(
- Id(2c00),
+ Id(3000),
),
block: None,
local_id: Idx::<ModuleData>(1),
@@ -872,17 +850,16 @@
Struct(
Struct {
id: StructId(
- 5803,
+ 4403,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 0,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -906,7 +883,7 @@
Module {
id: ModuleId {
krate: Crate(
- Id(2c00),
+ Id(3000),
),
block: None,
local_id: Idx::<ModuleData>(2),
@@ -918,16 +895,15 @@
def: Trait(
Trait {
id: TraitId(
- 6c00,
+ 5800,
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 1,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2001),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
@@ -951,17 +927,16 @@
Macro {
id: Macro2Id(
Macro2Id(
- 5c00,
+ 4800,
),
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 1,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2001),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
@@ -985,17 +960,16 @@
Struct(
Struct {
id: StructId(
- 5804,
+ 4404,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 1,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2001),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -1019,17 +993,16 @@
Macro {
id: Macro2Id(
Macro2Id(
- 5c00,
+ 4800,
),
),
},
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 1,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2001),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
@@ -1053,17 +1026,16 @@
Struct(
Struct {
id: StructId(
- 5804,
+ 4404,
),
},
),
),
loc: DeclarationLocation {
- hir_file_id: EditionedFileId(
- FileId(
- 1,
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2001),
),
- Edition2024,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
diff --git a/crates/ide-db/src/traits.rs b/crates/ide-db/src/traits.rs
index 22a695c9a8..61e28386d0 100644
--- a/crates/ide-db/src/traits.rs
+++ b/crates/ide-db/src/traits.rs
@@ -116,7 +116,6 @@ mod tests {
use expect_test::{Expect, expect};
use hir::FilePosition;
use hir::Semantics;
- use salsa::AsDynDatabase;
use span::Edition;
use syntax::ast::{self, AstNode};
use test_fixture::ChangeFixture;
@@ -127,8 +126,8 @@ mod tests {
pub(crate) fn position(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (RootDatabase, FilePosition) {
- let change_fixture = ChangeFixture::parse(ra_fixture);
let mut database = RootDatabase::default();
+ let change_fixture = ChangeFixture::parse(&database, ra_fixture);
database.apply_change(change_fixture.change);
let (file_id, range_or_offset) =
change_fixture.file_position.expect("expected a marker ($0)");
@@ -140,10 +139,7 @@ mod tests {
let (db, position) = position(ra_fixture);
let sema = Semantics::new(&db);
- let editioned_file_id =
- crate::base_db::EditionedFileId::new(sema.db.as_dyn_database(), position.file_id);
-
- let file = sema.parse(editioned_file_id);
+ let file = sema.parse(position.file_id);
let impl_block: ast::Impl =
sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap();
let trait_ = crate::traits::resolve_target_trait(&sema, &impl_block);
@@ -158,10 +154,7 @@ mod tests {
let (db, position) = position(ra_fixture);
let sema = Semantics::new(&db);
- let editioned_file_id =
- crate::base_db::EditionedFileId::new(sema.db.as_dyn_database(), position.file_id);
-
- let file = sema.parse(editioned_file_id);
+ let file = sema.parse(position.file_id);
let impl_block: ast::Impl =
sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap();
let items = crate::traits::get_missing_assoc_items(&sema, &impl_block);
diff --git a/crates/ide-diagnostics/src/handlers/field_shorthand.rs b/crates/ide-diagnostics/src/handlers/field_shorthand.rs
index f7020a2c50..1dc6a7bf9c 100644
--- a/crates/ide-diagnostics/src/handlers/field_shorthand.rs
+++ b/crates/ide-diagnostics/src/handlers/field_shorthand.rs
@@ -1,6 +1,7 @@
//! Suggests shortening `Foo { field: field }` to `Foo { field }` in both
//! expressions and patterns.
+use ide_db::RootDatabase;
use ide_db::text_edit::TextEdit;
use ide_db::{EditionedFileId, FileRange, source_change::SourceChange};
use syntax::{AstNode, SyntaxNode, ast, match_ast};
@@ -8,20 +9,22 @@ use syntax::{AstNode, SyntaxNode, ast, match_ast};
use crate::{Diagnostic, DiagnosticCode, fix};
pub(crate) fn field_shorthand(
+ db: &RootDatabase,
acc: &mut Vec<Diagnostic>,
file_id: EditionedFileId,
node: &SyntaxNode,
) {
match_ast! {
match node {
- ast::RecordExpr(it) => check_expr_field_shorthand(acc, file_id, it),
- ast::RecordPat(it) => check_pat_field_shorthand(acc, file_id, it),
+ ast::RecordExpr(it) => check_expr_field_shorthand(db, acc, file_id, it),
+ ast::RecordPat(it) => check_pat_field_shorthand(db, acc, file_id, it),
_ => ()
}
};
}
fn check_expr_field_shorthand(
+ db: &RootDatabase,
acc: &mut Vec<Diagnostic>,
file_id: EditionedFileId,
record_expr: ast::RecordExpr,
@@ -49,16 +52,17 @@ fn check_expr_field_shorthand(
let edit = edit_builder.finish();
let field_range = record_field.syntax().text_range();
+ let vfs_file_id = file_id.file_id(db);
acc.push(
Diagnostic::new(
DiagnosticCode::Clippy("redundant_field_names"),
"Shorthand struct initialization",
- FileRange { file_id: file_id.into(), range: field_range },
+ FileRange { file_id: vfs_file_id, range: field_range },
)
.with_fixes(Some(vec![fix(
"use_expr_field_shorthand",
"Use struct shorthand initialization",
- SourceChange::from_text_edit(file_id, edit),
+ SourceChange::from_text_edit(vfs_file_id, edit),
field_range,
)])),
);
@@ -66,6 +70,7 @@ fn check_expr_field_shorthand(
}
fn check_pat_field_shorthand(
+ db: &RootDatabase,
acc: &mut Vec<Diagnostic>,
file_id: EditionedFileId,
record_pat: ast::RecordPat,
@@ -93,16 +98,17 @@ fn check_pat_field_shorthand(
let edit = edit_builder.finish();
let field_range = record_pat_field.syntax().text_range();
+ let vfs_file_id = file_id.file_id(db);
acc.push(
Diagnostic::new(
DiagnosticCode::Clippy("redundant_field_names"),
"Shorthand struct pattern",
- FileRange { file_id: file_id.into(), range: field_range },
+ FileRange { file_id: vfs_file_id, range: field_range },
)
.with_fixes(Some(vec![fix(
"use_pat_field_shorthand",
"Use struct field shorthand",
- SourceChange::from_text_edit(file_id, edit),
+ SourceChange::from_text_edit(vfs_file_id, edit),
field_range,
)])),
);
diff --git a/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs b/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs
index 3fa8c0133a..b79894dd15 100644
--- a/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs
+++ b/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs
@@ -63,7 +63,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::GenericArgsProhibited) -> Option
Some(vec![fix(
"remove_generic_args",
"Remove these generics",
- SourceChange::from_text_edit(file_id, TextEdit::delete(range)),
+ SourceChange::from_text_edit(file_id.file_id(ctx.sema.db), TextEdit::delete(range)),
syntax.syntax().text_range(),
)])
}
diff --git a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
index 38eeecb29b..ac1b599c49 100644
--- a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
+++ b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
@@ -128,14 +128,15 @@ pub(crate) fn json_in_items(
state.has_serialize = serialize_resolved.is_some();
state.build_struct("Root", &it);
edit.insert(range.start(), state.result);
+ let vfs_file_id = file_id.file_id(sema.db);
acc.push(
Diagnostic::new(
DiagnosticCode::Ra("json-is-not-rust", Severity::WeakWarning),
"JSON syntax is not valid as a Rust item",
- FileRange { file_id: file_id.into(), range },
+ FileRange { file_id: vfs_file_id, range },
)
.with_fixes(Some(vec![{
- let mut scb = SourceChangeBuilder::new(file_id);
+ let mut scb = SourceChangeBuilder::new(vfs_file_id);
let scope = match import_scope {
ImportScope::File(it) => ImportScope::File(scb.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)),
@@ -183,7 +184,7 @@ pub(crate) fn json_in_items(
}
}
let mut sc = scb.finish();
- sc.insert_source_edit(file_id, edit.finish());
+ sc.insert_source_edit(vfs_file_id, edit.finish());
fix("convert_json_to_struct", "Convert JSON to struct", sc, range)
}])),
);
diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 2d19045bc4..220f4e0a03 100644
--- a/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -1,6 +1,6 @@
use either::Either;
use hir::{
- AssocItem, HirDisplay, HirFileIdExt, ImportPathConfig, InFile, Type,
+ AssocItem, HirDisplay, ImportPathConfig, InFile, Type,
db::{ExpandDatabase, HirDatabase},
sym,
};
@@ -85,7 +85,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
Some(vec![fix(
"fill_missing_fields",
"Fill struct fields",
- SourceChange::from_text_edit(range.file_id, edit),
+ SourceChange::from_text_edit(range.file_id.file_id(ctx.sema.db), edit),
range.range,
)])
};
@@ -207,7 +207,10 @@ fn get_default_constructor(
}
}
- let krate = ctx.sema.file_to_module_def(d.file.original_file(ctx.sema.db))?.krate();
+ let krate = ctx
+ .sema
+ .file_to_module_def(d.file.original_file(ctx.sema.db).file_id(ctx.sema.db))?
+ .krate();
let module = krate.root_module();
// Look for a ::new() associated function
diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index c851a9c239..3c36b455ca 100644
--- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -1,5 +1,5 @@
use hir::db::ExpandDatabase;
-use hir::{HirFileIdExt, UnsafeLint, UnsafetyReason};
+use hir::{UnsafeLint, UnsafetyReason};
use ide_db::text_edit::TextEdit;
use ide_db::{assists::Assist, source_change::SourceChange};
use syntax::{AstNode, match_ast};
@@ -51,8 +51,10 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Option<Vec<Ass
let replacement = format!("unsafe {{ {} }}", node_to_add_unsafe_block.text());
let edit = TextEdit::replace(node_to_add_unsafe_block.text_range(), replacement);
- let source_change =
- SourceChange::from_text_edit(d.node.file_id.original_file(ctx.sema.db), edit);
+ let source_change = SourceChange::from_text_edit(
+ d.node.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
+ edit,
+ );
Some(vec![fix("add_unsafe", "Add unsafe block", source_change, expr.syntax().text_range())])
}
diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs
index 4f528dea55..5d25f2c6a9 100644
--- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs
+++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs
@@ -39,7 +39,7 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Option
Some(vec![fix(
"add_mut",
"Change it to be mutable",
- SourceChange::from_text_edit(file_id, edit),
+ SourceChange::from_text_edit(file_id.file_id(ctx.sema.db), edit),
use_range,
)])
})();
@@ -82,7 +82,7 @@ pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Op
Some(vec![fix(
"remove_mut",
"Remove unnecessary `mut`",
- SourceChange::from_text_edit(file_id, edit),
+ SourceChange::from_text_edit(file_id.file_id(ctx.sema.db), edit),
use_range,
)])
})();
diff --git a/crates/ide-diagnostics/src/handlers/no_such_field.rs b/crates/ide-diagnostics/src/handlers/no_such_field.rs
index 7f84389bcd..fa3347aa12 100644
--- a/crates/ide-diagnostics/src/handlers/no_such_field.rs
+++ b/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ -1,5 +1,5 @@
use either::Either;
-use hir::{HasSource, HirDisplay, HirFileIdExt, Semantics, VariantId, db::ExpandDatabase};
+use hir::{HasSource, HirDisplay, Semantics, VariantId, db::ExpandDatabase};
use ide_db::text_edit::TextEdit;
use ide_db::{EditionedFileId, RootDatabase, source_change::SourceChange};
use syntax::{
@@ -108,7 +108,7 @@ fn missing_record_expr_field_fixes(
}
let source_change = SourceChange::from_text_edit(
- def_file_id,
+ def_file_id.file_id(sema.db),
TextEdit::insert(last_field_syntax.text_range().end(), new_field),
);
diff --git a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs
index a95cce5ffd..6b78645002 100644
--- a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs
+++ b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs
@@ -49,7 +49,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveTrailingReturn) -> Option<Vec<A
let replacement =
return_expr.expr().map_or_else(String::new, |expr| format!("{}", expr.syntax().text()));
let edit = TextEdit::replace(range, replacement);
- let source_change = SourceChange::from_text_edit(file_id, edit);
+ let source_change = SourceChange::from_text_edit(file_id.file_id(ctx.sema.db), edit);
Some(vec![fix(
"remove_trailing_return",
diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs
index d055d36c2a..8d717b9093 100644
--- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs
+++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs
@@ -1,4 +1,4 @@
-use hir::{HirFileIdExt, db::ExpandDatabase, diagnostics::RemoveUnnecessaryElse};
+use hir::{db::ExpandDatabase, diagnostics::RemoveUnnecessaryElse};
use ide_db::text_edit::TextEdit;
use ide_db::{assists::Assist, source_change::SourceChange};
use itertools::Itertools;
@@ -90,8 +90,10 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option<Vec<
};
let edit = TextEdit::replace(range, replacement);
- let source_change =
- SourceChange::from_text_edit(d.if_expr.file_id.original_file(ctx.sema.db), edit);
+ let source_change = SourceChange::from_text_edit(
+ d.if_expr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
+ edit,
+ );
Some(vec![fix(
"remove_unnecessary_else",
diff --git a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
index 6de22455f7..6b335c52de 100644
--- a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
+++ b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
@@ -1,4 +1,4 @@
-use hir::{HirFileIdExt, InFile, db::ExpandDatabase};
+use hir::{InFile, db::ExpandDatabase};
use ide_db::source_change::SourceChange;
use ide_db::text_edit::TextEdit;
use syntax::{
@@ -43,7 +43,8 @@ fn fixes(
let edit = TextEdit::replace(range_to_replace, replacement);
- let source_change = SourceChange::from_text_edit(d.file.original_file(ctx.sema.db), edit);
+ let source_change =
+ SourceChange::from_text_edit(d.file.original_file(ctx.sema.db).file_id(ctx.sema.db), edit);
Some(vec![fix(
"replace_with_find_map",
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
index 9f1901ff95..d5c4bcf768 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -54,10 +54,12 @@ pub(crate) fn trait_impl_redundant_assoc_item(
}
};
+ let hir::FileRange { file_id, range } =
+ hir::InFile::new(d.file_id, diagnostic_range).original_node_file_range_rooted(db);
Diagnostic::new(
DiagnosticCode::RustcHardError("E0407"),
format!("{redundant_item_name} is not a member of trait `{trait_name}`"),
- hir::InFile::new(d.file_id, diagnostic_range).original_node_file_range_rooted(db),
+ ide_db::FileRange { file_id: file_id.file_id(ctx.sema.db), range },
)
.with_fixes(quickfix_for_redundant_assoc_item(
ctx,
@@ -93,7 +95,7 @@ fn quickfix_for_redundant_assoc_item(
Some(())
};
let file_id = d.file_id.file_id()?;
- let mut source_change_builder = SourceChangeBuilder::new(file_id);
+ let mut source_change_builder = SourceChangeBuilder::new(file_id.file_id(ctx.sema.db));
add_assoc_item_def(&mut source_change_builder)?;
Some(vec![Assist {
diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index 591213d983..8f6ed1a7bd 100644
--- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -1,5 +1,5 @@
use either::Either;
-use hir::{CallableKind, ClosureStyle, HirDisplay, HirFileIdExt, InFile, db::ExpandDatabase};
+use hir::{CallableKind, ClosureStyle, HirDisplay, InFile, db::ExpandDatabase};
use ide_db::{
famous_defs::FamousDefs,
source_change::{SourceChange, SourceChangeBuilder},
@@ -150,7 +150,7 @@ fn add_missing_ok_or_some(
}
let source_change = SourceChange::from_text_edit(
- expr_ptr.file_id.original_file(ctx.sema.db),
+ expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
builder.finish(),
);
let name = format!("Insert {variant_name}(()) as the tail of this block");
@@ -164,7 +164,7 @@ fn add_missing_ok_or_some(
builder
.insert(ret_expr.syntax().text_range().end(), format!(" {variant_name}(())"));
let source_change = SourceChange::from_text_edit(
- expr_ptr.file_id.original_file(ctx.sema.db),
+ expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
builder.finish(),
);
let name = format!("Insert {variant_name}(()) as the return value");
@@ -177,8 +177,10 @@ fn add_missing_ok_or_some(
let mut builder = TextEdit::builder();
builder.insert(expr.syntax().text_range().start(), format!("{variant_name}("));
builder.insert(expr.syntax().text_range().end(), ")".to_owned());
- let source_change =
- SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), builder.finish());
+ let source_change = SourceChange::from_text_edit(
+ expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
+ builder.finish(),
+ );
let name = format!("Wrap in {variant_name}");
acc.push(fix("wrap_in_constructor", &name, source_change, expr_range));
Some(())
@@ -220,7 +222,7 @@ fn remove_unnecessary_wrapper(
let inner_arg = call_expr.arg_list()?.args().next()?;
let file_id = expr_ptr.file_id.original_file(db);
- let mut builder = SourceChangeBuilder::new(file_id);
+ let mut builder = SourceChangeBuilder::new(file_id.file_id(ctx.sema.db));
let mut editor;
match inner_arg {
// We're returning `()`
@@ -256,7 +258,7 @@ fn remove_unnecessary_wrapper(
}
}
- builder.add_file_edits(file_id, editor);
+ builder.add_file_edits(file_id.file_id(ctx.sema.db), editor);
let name = format!("Remove unnecessary {}() wrapper", variant.name(db).as_str());
acc.push(fix(
"remove_unnecessary_wrapper",
@@ -288,8 +290,10 @@ fn remove_semicolon(
let semicolon_range = expr_before_semi.semicolon_token()?.text_range();
let edit = TextEdit::delete(semicolon_range);
- let source_change =
- SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), edit);
+ let source_change = SourceChange::from_text_edit(
+ expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
+ edit,
+ );
acc.push(fix("remove_semicolon", "Remove this semicolon", source_change, semicolon_range));
Some(())
@@ -316,8 +320,10 @@ fn str_ref_to_owned(
let to_owned = ".to_owned()".to_owned();
let edit = TextEdit::insert(expr.syntax().text_range().end(), to_owned);
- let source_change =
- SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), edit);
+ let source_change = SourceChange::from_text_edit(
+ expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
+ edit,
+ );
acc.push(fix("str_ref_to_owned", "Add .to_owned() here", source_change, expr_range));
Some(())
diff --git a/crates/ide-diagnostics/src/handlers/typed_hole.rs b/crates/ide-diagnostics/src/handlers/typed_hole.rs
index 0bef91f337..277aff2e08 100644
--- a/crates/ide-diagnostics/src/handlers/typed_hole.rs
+++ b/crates/ide-diagnostics/src/handlers/typed_hole.rs
@@ -83,7 +83,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>
group: Some(GroupLabel("Replace `_` with a term".to_owned())),
target: original_range.range,
source_change: Some(SourceChange::from_text_edit(
- original_range.file_id,
+ original_range.file_id.file_id(ctx.sema.db),
TextEdit::replace(original_range.range, code),
)),
command: None,
diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index 03bc49abdb..47fa305936 100644
--- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -105,7 +105,8 @@ fn fixes(
let root_module = &crate_def_map[DefMap::ROOT];
let Some(root_file_id) = root_module.origin.file_id() else { continue };
- let Some(crate_root_path) = source_root.path_for_file(&root_file_id.file_id()) else {
+ let Some(crate_root_path) = source_root.path_for_file(&root_file_id.file_id(ctx.sema.db))
+ else {
continue;
};
let Some(rel) = parent.strip_prefix(&crate_root_path.parent()?) else { continue };
@@ -131,7 +132,12 @@ fn fixes(
let InFile { file_id: parent_file_id, value: source } =
current.definition_source(ctx.sema.db);
let parent_file_id = parent_file_id.file_id()?;
- return make_fixes(parent_file_id.file_id(), source, &module_name, trigger_range);
+ return make_fixes(
+ parent_file_id.file_id(ctx.sema.db),
+ source,
+ &module_name,
+ trigger_range,
+ );
}
// if we aren't adding to a crate root, walk backwards such that we support `#[path = ...]` overrides if possible
@@ -152,7 +158,8 @@ fn fixes(
'crates: for &krate in relevant_crates.iter() {
let crate_def_map = ctx.sema.db.crate_def_map(krate);
let Some((_, module)) = crate_def_map.modules().find(|(_, module)| {
- module.origin.file_id().map(Into::into) == Some(parent_id) && !module.origin.is_inline()
+ module.origin.file_id().map(|file_id| file_id.file_id(ctx.sema.db)) == Some(parent_id)
+ && !module.origin.is_inline()
}) else {
continue;
};
@@ -182,7 +189,12 @@ fn fixes(
let InFile { file_id: parent_file_id, value: source } =
current.definition_source(ctx.sema.db);
let parent_file_id = parent_file_id.file_id()?;
- return make_fixes(parent_file_id.file_id(), source, &module_name, trigger_range);
+ return make_fixes(
+ parent_file_id.file_id(ctx.sema.db),
+ source,
+ &module_name,
+ trigger_range,
+ );
}
}
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index 986ebb8818..a4f4813cf5 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -119,7 +119,7 @@ fn add_variant_to_union(
let (offset, record_field) =
record_field_layout(None, field_name, suggested_type, field_list, adt_syntax.value)?;
- let mut src_change_builder = SourceChangeBuilder::new(range.file_id);
+ let mut src_change_builder = SourceChangeBuilder::new(range.file_id.file_id(ctx.sema.db));
src_change_builder.insert(offset, record_field);
Some(Assist {
id: AssistId::quick_fix("add-variant-to-union"),
@@ -165,7 +165,8 @@ fn add_field_to_struct_fix(
struct_syntax.value,
)?;
- let mut src_change_builder = SourceChangeBuilder::new(struct_range.file_id);
+ let mut src_change_builder =
+ SourceChangeBuilder::new(struct_range.file_id.file_id(ctx.sema.db));
// FIXME: Allow for choosing a visibility modifier see https://github.com/rust-lang/rust-analyzer/issues/11563
src_change_builder.insert(offset, record_field);
@@ -180,7 +181,8 @@ fn add_field_to_struct_fix(
}
None => {
// Add a field list to the Unit Struct
- let mut src_change_builder = SourceChangeBuilder::new(struct_range.file_id);
+ let mut src_change_builder =
+ SourceChangeBuilder::new(struct_range.file_id.file_id(ctx.sema.db));
let field_name = match field_name.chars().next() {
// FIXME : See match arm below regarding tuple structs.
Some(ch) if ch.is_numeric() => return None,
@@ -270,7 +272,7 @@ fn method_fix(
group: None,
target: range,
source_change: Some(SourceChange::from_text_edit(
- file_id,
+ file_id.file_id(ctx.sema.db),
TextEdit::insert(range.end(), "()".to_owned()),
)),
command: None,
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index b5d7aa113a..4422d8f826 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -101,8 +101,8 @@ fn field_fix(
group: None,
target: range,
source_change: Some(SourceChange::from_iter([
- (file_id.into(), TextEdit::insert(range.start(), "(".to_owned())),
- (file_id.into(), TextEdit::insert(range.end(), ")".to_owned())),
+ (file_id.file_id(ctx.sema.db), TextEdit::insert(range.start(), "(".to_owned())),
+ (file_id.file_id(ctx.sema.db), TextEdit::insert(range.end(), ")".to_owned())),
])),
command: None,
})
@@ -182,7 +182,7 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
group: None,
target: range,
source_change: Some(SourceChange::from_text_edit(
- file_id,
+ file_id.file_id(ctx.sema.db),
TextEdit::replace(range, assoc_func_call_expr_string),
)),
command: None,
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/crates/ide-diagnostics/src/handlers/unresolved_module.rs
index 7b9dd55e01..599cabe3e4 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_module.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_module.rs
@@ -1,4 +1,4 @@
-use hir::{HirFileIdExt, db::ExpandDatabase};
+use hir::db::ExpandDatabase;
use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit};
use itertools::Itertools;
use syntax::AstNode;
@@ -43,7 +43,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedModule) -> Option<Vec<
&format!("Create module at `{candidate}`"),
FileSystemEdit::CreateFile {
dst: AnchoredPathBuf {
- anchor: d.decl.file_id.original_file(ctx.sema.db).file_id(),
+ anchor: d.decl.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
path: candidate.clone(),
},
initial_contents: "".to_owned(),
diff --git a/crates/ide-diagnostics/src/handlers/unused_variables.rs b/crates/ide-diagnostics/src/handlers/unused_variables.rs
index 5857b1dab2..77b1075ea5 100644
--- a/crates/ide-diagnostics/src/handlers/unused_variables.rs
+++ b/crates/ide-diagnostics/src/handlers/unused_variables.rs
@@ -46,7 +46,7 @@ pub(crate) fn unused_variables(
ctx.sema.db,
var_name,
it.range,
- diagnostic_range.into(),
+ diagnostic_range,
ast.file_id.is_macro(),
ctx.edition,
)
diff --git a/crates/ide-diagnostics/src/handlers/useless_braces.rs b/crates/ide-diagnostics/src/handlers/useless_braces.rs
index ac9b34f416..e0ea4b3373 100644
--- a/crates/ide-diagnostics/src/handlers/useless_braces.rs
+++ b/crates/ide-diagnostics/src/handlers/useless_braces.rs
@@ -1,4 +1,5 @@
use hir::InFile;
+use ide_db::RootDatabase;
use ide_db::text_edit::TextEdit;
use ide_db::{EditionedFileId, FileRange, source_change::SourceChange};
use itertools::Itertools;
@@ -10,6 +11,7 @@ use crate::{Diagnostic, DiagnosticCode, fix};
//
// Diagnostic for unnecessary braces in `use` items.
pub(crate) fn useless_braces(
+ db: &RootDatabase,
acc: &mut Vec<Diagnostic>,
file_id: EditionedFileId,
node: &SyntaxNode,
@@ -38,13 +40,13 @@ pub(crate) fn useless_braces(
Diagnostic::new(
DiagnosticCode::RustcLint("unused_braces"),
"Unnecessary braces in use statement".to_owned(),
- FileRange { file_id: file_id.into(), range: use_range },
+ FileRange { file_id: file_id.file_id(db), range: use_range },
)
.with_main_node(InFile::new(file_id.into(), SyntaxNodePtr::new(node)))
.with_fixes(Some(vec![fix(
"remove_braces",
"Remove unnecessary braces",
- SourceChange::from_text_edit(file_id, edit),
+ SourceChange::from_text_edit(file_id.file_id(db), edit),
use_range,
)])),
);
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index e667d484be..d367dd929b 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -86,7 +86,6 @@ use hir::{
Crate, DisplayTarget, HirFileId, InFile, Semantics, db::ExpandDatabase,
diagnostics::AnyDiagnostic,
};
-use ide_db::base_db::salsa::AsDynDatabase;
use ide_db::{
EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, Severity, SnippetCap,
assists::{Assist, AssistId, AssistResolveStrategy},
@@ -303,8 +302,11 @@ impl DiagnosticsContext<'_> {
}
}
})()
+ .map(|frange| ide_db::FileRange {
+ file_id: frange.file_id.file_id(self.sema.db),
+ range: frange.range,
+ })
.unwrap_or_else(|| sema.diagnostics_display_range(*node))
- .into()
}
}
@@ -323,15 +325,12 @@ pub fn syntax_diagnostics(
let sema = Semantics::new(db);
let editioned_file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
-
- let (file_id, _) = editioned_file_id.unpack();
+ .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
- let editioned_file_id_wrapper =
- ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
+ let (file_id, _) = editioned_file_id.unpack(db);
// [#3434] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily.
- db.parse_errors(editioned_file_id_wrapper)
+ db.parse_errors(editioned_file_id)
.into_iter()
.flatten()
.take(128)
@@ -357,22 +356,19 @@ pub fn semantic_diagnostics(
let sema = Semantics::new(db);
let editioned_file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
-
- let (file_id, edition) = editioned_file_id.unpack();
- let editioned_file_id_wrapper =
- ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
+ .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+ let (file_id, edition) = editioned_file_id.unpack(db);
let mut res = Vec::new();
- let parse = sema.parse(editioned_file_id_wrapper);
+ let parse = sema.parse(editioned_file_id);
// FIXME: This iterates the entire file which is a rather expensive operation.
// We should implement these differently in some form?
// Salsa caching + incremental re-parse would be better here
for node in parse.syntax().descendants() {
- handlers::useless_braces::useless_braces(&mut res, editioned_file_id, &node);
- handlers::field_shorthand::field_shorthand(&mut res, editioned_file_id, &node);
+ handlers::useless_braces::useless_braces(db, &mut res, editioned_file_id, &node);
+ handlers::field_shorthand::field_shorthand(db, &mut res, editioned_file_id, &node);
handlers::json_is_not_rust::json_in_items(
&sema,
&mut res,
@@ -408,11 +404,13 @@ pub fn semantic_diagnostics(
// A bunch of parse errors in a file indicate some bigger structural parse changes in the
// file, so we skip semantic diagnostics so we can show these faster.
Some(m) => {
- if db.parse_errors(editioned_file_id_wrapper).is_none_or(|es| es.len() < 16) {
+ if db.parse_errors(editioned_file_id).is_none_or(|es| es.len() < 16) {
m.diagnostics(db, &mut diags, config.style_lints);
}
}
- None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, editioned_file_id.file_id()),
+ None => {
+ handlers::unlinked_file::unlinked_file(&ctx, &mut res, editioned_file_id.file_id(db))
+ }
}
for diag in diags {
@@ -530,7 +528,7 @@ pub fn semantic_diagnostics(
&mut FxHashMap::default(),
&mut lints,
&mut Vec::new(),
- editioned_file_id.edition(),
+ editioned_file_id.edition(db),
);
res.retain(|d| d.severity != Severity::Allow);
@@ -573,8 +571,7 @@ fn handle_diag_from_macros(
let mut spans = span_map.spans_for_range(node.text_range());
if spans.any(|span| {
span.ctx.outer_expn(sema.db).is_some_and(|expansion| {
- let macro_call =
- sema.db.lookup_intern_macro_call(expansion.as_macro_file().macro_call_id);
+ let macro_call = sema.db.lookup_intern_macro_call(expansion.into());
// We don't want to show diagnostics for non-local macros at all, but proc macros authors
// seem to rely on being able to emit non-warning-free code, so we don't want to show warnings
// for them even when the proc macro comes from the same workspace (in rustc that's not a
@@ -1006,8 +1003,8 @@ fn adjusted_display_range<N: AstNode>(
) -> FileRange {
let source_file = ctx.sema.parse_or_expand(diag_ptr.file_id);
let node = diag_ptr.value.to_node(&source_file);
- diag_ptr
+ let hir::FileRange { file_id, range } = diag_ptr
.with_value(adj(node).unwrap_or_else(|| diag_ptr.value.text_range()))
- .original_node_file_range_rooted(ctx.sema.db)
- .into()
+ .original_node_file_range_rooted(ctx.sema.db);
+ ide_db::FileRange { file_id: file_id.file_id(ctx.sema.db), range }
}
diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs
index 64710ea58b..13d08d46de 100644
--- a/crates/ide-diagnostics/src/tests.rs
+++ b/crates/ide-diagnostics/src/tests.rs
@@ -75,7 +75,7 @@ fn check_nth_fix_with_config(
&db,
&config,
&AssistResolveStrategy::All,
- file_position.file_id.into(),
+ file_position.file_id.file_id(&db),
)
.pop()
.expect("no diagnostics");
@@ -128,7 +128,7 @@ pub(crate) fn check_has_fix(
&db,
&conf,
&AssistResolveStrategy::All,
- file_position.file_id.into(),
+ file_position.file_id.file_id(&db),
)
.into_iter()
.find(|d| {
@@ -175,7 +175,7 @@ pub(crate) fn check_has_single_fix(
&db,
&conf,
&AssistResolveStrategy::All,
- file_position.file_id.into(),
+ file_position.file_id.file_id(&db),
)
.into_iter()
.find(|d| {
@@ -216,7 +216,7 @@ pub(crate) fn check_no_fix(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
&db,
&DiagnosticsConfig::test_sample(),
&AssistResolveStrategy::All,
- file_position.file_id.into(),
+ file_position.file_id.file_id(&db),
)
.pop()
.unwrap();
@@ -250,7 +250,7 @@ pub(crate) fn check_diagnostics_with_config(
.iter()
.copied()
.flat_map(|file_id| {
- super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.into())
+ super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.file_id(&db))
.into_iter()
.map(|d| {
let mut annotation = String::new();
@@ -272,7 +272,7 @@ pub(crate) fn check_diagnostics_with_config(
.map(|(diagnostic, annotation)| (diagnostic.file_id, (diagnostic.range, annotation)))
.into_group_map();
for file_id in files {
- let file_id = file_id.into();
+ let file_id = file_id.file_id(&db);
let line_index = db.line_index(file_id);
let mut actual = annotations.remove(&file_id).unwrap_or_default();
@@ -317,7 +317,7 @@ fn test_disabled_diagnostics() {
config.disabled.insert("E0583".into());
let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);
- let file_id = file_id.into();
+ let file_id = file_id.file_id(&db);
let diagnostics = super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
assert!(diagnostics.is_empty());
diff --git a/crates/ide-ssr/src/from_comment.rs b/crates/ide-ssr/src/from_comment.rs
index 5921a5df53..181cc74a51 100644
--- a/crates/ide-ssr/src/from_comment.rs
+++ b/crates/ide-ssr/src/from_comment.rs
@@ -1,10 +1,7 @@
//! This module allows building an SSR MatchFinder by parsing the SSR rule
//! from a comment.
-use ide_db::{
- EditionedFileId, FilePosition, FileRange, RootDatabase,
- base_db::{RootQueryDb, salsa::AsDynDatabase},
-};
+use ide_db::{EditionedFileId, FilePosition, FileRange, RootDatabase, base_db::RootQueryDb};
use syntax::{
TextRange,
ast::{self, AstNode, AstToken},
@@ -20,9 +17,7 @@ pub fn ssr_from_comment(
frange: FileRange,
) -> Option<(MatchFinder<'_>, TextRange)> {
let comment = {
- let editioned_file_id = EditionedFileId::current_edition(frange.file_id);
- let file_id =
- ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
+ let file_id = EditionedFileId::current_edition(db, frange.file_id);
let file = db.parse(file_id);
file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast)
diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs
index 757c77b673..339c199ec2 100644
--- a/crates/ide-ssr/src/lib.rs
+++ b/crates/ide-ssr/src/lib.rs
@@ -82,10 +82,7 @@ use crate::{errors::bail, matching::MatchFailureReason};
use hir::{FileRange, Semantics};
use ide_db::symbol_index::SymbolsDatabase;
use ide_db::text_edit::TextEdit;
-use ide_db::{
- EditionedFileId, FileId, FxHashMap, RootDatabase,
- base_db::{SourceDatabase, salsa::AsDynDatabase},
-};
+use ide_db::{EditionedFileId, FileId, FxHashMap, RootDatabase, base_db::SourceDatabase};
use resolving::ResolvedRule;
use syntax::{AstNode, SyntaxNode, TextRange, ast};
@@ -130,7 +127,7 @@ impl<'db> MatchFinder<'db> {
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(lookup_context.file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(lookup_context.file_id));
+ .unwrap_or_else(|| EditionedFileId::current_edition(db, lookup_context.file_id));
let resolution_scope = resolving::ResolutionScope::new(
&sema,
hir::FilePosition { file_id, offset: lookup_context.offset },
@@ -176,7 +173,7 @@ impl<'db> MatchFinder<'db> {
let mut matches_by_file = FxHashMap::default();
for m in self.matches().matches {
matches_by_file
- .entry(m.range.file_id.file_id())
+ .entry(m.range.file_id.file_id(self.sema.db))
.or_insert_with(SsrMatches::default)
.matches
.push(m);
@@ -228,12 +225,9 @@ impl<'db> MatchFinder<'db> {
file_id: EditionedFileId,
snippet: &str,
) -> Vec<MatchDebugInfo> {
- let editioned_file_id_wrapper =
- ide_db::base_db::EditionedFileId::new(self.sema.db.as_dyn_database(), file_id);
-
- let file = self.sema.parse(editioned_file_id_wrapper);
+ let file = self.sema.parse(file_id);
let mut res = Vec::new();
- let file_text = self.sema.db.file_text(file_id.into()).text(self.sema.db);
+ let file_text = self.sema.db.file_text(file_id.file_id(self.sema.db)).text(self.sema.db);
let mut remaining_text = &*file_text;
let mut base = 0;
let len = snippet.len() as u32;
diff --git a/crates/ide-ssr/src/matching.rs b/crates/ide-ssr/src/matching.rs
index 84a5943b1c..cff4eede04 100644
--- a/crates/ide-ssr/src/matching.rs
+++ b/crates/ide-ssr/src/matching.rs
@@ -806,10 +806,20 @@ mod tests {
let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";
let (db, position, selections) = crate::tests::single_file(input);
+ let position = ide_db::FilePosition {
+ file_id: position.file_id.file_id(&db),
+ offset: position.offset,
+ };
let mut match_finder = MatchFinder::in_context(
&db,
- position.into(),
- selections.into_iter().map(Into::into).collect(),
+ position,
+ selections
+ .into_iter()
+ .map(|frange| ide_db::FileRange {
+ file_id: frange.file_id.file_id(&db),
+ range: frange.range,
+ })
+ .collect(),
)
.unwrap();
match_finder.add_rule(rule).unwrap();
@@ -820,7 +830,7 @@ mod tests {
let edits = match_finder.edits();
assert_eq!(edits.len(), 1);
- let edit = &edits[&position.file_id.into()];
+ let edit = &edits[&position.file_id];
let mut after = input.to_owned();
edit.apply(&mut after);
assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }");
diff --git a/crates/ide-ssr/src/replacing.rs b/crates/ide-ssr/src/replacing.rs
index 2ad562f242..3c92697926 100644
--- a/crates/ide-ssr/src/replacing.rs
+++ b/crates/ide-ssr/src/replacing.rs
@@ -34,7 +34,7 @@ fn matches_to_edit_at_offset(
for m in &matches.matches {
edit_builder.replace(
m.range.range.checked_sub(relative_start).unwrap(),
- render_replace(db, m, file_src, rules, m.range.file_id.edition()),
+ render_replace(db, m, file_src, rules, m.range.file_id.edition(db)),
);
}
edit_builder.finish()
diff --git a/crates/ide-ssr/src/resolving.rs b/crates/ide-ssr/src/resolving.rs
index ba6d981b8c..a687db4bf5 100644
--- a/crates/ide-ssr/src/resolving.rs
+++ b/crates/ide-ssr/src/resolving.rs
@@ -1,7 +1,7 @@
//! This module is responsible for resolving paths within rules.
use hir::AsAssocItem;
-use ide_db::{FxHashMap, base_db::salsa::AsDynDatabase};
+use ide_db::FxHashMap;
use parsing::Placeholder;
use syntax::{
SmolStr, SyntaxKind, SyntaxNode, SyntaxToken,
@@ -198,12 +198,7 @@ impl<'db> ResolutionScope<'db> {
resolve_context: hir::FilePosition,
) -> Option<ResolutionScope<'db>> {
use syntax::ast::AstNode;
- let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
- sema.db.as_dyn_database(),
- resolve_context.file_id,
- );
-
- let file = sema.parse(editioned_file_id_wrapper);
+ let file = sema.parse(resolve_context.file_id);
// Find a node at the requested position, falling back to the whole file.
let node = file
.syntax()
diff --git a/crates/ide-ssr/src/search.rs b/crates/ide-ssr/src/search.rs
index 73dbefb51b..d89911fca4 100644
--- a/crates/ide-ssr/src/search.rs
+++ b/crates/ide-ssr/src/search.rs
@@ -7,7 +7,6 @@ use crate::{
use hir::FileRange;
use ide_db::{
EditionedFileId, FileId, FxHashSet,
- base_db::salsa::AsDynDatabase,
defs::Definition,
search::{SearchScope, UsageSearchResult},
};
@@ -74,12 +73,7 @@ impl MatchFinder<'_> {
resolved_path: &ResolvedPath,
file_range: FileRange,
) -> Vec<SyntaxNode> {
- let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
- self.sema.db.as_dyn_database(),
- file_range.file_id,
- );
-
- let file = self.sema.parse(editioned_file_id_wrapper);
+ let file = self.sema.parse(file_range.file_id);
let depth = resolved_path.depth as usize;
let offset = file_range.range.start();
@@ -144,7 +138,7 @@ impl MatchFinder<'_> {
files.push(
self.sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(file_id)),
+ .unwrap_or_else(|| EditionedFileId::current_edition(self.sema.db, file_id)),
);
});
SearchScope::files(&files)
@@ -235,7 +229,9 @@ impl MatchFinder<'_> {
}
let Some(node_range) = self.sema.original_range_opt(code) else { return false };
for range in &self.restrict_ranges {
- if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) {
+ if range.file_id == node_range.file_id.file_id(self.sema.db)
+ && range.range.contains_range(node_range.range)
+ {
return true;
}
}
diff --git a/crates/ide-ssr/src/tests.rs b/crates/ide-ssr/src/tests.rs
index b26ea35f02..46b633b8a3 100644
--- a/crates/ide-ssr/src/tests.rs
+++ b/crates/ide-ssr/src/tests.rs
@@ -98,10 +98,18 @@ fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
let (db, position, selections) = single_file(input);
+ let position =
+ ide_db::FilePosition { file_id: position.file_id.file_id(&db), offset: position.offset };
let mut match_finder = MatchFinder::in_context(
&db,
- position.into(),
- selections.into_iter().map(Into::into).collect(),
+ position,
+ selections
+ .into_iter()
+ .map(|selection| ide_db::FileRange {
+ file_id: selection.file_id.file_id(&db),
+ range: selection.range,
+ })
+ .collect(),
)
.unwrap();
for rule in rules {
@@ -114,8 +122,8 @@ fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
}
// Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters
// stuff.
- let mut actual = db.file_text(position.file_id.into()).text(&db).to_string();
- edits[&position.file_id.into()].apply(&mut actual);
+ let mut actual = db.file_text(position.file_id).text(&db).to_string();
+ edits[&position.file_id].apply(&mut actual);
expected.assert_eq(&actual);
}
@@ -136,8 +144,14 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
let (db, position, selections) = single_file(code);
let mut match_finder = MatchFinder::in_context(
&db,
- position.into(),
- selections.into_iter().map(Into::into).collect(),
+ ide_db::FilePosition { file_id: position.file_id.file_id(&db), offset: position.offset },
+ selections
+ .into_iter()
+ .map(|selection| ide_db::FileRange {
+ file_id: selection.file_id.file_id(&db),
+ range: selection.range,
+ })
+ .collect(),
)
.unwrap();
match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
@@ -153,8 +167,14 @@ fn assert_no_match(pattern: &str, code: &str) {
let (db, position, selections) = single_file(code);
let mut match_finder = MatchFinder::in_context(
&db,
- position.into(),
- selections.into_iter().map(Into::into).collect(),
+ ide_db::FilePosition { file_id: position.file_id.file_id(&db), offset: position.offset },
+ selections
+ .into_iter()
+ .map(|selection| ide_db::FileRange {
+ file_id: selection.file_id.file_id(&db),
+ range: selection.range,
+ })
+ .collect(),
)
.unwrap();
match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
@@ -169,8 +189,14 @@ fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expecte
let (db, position, selections) = single_file(code);
let mut match_finder = MatchFinder::in_context(
&db,
- position.into(),
- selections.into_iter().map(Into::into).collect(),
+ ide_db::FilePosition { file_id: position.file_id.file_id(&db), offset: position.offset },
+ selections
+ .into_iter()
+ .map(|selection| ide_db::FileRange {
+ file_id: selection.file_id.file_id(&db),
+ range: selection.range,
+ })
+ .collect(),
)
.unwrap();
match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
diff --git a/crates/ide/src/annotations.rs b/crates/ide/src/annotations.rs
index d44d4bfd2b..3d71da985b 100644
--- a/crates/ide/src/annotations.rs
+++ b/crates/ide/src/annotations.rs
@@ -149,7 +149,7 @@ pub(crate) fn annotations(
source_file_id: FileId,
) -> Option<(TextRange, Option<TextRange>)> {
if let Some(InRealFile { file_id, value }) = node.original_ast_node_rooted(db) {
- if file_id == source_file_id {
+ if file_id.file_id(db) == source_file_id {
return Some((
value.syntax().text_range(),
value.name().map(|name| name.syntax().text_range()),
diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs
index 5c0e113d5c..4b8d07a253 100644
--- a/crates/ide/src/call_hierarchy.rs
+++ b/crates/ide/src/call_hierarchy.rs
@@ -76,9 +76,9 @@ pub(crate) fn incoming_calls(
}
let range = sema.original_range(name.syntax());
- calls.add(nav.call_site, range.into());
+ calls.add(nav.call_site, range.into_file_id(db));
if let Some(other) = nav.def_site {
- calls.add(other, range.into());
+ calls.add(other, range.into_file_id(db));
}
}
}
@@ -143,7 +143,7 @@ pub(crate) fn outgoing_calls(
Some(nav_target.into_iter().zip(iter::repeat(range)))
})
.flatten()
- .for_each(|(nav, range)| calls.add(nav, range.into()));
+ .for_each(|(nav, range)| calls.add(nav, range.into_file_id(db)));
Some(calls.into_items())
}
diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs
index 3d540813a7..241a702038 100644
--- a/crates/ide/src/expand_macro.rs
+++ b/crates/ide/src/expand_macro.rs
@@ -1,5 +1,5 @@
use hir::db::ExpandDatabase;
-use hir::{ExpandResult, InFile, MacroFileIdExt, Semantics};
+use hir::{ExpandResult, InFile, Semantics};
use ide_db::{
FileId, RootDatabase, base_db::Crate, helpers::pick_best_token,
syntax_helpers::prettify_macro_expansion,
@@ -99,7 +99,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
.display(
db,
sema.attach_first_edition(position.file_id)
- .map(|it| it.edition())
+ .map(|it| it.edition(db))
.unwrap_or(Edition::CURRENT),
)
.to_string(),
diff --git a/crates/ide/src/fixture.rs b/crates/ide/src/fixture.rs
index 73b7e771ca..fbf89042fa 100644
--- a/crates/ide/src/fixture.rs
+++ b/crates/ide/src/fixture.rs
@@ -7,10 +7,10 @@ use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange};
/// Creates analysis for a single file.
pub(crate) fn file(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileId) {
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(ra_fixture);
+ let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
- (host.analysis(), change_fixture.files[0].into())
+ (host.analysis(), change_fixture.files[0].file_id(&host.db))
}
/// Creates analysis from a multi-file fixture, returns positions marked with $0.
@@ -18,23 +18,23 @@ pub(crate) fn position(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Analysis, FilePosition) {
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(ra_fixture);
+ let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
- (host.analysis(), FilePosition { file_id: file_id.into(), offset })
+ (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset })
}
/// Creates analysis for a single file, returns range marked with a pair of $0.
pub(crate) fn range(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileRange) {
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(ra_fixture);
+ let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let range = range_or_offset.expect_range();
- (host.analysis(), FileRange { file_id: file_id.into(), range })
+ (host.analysis(), FileRange { file_id: file_id.file_id(&host.db), range })
}
/// Creates analysis for a single file, returns range marked with a pair of $0 or a position marked with $0.
@@ -42,11 +42,11 @@ pub(crate) fn range_or_position(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Analysis, FileId, RangeOrOffset) {
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(ra_fixture);
+ let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
- (host.analysis(), file_id.into(), range_or_offset)
+ (host.analysis(), file_id.file_id(&host.db), range_or_offset)
}
/// Creates analysis from a multi-file fixture, returns positions marked with $0.
@@ -54,24 +54,25 @@ pub(crate) fn annotations(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Analysis, FilePosition, Vec<(FileRange, String)>) {
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(ra_fixture);
+ let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
+ let db = &host.db;
let annotations = change_fixture
.files
.iter()
.flat_map(|&file_id| {
- let file_text = host.analysis().file_text(file_id.into()).unwrap();
+ let file_text = host.analysis().file_text(file_id.file_id(&host.db)).unwrap();
let annotations = extract_annotations(&file_text);
annotations
.into_iter()
- .map(move |(range, data)| (FileRange { file_id: file_id.into(), range }, data))
+ .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data))
})
.collect();
- (host.analysis(), FilePosition { file_id: file_id.into(), offset }, annotations)
+ (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset }, annotations)
}
/// Creates analysis from a multi-file fixture with annotations without $0
@@ -79,19 +80,20 @@ pub(crate) fn annotations_without_marker(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Analysis, Vec<(FileRange, String)>) {
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(ra_fixture);
+ let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
+ let db = &host.db;
let annotations = change_fixture
.files
.iter()
.flat_map(|&file_id| {
- let file_text = host.analysis().file_text(file_id.into()).unwrap();
+ let file_text = host.analysis().file_text(file_id.file_id(db)).unwrap();
let annotations = extract_annotations(&file_text);
annotations
.into_iter()
- .map(move |(range, data)| (FileRange { file_id: file_id.into(), range }, data))
+ .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data))
})
.collect();
(host.analysis(), annotations)
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index a7c8a92f0e..ebd68983ed 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -6,8 +6,7 @@ use crate::{
navigation_target::{self, ToNav},
};
use hir::{
- AsAssocItem, AssocItem, CallableKind, FileRange, HasCrate, InFile, MacroFileIdExt, ModuleDef,
- Semantics, sym,
+ AsAssocItem, AssocItem, CallableKind, FileRange, HasCrate, InFile, ModuleDef, Semantics, sym,
};
use ide_db::{
RootDatabase, SymbolKind,
@@ -44,7 +43,7 @@ pub(crate) fn goto_definition(
let sema = &Semantics::new(db);
let file = sema.parse_guess_edition(file_id).syntax().clone();
let edition =
- sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
+ sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
IDENT
| INT_NUMBER
@@ -364,7 +363,7 @@ fn nav_for_exit_points(
if let Some(FileRange { file_id, range }) = focus_frange {
let contains_frange = |nav: &NavigationTarget| {
- nav.file_id == file_id && nav.full_range.contains_range(range)
+ nav.file_id == file_id.file_id(db) && nav.full_range.contains_range(range)
};
if let Some(def_site) = nav.def_site.as_mut() {
diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs
index bb96c92519..80624eeae8 100644
--- a/crates/ide/src/highlight_related.rs
+++ b/crates/ide/src/highlight_related.rs
@@ -1,9 +1,8 @@
use std::iter;
-use hir::{FilePosition, FileRange, HirFileId, InFile, Semantics, db};
+use hir::{EditionedFileId, FilePosition, FileRange, HirFileId, InFile, Semantics, db};
use ide_db::{
FxHashMap, FxHashSet, RootDatabase,
- base_db::salsa::AsDynDatabase,
defs::{Definition, IdentClass},
helpers::pick_best_token,
search::{FileReference, ReferenceCategory, SearchScope},
@@ -12,7 +11,7 @@ use ide_db::{
preorder_expr_with_ctx_checker,
},
};
-use span::EditionedFileId;
+use span::FileId;
use syntax::{
AstNode,
SyntaxKind::{self, IDENT, INT_NUMBER},
@@ -61,16 +60,14 @@ pub(crate) fn highlight_related(
let _p = tracing::info_span!("highlight_related").entered();
let file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
- let editioned_file_id_wrapper =
- ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
-
- let syntax = sema.parse(editioned_file_id_wrapper).syntax().clone();
+ .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
+ let span_file_id = file_id.editioned_file_id(sema.db);
+ let syntax = sema.parse(file_id).syntax().clone();
let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?`
T![->] => 4,
- kind if kind.is_keyword(file_id.edition()) => 3,
+ kind if kind.is_keyword(span_file_id.edition()) => 3,
IDENT | INT_NUMBER => 2,
T![|] => 1,
_ => 0,
@@ -92,11 +89,18 @@ pub(crate) fn highlight_related(
T![break] | T![loop] | T![while] | T![continue] if config.break_points => {
highlight_break_points(sema, token).remove(&file_id)
}
- T![|] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
- T![move] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
- _ if config.references => {
- highlight_references(sema, token, FilePosition { file_id, offset })
+ T![|] if config.closure_captures => {
+ highlight_closure_captures(sema, token, file_id, span_file_id.file_id())
+ }
+ T![move] if config.closure_captures => {
+ highlight_closure_captures(sema, token, file_id, span_file_id.file_id())
}
+ _ if config.references => highlight_references(
+ sema,
+ token,
+ FilePosition { file_id, offset },
+ span_file_id.file_id(),
+ ),
_ => None,
}
}
@@ -105,6 +109,7 @@ fn highlight_closure_captures(
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
file_id: EditionedFileId,
+ vfs_file_id: FileId,
) -> Option<Vec<HighlightedRange>> {
let closure = token.parent_ancestors().take(2).find_map(ast::ClosureExpr::cast)?;
let search_range = closure.body()?.syntax().text_range();
@@ -137,7 +142,7 @@ fn highlight_closure_captures(
.sources(sema.db)
.into_iter()
.flat_map(|x| x.to_nav(sema.db))
- .filter(|decl| decl.file_id == file_id)
+ .filter(|decl| decl.file_id == vfs_file_id)
.filter_map(|decl| decl.focus_range)
.map(move |range| HighlightedRange { range, category })
.chain(usages)
@@ -150,6 +155,7 @@ fn highlight_references(
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
FilePosition { file_id, offset }: FilePosition,
+ vfs_file_id: FileId,
) -> Option<Vec<HighlightedRange>> {
let defs = if let Some((range, resolution)) =
sema.check_for_format_args_template(token.clone(), offset)
@@ -261,7 +267,7 @@ fn highlight_references(
.sources(sema.db)
.into_iter()
.flat_map(|x| x.to_nav(sema.db))
- .filter(|decl| decl.file_id == file_id)
+ .filter(|decl| decl.file_id == vfs_file_id)
.filter_map(|decl| decl.focus_range)
.map(|range| HighlightedRange { range, category })
.for_each(|x| {
@@ -279,7 +285,7 @@ fn highlight_references(
},
};
for nav in navs {
- if nav.file_id != file_id {
+ if nav.file_id != vfs_file_id {
continue;
}
let hl_range = nav.focus_range.map(|range| {
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index 6fd522095e..2f2d2252f8 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -133,7 +133,7 @@ pub(crate) fn hover(
let sema = &hir::Semantics::new(db);
let file = sema.parse_guess_edition(file_id).syntax().clone();
let edition =
- sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
+ sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
let display_target = sema.first_crate(file_id)?.to_display_target(db);
let mut res = if range.is_empty() {
hover_offset(
@@ -551,7 +551,7 @@ fn runnable_action(
Definition::Module(it) => runnable_mod(sema, it).map(HoverAction::Runnable),
Definition::Function(func) => {
let src = func.source(sema.db)?;
- if src.file_id != file_id {
+ if src.file_id.file_id().is_none_or(|f| f.file_id(sema.db) != file_id) {
cov_mark::hit!(hover_macro_generated_struct_fn_doc_comment);
cov_mark::hit!(hover_macro_generated_struct_fn_doc_attr);
return None;
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index 8f9e62d398..5b1738b66e 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -5,14 +5,13 @@ use std::{
use either::Either;
use hir::{
- ClosureStyle, DisplayTarget, HasVisibility, HirDisplay, HirDisplayError, HirWrite, ModuleDef,
- ModuleDefId, Semantics, sym,
+ ClosureStyle, DisplayTarget, EditionedFileId, HasVisibility, HirDisplay, HirDisplayError,
+ HirWrite, ModuleDef, ModuleDefId, Semantics, sym,
};
-use ide_db::{FileRange, RootDatabase, base_db::salsa::AsDynDatabase, famous_defs::FamousDefs};
+use ide_db::{FileRange, RootDatabase, famous_defs::FamousDefs};
use ide_db::{FxHashSet, text_edit::TextEdit};
use itertools::Itertools;
use smallvec::{SmallVec, smallvec};
-use span::EditionedFileId;
use stdx::never;
use syntax::{
SmolStr, SyntaxNode, TextRange, TextSize, WalkEvent,
@@ -86,10 +85,8 @@ pub(crate) fn inlay_hints(
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
- let editioned_file_id_wrapper =
- ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
- let file = sema.parse(editioned_file_id_wrapper);
+ .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+ let file = sema.parse(file_id);
let file = file.syntax();
let mut acc = Vec::new();
@@ -139,10 +136,8 @@ pub(crate) fn inlay_hints_resolve(
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
- let editioned_file_id_wrapper =
- ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
- let file = sema.parse(editioned_file_id_wrapper);
+ .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+ let file = sema.parse(file_id);
let file = file.syntax();
let scope = sema.scope(file)?;
@@ -212,6 +207,7 @@ fn hints(
file_id: EditionedFileId,
node: SyntaxNode,
) {
+ let file_id = file_id.editioned_file_id(sema.db);
let Some(krate) = sema.first_crate(file_id.file_id()) else {
return;
};
@@ -227,12 +223,12 @@ fn hints(
chaining::hints(hints, famous_defs, config, display_target, &expr);
adjustment::hints(hints, famous_defs, config, display_target, &expr);
match expr {
- ast::Expr::CallExpr(it) => param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it)),
+ ast::Expr::CallExpr(it) => param_name::hints(hints, famous_defs, config, ast::Expr::from(it)),
ast::Expr::MethodCallExpr(it) => {
- param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it))
+ param_name::hints(hints, famous_defs, config, ast::Expr::from(it))
}
ast::Expr::ClosureExpr(it) => {
- closure_captures::hints(hints, famous_defs, config, file_id, it.clone());
+ closure_captures::hints(hints, famous_defs, config, it.clone());
closure_ret::hints(hints, famous_defs, config, display_target, it)
},
ast::Expr::RangeExpr(it) => range_exclusive::hints(hints, famous_defs, config, file_id, it),
diff --git a/crates/ide/src/inlay_hints/closure_captures.rs b/crates/ide/src/inlay_hints/closure_captures.rs
index 07a86b2c9d..3186a566d2 100644
--- a/crates/ide/src/inlay_hints/closure_captures.rs
+++ b/crates/ide/src/inlay_hints/closure_captures.rs
@@ -3,7 +3,6 @@
//! Tests live in [`bind_pat`][super::bind_pat] module.
use ide_db::famous_defs::FamousDefs;
use ide_db::text_edit::{TextRange, TextSize};
-use span::EditionedFileId;
use stdx::{TupleExt, never};
use syntax::ast::{self, AstNode};
@@ -15,7 +14,6 @@ pub(super) fn hints(
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- _file_id: EditionedFileId,
closure: ast::ClosureExpr,
) -> Option<()> {
if !config.closure_capture_hints {
@@ -75,10 +73,12 @@ pub(super) fn hints(
// force cache the source file, otherwise sema lookup will potentially panic
_ = sema.parse_or_expand(source.file());
source.name().and_then(|name| {
- name.syntax()
- .original_file_range_opt(sema.db)
- .map(TupleExt::head)
- .map(Into::into)
+ name.syntax().original_file_range_opt(sema.db).map(TupleExt::head).map(
+ |frange| ide_db::FileRange {
+ file_id: frange.file_id.file_id(sema.db),
+ range: frange.range,
+ },
+ )
})
}),
tooltip: None,
diff --git a/crates/ide/src/inlay_hints/generic_param.rs b/crates/ide/src/inlay_hints/generic_param.rs
index 730732df86..6e1b3bdbdf 100644
--- a/crates/ide/src/inlay_hints/generic_param.rs
+++ b/crates/ide/src/inlay_hints/generic_param.rs
@@ -102,7 +102,10 @@ pub(crate) fn hints(
}
};
let linked_location = source_syntax.and_then(|it| sema.original_range_opt(&it));
- linked_location.map(Into::into)
+ linked_location.map(|frange| ide_db::FileRange {
+ file_id: frange.file_id.file_id(sema.db),
+ range: frange.range,
+ })
}),
);
diff --git a/crates/ide/src/inlay_hints/implicit_drop.rs b/crates/ide/src/inlay_hints/implicit_drop.rs
index 668232d301..f52e27946f 100644
--- a/crates/ide/src/inlay_hints/implicit_drop.rs
+++ b/crates/ide/src/inlay_hints/implicit_drop.rs
@@ -108,7 +108,7 @@ pub(super) fn hints(
.and_then(|d| source_map.pat_syntax(*d).ok())
.and_then(|d| {
Some(FileRange {
- file_id: d.file_id.file_id()?.into(),
+ file_id: d.file_id.file_id()?.file_id(sema.db),
range: d.value.text_range(),
})
})
diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs
index 99c698ce02..5ff9fee60a 100644
--- a/crates/ide/src/inlay_hints/param_name.rs
+++ b/crates/ide/src/inlay_hints/param_name.rs
@@ -10,7 +10,6 @@ use either::Either;
use hir::Semantics;
use ide_db::{RootDatabase, famous_defs::FamousDefs};
-use span::EditionedFileId;
use stdx::to_lower_snake_case;
use syntax::ast::{self, AstNode, HasArgList, HasName, UnaryOp};
@@ -20,7 +19,6 @@ pub(super) fn hints(
acc: &mut Vec<InlayHint>,
FamousDefs(sema, krate): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- _file_id: EditionedFileId,
expr: ast::Expr,
) -> Option<()> {
if !config.parameter_hints {
@@ -67,7 +65,10 @@ pub(super) fn hints(
_ => None,
},
}?;
- sema.original_range_opt(name_syntax.syntax()).map(Into::into)
+ sema.original_range_opt(name_syntax.syntax()).map(|frange| ide_db::FileRange {
+ file_id: frange.file_id.file_id(sema.db),
+ range: frange.range,
+ })
}),
);
InlayHint {
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 1d44993564..2a6f108cda 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -58,20 +58,19 @@ mod view_memory_layout;
mod view_mir;
mod view_syntax_tree;
-use std::panic::UnwindSafe;
+use std::panic::{AssertUnwindSafe, UnwindSafe};
use cfg::CfgOptions;
use fetch_crates::CrateInfo;
-use hir::{ChangeWithProcMacros, sym};
+use hir::{ChangeWithProcMacros, EditionedFileId, sym};
use ide_db::{
FxHashMap, FxIndexSet, LineIndexDatabase,
base_db::{
CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, VfsPath,
- salsa::{AsDynDatabase, Cancelled},
+ salsa::Cancelled,
},
prime_caches, symbol_index,
};
-use span::EditionedFileId;
use syntax::SourceFile;
use triomphe::Arc;
use view_memory_layout::{RecursiveMemoryLayout, view_memory_layout};
@@ -306,10 +305,7 @@ impl Analysis {
pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
// FIXME edition
self.with_db(|db| {
- let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
- self.db.as_dyn_database(),
- EditionedFileId::current_edition(file_id),
- );
+ let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
db.parse(editioned_file_id_wrapper).tree()
})
@@ -338,10 +334,7 @@ impl Analysis {
/// supported).
pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
self.with_db(|db| {
- let file_id = ide_db::base_db::EditionedFileId::new(
- self.db.as_dyn_database(),
- EditionedFileId::current_edition(position.file_id),
- );
+ let file_id = EditionedFileId::current_edition(&self.db, position.file_id);
let parse = db.parse(file_id);
let file = parse.tree();
matching_brace::matching_brace(&file, position.offset)
@@ -401,10 +394,8 @@ impl Analysis {
/// stuff like trailing commas.
pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> {
self.with_db(|db| {
- let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
- self.db.as_dyn_database(),
- EditionedFileId::current_edition(frange.file_id),
- );
+ let editioned_file_id_wrapper =
+ EditionedFileId::current_edition(&self.db, frange.file_id);
let parse = db.parse(editioned_file_id_wrapper);
join_lines::join_lines(config, &parse.tree(), frange.range)
})
@@ -441,10 +432,7 @@ impl Analysis {
pub fn file_structure(&self, file_id: FileId) -> Cancellable<Vec<StructureNode>> {
// FIXME: Edition
self.with_db(|db| {
- let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
- self.db.as_dyn_database(),
- EditionedFileId::current_edition(file_id),
- );
+ let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
file_structure::file_structure(&db.parse(editioned_file_id_wrapper).tree())
})
@@ -475,10 +463,7 @@ impl Analysis {
/// Returns the set of folding ranges.
pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
self.with_db(|db| {
- let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
- self.db.as_dyn_database(),
- EditionedFileId::current_edition(file_id),
- );
+ let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
folding_ranges::folding_ranges(&db.parse(editioned_file_id_wrapper).tree())
})
@@ -534,7 +519,11 @@ impl Analysis {
position: FilePosition,
search_scope: Option<SearchScope>,
) -> Cancellable<Option<Vec<ReferenceSearchResult>>> {
- self.with_db(|db| references::find_all_refs(&Semantics::new(db), position, search_scope))
+ let search_scope = AssertUnwindSafe(search_scope);
+ self.with_db(|db| {
+ let _ = &search_scope;
+ references::find_all_refs(&Semantics::new(db), position, search_scope.0)
+ })
}
/// Returns a short text describing element at position.
@@ -656,7 +645,11 @@ impl Analysis {
position: FilePosition,
search_scope: Option<SearchScope>,
) -> Cancellable<Vec<Runnable>> {
- self.with_db(|db| runnables::related_tests(db, position, search_scope))
+ let search_scope = AssertUnwindSafe(search_scope);
+ self.with_db(|db| {
+ let _ = &search_scope;
+ runnables::related_tests(db, position, search_scope.0)
+ })
}
/// Computes syntax highlighting for the given file
@@ -849,6 +842,10 @@ impl Analysis {
self.with_db(|db| view_memory_layout(db, position))
}
+ pub fn editioned_file_id_to_vfs(&self, file_id: hir::EditionedFileId) -> FileId {
+ file_id.file_id(&self.db)
+ }
+
/// Performs an operation on the database that may be canceled.
///
/// rust-analyzer needs to be able to answer semantic questions about the
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs
index 4679794287..9334b73fc7 100644
--- a/crates/ide/src/navigation_target.rs
+++ b/crates/ide/src/navigation_target.rs
@@ -817,14 +817,10 @@ pub(crate) fn orig_range_with_focus_r(
) -> UpmappingResult<(FileRange, Option<TextRange>)> {
let Some(name) = focus_range else { return orig_range_r(db, hir_file, value) };
- let call_kind =
- || db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id).kind;
+ let call_kind = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).kind;
- let def_range = || {
- db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
- .def
- .definition_range(db)
- };
+ let def_range =
+ || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).def.definition_range(db);
// FIXME: Also make use of the syntax context to determine which site we are at?
let value_range = InFile::new(hir_file, value).original_node_file_range_opt(db);
@@ -901,7 +897,7 @@ pub(crate) fn orig_range_with_focus_r(
UpmappingResult {
call_site: (
- call_site_range.into(),
+ call_site_range.into_file_id(db),
call_site_focus.and_then(|hir::FileRange { file_id, range }| {
if call_site_range.file_id == file_id && call_site_range.range.contains_range(range)
{
@@ -913,7 +909,7 @@ pub(crate) fn orig_range_with_focus_r(
),
def_site: def_site.map(|(def_site_range, def_site_focus)| {
(
- def_site_range.into(),
+ def_site_range.into_file_id(db),
def_site_focus.and_then(|hir::FileRange { file_id, range }| {
if def_site_range.file_id == file_id
&& def_site_range.range.contains_range(range)
@@ -934,7 +930,10 @@ fn orig_range(
value: &SyntaxNode,
) -> UpmappingResult<(FileRange, Option<TextRange>)> {
UpmappingResult {
- call_site: (InFile::new(hir_file, value).original_file_range_rooted(db).into(), None),
+ call_site: (
+ InFile::new(hir_file, value).original_file_range_rooted(db).into_file_id(db),
+ None,
+ ),
def_site: None,
}
}
@@ -945,7 +944,10 @@ fn orig_range_r(
value: TextRange,
) -> UpmappingResult<(FileRange, Option<TextRange>)> {
UpmappingResult {
- call_site: (InFile::new(hir_file, value).original_node_file_range(db).0.into(), None),
+ call_site: (
+ InFile::new(hir_file, value).original_node_file_range(db).0.into_file_id(db),
+ None,
+ ),
def_site: None,
}
}
diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs
index cdc9d5a769..6dc01c4506 100644
--- a/crates/ide/src/parent_module.rs
+++ b/crates/ide/src/parent_module.rs
@@ -57,7 +57,9 @@ pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<Crate> {
db.relevant_crates(file_id)
.iter()
.copied()
- .filter(|&crate_id| db.crate_def_map(crate_id).modules_for_file(file_id).next().is_some())
+ .filter(|&crate_id| {
+ db.crate_def_map(crate_id).modules_for_file(db, file_id).next().is_some()
+ })
.sorted()
.collect()
}
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index 2487543dcf..4fa116444b 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -68,7 +68,7 @@ pub(crate) fn find_all_refs(
.into_iter()
.map(|(file_id, refs)| {
(
- file_id.into(),
+ file_id.file_id(sema.db),
refs.into_iter()
.map(|file_ref| (file_ref.range, file_ref.category))
.unique()
@@ -307,8 +307,10 @@ fn handle_control_flow_keywords(
FilePosition { file_id, offset }: FilePosition,
) -> Option<ReferenceSearchResult> {
let file = sema.parse_guess_edition(file_id);
- let edition =
- sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
+ let edition = sema
+ .attach_first_edition(file_id)
+ .map(|it| it.edition(sema.db))
+ .unwrap_or(Edition::CURRENT);
let token = file.syntax().token_at_offset(offset).find(|t| t.kind().is_keyword(edition))?;
let references = match token.kind() {
@@ -328,7 +330,7 @@ fn handle_control_flow_keywords(
.into_iter()
.map(|HighlightedRange { range, category }| (range, category))
.collect();
- (file_id.into(), ranges)
+ (file_id.file_id(sema.db), ranges)
})
.collect();
@@ -338,8 +340,8 @@ fn handle_control_flow_keywords(
#[cfg(test)]
mod tests {
use expect_test::{Expect, expect};
- use ide_db::FileId;
- use span::EditionedFileId;
+ use hir::EditionedFileId;
+ use ide_db::{FileId, RootDatabase};
use stdx::format_to;
use crate::{SearchScope, fixture};
@@ -1004,7 +1006,9 @@ pub(super) struct Foo$0 {
check_with_scope(
code,
- Some(SearchScope::single_file(EditionedFileId::current_edition(FileId::from_raw(2)))),
+ Some(&mut |db| {
+ SearchScope::single_file(EditionedFileId::current_edition(db, FileId::from_raw(2)))
+ }),
expect![[r#"
quux Function FileId(0) 19..35 26..30
@@ -1260,11 +1264,12 @@ impl Foo {
fn check_with_scope(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
- search_scope: Option<SearchScope>,
+ search_scope: Option<&mut dyn FnMut(&RootDatabase) -> SearchScope>,
expect: Expect,
) {
let (analysis, pos) = fixture::position(ra_fixture);
- let refs = analysis.find_all_refs(pos, search_scope).unwrap().unwrap();
+ let refs =
+ analysis.find_all_refs(pos, search_scope.map(|it| it(&analysis.db))).unwrap().unwrap();
let mut actual = String::new();
for mut refs in refs {
diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs
index 8fdd460a09..e6cda60cd9 100644
--- a/crates/ide/src/rename.rs
+++ b/crates/ide/src/rename.rs
@@ -4,10 +4,9 @@
//! tests. This module also implements a couple of magic tricks, like renaming
//! `self` and to `self` (to switch between associated function and method).
-use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics};
+use hir::{AsAssocItem, InFile, Semantics};
use ide_db::{
FileId, FileRange, RootDatabase,
- base_db::salsa::AsDynDatabase,
defs::{Definition, NameClass, NameRefClass},
rename::{IdentifierKind, bail, format_err, source_edit_from_references},
source_change::SourceChangeBuilder,
@@ -86,9 +85,7 @@ pub(crate) fn rename(
let file_id = sema
.attach_first_edition(position.file_id)
.ok_or_else(|| format_err!("No references found at position"))?;
- let editioned_file_id_wrapper =
- ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
- let source_file = sema.parse(editioned_file_id_wrapper);
+ let source_file = sema.parse(file_id);
let syntax = source_file.syntax();
let defs = find_definitions(&sema, syntax, position)?;
@@ -123,7 +120,7 @@ pub(crate) fn rename(
source_change.extend(usages.references.get_mut(&file_id).iter().map(|refs| {
(
position.file_id,
- source_edit_from_references(refs, def, new_name, file_id.edition()),
+ source_edit_from_references(refs, def, new_name, file_id.edition(db)),
)
}));
@@ -300,7 +297,7 @@ fn find_definitions(
// remove duplicates, comparing `Definition`s
Ok(v.into_iter()
.unique_by(|&(.., def)| def)
- .map(|(a, b, c)| (a.into(), b, c))
+ .map(|(a, b, c)| (a.into_file_id(sema.db), b, c))
.collect::<Vec<_>>()
.into_iter())
}
@@ -371,10 +368,13 @@ fn rename_to_self(
let usages = def.usages(sema).all();
let mut source_change = SourceChange::default();
source_change.extend(usages.iter().map(|(file_id, references)| {
- (file_id.into(), source_edit_from_references(references, def, "self", file_id.edition()))
+ (
+ file_id.file_id(sema.db),
+ source_edit_from_references(references, def, "self", file_id.edition(sema.db)),
+ )
}));
source_change.insert_source_edit(
- file_id.original_file(sema.db),
+ file_id.original_file(sema.db).file_id(sema.db),
TextEdit::replace(param_source.syntax().text_range(), String::from(self_param)),
);
Ok(source_change)
@@ -405,9 +405,12 @@ fn rename_self_to_param(
bail!("Cannot rename reference to `_` as it is being referenced multiple times");
}
let mut source_change = SourceChange::default();
- source_change.insert_source_edit(file_id.original_file(sema.db), edit);
+ source_change.insert_source_edit(file_id.original_file(sema.db).file_id(sema.db), edit);
source_change.extend(usages.iter().map(|(file_id, references)| {
- (file_id.into(), source_edit_from_references(references, def, new_name, file_id.edition()))
+ (
+ file_id.file_id(sema.db),
+ source_edit_from_references(references, def, new_name, file_id.edition(sema.db)),
+ )
}));
Ok(source_change)
}
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index fac690c27e..ab13960240 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -4,8 +4,8 @@ use arrayvec::ArrayVec;
use ast::HasName;
use cfg::{CfgAtom, CfgExpr};
use hir::{
- AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, HirFileIdExt, ModPath, Name,
- PathKind, Semantics, Symbol, db::HirDatabase, sym, symbols::FxIndexSet,
+ AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, ModPath, Name, PathKind, Semantics,
+ Symbol, db::HirDatabase, sym, symbols::FxIndexSet,
};
use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
use ide_db::{
@@ -285,8 +285,10 @@ fn find_related_tests_in_module(
let file_id = mod_source.file_id.original_file(sema.db);
let mod_scope = SearchScope::file_range(hir::FileRange { file_id, range: mod_source.value });
- let fn_pos =
- FilePosition { file_id: file_id.into(), offset: fn_name.syntax().text_range().start() };
+ let fn_pos = FilePosition {
+ file_id: file_id.file_id(sema.db),
+ offset: fn_name.syntax().text_range().start(),
+ };
find_related_tests(sema, syntax, fn_pos, Some(mod_scope), tests)
}
diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs
index 44a1f2d97a..0e17b35590 100644
--- a/crates/ide/src/signature_help.rs
+++ b/crates/ide/src/signature_help.rs
@@ -85,7 +85,7 @@ pub(crate) fn signature_help(
.and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
let token = sema.descend_into_macros_single_exact(token);
let edition =
- sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
+ sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
let display_target = sema.first_crate(file_id)?.to_display_target(db);
for node in token.parent_ancestors() {
@@ -744,13 +744,14 @@ mod tests {
pub(crate) fn position(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (RootDatabase, FilePosition) {
- let change_fixture = ChangeFixture::parse(ra_fixture);
let mut database = RootDatabase::default();
+ let change_fixture = ChangeFixture::parse(&database, ra_fixture);
database.apply_change(change_fixture.change);
let (file_id, range_or_offset) =
change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
- (database, FilePosition { file_id: file_id.into(), offset })
+ let position = FilePosition { file_id: file_id.file_id(&database), offset };
+ (database, position)
}
#[track_caller]
diff --git a/crates/ide/src/ssr.rs b/crates/ide/src/ssr.rs
index 0e06b8e2da..7df4499a0c 100644
--- a/crates/ide/src/ssr.rs
+++ b/crates/ide/src/ssr.rs
@@ -78,7 +78,7 @@ mod tests {
ssr_assists(
&db,
&resolve,
- FileRange { file_id: file_id.into(), range: range_or_offset.into() },
+ FileRange { file_id: file_id.file_id(&db), range: range_or_offset.into() },
)
}
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index 057d635271..efee39c13d 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -2,7 +2,7 @@
//! read-only code browsers and emitting LSIF
use arrayvec::ArrayVec;
-use hir::{Crate, HirFileIdExt, Module, Semantics, db::HirDatabase};
+use hir::{Crate, Module, Semantics, db::HirDatabase};
use ide_db::{
FileId, FileRange, FxHashMap, FxHashSet, RootDatabase,
base_db::{RootQueryDb, SourceDatabase, VfsPath},
@@ -191,8 +191,10 @@ impl StaticIndex<'_> {
// hovers
let sema = hir::Semantics::new(self.db);
let root = sema.parse_guess_edition(file_id).syntax().clone();
- let edition =
- sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
+ let edition = sema
+ .attach_first_edition(file_id)
+ .map(|it| it.edition(self.db))
+ .unwrap_or(Edition::CURRENT);
let display_target = match sema.first_crate(file_id) {
Some(krate) => krate.to_display_target(sema.db),
None => return,
@@ -292,11 +294,11 @@ impl StaticIndex<'_> {
let db = &analysis.db;
let work = all_modules(db).into_iter().filter(|module| {
let file_id = module.definition_source_file_id(db).original_file(db);
- let source_root = db.file_source_root(file_id.into()).source_root_id(db);
+ let source_root = db.file_source_root(file_id.file_id(&analysis.db)).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
let is_vendored = match vendored_libs_config {
VendoredLibrariesConfig::Included { workspace_root } => source_root
- .path_for_file(&file_id.into())
+ .path_for_file(&file_id.file_id(&analysis.db))
.is_some_and(|module_path| module_path.starts_with(workspace_root)),
VendoredLibrariesConfig::Excluded => false,
};
@@ -316,7 +318,7 @@ impl StaticIndex<'_> {
if visited_files.contains(&file_id) {
continue;
}
- this.add_file(file_id.into());
+ this.add_file(file_id.file_id(&analysis.db));
// mark the file
visited_files.insert(file_id);
}
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index ea0e01e536..e1bc76318f 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -15,13 +15,8 @@ mod tests;
use std::ops::ControlFlow;
use either::Either;
-use hir::{
- DefWithBody, HirFileIdExt, InFile, InRealFile, MacroFileIdExt, MacroKind, Name, Semantics,
-};
-use ide_db::{
- FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind, base_db::salsa::AsDynDatabase,
-};
-use span::EditionedFileId;
+use hir::{DefWithBody, EditionedFileId, InFile, InRealFile, MacroKind, Name, Semantics};
+use ide_db::{FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind};
use syntax::{
AstNode, AstToken, NodeOrToken,
SyntaxKind::*,
@@ -201,13 +196,11 @@ pub(crate) fn highlight(
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
+ .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
// Determine the root based on the given range.
let (root, range_to_highlight) = {
- let editioned_file_id_wrapper =
- ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
- let file = sema.parse(editioned_file_id_wrapper);
+ let file = sema.parse(file_id);
let source_file = file.syntax();
match range_to_highlight {
Some(range) => {
@@ -235,7 +228,7 @@ fn traverse(
krate: Option<hir::Crate>,
range_to_highlight: TextRange,
) {
- let is_unlinked = sema.file_to_module_def(file_id).is_none();
+ let is_unlinked = sema.file_to_module_def(file_id.file_id(sema.db)).is_none();
enum AttrOrDerive {
Attr(ast::Item),
@@ -509,7 +502,14 @@ fn string_injections(
{
return ControlFlow::Break(());
}
- highlight_format_string(hl, sema, krate, &string, &descended_string, file_id.edition());
+ highlight_format_string(
+ hl,
+ sema,
+ krate,
+ &string,
+ &descended_string,
+ file_id.edition(sema.db),
+ );
if !string.is_raw() {
highlight_escape_string(hl, &string);
diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs
index 5914a8f799..87db0cd7dc 100644
--- a/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/crates/ide/src/syntax_highlighting/highlight.rs
@@ -3,7 +3,7 @@
use std::ops::ControlFlow;
use either::Either;
-use hir::{AsAssocItem, HasVisibility, MacroFileIdExt, Semantics};
+use hir::{AsAssocItem, HasVisibility, Semantics};
use ide_db::{
FxHashMap, RootDatabase, SymbolKind,
defs::{Definition, IdentClass, NameClass, NameRefClass},
diff --git a/crates/ide/src/syntax_highlighting/html.rs b/crates/ide/src/syntax_highlighting/html.rs
index cd69a6eb23..9fd807f031 100644
--- a/crates/ide/src/syntax_highlighting/html.rs
+++ b/crates/ide/src/syntax_highlighting/html.rs
@@ -1,9 +1,7 @@
//! Renders a bit of code as HTML.
-use hir::Semantics;
-use ide_db::base_db::salsa::AsDynDatabase;
+use hir::{EditionedFileId, Semantics};
use oorandom::Rand32;
-use span::EditionedFileId;
use stdx::format_to;
use syntax::AstNode;
@@ -16,10 +14,8 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
- let editioned_file_id_wrapper =
- ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
- let file = sema.parse(editioned_file_id_wrapper);
+ .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+ let file = sema.parse(file_id);
let file = file.syntax();
fn rainbowify(seed: u64) -> String {
let mut rng = Rand32::new(seed);
@@ -43,7 +39,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
macro_bang: true,
syntactic_name_ref_highlighting: false,
},
- file_id.into(),
+ file_id.file_id(db),
None,
);
let text = file.to_string();
diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs
index 13922eba19..4ff23393df 100644
--- a/crates/ide/src/syntax_highlighting/inject.rs
+++ b/crates/ide/src/syntax_highlighting/inject.rs
@@ -3,12 +3,11 @@
use std::mem;
use either::Either;
-use hir::{HirFileId, InFile, Semantics, sym};
+use hir::{EditionedFileId, HirFileId, InFile, Semantics, sym};
use ide_db::{
SymbolKind, active_parameter::ActiveParameter, defs::Definition,
documentation::docs_with_rangemap, rust_doc::is_rust_fence,
};
-use span::EditionedFileId;
use syntax::{
AstToken, NodeOrToken, SyntaxNode, TextRange, TextSize,
ast::{self, AstNode, IsString, QuoteOffsets},
diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs
index bb04fdbe88..4df7e25223 100644
--- a/crates/ide/src/typing.rs
+++ b/crates/ide/src/typing.rs
@@ -15,11 +15,9 @@
mod on_enter;
-use ide_db::{
- FilePosition, RootDatabase,
- base_db::{RootQueryDb, salsa::AsDynDatabase},
-};
-use span::{Edition, EditionedFileId};
+use hir::EditionedFileId;
+use ide_db::{FilePosition, RootDatabase, base_db::RootQueryDb};
+use span::Edition;
use std::iter;
use syntax::{
@@ -76,10 +74,7 @@ pub(crate) fn on_char_typed(
// FIXME: We are hitting the database here, if we are unlucky this call might block momentarily
// causing the editor to feel sluggish!
let edition = Edition::CURRENT_FIXME;
- let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
- db.as_dyn_database(),
- EditionedFileId::new(position.file_id, edition),
- );
+ let editioned_file_id_wrapper = EditionedFileId::new(db, position.file_id, edition);
let file = &db.parse(editioned_file_id_wrapper);
let char_matches_position =
file.tree().syntax().text().char_at(position.offset) == Some(char_typed);
diff --git a/crates/ide/src/typing/on_enter.rs b/crates/ide/src/typing/on_enter.rs
index d684b0efc5..fdc583a15c 100644
--- a/crates/ide/src/typing/on_enter.rs
+++ b/crates/ide/src/typing/on_enter.rs
@@ -2,8 +2,7 @@
//! comments, but should handle indent some time in the future as well.
use ide_db::base_db::RootQueryDb;
-use ide_db::{FilePosition, RootDatabase, base_db::salsa::AsDynDatabase};
-use span::EditionedFileId;
+use ide_db::{FilePosition, RootDatabase};
use syntax::{
AstNode, SmolStr, SourceFile,
SyntaxKind::*,
@@ -51,10 +50,8 @@ use ide_db::text_edit::TextEdit;
//
// ![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif)
pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> {
- let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
- db.as_dyn_database(),
- EditionedFileId::current_edition(position.file_id),
- );
+ let editioned_file_id_wrapper =
+ ide_db::base_db::EditionedFileId::current_edition(db, position.file_id);
let parse = db.parse(editioned_file_id_wrapper);
let file = parse.tree();
let token = file.syntax().token_at_offset(position.offset).left_biased()?;
diff --git a/crates/ide/src/view_item_tree.rs b/crates/ide/src/view_item_tree.rs
index a230b30ed3..2cd751463b 100644
--- a/crates/ide/src/view_item_tree.rs
+++ b/crates/ide/src/view_item_tree.rs
@@ -1,6 +1,5 @@
-use hir::{Semantics, db::DefDatabase};
+use hir::{EditionedFileId, Semantics, db::DefDatabase};
use ide_db::{FileId, RootDatabase};
-use span::EditionedFileId;
// Feature: Debug ItemTree
//
@@ -13,6 +12,6 @@ pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
- db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition())
+ .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+ db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition(db))
}
diff --git a/crates/query-group-macro/src/lib.rs b/crates/query-group-macro/src/lib.rs
index 958723790c..2e2a24908e 100644
--- a/crates/query-group-macro/src/lib.rs
+++ b/crates/query-group-macro/src/lib.rs
@@ -136,7 +136,7 @@ pub(crate) fn query_group_impl(
let (_attrs, salsa_attrs) = filter_attrs(method.attrs.clone());
- let mut query_kind = QueryKind::Tracked;
+ let mut query_kind = QueryKind::TrackedWithSalsaStruct;
let mut invoke = None;
let mut cycle = None;
let mut interned_struct_path = None;
@@ -183,14 +183,17 @@ pub(crate) fn query_group_impl(
interned_struct_path = Some(path.path.clone());
query_kind = QueryKind::Interned;
}
- "invoke" => {
+ "invoke_interned" => {
let path = syn::parse::<Parenthesized<Path>>(tts)?;
invoke = Some(path.0.clone());
+ query_kind = QueryKind::Tracked;
}
- "invoke_actual" => {
+ "invoke" => {
let path = syn::parse::<Parenthesized<Path>>(tts)?;
invoke = Some(path.0.clone());
- query_kind = QueryKind::TrackedWithSalsaStruct;
+ if query_kind != QueryKind::Transparent {
+ query_kind = QueryKind::TrackedWithSalsaStruct;
+ }
}
"tracked" if method.default.is_some() => {
query_kind = QueryKind::TrackedWithSalsaStruct;
@@ -292,10 +295,6 @@ pub(crate) fn query_group_impl(
trait_methods.push(Queries::TrackedQuery(method));
}
(QueryKind::TrackedWithSalsaStruct, invoke) => {
- // while it is possible to make this reachable, it's not really worthwhile for a migration aid.
- // doing this would require attaching an attribute to the salsa struct parameter
- // in the query.
- assert_ne!(invoke.is_none(), method.default.is_none());
let method = TrackedQuery {
trait_name: trait_name_ident.clone(),
generated_struct: None,
diff --git a/crates/query-group-macro/tests/arity.rs b/crates/query-group-macro/tests/arity.rs
index 440db7b821..f1b29612a1 100644
--- a/crates/query-group-macro/tests/arity.rs
+++ b/crates/query-group-macro/tests/arity.rs
@@ -2,12 +2,16 @@ use query_group_macro::query_group;
#[query_group]
pub trait ArityDb: salsa::Database {
+ #[salsa::invoke_interned(one)]
fn one(&self, a: ()) -> String;
+ #[salsa::invoke_interned(two)]
fn two(&self, a: (), b: ()) -> String;
+ #[salsa::invoke_interned(three)]
fn three(&self, a: (), b: (), c: ()) -> String;
+ #[salsa::invoke_interned(none)]
fn none(&self) -> String;
}
diff --git a/crates/query-group-macro/tests/cycle.rs b/crates/query-group-macro/tests/cycle.rs
index 12df4ae3ef..8d195cbd8d 100644
--- a/crates/query-group-macro/tests/cycle.rs
+++ b/crates/query-group-macro/tests/cycle.rs
@@ -94,12 +94,7 @@ fn cycle_a(db: &dyn CycleDatabase, abc: ABC) -> Result<(), Error> {
abc.a(db).invoke(db, abc)
}
-fn recover_a(
- _db: &dyn CycleDatabase,
- cycle: &salsa::Cycle,
- _: CycleDatabaseData,
- _abc: ABC,
-) -> Result<(), Error> {
+fn recover_a(_db: &dyn CycleDatabase, cycle: &salsa::Cycle, _abc: ABC) -> Result<(), Error> {
Err(Error { cycle: cycle.participant_keys().map(|k| format!("{k:?}")).collect() })
}
@@ -107,12 +102,7 @@ fn cycle_b(db: &dyn CycleDatabase, abc: ABC) -> Result<(), Error> {
abc.b(db).invoke(db, abc)
}
-fn recover_b(
- _db: &dyn CycleDatabase,
- cycle: &salsa::Cycle,
- _: CycleDatabaseData,
- _abc: ABC,
-) -> Result<(), Error> {
+fn recover_b(_db: &dyn CycleDatabase, cycle: &salsa::Cycle, _abc: ABC) -> Result<(), Error> {
Err(Error { cycle: cycle.participant_keys().map(|k| format!("{k:?}")).collect() })
}
@@ -156,11 +146,11 @@ fn inner_cycle() {
let err = db.cycle_c(abc);
assert!(err.is_err());
let expected = expect![[r#"
- [
- "cycle_a_shim(Id(1400))",
- "cycle_b_shim(Id(1000))",
- ]
- "#]];
+ [
+ "cycle_a_shim(Id(0))",
+ "cycle_b_shim(Id(0))",
+ ]
+ "#]];
expected.assert_debug_eq(&err.unwrap_err().cycle);
}
@@ -241,16 +231,16 @@ fn cycle_multiple() {
let expected = expect![[r#"
(
[
- "cycle_a_shim(Id(1000))",
- "cycle_b_shim(Id(1400))",
+ "cycle_a_shim(Id(0))",
+ "cycle_b_shim(Id(0))",
],
[
- "cycle_a_shim(Id(1000))",
- "cycle_b_shim(Id(1400))",
+ "cycle_a_shim(Id(0))",
+ "cycle_b_shim(Id(0))",
],
[
- "cycle_a_shim(Id(1000))",
- "cycle_b_shim(Id(1400))",
+ "cycle_a_shim(Id(0))",
+ "cycle_b_shim(Id(0))",
],
)
"#]];
@@ -267,8 +257,8 @@ fn cycle_mixed_1() {
let expected = expect![[r#"
[
- "cycle_b_shim(Id(1000))",
- "cycle_c_shim(Id(c00))",
+ "cycle_b_shim(Id(0))",
+ "cycle_c_shim(Id(0))",
]
"#]];
expected.assert_debug_eq(&db.cycle_c(abc).unwrap_err().cycle);
diff --git a/crates/query-group-macro/tests/hello_world.rs b/crates/query-group-macro/tests/hello_world.rs
index 86cf591f52..b0aec8dc53 100644
--- a/crates/query-group-macro/tests/hello_world.rs
+++ b/crates/query-group-macro/tests/hello_world.rs
@@ -12,13 +12,14 @@ pub trait HelloWorldDatabase: salsa::Database {
fn input_string(&self) -> String;
// unadorned query
+ #[salsa::invoke_interned(length_query)]
fn length_query(&self, key: ()) -> usize;
// unadorned query
fn length_query_with_no_params(&self) -> usize;
// renamed/invoke query
- #[salsa::invoke(invoke_length_query_actual)]
+ #[salsa::invoke_interned(invoke_length_query_actual)]
fn invoke_length_query(&self, key: ()) -> usize;
// not a query. should not invoked
@@ -26,7 +27,7 @@ pub trait HelloWorldDatabase: salsa::Database {
fn transparent_length(&self, key: ()) -> usize;
#[salsa::transparent]
- #[salsa::invoke(transparent_and_invoke_length_actual)]
+ #[salsa::invoke_interned(transparent_and_invoke_length_actual)]
fn transparent_and_invoke_length(&self, key: ()) -> usize;
}
@@ -121,5 +122,8 @@ fn transparent_invoke() {
"salsa_event(WillExecute { database_key: create_data_HelloWorldDatabase(Id(0)) })",
"salsa_event(WillCheckCancellation)",
"salsa_event(DidValidateMemoizedValue { database_key: create_data_HelloWorldDatabase(Id(0)) })",
+ "salsa_event(WillCheckCancellation)",
+ "salsa_event(WillExecute { database_key: transparent_and_invoke_length_shim(Id(800)) })",
+ "salsa_event(WillCheckCancellation)",
]"#]]);
}
diff --git a/crates/query-group-macro/tests/interned.rs b/crates/query-group-macro/tests/interned.rs
index be4fc4f52b..26ed316122 100644
--- a/crates/query-group-macro/tests/interned.rs
+++ b/crates/query-group-macro/tests/interned.rs
@@ -45,8 +45,6 @@ fn intern_with_query() {
db.assert_logs(expect![[r#"
[
"salsa_event(WillCheckCancellation)",
- "salsa_event(WillExecute { database_key: create_data_InternedDB(Id(400)) })",
- "salsa_event(WillCheckCancellation)",
- "salsa_event(WillExecute { database_key: interned_len_shim(Id(c00)) })",
+ "salsa_event(WillExecute { database_key: interned_len_shim(Id(0)) })",
]"#]]);
}
diff --git a/crates/query-group-macro/tests/lru.rs b/crates/query-group-macro/tests/lru.rs
index d2eee42ba8..f56dd5c2f9 100644
--- a/crates/query-group-macro/tests/lru.rs
+++ b/crates/query-group-macro/tests/lru.rs
@@ -11,10 +11,11 @@ pub trait LruDB: salsa::Database {
fn input_string(&self) -> String;
#[salsa::lru(16)]
+ #[salsa::invoke_interned(length_query)]
fn length_query(&self, key: ()) -> usize;
#[salsa::lru(16)]
- #[salsa::invoke(invoked_query)]
+ #[salsa::invoke_interned(invoked_query)]
fn length_query_invoke(&self, key: ()) -> usize;
}
diff --git a/crates/query-group-macro/tests/multiple_dbs.rs b/crates/query-group-macro/tests/multiple_dbs.rs
index 802077bfd3..f36e7fdbeb 100644
--- a/crates/query-group-macro/tests/multiple_dbs.rs
+++ b/crates/query-group-macro/tests/multiple_dbs.rs
@@ -6,11 +6,13 @@ pub trait DatabaseOne: salsa::Database {
fn input_string(&self) -> String;
// unadorned query
+ #[salsa::invoke_interned(length)]
fn length(&self, key: ()) -> usize;
}
#[query_group]
pub trait DatabaseTwo: DatabaseOne {
+ #[salsa::invoke_interned(second_length)]
fn second_length(&self, key: ()) -> usize;
}
diff --git a/crates/query-group-macro/tests/old_and_new.rs b/crates/query-group-macro/tests/old_and_new.rs
index 733b3793ae..a18b23a7d8 100644
--- a/crates/query-group-macro/tests/old_and_new.rs
+++ b/crates/query-group-macro/tests/old_and_new.rs
@@ -46,9 +46,7 @@ fn unadorned_query() {
db.assert_logs(expect![[r#"
[
"salsa_event(WillCheckCancellation)",
- "salsa_event(WillExecute { database_key: create_data_PartialMigrationDatabase(Id(400)) })",
- "salsa_event(WillCheckCancellation)",
- "salsa_event(WillExecute { database_key: length_query_shim(Id(c00)) })",
+ "salsa_event(WillExecute { database_key: length_query_shim(Id(0)) })",
]"#]]);
}
@@ -63,9 +61,7 @@ fn invoke_query() {
db.assert_logs(expect![[r#"
[
"salsa_event(WillCheckCancellation)",
- "salsa_event(WillExecute { database_key: create_data_PartialMigrationDatabase(Id(400)) })",
- "salsa_event(WillCheckCancellation)",
- "salsa_event(WillExecute { database_key: invoke_length_query_shim(Id(c00)) })",
+ "salsa_event(WillExecute { database_key: invoke_length_query_shim(Id(0)) })",
]"#]]);
}
@@ -81,9 +77,7 @@ fn invoke_tracked_query() {
db.assert_logs(expect![[r#"
[
"salsa_event(WillCheckCancellation)",
- "salsa_event(WillExecute { database_key: create_data_PartialMigrationDatabase(Id(400)) })",
- "salsa_event(WillCheckCancellation)",
- "salsa_event(WillExecute { database_key: invoke_length_tracked_shim(Id(c00)) })",
+ "salsa_event(WillExecute { database_key: invoke_length_tracked_shim(Id(0)) })",
"salsa_event(WillCheckCancellation)",
"salsa_event(WillExecute { database_key: invoke_length_tracked_actual(Id(0)) })",
]"#]]);
diff --git a/crates/query-group-macro/tests/result.rs b/crates/query-group-macro/tests/result.rs
index 51adde2a7e..06f7f403c7 100644
--- a/crates/query-group-macro/tests/result.rs
+++ b/crates/query-group-macro/tests/result.rs
@@ -12,8 +12,10 @@ pub trait ResultDatabase: salsa::Database {
#[salsa::input]
fn input_string(&self) -> String;
+ #[salsa::invoke_interned(length)]
fn length(&self, key: ()) -> Result<usize, Error>;
+ #[salsa::invoke_interned(length2)]
fn length2(&self, key: ()) -> Result<usize, Error>;
}
diff --git a/crates/query-group-macro/tests/supertrait.rs b/crates/query-group-macro/tests/supertrait.rs
index f7361eaa90..70073ac1de 100644
--- a/crates/query-group-macro/tests/supertrait.rs
+++ b/crates/query-group-macro/tests/supertrait.rs
@@ -8,6 +8,7 @@ pub trait SourceDb: salsa::Database {
#[query_group]
pub trait RootDb: SourceDb {
+ #[salsa::invoke_interned(parse)]
fn parse(&self, id: usize) -> String;
}
diff --git a/crates/query-group-macro/tests/tuples.rs b/crates/query-group-macro/tests/tuples.rs
index 01f225ee10..af0e852695 100644
--- a/crates/query-group-macro/tests/tuples.rs
+++ b/crates/query-group-macro/tests/tuples.rs
@@ -9,6 +9,7 @@ pub trait HelloWorldDatabase: salsa::Database {
#[salsa::input]
fn input_string(&self) -> String;
+ #[salsa::invoke_interned(length_query)]
fn length_query(&self, key: ()) -> (usize, usize);
}
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index ea832cab44..56940726c6 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -9,8 +9,7 @@ use std::{
use cfg::{CfgAtom, CfgDiff};
use hir::{
- Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, HirFileIdExt, ImportPathConfig,
- ModuleDef, Name,
+ Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, ImportPathConfig, ModuleDef, Name,
db::{DefDatabase, ExpandDatabase, HirDatabase},
};
use hir_def::{
@@ -142,7 +141,9 @@ impl flags::AnalysisStats {
if !source_root.is_library || self.with_deps {
let length = db.file_text(file_id).text(db).lines().count();
let item_stats = db
- .file_item_tree(EditionedFileId::current_edition(file_id).into())
+ .file_item_tree(
+ EditionedFileId::current_edition(db, file_id).into(),
+ )
.item_tree_stats()
.into();
@@ -152,7 +153,9 @@ impl flags::AnalysisStats {
} else {
let length = db.file_text(file_id).text(db).lines().count();
let item_stats = db
- .file_item_tree(EditionedFileId::current_edition(file_id).into())
+ .file_item_tree(
+ EditionedFileId::current_edition(db, file_id).into(),
+ )
.item_tree_stats()
.into();
@@ -203,7 +206,7 @@ impl flags::AnalysisStats {
let file_id = module.definition_source_file_id(db);
let file_id = file_id.original_file(db);
- let source_root = db.file_source_root(file_id.into()).source_root_id(db);
+ let source_root = db.file_source_root(file_id.file_id(db)).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
if !source_root.is_library || self.with_deps {
num_crates += 1;
@@ -457,6 +460,7 @@ impl flags::AnalysisStats {
let mut sw = self.stop_watch();
for &file_id in &file_ids {
+ let file_id = file_id.editioned_file_id(db);
let sema = hir::Semantics::new(db);
let display_target = match sema.first_crate(file_id.file_id()) {
Some(krate) => krate.to_display_target(sema.db),
@@ -756,7 +760,7 @@ impl flags::AnalysisStats {
};
if let Some(src) = source {
let original_file = src.file_id.original_file(db);
- let path = vfs.file_path(original_file.into());
+ let path = vfs.file_path(original_file.file_id(db));
let syntax_range = src.text_range();
format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
} else {
@@ -1069,7 +1073,7 @@ impl flags::AnalysisStats {
};
if let Some(src) = source {
let original_file = src.file_id.original_file(db);
- let path = vfs.file_path(original_file.into());
+ let path = vfs.file_path(original_file.file_id(db));
let syntax_range = src.text_range();
format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
} else {
@@ -1123,7 +1127,7 @@ impl flags::AnalysisStats {
term_search_borrowck: true,
},
ide::AssistResolveStrategy::All,
- file_id.into(),
+ analysis.editioned_file_id_to_vfs(file_id),
);
}
for &file_id in &file_ids {
@@ -1158,7 +1162,7 @@ impl flags::AnalysisStats {
fields_to_resolve: InlayFieldsToResolve::empty(),
range_exclusive_hints: true,
},
- file_id.into(),
+ analysis.editioned_file_id_to_vfs(file_id),
None,
);
}
@@ -1174,7 +1178,7 @@ impl flags::AnalysisStats {
annotate_enum_variant_references: false,
location: ide::AnnotationLocation::AboveName,
},
- file_id.into(),
+ analysis.editioned_file_id_to_vfs(file_id),
)
.unwrap()
.into_iter()
@@ -1199,8 +1203,8 @@ fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id:
let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range_rooted(db);
- let path = vfs.file_path(original_range.file_id.into());
- let line_index = db.line_index(original_range.file_id.into());
+ let path = vfs.file_path(original_range.file_id.file_id(db));
+ let line_index = db.line_index(original_range.file_id.file_id(db));
let text_range = original_range.range;
let (start, end) =
(line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
@@ -1215,8 +1219,8 @@ fn location_csv_pat(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, pat_id: Pa
let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range_rooted(db);
- let path = vfs.file_path(original_range.file_id.into());
- let line_index = db.line_index(original_range.file_id.into());
+ let path = vfs.file_path(original_range.file_id.file_id(db));
+ let line_index = db.line_index(original_range.file_id.file_id(db));
let text_range = original_range.range;
let (start, end) =
(line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
@@ -1234,8 +1238,8 @@ fn expr_syntax_range<'a>(
let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range_rooted(db);
- let path = vfs.file_path(original_range.file_id.into());
- let line_index = db.line_index(original_range.file_id.into());
+ let path = vfs.file_path(original_range.file_id.file_id(db));
+ let line_index = db.line_index(original_range.file_id.file_id(db));
let text_range = original_range.range;
let (start, end) =
(line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
@@ -1255,8 +1259,8 @@ fn pat_syntax_range<'a>(
let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range_rooted(db);
- let path = vfs.file_path(original_range.file_id.into());
- let line_index = db.line_index(original_range.file_id.into());
+ let path = vfs.file_path(original_range.file_id.file_id(db));
+ let line_index = db.line_index(original_range.file_id.file_id(db));
let text_range = original_range.range;
let (start, end) =
(line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs
index 4dc6abce04..7c4eeebdfa 100644
--- a/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -4,7 +4,7 @@
use project_model::{CargoConfig, RustLibSource};
use rustc_hash::FxHashSet;
-use hir::{Crate, HirFileIdExt, Module, db::HirDatabase, sym};
+use hir::{Crate, Module, db::HirDatabase, sym};
use ide::{AnalysisHost, AssistResolveStrategy, Diagnostic, DiagnosticsConfig, Severity};
use ide_db::{LineIndexDatabase, base_db::SourceDatabase};
use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace_at};
@@ -51,7 +51,7 @@ impl flags::Diagnostics {
let work = all_modules(db).into_iter().filter(|module| {
let file_id = module.definition_source_file_id(db).original_file(db);
- let source_root = db.file_source_root(file_id.into()).source_root_id(db);
+ let source_root = db.file_source_root(file_id.file_id(db)).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
!source_root.is_library
});
@@ -63,13 +63,13 @@ impl flags::Diagnostics {
module.krate().display_name(db).as_deref().unwrap_or(&sym::unknown).to_owned();
println!(
"processing crate: {crate_name}, module: {}",
- _vfs.file_path(file_id.into())
+ _vfs.file_path(file_id.file_id(db))
);
for diagnostic in analysis
.full_diagnostics(
&DiagnosticsConfig::test_sample(),
AssistResolveStrategy::None,
- file_id.into(),
+ file_id.file_id(db),
)
.unwrap()
{
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index d29678815f..2062294f80 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -514,12 +514,13 @@ mod test {
fn position(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (AnalysisHost, FilePosition) {
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(ra_fixture);
+ let change_fixture = ChangeFixture::parse(host.raw_database(), ra_fixture);
host.raw_database_mut().apply_change(change_fixture.change);
let (file_id, range_or_offset) =
change_fixture.file_position.expect("expected a marker ()");
let offset = range_or_offset.expect_offset();
- (host, FilePosition { file_id: file_id.into(), offset })
+ let position = FilePosition { file_id: file_id.file_id(host.raw_database()), offset };
+ (host, position)
}
/// If expected == "", then assert that there are no symbols (this is basically local symbol)
@@ -869,7 +870,7 @@ pub mod example_mod {
let s = "/// foo\nfn bar() {}";
let mut host = AnalysisHost::default();
- let change_fixture = ChangeFixture::parse(s);
+ let change_fixture = ChangeFixture::parse(host.raw_database(), s);
host.raw_database_mut().apply_change(change_fixture.change);
let analysis = host.analysis();
diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs
index 0f0faa879e..e3e3a143de 100644
--- a/crates/rust-analyzer/src/cli/ssr.rs
+++ b/crates/rust-analyzer/src/cli/ssr.rs
@@ -73,7 +73,7 @@ impl flags::Search {
let sr = db.source_root(root).source_root(db);
for file_id in sr.iter() {
for debug_info in match_finder.debug_where_text_equal(
- EditionedFileId::current_edition(file_id),
+ EditionedFileId::current_edition(db, file_id),
debug_snippet,
) {
println!("{debug_info:#?}");
diff --git a/crates/rust-analyzer/src/cli/unresolved_references.rs b/crates/rust-analyzer/src/cli/unresolved_references.rs
index 0e4b763117..bca7c8a098 100644
--- a/crates/rust-analyzer/src/cli/unresolved_references.rs
+++ b/crates/rust-analyzer/src/cli/unresolved_references.rs
@@ -1,10 +1,8 @@
//! Reports references in code that the IDE layer cannot resolve.
-use hir::{AnyDiagnostic, Crate, HirFileIdExt as _, Module, Semantics, db::HirDatabase, sym};
+use hir::{AnyDiagnostic, Crate, Module, Semantics, db::HirDatabase, sym};
use ide::{AnalysisHost, RootDatabase, TextRange};
use ide_db::{
- EditionedFileId, FxHashSet, LineIndexDatabase as _,
- base_db::{SourceDatabase, salsa::AsDynDatabase},
- defs::NameRefClass,
+ EditionedFileId, FxHashSet, LineIndexDatabase as _, base_db::SourceDatabase, defs::NameRefClass,
};
use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace_at};
use parser::SyntaxKind;
@@ -57,23 +55,24 @@ impl flags::UnresolvedReferences {
let work = all_modules(db).into_iter().filter(|module| {
let file_id = module.definition_source_file_id(db).original_file(db);
- let source_root = db.file_source_root(file_id.into()).source_root_id(db);
+ let source_root = db.file_source_root(file_id.file_id(db)).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
!source_root.is_library
});
for module in work {
let file_id = module.definition_source_file_id(db).original_file(db);
+ let file_id = file_id.file_id(db);
if !visited_files.contains(&file_id) {
let crate_name =
module.krate().display_name(db).as_deref().unwrap_or(&sym::unknown).to_owned();
- let file_path = vfs.file_path(file_id.into());
+ let file_path = vfs.file_path(file_id);
eprintln!("processing crate: {crate_name}, module: {file_path}",);
- let line_index = db.line_index(file_id.into());
- let file_text = db.file_text(file_id.into());
+ let line_index = db.line_index(file_id);
+ let file_text = db.file_text(file_id);
- for range in find_unresolved_references(db, &sema, file_id.into(), &module) {
+ for range in find_unresolved_references(db, &sema, file_id, &module) {
let line_col = line_index.line_col(range.start());
let line = line_col.line + 1;
let col = line_col.col + 1;
@@ -124,7 +123,7 @@ fn find_unresolved_references(
let node = inactive_code.node;
let range = node.map(|it| it.text_range()).original_node_file_range_rooted(db);
- if range.file_id != file_id {
+ if range.file_id.file_id(db) != file_id {
continue;
}
@@ -140,10 +139,8 @@ fn all_unresolved_references(
) -> Vec<TextRange> {
let file_id = sema
.attach_first_edition(file_id)
- .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
- let editioned_file_id_wrapper =
- ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
- let file = sema.parse(editioned_file_id_wrapper);
+ .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
+ let file = sema.parse(file_id);
let root = file.syntax();
let mut unresolved_references = Vec::new();
diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs
index f3f6d80ad2..67f49928f8 100644
--- a/crates/span/src/lib.rs
+++ b/crates/span/src/lib.rs
@@ -183,7 +183,7 @@ impl EditionedFileId {
#[cfg(not(feature = "salsa"))]
mod salsa {
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
- pub(crate) struct Id(u32);
+ pub struct Id(u32);
impl Id {
pub(crate) const fn from_u32(u32: u32) -> Self {
@@ -210,32 +210,11 @@ mod salsa {
/// (`MacroCallId` uses the location interning. You can check details here:
/// <https://en.wikipedia.org/wiki/String_interning>).
///
-/// The two variants are encoded in a single u32 which are differentiated by the MSB.
-/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
-/// `MacroCallId`.
+/// Internally this holds a `salsa::Id`, but we cannot use this definition here
+/// as it references things from base-db and hir-expand.
// FIXME: Give this a better fitting name
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct HirFileId(salsa::Id);
-
-#[cfg(feature = "salsa")]
-impl salsa::plumbing::AsId for HirFileId {
- fn as_id(&self) -> salsa::Id {
- self.0
- }
-}
-
-#[cfg(feature = "salsa")]
-impl salsa::plumbing::FromId for HirFileId {
- fn from_id(id: salsa::Id) -> Self {
- HirFileId(id)
- }
-}
-
-impl From<HirFileId> for u32 {
- fn from(value: HirFileId) -> Self {
- value.0.as_u32()
- }
-}
+pub struct HirFileId(pub salsa::Id);
impl From<MacroCallId> for HirFileId {
fn from(value: MacroCallId) -> Self {
@@ -260,27 +239,6 @@ impl PartialEq<HirFileId> for FileId {
}
}
-impl PartialEq<EditionedFileId> for HirFileId {
- fn eq(&self, &other: &EditionedFileId) -> bool {
- *self == HirFileId::from(other)
- }
-}
-impl PartialEq<HirFileId> for EditionedFileId {
- fn eq(&self, &other: &HirFileId) -> bool {
- other == HirFileId::from(*self)
- }
-}
-impl PartialEq<EditionedFileId> for FileId {
- fn eq(&self, &other: &EditionedFileId) -> bool {
- *self == FileId::from(other)
- }
-}
-impl PartialEq<FileId> for EditionedFileId {
- fn eq(&self, &other: &FileId) -> bool {
- other == FileId::from(*self)
- }
-}
-
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroFileId {
pub macro_call_id: MacroCallId,
@@ -289,21 +247,7 @@ pub struct MacroFileId {
/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct MacroCallId(salsa::Id);
-
-#[cfg(feature = "salsa")]
-impl salsa::plumbing::AsId for MacroCallId {
- fn as_id(&self) -> salsa::Id {
- self.0
- }
-}
-
-#[cfg(feature = "salsa")]
-impl salsa::plumbing::FromId for MacroCallId {
- fn from_id(id: salsa::Id) -> Self {
- MacroCallId(id)
- }
-}
+pub struct MacroCallId(pub salsa::Id);
impl MacroCallId {
pub const MAX_ID: u32 = 0x7fff_ffff;
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs
index 059397ba4b..2f379d419e 100644
--- a/crates/test-fixture/src/lib.rs
+++ b/crates/test-fixture/src/lib.rs
@@ -4,11 +4,11 @@ use std::{mem, str::FromStr, sync};
use base_db::{
Crate, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData,
DependencyBuilder, Env, FileChange, FileSet, LangCrateOrigin, SourceDatabase, SourceRoot,
- Version, VfsPath,
+ Version, VfsPath, salsa,
};
use cfg::CfgOptions;
use hir_expand::{
- FileRange,
+ EditionedFileId, FileRange,
change::ChangeWithProcMacros,
db::ExpandDatabase,
files::FilePosition,
@@ -21,7 +21,7 @@ use hir_expand::{
use intern::{Symbol, sym};
use paths::AbsPathBuf;
use rustc_hash::FxHashMap;
-use span::{Edition, EditionedFileId, FileId, Span};
+use span::{Edition, FileId, Span};
use stdx::itertools::Itertools;
use test_utils::{
CURSOR_MARKER, ESCAPED_CURSOR_MARKER, Fixture, FixtureWithProjectMeta, RangeOrOffset,
@@ -36,8 +36,8 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
fn with_single_file(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Self, EditionedFileId) {
- let fixture = ChangeFixture::parse(ra_fixture);
let mut db = Self::default();
+ let fixture = ChangeFixture::parse(&db, ra_fixture);
fixture.change.apply(&mut db);
assert_eq!(fixture.files.len(), 1, "Multiple file found in the fixture");
(db, fixture.files[0])
@@ -47,8 +47,8 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
fn with_many_files(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Self, Vec<EditionedFileId>) {
- let fixture = ChangeFixture::parse(ra_fixture);
let mut db = Self::default();
+ let fixture = ChangeFixture::parse(&db, ra_fixture);
fixture.change.apply(&mut db);
assert!(fixture.file_position.is_none());
(db, fixture.files)
@@ -56,8 +56,8 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
#[track_caller]
fn with_files(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> Self {
- let fixture = ChangeFixture::parse(ra_fixture);
let mut db = Self::default();
+ let fixture = ChangeFixture::parse(&db, ra_fixture);
fixture.change.apply(&mut db);
assert!(fixture.file_position.is_none());
db
@@ -68,8 +68,8 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
#[rust_analyzer::rust_fixture] ra_fixture: &str,
proc_macros: Vec<(String, ProcMacro)>,
) -> Self {
- let fixture = ChangeFixture::parse_with_proc_macros(ra_fixture, proc_macros);
let mut db = Self::default();
+ let fixture = ChangeFixture::parse_with_proc_macros(&db, ra_fixture, proc_macros);
fixture.change.apply(&mut db);
assert!(fixture.file_position.is_none());
db
@@ -93,8 +93,8 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
fn with_range_or_offset(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (Self, EditionedFileId, RangeOrOffset) {
- let fixture = ChangeFixture::parse(ra_fixture);
let mut db = Self::default();
+ let fixture = ChangeFixture::parse(&db, ra_fixture);
fixture.change.apply(&mut db);
let (file_id, range_or_offset) = fixture
@@ -119,11 +119,15 @@ pub struct ChangeFixture {
const SOURCE_ROOT_PREFIX: &str = "/";
impl ChangeFixture {
- pub fn parse(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> ChangeFixture {
- Self::parse_with_proc_macros(ra_fixture, Vec::new())
+ pub fn parse(
+ db: &dyn salsa::Database,
+ #[rust_analyzer::rust_fixture] ra_fixture: &str,
+ ) -> ChangeFixture {
+ Self::parse_with_proc_macros(db, ra_fixture, Vec::new())
}
pub fn parse_with_proc_macros(
+ db: &dyn salsa::Database,
#[rust_analyzer::rust_fixture] ra_fixture: &str,
mut proc_macro_defs: Vec<(String, ProcMacro)>,
) -> ChangeFixture {
@@ -184,7 +188,7 @@ impl ChangeFixture {
let meta = FileMeta::from_fixture(entry, current_source_root_kind);
if let Some(range_or_offset) = range_or_offset {
file_position =
- Some((EditionedFileId::new(file_id, meta.edition), range_or_offset));
+ Some((EditionedFileId::new(db, file_id, meta.edition), range_or_offset));
}
assert!(meta.path.starts_with(SOURCE_ROOT_PREFIX));
@@ -241,7 +245,7 @@ impl ChangeFixture {
source_change.change_file(file_id, Some(text));
let path = VfsPath::new_virtual_path(meta.path);
file_set.insert(file_id, path);
- files.push(EditionedFileId::new(file_id, meta.edition));
+ files.push(EditionedFileId::new(db, file_id, meta.edition));
file_id = FileId::from_raw(file_id.index() + 1);
}