//! See `Semantics`. mod child_by_source; mod source_to_def; use std::{ cell::RefCell, convert::Infallible, fmt, iter, mem, ops::{self, ControlFlow, Not}, }; use base_db::FxIndexSet; use either::Either; use hir_def::{ BuiltinDeriveImplId, DefWithBodyId, HasModule, MacroId, StructId, TraitId, VariantId, attrs::parse_extra_crate_attrs, expr_store::{Body, ExprOrPatSource, HygieneId, path::Path}, hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat}, nameres::{ModuleOrigin, crate_def_map}, resolver::{self, HasResolver, Resolver, TypeNs, ValueNs}, type_ref::Mutability, }; use hir_expand::{ EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId, builtin::{BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, files::{FileRangeWrapper, HirFileRange, InRealFile}, mod_path::{ModPath, PathKind}, name::AsName, }; use hir_ty::{ InferenceResult, diagnostics::{unsafe_operations, unsafe_operations_for_body}, infer_query_with_inspect, next_solver::{ AnyImplId, DbInterner, Span, format_proof_tree::{ProofTreeData, dump_proof_tree_structured}, }, }; use intern::{Interned, Symbol, sym}; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use rustc_type_ir::inherent::Span as _; use smallvec::{SmallVec, smallvec}; use span::{FileId, SyntaxContext}; use stdx::{TupleExt, always}; use syntax::{ AstNode, AstToken, Direction, SmolStr, SmolStrBuilder, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, T, TextRange, TextSize, algo::skip_trivia_token, ast::{self, HasAttrs as _, HasGenericParams}, }; use crate::{ Adjust, Adjustment, Adt, AnyFunctionId, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam, Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource, Impl, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef, Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait, TupleField, Type, TypeAlias, TypeParam, Union, Variant, VariantDef, 
db::HirDatabase, semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, source_analyzer::{SourceAnalyzer, resolve_hir_path}, }; const CONTINUE_NO_BREAKS: ControlFlow = ControlFlow::Continue(()); #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum PathResolution { /// An item Def(ModuleDef), /// A local binding (only value namespace) Local(Local), /// A type parameter TypeParam(TypeParam), /// A const parameter ConstParam(ConstParam), SelfType(Impl), BuiltinAttr(BuiltinAttr), ToolModule(ToolModule), DeriveHelper(DeriveHelper), } impl PathResolution { pub(crate) fn in_type_ns(&self) -> Option { match self { PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())), PathResolution::Def(ModuleDef::BuiltinType(builtin)) => { Some(TypeNs::BuiltinType((*builtin).into())) } PathResolution::Def( ModuleDef::Const(_) | ModuleDef::Variant(_) | ModuleDef::Macro(_) | ModuleDef::Function(_) | ModuleDef::Module(_) | ModuleDef::Static(_) | ModuleDef::Trait(_), ) => None, PathResolution::Def(ModuleDef::TypeAlias(alias)) => { Some(TypeNs::TypeAliasId((*alias).into())) } PathResolution::BuiltinAttr(_) | PathResolution::ToolModule(_) | PathResolution::Local(_) | PathResolution::DeriveHelper(_) | PathResolution::ConstParam(_) => None, PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())), PathResolution::SelfType(impl_def) => match impl_def.id { AnyImplId::ImplId(id) => Some(TypeNs::SelfType(id)), AnyImplId::BuiltinDeriveImplId(_) => None, }, } } } #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct PathResolutionPerNs { pub type_ns: Option, pub value_ns: Option, pub macro_ns: Option, } impl PathResolutionPerNs { pub fn new( type_ns: Option, value_ns: Option, macro_ns: Option, ) -> Self { PathResolutionPerNs { type_ns, value_ns, macro_ns } } pub fn any(&self) -> Option { self.type_ns.or(self.value_ns).or(self.macro_ns) } } #[derive(Debug)] pub struct TypeInfo<'db> { /// The original type of the expression or 
pattern. pub original: Type<'db>, /// The adjusted type, if an adjustment happened. pub adjusted: Option>, } impl<'db> TypeInfo<'db> { pub fn original(self) -> Type<'db> { self.original } pub fn has_adjustment(&self) -> bool { self.adjusted.is_some() } /// The adjusted type, or the original in case no adjustments occurred. pub fn adjusted(self) -> Type<'db> { self.adjusted.unwrap_or(self.original) } } /// Primary API to get semantic information, like types, from syntax trees. pub struct Semantics<'db, DB: ?Sized> { pub db: &'db DB, imp: SemanticsImpl<'db>, } pub struct SemanticsImpl<'db> { pub db: &'db dyn HirDatabase, s2d_cache: RefCell, /// MacroCall to its expansion's MacroCallId cache macro_call_cache: RefCell, MacroCallId>>, } impl fmt::Debug for Semantics<'_, DB> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Semantics {{ ... }}") } } impl<'db, DB: ?Sized> ops::Deref for Semantics<'db, DB> { type Target = SemanticsImpl<'db>; fn deref(&self) -> &Self::Target { &self.imp } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum LintAttr { Allow, Expect, Warn, Deny, Forbid, } // Note: while this variant of `Semantics<'_, _>` might seem unused, as it does not // find actual use within the rust-analyzer project itself, it exists to enable the use // within e.g. tracked salsa functions in third-party crates that build upon `ra_ap_hir`. impl Semantics<'_, dyn HirDatabase> { /// Creates an instance that's weakly coupled to its underlying database type. pub fn new_dyn(db: &'_ dyn HirDatabase) -> Semantics<'_, dyn HirDatabase> { let impl_ = SemanticsImpl::new(db); Semantics { db, imp: impl_ } } } impl Semantics<'_, DB> { /// Creates an instance that's strongly coupled to its underlying database type. 
pub fn new(db: &DB) -> Semantics<'_, DB> { let impl_ = SemanticsImpl::new(db); Semantics { db, imp: impl_ } } } // Note: We take `DB` as `?Sized` here in order to support type-erased // use of `Semantics` via `Semantics<'_, dyn HirDatabase>`: impl Semantics<'_, DB> { pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId { self.imp.find_file(syntax_node).file_id } pub fn token_ancestors_with_macros( &self, token: SyntaxToken, ) -> impl Iterator + '_ { token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it)) } /// Find an AstNode by offset inside SyntaxNode, if it is inside *Macrofile*, /// search up until it is of the target AstNode type pub fn find_node_at_offset_with_macros( &self, node: &SyntaxNode, offset: TextSize, ) -> Option { self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast) } /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*, /// descend it and find again // FIXME: Rethink this API pub fn find_node_at_offset_with_descend( &self, node: &SyntaxNode, offset: TextSize, ) -> Option { self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast) } /// Find an AstNode by offset inside SyntaxNode, if it is inside an attribute macro call, /// descend it and find again // FIXME: Rethink this API pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>( &'slf self, node: &SyntaxNode, offset: TextSize, ) -> impl Iterator + 'slf { self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast)) } // FIXME: Rethink this API pub fn find_namelike_at_offset_with_descend<'slf>( &'slf self, node: &SyntaxNode, offset: TextSize, ) -> impl Iterator + 'slf { node.token_at_offset(offset) .map(move |token| self.descend_into_macros_no_opaque(token, true)) .map(|descendants| descendants.into_iter().filter_map(move |it| it.value.parent())) // re-order the tokens from token_at_offset by returning the ancestors with the smaller first nodes first // See 
algo::ancestors_at_offset, which uses the same approach .kmerge_by(|left, right| left.text_range().len().lt(&right.text_range().len())) .filter_map(ast::NameLike::cast) } pub fn lint_attrs( &self, file_id: FileId, krate: Crate, item: ast::AnyHasAttrs, ) -> impl DoubleEndedIterator { let mut cfg_options = None; let cfg_options = || *cfg_options.get_or_insert_with(|| krate.id.cfg_options(self.db)); let is_crate_root = file_id == krate.root_file(self.imp.db); let is_source_file = ast::SourceFile::can_cast(item.syntax().kind()); let extra_crate_attrs = (is_crate_root && is_source_file) .then(|| { parse_extra_crate_attrs(self.imp.db, krate.id) .into_iter() .flat_map(|src| src.attrs()) }) .into_iter() .flatten(); let mut result = Vec::new(); hir_expand::attrs::expand_cfg_attr::( extra_crate_attrs.chain(ast::attrs_including_inner(&item)), cfg_options, |attr, _, _, _| { let hir_expand::attrs::Meta::TokenTree { path, tt } = attr else { return ControlFlow::Continue(()); }; if path.segments.len() != 1 { return ControlFlow::Continue(()); } let lint_attr = match path.segments[0].text() { "allow" => LintAttr::Allow, "expect" => LintAttr::Expect, "warn" => LintAttr::Warn, "deny" => LintAttr::Deny, "forbid" => LintAttr::Forbid, _ => return ControlFlow::Continue(()), }; let mut lint = SmolStrBuilder::new(); for token in tt.syntax().children_with_tokens().filter_map(SyntaxElement::into_token) { match token.kind() { T![:] | T![::] => lint.push_str(token.text()), kind if kind.is_any_identifier() => lint.push_str(token.text()), T![,] => { let lint = mem::replace(&mut lint, SmolStrBuilder::new()).finish(); if !lint.is_empty() { result.push((lint_attr, lint)); } } _ => {} } } let lint = lint.finish(); if !lint.is_empty() { result.push((lint_attr, lint)); } ControlFlow::Continue(()) }, ); result.into_iter() } pub fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option { self.imp.resolve_range_pat(range_pat).map(Struct::from) } pub fn resolve_range_expr(&self, range_expr: 
&ast::RangeExpr) -> Option { self.imp.resolve_range_expr(range_expr).map(Struct::from) } pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option { self.imp.resolve_await_to_poll(await_expr) } pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option { self.imp.resolve_prefix_expr(prefix_expr) } pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option { self.imp.resolve_index_expr(index_expr) } pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option { self.imp.resolve_bin_expr(bin_expr) } pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option { self.imp.resolve_try_expr(try_expr) } pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option { self.imp.resolve_variant(record_lit).map(VariantDef::from) } pub fn file_to_module_def(&self, file: impl Into) -> Option { self.imp.file_to_module_defs(file.into()).next() } pub fn file_to_module_defs(&self, file: impl Into) -> impl Iterator { self.imp.file_to_module_defs(file.into()) } pub fn hir_file_to_module_def(&self, file: impl Into) -> Option { self.imp.hir_file_to_module_defs(file.into()).next() } pub fn hir_file_to_module_defs( &self, file: impl Into, ) -> impl Iterator { self.imp.hir_file_to_module_defs(file.into()) } pub fn is_nightly(&self, krate: Crate) -> bool { let toolchain = self.db.toolchain_channel(krate.into()); // `toolchain == None` means we're in some detached files. Since we have no information on // the toolchain being used, let's just allow unstable items to be listed. 
matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None) } pub fn to_adt_def(&self, a: &ast::Adt) -> Option { self.imp.to_def(a) } pub fn to_const_def(&self, c: &ast::Const) -> Option { self.imp.to_def(c) } pub fn to_enum_def(&self, e: &ast::Enum) -> Option { self.imp.to_def(e) } pub fn to_enum_variant_def(&self, v: &ast::Variant) -> Option { self.imp.to_def(v) } pub fn to_fn_def(&self, f: &ast::Fn) -> Option { self.imp.to_def(f) } pub fn to_impl_def(&self, i: &ast::Impl) -> Option { self.imp.to_def(i) } pub fn to_macro_def(&self, m: &ast::Macro) -> Option { self.imp.to_def(m) } pub fn to_module_def(&self, m: &ast::Module) -> Option { self.imp.to_def(m) } pub fn to_static_def(&self, s: &ast::Static) -> Option { self.imp.to_def(s) } pub fn to_struct_def(&self, s: &ast::Struct) -> Option { self.imp.to_def(s) } pub fn to_trait_def(&self, t: &ast::Trait) -> Option { self.imp.to_def(t) } pub fn to_type_alias_def(&self, t: &ast::TypeAlias) -> Option { self.imp.to_def(t) } pub fn to_union_def(&self, u: &ast::Union) -> Option { self.imp.to_def(u) } } impl<'db> SemanticsImpl<'db> { fn new(db: &'db dyn HirDatabase) -> Self { SemanticsImpl { db, s2d_cache: Default::default(), macro_call_cache: Default::default() } } pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile { let hir_file_id = file_id.into(); let tree = self.db.parse(file_id).tree(); self.cache(tree.syntax().clone(), hir_file_id); tree } /// If not crate is found for the file, try to return the last crate in topological order. 
pub fn first_crate(&self, file: FileId) -> Option { match self.file_to_module_defs(file).next() { Some(module) => Some(module.krate(self.db)), None => self.db.all_crates().last().copied().map(Into::into), } } pub fn attach_first_edition_opt(&self, file: FileId) -> Option { let krate = self.file_to_module_defs(file).next()?.krate(self.db); Some(EditionedFileId::new(self.db, file, krate.edition(self.db), krate.id)) } pub fn attach_first_edition(&self, file: FileId) -> EditionedFileId { self.attach_first_edition_opt(file) .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(self.db, file)) } pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile { let file_id = self.attach_first_edition(file_id); let tree = self.db.parse(file_id).tree(); self.cache(tree.syntax().clone(), file_id.into()); tree } pub fn adjust_edition(&self, file_id: HirFileId) -> HirFileId { if let Some(editioned_file_id) = file_id.file_id() { self.attach_first_edition_opt(editioned_file_id.file_id(self.db)) .map_or(file_id, Into::into) } else { file_id } } pub fn find_parent_file(&self, file_id: HirFileId) -> Option> { match file_id { HirFileId::FileId(file_id) => { let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?; let def_map = crate_def_map(self.db, module.krate(self.db).id); match def_map[module.id].origin { ModuleOrigin::CrateRoot { .. } => None, ModuleOrigin::File { declaration, declaration_tree_id, .. 
} => { let file_id = declaration_tree_id.file_id(); let in_file = InFile::new(file_id, declaration); let node = in_file.to_node(self.db); let root = find_root(node.syntax()); self.cache(root, file_id); Some(in_file.with_value(node.syntax().clone())) } _ => unreachable!("FileId can only belong to a file module"), } } HirFileId::MacroFile(macro_file) => { let node = self.db.lookup_intern_macro_call(macro_file).to_node(self.db); let root = find_root(&node.value); self.cache(root, node.file_id); Some(node) } } } /// Returns the `SyntaxNode` of the module. If this is a file module, returns /// the `SyntaxNode` of the *definition* file, not of the *declaration*. pub fn module_definition_node(&self, module: Module) -> InFile { let def_map = module.id.def_map(self.db); let definition = def_map[module.id].origin.definition_source(self.db); let definition = definition.map(|it| it.node()); let root_node = find_root(&definition.value); self.cache(root_node, definition.file_id); definition } pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode { let node = self.db.parse_or_expand(file_id); self.cache(node.clone(), file_id); node } pub fn expand(&self, file_id: MacroCallId) -> ExpandResult { let res = self.db.parse_macro_expansion(file_id).map(|it| it.0.syntax_node()); self.cache(res.value.clone(), file_id.into()); res } pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option> { let file_id = self.to_def(macro_call)?; let node = self.parse_or_expand(file_id.into()); Some(InFile::new(file_id.into(), node)) } pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option { let file_id = self.find_file(attr.syntax()).file_id; let krate = match file_id { HirFileId::FileId(file_id) => { self.file_to_module_defs(file_id.file_id(self.db)).next()?.krate(self.db).id } HirFileId::MacroFile(macro_file) => self.db.lookup_intern_macro_call(macro_file).krate, }; hir_expand::check_cfg_attr_value(self.db, attr, krate) } /// Expands the macro if it isn't one of the 
built-in ones that expand to custom syntax or dummy /// expansions. pub fn expand_allowed_builtins( &self, macro_call: &ast::MacroCall, ) -> Option> { let file_id = self.to_def(macro_call)?; let macro_call = self.db.lookup_intern_macro_call(file_id); let skip = matches!( macro_call.def.kind, hir_expand::MacroDefKind::BuiltIn( _, BuiltinFnLikeExpander::Column | BuiltinFnLikeExpander::File | BuiltinFnLikeExpander::ModulePath | BuiltinFnLikeExpander::Asm | BuiltinFnLikeExpander::GlobalAsm | BuiltinFnLikeExpander::NakedAsm | BuiltinFnLikeExpander::LogSyntax | BuiltinFnLikeExpander::TraceMacros | BuiltinFnLikeExpander::FormatArgs | BuiltinFnLikeExpander::FormatArgsNl | BuiltinFnLikeExpander::ConstFormatArgs, ) | hir_expand::MacroDefKind::BuiltInEager(_, EagerExpander::CompileError) ); if skip { // these macros expand to custom builtin syntax and/or dummy things, no point in // showing these to the user return None; } let node = self.expand(file_id); Some(node) } /// If `item` has an attribute macro attached to it, expands it. 
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option>> { let src = self.wrap_node_infile(item.clone()); let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?; Some(self.expand(macro_call_id).map(|it| InFile::new(macro_call_id.into(), it))) } pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option { let adt = attr.syntax().parent().and_then(ast::Adt::cast)?; let src = self.wrap_node_infile(attr.clone()); let call_id = self.with_ctx(|ctx| { ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it) })?; Some(self.parse_or_expand(call_id.into())) } pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option>> { let calls = self.derive_macro_calls(attr)?; self.with_ctx(|ctx| { Some( calls .into_iter() .map(|call| { let call = call?; match call { Either::Left(call) => { macro_call_to_macro_id(ctx, call).map(|id| Macro { id }) } Either::Right(call) => { let call = call.loc(self.db); let krate = call.krate(self.db); let lang_items = hir_def::lang_item::lang_items(self.db, krate); call.trait_.derive_macro(lang_items).map(|id| Macro { id }) } } }) .collect(), ) }) } pub fn expand_derive_macro( &self, attr: &ast::Attr, ) -> Option>>> { let res: Vec<_> = self .derive_macro_calls(attr)? 
.into_iter() .map(|call| { let file_id = call?.left()?; let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id); let root_node = value.0.syntax_node(); self.cache(root_node.clone(), file_id.into()); Some(ExpandResult { value: root_node, err }) }) .collect(); Some(res) } fn derive_macro_calls( &self, attr: &ast::Attr, ) -> Option>>> { let adt = attr.syntax().parent().and_then(ast::Adt::cast)?; let file_id = self.find_file(adt.syntax()).file_id; let adt = InFile::new(file_id, &adt); let src = InFile::new(file_id, attr.clone()); self.with_ctx(|ctx| { let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?; Some(res.to_vec()) }) } pub fn is_derive_annotated(&self, adt: InFile<&ast::Adt>) -> bool { self.with_ctx(|ctx| ctx.file_of_adt_has_derives(adt)) } pub fn derive_helpers_in_scope(&self, adt: &ast::Adt) -> Option> { let sa = self.analyze_no_infer(adt.syntax())?; let id = self.db.ast_id_map(sa.file_id).ast_id(adt); let result = sa .resolver .def_map() .derive_helpers_in_scope(InFile::new(sa.file_id, id))? .iter() .map(|(name, macro_, _)| { let macro_name = Macro::from(*macro_).name(self.db).symbol().clone(); (name.symbol().clone(), macro_name) }) .collect(); Some(result) } pub fn derive_helper(&self, attr: &ast::Attr) -> Option> { let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it { ast::Item::Struct(it) => Some(ast::Adt::Struct(it)), ast::Item::Enum(it) => Some(ast::Adt::Enum(it)), ast::Item::Union(it) => Some(ast::Adt::Union(it)), _ => None, })?; let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name(); let sa = self.analyze_no_infer(adt.syntax())?; let id = self.db.ast_id_map(sa.file_id).ast_id(&adt); let res: Vec<_> = sa .resolver .def_map() .derive_helpers_in_scope(InFile::new(sa.file_id, id))? 
.iter() .filter(|&(name, _, _)| *name == attr_name) .filter_map(|&(_, macro_, call)| Some((macro_.into(), call.left()?))) .collect(); // FIXME: We filter our builtin derive "fake" expansions, is this correct? Should we still expose them somehow? res.is_empty().not().then_some(res) } pub fn is_attr_macro_call(&self, item: InFile<&ast::Item>) -> bool { self.with_ctx(|ctx| ctx.item_to_macro_call(item).is_some()) } /// Expand the macro call with a different token tree, mapping the `token_to_map` down into the /// expansion. `token_to_map` should be a token from the `speculative args` node. pub fn speculative_expand_macro_call( &self, actual_macro_call: &ast::MacroCall, speculative_args: &ast::TokenTree, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> { let macro_file = self.to_def(actual_macro_call)?; hir_expand::db::expand_speculative( self.db, macro_file, speculative_args.syntax(), token_to_map, ) } pub fn speculative_expand_raw( &self, macro_file: MacroCallId, speculative_args: &SyntaxNode, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> { hir_expand::db::expand_speculative(self.db, macro_file, speculative_args, token_to_map) } /// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the /// expansion. `token_to_map` should be a token from the `speculative args` node. 
pub fn speculative_expand_attr_macro( &self, actual_macro_call: &ast::Item, speculative_args: &ast::Item, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> { let macro_call = self.wrap_node_infile(actual_macro_call.clone()); let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?; hir_expand::db::expand_speculative( self.db, macro_call_id, speculative_args.syntax(), token_to_map, ) } pub fn speculative_expand_derive_as_pseudo_attr_macro( &self, actual_macro_call: &ast::Attr, speculative_args: &ast::Attr, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> { let attr = self.wrap_node_infile(actual_macro_call.clone()); let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?; let macro_call_id = self.with_ctx(|ctx| { ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it) })?; hir_expand::db::expand_speculative( self.db, macro_call_id, speculative_args.syntax(), token_to_map, ) } /// Checks if renaming `renamed` to `new_name` may introduce conflicts with other locals, /// and returns the conflicting locals. pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &Name) -> Vec { let body = self.db.body(to_be_renamed.parent); let resolver = to_be_renamed.parent.resolver(self.db); let starting_expr = body.binding_owner(to_be_renamed.binding_id).unwrap_or(body.body_expr); let mut visitor = RenameConflictsVisitor { body: &body, conflicts: FxHashSet::default(), db: self.db, new_name: new_name.symbol().clone(), old_name: to_be_renamed.name(self.db).symbol().clone(), owner: to_be_renamed.parent, to_be_renamed: to_be_renamed.binding_id, resolver, }; visitor.rename_conflicts(starting_expr); visitor .conflicts .into_iter() .map(|binding_id| Local { parent: to_be_renamed.parent, binding_id }) .collect() } /// Retrieves all the formatting parts of the format_args! (or `asm!`) template string. 
pub fn as_format_args_parts( &self, string: &ast::String, ) -> Option>)>> { let string_start = string.syntax().text_range().start(); let token = self.wrap_token_infile(string.syntax().clone()); self.descend_into_macros_breakable(token, |token, _| { (|| { let token = token.value; let string = ast::String::cast(token)?; let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; let parent = literal.parent()?; if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) { let source_analyzer = self.analyze_no_infer(format_args.syntax())?; let format_args = self.wrap_node_infile(format_args); let res = source_analyzer .as_format_args_parts(self.db, format_args.as_ref())? .map(|(range, res)| (range + string_start, res.map(Either::Left))) .collect(); Some(res) } else { let asm = ast::AsmExpr::cast(parent)?; let source_analyzer = self.analyze_no_infer(asm.syntax())?; let line = asm.template().position(|it| *it.syntax() == literal)?; let asm = self.wrap_node_infile(asm); let (owner, (expr, asm_parts)) = source_analyzer.as_asm_parts(asm.as_ref())?; let res = asm_parts .get(line)? .iter() .map(|&(range, index)| { ( range + string_start, Some(Either::Right(InlineAsmOperand { owner, expr, index })), ) }) .collect(); Some(res) } })() .map_or(ControlFlow::Continue(()), ControlFlow::Break) }) } /// Retrieves the formatting part of the format_args! template string at the given offset. /// // FIXME: Type the return type /// Returns the range (pre-expansion) in the string literal corresponding to the resolution, /// absolute file range (post-expansion) /// of the part in the format string (post-expansion), the corresponding string token and the resolution if it /// exists. 
// FIXME: Remove this in favor of `check_for_format_args_template_with_file` pub fn check_for_format_args_template( &self, original_token: SyntaxToken, offset: TextSize, ) -> Option<( TextRange, HirFileRange, ast::String, Option>, )> { let original_token = self.wrap_token_infile(original_token).map(ast::String::cast).transpose()?; self.check_for_format_args_template_with_file(original_token, offset) } /// Retrieves the formatting part of the format_args! template string at the given offset. /// // FIXME: Type the return type /// Returns the range (pre-expansion) in the string literal corresponding to the resolution, /// absolute file range (post-expansion) /// of the part in the format string, the corresponding string token and the resolution if it /// exists. pub fn check_for_format_args_template_with_file( &self, original_token: InFile, offset: TextSize, ) -> Option<( TextRange, HirFileRange, ast::String, Option>, )> { let relative_offset = offset.checked_sub(original_token.value.syntax().text_range().start())?; self.descend_into_macros_breakable( original_token.as_ref().map(|it| it.syntax().clone()), |token, _| { (|| { let token = token.map(ast::String::cast).transpose()?; self.resolve_offset_in_format_args(token.as_ref(), relative_offset).map( |(range, res)| { ( range + original_token.value.syntax().text_range().start(), HirFileRange { file_id: token.file_id, range: range + token.value.syntax().text_range().start(), }, token.value, res, ) }, ) })() .map_or(ControlFlow::Continue(()), ControlFlow::Break) }, ) } fn resolve_offset_in_format_args( &self, InFile { value: string, file_id }: InFile<&ast::String>, offset: TextSize, ) -> Option<(TextRange, Option>)> { debug_assert!(offset <= string.syntax().text_range().len()); let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; let parent = literal.parent()?; if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) { let source_analyzer = 
&self.analyze_impl(InFile::new(file_id, format_args.syntax()), None, false)?; source_analyzer .resolve_offset_in_format_args(self.db, InFile::new(file_id, &format_args), offset) .map(|(range, res)| (range, res.map(Either::Left))) } else { let asm = ast::AsmExpr::cast(parent)?; let source_analyzer = self.analyze_impl(InFile::new(file_id, asm.syntax()), None, false)?; let line = asm.template().position(|it| *it.syntax() == literal)?; source_analyzer .resolve_offset_in_asm_template(InFile::new(file_id, &asm), line, offset) .map(|(owner, (expr, range, index))| { (range, Some(Either::Right(InlineAsmOperand { owner, expr, index }))) }) } } pub fn debug_hir_at(&self, token: SyntaxToken) -> Option { self.analyze_no_infer(&token.parent()?).and_then(|it| { Some(match it.body_or_sig.as_ref()? { crate::source_analyzer::BodyOrSig::Body { def, body, .. } => { hir_def::expr_store::pretty::print_body_hir( self.db, body, *def, it.file_id.edition(self.db), ) } &crate::source_analyzer::BodyOrSig::VariantFields { def, .. } => { hir_def::expr_store::pretty::print_variant_body_hir( self.db, def, it.file_id.edition(self.db), ) } &crate::source_analyzer::BodyOrSig::Sig { def, .. } => { hir_def::expr_store::pretty::print_signature( self.db, def, it.file_id.edition(self.db), ) } }) }) } /// Descends the token into the include expansion, if its file is an included file. pub fn descend_token_into_include_expansion( &self, tok: InRealFile, ) -> InFile { let Some(include) = self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, tok.file_id) else { return tok.into(); }; let span = self.db.real_span_map(tok.file_id).span_for_range(tok.value.text_range()); let Some(InMacroFile { file_id, value: mut mapped_tokens }) = self.with_ctx(|ctx| { Some( ctx.cache .get_or_insert_expansion(ctx.db, include) .map_range_down(span)? 
.map(SmallVec::<[_; 2]>::from_iter), ) }) else { return tok.into(); }; // We should only get one result at most mapped_tokens.pop().map_or_else(|| tok.into(), |(tok, _)| InFile::new(file_id.into(), tok)) } /// Maps a node down by mapping its first and last token down. pub fn descend_node_into_attributes(&self, node: N) -> SmallVec<[N; 1]> { // This might not be the correct way to do this, but it works for now let mut res = smallvec![]; let tokens = (|| { // FIXME: the trivia skipping should not be necessary let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?; let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?; Some((first, last)) })(); let (first, last) = match tokens { Some(it) => it, None => return res, }; let file = self.find_file(node.syntax()); if first == last { // node is just the token, so descend the token self.descend_into_macros_all( InFile::new(file.file_id, first), false, &mut |InFile { value, .. }, _ctx| { if let Some(node) = value .parent_ancestors() .take_while(|it| it.text_range() == value.text_range()) .find_map(N::cast) { res.push(node) } }, ); } else { // Descend first and last token, then zip them to look for the node they belong to let mut scratch: SmallVec<[_; 1]> = smallvec![]; self.descend_into_macros_all( InFile::new(file.file_id, first), false, &mut |token, _ctx| scratch.push(token), ); let mut scratch = scratch.into_iter(); self.descend_into_macros_all( InFile::new(file.file_id, last), false, &mut |InFile { value: last, file_id: last_fid }, _ctx| { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() && first_fid == last_fid && let Some(p) = first.parent() { let range = first.text_range().cover(last.text_range()); let node = find_root(&p) .covering_element(range) .ancestors() .take_while(|it| it.text_range() == range) .find_map(N::cast); if let Some(node) = node { res.push(node); } } }, ); } res } /// Returns true if the given input is within a macro call. 
///
/// Note that if this token itself is within the context of a macro expansion does not matter.
/// That is, we strictly check if it lies inside the input of a macro call.
pub fn is_inside_macro_call(&self, token @ InFile { value, .. }: InFile<&SyntaxToken>) -> bool {
    // Walk the token's ancestors: an enclosing `MacroCall` node means we are inside a
    // macro input; otherwise check whether an enclosing item is itself the input of an
    // attribute macro, or (for ADTs) whether its file carries derives.
    value.parent_ancestors().any(|ancestor| {
        if ast::MacroCall::can_cast(ancestor.kind()) {
            return true;
        }
        let Some(item) = ast::Item::cast(ancestor) else {
            return false;
        };
        self.with_ctx(|ctx| {
            if ctx.item_to_macro_call(token.with_value(&item)).is_some() {
                return true;
            }
            // Only ADTs can carry `#[derive(..)]`; anything else cannot be a derive input.
            let adt = match item {
                ast::Item::Struct(it) => it.into(),
                ast::Item::Enum(it) => it.into(),
                ast::Item::Union(it) => it.into(),
                _ => return false,
            };
            ctx.file_of_adt_has_derives(token.with_value(&adt))
        })
    })
}

/// Descends the token into all macro expansions, invoking `cb` for every mapped token
/// together with its syntax context.
pub fn descend_into_macros_cb(
    &self,
    token: SyntaxToken,
    mut cb: impl FnMut(InFile, SyntaxContext),
) {
    self.descend_into_macros_all(self.wrap_token_infile(token), false, &mut |t, ctx| {
        cb(t, ctx)
    });
}

/// Descends the token into all macro expansions, collecting the mapped tokens.
/// Falls back to the input token itself when it maps into no expansion.
pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
    let mut res = smallvec![];
    self.descend_into_macros_all(
        self.wrap_token_infile(token.clone()),
        false,
        &mut |t, _ctx| res.push(t.value),
    );
    if res.is_empty() {
        res.push(token);
    }
    res
}

/// Like [`Self::descend_into_macros`], but skips any result whose syntax context
/// reports itself as opaque (`SyntaxContext::is_opaque`), and keeps file information.
pub fn descend_into_macros_no_opaque(
    &self,
    token: SyntaxToken,
    always_descend_into_derives: bool,
) -> SmallVec<[InFile; 1]> {
    let mut res = smallvec![];
    let token = self.wrap_token_infile(token);
    self.descend_into_macros_all(token.clone(), always_descend_into_derives, &mut |t, ctx| {
        if !ctx.is_opaque(self.db) {
            // Don't descend into opaque contexts
            res.push(t);
        }
    });
    // Nothing survived the descent: report the original token unchanged.
    if res.is_empty() {
        res.push(token);
    }
    res
}

/// Like [`Self::descend_into_macros_cb`], but the callback may stop the descent early
/// by returning `ControlFlow::Break`; the broken-out value (if any) is returned.
pub fn descend_into_macros_breakable(
    &self,
    token: InFile,
    mut cb: impl FnMut(InFile, SyntaxContext) -> ControlFlow,
) -> Option {
    self.descend_into_macros_impl(token, false, &mut cb)
}

/// Descends the token into expansions, returning the tokens that matches the input
/// token's [`SyntaxKind`] and text.
pub fn descend_into_macros_exact(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
    let mut r = smallvec![];
    let text = token.text();
    let kind = token.kind();
    self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }, ctx| {
        let mapped_kind = value.kind();
        // Any identifier-like kind is treated as matching any other identifier-like kind.
        let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
        // Besides kind/text equality, opaque-context results are rejected.
        let matches = (kind == mapped_kind || any_ident_match())
            && text == value.text()
            && !ctx.is_opaque(self.db);
        if matches {
            r.push(value);
        }
    });
    // No expansion produced a matching token: fall back to the input token itself.
    if r.is_empty() {
        r.push(token);
    }
    r
}

/// Descends the token into expansions, returning the tokens that matches the input
/// token's [`SyntaxKind`] and text.
pub fn descend_into_macros_exact_with_file(
    &self,
    token: SyntaxToken,
) -> SmallVec<[InFile; 1]> {
    // Same matching logic as [`Self::descend_into_macros_exact`], but the results keep
    // the file they were found in.
    let mut r = smallvec![];
    let text = token.text();
    let kind = token.kind();
    self.descend_into_macros_cb(token.clone(), |InFile { value, file_id }, ctx| {
        let mapped_kind = value.kind();
        let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
        let matches = (kind == mapped_kind || any_ident_match())
            && text == value.text()
            && !ctx.is_opaque(self.db);
        if matches {
            r.push(InFile { value, file_id });
        }
    });
    if r.is_empty() {
        r.push(self.wrap_token_infile(token));
    }
    r
}

/// Descends the token into expansions, returning the first token that matches the input
/// token's [`SyntaxKind`] and text.
pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken {
    let text = token.text();
    let kind = token.kind();
    self.descend_into_macros_breakable(
        self.wrap_token_infile(token.clone()),
        |InFile { value, file_id: _ }, _ctx| {
            let mapped_kind = value.kind();
            // Any identifier-like kind matches any other identifier-like kind.
            let any_ident_match =
                || kind.is_any_identifier() && value.kind().is_any_identifier();
            let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
            if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
        },
    )
    // No expansion produced a matching token: keep the input token.
    .unwrap_or(token)
}

/// Adapter over [`Self::descend_into_macros_impl`] for callbacks that never break out
/// of the descent.
fn descend_into_macros_all(
    &self,
    token: InFile,
    always_descend_into_derives: bool,
    f: &mut dyn FnMut(InFile, SyntaxContext),
) {
    self.descend_into_macros_impl(token, always_descend_into_derives, &mut |tok, ctx| {
        f(tok, ctx);
        CONTINUE_NO_BREAKS
    });
}

/// Worker for all `descend_into_macros_*` entry points.
///
/// Maintains a stack of `(file, tokens)` pairs still to process. For each token it
/// tries, in order: attribute macro invocations on enclosing items, derive expansions
/// (when `always_descend_into_derives`), function-like macro calls (including eager
/// expansion arguments), pseudo derive expansions, and derive helper attributes.
/// Tokens that could not be remapped into any expansion are reported to `f`
/// unchanged; returns `Some` with the callback's value as soon as `f` breaks.
fn descend_into_macros_impl(
    &self,
    InFile { value: token, file_id }: InFile,
    always_descend_into_derives: bool,
    f: &mut dyn FnMut(InFile, SyntaxContext) -> ControlFlow,
) -> Option {
    let _p = tracing::info_span!("descend_into_macros_impl").entered();
    let db = self.db;
    // The span of the input token is the sole thing used to map down into expansions.
    let span = db.span_map(file_id).span_for_range(token.text_range());

    // Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
    let process_expansion_for_token =
        |ctx: &mut SourceToDefCtx<'_, '_>, stack: &mut Vec<_>, macro_file| {
            let InMacroFile { file_id, value: mapped_tokens } = ctx
                .cache
                .get_or_insert_expansion(ctx.db, macro_file)
                .map_range_down(span)?
                .map(SmallVec::<[_; 2]>::from_iter);
            // we have found a mapping for the token if the vec is non-empty
            let res = mapped_tokens.is_empty().not().then_some(());
            // requeue the tokens we got from mapping our current token down
            stack.push((HirFileId::from(file_id), mapped_tokens));
            res
        };

    // A stack of tokens to process, along with the file they came from
    // These are tracked to know which macro calls we still have to look into
    // the tokens themselves aren't that interesting as the span that is being used to map
    // things down never changes.
    let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![];
    let include = file_id
        .file_id()
        .and_then(|file_id| self.s2d_cache.borrow_mut().get_or_insert_include_for(db, file_id));
    match include {
        Some(include) => {
            // include! inputs are always from real files, so they only need to be handled once upfront
            self.with_ctx(|ctx| process_expansion_for_token(ctx, &mut stack, include))?;
        }
        None => {
            stack.push((file_id, smallvec![(token, span.ctx)]));
        }
    }

    let mut m_cache = self.macro_call_cache.borrow_mut();

    // Filters out all tokens that contain the given range (usually the macro call), any such
    // token is redundant as the corresponding macro call has already been processed
    let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
        tokens.retain(|(t, _): &mut (SyntaxToken, _)| !range.contains_range(t.text_range()))
    };

    while let Some((expansion, ref mut tokens)) = stack.pop() {
        // Reverse the tokens so we prefer first tokens (to accommodate for popping from the
        // back)
        // alternatively we could pop from the front but that would shift the content on every pop
        tokens.reverse();
        while let Some((token, ctx)) = tokens.pop() {
            let was_not_remapped = (|| {
                // First expand into attribute invocations, this is required to be handled
                // upfront as any other macro call within will not semantically resolve unless
                // also descended.
                let res = self.with_ctx(|ctx| {
                    token
                        .parent_ancestors()
                        .filter_map(ast::Item::cast)
                        // FIXME: This might work incorrectly when we have a derive, followed by
                        // an attribute on an item, like:
                        // ```
                        // #[derive(Debug$0)]
                        // #[my_attr]
                        // struct MyStruct;
                        // ```
                        // here we should not consider the attribute at all, as our cursor
                        // technically lies outside of its expansion
                        .find_map(|item| {
                            // Don't force populate the dyn cache for items that don't have an attribute anyways
                            item.attrs().next()?;
                            ctx.item_to_macro_call(InFile::new(expansion, &item))
                                .zip(Some(item))
                        })
                        .map(|(call_id, item)| {
                            let item_range = item.syntax().text_range();
                            let loc = db.lookup_intern_macro_call(call_id);
                            let text_range = match loc.kind {
                                hir_expand::MacroCallKind::Attr {
                                    censored_attr_ids: attr_ids, ..
                                } => {
                                    // FIXME: here, the attribute's text range is used to strip away all
                                    // entries from the start of the attribute "list" up the invoking
                                    // attribute. But in
                                    // ```
                                    // mod foo {
                                    //     #![inner]
                                    // }
                                    // ```
                                    // we don't wanna strip away stuff in the `mod foo {` range, that is
                                    // here if the id corresponds to an inner attribute we got strip all
                                    // text ranges of the outer ones, and then all of the inner ones up
                                    // to the invoking attribute so that the inbetween is ignored.
                                    // FIXME: Should cfg_attr be handled differently?
                                    let (attr, _, _, _) = attr_ids
                                        .invoc_attr()
                                        .find_attr_range_with_source(db, loc.krate, &item);
                                    let start = attr.syntax().text_range().start();
                                    TextRange::new(start, item_range.end())
                                }
                                _ => item_range,
                            };
                            filter_duplicates(tokens, text_range);
                            process_expansion_for_token(ctx, &mut stack, call_id)
                        })
                });

                if let Some(res) = res {
                    return res;
                }

                if always_descend_into_derives {
                    let res = self.with_ctx(|ctx| {
                        let (derives, adt) = token
                            .parent_ancestors()
                            .filter_map(ast::Adt::cast)
                            .find_map(|adt| {
                                Some((
                                    ctx.derive_macro_calls(InFile::new(expansion, &adt))?
                                        .map(|(a, b, c)| (a, b, c.to_owned()))
                                        .collect::>(),
                                    adt,
                                ))
                            })?;
                        for (_, derive_attr, derives) in derives {
                            // as there may be multiple derives registering the same helper
                            // name, we gotta make sure to call this for all of them!
                            // FIXME: We need to call `f` for all of them as well though!
                            process_expansion_for_token(ctx, &mut stack, derive_attr);
                            for derive in derives.into_iter().flatten() {
                                let Either::Left(derive) = derive else { continue };
                                process_expansion_for_token(ctx, &mut stack, derive);
                            }
                        }
                        // remove all tokens that are within the derives expansion
                        filter_duplicates(tokens, adt.syntax().text_range());
                        Some(())
                    });
                    // if we found derives, we can early exit. There is no way we can be in any
                    // macro call at this point given we are not in a token tree
                    if let Some(()) = res {
                        // Note: derives do not remap the original token. Furthermore, we want
                        // the original token to be before the derives in the list, because if they
                        // upmap to the same token and we deduplicate them (e.g. in rename), we
                        // want the original token to remain, not the derive.
                        return None;
                    }
                }

                // Then check for token trees, that means we are either in a function-like macro or
                // secondary attribute inputs
                let tt = token
                    .parent_ancestors()
                    .map_while(Either::::cast)
                    .last()?;
                match tt {
                    // function-like macro call
                    Either::Left(tt) => {
                        let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
                        // Bail out when the token is one of the token tree's delimiters.
                        if tt.left_delimiter_token().map_or(false, |it| it == token) {
                            return None;
                        }
                        if tt.right_delimiter_token().map_or(false, |it| it == token) {
                            return None;
                        }
                        let mcall = InFile::new(expansion, macro_call);
                        // Resolve the macro call, memoizing the result per call site.
                        let file_id = match m_cache.get(&mcall) {
                            Some(&it) => it,
                            None => {
                                let it = ast::MacroCall::to_def(self, mcall.as_ref())?;
                                m_cache.insert(mcall, it);
                                it
                            }
                        };
                        let text_range = tt.syntax().text_range();
                        filter_duplicates(tokens, text_range);
                        self.with_ctx(|ctx| {
                            process_expansion_for_token(ctx, &mut stack, file_id).or(file_id
                                .eager_arg(db)
                                .and_then(|arg| {
                                    // also descend into eager expansions
                                    process_expansion_for_token(ctx, &mut stack, arg)
                                }))
                        })
                    }
                    Either::Right(_) if always_descend_into_derives => None,
                    // derive or derive helper
                    Either::Right(meta) => {
                        // attribute we failed expansion for earlier, this might be a derive invocation
                        // or derive helper attribute
                        let attr = meta.parent_attr()?;
                        let adt = match attr.syntax().parent().and_then(ast::Adt::cast) {
                            Some(adt) => {
                                // this might be a derive on an ADT
                                let res = self.with_ctx(|ctx| {
                                    // so try downmapping the token into the pseudo derive expansion
                                    // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
                                    let derive_call = ctx
                                        .attr_to_derive_macro_call(
                                            InFile::new(expansion, &adt),
                                            InFile::new(expansion, attr.clone()),
                                        )?
                                        .1; // resolved to a derive
                                    let text_range = attr.syntax().text_range();
                                    // remove any other token in this macro input, all their mappings are the
                                    // same as this
                                    tokens.retain(|(t, _)| {
                                        !text_range.contains_range(t.text_range())
                                    });
                                    Some(process_expansion_for_token(
                                        ctx,
                                        &mut stack,
                                        derive_call,
                                    ))
                                });
                                if let Some(res) = res {
                                    return res;
                                }
                                Some(adt)
                            }
                            None => {
                                // Otherwise this could be a derive helper on a variant or field
                                attr.syntax().ancestors().find_map(ast::Item::cast).and_then(
                                    |it| match it {
                                        ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
                                        ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
                                        ast::Item::Union(it) => Some(ast::Adt::Union(it)),
                                        _ => None,
                                    },
                                )
                            }
                        }?;
                        let attr_name =
                            attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
                        // Not an attribute, nor a derive, so it's either an inert attribute or a derive helper
                        // Try to resolve to a derive helper and downmap
                        let resolver = &token
                            .parent()
                            .and_then(|parent| {
                                self.analyze_impl(InFile::new(expansion, &parent), None, false)
                            })?
                            .resolver;
                        let id = db.ast_id_map(expansion).ast_id(&adt);
                        let helpers = resolver
                            .def_map()
                            .derive_helpers_in_scope(InFile::new(expansion, id))?;
                        if !helpers.is_empty() {
                            let text_range = attr.syntax().text_range();
                            filter_duplicates(tokens, text_range);
                        }
                        let mut res = None;
                        self.with_ctx(|ctx| {
                            for (.., derive) in
                                helpers.iter().filter(|(helper, ..)| *helper == attr_name)
                            {
                                let Either::Left(derive) = *derive else { continue };
                                // as there may be multiple derives registering the same helper
                                // name, we gotta make sure to call this for all of them!
                                // FIXME: We need to call `f` for all of them as well though!
                                res = res
                                    .or(process_expansion_for_token(ctx, &mut stack, derive));
                            }
                            res
                        })
                    }
                }
            })()
            .is_none();

            // The token mapped into no expansion at all: hand it to the caller; a Break
            // from the callback aborts the whole descent.
            if was_not_remapped
                && let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx)
            {
                return Some(b);
            }
        }
    }
    None
}

// Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop
// traversing the inner iterator when it finds a node.
// The outer iterator is over the tokens descendants
// The inner iterator is the ancestors of a descendant
fn descend_node_at_offset(
    &self,
    node: &SyntaxNode,
    offset: TextSize,
) -> impl Iterator + '_> + '_ {
    node.token_at_offset(offset)
        .map(move |token| self.descend_into_macros_exact(token))
        .map(|descendants| {
            descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
        })
        // re-order the tokens from token_at_offset by returning the ancestors with the smaller first nodes first
        // See algo::ancestors_at_offset, which uses the same approach
        .kmerge_by(|left, right| {
            left.clone()
                .map(|node| node.text_range().len())
                .lt(right.clone().map(|node| node.text_range().len()))
        })
}

/// Attempts to map the node out of macro expanded files returning the original file range.
/// If upmapping is not possible, this will fall back to the range of the macro call of the
/// macro file the node resides in.
pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
    let node = self.find_file(node);
    node.original_file_range_rooted(self.db)
}

/// Attempts to map the node out of macro expanded files returning the original file range.
pub fn original_range_opt(&self, node: &SyntaxNode) -> Option {
    let node = self.find_file(node);
    // Only accept upmappings that land in the root (non-macro) syntax context.
    node.original_file_range_opt(self.db).filter(|(_, ctx)| ctx.is_root()).map(TupleExt::head)
}

/// Attempts to map the node out of macro expanded files.
/// This only work for attribute expansions, as other ones do not have nodes as input.
pub fn original_ast_node(&self, node: N) -> Option {
    self.wrap_node_infile(node).original_ast_node_rooted(self.db).map(
        |InRealFile { file_id, value }| {
            // Cache the original file's root so later queries on the returned node can
            // locate their file again.
            self.cache(find_root(value.syntax()), file_id.into());
            value
        },
    )
}

/// Attempts to map the node out of macro expanded files.
/// This only work for attribute expansions, as other ones do not have nodes as input.
pub fn original_syntax_node_rooted(&self, node: &SyntaxNode) -> Option {
    let InFile { file_id, .. } = self.find_file(node);
    InFile::new(file_id, node).original_syntax_node_rooted(self.db).map(
        |InRealFile { file_id, value }| {
            // Same caching as in `original_ast_node`, for the untyped variant.
            self.cache(find_root(&value), file_id.into());
            value
        },
    )
}

/// Resolves `src` to its node and returns the original-file range for displaying a
/// diagnostic (macro-expanded nodes are mapped back via `original_file_range_rooted`).
pub fn diagnostics_display_range(
    &self,
    src: InFile,
) -> FileRangeWrapper {
    let root = self.parse_or_expand(src.file_id);
    let node = src.map(|it| it.to_node(&root));
    let FileRange { file_id, range } = node.as_ref().original_file_range_rooted(self.db);
    FileRangeWrapper { file_id: file_id.file_id(self.db), range }
}

/// Range-based variant of [`Self::diagnostics_display_range`], for callers that only
/// have a range rather than a node pointer.
pub fn diagnostics_display_range_for_range(
    &self,
    src: InFile,
) -> FileRangeWrapper {
    let FileRange { file_id, range } = src.original_node_file_range_rooted(self.db);
    FileRangeWrapper { file_id: file_id.file_id(self.db), range }
}

/// Ancestors of the token's parent node, climbing up macro expansions while doing so.
fn token_ancestors_with_macros(
    &self,
    token: SyntaxToken,
) -> impl Iterator + Clone + '_ {
    token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
}

/// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
// FIXME: Replace with `ancestors_with_macros_file` when all usages are updated.
pub fn ancestors_with_macros(
    &self,
    node: SyntaxNode,
) -> impl Iterator + Clone + '_ {
    let node = self.find_file(&node);
    self.ancestors_with_macros_file(node.cloned()).map(|it| it.value)
}

/// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
pub fn ancestors_with_macros_file( &self, node: InFile, ) -> impl Iterator> + Clone + '_ { iter::successors(Some(node), move |&InFile { file_id, ref value }| match value.parent() { Some(parent) => Some(InFile::new(file_id, parent)), None => { let macro_file = file_id.macro_file()?; self.with_ctx(|ctx| { let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file); expansion_info.arg().map(|node| node?.parent()).transpose() }) } }) } pub fn ancestors_at_offset_with_macros( &self, node: &SyntaxNode, offset: TextSize, ) -> impl Iterator + '_ { node.token_at_offset(offset) .map(|token| self.token_ancestors_with_macros(token)) .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) } pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option { let text = lifetime.text(); let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| { let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?; gpl.lifetime_params() .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text)) })?; let src = self.wrap_node_infile(lifetime_param); ToDef::to_def(self, src.as_ref()) } pub fn resolve_label(&self, label: &ast::Lifetime) -> Option