Unnamed repository; edit this file 'description' to name the repository.
| -rw-r--r-- | crates/ide/src/goto_definition.rs | 238 | ||||
| -rw-r--r-- | crates/ide/src/highlight_related.rs | 230 | ||||
| -rw-r--r-- | crates/ide/src/navigation_target.rs | 5 | ||||
| -rw-r--r-- | crates/ide/src/references.rs | 25 |
4 files changed, 278 insertions, 220 deletions
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index ed9db32631..a0008f4c8f 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -2,10 +2,10 @@ use std::{iter, mem::discriminant}; use crate::{ doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget, - RangeInfo, TryToNav, UpmappingResult, + RangeInfo, TryToNav, }; use hir::{ - AsAssocItem, AssocItem, DescendPreference, InFile, MacroFileIdExt, ModuleDef, Semantics, + AsAssocItem, AssocItem, DescendPreference, FileRange, InFile, MacroFileIdExt, ModuleDef, Semantics }; use ide_db::{ base_db::{AnchoredPath, FileLoader}, @@ -14,11 +14,12 @@ use ide_db::{ FileId, RootDatabase, }; use itertools::Itertools; + use syntax::{ - ast::{self, HasLoopBody, Label}, + ast::{self, HasLoopBody}, match_ast, AstNode, AstToken, SyntaxKind::*, - SyntaxToken, TextRange, T, + SyntaxNode, SyntaxToken, TextRange, T, }; // Feature: Go to Definition @@ -208,136 +209,127 @@ fn handle_control_flow_keywords( match token.kind() { // For `fn` / `loop` / `while` / `for` / `async`, return the keyword it self, // so that VSCode will find the references when using `ctrl + click` - T![fn] | T![async] | T![try] | T![return] => try_find_fn_or_closure(sema, token), - T![loop] | T![while] | T![break] | T![continue] => try_find_loop(sema, token), + T![fn] | T![async] | T![try] | T![return] => nav_for_exit_points(sema, token), + T![loop] | T![while] | T![break] | T![continue] => nav_for_break_points(sema, token), T![for] if token.parent().and_then(ast::ForExpr::cast).is_some() => { - try_find_loop(sema, token) + nav_for_break_points(sema, token) } _ => None, } } -fn try_find_fn_or_closure( +pub(crate) fn find_fn_or_blocks( + sema: &Semantics<'_, RootDatabase>, + token: &SyntaxToken, +) -> Vec<SyntaxNode> { + let find_ancestors = |token: SyntaxToken| { + let token_kind = token.kind(); + + for anc in sema.token_ancestors_with_macros(token) { + let node = 
match_ast! { + match anc { + ast::Fn(fn_) => fn_.syntax().clone(), + ast::ClosureExpr(c) => c.syntax().clone(), + ast::BlockExpr(blk) => { + match blk.modifier() { + Some(ast::BlockModifier::Async(_)) => blk.syntax().clone(), + Some(ast::BlockModifier::Try(_)) if token_kind != T![return] => blk.syntax().clone(), + _ => continue, + } + }, + _ => continue, + } + }; + + return Some(node); + } + None + }; + + sema.descend_into_macros(DescendPreference::None, token.clone()) + .into_iter() + .filter_map(find_ancestors) + .collect_vec() +} + +fn nav_for_exit_points( sema: &Semantics<'_, RootDatabase>, token: &SyntaxToken, ) -> Option<Vec<NavigationTarget>> { - fn find_exit_point( - sema: &Semantics<'_, RootDatabase>, - token: SyntaxToken, - ) -> Option<UpmappingResult<NavigationTarget>> { - let db = sema.db; - - for anc in sema.token_ancestors_with_macros(token.clone()) { - let file_id = sema.hir_file_for(&anc); + let db = sema.db; + let token_kind = token.kind(); + + let navs = find_fn_or_blocks(sema, token) + .into_iter() + .filter_map(|node| { + let file_id = sema.hir_file_for(&node); + match_ast! { - match anc { + match node { ast::Fn(fn_) => { - let fn_: ast::Fn = fn_; - let nav = sema.to_def(&fn_)?.try_to_nav(db)?; + let mut nav = sema.to_def(&fn_)?.try_to_nav(db)?; // For async token, we navigate to itself, which triggers // VSCode to find the references - let focus_token = if matches!(token.kind(), T![async]) { + let focus_token = if matches!(token_kind, T![async]) { fn_.async_token()? } else { fn_.fn_token()? 
}; - let focus_range = InFile::new(file_id, focus_token.text_range()) - .original_node_file_range_opt(db) - .map(|(frange, _)| frange.range); - return Some(nav.map(|it| { - if focus_range.is_some_and(|range| it.full_range.contains_range(range)) { - NavigationTarget { focus_range, ..it } - } else { - it + let focus_frange = InFile::new(file_id, focus_token.text_range()) + .original_node_file_range_opt(db) + .map(|(frange, _)| frange); + + if let Some(FileRange { file_id, range }) = focus_frange { + let contains_frange = |nav: &NavigationTarget| { + nav.file_id == file_id && nav.full_range.contains_range(range) + }; + + if let Some(def_site) = nav.def_site.as_mut() { + if contains_frange(def_site) { + def_site.focus_range = Some(range); + } + } else if contains_frange(&nav.call_site) { + nav.call_site.focus_range = Some(range); } - })); + } + + Some(nav) }, ast::ClosureExpr(c) => { - let pipe_tok = c.param_list().and_then(|it| it.pipe_token())?.into(); - let c_infile = InFile::new(file_id, c.into()); - let nav = NavigationTarget::from_expr(db, c_infile, pipe_tok); - return Some(nav); + let pipe_tok = c.param_list().and_then(|it| it.pipe_token())?.text_range(); + let closure_in_file = InFile::new(file_id, c.into()); + Some(NavigationTarget::from_expr(db, closure_in_file, Some(pipe_tok))) }, ast::BlockExpr(blk) => { match blk.modifier() { Some(ast::BlockModifier::Async(_)) => { - let async_tok = blk.async_token()?.into(); - let blk_infile = InFile::new(file_id, blk.into()); - let nav = NavigationTarget::from_expr(db, blk_infile, async_tok); - return Some(nav); + let async_tok = blk.async_token()?.text_range(); + let blk_in_file = InFile::new(file_id, blk.into()); + Some(NavigationTarget::from_expr(db, blk_in_file, Some(async_tok))) }, - Some(ast::BlockModifier::Try(_)) if token.kind() != T![return] => { - let try_tok = blk.try_token()?.into(); - let blk_infile = InFile::new(file_id, blk.into()); - let nav = NavigationTarget::from_expr(db, blk_infile, try_tok); - return 
Some(nav); + Some(ast::BlockModifier::Try(_)) if token_kind != T![return] => { + let try_tok = blk.try_token()?.text_range(); + let blk_in_file = InFile::new(file_id, blk.into()); + Some(NavigationTarget::from_expr(db, blk_in_file, Some(try_tok))) }, - _ => {} + _ => None, } }, - _ => {} + _ => None, } } - } - None - } - - sema.descend_into_macros(DescendPreference::None, token.clone()) - .into_iter() - .filter_map(|descended| find_exit_point(sema, descended)) + }) .flatten() - .collect_vec() - .into() + .collect_vec(); + + Some(navs) } -fn try_find_loop( +pub(crate) fn find_loops( sema: &Semantics<'_, RootDatabase>, token: &SyntaxToken, -) -> Option<Vec<NavigationTarget>> { - fn find_break_point( - sema: &Semantics<'_, RootDatabase>, - token: SyntaxToken, - label_matches: impl Fn(Option<Label>) -> bool, - ) -> Option<UpmappingResult<NavigationTarget>> { - let db = sema.db; - let file_id = sema.hir_file_for(&token.parent()?); - - for anc in sema.token_ancestors_with_macros(token.clone()).filter_map(ast::Expr::cast) { - match anc { - ast::Expr::LoopExpr(loop_) if label_matches(loop_.label()) => { - let expr = ast::Expr::LoopExpr(loop_.clone()); - let loop_tok = loop_.loop_token()?.into(); - let nav = NavigationTarget::from_expr(db, InFile::new(file_id, expr), loop_tok); - return Some(nav); - } - ast::Expr::WhileExpr(while_) if label_matches(while_.label()) => { - let expr = ast::Expr::WhileExpr(while_.clone()); - let while_tok = while_.while_token()?.into(); - let nav = - NavigationTarget::from_expr(db, InFile::new(file_id, expr), while_tok); - return Some(nav); - } - ast::Expr::ForExpr(for_) if label_matches(for_.label()) => { - let expr = ast::Expr::ForExpr(for_.clone()); - let for_tok = for_.for_token()?.into(); - let nav = NavigationTarget::from_expr(db, InFile::new(file_id, expr), for_tok); - return Some(nav); - } - ast::Expr::BlockExpr(blk) - if blk.label().is_some() && label_matches(blk.label()) => - { - let expr = ast::Expr::BlockExpr(blk.clone()); - let lbl 
= blk.label().unwrap().syntax().clone().into(); - let nav = NavigationTarget::from_expr(db, InFile::new(file_id, expr), lbl); - return Some(nav); - } - _ => {} - } - } - None - } - +) -> Option<Vec<ast::Expr>> { let parent = token.parent()?; let lbl = match_ast! { match parent { @@ -353,14 +345,60 @@ fn try_find_loop( (Some(_), None) => false, }; + let find_ancestors = |token: SyntaxToken| { + for anc in sema.token_ancestors_with_macros(token).filter_map(ast::Expr::cast) { + let node = match &anc { + ast::Expr::LoopExpr(loop_) if label_matches(loop_.label()) => anc, + ast::Expr::WhileExpr(while_) if label_matches(while_.label()) => anc, + ast::Expr::ForExpr(for_) if label_matches(for_.label()) => anc, + ast::Expr::BlockExpr(blk) + if blk.label().is_some() && label_matches(blk.label()) => + { + anc + } + _ => continue, + }; + + return Some(node); + } + None + }; + sema.descend_into_macros(DescendPreference::None, token.clone()) .into_iter() - .filter_map(|descended| find_break_point(sema, descended, label_matches)) - .flatten() + .filter_map(find_ancestors) .collect_vec() .into() } +fn nav_for_break_points( + sema: &Semantics<'_, RootDatabase>, + token: &SyntaxToken, +) -> Option<Vec<NavigationTarget>> { + let db = sema.db; + + let navs = find_loops(sema, token)? 
+ .into_iter() + .filter_map(|expr| { + let file_id = sema.hir_file_for(expr.syntax()); + let expr_in_file = InFile::new(file_id, expr.clone()); + let focus_range = match expr { + ast::Expr::LoopExpr(loop_) => loop_.loop_token()?.text_range(), + ast::Expr::WhileExpr(while_) => while_.while_token()?.text_range(), + ast::Expr::ForExpr(for_) => for_.for_token()?.text_range(), + // We guarantee that the label exists + ast::Expr::BlockExpr(blk) => blk.label().unwrap().syntax().text_range(), + _ => return None, + }; + let nav = NavigationTarget::from_expr(db, expr_in_file, Some(focus_range)); + Some(nav) + }) + .flatten() + .collect_vec(); + + Some(navs) +} + fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec<NavigationTarget> { def.try_to_nav(db).map(|it| it.collect()).unwrap_or_default() } diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index b1efd90d4e..a10deed42a 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -2,14 +2,10 @@ use std::iter; use hir::{db, DescendPreference, FilePosition, FileRange, HirFileId, InFile, Semantics}; use ide_db::{ - defs::{Definition, IdentClass}, - helpers::pick_best_token, - search::{FileReference, ReferenceCategory, SearchScope}, - syntax_helpers::node_ext::{ + defs::{Definition, IdentClass}, helpers::pick_best_token, search::{FileReference, ReferenceCategory, SearchScope}, syntax_helpers::node_ext::{ eq_label_lt, for_each_tail_expr, full_path_of_name_ref, is_closure_or_blk_with_modif, preorder_expr_with_ctx_checker, - }, - FxHashSet, RootDatabase, + }, FxHashMap, FxHashSet, RootDatabase }; use span::EditionedFileId; use syntax::{ @@ -19,7 +15,7 @@ use syntax::{ SyntaxToken, TextRange, WalkEvent, T, }; -use crate::{navigation_target::ToNav, NavigationTarget, TryToNav}; +use crate::{goto_definition, navigation_target::ToNav, NavigationTarget, TryToNav}; #[derive(PartialEq, Eq, Hash)] pub struct HighlightedRange { @@ -73,15 +69,19 @@ pub(crate) fn 
highlight_related( // most if not all of these should be re-implemented with information seeded from hir match token.kind() { T![?] if config.exit_points && token.parent().and_then(ast::TryExpr::cast).is_some() => { - highlight_exit_points(sema, token) + highlight_exit_points(sema, token).remove(&file_id) + } + T![fn] | T![return] | T![->] if config.exit_points => { + highlight_exit_points(sema, token).remove(&file_id) + } + T![await] | T![async] if config.yield_points => { + highlight_yield_points(sema, token).remove(&file_id) } - T![fn] | T![return] | T![->] if config.exit_points => highlight_exit_points(sema, token), - T![await] | T![async] if config.yield_points => highlight_yield_points(sema, token), T![for] if config.break_points && token.parent().and_then(ast::ForExpr::cast).is_some() => { - highlight_break_points(sema, token) + highlight_break_points(sema, token).remove(&file_id) } T![break] | T![loop] | T![while] | T![continue] if config.break_points => { - highlight_break_points(sema, token) + highlight_break_points(sema, token).remove(&file_id) } T![|] if config.closure_captures => highlight_closure_captures(sema, token, file_id), T![move] if config.closure_captures => highlight_closure_captures(sema, token, file_id), @@ -277,24 +277,35 @@ fn highlight_references( } } +// If `file_id` is None, pub(crate) fn highlight_exit_points( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, -) -> Option<Vec<HighlightedRange>> { +) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> { fn hl( sema: &Semantics<'_, RootDatabase>, - def_range: Option<TextRange>, + def_token: Option<SyntaxToken>, body: ast::Expr, - ) -> Option<Vec<HighlightedRange>> { - let mut highlights = Vec::new(); - if let Some(range) = def_range { - highlights.push(HighlightedRange { category: ReferenceCategory::empty(), range }); + ) -> Option<FxHashMap<EditionedFileId, Vec<HighlightedRange>>> { + let mut highlights: FxHashMap<EditionedFileId, Vec<_>> = FxHashMap::default(); + + let mut 
push_to_highlights = |file_id, range| { + if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) { + let hrange = HighlightedRange { category: ReferenceCategory::empty(), range }; + highlights.entry(file_id).or_default().push(hrange); + } + }; + + if let Some(tok) = def_token { + let file_id = sema.hir_file_for(&tok.parent()?); + let range = Some(tok.text_range()); + push_to_highlights(file_id, range); } WalkExpandedExprCtx::new(sema).walk(&body, &mut |_, expr| { let file_id = sema.hir_file_for(expr.syntax()); - let text_range = match &expr { + let range = match &expr { ast::Expr::TryExpr(try_) => { try_.question_mark_token().map(|token| token.text_range()) } @@ -306,29 +317,24 @@ pub(crate) fn highlight_exit_points( _ => None, }; - if let Some(range) = original_range(sema.db, file_id, text_range) { - highlights.push(HighlightedRange { category: ReferenceCategory::empty(), range }) - } + push_to_highlights(file_id, range); }); - // We should handle `return` separately because when it is used in `try` block - // it will exit the outside function instead of the block it self. + // We should handle `return` separately, because when it is used in a `try` block, + // it will exit the outside function instead of the block itself. 
WalkExpandedExprCtx::new(sema) .with_check_ctx(&WalkExpandedExprCtx::is_async_const_block_or_closure) .walk(&body, &mut |_, expr| { let file_id = sema.hir_file_for(expr.syntax()); - let text_range = match &expr { + let range = match &expr { ast::Expr::ReturnExpr(expr) => { expr.return_token().map(|token| token.text_range()) } _ => None, }; - if let Some(range) = original_range(sema.db, file_id, text_range) { - highlights - .push(HighlightedRange { category: ReferenceCategory::empty(), range }) - } + push_to_highlights(file_id, range); }); let tail = match body { @@ -338,59 +344,74 @@ pub(crate) fn highlight_exit_points( if let Some(tail) = tail { for_each_tail_expr(&tail, &mut |tail| { + let file_id = sema.hir_file_for(tail.syntax()); let range = match tail { ast::Expr::BreakExpr(b) => b .break_token() .map_or_else(|| tail.syntax().text_range(), |tok| tok.text_range()), _ => tail.syntax().text_range(), }; - highlights.push(HighlightedRange { category: ReferenceCategory::empty(), range }) + push_to_highlights(file_id, Some(range)); }); } Some(highlights) } - for anc in token.parent_ancestors() { - return match_ast! { - match anc { - ast::Fn(fn_) => hl(sema, fn_.fn_token().map(|it| it.text_range()), ast::Expr::BlockExpr(fn_.body()?)), - ast::ClosureExpr(closure) => hl( - sema, - closure.param_list().and_then(|p| p.pipe_token()).map(|tok| tok.text_range()), - closure.body()? - ), + let mut res = FxHashMap::default(); + for def in goto_definition::find_fn_or_blocks(sema, &token) { + let new_map = match_ast! 
{ + match def { + ast::Fn(fn_) => fn_.body().and_then(|body| hl(sema, fn_.fn_token(), body.into())), + ast::ClosureExpr(closure) => { + let pipe_tok = closure.param_list().and_then(|p| p.pipe_token()); + closure.body().and_then(|body| hl(sema, pipe_tok, body)) + }, ast::BlockExpr(blk) => match blk.modifier() { - Some(ast::BlockModifier::Async(t)) => hl(sema, Some(t.text_range()), blk.into()), + Some(ast::BlockModifier::Async(t)) => hl(sema, Some(t), blk.into()), Some(ast::BlockModifier::Try(t)) if token.kind() != T![return] => { - hl(sema, Some(t.text_range()), blk.into()) + hl(sema, Some(t), blk.into()) }, _ => continue, }, _ => continue, } }; + merge_map(&mut res, new_map); } - None + + res } pub(crate) fn highlight_break_points( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, -) -> Option<Vec<HighlightedRange>> { - fn hl( +) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> { + pub(crate) fn hl( sema: &Semantics<'_, RootDatabase>, cursor_token_kind: SyntaxKind, loop_token: Option<SyntaxToken>, label: Option<ast::Label>, expr: ast::Expr, - ) -> Option<Vec<HighlightedRange>> { - let mut highlights = Vec::new(); + ) -> Option<FxHashMap<EditionedFileId, Vec<HighlightedRange>>> { + let mut highlights: FxHashMap<EditionedFileId, Vec<_>> = FxHashMap::default(); + + let mut push_to_highlights = |file_id, range| { + if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) { + let hrange = HighlightedRange { category: ReferenceCategory::empty(), range }; + highlights.entry(file_id).or_default().push(hrange); + } + }; - let (label_range, label_lt) = label - .map_or((None, None), |label| (Some(label.syntax().text_range()), label.lifetime())); + let label_lt = label.as_ref().and_then(|it| it.lifetime()); - if let Some(range) = cover_range(loop_token.map(|tok| tok.text_range()), label_range) { - highlights.push(HighlightedRange { category: ReferenceCategory::empty(), range }) + if let Some(range) = cover_range( + 
loop_token.as_ref().map(|tok| tok.text_range()), + label.as_ref().map(|it| it.syntax().text_range()), + ) { + let file_id = loop_token + .and_then(|tok| Some(sema.hir_file_for(&tok.parent()?))) + .unwrap_or_else(|| sema.hir_file_for(label.unwrap().syntax())); + push_to_highlights(file_id, Some(range)); } WalkExpandedExprCtx::new(sema) @@ -418,68 +439,53 @@ pub(crate) fn highlight_break_points( token_lt.map(|it| it.syntax().text_range()), ); - if let Some(range) = original_range(sema.db, file_id, text_range) { - highlights - .push(HighlightedRange { category: ReferenceCategory::empty(), range }) - } + push_to_highlights(file_id, text_range); }); Some(highlights) } - let parent = token.parent()?; - let lbl = match_ast! { - match parent { - ast::BreakExpr(b) => b.lifetime(), - ast::ContinueExpr(c) => c.lifetime(), - ast::LoopExpr(l) => l.label().and_then(|it| it.lifetime()), - ast::ForExpr(f) => f.label().and_then(|it| it.lifetime()), - ast::WhileExpr(w) => w.label().and_then(|it| it.lifetime()), - ast::BlockExpr(b) => Some(b.label().and_then(|it| it.lifetime())?), - _ => return None, - } + let mut res = FxHashMap::default(); + let token_kind = token.kind(); + let Some(loops) = goto_definition::find_loops(sema, &token) else { + return res; }; - - let label_matches = |def_lbl: Option<ast::Label>| match lbl.as_ref() { - Some(lbl) => { - Some(lbl.text()) == def_lbl.and_then(|it| it.lifetime()).as_ref().map(|it| it.text()) - } - None => true, - }; - - for anc in token.parent_ancestors().flat_map(ast::Expr::cast) { - return match &anc { - ast::Expr::LoopExpr(l) if label_matches(l.label()) => { - hl(sema, token.kind(), l.loop_token(), l.label(), anc) - } - ast::Expr::ForExpr(f) if label_matches(f.label()) => { - hl(sema, token.kind(), f.for_token(), f.label(), anc) - } - ast::Expr::WhileExpr(w) if label_matches(w.label()) => { - hl(sema, token.kind(), w.while_token(), w.label(), anc) - } - ast::Expr::BlockExpr(e) if e.label().is_some() && label_matches(e.label()) => { - 
hl(sema, token.kind(), None, e.label(), anc) - } + for expr in loops { + let new_map = match &expr { + ast::Expr::LoopExpr(l) => hl(sema, token_kind, l.loop_token(), l.label(), expr), + ast::Expr::ForExpr(f) => hl(sema, token_kind, f.for_token(), f.label(), expr), + ast::Expr::WhileExpr(w) => hl(sema, token_kind, w.while_token(), w.label(), expr), + ast::Expr::BlockExpr(e) => hl(sema, token_kind, None, e.label(), expr), _ => continue, }; + merge_map(&mut res, new_map); } - None + + res } pub(crate) fn highlight_yield_points( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, -) -> Option<Vec<HighlightedRange>> { +) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> { fn hl( sema: &Semantics<'_, RootDatabase>, async_token: Option<SyntaxToken>, body: Option<ast::Expr>, - ) -> Option<Vec<HighlightedRange>> { - let mut highlights = vec![HighlightedRange { - category: ReferenceCategory::empty(), - range: async_token?.text_range(), - }]; + ) -> Option<FxHashMap<EditionedFileId, Vec<HighlightedRange>>> { + let mut highlights: FxHashMap<EditionedFileId, Vec<_>> = FxHashMap::default(); + + let mut push_to_highlights = |file_id, range| { + if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) { + let hrange = HighlightedRange { category: ReferenceCategory::empty(), range }; + highlights.entry(file_id).or_default().push(hrange); + } + }; + + let async_token = async_token?; + let async_tok_file_id = sema.hir_file_for(&async_token.parent()?); + push_to_highlights(async_tok_file_id, Some(async_token.text_range())); + let Some(body) = body else { return Some(highlights); }; @@ -487,22 +493,22 @@ pub(crate) fn highlight_yield_points( WalkExpandedExprCtx::new(sema).walk(&body, &mut |_, expr| { let file_id = sema.hir_file_for(expr.syntax()); - let token_range = match expr { + let text_range = match expr { ast::Expr::AwaitExpr(expr) => expr.await_token(), ast::Expr::ReturnExpr(expr) => expr.return_token(), _ => None, } .map(|it| it.text_range()); 
- if let Some(range) = original_range(sema.db, file_id, token_range) { - highlights.push(HighlightedRange { category: ReferenceCategory::empty(), range }); - } + push_to_highlights(file_id, text_range); }); Some(highlights) } - for anc in token.parent_ancestors() { - return match_ast! { + + let mut res = FxHashMap::default(); + for anc in goto_definition::find_fn_or_blocks(sema, &token) { + let new_map = match_ast! { match anc { ast::Fn(fn_) => hl(sema, fn_.async_token(), fn_.body().map(ast::Expr::BlockExpr)), ast::BlockExpr(block_expr) => { @@ -515,8 +521,10 @@ pub(crate) fn highlight_yield_points( _ => continue, } }; + merge_map(&mut res, new_map); } - None + + res } fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange> { @@ -536,14 +544,24 @@ fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSe .collect() } -fn original_range( +fn original_frange( db: &dyn db::ExpandDatabase, file_id: HirFileId, text_range: Option<TextRange>, -) -> Option<TextRange> { - InFile::new(file_id, text_range?) - .original_node_file_range_opt(db) - .map(|(frange, _)| frange.range) +) -> Option<FileRange> { + InFile::new(file_id, text_range?).original_node_file_range_opt(db).map(|(frange, _)| frange) +} + +fn merge_map( + res: &mut FxHashMap<EditionedFileId, Vec<HighlightedRange>>, + new: Option<FxHashMap<EditionedFileId, Vec<HighlightedRange>>>, +) { + let Some(new) = new else { + return; + }; + new.into_iter().for_each(|(file_id, ranges)| { + res.entry(file_id).or_default().extend(ranges); + }); } /// Preorder walk all the expression's child expressions. 
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index f1e80ab15d..3eb2651314 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs @@ -16,7 +16,7 @@ use ide_db::{ use stdx::never; use syntax::{ ast::{self, HasName}, - format_smolstr, AstNode, SmolStr, SyntaxElement, SyntaxNode, TextRange, ToSmolStr, + format_smolstr, AstNode, SmolStr, SyntaxNode, TextRange, ToSmolStr, }; /// `NavigationTarget` represents an element in the editor's UI which you can @@ -155,11 +155,10 @@ impl NavigationTarget { pub(crate) fn from_expr( db: &RootDatabase, InFile { file_id, value }: InFile<ast::Expr>, - focus_syntax: SyntaxElement, + focus_range: Option<TextRange>, ) -> UpmappingResult<NavigationTarget> { let name: SmolStr = "<expr>".into(); let kind = SymbolKind::Label; - let focus_range = Some(focus_syntax.text_range()); orig_range_with_focus_r(db, file_id, value.syntax().text_range(), focus_range).map( |(FileRange { file_id, range: full_range }, focus_range)| { diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index e6900888fc..6d73425fcb 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -305,28 +305,31 @@ fn handle_control_flow_keywords( sema: &Semantics<'_, RootDatabase>, FilePosition { file_id, offset }: FilePosition, ) -> Option<ReferenceSearchResult> { - let file = sema.parse(file_id); + let file = sema.parse_guess_edition(file_id); let token = file.syntax().token_at_offset(offset).find(|t| t.kind().is_keyword())?; - let refs = match token.kind() { - T![fn] | T![return] | T![try] => highlight_related::highlight_exit_points(sema, token)?, - T![async] => highlight_related::highlight_yield_points(sema, token)?, + let references = match token.kind() { + T![fn] | T![return] | T![try] => highlight_related::highlight_exit_points(sema, token), + T![async] => highlight_related::highlight_yield_points(sema, token), T![loop] | T![while] | T![break] | 
T![continue] => { - highlight_related::highlight_break_points(sema, token)? + highlight_related::highlight_break_points(sema, token) } T![for] if token.parent().and_then(ast::ForExpr::cast).is_some() => { - highlight_related::highlight_break_points(sema, token)? + highlight_related::highlight_break_points(sema, token) } _ => return None, } .into_iter() - .map(|HighlightedRange { range, category }| (range, category)) + .map(|(file_id, ranges)| { + let ranges = ranges + .into_iter() + .map(|HighlightedRange { range, category }| (range, category)) + .collect(); + (file_id.into(), ranges) + }) .collect(); - Some(ReferenceSearchResult { - declaration: None, - references: IntMap::from_iter([(file_id, refs)]), - }) + Some(ReferenceSearchResult { declaration: None, references }) } #[cfg(test)] |