Unnamed repository; edit this file 'description' to name the repository.
Diffstat (limited to 'crates/ide-diagnostics/src/lib.rs')
-rw-r--r--crates/ide-diagnostics/src/lib.rs496
1 files changed, 364 insertions, 132 deletions
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index 9b50a435e4..45c723d09d 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -30,6 +30,7 @@ mod handlers {
pub(crate) mod inactive_code;
pub(crate) mod incoherent_impl;
pub(crate) mod incorrect_case;
+ pub(crate) mod invalid_cast;
pub(crate) mod invalid_derive_target;
pub(crate) mod macro_error;
pub(crate) mod malformed_derive;
@@ -75,9 +76,10 @@ mod handlers {
#[cfg(test)]
mod tests;
-use std::sync::LazyLock;
+use std::{collections::hash_map, iter, sync::LazyLock};
-use hir::{diagnostics::AnyDiagnostic, InFile, Semantics};
+use either::Either;
+use hir::{db::ExpandDatabase, diagnostics::AnyDiagnostic, Crate, HirFileId, InFile, Semantics};
use ide_db::{
assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
base_db::SourceDatabase,
@@ -88,10 +90,10 @@ use ide_db::{
syntax_helpers::node_ext::parse_tt_as_comma_sep_paths,
EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, SnippetCap,
};
-use stdx::never;
+use itertools::Itertools;
use syntax::{
- ast::{self, AstNode},
- AstPtr, Edition, SyntaxNode, SyntaxNodePtr, TextRange,
+ ast::{self, AstNode, HasAttrs},
+ AstPtr, Edition, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, TextRange, T,
};
// FIXME: Make this an enum
@@ -390,6 +392,7 @@ pub fn semantic_diagnostics(
for diag in diags {
let d = match diag {
AnyDiagnostic::AwaitOutsideOfAsync(d) => handlers::await_outside_of_async::await_outside_of_async(&ctx, &d),
+ AnyDiagnostic::CastToUnsized(d) => handlers::invalid_cast::cast_to_unsized(&ctx, &d),
AnyDiagnostic::ExpectedFunction(d) => handlers::expected_function::expected_function(&ctx, &d),
AnyDiagnostic::InactiveCode(d) => match handlers::inactive_code::inactive_code(&ctx, &d) {
Some(it) => it,
@@ -397,6 +400,7 @@ pub fn semantic_diagnostics(
}
AnyDiagnostic::IncoherentImpl(d) => handlers::incoherent_impl::incoherent_impl(&ctx, &d),
AnyDiagnostic::IncorrectCase(d) => handlers::incorrect_case::incorrect_case(&ctx, &d),
+ AnyDiagnostic::InvalidCast(d) => handlers::invalid_cast::invalid_cast(&ctx, &d),
AnyDiagnostic::InvalidDeriveTarget(d) => handlers::invalid_derive_target::invalid_derive_target(&ctx, &d),
AnyDiagnostic::MacroDefError(d) => handlers::macro_error::macro_def_error(&ctx, &d),
AnyDiagnostic::MacroError(d) => handlers::macro_error::macro_error(&ctx, &d),
@@ -473,8 +477,9 @@ pub fn semantic_diagnostics(
|| ctx.config.disable_experimental && d.experimental)
});
- let mut diagnostics_of_range = res
+ let mut lints = res
.iter_mut()
+ .filter(|it| matches!(it.code, DiagnosticCode::Clippy(_) | DiagnosticCode::RustcLint(_)))
.filter_map(|it| {
Some((
it.main_node.map(|ptr| {
@@ -483,27 +488,31 @@ pub fn semantic_diagnostics(
it,
))
})
- .collect::<FxHashMap<_, _>>();
+ .collect::<Vec<_>>();
- if diagnostics_of_range.is_empty() {
- return res;
- }
-
- let mut rustc_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default();
- let mut clippy_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default();
-
- // FIXME: This becomes quite expensive for big files
- handle_lint_attributes(
+ // The edition isn't accurate (each diagnostic may have its own edition due to macros),
+ // but it's okay as it's only being used for error recovery.
+ handle_lints(
&ctx.sema,
- parse.syntax(),
- &mut rustc_stack,
- &mut clippy_stack,
- &mut diagnostics_of_range,
- ctx.edition,
+ &mut FxHashMap::default(),
+ &mut lints,
+ &mut Vec::new(),
+ file_id.edition(),
);
res.retain(|d| d.severity != Severity::Allow);
+ res.retain_mut(|diag| {
+ if let Some(node) = diag
+ .main_node
+ .map(|ptr| ptr.map(|node| node.to_node(&ctx.sema.parse_or_expand(ptr.file_id))))
+ {
+ handle_diag_from_macros(&ctx.sema, diag, &node)
+ } else {
+ true
+ }
+ });
+
res
}
@@ -520,6 +529,35 @@ pub fn full_diagnostics(
res
}
+/// Returns whether to keep this diagnostic (or remove it).
+fn handle_diag_from_macros(
+ sema: &Semantics<'_, RootDatabase>,
+ diag: &mut Diagnostic,
+ node: &InFile<SyntaxNode>,
+) -> bool {
+ let Some(macro_file) = node.file_id.macro_file() else { return true };
+ let span_map = sema.db.expansion_span_map(macro_file);
+ let mut spans = span_map.spans_for_range(node.text_range());
+ if spans.any(|span| {
+ sema.db.lookup_intern_syntax_context(span.ctx).outer_expn.is_some_and(|expansion| {
+ let macro_call =
+ sema.db.lookup_intern_macro_call(expansion.as_macro_file().macro_call_id);
+ !Crate::from(macro_call.def.krate).origin(sema.db).is_local()
+ })
+ }) {
+ // Disable suggestions for external macros, they'll change library code and it's just bad.
+ diag.fixes = None;
+
+ // All Clippy lints report in macros, see https://github.com/rust-lang/rust-clippy/blob/903293b199364/declare_clippy_lint/src/lib.rs#L172.
+ if let DiagnosticCode::RustcLint(lint) = diag.code {
+ if !LINTS_TO_REPORT_IN_EXTERNAL_MACROS.contains(lint) {
+ return false;
+ }
+ };
+ }
+ true
+}
+
// `__RA_EVERY_LINT` is a fake lint group to allow every lint in proc macros
static RUSTC_LINT_GROUPS_DICT: LazyLock<FxHashMap<&str, Vec<&str>>> =
@@ -528,153 +566,347 @@ static RUSTC_LINT_GROUPS_DICT: LazyLock<FxHashMap<&str, Vec<&str>>> =
static CLIPPY_LINT_GROUPS_DICT: LazyLock<FxHashMap<&str, Vec<&str>>> =
LazyLock::new(|| build_group_dict(CLIPPY_LINT_GROUPS, &["__RA_EVERY_LINT"], "clippy::"));
+// FIXME: Autogenerate this instead of enumerating by hand.
+static LINTS_TO_REPORT_IN_EXTERNAL_MACROS: LazyLock<FxHashSet<&str>> =
+ LazyLock::new(|| FxHashSet::from_iter([]));
+
fn build_group_dict(
lint_group: &'static [LintGroup],
all_groups: &'static [&'static str],
prefix: &'static str,
) -> FxHashMap<&'static str, Vec<&'static str>> {
- let mut r: FxHashMap<&str, Vec<&str>> = FxHashMap::default();
+ let mut map_with_prefixes: FxHashMap<&str, Vec<&str>> = FxHashMap::default();
for g in lint_group {
- for child in g.children {
- r.entry(child.strip_prefix(prefix).unwrap())
- .or_default()
- .push(g.lint.label.strip_prefix(prefix).unwrap());
+ let mut add_children = |label: &'static str| {
+ for child in g.children {
+ map_with_prefixes.entry(child).or_default().push(label);
+ }
+ };
+ add_children(g.lint.label);
+
+ if g.lint.label == "nonstandard_style" {
+ // Also add `bad_style`, which for some reason isn't listed in the groups.
+ add_children("bad_style");
}
}
- for (lint, groups) in r.iter_mut() {
+ for (lint, groups) in map_with_prefixes.iter_mut() {
groups.push(lint);
groups.extend_from_slice(all_groups);
}
- r
+ map_with_prefixes.into_iter().map(|(k, v)| (k.strip_prefix(prefix).unwrap(), v)).collect()
}
-fn handle_lint_attributes(
+/// The default severity for lints that are not warn by default.
+// FIXME: Autogenerate this instead of writing it manually.
+static LINTS_DEFAULT_SEVERITY: LazyLock<FxHashMap<&str, Severity>> =
+ LazyLock::new(|| FxHashMap::from_iter([("unsafe_op_in_unsafe_fn", Severity::Allow)]));
+
+fn handle_lints(
sema: &Semantics<'_, RootDatabase>,
- root: &SyntaxNode,
- rustc_stack: &mut FxHashMap<String, Vec<Severity>>,
- clippy_stack: &mut FxHashMap<String, Vec<Severity>>,
- diagnostics_of_range: &mut FxHashMap<InFile<SyntaxNode>, &mut Diagnostic>,
+ cache: &mut FxHashMap<HirFileId, FxHashMap<SmolStr, SeverityAttr>>,
+ diagnostics: &mut [(InFile<SyntaxNode>, &mut Diagnostic)],
+ cache_stack: &mut Vec<HirFileId>,
edition: Edition,
) {
- let _g = tracing::info_span!("handle_lint_attributes").entered();
- let file_id = sema.hir_file_for(root);
- let preorder = root.preorder();
- for ev in preorder {
- match ev {
- syntax::WalkEvent::Enter(node) => {
- for attr in node.children().filter_map(ast::Attr::cast) {
- parse_lint_attribute(
- attr,
- rustc_stack,
- clippy_stack,
- |stack, severity| {
- stack.push(severity);
- },
- edition,
- );
- }
- if let Some(it) =
- diagnostics_of_range.get_mut(&InFile { file_id, value: node.clone() })
- {
- const EMPTY_LINTS: &[&str] = &[];
- let (names, stack) = match it.code {
- DiagnosticCode::RustcLint(name) => (
- RUSTC_LINT_GROUPS_DICT.get(name).map_or(EMPTY_LINTS, |it| &**it),
- &mut *rustc_stack,
- ),
- DiagnosticCode::Clippy(name) => (
- CLIPPY_LINT_GROUPS_DICT.get(name).map_or(EMPTY_LINTS, |it| &**it),
- &mut *clippy_stack,
- ),
- _ => continue,
- };
- for &name in names {
- if let Some(s) = stack.get(name).and_then(|it| it.last()) {
- it.severity = *s;
+ for (node, diag) in diagnostics {
+ let lint = match diag.code {
+ DiagnosticCode::RustcLint(lint) | DiagnosticCode::Clippy(lint) => lint,
+ _ => panic!("non-lint passed to `handle_lints()`"),
+ };
+ if let Some(&default_severity) = LINTS_DEFAULT_SEVERITY.get(lint) {
+ diag.severity = default_severity;
+ }
+
+ let mut diag_severity = fill_lint_attrs(sema, node, cache, cache_stack, diag, edition);
+
+ if let outline_diag_severity @ Some(_) =
+ find_outline_mod_lint_severity(sema, node, diag, edition)
+ {
+ diag_severity = outline_diag_severity;
+ }
+
+ if let Some(diag_severity) = diag_severity {
+ diag.severity = diag_severity;
+ }
+ }
+}
+
+fn find_outline_mod_lint_severity(
+ sema: &Semantics<'_, RootDatabase>,
+ node: &InFile<SyntaxNode>,
+ diag: &Diagnostic,
+ edition: Edition,
+) -> Option<Severity> {
+ let mod_node = node.value.ancestors().find_map(ast::Module::cast)?;
+ if mod_node.item_list().is_some() {
+ // Inline modules will be handled by `fill_lint_attrs()`.
+ return None;
+ }
+
+ let mod_def = sema.to_module_def(&mod_node)?;
+ let module_source_file = sema.module_definition_node(mod_def);
+ let mut result = None;
+ let lint_groups = lint_groups(&diag.code);
+ lint_attrs(
+ sema,
+ ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"),
+ edition,
+ )
+ .for_each(|(lint, severity)| {
+ if lint_groups.contains(&&*lint) {
+ result = Some(severity);
+ }
+ });
+ result
+}
+
+#[derive(Debug, Clone, Copy)]
+struct SeverityAttr {
+ severity: Severity,
+ /// This field counts how far we are from the main node. Bigger values mean farther away.
+ ///
+ /// Note this isn't accurate: there can be gaps between values (created when merging severity maps).
+ /// The important thing is that if an attr is closer to the main node, it will have smaller value.
+ ///
+ /// This is necessary even though we take care to never overwrite a value from deeper nesting
+ /// because of lint groups. For example, in the following code:
+ /// ```
+ /// #[warn(non_snake_case)]
+ /// mod foo {
+ /// #[allow(nonstandard_style)]
+ /// mod bar;
+ /// }
+ /// ```
+ /// We want to not warn on non snake case inside `bar`. If we are traversing this for the first
+ /// time, everything will be fine, because we will set `diag_severity` on the first matching group
+ /// and never overwrite it since then. But if `bar` is cached, the cache will contain both
+ /// `#[warn(non_snake_case)]` and `#[allow(nonstandard_style)]`, and without this field, we have
+ /// no way of differentiating between the two.
+ depth: u32,
+}
+
+fn fill_lint_attrs(
+ sema: &Semantics<'_, RootDatabase>,
+ node: &InFile<SyntaxNode>,
+ cache: &mut FxHashMap<HirFileId, FxHashMap<SmolStr, SeverityAttr>>,
+ cache_stack: &mut Vec<HirFileId>,
+ diag: &Diagnostic,
+ edition: Edition,
+) -> Option<Severity> {
+ let mut collected_lint_attrs = FxHashMap::<SmolStr, SeverityAttr>::default();
+ let mut diag_severity = None;
+
+ let mut ancestors = node.value.ancestors().peekable();
+ let mut depth = 0;
+ loop {
+ let ancestor = ancestors.next().expect("we always return from top-level nodes");
+ depth += 1;
+
+ if ancestors.peek().is_none() {
+ // We don't want to insert too many nodes into cache, but top level nodes (aka. outline modules
+ // or macro expansions) need to touch the database so they seem like a good fit to cache.
+
+ if let Some(cached) = cache.get_mut(&node.file_id) {
+ // This node (and everything above it) is already cached; the attribute is either here or nowhere.
+
+ // Workaround for the borrow checker.
+ let cached = std::mem::take(cached);
+
+ cached.iter().for_each(|(lint, severity)| {
+ for item in &*cache_stack {
+ let node_cache_entry = cache
+ .get_mut(item)
+ .expect("we always insert cached nodes into the cache map");
+ let lint_cache_entry = node_cache_entry.entry(lint.clone());
+ if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
+ // Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
+ // overwrite top attrs.
+ lint_cache_entry.insert(SeverityAttr {
+ severity: severity.severity,
+ depth: severity.depth + depth,
+ });
}
}
+ });
+
+ let all_matching_groups = lint_groups(&diag.code)
+ .iter()
+ .filter_map(|lint_group| cached.get(&**lint_group));
+ let cached_severity =
+ all_matching_groups.min_by_key(|it| it.depth).map(|it| it.severity);
+
+ cache.insert(node.file_id, cached);
+
+ return diag_severity.or(cached_severity);
+ }
+
+ // Insert this node's descendants' attributes into any outline descendant, but not including this node.
+ // This must come before inserting this node's own attributes to preserve order.
+ collected_lint_attrs.drain().for_each(|(lint, severity)| {
+ if diag_severity.is_none() && lint_groups(&diag.code).contains(&&*lint) {
+ diag_severity = Some(severity.severity);
}
- if let Some(item) = ast::Item::cast(node.clone()) {
- if let Some(me) = sema.expand_attr_macro(&item) {
- for stack in [&mut *rustc_stack, &mut *clippy_stack] {
- stack
- .entry("__RA_EVERY_LINT".to_owned())
- .or_default()
- .push(Severity::Allow);
- }
- handle_lint_attributes(
- sema,
- &me,
- rustc_stack,
- clippy_stack,
- diagnostics_of_range,
- edition,
- );
- for stack in [&mut *rustc_stack, &mut *clippy_stack] {
- stack.entry("__RA_EVERY_LINT".to_owned()).or_default().pop();
- }
+
+ for item in &*cache_stack {
+ let node_cache_entry = cache
+ .get_mut(item)
+ .expect("we always insert cached nodes into the cache map");
+ let lint_cache_entry = node_cache_entry.entry(lint.clone());
+ if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
+ // Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
+ // overwrite top attrs.
+ lint_cache_entry.insert(severity);
}
}
- if let Some(mc) = ast::MacroCall::cast(node) {
- if let Some(me) = sema.expand(&mc) {
- handle_lint_attributes(
- sema,
- &me,
- rustc_stack,
- clippy_stack,
- diagnostics_of_range,
- edition,
- );
+ });
+
+ cache_stack.push(node.file_id);
+ cache.insert(node.file_id, FxHashMap::default());
+
+ if let Some(ancestor) = ast::AnyHasAttrs::cast(ancestor) {
+ // Insert this node's attributes into any outline descendant, including this node.
+ lint_attrs(sema, ancestor, edition).for_each(|(lint, severity)| {
+ if diag_severity.is_none() && lint_groups(&diag.code).contains(&&*lint) {
+ diag_severity = Some(severity);
}
- }
+
+ for item in &*cache_stack {
+ let node_cache_entry = cache
+ .get_mut(item)
+ .expect("we always insert cached nodes into the cache map");
+ let lint_cache_entry = node_cache_entry.entry(lint.clone());
+ if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
+ // Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
+ // overwrite top attrs.
+ lint_cache_entry.insert(SeverityAttr { severity, depth });
+ }
+ }
+ });
}
- syntax::WalkEvent::Leave(node) => {
- for attr in node.children().filter_map(ast::Attr::cast) {
- parse_lint_attribute(
- attr,
- rustc_stack,
- clippy_stack,
- |stack, severity| {
- if stack.pop() != Some(severity) {
- never!("Mismatched serevity in walking lint attributes");
- }
- },
- edition,
- );
+
+ let parent_node = sema.find_parent_file(node.file_id);
+ if let Some(parent_node) = parent_node {
+ let parent_severity =
+ fill_lint_attrs(sema, &parent_node, cache, cache_stack, diag, edition);
+ if diag_severity.is_none() {
+ diag_severity = parent_severity;
}
}
+ cache_stack.pop();
+ return diag_severity;
+ } else if let Some(ancestor) = ast::AnyHasAttrs::cast(ancestor) {
+ lint_attrs(sema, ancestor, edition).for_each(|(lint, severity)| {
+ if diag_severity.is_none() && lint_groups(&diag.code).contains(&&*lint) {
+ diag_severity = Some(severity);
+ }
+
+ let lint_cache_entry = collected_lint_attrs.entry(lint);
+ if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
+ // Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
+ // overwrite top attrs.
+ lint_cache_entry.insert(SeverityAttr { severity, depth });
+ }
+ });
}
}
}
-fn parse_lint_attribute(
- attr: ast::Attr,
- rustc_stack: &mut FxHashMap<String, Vec<Severity>>,
- clippy_stack: &mut FxHashMap<String, Vec<Severity>>,
- job: impl Fn(&mut Vec<Severity>, Severity),
+fn lint_attrs<'a>(
+ sema: &'a Semantics<'a, RootDatabase>,
+ ancestor: ast::AnyHasAttrs,
edition: Edition,
+) -> impl Iterator<Item = (SmolStr, Severity)> + 'a {
+ ancestor
+ .attrs_including_inner()
+ .filter_map(|attr| {
+ attr.as_simple_call().and_then(|(name, value)| match &*name {
+ "allow" | "expect" => Some(Either::Left(iter::once((Severity::Allow, value)))),
+ "warn" => Some(Either::Left(iter::once((Severity::Warning, value)))),
+ "forbid" | "deny" => Some(Either::Left(iter::once((Severity::Error, value)))),
+ "cfg_attr" => {
+ let mut lint_attrs = Vec::new();
+ cfg_attr_lint_attrs(sema, &value, &mut lint_attrs);
+ Some(Either::Right(lint_attrs.into_iter()))
+ }
+ _ => None,
+ })
+ })
+ .flatten()
+ .flat_map(move |(severity, lints)| {
+ parse_tt_as_comma_sep_paths(lints, edition).into_iter().flat_map(move |lints| {
+ // Rejoin the idents with `::`, so we have no spaces in between.
+ lints.into_iter().map(move |lint| {
+ (
+ lint.segments().filter_map(|segment| segment.name_ref()).join("::").into(),
+ severity,
+ )
+ })
+ })
+ })
+}
+
+fn cfg_attr_lint_attrs(
+ sema: &Semantics<'_, RootDatabase>,
+ value: &ast::TokenTree,
+ lint_attrs: &mut Vec<(Severity, ast::TokenTree)>,
) {
- let Some((tag, args_tt)) = attr.as_simple_call() else {
- return;
- };
- let severity = match tag.as_str() {
- "allow" => Severity::Allow,
- "warn" => Severity::Warning,
- "forbid" | "deny" => Severity::Error,
- _ => return,
- };
- for lint in parse_tt_as_comma_sep_paths(args_tt, edition).into_iter().flatten() {
- if let Some(lint) = lint.as_single_name_ref() {
- job(rustc_stack.entry(lint.to_string()).or_default(), severity);
+ let prev_len = lint_attrs.len();
+
+ let mut iter = value.token_trees_and_tokens().filter(|it| match it {
+ NodeOrToken::Node(_) => true,
+ NodeOrToken::Token(it) => !it.kind().is_trivia(),
+ });
+
+ // Skip the condition.
+ for value in &mut iter {
+ if value.as_token().is_some_and(|it| it.kind() == T![,]) {
+ break;
}
- if let Some(tool) = lint.qualifier().and_then(|it| it.as_single_name_ref()) {
- if let Some(name_ref) = &lint.segment().and_then(|it| it.name_ref()) {
- if tool.to_string() == "clippy" {
- job(clippy_stack.entry(name_ref.to_string()).or_default(), severity);
+ }
+
+ while let Some(value) = iter.next() {
+ if let Some(token) = value.as_token() {
+ if token.kind() == SyntaxKind::IDENT {
+ let severity = match token.text() {
+ "allow" | "expect" => Some(Severity::Allow),
+ "warn" => Some(Severity::Warning),
+ "forbid" | "deny" => Some(Severity::Error),
+ "cfg_attr" => {
+ if let Some(NodeOrToken::Node(value)) = iter.next() {
+ cfg_attr_lint_attrs(sema, &value, lint_attrs);
+ }
+ None
+ }
+ _ => None,
+ };
+ if let Some(severity) = severity {
+ let lints = iter.next();
+ if let Some(NodeOrToken::Node(lints)) = lints {
+ lint_attrs.push((severity, lints));
+ }
}
}
}
}
+
+ if prev_len != lint_attrs.len() {
+ if let Some(false) | None = sema.check_cfg_attr(value) {
+ // Discard the attributes when the condition is false.
+ lint_attrs.truncate(prev_len);
+ }
+ }
+}
+
+fn lint_groups(lint: &DiagnosticCode) -> &'static [&'static str] {
+ match lint {
+ DiagnosticCode::RustcLint(name) => {
+ RUSTC_LINT_GROUPS_DICT.get(name).map(|it| &**it).unwrap_or_default()
+ }
+ DiagnosticCode::Clippy(name) => {
+ CLIPPY_LINT_GROUPS_DICT.get(name).map(|it| &**it).unwrap_or_default()
+ }
+ _ => &[],
+ }
}
fn fix(id: &'static str, label: &str, source_change: SourceChange, target: TextRange) -> Assist {