Unnamed repository; edit this file 'description' to name the repository.
 crates/hir-expand/src/files.rs                                         | 61
 crates/hir/src/source_analyzer.rs                                      | 12
 crates/hir/src/term_search/tactics.rs                                  |  3
 crates/ide-db/src/rust_doc.rs                                          |  6
 crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs | 15
 crates/rust-analyzer/src/cli/analysis_stats.rs                         |  4
 crates/rust-analyzer/src/config.rs                                     |  2
 crates/rust-analyzer/tests/slow-tests/support.rs                       |  3
 crates/rust-analyzer/tests/slow-tests/tidy.rs                          |  2
 crates/test-utils/src/lib.rs                                           |  2
 xtask/src/codegen/diagnostics_docs.rs                                  |  2
 xtask/src/release.rs                                                   |  3
 12 files changed, 62 insertions(+), 53 deletions(-)
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index 90d9e61a69..fc9fa93268 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -1,4 +1,6 @@
//! Things to wrap other things in file ids.
+use std::borrow::Borrow;
+
use either::Either;
use span::{
AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr,
@@ -76,6 +78,13 @@ impl<FileKind: Copy, T> InFileWrapper<FileKind, T> {
pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> {
self.with_value(&self.value)
}
+
+ pub fn borrow<U>(&self) -> InFileWrapper<FileKind, &U>
+ where
+ T: Borrow<U>,
+ {
+ self.with_value(self.value.borrow())
+ }
}
impl<FileKind: Copy, T: Clone> InFileWrapper<FileKind, &T> {
@@ -156,8 +165,13 @@ impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, &N> {
}
// region:specific impls
+impl<SN: Borrow<SyntaxNode>> InRealFile<SN> {
+ pub fn file_range(&self) -> FileRange {
+ FileRange { file_id: self.file_id, range: self.value.borrow().text_range() }
+ }
+}
-impl InFile<&SyntaxNode> {
+impl<SN: Borrow<SyntaxNode>> InFile<SN> {
pub fn parent_ancestors_with_macros(
self,
db: &dyn db::ExpandDatabase,
@@ -172,7 +186,7 @@ impl InFile<&SyntaxNode> {
.map(|node| node.parent())
.transpose(),
};
- std::iter::successors(succ(&self.cloned()), succ)
+ std::iter::successors(succ(&self.borrow().cloned()), succ)
}
pub fn ancestors_with_macros(
@@ -189,7 +203,15 @@ impl InFile<&SyntaxNode> {
.map(|node| node.parent())
.transpose(),
};
- std::iter::successors(Some(self.cloned()), succ)
+ std::iter::successors(Some(self.borrow().cloned()), succ)
+ }
+
+ pub fn kind(&self) -> parser::SyntaxKind {
+ self.value.borrow().kind()
+ }
+
+ pub fn text_range(&self) -> TextRange {
+ self.value.borrow().text_range()
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
@@ -197,7 +219,7 @@ impl InFile<&SyntaxNode> {
/// For attributes and derives, this will point back to the attribute only.
/// For the entire item use [`InFile::original_file_range_full`].
pub fn original_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
- self.map(SyntaxNode::text_range).original_node_file_range_rooted(db)
+ self.borrow().map(SyntaxNode::text_range).original_node_file_range_rooted(db)
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
@@ -205,15 +227,7 @@ impl InFile<&SyntaxNode> {
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
- self.map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
- }
-
- /// Attempts to map the syntax node back up its macro calls.
- pub fn original_file_range_opt(
- self,
- db: &dyn db::ExpandDatabase,
- ) -> Option<(FileRange, SyntaxContextId)> {
- self.map(SyntaxNode::text_range).original_node_file_range_opt(db)
+ self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
}
pub fn original_syntax_node_rooted(
@@ -224,16 +238,19 @@ impl InFile<&SyntaxNode> {
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
- return Some(InRealFile { file_id, value: self.value.clone() })
+ return Some(InRealFile { file_id, value: self.value.borrow().clone() })
}
HirFileIdRepr::MacroFile(m) if m.is_attr_macro(db) => m,
_ => return None,
};
- let FileRange { file_id, range } =
- map_node_range_up_rooted(db, &db.expansion_span_map(file_id), self.value.text_range())?;
+ let FileRange { file_id, range } = map_node_range_up_rooted(
+ db,
+ &db.expansion_span_map(file_id),
+ self.value.borrow().text_range(),
+ )?;
- let kind = self.value.kind();
+ let kind = self.kind();
let value = db
.parse(file_id)
.syntax_node()
@@ -245,6 +262,16 @@ impl InFile<&SyntaxNode> {
}
}
+impl InFile<&SyntaxNode> {
+ /// Attempts to map the syntax node back up its macro calls.
+ pub fn original_file_range_opt(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<(FileRange, SyntaxContextId)> {
+ self.borrow().map(SyntaxNode::text_range).original_node_file_range_opt(db)
+ }
+}
+
impl InMacroFile<SyntaxToken> {
pub fn upmap_once(
self,
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index be8e9c49a0..81c57f6cae 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -96,10 +96,10 @@ impl SourceAnalyzer {
None => scope_for(db, &scopes, &source_map, node),
Some(offset) => {
debug_assert!(
- node.value.text_range().contains_inclusive(offset),
+ node.text_range().contains_inclusive(offset),
"{:?} not in {:?}",
offset,
- node.value.text_range()
+ node.text_range()
);
scope_for_offset(db, &scopes, &source_map, node.file_id, offset)
}
@@ -966,9 +966,7 @@ fn scope_for(
node: InFile<&SyntaxNode>,
) -> Option<ScopeId> {
node.ancestors_with_macros(db.upcast())
- .take_while(|it| {
- !ast::Item::can_cast(it.value.kind()) || ast::MacroCall::can_cast(it.value.kind())
- })
+ .take_while(|it| !ast::Item::can_cast(it.kind()) || ast::MacroCall::can_cast(it.kind()))
.filter_map(|it| it.map(ast::Expr::cast).transpose())
.filter_map(|it| source_map.node_expr(it.as_ref()))
.find_map(|it| scopes.scope_for(it))
@@ -996,8 +994,8 @@ fn scope_for_offset(
Some(it.file_id.macro_file()?.call_node(db.upcast()))
})
.find(|it| it.file_id == from_file)
- .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
- Some((source.value.text_range(), scope))
+ .filter(|it| it.kind() == SyntaxKind::MACRO_CALL)?;
+ Some((source.text_range(), scope))
})
.filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
// find containing scope
diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs
index d1fc68d009..1b0e6f8bd5 100644
--- a/crates/hir/src/term_search/tactics.rs
+++ b/crates/hir/src/term_search/tactics.rs
@@ -598,9 +598,8 @@ pub(super) fn famous_types<'a, DB: HirDatabase>(
Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::unit()), value: "()" },
]
.into_iter()
- .map(|exprs| {
+ .inspect(|exprs| {
lookup.insert(exprs.ty(db), std::iter::once(exprs.clone()));
- exprs
})
.filter(|expr| expr.ty(db).could_unify_with_deeply(db, &ctx.goal))
}
diff --git a/crates/ide-db/src/rust_doc.rs b/crates/ide-db/src/rust_doc.rs
index ab2a250289..eacd9b9b4d 100644
--- a/crates/ide-db/src/rust_doc.rs
+++ b/crates/ide-db/src/rust_doc.rs
@@ -7,11 +7,7 @@ pub fn is_rust_fence(s: &str) -> bool {
let mut seen_rust_tags = false;
let mut seen_other_tags = false;
- let tokens = s
- .trim()
- .split(|c| c == ',' || c == ' ' || c == '\t')
- .map(str::trim)
- .filter(|t| !t.is_empty());
+ let tokens = s.trim().split([',', ' ', '\t']).map(str::trim).filter(|t| !t.is_empty());
for token in tokens {
match token {
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
index be1e6ed572..a470ce72fc 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -28,10 +28,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
let function = id;
(
format!("`fn {redundant_assoc_item_name}`"),
- function
- .source(db)
- .map(|it| it.syntax().value.text_range())
- .unwrap_or(default_range),
+ function.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range),
format!("\n {};", function.display(db)),
)
}
@@ -39,10 +36,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
let constant = id;
(
format!("`const {redundant_assoc_item_name}`"),
- constant
- .source(db)
- .map(|it| it.syntax().value.text_range())
- .unwrap_or(default_range),
+ constant.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range),
format!("\n {};", constant.display(db)),
)
}
@@ -50,10 +44,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
let type_alias = id;
(
format!("`type {redundant_assoc_item_name}`"),
- type_alias
- .source(db)
- .map(|it| it.syntax().value.text_range())
- .unwrap_or(default_range),
+ type_alias.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range),
format!("\n type {};", type_alias.name(ctx.sema.db).to_smol_str()),
)
}
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 90b81d0a80..a188adbe35 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -651,7 +651,7 @@ impl flags::AnalysisStats {
if let Some(src) = source {
let original_file = src.file_id.original_file(db);
let path = vfs.file_path(original_file);
- let syntax_range = src.value.text_range();
+ let syntax_range = src.text_range();
format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
} else {
format!("processing: {}", full_name())
@@ -945,7 +945,7 @@ impl flags::AnalysisStats {
if let Some(src) = source {
let original_file = src.file_id.original_file(db);
let path = vfs.file_path(original_file);
- let syntax_range = src.value.text_range();
+ let syntax_range = src.text_range();
format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
} else {
format!("processing: {}", full_name())
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index f719a47248..42ba162e4f 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -3367,7 +3367,7 @@ mod tests {
for idx in url_offsets {
let link = &schema[idx..];
// matching on whitespace to ignore normal links
- if let Some(link_end) = link.find(|c| c == ' ' || c == '[') {
+ if let Some(link_end) = link.find([' ', '[']) {
if link.chars().nth(link_end) == Some('[') {
if let Some(link_text_end) = link.find(']') {
let link_text = link[link_end..(link_text_end + 1)].to_string();
diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs
index c438325532..66100971fb 100644
--- a/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -388,9 +388,8 @@ impl Server {
}
fn recv(&self) -> Result<Option<Message>, Timeout> {
let msg = recv_timeout(&self.client.receiver)?;
- let msg = msg.map(|msg| {
+ let msg = msg.inspect(|msg| {
self.messages.borrow_mut().push(msg.clone());
- msg
});
Ok(msg)
}
diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs
index 7dd6382cfa..8cd5cbf1c7 100644
--- a/crates/rust-analyzer/tests/slow-tests/tidy.rs
+++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs
@@ -155,7 +155,7 @@ Zlib OR Apache-2.0 OR MIT
let meta = cmd!(sh, "cargo metadata --format-version 1").read().unwrap();
let mut licenses = meta
- .split(|c| c == ',' || c == '{' || c == '}')
+ .split([',', '{', '}'])
.filter(|it| it.contains(r#""license""#))
.map(|it| it.trim())
.map(|it| it[r#""license":"#.len()..].trim_matches('"'))
diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs
index 43f62d0d1e..53d4a28bc3 100644
--- a/crates/test-utils/src/lib.rs
+++ b/crates/test-utils/src/lib.rs
@@ -305,7 +305,7 @@ fn extract_line_annotations(mut line: &str) -> Vec<LineAnnotation> {
}
let range = TextRange::at(offset, len.try_into().unwrap());
let line_no_caret = &line[len..];
- let end_marker = line_no_caret.find(|c| c == '$');
+ let end_marker = line_no_caret.find('$');
let next = line_no_caret.find(marker).map_or(line.len(), |it| it + len);
let cond = |end_marker| {
diff --git a/xtask/src/codegen/diagnostics_docs.rs b/xtask/src/codegen/diagnostics_docs.rs
index cf30531e7f..dcc9c76a50 100644
--- a/xtask/src/codegen/diagnostics_docs.rs
+++ b/xtask/src/codegen/diagnostics_docs.rs
@@ -63,7 +63,7 @@ fn is_valid_diagnostic_name(diagnostic: &str) -> Result<(), String> {
if diagnostic.chars().any(|c| c.is_ascii_uppercase()) {
return Err("Diagnostic names can't contain uppercase symbols".into());
}
- if diagnostic.chars().any(|c| !c.is_ascii()) {
+ if !diagnostic.is_ascii() {
return Err("Diagnostic can't contain non-ASCII symbols".into());
}
diff --git a/xtask/src/release.rs b/xtask/src/release.rs
index 5699053a23..b936876b52 100644
--- a/xtask/src/release.rs
+++ b/xtask/src/release.rs
@@ -119,12 +119,11 @@ impl flags::RustcPull {
// Fetch given rustc commit.
cmd!(sh, "git fetch http://localhost:{JOSH_PORT}/rust-lang/rust.git@{commit}{JOSH_FILTER}.git")
.run()
- .map_err(|e| {
+ .inspect_err(|_| {
// Try to un-do the previous `git commit`, to leave the repo in the state we found it it.
cmd!(sh, "git reset --hard HEAD^")
.run()
.expect("FAILED to clean up again after failed `git fetch`, sorry for that");
- e
})
.context("FAILED to fetch new commits, something went wrong (committing the rust-version file has been undone)")?;