Helix editor source repository.
Refactor global_search as a dynamic Picker
Michael Davis 2024-07-15
parent 5622db6 · commit 1d023b0
-rw-r--r--  helix-term/src/commands.rs  383
1 file changed, 183 insertions(+), 200 deletions(-)
diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs
index f4d2c634..b7ffeeb7 100644
--- a/helix-term/src/commands.rs
+++ b/helix-term/src/commands.rs
@@ -3,6 +3,7 @@ pub(crate) mod lsp;
pub(crate) mod typed;
pub use dap::*;
+use futures_util::FutureExt;
use helix_event::status;
use helix_stdx::{
path::expand_tilde,
@@ -2255,222 +2256,204 @@ fn global_search(cx: &mut Context) {
}
}
+ struct GlobalSearchConfig {
+ smart_case: bool,
+ file_picker_config: helix_view::editor::FilePickerConfig,
+ }
+
let config = cx.editor.config();
- let smart_case = config.search.smart_case;
- let file_picker_config = config.file_picker.clone();
+ let config = GlobalSearchConfig {
+ smart_case: config.search.smart_case,
+ file_picker_config: config.file_picker.clone(),
+ };
- let reg = cx.register.unwrap_or('/');
- let completions = search_completions(cx, Some(reg));
- ui::raw_regex_prompt(
- cx,
- "global-search:".into(),
- Some(reg),
- move |_editor: &Editor, input: &str| {
- completions
- .iter()
- .filter(|comp| comp.starts_with(input))
- .map(|comp| (0.., std::borrow::Cow::Owned(comp.clone())))
- .collect()
- },
- move |cx, _, input, event| {
- if event != PromptEvent::Validate {
- return;
- }
- cx.editor.registers.last_search_register = reg;
+ let columns = vec![
+ PickerColumn::new("path", |item: &FileResult, _| {
+ let path = helix_stdx::path::get_relative_path(&item.path);
+ format!("{}:{}", path.to_string_lossy(), item.line_num + 1).into()
+ }),
+ PickerColumn::new("contents", |item: &FileResult, _| {
+ item.line_content.as_str().into()
+ })
+ .without_filtering(),
+ ];
- let current_path = doc_mut!(cx.editor).path().cloned();
- let documents: Vec<_> = cx
- .editor
- .documents()
- .map(|doc| (doc.path().cloned(), doc.text().to_owned()))
- .collect();
+ let get_files = |query: &str,
+ editor: &mut Editor,
+ config: std::sync::Arc<GlobalSearchConfig>,
+ injector: &ui::picker::Injector<_, _>| {
+ if query.is_empty() {
+ return async { Ok(()) }.boxed();
+ }
- if let Ok(matcher) = RegexMatcherBuilder::new()
- .case_smart(smart_case)
- .build(input)
- {
- let search_root = helix_stdx::env::current_working_dir();
- if !search_root.exists() {
- cx.editor
- .set_error("Current working directory does not exist");
- return;
- }
+ let search_root = helix_stdx::env::current_working_dir();
+ if !search_root.exists() {
+ return async { Err(anyhow::anyhow!("Current working directory does not exist")) }
+ .boxed();
+ }
+
+ let documents: Vec<_> = editor
+ .documents()
+ .map(|doc| (doc.path().cloned(), doc.text().to_owned()))
+ .collect();
- let columns = vec![
- PickerColumn::new(
- "path",
- |item: &FileResult, current_path: &Option<PathBuf>| {
- let relative_path = helix_stdx::path::get_relative_path(&item.path)
- .to_string_lossy()
- .into_owned();
- if current_path
+ let matcher = match RegexMatcherBuilder::new()
+ .case_smart(config.smart_case)
+ .build(query)
+ {
+ Ok(matcher) => {
+ // Clear any "Failed to compile regex" errors out of the statusline.
+ editor.clear_status();
+ matcher
+ }
+ Err(err) => {
+ log::info!("Failed to compile search pattern in global search: {}", err);
+ return async { Err(anyhow::anyhow!("Failed to compile regex")) }.boxed();
+ }
+ };
+
+ let dedup_symlinks = config.file_picker_config.deduplicate_links;
+ let absolute_root = search_root
+ .canonicalize()
+ .unwrap_or_else(|_| search_root.clone());
+
+ let injector = injector.clone();
+ async move {
+ let searcher = SearcherBuilder::new()
+ .binary_detection(BinaryDetection::quit(b'\x00'))
+ .build();
+ WalkBuilder::new(search_root)
+ .hidden(config.file_picker_config.hidden)
+ .parents(config.file_picker_config.parents)
+ .ignore(config.file_picker_config.ignore)
+ .follow_links(config.file_picker_config.follow_symlinks)
+ .git_ignore(config.file_picker_config.git_ignore)
+ .git_global(config.file_picker_config.git_global)
+ .git_exclude(config.file_picker_config.git_exclude)
+ .max_depth(config.file_picker_config.max_depth)
+ .filter_entry(move |entry| {
+ filter_picker_entry(entry, &absolute_root, dedup_symlinks)
+ })
+ .add_custom_ignore_filename(helix_loader::config_dir().join("ignore"))
+ .add_custom_ignore_filename(".helix/ignore")
+ .build_parallel()
+ .run(|| {
+ let mut searcher = searcher.clone();
+ let matcher = matcher.clone();
+ let injector = injector.clone();
+ let documents = &documents;
+ Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
+ let entry = match entry {
+ Ok(entry) => entry,
+ Err(_) => return WalkState::Continue,
+ };
+
+ match entry.file_type() {
+ Some(entry) if entry.is_file() => {}
+ // skip everything else
+ _ => return WalkState::Continue,
+ };
+
+ let mut stop = false;
+ let sink = sinks::UTF8(|line_num, line_content| {
+ stop = injector
+ .push(FileResult::new(
+ entry.path(),
+ line_num as usize - 1,
+ line_content.to_string(),
+ ))
+ .is_err();
+
+ Ok(!stop)
+ });
+ let doc = documents.iter().find(|&(doc_path, _)| {
+ doc_path
.as_ref()
- .map(|p| p == &item.path)
- .unwrap_or(false)
- {
- format!("{} (*)", relative_path).into()
+ .map_or(false, |doc_path| doc_path == entry.path())
+ });
+
+ let result = if let Some((_, doc)) = doc {
+ // there is already a buffer for this file
+ // search the buffer instead of the file because it's faster
+ // and captures new edits without requiring a save
+ if searcher.multi_line_with_matcher(&matcher) {
+ // in this case a continous buffer is required
+ // convert the rope to a string
+ let text = doc.to_string();
+ searcher.search_slice(&matcher, text.as_bytes(), sink)
} else {
- relative_path.into()
+ searcher.search_reader(
+ &matcher,
+ RopeReader::new(doc.slice(..)),
+ sink,
+ )
}
- },
- ),
- PickerColumn::new("contents", |item: &FileResult, _| {
- item.line_content.as_str().into()
- }),
- ];
- let (picker, injector) = Picker::stream(columns, current_path);
-
- let dedup_symlinks = file_picker_config.deduplicate_links;
- let absolute_root = search_root
- .canonicalize()
- .unwrap_or_else(|_| search_root.clone());
- let injector_ = injector.clone();
-
- std::thread::spawn(move || {
- let searcher = SearcherBuilder::new()
- .binary_detection(BinaryDetection::quit(b'\x00'))
- .build();
-
- let mut walk_builder = WalkBuilder::new(search_root);
-
- walk_builder
- .hidden(file_picker_config.hidden)
- .parents(file_picker_config.parents)
- .ignore(file_picker_config.ignore)
- .follow_links(file_picker_config.follow_symlinks)
- .git_ignore(file_picker_config.git_ignore)
- .git_global(file_picker_config.git_global)
- .git_exclude(file_picker_config.git_exclude)
- .max_depth(file_picker_config.max_depth)
- .filter_entry(move |entry| {
- filter_picker_entry(entry, &absolute_root, dedup_symlinks)
- })
- .add_custom_ignore_filename(helix_loader::config_dir().join("ignore"))
- .add_custom_ignore_filename(".helix/ignore")
- .build_parallel()
- .run(|| {
- let mut searcher = searcher.clone();
- let matcher = matcher.clone();
- let injector = injector_.clone();
- let documents = &documents;
- Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
- let entry = match entry {
- Ok(entry) => entry,
- Err(_) => return WalkState::Continue,
- };
-
- match entry.file_type() {
- Some(entry) if entry.is_file() => {}
- // skip everything else
- _ => return WalkState::Continue,
- };
-
- let mut stop = false;
- let sink = sinks::UTF8(|line_num, line_content| {
- stop = injector
- .push(FileResult::new(
- entry.path(),
- line_num as usize - 1,
- line_content.to_string(),
- ))
- .is_err();
-
- Ok(!stop)
- });
- let doc = documents.iter().find(|&(doc_path, _)| {
- doc_path
- .as_ref()
- .map_or(false, |doc_path| doc_path == entry.path())
- });
-
- let result = if let Some((_, doc)) = doc {
- // there is already a buffer for this file
- // search the buffer instead of the file because it's faster
- // and captures new edits without requiring a save
- if searcher.multi_line_with_matcher(&matcher) {
- // in this case a continous buffer is required
- // convert the rope to a string
- let text = doc.to_string();
- searcher.search_slice(&matcher, text.as_bytes(), sink)
- } else {
- searcher.search_reader(
- &matcher,
- RopeReader::new(doc.slice(..)),
- sink,
- )
- }
- } else {
- searcher.search_path(&matcher, entry.path(), sink)
- };
-
- if let Err(err) = result {
- log::error!(
- "Global search error: {}, {}",
- entry.path().display(),
- err
- );
- }
- if stop {
- WalkState::Quit
- } else {
- WalkState::Continue
- }
- })
- });
+ } else {
+ searcher.search_path(&matcher, entry.path(), sink)
+ };
+
+ if let Err(err) = result {
+ log::error!("Global search error: {}, {}", entry.path().display(), err);
+ }
+ if stop {
+ WalkState::Quit
+ } else {
+ WalkState::Continue
+ }
+ })
});
+ Ok(())
+ }
+ .boxed()
+ };
- cx.jobs.callback(async move {
- let call = move |_: &mut Editor, compositor: &mut Compositor| {
- let picker = Picker::with_stream(
- picker,
- 0,
- injector,
- move |cx, FileResult { path, line_num, .. }, action| {
- let doc = match cx.editor.open(path, action) {
- Ok(id) => doc_mut!(cx.editor, &id),
- Err(e) => {
- cx.editor.set_error(format!(
- "Failed to open file '{}': {}",
- path.display(),
- e
- ));
- return;
- }
- };
+ let mut picker = Picker::new(
+ columns,
+ 1, // contents
+ vec![],
+ config,
+ move |cx, FileResult { path, line_num, .. }, action| {
+ let doc = match cx.editor.open(path, action) {
+ Ok(id) => doc_mut!(cx.editor, &id),
+ Err(e) => {
+ cx.editor
+ .set_error(format!("Failed to open file '{}': {}", path.display(), e));
+ return;
+ }
+ };
- let line_num = *line_num;
- let view = view_mut!(cx.editor);
- let text = doc.text();
- if line_num >= text.len_lines() {
- cx.editor.set_error(
+ let line_num = *line_num;
+ let view = view_mut!(cx.editor);
+ let text = doc.text();
+ if line_num >= text.len_lines() {
+ cx.editor.set_error(
"The line you jumped to does not exist anymore because the file has changed.",
);
- return;
- }
- let start = text.line_to_char(line_num);
- let end = text.line_to_char((line_num + 1).min(text.len_lines()));
+ return;
+ }
+ let start = text.line_to_char(line_num);
+ let end = text.line_to_char((line_num + 1).min(text.len_lines()));
- doc.set_selection(view.id, Selection::single(start, end));
- if action.align_view(view, doc.id()) {
- align_view(doc, view, Align::Center);
- }
- },
- )
- .with_preview(
- |_editor, FileResult { path, line_num, .. }| {
- Some((path.clone().into(), Some((*line_num, *line_num))))
- },
- );
- compositor.push(Box::new(overlaid(picker)))
- };
- Ok(Callback::EditorCompositor(Box::new(call)))
- })
- } else {
- // Otherwise do nothing
- // log::warn!("Global Search Invalid Pattern")
+ doc.set_selection(view.id, Selection::single(start, end));
+ if action.align_view(view, doc.id()) {
+ align_view(doc, view, Align::Center);
}
},
- );
+ )
+ .with_preview(|_editor, FileResult { path, line_num, .. }| {
+ Some((path.clone().into(), Some((*line_num, *line_num))))
+ })
+ .with_dynamic_query(get_files, Some(275));
+
+ if let Some((reg, line)) = cx
+ .register
+ .and_then(|reg| Some((reg, cx.editor.registers.first(reg, cx.editor)?)))
+ {
+ picker = picker.with_line(line.into_owned(), cx.editor);
+ cx.editor.registers.last_search_register = reg;
+ }
+
+ cx.push_layer(Box::new(overlaid(picker)));
}
enum Extend {