Unnamed repository; edit this file 'description' to name the repository.
Merge pull request #9647 from helix-editor/pickers-v2
`Picker`s "v2"
Blaž Hrastnik 2024-07-15
parent 6345b78 · parent 9de5f5c · commit 08ee8b9
-rw-r--r--Cargo.lock17
-rw-r--r--Cargo.toml2
-rw-r--r--book/src/themes.md2
-rw-r--r--helix-core/Cargo.toml1
-rw-r--r--helix-core/src/fuzzy.rs10
-rw-r--r--helix-core/src/lib.rs3
-rw-r--r--helix-core/src/uri.rs122
-rw-r--r--helix-event/src/lib.rs4
-rw-r--r--helix-event/src/redraw.rs9
-rw-r--r--helix-term/Cargo.toml1
-rw-r--r--helix-term/src/application.rs26
-rw-r--r--helix-term/src/commands.rs632
-rw-r--r--helix-term/src/commands/dap.rs72
-rw-r--r--helix-term/src/commands/lsp.rs457
-rw-r--r--helix-term/src/commands/typed.rs23
-rw-r--r--helix-term/src/ui/menu.rs24
-rw-r--r--helix-term/src/ui/mod.rs19
-rw-r--r--helix-term/src/ui/picker.rs755
-rw-r--r--helix-term/src/ui/picker/handlers.rs182
-rw-r--r--helix-term/src/ui/picker/query.rs368
-rw-r--r--helix-term/src/ui/prompt.rs33
-rw-r--r--helix-view/src/document.rs4
-rw-r--r--helix-view/src/editor.rs12
-rw-r--r--helix-view/src/handlers/lsp.rs67
24 files changed, 1862 insertions, 983 deletions
diff --git a/Cargo.lock b/Cargo.lock
index c614cadf..f1cd1632 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -210,12 +210,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa"
[[package]]
-name = "cov-mark"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ffa3d3e0138386cd4361f63537765cac7ee40698028844635a54495a92f67f3"
-
-[[package]]
name = "crc32fast"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1337,6 +1331,7 @@ dependencies = [
"unicode-general-category",
"unicode-segmentation",
"unicode-width",
+ "url",
]
[[package]]
@@ -1467,6 +1462,7 @@ dependencies = [
"smallvec",
"tempfile",
"termini",
+ "thiserror",
"tokio",
"tokio-stream",
"toml",
@@ -1784,9 +1780,9 @@ dependencies = [
[[package]]
name = "nucleo"
-version = "0.2.1"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae5331f4bcce475cf28cb29c95366c3091af4b0aa7703f1a6bc858f29718fdf3"
+checksum = "5262af4c94921c2646c5ac6ff7900c2af9cbb08dc26a797e18130a7019c039d4"
dependencies = [
"nucleo-matcher",
"parking_lot",
@@ -1795,11 +1791,10 @@ dependencies = [
[[package]]
name = "nucleo-matcher"
-version = "0.2.0"
+version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b702b402fe286162d1f00b552a046ce74365d2ac473a2607ff36ba650f9bd57"
+checksum = "bf33f538733d1a5a3494b836ba913207f14d9d4a1d3cd67030c5061bdd2cac85"
dependencies = [
- "cov-mark",
"memchr",
"unicode-segmentation",
]
diff --git a/Cargo.toml b/Cargo.toml
index 9be265fc..e7f78442 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -38,7 +38,7 @@ package.helix-term.opt-level = 2
[workspace.dependencies]
tree-sitter = { version = "0.22" }
-nucleo = "0.2.0"
+nucleo = "0.5.0"
slotmap = "1.0.7"
thiserror = "1.0"
diff --git a/book/src/themes.md b/book/src/themes.md
index e3b95c0a..b8e27137 100644
--- a/book/src/themes.md
+++ b/book/src/themes.md
@@ -297,6 +297,8 @@ These scopes are used for theming the editor interface:
| `ui.bufferline.background` | Style for bufferline background |
| `ui.popup` | Documentation popups (e.g. Space + k) |
| `ui.popup.info` | Prompt for multiple key options |
+| `ui.picker.header` | Column names in pickers with multiple columns |
+| `ui.picker.header.active` | The column name in pickers with multiple columns that the cursor is entering into. |
| `ui.window` | Borderlines separating splits |
| `ui.help` | Description box for commands |
| `ui.text` | Default text style, command prompts, popup text, etc. |
diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml
index c6b87e68..392b4a4c 100644
--- a/helix-core/Cargo.toml
+++ b/helix-core/Cargo.toml
@@ -34,6 +34,7 @@ bitflags = "2.6"
ahash = "0.8.11"
hashbrown = { version = "0.14.5", features = ["raw"] }
dunce = "1.0"
+url = "2.5.0"
log = "0.4"
serde = { version = "1.0", features = ["derive"] }
diff --git a/helix-core/src/fuzzy.rs b/helix-core/src/fuzzy.rs
index 549c6b0e..da46518f 100644
--- a/helix-core/src/fuzzy.rs
+++ b/helix-core/src/fuzzy.rs
@@ -1,6 +1,6 @@
use std::ops::DerefMut;
-use nucleo::pattern::{Atom, AtomKind, CaseMatching};
+use nucleo::pattern::{Atom, AtomKind, CaseMatching, Normalization};
use nucleo::Config;
use parking_lot::Mutex;
@@ -38,6 +38,12 @@ pub fn fuzzy_match<T: AsRef<str>>(
if path {
matcher.config.set_match_paths();
}
- let pattern = Atom::new(pattern, CaseMatching::Smart, AtomKind::Fuzzy, false);
+ let pattern = Atom::new(
+ pattern,
+ CaseMatching::Smart,
+ Normalization::Smart,
+ AtomKind::Fuzzy,
+ false,
+ );
pattern.match_list(items, &mut matcher)
}
diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs
index 1abd90d1..681d3456 100644
--- a/helix-core/src/lib.rs
+++ b/helix-core/src/lib.rs
@@ -27,6 +27,7 @@ pub mod test;
pub mod text_annotations;
pub mod textobject;
mod transaction;
+pub mod uri;
pub mod wrap;
pub mod unicode {
@@ -66,3 +67,5 @@ pub use diagnostic::Diagnostic;
pub use line_ending::{LineEnding, NATIVE_LINE_ENDING};
pub use transaction::{Assoc, Change, ChangeSet, Deletion, Operation, Transaction};
+
+pub use uri::Uri;
diff --git a/helix-core/src/uri.rs b/helix-core/src/uri.rs
new file mode 100644
index 00000000..4e03c58b
--- /dev/null
+++ b/helix-core/src/uri.rs
@@ -0,0 +1,122 @@
+use std::path::{Path, PathBuf};
+
+/// A generic pointer to a file location.
+///
+/// Currently this type only supports paths to local files.
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
+#[non_exhaustive]
+pub enum Uri {
+ File(PathBuf),
+}
+
+impl Uri {
+ // This clippy allow mirrors url::Url::from_file_path
+ #[allow(clippy::result_unit_err)]
+ pub fn to_url(&self) -> Result<url::Url, ()> {
+ match self {
+ Uri::File(path) => url::Url::from_file_path(path),
+ }
+ }
+
+ pub fn as_path(&self) -> Option<&Path> {
+ match self {
+ Self::File(path) => Some(path),
+ }
+ }
+
+ pub fn as_path_buf(self) -> Option<PathBuf> {
+ match self {
+ Self::File(path) => Some(path),
+ }
+ }
+}
+
+impl From<PathBuf> for Uri {
+ fn from(path: PathBuf) -> Self {
+ Self::File(path)
+ }
+}
+
+impl TryFrom<Uri> for PathBuf {
+ type Error = ();
+
+ fn try_from(uri: Uri) -> Result<Self, Self::Error> {
+ match uri {
+ Uri::File(path) => Ok(path),
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct UrlConversionError {
+ source: url::Url,
+ kind: UrlConversionErrorKind,
+}
+
+#[derive(Debug)]
+pub enum UrlConversionErrorKind {
+ UnsupportedScheme,
+ UnableToConvert,
+}
+
+impl std::fmt::Display for UrlConversionError {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self.kind {
+ UrlConversionErrorKind::UnsupportedScheme => {
+ write!(f, "unsupported scheme in URL: {}", self.source.scheme())
+ }
+ UrlConversionErrorKind::UnableToConvert => {
+ write!(f, "unable to convert URL to file path: {}", self.source)
+ }
+ }
+ }
+}
+
+impl std::error::Error for UrlConversionError {}
+
+fn convert_url_to_uri(url: &url::Url) -> Result<Uri, UrlConversionErrorKind> {
+ if url.scheme() == "file" {
+ url.to_file_path()
+ .map(|path| Uri::File(helix_stdx::path::normalize(path)))
+ .map_err(|_| UrlConversionErrorKind::UnableToConvert)
+ } else {
+ Err(UrlConversionErrorKind::UnsupportedScheme)
+ }
+}
+
+impl TryFrom<url::Url> for Uri {
+ type Error = UrlConversionError;
+
+ fn try_from(url: url::Url) -> Result<Self, Self::Error> {
+ convert_url_to_uri(&url).map_err(|kind| Self::Error { source: url, kind })
+ }
+}
+
+impl TryFrom<&url::Url> for Uri {
+ type Error = UrlConversionError;
+
+ fn try_from(url: &url::Url) -> Result<Self, Self::Error> {
+ convert_url_to_uri(url).map_err(|kind| Self::Error {
+ source: url.clone(),
+ kind,
+ })
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use url::Url;
+
+ #[test]
+ fn unknown_scheme() {
+ let url = Url::parse("csharp:/metadata/foo/bar/Baz.cs").unwrap();
+ assert!(matches!(
+ Uri::try_from(url),
+ Err(UrlConversionError {
+ kind: UrlConversionErrorKind::UnsupportedScheme,
+ ..
+ })
+ ));
+ }
+}
diff --git a/helix-event/src/lib.rs b/helix-event/src/lib.rs
index 894de5e8..de018a79 100644
--- a/helix-event/src/lib.rs
+++ b/helix-event/src/lib.rs
@@ -34,7 +34,9 @@
use anyhow::Result;
pub use cancel::{cancelable_future, cancelation, CancelRx, CancelTx};
pub use debounce::{send_blocking, AsyncHook};
-pub use redraw::{lock_frame, redraw_requested, request_redraw, start_frame, RenderLockGuard};
+pub use redraw::{
+ lock_frame, redraw_requested, request_redraw, start_frame, RenderLockGuard, RequestRedrawOnDrop,
+};
pub use registry::Event;
mod cancel;
diff --git a/helix-event/src/redraw.rs b/helix-event/src/redraw.rs
index 8fadb8ae..d1a18899 100644
--- a/helix-event/src/redraw.rs
+++ b/helix-event/src/redraw.rs
@@ -51,3 +51,12 @@ pub fn start_frame() {
pub fn lock_frame() -> RenderLockGuard {
RENDER_LOCK.read()
}
+
+/// A zero sized type that requests a redraw via [request_redraw] when the type [Drop]s.
+pub struct RequestRedrawOnDrop;
+
+impl Drop for RequestRedrawOnDrop {
+ fn drop(&mut self) {
+ request_redraw();
+ }
+}
diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml
index fb850f7c..c50165e9 100644
--- a/helix-term/Cargo.toml
+++ b/helix-term/Cargo.toml
@@ -56,6 +56,7 @@ ignore = "0.4"
pulldown-cmark = { version = "0.11", default-features = false }
# file type detection
content_inspector = "0.2.4"
+thiserror = "1.0"
# opening URLs
open = "5.2.0"
diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs
index 9adc764c..9695703b 100644
--- a/helix-term/src/application.rs
+++ b/helix-term/src/application.rs
@@ -735,10 +735,10 @@ impl Application {
}
}
Notification::PublishDiagnostics(mut params) => {
- let path = match params.uri.to_file_path() {
- Ok(path) => helix_stdx::path::normalize(path),
- Err(_) => {
- log::error!("Unsupported file URI: {}", params.uri);
+ let uri = match helix_core::Uri::try_from(params.uri) {
+ Ok(uri) => uri,
+ Err(err) => {
+ log::error!("{err}");
return;
}
};
@@ -749,11 +749,11 @@ impl Application {
}
// have to inline the function because of borrow checking...
let doc = self.editor.documents.values_mut()
- .find(|doc| doc.path().map(|p| p == &path).unwrap_or(false))
+ .find(|doc| doc.uri().is_some_and(|u| u == uri))
.filter(|doc| {
if let Some(version) = params.version {
if version != doc.version() {
- log::info!("Version ({version}) is out of date for {path:?} (expected ({}), dropping PublishDiagnostic notification", doc.version());
+ log::info!("Version ({version}) is out of date for {uri:?} (expected ({}), dropping PublishDiagnostic notification", doc.version());
return false;
}
}
@@ -765,7 +765,7 @@ impl Application {
let lang_conf = doc.language.clone();
if let Some(lang_conf) = &lang_conf {
- if let Some(old_diagnostics) = self.editor.diagnostics.get(&path) {
+ if let Some(old_diagnostics) = self.editor.diagnostics.get(&uri) {
if !lang_conf.persistent_diagnostic_sources.is_empty() {
// Sort diagnostics first by severity and then by line numbers.
// Note: The `lsp::DiagnosticSeverity` enum is already defined in decreasing order
@@ -798,7 +798,7 @@ impl Application {
// Insert the original lsp::Diagnostics here because we may have no open document
            // for diagnostic message and so we can't calculate the exact position.
// When using them later in the diagnostics picker, we calculate them on-demand.
- let diagnostics = match self.editor.diagnostics.entry(path) {
+ let diagnostics = match self.editor.diagnostics.entry(uri) {
Entry::Occupied(o) => {
let current_diagnostics = o.into_mut();
                    // there may be entries of other language servers, which is why we can't overwrite the whole entry
@@ -1132,20 +1132,22 @@ impl Application {
..
} = params;
- let path = match uri.to_file_path() {
- Ok(path) => path,
+ let uri = match helix_core::Uri::try_from(uri) {
+ Ok(uri) => uri,
Err(err) => {
- log::error!("unsupported file URI: {}: {:?}", uri, err);
+ log::error!("{err}");
return lsp::ShowDocumentResult { success: false };
}
};
+            // If `Uri` gets another variant other than `File` this may not be valid.
+ let path = uri.as_path().expect("URIs are valid paths");
let action = match take_focus {
Some(true) => helix_view::editor::Action::Replace,
_ => helix_view::editor::Action::VerticalSplit,
};
- let doc_id = match self.editor.open(&path, action) {
+ let doc_id = match self.editor.open(path, action) {
Ok(id) => id,
Err(err) => {
log::error!("failed to open path: {:?}: {:?}", uri, err);
diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs
index 69496eb6..097c3493 100644
--- a/helix-term/src/commands.rs
+++ b/helix-term/src/commands.rs
@@ -3,6 +3,7 @@ pub(crate) mod lsp;
pub(crate) mod typed;
pub use dap::*;
+use futures_util::FutureExt;
use helix_event::status;
use helix_stdx::{
path::expand_tilde,
@@ -10,10 +11,7 @@ use helix_stdx::{
};
use helix_vcs::{FileChange, Hunk};
pub use lsp::*;
-use tui::{
- text::Span,
- widgets::{Cell, Row},
-};
+use tui::text::Span;
pub use typed::*;
use helix_core::{
@@ -61,8 +59,7 @@ use crate::{
compositor::{self, Component, Compositor},
filter_picker_entry,
job::Callback,
- keymap::ReverseKeymap,
- ui::{self, menu::Item, overlay::overlaid, Picker, Popup, Prompt, PromptEvent},
+ ui::{self, overlay::overlaid, Picker, PickerColumn, Popup, Prompt, PromptEvent},
};
use crate::job::{self, Jobs};
@@ -2257,216 +2254,193 @@ fn global_search(cx: &mut Context) {
}
}
- impl ui::menu::Item for FileResult {
- type Data = Option<PathBuf>;
-
- fn format(&self, current_path: &Self::Data) -> Row {
- let relative_path = helix_stdx::path::get_relative_path(&self.path)
- .to_string_lossy()
- .into_owned();
- if current_path
- .as_ref()
- .map(|p| p == &self.path)
- .unwrap_or(false)
- {
- format!("{} (*)", relative_path).into()
- } else {
- relative_path.into()
- }
- }
+ struct GlobalSearchConfig {
+ smart_case: bool,
+ file_picker_config: helix_view::editor::FilePickerConfig,
}
let config = cx.editor.config();
- let smart_case = config.search.smart_case;
- let file_picker_config = config.file_picker.clone();
+ let config = GlobalSearchConfig {
+ smart_case: config.search.smart_case,
+ file_picker_config: config.file_picker.clone(),
+ };
- let reg = cx.register.unwrap_or('/');
- let completions = search_completions(cx, Some(reg));
- ui::raw_regex_prompt(
- cx,
- "global-search:".into(),
- Some(reg),
- move |_editor: &Editor, input: &str| {
- completions
- .iter()
- .filter(|comp| comp.starts_with(input))
- .map(|comp| (0.., std::borrow::Cow::Owned(comp.clone())))
- .collect()
- },
- move |cx, _, input, event| {
- if event != PromptEvent::Validate {
- return;
- }
- cx.editor.registers.last_search_register = reg;
+ let columns = [
+ PickerColumn::new("path", |item: &FileResult, _| {
+ let path = helix_stdx::path::get_relative_path(&item.path);
+ format!("{}:{}", path.to_string_lossy(), item.line_num + 1).into()
+ }),
+ PickerColumn::hidden("contents"),
+ ];
- let current_path = doc_mut!(cx.editor).path().cloned();
- let documents: Vec<_> = cx
- .editor
- .documents()
- .map(|doc| (doc.path().cloned(), doc.text().to_owned()))
- .collect();
+ let get_files = |query: &str,
+ editor: &mut Editor,
+ config: std::sync::Arc<GlobalSearchConfig>,
+ injector: &ui::picker::Injector<_, _>| {
+ if query.is_empty() {
+ return async { Ok(()) }.boxed();
+ }
- if let Ok(matcher) = RegexMatcherBuilder::new()
- .case_smart(smart_case)
- .build(input)
- {
- let search_root = helix_stdx::env::current_working_dir();
- if !search_root.exists() {
- cx.editor
- .set_error("Current working directory does not exist");
- return;
- }
+ let search_root = helix_stdx::env::current_working_dir();
+ if !search_root.exists() {
+ return async { Err(anyhow::anyhow!("Current working directory does not exist")) }
+ .boxed();
+ }
- let (picker, injector) = Picker::stream(current_path);
-
- let dedup_symlinks = file_picker_config.deduplicate_links;
- let absolute_root = search_root
- .canonicalize()
- .unwrap_or_else(|_| search_root.clone());
- let injector_ = injector.clone();
-
- std::thread::spawn(move || {
- let searcher = SearcherBuilder::new()
- .binary_detection(BinaryDetection::quit(b'\x00'))
- .build();
-
- let mut walk_builder = WalkBuilder::new(search_root);
-
- walk_builder
- .hidden(file_picker_config.hidden)
- .parents(file_picker_config.parents)
- .ignore(file_picker_config.ignore)
- .follow_links(file_picker_config.follow_symlinks)
- .git_ignore(file_picker_config.git_ignore)
- .git_global(file_picker_config.git_global)
- .git_exclude(file_picker_config.git_exclude)
- .max_depth(file_picker_config.max_depth)
- .filter_entry(move |entry| {
- filter_picker_entry(entry, &absolute_root, dedup_symlinks)
+ let documents: Vec<_> = editor
+ .documents()
+ .map(|doc| (doc.path().cloned(), doc.text().to_owned()))
+ .collect();
+
+ let matcher = match RegexMatcherBuilder::new()
+ .case_smart(config.smart_case)
+ .build(query)
+ {
+ Ok(matcher) => {
+ // Clear any "Failed to compile regex" errors out of the statusline.
+ editor.clear_status();
+ matcher
+ }
+ Err(err) => {
+ log::info!("Failed to compile search pattern in global search: {}", err);
+ return async { Err(anyhow::anyhow!("Failed to compile regex")) }.boxed();
+ }
+ };
+
+ let dedup_symlinks = config.file_picker_config.deduplicate_links;
+ let absolute_root = search_root
+ .canonicalize()
+ .unwrap_or_else(|_| search_root.clone());
+
+ let injector = injector.clone();
+ async move {
+ let searcher = SearcherBuilder::new()
+ .binary_detection(BinaryDetection::quit(b'\x00'))
+ .build();
+ WalkBuilder::new(search_root)
+ .hidden(config.file_picker_config.hidden)
+ .parents(config.file_picker_config.parents)
+ .ignore(config.file_picker_config.ignore)
+ .follow_links(config.file_picker_config.follow_symlinks)
+ .git_ignore(config.file_picker_config.git_ignore)
+ .git_global(config.file_picker_config.git_global)
+ .git_exclude(config.file_picker_config.git_exclude)
+ .max_depth(config.file_picker_config.max_depth)
+ .filter_entry(move |entry| {
+ filter_picker_entry(entry, &absolute_root, dedup_symlinks)
+ })
+ .add_custom_ignore_filename(helix_loader::config_dir().join("ignore"))
+ .add_custom_ignore_filename(".helix/ignore")
+ .build_parallel()
+ .run(|| {
+ let mut searcher = searcher.clone();
+ let matcher = matcher.clone();
+ let injector = injector.clone();
+ let documents = &documents;
+ Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
+ let entry = match entry {
+ Ok(entry) => entry,
+ Err(_) => return WalkState::Continue,
+ };
+
+ match entry.file_type() {
+ Some(entry) if entry.is_file() => {}
+ // skip everything else
+ _ => return WalkState::Continue,
+ };
+
+ let mut stop = false;
+ let sink = sinks::UTF8(|line_num, _line_content| {
+ stop = injector
+ .push(FileResult::new(entry.path(), line_num as usize - 1))
+ .is_err();
+
+ Ok(!stop)
+ });
+ let doc = documents.iter().find(|&(doc_path, _)| {
+ doc_path
+ .as_ref()
+ .map_or(false, |doc_path| doc_path == entry.path())
});
- walk_builder
- .add_custom_ignore_filename(helix_loader::config_dir().join("ignore"));
- walk_builder.add_custom_ignore_filename(".helix/ignore");
-
- walk_builder.build_parallel().run(|| {
- let mut searcher = searcher.clone();
- let matcher = matcher.clone();
- let injector = injector_.clone();
- let documents = &documents;
- Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
- let entry = match entry {
- Ok(entry) => entry,
- Err(_) => return WalkState::Continue,
- };
-
- match entry.file_type() {
- Some(entry) if entry.is_file() => {}
- // skip everything else
- _ => return WalkState::Continue,
- };
-
- let mut stop = false;
- let sink = sinks::UTF8(|line_num, _| {
- stop = injector
- .push(FileResult::new(entry.path(), line_num as usize - 1))
- .is_err();
-
- Ok(!stop)
- });
- let doc = documents.iter().find(|&(doc_path, _)| {
- doc_path
- .as_ref()
- .map_or(false, |doc_path| doc_path == entry.path())
- });
-
- let result = if let Some((_, doc)) = doc {
- // there is already a buffer for this file
- // search the buffer instead of the file because it's faster
- // and captures new edits without requiring a save
- if searcher.multi_line_with_matcher(&matcher) {
- // in this case a continous buffer is required
- // convert the rope to a string
- let text = doc.to_string();
- searcher.search_slice(&matcher, text.as_bytes(), sink)
- } else {
- searcher.search_reader(
- &matcher,
- RopeReader::new(doc.slice(..)),
- sink,
- )
- }
+ let result = if let Some((_, doc)) = doc {
+ // there is already a buffer for this file
+ // search the buffer instead of the file because it's faster
+ // and captures new edits without requiring a save
+ if searcher.multi_line_with_matcher(&matcher) {
+                // in this case a continuous buffer is required
+ // convert the rope to a string
+ let text = doc.to_string();
+ searcher.search_slice(&matcher, text.as_bytes(), sink)
} else {
- searcher.search_path(&matcher, entry.path(), sink)
- };
-
- if let Err(err) = result {
- log::error!(
- "Global search error: {}, {}",
- entry.path().display(),
- err
- );
+ searcher.search_reader(
+ &matcher,
+ RopeReader::new(doc.slice(..)),
+ sink,
+ )
}
- if stop {
- WalkState::Quit
- } else {
- WalkState::Continue
- }
- })
- });
+ } else {
+ searcher.search_path(&matcher, entry.path(), sink)
+ };
+
+ if let Err(err) = result {
+ log::error!("Global search error: {}, {}", entry.path().display(), err);
+ }
+ if stop {
+ WalkState::Quit
+ } else {
+ WalkState::Continue
+ }
+ })
});
+ Ok(())
+ }
+ .boxed()
+ };
- cx.jobs.callback(async move {
- let call = move |_: &mut Editor, compositor: &mut Compositor| {
- let picker = Picker::with_stream(
- picker,
- injector,
- move |cx, FileResult { path, line_num }, action| {
- let doc = match cx.editor.open(path, action) {
- Ok(id) => doc_mut!(cx.editor, &id),
- Err(e) => {
- cx.editor.set_error(format!(
- "Failed to open file '{}': {}",
- path.display(),
- e
- ));
- return;
- }
- };
+ let reg = cx.register.unwrap_or('/');
+ cx.editor.registers.last_search_register = reg;
+
+ let picker = Picker::new(
+ columns,
+ 1, // contents
+ [],
+ config,
+ move |cx, FileResult { path, line_num, .. }, action| {
+ let doc = match cx.editor.open(path, action) {
+ Ok(id) => doc_mut!(cx.editor, &id),
+ Err(e) => {
+ cx.editor
+ .set_error(format!("Failed to open file '{}': {}", path.display(), e));
+ return;
+ }
+ };
- let line_num = *line_num;
- let view = view_mut!(cx.editor);
- let text = doc.text();
- if line_num >= text.len_lines() {
- cx.editor.set_error(
+ let line_num = *line_num;
+ let view = view_mut!(cx.editor);
+ let text = doc.text();
+ if line_num >= text.len_lines() {
+ cx.editor.set_error(
"The line you jumped to does not exist anymore because the file has changed.",
);
- return;
- }
- let start = text.line_to_char(line_num);
- let end = text.line_to_char((line_num + 1).min(text.len_lines()));
+ return;
+ }
+ let start = text.line_to_char(line_num);
+ let end = text.line_to_char((line_num + 1).min(text.len_lines()));
- doc.set_selection(view.id, Selection::single(start, end));
- if action.align_view(view, doc.id()) {
- align_view(doc, view, Align::Center);
- }
- },
- )
- .with_preview(
- |_editor, FileResult { path, line_num }| {
- Some((path.clone().into(), Some((*line_num, *line_num))))
- },
- );
- compositor.push(Box::new(overlaid(picker)))
- };
- Ok(Callback::EditorCompositor(Box::new(call)))
- })
- } else {
- // Otherwise do nothing
- // log::warn!("Global Search Invalid Pattern")
+ doc.set_selection(view.id, Selection::single(start, end));
+ if action.align_view(view, doc.id()) {
+ align_view(doc, view, Align::Center);
}
},
- );
+ )
+ .with_preview(|_editor, FileResult { path, line_num, .. }| {
+ Some((path.as_path().into(), Some((*line_num, *line_num))))
+ })
+ .with_history_register(Some(reg))
+ .with_dynamic_query(get_files, Some(275));
+
+ cx.push_layer(Box::new(overlaid(picker)));
}
enum Extend {
@@ -2894,31 +2868,6 @@ fn buffer_picker(cx: &mut Context) {
focused_at: std::time::Instant,
}
- impl ui::menu::Item for BufferMeta {
- type Data = ();
-
- fn format(&self, _data: &Self::Data) -> Row {
- let path = self
- .path
- .as_deref()
- .map(helix_stdx::path::get_relative_path);
- let path = match path.as_deref().and_then(Path::to_str) {
- Some(path) => path,
- None => SCRATCH_BUFFER_NAME,
- };
-
- let mut flags = String::new();
- if self.is_modified {
- flags.push('+');
- }
- if self.is_current {
- flags.push('*');
- }
-
- Row::new([self.id.to_string(), flags, path.to_string()])
- }
- }
-
let new_meta = |doc: &Document| BufferMeta {
id: doc.id(),
path: doc.path().cloned(),
@@ -2937,7 +2886,31 @@ fn buffer_picker(cx: &mut Context) {
// mru
items.sort_unstable_by_key(|item| std::cmp::Reverse(item.focused_at));
- let picker = Picker::new(items, (), |cx, meta, action| {
+ let columns = [
+ PickerColumn::new("id", |meta: &BufferMeta, _| meta.id.to_string().into()),
+ PickerColumn::new("flags", |meta: &BufferMeta, _| {
+ let mut flags = String::new();
+ if meta.is_modified {
+ flags.push('+');
+ }
+ if meta.is_current {
+ flags.push('*');
+ }
+ flags.into()
+ }),
+ PickerColumn::new("path", |meta: &BufferMeta, _| {
+ let path = meta
+ .path
+ .as_deref()
+ .map(helix_stdx::path::get_relative_path);
+ path.as_deref()
+ .and_then(Path::to_str)
+ .unwrap_or(SCRATCH_BUFFER_NAME)
+ .to_string()
+ .into()
+ }),
+ ];
+ let picker = Picker::new(columns, 2, items, (), |cx, meta, action| {
cx.editor.switch(meta.id, action);
})
.with_preview(|editor, meta| {
@@ -2961,33 +2934,6 @@ fn jumplist_picker(cx: &mut Context) {
is_current: bool,
}
- impl ui::menu::Item for JumpMeta {
- type Data = ();
-
- fn format(&self, _data: &Self::Data) -> Row {
- let path = self
- .path
- .as_deref()
- .map(helix_stdx::path::get_relative_path);
- let path = match path.as_deref().and_then(Path::to_str) {
- Some(path) => path,
- None => SCRATCH_BUFFER_NAME,
- };
-
- let mut flags = Vec::new();
- if self.is_current {
- flags.push("*");
- }
-
- let flag = if flags.is_empty() {
- "".into()
- } else {
- format!(" ({})", flags.join(""))
- };
- format!("{} {}{} {}", self.id, path, flag, self.text).into()
- }
- }
-
for (view, _) in cx.editor.tree.views_mut() {
for doc_id in view.jumps.iter().map(|e| e.0).collect::<Vec<_>>().iter() {
let doc = doc_mut!(cx.editor, doc_id);
@@ -3014,17 +2960,43 @@ fn jumplist_picker(cx: &mut Context) {
}
};
+ let columns = [
+ ui::PickerColumn::new("id", |item: &JumpMeta, _| item.id.to_string().into()),
+ ui::PickerColumn::new("path", |item: &JumpMeta, _| {
+ let path = item
+ .path
+ .as_deref()
+ .map(helix_stdx::path::get_relative_path);
+ path.as_deref()
+ .and_then(Path::to_str)
+ .unwrap_or(SCRATCH_BUFFER_NAME)
+ .to_string()
+ .into()
+ }),
+ ui::PickerColumn::new("flags", |item: &JumpMeta, _| {
+ let mut flags = Vec::new();
+ if item.is_current {
+ flags.push("*");
+ }
+
+ if flags.is_empty() {
+ "".into()
+ } else {
+ format!(" ({})", flags.join("")).into()
+ }
+ }),
+ ui::PickerColumn::new("contents", |item: &JumpMeta, _| item.text.as_str().into()),
+ ];
+
let picker = Picker::new(
- cx.editor
- .tree
- .views()
- .flat_map(|(view, _)| {
- view.jumps
- .iter()
- .rev()
- .map(|(doc_id, selection)| new_meta(view, *doc_id, selection.clone()))
- })
- .collect(),
+ columns,
+ 1, // path
+ cx.editor.tree.views().flat_map(|(view, _)| {
+ view.jumps
+ .iter()
+ .rev()
+ .map(|(doc_id, selection)| new_meta(view, *doc_id, selection.clone()))
+ }),
(),
|cx, meta, action| {
cx.editor.switch(meta.id, action);
@@ -3054,33 +3026,6 @@ fn changed_file_picker(cx: &mut Context) {
style_renamed: Style,
}
- impl Item for FileChange {
- type Data = FileChangeData;
-
- fn format(&self, data: &Self::Data) -> Row {
- let process_path = |path: &PathBuf| {
- path.strip_prefix(&data.cwd)
- .unwrap_or(path)
- .display()
- .to_string()
- };
-
- let (sign, style, content) = match self {
- Self::Untracked { path } => ("[+]", data.style_untracked, process_path(path)),
- Self::Modified { path } => ("[~]", data.style_modified, process_path(path)),
- Self::Conflict { path } => ("[x]", data.style_conflict, process_path(path)),
- Self::Deleted { path } => ("[-]", data.style_deleted, process_path(path)),
- Self::Renamed { from_path, to_path } => (
- "[>]",
- data.style_renamed,
- format!("{} -> {}", process_path(from_path), process_path(to_path)),
- ),
- };
-
- Row::new([Cell::from(Span::styled(sign, style)), Cell::from(content)])
- }
- }
-
let cwd = helix_stdx::env::current_working_dir();
if !cwd.exists() {
cx.editor
@@ -3094,8 +3039,41 @@ fn changed_file_picker(cx: &mut Context) {
let deleted = cx.editor.theme.get("diff.minus");
let renamed = cx.editor.theme.get("diff.delta.moved");
+ let columns = [
+ PickerColumn::new("change", |change: &FileChange, data: &FileChangeData| {
+ match change {
+ FileChange::Untracked { .. } => Span::styled("+ untracked", data.style_untracked),
+ FileChange::Modified { .. } => Span::styled("~ modified", data.style_modified),
+ FileChange::Conflict { .. } => Span::styled("x conflict", data.style_conflict),
+ FileChange::Deleted { .. } => Span::styled("- deleted", data.style_deleted),
+ FileChange::Renamed { .. } => Span::styled("> renamed", data.style_renamed),
+ }
+ .into()
+ }),
+ PickerColumn::new("path", |change: &FileChange, data: &FileChangeData| {
+ let display_path = |path: &PathBuf| {
+ path.strip_prefix(&data.cwd)
+ .unwrap_or(path)
+ .display()
+ .to_string()
+ };
+ match change {
+ FileChange::Untracked { path } => display_path(path),
+ FileChange::Modified { path } => display_path(path),
+ FileChange::Conflict { path } => display_path(path),
+ FileChange::Deleted { path } => display_path(path),
+ FileChange::Renamed { from_path, to_path } => {
+ format!("{} -> {}", display_path(from_path), display_path(to_path))
+ }
+ }
+ .into()
+ }),
+ ];
+
let picker = Picker::new(
- Vec::new(),
+ columns,
+ 1, // path
+ [],
FileChangeData {
cwd: cwd.clone(),
style_untracked: added,
@@ -3116,7 +3094,7 @@ fn changed_file_picker(cx: &mut Context) {
}
},
)
- .with_preview(|_editor, meta| Some((meta.path().to_path_buf().into(), None)));
+ .with_preview(|_editor, meta| Some((meta.path().into(), None)));
let injector = picker.injector();
cx.editor
@@ -3132,35 +3110,6 @@ fn changed_file_picker(cx: &mut Context) {
cx.push_layer(Box::new(overlaid(picker)));
}
-impl ui::menu::Item for MappableCommand {
- type Data = ReverseKeymap;
-
- fn format(&self, keymap: &Self::Data) -> Row {
- let fmt_binding = |bindings: &Vec<Vec<KeyEvent>>| -> String {
- bindings.iter().fold(String::new(), |mut acc, bind| {
- if !acc.is_empty() {
- acc.push(' ');
- }
- for key in bind {
- acc.push_str(&key.key_sequence_format());
- }
- acc
- })
- };
-
- match self {
- MappableCommand::Typable { doc, name, .. } => match keymap.get(name as &String) {
- Some(bindings) => format!("{} ({}) [:{}]", doc, fmt_binding(bindings), name).into(),
- None => format!("{} [:{}]", doc, name).into(),
- },
- MappableCommand::Static { doc, name, .. } => match keymap.get(*name) {
- Some(bindings) => format!("{} ({}) [{}]", doc, fmt_binding(bindings), name).into(),
- None => format!("{} [{}]", doc, name).into(),
- },
- }
- }
-}
-
pub fn command_palette(cx: &mut Context) {
let register = cx.register;
let count = cx.count;
@@ -3171,16 +3120,45 @@ pub fn command_palette(cx: &mut Context) {
[&cx.editor.mode]
.reverse_map();
- let mut commands: Vec<MappableCommand> = MappableCommand::STATIC_COMMAND_LIST.into();
- commands.extend(typed::TYPABLE_COMMAND_LIST.iter().map(|cmd| {
- MappableCommand::Typable {
- name: cmd.name.to_owned(),
- doc: cmd.doc.to_owned(),
- args: Vec::new(),
- }
- }));
+ let commands = MappableCommand::STATIC_COMMAND_LIST.iter().cloned().chain(
+ typed::TYPABLE_COMMAND_LIST
+ .iter()
+ .map(|cmd| MappableCommand::Typable {
+ name: cmd.name.to_owned(),
+ args: Vec::new(),
+ doc: cmd.doc.to_owned(),
+ }),
+ );
+
+ let columns = [
+ ui::PickerColumn::new("name", |item, _| match item {
+ MappableCommand::Typable { name, .. } => format!(":{name}").into(),
+ MappableCommand::Static { name, .. } => (*name).into(),
+ }),
+ ui::PickerColumn::new(
+ "bindings",
+ |item: &MappableCommand, keymap: &crate::keymap::ReverseKeymap| {
+ keymap
+ .get(item.name())
+ .map(|bindings| {
+ bindings.iter().fold(String::new(), |mut acc, bind| {
+ if !acc.is_empty() {
+ acc.push(' ');
+ }
+ for key in bind {
+ acc.push_str(&key.key_sequence_format());
+ }
+ acc
+ })
+ })
+ .unwrap_or_default()
+ .into()
+ },
+ ),
+ ui::PickerColumn::new("doc", |item: &MappableCommand, _| item.doc().into()),
+ ];
- let picker = Picker::new(commands, keymap, move |cx, command, _action| {
+ let picker = Picker::new(columns, 0, commands, keymap, move |cx, command, _action| {
let mut ctx = Context {
register,
count,
diff --git a/helix-term/src/commands/dap.rs b/helix-term/src/commands/dap.rs
index 0e50377a..0b754bc2 100644
--- a/helix-term/src/commands/dap.rs
+++ b/helix-term/src/commands/dap.rs
@@ -12,7 +12,7 @@ use helix_view::editor::Breakpoint;
use serde_json::{to_value, Value};
use tokio_stream::wrappers::UnboundedReceiverStream;
-use tui::{text::Spans, widgets::Row};
+use tui::text::Spans;
use std::collections::HashMap;
use std::future::Future;
@@ -22,38 +22,6 @@ use anyhow::{anyhow, bail};
use helix_view::handlers::dap::{breakpoints_changed, jump_to_stack_frame, select_thread_id};
-impl ui::menu::Item for StackFrame {
- type Data = ();
-
- fn format(&self, _data: &Self::Data) -> Row {
- self.name.as_str().into() // TODO: include thread_states in the label
- }
-}
-
-impl ui::menu::Item for DebugTemplate {
- type Data = ();
-
- fn format(&self, _data: &Self::Data) -> Row {
- self.name.as_str().into()
- }
-}
-
-impl ui::menu::Item for Thread {
- type Data = ThreadStates;
-
- fn format(&self, thread_states: &Self::Data) -> Row {
- format!(
- "{} ({})",
- self.name,
- thread_states
- .get(&self.id)
- .map(|state| state.as_str())
- .unwrap_or("unknown")
- )
- .into()
- }
-}
-
fn thread_picker(
cx: &mut Context,
callback_fn: impl Fn(&mut Editor, &dap::Thread) + Send + 'static,
@@ -73,13 +41,27 @@ fn thread_picker(
let debugger = debugger!(editor);
let thread_states = debugger.thread_states.clone();
- let picker = Picker::new(threads, thread_states, move |cx, thread, _action| {
- callback_fn(cx.editor, thread)
- })
+ let columns = [
+ ui::PickerColumn::new("name", |item: &Thread, _| item.name.as_str().into()),
+ ui::PickerColumn::new("state", |item: &Thread, thread_states: &ThreadStates| {
+ thread_states
+ .get(&item.id)
+ .map(|state| state.as_str())
+ .unwrap_or("unknown")
+ .into()
+ }),
+ ];
+ let picker = Picker::new(
+ columns,
+ 0,
+ threads,
+ thread_states,
+ move |cx, thread, _action| callback_fn(cx.editor, thread),
+ )
.with_preview(move |editor, thread| {
let frames = editor.debugger.as_ref()?.stack_frames.get(&thread.id)?;
let frame = frames.first()?;
- let path = frame.source.as_ref()?.path.clone()?;
+ let path = frame.source.as_ref()?.path.as_ref()?.as_path();
let pos = Some((
frame.line.saturating_sub(1),
frame.end_line.unwrap_or(frame.line).saturating_sub(1),
@@ -268,7 +250,14 @@ pub fn dap_launch(cx: &mut Context) {
let templates = config.templates.clone();
+ let columns = [ui::PickerColumn::new(
+ "template",
+ |item: &DebugTemplate, _| item.name.as_str().into(),
+ )];
+
cx.push_layer(Box::new(overlaid(Picker::new(
+ columns,
+ 0,
templates,
(),
|cx, template, _action| {
@@ -736,7 +725,10 @@ pub fn dap_switch_stack_frame(cx: &mut Context) {
let frames = debugger.stack_frames[&thread_id].clone();
- let picker = Picker::new(frames, (), move |cx, frame, _action| {
+ let columns = [ui::PickerColumn::new("frame", |item: &StackFrame, _| {
+ item.name.as_str().into() // TODO: include thread_states in the label
+ })];
+ let picker = Picker::new(columns, 0, frames, (), move |cx, frame, _action| {
let debugger = debugger!(cx.editor);
// TODO: this should be simpler to find
let pos = debugger.stack_frames[&thread_id]
@@ -755,10 +747,10 @@ pub fn dap_switch_stack_frame(cx: &mut Context) {
frame
.source
.as_ref()
- .and_then(|source| source.path.clone())
+ .and_then(|source| source.path.as_ref())
.map(|path| {
(
- path.into(),
+ path.as_path().into(),
Some((
frame.line.saturating_sub(1),
frame.end_line.unwrap_or(frame.line).saturating_sub(1),
diff --git a/helix-term/src/commands/lsp.rs b/helix-term/src/commands/lsp.rs
index d585e1be..103d1df2 100644
--- a/helix-term/src/commands/lsp.rs
+++ b/helix-term/src/commands/lsp.rs
@@ -9,14 +9,13 @@ use helix_lsp::{
Client, LanguageServerId, OffsetEncoding,
};
use tokio_stream::StreamExt;
-use tui::{
- text::{Span, Spans},
- widgets::Row,
-};
+use tui::{text::Span, widgets::Row};
use super::{align_view, push_jump, Align, Context, Editor};
-use helix_core::{syntax::LanguageServerFeature, text_annotations::InlineAnnotation, Selection};
+use helix_core::{
+ syntax::LanguageServerFeature, text_annotations::InlineAnnotation, Selection, Uri,
+};
use helix_stdx::path;
use helix_view::{
document::{DocumentInlayHints, DocumentInlayHintsId},
@@ -29,7 +28,7 @@ use helix_view::{
use crate::{
compositor::{self, Compositor},
job::Callback,
- ui::{self, overlay::overlaid, DynamicPicker, FileLocation, Picker, Popup, PromptEvent},
+ ui::{self, overlay::overlaid, FileLocation, Picker, Popup, PromptEvent},
};
use std::{
@@ -37,7 +36,7 @@ use std::{
collections::{BTreeMap, HashSet},
fmt::Write,
future::Future,
- path::{Path, PathBuf},
+ path::Path,
};
/// Gets the first language server that is attached to a document which supports a specific feature.
@@ -62,67 +61,10 @@ macro_rules! language_server_with_feature {
}};
}
-impl ui::menu::Item for lsp::Location {
- /// Current working directory.
- type Data = PathBuf;
-
- fn format(&self, cwdir: &Self::Data) -> Row {
- // The preallocation here will overallocate a few characters since it will account for the
- // URL's scheme, which is not used most of the time since that scheme will be "file://".
- // Those extra chars will be used to avoid allocating when writing the line number (in the
- // common case where it has 5 digits or less, which should be enough for a cast majority
- // of usages).
- let mut res = String::with_capacity(self.uri.as_str().len());
-
- if self.uri.scheme() == "file" {
- // With the preallocation above and UTF-8 paths already, this closure will do one (1)
- // allocation, for `to_file_path`, else there will be two (2), with `to_string_lossy`.
- let mut write_path_to_res = || -> Option<()> {
- let path = self.uri.to_file_path().ok()?;
- res.push_str(&path.strip_prefix(cwdir).unwrap_or(&path).to_string_lossy());
- Some(())
- };
- write_path_to_res();
- } else {
- // Never allocates since we declared the string with this capacity already.
- res.push_str(self.uri.as_str());
- }
-
- // Most commonly, this will not allocate, especially on Unix systems where the root prefix
- // is a simple `/` and not `C:\` (with whatever drive letter)
- write!(&mut res, ":{}", self.range.start.line + 1)
- .expect("Will only failed if allocating fail");
- res.into()
- }
-}
-
struct SymbolInformationItem {
symbol: lsp::SymbolInformation,
offset_encoding: OffsetEncoding,
-}
-
-impl ui::menu::Item for SymbolInformationItem {
- /// Path to currently focussed document
- type Data = Option<lsp::Url>;
-
- fn format(&self, current_doc_path: &Self::Data) -> Row {
- if current_doc_path.as_ref() == Some(&self.symbol.location.uri) {
- self.symbol.name.as_str().into()
- } else {
- match self.symbol.location.uri.to_file_path() {
- Ok(path) => {
- let get_relative_path = path::get_relative_path(path.as_path());
- format!(
- "{} ({})",
- &self.symbol.name,
- get_relative_path.to_string_lossy()
- )
- .into()
- }
- Err(_) => format!("{} ({})", &self.symbol.name, &self.symbol.location.uri).into(),
- }
- }
- }
+ uri: Uri,
}
struct DiagnosticStyles {
@@ -133,60 +75,15 @@ struct DiagnosticStyles {
}
struct PickerDiagnostic {
- path: PathBuf,
+ uri: Uri,
diag: lsp::Diagnostic,
offset_encoding: OffsetEncoding,
}
-impl ui::menu::Item for PickerDiagnostic {
- type Data = (DiagnosticStyles, DiagnosticsFormat);
-
- fn format(&self, (styles, format): &Self::Data) -> Row {
- let mut style = self
- .diag
- .severity
- .map(|s| match s {
- DiagnosticSeverity::HINT => styles.hint,
- DiagnosticSeverity::INFORMATION => styles.info,
- DiagnosticSeverity::WARNING => styles.warning,
- DiagnosticSeverity::ERROR => styles.error,
- _ => Style::default(),
- })
- .unwrap_or_default();
-
- // remove background as it is distracting in the picker list
- style.bg = None;
-
- let code = match self.diag.code.as_ref() {
- Some(NumberOrString::Number(n)) => format!(" ({n})"),
- Some(NumberOrString::String(s)) => format!(" ({s})"),
- None => String::new(),
- };
-
- let path = match format {
- DiagnosticsFormat::HideSourcePath => String::new(),
- DiagnosticsFormat::ShowSourcePath => {
- let path = path::get_truncated_path(&self.path);
- format!("{}: ", path.to_string_lossy())
- }
- };
-
- Spans::from(vec![
- Span::raw(path),
- Span::styled(&self.diag.message, style),
- Span::styled(code, style),
- ])
- .into()
- }
-}
-
-fn location_to_file_location(location: &lsp::Location) -> FileLocation {
- let path = location.uri.to_file_path().unwrap();
- let line = Some((
- location.range.start.line as usize,
- location.range.end.line as usize,
- ));
- (path.into(), line)
+fn uri_to_file_location<'a>(uri: &'a Uri, range: &lsp::Range) -> Option<FileLocation<'a>> {
+ let path = uri.as_path()?;
+ let line = Some((range.start.line as usize, range.end.line as usize));
+ Some((path.into(), line))
}
fn jump_to_location(
@@ -241,20 +138,39 @@ fn jump_to_position(
}
}
-type SymbolPicker = Picker<SymbolInformationItem>;
-
-fn sym_picker(symbols: Vec<SymbolInformationItem>, current_path: Option<lsp::Url>) -> SymbolPicker {
- // TODO: drop current_path comparison and instead use workspace: bool flag?
- Picker::new(symbols, current_path, move |cx, item, action| {
- jump_to_location(
- cx.editor,
- &item.symbol.location,
- item.offset_encoding,
- action,
- );
- })
- .with_preview(move |_editor, item| Some(location_to_file_location(&item.symbol.location)))
- .truncate_start(false)
+fn display_symbol_kind(kind: lsp::SymbolKind) -> &'static str {
+ match kind {
+ lsp::SymbolKind::FILE => "file",
+ lsp::SymbolKind::MODULE => "module",
+ lsp::SymbolKind::NAMESPACE => "namespace",
+ lsp::SymbolKind::PACKAGE => "package",
+ lsp::SymbolKind::CLASS => "class",
+ lsp::SymbolKind::METHOD => "method",
+ lsp::SymbolKind::PROPERTY => "property",
+ lsp::SymbolKind::FIELD => "field",
+ lsp::SymbolKind::CONSTRUCTOR => "construct",
+ lsp::SymbolKind::ENUM => "enum",
+ lsp::SymbolKind::INTERFACE => "interface",
+ lsp::SymbolKind::FUNCTION => "function",
+ lsp::SymbolKind::VARIABLE => "variable",
+ lsp::SymbolKind::CONSTANT => "constant",
+ lsp::SymbolKind::STRING => "string",
+ lsp::SymbolKind::NUMBER => "number",
+ lsp::SymbolKind::BOOLEAN => "boolean",
+ lsp::SymbolKind::ARRAY => "array",
+ lsp::SymbolKind::OBJECT => "object",
+ lsp::SymbolKind::KEY => "key",
+ lsp::SymbolKind::NULL => "null",
+ lsp::SymbolKind::ENUM_MEMBER => "enummem",
+ lsp::SymbolKind::STRUCT => "struct",
+ lsp::SymbolKind::EVENT => "event",
+ lsp::SymbolKind::OPERATOR => "operator",
+ lsp::SymbolKind::TYPE_PARAMETER => "typeparam",
+ _ => {
+ log::warn!("Unknown symbol kind: {:?}", kind);
+ ""
+ }
+ }
}
#[derive(Copy, Clone, PartialEq)]
@@ -263,22 +179,24 @@ enum DiagnosticsFormat {
HideSourcePath,
}
+type DiagnosticsPicker = Picker<PickerDiagnostic, DiagnosticStyles>;
+
fn diag_picker(
cx: &Context,
- diagnostics: BTreeMap<PathBuf, Vec<(lsp::Diagnostic, LanguageServerId)>>,
+ diagnostics: BTreeMap<Uri, Vec<(lsp::Diagnostic, LanguageServerId)>>,
format: DiagnosticsFormat,
-) -> Picker<PickerDiagnostic> {
+) -> DiagnosticsPicker {
// TODO: drop current_path comparison and instead use workspace: bool flag?
// flatten the map to a vec of (url, diag) pairs
let mut flat_diag = Vec::new();
- for (path, diags) in diagnostics {
+ for (uri, diags) in diagnostics {
flat_diag.reserve(diags.len());
for (diag, ls) in diags {
if let Some(ls) = cx.editor.language_server_by_id(ls) {
flat_diag.push(PickerDiagnostic {
- path: path.clone(),
+ uri: uri.clone(),
diag,
offset_encoding: ls.offset_encoding(),
});
@@ -293,22 +211,72 @@ fn diag_picker(
error: cx.editor.theme.get("error"),
};
+ let mut columns = vec![
+ ui::PickerColumn::new(
+ "severity",
+ |item: &PickerDiagnostic, styles: &DiagnosticStyles| {
+ match item.diag.severity {
+ Some(DiagnosticSeverity::HINT) => Span::styled("HINT", styles.hint),
+ Some(DiagnosticSeverity::INFORMATION) => Span::styled("INFO", styles.info),
+ Some(DiagnosticSeverity::WARNING) => Span::styled("WARN", styles.warning),
+ Some(DiagnosticSeverity::ERROR) => Span::styled("ERROR", styles.error),
+ _ => Span::raw(""),
+ }
+ .into()
+ },
+ ),
+ ui::PickerColumn::new("code", |item: &PickerDiagnostic, _| {
+ match item.diag.code.as_ref() {
+ Some(NumberOrString::Number(n)) => n.to_string().into(),
+ Some(NumberOrString::String(s)) => s.as_str().into(),
+ None => "".into(),
+ }
+ }),
+ ui::PickerColumn::new("message", |item: &PickerDiagnostic, _| {
+ item.diag.message.as_str().into()
+ }),
+ ];
+ let mut primary_column = 2; // message
+
+ if format == DiagnosticsFormat::ShowSourcePath {
+ columns.insert(
+ // between message code and message
+ 2,
+ ui::PickerColumn::new("path", |item: &PickerDiagnostic, _| {
+ if let Some(path) = item.uri.as_path() {
+ path::get_truncated_path(path)
+ .to_string_lossy()
+ .to_string()
+ .into()
+ } else {
+ Default::default()
+ }
+ }),
+ );
+ primary_column += 1;
+ }
+
Picker::new(
+ columns,
+ primary_column,
flat_diag,
- (styles, format),
+ styles,
move |cx,
PickerDiagnostic {
- path,
+ uri,
diag,
offset_encoding,
},
action| {
+ let Some(path) = uri.as_path() else {
+ return;
+ };
jump_to_position(cx.editor, path, diag.range, *offset_encoding, action)
},
)
- .with_preview(move |_editor, PickerDiagnostic { path, diag, .. }| {
+ .with_preview(move |_editor, PickerDiagnostic { uri, diag, .. }| {
let line = Some((diag.range.start.line as usize, diag.range.end.line as usize));
- Some((path.clone().into(), line))
+ Some((uri.as_path()?.into(), line))
})
.truncate_start(false)
}
@@ -317,6 +285,7 @@ pub fn symbol_picker(cx: &mut Context) {
fn nested_to_flat(
list: &mut Vec<SymbolInformationItem>,
file: &lsp::TextDocumentIdentifier,
+ uri: &Uri,
symbol: lsp::DocumentSymbol,
offset_encoding: OffsetEncoding,
) {
@@ -331,9 +300,10 @@ pub fn symbol_picker(cx: &mut Context) {
container_name: None,
},
offset_encoding,
+ uri: uri.clone(),
});
for child in symbol.children.into_iter().flatten() {
- nested_to_flat(list, file, child, offset_encoding);
+ nested_to_flat(list, file, uri, child, offset_encoding);
}
}
let doc = doc!(cx.editor);
@@ -347,6 +317,9 @@ pub fn symbol_picker(cx: &mut Context) {
let request = language_server.document_symbols(doc.identifier()).unwrap();
let offset_encoding = language_server.offset_encoding();
let doc_id = doc.identifier();
+ let doc_uri = doc
+ .uri()
+ .expect("docs with active language servers must be backed by paths");
async move {
let json = request.await?;
@@ -361,6 +334,7 @@ pub fn symbol_picker(cx: &mut Context) {
lsp::DocumentSymbolResponse::Flat(symbols) => symbols
.into_iter()
.map(|symbol| SymbolInformationItem {
+ uri: doc_uri.clone(),
symbol,
offset_encoding,
})
@@ -368,7 +342,13 @@ pub fn symbol_picker(cx: &mut Context) {
lsp::DocumentSymbolResponse::Nested(symbols) => {
let mut flat_symbols = Vec::new();
for symbol in symbols {
- nested_to_flat(&mut flat_symbols, &doc_id, symbol, offset_encoding)
+ nested_to_flat(
+ &mut flat_symbols,
+ &doc_id,
+ &doc_uri,
+ symbol,
+ offset_encoding,
+ )
}
flat_symbols
}
@@ -377,7 +357,6 @@ pub fn symbol_picker(cx: &mut Context) {
}
})
.collect();
- let current_url = doc.url();
if futures.is_empty() {
cx.editor
@@ -392,7 +371,37 @@ pub fn symbol_picker(cx: &mut Context) {
symbols.append(&mut lsp_items);
}
let call = move |_editor: &mut Editor, compositor: &mut Compositor| {
- let picker = sym_picker(symbols, current_url);
+ let columns = [
+ ui::PickerColumn::new("kind", |item: &SymbolInformationItem, _| {
+ display_symbol_kind(item.symbol.kind).into()
+ }),
+ // Some symbols in the document symbol picker may have a URI that isn't
+ // the current file. It should be rare though, so we concatenate that
+ // URI in with the symbol name in this picker.
+ ui::PickerColumn::new("name", |item: &SymbolInformationItem, _| {
+ item.symbol.name.as_str().into()
+ }),
+ ];
+
+ let picker = Picker::new(
+ columns,
+ 1, // name column
+ symbols,
+ (),
+ move |cx, item, action| {
+ jump_to_location(
+ cx.editor,
+ &item.symbol.location,
+ item.offset_encoding,
+ action,
+ );
+ },
+ )
+ .with_preview(move |_editor, item| {
+ uri_to_file_location(&item.uri, &item.symbol.location.range)
+ })
+ .truncate_start(false);
+
compositor.push(Box::new(overlaid(picker)))
};
@@ -401,6 +410,8 @@ pub fn symbol_picker(cx: &mut Context) {
}
pub fn workspace_symbol_picker(cx: &mut Context) {
+ use crate::ui::picker::Injector;
+
let doc = doc!(cx.editor);
if doc
.language_servers_with_feature(LanguageServerFeature::WorkspaceSymbols)
@@ -412,25 +423,37 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
return;
}
- let get_symbols = move |pattern: String, editor: &mut Editor| {
+ let get_symbols = |pattern: &str, editor: &mut Editor, _data, injector: &Injector<_, _>| {
let doc = doc!(editor);
let mut seen_language_servers = HashSet::new();
let mut futures: FuturesOrdered<_> = doc
.language_servers_with_feature(LanguageServerFeature::WorkspaceSymbols)
.filter(|ls| seen_language_servers.insert(ls.id()))
.map(|language_server| {
- let request = language_server.workspace_symbols(pattern.clone()).unwrap();
+ let request = language_server
+ .workspace_symbols(pattern.to_string())
+ .unwrap();
let offset_encoding = language_server.offset_encoding();
async move {
let json = request.await?;
- let response =
+ let response: Vec<_> =
serde_json::from_value::<Option<Vec<lsp::SymbolInformation>>>(json)?
.unwrap_or_default()
.into_iter()
- .map(|symbol| SymbolInformationItem {
- symbol,
- offset_encoding,
+ .filter_map(|symbol| {
+ let uri = match Uri::try_from(&symbol.location.uri) {
+ Ok(uri) => uri,
+ Err(err) => {
+ log::warn!("discarding symbol with invalid URI: {err}");
+ return None;
+ }
+ };
+ Some(SymbolInformationItem {
+ symbol,
+ uri,
+ offset_encoding,
+ })
})
.collect();
@@ -443,44 +466,66 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
editor.set_error("No configured language server supports workspace symbols");
}
+ let injector = injector.clone();
async move {
- let mut symbols = Vec::new();
// TODO if one symbol request errors, all other requests are discarded (even if they're valid)
- while let Some(mut lsp_items) = futures.try_next().await? {
- symbols.append(&mut lsp_items);
+ while let Some(lsp_items) = futures.try_next().await? {
+ for item in lsp_items {
+ injector.push(item)?;
+ }
}
- anyhow::Ok(symbols)
+ Ok(())
}
.boxed()
};
+ let columns = [
+ ui::PickerColumn::new("kind", |item: &SymbolInformationItem, _| {
+ display_symbol_kind(item.symbol.kind).into()
+ }),
+ ui::PickerColumn::new("name", |item: &SymbolInformationItem, _| {
+ item.symbol.name.as_str().into()
+ })
+ .without_filtering(),
+ ui::PickerColumn::new("path", |item: &SymbolInformationItem, _| {
+ if let Some(path) = item.uri.as_path() {
+ path::get_relative_path(path)
+ .to_string_lossy()
+ .to_string()
+ .into()
+ } else {
+ item.symbol.location.uri.to_string().into()
+ }
+ }),
+ ];
+
+ let picker = Picker::new(
+ columns,
+ 1, // name column
+ [],
+ (),
+ move |cx, item, action| {
+ jump_to_location(
+ cx.editor,
+ &item.symbol.location,
+ item.offset_encoding,
+ action,
+ );
+ },
+ )
+ .with_preview(|_editor, item| uri_to_file_location(&item.uri, &item.symbol.location.range))
+ .with_dynamic_query(get_symbols, None)
+ .truncate_start(false);
- let current_url = doc.url();
- let initial_symbols = get_symbols("".to_owned(), cx.editor);
-
- cx.jobs.callback(async move {
- let symbols = initial_symbols.await?;
- let call = move |_editor: &mut Editor, compositor: &mut Compositor| {
- let picker = sym_picker(symbols, current_url);
- let dyn_picker = DynamicPicker::new(picker, Box::new(get_symbols));
- compositor.push(Box::new(overlaid(dyn_picker)))
- };
-
- Ok(Callback::EditorCompositor(Box::new(call)))
- });
+ cx.push_layer(Box::new(overlaid(picker)));
}
pub fn diagnostics_picker(cx: &mut Context) {
let doc = doc!(cx.editor);
- if let Some(current_path) = doc.path() {
- let diagnostics = cx
- .editor
- .diagnostics
- .get(current_path)
- .cloned()
- .unwrap_or_default();
+ if let Some(uri) = doc.uri() {
+ let diagnostics = cx.editor.diagnostics.get(&uri).cloned().unwrap_or_default();
let picker = diag_picker(
cx,
- [(current_path.clone(), diagnostics)].into(),
+ [(uri, diagnostics)].into(),
DiagnosticsFormat::HideSourcePath,
);
cx.push_layer(Box::new(overlaid(picker)));
@@ -741,13 +786,6 @@ pub fn code_action(cx: &mut Context) {
});
}
-impl ui::menu::Item for lsp::Command {
- type Data = ();
- fn format(&self, _data: &Self::Data) -> Row {
- self.title.as_str().into()
- }
-}
-
pub fn execute_lsp_command(
editor: &mut Editor,
language_server_id: LanguageServerId,
@@ -817,10 +855,67 @@ fn goto_impl(
}
[] => unreachable!("`locations` should be non-empty for `goto_impl`"),
_locations => {
- let picker = Picker::new(locations, cwdir, move |cx, location, action| {
+ let columns = [ui::PickerColumn::new(
+ "location",
+ |item: &lsp::Location, cwdir: &std::path::PathBuf| {
+ // The preallocation here will overallocate a few characters since it will account for the
+ // URL's scheme, which is not used most of the time since that scheme will be "file://".
+ // Those extra chars will be used to avoid allocating when writing the line number (in the
+                    // common case where it has 5 digits or less, which should be enough for a vast majority
+ // of usages).
+ let mut res = String::with_capacity(item.uri.as_str().len());
+
+ if item.uri.scheme() == "file" {
+ // With the preallocation above and UTF-8 paths already, this closure will do one (1)
+ // allocation, for `to_file_path`, else there will be two (2), with `to_string_lossy`.
+ if let Ok(path) = item.uri.to_file_path() {
+ // We don't convert to a `helix_core::Uri` here because we've already checked the scheme.
+ // This path won't be normalized but it's only used for display.
+ res.push_str(
+ &path.strip_prefix(cwdir).unwrap_or(&path).to_string_lossy(),
+ );
+ }
+ } else {
+ // Never allocates since we declared the string with this capacity already.
+ res.push_str(item.uri.as_str());
+ }
+
+ // Most commonly, this will not allocate, especially on Unix systems where the root prefix
+ // is a simple `/` and not `C:\` (with whatever drive letter)
+ write!(&mut res, ":{}", item.range.start.line + 1)
+ .expect("Will only failed if allocating fail");
+ res.into()
+ },
+ )];
+
+ let picker = Picker::new(columns, 0, locations, cwdir, move |cx, location, action| {
jump_to_location(cx.editor, location, offset_encoding, action)
})
- .with_preview(move |_editor, location| Some(location_to_file_location(location)));
+ .with_preview(move |_editor, location| {
+ use crate::ui::picker::PathOrId;
+
+ let lines = Some((
+ location.range.start.line as usize,
+ location.range.end.line as usize,
+ ));
+
+ // TODO: we should avoid allocating by doing the Uri conversion ahead of time.
+ //
+ // To do this, introduce a `Location` type in `helix-core` that reuses the core
+ // `Uri` type instead of the LSP `Url` type and replaces the LSP `Range` type.
+ // Refactor the callers of `goto_impl` to pass iterators that translate the
+ // LSP location type to the custom one in core, or have them collect and pass
+ // `Vec<Location>`s. Replace the `uri_to_file_location` function with
+ // `location_to_file_location` that takes only `&helix_core::Location` as
+ // parameters.
+ //
+ // By doing this we can also eliminate the duplicated URI info in the
+ // `SymbolInformationItem` type and introduce a custom Symbol type in `helix-core`
+ // which will be reused in the future for tree-sitter based symbol pickers.
+ let path = Uri::try_from(&location.uri).ok()?.as_path_buf()?;
+ #[allow(deprecated)]
+ Some((PathOrId::from_path_buf(path), lines))
+ });
compositor.push(Box::new(overlaid(picker)));
}
}
diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs
index ed1547f1..1b828b3f 100644
--- a/helix-term/src/commands/typed.rs
+++ b/helix-term/src/commands/typed.rs
@@ -9,7 +9,6 @@ use super::*;
use helix_core::fuzzy::fuzzy_match;
use helix_core::indent::MAX_INDENT;
use helix_core::{line_ending, shellwords::Shellwords};
-use helix_lsp::LanguageServerId;
use helix_view::document::{read_to_string, DEFAULT_LANGUAGE_NAME};
use helix_view::editor::{CloseError, ConfigEvent};
use serde_json::Value;
@@ -1378,16 +1377,6 @@ fn lsp_workspace_command(
return Ok(());
}
- struct LsIdCommand(LanguageServerId, helix_lsp::lsp::Command);
-
- impl ui::menu::Item for LsIdCommand {
- type Data = ();
-
- fn format(&self, _data: &Self::Data) -> Row {
- self.1.title.as_str().into()
- }
- }
-
let doc = doc!(cx.editor);
let ls_id_commands = doc
.language_servers_with_feature(LanguageServerFeature::WorkspaceCommand)
@@ -1402,7 +1391,7 @@ fn lsp_workspace_command(
if args.is_empty() {
let commands = ls_id_commands
.map(|(ls_id, command)| {
- LsIdCommand(
+ (
ls_id,
helix_lsp::lsp::Command {
title: command.clone(),
@@ -1415,10 +1404,18 @@ fn lsp_workspace_command(
let callback = async move {
let call: job::Callback = Callback::EditorCompositor(Box::new(
move |_editor: &mut Editor, compositor: &mut Compositor| {
+ let columns = [ui::PickerColumn::new(
+ "title",
+ |(_ls_id, command): &(_, helix_lsp::lsp::Command), _| {
+ command.title.as_str().into()
+ },
+ )];
let picker = ui::Picker::new(
+ columns,
+ 0,
commands,
(),
- move |cx, LsIdCommand(ls_id, command), _action| {
+ move |cx, (ls_id, command), _action| {
execute_lsp_command(cx.editor, *ls_id, command.clone());
},
);
diff --git a/helix-term/src/ui/menu.rs b/helix-term/src/ui/menu.rs
index c5006f95..c120d0b2 100644
--- a/helix-term/src/ui/menu.rs
+++ b/helix-term/src/ui/menu.rs
@@ -1,11 +1,11 @@
-use std::{borrow::Cow, cmp::Reverse, path::PathBuf};
+use std::{borrow::Cow, cmp::Reverse};
use crate::{
compositor::{Callback, Component, Compositor, Context, Event, EventResult},
ctrl, key, shift,
};
use helix_core::fuzzy::MATCHER;
-use nucleo::pattern::{Atom, AtomKind, CaseMatching};
+use nucleo::pattern::{Atom, AtomKind, CaseMatching, Normalization};
use nucleo::{Config, Utf32Str};
use tui::{buffer::Buffer as Surface, widgets::Table};
@@ -31,18 +31,6 @@ pub trait Item: Sync + Send + 'static {
}
}
-impl Item for PathBuf {
- /// Root prefix to strip.
- type Data = PathBuf;
-
- fn format(&self, root_path: &Self::Data) -> Row {
- self.strip_prefix(root_path)
- .unwrap_or(self)
- .to_string_lossy()
- .into()
- }
-}
-
pub type MenuCallback<T> = Box<dyn Fn(&mut Editor, Option<&T>, MenuEvent)>;
pub struct Menu<T: Item> {
@@ -92,7 +80,13 @@ impl<T: Item> Menu<T> {
pub fn score(&mut self, pattern: &str, incremental: bool) {
let mut matcher = MATCHER.lock();
matcher.config = Config::DEFAULT;
- let pattern = Atom::new(pattern, CaseMatching::Ignore, AtomKind::Fuzzy, false);
+ let pattern = Atom::new(
+ pattern,
+ CaseMatching::Ignore,
+ Normalization::Smart,
+ AtomKind::Fuzzy,
+ false,
+ );
let mut buf = Vec::new();
if incremental {
self.matches.retain_mut(|(index, score)| {
diff --git a/helix-term/src/ui/mod.rs b/helix-term/src/ui/mod.rs
index 0a65b12b..fae64062 100644
--- a/helix-term/src/ui/mod.rs
+++ b/helix-term/src/ui/mod.rs
@@ -21,7 +21,7 @@ pub use editor::EditorView;
use helix_stdx::rope;
pub use markdown::Markdown;
pub use menu::Menu;
-pub use picker::{DynamicPicker, FileLocation, Picker};
+pub use picker::{Column as PickerColumn, FileLocation, Picker};
pub use popup::Popup;
pub use prompt::{Prompt, PromptEvent};
pub use spinner::{ProgressSpinners, Spinner};
@@ -170,7 +170,9 @@ pub fn raw_regex_prompt(
cx.push_layer(Box::new(prompt));
}
-pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> Picker<PathBuf> {
+type FilePicker = Picker<PathBuf, PathBuf>;
+
+pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> FilePicker {
use ignore::{types::TypesBuilder, WalkBuilder};
use std::time::Instant;
@@ -217,7 +219,16 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> Picker
});
log::debug!("file_picker init {:?}", Instant::now().duration_since(now));
- let picker = Picker::new(Vec::new(), root, move |cx, path: &PathBuf, action| {
+ let columns = [PickerColumn::new(
+ "path",
+ |item: &PathBuf, root: &PathBuf| {
+ item.strip_prefix(root)
+ .unwrap_or(item)
+ .to_string_lossy()
+ .into()
+ },
+ )];
+ let picker = Picker::new(columns, 0, [], root, move |cx, path: &PathBuf, action| {
if let Err(e) = cx.editor.open(path, action) {
let err = if let Some(err) = e.source() {
format!("{}", err)
@@ -227,7 +238,7 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> Picker
cx.editor.set_error(err);
}
})
- .with_preview(|_editor, path| Some((path.clone().into(), None)));
+ .with_preview(|_editor, path| Some((path.as_path().into(), None)));
let injector = picker.injector();
let timeout = std::time::Instant::now() + std::time::Duration::from_millis(30);
diff --git a/helix-term/src/ui/picker.rs b/helix-term/src/ui/picker.rs
index b8ec57d5..07901239 100644
--- a/helix-term/src/ui/picker.rs
+++ b/helix-term/src/ui/picker.rs
@@ -1,33 +1,39 @@
+mod handlers;
+mod query;
+
use crate::{
alt,
compositor::{self, Component, Compositor, Context, Event, EventResult},
- ctrl,
- job::Callback,
- key, shift,
+ ctrl, key, shift,
ui::{
self,
document::{render_document, LineDecoration, LinePos, TextRenderer},
+ picker::query::PickerQuery,
EditorView,
},
};
-use futures_util::{future::BoxFuture, FutureExt};
-use nucleo::pattern::CaseMatching;
-use nucleo::{Config, Nucleo, Utf32String};
+use futures_util::future::BoxFuture;
+use helix_event::AsyncHook;
+use nucleo::pattern::{CaseMatching, Normalization};
+use nucleo::{Config, Nucleo};
+use thiserror::Error;
+use tokio::sync::mpsc::Sender;
use tui::{
buffer::Buffer as Surface,
layout::Constraint,
text::{Span, Spans},
- widgets::{Block, BorderType, Cell, Table},
+ widgets::{Block, BorderType, Cell, Row, Table},
};
use tui::widgets::Widget;
use std::{
+ borrow::Cow,
collections::HashMap,
io::Read,
- path::PathBuf,
+ path::{Path, PathBuf},
sync::{
- atomic::{self, AtomicBool},
+ atomic::{self, AtomicUsize},
Arc,
},
};
@@ -36,7 +42,6 @@ use crate::ui::{Prompt, PromptEvent};
use helix_core::{
char_idx_at_visual_offset, fuzzy::MATCHER, movement::Direction,
text_annotations::TextAnnotations, unicode::segmentation::UnicodeSegmentation, Position,
- Syntax,
};
use helix_view::{
editor::Action,
@@ -46,45 +51,50 @@ use helix_view::{
Document, DocumentId, Editor,
};
+use self::handlers::{DynamicQueryHandler, PreviewHighlightHandler};
+
pub const ID: &str = "picker";
-use super::{menu::Item, overlay::Overlay};
pub const MIN_AREA_WIDTH_FOR_PREVIEW: u16 = 72;
/// Biggest file size to preview in bytes
pub const MAX_FILE_SIZE_FOR_PREVIEW: u64 = 10 * 1024 * 1024;
#[derive(PartialEq, Eq, Hash)]
-pub enum PathOrId {
+pub enum PathOrId<'a> {
Id(DocumentId),
- Path(PathBuf),
+ // See [PathOrId::from_path_buf]: this will eventually become `Path(&Path)`.
+ Path(Cow<'a, Path>),
}
-impl PathOrId {
- fn get_canonicalized(self) -> Self {
- use PathOrId::*;
- match self {
- Path(path) => Path(helix_stdx::path::canonicalize(path)),
- Id(id) => Id(id),
- }
+impl<'a> PathOrId<'a> {
+ /// Creates a [PathOrId] from a PathBuf
+ ///
+ /// # Deprecated
+ /// The owned version of PathOrId will be removed in a future refactor
+ /// and replaced with `&'a Path`. See the caller of this function for
+ /// more details on its removal.
+ #[deprecated]
+ pub fn from_path_buf(path_buf: PathBuf) -> Self {
+ Self::Path(Cow::Owned(path_buf))
}
}
-impl From<PathBuf> for PathOrId {
- fn from(v: PathBuf) -> Self {
- Self::Path(v)
+impl<'a> From<&'a Path> for PathOrId<'a> {
+ fn from(path: &'a Path) -> Self {
+ Self::Path(Cow::Borrowed(path))
}
}
-impl From<DocumentId> for PathOrId {
+impl<'a> From<DocumentId> for PathOrId<'a> {
fn from(v: DocumentId) -> Self {
Self::Id(v)
}
}
-type FileCallback<T> = Box<dyn Fn(&Editor, &T) -> Option<FileLocation>>;
+type FileCallback<T> = Box<dyn for<'a> Fn(&'a Editor, &'a T) -> Option<FileLocation<'a>>>;
/// File path and range of lines (used to align and highlight lines)
-pub type FileLocation = (PathOrId, Option<(usize, usize)>);
+pub type FileLocation<'a> = (PathOrId<'a>, Option<(usize, usize)>);
pub enum CachedPreview {
Document(Box<Document>),
@@ -123,62 +133,120 @@ impl Preview<'_, '_> {
}
}
-fn item_to_nucleo<T: Item>(item: T, editor_data: &T::Data) -> Option<(T, Utf32String)> {
- let row = item.format(editor_data);
- let mut cells = row.cells.iter();
- let mut text = String::with_capacity(row.cell_text().map(|cell| cell.len()).sum());
- let cell = cells.next()?;
- if let Some(cell) = cell.content.lines.first() {
- for span in &cell.0 {
- text.push_str(&span.content);
- }
- }
-
- for cell in cells {
- text.push(' ');
- if let Some(cell) = cell.content.lines.first() {
- for span in &cell.0 {
- text.push_str(&span.content);
- }
+fn inject_nucleo_item<T, D>(
+ injector: &nucleo::Injector<T>,
+ columns: &[Column<T, D>],
+ item: T,
+ editor_data: &D,
+) {
+ injector.push(item, |item, dst| {
+ for (column, text) in columns.iter().filter(|column| column.filter).zip(dst) {
+ *text = column.format_text(item, editor_data).into()
}
- }
- Some((item, text.into()))
+ });
}
-pub struct Injector<T: Item> {
+pub struct Injector<T, D> {
dst: nucleo::Injector<T>,
- editor_data: Arc<T::Data>,
- shutown: Arc<AtomicBool>,
+ columns: Arc<[Column<T, D>]>,
+ editor_data: Arc<D>,
+ version: usize,
+ picker_version: Arc<AtomicUsize>,
+ /// A marker that requests a redraw when the injector drops.
+ /// This marker causes the "running" indicator to disappear when a background job
+ /// providing items is finished and drops. This could be wrapped in an [Arc] to ensure
+ /// that the redraw is only requested when all Injectors drop for a Picker (which removes
+ /// the "running" indicator) but the redraw handle is debounced so this is unnecessary.
+ _redraw: helix_event::RequestRedrawOnDrop,
}
-impl<T: Item> Clone for Injector<T> {
+impl<I, D> Clone for Injector<I, D> {
fn clone(&self) -> Self {
Injector {
dst: self.dst.clone(),
+ columns: self.columns.clone(),
editor_data: self.editor_data.clone(),
- shutown: self.shutown.clone(),
+ version: self.version,
+ picker_version: self.picker_version.clone(),
+ _redraw: helix_event::RequestRedrawOnDrop,
}
}
}
+#[derive(Error, Debug)]
+#[error("picker has been shut down")]
pub struct InjectorShutdown;
-impl<T: Item> Injector<T> {
+impl<T, D> Injector<T, D> {
pub fn push(&self, item: T) -> Result<(), InjectorShutdown> {
- if self.shutown.load(atomic::Ordering::Relaxed) {
+ if self.version != self.picker_version.load(atomic::Ordering::Relaxed) {
return Err(InjectorShutdown);
}
- if let Some((item, matcher_text)) = item_to_nucleo(item, &self.editor_data) {
- self.dst.push(item, |dst| dst[0] = matcher_text);
- }
+ inject_nucleo_item(&self.dst, &self.columns, item, &self.editor_data);
Ok(())
}
}
-pub struct Picker<T: Item> {
- editor_data: Arc<T::Data>,
- shutdown: Arc<AtomicBool>,
+type ColumnFormatFn<T, D> = for<'a> fn(&'a T, &'a D) -> Cell<'a>;
+
+pub struct Column<T, D> {
+ name: Arc<str>,
+ format: ColumnFormatFn<T, D>,
+ /// Whether the column should be passed to nucleo for matching and filtering.
+ /// `DynamicPicker` uses this so that the dynamic column (for example regex in
+ /// global search) is not used for filtering twice.
+ filter: bool,
+ hidden: bool,
+}
+
+impl<T, D> Column<T, D> {
+ pub fn new(name: impl Into<Arc<str>>, format: ColumnFormatFn<T, D>) -> Self {
+ Self {
+ name: name.into(),
+ format,
+ filter: true,
+ hidden: false,
+ }
+ }
+
+ /// A column which does not display any contents
+ pub fn hidden(name: impl Into<Arc<str>>) -> Self {
+ let format = |_: &T, _: &D| unreachable!();
+
+ Self {
+ name: name.into(),
+ format,
+ filter: false,
+ hidden: true,
+ }
+ }
+
+ pub fn without_filtering(mut self) -> Self {
+ self.filter = false;
+ self
+ }
+
+ fn format<'a>(&self, item: &'a T, data: &'a D) -> Cell<'a> {
+ (self.format)(item, data)
+ }
+
+ fn format_text<'a>(&self, item: &'a T, data: &'a D) -> Cow<'a, str> {
+ let text: String = self.format(item, data).content.into();
+ text.into()
+ }
+}
+
+/// Returns a new list of options to replace the contents of the picker
+/// when called with the current picker query,
+type DynQueryCallback<T, D> =
+ fn(&str, &mut Editor, Arc<D>, &Injector<T, D>) -> BoxFuture<'static, anyhow::Result<()>>;
+
+pub struct Picker<T: 'static + Send + Sync, D: 'static> {
+ columns: Arc<[Column<T, D>]>,
+ primary_column: usize,
+ editor_data: Arc<D>,
+ version: Arc<AtomicUsize>,
matcher: Nucleo<T>,
/// Current height of the completions box
@@ -186,7 +254,7 @@ pub struct Picker<T: Item> {
cursor: u32,
prompt: Prompt,
- previous_pattern: String,
+ query: PickerQuery,
/// Whether to show the preview panel (default true)
show_preview: bool,
@@ -197,67 +265,101 @@ pub struct Picker<T: Item> {
pub truncate_start: bool,
/// Caches paths to documents
- preview_cache: HashMap<PathBuf, CachedPreview>,
+ preview_cache: HashMap<Arc<Path>, CachedPreview>,
read_buffer: Vec<u8>,
/// Given an item in the picker, return the file path and line number to display.
file_fn: Option<FileCallback<T>>,
+ /// An event handler for syntax highlighting the currently previewed file.
+ preview_highlight_handler: Sender<Arc<Path>>,
+ dynamic_query_handler: Option<Sender<Arc<str>>>,
}
-impl<T: Item + 'static> Picker<T> {
- pub fn stream(editor_data: T::Data) -> (Nucleo<T>, Injector<T>) {
+impl<T: 'static + Send + Sync, D: 'static + Send + Sync> Picker<T, D> {
+ pub fn stream(
+ columns: impl IntoIterator<Item = Column<T, D>>,
+ editor_data: D,
+ ) -> (Nucleo<T>, Injector<T, D>) {
+ let columns: Arc<[_]> = columns.into_iter().collect();
+ let matcher_columns = columns.iter().filter(|col| col.filter).count() as u32;
+ assert!(matcher_columns > 0);
let matcher = Nucleo::new(
Config::DEFAULT,
Arc::new(helix_event::request_redraw),
None,
- 1,
+ matcher_columns,
);
let streamer = Injector {
dst: matcher.injector(),
+ columns,
editor_data: Arc::new(editor_data),
- shutown: Arc::new(AtomicBool::new(false)),
+ version: 0,
+ picker_version: Arc::new(AtomicUsize::new(0)),
+ _redraw: helix_event::RequestRedrawOnDrop,
};
(matcher, streamer)
}
- pub fn new(
- options: Vec<T>,
- editor_data: T::Data,
- callback_fn: impl Fn(&mut Context, &T, Action) + 'static,
- ) -> Self {
+ pub fn new<C, O, F>(
+ columns: C,
+ primary_column: usize,
+ options: O,
+ editor_data: D,
+ callback_fn: F,
+ ) -> Self
+ where
+ C: IntoIterator<Item = Column<T, D>>,
+ O: IntoIterator<Item = T>,
+ F: Fn(&mut Context, &T, Action) + 'static,
+ {
+ let columns: Arc<[_]> = columns.into_iter().collect();
+ let matcher_columns = columns.iter().filter(|col| col.filter).count() as u32;
+ assert!(matcher_columns > 0);
let matcher = Nucleo::new(
Config::DEFAULT,
Arc::new(helix_event::request_redraw),
None,
- 1,
+ matcher_columns,
);
let injector = matcher.injector();
for item in options {
- if let Some((item, matcher_text)) = item_to_nucleo(item, &editor_data) {
- injector.push(item, |dst| dst[0] = matcher_text);
- }
+ inject_nucleo_item(&injector, &columns, item, &editor_data);
}
Self::with(
matcher,
+ columns,
+ primary_column,
Arc::new(editor_data),
- Arc::new(AtomicBool::new(false)),
+ Arc::new(AtomicUsize::new(0)),
callback_fn,
)
}
pub fn with_stream(
matcher: Nucleo<T>,
- injector: Injector<T>,
+ primary_column: usize,
+ injector: Injector<T, D>,
callback_fn: impl Fn(&mut Context, &T, Action) + 'static,
) -> Self {
- Self::with(matcher, injector.editor_data, injector.shutown, callback_fn)
+ Self::with(
+ matcher,
+ injector.columns,
+ primary_column,
+ injector.editor_data,
+ injector.picker_version,
+ callback_fn,
+ )
}
fn with(
matcher: Nucleo<T>,
- editor_data: Arc<T::Data>,
- shutdown: Arc<AtomicBool>,
+ columns: Arc<[Column<T, D>]>,
+ default_column: usize,
+ editor_data: Arc<D>,
+ version: Arc<AtomicUsize>,
callback_fn: impl Fn(&mut Context, &T, Action) + 'static,
) -> Self {
+ assert!(!columns.is_empty());
+
let prompt = Prompt::new(
"".into(),
None,
@@ -265,29 +367,43 @@ impl<T: Item + 'static> Picker<T> {
|_editor: &mut Context, _pattern: &str, _event: PromptEvent| {},
);
+ let widths = columns
+ .iter()
+ .map(|column| Constraint::Length(column.name.chars().count() as u16))
+ .collect();
+
+ let query = PickerQuery::new(columns.iter().map(|col| &col.name).cloned(), default_column);
+
Self {
+ columns,
+ primary_column: default_column,
matcher,
editor_data,
- shutdown,
+ version,
cursor: 0,
prompt,
- previous_pattern: String::new(),
+ query,
truncate_start: true,
show_preview: true,
callback_fn: Box::new(callback_fn),
completion_height: 0,
- widths: Vec::new(),
+ widths,
preview_cache: HashMap::new(),
read_buffer: Vec::with_capacity(1024),
file_fn: None,
+ preview_highlight_handler: PreviewHighlightHandler::<T, D>::default().spawn(),
+ dynamic_query_handler: None,
}
}
- pub fn injector(&self) -> Injector<T> {
+ pub fn injector(&self) -> Injector<T, D> {
Injector {
dst: self.matcher.injector(),
+ columns: self.columns.clone(),
editor_data: self.editor_data.clone(),
- shutown: self.shutdown.clone(),
+ version: self.version.load(atomic::Ordering::Relaxed),
+ picker_version: self.version.clone(),
+ _redraw: helix_event::RequestRedrawOnDrop,
}
}
@@ -298,7 +414,7 @@ impl<T: Item + 'static> Picker<T> {
pub fn with_preview(
mut self,
- preview_fn: impl Fn(&Editor, &T) -> Option<FileLocation> + 'static,
+ preview_fn: impl for<'a> Fn(&'a Editor, &'a T) -> Option<FileLocation<'a>> + 'static,
) -> Self {
self.file_fn = Some(Box::new(preview_fn));
// assumption: if we have a preview we are matching paths... If this is ever
@@ -307,14 +423,20 @@ impl<T: Item + 'static> Picker<T> {
self
}
- pub fn set_options(&mut self, new_options: Vec<T>) {
- self.matcher.restart(false);
- let injector = self.matcher.injector();
- for item in new_options {
- if let Some((item, matcher_text)) = item_to_nucleo(item, &self.editor_data) {
- injector.push(item, |dst| dst[0] = matcher_text);
- }
- }
+ pub fn with_history_register(mut self, history_register: Option<char>) -> Self {
+ self.prompt.with_history_register(history_register);
+ self
+ }
+
+ pub fn with_dynamic_query(
+ mut self,
+ callback: DynQueryCallback<T, D>,
+ debounce_ms: Option<u64>,
+ ) -> Self {
+ let handler = DynamicQueryHandler::new(callback, debounce_ms).spawn();
+ helix_event::send_blocking(&handler, self.primary_query());
+ self.dynamic_query_handler = Some(handler);
+ self
}
/// Move the cursor by a number of lines, either down (`Forward`) or up (`Backward`)
@@ -367,52 +489,107 @@ impl<T: Item + 'static> Picker<T> {
.map(|item| item.data)
}
+ fn primary_query(&self) -> Arc<str> {
+ self.query
+ .get(&self.columns[self.primary_column].name)
+ .cloned()
+ .unwrap_or_else(|| "".into())
+ }
+
+ fn header_height(&self) -> u16 {
+ if self.columns.len() > 1 {
+ 1
+ } else {
+ 0
+ }
+ }
+
pub fn toggle_preview(&mut self) {
self.show_preview = !self.show_preview;
}
fn prompt_handle_event(&mut self, event: &Event, cx: &mut Context) -> EventResult {
if let EventResult::Consumed(_) = self.prompt.handle_event(event, cx) {
- let pattern = self.prompt.line();
- // TODO: better track how the pattern has changed
- if pattern != &self.previous_pattern {
- self.matcher.pattern.reparse(
- 0,
- pattern,
- CaseMatching::Smart,
- pattern.starts_with(&self.previous_pattern),
- );
- self.previous_pattern = pattern.clone();
- }
+ self.handle_prompt_change();
}
EventResult::Consumed(None)
}
- fn current_file(&self, editor: &Editor) -> Option<FileLocation> {
- self.selection()
- .and_then(|current| (self.file_fn.as_ref()?)(editor, current))
- .map(|(path_or_id, line)| (path_or_id.get_canonicalized(), line))
+ fn handle_prompt_change(&mut self) {
+ // TODO: better track how the pattern has changed
+ let line = self.prompt.line();
+ let old_query = self.query.parse(line);
+ if self.query == old_query {
+ return;
+ }
+ // If the query has meaningfully changed, reset the cursor to the top of the results.
+ self.cursor = 0;
+ // Have nucleo reparse each changed column.
+ for (i, column) in self
+ .columns
+ .iter()
+ .filter(|column| column.filter)
+ .enumerate()
+ {
+ let pattern = self
+ .query
+ .get(&column.name)
+ .map(|f| &**f)
+ .unwrap_or_default();
+ let old_pattern = old_query
+ .get(&column.name)
+ .map(|f| &**f)
+ .unwrap_or_default();
+ // Fastlane: most columns will remain unchanged after each edit.
+ if pattern == old_pattern {
+ continue;
+ }
+ let is_append = pattern.starts_with(old_pattern);
+ self.matcher.pattern.reparse(
+ i,
+ pattern,
+ CaseMatching::Smart,
+ Normalization::Smart,
+ is_append,
+ );
+ }
+ // If this is a dynamic picker, notify the query hook that the primary
+ // query might have been updated.
+ if let Some(handler) = &self.dynamic_query_handler {
+ helix_event::send_blocking(handler, self.primary_query());
+ }
}
- /// Get (cached) preview for a given path. If a document corresponding
+ /// Get (cached) preview for the currently selected item. If a document corresponding
/// to the path is already open in the editor, it is used instead.
fn get_preview<'picker, 'editor>(
&'picker mut self,
- path_or_id: PathOrId,
editor: &'editor Editor,
- ) -> Preview<'picker, 'editor> {
+ ) -> Option<(Preview<'picker, 'editor>, Option<(usize, usize)>)> {
+ let current = self.selection()?;
+ let (path_or_id, range) = (self.file_fn.as_ref()?)(editor, current)?;
+
match path_or_id {
PathOrId::Path(path) => {
- let path = &path;
+ let path = path.as_ref();
if let Some(doc) = editor.document_by_path(path) {
- return Preview::EditorDocument(doc);
+ return Some((Preview::EditorDocument(doc), range));
}
if self.preview_cache.contains_key(path) {
- return Preview::Cached(&self.preview_cache[path]);
+ // NOTE: we use `HashMap::get_key_value` here instead of indexing so we can
+ // retrieve the `Arc<Path>` key. The `path` in scope here is a `&Path` and
+ // we can cheaply clone the key for the preview highlight handler.
+ let (path, preview) = self.preview_cache.get_key_value(path).unwrap();
+ if matches!(preview, CachedPreview::Document(doc) if doc.language_config().is_none())
+ {
+ helix_event::send_blocking(&self.preview_highlight_handler, path.clone());
+ }
+ return Some((Preview::Cached(preview), range));
}
- let data = std::fs::File::open(path).and_then(|file| {
+ let path: Arc<Path> = path.into();
+ let data = std::fs::File::open(&path).and_then(|file| {
let metadata = file.metadata()?;
// Read up to 1kb to detect the content type
let n = file.take(1024).read_to_end(&mut self.read_buffer)?;
@@ -427,100 +604,29 @@ impl<T: Item + 'static> Picker<T> {
(size, _) if size > MAX_FILE_SIZE_FOR_PREVIEW => {
CachedPreview::LargeFile
}
- _ => Document::open(path, None, None, editor.config.clone())
- .map(|doc| CachedPreview::Document(Box::new(doc)))
+ _ => Document::open(&path, None, None, editor.config.clone())
+ .map(|doc| {
+ // Asynchronously highlight the new document
+ helix_event::send_blocking(
+ &self.preview_highlight_handler,
+ path.clone(),
+ );
+ CachedPreview::Document(Box::new(doc))
+ })
.unwrap_or(CachedPreview::NotFound),
},
)
.unwrap_or(CachedPreview::NotFound);
- self.preview_cache.insert(path.to_owned(), preview);
- Preview::Cached(&self.preview_cache[path])
+ self.preview_cache.insert(path.clone(), preview);
+ Some((Preview::Cached(&self.preview_cache[&path]), range))
}
PathOrId::Id(id) => {
let doc = editor.documents.get(&id).unwrap();
- Preview::EditorDocument(doc)
+ Some((Preview::EditorDocument(doc), range))
}
}
}
- fn handle_idle_timeout(&mut self, cx: &mut Context) -> EventResult {
- let Some((current_file, _)) = self.current_file(cx.editor) else {
- return EventResult::Consumed(None);
- };
-
- // Try to find a document in the cache
- let doc = match &current_file {
- PathOrId::Id(doc_id) => doc_mut!(cx.editor, doc_id),
- PathOrId::Path(path) => match self.preview_cache.get_mut(path) {
- Some(CachedPreview::Document(ref mut doc)) => doc,
- _ => return EventResult::Consumed(None),
- },
- };
-
- let mut callback: Option<compositor::Callback> = None;
-
- // Then attempt to highlight it if it has no language set
- if doc.language_config().is_none() {
- if let Some(language_config) = doc.detect_language_config(&cx.editor.syn_loader.load())
- {
- doc.language = Some(language_config.clone());
- let text = doc.text().clone();
- let loader = cx.editor.syn_loader.clone();
- let job = tokio::task::spawn_blocking(move || {
- let syntax = language_config
- .highlight_config(&loader.load().scopes())
- .and_then(|highlight_config| {
- Syntax::new(text.slice(..), highlight_config, loader)
- });
- let callback = move |editor: &mut Editor, compositor: &mut Compositor| {
- let Some(syntax) = syntax else {
- log::info!("highlighting picker item failed");
- return;
- };
- let picker = match compositor.find::<Overlay<Self>>() {
- Some(Overlay { content, .. }) => Some(content),
- None => compositor
- .find::<Overlay<DynamicPicker<T>>>()
- .map(|overlay| &mut overlay.content.file_picker),
- };
- let Some(picker) = picker else {
- log::info!("picker closed before syntax highlighting finished");
- return;
- };
- // Try to find a document in the cache
- let doc = match current_file {
- PathOrId::Id(doc_id) => doc_mut!(editor, &doc_id),
- PathOrId::Path(path) => match picker.preview_cache.get_mut(&path) {
- Some(CachedPreview::Document(ref mut doc)) => {
- let diagnostics = Editor::doc_diagnostics(
- &editor.language_servers,
- &editor.diagnostics,
- doc,
- );
- doc.replace_diagnostics(diagnostics, &[], None);
- doc
- }
- _ => return,
- },
- };
- doc.syntax = Some(syntax);
- };
- Callback::EditorCompositor(Box::new(callback))
- });
- let tmp: compositor::Callback = Box::new(move |_, ctx| {
- ctx.jobs
- .callback(job.map(|res| res.map_err(anyhow::Error::from)))
- });
- callback = Some(Box::new(tmp))
- }
- }
-
- // QUESTION: do we want to compute inlay hints in pickers too ? Probably not for now
- // but it could be interesting in the future
-
- EventResult::Consumed(callback)
- }
-
fn render_picker(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
let status = self.matcher.tick(10);
let snapshot = self.matcher.snapshot();
@@ -554,7 +660,11 @@ impl<T: Item + 'static> Picker<T> {
let count = format!(
"{}{}/{}",
- if status.running { "(running) " } else { "" },
+ if status.running || self.matcher.active_injectors() > 0 {
+ "(running) "
+ } else {
+ ""
+ },
snapshot.matched_item_count(),
snapshot.item_count(),
);
@@ -578,7 +688,7 @@ impl<T: Item + 'static> Picker<T> {
// -- Render the contents:
// subtract area of prompt from top
let inner = inner.clip_top(2);
- let rows = inner.height as u32;
+ let rows = inner.height.saturating_sub(self.header_height()) as u32;
let offset = self.cursor - (self.cursor % std::cmp::max(1, rows));
let cursor = self.cursor.saturating_sub(offset);
let end = offset
@@ -592,83 +702,110 @@ impl<T: Item + 'static> Picker<T> {
}
let options = snapshot.matched_items(offset..end).map(|item| {
- snapshot.pattern().column_pattern(0).indices(
- item.matcher_columns[0].slice(..),
- &mut matcher,
- &mut indices,
- );
- indices.sort_unstable();
- indices.dedup();
- let mut row = item.data.format(&self.editor_data);
-
- let mut grapheme_idx = 0u32;
- let mut indices = indices.drain(..);
- let mut next_highlight_idx = indices.next().unwrap_or(u32::MAX);
- if self.widths.len() < row.cells.len() {
- self.widths.resize(row.cells.len(), Constraint::Length(0));
- }
let mut widths = self.widths.iter_mut();
- for cell in &mut row.cells {
+ let mut matcher_index = 0;
+
+ Row::new(self.columns.iter().map(|column| {
+ if column.hidden {
+ return Cell::default();
+ }
+
let Some(Constraint::Length(max_width)) = widths.next() else {
unreachable!();
};
-
- // merge index highlights on top of existing hightlights
- let mut span_list = Vec::new();
- let mut current_span = String::new();
- let mut current_style = Style::default();
- let mut width = 0;
-
- let spans: &[Span] = cell.content.lines.first().map_or(&[], |it| it.0.as_slice());
- for span in spans {
- // this looks like a bug on first glance, we are iterating
- // graphemes but treating them as char indices. The reason that
- // this is correct is that nucleo will only ever consider the first char
- // of a grapheme (and discard the rest of the grapheme) so the indices
- // returned by nucleo are essentially grapheme indecies
- for grapheme in span.content.graphemes(true) {
- let style = if grapheme_idx == next_highlight_idx {
- next_highlight_idx = indices.next().unwrap_or(u32::MAX);
- span.style.patch(highlight_style)
- } else {
- span.style
- };
- if style != current_style {
- if !current_span.is_empty() {
- span_list.push(Span::styled(current_span, current_style))
+ let mut cell = column.format(item.data, &self.editor_data);
+ let width = if column.filter {
+ snapshot.pattern().column_pattern(matcher_index).indices(
+ item.matcher_columns[matcher_index].slice(..),
+ &mut matcher,
+ &mut indices,
+ );
+ indices.sort_unstable();
+ indices.dedup();
+ let mut indices = indices.drain(..);
+ let mut next_highlight_idx = indices.next().unwrap_or(u32::MAX);
+ let mut span_list = Vec::new();
+ let mut current_span = String::new();
+ let mut current_style = Style::default();
+ let mut grapheme_idx = 0u32;
+ let mut width = 0;
+
+ let spans: &[Span] =
+ cell.content.lines.first().map_or(&[], |it| it.0.as_slice());
+ for span in spans {
+ // this looks like a bug on first glance, we are iterating
+ // graphemes but treating them as char indices. The reason that
+ // this is correct is that nucleo will only ever consider the first char
+ // of a grapheme (and discard the rest of the grapheme) so the indices
+ // returned by nucleo are essentially grapheme indecies
+ for grapheme in span.content.graphemes(true) {
+ let style = if grapheme_idx == next_highlight_idx {
+ next_highlight_idx = indices.next().unwrap_or(u32::MAX);
+ span.style.patch(highlight_style)
+ } else {
+ span.style
+ };
+ if style != current_style {
+ if !current_span.is_empty() {
+ span_list.push(Span::styled(current_span, current_style))
+ }
+ current_span = String::new();
+ current_style = style;
}
- current_span = String::new();
- current_style = style;
+ current_span.push_str(grapheme);
+ grapheme_idx += 1;
}
- current_span.push_str(grapheme);
- grapheme_idx += 1;
+ width += span.width();
}
- width += span.width();
- }
- span_list.push(Span::styled(current_span, current_style));
+ span_list.push(Span::styled(current_span, current_style));
+ cell = Cell::from(Spans::from(span_list));
+ matcher_index += 1;
+ width
+ } else {
+ cell.content
+ .lines
+ .first()
+ .map(|line| line.width())
+ .unwrap_or_default()
+ };
+
if width as u16 > *max_width {
*max_width = width as u16;
}
- *cell = Cell::from(Spans::from(span_list));
-
- // spacer
- if grapheme_idx == next_highlight_idx {
- next_highlight_idx = indices.next().unwrap_or(u32::MAX);
- }
- grapheme_idx += 1;
- }
- row
+ cell
+ }))
});
- let table = Table::new(options)
+ let mut table = Table::new(options)
.style(text_style)
.highlight_style(selected)
.highlight_symbol(" > ")
.column_spacing(1)
.widths(&self.widths);
+ // -- Header
+ if self.columns.len() > 1 {
+ let active_column = self.query.active_column(self.prompt.position());
+ let header_style = cx.editor.theme.get("ui.picker.header");
+
+ table = table.header(Row::new(self.columns.iter().map(|column| {
+ if column.hidden {
+ Cell::default()
+ } else {
+ let style = if active_column.is_some_and(|name| Arc::ptr_eq(name, &column.name))
+ {
+ cx.editor.theme.get("ui.picker.header.active")
+ } else {
+ header_style
+ };
+
+ Cell::from(Span::styled(Cow::from(&*column.name), style))
+ }
+ })));
+ }
+
use tui::widgets::TableState;
table.render_table(
@@ -698,8 +835,7 @@ impl<T: Item + 'static> Picker<T> {
let inner = inner.inner(margin);
BLOCK.render(area, surface);
- if let Some((path, range)) = self.current_file(cx.editor) {
- let preview = self.get_preview(path, cx.editor);
+ if let Some((preview, range)) = self.get_preview(cx.editor) {
let doc = match preview.document() {
Some(doc)
if range.map_or(true, |(start, end)| {
@@ -798,7 +934,7 @@ impl<T: Item + 'static> Picker<T> {
}
}
-impl<T: Item + 'static + Send + Sync> Component for Picker<T> {
+impl<I: 'static + Send + Sync, D: 'static + Send + Sync> Component for Picker<I, D> {
fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
// +---------+ +---------+
// |prompt | |preview |
@@ -826,9 +962,6 @@ impl<T: Item + 'static + Send + Sync> Component for Picker<T> {
}
fn handle_event(&mut self, event: &Event, ctx: &mut Context) -> EventResult {
- if let Event::IdleTimeout = event {
- return self.handle_idle_timeout(ctx);
- }
// TODO: keybinds for scrolling preview
let key_event = match event {
@@ -852,7 +985,7 @@ impl<T: Item + 'static + Send + Sync> Component for Picker<T> {
// be restarting the stream somehow once the picker gets
// reopened instead (like for an FS crawl) that would also remove the
// need for the special case above but that is pretty tricky
- picker.shutdown.store(true, atomic::Ordering::Relaxed);
+ picker.version.fetch_add(1, atomic::Ordering::Relaxed);
Box::new(|compositor: &mut Compositor, _ctx| {
// remove the layer
compositor.last_picker = compositor.pop();
@@ -861,9 +994,6 @@ impl<T: Item + 'static + Send + Sync> Component for Picker<T> {
EventResult::Consumed(Some(callback))
};
- // So that idle timeout retriggers
- ctx.editor.reset_idle_timer();
-
match key_event {
shift!(Tab) | key!(Up) | ctrl!('p') => {
self.move_by(1, Direction::Backward);
@@ -890,10 +1020,21 @@ impl<T: Item + 'static + Send + Sync> Component for Picker<T> {
}
}
key!(Enter) => {
- if let Some(option) = self.selection() {
- (self.callback_fn)(ctx, option, Action::Replace);
+ // If the prompt has a history completion and is empty, use enter to accept
+ // that completion
+ if let Some(completion) = self
+ .prompt
+ .first_history_completion(ctx.editor)
+ .filter(|_| self.prompt.line().is_empty())
+ {
+ self.prompt.set_line(completion.to_string(), ctx.editor);
+ self.handle_prompt_change();
+ } else {
+ if let Some(option) = self.selection() {
+ (self.callback_fn)(ctx, option, Action::Replace);
+ }
+ return close_fn(self);
}
- return close_fn(self);
}
ctrl!('s') => {
if let Some(option) = self.selection() {
@@ -930,7 +1071,7 @@ impl<T: Item + 'static + Send + Sync> Component for Picker<T> {
}
fn required_size(&mut self, (width, height): (u16, u16)) -> Option<(u16, u16)> {
- self.completion_height = height.saturating_sub(4);
+ self.completion_height = height.saturating_sub(4 + self.header_height());
Some((width, height))
}
@@ -938,81 +1079,11 @@ impl<T: Item + 'static + Send + Sync> Component for Picker<T> {
Some(ID)
}
}
-impl<T: Item> Drop for Picker<T> {
+impl<T: 'static + Send + Sync, D> Drop for Picker<T, D> {
fn drop(&mut self) {
// ensure we cancel any ongoing background threads streaming into the picker
- self.shutdown.store(true, atomic::Ordering::Relaxed)
+ self.version.fetch_add(1, atomic::Ordering::Relaxed);
}
}
type PickerCallback<T> = Box<dyn Fn(&mut Context, &T, Action)>;
-
-/// Returns a new list of options to replace the contents of the picker
-/// when called with the current picker query,
-pub type DynQueryCallback<T> =
- Box<dyn Fn(String, &mut Editor) -> BoxFuture<'static, anyhow::Result<Vec<T>>>>;
-
-/// A picker that updates its contents via a callback whenever the
-/// query string changes. Useful for live grep, workspace symbols, etc.
-pub struct DynamicPicker<T: ui::menu::Item + Send + Sync> {
- file_picker: Picker<T>,
- query_callback: DynQueryCallback<T>,
- query: String,
-}
-
-impl<T: ui::menu::Item + Send + Sync> DynamicPicker<T> {
- pub fn new(file_picker: Picker<T>, query_callback: DynQueryCallback<T>) -> Self {
- Self {
- file_picker,
- query_callback,
- query: String::new(),
- }
- }
-}
-
-impl<T: Item + Send + Sync + 'static> Component for DynamicPicker<T> {
- fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
- self.file_picker.render(area, surface, cx);
- }
-
- fn handle_event(&mut self, event: &Event, cx: &mut Context) -> EventResult {
- let event_result = self.file_picker.handle_event(event, cx);
- let current_query = self.file_picker.prompt.line();
-
- if !matches!(event, Event::IdleTimeout) || self.query == *current_query {
- return event_result;
- }
-
- self.query.clone_from(current_query);
-
- let new_options = (self.query_callback)(current_query.to_owned(), cx.editor);
-
- cx.jobs.callback(async move {
- let new_options = new_options.await?;
- let callback = Callback::EditorCompositor(Box::new(move |editor, compositor| {
- // Wrapping of pickers in overlay is done outside the picker code,
- // so this is fragile and will break if wrapped in some other widget.
- let picker = match compositor.find_id::<Overlay<DynamicPicker<T>>>(ID) {
- Some(overlay) => &mut overlay.content.file_picker,
- None => return,
- };
- picker.set_options(new_options);
- editor.reset_idle_timer();
- }));
- anyhow::Ok(callback)
- });
- EventResult::Consumed(None)
- }
-
- fn cursor(&self, area: Rect, ctx: &Editor) -> (Option<Position>, CursorKind) {
- self.file_picker.cursor(area, ctx)
- }
-
- fn required_size(&mut self, viewport: (u16, u16)) -> Option<(u16, u16)> {
- self.file_picker.required_size(viewport)
- }
-
- fn id(&self) -> Option<&'static str> {
- Some(ID)
- }
-}
diff --git a/helix-term/src/ui/picker/handlers.rs b/helix-term/src/ui/picker/handlers.rs
new file mode 100644
index 00000000..4896ccbc
--- /dev/null
+++ b/helix-term/src/ui/picker/handlers.rs
@@ -0,0 +1,182 @@
+use std::{
+ path::Path,
+ sync::{atomic, Arc},
+ time::Duration,
+};
+
+use helix_event::AsyncHook;
+use tokio::time::Instant;
+
+use crate::{job, ui::overlay::Overlay};
+
+use super::{CachedPreview, DynQueryCallback, Picker};
+
+pub(super) struct PreviewHighlightHandler<T: 'static + Send + Sync, D: 'static + Send + Sync> {
+ trigger: Option<Arc<Path>>,
+ phantom_data: std::marker::PhantomData<(T, D)>,
+}
+
+impl<T: 'static + Send + Sync, D: 'static + Send + Sync> Default for PreviewHighlightHandler<T, D> {
+ fn default() -> Self {
+ Self {
+ trigger: None,
+ phantom_data: Default::default(),
+ }
+ }
+}
+
+impl<T: 'static + Send + Sync, D: 'static + Send + Sync> AsyncHook
+ for PreviewHighlightHandler<T, D>
+{
+ type Event = Arc<Path>;
+
+ fn handle_event(
+ &mut self,
+ path: Self::Event,
+ timeout: Option<tokio::time::Instant>,
+ ) -> Option<tokio::time::Instant> {
+ if self
+ .trigger
+ .as_ref()
+ .is_some_and(|trigger| trigger == &path)
+ {
+ // If the path hasn't changed, don't reset the debounce
+ timeout
+ } else {
+ self.trigger = Some(path);
+ Some(Instant::now() + Duration::from_millis(150))
+ }
+ }
+
+ fn finish_debounce(&mut self) {
+ let Some(path) = self.trigger.take() else {
+ return;
+ };
+
+ job::dispatch_blocking(move |editor, compositor| {
+ let Some(Overlay {
+ content: picker, ..
+ }) = compositor.find::<Overlay<Picker<T, D>>>()
+ else {
+ return;
+ };
+
+ let Some(CachedPreview::Document(ref mut doc)) = picker.preview_cache.get_mut(&path)
+ else {
+ return;
+ };
+
+ if doc.language_config().is_some() {
+ return;
+ }
+
+ let Some(language_config) = doc.detect_language_config(&editor.syn_loader.load())
+ else {
+ return;
+ };
+ doc.language = Some(language_config.clone());
+ let text = doc.text().clone();
+ let loader = editor.syn_loader.clone();
+
+ tokio::task::spawn_blocking(move || {
+ let Some(syntax) = language_config
+ .highlight_config(&loader.load().scopes())
+ .and_then(|highlight_config| {
+ helix_core::Syntax::new(text.slice(..), highlight_config, loader)
+ })
+ else {
+ log::info!("highlighting picker item failed");
+ return;
+ };
+
+ job::dispatch_blocking(move |editor, compositor| {
+ let Some(Overlay {
+ content: picker, ..
+ }) = compositor.find::<Overlay<Picker<T, D>>>()
+ else {
+ log::info!("picker closed before syntax highlighting finished");
+ return;
+ };
+ let Some(CachedPreview::Document(ref mut doc)) =
+ picker.preview_cache.get_mut(&path)
+ else {
+ return;
+ };
+ let diagnostics = helix_view::Editor::doc_diagnostics(
+ &editor.language_servers,
+ &editor.diagnostics,
+ doc,
+ );
+ doc.replace_diagnostics(diagnostics, &[], None);
+ doc.syntax = Some(syntax);
+ });
+ });
+ });
+ }
+}
+
+pub(super) struct DynamicQueryHandler<T: 'static + Send + Sync, D: 'static + Send + Sync> {
+ callback: Arc<DynQueryCallback<T, D>>,
+ // Duration used as a debounce.
+ // Defaults to 100ms if not provided via `Picker::with_dynamic_query`. Callers may want to set
+ // this higher if the dynamic query is expensive - for example global search.
+ debounce: Duration,
+ last_query: Arc<str>,
+ query: Option<Arc<str>>,
+}
+
+impl<T: 'static + Send + Sync, D: 'static + Send + Sync> DynamicQueryHandler<T, D> {
+ pub(super) fn new(callback: DynQueryCallback<T, D>, duration_ms: Option<u64>) -> Self {
+ Self {
+ callback: Arc::new(callback),
+ debounce: Duration::from_millis(duration_ms.unwrap_or(100)),
+ last_query: "".into(),
+ query: None,
+ }
+ }
+}
+
+impl<T: 'static + Send + Sync, D: 'static + Send + Sync> AsyncHook for DynamicQueryHandler<T, D> {
+ type Event = Arc<str>;
+
+ fn handle_event(&mut self, query: Self::Event, _timeout: Option<Instant>) -> Option<Instant> {
+ if query == self.last_query {
+ // If the search query reverts to the last one we requested, no need to
+ // make a new request.
+ self.query = None;
+ None
+ } else {
+ self.query = Some(query);
+ Some(Instant::now() + self.debounce)
+ }
+ }
+
+ fn finish_debounce(&mut self) {
+ let Some(query) = self.query.take() else {
+ return;
+ };
+ self.last_query = query.clone();
+ let callback = self.callback.clone();
+
+ job::dispatch_blocking(move |editor, compositor| {
+ let Some(Overlay {
+ content: picker, ..
+ }) = compositor.find::<Overlay<Picker<T, D>>>()
+ else {
+ return;
+ };
+ // Increment the version number to cancel any ongoing requests.
+ picker.version.fetch_add(1, atomic::Ordering::Relaxed);
+ picker.matcher.restart(false);
+ let injector = picker.injector();
+ let get_options = (callback)(&query, editor, picker.editor_data.clone(), &injector);
+ tokio::spawn(async move {
+ if let Err(err) = get_options.await {
+ log::info!("Dynamic request failed: {err}");
+ }
+ // NOTE: the Drop implementation of Injector will request a redraw when the
+ // injector falls out of scope here, clearing the "running" indicator.
+ });
+ })
+ }
+}
diff --git a/helix-term/src/ui/picker/query.rs b/helix-term/src/ui/picker/query.rs
new file mode 100644
index 00000000..e433a11f
--- /dev/null
+++ b/helix-term/src/ui/picker/query.rs
@@ -0,0 +1,368 @@
+use std::{collections::HashMap, mem, ops::Range, sync::Arc};
+
+#[derive(Debug)]
+pub(super) struct PickerQuery {
+ /// The column names of the picker.
+ column_names: Box<[Arc<str>]>,
+ /// The index of the primary column in `column_names`.
+ /// The primary column is selected by default unless another
+ /// field is specified explicitly with `%fieldname`.
+ primary_column: usize,
+ /// The mapping between column names and input in the query
+ /// for those columns.
+ inner: HashMap<Arc<str>, Arc<str>>,
+ /// The byte ranges of the input text which are used as input for each column.
+ /// This is calculated at parsing time for use in [Self::active_column].
+ /// This Vec is naturally sorted in ascending order and ranges do not overlap.
+ column_ranges: Vec<(Range<usize>, Option<Arc<str>>)>,
+}
+
+impl PartialEq<HashMap<Arc<str>, Arc<str>>> for PickerQuery {
+ fn eq(&self, other: &HashMap<Arc<str>, Arc<str>>) -> bool {
+ self.inner.eq(other)
+ }
+}
+
+impl PickerQuery {
+ pub(super) fn new<I: Iterator<Item = Arc<str>>>(
+ column_names: I,
+ primary_column: usize,
+ ) -> Self {
+ let column_names: Box<[_]> = column_names.collect();
+ let inner = HashMap::with_capacity(column_names.len());
+ let column_ranges = vec![(0..usize::MAX, Some(column_names[primary_column].clone()))];
+ Self {
+ column_names,
+ primary_column,
+ inner,
+ column_ranges,
+ }
+ }
+
+ pub(super) fn get(&self, column: &str) -> Option<&Arc<str>> {
+ self.inner.get(column)
+ }
+
+ pub(super) fn parse(&mut self, input: &str) -> HashMap<Arc<str>, Arc<str>> {
+ let mut fields: HashMap<Arc<str>, String> = HashMap::new();
+ let primary_field = &self.column_names[self.primary_column];
+ let mut escaped = false;
+ let mut in_field = false;
+ let mut field = None;
+ let mut text = String::new();
+ self.column_ranges.clear();
+ self.column_ranges
+ .push((0..usize::MAX, Some(primary_field.clone())));
+
+ macro_rules! finish_field {
+ () => {
+ let key = field.take().unwrap_or(primary_field);
+
+ if let Some(pattern) = fields.get_mut(key) {
+ pattern.push(' ');
+ pattern.push_str(text.trim());
+ } else {
+ fields.insert(key.clone(), text.trim().to_string());
+ }
+ text.clear();
+ };
+ }
+
+ for (idx, ch) in input.char_indices() {
+ match ch {
+ // Backslash escaping
+ _ if escaped => {
+ // '%' is the only character that is special cased.
+ // You can escape it to prevent parsing the text that
+ // follows it as a field name.
+ if ch != '%' {
+ text.push('\\');
+ }
+ text.push(ch);
+ escaped = false;
+ }
+ '\\' => escaped = !escaped,
+ '%' => {
+ if !text.is_empty() {
+ finish_field!();
+ }
+ let (range, _field) = self
+ .column_ranges
+ .last_mut()
+ .expect("column_ranges is non-empty");
+ range.end = idx;
+ in_field = true;
+ }
+ ' ' if in_field => {
+ text.clear();
+ in_field = false;
+ }
+ _ if in_field => {
+ text.push(ch);
+                    // Go over all column names, find those that start with the
+                    // field key typed so far, and pick the best-fitting one.
+ field = self
+ .column_names
+ .iter()
+ .filter(|col| col.starts_with(&text))
+ // select "fittest" column
+ .min_by_key(|col| col.len());
+
+ // Update the column range for this column.
+ if let Some((_range, current_field)) = self
+ .column_ranges
+ .last_mut()
+ .filter(|(range, _)| range.end == usize::MAX)
+ {
+ *current_field = field.cloned();
+ } else {
+ self.column_ranges.push((idx..usize::MAX, field.cloned()));
+ }
+ }
+ _ => text.push(ch),
+ }
+ }
+
+ if !in_field && !text.is_empty() {
+ finish_field!();
+ }
+
+ let new_inner: HashMap<_, _> = fields
+ .into_iter()
+ .map(|(field, query)| (field, query.as_str().into()))
+ .collect();
+
+ mem::replace(&mut self.inner, new_inner)
+ }
+
+ /// Finds the column which the cursor is 'within' in the last parse.
+ ///
+ /// The cursor is considered to be within a column when it is placed within any
+ /// of a column's text. See the `active_column_test` unit test below for examples.
+ ///
+ /// `cursor` is a byte index that represents the location of the prompt's cursor.
+ pub fn active_column(&self, cursor: usize) -> Option<&Arc<str>> {
+ let point = self
+ .column_ranges
+ .partition_point(|(range, _field)| cursor > range.end);
+
+ self.column_ranges
+ .get(point)
+ .filter(|(range, _field)| cursor >= range.start && cursor <= range.end)
+ .and_then(|(_range, field)| field.as_ref())
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use helix_core::hashmap;
+
+ use super::*;
+
+ #[test]
+ fn parse_query_test() {
+ let mut query = PickerQuery::new(
+ [
+ "primary".into(),
+ "field1".into(),
+ "field2".into(),
+ "another".into(),
+ "anode".into(),
+ ]
+ .into_iter(),
+ 0,
+ );
+
+ // Basic field splitting
+ query.parse("hello world");
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => "hello world".into(),
+ )
+ );
+ query.parse("hello %field1 world %field2 !");
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => "hello".into(),
+ "field1".into() => "world".into(),
+ "field2".into() => "!".into(),
+ )
+ );
+ query.parse("%field1 abc %field2 def xyz");
+ assert_eq!(
+ query,
+ hashmap!(
+ "field1".into() => "abc".into(),
+ "field2".into() => "def xyz".into(),
+ )
+ );
+
+ // Trailing space is trimmed
+ query.parse("hello ");
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => "hello".into(),
+ )
+ );
+
+ // Unknown fields are trimmed.
+ query.parse("hello %foo");
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => "hello".into(),
+ )
+ );
+
+ // Multiple words in a field
+ query.parse("hello %field1 a b c");
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => "hello".into(),
+ "field1".into() => "a b c".into(),
+ )
+ );
+
+ // Escaping
+ query.parse(r#"hello\ world"#);
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => r#"hello\ world"#.into(),
+ )
+ );
+ query.parse(r#"hello \%field1 world"#);
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => "hello %field1 world".into(),
+ )
+ );
+ query.parse(r#"%field1 hello\ world"#);
+ assert_eq!(
+ query,
+ hashmap!(
+ "field1".into() => r#"hello\ world"#.into(),
+ )
+ );
+ query.parse(r#"hello %field1 a\"b"#);
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => "hello".into(),
+ "field1".into() => r#"a\"b"#.into(),
+ )
+ );
+ query.parse(r#"%field1 hello\ world"#);
+ assert_eq!(
+ query,
+ hashmap!(
+ "field1".into() => r#"hello\ world"#.into(),
+ )
+ );
+ query.parse(r#"\bfoo\b"#);
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => r#"\bfoo\b"#.into(),
+ )
+ );
+ query.parse(r#"\\n"#);
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => r#"\\n"#.into(),
+ )
+ );
+
+ // Only the prefix of a field is required.
+ query.parse("hello %anot abc");
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => "hello".into(),
+ "another".into() => "abc".into(),
+ )
+ );
+        // The shortest column name matching the prefix is selected.
+ query.parse("hello %ano abc");
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => "hello".into(),
+ "anode".into() => "abc".into()
+ )
+ );
+ // Multiple uses of a column are concatenated with space separators.
+ query.parse("hello %field1 xyz %fie abc");
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => "hello".into(),
+ "field1".into() => "xyz abc".into()
+ )
+ );
+ query.parse("hello %fie abc");
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => "hello".into(),
+ "field1".into() => "abc".into()
+ )
+ );
+ // The primary column can be explicitly qualified.
+ query.parse("hello %fie abc %prim world");
+ assert_eq!(
+ query,
+ hashmap!(
+ "primary".into() => "hello world".into(),
+ "field1".into() => "abc".into()
+ )
+ );
+ }
+
+ #[test]
+ fn active_column_test() {
+ fn active_column<'a>(query: &'a mut PickerQuery, input: &str) -> Option<&'a str> {
+ let cursor = input.find('|').expect("cursor must be indicated with '|'");
+ let input = input.replace('|', "");
+ query.parse(&input);
+ query.active_column(cursor).map(AsRef::as_ref)
+ }
+
+ let mut query = PickerQuery::new(
+ ["primary".into(), "foo".into(), "bar".into()].into_iter(),
+ 0,
+ );
+
+ assert_eq!(active_column(&mut query, "|"), Some("primary"));
+ assert_eq!(active_column(&mut query, "hello| world"), Some("primary"));
+ assert_eq!(active_column(&mut query, "|%foo hello"), Some("primary"));
+ assert_eq!(active_column(&mut query, "%foo|"), Some("foo"));
+ assert_eq!(active_column(&mut query, "%|"), None);
+ assert_eq!(active_column(&mut query, "%baz|"), None);
+ assert_eq!(active_column(&mut query, "%quiz%|"), None);
+ assert_eq!(active_column(&mut query, "%foo hello| world"), Some("foo"));
+ assert_eq!(active_column(&mut query, "%foo hello world|"), Some("foo"));
+ assert_eq!(active_column(&mut query, "%foo| hello world"), Some("foo"));
+ assert_eq!(active_column(&mut query, "%|foo hello world"), Some("foo"));
+ assert_eq!(active_column(&mut query, "%f|oo hello world"), Some("foo"));
+ assert_eq!(active_column(&mut query, "hello %f|oo world"), Some("foo"));
+ assert_eq!(
+ active_column(&mut query, "hello %f|oo world %bar !"),
+ Some("foo")
+ );
+ assert_eq!(
+ active_column(&mut query, "hello %foo wo|rld %bar !"),
+ Some("foo")
+ );
+ assert_eq!(
+ active_column(&mut query, "hello %foo world %bar !|"),
+ Some("bar")
+ );
+ }
+}
diff --git a/helix-term/src/ui/prompt.rs b/helix-term/src/ui/prompt.rs
index 14b242df..3518ddf7 100644
--- a/helix-term/src/ui/prompt.rs
+++ b/helix-term/src/ui/prompt.rs
@@ -92,12 +92,22 @@ impl Prompt {
}
}
+ /// Gets the byte index in the input representing the current cursor location.
+ #[inline]
+ pub(crate) fn position(&self) -> usize {
+ self.cursor
+ }
+
pub fn with_line(mut self, line: String, editor: &Editor) -> Self {
+ self.set_line(line, editor);
+ self
+ }
+
+ pub fn set_line(&mut self, line: String, editor: &Editor) {
let cursor = line.len();
self.line = line;
self.cursor = cursor;
self.recalculate_completion(editor);
- self
}
pub fn with_language(
@@ -113,6 +123,19 @@ impl Prompt {
&self.line
}
+ pub fn with_history_register(&mut self, history_register: Option<char>) -> &mut Self {
+ self.history_register = history_register;
+ self
+ }
+
+ pub(crate) fn first_history_completion<'a>(
+ &'a self,
+ editor: &'a Editor,
+ ) -> Option<Cow<'a, str>> {
+ self.history_register
+ .and_then(|reg| editor.registers.first(reg, editor))
+ }
+
pub fn recalculate_completion(&mut self, editor: &Editor) {
self.exit_selection();
self.completion = (self.completion_fn)(editor, &self.line);
@@ -476,10 +499,7 @@ impl Prompt {
let line_area = area.clip_left(self.prompt.len() as u16).clip_top(line);
if self.line.is_empty() {
// Show the most recently entered value as a suggestion.
- if let Some(suggestion) = self
- .history_register
- .and_then(|reg| cx.editor.registers.first(reg, cx.editor))
- {
+ if let Some(suggestion) = self.first_history_completion(cx.editor) {
surface.set_string(line_area.x, line_area.y, suggestion, suggestion_color);
}
} else if let Some((language, loader)) = self.language.as_ref() {
@@ -574,8 +594,7 @@ impl Component for Prompt {
self.recalculate_completion(cx.editor);
} else {
let last_item = self
- .history_register
- .and_then(|reg| cx.editor.registers.first(reg, cx.editor))
+ .first_history_completion(cx.editor)
.map(|entry| entry.to_string())
.unwrap_or_else(|| String::from(""));
diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs
index ccf2fa8c..3314a243 100644
--- a/helix-view/src/document.rs
+++ b/helix-view/src/document.rs
@@ -1741,6 +1741,10 @@ impl Document {
Url::from_file_path(self.path()?).ok()
}
+ pub fn uri(&self) -> Option<helix_core::Uri> {
+ Some(self.path()?.clone().into())
+ }
+
#[inline]
pub fn text(&self) -> &Rope {
&self.text
diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs
index 50b6c931..29059095 100644
--- a/helix-view/src/editor.rs
+++ b/helix-view/src/editor.rs
@@ -44,7 +44,7 @@ pub use helix_core::diagnostic::Severity;
use helix_core::{
auto_pairs::AutoPairs,
syntax::{self, AutoPairConfig, IndentationHeuristic, LanguageServerFeature, SoftWrap},
- Change, LineEnding, Position, Range, Selection, NATIVE_LINE_ENDING,
+ Change, LineEnding, Position, Range, Selection, Uri, NATIVE_LINE_ENDING,
};
use helix_dap as dap;
use helix_lsp::lsp;
@@ -1022,7 +1022,7 @@ pub struct Editor {
pub macro_recording: Option<(char, Vec<KeyEvent>)>,
pub macro_replaying: Vec<char>,
pub language_servers: helix_lsp::Registry,
- pub diagnostics: BTreeMap<PathBuf, Vec<(lsp::Diagnostic, LanguageServerId)>>,
+ pub diagnostics: BTreeMap<Uri, Vec<(lsp::Diagnostic, LanguageServerId)>>,
pub diff_providers: DiffProviderRegistry,
pub debugger: Option<dap::Client>,
@@ -1931,7 +1931,7 @@ impl Editor {
/// Returns all supported diagnostics for the document
pub fn doc_diagnostics<'a>(
language_servers: &'a helix_lsp::Registry,
- diagnostics: &'a BTreeMap<PathBuf, Vec<(lsp::Diagnostic, LanguageServerId)>>,
+ diagnostics: &'a BTreeMap<Uri, Vec<(lsp::Diagnostic, LanguageServerId)>>,
document: &Document,
) -> impl Iterator<Item = helix_core::Diagnostic> + 'a {
Editor::doc_diagnostics_with_filter(language_servers, diagnostics, document, |_, _| true)
@@ -1941,15 +1941,15 @@ impl Editor {
/// filtered by `filter` which is invocated with the raw `lsp::Diagnostic` and the language server id it came from
pub fn doc_diagnostics_with_filter<'a>(
language_servers: &'a helix_lsp::Registry,
- diagnostics: &'a BTreeMap<PathBuf, Vec<(lsp::Diagnostic, LanguageServerId)>>,
+ diagnostics: &'a BTreeMap<Uri, Vec<(lsp::Diagnostic, LanguageServerId)>>,
document: &Document,
filter: impl Fn(&lsp::Diagnostic, LanguageServerId) -> bool + 'a,
) -> impl Iterator<Item = helix_core::Diagnostic> + 'a {
let text = document.text().clone();
let language_config = document.language.clone();
document
- .path()
- .and_then(|path| diagnostics.get(path))
+ .uri()
+ .and_then(|uri| diagnostics.get(&uri))
.map(|diags| {
diags.iter().filter_map(move |(diagnostic, lsp_id)| {
let ls = language_servers.get_by_id(*lsp_id)?;
diff --git a/helix-view/src/handlers/lsp.rs b/helix-view/src/handlers/lsp.rs
index beb106b2..d817a423 100644
--- a/helix-view/src/handlers/lsp.rs
+++ b/helix-view/src/handlers/lsp.rs
@@ -1,6 +1,7 @@
use crate::editor::Action;
use crate::Editor;
use crate::{DocumentId, ViewId};
+use helix_core::Uri;
use helix_lsp::util::generate_transaction_from_edits;
use helix_lsp::{lsp, OffsetEncoding};
@@ -54,18 +55,30 @@ pub struct ApplyEditError {
pub enum ApplyEditErrorKind {
DocumentChanged,
FileNotFound,
- UnknownURISchema,
+ InvalidUrl(helix_core::uri::UrlConversionError),
IoError(std::io::Error),
// TODO: check edits before applying and propagate failure
// InvalidEdit,
}
+impl From<std::io::Error> for ApplyEditErrorKind {
+ fn from(err: std::io::Error) -> Self {
+ ApplyEditErrorKind::IoError(err)
+ }
+}
+
+impl From<helix_core::uri::UrlConversionError> for ApplyEditErrorKind {
+ fn from(err: helix_core::uri::UrlConversionError) -> Self {
+ ApplyEditErrorKind::InvalidUrl(err)
+ }
+}
+
impl ToString for ApplyEditErrorKind {
fn to_string(&self) -> String {
match self {
ApplyEditErrorKind::DocumentChanged => "document has changed".to_string(),
ApplyEditErrorKind::FileNotFound => "file not found".to_string(),
- ApplyEditErrorKind::UnknownURISchema => "URI schema not supported".to_string(),
+ ApplyEditErrorKind::InvalidUrl(err) => err.to_string(),
ApplyEditErrorKind::IoError(err) => err.to_string(),
}
}
@@ -74,25 +87,28 @@ impl ToString for ApplyEditErrorKind {
impl Editor {
fn apply_text_edits(
&mut self,
- uri: &helix_lsp::Url,
+ url: &helix_lsp::Url,
version: Option<i32>,
text_edits: Vec<lsp::TextEdit>,
offset_encoding: OffsetEncoding,
) -> Result<(), ApplyEditErrorKind> {
- let path = match uri.to_file_path() {
- Ok(path) => path,
- Err(_) => {
- let err = format!("unable to convert URI to filepath: {}", uri);
- log::error!("{}", err);
- self.set_error(err);
- return Err(ApplyEditErrorKind::UnknownURISchema);
+ let uri = match Uri::try_from(url) {
+ Ok(uri) => uri,
+ Err(err) => {
+ log::error!("{err}");
+ return Err(err.into());
}
};
+ let path = uri.as_path().expect("URIs are valid paths");
- let doc_id = match self.open(&path, Action::Load) {
+ let doc_id = match self.open(path, Action::Load) {
Ok(doc_id) => doc_id,
Err(err) => {
- let err = format!("failed to open document: {}: {}", uri, err);
+ let err = format!(
+ "failed to open document: {}: {}",
+ path.to_string_lossy(),
+ err
+ );
log::error!("{}", err);
self.set_error(err);
return Err(ApplyEditErrorKind::FileNotFound);
@@ -158,9 +174,9 @@ impl Editor {
for (i, operation) in operations.iter().enumerate() {
match operation {
lsp::DocumentChangeOperation::Op(op) => {
- self.apply_document_resource_op(op).map_err(|io| {
+ self.apply_document_resource_op(op).map_err(|err| {
ApplyEditError {
- kind: ApplyEditErrorKind::IoError(io),
+ kind: err,
failed_change_idx: i,
}
})?;
@@ -214,12 +230,18 @@ impl Editor {
Ok(())
}
- fn apply_document_resource_op(&mut self, op: &lsp::ResourceOp) -> std::io::Result<()> {
+ fn apply_document_resource_op(
+ &mut self,
+ op: &lsp::ResourceOp,
+ ) -> Result<(), ApplyEditErrorKind> {
use lsp::ResourceOp;
use std::fs;
+ // NOTE: If `Uri` gets another variant than `Path`, the below `expect`s
+ // may no longer be valid.
match op {
ResourceOp::Create(op) => {
- let path = op.uri.to_file_path().unwrap();
+ let uri = Uri::try_from(&op.uri)?;
+ let path = uri.as_path_buf().expect("URIs are valid paths");
let ignore_if_exists = op.options.as_ref().map_or(false, |options| {
!options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false)
});
@@ -236,7 +258,8 @@ impl Editor {
}
}
ResourceOp::Delete(op) => {
- let path = op.uri.to_file_path().unwrap();
+ let uri = Uri::try_from(&op.uri)?;
+ let path = uri.as_path_buf().expect("URIs are valid paths");
if path.is_dir() {
let recursive = op
.options
@@ -251,17 +274,19 @@ impl Editor {
}
self.language_servers.file_event_handler.file_changed(path);
} else if path.is_file() {
- fs::remove_file(&path)?;
+ fs::remove_file(path)?;
}
}
ResourceOp::Rename(op) => {
- let from = op.old_uri.to_file_path().unwrap();
- let to = op.new_uri.to_file_path().unwrap();
+ let from_uri = Uri::try_from(&op.old_uri)?;
+ let from = from_uri.as_path().expect("URIs are valid paths");
+ let to_uri = Uri::try_from(&op.new_uri)?;
+ let to = to_uri.as_path().expect("URIs are valid paths");
let ignore_if_exists = op.options.as_ref().map_or(false, |options| {
!options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false)
});
if !ignore_if_exists || !to.exists() {
- self.move_path(&from, &to)?;
+ self.move_path(from, to)?;
}
}
}