From cc19db1e5306e6cbb396ae33c51e99927e879584 Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Sun, 25 Feb 2024 19:12:36 +0100 Subject: [PATCH 01/19] Adds support for path completion for unix paths * Autocompletion is triggered with `/`. * Documentation preview (file type, file permissions, canonicalized full path). * Home-path resolution (`~/path`, `$HOME/path`, `${HOME}/path`) * Link resolution (makes sense for preview, since the LSP specification (`CompletionItemKind`) only supports files and folders but not symlinks) * Async (via `spawn_blocking` instead of tokios file accessor functions, as they IMHO make the code less readable and are quite a bit slower than just spawning a "thread") * Configurable with `editor.path-completion` (default `true`), per-language overrideable path-completion support --- book/src/editor.md | 1 + book/src/languages.md | 1 + helix-core/src/completion.rs | 9 + helix-core/src/lib.rs | 2 + helix-core/src/syntax.rs | 3 + helix-term/src/handlers/completion.rs | 248 +++++++++++++++- helix-term/src/handlers/completion/resolve.rs | 18 +- helix-term/src/ui/completion.rs | 277 ++++++++++++------ helix-term/src/ui/mod.rs | 2 +- helix-view/src/document.rs | 6 + helix-view/src/editor.rs | 6 + 11 files changed, 469 insertions(+), 104 deletions(-) create mode 100644 helix-core/src/completion.rs diff --git a/book/src/editor.md b/book/src/editor.md index 82d5f8461ef7..cc5ce8d58e51 100644 --- a/book/src/editor.md +++ b/book/src/editor.md @@ -32,6 +32,7 @@ | `cursorcolumn` | Highlight all columns with a cursor | `false` | | `gutters` | Gutters to display: Available are `diagnostics` and `diff` and `line-numbers` and `spacer`, note that `diagnostics` also includes other features like breakpoints, 1-width padding will be inserted if gutters is non-empty | `["diagnostics", "spacer", "line-numbers", "spacer", "diff"]` | | `auto-completion` | Enable automatic pop up of auto-completion | `true` | +| `path-completion` | Enable filepath completion. 
Show files and directories if an existing path at the cursor was recognized, either absolute or relative to the current opened document or current working directory (if the buffer is not yet saved). Defaults to true. | `true` | | `auto-format` | Enable automatic formatting on save | `true` | | `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. | `250` | | `completion-timeout` | Time in milliseconds after typing a word character before completions are shown, set to 5 for instant. | `250` | diff --git a/book/src/languages.md b/book/src/languages.md index fe105cced820..2a1c6d652f18 100644 --- a/book/src/languages.md +++ b/book/src/languages.md @@ -69,6 +69,7 @@ These configuration keys are available: | `formatter` | The formatter for the language, it will take precedence over the lsp when defined. The formatter must be able to take the original file as input from stdin and write the formatted file to stdout | | `soft-wrap` | [editor.softwrap](./configuration.md#editorsoft-wrap-section) | `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap-at-text-width` is set, defaults to `editor.text-width` | +| `path-completion` | Overrides the `editor.path-completion` config key for the language. | | `workspace-lsp-roots` | Directories relative to the workspace root that are treated as LSP roots. Should only be set in `.helix/config.toml`. Overwrites the setting of the same name in `config.toml` if set. | | `persistent-diagnostic-sources` | An array of LSP diagnostic sources assumed unchanged when the language server resends the same set of diagnostics. Helix can track the position for these diagnostics internally instead. Useful for diagnostics that are recomputed on save. 
diff --git a/helix-core/src/completion.rs b/helix-core/src/completion.rs new file mode 100644 index 000000000000..765d947b0679 --- /dev/null +++ b/helix-core/src/completion.rs @@ -0,0 +1,9 @@ +use crate::Transaction; + +#[derive(Debug, PartialEq, Clone)] +pub struct CompletionItem { + pub transaction: Transaction, + pub label: String, + /// Containing Markdown + pub documentation: String, +} diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs index 9165560d0aa5..413c2da77ae0 100644 --- a/helix-core/src/lib.rs +++ b/helix-core/src/lib.rs @@ -3,6 +3,7 @@ pub use encoding_rs as encoding; pub mod auto_pairs; pub mod chars; pub mod comment; +pub mod completion; pub mod config; pub mod diagnostic; pub mod diff; @@ -63,6 +64,7 @@ pub use selection::{Range, Selection}; pub use smallvec::{smallvec, SmallVec}; pub use syntax::Syntax; +pub use completion::CompletionItem; pub use diagnostic::Diagnostic; pub use line_ending::{LineEnding, NATIVE_LINE_ENDING}; diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs index 7be512f52e2c..00027fc84c7b 100644 --- a/helix-core/src/syntax.rs +++ b/helix-core/src/syntax.rs @@ -125,6 +125,9 @@ pub struct LanguageConfiguration { #[serde(skip_serializing_if = "Option::is_none")] pub formatter: Option, + /// If set, overrides `editor.path-completion`. 
+ pub path_completion: Option, + #[serde(default)] pub diagnostic_severity: Severity, diff --git a/helix-term/src/handlers/completion.rs b/helix-term/src/handlers/completion.rs index 68956c85f504..4eb61f6df253 100644 --- a/helix-term/src/handlers/completion.rs +++ b/helix-term/src/handlers/completion.rs @@ -1,20 +1,26 @@ use std::collections::HashSet; +use std::ffi::OsStr; +use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; use arc_swap::ArcSwap; +use futures_util::future::BoxFuture; use futures_util::stream::FuturesUnordered; +use futures_util::FutureExt; use helix_core::chars::char_is_word; use helix_core::syntax::LanguageServerFeature; +use helix_core::Transaction; use helix_event::{ cancelable_future, cancelation, register_hook, send_blocking, CancelRx, CancelTx, }; use helix_lsp::lsp; use helix_lsp::util::pos_to_lsp_pos; -use helix_stdx::rope::RopeSliceExt; +use helix_stdx::rope::{self, RopeSliceExt}; use helix_view::document::{Mode, SavePoint}; use helix_view::handlers::lsp::CompletionEvent; use helix_view::{DocumentId, Editor, ViewId}; +use once_cell::sync::Lazy; use tokio::sync::mpsc::Sender; use tokio::time::Instant; use tokio_stream::StreamExt; @@ -27,7 +33,7 @@ use crate::job::{dispatch, dispatch_blocking}; use crate::keymap::MappableCommand; use crate::ui::editor::InsertEvent; use crate::ui::lsp::SignatureHelp; -use crate::ui::{self, CompletionItem, Popup}; +use crate::ui::{self, CompletionItem, LspCompletionItem, PathCompletionItem, PathKind, Popup}; use super::Handlers; pub use resolve::ResolveHandler; @@ -251,15 +257,19 @@ fn request_completion( None => Vec::new(), } .into_iter() - .map(|item| CompletionItem { - item, - provider: language_server_id, - resolved: false, + .map(|item| { + CompletionItem::Lsp(LspCompletionItem { + item, + provider: language_server_id, + resolved: false, + }) }) .collect(); anyhow::Ok(items) } + .boxed() }) + .chain(path_completion(cursor, text.clone(), doc)) .collect(); let future = async move { @@ 
-291,6 +301,226 @@ fn request_completion( }); } +fn path_completion( + cursor: usize, + text: helix_core::Rope, + doc: &helix_view::Document, +) -> Option>>> { + if !doc.supports_path_completion() { + return None; + } + + // TODO find a good regex for most use cases (especially Windows, which is not yet covered...) + // currently only one path match per line is possible in unix + static PATH_REGEX: Lazy = + Lazy::new(|| rope::Regex::new(r"((?:~|\$HOME|\$\{HOME\})?(?:\.{0,2}/)+.*)").unwrap()); + + let cur_line = text.char_to_line(cursor); + let start = text.line_to_char(cur_line).max(cursor.saturating_sub(1000)); + let line_until_cursor = text.slice(..).regex_input_at(start..cursor); + + let (dir_path, typed_file_name) = PATH_REGEX + .find_iter(line_until_cursor) + .last() // this is a workaround for missing "match end of slice" support in `rope::Regex` + .and_then(|matched_path| { + let end = text.byte_to_char(matched_path.end()); + if end != cursor { + return None; + } + let start = text.byte_to_char(matched_path.start()); + let matched_path = &text.slice(start..end).to_string(); + + // resolve home dir (~/, $HOME/, ${HOME}/) on unix + #[cfg(unix)] + let mut path = { + static HOME_DIR: Lazy> = + Lazy::new(|| std::env::var_os("HOME")); + + let home_resolved_path = if let Some(home) = &*HOME_DIR { + let first_separator_after_home = if matched_path.starts_with("~/") { + Some(1) + } else if matched_path.starts_with("$HOME") { + Some(5) + } else if matched_path.starts_with("${HOME}") { + Some(7) + } else { + None + }; + if let Some(start_char) = first_separator_after_home { + let mut path = home.to_owned(); + path.push(&matched_path[start_char..]); + path + } else { + matched_path.into() + } + } else { + matched_path.into() + }; + PathBuf::from(home_resolved_path) + }; + #[cfg(not(unix))] + let mut path = PathBuf::from(matched_path); + + if path.is_relative() { + if let Some(doc_path) = doc.path().and_then(|dp| dp.parent()) { + path = doc_path.join(path); + } else if 
let Ok(work_dir) = std::env::current_dir() { + path = work_dir.join(path); + } + } + let ends_with_slash = match matched_path.chars().last() { + Some(std::path::MAIN_SEPARATOR) => true, + None => return None, + _ => false, + }; + // check if there are chars after the last slash, and if these chars represent a directory + match std::fs::metadata(path.clone()).ok() { + Some(m) if m.is_dir() && ends_with_slash => { + Some((PathBuf::from(path.as_path()), None)) + } + _ if !ends_with_slash => path.parent().map(|parent_path| { + ( + PathBuf::from(parent_path), + path.file_name().and_then(|f| f.to_str().map(String::from)), + ) + }), + _ => None, + } + })?; + + // The async file accessor functions of tokio were considered, but they were a bit slower + // and less ergonomic than just using the std functions in a separate "thread" + let future = tokio::task::spawn_blocking(move || { + let Some(read_dir) = std::fs::read_dir(&dir_path).ok() else { + return Vec::new(); + }; + + read_dir + .filter_map(Result::ok) + .filter_map(|dir_entry| { + let path = dir_entry.path(); + // check if in / matches the start of the filename + let filename_starts_with_prefix = + match (path.file_name().and_then(OsStr::to_str), &typed_file_name) { + (Some(re_stem), Some(t)) => re_stem.starts_with(t), + _ => true, + }; + if filename_starts_with_prefix { + dir_entry.metadata().ok().map(|md| (path, md)) + } else { + None + } + }) + .map(|(path, md)| { + let file_name = path + .file_name() + .unwrap_or_default() + .to_string_lossy() + .to_string(); + + let full_path = path.canonicalize().unwrap_or_default(); + let full_path_name = full_path.to_string_lossy().into_owned(); + + let is_dir = full_path.is_dir(); + + let kind = if md.is_symlink() { + PathKind::Link + } else if is_dir { + PathKind::Folder + } else { + #[cfg(unix)] + { + use std::os::unix::fs::FileTypeExt; + if md.file_type().is_block_device() { + PathKind::Block + } else if md.file_type().is_socket() { + PathKind::Socket + } else if 
md.file_type().is_char_device() { + PathKind::CharacterDevice + } else if md.file_type().is_fifo() { + PathKind::Fifo + } else { + PathKind::File + } + } + #[cfg(not(unix))] + PathKind::File + }; + + let resolved = if kind == PathKind::Link { + "resolved " + } else { + "" + }; + + let documentation = { + #[cfg(unix)] + { + use std::os::unix::prelude::PermissionsExt; + let mode = md.permissions().mode(); + + let perms = [ + (libc::S_IRUSR, 'r'), + (libc::S_IWUSR, 'w'), + (libc::S_IXUSR, 'x'), + (libc::S_IRGRP, 'r'), + (libc::S_IWGRP, 'w'), + (libc::S_IXGRP, 'x'), + (libc::S_IROTH, 'r'), + (libc::S_IWOTH, 'w'), + (libc::S_IXOTH, 'x'), + ] + .into_iter() + .fold( + String::with_capacity(9), + |mut acc, (p, s)| { + // This cast is necessary on some platforms such as macos as `mode_t` is u16 there + #[allow(clippy::unnecessary_cast)] + acc.push(if mode & (p as u32) > 0 { s } else { '-' }); + acc + }, + ); + + // TODO it would be great to be able to individually color the documentation, + // but this will likely require a custom doc implementation (i.e. not `lsp::Documentation`) + // and/or different rendering in completion.rs + format!( + "type: `{kind}`\n\ + permissions: `[{perms}]`\n\ + {resolved}full path: `{full_path_name}`", + ) + } + #[cfg(not(unix))] + { + format!( + "type: `{path_kind}`\n\ + {resolved}full path: `{full_path_name}`", + ) + } + }; + + let edit_diff = typed_file_name.as_ref().map(|f| f.len()).unwrap_or(0); + + let transaction = Transaction::change( + &text, + std::iter::once((cursor - edit_diff, cursor, Some(file_name.as_str().into()))), + ); + + CompletionItem::Path(PathCompletionItem { + kind, + item: helix_core::CompletionItem { + transaction, + label: file_name, + documentation, + }, + }) + }) + .collect::>() + }); + + Some(async move { Ok(future.await?) }.boxed()) +} + fn show_completion( editor: &mut Editor, compositor: &mut Compositor, @@ -346,7 +576,11 @@ pub fn trigger_auto_completion( .. 
}) if triggers.iter().any(|trigger| text.ends_with(trigger))) }); - if is_trigger_char { + + let trigger_path_completion = + text.ends_with(std::path::MAIN_SEPARATOR_STR) && doc.supports_path_completion(); + + if is_trigger_char || trigger_path_completion { send_blocking( tx, CompletionEvent::TriggerChar { diff --git a/helix-term/src/handlers/completion/resolve.rs b/helix-term/src/handlers/completion/resolve.rs index 0b2c90672f51..9337c982b922 100644 --- a/helix-term/src/handlers/completion/resolve.rs +++ b/helix-term/src/handlers/completion/resolve.rs @@ -9,6 +9,7 @@ use helix_view::Editor; use crate::handlers::completion::CompletionItem; use crate::job; +use crate::ui::LspCompletionItem; /// A hook for resolving incomplete completion items. /// @@ -22,7 +23,7 @@ use crate::job; /// > 'completionItem/resolve' request is sent with the selected completion item as a parameter. /// > The returned completion item should have the documentation property filled in. pub struct ResolveHandler { - last_request: Option>, + last_request: Option>, resolver: Sender, } @@ -38,7 +39,7 @@ impl ResolveHandler { } } - pub fn ensure_item_resolved(&mut self, editor: &mut Editor, item: &mut CompletionItem) { + pub fn ensure_item_resolved(&mut self, editor: &mut Editor, item: &mut LspCompletionItem) { if item.resolved { return; } @@ -93,14 +94,14 @@ impl ResolveHandler { } struct ResolveRequest { - item: Arc, + item: Arc, ls: Arc, } #[derive(Default)] struct ResolveTimeout { next_request: Option, - in_flight: Option<(helix_event::CancelTx, Arc)>, + in_flight: Option<(helix_event::CancelTx, Arc)>, } impl AsyncHook for ResolveTimeout { @@ -152,8 +153,8 @@ impl ResolveRequest { .unwrap() .completion { - let resolved_item = match resolved_item { - Ok(item) => CompletionItem { + let resolved_item = CompletionItem::Lsp(match resolved_item { + Ok(item) => LspCompletionItem { item, resolved: true, ..*self.item @@ -166,8 +167,9 @@ impl ResolveRequest { item.resolved = true; item } - }; - 
completion.replace_item(&self.item, resolved_item); + }); + let old_item = CompletionItem::Lsp((*self.item).clone()); + completion.replace_item(&old_item, resolved_item); }; }) .await diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index 14397bb5c4c7..688cd35f6b42 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -28,30 +28,35 @@ impl menu::Item for CompletionItem { #[inline] fn filter_text(&self, _data: &Self::Data) -> Cow { - self.item - .filter_text - .as_ref() - .unwrap_or(&self.item.label) - .as_str() - .into() + match self { + CompletionItem::Lsp(LspCompletionItem { item, .. }) => item + .filter_text + .as_ref() + .unwrap_or(&item.label) + .as_str() + .into(), + CompletionItem::Path(PathCompletionItem { item, .. }) => Cow::from(&item.label), + } } fn format(&self, _data: &Self::Data) -> menu::Row { - let deprecated = self.item.deprecated.unwrap_or_default() - || self.item.tags.as_ref().map_or(false, |tags| { - tags.contains(&lsp::CompletionItemTag::DEPRECATED) - }); + let deprecated = match self { + CompletionItem::Lsp(LspCompletionItem { item, .. }) => { + item.deprecated.unwrap_or_default() + || item.tags.as_ref().map_or(false, |tags| { + tags.contains(&lsp::CompletionItemTag::DEPRECATED) + }) + } + CompletionItem::Path(_) => false, + }; - menu::Row::new(vec![ - menu::Cell::from(Span::styled( - self.item.label.as_str(), - if deprecated { - Style::default().add_modifier(Modifier::CROSSED_OUT) - } else { - Style::default() - }, - )), - menu::Cell::from(match self.item.kind { + let label = match self { + CompletionItem::Lsp(LspCompletionItem { item, .. }) => &item.label, + CompletionItem::Path(PathCompletionItem { item, .. }) => &item.label, + }; + + let kind = match self { + CompletionItem::Lsp(LspCompletionItem { item, .. 
}) => match item.kind { Some(lsp::CompletionItemKind::TEXT) => "text", Some(lsp::CompletionItemKind::METHOD) => "method", Some(lsp::CompletionItemKind::FUNCTION) => "function", @@ -82,18 +87,83 @@ impl menu::Item for CompletionItem { "" } None => "", - }), + }, + CompletionItem::Path(PathCompletionItem { kind, .. }) => kind.as_str(), + }; + + menu::Row::new(vec![ + menu::Cell::from(Span::styled( + label, + if deprecated { + Style::default().add_modifier(Modifier::CROSSED_OUT) + } else { + Style::default() + }, + )), + menu::Cell::from(kind), ]) } } -#[derive(Debug, PartialEq, Default, Clone)] -pub struct CompletionItem { +#[derive(Debug, PartialEq, Clone)] +pub enum PathKind { + Folder, + File, + Link, + Block, + Socket, + CharacterDevice, + Fifo, +} + +impl PathKind { + fn as_str(&self) -> &'static str { + match self { + PathKind::Folder => "folder", + PathKind::File => "file", + PathKind::Link => "link", + PathKind::Block => "block", + PathKind::Socket => "socket", + PathKind::CharacterDevice => "char_device", + PathKind::Fifo => "fifo", + } + } +} + +impl std::fmt::Display for PathKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +#[derive(Debug, PartialEq, Clone)] +pub struct LspCompletionItem { pub item: lsp::CompletionItem, pub provider: LanguageServerId, pub resolved: bool, } +#[derive(Debug, PartialEq, Clone)] +pub struct PathCompletionItem { + pub kind: PathKind, + pub item: helix_core::CompletionItem, +} + +#[derive(Debug, PartialEq, Clone)] +pub enum CompletionItem { + Lsp(LspCompletionItem), + Path(PathCompletionItem), +} + +impl CompletionItem { + pub fn preselect(&self) -> bool { + match self { + CompletionItem::Lsp(LspCompletionItem { item, .. }) => item.preselect.unwrap_or(false), + CompletionItem::Path(_) => false, + } + } +} + /// Wraps a Menu. 
pub struct Completion { popup: Popup>, @@ -115,11 +185,11 @@ impl Completion { let preview_completion_insert = editor.config().preview_completion_insert; let replace_mode = editor.config().completion_replace; // Sort completion items according to their preselect status (given by the LSP server) - items.sort_by_key(|item| !item.item.preselect.unwrap_or(false)); + items.sort_by_key(|item| !item.preselect()); // Then create the menu let menu = Menu::new(items, (), move |editor: &mut Editor, item, event| { - fn item_to_transaction( + fn lsp_item_to_transaction( doc: &Document, view_id: ViewId, item: &lsp::CompletionItem, @@ -257,16 +327,23 @@ impl Completion { // always present here let item = item.unwrap(); - let transaction = item_to_transaction( - doc, - view.id, - &item.item, - language_server!(item).offset_encoding(), - trigger_offset, - true, - replace_mode, - ); - doc.apply_temporary(&transaction, view.id); + match item { + CompletionItem::Lsp(item) => doc.apply_temporary( + &lsp_item_to_transaction( + doc, + view.id, + &item.item, + language_server!(item).offset_encoding(), + trigger_offset, + true, + replace_mode, + ), + view.id, + ), + CompletionItem::Path(PathCompletionItem { item, .. 
}) => { + doc.apply_temporary(&item.transaction, view.id) + } + }; } PromptEvent::Update => {} PromptEvent::Validate => { @@ -275,32 +352,62 @@ impl Completion { { doc.restore(view, &savepoint, false); } - // always present here - let mut item = item.unwrap().clone(); - - let language_server = language_server!(item); - let offset_encoding = language_server.offset_encoding(); - if !item.resolved { - if let Some(resolved) = - Self::resolve_completion_item(language_server, item.item.clone()) - { - item.item = resolved; - } - }; // if more text was entered, remove it doc.restore(view, &savepoint, true); // save an undo checkpoint before the completion doc.append_changes_to_history(view); - let transaction = item_to_transaction( - doc, - view.id, - &item.item, - offset_encoding, - trigger_offset, - false, - replace_mode, - ); + + // item always present here + let (mut transaction, additional_edits) = match item.unwrap().clone() { + CompletionItem::Lsp(mut item) => { + let language_server = language_server!(item); + + // resolve item if not yet resolved + if !item.resolved { + if let Some(resolved_item) = Self::resolve_completion_item( + language_server, + item.item.clone(), + ) { + item.item = resolved_item; + } + }; + + let encoding = language_server.offset_encoding(); + ( + lsp_item_to_transaction( + doc, + view.id, + &item.item, + encoding, + trigger_offset, + false, + replace_mode, + ), + item.item.additional_text_edits.map(|e| (e, encoding)), + ) + } + CompletionItem::Path(PathCompletionItem { item, .. 
}) => { + (item.transaction, None) + } + }; + + if let Some((additional_edits, offset_encoding)) = additional_edits { + if !additional_edits.is_empty() { + // use the selection of the completion, instead of the (empty) one from the additional text edits + let selection = transaction.selection().cloned(); + transaction = + transaction.compose(util::generate_transaction_from_edits( + doc.text(), + additional_edits, + offset_encoding, // TODO: should probably transcode in Client + )); + if let Some(selection) = selection { + transaction = transaction.with_selection(selection); + } + } + } + doc.apply(&transaction, view.id); editor.last_completion = Some(CompleteAction::Applied { @@ -308,17 +415,6 @@ impl Completion { changes: completion_changes(&transaction, trigger_offset), }); - // TODO: add additional _edits to completion_changes? - if let Some(additional_edits) = item.item.additional_text_edits { - if !additional_edits.is_empty() { - let transaction = util::generate_transaction_from_edits( - doc.text(), - additional_edits, - offset_encoding, // TODO: should probably transcode in Client - ); - doc.apply(&transaction, view.id); - } - } // we could have just inserted a trigger char (like a `crate::` completion for rust // so we want to retrigger immediately when accepting a completion. 
trigger_auto_completion(&editor.handlers.completions, editor, true); @@ -440,7 +536,7 @@ impl Component for Completion { Some(option) => option, None => return, }; - if !option.resolved { + if let CompletionItem::Lsp(option) = option { self.resolve_handler.ensure_item_resolved(cx.editor, option); } // need to render: @@ -465,27 +561,32 @@ impl Component for Completion { Markdown::new(md, cx.editor.syn_loader.clone()) }; - let mut markdown_doc = match &option.item.documentation { - Some(lsp::Documentation::String(contents)) - | Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { - kind: lsp::MarkupKind::PlainText, - value: contents, - })) => { - // TODO: convert to wrapped text - markdowned(language, option.item.detail.as_deref(), Some(contents)) - } - Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { - kind: lsp::MarkupKind::Markdown, - value: contents, - })) => { - // TODO: set language based on doc scope - markdowned(language, option.item.detail.as_deref(), Some(contents)) - } - None if option.item.detail.is_some() => { - // TODO: set language based on doc scope - markdowned(language, option.item.detail.as_deref(), None) + let mut markdown_doc = match option { + CompletionItem::Lsp(option) => match &option.item.documentation { + Some(lsp::Documentation::String(contents)) + | Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { + kind: lsp::MarkupKind::PlainText, + value: contents, + })) => { + // TODO: convert to wrapped text + markdowned(language, option.item.detail.as_deref(), Some(contents)) + } + Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { + kind: lsp::MarkupKind::Markdown, + value: contents, + })) => { + // TODO: set language based on doc scope + markdowned(language, option.item.detail.as_deref(), Some(contents)) + } + None if option.item.detail.is_some() => { + // TODO: set language based on doc scope + markdowned(language, option.item.detail.as_deref(), None) + } + None => return, + }, + CompletionItem::Path(option) 
=> { + markdowned(language, None, Some(&option.item.documentation)) } - None => return, }; let popup_area = self.popup.area(area, cx.editor); diff --git a/helix-term/src/ui/mod.rs b/helix-term/src/ui/mod.rs index 6a3e198c1051..63e817548a28 100644 --- a/helix-term/src/ui/mod.rs +++ b/helix-term/src/ui/mod.rs @@ -17,7 +17,7 @@ mod text_decorations; use crate::compositor::Compositor; use crate::filter_picker_entry; use crate::job::{self, Callback}; -pub use completion::{Completion, CompletionItem}; +pub use completion::{Completion, CompletionItem, LspCompletionItem, PathCompletionItem, PathKind}; pub use editor::EditorView; use helix_stdx::rope; pub use markdown::Markdown; diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 15aa81daee35..c1e4abe77f30 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -1713,6 +1713,12 @@ impl Document { self.version } + pub fn supports_path_completion(&self) -> bool { + self.language_config() + .and_then(|lang_config| lang_config.path_completion) + .unwrap_or_else(|| self.config.load().path_completion) + } + /// maintains the order as configured in the language_servers TOML array pub fn language_servers(&self) -> impl Iterator { self.language_config().into_iter().flat_map(move |config| { diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs index 1708b3b4e053..bb6f20e436cd 100644 --- a/helix-view/src/editor.rs +++ b/helix-view/src/editor.rs @@ -267,6 +267,11 @@ pub struct Config { pub auto_pairs: AutoPairConfig, /// Automatic auto-completion, automatically pop up without user trigger. Defaults to true. pub auto_completion: bool, + /// Enable filepath completion. + /// Show files and directories if an existing path at the cursor was recognized, + /// either absolute or relative to the current opened document or current working directory (if the buffer is not yet saved). + /// Defaults to true. + pub path_completion: bool, /// Automatic formatting on save. Defaults to true. 
pub auto_format: bool, /// Automatic save on focus lost and/or after delay. @@ -944,6 +949,7 @@ impl Default for Config { middle_click_paste: true, auto_pairs: AutoPairConfig::default(), auto_completion: true, + path_completion: true, auto_format: true, auto_save: AutoSave::default(), idle_timeout: Duration::from_millis(250), From aaf7c2076ce41044ea25483e0a3b2ea56d70f589 Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Sat, 24 Aug 2024 21:35:32 +0200 Subject: [PATCH 02/19] Handle windows CI --- helix-term/src/handlers/completion.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/helix-term/src/handlers/completion.rs b/helix-term/src/handlers/completion.rs index 4eb61f6df253..f40b123f8267 100644 --- a/helix-term/src/handlers/completion.rs +++ b/helix-term/src/handlers/completion.rs @@ -493,7 +493,7 @@ fn path_completion( #[cfg(not(unix))] { format!( - "type: `{path_kind}`\n\ + "type: `{kind}`\n\ {resolved}full path: `{full_path_name}`", ) } From ee5b319ceeba85a5d4caffc7b2ec591911c0dcb0 Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Thu, 29 Aug 2024 22:02:28 +0200 Subject: [PATCH 03/19] Micro-optimization and revert of composing additional text edits --- helix-term/src/handlers/completion/resolve.rs | 64 ++++++++++++------- helix-term/src/ui/completion.rs | 55 ++++++++-------- 2 files changed, 67 insertions(+), 52 deletions(-) diff --git a/helix-term/src/handlers/completion/resolve.rs b/helix-term/src/handlers/completion/resolve.rs index 9337c982b922..410d32d935da 100644 --- a/helix-term/src/handlers/completion/resolve.rs +++ b/helix-term/src/handlers/completion/resolve.rs @@ -23,10 +23,19 @@ use crate::ui::LspCompletionItem; /// > 'completionItem/resolve' request is sent with the selected completion item as a parameter. /// > The returned completion item should have the documentation property filled in. pub struct ResolveHandler { - last_request: Option>, + last_request: Option>, resolver: Sender, } +macro_rules! 
lsp_variant { + ($item: expr) => { + match $item { + CompletionItem::Lsp(item) => item, + _ => unreachable!("This should always be an lsp completion item"), + } + }; +} + impl ResolveHandler { pub fn new() -> ResolveHandler { ResolveHandler { @@ -39,8 +48,12 @@ impl ResolveHandler { } } - pub fn ensure_item_resolved(&mut self, editor: &mut Editor, item: &mut LspCompletionItem) { - if item.resolved { + /// # Panics + /// When the item is not a `CompletionItem::Lsp(_)` + pub fn ensure_item_resolved(&mut self, editor: &mut Editor, item: &mut CompletionItem) { + let lsp_item = lsp_variant!(item); + + if lsp_item.resolved { return; } // We consider an item to be fully resolved if it has non-empty, none-`None` details, @@ -48,7 +61,7 @@ impl ResolveHandler { // check but some language servers send values like `Some([])` for additional text // edits although the items need to be resolved. This is probably a consequence of // how `null` works in the JavaScript world. - let is_resolved = item + let is_resolved = lsp_item .item .documentation .as_ref() @@ -56,25 +69,31 @@ impl ResolveHandler { lsp::Documentation::String(text) => !text.is_empty(), lsp::Documentation::MarkupContent(markup) => !markup.value.is_empty(), }) - && item + && lsp_item .item .detail .as_ref() .is_some_and(|detail| !detail.is_empty()) - && item + && lsp_item .item .additional_text_edits .as_ref() .is_some_and(|edits| !edits.is_empty()); if is_resolved { - item.resolved = true; + lsp_item.resolved = true; return; } if self.last_request.as_deref().is_some_and(|it| it == item) { return; } - let Some(ls) = editor.language_servers.get_by_id(item.provider).cloned() else { - item.resolved = true; + + let lsp_item = lsp_variant!(item); + let Some(ls) = editor + .language_servers + .get_by_id(lsp_item.provider) + .cloned() + else { + lsp_item.resolved = true; return; }; if matches!( @@ -88,20 +107,20 @@ impl ResolveHandler { self.last_request = Some(item.clone()); send_blocking(&self.resolver, ResolveRequest { 
item, ls }) } else { - item.resolved = true; + lsp_item.resolved = true; } } } struct ResolveRequest { - item: Arc, + item: Arc, ls: Arc, } #[derive(Default)] struct ResolveTimeout { next_request: Option, - in_flight: Option<(helix_event::CancelTx, Arc)>, + in_flight: Option<(helix_event::CancelTx, Arc)>, } impl AsyncHook for ResolveTimeout { @@ -121,7 +140,7 @@ impl AsyncHook for ResolveTimeout { } else if self .in_flight .as_ref() - .is_some_and(|(_, old_request)| old_request.item == request.item.item) + .is_some_and(|(_, old_request)| *old_request == request.item) { self.next_request = None; None @@ -143,7 +162,9 @@ impl AsyncHook for ResolveTimeout { impl ResolveRequest { async fn execute(self, cancel: CancelRx) { - let future = self.ls.resolve_completion_item(&self.item.item); + let lsp_item = &lsp_variant!(&*self.item).item; + + let future = self.ls.resolve_completion_item(lsp_item); let Some(resolved_item) = helix_event::cancelable_future(future, cancel).await else { return; }; @@ -153,23 +174,22 @@ impl ResolveRequest { .unwrap() .completion { - let resolved_item = CompletionItem::Lsp(match resolved_item { - Ok(item) => LspCompletionItem { + let resolved_item = match resolved_item { + Ok(item) => CompletionItem::Lsp(LspCompletionItem { item, + provider: lsp_variant!(&*self.item).provider, resolved: true, - ..*self.item - }, + }), Err(err) => { log::error!("completion resolve request failed: {err}"); // set item to resolved so we don't request it again // we could also remove it but that oculd be odd ui let mut item = (*self.item).clone(); - item.resolved = true; + lsp_variant!(&mut item).resolved = true; item } - }); - let old_item = CompletionItem::Lsp((*self.item).clone()); - completion.replace_item(&old_item, resolved_item); + }; + completion.replace_item(&self.item, resolved_item); }; }) .await diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index 688cd35f6b42..f4d64446d6e7 100644 --- a/helix-term/src/ui/completion.rs +++ 
b/helix-term/src/ui/completion.rs @@ -359,7 +359,7 @@ impl Completion { doc.append_changes_to_history(view); // item always present here - let (mut transaction, additional_edits) = match item.unwrap().clone() { + let (transaction, additional_edits) = match item.unwrap().clone() { CompletionItem::Lsp(mut item) => { let language_server = language_server!(item); @@ -374,40 +374,24 @@ impl Completion { }; let encoding = language_server.offset_encoding(); - ( - lsp_item_to_transaction( - doc, - view.id, - &item.item, - encoding, - trigger_offset, - false, - replace_mode, - ), - item.item.additional_text_edits.map(|e| (e, encoding)), - ) + let transaction = lsp_item_to_transaction( + doc, + view.id, + &item.item, + encoding, + trigger_offset, + false, + replace_mode, + ); + let add_edits = item.item.additional_text_edits; + + (transaction, add_edits.map(|edits| (edits, encoding))) } CompletionItem::Path(PathCompletionItem { item, .. }) => { (item.transaction, None) } }; - if let Some((additional_edits, offset_encoding)) = additional_edits { - if !additional_edits.is_empty() { - // use the selection of the completion, instead of the (empty) one from the additional text edits - let selection = transaction.selection().cloned(); - transaction = - transaction.compose(util::generate_transaction_from_edits( - doc.text(), - additional_edits, - offset_encoding, // TODO: should probably transcode in Client - )); - if let Some(selection) = selection { - transaction = transaction.with_selection(selection); - } - } - } - doc.apply(&transaction, view.id); editor.last_completion = Some(CompleteAction::Applied { @@ -415,6 +399,17 @@ impl Completion { changes: completion_changes(&transaction, trigger_offset), }); + // TODO: add additional _edits to completion_changes? 
+ if let Some((additional_edits, offset_encoding)) = additional_edits { + if !additional_edits.is_empty() { + let transaction = util::generate_transaction_from_edits( + doc.text(), + additional_edits, + offset_encoding, // TODO: should probably transcode in Client + ); + doc.apply(&transaction, view.id); + } + } // we could have just inserted a trigger char (like a `crate::` completion for rust // so we want to retrigger immediately when accepting a completion. trigger_auto_completion(&editor.handlers.completions, editor, true); @@ -536,7 +531,7 @@ impl Component for Completion { Some(option) => option, None => return, }; - if let CompletionItem::Lsp(option) = option { + if let CompletionItem::Lsp(_) = option { self.resolve_handler.ensure_item_resolved(cx.editor, option); } // need to render: From be1e616b937503994f5f2b43ce4fd982b8cc26a9 Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Sat, 7 Sep 2024 17:20:34 +0200 Subject: [PATCH 04/19] Use `PartialEq` for `LspCompletionItem` and don't prefix-filter path completion items --- helix-term/src/handlers/completion.rs | 16 +---- helix-term/src/handlers/completion/resolve.rs | 63 +++++++------------ helix-term/src/ui/completion.rs | 26 +++++++- helix-term/src/ui/menu.rs | 2 +- 4 files changed, 47 insertions(+), 60 deletions(-) diff --git a/helix-term/src/handlers/completion.rs b/helix-term/src/handlers/completion.rs index f40b123f8267..b27c9075b67d 100644 --- a/helix-term/src/handlers/completion.rs +++ b/helix-term/src/handlers/completion.rs @@ -1,5 +1,4 @@ use std::collections::HashSet; -use std::ffi::OsStr; use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; @@ -397,20 +396,7 @@ fn path_completion( read_dir .filter_map(Result::ok) - .filter_map(|dir_entry| { - let path = dir_entry.path(); - // check if in / matches the start of the filename - let filename_starts_with_prefix = - match (path.file_name().and_then(OsStr::to_str), &typed_file_name) { - (Some(re_stem), Some(t)) => 
re_stem.starts_with(t), - _ => true, - }; - if filename_starts_with_prefix { - dir_entry.metadata().ok().map(|md| (path, md)) - } else { - None - } - }) + .filter_map(|dir_entry| dir_entry.metadata().ok().map(|md| (dir_entry.path(), md))) .map(|(path, md)| { let file_name = path .file_name() diff --git a/helix-term/src/handlers/completion/resolve.rs b/helix-term/src/handlers/completion/resolve.rs index 410d32d935da..8c5674fe7e21 100644 --- a/helix-term/src/handlers/completion/resolve.rs +++ b/helix-term/src/handlers/completion/resolve.rs @@ -23,19 +23,10 @@ use crate::ui::LspCompletionItem; /// > 'completionItem/resolve' request is sent with the selected completion item as a parameter. /// > The returned completion item should have the documentation property filled in. pub struct ResolveHandler { - last_request: Option>, + last_request: Option>, resolver: Sender, } -macro_rules! lsp_variant { - ($item: expr) => { - match $item { - CompletionItem::Lsp(item) => item, - _ => unreachable!("This should always be an lsp completion item"), - } - }; -} - impl ResolveHandler { pub fn new() -> ResolveHandler { ResolveHandler { @@ -48,12 +39,8 @@ impl ResolveHandler { } } - /// # Panics - /// When the item is not a `CompletionItem::Lsp(_)` - pub fn ensure_item_resolved(&mut self, editor: &mut Editor, item: &mut CompletionItem) { - let lsp_item = lsp_variant!(item); - - if lsp_item.resolved { + pub fn ensure_item_resolved(&mut self, editor: &mut Editor, item: &mut LspCompletionItem) { + if item.resolved { return; } // We consider an item to be fully resolved if it has non-empty, none-`None` details, @@ -61,7 +48,7 @@ impl ResolveHandler { // check but some language servers send values like `Some([])` for additional text // edits although the items need to be resolved. This is probably a consequence of // how `null` works in the JavaScript world. 
- let is_resolved = lsp_item + let is_resolved = item .item .documentation .as_ref() @@ -69,31 +56,25 @@ impl ResolveHandler { lsp::Documentation::String(text) => !text.is_empty(), lsp::Documentation::MarkupContent(markup) => !markup.value.is_empty(), }) - && lsp_item + && item .item .detail .as_ref() .is_some_and(|detail| !detail.is_empty()) - && lsp_item + && item .item .additional_text_edits .as_ref() .is_some_and(|edits| !edits.is_empty()); if is_resolved { - lsp_item.resolved = true; + item.resolved = true; return; } if self.last_request.as_deref().is_some_and(|it| it == item) { return; } - - let lsp_item = lsp_variant!(item); - let Some(ls) = editor - .language_servers - .get_by_id(lsp_item.provider) - .cloned() - else { - lsp_item.resolved = true; + let Some(ls) = editor.language_servers.get_by_id(item.provider).cloned() else { + item.resolved = true; return; }; if matches!( @@ -107,20 +88,20 @@ impl ResolveHandler { self.last_request = Some(item.clone()); send_blocking(&self.resolver, ResolveRequest { item, ls }) } else { - lsp_item.resolved = true; + item.resolved = true; } } } struct ResolveRequest { - item: Arc, + item: Arc, ls: Arc, } #[derive(Default)] struct ResolveTimeout { next_request: Option, - in_flight: Option<(helix_event::CancelTx, Arc)>, + in_flight: Option<(helix_event::CancelTx, Arc)>, } impl AsyncHook for ResolveTimeout { @@ -140,7 +121,7 @@ impl AsyncHook for ResolveTimeout { } else if self .in_flight .as_ref() - .is_some_and(|(_, old_request)| *old_request == request.item) + .is_some_and(|(_, old_request)| old_request.item == request.item.item) { self.next_request = None; None @@ -162,9 +143,7 @@ impl AsyncHook for ResolveTimeout { impl ResolveRequest { async fn execute(self, cancel: CancelRx) { - let lsp_item = &lsp_variant!(&*self.item).item; - - let future = self.ls.resolve_completion_item(lsp_item); + let future = self.ls.resolve_completion_item(&self.item.item); let Some(resolved_item) = helix_event::cancelable_future(future, 
cancel).await else { return; }; @@ -174,22 +153,22 @@ impl ResolveRequest { .unwrap() .completion { - let resolved_item = match resolved_item { - Ok(item) => CompletionItem::Lsp(LspCompletionItem { + let resolved_item = CompletionItem::Lsp(match resolved_item { + Ok(item) => LspCompletionItem { item, - provider: lsp_variant!(&*self.item).provider, resolved: true, - }), + ..*self.item + }, Err(err) => { log::error!("completion resolve request failed: {err}"); // set item to resolved so we don't request it again // we could also remove it but that oculd be odd ui let mut item = (*self.item).clone(); - lsp_variant!(&mut item).resolved = true; + item.resolved = true; item } - }; - completion.replace_item(&self.item, resolved_item); + }); + completion.replace_item(&*self.item, resolved_item); }; }) .await diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index f4d64446d6e7..43e778fcbb28 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -155,6 +155,24 @@ pub enum CompletionItem { Path(PathCompletionItem), } +impl PartialEq for LspCompletionItem { + fn eq(&self, other: &CompletionItem) -> bool { + match other { + CompletionItem::Lsp(other) => self == other, + _ => false, + } + } +} + +impl PartialEq for PathCompletionItem { + fn eq(&self, other: &CompletionItem) -> bool { + match other { + CompletionItem::Path(other) => self == other, + _ => false, + } + } +} + impl CompletionItem { pub fn preselect(&self) -> bool { match self { @@ -505,7 +523,11 @@ impl Completion { self.popup.contents().is_empty() } - pub fn replace_item(&mut self, old_item: &CompletionItem, new_item: CompletionItem) { + pub fn replace_item( + &mut self, + old_item: &impl PartialEq, + new_item: CompletionItem, + ) { self.popup.contents_mut().replace_option(old_item, new_item); } @@ -531,7 +553,7 @@ impl Component for Completion { Some(option) => option, None => return, }; - if let CompletionItem::Lsp(_) = option { + if let 
CompletionItem::Lsp(option) = option { self.resolve_handler.ensure_item_resolved(cx.editor, option); } // need to render: diff --git a/helix-term/src/ui/menu.rs b/helix-term/src/ui/menu.rs index c120d0b25cf3..ffe3ebb3cbf5 100644 --- a/helix-term/src/ui/menu.rs +++ b/helix-term/src/ui/menu.rs @@ -228,7 +228,7 @@ impl Menu { } impl Menu { - pub fn replace_option(&mut self, old_option: &T, new_option: T) { + pub fn replace_option(&mut self, old_option: &impl PartialEq, new_option: T) { for option in &mut self.options { if old_option == option { *option = new_option; From f20cd4cfa7360c4fa564cb974bc59c0dc779a7c6 Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Thu, 3 Oct 2024 20:55:21 +0200 Subject: [PATCH 05/19] Apply suggestions from code review Co-authored-by: Michael Davis --- helix-term/src/handlers/completion.rs | 4 ++-- helix-term/src/ui/completion.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/helix-term/src/handlers/completion.rs b/helix-term/src/handlers/completion.rs index b27c9075b67d..c9a36ccb276b 100644 --- a/helix-term/src/handlers/completion.rs +++ b/helix-term/src/handlers/completion.rs @@ -390,7 +390,7 @@ fn path_completion( // The async file accessor functions of tokio were considered, but they were a bit slower // and less ergonomic than just using the std functions in a separate "thread" let future = tokio::task::spawn_blocking(move || { - let Some(read_dir) = std::fs::read_dir(&dir_path).ok() else { + let Ok(read_dir) = std::fs::read_dir(&dir_path) else { return Vec::new(); }; @@ -485,7 +485,7 @@ fn path_completion( } }; - let edit_diff = typed_file_name.as_ref().map(|f| f.len()).unwrap_or(0); + let edit_diff = typed_file_name.as_ref().map(|f| f.len()).unwrap_or_default(); let transaction = Transaction::change( &text, diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index 43e778fcbb28..56470dfb6966 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ 
-91,7 +91,7 @@ impl menu::Item for CompletionItem { CompletionItem::Path(PathCompletionItem { kind, .. }) => kind.as_str(), }; - menu::Row::new(vec![ + menu::Row::new([ menu::Cell::from(Span::styled( label, if deprecated { From d02470f2d7d921b721b2a8ed48ec4078105b6231 Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Thu, 3 Oct 2024 20:56:01 +0200 Subject: [PATCH 06/19] Apply suggestions from code review --- helix-stdx/src/path.rs | 2 +- helix-term/src/handlers/completion.rs | 141 +++++++++++--------------- 2 files changed, 63 insertions(+), 80 deletions(-) diff --git a/helix-stdx/src/path.rs b/helix-stdx/src/path.rs index 968596a703fc..03a05d13283b 100644 --- a/helix-stdx/src/path.rs +++ b/helix-stdx/src/path.rs @@ -51,7 +51,7 @@ where /// Normalize a path without resolving symlinks. // Strategy: start from the first component and move up. Cannonicalize previous path, -// join component, cannonicalize new path, strip prefix and join to the final result. +// join component, canonicalize new path, strip prefix and join to the final result. 
pub fn normalize(path: impl AsRef) -> PathBuf { let mut components = path.as_ref().components().peekable(); let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { diff --git a/helix-term/src/handlers/completion.rs b/helix-term/src/handlers/completion.rs index c9a36ccb276b..4c9a9a249c84 100644 --- a/helix-term/src/handlers/completion.rs +++ b/helix-term/src/handlers/completion.rs @@ -1,4 +1,5 @@ use std::collections::HashSet; +use std::ffi::OsString; use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; @@ -15,6 +16,7 @@ use helix_event::{ }; use helix_lsp::lsp; use helix_lsp::util::pos_to_lsp_pos; +use helix_stdx::path::{canonicalize, fold_home_dir}; use helix_stdx::rope::{self, RopeSliceExt}; use helix_view::document::{Mode, SavePoint}; use helix_view::handlers::lsp::CompletionEvent; @@ -312,80 +314,69 @@ fn path_completion( // TODO find a good regex for most use cases (especially Windows, which is not yet covered...) // currently only one path match per line is possible in unix static PATH_REGEX: Lazy = - Lazy::new(|| rope::Regex::new(r"((?:~|\$HOME|\$\{HOME\})?(?:\.{0,2}/)+.*)").unwrap()); + Lazy::new(|| rope::Regex::new(r"((?:~|\$HOME|\$\{HOME\})?(?:\.{0,2}/)+.*)$").unwrap()); let cur_line = text.char_to_line(cursor); let start = text.line_to_char(cur_line).max(cursor.saturating_sub(1000)); - let line_until_cursor = text.slice(..).regex_input_at(start..cursor); - - let (dir_path, typed_file_name) = PATH_REGEX - .find_iter(line_until_cursor) - .last() // this is a workaround for missing "match end of slice" support in `rope::Regex` - .and_then(|matched_path| { - let end = text.byte_to_char(matched_path.end()); - if end != cursor { - return None; - } - let start = text.byte_to_char(matched_path.start()); - let matched_path = &text.slice(start..end).to_string(); - - // resolve home dir (~/, $HOME/, ${HOME}/) on unix - #[cfg(unix)] - let mut path = { - static HOME_DIR: Lazy> = - Lazy::new(|| std::env::var_os("HOME")); - - 
let home_resolved_path = if let Some(home) = &*HOME_DIR { - let first_separator_after_home = if matched_path.starts_with("~/") { - Some(1) - } else if matched_path.starts_with("$HOME") { - Some(5) - } else if matched_path.starts_with("${HOME}") { - Some(7) - } else { - None - }; - if let Some(start_char) = first_separator_after_home { - let mut path = home.to_owned(); - path.push(&matched_path[start_char..]); - path - } else { - matched_path.into() - } + let line_until_cursor = text.slice(start..cursor).regex_input_at(..); + + let (dir_path, typed_file_name) = PATH_REGEX.search(line_until_cursor).and_then(|m| { + let matched_path = &text.byte_slice(m.start()..m.end()).to_string(); + + // resolve home dir (~/, $HOME/, ${HOME}/) on unix + #[cfg(unix)] + let mut path = { + static HOME_DIR: Lazy> = Lazy::new(|| std::env::var_os("HOME")); + + let home_resolved_path = if let Some(home) = &*HOME_DIR { + let first_separator_after_home = if matched_path.starts_with("~/") { + Some(1) + } else if matched_path.starts_with("$HOME") { + Some(5) + } else if matched_path.starts_with("${HOME}") { + Some(7) } else { - matched_path.into() + None }; - PathBuf::from(home_resolved_path) - }; - #[cfg(not(unix))] - let mut path = PathBuf::from(matched_path); - - if path.is_relative() { - if let Some(doc_path) = doc.path().and_then(|dp| dp.parent()) { - path = doc_path.join(path); - } else if let Ok(work_dir) = std::env::current_dir() { - path = work_dir.join(path); + if let Some(start_char) = first_separator_after_home { + let mut path = home.to_owned(); + path.push(&matched_path[start_char..]); + path + } else { + matched_path.into() } - } - let ends_with_slash = match matched_path.chars().last() { - Some(std::path::MAIN_SEPARATOR) => true, - None => return None, - _ => false, + } else { + matched_path.into() }; - // check if there are chars after the last slash, and if these chars represent a directory - match std::fs::metadata(path.clone()).ok() { - Some(m) if m.is_dir() && 
ends_with_slash => { - Some((PathBuf::from(path.as_path()), None)) - } - _ if !ends_with_slash => path.parent().map(|parent_path| { - ( - PathBuf::from(parent_path), - path.file_name().and_then(|f| f.to_str().map(String::from)), - ) - }), - _ => None, + PathBuf::from(home_resolved_path) + }; + #[cfg(not(unix))] + let mut path = PathBuf::from(matched_path); + + if path.is_relative() { + if let Some(doc_path) = doc.path().and_then(|dp| dp.parent()) { + path = doc_path.join(path); + } else if let Ok(work_dir) = std::env::current_dir() { + path = work_dir.join(path); } - })?; + } + let ends_with_slash = match matched_path.chars().last() { + Some(std::path::MAIN_SEPARATOR) => true, + None => return None, + _ => false, + }; + // check if there are chars after the last slash, and if these chars represent a directory + match std::fs::metadata(path.clone()).ok() { + Some(m) if m.is_dir() && ends_with_slash => Some((PathBuf::from(path.as_path()), None)), + _ if !ends_with_slash => path.parent().map(|parent_path| { + ( + PathBuf::from(parent_path), + path.file_name().and_then(|f| f.to_str().map(String::from)), + ) + }), + _ => None, + } + })?; // The async file accessor functions of tokio were considered, but they were a bit slower // and less ergonomic than just using the std functions in a separate "thread" @@ -404,14 +395,12 @@ fn path_completion( .to_string_lossy() .to_string(); - let full_path = path.canonicalize().unwrap_or_default(); - let full_path_name = full_path.to_string_lossy().into_owned(); - - let is_dir = full_path.is_dir(); + let full_path = fold_home_dir(canonicalize(path)); + let full_path_name = full_path.to_string_lossy(); let kind = if md.is_symlink() { PathKind::Link - } else if is_dir { + } else if md.is_dir() { PathKind::Folder } else { #[cfg(unix)] @@ -433,12 +422,6 @@ fn path_completion( PathKind::File }; - let resolved = if kind == PathKind::Link { - "resolved " - } else { - "" - }; - let documentation = { #[cfg(unix)] { @@ -473,14 +456,14 @@ fn 
path_completion( format!( "type: `{kind}`\n\ permissions: `[{perms}]`\n\ - {resolved}full path: `{full_path_name}`", + full path: `{full_path_name}`", ) } #[cfg(not(unix))] { format!( "type: `{kind}`\n\ - {resolved}full path: `{full_path_name}`", + full path: `{full_path_name}`", ) } }; From de75bdef159e9bb2362457005aba63b34679810f Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Thu, 3 Oct 2024 20:58:11 +0200 Subject: [PATCH 07/19] Fix cargo fmt --- helix-term/src/handlers/completion.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/helix-term/src/handlers/completion.rs b/helix-term/src/handlers/completion.rs index 4c9a9a249c84..2c890b0f3e4a 100644 --- a/helix-term/src/handlers/completion.rs +++ b/helix-term/src/handlers/completion.rs @@ -468,7 +468,10 @@ fn path_completion( } }; - let edit_diff = typed_file_name.as_ref().map(|f| f.len()).unwrap_or_default(); + let edit_diff = typed_file_name + .as_ref() + .map(|f| f.len()) + .unwrap_or_default(); let transaction = Transaction::change( &text, From 59b2cf5337c88269d6b0d0afa02a92a72f53976d Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Thu, 3 Oct 2024 21:44:29 +0200 Subject: [PATCH 08/19] Fix byte offsets in `matched_path` --- helix-term/src/handlers/completion.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/helix-term/src/handlers/completion.rs b/helix-term/src/handlers/completion.rs index 2c890b0f3e4a..9d0a2dbdebf3 100644 --- a/helix-term/src/handlers/completion.rs +++ b/helix-term/src/handlers/completion.rs @@ -321,7 +321,10 @@ fn path_completion( let line_until_cursor = text.slice(start..cursor).regex_input_at(..); let (dir_path, typed_file_name) = PATH_REGEX.search(line_until_cursor).and_then(|m| { - let matched_path = &text.byte_slice(m.start()..m.end()).to_string(); + let start_byte = text.char_to_byte(start); + let matched_path = &text + .byte_slice((start_byte + m.start())..(start_byte + m.end())) + .to_string(); // resolve home dir (~/, 
$HOME/, ${HOME}/) on unix #[cfg(unix)] From b08e6b60c8ab0f9c514e43378ff6ff3440a6d25e Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Sun, 6 Oct 2024 18:18:49 +0200 Subject: [PATCH 09/19] implement robust path detection and env expansion --- Cargo.lock | 10 +- helix-stdx/Cargo.toml | 2 + helix-stdx/src/env.rs | 126 ++++++++++++++++++++++- helix-stdx/src/path.rs | 223 ++++++++++++++++++++++++++++++++++++++++- 4 files changed, 354 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d33f430f92c2..d2aa5ac4524d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1427,6 +1427,8 @@ dependencies = [ "bitflags 2.6.0", "dunce", "etcetera", + "once_cell", + "regex-automata", "regex-cursor", "ropey", "rustix", @@ -2039,9 +2041,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.5" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" +checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" dependencies = [ "aho-corasick", "memchr", @@ -2063,9 +2065,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.2" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "ropey" diff --git a/helix-stdx/Cargo.toml b/helix-stdx/Cargo.toml index 1c0d06ab1249..9cac1bc0aad0 100644 --- a/helix-stdx/Cargo.toml +++ b/helix-stdx/Cargo.toml @@ -18,6 +18,8 @@ ropey = { version = "1.6.1", default-features = false } which = "6.0" regex-cursor = "0.1.4" bitflags = "2.6" +once_cell = "1.19" +regex-automata = "0.4.8" [target.'cfg(windows)'.dependencies] windows-sys = { version = "0.59", features = ["Win32_Foundation", "Win32_Security", "Win32_Security_Authorization", "Win32_Storage_FileSystem", "Win32_System_Threading"] 
} diff --git a/helix-stdx/src/env.rs b/helix-stdx/src/env.rs index 51450d225870..d29a98fdebed 100644 --- a/helix-stdx/src/env.rs +++ b/helix-stdx/src/env.rs @@ -1,9 +1,12 @@ use std::{ - ffi::OsStr, + borrow::Cow, + ffi::{OsStr, OsString}, path::{Path, PathBuf}, sync::RwLock, }; +use once_cell::sync::Lazy; + static CWD: RwLock> = RwLock::new(None); // Get the current working directory. @@ -59,6 +62,93 @@ pub fn which>( }) } +fn find_brace_end(src: &[u8]) -> Option { + use regex_automata::meta::Regex; + + static REGEX: Lazy = Lazy::new(|| Regex::builder().build("[{}]").unwrap()); + let mut depth = 0; + for mat in REGEX.find_iter(src) { + let pos = mat.start(); + match src[pos] { + b'{' => depth += 1, + b'}' if depth == 0 => return Some(pos), + b'}' => depth -= 1, + _ => unreachable!(), + } + } + None +} + +fn expand_impl(src: &OsStr, mut resolve: impl FnMut(&OsStr) -> Option) -> Cow { + use regex_automata::meta::Regex; + + static REGEX: Lazy = Lazy::new(|| { + Regex::builder() + .build_many(&[ + r"\$\{([^\}:]+):-", + r"\$\{([^\}:]+):=", + r"\$\{([^\}-]+)-", + r"\$\{([^\}=]+)=", + r"\$\{([^\}]+)", + r"\$(\w+)", + ]) + .unwrap() + }); + + let bytes = src.as_encoded_bytes(); + let mut res = Vec::with_capacity(bytes.len()); + let mut pos = 0; + for captures in REGEX.captures_iter(bytes) { + let mat = captures.get_match().unwrap(); + let pattern_id = mat.pattern().as_usize(); + let mut range = mat.range(); + let var = &bytes[captures.get_group(1).unwrap().range()]; + let default = if pattern_id != 5 { + let Some(bracket_pos) = find_brace_end(&bytes[range.end..]) else { + break; + }; + let default = &bytes[range.end..range.end + bracket_pos]; + range.end += bracket_pos + 1; + default + } else { + &[] + }; + // safety: this is a codepoint aligned substring of an osstr (always valid) + let var = unsafe { OsStr::from_encoded_bytes_unchecked(var) }; + let expansion = resolve(var); + let expansion = match &expansion { + Some(val) => { + if val.is_empty() && pattern_id < 2 { + 
default + } else { + val.as_encoded_bytes() + } + } + None => default, + }; + res.extend_from_slice(&bytes[pos..range.start]); + pos = range.end; + res.extend_from_slice(expansion); + } + if pos == 0 { + src.into() + } else { + res.extend_from_slice(&bytes[pos..]); + // safety: this is a composition of valid osstr (and codepoint aligned slices which are also valid) + unsafe { OsString::from_encoded_bytes_unchecked(res) }.into() + } +} + +/// performs substitution of enviorment variables. Supports the following (POSIX) syntax: +/// +/// * `$`, `${}` +/// * `${:-}`, `${-}` +/// * `${:=}`, `${=default}` +/// +pub fn expand + ?Sized>(src: &S) -> Cow { + expand_impl(src.as_ref(), |var| std::env::var_os(var)) +} + #[derive(Debug)] pub struct ExecutableNotFoundError { command: String, @@ -75,7 +165,9 @@ impl std::error::Error for ExecutableNotFoundError {} #[cfg(test)] mod tests { - use super::{current_working_dir, set_current_working_dir}; + use std::ffi::{OsStr, OsString}; + + use super::{current_working_dir, expand_impl, set_current_working_dir}; #[test] fn current_dir_is_set() { @@ -88,4 +180,34 @@ mod tests { let cwd = current_working_dir(); assert_eq!(cwd, new_path); } + + macro_rules! 
assert_env_expand { + ($env: expr, $lhs: expr, $rhs: expr) => { + assert_eq!(&*expand_impl($lhs.as_ref(), $env), OsStr::new($rhs)); + }; + } + + /// paths that should work on all platforms + #[test] + fn test_env_expand() { + let env = |var: &OsStr| -> Option { + match var.to_str().unwrap() { + "FOO" => Some("foo".into()), + "EMPTY" => Some("".into()), + _ => None, + } + }; + assert_env_expand!(env, "pass_trough", "pass_trough"); + assert_env_expand!(env, "$FOO", "foo"); + assert_env_expand!(env, "bar/$FOO/baz", "bar/foo/baz"); + assert_env_expand!(env, "bar/${FOO}/baz", "bar/foo/baz"); + assert_env_expand!(env, "baz/${BAR:-bar}/foo", "baz/bar/foo"); + assert_env_expand!(env, "baz/${BAR:=bar}/foo", "baz/bar/foo"); + assert_env_expand!(env, "baz/${BAR-bar}/foo", "baz/bar/foo"); + assert_env_expand!(env, "baz/${BAR=bar}/foo", "baz/bar/foo"); + assert_env_expand!(env, "baz/${EMPTY:-bar}/foo", "baz/bar/foo"); + assert_env_expand!(env, "baz/${EMPTY:=bar}/foo", "baz/bar/foo"); + assert_env_expand!(env, "baz/${EMPTY-bar}/foo", "baz//foo"); + assert_env_expand!(env, "baz/${EMPTY=bar}/foo", "baz//foo"); + } } diff --git a/helix-stdx/src/path.rs b/helix-stdx/src/path.rs index 03a05d13283b..a88f8ca1d084 100644 --- a/helix-stdx/src/path.rs +++ b/helix-stdx/src/path.rs @@ -1,8 +1,12 @@ pub use etcetera::home_dir; +use once_cell::sync::Lazy; +use regex_cursor::{engines::meta::Regex, Input}; +use ropey::RopeSlice; use std::{ borrow::Cow, ffi::OsString, + ops::Range, path::{Component, Path, PathBuf, MAIN_SEPARATOR_STR}, }; @@ -201,6 +205,97 @@ pub fn get_truncated_path(path: impl AsRef) -> PathBuf { ret } +fn path_comonent_regex(windows: bool) -> String { + // TODO: support backslash path escape on windows (when using git bash for example) + let space_escape = if windows { r"[\^`]\s" } else { r"[\\]\s" }; + // partially baesd on what's allowed in an url but with some care to avoid + // false positivies (like any kind of brackets or quotes) + r"[\w@.\-+#$%?!,;~&]|".to_owned() + 
space_escape +} + +/// regex for delimeted enviorment captures like `${HOME}` +fn braced_env_regex(windows: bool) -> String { + // use + r"\$\{(?:".to_owned() + &path_comonent_regex(windows) + r"|[/:=])+\}" +} + +fn compile_path_regex( + prefix: &str, + postfix: &str, + match_single_file: bool, + windows: bool, +) -> Regex { + let first_component = format!( + "(?:{}|(?:{}))", + braced_env_regex(windows), + path_comonent_regex(windows) + ); + // for all components except the first we allow an equals so that `foo=/ + // bar/baz` does not include foo this is primarily inteded for url queries + // (where an equals is never in the first component) + let component = format!("(?:{first_component}|=)"); + let sep = if windows { r"[/\\]" } else { "/" }; + let url_prefix = r"[\w+\-.]+://??"; + let path_prefix = if windows { + // single slash handles most windows prefixes (like\\server\...) but `\ + // \?\C:\..` (and C:\) needs a specical since we don't allow : in path + // components (so that colon seperted paths and : work) + r"\\\\\?\\\w:|\w:|\\|" + } else { + "" + }; + let path_start = format!("(?:{first_component}+|~|{path_prefix}{url_prefix})"); + let optional = if match_single_file { + format!("|{path_start}") + } else { + String::new() + }; + let path_regex = format!( + "{prefix}(?:{path_start}?(?:(?:{sep}{component}+)+{sep}?|{sep}){optional}){postfix}" + ); + Regex::new(&path_regex).unwrap() +} + +/// if `src` ends with a path then this function returns the part of the slice +pub fn get_path_suffix(src: RopeSlice<'_>, match_single_file: bool) -> Option> { + let regex = if match_single_file { + static REGEX: Lazy = Lazy::new(|| compile_path_regex("", "$", true, cfg!(windows))); + &*REGEX + } else { + static REGEX: Lazy = Lazy::new(|| compile_path_regex("", "$", false, cfg!(windows))); + &*REGEX + }; + + regex + .find(Input::new(src)) + .map(|mat| src.byte_slice(mat.range())) +} + +/// returns an iterator of the **byte** ranges in src that contain a path +pub fn 
find_paths( + src: RopeSlice<'_>, + match_single_file: bool, +) -> impl Iterator> + '_ { + let regex = if match_single_file { + static REGEX: Lazy = Lazy::new(|| compile_path_regex("", "", true, cfg!(windows))); + &*REGEX + } else { + static REGEX: Lazy = Lazy::new(|| compile_path_regex("", "", false, cfg!(windows))); + &*REGEX + }; + regex.find_iter(Input::new(src)).map(|mat| mat.range()) +} + +/// performs substitution of `~` and enviorment variables, see [`env::expand`](crate::env::expand) and [`expand_tilde`] +pub fn expand + ?Sized>(path: &T) -> Cow<'_, Path> { + let path = path.as_ref(); + let path = expand_tilde(path); + match crate::env::expand(&*path) { + Cow::Borrowed(_) => path, + Cow::Owned(path) => PathBuf::from(path).into(), + } +} + #[cfg(test)] mod tests { use std::{ @@ -208,7 +303,10 @@ mod tests { path::{Component, Path}, }; - use crate::path; + use regex_cursor::Input; + use ropey::RopeSlice; + + use crate::path::{self, compile_path_regex}; #[test] fn expand_tilde() { @@ -228,4 +326,127 @@ mod tests { assert_ne!(component_count, 0); } } + + macro_rules! assert_match { + ($regex: expr, $haystack: expr) => { + let haystack = Input::new(RopeSlice::from($haystack)); + assert!( + $regex.is_match(haystack), + "regex should match {}", + $haystack + ); + }; + } + macro_rules! assert_no_match { + ($regex: expr, $haystack: expr) => { + let haystack = Input::new(RopeSlice::from($haystack)); + assert!( + !$regex.is_match(haystack), + "regex should not match {}", + $haystack + ); + }; + } + + macro_rules! 
assert_matches { + ($regex: expr, $haystack: expr, [$($matches: expr),*]) => { + let src = $haystack; + let matches: Vec<_> = $regex + .find_iter(Input::new(RopeSlice::from(src))) + .map(|it| &src[it.range()]) + .collect(); + assert_eq!(matches, vec![$($matches),*]); + }; + } + + /// linux-only path + #[test] + fn path_regex_unix() { + // due to ambigueity with the \ path seperator we can't support space escapes `\ ` on windows + let regex = compile_path_regex("^", "$", false, false); + assert_match!(regex, "${FOO}/hello\\ world"); + assert_match!(regex, "${FOO}/\\ "); + } + + /// windows-only paths + #[test] + fn path_regex_windows() { + let regex = compile_path_regex("^", "$", false, true); + assert_match!(regex, "${FOO}/hello^ world"); + assert_match!(regex, "${FOO}/hello` world"); + assert_match!(regex, "${FOO}/^ "); + assert_match!(regex, "${FOO}/` "); + assert_match!(regex, r"foo\bar"); + assert_match!(regex, r"foo\bar"); + assert_match!(regex, r"..\bar"); + assert_match!(regex, r"..\"); + assert_match!(regex, r"C:\"); + assert_match!(regex, r"\\?\C:\foo"); + assert_match!(regex, r"\\server\foo"); + } + + /// paths that should work on all platforms + #[test] + fn path_regex() { + for windows in [false, true] { + let regex = compile_path_regex("^", "$", false, windows); + assert_no_match!(regex, "foo"); + assert_no_match!(regex, ""); + assert_match!(regex, "https://github.com/notifications/query=foo"); + assert_match!(regex, "file:///foo/bar"); + assert_match!(regex, "foo/bar"); + assert_match!(regex, "$HOME/foo"); + assert_match!(regex, "${FOO:-bar}/baz"); + assert_match!(regex, "foo/bar_"); + assert_match!(regex, "/home/bar"); + assert_match!(regex, "foo/"); + assert_match!(regex, "./"); + assert_match!(regex, "../"); + assert_match!(regex, "../.."); + assert_match!(regex, "./foo"); + assert_match!(regex, "./foo.rs"); + assert_match!(regex, "/"); + assert_match!(regex, "~/"); + assert_match!(regex, "~/foo"); + assert_match!(regex, "~/foo"); + 
assert_match!(regex, "~/foo/../baz"); + assert_match!(regex, "${HOME}/foo"); + assert_match!(regex, "$HOME/foo"); + assert_match!(regex, "/$FOO"); + assert_match!(regex, "/${FOO}"); + assert_match!(regex, "/${FOO}/${BAR}"); + assert_match!(regex, "/${FOO}/${BAR}/foo"); + assert_match!(regex, "/${FOO}/${BAR}"); + assert_match!(regex, "${FOO}/hello_$WORLD"); + assert_match!(regex, "${FOO}/hello_${WORLD}"); + let regex = compile_path_regex("", "", false, windows); + assert_no_match!(regex, ""); + assert_matches!( + regex, + r#"${FOO}/hello_${WORLD} ${FOO}/hello_${WORLD} foo("./bar", "/home/foo")""#, + [ + "${FOO}/hello_${WORLD}", + "${FOO}/hello_${WORLD}", + "./bar", + "/home/foo" + ] + ); + assert_matches!( + regex, + r#"--> helix-stdx/src/path.rs:427:13"#, + ["helix-stdx/src/path.rs"] + ); + assert_matches!( + regex, + r#"PATH=/foo/bar:/bar/baz:${foo:-/foo}/bar:${PATH}"#, + ["/foo/bar", "/bar/baz", "${foo:-/foo}/bar"] + ); + let regex = compile_path_regex("^", "$", true, windows); + assert_no_match!(regex, ""); + assert_match!(regex, "foo"); + assert_match!(regex, "foo/"); + assert_match!(regex, "$FOO"); + assert_match!(regex, "${BAR}"); + } + } } From a3e12ac7ef657fe183fa7e9fff8f08a9639302ec Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Sun, 6 Oct 2024 18:19:12 +0200 Subject: [PATCH 10/19] use path::expand in path completions --- helix-term/src/handlers/completion.rs | 106 +++++++++----------------- 1 file changed, 37 insertions(+), 69 deletions(-) diff --git a/helix-term/src/handlers/completion.rs b/helix-term/src/handlers/completion.rs index 9d0a2dbdebf3..e81f7468430e 100644 --- a/helix-term/src/handlers/completion.rs +++ b/helix-term/src/handlers/completion.rs @@ -1,9 +1,11 @@ +use std::borrow::Cow; use std::collections::HashSet; -use std::ffi::OsString; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; +use std::str::FromStr; use std::sync::Arc; use std::time::Duration; +use ::url::Url; use arc_swap::ArcSwap; use futures_util::future::BoxFuture; use 
futures_util::stream::FuturesUnordered; @@ -16,12 +18,11 @@ use helix_event::{ }; use helix_lsp::lsp; use helix_lsp::util::pos_to_lsp_pos; -use helix_stdx::path::{canonicalize, fold_home_dir}; -use helix_stdx::rope::{self, RopeSliceExt}; +use helix_stdx::path::{self, canonicalize, fold_home_dir, get_path_suffix}; +use helix_stdx::rope::RopeSliceExt; use helix_view::document::{Mode, SavePoint}; use helix_view::handlers::lsp::CompletionEvent; use helix_view::{DocumentId, Editor, ViewId}; -use once_cell::sync::Lazy; use tokio::sync::mpsc::Sender; use tokio::time::Instant; use tokio_stream::StreamExt; @@ -311,75 +312,42 @@ fn path_completion( return None; } - // TODO find a good regex for most use cases (especially Windows, which is not yet covered...) - // currently only one path match per line is possible in unix - static PATH_REGEX: Lazy = - Lazy::new(|| rope::Regex::new(r"((?:~|\$HOME|\$\{HOME\})?(?:\.{0,2}/)+.*)$").unwrap()); - let cur_line = text.char_to_line(cursor); let start = text.line_to_char(cur_line).max(cursor.saturating_sub(1000)); - let line_until_cursor = text.slice(start..cursor).regex_input_at(..); - - let (dir_path, typed_file_name) = PATH_REGEX.search(line_until_cursor).and_then(|m| { - let start_byte = text.char_to_byte(start); - let matched_path = &text - .byte_slice((start_byte + m.start())..(start_byte + m.end())) - .to_string(); - - // resolve home dir (~/, $HOME/, ${HOME}/) on unix - #[cfg(unix)] - let mut path = { - static HOME_DIR: Lazy> = Lazy::new(|| std::env::var_os("HOME")); - - let home_resolved_path = if let Some(home) = &*HOME_DIR { - let first_separator_after_home = if matched_path.starts_with("~/") { - Some(1) - } else if matched_path.starts_with("$HOME") { - Some(5) - } else if matched_path.starts_with("${HOME}") { - Some(7) - } else { - None - }; - if let Some(start_char) = first_separator_after_home { - let mut path = home.to_owned(); - path.push(&matched_path[start_char..]); - path - } else { - matched_path.into() - } + let 
line_until_cursor = text.slice(start..cursor); + + let (dir_path, typed_file_name) = + get_path_suffix(line_until_cursor, false).and_then(|matched_path| { + let matched_path = Cow::from(matched_path); + let path: Cow<_> = if matched_path.starts_with("file://") { + Url::from_str(&matched_path) + .ok() + .and_then(|url| url.to_file_path().ok())? + .into() } else { - matched_path.into() + Path::new(&*matched_path).into() }; - PathBuf::from(home_resolved_path) - }; - #[cfg(not(unix))] - let mut path = PathBuf::from(matched_path); - - if path.is_relative() { - if let Some(doc_path) = doc.path().and_then(|dp| dp.parent()) { - path = doc_path.join(path); - } else if let Ok(work_dir) = std::env::current_dir() { - path = work_dir.join(path); + let path = path::expand(&path); + let parent_dir = doc.path().and_then(|dp| dp.parent()); + let path = match parent_dir { + Some(parent_dir) if path.is_absolute() => parent_dir.join(&path), + _ => path.into_owned(), + }; + let ends_with_slash = matches!(matched_path.as_bytes().last(), Some(b'/' | b'\\')); + // check if there are chars after the last slash, and if these chars represent a directory + match std::fs::metadata(path.clone()).ok() { + Some(m) if m.is_dir() && ends_with_slash => { + Some((PathBuf::from(path.as_path()), None)) + } + _ if !ends_with_slash => path.parent().map(|parent_path| { + ( + PathBuf::from(parent_path), + path.file_name().and_then(|f| f.to_str().map(String::from)), + ) + }), + _ => None, } - } - let ends_with_slash = match matched_path.chars().last() { - Some(std::path::MAIN_SEPARATOR) => true, - None => return None, - _ => false, - }; - // check if there are chars after the last slash, and if these chars represent a directory - match std::fs::metadata(path.clone()).ok() { - Some(m) if m.is_dir() && ends_with_slash => Some((PathBuf::from(path.as_path()), None)), - _ if !ends_with_slash => path.parent().map(|parent_path| { - ( - PathBuf::from(parent_path), - path.file_name().and_then(|f| 
f.to_str().map(String::from)), - ) - }), - _ => None, - } - })?; + })?; // The async file accessor functions of tokio were considered, but they were a bit slower // and less ergonomic than just using the std functions in a separate "thread" From 133460626e39fad79c3e1a9104c50c82c6e27c01 Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Sun, 6 Oct 2024 19:46:08 +0200 Subject: [PATCH 11/19] use new path detection and expansions in goto_file --- helix-term/src/commands.rs | 68 +++++++++++++------------------------- 1 file changed, 23 insertions(+), 45 deletions(-) diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index 6e037a471ffc..ef2afaebecf3 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -6,7 +6,7 @@ pub use dap::*; use futures_util::FutureExt; use helix_event::status; use helix_stdx::{ - path::expand_tilde, + path::{self, find_paths}, rope::{self, RopeSliceExt}, }; use helix_vcs::{FileChange, Hunk}; @@ -1272,53 +1272,31 @@ fn goto_file_impl(cx: &mut Context, action: Action) { .unwrap_or_default(); let paths: Vec<_> = if selections.len() == 1 && primary.len() == 1 { - // Secial case: if there is only one one-width selection, try to detect the - // path under the cursor. 
-        let is_valid_path_char = |c: &char| {
-            #[cfg(target_os = "windows")]
-            let valid_chars = &[
-                '@', '/', '\\', '.', '-', '_', '+', '#', '$', '%', '{', '}', '[', ']', ':', '!',
-                '~', '=',
-            ];
-            #[cfg(not(target_os = "windows"))]
-            let valid_chars = &['@', '/', '.', '-', '_', '+', '#', '$', '%', '~', '=', ':'];
-
-            valid_chars.contains(c) || c.is_alphabetic() || c.is_numeric()
-        };
-
-        let cursor_pos = primary.cursor(text.slice(..));
-        let pre_cursor_pos = cursor_pos.saturating_sub(1);
-        let post_cursor_pos = cursor_pos + 1;
-        let start_pos = if is_valid_path_char(&text.char(cursor_pos)) {
-            cursor_pos
-        } else if is_valid_path_char(&text.char(pre_cursor_pos)) {
-            pre_cursor_pos
-        } else {
-            post_cursor_pos
-        };
-
-        let prefix_len = text
-            .chars_at(start_pos)
-            .reversed()
-            .take_while(is_valid_path_char)
-            .count();
-
-        let postfix_len = text
-            .chars_at(start_pos)
-            .take_while(is_valid_path_char)
-            .count();
-
-        let path: String = text
-            .slice((start_pos - prefix_len)..(start_pos + postfix_len))
-            .into();
-        log::debug!("goto_file auto-detected path: {}", path);
-
-        vec![path]
+        let mut pos = primary.cursor(text.slice(..));
+        pos = text.char_to_byte(pos);
+        let search_start = text
+            .line_to_byte(text.byte_to_line(pos))
+            .max(pos.saturating_sub(1000));
+        let search_end = text
+            .line_to_byte(text.byte_to_line(pos) + 1)
+            .min(pos + 1000);
+        let search_range = text.slice(search_start..search_end);
+        // we also allow paths that are next to the cursor (can be ambiguous but
+        // rarely so in practice) so that gf on quoted/braced path works (not sure about this
+        // but apparently that is how gf has worked historically in helix)
+        let path = find_paths(search_range, true)
+            .inspect(|mat| log::trace!("{mat:?} {:?}", pos - search_start))
+            .take_while(|range| search_start + range.start <= pos + 1)
+            .find(|range| pos <= search_start + range.end)
+            .map(|range| Cow::from(search_range.byte_slice(range)));
+        log::debug!("goto_file auto-detected path: {path:?}");
+        let 
path = path.unwrap_or_else(|| primary.fragment(text.slice(..))); + vec![path.into_owned()] } else { // Otherwise use each selection, trimmed. selections .fragments(text.slice(..)) - .map(|sel| sel.trim().to_string()) + .map(|sel| sel.trim().to_owned()) .filter(|sel| !sel.is_empty()) .collect() }; @@ -1329,7 +1307,7 @@ fn goto_file_impl(cx: &mut Context, action: Action) { continue; } - let path = expand_tilde(Cow::from(PathBuf::from(sel))); + let path = path::expand(&sel); let path = &rel_path.join(path); if path.is_dir() { let picker = ui::file_picker(path.into(), &cx.editor.config()); From 101bcc7bd372d59c5bec30b36bdb7273cd08496c Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Mon, 14 Oct 2024 14:56:44 +0200 Subject: [PATCH 12/19] Fix some typos. --- helix-stdx/src/path.rs | 31 +++++++++++++++---------------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/helix-stdx/src/path.rs b/helix-stdx/src/path.rs index a88f8ca1d084..72b233cca90c 100644 --- a/helix-stdx/src/path.rs +++ b/helix-stdx/src/path.rs @@ -205,7 +205,7 @@ pub fn get_truncated_path(path: impl AsRef) -> PathBuf { ret } -fn path_comonent_regex(windows: bool) -> String { +fn path_component_regex(windows: bool) -> String { // TODO: support backslash path escape on windows (when using git bash for example) let space_escape = if windows { r"[\^`]\s" } else { r"[\\]\s" }; // partially baesd on what's allowed in an url but with some care to avoid @@ -213,10 +213,9 @@ fn path_comonent_regex(windows: bool) -> String { r"[\w@.\-+#$%?!,;~&]|".to_owned() + space_escape } -/// regex for delimeted enviorment captures like `${HOME}` +/// Regex for delimited environment captures like `${HOME}`. 
fn braced_env_regex(windows: bool) -> String { - // use - r"\$\{(?:".to_owned() + &path_comonent_regex(windows) + r"|[/:=])+\}" + r"\$\{(?:".to_owned() + &path_component_regex(windows) + r"|[/:=])+\}" } fn compile_path_regex( @@ -228,18 +227,18 @@ fn compile_path_regex( let first_component = format!( "(?:{}|(?:{}))", braced_env_regex(windows), - path_comonent_regex(windows) + path_component_regex(windows) ); - // for all components except the first we allow an equals so that `foo=/ - // bar/baz` does not include foo this is primarily inteded for url queries + // For all components except the first we allow an equals so that `foo=/ + // bar/baz` does not include foo. This is primarily intended for url queries // (where an equals is never in the first component) let component = format!("(?:{first_component}|=)"); let sep = if windows { r"[/\\]" } else { "/" }; let url_prefix = r"[\w+\-.]+://??"; let path_prefix = if windows { // single slash handles most windows prefixes (like\\server\...) but `\ - // \?\C:\..` (and C:\) needs a specical since we don't allow : in path - // components (so that colon seperted paths and : work) + // \?\C:\..` (and C:\) needs special handling, since we don't allow : in path + // components (so that colon separated paths and : work) r"\\\\\?\\\w:|\w:|\\|" } else { "" @@ -256,7 +255,7 @@ fn compile_path_regex( Regex::new(&path_regex).unwrap() } -/// if `src` ends with a path then this function returns the part of the slice +/// If `src` ends with a path then this function returns the part of the slice. 
pub fn get_path_suffix(src: RopeSlice<'_>, match_single_file: bool) -> Option> { let regex = if match_single_file { static REGEX: Lazy = Lazy::new(|| compile_path_regex("", "$", true, cfg!(windows))); @@ -271,7 +270,7 @@ pub fn get_path_suffix(src: RopeSlice<'_>, match_single_file: bool) -> Option, match_single_file: bool, @@ -286,7 +285,7 @@ pub fn find_paths( regex.find_iter(Input::new(src)).map(|mat| mat.range()) } -/// performs substitution of `~` and enviorment variables, see [`env::expand`](crate::env::expand) and [`expand_tilde`] +/// Performs substitution of `~` and environment variables, see [`env::expand`](crate::env::expand) and [`expand_tilde`] pub fn expand + ?Sized>(path: &T) -> Cow<'_, Path> { let path = path.as_ref(); let path = expand_tilde(path); @@ -359,16 +358,16 @@ mod tests { }; } - /// linux-only path + /// Linux-only path #[test] fn path_regex_unix() { - // due to ambigueity with the \ path seperator we can't support space escapes `\ ` on windows + // due to ambiguity with the `\` path separator we can't support space escapes `\ ` on windows let regex = compile_path_regex("^", "$", false, false); assert_match!(regex, "${FOO}/hello\\ world"); assert_match!(regex, "${FOO}/\\ "); } - /// windows-only paths + /// Windows-only paths #[test] fn path_regex_windows() { let regex = compile_path_regex("^", "$", false, true); @@ -385,7 +384,7 @@ mod tests { assert_match!(regex, r"\\server\foo"); } - /// paths that should work on all platforms + /// Paths that should work on all platforms #[test] fn path_regex() { for windows in [false, true] { From 8af7c670c3b8eb3bdcb209694205a9ac15fbfa04 Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Mon, 14 Oct 2024 17:49:53 +0200 Subject: [PATCH 13/19] Apply review suggestions, and other smaller fixes. 
--- helix-core/src/completion.rs | 5 +- helix-term/src/handlers/completion.rs | 200 +++--------------- helix-term/src/handlers/completion/item.rs | 43 ++++ helix-term/src/handlers/completion/path.rs | 186 ++++++++++++++++ helix-term/src/handlers/completion/resolve.rs | 2 +- helix-term/src/ui/completion.rs | 108 ++-------- helix-term/src/ui/editor.rs | 3 +- helix-term/src/ui/mod.rs | 2 +- helix-view/src/document.rs | 2 +- 9 files changed, 281 insertions(+), 270 deletions(-) create mode 100644 helix-term/src/handlers/completion/item.rs create mode 100644 helix-term/src/handlers/completion/path.rs diff --git a/helix-core/src/completion.rs b/helix-core/src/completion.rs index 765d947b0679..0bd111eb4767 100644 --- a/helix-core/src/completion.rs +++ b/helix-core/src/completion.rs @@ -1,9 +1,12 @@ +use std::borrow::Cow; + use crate::Transaction; #[derive(Debug, PartialEq, Clone)] pub struct CompletionItem { pub transaction: Transaction, - pub label: String, + pub label: Cow<'static, str>, + pub kind: Cow<'static, str>, /// Containing Markdown pub documentation: String, } diff --git a/helix-term/src/handlers/completion.rs b/helix-term/src/handlers/completion.rs index e81f7468430e..ce9ed7f7ce29 100644 --- a/helix-term/src/handlers/completion.rs +++ b/helix-term/src/handlers/completion.rs @@ -1,31 +1,26 @@ -use std::borrow::Cow; use std::collections::HashSet; -use std::path::{Path, PathBuf}; -use std::str::FromStr; +use std::sync::atomic::AtomicBool; use std::sync::Arc; use std::time::Duration; -use ::url::Url; use arc_swap::ArcSwap; -use futures_util::future::BoxFuture; use futures_util::stream::FuturesUnordered; use futures_util::FutureExt; use helix_core::chars::char_is_word; use helix_core::syntax::LanguageServerFeature; -use helix_core::Transaction; use helix_event::{ cancelable_future, cancelation, register_hook, send_blocking, CancelRx, CancelTx, }; use helix_lsp::lsp; use helix_lsp::util::pos_to_lsp_pos; -use helix_stdx::path::{self, canonicalize, fold_home_dir, 
get_path_suffix}; use helix_stdx::rope::RopeSliceExt; use helix_view::document::{Mode, SavePoint}; use helix_view::handlers::lsp::CompletionEvent; use helix_view::{DocumentId, Editor, ViewId}; +use path::path_completion; use tokio::sync::mpsc::Sender; use tokio::time::Instant; -use tokio_stream::StreamExt; +use tokio_stream::StreamExt as _; use crate::commands; use crate::compositor::Compositor; @@ -35,10 +30,13 @@ use crate::job::{dispatch, dispatch_blocking}; use crate::keymap::MappableCommand; use crate::ui::editor::InsertEvent; use crate::ui::lsp::SignatureHelp; -use crate::ui::{self, CompletionItem, LspCompletionItem, PathCompletionItem, PathKind, Popup}; +use crate::ui::{self, Popup}; use super::Handlers; +pub use item::{CompletionItem, LspCompletionItem}; pub use resolve::ResolveHandler; +mod item; +mod path; mod resolve; #[derive(Debug, PartialEq, Eq, Clone, Copy)] @@ -204,6 +202,7 @@ fn request_completion( // necessary from our side too. trigger.pos = cursor; let trigger_text = text.slice(..cursor); + let cancel_completion = Arc::new(AtomicBool::new(false)); let mut seen_language_servers = HashSet::new(); let mut futures: FuturesUnordered<_> = doc @@ -271,7 +270,12 @@ fn request_completion( } .boxed() }) - .chain(path_completion(cursor, text.clone(), doc)) + .chain(path_completion( + cursor, + text.clone(), + doc, + cancel_completion.clone(), + )) .collect(); let future = async move { @@ -292,7 +296,13 @@ fn request_completion( let ui = compositor.find::().unwrap(); ui.last_insert.1.push(InsertEvent::RequestCompletion); tokio::spawn(async move { - let items = cancelable_future(future, cancel).await.unwrap_or_default(); + let items = match cancelable_future(future, cancel).await { + Some(v) => v, + None => { + cancel_completion.store(true, std::sync::atomic::Ordering::Relaxed); + Vec::new() + } + }; if items.is_empty() { return; } @@ -303,167 +313,6 @@ fn request_completion( }); } -fn path_completion( - cursor: usize, - text: helix_core::Rope, - doc: 
&helix_view::Document, -) -> Option>>> { - if !doc.supports_path_completion() { - return None; - } - - let cur_line = text.char_to_line(cursor); - let start = text.line_to_char(cur_line).max(cursor.saturating_sub(1000)); - let line_until_cursor = text.slice(start..cursor); - - let (dir_path, typed_file_name) = - get_path_suffix(line_until_cursor, false).and_then(|matched_path| { - let matched_path = Cow::from(matched_path); - let path: Cow<_> = if matched_path.starts_with("file://") { - Url::from_str(&matched_path) - .ok() - .and_then(|url| url.to_file_path().ok())? - .into() - } else { - Path::new(&*matched_path).into() - }; - let path = path::expand(&path); - let parent_dir = doc.path().and_then(|dp| dp.parent()); - let path = match parent_dir { - Some(parent_dir) if path.is_absolute() => parent_dir.join(&path), - _ => path.into_owned(), - }; - let ends_with_slash = matches!(matched_path.as_bytes().last(), Some(b'/' | b'\\')); - // check if there are chars after the last slash, and if these chars represent a directory - match std::fs::metadata(path.clone()).ok() { - Some(m) if m.is_dir() && ends_with_slash => { - Some((PathBuf::from(path.as_path()), None)) - } - _ if !ends_with_slash => path.parent().map(|parent_path| { - ( - PathBuf::from(parent_path), - path.file_name().and_then(|f| f.to_str().map(String::from)), - ) - }), - _ => None, - } - })?; - - // The async file accessor functions of tokio were considered, but they were a bit slower - // and less ergonomic than just using the std functions in a separate "thread" - let future = tokio::task::spawn_blocking(move || { - let Ok(read_dir) = std::fs::read_dir(&dir_path) else { - return Vec::new(); - }; - - read_dir - .filter_map(Result::ok) - .filter_map(|dir_entry| dir_entry.metadata().ok().map(|md| (dir_entry.path(), md))) - .map(|(path, md)| { - let file_name = path - .file_name() - .unwrap_or_default() - .to_string_lossy() - .to_string(); - - let full_path = fold_home_dir(canonicalize(path)); - let 
full_path_name = full_path.to_string_lossy(); - - let kind = if md.is_symlink() { - PathKind::Link - } else if md.is_dir() { - PathKind::Folder - } else { - #[cfg(unix)] - { - use std::os::unix::fs::FileTypeExt; - if md.file_type().is_block_device() { - PathKind::Block - } else if md.file_type().is_socket() { - PathKind::Socket - } else if md.file_type().is_char_device() { - PathKind::CharacterDevice - } else if md.file_type().is_fifo() { - PathKind::Fifo - } else { - PathKind::File - } - } - #[cfg(not(unix))] - PathKind::File - }; - - let documentation = { - #[cfg(unix)] - { - use std::os::unix::prelude::PermissionsExt; - let mode = md.permissions().mode(); - - let perms = [ - (libc::S_IRUSR, 'r'), - (libc::S_IWUSR, 'w'), - (libc::S_IXUSR, 'x'), - (libc::S_IRGRP, 'r'), - (libc::S_IWGRP, 'w'), - (libc::S_IXGRP, 'x'), - (libc::S_IROTH, 'r'), - (libc::S_IWOTH, 'w'), - (libc::S_IXOTH, 'x'), - ] - .into_iter() - .fold( - String::with_capacity(9), - |mut acc, (p, s)| { - // This cast is necessary on some platforms such as macos as `mode_t` is u16 there - #[allow(clippy::unnecessary_cast)] - acc.push(if mode & (p as u32) > 0 { s } else { '-' }); - acc - }, - ); - - // TODO it would be great to be able to individually color the documentation, - // but this will likely require a custom doc implementation (i.e. 
not `lsp::Documentation`) - // and/or different rendering in completion.rs - format!( - "type: `{kind}`\n\ - permissions: `[{perms}]`\n\ - full path: `{full_path_name}`", - ) - } - #[cfg(not(unix))] - { - format!( - "type: `{kind}`\n\ - full path: `{full_path_name}`", - ) - } - }; - - let edit_diff = typed_file_name - .as_ref() - .map(|f| f.len()) - .unwrap_or_default(); - - let transaction = Transaction::change( - &text, - std::iter::once((cursor - edit_diff, cursor, Some(file_name.as_str().into()))), - ); - - CompletionItem::Path(PathCompletionItem { - kind, - item: helix_core::CompletionItem { - transaction, - label: file_name, - documentation, - }, - }) - }) - .collect::>() - }); - - Some(async move { Ok(future.await?) }.boxed()) -} - fn show_completion( editor: &mut Editor, compositor: &mut Compositor, @@ -520,8 +369,11 @@ pub fn trigger_auto_completion( }) if triggers.iter().any(|trigger| text.ends_with(trigger))) }); - let trigger_path_completion = - text.ends_with(std::path::MAIN_SEPARATOR_STR) && doc.supports_path_completion(); + let trigger_path_completion = matches!( + text.get_bytes_at(text.len_bytes()) + .and_then(|t| t.reversed().next()), + Some(b'/' | b'\\') + ) && doc.path_completion_enabled(); if is_trigger_char || trigger_path_completion { send_blocking( diff --git a/helix-term/src/handlers/completion/item.rs b/helix-term/src/handlers/completion/item.rs new file mode 100644 index 000000000000..501a1dd50f51 --- /dev/null +++ b/helix-term/src/handlers/completion/item.rs @@ -0,0 +1,43 @@ +use helix_lsp::{lsp, LanguageServerId}; + +#[derive(Debug, PartialEq, Clone)] +pub struct LspCompletionItem { + pub item: lsp::CompletionItem, + pub provider: LanguageServerId, + pub resolved: bool, +} + +#[derive(Debug, PartialEq, Clone)] +pub enum CompletionItem { + Lsp(LspCompletionItem), + Other(helix_core::CompletionItem), +} + +impl PartialEq for LspCompletionItem { + fn eq(&self, other: &CompletionItem) -> bool { + match other { + CompletionItem::Lsp(other) 
=> self == other, + _ => false, + } + } +} + +impl PartialEq for helix_core::CompletionItem { + fn eq(&self, other: &CompletionItem) -> bool { + match other { + CompletionItem::Other(other) => self == other, + _ => false, + } + } +} + +impl CompletionItem { + pub fn preselect(&self) -> bool { + match self { + CompletionItem::Lsp(LspCompletionItem { item, .. }) => item.preselect.unwrap_or(false), + CompletionItem::Other(_) => false, + } + } +} + + diff --git a/helix-term/src/handlers/completion/path.rs b/helix-term/src/handlers/completion/path.rs new file mode 100644 index 000000000000..5c8fa4753b7f --- /dev/null +++ b/helix-term/src/handlers/completion/path.rs @@ -0,0 +1,186 @@ +use std::{ + borrow::Cow, + path::{Path, PathBuf}, + str::FromStr as _, + sync::{atomic::AtomicBool, Arc}, +}; + +use futures_util::{future::BoxFuture, FutureExt as _}; +use helix_core as core; +use helix_core::Transaction; +use helix_stdx::path::{self, canonicalize, fold_home_dir, get_path_suffix}; +use helix_view::Document; +use url::Url; + +use super::item::CompletionItem; + +pub(crate) fn path_completion( + cursor: usize, + text: core::Rope, + doc: &Document, + cancel: Arc, +) -> Option>>> { + if !doc.path_completion_enabled() { + return None; + } + + let cur_line = text.char_to_line(cursor); + let start = text.line_to_char(cur_line).max(cursor.saturating_sub(1000)); + let line_until_cursor = text.slice(start..cursor); + + let (dir_path, typed_file_name) = + get_path_suffix(line_until_cursor, false).and_then(|matched_path| { + let matched_path = Cow::from(matched_path); + let path: Cow<_> = if matched_path.starts_with("file://") { + Url::from_str(&matched_path) + .ok() + .and_then(|url| url.to_file_path().ok())? 
+ .into() + } else { + Path::new(&*matched_path).into() + }; + let path = path::expand(&path); + let parent_dir = doc.path().and_then(|dp| dp.parent()); + let path = match parent_dir { + Some(parent_dir) if path.is_relative() => parent_dir.join(&path), + _ => path.into_owned(), + }; + let ends_with_slash = matches!(matched_path.as_bytes().last(), Some(b'/' | b'\\')); + if ends_with_slash { + Some((PathBuf::from(path.as_path()), None)) + } else { + path.parent().map(|parent_path| { + ( + PathBuf::from(parent_path), + path.file_name().and_then(|f| f.to_str().map(String::from)), + ) + }) + } + })?; + + if cancel.load(std::sync::atomic::Ordering::Relaxed) { + return None; + } + + // The async file accessor functions of tokio were considered, but they were a bit slower + // and less ergonomic than just using the std functions in a separate "thread" + let future = tokio::task::spawn_blocking(move || { + let Ok(read_dir) = std::fs::read_dir(&dir_path) else { + return Vec::new(); + }; + + if cancel.load(std::sync::atomic::Ordering::Relaxed) { + return Vec::new(); + } + + read_dir + .filter_map(Result::ok) + .filter_map(|dir_entry| { + dir_entry + .metadata() + .ok() + .map(|md| (dir_entry.file_name(), md)) + }) + .map_while(|(file_name, md)| { + if cancel.load(std::sync::atomic::Ordering::Relaxed) { + return None; + } + + let file_name_str = file_name.to_string_lossy().to_string(); + + let full_path = fold_home_dir(canonicalize(dir_path.join(file_name))); + let full_path_name = full_path.to_string_lossy(); + + let kind = if md.is_symlink() { + "link" + } else if md.is_dir() { + "folder" + } else { + #[cfg(unix)] + { + use std::os::unix::fs::FileTypeExt; + if md.file_type().is_block_device() { + "block" + } else if md.file_type().is_socket() { + "socket" + } else if md.file_type().is_char_device() { + "char_device" + } else if md.file_type().is_fifo() { + "fifo" + } else { + "file" + } + } + #[cfg(not(unix))] + "file" + }; + let kind = Cow::Borrowed(kind); + + let 
documentation = { + #[cfg(unix)] + { + use std::os::unix::prelude::PermissionsExt; + let mode = md.permissions().mode(); + + let perms = [ + (libc::S_IRUSR, 'r'), + (libc::S_IWUSR, 'w'), + (libc::S_IXUSR, 'x'), + (libc::S_IRGRP, 'r'), + (libc::S_IWGRP, 'w'), + (libc::S_IXGRP, 'x'), + (libc::S_IROTH, 'r'), + (libc::S_IWOTH, 'w'), + (libc::S_IXOTH, 'x'), + ] + .into_iter() + .fold( + String::with_capacity(9), + |mut acc, (p, s)| { + // This cast is necessary on some platforms such as macos as `mode_t` is u16 there + #[allow(clippy::unnecessary_cast)] + acc.push(if mode & (p as u32) > 0 { s } else { '-' }); + acc + }, + ); + + // TODO it would be great to be able to individually color the documentation, + // but this will likely require a custom doc implementation (i.e. not `lsp::Documentation`) + // and/or different rendering in completion.rs + format!( + "type: `{kind}`\n\ + permissions: `[{perms}]`\n\ + full path: `{full_path_name}`", + ) + } + #[cfg(not(unix))] + { + format!( + "type: `{kind}`\n\ + full path: `{full_path_name}`", + ) + } + }; + + let edit_diff = typed_file_name + .as_ref() + .map(|f| f.len()) + .unwrap_or_default(); + + let transaction = Transaction::change( + &text, + std::iter::once((cursor - edit_diff, cursor, Some((&file_name_str).into()))), + ); + + Some(CompletionItem::Other(core::CompletionItem { + kind, + transaction, + label: file_name_str.into(), + documentation, + })) + }) + .collect::>() + }); + + Some(async move { Ok(future.await?) }.boxed()) +} diff --git a/helix-term/src/handlers/completion/resolve.rs b/helix-term/src/handlers/completion/resolve.rs index 8c5674fe7e21..1d220908c103 100644 --- a/helix-term/src/handlers/completion/resolve.rs +++ b/helix-term/src/handlers/completion/resolve.rs @@ -9,7 +9,7 @@ use helix_view::Editor; use crate::handlers::completion::CompletionItem; use crate::job; -use crate::ui::LspCompletionItem; +use super::LspCompletionItem; /// A hook for resolving incomplete completion items. 
/// diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index 56470dfb6966..faa5898b6546 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -1,6 +1,9 @@ use crate::{ compositor::{Component, Context, Event, EventResult}, - handlers::{completion::ResolveHandler, trigger_auto_completion}, + handlers::{ + completion::{CompletionItem, LspCompletionItem, ResolveHandler}, + trigger_auto_completion, + }, }; use helix_view::{ document::SavePoint, @@ -13,12 +16,12 @@ use tui::{buffer::Buffer as Surface, text::Span}; use std::{borrow::Cow, sync::Arc}; -use helix_core::{chars, Change, Transaction}; +use helix_core::{self as core, chars, Change, Transaction}; use helix_view::{graphics::Rect, Document, Editor}; use crate::ui::{menu, Markdown, Menu, Popup, PromptEvent}; -use helix_lsp::{lsp, util, LanguageServerId, OffsetEncoding}; +use helix_lsp::{lsp, util, OffsetEncoding}; impl menu::Item for CompletionItem { type Data = (); @@ -35,7 +38,7 @@ impl menu::Item for CompletionItem { .unwrap_or(&item.label) .as_str() .into(), - CompletionItem::Path(PathCompletionItem { item, .. }) => Cow::from(&item.label), + CompletionItem::Other(core::CompletionItem { label, .. }) => label.clone(), } } @@ -47,12 +50,12 @@ impl menu::Item for CompletionItem { tags.contains(&lsp::CompletionItemTag::DEPRECATED) }) } - CompletionItem::Path(_) => false, + CompletionItem::Other(_) => false, }; let label = match self { - CompletionItem::Lsp(LspCompletionItem { item, .. }) => &item.label, - CompletionItem::Path(PathCompletionItem { item, .. }) => &item.label, + CompletionItem::Lsp(LspCompletionItem { item, .. }) => item.label.as_str(), + CompletionItem::Other(core::CompletionItem { label, .. }) => &label, }; let kind = match self { @@ -88,7 +91,7 @@ impl menu::Item for CompletionItem { } None => "", }, - CompletionItem::Path(PathCompletionItem { kind, .. }) => kind.as_str(), + CompletionItem::Other(core::CompletionItem { kind, .. 
}) => kind, }; menu::Row::new([ @@ -105,83 +108,6 @@ impl menu::Item for CompletionItem { } } -#[derive(Debug, PartialEq, Clone)] -pub enum PathKind { - Folder, - File, - Link, - Block, - Socket, - CharacterDevice, - Fifo, -} - -impl PathKind { - fn as_str(&self) -> &'static str { - match self { - PathKind::Folder => "folder", - PathKind::File => "file", - PathKind::Link => "link", - PathKind::Block => "block", - PathKind::Socket => "socket", - PathKind::CharacterDevice => "char_device", - PathKind::Fifo => "fifo", - } - } -} - -impl std::fmt::Display for PathKind { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.as_str()) - } -} - -#[derive(Debug, PartialEq, Clone)] -pub struct LspCompletionItem { - pub item: lsp::CompletionItem, - pub provider: LanguageServerId, - pub resolved: bool, -} - -#[derive(Debug, PartialEq, Clone)] -pub struct PathCompletionItem { - pub kind: PathKind, - pub item: helix_core::CompletionItem, -} - -#[derive(Debug, PartialEq, Clone)] -pub enum CompletionItem { - Lsp(LspCompletionItem), - Path(PathCompletionItem), -} - -impl PartialEq for LspCompletionItem { - fn eq(&self, other: &CompletionItem) -> bool { - match other { - CompletionItem::Lsp(other) => self == other, - _ => false, - } - } -} - -impl PartialEq for PathCompletionItem { - fn eq(&self, other: &CompletionItem) -> bool { - match other { - CompletionItem::Path(other) => self == other, - _ => false, - } - } -} - -impl CompletionItem { - pub fn preselect(&self) -> bool { - match self { - CompletionItem::Lsp(LspCompletionItem { item, .. }) => item.preselect.unwrap_or(false), - CompletionItem::Path(_) => false, - } - } -} - /// Wraps a Menu. pub struct Completion { popup: Popup>, @@ -358,8 +284,8 @@ impl Completion { ), view.id, ), - CompletionItem::Path(PathCompletionItem { item, .. }) => { - doc.apply_temporary(&item.transaction, view.id) + CompletionItem::Other(core::CompletionItem { transaction, .. 
}) => { + doc.apply_temporary(transaction, view.id) } }; } @@ -405,8 +331,8 @@ impl Completion { (transaction, add_edits.map(|edits| (edits, encoding))) } - CompletionItem::Path(PathCompletionItem { item, .. }) => { - (item.transaction, None) + CompletionItem::Other(core::CompletionItem { transaction, .. }) => { + (transaction, None) } }; @@ -601,8 +527,8 @@ impl Component for Completion { } None => return, }, - CompletionItem::Path(option) => { - markdowned(language, None, Some(&option.item.documentation)) + CompletionItem::Other(option) => { + markdowned(language, None, Some(&option.documentation)) } }; diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index f7541fe25750..5179be4f4e1c 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -2,13 +2,14 @@ use crate::{ commands::{self, OnKeyCallback}, compositor::{Component, Context, Event, EventResult}, events::{OnModeSwitch, PostCommand}, + handlers::completion::CompletionItem, key, keymap::{KeymapResult, Keymaps}, ui::{ document::{render_document, LinePos, TextRenderer}, statusline, text_decorations::{self, Decoration, DecorationManager, InlineDiagnostics}, - Completion, CompletionItem, ProgressSpinners, + Completion, ProgressSpinners, }, }; diff --git a/helix-term/src/ui/mod.rs b/helix-term/src/ui/mod.rs index 63e817548a28..ab9b5392bace 100644 --- a/helix-term/src/ui/mod.rs +++ b/helix-term/src/ui/mod.rs @@ -17,7 +17,7 @@ mod text_decorations; use crate::compositor::Compositor; use crate::filter_picker_entry; use crate::job::{self, Callback}; -pub use completion::{Completion, CompletionItem, LspCompletionItem, PathCompletionItem, PathKind}; +pub use completion::Completion; pub use editor::EditorView; use helix_stdx::rope; pub use markdown::Markdown; diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index c1e4abe77f30..39296989b4d7 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -1713,7 +1713,7 @@ impl Document { 
self.version } - pub fn supports_path_completion(&self) -> bool { + pub fn path_completion_enabled(&self) -> bool { self.language_config() .and_then(|lang_config| lang_config.path_completion) .unwrap_or_else(|| self.config.load().path_completion) From 57e34aa2fe8aaf09ca278b7b0f3708525f785669 Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Mon, 14 Oct 2024 18:02:10 +0200 Subject: [PATCH 14/19] Modularize path completion into separate functions --- helix-term/src/handlers/completion/path.rs | 170 +++++++++++---------- 1 file changed, 89 insertions(+), 81 deletions(-) diff --git a/helix-term/src/handlers/completion/path.rs b/helix-term/src/handlers/completion/path.rs index 5c8fa4753b7f..15e2423c6746 100644 --- a/helix-term/src/handlers/completion/path.rs +++ b/helix-term/src/handlers/completion/path.rs @@ -1,8 +1,12 @@ use std::{ borrow::Cow, + fs, path::{Path, PathBuf}, str::FromStr as _, - sync::{atomic::AtomicBool, Arc}, + sync::{ + atomic::{AtomicBool, Ordering}, + Arc, + }, }; use futures_util::{future::BoxFuture, FutureExt as _}; @@ -58,18 +62,16 @@ pub(crate) fn path_completion( } })?; - if cancel.load(std::sync::atomic::Ordering::Relaxed) { + if cancel.load(Ordering::Relaxed) { return None; } - // The async file accessor functions of tokio were considered, but they were a bit slower - // and less ergonomic than just using the std functions in a separate "thread" let future = tokio::task::spawn_blocking(move || { let Ok(read_dir) = std::fs::read_dir(&dir_path) else { return Vec::new(); }; - if cancel.load(std::sync::atomic::Ordering::Relaxed) { + if cancel.load(Ordering::Relaxed) { return Vec::new(); } @@ -82,85 +84,14 @@ pub(crate) fn path_completion( .map(|md| (dir_entry.file_name(), md)) }) .map_while(|(file_name, md)| { - if cancel.load(std::sync::atomic::Ordering::Relaxed) { + if cancel.load(Ordering::Relaxed) { return None; } let file_name_str = file_name.to_string_lossy().to_string(); - let full_path = 
fold_home_dir(canonicalize(dir_path.join(file_name))); - let full_path_name = full_path.to_string_lossy(); - - let kind = if md.is_symlink() { - "link" - } else if md.is_dir() { - "folder" - } else { - #[cfg(unix)] - { - use std::os::unix::fs::FileTypeExt; - if md.file_type().is_block_device() { - "block" - } else if md.file_type().is_socket() { - "socket" - } else if md.file_type().is_char_device() { - "char_device" - } else if md.file_type().is_fifo() { - "fifo" - } else { - "file" - } - } - #[cfg(not(unix))] - "file" - }; - let kind = Cow::Borrowed(kind); - - let documentation = { - #[cfg(unix)] - { - use std::os::unix::prelude::PermissionsExt; - let mode = md.permissions().mode(); - - let perms = [ - (libc::S_IRUSR, 'r'), - (libc::S_IWUSR, 'w'), - (libc::S_IXUSR, 'x'), - (libc::S_IRGRP, 'r'), - (libc::S_IWGRP, 'w'), - (libc::S_IXGRP, 'x'), - (libc::S_IROTH, 'r'), - (libc::S_IWOTH, 'w'), - (libc::S_IXOTH, 'x'), - ] - .into_iter() - .fold( - String::with_capacity(9), - |mut acc, (p, s)| { - // This cast is necessary on some platforms such as macos as `mode_t` is u16 there - #[allow(clippy::unnecessary_cast)] - acc.push(if mode & (p as u32) > 0 { s } else { '-' }); - acc - }, - ); - - // TODO it would be great to be able to individually color the documentation, - // but this will likely require a custom doc implementation (i.e. 
not `lsp::Documentation`) - // and/or different rendering in completion.rs - format!( - "type: `{kind}`\n\ - permissions: `[{perms}]`\n\ - full path: `{full_path_name}`", - ) - } - #[cfg(not(unix))] - { - format!( - "type: `{kind}`\n\ - full path: `{full_path_name}`", - ) - } - }; + let kind = path_kind(&md); + let documentation = path_documentation(&md, &dir_path.join(file_name), kind); let edit_diff = typed_file_name .as_ref() @@ -173,9 +104,9 @@ pub(crate) fn path_completion( ); Some(CompletionItem::Other(core::CompletionItem { - kind, - transaction, + kind: Cow::Borrowed(kind), label: file_name_str.into(), + transaction, documentation, })) }) @@ -184,3 +115,80 @@ pub(crate) fn path_completion( Some(async move { Ok(future.await?) }.boxed()) } + +#[cfg(unix)] +fn path_documentation(md: &fs::Metadata, full_path: &Path, kind: &str) -> String { + let full_path = fold_home_dir(canonicalize(full_path)); + let full_path_name = full_path.to_string_lossy(); + + use std::os::unix::prelude::PermissionsExt; + let mode = md.permissions().mode(); + + let perms = [ + (libc::S_IRUSR, 'r'), + (libc::S_IWUSR, 'w'), + (libc::S_IXUSR, 'x'), + (libc::S_IRGRP, 'r'), + (libc::S_IWGRP, 'w'), + (libc::S_IXGRP, 'x'), + (libc::S_IROTH, 'r'), + (libc::S_IWOTH, 'w'), + (libc::S_IXOTH, 'x'), + ] + .into_iter() + .fold(String::with_capacity(9), |mut acc, (p, s)| { + // This cast is necessary on some platforms such as macos as `mode_t` is u16 there + #[allow(clippy::unnecessary_cast)] + acc.push(if mode & (p as u32) > 0 { s } else { '-' }); + acc + }); + + // TODO it would be great to be able to individually color the documentation, + // but this will likely require a custom doc implementation (i.e. 
not `lsp::Documentation`) + // and/or different rendering in completion.rs + format!( + "type: `{kind}`\n\ + permissions: `[{perms}]`\n\ + full path: `{full_path_name}`", + ) +} + +#[cfg(not(unix))] +fn path_documentation(md: &fs::Metadata, full_path: &Path, kind: &str) -> String { + let full_path = fold_home_dir(canonicalize(full_path)); + let full_path_name = full_path.to_string_lossy(); + format!("type: `{kind}`\nfull path: `{full_path_name}`",) +} + +#[cfg(unix)] +fn path_kind(md: &fs::Metadata) -> &'static str { + if md.is_symlink() { + "link" + } else if md.is_dir() { + "folder" + } else { + use std::os::unix::fs::FileTypeExt; + if md.file_type().is_block_device() { + "block" + } else if md.file_type().is_socket() { + "socket" + } else if md.file_type().is_char_device() { + "char_device" + } else if md.file_type().is_fifo() { + "fifo" + } else { + "file" + } + } +} + +#[cfg(not(unix))] +fn path_kind(md: &fs::Metadata) -> &'static str { + if md.is_symlink() { + "link" + } else if md.is_dir() { + "folder" + } else { + "file" + } +} From d4a873d8f7149200d7dacf17d8008d14ecdac724 Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Mon, 14 Oct 2024 18:20:11 +0200 Subject: [PATCH 15/19] cargo fmt --- helix-term/src/handlers/completion/item.rs | 2 -- helix-term/src/handlers/completion/resolve.rs | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/helix-term/src/handlers/completion/item.rs b/helix-term/src/handlers/completion/item.rs index 501a1dd50f51..bcd35cd5411e 100644 --- a/helix-term/src/handlers/completion/item.rs +++ b/helix-term/src/handlers/completion/item.rs @@ -39,5 +39,3 @@ impl CompletionItem { } } } - - diff --git a/helix-term/src/handlers/completion/resolve.rs b/helix-term/src/handlers/completion/resolve.rs index 1d220908c103..cc31560172e4 100644 --- a/helix-term/src/handlers/completion/resolve.rs +++ b/helix-term/src/handlers/completion/resolve.rs @@ -7,9 +7,9 @@ use tokio::time::{Duration, Instant}; use 
helix_event::{send_blocking, AsyncHook, CancelRx}; use helix_view::Editor; +use super::LspCompletionItem; use crate::handlers::completion::CompletionItem; use crate::job; -use super::LspCompletionItem; /// A hook for resolving incomplete completion items. /// From ce885df98efc27f55b3f303a67fceca6d396c135 Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Mon, 14 Oct 2024 18:23:54 +0200 Subject: [PATCH 16/19] cargo clippy fix --- helix-term/src/ui/completion.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index faa5898b6546..cb0af6fc638a 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -55,7 +55,7 @@ impl menu::Item for CompletionItem { let label = match self { CompletionItem::Lsp(LspCompletionItem { item, .. }) => item.label.as_str(), - CompletionItem::Other(core::CompletionItem { label, .. }) => &label, + CompletionItem::Other(core::CompletionItem { label, .. 
}) => label, }; let kind = match self { From 381732197e7ca268ab209c12c10744c22a12b085 Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Sun, 20 Oct 2024 00:59:15 +0200 Subject: [PATCH 17/19] Address review feedback --- helix-term/src/handlers/completion.rs | 15 +++++++++------ helix-term/src/handlers/completion/path.rs | 14 ++++++++------ 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/helix-term/src/handlers/completion.rs b/helix-term/src/handlers/completion.rs index ce9ed7f7ce29..edd54c5e3a88 100644 --- a/helix-term/src/handlers/completion.rs +++ b/helix-term/src/handlers/completion.rs @@ -369,13 +369,16 @@ pub fn trigger_auto_completion( }) if triggers.iter().any(|trigger| text.ends_with(trigger))) }); - let trigger_path_completion = matches!( - text.get_bytes_at(text.len_bytes()) - .and_then(|t| t.reversed().next()), - Some(b'/' | b'\\') - ) && doc.path_completion_enabled(); + let cursor_char = text + .get_bytes_at(text.len_bytes()) + .and_then(|t| t.reversed().next()); - if is_trigger_char || trigger_path_completion { + #[cfg(windows)] + let is_path_completion_trigger = matches!(cursor_char, Some(b'/' | b'\\')); + #[cfg(not(windows))] + let is_path_completion_trigger = matches!(cursor_char, Some(b'/')); + + if is_trigger_char || (is_path_completion_trigger && doc.path_completion_enabled()) { send_blocking( tx, CompletionEvent::TriggerChar { diff --git a/helix-term/src/handlers/completion/path.rs b/helix-term/src/handlers/completion/path.rs index 15e2423c6746..2bd04c65fe27 100644 --- a/helix-term/src/handlers/completion/path.rs +++ b/helix-term/src/handlers/completion/path.rs @@ -49,7 +49,11 @@ pub(crate) fn path_completion( Some(parent_dir) if path.is_relative() => parent_dir.join(&path), _ => path.into_owned(), }; + #[cfg(windows)] let ends_with_slash = matches!(matched_path.as_bytes().last(), Some(b'/' | b'\\')); + #[cfg(not(windows))] + let ends_with_slash = matches!(matched_path.as_bytes().last(), Some(b'/')); + if ends_with_slash { 
Some((PathBuf::from(path.as_path()), None)) } else { @@ -81,17 +85,15 @@ pub(crate) fn path_completion( dir_entry .metadata() .ok() - .map(|md| (dir_entry.file_name(), md)) + .and_then(|md| Some((dir_entry.file_name().into_string().ok()?, md))) }) .map_while(|(file_name, md)| { if cancel.load(Ordering::Relaxed) { return None; } - let file_name_str = file_name.to_string_lossy().to_string(); - let kind = path_kind(&md); - let documentation = path_documentation(&md, &dir_path.join(file_name), kind); + let documentation = path_documentation(&md, &dir_path.join(&file_name), kind); let edit_diff = typed_file_name .as_ref() @@ -100,12 +102,12 @@ pub(crate) fn path_completion( let transaction = Transaction::change( &text, - std::iter::once((cursor - edit_diff, cursor, Some((&file_name_str).into()))), + std::iter::once((cursor - edit_diff, cursor, Some((&file_name).into()))), ); Some(CompletionItem::Other(core::CompletionItem { kind: Cow::Borrowed(kind), - label: file_name_str.into(), + label: file_name.into(), transaction, documentation, })) From d73f158b438973918a5a136148b11be669b9e2e9 Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Tue, 22 Oct 2024 00:53:28 +0200 Subject: [PATCH 18/19] improve completion cancelation --- helix-event/src/cancel.rs | 280 +++++++++++++++++- helix-event/src/lib.rs | 2 +- helix-term/src/handlers/completion.rs | 65 ++-- helix-term/src/handlers/completion/path.rs | 15 +- helix-term/src/handlers/completion/resolve.rs | 21 +- helix-term/src/handlers/signature_help.rs | 25 +- 6 files changed, 324 insertions(+), 84 deletions(-) diff --git a/helix-event/src/cancel.rs b/helix-event/src/cancel.rs index f027be80e8de..f80ca3d9bf0e 100644 --- a/helix-event/src/cancel.rs +++ b/helix-event/src/cancel.rs @@ -1,15 +1,18 @@ +use std::borrow::Borrow; use std::future::Future; +use std::sync::atomic::AtomicU64; +use std::sync::atomic::Ordering::Relaxed; +use std::sync::Arc; -pub use oneshot::channel as cancelation; -use tokio::sync::oneshot; +use 
tokio::sync::Notify;
 
-pub type CancelTx = oneshot::Sender<()>;
-pub type CancelRx = oneshot::Receiver<()>;
-
-pub async fn cancelable_future(future: impl Future, cancel: CancelRx) -> Option {
+pub async fn cancelable_future(
+    future: impl Future,
+    cancel: impl Borrow,
+) -> Option {
     tokio::select! {
         biased;
-        _ = cancel => {
+        _ = cancel.borrow().canceled() => {
             None
         }
         res = future => {
@@ -17,3 +20,266 @@ pub async fn cancelable_future(future: impl Future, cancel: Cance
         }
     }
 }
+
+#[derive(Default, Debug)]
+struct Shared {
+    state: AtomicU64,
+    // notify has some features that we don't really need here because it
+    // supports waking single tasks (notify_one) and does its own (more
+    // complicated) state tracking, we could reimplement the waiter linked list
+    // with modest effort and reduce memory consumption by one word/8 bytes and
+    // reduce code complexity/number of atomic operations.
+    //
+    // I don't think that's worth the complexity (unsafe code).
+    //
+    // if we only cared about async code then we could also only use a notify
+    // (without the generation count), this would be equivalent (or maybe more
+    // correct if we want to allow cloning the TX) but it would be extremely slow
+    // to frequently check for cancellation from sync code
+    notify: Notify,
+}
+
+impl Shared {
+    fn generation(&self) -> u32 {
+        self.state.load(Relaxed) as u32
+    }
+
+    fn num_running(&self) -> u32 {
+        (self.state.load(Relaxed) >> 32) as u32
+    }
+
+    /// increments the generation count and sets num_running
+    /// to the provided value, this operation is not with
+    /// regard to the generation counter (doesn't use fetch_add)
+    /// so the calling code must ensure it cannot execute concurrently
+    /// to maintain correctness (but not safety)
+    fn inc_generation(&self, num_running: u32) -> (u32, u32) {
+        let state = self.state.load(Relaxed);
+        let generation = state as u32;
+        let prev_running = (state >> 32) as u32;
+        // no need to create a new generation if the refcount is zero 
(fast path)
+        if prev_running == 0 && num_running == 0 {
+            return (generation, 0);
+        }
+        let new_generation = generation.saturating_add(1);
+        self.state.store(
+            new_generation as u64 | ((num_running as u64) << 32),
+            Relaxed,
+        );
+        self.notify.notify_waiters();
+        (new_generation, prev_running)
+    }
+
+    fn inc_running(&self, generation: u32) {
+        let mut state = self.state.load(Relaxed);
+        loop {
+            let current_generation = state as u32;
+            if current_generation != generation {
+                break;
+            }
+            let off = 1 << 32;
+            let res = self.state.compare_exchange_weak(
+                state,
+                state.saturating_add(off),
+                Relaxed,
+                Relaxed,
+            );
+            match res {
+                Ok(_) => break,
+                Err(new_state) => state = new_state,
+            }
+        }
+    }
+
+    fn dec_running(&self, generation: u32) {
+        let mut state = self.state.load(Relaxed);
+        loop {
+            let current_generation = state as u32;
+            if current_generation != generation {
+                break;
+            }
+            let num_running = (state >> 32) as u32;
+            // running can't be zero here, that would mean we miscounted somewhere
+            assert_ne!(num_running, 0);
+            let off = 1 << 32;
+            let res = self
+                .state
+                .compare_exchange_weak(state, state - off, Relaxed, Relaxed);
+            match res {
+                Ok(_) => break,
+                Err(new_state) => state = new_state,
+            }
+        }
+    }
+}
+
+// this intentionally doesn't implement clone and requires a mutable reference
+// for cancellation to avoid races (in inc_generation)
+
+/// A task controller allows managing a single subtask enabling the controller
+/// to cancel the subtask and to check whether it is still running. For efficiency
+/// reasons the controller can be reused/restarted, in that case the previous
+/// task is automatically cancelled.
+///
+/// If the controller is dropped the subtasks are automatically canceled. 
+#[derive(Default, Debug)]
+pub struct TaskController {
+    shared: Arc,
+}
+
+impl TaskController {
+    pub fn new() -> Self {
+        TaskController::default()
+    }
+    /// Cancels the active task (handle)
+    ///
+    /// returns whether any tasks were still running before the cancellation
+    pub fn cancel(&mut self) -> bool {
+        self.shared.inc_generation(0).1 != 0
+    }
+
+    /// checks whether there are any task handles
+    /// that haven't been dropped (or canceled) yet
+    pub fn is_running(&self) -> bool {
+        self.shared.num_running() != 0
+    }
+
+    /// Starts a new task and cancels the previous task (handles)
+    pub fn restart(&mut self) -> TaskHandle {
+        TaskHandle {
+            generation: self.shared.inc_generation(1).0,
+            shared: self.shared.clone(),
+        }
+    }
+}
+
+impl Drop for TaskController {
+    fn drop(&mut self) {
+        self.cancel();
+    }
+}
+
+/// A handle that is used to link a task with a task controller, it can be
+/// used to cancel async futures very efficiently but can also be checked for
+/// cancellation very quickly (single atomic read) in blocking code. The
+/// handle can be cheaply cloned (referenced counted).
+///
+/// The TaskController can check whether a task is "running" by inspecting the
+/// refcount of the (current) tasks handles. Therefore, if that information
+/// is important ensure that the handle is not dropped until the task fully
+/// completes
+pub struct TaskHandle {
+    shared: Arc,
+    generation: u32,
+}
+
+impl Clone for TaskHandle {
+    fn clone(&self) -> Self {
+        self.shared.inc_running(self.generation);
+        TaskHandle {
+            shared: self.shared.clone(),
+            generation: self.generation,
+        }
+    }
+}
+
+impl Drop for TaskHandle {
+    fn drop(&mut self) {
+        self.shared.dec_running(self.generation);
+    }
+}
+
+impl TaskHandle {
+    /// waits until [`TaskController::cancel`] is called for the corresponding
+    /// [`TaskController`]. 
Immediately returns if `cancel` was already called since
+    pub async fn canceled(&self) {
+        let notified = self.shared.notify.notified();
+        if !self.is_canceled() {
+            notified.await
+        }
+    }
+
+    pub fn is_canceled(&self) -> bool {
+        self.generation != self.shared.generation()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use std::future::poll_fn;
+
+    use futures_executor::block_on;
+    use tokio::task::yield_now;
+
+    use crate::{cancelable_future, TaskController};
+
+    #[test]
+    fn immidiate_cancel() {
+        let mut controller = TaskController::new();
+        let handle = controller.restart();
+        controller.cancel();
+        assert!(handle.is_canceled());
+        controller.restart();
+        assert!(handle.is_canceled());
+
+        let res = block_on(cancelable_future(
+            poll_fn(|_cx| std::task::Poll::Ready(())),
+            handle,
+        ));
+        assert!(res.is_none());
+    }
+
+    #[test]
+    fn running_count() {
+        let mut controller = TaskController::new();
+        let handle = controller.restart();
+        assert!(controller.is_running());
+        assert!(!handle.is_canceled());
+        drop(handle);
+        assert!(!controller.is_running());
+        assert!(!controller.cancel());
+        let handle = controller.restart();
+        assert!(!handle.is_canceled());
+        assert!(controller.is_running());
+        let handle2 = handle.clone();
+        assert!(!handle.is_canceled());
+        assert!(controller.is_running());
+        drop(handle2);
+        assert!(!handle.is_canceled());
+        assert!(controller.is_running());
+        assert!(controller.cancel());
+        assert!(handle.is_canceled());
+        assert!(!controller.is_running());
+    }
+
+    #[test]
+    fn no_cancel() {
+        let mut controller = TaskController::new();
+        let handle = controller.restart();
+        assert!(!handle.is_canceled());
+
+        let res = block_on(cancelable_future(
+            poll_fn(|_cx| std::task::Poll::Ready(())),
+            handle,
+        ));
+        assert!(res.is_some());
+    }
+
+    #[test]
+    fn delayed_cancel() {
+        let mut controller = TaskController::new();
+        let handle = controller.restart();
+
+        let mut hit = false;
+        let res = block_on(cancelable_future(
+            async {
+                controller.cancel();
+                
hit = true; + yield_now().await; + }, + handle, + )); + assert!(res.is_none()); + assert!(hit); + } +} diff --git a/helix-event/src/lib.rs b/helix-event/src/lib.rs index de018a79ddca..8aa6b52fa2f9 100644 --- a/helix-event/src/lib.rs +++ b/helix-event/src/lib.rs @@ -32,7 +32,7 @@ //! to helix-view in the future if we manage to detach the compositor from its rendering backend. use anyhow::Result; -pub use cancel::{cancelable_future, cancelation, CancelRx, CancelTx}; +pub use cancel::{cancelable_future, TaskController, TaskHandle}; pub use debounce::{send_blocking, AsyncHook}; pub use redraw::{ lock_frame, redraw_requested, request_redraw, start_frame, RenderLockGuard, RequestRedrawOnDrop, diff --git a/helix-term/src/handlers/completion.rs b/helix-term/src/handlers/completion.rs index edd54c5e3a88..f3223487c6ca 100644 --- a/helix-term/src/handlers/completion.rs +++ b/helix-term/src/handlers/completion.rs @@ -1,5 +1,4 @@ use std::collections::HashSet; -use std::sync::atomic::AtomicBool; use std::sync::Arc; use std::time::Duration; @@ -8,9 +7,7 @@ use futures_util::stream::FuturesUnordered; use futures_util::FutureExt; use helix_core::chars::char_is_word; use helix_core::syntax::LanguageServerFeature; -use helix_event::{ - cancelable_future, cancelation, register_hook, send_blocking, CancelRx, CancelTx, -}; +use helix_event::{cancelable_future, register_hook, send_blocking, TaskController, TaskHandle}; use helix_lsp::lsp; use helix_lsp::util::pos_to_lsp_pos; use helix_stdx::rope::RopeSliceExt; @@ -59,12 +56,8 @@ pub(super) struct CompletionHandler { /// currently active trigger which will cause a /// completion request after the timeout trigger: Option, - /// A handle for currently active completion request. 
- /// This can be used to determine whether the current - /// request is still active (and new triggers should be - /// ignored) and can also be used to abort the current - /// request (by dropping the handle) - request: Option, + in_flight: Option, + task_controller: TaskController, config: Arc>, } @@ -72,8 +65,9 @@ impl CompletionHandler { pub fn new(config: Arc>) -> CompletionHandler { Self { config, - request: None, + task_controller: TaskController::new(), trigger: None, + in_flight: None, } } } @@ -86,6 +80,9 @@ impl helix_event::AsyncHook for CompletionHandler { event: Self::Event, _old_timeout: Option, ) -> Option { + if self.in_flight.is_some() && !self.task_controller.is_running() { + self.in_flight = None; + } match event { CompletionEvent::AutoTrigger { cursor: trigger_pos, @@ -96,7 +93,7 @@ impl helix_event::AsyncHook for CompletionHandler { // but people may create weird keymaps/use the mouse so lets be extra careful if self .trigger - .as_ref() + .or(self.in_flight) .map_or(true, |trigger| trigger.doc != doc || trigger.view != view) { self.trigger = Some(Trigger { @@ -109,7 +106,7 @@ impl helix_event::AsyncHook for CompletionHandler { } CompletionEvent::TriggerChar { cursor, doc, view } => { // immediately request completions and drop all auto completion requests - self.request = None; + self.task_controller.cancel(); self.trigger = Some(Trigger { pos: cursor, view, @@ -119,7 +116,6 @@ impl helix_event::AsyncHook for CompletionHandler { } CompletionEvent::ManualTrigger { cursor, doc, view } => { // immediately request completions and drop all auto completion requests - self.request = None; self.trigger = Some(Trigger { pos: cursor, view, @@ -132,21 +128,21 @@ impl helix_event::AsyncHook for CompletionHandler { } CompletionEvent::Cancel => { self.trigger = None; - self.request = None; + self.task_controller.cancel(); } CompletionEvent::DeleteText { cursor } => { // if we deleted the original trigger, abort the completion - if matches!(self.trigger, 
Some(Trigger{ pos, .. }) if cursor < pos) { + if matches!(self.trigger.or(self.in_flight), Some(Trigger{ pos, .. }) if cursor < pos) + { self.trigger = None; - self.request = None; + self.task_controller.cancel(); } } } self.trigger.map(|trigger| { // if the current request was closed forget about it // otherwise immediately restart the completion request - let cancel = self.request.take().map_or(false, |req| !req.is_closed()); - let timeout = if trigger.kind == TriggerKind::Auto && !cancel { + let timeout = if trigger.kind == TriggerKind::Auto { self.config.load().editor.completion_timeout } else { // we want almost instant completions for trigger chars @@ -161,17 +157,17 @@ impl helix_event::AsyncHook for CompletionHandler { fn finish_debounce(&mut self) { let trigger = self.trigger.take().expect("debounce always has a trigger"); - let (tx, rx) = cancelation(); - self.request = Some(tx); + self.in_flight = Some(trigger); + let handle = self.task_controller.restart(); dispatch_blocking(move |editor, compositor| { - request_completion(trigger, rx, editor, compositor) + request_completion(trigger, handle, editor, compositor) }); } } fn request_completion( mut trigger: Trigger, - cancel: CancelRx, + handle: TaskHandle, editor: &mut Editor, compositor: &mut Compositor, ) { @@ -202,7 +198,6 @@ fn request_completion( // necessary from our side too. 
trigger.pos = cursor; let trigger_text = text.slice(..cursor); - let cancel_completion = Arc::new(AtomicBool::new(false)); let mut seen_language_servers = HashSet::new(); let mut futures: FuturesUnordered<_> = doc @@ -270,12 +265,7 @@ fn request_completion( } .boxed() }) - .chain(path_completion( - cursor, - text.clone(), - doc, - cancel_completion.clone(), - )) + .chain(path_completion(cursor, text.clone(), doc, handle.clone())) .collect(); let future = async move { @@ -296,18 +286,13 @@ fn request_completion( let ui = compositor.find::().unwrap(); ui.last_insert.1.push(InsertEvent::RequestCompletion); tokio::spawn(async move { - let items = match cancelable_future(future, cancel).await { - Some(v) => v, - None => { - cancel_completion.store(true, std::sync::atomic::Ordering::Relaxed); - Vec::new() - } - }; - if items.is_empty() { + let items = cancelable_future(future, &handle).await; + let Some(items) = items.filter(|items| !items.is_empty()) else { return; - } + }; dispatch(move |editor, compositor| { - show_completion(editor, compositor, items, trigger, savepoint) + show_completion(editor, compositor, items, trigger, savepoint); + drop(handle) }) .await }); diff --git a/helix-term/src/handlers/completion/path.rs b/helix-term/src/handlers/completion/path.rs index 2bd04c65fe27..b7b6050738da 100644 --- a/helix-term/src/handlers/completion/path.rs +++ b/helix-term/src/handlers/completion/path.rs @@ -3,15 +3,12 @@ use std::{ fs, path::{Path, PathBuf}, str::FromStr as _, - sync::{ - atomic::{AtomicBool, Ordering}, - Arc, - }, }; use futures_util::{future::BoxFuture, FutureExt as _}; use helix_core as core; use helix_core::Transaction; +use helix_event::TaskHandle; use helix_stdx::path::{self, canonicalize, fold_home_dir, get_path_suffix}; use helix_view::Document; use url::Url; @@ -22,7 +19,7 @@ pub(crate) fn path_completion( cursor: usize, text: core::Rope, doc: &Document, - cancel: Arc, + handle: TaskHandle, ) -> Option>>> { if !doc.path_completion_enabled() { 
return None; @@ -66,7 +63,7 @@ pub(crate) fn path_completion( } })?; - if cancel.load(Ordering::Relaxed) { + if handle.is_canceled() { return None; } @@ -75,10 +72,6 @@ pub(crate) fn path_completion( return Vec::new(); }; - if cancel.load(Ordering::Relaxed) { - return Vec::new(); - } - read_dir .filter_map(Result::ok) .filter_map(|dir_entry| { @@ -88,7 +81,7 @@ pub(crate) fn path_completion( .and_then(|md| Some((dir_entry.file_name().into_string().ok()?, md))) }) .map_while(|(file_name, md)| { - if cancel.load(Ordering::Relaxed) { + if handle.is_canceled() { return None; } diff --git a/helix-term/src/handlers/completion/resolve.rs b/helix-term/src/handlers/completion/resolve.rs index cc31560172e4..802d6f51d81c 100644 --- a/helix-term/src/handlers/completion/resolve.rs +++ b/helix-term/src/handlers/completion/resolve.rs @@ -4,7 +4,7 @@ use helix_lsp::lsp; use tokio::sync::mpsc::Sender; use tokio::time::{Duration, Instant}; -use helix_event::{send_blocking, AsyncHook, CancelRx}; +use helix_event::{send_blocking, AsyncHook, TaskController, TaskHandle}; use helix_view::Editor; use super::LspCompletionItem; @@ -31,11 +31,7 @@ impl ResolveHandler { pub fn new() -> ResolveHandler { ResolveHandler { last_request: None, - resolver: ResolveTimeout { - next_request: None, - in_flight: None, - } - .spawn(), + resolver: ResolveTimeout::default().spawn(), } } @@ -101,7 +97,8 @@ struct ResolveRequest { #[derive(Default)] struct ResolveTimeout { next_request: Option, - in_flight: Option<(helix_event::CancelTx, Arc)>, + in_flight: Option>, + task_controller: TaskController, } impl AsyncHook for ResolveTimeout { @@ -121,7 +118,7 @@ impl AsyncHook for ResolveTimeout { } else if self .in_flight .as_ref() - .is_some_and(|(_, old_request)| old_request.item == request.item.item) + .is_some_and(|old_request| old_request.item == request.item.item) { self.next_request = None; None @@ -135,14 +132,14 @@ impl AsyncHook for ResolveTimeout { let Some(request) = self.next_request.take() else { 
return; }; - let (tx, rx) = helix_event::cancelation(); - self.in_flight = Some((tx, request.item.clone())); - tokio::spawn(request.execute(rx)); + let token = self.task_controller.restart(); + self.in_flight = Some(request.item.clone()); + tokio::spawn(request.execute(token)); } } impl ResolveRequest { - async fn execute(self, cancel: CancelRx) { + async fn execute(self, cancel: TaskHandle) { let future = self.ls.resolve_completion_item(&self.item.item); let Some(resolved_item) = helix_event::cancelable_future(future, cancel).await else { return; diff --git a/helix-term/src/handlers/signature_help.rs b/helix-term/src/handlers/signature_help.rs index aaa97b9a058d..e4f7e935a7cd 100644 --- a/helix-term/src/handlers/signature_help.rs +++ b/helix-term/src/handlers/signature_help.rs @@ -2,9 +2,7 @@ use std::sync::Arc; use std::time::Duration; use helix_core::syntax::LanguageServerFeature; -use helix_event::{ - cancelable_future, cancelation, register_hook, send_blocking, CancelRx, CancelTx, -}; +use helix_event::{cancelable_future, register_hook, send_blocking, TaskController, TaskHandle}; use helix_lsp::lsp::{self, SignatureInformation}; use helix_stdx::rope::RopeSliceExt; use helix_view::document::Mode; @@ -22,11 +20,11 @@ use crate::ui::lsp::{Signature, SignatureHelp}; use crate::ui::Popup; use crate::{job, ui}; -#[derive(Debug)] +#[derive(Debug, PartialEq, Eq)] enum State { Open, Closed, - Pending { request: CancelTx }, + Pending, } /// debounce timeout in ms, value taken from VSCode @@ -37,6 +35,7 @@ const TIMEOUT: u64 = 120; pub(super) struct SignatureHelpHandler { trigger: Option, state: State, + task_controller: TaskController, } impl SignatureHelpHandler { @@ -44,6 +43,7 @@ impl SignatureHelpHandler { SignatureHelpHandler { trigger: None, state: State::Closed, + task_controller: TaskController::new(), } } } @@ -76,12 +76,11 @@ impl helix_event::AsyncHook for SignatureHelpHandler { } SignatureHelpEvent::RequestComplete { open } => { // don't cancel rerequest 
that was already triggered - if let State::Pending { request } = &self.state { - if !request.is_closed() { - return timeout; - } + if self.state == State::Pending && self.task_controller.is_running() { + return timeout; } self.state = if open { State::Open } else { State::Closed }; + self.task_controller.cancel(); return timeout; } @@ -94,16 +93,16 @@ impl helix_event::AsyncHook for SignatureHelpHandler { fn finish_debounce(&mut self) { let invocation = self.trigger.take().unwrap(); - let (tx, rx) = cancelation(); - self.state = State::Pending { request: tx }; - job::dispatch_blocking(move |editor, _| request_signature_help(editor, invocation, rx)) + self.state = State::Pending; + let handle = self.task_controller.restart(); + job::dispatch_blocking(move |editor, _| request_signature_help(editor, invocation, handle)) } } pub fn request_signature_help( editor: &mut Editor, invoked: SignatureHelpInvoked, - cancel: CancelRx, + cancel: TaskHandle, ) { let (view, doc) = current!(editor); From 3dbcc4ed345e74c70cb53cf6d893c8157da07700 Mon Sep 17 00:00:00 2001 From: Philipp Mildenberger Date: Tue, 22 Oct 2024 20:12:03 +0200 Subject: [PATCH 19/19] Fix typos, and other cosmetic stuff --- helix-event/src/cancel.rs | 60 ++++++++++++++++++++------------------- 1 file changed, 31 insertions(+), 29 deletions(-) diff --git a/helix-event/src/cancel.rs b/helix-event/src/cancel.rs index f80ca3d9bf0e..2029c9456b97 100644 --- a/helix-event/src/cancel.rs +++ b/helix-event/src/cancel.rs @@ -24,8 +24,8 @@ pub async fn cancelable_future( #[derive(Default, Debug)] struct Shared { state: AtomicU64, - // notify has some features that we don't really need here because it - // supports waking single tasks (notify_one) and does it's own (more + // `Notify` has some features that we don't really need here because it + // supports waking single tasks (`notify_one`) and does its own (more // complicated) state tracking, we could reimplement the waiter linked list // with modest effort and reduce 
memory consumption by one word/8 bytes and // reduce code complexity/number of atomic operations. @@ -48,16 +48,16 @@ impl Shared { (self.state.load(Relaxed) >> 32) as u32 } - /// increments the generation count and sets num_running + /// Increments the generation count and sets `num_running` /// to the provided value, this operation is not with - /// regard to the generation counter (doesn't use fetch_add) + /// regard to the generation counter (doesn't use `fetch_add`) /// so the calling code must ensure it cannot execute concurrently /// to maintain correctness (but not safety) fn inc_generation(&self, num_running: u32) -> (u32, u32) { let state = self.state.load(Relaxed); let generation = state as u32; let prev_running = (state >> 32) as u32; - // no need to create a new generation if the refcount is zero (fastaph) + // no need to create a new generation if the refcount is zero (fastpath) if prev_running == 0 && num_running == 0 { return (generation, 0); } @@ -99,7 +99,7 @@ impl Shared { break; } let num_running = (state >> 32) as u32; - // running can't be zero here, that would mean we misscounted somewhere + // running can't be zero here, that would mean we miscounted somewhere assert_ne!(num_running, 0); let off = 1 << 32; let res = self @@ -113,15 +113,16 @@ impl Shared { } } -// this intentionally doesn't implement clone and requires amutable reference -// for cancelation to avoid races (in inc_generation) +// This intentionally doesn't implement `Clone` and requires a mutable reference +// for cancelation to avoid races (in inc_generation). -/// A task controller allows managing a single subtask enabling the contorller -/// to cancel the subtask and to check wether it is still running. For efficency -/// reasons the controller can be reused/restarted, in that case the previous -/// task is automatically cancelled. 
+/// A task controller allows managing a single subtask enabling the controller +/// to cancel the subtask and to check whether it is still running. /// -/// If the controller is dropped the subtasks are automatically canceled. +/// For efficiency reasons the controller can be reused/restarted, +/// in that case the previous task is automatically canceled. +/// +/// If the controller is dropped, the subtasks are automatically canceled. #[derive(Default, Debug)] pub struct TaskController { shared: Arc, @@ -131,20 +132,20 @@ impl TaskController { pub fn new() -> Self { TaskController::default() } - /// Cancels the active task (handle) + /// Cancels the active task (handle). /// - /// returns wether any tasks were still running before the canellation + /// Returns whether any tasks were still running before the cancelation. pub fn cancel(&mut self) -> bool { self.shared.inc_generation(0).1 != 0 } - /// checks wether there are any task handles - /// that haven't been dropped (or canceled) yet + /// Checks whether there are any task handles + /// that haven't been dropped (or canceled) yet. pub fn is_running(&self) -> bool { self.shared.num_running() != 0 } - /// Starts a new task and cancels the previous task (handles) + /// Starts a new task and cancels the previous task (handles). pub fn restart(&mut self) -> TaskHandle { TaskHandle { generation: self.shared.inc_generation(1).0, @@ -159,15 +160,16 @@ impl Drop for TaskController { } } -/// A handle that is used to link a task with a task controller, it can be -/// used to cancel async futures very efficently but can also be checked for -/// cancaellation very quickly (single atomic read) in blocking code. The -/// handle can be cheaply cloned (referenced counted). +/// A handle that is used to link a task with a task controller. +/// +/// It can be used to cancel async futures very efficiently but can also be checked for +/// cancelation very quickly (single atomic read) in blocking code. 
+/// The handle can be cheaply cloned (reference counted).
 /// 
-/// The TaskController can check wether a task is "running" by inspecting the
-/// refcount of the (current) tasks handeles. Therefore, if that information
-/// is important ensure that the handle is not dropped until the task fully
-/// completes
+/// The TaskController can check whether a task is "running" by inspecting the
+/// refcount of the (current) tasks handles. Therefore, if that information
+/// is important, ensure that the handle is not dropped until the task fully
+/// completes.
 pub struct TaskHandle {
     shared: Arc,
     generation: u32,
@@ -190,8 +192,8 @@ impl Drop for TaskHandle {
 }
 
 impl TaskHandle {
-    /// waits until [`TaskController::cancel`] is called for the corresponding
-    /// [`TaskController`]. Immidietly returns if `cancel` was already called since
+    /// Waits until [`TaskController::cancel`] is called for the corresponding
+    /// [`TaskController`]. Immediately returns if `cancel` was already called since the handle was created.
     pub async fn canceled(&self) {
        let notified = self.shared.notify.notified();
        if !self.is_canceled() {
@@ -214,7 +216,7 @@ mod tests {
    use crate::{cancelable_future, TaskController};

    #[test]
-    fn immidiate_cancel() {
+    fn immediate_cancel() {
        let mut controller = TaskController::new();
        let handle = controller.restart();
        controller.cancel();