transition to nucleo for fuzzy matching (#7814)
* transition to nucleo for fuzzy matching * drop flaky test case: since the picker streams in results now, any test that relies on the picker containing results is potentially flaky * use crates.io version of nucleo * Fix typo in commands.rs Co-authored-by: Skyler Hawthorne <skyler@dead10ck.com> --------- Co-authored-by: Skyler Hawthorne <skyler@dead10ck.com>
This commit is contained in:
parent
40d7e6c9c8
commit
0cb595e226
26 changed files with 756 additions and 1051 deletions
126
Cargo.lock
generated
126
Cargo.lock
generated
|
@ -248,6 +248,12 @@ version = "0.8.4"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa"
|
||||
|
||||
[[package]]
|
||||
name = "cov-mark"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9ffa3d3e0138386cd4361f63537765cac7ee40698028844635a54495a92f67f3"
|
||||
|
||||
[[package]]
|
||||
name = "crc32fast"
|
||||
version = "1.3.2"
|
||||
|
@ -257,6 +263,49 @@ dependencies = [
|
|||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-channel"
|
||||
version = "0.5.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-deque"
|
||||
version = "0.8.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"crossbeam-epoch",
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-epoch"
|
||||
version = "0.9.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"cfg-if",
|
||||
"crossbeam-utils",
|
||||
"memoffset",
|
||||
"scopeguard",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-utils"
|
||||
version = "0.8.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossterm"
|
||||
version = "0.27.0"
|
||||
|
@ -505,15 +554,6 @@ dependencies = [
|
|||
"slab",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fuzzy-matcher"
|
||||
version = "0.3.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "54614a3312934d066701a80f20f15fa3b56d67ac7722b39eea5b4c9dd1d66c94"
|
||||
dependencies = [
|
||||
"thread_local",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "getrandom"
|
||||
version = "0.2.9"
|
||||
|
@ -1227,7 +1267,9 @@ dependencies = [
|
|||
"imara-diff",
|
||||
"indoc",
|
||||
"log",
|
||||
"nucleo",
|
||||
"once_cell",
|
||||
"parking_lot",
|
||||
"quickcheck",
|
||||
"regex",
|
||||
"ropey",
|
||||
|
@ -1259,6 +1301,14 @@ dependencies = [
|
|||
"which",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "helix-event"
|
||||
version = "0.6.0"
|
||||
dependencies = [
|
||||
"parking_lot",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "helix-loader"
|
||||
version = "0.6.0"
|
||||
|
@ -1315,11 +1365,11 @@ dependencies = [
|
|||
"crossterm",
|
||||
"fern",
|
||||
"futures-util",
|
||||
"fuzzy-matcher",
|
||||
"grep-regex",
|
||||
"grep-searcher",
|
||||
"helix-core",
|
||||
"helix-dap",
|
||||
"helix-event",
|
||||
"helix-loader",
|
||||
"helix-lsp",
|
||||
"helix-tui",
|
||||
|
@ -1329,6 +1379,7 @@ dependencies = [
|
|||
"indoc",
|
||||
"libc",
|
||||
"log",
|
||||
"nucleo",
|
||||
"once_cell",
|
||||
"pulldown-cmark",
|
||||
"serde",
|
||||
|
@ -1367,6 +1418,7 @@ dependencies = [
|
|||
"arc-swap",
|
||||
"gix",
|
||||
"helix-core",
|
||||
"helix-event",
|
||||
"imara-diff",
|
||||
"log",
|
||||
"parking_lot",
|
||||
|
@ -1387,6 +1439,7 @@ dependencies = [
|
|||
"futures-util",
|
||||
"helix-core",
|
||||
"helix-dap",
|
||||
"helix-event",
|
||||
"helix-loader",
|
||||
"helix-lsp",
|
||||
"helix-tui",
|
||||
|
@ -1654,6 +1707,15 @@ dependencies = [
|
|||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "memoffset"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "minimal-lexical"
|
||||
version = "0.2.1"
|
||||
|
@ -1700,6 +1762,28 @@ dependencies = [
|
|||
"minimal-lexical",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nucleo"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0ccab936f2c8ad271bb31430944d98d358f74153566ea323265497f5639b11b6"
|
||||
dependencies = [
|
||||
"nucleo-matcher",
|
||||
"parking_lot",
|
||||
"rayon",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nucleo-matcher"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1b702b402fe286162d1f00b552a046ce74365d2ac473a2607ff36ba650f9bd57"
|
||||
dependencies = [
|
||||
"cov-mark",
|
||||
"memchr",
|
||||
"unicode-segmentation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-traits"
|
||||
version = "0.2.15"
|
||||
|
@ -1846,6 +1930,28 @@ dependencies = [
|
|||
"getrandom",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rayon"
|
||||
version = "1.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b"
|
||||
dependencies = [
|
||||
"either",
|
||||
"rayon-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rayon-core"
|
||||
version = "1.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d"
|
||||
dependencies = [
|
||||
"crossbeam-channel",
|
||||
"crossbeam-deque",
|
||||
"crossbeam-utils",
|
||||
"num_cpus",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.2.16"
|
||||
|
|
|
@ -5,6 +5,7 @@ members = [
|
|||
"helix-term",
|
||||
"helix-tui",
|
||||
"helix-lsp",
|
||||
"helix-event",
|
||||
"helix-dap",
|
||||
"helix-loader",
|
||||
"helix-vcs",
|
||||
|
@ -35,3 +36,4 @@ package.helix-term.opt-level = 2
|
|||
|
||||
[workspace.dependencies]
|
||||
tree-sitter = { version = "0.20", git = "https://github.com/tree-sitter/tree-sitter", rev = "ab09ae20d640711174b8da8a654f6b3dec93da1a" }
|
||||
nucleo = "0.2.0"
|
||||
|
|
|
@ -48,6 +48,9 @@ chrono = { version = "0.4", default-features = false, features = ["alloc", "std"
|
|||
etcetera = "0.8"
|
||||
textwrap = "0.16.0"
|
||||
|
||||
nucleo.workspace = true
|
||||
parking_lot = "0.12"
|
||||
|
||||
[dev-dependencies]
|
||||
quickcheck = { version = "1", default-features = false }
|
||||
indoc = "2.0.3"
|
||||
|
|
43
helix-core/src/fuzzy.rs
Normal file
43
helix-core/src/fuzzy.rs
Normal file
|
@ -0,0 +1,43 @@
|
|||
use std::ops::DerefMut;
|
||||
|
||||
use nucleo::pattern::{AtomKind, CaseMatching, Pattern};
|
||||
use nucleo::Config;
|
||||
use parking_lot::Mutex;
|
||||
|
||||
/// A mutex-protected value that is initialized lazily on first access.
/// Because construction is deferred, a `LazyMutex` can be created in a
/// `const` context (see [`LazyMutex::new`]) and used as a `static`.
pub struct LazyMutex<T> {
    // `None` until the first `lock` call, then always `Some`.
    inner: Mutex<Option<T>>,
    // Constructor run exactly once to produce the initial value.
    init: fn() -> T,
}
|
||||
|
||||
impl<T> LazyMutex<T> {
    /// Creates an uninitialized `LazyMutex`; `init` runs on the first call
    /// to [`LazyMutex::lock`].
    pub const fn new(init: fn() -> T) -> Self {
        Self {
            inner: Mutex::new(None),
            init,
        }
    }

    /// Locks the mutex, initializing the value with `init` if this is the
    /// first access, and returns a guard that dereferences to `T`.
    pub fn lock(&self) -> impl DerefMut<Target = T> + '_ {
        // `MutexGuard::map` projects the guard from `Option<T>` to the
        // (now guaranteed-present) inner `T`.
        parking_lot::MutexGuard::map(self.inner.lock(), |val| val.get_or_insert_with(self.init))
    }
}
|
||||
|
||||
/// Globally shared fuzzy matcher, constructed lazily on first use.
pub static MATCHER: LazyMutex<nucleo::Matcher> = LazyMutex::new(nucleo::Matcher::default);
|
||||
|
||||
/// Convenience function to easily fuzzy match on a (relatively small) list
/// of inputs. This is not recommended for building a full tui
/// application that can match large numbers of matches as all matching is done on the current
/// thread, effectively blocking the UI.
///
/// Returns the matching items paired with their match score.
/// When `path` is `true` the matcher is configured for matching file paths
/// (see `Config::set_match_paths`).
pub fn fuzzy_match<T: AsRef<str>>(
    pattern: &str,
    items: impl IntoIterator<Item = T>,
    path: bool,
) -> Vec<(T, u32)> {
    let mut matcher = MATCHER.lock();
    // reset the shared matcher's config: a previous caller may have enabled
    // path matching
    matcher.config = Config::DEFAULT;
    if path {
        matcher.config.set_match_paths();
    }
    let pattern = Pattern::new(pattern, CaseMatching::Smart, AtomKind::Fuzzy);
    pattern.match_list(items, &mut matcher)
}
|
|
@ -7,6 +7,7 @@ pub mod config;
|
|||
pub mod diagnostic;
|
||||
pub mod diff;
|
||||
pub mod doc_formatter;
|
||||
pub mod fuzzy;
|
||||
pub mod graphemes;
|
||||
pub mod history;
|
||||
pub mod increment;
|
||||
|
|
15
helix-event/Cargo.toml
Normal file
15
helix-event/Cargo.toml
Normal file
|
@ -0,0 +1,15 @@
|
|||
[package]
|
||||
name = "helix-event"
|
||||
version = "0.6.0"
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2021"
|
||||
license = "MPL-2.0"
|
||||
categories = ["editor"]
|
||||
repository = "https://github.com/helix-editor/helix"
|
||||
homepage = "https://helix-editor.com"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot"] }
|
||||
parking_lot = { version = "0.12", features = ["send_guard"] }
|
8
helix-event/src/lib.rs
Normal file
8
helix-event/src/lib.rs
Normal file
|
@ -0,0 +1,8 @@
|
|||
//! `helix-event` contains systems that allow (often async) communication between
//! different editor components without strongly coupling them. Currently this
//! crate only contains some smaller facilities but the intent is to add more
//! functionality in the future (like a generic hook system).

pub use redraw::{lock_frame, redraw_requested, request_redraw, start_frame, RenderLockGuard};

mod redraw;
|
49
helix-event/src/redraw.rs
Normal file
49
helix-event/src/redraw.rs
Normal file
|
@ -0,0 +1,49 @@
|
|||
//! Signals that control when/if the editor redraws
|
||||
|
||||
use std::future::Future;
|
||||
|
||||
use parking_lot::{RwLock, RwLockReadGuard};
|
||||
use tokio::sync::Notify;
|
||||
|
||||
/// A `Notify` instance that can be used to (asynchronously) request
/// that the editor render a new frame.
static REDRAW_NOTIFY: Notify = Notify::const_new();

/// A `RwLock` that prevents the next frame from being
/// drawn until an exclusive (write) lock can be acquired.
/// This allows asynchronous tasks to acquire `non-exclusive`
/// locks (read) to prevent the next frame from being drawn
/// until a certain computation has finished.
static RENDER_LOCK: RwLock<()> = RwLock::new(());

/// Guard returned by [`lock_frame`]; the next frame is not drawn while any
/// such guard is alive.
pub type RenderLockGuard = RwLockReadGuard<'static, ()>;
|
||||
|
||||
/// Requests that the editor is redrawn. The redraws are debounced (currently to
/// 30FPS) so this can be called many times without causing a ton of frames to
/// be rendered.
pub fn request_redraw() {
    // Stores at most one permit; repeated calls before the consumer wakes
    // coalesce into a single notification.
    REDRAW_NOTIFY.notify_one();
}
|
||||
|
||||
/// Returns a future that will yield once a redraw has been asynchronously
/// requested using [`request_redraw`].
pub fn redraw_requested() -> impl Future<Output = ()> {
    REDRAW_NOTIFY.notified()
}
|
||||
|
||||
/// Wait until all locks acquired with [`lock_frame`] have been released.
/// This function is called before rendering and is intended to allow the frame
/// to wait for async computations that should be included in the current frame.
pub fn start_frame() {
    // Acquiring (and immediately dropping) the write lock blocks until every
    // outstanding read guard handed out by `lock_frame` has been released.
    drop(RENDER_LOCK.write());
    // exhaust any leftover redraw notifications: requests made before this
    // point are already covered by the frame being started
    let notify = REDRAW_NOTIFY.notified();
    tokio::pin!(notify);
    // `enable` registers the future with the `Notify`, consuming a stored
    // permit if one is present, without awaiting.
    notify.enable();
}
|
||||
|
||||
/// Acquires the render lock which will prevent the next frame from being drawn
/// until the returned guard is dropped.
/// Multiple guards may be held at once (shared read lock); rendering resumes
/// once all of them have been released.
pub fn lock_frame() -> RenderLockGuard {
    RENDER_LOCK.read()
}
|
|
@ -24,6 +24,7 @@ path = "src/main.rs"
|
|||
|
||||
[dependencies]
|
||||
helix-core = { version = "0.6", path = "../helix-core" }
|
||||
helix-event = { version = "0.6", path = "../helix-event" }
|
||||
helix-view = { version = "0.6", path = "../helix-view" }
|
||||
helix-lsp = { version = "0.6", path = "../helix-lsp" }
|
||||
helix-dap = { version = "0.6", path = "../helix-dap" }
|
||||
|
@ -49,7 +50,7 @@ chrono = { version = "0.4", default-features = false, features = ["clock"] }
|
|||
log = "0.4"
|
||||
|
||||
# File picker
|
||||
fuzzy-matcher = "0.3"
|
||||
nucleo.workspace = true
|
||||
ignore = "0.4"
|
||||
# markdown doc rendering
|
||||
pulldown-cmark = { version = "0.9", default-features = false }
|
||||
|
|
|
@ -257,16 +257,8 @@ impl Application {
|
|||
scroll: None,
|
||||
};
|
||||
|
||||
// Acquire mutable access to the redraw_handle lock
|
||||
// to ensure that there are no tasks running that want to block rendering
|
||||
drop(cx.editor.redraw_handle.1.write().await);
|
||||
helix_event::start_frame();
|
||||
cx.editor.needs_redraw = false;
|
||||
{
|
||||
// exhaust any leftover redraw notifications
|
||||
let notify = cx.editor.redraw_handle.0.notified();
|
||||
tokio::pin!(notify);
|
||||
notify.enable();
|
||||
}
|
||||
|
||||
let area = self
|
||||
.terminal
|
||||
|
@ -590,7 +582,7 @@ impl Application {
|
|||
EditorEvent::LanguageServerMessage((id, call)) => {
|
||||
self.handle_language_server_message(call, id).await;
|
||||
// limit render calls for fast language server messages
|
||||
self.editor.redraw_handle.0.notify_one();
|
||||
helix_event::request_redraw();
|
||||
}
|
||||
EditorEvent::DebuggerEvent(payload) => {
|
||||
let needs_render = self.editor.handle_debugger_message(payload).await;
|
||||
|
|
|
@ -43,7 +43,6 @@ use helix_view::{
|
|||
};
|
||||
|
||||
use anyhow::{anyhow, bail, ensure, Context as _};
|
||||
use fuzzy_matcher::FuzzyMatcher;
|
||||
use insert::*;
|
||||
use movement::Movement;
|
||||
|
||||
|
@ -60,7 +59,7 @@ use crate::{
|
|||
};
|
||||
|
||||
use crate::job::{self, Jobs};
|
||||
use futures_util::{stream::FuturesUnordered, StreamExt, TryStreamExt};
|
||||
use futures_util::{stream::FuturesUnordered, TryStreamExt};
|
||||
use std::{collections::HashMap, fmt, future::Future};
|
||||
use std::{collections::HashSet, num::NonZeroUsize};
|
||||
|
||||
|
@ -75,7 +74,6 @@ use serde::de::{self, Deserialize, Deserializer};
|
|||
use grep_regex::RegexMatcherBuilder;
|
||||
use grep_searcher::{sinks, BinaryDetection, SearcherBuilder};
|
||||
use ignore::{DirEntry, WalkBuilder, WalkState};
|
||||
use tokio_stream::wrappers::UnboundedReceiverStream;
|
||||
|
||||
pub type OnKeyCallback = Box<dyn FnOnce(&mut Context, KeyEvent)>;
|
||||
|
||||
|
@ -1715,8 +1713,8 @@ fn select_regex(cx: &mut Context) {
|
|||
"select:".into(),
|
||||
Some(reg),
|
||||
ui::completers::none,
|
||||
move |editor, regex, event| {
|
||||
let (view, doc) = current!(editor);
|
||||
move |cx, regex, event| {
|
||||
let (view, doc) = current!(cx.editor);
|
||||
if !matches!(event, PromptEvent::Update | PromptEvent::Validate) {
|
||||
return;
|
||||
}
|
||||
|
@ -1737,8 +1735,8 @@ fn split_selection(cx: &mut Context) {
|
|||
"split:".into(),
|
||||
Some(reg),
|
||||
ui::completers::none,
|
||||
move |editor, regex, event| {
|
||||
let (view, doc) = current!(editor);
|
||||
move |cx, regex, event| {
|
||||
let (view, doc) = current!(cx.editor);
|
||||
if !matches!(event, PromptEvent::Update | PromptEvent::Validate) {
|
||||
return;
|
||||
}
|
||||
|
@ -1902,14 +1900,14 @@ fn searcher(cx: &mut Context, direction: Direction) {
|
|||
.map(|comp| (0.., std::borrow::Cow::Owned(comp.clone())))
|
||||
.collect()
|
||||
},
|
||||
move |editor, regex, event| {
|
||||
move |cx, regex, event| {
|
||||
if event == PromptEvent::Validate {
|
||||
editor.registers.last_search_register = reg;
|
||||
cx.editor.registers.last_search_register = reg;
|
||||
} else if event != PromptEvent::Update {
|
||||
return;
|
||||
}
|
||||
search_impl(
|
||||
editor,
|
||||
cx.editor,
|
||||
&contents,
|
||||
®ex,
|
||||
Movement::Move,
|
||||
|
@ -2078,13 +2076,11 @@ fn global_search(cx: &mut Context) {
|
|||
}
|
||||
}
|
||||
|
||||
let (all_matches_sx, all_matches_rx) = tokio::sync::mpsc::unbounded_channel::<FileResult>();
|
||||
let config = cx.editor.config();
|
||||
let smart_case = config.search.smart_case;
|
||||
let file_picker_config = config.file_picker.clone();
|
||||
|
||||
let reg = cx.register.unwrap_or('/');
|
||||
|
||||
let completions = search_completions(cx, Some(reg));
|
||||
ui::regex_prompt(
|
||||
cx,
|
||||
|
@ -2097,166 +2093,173 @@ fn global_search(cx: &mut Context) {
|
|||
.map(|comp| (0.., std::borrow::Cow::Owned(comp.clone())))
|
||||
.collect()
|
||||
},
|
||||
move |editor, regex, event| {
|
||||
move |cx, regex, event| {
|
||||
if event != PromptEvent::Validate {
|
||||
return;
|
||||
}
|
||||
editor.registers.last_search_register = reg;
|
||||
cx.editor.registers.last_search_register = reg;
|
||||
|
||||
let documents: Vec<_> = editor
|
||||
let current_path = doc_mut!(cx.editor).path().cloned();
|
||||
let documents: Vec<_> = cx
|
||||
.editor
|
||||
.documents()
|
||||
.map(|doc| (doc.path(), doc.text()))
|
||||
.map(|doc| (doc.path().cloned(), doc.text().to_owned()))
|
||||
.collect();
|
||||
|
||||
if let Ok(matcher) = RegexMatcherBuilder::new()
|
||||
.case_smart(smart_case)
|
||||
.build(regex.as_str())
|
||||
{
|
||||
let searcher = SearcherBuilder::new()
|
||||
.binary_detection(BinaryDetection::quit(b'\x00'))
|
||||
.build();
|
||||
|
||||
let search_root = helix_loader::current_working_dir();
|
||||
if !search_root.exists() {
|
||||
editor.set_error("Current working directory does not exist");
|
||||
cx.editor
|
||||
.set_error("Current working directory does not exist");
|
||||
return;
|
||||
}
|
||||
|
||||
let (picker, injector) = Picker::stream(current_path);
|
||||
|
||||
let dedup_symlinks = file_picker_config.deduplicate_links;
|
||||
let absolute_root = search_root
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| search_root.clone());
|
||||
let injector_ = injector.clone();
|
||||
|
||||
WalkBuilder::new(search_root)
|
||||
.hidden(file_picker_config.hidden)
|
||||
.parents(file_picker_config.parents)
|
||||
.ignore(file_picker_config.ignore)
|
||||
.follow_links(file_picker_config.follow_symlinks)
|
||||
.git_ignore(file_picker_config.git_ignore)
|
||||
.git_global(file_picker_config.git_global)
|
||||
.git_exclude(file_picker_config.git_exclude)
|
||||
.max_depth(file_picker_config.max_depth)
|
||||
.filter_entry(move |entry| {
|
||||
filter_picker_entry(entry, &absolute_root, dedup_symlinks)
|
||||
})
|
||||
.build_parallel()
|
||||
.run(|| {
|
||||
let mut searcher = searcher.clone();
|
||||
let matcher = matcher.clone();
|
||||
let all_matches_sx = all_matches_sx.clone();
|
||||
let documents = &documents;
|
||||
Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
|
||||
let entry = match entry {
|
||||
Ok(entry) => entry,
|
||||
Err(_) => return WalkState::Continue,
|
||||
};
|
||||
|
||||
match entry.file_type() {
|
||||
Some(entry) if entry.is_file() => {}
|
||||
// skip everything else
|
||||
_ => return WalkState::Continue,
|
||||
};
|
||||
|
||||
let sink = sinks::UTF8(|line_num, _| {
|
||||
all_matches_sx
|
||||
.send(FileResult::new(entry.path(), line_num as usize - 1))
|
||||
.unwrap();
|
||||
|
||||
Ok(true)
|
||||
});
|
||||
let doc = documents.iter().find(|&(doc_path, _)| {
|
||||
doc_path.map_or(false, |doc_path| doc_path == entry.path())
|
||||
});
|
||||
|
||||
let result = if let Some((_, doc)) = doc {
|
||||
// there is already a buffer for this file
|
||||
// search the buffer instead of the file because it's faster
|
||||
// and captures new edits without requireing a save
|
||||
if searcher.multi_line_with_matcher(&matcher) {
|
||||
// in this case a continous buffer is required
|
||||
// convert the rope to a string
|
||||
let text = doc.to_string();
|
||||
searcher.search_slice(&matcher, text.as_bytes(), sink)
|
||||
} else {
|
||||
searcher.search_reader(
|
||||
&matcher,
|
||||
RopeReader::new(doc.slice(..)),
|
||||
sink,
|
||||
)
|
||||
}
|
||||
} else {
|
||||
searcher.search_path(&matcher, entry.path(), sink)
|
||||
};
|
||||
|
||||
if let Err(err) = result {
|
||||
log::error!(
|
||||
"Global search error: {}, {}",
|
||||
entry.path().display(),
|
||||
err
|
||||
);
|
||||
}
|
||||
WalkState::Continue
|
||||
std::thread::spawn(move || {
|
||||
let searcher = SearcherBuilder::new()
|
||||
.binary_detection(BinaryDetection::quit(b'\x00'))
|
||||
.build();
|
||||
WalkBuilder::new(search_root)
|
||||
.hidden(file_picker_config.hidden)
|
||||
.parents(file_picker_config.parents)
|
||||
.ignore(file_picker_config.ignore)
|
||||
.follow_links(file_picker_config.follow_symlinks)
|
||||
.git_ignore(file_picker_config.git_ignore)
|
||||
.git_global(file_picker_config.git_global)
|
||||
.git_exclude(file_picker_config.git_exclude)
|
||||
.max_depth(file_picker_config.max_depth)
|
||||
.filter_entry(move |entry| {
|
||||
filter_picker_entry(entry, &absolute_root, dedup_symlinks)
|
||||
})
|
||||
});
|
||||
.build_parallel()
|
||||
.run(|| {
|
||||
let mut searcher = searcher.clone();
|
||||
let matcher = matcher.clone();
|
||||
let injector = injector_.clone();
|
||||
let documents = &documents;
|
||||
Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
|
||||
let entry = match entry {
|
||||
Ok(entry) => entry,
|
||||
Err(_) => return WalkState::Continue,
|
||||
};
|
||||
|
||||
match entry.file_type() {
|
||||
Some(entry) if entry.is_file() => {}
|
||||
// skip everything else
|
||||
_ => return WalkState::Continue,
|
||||
};
|
||||
|
||||
let mut stop = false;
|
||||
let sink = sinks::UTF8(|line_num, _| {
|
||||
stop = injector
|
||||
.push(FileResult::new(entry.path(), line_num as usize - 1))
|
||||
.is_err();
|
||||
|
||||
Ok(!stop)
|
||||
});
|
||||
let doc = documents.iter().find(|&(doc_path, _)| {
|
||||
doc_path
|
||||
.as_ref()
|
||||
.map_or(false, |doc_path| doc_path == entry.path())
|
||||
});
|
||||
|
||||
let result = if let Some((_, doc)) = doc {
|
||||
// there is already a buffer for this file
|
||||
// search the buffer instead of the file because it's faster
|
||||
// and captures new edits without requiring a save
|
||||
if searcher.multi_line_with_matcher(&matcher) {
|
||||
// in this case a continous buffer is required
|
||||
// convert the rope to a string
|
||||
let text = doc.to_string();
|
||||
searcher.search_slice(&matcher, text.as_bytes(), sink)
|
||||
} else {
|
||||
searcher.search_reader(
|
||||
&matcher,
|
||||
RopeReader::new(doc.slice(..)),
|
||||
sink,
|
||||
)
|
||||
}
|
||||
} else {
|
||||
searcher.search_path(&matcher, entry.path(), sink)
|
||||
};
|
||||
|
||||
if let Err(err) = result {
|
||||
log::error!(
|
||||
"Global search error: {}, {}",
|
||||
entry.path().display(),
|
||||
err
|
||||
);
|
||||
}
|
||||
if stop {
|
||||
WalkState::Quit
|
||||
} else {
|
||||
WalkState::Continue
|
||||
}
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
cx.jobs.callback(async move {
|
||||
let call = move |_: &mut Editor, compositor: &mut Compositor| {
|
||||
let picker = Picker::with_stream(
|
||||
picker,
|
||||
injector,
|
||||
move |cx, FileResult { path, line_num }, action| {
|
||||
let doc = match cx.editor.open(path, action) {
|
||||
Ok(id) => doc_mut!(cx.editor, &id),
|
||||
Err(e) => {
|
||||
cx.editor.set_error(format!(
|
||||
"Failed to open file '{}': {}",
|
||||
path.display(),
|
||||
e
|
||||
));
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let line_num = *line_num;
|
||||
let view = view_mut!(cx.editor);
|
||||
let text = doc.text();
|
||||
if line_num >= text.len_lines() {
|
||||
cx.editor.set_error(
|
||||
"The line you jumped to does not exist anymore because the file has changed.",
|
||||
);
|
||||
return;
|
||||
}
|
||||
let start = text.line_to_char(line_num);
|
||||
let end = text.line_to_char((line_num + 1).min(text.len_lines()));
|
||||
|
||||
doc.set_selection(view.id, Selection::single(start, end));
|
||||
if action.align_view(view, doc.id()) {
|
||||
align_view(doc, view, Align::Center);
|
||||
}
|
||||
},
|
||||
)
|
||||
.with_preview(
|
||||
|_editor, FileResult { path, line_num }| {
|
||||
Some((path.clone().into(), Some((*line_num, *line_num))))
|
||||
},
|
||||
);
|
||||
compositor.push(Box::new(overlaid(picker)))
|
||||
};
|
||||
Ok(Callback::EditorCompositor(Box::new(call)))
|
||||
})
|
||||
} else {
|
||||
// Otherwise do nothing
|
||||
// log::warn!("Global Search Invalid Pattern")
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
let current_path = doc_mut!(cx.editor).path().cloned();
|
||||
|
||||
let show_picker = async move {
|
||||
let all_matches: Vec<FileResult> =
|
||||
UnboundedReceiverStream::new(all_matches_rx).collect().await;
|
||||
let call: job::Callback = Callback::EditorCompositor(Box::new(
|
||||
move |editor: &mut Editor, compositor: &mut Compositor| {
|
||||
if all_matches.is_empty() {
|
||||
if !editor.is_err() {
|
||||
editor.set_status("No matches found");
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
let picker = Picker::new(
|
||||
all_matches,
|
||||
current_path,
|
||||
move |cx, FileResult { path, line_num }, action| {
|
||||
let doc = match cx.editor.open(path, action) {
|
||||
Ok(id) => doc_mut!(cx.editor, &id),
|
||||
Err(e) => {
|
||||
cx.editor.set_error(format!(
|
||||
"Failed to open file '{}': {}",
|
||||
path.display(),
|
||||
e
|
||||
));
|
||||
return;
|
||||
}
|
||||
};
|
||||
let line_num = *line_num;
|
||||
let view = view_mut!(cx.editor);
|
||||
let text = doc.text();
|
||||
if line_num >= text.len_lines() {
|
||||
cx.editor.set_error("The line you jumped to does not exist anymore because the file has changed.");
|
||||
return;
|
||||
}
|
||||
let start = text.line_to_char(line_num);
|
||||
let end = text.line_to_char((line_num + 1).min(text.len_lines()));
|
||||
|
||||
doc.set_selection(view.id, Selection::single(start, end));
|
||||
if action.align_view(view, doc.id()){
|
||||
align_view(doc, view, Align::Center);
|
||||
}
|
||||
}).with_preview(|_editor, FileResult { path, line_num }| {
|
||||
Some((path.clone().into(), Some((*line_num, *line_num))))
|
||||
});
|
||||
compositor.push(Box::new(overlaid(picker)));
|
||||
},
|
||||
));
|
||||
Ok(call)
|
||||
};
|
||||
cx.jobs.callback(show_picker);
|
||||
}
|
||||
|
||||
enum Extend {
|
||||
|
@ -4310,8 +4313,8 @@ fn keep_or_remove_selections_impl(cx: &mut Context, remove: bool) {
|
|||
if remove { "remove:" } else { "keep:" }.into(),
|
||||
Some(reg),
|
||||
ui::completers::none,
|
||||
move |editor, regex, event| {
|
||||
let (view, doc) = current!(editor);
|
||||
move |cx, regex, event| {
|
||||
let (view, doc) = current!(cx.editor);
|
||||
if !matches!(event, PromptEvent::Update | PromptEvent::Validate) {
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -5,6 +5,7 @@ use crate::job::Job;
|
|||
|
||||
use super::*;
|
||||
|
||||
use helix_core::fuzzy::fuzzy_match;
|
||||
use helix_core::{encoding, shellwords::Shellwords};
|
||||
use helix_view::document::DEFAULT_LANGUAGE_NAME;
|
||||
use helix_view::editor::{Action, CloseError, ConfigEvent};
|
||||
|
@ -1265,12 +1266,10 @@ fn reload(
|
|||
}
|
||||
|
||||
let scrolloff = cx.editor.config().scrolloff;
|
||||
let redraw_handle = cx.editor.redraw_handle.clone();
|
||||
let (view, doc) = current!(cx.editor);
|
||||
doc.reload(view, &cx.editor.diff_providers, redraw_handle)
|
||||
.map(|_| {
|
||||
view.ensure_cursor_in_view(doc, scrolloff);
|
||||
})?;
|
||||
doc.reload(view, &cx.editor.diff_providers).map(|_| {
|
||||
view.ensure_cursor_in_view(doc, scrolloff);
|
||||
})?;
|
||||
if let Some(path) = doc.path() {
|
||||
cx.editor
|
||||
.language_servers
|
||||
|
@ -1316,8 +1315,7 @@ fn reload_all(
|
|||
// Ensure that the view is synced with the document's history.
|
||||
view.sync_changes(doc);
|
||||
|
||||
let redraw_handle = cx.editor.redraw_handle.clone();
|
||||
doc.reload(view, &cx.editor.diff_providers, redraw_handle)?;
|
||||
doc.reload(view, &cx.editor.diff_providers)?;
|
||||
if let Some(path) = doc.path() {
|
||||
cx.editor
|
||||
.language_servers
|
||||
|
@ -2902,28 +2900,18 @@ pub(super) fn command_mode(cx: &mut Context) {
|
|||
":".into(),
|
||||
Some(':'),
|
||||
|editor: &Editor, input: &str| {
|
||||
static FUZZY_MATCHER: Lazy<fuzzy_matcher::skim::SkimMatcherV2> =
|
||||
Lazy::new(fuzzy_matcher::skim::SkimMatcherV2::default);
|
||||
|
||||
let shellwords = Shellwords::from(input);
|
||||
let words = shellwords.words();
|
||||
|
||||
if words.is_empty() || (words.len() == 1 && !shellwords.ends_with_whitespace()) {
|
||||
// If the command has not been finished yet, complete commands.
|
||||
let mut matches: Vec<_> = typed::TYPABLE_COMMAND_LIST
|
||||
.iter()
|
||||
.filter_map(|command| {
|
||||
FUZZY_MATCHER
|
||||
.fuzzy_match(command.name, input)
|
||||
.map(|score| (command.name, score))
|
||||
})
|
||||
.collect();
|
||||
|
||||
matches.sort_unstable_by_key(|(_file, score)| std::cmp::Reverse(*score));
|
||||
matches
|
||||
.into_iter()
|
||||
.map(|(name, _)| (0.., name.into()))
|
||||
.collect()
|
||||
fuzzy_match(
|
||||
input,
|
||||
TYPABLE_COMMAND_LIST.iter().map(|command| command.name),
|
||||
false,
|
||||
)
|
||||
.into_iter()
|
||||
.map(|(name, _)| (0.., name.into()))
|
||||
.collect()
|
||||
} else {
|
||||
// Otherwise, use the command's completer and the last shellword
|
||||
// as completion input.
|
||||
|
|
|
@ -1,239 +0,0 @@
|
|||
use fuzzy_matcher::skim::SkimMatcherV2 as Matcher;
|
||||
use fuzzy_matcher::FuzzyMatcher;
|
||||
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
|
||||
/// A single atom of a picker query (e.g. `^foo`, `'bar`, `!baz`) together
/// with the matching behavior parsed from its marker characters.
struct QueryAtom {
    // How the atom text must match an item (fuzzy, substring, prefix, ...).
    kind: QueryAtomKind,
    // The atom text with the kind/inverse marker characters stripped.
    atom: String,
    // Whether matching is done case-insensitively (set for non-fuzzy,
    // all-ASCII-lowercase atoms — smart-case-like behavior).
    ignore_case: bool,
    // `true` when the atom was prefixed with `!`: the match result is negated.
    inverse: bool,
}
|
||||
impl QueryAtom {
    /// Parses a single query word into an atom, stripping the `!` (inverse),
    /// `^` (prefix), `'` (substring) and unescaped trailing `$`
    /// (postfix/exact) markers. Returns `None` if nothing remains after the
    /// markers are stripped.
    fn new(atom: &str) -> Option<QueryAtom> {
        let mut atom = atom.to_string();
        let inverse = atom.starts_with('!');
        if inverse {
            atom.remove(0);
        }

        let mut kind = match atom.chars().next() {
            Some('^') => QueryAtomKind::Prefix,
            Some('\'') => QueryAtomKind::Substring,
            // a bare inverted atom (`!foo`) means "does not contain foo"
            _ if inverse => QueryAtomKind::Substring,
            _ => QueryAtomKind::Fuzzy,
        };

        if atom.starts_with(['^', '\'']) {
            atom.remove(0);
        }

        if atom.is_empty() {
            return None;
        }

        // an unescaped trailing `$` anchors the atom to the end of the item;
        // combined with `^` it becomes an exact match
        if atom.ends_with('$') && !atom.ends_with("\\$") {
            atom.pop();
            kind = if kind == QueryAtomKind::Prefix {
                QueryAtomKind::Exact
            } else {
                QueryAtomKind::Postfix
            }
        }

        Some(QueryAtom {
            kind,
            atom: atom.replace('\\', ""),
            // not ideal but fuzzy_matches only knows ascii uppercase so more consistent
            // to behave the same
            ignore_case: kind != QueryAtomKind::Fuzzy
                && atom.chars().all(|c| c.is_ascii_lowercase()),
            inverse,
        })
    }

    /// Like [`QueryAtom::matches`] but additionally appends the positions of
    /// the matched characters within `item` to `indices` (as reported by the
    /// matcher for fuzzy atoms; nothing is appended for inverse atoms).
    /// Returns whether the atom matched.
    fn indices(&self, matcher: &Matcher, item: &str, indices: &mut Vec<usize>) -> bool {
        // for inverse there are no indices to return
        // just return whether we matched
        if self.inverse {
            return self.matches(matcher, item);
        }
        // `buf` keeps the lowercased copy alive while `item` borrows it
        let buf;
        let item = if self.ignore_case {
            buf = item.to_ascii_lowercase();
            &buf
        } else {
            item
        };
        let off = match self.kind {
            QueryAtomKind::Fuzzy => {
                if let Some((_, fuzzy_indices)) = matcher.fuzzy_indices(item, &self.atom) {
                    indices.extend_from_slice(&fuzzy_indices);
                    return true;
                } else {
                    return false;
                }
            }
            QueryAtomKind::Substring => {
                if let Some(off) = item.find(&self.atom) {
                    off
                } else {
                    return false;
                }
            }
            QueryAtomKind::Prefix if item.starts_with(&self.atom) => 0,
            QueryAtomKind::Postfix if item.ends_with(&self.atom) => item.len() - self.atom.len(),
            QueryAtomKind::Exact if item == self.atom => 0,
            _ => return false,
        };

        // non-fuzzy kinds match a contiguous run starting at `off`
        indices.extend(off..(off + self.atom.len()));
        true
    }

    /// Returns whether this atom matches `item`, honoring the atom's kind,
    /// case handling, and inversion.
    fn matches(&self, matcher: &Matcher, item: &str) -> bool {
        // `buf` keeps the lowercased copy alive while `item` borrows it
        let buf;
        let item = if self.ignore_case {
            buf = item.to_ascii_lowercase();
            &buf
        } else {
            item
        };
        let mut res = match self.kind {
            QueryAtomKind::Fuzzy => matcher.fuzzy_match(item, &self.atom).is_some(),
            QueryAtomKind::Substring => item.contains(&self.atom),
            QueryAtomKind::Prefix => item.starts_with(&self.atom),
            QueryAtomKind::Postfix => item.ends_with(&self.atom),
            QueryAtomKind::Exact => item == self.atom,
        };
        if self.inverse {
            res = !res;
        }
        res
    }
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
enum QueryAtomKind {
|
||||
/// Item is a fuzzy match of this behaviour
|
||||
///
|
||||
/// Usage: `foo`
|
||||
Fuzzy,
|
||||
/// Item contains query atom as a continuous substring
|
||||
///
|
||||
/// Usage `'foo`
|
||||
Substring,
|
||||
/// Item starts with query atom
|
||||
///
|
||||
/// Usage: `^foo`
|
||||
Prefix,
|
||||
/// Item ends with query atom
|
||||
///
|
||||
/// Usage: `foo$`
|
||||
Postfix,
|
||||
/// Item is equal to query atom
|
||||
///
|
||||
/// Usage `^foo$`
|
||||
Exact,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct FuzzyQuery {
|
||||
first_fuzzy_atom: Option<String>,
|
||||
query_atoms: Vec<QueryAtom>,
|
||||
}
|
||||
|
||||
fn query_atoms(query: &str) -> impl Iterator<Item = &str> + '_ {
|
||||
let mut saw_backslash = false;
|
||||
query.split(move |c| {
|
||||
saw_backslash = match c {
|
||||
' ' if !saw_backslash => return true,
|
||||
'\\' => true,
|
||||
_ => false,
|
||||
};
|
||||
false
|
||||
})
|
||||
}
|
||||
|
||||
impl FuzzyQuery {
|
||||
pub fn refine(&self, query: &str, old_query: &str) -> (FuzzyQuery, bool) {
|
||||
// TODO: we could be a lot smarter about this
|
||||
let new_query = Self::new(query);
|
||||
let mut is_refinement = query.starts_with(old_query);
|
||||
|
||||
// if the last atom is an inverse atom adding more text to it
|
||||
// will actually increase the number of matches and we can not refine
|
||||
// the matches.
|
||||
if is_refinement && !self.query_atoms.is_empty() {
|
||||
let last_idx = self.query_atoms.len() - 1;
|
||||
if self.query_atoms[last_idx].inverse
|
||||
&& self.query_atoms[last_idx].atom != new_query.query_atoms[last_idx].atom
|
||||
{
|
||||
is_refinement = false;
|
||||
}
|
||||
}
|
||||
|
||||
(new_query, is_refinement)
|
||||
}
|
||||
|
||||
pub fn new(query: &str) -> FuzzyQuery {
|
||||
let mut first_fuzzy_query = None;
|
||||
let query_atoms = query_atoms(query)
|
||||
.filter_map(|atom| {
|
||||
let atom = QueryAtom::new(atom)?;
|
||||
if atom.kind == QueryAtomKind::Fuzzy && first_fuzzy_query.is_none() {
|
||||
first_fuzzy_query = Some(atom.atom);
|
||||
None
|
||||
} else {
|
||||
Some(atom)
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
FuzzyQuery {
|
||||
first_fuzzy_atom: first_fuzzy_query,
|
||||
query_atoms,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fuzzy_match(&self, item: &str, matcher: &Matcher) -> Option<i64> {
|
||||
// use the rank of the first fuzzzy query for the rank, because merging ranks is not really possible
|
||||
// this behaviour matches fzf and skim
|
||||
let score = self
|
||||
.first_fuzzy_atom
|
||||
.as_ref()
|
||||
.map_or(Some(0), |atom| matcher.fuzzy_match(item, atom))?;
|
||||
if self
|
||||
.query_atoms
|
||||
.iter()
|
||||
.any(|atom| !atom.matches(matcher, item))
|
||||
{
|
||||
return None;
|
||||
}
|
||||
Some(score)
|
||||
}
|
||||
|
||||
pub fn fuzzy_indices(&self, item: &str, matcher: &Matcher) -> Option<(i64, Vec<usize>)> {
|
||||
let (score, mut indices) = self.first_fuzzy_atom.as_ref().map_or_else(
|
||||
|| Some((0, Vec::new())),
|
||||
|atom| matcher.fuzzy_indices(item, atom),
|
||||
)?;
|
||||
|
||||
// fast path for the common case of just a single atom
|
||||
if self.query_atoms.is_empty() {
|
||||
return Some((score, indices));
|
||||
}
|
||||
|
||||
for atom in &self.query_atoms {
|
||||
if !atom.indices(matcher, item, &mut indices) {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
// deadup and remove duplicate matches
|
||||
indices.sort_unstable();
|
||||
indices.dedup();
|
||||
|
||||
Some((score, indices))
|
||||
}
|
||||
}
|
|
@ -1,47 +0,0 @@
|
|||
use crate::ui::fuzzy_match::FuzzyQuery;
|
||||
use crate::ui::fuzzy_match::Matcher;
|
||||
|
||||
fn run_test<'a>(query: &str, items: &'a [&'a str]) -> Vec<String> {
|
||||
let query = FuzzyQuery::new(query);
|
||||
let matcher = Matcher::default();
|
||||
items
|
||||
.iter()
|
||||
.filter_map(|item| {
|
||||
let (_, indices) = query.fuzzy_indices(item, &matcher)?;
|
||||
let matched_string = indices
|
||||
.iter()
|
||||
.map(|&pos| item.chars().nth(pos).unwrap())
|
||||
.collect();
|
||||
Some(matched_string)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn match_single_value() {
|
||||
let matches = run_test("foo", &["foobar", "foo", "bar"]);
|
||||
assert_eq!(matches, &["foo", "foo"])
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn match_multiple_values() {
|
||||
let matches = run_test(
|
||||
"foo bar",
|
||||
&["foo bar", "foo bar", "bar foo", "bar", "foo"],
|
||||
);
|
||||
assert_eq!(matches, &["foobar", "foobar", "barfoo"])
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn space_escape() {
|
||||
let matches = run_test(r"foo\ bar", &["bar foo", "foo bar", "foobar"]);
|
||||
assert_eq!(matches, &["foo bar"])
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn trim() {
|
||||
let matches = run_test(r" foo bar ", &["bar foo", "foo bar", "foobar"]);
|
||||
assert_eq!(matches, &["barfoo", "foobar", "foobar"]);
|
||||
let matches = run_test(r" foo bar\ ", &["bar foo", "foo bar", "foobar"]);
|
||||
assert_eq!(matches, &["bar foo"])
|
||||
}
|
|
@ -1,22 +1,22 @@
|
|||
use std::{borrow::Cow, path::PathBuf};
|
||||
use std::{borrow::Cow, cmp::Reverse, path::PathBuf};
|
||||
|
||||
use crate::{
|
||||
compositor::{Callback, Component, Compositor, Context, Event, EventResult},
|
||||
ctrl, key, shift,
|
||||
};
|
||||
use helix_core::fuzzy::MATCHER;
|
||||
use nucleo::pattern::{AtomKind, CaseMatching, Pattern};
|
||||
use nucleo::{Config, Utf32Str};
|
||||
use tui::{buffer::Buffer as Surface, widgets::Table};
|
||||
|
||||
pub use tui::widgets::{Cell, Row};
|
||||
|
||||
use fuzzy_matcher::skim::SkimMatcherV2 as Matcher;
|
||||
use fuzzy_matcher::FuzzyMatcher;
|
||||
|
||||
use helix_view::{editor::SmartTabConfig, graphics::Rect, Editor};
|
||||
use tui::layout::Constraint;
|
||||
|
||||
pub trait Item {
|
||||
pub trait Item: Sync + Send + 'static {
|
||||
/// Additional editor state that is used for label calculation.
|
||||
type Data;
|
||||
type Data: Sync + Send + 'static;
|
||||
|
||||
fn format(&self, data: &Self::Data) -> Row;
|
||||
|
||||
|
@ -51,9 +51,8 @@ pub struct Menu<T: Item> {
|
|||
|
||||
cursor: Option<usize>,
|
||||
|
||||
matcher: Box<Matcher>,
|
||||
/// (index, score)
|
||||
matches: Vec<(usize, i64)>,
|
||||
matches: Vec<(u32, u32)>,
|
||||
|
||||
widths: Vec<Constraint>,
|
||||
|
||||
|
@ -75,11 +74,10 @@ impl<T: Item> Menu<T> {
|
|||
editor_data: <T as Item>::Data,
|
||||
callback_fn: impl Fn(&mut Editor, Option<&T>, MenuEvent) + 'static,
|
||||
) -> Self {
|
||||
let matches = (0..options.len()).map(|i| (i, 0)).collect();
|
||||
let matches = (0..options.len() as u32).map(|i| (i, 0)).collect();
|
||||
Self {
|
||||
options,
|
||||
editor_data,
|
||||
matcher: Box::new(Matcher::default().ignore_case()),
|
||||
matches,
|
||||
cursor: None,
|
||||
widths: Vec::new(),
|
||||
|
@ -94,20 +92,19 @@ impl<T: Item> Menu<T> {
|
|||
pub fn score(&mut self, pattern: &str) {
|
||||
// reuse the matches allocation
|
||||
self.matches.clear();
|
||||
self.matches.extend(
|
||||
self.options
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(index, option)| {
|
||||
let text = option.filter_text(&self.editor_data);
|
||||
// TODO: using fuzzy_indices could give us the char idx for match highlighting
|
||||
self.matcher
|
||||
.fuzzy_match(&text, pattern)
|
||||
.map(|score| (index, score))
|
||||
}),
|
||||
);
|
||||
// Order of equal elements needs to be preserved as LSP preselected items come in order of high to low priority
|
||||
self.matches.sort_by_key(|(_, score)| -score);
|
||||
let mut matcher = MATCHER.lock();
|
||||
matcher.config = Config::DEFAULT;
|
||||
let pattern = Pattern::new(pattern, CaseMatching::Ignore, AtomKind::Fuzzy);
|
||||
let mut buf = Vec::new();
|
||||
let matches = self.options.iter().enumerate().filter_map(|(i, option)| {
|
||||
let text = option.filter_text(&self.editor_data);
|
||||
pattern
|
||||
.score(Utf32Str::new(&text, &mut buf), &mut matcher)
|
||||
.map(|score| (i as u32, score))
|
||||
});
|
||||
self.matches.extend(matches);
|
||||
self.matches
|
||||
.sort_unstable_by_key(|&(i, score)| (Reverse(score), i));
|
||||
|
||||
// reset cursor position
|
||||
self.cursor = None;
|
||||
|
@ -201,7 +198,7 @@ impl<T: Item> Menu<T> {
|
|||
self.cursor.and_then(|cursor| {
|
||||
self.matches
|
||||
.get(cursor)
|
||||
.map(|(index, _score)| &self.options[*index])
|
||||
.map(|(index, _score)| &self.options[*index as usize])
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -209,7 +206,7 @@ impl<T: Item> Menu<T> {
|
|||
self.cursor.and_then(|cursor| {
|
||||
self.matches
|
||||
.get(cursor)
|
||||
.map(|(index, _score)| &mut self.options[*index])
|
||||
.map(|(index, _score)| &mut self.options[*index as usize])
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -332,7 +329,7 @@ impl<T: Item + 'static> Component for Menu<T> {
|
|||
.iter()
|
||||
.map(|(index, _score)| {
|
||||
// (index, self.options.get(*index).unwrap()) // get_unchecked
|
||||
&self.options[*index] // get_unchecked
|
||||
&self.options[*index as usize] // get_unchecked
|
||||
})
|
||||
.collect();
|
||||
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
mod completion;
|
||||
mod document;
|
||||
pub(crate) mod editor;
|
||||
mod fuzzy_match;
|
||||
mod info;
|
||||
pub mod lsp;
|
||||
mod markdown;
|
||||
|
@ -64,7 +63,7 @@ pub fn regex_prompt(
|
|||
prompt: std::borrow::Cow<'static, str>,
|
||||
history_register: Option<char>,
|
||||
completion_fn: impl FnMut(&Editor, &str) -> Vec<prompt::Completion> + 'static,
|
||||
fun: impl Fn(&mut Editor, Regex, PromptEvent) + 'static,
|
||||
fun: impl Fn(&mut crate::compositor::Context, Regex, PromptEvent) + 'static,
|
||||
) {
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let doc_id = view.doc;
|
||||
|
@ -111,7 +110,7 @@ pub fn regex_prompt(
|
|||
view.jumps.push((doc_id, snapshot.clone()));
|
||||
}
|
||||
|
||||
fun(cx.editor, regex, event);
|
||||
fun(cx, regex, event);
|
||||
|
||||
let (view, doc) = current!(cx.editor);
|
||||
view.ensure_cursor_in_view(doc, config.scrolloff);
|
||||
|
@ -174,6 +173,7 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> Picker
|
|||
.git_ignore(config.file_picker.git_ignore)
|
||||
.git_global(config.file_picker.git_global)
|
||||
.git_exclude(config.file_picker.git_exclude)
|
||||
.sort_by_file_name(|name1, name2| name1.cmp(name2))
|
||||
.max_depth(config.file_picker.max_depth)
|
||||
.filter_entry(move |entry| filter_picker_entry(entry, &absolute_root, dedup_symlinks));
|
||||
|
||||
|
@ -190,32 +190,16 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> Picker
|
|||
.build()
|
||||
.expect("failed to build excluded_types");
|
||||
walk_builder.types(excluded_types);
|
||||
|
||||
// We want files along with their modification date for sorting
|
||||
let files = walk_builder.build().filter_map(|entry| {
|
||||
let entry = entry.ok()?;
|
||||
// This is faster than entry.path().is_dir() since it uses cached fs::Metadata fetched by ignore/walkdir
|
||||
if entry.file_type()?.is_file() {
|
||||
Some(entry.into_path())
|
||||
} else {
|
||||
None
|
||||
if !entry.file_type()?.is_file() {
|
||||
return None;
|
||||
}
|
||||
Some(entry.into_path())
|
||||
});
|
||||
|
||||
// Cap the number of files if we aren't in a git project, preventing
|
||||
// hangs when using the picker in your home directory
|
||||
let mut files: Vec<PathBuf> = if root.join(".git").exists() {
|
||||
files.collect()
|
||||
} else {
|
||||
// const MAX: usize = 8192;
|
||||
const MAX: usize = 100_000;
|
||||
files.take(MAX).collect()
|
||||
};
|
||||
files.sort();
|
||||
|
||||
log::debug!("file_picker init {:?}", Instant::now().duration_since(now));
|
||||
|
||||
Picker::new(files, root, move |cx, path: &PathBuf, action| {
|
||||
let picker = Picker::new(Vec::new(), root, move |cx, path: &PathBuf, action| {
|
||||
if let Err(e) = cx.editor.open(path, action) {
|
||||
let err = if let Some(err) = e.source() {
|
||||
format!("{}", err)
|
||||
|
@ -225,20 +209,27 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> Picker
|
|||
cx.editor.set_error(err);
|
||||
}
|
||||
})
|
||||
.with_preview(|_editor, path| Some((path.clone().into(), None)))
|
||||
.with_preview(|_editor, path| Some((path.clone().into(), None)));
|
||||
let injector = picker.injector();
|
||||
std::thread::spawn(move || {
|
||||
for file in files {
|
||||
if injector.push(file).is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
picker
|
||||
}
|
||||
|
||||
pub mod completers {
|
||||
use crate::ui::prompt::Completion;
|
||||
use fuzzy_matcher::skim::SkimMatcherV2 as Matcher;
|
||||
use fuzzy_matcher::FuzzyMatcher;
|
||||
use helix_core::fuzzy::fuzzy_match;
|
||||
use helix_core::syntax::LanguageServerFeature;
|
||||
use helix_view::document::SCRATCH_BUFFER_NAME;
|
||||
use helix_view::theme;
|
||||
use helix_view::{editor::Config, Editor};
|
||||
use once_cell::sync::Lazy;
|
||||
use std::borrow::Cow;
|
||||
use std::cmp::Reverse;
|
||||
|
||||
pub type Completer = fn(&Editor, &str) -> Vec<Completion>;
|
||||
|
||||
|
@ -247,31 +238,16 @@ pub mod completers {
|
|||
}
|
||||
|
||||
pub fn buffer(editor: &Editor, input: &str) -> Vec<Completion> {
|
||||
let mut names: Vec<_> = editor
|
||||
.documents
|
||||
.values()
|
||||
.map(|doc| {
|
||||
let name = doc
|
||||
.relative_path()
|
||||
.map(|p| p.display().to_string())
|
||||
.unwrap_or_else(|| String::from(SCRATCH_BUFFER_NAME));
|
||||
((0..), Cow::from(name))
|
||||
})
|
||||
.collect();
|
||||
let names = editor.documents.values().map(|doc| {
|
||||
doc.relative_path()
|
||||
.map(|p| p.display().to_string().into())
|
||||
.unwrap_or_else(|| Cow::from(SCRATCH_BUFFER_NAME))
|
||||
});
|
||||
|
||||
let matcher = Matcher::default();
|
||||
|
||||
let mut matches: Vec<_> = names
|
||||
fuzzy_match(input, names, true)
|
||||
.into_iter()
|
||||
.filter_map(|(_range, name)| {
|
||||
matcher.fuzzy_match(&name, input).map(|score| (name, score))
|
||||
})
|
||||
.collect();
|
||||
|
||||
matches.sort_unstable_by_key(|(_file, score)| Reverse(*score));
|
||||
names = matches.into_iter().map(|(name, _)| ((0..), name)).collect();
|
||||
|
||||
names
|
||||
.map(|(name, _)| ((0..), name))
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn theme(_editor: &Editor, input: &str) -> Vec<Completion> {
|
||||
|
@ -284,26 +260,10 @@ pub mod completers {
|
|||
names.sort();
|
||||
names.dedup();
|
||||
|
||||
let mut names: Vec<_> = names
|
||||
fuzzy_match(input, names, false)
|
||||
.into_iter()
|
||||
.map(|name| ((0..), Cow::from(name)))
|
||||
.collect();
|
||||
|
||||
let matcher = Matcher::default();
|
||||
|
||||
let mut matches: Vec<_> = names
|
||||
.into_iter()
|
||||
.filter_map(|(_range, name)| {
|
||||
matcher.fuzzy_match(&name, input).map(|score| (name, score))
|
||||
})
|
||||
.collect();
|
||||
|
||||
matches.sort_unstable_by(|(name1, score1), (name2, score2)| {
|
||||
(Reverse(*score1), name1).cmp(&(Reverse(*score2), name2))
|
||||
});
|
||||
names = matches.into_iter().map(|(name, _)| ((0..), name)).collect();
|
||||
|
||||
names
|
||||
.map(|(name, _)| ((0..), name.into()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Recursive function to get all keys from this value and add them to vec
|
||||
|
@ -330,15 +290,7 @@ pub mod completers {
|
|||
keys
|
||||
});
|
||||
|
||||
let matcher = Matcher::default();
|
||||
|
||||
let mut matches: Vec<_> = KEYS
|
||||
.iter()
|
||||
.filter_map(|name| matcher.fuzzy_match(name, input).map(|score| (name, score)))
|
||||
.collect();
|
||||
|
||||
matches.sort_unstable_by_key(|(_file, score)| Reverse(*score));
|
||||
matches
|
||||
fuzzy_match(input, &*KEYS, false)
|
||||
.into_iter()
|
||||
.map(|(name, _)| ((0..), name.into()))
|
||||
.collect()
|
||||
|
@ -365,8 +317,6 @@ pub mod completers {
|
|||
}
|
||||
|
||||
pub fn language(editor: &Editor, input: &str) -> Vec<Completion> {
|
||||
let matcher = Matcher::default();
|
||||
|
||||
let text: String = "text".into();
|
||||
|
||||
let language_ids = editor
|
||||
|
@ -375,27 +325,13 @@ pub mod completers {
|
|||
.map(|config| &config.language_id)
|
||||
.chain(std::iter::once(&text));
|
||||
|
||||
let mut matches: Vec<_> = language_ids
|
||||
.filter_map(|language_id| {
|
||||
matcher
|
||||
.fuzzy_match(language_id, input)
|
||||
.map(|score| (language_id, score))
|
||||
})
|
||||
.collect();
|
||||
|
||||
matches.sort_unstable_by(|(language1, score1), (language2, score2)| {
|
||||
(Reverse(*score1), language1).cmp(&(Reverse(*score2), language2))
|
||||
});
|
||||
|
||||
matches
|
||||
fuzzy_match(input, language_ids, false)
|
||||
.into_iter()
|
||||
.map(|(language, _score)| ((0..), language.clone().into()))
|
||||
.map(|(name, _)| ((0..), name.to_owned().into()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn lsp_workspace_command(editor: &Editor, input: &str) -> Vec<Completion> {
|
||||
let matcher = Matcher::default();
|
||||
|
||||
let Some(options) = doc!(editor)
|
||||
.language_servers_with_feature(LanguageServerFeature::WorkspaceCommand)
|
||||
.find_map(|ls| ls.capabilities().execute_command_provider.as_ref())
|
||||
|
@ -403,23 +339,9 @@ pub mod completers {
|
|||
return vec![];
|
||||
};
|
||||
|
||||
let mut matches: Vec<_> = options
|
||||
.commands
|
||||
.iter()
|
||||
.filter_map(|command| {
|
||||
matcher
|
||||
.fuzzy_match(command, input)
|
||||
.map(|score| (command, score))
|
||||
})
|
||||
.collect();
|
||||
|
||||
matches.sort_unstable_by(|(command1, score1), (command2, score2)| {
|
||||
(Reverse(*score1), command1).cmp(&(Reverse(*score2), command2))
|
||||
});
|
||||
|
||||
matches
|
||||
fuzzy_match(input, &options.commands, false)
|
||||
.into_iter()
|
||||
.map(|(command, _score)| ((0..), command.clone().into()))
|
||||
.map(|(name, _)| ((0..), name.to_owned().into()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
|
@ -500,7 +422,7 @@ pub mod completers {
|
|||
|
||||
let end = input.len()..;
|
||||
|
||||
let mut files: Vec<_> = WalkBuilder::new(&dir)
|
||||
let files = WalkBuilder::new(&dir)
|
||||
.hidden(false)
|
||||
.follow_links(false) // We're scanning over depth 1
|
||||
.git_ignore(git_ignore)
|
||||
|
@ -532,43 +454,25 @@ pub mod completers {
|
|||
path.push("");
|
||||
}
|
||||
|
||||
let path = path.to_str()?.to_owned();
|
||||
Some((end.clone(), Cow::from(path)))
|
||||
let path = path.into_os_string().into_string().ok()?;
|
||||
Some(Cow::from(path))
|
||||
})
|
||||
}) // TODO: unwrap or skip
|
||||
.filter(|(_, path)| !path.is_empty()) // TODO
|
||||
.collect();
|
||||
.filter(|path| !path.is_empty());
|
||||
|
||||
// if empty, return a list of dirs and files in current dir
|
||||
if let Some(file_name) = file_name {
|
||||
let matcher = Matcher::default();
|
||||
|
||||
// inefficient, but we need to calculate the scores, filter out None, then sort.
|
||||
let mut matches: Vec<_> = files
|
||||
.into_iter()
|
||||
.filter_map(|(_range, file)| {
|
||||
matcher
|
||||
.fuzzy_match(&file, &file_name)
|
||||
.map(|score| (file, score))
|
||||
})
|
||||
.collect();
|
||||
|
||||
let range = (input.len().saturating_sub(file_name.len()))..;
|
||||
|
||||
matches.sort_unstable_by(|(file1, score1), (file2, score2)| {
|
||||
(Reverse(*score1), file1).cmp(&(Reverse(*score2), file2))
|
||||
});
|
||||
|
||||
files = matches
|
||||
fuzzy_match(&file_name, files, true)
|
||||
.into_iter()
|
||||
.map(|(file, _)| (range.clone(), file))
|
||||
.collect();
|
||||
.map(|(name, _)| (range.clone(), name))
|
||||
.collect()
|
||||
|
||||
// TODO: complete to longest common match
|
||||
} else {
|
||||
let mut files: Vec<_> = files.map(|file| (end.clone(), file)).collect();
|
||||
files.sort_unstable_by(|(_, path1), (_, path2)| path1.cmp(path2));
|
||||
files
|
||||
}
|
||||
|
||||
files
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,11 +7,12 @@ use crate::{
|
|||
ui::{
|
||||
self,
|
||||
document::{render_document, LineDecoration, LinePos, TextRenderer},
|
||||
fuzzy_match::FuzzyQuery,
|
||||
EditorView,
|
||||
},
|
||||
};
|
||||
use futures_util::{future::BoxFuture, FutureExt};
|
||||
use nucleo::pattern::CaseMatching;
|
||||
use nucleo::{Config, Nucleo, Utf32String};
|
||||
use tui::{
|
||||
buffer::Buffer as Surface,
|
||||
layout::Constraint,
|
||||
|
@ -19,16 +20,23 @@ use tui::{
|
|||
widgets::{Block, BorderType, Borders, Cell, Table},
|
||||
};
|
||||
|
||||
use fuzzy_matcher::skim::SkimMatcherV2 as Matcher;
|
||||
use tui::widgets::Widget;
|
||||
|
||||
use std::cmp::{self, Ordering};
|
||||
use std::{collections::HashMap, io::Read, path::PathBuf};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
io::Read,
|
||||
path::PathBuf,
|
||||
sync::{
|
||||
atomic::{self, AtomicBool},
|
||||
Arc,
|
||||
},
|
||||
};
|
||||
|
||||
use crate::ui::{Prompt, PromptEvent};
|
||||
use helix_core::{
|
||||
char_idx_at_visual_offset, movement::Direction, text_annotations::TextAnnotations,
|
||||
unicode::segmentation::UnicodeSegmentation, Position, Syntax,
|
||||
char_idx_at_visual_offset, fuzzy::MATCHER, movement::Direction,
|
||||
text_annotations::TextAnnotations, unicode::segmentation::UnicodeSegmentation, Position,
|
||||
Syntax,
|
||||
};
|
||||
use helix_view::{
|
||||
editor::Action,
|
||||
|
@ -114,20 +122,71 @@ impl Preview<'_, '_> {
|
|||
}
|
||||
}
|
||||
|
||||
fn item_to_nucleo<T: Item>(item: T, editor_data: &T::Data) -> Option<(T, Utf32String)> {
|
||||
let row = item.format(editor_data);
|
||||
let mut cells = row.cells.iter();
|
||||
let mut text = String::with_capacity(row.cell_text().map(|cell| cell.len()).sum());
|
||||
let cell = cells.next()?;
|
||||
if let Some(cell) = cell.content.lines.first() {
|
||||
for span in &cell.0 {
|
||||
text.push_str(&span.content);
|
||||
}
|
||||
}
|
||||
|
||||
for cell in cells {
|
||||
text.push(' ');
|
||||
if let Some(cell) = cell.content.lines.first() {
|
||||
for span in &cell.0 {
|
||||
text.push_str(&span.content);
|
||||
}
|
||||
}
|
||||
}
|
||||
Some((item, text.into()))
|
||||
}
|
||||
|
||||
pub struct Injector<T: Item> {
|
||||
dst: nucleo::Injector<T>,
|
||||
editor_data: Arc<T::Data>,
|
||||
shutown: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl<T: Item> Clone for Injector<T> {
|
||||
fn clone(&self) -> Self {
|
||||
Injector {
|
||||
dst: self.dst.clone(),
|
||||
editor_data: self.editor_data.clone(),
|
||||
shutown: Arc::new(AtomicBool::new(false)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct InjectorShutdown;
|
||||
|
||||
impl<T: Item> Injector<T> {
|
||||
pub fn push(&self, item: T) -> Result<(), InjectorShutdown> {
|
||||
if self.shutown.load(atomic::Ordering::Relaxed) {
|
||||
return Err(InjectorShutdown);
|
||||
}
|
||||
|
||||
if let Some((item, matcher_text)) = item_to_nucleo(item, &self.editor_data) {
|
||||
self.dst.push(item, |dst| dst[0] = matcher_text);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Picker<T: Item> {
|
||||
options: Vec<T>,
|
||||
editor_data: T::Data,
|
||||
// filter: String,
|
||||
matcher: Box<Matcher>,
|
||||
matches: Vec<PickerMatch>,
|
||||
editor_data: Arc<T::Data>,
|
||||
shutdown: Arc<AtomicBool>,
|
||||
matcher: Nucleo<T>,
|
||||
|
||||
/// Current height of the completions box
|
||||
completion_height: u16,
|
||||
|
||||
cursor: usize,
|
||||
// pattern: String,
|
||||
cursor: u32,
|
||||
prompt: Prompt,
|
||||
previous_pattern: (String, FuzzyQuery),
|
||||
previous_pattern: String,
|
||||
|
||||
/// Whether to show the preview panel (default true)
|
||||
show_preview: bool,
|
||||
/// Constraints for tabular formatting
|
||||
|
@ -144,10 +203,59 @@ pub struct Picker<T: Item> {
|
|||
}
|
||||
|
||||
impl<T: Item + 'static> Picker<T> {
|
||||
pub fn stream(editor_data: T::Data) -> (Nucleo<T>, Injector<T>) {
|
||||
let matcher = Nucleo::new(
|
||||
Config::DEFAULT,
|
||||
Arc::new(helix_event::request_redraw),
|
||||
None,
|
||||
1,
|
||||
);
|
||||
let streamer = Injector {
|
||||
dst: matcher.injector(),
|
||||
editor_data: Arc::new(editor_data),
|
||||
shutown: Arc::new(AtomicBool::new(false)),
|
||||
};
|
||||
(matcher, streamer)
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
options: Vec<T>,
|
||||
editor_data: T::Data,
|
||||
callback_fn: impl Fn(&mut Context, &T, Action) + 'static,
|
||||
) -> Self {
|
||||
let matcher = Nucleo::new(
|
||||
Config::DEFAULT,
|
||||
Arc::new(helix_event::request_redraw),
|
||||
None,
|
||||
1,
|
||||
);
|
||||
let injector = matcher.injector();
|
||||
for item in options {
|
||||
if let Some((item, matcher_text)) = item_to_nucleo(item, &editor_data) {
|
||||
injector.push(item, |dst| dst[0] = matcher_text);
|
||||
}
|
||||
}
|
||||
Self::with(
|
||||
matcher,
|
||||
Arc::new(editor_data),
|
||||
Arc::new(AtomicBool::new(false)),
|
||||
callback_fn,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn with_stream(
|
||||
matcher: Nucleo<T>,
|
||||
injector: Injector<T>,
|
||||
callback_fn: impl Fn(&mut Context, &T, Action) + 'static,
|
||||
) -> Self {
|
||||
Self::with(matcher, injector.editor_data, injector.shutown, callback_fn)
|
||||
}
|
||||
|
||||
fn with(
|
||||
matcher: Nucleo<T>,
|
||||
editor_data: Arc<T::Data>,
|
||||
shutdown: Arc<AtomicBool>,
|
||||
callback_fn: impl Fn(&mut Context, &T, Action) + 'static,
|
||||
) -> Self {
|
||||
let prompt = Prompt::new(
|
||||
"".into(),
|
||||
|
@ -156,14 +264,13 @@ impl<T: Item + 'static> Picker<T> {
|
|||
|_editor: &mut Context, _pattern: &str, _event: PromptEvent| {},
|
||||
);
|
||||
|
||||
let mut picker = Self {
|
||||
options,
|
||||
Self {
|
||||
matcher,
|
||||
editor_data,
|
||||
matcher: Box::default(),
|
||||
matches: Vec::new(),
|
||||
shutdown,
|
||||
cursor: 0,
|
||||
prompt,
|
||||
previous_pattern: (String::new(), FuzzyQuery::default()),
|
||||
previous_pattern: String::new(),
|
||||
truncate_start: true,
|
||||
show_preview: true,
|
||||
callback_fn: Box::new(callback_fn),
|
||||
|
@ -172,24 +279,15 @@ impl<T: Item + 'static> Picker<T> {
|
|||
preview_cache: HashMap::new(),
|
||||
read_buffer: Vec::with_capacity(1024),
|
||||
file_fn: None,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
picker.calculate_column_widths();
|
||||
|
||||
// scoring on empty input
|
||||
// TODO: just reuse score()
|
||||
picker
|
||||
.matches
|
||||
.extend(picker.options.iter().enumerate().map(|(index, option)| {
|
||||
let text = option.filter_text(&picker.editor_data);
|
||||
PickerMatch {
|
||||
index,
|
||||
score: 0,
|
||||
len: text.chars().count(),
|
||||
}
|
||||
}));
|
||||
|
||||
picker
|
||||
pub fn injector(&self) -> Injector<T> {
|
||||
Injector {
|
||||
dst: self.matcher.injector(),
|
||||
editor_data: self.editor_data.clone(),
|
||||
shutown: self.shutdown.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn truncate_start(mut self, truncate_start: bool) -> Self {
|
||||
|
@ -202,122 +300,25 @@ impl<T: Item + 'static> Picker<T> {
|
|||
preview_fn: impl Fn(&Editor, &T) -> Option<FileLocation> + 'static,
|
||||
) -> Self {
|
||||
self.file_fn = Some(Box::new(preview_fn));
|
||||
// assumption: if we have a preview we are matching paths... If this is ever
|
||||
// not true this could be a separate builder function
|
||||
self.matcher.update_config(Config::DEFAULT.match_paths());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn set_options(&mut self, new_options: Vec<T>) {
|
||||
self.options = new_options;
|
||||
self.cursor = 0;
|
||||
self.force_score();
|
||||
self.calculate_column_widths();
|
||||
}
|
||||
|
||||
/// Calculate the width constraints using the maximum widths of each column
|
||||
/// for the current options.
|
||||
fn calculate_column_widths(&mut self) {
|
||||
let n = self
|
||||
.options
|
||||
.first()
|
||||
.map(|option| option.format(&self.editor_data).cells.len())
|
||||
.unwrap_or_default();
|
||||
let max_lens = self.options.iter().fold(vec![0; n], |mut acc, option| {
|
||||
let row = option.format(&self.editor_data);
|
||||
// maintain max for each column
|
||||
for (acc, cell) in acc.iter_mut().zip(row.cells.iter()) {
|
||||
let width = cell.content.width();
|
||||
if width > *acc {
|
||||
*acc = width;
|
||||
}
|
||||
self.matcher.restart(false);
|
||||
let injector = self.matcher.injector();
|
||||
for item in new_options {
|
||||
if let Some((item, matcher_text)) = item_to_nucleo(item, &self.editor_data) {
|
||||
injector.push(item, |dst| dst[0] = matcher_text);
|
||||
}
|
||||
acc
|
||||
});
|
||||
self.widths = max_lens
|
||||
.into_iter()
|
||||
.map(|len| Constraint::Length(len as u16))
|
||||
.collect();
|
||||
}
|
||||
|
||||
pub fn score(&mut self) {
|
||||
let pattern = self.prompt.line();
|
||||
|
||||
if pattern == &self.previous_pattern.0 {
|
||||
return;
|
||||
}
|
||||
|
||||
let (query, is_refined) = self
|
||||
.previous_pattern
|
||||
.1
|
||||
.refine(pattern, &self.previous_pattern.0);
|
||||
|
||||
if pattern.is_empty() {
|
||||
// Fast path for no pattern.
|
||||
self.matches.clear();
|
||||
self.matches
|
||||
.extend(self.options.iter().enumerate().map(|(index, option)| {
|
||||
let text = option.filter_text(&self.editor_data);
|
||||
PickerMatch {
|
||||
index,
|
||||
score: 0,
|
||||
len: text.chars().count(),
|
||||
}
|
||||
}));
|
||||
} else if is_refined {
|
||||
// optimization: if the pattern is a more specific version of the previous one
|
||||
// then we can score the filtered set.
|
||||
self.matches.retain_mut(|pmatch| {
|
||||
let option = &self.options[pmatch.index];
|
||||
let text = option.sort_text(&self.editor_data);
|
||||
|
||||
match query.fuzzy_match(&text, &self.matcher) {
|
||||
Some(s) => {
|
||||
// Update the score
|
||||
pmatch.score = s;
|
||||
true
|
||||
}
|
||||
None => false,
|
||||
}
|
||||
});
|
||||
|
||||
self.matches.sort_unstable();
|
||||
} else {
|
||||
self.force_score();
|
||||
}
|
||||
|
||||
// reset cursor position
|
||||
self.cursor = 0;
|
||||
let pattern = self.prompt.line();
|
||||
self.previous_pattern.0.clone_from(pattern);
|
||||
self.previous_pattern.1 = query;
|
||||
}
|
||||
|
||||
pub fn force_score(&mut self) {
|
||||
let pattern = self.prompt.line();
|
||||
|
||||
let query = FuzzyQuery::new(pattern);
|
||||
self.matches.clear();
|
||||
self.matches.extend(
|
||||
self.options
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(index, option)| {
|
||||
let text = option.filter_text(&self.editor_data);
|
||||
|
||||
query
|
||||
.fuzzy_match(&text, &self.matcher)
|
||||
.map(|score| PickerMatch {
|
||||
index,
|
||||
score,
|
||||
len: text.chars().count(),
|
||||
})
|
||||
}),
|
||||
);
|
||||
|
||||
self.matches.sort_unstable();
|
||||
}
|
||||
|
||||
/// Move the cursor by a number of lines, either down (`Forward`) or up (`Backward`)
|
||||
pub fn move_by(&mut self, amount: usize, direction: Direction) {
|
||||
let len = self.matches.len();
|
||||
pub fn move_by(&mut self, amount: u32, direction: Direction) {
|
||||
let len = self.matcher.snapshot().matched_item_count();
|
||||
|
||||
if len == 0 {
|
||||
// No results, can't move.
|
||||
|
@ -336,12 +337,12 @@ impl<T: Item + 'static> Picker<T> {
|
|||
|
||||
/// Move the cursor down by exactly one page. After the last page comes the first page.
|
||||
pub fn page_up(&mut self) {
|
||||
self.move_by(self.completion_height as usize, Direction::Backward);
|
||||
self.move_by(self.completion_height as u32, Direction::Backward);
|
||||
}
|
||||
|
||||
/// Move the cursor up by exactly one page. After the first page comes the last page.
|
||||
pub fn page_down(&mut self) {
|
||||
self.move_by(self.completion_height as usize, Direction::Forward);
|
||||
self.move_by(self.completion_height as u32, Direction::Forward);
|
||||
}
|
||||
|
||||
/// Move the cursor to the first entry
|
||||
|
@ -351,13 +352,18 @@ impl<T: Item + 'static> Picker<T> {
|
|||
|
||||
/// Move the cursor to the last entry
|
||||
pub fn to_end(&mut self) {
|
||||
self.cursor = self.matches.len().saturating_sub(1);
|
||||
self.cursor = self
|
||||
.matcher
|
||||
.snapshot()
|
||||
.matched_item_count()
|
||||
.saturating_sub(1);
|
||||
}
|
||||
|
||||
pub fn selection(&self) -> Option<&T> {
|
||||
self.matches
|
||||
.get(self.cursor)
|
||||
.map(|pmatch| &self.options[pmatch.index])
|
||||
self.matcher
|
||||
.snapshot()
|
||||
.get_matched_item(self.cursor)
|
||||
.map(|item| item.data)
|
||||
}
|
||||
|
||||
pub fn toggle_preview(&mut self) {
|
||||
|
@ -366,8 +372,17 @@ impl<T: Item + 'static> Picker<T> {
|
|||
|
||||
fn prompt_handle_event(&mut self, event: &Event, cx: &mut Context) -> EventResult {
|
||||
if let EventResult::Consumed(_) = self.prompt.handle_event(event, cx) {
|
||||
// TODO: recalculate only if pattern changed
|
||||
self.score();
|
||||
let pattern = self.prompt.line();
|
||||
// TODO: better track how the pattern has changed
|
||||
if pattern != &self.previous_pattern {
|
||||
self.matcher.pattern.reparse(
|
||||
0,
|
||||
pattern,
|
||||
CaseMatching::Smart,
|
||||
pattern.starts_with(&self.previous_pattern),
|
||||
);
|
||||
self.previous_pattern = pattern.clone();
|
||||
}
|
||||
}
|
||||
EventResult::Consumed(None)
|
||||
}
|
||||
|
@ -411,12 +426,9 @@ impl<T: Item + 'static> Picker<T> {
|
|||
(size, _) if size > MAX_FILE_SIZE_FOR_PREVIEW => {
|
||||
CachedPreview::LargeFile
|
||||
}
|
||||
_ => {
|
||||
// TODO: enable syntax highlighting; blocked by async rendering
|
||||
Document::open(path, None, None, editor.config.clone())
|
||||
.map(|doc| CachedPreview::Document(Box::new(doc)))
|
||||
.unwrap_or(CachedPreview::NotFound)
|
||||
}
|
||||
_ => Document::open(path, None, None, editor.config.clone())
|
||||
.map(|doc| CachedPreview::Document(Box::new(doc)))
|
||||
.unwrap_or(CachedPreview::NotFound),
|
||||
},
|
||||
)
|
||||
.unwrap_or(CachedPreview::NotFound);
|
||||
|
@ -495,6 +507,14 @@ impl<T: Item + 'static> Picker<T> {
|
|||
}
|
||||
|
||||
fn render_picker(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
|
||||
let status = self.matcher.tick(10);
|
||||
let snapshot = self.matcher.snapshot();
|
||||
if status.changed {
|
||||
self.cursor = self
|
||||
.cursor
|
||||
.min(snapshot.matched_item_count().saturating_sub(1))
|
||||
}
|
||||
|
||||
let text_style = cx.editor.theme.get("ui.text");
|
||||
let selected = cx.editor.theme.get("ui.text.focus");
|
||||
let highlight_style = cx.editor.theme.get("special").add_modifier(Modifier::BOLD);
|
||||
|
@ -515,8 +535,15 @@ impl<T: Item + 'static> Picker<T> {
|
|||
// -- Render the input bar:
|
||||
|
||||
let area = inner.clip_left(1).with_height(1);
|
||||
// render the prompt first since it will clear its background
|
||||
self.prompt.render(area, surface, cx);
|
||||
|
||||
let count = format!("{}/{}", self.matches.len(), self.options.len());
|
||||
let count = format!(
|
||||
"{}{}/{}",
|
||||
if status.running { "(running) " } else { "" },
|
||||
snapshot.matched_item_count(),
|
||||
snapshot.item_count(),
|
||||
);
|
||||
surface.set_stringn(
|
||||
(area.x + area.width).saturating_sub(count.len() as u16 + 1),
|
||||
area.y,
|
||||
|
@ -525,8 +552,6 @@ impl<T: Item + 'static> Picker<T> {
|
|||
text_style,
|
||||
);
|
||||
|
||||
self.prompt.render(area, surface, cx);
|
||||
|
||||
// -- Separator
|
||||
let sep_style = cx.editor.theme.get("ui.background.separator");
|
||||
let borders = BorderType::line_symbols(BorderType::Plain);
|
||||
|
@ -539,106 +564,89 @@ impl<T: Item + 'static> Picker<T> {
|
|||
// -- Render the contents:
|
||||
// subtract area of prompt from top
|
||||
let inner = inner.clip_top(2);
|
||||
|
||||
let rows = inner.height;
|
||||
let offset = self.cursor - (self.cursor % std::cmp::max(1, rows as usize));
|
||||
let rows = inner.height as u32;
|
||||
let offset = self.cursor - (self.cursor % std::cmp::max(1, rows));
|
||||
let cursor = self.cursor.saturating_sub(offset);
|
||||
let end = offset
|
||||
.saturating_add(rows)
|
||||
.min(snapshot.matched_item_count());
|
||||
let mut indices = Vec::new();
|
||||
let mut matcher = MATCHER.lock();
|
||||
matcher.config = Config::DEFAULT;
|
||||
if self.file_fn.is_some() {
|
||||
matcher.config.set_match_paths()
|
||||
}
|
||||
|
||||
let options = self
|
||||
.matches
|
||||
.iter()
|
||||
.skip(offset)
|
||||
.take(rows as usize)
|
||||
.map(|pmatch| &self.options[pmatch.index])
|
||||
.map(|option| option.format(&self.editor_data))
|
||||
.map(|mut row| {
|
||||
const TEMP_CELL_SEP: &str = " ";
|
||||
let options = snapshot.matched_items(offset..end).map(|item| {
|
||||
snapshot.pattern().column_pattern(0).indices(
|
||||
item.matcher_columns[0].slice(..),
|
||||
&mut matcher,
|
||||
&mut indices,
|
||||
);
|
||||
indices.sort_unstable();
|
||||
indices.dedup();
|
||||
let mut row = item.data.format(&self.editor_data);
|
||||
|
||||
let line = row.cell_text().fold(String::new(), |mut s, frag| {
|
||||
s.push_str(&frag);
|
||||
s.push_str(TEMP_CELL_SEP);
|
||||
s
|
||||
});
|
||||
let mut grapheme_idx = 0u32;
|
||||
let mut indices = indices.drain(..);
|
||||
let mut next_highlight_idx = indices.next().unwrap_or(u32::MAX);
|
||||
if self.widths.len() < row.cells.len() {
|
||||
self.widths.resize(row.cells.len(), Constraint::Length(0));
|
||||
}
|
||||
let mut widths = self.widths.iter_mut();
|
||||
for cell in &mut row.cells {
|
||||
let Some(Constraint::Length(max_width)) = widths.next() else {
|
||||
unreachable!();
|
||||
};
|
||||
|
||||
// Items are filtered by using the text returned by menu::Item::filter_text
|
||||
// but we do highlighting here using the text in Row and therefore there
|
||||
// might be inconsistencies. This is the best we can do since only the
|
||||
// text in Row is displayed to the end user.
|
||||
let (_score, highlights) = FuzzyQuery::new(self.prompt.line())
|
||||
.fuzzy_indices(&line, &self.matcher)
|
||||
.unwrap_or_default();
|
||||
// merge index highlights on top of existing hightlights
|
||||
let mut span_list = Vec::new();
|
||||
let mut current_span = String::new();
|
||||
let mut current_style = Style::default();
|
||||
let mut width = 0;
|
||||
|
||||
let highlight_byte_ranges: Vec<_> = line
|
||||
.char_indices()
|
||||
.enumerate()
|
||||
.filter_map(|(char_idx, (byte_offset, ch))| {
|
||||
highlights
|
||||
.contains(&char_idx)
|
||||
.then(|| byte_offset..byte_offset + ch.len_utf8())
|
||||
})
|
||||
.collect();
|
||||
|
||||
// The starting byte index of the current (iterating) cell
|
||||
let mut cell_start_byte_offset = 0;
|
||||
for cell in row.cells.iter_mut() {
|
||||
let spans = match cell.content.lines.get(0) {
|
||||
Some(s) => s,
|
||||
None => {
|
||||
cell_start_byte_offset += TEMP_CELL_SEP.len();
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
let mut cell_len = 0;
|
||||
|
||||
let graphemes_with_style: Vec<_> = spans
|
||||
.0
|
||||
.iter()
|
||||
.flat_map(|span| {
|
||||
span.content
|
||||
.grapheme_indices(true)
|
||||
.zip(std::iter::repeat(span.style))
|
||||
})
|
||||
.map(|((grapheme_byte_offset, grapheme), style)| {
|
||||
cell_len += grapheme.len();
|
||||
let start = cell_start_byte_offset;
|
||||
|
||||
let grapheme_byte_range =
|
||||
grapheme_byte_offset..grapheme_byte_offset + grapheme.len();
|
||||
|
||||
if highlight_byte_ranges.iter().any(|hl_rng| {
|
||||
hl_rng.start >= start + grapheme_byte_range.start
|
||||
&& hl_rng.end <= start + grapheme_byte_range.end
|
||||
}) {
|
||||
(grapheme, style.patch(highlight_style))
|
||||
} else {
|
||||
(grapheme, style)
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut span_list: Vec<(String, Style)> = Vec::new();
|
||||
for (grapheme, style) in graphemes_with_style {
|
||||
if span_list.last().map(|(_, sty)| sty) == Some(&style) {
|
||||
let (string, _) = span_list.last_mut().unwrap();
|
||||
string.push_str(grapheme);
|
||||
let spans: &[Span] = cell.content.lines.first().map_or(&[], |it| it.0.as_slice());
|
||||
for span in spans {
|
||||
// this looks like a bug on first glance, we are iterating
|
||||
// graphemes but treating them as char indices. The reason that
|
||||
// this is correct is that nucleo will only ever consider the first char
|
||||
// of a grapheme (and discard the rest of the grapheme) so the indices
|
||||
// returned by nucleo are essentially grapheme indecies
|
||||
for grapheme in span.content.graphemes(true) {
|
||||
let style = if grapheme_idx == next_highlight_idx {
|
||||
next_highlight_idx = indices.next().unwrap_or(u32::MAX);
|
||||
span.style.patch(highlight_style)
|
||||
} else {
|
||||
span_list.push((String::from(grapheme), style))
|
||||
span.style
|
||||
};
|
||||
if style != current_style {
|
||||
if !current_span.is_empty() {
|
||||
span_list.push(Span::styled(current_span, current_style))
|
||||
}
|
||||
current_span = String::new();
|
||||
current_style = style;
|
||||
}
|
||||
current_span.push_str(grapheme);
|
||||
grapheme_idx += 1;
|
||||
}
|
||||
|
||||
let spans: Vec<Span> = span_list
|
||||
.into_iter()
|
||||
.map(|(string, style)| Span::styled(string, style))
|
||||
.collect();
|
||||
let spans: Spans = spans.into();
|
||||
*cell = Cell::from(spans);
|
||||
|
||||
cell_start_byte_offset += cell_len + TEMP_CELL_SEP.len();
|
||||
width += span.width();
|
||||
}
|
||||
|
||||
row
|
||||
});
|
||||
span_list.push(Span::styled(current_span, current_style));
|
||||
if width as u16 > *max_width {
|
||||
*max_width = width as u16;
|
||||
}
|
||||
*cell = Cell::from(Spans::from(span_list));
|
||||
|
||||
// spacer
|
||||
if grapheme_idx == next_highlight_idx {
|
||||
next_highlight_idx = indices.next().unwrap_or(u32::MAX);
|
||||
}
|
||||
grapheme_idx += 1;
|
||||
}
|
||||
|
||||
row
|
||||
});
|
||||
|
||||
let table = Table::new(options)
|
||||
.style(text_style)
|
||||
|
@ -654,7 +662,7 @@ impl<T: Item + 'static> Picker<T> {
|
|||
surface,
|
||||
&mut TableState {
|
||||
offset: 0,
|
||||
selected: Some(cursor),
|
||||
selected: Some(cursor as usize),
|
||||
},
|
||||
self.truncate_start,
|
||||
);
|
||||
|
@ -755,7 +763,7 @@ impl<T: Item + 'static> Picker<T> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: Item + 'static> Component for Picker<T> {
|
||||
impl<T: Item + 'static + Send + Sync> Component for Picker<T> {
|
||||
fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
|
||||
// +---------+ +---------+
|
||||
// |prompt | |preview |
|
||||
|
@ -875,29 +883,10 @@ impl<T: Item + 'static> Component for Picker<T> {
|
|||
Some((width, height))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug)]
|
||||
struct PickerMatch {
|
||||
score: i64,
|
||||
index: usize,
|
||||
len: usize,
|
||||
}
|
||||
|
||||
impl PickerMatch {
|
||||
fn key(&self) -> impl Ord {
|
||||
(cmp::Reverse(self.score), self.len, self.index)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for PickerMatch {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for PickerMatch {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
self.key().cmp(&other.key())
|
||||
impl<T: Item> Drop for Picker<T> {
|
||||
fn drop(&mut self) {
|
||||
// ensure we cancel any ongoing background threads streaming into the picker
|
||||
self.shutdown.store(true, atomic::Ordering::Relaxed)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -910,13 +899,13 @@ pub type DynQueryCallback<T> =
|
|||
|
||||
/// A picker that updates its contents via a callback whenever the
|
||||
/// query string changes. Useful for live grep, workspace symbols, etc.
|
||||
pub struct DynamicPicker<T: ui::menu::Item + Send> {
|
||||
pub struct DynamicPicker<T: ui::menu::Item + Send + Sync> {
|
||||
file_picker: Picker<T>,
|
||||
query_callback: DynQueryCallback<T>,
|
||||
query: String,
|
||||
}
|
||||
|
||||
impl<T: ui::menu::Item + Send> DynamicPicker<T> {
|
||||
impl<T: ui::menu::Item + Send + Sync> DynamicPicker<T> {
|
||||
pub const ID: &'static str = "dynamic-picker";
|
||||
|
||||
pub fn new(file_picker: Picker<T>, query_callback: DynQueryCallback<T>) -> Self {
|
||||
|
@ -928,7 +917,7 @@ impl<T: ui::menu::Item + Send> DynamicPicker<T> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: Item + Send + 'static> Component for DynamicPicker<T> {
|
||||
impl<T: Item + Send + Sync + 'static> Component for DynamicPicker<T> {
|
||||
fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
|
||||
self.file_picker.render(area, surface, cx);
|
||||
}
|
||||
|
|
|
@ -20,7 +20,6 @@ mod test {
|
|||
mod commands;
|
||||
mod languages;
|
||||
mod movement;
|
||||
mod picker;
|
||||
mod prompt;
|
||||
mod splits;
|
||||
}
|
||||
|
|
|
@ -1,80 +0,0 @@
|
|||
use std::fs;
|
||||
|
||||
use helix_core::{path::get_canonicalized_path, Range};
|
||||
use helix_loader::{current_working_dir, set_current_working_dir};
|
||||
use helix_view::{current_ref, editor::Action};
|
||||
use tempfile::{Builder, TempDir};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_picker_alt_ret() -> anyhow::Result<()> {
|
||||
// Create two files, open the first and run a global search for a word
|
||||
// from the second file. Press <alt-ret> to have helix open the second file in the
|
||||
// new buffer, but not change focus. Then check whether the word is highlighted
|
||||
// correctly and the view of the first file has not changed.
|
||||
let tmp_dir = TempDir::new()?;
|
||||
set_current_working_dir(tmp_dir.path().into())?;
|
||||
|
||||
let mut app = AppBuilder::new().build()?;
|
||||
|
||||
log::debug!(
|
||||
"set current working directory to {:?}",
|
||||
current_working_dir()
|
||||
);
|
||||
|
||||
// Add prefix so helix doesn't hide these files in a picker
|
||||
let files = [
|
||||
Builder::new().prefix("1").tempfile_in(&tmp_dir)?,
|
||||
Builder::new().prefix("2").tempfile_in(&tmp_dir)?,
|
||||
];
|
||||
let paths = files
|
||||
.iter()
|
||||
.map(|f| get_canonicalized_path(f.path()))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
fs::write(&paths[0], "1\n2\n3\n4")?;
|
||||
fs::write(&paths[1], "first\nsecond")?;
|
||||
|
||||
log::debug!(
|
||||
"created and wrote two temporary files: {:?} & {:?}",
|
||||
paths[0],
|
||||
paths[1]
|
||||
);
|
||||
|
||||
// Manually open to save the offset, otherwise we won't be able to change the state in the Fn trait
|
||||
app.editor.open(files[0].path(), Action::Replace)?;
|
||||
let view_offset = current_ref!(app.editor).0.offset;
|
||||
|
||||
test_key_sequences(
|
||||
&mut app,
|
||||
vec![
|
||||
(Some("<space>/"), None),
|
||||
(Some("second<ret>"), None),
|
||||
(
|
||||
Some("<A-ret><esc>"),
|
||||
Some(&|app| {
|
||||
let (view, doc) = current_ref!(app.editor);
|
||||
assert_eq!(doc.path().unwrap(), &paths[0]);
|
||||
let select_ranges = doc.selection(view.id).ranges();
|
||||
assert_eq!(select_ranges[0], Range::new(0, 1));
|
||||
assert_eq!(view.offset, view_offset);
|
||||
}),
|
||||
),
|
||||
(
|
||||
Some(":buffer<minus>next<ret>"),
|
||||
Some(&|app| {
|
||||
let (view, doc) = current_ref!(app.editor);
|
||||
assert_eq!(doc.path().unwrap(), &paths[1]);
|
||||
let select_ranges = doc.selection(view.id).ranges();
|
||||
assert_eq!(select_ranges.len(), 1);
|
||||
assert_eq!(select_ranges[0], Range::new(6, 12));
|
||||
}),
|
||||
),
|
||||
],
|
||||
false,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -12,6 +12,7 @@ homepage = "https://helix-editor.com"
|
|||
|
||||
[dependencies]
|
||||
helix-core = { version = "0.6", path = "../helix-core" }
|
||||
helix-event = { version = "0.6", path = "../helix-event" }
|
||||
|
||||
tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] }
|
||||
parking_lot = "0.12"
|
||||
|
|
|
@ -2,10 +2,10 @@ use std::ops::Range;
|
|||
use std::sync::Arc;
|
||||
|
||||
use helix_core::Rope;
|
||||
use helix_event::RenderLockGuard;
|
||||
use imara_diff::Algorithm;
|
||||
use parking_lot::{Mutex, MutexGuard};
|
||||
use tokio::sync::mpsc::{unbounded_channel, UnboundedSender};
|
||||
use tokio::sync::{Notify, OwnedRwLockReadGuard, RwLock};
|
||||
use tokio::task::JoinHandle;
|
||||
use tokio::time::Instant;
|
||||
|
||||
|
@ -14,11 +14,9 @@ use crate::diff::worker::DiffWorker;
|
|||
mod line_cache;
|
||||
mod worker;
|
||||
|
||||
type RedrawHandle = (Arc<Notify>, Arc<RwLock<()>>);
|
||||
|
||||
/// A rendering lock passed to the differ the prevents redraws from occurring
|
||||
struct RenderLock {
|
||||
pub lock: OwnedRwLockReadGuard<()>,
|
||||
pub lock: RenderLockGuard,
|
||||
pub timeout: Option<Instant>,
|
||||
}
|
||||
|
||||
|
@ -38,28 +36,22 @@ struct DiffInner {
|
|||
#[derive(Clone, Debug)]
|
||||
pub struct DiffHandle {
|
||||
channel: UnboundedSender<Event>,
|
||||
render_lock: Arc<RwLock<()>>,
|
||||
diff: Arc<Mutex<DiffInner>>,
|
||||
inverted: bool,
|
||||
}
|
||||
|
||||
impl DiffHandle {
|
||||
pub fn new(diff_base: Rope, doc: Rope, redraw_handle: RedrawHandle) -> DiffHandle {
|
||||
DiffHandle::new_with_handle(diff_base, doc, redraw_handle).0
|
||||
pub fn new(diff_base: Rope, doc: Rope) -> DiffHandle {
|
||||
DiffHandle::new_with_handle(diff_base, doc).0
|
||||
}
|
||||
|
||||
fn new_with_handle(
|
||||
diff_base: Rope,
|
||||
doc: Rope,
|
||||
redraw_handle: RedrawHandle,
|
||||
) -> (DiffHandle, JoinHandle<()>) {
|
||||
fn new_with_handle(diff_base: Rope, doc: Rope) -> (DiffHandle, JoinHandle<()>) {
|
||||
let (sender, receiver) = unbounded_channel();
|
||||
let diff: Arc<Mutex<DiffInner>> = Arc::default();
|
||||
let worker = DiffWorker {
|
||||
channel: receiver,
|
||||
diff: diff.clone(),
|
||||
new_hunks: Vec::default(),
|
||||
redraw_notify: redraw_handle.0,
|
||||
diff_finished_notify: Arc::default(),
|
||||
};
|
||||
let handle = tokio::spawn(worker.run(diff_base, doc));
|
||||
|
@ -67,7 +59,6 @@ impl DiffHandle {
|
|||
channel: sender,
|
||||
diff,
|
||||
inverted: false,
|
||||
render_lock: redraw_handle.1,
|
||||
};
|
||||
(differ, handle)
|
||||
}
|
||||
|
@ -87,11 +78,7 @@ impl DiffHandle {
|
|||
/// This function is only intended to be called from within the rendering loop
|
||||
/// if called from elsewhere it may fail to acquire the render lock and panic
|
||||
pub fn update_document(&self, doc: Rope, block: bool) -> bool {
|
||||
// unwrap is ok here because the rendering lock is
|
||||
// only exclusively locked during redraw.
|
||||
// This function is only intended to be called
|
||||
// from the core rendering loop where no redraw can happen in parallel
|
||||
let lock = self.render_lock.clone().try_read_owned().unwrap();
|
||||
let lock = helix_event::lock_frame();
|
||||
let timeout = if block {
|
||||
None
|
||||
} else {
|
||||
|
|
|
@ -23,7 +23,6 @@ pub(super) struct DiffWorker {
|
|||
pub channel: UnboundedReceiver<Event>,
|
||||
pub diff: Arc<Mutex<DiffInner>>,
|
||||
pub new_hunks: Vec<Hunk>,
|
||||
pub redraw_notify: Arc<Notify>,
|
||||
pub diff_finished_notify: Arc<Notify>,
|
||||
}
|
||||
|
||||
|
@ -32,11 +31,7 @@ impl DiffWorker {
|
|||
let mut accumulator = EventAccumulator::new();
|
||||
accumulator.handle_event(event).await;
|
||||
accumulator
|
||||
.accumulate_debounced_events(
|
||||
&mut self.channel,
|
||||
self.redraw_notify.clone(),
|
||||
self.diff_finished_notify.clone(),
|
||||
)
|
||||
.accumulate_debounced_events(&mut self.channel, self.diff_finished_notify.clone())
|
||||
.await;
|
||||
(accumulator.doc, accumulator.diff_base)
|
||||
}
|
||||
|
@ -137,7 +132,6 @@ impl<'a> EventAccumulator {
|
|||
async fn accumulate_debounced_events(
|
||||
&mut self,
|
||||
channel: &mut UnboundedReceiver<Event>,
|
||||
redraw_notify: Arc<Notify>,
|
||||
diff_finished_notify: Arc<Notify>,
|
||||
) {
|
||||
let async_debounce = Duration::from_millis(DIFF_DEBOUNCE_TIME_ASYNC);
|
||||
|
@ -164,7 +158,7 @@ impl<'a> EventAccumulator {
|
|||
None => {
|
||||
tokio::spawn(async move {
|
||||
diff_finished_notify.notified().await;
|
||||
redraw_notify.notify_one();
|
||||
helix_event::request_redraw();
|
||||
});
|
||||
}
|
||||
// diff is performed inside the rendering loop
|
||||
|
@ -190,7 +184,7 @@ impl<'a> EventAccumulator {
|
|||
// and wait until the diff occurs to trigger an async redraw
|
||||
log::info!("Diff computation timed out, update of diffs might appear delayed");
|
||||
diff_finished_notify.notified().await;
|
||||
redraw_notify.notify_one();
|
||||
helix_event::request_redraw()
|
||||
});
|
||||
}
|
||||
// a blocking diff is performed inside the rendering loop
|
||||
|
|
|
@ -5,11 +5,7 @@ use crate::diff::{DiffHandle, Hunk};
|
|||
|
||||
impl DiffHandle {
|
||||
fn new_test(diff_base: &str, doc: &str) -> (DiffHandle, JoinHandle<()>) {
|
||||
DiffHandle::new_with_handle(
|
||||
Rope::from_str(diff_base),
|
||||
Rope::from_str(doc),
|
||||
Default::default(),
|
||||
)
|
||||
DiffHandle::new_with_handle(Rope::from_str(diff_base), Rope::from_str(doc))
|
||||
}
|
||||
async fn into_diff(self, handle: JoinHandle<()>) -> Vec<Hunk> {
|
||||
let diff = self.diff;
|
||||
|
|
|
@ -17,6 +17,7 @@ term = ["crossterm"]
|
|||
bitflags = "2.4"
|
||||
anyhow = "1"
|
||||
helix-core = { version = "0.6", path = "../helix-core" }
|
||||
helix-event = { version = "0.6", path = "../helix-event" }
|
||||
helix-loader = { version = "0.6", path = "../helix-loader" }
|
||||
helix-lsp = { version = "0.6", path = "../helix-lsp" }
|
||||
helix-dap = { version = "0.6", path = "../helix-dap" }
|
||||
|
|
|
@ -33,7 +33,7 @@ use helix_core::{
|
|||
ChangeSet, Diagnostic, LineEnding, Range, Rope, RopeBuilder, Selection, Syntax, Transaction,
|
||||
};
|
||||
|
||||
use crate::editor::{Config, RedrawHandle};
|
||||
use crate::editor::Config;
|
||||
use crate::{DocumentId, Editor, Theme, View, ViewId};
|
||||
|
||||
/// 8kB of buffer space for encoding and decoding `Rope`s.
|
||||
|
@ -995,7 +995,6 @@ impl Document {
|
|||
&mut self,
|
||||
view: &mut View,
|
||||
provider_registry: &DiffProviderRegistry,
|
||||
redraw_handle: RedrawHandle,
|
||||
) -> Result<(), Error> {
|
||||
let encoding = self.encoding;
|
||||
let path = self
|
||||
|
@ -1023,7 +1022,7 @@ impl Document {
|
|||
self.detect_indent_and_line_ending();
|
||||
|
||||
match provider_registry.get_diff_base(&path) {
|
||||
Some(diff_base) => self.set_diff_base(diff_base, redraw_handle),
|
||||
Some(diff_base) => self.set_diff_base(diff_base),
|
||||
None => self.diff_handle = None,
|
||||
}
|
||||
|
||||
|
@ -1583,13 +1582,13 @@ impl Document {
|
|||
}
|
||||
|
||||
/// Intialize/updates the differ for this document with a new base.
|
||||
pub fn set_diff_base(&mut self, diff_base: Vec<u8>, redraw_handle: RedrawHandle) {
|
||||
pub fn set_diff_base(&mut self, diff_base: Vec<u8>) {
|
||||
if let Ok((diff_base, ..)) = from_reader(&mut diff_base.as_slice(), Some(self.encoding)) {
|
||||
if let Some(differ) = &self.diff_handle {
|
||||
differ.update_diff_base(diff_base);
|
||||
return;
|
||||
}
|
||||
self.diff_handle = Some(DiffHandle::new(diff_base, self.text.clone(), redraw_handle))
|
||||
self.diff_handle = Some(DiffHandle::new(diff_base, self.text.clone()))
|
||||
} else {
|
||||
self.diff_handle = None;
|
||||
}
|
||||
|
|
|
@ -32,7 +32,7 @@ use std::{
|
|||
use tokio::{
|
||||
sync::{
|
||||
mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender},
|
||||
oneshot, Notify, RwLock,
|
||||
oneshot,
|
||||
},
|
||||
time::{sleep, Duration, Instant, Sleep},
|
||||
};
|
||||
|
@ -925,10 +925,6 @@ pub struct Editor {
|
|||
pub exit_code: i32,
|
||||
|
||||
pub config_events: (UnboundedSender<ConfigEvent>, UnboundedReceiver<ConfigEvent>),
|
||||
/// Allows asynchronous tasks to control the rendering
|
||||
/// The `Notify` allows asynchronous tasks to request the editor to perform a redraw
|
||||
/// The `RwLock` blocks the editor from performing the render until an exclusive lock can be acquired
|
||||
pub redraw_handle: RedrawHandle,
|
||||
pub needs_redraw: bool,
|
||||
/// Cached position of the cursor calculated during rendering.
|
||||
/// The content of `cursor_cache` is returned by `Editor::cursor` if
|
||||
|
@ -955,8 +951,6 @@ pub struct Editor {
|
|||
|
||||
pub type Motion = Box<dyn Fn(&mut Editor)>;
|
||||
|
||||
pub type RedrawHandle = (Arc<Notify>, Arc<RwLock<()>>);
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum EditorEvent {
|
||||
DocumentSaved(DocumentSavedEventResult),
|
||||
|
@ -1062,7 +1056,6 @@ impl Editor {
|
|||
auto_pairs,
|
||||
exit_code: 0,
|
||||
config_events: unbounded_channel(),
|
||||
redraw_handle: Default::default(),
|
||||
needs_redraw: false,
|
||||
cursor_cache: Cell::new(None),
|
||||
completion_request_handle: None,
|
||||
|
@ -1453,7 +1446,7 @@ impl Editor {
|
|||
)?;
|
||||
|
||||
if let Some(diff_base) = self.diff_providers.get_diff_base(&path) {
|
||||
doc.set_diff_base(diff_base, self.redraw_handle.clone());
|
||||
doc.set_diff_base(diff_base);
|
||||
}
|
||||
doc.set_version_control_head(self.diff_providers.get_current_head_name(&path));
|
||||
|
||||
|
@ -1752,7 +1745,7 @@ impl Editor {
|
|||
return EditorEvent::DebuggerEvent(event)
|
||||
}
|
||||
|
||||
_ = self.redraw_handle.0.notified() => {
|
||||
_ = helix_event::redraw_requested() => {
|
||||
if !self.needs_redraw{
|
||||
self.needs_redraw = true;
|
||||
let timeout = Instant::now() + Duration::from_millis(33);
|
||||
|
|
Loading…
Add table
Reference in a new issue