Create helix-stdx crate for stdlib extensions
helix-stdx is meant to carry extensions to the stdlib or low-level dependencies that are useful in all other crates. This commit starts with all of the path functions from helix-core and the CWD tracking that lived in helix-loader. The CWD tracking in helix-loader was previously unable to call the canonicalization functions in helix-core. Switching to our custom canonicalization code should make no noticeable difference though since `std::env::current_dir` returns a canonicalized path with symlinks resolved (at least on unix).
This commit is contained in:
parent
af8e524a7d
commit
1f916e65cf
27 changed files with 163 additions and 111 deletions
14
Cargo.lock
generated
14
Cargo.lock
generated
|
@ -1060,6 +1060,7 @@ dependencies = [
|
|||
"etcetera",
|
||||
"hashbrown 0.14.3",
|
||||
"helix-loader",
|
||||
"helix-stdx",
|
||||
"imara-diff",
|
||||
"indoc",
|
||||
"log",
|
||||
|
@ -1074,7 +1075,6 @@ dependencies = [
|
|||
"slotmap",
|
||||
"smallvec",
|
||||
"smartstring",
|
||||
"tempfile",
|
||||
"textwrap",
|
||||
"toml",
|
||||
"tree-sitter",
|
||||
|
@ -1136,6 +1136,7 @@ dependencies = [
|
|||
"helix-core",
|
||||
"helix-loader",
|
||||
"helix-parsec",
|
||||
"helix-stdx",
|
||||
"log",
|
||||
"lsp-types",
|
||||
"parking_lot",
|
||||
|
@ -1151,6 +1152,15 @@ dependencies = [
|
|||
name = "helix-parsec"
|
||||
version = "23.10.0"
|
||||
|
||||
[[package]]
|
||||
name = "helix-stdx"
|
||||
version = "23.10.0"
|
||||
dependencies = [
|
||||
"dunce",
|
||||
"etcetera",
|
||||
"tempfile",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "helix-term"
|
||||
version = "23.10.0"
|
||||
|
@ -1169,6 +1179,7 @@ dependencies = [
|
|||
"helix-event",
|
||||
"helix-loader",
|
||||
"helix-lsp",
|
||||
"helix-stdx",
|
||||
"helix-tui",
|
||||
"helix-vcs",
|
||||
"helix-view",
|
||||
|
@ -1241,6 +1252,7 @@ dependencies = [
|
|||
"helix-event",
|
||||
"helix-loader",
|
||||
"helix-lsp",
|
||||
"helix-stdx",
|
||||
"helix-tui",
|
||||
"helix-vcs",
|
||||
"libc",
|
||||
|
|
|
@ -11,6 +11,7 @@ members = [
|
|||
"helix-loader",
|
||||
"helix-vcs",
|
||||
"helix-parsec",
|
||||
"helix-stdx",
|
||||
"xtask",
|
||||
]
|
||||
|
||||
|
|
|
@ -16,6 +16,7 @@ unicode-lines = ["ropey/unicode_lines"]
|
|||
integration = []
|
||||
|
||||
[dependencies]
|
||||
helix-stdx = { path = "../helix-stdx" }
|
||||
helix-loader = { path = "../helix-loader" }
|
||||
|
||||
ropey = { version = "1.6.1", default-features = false, features = ["simd"] }
|
||||
|
@ -55,4 +56,3 @@ parking_lot = "0.12"
|
|||
[dev-dependencies]
|
||||
quickcheck = { version = "1", default-features = false }
|
||||
indoc = "2.0.4"
|
||||
tempfile = "3.9"
|
||||
|
|
|
@ -17,7 +17,6 @@ pub mod macros;
|
|||
pub mod match_brackets;
|
||||
pub mod movement;
|
||||
pub mod object;
|
||||
pub mod path;
|
||||
mod position;
|
||||
pub mod search;
|
||||
pub mod selection;
|
||||
|
|
|
@ -1,14 +1,13 @@
|
|||
pub mod config;
|
||||
pub mod grammar;
|
||||
|
||||
use helix_stdx::{env::current_working_dir, path};
|
||||
|
||||
use etcetera::base_strategy::{choose_base_strategy, BaseStrategy};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::RwLock;
|
||||
|
||||
pub const VERSION_AND_GIT_HASH: &str = env!("VERSION_AND_GIT_HASH");
|
||||
|
||||
static CWD: RwLock<Option<PathBuf>> = RwLock::new(None);
|
||||
|
||||
static RUNTIME_DIRS: once_cell::sync::Lazy<Vec<PathBuf>> =
|
||||
once_cell::sync::Lazy::new(prioritize_runtime_dirs);
|
||||
|
||||
|
@ -16,31 +15,6 @@ static CONFIG_FILE: once_cell::sync::OnceCell<PathBuf> = once_cell::sync::OnceCe
|
|||
|
||||
static LOG_FILE: once_cell::sync::OnceCell<PathBuf> = once_cell::sync::OnceCell::new();
|
||||
|
||||
// Get the current working directory.
|
||||
// This information is managed internally as the call to std::env::current_dir
|
||||
// might fail if the cwd has been deleted.
|
||||
pub fn current_working_dir() -> PathBuf {
|
||||
if let Some(path) = &*CWD.read().unwrap() {
|
||||
return path.clone();
|
||||
}
|
||||
|
||||
let path = std::env::current_dir()
|
||||
.and_then(dunce::canonicalize)
|
||||
.expect("Couldn't determine current working directory");
|
||||
let mut cwd = CWD.write().unwrap();
|
||||
*cwd = Some(path.clone());
|
||||
|
||||
path
|
||||
}
|
||||
|
||||
pub fn set_current_working_dir(path: impl AsRef<Path>) -> std::io::Result<()> {
|
||||
let path = dunce::canonicalize(path)?;
|
||||
std::env::set_current_dir(&path)?;
|
||||
let mut cwd = CWD.write().unwrap();
|
||||
*cwd = Some(path);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn initialize_config_file(specified_file: Option<PathBuf>) {
|
||||
let config_file = specified_file.unwrap_or_else(default_config_file);
|
||||
ensure_parent_dir(&config_file);
|
||||
|
@ -280,21 +254,9 @@ fn ensure_parent_dir(path: &Path) {
|
|||
mod merge_toml_tests {
|
||||
use std::str;
|
||||
|
||||
use super::{current_working_dir, merge_toml_values, set_current_working_dir};
|
||||
use super::merge_toml_values;
|
||||
use toml::Value;
|
||||
|
||||
#[test]
|
||||
fn current_dir_is_set() {
|
||||
let new_path = dunce::canonicalize(std::env::temp_dir()).unwrap();
|
||||
let cwd = current_working_dir();
|
||||
assert_ne!(cwd, new_path);
|
||||
|
||||
set_current_working_dir(&new_path).expect("Couldn't set new path");
|
||||
|
||||
let cwd = current_working_dir();
|
||||
assert_eq!(cwd, new_path);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn language_toml_map_merges() {
|
||||
const USER: &str = r#"
|
||||
|
|
|
@ -13,6 +13,7 @@ homepage.workspace = true
|
|||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
helix-stdx = { path = "../helix-stdx" }
|
||||
helix-core = { path = "../helix-core" }
|
||||
helix-loader = { path = "../helix-loader" }
|
||||
helix-parsec = { path = "../helix-parsec" }
|
||||
|
|
|
@ -4,8 +4,9 @@ use crate::{
|
|||
Call, Error, OffsetEncoding, Result,
|
||||
};
|
||||
|
||||
use helix_core::{find_workspace, path, syntax::LanguageServerFeature, ChangeSet, Rope};
|
||||
use helix_core::{find_workspace, syntax::LanguageServerFeature, ChangeSet, Rope};
|
||||
use helix_loader::{self, VERSION_AND_GIT_HASH};
|
||||
use helix_stdx::path;
|
||||
use lsp::{
|
||||
notification::DidChangeWorkspaceFolders, CodeActionCapabilityResolveSupport,
|
||||
DidChangeWorkspaceFoldersParams, OneOf, PositionEncodingKind, WorkspaceFolder,
|
||||
|
@ -68,7 +69,7 @@ impl Client {
|
|||
may_support_workspace: bool,
|
||||
) -> bool {
|
||||
let (workspace, workspace_is_cwd) = find_workspace();
|
||||
let workspace = path::get_normalized_path(&workspace);
|
||||
let workspace = path::normalize(workspace);
|
||||
let root = find_lsp_workspace(
|
||||
doc_path
|
||||
.and_then(|x| x.parent().and_then(|x| x.to_str()))
|
||||
|
@ -204,7 +205,7 @@ impl Client {
|
|||
let (server_rx, server_tx, initialize_notify) =
|
||||
Transport::start(reader, writer, stderr, id, name.clone());
|
||||
let (workspace, workspace_is_cwd) = find_workspace();
|
||||
let workspace = path::get_normalized_path(&workspace);
|
||||
let workspace = path::normalize(workspace);
|
||||
let root = find_lsp_workspace(
|
||||
doc_path
|
||||
.and_then(|x| x.parent().and_then(|x| x.to_str()))
|
||||
|
|
|
@ -11,10 +11,10 @@ pub use lsp::{Position, Url};
|
|||
pub use lsp_types as lsp;
|
||||
|
||||
use futures_util::stream::select_all::SelectAll;
|
||||
use helix_core::{
|
||||
path,
|
||||
syntax::{LanguageConfiguration, LanguageServerConfiguration, LanguageServerFeatures},
|
||||
use helix_core::syntax::{
|
||||
LanguageConfiguration, LanguageServerConfiguration, LanguageServerFeatures,
|
||||
};
|
||||
use helix_stdx::path;
|
||||
use tokio::sync::mpsc::UnboundedReceiver;
|
||||
|
||||
use std::{
|
||||
|
@ -958,10 +958,10 @@ pub fn find_lsp_workspace(
|
|||
let mut file = if file.is_absolute() {
|
||||
file.to_path_buf()
|
||||
} else {
|
||||
let current_dir = helix_loader::current_working_dir();
|
||||
let current_dir = helix_stdx::env::current_working_dir();
|
||||
current_dir.join(file)
|
||||
};
|
||||
file = path::get_normalized_path(&file);
|
||||
file = path::normalize(&file);
|
||||
|
||||
if !file.starts_with(workspace) {
|
||||
return None;
|
||||
|
@ -978,7 +978,7 @@ pub fn find_lsp_workspace(
|
|||
|
||||
if root_dirs
|
||||
.iter()
|
||||
.any(|root_dir| path::get_normalized_path(&workspace.join(root_dir)) == ancestor)
|
||||
.any(|root_dir| path::normalize(workspace.join(root_dir)) == ancestor)
|
||||
{
|
||||
// if the workspace is the cwd do not search any higher for workspaces
|
||||
// but specify
|
||||
|
|
19
helix-stdx/Cargo.toml
Normal file
19
helix-stdx/Cargo.toml
Normal file
|
@ -0,0 +1,19 @@
|
|||
[package]
|
||||
name = "helix-stdx"
|
||||
description = "Standard library extensions"
|
||||
include = ["src/**/*", "README.md"]
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
rust-version.workspace = true
|
||||
categories.workspace = true
|
||||
repository.workspace = true
|
||||
homepage.workspace = true
|
||||
|
||||
[dependencies]
|
||||
dunce = "1.0"
|
||||
etcetera = "0.8"
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.9"
|
48
helix-stdx/src/env.rs
Normal file
48
helix-stdx/src/env.rs
Normal file
|
@ -0,0 +1,48 @@
|
|||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::RwLock,
|
||||
};
|
||||
|
||||
static CWD: RwLock<Option<PathBuf>> = RwLock::new(None);
|
||||
|
||||
// Get the current working directory.
|
||||
// This information is managed internally as the call to std::env::current_dir
|
||||
// might fail if the cwd has been deleted.
|
||||
pub fn current_working_dir() -> PathBuf {
|
||||
if let Some(path) = &*CWD.read().unwrap() {
|
||||
return path.clone();
|
||||
}
|
||||
|
||||
let path = std::env::current_dir()
|
||||
.map(crate::path::normalize)
|
||||
.expect("Couldn't determine current working directory");
|
||||
let mut cwd = CWD.write().unwrap();
|
||||
*cwd = Some(path.clone());
|
||||
|
||||
path
|
||||
}
|
||||
|
||||
pub fn set_current_working_dir(path: impl AsRef<Path>) -> std::io::Result<()> {
|
||||
let path = crate::path::canonicalize(path);
|
||||
std::env::set_current_dir(&path)?;
|
||||
let mut cwd = CWD.write().unwrap();
|
||||
*cwd = Some(path);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{current_working_dir, set_current_working_dir};
|
||||
|
||||
#[test]
|
||||
fn current_dir_is_set() {
|
||||
let new_path = dunce::canonicalize(std::env::temp_dir()).unwrap();
|
||||
let cwd = current_working_dir();
|
||||
assert_ne!(cwd, new_path);
|
||||
|
||||
set_current_working_dir(&new_path).expect("Couldn't set new path");
|
||||
|
||||
let cwd = current_working_dir();
|
||||
assert_eq!(cwd, new_path);
|
||||
}
|
||||
}
|
2
helix-stdx/src/lib.rs
Normal file
2
helix-stdx/src/lib.rs
Normal file
|
@ -0,0 +1,2 @@
|
|||
pub mod env;
|
||||
pub mod path;
|
|
@ -1,6 +1,9 @@
|
|||
use etcetera::home_dir;
|
||||
pub use etcetera::home_dir;
|
||||
|
||||
use std::path::{Component, Path, PathBuf};
|
||||
|
||||
use crate::env::current_working_dir;
|
||||
|
||||
/// Replaces users home directory from `path` with tilde `~` if the directory
|
||||
/// is available, otherwise returns the path unchanged.
|
||||
pub fn fold_home_dir(path: &Path) -> PathBuf {
|
||||
|
@ -16,7 +19,8 @@ pub fn fold_home_dir(path: &Path) -> PathBuf {
|
|||
/// Expands tilde `~` into users home directory if available, otherwise returns the path
|
||||
/// unchanged. The tilde will only be expanded when present as the first component of the path
|
||||
/// and only slash follows it.
|
||||
pub fn expand_tilde(path: &Path) -> PathBuf {
|
||||
pub fn expand_tilde(path: impl AsRef<Path>) -> PathBuf {
|
||||
let path = path.as_ref();
|
||||
let mut components = path.components().peekable();
|
||||
if let Some(Component::Normal(c)) = components.peek() {
|
||||
if c == &"~" {
|
||||
|
@ -33,8 +37,8 @@ pub fn expand_tilde(path: &Path) -> PathBuf {
|
|||
/// Normalize a path without resolving symlinks.
|
||||
// Strategy: start from the first component and move up. Canonicalize previous path,
|
||||
// join component, canonicalize new path, strip prefix and join to the final result.
|
||||
pub fn get_normalized_path(path: &Path) -> PathBuf {
|
||||
let mut components = path.components().peekable();
|
||||
pub fn normalize(path: impl AsRef<Path>) -> PathBuf {
|
||||
let mut components = path.as_ref().components().peekable();
|
||||
let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
|
||||
components.next();
|
||||
PathBuf::from(c.as_os_str())
|
||||
|
@ -104,22 +108,22 @@ pub fn get_normalized_path(path: &Path) -> PathBuf {
|
|||
///
|
||||
/// This function is used instead of [`std::fs::canonicalize`] because we don't want to verify
|
||||
/// here if the path exists, just normalize its components.
|
||||
pub fn get_canonicalized_path(path: &Path) -> PathBuf {
|
||||
pub fn canonicalize(path: impl AsRef<Path>) -> PathBuf {
|
||||
let path = expand_tilde(path);
|
||||
let path = if path.is_relative() {
|
||||
helix_loader::current_working_dir().join(path)
|
||||
current_working_dir().join(path)
|
||||
} else {
|
||||
path
|
||||
};
|
||||
|
||||
get_normalized_path(path.as_path())
|
||||
normalize(path)
|
||||
}
|
||||
|
||||
pub fn get_relative_path(path: &Path) -> PathBuf {
|
||||
let path = PathBuf::from(path);
|
||||
pub fn get_relative_path(path: impl AsRef<Path>) -> PathBuf {
|
||||
let path = PathBuf::from(path.as_ref());
|
||||
let path = if path.is_absolute() {
|
||||
let cwdir = get_normalized_path(&helix_loader::current_working_dir());
|
||||
get_normalized_path(&path)
|
||||
let cwdir = normalize(current_working_dir());
|
||||
normalize(&path)
|
||||
.strip_prefix(cwdir)
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or(path)
|
||||
|
@ -136,7 +140,7 @@ pub fn get_relative_path(path: &Path) -> PathBuf {
|
|||
/// Note that this function does not check if the truncated path is unambiguous.
|
||||
///
|
||||
/// ```
|
||||
/// use helix_core::path::get_truncated_path;
|
||||
/// use helix_stdx::path::get_truncated_path;
|
||||
/// use std::path::Path;
|
||||
///
|
||||
/// assert_eq!(
|
||||
|
@ -158,8 +162,8 @@ pub fn get_relative_path(path: &Path) -> PathBuf {
|
|||
/// assert_eq!(get_truncated_path("").as_path(), Path::new(""));
|
||||
/// ```
|
||||
///
|
||||
pub fn get_truncated_path<P: AsRef<Path>>(path: P) -> PathBuf {
|
||||
let cwd = helix_loader::current_working_dir();
|
||||
pub fn get_truncated_path(path: impl AsRef<Path>) -> PathBuf {
|
||||
let cwd = current_working_dir();
|
||||
let path = path
|
||||
.as_ref()
|
||||
.strip_prefix(cwd)
|
|
@ -6,7 +6,7 @@ use std::{
|
|||
path::{Component, Path, PathBuf},
|
||||
};
|
||||
|
||||
use helix_core::path::get_normalized_path;
|
||||
use helix_stdx::path;
|
||||
use tempfile::Builder;
|
||||
|
||||
// Paths on Windows are almost always case-insensitive.
|
||||
|
@ -34,7 +34,7 @@ fn test_case_folding_windows() -> Result<(), Box<dyn Error>> {
|
|||
);
|
||||
let test_path = root_without_prefix.join(lowercase_case);
|
||||
assert_eq!(
|
||||
get_normalized_path(&test_path),
|
||||
path::normalize(&test_path),
|
||||
case.path().strip_prefix(&tmp_prefix)?
|
||||
);
|
||||
|
||||
|
@ -80,7 +80,7 @@ fn test_normalize_path() -> Result<(), Box<dyn Error>> {
|
|||
// root/link
|
||||
let path = link.strip_prefix(&tmp_prefix)?;
|
||||
assert_eq!(
|
||||
get_normalized_path(path),
|
||||
path::normalize(path),
|
||||
path,
|
||||
"input {:?} and symlink last component shouldn't be resolved",
|
||||
path
|
||||
|
@ -98,7 +98,7 @@ fn test_normalize_path() -> Result<(), Box<dyn Error>> {
|
|||
.unwrap()
|
||||
.join(Component::ParentDir);
|
||||
assert_eq!(
|
||||
get_normalized_path(&path),
|
||||
path::normalize(&path),
|
||||
expected,
|
||||
"input {:?} and \"..\" should not erase the simlink that goes ahead",
|
||||
&path
|
||||
|
@ -118,7 +118,7 @@ fn test_normalize_path() -> Result<(), Box<dyn Error>> {
|
|||
.unwrap()
|
||||
.join(Component::ParentDir)
|
||||
.join(Component::ParentDir);
|
||||
assert_eq!(get_normalized_path(&path), expected, "input {:?}", &path);
|
||||
assert_eq!(path::normalize(&path), expected, "input {:?}", &path);
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -23,6 +23,7 @@ name = "hx"
|
|||
path = "src/main.rs"
|
||||
|
||||
[dependencies]
|
||||
helix-stdx = { path = "../helix-stdx" }
|
||||
helix-core = { path = "../helix-core" }
|
||||
helix-event = { path = "../helix-event" }
|
||||
helix-view = { path = "../helix-view" }
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
use arc_swap::{access::Map, ArcSwap};
|
||||
use futures_util::Stream;
|
||||
use helix_core::{path::get_relative_path, pos_at_coords, syntax, Selection};
|
||||
use helix_core::{pos_at_coords, syntax, Selection};
|
||||
use helix_lsp::{
|
||||
lsp::{self, notification::Notification},
|
||||
util::lsp_range_to_range,
|
||||
LspProgressMap,
|
||||
};
|
||||
use helix_stdx::path::get_relative_path;
|
||||
use helix_view::{
|
||||
align_view,
|
||||
document::DocumentSavedEventResult,
|
||||
|
|
|
@ -2169,7 +2169,7 @@ fn global_search(cx: &mut Context) {
|
|||
type Data = Option<PathBuf>;
|
||||
|
||||
fn format(&self, current_path: &Self::Data) -> Row {
|
||||
let relative_path = helix_core::path::get_relative_path(&self.path)
|
||||
let relative_path = helix_stdx::path::get_relative_path(&self.path)
|
||||
.to_string_lossy()
|
||||
.into_owned();
|
||||
if current_path
|
||||
|
@ -2218,7 +2218,7 @@ fn global_search(cx: &mut Context) {
|
|||
.case_smart(smart_case)
|
||||
.build(regex.as_str())
|
||||
{
|
||||
let search_root = helix_loader::current_working_dir();
|
||||
let search_root = helix_stdx::env::current_working_dir();
|
||||
if !search_root.exists() {
|
||||
cx.editor
|
||||
.set_error("Current working directory does not exist");
|
||||
|
@ -2731,7 +2731,7 @@ fn file_picker_in_current_buffer_directory(cx: &mut Context) {
|
|||
}
|
||||
|
||||
fn file_picker_in_current_directory(cx: &mut Context) {
|
||||
let cwd = helix_loader::current_working_dir();
|
||||
let cwd = helix_stdx::env::current_working_dir();
|
||||
if !cwd.exists() {
|
||||
cx.editor
|
||||
.set_error("Current working directory does not exist");
|
||||
|
@ -2759,7 +2759,7 @@ fn buffer_picker(cx: &mut Context) {
|
|||
let path = self
|
||||
.path
|
||||
.as_deref()
|
||||
.map(helix_core::path::get_relative_path);
|
||||
.map(helix_stdx::path::get_relative_path);
|
||||
let path = match path.as_deref().and_then(Path::to_str) {
|
||||
Some(path) => path,
|
||||
None => SCRATCH_BUFFER_NAME,
|
||||
|
@ -2826,7 +2826,7 @@ fn jumplist_picker(cx: &mut Context) {
|
|||
let path = self
|
||||
.path
|
||||
.as_deref()
|
||||
.map(helix_core::path::get_relative_path);
|
||||
.map(helix_stdx::path::get_relative_path);
|
||||
let path = match path.as_deref().and_then(Path::to_str) {
|
||||
Some(path) => path,
|
||||
None => SCRATCH_BUFFER_NAME,
|
||||
|
|
|
@ -217,7 +217,7 @@ pub fn dap_start_impl(
|
|||
}
|
||||
}
|
||||
|
||||
args.insert("cwd", to_value(helix_loader::current_working_dir())?);
|
||||
args.insert("cwd", to_value(helix_stdx::env::current_working_dir())?);
|
||||
|
||||
let args = to_value(args).unwrap();
|
||||
|
||||
|
|
|
@ -17,9 +17,8 @@ use tui::{
|
|||
|
||||
use super::{align_view, push_jump, Align, Context, Editor, Open};
|
||||
|
||||
use helix_core::{
|
||||
path, syntax::LanguageServerFeature, text_annotations::InlineAnnotation, Selection,
|
||||
};
|
||||
use helix_core::{syntax::LanguageServerFeature, text_annotations::InlineAnnotation, Selection};
|
||||
use helix_stdx::path;
|
||||
use helix_view::{
|
||||
document::{DocumentInlayHints, DocumentInlayHintsId, Mode},
|
||||
editor::Action,
|
||||
|
@ -1018,7 +1017,7 @@ fn goto_impl(
|
|||
locations: Vec<lsp::Location>,
|
||||
offset_encoding: OffsetEncoding,
|
||||
) {
|
||||
let cwdir = helix_loader::current_working_dir();
|
||||
let cwdir = helix_stdx::env::current_working_dir();
|
||||
|
||||
match locations.as_slice() {
|
||||
[location] => {
|
||||
|
|
|
@ -7,7 +7,7 @@ use super::*;
|
|||
|
||||
use helix_core::fuzzy::fuzzy_match;
|
||||
use helix_core::indent::MAX_INDENT;
|
||||
use helix_core::{encoding, line_ending, path::get_canonicalized_path, shellwords::Shellwords};
|
||||
use helix_core::{encoding, line_ending, shellwords::Shellwords};
|
||||
use helix_lsp::{OffsetEncoding, Url};
|
||||
use helix_view::document::DEFAULT_LANGUAGE_NAME;
|
||||
use helix_view::editor::{Action, CloseError, ConfigEvent};
|
||||
|
@ -111,7 +111,7 @@ fn open(cx: &mut compositor::Context, args: &[Cow<str>], event: PromptEvent) ->
|
|||
ensure!(!args.is_empty(), "wrong argument count");
|
||||
for arg in args {
|
||||
let (path, pos) = args::parse_file(arg);
|
||||
let path = helix_core::path::expand_tilde(&path);
|
||||
let path = helix_stdx::path::expand_tilde(&path);
|
||||
// If the path is a directory, open a file picker on that directory and update the status
|
||||
// message
|
||||
if let Ok(true) = std::fs::canonicalize(&path).map(|p| p.is_dir()) {
|
||||
|
@ -1079,18 +1079,17 @@ fn change_current_directory(
|
|||
return Ok(());
|
||||
}
|
||||
|
||||
let dir = helix_core::path::expand_tilde(
|
||||
let dir = helix_stdx::path::expand_tilde(
|
||||
args.first()
|
||||
.context("target directory not provided")?
|
||||
.as_ref()
|
||||
.as_ref(),
|
||||
);
|
||||
|
||||
helix_loader::set_current_working_dir(dir)?;
|
||||
helix_stdx::env::set_current_working_dir(dir)?;
|
||||
|
||||
cx.editor.set_status(format!(
|
||||
"Current working directory is now {}",
|
||||
helix_loader::current_working_dir().display()
|
||||
helix_stdx::env::current_working_dir().display()
|
||||
));
|
||||
Ok(())
|
||||
}
|
||||
|
@ -1104,7 +1103,7 @@ fn show_current_directory(
|
|||
return Ok(());
|
||||
}
|
||||
|
||||
let cwd = helix_loader::current_working_dir();
|
||||
let cwd = helix_stdx::env::current_working_dir();
|
||||
let message = format!("Current working directory is {}", cwd.display());
|
||||
|
||||
if cwd.exists() {
|
||||
|
@ -2409,7 +2408,8 @@ fn move_buffer(
|
|||
ensure!(args.len() == 1, format!(":move takes one argument"));
|
||||
let doc = doc!(cx.editor);
|
||||
|
||||
let new_path = get_canonicalized_path(&PathBuf::from(args.first().unwrap().to_string()));
|
||||
let new_path =
|
||||
helix_stdx::path::canonicalize(&PathBuf::from(args.first().unwrap().to_string()));
|
||||
let old_path = doc
|
||||
.path()
|
||||
.ok_or_else(|| anyhow!("Scratch buffer cannot be moved. Use :write instead"))?
|
||||
|
|
|
@ -118,16 +118,16 @@ FLAGS:
|
|||
|
||||
// Before setting the working directory, resolve all the paths in args.files
|
||||
for (path, _) in args.files.iter_mut() {
|
||||
*path = helix_core::path::get_canonicalized_path(path);
|
||||
*path = helix_stdx::path::canonicalize(&path);
|
||||
}
|
||||
|
||||
// NOTE: Set the working directory early so the correct configuration is loaded. Be aware that
|
||||
// Application::new() depends on this logic so it must be updated if this changes.
|
||||
if let Some(path) = &args.working_directory {
|
||||
helix_loader::set_current_working_dir(path)?;
|
||||
helix_stdx::env::set_current_working_dir(path)?;
|
||||
} else if let Some((path, _)) = args.files.first().filter(|p| p.0.is_dir()) {
|
||||
// If the first file is a directory, it will be the working directory unless -w was specified
|
||||
helix_loader::set_current_working_dir(path)?;
|
||||
helix_stdx::env::set_current_working_dir(path)?;
|
||||
}
|
||||
|
||||
let config = match Config::load_default() {
|
||||
|
|
|
@ -409,7 +409,7 @@ pub mod completers {
|
|||
use std::path::Path;
|
||||
|
||||
let is_tilde = input == "~";
|
||||
let path = helix_core::path::expand_tilde(Path::new(input));
|
||||
let path = helix_stdx::path::expand_tilde(Path::new(input));
|
||||
|
||||
let (dir, file_name) = if input.ends_with(std::path::MAIN_SEPARATOR) {
|
||||
(path, None)
|
||||
|
@ -430,7 +430,7 @@ pub mod completers {
|
|||
match path.parent() {
|
||||
Some(path) if !path.as_os_str().is_empty() => path.to_path_buf(),
|
||||
// Path::new("h")'s parent is Some("")...
|
||||
_ => helix_loader::current_working_dir(),
|
||||
_ => helix_stdx::env::current_working_dir(),
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -63,7 +63,7 @@ impl PathOrId {
|
|||
fn get_canonicalized(self) -> Self {
|
||||
use PathOrId::*;
|
||||
match self {
|
||||
Path(path) => Path(helix_core::path::get_canonicalized_path(&path)),
|
||||
Path(path) => Path(helix_stdx::path::canonicalize(path)),
|
||||
Id(id) => Id(id),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,7 +3,8 @@ use std::{
|
|||
ops::RangeInclusive,
|
||||
};
|
||||
|
||||
use helix_core::{diagnostic::Severity, path::get_normalized_path};
|
||||
use helix_core::diagnostic::Severity;
|
||||
use helix_stdx::path;
|
||||
use helix_view::doc;
|
||||
|
||||
use super::*;
|
||||
|
@ -23,7 +24,7 @@ async fn test_write_quit_fail() -> anyhow::Result<()> {
|
|||
assert_eq!(1, docs.len());
|
||||
|
||||
let doc = docs.pop().unwrap();
|
||||
assert_eq!(Some(&get_normalized_path(file.path())), doc.path());
|
||||
assert_eq!(Some(&path::normalize(file.path())), doc.path());
|
||||
assert_eq!(&Severity::Error, app.editor.get_status().unwrap().1);
|
||||
}),
|
||||
false,
|
||||
|
@ -269,7 +270,7 @@ async fn test_write_scratch_to_new_path() -> anyhow::Result<()> {
|
|||
assert_eq!(1, docs.len());
|
||||
|
||||
let doc = docs.pop().unwrap();
|
||||
assert_eq!(Some(&get_normalized_path(file.path())), doc.path());
|
||||
assert_eq!(Some(&path::normalize(file.path())), doc.path());
|
||||
}),
|
||||
false,
|
||||
)
|
||||
|
@ -341,7 +342,7 @@ async fn test_write_new_path() -> anyhow::Result<()> {
|
|||
Some(&|app| {
|
||||
let doc = doc!(app.editor);
|
||||
assert!(!app.editor.is_err());
|
||||
assert_eq!(&get_normalized_path(file1.path()), doc.path().unwrap());
|
||||
assert_eq!(&path::normalize(file1.path()), doc.path().unwrap());
|
||||
}),
|
||||
),
|
||||
(
|
||||
|
@ -349,7 +350,7 @@ async fn test_write_new_path() -> anyhow::Result<()> {
|
|||
Some(&|app| {
|
||||
let doc = doc!(app.editor);
|
||||
assert!(!app.editor.is_err());
|
||||
assert_eq!(&get_normalized_path(file2.path()), doc.path().unwrap());
|
||||
assert_eq!(&path::normalize(file2.path()), doc.path().unwrap());
|
||||
assert!(app.editor.document_by_path(file1.path()).is_none());
|
||||
}),
|
||||
),
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use super::*;
|
||||
|
||||
use helix_core::path::get_normalized_path;
|
||||
use helix_stdx::path;
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_split_write_quit_all() -> anyhow::Result<()> {
|
||||
|
@ -27,21 +27,21 @@ async fn test_split_write_quit_all() -> anyhow::Result<()> {
|
|||
|
||||
let doc1 = docs
|
||||
.iter()
|
||||
.find(|doc| doc.path().unwrap() == &get_normalized_path(file1.path()))
|
||||
.find(|doc| doc.path().unwrap() == &path::normalize(file1.path()))
|
||||
.unwrap();
|
||||
|
||||
assert_eq!("hello1", doc1.text().to_string());
|
||||
|
||||
let doc2 = docs
|
||||
.iter()
|
||||
.find(|doc| doc.path().unwrap() == &get_normalized_path(file2.path()))
|
||||
.find(|doc| doc.path().unwrap() == &path::normalize(file2.path()))
|
||||
.unwrap();
|
||||
|
||||
assert_eq!("hello2", doc2.text().to_string());
|
||||
|
||||
let doc3 = docs
|
||||
.iter()
|
||||
.find(|doc| doc.path().unwrap() == &get_normalized_path(file3.path()))
|
||||
.find(|doc| doc.path().unwrap() == &path::normalize(file3.path()))
|
||||
.unwrap();
|
||||
|
||||
assert_eq!("hello3", doc3.text().to_string());
|
||||
|
|
|
@ -15,6 +15,7 @@ default = []
|
|||
term = ["crossterm"]
|
||||
|
||||
[dependencies]
|
||||
helix-stdx = { path = "../helix-stdx" }
|
||||
helix-core = { path = "../helix-core" }
|
||||
helix-event = { path = "../helix-event" }
|
||||
helix-loader = { path = "../helix-loader" }
|
||||
|
|
|
@ -855,7 +855,7 @@ impl Document {
|
|||
let text = self.text().clone();
|
||||
|
||||
let path = match path {
|
||||
Some(path) => helix_core::path::get_canonicalized_path(&path),
|
||||
Some(path) => helix_stdx::path::canonicalize(path),
|
||||
None => {
|
||||
if self.path.is_none() {
|
||||
bail!("Can't save with no path set!");
|
||||
|
@ -1049,7 +1049,7 @@ impl Document {
|
|||
}
|
||||
|
||||
pub fn set_path(&mut self, path: Option<&Path>) {
|
||||
let path = path.map(helix_core::path::get_canonicalized_path);
|
||||
let path = path.map(helix_stdx::path::canonicalize);
|
||||
|
||||
// if parent doesn't exist we still want to open the document
|
||||
// and error out when document is saved
|
||||
|
@ -1672,7 +1672,7 @@ impl Document {
|
|||
pub fn relative_path(&self) -> Option<PathBuf> {
|
||||
self.path
|
||||
.as_deref()
|
||||
.map(helix_core::path::get_relative_path)
|
||||
.map(helix_stdx::path::get_relative_path)
|
||||
}
|
||||
|
||||
pub fn display_name(&self) -> Cow<'static, str> {
|
||||
|
|
|
@ -1464,7 +1464,7 @@ impl Editor {
|
|||
|
||||
// ??? possible use for integration tests
|
||||
pub fn open(&mut self, path: &Path, action: Action) -> Result<DocumentId, Error> {
|
||||
let path = helix_core::path::get_canonicalized_path(path);
|
||||
let path = helix_stdx::path::canonicalize(path);
|
||||
let id = self.document_by_path(&path).map(|doc| doc.id);
|
||||
|
||||
let id = if let Some(id) = id {
|
||||
|
|
Loading…
Add table
Reference in a new issue