Create helix-stdx crate for stdlib extensions
helix-stdx is meant to carry extensions to the stdlib or to low-level dependencies that are useful in all other crates. This commit starts with all of the path functions from helix-core and the CWD tracking that lived in helix-loader. The CWD tracking in helix-loader was previously unable to use the canonicalization functions from helix-core, since helix-core itself depends on helix-loader (see its manifest below) and a reverse dependency would create a cycle. Switching to our custom canonicalization code should make no noticeable difference, though, since `std::env::current_dir` returns a canonicalized path with symlinks resolved (at least on Unix).
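For orientation, a minimal sketch (not part of the diff below) of how a downstream crate uses the relocated API; the function names are exactly the ones this commit moves or renames (`env::current_working_dir`, `env::set_current_working_dir`, `path::canonicalize`), while the helper and paths are hypothetical:

use std::path::PathBuf;

// Hypothetical helper: tilde-expand, anchor at the tracked CWD, and fold
// `.`/`..` components lexically, without requiring the path to exist.
fn resolve_user_path(input: &str) -> PathBuf {
    helix_stdx::path::canonicalize(input)
}

fn main() -> std::io::Result<()> {
    // Updates both the OS-level cwd and the in-process cache.
    helix_stdx::env::set_current_working_dir(std::env::temp_dir())?;
    // Served from the cache, so this keeps working even if the cwd is
    // deleted out from under the process later.
    let cwd = helix_stdx::env::current_working_dir();
    println!("{} -> {}", cwd.display(), resolve_user_path("~/notes.md").display());
    Ok(())
}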
parent af8e524a7d
commit 1f916e65cf

27 changed files with 163 additions and 111 deletions
Cargo.lock (generated, 14 lines changed)

@@ -1060,6 +1060,7 @@ dependencies = [
  "etcetera",
  "hashbrown 0.14.3",
  "helix-loader",
+ "helix-stdx",
  "imara-diff",
  "indoc",
  "log",
@@ -1074,7 +1075,6 @@ dependencies = [
  "slotmap",
  "smallvec",
  "smartstring",
- "tempfile",
  "textwrap",
  "toml",
  "tree-sitter",
@@ -1136,6 +1136,7 @@ dependencies = [
  "helix-core",
  "helix-loader",
  "helix-parsec",
+ "helix-stdx",
  "log",
  "lsp-types",
  "parking_lot",
@@ -1151,6 +1152,15 @@ dependencies = [
 name = "helix-parsec"
 version = "23.10.0"
+
+[[package]]
+name = "helix-stdx"
+version = "23.10.0"
+dependencies = [
+ "dunce",
+ "etcetera",
+ "tempfile",
+]
 
 [[package]]
 name = "helix-term"
 version = "23.10.0"
@@ -1169,6 +1179,7 @@ dependencies = [
  "helix-event",
  "helix-loader",
  "helix-lsp",
+ "helix-stdx",
  "helix-tui",
  "helix-vcs",
  "helix-view",
@@ -1241,6 +1252,7 @@ dependencies = [
  "helix-event",
  "helix-loader",
  "helix-lsp",
+ "helix-stdx",
  "helix-tui",
  "helix-vcs",
  "libc",
Cargo.toml

@@ -11,6 +11,7 @@ members = [
   "helix-loader",
   "helix-vcs",
   "helix-parsec",
+  "helix-stdx",
   "xtask",
 ]
 
helix-core/Cargo.toml

@@ -16,6 +16,7 @@ unicode-lines = ["ropey/unicode_lines"]
 integration = []
 
 [dependencies]
+helix-stdx = { path = "../helix-stdx" }
 helix-loader = { path = "../helix-loader" }
 
 ropey = { version = "1.6.1", default-features = false, features = ["simd"] }
@@ -55,4 +56,3 @@ parking_lot = "0.12"
 [dev-dependencies]
 quickcheck = { version = "1", default-features = false }
 indoc = "2.0.4"
-tempfile = "3.9"
helix-core/src/lib.rs

@@ -17,7 +17,6 @@ pub mod macros;
 pub mod match_brackets;
 pub mod movement;
 pub mod object;
-pub mod path;
 mod position;
 pub mod search;
 pub mod selection;
helix-loader/src/lib.rs

@@ -1,14 +1,13 @@
 pub mod config;
 pub mod grammar;
 
+use helix_stdx::{env::current_working_dir, path};
+
 use etcetera::base_strategy::{choose_base_strategy, BaseStrategy};
 use std::path::{Path, PathBuf};
-use std::sync::RwLock;
 
 pub const VERSION_AND_GIT_HASH: &str = env!("VERSION_AND_GIT_HASH");
 
-static CWD: RwLock<Option<PathBuf>> = RwLock::new(None);
-
 static RUNTIME_DIRS: once_cell::sync::Lazy<Vec<PathBuf>> =
     once_cell::sync::Lazy::new(prioritize_runtime_dirs);
 
@@ -16,31 +15,6 @@ static CONFIG_FILE: once_cell::sync::OnceCell<PathBuf> = once_cell::sync::OnceCe
 
 static LOG_FILE: once_cell::sync::OnceCell<PathBuf> = once_cell::sync::OnceCell::new();
 
-// Get the current working directory.
-// This information is managed internally as the call to std::env::current_dir
-// might fail if the cwd has been deleted.
-pub fn current_working_dir() -> PathBuf {
-    if let Some(path) = &*CWD.read().unwrap() {
-        return path.clone();
-    }
-
-    let path = std::env::current_dir()
-        .and_then(dunce::canonicalize)
-        .expect("Couldn't determine current working directory");
-    let mut cwd = CWD.write().unwrap();
-    *cwd = Some(path.clone());
-
-    path
-}
-
-pub fn set_current_working_dir(path: impl AsRef<Path>) -> std::io::Result<()> {
-    let path = dunce::canonicalize(path)?;
-    std::env::set_current_dir(&path)?;
-    let mut cwd = CWD.write().unwrap();
-    *cwd = Some(path);
-    Ok(())
-}
-
 pub fn initialize_config_file(specified_file: Option<PathBuf>) {
     let config_file = specified_file.unwrap_or_else(default_config_file);
     ensure_parent_dir(&config_file);
@@ -280,21 +254,9 @@ fn ensure_parent_dir(path: &Path) {
 mod merge_toml_tests {
     use std::str;
 
-    use super::{current_working_dir, merge_toml_values, set_current_working_dir};
+    use super::merge_toml_values;
     use toml::Value;
 
-    #[test]
-    fn current_dir_is_set() {
-        let new_path = dunce::canonicalize(std::env::temp_dir()).unwrap();
-        let cwd = current_working_dir();
-        assert_ne!(cwd, new_path);
-
-        set_current_working_dir(&new_path).expect("Couldn't set new path");
-
-        let cwd = current_working_dir();
-        assert_eq!(cwd, new_path);
-    }
-
     #[test]
     fn language_toml_map_merges() {
         const USER: &str = r#"
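The comment on the removed current_working_dir explains why the value is cached at all; a small sketch (assuming a Unix-like OS) of the failure mode it guards against:

use std::{env, fs};

fn main() -> std::io::Result<()> {
    let dir = env::temp_dir().join("doomed-cwd-example");
    fs::create_dir_all(&dir)?;
    env::set_current_dir(&dir)?;
    fs::remove_dir(&dir)?;
    // The OS-level cwd is gone: on most platforms this now returns Err,
    // which is exactly what the RwLock<Option<PathBuf>> cache avoids.
    println!("{:?}", env::current_dir());
    Ok(())
}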
helix-lsp/Cargo.toml

@@ -13,6 +13,7 @@ homepage.workspace = true
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
+helix-stdx = { path = "../helix-stdx" }
 helix-core = { path = "../helix-core" }
 helix-loader = { path = "../helix-loader" }
 helix-parsec = { path = "../helix-parsec" }
helix-lsp/src/client.rs

@@ -4,8 +4,9 @@ use crate::{
     Call, Error, OffsetEncoding, Result,
 };
 
-use helix_core::{find_workspace, path, syntax::LanguageServerFeature, ChangeSet, Rope};
+use helix_core::{find_workspace, syntax::LanguageServerFeature, ChangeSet, Rope};
 use helix_loader::{self, VERSION_AND_GIT_HASH};
+use helix_stdx::path;
 use lsp::{
     notification::DidChangeWorkspaceFolders, CodeActionCapabilityResolveSupport,
     DidChangeWorkspaceFoldersParams, OneOf, PositionEncodingKind, WorkspaceFolder,
@@ -68,7 +69,7 @@ impl Client {
         may_support_workspace: bool,
     ) -> bool {
         let (workspace, workspace_is_cwd) = find_workspace();
-        let workspace = path::get_normalized_path(&workspace);
+        let workspace = path::normalize(workspace);
         let root = find_lsp_workspace(
             doc_path
                 .and_then(|x| x.parent().and_then(|x| x.to_str()))
@@ -204,7 +205,7 @@ impl Client {
         let (server_rx, server_tx, initialize_notify) =
             Transport::start(reader, writer, stderr, id, name.clone());
         let (workspace, workspace_is_cwd) = find_workspace();
-        let workspace = path::get_normalized_path(&workspace);
+        let workspace = path::normalize(workspace);
         let root = find_lsp_workspace(
             doc_path
                 .and_then(|x| x.parent().and_then(|x| x.to_str()))
helix-lsp/src/lib.rs

@@ -11,10 +11,10 @@ pub use lsp::{Position, Url};
 pub use lsp_types as lsp;
 
 use futures_util::stream::select_all::SelectAll;
-use helix_core::{
-    path,
-    syntax::{LanguageConfiguration, LanguageServerConfiguration, LanguageServerFeatures},
+use helix_core::syntax::{
+    LanguageConfiguration, LanguageServerConfiguration, LanguageServerFeatures,
 };
+use helix_stdx::path;
 use tokio::sync::mpsc::UnboundedReceiver;
 
 use std::{
@@ -958,10 +958,10 @@ pub fn find_lsp_workspace(
     let mut file = if file.is_absolute() {
         file.to_path_buf()
     } else {
-        let current_dir = helix_loader::current_working_dir();
+        let current_dir = helix_stdx::env::current_working_dir();
         current_dir.join(file)
     };
-    file = path::get_normalized_path(&file);
+    file = path::normalize(&file);
 
     if !file.starts_with(workspace) {
         return None;
@@ -978,7 +978,7 @@ pub fn find_lsp_workspace(
 
     if root_dirs
         .iter()
-        .any(|root_dir| path::get_normalized_path(&workspace.join(root_dir)) == ancestor)
+        .any(|root_dir| path::normalize(workspace.join(root_dir)) == ancestor)
     {
         // if the worskapce is the cwd do not search any higher for workspaces
         // but specify
helix-stdx/Cargo.toml (new file, 19 lines)

@@ -0,0 +1,19 @@
+[package]
+name = "helix-stdx"
+description = "Standard library extensions"
+include = ["src/**/*", "README.md"]
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+categories.workspace = true
+repository.workspace = true
+homepage.workspace = true
+
+[dependencies]
+dunce = "1.0"
+etcetera = "0.8"
+
+[dev-dependencies]
+tempfile = "3.9"
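The dependency choices mirror what the moved code already used: etcetera supplies home_dir for tilde expansion, and dunce is there because on Windows std::fs::canonicalize returns verbatim paths (\\?\C:\...) that many external tools mishandle, while dunce::canonicalize falls back to the plain C:\... form where possible. A sketch of the difference (on Unix the two agree):

fn main() -> std::io::Result<()> {
    // std may yield \\?\C:\... on Windows; dunce strips the verbatim
    // prefix whenever the path can be represented without it.
    println!("std:   {}", std::fs::canonicalize(".")?.display());
    println!("dunce: {}", dunce::canonicalize(".")?.display());
    Ok(())
}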
helix-stdx/src/env.rs (new file, 48 lines)

@@ -0,0 +1,48 @@
+use std::{
+    path::{Path, PathBuf},
+    sync::RwLock,
+};
+
+static CWD: RwLock<Option<PathBuf>> = RwLock::new(None);
+
+// Get the current working directory.
+// This information is managed internally as the call to std::env::current_dir
+// might fail if the cwd has been deleted.
+pub fn current_working_dir() -> PathBuf {
+    if let Some(path) = &*CWD.read().unwrap() {
+        return path.clone();
+    }
+
+    let path = std::env::current_dir()
+        .map(crate::path::normalize)
+        .expect("Couldn't determine current working directory");
+    let mut cwd = CWD.write().unwrap();
+    *cwd = Some(path.clone());
+
+    path
+}
+
+pub fn set_current_working_dir(path: impl AsRef<Path>) -> std::io::Result<()> {
+    let path = crate::path::canonicalize(path);
+    std::env::set_current_dir(&path)?;
+    let mut cwd = CWD.write().unwrap();
+    *cwd = Some(path);
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{current_working_dir, set_current_working_dir};
+
+    #[test]
+    fn current_dir_is_set() {
+        let new_path = dunce::canonicalize(std::env::temp_dir()).unwrap();
+        let cwd = current_working_dir();
+        assert_ne!(cwd, new_path);
+
+        set_current_working_dir(&new_path).expect("Couldn't set new path");
+
+        let cwd = current_working_dir();
+        assert_eq!(cwd, new_path);
+    }
+}
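One behavioral nuance of the move: the old helix-loader version ran the cwd through dunce::canonicalize (filesystem-backed symlink resolution), while this version applies only the lexical crate::path::normalize. As the commit message argues, the difference should be unobservable because std::env::current_dir already returns a symlink-resolved path, at least on Unix; a quick check one could run:

fn main() -> std::io::Result<()> {
    let raw = std::env::current_dir()?;
    // Lexical cleanup only, no filesystem access.
    let lexical = helix_stdx::path::normalize(&raw);
    // Filesystem-backed resolution, as the old code did.
    let resolved = dunce::canonicalize(&raw)?;
    // Expected to agree whenever `raw` contains no symlinked components.
    assert_eq!(lexical, resolved);
    Ok(())
}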
helix-stdx/src/lib.rs (new file, 2 lines)

@@ -0,0 +1,2 @@
+pub mod env;
+pub mod path;
helix-stdx/src/path.rs (moved from helix-core/src/path.rs)

@@ -1,6 +1,9 @@
-use etcetera::home_dir;
+pub use etcetera::home_dir;
 
 use std::path::{Component, Path, PathBuf};
 
+use crate::env::current_working_dir;
+
 /// Replaces users home directory from `path` with tilde `~` if the directory
 /// is available, otherwise returns the path unchanged.
 pub fn fold_home_dir(path: &Path) -> PathBuf {
@@ -16,7 +19,8 @@ pub fn fold_home_dir(path: &Path) -> PathBuf {
 /// Expands tilde `~` into users home directory if available, otherwise returns the path
 /// unchanged. The tilde will only be expanded when present as the first component of the path
 /// and only slash follows it.
-pub fn expand_tilde(path: &Path) -> PathBuf {
+pub fn expand_tilde(path: impl AsRef<Path>) -> PathBuf {
+    let path = path.as_ref();
     let mut components = path.components().peekable();
     if let Some(Component::Normal(c)) = components.peek() {
         if c == &"~" {
@@ -33,8 +37,8 @@ pub fn expand_tilde(path: &Path) -> PathBuf {
 /// Normalize a path without resolving symlinks.
 // Strategy: start from the first component and move up. Cannonicalize previous path,
 // join component, cannonicalize new path, strip prefix and join to the final result.
-pub fn get_normalized_path(path: &Path) -> PathBuf {
-    let mut components = path.components().peekable();
+pub fn normalize(path: impl AsRef<Path>) -> PathBuf {
+    let mut components = path.as_ref().components().peekable();
     let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
         components.next();
         PathBuf::from(c.as_os_str())
@@ -104,22 +108,22 @@ pub fn get_normalized_path(path: &Path) -> PathBuf {
 ///
 /// This function is used instead of [`std::fs::canonicalize`] because we don't want to verify
 /// here if the path exists, just normalize it's components.
-pub fn get_canonicalized_path(path: &Path) -> PathBuf {
+pub fn canonicalize(path: impl AsRef<Path>) -> PathBuf {
     let path = expand_tilde(path);
     let path = if path.is_relative() {
-        helix_loader::current_working_dir().join(path)
+        current_working_dir().join(path)
     } else {
         path
     };
 
-    get_normalized_path(path.as_path())
+    normalize(path)
 }
 
-pub fn get_relative_path(path: &Path) -> PathBuf {
-    let path = PathBuf::from(path);
+pub fn get_relative_path(path: impl AsRef<Path>) -> PathBuf {
+    let path = PathBuf::from(path.as_ref());
     let path = if path.is_absolute() {
-        let cwdir = get_normalized_path(&helix_loader::current_working_dir());
-        get_normalized_path(&path)
+        let cwdir = normalize(current_working_dir());
+        normalize(&path)
             .strip_prefix(cwdir)
             .map(PathBuf::from)
             .unwrap_or(path)
@@ -135,8 +139,8 @@ pub fn get_relative_path(path: &Path) -> PathBuf {
 /// Also strip the current working directory from the beginning of the path.
 /// Note that this function does not check if the truncated path is unambiguous.
 ///
 /// ```
-/// use helix_core::path::get_truncated_path;
+/// use helix_stdx::path::get_truncated_path;
 /// use std::path::Path;
 ///
 /// assert_eq!(
@@ -158,8 +162,8 @@ pub fn get_relative_path(path: &Path) -> PathBuf {
 /// assert_eq!(get_truncated_path("").as_path(), Path::new(""));
 /// ```
 ///
-pub fn get_truncated_path<P: AsRef<Path>>(path: P) -> PathBuf {
-    let cwd = helix_loader::current_working_dir();
+pub fn get_truncated_path(path: impl AsRef<Path>) -> PathBuf {
+    let cwd = current_working_dir();
     let path = path
         .as_ref()
         .strip_prefix(cwd)
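To summarize the renames: get_normalized_path becomes normalize (cleans `.` and `..` components without resolving symlinks, per its doc comment) and get_canonicalized_path becomes canonicalize (tilde expansion plus anchoring relative paths at the tracked CWD); unlike std::fs::canonicalize, neither requires the path to exist. A small sketch of those contracts:

use std::path::Path;

fn main() {
    // `.` components always fold away, even for paths that don't exist.
    assert_eq!(
        helix_stdx::path::normalize(Path::new("a/./b")),
        Path::new("a/b")
    );
    // A relative input becomes absolute against env::current_working_dir().
    let abs = helix_stdx::path::canonicalize("does/not/exist");
    assert!(abs.is_absolute());
}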
helix-stdx/tests/path.rs (moved from helix-core/tests/path.rs)

@@ -6,7 +6,7 @@ use std::{
     path::{Component, Path, PathBuf},
 };
 
-use helix_core::path::get_normalized_path;
+use helix_stdx::path;
 use tempfile::Builder;
 
 // Paths on Windows are almost always case-insensitive.
@@ -34,7 +34,7 @@ fn test_case_folding_windows() -> Result<(), Box<dyn Error>> {
     );
     let test_path = root_without_prefix.join(lowercase_case);
     assert_eq!(
-        get_normalized_path(&test_path),
+        path::normalize(&test_path),
         case.path().strip_prefix(&tmp_prefix)?
     );
 
@@ -80,7 +80,7 @@ fn test_normalize_path() -> Result<(), Box<dyn Error>> {
     // root/link
     let path = link.strip_prefix(&tmp_prefix)?;
     assert_eq!(
-        get_normalized_path(path),
+        path::normalize(path),
         path,
         "input {:?} and symlink last component shouldn't be resolved",
         path
@@ -98,7 +98,7 @@ fn test_normalize_path() -> Result<(), Box<dyn Error>> {
         .unwrap()
         .join(Component::ParentDir);
     assert_eq!(
-        get_normalized_path(&path),
+        path::normalize(&path),
         expected,
         "input {:?} and \"..\" should not erase the simlink that goes ahead",
         &path
@@ -118,7 +118,7 @@ fn test_normalize_path() -> Result<(), Box<dyn Error>> {
         .unwrap()
         .join(Component::ParentDir)
         .join(Component::ParentDir);
-    assert_eq!(get_normalized_path(&path), expected, "input {:?}", &path);
+    assert_eq!(path::normalize(&path), expected, "input {:?}", &path);
 
     Ok(())
 }
helix-term/Cargo.toml

@@ -23,6 +23,7 @@ name = "hx"
 path = "src/main.rs"
 
 [dependencies]
+helix-stdx = { path = "../helix-stdx" }
 helix-core = { path = "../helix-core" }
 helix-event = { path = "../helix-event" }
 helix-view = { path = "../helix-view" }
helix-term/src/application.rs

@@ -1,11 +1,12 @@
 use arc_swap::{access::Map, ArcSwap};
 use futures_util::Stream;
-use helix_core::{path::get_relative_path, pos_at_coords, syntax, Selection};
+use helix_core::{pos_at_coords, syntax, Selection};
 use helix_lsp::{
     lsp::{self, notification::Notification},
     util::lsp_range_to_range,
     LspProgressMap,
 };
+use helix_stdx::path::get_relative_path;
 use helix_view::{
     align_view,
     document::DocumentSavedEventResult,
helix-term/src/commands.rs

@@ -2169,7 +2169,7 @@ fn global_search(cx: &mut Context) {
         type Data = Option<PathBuf>;
 
         fn format(&self, current_path: &Self::Data) -> Row {
-            let relative_path = helix_core::path::get_relative_path(&self.path)
+            let relative_path = helix_stdx::path::get_relative_path(&self.path)
                 .to_string_lossy()
                 .into_owned();
             if current_path
@@ -2218,7 +2218,7 @@ fn global_search(cx: &mut Context) {
             .case_smart(smart_case)
             .build(regex.as_str())
         {
-            let search_root = helix_loader::current_working_dir();
+            let search_root = helix_stdx::env::current_working_dir();
             if !search_root.exists() {
                 cx.editor
                     .set_error("Current working directory does not exist");
@@ -2731,7 +2731,7 @@ fn file_picker_in_current_buffer_directory(cx: &mut Context) {
 }
 
 fn file_picker_in_current_directory(cx: &mut Context) {
-    let cwd = helix_loader::current_working_dir();
+    let cwd = helix_stdx::env::current_working_dir();
     if !cwd.exists() {
         cx.editor
             .set_error("Current working directory does not exist");
@@ -2759,7 +2759,7 @@ fn buffer_picker(cx: &mut Context) {
                 let path = self
                     .path
                     .as_deref()
-                    .map(helix_core::path::get_relative_path);
+                    .map(helix_stdx::path::get_relative_path);
                 let path = match path.as_deref().and_then(Path::to_str) {
                     Some(path) => path,
                     None => SCRATCH_BUFFER_NAME,
@@ -2826,7 +2826,7 @@ fn jumplist_picker(cx: &mut Context) {
                 let path = self
                     .path
                     .as_deref()
-                    .map(helix_core::path::get_relative_path);
+                    .map(helix_stdx::path::get_relative_path);
                 let path = match path.as_deref().and_then(Path::to_str) {
                     Some(path) => path,
                     None => SCRATCH_BUFFER_NAME,
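The picker changes are mechanical: get_relative_path keeps its name and its display role, only its home and signature change. A sketch (Unix-style paths assumed) of the behavior the pickers rely on:

use std::path::Path;

fn main() {
    let cwd = helix_stdx::env::current_working_dir();
    // Paths under the tracked CWD are shown relative to it...
    assert_eq!(
        helix_stdx::path::get_relative_path(cwd.join("src/main.rs")),
        Path::new("src/main.rs")
    );
    // ...while absolute paths outside it pass through unchanged.
    assert_eq!(
        helix_stdx::path::get_relative_path("/nonexistent/elsewhere"),
        Path::new("/nonexistent/elsewhere")
    );
}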
helix-term/src/commands/dap.rs

@@ -217,7 +217,7 @@ pub fn dap_start_impl(
             }
         }
 
-        args.insert("cwd", to_value(helix_loader::current_working_dir())?);
+        args.insert("cwd", to_value(helix_stdx::env::current_working_dir())?);
 
         let args = to_value(args).unwrap();
 
helix-term/src/commands/lsp.rs

@@ -17,9 +17,8 @@ use tui::{
 
 use super::{align_view, push_jump, Align, Context, Editor, Open};
 
-use helix_core::{
-    path, syntax::LanguageServerFeature, text_annotations::InlineAnnotation, Selection,
-};
+use helix_core::{syntax::LanguageServerFeature, text_annotations::InlineAnnotation, Selection};
+use helix_stdx::path;
 use helix_view::{
     document::{DocumentInlayHints, DocumentInlayHintsId, Mode},
     editor::Action,
@@ -1018,7 +1017,7 @@ fn goto_impl(
     locations: Vec<lsp::Location>,
     offset_encoding: OffsetEncoding,
 ) {
-    let cwdir = helix_loader::current_working_dir();
+    let cwdir = helix_stdx::env::current_working_dir();
 
     match locations.as_slice() {
         [location] => {
helix-term/src/commands/typed.rs

@@ -7,7 +7,7 @@ use super::*;
 
 use helix_core::fuzzy::fuzzy_match;
 use helix_core::indent::MAX_INDENT;
-use helix_core::{encoding, line_ending, path::get_canonicalized_path, shellwords::Shellwords};
+use helix_core::{encoding, line_ending, shellwords::Shellwords};
 use helix_lsp::{OffsetEncoding, Url};
 use helix_view::document::DEFAULT_LANGUAGE_NAME;
 use helix_view::editor::{Action, CloseError, ConfigEvent};
@@ -111,7 +111,7 @@ fn open(cx: &mut compositor::Context, args: &[Cow<str>], event: PromptEvent) ->
     ensure!(!args.is_empty(), "wrong argument count");
     for arg in args {
         let (path, pos) = args::parse_file(arg);
-        let path = helix_core::path::expand_tilde(&path);
+        let path = helix_stdx::path::expand_tilde(&path);
         // If the path is a directory, open a file picker on that directory and update the status
         // message
         if let Ok(true) = std::fs::canonicalize(&path).map(|p| p.is_dir()) {
@@ -1079,18 +1079,17 @@ fn change_current_directory(
         return Ok(());
     }
 
-    let dir = helix_core::path::expand_tilde(
+    let dir = helix_stdx::path::expand_tilde(
         args.first()
             .context("target directory not provided")?
-            .as_ref()
             .as_ref(),
     );
 
-    helix_loader::set_current_working_dir(dir)?;
+    helix_stdx::env::set_current_working_dir(dir)?;
 
     cx.editor.set_status(format!(
         "Current working directory is now {}",
-        helix_loader::current_working_dir().display()
+        helix_stdx::env::current_working_dir().display()
    ));
     Ok(())
 }
@@ -1104,7 +1103,7 @@ fn show_current_directory(
         return Ok(());
     }
 
-    let cwd = helix_loader::current_working_dir();
+    let cwd = helix_stdx::env::current_working_dir();
     let message = format!("Current working directory is {}", cwd.display());
 
     if cwd.exists() {
@@ -2409,7 +2408,7 @@ fn move_buffer(
     ensure!(args.len() == 1, format!(":move takes one argument"));
     let doc = doc!(cx.editor);
 
-    let new_path = get_canonicalized_path(&PathBuf::from(args.first().unwrap().to_string()));
+    let new_path =
+        helix_stdx::path::canonicalize(&PathBuf::from(args.first().unwrap().to_string()));
     let old_path = doc
         .path()
         .ok_or_else(|| anyhow!("Scratch buffer cannot be moved. Use :write instead"))?
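The `.as_ref()` dropped in change_current_directory falls out of the new `impl AsRef<Path>` signatures; anything path-like can now be handed over directly, as this sketch of the new expand_tilde signature shows:

use std::{borrow::Cow, path::PathBuf};

fn main() {
    // All of these compile against expand_tilde(path: impl AsRef<Path>).
    let _: PathBuf = helix_stdx::path::expand_tilde("~/a");
    let _: PathBuf = helix_stdx::path::expand_tilde(String::from("~/b"));
    let _: PathBuf = helix_stdx::path::expand_tilde(Cow::Borrowed("~/c"));
    let _: PathBuf = helix_stdx::path::expand_tilde(PathBuf::from("~/d"));
}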
helix-term/src/main.rs

@@ -118,16 +118,16 @@ FLAGS:
 
     // Before setting the working directory, resolve all the paths in args.files
     for (path, _) in args.files.iter_mut() {
-        *path = helix_core::path::get_canonicalized_path(path);
+        *path = helix_stdx::path::canonicalize(&path);
     }
 
     // NOTE: Set the working directory early so the correct configuration is loaded. Be aware that
     // Application::new() depends on this logic so it must be updated if this changes.
     if let Some(path) = &args.working_directory {
-        helix_loader::set_current_working_dir(path)?;
+        helix_stdx::env::set_current_working_dir(path)?;
     } else if let Some((path, _)) = args.files.first().filter(|p| p.0.is_dir()) {
         // If the first file is a directory, it will be the working directory unless -w was specified
-        helix_loader::set_current_working_dir(path)?;
+        helix_stdx::env::set_current_working_dir(path)?;
     }
 
     let config = match Config::load_default() {
helix-term/src/ui/mod.rs

@@ -409,7 +409,7 @@ pub mod completers {
         use std::path::Path;
 
         let is_tilde = input == "~";
-        let path = helix_core::path::expand_tilde(Path::new(input));
+        let path = helix_stdx::path::expand_tilde(Path::new(input));
 
         let (dir, file_name) = if input.ends_with(std::path::MAIN_SEPARATOR) {
             (path, None)
@@ -430,7 +430,7 @@ pub mod completers {
             match path.parent() {
                 Some(path) if !path.as_os_str().is_empty() => path.to_path_buf(),
                 // Path::new("h")'s parent is Some("")...
                _ => helix_stdx::env::current_working_dir(),
             }
         };
 
helix-term/src/ui/picker.rs

@@ -63,7 +63,7 @@ impl PathOrId {
     fn get_canonicalized(self) -> Self {
         use PathOrId::*;
         match self {
-            Path(path) => Path(helix_core::path::get_canonicalized_path(&path)),
+            Path(path) => Path(helix_stdx::path::canonicalize(path)),
             Id(id) => Id(id),
         }
     }
helix-term/tests/test/write.rs

@@ -3,7 +3,8 @@ use std::{
     ops::RangeInclusive,
 };
 
-use helix_core::{diagnostic::Severity, path::get_normalized_path};
+use helix_core::diagnostic::Severity;
+use helix_stdx::path;
 use helix_view::doc;
 
 use super::*;
@@ -23,7 +24,7 @@ async fn test_write_quit_fail() -> anyhow::Result<()> {
             assert_eq!(1, docs.len());
 
             let doc = docs.pop().unwrap();
-            assert_eq!(Some(&get_normalized_path(file.path())), doc.path());
+            assert_eq!(Some(&path::normalize(file.path())), doc.path());
             assert_eq!(&Severity::Error, app.editor.get_status().unwrap().1);
         }),
         false,
@@ -269,7 +270,7 @@ async fn test_write_scratch_to_new_path() -> anyhow::Result<()> {
             assert_eq!(1, docs.len());
 
             let doc = docs.pop().unwrap();
-            assert_eq!(Some(&get_normalized_path(file.path())), doc.path());
+            assert_eq!(Some(&path::normalize(file.path())), doc.path());
         }),
         false,
     )
@@ -341,7 +342,7 @@ async fn test_write_new_path() -> anyhow::Result<()> {
             Some(&|app| {
                 let doc = doc!(app.editor);
                 assert!(!app.editor.is_err());
-                assert_eq!(&get_normalized_path(file1.path()), doc.path().unwrap());
+                assert_eq!(&path::normalize(file1.path()), doc.path().unwrap());
             }),
         ),
         (
@@ -349,7 +350,7 @@ async fn test_write_new_path() -> anyhow::Result<()> {
             Some(&|app| {
                 let doc = doc!(app.editor);
                 assert!(!app.editor.is_err());
-                assert_eq!(&get_normalized_path(file2.path()), doc.path().unwrap());
+                assert_eq!(&path::normalize(file2.path()), doc.path().unwrap());
                 assert!(app.editor.document_by_path(file1.path()).is_none());
             }),
         ),
helix-term/tests/test/splits.rs

@@ -1,6 +1,6 @@
 use super::*;
 
-use helix_core::path::get_normalized_path;
+use helix_stdx::path;
 
 #[tokio::test(flavor = "multi_thread")]
 async fn test_split_write_quit_all() -> anyhow::Result<()> {
@@ -27,21 +27,21 @@ async fn test_split_write_quit_all() -> anyhow::Result<()> {
 
     let doc1 = docs
         .iter()
-        .find(|doc| doc.path().unwrap() == &get_normalized_path(file1.path()))
+        .find(|doc| doc.path().unwrap() == &path::normalize(file1.path()))
         .unwrap();
 
     assert_eq!("hello1", doc1.text().to_string());
 
     let doc2 = docs
         .iter()
-        .find(|doc| doc.path().unwrap() == &get_normalized_path(file2.path()))
+        .find(|doc| doc.path().unwrap() == &path::normalize(file2.path()))
         .unwrap();
 
     assert_eq!("hello2", doc2.text().to_string());
 
     let doc3 = docs
         .iter()
-        .find(|doc| doc.path().unwrap() == &get_normalized_path(file3.path()))
+        .find(|doc| doc.path().unwrap() == &path::normalize(file3.path()))
         .unwrap();
 
     assert_eq!("hello3", doc3.text().to_string());
 
helix-view/Cargo.toml

@@ -15,6 +15,7 @@ default = []
 term = ["crossterm"]
 
 [dependencies]
+helix-stdx = { path = "../helix-stdx" }
 helix-core = { path = "../helix-core" }
 helix-event = { path = "../helix-event" }
 helix-loader = { path = "../helix-loader" }
helix-view/src/document.rs

@@ -855,7 +855,7 @@ impl Document {
         let text = self.text().clone();
 
         let path = match path {
-            Some(path) => helix_core::path::get_canonicalized_path(&path),
+            Some(path) => helix_stdx::path::canonicalize(path),
             None => {
                 if self.path.is_none() {
                     bail!("Can't save with no path set!");
@@ -1049,7 +1049,7 @@ impl Document {
     }
 
     pub fn set_path(&mut self, path: Option<&Path>) {
-        let path = path.map(helix_core::path::get_canonicalized_path);
+        let path = path.map(helix_stdx::path::canonicalize);
 
         // if parent doesn't exist we still want to open the document
         // and error out when document is saved
@@ -1672,7 +1672,7 @@ impl Document {
     pub fn relative_path(&self) -> Option<PathBuf> {
         self.path
            .as_deref()
-            .map(helix_core::path::get_relative_path)
+            .map(helix_stdx::path::get_relative_path)
     }
 
     pub fn display_name(&self) -> Cow<'static, str> {
helix-view/src/editor.rs

@@ -1464,7 +1464,7 @@ impl Editor {
 
     // ??? possible use for integration tests
     pub fn open(&mut self, path: &Path, action: Action) -> Result<DocumentId, Error> {
-        let path = helix_core::path::get_canonicalized_path(path);
+        let path = helix_stdx::path::canonicalize(path);
         let id = self.document_by_path(&path).map(|doc| doc.id);
 
         let id = if let Some(id) = id {