Canonicalize paths before stripping current dir as prefix (#6290)

Co-authored-by: jazzfool <shamoslover69@gmail.com>
This commit is contained in:
jazzfool 2023-03-31 03:21:40 +11:00 committed by GitHub
parent 5b3dd6a678
commit d04288e0f3
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 37 additions and 14 deletions

1
Cargo.lock generated
View file

@@ -1082,6 +1082,7 @@ dependencies = [
"arc-swap", "arc-swap",
"bitflags 2.0.2", "bitflags 2.0.2",
"chrono", "chrono",
"dunce",
"encoding_rs", "encoding_rs",
"etcetera", "etcetera",
"hashbrown 0.13.2", "hashbrown 0.13.2",

View file

@@ -32,6 +32,7 @@ regex = "1"
bitflags = "2.0" bitflags = "2.0"
ahash = "0.8.3" ahash = "0.8.3"
hashbrown = { version = "0.13.2", features = ["raw"] } hashbrown = { version = "0.13.2", features = ["raw"] }
dunce = "1.0"
log = "0.4" log = "0.4"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }

View file

@@ -40,6 +40,21 @@ pub fn expand_tilde(path: &Path) -> PathBuf {
/// needs to improve on. /// needs to improve on.
/// Copied from cargo: <https://github.com/rust-lang/cargo/blob/070e459c2d8b79c5b2ac5218064e7603329c92ae/crates/cargo-util/src/paths.rs#L81> /// Copied from cargo: <https://github.com/rust-lang/cargo/blob/070e459c2d8b79c5b2ac5218064e7603329c92ae/crates/cargo-util/src/paths.rs#L81>
pub fn get_normalized_path(path: &Path) -> PathBuf { pub fn get_normalized_path(path: &Path) -> PathBuf {
// normalization strategy is to canonicalize first ancestor path that exists (i.e., canonicalize as much as possible),
// then run handrolled normalization on the non-existent remainder
let (base, path) = path
.ancestors()
.find_map(|base| {
let canonicalized_base = dunce::canonicalize(base).ok()?;
let remainder = path.strip_prefix(base).ok()?.into();
Some((canonicalized_base, remainder))
})
.unwrap_or_else(|| (PathBuf::new(), PathBuf::from(path)));
if path.as_os_str().is_empty() {
return base;
}
let mut components = path.components().peekable(); let mut components = path.components().peekable();
let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
components.next(); components.next();
@@ -63,7 +78,7 @@ pub fn get_normalized_path(path: &Path) -> PathBuf {
} }
} }
} }
ret base.join(ret)
} }
/// Returns the canonical, absolute form of a path with all intermediate components normalized. /// Returns the canonical, absolute form of a path with all intermediate components normalized.
@@ -82,13 +97,19 @@ pub fn get_canonicalized_path(path: &Path) -> std::io::Result<PathBuf> {
} }
pub fn get_relative_path(path: &Path) -> PathBuf { pub fn get_relative_path(path: &Path) -> PathBuf {
let path = PathBuf::from(path);
let path = if path.is_absolute() { let path = if path.is_absolute() {
let cwdir = std::env::current_dir().expect("couldn't determine current directory"); let cwdir = std::env::current_dir()
path.strip_prefix(cwdir).unwrap_or(path) .map(|path| get_normalized_path(&path))
.expect("couldn't determine current directory");
get_normalized_path(&path)
.strip_prefix(cwdir)
.map(PathBuf::from)
.unwrap_or(path)
} else { } else {
path path
}; };
fold_home_dir(path) fold_home_dir(&path)
} }
/// Returns a truncated filepath where the basepart of the path is reduced to the first /// Returns a truncated filepath where the basepart of the path is reduced to the first

View file

@@ -2,8 +2,6 @@
mod test { mod test {
mod helpers; mod helpers;
use std::path::PathBuf;
use helix_core::{syntax::AutoPairConfig, Selection}; use helix_core::{syntax::AutoPairConfig, Selection};
use helix_term::config::Config; use helix_term::config::Config;

View file

@@ -3,7 +3,7 @@ use std::{
ops::RangeInclusive, ops::RangeInclusive,
}; };
use helix_core::diagnostic::Severity; use helix_core::{diagnostic::Severity, path::get_normalized_path};
use helix_view::doc; use helix_view::doc;
use super::*; use super::*;
@@ -23,7 +23,7 @@ async fn test_write_quit_fail() -> anyhow::Result<()> {
assert_eq!(1, docs.len()); assert_eq!(1, docs.len());
let doc = docs.pop().unwrap(); let doc = docs.pop().unwrap();
assert_eq!(Some(file.path()), doc.path().map(PathBuf::as_path)); assert_eq!(Some(&get_normalized_path(file.path())), doc.path());
assert_eq!(&Severity::Error, app.editor.get_status().unwrap().1); assert_eq!(&Severity::Error, app.editor.get_status().unwrap().1);
}), }),
false, false,
@@ -269,7 +269,7 @@ async fn test_write_scratch_to_new_path() -> anyhow::Result<()> {
assert_eq!(1, docs.len()); assert_eq!(1, docs.len());
let doc = docs.pop().unwrap(); let doc = docs.pop().unwrap();
assert_eq!(Some(&file.path().to_path_buf()), doc.path()); assert_eq!(Some(&get_normalized_path(file.path())), doc.path());
}), }),
false, false,
) )
@@ -341,7 +341,7 @@ async fn test_write_new_path() -> anyhow::Result<()> {
Some(&|app| { Some(&|app| {
let doc = doc!(app.editor); let doc = doc!(app.editor);
assert!(!app.editor.is_err()); assert!(!app.editor.is_err());
assert_eq!(file1.path(), doc.path().unwrap()); assert_eq!(&get_normalized_path(file1.path()), doc.path().unwrap());
}), }),
), ),
( (
@@ -349,7 +349,7 @@ async fn test_write_new_path() -> anyhow::Result<()> {
Some(&|app| { Some(&|app| {
let doc = doc!(app.editor); let doc = doc!(app.editor);
assert!(!app.editor.is_err()); assert!(!app.editor.is_err());
assert_eq!(file2.path(), doc.path().unwrap()); assert_eq!(&get_normalized_path(file2.path()), doc.path().unwrap());
assert!(app.editor.document_by_path(file1.path()).is_none()); assert!(app.editor.document_by_path(file1.path()).is_none());
}), }),
), ),

View file

@@ -1,5 +1,7 @@
use super::*; use super::*;
use helix_core::path::get_normalized_path;
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
async fn test_split_write_quit_all() -> anyhow::Result<()> { async fn test_split_write_quit_all() -> anyhow::Result<()> {
let mut file1 = tempfile::NamedTempFile::new()?; let mut file1 = tempfile::NamedTempFile::new()?;
@@ -25,21 +27,21 @@ async fn test_split_write_quit_all() -> anyhow::Result<()> {
let doc1 = docs let doc1 = docs
.iter() .iter()
.find(|doc| doc.path().unwrap() == file1.path()) .find(|doc| doc.path().unwrap() == &get_normalized_path(file1.path()))
.unwrap(); .unwrap();
assert_eq!("hello1", doc1.text().to_string()); assert_eq!("hello1", doc1.text().to_string());
let doc2 = docs let doc2 = docs
.iter() .iter()
.find(|doc| doc.path().unwrap() == file2.path()) .find(|doc| doc.path().unwrap() == &get_normalized_path(file2.path()))
.unwrap(); .unwrap();
assert_eq!("hello2", doc2.text().to_string()); assert_eq!("hello2", doc2.text().to_string());
let doc3 = docs let doc3 = docs
.iter() .iter()
.find(|doc| doc.path().unwrap() == file3.path()) .find(|doc| doc.path().unwrap() == &get_normalized_path(file3.path()))
.unwrap(); .unwrap();
assert_eq!("hello3", doc3.text().to_string()); assert_eq!("hello3", doc3.text().to_string());