Canonicalize paths before stripping current dir as prefix (#6290)
Co-authored-by: jazzfool <[email protected]>
jazzfool committed Mar 30, 2023
1 parent 5b3dd6a commit d04288e
Showing 6 changed files with 37 additions and 14 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions helix-core/Cargo.toml
@@ -32,6 +32,7 @@ regex = "1"
 bitflags = "2.0"
 ahash = "0.8.3"
 hashbrown = { version = "0.13.2", features = ["raw"] }
+dunce = "1.0"
 
 log = "0.4"
 serde = { version = "1.0", features = ["derive"] }
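
For context, `dunce::canonicalize` (the dependency added above) behaves like `std::fs::canonicalize`, except that on Windows it converts the UNC-style result (e.g. `\\?\C:\...`) back to an ordinary path when that is safe; on other platforms the two are equivalent. A minimal sketch, assuming only the dunce crate (the path is illustrative):

```rust
use std::path::PathBuf;

fn main() -> std::io::Result<()> {
    // On Windows, std::fs::canonicalize(".") typically yields a verbatim path such as
    // r"\\?\C:\repo", while dunce::canonicalize yields r"C:\repo"; on Unix both agree.
    let canonical: PathBuf = dunce::canonicalize(".")?;
    println!("{}", canonical.display());
    Ok(())
}
```
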
29 changes: 25 additions & 4 deletions helix-core/src/path.rs
@@ -40,6 +40,21 @@ pub fn expand_tilde(path: &Path) -> PathBuf {
 /// needs to improve on.
 /// Copied from cargo: <https://github.com/rust-lang/cargo/blob/070e459c2d8b79c5b2ac5218064e7603329c92ae/crates/cargo-util/src/paths.rs#L81>
 pub fn get_normalized_path(path: &Path) -> PathBuf {
+    // normalization strategy is to canonicalize first ancestor path that exists (i.e., canonicalize as much as possible),
+    // then run handrolled normalization on the non-existent remainder
+    let (base, path) = path
+        .ancestors()
+        .find_map(|base| {
+            let canonicalized_base = dunce::canonicalize(base).ok()?;
+            let remainder = path.strip_prefix(base).ok()?.into();
+            Some((canonicalized_base, remainder))
+        })
+        .unwrap_or_else(|| (PathBuf::new(), PathBuf::from(path)));
+
+    if path.as_os_str().is_empty() {
+        return base;
+    }
+
     let mut components = path.components().peekable();
     let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
         components.next();
@@ -63,7 +78,7 @@ pub fn get_normalized_path(path: &Path) -> PathBuf {
             }
         }
     }
-    ret
+    base.join(ret)
 }
 
 /// Returns the canonical, absolute form of a path with all intermediate components normalized.
@@ -82,13 +97,19 @@ pub fn get_canonicalized_path(path: &Path) -> std::io::Result<PathBuf> {
 }
 
 pub fn get_relative_path(path: &Path) -> PathBuf {
+    let path = PathBuf::from(path);
     let path = if path.is_absolute() {
-        let cwdir = std::env::current_dir().expect("couldn't determine current directory");
-        path.strip_prefix(cwdir).unwrap_or(path)
+        let cwdir = std::env::current_dir()
+            .map(|path| get_normalized_path(&path))
+            .expect("couldn't determine current directory");
+        get_normalized_path(&path)
+            .strip_prefix(cwdir)
+            .map(PathBuf::from)
+            .unwrap_or(path)
     } else {
         path
     };
-    fold_home_dir(path)
+    fold_home_dir(&path)
 }
 
 /// Returns a truncated filepath where the basepart of the path is reduced to the first
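
For context, a minimal usage sketch of the normalization strategy described in the new comment: canonicalize the deepest ancestor that exists, then lexically normalize the non-existent remainder. The concrete paths below are hypothetical, and the output assumes no symlinks in the existing prefix:

```rust
use helix_core::path::get_normalized_path;
use std::path::Path;

fn main() {
    // Suppose "/home/user/project" exists on disk but "missing/../file.rs" does not.
    // get_normalized_path canonicalizes the deepest existing ancestor via
    // dunce::canonicalize, then folds "missing/.." away lexically in the remainder.
    let p = Path::new("/home/user/project/missing/../file.rs");
    println!("{}", get_normalized_path(p).display());
    // Expected output on such a system: /home/user/project/file.rs
}
```
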
2 changes: 0 additions & 2 deletions helix-term/tests/integration.rs
@@ -2,8 +2,6 @@
 mod test {
     mod helpers;
 
-    use std::path::PathBuf;
-
     use helix_core::{syntax::AutoPairConfig, Selection};
     use helix_term::config::Config;
 
10 changes: 5 additions & 5 deletions helix-term/tests/test/commands/write.rs
@@ -3,7 +3,7 @@ use std::{
     ops::RangeInclusive,
 };
 
-use helix_core::diagnostic::Severity;
+use helix_core::{diagnostic::Severity, path::get_normalized_path};
 use helix_view::doc;
 
 use super::*;
@@ -23,7 +23,7 @@ async fn test_write_quit_fail() -> anyhow::Result<()> {
             assert_eq!(1, docs.len());
 
             let doc = docs.pop().unwrap();
-            assert_eq!(Some(file.path()), doc.path().map(PathBuf::as_path));
+            assert_eq!(Some(&get_normalized_path(file.path())), doc.path());
             assert_eq!(&Severity::Error, app.editor.get_status().unwrap().1);
         }),
         false,
@@ -269,7 +269,7 @@ async fn test_write_scratch_to_new_path() -> anyhow::Result<()> {
             assert_eq!(1, docs.len());
 
             let doc = docs.pop().unwrap();
-            assert_eq!(Some(&file.path().to_path_buf()), doc.path());
+            assert_eq!(Some(&get_normalized_path(file.path())), doc.path());
         }),
         false,
     )
@@ -341,15 +341,15 @@ async fn test_write_new_path() -> anyhow::Result<()> {
                Some(&|app| {
                    let doc = doc!(app.editor);
                    assert!(!app.editor.is_err());
-                    assert_eq!(file1.path(), doc.path().unwrap());
+                    assert_eq!(&get_normalized_path(file1.path()), doc.path().unwrap());
                }),
            ),
            (
                Some(&format!(":w {}<ret>", file2.path().to_string_lossy())),
                Some(&|app| {
                    let doc = doc!(app.editor);
                    assert!(!app.editor.is_err());
-                    assert_eq!(file2.path(), doc.path().unwrap());
+                    assert_eq!(&get_normalized_path(file2.path()), doc.path().unwrap());
                    assert!(app.editor.document_by_path(file1.path()).is_none());
                }),
            ),
8 changes: 5 additions & 3 deletions helix-term/tests/test/splits.rs
@@ -1,5 +1,7 @@
 use super::*;
 
+use helix_core::path::get_normalized_path;
+
 #[tokio::test(flavor = "multi_thread")]
 async fn test_split_write_quit_all() -> anyhow::Result<()> {
     let mut file1 = tempfile::NamedTempFile::new()?;
@@ -25,21 +27,21 @@ async fn test_split_write_quit_all() -> anyhow::Result<()> {
 
                    let doc1 = docs
                        .iter()
-                        .find(|doc| doc.path().unwrap() == file1.path())
+                        .find(|doc| doc.path().unwrap() == &get_normalized_path(file1.path()))
                        .unwrap();
 
                    assert_eq!("hello1", doc1.text().to_string());
 
                    let doc2 = docs
                        .iter()
-                        .find(|doc| doc.path().unwrap() == file2.path())
+                        .find(|doc| doc.path().unwrap() == &get_normalized_path(file2.path()))
                        .unwrap();
 
                    assert_eq!("hello2", doc2.text().to_string());
 
                    let doc3 = docs
                        .iter()
-                        .find(|doc| doc.path().unwrap() == file3.path())
+                        .find(|doc| doc.path().unwrap() == &get_normalized_path(file3.path()))
                        .unwrap();
 
                    assert_eq!("hello3", doc3.text().to_string());
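
The test updates above follow from the path changes: documents end up storing normalized paths (as the updated assertions show), so the tests compare against get_normalized_path(file.path()) instead of the raw temp-file path. As a hedged illustration of why the raw comparison can differ, the path below and the /var to /private/var symlink are macOS-specific assumptions:

```rust
use helix_core::path::get_normalized_path;
use std::path::Path;

fn main() {
    // On macOS, temp files live under /var/folders/..., and /var is a symlink to
    // /private/var, so canonicalization rewrites the prefix and a byte-for-byte
    // comparison against the raw tempfile path would fail.
    let raw = Path::new("/var/folders/xy/abc123/example.txt");
    let normalized = get_normalized_path(raw);
    println!("{} -> {}", raw.display(), normalized.display());
}
```
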
