From 60c6baaf92be8ea662dcd6bd15ec50af746278b6 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Wed, 18 Jan 2023 17:32:52 -0500 Subject: [PATCH 01/35] implement workspace locking per instance --- Cargo.lock | 3 + helix-view/Cargo.toml | 4 + helix-view/src/editor.rs | 5 + helix-view/src/lib.rs | 1 + helix-view/src/session/mod.rs | 200 +++++++++++++++++++++++++++++++++ helix-view/src/session/undo.rs | 29 +++++ 6 files changed, 242 insertions(+) create mode 100644 helix-view/src/session/mod.rs create mode 100644 helix-view/src/session/undo.rs diff --git a/Cargo.lock b/Cargo.lock index 1a76beed265b..6fcea8b14b1f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1267,6 +1267,7 @@ dependencies = [ "chardetng", "clipboard-win", "crossterm", + "either", "futures-util", "helix-core", "helix-dap", @@ -1279,12 +1280,14 @@ dependencies = [ "once_cell", "serde", "serde_json", + "sha1_smol", "slotmap", "tokio", "tokio-stream", "toml", "url", "which", + "winapi", ] [[package]] diff --git a/helix-view/Cargo.toml b/helix-view/Cargo.toml index 17e07e9a2dd6..775ccc93bd9a 100644 --- a/helix-view/Cargo.toml +++ b/helix-view/Cargo.toml @@ -23,6 +23,10 @@ helix-dap = { version = "0.6", path = "../helix-dap" } crossterm = { version = "0.25", optional = true } helix-vcs = { version = "0.6", path = "../helix-vcs" } +winapi = "0.3" +sha1_smol = "1.0" +either = "1.8" + # Conversion traits once_cell = "1.17" url = "2" diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs index 042f5bdb4269..118a83a92624 100644 --- a/helix-view/src/editor.rs +++ b/helix-view/src/editor.rs @@ -5,6 +5,7 @@ use crate::{ graphics::{CursorKind, Rect}, info::Info, input::KeyEvent, + session::Session, theme::{self, Theme}, tree::{self, Tree}, view::ViewPosition, @@ -974,6 +975,10 @@ impl Editor { } } + pub fn session(&self) -> anyhow::Result { + Session::new(std::env::current_dir()?) + } + /// Current editing mode for the [`Editor`]. pub fn mode(&self) -> Mode { self.mode diff --git a/helix-view/src/lib.rs b/helix-view/src/lib.rs index c3f67345b361..53a1615a9e88 100644 --- a/helix-view/src/lib.rs +++ b/helix-view/src/lib.rs @@ -7,6 +7,7 @@ pub mod editor; pub mod env; pub mod graphics; pub mod gutter; +pub mod session; pub mod handlers { pub mod dap; pub mod lsp; diff --git a/helix-view/src/session/mod.rs b/helix-view/src/session/mod.rs new file mode 100644 index 000000000000..0da2f8d89ce2 --- /dev/null +++ b/helix-view/src/session/mod.rs @@ -0,0 +1,200 @@ +pub mod state; +pub mod undo; + +use std::{ + fs::{File, OpenOptions}, + path::PathBuf, +}; + +use anyhow::{Context, Result}; +use sha1_smol::Sha1; + +// Needs to mimic borrowing rules. +// Allow multiple read-only references, and only one mutable reference w/ no read-only. +// Should not lock unless actively used. And should be unlocked automatically when all file handles are dropped. +pub struct Session { + path: PathBuf, + lock: Option, +} + +impl Session { + // TODO: Allow custom session names to be passed. 
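+    // Sessions are keyed by a SHA-1 hash of the workspace path, so each
+    // working directory gets its own subdirectory under `cache_dir()/sessions/`,
+    // and instances opened in the same directory contend for the same lock file.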
+ pub fn new(path: PathBuf) -> Result { + let bytes = sys::path_as_bytes(path.as_path()); + let hash = Sha1::from(bytes).digest().to_string(); + let path = helix_loader::cache_dir().join("sessions").join(hash); + Ok(Self { path, lock: None }) + } + + pub fn get(&mut self, filename: String) -> Result { + if self.lock.is_none() { + let lock = FileLock::shared(self.path.join(".helix.lock"))?; + lock.lock()?; + + self.lock = Some(lock); + } + + OpenOptions::new() + .read(true) + .open(self.path.join(filename)) + .context("failed to open file") + } + + // TODO: Return a FileLockGuard instead. + pub fn get_mut(&mut self, filename: String) -> Result { + if self.lock.is_none() { + let lock = FileLock::exclusive(self.path.join(".helix.lock"))?; + lock.lock()?; + + self.lock = Some(lock); + } + + OpenOptions::new() + .read(true) + .write(true) + .create(true) + .open(self.path.join(filename)) + .context("failed to open file") + } +} + +pub struct FileLock { + file: File, + shared: bool, +} + +impl FileLock { + pub fn exclusive(path: PathBuf) -> Result { + let file = Self::open_lock(path)?; + Ok(Self { + file, + shared: false, + }) + } + + pub fn shared(path: PathBuf) -> Result { + let file = Self::open_lock(path)?; + Ok(Self { file, shared: true }) + } + + pub fn lock(&self) -> Result<()> { + sys::lock(&self.file, self.shared) + } + + fn open_lock(path: PathBuf) -> std::io::Result { + if let Some(parent) = path.parent() { + if !parent.exists() { + std::fs::DirBuilder::new().recursive(true).create(parent)?; + } + } + OpenOptions::new().write(true).create(true).open(path) + } +} + +impl Drop for FileLock { + fn drop(&mut self) { + let _ = sys::unlock(&self.file); + } +} + +// `sys` impls from https://github.com/rust-lang/cargo/blob/fc2242a8c5606be36aecfd61dd464422271dad9d/src/cargo/util/flock.rs +#[cfg(unix)] +mod sys { + use anyhow::Result; + use std::fs::File; + use std::io::{Error, Result}; + use std::os::unix::io::AsRawFd; + + pub(super) fn unlock(file: &File) -> Result<()> { + flock(file, libc::LOCK_UN) + } + + pub(super) fn lock(file: &File) -> Result<()> { + flock(file, libc::LOCK_EX) + } + + #[cfg(not(target_os = "solaris"))] + fn flock(file: &File, flag: libc::c_int) -> Result<()> { + let ret = unsafe { libc::flock(file.as_raw_fd(), flag) }; + if ret < 0 { + anyhow::bail!(Error::last_os_error()) + } else { + Ok(()) + } + } + + #[cfg(target_os = "solaris")] + fn flock(file: &File, flag: libc::c_int) -> Result<()> { + // Solaris lacks flock(), so try to emulate using fcntl() + let mut flock = libc::flock { + l_type: 0, + l_whence: 0, + l_start: 0, + l_len: 0, + l_sysid: 0, + l_pid: 0, + l_pad: [0, 0, 0, 0], + }; + flock.l_type = if flag & libc::LOCK_UN != 0 { + libc::F_UNLCK + } else if flag & libc::LOCK_EX != 0 { + libc::F_WRLCK + } else if flag & libc::LOCK_SH != 0 { + libc::F_RDLCK + } else { + panic!("unexpected flock() operation") + }; + + let mut cmd = libc::F_SETLKW; + if (flag & libc::LOCK_NB) != 0 { + cmd = libc::F_SETLK; + } + + let ret = unsafe { libc::fcntl(file.as_raw_fd(), cmd, &flock) }; + + if ret < 0 { + anyhow::bail!(Error::last_os_error()) + } else { + Ok(()) + } + } +} + +#[cfg(windows)] +mod sys { + use std::{fs::File, io::Error, os::windows::prelude::AsRawHandle, path::Path}; + + use winapi::um::{ + fileapi::{LockFileEx, UnlockFile}, + minwinbase::LOCKFILE_EXCLUSIVE_LOCK, + }; + + pub(super) fn path_as_bytes(path: &Path) -> &[u8] { + path.to_str().unwrap().as_bytes() + } + + /// Blocks until the lock is acquired. 
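+    /// A `shared` request passes no flags to `LockFileEx`, while an exclusive
+    /// request sets `LOCKFILE_EXCLUSIVE_LOCK`; in both cases the whole file is locked.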
+ pub(super) fn lock(file: &File, shared: bool) -> anyhow::Result<()> { + let flag = if shared { 0 } else { LOCKFILE_EXCLUSIVE_LOCK }; + unsafe { + let mut overlapped = std::mem::zeroed(); + let ret = LockFileEx(file.as_raw_handle(), flag, 0, !0, !0, &mut overlapped); + if ret == 0 { + anyhow::bail!(Error::last_os_error()) + } else { + Ok(()) + } + } + } + + pub(super) fn unlock(file: &File) -> std::io::Result<()> { + unsafe { + let ret = UnlockFile(file.as_raw_handle(), 0, 0, !0, !0); + if ret == 0 { + Err(Error::last_os_error()) + } else { + Ok(()) + } + } + } +} diff --git a/helix-view/src/session/undo.rs b/helix-view/src/session/undo.rs new file mode 100644 index 000000000000..fe241b6c729d --- /dev/null +++ b/helix-view/src/session/undo.rs @@ -0,0 +1,29 @@ +use std::io::Result; +use std::path::PathBuf; + +use helix_core::history::History; +use helix_core::Transaction; + +pub fn serialize(session: &mut Session, editor: &Editor) -> Result<()> { + todo!() +} + +pub fn deserialize(session: &Session, editor: &mut Editor) -> Result<()> { + todo!() +} + +fn serialize_history(history: &History) -> Result<()> { + todo!() +} + +fn deserialize_history() -> Result { + todo!() +} + +fn serialize_transaction(transaction: &Transaction) -> Result<()> { + todo!() +} + +fn deserialize_transaction() -> Result { + todo!() +} From ce14c55800f5e60540970046522b09f859f68c07 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Fri, 20 Jan 2023 19:17:24 -0500 Subject: [PATCH 02/35] implement serializing/deserializing undo history --- helix-core/src/history.rs | 83 ++++++++++++ helix-core/src/lib.rs | 1 + helix-core/src/parse.rs | 128 +++++++++++++++++++ helix-core/src/selection.rs | 4 +- helix-core/src/transaction.rs | 101 ++++++++++++++- helix-vcs/Cargo.toml | 2 +- helix-view/src/editor.rs | 12 +- helix-view/src/lib.rs | 2 +- helix-view/src/session/undo.rs | 29 ----- helix-view/src/{session => workspace}/mod.rs | 77 ++++------- helix-view/src/workspace/undo.rs | 70 ++++++++++ 11 files changed, 416 insertions(+), 93 deletions(-) create mode 100644 helix-core/src/parse.rs delete mode 100644 helix-view/src/session/undo.rs rename helix-view/src/{session => workspace}/mod.rs (66%) create mode 100644 helix-view/src/workspace/undo.rs diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index 1aac38d934c7..633ff3c0e736 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -1,6 +1,8 @@ +use crate::parse::*; use crate::{Assoc, ChangeSet, Range, Rope, Selection, Transaction}; use once_cell::sync::Lazy; use regex::Regex; +use std::io::{Read, Write}; use std::num::NonZeroUsize; use std::time::{Duration, Instant}; @@ -65,6 +67,66 @@ struct Revision { timestamp: Instant, } +const HEADER_TAG: &str = "Helix Undofile 1\n"; + +pub fn serialize_history( + writer: &mut W, + history: &History, + mtime: u64, + hash: [u8; 20], +) -> std::io::Result<()> { + write_string(writer, HEADER_TAG)?; + write_usize(writer, history.current)?; + write_u64(writer, mtime)?; + writer.write_all(&hash)?; + write_vec(writer, &history.revisions, serialize_revision)?; + Ok(()) +} + +pub fn deserialize_history(reader: &mut R) -> std::io::Result { + let header = read_string(reader)?; + if HEADER_TAG != header { + Err(std::io::Error::new( + std::io::ErrorKind::Other, + format!("missing undofile header"), + )) + } else { + let timestamp = Instant::now(); + let current = read_usize(reader)?; + let mtime = read_u64(reader)?; + let mut hash = [0u8; 20]; + reader.read_exact(&mut hash)?; + let revisions = read_vec(reader, 
|reader| deserialize_revision(reader, timestamp))?; + Ok(History { current, revisions }) + } +} + +fn serialize_revision(writer: &mut W, revision: &Revision) -> std::io::Result<()> { + write_usize(writer, revision.parent)?; + write_usize(writer, revision.last_child.map(|n| n.get()).unwrap_or(0))?; + crate::transaction::serialize_transaction(writer, &revision.transaction)?; + crate::transaction::serialize_transaction(writer, &revision.inversion)?; + + Ok(()) +} + +fn deserialize_revision(reader: &mut R, timestamp: Instant) -> std::io::Result { + let parent = read_usize(reader)?; + let last_child = match read_usize(reader)? { + 0 => None, + n => Some(unsafe { NonZeroUsize::new_unchecked(n) }), + }; + let transaction = crate::transaction::deserialize_transaction(reader)?; + let inversion = crate::transaction::deserialize_transaction(reader)?; + Ok(Revision { + parent, + last_child, + transaction, + inversion, + timestamp, + }) +} + impl Default for History { fn default() -> Self { // Add a dummy root revision with empty transaction @@ -386,6 +448,8 @@ impl std::str::FromStr for UndoKind { #[cfg(test)] mod test { + use quickcheck::quickcheck; + use super::*; use crate::Selection; @@ -630,4 +694,23 @@ mod test { Err("duration too large".to_string()) ); } + + quickcheck!( + fn serde_history(a: String, b: String) -> bool { + let old = Rope::from(a); + let new = Rope::from(b); + let transaction = crate::diff::compare_ropes(&old, &new); + + let mut buf = Vec::new(); + let mut history = History::default(); + let state = State { + doc: old, + selection: Selection::point(0), + }; + history.commit_revision(&transaction, &state); + serialize_history(&mut buf, &history).unwrap(); + deserialize_history(&mut buf.as_slice()).unwrap(); + true + } + ); } diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs index e3f862a6054c..4174e88cdc74 100644 --- a/helix-core/src/lib.rs +++ b/helix-core/src/lib.rs @@ -16,6 +16,7 @@ pub mod macros; pub mod match_brackets; pub mod movement; pub mod object; +pub mod parse; pub mod path; mod position; pub mod register; diff --git a/helix-core/src/parse.rs b/helix-core/src/parse.rs new file mode 100644 index 000000000000..214fe6725378 --- /dev/null +++ b/helix-core/src/parse.rs @@ -0,0 +1,128 @@ +use std::io::Error; +use std::io::ErrorKind; +use std::io::Read; +use std::io::Result; +use std::io::Write; + +pub fn write_byte(writer: &mut W, byte: u8) -> Result<()> { + writer.write_all(&[byte])?; + Ok(()) +} + +pub fn write_bool(writer: &mut W, state: bool) -> Result<()> { + write_byte(writer, state as u8) +} + +pub fn write_u32(writer: &mut W, n: u32) -> Result<()> { + writer.write_all(&n.to_ne_bytes())?; + Ok(()) +} + +pub fn write_u64(writer: &mut W, n: u64) -> Result<()> { + writer.write_all(&n.to_ne_bytes())?; + Ok(()) +} + +pub fn write_usize(writer: &mut W, n: usize) -> Result<()> { + writer.write_all(&n.to_ne_bytes())?; + Ok(()) +} + +pub fn write_string(writer: &mut W, s: &str) -> Result<()> { + write_usize(writer, s.len())?; + writer.write_all(s.as_bytes())?; + Ok(()) +} + +pub fn write_vec( + writer: &mut W, + slice: &[T], + f: impl Fn(&mut W, &T) -> Result<()>, +) -> Result<()> { + write_usize(writer, slice.len())?; + for element in slice { + f(writer, element)?; + } + Ok(()) +} + +pub fn write_option( + writer: &mut W, + value: Option, + f: impl Fn(&mut W, T) -> Result<()>, +) -> Result<()> { + write_bool(writer, value.is_some())?; + if let Some(value) = value { + f(writer, value)?; + } + Ok(()) +} + +pub fn read_byte(reader: &mut R) -> Result { + match 
reader.bytes().next() { + Some(byte) => byte, + None => Err(Error::new(ErrorKind::Other, "end of file")), + } +} + +pub fn read_bool(reader: &mut R) -> Result { + let res = match read_byte(reader)? { + 0 => false, + 1 => true, + _ => { + return Err(Error::new( + ErrorKind::Other, + "invalid byte to bool conversion", + )) + } + }; + Ok(res) +} + +pub fn read_u32(reader: &mut R) -> Result { + let mut buf = [0u8; 4]; + reader.read_exact(&mut buf)?; + Ok(u32::from_ne_bytes(buf)) +} + +pub fn read_u64(reader: &mut R) -> Result { + let mut buf = [0u8; 8]; + reader.read_exact(&mut buf)?; + Ok(u64::from_ne_bytes(buf)) +} + +pub fn read_usize(reader: &mut R) -> Result { + let mut buf = [0u8; 8]; + reader.read_exact(&mut buf)?; + Ok(usize::from_ne_bytes(buf)) +} + +pub fn read_string(reader: &mut R) -> Result { + let len = read_usize(reader)?; + let mut buf = vec![0; len]; + reader.read_exact(&mut buf)?; + + let res = String::from_utf8(buf).map_err(|e| Error::new(ErrorKind::InvalidData, e))?; + Ok(res) +} + +pub fn read_vec(reader: &mut R, f: impl Fn(&mut R) -> Result) -> Result> { + let len = read_usize(reader)?; + let mut res = Vec::with_capacity(len); + for _ in 0..len { + res.push(f(reader)?); + } + Ok(res) +} + +pub fn read_option( + reader: &mut R, + f: impl Fn(&mut R) -> Result, +) -> Result> { + let res = if read_bool(reader)? { + Some(f(reader)?) + } else { + None + }; + Ok(res) +} diff --git a/helix-core/src/selection.rs b/helix-core/src/selection.rs index 7817618fb488..e259aadb2ff3 100644 --- a/helix-core/src/selection.rs +++ b/helix-core/src/selection.rs @@ -389,8 +389,8 @@ impl From<(usize, usize)> for Range { /// invariant: A selection can never be empty (always contains at least primary range). #[derive(Debug, Clone, PartialEq, Eq)] pub struct Selection { - ranges: SmallVec<[Range; 1]>, - primary_index: usize, + pub(crate) ranges: SmallVec<[Range; 1]>, + pub(crate) primary_index: usize, } #[allow(clippy::len_without_is_empty)] // a Selection is never empty diff --git a/helix-core/src/transaction.rs b/helix-core/src/transaction.rs index d2f4de07dbe7..97278727f8ea 100644 --- a/helix-core/src/transaction.rs +++ b/helix-core/src/transaction.rs @@ -1,5 +1,9 @@ +use crate::parse::*; use crate::{Range, Rope, Selection, Tendril}; -use std::borrow::Cow; +use std::{ + borrow::Cow, + io::{Read, Write}, +}; /// (from, to, replacement) pub type Change = (usize, usize, Option); @@ -415,6 +419,101 @@ pub struct Transaction { selection: Option, } +pub fn serialize_transaction( + writer: &mut W, + transaction: &Transaction, +) -> std::io::Result<()> { + write_option( + writer, + transaction.selection.as_ref(), + |writer, selection| { + write_usize(writer, selection.primary_index)?; + write_vec(writer, selection.ranges(), |writer, range| { + write_usize(writer, range.anchor)?; + write_usize(writer, range.head)?; + write_option(writer, range.horiz.as_ref(), |writer, horiz| { + write_u32(writer, *horiz) + })?; + Ok(()) + })?; + + Ok(()) + }, + )?; + + write_usize(writer, transaction.changes.len)?; + write_usize(writer, transaction.changes.len_after)?; + write_vec( + writer, + transaction.changes.changes(), + |writer, operation| { + let variant = match operation { + Operation::Retain(_) => 0, + Operation::Delete(_) => 1, + Operation::Insert(_) => 2, + }; + write_byte(writer, variant)?; + match operation { + Operation::Retain(n) | Operation::Delete(n) => { + write_usize(writer, *n)?; + } + + Operation::Insert(tendril) => { + write_string(writer, tendril.as_str())?; + } + } + + Ok(()) + }, + )?; + + Ok(()) 
+} + +pub fn deserialize_transaction(reader: &mut R) -> std::io::Result { + let selection = read_option(reader, |reader| { + let primary_index = read_usize(reader)?; + let ranges = read_vec(reader, |reader| { + let anchor = read_usize(reader)?; + let head = read_usize(reader)?; + let horiz = read_option(reader, read_u32)?; + Ok(Range { + anchor, + head, + horiz, + }) + })?; + Ok(Selection { + ranges: ranges.into(), + primary_index, + }) + })?; + + let len = read_usize(reader)?; + let len_after = read_usize(reader)?; + let changes = read_vec(reader, |reader| { + let res = match read_byte(reader)? { + 0 => Operation::Retain(read_usize(reader)?), + 1 => Operation::Delete(read_usize(reader)?), + 2 => Operation::Insert(read_string(reader)?.into()), + _ => { + return Err(std::io::Error::new( + std::io::ErrorKind::Other, + "invalid variant", + )) + } + }; + Ok(res) + })?; + let changes = ChangeSet { + changes, + len, + len_after, + }; + + Ok(Transaction { changes, selection }) +} + impl Transaction { /// Create a new, empty transaction. pub fn new(doc: &Rope) -> Self { diff --git a/helix-vcs/Cargo.toml b/helix-vcs/Cargo.toml index 19b660a60f54..e032f9f14dbf 100644 --- a/helix-vcs/Cargo.toml +++ b/helix-vcs/Cargo.toml @@ -16,7 +16,7 @@ helix-core = { version = "0.6", path = "../helix-core" } tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] } parking_lot = "0.12" -git-repository = { version = "0.32", default-features = false , optional = true } +git-repository = { version = "0.32", default-features = false, optional = true } imara-diff = "0.1.5" log = "0.4" diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs index 118a83a92624..27fa7ef6bf61 100644 --- a/helix-view/src/editor.rs +++ b/helix-view/src/editor.rs @@ -5,11 +5,10 @@ use crate::{ graphics::{CursorKind, Rect}, info::Info, input::KeyEvent, - session::Session, theme::{self, Theme}, tree::{self, Tree}, view::ViewPosition, - Align, Document, DocumentId, View, ViewId, + workspace::Workspace, }; use helix_vcs::DiffProviderRegistry; @@ -975,8 +974,13 @@ impl Editor { } } - pub fn session(&self) -> anyhow::Result { - Session::new(std::env::current_dir()?) + pub fn save_workspace(&self) -> anyhow::Result<()> { + let mut workspace = Workspace::new(std::env::current_dir()?)?; + Ok(()) + } + + pub fn session(&self) -> anyhow::Result { + Workspace::new(std::env::current_dir()?) } /// Current editing mode for the [`Editor`]. 
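The helpers in `parse.rs` above define the primitive encoding shared by the history and transaction serializers: integers are written in native-endian byte order, strings and vectors carry a `usize` length prefix, and `Option` values start with a one-byte bool tag. A minimal round-trip sketch (illustrative only: `write_string`/`read_string` are the helpers added by this patch, the wrapper function and assertions are assumed):

    fn parse_round_trip() -> std::io::Result<()> {
        use helix_core::parse::{read_string, write_string};

        let mut buf: Vec<u8> = Vec::new();
        write_string(&mut buf, "helix")?; // usize length prefix + UTF-8 bytes
        assert_eq!(buf.len(), std::mem::size_of::<usize>() + 5); // 8 + 5 on a 64-bit target
        let s = read_string(&mut buf.as_slice())?; // reads the prefix, then the payload
        assert_eq!(s, "helix");
        Ok(())
    }

Since the encoding is native-endian and `usize`-wide, the resulting files are not portable across machines with a different word size or byte order.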
diff --git a/helix-view/src/lib.rs b/helix-view/src/lib.rs index 53a1615a9e88..76cf9c39b961 100644 --- a/helix-view/src/lib.rs +++ b/helix-view/src/lib.rs @@ -7,7 +7,7 @@ pub mod editor; pub mod env; pub mod graphics; pub mod gutter; -pub mod session; +pub mod workspace; pub mod handlers { pub mod dap; pub mod lsp; diff --git a/helix-view/src/session/undo.rs b/helix-view/src/session/undo.rs deleted file mode 100644 index fe241b6c729d..000000000000 --- a/helix-view/src/session/undo.rs +++ /dev/null @@ -1,29 +0,0 @@ -use std::io::Result; -use std::path::PathBuf; - -use helix_core::history::History; -use helix_core::Transaction; - -pub fn serialize(session: &mut Session, editor: &Editor) -> Result<()> { - todo!() -} - -pub fn deserialize(session: &Session, editor: &mut Editor) -> Result<()> { - todo!() -} - -fn serialize_history(history: &History) -> Result<()> { - todo!() -} - -fn deserialize_history() -> Result { - todo!() -} - -fn serialize_transaction(transaction: &Transaction) -> Result<()> { - todo!() -} - -fn deserialize_transaction() -> Result { - todo!() -} diff --git a/helix-view/src/session/mod.rs b/helix-view/src/workspace/mod.rs similarity index 66% rename from helix-view/src/session/mod.rs rename to helix-view/src/workspace/mod.rs index 0da2f8d89ce2..cdc564801499 100644 --- a/helix-view/src/session/mod.rs +++ b/helix-view/src/workspace/mod.rs @@ -1,4 +1,3 @@ -pub mod state; pub mod undo; use std::{ @@ -9,24 +8,33 @@ use std::{ use anyhow::{Context, Result}; use sha1_smol::Sha1; -// Needs to mimic borrowing rules. -// Allow multiple read-only references, and only one mutable reference w/ no read-only. -// Should not lock unless actively used. And should be unlocked automatically when all file handles are dropped. -pub struct Session { +pub struct Workspace { path: PathBuf, lock: Option, } -impl Session { +fn path_as_bytes(path: PathBuf) -> Vec { + #[cfg(windows)] + return path.to_str().unwrap().into(); + + #[cfg(unix)] + return std::os::unix::ffi::OsStrExt::as_bytes(path.as_os_str()).into(); +} + +impl Workspace { // TODO: Allow custom session names to be passed. pub fn new(path: PathBuf) -> Result { - let bytes = sys::path_as_bytes(path.as_path()); + let bytes = path_as_bytes(path); let hash = Sha1::from(bytes).digest().to_string(); - let path = helix_loader::cache_dir().join("sessions").join(hash); + let path = helix_loader::cache_dir().join("workspaces").join(hash); Ok(Self { path, lock: None }) } - pub fn get(&mut self, filename: String) -> Result { + pub fn path(&self) -> PathBuf { + self.path.clone() + } + + pub fn get(&mut self, filename: &str) -> Result { if self.lock.is_none() { let lock = FileLock::shared(self.path.join(".helix.lock"))?; lock.lock()?; @@ -40,8 +48,7 @@ impl Session { .context("failed to open file") } - // TODO: Return a FileLockGuard instead. 
- pub fn get_mut(&mut self, filename: String) -> Result { + pub fn get_mut(&mut self, filename: &str) -> Result { if self.lock.is_none() { let lock = FileLock::exclusive(self.path.join(".helix.lock"))?; lock.lock()?; @@ -102,18 +109,18 @@ impl Drop for FileLock { mod sys { use anyhow::Result; use std::fs::File; - use std::io::{Error, Result}; + use std::io::Error; use std::os::unix::io::AsRawFd; pub(super) fn unlock(file: &File) -> Result<()> { flock(file, libc::LOCK_UN) } - pub(super) fn lock(file: &File) -> Result<()> { - flock(file, libc::LOCK_EX) + pub(super) fn lock(file: &File, shared: bool) -> Result<()> { + let flag = if shared { libc::LOCK_SH } else { libc::LOCK_EX }; + flock(file, flag) } - #[cfg(not(target_os = "solaris"))] fn flock(file: &File, flag: libc::c_int) -> Result<()> { let ret = unsafe { libc::flock(file.as_raw_fd(), flag) }; if ret < 0 { @@ -122,42 +129,6 @@ mod sys { Ok(()) } } - - #[cfg(target_os = "solaris")] - fn flock(file: &File, flag: libc::c_int) -> Result<()> { - // Solaris lacks flock(), so try to emulate using fcntl() - let mut flock = libc::flock { - l_type: 0, - l_whence: 0, - l_start: 0, - l_len: 0, - l_sysid: 0, - l_pid: 0, - l_pad: [0, 0, 0, 0], - }; - flock.l_type = if flag & libc::LOCK_UN != 0 { - libc::F_UNLCK - } else if flag & libc::LOCK_EX != 0 { - libc::F_WRLCK - } else if flag & libc::LOCK_SH != 0 { - libc::F_RDLCK - } else { - panic!("unexpected flock() operation") - }; - - let mut cmd = libc::F_SETLKW; - if (flag & libc::LOCK_NB) != 0 { - cmd = libc::F_SETLK; - } - - let ret = unsafe { libc::fcntl(file.as_raw_fd(), cmd, &flock) }; - - if ret < 0 { - anyhow::bail!(Error::last_os_error()) - } else { - Ok(()) - } - } } #[cfg(windows)] @@ -169,10 +140,6 @@ mod sys { minwinbase::LOCKFILE_EXCLUSIVE_LOCK, }; - pub(super) fn path_as_bytes(path: &Path) -> &[u8] { - path.to_str().unwrap().as_bytes() - } - /// Blocks until the lock is acquired. pub(super) fn lock(file: &File, shared: bool) -> anyhow::Result<()> { let flag = if shared { 0 } else { LOCKFILE_EXCLUSIVE_LOCK }; diff --git a/helix-view/src/workspace/undo.rs b/helix-view/src/workspace/undo.rs new file mode 100644 index 000000000000..55d9f8bef332 --- /dev/null +++ b/helix-view/src/workspace/undo.rs @@ -0,0 +1,70 @@ +// use std::fs::File; +// use std::io::BufReader; +// use std::io::BufWriter; +// use std::path::PathBuf; + +// #[cfg(unix)] +// use std::os::unix::prelude::OsStrExt; + +// use anyhow::Context; +// use anyhow::Result; +// use helix_core::history::deserialize_history; +// use helix_core::history::serialize_history; +// use helix_core::parse::*; + +// use crate::Editor; + +// use super::Session; + +// // TODO: Check if serialized files already exist, and use them. +// // TODO: Maybe have a way to verify that the histories match, and overwrite if they don't. +// pub fn serialize(session: &mut Session, editor: &mut Editor) -> Result<()> { +// let cwd = std::env::current_dir()?; +// for doc in editor.documents_mut().filter(|doc| doc.path().is_some()) { + +// } +// // Handle existing index file to merge. 
+// let mut index_file = session.get_mut("undo/index")?; +// let mut index = deserialize_index(&index_file).context("failed to parse undo index")?; +// for path in editor.documents().filter_map(|doc| doc.path().cloned()) { +// if !index.iter().any(|(_, value)| *value == path) { +// let key = index.last().map(|(key, _)| key + 1).unwrap_or(0); +// index.push((key, path)); +// } +// } +// serialize_index(&mut index_file, &index)?; + +// for (filename, doc_path) in index { +// let doc = match editor +// .documents_mut() +// .find(|doc| doc.path() == Some(&doc_path)) +// { +// Some(doc) => doc, +// None => continue, +// }; +// let filename = format!("undo/{filename}"); +// let file = session.get_mut(&filename)?; +// let history = doc.history.take(); +// serialize_history(file, &history)?; +// doc.history.set(history); +// } + +// Ok(()) +// } + +// pub fn deserialize(session: &mut Session, editor: &mut Editor) -> Result<()> { +// let index = session +// .get("undo/index") +// .and_then(|file| deserialize_index(&file)) +// .context("failed to parse index file")?; + +// for (filename, doc_path) in index { +// let id = editor.open(&doc_path, crate::editor::Action::Load)?; +// let doc = editor.document_mut(id).unwrap(); +// let filename = format!("undo/{filename}"); +// let file = session.get(&filename)?; +// doc.history = std::cell::Cell::new(deserialize_history(file)?); +// } + +// Ok(()) +// } From 6d73215f3609b3ae1a222aec8d0a9184896e46b4 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sat, 28 Jan 2023 22:50:11 -0500 Subject: [PATCH 03/35] implement workspace commands --- Cargo.lock | 3 +- helix-core/Cargo.toml | 1 + helix-core/src/history.rs | 150 +++++++++++++++++++------------ helix-core/src/path.rs | 26 +++++- helix-term/src/commands/typed.rs | 70 +++++++++++++++ helix-view/Cargo.toml | 2 +- helix-view/src/editor.rs | 119 ++++++++++++++++++++++-- helix-view/src/workspace/mod.rs | 22 ++--- helix-view/src/workspace/undo.rs | 112 +++++++++-------------- 9 files changed, 355 insertions(+), 150 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6fcea8b14b1f..0dfee71c11ee 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1131,6 +1131,7 @@ dependencies = [ "ropey", "serde", "serde_json", + "sha1_smol", "slotmap", "smallvec", "smartstring", @@ -1267,7 +1268,6 @@ dependencies = [ "chardetng", "clipboard-win", "crossterm", - "either", "futures-util", "helix-core", "helix-dap", @@ -1286,6 +1286,7 @@ dependencies = [ "tokio-stream", "toml", "url", + "walkdir", "which", "winapi", ] diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index 62ec87b485ca..6493b6f55435 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -32,6 +32,7 @@ regex = "1" bitflags = "1.3" ahash = "0.8.3" hashbrown = { version = "0.13.2", features = ["raw"] } +sha1_smol = "1.0" log = "0.4" serde = { version = "1.0", features = ["derive"] } diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index 633ff3c0e736..6003a13b4475 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -5,7 +5,6 @@ use regex::Regex; use std::io::{Read, Write}; use std::num::NonZeroUsize; use std::time::{Duration, Instant}; - #[derive(Debug, Clone)] pub struct State { pub doc: Rope, @@ -69,62 +68,20 @@ struct Revision { const HEADER_TAG: &str = "Helix Undofile 1\n"; -pub fn serialize_history( - writer: &mut W, - history: &History, - mtime: u64, - hash: [u8; 20], -) -> std::io::Result<()> { - write_string(writer, HEADER_TAG)?; - write_usize(writer, history.current)?; - write_u64(writer, mtime)?; - 
writer.write_all(&hash)?; - write_vec(writer, &history.revisions, serialize_revision)?; - Ok(()) -} +fn get_hash(reader: &mut R) -> std::io::Result<[u8; 20]> { + const BUF_SIZE: usize = 8192; -pub fn deserialize_history(reader: &mut R) -> std::io::Result { - let header = read_string(reader)?; - if HEADER_TAG != header { - Err(std::io::Error::new( - std::io::ErrorKind::Other, - format!("missing undofile header"), - )) - } else { - let timestamp = Instant::now(); - let current = read_usize(reader)?; - let mtime = read_u64(reader)?; - let mut hash = [0u8; 20]; - reader.read_exact(&mut hash)?; - let revisions = read_vec(reader, |reader| deserialize_revision(reader, timestamp))?; - Ok(History { current, revisions }) - } -} - -fn serialize_revision(writer: &mut W, revision: &Revision) -> std::io::Result<()> { - write_usize(writer, revision.parent)?; - write_usize(writer, revision.last_child.map(|n| n.get()).unwrap_or(0))?; - crate::transaction::serialize_transaction(writer, &revision.transaction)?; - crate::transaction::serialize_transaction(writer, &revision.inversion)?; - - Ok(()) -} + let mut buf = [0u8; BUF_SIZE]; + let mut hash = sha1_smol::Sha1::new(); + loop { + let total_read = reader.read(&mut buf)?; + if total_read == 0 { + break; + } -fn deserialize_revision(reader: &mut R, timestamp: Instant) -> std::io::Result { - let parent = read_usize(reader)?; - let last_child = match read_usize(reader)? { - 0 => None, - n => Some(unsafe { NonZeroUsize::new_unchecked(n) }), - }; - let transaction = crate::transaction::deserialize_transaction(reader)?; - let inversion = crate::transaction::deserialize_transaction(reader)?; - Ok(Revision { - parent, - last_child, - transaction, - inversion, - timestamp, - }) + hash.update(&buf[0..total_read]); + } + Ok(hash.digest().bytes()) } impl Default for History { @@ -143,7 +100,82 @@ impl Default for History { } } +impl Revision { + fn serialize(&self, writer: &mut W) -> std::io::Result<()> { + write_usize(writer, self.parent)?; + write_usize(writer, self.last_child.map(|n| n.get()).unwrap_or(0))?; + crate::transaction::serialize_transaction(writer, &self.transaction)?; + crate::transaction::serialize_transaction(writer, &self.inversion)?; + + Ok(()) + } + + fn deserialize(reader: &mut R, timestamp: Instant) -> std::io::Result { + let parent = read_usize(reader)?; + let last_child = match read_usize(reader)? 
{ + 0 => None, + n => Some(unsafe { NonZeroUsize::new_unchecked(n) }), + }; + let transaction = crate::transaction::deserialize_transaction(reader)?; + let inversion = crate::transaction::deserialize_transaction(reader)?; + Ok(Revision { + parent, + last_child, + transaction, + inversion, + timestamp, + }) + } +} + impl History { + pub fn serialize( + &self, + writer: &mut W, + text: &mut R, + last_saved_revision: usize, + last_mtime: u64, + ) -> std::io::Result<()> { + write_string(writer, HEADER_TAG)?; + write_usize(writer, self.current)?; + write_usize(writer, last_saved_revision)?; + write_u64(writer, last_mtime)?; + writer.write_all(&get_hash(text)?)?; + write_vec(writer, &self.revisions, |writer, rev| rev.serialize(writer))?; + Ok(()) + } + + pub fn deserialize( + reader: &mut R, + text: &mut R, + last_mtime: u64, + ) -> std::io::Result<(usize, Self)> { + let header = read_string(reader)?; + if HEADER_TAG != header { + Err(std::io::Error::new( + std::io::ErrorKind::Other, + "missing undofile header", + )) + } else { + let timestamp = Instant::now(); + let current = read_usize(reader)?; + let last_saved_revision = read_usize(reader)?; + let mtime = read_u64(reader)?; + let mut hash = [0u8; 20]; + reader.read_exact(&mut hash)?; + + if mtime != last_mtime && hash != get_hash(text)? { + return Err(std::io::Error::new( + std::io::ErrorKind::Other, + "outdated undo file", + )); + } + + let revisions = read_vec(reader, |reader| Revision::deserialize(reader, timestamp))?; + Ok((last_saved_revision, History { current, revisions })) + } + } + pub fn commit_revision(&mut self, transaction: &Transaction, original: &State) { self.commit_revision_at_timestamp(transaction, original, Instant::now()); } @@ -708,8 +740,12 @@ mod test { selection: Selection::point(0), }; history.commit_revision(&transaction, &state); - serialize_history(&mut buf, &history).unwrap(); - deserialize_history(&mut buf.as_slice()).unwrap(); + + let text = Vec::new(); + history + .serialize(&mut buf, &mut text.as_slice(), 0, 0) + .unwrap(); + History::deserialize(&mut buf.as_slice(), &mut text.as_slice(), 0).unwrap(); true } ); diff --git a/helix-core/src/path.rs b/helix-core/src/path.rs index d59a6baad604..c4d86ed821d1 100644 --- a/helix-core/src/path.rs +++ b/helix-core/src/path.rs @@ -1,5 +1,9 @@ use etcetera::home_dir; -use std::path::{Component, Path, PathBuf}; +use std::{ + ffi::OsString, + path::{Component, Path, PathBuf}, + str::Utf8Error, +}; /// Replaces users home directory from `path` with tilde `~` if the directory /// is available, otherwise returns the path unchanged. 
@@ -141,3 +145,23 @@ pub fn get_truncated_path>(path: P) -> PathBuf { ret.push(file); ret } + +pub fn path_as_bytes>(path: P) -> Vec { + let path = path.as_ref(); + + #[cfg(windows)] + return path.to_str().unwrap().into(); + + #[cfg(unix)] + return std::os::unix::ffi::OsStrExt::as_bytes(path.as_os_str()).into(); +} + +pub fn path_from_bytes(slice: &[u8]) -> Result { + #[cfg(windows)] + return Ok(PathBuf::from(std::str::from_utf8(slice))); + + #[cfg(unix)] + return Ok(PathBuf::from( + ::from_bytes(slice), + )); +} diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index 0cc1b7432978..2243f79df77b 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -1953,6 +1953,62 @@ fn run_shell_command( Ok(()) } +fn save_workspace( + cx: &mut compositor::Context, + _args: &[Cow], + event: PromptEvent, +) -> anyhow::Result<()> { + use helix_view::workspace::undo::UndoIndex; + use helix_view::workspace::Workspace; + + if event != PromptEvent::Validate { + return Ok(()); + } + + let mut workspace = Workspace::new()?; + let mut index_file = workspace.get_mut(".index")?; + + // Create a merged list of key-value tuples from the saved index and the open buffers. + let index = { + let mut saved_files = UndoIndex::deserialize(&mut index_file) + .unwrap_or(UndoIndex::default()) + .0; + let mut last_id = saved_files.last().map(|(id, _)| *id + 1).unwrap_or(0); + let mut new_files = cx + .editor + .documents() + .filter_map(|doc| { + doc.path().filter(|path| { + !saved_files + .iter() + .any(|(_, indexed_path)| indexed_path == *path) + }) + }) + .map(|path| { + let id = last_id; + last_id += 1; + (id, path.clone()) + }) + .collect(); + saved_files.append(&mut new_files); + UndoIndex(saved_files) + }; + + cx.editor.save_workspace() +} + +fn open_workspace( + cx: &mut compositor::Context, + _args: &[Cow], + event: PromptEvent, +) -> anyhow::Result<()> { + if event != PromptEvent::Validate { + return Ok(()); + } + + cx.editor.open_workspace() +} + pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ TypableCommand { name: "quit", @@ -2475,6 +2531,20 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ fun: run_shell_command, completer: Some(completers::filename), }, + TypableCommand { + name: "save-workspace", + aliases: &["sw"], + doc: "Save open document undo history", + fun: save_workspace, + completer: None, + }, + TypableCommand { + name: "open-workspace", + aliases: &["ow"], + doc: "Open document undo history", + fun: open_workspace, + completer: None, + }, ]; pub static TYPABLE_COMMAND_MAP: Lazy> = diff --git a/helix-view/Cargo.toml b/helix-view/Cargo.toml index 775ccc93bd9a..23195dc8faf1 100644 --- a/helix-view/Cargo.toml +++ b/helix-view/Cargo.toml @@ -25,7 +25,7 @@ helix-vcs = { version = "0.6", path = "../helix-vcs" } winapi = "0.3" sha1_smol = "1.0" -either = "1.8" +walkdir = "2.3" # Conversion traits once_cell = "1.17" diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs index 27fa7ef6bf61..88a62f7381b7 100644 --- a/helix-view/src/editor.rs +++ b/helix-view/src/editor.rs @@ -1,5 +1,5 @@ use crate::{ - align_view, + align_view, apply_transaction, clipboard::{get_clipboard_provider, ClipboardProvider}, document::{DocumentSavedEventFuture, DocumentSavedEventResult, Mode}, graphics::{CursorKind, Rect}, @@ -8,7 +8,7 @@ use crate::{ theme::{self, Theme}, tree::{self, Tree}, view::ViewPosition, - workspace::Workspace, + workspace::{undo::UndoIndex, Workspace}, }; use helix_vcs::DiffProviderRegistry; @@ -21,6 +21,7 @@ use std::{ 
borrow::Cow, cell::Cell, collections::{BTreeMap, HashMap}, + fs::File, io::stdin, num::NonZeroUsize, path::{Path, PathBuf}, @@ -36,7 +37,7 @@ use tokio::{ time::{sleep, Duration, Instant, Sleep}, }; -use anyhow::{anyhow, bail, Error}; +use anyhow::{anyhow, bail, Context, Error}; pub use helix_core::diagnostic::Severity; pub use helix_core::register::Registers; @@ -974,13 +975,117 @@ impl Editor { } } - pub fn save_workspace(&self) -> anyhow::Result<()> { - let mut workspace = Workspace::new(std::env::current_dir()?)?; + // TODO: Async? + pub fn save_workspace(&mut self) -> anyhow::Result<()> { + let mut workspace = Workspace::new()?; + let mut index_file = workspace.get_mut("index.undo")?; + let index = { + let mut current_index = + UndoIndex::deserialize(&mut index_file).unwrap_or(UndoIndex::default()); + let new_files = self.documents().filter_map(|doc| { + doc.path().filter(|path| { + !current_index + .0 + .iter() + .any(|(_, indexed_path)| indexed_path == *path) + }) + }); + let mut last_id = current_index.0.last().map(|(id, _)| *id).unwrap_or(0); + current_index.0.append( + &mut new_files + .map(|path| { + let current_id = last_id; + last_id += 1; + (current_id, path.clone()) + }) + .collect(), + ); + current_index + }; + log::debug!("Saving undo index: {:?}", index); + + index + .serialize(&mut index_file) + .context("failed to serialize index")?; + for doc in self.documents_mut().filter(|doc| doc.path().is_some()) { + let history = doc.history.take(); + let last_saved_revision = doc.get_last_saved_revision(); + let path = doc.path().unwrap(); + let mtime = std::fs::metadata(path.clone())? + .modified()? + .duration_since(std::time::UNIX_EPOCH)? + .as_secs(); + let id = index.find_id(path).unwrap(); + let mut undo_file = workspace.get_mut(&id.to_string())?; + + history + .serialize( + &mut undo_file, + &mut File::open(path)?, + last_saved_revision, + mtime, + ) + .context(format!( + "failed to save history for {}", + path.to_string_lossy() + ))?; + doc.history.set(history); + } Ok(()) } - pub fn session(&self) -> anyhow::Result { - Workspace::new(std::env::current_dir()?) + pub fn open_workspace(&mut self) -> anyhow::Result<()> { + let mut workspace = Workspace::new()?; + let index = UndoIndex::deserialize(&mut workspace.get("index.undo")?) + .context("failed to load index")?; + + let scrolloff = self.config().scrolloff; + for (id, path) in index.0 { + if !path.exists() { + continue; + } + let current_view_id = view!(&self).id; + + let mut undo_file = workspace.get(&id.to_string())?; + let last_mtime = std::fs::metadata(path.clone())? + .modified()? + .duration_since(std::time::UNIX_EPOCH)? 
+ .as_secs(); + let id = self.open(path.as_path(), Action::Load)?; + let doc = doc_mut!(self, &id); + let (last_saved_revision, history) = helix_core::history::History::deserialize( + &mut undo_file, + &mut File::open(path)?, + last_mtime, + ) + .context("failed to load history")?; + + if history.current_revision() != last_saved_revision { + let selections = doc.selections(); + let view_id = if selections.contains_key(¤t_view_id) { + // use current if possible + current_view_id + } else { + // Hack: we take the first available view_id + selections + .keys() + .next() + .copied() + .expect("No view_id available") + }; + let view = view_mut!(self, view_id); + apply_transaction( + &history.changes_since(last_saved_revision).unwrap(), + doc, + &view, + ); + view.ensure_cursor_in_view(&doc, scrolloff); + } + doc.history.set(history); + doc.set_last_saved_revision(last_saved_revision); + } + + Ok(()) } /// Current editing mode for the [`Editor`]. diff --git a/helix-view/src/workspace/mod.rs b/helix-view/src/workspace/mod.rs index cdc564801499..9ec543d84e68 100644 --- a/helix-view/src/workspace/mod.rs +++ b/helix-view/src/workspace/mod.rs @@ -6,6 +6,7 @@ use std::{ }; use anyhow::{Context, Result}; +use helix_core::path::path_as_bytes; use sha1_smol::Sha1; pub struct Workspace { @@ -13,17 +14,10 @@ pub struct Workspace { lock: Option, } -fn path_as_bytes(path: PathBuf) -> Vec { - #[cfg(windows)] - return path.to_str().unwrap().into(); - - #[cfg(unix)] - return std::os::unix::ffi::OsStrExt::as_bytes(path.as_os_str()).into(); -} - impl Workspace { // TODO: Allow custom session names to be passed. - pub fn new(path: PathBuf) -> Result { + pub fn new() -> Result { + let path = std::env::current_dir()?; let bytes = path_as_bytes(path); let hash = Sha1::from(bytes).digest().to_string(); let path = helix_loader::cache_dir().join("workspaces").join(hash); @@ -34,33 +28,35 @@ impl Workspace { self.path.clone() } - pub fn get(&mut self, filename: &str) -> Result { + pub fn get(&mut self, path: &str) -> Result { if self.lock.is_none() { let lock = FileLock::shared(self.path.join(".helix.lock"))?; lock.lock()?; self.lock = Some(lock); } + let path = self.path.join(path); OpenOptions::new() .read(true) - .open(self.path.join(filename)) + .open(path) .context("failed to open file") } - pub fn get_mut(&mut self, filename: &str) -> Result { + pub fn get_mut(&mut self, path: &str) -> Result { if self.lock.is_none() { let lock = FileLock::exclusive(self.path.join(".helix.lock"))?; lock.lock()?; self.lock = Some(lock); } + let path = self.path.join(path); OpenOptions::new() .read(true) .write(true) .create(true) - .open(self.path.join(filename)) + .open(path) .context("failed to open file") } } diff --git a/helix-view/src/workspace/undo.rs b/helix-view/src/workspace/undo.rs index 55d9f8bef332..8ed1cefcae88 100644 --- a/helix-view/src/workspace/undo.rs +++ b/helix-view/src/workspace/undo.rs @@ -1,70 +1,42 @@ -// use std::fs::File; -// use std::io::BufReader; -// use std::io::BufWriter; -// use std::path::PathBuf; - -// #[cfg(unix)] -// use std::os::unix::prelude::OsStrExt; - -// use anyhow::Context; -// use anyhow::Result; -// use helix_core::history::deserialize_history; -// use helix_core::history::serialize_history; -// use helix_core::parse::*; - -// use crate::Editor; - -// use super::Session; - -// // TODO: Check if serialized files already exist, and use them. -// // TODO: Maybe have a way to verify that the histories match, and overwrite if they don't. 
-// pub fn serialize(session: &mut Session, editor: &mut Editor) -> Result<()> { -// let cwd = std::env::current_dir()?; -// for doc in editor.documents_mut().filter(|doc| doc.path().is_some()) { - -// } -// // Handle existing index file to merge. -// let mut index_file = session.get_mut("undo/index")?; -// let mut index = deserialize_index(&index_file).context("failed to parse undo index")?; -// for path in editor.documents().filter_map(|doc| doc.path().cloned()) { -// if !index.iter().any(|(_, value)| *value == path) { -// let key = index.last().map(|(key, _)| key + 1).unwrap_or(0); -// index.push((key, path)); -// } -// } -// serialize_index(&mut index_file, &index)?; - -// for (filename, doc_path) in index { -// let doc = match editor -// .documents_mut() -// .find(|doc| doc.path() == Some(&doc_path)) -// { -// Some(doc) => doc, -// None => continue, -// }; -// let filename = format!("undo/{filename}"); -// let file = session.get_mut(&filename)?; -// let history = doc.history.take(); -// serialize_history(file, &history)?; -// doc.history.set(history); -// } - -// Ok(()) -// } - -// pub fn deserialize(session: &mut Session, editor: &mut Editor) -> Result<()> { -// let index = session -// .get("undo/index") -// .and_then(|file| deserialize_index(&file)) -// .context("failed to parse index file")?; - -// for (filename, doc_path) in index { -// let id = editor.open(&doc_path, crate::editor::Action::Load)?; -// let doc = editor.document_mut(id).unwrap(); -// let filename = format!("undo/{filename}"); -// let file = session.get(&filename)?; -// doc.history = std::cell::Cell::new(deserialize_history(file)?); -// } - -// Ok(()) -// } +use anyhow::Result; +use std::{ + io::{Error, ErrorKind, Read, Write}, + path::PathBuf, +}; + +use helix_core::{ + parse::*, + path::{path_as_bytes, path_from_bytes}, +}; + +#[derive(Default, Debug)] +pub struct UndoIndex(pub Vec<(usize, PathBuf)>); + +impl UndoIndex { + pub fn serialize(&self, writer: &mut W) -> Result<()> { + write_vec(writer, &self.0, |writer, (id, path)| { + write_usize(writer, *id)?; + write_vec(writer, &path_as_bytes(path), |writer, byte| { + write_byte(writer, *byte) + })?; + Ok(()) + })?; + Ok(()) + } + + pub fn deserialize(reader: &mut R) -> Result { + let res = read_vec(reader, |reader| { + let id = read_usize(reader)?; + let path = path_from_bytes(&read_vec(reader, read_byte)?) 
+ .map_err(|e| Error::new(ErrorKind::InvalidData, e))?; + Ok((id, path)) + })?; + Ok(Self(res)) + } + + pub fn find_id(&self, path: &PathBuf) -> Option { + self.0 + .iter() + .find_map(|(id, index_path)| (index_path == path).then_some(*id)) + } +} From 95a6b7ba45920e0780efba017f7d62a6034a596a Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Mon, 30 Jan 2023 12:48:53 -0500 Subject: [PATCH 04/35] inline workspace commands --- helix-core/src/path.rs | 1 - helix-term/src/commands/typed.rs | 105 ++++++++++++++++++++++++-- helix-term/tests/test/commands.rs | 19 +++++ helix-view/src/document.rs | 2 +- helix-view/src/editor.rs | 118 +----------------------------- helix-view/src/workspace/undo.rs | 2 +- 6 files changed, 123 insertions(+), 124 deletions(-) diff --git a/helix-core/src/path.rs b/helix-core/src/path.rs index c4d86ed821d1..b3d651903699 100644 --- a/helix-core/src/path.rs +++ b/helix-core/src/path.rs @@ -1,6 +1,5 @@ use etcetera::home_dir; use std::{ - ffi::OsString, path::{Component, Path, PathBuf}, str::Utf8Error, }; diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index 2243f79df77b..ce9167dee782 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -1971,7 +1971,7 @@ fn save_workspace( // Create a merged list of key-value tuples from the saved index and the open buffers. let index = { let mut saved_files = UndoIndex::deserialize(&mut index_file) - .unwrap_or(UndoIndex::default()) + .unwrap_or_default() .0; let mut last_id = saved_files.last().map(|(id, _)| *id + 1).unwrap_or(0); let mut new_files = cx @@ -1993,8 +1993,37 @@ fn save_workspace( saved_files.append(&mut new_files); UndoIndex(saved_files) }; - - cx.editor.save_workspace() + log::debug!("Saving undo index: {:?}", index); + + index + .serialize(&mut index_file) + .context("failed to save index")?; + + // Save the histories of open buffers. + for doc in cx.editor.documents_mut().filter(|doc| doc.path().is_some()) { + let path = doc.path().unwrap().clone(); + let last_saved_revision = doc.get_last_saved_revision(); + let history = doc.history.get_mut(); + let mtime = std::fs::metadata(path.clone())? + .modified()? + .duration_since(std::time::UNIX_EPOCH)? + .as_secs(); + + let mut file = workspace.get_mut(&index.find_id(&path).unwrap().to_string())?; + history + .serialize( + &mut file, + &mut std::fs::File::open(&path)?, + last_saved_revision, + mtime, + ) + .context(format!( + "failed to save history for {}", + path.to_string_lossy() + ))?; + log::debug!("Saved history for: {}", path.to_string_lossy()); + } + Ok(()) } fn open_workspace( @@ -2002,11 +2031,77 @@ fn open_workspace( _args: &[Cow], event: PromptEvent, ) -> anyhow::Result<()> { + use helix_view::workspace::undo::UndoIndex; + use helix_view::workspace::Workspace; + if event != PromptEvent::Validate { return Ok(()); } - cx.editor.open_workspace() + let mut workspace = Workspace::new()?; + let index = UndoIndex::deserialize(&mut workspace.get(".index")?) + .context("failed to load index")? + .0; + let scrolloff = cx.editor.config().scrolloff; + log::debug!("Loaded undo index: {:?}", index); + + // Open the documents in the index and load their histories. + for (id, path) in index { + if !path.exists() { + continue; + } + + // Close open buffers for the doc. 
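+        // (the document is re-opened below with its saved history restored)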
+ let doc_id = cx + .editor + .documents() + .find_map(|doc| (doc.path() == Some(&path)).then(|| doc.id())); + if let Some(id) = doc_id { + buffer_close_by_ids_impl(cx, &[id], false)?; + } + + let mut file = workspace.get(&id.to_string())?; + let last_mtime = std::fs::metadata(path.clone())? + .modified()? + .duration_since(std::time::UNIX_EPOCH)? + .as_secs(); + let id = cx.editor.open(path.as_path(), Action::Load)?; + let doc = doc_mut!(cx.editor, &id); + let (last_saved_revision, history) = match helix_core::history::History::deserialize( + &mut file, + &mut std::fs::File::open(&path)?, + last_mtime, + ) { + Ok(res) => res, + Err(e) => { + cx.editor.set_error(format!( + "failed to load undo file for {} because {e}", + path.to_string_lossy() + )); + continue; + } + }; + + // Jump to saved revision if the doc wasn't saved. + if history.current_revision() != last_saved_revision { + let view_id = doc + .selections() + .keys() + .next() + .copied() + .expect("No view_id available"); + let view = view_mut!(cx.editor, view_id); + doc.apply( + &history.changes_since(last_saved_revision).unwrap(), + view_id, + ); + view.ensure_cursor_in_view(doc, scrolloff); + } + doc.history.set(history); + doc.set_last_saved_revision(last_saved_revision); + log::debug!("Loaded history for: {}", path.to_string_lossy()); + } + Ok(()) } pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ @@ -2541,7 +2636,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ TypableCommand { name: "open-workspace", aliases: &["ow"], - doc: "Open document undo history", + doc: "Open document undo history, overriding open buffers.", fun: open_workspace, completer: None, }, diff --git a/helix-term/tests/test/commands.rs b/helix-term/tests/test/commands.rs index e8d16bfaf2be..64bba22a691a 100644 --- a/helix-term/tests/test/commands.rs +++ b/helix-term/tests/test/commands.rs @@ -470,6 +470,25 @@ async fn test_character_info() -> anyhow::Result<()> { Some(&|app| { assert_eq!(r#""h" Dec 104 Hex 68"#, app.editor.get_status().unwrap().0); }), + ); +} + +async fn test_workspace_serde() -> anyhow::Result<()> { + let file = helpers::new_readonly_tempfile()?; + let mut app = helpers::AppBuilder::new() + .with_file(file.path(), None) + .build()?; + + test_key_sequence( + &mut app, + Some("ihello:sw:bc!:ow"), + Some(&|app| { + let mut docs: Vec<_> = app.editor.documents().collect(); + assert_eq!(1, docs.len()); + + let doc = docs.pop().unwrap(); + assert_eq!(Some(file.path()), doc.path().map(PathBuf::as_path)); + }), false, ) .await?; diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 11a0dbf8b964..7d6f87efa152 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -1042,7 +1042,7 @@ impl Document { } /// Get the document's latest saved revision. 
- pub fn get_last_saved_revision(&mut self) -> usize { + pub fn get_last_saved_revision(&self) -> usize { self.last_saved_revision } diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs index 88a62f7381b7..c209c3c3affe 100644 --- a/helix-view/src/editor.rs +++ b/helix-view/src/editor.rs @@ -1,5 +1,5 @@ use crate::{ - align_view, apply_transaction, + align_view, clipboard::{get_clipboard_provider, ClipboardProvider}, document::{DocumentSavedEventFuture, DocumentSavedEventResult, Mode}, graphics::{CursorKind, Rect}, @@ -21,7 +21,6 @@ use std::{ borrow::Cow, cell::Cell, collections::{BTreeMap, HashMap}, - fs::File, io::stdin, num::NonZeroUsize, path::{Path, PathBuf}, @@ -37,7 +36,7 @@ use tokio::{ time::{sleep, Duration, Instant, Sleep}, }; -use anyhow::{anyhow, bail, Context, Error}; +use anyhow::{anyhow, bail, Error}; pub use helix_core::diagnostic::Severity; pub use helix_core::register::Registers; @@ -975,119 +974,6 @@ impl Editor { } } - // TODO: Async? - pub fn save_workspace(&mut self) -> anyhow::Result<()> { - let mut workspace = Workspace::new()?; - let mut index_file = workspace.get_mut("index.undo")?; - let index = { - let mut current_index = - UndoIndex::deserialize(&mut index_file).unwrap_or(UndoIndex::default()); - let new_files = self.documents().filter_map(|doc| { - doc.path().filter(|path| { - !current_index - .0 - .iter() - .any(|(_, indexed_path)| indexed_path == *path) - }) - }); - let mut last_id = current_index.0.last().map(|(id, _)| *id).unwrap_or(0); - current_index.0.append( - &mut new_files - .map(|path| { - let current_id = last_id; - last_id += 1; - (current_id, path.clone()) - }) - .collect(), - ); - current_index - }; - log::debug!("Saving undo index: {:?}", index); - - index - .serialize(&mut index_file) - .context("failed to serialize index")?; - for doc in self.documents_mut().filter(|doc| doc.path().is_some()) { - let history = doc.history.take(); - let last_saved_revision = doc.get_last_saved_revision(); - let path = doc.path().unwrap(); - let mtime = std::fs::metadata(path.clone())? - .modified()? - .duration_since(std::time::UNIX_EPOCH)? - .as_secs(); - let id = index.find_id(path).unwrap(); - let mut undo_file = workspace.get_mut(&id.to_string())?; - - history - .serialize( - &mut undo_file, - &mut File::open(path)?, - last_saved_revision, - mtime, - ) - .context(format!( - "failed to save history for {}", - path.to_string_lossy() - ))?; - doc.history.set(history); - } - Ok(()) - } - - pub fn open_workspace(&mut self) -> anyhow::Result<()> { - let mut workspace = Workspace::new()?; - let index = UndoIndex::deserialize(&mut workspace.get("index.undo")?) - .context("failed to load index")?; - - let scrolloff = self.config().scrolloff; - for (id, path) in index.0 { - if !path.exists() { - continue; - } - let current_view_id = view!(&self).id; - - let mut undo_file = workspace.get(&id.to_string())?; - let last_mtime = std::fs::metadata(path.clone())? - .modified()? - .duration_since(std::time::UNIX_EPOCH)? 
- .as_secs(); - let id = self.open(path.as_path(), Action::Load)?; - let doc = doc_mut!(self, &id); - let (last_saved_revision, history) = helix_core::history::History::deserialize( - &mut undo_file, - &mut File::open(path)?, - last_mtime, - ) - .context("failed to load history")?; - - if history.current_revision() != last_saved_revision { - let selections = doc.selections(); - let view_id = if selections.contains_key(¤t_view_id) { - // use current if possible - current_view_id - } else { - // Hack: we take the first available view_id - selections - .keys() - .next() - .copied() - .expect("No view_id available") - }; - let view = view_mut!(self, view_id); - apply_transaction( - &history.changes_since(last_saved_revision).unwrap(), - doc, - &view, - ); - view.ensure_cursor_in_view(&doc, scrolloff); - } - doc.history.set(history); - doc.set_last_saved_revision(last_saved_revision); - } - - Ok(()) - } - /// Current editing mode for the [`Editor`]. pub fn mode(&self) -> Mode { self.mode diff --git a/helix-view/src/workspace/undo.rs b/helix-view/src/workspace/undo.rs index 8ed1cefcae88..44864bb3cfb5 100644 --- a/helix-view/src/workspace/undo.rs +++ b/helix-view/src/workspace/undo.rs @@ -37,6 +37,6 @@ impl UndoIndex { pub fn find_id(&self, path: &PathBuf) -> Option { self.0 .iter() - .find_map(|(id, index_path)| (index_path == path).then_some(*id)) + .find_map(|(id, index_path)| (index_path == path).then(|| *id)) } } From eb7c7eb506e4c6aeecf1464fc81aea9b18609d6e Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Mon, 30 Jan 2023 15:20:18 -0500 Subject: [PATCH 05/35] run `cargo xtask docgen` --- book/src/generated/typable-cmd.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/book/src/generated/typable-cmd.md b/book/src/generated/typable-cmd.md index 0ff501a33fdb..c47d7db80333 100644 --- a/book/src/generated/typable-cmd.md +++ b/book/src/generated/typable-cmd.md @@ -74,3 +74,5 @@ | `:pipe` | Pipe each selection to the shell command. | | `:pipe-to` | Pipe each selection to the shell command, ignoring output. | | `:run-shell-command`, `:sh` | Run a shell command | +| `:save-workspace`, `:sw` | Save open document undo history | +| `:open-workspace`, `:ow` | Open document undo history, overriding open buffers. | From 859f73e96e555eeb0708bcf5fe60e365a993faa0 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Mon, 30 Jan 2023 17:26:01 -0500 Subject: [PATCH 06/35] close buffer after checking if the saved state is valid --- helix-core/src/path.rs | 2 +- helix-term/src/commands/typed.rs | 22 +++++++++++----------- helix-term/tests/test/commands.rs | 5 +---- 3 files changed, 13 insertions(+), 16 deletions(-) diff --git a/helix-core/src/path.rs b/helix-core/src/path.rs index b3d651903699..471d0607df10 100644 --- a/helix-core/src/path.rs +++ b/helix-core/src/path.rs @@ -157,7 +157,7 @@ pub fn path_as_bytes>(path: P) -> Vec { pub fn path_from_bytes(slice: &[u8]) -> Result { #[cfg(windows)] - return Ok(PathBuf::from(std::str::from_utf8(slice))); + return Ok(PathBuf::from(std::str::from_utf8(slice)?)); #[cfg(unix)] return Ok(PathBuf::from( diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index ce9167dee782..42daf49dfc50 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -2046,27 +2046,18 @@ fn open_workspace( log::debug!("Loaded undo index: {:?}", index); // Open the documents in the index and load their histories. 
- for (id, path) in index { + for (index_id, path) in index { if !path.exists() { continue; } // Close open buffers for the doc. - let doc_id = cx - .editor - .documents() - .find_map(|doc| (doc.path() == Some(&path)).then(|| doc.id())); - if let Some(id) = doc_id { - buffer_close_by_ids_impl(cx, &[id], false)?; - } - let mut file = workspace.get(&id.to_string())?; + let mut file = workspace.get(&index_id.to_string())?; let last_mtime = std::fs::metadata(path.clone())? .modified()? .duration_since(std::time::UNIX_EPOCH)? .as_secs(); - let id = cx.editor.open(path.as_path(), Action::Load)?; - let doc = doc_mut!(cx.editor, &id); let (last_saved_revision, history) = match helix_core::history::History::deserialize( &mut file, &mut std::fs::File::open(&path)?, @@ -2082,6 +2073,15 @@ fn open_workspace( } }; + let doc_id = cx + .editor + .documents() + .find_map(|doc| (doc.path() == Some(&path)).then(|| doc.id())); + if let Some(id) = doc_id { + buffer_close_by_ids_impl(cx, &[id], false)?; + } + let id = cx.editor.open(path.as_path(), Action::Load)?; + let doc = doc_mut!(cx.editor, &id); // Jump to saved revision if the doc wasn't saved. if history.current_revision() != last_saved_revision { let view_id = doc diff --git a/helix-term/tests/test/commands.rs b/helix-term/tests/test/commands.rs index 64bba22a691a..c2033e7f58d8 100644 --- a/helix-term/tests/test/commands.rs +++ b/helix-term/tests/test/commands.rs @@ -484,10 +484,7 @@ async fn test_workspace_serde() -> anyhow::Result<()> { Some("ihello:sw:bc!:ow"), Some(&|app| { let mut docs: Vec<_> = app.editor.documents().collect(); - assert_eq!(1, docs.len()); - - let doc = docs.pop().unwrap(); - assert_eq!(Some(file.path()), doc.path().map(PathBuf::as_path)); + assert_eq!(2, docs.len()); }), false, ) From 1b0f1ad48e32c45581e135e193877e385b1a9a54 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Tue, 31 Jan 2023 22:04:04 -0500 Subject: [PATCH 07/35] rebase on top of master --- Cargo.lock | 1 + helix-core/Cargo.toml | 1 + helix-core/src/history.rs | 74 +++---- helix-core/src/path.rs | 11 ++ helix-core/src/transaction.rs | 13 +- helix-term/src/commands/typed.rs | 328 +++++++++++++++---------------- helix-view/src/document.rs | 32 +++ helix-view/src/editor.rs | 4 +- helix-view/src/workspace/mod.rs | 12 +- 9 files changed, 273 insertions(+), 203 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0dfee71c11ee..474a863a7ed3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1135,6 +1135,7 @@ dependencies = [ "slotmap", "smallvec", "smartstring", + "tempfile", "textwrap", "toml", "tree-sitter", diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index 6493b6f55435..d322ebffdf4c 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -50,3 +50,4 @@ textwrap = "0.16.0" [dev-dependencies] quickcheck = { version = "1", default-features = false } +tempfile = "3.3.0" diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index 6003a13b4475..f4ca661dc100 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -2,8 +2,9 @@ use crate::parse::*; use crate::{Assoc, ChangeSet, Range, Rope, Selection, Transaction}; use once_cell::sync::Lazy; use regex::Regex; -use std::io::{Read, Write}; +use std::io::{Read, Seek, Write}; use std::num::NonZeroUsize; +use std::path::PathBuf; use std::time::{Duration, Instant}; #[derive(Debug, Clone)] pub struct State { @@ -66,24 +67,6 @@ struct Revision { timestamp: Instant, } -const HEADER_TAG: &str = "Helix Undofile 1\n"; - -fn get_hash(reader: &mut R) -> std::io::Result<[u8; 20]> { - 
const BUF_SIZE: usize = 8192; - - let mut buf = [0u8; BUF_SIZE]; - let mut hash = sha1_smol::Sha1::new(); - loop { - let total_read = reader.read(&mut buf)?; - if total_read == 0 { - break; - } - - hash.update(&buf[0..total_read]); - } - Ok(hash.digest().bytes()) -} - impl Default for History { fn default() -> Self { // Add a dummy root revision with empty transaction @@ -128,28 +111,47 @@ impl Revision { } } +const HEADER_TAG: &str = "Helix Undofile 1\n"; + +fn get_hash(reader: &mut R) -> std::io::Result<[u8; 20]> { + const BUF_SIZE: usize = 8192; + + let mut buf = [0u8; BUF_SIZE]; + let mut hash = sha1_smol::Sha1::new(); + loop { + let total_read = reader.read(&mut buf)?; + if total_read == 0 { + break; + } + + hash.update(&buf[0..total_read]); + } + Ok(hash.digest().bytes()) +} + impl History { - pub fn serialize( + pub fn serialize( &self, writer: &mut W, - text: &mut R, + path: &PathBuf, last_saved_revision: usize, - last_mtime: u64, ) -> std::io::Result<()> { write_string(writer, HEADER_TAG)?; write_usize(writer, self.current)?; write_usize(writer, last_saved_revision)?; + + let last_mtime = std::fs::metadata(path)? + .modified()? + .duration_since(std::time::UNIX_EPOCH) + .unwrap() + .as_secs(); write_u64(writer, last_mtime)?; - writer.write_all(&get_hash(text)?)?; + writer.write_all(&get_hash(&mut std::fs::File::open(path)?)?)?; write_vec(writer, &self.revisions, |writer, rev| rev.serialize(writer))?; Ok(()) } - pub fn deserialize( - reader: &mut R, - text: &mut R, - last_mtime: u64, - ) -> std::io::Result<(usize, Self)> { + pub fn deserialize(reader: &mut R, path: &PathBuf) -> std::io::Result<(usize, Self)> { let header = read_string(reader)?; if HEADER_TAG != header { Err(std::io::Error::new( @@ -161,10 +163,15 @@ impl History { let current = read_usize(reader)?; let last_saved_revision = read_usize(reader)?; let mtime = read_u64(reader)?; + let last_mtime = std::fs::metadata(path)? + .modified()? + .duration_since(std::time::UNIX_EPOCH) + .unwrap() + .as_secs(); let mut hash = [0u8; 20]; reader.read_exact(&mut hash)?; - if mtime != last_mtime && hash != get_hash(text)? { + if mtime != last_mtime && hash != get_hash(&mut std::fs::File::open(path)?)? 
{ return Err(std::io::Error::new( std::io::ErrorKind::Other, "outdated undo file", @@ -172,7 +179,8 @@ impl History { } let revisions = read_vec(reader, |reader| Revision::deserialize(reader, timestamp))?; - Ok((last_saved_revision, History { current, revisions })) + let history = History { current, revisions }; + Ok((last_saved_revision, history)) } } @@ -741,11 +749,11 @@ mod test { }; history.commit_revision(&transaction, &state); - let text = Vec::new(); + let file = tempfile::NamedTempFile::new().unwrap(); history - .serialize(&mut buf, &mut text.as_slice(), 0, 0) + .serialize(&mut buf, &file.path().to_path_buf(), 0) .unwrap(); - History::deserialize(&mut buf.as_slice(), &mut text.as_slice(), 0).unwrap(); + History::deserialize(&mut buf.as_slice(), &file.path().to_path_buf()).unwrap(); true } ); diff --git a/helix-core/src/path.rs b/helix-core/src/path.rs index 471d0607df10..d6ee8402c40c 100644 --- a/helix-core/src/path.rs +++ b/helix-core/src/path.rs @@ -164,3 +164,14 @@ pub fn path_from_bytes(slice: &[u8]) -> Result { ::from_bytes(slice), )); } + +pub fn escape_path>(path: P) -> PathBuf { + let mut res = PathBuf::with_capacity(path.as_ref().as_os_str().len()); + for component in path.as_ref() { + let mut bytes = vec![b'%']; + bytes.append(&mut path_as_bytes(PathBuf::from(component))); + let s = path_from_bytes(&bytes).unwrap(); + res.push(s); + } + res +} diff --git a/helix-core/src/transaction.rs b/helix-core/src/transaction.rs index 97278727f8ea..274626d8e69d 100644 --- a/helix-core/src/transaction.rs +++ b/helix-core/src/transaction.rs @@ -431,8 +431,10 @@ pub fn serialize_transaction( write_vec(writer, selection.ranges(), |writer, range| { write_usize(writer, range.anchor)?; write_usize(writer, range.head)?; - write_option(writer, range.horiz.as_ref(), |writer, horiz| { - write_u32(writer, *horiz) + write_option(writer, range.old_visual_position.as_ref(), |writer, pos| { + write_u32(writer, pos.0)?; + write_u32(writer, pos.1)?; + Ok(()) })?; Ok(()) })?; @@ -476,11 +478,14 @@ pub fn deserialize_transaction(reader: &mut R) -> std::io::Result], - event: PromptEvent, -) -> anyhow::Result<()> { - use helix_view::workspace::undo::UndoIndex; - use helix_view::workspace::Workspace; - - if event != PromptEvent::Validate { - return Ok(()); - } - - let mut workspace = Workspace::new()?; - let mut index_file = workspace.get_mut(".index")?; - - // Create a merged list of key-value tuples from the saved index and the open buffers. - let index = { - let mut saved_files = UndoIndex::deserialize(&mut index_file) - .unwrap_or_default() - .0; - let mut last_id = saved_files.last().map(|(id, _)| *id + 1).unwrap_or(0); - let mut new_files = cx - .editor - .documents() - .filter_map(|doc| { - doc.path().filter(|path| { - !saved_files - .iter() - .any(|(_, indexed_path)| indexed_path == *path) - }) - }) - .map(|path| { - let id = last_id; - last_id += 1; - (id, path.clone()) - }) - .collect(); - saved_files.append(&mut new_files); - UndoIndex(saved_files) - }; - log::debug!("Saving undo index: {:?}", index); - - index - .serialize(&mut index_file) - .context("failed to save index")?; - - // Save the histories of open buffers. - for doc in cx.editor.documents_mut().filter(|doc| doc.path().is_some()) { - let path = doc.path().unwrap().clone(); - let last_saved_revision = doc.get_last_saved_revision(); - let history = doc.history.get_mut(); - let mtime = std::fs::metadata(path.clone())? - .modified()? - .duration_since(std::time::UNIX_EPOCH)? 
- .as_secs(); - - let mut file = workspace.get_mut(&index.find_id(&path).unwrap().to_string())?; - history - .serialize( - &mut file, - &mut std::fs::File::open(&path)?, - last_saved_revision, - mtime, - ) - .context(format!( - "failed to save history for {}", - path.to_string_lossy() - ))?; - log::debug!("Saved history for: {}", path.to_string_lossy()); - } - Ok(()) -} - -fn open_workspace( - cx: &mut compositor::Context, - _args: &[Cow], - event: PromptEvent, -) -> anyhow::Result<()> { - use helix_view::workspace::undo::UndoIndex; - use helix_view::workspace::Workspace; - - if event != PromptEvent::Validate { - return Ok(()); - } - - let mut workspace = Workspace::new()?; - let index = UndoIndex::deserialize(&mut workspace.get(".index")?) - .context("failed to load index")? - .0; - let scrolloff = cx.editor.config().scrolloff; - log::debug!("Loaded undo index: {:?}", index); - - // Open the documents in the index and load their histories. - for (index_id, path) in index { - if !path.exists() { - continue; - } - - // Close open buffers for the doc. - - let mut file = workspace.get(&index_id.to_string())?; - let last_mtime = std::fs::metadata(path.clone())? - .modified()? - .duration_since(std::time::UNIX_EPOCH)? - .as_secs(); - let (last_saved_revision, history) = match helix_core::history::History::deserialize( - &mut file, - &mut std::fs::File::open(&path)?, - last_mtime, - ) { - Ok(res) => res, - Err(e) => { - cx.editor.set_error(format!( - "failed to load undo file for {} because {e}", - path.to_string_lossy() - )); - continue; - } - }; - - let doc_id = cx - .editor - .documents() - .find_map(|doc| (doc.path() == Some(&path)).then(|| doc.id())); - if let Some(id) = doc_id { - buffer_close_by_ids_impl(cx, &[id], false)?; - } - let id = cx.editor.open(path.as_path(), Action::Load)?; - let doc = doc_mut!(cx.editor, &id); - // Jump to saved revision if the doc wasn't saved. - if history.current_revision() != last_saved_revision { - let view_id = doc - .selections() - .keys() - .next() - .copied() - .expect("No view_id available"); - let view = view_mut!(cx.editor, view_id); - doc.apply( - &history.changes_since(last_saved_revision).unwrap(), - view_id, - ); - view.ensure_cursor_in_view(doc, scrolloff); - } - doc.history.set(history); - doc.set_last_saved_revision(last_saved_revision); - log::debug!("Loaded history for: {}", path.to_string_lossy()); - } - Ok(()) -} +// fn save_workspace( +// cx: &mut compositor::Context, +// _args: &[Cow], +// event: PromptEvent, +// ) -> anyhow::Result<()> { +// use helix_view::workspace::undo::UndoIndex; +// use helix_view::workspace::Workspace; + +// if event != PromptEvent::Validate { +// return Ok(()); +// } + +// let mut workspace = Workspace::new()?; +// let mut index_file = workspace.get_mut(".index")?; + +// // Create a merged list of key-value tuples from the saved index and the open buffers. 
+// let index = { +// let mut saved_files = UndoIndex::deserialize(&mut index_file) +// .unwrap_or_default() +// .0; +// let mut last_id = saved_files.last().map(|(id, _)| *id + 1).unwrap_or(0); +// let mut new_files = cx +// .editor +// .documents() +// .filter_map(|doc| { +// doc.path().filter(|path| { +// !saved_files +// .iter() +// .any(|(_, indexed_path)| indexed_path == *path) +// }) +// }) +// .map(|path| { +// let id = last_id; +// last_id += 1; +// (id, path.clone()) +// }) +// .collect(); +// saved_files.append(&mut new_files); +// UndoIndex(saved_files) +// }; +// log::debug!("Saving undo index: {:?}", index); + +// index +// .serialize(&mut index_file) +// .context("failed to save index")?; + +// // Save the histories of open buffers. +// for doc in cx.editor.documents_mut().filter(|doc| doc.path().is_some()) { +// let path = doc.path().unwrap().clone(); +// let last_saved_revision = doc.get_last_saved_revision(); +// let history = doc.history.get_mut(); +// let mtime = std::fs::metadata(path.clone())? +// .modified()? +// .duration_since(std::time::UNIX_EPOCH)? +// .as_secs(); + +// let mut file = workspace.get_mut(&index.find_id(&path).unwrap().to_string())?; +// history +// .serialize( +// &mut file, +// &mut std::fs::File::open(&path)?, +// last_saved_revision, +// mtime, +// ) +// .context(format!( +// "failed to save history for {}", +// path.to_string_lossy() +// ))?; +// log::debug!("Saved history for: {}", path.to_string_lossy()); +// } +// Ok(()) +// } + +// fn open_workspace( +// cx: &mut compositor::Context, +// _args: &[Cow], +// event: PromptEvent, +// ) -> anyhow::Result<()> { +// use helix_view::workspace::undo::UndoIndex; +// use helix_view::workspace::Workspace; + +// if event != PromptEvent::Validate { +// return Ok(()); +// } + +// let mut workspace = Workspace::new()?; +// let index = UndoIndex::deserialize(&mut workspace.get(".index")?) +// .context("failed to load index")? +// .0; +// let scrolloff = cx.editor.config().scrolloff; +// log::debug!("Loaded undo index: {:?}", index); + +// // Open the documents in the index and load their histories. +// for (index_id, path) in index { +// if !path.exists() { +// continue; +// } + +// // Close open buffers for the doc. + +// let mut file = workspace.get(&index_id.to_string())?; +// let last_mtime = std::fs::metadata(path.clone())? +// .modified()? +// .duration_since(std::time::UNIX_EPOCH)? +// .as_secs(); +// let (last_saved_revision, history) = match helix_core::history::History::deserialize( +// &mut file, +// &mut std::fs::File::open(&path)?, +// last_mtime, +// ) { +// Ok(res) => res, +// Err(e) => { +// cx.editor.set_error(format!( +// "failed to load undo file for {} because {e}", +// path.to_string_lossy() +// )); +// continue; +// } +// }; + +// let doc_id = cx +// .editor +// .documents() +// .find_map(|doc| (doc.path() == Some(&path)).then(|| doc.id())); +// if let Some(id) = doc_id { +// buffer_close_by_ids_impl(cx, &[id], false)?; +// } +// let id = cx.editor.open(path.as_path(), Action::Load)?; +// let doc = doc_mut!(cx.editor, &id); +// // Jump to saved revision if the doc wasn't saved. 
+// if history.current_revision() != last_saved_revision { +// let view_id = doc +// .selections() +// .keys() +// .next() +// .copied() +// .expect("No view_id available"); +// let view = view_mut!(cx.editor, view_id); +// doc.apply( +// &history.changes_since(last_saved_revision).unwrap(), +// view_id, +// ); +// view.ensure_cursor_in_view(doc, scrolloff); +// } +// doc.history.set(history); +// doc.set_last_saved_revision(last_saved_revision); +// log::debug!("Loaded history for: {}", path.to_string_lossy()); +// } +// Ok(()) +// } pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ TypableCommand { @@ -2626,20 +2626,20 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ fun: run_shell_command, completer: Some(completers::filename), }, - TypableCommand { - name: "save-workspace", - aliases: &["sw"], - doc: "Save open document undo history", - fun: save_workspace, - completer: None, - }, - TypableCommand { - name: "open-workspace", - aliases: &["ow"], - doc: "Open document undo history, overriding open buffers.", - fun: open_workspace, - completer: None, - }, + // TypableCommand { + // name: "save-workspace", + // aliases: &["sw"], + // doc: "Save open document undo history", + // fun: save_workspace, + // completer: None, + // }, + // TypableCommand { + // name: "open-workspace", + // aliases: &["ow"], + // doc: "Open document undo history, overriding open buffers.", + // fun: open_workspace, + // completer: None, + // }, ]; pub static TYPABLE_COMMAND_MAP: Lazy> = diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 7d6f87efa152..ba2d7cfff32b 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -32,6 +32,7 @@ use helix_core::{ }; use crate::editor::{Config, RedrawHandle}; +use crate::workspace::FileLock; use crate::{DocumentId, Editor, Theme, View, ViewId}; /// 8kB of buffer space for encoding and decoding `Rope`s. @@ -700,6 +701,37 @@ impl Document { Ok(()) } + pub fn undo_file(&self) -> Option { + self.path().map(|path| { + let undo_dir = helix_loader::cache_dir().join("undo"); + let escaped_path = helix_core::path::escape_path(path); + let res = undo_dir.join(escaped_path); + res + }) + } + + pub fn save_history(&mut self) -> anyhow::Result<()> { + if let Some(Ok(mut undo_file)) = self.undo_file().map(FileLock::exclusive) { + let last_saved_revision = self.get_last_saved_revision(); + let path = self.path().unwrap().clone(); + let history = self.history.get_mut(); + history.serialize(undo_file.get_mut()?, &path, last_saved_revision)?; + } + Ok(()) + } + + pub fn load_history(&mut self) -> anyhow::Result<()> { + if let Some(Ok(undo_file)) = self.undo_file().map(FileLock::shared) { + let (last_saved_revision, history) = helix_core::history::History::deserialize( + &mut undo_file.get()?, + self.path().unwrap(), + )?; + self.history.set(history); + self.set_last_saved_revision(last_saved_revision); + } + Ok(()) + } + /// Sets the [`Document`]'s encoding with the encoding correspondent to `label`. 
pub fn set_encoding(&mut self, label: &str) -> Result<(), Error> { self.encoding = encoding::Encoding::for_label(label.as_bytes()) diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs index c209c3c3affe..8d40daf0794a 100644 --- a/helix-view/src/editor.rs +++ b/helix-view/src/editor.rs @@ -8,7 +8,7 @@ use crate::{ theme::{self, Theme}, tree::{self, Tree}, view::ViewPosition, - workspace::{undo::UndoIndex, Workspace}, + Align, Document, DocumentId, View, ViewId, }; use helix_vcs::DiffProviderRegistry; @@ -274,6 +274,7 @@ pub struct Config { /// Whether to color modes with different colors. Defaults to `false`. pub color_modes: bool, pub soft_wrap: SoftWrap, + pub persistent_undo: bool, } #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] @@ -764,6 +765,7 @@ impl Default for Config { indent_guides: IndentGuidesConfig::default(), color_modes: false, soft_wrap: SoftWrap::default(), + persistent_undo: true, } } } diff --git a/helix-view/src/workspace/mod.rs b/helix-view/src/workspace/mod.rs index 9ec543d84e68..f6f9596d854b 100644 --- a/helix-view/src/workspace/mod.rs +++ b/helix-view/src/workspace/mod.rs @@ -80,7 +80,17 @@ impl FileLock { Ok(Self { file, shared: true }) } - pub fn lock(&self) -> Result<()> { + pub fn get(&self) -> Result<&File> { + self.lock()?; + Ok(&self.file) + } + + pub fn get_mut(&mut self) -> Result<&mut File> { + self.lock()?; + Ok(&mut self.file) + } + + fn lock(&self) -> Result<()> { sys::lock(&self.file, self.shared) } From 103eb6813d9e1a11f9fd22613f24630768af68d2 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Tue, 7 Feb 2023 11:02:30 -0500 Subject: [PATCH 08/35] arc --- Cargo.lock | 1 + helix-core/Cargo.toml | 1 + helix-core/src/history.rs | 49 ++++++++++++++++++-------------- helix-core/src/parse.rs | 4 +-- helix-core/src/path.rs | 28 +++++++++++------- helix-term/src/commands/typed.rs | 6 +++- helix-view/src/document.rs | 37 +++++++++++++++++++----- helix-view/src/editor.rs | 3 ++ helix-view/src/workspace/mod.rs | 10 +++++-- helix-view/src/workspace/undo.rs | 4 +-- 10 files changed, 97 insertions(+), 46 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 474a863a7ed3..ab706fafcff2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1137,6 +1137,7 @@ dependencies = [ "smartstring", "tempfile", "textwrap", + "tokio", "toml", "tree-sitter", "unicode-general-category", diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index d322ebffdf4c..dea20fd0eddb 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -18,6 +18,7 @@ integration = [] helix-loader = { version = "0.6", path = "../helix-loader" } ropey = { version = "1.6.0", default-features = false, features = ["simd"] } +tokio = { version = "1", default-features = false, features = ["io-util"], optional = true } smallvec = "1.10" smartstring = "1.0.1" unicode-segmentation = "1.10" diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index f4ca661dc100..4c6072fee573 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -2,9 +2,10 @@ use crate::parse::*; use crate::{Assoc, ChangeSet, Range, Rope, Selection, Transaction}; use once_cell::sync::Lazy; use regex::Regex; -use std::io::{Read, Seek, Write}; +use std::io::{Read, Write}; use std::num::NonZeroUsize; -use std::path::PathBuf; +use std::path::Path; +use std::sync::Arc; use std::time::{Duration, Instant}; #[derive(Debug, Clone)] pub struct State { @@ -49,7 +50,7 @@ pub struct State { /// delete, we also store an inversion of the transaction. 
/// /// Using time to navigate the history: -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct History { revisions: Vec, current: usize, @@ -60,10 +61,10 @@ pub struct History { struct Revision { parent: usize, last_child: Option, - transaction: Transaction, + transaction: Arc, // We need an inversion for undos because delete transactions don't store // the deleted text. - inversion: Transaction, + inversion: Arc, timestamp: Instant, } @@ -74,8 +75,8 @@ impl Default for History { revisions: vec![Revision { parent: 0, last_child: None, - transaction: Transaction::from(ChangeSet::new(&Rope::new())), - inversion: Transaction::from(ChangeSet::new(&Rope::new())), + transaction: Arc::new(Transaction::from(ChangeSet::new(&Rope::new()))), + inversion: Arc::new(Transaction::from(ChangeSet::new(&Rope::new()))), timestamp: Instant::now(), }], current: 0, @@ -99,8 +100,8 @@ impl Revision { 0 => None, n => Some(unsafe { NonZeroUsize::new_unchecked(n) }), }; - let transaction = crate::transaction::deserialize_transaction(reader)?; - let inversion = crate::transaction::deserialize_transaction(reader)?; + let transaction = Arc::new(crate::transaction::deserialize_transaction(reader)?); + let inversion = Arc::new(crate::transaction::deserialize_transaction(reader)?); Ok(Revision { parent, last_child, @@ -130,10 +131,10 @@ fn get_hash(reader: &mut R) -> std::io::Result<[u8; 20]> { } impl History { - pub fn serialize( + pub fn serialize( &self, writer: &mut W, - path: &PathBuf, + path: &Path, last_saved_revision: usize, ) -> std::io::Result<()> { write_string(writer, HEADER_TAG)?; @@ -151,7 +152,7 @@ impl History { Ok(()) } - pub fn deserialize(reader: &mut R, path: &PathBuf) -> std::io::Result<(usize, Self)> { + pub fn deserialize(reader: &mut R, path: &Path) -> std::io::Result<(usize, Self)> { let header = read_string(reader)?; if HEADER_TAG != header { Err(std::io::Error::new( @@ -194,17 +195,19 @@ impl History { original: &State, timestamp: Instant, ) { - let inversion = transaction - .invert(&original.doc) - // Store the current cursor position - .with_selection(original.selection.clone()); + let inversion = Arc::new( + transaction + .invert(&original.doc) + // Store the current cursor position + .with_selection(original.selection.clone()), + ); let new_current = self.revisions.len(); self.revisions[self.current].last_child = NonZeroUsize::new(new_current); self.revisions.push(Revision { parent: self.current, last_child: None, - transaction: transaction.clone(), + transaction: Arc::new(transaction.clone()), inversion, timestamp, }); @@ -230,8 +233,10 @@ impl History { let up_txns = up .iter() .rev() - .map(|&n| self.revisions[n].inversion.clone()); - let down_txns = down.iter().map(|&n| self.revisions[n].transaction.clone()); + .map(|&n| self.revisions[n].inversion.as_ref().clone()); + let down_txns = down + .iter() + .map(|&n| self.revisions[n].transaction.as_ref().clone()); down_txns.chain(up_txns).reduce(|acc, tx| tx.compose(acc)) } @@ -317,11 +322,13 @@ impl History { let up = self.path_up(self.current, lca); let down = self.path_up(to, lca); self.current = to; - let up_txns = up.iter().map(|&n| self.revisions[n].inversion.clone()); + let up_txns = up + .iter() + .map(|&n| self.revisions[n].inversion.as_ref().clone()); let down_txns = down .iter() .rev() - .map(|&n| self.revisions[n].transaction.clone()); + .map(|&n| self.revisions[n].transaction.as_ref().clone()); up_txns.chain(down_txns).collect() } diff --git a/helix-core/src/parse.rs b/helix-core/src/parse.rs index 214fe6725378..42cf266d2190 
100644 --- a/helix-core/src/parse.rs +++ b/helix-core/src/parse.rs @@ -60,8 +60,8 @@ pub fn write_option( pub fn read_byte(reader: &mut R) -> Result { match reader.bytes().next() { - Some(byte) => byte, - None => Err(Error::new(ErrorKind::Other, "end of file")), + Some(s) => s, + None => Err(Error::from(ErrorKind::UnexpectedEof)), } } diff --git a/helix-core/src/path.rs b/helix-core/src/path.rs index d6ee8402c40c..1cbfdcaa1121 100644 --- a/helix-core/src/path.rs +++ b/helix-core/src/path.rs @@ -145,14 +145,14 @@ pub fn get_truncated_path>(path: P) -> PathBuf { ret } -pub fn path_as_bytes>(path: P) -> Vec { +pub fn os_str_as_bytes>(path: P) -> Vec { let path = path.as_ref(); #[cfg(windows)] return path.to_str().unwrap().into(); #[cfg(unix)] - return std::os::unix::ffi::OsStrExt::as_bytes(path.as_os_str()).into(); + return std::os::unix::ffi::OsStrExt::as_bytes(path.into()).to_vec(); } pub fn path_from_bytes(slice: &[u8]) -> Result { @@ -165,13 +165,21 @@ pub fn path_from_bytes(slice: &[u8]) -> Result { )); } -pub fn escape_path>(path: P) -> PathBuf { - let mut res = PathBuf::with_capacity(path.as_ref().as_os_str().len()); - for component in path.as_ref() { - let mut bytes = vec![b'%']; - bytes.append(&mut path_as_bytes(PathBuf::from(component))); - let s = path_from_bytes(&bytes).unwrap(); - res.push(s); +pub fn is_sep_byte(b: u8) -> bool { + if cfg!(windows) { + b == b'/' || b == b'\\' + } else { + b == b'/' + } +} + +pub fn escape_path(path: &Path) -> PathBuf { + let s = path.as_os_str().to_os_string(); + let mut bytes = os_str_as_bytes(&s); + for b in bytes.iter_mut() { + if is_sep_byte(*b) { + *b = b'%'; + } } - res + path_from_bytes(&bytes).unwrap() } diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index 3172dea06fd5..c45bdd374881 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -1176,7 +1176,11 @@ fn reload( doc.reload(view, &cx.editor.diff_providers, redraw_handle) .map(|_| { view.ensure_cursor_in_view(doc, scrolloff); - }) + })?; + if let Err(_) = doc.load_history() { + cx.editor.set_error("failed to load history from disk"); + } + Ok(()) } fn reload_all( diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index ba2d7cfff32b..8a5091469e20 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -585,7 +585,17 @@ impl Document { let encoding = self.encoding; +<<<<<<< HEAD let last_saved_time = self.last_saved_time; +======= + let mut undo_file = self + .undo_file() + .ok_or(anyhow!("failed to acquire undo file lock")) + .map(FileLock::exclusive)??; + let history = self.history.get_mut().clone(); + let last_saved_revision = self.get_last_saved_revision(); + let save_history = self.config.load().persistent_undo; +>>>>>>> 974129e6 (wip) // We encode the file according to the `Document`'s encoding. 
let future = async move { @@ -614,6 +624,14 @@ impl Document { let mut file = File::create(&path).await?; to_writer(&mut file, encoding, &text).await?; + if save_history { + let path = path.clone(); + tokio::task::spawn_blocking(move || -> anyhow::Result<()> { + history.serialize(undo_file.get_mut()?, &path, last_saved_revision)?; + Ok(()) + }) + .await??; + } let event = DocumentSavedEvent { revision: current_rev, @@ -715,19 +733,24 @@ impl Document { let last_saved_revision = self.get_last_saved_revision(); let path = self.path().unwrap().clone(); let history = self.history.get_mut(); - history.serialize(undo_file.get_mut()?, &path, last_saved_revision)?; + let undo_file = undo_file.get_mut()?; + undo_file.set_len(0)?; + history.serialize(undo_file, &path, last_saved_revision)?; } Ok(()) } pub fn load_history(&mut self) -> anyhow::Result<()> { if let Some(Ok(undo_file)) = self.undo_file().map(FileLock::shared) { - let (last_saved_revision, history) = helix_core::history::History::deserialize( - &mut undo_file.get()?, - self.path().unwrap(), - )?; - self.history.set(history); - self.set_last_saved_revision(last_saved_revision); + let mut undo_file = undo_file.get()?; + if undo_file.metadata()?.len() != 0 { + let (last_saved_revision, history) = helix_core::history::History::deserialize( + &mut undo_file, + self.path().unwrap(), + )?; + self.history.set(history); + self.set_last_saved_revision(last_saved_revision); + } } Ok(()) } diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs index 8d40daf0794a..d3a1fa0a0aea 100644 --- a/helix-view/src/editor.rs +++ b/helix-view/src/editor.rs @@ -1303,6 +1303,9 @@ impl Editor { Some(self.syn_loader.clone()), self.config.clone(), )?; + if let Err(_) = doc.load_history() { + self.set_error("failed to load history from disk"); + } if let Some(diff_base) = self.diff_providers.get_diff_base(&path) { doc.set_diff_base(diff_base, self.redraw_handle.clone()); diff --git a/helix-view/src/workspace/mod.rs b/helix-view/src/workspace/mod.rs index f6f9596d854b..797354188667 100644 --- a/helix-view/src/workspace/mod.rs +++ b/helix-view/src/workspace/mod.rs @@ -6,7 +6,7 @@ use std::{ }; use anyhow::{Context, Result}; -use helix_core::path::path_as_bytes; +use helix_core::path::os_str_as_bytes; use sha1_smol::Sha1; pub struct Workspace { @@ -18,7 +18,7 @@ impl Workspace { // TODO: Allow custom session names to be passed. 
pub fn new() -> Result { let path = std::env::current_dir()?; - let bytes = path_as_bytes(path); + let bytes = os_str_as_bytes(path); let hash = Sha1::from(bytes).digest().to_string(); let path = helix_loader::cache_dir().join("workspaces").join(hash); Ok(Self { path, lock: None }) @@ -100,7 +100,11 @@ impl FileLock { std::fs::DirBuilder::new().recursive(true).create(parent)?; } } - OpenOptions::new().write(true).create(true).open(path) + OpenOptions::new() + .read(true) + .write(true) + .create(true) + .open(path) } } diff --git a/helix-view/src/workspace/undo.rs b/helix-view/src/workspace/undo.rs index 44864bb3cfb5..52414a24949a 100644 --- a/helix-view/src/workspace/undo.rs +++ b/helix-view/src/workspace/undo.rs @@ -6,7 +6,7 @@ use std::{ use helix_core::{ parse::*, - path::{path_as_bytes, path_from_bytes}, + path::{os_str_as_bytes, path_from_bytes}, }; #[derive(Default, Debug)] @@ -16,7 +16,7 @@ impl UndoIndex { pub fn serialize(&self, writer: &mut W) -> Result<()> { write_vec(writer, &self.0, |writer, (id, path)| { write_usize(writer, *id)?; - write_vec(writer, &path_as_bytes(path), |writer, byte| { + write_vec(writer, &os_str_as_bytes(path), |writer, byte| { write_byte(writer, *byte) })?; Ok(()) From 8f34005f2a47c6ed78925982bd8214dafed4ca85 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Fri, 10 Feb 2023 22:36:16 -0500 Subject: [PATCH 09/35] change test --- helix-term/tests/test/commands.rs | 9 ++++++--- helix-view/src/document.rs | 3 --- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/helix-term/tests/test/commands.rs b/helix-term/tests/test/commands.rs index c2033e7f58d8..8f8171f78de6 100644 --- a/helix-term/tests/test/commands.rs +++ b/helix-term/tests/test/commands.rs @@ -473,7 +473,7 @@ async fn test_character_info() -> anyhow::Result<()> { ); } -async fn test_workspace_serde() -> anyhow::Result<()> { +async fn test_persistent_undo() -> anyhow::Result<()> { let file = helpers::new_readonly_tempfile()?; let mut app = helpers::AppBuilder::new() .with_file(file.path(), None) @@ -481,10 +481,13 @@ async fn test_workspace_serde() -> anyhow::Result<()> { test_key_sequence( &mut app, - Some("ihello:sw:bc!:ow"), + Some(&format!( + "ihello:w:bc!:o {}", + file.path().to_string_lossy() + )), Some(&|app| { let mut docs: Vec<_> = app.editor.documents().collect(); - assert_eq!(2, docs.len()); + assert!(!app.editor.is_err()); }), false, ) diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 8a5091469e20..5f289c73bd31 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -585,9 +585,7 @@ impl Document { let encoding = self.encoding; -<<<<<<< HEAD let last_saved_time = self.last_saved_time; -======= let mut undo_file = self .undo_file() .ok_or(anyhow!("failed to acquire undo file lock")) @@ -595,7 +593,6 @@ impl Document { let history = self.history.get_mut().clone(); let last_saved_revision = self.get_last_saved_revision(); let save_history = self.config.load().persistent_undo; ->>>>>>> 974129e6 (wip) // We encode the file according to the `Document`'s encoding. 
let future = async move { From 7a0179cd780c65f63e44bce69f46d5ab7c844789 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Fri, 10 Feb 2023 22:44:36 -0500 Subject: [PATCH 10/35] remove workspace mod --- Cargo.lock | 1 + helix-core/Cargo.toml | 3 + .../mod.rs => helix-core/src/flock.rs | 90 ++++--------------- helix-core/src/lib.rs | 1 + helix-view/src/document.rs | 2 +- helix-view/src/lib.rs | 1 - helix-view/src/workspace/undo.rs | 42 --------- 7 files changed, 24 insertions(+), 116 deletions(-) rename helix-view/src/workspace/mod.rs => helix-core/src/flock.rs (53%) delete mode 100644 helix-view/src/workspace/undo.rs diff --git a/Cargo.lock b/Cargo.lock index ab706fafcff2..5c8ce407f848 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1124,6 +1124,7 @@ dependencies = [ "hashbrown 0.13.2", "helix-loader", "imara-diff", + "libc", "log", "once_cell", "quickcheck", diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index dea20fd0eddb..c2950d268bc6 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -49,6 +49,9 @@ chrono = { version = "0.4", default-features = false, features = ["alloc", "std" etcetera = "0.4" textwrap = "0.16.0" +[target.'cfg(unix)'.dependencies] +libc = "0.2" + [dev-dependencies] quickcheck = { version = "1", default-features = false } tempfile = "3.3.0" diff --git a/helix-view/src/workspace/mod.rs b/helix-core/src/flock.rs similarity index 53% rename from helix-view/src/workspace/mod.rs rename to helix-core/src/flock.rs index 797354188667..1e68d064b74f 100644 --- a/helix-view/src/workspace/mod.rs +++ b/helix-core/src/flock.rs @@ -1,65 +1,6 @@ -pub mod undo; - -use std::{ - fs::{File, OpenOptions}, - path::PathBuf, -}; - -use anyhow::{Context, Result}; -use helix_core::path::os_str_as_bytes; -use sha1_smol::Sha1; - -pub struct Workspace { - path: PathBuf, - lock: Option, -} - -impl Workspace { - // TODO: Allow custom session names to be passed. 
- pub fn new() -> Result { - let path = std::env::current_dir()?; - let bytes = os_str_as_bytes(path); - let hash = Sha1::from(bytes).digest().to_string(); - let path = helix_loader::cache_dir().join("workspaces").join(hash); - Ok(Self { path, lock: None }) - } - - pub fn path(&self) -> PathBuf { - self.path.clone() - } - - pub fn get(&mut self, path: &str) -> Result { - if self.lock.is_none() { - let lock = FileLock::shared(self.path.join(".helix.lock"))?; - lock.lock()?; - - self.lock = Some(lock); - } - let path = self.path.join(path); - - OpenOptions::new() - .read(true) - .open(path) - .context("failed to open file") - } - - pub fn get_mut(&mut self, path: &str) -> Result { - if self.lock.is_none() { - let lock = FileLock::exclusive(self.path.join(".helix.lock"))?; - lock.lock()?; - - self.lock = Some(lock); - } - let path = self.path.join(path); - - OpenOptions::new() - .read(true) - .write(true) - .create(true) - .open(path) - .context("failed to open file") - } -} +use std::fs::{File, OpenOptions}; +use std::io::Result; +use std::path::Path; pub struct FileLock { file: File, @@ -67,7 +8,7 @@ pub struct FileLock { } impl FileLock { - pub fn exclusive(path: PathBuf) -> Result { + pub fn exclusive>(path: P) -> Result { let file = Self::open_lock(path)?; Ok(Self { file, @@ -75,7 +16,7 @@ impl FileLock { }) } - pub fn shared(path: PathBuf) -> Result { + pub fn shared>(path: P) -> Result { let file = Self::open_lock(path)?; Ok(Self { file, shared: true }) } @@ -94,8 +35,8 @@ impl FileLock { sys::lock(&self.file, self.shared) } - fn open_lock(path: PathBuf) -> std::io::Result { - if let Some(parent) = path.parent() { + fn open_lock>(path: P) -> Result { + if let Some(parent) = path.as_ref().parent() { if !parent.exists() { std::fs::DirBuilder::new().recursive(true).create(parent)?; } @@ -117,9 +58,9 @@ impl Drop for FileLock { // `sys` impls from https://github.com/rust-lang/cargo/blob/fc2242a8c5606be36aecfd61dd464422271dad9d/src/cargo/util/flock.rs #[cfg(unix)] mod sys { - use anyhow::Result; use std::fs::File; use std::io::Error; + use std::io::Result; use std::os::unix::io::AsRawFd; pub(super) fn unlock(file: &File) -> Result<()> { @@ -134,7 +75,7 @@ mod sys { fn flock(file: &File, flag: libc::c_int) -> Result<()> { let ret = unsafe { libc::flock(file.as_raw_fd(), flag) }; if ret < 0 { - anyhow::bail!(Error::last_os_error()) + Err(Error::last_os_error()) } else { Ok(()) } @@ -143,7 +84,12 @@ mod sys { #[cfg(windows)] mod sys { - use std::{fs::File, io::Error, os::windows::prelude::AsRawHandle, path::Path}; + use std::{ + fs::File, + io::{Error, Result}, + os::windows::prelude::AsRawHandle, + path::Path, + }; use winapi::um::{ fileapi::{LockFileEx, UnlockFile}, @@ -151,20 +97,20 @@ mod sys { }; /// Blocks until the lock is acquired. 
- pub(super) fn lock(file: &File, shared: bool) -> anyhow::Result<()> { + pub(super) fn lock(file: &File, shared: bool) -> Result<()> { let flag = if shared { 0 } else { LOCKFILE_EXCLUSIVE_LOCK }; unsafe { let mut overlapped = std::mem::zeroed(); let ret = LockFileEx(file.as_raw_handle(), flag, 0, !0, !0, &mut overlapped); if ret == 0 { - anyhow::bail!(Error::last_os_error()) + Err(Error::last_os_error()) } else { Ok(()) } } } - pub(super) fn unlock(file: &File) -> std::io::Result<()> { + pub(super) fn unlock(file: &File) -> Result<()> { unsafe { let ret = UnlockFile(file.as_raw_handle(), 0, 0, !0, !0); if ret == 0 { diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs index 4174e88cdc74..85b36cb44623 100644 --- a/helix-core/src/lib.rs +++ b/helix-core/src/lib.rs @@ -7,6 +7,7 @@ pub mod config; pub mod diagnostic; pub mod diff; pub mod doc_formatter; +pub mod flock; pub mod graphemes; pub mod history; pub mod increment; diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 5f289c73bd31..1ab828aad5cf 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -4,6 +4,7 @@ use futures_util::future::BoxFuture; use futures_util::FutureExt; use helix_core::auto_pairs::AutoPairs; use helix_core::doc_formatter::TextFormat; +use helix_core::flock::FileLock; use helix_core::syntax::Highlight; use helix_core::text_annotations::TextAnnotations; use helix_core::Range; @@ -32,7 +33,6 @@ use helix_core::{ }; use crate::editor::{Config, RedrawHandle}; -use crate::workspace::FileLock; use crate::{DocumentId, Editor, Theme, View, ViewId}; /// 8kB of buffer space for encoding and decoding `Rope`s. diff --git a/helix-view/src/lib.rs b/helix-view/src/lib.rs index 76cf9c39b961..c3f67345b361 100644 --- a/helix-view/src/lib.rs +++ b/helix-view/src/lib.rs @@ -7,7 +7,6 @@ pub mod editor; pub mod env; pub mod graphics; pub mod gutter; -pub mod workspace; pub mod handlers { pub mod dap; pub mod lsp; diff --git a/helix-view/src/workspace/undo.rs b/helix-view/src/workspace/undo.rs deleted file mode 100644 index 52414a24949a..000000000000 --- a/helix-view/src/workspace/undo.rs +++ /dev/null @@ -1,42 +0,0 @@ -use anyhow::Result; -use std::{ - io::{Error, ErrorKind, Read, Write}, - path::PathBuf, -}; - -use helix_core::{ - parse::*, - path::{os_str_as_bytes, path_from_bytes}, -}; - -#[derive(Default, Debug)] -pub struct UndoIndex(pub Vec<(usize, PathBuf)>); - -impl UndoIndex { - pub fn serialize(&self, writer: &mut W) -> Result<()> { - write_vec(writer, &self.0, |writer, (id, path)| { - write_usize(writer, *id)?; - write_vec(writer, &os_str_as_bytes(path), |writer, byte| { - write_byte(writer, *byte) - })?; - Ok(()) - })?; - Ok(()) - } - - pub fn deserialize(reader: &mut R) -> Result { - let res = read_vec(reader, |reader| { - let id = read_usize(reader)?; - let path = path_from_bytes(&read_vec(reader, read_byte)?) 
- .map_err(|e| Error::new(ErrorKind::InvalidData, e))?; - Ok((id, path)) - })?; - Ok(Self(res)) - } - - pub fn find_id(&self, path: &PathBuf) -> Option { - self.0 - .iter() - .find_map(|(id, index_path)| (index_path == path).then(|| *id)) - } -} From 2854dae2d754f9962673ca31515101b28e74053b Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Fri, 10 Feb 2023 22:45:56 -0500 Subject: [PATCH 11/35] remove typed commands --- helix-term/src/commands/typed.rs | 165 ------------------------------- 1 file changed, 165 deletions(-) diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index c45bdd374881..a7a5d5847bf3 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -1957,157 +1957,6 @@ fn run_shell_command( Ok(()) } -// fn save_workspace( -// cx: &mut compositor::Context, -// _args: &[Cow], -// event: PromptEvent, -// ) -> anyhow::Result<()> { -// use helix_view::workspace::undo::UndoIndex; -// use helix_view::workspace::Workspace; - -// if event != PromptEvent::Validate { -// return Ok(()); -// } - -// let mut workspace = Workspace::new()?; -// let mut index_file = workspace.get_mut(".index")?; - -// // Create a merged list of key-value tuples from the saved index and the open buffers. -// let index = { -// let mut saved_files = UndoIndex::deserialize(&mut index_file) -// .unwrap_or_default() -// .0; -// let mut last_id = saved_files.last().map(|(id, _)| *id + 1).unwrap_or(0); -// let mut new_files = cx -// .editor -// .documents() -// .filter_map(|doc| { -// doc.path().filter(|path| { -// !saved_files -// .iter() -// .any(|(_, indexed_path)| indexed_path == *path) -// }) -// }) -// .map(|path| { -// let id = last_id; -// last_id += 1; -// (id, path.clone()) -// }) -// .collect(); -// saved_files.append(&mut new_files); -// UndoIndex(saved_files) -// }; -// log::debug!("Saving undo index: {:?}", index); - -// index -// .serialize(&mut index_file) -// .context("failed to save index")?; - -// // Save the histories of open buffers. -// for doc in cx.editor.documents_mut().filter(|doc| doc.path().is_some()) { -// let path = doc.path().unwrap().clone(); -// let last_saved_revision = doc.get_last_saved_revision(); -// let history = doc.history.get_mut(); -// let mtime = std::fs::metadata(path.clone())? -// .modified()? -// .duration_since(std::time::UNIX_EPOCH)? -// .as_secs(); - -// let mut file = workspace.get_mut(&index.find_id(&path).unwrap().to_string())?; -// history -// .serialize( -// &mut file, -// &mut std::fs::File::open(&path)?, -// last_saved_revision, -// mtime, -// ) -// .context(format!( -// "failed to save history for {}", -// path.to_string_lossy() -// ))?; -// log::debug!("Saved history for: {}", path.to_string_lossy()); -// } -// Ok(()) -// } - -// fn open_workspace( -// cx: &mut compositor::Context, -// _args: &[Cow], -// event: PromptEvent, -// ) -> anyhow::Result<()> { -// use helix_view::workspace::undo::UndoIndex; -// use helix_view::workspace::Workspace; - -// if event != PromptEvent::Validate { -// return Ok(()); -// } - -// let mut workspace = Workspace::new()?; -// let index = UndoIndex::deserialize(&mut workspace.get(".index")?) -// .context("failed to load index")? -// .0; -// let scrolloff = cx.editor.config().scrolloff; -// log::debug!("Loaded undo index: {:?}", index); - -// // Open the documents in the index and load their histories. -// for (index_id, path) in index { -// if !path.exists() { -// continue; -// } - -// // Close open buffers for the doc. 
- -// let mut file = workspace.get(&index_id.to_string())?; -// let last_mtime = std::fs::metadata(path.clone())? -// .modified()? -// .duration_since(std::time::UNIX_EPOCH)? -// .as_secs(); -// let (last_saved_revision, history) = match helix_core::history::History::deserialize( -// &mut file, -// &mut std::fs::File::open(&path)?, -// last_mtime, -// ) { -// Ok(res) => res, -// Err(e) => { -// cx.editor.set_error(format!( -// "failed to load undo file for {} because {e}", -// path.to_string_lossy() -// )); -// continue; -// } -// }; - -// let doc_id = cx -// .editor -// .documents() -// .find_map(|doc| (doc.path() == Some(&path)).then(|| doc.id())); -// if let Some(id) = doc_id { -// buffer_close_by_ids_impl(cx, &[id], false)?; -// } -// let id = cx.editor.open(path.as_path(), Action::Load)?; -// let doc = doc_mut!(cx.editor, &id); -// // Jump to saved revision if the doc wasn't saved. -// if history.current_revision() != last_saved_revision { -// let view_id = doc -// .selections() -// .keys() -// .next() -// .copied() -// .expect("No view_id available"); -// let view = view_mut!(cx.editor, view_id); -// doc.apply( -// &history.changes_since(last_saved_revision).unwrap(), -// view_id, -// ); -// view.ensure_cursor_in_view(doc, scrolloff); -// } -// doc.history.set(history); -// doc.set_last_saved_revision(last_saved_revision); -// log::debug!("Loaded history for: {}", path.to_string_lossy()); -// } -// Ok(()) -// } - pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ TypableCommand { name: "quit", @@ -2630,20 +2479,6 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ fun: run_shell_command, completer: Some(completers::filename), }, - // TypableCommand { - // name: "save-workspace", - // aliases: &["sw"], - // doc: "Save open document undo history", - // fun: save_workspace, - // completer: None, - // }, - // TypableCommand { - // name: "open-workspace", - // aliases: &["ow"], - // doc: "Open document undo history, overriding open buffers.", - // fun: open_workspace, - // completer: None, - // }, ]; pub static TYPABLE_COMMAND_MAP: Lazy> = From 9a3aa57062c84af4bc07ce9eacaf8615d60d2304 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Fri, 10 Feb 2023 22:52:23 -0500 Subject: [PATCH 12/35] remove command refs --- book/src/generated/typable-cmd.md | 2 -- helix-core/Cargo.toml | 1 - 2 files changed, 3 deletions(-) diff --git a/book/src/generated/typable-cmd.md b/book/src/generated/typable-cmd.md index c47d7db80333..0ff501a33fdb 100644 --- a/book/src/generated/typable-cmd.md +++ b/book/src/generated/typable-cmd.md @@ -74,5 +74,3 @@ | `:pipe` | Pipe each selection to the shell command. | | `:pipe-to` | Pipe each selection to the shell command, ignoring output. | | `:run-shell-command`, `:sh` | Run a shell command | -| `:save-workspace`, `:sw` | Save open document undo history | -| `:open-workspace`, `:ow` | Open document undo history, overriding open buffers. 
| diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index c2950d268bc6..d796e4026389 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -18,7 +18,6 @@ integration = [] helix-loader = { version = "0.6", path = "../helix-loader" } ropey = { version = "1.6.0", default-features = false, features = ["simd"] } -tokio = { version = "1", default-features = false, features = ["io-util"], optional = true } smallvec = "1.10" smartstring = "1.0.1" unicode-segmentation = "1.10" From 860c3e9606527c8f01bb20b07f2a65fd522e0aef Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Fri, 10 Feb 2023 23:01:34 -0500 Subject: [PATCH 13/35] clippy --- Cargo.lock | 2 +- helix-core/Cargo.toml | 2 ++ helix-core/src/history.rs | 6 ++---- helix-core/src/path.rs | 2 +- helix-term/src/commands/typed.rs | 5 +++-- helix-term/tests/test/commands.rs | 9 ++++++--- helix-view/src/document.rs | 15 +++++++-------- helix-view/src/editor.rs | 6 +++--- 8 files changed, 25 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5c8ce407f848..0c9d26bdfcfd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1138,12 +1138,12 @@ dependencies = [ "smartstring", "tempfile", "textwrap", - "tokio", "toml", "tree-sitter", "unicode-general-category", "unicode-segmentation", "unicode-width", + "winapi", ] [[package]] diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index d796e4026389..bd140e967f3e 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -32,6 +32,8 @@ regex = "1" bitflags = "1.3" ahash = "0.8.3" hashbrown = { version = "0.13.2", features = ["raw"] } + +winapi = "0.3" sha1_smol = "1.0" log = "0.4" diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index 4c6072fee573..1518907a1dae 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -757,10 +757,8 @@ mod test { history.commit_revision(&transaction, &state); let file = tempfile::NamedTempFile::new().unwrap(); - history - .serialize(&mut buf, &file.path().to_path_buf(), 0) - .unwrap(); - History::deserialize(&mut buf.as_slice(), &file.path().to_path_buf()).unwrap(); + history.serialize(&mut buf, file.path(), 0).unwrap(); + History::deserialize(&mut buf.as_slice(), file.path()).unwrap(); true } ); diff --git a/helix-core/src/path.rs b/helix-core/src/path.rs index 1cbfdcaa1121..a21cc69335b3 100644 --- a/helix-core/src/path.rs +++ b/helix-core/src/path.rs @@ -152,7 +152,7 @@ pub fn os_str_as_bytes>(path: P) -> Vec { return path.to_str().unwrap().into(); #[cfg(unix)] - return std::os::unix::ffi::OsStrExt::as_bytes(path.into()).to_vec(); + return std::os::unix::ffi::OsStrExt::as_bytes(path).to_vec(); } pub fn path_from_bytes(slice: &[u8]) -> Result { diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index a7a5d5847bf3..cfc4ecd70f7f 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -1177,8 +1177,9 @@ fn reload( .map(|_| { view.ensure_cursor_in_view(doc, scrolloff); })?; - if let Err(_) = doc.load_history() { - cx.editor.set_error("failed to load history from disk"); + if let Err(e) = doc.load_history() { + cx.editor + .set_error(Cow::Owned(format!("failed to load history from disk: {e}"))); } Ok(()) } diff --git a/helix-term/tests/test/commands.rs b/helix-term/tests/test/commands.rs index 8f8171f78de6..247bb1d790c1 100644 --- a/helix-term/tests/test/commands.rs +++ b/helix-term/tests/test/commands.rs @@ -470,11 +470,15 @@ async fn test_character_info() -> anyhow::Result<()> { Some(&|app| { assert_eq!(r#""h" Dec 104 Hex 68"#, 
app.editor.get_status().unwrap().0); }), - ); + false, + ) + .await?; + Ok(()) } +#[tokio::test(flavor = "multi_thread")] async fn test_persistent_undo() -> anyhow::Result<()> { - let file = helpers::new_readonly_tempfile()?; + let file = tempfile::NamedTempFile::new()?; let mut app = helpers::AppBuilder::new() .with_file(file.path(), None) .build()?; @@ -486,7 +490,6 @@ async fn test_persistent_undo() -> anyhow::Result<()> { file.path().to_string_lossy() )), Some(&|app| { - let mut docs: Vec<_> = app.editor.documents().collect(); assert!(!app.editor.is_err()); }), false, diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 1ab828aad5cf..87f46a7c3482 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -587,8 +587,8 @@ impl Document { let last_saved_time = self.last_saved_time; let mut undo_file = self - .undo_file() - .ok_or(anyhow!("failed to acquire undo file lock")) + .undo_file(Some(&path)) + .ok_or_else(|| anyhow!("failed to acquire undo file lock")) .map(FileLock::exclusive)??; let history = self.history.get_mut().clone(); let last_saved_revision = self.get_last_saved_revision(); @@ -716,17 +716,16 @@ impl Document { Ok(()) } - pub fn undo_file(&self) -> Option { - self.path().map(|path| { + pub fn undo_file(&self, path: Option<&PathBuf>) -> Option { + self.path().or(path).map(|path| { let undo_dir = helix_loader::cache_dir().join("undo"); let escaped_path = helix_core::path::escape_path(path); - let res = undo_dir.join(escaped_path); - res + undo_dir.join(escaped_path) }) } pub fn save_history(&mut self) -> anyhow::Result<()> { - if let Some(Ok(mut undo_file)) = self.undo_file().map(FileLock::exclusive) { + if let Some(Ok(mut undo_file)) = self.undo_file(None).map(FileLock::exclusive) { let last_saved_revision = self.get_last_saved_revision(); let path = self.path().unwrap().clone(); let history = self.history.get_mut(); @@ -738,7 +737,7 @@ impl Document { } pub fn load_history(&mut self) -> anyhow::Result<()> { - if let Some(Ok(undo_file)) = self.undo_file().map(FileLock::shared) { + if let Some(Ok(undo_file)) = self.undo_file(None).map(FileLock::shared) { let mut undo_file = undo_file.get()?; if undo_file.metadata()?.len() != 0 { let (last_saved_revision, history) = helix_core::history::History::deserialize( diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs index d3a1fa0a0aea..f23a1c51d837 100644 --- a/helix-view/src/editor.rs +++ b/helix-view/src/editor.rs @@ -765,7 +765,7 @@ impl Default for Config { indent_guides: IndentGuidesConfig::default(), color_modes: false, soft_wrap: SoftWrap::default(), - persistent_undo: true, + persistent_undo: false, } } } @@ -1303,8 +1303,8 @@ impl Editor { Some(self.syn_loader.clone()), self.config.clone(), )?; - if let Err(_) = doc.load_history() { - self.set_error("failed to load history from disk"); + if let Err(e) = doc.load_history() { + self.set_error(Cow::Owned(format!("failed to load history from disk: {e}"))); } if let Some(diff_base) = self.diff_providers.get_diff_base(&path) { From e61256f59a9c7e8cae2000fa857dbb10b9a2c5ba Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sat, 11 Feb 2023 11:04:05 -0500 Subject: [PATCH 14/35] cleanup --- Cargo.lock | 3 - helix-core/src/history.rs | 9 +- helix-core/src/transaction.rs | 189 ++++++++++++++---------------- helix-term/src/application.rs | 8 +- helix-term/tests/test/commands.rs | 3 + helix-term/tests/test/write.rs | 5 +- helix-view/Cargo.toml | 4 - helix-view/src/document.rs | 63 +++++----- 8 files changed, 142 insertions(+), 142 
deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0c9d26bdfcfd..1d9047c7a375 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1283,15 +1283,12 @@ dependencies = [ "once_cell", "serde", "serde_json", - "sha1_smol", "slotmap", "tokio", "tokio-stream", "toml", "url", - "walkdir", "which", - "winapi", ] [[package]] diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index 1518907a1dae..77c8c566210a 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -7,6 +7,7 @@ use std::num::NonZeroUsize; use std::path::Path; use std::sync::Arc; use std::time::{Duration, Instant}; + #[derive(Debug, Clone)] pub struct State { pub doc: Rope, @@ -88,8 +89,8 @@ impl Revision { fn serialize(&self, writer: &mut W) -> std::io::Result<()> { write_usize(writer, self.parent)?; write_usize(writer, self.last_child.map(|n| n.get()).unwrap_or(0))?; - crate::transaction::serialize_transaction(writer, &self.transaction)?; - crate::transaction::serialize_transaction(writer, &self.inversion)?; + self.transaction.serialize(writer)?; + self.inversion.serialize(writer)?; Ok(()) } @@ -100,8 +101,8 @@ impl Revision { 0 => None, n => Some(unsafe { NonZeroUsize::new_unchecked(n) }), }; - let transaction = Arc::new(crate::transaction::deserialize_transaction(reader)?); - let inversion = Arc::new(crate::transaction::deserialize_transaction(reader)?); + let transaction = Arc::new(Transaction::deserialize(reader)?); + let inversion = Arc::new(Transaction::deserialize(reader)?); Ok(Revision { parent, last_child, diff --git a/helix-core/src/transaction.rs b/helix-core/src/transaction.rs index 274626d8e69d..9a539752a8ac 100644 --- a/helix-core/src/transaction.rs +++ b/helix-core/src/transaction.rs @@ -419,106 +419,6 @@ pub struct Transaction { selection: Option, } -pub fn serialize_transaction( - writer: &mut W, - transaction: &Transaction, -) -> std::io::Result<()> { - write_option( - writer, - transaction.selection.as_ref(), - |writer, selection| { - write_usize(writer, selection.primary_index)?; - write_vec(writer, selection.ranges(), |writer, range| { - write_usize(writer, range.anchor)?; - write_usize(writer, range.head)?; - write_option(writer, range.old_visual_position.as_ref(), |writer, pos| { - write_u32(writer, pos.0)?; - write_u32(writer, pos.1)?; - Ok(()) - })?; - Ok(()) - })?; - - Ok(()) - }, - )?; - - write_usize(writer, transaction.changes.len)?; - write_usize(writer, transaction.changes.len_after)?; - write_vec( - writer, - transaction.changes.changes(), - |writer, operation| { - let variant = match operation { - Operation::Retain(_) => 0, - Operation::Delete(_) => 1, - Operation::Insert(_) => 2, - }; - write_byte(writer, variant)?; - match operation { - Operation::Retain(n) | Operation::Delete(n) => { - write_usize(writer, *n)?; - } - - Operation::Insert(tendril) => { - write_string(writer, tendril.as_str())?; - } - } - - Ok(()) - }, - )?; - - Ok(()) -} - -pub fn deserialize_transaction(reader: &mut R) -> std::io::Result { - let selection = read_option(reader, |reader| { - let primary_index = read_usize(reader)?; - let ranges = read_vec(reader, |reader| { - let anchor = read_usize(reader)?; - let head = read_usize(reader)?; - let old_visual_position = read_option(reader, |reader| { - let res = (read_u32(reader)?, read_u32(reader)?); - Ok(res) - })?; - Ok(Range { - anchor, - head, - old_visual_position, - }) - })?; - Ok(Selection { - ranges: ranges.into(), - primary_index, - }) - })?; - - let len = read_usize(reader)?; - let len_after = read_usize(reader)?; - let changes = 
read_vec(reader, |reader| { - let res = match read_byte(reader)? { - 0 => Operation::Retain(read_usize(reader)?), - 1 => Operation::Delete(read_usize(reader)?), - 2 => Operation::Insert(read_string(reader)?.into()), - _ => { - return Err(std::io::Error::new( - std::io::ErrorKind::Other, - "invalid variant", - )) - } - }; - Ok(res) - })?; - let changes = ChangeSet { - changes, - len, - len_after, - }; - - Ok(Transaction { changes, selection }) -} - impl Transaction { /// Create a new, empty transaction. pub fn new(doc: &Rope) -> Self { @@ -627,6 +527,95 @@ impl Transaction { pub fn changes_iter(&self) -> ChangeIterator { self.changes.changes_iter() } + + pub fn serialize(&self, writer: &mut W) -> std::io::Result<()> { + write_option(writer, self.selection.as_ref(), |writer, selection| { + write_usize(writer, selection.primary_index)?; + write_vec(writer, selection.ranges(), |writer, range| { + write_usize(writer, range.anchor)?; + write_usize(writer, range.head)?; + write_option(writer, range.old_visual_position.as_ref(), |writer, pos| { + write_u32(writer, pos.0)?; + write_u32(writer, pos.1)?; + Ok(()) + })?; + Ok(()) + })?; + + Ok(()) + })?; + + write_usize(writer, self.changes.len)?; + write_usize(writer, self.changes.len_after)?; + write_vec(writer, self.changes.changes(), |writer, operation| { + let variant = match operation { + Operation::Retain(_) => 0, + Operation::Delete(_) => 1, + Operation::Insert(_) => 2, + }; + write_byte(writer, variant)?; + match operation { + Operation::Retain(n) | Operation::Delete(n) => { + write_usize(writer, *n)?; + } + + Operation::Insert(tendril) => { + write_string(writer, tendril.as_str())?; + } + } + + Ok(()) + })?; + + Ok(()) + } + + pub fn deserialize(reader: &mut R) -> std::io::Result { + let selection = read_option(reader, |reader| { + let primary_index = read_usize(reader)?; + let ranges = read_vec(reader, |reader| { + let anchor = read_usize(reader)?; + let head = read_usize(reader)?; + let old_visual_position = read_option(reader, |reader| { + let res = (read_u32(reader)?, read_u32(reader)?); + Ok(res) + })?; + Ok(Range { + anchor, + head, + old_visual_position, + }) + })?; + Ok(Selection { + ranges: ranges.into(), + primary_index, + }) + })?; + + let len = read_usize(reader)?; + let len_after = read_usize(reader)?; + let changes = read_vec(reader, |reader| { + let res = match read_byte(reader)? 
{ + 0 => Operation::Retain(read_usize(reader)?), + 1 => Operation::Delete(read_usize(reader)?), + 2 => Operation::Insert(read_string(reader)?.into()), + _ => { + return Err(std::io::Error::new( + std::io::ErrorKind::Other, + "invalid variant", + )) + } + }; + Ok(res) + })?; + let changes = ChangeSet { + changes, + len, + len_after, + }; + + Ok(Transaction { changes, selection }) + } } impl From for Transaction { diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs index a1685fcfa956..f7964162127b 100644 --- a/helix-term/src/application.rs +++ b/helix-term/src/application.rs @@ -506,7 +506,7 @@ impl Application { } } - pub fn handle_document_write(&mut self, doc_save_event: DocumentSavedEventResult) { + pub async fn handle_document_write(&mut self, doc_save_event: DocumentSavedEventResult) { let doc_save_event = match doc_save_event { Ok(event) => event, Err(err) => { @@ -515,6 +515,10 @@ impl Application { } }; + if doc_save_event.serialize_error { + self.editor.set_error("failed to serialize history"); + } + let doc = match self.editor.document_mut(doc_save_event.doc_id) { None => { warn!( @@ -574,7 +578,7 @@ impl Application { match event { EditorEvent::DocumentSaved(event) => { - self.handle_document_write(event); + self.handle_document_write(event).await; self.render().await; } EditorEvent::ConfigEvent(event) => { diff --git a/helix-term/tests/test/commands.rs b/helix-term/tests/test/commands.rs index 247bb1d790c1..44f6978a893e 100644 --- a/helix-term/tests/test/commands.rs +++ b/helix-term/tests/test/commands.rs @@ -479,7 +479,10 @@ async fn test_character_info() -> anyhow::Result<()> { #[tokio::test(flavor = "multi_thread")] async fn test_persistent_undo() -> anyhow::Result<()> { let file = tempfile::NamedTempFile::new()?; + let mut config = Config::default(); + config.editor.persistent_undo = true; let mut app = helpers::AppBuilder::new() + .with_config(config) .with_file(file.path(), None) .build()?; diff --git a/helix-term/tests/test/write.rs b/helix-term/tests/test/write.rs index 81459b2fe846..a38857bbc3df 100644 --- a/helix-term/tests/test/write.rs +++ b/helix-term/tests/test/write.rs @@ -168,9 +168,12 @@ async fn test_write_fail_mod_flag() -> anyhow::Result<()> { #[tokio::test(flavor = "multi_thread")] async fn test_write_scratch_to_new_path() -> anyhow::Result<()> { let mut file = tempfile::NamedTempFile::new()?; + let mut config = Config::default(); + config.editor.persistent_undo = true; + let mut app = helpers::AppBuilder::new().with_config(config).build()?; test_key_sequence( - &mut AppBuilder::new().build()?, + &mut app, Some(format!("ihello:w {}", file.path().to_string_lossy()).as_ref()), Some(&|app| { assert!(!app.editor.is_err()); diff --git a/helix-view/Cargo.toml b/helix-view/Cargo.toml index 23195dc8faf1..17e07e9a2dd6 100644 --- a/helix-view/Cargo.toml +++ b/helix-view/Cargo.toml @@ -23,10 +23,6 @@ helix-dap = { version = "0.6", path = "../helix-dap" } crossterm = { version = "0.25", optional = true } helix-vcs = { version = "0.6", path = "../helix-vcs" } -winapi = "0.3" -sha1_smol = "1.0" -walkdir = "2.3" - # Conversion traits once_cell = "1.17" url = "2" diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 87f46a7c3482..3a02983c2f4e 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -99,6 +99,7 @@ pub struct DocumentSavedEvent { pub doc_id: DocumentId, pub path: PathBuf, pub text: Rope, + pub serialize_error: bool, } pub type DocumentSavedEventResult = Result; @@ -586,14 +587,12 @@ impl Document 
{ let encoding = self.encoding; let last_saved_time = self.last_saved_time; - let mut undo_file = self - .undo_file(Some(&path)) - .ok_or_else(|| anyhow!("failed to acquire undo file lock")) - .map(FileLock::exclusive)??; - let history = self.history.get_mut().clone(); - let last_saved_revision = self.get_last_saved_revision(); - let save_history = self.config.load().persistent_undo; - + let history = self + .config + .load() + .persistent_undo + .then(|| self.history.get_mut().clone()); + let undo_file = self.undo_file(Some(&path)).unwrap(); // We encode the file according to the `Document`'s encoding. let future = async move { use tokio::{fs, fs::File}; @@ -621,20 +620,36 @@ impl Document { let mut file = File::create(&path).await?; to_writer(&mut file, encoding, &text).await?; - if save_history { - let path = path.clone(); - tokio::task::spawn_blocking(move || -> anyhow::Result<()> { - history.serialize(undo_file.get_mut()?, &path, last_saved_revision)?; - Ok(()) - }) - .await??; - } + + let mut serialize_error = false; + if let Some(history) = history { + let res = { + let path = path.clone(); + tokio::task::spawn_blocking(move || -> anyhow::Result<()> { + let mut undo_file = std::fs::File::create(&undo_file)?; + history.serialize(&mut undo_file, &path, current_rev)?; + Ok(()) + }) + .await + .map_err(|e| anyhow!(e)) + .and_then(std::convert::identity) + }; + + if let Err(e) = res { + log::error!( + "Failed to serialize history for {}: {e}", + path.to_string_lossy() + ); + serialize_error = true; + } + }; let event = DocumentSavedEvent { revision: current_rev, doc_id, path, text: text.clone(), + serialize_error, }; if let Some(language_server) = language_server { @@ -724,20 +739,12 @@ impl Document { }) } - pub fn save_history(&mut self) -> anyhow::Result<()> { - if let Some(Ok(mut undo_file)) = self.undo_file(None).map(FileLock::exclusive) { - let last_saved_revision = self.get_last_saved_revision(); - let path = self.path().unwrap().clone(); - let history = self.history.get_mut(); - let undo_file = undo_file.get_mut()?; - undo_file.set_len(0)?; - history.serialize(undo_file, &path, last_saved_revision)?; + pub fn load_history(&mut self) -> anyhow::Result<()> { + if !self.config.load().persistent_undo { + return Ok(()); } - Ok(()) - } - pub fn load_history(&mut self) -> anyhow::Result<()> { - if let Some(Ok(undo_file)) = self.undo_file(None).map(FileLock::shared) { + if let Some(undo_file) = self.undo_file(None).map(FileLock::shared).transpose()? 
{ let mut undo_file = undo_file.get()?; if undo_file.metadata()?.len() != 0 { let (last_saved_revision, history) = helix_core::history::History::deserialize( From 8e1f4115f8c115ae992f0eeb270a403c2a6dc02e Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sun, 12 Feb 2023 11:23:18 -0500 Subject: [PATCH 15/35] remove flock --- Cargo.lock | 1 - helix-core/Cargo.toml | 1 - helix-core/src/flock.rs | 123 ------------------------------------- helix-core/src/lib.rs | 1 - helix-view/src/document.rs | 5 +- 5 files changed, 2 insertions(+), 129 deletions(-) delete mode 100644 helix-core/src/flock.rs diff --git a/Cargo.lock b/Cargo.lock index 1d9047c7a375..015f667853da 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1143,7 +1143,6 @@ dependencies = [ "unicode-general-category", "unicode-segmentation", "unicode-width", - "winapi", ] [[package]] diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index bd140e967f3e..8da9fbca7cc0 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -33,7 +33,6 @@ bitflags = "1.3" ahash = "0.8.3" hashbrown = { version = "0.13.2", features = ["raw"] } -winapi = "0.3" sha1_smol = "1.0" log = "0.4" diff --git a/helix-core/src/flock.rs b/helix-core/src/flock.rs deleted file mode 100644 index 1e68d064b74f..000000000000 --- a/helix-core/src/flock.rs +++ /dev/null @@ -1,123 +0,0 @@ -use std::fs::{File, OpenOptions}; -use std::io::Result; -use std::path::Path; - -pub struct FileLock { - file: File, - shared: bool, -} - -impl FileLock { - pub fn exclusive>(path: P) -> Result { - let file = Self::open_lock(path)?; - Ok(Self { - file, - shared: false, - }) - } - - pub fn shared>(path: P) -> Result { - let file = Self::open_lock(path)?; - Ok(Self { file, shared: true }) - } - - pub fn get(&self) -> Result<&File> { - self.lock()?; - Ok(&self.file) - } - - pub fn get_mut(&mut self) -> Result<&mut File> { - self.lock()?; - Ok(&mut self.file) - } - - fn lock(&self) -> Result<()> { - sys::lock(&self.file, self.shared) - } - - fn open_lock>(path: P) -> Result { - if let Some(parent) = path.as_ref().parent() { - if !parent.exists() { - std::fs::DirBuilder::new().recursive(true).create(parent)?; - } - } - OpenOptions::new() - .read(true) - .write(true) - .create(true) - .open(path) - } -} - -impl Drop for FileLock { - fn drop(&mut self) { - let _ = sys::unlock(&self.file); - } -} - -// `sys` impls from https://github.com/rust-lang/cargo/blob/fc2242a8c5606be36aecfd61dd464422271dad9d/src/cargo/util/flock.rs -#[cfg(unix)] -mod sys { - use std::fs::File; - use std::io::Error; - use std::io::Result; - use std::os::unix::io::AsRawFd; - - pub(super) fn unlock(file: &File) -> Result<()> { - flock(file, libc::LOCK_UN) - } - - pub(super) fn lock(file: &File, shared: bool) -> Result<()> { - let flag = if shared { libc::LOCK_SH } else { libc::LOCK_EX }; - flock(file, flag) - } - - fn flock(file: &File, flag: libc::c_int) -> Result<()> { - let ret = unsafe { libc::flock(file.as_raw_fd(), flag) }; - if ret < 0 { - Err(Error::last_os_error()) - } else { - Ok(()) - } - } -} - -#[cfg(windows)] -mod sys { - use std::{ - fs::File, - io::{Error, Result}, - os::windows::prelude::AsRawHandle, - path::Path, - }; - - use winapi::um::{ - fileapi::{LockFileEx, UnlockFile}, - minwinbase::LOCKFILE_EXCLUSIVE_LOCK, - }; - - /// Blocks until the lock is acquired. 
- pub(super) fn lock(file: &File, shared: bool) -> Result<()> { - let flag = if shared { 0 } else { LOCKFILE_EXCLUSIVE_LOCK }; - unsafe { - let mut overlapped = std::mem::zeroed(); - let ret = LockFileEx(file.as_raw_handle(), flag, 0, !0, !0, &mut overlapped); - if ret == 0 { - Err(Error::last_os_error()) - } else { - Ok(()) - } - } - } - - pub(super) fn unlock(file: &File) -> Result<()> { - unsafe { - let ret = UnlockFile(file.as_raw_handle(), 0, 0, !0, !0); - if ret == 0 { - Err(Error::last_os_error()) - } else { - Ok(()) - } - } - } -} diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs index 85b36cb44623..4174e88cdc74 100644 --- a/helix-core/src/lib.rs +++ b/helix-core/src/lib.rs @@ -7,7 +7,6 @@ pub mod config; pub mod diagnostic; pub mod diff; pub mod doc_formatter; -pub mod flock; pub mod graphemes; pub mod history; pub mod increment; diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 3a02983c2f4e..160144178a01 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -4,7 +4,6 @@ use futures_util::future::BoxFuture; use futures_util::FutureExt; use helix_core::auto_pairs::AutoPairs; use helix_core::doc_formatter::TextFormat; -use helix_core::flock::FileLock; use helix_core::syntax::Highlight; use helix_core::text_annotations::TextAnnotations; use helix_core::Range; @@ -744,8 +743,8 @@ impl Document { return Ok(()); } - if let Some(undo_file) = self.undo_file(None).map(FileLock::shared).transpose()? { - let mut undo_file = undo_file.get()?; + if let Some(undo_file) = self.undo_file(None) { + let mut undo_file = std::fs::File::open(undo_file)?; if undo_file.metadata()?.len() != 0 { let (last_saved_revision, history) = helix_core::history::History::deserialize( &mut undo_file, From 1243eeba2934c77ea1876da9d73b43805880f07f Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sun, 12 Feb 2023 11:44:13 -0500 Subject: [PATCH 16/35] improve unit test --- helix-core/src/history.rs | 24 +++++++++++++----------- helix-term/src/commands/typed.rs | 2 +- helix-view/src/document.rs | 6 ++++-- 3 files changed, 18 insertions(+), 14 deletions(-) diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index 77c8c566210a..b5a3cf5f3e70 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -744,19 +744,21 @@ mod test { } quickcheck!( - fn serde_history(a: String, b: String) -> bool { - let old = Rope::from(a); - let new = Rope::from(b); - let transaction = crate::diff::compare_ropes(&old, &new); - - let mut buf = Vec::new(); + fn serde_history(original: String, changes: Vec) -> bool { let mut history = History::default(); - let state = State { - doc: old, - selection: Selection::point(0), - }; - history.commit_revision(&transaction, &state); + let mut original = Rope::from(original); + + for c in changes.into_iter().map(Rope::from) { + let transaction = crate::diff::compare_ropes(&original, &c); + let state = State { + doc: original, + selection: Selection::point(0), + }; + history.commit_revision(&transaction, &state); + original = c; + } + let mut buf = Vec::new(); let file = tempfile::NamedTempFile::new().unwrap(); history.serialize(&mut buf, file.path(), 0).unwrap(); History::deserialize(&mut buf.as_slice(), file.path()).unwrap(); diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index cfc4ecd70f7f..04ec0e1e0c83 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -1179,7 +1179,7 @@ fn reload( })?; if let Err(e) = doc.load_history() { cx.editor - 
.set_error(Cow::Owned(format!("failed to load history from disk: {e}"))); + .set_error(Cow::Owned(format!("failed to deserialize history: {e}"))); } Ok(()) } diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 160144178a01..082e5644576f 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -743,8 +743,10 @@ impl Document { return Ok(()); } - if let Some(undo_file) = self.undo_file(None) { - let mut undo_file = std::fs::File::open(undo_file)?; + if let Some(mut undo_file) = self + .undo_file(None) + .and_then(|path| std::fs::File::open(path).ok()) + { if undo_file.metadata()?.len() != 0 { let (last_saved_revision, history) = helix_core::history::History::deserialize( &mut undo_file, From 5259da2c543a2e35e7dc5b894dfac819703fda9e Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sun, 12 Feb 2023 22:39:25 -0500 Subject: [PATCH 17/35] create undo directory if it doesn't exist --- helix-view/src/document.rs | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 082e5644576f..8feb0c415621 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -591,7 +591,7 @@ impl Document { .load() .persistent_undo .then(|| self.history.get_mut().clone()); - let undo_file = self.undo_file(Some(&path)).unwrap(); + let undo_file = self.undo_file(Some(&path))?.unwrap(); // We encode the file according to the `Document`'s encoding. let future = async move { use tokio::{fs, fs::File}; @@ -730,12 +730,14 @@ impl Document { Ok(()) } - pub fn undo_file(&self, path: Option<&PathBuf>) -> Option { - self.path().or(path).map(|path| { - let undo_dir = helix_loader::cache_dir().join("undo"); + pub fn undo_file(&self, path: Option<&PathBuf>) -> anyhow::Result> { + let undo_dir = helix_loader::cache_dir().join("undo"); + std::fs::create_dir_all(&undo_dir)?; + let res = self.path().or(path).map(|path| { let escaped_path = helix_core::path::escape_path(path); undo_dir.join(escaped_path) - }) + }); + Ok(res) } pub fn load_history(&mut self) -> anyhow::Result<()> { @@ -744,7 +746,7 @@ impl Document { } if let Some(mut undo_file) = self - .undo_file(None) + .undo_file(None)? .and_then(|path| std::fs::File::open(path).ok()) { if undo_file.metadata()?.len() != 0 { From 9dd7597704ec4da4ea8d237439c66876737f7aee Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Mon, 13 Feb 2023 00:14:54 -0500 Subject: [PATCH 18/35] don't reload history --- helix-term/src/commands/typed.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index 04ec0e1e0c83..ebf9a12784d2 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -1177,10 +1177,6 @@ fn reload( .map(|_| { view.ensure_cursor_in_view(doc, scrolloff); })?; - if let Err(e) = doc.load_history() { - cx.editor - .set_error(Cow::Owned(format!("failed to deserialize history: {e}"))); - } Ok(()) } From 9f0f7a26515acf8459ecc0d55e12a99fccb8888c Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Mon, 13 Feb 2023 00:17:42 -0500 Subject: [PATCH 19/35] add to book --- book/src/configuration.md | 1 + 1 file changed, 1 insertion(+) diff --git a/book/src/configuration.md b/book/src/configuration.md index 7514a3d0fcc3..d25f846682c9 100644 --- a/book/src/configuration.md +++ b/book/src/configuration.md @@ -57,6 +57,7 @@ on unix operating systems. | `rulers` | List of column positions at which to display the rulers. 
Can be overridden by language specific `rulers` in `languages.toml` file. | `[]` | | `bufferline` | Renders a line at the top of the editor displaying open buffers. Can be `always`, `never` or `multiple` (only shown if more than one buffer is in use) | `never` | | `color-modes` | Whether to color the mode indicator with different colors depending on the mode itself | `false` | +| `persistent-undo` | Whether to save/load undo history when writing/opening a file | `false` | ### `[editor.statusline]` Section From c4aa3b8a122d9f04222e6010683d21c3db7b8c42 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Mon, 13 Feb 2023 00:23:12 -0500 Subject: [PATCH 20/35] improve quickcheck test --- helix-core/src/history.rs | 36 ++++++++++++++++++++++++++---------- 1 file changed, 26 insertions(+), 10 deletions(-) diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index b5a3cf5f3e70..618578acbc4e 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -51,7 +51,7 @@ pub struct State { /// delete, we also store an inversion of the transaction. /// /// Using time to navigate the history: -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq)] pub struct History { revisions: Vec, current: usize, @@ -69,6 +69,15 @@ struct Revision { timestamp: Instant, } +impl PartialEq for Revision { + fn eq(&self, other: &Self) -> bool { + self.parent == other.parent + && self.last_child == other.last_child + && self.transaction == other.transaction + && self.inversion == other.inversion + } +} + impl Default for History { fn default() -> Self { // Add a dummy root revision with empty transaction @@ -88,7 +97,6 @@ impl Default for History { impl Revision { fn serialize(&self, writer: &mut W) -> std::io::Result<()> { write_usize(writer, self.parent)?; - write_usize(writer, self.last_child.map(|n| n.get()).unwrap_or(0))?; self.transaction.serialize(writer)?; self.inversion.serialize(writer)?; @@ -97,15 +105,11 @@ impl Revision { fn deserialize(reader: &mut R, timestamp: Instant) -> std::io::Result { let parent = read_usize(reader)?; - let last_child = match read_usize(reader)? 
{ - 0 => None, - n => Some(unsafe { NonZeroUsize::new_unchecked(n) }), - }; let transaction = Arc::new(Transaction::deserialize(reader)?); let inversion = Arc::new(Transaction::deserialize(reader)?); Ok(Revision { parent, - last_child, + last_child: None, transaction, inversion, timestamp, @@ -180,12 +184,24 @@ impl History { )); } - let revisions = read_vec(reader, |reader| Revision::deserialize(reader, timestamp))?; + let len = read_usize(reader)?; + let mut revisions: Vec = Vec::with_capacity(len); + for _ in 0..len { + let res = Revision::deserialize(reader, timestamp)?; + assert!(res.parent < revisions.len()); + revisions[res.parent].last_child = NonZeroUsize::new(revisions.len()); + revisions.push(res); + } + + // let mut revisions = read_vec(reader, |reader| Revision::deserialize(reader, timestamp))?; + let history = History { current, revisions }; Ok((last_saved_revision, history)) } } +} +impl History { pub fn commit_revision(&mut self, transaction: &Transaction, original: &State) { self.commit_revision_at_timestamp(transaction, original, Instant::now()); } @@ -761,8 +777,8 @@ mod test { let mut buf = Vec::new(); let file = tempfile::NamedTempFile::new().unwrap(); history.serialize(&mut buf, file.path(), 0).unwrap(); - History::deserialize(&mut buf.as_slice(), file.path()).unwrap(); - true + let (_, res) = History::deserialize(&mut buf.as_slice(), file.path()).unwrap(); + history == res } ); } From 2c343a76197519cf414e973956cd6df9aac3831c Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Thu, 16 Feb 2023 12:29:44 -0500 Subject: [PATCH 21/35] figure out last child in memory --- docs/architecture.md | 16 +----- helix-core/src/history.rs | 110 ++++++++++++++++++++++++------------- helix-view/src/document.rs | 11 +++- 3 files changed, 82 insertions(+), 55 deletions(-) diff --git a/docs/architecture.md b/docs/architecture.md index 5d33cbac06b7..7978f422edd5 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -12,9 +12,6 @@ This document contains a high-level overview of Helix internals. -> NOTE: Use `cargo doc --open` for API documentation as well as dependency -> documentation. - ## Core The core contains basic building blocks used to construct the editor. It is @@ -23,21 +20,13 @@ are functional: most operations won't modify data in place but instead return a new copy. The main data structure used for representing buffers is a `Rope`. We re-export -the excellent [ropey](https://github.com/cessen/ropey) library. Ropes are cheap +asaddthe excellent [ropey](https://github.com/cessen/ropey) library. Ropes are cheap to clone, and allow us to easily make snapshots of a text state. Multiple selections are a core editing primitive. Document selections are represented by a `Selection`. Each `Range` in the selection consists of a moving `head` and an immovable `anchor`. A single cursor in the editor is simply -a selection with a single range, with the head and the anchor in the same -position. - -Ropes are modified by constructing an OT-like `Transaction`. It represents -a single coherent change to the document and can be applied to the rope. -A transaction can be inverted to produce an undo. Selections and marks can be -mapped over a transaction to translate to a position in the new text state after -applying the transaction. - +a selction with a single range, with the head and the anchor in the same > NOTE: `Transaction::change`/`Transaction::change_by_selection` is the main > interface used to generate text edits. 
@@ -108,4 +97,3 @@ The `main` function sets up a new `Application` that runs the event loop. ## TUI / Term -TODO: document Component and rendering related stuff diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index 618578acbc4e..fc422a8ca369 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -2,7 +2,7 @@ use crate::parse::*; use crate::{Assoc, ChangeSet, Range, Rope, Selection, Transaction}; use once_cell::sync::Lazy; use regex::Regex; -use std::io::{Read, Write}; +use std::io::{Read, Seek, SeekFrom, Write}; use std::num::NonZeroUsize; use std::path::Path; use std::sync::Arc; @@ -117,7 +117,8 @@ impl Revision { } } -const HEADER_TAG: &str = "Helix Undofile 1\n"; +// Temporarily 3 for review. +const HEADER_TAG: &str = "Helix Undofile 3\n"; fn get_hash(reader: &mut R) -> std::io::Result<[u8; 20]> { const BUF_SIZE: usize = 8192; @@ -136,28 +137,60 @@ fn get_hash(reader: &mut R) -> std::io::Result<[u8; 20]> { } impl History { - pub fn serialize( + pub fn serialize( &self, writer: &mut W, path: &Path, last_saved_revision: usize, + append: bool, ) -> std::io::Result<()> { - write_string(writer, HEADER_TAG)?; - write_usize(writer, self.current)?; - write_usize(writer, last_saved_revision)?; - - let last_mtime = std::fs::metadata(path)? + let mtime = std::fs::metadata(path)? .modified()? .duration_since(std::time::UNIX_EPOCH) .unwrap() .as_secs(); - write_u64(writer, last_mtime)?; + write_string(writer, HEADER_TAG)?; + write_usize(writer, self.current)?; + write_usize(writer, last_saved_revision)?; + write_u64(writer, mtime)?; writer.write_all(&get_hash(&mut std::fs::File::open(path)?)?)?; - write_vec(writer, &self.revisions, |writer, rev| rev.serialize(writer))?; + + if append { + let pos = writer.stream_position()?; + let len = read_usize(writer)?; + writer.seek(SeekFrom::Start(pos))?; + write_usize(writer, self.revisions.len())?; + writer.seek(SeekFrom::End(0))?; + for rev in &self.revisions[len..] 
{ + rev.serialize(writer)?; + } + } else { + write_vec(writer, &self.revisions, |writer, rev| rev.serialize(writer))?; + } Ok(()) } pub fn deserialize(reader: &mut R, path: &Path) -> std::io::Result<(usize, Self)> { + let (current, last_saved_revision) = Self::read_header(reader, path)?; + let timestamp = Instant::now(); + let len = read_usize(reader)?; + let mut revisions: Vec = Vec::with_capacity(len); + for _ in 0..len { + let res = Revision::deserialize(reader, timestamp)?; + debug_assert!(res.parent <= revisions.len()); + + if !revisions.is_empty() { + revisions.get_mut(res.parent).unwrap().last_child = + NonZeroUsize::new(revisions.len()); + } + revisions.push(res); + } + + let history = History { current, revisions }; + Ok((last_saved_revision, history)) + } + + pub fn read_header(reader: &mut R, path: &Path) -> std::io::Result<(usize, usize)> { let header = read_string(reader)?; if HEADER_TAG != header { Err(std::io::Error::new( @@ -165,7 +198,6 @@ impl History { "missing undofile header", )) } else { - let timestamp = Instant::now(); let current = read_usize(reader)?; let last_saved_revision = read_usize(reader)?; let mtime = read_u64(reader)?; @@ -183,20 +215,7 @@ impl History { "outdated undo file", )); } - - let len = read_usize(reader)?; - let mut revisions: Vec = Vec::with_capacity(len); - for _ in 0..len { - let res = Revision::deserialize(reader, timestamp)?; - assert!(res.parent < revisions.len()); - revisions[res.parent].last_child = NonZeroUsize::new(revisions.len()); - revisions.push(res); - } - - // let mut revisions = read_vec(reader, |reader| Revision::deserialize(reader, timestamp))?; - - let history = History { current, revisions }; - Ok((last_saved_revision, history)) + Ok((current, last_saved_revision)) } } } @@ -760,24 +779,37 @@ mod test { } quickcheck!( - fn serde_history(original: String, changes: Vec) -> bool { + fn serde_history(original: String, changes_a: Vec, changes_b: Vec) -> bool { + fn create_changes(history: &mut History, doc: &mut Rope, changes: Vec) { + for c in changes.into_iter().map(Rope::from) { + let transaction = crate::diff::compare_ropes(&doc, &c); + let state = State { + doc: doc.clone(), + selection: Selection::point(0), + }; + history.commit_revision(&transaction, &state); + *doc = c; + } + } + let mut history = History::default(); let mut original = Rope::from(original); - for c in changes.into_iter().map(Rope::from) { - let transaction = crate::diff::compare_ropes(&original, &c); - let state = State { - doc: original, - selection: Selection::point(0), - }; - history.commit_revision(&transaction, &state); - original = c; - } - - let mut buf = Vec::new(); + create_changes(&mut history, &mut original, changes_a); + let mut cursor = std::io::Cursor::new(Vec::new()); let file = tempfile::NamedTempFile::new().unwrap(); - history.serialize(&mut buf, file.path(), 0).unwrap(); - let (_, res) = History::deserialize(&mut buf.as_slice(), file.path()).unwrap(); + history + .serialize(&mut cursor, file.path(), 0, false) + .unwrap(); + let (_, res) = History::deserialize(&mut cursor, file.path()).unwrap(); + assert_eq!(history, res); + + create_changes(&mut history, &mut original, changes_b); + let mut cursor = std::io::Cursor::new(Vec::new()); + history + .serialize(&mut cursor, file.path(), 0, true) + .unwrap(); + let (_, res) = History::deserialize(&mut cursor, file.path()).unwrap(); history == res } ); diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 8feb0c415621..9861ad9e6fb4 100644 --- a/helix-view/src/document.rs +++ 
b/helix-view/src/document.rs @@ -625,8 +625,15 @@ impl Document { let res = { let path = path.clone(); tokio::task::spawn_blocking(move || -> anyhow::Result<()> { - let mut undo_file = std::fs::File::create(&undo_file)?; - history.serialize(&mut undo_file, &path, current_rev)?; + use std::fs; + let append = + History::read_header(&mut fs::File::open(&undo_file)?, &path).is_ok(); + let mut undo_file = std::fs::OpenOptions::new() + .write(true) + .truncate(!append) + .read(true) + .open(&undo_file)?; + history.serialize(&mut undo_file, &path, current_rev, append)?; Ok(()) }) .await From d8be1aa1684293a6dd9f753b8e4cd8f97274b5e1 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sat, 18 Feb 2023 11:46:32 -0500 Subject: [PATCH 22/35] append history if possible --- helix-core/src/history.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index fc422a8ca369..d5a4b711ed82 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -156,9 +156,8 @@ impl History { writer.write_all(&get_hash(&mut std::fs::File::open(path)?)?)?; if append { - let pos = writer.stream_position()?; let len = read_usize(writer)?; - writer.seek(SeekFrom::Start(pos))?; + writer.seek(SeekFrom::Current(-8))?; write_usize(writer, self.revisions.len())?; writer.seek(SeekFrom::End(0))?; for rev in &self.revisions[len..] { From b49fa5f29b91af655f85e2d4b7d95aa139f251c0 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sat, 18 Feb 2023 11:54:32 -0500 Subject: [PATCH 23/35] update test --- helix-core/src/history.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index d5a4b711ed82..6e74689afc3c 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -800,14 +800,16 @@ mod test { history .serialize(&mut cursor, file.path(), 0, false) .unwrap(); + cursor.set_position(0); let (_, res) = History::deserialize(&mut cursor, file.path()).unwrap(); assert_eq!(history, res); create_changes(&mut history, &mut original, changes_b); - let mut cursor = std::io::Cursor::new(Vec::new()); + cursor.set_position(0); history .serialize(&mut cursor, file.path(), 0, true) .unwrap(); + cursor.set_position(0); let (_, res) = History::deserialize(&mut cursor, file.path()).unwrap(); history == res } From b83fb40a322626e7153249c835ac6d6ec2af9eaf Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sat, 18 Feb 2023 11:59:36 -0500 Subject: [PATCH 24/35] update integration --- helix-core/Cargo.toml | 2 +- helix-core/src/history.rs | 4 ++-- helix-loader/Cargo.toml | 3 +++ helix-loader/src/lib.rs | 10 +++++++--- 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index 8da9fbca7cc0..9419d1276e76 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -12,7 +12,7 @@ include = ["src/**/*", "README.md"] [features] unicode-lines = ["ropey/unicode_lines"] -integration = [] +integration = ["helix-loader/integration"] [dependencies] helix-loader = { version = "0.6", path = "../helix-loader" } diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index 6e74689afc3c..4a666fbd4160 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -781,7 +781,7 @@ mod test { fn serde_history(original: String, changes_a: Vec, changes_b: Vec) -> bool { fn create_changes(history: &mut History, doc: &mut Rope, changes: Vec) { for c in changes.into_iter().map(Rope::from) { - let transaction = 
crate::diff::compare_ropes(&doc, &c); + let transaction = crate::diff::compare_ropes(doc, &c); let state = State { doc: doc.clone(), selection: Selection::point(0), @@ -804,8 +804,8 @@ mod test { let (_, res) = History::deserialize(&mut cursor, file.path()).unwrap(); assert_eq!(history, res); - create_changes(&mut history, &mut original, changes_b); cursor.set_position(0); + create_changes(&mut history, &mut original, changes_b); history .serialize(&mut cursor, file.path(), 0, true) .unwrap(); diff --git a/helix-loader/Cargo.toml b/helix-loader/Cargo.toml index 9225ad1a2235..36f1532d20d4 100644 --- a/helix-loader/Cargo.toml +++ b/helix-loader/Cargo.toml @@ -13,6 +13,9 @@ homepage = "https://helix-editor.com" name = "hx-loader" path = "src/main.rs" +[features] +integration = [] + [dependencies] anyhow = "1" serde = { version = "1.0", features = ["derive"] } diff --git a/helix-loader/src/lib.rs b/helix-loader/src/lib.rs index 8dc2928adc9f..b61138c837a4 100644 --- a/helix-loader/src/lib.rs +++ b/helix-loader/src/lib.rs @@ -70,9 +70,13 @@ pub fn local_config_dirs() -> Vec { } pub fn cache_dir() -> PathBuf { - // TODO: allow env var override - let strategy = choose_base_strategy().expect("Unable to find the config directory!"); - let mut path = strategy.cache_dir(); + let mut path = if cfg!(feature = "integration") { + std::env::temp_dir() + } else { + // TODO: allow env var override + let strategy = choose_base_strategy().expect("Unable to find the config directory!"); + strategy.cache_dir() + }; path.push("helix"); path } From 251bc4c8b43e3ac5fb4efb6b0bdd6689f0add3e3 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sat, 18 Feb 2023 13:02:34 -0500 Subject: [PATCH 25/35] reload history with reload command --- helix-core/src/history.rs | 50 +++++++++++++++--------- helix-term/src/application.rs | 7 ++-- helix-term/tests/test/commands.rs | 1 + helix-view/src/document.rs | 63 +++++++++++++++++++++---------- 4 files changed, 80 insertions(+), 41 deletions(-) diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index 4a666fbd4160..ac45ecf7adaa 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -137,12 +137,12 @@ fn get_hash(reader: &mut R) -> std::io::Result<[u8; 20]> { } impl History { - pub fn serialize( + pub fn serialize( &self, writer: &mut W, path: &Path, + revision: usize, last_saved_revision: usize, - append: bool, ) -> std::io::Result<()> { let mtime = std::fs::metadata(path)? .modified()? @@ -151,20 +151,14 @@ impl History { .as_secs(); write_string(writer, HEADER_TAG)?; write_usize(writer, self.current)?; - write_usize(writer, last_saved_revision)?; + write_usize(writer, revision)?; write_u64(writer, mtime)?; writer.write_all(&get_hash(&mut std::fs::File::open(path)?)?)?; - if append { - let len = read_usize(writer)?; - writer.seek(SeekFrom::Current(-8))?; - write_usize(writer, self.revisions.len())?; - writer.seek(SeekFrom::End(0))?; - for rev in &self.revisions[len..] { - rev.serialize(writer)?; - } - } else { - write_vec(writer, &self.revisions, |writer, rev| rev.serialize(writer))?; + write_usize(writer, self.revisions.len())?; + writer.seek(SeekFrom::End(0))?; + for rev in &self.revisions[last_saved_revision..] 
{ + rev.serialize(writer)?; } Ok(()) } @@ -189,6 +183,23 @@ impl History { Ok((last_saved_revision, history)) } + pub fn merge(&mut self, mut other: History, offset: usize) -> std::io::Result<()> { + let revisions = self.revisions.split_off(offset); + let len = other.revisions.len(); + for r in revisions { + let parent = if r.parent < offset { + r.parent + } else { + len + (r.parent - offset) + }; + other.revisions.get_mut(parent).unwrap().last_child = + NonZeroUsize::new(other.revisions.len()); + other.revisions.push(r); + } + self.revisions = other.revisions; + Ok(()) + } + pub fn read_header(reader: &mut R, path: &Path) -> std::io::Result<(usize, usize)> { let header = read_string(reader)?; if HEADER_TAG != header { @@ -259,6 +270,11 @@ impl History { self.current == 0 } + #[inline] + pub fn is_empty(&self) -> bool { + self.revisions.len() <= 1 + } + /// Returns the changes since the given revision composed into a transaction. /// Returns None if there are no changes between the current and given revisions. pub fn changes_since(&self, revision: usize) -> Option { @@ -797,17 +813,17 @@ mod test { create_changes(&mut history, &mut original, changes_a); let mut cursor = std::io::Cursor::new(Vec::new()); let file = tempfile::NamedTempFile::new().unwrap(); - history - .serialize(&mut cursor, file.path(), 0, false) - .unwrap(); + history.serialize(&mut cursor, file.path(), 0, 0).unwrap(); cursor.set_position(0); let (_, res) = History::deserialize(&mut cursor, file.path()).unwrap(); assert_eq!(history, res); + let last_saved_revision = history.revisions.len(); + cursor.set_position(0); create_changes(&mut history, &mut original, changes_b); history - .serialize(&mut cursor, file.path(), 0, true) + .serialize(&mut cursor, file.path(), 0, last_saved_revision) .unwrap(); cursor.set_position(0); let (_, res) = History::deserialize(&mut cursor, file.path()).unwrap(); diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs index f7964162127b..1c88cba3cd95 100644 --- a/helix-term/src/application.rs +++ b/helix-term/src/application.rs @@ -515,10 +515,6 @@ impl Application { } }; - if doc_save_event.serialize_error { - self.editor.set_error("failed to serialize history"); - } - let doc = match self.editor.document_mut(doc_save_event.doc_id) { None => { warn!( @@ -570,6 +566,9 @@ impl Application { lines, bytes )); + if doc_save_event.serialize_error { + self.editor.set_error("failed to serialize history"); + } } #[inline(always)] diff --git a/helix-term/tests/test/commands.rs b/helix-term/tests/test/commands.rs index 44f6978a893e..2cfe3ac4053e 100644 --- a/helix-term/tests/test/commands.rs +++ b/helix-term/tests/test/commands.rs @@ -486,6 +486,7 @@ async fn test_persistent_undo() -> anyhow::Result<()> { .with_file(file.path(), None) .build()?; + // TODO: Test if the history file is valid. 
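These tests enable the feature programmatically by setting `config.editor.persistent_undo = true` before building the app; outside of tests the same switch is the `persistent-undo` key documented in the book entry added earlier in this series. A minimal sketch of a user `config.toml`, assuming the usual `[editor]` table:

```toml
# Opt into writing/reading undo history alongside the file (off by default).
[editor]
persistent-undo = true
```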
test_key_sequence( &mut app, Some(&format!( diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 9861ad9e6fb4..4ddbcf44eba7 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -20,6 +20,7 @@ use std::path::{Path, PathBuf}; use std::str::FromStr; use std::sync::Arc; use std::time::SystemTime; +use tokio::fs::OpenOptions; use helix_core::{ encoding, @@ -590,8 +591,10 @@ impl Document { .config .load() .persistent_undo - .then(|| self.history.get_mut().clone()); - let undo_file = self.undo_file(Some(&path))?.unwrap(); + .then(|| self.history.get_mut().clone()) + .filter(|history| !history.is_empty()); + let undofile_path = self.undo_file(Some(&path))?.unwrap(); + let last_saved_revision = self.get_last_saved_revision(); // We encode the file according to the `Document`'s encoding. let future = async move { use tokio::{fs, fs::File}; @@ -624,16 +627,26 @@ impl Document { if let Some(history) = history { let res = { let path = path.clone(); + let mut undofile = OpenOptions::new() + .write(true) + .read(true) + .create(true) + .open(&undofile_path) + .await? + .into_std() + .await; tokio::task::spawn_blocking(move || -> anyhow::Result<()> { - use std::fs; - let append = - History::read_header(&mut fs::File::open(&undo_file)?, &path).is_ok(); - let mut undo_file = std::fs::OpenOptions::new() - .write(true) - .truncate(!append) - .read(true) - .open(&undo_file)?; - history.serialize(&mut undo_file, &path, current_rev, append)?; + if History::deserialize(&mut std::fs::File::open(&undofile_path)?, &path) + .is_ok() + { + undofile.set_len(0)?; + } + history.serialize( + &mut undofile, + &path, + current_rev, + last_saved_revision, + )?; Ok(()) }) .await @@ -717,14 +730,16 @@ impl Document { let mut file = std::fs::File::open(&path)?; let (rope, ..) = from_reader(&mut file, Some(encoding))?; - // Calculate the difference between the buffer and source text, and apply it. - // This is not considered a modification of the contents of the file regardless - // of the encoding. - let transaction = helix_core::diff::compare_ropes(self.text(), &rope); - self.apply(&transaction, view.id); - self.append_changes_to_history(view); - self.reset_modified(); - + if let Err(e) = self.load_history() { + log::error!("{}", e); + // Calculate the difference between the buffer and source text, and apply it. + // This is not considered a modification of the contents of the file regardless + // of the encoding. 
+ let transaction = helix_core::diff::compare_ropes(self.text(), &rope); + self.apply(&transaction, view.id); + self.append_changes_to_history(view); + self.reset_modified(); + } self.last_saved_time = SystemTime::now(); self.detect_indent_and_line_ending(); @@ -761,7 +776,15 @@ impl Document { &mut undo_file, self.path().unwrap(), )?; - self.history.set(history); + + if self.history.get_mut().is_empty() + || self.get_current_revision() == last_saved_revision + { + self.history.set(history); + } else { + let offset = self.get_last_saved_revision() + 1; + self.history.get_mut().merge(history, offset)?; + } self.set_last_saved_revision(last_saved_revision); } } From 7140e6ac910403222b37b8a02b43578287164142 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sat, 18 Feb 2023 20:50:22 -0500 Subject: [PATCH 26/35] remove libc dependency --- helix-core/Cargo.toml | 3 --- 1 file changed, 3 deletions(-) diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index 9419d1276e76..8465f1e75036 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -49,9 +49,6 @@ chrono = { version = "0.4", default-features = false, features = ["alloc", "std" etcetera = "0.4" textwrap = "0.16.0" -[target.'cfg(unix)'.dependencies] -libc = "0.2" - [dev-dependencies] quickcheck = { version = "1", default-features = false } tempfile = "3.3.0" From ca3948db19f49566396ee4b54b907d18eb13f15b Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sat, 18 Feb 2023 20:55:25 -0500 Subject: [PATCH 27/35] delete file --- docs/architecture.md | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/docs/architecture.md b/docs/architecture.md index 7978f422edd5..5d33cbac06b7 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -12,6 +12,9 @@ This document contains a high-level overview of Helix internals. +> NOTE: Use `cargo doc --open` for API documentation as well as dependency +> documentation. + ## Core The core contains basic building blocks used to construct the editor. It is @@ -20,13 +23,21 @@ are functional: most operations won't modify data in place but instead return a new copy. The main data structure used for representing buffers is a `Rope`. We re-export -asaddthe excellent [ropey](https://github.com/cessen/ropey) library. Ropes are cheap +the excellent [ropey](https://github.com/cessen/ropey) library. Ropes are cheap to clone, and allow us to easily make snapshots of a text state. Multiple selections are a core editing primitive. Document selections are represented by a `Selection`. Each `Range` in the selection consists of a moving `head` and an immovable `anchor`. A single cursor in the editor is simply -a selction with a single range, with the head and the anchor in the same +a selection with a single range, with the head and the anchor in the same +position. + +Ropes are modified by constructing an OT-like `Transaction`. It represents +a single coherent change to the document and can be applied to the rope. +A transaction can be inverted to produce an undo. Selections and marks can be +mapped over a transaction to translate to a position in the new text state after +applying the transaction. + > NOTE: `Transaction::change`/`Transaction::change_by_selection` is the main > interface used to generate text edits. @@ -97,3 +108,4 @@ The `main` function sets up a new `Application` that runs the event loop. 
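The restored paragraph above describes a `Transaction` as a single coherent edit that can be applied to a rope and inverted to produce an undo, which is exactly the `transaction`/`inversion` pair each serialized `Revision` stores. A minimal sketch of that cycle, assuming the `helix_core` API used elsewhere in this series (`Transaction::change`, `apply`, and `invert`):

```rust
use helix_core::{Rope, Transaction};

fn transaction_roundtrip() {
    let mut doc = Rope::from("hello");

    // Describe the edit as (from, to, replacement) spans over the old text.
    let tx = Transaction::change(&doc, [(5, 5, Some(", world".into()))].into_iter());

    // The inversion must be computed against the pre-edit text.
    let undo = tx.invert(&doc);

    tx.apply(&mut doc);
    assert_eq!(doc, "hello, world");

    // Applying the inversion rolls the edit back.
    undo.apply(&mut doc);
    assert_eq!(doc, "hello");
}
```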
## TUI / Term +TODO: document Component and rendering related stuff From bbf41d26474d51b2e5f608a09773a957215dfc98 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sun, 19 Feb 2023 10:42:32 -0500 Subject: [PATCH 28/35] write reload unit test --- Cargo.lock | 3 +- helix-core/src/history.rs | 3 +- helix-loader/src/lib.rs | 2 +- helix-view/Cargo.toml | 2 + helix-view/src/document.rs | 96 +++++++++++++++++++++++++++++++++++++- 5 files changed, 102 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 015f667853da..3879c4d2d9db 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1124,7 +1124,6 @@ dependencies = [ "hashbrown 0.13.2", "helix-loader", "imara-diff", - "libc", "log", "once_cell", "quickcheck", @@ -1280,9 +1279,11 @@ dependencies = [ "libc", "log", "once_cell", + "quickcheck", "serde", "serde_json", "slotmap", + "tempfile", "tokio", "tokio-stream", "toml", diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index ac45ecf7adaa..04b03974aa53 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -118,7 +118,7 @@ impl Revision { } // Temporarily 3 for review. -const HEADER_TAG: &str = "Helix Undofile 3\n"; +const HEADER_TAG: &str = "Helix Undofile 4\n"; fn get_hash(reader: &mut R) -> std::io::Result<[u8; 20]> { const BUF_SIZE: usize = 8192; @@ -155,6 +155,7 @@ impl History { write_u64(writer, mtime)?; writer.write_all(&get_hash(&mut std::fs::File::open(path)?)?)?; + // Append new revisions to the end of the file. write_usize(writer, self.revisions.len())?; writer.seek(SeekFrom::End(0))?; for rev in &self.revisions[last_saved_revision..] { diff --git a/helix-loader/src/lib.rs b/helix-loader/src/lib.rs index b61138c837a4..51d455891bc5 100644 --- a/helix-loader/src/lib.rs +++ b/helix-loader/src/lib.rs @@ -70,7 +70,7 @@ pub fn local_config_dirs() -> Vec { } pub fn cache_dir() -> PathBuf { - let mut path = if cfg!(feature = "integration") { + let mut path = if cfg!(feature = "integration") || cfg!(test) { std::env::temp_dir() } else { // TODO: allow env var override diff --git a/helix-view/Cargo.toml b/helix-view/Cargo.toml index 17e07e9a2dd6..b22078eb5198 100644 --- a/helix-view/Cargo.toml +++ b/helix-view/Cargo.toml @@ -52,4 +52,6 @@ clipboard-win = { version = "4.5", features = ["std"] } libc = "0.2" [dev-dependencies] +quickcheck = { version = "1", default-features = false } +tempfile = "3" helix-tui = { path = "../helix-tui" } diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 4ddbcf44eba7..77175820c569 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -636,6 +636,7 @@ impl Document { .into_std() .await; tokio::task::spawn_blocking(move || -> anyhow::Result<()> { + // Truncate the file if it's not a valid undofile. 
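The save path above clones the in-memory `History` and serializes it into the undo file on a blocking task; the round trip it relies on can be exercised against an in-memory writer, mirroring the crate's own `serde_history` quickcheck test. A minimal sketch, assuming the signatures as of this patch (`serialize` takes the document path so the header can record its mtime and content hash; `deserialize` returns the saved revision together with the rebuilt history):

```rust
use std::io::Cursor;

use helix_core::{
    diff::compare_ropes,
    history::{History, State},
    Rope, Selection,
};

fn history_roundtrip() -> std::io::Result<()> {
    // The header hashes the on-disk file, so serialize against a real path.
    let file = tempfile::NamedTempFile::new()?;

    let old = Rope::from("hello");
    let new = Rope::from("hello, world!");

    // One revision on top of the dummy root: the diff from `old` to `new`.
    let tx = compare_ropes(&old, &new);
    let state = State {
        doc: old,
        selection: Selection::point(0),
    };
    let mut history = History::default();
    history.commit_revision(&tx, &state);

    let mut buf = Cursor::new(Vec::new());
    history.serialize(&mut buf, file.path(), 0, 0)?;

    buf.set_position(0);
    let (_last_saved_revision, restored) = History::deserialize(&mut buf, file.path())?;
    assert_eq!(history, restored);
    Ok(())
}
```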
if History::deserialize(&mut std::fs::File::open(&undofile_path)?, &path) .is_ok() { @@ -1386,7 +1387,8 @@ impl Display for FormatterError { #[cfg(test)] mod test { - use arc_swap::ArcSwap; + use arc_swap::{access::Map, ArcSwap}; + use quickcheck::Gen; use super::*; @@ -1556,6 +1558,98 @@ mod test { ); } + #[tokio::test(flavor = "multi_thread")] + async fn reload_history() { + let test_fn: fn(Vec) -> bool = |changes| -> bool { + let len = changes.len() / 3; + let mut original = Rope::new(); + let mut iter = changes.into_iter(); + + let changes_a: Vec<_> = iter + .by_ref() + .take(len) + .map(|c| { + let c = Rope::from(c); + let transaction = helix_core::diff::compare_ropes(&original, &c); + original = c; + transaction + }) + .collect(); + let mut original_concurrent = original.clone(); + + let changes_b: Vec<_> = iter + .by_ref() + .take(len) + .map(|c| { + let c = Rope::from(c); + let transaction = helix_core::diff::compare_ropes(&original, &c); + original = c; + transaction + }) + .collect(); + let changes_c: Vec<_> = iter + .take(len) + .map(|c| { + let c = Rope::from(c); + let transaction = helix_core::diff::compare_ropes(&original_concurrent, &c); + original_concurrent = c; + transaction + }) + .collect(); + + let file = tempfile::NamedTempFile::new().unwrap(); + let mut config = Config::default(); + config.persistent_undo = true; + + let view_id = ViewId::default(); + let config = Arc::new(ArcSwap::new(Arc::new(config))); + let mut doc_1 = Document::open(file.path(), None, None, config.clone()).unwrap(); + doc_1.ensure_view_init(view_id); + + // Make changes & save document A + for c in changes_a { + doc_1.apply(&c, view_id); + } + helix_lsp::block_on(doc_1.save::(None, true).unwrap()).unwrap(); + + let mut doc_2 = Document::open(file.path(), None, None, config.clone()).unwrap(); + let mut doc_3 = Document::open(file.path(), None, None, config.clone()).unwrap(); + doc_2.ensure_view_init(view_id); + doc_3.ensure_view_init(view_id); + + // Make changes in A and B at the same time. + for c in changes_b { + doc_1.apply(&c, view_id); + } + + for c in changes_c { + doc_2.apply(&c, view_id); + } + helix_lsp::block_on(doc_2.save::(None, true).unwrap()).unwrap(); + + doc_1.load_history().unwrap(); + doc_3.load_history().unwrap(); + + assert_eq!(doc_2.history.get_mut(), doc_3.history.get_mut()); + + helix_lsp::block_on(doc_1.save::(None, true).unwrap()).unwrap(); + doc_2.load_history().unwrap(); + doc_3.load_history().unwrap(); + doc_1.history.get_mut() == doc_2.history.get_mut() + && doc_1.history.get_mut() == doc_3.history.get_mut() + }; + let handles: Vec<_> = (0..100) + .map(|_| { + tokio::task::spawn_blocking(move || { + quickcheck::QuickCheck::new() + .max_tests(1) + .quickcheck(test_fn); + }) + }) + .collect(); + futures_util::future::try_join_all(handles).await.unwrap(); + } + macro_rules! 
decode { ($name:ident, $label:expr, $label_override:expr) => { #[test] From 8a60b79a2c9e279d8ad4bcc3b6e61940b0a7b3f6 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sun, 19 Feb 2023 21:16:12 -0500 Subject: [PATCH 29/35] add merge test --- helix-core/src/history.rs | 29 ++++++++++++++++++++++++++++- helix-view/src/document.rs | 8 +++++--- 2 files changed, 33 insertions(+), 4 deletions(-) diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index 04b03974aa53..c8418a7f51da 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -547,6 +547,8 @@ impl std::str::FromStr for UndoKind { #[cfg(test)] mod test { + use std::io::Cursor; + use quickcheck::quickcheck; use super::*; @@ -794,6 +796,31 @@ mod test { ); } + #[test] + fn merge_history() { + let file = tempfile::NamedTempFile::new().unwrap(); + let mut undo = Cursor::new(Vec::new()); + let mut history_1 = History::default(); + let mut history_2 = History::default(); + + let state = State { + doc: Rope::new(), + selection: Selection::point(0), + }; + let tx = Transaction::change( + &Rope::new(), + [(0, 0, Some("Hello, world!".into()))].into_iter(), + ); + history_1.commit_revision(&tx, &state); + history_1.serialize(&mut undo, file.path(), 0, 0).unwrap(); + undo.seek(SeekFrom::Start(0)).unwrap(); + + let saved_history = History::deserialize(&mut undo, file.path()).unwrap().1; + history_2.merge(saved_history, 1).unwrap(); + + assert_eq!(history_1.revisions, history_2.revisions); + } + quickcheck!( fn serde_history(original: String, changes_a: Vec, changes_b: Vec) -> bool { fn create_changes(history: &mut History, doc: &mut Rope, changes: Vec) { @@ -812,7 +839,7 @@ mod test { let mut original = Rope::from(original); create_changes(&mut history, &mut original, changes_a); - let mut cursor = std::io::Cursor::new(Vec::new()); + let mut cursor = Cursor::new(Vec::new()); let file = tempfile::NamedTempFile::new().unwrap(); history.serialize(&mut cursor, file.path(), 0, 0).unwrap(); cursor.set_position(0); diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 77175820c569..2c6f64c6c41a 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -1598,8 +1598,10 @@ mod test { .collect(); let file = tempfile::NamedTempFile::new().unwrap(); - let mut config = Config::default(); - config.persistent_undo = true; + let config = Config { + persistent_undo: true, + ..Default::default() + }; let view_id = ViewId::default(); let config = Arc::new(ArcSwap::new(Arc::new(config))); @@ -1613,7 +1615,7 @@ mod test { helix_lsp::block_on(doc_1.save::(None, true).unwrap()).unwrap(); let mut doc_2 = Document::open(file.path(), None, None, config.clone()).unwrap(); - let mut doc_3 = Document::open(file.path(), None, None, config.clone()).unwrap(); + let mut doc_3 = Document::open(file.path(), None, None, config).unwrap(); doc_2.ensure_view_init(view_id); doc_3.ensure_view_init(view_id); From cf81ab797fa33f7ed7629325f753cde463ba250e Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Mon, 20 Feb 2023 10:45:32 -0500 Subject: [PATCH 30/35] reload histories if persistent_undo is dynamically enabled --- helix-core/src/history.rs | 25 +++++++++++++++++++++++-- helix-term/src/application.rs | 26 ++++++++++++++++++++++++++ helix-view/src/document.rs | 7 +++---- 3 files changed, 52 insertions(+), 6 deletions(-) diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index c8418a7f51da..93e96ff4ddd9 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -96,6 +96,7 @@ impl 
Default for History { impl Revision { fn serialize(&self, writer: &mut W) -> std::io::Result<()> { + // `timestamp` is ignored since `Instant`s can't be serialized. write_usize(writer, self.parent)?; self.transaction.serialize(writer)?; self.inversion.serialize(writer)?; @@ -117,8 +118,7 @@ impl Revision { } } -// Temporarily 3 for review. -const HEADER_TAG: &str = "Helix Undofile 4\n"; +const HEADER_TAG: &str = "Helix Undofile 1\n"; fn get_hash(reader: &mut R) -> std::io::Result<[u8; 20]> { const BUF_SIZE: usize = 8192; @@ -144,6 +144,7 @@ impl History { revision: usize, last_saved_revision: usize, ) -> std::io::Result<()> { + // Header let mtime = std::fs::metadata(path)? .modified()? .duration_since(std::time::UNIX_EPOCH) @@ -164,9 +165,14 @@ impl History { Ok(()) } + /// Returns the deserialized [`History`] and the last_saved_revision. pub fn deserialize(reader: &mut R, path: &Path) -> std::io::Result<(usize, Self)> { let (current, last_saved_revision) = Self::read_header(reader, path)?; + + // Since `timestamp` can't be serialized, a new timestamp is created. let timestamp = Instant::now(); + + // Read the revisions and construct the tree. let len = read_usize(reader)?; let mut revisions: Vec = Vec::with_capacity(len); for _ in 0..len { @@ -184,6 +190,16 @@ impl History { Ok((last_saved_revision, history)) } + /// If two histories originate from: `A -> B (B is head)` but have deviated since then such that + /// the first history is: `A -> B -> C -> D (D is head)` and the second one is: + /// `A -> B -> E -> F (F is head)`. + /// Then they are merged into + /// ``` + /// A -> B -> C -> D + /// \ + /// \ -> E -> F + /// ``` + /// and retain their revision heads. pub fn merge(&mut self, mut other: History, offset: usize) -> std::io::Result<()> { let revisions = self.revisions.split_off(offset); let len = other.revisions.len(); @@ -823,6 +839,7 @@ mod test { quickcheck!( fn serde_history(original: String, changes_a: Vec, changes_b: Vec) -> bool { + // Constructs a set of transactions and applies them to the history. fn create_changes(history: &mut History, doc: &mut Rope, changes: Vec) { for c in changes.into_iter().map(Rope::from) { let transaction = crate::diff::compare_ropes(doc, &c); @@ -843,6 +860,8 @@ mod test { let file = tempfile::NamedTempFile::new().unwrap(); history.serialize(&mut cursor, file.path(), 0, 0).unwrap(); cursor.set_position(0); + + // Check if the original and deserialized history match. let (_, res) = History::deserialize(&mut cursor, file.path()).unwrap(); assert_eq!(history, res); @@ -854,6 +873,8 @@ mod test { .serialize(&mut cursor, file.path(), 0, last_saved_revision) .unwrap(); cursor.set_position(0); + + // Check if they are the same after appending new changes. let (_, res) = History::deserialize(&mut cursor, file.path()).unwrap(); history == res } diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs index 1c88cba3cd95..05fc91abc3e0 100644 --- a/helix-term/src/application.rs +++ b/helix-term/src/application.rs @@ -385,6 +385,21 @@ impl Application { // the Application can apply it. ConfigEvent::Update(editor_config) => { let mut app_config = (*self.config.load().clone()).clone(); + if self.config.load().editor.persistent_undo == false + && editor_config.persistent_undo == true + { + for doc in self.editor.documents_mut() { + // HAXX: Do this so all revisions in this doc are treated as new. 
+ doc.set_last_saved_revision(0); + if let Err(e) = doc.load_history() { + log::error!( + "failed to reload history for {}: {e}", + doc.path().unwrap().to_string_lossy() + ); + return; + } + } + } app_config.editor = *editor_config; self.config.store(Arc::new(app_config)); } @@ -439,6 +454,17 @@ impl Application { let mut refresh_config = || -> Result<(), Error> { let default_config = Config::load_default() .map_err(|err| anyhow::anyhow!("Failed to load config: {}", err))?; + + // Merge histories of existing docs if persistent undo was enabled. + if self.config.load().editor.persistent_undo == false + && default_config.editor.persistent_undo == true + { + for doc in self.editor.documents_mut() { + // HAXX: Do this so all revisions in this doc are treated as new. + doc.set_last_saved_revision(0); + doc.load_history()?; + } + } self.refresh_language_config()?; self.refresh_theme(&default_config)?; // Store new config diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 2c6f64c6c41a..db67103c7e77 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -1387,10 +1387,8 @@ impl Display for FormatterError { #[cfg(test)] mod test { - use arc_swap::{access::Map, ArcSwap}; - use quickcheck::Gen; - use super::*; + use arc_swap::ArcSwap; #[test] fn changeset_to_changes_ignore_line_endings() { @@ -1561,6 +1559,7 @@ mod test { #[tokio::test(flavor = "multi_thread")] async fn reload_history() { let test_fn: fn(Vec) -> bool = |changes| -> bool { + // Divide the vec into 3 sets of changes. let len = changes.len() / 3; let mut original = Rope::new(); let mut iter = changes.into_iter(); @@ -1623,7 +1622,6 @@ mod test { for c in changes_b { doc_1.apply(&c, view_id); } - for c in changes_c { doc_2.apply(&c, view_id); } @@ -1632,6 +1630,7 @@ mod test { doc_1.load_history().unwrap(); doc_3.load_history().unwrap(); + // doc_3 had no diverging edits, so they should be the same. assert_eq!(doc_2.history.get_mut(), doc_3.history.get_mut()); helix_lsp::block_on(doc_1.save::(None, true).unwrap()).unwrap(); From cea1fe55c1bd33deb4775d5d40a6160a15cb3ec4 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Mon, 20 Feb 2023 11:52:32 -0500 Subject: [PATCH 31/35] address clippy --- helix-core/src/history.rs | 2 +- helix-term/src/application.rs | 16 +++++++++------- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index 93e96ff4ddd9..0843ebbc1f8e 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -194,7 +194,7 @@ impl History { /// the first history is: `A -> B -> C -> D (D is head)` and the second one is: /// `A -> B -> E -> F (F is head)`. /// Then they are merged into - /// ``` + /// ```md /// A -> B -> C -> D /// \ /// \ -> E -> F diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs index 05fc91abc3e0..16a051aee1ee 100644 --- a/helix-term/src/application.rs +++ b/helix-term/src/application.rs @@ -385,13 +385,13 @@ impl Application { // the Application can apply it. ConfigEvent::Update(editor_config) => { let mut app_config = (*self.config.load().clone()).clone(); - if self.config.load().editor.persistent_undo == false - && editor_config.persistent_undo == true - { + if !self.config.load().editor.persistent_undo && editor_config.persistent_undo { for doc in self.editor.documents_mut() { // HAXX: Do this so all revisions in this doc are treated as new. 
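+                        // Keep the old marker around so it can be restored
+                        // if reloading the undo history below fails.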
+ let lsr = doc.get_last_saved_revision(); doc.set_last_saved_revision(0); if let Err(e) = doc.load_history() { + doc.set_last_saved_revision(lsr); log::error!( "failed to reload history for {}: {e}", doc.path().unwrap().to_string_lossy() @@ -456,13 +456,15 @@ impl Application { .map_err(|err| anyhow::anyhow!("Failed to load config: {}", err))?; // Merge histories of existing docs if persistent undo was enabled. - if self.config.load().editor.persistent_undo == false - && default_config.editor.persistent_undo == true - { + if !self.config.load().editor.persistent_undo && default_config.editor.persistent_undo { for doc in self.editor.documents_mut() { // HAXX: Do this so all revisions in this doc are treated as new. + let lsr = doc.get_last_saved_revision(); doc.set_last_saved_revision(0); - doc.load_history()?; + if let Err(e) = doc.load_history() { + doc.set_last_saved_revision(lsr); + return Err(e); + } } } self.refresh_language_config()?; From 5dd2ad019439e49032966b7e13d598a18c015df7 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Mon, 20 Feb 2023 12:00:19 -0500 Subject: [PATCH 32/35] report error if reload fails --- helix-view/src/document.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index db67103c7e77..821892fbeb63 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -731,7 +731,7 @@ impl Document { let mut file = std::fs::File::open(&path)?; let (rope, ..) = from_reader(&mut file, Some(encoding))?; - if let Err(e) = self.load_history() { + let e = self.load_history().map_err(|e| { log::error!("{}", e); // Calculate the difference between the buffer and source text, and apply it. // This is not considered a modification of the contents of the file regardless @@ -740,7 +740,8 @@ impl Document { self.apply(&transaction, view.id); self.append_changes_to_history(view); self.reset_modified(); - } + e + }); self.last_saved_time = SystemTime::now(); self.detect_indent_and_line_ending(); @@ -750,7 +751,7 @@ impl Document { None => self.diff_handle = None, } - Ok(()) + e } pub fn undo_file(&self, path: Option<&PathBuf>) -> anyhow::Result> { From e5ea16bcb1b7e134dba252ff27142c17c4115ecb Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sun, 26 Feb 2023 13:28:02 -0500 Subject: [PATCH 33/35] wip --- Cargo.lock | 19 ++++++++++ helix-core/src/history.rs | 41 +++++++++++++++++++--- helix-view/Cargo.toml | 1 + helix-view/src/document.rs | 71 ++++++++++++++++++++++++++++++++++++++ 4 files changed, 128 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3879c4d2d9db..ae59e803427f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1280,6 +1280,7 @@ dependencies = [ "log", "once_cell", "quickcheck", + "rand", "serde", "serde_json", "slotmap", @@ -1679,6 +1680,12 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + [[package]] name = "proc-macro2" version = "1.0.47" @@ -1741,6 +1748,18 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", "rand_core", ] diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index 0843ebbc1f8e..6cd8e6023e8b 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -177,7 +177,16 @@ impl History { let mut revisions: Vec = Vec::with_capacity(len); for _ in 0..len { let res = Revision::deserialize(reader, timestamp)?; - debug_assert!(res.parent <= revisions.len()); + if !revisions.is_empty() && res.parent >= revisions.len() { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + format!( + "non-contiguous history: {} >= {}", + res.parent, + revisions.len() + ), + )); + } if !revisions.is_empty() { revisions.get_mut(res.parent).unwrap().last_child = @@ -196,19 +205,39 @@ impl History { /// Then they are merged into /// ```md /// A -> B -> C -> D - /// \ - /// \ -> E -> F + /// \ + /// E -> F /// ``` /// and retain their revision heads. pub fn merge(&mut self, mut other: History, offset: usize) -> std::io::Result<()> { + if !self + .revisions + .iter() + .zip(other.revisions.iter()) + .take(offset) + .all(|(a, b)| { + a.parent == b.parent && a.transaction == b.transaction && a.inversion == b.inversion + }) + { + return Err(std::io::Error::new( + std::io::ErrorKind::Other, + "unequal histories", + )); + } + let revisions = self.revisions.split_off(offset); let len = other.revisions.len(); + other.revisions.reserve_exact(revisions.len()); + for r in revisions { + // parent is 0-indexed, while offset is +1. let parent = if r.parent < offset { r.parent } else { len + (r.parent - offset) }; + debug_assert!(parent < other.revisions.len()); + other.revisions.get_mut(parent).unwrap().last_child = NonZeroUsize::new(other.revisions.len()); other.revisions.push(r); @@ -832,7 +861,11 @@ mod test { undo.seek(SeekFrom::Start(0)).unwrap(); let saved_history = History::deserialize(&mut undo, file.path()).unwrap().1; - history_2.merge(saved_history, 1).unwrap(); + let err = format!( + "{:#?} vs. 
{:#?}", + history_2.revisions, saved_history.revisions + ); + history_2.merge(saved_history, 1).expect(&err); assert_eq!(history_1.revisions, history_2.revisions); } diff --git a/helix-view/Cargo.toml b/helix-view/Cargo.toml index b22078eb5198..c7955a12dc95 100644 --- a/helix-view/Cargo.toml +++ b/helix-view/Cargo.toml @@ -54,4 +54,5 @@ libc = "0.2" [dev-dependencies] quickcheck = { version = "1", default-features = false } tempfile = "3" +rand = "0.8" helix-tui = { path = "../helix-tui" } diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 821892fbeb63..13621666eb8f 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -1390,6 +1390,8 @@ impl Display for FormatterError { mod test { use super::*; use arc_swap::ArcSwap; + use rand::{rngs::SmallRng, Rng, SeedableRng}; + use tempfile::NamedTempFile; #[test] fn changeset_to_changes_ignore_line_endings() { @@ -1652,6 +1654,75 @@ mod test { futures_util::future::try_join_all(handles).await.unwrap(); } + // struct DocumentFuzz { + // rng: SmallRng, + // file: NamedTempFile, + // documents: Vec, + // } + + // impl DocumentFuzz { + // fn new() -> Self { + // let mut rng = SmallRng::from_entropy(); + // let file = NamedTempFile::new().unwrap(); + // let range = 0..rng.gen_range(0..100); + // let config = { + // let config = Config { + // persistent_undo: true, + // ..Default::default() + // }; + // Arc::new(ArcSwap::new(Arc::new(config))) + // }; + // let documents = range + // .map(|_| Document::open(file.path(), None, None, config.clone()).unwrap()) + // .collect(); + // Self { + // rng, + // file, + // documents, + // } + // } + + // async fn arbitrary(&mut self) { + // let view = ViewId::default(); + // for doc in &mut self.documents { + // if self.rng.gen() { + // doc.load_history().unwrap(); + // } + + // if self.rng.gen() { + // doc.save::(None, false).unwrap().await.unwrap(); + // } + + // if self.rng.gen() { + // Document::open(self.file.path(), None, None, doc.config.clone()).unwrap(); + // } + + // let tx = Transaction::change( + // doc.text(), + // (0..self.rng.gen_range(0..100)) + // .map(|_| (0, 0, Option::::None)), + // ); + // } + // } + // } + + struct DocumentFuzz { + rng: SmallRng, + documents: Vec, + } + + impl DocumentFuzz { + fn new() -> Self { + Self { + rng: SmallRng::from_entropy(), + documents: Vec::new(), + } + } + } + + #[tokio::test(flavor = "multi_thread")] + async fn fuzz_reload() {} + macro_rules! 
decode { ($name:ident, $label:expr, $label_override:expr) => { #[test] From c2e54ded9c87b62acc46532ca37b983a774d4c19 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sun, 26 Feb 2023 13:33:19 -0500 Subject: [PATCH 34/35] wip --- helix-view/src/document.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 13621666eb8f..aa3eb3b8c228 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -1617,7 +1617,7 @@ mod test { helix_lsp::block_on(doc_1.save::(None, true).unwrap()).unwrap(); let mut doc_2 = Document::open(file.path(), None, None, config.clone()).unwrap(); - let mut doc_3 = Document::open(file.path(), None, None, config).unwrap(); + let mut doc_3 = Document::open(file.path(), None, None, config.clone()).unwrap(); doc_2.ensure_view_init(view_id); doc_3.ensure_view_init(view_id); @@ -1639,6 +1639,8 @@ mod test { helix_lsp::block_on(doc_1.save::(None, true).unwrap()).unwrap(); doc_2.load_history().unwrap(); doc_3.load_history().unwrap(); + + let _ = Document::open(file.path(), None, None, config).unwrap(); doc_1.history.get_mut() == doc_2.history.get_mut() && doc_1.history.get_mut() == doc_3.history.get_mut() }; From 0d36a789fa87bb1072bc8c1777fff253357c1958 Mon Sep 17 00:00:00 2001 From: Shafkath Shuhan Date: Sat, 4 Mar 2023 20:29:19 -0500 Subject: [PATCH 35/35] wip --- Cargo.lock | 1 + helix-core/Cargo.toml | 1 + helix-core/src/history.rs | 64 ++++++++++++--------- helix-view/src/document.rs | 115 ++++++++++++------------------------- 4 files changed, 75 insertions(+), 106 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ae59e803427f..51e3aa82ab34 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1116,6 +1116,7 @@ name = "helix-core" version = "0.6.0" dependencies = [ "ahash 0.8.3", + "anyhow", "arc-swap", "bitflags", "chrono", diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index 8465f1e75036..daff50f981d0 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -17,6 +17,7 @@ integration = ["helix-loader/integration"] [dependencies] helix-loader = { version = "0.6", path = "../helix-loader" } +anyhow = "1.0" ropey = { version = "1.6.0", default-features = false, features = ["simd"] } smallvec = "1.10" smartstring = "1.0.1" diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index 6cd8e6023e8b..a1d51ec086c3 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -136,6 +136,27 @@ fn get_hash(reader: &mut R) -> std::io::Result<[u8; 20]> { Ok(hash.digest().bytes()) } +#[derive(Debug)] +pub enum StateError { + Outdated, + InvalidHeader, + InvalidOffset, + InvalidData(String), +} + +impl std::fmt::Display for StateError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Outdated => f.write_str("Outdated file"), + Self::InvalidHeader => f.write_str("Invalid undofile header"), + Self::InvalidOffset => f.write_str("Invalid merge offset"), + Self::InvalidData(msg) => f.write_str(msg), + } + } +} + +impl std::error::Error for StateError {} + impl History { pub fn serialize( &self, @@ -166,7 +187,7 @@ impl History { } /// Returns the deserialized [`History`] and the last_saved_revision. - pub fn deserialize(reader: &mut R, path: &Path) -> std::io::Result<(usize, Self)> { + pub fn deserialize(reader: &mut R, path: &Path) -> anyhow::Result<(usize, Self)> { let (current, last_saved_revision) = Self::read_header(reader, path)?; // Since `timestamp` can't be serialized, a new timestamp is created. 
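
(Aside, not part of the patch: now that these `StateError` variants travel inside `anyhow::Error`, a caller can tell a stale or malformed undofile apart from an ordinary I/O failure by downcasting. A minimal sketch under that assumption; the function name and the recovery policy here are hypothetical, not something this series adds.)

```rust
use helix_core::history::StateError;

/// Decide whether an undofile error means "discard and start over" (true)
/// or "report it and keep the file" (false).
fn should_recreate_undofile(err: &anyhow::Error) -> bool {
    match err.downcast_ref::<StateError>() {
        // The source file changed since the undofile was written, or the
        // header tag is unknown: the stored history no longer applies.
        Some(StateError::Outdated) | Some(StateError::InvalidHeader) => true,
        // Corrupt revision data or a bad merge offset is unexpected;
        // keep the file for inspection and surface the error instead.
        Some(StateError::InvalidData(_)) | Some(StateError::InvalidOffset) => false,
        // Not a StateError at all (e.g. a plain I/O error).
        None => false,
    }
}
```

The series itself only checks `.is_ok()` on the deserialize result in `Document::save`; the match above merely illustrates how the variants can be distinguished.
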
@@ -177,20 +198,16 @@ impl History { let mut revisions: Vec = Vec::with_capacity(len); for _ in 0..len { let res = Revision::deserialize(reader, timestamp)?; - if !revisions.is_empty() && res.parent >= revisions.len() { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - format!( + let len = revisions.len(); + match revisions.get_mut(res.parent) { + Some(r) => r.last_child = NonZeroUsize::new(len), + None if len != 0 => { + anyhow::bail!(StateError::InvalidData(format!( "non-contiguous history: {} >= {}", - res.parent, - revisions.len() - ), - )); - } - - if !revisions.is_empty() { - revisions.get_mut(res.parent).unwrap().last_child = - NonZeroUsize::new(revisions.len()); + res.parent, len + ))); + } + None => {} } revisions.push(res); } @@ -209,7 +226,7 @@ impl History { /// E -> F /// ``` /// and retain their revision heads. - pub fn merge(&mut self, mut other: History, offset: usize) -> std::io::Result<()> { + pub fn merge(&mut self, mut other: History, offset: usize) -> anyhow::Result<()> { if !self .revisions .iter() @@ -219,10 +236,7 @@ impl History { a.parent == b.parent && a.transaction == b.transaction && a.inversion == b.inversion }) { - return Err(std::io::Error::new( - std::io::ErrorKind::Other, - "unequal histories", - )); + anyhow::bail!(StateError::InvalidOffset); } let revisions = self.revisions.split_off(offset); @@ -246,13 +260,10 @@ impl History { Ok(()) } - pub fn read_header(reader: &mut R, path: &Path) -> std::io::Result<(usize, usize)> { + pub fn read_header(reader: &mut R, path: &Path) -> anyhow::Result<(usize, usize)> { let header = read_string(reader)?; if HEADER_TAG != header { - Err(std::io::Error::new( - std::io::ErrorKind::Other, - "missing undofile header", - )) + Err(anyhow::anyhow!(StateError::InvalidHeader)) } else { let current = read_usize(reader)?; let last_saved_revision = read_usize(reader)?; @@ -266,10 +277,7 @@ impl History { reader.read_exact(&mut hash)?; if mtime != last_mtime && hash != get_hash(&mut std::fs::File::open(path)?)? { - return Err(std::io::Error::new( - std::io::ErrorKind::Other, - "outdated undo file", - )); + anyhow::bail!(StateError::Outdated); } Ok((current, last_saved_revision)) } diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index aa3eb3b8c228..35bbc918c8bb 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -637,17 +637,19 @@ impl Document { .await; tokio::task::spawn_blocking(move || -> anyhow::Result<()> { // Truncate the file if it's not a valid undofile. 
- if History::deserialize(&mut std::fs::File::open(&undofile_path)?, &path) - .is_ok() + let offset = if History::deserialize( + &mut std::fs::File::open(&undofile_path)?, + &path, + ) + .is_ok() { + log::info!("Overwriting undofile for {}", path.to_string_lossy()); undofile.set_len(0)?; - } - history.serialize( - &mut undofile, - &path, - current_rev, - last_saved_revision, - )?; + 0 + } else { + last_saved_revision + }; + history.serialize(&mut undofile, &path, current_rev, offset)?; Ok(()) }) .await @@ -1390,8 +1392,6 @@ impl Display for FormatterError { mod test { use super::*; use arc_swap::ArcSwap; - use rand::{rngs::SmallRng, Rng, SeedableRng}; - use tempfile::NamedTempFile; #[test] fn changeset_to_changes_ignore_line_endings() { @@ -1656,74 +1656,33 @@ mod test { futures_util::future::try_join_all(handles).await.unwrap(); } - // struct DocumentFuzz { - // rng: SmallRng, - // file: NamedTempFile, - // documents: Vec, - // } - - // impl DocumentFuzz { - // fn new() -> Self { - // let mut rng = SmallRng::from_entropy(); - // let file = NamedTempFile::new().unwrap(); - // let range = 0..rng.gen_range(0..100); - // let config = { - // let config = Config { - // persistent_undo: true, - // ..Default::default() - // }; - // Arc::new(ArcSwap::new(Arc::new(config))) - // }; - // let documents = range - // .map(|_| Document::open(file.path(), None, None, config.clone()).unwrap()) - // .collect(); - // Self { - // rng, - // file, - // documents, - // } - // } - - // async fn arbitrary(&mut self) { - // let view = ViewId::default(); - // for doc in &mut self.documents { - // if self.rng.gen() { - // doc.load_history().unwrap(); - // } - - // if self.rng.gen() { - // doc.save::(None, false).unwrap().await.unwrap(); - // } - - // if self.rng.gen() { - // Document::open(self.file.path(), None, None, doc.config.clone()).unwrap(); - // } - - // let tx = Transaction::change( - // doc.text(), - // (0..self.rng.gen_range(0..100)) - // .map(|_| (0, 0, Option::::None)), - // ); - // } - // } - // } - - struct DocumentFuzz { - rng: SmallRng, - documents: Vec, - } - - impl DocumentFuzz { - fn new() -> Self { - Self { - rng: SmallRng::from_entropy(), - documents: Vec::new(), - } - } - } - #[tokio::test(flavor = "multi_thread")] - async fn fuzz_reload() {} + async fn save_history() { + let file = tempfile::NamedTempFile::new().unwrap(); + let config = Config { + persistent_undo: true, + ..Default::default() + }; + + let view_id = ViewId::default(); + let config = Arc::new(ArcSwap::new(Arc::new(config))); + let mut doc = Document::open(file.path(), None, None, config.clone()).unwrap(); + + let tx = Transaction::change(&Rope::new(), [(0, 0, None)].into_iter()); + doc.apply(&tx, view_id); + doc.save::(None, false).unwrap().await.unwrap(); + + // Wipe undo file + tokio::fs::File::create(doc.undo_file(None).unwrap().unwrap()) + .await + .unwrap(); + + // Write it again. + doc.save::(None, false).unwrap().await.unwrap(); + + // Will load history. + Document::open(file.path(), None, None, config.clone()).unwrap(); + } macro_rules! decode { ($name:ident, $label:expr, $label_override:expr) => {