diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 35495f1d2b..2ca39c8420 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,7 +3,7 @@ name: CI on: push: branches: - - master + - main tags: - "v*" pull_request: @@ -61,14 +61,15 @@ jobs: - name: Create app bundle run: script/bundle - - name: Upload app bundle to workflow run + - name: Upload app bundle to workflow run if main branch uses: actions/upload-artifact@v2 + if: ${{ github.ref == 'refs/heads/main' }} with: name: Zed.dmg path: target/release/Zed.dmg - uses: softprops/action-gh-release@v1 - name: Upload app bundle to release + name: Upload app bundle to release if release tag if: ${{ startsWith(github.ref, 'refs/tags/v') }} with: draft: true diff --git a/Cargo.lock b/Cargo.lock index c452b39d59..b83ff00b7f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -358,9 +358,9 @@ checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" [[package]] name = "bindgen" -version = "0.57.0" +version = "0.58.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd4865004a46a0aafb2a0a5eb19d3c9fc46ee5f063a6cfc605c69ac9ecf5263d" +checksum = "0f8523b410d7187a43085e7e064416ea32ded16bd0a4e6fc025e21616d01258f" dependencies = [ "bitflags 1.2.1", "cexpr", @@ -3331,9 +3331,9 @@ dependencies = [ [[package]] name = "shlex" -version = "0.1.1" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2" +checksum = "42a568c8f2cd051a4d283bd6eb0343ac214c1b0f1ac19f93e1175b2dee38c73d" [[package]] name = "signal-hook" diff --git a/gpui/Cargo.toml b/gpui/Cargo.toml index 7f436094c9..4288795d92 100644 --- a/gpui/Cargo.toml +++ b/gpui/Cargo.toml @@ -30,7 +30,7 @@ tree-sitter = "0.19" usvg = "0.14" [build-dependencies] -bindgen = "0.57" +bindgen = "0.58.1" cc = "1.0.67" [dev-dependencies] diff --git a/gpui/build.rs b/gpui/build.rs index 1087847a74..a9d126763d 100644 --- a/gpui/build.rs +++ b/gpui/build.rs @@ -17,8 +17,8 @@ fn generate_dispatch_bindings() { let bindings = bindgen::Builder::default() .header("src/platform/mac/dispatch.h") - .whitelist_var("_dispatch_main_q") - .whitelist_function("dispatch_async_f") + .allowlist_var("_dispatch_main_q") + .allowlist_function("dispatch_async_f") .parse_callbacks(Box::new(bindgen::CargoCallbacks)) .layout_tests(false) .generate() @@ -95,7 +95,7 @@ fn compile_metal_shaders() { fn generate_shader_bindings() { let bindings = bindgen::Builder::default() .header(SHADER_HEADER_PATH) - .whitelist_type("GPUI.*") + .allowlist_type("GPUI.*") .parse_callbacks(Box::new(bindgen::CargoCallbacks)) .layout_tests(false) .generate() diff --git a/gpui/src/app.rs b/gpui/src/app.rs index 8517d45f71..0470881dd5 100644 --- a/gpui/src/app.rs +++ b/gpui/src/app.rs @@ -279,6 +279,10 @@ impl TestAppContext { ); } + pub fn dispatch_global_action(&self, name: &str, arg: T) { + self.cx.borrow_mut().dispatch_global_action(name, arg); + } + pub fn dispatch_keystroke( &self, window_id: usize, diff --git a/gpui/src/platform/mac/fonts.rs b/gpui/src/platform/mac/fonts.rs index b1605c8b0b..85a628f0d6 100644 --- a/gpui/src/platform/mac/fonts.rs +++ b/gpui/src/platform/mac/fonts.rs @@ -311,17 +311,24 @@ impl FontSystemState { #[cfg(test)] mod tests { + use crate::MutableAppContext; + use super::*; use font_kit::properties::{Style, Weight}; use platform::FontSystem as _; - #[test] - fn test_layout_str() -> anyhow::Result<()> { + #[crate::test(self, retries = 5)] + fn 
test_layout_str(_: &mut MutableAppContext) { + // This is failing intermittently on CI and we don't have time to figure it out let fonts = FontSystem::new(); - let menlo = fonts.load_family("Menlo")?; - let menlo_regular = fonts.select_font(&menlo, &Properties::new())?; - let menlo_italic = fonts.select_font(&menlo, &Properties::new().style(Style::Italic))?; - let menlo_bold = fonts.select_font(&menlo, &Properties::new().weight(Weight::BOLD))?; + let menlo = fonts.load_family("Menlo").unwrap(); + let menlo_regular = fonts.select_font(&menlo, &Properties::new()).unwrap(); + let menlo_italic = fonts + .select_font(&menlo, &Properties::new().style(Style::Italic)) + .unwrap(); + let menlo_bold = fonts + .select_font(&menlo, &Properties::new().weight(Weight::BOLD)) + .unwrap(); assert_ne!(menlo_regular, menlo_italic); assert_ne!(menlo_regular, menlo_bold); assert_ne!(menlo_italic, menlo_bold); @@ -342,7 +349,6 @@ mod tests { assert_eq!(line.runs[1].glyphs.len(), 4); assert_eq!(line.runs[2].font_id, menlo_regular); assert_eq!(line.runs[2].glyphs.len(), 5); - Ok(()) } #[test] diff --git a/gpui_macros/src/lib.rs b/gpui_macros/src/lib.rs index 12c86b69bd..53aee377e6 100644 --- a/gpui_macros/src/lib.rs +++ b/gpui_macros/src/lib.rs @@ -1,14 +1,16 @@ -use std::mem; - use proc_macro::TokenStream; use quote::{format_ident, quote}; -use syn::{parse_macro_input, parse_quote, AttributeArgs, ItemFn, Meta, NestedMeta}; +use std::mem; +use syn::{ + parse_macro_input, parse_quote, AttributeArgs, ItemFn, Lit, Meta, MetaNameValue, NestedMeta, +}; #[proc_macro_attribute] pub fn test(args: TokenStream, function: TokenStream) -> TokenStream { let mut namespace = format_ident!("gpui"); let args = syn::parse_macro_input!(args as AttributeArgs); + let mut max_retries = 0; for arg in args { match arg { NestedMeta::Meta(Meta::Path(name)) @@ -16,6 +18,14 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream { { namespace = format_ident!("crate"); } + NestedMeta::Meta(Meta::NameValue(meta)) => { + if let Some(result) = parse_retries(&meta) { + match result { + Ok(retries) => max_retries = retries, + Err(error) => return TokenStream::from(error.into_compile_error()), + } + } + } other => { return TokenStream::from( syn::Error::new_spanned(other, "invalid argument").into_compile_error(), @@ -34,9 +44,32 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream { fn #outer_fn_name() { #inner_fn - #namespace::App::test_async(move |cx| async { - #inner_fn_name(cx).await; - }); + if #max_retries > 0 { + let mut retries = 0; + loop { + let result = std::panic::catch_unwind(|| { + #namespace::App::test_async(move |cx| async { + #inner_fn_name(cx).await; + }); + }); + + match result { + Ok(result) => return result, + Err(error) => { + if retries < #max_retries { + retries += 1; + println!("retrying: attempt {}", retries); + } else { + std::panic::resume_unwind(error); + } + } + } + } + } else { + #namespace::App::test_async(move |cx| async { + #inner_fn_name(cx).await; + }); + } } } } else { @@ -45,9 +78,32 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream { fn #outer_fn_name() { #inner_fn - #namespace::App::test(|cx| { - #inner_fn_name(cx); - }); + if #max_retries > 0 { + let mut retries = 0; + loop { + let result = std::panic::catch_unwind(|| { + #namespace::App::test(|cx| { + #inner_fn_name(cx); + }); + }); + + match result { + Ok(result) => return result, + Err(error) => { + if retries < #max_retries { + retries += 1; + println!("retrying: attempt {}", retries); + } else { 
+                            std::panic::resume_unwind(error);
+                        }
+                    }
+                }
+            }
+        } else {
+            #namespace::App::test(|cx| {
+                #inner_fn_name(cx);
+            });
+        }
         }
     }
 };
@@ -55,3 +111,19 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
 
     TokenStream::from(quote!(#outer_fn))
 }
+
+fn parse_retries(meta: &MetaNameValue) -> Option> {
+    let ident = meta.path.get_ident();
+    if ident.map_or(false, |n| n == "retries") {
+        if let Lit::Int(int) = &meta.lit {
+            Some(int.base10_parse())
+        } else {
+            Some(Err(syn::Error::new(
+                meta.lit.span(),
+                "retries must be an integer",
+            )))
+        }
+    } else {
+        None
+    }
+}
diff --git a/zed-rpc/proto/zed.proto b/zed-rpc/proto/zed.proto
index 4b3f855ec2..20fab30a47 100644
--- a/zed-rpc/proto/zed.proto
+++ b/zed-rpc/proto/zed.proto
@@ -145,9 +145,15 @@ message Operation {
     uint32 replica_id = 1;
     uint32 local_timestamp = 2;
     uint32 lamport_timestamp = 3;
-    uint32 edit_replica_id = 4;
-    uint32 edit_local_timestamp = 5;
-    uint32 count = 6;
+    repeated Range ranges = 4;
+    repeated VectorClockEntry version = 5;
+    repeated UndoCount counts = 6;
+  }
+
+  message UndoCount {
+    uint32 replica_id = 1;
+    uint32 local_timestamp = 2;
+    uint32 count = 3;
   }
 
   message UpdateSelections {
diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs
index e601989396..d2e2705fb6 100644
--- a/zed/src/editor/buffer.rs
+++ b/zed/src/editor/buffer.rs
@@ -38,8 +38,6 @@ use std::{
     time::{Duration, Instant, SystemTime, UNIX_EPOCH},
 };
 
-const UNDO_GROUP_INTERVAL: Duration = Duration::from_millis(300);
-
 #[derive(Clone, Default)]
 struct DeterministicState;
 
@@ -145,17 +143,67 @@ struct SyntaxTree {
     version: time::Global,
 }
 
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 struct Transaction {
     start: time::Global,
+    end: time::Global,
     buffer_was_dirty: bool,
     edits: Vec,
+    ranges: Vec>,
     selections_before: Option<(SelectionSetId, Arc<[Selection]>)>,
     selections_after: Option<(SelectionSetId, Arc<[Selection]>)>,
     first_edit_at: Instant,
     last_edit_at: Instant,
 }
 
+impl Transaction {
+    fn push_edit(&mut self, edit: &EditOperation) {
+        self.edits.push(edit.timestamp.local());
+        self.end.observe(edit.timestamp.local());
+
+        let mut other_ranges = edit.ranges.iter().peekable();
+        let mut new_ranges: Vec> = Vec::new();
+        let insertion_len = edit.new_text.as_ref().map_or(0, |t| t.len());
+        let mut delta = 0;
+
+        for mut self_range in self.ranges.iter().cloned() {
+            self_range.start += delta;
+            self_range.end += delta;
+
+            while let Some(other_range) = other_ranges.peek() {
+                let mut other_range = (*other_range).clone();
+                other_range.start += delta;
+                other_range.end += delta;
+
+                if other_range.start <= self_range.end {
+                    other_ranges.next().unwrap();
+                    delta += insertion_len;
+
+                    if other_range.end < self_range.start {
+                        new_ranges.push(other_range.start..other_range.end + insertion_len);
+                        self_range.start += insertion_len;
+                        self_range.end += insertion_len;
+                    } else {
+                        self_range.start = cmp::min(self_range.start, other_range.start);
+                        self_range.end = cmp::max(self_range.end, other_range.end) + insertion_len;
+                    }
+                } else {
+                    break;
+                }
+            }
+
+            new_ranges.push(self_range);
+        }
+
+        for other_range in other_ranges {
+            new_ranges.push(other_range.start + delta..other_range.end + delta + insertion_len);
+            delta += insertion_len;
+        }
+
+        self.ranges = new_ranges;
+    }
+}
+
 #[derive(Clone)]
 pub struct History {
     // TODO: Turn this into a String or Rope, maybe.
@@ -175,7 +223,7 @@ impl History { undo_stack: Vec::new(), redo_stack: Vec::new(), transaction_depth: 0, - group_interval: UNDO_GROUP_INTERVAL, + group_interval: Duration::from_millis(300), } } @@ -193,9 +241,11 @@ impl History { self.transaction_depth += 1; if self.transaction_depth == 1 { self.undo_stack.push(Transaction { - start, + start: start.clone(), + end: start, buffer_was_dirty, edits: Vec::new(), + ranges: Vec::new(), selections_before: selections, selections_after: None, first_edit_at: now, @@ -226,12 +276,10 @@ impl History { let mut transactions = self.undo_stack.iter_mut(); if let Some(mut transaction) = transactions.next_back() { - for prev_transaction in transactions.next_back() { + while let Some(prev_transaction) = transactions.next_back() { if transaction.first_edit_at - prev_transaction.last_edit_at <= self.group_interval + && transaction.start == prev_transaction.end { - prev_transaction.edits.append(&mut transaction.edits); - prev_transaction.last_edit_at = transaction.last_edit_at; - prev_transaction.selections_after = transaction.selections_after.take(); transaction = prev_transaction; new_len -= 1; } else { @@ -240,12 +288,28 @@ impl History { } } + let (transactions_to_keep, transactions_to_merge) = self.undo_stack.split_at_mut(new_len); + if let Some(last_transaction) = transactions_to_keep.last_mut() { + for transaction in &*transactions_to_merge { + for edit_id in &transaction.edits { + last_transaction.push_edit(&self.ops[edit_id]); + } + } + + if let Some(transaction) = transactions_to_merge.last_mut() { + last_transaction.last_edit_at = transaction.last_edit_at; + last_transaction.selections_after = transaction.selections_after.take(); + last_transaction.end = transaction.end.clone(); + } + } + self.undo_stack.truncate(new_len); } fn push_undo(&mut self, edit_id: time::Local) { assert_ne!(self.transaction_depth, 0); - self.undo_stack.last_mut().unwrap().edits.push(edit_id); + let last_transaction = self.undo_stack.last_mut().unwrap(); + last_transaction.push_edit(&self.ops[&edit_id]); } fn pop_undo(&mut self) -> Option<&Transaction> { @@ -270,11 +334,13 @@ impl History { } #[derive(Clone, Default, Debug)] -struct UndoMap(HashMap>); +struct UndoMap(HashMap>); impl UndoMap { - fn insert(&mut self, undo: UndoOperation) { - self.0.entry(undo.edit_id).or_default().push(undo); + fn insert(&mut self, undo: &UndoOperation) { + for (edit_id, count) in &undo.counts { + self.0.entry(*edit_id).or_default().push((undo.id, *count)); + } } fn is_undone(&self, edit_id: time::Local) -> bool { @@ -287,8 +353,8 @@ impl UndoMap { .get(&edit_id) .unwrap_or(&Vec::new()) .iter() - .filter(|undo| version.observed(undo.id)) - .map(|undo| undo.count) + .filter(|(undo_id, _)| version.observed(*undo_id)) + .map(|(_, undo_count)| *undo_count) .max() .unwrap_or(0); undo_count % 2 == 1 @@ -299,7 +365,7 @@ impl UndoMap { .get(&edit_id) .unwrap_or(&Vec::new()) .iter() - .map(|undo| undo.count) + .map(|(_, undo_count)| *undo_count) .max() .unwrap_or(0) } @@ -416,11 +482,12 @@ pub struct EditOperation { new_text: Option, } -#[derive(Copy, Clone, Debug, Eq, PartialEq)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct UndoOperation { id: time::Local, - edit_id: time::Local, - count: u32, + counts: HashMap, + ranges: Vec>, + version: time::Global, } impl Buffer { @@ -1211,7 +1278,7 @@ impl Buffer { lamport_timestamp, } => { if !self.version.observed(undo.id) { - self.apply_undo(undo)?; + self.apply_undo(&undo)?; self.version.observe(undo.id); self.lamport_clock.observe(lamport_timestamp); } 
@@ -1276,7 +1343,7 @@ impl Buffer { old_fragments.slice(&VersionedOffset::Offset(ranges[0].start), Bias::Left, &cx); new_ropes.push_tree(new_fragments.summary().text); - let mut fragment_start = old_fragments.start().offset(); + let mut fragment_start = old_fragments.sum_start().offset(); for range in ranges { let fragment_end = old_fragments.end(&cx).offset(); @@ -1285,7 +1352,7 @@ impl Buffer { if fragment_end < range.start { // If the current fragment has been partially consumed, then consume the rest of it // and advance to the next fragment before slicing. - if fragment_start > old_fragments.start().offset() { + if fragment_start > old_fragments.sum_start().offset() { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end - fragment_start; @@ -1299,7 +1366,7 @@ impl Buffer { old_fragments.slice(&VersionedOffset::Offset(range.start), Bias::Left, &cx); new_ropes.push_tree(slice.summary().text); new_fragments.push_tree(slice, &None); - fragment_start = old_fragments.start().offset(); + fragment_start = old_fragments.sum_start().offset(); } // If we are at the end of a non-concurrent fragment, advance to the next one. @@ -1310,7 +1377,7 @@ impl Buffer { new_ropes.push_fragment(&fragment, fragment.visible); new_fragments.push(fragment, &None); old_fragments.next(&cx); - fragment_start = old_fragments.start().offset(); + fragment_start = old_fragments.sum_start().offset(); } // Skip over insertions that are concurrent to this edit, but have a lower lamport @@ -1378,7 +1445,7 @@ impl Buffer { // If the current fragment has been partially consumed, then consume the rest of it // and advance to the next fragment before slicing. - if fragment_start > old_fragments.start().offset() { + if fragment_start > old_fragments.sum_start().offset() { let fragment_end = old_fragments.end(&cx).offset(); if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); @@ -1424,12 +1491,9 @@ impl Buffer { let was_dirty = self.is_dirty(cx.as_ref()); let old_version = self.version.clone(); - if let Some(transaction) = self.history.pop_undo() { + if let Some(transaction) = self.history.pop_undo().cloned() { let selections = transaction.selections_before.clone(); - for edit_id in transaction.edits.clone() { - self.undo_or_redo(edit_id, cx).unwrap(); - } - + self.undo_or_redo(transaction, cx).unwrap(); if let Some((set_id, selections)) = selections { let _ = self.update_selection_set(set_id, selections, cx); } @@ -1446,12 +1510,9 @@ impl Buffer { let was_dirty = self.is_dirty(cx.as_ref()); let old_version = self.version.clone(); - if let Some(transaction) = self.history.pop_redo() { + if let Some(transaction) = self.history.pop_redo().cloned() { let selections = transaction.selections_after.clone(); - for edit_id in transaction.edits.clone() { - self.undo_or_redo(edit_id, cx).unwrap(); - } - + self.undo_or_redo(transaction, cx).unwrap(); if let Some((set_id, selections)) = selections { let _ = self.update_selection_set(set_id, selections, cx); } @@ -1464,13 +1525,23 @@ impl Buffer { } } - fn undo_or_redo(&mut self, edit_id: time::Local, cx: &mut ModelContext) -> Result<()> { + fn undo_or_redo( + &mut self, + transaction: Transaction, + cx: &mut ModelContext, + ) -> Result<()> { + let mut counts = HashMap::default(); + for edit_id in transaction.edits { + counts.insert(edit_id, self.undo_map.undo_count(edit_id) + 1); + } + let undo = UndoOperation { id: self.local_clock.tick(), - edit_id, - count: 
self.undo_map.undo_count(edit_id) + 1, + counts, + ranges: transaction.ranges, + version: transaction.start.clone(), }; - self.apply_undo(undo)?; + self.apply_undo(&undo)?; self.version.observe(undo.id); let operation = Operation::Undo { @@ -1482,27 +1553,31 @@ impl Buffer { Ok(()) } - fn apply_undo(&mut self, undo: UndoOperation) -> Result<()> { + fn apply_undo(&mut self, undo: &UndoOperation) -> Result<()> { self.undo_map.insert(undo); - let edit = &self.history.ops[&undo.edit_id]; - let version = Some(edit.version.clone()); + + let mut cx = undo.version.clone(); + for edit_id in undo.counts.keys().copied() { + cx.observe(edit_id); + } + let cx = Some(cx); let mut old_fragments = self.fragments.cursor::(); - old_fragments.seek(&VersionedOffset::Offset(0), Bias::Left, &version); - - let mut new_fragments = SumTree::new(); + let mut new_fragments = old_fragments.slice( + &VersionedOffset::Offset(undo.ranges[0].start), + Bias::Right, + &cx, + ); let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); + new_ropes.push_tree(new_fragments.summary().text); - for range in &edit.ranges { - let mut end_offset = old_fragments.end(&version).offset(); + for range in &undo.ranges { + let mut end_offset = old_fragments.end(&cx).offset(); if end_offset < range.start { - let preceding_fragments = old_fragments.slice( - &VersionedOffset::Offset(range.start), - Bias::Left, - &version, - ); + let preceding_fragments = + old_fragments.slice(&VersionedOffset::Offset(range.start), Bias::Right, &cx); new_ropes.push_tree(preceding_fragments.summary().text); new_fragments.push_tree(preceding_fragments, &None); } @@ -1511,8 +1586,9 @@ impl Buffer { if let Some(fragment) = old_fragments.item() { let mut fragment = fragment.clone(); let fragment_was_visible = fragment.visible; - if fragment.was_visible(&edit.version, &self.undo_map) - || fragment.timestamp.local() == edit.timestamp.local() + + if fragment.was_visible(&undo.version, &self.undo_map) + || undo.counts.contains_key(&fragment.timestamp.local()) { fragment.visible = fragment.is_visible(&self.undo_map); fragment.max_undos.observe(undo.id); @@ -1520,15 +1596,24 @@ impl Buffer { new_ropes.push_fragment(&fragment, fragment_was_visible); new_fragments.push(fragment, &None); - old_fragments.next(&version); - end_offset = old_fragments.end(&version).offset(); + old_fragments.next(&cx); + if end_offset == old_fragments.end(&cx).offset() { + let unseen_fragments = old_fragments.slice( + &VersionedOffset::Offset(end_offset), + Bias::Right, + &cx, + ); + new_ropes.push_tree(unseen_fragments.summary().text); + new_fragments.push_tree(unseen_fragments, &None); + } + end_offset = old_fragments.end(&cx).offset(); } else { break; } } } - let suffix = old_fragments.suffix(&version); + let suffix = old_fragments.suffix(&cx); new_ropes.push_tree(suffix.summary().text); new_fragments.push_tree(suffix, &None); @@ -1561,7 +1646,7 @@ impl Buffer { } else { match op { Operation::Edit(edit) => self.version >= edit.version, - Operation::Undo { undo, .. } => self.version.observed(undo.edit_id), + Operation::Undo { undo, .. } => self.version >= undo.version, Operation::UpdateSelections { selections, .. 
} => { if let Some(selections) = selections { selections.iter().all(|selection| { @@ -1599,7 +1684,7 @@ impl Buffer { let mut new_fragments = old_fragments.slice(&ranges[0].start, Bias::Right, &None); new_ropes.push_tree(new_fragments.summary().text); - let mut fragment_start = old_fragments.start().visible; + let mut fragment_start = old_fragments.sum_start().visible; for range in ranges { let fragment_end = old_fragments.end(&None).visible; @@ -1608,7 +1693,7 @@ impl Buffer { if fragment_end < range.start { // If the current fragment has been partially consumed, then consume the rest of it // and advance to the next fragment before slicing. - if fragment_start > old_fragments.start().visible { + if fragment_start > old_fragments.sum_start().visible { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end - fragment_start; @@ -1621,10 +1706,10 @@ impl Buffer { let slice = old_fragments.slice(&range.start, Bias::Right, &None); new_ropes.push_tree(slice.summary().text); new_fragments.push_tree(slice, &None); - fragment_start = old_fragments.start().visible; + fragment_start = old_fragments.sum_start().visible; } - let full_range_start = range.start + old_fragments.start().deleted; + let full_range_start = range.start + old_fragments.sum_start().deleted; // Preserve any portion of the current fragment that precedes this range. if fragment_start < range.start { @@ -1672,13 +1757,13 @@ impl Buffer { } } - let full_range_end = range.end + old_fragments.start().deleted; + let full_range_end = range.end + old_fragments.sum_start().deleted; edit.ranges.push(full_range_start..full_range_end); } // If the current fragment has been partially consumed, then consume the rest of it // and advance to the next fragment before slicing. 
- if fragment_start > old_fragments.start().visible { + if fragment_start > old_fragments.sum_start().visible { let fragment_end = old_fragments.end(&None).visible; if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); @@ -1717,7 +1802,7 @@ impl Buffer { let mut cursor = self.fragments.cursor::(); cursor.seek(&offset, bias, &None); Anchor { - offset: offset + cursor.start().deleted, + offset: offset + cursor.sum_start().deleted, bias, version: self.version(), } @@ -1725,30 +1810,28 @@ impl Buffer { fn summary_for_anchor(&self, anchor: &Anchor) -> TextSummary { let cx = Some(anchor.version.clone()); - let mut cursor = self - .fragments - .cursor::(); + let mut cursor = self.fragments.cursor::(); cursor.seek(&VersionedOffset::Offset(anchor.offset), anchor.bias, &cx); let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - anchor.offset - cursor.start().0.offset() + anchor.offset - cursor.seek_start().offset() } else { 0 }; - self.text_summary_for_range(0..cursor.start().1 + overshoot) + self.text_summary_for_range(0..*cursor.sum_start() + overshoot) } fn full_offset_for_anchor(&self, anchor: &Anchor) -> usize { let cx = Some(anchor.version.clone()); let mut cursor = self .fragments - .cursor::(); + .cursor::(); cursor.seek(&VersionedOffset::Offset(anchor.offset), anchor.bias, &cx); let overshoot = if cursor.item().is_some() { - anchor.offset - cursor.start().0.offset() + anchor.offset - cursor.seek_start().offset() } else { 0 }; - let summary = cursor.start().1; + let summary = cursor.sum_start(); summary.visible + summary.deleted + overshoot } @@ -2286,9 +2369,24 @@ impl<'a> Into for &'a Operation { replica_id: undo.id.replica_id as u32, local_timestamp: undo.id.value, lamport_timestamp: lamport_timestamp.value, - edit_replica_id: undo.edit_id.replica_id as u32, - edit_local_timestamp: undo.edit_id.value, - count: undo.count, + ranges: undo + .ranges + .iter() + .map(|r| proto::Range { + start: r.start as u64, + end: r.end as u64, + }) + .collect(), + counts: undo + .counts + .iter() + .map(|(edit_id, count)| proto::operation::UndoCount { + replica_id: edit_id.replica_id as u32, + local_timestamp: edit_id.value, + count: *count, + }) + .collect(), + version: From::from(&undo.version), }), Operation::UpdateSelections { set_id, @@ -2329,14 +2427,6 @@ impl<'a> Into for &'a Operation { impl<'a> Into for &'a EditOperation { fn into(self) -> proto::operation::Edit { - let version = self - .version - .iter() - .map(|entry| proto::VectorClockEntry { - replica_id: entry.replica_id as u32, - timestamp: entry.value, - }) - .collect(); let ranges = self .ranges .iter() @@ -2349,7 +2439,7 @@ impl<'a> Into for &'a EditOperation { replica_id: self.timestamp.replica_id as u32, local_timestamp: self.timestamp.local, lamport_timestamp: self.timestamp.lamport, - version, + version: From::from(&self.version), ranges, new_text: self.new_text.clone(), } @@ -2396,11 +2486,25 @@ impl TryFrom for Operation { replica_id: undo.replica_id as ReplicaId, value: undo.local_timestamp, }, - edit_id: time::Local { - replica_id: undo.edit_replica_id as ReplicaId, - value: undo.edit_local_timestamp, - }, - count: undo.count, + counts: undo + .counts + .into_iter() + .map(|c| { + ( + time::Local { + replica_id: c.replica_id as ReplicaId, + value: c.local_timestamp, + }, + c.count, + ) + }) + .collect(), + ranges: undo + .ranges + .into_iter() + .map(|r| r.start as usize..r.end as usize) + .collect(), + version: undo.version.into(), }, }, 
proto::operation::Variant::UpdateSelections(message) => { @@ -2456,13 +2560,6 @@ impl TryFrom for Operation { impl From for EditOperation { fn from(edit: proto::operation::Edit) -> Self { - let mut version = time::Global::new(); - for entry in edit.version { - version.observe(time::Local { - replica_id: entry.replica_id as ReplicaId, - value: entry.timestamp, - }); - } let ranges = edit .ranges .into_iter() @@ -2474,7 +2571,7 @@ impl From for EditOperation { local: edit.local_timestamp, lamport: edit.lamport_timestamp, }, - version, + version: edit.version.into(), ranges, new_text: edit.new_text, } @@ -2673,6 +2770,7 @@ mod tests { .collect::(); cx.add_model(|cx| { let mut buffer = Buffer::new(0, reference_string.as_str(), cx); + buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); let mut buffer_versions = Vec::new(); log::info!( "buffer text {:?}, version: {:?}", @@ -2695,6 +2793,11 @@ mod tests { if rng.gen_bool(0.25) { buffer.randomly_undo_redo(rng, cx); reference_string = buffer.text(); + log::info!( + "buffer text {:?}, version: {:?}", + buffer.text(), + buffer.version() + ); } let range = buffer.random_byte_range(0, rng); @@ -3258,35 +3361,36 @@ mod tests { fn test_undo_redo(cx: &mut gpui::MutableAppContext) { cx.add_model(|cx| { let mut buffer = Buffer::new(0, "1234", cx); + // Set group interval to zero so as to not group edits in the undo stack. + buffer.history.group_interval = Duration::from_secs(0); buffer.edit(vec![1..1], "abx", cx); buffer.edit(vec![3..4], "yzef", cx); buffer.edit(vec![3..5], "cd", cx); assert_eq!(buffer.text(), "1abcdef234"); - let edit1 = buffer.operations[0].clone(); - let edit2 = buffer.operations[1].clone(); - let edit3 = buffer.operations[2].clone(); + let transactions = buffer.history.undo_stack.clone(); + assert_eq!(transactions.len(), 3); - buffer.undo_or_redo(edit1.edit_id().unwrap(), cx).unwrap(); + buffer.undo_or_redo(transactions[0].clone(), cx).unwrap(); assert_eq!(buffer.text(), "1cdef234"); - buffer.undo_or_redo(edit1.edit_id().unwrap(), cx).unwrap(); + buffer.undo_or_redo(transactions[0].clone(), cx).unwrap(); assert_eq!(buffer.text(), "1abcdef234"); - buffer.undo_or_redo(edit2.edit_id().unwrap(), cx).unwrap(); + buffer.undo_or_redo(transactions[1].clone(), cx).unwrap(); assert_eq!(buffer.text(), "1abcdx234"); - buffer.undo_or_redo(edit3.edit_id().unwrap(), cx).unwrap(); + buffer.undo_or_redo(transactions[2].clone(), cx).unwrap(); assert_eq!(buffer.text(), "1abx234"); - buffer.undo_or_redo(edit2.edit_id().unwrap(), cx).unwrap(); + buffer.undo_or_redo(transactions[1].clone(), cx).unwrap(); assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(edit3.edit_id().unwrap(), cx).unwrap(); + buffer.undo_or_redo(transactions[2].clone(), cx).unwrap(); assert_eq!(buffer.text(), "1abcdef234"); - buffer.undo_or_redo(edit3.edit_id().unwrap(), cx).unwrap(); + buffer.undo_or_redo(transactions[2].clone(), cx).unwrap(); assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(edit1.edit_id().unwrap(), cx).unwrap(); + buffer.undo_or_redo(transactions[0].clone(), cx).unwrap(); assert_eq!(buffer.text(), "1yzef234"); - buffer.undo_or_redo(edit2.edit_id().unwrap(), cx).unwrap(); + buffer.undo_or_redo(transactions[1].clone(), cx).unwrap(); assert_eq!(buffer.text(), "1234"); buffer @@ -3320,7 +3424,7 @@ mod tests { assert_eq!(buffer.text(), "12cde6"); assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); - now += UNDO_GROUP_INTERVAL + Duration::from_millis(1); + now += buffer.history.group_interval + 
Duration::from_millis(1); buffer.start_transaction_at(Some(set_id), now, cx).unwrap(); buffer .update_selection_set( @@ -3432,8 +3536,11 @@ mod tests { let mut network = Network::new(StdRng::seed_from_u64(seed)); for i in 0..peers { - let buffer = cx.add_model(|cx| Buffer::new(i as ReplicaId, base_text.as_str(), cx)); - + let buffer = cx.add_model(|cx| { + let mut buf = Buffer::new(i as ReplicaId, base_text.as_str(), cx); + buf.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); + buf + }); buffers.push(buffer); replica_ids.push(i as u16); network.add_peer(i as u16); @@ -3761,9 +3868,13 @@ mod tests { pub fn randomly_undo_redo(&mut self, rng: &mut impl Rng, cx: &mut ModelContext) { for _ in 0..rng.gen_range(1..=5) { - if let Some(edit_id) = self.history.ops.keys().choose(rng).copied() { - log::info!("undoing buffer {} operation {:?}", self.replica_id, edit_id); - self.undo_or_redo(edit_id, cx).unwrap(); + if let Some(transaction) = self.history.undo_stack.choose(rng).cloned() { + log::info!( + "undoing buffer {} transaction {:?}", + self.replica_id, + transaction + ); + self.undo_or_redo(transaction, cx).unwrap(); } } } @@ -3841,15 +3952,4 @@ mod tests { }) } } - - impl Operation { - fn edit_id(&self) -> Option { - match self { - Operation::Edit(edit) => Some(edit.timestamp.local()), - Operation::Undo { undo, .. } => Some(undo.edit_id), - Operation::UpdateSelections { .. } => None, - Operation::SetActiveSelections { .. } => None, - } - } - } } diff --git a/zed/src/editor/buffer/rope.rs b/zed/src/editor/buffer/rope.rs index 268924c158..4c1a3ed56d 100644 --- a/zed/src/editor/buffer/rope.rs +++ b/zed/src/editor/buffer/rope.rs @@ -128,10 +128,10 @@ impl Rope { pub fn to_point(&self, offset: usize) -> Point { assert!(offset <= self.summary().bytes); - let mut cursor = self.chunks.cursor::(); + let mut cursor = self.chunks.cursor::(); cursor.seek(&offset, Bias::Left, &()); - let overshoot = offset - cursor.start().bytes; - cursor.start().lines + let overshoot = offset - cursor.seek_start(); + *cursor.sum_start() + cursor .item() .map_or(Point::zero(), |chunk| chunk.to_point(overshoot)) @@ -139,17 +139,17 @@ impl Rope { pub fn to_offset(&self, point: Point) -> usize { assert!(point <= self.summary().lines); - let mut cursor = self.chunks.cursor::(); + let mut cursor = self.chunks.cursor::(); cursor.seek(&point, Bias::Left, &()); - let overshoot = point - cursor.start().lines; - cursor.start().bytes + cursor.item().map_or(0, |chunk| chunk.to_offset(overshoot)) + let overshoot = point - cursor.seek_start(); + cursor.sum_start() + cursor.item().map_or(0, |chunk| chunk.to_offset(overshoot)) } pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize { - let mut cursor = self.chunks.cursor::(); + let mut cursor = self.chunks.cursor::(); cursor.seek(&offset, Bias::Left, &()); if let Some(chunk) = cursor.item() { - let mut ix = offset - cursor.start(); + let mut ix = offset - cursor.seek_start(); while !chunk.0.is_char_boundary(ix) { match bias { Bias::Left => { @@ -169,11 +169,11 @@ impl Rope { } pub fn clip_point(&self, point: Point, bias: Bias) -> Point { - let mut cursor = self.chunks.cursor::(); + let mut cursor = self.chunks.cursor::(); cursor.seek(&point, Bias::Right, &()); if let Some(chunk) = cursor.item() { - let overshoot = point - cursor.start(); - *cursor.start() + chunk.clip_point(overshoot, bias) + let overshoot = point - cursor.seek_start(); + *cursor.seek_start() + chunk.clip_point(overshoot, bias) } else { self.summary().lines } @@ -190,7 +190,7 @@ impl<'a> 
From<&'a str> for Rope { pub struct Cursor<'a> { rope: &'a Rope, - chunks: sum_tree::Cursor<'a, Chunk, usize, usize>, + chunks: sum_tree::Cursor<'a, Chunk, usize, ()>, offset: usize, } @@ -222,18 +222,18 @@ impl<'a> Cursor<'a> { let mut slice = Rope::new(); if let Some(start_chunk) = self.chunks.item() { - let start_ix = self.offset - self.chunks.start(); - let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); + let start_ix = self.offset - self.chunks.seek_start(); + let end_ix = cmp::min(end_offset, self.chunks.seek_end(&())) - self.chunks.seek_start(); slice.push(&start_chunk.0[start_ix..end_ix]); } - if end_offset > self.chunks.end(&()) { + if end_offset > self.chunks.seek_end(&()) { self.chunks.next(&()); slice.append(Rope { chunks: self.chunks.slice(&end_offset, Bias::Right, &()), }); if let Some(end_chunk) = self.chunks.item() { - let end_ix = end_offset - self.chunks.start(); + let end_ix = end_offset - self.chunks.seek_start(); slice.push(&end_chunk.0[..end_ix]); } } @@ -247,16 +247,16 @@ impl<'a> Cursor<'a> { let mut summary = TextSummary::default(); if let Some(start_chunk) = self.chunks.item() { - let start_ix = self.offset - self.chunks.start(); - let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); + let start_ix = self.offset - self.chunks.seek_start(); + let end_ix = cmp::min(end_offset, self.chunks.seek_end(&())) - self.chunks.seek_start(); summary = TextSummary::from(&start_chunk.0[start_ix..end_ix]); } - if end_offset > self.chunks.end(&()) { + if end_offset > self.chunks.seek_end(&()) { self.chunks.next(&()); summary += &self.chunks.summary(&end_offset, Bias::Right, &()); if let Some(end_chunk) = self.chunks.item() { - let end_ix = end_offset - self.chunks.start(); + let end_ix = end_offset - self.chunks.seek_start(); summary += TextSummary::from(&end_chunk.0[..end_ix]); } } @@ -274,7 +274,7 @@ impl<'a> Cursor<'a> { } pub struct Chunks<'a> { - chunks: sum_tree::Cursor<'a, Chunk, usize, usize>, + chunks: sum_tree::Cursor<'a, Chunk, usize, ()>, range: Range, } @@ -286,11 +286,11 @@ impl<'a> Chunks<'a> { } pub fn offset(&self) -> usize { - self.range.start.max(*self.chunks.start()) + self.range.start.max(*self.chunks.seek_start()) } pub fn seek(&mut self, offset: usize) { - if offset >= self.chunks.end(&()) { + if offset >= self.chunks.seek_end(&()) { self.chunks.seek_forward(&offset, Bias::Right, &()); } else { self.chunks.seek(&offset, Bias::Right, &()); @@ -300,10 +300,10 @@ impl<'a> Chunks<'a> { pub fn peek(&self) -> Option<&'a str> { if let Some(chunk) = self.chunks.item() { - let offset = *self.chunks.start(); + let offset = *self.chunks.seek_start(); if self.range.end > offset { - let start = self.range.start.saturating_sub(*self.chunks.start()); - let end = self.range.end - self.chunks.start(); + let start = self.range.start.saturating_sub(*self.chunks.seek_start()); + let end = self.range.end - self.chunks.seek_start(); return Some(&chunk.0[start..chunk.0.len().min(end)]); } } diff --git a/zed/src/editor/display_map/fold_map.rs b/zed/src/editor/display_map/fold_map.rs index 8e94bf1eb2..227553a1b6 100644 --- a/zed/src/editor/display_map/fold_map.rs +++ b/zed/src/editor/display_map/fold_map.rs @@ -210,20 +210,20 @@ impl FoldMap { let buffer = self.buffer.read(cx); let offset = offset.to_offset(buffer); let transforms = self.sync(cx); - let mut cursor = transforms.cursor::(); + let mut cursor = transforms.cursor::(); cursor.seek(&offset, Bias::Right, &()); cursor.item().map_or(false, |t| t.display_text.is_some()) } 
pub fn is_line_folded(&self, display_row: u32, cx: &AppContext) -> bool { let transforms = self.sync(cx); - let mut cursor = transforms.cursor::(); + let mut cursor = transforms.cursor::(); cursor.seek(&DisplayPoint::new(display_row, 0), Bias::Right, &()); while let Some(transform) = cursor.item() { if transform.display_text.is_some() { return true; } - if cursor.end(&()).row() == display_row { + if cursor.seek_end(&()).row() == display_row { cursor.next(&()) } else { break; @@ -242,20 +242,20 @@ impl FoldMap { pub fn to_buffer_point(&self, display_point: DisplayPoint, cx: &AppContext) -> Point { let transforms = self.sync(cx); - let mut cursor = transforms.cursor::(); + let mut cursor = transforms.cursor::(); cursor.seek(&display_point, Bias::Right, &()); - let overshoot = display_point.0 - cursor.start().display.lines; - cursor.start().buffer.lines + overshoot + let overshoot = display_point.0 - cursor.seek_start().0; + *cursor.sum_start() + overshoot } pub fn to_display_point(&self, point: Point, cx: &AppContext) -> DisplayPoint { let transforms = self.sync(cx); - let mut cursor = transforms.cursor::(); + let mut cursor = transforms.cursor::(); cursor.seek(&point, Bias::Right, &()); - let overshoot = point - cursor.start().buffer.lines; + let overshoot = point - cursor.seek_start(); DisplayPoint(cmp::min( - cursor.start().display.lines + overshoot, - cursor.end(&()).display.lines, + cursor.sum_start().0 + overshoot, + cursor.end(&()).0, )) } @@ -275,20 +275,20 @@ impl FoldMap { let mut new_transforms = SumTree::new(); let mut transforms = self.transforms.lock(); - let mut cursor = transforms.cursor::(); + let mut cursor = transforms.cursor::(); cursor.seek(&0, Bias::Right, &()); while let Some(mut edit) = edits.next() { new_transforms.push_tree(cursor.slice(&edit.old_range.start, Bias::Left, &()), &()); - edit.new_range.start -= edit.old_range.start - cursor.start(); - edit.old_range.start = *cursor.start(); + edit.new_range.start -= edit.old_range.start - cursor.seek_start(); + edit.old_range.start = *cursor.seek_start(); cursor.seek(&edit.old_range.end, Bias::Right, &()); cursor.next(&()); let mut delta = edit.delta(); loop { - edit.old_range.end = *cursor.start(); + edit.old_range.end = *cursor.seek_start(); if let Some(next_edit) = edits.peek() { if next_edit.old_range.start > edit.old_range.end { @@ -443,10 +443,10 @@ impl FoldMapSnapshot { } pub fn chunks_at(&self, offset: DisplayOffset) -> Chunks { - let mut transform_cursor = self.transforms.cursor::(); + let mut transform_cursor = self.transforms.cursor::(); transform_cursor.seek(&offset, Bias::Right, &()); - let overshoot = offset.0 - transform_cursor.start().display.bytes; - let buffer_offset = transform_cursor.start().buffer.bytes + overshoot; + let overshoot = offset.0 - transform_cursor.seek_start().0; + let buffer_offset = transform_cursor.sum_start() + overshoot; Chunks { transform_cursor, buffer_offset, @@ -455,15 +455,15 @@ impl FoldMapSnapshot { } pub fn highlighted_chunks(&mut self, range: Range) -> HighlightedChunks { - let mut transform_cursor = self.transforms.cursor::(); + let mut transform_cursor = self.transforms.cursor::(); transform_cursor.seek(&range.end, Bias::Right, &()); - let overshoot = range.end.0 - transform_cursor.start().display.bytes; - let buffer_end = transform_cursor.start().buffer.bytes + overshoot; + let overshoot = range.end.0 - transform_cursor.seek_start().0; + let buffer_end = transform_cursor.sum_start() + overshoot; transform_cursor.seek(&range.start, Bias::Right, &()); - let overshoot = 
range.start.0 - transform_cursor.start().display.bytes; - let buffer_start = transform_cursor.start().buffer.bytes + overshoot; + let overshoot = range.start.0 - transform_cursor.seek_start().0; + let buffer_start = transform_cursor.sum_start() + overshoot; HighlightedChunks { transform_cursor, @@ -483,42 +483,41 @@ impl FoldMapSnapshot { pub fn to_display_offset(&self, point: DisplayPoint) -> DisplayOffset { let mut cursor = self.transforms.cursor::(); cursor.seek(&point, Bias::Right, &()); - let overshoot = point.0 - cursor.start().display.lines; - let mut offset = cursor.start().display.bytes; + let overshoot = point.0 - cursor.sum_start().display.lines; + let mut offset = cursor.sum_start().display.bytes; if !overshoot.is_zero() { let transform = cursor.item().expect("display point out of range"); assert!(transform.display_text.is_none()); let end_buffer_offset = self .buffer - .to_offset(cursor.start().buffer.lines + overshoot); - offset += end_buffer_offset - cursor.start().buffer.bytes; + .to_offset(cursor.sum_start().buffer.lines + overshoot); + offset += end_buffer_offset - cursor.sum_start().buffer.bytes; } DisplayOffset(offset) } pub fn to_buffer_offset(&self, point: DisplayPoint) -> usize { - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(); cursor.seek(&point, Bias::Right, &()); - let overshoot = point.0 - cursor.start().display.lines; - self.buffer - .to_offset(cursor.start().buffer.lines + overshoot) + let overshoot = point.0 - cursor.seek_start().0; + self.buffer.to_offset(*cursor.sum_start() + overshoot) } #[cfg(test)] pub fn clip_offset(&self, offset: DisplayOffset, bias: Bias) -> DisplayOffset { - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(); cursor.seek(&offset, Bias::Right, &()); if let Some(transform) = cursor.item() { - let transform_start = cursor.start().display.bytes; + let transform_start = cursor.seek_start().0; if transform.display_text.is_some() { if offset.0 == transform_start || matches!(bias, Bias::Left) { DisplayOffset(transform_start) } else { - DisplayOffset(cursor.end(&()).display.bytes) + DisplayOffset(cursor.seek_end(&()).0) } } else { let overshoot = offset.0 - transform_start; - let buffer_offset = cursor.start().buffer.bytes + overshoot; + let buffer_offset = cursor.sum_start() + overshoot; let clipped_buffer_offset = self.buffer.clip_offset(buffer_offset, bias); DisplayOffset( (offset.0 as isize + (clipped_buffer_offset as isize - buffer_offset as isize)) @@ -531,19 +530,19 @@ impl FoldMapSnapshot { } pub fn clip_point(&self, point: DisplayPoint, bias: Bias) -> DisplayPoint { - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(); cursor.seek(&point, Bias::Right, &()); if let Some(transform) = cursor.item() { - let transform_start = cursor.start().display.lines; + let transform_start = cursor.seek_start().0; if transform.display_text.is_some() { if point.0 == transform_start || matches!(bias, Bias::Left) { DisplayPoint(transform_start) } else { - DisplayPoint(cursor.end(&()).display.lines) + DisplayPoint(cursor.seek_end(&()).0) } } else { let overshoot = point.0 - transform_start; - let buffer_position = cursor.start().buffer.lines + overshoot; + let buffer_position = *cursor.sum_start() + overshoot; let clipped_buffer_position = self.buffer.clip_point(buffer_position, bias); DisplayPoint::new( point.row(), @@ -681,7 +680,7 @@ impl<'a> sum_tree::Dimension<'a, FoldSummary> for usize { } pub struct BufferRows<'a> { - cursor: 
Cursor<'a, Transform, DisplayPoint, TransformSummary>, + cursor: Cursor<'a, Transform, DisplayPoint, Point>, display_point: Point, } @@ -689,7 +688,7 @@ impl<'a> Iterator for BufferRows<'a> { type Item = u32; fn next(&mut self) -> Option { - while self.display_point > self.cursor.end(&()).display.lines { + while self.display_point > self.cursor.seek_end(&()).0 { self.cursor.next(&()); if self.cursor.item().is_none() { // TODO: Return a bool from next? @@ -698,8 +697,8 @@ impl<'a> Iterator for BufferRows<'a> { } if self.cursor.item().is_some() { - let overshoot = self.display_point - self.cursor.start().display.lines; - let buffer_point = self.cursor.start().buffer.lines + overshoot; + let overshoot = self.display_point - self.cursor.seek_start().0; + let buffer_point = *self.cursor.sum_start() + overshoot; self.display_point.row += 1; Some(buffer_point.row) } else { @@ -709,7 +708,7 @@ impl<'a> Iterator for BufferRows<'a> { } pub struct Chunks<'a> { - transform_cursor: Cursor<'a, Transform, DisplayOffset, TransformSummary>, + transform_cursor: Cursor<'a, Transform, DisplayOffset, usize>, buffer_chunks: buffer::Chunks<'a>, buffer_offset: usize, } @@ -730,7 +729,7 @@ impl<'a> Iterator for Chunks<'a> { self.buffer_offset += transform.summary.buffer.bytes; self.buffer_chunks.seek(self.buffer_offset); - while self.buffer_offset >= self.transform_cursor.end(&()).buffer.bytes + while self.buffer_offset >= self.transform_cursor.end(&()) && self.transform_cursor.item().is_some() { self.transform_cursor.next(&()); @@ -745,7 +744,7 @@ impl<'a> Iterator for Chunks<'a> { chunk = &chunk[offset_in_chunk..]; // Truncate the chunk so that it ends at the next fold. - let region_end = self.transform_cursor.end(&()).buffer.bytes - self.buffer_offset; + let region_end = self.transform_cursor.end(&()) - self.buffer_offset; if chunk.len() >= region_end { chunk = &chunk[0..region_end]; self.transform_cursor.next(&()); @@ -762,7 +761,7 @@ impl<'a> Iterator for Chunks<'a> { } pub struct HighlightedChunks<'a> { - transform_cursor: Cursor<'a, Transform, DisplayOffset, TransformSummary>, + transform_cursor: Cursor<'a, Transform, DisplayOffset, usize>, buffer_chunks: buffer::HighlightedChunks<'a>, buffer_chunk: Option<(usize, &'a str, StyleId)>, buffer_offset: usize, @@ -785,7 +784,7 @@ impl<'a> Iterator for HighlightedChunks<'a> { self.buffer_offset += transform.summary.buffer.bytes; self.buffer_chunks.seek(self.buffer_offset); - while self.buffer_offset >= self.transform_cursor.end(&()).buffer.bytes + while self.buffer_offset >= self.transform_cursor.end(&()) && self.transform_cursor.item().is_some() { self.transform_cursor.next(&()); @@ -809,7 +808,7 @@ impl<'a> Iterator for HighlightedChunks<'a> { chunk = &chunk[offset_in_chunk..]; // Truncate the chunk so that it ends at the next fold. 
- let region_end = self.transform_cursor.end(&()).buffer.bytes - self.buffer_offset; + let region_end = self.transform_cursor.end(&()) - self.buffer_offset; if chunk.len() >= region_end { chunk = &chunk[0..region_end]; self.transform_cursor.next(&()); diff --git a/zed/src/file_finder.rs b/zed/src/file_finder.rs index 060fd3be50..66d99fc542 100644 --- a/zed/src/file_finder.rs +++ b/zed/src/file_finder.rs @@ -655,7 +655,7 @@ mod tests { finder.read_with(&cx, |f, _| assert_eq!(f.matches.len(), 0)); } - #[gpui::test] + #[gpui::test(retries = 5)] async fn test_multiple_matches_with_same_relative_path(mut cx: gpui::TestAppContext) { let tmp_dir = temp_tree(json!({ "dir1": { "a.txt": "" }, diff --git a/zed/src/menus.rs b/zed/src/menus.rs index 438071c2de..9f22c8ede9 100644 --- a/zed/src/menus.rs +++ b/zed/src/menus.rs @@ -41,7 +41,7 @@ pub fn menus(state: AppState) -> Vec> { name: "New", keystroke: Some("cmd-n"), action: "workspace:new_file", - arg: None, + arg: Some(Box::new(state.clone())), }, MenuItem::Separator, MenuItem::Action { diff --git a/zed/src/sum_tree.rs b/zed/src/sum_tree.rs index 808f572450..3e6c30a6f5 100644 --- a/zed/src/sum_tree.rs +++ b/zed/src/sum_tree.rs @@ -37,18 +37,6 @@ impl<'a, T: Summary> Dimension<'a, T> for () { fn add_summary(&mut self, _: &'a T, _: &T::Context) {} } -impl<'a, S, D1, D2> Dimension<'a, S> for (D1, D2) -where - S: Summary, - D1: Dimension<'a, S>, - D2: Dimension<'a, S>, -{ - fn add_summary(&mut self, summary: &'a S, cx: &S::Context) { - self.0.add_summary(summary, cx); - self.1.add_summary(summary, cx); - } -} - pub trait SeekDimension<'a, T: Summary>: Dimension<'a, T> { fn cmp(&self, other: &Self, cx: &T::Context) -> Ordering; } @@ -671,7 +659,7 @@ mod tests { cursor.seek(&Count(pos), Bias::Right, &()); for i in 0..10 { - assert_eq!(cursor.start().0, pos); + assert_eq!(cursor.sum_start().0, pos); if pos > 0 { assert_eq!(cursor.prev_item().unwrap(), &reference_items[pos - 1]); @@ -730,7 +718,7 @@ mod tests { ); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), None); - assert_eq!(cursor.start(), &Sum(0)); + assert_eq!(cursor.sum_start(), &Sum(0)); // Single-element tree let mut tree = SumTree::::new(); @@ -742,23 +730,23 @@ mod tests { ); assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.prev_item(), None); - assert_eq!(cursor.start(), &Sum(0)); + assert_eq!(cursor.sum_start(), &Sum(0)); cursor.next(&()); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&1)); - assert_eq!(cursor.start(), &Sum(1)); + assert_eq!(cursor.sum_start(), &Sum(1)); cursor.prev(&()); assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.prev_item(), None); - assert_eq!(cursor.start(), &Sum(0)); + assert_eq!(cursor.sum_start(), &Sum(0)); let mut cursor = tree.cursor::(); assert_eq!(cursor.slice(&Count(1), Bias::Right, &()).items(&()), [1]); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&1)); - assert_eq!(cursor.start(), &Sum(1)); + assert_eq!(cursor.sum_start(), &Sum(1)); cursor.seek(&Count(0), Bias::Right, &()); assert_eq!( @@ -769,7 +757,7 @@ mod tests { ); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&1)); - assert_eq!(cursor.start(), &Sum(1)); + assert_eq!(cursor.sum_start(), &Sum(1)); // Multiple-element tree let mut tree = SumTree::new(); @@ -779,68 +767,68 @@ mod tests { assert_eq!(cursor.slice(&Count(2), Bias::Right, &()).items(&()), [1, 2]); assert_eq!(cursor.item(), Some(&3)); assert_eq!(cursor.prev_item(), Some(&2)); - assert_eq!(cursor.start(), &Sum(3)); + 
assert_eq!(cursor.sum_start(), &Sum(3)); cursor.next(&()); assert_eq!(cursor.item(), Some(&4)); assert_eq!(cursor.prev_item(), Some(&3)); - assert_eq!(cursor.start(), &Sum(6)); + assert_eq!(cursor.sum_start(), &Sum(6)); cursor.next(&()); assert_eq!(cursor.item(), Some(&5)); assert_eq!(cursor.prev_item(), Some(&4)); - assert_eq!(cursor.start(), &Sum(10)); + assert_eq!(cursor.sum_start(), &Sum(10)); cursor.next(&()); assert_eq!(cursor.item(), Some(&6)); assert_eq!(cursor.prev_item(), Some(&5)); - assert_eq!(cursor.start(), &Sum(15)); + assert_eq!(cursor.sum_start(), &Sum(15)); cursor.next(&()); cursor.next(&()); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&6)); - assert_eq!(cursor.start(), &Sum(21)); + assert_eq!(cursor.sum_start(), &Sum(21)); cursor.prev(&()); assert_eq!(cursor.item(), Some(&6)); assert_eq!(cursor.prev_item(), Some(&5)); - assert_eq!(cursor.start(), &Sum(15)); + assert_eq!(cursor.sum_start(), &Sum(15)); cursor.prev(&()); assert_eq!(cursor.item(), Some(&5)); assert_eq!(cursor.prev_item(), Some(&4)); - assert_eq!(cursor.start(), &Sum(10)); + assert_eq!(cursor.sum_start(), &Sum(10)); cursor.prev(&()); assert_eq!(cursor.item(), Some(&4)); assert_eq!(cursor.prev_item(), Some(&3)); - assert_eq!(cursor.start(), &Sum(6)); + assert_eq!(cursor.sum_start(), &Sum(6)); cursor.prev(&()); assert_eq!(cursor.item(), Some(&3)); assert_eq!(cursor.prev_item(), Some(&2)); - assert_eq!(cursor.start(), &Sum(3)); + assert_eq!(cursor.sum_start(), &Sum(3)); cursor.prev(&()); assert_eq!(cursor.item(), Some(&2)); assert_eq!(cursor.prev_item(), Some(&1)); - assert_eq!(cursor.start(), &Sum(1)); + assert_eq!(cursor.sum_start(), &Sum(1)); cursor.prev(&()); assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.prev_item(), None); - assert_eq!(cursor.start(), &Sum(0)); + assert_eq!(cursor.sum_start(), &Sum(0)); cursor.prev(&()); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), None); - assert_eq!(cursor.start(), &Sum(0)); + assert_eq!(cursor.sum_start(), &Sum(0)); cursor.next(&()); assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.prev_item(), None); - assert_eq!(cursor.start(), &Sum(0)); + assert_eq!(cursor.sum_start(), &Sum(0)); let mut cursor = tree.cursor::(); assert_eq!( @@ -851,7 +839,7 @@ mod tests { ); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&6)); - assert_eq!(cursor.start(), &Sum(21)); + assert_eq!(cursor.sum_start(), &Sum(21)); cursor.seek(&Count(3), Bias::Right, &()); assert_eq!( @@ -862,7 +850,7 @@ mod tests { ); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&6)); - assert_eq!(cursor.start(), &Sum(21)); + assert_eq!(cursor.sum_start(), &Sum(21)); // Seeking can bias left or right cursor.seek(&Count(1), Bias::Left, &()); diff --git a/zed/src/sum_tree/cursor.rs b/zed/src/sum_tree/cursor.rs index 8374546a20..c4cc68e778 100644 --- a/zed/src/sum_tree/cursor.rs +++ b/zed/src/sum_tree/cursor.rs @@ -45,17 +45,31 @@ where self.sum_dimension = U::default(); } - pub fn start(&self) -> &U { + pub fn seek_start(&self) -> &S { + &self.seek_dimension + } + + pub fn seek_end(&self, cx: &::Context) -> S { + if let Some(item_summary) = self.item_summary() { + let mut end = self.seek_start().clone(); + end.add_summary(item_summary, cx); + end + } else { + self.seek_start().clone() + } + } + + pub fn sum_start(&self) -> &U { &self.sum_dimension } pub fn end(&self, cx: &::Context) -> U { if let Some(item_summary) = self.item_summary() { - let mut end = self.start().clone(); + let mut end = self.sum_start().clone(); 
end.add_summary(item_summary, cx); end } else { - self.start().clone() + self.sum_start().clone() } } @@ -613,7 +627,7 @@ where } pub fn start(&self) -> &U { - self.cursor.start() + self.cursor.sum_start() } pub fn item(&self) -> Option<&'a T> { diff --git a/zed/src/time.rs b/zed/src/time.rs index 00f4e54c18..8b665e8814 100644 --- a/zed/src/time.rs +++ b/zed/src/time.rs @@ -61,6 +61,31 @@ impl<'a> AddAssign<&'a Local> for Local { #[derive(Clone, Default, Hash, Eq, PartialEq)] pub struct Global(SmallVec<[Local; 3]>); +impl From> for Global { + fn from(message: Vec) -> Self { + let mut version = Self::new(); + for entry in message { + version.observe(Local { + replica_id: entry.replica_id as ReplicaId, + value: entry.timestamp, + }); + } + version + } +} + +impl<'a> From<&'a Global> for Vec { + fn from(version: &'a Global) -> Self { + version + .iter() + .map(|entry| zed_rpc::proto::VectorClockEntry { + replica_id: entry.replica_id as u32, + timestamp: entry.value, + }) + .collect() + } +} + impl Global { pub fn new() -> Self { Self::default() diff --git a/zed/src/workspace.rs b/zed/src/workspace.rs index 9b74023ef1..9d86a7288c 100644 --- a/zed/src/workspace.rs +++ b/zed/src/workspace.rs @@ -30,6 +30,7 @@ use std::{ pub fn init(cx: &mut MutableAppContext) { cx.add_global_action("workspace:open", open); cx.add_global_action("workspace:open_paths", open_paths); + cx.add_global_action("workspace:new_file", open_new); cx.add_action("workspace:save", Workspace::save_active_item); cx.add_action("workspace:debug_elements", Workspace::debug_elements); cx.add_action("workspace:new_file", Workspace::open_new_file); @@ -100,6 +101,19 @@ fn open_paths(params: &OpenParams, cx: &mut MutableAppContext) { }); } +fn open_new(app_state: &AppState, cx: &mut MutableAppContext) { + cx.add_window(|cx| { + let mut view = Workspace::new( + app_state.settings.clone(), + app_state.language_registry.clone(), + app_state.rpc.clone(), + cx, + ); + view.open_new_file(&app_state, cx); + view + }); +} + pub trait Item: Entity + Sized { type View: ItemView; @@ -479,7 +493,7 @@ impl Workspace { } } - pub fn open_new_file(&mut self, _: &(), cx: &mut ViewContext) { + pub fn open_new_file(&mut self, _: &AppState, cx: &mut ViewContext) { let buffer = cx.add_model(|cx| Buffer::new(0, "", cx)); let buffer_view = cx.add_view(|cx| Editor::for_buffer(buffer.clone(), self.settings.clone(), cx)); @@ -1173,9 +1187,9 @@ mod tests { let app_state = cx.read(build_app_state); let (_, workspace) = cx.add_window(|cx| { let mut workspace = Workspace::new( - app_state.settings, - app_state.language_registry, - app_state.rpc, + app_state.settings.clone(), + app_state.language_registry.clone(), + app_state.rpc.clone(), cx, ); workspace.add_worktree(dir.path(), cx); @@ -1194,7 +1208,7 @@ mod tests { // Create a new untitled buffer let editor = workspace.update(&mut cx, |workspace, cx| { - workspace.open_new_file(&(), cx); + workspace.open_new_file(&app_state, cx); workspace .active_item(cx) .unwrap() @@ -1202,6 +1216,7 @@ mod tests { .downcast::() .unwrap() }); + editor.update(&mut cx, |editor, cx| { assert!(!editor.is_dirty(cx.as_ref())); assert_eq!(editor.title(cx.as_ref()), "untitled"); @@ -1244,7 +1259,7 @@ mod tests { // Open the same newly-created file in another pane item. The new editor should reuse // the same buffer. 
workspace.update(&mut cx, |workspace, cx| { - workspace.open_new_file(&(), cx); + workspace.open_new_file(&app_state, cx); workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx); assert!(workspace .open_entry((tree.id(), Path::new("the-new-name").into()), cx) @@ -1263,6 +1278,25 @@ mod tests { }) } + #[gpui::test] + async fn test_new_empty_workspace(mut cx: gpui::TestAppContext) { + cx.update(init); + + let app_state = cx.read(build_app_state); + cx.dispatch_global_action("workspace:new_file", app_state); + let window_id = *cx.window_ids().first().unwrap(); + let workspace = cx.root_view::(window_id).unwrap(); + workspace.update(&mut cx, |workspace, cx| { + let editor = workspace + .active_item(cx) + .unwrap() + .to_any() + .downcast::() + .unwrap(); + assert!(editor.read(cx).text(cx.as_ref()).is_empty()); + }); + } + #[gpui::test] async fn test_pane_actions(mut cx: gpui::TestAppContext) { cx.update(|cx| pane::init(cx)); diff --git a/zed/src/worktree.rs b/zed/src/worktree.rs index 1d8702d0e7..c20f25b2a0 100644 --- a/zed/src/worktree.rs +++ b/zed/src/worktree.rs @@ -529,7 +529,7 @@ impl LocalWorktree { let file = fs::File::create(&abs_path)?; let mut writer = io::BufWriter::with_capacity(buffer_size, &file); for chunk in text.chunks() { - writer.write(chunk.as_bytes())?; + writer.write_all(chunk.as_bytes())?; } writer.flush()?; @@ -1824,8 +1824,8 @@ impl WorktreeHandle for ModelHandle { } pub enum FileIter<'a> { - All(Cursor<'a, Entry, FileCount, FileCount>), - Visible(Cursor<'a, Entry, VisibleFileCount, VisibleFileCount>), + All(Cursor<'a, Entry, FileCount, ()>), + Visible(Cursor<'a, Entry, VisibleFileCount, ()>), } impl<'a> FileIter<'a> { @@ -1844,11 +1844,11 @@ impl<'a> FileIter<'a> { fn next_internal(&mut self) { match self { Self::All(cursor) => { - let ix = *cursor.start(); + let ix = *cursor.seek_start(); cursor.seek_forward(&FileCount(ix.0 + 1), Bias::Right, &()); } Self::Visible(cursor) => { - let ix = *cursor.start(); + let ix = *cursor.seek_start(); cursor.seek_forward(&VisibleFileCount(ix.0 + 1), Bias::Right, &()); } }