Merge pull request #272 from zed-industries/fold-map-edits

Don't rely on `Buffer::edits_since` to keep `FoldMap` up-to-date
Antonio Scandurra 2021-12-01 16:42:34 +01:00 committed by GitHub
commit 853b636435
13 changed files with 650 additions and 478 deletions
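
The core change: instead of `FoldMap` asking the buffer for `edits_since` its last sync (and tracking version, parse, and diagnostic counters itself), `DisplayMap` now owns a `BufferSubscription` and hands each layer an explicit buffer snapshot plus the edits that produced it. Below is a minimal, self-contained sketch of that subscription pattern; the `Edit` and `Subscription` types here are simplified stand-ins for illustration, not the real `text` crate types.

```rust
use std::{cell::RefCell, mem, ops::Range};

#[derive(Clone, Debug, PartialEq)]
struct Edit {
    old: Range<usize>,
    new: Range<usize>,
}

#[derive(Default)]
struct Subscription {
    pending: RefCell<Vec<Edit>>,
}

impl Subscription {
    // The buffer pushes every edit it applies into each live subscription.
    fn push(&self, edit: Edit) {
        self.pending.borrow_mut().push(edit);
    }

    // A consumer drains exactly the edits accumulated since its last call, so
    // it never has to diff version vectors or track parse counts itself.
    fn consume(&self) -> Vec<Edit> {
        mem::take(&mut *self.pending.borrow_mut())
    }
}

fn main() {
    let subscription = Subscription::default();

    // An edit replaces bytes 2..4 with five new bytes (2..7 in the new text).
    subscription.push(Edit { old: 2..4, new: 2..7 });

    // A downstream consumer (think FoldMap::read) syncs from the drained edits.
    let edits = subscription.consume();
    assert_eq!(edits, vec![Edit { old: 2..4, new: 2..7 }]);

    // Nothing more to do until the buffer is edited again.
    assert!(subscription.consume().is_empty());
}
```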

Cargo.lock (generated): 3 additions

@@ -4849,8 +4849,11 @@ dependencies = [
  "arrayvec 0.7.1",
  "clock",
  "collections",
+ "ctor",
+ "env_logger",
  "gpui",
  "log",
+ "parking_lot",
  "rand 0.8.3",
  "smallvec",
  "sum_tree",


@@ -1,6 +1,5 @@
 mod block_map;
 mod fold_map;
-mod patch;
 mod tab_map;
 mod wrap_map;
@@ -11,7 +10,7 @@ use gpui::{
     fonts::{FontId, HighlightStyle},
     AppContext, Entity, ModelContext, ModelHandle,
 };
-use language::{Anchor, Buffer, Point, ToOffset, ToPoint};
+use language::{Anchor, Buffer, Point, Subscription as BufferSubscription, ToOffset, ToPoint};
 use std::{
     collections::{HashMap, HashSet},
     ops::Range,
@@ -28,6 +27,7 @@ pub trait ToDisplayPoint {
 pub struct DisplayMap {
     buffer: ModelHandle<Buffer>,
+    buffer_subscription: BufferSubscription,
     fold_map: FoldMap,
     tab_map: TabMap,
     wrap_map: ModelHandle<WrapMap>,
@@ -47,13 +47,15 @@ impl DisplayMap {
         wrap_width: Option<f32>,
         cx: &mut ModelContext<Self>,
     ) -> Self {
-        let (fold_map, snapshot) = FoldMap::new(buffer.clone(), cx);
+        let buffer_subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
+        let (fold_map, snapshot) = FoldMap::new(buffer.read(cx).snapshot());
         let (tab_map, snapshot) = TabMap::new(snapshot, tab_size);
         let (wrap_map, snapshot) = WrapMap::new(snapshot, font_id, font_size, wrap_width, cx);
         let block_map = BlockMap::new(buffer.clone(), snapshot);
         cx.observe(&wrap_map, |_, _, cx| cx.notify()).detach();
         DisplayMap {
             buffer,
+            buffer_subscription,
             fold_map,
             tab_map,
             wrap_map,
@@ -62,7 +64,9 @@ impl DisplayMap {
     }
     pub fn snapshot(&self, cx: &mut ModelContext<Self>) -> DisplayMapSnapshot {
-        let (folds_snapshot, edits) = self.fold_map.read(cx);
+        let buffer_snapshot = self.buffer.read(cx).snapshot();
+        let edits = self.buffer_subscription.consume().into_inner();
+        let (folds_snapshot, edits) = self.fold_map.read(buffer_snapshot, edits);
         let (tabs_snapshot, edits) = self.tab_map.sync(folds_snapshot.clone(), edits);
         let (wraps_snapshot, edits) = self
             .wrap_map
@@ -83,13 +87,15 @@ impl DisplayMap {
         ranges: impl IntoIterator<Item = Range<T>>,
         cx: &mut ModelContext<Self>,
     ) {
-        let (mut fold_map, snapshot, edits) = self.fold_map.write(cx);
+        let snapshot = self.buffer.read(cx).snapshot();
+        let edits = self.buffer_subscription.consume().into_inner();
+        let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
         let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
         let (snapshot, edits) = self
             .wrap_map
             .update(cx, |map, cx| map.sync(snapshot, edits, cx));
         self.block_map.read(snapshot, edits, cx);
-        let (snapshot, edits) = fold_map.fold(ranges, cx);
+        let (snapshot, edits) = fold_map.fold(ranges);
         let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
         let (snapshot, edits) = self
             .wrap_map
@@ -102,13 +108,15 @@ impl DisplayMap {
         ranges: impl IntoIterator<Item = Range<T>>,
         cx: &mut ModelContext<Self>,
     ) {
-        let (mut fold_map, snapshot, edits) = self.fold_map.write(cx);
+        let snapshot = self.buffer.read(cx).snapshot();
+        let edits = self.buffer_subscription.consume().into_inner();
+        let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
         let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
         let (snapshot, edits) = self
             .wrap_map
             .update(cx, |map, cx| map.sync(snapshot, edits, cx));
         self.block_map.read(snapshot, edits, cx);
-        let (snapshot, edits) = fold_map.unfold(ranges, cx);
+        let (snapshot, edits) = fold_map.unfold(ranges);
         let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
         let (snapshot, edits) = self
             .wrap_map
@@ -125,7 +133,9 @@ impl DisplayMap {
         P: ToOffset + Clone,
         T: Into<Rope> + Clone,
     {
-        let (snapshot, edits) = self.fold_map.read(cx);
+        let snapshot = self.buffer.read(cx).snapshot();
+        let edits = self.buffer_subscription.consume().into_inner();
+        let (snapshot, edits) = self.fold_map.read(snapshot, edits);
         let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
         let (snapshot, edits) = self
             .wrap_map
@@ -143,7 +153,9 @@ impl DisplayMap {
     }
     pub fn remove_blocks(&mut self, ids: HashSet<BlockId>, cx: &mut ModelContext<Self>) {
-        let (snapshot, edits) = self.fold_map.read(cx);
+        let snapshot = self.buffer.read(cx).snapshot();
+        let edits = self.buffer_subscription.consume().into_inner();
+        let (snapshot, edits) = self.fold_map.read(snapshot, edits);
         let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
         let (snapshot, edits) = self
             .wrap_map
@@ -525,7 +537,7 @@ mod tests {
                 }
             }
             _ => {
-                buffer.update(&mut cx, |buffer, _| buffer.randomly_edit(&mut rng, 5));
+                buffer.update(&mut cx, |buffer, cx| buffer.randomly_edit(&mut rng, 5, cx));
            }
        }
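
Each layer of the display map exposes the same shape of API: `sync` (or `read`) takes the upstream snapshot together with the edits that produced it and returns edits in its own coordinate space for the next layer. A toy, ASCII-only sketch of that contract follows; the names are invented, and unlike the real maps it returns only edits, not a snapshot.

```rust
use std::ops::Range;

#[derive(Clone, Debug, PartialEq)]
struct Edit {
    old: Range<usize>,
    new: Range<usize>,
}

#[derive(Default)]
struct DoubleMap {
    text: String,
}

impl DoubleMap {
    // `sync` consumes the upstream text plus the edits that produced it,
    // rebuilds local state, and re-expresses the edits in this layer's
    // coordinates (every byte is doubled here, so offsets scale by two).
    fn sync(&mut self, upstream: &str, edits: Vec<Edit>) -> Vec<Edit> {
        self.text = upstream.chars().flat_map(|c| [c, c]).collect();
        edits
            .into_iter()
            .map(|edit| Edit {
                old: edit.old.start * 2..edit.old.end * 2,
                new: edit.new.start * 2..edit.new.end * 2,
            })
            .collect()
    }
}

fn main() {
    let mut map = DoubleMap::default();
    // Initial sync: the whole text counts as newly inserted.
    map.sync("abc", vec![Edit { old: 0..0, new: 0..3 }]);

    // Upstream replaced byte 1..2 with two bytes ("b" -> "BB"); the edit is
    // re-expressed in doubled coordinates for whatever layer sits below.
    let downstream_edits = map.sync("aBBc", vec![Edit { old: 1..2, new: 1..3 }]);
    assert_eq!(downstream_edits, vec![Edit { old: 2..4, new: 2..6 }]);
    assert_eq!(map.text, "aaBBBBcc");
}
```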


@ -1104,7 +1104,7 @@ mod tests {
let text = "aaa\nbbb\nccc\nddd"; let text = "aaa\nbbb\nccc\nddd";
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
let (fold_map, folds_snapshot) = FoldMap::new(buffer.clone(), cx); let (fold_map, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot());
let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1); let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1);
let (wrap_map, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, None, cx); let (wrap_map, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, None, cx);
let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone()); let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone());
@ -1228,11 +1228,13 @@ mod tests {
// Insert a line break, separating two block decorations into separate // Insert a line break, separating two block decorations into separate
// lines. // lines.
buffer.update(cx, |buffer, cx| { let (buffer_snapshot, buffer_edits) = buffer.update(cx, |buffer, cx| {
buffer.edit([Point::new(1, 1)..Point::new(1, 1)], "!!!\n", cx) let v0 = buffer.version();
buffer.edit([Point::new(1, 1)..Point::new(1, 1)], "!!!\n", cx);
(buffer.snapshot(), buffer.edits_since(&v0).collect())
}); });
let (folds_snapshot, fold_edits) = fold_map.read(cx); let (folds_snapshot, fold_edits) = fold_map.read(buffer_snapshot, buffer_edits);
let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits); let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits);
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
wrap_map.sync(tabs_snapshot, tab_edits, cx) wrap_map.sync(tabs_snapshot, tab_edits, cx)
@ -1255,7 +1257,7 @@ mod tests {
let text = "one two three\nfour five six\nseven eight"; let text = "one two three\nfour five six\nseven eight";
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
let (_, folds_snapshot) = FoldMap::new(buffer.clone(), cx); let (_, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot());
let (_, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1); let (_, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1);
let (_, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, Some(60.), cx); let (_, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, Some(60.), cx);
let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone()); let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone());
@ -1317,7 +1319,8 @@ mod tests {
log::info!("initial buffer text: {:?}", text); log::info!("initial buffer text: {:?}", text);
Buffer::new(0, text, cx) Buffer::new(0, text, cx)
}); });
let (fold_map, folds_snapshot) = FoldMap::new(buffer.clone(), cx); let mut buffer_snapshot = buffer.read(cx).snapshot();
let (fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone());
let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size); let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size);
let (wrap_map, wraps_snapshot) = let (wrap_map, wraps_snapshot) =
WrapMap::new(tabs_snapshot, font_id, font_size, wrap_width, cx); WrapMap::new(tabs_snapshot, font_id, font_size, wrap_width, cx);
@ -1325,6 +1328,7 @@ mod tests {
let mut expected_blocks = Vec::new(); let mut expected_blocks = Vec::new();
for _ in 0..operations { for _ in 0..operations {
let mut buffer_edits = Vec::new();
match rng.gen_range(0..=100) { match rng.gen_range(0..=100) {
0..=19 => { 0..=19 => {
let wrap_width = if rng.gen_bool(0.2) { let wrap_width = if rng.gen_bool(0.2) {
@ -1375,7 +1379,8 @@ mod tests {
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let (folds_snapshot, fold_edits) = fold_map.read(cx); let (folds_snapshot, fold_edits) =
fold_map.read(buffer_snapshot.clone(), vec![]);
let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits); let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits);
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
wrap_map.sync(tabs_snapshot, tab_edits, cx) wrap_map.sync(tabs_snapshot, tab_edits, cx)
@ -1396,7 +1401,8 @@ mod tests {
}) })
.collect(); .collect();
let (folds_snapshot, fold_edits) = fold_map.read(cx); let (folds_snapshot, fold_edits) =
fold_map.read(buffer_snapshot.clone(), vec![]);
let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits); let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits);
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
wrap_map.sync(tabs_snapshot, tab_edits, cx) wrap_map.sync(tabs_snapshot, tab_edits, cx)
@ -1405,14 +1411,18 @@ mod tests {
block_map.remove(block_ids_to_remove, cx); block_map.remove(block_ids_to_remove, cx);
} }
_ => { _ => {
buffer.update(cx, |buffer, _| { buffer.update(cx, |buffer, cx| {
buffer.randomly_edit(&mut rng, 1); let v0 = buffer.version();
let edit_count = rng.gen_range(1..=5);
buffer.randomly_edit(&mut rng, edit_count, cx);
log::info!("buffer text: {:?}", buffer.text()); log::info!("buffer text: {:?}", buffer.text());
buffer_edits.extend(buffer.edits_since(&v0));
buffer_snapshot = buffer.snapshot();
}); });
} }
} }
let (folds_snapshot, fold_edits) = fold_map.read(cx); let (folds_snapshot, fold_edits) = fold_map.read(buffer_snapshot.clone(), buffer_edits);
let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits); let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits);
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
wrap_map.sync(tabs_snapshot, tab_edits, cx) wrap_map.sync(tabs_snapshot, tab_edits, cx)


@ -1,9 +1,11 @@
use gpui::{AppContext, ModelHandle}; use language::{
use language::{Anchor, AnchorRangeExt, Buffer, Chunk, Point, PointUtf16, TextSummary, ToOffset}; Anchor, AnchorRangeExt, Chunk, Edit, Point, PointUtf16, Snapshot as BufferSnapshot,
TextSummary, ToOffset,
};
use parking_lot::Mutex; use parking_lot::Mutex;
use std::{ use std::{
cmp::{self, Ordering}, cmp::{self, Ordering},
iter, mem, iter,
ops::Range, ops::Range,
sync::atomic::{AtomicUsize, Ordering::SeqCst}, sync::atomic::{AtomicUsize, Ordering::SeqCst},
}; };
@ -100,11 +102,10 @@ impl<'a> FoldMapWriter<'a> {
pub fn fold<T: ToOffset>( pub fn fold<T: ToOffset>(
&mut self, &mut self,
ranges: impl IntoIterator<Item = Range<T>>, ranges: impl IntoIterator<Item = Range<T>>,
cx: &AppContext,
) -> (Snapshot, Vec<FoldEdit>) { ) -> (Snapshot, Vec<FoldEdit>) {
let mut edits = Vec::new(); let mut edits = Vec::new();
let mut folds = Vec::new(); let mut folds = Vec::new();
let buffer = self.0.buffer.read(cx).snapshot(); let buffer = self.0.buffer.lock().clone();
for range in ranges.into_iter() { for range in ranges.into_iter() {
let range = range.start.to_offset(&buffer)..range.end.to_offset(&buffer); let range = range.start.to_offset(&buffer)..range.end.to_offset(&buffer);
if range.start != range.end { if range.start != range.end {
@ -131,11 +132,11 @@ impl<'a> FoldMapWriter<'a> {
}; };
consolidate_buffer_edits(&mut edits); consolidate_buffer_edits(&mut edits);
let edits = self.0.apply_edits(edits, cx); let edits = self.0.sync(buffer.clone(), edits);
let snapshot = Snapshot { let snapshot = Snapshot {
transforms: self.0.transforms.lock().clone(), transforms: self.0.transforms.lock().clone(),
folds: self.0.folds.clone(), folds: self.0.folds.clone(),
buffer_snapshot: self.0.buffer.read(cx).snapshot(), buffer_snapshot: buffer,
version: self.0.version.load(SeqCst), version: self.0.version.load(SeqCst),
}; };
(snapshot, edits) (snapshot, edits)
@ -144,13 +145,12 @@ impl<'a> FoldMapWriter<'a> {
pub fn unfold<T: ToOffset>( pub fn unfold<T: ToOffset>(
&mut self, &mut self,
ranges: impl IntoIterator<Item = Range<T>>, ranges: impl IntoIterator<Item = Range<T>>,
cx: &AppContext,
) -> (Snapshot, Vec<FoldEdit>) { ) -> (Snapshot, Vec<FoldEdit>) {
let mut edits = Vec::new(); let mut edits = Vec::new();
let mut fold_ixs_to_delete = Vec::new(); let mut fold_ixs_to_delete = Vec::new();
let buffer = self.0.buffer.read(cx).snapshot(); let buffer = self.0.buffer.lock().clone();
for range in ranges.into_iter() { for range in ranges.into_iter() {
// Remove intersecting folds and add their ranges to edits that are passed to apply_edits. // Remove intersecting folds and add their ranges to edits that are passed to sync.
let mut folds_cursor = intersecting_folds(&buffer, &self.0.folds, range, true); let mut folds_cursor = intersecting_folds(&buffer, &self.0.folds, range, true);
while let Some(fold) = folds_cursor.item() { while let Some(fold) = folds_cursor.item() {
let offset_range = fold.0.start.to_offset(&buffer)..fold.0.end.to_offset(&buffer); let offset_range = fold.0.start.to_offset(&buffer)..fold.0.end.to_offset(&buffer);
@ -178,11 +178,11 @@ impl<'a> FoldMapWriter<'a> {
}; };
consolidate_buffer_edits(&mut edits); consolidate_buffer_edits(&mut edits);
let edits = self.0.apply_edits(edits, cx); let edits = self.0.sync(buffer.clone(), edits);
let snapshot = Snapshot { let snapshot = Snapshot {
transforms: self.0.transforms.lock().clone(), transforms: self.0.transforms.lock().clone(),
folds: self.0.folds.clone(), folds: self.0.folds.clone(),
buffer_snapshot: self.0.buffer.read(cx).snapshot(), buffer_snapshot: buffer,
version: self.0.version.load(SeqCst), version: self.0.version.load(SeqCst),
}; };
(snapshot, edits) (snapshot, edits)
@ -190,25 +190,16 @@ impl<'a> FoldMapWriter<'a> {
} }
pub struct FoldMap { pub struct FoldMap {
buffer: ModelHandle<Buffer>, buffer: Mutex<BufferSnapshot>,
transforms: Mutex<SumTree<Transform>>, transforms: Mutex<SumTree<Transform>>,
folds: SumTree<Fold>, folds: SumTree<Fold>,
last_sync: Mutex<SyncState>,
version: AtomicUsize, version: AtomicUsize,
} }
#[derive(Clone)]
struct SyncState {
version: clock::Global,
parse_count: usize,
diagnostics_update_count: usize,
}
impl FoldMap { impl FoldMap {
pub fn new(buffer_handle: ModelHandle<Buffer>, cx: &AppContext) -> (Self, Snapshot) { pub fn new(buffer: BufferSnapshot) -> (Self, Snapshot) {
let buffer = buffer_handle.read(cx);
let this = Self { let this = Self {
buffer: buffer_handle, buffer: Mutex::new(buffer.clone()),
folds: Default::default(), folds: Default::default(),
transforms: Mutex::new(SumTree::from_item( transforms: Mutex::new(SumTree::from_item(
Transform { Transform {
@ -220,147 +211,185 @@ impl FoldMap {
}, },
&(), &(),
)), )),
last_sync: Mutex::new(SyncState { version: Default::default(),
version: buffer.version(), };
parse_count: buffer.parse_count(),
diagnostics_update_count: buffer.diagnostics_update_count(), let snapshot = Snapshot {
}), transforms: this.transforms.lock().clone(),
version: AtomicUsize::new(0), folds: this.folds.clone(),
buffer_snapshot: this.buffer.lock().clone(),
version: this.version.load(SeqCst),
}; };
let (snapshot, _) = this.read(cx);
(this, snapshot) (this, snapshot)
} }
pub fn read(&self, cx: &AppContext) -> (Snapshot, Vec<FoldEdit>) { pub fn read(
let edits = self.sync(cx); &self,
self.check_invariants(cx); buffer: BufferSnapshot,
edits: Vec<Edit<usize>>,
) -> (Snapshot, Vec<FoldEdit>) {
let edits = self.sync(buffer, edits);
self.check_invariants();
let snapshot = Snapshot { let snapshot = Snapshot {
transforms: self.transforms.lock().clone(), transforms: self.transforms.lock().clone(),
folds: self.folds.clone(), folds: self.folds.clone(),
buffer_snapshot: self.buffer.read(cx).snapshot(), buffer_snapshot: self.buffer.lock().clone(),
version: self.version.load(SeqCst), version: self.version.load(SeqCst),
}; };
(snapshot, edits) (snapshot, edits)
} }
pub fn write(&mut self, cx: &AppContext) -> (FoldMapWriter, Snapshot, Vec<FoldEdit>) { pub fn write(
let (snapshot, edits) = self.read(cx); &mut self,
buffer: BufferSnapshot,
edits: Vec<Edit<usize>>,
) -> (FoldMapWriter, Snapshot, Vec<FoldEdit>) {
let (snapshot, edits) = self.read(buffer, edits);
(FoldMapWriter(self), snapshot, edits) (FoldMapWriter(self), snapshot, edits)
} }
fn sync(&self, cx: &AppContext) -> Vec<FoldEdit> { fn check_invariants(&self) {
let buffer = self.buffer.read(cx);
let last_sync = mem::replace(
&mut *self.last_sync.lock(),
SyncState {
version: buffer.version(),
parse_count: buffer.parse_count(),
diagnostics_update_count: buffer.diagnostics_update_count(),
},
);
let edits = buffer
.edits_since(&last_sync.version)
.map(Into::into)
.collect::<Vec<_>>();
if edits.is_empty() {
if last_sync.parse_count != buffer.parse_count()
|| last_sync.diagnostics_update_count != buffer.diagnostics_update_count()
{
self.version.fetch_add(1, SeqCst);
}
Vec::new()
} else {
self.apply_edits(edits, cx)
}
}
fn check_invariants(&self, cx: &AppContext) {
if cfg!(test) { if cfg!(test) {
let buffer = self.buffer.read(cx);
assert_eq!( assert_eq!(
self.transforms.lock().summary().input.bytes, self.transforms.lock().summary().input.bytes,
buffer.len(), self.buffer.lock().len(),
"transform tree does not match buffer's length" "transform tree does not match buffer's length"
); );
} }
} }
fn apply_edits(&self, buffer_edits: Vec<text::Edit<usize>>, cx: &AppContext) -> Vec<FoldEdit> { fn sync(
let buffer = self.buffer.read(cx).snapshot(); &self,
let mut buffer_edits_iter = buffer_edits.iter().cloned().peekable(); new_buffer: BufferSnapshot,
buffer_edits: Vec<text::Edit<usize>>,
) -> Vec<FoldEdit> {
if buffer_edits.is_empty() {
let mut buffer = self.buffer.lock();
if buffer.parse_count() != new_buffer.parse_count()
|| buffer.diagnostics_update_count() != new_buffer.diagnostics_update_count()
{
self.version.fetch_add(1, SeqCst);
}
*buffer = new_buffer;
Vec::new()
} else {
let mut buffer_edits_iter = buffer_edits.iter().cloned().peekable();
let mut new_transforms = SumTree::new(); let mut new_transforms = SumTree::new();
let mut transforms = self.transforms.lock(); let mut transforms = self.transforms.lock();
let mut cursor = transforms.cursor::<usize>(); let mut cursor = transforms.cursor::<usize>();
cursor.seek(&0, Bias::Right, &()); cursor.seek(&0, Bias::Right, &());
while let Some(mut edit) = buffer_edits_iter.next() { while let Some(mut edit) = buffer_edits_iter.next() {
new_transforms.push_tree(cursor.slice(&edit.old.start, Bias::Left, &()), &()); new_transforms.push_tree(cursor.slice(&edit.old.start, Bias::Left, &()), &());
edit.new.start -= edit.old.start - cursor.start(); edit.new.start -= edit.old.start - cursor.start();
edit.old.start = *cursor.start(); edit.old.start = *cursor.start();
cursor.seek(&edit.old.end, Bias::Right, &()); cursor.seek(&edit.old.end, Bias::Right, &());
cursor.next(&()); cursor.next(&());
let mut delta = edit.new.len() as isize - edit.old.len() as isize; let mut delta = edit.new.len() as isize - edit.old.len() as isize;
loop { loop {
edit.old.end = *cursor.start(); edit.old.end = *cursor.start();
if let Some(next_edit) = buffer_edits_iter.peek() { if let Some(next_edit) = buffer_edits_iter.peek() {
if next_edit.old.start > edit.old.end { if next_edit.old.start > edit.old.end {
break;
}
let next_edit = buffer_edits_iter.next().unwrap();
delta += next_edit.new.len() as isize - next_edit.old.len() as isize;
if next_edit.old.end >= edit.old.end {
edit.old.end = next_edit.old.end;
cursor.seek(&edit.old.end, Bias::Right, &());
cursor.next(&());
}
} else {
break; break;
} }
}
let next_edit = buffer_edits_iter.next().unwrap(); edit.new.end = ((edit.new.start + edit.old.len()) as isize + delta) as usize;
delta += next_edit.new.len() as isize - next_edit.old.len() as isize;
if next_edit.old.end >= edit.old.end { let anchor = new_buffer.anchor_before(edit.new.start);
edit.old.end = next_edit.old.end; let mut folds_cursor = self.folds.cursor::<Fold>();
cursor.seek(&edit.old.end, Bias::Right, &()); folds_cursor.seek(&Fold(anchor..Anchor::max()), Bias::Left, &new_buffer);
cursor.next(&());
let mut folds = iter::from_fn({
let buffer = &new_buffer;
move || {
let item = folds_cursor
.item()
.map(|f| f.0.start.to_offset(buffer)..f.0.end.to_offset(buffer));
folds_cursor.next(buffer);
item
}
})
.peekable();
while folds.peek().map_or(false, |fold| fold.start < edit.new.end) {
let mut fold = folds.next().unwrap();
let sum = new_transforms.summary();
assert!(fold.start >= sum.input.bytes);
while folds
.peek()
.map_or(false, |next_fold| next_fold.start <= fold.end)
{
let next_fold = folds.next().unwrap();
if next_fold.end > fold.end {
fold.end = next_fold.end;
}
}
if fold.start > sum.input.bytes {
let text_summary = new_buffer
.text_summary_for_range::<TextSummary, _>(sum.input.bytes..fold.start);
new_transforms.push(
Transform {
summary: TransformSummary {
output: text_summary.clone(),
input: text_summary,
},
output_text: None,
},
&(),
);
}
if fold.end > fold.start {
let output_text = "";
let chars = output_text.chars().count() as u32;
let lines = Point::new(0, output_text.len() as u32);
let lines_utf16 =
PointUtf16::new(0, output_text.encode_utf16().count() as u32);
new_transforms.push(
Transform {
summary: TransformSummary {
output: TextSummary {
bytes: output_text.len(),
lines,
lines_utf16,
first_line_chars: chars,
last_line_chars: chars,
longest_row: 0,
longest_row_chars: chars,
},
input: new_buffer.text_summary_for_range(fold.start..fold.end),
},
output_text: Some(output_text),
},
&(),
);
} }
} else {
break;
} }
}
edit.new.end = ((edit.new.start + edit.old.len()) as isize + delta) as usize;
let anchor = buffer.anchor_before(edit.new.start);
let mut folds_cursor = self.folds.cursor::<Fold>();
folds_cursor.seek(&Fold(anchor..Anchor::max()), Bias::Left, &buffer);
let mut folds = iter::from_fn({
let buffer = &buffer;
move || {
let item = folds_cursor
.item()
.map(|f| f.0.start.to_offset(buffer)..f.0.end.to_offset(buffer));
folds_cursor.next(buffer);
item
}
})
.peekable();
while folds.peek().map_or(false, |fold| fold.start < edit.new.end) {
let mut fold = folds.next().unwrap();
let sum = new_transforms.summary(); let sum = new_transforms.summary();
if sum.input.bytes < edit.new.end {
assert!(fold.start >= sum.input.bytes); let text_summary = new_buffer
.text_summary_for_range::<TextSummary, _>(sum.input.bytes..edit.new.end);
while folds
.peek()
.map_or(false, |next_fold| next_fold.start <= fold.end)
{
let next_fold = folds.next().unwrap();
if next_fold.end > fold.end {
fold.end = next_fold.end;
}
}
if fold.start > sum.input.bytes {
let text_summary = buffer
.text_summary_for_range::<TextSummary, _>(sum.input.bytes..fold.start);
new_transforms.push( new_transforms.push(
Transform { Transform {
summary: TransformSummary { summary: TransformSummary {
@ -372,37 +401,11 @@ impl FoldMap {
&(), &(),
); );
} }
if fold.end > fold.start {
let output_text = "";
let chars = output_text.chars().count() as u32;
let lines = Point::new(0, output_text.len() as u32);
let lines_utf16 = PointUtf16::new(0, output_text.encode_utf16().count() as u32);
new_transforms.push(
Transform {
summary: TransformSummary {
output: TextSummary {
bytes: output_text.len(),
lines,
lines_utf16,
first_line_chars: chars,
last_line_chars: chars,
longest_row: 0,
longest_row_chars: chars,
},
input: buffer.text_summary_for_range(fold.start..fold.end),
},
output_text: Some(output_text),
},
&(),
);
}
} }
let sum = new_transforms.summary(); new_transforms.push_tree(cursor.suffix(&()), &());
if sum.input.bytes < edit.new.end { if new_transforms.is_empty() {
let text_summary = let text_summary = new_buffer.text_summary();
buffer.text_summary_for_range::<TextSummary, _>(sum.input.bytes..edit.new.end);
new_transforms.push( new_transforms.push(
Transform { Transform {
summary: TransformSummary { summary: TransformSummary {
@ -414,73 +417,59 @@ impl FoldMap {
&(), &(),
); );
} }
}
new_transforms.push_tree(cursor.suffix(&()), &()); drop(cursor);
if new_transforms.is_empty() {
let text_summary = buffer.text_summary();
new_transforms.push(
Transform {
summary: TransformSummary {
output: text_summary.clone(),
input: text_summary,
},
output_text: None,
},
&(),
);
}
drop(cursor); let mut fold_edits = Vec::with_capacity(buffer_edits.len());
{
let mut old_transforms = transforms.cursor::<(usize, FoldOffset)>();
let mut new_transforms = new_transforms.cursor::<(usize, FoldOffset)>();
let mut fold_edits = Vec::with_capacity(buffer_edits.len()); for mut edit in buffer_edits {
{ old_transforms.seek(&edit.old.start, Bias::Left, &());
let mut old_transforms = transforms.cursor::<(usize, FoldOffset)>(); if old_transforms.item().map_or(false, |t| t.is_fold()) {
let mut new_transforms = new_transforms.cursor::<(usize, FoldOffset)>(); edit.old.start = old_transforms.start().0;
}
let old_start =
old_transforms.start().1 .0 + (edit.old.start - old_transforms.start().0);
for mut edit in buffer_edits { old_transforms.seek_forward(&edit.old.end, Bias::Right, &());
old_transforms.seek(&edit.old.start, Bias::Left, &()); if old_transforms.item().map_or(false, |t| t.is_fold()) {
if old_transforms.item().map_or(false, |t| t.is_fold()) { old_transforms.next(&());
edit.old.start = old_transforms.start().0; edit.old.end = old_transforms.start().0;
}
let old_end =
old_transforms.start().1 .0 + (edit.old.end - old_transforms.start().0);
new_transforms.seek(&edit.new.start, Bias::Left, &());
if new_transforms.item().map_or(false, |t| t.is_fold()) {
edit.new.start = new_transforms.start().0;
}
let new_start =
new_transforms.start().1 .0 + (edit.new.start - new_transforms.start().0);
new_transforms.seek_forward(&edit.new.end, Bias::Right, &());
if new_transforms.item().map_or(false, |t| t.is_fold()) {
new_transforms.next(&());
edit.new.end = new_transforms.start().0;
}
let new_end =
new_transforms.start().1 .0 + (edit.new.end - new_transforms.start().0);
fold_edits.push(FoldEdit {
old_bytes: FoldOffset(old_start)..FoldOffset(old_end),
new_bytes: FoldOffset(new_start)..FoldOffset(new_end),
});
} }
let old_start =
old_transforms.start().1 .0 + (edit.old.start - old_transforms.start().0);
old_transforms.seek_forward(&edit.old.end, Bias::Right, &()); consolidate_fold_edits(&mut fold_edits);
if old_transforms.item().map_or(false, |t| t.is_fold()) {
old_transforms.next(&());
edit.old.end = old_transforms.start().0;
}
let old_end =
old_transforms.start().1 .0 + (edit.old.end - old_transforms.start().0);
new_transforms.seek(&edit.new.start, Bias::Left, &());
if new_transforms.item().map_or(false, |t| t.is_fold()) {
edit.new.start = new_transforms.start().0;
}
let new_start =
new_transforms.start().1 .0 + (edit.new.start - new_transforms.start().0);
new_transforms.seek_forward(&edit.new.end, Bias::Right, &());
if new_transforms.item().map_or(false, |t| t.is_fold()) {
new_transforms.next(&());
edit.new.end = new_transforms.start().0;
}
let new_end =
new_transforms.start().1 .0 + (edit.new.end - new_transforms.start().0);
fold_edits.push(FoldEdit {
old_bytes: FoldOffset(old_start)..FoldOffset(old_end),
new_bytes: FoldOffset(new_start)..FoldOffset(new_end),
});
} }
consolidate_fold_edits(&mut fold_edits); *transforms = new_transforms;
*self.buffer.lock() = new_buffer;
self.version.fetch_add(1, SeqCst);
fold_edits
} }
*transforms = new_transforms;
self.version.fetch_add(1, SeqCst);
fold_edits
} }
} }
@ -1076,6 +1065,7 @@ impl FoldEdit {
mod tests { mod tests {
use super::*; use super::*;
use crate::{test::sample_text, ToPoint}; use crate::{test::sample_text, ToPoint};
use language::Buffer;
use rand::prelude::*; use rand::prelude::*;
use std::{env, mem}; use std::{env, mem};
use text::RandomCharIter; use text::RandomCharIter;
@ -1084,16 +1074,14 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_basic_folds(cx: &mut gpui::MutableAppContext) { fn test_basic_folds(cx: &mut gpui::MutableAppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx)); let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx));
let mut map = FoldMap::new(buffer.clone(), cx.as_ref()).0; let buffer_snapshot = buffer.read(cx).snapshot();
let mut map = FoldMap::new(buffer_snapshot.clone()).0;
let (mut writer, _, _) = map.write(cx.as_ref()); let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
let (snapshot2, edits) = writer.fold( let (snapshot2, edits) = writer.fold(vec![
vec![ Point::new(0, 2)..Point::new(2, 2),
Point::new(0, 2)..Point::new(2, 2), Point::new(2, 4)..Point::new(4, 1),
Point::new(2, 4)..Point::new(4, 1), ]);
],
cx.as_ref(),
);
assert_eq!(snapshot2.text(), "aa…cc…eeeee"); assert_eq!(snapshot2.text(), "aa…cc…eeeee");
assert_eq!( assert_eq!(
edits, edits,
@ -1109,7 +1097,8 @@ mod tests {
] ]
); );
buffer.update(cx, |buffer, cx| { let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| {
let v0 = buffer.version();
buffer.edit( buffer.edit(
vec![ vec![
Point::new(0, 0)..Point::new(0, 1), Point::new(0, 0)..Point::new(0, 1),
@ -1118,8 +1107,9 @@ mod tests {
"123", "123",
cx, cx,
); );
(buffer.snapshot(), buffer.edits_since(&v0).collect())
}); });
let (snapshot3, edits) = map.read(cx.as_ref()); let (snapshot3, edits) = map.read(buffer_snapshot.clone(), edits);
assert_eq!(snapshot3.text(), "123a…c123c…eeeee"); assert_eq!(snapshot3.text(), "123a…c123c…eeeee");
assert_eq!( assert_eq!(
edits, edits,
@ -1135,55 +1125,62 @@ mod tests {
] ]
); );
buffer.update(cx, |buffer, cx| { let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| {
buffer.edit(vec![Point::new(2, 6)..Point::new(4, 3)], "456", cx) let v0 = buffer.version();
buffer.edit(vec![Point::new(2, 6)..Point::new(4, 3)], "456", cx);
(buffer.snapshot(), buffer.edits_since(&v0).collect())
}); });
let (snapshot4, _) = map.read(cx.as_ref()); let (snapshot4, _) = map.read(buffer_snapshot.clone(), edits);
assert_eq!(snapshot4.text(), "123a…c123456eee"); assert_eq!(snapshot4.text(), "123a…c123456eee");
let (mut writer, _, _) = map.write(cx.as_ref()); let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
writer.unfold(Some(Point::new(0, 4)..Point::new(0, 5)), cx.as_ref()); writer.unfold(Some(Point::new(0, 4)..Point::new(0, 5)));
let (snapshot5, _) = map.read(cx.as_ref()); let (snapshot5, _) = map.read(buffer_snapshot.clone(), vec![]);
assert_eq!(snapshot5.text(), "123aaaaa\nbbbbbb\nccc123456eee"); assert_eq!(snapshot5.text(), "123aaaaa\nbbbbbb\nccc123456eee");
} }
#[gpui::test] #[gpui::test]
fn test_adjacent_folds(cx: &mut gpui::MutableAppContext) { fn test_adjacent_folds(cx: &mut gpui::MutableAppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, "abcdefghijkl", cx)); let buffer = cx.add_model(|cx| Buffer::new(0, "abcdefghijkl", cx));
let buffer_snapshot = buffer.read(cx).snapshot();
{ {
let mut map = FoldMap::new(buffer.clone(), cx.as_ref()).0; let mut map = FoldMap::new(buffer_snapshot.clone()).0;
let (mut writer, _, _) = map.write(cx.as_ref()); let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
writer.fold(vec![5..8], cx.as_ref()); writer.fold(vec![5..8]);
let (snapshot, _) = map.read(cx.as_ref()); let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
assert_eq!(snapshot.text(), "abcde…ijkl"); assert_eq!(snapshot.text(), "abcde…ijkl");
// Create an fold adjacent to the start of the first fold. // Create an fold adjacent to the start of the first fold.
let (mut writer, _, _) = map.write(cx.as_ref()); let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
writer.fold(vec![0..1, 2..5], cx.as_ref()); writer.fold(vec![0..1, 2..5]);
let (snapshot, _) = map.read(cx.as_ref()); let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
assert_eq!(snapshot.text(), "…b…ijkl"); assert_eq!(snapshot.text(), "…b…ijkl");
// Create an fold adjacent to the end of the first fold. // Create an fold adjacent to the end of the first fold.
let (mut writer, _, _) = map.write(cx.as_ref()); let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
writer.fold(vec![11..11, 8..10], cx.as_ref()); writer.fold(vec![11..11, 8..10]);
let (snapshot, _) = map.read(cx.as_ref()); let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
assert_eq!(snapshot.text(), "…b…kl"); assert_eq!(snapshot.text(), "…b…kl");
} }
{ {
let mut map = FoldMap::new(buffer.clone(), cx.as_ref()).0; let mut map = FoldMap::new(buffer_snapshot.clone()).0;
// Create two adjacent folds. // Create two adjacent folds.
let (mut writer, _, _) = map.write(cx.as_ref()); let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
writer.fold(vec![0..2, 2..5], cx.as_ref()); writer.fold(vec![0..2, 2..5]);
let (snapshot, _) = map.read(cx.as_ref()); let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
assert_eq!(snapshot.text(), "…fghijkl"); assert_eq!(snapshot.text(), "…fghijkl");
// Edit within one of the folds. // Edit within one of the folds.
buffer.update(cx, |buffer, cx| buffer.edit(vec![0..1], "12345", cx)); let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| {
let (snapshot, _) = map.read(cx.as_ref()); let v0 = buffer.version();
buffer.edit(vec![0..1], "12345", cx);
(buffer.snapshot(), buffer.edits_since(&v0).collect())
});
let (snapshot, _) = map.read(buffer_snapshot.clone(), edits);
assert_eq!(snapshot.text(), "12345…fghijkl"); assert_eq!(snapshot.text(), "12345…fghijkl");
} }
} }
@ -1191,61 +1188,57 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_overlapping_folds(cx: &mut gpui::MutableAppContext) { fn test_overlapping_folds(cx: &mut gpui::MutableAppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx)); let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx));
let mut map = FoldMap::new(buffer.clone(), cx.as_ref()).0; let buffer_snapshot = buffer.read(cx).snapshot();
let (mut writer, _, _) = map.write(cx.as_ref()); let mut map = FoldMap::new(buffer_snapshot.clone()).0;
writer.fold( let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
vec![ writer.fold(vec![
Point::new(0, 2)..Point::new(2, 2), Point::new(0, 2)..Point::new(2, 2),
Point::new(0, 4)..Point::new(1, 0), Point::new(0, 4)..Point::new(1, 0),
Point::new(1, 2)..Point::new(3, 2), Point::new(1, 2)..Point::new(3, 2),
Point::new(3, 1)..Point::new(4, 1), Point::new(3, 1)..Point::new(4, 1),
], ]);
cx.as_ref(), let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
);
let (snapshot, _) = map.read(cx.as_ref());
assert_eq!(snapshot.text(), "aa…eeeee"); assert_eq!(snapshot.text(), "aa…eeeee");
} }
#[gpui::test] #[gpui::test]
fn test_merging_folds_via_edit(cx: &mut gpui::MutableAppContext) { fn test_merging_folds_via_edit(cx: &mut gpui::MutableAppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx)); let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx));
let mut map = FoldMap::new(buffer.clone(), cx.as_ref()).0; let buffer_snapshot = buffer.read(cx).snapshot();
let mut map = FoldMap::new(buffer_snapshot.clone()).0;
let (mut writer, _, _) = map.write(cx.as_ref()); let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
writer.fold( writer.fold(vec![
vec![ Point::new(0, 2)..Point::new(2, 2),
Point::new(0, 2)..Point::new(2, 2), Point::new(3, 1)..Point::new(4, 1),
Point::new(3, 1)..Point::new(4, 1), ]);
], let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
cx.as_ref(),
);
let (snapshot, _) = map.read(cx.as_ref());
assert_eq!(snapshot.text(), "aa…cccc\nd…eeeee"); assert_eq!(snapshot.text(), "aa…cccc\nd…eeeee");
buffer.update(cx, |buffer, cx| { let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| {
buffer.edit(Some(Point::new(2, 2)..Point::new(3, 1)), "", cx) let v0 = buffer.version();
buffer.edit(Some(Point::new(2, 2)..Point::new(3, 1)), "", cx);
(buffer.snapshot(), buffer.edits_since(&v0).collect())
}); });
let (snapshot, _) = map.read(cx.as_ref()); let (snapshot, _) = map.read(buffer_snapshot.clone(), edits);
assert_eq!(snapshot.text(), "aa…eeeee"); assert_eq!(snapshot.text(), "aa…eeeee");
} }
#[gpui::test] #[gpui::test]
fn test_folds_in_range(cx: &mut gpui::MutableAppContext) { fn test_folds_in_range(cx: &mut gpui::MutableAppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx)); let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx));
let mut map = FoldMap::new(buffer.clone(), cx.as_ref()).0; let buffer_snapshot = buffer.read(cx).snapshot();
let mut map = FoldMap::new(buffer_snapshot.clone()).0;
let buffer = buffer.read(cx); let buffer = buffer.read(cx);
let (mut writer, _, _) = map.write(cx.as_ref()); let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
writer.fold( writer.fold(vec![
vec![ Point::new(0, 2)..Point::new(2, 2),
Point::new(0, 2)..Point::new(2, 2), Point::new(0, 4)..Point::new(1, 0),
Point::new(0, 4)..Point::new(1, 0), Point::new(1, 2)..Point::new(3, 2),
Point::new(1, 2)..Point::new(3, 2), Point::new(3, 1)..Point::new(4, 1),
Point::new(3, 1)..Point::new(4, 1), ]);
], let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
cx.as_ref(),
);
let (snapshot, _) = map.read(cx.as_ref());
let fold_ranges = snapshot let fold_ranges = snapshot
.folds_in_range(Point::new(1, 0)..Point::new(1, 3)) .folds_in_range(Point::new(1, 0)..Point::new(1, 3))
.map(|fold| fold.start.to_point(buffer)..fold.end.to_point(buffer)) .map(|fold| fold.start.to_point(buffer)..fold.end.to_point(buffer))
@ -1270,37 +1263,41 @@ mod tests {
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>(); let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
Buffer::new(0, text, cx) Buffer::new(0, text, cx)
}); });
let mut map = FoldMap::new(buffer.clone(), cx.as_ref()).0; let buffer_snapshot = buffer.read(cx).snapshot();
let mut map = FoldMap::new(buffer_snapshot.clone()).0;
let (mut initial_snapshot, _) = map.read(cx.as_ref()); let (mut initial_snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
let mut snapshot_edits = Vec::new(); let mut snapshot_edits = Vec::new();
for _ in 0..operations { for _ in 0..operations {
log::info!("text: {:?}", buffer.read(cx).text()); log::info!("text: {:?}", buffer.read(cx).text());
match rng.gen_range(0..=100) { let buffer_edits = match rng.gen_range(0..=100) {
0..=59 => { 0..=59 => {
snapshot_edits.extend(map.randomly_mutate(&mut rng, cx.as_ref())); snapshot_edits.extend(map.randomly_mutate(&mut rng));
vec![]
} }
_ => { _ => buffer.update(cx, |buffer, cx| {
let edits = buffer.update(cx, |buffer, _| { let start_version = buffer.version.clone();
let start_version = buffer.version.clone(); let edit_count = rng.gen_range(1..=5);
let edit_count = rng.gen_range(1..=5); buffer.randomly_edit(&mut rng, edit_count, cx);
buffer.randomly_edit(&mut rng, edit_count); let edits = buffer
buffer .edits_since::<Point>(&start_version)
.edits_since::<Point>(&start_version) .collect::<Vec<_>>();
.collect::<Vec<_>>()
});
log::info!("editing {:?}", edits); log::info!("editing {:?}", edits);
} buffer.edits_since::<usize>(&start_version).collect()
} }),
};
let buffer_snapshot = buffer.read(cx).snapshot();
let buffer = map.buffer.read(cx).snapshot(); let (snapshot, edits) = map.read(buffer_snapshot.clone(), buffer_edits);
let mut expected_text: String = buffer.text().to_string(); snapshot_edits.push((snapshot.clone(), edits));
let mut expected_text: String = buffer_snapshot.text().to_string();
let mut expected_buffer_rows = Vec::new(); let mut expected_buffer_rows = Vec::new();
let mut next_row = buffer.max_point().row; let mut next_row = buffer_snapshot.max_point().row;
for fold_range in map.merged_fold_ranges(cx.as_ref()).into_iter().rev() { for fold_range in map.merged_fold_ranges().into_iter().rev() {
let fold_start = buffer.point_for_offset(fold_range.start).unwrap(); let fold_start = buffer_snapshot.point_for_offset(fold_range.start).unwrap();
let fold_end = buffer.point_for_offset(fold_range.end).unwrap(); let fold_end = buffer_snapshot.point_for_offset(fold_range.end).unwrap();
expected_buffer_rows.extend((fold_end.row + 1..=next_row).rev()); expected_buffer_rows.extend((fold_end.row + 1..=next_row).rev());
next_row = fold_start.row; next_row = fold_start.row;
@ -1309,9 +1306,7 @@ mod tests {
expected_buffer_rows.extend((0..=next_row).rev()); expected_buffer_rows.extend((0..=next_row).rev());
expected_buffer_rows.reverse(); expected_buffer_rows.reverse();
let (snapshot, edits) = map.read(cx.as_ref());
assert_eq!(snapshot.text(), expected_text); assert_eq!(snapshot.text(), expected_text);
snapshot_edits.push((snapshot.clone(), edits));
for (output_row, line) in expected_text.lines().enumerate() { for (output_row, line) in expected_text.lines().enumerate() {
let line_len = snapshot.line_len(output_row as u32); let line_len = snapshot.line_len(output_row as u32);
@ -1330,7 +1325,7 @@ mod tests {
let mut char_column = 0; let mut char_column = 0;
for c in expected_text.chars() { for c in expected_text.chars() {
let buffer_point = fold_point.to_buffer_point(&snapshot); let buffer_point = fold_point.to_buffer_point(&snapshot);
let buffer_offset = buffer_point.to_offset(&buffer); let buffer_offset = buffer_point.to_offset(&buffer_snapshot);
assert_eq!( assert_eq!(
buffer_point.to_fold_point(&snapshot, Right), buffer_point.to_fold_point(&snapshot, Right),
fold_point, fold_point,
@ -1400,26 +1395,28 @@ mod tests {
); );
} }
for fold_range in map.merged_fold_ranges(cx.as_ref()) { for fold_range in map.merged_fold_ranges() {
let fold_point = fold_range let fold_point = fold_range
.start .start
.to_point(&buffer) .to_point(&buffer_snapshot)
.to_fold_point(&snapshot, Right); .to_fold_point(&snapshot, Right);
assert!(snapshot.is_line_folded(fold_point.row())); assert!(snapshot.is_line_folded(fold_point.row()));
} }
for _ in 0..5 { for _ in 0..5 {
let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right); let end =
let start = buffer.clip_offset(rng.gen_range(0..=end), Left); buffer_snapshot.clip_offset(rng.gen_range(0..=buffer_snapshot.len()), Right);
let start = buffer_snapshot.clip_offset(rng.gen_range(0..=end), Left);
let expected_folds = map let expected_folds = map
.folds .folds
.items(&buffer) .items(&buffer_snapshot)
.into_iter() .into_iter()
.filter(|fold| { .filter(|fold| {
let start = buffer.anchor_before(start); let start = buffer_snapshot.anchor_before(start);
let end = buffer.anchor_after(end); let end = buffer_snapshot.anchor_after(end);
start.cmp(&fold.0.end, &buffer).unwrap() == Ordering::Less start.cmp(&fold.0.end, &buffer_snapshot).unwrap() == Ordering::Less
&& end.cmp(&fold.0.start, &buffer).unwrap() == Ordering::Greater && end.cmp(&fold.0.start, &buffer_snapshot).unwrap()
== Ordering::Greater
}) })
.map(|fold| fold.0) .map(|fold| fold.0)
.collect::<Vec<_>>(); .collect::<Vec<_>>();
@ -1477,26 +1474,24 @@ mod tests {
let text = sample_text(6, 6) + "\n"; let text = sample_text(6, 6) + "\n";
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
let mut map = FoldMap::new(buffer.clone(), cx.as_ref()).0; let buffer_snapshot = buffer.read(cx).snapshot();
let mut map = FoldMap::new(buffer_snapshot.clone()).0;
let (mut writer, _, _) = map.write(cx.as_ref()); let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
writer.fold( writer.fold(vec![
vec![ Point::new(0, 2)..Point::new(2, 2),
Point::new(0, 2)..Point::new(2, 2), Point::new(3, 1)..Point::new(4, 1),
Point::new(3, 1)..Point::new(4, 1), ]);
],
cx.as_ref(),
);
let (snapshot, _) = map.read(cx.as_ref()); let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
assert_eq!(snapshot.text(), "aa…cccc\nd…eeeee\nffffff\n"); assert_eq!(snapshot.text(), "aa…cccc\nd…eeeee\nffffff\n");
assert_eq!(snapshot.buffer_rows(0).collect::<Vec<_>>(), [0, 3, 5, 6]); assert_eq!(snapshot.buffer_rows(0).collect::<Vec<_>>(), [0, 3, 5, 6]);
assert_eq!(snapshot.buffer_rows(3).collect::<Vec<_>>(), [6]); assert_eq!(snapshot.buffer_rows(3).collect::<Vec<_>>(), [6]);
} }
impl FoldMap { impl FoldMap {
fn merged_fold_ranges(&self, cx: &AppContext) -> Vec<Range<usize>> { fn merged_fold_ranges(&self) -> Vec<Range<usize>> {
let buffer = self.buffer.read(cx).snapshot(); let buffer = self.buffer.lock().clone();
let mut folds = self.folds.items(&buffer); let mut folds = self.folds.items(&buffer);
// Ensure sorting doesn't change how folds get merged and displayed. // Ensure sorting doesn't change how folds get merged and displayed.
folds.sort_by(|a, b| a.0.cmp(&b.0, &buffer).unwrap()); folds.sort_by(|a, b| a.0.cmp(&b.0, &buffer).unwrap());
@ -1524,15 +1519,11 @@ mod tests {
merged_ranges merged_ranges
} }
pub fn randomly_mutate( pub fn randomly_mutate(&mut self, rng: &mut impl Rng) -> Vec<(Snapshot, Vec<FoldEdit>)> {
&mut self,
rng: &mut impl Rng,
cx: &AppContext,
) -> Vec<(Snapshot, Vec<FoldEdit>)> {
let mut snapshot_edits = Vec::new(); let mut snapshot_edits = Vec::new();
match rng.gen_range(0..=100) { match rng.gen_range(0..=100) {
0..=39 if !self.folds.is_empty() => { 0..=39 if !self.folds.is_empty() => {
let buffer = self.buffer.read(cx); let buffer = self.buffer.lock().clone();
let mut to_unfold = Vec::new(); let mut to_unfold = Vec::new();
for _ in 0..rng.gen_range(1..=3) { for _ in 0..rng.gen_range(1..=3) {
let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right); let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right);
@ -1540,13 +1531,13 @@ mod tests {
to_unfold.push(start..end); to_unfold.push(start..end);
} }
log::info!("unfolding {:?}", to_unfold); log::info!("unfolding {:?}", to_unfold);
let (mut writer, snapshot, edits) = self.write(cx.as_ref()); let (mut writer, snapshot, edits) = self.write(buffer, vec![]);
snapshot_edits.push((snapshot, edits)); snapshot_edits.push((snapshot, edits));
let (snapshot, edits) = writer.fold(to_unfold, cx.as_ref()); let (snapshot, edits) = writer.fold(to_unfold);
snapshot_edits.push((snapshot, edits)); snapshot_edits.push((snapshot, edits));
} }
_ => { _ => {
let buffer = self.buffer.read(cx); let buffer = self.buffer.lock().clone();
let mut to_fold = Vec::new(); let mut to_fold = Vec::new();
for _ in 0..rng.gen_range(1..=2) { for _ in 0..rng.gen_range(1..=2) {
let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right); let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right);
@ -1554,9 +1545,9 @@ mod tests {
to_fold.push(start..end); to_fold.push(start..end);
} }
log::info!("folding {:?}", to_fold); log::info!("folding {:?}", to_fold);
let (mut writer, snapshot, edits) = self.write(cx.as_ref()); let (mut writer, snapshot, edits) = self.write(buffer, vec![]);
snapshot_edits.push((snapshot, edits)); snapshot_edits.push((snapshot, edits));
let (snapshot, edits) = writer.fold(to_fold, cx.as_ref()); let (snapshot, edits) = writer.fold(to_fold);
snapshot_edits.push((snapshot, edits)); snapshot_edits.push((snapshot, edits));
} }
} }
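
Most of the new `FoldMap::sync` is devoted to re-expressing buffer-space edits as `FoldEdit`s in fold coordinates. Below is a much-simplified, self-contained illustration of that coordinate translation: it ignores the transform tree and anchors, assumes the folds sit before the edit, and unlike the real map it neither snaps edit endpoints to whole folds nor renders a "…" placeholder. All names are made up for illustration.

```rust
use std::ops::Range;

#[derive(Debug, PartialEq)]
struct FoldEdit {
    old: Range<usize>,
    new: Range<usize>,
}

// Fold offset = buffer offset minus every folded byte before it (a fold
// renders as zero-width in this toy).
fn to_fold_offset(folds: &[Range<usize>], buffer_offset: usize) -> usize {
    let mut hidden = 0;
    for fold in folds {
        if fold.end <= buffer_offset {
            hidden += fold.end - fold.start;
        } else if fold.start < buffer_offset {
            hidden += buffer_offset - fold.start;
        }
    }
    buffer_offset - hidden
}

fn main() {
    let folds = vec![2..5];

    // A buffer edit replaced bytes 6..7 with three new bytes (6..9 in the new text).
    let (old, new) = (6..7, 6..9);
    let fold_edit = FoldEdit {
        old: to_fold_offset(&folds, old.start)..to_fold_offset(&folds, old.end),
        new: to_fold_offset(&folds, new.start)..to_fold_offset(&folds, new.end),
    };

    // Bytes 2..5 are hidden, so buffer offset 6 maps to fold offset 3.
    assert_eq!(fold_edit, FoldEdit { old: 3..4, new: 3..6 });
}
```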


@@ -470,11 +470,12 @@ mod tests {
            let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
            Buffer::new(0, text, cx)
        });
+       let buffer_snapshot = buffer.read(cx).snapshot();
        log::info!("Buffer text: {:?}", buffer.read(cx).text());
-       let (mut fold_map, _) = FoldMap::new(buffer.clone(), cx);
-       fold_map.randomly_mutate(&mut rng, cx);
-       let (folds_snapshot, _) = fold_map.read(cx);
+       let (mut fold_map, _) = FoldMap::new(buffer_snapshot.clone());
+       fold_map.randomly_mutate(&mut rng);
+       let (folds_snapshot, _) = fold_map.read(buffer_snapshot.clone(), vec![]);
        log::info!("FoldMap text: {:?}", folds_snapshot.text());
        let (_, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size);


@ -1,6 +1,5 @@
use super::{ use super::{
fold_map, fold_map,
patch::Patch,
tab_map::{self, Edit as TabEdit, Snapshot as TabSnapshot, TabPoint}, tab_map::{self, Edit as TabEdit, Snapshot as TabSnapshot, TabPoint},
DisplayRow, DisplayRow,
}; };
@ -13,6 +12,7 @@ use lazy_static::lazy_static;
use smol::future::yield_now; use smol::future::yield_now;
use std::{collections::VecDeque, mem, ops::Range, time::Duration}; use std::{collections::VecDeque, mem, ops::Range, time::Duration};
use sum_tree::{Bias, Cursor, SumTree}; use sum_tree::{Bias, Cursor, SumTree};
use text::Patch;
use theme::SyntaxTheme; use theme::SyntaxTheme;
pub use super::tab_map::TextSummary; pub use super::tab_map::TextSummary;
@ -21,8 +21,8 @@ pub type Edit = text::Edit<u32>;
pub struct WrapMap { pub struct WrapMap {
snapshot: Snapshot, snapshot: Snapshot,
pending_edits: VecDeque<(TabSnapshot, Vec<TabEdit>)>, pending_edits: VecDeque<(TabSnapshot, Vec<TabEdit>)>,
interpolated_edits: Patch, interpolated_edits: Patch<u32>,
edits_since_sync: Patch, edits_since_sync: Patch<u32>,
wrap_width: Option<f32>, wrap_width: Option<f32>,
background_task: Option<Task<()>>, background_task: Option<Task<()>>,
font: (FontId, f32), font: (FontId, f32),
@ -204,12 +204,10 @@ impl WrapMap {
} }
let new_rows = self.snapshot.transforms.summary().output.lines.row + 1; let new_rows = self.snapshot.transforms.summary().output.lines.row + 1;
self.snapshot.interpolated = false; self.snapshot.interpolated = false;
self.edits_since_sync = self.edits_since_sync.compose(&unsafe { self.edits_since_sync = self.edits_since_sync.compose(&Patch::new(vec![Edit {
Patch::new_unchecked(vec![Edit { old: 0..old_rows,
old: 0..old_rows, new: 0..new_rows,
new: 0..new_rows, }]));
}])
});
} }
} }
@ -308,7 +306,7 @@ impl Snapshot {
} }
} }
fn interpolate(&mut self, new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit]) -> Patch { fn interpolate(&mut self, new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit]) -> Patch<u32> {
let mut new_transforms; let mut new_transforms;
if tab_edits.is_empty() { if tab_edits.is_empty() {
new_transforms = self.transforms.clone(); new_transforms = self.transforms.clone();
@ -383,7 +381,7 @@ impl Snapshot {
tab_edits: &[TabEdit], tab_edits: &[TabEdit],
wrap_width: f32, wrap_width: f32,
line_wrapper: &mut LineWrapper, line_wrapper: &mut LineWrapper,
) -> Patch { ) -> Patch<u32> {
#[derive(Debug)] #[derive(Debug)]
struct RowEdit { struct RowEdit {
old_rows: Range<u32>, old_rows: Range<u32>,
@ -526,7 +524,7 @@ impl Snapshot {
old_snapshot.compute_edits(tab_edits, self) old_snapshot.compute_edits(tab_edits, self)
} }
fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &Snapshot) -> Patch { fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &Snapshot) -> Patch<u32> {
let mut wrap_edits = Vec::new(); let mut wrap_edits = Vec::new();
let mut old_cursor = self.transforms.cursor::<TransformSummary>(); let mut old_cursor = self.transforms.cursor::<TransformSummary>();
let mut new_cursor = new_snapshot.transforms.cursor::<TransformSummary>(); let mut new_cursor = new_snapshot.transforms.cursor::<TransformSummary>();
@ -559,7 +557,7 @@ impl Snapshot {
} }
consolidate_wrap_edits(&mut wrap_edits); consolidate_wrap_edits(&mut wrap_edits);
unsafe { Patch::new_unchecked(wrap_edits) } Patch::new(wrap_edits)
} }
pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> { pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
@ -1026,7 +1024,8 @@ mod tests {
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>(); let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
Buffer::new(0, text, cx) Buffer::new(0, text, cx)
}); });
let (mut fold_map, folds_snapshot) = cx.read(|cx| FoldMap::new(buffer.clone(), cx)); let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
let (mut fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone());
let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size); let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size);
log::info!( log::info!(
"Unwrapped text (no folds): {:?}", "Unwrapped text (no folds): {:?}",
@ -1067,6 +1066,7 @@ mod tests {
for _i in 0..operations { for _i in 0..operations {
log::info!("{} ==============================================", _i); log::info!("{} ==============================================", _i);
let mut buffer_edits = Vec::new();
match rng.gen_range(0..=100) { match rng.gen_range(0..=100) {
0..=19 => { 0..=19 => {
wrap_width = if rng.gen_bool(0.2) { wrap_width = if rng.gen_bool(0.2) {
@ -1078,9 +1078,7 @@ mod tests {
wrap_map.update(&mut cx, |map, cx| map.set_wrap_width(wrap_width, cx)); wrap_map.update(&mut cx, |map, cx| map.set_wrap_width(wrap_width, cx));
} }
20..=39 => { 20..=39 => {
for (folds_snapshot, fold_edits) in for (folds_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) {
cx.read(|cx| fold_map.randomly_mutate(&mut rng, cx))
{
let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits); let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits);
let (mut snapshot, wrap_edits) = wrap_map let (mut snapshot, wrap_edits) = wrap_map
.update(&mut cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx)); .update(&mut cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
@ -1090,15 +1088,18 @@ mod tests {
} }
} }
_ => { _ => {
buffer.update(&mut cx, |buffer, _| buffer.randomly_mutate(&mut rng)); buffer.update(&mut cx, |buffer, cx| {
let v0 = buffer.version();
let edit_count = rng.gen_range(1..=5);
buffer.randomly_edit(&mut rng, edit_count, cx);
buffer_edits.extend(buffer.edits_since(&v0));
});
} }
} }
log::info!( let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
"Unwrapped text (no folds): {:?}", log::info!("Unwrapped text (no folds): {:?}", buffer_snapshot.text());
buffer.read_with(&cx, |buf, _| buf.text()) let (folds_snapshot, fold_edits) = fold_map.read(buffer_snapshot, buffer_edits);
);
let (folds_snapshot, fold_edits) = cx.read(|cx| fold_map.read(cx));
log::info!( log::info!(
"Unwrapped text (unexpanded tabs): {:?}", "Unwrapped text (unexpanded tabs): {:?}",
folds_snapshot.text() folds_snapshot.text()


@ -3472,7 +3472,7 @@ impl Editor {
language::Event::FileHandleChanged => cx.emit(Event::FileHandleChanged), language::Event::FileHandleChanged => cx.emit(Event::FileHandleChanged),
language::Event::Reloaded => cx.emit(Event::FileHandleChanged), language::Event::Reloaded => cx.emit(Event::FileHandleChanged),
language::Event::Closed => cx.emit(Event::Closed), language::Event::Closed => cx.emit(Event::Closed),
language::Event::Reparsed => {} _ => {}
} }
} }


@ -72,8 +72,10 @@ pub struct Snapshot {
text: text::Snapshot, text: text::Snapshot,
tree: Option<Tree>, tree: Option<Tree>,
diagnostics: AnchorRangeMultimap<Diagnostic>, diagnostics: AnchorRangeMultimap<Diagnostic>,
diagnostics_update_count: usize,
is_parsing: bool, is_parsing: bool,
language: Option<Arc<Language>>, language: Option<Arc<Language>>,
parse_count: usize,
} }
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
@ -113,6 +115,7 @@ pub enum Event {
FileHandleChanged, FileHandleChanged,
Reloaded, Reloaded,
Reparsed, Reparsed,
DiagnosticsUpdated,
Closed, Closed,
} }
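With `DiagnosticsUpdated` added alongside `Reparsed`, observers can branch on the new variant explicitly; the editor hunk earlier in this diff simply folds the variants it doesn't handle into `_ => {}`. An illustrative handler (the reaction code is assumed, not part of this commit):

fn on_buffer_event(event: &language::Event) {
    match event {
        language::Event::DiagnosticsUpdated => {
            // e.g. refresh diagnostic indicators (assumed reaction)
        }
        language::Event::Reparsed => {
            // e.g. recompute syntax-dependent state (assumed reaction)
        }
        _ => {}
    }
}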
@ -338,8 +341,10 @@ impl Buffer {
text: self.text.snapshot(), text: self.text.snapshot(),
tree: self.syntax_tree(), tree: self.syntax_tree(),
diagnostics: self.diagnostics.clone(), diagnostics: self.diagnostics.clone(),
diagnostics_update_count: self.diagnostics_update_count,
is_parsing: self.parsing_in_background, is_parsing: self.parsing_in_background,
language: self.language.clone(), language: self.language.clone(),
parse_count: self.parse_count,
} }
} }
@ -805,6 +810,7 @@ impl Buffer {
self.diagnostics_update_count += 1; self.diagnostics_update_count += 1;
cx.notify(); cx.notify();
cx.emit(Event::DiagnosticsUpdated);
Ok(Operation::UpdateDiagnostics(self.diagnostics.clone())) Ok(Operation::UpdateDiagnostics(self.diagnostics.clone()))
} }
@ -1132,6 +1138,10 @@ impl Buffer {
.map_or(false, |file| file.mtime() > self.saved_mtime) .map_or(false, |file| file.mtime() > self.saved_mtime)
} }
pub fn subscribe(&mut self) -> Subscription {
self.text.subscribe()
}
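`language::Buffer::subscribe` just forwards to the text buffer's new subscription mechanism. A hedged, test-style sketch of taking out a subscription and draining it after an edit (the `update` and `edit` call shapes follow the tests elsewhere in this diff; `cx` is assumed to be a gpui test context):

let subscription = buffer.update(&mut cx, |buffer, _| buffer.subscribe());
buffer.update(&mut cx, |buffer, cx| buffer.edit([1..3], "xyz", cx));

// One composed patch describing everything edited since `subscribe`.
let edits = subscription.consume().into_inner();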
pub fn start_transaction( pub fn start_transaction(
&mut self, &mut self,
selection_set_ids: impl IntoIterator<Item = SelectionSetId>, selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
@ -1316,7 +1326,8 @@ impl Buffer {
was_dirty: bool, was_dirty: bool,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) { ) {
if self.edits_since::<usize>(old_version).next().is_none() { let patch = Patch::new(self.edits_since::<usize>(old_version).collect());
if patch.is_empty() {
return; return;
} }
@ -1461,18 +1472,26 @@ impl Buffer {
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
impl Buffer { impl Buffer {
pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize) pub fn randomly_edit<T>(
where &mut self,
rng: &mut T,
old_range_count: usize,
cx: &mut ModelContext<Self>,
) where
T: rand::Rng, T: rand::Rng,
{ {
self.start_transaction(None).unwrap();
self.text.randomly_edit(rng, old_range_count); self.text.randomly_edit(rng, old_range_count);
self.end_transaction(None, cx).unwrap();
} }
pub fn randomly_mutate<T>(&mut self, rng: &mut T) pub fn randomly_mutate<T>(&mut self, rng: &mut T, cx: &mut ModelContext<Self>)
where where
T: rand::Rng, T: rand::Rng,
{ {
self.start_transaction(None).unwrap();
self.text.randomly_mutate(rng); self.text.randomly_mutate(rng);
self.end_transaction(None, cx).unwrap();
} }
} }
@ -1486,30 +1505,6 @@ impl Entity for Buffer {
} }
} }
// TODO: Do we need to clone a buffer?
impl Clone for Buffer {
fn clone(&self) -> Self {
Self {
text: self.text.clone(),
saved_version: self.saved_version.clone(),
saved_mtime: self.saved_mtime,
file: self.file.as_ref().map(|f| f.boxed_clone()),
language: self.language.clone(),
syntax_tree: Mutex::new(self.syntax_tree.lock().clone()),
parsing_in_background: false,
sync_parse_timeout: self.sync_parse_timeout,
parse_count: self.parse_count,
autoindent_requests: Default::default(),
pending_autoindent: Default::default(),
diagnostics: self.diagnostics.clone(),
diagnostics_update_count: self.diagnostics_update_count,
language_server: None,
#[cfg(test)]
operations: self.operations.clone(),
}
}
}
impl Deref for Buffer { impl Deref for Buffer {
type Target = TextBuffer; type Target = TextBuffer;
@ -1699,6 +1694,14 @@ impl Snapshot {
.as_ref() .as_ref()
.and_then(|language| language.grammar.as_ref()) .and_then(|language| language.grammar.as_ref())
} }
pub fn diagnostics_update_count(&self) -> usize {
self.diagnostics_update_count
}
pub fn parse_count(&self) -> usize {
self.parse_count
}
} }
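The `parse_count` and `diagnostics_update_count` getters let callers detect reparses and diagnostics changes by comparing two snapshots instead of tracking events. A sketch of that pattern (test-style `read_with`, as used in the wrap-map tests above; the reactions are assumed):

let before = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
// ... edits land, background parsing finishes, diagnostics arrive ...
let after = buffer.read_with(&cx, |buffer, _| buffer.snapshot());

if after.parse_count() > before.parse_count() {
    // the syntax tree was rebuilt since `before` was taken
}
if after.diagnostics_update_count() > before.diagnostics_update_count() {
    // diagnostics changed since `before` was taken
}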
impl Clone for Snapshot { impl Clone for Snapshot {
@ -1707,8 +1710,10 @@ impl Clone for Snapshot {
text: self.text.clone(), text: self.text.clone(),
tree: self.tree.clone(), tree: self.tree.clone(),
diagnostics: self.diagnostics.clone(), diagnostics: self.diagnostics.clone(),
diagnostics_update_count: self.diagnostics_update_count,
is_parsing: self.is_parsing, is_parsing: self.is_parsing,
language: self.language.clone(), language: self.language.clone(),
parse_count: self.parse_count,
} }
} }
} }


@ -20,7 +20,6 @@ use postage::{
prelude::{Sink as _, Stream as _}, prelude::{Sink as _, Stream as _},
watch, watch,
}; };
use serde::Deserialize; use serde::Deserialize;
use smol::channel::{self, Sender}; use smol::channel::{self, Sender};
use std::{ use std::{
@ -3495,14 +3494,14 @@ mod tests {
&[ &[
language::Event::Edited, language::Event::Edited,
language::Event::Dirtied, language::Event::Dirtied,
language::Event::Edited language::Event::Edited,
], ],
); );
events.borrow_mut().clear(); events.borrow_mut().clear();
// TODO - currently, after restoring the buffer to its // TODO - currently, after restoring the buffer to its
// previously-saved state, the buffer is still considered dirty. // previously-saved state, the buffer is still considered dirty.
buffer.edit(vec![1..3], "", cx); buffer.edit([1..3], "", cx);
assert!(buffer.text() == "ac"); assert!(buffer.text() == "ac");
assert!(buffer.is_dirty()); assert!(buffer.is_dirty());
}); });


@ -16,10 +16,13 @@ sum_tree = { path = "../sum_tree" }
anyhow = "1.0.38" anyhow = "1.0.38"
arrayvec = "0.7.1" arrayvec = "0.7.1"
log = "0.4" log = "0.4"
parking_lot = "0.11"
rand = { version = "0.8.3", optional = true } rand = { version = "0.8.3", optional = true }
smallvec = { version = "1.6", features = ["union"] } smallvec = { version = "1.6", features = ["union"] }
[dev-dependencies] [dev-dependencies]
collections = { path = "../collections", features = ["test-support"] } collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] }
ctor = "0.1"
env_logger = "0.8"
rand = "0.8.3" rand = "0.8.3"


@ -1,16 +1,39 @@
use std::{cmp, mem}; use crate::Edit;
use std::{
cmp, mem,
ops::{Add, AddAssign, Sub},
};
type Edit = text::Edit<u32>; #[derive(Clone, Default, Debug, PartialEq, Eq)]
pub struct Patch<T>(Vec<Edit<T>>);
#[derive(Default, Debug, PartialEq, Eq)] impl<T> Patch<T>
pub struct Patch(Vec<Edit>); where
T: Clone
impl Patch { + Copy
pub unsafe fn new_unchecked(edits: Vec<Edit>) -> Self { + Ord
+ Sub<T, Output = T>
+ Add<T, Output = T>
+ AddAssign
+ Default
+ PartialEq,
{
pub fn new(edits: Vec<Edit<T>>) -> Self {
#[cfg(debug_assertions)]
{
let mut last_edit: Option<&Edit<T>> = None;
for edit in &edits {
if let Some(last_edit) = last_edit {
assert!(edit.old.start > last_edit.old.end);
assert!(edit.new.start > last_edit.new.end);
}
last_edit = Some(edit);
}
}
Self(edits) Self(edits)
} }
pub fn into_inner(self) -> Vec<Edit> { pub fn into_inner(self) -> Vec<Edit<T>> {
self.0 self.0
} }
@ -19,8 +42,8 @@ impl Patch {
let mut new_edits_iter = other.0.iter().cloned().peekable(); let mut new_edits_iter = other.0.iter().cloned().peekable();
let mut composed = Patch(Vec::new()); let mut composed = Patch(Vec::new());
let mut old_start = 0; let mut old_start = T::default();
let mut new_start = 0; let mut new_start = T::default();
loop { loop {
let old_edit = old_edits_iter.peek_mut(); let old_edit = old_edits_iter.peek_mut();
let new_edit = new_edits_iter.peek_mut(); let new_edit = new_edits_iter.peek_mut();
@ -33,8 +56,8 @@ impl Patch {
old_start += catchup; old_start += catchup;
new_start += catchup; new_start += catchup;
let old_end = old_start + old_edit.old.len() as u32; let old_end = old_start + old_edit.old_len();
let new_end = new_start + old_edit.new.len() as u32; let new_end = new_start + old_edit.new_len();
composed.push(Edit { composed.push(Edit {
old: old_start..old_end, old: old_start..old_end,
new: new_start..new_end, new: new_start..new_end,
@ -54,8 +77,8 @@ impl Patch {
old_start += catchup; old_start += catchup;
new_start += catchup; new_start += catchup;
let old_end = old_start + new_edit.old.len() as u32; let old_end = old_start + new_edit.old_len();
let new_end = new_start + new_edit.new.len() as u32; let new_end = new_start + new_edit.new_len();
composed.push(Edit { composed.push(Edit {
old: old_start..old_end, old: old_start..old_end,
new: new_start..new_end, new: new_start..new_end,
@ -82,7 +105,7 @@ impl Patch {
new: new_start..new_end, new: new_start..new_end,
}); });
old_edit.old.start += overshoot; old_edit.old.start = old_end;
old_edit.new.start += overshoot; old_edit.new.start += overshoot;
old_start = old_end; old_start = old_end;
new_start = new_end; new_start = new_end;
@ -100,15 +123,14 @@ impl Patch {
}); });
new_edit.old.start += overshoot; new_edit.old.start += overshoot;
new_edit.new.start += overshoot; new_edit.new.start = new_end;
old_start = old_end; old_start = old_end;
new_start = new_end; new_start = new_end;
} }
if old_edit.new.end > new_edit.old.end { if old_edit.new.end > new_edit.old.end {
let old_end = let old_end = old_start + cmp::min(old_edit.old_len(), new_edit.old_len());
old_start + cmp::min(old_edit.old.len() as u32, new_edit.old.len() as u32); let new_end = new_start + new_edit.new_len();
let new_end = new_start + new_edit.new.len() as u32;
composed.push(Edit { composed.push(Edit {
old: old_start..old_end, old: old_start..old_end,
new: new_start..new_end, new: new_start..new_end,
@ -120,9 +142,8 @@ impl Patch {
new_start = new_end; new_start = new_end;
new_edits_iter.next(); new_edits_iter.next();
} else { } else {
let old_end = old_start + old_edit.old.len() as u32; let old_end = old_start + old_edit.old_len();
let new_end = let new_end = new_start + cmp::min(old_edit.new_len(), new_edit.new_len());
new_start + cmp::min(old_edit.new.len() as u32, new_edit.new.len() as u32);
composed.push(Edit { composed.push(Edit {
old: old_start..old_end, old: old_start..old_end,
new: new_start..new_end, new: new_start..new_end,
@ -153,8 +174,12 @@ impl Patch {
self.0.clear(); self.0.clear();
} }
fn push(&mut self, edit: Edit) { pub fn is_empty(&self) -> bool {
if edit.old.len() == 0 && edit.new.len() == 0 { self.0.is_empty()
}
pub fn push(&mut self, edit: Edit<T>) {
if edit.is_empty() {
return; return;
} }
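Together, `new`, `push`, and `compose` are what let `Subscription::consume` collapse any number of recorded patches into one. A rough sketch in the style of the tests below (values invented; only the weak assertion is guaranteed):

#[test]
fn compose_two_patches() {
    // Replace two characters at 2..4 with four characters...
    let first: Patch<u32> = Patch::new(vec![Edit { old: 2..4, new: 2..6 }]);
    // ...then, against the resulting text, delete the character at 7..8.
    let second: Patch<u32> = Patch::new(vec![Edit { old: 7..8, new: 7..7 }]);
    // `compose` rebases `second` onto `first`, yielding edits that map the
    // original text directly onto the final text.
    let combined = first.compose(&second);
    assert!(!combined.is_empty());
}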
@ -479,7 +504,7 @@ mod tests {
} }
#[track_caller] #[track_caller]
fn assert_patch_composition(old: Patch, new: Patch, composed: Patch) { fn assert_patch_composition(old: Patch<u32>, new: Patch<u32>, composed: Patch<u32>) {
let original = ('a'..'z').collect::<Vec<_>>(); let original = ('a'..'z').collect::<Vec<_>>();
let inserted = ('A'..'Z').collect::<Vec<_>>(); let inserted = ('A'..'Z').collect::<Vec<_>>();
@ -498,7 +523,7 @@ mod tests {
assert_eq!(old.compose(&new), composed); assert_eq!(old.compose(&new), composed);
} }
fn apply_patch(text: &mut Vec<char>, patch: &Patch, new_text: &[char]) { fn apply_patch(text: &mut Vec<char>, patch: &Patch<u32>, new_text: &[char]) {
for edit in patch.0.iter().rev() { for edit in patch.0.iter().rev() {
text.splice( text.splice(
edit.old.start as usize..edit.old.end as usize, edit.old.start as usize..edit.old.end as usize,


@ -8,6 +8,13 @@ use std::{
time::{Duration, Instant}, time::{Duration, Instant},
}; };
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
// std::env::set_var("RUST_LOG", "info");
env_logger::init();
}
#[test] #[test]
fn test_edit() { fn test_edit() {
let mut buffer = Buffer::new(0, 0, History::new("abc".into())); let mut buffer = Buffer::new(0, 0, History::new("abc".into()));
@ -72,30 +79,43 @@ fn test_random_edits(mut rng: StdRng) {
); );
if rng.gen_bool(0.3) { if rng.gen_bool(0.3) {
buffer_versions.push(buffer.clone()); buffer_versions.push((buffer.clone(), buffer.subscribe()));
} }
} }
for mut old_buffer in buffer_versions { for (old_buffer, subscription) in buffer_versions {
let edits = buffer let edits = buffer
.edits_since::<usize>(&old_buffer.version) .edits_since::<usize>(&old_buffer.version)
.collect::<Vec<_>>(); .collect::<Vec<_>>();
log::info!( log::info!(
"mutating old buffer version {:?}, text: {:?}, edits since: {:?}", "applying edits since version {:?} to old text: {:?}: {:?}",
old_buffer.version(), old_buffer.version(),
old_buffer.text(), old_buffer.text(),
edits, edits,
); );
let mut text = old_buffer.visible_text.clone();
for edit in edits { for edit in edits {
let new_text: String = buffer.text_for_range(edit.new.clone()).collect(); let new_text: String = buffer.text_for_range(edit.new.clone()).collect();
old_buffer.edit( text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text);
Some(edit.new.start..edit.new.start + edit.old.len()),
new_text,
);
} }
assert_eq!(old_buffer.text(), buffer.text()); assert_eq!(text.to_string(), buffer.text());
let subscription_edits = subscription.consume();
log::info!(
"applying subscription edits since version {:?} to old text: {:?}: {:?}",
old_buffer.version(),
old_buffer.text(),
subscription_edits,
);
let mut text = old_buffer.visible_text.clone();
for edit in subscription_edits.into_inner() {
let new_text: String = buffer.text_for_range(edit.new.clone()).collect();
text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text);
}
assert_eq!(text.to_string(), buffer.text());
} }
} }


@ -1,5 +1,6 @@
mod anchor; mod anchor;
mod operation_queue; mod operation_queue;
mod patch;
mod point; mod point;
mod point_utf16; mod point_utf16;
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
@ -14,6 +15,8 @@ use anyhow::{anyhow, Result};
use clock::ReplicaId; use clock::ReplicaId;
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
use operation_queue::OperationQueue; use operation_queue::OperationQueue;
use parking_lot::Mutex;
pub use patch::Patch;
pub use point::*; pub use point::*;
pub use point_utf16::*; pub use point_utf16::*;
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
@ -24,15 +27,14 @@ pub use selection::*;
use std::{ use std::{
cmp::{self, Reverse}, cmp::{self, Reverse},
iter::Iterator, iter::Iterator,
ops::{self, Deref, Range}, ops::{self, Deref, Range, Sub},
str, str,
sync::Arc, sync::{Arc, Weak},
time::{Duration, Instant}, time::{Duration, Instant},
}; };
pub use sum_tree::Bias; pub use sum_tree::Bias;
use sum_tree::{FilterCursor, SumTree}; use sum_tree::{FilterCursor, SumTree};
#[derive(Clone)]
pub struct Buffer { pub struct Buffer {
snapshot: Snapshot, snapshot: Snapshot,
last_edit: clock::Local, last_edit: clock::Local,
@ -44,6 +46,7 @@ pub struct Buffer {
remote_id: u64, remote_id: u64,
local_clock: clock::Local, local_clock: clock::Local,
lamport_clock: clock::Lamport, lamport_clock: clock::Lamport,
subscriptions: Vec<Weak<Mutex<Vec<Patch<usize>>>>>,
} }
#[derive(Clone)] #[derive(Clone)]
@ -307,6 +310,23 @@ pub struct Edit<D> {
pub new: Range<D>, pub new: Range<D>,
} }
impl<D> Edit<D>
where
D: Sub<D, Output = D> + PartialEq + Copy,
{
pub fn old_len(&self) -> D {
self.old.end - self.old.start
}
pub fn new_len(&self) -> D {
self.new.end - self.new.start
}
pub fn is_empty(&self) -> bool {
self.old.start == self.old.end && self.new.start == self.new.end
}
}
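The `old_len`, `new_len`, and `is_empty` helpers keep the patch code generic over the coordinate type `D`. For example (illustrative values):

let edit: Edit<usize> = Edit { old: 10..12, new: 10..15 };
assert_eq!(edit.old_len(), 2);
assert_eq!(edit.new_len(), 5);
assert!(!edit.is_empty());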
impl<D1, D2> Edit<(D1, D2)> { impl<D1, D2> Edit<(D1, D2)> {
pub fn flatten(self) -> (Edit<D1>, Edit<D2>) { pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
( (
@ -322,6 +342,20 @@ impl<D1, D2> Edit<(D1, D2)> {
} }
} }
#[derive(Clone, Default)]
pub struct Subscription(Arc<Mutex<Vec<Patch<usize>>>>);
impl Subscription {
pub fn consume(&self) -> Patch<usize> {
let mut patches = self.0.lock();
let mut changes = Patch::default();
for patch in patches.drain(..) {
changes = changes.compose(&patch);
}
changes
}
}
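`Subscription::consume` folds every patch pushed since the previous call into a single patch via `compose`. An end-to-end sketch at the `text::Buffer` level (constructor and `edit` call shapes taken from the tests in this diff; the ranges are illustrative):

let mut buffer = Buffer::new(0, 0, History::new("abc".into()));
let subscription = buffer.subscribe();

buffer.edit(Some(3..3), "def");
buffer.edit(Some(0..0), "ghi");

// Everything edited since `subscribe` (or the previous `consume`) arrives as
// one composed Patch<usize>: `old` ranges are offsets in the text the
// subscriber last saw, `new` ranges are offsets in the current text.
for edit in subscription.consume().into_inner() {
    println!("old {:?} -> new {:?}", edit.old, edit.new);
}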
#[derive(Copy, Clone, Debug, Default, Eq, PartialEq)] #[derive(Copy, Clone, Debug, Default, Eq, PartialEq)]
pub struct InsertionTimestamp { pub struct InsertionTimestamp {
pub replica_id: ReplicaId, pub replica_id: ReplicaId,
@ -454,13 +488,14 @@ impl Buffer {
}, },
last_edit: clock::Local::default(), last_edit: clock::Local::default(),
history, history,
selections: HashMap::default(), selections: Default::default(),
deferred_ops: OperationQueue::new(), deferred_ops: OperationQueue::new(),
deferred_replicas: HashSet::default(), deferred_replicas: HashSet::default(),
replica_id, replica_id,
remote_id, remote_id,
local_clock, local_clock,
lamport_clock, lamport_clock,
subscriptions: Default::default(),
} }
} }
@ -527,7 +562,8 @@ impl Buffer {
new_text: Option<String>, new_text: Option<String>,
timestamp: InsertionTimestamp, timestamp: InsertionTimestamp,
) -> EditOperation { ) -> EditOperation {
let mut edit = EditOperation { let mut edits = Patch::default();
let mut edit_op = EditOperation {
timestamp, timestamp,
version: self.version(), version: self.version(),
ranges: Vec::with_capacity(ranges.len()), ranges: Vec::with_capacity(ranges.len()),
@ -583,6 +619,11 @@ impl Buffer {
// Insert the new text before any existing fragments within the range. // Insert the new text before any existing fragments within the range.
if let Some(new_text) = new_text.as_deref() { if let Some(new_text) = new_text.as_deref() {
let new_start = new_fragments.summary().text.visible;
edits.push(Edit {
old: fragment_start..fragment_start,
new: new_start..new_start + new_text.len(),
});
new_ropes.push_str(new_text); new_ropes.push_str(new_text);
new_fragments.push( new_fragments.push(
Fragment { Fragment {
@ -609,6 +650,13 @@ impl Buffer {
intersection.visible = false; intersection.visible = false;
} }
if intersection.len > 0 { if intersection.len > 0 {
if fragment.visible && !intersection.visible {
let new_start = new_fragments.summary().text.visible;
edits.push(Edit {
old: fragment_start..intersection_end,
new: new_start..new_start,
});
}
new_ropes.push_fragment(&intersection, fragment.visible); new_ropes.push_fragment(&intersection, fragment.visible);
new_fragments.push(intersection, &None); new_fragments.push(intersection, &None);
fragment_start = intersection_end; fragment_start = intersection_end;
@ -619,7 +667,7 @@ impl Buffer {
} }
let full_range_end = FullOffset(range.end + old_fragments.start().deleted); let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
edit.ranges.push(full_range_start..full_range_end); edit_op.ranges.push(full_range_start..full_range_end);
} }
// If the current fragment has been partially consumed, then consume the rest of it // If the current fragment has been partially consumed, then consume the rest of it
@ -644,8 +692,9 @@ impl Buffer {
self.snapshot.fragments = new_fragments; self.snapshot.fragments = new_fragments;
self.snapshot.visible_text = visible_text; self.snapshot.visible_text = visible_text;
self.snapshot.deleted_text = deleted_text; self.snapshot.deleted_text = deleted_text;
edit.new_text = new_text; self.update_subscriptions(edits);
edit edit_op.new_text = new_text;
edit_op
} }
pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) -> Result<()> { pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) -> Result<()> {
@ -745,10 +794,11 @@ impl Buffer {
return; return;
} }
let mut edits = Patch::default();
let cx = Some(version.clone()); let cx = Some(version.clone());
let mut new_ropes = let mut new_ropes =
RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
let mut old_fragments = self.fragments.cursor::<VersionedFullOffset>(); let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>();
let mut new_fragments = old_fragments.slice( let mut new_fragments = old_fragments.slice(
&VersionedFullOffset::Offset(ranges[0].start), &VersionedFullOffset::Offset(ranges[0].start),
Bias::Left, Bias::Left,
@ -756,16 +806,16 @@ impl Buffer {
); );
new_ropes.push_tree(new_fragments.summary().text); new_ropes.push_tree(new_fragments.summary().text);
let mut fragment_start = old_fragments.start().full_offset(); let mut fragment_start = old_fragments.start().0.full_offset();
for range in ranges { for range in ranges {
let fragment_end = old_fragments.end(&cx).full_offset(); let fragment_end = old_fragments.end(&cx).0.full_offset();
// If the current fragment ends before this range, then jump ahead to the first fragment // If the current fragment ends before this range, then jump ahead to the first fragment
// that extends past the start of this range, reusing any intervening fragments. // that extends past the start of this range, reusing any intervening fragments.
if fragment_end < range.start { if fragment_end < range.start {
// If the current fragment has been partially consumed, then consume the rest of it // If the current fragment has been partially consumed, then consume the rest of it
// and advance to the next fragment before slicing. // and advance to the next fragment before slicing.
if fragment_start > old_fragments.start().full_offset() { if fragment_start > old_fragments.start().0.full_offset() {
if fragment_end > fragment_start { if fragment_end > fragment_start {
let mut suffix = old_fragments.item().unwrap().clone(); let mut suffix = old_fragments.item().unwrap().clone();
suffix.len = fragment_end.0 - fragment_start.0; suffix.len = fragment_end.0 - fragment_start.0;
@ -779,18 +829,18 @@ impl Buffer {
old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx); old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx);
new_ropes.push_tree(slice.summary().text); new_ropes.push_tree(slice.summary().text);
new_fragments.push_tree(slice, &None); new_fragments.push_tree(slice, &None);
fragment_start = old_fragments.start().full_offset(); fragment_start = old_fragments.start().0.full_offset();
} }
// If we are at the end of a non-concurrent fragment, advance to the next one. // If we are at the end of a non-concurrent fragment, advance to the next one.
let fragment_end = old_fragments.end(&cx).full_offset(); let fragment_end = old_fragments.end(&cx).0.full_offset();
if fragment_end == range.start && fragment_end > fragment_start { if fragment_end == range.start && fragment_end > fragment_start {
let mut fragment = old_fragments.item().unwrap().clone(); let mut fragment = old_fragments.item().unwrap().clone();
fragment.len = fragment_end.0 - fragment_start.0; fragment.len = fragment_end.0 - fragment_start.0;
new_ropes.push_fragment(&fragment, fragment.visible); new_ropes.push_fragment(&fragment, fragment.visible);
new_fragments.push(fragment, &None); new_fragments.push(fragment, &None);
old_fragments.next(&cx); old_fragments.next(&cx);
fragment_start = old_fragments.start().full_offset(); fragment_start = old_fragments.start().0.full_offset();
} }
// Skip over insertions that are concurrent to this edit, but have a lower lamport // Skip over insertions that are concurrent to this edit, but have a lower lamport
@ -820,6 +870,15 @@ impl Buffer {
// Insert the new text before any existing fragments within the range. // Insert the new text before any existing fragments within the range.
if let Some(new_text) = new_text { if let Some(new_text) = new_text {
let mut old_start = old_fragments.start().1;
if old_fragments.item().map_or(false, |f| f.visible) {
old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
}
let new_start = new_fragments.summary().text.visible;
edits.push(Edit {
old: old_start..old_start,
new: new_start..new_start + new_text.len(),
});
new_ropes.push_str(new_text); new_ropes.push_str(new_text);
new_fragments.push( new_fragments.push(
Fragment { Fragment {
@ -837,7 +896,7 @@ impl Buffer {
// portions as deleted. // portions as deleted.
while fragment_start < range.end { while fragment_start < range.end {
let fragment = old_fragments.item().unwrap(); let fragment = old_fragments.item().unwrap();
let fragment_end = old_fragments.end(&cx).full_offset(); let fragment_end = old_fragments.end(&cx).0.full_offset();
let mut intersection = fragment.clone(); let mut intersection = fragment.clone();
let intersection_end = cmp::min(range.end, fragment_end); let intersection_end = cmp::min(range.end, fragment_end);
if fragment.was_visible(version, &self.undo_map) { if fragment.was_visible(version, &self.undo_map) {
@ -846,6 +905,15 @@ impl Buffer {
intersection.visible = false; intersection.visible = false;
} }
if intersection.len > 0 { if intersection.len > 0 {
if fragment.visible && !intersection.visible {
let old_start = old_fragments.start().1
+ (fragment_start.0 - old_fragments.start().0.full_offset().0);
let new_start = new_fragments.summary().text.visible;
edits.push(Edit {
old: old_start..old_start + intersection.len,
new: new_start..new_start,
});
}
new_ropes.push_fragment(&intersection, fragment.visible); new_ropes.push_fragment(&intersection, fragment.visible);
new_fragments.push(intersection, &None); new_fragments.push(intersection, &None);
fragment_start = intersection_end; fragment_start = intersection_end;
@ -858,8 +926,8 @@ impl Buffer {
// If the current fragment has been partially consumed, then consume the rest of it // If the current fragment has been partially consumed, then consume the rest of it
// and advance to the next fragment before slicing. // and advance to the next fragment before slicing.
if fragment_start > old_fragments.start().full_offset() { if fragment_start > old_fragments.start().0.full_offset() {
let fragment_end = old_fragments.end(&cx).full_offset(); let fragment_end = old_fragments.end(&cx).0.full_offset();
if fragment_end > fragment_start { if fragment_end > fragment_start {
let mut suffix = old_fragments.item().unwrap().clone(); let mut suffix = old_fragments.item().unwrap().clone();
suffix.len = fragment_end.0 - fragment_start.0; suffix.len = fragment_end.0 - fragment_start.0;
@ -880,9 +948,11 @@ impl Buffer {
self.snapshot.deleted_text = deleted_text; self.snapshot.deleted_text = deleted_text;
self.local_clock.observe(timestamp.local()); self.local_clock.observe(timestamp.local());
self.lamport_clock.observe(timestamp.lamport()); self.lamport_clock.observe(timestamp.lamport());
self.update_subscriptions(edits);
} }
fn apply_undo(&mut self, undo: &UndoOperation) -> Result<()> { fn apply_undo(&mut self, undo: &UndoOperation) -> Result<()> {
let mut edits = Patch::default();
self.snapshot.undo_map.insert(undo); self.snapshot.undo_map.insert(undo);
let mut cx = undo.version.clone(); let mut cx = undo.version.clone();
@ -891,7 +961,7 @@ impl Buffer {
} }
let cx = Some(cx); let cx = Some(cx);
let mut old_fragments = self.fragments.cursor::<VersionedFullOffset>(); let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>();
let mut new_fragments = old_fragments.slice( let mut new_fragments = old_fragments.slice(
&VersionedFullOffset::Offset(undo.ranges[0].start), &VersionedFullOffset::Offset(undo.ranges[0].start),
Bias::Right, Bias::Right,
@ -902,7 +972,7 @@ impl Buffer {
new_ropes.push_tree(new_fragments.summary().text); new_ropes.push_tree(new_fragments.summary().text);
for range in &undo.ranges { for range in &undo.ranges {
let mut end_offset = old_fragments.end(&cx).full_offset(); let mut end_offset = old_fragments.end(&cx).0.full_offset();
if end_offset < range.start { if end_offset < range.start {
let preceding_fragments = old_fragments.slice( let preceding_fragments = old_fragments.slice(
@ -925,11 +995,25 @@ impl Buffer {
fragment.visible = fragment.is_visible(&self.undo_map); fragment.visible = fragment.is_visible(&self.undo_map);
fragment.max_undos.observe(undo.id); fragment.max_undos.observe(undo.id);
} }
let old_start = old_fragments.start().1;
let new_start = new_fragments.summary().text.visible;
if fragment_was_visible && !fragment.visible {
edits.push(Edit {
old: old_start..old_start + fragment.len,
new: new_start..new_start,
});
} else if !fragment_was_visible && fragment.visible {
edits.push(Edit {
old: old_start..old_start,
new: new_start..new_start + fragment.len,
});
}
new_ropes.push_fragment(&fragment, fragment_was_visible); new_ropes.push_fragment(&fragment, fragment_was_visible);
new_fragments.push(fragment, &None); new_fragments.push(fragment, &None);
old_fragments.next(&cx); old_fragments.next(&cx);
if end_offset == old_fragments.end(&cx).full_offset() { if end_offset == old_fragments.end(&cx).0.full_offset() {
let unseen_fragments = old_fragments.slice( let unseen_fragments = old_fragments.slice(
&VersionedFullOffset::Offset(end_offset), &VersionedFullOffset::Offset(end_offset),
Bias::Right, Bias::Right,
@ -938,7 +1022,7 @@ impl Buffer {
new_ropes.push_tree(unseen_fragments.summary().text); new_ropes.push_tree(unseen_fragments.summary().text);
new_fragments.push_tree(unseen_fragments, &None); new_fragments.push_tree(unseen_fragments, &None);
} }
end_offset = old_fragments.end(&cx).full_offset(); end_offset = old_fragments.end(&cx).0.full_offset();
} else { } else {
break; break;
} }
@ -954,6 +1038,7 @@ impl Buffer {
self.snapshot.fragments = new_fragments; self.snapshot.fragments = new_fragments;
self.snapshot.visible_text = visible_text; self.snapshot.visible_text = visible_text;
self.snapshot.deleted_text = deleted_text; self.snapshot.deleted_text = deleted_text;
self.update_subscriptions(edits);
Ok(()) Ok(())
} }
@ -1110,6 +1195,23 @@ impl Buffer {
}) })
} }
pub fn subscribe(&mut self) -> Subscription {
let subscription = Subscription(Default::default());
self.subscriptions.push(Arc::downgrade(&subscription.0));
subscription
}
fn update_subscriptions(&mut self, edits: Patch<usize>) {
self.subscriptions.retain(|subscription| {
if let Some(subscription) = subscription.upgrade() {
subscription.lock().push(edits.clone());
true
} else {
false
}
});
}
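Because the buffer holds only `Weak` pointers to its subscribers, dropping a `Subscription` is enough to unregister it; `update_subscriptions` prunes the dead entry on the next edit. A small sketch of that lifecycle (same assumed setup as the subscription example above):

let mut buffer = Buffer::new(0, 0, History::new("abc".into()));
let subscription = buffer.subscribe();
drop(subscription);

// The Weak pointer can no longer be upgraded, so this edit skips the dropped
// subscriber and removes it from `subscriptions`.
buffer.edit(Some(0..0), "x");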
pub fn selection_set(&self, set_id: SelectionSetId) -> Result<&SelectionSet> { pub fn selection_set(&self, set_id: SelectionSetId) -> Result<&SelectionSet> {
self.selections self.selections
.get(&set_id) .get(&set_id)