mirror of https://github.com/zed-industries/zed.git
synced 2024-11-07 20:39:04 +03:00

Pull out buffer into its own crate

This commit is contained in:
parent 034aed053c
commit becae9feee

Cargo.lock (generated, 27 changed lines)
@@ -742,6 +742,29 @@ dependencies = [
 "memchr",
]

[[package]]
name = "buffer"
version = "0.1.0"
dependencies = [
 "anyhow",
 "arrayvec 0.7.1",
 "clock",
 "gpui",
 "lazy_static",
 "log",
 "parking_lot",
 "rand 0.8.3",
 "seahash",
 "serde 1.0.125",
 "similar",
 "smallvec",
 "sum_tree",
 "tree-sitter",
 "tree-sitter-rust",
 "unindent",
 "zrpc",
]

[[package]]
name = "build_const"
version = "0.2.2"

@@ -5912,10 +5935,10 @@ name = "zed"
version = "0.1.0"
dependencies = [
 "anyhow",
 "arrayvec 0.7.1",
 "async-recursion",
 "async-trait",
 "async-tungstenite",
 "buffer",
 "cargo-bundle",
 "clock",
 "crossbeam-channel",

@@ -5941,11 +5964,9 @@ dependencies = [
 "rand 0.8.3",
 "rsa",
 "rust-embed",
 "seahash",
 "serde 1.0.125",
 "serde_json 1.0.64",
 "serde_path_to_error",
 "similar",
 "simplelog",
 "smallvec",
 "smol",
@@ -1,5 +1,6 @@
[workspace]
members = [
    "buffer",
    "clock",
    "fsevent",
    "fuzzy",
buffer/Cargo.toml (new file, 29 lines)
@@ -0,0 +1,29 @@
[package]
name = "buffer"
version = "0.1.0"
edition = "2018"

[features]
test-support = ["rand"]

[dependencies]
anyhow = "1.0.38"
arrayvec = "0.7.1"
clock = { path = "../clock" }
gpui = { path = "../gpui" }
lazy_static = "1.4"
log = "0.4"
parking_lot = "0.11.1"
rand = { version = "0.8.3", optional = true }
seahash = "4.1"
serde = { version = "1", features = ["derive"] }
similar = "1.3"
smallvec = { version = "1.6", features = ["union"] }
sum_tree = { path = "../sum_tree" }
tree-sitter = "0.19.5"
zrpc = { path = "../zrpc" }

[dev-dependencies]
rand = "0.8.3"
tree-sitter-rust = "0.19.0"
unindent = "0.1.7"
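The test-support feature exists so other crates can compile this crate's randomized-testing helpers outside of cfg(test): it turns on the optional rand dependency, and the helpers themselves are gated the way the lib.rs hunks below show. A minimal sketch of that gating pattern (the function here is illustrative only, not part of this commit):

// Compiled for this crate's own tests, or when a dependent enables the
// `buffer/test-support` feature (which activates the optional `rand` dependency).
#[cfg(any(test, feature = "test-support"))]
pub fn random_ascii(rng: &mut impl rand::Rng, len: usize) -> String {
    (0..len).map(|_| rng.gen_range(b'a'..=b'z') as char).collect()
}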
@@ -1,7 +1,7 @@
use super::{Buffer, Content};
use crate::util::Bias;
use anyhow::Result;
use std::{cmp::Ordering, ops::Range};
use sum_tree::Bias;

#[derive(Clone, Eq, PartialEq, Debug, Hash)]
pub struct Anchor {

@@ -1,4 +1,4 @@
use super::SyntaxTheme;
use crate::syntax_theme::SyntaxTheme;
use std::sync::Arc;

#[derive(Clone, Debug)]
buffer/src/language.rs (new file, 40 lines)
@@ -0,0 +1,40 @@
use crate::{HighlightMap, SyntaxTheme};
use parking_lot::Mutex;
use serde::Deserialize;
use std::str;
use tree_sitter::{Language as Grammar, Query};
pub use tree_sitter::{Parser, Tree};

#[derive(Default, Deserialize)]
pub struct LanguageConfig {
    pub name: String,
    pub path_suffixes: Vec<String>,
}

#[derive(Deserialize)]
pub struct BracketPair {
    pub start: String,
    pub end: String,
}

pub struct Language {
    pub config: LanguageConfig,
    pub grammar: Grammar,
    pub highlight_query: Query,
    pub brackets_query: Query,
    pub highlight_map: Mutex<HighlightMap>,
}

impl Language {
    pub fn name(&self) -> &str {
        self.config.name.as_str()
    }

    pub fn highlight_map(&self) -> HighlightMap {
        self.highlight_map.lock().clone()
    }

    pub fn set_theme(&self, theme: &SyntaxTheme) {
        *self.highlight_map.lock() = HighlightMap::new(self.highlight_query.capture_names(), theme);
    }
}
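For reference, constructing a Language amounts to pairing a tree-sitter grammar with its queries; the rust_lang() test helper that appears later in this diff builds one like this:

use std::sync::Arc;

fn rust_lang() -> Arc<Language> {
    let lang = tree_sitter_rust::language();
    let brackets_query = r#"("{" @open "}" @close)"#;
    Arc::new(Language {
        config: LanguageConfig {
            name: "Rust".to_string(),
            path_suffixes: vec!["rs".to_string()],
        },
        grammar: tree_sitter_rust::language(),
        // Empty highlight query; the brackets query only pairs curly braces.
        highlight_query: tree_sitter::Query::new(lang.clone(), "").unwrap(),
        brackets_query: tree_sitter::Query::new(lang.clone(), brackets_query).unwrap(),
        highlight_map: Default::default(),
    })
}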
@@ -1,18 +1,21 @@
mod anchor;
mod highlight_map;
mod language;
mod operation_queue;
mod point;
#[cfg(any(test, feature = "test-support"))]
pub mod random_char_iter;
pub mod rope;
mod selection;
mod syntax_theme;

use crate::{
    language::{Language, Tree},
    settings::{HighlightId, HighlightMap},
    util::Bias,
};
pub use anchor::*;
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task};
pub use highlight_map::{HighlightId, HighlightMap};
use language::Tree;
pub use language::{Language, LanguageConfig};
use lazy_static::lazy_static;
use operation_queue::OperationQueue;
use parking_lot::Mutex;

@@ -35,7 +38,8 @@ use std::{
    sync::Arc,
    time::{Duration, Instant, SystemTime, UNIX_EPOCH},
};
use sum_tree::{self, FilterCursor, SumTree};
use sum_tree::{self, Bias, FilterCursor, SumTree};
pub use syntax_theme::SyntaxTheme;
use tree_sitter::{InputEdit, Parser, QueryCursor};
use zrpc::proto;

@@ -90,16 +94,16 @@ impl BuildHasher for DeterministicState {
    }
}

#[cfg(test)]
#[cfg(any(test, feature = "test-support"))]
type HashMap<K, V> = std::collections::HashMap<K, V, DeterministicState>;

#[cfg(test)]
#[cfg(any(test, feature = "test-support"))]
type HashSet<T> = std::collections::HashSet<T, DeterministicState>;

#[cfg(not(test))]
#[cfg(not(any(test, feature = "test-support")))]
type HashMap<K, V> = std::collections::HashMap<K, V>;

#[cfg(not(test))]
#[cfg(not(any(test, feature = "test-support")))]
type HashSet<T> = std::collections::HashSet<T>;

thread_local! {

@@ -858,7 +862,7 @@ impl Buffer {
        }
    }

    #[cfg(test)]
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.parsing_in_background
    }
@ -1957,6 +1961,170 @@ impl Buffer {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl Buffer {
|
||||
fn random_byte_range(&mut self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
|
||||
let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right);
|
||||
let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right);
|
||||
start..end
|
||||
}
|
||||
|
||||
pub fn randomly_edit<T>(
|
||||
&mut self,
|
||||
rng: &mut T,
|
||||
old_range_count: usize,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> (Vec<Range<usize>>, String)
|
||||
where
|
||||
T: rand::Rng,
|
||||
{
|
||||
let mut old_ranges: Vec<Range<usize>> = Vec::new();
|
||||
for _ in 0..old_range_count {
|
||||
let last_end = old_ranges.last().map_or(0, |last_range| last_range.end + 1);
|
||||
if last_end > self.len() {
|
||||
break;
|
||||
}
|
||||
old_ranges.push(self.random_byte_range(last_end, rng));
|
||||
}
|
||||
let new_text_len = rng.gen_range(0..10);
|
||||
let new_text: String = crate::random_char_iter::RandomCharIter::new(&mut *rng)
|
||||
.take(new_text_len)
|
||||
.collect();
|
||||
log::info!(
|
||||
"mutating buffer {} at {:?}: {:?}",
|
||||
self.replica_id,
|
||||
old_ranges,
|
||||
new_text
|
||||
);
|
||||
self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx);
|
||||
(old_ranges, new_text)
|
||||
}
|
||||
|
||||
pub fn randomly_mutate<T>(
|
||||
&mut self,
|
||||
rng: &mut T,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> (Vec<Range<usize>>, String)
|
||||
where
|
||||
T: rand::Rng,
|
||||
{
|
||||
use rand::prelude::*;
|
||||
|
||||
let (old_ranges, new_text) = self.randomly_edit(rng, 5, cx);
|
||||
|
||||
// Randomly add, remove or mutate selection sets.
|
||||
let replica_selection_sets = &self
|
||||
.selection_sets()
|
||||
.map(|(set_id, _)| *set_id)
|
||||
.filter(|set_id| self.replica_id == set_id.replica_id)
|
||||
.collect::<Vec<_>>();
|
||||
let set_id = replica_selection_sets.choose(rng);
|
||||
if set_id.is_some() && rng.gen_bool(1.0 / 6.0) {
|
||||
self.remove_selection_set(*set_id.unwrap(), cx).unwrap();
|
||||
} else {
|
||||
let mut ranges = Vec::new();
|
||||
for _ in 0..5 {
|
||||
ranges.push(self.random_byte_range(0, rng));
|
||||
}
|
||||
let new_selections = self.selections_from_ranges(ranges).unwrap();
|
||||
|
||||
if set_id.is_none() || rng.gen_bool(1.0 / 5.0) {
|
||||
self.add_selection_set(new_selections, cx);
|
||||
} else {
|
||||
self.update_selection_set(*set_id.unwrap(), new_selections, cx)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
(old_ranges, new_text)
|
||||
}
|
||||
|
||||
pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
|
||||
use rand::prelude::*;
|
||||
|
||||
for _ in 0..rng.gen_range(1..=5) {
|
||||
if let Some(transaction) = self.history.undo_stack.choose(rng).cloned() {
|
||||
log::info!(
|
||||
"undoing buffer {} transaction {:?}",
|
||||
self.replica_id,
|
||||
transaction
|
||||
);
|
||||
self.undo_or_redo(transaction, cx).unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn selections_from_ranges<I>(&self, ranges: I) -> Result<Vec<Selection>>
|
||||
where
|
||||
I: IntoIterator<Item = Range<usize>>,
|
||||
{
|
||||
use std::sync::atomic::{self, AtomicUsize};
|
||||
|
||||
static NEXT_SELECTION_ID: AtomicUsize = AtomicUsize::new(0);
|
||||
|
||||
let mut ranges = ranges.into_iter().collect::<Vec<_>>();
|
||||
ranges.sort_unstable_by_key(|range| range.start);
|
||||
|
||||
let mut selections = Vec::with_capacity(ranges.len());
|
||||
for range in ranges {
|
||||
if range.start > range.end {
|
||||
selections.push(Selection {
|
||||
id: NEXT_SELECTION_ID.fetch_add(1, atomic::Ordering::SeqCst),
|
||||
start: self.anchor_before(range.end),
|
||||
end: self.anchor_before(range.start),
|
||||
reversed: true,
|
||||
goal: SelectionGoal::None,
|
||||
});
|
||||
} else {
|
||||
selections.push(Selection {
|
||||
id: NEXT_SELECTION_ID.fetch_add(1, atomic::Ordering::SeqCst),
|
||||
start: self.anchor_after(range.start),
|
||||
end: self.anchor_before(range.end),
|
||||
reversed: false,
|
||||
goal: SelectionGoal::None,
|
||||
});
|
||||
}
|
||||
}
|
||||
Ok(selections)
|
||||
}
|
||||
|
||||
pub fn selection_ranges<'a>(&'a self, set_id: SelectionSetId) -> Result<Vec<Range<usize>>> {
|
||||
Ok(self
|
||||
.selection_set(set_id)?
|
||||
.selections
|
||||
.iter()
|
||||
.map(move |selection| {
|
||||
let start = selection.start.to_offset(self);
|
||||
let end = selection.end.to_offset(self);
|
||||
if selection.reversed {
|
||||
end..start
|
||||
} else {
|
||||
start..end
|
||||
}
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
pub fn all_selection_ranges<'a>(
|
||||
&'a self,
|
||||
) -> impl 'a + Iterator<Item = (SelectionSetId, Vec<Range<usize>>)> {
|
||||
self.selections
|
||||
.keys()
|
||||
.map(move |set_id| (*set_id, self.selection_ranges(*set_id).unwrap()))
|
||||
}
|
||||
|
||||
pub fn enclosing_bracket_point_ranges<T: ToOffset>(
|
||||
&self,
|
||||
range: Range<T>,
|
||||
) -> Option<(Range<Point>, Range<Point>)> {
|
||||
self.enclosing_bracket_ranges(range).map(|(start, end)| {
|
||||
let point_start = start.start.to_point(self)..start.end.to_point(self);
|
||||
let point_end = end.start.to_point(self)..end.end.to_point(self);
|
||||
(point_start, point_end)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for Buffer {
|
||||
fn clone(&self) -> Self {
|
||||
Self {
|
||||
@ -2947,26 +3115,12 @@ impl ToPoint for usize {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::random_char_iter::RandomCharIter;
|
||||
|
||||
use super::*;
|
||||
use crate::{
|
||||
fs::RealFs,
|
||||
language::LanguageRegistry,
|
||||
rpc,
|
||||
test::temp_tree,
|
||||
util::RandomCharIter,
|
||||
worktree::{Worktree, WorktreeHandle as _},
|
||||
};
|
||||
use gpui::ModelHandle;
|
||||
use rand::prelude::*;
|
||||
use serde_json::json;
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
cmp::Ordering,
|
||||
env, fs, mem,
|
||||
path::Path,
|
||||
rc::Rc,
|
||||
sync::atomic::{self, AtomicUsize},
|
||||
};
|
||||
use std::{cell::RefCell, cmp::Ordering, env, mem, rc::Rc};
|
||||
|
||||
#[gpui::test]
|
||||
fn test_edit(cx: &mut gpui::MutableAppContext) {
|
||||
@ -3410,228 +3564,6 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_is_dirty(mut cx: gpui::TestAppContext) {
|
||||
let dir = temp_tree(json!({
|
||||
"file1": "abc",
|
||||
"file2": "def",
|
||||
"file3": "ghi",
|
||||
}));
|
||||
let tree = Worktree::open_local(
|
||||
rpc::Client::new(),
|
||||
dir.path(),
|
||||
Arc::new(RealFs),
|
||||
Default::default(),
|
||||
&mut cx.to_async(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
tree.flush_fs_events(&cx).await;
|
||||
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
|
||||
.await;
|
||||
|
||||
let buffer1 = tree
|
||||
.update(&mut cx, |tree, cx| tree.open_buffer("file1", cx))
|
||||
.await
|
||||
.unwrap();
|
||||
let events = Rc::new(RefCell::new(Vec::new()));
|
||||
|
||||
// initially, the buffer isn't dirty.
|
||||
buffer1.update(&mut cx, |buffer, cx| {
|
||||
cx.subscribe(&buffer1, {
|
||||
let events = events.clone();
|
||||
move |_, _, event, _| events.borrow_mut().push(event.clone())
|
||||
})
|
||||
.detach();
|
||||
|
||||
assert!(!buffer.is_dirty());
|
||||
assert!(events.borrow().is_empty());
|
||||
|
||||
buffer.edit(vec![1..2], "", cx);
|
||||
});
|
||||
|
||||
// after the first edit, the buffer is dirty, and emits a dirtied event.
|
||||
buffer1.update(&mut cx, |buffer, cx| {
|
||||
assert!(buffer.text() == "ac");
|
||||
assert!(buffer.is_dirty());
|
||||
assert_eq!(*events.borrow(), &[Event::Edited, Event::Dirtied]);
|
||||
events.borrow_mut().clear();
|
||||
buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
|
||||
});
|
||||
|
||||
// after saving, the buffer is not dirty, and emits a saved event.
|
||||
buffer1.update(&mut cx, |buffer, cx| {
|
||||
assert!(!buffer.is_dirty());
|
||||
assert_eq!(*events.borrow(), &[Event::Saved]);
|
||||
events.borrow_mut().clear();
|
||||
|
||||
buffer.edit(vec![1..1], "B", cx);
|
||||
buffer.edit(vec![2..2], "D", cx);
|
||||
});
|
||||
|
||||
// after editing again, the buffer is dirty, and emits another dirty event.
|
||||
buffer1.update(&mut cx, |buffer, cx| {
|
||||
assert!(buffer.text() == "aBDc");
|
||||
assert!(buffer.is_dirty());
|
||||
assert_eq!(
|
||||
*events.borrow(),
|
||||
&[Event::Edited, Event::Dirtied, Event::Edited],
|
||||
);
|
||||
events.borrow_mut().clear();
|
||||
|
||||
// TODO - currently, after restoring the buffer to its
|
||||
// previously-saved state, the buffer is still considered dirty.
|
||||
buffer.edit(vec![1..3], "", cx);
|
||||
assert!(buffer.text() == "ac");
|
||||
assert!(buffer.is_dirty());
|
||||
});
|
||||
|
||||
assert_eq!(*events.borrow(), &[Event::Edited]);
|
||||
|
||||
// When a file is deleted, the buffer is considered dirty.
|
||||
let events = Rc::new(RefCell::new(Vec::new()));
|
||||
let buffer2 = tree
|
||||
.update(&mut cx, |tree, cx| tree.open_buffer("file2", cx))
|
||||
.await
|
||||
.unwrap();
|
||||
buffer2.update(&mut cx, |_, cx| {
|
||||
cx.subscribe(&buffer2, {
|
||||
let events = events.clone();
|
||||
move |_, _, event, _| events.borrow_mut().push(event.clone())
|
||||
})
|
||||
.detach();
|
||||
});
|
||||
|
||||
fs::remove_file(dir.path().join("file2")).unwrap();
|
||||
buffer2.condition(&cx, |b, _| b.is_dirty()).await;
|
||||
assert_eq!(
|
||||
*events.borrow(),
|
||||
&[Event::Dirtied, Event::FileHandleChanged]
|
||||
);
|
||||
|
||||
// When a file is already dirty when deleted, we don't emit a Dirtied event.
|
||||
let events = Rc::new(RefCell::new(Vec::new()));
|
||||
let buffer3 = tree
|
||||
.update(&mut cx, |tree, cx| tree.open_buffer("file3", cx))
|
||||
.await
|
||||
.unwrap();
|
||||
buffer3.update(&mut cx, |_, cx| {
|
||||
cx.subscribe(&buffer3, {
|
||||
let events = events.clone();
|
||||
move |_, _, event, _| events.borrow_mut().push(event.clone())
|
||||
})
|
||||
.detach();
|
||||
});
|
||||
|
||||
tree.flush_fs_events(&cx).await;
|
||||
buffer3.update(&mut cx, |buffer, cx| {
|
||||
buffer.edit(Some(0..0), "x", cx);
|
||||
});
|
||||
events.borrow_mut().clear();
|
||||
fs::remove_file(dir.path().join("file3")).unwrap();
|
||||
buffer3
|
||||
.condition(&cx, |_, _| !events.borrow().is_empty())
|
||||
.await;
|
||||
assert_eq!(*events.borrow(), &[Event::FileHandleChanged]);
|
||||
cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_file_changes_on_disk(mut cx: gpui::TestAppContext) {
|
||||
let initial_contents = "aaa\nbbbbb\nc\n";
|
||||
let dir = temp_tree(json!({ "the-file": initial_contents }));
|
||||
let tree = Worktree::open_local(
|
||||
rpc::Client::new(),
|
||||
dir.path(),
|
||||
Arc::new(RealFs),
|
||||
Default::default(),
|
||||
&mut cx.to_async(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
|
||||
.await;
|
||||
|
||||
let abs_path = dir.path().join("the-file");
|
||||
let buffer = tree
|
||||
.update(&mut cx, |tree, cx| {
|
||||
tree.open_buffer(Path::new("the-file"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Add a cursor at the start of each row.
|
||||
let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
|
||||
assert!(!buffer.is_dirty());
|
||||
buffer.add_selection_set(
|
||||
(0..3)
|
||||
.map(|row| {
|
||||
let anchor = buffer.anchor_at(Point::new(row, 0), Bias::Right);
|
||||
Selection {
|
||||
id: row as usize,
|
||||
start: anchor.clone(),
|
||||
end: anchor,
|
||||
reversed: false,
|
||||
goal: SelectionGoal::None,
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
// Change the file on disk, adding two new lines of text, and removing
|
||||
// one line.
|
||||
buffer.read_with(&cx, |buffer, _| {
|
||||
assert!(!buffer.is_dirty());
|
||||
assert!(!buffer.has_conflict());
|
||||
});
|
||||
let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
|
||||
fs::write(&abs_path, new_contents).unwrap();
|
||||
|
||||
// Because the buffer was not modified, it is reloaded from disk. Its
|
||||
// contents are edited according to the diff between the old and new
|
||||
// file contents.
|
||||
buffer
|
||||
.condition(&cx, |buffer, _| buffer.text() != initial_contents)
|
||||
.await;
|
||||
|
||||
buffer.update(&mut cx, |buffer, _| {
|
||||
assert_eq!(buffer.text(), new_contents);
|
||||
assert!(!buffer.is_dirty());
|
||||
assert!(!buffer.has_conflict());
|
||||
|
||||
let set = buffer.selection_set(selection_set_id).unwrap();
|
||||
let cursor_positions = set
|
||||
.selections
|
||||
.iter()
|
||||
.map(|selection| {
|
||||
assert_eq!(selection.start, selection.end);
|
||||
selection.start.to_point(&*buffer)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
cursor_positions,
|
||||
&[Point::new(1, 0), Point::new(3, 0), Point::new(4, 0),]
|
||||
);
|
||||
});
|
||||
|
||||
// Modify the buffer
|
||||
buffer.update(&mut cx, |buffer, cx| {
|
||||
buffer.edit(vec![0..0], " ", cx);
|
||||
assert!(buffer.is_dirty());
|
||||
});
|
||||
|
||||
// Change the file on disk again, adding blank lines to the beginning.
|
||||
fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
|
||||
|
||||
// Because the buffer is modified, it doesn't reload from disk, but is
|
||||
// marked as having a conflict.
|
||||
buffer
|
||||
.condition(&cx, |buffer, _| buffer.has_conflict())
|
||||
.await;
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_apply_diff(mut cx: gpui::TestAppContext) {
|
||||
let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
|
||||
@ -3800,8 +3732,6 @@ mod tests {
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_random_concurrent_edits(cx: &mut gpui::MutableAppContext, mut rng: StdRng) {
|
||||
use crate::test::Network;
|
||||
|
||||
let peers = env::var("PEERS")
|
||||
.map(|i| i.parse().expect("invalid `PEERS` variable"))
|
||||
.unwrap_or(5);
|
||||
@ -3889,13 +3819,10 @@ mod tests {
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_reparse(mut cx: gpui::TestAppContext) {
|
||||
let languages = LanguageRegistry::new();
|
||||
let rust_lang = languages.select_language("test.rs");
|
||||
assert!(rust_lang.is_some());
|
||||
|
||||
let rust_lang = rust_lang();
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let text = "fn a() {}".into();
|
||||
Buffer::from_history(0, History::new(text), None, rust_lang.cloned(), cx)
|
||||
Buffer::from_history(0, History::new(text), None, Some(rust_lang.clone()), cx)
|
||||
});
|
||||
|
||||
// Wait for the initial text to parse
|
||||
@ -4031,10 +3958,7 @@ mod tests {
|
||||
async fn test_enclosing_bracket_ranges(mut cx: gpui::TestAppContext) {
|
||||
use unindent::Unindent as _;
|
||||
|
||||
let languages = LanguageRegistry::new();
|
||||
let rust_lang = languages.select_language("test.rs");
|
||||
assert!(rust_lang.is_some());
|
||||
|
||||
let rust_lang = rust_lang();
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let text = "
|
||||
mod x {
|
||||
@ -4045,7 +3969,7 @@ mod tests {
|
||||
"
|
||||
.unindent()
|
||||
.into();
|
||||
Buffer::from_history(0, History::new(text), None, rust_lang.cloned(), cx)
|
||||
Buffer::from_history(0, History::new(text), None, Some(rust_lang.clone()), cx)
|
||||
});
|
||||
buffer
|
||||
.condition(&cx, |buffer, _| !buffer.is_parsing())
|
||||
@ -4075,158 +3999,98 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
impl Buffer {
|
||||
fn random_byte_range(&mut self, start_offset: usize, rng: &mut impl Rng) -> Range<usize> {
|
||||
let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right);
|
||||
let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right);
|
||||
start..end
|
||||
}
|
||||
#[derive(Clone)]
|
||||
struct Envelope<T: Clone> {
|
||||
message: T,
|
||||
sender: ReplicaId,
|
||||
}
|
||||
|
||||
pub fn randomly_edit<T>(
|
||||
&mut self,
|
||||
rng: &mut T,
|
||||
old_range_count: usize,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> (Vec<Range<usize>>, String)
|
||||
where
|
||||
T: Rng,
|
||||
{
|
||||
let mut old_ranges: Vec<Range<usize>> = Vec::new();
|
||||
for _ in 0..old_range_count {
|
||||
let last_end = old_ranges.last().map_or(0, |last_range| last_range.end + 1);
|
||||
if last_end > self.len() {
|
||||
break;
|
||||
}
|
||||
old_ranges.push(self.random_byte_range(last_end, rng));
|
||||
}
|
||||
let new_text_len = rng.gen_range(0..10);
|
||||
let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
|
||||
log::info!(
|
||||
"mutating buffer {} at {:?}: {:?}",
|
||||
self.replica_id,
|
||||
old_ranges,
|
||||
new_text
|
||||
);
|
||||
self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx);
|
||||
(old_ranges, new_text)
|
||||
}
|
||||
struct Network<T: Clone, R: rand::Rng> {
|
||||
inboxes: std::collections::BTreeMap<ReplicaId, Vec<Envelope<T>>>,
|
||||
all_messages: Vec<T>,
|
||||
rng: R,
|
||||
}
|
||||
|
||||
pub fn randomly_mutate<T>(
|
||||
&mut self,
|
||||
rng: &mut T,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> (Vec<Range<usize>>, String)
|
||||
where
|
||||
T: Rng,
|
||||
{
|
||||
let (old_ranges, new_text) = self.randomly_edit(rng, 5, cx);
|
||||
|
||||
// Randomly add, remove or mutate selection sets.
|
||||
let replica_selection_sets = &self
|
||||
.selection_sets()
|
||||
.map(|(set_id, _)| *set_id)
|
||||
.filter(|set_id| self.replica_id == set_id.replica_id)
|
||||
.collect::<Vec<_>>();
|
||||
let set_id = replica_selection_sets.choose(rng);
|
||||
if set_id.is_some() && rng.gen_bool(1.0 / 6.0) {
|
||||
self.remove_selection_set(*set_id.unwrap(), cx).unwrap();
|
||||
} else {
|
||||
let mut ranges = Vec::new();
|
||||
for _ in 0..5 {
|
||||
ranges.push(self.random_byte_range(0, rng));
|
||||
}
|
||||
let new_selections = self.selections_from_ranges(ranges).unwrap();
|
||||
|
||||
if set_id.is_none() || rng.gen_bool(1.0 / 5.0) {
|
||||
self.add_selection_set(new_selections, cx);
|
||||
} else {
|
||||
self.update_selection_set(*set_id.unwrap(), new_selections, cx)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
(old_ranges, new_text)
|
||||
}
|
||||
|
||||
pub fn randomly_undo_redo(&mut self, rng: &mut impl Rng, cx: &mut ModelContext<Self>) {
|
||||
for _ in 0..rng.gen_range(1..=5) {
|
||||
if let Some(transaction) = self.history.undo_stack.choose(rng).cloned() {
|
||||
log::info!(
|
||||
"undoing buffer {} transaction {:?}",
|
||||
self.replica_id,
|
||||
transaction
|
||||
);
|
||||
self.undo_or_redo(transaction, cx).unwrap();
|
||||
}
|
||||
impl<T: Clone, R: rand::Rng> Network<T, R> {
|
||||
fn new(rng: R) -> Self {
|
||||
Network {
|
||||
inboxes: Default::default(),
|
||||
all_messages: Vec::new(),
|
||||
rng,
|
||||
}
|
||||
}
|
||||
|
||||
fn selections_from_ranges<I>(&self, ranges: I) -> Result<Vec<Selection>>
|
||||
where
|
||||
I: IntoIterator<Item = Range<usize>>,
|
||||
{
|
||||
static NEXT_SELECTION_ID: AtomicUsize = AtomicUsize::new(0);
|
||||
|
||||
let mut ranges = ranges.into_iter().collect::<Vec<_>>();
|
||||
ranges.sort_unstable_by_key(|range| range.start);
|
||||
|
||||
let mut selections = Vec::with_capacity(ranges.len());
|
||||
for range in ranges {
|
||||
if range.start > range.end {
|
||||
selections.push(Selection {
|
||||
id: NEXT_SELECTION_ID.fetch_add(1, atomic::Ordering::SeqCst),
|
||||
start: self.anchor_before(range.end),
|
||||
end: self.anchor_before(range.start),
|
||||
reversed: true,
|
||||
goal: SelectionGoal::None,
|
||||
});
|
||||
} else {
|
||||
selections.push(Selection {
|
||||
id: NEXT_SELECTION_ID.fetch_add(1, atomic::Ordering::SeqCst),
|
||||
start: self.anchor_after(range.start),
|
||||
end: self.anchor_before(range.end),
|
||||
reversed: false,
|
||||
goal: SelectionGoal::None,
|
||||
});
|
||||
}
|
||||
}
|
||||
Ok(selections)
|
||||
fn add_peer(&mut self, id: ReplicaId) {
|
||||
self.inboxes.insert(id, Vec::new());
|
||||
}
|
||||
|
||||
pub fn selection_ranges<'a>(&'a self, set_id: SelectionSetId) -> Result<Vec<Range<usize>>> {
|
||||
Ok(self
|
||||
.selection_set(set_id)?
|
||||
.selections
|
||||
.iter()
|
||||
.map(move |selection| {
|
||||
let start = selection.start.to_offset(self);
|
||||
let end = selection.end.to_offset(self);
|
||||
if selection.reversed {
|
||||
end..start
|
||||
} else {
|
||||
start..end
|
||||
fn is_idle(&self) -> bool {
|
||||
self.inboxes.values().all(|i| i.is_empty())
|
||||
}
|
||||
|
||||
fn broadcast(&mut self, sender: ReplicaId, messages: Vec<T>) {
|
||||
for (replica, inbox) in self.inboxes.iter_mut() {
|
||||
if *replica != sender {
|
||||
for message in &messages {
|
||||
let min_index = inbox
|
||||
.iter()
|
||||
.enumerate()
|
||||
.rev()
|
||||
.find_map(|(index, envelope)| {
|
||||
if sender == envelope.sender {
|
||||
Some(index + 1)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.unwrap_or(0);
|
||||
|
||||
// Insert one or more duplicates of this message *after* the previous
|
||||
// message delivered by this replica.
|
||||
for _ in 0..self.rng.gen_range(1..4) {
|
||||
let insertion_index = self.rng.gen_range(min_index..inbox.len() + 1);
|
||||
inbox.insert(
|
||||
insertion_index,
|
||||
Envelope {
|
||||
message: message.clone(),
|
||||
sender,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
}
|
||||
self.all_messages.extend(messages);
|
||||
}
|
||||
|
||||
pub fn all_selection_ranges<'a>(
|
||||
&'a self,
|
||||
) -> impl 'a + Iterator<Item = (SelectionSetId, Vec<Range<usize>>)> {
|
||||
self.selections
|
||||
.keys()
|
||||
.map(move |set_id| (*set_id, self.selection_ranges(*set_id).unwrap()))
|
||||
fn has_unreceived(&self, receiver: ReplicaId) -> bool {
|
||||
!self.inboxes[&receiver].is_empty()
|
||||
}
|
||||
|
||||
pub fn enclosing_bracket_point_ranges<T: ToOffset>(
|
||||
&self,
|
||||
range: Range<T>,
|
||||
) -> Option<(Range<Point>, Range<Point>)> {
|
||||
self.enclosing_bracket_ranges(range).map(|(start, end)| {
|
||||
let point_start = start.start.to_point(self)..start.end.to_point(self);
|
||||
let point_end = end.start.to_point(self)..end.end.to_point(self);
|
||||
(point_start, point_end)
|
||||
})
|
||||
fn receive(&mut self, receiver: ReplicaId) -> Vec<T> {
|
||||
let inbox = self.inboxes.get_mut(&receiver).unwrap();
|
||||
let count = self.rng.gen_range(0..inbox.len() + 1);
|
||||
inbox
|
||||
.drain(0..count)
|
||||
.map(|envelope| envelope.message)
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
fn rust_lang() -> Arc<Language> {
|
||||
let lang = tree_sitter_rust::language();
|
||||
let brackets_query = r#"
|
||||
("{" @open "}" @close)
|
||||
"#;
|
||||
Arc::new(Language {
|
||||
config: LanguageConfig {
|
||||
name: "Rust".to_string(),
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
},
|
||||
grammar: tree_sitter_rust::language(),
|
||||
highlight_query: tree_sitter::Query::new(lang.clone(), "").unwrap(),
|
||||
brackets_query: tree_sitter::Query::new(lang.clone(), brackets_query).unwrap(),
|
||||
highlight_map: Default::default(),
|
||||
})
|
||||
}
|
||||
}
|
buffer/src/random_char_iter.rs (new file, 28 lines)
@@ -0,0 +1,28 @@
use rand::prelude::*;

pub struct RandomCharIter<T: Rng>(T);

impl<T: Rng> RandomCharIter<T> {
    pub fn new(rng: T) -> Self {
        Self(rng)
    }
}

impl<T: Rng> Iterator for RandomCharIter<T> {
    type Item = char;

    fn next(&mut self) -> Option<Self::Item> {
        match self.0.gen_range(0..100) {
            // whitespace
            0..=19 => [' ', '\n', '\t'].choose(&mut self.0).copied(),
            // two-byte greek letters
            20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))),
            // three-byte characters
            33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(),
            // four-byte characters
            46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(),
            // ascii letters
            _ => Some(self.0.gen_range(b'a'..b'z' + 1).into()),
        }
    }
}
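The randomized buffer tests drive this iterator off whatever Rng they own; a minimal usage sketch with a seeded StdRng (the helper name is illustrative):

use rand::{rngs::StdRng, SeedableRng};

fn sample_text() -> String {
    // Ten random characters drawn from ASCII, whitespace, Greek letters, and emoji.
    RandomCharIter::new(StdRng::seed_from_u64(0)).take(10).collect()
}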
@@ -1,9 +1,8 @@
use super::Point;
use crate::util::Bias;
use arrayvec::ArrayString;
use smallvec::SmallVec;
use std::{cmp, ops::Range, str};
use sum_tree::{self, SumTree};
use sum_tree::{self, Bias, SumTree};

#[cfg(test)]
const CHUNK_BASE: usize = 6;

@@ -520,7 +519,7 @@ fn find_split_ix(text: &str) -> usize {
#[cfg(test)]
mod tests {
    use super::*;
    use crate::util::RandomCharIter;
    use crate::random_char_iter::RandomCharIter;
    use rand::prelude::*;
    use std::env;
    use Bias::{Left, Right};
@ -1,7 +1,4 @@
|
||||
use crate::editor::{
|
||||
buffer::{Anchor, Buffer, Point, ToOffset as _, ToPoint as _},
|
||||
Bias, DisplayMapSnapshot, DisplayPoint,
|
||||
};
|
||||
use crate::{Anchor, Buffer, Point, ToOffset as _, ToPoint as _};
|
||||
use std::{cmp::Ordering, mem, ops::Range};
|
||||
|
||||
pub type SelectionSetId = clock::Lamport;
|
||||
@ -14,11 +11,6 @@ pub enum SelectionGoal {
|
||||
ColumnRange { start: u32, end: u32 },
|
||||
}
|
||||
|
||||
pub struct SpannedRows {
|
||||
pub buffer_rows: Range<u32>,
|
||||
pub display_rows: Range<u32>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct Selection {
|
||||
pub id: usize,
|
||||
@ -80,38 +72,4 @@ impl Selection {
|
||||
start..end
|
||||
}
|
||||
}
|
||||
|
||||
pub fn display_range(&self, map: &DisplayMapSnapshot) -> Range<DisplayPoint> {
|
||||
let start = self.start.to_display_point(map, Bias::Left);
|
||||
let end = self.end.to_display_point(map, Bias::Left);
|
||||
if self.reversed {
|
||||
end..start
|
||||
} else {
|
||||
start..end
|
||||
}
|
||||
}
|
||||
|
||||
pub fn spanned_rows(
|
||||
&self,
|
||||
include_end_if_at_line_start: bool,
|
||||
map: &DisplayMapSnapshot,
|
||||
) -> SpannedRows {
|
||||
let display_start = self.start.to_display_point(map, Bias::Left);
|
||||
let mut display_end = self.end.to_display_point(map, Bias::Right);
|
||||
if !include_end_if_at_line_start
|
||||
&& display_end.row() != map.max_point().row()
|
||||
&& display_start.row() != display_end.row()
|
||||
&& display_end.column() == 0
|
||||
{
|
||||
*display_end.row_mut() -= 1;
|
||||
}
|
||||
|
||||
let (display_start, buffer_start) = map.prev_row_boundary(display_start);
|
||||
let (display_end, buffer_end) = map.next_row_boundary(display_end);
|
||||
|
||||
SpannedRows {
|
||||
buffer_rows: buffer_start.row..buffer_end.row + 1,
|
||||
display_rows: display_start.row()..display_end.row() + 1,
|
||||
}
|
||||
}
|
||||
}
|
buffer/src/syntax_theme.rs (new file, 49 lines)
@@ -0,0 +1,49 @@
use std::collections::HashMap;

use crate::HighlightId;
use gpui::fonts::HighlightStyle;
use serde::Deserialize;

pub struct SyntaxTheme {
    pub(crate) highlights: Vec<(String, HighlightStyle)>,
}

impl SyntaxTheme {
    pub fn new(highlights: Vec<(String, HighlightStyle)>) -> Self {
        Self { highlights }
    }

    pub fn highlight_style(&self, id: HighlightId) -> Option<HighlightStyle> {
        self.highlights
            .get(id.0 as usize)
            .map(|entry| entry.1.clone())
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn highlight_name(&self, id: HighlightId) -> Option<&str> {
        self.highlights.get(id.0 as usize).map(|e| e.0.as_str())
    }
}

impl<'de> Deserialize<'de> for SyntaxTheme {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let syntax_data: HashMap<String, HighlightStyle> = Deserialize::deserialize(deserializer)?;

        let mut result = Self::new(Vec::new());
        for (key, style) in syntax_data {
            match result
                .highlights
                .binary_search_by(|(needle, _)| needle.cmp(&key))
            {
                Ok(i) | Err(i) => {
                    result.highlights.insert(i, (key, style));
                }
            }
        }

        Ok(result)
    }
}
@@ -14,14 +14,19 @@ name = "Zed"
path = "src/main.rs"

[features]
test-support = ["tempdir", "zrpc/test-support", "gpui/test-support"]
test-support = [
    "buffer/test-support",
    "gpui/test-support",
    "tempdir",
    "zrpc/test-support",
]

[dependencies]
anyhow = "1.0.38"
async-recursion = "0.3"
async-trait = "0.1"
arrayvec = "0.7.1"
async-tungstenite = { version = "0.14", features = ["async-tls"] }
buffer = { path = "../buffer" }
clock = { path = "../clock" }
crossbeam-channel = "0.5.0"
ctor = "0.1.20"

@@ -45,11 +50,9 @@ postage = { version = "0.4.1", features = ["futures-traits"] }
rand = "0.8.3"
rsa = "0.4"
rust-embed = { version = "6.2", features = ["include-exclude"] }
seahash = "4.1"
serde = { version = "1", features = ["derive"] }
serde_json = { version = "1.0.64", features = ["preserve_order"] }
serde_path_to_error = "0.1.4"
similar = "1.3"
simplelog = "0.9"
smallvec = { version = "1.6", features = ["union"] }
smol = "1.2.5"

@@ -71,6 +74,7 @@ env_logger = "0.8"
serde_json = { version = "1.0.64", features = ["preserve_order"] }
tempdir = { version = "0.3.7" }
unindent = "0.1.7"
buffer = { path = "../buffer", features = ["test-support"] }
zrpc = { path = "../zrpc", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
@ -1,10 +1,8 @@
|
||||
pub mod buffer;
|
||||
pub mod display_map;
|
||||
mod element;
|
||||
pub mod movement;
|
||||
|
||||
use crate::{
|
||||
language::Language,
|
||||
project::ProjectPath,
|
||||
settings::Settings,
|
||||
theme::Theme,
|
||||
@ -13,7 +11,7 @@ use crate::{
|
||||
worktree::Worktree,
|
||||
};
|
||||
use anyhow::Result;
|
||||
pub use buffer::*;
|
||||
use buffer::*;
|
||||
use clock::ReplicaId;
|
||||
pub use display_map::DisplayPoint;
|
||||
use display_map::*;
|
||||
@ -251,6 +249,20 @@ pub fn init(cx: &mut MutableAppContext) {
|
||||
cx.add_action(Editor::fold_selected_ranges);
|
||||
}
|
||||
|
||||
trait SelectionExt {
|
||||
fn display_range(&self, map: &DisplayMapSnapshot) -> Range<DisplayPoint>;
|
||||
fn spanned_rows(
|
||||
&self,
|
||||
include_end_if_at_line_start: bool,
|
||||
map: &DisplayMapSnapshot,
|
||||
) -> SpannedRows;
|
||||
}
|
||||
|
||||
struct SpannedRows {
|
||||
buffer_rows: Range<u32>,
|
||||
display_rows: Range<u32>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum SelectPhase {
|
||||
Begin {
|
||||
@ -2702,6 +2714,42 @@ impl workspace::ItemView for Editor {
|
||||
}
|
||||
}
|
||||
|
||||
impl SelectionExt for Selection {
|
||||
fn display_range(&self, map: &DisplayMapSnapshot) -> Range<DisplayPoint> {
|
||||
let start = self.start.to_display_point(map, Bias::Left);
|
||||
let end = self.end.to_display_point(map, Bias::Left);
|
||||
if self.reversed {
|
||||
end..start
|
||||
} else {
|
||||
start..end
|
||||
}
|
||||
}
|
||||
|
||||
fn spanned_rows(
|
||||
&self,
|
||||
include_end_if_at_line_start: bool,
|
||||
map: &DisplayMapSnapshot,
|
||||
) -> SpannedRows {
|
||||
let display_start = self.start.to_display_point(map, Bias::Left);
|
||||
let mut display_end = self.end.to_display_point(map, Bias::Right);
|
||||
if !include_end_if_at_line_start
|
||||
&& display_end.row() != map.max_point().row()
|
||||
&& display_start.row() != display_end.row()
|
||||
&& display_end.column() == 0
|
||||
{
|
||||
*display_end.row_mut() -= 1;
|
||||
}
|
||||
|
||||
let (display_start, buffer_start) = map.prev_row_boundary(display_start);
|
||||
let (display_end, buffer_end) = map.next_row_boundary(display_end);
|
||||
|
||||
SpannedRows {
|
||||
buffer_rows: buffer_start.row..buffer_end.row + 1,
|
||||
display_rows: display_start.row()..display_end.row() + 1,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
@ -2,14 +2,19 @@ mod fold_map;
|
||||
mod tab_map;
|
||||
mod wrap_map;
|
||||
|
||||
use super::{buffer, Anchor, Bias, Buffer, Point, ToOffset, ToPoint};
|
||||
use fold_map::FoldMap;
|
||||
use buffer::{self, Anchor, Buffer, Point, ToOffset, ToPoint};
|
||||
use fold_map::{FoldMap, ToFoldPoint as _};
|
||||
use gpui::{fonts::FontId, Entity, ModelContext, ModelHandle};
|
||||
use std::ops::Range;
|
||||
use sum_tree::Bias;
|
||||
use tab_map::TabMap;
|
||||
use wrap_map::WrapMap;
|
||||
pub use wrap_map::{BufferRows, HighlightedChunks};
|
||||
|
||||
pub trait ToDisplayPoint {
|
||||
fn to_display_point(&self, map: &DisplayMapSnapshot, bias: Bias) -> DisplayPoint;
|
||||
}
|
||||
|
||||
pub struct DisplayMap {
|
||||
buffer: ModelHandle<Buffer>,
|
||||
fold_map: FoldMap,
|
||||
@ -333,8 +338,8 @@ impl DisplayPoint {
|
||||
}
|
||||
}
|
||||
|
||||
impl Point {
|
||||
pub fn to_display_point(self, map: &DisplayMapSnapshot, bias: Bias) -> DisplayPoint {
|
||||
impl ToDisplayPoint for Point {
|
||||
fn to_display_point(&self, map: &DisplayMapSnapshot, bias: Bias) -> DisplayPoint {
|
||||
let fold_point = self.to_fold_point(&map.folds_snapshot, bias);
|
||||
let tab_point = map.tabs_snapshot.to_tab_point(fold_point);
|
||||
let wrap_point = map.wraps_snapshot.to_wrap_point(tab_point);
|
||||
@ -342,8 +347,8 @@ impl Point {
|
||||
}
|
||||
}
|
||||
|
||||
impl Anchor {
|
||||
pub fn to_display_point(&self, map: &DisplayMapSnapshot, bias: Bias) -> DisplayPoint {
|
||||
impl ToDisplayPoint for Anchor {
|
||||
fn to_display_point(&self, map: &DisplayMapSnapshot, bias: Bias) -> DisplayPoint {
|
||||
self.to_point(&map.buffer_snapshot)
|
||||
.to_display_point(map, bias)
|
||||
}
|
||||
@ -352,14 +357,8 @@ impl Anchor {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{
|
||||
editor::movement,
|
||||
language::{Language, LanguageConfig},
|
||||
test::*,
|
||||
theme::SyntaxTheme,
|
||||
util::RandomCharIter,
|
||||
};
|
||||
use buffer::{History, SelectionGoal};
|
||||
use crate::{editor::movement, test::*, util::RandomCharIter};
|
||||
use buffer::{History, Language, LanguageConfig, SelectionGoal, SyntaxTheme};
|
||||
use gpui::{color::Color, MutableAppContext};
|
||||
use rand::{prelude::StdRng, Rng};
|
||||
use std::{env, sync::Arc};
|
||||
|
@ -2,7 +2,7 @@ use super::{
|
||||
buffer::{AnchorRangeExt, TextSummary},
|
||||
Anchor, Buffer, Point, ToOffset,
|
||||
};
|
||||
use crate::{editor::buffer, settings::HighlightId, util::Bias};
|
||||
use buffer::HighlightId;
|
||||
use gpui::{AppContext, ModelHandle};
|
||||
use parking_lot::Mutex;
|
||||
use std::{
|
||||
@ -11,7 +11,11 @@ use std::{
|
||||
ops::Range,
|
||||
sync::atomic::{AtomicUsize, Ordering::SeqCst},
|
||||
};
|
||||
use sum_tree::{self, Cursor, FilterCursor, SumTree};
|
||||
use sum_tree::{self, Bias, Cursor, FilterCursor, SumTree};
|
||||
|
||||
pub trait ToFoldPoint {
|
||||
fn to_fold_point(&self, snapshot: &Snapshot, bias: Bias) -> FoldPoint;
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
|
||||
pub struct FoldPoint(pub super::Point);
|
||||
@ -73,8 +77,8 @@ impl FoldPoint {
|
||||
}
|
||||
}
|
||||
|
||||
impl Point {
|
||||
pub fn to_fold_point(&self, snapshot: &Snapshot, bias: Bias) -> FoldPoint {
|
||||
impl ToFoldPoint for Point {
|
||||
fn to_fold_point(&self, snapshot: &Snapshot, bias: Bias) -> FoldPoint {
|
||||
let mut cursor = snapshot.transforms.cursor::<(Point, FoldPoint)>();
|
||||
cursor.seek(self, Bias::Right, &());
|
||||
if cursor.item().map_or(false, |t| t.is_fold()) {
|
||||
@ -544,6 +548,7 @@ impl Snapshot {
|
||||
summary
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn len(&self) -> FoldOffset {
|
||||
FoldOffset(self.transforms.summary().output.bytes)
|
||||
}
|
||||
|
@@ -1,7 +1,7 @@
use parking_lot::Mutex;

use super::fold_map::{self, FoldEdit, FoldPoint, Snapshot as FoldSnapshot};
use crate::{editor::rope, settings::HighlightId, util::Bias};
use crate::util::Bias;
use buffer::{rope, HighlightId};
use parking_lot::Mutex;
use std::{mem, ops::Range};

pub struct TabMap(Mutex<Snapshot>);

@@ -2,7 +2,8 @@ use super::{
    fold_map,
    tab_map::{self, Edit as TabEdit, Snapshot as TabSnapshot, TabPoint, TextSummary},
};
use crate::{editor::Point, settings::HighlightId, util::Bias};
use crate::{editor::Point, util::Bias};
use buffer::HighlightId;
use gpui::{fonts::FontId, text_layout::LineWrapper, Entity, ModelContext, Task};
use lazy_static::lazy_static;
use smol::future::yield_now;
@@ -2,7 +2,7 @@ use super::{
    DisplayPoint, Editor, EditorMode, EditorStyle, Insert, Scroll, Select, SelectPhase, Snapshot,
    MAX_LINE_LEN,
};
use crate::theme::HighlightId;
use buffer::HighlightId;
use clock::ReplicaId;
use gpui::{
    color::Color,
@@ -1,5 +1,5 @@
use super::editor::Rope;
use anyhow::{anyhow, Result};
use buffer::Rope;
use fsevent::EventStream;
use futures::{Stream, StreamExt};
use postage::prelude::Sink as _;
@ -1,53 +1,18 @@
|
||||
use crate::{settings::HighlightMap, theme::SyntaxTheme};
|
||||
use buffer::{HighlightMap, Language, SyntaxTheme};
|
||||
use parking_lot::Mutex;
|
||||
use rust_embed::RustEmbed;
|
||||
use serde::Deserialize;
|
||||
use std::{path::Path, str, sync::Arc};
|
||||
use tree_sitter::{Language as Grammar, Query};
|
||||
use tree_sitter::Query;
|
||||
pub use tree_sitter::{Parser, Tree};
|
||||
|
||||
#[derive(RustEmbed)]
|
||||
#[folder = "languages"]
|
||||
pub struct LanguageDir;
|
||||
|
||||
#[derive(Default, Deserialize)]
|
||||
pub struct LanguageConfig {
|
||||
pub name: String,
|
||||
pub path_suffixes: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct BracketPair {
|
||||
pub start: String,
|
||||
pub end: String,
|
||||
}
|
||||
|
||||
pub struct Language {
|
||||
pub config: LanguageConfig,
|
||||
pub grammar: Grammar,
|
||||
pub highlight_query: Query,
|
||||
pub brackets_query: Query,
|
||||
pub highlight_map: Mutex<HighlightMap>,
|
||||
}
|
||||
|
||||
pub struct LanguageRegistry {
|
||||
languages: Vec<Arc<Language>>,
|
||||
}
|
||||
|
||||
impl Language {
|
||||
pub fn name(&self) -> &str {
|
||||
self.config.name.as_str()
|
||||
}
|
||||
|
||||
pub fn highlight_map(&self) -> HighlightMap {
|
||||
self.highlight_map.lock().clone()
|
||||
}
|
||||
|
||||
pub fn set_theme(&self, theme: &SyntaxTheme) {
|
||||
*self.highlight_map.lock() = HighlightMap::new(self.highlight_query.capture_names(), theme);
|
||||
}
|
||||
}
|
||||
|
||||
impl LanguageRegistry {
|
||||
pub fn new() -> Self {
|
||||
let grammar = tree_sitter_rust::language();
|
||||
@ -104,6 +69,7 @@ impl Default for LanguageRegistry {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use buffer::LanguageConfig;
|
||||
|
||||
#[test]
|
||||
fn test_select_language() {
|
||||
|
@@ -3,8 +3,7 @@ use anyhow::Result;
use gpui::font_cache::{FamilyId, FontCache};
use postage::watch;
use std::sync::Arc;

pub use theme::{HighlightId, HighlightMap, Theme, ThemeRegistry};
pub use theme::{Theme, ThemeRegistry};

#[derive(Clone)]
pub struct Settings {
@ -10,7 +10,6 @@ use crate::{
|
||||
AppState,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use clock::ReplicaId;
|
||||
use futures::{future::BoxFuture, Future};
|
||||
use gpui::{AsyncAppContext, Entity, ModelHandle, MutableAppContext, TestAppContext};
|
||||
use parking_lot::Mutex;
|
||||
@ -34,86 +33,6 @@ fn init_logger() {
|
||||
env_logger::init();
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct Envelope<T: Clone> {
|
||||
message: T,
|
||||
sender: ReplicaId,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) struct Network<T: Clone, R: rand::Rng> {
|
||||
inboxes: std::collections::BTreeMap<ReplicaId, Vec<Envelope<T>>>,
|
||||
all_messages: Vec<T>,
|
||||
rng: R,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
impl<T: Clone, R: rand::Rng> Network<T, R> {
|
||||
pub fn new(rng: R) -> Self {
|
||||
Network {
|
||||
inboxes: Default::default(),
|
||||
all_messages: Vec::new(),
|
||||
rng,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_peer(&mut self, id: ReplicaId) {
|
||||
self.inboxes.insert(id, Vec::new());
|
||||
}
|
||||
|
||||
pub fn is_idle(&self) -> bool {
|
||||
self.inboxes.values().all(|i| i.is_empty())
|
||||
}
|
||||
|
||||
pub fn broadcast(&mut self, sender: ReplicaId, messages: Vec<T>) {
|
||||
for (replica, inbox) in self.inboxes.iter_mut() {
|
||||
if *replica != sender {
|
||||
for message in &messages {
|
||||
let min_index = inbox
|
||||
.iter()
|
||||
.enumerate()
|
||||
.rev()
|
||||
.find_map(|(index, envelope)| {
|
||||
if sender == envelope.sender {
|
||||
Some(index + 1)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.unwrap_or(0);
|
||||
|
||||
// Insert one or more duplicates of this message *after* the previous
|
||||
// message delivered by this replica.
|
||||
for _ in 0..self.rng.gen_range(1..4) {
|
||||
let insertion_index = self.rng.gen_range(min_index..inbox.len() + 1);
|
||||
inbox.insert(
|
||||
insertion_index,
|
||||
Envelope {
|
||||
message: message.clone(),
|
||||
sender,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
self.all_messages.extend(messages);
|
||||
}
|
||||
|
||||
pub fn has_unreceived(&self, receiver: ReplicaId) -> bool {
|
||||
!self.inboxes[&receiver].is_empty()
|
||||
}
|
||||
|
||||
pub fn receive(&mut self, receiver: ReplicaId) -> Vec<T> {
|
||||
let inbox = self.inboxes.get_mut(&receiver).unwrap();
|
||||
let count = self.rng.gen_range(0..inbox.len() + 1);
|
||||
inbox
|
||||
.drain(0..count)
|
||||
.map(|envelope| envelope.message)
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn sample_text(rows: usize, cols: usize) -> String {
|
||||
let mut text = String::new();
|
||||
for row in 0..rows {
|
||||
|
@ -1,19 +1,16 @@
|
||||
mod highlight_map;
|
||||
mod resolution;
|
||||
mod theme_registry;
|
||||
|
||||
use crate::editor::{EditorStyle, SelectionStyle};
|
||||
use anyhow::Result;
|
||||
use buffer::SyntaxTheme;
|
||||
use gpui::{
|
||||
color::Color,
|
||||
elements::{ContainerStyle, ImageStyle, LabelStyle},
|
||||
fonts::{HighlightStyle, TextStyle},
|
||||
fonts::TextStyle,
|
||||
Border,
|
||||
};
|
||||
use serde::Deserialize;
|
||||
use std::collections::HashMap;
|
||||
|
||||
pub use highlight_map::*;
|
||||
pub use theme_registry::*;
|
||||
|
||||
pub const DEFAULT_THEME_NAME: &'static str = "black";
|
||||
@ -31,10 +28,6 @@ pub struct Theme {
|
||||
pub syntax: SyntaxTheme,
|
||||
}
|
||||
|
||||
pub struct SyntaxTheme {
|
||||
highlights: Vec<(String, HighlightStyle)>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct Workspace {
|
||||
pub background: Color,
|
||||
@ -220,23 +213,6 @@ pub struct InputEditorStyle {
|
||||
pub selection: SelectionStyle,
|
||||
}
|
||||
|
||||
impl SyntaxTheme {
|
||||
pub fn new(highlights: Vec<(String, HighlightStyle)>) -> Self {
|
||||
Self { highlights }
|
||||
}
|
||||
|
||||
pub fn highlight_style(&self, id: HighlightId) -> Option<HighlightStyle> {
|
||||
self.highlights
|
||||
.get(id.0 as usize)
|
||||
.map(|entry| entry.1.clone())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn highlight_name(&self, id: HighlightId) -> Option<&str> {
|
||||
self.highlights.get(id.0 as usize).map(|e| e.0.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl InputEditorStyle {
|
||||
pub fn as_editor(&self) -> EditorStyle {
|
||||
EditorStyle {
|
||||
@ -255,26 +231,3 @@ impl InputEditorStyle {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for SyntaxTheme {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let syntax_data: HashMap<String, HighlightStyle> = Deserialize::deserialize(deserializer)?;
|
||||
|
||||
let mut result = Self::new(Vec::new());
|
||||
for (key, style) in syntax_data {
|
||||
match result
|
||||
.highlights
|
||||
.binary_search_by(|(needle, _)| needle.cmp(&key))
|
||||
{
|
||||
Ok(i) | Err(i) => {
|
||||
result.highlights.insert(i, (key, style));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
|
@@ -4,7 +4,6 @@ pub mod sidebar;

use crate::{
    chat_panel::ChatPanel,
    editor::Buffer,
    fs::Fs,
    people_panel::{JoinWorktree, LeaveWorktree, PeoplePanel, ShareWorktree, UnshareWorktree},
    project::{Project, ProjectPath},

@@ -17,6 +16,7 @@ use crate::{
    AppState, Authenticate,
};
use anyhow::Result;
use buffer::Buffer;
use gpui::{
    action,
    elements::*,
@ -2,7 +2,6 @@ mod ignore;
|
||||
|
||||
use self::ignore::IgnoreStack;
|
||||
use crate::{
|
||||
editor::{self, buffer, Buffer, History, Operation, Rope},
|
||||
fs::{self, Fs},
|
||||
fuzzy::CharBag,
|
||||
language::LanguageRegistry,
|
||||
@ -11,6 +10,7 @@ use crate::{
|
||||
};
|
||||
use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
|
||||
use anyhow::{anyhow, Result};
|
||||
use buffer::{self, Buffer, History, Operation, Rope};
|
||||
use clock::ReplicaId;
|
||||
use futures::{Stream, StreamExt};
|
||||
use gpui::{
|
||||
@ -630,14 +630,14 @@ impl Worktree {
|
||||
file_changed = true;
|
||||
} else if !file.is_deleted() {
|
||||
if buffer_is_clean {
|
||||
cx.emit(editor::buffer::Event::Dirtied);
|
||||
cx.emit(buffer::Event::Dirtied);
|
||||
}
|
||||
file.set_entry_id(None);
|
||||
file_changed = true;
|
||||
}
|
||||
|
||||
if file_changed {
|
||||
cx.emit(editor::buffer::Event::FileHandleChanged);
|
||||
cx.emit(buffer::Event::FileHandleChanged);
|
||||
}
|
||||
}
|
||||
});
|
||||
@ -2839,6 +2839,8 @@ mod tests {
|
||||
use fs::RealFs;
|
||||
use rand::prelude::*;
|
||||
use serde_json::json;
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
use std::time::UNIX_EPOCH;
|
||||
use std::{env, fmt::Write, time::SystemTime};
|
||||
|
||||
@ -3218,6 +3220,240 @@ mod tests {
|
||||
server.receive::<proto::CloseWorktree>().await.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
|
||||
use std::fs;
|
||||
|
||||
let dir = temp_tree(json!({
|
||||
"file1": "abc",
|
||||
"file2": "def",
|
||||
"file3": "ghi",
|
||||
}));
|
||||
let tree = Worktree::open_local(
|
||||
rpc::Client::new(),
|
||||
dir.path(),
|
||||
Arc::new(RealFs),
|
||||
Default::default(),
|
||||
&mut cx.to_async(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
tree.flush_fs_events(&cx).await;
|
||||
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
|
||||
.await;
|
||||
|
||||
let buffer1 = tree
|
||||
.update(&mut cx, |tree, cx| tree.open_buffer("file1", cx))
|
||||
.await
|
||||
.unwrap();
|
||||
let events = Rc::new(RefCell::new(Vec::new()));
|
||||
|
||||
// initially, the buffer isn't dirty.
|
||||
buffer1.update(&mut cx, |buffer, cx| {
|
||||
cx.subscribe(&buffer1, {
|
||||
let events = events.clone();
|
||||
move |_, _, event, _| events.borrow_mut().push(event.clone())
|
||||
})
|
||||
.detach();
|
||||
|
||||
assert!(!buffer.is_dirty());
|
||||
assert!(events.borrow().is_empty());
|
||||
|
||||
buffer.edit(vec![1..2], "", cx);
|
||||
});
|
||||
|
||||
// after the first edit, the buffer is dirty, and emits a dirtied event.
|
||||
buffer1.update(&mut cx, |buffer, cx| {
|
||||
assert!(buffer.text() == "ac");
|
||||
assert!(buffer.is_dirty());
|
||||
assert_eq!(
|
||||
*events.borrow(),
|
||||
&[buffer::Event::Edited, buffer::Event::Dirtied]
|
||||
);
|
||||
events.borrow_mut().clear();
|
||||
buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
|
||||
});
|
||||
|
||||
// after saving, the buffer is not dirty, and emits a saved event.
|
||||
buffer1.update(&mut cx, |buffer, cx| {
|
||||
assert!(!buffer.is_dirty());
|
||||
assert_eq!(*events.borrow(), &[buffer::Event::Saved]);
|
||||
events.borrow_mut().clear();
|
||||
|
||||
buffer.edit(vec![1..1], "B", cx);
|
||||
buffer.edit(vec![2..2], "D", cx);
|
||||
});
|
||||
|
||||
// after editing again, the buffer is dirty, and emits another dirty event.
|
||||
buffer1.update(&mut cx, |buffer, cx| {
|
||||
assert!(buffer.text() == "aBDc");
|
||||
assert!(buffer.is_dirty());
|
||||
assert_eq!(
|
||||
*events.borrow(),
|
||||
&[
|
||||
buffer::Event::Edited,
|
||||
buffer::Event::Dirtied,
|
||||
buffer::Event::Edited
|
||||
],
|
||||
);
|
||||
events.borrow_mut().clear();
|
||||
|
||||
// TODO - currently, after restoring the buffer to its
|
||||
// previously-saved state, the buffer is still considered dirty.
|
||||
buffer.edit(vec![1..3], "", cx);
|
||||
assert!(buffer.text() == "ac");
|
||||
assert!(buffer.is_dirty());
|
||||
});
|
||||
|
||||
assert_eq!(*events.borrow(), &[buffer::Event::Edited]);
|
||||
|
||||
// When a file is deleted, the buffer is considered dirty.
|
||||
let events = Rc::new(RefCell::new(Vec::new()));
|
||||
let buffer2 = tree
|
||||
.update(&mut cx, |tree, cx| tree.open_buffer("file2", cx))
|
||||
.await
|
||||
.unwrap();
|
||||
buffer2.update(&mut cx, |_, cx| {
|
||||
cx.subscribe(&buffer2, {
|
||||
let events = events.clone();
|
||||
move |_, _, event, _| events.borrow_mut().push(event.clone())
|
||||
})
|
||||
.detach();
|
||||
});
|
||||
|
||||
fs::remove_file(dir.path().join("file2")).unwrap();
|
||||
buffer2.condition(&cx, |b, _| b.is_dirty()).await;
|
||||
assert_eq!(
|
||||
*events.borrow(),
|
||||
&[buffer::Event::Dirtied, buffer::Event::FileHandleChanged]
|
||||
);
|
||||
|
||||
// When a file is already dirty when deleted, we don't emit a Dirtied event.
|
||||
let events = Rc::new(RefCell::new(Vec::new()));
|
||||
let buffer3 = tree
|
||||
.update(&mut cx, |tree, cx| tree.open_buffer("file3", cx))
|
||||
.await
|
||||
.unwrap();
|
||||
buffer3.update(&mut cx, |_, cx| {
|
||||
cx.subscribe(&buffer3, {
|
||||
let events = events.clone();
|
||||
move |_, _, event, _| events.borrow_mut().push(event.clone())
|
||||
})
|
||||
.detach();
|
||||
});
|
||||
|
||||
tree.flush_fs_events(&cx).await;
|
||||
buffer3.update(&mut cx, |buffer, cx| {
|
||||
buffer.edit(Some(0..0), "x", cx);
|
||||
});
|
||||
events.borrow_mut().clear();
|
||||
fs::remove_file(dir.path().join("file3")).unwrap();
|
||||
buffer3
|
||||
.condition(&cx, |_, _| !events.borrow().is_empty())
|
||||
.await;
|
||||
assert_eq!(*events.borrow(), &[buffer::Event::FileHandleChanged]);
|
||||
cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
|
||||
use buffer::{Point, Selection, SelectionGoal, ToPoint};
|
||||
use std::fs;
|
||||
|
||||
let initial_contents = "aaa\nbbbbb\nc\n";
|
||||
let dir = temp_tree(json!({ "the-file": initial_contents }));
|
||||
let tree = Worktree::open_local(
|
||||
rpc::Client::new(),
|
||||
dir.path(),
|
||||
Arc::new(RealFs),
|
||||
Default::default(),
|
||||
&mut cx.to_async(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
|
||||
.await;
|
||||
|
||||
let abs_path = dir.path().join("the-file");
|
||||
let buffer = tree
|
||||
.update(&mut cx, |tree, cx| {
|
||||
tree.open_buffer(Path::new("the-file"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Add a cursor at the start of each row.
|
||||
let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
|
||||
assert!(!buffer.is_dirty());
|
||||
buffer.add_selection_set(
|
||||
(0..3)
|
||||
.map(|row| {
|
||||
let anchor = buffer.anchor_at(Point::new(row, 0), Bias::Right);
|
||||
Selection {
|
||||
id: row as usize,
|
||||
start: anchor.clone(),
|
||||
end: anchor,
|
||||
reversed: false,
|
||||
goal: SelectionGoal::None,
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
// Change the file on disk, adding two new lines of text, and removing
|
||||
// one line.
|
||||
buffer.read_with(&cx, |buffer, _| {
|
||||
assert!(!buffer.is_dirty());
|
||||
assert!(!buffer.has_conflict());
|
||||
});
|
||||
let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
|
||||
fs::write(&abs_path, new_contents).unwrap();
|
||||
|
||||
// Because the buffer was not modified, it is reloaded from disk. Its
|
||||
// contents are edited according to the diff between the old and new
|
||||
// file contents.
|
||||
buffer
|
||||
.condition(&cx, |buffer, _| buffer.text() != initial_contents)
|
||||
.await;
|
||||
|
||||
buffer.update(&mut cx, |buffer, _| {
|
||||
assert_eq!(buffer.text(), new_contents);
|
||||
assert!(!buffer.is_dirty());
|
||||
assert!(!buffer.has_conflict());
|
||||
|
||||
let set = buffer.selection_set(selection_set_id).unwrap();
|
||||
let cursor_positions = set
|
||||
.selections
|
||||
.iter()
|
||||
.map(|selection| {
|
||||
assert_eq!(selection.start, selection.end);
|
||||
selection.start.to_point(&*buffer)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
cursor_positions,
|
||||
&[Point::new(1, 0), Point::new(3, 0), Point::new(4, 0),]
|
||||
);
|
||||
});
|
||||
|
||||
// Modify the buffer
|
||||
buffer.update(&mut cx, |buffer, cx| {
|
||||
buffer.edit(vec![0..0], " ", cx);
|
||||
assert!(buffer.is_dirty());
|
||||
});
|
||||
|
||||
// Change the file on disk again, adding blank lines to the beginning.
|
||||
fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
|
||||
|
||||
// Because the buffer is modified, it doesn't reload from disk, but is
|
||||
// marked as having a conflict.
|
||||
buffer
|
||||
.condition(&cx, |buffer, _| buffer.has_conflict())
|
||||
.await;
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_random(mut rng: StdRng) {
|
||||
let operations = env::var("OPERATIONS")
|
||||
|