Mirror of https://github.com/zed-industries/zed.git (synced 2024-11-07 20:39:04 +03:00)

Commit 6384950d56: Merge remote-tracking branch 'origin/main' into lsp

Cargo.lock (generated): 36 lines changed
@@ -759,20 +759,12 @@ dependencies = [
 "arrayvec 0.7.1",
 "clock",
 "gpui",
 "lazy_static",
 "log",
 "parking_lot",
 "rand 0.8.3",
 "rpc",
 "seahash",
 "serde 1.0.125",
 "similar",
 "smallvec",
 "sum_tree",
 "theme",
 "tree-sitter",
 "tree-sitter-rust",
 "unindent",
]

[[package]]

@@ -1632,6 +1624,7 @@ dependencies = [
 "buffer",
 "clock",
 "gpui",
 "language",
 "lazy_static",
 "log",
 "parking_lot",

@@ -2824,6 +2817,30 @@ dependencies = [
 "log",
]

[[package]]
name = "language"
version = "0.1.0"
dependencies = [
 "anyhow",
 "buffer",
 "clock",
 "futures",
 "gpui",
 "lazy_static",
 "log",
 "parking_lot",
 "rand 0.8.3",
 "rpc",
 "serde 1.0.125",
 "similar",
 "smol",
 "theme",
 "tree-sitter",
 "tree-sitter-rust",
 "unindent",
 "util",
]

[[package]]
name = "lazy_static"
version = "1.4.0"

@@ -3801,6 +3818,7 @@ dependencies = [
 "fuzzy",
 "gpui",
 "ignore",
 "language",
 "lazy_static",
 "libc",
 "log",

@@ -6159,6 +6177,7 @@ dependencies = [
 "client",
 "editor",
 "gpui",
 "language",
 "log",
 "postage",
 "project",

@@ -6229,6 +6248,7 @@ dependencies = [
 "ignore",
 "image 0.23.14",
 "indexmap",
 "language",
 "lazy_static",
 "libc",
 "log",
@@ -4,29 +4,20 @@ version = "0.1.0"
edition = "2018"

[features]
test-support = ["rand"]
test-support = ["rand", "seahash"]

[dependencies]
clock = { path = "../clock" }
gpui = { path = "../gpui" }
rpc = { path = "../rpc" }
sum_tree = { path = "../sum_tree" }
theme = { path = "../theme" }
anyhow = "1.0.38"
arrayvec = "0.7.1"
lazy_static = "1.4"
log = "0.4"
parking_lot = "0.11.1"
rand = { version = "0.8.3", optional = true }
seahash = "4.1"
serde = { version = "1", features = ["derive"] }
similar = "1.3"
seahash = { version = "4.1", optional = true }
smallvec = { version = "1.6", features = ["union"] }
tree-sitter = "0.19.5"

[dev-dependencies]
gpui = { path = "../gpui", features = ["test-support"] }

seahash = "4.1"
rand = "0.8.3"
tree-sitter-rust = "0.19.0"
unindent = "0.1.7"
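The manifest above turns `seahash` into an optional dependency that is pulled back in through the `test-support` feature (and a plain dev-dependency). As a minimal sketch of how such a feature-gated optional dependency is typically consumed in code, assuming a hypothetical helper name that is not part of this commit:

```rust
// Only compiled when the `test-support` feature is enabled, which is what
// makes the optional `seahash` dependency available at all.
#[cfg(feature = "test-support")]
pub fn text_checksum(text: &str) -> u64 {
    // `seahash::hash` takes a byte slice and returns a 64-bit checksum.
    seahash::hash(text.as_bytes())
}
```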
@@ -1,3 +1,5 @@
use crate::Point;

use super::{Buffer, Content};
use anyhow::Result;
use std::{cmp::Ordering, ops::Range};

@@ -10,6 +12,24 @@ pub struct Anchor {
    pub version: clock::Global,
}

#[derive(Clone)]
pub struct AnchorMap<T> {
    pub(crate) version: clock::Global,
    pub(crate) entries: Vec<((usize, Bias), T)>,
}

#[derive(Clone)]
pub struct AnchorSet(pub(crate) AnchorMap<()>);

#[derive(Clone)]
pub struct AnchorRangeMap<T> {
    pub(crate) version: clock::Global,
    pub(crate) entries: Vec<(Range<(usize, Bias)>, T)>,
}

#[derive(Clone)]
pub struct AnchorRangeSet(pub(crate) AnchorRangeMap<()>);

impl Anchor {
    pub fn min() -> Self {
        Self {

@@ -62,6 +82,60 @@ impl Anchor {
    }
}

impl<T> AnchorMap<T> {
    pub fn to_points<'a>(
        &'a self,
        content: impl Into<Content<'a>> + 'a,
    ) -> impl Iterator<Item = (Point, &'a T)> + 'a {
        let content = content.into();
        content
            .summaries_for_anchors(self)
            .map(move |(sum, value)| (sum.lines, value))
    }

    pub fn version(&self) -> &clock::Global {
        &self.version
    }
}

impl AnchorSet {
    pub fn to_points<'a>(
        &'a self,
        content: impl Into<Content<'a>> + 'a,
    ) -> impl Iterator<Item = Point> + 'a {
        self.0.to_points(content).map(move |(point, _)| point)
    }
}

impl<T> AnchorRangeMap<T> {
    pub fn to_point_ranges<'a>(
        &'a self,
        content: impl Into<Content<'a>> + 'a,
    ) -> impl Iterator<Item = (Range<Point>, &'a T)> + 'a {
        let content = content.into();
        content
            .summaries_for_anchor_ranges(self)
            .map(move |(range, value)| ((range.start.lines..range.end.lines), value))
    }

    pub fn version(&self) -> &clock::Global {
        &self.version
    }
}

impl AnchorRangeSet {
    pub fn to_point_ranges<'a>(
        &'a self,
        content: impl Into<Content<'a>> + 'a,
    ) -> impl Iterator<Item = Range<Point>> + 'a {
        self.0.to_point_ranges(content).map(|(range, _)| range)
    }

    pub fn version(&self) -> &clock::Global {
        self.0.version()
    }
}

pub trait AnchorRangeExt {
    fn cmp<'a>(&self, b: &Range<Anchor>, buffer: impl Into<Content<'a>>) -> Result<Ordering>;
}
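The new `AnchorMap` and `AnchorRangeMap` types store raw `(offset, Bias)` entries together with the buffer version they were captured at. A rough standalone sketch of what the bias means when a stored offset is resolved against a later insertion; this is a simplification, not the zed `buffer` crate API:

```rust
// Simplified model: a left-biased position "sticks" to the text before an
// insertion made exactly at that offset, a right-biased one is pushed past it.
#[derive(Clone, Copy, PartialEq)]
enum Bias {
    Left,
    Right,
}

fn resolve(offset: usize, bias: Bias, insert_at: usize, insert_len: usize) -> usize {
    if offset < insert_at || (offset == insert_at && bias == Bias::Left) {
        offset // stays before the inserted text
    } else {
        offset + insert_len // moves past the inserted text
    }
}

fn main() {
    // Insert 3 characters at offset 2.
    assert_eq!(resolve(2, Bias::Left, 2, 3), 2);
    assert_eq!(resolve(2, Bias::Right, 2, 3), 5);
}
```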
File diff suppressed because it is too large.
@@ -109,21 +109,3 @@ impl Ord for Point {
        }
    }
}

impl Into<tree_sitter::Point> for Point {
    fn into(self) -> tree_sitter::Point {
        tree_sitter::Point {
            row: self.row as usize,
            column: self.column as usize,
        }
    }
}

impl From<tree_sitter::Point> for Point {
    fn from(point: tree_sitter::Point) -> Self {
        Self {
            row: point.row as u32,
            column: point.column as u32,
        }
    }
}
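This hunk removes the `Into<tree_sitter::Point>` and `From<tree_sitter::Point>` conversions from this file. As a general side note, the usual idiom is to implement only `From` and get the matching `Into` for free from the standard library's blanket impl. A small self-contained sketch with a hypothetical `RowCol` type (not zed's `Point`):

```rust
#[derive(Debug, PartialEq)]
struct RowCol {
    row: u32,
    column: u32,
}

// Implementing `From` also provides `Into<RowCol>` for the tuple,
// so there is no need to write an `impl Into<...>` block by hand.
impl From<(usize, usize)> for RowCol {
    fn from((row, column): (usize, usize)) -> Self {
        Self {
            row: row as u32,
            column: column as u32,
        }
    }
}

fn main() {
    let p: RowCol = (3usize, 7usize).into();
    assert_eq!(p, RowCol { row: 3, column: 7 });
}
```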
@@ -115,6 +115,11 @@ impl Rope {
        self.chunks_in_range(start..self.len()).flat_map(str::chars)
    }

    pub fn reversed_chars_at(&self, start: usize) -> impl Iterator<Item = char> + '_ {
        self.reversed_chunks_in_range(0..start)
            .flat_map(|chunk| chunk.chars().rev())
    }

    pub fn bytes_at(&self, start: usize) -> impl Iterator<Item = u8> + '_ {
        self.chunks_in_range(start..self.len()).flat_map(str::bytes)
    }

@@ -123,8 +128,12 @@ impl Rope {
        self.chunks_in_range(0..self.len())
    }

    pub fn chunks_in_range<'a>(&'a self, range: Range<usize>) -> Chunks<'a> {
        Chunks::new(self, range)
    pub fn chunks_in_range(&self, range: Range<usize>) -> Chunks {
        Chunks::new(self, range, false)
    }

    pub fn reversed_chunks_in_range(&self, range: Range<usize>) -> Chunks {
        Chunks::new(self, range, true)
    }

    pub fn to_point(&self, offset: usize) -> Point {

@@ -268,6 +277,7 @@ impl<'a> Cursor<'a> {
            }
        }

        self.offset = end_offset;
        summary
    }

@@ -283,38 +293,65 @@ impl<'a> Cursor<'a> {
pub struct Chunks<'a> {
    chunks: sum_tree::Cursor<'a, Chunk, usize>,
    range: Range<usize>,
    reversed: bool,
}

impl<'a> Chunks<'a> {
    pub fn new(rope: &'a Rope, range: Range<usize>) -> Self {
    pub fn new(rope: &'a Rope, range: Range<usize>, reversed: bool) -> Self {
        let mut chunks = rope.chunks.cursor();
        chunks.seek(&range.start, Bias::Right, &());
        Self { chunks, range }
        if reversed {
            chunks.seek(&range.end, Bias::Left, &());
        } else {
            chunks.seek(&range.start, Bias::Right, &());
        }
        Self {
            chunks,
            range,
            reversed,
        }
    }

    pub fn offset(&self) -> usize {
        self.range.start.max(*self.chunks.start())
        if self.reversed {
            self.range.end.min(self.chunks.end(&()))
        } else {
            self.range.start.max(*self.chunks.start())
        }
    }

    pub fn seek(&mut self, offset: usize) {
        if offset >= self.chunks.end(&()) {
            self.chunks.seek_forward(&offset, Bias::Right, &());
        let bias = if self.reversed {
            Bias::Left
        } else {
            self.chunks.seek(&offset, Bias::Right, &());
            Bias::Right
        };

        if offset >= self.chunks.end(&()) {
            self.chunks.seek_forward(&offset, bias, &());
        } else {
            self.chunks.seek(&offset, bias, &());
        }

        if self.reversed {
            self.range.end = offset;
        } else {
            self.range.start = offset;
        }
        self.range.start = offset;
    }

    pub fn peek(&self) -> Option<&'a str> {
        if let Some(chunk) = self.chunks.item() {
            let offset = *self.chunks.start();
            if self.range.end > offset {
                let start = self.range.start.saturating_sub(*self.chunks.start());
                let end = self.range.end - self.chunks.start();
                return Some(&chunk.0[start..chunk.0.len().min(end)]);
            }
        let chunk = self.chunks.item()?;
        if self.reversed && self.range.start >= self.chunks.end(&()) {
            return None;
        }
        None
        let chunk_start = *self.chunks.start();
        if self.range.end <= chunk_start {
            return None;
        }

        let start = self.range.start.saturating_sub(chunk_start);
        let end = self.range.end - chunk_start;
        Some(&chunk.0[start..chunk.0.len().min(end)])
    }
}

@@ -324,7 +361,11 @@ impl<'a> Iterator for Chunks<'a> {
    fn next(&mut self) -> Option<Self::Item> {
        let result = self.peek();
        if result.is_some() {
            self.chunks.next(&());
            if self.reversed {
                self.chunks.prev(&());
            } else {
                self.chunks.next(&());
            }
        }
        result
    }

@@ -570,6 +611,16 @@ mod tests {
            actual.chunks_in_range(start_ix..end_ix).collect::<String>(),
            &expected[start_ix..end_ix]
        );

        assert_eq!(
            actual
                .reversed_chunks_in_range(start_ix..end_ix)
                .collect::<Vec<&str>>()
                .into_iter()
                .rev()
                .collect::<String>(),
            &expected[start_ix..end_ix]
        );
    }

    let mut point = Point::new(0, 0);
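The `Chunks` cursor can now walk a rope backwards, which is what `reversed_chars_at` builds on. A minimal standalone sketch of the same idea over a plain slice of chunks, assuming a flat `Vec` of string chunks rather than the sum-tree backed rope used here:

```rust
// Reversed character iteration is "reverse the chunk order, then reverse the
// chars inside each chunk".
fn reversed_chars<'a>(chunks: &'a [&'a str]) -> impl Iterator<Item = char> + 'a {
    chunks.iter().rev().flat_map(|chunk| chunk.chars().rev())
}

fn main() {
    let chunks = ["abc", "de", "fgh"];

    let reversed: String = reversed_chars(&chunks).collect();
    assert_eq!(reversed, "hgfedcba");

    // Forward iteration for comparison.
    let forward: String = chunks.iter().flat_map(|c| c.chars()).collect();
    assert_eq!(forward, "abcdefgh");
}
```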
crates/buffer/src/tests.rs (new file, 658 lines)
@@ -0,0 +1,658 @@
use super::*;
use clock::ReplicaId;
use rand::prelude::*;
use std::{
    cmp::Ordering,
    env,
    iter::Iterator,
    time::{Duration, Instant},
};

#[test]
fn test_edit() {
    let mut buffer = Buffer::new(0, 0, History::new("abc".into()));
    assert_eq!(buffer.text(), "abc");
    buffer.edit(vec![3..3], "def");
    assert_eq!(buffer.text(), "abcdef");
    buffer.edit(vec![0..0], "ghi");
    assert_eq!(buffer.text(), "ghiabcdef");
    buffer.edit(vec![5..5], "jkl");
    assert_eq!(buffer.text(), "ghiabjklcdef");
    buffer.edit(vec![6..7], "");
    assert_eq!(buffer.text(), "ghiabjlcdef");
    buffer.edit(vec![4..9], "mno");
    assert_eq!(buffer.text(), "ghiamnoef");
}

#[gpui::test(iterations = 100)]
|
||||
fn test_random_edits(mut rng: StdRng) {
|
||||
let operations = env::var("OPERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
|
||||
.unwrap_or(10);
|
||||
|
||||
let reference_string_len = rng.gen_range(0..3);
|
||||
let mut reference_string = RandomCharIter::new(&mut rng)
|
||||
.take(reference_string_len)
|
||||
.collect::<String>();
|
||||
let mut buffer = Buffer::new(0, 0, History::new(reference_string.clone().into()));
|
||||
buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200));
|
||||
let mut buffer_versions = Vec::new();
|
||||
log::info!(
|
||||
"buffer text {:?}, version: {:?}",
|
||||
buffer.text(),
|
||||
buffer.version()
|
||||
);
|
||||
|
||||
for _i in 0..operations {
|
||||
let (old_ranges, new_text, _) = buffer.randomly_edit(&mut rng, 5);
|
||||
for old_range in old_ranges.iter().rev() {
|
||||
reference_string.replace_range(old_range.clone(), &new_text);
|
||||
}
|
||||
assert_eq!(buffer.text(), reference_string);
|
||||
log::info!(
|
||||
"buffer text {:?}, version: {:?}",
|
||||
buffer.text(),
|
||||
buffer.version()
|
||||
);
|
||||
|
||||
if rng.gen_bool(0.25) {
|
||||
buffer.randomly_undo_redo(&mut rng);
|
||||
reference_string = buffer.text();
|
||||
log::info!(
|
||||
"buffer text {:?}, version: {:?}",
|
||||
buffer.text(),
|
||||
buffer.version()
|
||||
);
|
||||
}
|
||||
|
||||
let range = buffer.random_byte_range(0, &mut rng);
|
||||
assert_eq!(
|
||||
buffer.text_summary_for_range(range.clone()),
|
||||
TextSummary::from(&reference_string[range])
|
||||
);
|
||||
|
||||
if rng.gen_bool(0.3) {
|
||||
buffer_versions.push(buffer.clone());
|
||||
}
|
||||
}
|
||||
|
||||
for mut old_buffer in buffer_versions {
|
||||
let edits = buffer
|
||||
.edits_since(old_buffer.version.clone())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
log::info!(
|
||||
"mutating old buffer version {:?}, text: {:?}, edits since: {:?}",
|
||||
old_buffer.version(),
|
||||
old_buffer.text(),
|
||||
edits,
|
||||
);
|
||||
|
||||
let mut delta = 0_isize;
|
||||
for edit in edits {
|
||||
let old_start = (edit.old_bytes.start as isize + delta) as usize;
|
||||
let new_text: String = buffer.text_for_range(edit.new_bytes.clone()).collect();
|
||||
old_buffer.edit(Some(old_start..old_start + edit.deleted_bytes()), new_text);
|
||||
delta += edit.delta();
|
||||
}
|
||||
assert_eq!(old_buffer.text(), buffer.text());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_line_len() {
|
||||
let mut buffer = Buffer::new(0, 0, History::new("".into()));
|
||||
buffer.edit(vec![0..0], "abcd\nefg\nhij");
|
||||
buffer.edit(vec![12..12], "kl\nmno");
|
||||
buffer.edit(vec![18..18], "\npqrs\n");
|
||||
buffer.edit(vec![18..21], "\nPQ");
|
||||
|
||||
assert_eq!(buffer.line_len(0), 4);
|
||||
assert_eq!(buffer.line_len(1), 3);
|
||||
assert_eq!(buffer.line_len(2), 5);
|
||||
assert_eq!(buffer.line_len(3), 3);
|
||||
assert_eq!(buffer.line_len(4), 4);
|
||||
assert_eq!(buffer.line_len(5), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_text_summary_for_range() {
|
||||
let buffer = Buffer::new(0, 0, History::new("ab\nefg\nhklm\nnopqrs\ntuvwxyz".into()));
|
||||
assert_eq!(
|
||||
buffer.text_summary_for_range(1..3),
|
||||
TextSummary {
|
||||
bytes: 2,
|
||||
lines: Point::new(1, 0),
|
||||
first_line_chars: 1,
|
||||
last_line_chars: 0,
|
||||
longest_row: 0,
|
||||
longest_row_chars: 1,
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text_summary_for_range(1..12),
|
||||
TextSummary {
|
||||
bytes: 11,
|
||||
lines: Point::new(3, 0),
|
||||
first_line_chars: 1,
|
||||
last_line_chars: 0,
|
||||
longest_row: 2,
|
||||
longest_row_chars: 4,
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text_summary_for_range(0..20),
|
||||
TextSummary {
|
||||
bytes: 20,
|
||||
lines: Point::new(4, 1),
|
||||
first_line_chars: 2,
|
||||
last_line_chars: 1,
|
||||
longest_row: 3,
|
||||
longest_row_chars: 6,
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text_summary_for_range(0..22),
|
||||
TextSummary {
|
||||
bytes: 22,
|
||||
lines: Point::new(4, 3),
|
||||
first_line_chars: 2,
|
||||
last_line_chars: 3,
|
||||
longest_row: 3,
|
||||
longest_row_chars: 6,
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.text_summary_for_range(7..22),
|
||||
TextSummary {
|
||||
bytes: 15,
|
||||
lines: Point::new(2, 3),
|
||||
first_line_chars: 4,
|
||||
last_line_chars: 3,
|
||||
longest_row: 1,
|
||||
longest_row_chars: 6,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_chars_at() {
|
||||
let mut buffer = Buffer::new(0, 0, History::new("".into()));
|
||||
buffer.edit(vec![0..0], "abcd\nefgh\nij");
|
||||
buffer.edit(vec![12..12], "kl\nmno");
|
||||
buffer.edit(vec![18..18], "\npqrs");
|
||||
buffer.edit(vec![18..21], "\nPQ");
|
||||
|
||||
let chars = buffer.chars_at(Point::new(0, 0));
|
||||
assert_eq!(chars.collect::<String>(), "abcd\nefgh\nijkl\nmno\nPQrs");
|
||||
|
||||
let chars = buffer.chars_at(Point::new(1, 0));
|
||||
assert_eq!(chars.collect::<String>(), "efgh\nijkl\nmno\nPQrs");
|
||||
|
||||
let chars = buffer.chars_at(Point::new(2, 0));
|
||||
assert_eq!(chars.collect::<String>(), "ijkl\nmno\nPQrs");
|
||||
|
||||
let chars = buffer.chars_at(Point::new(3, 0));
|
||||
assert_eq!(chars.collect::<String>(), "mno\nPQrs");
|
||||
|
||||
let chars = buffer.chars_at(Point::new(4, 0));
|
||||
assert_eq!(chars.collect::<String>(), "PQrs");
|
||||
|
||||
// Regression test:
|
||||
let mut buffer = Buffer::new(0, 0, History::new("".into()));
|
||||
buffer.edit(vec![0..0], "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n");
|
||||
buffer.edit(vec![60..60], "\n");
|
||||
|
||||
let chars = buffer.chars_at(Point::new(6, 0));
|
||||
assert_eq!(chars.collect::<String>(), " \"xray_wasm\",\n]\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_anchors() {
|
||||
let mut buffer = Buffer::new(0, 0, History::new("".into()));
|
||||
buffer.edit(vec![0..0], "abc");
|
||||
let left_anchor = buffer.anchor_before(2);
|
||||
let right_anchor = buffer.anchor_after(2);
|
||||
|
||||
buffer.edit(vec![1..1], "def\n");
|
||||
assert_eq!(buffer.text(), "adef\nbc");
|
||||
assert_eq!(left_anchor.to_offset(&buffer), 6);
|
||||
assert_eq!(right_anchor.to_offset(&buffer), 6);
|
||||
assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
|
||||
assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 });
|
||||
|
||||
buffer.edit(vec![2..3], "");
|
||||
assert_eq!(buffer.text(), "adf\nbc");
|
||||
assert_eq!(left_anchor.to_offset(&buffer), 5);
|
||||
assert_eq!(right_anchor.to_offset(&buffer), 5);
|
||||
assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
|
||||
assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 });
|
||||
|
||||
buffer.edit(vec![5..5], "ghi\n");
|
||||
assert_eq!(buffer.text(), "adf\nbghi\nc");
|
||||
assert_eq!(left_anchor.to_offset(&buffer), 5);
|
||||
assert_eq!(right_anchor.to_offset(&buffer), 9);
|
||||
assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
|
||||
assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 });
|
||||
|
||||
buffer.edit(vec![7..9], "");
|
||||
assert_eq!(buffer.text(), "adf\nbghc");
|
||||
assert_eq!(left_anchor.to_offset(&buffer), 5);
|
||||
assert_eq!(right_anchor.to_offset(&buffer), 7);
|
||||
assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 },);
|
||||
assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 3 });
|
||||
|
||||
// Ensure anchoring to a point is equivalent to anchoring to an offset.
|
||||
assert_eq!(
|
||||
buffer.anchor_before(Point { row: 0, column: 0 }),
|
||||
buffer.anchor_before(0)
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.anchor_before(Point { row: 0, column: 1 }),
|
||||
buffer.anchor_before(1)
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.anchor_before(Point { row: 0, column: 2 }),
|
||||
buffer.anchor_before(2)
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.anchor_before(Point { row: 0, column: 3 }),
|
||||
buffer.anchor_before(3)
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.anchor_before(Point { row: 1, column: 0 }),
|
||||
buffer.anchor_before(4)
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.anchor_before(Point { row: 1, column: 1 }),
|
||||
buffer.anchor_before(5)
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.anchor_before(Point { row: 1, column: 2 }),
|
||||
buffer.anchor_before(6)
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.anchor_before(Point { row: 1, column: 3 }),
|
||||
buffer.anchor_before(7)
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.anchor_before(Point { row: 1, column: 4 }),
|
||||
buffer.anchor_before(8)
|
||||
);
|
||||
|
||||
// Comparison between anchors.
|
||||
let anchor_at_offset_0 = buffer.anchor_before(0);
|
||||
let anchor_at_offset_1 = buffer.anchor_before(1);
|
||||
let anchor_at_offset_2 = buffer.anchor_before(2);
|
||||
|
||||
assert_eq!(
|
||||
anchor_at_offset_0
|
||||
.cmp(&anchor_at_offset_0, &buffer)
|
||||
.unwrap(),
|
||||
Ordering::Equal
|
||||
);
|
||||
assert_eq!(
|
||||
anchor_at_offset_1
|
||||
.cmp(&anchor_at_offset_1, &buffer)
|
||||
.unwrap(),
|
||||
Ordering::Equal
|
||||
);
|
||||
assert_eq!(
|
||||
anchor_at_offset_2
|
||||
.cmp(&anchor_at_offset_2, &buffer)
|
||||
.unwrap(),
|
||||
Ordering::Equal
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
anchor_at_offset_0
|
||||
.cmp(&anchor_at_offset_1, &buffer)
|
||||
.unwrap(),
|
||||
Ordering::Less
|
||||
);
|
||||
assert_eq!(
|
||||
anchor_at_offset_1
|
||||
.cmp(&anchor_at_offset_2, &buffer)
|
||||
.unwrap(),
|
||||
Ordering::Less
|
||||
);
|
||||
assert_eq!(
|
||||
anchor_at_offset_0
|
||||
.cmp(&anchor_at_offset_2, &buffer)
|
||||
.unwrap(),
|
||||
Ordering::Less
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
anchor_at_offset_1
|
||||
.cmp(&anchor_at_offset_0, &buffer)
|
||||
.unwrap(),
|
||||
Ordering::Greater
|
||||
);
|
||||
assert_eq!(
|
||||
anchor_at_offset_2
|
||||
.cmp(&anchor_at_offset_1, &buffer)
|
||||
.unwrap(),
|
||||
Ordering::Greater
|
||||
);
|
||||
assert_eq!(
|
||||
anchor_at_offset_2
|
||||
.cmp(&anchor_at_offset_0, &buffer)
|
||||
.unwrap(),
|
||||
Ordering::Greater
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_anchors_at_start_and_end() {
|
||||
let mut buffer = Buffer::new(0, 0, History::new("".into()));
|
||||
let before_start_anchor = buffer.anchor_before(0);
|
||||
let after_end_anchor = buffer.anchor_after(0);
|
||||
|
||||
buffer.edit(vec![0..0], "abc");
|
||||
assert_eq!(buffer.text(), "abc");
|
||||
assert_eq!(before_start_anchor.to_offset(&buffer), 0);
|
||||
assert_eq!(after_end_anchor.to_offset(&buffer), 3);
|
||||
|
||||
let after_start_anchor = buffer.anchor_after(0);
|
||||
let before_end_anchor = buffer.anchor_before(3);
|
||||
|
||||
buffer.edit(vec![3..3], "def");
|
||||
buffer.edit(vec![0..0], "ghi");
|
||||
assert_eq!(buffer.text(), "ghiabcdef");
|
||||
assert_eq!(before_start_anchor.to_offset(&buffer), 0);
|
||||
assert_eq!(after_start_anchor.to_offset(&buffer), 3);
|
||||
assert_eq!(before_end_anchor.to_offset(&buffer), 6);
|
||||
assert_eq!(after_end_anchor.to_offset(&buffer), 9);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_undo_redo() {
|
||||
let mut buffer = Buffer::new(0, 0, History::new("1234".into()));
|
||||
// Set group interval to zero so as to not group edits in the undo stack.
|
||||
buffer.history.group_interval = Duration::from_secs(0);
|
||||
|
||||
buffer.edit(vec![1..1], "abx");
|
||||
buffer.edit(vec![3..4], "yzef");
|
||||
buffer.edit(vec![3..5], "cd");
|
||||
assert_eq!(buffer.text(), "1abcdef234");
|
||||
|
||||
let transactions = buffer.history.undo_stack.clone();
|
||||
assert_eq!(transactions.len(), 3);
|
||||
|
||||
buffer.undo_or_redo(transactions[0].clone()).unwrap();
|
||||
assert_eq!(buffer.text(), "1cdef234");
|
||||
buffer.undo_or_redo(transactions[0].clone()).unwrap();
|
||||
assert_eq!(buffer.text(), "1abcdef234");
|
||||
|
||||
buffer.undo_or_redo(transactions[1].clone()).unwrap();
|
||||
assert_eq!(buffer.text(), "1abcdx234");
|
||||
buffer.undo_or_redo(transactions[2].clone()).unwrap();
|
||||
assert_eq!(buffer.text(), "1abx234");
|
||||
buffer.undo_or_redo(transactions[1].clone()).unwrap();
|
||||
assert_eq!(buffer.text(), "1abyzef234");
|
||||
buffer.undo_or_redo(transactions[2].clone()).unwrap();
|
||||
assert_eq!(buffer.text(), "1abcdef234");
|
||||
|
||||
buffer.undo_or_redo(transactions[2].clone()).unwrap();
|
||||
assert_eq!(buffer.text(), "1abyzef234");
|
||||
buffer.undo_or_redo(transactions[0].clone()).unwrap();
|
||||
assert_eq!(buffer.text(), "1yzef234");
|
||||
buffer.undo_or_redo(transactions[1].clone()).unwrap();
|
||||
assert_eq!(buffer.text(), "1234");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_history() {
|
||||
let mut now = Instant::now();
|
||||
let mut buffer = Buffer::new(0, 0, History::new("123456".into()));
|
||||
|
||||
let set_id = if let Operation::UpdateSelections { set_id, .. } =
|
||||
buffer.add_selection_set(buffer.selections_from_ranges(vec![4..4]).unwrap())
|
||||
{
|
||||
set_id
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
buffer.start_transaction_at(Some(set_id), now).unwrap();
|
||||
buffer.edit(vec![2..4], "cd");
|
||||
buffer.end_transaction_at(Some(set_id), now).unwrap();
|
||||
assert_eq!(buffer.text(), "12cd56");
|
||||
assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]);
|
||||
|
||||
buffer.start_transaction_at(Some(set_id), now).unwrap();
|
||||
buffer
|
||||
.update_selection_set(set_id, buffer.selections_from_ranges(vec![1..3]).unwrap())
|
||||
.unwrap();
|
||||
buffer.edit(vec![4..5], "e");
|
||||
buffer.end_transaction_at(Some(set_id), now).unwrap();
|
||||
assert_eq!(buffer.text(), "12cde6");
|
||||
assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
|
||||
|
||||
now += buffer.history.group_interval + Duration::from_millis(1);
|
||||
buffer.start_transaction_at(Some(set_id), now).unwrap();
|
||||
buffer
|
||||
.update_selection_set(set_id, buffer.selections_from_ranges(vec![2..2]).unwrap())
|
||||
.unwrap();
|
||||
buffer.edit(vec![0..1], "a");
|
||||
buffer.edit(vec![1..1], "b");
|
||||
buffer.end_transaction_at(Some(set_id), now).unwrap();
|
||||
assert_eq!(buffer.text(), "ab2cde6");
|
||||
assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]);
|
||||
|
||||
// Last transaction happened past the group interval, undo it on its
|
||||
// own.
|
||||
buffer.undo();
|
||||
assert_eq!(buffer.text(), "12cde6");
|
||||
assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
|
||||
|
||||
// First two transactions happened within the group interval, undo them
|
||||
// together.
|
||||
buffer.undo();
|
||||
assert_eq!(buffer.text(), "123456");
|
||||
assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]);
|
||||
|
||||
// Redo the first two transactions together.
|
||||
buffer.redo();
|
||||
assert_eq!(buffer.text(), "12cde6");
|
||||
assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
|
||||
|
||||
// Redo the last transaction on its own.
|
||||
buffer.redo();
|
||||
assert_eq!(buffer.text(), "ab2cde6");
|
||||
assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]);
|
||||
|
||||
buffer.start_transaction_at(None, now).unwrap();
|
||||
assert!(buffer.end_transaction_at(None, now).is_none());
|
||||
buffer.undo();
|
||||
assert_eq!(buffer.text(), "12cde6");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_concurrent_edits() {
|
||||
let text = "abcdef";
|
||||
|
||||
let mut buffer1 = Buffer::new(1, 0, History::new(text.into()));
|
||||
let mut buffer2 = Buffer::new(2, 0, History::new(text.into()));
|
||||
let mut buffer3 = Buffer::new(3, 0, History::new(text.into()));
|
||||
|
||||
let buf1_op = buffer1.edit(vec![1..2], "12");
|
||||
assert_eq!(buffer1.text(), "a12cdef");
|
||||
let buf2_op = buffer2.edit(vec![3..4], "34");
|
||||
assert_eq!(buffer2.text(), "abc34ef");
|
||||
let buf3_op = buffer3.edit(vec![5..6], "56");
|
||||
assert_eq!(buffer3.text(), "abcde56");
|
||||
|
||||
buffer1.apply_op(Operation::Edit(buf2_op.clone())).unwrap();
|
||||
buffer1.apply_op(Operation::Edit(buf3_op.clone())).unwrap();
|
||||
buffer2.apply_op(Operation::Edit(buf1_op.clone())).unwrap();
|
||||
buffer2.apply_op(Operation::Edit(buf3_op.clone())).unwrap();
|
||||
buffer3.apply_op(Operation::Edit(buf1_op.clone())).unwrap();
|
||||
buffer3.apply_op(Operation::Edit(buf2_op.clone())).unwrap();
|
||||
|
||||
assert_eq!(buffer1.text(), "a12c34e56");
|
||||
assert_eq!(buffer2.text(), "a12c34e56");
|
||||
assert_eq!(buffer3.text(), "a12c34e56");
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_random_concurrent_edits(mut rng: StdRng) {
|
||||
let peers = env::var("PEERS")
|
||||
.map(|i| i.parse().expect("invalid `PEERS` variable"))
|
||||
.unwrap_or(5);
|
||||
let operations = env::var("OPERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
|
||||
.unwrap_or(10);
|
||||
|
||||
let base_text_len = rng.gen_range(0..10);
|
||||
let base_text = RandomCharIter::new(&mut rng)
|
||||
.take(base_text_len)
|
||||
.collect::<String>();
|
||||
let mut replica_ids = Vec::new();
|
||||
let mut buffers = Vec::new();
|
||||
let mut network = Network::new(rng.clone());
|
||||
|
||||
for i in 0..peers {
|
||||
let mut buffer = Buffer::new(i as ReplicaId, 0, History::new(base_text.clone().into()));
|
||||
buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200));
|
||||
buffers.push(buffer);
|
||||
replica_ids.push(i as u16);
|
||||
network.add_peer(i as u16);
|
||||
}
|
||||
|
||||
log::info!("initial text: {:?}", base_text);
|
||||
|
||||
let mut mutation_count = operations;
|
||||
loop {
|
||||
let replica_index = rng.gen_range(0..peers);
|
||||
let replica_id = replica_ids[replica_index];
|
||||
let buffer = &mut buffers[replica_index];
|
||||
match rng.gen_range(0..=100) {
|
||||
0..=50 if mutation_count != 0 => {
|
||||
let ops = buffer.randomly_mutate(&mut rng);
|
||||
network.broadcast(buffer.replica_id, ops);
|
||||
log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text());
|
||||
mutation_count -= 1;
|
||||
}
|
||||
51..=70 if mutation_count != 0 => {
|
||||
let ops = buffer.randomly_undo_redo(&mut rng);
|
||||
network.broadcast(buffer.replica_id, ops);
|
||||
mutation_count -= 1;
|
||||
}
|
||||
71..=100 if network.has_unreceived(replica_id) => {
|
||||
let ops = network.receive(replica_id);
|
||||
if !ops.is_empty() {
|
||||
log::info!(
|
||||
"peer {} applying {} ops from the network.",
|
||||
replica_id,
|
||||
ops.len()
|
||||
);
|
||||
buffer.apply_ops(ops).unwrap();
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
if mutation_count == 0 && network.is_idle() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let first_buffer = &buffers[0];
|
||||
for buffer in &buffers[1..] {
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
first_buffer.text(),
|
||||
"Replica {} text != Replica 0 text",
|
||||
buffer.replica_id
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.selection_sets().collect::<HashMap<_, _>>(),
|
||||
first_buffer.selection_sets().collect::<HashMap<_, _>>()
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.all_selection_ranges().collect::<HashMap<_, _>>(),
|
||||
first_buffer
|
||||
.all_selection_ranges()
|
||||
.collect::<HashMap<_, _>>()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct Envelope<T: Clone> {
|
||||
message: T,
|
||||
sender: ReplicaId,
|
||||
}
|
||||
|
||||
struct Network<T: Clone, R: rand::Rng> {
|
||||
inboxes: std::collections::BTreeMap<ReplicaId, Vec<Envelope<T>>>,
|
||||
all_messages: Vec<T>,
|
||||
rng: R,
|
||||
}
|
||||
|
||||
impl<T: Clone, R: rand::Rng> Network<T, R> {
|
||||
fn new(rng: R) -> Self {
|
||||
Network {
|
||||
inboxes: Default::default(),
|
||||
all_messages: Vec::new(),
|
||||
rng,
|
||||
}
|
||||
}
|
||||
|
||||
fn add_peer(&mut self, id: ReplicaId) {
|
||||
self.inboxes.insert(id, Vec::new());
|
||||
}
|
||||
|
||||
fn is_idle(&self) -> bool {
|
||||
self.inboxes.values().all(|i| i.is_empty())
|
||||
}
|
||||
|
||||
fn broadcast(&mut self, sender: ReplicaId, messages: Vec<T>) {
|
||||
for (replica, inbox) in self.inboxes.iter_mut() {
|
||||
if *replica != sender {
|
||||
for message in &messages {
|
||||
let min_index = inbox
|
||||
.iter()
|
||||
.enumerate()
|
||||
.rev()
|
||||
.find_map(|(index, envelope)| {
|
||||
if sender == envelope.sender {
|
||||
Some(index + 1)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.unwrap_or(0);
|
||||
|
||||
// Insert one or more duplicates of this message *after* the previous
|
||||
// message delivered by this replica.
|
||||
for _ in 0..self.rng.gen_range(1..4) {
|
||||
let insertion_index = self.rng.gen_range(min_index..inbox.len() + 1);
|
||||
inbox.insert(
|
||||
insertion_index,
|
||||
Envelope {
|
||||
message: message.clone(),
|
||||
sender,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
self.all_messages.extend(messages);
|
||||
}
|
||||
|
||||
fn has_unreceived(&self, receiver: ReplicaId) -> bool {
|
||||
!self.inboxes[&receiver].is_empty()
|
||||
}
|
||||
|
||||
fn receive(&mut self, receiver: ReplicaId) -> Vec<T> {
|
||||
let inbox = self.inboxes.get_mut(&receiver).unwrap();
|
||||
let count = self.rng.gen_range(0..inbox.len() + 1);
|
||||
inbox
|
||||
.drain(0..count)
|
||||
.map(|envelope| envelope.message)
|
||||
.collect()
|
||||
}
|
||||
}
|
@@ -4,12 +4,17 @@ version = "0.1.0"
edition = "2018"

[features]
test-support = ["buffer/test-support", "gpui/test-support"]
test-support = [
    "buffer/test-support",
    "language/test-support",
    "gpui/test-support",
]

[dependencies]
buffer = { path = "../buffer" }
clock = { path = "../clock" }
gpui = { path = "../gpui" }
language = { path = "../language" }
sum_tree = { path = "../sum_tree" }
theme = { path = "../theme" }
util = { path = "../util" }

@@ -24,6 +29,7 @@ smol = "1.2"

[dev-dependencies]
buffer = { path = "../buffer", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
rand = "0.8"
unindent = "0.1.7"
@ -2,9 +2,9 @@ mod fold_map;
|
||||
mod tab_map;
|
||||
mod wrap_map;
|
||||
|
||||
use buffer::{Anchor, Buffer, Point, ToOffset, ToPoint};
|
||||
use fold_map::{FoldMap, ToFoldPoint as _};
|
||||
use gpui::{fonts::FontId, Entity, ModelContext, ModelHandle};
|
||||
use language::{Anchor, Buffer, Point, ToOffset, ToPoint};
|
||||
use std::ops::Range;
|
||||
use sum_tree::Bias;
|
||||
use tab_map::TabMap;
|
||||
@ -109,7 +109,7 @@ impl DisplayMap {
|
||||
}
|
||||
|
||||
pub struct DisplayMapSnapshot {
|
||||
buffer_snapshot: buffer::Snapshot,
|
||||
buffer_snapshot: language::Snapshot,
|
||||
folds_snapshot: fold_map::Snapshot,
|
||||
tabs_snapshot: tab_map::Snapshot,
|
||||
wraps_snapshot: wrap_map::Snapshot,
|
||||
@ -358,8 +358,8 @@ impl ToDisplayPoint for Anchor {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{movement, test::*};
|
||||
use buffer::{History, Language, LanguageConfig, RandomCharIter, SelectionGoal};
|
||||
use gpui::{color::Color, MutableAppContext};
|
||||
use language::{History, Language, LanguageConfig, RandomCharIter, SelectionGoal};
|
||||
use rand::{prelude::StdRng, Rng};
|
||||
use std::{env, sync::Arc};
|
||||
use theme::SyntaxTheme;
|
||||
@ -436,7 +436,7 @@ mod tests {
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
buffer.update(&mut cx, |buffer, cx| buffer.randomly_mutate(&mut rng, cx));
|
||||
buffer.update(&mut cx, |buffer, _| buffer.randomly_edit(&mut rng, 5));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
use buffer::{Anchor, Buffer, Point, ToOffset, AnchorRangeExt, HighlightId, TextSummary};
|
||||
use gpui::{AppContext, ModelHandle};
|
||||
use language::{Anchor, AnchorRangeExt, Buffer, HighlightId, Point, TextSummary, ToOffset};
|
||||
use parking_lot::Mutex;
|
||||
use std::{
|
||||
cmp::{self, Ordering},
|
||||
@ -485,7 +485,7 @@ impl FoldMap {
|
||||
pub struct Snapshot {
|
||||
transforms: SumTree<Transform>,
|
||||
folds: SumTree<Fold>,
|
||||
buffer_snapshot: buffer::Snapshot,
|
||||
buffer_snapshot: language::Snapshot,
|
||||
pub version: usize,
|
||||
}
|
||||
|
||||
@ -994,7 +994,7 @@ impl<'a> Iterator for Chunks<'a> {
|
||||
|
||||
pub struct HighlightedChunks<'a> {
|
||||
transform_cursor: Cursor<'a, Transform, (FoldOffset, usize)>,
|
||||
buffer_chunks: buffer::HighlightedChunks<'a>,
|
||||
buffer_chunks: language::HighlightedChunks<'a>,
|
||||
buffer_chunk: Option<(usize, &'a str, HighlightId)>,
|
||||
buffer_offset: usize,
|
||||
}
|
||||
@ -1331,10 +1331,10 @@ mod tests {
|
||||
snapshot_edits.extend(map.randomly_mutate(&mut rng, cx.as_ref()));
|
||||
}
|
||||
_ => {
|
||||
let edits = buffer.update(cx, |buffer, cx| {
|
||||
let edits = buffer.update(cx, |buffer, _| {
|
||||
let start_version = buffer.version.clone();
|
||||
let edit_count = rng.gen_range(1..=5);
|
||||
buffer.randomly_edit(&mut rng, edit_count, cx);
|
||||
buffer.randomly_edit(&mut rng, edit_count);
|
||||
buffer.edits_since(start_version).collect::<Vec<_>>()
|
||||
});
|
||||
log::info!("editing {:?}", edits);
|
||||
|
@ -1,5 +1,5 @@
|
||||
use super::fold_map::{self, FoldEdit, FoldPoint, Snapshot as FoldSnapshot};
|
||||
use buffer::{rope, HighlightId};
|
||||
use language::{rope, HighlightId};
|
||||
use parking_lot::Mutex;
|
||||
use std::{mem, ops::Range};
|
||||
use sum_tree::Bias;
|
||||
|
@ -2,8 +2,8 @@ use super::{
|
||||
fold_map,
|
||||
tab_map::{self, Edit as TabEdit, Snapshot as TabSnapshot, TabPoint, TextSummary},
|
||||
};
|
||||
use buffer::{HighlightId, Point};
|
||||
use gpui::{fonts::FontId, text_layout::LineWrapper, Entity, ModelContext, Task};
|
||||
use language::{HighlightId, Point};
|
||||
use lazy_static::lazy_static;
|
||||
use smol::future::yield_now;
|
||||
use std::{collections::VecDeque, ops::Range, time::Duration};
|
||||
@ -899,7 +899,7 @@ mod tests {
|
||||
display_map::{fold_map::FoldMap, tab_map::TabMap},
|
||||
test::Observer,
|
||||
};
|
||||
use buffer::{Buffer, RandomCharIter};
|
||||
use language::{Buffer, RandomCharIter};
|
||||
use rand::prelude::*;
|
||||
use std::env;
|
||||
|
||||
@ -990,7 +990,7 @@ mod tests {
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
buffer.update(&mut cx, |buffer, cx| buffer.randomly_mutate(&mut rng, cx));
|
||||
buffer.update(&mut cx, |buffer, _| buffer.randomly_mutate(&mut rng));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2,7 +2,6 @@ use super::{
|
||||
DisplayPoint, Editor, EditorMode, EditorSettings, EditorStyle, Input, Scroll, Select,
|
||||
SelectPhase, Snapshot, MAX_LINE_LEN,
|
||||
};
|
||||
use buffer::HighlightId;
|
||||
use clock::ReplicaId;
|
||||
use gpui::{
|
||||
color::Color,
|
||||
@ -18,6 +17,7 @@ use gpui::{
|
||||
MutableAppContext, PaintContext, Quad, Scene, SizeConstraint, ViewContext, WeakViewHandle,
|
||||
};
|
||||
use json::json;
|
||||
use language::HighlightId;
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
cmp::{self, Ordering},
|
||||
@ -1043,7 +1043,7 @@ mod tests {
|
||||
test::sample_text,
|
||||
{Editor, EditorSettings},
|
||||
};
|
||||
use buffer::Buffer;
|
||||
use language::Buffer;
|
||||
|
||||
#[gpui::test]
|
||||
fn test_layout_line_numbers(cx: &mut gpui::MutableAppContext) {
|
||||
|
@ -5,7 +5,6 @@ pub mod movement;
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
|
||||
use buffer::*;
|
||||
use clock::ReplicaId;
|
||||
pub use display_map::DisplayPoint;
|
||||
use display_map::*;
|
||||
@ -15,13 +14,14 @@ use gpui::{
|
||||
text_layout, AppContext, ClipboardItem, Element, ElementBox, Entity, ModelHandle,
|
||||
MutableAppContext, RenderContext, View, ViewContext, WeakViewHandle,
|
||||
};
|
||||
use language::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smallvec::SmallVec;
|
||||
use smol::Timer;
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
cmp::{self, Ordering},
|
||||
mem,
|
||||
iter, mem,
|
||||
ops::{Range, RangeInclusive},
|
||||
rc::Rc,
|
||||
sync::Arc,
|
||||
@ -38,6 +38,8 @@ action!(Cancel);
|
||||
action!(Backspace);
|
||||
action!(Delete);
|
||||
action!(Input, String);
|
||||
action!(Newline);
|
||||
action!(Tab);
|
||||
action!(DeleteLine);
|
||||
action!(DeleteToPreviousWordBoundary);
|
||||
action!(DeleteToNextWordBoundary);
|
||||
@ -95,13 +97,13 @@ pub fn init(cx: &mut MutableAppContext) {
|
||||
Binding::new("ctrl-h", Backspace, Some("Editor")),
|
||||
Binding::new("delete", Delete, Some("Editor")),
|
||||
Binding::new("ctrl-d", Delete, Some("Editor")),
|
||||
Binding::new("enter", Input("\n".into()), Some("Editor && mode == full")),
|
||||
Binding::new("enter", Newline, Some("Editor && mode == full")),
|
||||
Binding::new(
|
||||
"alt-enter",
|
||||
Input("\n".into()),
|
||||
Some("Editor && mode == auto_height"),
|
||||
),
|
||||
Binding::new("tab", Input("\t".into()), Some("Editor")),
|
||||
Binding::new("tab", Tab, Some("Editor")),
|
||||
Binding::new("ctrl-shift-K", DeleteLine, Some("Editor")),
|
||||
Binding::new(
|
||||
"alt-backspace",
|
||||
@ -193,8 +195,10 @@ pub fn init(cx: &mut MutableAppContext) {
|
||||
cx.add_action(Editor::select);
|
||||
cx.add_action(Editor::cancel);
|
||||
cx.add_action(Editor::handle_input);
|
||||
cx.add_action(Editor::newline);
|
||||
cx.add_action(Editor::backspace);
|
||||
cx.add_action(Editor::delete);
|
||||
cx.add_action(Editor::tab);
|
||||
cx.add_action(Editor::delete_line);
|
||||
cx.add_action(Editor::delete_to_previous_word_boundary);
|
||||
cx.add_action(Editor::delete_to_next_word_boundary);
|
||||
@ -292,7 +296,7 @@ pub struct Editor {
|
||||
pending_selection: Option<Selection>,
|
||||
next_selection_id: usize,
|
||||
add_selections_state: Option<AddSelectionsState>,
|
||||
autoclose_stack: Vec<AutoclosePairState>,
|
||||
autoclose_stack: Vec<BracketPairState>,
|
||||
select_larger_syntax_node_stack: Vec<Arc<[Selection]>>,
|
||||
scroll_position: Vector2F,
|
||||
scroll_top_anchor: Anchor,
|
||||
@ -320,9 +324,9 @@ struct AddSelectionsState {
|
||||
stack: Vec<usize>,
|
||||
}
|
||||
|
||||
struct AutoclosePairState {
|
||||
struct BracketPairState {
|
||||
ranges: SmallVec<[Range<Anchor>; 32]>,
|
||||
pair: AutoclosePair,
|
||||
pair: BracketPair,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
@ -750,6 +754,130 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn newline(&mut self, _: &Newline, cx: &mut ViewContext<Self>) {
|
||||
self.start_transaction(cx);
|
||||
let mut old_selections = SmallVec::<[_; 32]>::new();
|
||||
{
|
||||
let selections = self.selections(cx);
|
||||
let buffer = self.buffer.read(cx);
|
||||
for selection in selections.iter() {
|
||||
let start_point = selection.start.to_point(buffer);
|
||||
let indent = buffer
|
||||
.indent_column_for_line(start_point.row)
|
||||
.min(start_point.column);
|
||||
let start = selection.start.to_offset(buffer);
|
||||
let end = selection.end.to_offset(buffer);
|
||||
|
||||
let mut insert_extra_newline = false;
|
||||
if let Some(language) = buffer.language() {
|
||||
let leading_whitespace_len = buffer
|
||||
.reversed_chars_at(start)
|
||||
.take_while(|c| c.is_whitespace() && *c != '\n')
|
||||
.map(|c| c.len_utf8())
|
||||
.sum::<usize>();
|
||||
|
||||
let trailing_whitespace_len = buffer
|
||||
.chars_at(end)
|
||||
.take_while(|c| c.is_whitespace() && *c != '\n')
|
||||
.map(|c| c.len_utf8())
|
||||
.sum::<usize>();
|
||||
|
||||
insert_extra_newline = language.brackets().iter().any(|pair| {
|
||||
let pair_start = pair.start.trim_end();
|
||||
let pair_end = pair.end.trim_start();
|
||||
|
||||
pair.newline
|
||||
&& buffer.contains_str_at(end + trailing_whitespace_len, pair_end)
|
||||
&& buffer.contains_str_at(
|
||||
(start - leading_whitespace_len).saturating_sub(pair_start.len()),
|
||||
pair_start,
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
old_selections.push((selection.id, start..end, indent, insert_extra_newline));
|
||||
}
|
||||
}
|
||||
|
||||
let mut new_selections = Vec::with_capacity(old_selections.len());
|
||||
self.buffer.update(cx, |buffer, cx| {
|
||||
let mut delta = 0_isize;
|
||||
let mut pending_edit: Option<PendingEdit> = None;
|
||||
for (_, range, indent, insert_extra_newline) in &old_selections {
|
||||
if pending_edit.as_ref().map_or(false, |pending| {
|
||||
pending.indent != *indent
|
||||
|| pending.insert_extra_newline != *insert_extra_newline
|
||||
}) {
|
||||
let pending = pending_edit.take().unwrap();
|
||||
let mut new_text = String::with_capacity(1 + pending.indent as usize);
|
||||
new_text.push('\n');
|
||||
new_text.extend(iter::repeat(' ').take(pending.indent as usize));
|
||||
if pending.insert_extra_newline {
|
||||
new_text = new_text.repeat(2);
|
||||
}
|
||||
buffer.edit_with_autoindent(pending.ranges, new_text, cx);
|
||||
delta += pending.delta;
|
||||
}
|
||||
|
||||
let start = (range.start as isize + delta) as usize;
|
||||
let end = (range.end as isize + delta) as usize;
|
||||
let mut text_len = *indent as usize + 1;
|
||||
if *insert_extra_newline {
|
||||
text_len *= 2;
|
||||
}
|
||||
|
||||
let pending = pending_edit.get_or_insert_with(Default::default);
|
||||
pending.delta += text_len as isize - (end - start) as isize;
|
||||
pending.indent = *indent;
|
||||
pending.insert_extra_newline = *insert_extra_newline;
|
||||
pending.ranges.push(start..end);
|
||||
}
|
||||
|
||||
let pending = pending_edit.unwrap();
|
||||
let mut new_text = String::with_capacity(1 + pending.indent as usize);
|
||||
new_text.push('\n');
|
||||
new_text.extend(iter::repeat(' ').take(pending.indent as usize));
|
||||
if pending.insert_extra_newline {
|
||||
new_text = new_text.repeat(2);
|
||||
}
|
||||
buffer.edit_with_autoindent(pending.ranges, new_text, cx);
|
||||
|
||||
let mut delta = 0_isize;
|
||||
new_selections.extend(old_selections.into_iter().map(
|
||||
|(id, range, indent, insert_extra_newline)| {
|
||||
let start = (range.start as isize + delta) as usize;
|
||||
let end = (range.end as isize + delta) as usize;
|
||||
let text_before_cursor_len = indent as usize + 1;
|
||||
let anchor = buffer.anchor_before(start + text_before_cursor_len);
|
||||
let text_len = if insert_extra_newline {
|
||||
text_before_cursor_len * 2
|
||||
} else {
|
||||
text_before_cursor_len
|
||||
};
|
||||
delta += text_len as isize - (end - start) as isize;
|
||||
Selection {
|
||||
id,
|
||||
start: anchor.clone(),
|
||||
end: anchor,
|
||||
reversed: false,
|
||||
goal: SelectionGoal::None,
|
||||
}
|
||||
},
|
||||
))
|
||||
});
|
||||
|
||||
self.update_selections(new_selections, true, cx);
|
||||
self.end_transaction(cx);
|
||||
|
||||
#[derive(Default)]
|
||||
struct PendingEdit {
|
||||
indent: u32,
|
||||
insert_extra_newline: bool,
|
||||
delta: isize,
|
||||
ranges: SmallVec<[Range<usize>; 32]>,
|
||||
}
|
||||
}
|
||||
|
||||
fn insert(&mut self, text: &str, cx: &mut ViewContext<Self>) {
|
||||
self.start_transaction(cx);
|
||||
let mut old_selections = SmallVec::<[_; 32]>::new();
|
||||
@ -766,7 +894,7 @@ impl Editor {
|
||||
let mut new_selections = Vec::new();
|
||||
self.buffer.update(cx, |buffer, cx| {
|
||||
let edit_ranges = old_selections.iter().map(|(_, range)| range.clone());
|
||||
buffer.edit(edit_ranges, text, cx);
|
||||
buffer.edit_with_autoindent(edit_ranges, text, cx);
|
||||
let text_len = text.len() as isize;
|
||||
let mut delta = 0_isize;
|
||||
new_selections = old_selections
|
||||
@ -797,7 +925,7 @@ impl Editor {
|
||||
let new_autoclose_pair_state = self.buffer.update(cx, |buffer, cx| {
|
||||
let autoclose_pair = buffer.language().and_then(|language| {
|
||||
let first_selection_start = selections.first().unwrap().start.to_offset(&*buffer);
|
||||
let pair = language.autoclose_pairs().iter().find(|pair| {
|
||||
let pair = language.brackets().iter().find(|pair| {
|
||||
buffer.contains_str_at(
|
||||
first_selection_start.saturating_sub(pair.start.len()),
|
||||
&pair.start,
|
||||
@ -832,7 +960,7 @@ impl Editor {
|
||||
buffer.edit(selection_ranges, &pair.end, cx);
|
||||
|
||||
if pair.end.len() == 1 {
|
||||
Some(AutoclosePairState {
|
||||
Some(BracketPairState {
|
||||
ranges: selections
|
||||
.iter()
|
||||
.map(|selection| {
|
||||
@ -950,6 +1078,51 @@ impl Editor {
|
||||
self.end_transaction(cx);
|
||||
}
|
||||
|
||||
pub fn tab(&mut self, _: &Tab, cx: &mut ViewContext<Self>) {
|
||||
self.start_transaction(cx);
|
||||
let tab_size = self.build_settings.borrow()(cx).tab_size;
|
||||
let mut selections = self.selections(cx).to_vec();
|
||||
self.buffer.update(cx, |buffer, cx| {
|
||||
let mut last_indented_row = None;
|
||||
for selection in &mut selections {
|
||||
let mut range = selection.point_range(buffer);
|
||||
if range.is_empty() {
|
||||
let char_column = buffer
|
||||
.chars_for_range(Point::new(range.start.row, 0)..range.start)
|
||||
.count();
|
||||
let chars_to_next_tab_stop = tab_size - (char_column % tab_size);
|
||||
buffer.edit(
|
||||
[range.start..range.start],
|
||||
" ".repeat(chars_to_next_tab_stop),
|
||||
cx,
|
||||
);
|
||||
range.start.column += chars_to_next_tab_stop as u32;
|
||||
|
||||
let head = buffer.anchor_before(range.start);
|
||||
selection.start = head.clone();
|
||||
selection.end = head;
|
||||
} else {
|
||||
for row in range.start.row..=range.end.row {
|
||||
if last_indented_row != Some(row) {
|
||||
let char_column = buffer.indent_column_for_line(row) as usize;
|
||||
let chars_to_next_tab_stop = tab_size - (char_column % tab_size);
|
||||
let row_start = Point::new(row, 0);
|
||||
buffer.edit(
|
||||
[row_start..row_start],
|
||||
" ".repeat(chars_to_next_tab_stop),
|
||||
cx,
|
||||
);
|
||||
last_indented_row = Some(row);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
self.update_selections(selections, true, cx);
|
||||
self.end_transaction(cx);
|
||||
}
|
||||
|
||||
pub fn delete_line(&mut self, _: &DeleteLine, cx: &mut ViewContext<Self>) {
|
||||
self.start_transaction(cx);
|
||||
|
||||
@ -2488,17 +2661,17 @@ impl Editor {
|
||||
fn on_buffer_event(
|
||||
&mut self,
|
||||
_: ModelHandle<Buffer>,
|
||||
event: &buffer::Event,
|
||||
event: &language::Event,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
match event {
|
||||
buffer::Event::Edited => cx.emit(Event::Edited),
|
||||
buffer::Event::Dirtied => cx.emit(Event::Dirtied),
|
||||
buffer::Event::Saved => cx.emit(Event::Saved),
|
||||
buffer::Event::FileHandleChanged => cx.emit(Event::FileHandleChanged),
|
||||
buffer::Event::Reloaded => cx.emit(Event::FileHandleChanged),
|
||||
buffer::Event::Closed => cx.emit(Event::Closed),
|
||||
buffer::Event::Reparsed => {}
|
||||
language::Event::Edited => cx.emit(Event::Edited),
|
||||
language::Event::Dirtied => cx.emit(Event::Dirtied),
|
||||
language::Event::Saved => cx.emit(Event::Saved),
|
||||
language::Event::FileHandleChanged => cx.emit(Event::FileHandleChanged),
|
||||
language::Event::Reloaded => cx.emit(Event::FileHandleChanged),
|
||||
language::Event::Closed => cx.emit(Event::Closed),
|
||||
language::Event::Reparsed => {}
|
||||
}
|
||||
}
|
||||
|
||||
@ -3507,6 +3680,30 @@ mod tests {
|
||||
assert_eq!(buffer.read(cx).text(), "e t te our");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_newline(cx: &mut gpui::MutableAppContext) {
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, "aaaa\n bbbb\n", cx));
|
||||
let settings = EditorSettings::test(&cx);
|
||||
let (_, view) = cx.add_window(Default::default(), |cx| {
|
||||
build_editor(buffer.clone(), settings, cx)
|
||||
});
|
||||
|
||||
view.update(cx, |view, cx| {
|
||||
view.select_display_ranges(
|
||||
&[
|
||||
DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2),
|
||||
DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2),
|
||||
DisplayPoint::new(1, 6)..DisplayPoint::new(1, 6),
|
||||
],
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
view.newline(&Newline, cx);
|
||||
assert_eq!(view.text(cx), "aa\naa\n \n bb\n bb\n");
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_backspace(cx: &mut gpui::MutableAppContext) {
|
||||
let buffer = cx.add_model(|cx| {
|
||||
@ -4355,14 +4552,18 @@ mod tests {
|
||||
let settings = cx.read(EditorSettings::test);
|
||||
let language = Arc::new(Language::new(
|
||||
LanguageConfig {
|
||||
autoclose_pairs: vec![
|
||||
AutoclosePair {
|
||||
brackets: vec![
|
||||
BracketPair {
|
||||
start: "{".to_string(),
|
||||
end: "}".to_string(),
|
||||
close: true,
|
||||
newline: true,
|
||||
},
|
||||
AutoclosePair {
|
||||
BracketPair {
|
||||
start: "/*".to_string(),
|
||||
end: " */".to_string(),
|
||||
close: true,
|
||||
newline: true,
|
||||
},
|
||||
],
|
||||
..Default::default()
|
||||
@ -4461,6 +4662,76 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_extra_newline_insertion(mut cx: gpui::TestAppContext) {
|
||||
let settings = cx.read(EditorSettings::test);
|
||||
let language = Arc::new(Language::new(
|
||||
LanguageConfig {
|
||||
brackets: vec![
|
||||
BracketPair {
|
||||
start: "{".to_string(),
|
||||
end: "}".to_string(),
|
||||
close: true,
|
||||
newline: true,
|
||||
},
|
||||
BracketPair {
|
||||
start: "/* ".to_string(),
|
||||
end: " */".to_string(),
|
||||
close: true,
|
||||
newline: true,
|
||||
},
|
||||
],
|
||||
..Default::default()
|
||||
},
|
||||
tree_sitter_rust::language(),
|
||||
));
|
||||
|
||||
let text = concat!(
|
||||
"{ }\n", // Suppress rustfmt
|
||||
" x\n", //
|
||||
" /* */\n", //
|
||||
"x\n", //
|
||||
"{{} }\n", //
|
||||
);
|
||||
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let history = History::new(text.into());
|
||||
Buffer::from_history(0, history, None, Some(language), cx)
|
||||
});
|
||||
let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx));
|
||||
view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing())
|
||||
.await;
|
||||
|
||||
view.update(&mut cx, |view, cx| {
|
||||
view.select_display_ranges(
|
||||
&[
|
||||
DisplayPoint::new(0, 2)..DisplayPoint::new(0, 3),
|
||||
DisplayPoint::new(2, 5)..DisplayPoint::new(2, 5),
|
||||
DisplayPoint::new(4, 4)..DisplayPoint::new(4, 4),
|
||||
],
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
view.newline(&Newline, cx);
|
||||
|
||||
assert_eq!(
|
||||
view.buffer().read(cx).text(),
|
||||
concat!(
|
||||
"{ \n", // Suppress rustfmt
|
||||
"\n", //
|
||||
"}\n", //
|
||||
" x\n", //
|
||||
" /* \n", //
|
||||
" \n", //
|
||||
" */\n", //
|
||||
"x\n", //
|
||||
"{{} \n", //
|
||||
"}\n", //
|
||||
)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
impl Editor {
|
||||
fn selection_ranges(&self, cx: &mut MutableAppContext) -> Vec<Range<DisplayPoint>> {
|
||||
self.selections_in_range(
|
||||
|
crates/language/Cargo.toml (new file, 32 lines)

@@ -0,0 +1,32 @@
[package]
name = "language"
version = "0.1.0"
edition = "2018"

[features]
test-support = ["rand", "buffer/test-support"]

[dependencies]
buffer = { path = "../buffer" }
clock = { path = "../clock" }
gpui = { path = "../gpui" }
rpc = { path = "../rpc" }
theme = { path = "../theme" }
util = { path = "../util" }
anyhow = "1.0.38"
futures = "0.3"
lazy_static = "1.4"
log = "0.4"
parking_lot = "0.11.1"
rand = { version = "0.8.3", optional = true }
serde = { version = "1", features = ["derive"] }
similar = "1.3"
smol = "1.2"
tree-sitter = "0.19.5"

[dev-dependencies]
buffer = { path = "../buffer", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
rand = "0.8.3"
tree-sitter-rust = "0.19.0"
unindent = "0.1.7"
@ -11,20 +11,23 @@ pub use tree_sitter::{Parser, Tree};
|
||||
pub struct LanguageConfig {
|
||||
pub name: String,
|
||||
pub path_suffixes: Vec<String>,
|
||||
pub autoclose_pairs: Vec<AutoclosePair>,
|
||||
pub brackets: Vec<BracketPair>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Deserialize)]
|
||||
pub struct AutoclosePair {
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct BracketPair {
|
||||
pub start: String,
|
||||
pub end: String,
|
||||
pub close: bool,
|
||||
pub newline: bool,
|
||||
}
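The renamed `BracketPair` config carries a `newline` flag that the editor's newline command uses to decide whether to insert an extra blank line when the cursor sits between a pair. A hedged standalone sketch of that check, simplified to operate on a plain string rather than zed's `Buffer` and `contains_str_at` API:

```rust
struct BracketPair {
    start: &'static str,
    end: &'static str,
    newline: bool,
}

// Returns true when the cursor sits directly between a pair whose `newline`
// flag is set, mirroring (in simplified form) the extra-newline decision made
// by `Editor::newline` in this commit.
fn needs_extra_newline(text: &str, cursor: usize, pairs: &[BracketPair]) -> bool {
    pairs.iter().any(|pair| {
        pair.newline
            && text[..cursor].ends_with(pair.start.trim_end())
            && text[cursor..].starts_with(pair.end.trim_start())
    })
}

fn main() {
    let pairs = [BracketPair { start: "{", end: "}", newline: true }];
    assert!(needs_extra_newline("fn main() {}", 11, &pairs)); // cursor between `{` and `}`
    assert!(!needs_extra_newline("let x = 1;", 5, &pairs));
}
```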
|
||||
|
||||
pub struct Language {
|
||||
pub(crate) config: LanguageConfig,
|
||||
pub(crate) grammar: Grammar,
|
||||
pub(crate) highlight_query: Query,
|
||||
pub(crate) highlights_query: Query,
|
||||
pub(crate) brackets_query: Query,
|
||||
pub(crate) indents_query: Query,
|
||||
pub(crate) highlight_map: Mutex<HighlightMap>,
|
||||
}
|
||||
|
||||
@ -68,19 +71,25 @@ impl Language {
|
||||
Self {
|
||||
config,
|
||||
brackets_query: Query::new(grammar, "").unwrap(),
|
||||
highlight_query: Query::new(grammar, "").unwrap(),
|
||||
highlights_query: Query::new(grammar, "").unwrap(),
|
||||
indents_query: Query::new(grammar, "").unwrap(),
|
||||
grammar,
|
||||
highlight_map: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_highlights_query(mut self, highlights_query_source: &str) -> Result<Self> {
|
||||
self.highlight_query = Query::new(self.grammar, highlights_query_source)?;
|
||||
pub fn with_highlights_query(mut self, source: &str) -> Result<Self> {
|
||||
self.highlights_query = Query::new(self.grammar, source)?;
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
pub fn with_brackets_query(mut self, brackets_query_source: &str) -> Result<Self> {
|
||||
self.brackets_query = Query::new(self.grammar, brackets_query_source)?;
|
||||
pub fn with_brackets_query(mut self, source: &str) -> Result<Self> {
|
||||
self.brackets_query = Query::new(self.grammar, source)?;
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
pub fn with_indents_query(mut self, source: &str) -> Result<Self> {
|
||||
self.indents_query = Query::new(self.grammar, source)?;
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
@ -88,8 +97,8 @@ impl Language {
|
||||
self.config.name.as_str()
|
||||
}
|
||||
|
||||
pub fn autoclose_pairs(&self) -> &[AutoclosePair] {
|
||||
&self.config.autoclose_pairs
|
||||
pub fn brackets(&self) -> &[BracketPair] {
|
||||
&self.config.brackets
|
||||
}
|
||||
|
||||
pub fn highlight_map(&self) -> HighlightMap {
|
||||
@ -97,7 +106,8 @@ impl Language {
|
||||
}
|
||||
|
||||
pub fn set_theme(&self, theme: &SyntaxTheme) {
|
||||
*self.highlight_map.lock() = HighlightMap::new(self.highlight_query.capture_names(), theme);
|
||||
*self.highlight_map.lock() =
|
||||
HighlightMap::new(self.highlights_query.capture_names(), theme);
|
||||
}
|
||||
}
|
||||
|
||||
@ -110,28 +120,22 @@ mod tests {
|
||||
let grammar = tree_sitter_rust::language();
|
||||
let registry = LanguageRegistry {
|
||||
languages: vec![
|
||||
Arc::new(Language {
|
||||
config: LanguageConfig {
|
||||
Arc::new(Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".to_string(),
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
grammar,
|
||||
highlight_query: Query::new(grammar, "").unwrap(),
|
||||
brackets_query: Query::new(grammar, "").unwrap(),
|
||||
highlight_map: Default::default(),
|
||||
}),
|
||||
Arc::new(Language {
|
||||
config: LanguageConfig {
|
||||
)),
|
||||
Arc::new(Language::new(
|
||||
LanguageConfig {
|
||||
name: "Make".to_string(),
|
||||
path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
grammar,
|
||||
highlight_query: Query::new(grammar, "").unwrap(),
|
||||
brackets_query: Query::new(grammar, "").unwrap(),
|
||||
highlight_map: Default::default(),
|
||||
}),
|
||||
)),
|
||||
],
|
||||
};
|
||||
|
1477
crates/language/src/lib.rs
Normal file
File diff suppressed because it is too large

467
crates/language/src/tests.rs
Normal file
@ -0,0 +1,467 @@
use super::*;
use gpui::{ModelHandle, MutableAppContext};
use std::rc::Rc;
use unindent::Unindent as _;

#[gpui::test]
fn test_edit_events(cx: &mut gpui::MutableAppContext) {
let mut now = Instant::now();
let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
let buffer_ops = buffer1.update(cx, |buffer, cx| {
let buffer_1_events = buffer_1_events.clone();
cx.subscribe(&buffer1, move |_, _, event, _| {
buffer_1_events.borrow_mut().push(event.clone())
})
.detach();
let buffer_2_events = buffer_2_events.clone();
cx.subscribe(&buffer2, move |_, _, event, _| {
buffer_2_events.borrow_mut().push(event.clone())
})
.detach();

// An edit emits an edited event, followed by a dirtied event,
// since the buffer was previously in a clean state.
buffer.edit(Some(2..4), "XYZ", cx);

// An empty transaction does not emit any events.
buffer.start_transaction(None).unwrap();
buffer.end_transaction(None, cx).unwrap();

// A transaction containing two edits emits one edited event.
now += Duration::from_secs(1);
buffer.start_transaction_at(None, now).unwrap();
buffer.edit(Some(5..5), "u", cx);
buffer.edit(Some(6..6), "w", cx);
buffer.end_transaction_at(None, now, cx).unwrap();

// Undoing a transaction emits one edited event.
buffer.undo(cx);

buffer.operations.clone()
});

// Incorporating a set of remote ops emits a single edited event,
// followed by a dirtied event.
buffer2.update(cx, |buffer, cx| {
buffer.apply_ops(buffer_ops, cx).unwrap();
});

let buffer_1_events = buffer_1_events.borrow();
assert_eq!(
*buffer_1_events,
vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
);

let buffer_2_events = buffer_2_events.borrow();
assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}

#[gpui::test]
async fn test_apply_diff(mut cx: gpui::TestAppContext) {
let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

let text = "a\nccc\ndddd\nffffff\n";
let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));

let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}

#[gpui::test]
async fn test_reparse(mut cx: gpui::TestAppContext) {
let buffer = cx.add_model(|cx| {
let text = "fn a() {}".into();
Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx)
});

// Wait for the initial text to parse
buffer
.condition(&cx, |buffer, _| !buffer.is_parsing())
.await;
assert_eq!(
get_tree_sexp(&buffer, &cx),
concat!(
"(source_file (function_item name: (identifier) ",
"parameters: (parameters) ",
"body: (block)))"
)
);

buffer.update(&mut cx, |buffer, _| {
buffer.set_sync_parse_timeout(Duration::ZERO)
});

// Perform some edits (add parameter and variable reference)
// Parsing doesn't begin until the transaction is complete
buffer.update(&mut cx, |buf, cx| {
buf.start_transaction(None).unwrap();

let offset = buf.text().find(")").unwrap();
buf.edit(vec![offset..offset], "b: C", cx);
assert!(!buf.is_parsing());

let offset = buf.text().find("}").unwrap();
buf.edit(vec![offset..offset], " d; ", cx);
assert!(!buf.is_parsing());

buf.end_transaction(None, cx).unwrap();
assert_eq!(buf.text(), "fn a(b: C) { d; }");
assert!(buf.is_parsing());
});
buffer
.condition(&cx, |buffer, _| !buffer.is_parsing())
.await;
assert_eq!(
get_tree_sexp(&buffer, &cx),
concat!(
"(source_file (function_item name: (identifier) ",
"parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
"body: (block (identifier))))"
)
);

// Perform a series of edits without waiting for the current parse to complete:
// * turn identifier into a field expression
// * turn field expression into a method call
// * add a turbofish to the method call
buffer.update(&mut cx, |buf, cx| {
let offset = buf.text().find(";").unwrap();
buf.edit(vec![offset..offset], ".e", cx);
assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
assert!(buf.is_parsing());
});
buffer.update(&mut cx, |buf, cx| {
let offset = buf.text().find(";").unwrap();
buf.edit(vec![offset..offset], "(f)", cx);
assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
assert!(buf.is_parsing());
});
buffer.update(&mut cx, |buf, cx| {
let offset = buf.text().find("(f)").unwrap();
buf.edit(vec![offset..offset], "::<G>", cx);
assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
assert!(buf.is_parsing());
});
buffer
.condition(&cx, |buffer, _| !buffer.is_parsing())
.await;
assert_eq!(
get_tree_sexp(&buffer, &cx),
concat!(
"(source_file (function_item name: (identifier) ",
"parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
"body: (block (call_expression ",
"function: (generic_function ",
"function: (field_expression value: (identifier) field: (field_identifier)) ",
"type_arguments: (type_arguments (type_identifier))) ",
"arguments: (arguments (identifier))))))",
)
);

buffer.update(&mut cx, |buf, cx| {
buf.undo(cx);
assert_eq!(buf.text(), "fn a() {}");
assert!(buf.is_parsing());
});
buffer
.condition(&cx, |buffer, _| !buffer.is_parsing())
.await;
assert_eq!(
get_tree_sexp(&buffer, &cx),
concat!(
"(source_file (function_item name: (identifier) ",
"parameters: (parameters) ",
"body: (block)))"
)
);

buffer.update(&mut cx, |buf, cx| {
buf.redo(cx);
assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
assert!(buf.is_parsing());
});
buffer
.condition(&cx, |buffer, _| !buffer.is_parsing())
.await;
assert_eq!(
get_tree_sexp(&buffer, &cx),
concat!(
"(source_file (function_item name: (identifier) ",
"parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
"body: (block (call_expression ",
"function: (generic_function ",
"function: (field_expression value: (identifier) field: (field_identifier)) ",
"type_arguments: (type_arguments (type_identifier))) ",
"arguments: (arguments (identifier))))))",
)
);

fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
buffer.read_with(cx, |buffer, _| {
buffer.syntax_tree().unwrap().root_node().to_sexp()
})
}
}

#[gpui::test]
fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
let buffer = cx.add_model(|cx| {
let text = "
mod x {
mod y {

}
}
"
.unindent()
.into();
Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx)
});
let buffer = buffer.read(cx);
assert_eq!(
buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
Some((
Point::new(0, 6)..Point::new(0, 7),
Point::new(4, 0)..Point::new(4, 1)
))
);
assert_eq!(
buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
Some((
Point::new(1, 10)..Point::new(1, 11),
Point::new(3, 4)..Point::new(3, 5)
))
);
assert_eq!(
buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
Some((
Point::new(1, 10)..Point::new(1, 11),
Point::new(3, 4)..Point::new(3, 5)
))
);
}

#[gpui::test]
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
cx.add_model(|cx| {
let text = "fn a() {}".into();
let mut buffer = Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx);

buffer.edit_with_autoindent([8..8], "\n\n", cx);
assert_eq!(buffer.text(), "fn a() {\n \n}");

buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx);
assert_eq!(buffer.text(), "fn a() {\n b()\n \n}");

buffer.edit_with_autoindent([Point::new(2, 4)..Point::new(2, 4)], ".c", cx);
assert_eq!(buffer.text(), "fn a() {\n b()\n .c\n}");

buffer
});
}

#[gpui::test]
fn test_autoindent_moves_selections(cx: &mut MutableAppContext) {
cx.add_model(|cx| {
let text = History::new("fn a() {}".into());
let mut buffer = Buffer::from_history(0, text, None, Some(rust_lang()), cx);

let selection_set_id = buffer.add_selection_set(Vec::new(), cx);
buffer.start_transaction(Some(selection_set_id)).unwrap();
buffer.edit_with_autoindent([5..5, 9..9], "\n\n", cx);
buffer
.update_selection_set(
selection_set_id,
vec![
Selection {
id: 0,
start: buffer.anchor_before(Point::new(1, 0)),
end: buffer.anchor_before(Point::new(1, 0)),
reversed: false,
goal: SelectionGoal::None,
},
Selection {
id: 1,
start: buffer.anchor_before(Point::new(4, 0)),
end: buffer.anchor_before(Point::new(4, 0)),
reversed: false,
goal: SelectionGoal::None,
},
],
cx,
)
.unwrap();
assert_eq!(buffer.text(), "fn a(\n\n) {}\n\n");

// Ending the transaction runs the auto-indent. The selection
// at the start of the auto-indented row is pushed to the right.
buffer.end_transaction(Some(selection_set_id), cx).unwrap();
assert_eq!(buffer.text(), "fn a(\n \n) {}\n\n");
let selection_ranges = buffer
.selection_set(selection_set_id)
.unwrap()
.selections
.iter()
.map(|selection| selection.point_range(&buffer))
.collect::<Vec<_>>();

assert_eq!(selection_ranges[0], empty(Point::new(1, 4)));
assert_eq!(selection_ranges[1], empty(Point::new(4, 0)));

buffer
});
}

#[gpui::test]
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
cx.add_model(|cx| {
let text = "
fn a() {
c;
d;
}
"
.unindent()
.into();
let mut buffer = Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx);

// Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
// their indentation is not adjusted.
buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx);
assert_eq!(
buffer.text(),
"
fn a() {
c();
d();
}
"
.unindent()
);

// When appending new content after these lines, the indentation is based on the
// preceding lines' actual indentation.
buffer.edit_with_autoindent(
[empty(Point::new(1, 1)), empty(Point::new(2, 1))],
"\n.f\n.g",
cx,
);
assert_eq!(
buffer.text(),
"
fn a() {
c
.f
.g();
d
.f
.g();
}
"
.unindent()
);
buffer
});
}

#[gpui::test]
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
cx.add_model(|cx| {
let text = History::new(
"
fn a() {}
"
.unindent()
.into(),
);
let mut buffer = Buffer::from_history(0, text, None, Some(rust_lang()), cx);

buffer.edit_with_autoindent([5..5], "\nb", cx);
assert_eq!(
buffer.text(),
"
fn a(
b) {}
"
.unindent()
);

// The indentation suggestion changed because `@end` node (a close paren)
// is now at the beginning of the line.
buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx);
assert_eq!(
buffer.text(),
"
fn a(
) {}
"
.unindent()
);

buffer
});
}

#[test]
fn test_contiguous_ranges() {
assert_eq!(
contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12], 100).collect::<Vec<_>>(),
&[1..4, 5..7, 9..13]
);

// Respects the `max_len` parameter
assert_eq!(
contiguous_ranges([2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31], 3).collect::<Vec<_>>(),
&[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
);
}

impl Buffer {
pub fn enclosing_bracket_point_ranges<T: ToOffset>(
&self,
range: Range<T>,
) -> Option<(Range<Point>, Range<Point>)> {
self.enclosing_bracket_ranges(range).map(|(start, end)| {
let point_start = start.start.to_point(self)..start.end.to_point(self);
let point_end = end.start.to_point(self)..end.end.to_point(self);
(point_start, point_end)
})
}
}

fn rust_lang() -> Arc<Language> {
Arc::new(
Language::new(
LanguageConfig {
name: "Rust".to_string(),
path_suffixes: vec!["rs".to_string()],
..Default::default()
},
tree_sitter_rust::language(),
)
.with_indents_query(
r#"
(call_expression) @indent
(field_expression) @indent
(_ "(" ")" @end) @indent
(_ "{" "}" @end) @indent
"#,
)
.unwrap()
.with_brackets_query(r#" ("{" @open "}" @close) "#)
.unwrap(),
)
}

fn empty(point: Point) -> Range<Point> {
point..point
}
@ -4,7 +4,7 @@ version = "0.1.0"
edition = "2018"

[features]
test-support = []
test-support = ["language/test-support", "buffer/test-support"]

[dependencies]
buffer = { path = "../buffer" }
@ -13,6 +13,7 @@ clock = { path = "../clock" }
fsevent = { path = "../fsevent" }
fuzzy = { path = "../fuzzy" }
gpui = { path = "../gpui" }
language = { path = "../language" }
lsp = { path = "../lsp" }
rpc = { path = "../rpc" }
sum_tree = { path = "../sum_tree" }
@ -34,6 +35,7 @@ toml = "0.5"
[dev-dependencies]
client = { path = "../client", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
@ -3,11 +3,11 @@ mod ignore;
mod worktree;

use anyhow::Result;
use buffer::LanguageRegistry;
use client::Client;
use futures::Future;
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
use language::LanguageRegistry;
use std::{
path::Path,
sync::{atomic::AtomicBool, Arc},
@ -302,9 +302,9 @@ impl Entity for Project {
#[cfg(test)]
mod tests {
use super::*;
use buffer::LanguageRegistry;
use fs::RealFs;
use gpui::TestAppContext;
use language::LanguageRegistry;
use serde_json::json;
use std::{os::unix, path::PathBuf};
use util::test::temp_tree;
@ -4,7 +4,6 @@ use super::{
};
use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{anyhow, Result};
use buffer::{Buffer, History, LanguageRegistry, Operation, Rope};
use client::{proto, Client, PeerId, TypedEnvelope};
use clock::ReplicaId;
use futures::{Stream, StreamExt};
@ -13,6 +12,7 @@ use gpui::{
executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{Buffer, History, LanguageRegistry, Operation, Rope};
use lazy_static::lazy_static;
use lsp::LanguageServer;
use parking_lot::Mutex;
@ -588,54 +588,40 @@ impl Worktree {
}
};

let worktree_handle = cx.handle();
let mut buffers_to_delete = Vec::new();
for (buffer_id, buffer) in open_buffers {
if let Some(buffer) = buffer.upgrade(cx) {
buffer.update(cx, |buffer, cx| {
let buffer_is_clean = !buffer.is_dirty();

if let Some(file) = buffer.file_mut() {
let mut file_changed = false;

if let Some(entry) = file
if let Some(old_file) = buffer.file() {
let new_file = if let Some(entry) = old_file
.entry_id()
.and_then(|entry_id| self.entry_for_id(entry_id))
{
if entry.path != *file.path() {
file.set_path(entry.path.clone());
file_changed = true;
File {
entry_id: Some(entry.id),
mtime: entry.mtime,
path: entry.path.clone(),
worktree: worktree_handle.clone(),
}
} else if let Some(entry) = self.entry_for_path(old_file.path().as_ref()) {
File {
entry_id: Some(entry.id),
mtime: entry.mtime,
path: entry.path.clone(),
worktree: worktree_handle.clone(),
}
} else {
File {
entry_id: None,
path: old_file.path().clone(),
mtime: old_file.mtime(),
worktree: worktree_handle.clone(),
}
};

if entry.mtime != file.mtime() {
file.set_mtime(entry.mtime);
file_changed = true;
if let Some(worktree) = self.as_local() {
if buffer_is_clean {
let abs_path = worktree.absolutize(file.path().as_ref());
refresh_buffer(abs_path, &worktree.fs, cx);
}
}
}
} else if let Some(entry) = self.entry_for_path(file.path().as_ref()) {
file.set_entry_id(Some(entry.id));
file.set_mtime(entry.mtime);
if let Some(worktree) = self.as_local() {
if buffer_is_clean {
let abs_path = worktree.absolutize(file.path().as_ref());
refresh_buffer(abs_path, &worktree.fs, cx);
}
}
file_changed = true;
} else if !file.is_deleted() {
if buffer_is_clean {
cx.emit(buffer::Event::Dirtied);
}
file.set_entry_id(None);
file_changed = true;
}

if file_changed {
cx.emit(buffer::Event::FileHandleChanged);
if let Some(task) = buffer.file_updated(Box::new(new_file), cx) {
task.detach();
}
}
});
@ -866,7 +852,7 @@ impl LocalWorktree {
.update(&mut cx, |this, cx| this.as_local().unwrap().load(&path, cx))
.await?;
let language = this.read_with(&cx, |this, cx| {
use buffer::File;
use language::File;

this.languages()
.select_language(file.full_path(cx))
@ -913,7 +899,7 @@ impl LocalWorktree {
.insert(buffer.id() as u64, buffer.clone());

Ok(proto::OpenBufferResponse {
buffer: Some(buffer.update(cx.as_mut(), |buffer, cx| buffer.to_proto(cx))),
buffer: Some(buffer.update(cx.as_mut(), |buffer, _| buffer.to_proto())),
})
})
})
@ -1187,24 +1173,6 @@ fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
Ok(builder.build()?)
}

pub fn refresh_buffer(abs_path: PathBuf, fs: &Arc<dyn Fs>, cx: &mut ModelContext<Buffer>) {
let fs = fs.clone();
cx.spawn(|buffer, mut cx| async move {
let new_text = fs.load(&abs_path).await;
match new_text {
Err(error) => log::error!("error refreshing buffer after file changed: {}", error),
Ok(new_text) => {
buffer
.update(&mut cx, |buffer, cx| {
buffer.set_text_from_disk(new_text.into(), cx)
})
.await;
}
}
})
.detach()
}

impl Deref for LocalWorktree {
type Target = Snapshot;

@ -1283,7 +1251,7 @@ impl RemoteWorktree {
.ok_or_else(|| anyhow!("worktree was closed"))?;
let file = File::new(entry.id, this.clone(), entry.path, entry.mtime);
let language = this.read_with(&cx, |this, cx| {
use buffer::File;
use language::File;

this.languages()
.select_language(file.full_path(cx))
@ -1794,7 +1762,7 @@ impl File {
}
}

impl buffer::File for File {
impl language::File for File {
fn worktree_id(&self) -> usize {
self.worktree.id()
}
@ -1803,26 +1771,14 @@ impl buffer::File for File {
self.entry_id
}

fn set_entry_id(&mut self, entry_id: Option<usize>) {
self.entry_id = entry_id;
}

fn mtime(&self) -> SystemTime {
self.mtime
}

fn set_mtime(&mut self, mtime: SystemTime) {
self.mtime = mtime;
}

fn path(&self) -> &Arc<Path> {
&self.path
}

fn set_path(&mut self, path: Arc<Path>) {
self.path = path;
}

fn full_path(&self, cx: &AppContext) -> PathBuf {
let worktree = self.worktree.read(cx);
let mut full_path = PathBuf::new();
@ -1891,6 +1847,16 @@ impl buffer::File for File {
})
}

fn load_local(&self, cx: &AppContext) -> Option<Task<Result<String>>> {
let worktree = self.worktree.read(cx).as_local()?;
let abs_path = worktree.absolutize(&self.path);
let fs = worktree.fs.clone();
Some(
cx.background()
.spawn(async move { fs.load(&abs_path).await }),
)
}

fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext) {
self.worktree.update(cx, |worktree, cx| {
if let Some((rpc, remote_id)) = match worktree {
@ -1946,7 +1912,7 @@ impl buffer::File for File {
});
}

fn boxed_clone(&self) -> Box<dyn buffer::File> {
fn boxed_clone(&self) -> Box<dyn language::File> {
Box::new(self.clone())
}

@ -3272,7 +3238,7 @@ mod tests {
assert!(buffer.is_dirty());
assert_eq!(
*events.borrow(),
&[buffer::Event::Edited, buffer::Event::Dirtied]
&[language::Event::Edited, language::Event::Dirtied]
);
events.borrow_mut().clear();
buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
@ -3281,7 +3247,7 @@ mod tests {
// after saving, the buffer is not dirty, and emits a saved event.
buffer1.update(&mut cx, |buffer, cx| {
assert!(!buffer.is_dirty());
assert_eq!(*events.borrow(), &[buffer::Event::Saved]);
assert_eq!(*events.borrow(), &[language::Event::Saved]);
events.borrow_mut().clear();

buffer.edit(vec![1..1], "B", cx);
@ -3295,9 +3261,9 @@ mod tests {
assert_eq!(
*events.borrow(),
&[
buffer::Event::Edited,
buffer::Event::Dirtied,
buffer::Event::Edited
language::Event::Edited,
language::Event::Dirtied,
language::Event::Edited
],
);
events.borrow_mut().clear();
@ -3309,7 +3275,7 @@ mod tests {
assert!(buffer.is_dirty());
});

assert_eq!(*events.borrow(), &[buffer::Event::Edited]);
assert_eq!(*events.borrow(), &[language::Event::Edited]);

// When a file is deleted, the buffer is considered dirty.
let events = Rc::new(RefCell::new(Vec::new()));
@ -3329,7 +3295,7 @@ mod tests {
buffer2.condition(&cx, |b, _| b.is_dirty()).await;
assert_eq!(
*events.borrow(),
&[buffer::Event::Dirtied, buffer::Event::FileHandleChanged]
&[language::Event::Dirtied, language::Event::FileHandleChanged]
);

// When a file is already dirty when deleted, we don't emit a Dirtied event.
@ -3355,7 +3321,7 @@ mod tests {
buffer3
.condition(&cx, |_, _| !events.borrow().is_empty())
.await;
assert_eq!(*events.borrow(), &[buffer::Event::FileHandleChanged]);
assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
}

@ -3446,12 +3412,13 @@ mod tests {
buffer.update(&mut cx, |buffer, cx| {
buffer.edit(vec![0..0], " ", cx);
assert!(buffer.is_dirty());
assert!(!buffer.has_conflict());
});

// Change the file on disk again, adding blank lines to the beginning.
fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

// Becaues the buffer is modified, it doesn't reload from disk, but is
// Because the buffer is modified, it doesn't reload from disk, but is
// marked as having a conflict.
buffer
.condition(&cx, |buffer, _| buffer.has_conflict())
@ -976,13 +976,13 @@ mod tests {
time::Duration,
};
use zed::{
buffer::LanguageRegistry,
client::{
self, test::FakeHttpClient, Channel, ChannelDetails, ChannelList, Client, Credentials,
EstablishConnectionError, UserStore,
},
editor::{Editor, EditorSettings, Input},
fs::{FakeFs, Fs as _},
language::LanguageRegistry,
people_panel::JoinWorktree,
project::{ProjectPath, Worktree},
workspace::{Workspace, WorkspaceParams},
@ -8,7 +8,7 @@ test-support = [
"client/test-support",
"project/test-support",
"tree-sitter",
"tree-sitter-rust"
"tree-sitter-rust",
]

[dependencies]
@ -16,6 +16,7 @@ buffer = { path = "../buffer" }
client = { path = "../client" }
editor = { path = "../editor" }
gpui = { path = "../gpui" }
language = { path = "../language" }
project = { path = "../project" }
theme = { path = "../theme" }
anyhow = "1.0.38"
@ -1,9 +1,9 @@
use super::{Item, ItemView};
use crate::Settings;
use anyhow::Result;
use buffer::{Buffer, File as _};
use editor::{Editor, EditorSettings, Event};
use gpui::{fonts::TextStyle, AppContext, ModelHandle, Task, ViewContext};
use language::{Buffer, File as _};
use postage::watch;
use project::{ProjectPath, Worktree};
use std::path::Path;
@ -5,7 +5,7 @@ pub mod settings;
pub mod sidebar;

use anyhow::Result;
use buffer::{Buffer, LanguageRegistry};
use language::{Buffer, LanguageRegistry};
use client::{Authenticate, ChannelList, Client, UserStore};
use gpui::{
action, elements::*, json::to_string_pretty, keymap::Binding, platform::CursorStyle,
@ -271,8 +271,8 @@ impl WorkspaceParams {
#[cfg(any(test, feature = "test-support"))]
pub fn test(cx: &mut MutableAppContext) -> Self {
let mut languages = LanguageRegistry::new();
languages.add(Arc::new(buffer::Language::new(
buffer::LanguageConfig {
languages.add(Arc::new(language::Language::new(
language::LanguageConfig {
name: "Rust".to_string(),
path_suffixes: vec!["rs".to_string()],
..Default::default()
@ -18,6 +18,7 @@ test-support = [
"buffer/test-support",
"client/test-support",
"gpui/test-support",
"language/test-support",
"project/test-support",
"rpc/test-support",
"tempdir",
@ -33,6 +34,7 @@ fuzzy = { path = "../fuzzy" }
editor = { path = "../editor" }
file_finder = { path = "../file_finder" }
gpui = { path = "../gpui" }
language = { path = "../language" }
people_panel = { path = "../people_panel" }
project = { path = "../project" }
project_panel = { path = "../project_panel" }
@ -85,6 +87,7 @@ url = "2.2"
buffer = { path = "../buffer", features = ["test-support"] }
editor = { path = "../editor", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
client = { path = "../client", features = ["test-support"] }
@ -20,13 +20,13 @@ extends = "_base"
[selection]
host = { selection = "#3B57BC33", cursor = "$text.0.color" }
guests = [
{ selection = "#FDF35133", cursor = "#FDF351" },
{ selection = "#4EACAD33", cursor = "#4EACAD" },
{ selection = "#D0453B33", cursor = "#D0453B" },
{ selection = "#3B874B33", cursor = "#3B874B" },
{ selection = "#BD7CB433", cursor = "#BD7CB4" },
{ selection = "#EE823133", cursor = "#EE8231" },
{ selection = "#5A2B9233", cursor = "#5A2B92" },
{ selection = "#FDF35133", cursor = "#FDF351" },
{ selection = "#4EACAD33", cursor = "#4EACAD" }
]

[status]
@ -1,9 +1,10 @@
name = "Rust"
path_suffixes = ["rs"]
autoclose_pairs = [
{ start = "{", end = "}" },
{ start = "[", end = "]" },
{ start = "(", end = ")" },
{ start = "\"", end = "\"" },
{ start = "/*", end = " */" },
brackets = [
{ start = "{", end = "}", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },
{ start = "(", end = ")", close = true, newline = true },
{ start = "<", end = ">", close = false, newline = true },
{ start = "\"", end = "\"", close = true, newline = false },
{ start = "/*", end = " */", close = true, newline = false },
]
12
crates/zed/languages/rust/indents.scm
Normal file
@ -0,0 +1,12 @@
[
((where_clause) _ @end)
(field_expression)
(call_expression)
(assignment_expression)
(let_declaration)
] @indent

(_ "[" "]" @end) @indent
(_ "<" ">" @end) @indent
(_ "{" "}" @end) @indent
(_ "(" ")" @end) @indent
@ -1,4 +1,4 @@
use buffer::{Language, LanguageRegistry};
pub use language::{Language, LanguageRegistry};
use rust_embed::RustEmbed;
use std::borrow::Cow;
use std::{str, sync::Arc};
@ -22,6 +22,8 @@ fn rust() -> Language {
.unwrap()
.with_brackets_query(load_query("rust/brackets.scm").as_ref())
.unwrap()
.with_indents_query(load_query("rust/indents.scm").as_ref())
.unwrap()
}

fn load_query(path: &str) -> Cow<'static, str> {
@ -4,8 +4,7 @@ pub mod menus;
#[cfg(any(test, feature = "test-support"))]
pub mod test;

pub use buffer;
use buffer::LanguageRegistry;
use self::language::LanguageRegistry;
use chat_panel::ChatPanel;
pub use client;
pub use editor;
@ -1,7 +1,7 @@
use crate::{assets::Assets, AppState};
use buffer::LanguageRegistry;
use client::{http::ServerResponse, test::FakeHttpClient, ChannelList, Client, UserStore};
use gpui::{AssetSource, MutableAppContext};
use language::LanguageRegistry;
use parking_lot::Mutex;
use postage::watch;
use project::fs::FakeFs;