Merge pull request #1115 from zed-industries/offset-out-of-range

Fix `offset out of range` panic during `FoldMap::sync`
Max Brunsfeld 2022-06-06 10:32:18 -07:00 committed by GitHub
commit 075dd83a43
5 changed files with 180 additions and 27 deletions
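
Judging from the changes below, the panic was hit when an edit range arrived with its start offset past its end offset (and, in the LSP case, with edits out of document order). The fix normalizes such ranges with a `mem::swap` at every point where edits enter the buffer stack (`MultiBuffer::edit_internal`, `Buffer::edit`, and `range_from_lsp`), and sorts LSP edits by their start before merging them. A minimal, self-contained sketch of that normalization; the `normalize_range` helper below is illustrative only, the real code inlines the swap at each call site:

```rust
use std::mem;
use std::ops::Range;

// Illustrative helper only: the diffs below inline this exact swap wherever
// an edit range is converted to offsets or points.
fn normalize_range<T: Ord>(mut range: Range<T>) -> Range<T> {
    if range.start > range.end {
        mem::swap(&mut range.start, &mut range.end);
    }
    range
}

fn main() {
    // An inverted range such as 6..0 is treated as 0..6 from here on.
    assert_eq!(normalize_range(6..0), 0..6);
    // Already-ordered (or empty) ranges pass through unchanged.
    assert_eq!(normalize_range(3..3), 3..3);
}
```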

View File

@@ -693,7 +693,7 @@ pub mod tests {
}
}
_ => {
- buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 5, cx));
+ buffer.update(cx, |buffer, cx| buffer.randomly_mutate(&mut rng, 5, cx));
}
}

View File

@@ -17,6 +17,7 @@ use std::{
cell::{Ref, RefCell},
cmp, fmt, io,
iter::{self, FromIterator},
+ mem,
ops::{Range, RangeBounds, Sub},
str,
sync::Arc,
@@ -298,7 +299,7 @@ impl MultiBuffer {
pub fn edit_internal<I, S, T>(
&mut self,
- edits_iter: I,
+ edits: I,
autoindent: bool,
cx: &mut ModelContext<Self>,
) where
@@ -310,14 +311,16 @@ impl MultiBuffer {
return;
}
+ let snapshot = self.read(cx);
+ let edits = edits.into_iter().map(|(range, new_text)| {
+ let mut range = range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot);
+ if range.start > range.end {
+ mem::swap(&mut range.start, &mut range.end);
+ }
+ (range, new_text)
+ });
if let Some(buffer) = self.as_singleton() {
- let snapshot = self.read(cx);
- let edits = edits_iter.into_iter().map(|(range, new_text)| {
- (
- range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot),
- new_text,
- )
- });
return buffer.update(cx, |buffer, cx| {
let language_name = buffer.language().map(|language| language.name());
let indent_size = cx.global::<Settings>().tab_size(language_name.as_deref());
@@ -329,29 +332,26 @@ impl MultiBuffer {
});
}
- let snapshot = self.read(cx);
let mut buffer_edits: HashMap<usize, Vec<(Range<usize>, Arc<str>, bool)>> =
Default::default();
let mut cursor = snapshot.excerpts.cursor::<usize>();
- for (range, new_text) in edits_iter {
+ for (range, new_text) in edits {
let new_text: Arc<str> = new_text.into();
- let start = range.start.to_offset(&snapshot);
- let end = range.end.to_offset(&snapshot);
- cursor.seek(&start, Bias::Right, &());
- if cursor.item().is_none() && start == *cursor.start() {
+ cursor.seek(&range.start, Bias::Right, &());
+ if cursor.item().is_none() && range.start == *cursor.start() {
cursor.prev(&());
}
let start_excerpt = cursor.item().expect("start offset out of bounds");
- let start_overshoot = start - cursor.start();
+ let start_overshoot = range.start - cursor.start();
let buffer_start =
start_excerpt.range.start.to_offset(&start_excerpt.buffer) + start_overshoot;
- cursor.seek(&end, Bias::Right, &());
- if cursor.item().is_none() && end == *cursor.start() {
+ cursor.seek(&range.end, Bias::Right, &());
+ if cursor.item().is_none() && range.end == *cursor.start() {
cursor.prev(&());
}
let end_excerpt = cursor.item().expect("end offset out of bounds");
- let end_overshoot = end - cursor.start();
+ let end_overshoot = range.end - cursor.start();
let buffer_end = end_excerpt.range.start.to_offset(&end_excerpt.buffer) + end_overshoot;
if start_excerpt.id == end_excerpt.id {
@@ -373,7 +373,7 @@ impl MultiBuffer {
.or_insert(Vec::new())
.push((end_excerpt_range, new_text.clone(), false));
- cursor.seek(&start, Bias::Right, &());
+ cursor.seek(&range.start, Bias::Right, &());
cursor.next(&());
while let Some(excerpt) = cursor.item() {
if excerpt.id == end_excerpt.id {
@@ -1310,7 +1310,11 @@ impl MultiBuffer {
let end = snapshot.clip_offset(rng.gen_range(new_start..=snapshot.len()), Bias::Right);
let start = snapshot.clip_offset(rng.gen_range(new_start..=end), Bias::Right);
last_end = Some(end);
- let range = start..end;
+ let mut range = start..end;
+ if rng.gen_bool(0.2) {
+ mem::swap(&mut range.start, &mut range.end);
+ }
let new_text_len = rng.gen_range(0..10);
let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
@@ -1414,8 +1418,27 @@ impl MultiBuffer {
mutation_count: usize,
cx: &mut ModelContext<Self>,
) {
+ use rand::prelude::*;
if rng.gen_bool(0.7) || self.singleton {
- self.randomly_edit(rng, mutation_count, cx);
+ let buffer = self
+ .buffers
+ .borrow()
+ .values()
+ .choose(rng)
+ .map(|state| state.buffer.clone());
+ if rng.gen() && buffer.is_some() {
+ buffer.unwrap().update(cx, |buffer, cx| {
+ if rng.gen() {
+ buffer.randomly_edit(rng, mutation_count, cx);
+ } else {
+ buffer.randomly_undo_redo(rng, cx);
+ }
+ });
+ } else {
+ self.randomly_edit(rng, mutation_count, cx);
+ }
} else {
self.randomly_edit_excerpts(rng, mutation_count, cx);
}
@@ -3330,7 +3353,7 @@ mod tests {
});
buffer_2.update(cx, |buffer, cx| {
buffer.edit([(0..0, "Y")], cx);
- buffer.edit([(6..0, "Z")], cx);
+ buffer.edit([(6..6, "Z")], cx);
});
let new_snapshot = multibuffer.read(cx).snapshot(cx);
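
With the normalization in place, an inverted range passed to a buffer edit is interpreted as its ordered counterpart: `(6..0, "Z")` becomes `0..6` and replaces the first six characters rather than inserting at offset 6, which is presumably why the test in the hunk above switches to `6..6`. A stand-alone sketch of the new behavior, using a plain `String` rather than Zed's buffer API:

```rust
use std::mem;
use std::ops::Range;

// Hedged sketch using a plain String; the real Buffer applies the same swap
// before converting the range to offsets.
fn apply(text: &str, mut range: Range<usize>, new_text: &str) -> String {
    if range.start > range.end {
        mem::swap(&mut range.start, &mut range.end);
    }
    let mut result = text.to_string();
    result.replace_range(range, new_text);
    result
}

fn main() {
    // Normalized to 0..6: the existing text is replaced.
    assert_eq!(apply("abcdef", 6..0, "Z"), "Z");
    // A genuinely empty range at offset 6 is still a plain insertion.
    assert_eq!(apply("abcdef", 6..6, "Z"), "abcdefZ");
}
```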

View File

@@ -23,6 +23,7 @@ use std::{
ffi::OsString,
future::Future,
iter::{Iterator, Peekable},
+ mem,
ops::{Deref, DerefMut, Range},
path::{Path, PathBuf},
str,
@@ -1108,7 +1109,10 @@ impl Buffer {
// Skip invalid edits and coalesce contiguous ones.
let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
for (range, new_text) in edits_iter {
- let range = range.start.to_offset(self)..range.end.to_offset(self);
+ let mut range = range.start.to_offset(self)..range.end.to_offset(self);
+ if range.start > range.end {
+ mem::swap(&mut range.start, &mut range.end);
+ }
let new_text = new_text.into();
if !new_text.is_empty() || !range.is_empty() {
if let Some((prev_range, prev_text)) = edits.last_mut() {
@@ -1452,7 +1456,10 @@ impl Buffer {
}
let new_start = last_end.map_or(0, |last_end| last_end + 1);
- let range = self.random_byte_range(new_start, rng);
+ let mut range = self.random_byte_range(new_start, rng);
+ if rng.gen_bool(0.2) {
+ mem::swap(&mut range.start, &mut range.end);
+ }
last_end = Some(range.end);
let new_text_len = rng.gen_range(0..10);

View File

@@ -22,6 +22,7 @@ use serde_json::Value;
use std::{
any::Any,
cell::RefCell,
+ mem,
ops::Range,
path::{Path, PathBuf},
str,
@@ -692,5 +693,10 @@ pub fn range_to_lsp(range: Range<PointUtf16>) -> lsp::Range {
}
pub fn range_from_lsp(range: lsp::Range) -> Range<PointUtf16> {
- point_from_lsp(range.start)..point_from_lsp(range.end)
+ let mut start = point_from_lsp(range.start);
+ let mut end = point_from_lsp(range.end);
+ if start > end {
+ mem::swap(&mut start, &mut end);
+ }
+ start..end
}
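
`range_from_lsp` now applies the same treatment to ranges coming from a language server. LSP positions compare lexicographically by (line, character), so a single swap is enough to make the converted range run forward. A hedged illustration with a stand-in point type (`PointLike` is not part of the codebase):

```rust
use std::ops::Range;

// `PointLike` stands in for Zed's PointUtf16 and is illustrative only;
// like lsp::Position, it orders lexicographically by (row, column).
type PointLike = (u32, u32);

fn range_from_positions(start: PointLike, end: PointLike) -> Range<PointLike> {
    // Same idea as the swap added to range_from_lsp above.
    if start > end {
        end..start
    } else {
        start..end
    }
}

fn main() {
    // Mirrors the second edit in the test below: 0:8..0:4 comes out as 0:4..0:8.
    assert_eq!(range_from_positions((0, 8), (0, 4)), (0, 4)..(0, 8));
}
```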

View File

@@ -5164,8 +5164,10 @@ impl Project {
let mut lsp_edits = lsp_edits
.into_iter()
.map(|edit| (range_from_lsp(edit.range), edit.new_text))
- .peekable();
+ .collect::<Vec<_>>();
+ lsp_edits.sort_by_key(|(range, _)| range.start);
+ let mut lsp_edits = lsp_edits.into_iter().peekable();
let mut edits = Vec::new();
while let Some((mut range, mut new_text)) = lsp_edits.next() {
// Combine any LSP edits that are adjacent.
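
The hunk above also stops assuming the server returns its edits in document order: the converted edits are collected, sorted by the start of their (already normalized) ranges, and only then fed to the existing adjacent-edit coalescing loop. A rough sketch of that sort-then-coalesce shape, with the edit type simplified to `(Range<usize>, String)` and a deliberately naive merge rule for touching ranges, which is not Zed's exact rule:

```rust
use std::ops::Range;

// Hedged sketch of "sort, then coalesce adjacent edits"; edits_from_lsp works
// on anchors and LSP positions, this version uses plain byte offsets.
fn sort_and_merge(mut edits: Vec<(Range<usize>, String)>) -> Vec<(Range<usize>, String)> {
    edits.sort_by_key(|(range, _)| range.start);
    let mut merged: Vec<(Range<usize>, String)> = Vec::new();
    for (range, text) in edits {
        if let Some((prev_range, prev_text)) = merged.last_mut() {
            // Combine edits whose ranges touch the previous edit.
            if prev_range.end >= range.start {
                prev_range.end = prev_range.end.max(range.end);
                prev_text.push_str(&text);
                continue;
            }
        }
        merged.push((range, text));
    }
    merged
}

fn main() {
    // Out-of-order input is fine once it is sorted up front.
    let merged = sort_and_merge(vec![(4..8, "b".to_string()), (0..4, "a".to_string())]);
    assert_eq!(merged, vec![(0usize..8, "ab".to_string())]);
}
```
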
@@ -6959,6 +6961,121 @@ mod tests {
});
}
+ #[gpui::test]
+ async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
+ cx.foreground().forbid_parking();
+ let text = "
+ use a::b;
+ use a::c;
+ fn f() {
+ b();
+ c();
+ }
+ "
+ .unindent();
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/dir",
+ json!({
+ "a.rs": text.clone(),
+ }),
+ )
+ .await;
+ let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let buffer = project
+ .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+ .await
+ .unwrap();
+ // Simulate the language server sending us edits in a non-ordered fashion,
+ // with ranges sometimes being inverted.
+ let edits = project
+ .update(cx, |project, cx| {
+ project.edits_from_lsp(
+ &buffer,
+ [
+ lsp::TextEdit {
+ range: lsp::Range::new(
+ lsp::Position::new(0, 9),
+ lsp::Position::new(0, 9),
+ ),
+ new_text: "\n\n".into(),
+ },
+ lsp::TextEdit {
+ range: lsp::Range::new(
+ lsp::Position::new(0, 8),
+ lsp::Position::new(0, 4),
+ ),
+ new_text: "a::{b, c}".into(),
+ },
+ lsp::TextEdit {
+ range: lsp::Range::new(
+ lsp::Position::new(1, 0),
+ lsp::Position::new(7, 0),
+ ),
+ new_text: "".into(),
+ },
+ lsp::TextEdit {
+ range: lsp::Range::new(
+ lsp::Position::new(0, 9),
+ lsp::Position::new(0, 9),
+ ),
+ new_text: "
+ fn f() {
+ b();
+ c();
+ }"
+ .unindent(),
+ },
+ ],
+ None,
+ cx,
+ )
+ })
+ .await
+ .unwrap();
+ buffer.update(cx, |buffer, cx| {
+ let edits = edits
+ .into_iter()
+ .map(|(range, text)| {
+ (
+ range.start.to_point(&buffer)..range.end.to_point(&buffer),
+ text,
+ )
+ })
+ .collect::<Vec<_>>();
+ assert_eq!(
+ edits,
+ [
+ (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
+ (Point::new(1, 0)..Point::new(2, 0), "".into())
+ ]
+ );
+ for (range, new_text) in edits {
+ buffer.edit([(range, new_text)], cx);
+ }
+ assert_eq!(
+ buffer.text(),
+ "
+ use a::{b, c};
+ fn f() {
+ b();
+ c();
+ }
+ "
+ .unindent()
+ );
+ });
+ }
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
buffer: &Buffer,
range: Range<T>,