mirror of https://github.com/zed-industries/zed.git (synced 2024-12-28 19:25:33 +03:00)

Remove anchor collections

Co-Authored-By: Nathan Sobo <nathan@zed.dev>

This commit is contained in:
parent 67686dd1c2
commit 65711b2256

Changed: Cargo.lock (generated): 1 change
@ -2587,6 +2587,7 @@ dependencies = [
|
||||
"serde",
|
||||
"similar",
|
||||
"smol",
|
||||
"sum_tree",
|
||||
"text",
|
||||
"theme",
|
||||
"tree-sitter",
|
||||
|
@ -398,7 +398,7 @@ struct SelectNextState {
|
||||
|
||||
#[derive(Debug)]
|
||||
struct BracketPairState {
|
||||
ranges: AnchorRangeSet,
|
||||
ranges: Vec<Range<Anchor>>,
|
||||
pair: BracketPair,
|
||||
}
|
||||
|
||||
@ -1285,7 +1285,7 @@ impl Editor {
|
||||
|
||||
fn autoclose_pairs(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let selections = self.selections::<usize>(cx).collect::<Vec<_>>();
|
||||
let new_autoclose_pair_state = self.buffer.update(cx, |buffer, cx| {
|
||||
let new_autoclose_pair = self.buffer.update(cx, |buffer, cx| {
|
||||
let autoclose_pair = buffer.language().and_then(|language| {
|
||||
let first_selection_start = selections.first().unwrap().start;
|
||||
let pair = language.brackets().iter().find(|pair| {
|
||||
@ -1324,15 +1324,14 @@ impl Editor {
|
||||
if pair.end.len() == 1 {
|
||||
let mut delta = 0;
|
||||
Some(BracketPairState {
|
||||
ranges: buffer.anchor_range_set(
|
||||
Bias::Left,
|
||||
Bias::Right,
|
||||
selections.iter().map(move |selection| {
|
||||
ranges: selections
|
||||
.iter()
|
||||
.map(move |selection| {
|
||||
let offset = selection.start + delta;
|
||||
delta += 1;
|
||||
offset..offset
|
||||
}),
|
||||
),
|
||||
buffer.anchor_before(offset)..buffer.anchor_after(offset)
|
||||
})
|
||||
.collect(),
|
||||
pair,
|
||||
})
|
||||
} else {
|
||||
@ -1340,26 +1339,26 @@ impl Editor {
|
||||
}
|
||||
})
|
||||
});
|
||||
self.autoclose_stack.extend(new_autoclose_pair_state);
|
||||
self.autoclose_stack.extend(new_autoclose_pair);
|
||||
}
|
||||
|
||||
fn skip_autoclose_end(&mut self, text: &str, cx: &mut ViewContext<Self>) -> bool {
|
||||
let old_selections = self.selections::<usize>(cx).collect::<Vec<_>>();
|
||||
let autoclose_pair_state = if let Some(autoclose_pair_state) = self.autoclose_stack.last() {
|
||||
autoclose_pair_state
|
||||
let autoclose_pair = if let Some(autoclose_pair) = self.autoclose_stack.last() {
|
||||
autoclose_pair
|
||||
} else {
|
||||
return false;
|
||||
};
|
||||
if text != autoclose_pair_state.pair.end {
|
||||
if text != autoclose_pair.pair.end {
|
||||
return false;
|
||||
}
|
||||
|
||||
debug_assert_eq!(old_selections.len(), autoclose_pair_state.ranges.len());
|
||||
debug_assert_eq!(old_selections.len(), autoclose_pair.ranges.len());
|
||||
|
||||
let buffer = self.buffer.read(cx);
|
||||
if old_selections
|
||||
.iter()
|
||||
.zip(autoclose_pair_state.ranges.ranges::<usize>(buffer))
|
||||
.zip(autoclose_pair.ranges.iter().map(|r| r.to_offset(buffer)))
|
||||
.all(|(selection, autoclose_range)| {
|
||||
let autoclose_range_end = autoclose_range.end.to_offset(buffer);
|
||||
selection.is_empty() && selection.start == autoclose_range_end
|
||||
@ -2826,13 +2825,14 @@ impl Editor {
|
||||
|
||||
loop {
|
||||
let next_group = buffer
|
||||
.diagnostics_in_range::<_, usize>(search_start..buffer.len())
|
||||
.find_map(|(range, diagnostic)| {
|
||||
if diagnostic.is_primary
|
||||
.diagnostics_in_range(search_start..buffer.len())
|
||||
.find_map(|entry| {
|
||||
let range = entry.range.to_offset(buffer);
|
||||
if entry.diagnostic.is_primary
|
||||
&& !range.is_empty()
|
||||
&& Some(range.end) != active_primary_range.as_ref().map(|r| *r.end())
|
||||
{
|
||||
Some((range, diagnostic.group_id))
|
||||
Some((range, entry.diagnostic.group_id))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@ -2866,12 +2866,13 @@ impl Editor {
|
||||
let buffer = self.buffer.read(cx);
|
||||
let primary_range_start = active_diagnostics.primary_range.start.to_offset(buffer);
|
||||
let is_valid = buffer
|
||||
.diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone())
|
||||
.any(|(range, diagnostic)| {
|
||||
diagnostic.is_primary
|
||||
.diagnostics_in_range(active_diagnostics.primary_range.clone())
|
||||
.any(|entry| {
|
||||
let range = entry.range.to_offset(buffer);
|
||||
entry.diagnostic.is_primary
|
||||
&& !range.is_empty()
|
||||
&& range.start == primary_range_start
|
||||
&& diagnostic.message == active_diagnostics.primary_message
|
||||
&& entry.diagnostic.message == active_diagnostics.primary_message
|
||||
});
|
||||
|
||||
if is_valid != active_diagnostics.is_valid {
|
||||
@ -2901,16 +2902,17 @@ impl Editor {
|
||||
let mut primary_message = None;
|
||||
let mut group_end = Point::zero();
|
||||
let diagnostic_group = buffer
|
||||
.diagnostic_group::<Point>(group_id)
|
||||
.map(|(range, diagnostic)| {
|
||||
.diagnostic_group(group_id)
|
||||
.map(|entry| {
|
||||
let range = entry.range.to_point(buffer);
|
||||
if range.end > group_end {
|
||||
group_end = range.end;
|
||||
}
|
||||
if diagnostic.is_primary {
|
||||
if entry.diagnostic.is_primary {
|
||||
primary_range = Some(range.clone());
|
||||
primary_message = Some(diagnostic.message.clone());
|
||||
primary_message = Some(entry.diagnostic.message.clone());
|
||||
}
|
||||
(range, diagnostic.clone())
|
||||
(range, entry.diagnostic.clone())
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let primary_range = primary_range.unwrap();
|
||||
@ -3165,12 +3167,12 @@ impl Editor {
|
||||
self.add_selections_state = None;
|
||||
self.select_next_state = None;
|
||||
self.select_larger_syntax_node_stack.clear();
|
||||
while let Some(autoclose_pair_state) = self.autoclose_stack.last() {
|
||||
while let Some(autoclose_pair) = self.autoclose_stack.last() {
|
||||
let all_selections_inside_autoclose_ranges =
|
||||
if selections.len() == autoclose_pair_state.ranges.len() {
|
||||
if selections.len() == autoclose_pair.ranges.len() {
|
||||
selections
|
||||
.iter()
|
||||
.zip(autoclose_pair_state.ranges.ranges::<Point>(buffer))
|
||||
.zip(autoclose_pair.ranges.iter().map(|r| r.to_point(buffer)))
|
||||
.all(|(selection, autoclose_range)| {
|
||||
let head = selection.head().to_point(&*buffer);
|
||||
autoclose_range.start <= head && autoclose_range.end >= head
|
||||
|
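The editor-side change above stores the autoclose ranges as a plain `Vec<Range<Anchor>>` and resolves them on demand, instead of keeping an `AnchorRangeSet`. Below is a minimal self-contained sketch of that pattern; `Anchor` and `Snapshot` here are toy stand-ins, not Zed's real types (the real anchors track positions across edits).

```rust
use std::ops::Range;

// Toy stand-ins: the real Anchor resolves through a buffer snapshot and survives edits;
// this one just wraps a fixed offset so the sketch stays self-contained.
#[derive(Clone, Debug)]
struct Anchor(usize);

struct Snapshot;

impl Snapshot {
    fn anchor_before(&self, offset: usize) -> Anchor {
        Anchor(offset)
    }
    fn anchor_after(&self, offset: usize) -> Anchor {
        Anchor(offset)
    }
    fn to_offset(&self, anchor: &Anchor) -> usize {
        anchor.0
    }
}

// After this commit the autoclose state holds plain anchor ranges and callers
// resolve them with `to_offset` when needed.
struct BracketPairState {
    ranges: Vec<Range<Anchor>>,
}

fn main() {
    let snapshot = Snapshot;
    let state = BracketPairState {
        ranges: vec![snapshot.anchor_before(11)..snapshot.anchor_after(11)],
    };
    for range in &state.ranges {
        let (start, end) = (snapshot.to_offset(&range.start), snapshot.to_offset(&range.end));
        println!("autoclose pair tracked at {start}..{end}");
    }
}
```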
@ -5,7 +5,7 @@ use gpui::{
|
||||
MutableAppContext, RenderContext, Subscription, Task, View, ViewContext, ViewHandle,
|
||||
WeakModelHandle,
|
||||
};
|
||||
use language::{Buffer, Diagnostic, File as _};
|
||||
use language::{AnchorRangeExt, Buffer, Diagnostic, File as _};
|
||||
use postage::watch;
|
||||
use project::{ProjectPath, Worktree};
|
||||
use std::fmt::Write;
|
||||
@ -314,11 +314,11 @@ impl DiagnosticMessage {
|
||||
|
||||
fn update(&mut self, editor: ViewHandle<Editor>, cx: &mut ViewContext<Self>) {
|
||||
let editor = editor.read(cx);
|
||||
let cursor_position = editor.newest_selection(cx).head();
|
||||
let new_diagnostic = editor
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.diagnostics_in_range::<usize, usize>(cursor_position..cursor_position)
|
||||
let cursor_position = editor.newest_selection::<usize>(cx).head();
|
||||
let buffer = editor.buffer().read(cx);
|
||||
let new_diagnostic = buffer
|
||||
.diagnostics_in_range(cursor_position..cursor_position)
|
||||
.map(|entry| (entry.range.to_offset(buffer), &entry.diagnostic))
|
||||
.filter(|(range, _)| !range.is_empty())
|
||||
.min_by_key(|(range, diagnostic)| (diagnostic.severity, range.len()))
|
||||
.map(|(_, diagnostic)| diagnostic.clone());
|
||||
|
@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "language"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
path = "src/language.rs"
|
||||
@ -15,11 +15,12 @@ test-support = [
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
text = { path = "../text" }
|
||||
clock = { path = "../clock" }
|
||||
gpui = { path = "../gpui" }
|
||||
lsp = { path = "../lsp" }
|
||||
rpc = { path = "../rpc" }
|
||||
sum_tree = { path = "../sum_tree" }
|
||||
text = { path = "../text" }
|
||||
theme = { path = "../theme" }
|
||||
util = { path = "../util" }
|
||||
anyhow = "1.0.38"
|
||||
|
@ -1,4 +1,6 @@
|
||||
use crate::diagnostic_set::DiagnosticEntry;
|
||||
pub use crate::{
|
||||
diagnostic_set::DiagnosticSet,
|
||||
highlight_map::{HighlightId, HighlightMap},
|
||||
proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
|
||||
PLAIN_TEXT,
|
||||
@ -28,6 +30,7 @@ use std::{
|
||||
time::{Duration, Instant, SystemTime, UNIX_EPOCH},
|
||||
vec,
|
||||
};
|
||||
use text::operation_queue::OperationQueue;
|
||||
pub use text::{Buffer as TextBuffer, Operation as _, *};
|
||||
use theme::SyntaxTheme;
|
||||
use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
|
||||
@ -61,9 +64,10 @@ pub struct Buffer {
|
||||
syntax_tree: Mutex<Option<SyntaxTree>>,
|
||||
parsing_in_background: bool,
|
||||
parse_count: usize,
|
||||
diagnostics: AnchorRangeMultimap<Diagnostic>,
|
||||
diagnostics: DiagnosticSet,
|
||||
diagnostics_update_count: usize,
|
||||
language_server: Option<LanguageServerState>,
|
||||
deferred_ops: OperationQueue<Operation>,
|
||||
#[cfg(test)]
|
||||
pub(crate) operations: Vec<Operation>,
|
||||
}
|
||||
@ -71,7 +75,7 @@ pub struct Buffer {
|
||||
pub struct Snapshot {
|
||||
text: text::Snapshot,
|
||||
tree: Option<Tree>,
|
||||
diagnostics: AnchorRangeMultimap<Diagnostic>,
|
||||
diagnostics: DiagnosticSet,
|
||||
diagnostics_update_count: usize,
|
||||
is_parsing: bool,
|
||||
language: Option<Arc<Language>>,
|
||||
@ -101,10 +105,13 @@ struct LanguageServerSnapshot {
|
||||
path: Arc<Path>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum Operation {
|
||||
Buffer(text::Operation),
|
||||
UpdateDiagnostics(AnchorRangeMultimap<Diagnostic>),
|
||||
UpdateDiagnostics {
|
||||
diagnostics: Arc<[DiagnosticEntry]>,
|
||||
lamport_timestamp: clock::Lamport,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
@ -173,8 +180,8 @@ struct SyntaxTree {
|
||||
struct AutoindentRequest {
|
||||
selection_set_ids: HashSet<SelectionSetId>,
|
||||
before_edit: Snapshot,
|
||||
edited: AnchorSet,
|
||||
inserted: Option<AnchorRangeSet>,
|
||||
edited: Vec<Anchor>,
|
||||
inserted: Option<Vec<Range<Anchor>>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -275,9 +282,11 @@ impl Buffer {
|
||||
buffer.add_raw_selection_set(set.id, set);
|
||||
}
|
||||
let mut this = Self::build(buffer, file);
|
||||
if let Some(diagnostics) = message.diagnostics {
|
||||
this.apply_diagnostic_update(proto::deserialize_diagnostics(diagnostics), cx);
|
||||
}
|
||||
this.apply_diagnostic_update(
|
||||
Arc::from(proto::deserialize_diagnostics(message.diagnostics)),
|
||||
cx,
|
||||
);
|
||||
|
||||
Ok(this)
|
||||
}
|
||||
|
||||
@ -294,7 +303,7 @@ impl Buffer {
|
||||
.selection_sets()
|
||||
.map(|(_, set)| proto::serialize_selection_set(set))
|
||||
.collect(),
|
||||
diagnostics: Some(proto::serialize_diagnostics(&self.diagnostics)),
|
||||
diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
|
||||
}
|
||||
}
|
||||
|
||||
@ -331,6 +340,7 @@ impl Buffer {
|
||||
diagnostics: Default::default(),
|
||||
diagnostics_update_count: 0,
|
||||
language_server: None,
|
||||
deferred_ops: OperationQueue::new(),
|
||||
#[cfg(test)]
|
||||
operations: Default::default(),
|
||||
}
|
||||
@ -690,6 +700,8 @@ impl Buffer {
|
||||
mut diagnostics: Vec<lsp::Diagnostic>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Result<Operation> {
|
||||
diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));
|
||||
|
||||
let version = version.map(|version| version as usize);
|
||||
let content = if let Some(version) = version {
|
||||
let language_server = self.language_server.as_mut().unwrap();
|
||||
@ -710,81 +722,79 @@ impl Buffer {
|
||||
.and_then(|language| language.disk_based_diagnostic_sources())
|
||||
.unwrap_or(&empty_set);
|
||||
|
||||
diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));
|
||||
self.diagnostics = {
|
||||
let mut edits_since_save = content
|
||||
.edits_since::<PointUtf16>(&self.saved_version)
|
||||
.peekable();
|
||||
let mut last_edit_old_end = PointUtf16::zero();
|
||||
let mut last_edit_new_end = PointUtf16::zero();
|
||||
let mut group_ids_by_diagnostic_range = HashMap::new();
|
||||
let mut diagnostics_by_group_id = HashMap::new();
|
||||
let mut next_group_id = 0;
|
||||
'outer: for diagnostic in &diagnostics {
|
||||
let mut start = diagnostic.range.start.to_point_utf16();
|
||||
let mut end = diagnostic.range.end.to_point_utf16();
|
||||
let source = diagnostic.source.as_ref();
|
||||
let code = diagnostic.code.as_ref();
|
||||
let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref())
|
||||
.find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
|
||||
.copied()
|
||||
.unwrap_or_else(|| {
|
||||
let group_id = post_inc(&mut next_group_id);
|
||||
for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) {
|
||||
group_ids_by_diagnostic_range.insert((source, code, range), group_id);
|
||||
}
|
||||
group_id
|
||||
});
|
||||
|
||||
if diagnostic
|
||||
.source
|
||||
.as_ref()
|
||||
.map_or(false, |source| disk_based_sources.contains(source))
|
||||
{
|
||||
while let Some(edit) = edits_since_save.peek() {
|
||||
if edit.old.end <= start {
|
||||
last_edit_old_end = edit.old.end;
|
||||
last_edit_new_end = edit.new.end;
|
||||
edits_since_save.next();
|
||||
} else if edit.old.start <= end && edit.old.end >= start {
|
||||
continue 'outer;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
let mut edits_since_save = content
|
||||
.edits_since::<PointUtf16>(&self.saved_version)
|
||||
.peekable();
|
||||
let mut last_edit_old_end = PointUtf16::zero();
|
||||
let mut last_edit_new_end = PointUtf16::zero();
|
||||
let mut group_ids_by_diagnostic_range = HashMap::new();
|
||||
let mut diagnostics_by_group_id = HashMap::new();
|
||||
let mut next_group_id = 0;
|
||||
'outer: for diagnostic in &diagnostics {
|
||||
let mut start = diagnostic.range.start.to_point_utf16();
|
||||
let mut end = diagnostic.range.end.to_point_utf16();
|
||||
let source = diagnostic.source.as_ref();
|
||||
let code = diagnostic.code.as_ref();
|
||||
let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref())
|
||||
.find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
|
||||
.copied()
|
||||
.unwrap_or_else(|| {
|
||||
let group_id = post_inc(&mut next_group_id);
|
||||
for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) {
|
||||
group_ids_by_diagnostic_range.insert((source, code, range), group_id);
|
||||
}
|
||||
group_id
|
||||
});
|
||||
|
||||
start = last_edit_new_end + (start - last_edit_old_end);
|
||||
end = last_edit_new_end + (end - last_edit_old_end);
|
||||
}
|
||||
|
||||
let mut range = content.clip_point_utf16(start, Bias::Left)
|
||||
..content.clip_point_utf16(end, Bias::Right);
|
||||
if range.start == range.end {
|
||||
range.end.column += 1;
|
||||
range.end = content.clip_point_utf16(range.end, Bias::Right);
|
||||
if range.start == range.end && range.end.column > 0 {
|
||||
range.start.column -= 1;
|
||||
range.start = content.clip_point_utf16(range.start, Bias::Left);
|
||||
if diagnostic
|
||||
.source
|
||||
.as_ref()
|
||||
.map_or(false, |source| disk_based_sources.contains(source))
|
||||
{
|
||||
while let Some(edit) = edits_since_save.peek() {
|
||||
if edit.old.end <= start {
|
||||
last_edit_old_end = edit.old.end;
|
||||
last_edit_new_end = edit.new.end;
|
||||
edits_since_save.next();
|
||||
} else if edit.old.start <= end && edit.old.end >= start {
|
||||
continue 'outer;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
diagnostics_by_group_id
|
||||
.entry(group_id)
|
||||
.or_insert(Vec::new())
|
||||
.push((
|
||||
range,
|
||||
Diagnostic {
|
||||
severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
|
||||
message: diagnostic.message.clone(),
|
||||
group_id,
|
||||
is_primary: false,
|
||||
},
|
||||
));
|
||||
start = last_edit_new_end + (start - last_edit_old_end);
|
||||
end = last_edit_new_end + (end - last_edit_old_end);
|
||||
}
|
||||
|
||||
content.anchor_range_multimap(
|
||||
Bias::Left,
|
||||
Bias::Right,
|
||||
let mut range = content.clip_point_utf16(start, Bias::Left)
|
||||
..content.clip_point_utf16(end, Bias::Right);
|
||||
if range.start == range.end {
|
||||
range.end.column += 1;
|
||||
range.end = content.clip_point_utf16(range.end, Bias::Right);
|
||||
if range.start == range.end && range.end.column > 0 {
|
||||
range.start.column -= 1;
|
||||
range.start = content.clip_point_utf16(range.start, Bias::Left);
|
||||
}
|
||||
}
|
||||
|
||||
diagnostics_by_group_id
|
||||
.entry(group_id)
|
||||
.or_insert(Vec::new())
|
||||
.push((
|
||||
range,
|
||||
Diagnostic {
|
||||
severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
|
||||
message: diagnostic.message.clone(),
|
||||
group_id,
|
||||
is_primary: false,
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
drop(edits_since_save);
|
||||
self.diagnostics
|
||||
.reset(
|
||||
diagnostics_by_group_id
|
||||
.into_values()
|
||||
.flat_map(|mut diagnostics| {
|
||||
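The clipping logic in the hunk above widens an empty LSP diagnostic range so it covers at least one character: extend the end by one column, and if clipping pulls it back (end of line), move the start back instead. Here is a small stand-alone illustration of that rule, using plain (row, column) points and a fixed line length in place of Zed's `PointUtf16` and bias-aware clipping.

```rust
// Simplified clip: limit a column to the line length (the real code clips with a Bias).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Point {
    row: u32,
    column: u32,
}

fn clip(point: Point, line_len: impl Fn(u32) -> u32) -> Point {
    Point { row: point.row, column: point.column.min(line_len(point.row)) }
}

// Mirror of the widening rule from the diff: grow the end first, fall back to the start.
fn widen_if_empty(mut range: (Point, Point), line_len: impl Fn(u32) -> u32 + Copy) -> (Point, Point) {
    if range.0 == range.1 {
        range.1.column += 1;
        range.1 = clip(range.1, line_len);
        if range.0 == range.1 && range.1.column > 0 {
            range.0.column -= 1;
            range.0 = clip(range.0, line_len);
        }
    }
    range
}

fn main() {
    let line_len = |_row: u32| 5u32; // every line is 5 columns long in this toy buffer

    // Mid-line: the end moves forward one column.
    let mid = widen_if_empty((Point { row: 0, column: 2 }, Point { row: 0, column: 2 }), line_len);
    assert_eq!((mid.0.column, mid.1.column), (2, 3));

    // At end of line: extending forward clips back, so the start moves backward instead.
    let eol = widen_if_empty((Point { row: 0, column: 5 }, Point { row: 0, column: 5 }), line_len);
    assert_eq!((eol.0.column, eol.1.column), (4, 5));
}
```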
@ -793,8 +803,7 @@ impl Buffer {
|
||||
primary_diagnostic.1.is_primary = true;
|
||||
diagnostics
|
||||
}),
|
||||
)
|
||||
};
|
||||
);
|
||||
|
||||
if let Some(version) = version {
|
||||
let language_server = self.language_server.as_mut().unwrap();
|
||||
@ -811,32 +820,24 @@ impl Buffer {
|
||||
self.diagnostics_update_count += 1;
|
||||
cx.notify();
|
||||
cx.emit(Event::DiagnosticsUpdated);
|
||||
Ok(Operation::UpdateDiagnostics(self.diagnostics.clone()))
|
||||
Ok(Operation::UpdateDiagnostics {
|
||||
diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
|
||||
lamport_timestamp: self.lamport_timestamp(),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn diagnostics_in_range<'a, T, O>(
|
||||
pub fn diagnostics_in_range<'a, T>(
|
||||
&'a self,
|
||||
search_range: Range<T>,
|
||||
) -> impl Iterator<Item = (Range<O>, &Diagnostic)> + 'a
|
||||
) -> impl Iterator<Item = &DiagnosticEntry>
|
||||
where
|
||||
T: 'a + ToOffset,
|
||||
O: 'a + FromAnchor,
|
||||
{
|
||||
self.diagnostics
|
||||
.intersecting_ranges(search_range, self, true)
|
||||
.map(move |(_, range, diagnostic)| (range, diagnostic))
|
||||
self.diagnostics.range(search_range, self, true)
|
||||
}
|
||||
|
||||
pub fn diagnostic_group<'a, O>(
|
||||
&'a self,
|
||||
group_id: usize,
|
||||
) -> impl Iterator<Item = (Range<O>, &Diagnostic)> + 'a
|
||||
where
|
||||
O: 'a + FromAnchor,
|
||||
{
|
||||
self.diagnostics
|
||||
.filter(self, move |diagnostic| diagnostic.group_id == group_id)
|
||||
.map(move |(_, range, diagnostic)| (range, diagnostic))
|
||||
pub fn diagnostic_group(&self, group_id: usize) -> impl Iterator<Item = &DiagnosticEntry> {
|
||||
self.diagnostics.group(group_id)
|
||||
}
|
||||
|
||||
pub fn diagnostics_update_count(&self) -> usize {
|
||||
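With this change, `diagnostics_in_range` yields `&DiagnosticEntry` values and the caller resolves the anchors itself (`entry.range.to_offset(buffer)` in the hunks above and in the test updates later in this diff). A toy sketch of that calling convention follows; `Anchor`, `Diagnostic`, and `DiagnosticEntry` are simplified stand-ins for the real types.

```rust
use std::ops::Range;

// Toy anchor: a fixed offset, unlike the real Anchor which resolves against a snapshot.
#[derive(Clone, Debug)]
struct Anchor(usize);

#[derive(Clone, Debug)]
struct Diagnostic {
    message: String,
    is_primary: bool,
    group_id: usize,
}

// Entries pair an anchor range with the diagnostic, as in the new DiagnosticSet.
#[derive(Clone, Debug)]
struct DiagnosticEntry {
    range: Range<Anchor>,
    diagnostic: Diagnostic,
}

fn main() {
    let entries = vec![DiagnosticEntry {
        range: Anchor(3)..Anchor(9),
        diagnostic: Diagnostic {
            message: "undefined variable".into(),
            is_primary: true,
            group_id: 0,
        },
    }];

    // Callers now map entries to whatever coordinates they need, mirroring
    // `entry.range.to_offset(buffer)`; the toy anchors resolve trivially.
    let resolved: Vec<(Range<usize>, &str)> = entries
        .iter()
        .map(|entry| {
            (
                entry.range.start.0..entry.range.end.0,
                entry.diagnostic.message.as_str(),
            )
        })
        .collect();
    assert_eq!(resolved[0].0, 3..9);
    assert!(entries[0].diagnostic.is_primary && entries[0].diagnostic.group_id == 0);
}
```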
@ -879,13 +880,13 @@ impl Buffer {
|
||||
for request in autoindent_requests {
|
||||
let old_to_new_rows = request
|
||||
.edited
|
||||
.iter::<Point>(&request.before_edit)
|
||||
.map(|point| point.row)
|
||||
.iter()
|
||||
.map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
|
||||
.zip(
|
||||
request
|
||||
.edited
|
||||
.iter::<Point>(&snapshot)
|
||||
.map(|point| point.row),
|
||||
.iter()
|
||||
.map(|anchor| anchor.summary::<Point>(&snapshot).row),
|
||||
)
|
||||
.collect::<BTreeMap<u32, u32>>();
|
||||
|
||||
@ -947,7 +948,8 @@ impl Buffer {
|
||||
if let Some(inserted) = request.inserted.as_ref() {
|
||||
let inserted_row_ranges = contiguous_ranges(
|
||||
inserted
|
||||
.ranges::<Point>(&snapshot)
|
||||
.iter()
|
||||
.map(|range| range.to_point(&snapshot))
|
||||
.flat_map(|range| range.start.row..range.end.row + 1),
|
||||
max_rows_between_yields,
|
||||
);
|
||||
@ -1264,17 +1266,17 @@ impl Buffer {
|
||||
self.pending_autoindent.take();
|
||||
let autoindent_request = if autoindent && self.language.is_some() {
|
||||
let before_edit = self.snapshot();
|
||||
let edited = self.anchor_set(
|
||||
Bias::Left,
|
||||
ranges.iter().filter_map(|range| {
|
||||
let edited = ranges
|
||||
.iter()
|
||||
.filter_map(|range| {
|
||||
let start = range.start.to_point(self);
|
||||
if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
|
||||
None
|
||||
} else {
|
||||
Some(range.start)
|
||||
Some(self.anchor_before(range.start))
|
||||
}
|
||||
}),
|
||||
);
|
||||
})
|
||||
.collect();
|
||||
Some((before_edit, edited))
|
||||
} else {
|
||||
None
|
||||
@ -1289,17 +1291,19 @@ impl Buffer {
|
||||
let mut inserted = None;
|
||||
if let Some(first_newline_ix) = first_newline_ix {
|
||||
let mut delta = 0isize;
|
||||
inserted = Some(self.anchor_range_set(
|
||||
Bias::Left,
|
||||
Bias::Right,
|
||||
ranges.iter().map(|range| {
|
||||
let start = (delta + range.start as isize) as usize + first_newline_ix + 1;
|
||||
let end = (delta + range.start as isize) as usize + new_text_len;
|
||||
delta +=
|
||||
(range.end as isize - range.start as isize) + new_text_len as isize;
|
||||
start..end
|
||||
}),
|
||||
));
|
||||
inserted = Some(
|
||||
ranges
|
||||
.iter()
|
||||
.map(|range| {
|
||||
let start =
|
||||
(delta + range.start as isize) as usize + first_newline_ix + 1;
|
||||
let end = (delta + range.start as isize) as usize + new_text_len;
|
||||
delta +=
|
||||
(range.end as isize - range.start as isize) + new_text_len as isize;
|
||||
self.anchor_before(start)..self.anchor_after(end)
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
}
|
||||
|
||||
let selection_set_ids = self
|
||||
@ -1401,17 +1405,23 @@ impl Buffer {
|
||||
self.pending_autoindent.take();
|
||||
let was_dirty = self.is_dirty();
|
||||
let old_version = self.version.clone();
|
||||
let mut deferred_ops = Vec::new();
|
||||
let buffer_ops = ops
|
||||
.into_iter()
|
||||
.filter_map(|op| match op {
|
||||
Operation::Buffer(op) => Some(op),
|
||||
Operation::UpdateDiagnostics(diagnostics) => {
|
||||
self.apply_diagnostic_update(diagnostics, cx);
|
||||
_ => {
|
||||
if self.can_apply_op(&op) {
|
||||
self.apply_op(op, cx);
|
||||
} else {
|
||||
deferred_ops.push(op);
|
||||
}
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
self.text.apply_ops(buffer_ops)?;
|
||||
self.flush_deferred_ops(cx);
|
||||
self.did_edit(&old_version, was_dirty, cx);
|
||||
// Notify independently of whether the buffer was edited as the operations could include a
|
||||
// selection update.
|
||||
@ -1419,12 +1429,49 @@ impl Buffer {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
|
||||
let mut deferred_ops = Vec::new();
|
||||
for op in self.deferred_ops.drain().iter().cloned() {
|
||||
if self.can_apply_op(&op) {
|
||||
self.apply_op(op, cx);
|
||||
} else {
|
||||
deferred_ops.push(op);
|
||||
}
|
||||
}
|
||||
self.deferred_ops.insert(deferred_ops);
|
||||
}
|
||||
|
||||
fn can_apply_op(&self, operation: &Operation) -> bool {
|
||||
match operation {
|
||||
Operation::Buffer(_) => {
|
||||
unreachable!("buffer operations should never be applied at this layer")
|
||||
}
|
||||
Operation::UpdateDiagnostics { diagnostics, .. } => {
|
||||
diagnostics.iter().all(|diagnostic| {
|
||||
self.text.can_resolve(&diagnostic.range.start)
|
||||
&& self.text.can_resolve(&diagnostic.range.end)
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
|
||||
match operation {
|
||||
Operation::Buffer(_) => {
|
||||
unreachable!("buffer operations should never be applied at this layer")
|
||||
}
|
||||
Operation::UpdateDiagnostics { diagnostics, .. } => {
|
||||
self.apply_diagnostic_update(diagnostics, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn apply_diagnostic_update(
|
||||
&mut self,
|
||||
diagnostics: AnchorRangeMultimap<Diagnostic>,
|
||||
diagnostics: Arc<[DiagnosticEntry]>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
self.diagnostics = diagnostics;
|
||||
self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self);
|
||||
self.diagnostics_update_count += 1;
|
||||
cx.notify();
|
||||
}
|
||||
@ -1632,19 +1679,16 @@ impl Snapshot {
|
||||
let mut highlights = None;
|
||||
let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
|
||||
if let Some(theme) = theme {
|
||||
for (_, range, diagnostic) in
|
||||
self.diagnostics
|
||||
.intersecting_ranges(range.clone(), self, true)
|
||||
{
|
||||
for entry in self.diagnostics.range(range.clone(), self, true) {
|
||||
diagnostic_endpoints.push(DiagnosticEndpoint {
|
||||
offset: range.start,
|
||||
offset: entry.range.start.to_offset(self),
|
||||
is_start: true,
|
||||
severity: diagnostic.severity,
|
||||
severity: entry.diagnostic.severity,
|
||||
});
|
||||
diagnostic_endpoints.push(DiagnosticEndpoint {
|
||||
offset: range.end,
|
||||
offset: entry.range.end.to_offset(self),
|
||||
is_start: false,
|
||||
severity: diagnostic.severity,
|
||||
severity: entry.diagnostic.severity,
|
||||
});
|
||||
}
|
||||
diagnostic_endpoints
|
||||
@ -1939,6 +1983,19 @@ impl ToPointUtf16 for lsp::Position {
|
||||
}
|
||||
}
|
||||
|
||||
impl operation_queue::Operation for Operation {
|
||||
fn lamport_timestamp(&self) -> clock::Lamport {
|
||||
match self {
|
||||
Operation::Buffer(_) => {
|
||||
unreachable!("buffer operations should never be deferred at this layer")
|
||||
}
|
||||
Operation::UpdateDiagnostics {
|
||||
lamport_timestamp, ..
|
||||
} => *lamport_timestamp,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn diagnostic_ranges<'a>(
|
||||
diagnostic: &'a lsp::Diagnostic,
|
||||
abs_path: Option<&'a Path>,
|
||||
@ -1968,7 +2025,7 @@ fn diagnostic_ranges<'a>(
|
||||
}
|
||||
|
||||
pub fn contiguous_ranges(
|
||||
values: impl IntoIterator<Item = u32>,
|
||||
values: impl Iterator<Item = u32>,
|
||||
max_len: usize,
|
||||
) -> impl Iterator<Item = Range<u32>> {
|
||||
let mut values = values.into_iter();
|
||||
|
New file: crates/language/src/diagnostic_set.rs (141 lines)
@ -0,0 +1,141 @@
|
||||
use crate::Diagnostic;
|
||||
use std::{
|
||||
cmp::{Ordering, Reverse},
|
||||
iter,
|
||||
ops::Range,
|
||||
};
|
||||
use sum_tree::{self, Bias, SumTree};
|
||||
use text::{Anchor, PointUtf16, ToOffset};
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct DiagnosticSet {
|
||||
diagnostics: SumTree<DiagnosticEntry>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct DiagnosticEntry {
|
||||
pub range: Range<Anchor>,
|
||||
pub diagnostic: Diagnostic,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Summary {
|
||||
start: Anchor,
|
||||
end: Anchor,
|
||||
min_start: Anchor,
|
||||
max_end: Anchor,
|
||||
count: usize,
|
||||
}
|
||||
|
||||
impl DiagnosticSet {
|
||||
pub fn from_sorted_entries<I>(iter: I, buffer: &text::Snapshot) -> Self
|
||||
where
|
||||
I: IntoIterator<Item = DiagnosticEntry>,
|
||||
{
|
||||
Self {
|
||||
diagnostics: SumTree::from_iter(iter, buffer),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reset<I>(&mut self, iter: I)
|
||||
where
|
||||
I: IntoIterator<Item = (Range<PointUtf16>, Diagnostic)>,
|
||||
{
|
||||
let mut entries = iter.into_iter().collect::<Vec<_>>();
|
||||
entries.sort_unstable_by_key(|(range, _)| (range.start, Reverse(range.end)));
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> impl Iterator<Item = &DiagnosticEntry> {
|
||||
self.diagnostics.iter()
|
||||
}
|
||||
|
||||
pub fn range<'a, T>(
|
||||
&'a self,
|
||||
range: Range<T>,
|
||||
buffer: &'a text::Snapshot,
|
||||
inclusive: bool,
|
||||
) -> impl Iterator<Item = &'a DiagnosticEntry>
|
||||
where
|
||||
T: 'a + ToOffset,
|
||||
{
|
||||
let end_bias = if inclusive { Bias::Right } else { Bias::Left };
|
||||
let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias);
|
||||
let mut cursor = self.diagnostics.filter::<_, ()>(
|
||||
{
|
||||
move |summary: &Summary| {
|
||||
let start_cmp = range.start.cmp(&summary.max_end, buffer).unwrap();
|
||||
let end_cmp = range.end.cmp(&summary.min_start, buffer).unwrap();
|
||||
if inclusive {
|
||||
start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
|
||||
} else {
|
||||
start_cmp == Ordering::Less && end_cmp == Ordering::Greater
|
||||
}
|
||||
}
|
||||
},
|
||||
buffer,
|
||||
);
|
||||
|
||||
iter::from_fn({
|
||||
move || {
|
||||
if let Some(diagnostic) = cursor.item() {
|
||||
cursor.next(buffer);
|
||||
Some(diagnostic)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn group(&self, group_id: usize) -> impl Iterator<Item = &DiagnosticEntry> {
|
||||
self.iter()
|
||||
.filter(move |entry| entry.diagnostic.group_id == group_id)
|
||||
}
|
||||
}
|
||||
|
||||
impl sum_tree::Item for DiagnosticEntry {
|
||||
type Summary = Summary;
|
||||
|
||||
fn summary(&self) -> Self::Summary {
|
||||
Summary {
|
||||
start: self.range.start.clone(),
|
||||
end: self.range.end.clone(),
|
||||
min_start: self.range.start.clone(),
|
||||
max_end: self.range.end.clone(),
|
||||
count: 1,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Summary {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
start: Anchor::min(),
|
||||
end: Anchor::max(),
|
||||
min_start: Anchor::max(),
|
||||
max_end: Anchor::min(),
|
||||
count: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sum_tree::Summary for Summary {
|
||||
type Context = text::Snapshot;
|
||||
|
||||
fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
|
||||
if other
|
||||
.min_start
|
||||
.cmp(&self.min_start, buffer)
|
||||
.unwrap()
|
||||
.is_lt()
|
||||
{
|
||||
self.min_start = other.min_start.clone();
|
||||
}
|
||||
if other.max_end.cmp(&self.max_end, buffer).unwrap().is_gt() {
|
||||
self.max_end = other.max_end.clone();
|
||||
}
|
||||
self.start = other.start.clone();
|
||||
self.end = other.end.clone();
|
||||
self.count += other.count;
|
||||
}
|
||||
}
|
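`DiagnosticSet::range` above prunes whole subtrees using the summary's `min_start`/`max_end`: a subtree can only contain matches if the query range overlaps `[min_start, max_end]`, with the boundary counted only in inclusive mode. Below is a tiny stand-alone version of that test over plain `usize` offsets (the real anchors compare through a buffer snapshot, hence the `.cmp(..., buffer)` calls in the diff).

```rust
use std::ops::Range;

// Per-subtree summary, as maintained by the SumTree above: the minimum start and
// maximum end of all ranges inside the subtree.
#[derive(Clone, Copy, Debug)]
struct Summary {
    min_start: usize,
    max_end: usize,
}

// The filter predicate from DiagnosticSet::range, expressed over plain offsets.
fn may_contain_match(query: &Range<usize>, summary: &Summary, inclusive: bool) -> bool {
    if inclusive {
        query.start <= summary.max_end && query.end >= summary.min_start
    } else {
        query.start < summary.max_end && query.end > summary.min_start
    }
}

fn main() {
    let summary = Summary { min_start: 10, max_end: 20 };
    assert!(may_contain_match(&(15..25), &summary, true));
    assert!(!may_contain_match(&(30..40), &summary, true));
    // An empty query touching the boundary only matches in inclusive mode.
    assert!(may_contain_match(&(20..20), &summary, true));
    assert!(!may_contain_match(&(20..20), &summary, false));
}
```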
@ -1,4 +1,5 @@
|
||||
mod buffer;
|
||||
mod diagnostic_set;
|
||||
mod highlight_map;
|
||||
pub mod proto;
|
||||
#[cfg(test)]
|
||||
|
@ -1,6 +1,6 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::{Diagnostic, Operation};
|
||||
use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, Operation};
|
||||
use anyhow::{anyhow, Result};
|
||||
use clock::ReplicaId;
|
||||
use lsp::DiagnosticSeverity;
|
||||
@ -49,14 +49,13 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
|
||||
replica_id: set_id.replica_id as u32,
|
||||
local_timestamp: set_id.value,
|
||||
lamport_timestamp: lamport_timestamp.value,
|
||||
version: selections.version().into(),
|
||||
selections: selections
|
||||
.full_offset_ranges()
|
||||
.map(|(range, state)| proto::Selection {
|
||||
id: state.id as u64,
|
||||
start: range.start.0 as u64,
|
||||
end: range.end.0 as u64,
|
||||
reversed: state.reversed,
|
||||
.iter()
|
||||
.map(|selection| proto::Selection {
|
||||
id: selection.id as u64,
|
||||
start: Some(serialize_anchor(&selection.start)),
|
||||
end: Some(serialize_anchor(&selection.end)),
|
||||
reversed: selection.reversed,
|
||||
})
|
||||
.collect(),
|
||||
}),
|
||||
@ -78,9 +77,14 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
|
||||
lamport_timestamp: lamport_timestamp.value,
|
||||
},
|
||||
),
|
||||
Operation::UpdateDiagnostics(diagnostic_set) => {
|
||||
proto::operation::Variant::UpdateDiagnostics(serialize_diagnostics(diagnostic_set))
|
||||
}
|
||||
Operation::UpdateDiagnostics {
|
||||
diagnostics,
|
||||
lamport_timestamp,
|
||||
} => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics {
|
||||
replica_id: lamport_timestamp.replica_id as u32,
|
||||
lamport_timestamp: lamport_timestamp.value,
|
||||
diagnostics: serialize_diagnostics(diagnostics.iter()),
|
||||
}),
|
||||
}),
|
||||
}
|
||||
}
|
||||
@ -105,44 +109,54 @@ pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::
|
||||
}
|
||||
|
||||
pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet {
|
||||
let version = set.selections.version();
|
||||
let entries = set.selections.full_offset_ranges();
|
||||
proto::SelectionSet {
|
||||
replica_id: set.id.replica_id as u32,
|
||||
lamport_timestamp: set.id.value as u32,
|
||||
is_active: set.active,
|
||||
version: version.into(),
|
||||
selections: entries
|
||||
.map(|(range, state)| proto::Selection {
|
||||
id: state.id as u64,
|
||||
start: range.start.0 as u64,
|
||||
end: range.end.0 as u64,
|
||||
reversed: state.reversed,
|
||||
selections: set
|
||||
.selections
|
||||
.iter()
|
||||
.map(|selection| proto::Selection {
|
||||
id: selection.id as u64,
|
||||
start: Some(serialize_anchor(&selection.start)),
|
||||
end: Some(serialize_anchor(&selection.end)),
|
||||
reversed: selection.reversed,
|
||||
})
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn serialize_diagnostics(map: &AnchorRangeMultimap<Diagnostic>) -> proto::DiagnosticSet {
|
||||
proto::DiagnosticSet {
|
||||
version: map.version().into(),
|
||||
diagnostics: map
|
||||
.full_offset_ranges()
|
||||
.map(|(range, diagnostic)| proto::Diagnostic {
|
||||
start: range.start.0 as u64,
|
||||
end: range.end.0 as u64,
|
||||
message: diagnostic.message.clone(),
|
||||
severity: match diagnostic.severity {
|
||||
DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
|
||||
DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
|
||||
DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
|
||||
DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
|
||||
_ => proto::diagnostic::Severity::None,
|
||||
} as i32,
|
||||
group_id: diagnostic.group_id as u64,
|
||||
is_primary: diagnostic.is_primary,
|
||||
})
|
||||
.collect(),
|
||||
pub fn serialize_diagnostics<'a>(
|
||||
diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry>,
|
||||
) -> Vec<proto::Diagnostic> {
|
||||
diagnostics
|
||||
.into_iter()
|
||||
.map(|entry| proto::Diagnostic {
|
||||
start: Some(serialize_anchor(&entry.range.start)),
|
||||
end: Some(serialize_anchor(&entry.range.end)),
|
||||
message: entry.diagnostic.message.clone(),
|
||||
severity: match entry.diagnostic.severity {
|
||||
DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
|
||||
DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
|
||||
DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
|
||||
DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
|
||||
_ => proto::diagnostic::Severity::None,
|
||||
} as i32,
|
||||
group_id: entry.diagnostic.group_id as u64,
|
||||
is_primary: entry.diagnostic.is_primary,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
|
||||
proto::Anchor {
|
||||
replica_id: anchor.timestamp.replica_id as u32,
|
||||
local_timestamp: anchor.timestamp.value,
|
||||
offset: anchor.offset as u64,
|
||||
bias: match anchor.bias {
|
||||
Bias::Left => proto::Bias::Left as i32,
|
||||
Bias::Right => proto::Bias::Right as i32,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@ -187,27 +201,19 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
|
||||
},
|
||||
}),
|
||||
proto::operation::Variant::UpdateSelections(message) => {
|
||||
let version = message.version.into();
|
||||
let entries = message
|
||||
let selections = message
|
||||
.selections
|
||||
.iter()
|
||||
.map(|selection| {
|
||||
let range = FullOffset(selection.start as usize)
|
||||
..FullOffset(selection.end as usize);
|
||||
let state = SelectionState {
|
||||
.into_iter()
|
||||
.filter_map(|selection| {
|
||||
Some(Selection {
|
||||
id: selection.id as usize,
|
||||
start: deserialize_anchor(selection.start?)?,
|
||||
end: deserialize_anchor(selection.end?)?,
|
||||
reversed: selection.reversed,
|
||||
goal: SelectionGoal::None,
|
||||
};
|
||||
(range, state)
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
let selections = AnchorRangeMap::from_full_offset_ranges(
|
||||
version,
|
||||
Bias::Left,
|
||||
Bias::Left,
|
||||
entries,
|
||||
);
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Operation::Buffer(text::Operation::UpdateSelections {
|
||||
set_id: clock::Lamport {
|
||||
@ -245,9 +251,13 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
|
||||
},
|
||||
})
|
||||
}
|
||||
proto::operation::Variant::UpdateDiagnostics(message) => {
|
||||
Operation::UpdateDiagnostics(deserialize_diagnostics(message))
|
||||
}
|
||||
proto::operation::Variant::UpdateDiagnostics(message) => Operation::UpdateDiagnostics {
|
||||
diagnostics: Arc::from(deserialize_diagnostics(message.diagnostics)),
|
||||
lamport_timestamp: clock::Lamport {
|
||||
replica_id: message.replica_id as ReplicaId,
|
||||
value: message.lamport_timestamp,
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
}
|
||||
@ -277,36 +287,30 @@ pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet {
|
||||
value: set.lamport_timestamp,
|
||||
},
|
||||
active: set.is_active,
|
||||
selections: Arc::new(AnchorRangeMap::from_full_offset_ranges(
|
||||
set.version.into(),
|
||||
Bias::Left,
|
||||
Bias::Left,
|
||||
selections: Arc::from(
|
||||
set.selections
|
||||
.into_iter()
|
||||
.map(|selection| {
|
||||
let range =
|
||||
FullOffset(selection.start as usize)..FullOffset(selection.end as usize);
|
||||
let state = SelectionState {
|
||||
.filter_map(|selection| {
|
||||
Some(Selection {
|
||||
id: selection.id as usize,
|
||||
start: deserialize_anchor(selection.start?)?,
|
||||
end: deserialize_anchor(selection.end?)?,
|
||||
reversed: selection.reversed,
|
||||
goal: SelectionGoal::None,
|
||||
};
|
||||
(range, state)
|
||||
})
|
||||
})
|
||||
.collect(),
|
||||
)),
|
||||
.collect::<Vec<_>>(),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMultimap<Diagnostic> {
|
||||
AnchorRangeMultimap::from_full_offset_ranges(
|
||||
message.version.into(),
|
||||
Bias::Left,
|
||||
Bias::Right,
|
||||
message.diagnostics.into_iter().filter_map(|diagnostic| {
|
||||
Some((
|
||||
FullOffset(diagnostic.start as usize)..FullOffset(diagnostic.end as usize),
|
||||
Diagnostic {
|
||||
pub fn deserialize_diagnostics(diagnostics: Vec<proto::Diagnostic>) -> Vec<DiagnosticEntry> {
|
||||
diagnostics
|
||||
.into_iter()
|
||||
.filter_map(|diagnostic| {
|
||||
Some(DiagnosticEntry {
|
||||
range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?,
|
||||
diagnostic: Diagnostic {
|
||||
severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? {
|
||||
proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
|
||||
proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
|
||||
@ -318,7 +322,21 @@ pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMult
|
||||
group_id: diagnostic.group_id as usize,
|
||||
is_primary: diagnostic.is_primary,
|
||||
},
|
||||
))
|
||||
}),
|
||||
)
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
|
||||
Some(Anchor {
|
||||
timestamp: clock::Local {
|
||||
replica_id: anchor.replica_id as ReplicaId,
|
||||
value: anchor.local_timestamp,
|
||||
},
|
||||
offset: anchor.offset as usize,
|
||||
bias: match proto::Bias::from_i32(anchor.bias)? {
|
||||
proto::Bias::Left => Bias::Left,
|
||||
proto::Bias::Right => Bias::Right,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
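Selections and diagnostics are now serialized as full anchors rather than full offsets. The sketch below round-trips the anchor conversion shown above, with plain structs standing in for the generated `proto::Anchor`; field names follow the diff, and the 0/1 values for bias match the `Bias` enum added in the .proto changes further down.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Bias {
    Left,
    Right,
}

// Toy stand-in for text::Anchor with the fields the serializer reads.
#[derive(Clone, Debug, PartialEq)]
struct Anchor {
    replica_id: u16,
    local_timestamp: u32,
    offset: usize,
    bias: Bias,
}

// Toy stand-in for the generated proto::Anchor message.
#[derive(Clone, Debug)]
struct ProtoAnchor {
    replica_id: u32,
    local_timestamp: u32,
    offset: u64,
    bias: i32, // 0 = Left, 1 = Right, as in the proto Bias enum
}

fn serialize_anchor(anchor: &Anchor) -> ProtoAnchor {
    ProtoAnchor {
        replica_id: anchor.replica_id as u32,
        local_timestamp: anchor.local_timestamp,
        offset: anchor.offset as u64,
        bias: match anchor.bias {
            Bias::Left => 0,
            Bias::Right => 1,
        },
    }
}

fn deserialize_anchor(anchor: ProtoAnchor) -> Option<Anchor> {
    Some(Anchor {
        replica_id: anchor.replica_id as u16,
        local_timestamp: anchor.local_timestamp,
        offset: anchor.offset as usize,
        bias: match anchor.bias {
            0 => Bias::Left,
            1 => Bias::Right,
            _ => return None, // unknown enum value: drop the anchor, as filter_map does above
        },
    })
}

fn main() {
    let anchor = Anchor { replica_id: 1, local_timestamp: 42, offset: 7, bias: Bias::Right };
    let round_tripped = deserialize_anchor(serialize_anchor(&anchor)).unwrap();
    assert_eq!(round_tripped, anchor);
}
```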
@ -533,6 +533,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
|
||||
assert_eq!(
|
||||
buffer
|
||||
.diagnostics_in_range(Point::new(3, 0)..Point::new(5, 0))
|
||||
.map(|entry| (entry.range.to_point(buffer), &entry.diagnostic))
|
||||
.collect::<Vec<_>>(),
|
||||
&[
|
||||
(
|
||||
@ -600,6 +601,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
|
||||
assert_eq!(
|
||||
buffer
|
||||
.diagnostics_in_range(Point::new(2, 0)..Point::new(3, 0))
|
||||
.map(|entry| (entry.range.to_point(buffer), &entry.diagnostic))
|
||||
.collect::<Vec<_>>(),
|
||||
&[
|
||||
(
|
||||
@ -679,6 +681,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
|
||||
assert_eq!(
|
||||
buffer
|
||||
.diagnostics_in_range(0..buffer.len())
|
||||
.map(|entry| (entry.range.to_point(buffer), &entry.diagnostic))
|
||||
.collect::<Vec<_>>(),
|
||||
&[
|
||||
(
|
||||
@ -863,7 +866,8 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
|
||||
buffer.update_diagnostics(None, diagnostics, cx).unwrap();
|
||||
assert_eq!(
|
||||
buffer
|
||||
.diagnostics_in_range::<_, Point>(0..buffer.len())
|
||||
.diagnostics_in_range(0..buffer.len())
|
||||
.map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic))
|
||||
.collect::<Vec<_>>(),
|
||||
&[
|
||||
(
|
||||
@ -915,7 +919,10 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
buffer.diagnostic_group(0).collect::<Vec<_>>(),
|
||||
buffer
|
||||
.diagnostic_group(0)
|
||||
.map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic))
|
||||
.collect::<Vec<_>>(),
|
||||
&[
|
||||
(
|
||||
Point::new(1, 8)..Point::new(1, 9),
|
||||
@ -938,7 +945,10 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.diagnostic_group(1).collect::<Vec<_>>(),
|
||||
buffer
|
||||
.diagnostic_group(1)
|
||||
.map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic))
|
||||
.collect::<Vec<_>>(),
|
||||
&[
|
||||
(
|
||||
Point::new(1, 13)..Point::new(1, 15),
|
||||
@ -995,13 +1005,17 @@ fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
|
||||
#[test]
|
||||
fn test_contiguous_ranges() {
|
||||
assert_eq!(
|
||||
contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12], 100).collect::<Vec<_>>(),
|
||||
contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
|
||||
&[1..4, 5..7, 9..13]
|
||||
);
|
||||
|
||||
// Respects the `max_len` parameter
|
||||
assert_eq!(
|
||||
contiguous_ranges([2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31], 3).collect::<Vec<_>>(),
|
||||
contiguous_ranges(
|
||||
[2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
|
||||
3
|
||||
)
|
||||
.collect::<Vec<_>>(),
|
||||
&[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
|
||||
);
|
||||
}
|
||||
|
@ -3005,7 +3005,7 @@ mod tests {
|
||||
use anyhow::Result;
|
||||
use client::test::{FakeHttpClient, FakeServer};
|
||||
use fs::RealFs;
|
||||
use language::{tree_sitter_rust, LanguageServerConfig};
|
||||
use language::{tree_sitter_rust, AnchorRangeExt, LanguageServerConfig};
|
||||
use language::{Diagnostic, LanguageConfig};
|
||||
use lsp::Url;
|
||||
use rand::prelude::*;
|
||||
@ -3722,6 +3722,7 @@ mod tests {
|
||||
buffer.read_with(&cx, |buffer, _| {
|
||||
let diagnostics = buffer
|
||||
.diagnostics_in_range(0..buffer.len())
|
||||
.map(|entry| (entry.range.to_point(buffer), &entry.diagnostic))
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
diagnostics,
|
||||
|
@ -229,32 +229,44 @@ message Buffer {
|
||||
string content = 2;
|
||||
repeated Operation.Edit history = 3;
|
||||
repeated SelectionSet selections = 4;
|
||||
DiagnosticSet diagnostics = 5;
|
||||
repeated Diagnostic diagnostics = 5;
|
||||
}
|
||||
|
||||
message SelectionSet {
|
||||
uint32 replica_id = 1;
|
||||
uint32 lamport_timestamp = 2;
|
||||
bool is_active = 3;
|
||||
repeated VectorClockEntry version = 4;
|
||||
repeated Selection selections = 5;
|
||||
repeated Selection selections = 4;
|
||||
}
|
||||
|
||||
message Selection {
|
||||
uint64 id = 1;
|
||||
uint64 start = 2;
|
||||
uint64 end = 3;
|
||||
Anchor start = 2;
|
||||
Anchor end = 3;
|
||||
bool reversed = 4;
|
||||
}
|
||||
|
||||
message DiagnosticSet {
|
||||
repeated VectorClockEntry version = 1;
|
||||
repeated Diagnostic diagnostics = 2;
|
||||
message Anchor {
|
||||
uint32 replica_id = 1;
|
||||
uint32 local_timestamp = 2;
|
||||
uint64 offset = 3;
|
||||
Bias bias = 4;
|
||||
}
|
||||
|
||||
enum Bias {
|
||||
Left = 0;
|
||||
Right = 1;
|
||||
}
|
||||
|
||||
message UpdateDiagnostics {
|
||||
uint32 replica_id = 1;
|
||||
uint32 lamport_timestamp = 2;
|
||||
repeated Diagnostic diagnostics = 3;
|
||||
}
|
||||
|
||||
message Diagnostic {
|
||||
uint64 start = 1;
|
||||
uint64 end = 2;
|
||||
Anchor start = 1;
|
||||
Anchor end = 2;
|
||||
Severity severity = 3;
|
||||
string message = 4;
|
||||
uint64 group_id = 5;
|
||||
@ -268,8 +280,6 @@ message Diagnostic {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
message Operation {
|
||||
oneof variant {
|
||||
Edit edit = 1;
|
||||
@ -277,7 +287,7 @@ message Operation {
|
||||
UpdateSelections update_selections = 3;
|
||||
RemoveSelections remove_selections = 4;
|
||||
SetActiveSelections set_active_selections = 5;
|
||||
DiagnosticSet update_diagnostics = 6;
|
||||
UpdateDiagnostics update_diagnostics = 6;
|
||||
}
|
||||
|
||||
message Edit {
|
||||
@ -308,8 +318,7 @@ message Operation {
|
||||
uint32 replica_id = 1;
|
||||
uint32 local_timestamp = 2;
|
||||
uint32 lamport_timestamp = 3;
|
||||
repeated VectorClockEntry version = 4;
|
||||
repeated Selection selections = 5;
|
||||
repeated Selection selections = 4;
|
||||
}
|
||||
|
||||
message RemoveSelections {
|
||||
|
@ -400,7 +400,7 @@ mod tests {
|
||||
content: "path/one content".to_string(),
|
||||
history: vec![],
|
||||
selections: vec![],
|
||||
diagnostics: None,
|
||||
diagnostics: vec![],
|
||||
}),
|
||||
}
|
||||
);
|
||||
@ -422,7 +422,7 @@ mod tests {
|
||||
content: "path/two content".to_string(),
|
||||
history: vec![],
|
||||
selections: vec![],
|
||||
diagnostics: None,
|
||||
diagnostics: vec![],
|
||||
}),
|
||||
}
|
||||
);
|
||||
@ -453,7 +453,7 @@ mod tests {
|
||||
content: "path/one content".to_string(),
|
||||
history: vec![],
|
||||
selections: vec![],
|
||||
diagnostics: None,
|
||||
diagnostics: vec![],
|
||||
}),
|
||||
}
|
||||
}
|
||||
@ -465,7 +465,7 @@ mod tests {
|
||||
content: "path/two content".to_string(),
|
||||
history: vec![],
|
||||
selections: vec![],
|
||||
diagnostics: None,
|
||||
diagnostics: vec![],
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
@ -947,8 +947,8 @@ mod tests {
|
||||
editor::{Editor, EditorSettings, Input},
|
||||
fs::{FakeFs, Fs as _},
|
||||
language::{
|
||||
tree_sitter_rust, Diagnostic, Language, LanguageConfig, LanguageRegistry,
|
||||
LanguageServerConfig, Point,
|
||||
tree_sitter_rust, AnchorRangeExt, Diagnostic, Language, LanguageConfig,
|
||||
LanguageRegistry, LanguageServerConfig, Point,
|
||||
},
|
||||
lsp,
|
||||
project::{ProjectPath, Worktree},
|
||||
@ -1705,6 +1705,7 @@ mod tests {
|
||||
assert_eq!(
|
||||
buffer
|
||||
.diagnostics_in_range(0..buffer.len())
|
||||
.map(|entry| (entry.range.to_point(buffer), &entry.diagnostic))
|
||||
.collect::<Vec<_>>(),
|
||||
&[
|
||||
(
|
||||
|
@ -18,6 +18,11 @@ pub struct Cursor<'a, T: Item, D> {
|
||||
at_end: bool,
|
||||
}
|
||||
|
||||
pub struct Iter<'a, T: Item> {
|
||||
tree: &'a SumTree<T>,
|
||||
stack: ArrayVec<StackEntry<'a, T, ()>, 16>,
|
||||
}
|
||||
|
||||
impl<'a, T, D> Cursor<'a, T, D>
|
||||
where
|
||||
T: Item,
|
||||
@ -487,6 +492,71 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: Item> Iter<'a, T> {
|
||||
pub(crate) fn new(tree: &'a SumTree<T>) -> Self {
|
||||
Self {
|
||||
tree,
|
||||
stack: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: Item> Iterator for Iter<'a, T> {
|
||||
type Item = &'a T;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let mut descend = false;
|
||||
|
||||
if self.stack.is_empty() {
|
||||
self.stack.push(StackEntry {
|
||||
tree: self.tree,
|
||||
index: 0,
|
||||
position: (),
|
||||
});
|
||||
descend = true;
|
||||
}
|
||||
|
||||
while self.stack.len() > 0 {
|
||||
let new_subtree = {
|
||||
let entry = self.stack.last_mut().unwrap();
|
||||
match entry.tree.0.as_ref() {
|
||||
Node::Internal { child_trees, .. } => {
|
||||
if !descend {
|
||||
entry.index += 1;
|
||||
}
|
||||
child_trees.get(entry.index)
|
||||
}
|
||||
Node::Leaf { items, .. } => {
|
||||
if !descend {
|
||||
entry.index += 1;
|
||||
}
|
||||
|
||||
if let Some(next_item) = items.get(entry.index) {
|
||||
return Some(next_item);
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(subtree) = new_subtree {
|
||||
descend = true;
|
||||
self.stack.push(StackEntry {
|
||||
tree: subtree,
|
||||
index: 0,
|
||||
position: (),
|
||||
});
|
||||
} else {
|
||||
descend = false;
|
||||
self.stack.pop();
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T, S, D> Iterator for Cursor<'a, T, D>
|
||||
where
|
||||
T: Item<Summary = S>,
|
||||
|
@ -1,8 +1,7 @@
|
||||
mod cursor;
|
||||
|
||||
use arrayvec::ArrayVec;
|
||||
pub use cursor::Cursor;
|
||||
pub use cursor::FilterCursor;
|
||||
pub use cursor::{Cursor, FilterCursor, Iter};
|
||||
use std::marker::PhantomData;
|
||||
use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc};
|
||||
|
||||
@ -156,6 +155,10 @@ impl<T: Item> SumTree<T> {
|
||||
items
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> Iter<T> {
|
||||
Iter::new(self)
|
||||
}
|
||||
|
||||
pub fn cursor<'a, S>(&'a self) -> Cursor<T, S>
|
||||
where
|
||||
S: Dimension<'a, T::Summary>,
|
||||
@ -722,6 +725,10 @@ mod tests {
|
||||
};
|
||||
|
||||
assert_eq!(tree.items(&()), reference_items);
|
||||
assert_eq!(
|
||||
tree.iter().collect::<Vec<_>>(),
|
||||
tree.cursor::<()>().collect::<Vec<_>>()
|
||||
);
|
||||
|
||||
let mut filter_cursor =
|
||||
tree.filter::<_, Count>(|summary| summary.contains_even, &());
|
||||
|
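The new `sum_tree::Iter` walks the tree depth-first with an explicit stack of `(subtree, index)` entries, which is what lets `DiagnosticSet::iter` and the reworked `OperationQueue` hand out items without a dimension-tracking cursor; the test above checks it agrees with the cursor. Here is a stand-alone miniature of the same traversal over a toy n-ary tree (the real `SumTree` nodes also carry summaries, omitted here).

```rust
// Toy n-ary tree: internal nodes hold children, leaves hold items.
enum Node<T> {
    Internal(Vec<Node<T>>),
    Leaf(Vec<T>),
}

struct Iter<'a, T> {
    // Each stack entry is a node plus the index of the next child/item to visit.
    stack: Vec<(&'a Node<T>, usize)>,
}

impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        while let Some(entry) = self.stack.last_mut() {
            let node = entry.0; // copy the shared reference out of the stack entry
            let index = entry.1;
            entry.1 += 1; // advance this level before descending or yielding
            match node {
                Node::Internal(children) => match children.get(index) {
                    Some(child) => self.stack.push((child, 0)),
                    None => {
                        self.stack.pop();
                    }
                },
                Node::Leaf(items) => match items.get(index) {
                    Some(item) => return Some(item),
                    None => {
                        self.stack.pop();
                    }
                },
            }
        }
        None
    }
}

fn main() {
    let tree = Node::Internal(vec![Node::Leaf(vec![1, 2]), Node::Leaf(vec![3])]);
    let items: Vec<_> = Iter { stack: vec![(&tree, 0)] }.collect();
    assert_eq!(items, vec![&1, &2, &3]); // in-order, left to right
}
```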
@ -1,9 +1,15 @@
|
||||
use super::Operation;
|
||||
use std::{fmt::Debug, ops::Add};
|
||||
use sum_tree::{Cursor, Dimension, Edit, Item, KeyedItem, SumTree, Summary};
|
||||
use sum_tree::{Dimension, Edit, Item, KeyedItem, SumTree, Summary};
|
||||
|
||||
pub trait Operation: Clone + Debug {
|
||||
fn lamport_timestamp(&self) -> clock::Lamport;
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct OperationQueue(SumTree<Operation>);
|
||||
struct OperationItem<T>(T);
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct OperationQueue<T: Operation>(SumTree<OperationItem<T>>);
|
||||
|
||||
#[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]
|
||||
pub struct OperationKey(clock::Lamport);
|
||||
@ -20,7 +26,7 @@ impl OperationKey {
|
||||
}
|
||||
}
|
||||
|
||||
impl OperationQueue {
|
||||
impl<T: Operation> OperationQueue<T> {
|
||||
pub fn new() -> Self {
|
||||
OperationQueue(SumTree::new())
|
||||
}
|
||||
@ -29,11 +35,15 @@ impl OperationQueue {
|
||||
self.0.summary().len
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, mut ops: Vec<Operation>) {
|
||||
pub fn insert(&mut self, mut ops: Vec<T>) {
|
||||
ops.sort_by_key(|op| op.lamport_timestamp());
|
||||
ops.dedup_by_key(|op| op.lamport_timestamp());
|
||||
self.0
|
||||
.edit(ops.into_iter().map(Edit::Insert).collect(), &());
|
||||
self.0.edit(
|
||||
ops.into_iter()
|
||||
.map(|op| Edit::Insert(OperationItem(op)))
|
||||
.collect(),
|
||||
&(),
|
||||
);
|
||||
}
|
||||
|
||||
pub fn drain(&mut self) -> Self {
|
||||
@ -42,8 +52,8 @@ impl OperationQueue {
|
||||
clone
|
||||
}
|
||||
|
||||
pub fn cursor(&self) -> Cursor<Operation, ()> {
|
||||
self.0.cursor()
|
||||
pub fn iter(&self) -> impl Iterator<Item = &T> {
|
||||
self.0.cursor::<()>().map(|i| &i.0)
|
||||
}
|
||||
}
|
||||
|
||||
@ -76,22 +86,22 @@ impl<'a> Dimension<'a, OperationSummary> for OperationKey {
|
||||
}
|
||||
}
|
||||
|
||||
impl Item for Operation {
|
||||
impl<T: Operation> Item for OperationItem<T> {
|
||||
type Summary = OperationSummary;
|
||||
|
||||
fn summary(&self) -> Self::Summary {
|
||||
OperationSummary {
|
||||
key: OperationKey::new(self.lamport_timestamp()),
|
||||
key: OperationKey::new(self.0.lamport_timestamp()),
|
||||
len: 1,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl KeyedItem for Operation {
|
||||
impl<T: Operation> KeyedItem for OperationItem<T> {
|
||||
type Key = OperationKey;
|
||||
|
||||
fn key(&self) -> Self::Key {
|
||||
OperationKey::new(self.lamport_timestamp())
|
||||
OperationKey::new(self.0.lamport_timestamp())
|
||||
}
|
||||
}
|
||||
|
||||
@ -107,21 +117,27 @@ mod tests {
|
||||
assert_eq!(queue.len(), 0);
|
||||
|
||||
queue.insert(vec![
|
||||
Operation::Test(clock.tick()),
|
||||
Operation::Test(clock.tick()),
|
||||
TestOperation(clock.tick()),
|
||||
TestOperation(clock.tick()),
|
||||
]);
|
||||
assert_eq!(queue.len(), 2);
|
||||
|
||||
queue.insert(vec![Operation::Test(clock.tick())]);
|
||||
queue.insert(vec![TestOperation(clock.tick())]);
|
||||
assert_eq!(queue.len(), 3);
|
||||
|
||||
drop(queue.drain());
|
||||
assert_eq!(queue.len(), 0);
|
||||
|
||||
queue.insert(vec![Operation::Test(clock.tick())]);
|
||||
queue.insert(vec![TestOperation(clock.tick())]);
|
||||
assert_eq!(queue.len(), 1);
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
struct TestOperation(clock::Lamport);
|
||||
|
||||
impl Operation for TestOperation {
|
||||
fn lamport_timestamp(&self) -> clock::Lamport {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
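`OperationQueue` is now generic over any operation type that exposes a Lamport timestamp. The sketch below mimics that contract with a miniature queue built on `BTreeMap`; this is not how the real `SumTree`-backed queue works internally, but it shows the same insert/dedup/drain-in-timestamp-order behaviour, with a plain `Lamport` struct standing in for `clock::Lamport`.

```rust
use std::collections::BTreeMap;

// Toy Lamport timestamp; ordering by (value, replica_id) gives a total order.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Lamport {
    value: u32,
    replica_id: u16,
}

// The contract introduced by this commit: anything with a Lamport timestamp can be queued.
trait Operation: Clone {
    fn lamport_timestamp(&self) -> Lamport;
}

struct OperationQueue<T: Operation> {
    ops: BTreeMap<Lamport, T>,
}

impl<T: Operation> OperationQueue<T> {
    fn new() -> Self {
        Self { ops: BTreeMap::new() }
    }

    fn insert(&mut self, ops: Vec<T>) {
        for op in ops {
            // Duplicate timestamps collapse to a single entry, like the dedup in the real queue.
            self.ops.insert(op.lamport_timestamp(), op);
        }
    }

    fn drain(&mut self) -> impl Iterator<Item = T> {
        std::mem::take(&mut self.ops).into_values()
    }
}

#[derive(Clone, Debug, PartialEq)]
struct TestOperation(Lamport);

impl Operation for TestOperation {
    fn lamport_timestamp(&self) -> Lamport {
        self.0
    }
}

fn main() {
    let mut queue = OperationQueue::new();
    queue.insert(vec![
        TestOperation(Lamport { value: 2, replica_id: 0 }),
        TestOperation(Lamport { value: 1, replica_id: 0 }),
    ]);
    let drained: Vec<_> = queue.drain().collect();
    assert_eq!(drained[0].0.value, 1); // lowest Lamport timestamp drains first
}
```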
@ -141,13 +141,13 @@ impl SelectionSet {
|
||||
let end = snapshot.anchor_at(range.end.0, range.end.1);
|
||||
let start_ix = match self
|
||||
.selections
|
||||
.binary_search_by(|probe| probe.start.cmp(&start, snapshot).unwrap())
|
||||
.binary_search_by(|probe| probe.end.cmp(&start, snapshot).unwrap())
|
||||
{
|
||||
Ok(ix) | Err(ix) => ix,
|
||||
};
|
||||
let end_ix = match self
|
||||
.selections
|
||||
.binary_search_by(|probe| probe.end.cmp(&end, snapshot).unwrap())
|
||||
.binary_search_by(|probe| probe.start.cmp(&end, snapshot).unwrap())
|
||||
{
|
||||
Ok(ix) | Err(ix) => ix,
|
||||
};
|
||||
|
@ -1,6 +1,6 @@
|
||||
mod anchor;
|
||||
mod locator;
|
||||
mod operation_queue;
|
||||
pub mod operation_queue;
|
||||
mod patch;
|
||||
mod point;
|
||||
mod point_utf16;
|
||||
@ -42,7 +42,7 @@ pub struct Buffer {
|
||||
last_edit: clock::Local,
|
||||
history: History,
|
||||
selections: HashMap<SelectionSetId, SelectionSet>,
|
||||
deferred_ops: OperationQueue,
|
||||
deferred_ops: OperationQueue<Operation>,
|
||||
deferred_replicas: HashSet<ReplicaId>,
|
||||
replica_id: ReplicaId,
|
||||
remote_id: u64,
|
||||
@ -441,8 +441,6 @@ pub enum Operation {
|
||||
set_id: Option<SelectionSetId>,
|
||||
lamport_timestamp: clock::Lamport,
|
||||
},
|
||||
#[cfg(test)]
|
||||
Test(clock::Lamport),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
@ -527,6 +525,10 @@ impl Buffer {
|
||||
self.local_clock.replica_id
|
||||
}
|
||||
|
||||
pub fn lamport_timestamp(&self) -> clock::Lamport {
|
||||
self.lamport_clock
|
||||
}
|
||||
|
||||
pub fn remote_id(&self) -> u64 {
|
||||
self.remote_id
|
||||
}
|
||||
@ -808,8 +810,6 @@ impl Buffer {
|
||||
}
|
||||
self.lamport_clock.observe(lamport_timestamp);
|
||||
}
|
||||
#[cfg(test)]
|
||||
Operation::Test(_) => {}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@ -1103,7 +1103,7 @@ impl Buffer {
|
||||
fn flush_deferred_ops(&mut self) -> Result<()> {
|
||||
self.deferred_replicas.clear();
|
||||
let mut deferred_ops = Vec::new();
|
||||
for op in self.deferred_ops.drain().cursor().cloned() {
|
||||
for op in self.deferred_ops.drain().iter().cloned() {
|
||||
if self.can_apply_op(&op) {
|
||||
self.apply_op(op)?;
|
||||
} else {
|
||||
@ -1129,13 +1129,11 @@ impl Buffer {
|
||||
Operation::SetActiveSelections { set_id, .. } => {
|
||||
set_id.map_or(true, |set_id| self.selections.contains_key(&set_id))
|
||||
}
|
||||
#[cfg(test)]
|
||||
Operation::Test(_) => true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn can_resolve(&self, anchor: &Anchor) -> bool {
|
||||
pub fn can_resolve(&self, anchor: &Anchor) -> bool {
|
||||
*anchor == Anchor::min()
|
||||
|| *anchor == Anchor::max()
|
||||
|| self.version.observed(anchor.timestamp)
|
||||
@ -2176,9 +2174,18 @@ impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, Self> for VersionedFullOffset
|
||||
|
||||
impl Operation {
|
||||
fn replica_id(&self) -> ReplicaId {
|
||||
self.lamport_timestamp().replica_id
|
||||
operation_queue::Operation::lamport_timestamp(self).replica_id
|
||||
}
|
||||
|
||||
pub fn is_edit(&self) -> bool {
|
||||
match self {
|
||||
Operation::Edit { .. } => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl operation_queue::Operation for Operation {
|
||||
fn lamport_timestamp(&self) -> clock::Lamport {
|
||||
match self {
|
||||
Operation::Edit(edit) => edit.timestamp.lamport(),
|
||||
@ -2194,15 +2201,6 @@ impl Operation {
|
||||
Operation::SetActiveSelections {
|
||||
lamport_timestamp, ..
|
||||
} => *lamport_timestamp,
|
||||
#[cfg(test)]
|
||||
Operation::Test(lamport_timestamp) => *lamport_timestamp,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_edit(&self) -> bool {
|
||||
match self {
|
||||
Operation::Edit { .. } => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|