Use a different invalidation strategy for project-wide diagnostics

Antonio Scandurra 2021-12-23 15:32:21 +01:00
parent da460edb8b
commit dcf26acaac
5 changed files with 336 additions and 360 deletions
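Summary of the change as reflected in the diff below: instead of recycling diagnostic group ids inside Buffer (and expiring stale disk-based diagnostics on a timer), the project diagnostics view now keeps per-path DiagnosticGroupState entries and, on each update, walks the old and new diagnostic groups in parallel, both ordered by their primary diagnostic. Groups that disappeared are kept but marked invalid and restyled, matching groups are re-validated, and new groups get fresh excerpts and blocks; an alt-shift-C binding clears the invalid ones. A minimal, self-contained sketch of that merge walk, using simplified stand-in types rather than the editor's DiagnosticGroupState (illustration only, not code from this commit):

use std::cmp::Ordering;

// Stand-in for a group's primary diagnostic, ordered the same way as the
// compare_diagnostics helper added in this commit: start, then end, then message.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Primary {
    start: usize,
    end: usize,
    message: String,
}

#[derive(Debug)]
enum GroupChange {
    Insert(Primary),     // brand-new group: build excerpts and blocks for it
    Invalidate(Primary), // vanished group: keep it, restyle it as stale
    Validate(Primary),   // still-reported group: restyle it as valid again
}

// Merge-walk over the old and new group lists, both sorted by primary diagnostic.
fn diff_groups(old: &[Primary], new: &[Primary]) -> Vec<GroupChange> {
    let mut old_iter = old.iter().peekable();
    let mut new_iter = new.iter().peekable();
    let mut changes = Vec::new();
    loop {
        match (old_iter.peek(), new_iter.peek()) {
            (None, None) => break,
            (None, Some(_)) => changes.push(GroupChange::Insert(new_iter.next().unwrap().clone())),
            (Some(_), None) => {
                changes.push(GroupChange::Invalidate(old_iter.next().unwrap().clone()))
            }
            (Some(old_group), Some(new_group)) => match old_group.cmp(new_group) {
                Ordering::Less => {
                    changes.push(GroupChange::Invalidate(old_iter.next().unwrap().clone()))
                }
                Ordering::Equal => {
                    changes.push(GroupChange::Validate(old_iter.next().unwrap().clone()));
                    new_iter.next();
                }
                Ordering::Greater => {
                    changes.push(GroupChange::Insert(new_iter.next().unwrap().clone()))
                }
            },
        }
    }
    changes
}

fn main() {
    let old = vec![Primary { start: 0, end: 4, message: "unused variable".into() }];
    let new = vec![Primary { start: 10, end: 12, message: "mismatched types".into() }];
    // The old group is kept but invalidated; the new one gets excerpts and blocks.
    for change in diff_groups(&old, &new) {
        println!("{:?}", change);
    }
}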


@@ -1,5 +1,5 @@
use anyhow::Result;
use collections::{hash_map, HashMap, HashSet};
use collections::{HashMap, HashSet};
use editor::{
context_header_renderer, diagnostic_block_renderer, diagnostic_header_renderer,
display_map::{BlockDisposition, BlockId, BlockProperties},
@@ -9,20 +9,29 @@ use gpui::{
action, elements::*, keymap::Binding, AppContext, Entity, ModelHandle, MutableAppContext,
RenderContext, Task, View, ViewContext, ViewHandle,
};
use language::{Bias, Buffer, Point};
use language::{Bias, Buffer, Diagnostic, DiagnosticEntry, Point};
use postage::watch;
use project::Project;
use std::{ops::Range, path::Path, sync::Arc};
use std::{cmp::Ordering, ops::Range, path::Path, sync::Arc};
use util::TryFutureExt;
use workspace::Workspace;
action!(Toggle);
action!(ClearInvalid);
const CONTEXT_LINE_COUNT: u32 = 1;
pub fn init(cx: &mut MutableAppContext) {
cx.add_bindings([Binding::new("alt-shift-D", Toggle, None)]);
cx.add_bindings([
Binding::new("alt-shift-D", Toggle, None),
Binding::new(
"alt-shift-C",
ClearInvalid,
Some("ProjectDiagnosticsEditor"),
),
]);
cx.add_action(ProjectDiagnosticsEditor::toggle);
cx.add_action(ProjectDiagnosticsEditor::clear_invalid);
}
type Event = editor::Event;
@@ -34,20 +43,22 @@ struct ProjectDiagnostics {
struct ProjectDiagnosticsEditor {
editor: ViewHandle<Editor>,
excerpts: ModelHandle<MultiBuffer>,
path_states: Vec<(Arc<Path>, PathState)>,
path_states: Vec<(Arc<Path>, Vec<DiagnosticGroupState>)>,
build_settings: BuildSettings,
}
#[derive(Default)]
struct PathState {
last_excerpt: ExcerptId,
diagnostic_group_states: HashMap<usize, DiagnosticGroupState>,
struct DiagnosticGroupState {
primary_diagnostic: DiagnosticEntry<language::Anchor>,
excerpts: Vec<ExcerptId>,
blocks: HashMap<BlockId, DiagnosticBlock>,
block_count: usize,
is_valid: bool,
}
#[derive(Default)]
struct DiagnosticGroupState {
excerpts: Vec<ExcerptId>,
blocks: Vec<BlockId>,
enum DiagnosticBlock {
Header(Diagnostic),
Inline(Diagnostic),
Context,
}
impl ProjectDiagnostics {
@@ -146,6 +157,38 @@ impl ProjectDiagnosticsEditor {
workspace.add_item(diagnostics, cx);
}
fn clear_invalid(&mut self, _: &ClearInvalid, cx: &mut ViewContext<Self>) {
let mut blocks_to_delete = HashSet::default();
let mut excerpts_to_delete = Vec::new();
let mut path_ixs_to_delete = Vec::new();
for (ix, (_, groups)) in self.path_states.iter_mut().enumerate() {
groups.retain(|group| {
if group.is_valid {
true
} else {
blocks_to_delete.extend(group.blocks.keys().copied());
excerpts_to_delete.extend(group.excerpts.iter().cloned());
false
}
});
if groups.is_empty() {
path_ixs_to_delete.push(ix);
}
}
for ix in path_ixs_to_delete.into_iter().rev() {
self.path_states.remove(ix);
}
self.excerpts.update(cx, |excerpts, cx| {
excerpts_to_delete.sort_unstable();
excerpts.remove_excerpts(&excerpts_to_delete, cx)
});
self.editor
.update(cx, |editor, cx| editor.remove_blocks(blocks_to_delete, cx));
}
fn populate_excerpts(&mut self, buffer: ModelHandle<Buffer>, cx: &mut ViewContext<Self>) {
let snapshot;
let path;
@@ -165,189 +208,260 @@ impl ProjectDiagnosticsEditor {
{
Ok(ix) => ix,
Err(ix) => {
self.path_states.insert(
ix,
(
path.clone(),
PathState {
last_excerpt: ExcerptId::max(),
diagnostic_group_states: Default::default(),
},
),
);
self.path_states
.insert(ix, (path.clone(), Default::default()));
ix
}
};
let mut prev_excerpt_id = if path_ix > 0 {
self.path_states[path_ix - 1].1.last_excerpt.clone()
let prev_path_last_group = &self.path_states[path_ix - 1].1.last().unwrap();
prev_path_last_group.excerpts.last().unwrap().clone()
} else {
ExcerptId::min()
};
let path_state = &mut self.path_states[path_ix].1;
let groups = &mut self.path_states[path_ix].1;
let mut groups_to_add = Vec::new();
let mut blocks_to_add = Vec::new();
let mut blocks_to_remove = HashSet::default();
let mut excerpts_to_remove = Vec::new();
let mut block_counts_by_group = Vec::new();
let diagnostic_groups = snapshot.diagnostic_groups::<Point>();
let mut blocks_to_restyle = HashMap::default();
let mut diagnostic_blocks = Vec::new();
let excerpts_snapshot = self.excerpts.update(cx, |excerpts, excerpts_cx| {
for group in &diagnostic_groups {
let group_id = group.entries[0].diagnostic.group_id;
let mut old_groups = groups.iter_mut().peekable();
let mut new_groups = snapshot.diagnostic_groups().into_iter().peekable();
let group_state = match path_state.diagnostic_group_states.entry(group_id) {
hash_map::Entry::Occupied(e) => {
prev_excerpt_id = e.get().excerpts.last().unwrap().clone();
block_counts_by_group.push(0);
continue;
}
hash_map::Entry::Vacant(e) => e.insert(DiagnosticGroupState::default()),
};
let mut block_count = 0;
let mut pending_range: Option<(Range<Point>, usize)> = None;
let mut is_first_excerpt_for_group = true;
for (ix, entry) in group.entries.iter().map(Some).chain([None]).enumerate() {
if let Some((range, start_ix)) = &mut pending_range {
if let Some(entry) = entry {
if entry.range.start.row <= range.end.row + 1 + CONTEXT_LINE_COUNT * 2 {
range.end = range.end.max(entry.range.end);
continue;
loop {
let mut to_insert = None;
let mut to_invalidate = None;
let mut to_validate = None;
match (old_groups.peek(), new_groups.peek()) {
(None, None) => break,
(None, Some(_)) => to_insert = new_groups.next(),
(Some(_), None) => to_invalidate = old_groups.next(),
(Some(old_group), Some(new_group)) => {
let old_primary = &old_group.primary_diagnostic;
let new_primary = &new_group.entries[new_group.primary_ix];
match compare_diagnostics(old_primary, new_primary, &snapshot) {
Ordering::Less => to_invalidate = old_groups.next(),
Ordering::Equal => {
to_validate = old_groups.next();
new_groups.next();
}
Ordering::Greater => to_insert = new_groups.next(),
}
let excerpt_start =
Point::new(range.start.row.saturating_sub(CONTEXT_LINE_COUNT), 0);
let excerpt_end = snapshot.clip_point(
Point::new(range.end.row + CONTEXT_LINE_COUNT, u32::MAX),
Bias::Left,
);
let excerpt_id = excerpts.insert_excerpt_after(
&prev_excerpt_id,
ExcerptProperties {
buffer: &buffer,
range: excerpt_start..excerpt_end,
},
excerpts_cx,
);
prev_excerpt_id = excerpt_id.clone();
group_state.excerpts.push(excerpt_id.clone());
let header_position = (excerpt_id.clone(), language::Anchor::min());
if is_first_excerpt_for_group {
is_first_excerpt_for_group = false;
let primary = &group.entries[group.primary_ix].diagnostic;
let mut header = primary.clone();
header.message =
primary.message.split('\n').next().unwrap().to_string();
block_count += 1;
blocks_to_add.push(BlockProperties {
position: header_position,
height: 2,
render: diagnostic_header_renderer(
buffer.clone(),
header,
self.build_settings.clone(),
),
disposition: BlockDisposition::Above,
});
} else {
block_count += 1;
blocks_to_add.push(BlockProperties {
position: header_position,
height: 1,
render: context_header_renderer(self.build_settings.clone()),
disposition: BlockDisposition::Above,
});
}
for entry in &group.entries[*start_ix..ix] {
let mut diagnostic = entry.diagnostic.clone();
if diagnostic.is_primary {
let mut lines = entry.diagnostic.message.split('\n');
lines.next();
diagnostic.message = lines.collect();
}
if !diagnostic.message.is_empty() {
let buffer_anchor = snapshot.anchor_before(entry.range.start);
block_count += 1;
blocks_to_add.push(BlockProperties {
position: (excerpt_id.clone(), buffer_anchor),
height: diagnostic.message.matches('\n').count() as u8 + 1,
render: diagnostic_block_renderer(
diagnostic,
true,
self.build_settings.clone(),
),
disposition: BlockDisposition::Below,
});
}
}
pending_range.take();
}
if let Some(entry) = entry {
pending_range = Some((entry.range.clone(), ix));
}
}
block_counts_by_group.push(block_count);
if let Some(group) = to_insert {
let mut group_state = DiagnosticGroupState {
primary_diagnostic: group.entries[group.primary_ix].clone(),
excerpts: Default::default(),
blocks: Default::default(),
block_count: 0,
is_valid: true,
};
let mut pending_range: Option<(Range<Point>, usize)> = None;
let mut is_first_excerpt_for_group = true;
for (ix, entry) in group.entries.iter().map(Some).chain([None]).enumerate() {
let resolved_entry = entry.map(|e| e.resolve::<Point>(&snapshot));
if let Some((range, start_ix)) = &mut pending_range {
if let Some(entry) = resolved_entry.as_ref() {
if entry.range.start.row
<= range.end.row + 1 + CONTEXT_LINE_COUNT * 2
{
range.end = range.end.max(entry.range.end);
continue;
}
}
let excerpt_start =
Point::new(range.start.row.saturating_sub(CONTEXT_LINE_COUNT), 0);
let excerpt_end = snapshot.clip_point(
Point::new(range.end.row + CONTEXT_LINE_COUNT, u32::MAX),
Bias::Left,
);
let excerpt_id = excerpts.insert_excerpt_after(
&prev_excerpt_id,
ExcerptProperties {
buffer: &buffer,
range: excerpt_start..excerpt_end,
},
excerpts_cx,
);
prev_excerpt_id = excerpt_id.clone();
group_state.excerpts.push(excerpt_id.clone());
let header_position = (excerpt_id.clone(), language::Anchor::min());
if is_first_excerpt_for_group {
is_first_excerpt_for_group = false;
let primary = &group.entries[group.primary_ix].diagnostic;
let mut header = primary.clone();
header.message =
primary.message.split('\n').next().unwrap().to_string();
group_state.block_count += 1;
diagnostic_blocks.push(DiagnosticBlock::Header(header.clone()));
blocks_to_add.push(BlockProperties {
position: header_position,
height: 2,
render: diagnostic_header_renderer(
buffer.clone(),
header,
true,
self.build_settings.clone(),
),
disposition: BlockDisposition::Above,
});
} else {
group_state.block_count += 1;
diagnostic_blocks.push(DiagnosticBlock::Context);
blocks_to_add.push(BlockProperties {
position: header_position,
height: 1,
render: context_header_renderer(self.build_settings.clone()),
disposition: BlockDisposition::Above,
});
}
for entry in &group.entries[*start_ix..ix] {
let mut diagnostic = entry.diagnostic.clone();
if diagnostic.is_primary {
let mut lines = entry.diagnostic.message.split('\n');
lines.next();
diagnostic.message = lines.collect();
}
if !diagnostic.message.is_empty() {
group_state.block_count += 1;
diagnostic_blocks
.push(DiagnosticBlock::Inline(diagnostic.clone()));
blocks_to_add.push(BlockProperties {
position: (excerpt_id.clone(), entry.range.start.clone()),
height: diagnostic.message.matches('\n').count() as u8 + 1,
render: diagnostic_block_renderer(
diagnostic,
true,
self.build_settings.clone(),
),
disposition: BlockDisposition::Below,
});
}
}
pending_range.take();
}
if let Some(entry) = resolved_entry {
pending_range = Some((entry.range.clone(), ix));
}
}
groups_to_add.push(group_state);
} else if let Some(to_invalidate) = to_invalidate {
for (block_id, block) in &to_invalidate.blocks {
match block {
DiagnosticBlock::Header(diagnostic) => {
blocks_to_restyle.insert(
*block_id,
diagnostic_header_renderer(
buffer.clone(),
diagnostic.clone(),
false,
self.build_settings.clone(),
),
);
}
DiagnosticBlock::Inline(diagnostic) => {
blocks_to_restyle.insert(
*block_id,
diagnostic_block_renderer(
diagnostic.clone(),
false,
self.build_settings.clone(),
),
);
}
DiagnosticBlock::Context => {}
}
}
to_invalidate.is_valid = false;
prev_excerpt_id = to_invalidate.excerpts.last().unwrap().clone();
} else if let Some(to_validate) = to_validate {
for (block_id, block) in &to_validate.blocks {
match block {
DiagnosticBlock::Header(diagnostic) => {
blocks_to_restyle.insert(
*block_id,
diagnostic_header_renderer(
buffer.clone(),
diagnostic.clone(),
true,
self.build_settings.clone(),
),
);
}
DiagnosticBlock::Inline(diagnostic) => {
blocks_to_restyle.insert(
*block_id,
diagnostic_block_renderer(
diagnostic.clone(),
true,
self.build_settings.clone(),
),
);
}
DiagnosticBlock::Context => {}
}
}
to_validate.is_valid = true;
prev_excerpt_id = to_validate.excerpts.last().unwrap().clone();
} else {
unreachable!();
}
}
path_state
.diagnostic_group_states
.retain(|group_id, group_state| {
if diagnostic_groups
.iter()
.any(|group| group.entries[0].diagnostic.group_id == *group_id)
{
true
} else {
excerpts_to_remove.extend(group_state.excerpts.drain(..));
blocks_to_remove.extend(group_state.blocks.drain(..));
false
}
});
excerpts_to_remove.sort();
excerpts.remove_excerpts(excerpts_to_remove.iter(), excerpts_cx);
excerpts.snapshot(excerpts_cx)
});
path_state.last_excerpt = prev_excerpt_id;
self.editor.update(cx, |editor, cx| {
editor.remove_blocks(blocks_to_remove, cx);
let block_ids = editor.insert_blocks(
blocks_to_add.into_iter().map(|block| {
let (excerpt_id, text_anchor) = block.position;
BlockProperties {
position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor),
height: block.height,
render: block.render,
disposition: block.disposition,
}
}),
cx,
);
editor.replace_blocks(blocks_to_restyle, cx);
let mut block_ids = editor
.insert_blocks(
blocks_to_add.into_iter().map(|block| {
let (excerpt_id, text_anchor) = block.position;
BlockProperties {
position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor),
height: block.height,
render: block.render,
disposition: block.disposition,
}
}),
cx,
)
.into_iter()
.zip(diagnostic_blocks);
let mut block_ids = block_ids.into_iter();
let mut block_counts_by_group = block_counts_by_group.into_iter();
for group in &diagnostic_groups {
let group_id = group.entries[0].diagnostic.group_id;
let block_count = block_counts_by_group.next().unwrap();
let group_state = path_state
.diagnostic_group_states
.get_mut(&group_id)
.unwrap();
group_state
.blocks
.extend(block_ids.by_ref().take(block_count));
for group_state in &mut groups_to_add {
group_state.blocks = block_ids.by_ref().take(group_state.block_count).collect();
}
});
groups.extend(groups_to_add);
groups.sort_unstable_by(|a, b| {
let range_a = &a.primary_diagnostic.range;
let range_b = &b.primary_diagnostic.range;
range_a
.start
.cmp(&range_b.start, &snapshot)
.unwrap()
.then_with(|| range_a.end.cmp(&range_b.end, &snapshot).unwrap())
});
if groups.is_empty() {
self.path_states.remove(path_ix);
}
cx.notify();
}
}
@@ -415,6 +529,24 @@ impl workspace::ItemView for ProjectDiagnosticsEditor {
}
}
fn compare_diagnostics<L: language::ToOffset, R: language::ToOffset>(
lhs: &DiagnosticEntry<L>,
rhs: &DiagnosticEntry<R>,
snapshot: &language::BufferSnapshot,
) -> Ordering {
lhs.range
.start
.to_offset(&snapshot)
.cmp(&rhs.range.start.to_offset(snapshot))
.then_with(|| {
lhs.range
.end
.to_offset(&snapshot)
.cmp(&rhs.range.end.to_offset(snapshot))
})
.then_with(|| lhs.diagnostic.message.cmp(&rhs.diagnostic.message))
}
#[cfg(test)]
mod tests {
use super::*;

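An aside on the excerpt-building step in populate_excerpts above: consecutive entries of a new group are coalesced into one excerpt whenever their context windows would touch (within 1 + CONTEXT_LINE_COUNT * 2 rows), and each excerpt is then padded by CONTEXT_LINE_COUNT rows on either side. A self-contained sketch of just that coalescing rule, using plain row ranges instead of buffer anchors and skipping the end-of-buffer clipping (illustration only):

const CONTEXT_LINE_COUNT: u32 = 1;

// Coalesce sorted diagnostic row ranges into padded excerpt row ranges.
fn coalesce_into_excerpts(mut ranges: Vec<std::ops::Range<u32>>) -> Vec<std::ops::Range<u32>> {
    ranges.sort_by_key(|r| (r.start, r.end));
    let mut excerpts: Vec<std::ops::Range<u32>> = Vec::new();
    let mut pending: Option<std::ops::Range<u32>> = None;
    for range in ranges {
        if let Some(p) = &mut pending {
            // Same condition as the editor code: merge when the next range starts
            // within one row plus both context windows of the pending range.
            if range.start <= p.end + 1 + CONTEXT_LINE_COUNT * 2 {
                p.end = p.end.max(range.end);
                continue;
            }
            // Close the pending excerpt, padded with context rows.
            excerpts.push(p.start.saturating_sub(CONTEXT_LINE_COUNT)..p.end + CONTEXT_LINE_COUNT);
        }
        pending = Some(range);
    }
    if let Some(p) = pending {
        excerpts.push(p.start.saturating_sub(CONTEXT_LINE_COUNT)..p.end + CONTEXT_LINE_COUNT);
    }
    excerpts
}

fn main() {
    // Rows 3 and 5 are close enough to share one excerpt; rows 20..21 get their own.
    let excerpts = coalesce_into_excerpts(vec![3..3, 5..5, 20..21]);
    assert_eq!(excerpts, vec![2..6, 19..22]);
    println!("{:?}", excerpts);
}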

@@ -3423,12 +3423,6 @@ impl Editor {
}
}
pub fn remove_blocks(&mut self, block_ids: HashSet<BlockId>, cx: &mut ViewContext<Self>) {
self.display_map.update(cx, |display_map, cx| {
display_map.remove_blocks(block_ids, cx)
});
}
pub fn insert_blocks<P>(
&mut self,
blocks: impl IntoIterator<Item = BlockProperties<P>>,
@@ -3444,6 +3438,22 @@ impl Editor {
blocks
}
pub fn replace_blocks(
&mut self,
blocks: HashMap<BlockId, RenderBlock>,
cx: &mut ViewContext<Self>,
) {
self.display_map
.update(cx, |display_map, _| display_map.replace_blocks(blocks));
self.request_autoscroll(Autoscroll::Fit, cx);
}
pub fn remove_blocks(&mut self, block_ids: HashSet<BlockId>, cx: &mut ViewContext<Self>) {
self.display_map.update(cx, |display_map, cx| {
display_map.remove_blocks(block_ids, cx)
});
}
pub fn longest_row(&self, cx: &mut MutableAppContext) -> u32 {
self.display_map
.update(cx, |map, cx| map.snapshot(cx))
@@ -3790,12 +3800,13 @@ pub fn diagnostic_block_renderer(
pub fn diagnostic_header_renderer(
buffer: ModelHandle<Buffer>,
diagnostic: Diagnostic,
is_valid: bool,
build_settings: BuildSettings,
) -> RenderBlock {
Arc::new(move |cx| {
let settings = build_settings(cx);
let mut text_style = settings.style.text.clone();
text_style.color = diagnostic_style(diagnostic.severity, true, &settings.style).text;
text_style.color = diagnostic_style(diagnostic.severity, is_valid, &settings.style).text;
let file_path = if let Some(file) = buffer.read(&**cx).file() {
file.path().to_string_lossy().to_string()
} else {


@@ -7,7 +7,6 @@ pub use crate::{
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use collections::hash_map;
use futures::FutureExt as _;
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
use lazy_static::lazy_static;
@@ -69,8 +68,6 @@ pub struct Buffer {
remote_selections: TreeMap<ReplicaId, Arc<[Selection<Anchor>]>>,
diagnostics: DiagnosticSet,
diagnostics_update_count: usize,
clear_invalid_diagnostics_task: Option<Task<()>>,
next_diagnostic_group_id: usize,
language_server: Option<LanguageServerState>,
deferred_ops: OperationQueue<Operation>,
#[cfg(test)]
@@ -363,8 +360,6 @@ impl Buffer {
remote_selections: Default::default(),
diagnostics: Default::default(),
diagnostics_update_count: 0,
next_diagnostic_group_id: 0,
clear_invalid_diagnostics_task: None,
language_server: None,
deferred_ops: OperationQueue::new(),
#[cfg(test)]
@@ -777,7 +772,6 @@ impl Buffer {
.peekable();
let mut last_edit_old_end = PointUtf16::zero();
let mut last_edit_new_end = PointUtf16::zero();
let mut has_disk_based_diagnostics = false;
let mut ix = 0;
'outer: while ix < diagnostics.len() {
let entry = &mut diagnostics[ix];
@@ -793,7 +787,6 @@
.as_ref()
.map_or(false, |source| disk_based_sources.contains(source))
{
has_disk_based_diagnostics = true;
while let Some(edit) = edits_since_save.peek() {
if edit.old.end <= start {
last_edit_old_end = edit.old.end;
@@ -826,145 +819,14 @@
ix += 1;
}
drop(edits_since_save);
let mut merged_diagnostics = Vec::with_capacity(diagnostics.len());
let mut old_diagnostics = self
.diagnostics
.iter()
.map(|entry| {
(
entry,
entry
.diagnostic
.source
.as_ref()
.map_or(false, |source| disk_based_sources.contains(source)),
)
})
.peekable();
let mut new_diagnostics = diagnostics
.into_iter()
.map(|entry| DiagnosticEntry {
range: content.anchor_before(entry.range.start)
..content.anchor_after(entry.range.end),
diagnostic: entry.diagnostic,
})
.peekable();
// Incorporate the *old* diagnostics into the new diagnostics set, in two ways:
// 1. Recycle group ids - diagnostic groups whose primary diagnostic has not
// changed should use the same group id as before, so that downstream code
// can determine which diagnostics are new.
// 2. Preserve disk-based diagnostics - Some diagnostic sources are reported
// on a less frequent basis than others. If these sources are absent from this
// message, then preserve the previous diagnostics for those sources, but mark
// them as invalid, and set a timer to clear them out.
let mut group_id_replacements = HashMap::new();
let mut merged_old_disk_based_diagnostics = false;
loop {
match (old_diagnostics.peek(), new_diagnostics.peek()) {
(None, None) => break,
(None, Some(_)) => {
merged_diagnostics.push(new_diagnostics.next().unwrap());
}
(Some(_), None) => {
let (old_entry, is_disk_based) = old_diagnostics.next().unwrap();
if is_disk_based && !has_disk_based_diagnostics {
let mut old_entry = old_entry.clone();
old_entry.diagnostic.is_valid = false;
merged_old_disk_based_diagnostics = true;
merged_diagnostics.push(old_entry);
}
}
(Some((old, _)), Some(new)) => {
let ordering = Ordering::Equal
.then_with(|| old.range.start.cmp(&new.range.start, content).unwrap())
.then_with(|| new.range.end.cmp(&old.range.end, content).unwrap())
.then_with(|| compare_diagnostics(&old.diagnostic, &new.diagnostic));
match ordering {
Ordering::Less => {
let (old_entry, is_disk_based) = old_diagnostics.next().unwrap();
if is_disk_based && !has_disk_based_diagnostics {
let mut old_entry = old_entry.clone();
old_entry.diagnostic.is_valid = false;
merged_old_disk_based_diagnostics = true;
merged_diagnostics.push(old_entry);
}
}
Ordering::Equal => {
let (old_entry, _) = old_diagnostics.next().unwrap();
let new_entry = new_diagnostics.next().unwrap();
if new_entry.diagnostic.is_primary {
group_id_replacements.insert(
new_entry.diagnostic.group_id,
old_entry.diagnostic.group_id,
);
}
merged_diagnostics.push(new_entry);
}
Ordering::Greater => {
let new_entry = new_diagnostics.next().unwrap();
merged_diagnostics.push(new_entry);
}
}
}
}
}
drop(old_diagnostics);
// Having determined which group ids should be recycled, renumber all of the
// groups. Any new group that does not correspond to an old group receives
// a brand new group id.
let mut next_diagnostic_group_id = self.next_diagnostic_group_id;
for entry in &mut merged_diagnostics {
if entry.diagnostic.is_valid {
match group_id_replacements.entry(entry.diagnostic.group_id) {
hash_map::Entry::Occupied(e) => entry.diagnostic.group_id = *e.get(),
hash_map::Entry::Vacant(e) => {
entry.diagnostic.group_id = post_inc(&mut next_diagnostic_group_id);
e.insert(entry.diagnostic.group_id);
}
}
}
}
self.diagnostics = DiagnosticSet::from_sorted_entries(merged_diagnostics, content);
self.next_diagnostic_group_id = next_diagnostic_group_id;
// If old disk-based diagnostics were included in this new set, then
// set a timer to remove them if enough time passes before the next
// diagnostics update.
if merged_old_disk_based_diagnostics {
self.clear_invalid_diagnostics_task = Some(cx.spawn(|this, mut cx| async move {
smol::Timer::after(Duration::from_secs(2)).await;
this.update(&mut cx, |this, cx| {
let content = this.snapshot();
this.diagnostics = DiagnosticSet::from_sorted_entries(
this.diagnostics
.iter()
.filter(|d| d.diagnostic.is_valid)
.cloned(),
&content,
);
let operation = this.did_update_diagnostics(cx);
this.send_operation(operation, cx);
});
}));
} else if has_disk_based_diagnostics {
self.clear_invalid_diagnostics_task.take();
}
Ok(self.did_update_diagnostics(cx))
}
fn did_update_diagnostics(&mut self, cx: &mut ModelContext<Self>) -> Operation {
self.diagnostics = DiagnosticSet::new(diagnostics, content);
self.diagnostics_update_count += 1;
cx.notify();
cx.emit(Event::DiagnosticsUpdated);
Operation::UpdateDiagnostics {
Ok(Operation::UpdateDiagnostics {
diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
lamport_timestamp: self.text.lamport_clock.tick(),
}
})
}
fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
@@ -1874,10 +1736,7 @@ impl BufferSnapshot {
self.diagnostics.range(search_range, self, true)
}
pub fn diagnostic_groups<O>(&self) -> Vec<DiagnosticGroup<O>>
where
O: FromAnchor + Ord + Copy,
{
pub fn diagnostic_groups(&self) -> Vec<DiagnosticGroup<Anchor>> {
self.diagnostics.groups(self)
}


@@ -104,23 +104,19 @@ impl DiagnosticSet {
})
}
pub fn groups<O>(&self, buffer: &text::BufferSnapshot) -> Vec<DiagnosticGroup<O>>
where
O: FromAnchor + Ord + Copy,
{
pub fn groups(&self, buffer: &text::BufferSnapshot) -> Vec<DiagnosticGroup<Anchor>> {
let mut groups = HashMap::default();
for entry in self.diagnostics.iter() {
let entry = entry.resolve(buffer);
groups
.entry(entry.diagnostic.group_id)
.or_insert(Vec::new())
.push(entry);
.push(entry.clone());
}
let mut groups = groups
.into_values()
.filter_map(|mut entries| {
entries.sort_unstable_by_key(|entry| entry.range.start);
entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start, buffer).unwrap());
entries
.iter()
.position(|entry| entry.diagnostic.is_primary)
@@ -130,7 +126,13 @@ impl DiagnosticSet {
})
})
.collect::<Vec<_>>();
groups.sort_unstable_by_key(|group| group.entries[group.primary_ix].range.start);
groups.sort_unstable_by(|a, b| {
a.entries[a.primary_ix]
.range
.start
.cmp(&b.entries[b.primary_ix].range.start, buffer)
.unwrap()
});
groups
}


@@ -1097,43 +1097,15 @@ impl LocalWorktree {
buffer
});
this.update(&mut cx, |this, cx| {
this.update(&mut cx, |this, _| {
let this = this.as_local_mut().unwrap();
this.open_buffers.insert(buffer.id(), buffer.downgrade());
cx.subscribe(&buffer, |worktree, buffer, event, cx| {
worktree
.as_local_mut()
.unwrap()
.on_buffer_event(buffer, event, cx);
})
.detach();
});
Ok(buffer)
})
}
fn on_buffer_event(
&mut self,
buffer: ModelHandle<Buffer>,
event: &language::Event,
cx: &mut ModelContext<Worktree>,
) {
match event {
language::Event::DiagnosticsUpdated => {
let buffer = buffer.read(cx);
if let Some(path) = buffer.file().map(|file| file.path().clone()) {
let diagnostics = buffer.all_diagnostics();
self.diagnostic_summaries
.insert(path.clone(), DiagnosticSummary::new(diagnostics));
cx.emit(Event::DiagnosticsUpdated(path));
cx.notify();
}
}
_ => {}
}
}
pub fn open_remote_buffer(
&mut self,
envelope: TypedEnvelope<proto::OpenBuffer>,