Merge pull request #393 from zed-industries/autocomplete

Autocomplete
This commit is contained in:
Max Brunsfeld 2022-02-03 15:47:57 -08:00 committed by GitHub
commit 40f9d2fc5d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
43 changed files with 3207 additions and 666 deletions

14
Cargo.lock generated
View File

@ -1547,11 +1547,14 @@ dependencies = [
"collections",
"ctor",
"env_logger",
"fuzzy",
"gpui",
"itertools",
"language",
"lazy_static",
"log",
"lsp",
"ordered-float",
"parking_lot",
"postage",
"project",
@ -1559,6 +1562,7 @@ dependencies = [
"serde",
"smallvec",
"smol",
"snippet",
"sum_tree",
"text",
"theme",
@ -2631,6 +2635,7 @@ dependencies = [
"rand 0.8.3",
"rpc",
"serde",
"serde_json",
"similar",
"smallvec",
"smol",
@ -4415,6 +4420,14 @@ dependencies = [
"pin-project-lite 0.1.12",
]
[[package]]
name = "snippet"
version = "0.1.0"
dependencies = [
"anyhow",
"smallvec",
]
[[package]]
name = "socket2"
version = "0.3.19"
@ -4891,6 +4904,7 @@ dependencies = [
"lazy_static",
"log",
"parking_lot",
"postage",
"rand 0.8.3",
"smallvec",
"sum_tree",

View File

@ -19,9 +19,12 @@ test-support = [
text = { path = "../text" }
clock = { path = "../clock" }
collections = { path = "../collections" }
fuzzy = { path = "../fuzzy" }
gpui = { path = "../gpui" }
language = { path = "../language" }
lsp = { path = "../lsp" }
project = { path = "../project" }
snippet = { path = "../snippet" }
sum_tree = { path = "../sum_tree" }
theme = { path = "../theme" }
util = { path = "../util" }
@ -31,6 +34,7 @@ anyhow = "1.0"
itertools = "0.10"
lazy_static = "1.4"
log = "0.4"
ordered-float = "2.1.1"
parking_lot = "0.11"
postage = { version = "0.4", features = ["futures-traits"] }
rand = { version = "0.8.3", optional = true }
@ -41,6 +45,7 @@ smol = "1.2"
[dev-dependencies]
text = { path = "../text", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
ctor = "0.1"

View File

@ -12,7 +12,6 @@ use language::{Point, Subscription as BufferSubscription};
use std::ops::Range;
use sum_tree::Bias;
use tab_map::TabMap;
use theme::SyntaxTheme;
use wrap_map::WrapMap;
pub use block_map::{
@ -251,16 +250,16 @@ impl DisplaySnapshot {
pub fn text_chunks(&self, display_row: u32) -> impl Iterator<Item = &str> {
self.blocks_snapshot
.chunks(display_row..self.max_point().row() + 1, None)
.chunks(display_row..self.max_point().row() + 1, false)
.map(|h| h.text)
}
pub fn chunks<'a>(
&'a self,
display_rows: Range<u32>,
theme: Option<&'a SyntaxTheme>,
language_aware: bool,
) -> DisplayChunks<'a> {
self.blocks_snapshot.chunks(display_rows, theme)
self.blocks_snapshot.chunks(display_rows, language_aware)
}
pub fn chars_at<'a>(&'a self, point: DisplayPoint) -> impl Iterator<Item = char> + 'a {
@ -1122,8 +1121,10 @@ mod tests {
) -> Vec<(String, Option<Color>)> {
let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
let mut chunks: Vec<(String, Option<Color>)> = Vec::new();
for chunk in snapshot.chunks(rows, Some(theme)) {
let color = chunk.highlight_style.map(|s| s.color);
for chunk in snapshot.chunks(rows, true) {
let color = chunk
.highlight_id
.and_then(|id| id.style(theme).map(|s| s.color));
if let Some((last_chunk, last_color)) = chunks.last_mut() {
if color == *last_color {
last_chunk.push_str(chunk.text);

View File

@ -15,7 +15,6 @@ use std::{
};
use sum_tree::{Bias, SumTree};
use text::{Edit, Point};
use theme::SyntaxTheme;
const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize];
@ -461,16 +460,12 @@ impl<'a> BlockMapWriter<'a> {
impl BlockSnapshot {
#[cfg(test)]
pub fn text(&self) -> String {
self.chunks(0..self.transforms.summary().output_rows, None)
self.chunks(0..self.transforms.summary().output_rows, false)
.map(|chunk| chunk.text)
.collect()
}
pub fn chunks<'a>(
&'a self,
rows: Range<u32>,
theme: Option<&'a SyntaxTheme>,
) -> BlockChunks<'a> {
pub fn chunks<'a>(&'a self, rows: Range<u32>, language_aware: bool) -> BlockChunks<'a> {
let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows);
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>();
let input_end = {
@ -498,7 +493,9 @@ impl BlockSnapshot {
cursor.start().1 .0 + overshoot
};
BlockChunks {
input_chunks: self.wrap_snapshot.chunks(input_start..input_end, theme),
input_chunks: self
.wrap_snapshot
.chunks(input_start..input_end, language_aware),
input_chunk: Default::default(),
transforms: cursor,
output_row: rows.start,
@ -715,7 +712,7 @@ impl<'a> Iterator for BlockChunks<'a> {
return Some(Chunk {
text: unsafe { std::str::from_utf8_unchecked(&NEWLINES[..line_count as usize]) },
highlight_style: None,
highlight_id: None,
diagnostic: None,
});
}
@ -1340,7 +1337,7 @@ mod tests {
for start_row in 0..expected_row_count {
let expected_text = expected_lines[start_row..].join("\n");
let actual_text = blocks_snapshot
.chunks(start_row as u32..expected_row_count as u32, None)
.chunks(start_row as u32..expected_row_count as u32, false)
.map(|chunk| chunk.text)
.collect::<String>();
assert_eq!(

View File

@ -11,7 +11,6 @@ use std::{
sync::atomic::{AtomicUsize, Ordering::SeqCst},
};
use sum_tree::{Bias, Cursor, FilterCursor, SumTree};
use theme::SyntaxTheme;
pub trait ToFoldPoint {
fn to_fold_point(&self, snapshot: &FoldSnapshot, bias: Bias) -> FoldPoint;
@ -490,7 +489,7 @@ impl FoldSnapshot {
#[cfg(test)]
pub fn text(&self) -> String {
self.chunks(FoldOffset(0)..self.len(), None)
self.chunks(FoldOffset(0)..self.len(), false)
.map(|c| c.text)
.collect()
}
@ -630,15 +629,11 @@ impl FoldSnapshot {
pub fn chars_at(&self, start: FoldPoint) -> impl '_ + Iterator<Item = char> {
let start = start.to_offset(self);
self.chunks(start..self.len(), None)
self.chunks(start..self.len(), false)
.flat_map(|chunk| chunk.text.chars())
}
pub fn chunks<'a>(
&'a self,
range: Range<FoldOffset>,
theme: Option<&'a SyntaxTheme>,
) -> FoldChunks<'a> {
pub fn chunks<'a>(&'a self, range: Range<FoldOffset>, language_aware: bool) -> FoldChunks<'a> {
let mut transform_cursor = self.transforms.cursor::<(FoldOffset, usize)>();
transform_cursor.seek(&range.end, Bias::Right, &());
@ -651,7 +646,9 @@ impl FoldSnapshot {
FoldChunks {
transform_cursor,
buffer_chunks: self.buffer_snapshot.chunks(buffer_start..buffer_end, theme),
buffer_chunks: self
.buffer_snapshot
.chunks(buffer_start..buffer_end, language_aware),
buffer_chunk: None,
buffer_offset: buffer_start,
output_offset: range.start.0,
@ -976,7 +973,7 @@ impl<'a> Iterator for FoldChunks<'a> {
self.output_offset += output_text.len();
return Some(Chunk {
text: output_text,
highlight_style: None,
highlight_id: None,
diagnostic: None,
});
}
@ -1398,7 +1395,7 @@ mod tests {
let text = &expected_text[start.0..end.0];
assert_eq!(
snapshot
.chunks(start..end, None)
.chunks(start..end, false)
.map(|c| c.text)
.collect::<String>(),
text,

View File

@ -5,7 +5,6 @@ use parking_lot::Mutex;
use std::{cmp, mem, ops::Range};
use sum_tree::Bias;
use text::Point;
use theme::SyntaxTheme;
pub struct TabMap(Mutex<TabSnapshot>);
@ -35,7 +34,7 @@ impl TabMap {
let mut delta = 0;
for chunk in old_snapshot
.fold_snapshot
.chunks(fold_edit.old.end..max_offset, None)
.chunks(fold_edit.old.end..max_offset, false)
{
let patterns: &[_] = &['\t', '\n'];
if let Some(ix) = chunk.text.find(patterns) {
@ -110,7 +109,7 @@ impl TabSnapshot {
self.max_point()
};
for c in self
.chunks(range.start..line_end, None)
.chunks(range.start..line_end, false)
.flat_map(|chunk| chunk.text.chars())
{
if c == '\n' {
@ -124,7 +123,7 @@ impl TabSnapshot {
last_line_chars = first_line_chars;
} else {
for _ in self
.chunks(TabPoint::new(range.end.row(), 0)..range.end, None)
.chunks(TabPoint::new(range.end.row(), 0)..range.end, false)
.flat_map(|chunk| chunk.text.chars())
{
last_line_chars += 1;
@ -144,11 +143,7 @@ impl TabSnapshot {
self.fold_snapshot.version
}
pub fn chunks<'a>(
&'a self,
range: Range<TabPoint>,
theme: Option<&'a SyntaxTheme>,
) -> TabChunks<'a> {
pub fn chunks<'a>(&'a self, range: Range<TabPoint>, language_aware: bool) -> TabChunks<'a> {
let (input_start, expanded_char_column, to_next_stop) =
self.to_fold_point(range.start, Bias::Left);
let input_start = input_start.to_offset(&self.fold_snapshot);
@ -163,7 +158,9 @@ impl TabSnapshot {
};
TabChunks {
fold_chunks: self.fold_snapshot.chunks(input_start..input_end, theme),
fold_chunks: self
.fold_snapshot
.chunks(input_start..input_end, language_aware),
column: expanded_char_column,
output_position: range.start.0,
max_output_position: range.end.0,
@ -182,7 +179,7 @@ impl TabSnapshot {
#[cfg(test)]
pub fn text(&self) -> String {
self.chunks(TabPoint::zero()..self.max_point(), None)
self.chunks(TabPoint::zero()..self.max_point(), false)
.map(|chunk| chunk.text)
.collect()
}
@ -495,7 +492,7 @@ mod tests {
assert_eq!(
expected_text,
tabs_snapshot
.chunks(start..end, None)
.chunks(start..end, false)
.map(|c| c.text)
.collect::<String>(),
"chunks({:?}..{:?})",

View File

@ -13,7 +13,6 @@ use smol::future::yield_now;
use std::{cmp, collections::VecDeque, mem, ops::Range, time::Duration};
use sum_tree::{Bias, Cursor, SumTree};
use text::Patch;
use theme::SyntaxTheme;
pub use super::tab_map::TextSummary;
pub type WrapEdit = text::Edit<u32>;
@ -436,7 +435,7 @@ impl WrapSnapshot {
let mut remaining = None;
let mut chunks = new_tab_snapshot.chunks(
TabPoint::new(edit.new_rows.start, 0)..new_tab_snapshot.max_point(),
None,
false,
);
let mut edit_transforms = Vec::<Transform>::new();
for _ in edit.new_rows.start..edit.new_rows.end {
@ -562,15 +561,11 @@ impl WrapSnapshot {
}
pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
self.chunks(wrap_row..self.max_point().row() + 1, None)
self.chunks(wrap_row..self.max_point().row() + 1, false)
.map(|h| h.text)
}
pub fn chunks<'a>(
&'a self,
rows: Range<u32>,
theme: Option<&'a SyntaxTheme>,
) -> WrapChunks<'a> {
pub fn chunks<'a>(&'a self, rows: Range<u32>, language_aware: bool) -> WrapChunks<'a> {
let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0);
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>();
@ -583,7 +578,9 @@ impl WrapSnapshot {
.to_tab_point(output_end)
.min(self.tab_snapshot.max_point());
WrapChunks {
input_chunks: self.tab_snapshot.chunks(input_start..input_end, theme),
input_chunks: self
.tab_snapshot
.chunks(input_start..input_end, language_aware),
input_chunk: Default::default(),
output_position: output_start,
max_output_row: rows.end,
@ -1295,7 +1292,7 @@ mod tests {
}
let actual_text = self
.chunks(start_row..end_row, None)
.chunks(start_row..end_row, true)
.map(|c| c.text)
.collect::<String>();
assert_eq!(

File diff suppressed because it is too large Load Diff

View File

@ -300,7 +300,7 @@ impl EditorElement {
&mut self,
bounds: RectF,
visible_bounds: RectF,
layout: &LayoutState,
layout: &mut LayoutState,
cx: &mut PaintContext,
) {
let view = self.view(cx.app);
@ -392,6 +392,28 @@ impl EditorElement {
}
cx.scene.pop_layer();
if let Some((position, completions_list)) = layout.completions.as_mut() {
cx.scene.push_stacking_context(None);
let cursor_row_layout = &layout.line_layouts[(position.row() - start_row) as usize];
let x = cursor_row_layout.x_for_index(position.column() as usize) - scroll_left;
let y = (position.row() + 1) as f32 * layout.line_height - scroll_top;
let mut list_origin = content_origin + vec2f(x, y);
let list_height = completions_list.size().y();
if list_origin.y() + list_height > bounds.lower_left().y() {
list_origin.set_y(list_origin.y() - layout.line_height - list_height);
}
completions_list.paint(
list_origin,
RectF::from_points(Vector2F::zero(), vec2f(f32::MAX, f32::MAX)), // Let content bleed outside of editor
cx,
);
cx.scene.pop_stacking_context();
}
cx.scene.pop_layer();
}
@ -576,31 +598,32 @@ impl EditorElement {
.collect();
} else {
let style = &self.settings.style;
let chunks = snapshot
.chunks(rows.clone(), Some(&style.syntax))
.map(|chunk| {
let highlight = if let Some(severity) = chunk.diagnostic {
let diagnostic_style = super::diagnostic_style(severity, true, style);
let underline = Some(Underline {
color: diagnostic_style.message.text.color,
thickness: 1.0.into(),
squiggly: true,
});
if let Some(mut highlight) = chunk.highlight_style {
highlight.underline = underline;
Some(highlight)
} else {
Some(HighlightStyle {
underline,
color: style.text.color,
font_properties: style.text.font_properties,
})
}
let chunks = snapshot.chunks(rows.clone(), true).map(|chunk| {
let highlight_style = chunk
.highlight_id
.and_then(|highlight_id| highlight_id.style(&style.syntax));
let highlight = if let Some(severity) = chunk.diagnostic {
let diagnostic_style = super::diagnostic_style(severity, true, style);
let underline = Some(Underline {
color: diagnostic_style.message.text.color,
thickness: 1.0.into(),
squiggly: true,
});
if let Some(mut highlight) = highlight_style {
highlight.underline = underline;
Some(highlight)
} else {
chunk.highlight_style
};
(chunk.text, highlight)
});
Some(HighlightStyle {
underline,
color: style.text.color,
font_properties: style.text.font_properties,
})
}
} else {
highlight_style
};
(chunk.text, highlight)
});
layout_highlighted_chunks(
chunks,
&style.text,
@ -667,8 +690,8 @@ impl EditorElement {
}
impl Element for EditorElement {
type LayoutState = Option<LayoutState>;
type PaintState = Option<PaintState>;
type LayoutState = LayoutState;
type PaintState = PaintState;
fn layout(
&mut self,
@ -836,6 +859,7 @@ impl Element for EditorElement {
max_row.saturating_sub(1) as f32,
);
let mut completions = None;
self.update_view(cx.app, |view, cx| {
let clamped = view.clamp_scroll_left(scroll_max.x());
let autoscrolled;
@ -855,8 +879,33 @@ impl Element for EditorElement {
if clamped || autoscrolled {
snapshot = view.snapshot(cx);
}
if view.has_completions() {
let newest_selection_head = view
.newest_selection::<usize>(&snapshot.buffer_snapshot)
.head()
.to_display_point(&snapshot);
if (start_row..end_row).contains(&newest_selection_head.row()) {
let list = view.render_completions(cx).unwrap();
completions = Some((newest_selection_head, list));
}
}
});
if let Some((_, completions_list)) = completions.as_mut() {
completions_list.layout(
SizeConstraint {
min: Vector2F::zero(),
max: vec2f(
f32::INFINITY,
(12. * line_height).min((size.y() - line_height) / 2.),
),
},
cx,
);
}
let blocks = self.layout_blocks(
start_row..end_row,
&snapshot,
@ -873,7 +922,7 @@ impl Element for EditorElement {
(
size,
Some(LayoutState {
LayoutState {
size,
scroll_max,
gutter_size,
@ -891,7 +940,8 @@ impl Element for EditorElement {
em_width,
em_advance,
selections,
}),
completions,
},
)
}
@ -902,7 +952,6 @@ impl Element for EditorElement {
layout: &mut Self::LayoutState,
cx: &mut PaintContext,
) -> Self::PaintState {
let layout = layout.as_mut()?;
cx.scene.push_layer(Some(bounds));
let gutter_bounds = RectF::new(bounds.origin(), layout.gutter_size);
@ -925,46 +974,48 @@ impl Element for EditorElement {
cx.scene.pop_layer();
Some(PaintState {
PaintState {
bounds,
gutter_bounds,
text_bounds,
})
}
}
fn dispatch_event(
&mut self,
event: &Event,
_: RectF,
layout: &mut Self::LayoutState,
paint: &mut Self::PaintState,
layout: &mut LayoutState,
paint: &mut PaintState,
cx: &mut EventContext,
) -> bool {
if let (Some(layout), Some(paint)) = (layout, paint) {
match event {
Event::LeftMouseDown {
position,
alt,
shift,
click_count,
..
} => self.mouse_down(*position, *alt, *shift, *click_count, layout, paint, cx),
Event::LeftMouseUp { position } => self.mouse_up(*position, cx),
Event::LeftMouseDragged { position } => {
self.mouse_dragged(*position, layout, paint, cx)
}
Event::ScrollWheel {
position,
delta,
precise,
} => self.scroll(*position, *delta, *precise, layout, paint, cx),
Event::KeyDown {
chars, keystroke, ..
} => self.key_down(chars, keystroke, cx),
_ => false,
if let Some((_, completion_list)) = &mut layout.completions {
if completion_list.dispatch_event(event, cx) {
return true;
}
} else {
false
}
match event {
Event::LeftMouseDown {
position,
alt,
shift,
click_count,
..
} => self.mouse_down(*position, *alt, *shift, *click_count, layout, paint, cx),
Event::LeftMouseUp { position } => self.mouse_up(*position, cx),
Event::LeftMouseDragged { position } => {
self.mouse_dragged(*position, layout, paint, cx)
}
Event::ScrollWheel {
position,
delta,
precise,
} => self.scroll(*position, *delta, *precise, layout, paint, cx),
Event::KeyDown {
chars, keystroke, ..
} => self.key_down(chars, keystroke, cx),
_ => false,
}
}
@ -1000,6 +1051,7 @@ pub struct LayoutState {
highlighted_ranges: Vec<(Range<DisplayPoint>, Color)>,
selections: HashMap<ReplicaId, Vec<text::Selection<DisplayPoint>>>,
text_offset: Vector2F,
completions: Option<(DisplayPoint, ElementBox)>,
}
fn layout_line(

View File

@ -1,7 +1,7 @@
use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint};
use crate::{char_kind, CharKind, ToPoint};
use anyhow::Result;
use std::{cmp, ops::Range};
use std::ops::Range;
pub fn left(map: &DisplaySnapshot, mut point: DisplayPoint) -> Result<DisplayPoint> {
if point.column() > 0 {
@ -183,36 +183,20 @@ pub fn is_inside_word(map: &DisplaySnapshot, point: DisplayPoint) -> bool {
prev_char_kind.zip(next_char_kind) == Some((CharKind::Word, CharKind::Word))
}
pub fn surrounding_word(map: &DisplaySnapshot, point: DisplayPoint) -> Range<DisplayPoint> {
let mut start = map.clip_point(point, Bias::Left).to_offset(map, Bias::Left);
let mut end = start;
let text = &map.buffer_snapshot;
let mut next_chars = text.chars_at(start).peekable();
let mut prev_chars = text.reversed_chars_at(start).peekable();
let word_kind = cmp::max(
prev_chars.peek().copied().map(char_kind),
next_chars.peek().copied().map(char_kind),
);
for ch in prev_chars {
if Some(char_kind(ch)) == word_kind {
start -= ch.len_utf8();
} else {
break;
}
}
for ch in next_chars {
if Some(char_kind(ch)) == word_kind {
end += ch.len_utf8();
} else {
break;
}
}
start.to_point(&map.buffer_snapshot).to_display_point(map)
..end.to_point(&map.buffer_snapshot).to_display_point(map)
pub fn surrounding_word(map: &DisplaySnapshot, position: DisplayPoint) -> Range<DisplayPoint> {
let position = map
.clip_point(position, Bias::Left)
.to_offset(map, Bias::Left);
let (range, _) = map.buffer_snapshot.surrounding_word(position);
let start = range
.start
.to_point(&map.buffer_snapshot)
.to_display_point(map);
let end = range
.end
.to_point(&map.buffer_snapshot)
.to_display_point(map);
start..end
}
#[cfg(test)]
@ -406,59 +390,59 @@ mod tests {
assert_eq!(
surrounding_word(&snapshot, DisplayPoint::new(0, 0)),
DisplayPoint::new(0, 0)..DisplayPoint::new(0, 5)
DisplayPoint::new(0, 0)..DisplayPoint::new(0, 5),
);
assert_eq!(
surrounding_word(&snapshot, DisplayPoint::new(0, 2)),
DisplayPoint::new(0, 0)..DisplayPoint::new(0, 5)
DisplayPoint::new(0, 0)..DisplayPoint::new(0, 5),
);
assert_eq!(
surrounding_word(&snapshot, DisplayPoint::new(0, 5)),
DisplayPoint::new(0, 0)..DisplayPoint::new(0, 5)
DisplayPoint::new(0, 0)..DisplayPoint::new(0, 5),
);
assert_eq!(
surrounding_word(&snapshot, DisplayPoint::new(0, 6)),
DisplayPoint::new(0, 6)..DisplayPoint::new(0, 11)
DisplayPoint::new(0, 6)..DisplayPoint::new(0, 11),
);
assert_eq!(
surrounding_word(&snapshot, DisplayPoint::new(0, 7)),
DisplayPoint::new(0, 6)..DisplayPoint::new(0, 11)
DisplayPoint::new(0, 6)..DisplayPoint::new(0, 11),
);
assert_eq!(
surrounding_word(&snapshot, DisplayPoint::new(0, 11)),
DisplayPoint::new(0, 6)..DisplayPoint::new(0, 11)
DisplayPoint::new(0, 6)..DisplayPoint::new(0, 11),
);
assert_eq!(
surrounding_word(&snapshot, DisplayPoint::new(0, 13)),
DisplayPoint::new(0, 11)..DisplayPoint::new(0, 14)
DisplayPoint::new(0, 11)..DisplayPoint::new(0, 14),
);
assert_eq!(
surrounding_word(&snapshot, DisplayPoint::new(0, 14)),
DisplayPoint::new(0, 14)..DisplayPoint::new(0, 19)
DisplayPoint::new(0, 14)..DisplayPoint::new(0, 19),
);
assert_eq!(
surrounding_word(&snapshot, DisplayPoint::new(0, 17)),
DisplayPoint::new(0, 14)..DisplayPoint::new(0, 19)
DisplayPoint::new(0, 14)..DisplayPoint::new(0, 19),
);
assert_eq!(
surrounding_word(&snapshot, DisplayPoint::new(0, 19)),
DisplayPoint::new(0, 14)..DisplayPoint::new(0, 19)
DisplayPoint::new(0, 14)..DisplayPoint::new(0, 19),
);
assert_eq!(
surrounding_word(&snapshot, DisplayPoint::new(1, 0)),
DisplayPoint::new(1, 0)..DisplayPoint::new(1, 4)
DisplayPoint::new(1, 0)..DisplayPoint::new(1, 4),
);
assert_eq!(
surrounding_word(&snapshot, DisplayPoint::new(1, 1)),
DisplayPoint::new(1, 0)..DisplayPoint::new(1, 4)
DisplayPoint::new(1, 0)..DisplayPoint::new(1, 4),
);
assert_eq!(
surrounding_word(&snapshot, DisplayPoint::new(1, 6)),
DisplayPoint::new(1, 4)..DisplayPoint::new(1, 7)
DisplayPoint::new(1, 4)..DisplayPoint::new(1, 7),
);
assert_eq!(
surrounding_word(&snapshot, DisplayPoint::new(1, 7)),
DisplayPoint::new(1, 4)..DisplayPoint::new(1, 7)
DisplayPoint::new(1, 4)..DisplayPoint::new(1, 7),
);
}
}

View File

@ -5,6 +5,7 @@ use anyhow::Result;
use clock::ReplicaId;
use collections::{HashMap, HashSet};
use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
pub use language::Completion;
use language::{
Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Outline,
OutlineItem, Selection, ToOffset as _, ToPoint as _, TransactionId,
@ -49,6 +50,14 @@ struct History {
group_interval: Duration,
}
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
pub enum CharKind {
Newline,
Punctuation,
Whitespace,
Word,
}
struct Transaction {
id: usize,
buffer_transactions: HashSet<(usize, text::TransactionId)>,
@ -116,7 +125,7 @@ pub struct MultiBufferChunks<'a> {
range: Range<usize>,
excerpts: Cursor<'a, Excerpt, usize>,
excerpt_chunks: Option<ExcerptChunks<'a>>,
theme: Option<&'a SyntaxTheme>,
language_aware: bool,
}
pub struct MultiBufferBytes<'a> {
@ -304,9 +313,9 @@ impl MultiBuffer {
.map(|range| range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot));
return buffer.update(cx, |buffer, cx| {
if autoindent {
buffer.edit_with_autoindent(ranges, new_text, cx)
buffer.edit_with_autoindent(ranges, new_text, cx);
} else {
buffer.edit(ranges, new_text, cx)
buffer.edit(ranges, new_text, cx);
}
});
}
@ -847,6 +856,103 @@ impl MultiBuffer {
})
}
pub fn completions<T>(
&self,
position: T,
cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<Completion<Anchor>>>>
where
T: ToOffset,
{
let anchor = self.read(cx).anchor_before(position);
let buffer = self.buffers.borrow()[&anchor.buffer_id].buffer.clone();
let completions =
buffer.update(cx, |buffer, cx| buffer.completions(anchor.text_anchor, cx));
cx.spawn(|this, cx| async move {
completions.await.map(|completions| {
let snapshot = this.read_with(&cx, |buffer, cx| buffer.snapshot(cx));
completions
.into_iter()
.map(|completion| Completion {
old_range: snapshot.anchor_in_excerpt(
anchor.excerpt_id.clone(),
completion.old_range.start,
)
..snapshot.anchor_in_excerpt(
anchor.excerpt_id.clone(),
completion.old_range.end,
),
new_text: completion.new_text,
label: completion.label,
lsp_completion: completion.lsp_completion,
})
.collect()
})
})
}
pub fn is_completion_trigger<T>(&self, position: T, text: &str, cx: &AppContext) -> bool
where
T: ToOffset,
{
let mut chars = text.chars();
let char = if let Some(char) = chars.next() {
char
} else {
return false;
};
if chars.next().is_some() {
return false;
}
if char.is_alphanumeric() || char == '_' {
return true;
}
let snapshot = self.snapshot(cx);
let anchor = snapshot.anchor_before(position);
let buffer = self.buffers.borrow()[&anchor.buffer_id].buffer.clone();
buffer
.read(cx)
.completion_triggers()
.iter()
.any(|string| string == text)
}
pub fn apply_additional_edits_for_completion(
&self,
completion: Completion<Anchor>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let buffer = if let Some(buffer_state) = self
.buffers
.borrow()
.get(&completion.old_range.start.buffer_id)
{
buffer_state.buffer.clone()
} else {
return Task::ready(Ok(()));
};
let apply_edits = buffer.update(cx, |buffer, cx| {
buffer.apply_additional_edits_for_completion(
Completion {
old_range: completion.old_range.start.text_anchor
..completion.old_range.end.text_anchor,
new_text: completion.new_text,
label: completion.label,
lsp_completion: completion.lsp_completion,
},
true,
cx,
)
});
cx.foreground().spawn(async move {
apply_edits.await?;
Ok(())
})
}
pub fn language<'a>(&self, cx: &'a AppContext) -> Option<&'a Arc<Language>> {
self.buffers
.borrow()
@ -1007,7 +1113,7 @@ impl Entity for MultiBuffer {
impl MultiBufferSnapshot {
pub fn text(&self) -> String {
self.chunks(0..self.len(), None)
self.chunks(0..self.len(), false)
.map(|chunk| chunk.text)
.collect()
}
@ -1059,7 +1165,7 @@ impl MultiBufferSnapshot {
&'a self,
range: Range<T>,
) -> impl Iterator<Item = &'a str> {
self.chunks(range, None).map(|chunk| chunk.text)
self.chunks(range, false).map(|chunk| chunk.text)
}
pub fn is_line_blank(&self, row: u32) -> bool {
@ -1081,6 +1187,35 @@ impl MultiBufferSnapshot {
.eq(needle.bytes())
}
pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
let mut start = start.to_offset(self);
let mut end = start;
let mut next_chars = self.chars_at(start).peekable();
let mut prev_chars = self.reversed_chars_at(start).peekable();
let word_kind = cmp::max(
prev_chars.peek().copied().map(char_kind),
next_chars.peek().copied().map(char_kind),
);
for ch in prev_chars {
if Some(char_kind(ch)) == word_kind {
start -= ch.len_utf8();
} else {
break;
}
}
for ch in next_chars {
if Some(char_kind(ch)) == word_kind {
end += ch.len_utf8();
} else {
break;
}
}
(start..end, word_kind)
}
fn as_singleton(&self) -> Option<&Excerpt> {
if self.singleton {
self.excerpts.iter().next()
@ -1179,6 +1314,12 @@ impl MultiBufferSnapshot {
}
}
pub fn bytes_at<'a, T: ToOffset>(&'a self, position: T) -> impl 'a + Iterator<Item = u8> {
self.bytes_in_range(position.to_offset(self)..self.len())
.flatten()
.copied()
}
pub fn buffer_rows<'a>(&'a self, start_row: u32) -> MultiBufferRows<'a> {
let mut result = MultiBufferRows {
buffer_row_range: 0..0,
@ -1191,14 +1332,14 @@ impl MultiBufferSnapshot {
pub fn chunks<'a, T: ToOffset>(
&'a self,
range: Range<T>,
theme: Option<&'a SyntaxTheme>,
language_aware: bool,
) -> MultiBufferChunks<'a> {
let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut chunks = MultiBufferChunks {
range: range.clone(),
excerpts: self.excerpts.cursor(),
excerpt_chunks: None,
theme,
language_aware,
};
chunks.seek(range.start);
chunks
@ -1408,6 +1549,13 @@ impl MultiBufferSnapshot {
D: TextDimension + Ord + Sub<D, Output = D>,
I: 'a + IntoIterator<Item = &'a Anchor>,
{
if let Some(excerpt) = self.as_singleton() {
return excerpt
.buffer
.summaries_for_anchors(anchors.into_iter().map(|a| &a.text_anchor))
.collect();
}
let mut anchors = anchors.into_iter().peekable();
let mut cursor = self.excerpts.cursor::<ExcerptSummary>();
let mut summaries = Vec::new();
@ -1984,7 +2132,7 @@ impl Excerpt {
fn chunks_in_range<'a>(
&'a self,
range: Range<usize>,
theme: Option<&'a SyntaxTheme>,
language_aware: bool,
) -> ExcerptChunks<'a> {
let content_start = self.range.start.to_offset(&self.buffer);
let chunks_start = content_start + range.start;
@ -1999,7 +2147,7 @@ impl Excerpt {
0
};
let content_chunks = self.buffer.chunks(chunks_start..chunks_end, theme);
let content_chunks = self.buffer.chunks(chunks_start..chunks_end, language_aware);
ExcerptChunks {
content_chunks,
@ -2198,7 +2346,7 @@ impl<'a> MultiBufferChunks<'a> {
if let Some(excerpt) = self.excerpts.item() {
self.excerpt_chunks = Some(excerpt.chunks_in_range(
self.range.start - self.excerpts.start()..self.range.end - self.excerpts.start(),
self.theme,
self.language_aware,
));
} else {
self.excerpt_chunks = None;
@ -2218,9 +2366,10 @@ impl<'a> Iterator for MultiBufferChunks<'a> {
} else {
self.excerpts.next(&());
let excerpt = self.excerpts.item()?;
self.excerpt_chunks = Some(
excerpt.chunks_in_range(0..self.range.end - self.excerpts.start(), self.theme),
);
self.excerpt_chunks = Some(excerpt.chunks_in_range(
0..self.range.end - self.excerpts.start(),
self.language_aware,
));
self.next()
}
}
@ -2344,6 +2493,18 @@ impl ToPoint for Point {
}
}
pub fn char_kind(c: char) -> CharKind {
if c == '\n' {
CharKind::Newline
} else if c.is_whitespace() {
CharKind::Whitespace
} else if c.is_alphanumeric() || c == '_' {
CharKind::Word
} else {
CharKind::Punctuation
}
}
#[cfg(test)]
mod tests {
use super::*;
@ -2963,7 +3124,7 @@ mod tests {
let mut buffer_point_utf16 = buffer_start_point_utf16;
for ch in buffer
.snapshot()
.chunks(buffer_range.clone(), None)
.chunks(buffer_range.clone(), false)
.flat_map(|c| c.text.chars())
{
for _ in 0..ch.len_utf8() {

View File

@ -106,7 +106,7 @@ impl Anchor {
}
impl ToOffset for Anchor {
fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize {
fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> usize {
self.summary(snapshot)
}
}

View File

@ -607,7 +607,7 @@ async fn regex_search(
let mut line = String::new();
let mut line_offset = 0;
for (chunk_ix, chunk) in buffer
.chunks(0..buffer.len(), None)
.chunks(0..buffer.len(), false)
.map(|c| c.text)
.chain(["\n"])
.enumerate()

View File

@ -98,6 +98,16 @@ impl<'a> MatchCandidate for PathMatchCandidate<'a> {
}
}
impl StringMatchCandidate {
pub fn new(id: usize, string: String) -> Self {
Self {
id,
char_bag: CharBag::from(string.as_str()),
string,
}
}
}
impl<'a> MatchCandidate for &'a StringMatchCandidate {
fn has_chars(&self, bag: CharBag) -> bool {
self.char_bag.is_superset(bag)
@ -171,6 +181,10 @@ pub async fn match_strings(
cancel_flag: &AtomicBool,
background: Arc<executor::Background>,
) -> Vec<StringMatch> {
if candidates.is_empty() {
return Default::default();
}
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();

View File

@ -12,7 +12,7 @@ use serde::{
};
use serde_json::json;
#[derive(Clone, Copy, Default, PartialEq, Eq, Hash)]
#[derive(Clone, Copy, Default, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[repr(transparent)]
pub struct Color(ColorU);

View File

@ -51,6 +51,7 @@ where
append_items: F,
padding_top: f32,
padding_bottom: f32,
get_width_from_item: Option<usize>,
}
impl<F> UniformList<F>
@ -64,9 +65,15 @@ where
append_items,
padding_top: 0.,
padding_bottom: 0.,
get_width_from_item: None,
}
}
pub fn with_width_from_item(mut self, item_ix: Option<usize>) -> Self {
self.get_width_from_item = item_ix;
self
}
pub fn with_padding_top(mut self, padding: f32) -> Self {
self.padding_top = padding;
self
@ -155,46 +162,70 @@ where
"UniformList does not support being rendered with an unconstrained height"
);
}
let mut size = constraint.max;
let mut item_constraint =
SizeConstraint::new(vec2f(size.x(), 0.0), vec2f(size.x(), f32::INFINITY));
let mut item_height = 0.;
let mut scroll_max = 0.;
let mut items = Vec::new();
(self.append_items)(0..1, &mut items, cx);
if let Some(first_item) = items.first_mut() {
let mut item_size = first_item.layout(item_constraint, cx);
item_size.set_x(size.x());
item_constraint.min = item_size;
item_constraint.max = item_size;
item_height = item_size.y();
let scroll_height = self.item_count as f32 * item_height;
if scroll_height < size.y() {
size.set_y(size.y().min(scroll_height).max(constraint.min.y()));
}
let scroll_height =
item_height * self.item_count as f32 + self.padding_top + self.padding_bottom;
scroll_max = (scroll_height - size.y()).max(0.);
self.autoscroll(scroll_max, size.y(), item_height);
items.clear();
let start = cmp::min(
((self.scroll_top() - self.padding_top) / item_height) as usize,
self.item_count,
if self.item_count == 0 {
return (
constraint.min,
LayoutState {
item_height: 0.,
scroll_max: 0.,
items,
},
);
let end = cmp::min(
self.item_count,
start + (size.y() / item_height).ceil() as usize + 1,
);
(self.append_items)(start..end, &mut items, cx);
for item in &mut items {
item.layout(item_constraint, cx);
}
}
let mut size = constraint.max;
let mut item_size;
if let Some(sample_item_ix) = self.get_width_from_item {
(self.append_items)(sample_item_ix..sample_item_ix + 1, &mut items, cx);
let sample_item = items.get_mut(0).unwrap();
item_size = sample_item.layout(constraint, cx);
size.set_x(item_size.x());
} else {
size = constraint.min;
(self.append_items)(0..1, &mut items, cx);
let first_item = items.first_mut().unwrap();
item_size = first_item.layout(
SizeConstraint::new(
vec2f(constraint.max.x(), 0.0),
vec2f(constraint.max.x(), f32::INFINITY),
),
cx,
);
item_size.set_x(size.x());
}
let item_constraint = SizeConstraint {
min: item_size,
max: vec2f(constraint.max.x(), item_size.y()),
};
let item_height = item_size.y();
let scroll_height = self.item_count as f32 * item_height;
if scroll_height < size.y() {
size.set_y(size.y().min(scroll_height).max(constraint.min.y()));
}
let scroll_height =
item_height * self.item_count as f32 + self.padding_top + self.padding_bottom;
let scroll_max = (scroll_height - size.y()).max(0.);
self.autoscroll(scroll_max, size.y(), item_height);
let start = cmp::min(
((self.scroll_top() - self.padding_top) / item_height) as usize,
self.item_count,
);
let end = cmp::min(
self.item_count,
start + (size.y() / item_height).ceil() as usize + 1,
);
items.clear();
(self.append_items)(start..end, &mut items, cx);
for item in &mut items {
let item_size = item.layout(item_constraint, cx);
if item_size.x() > size.x() {
size.set_x(item_size.x());
}
}
(

View File

@ -5,7 +5,7 @@ use crate::{
text_layout::RunStyle,
FontCache,
};
use anyhow::anyhow;
use anyhow::{anyhow, Result};
pub use font_kit::{
metrics::Metrics,
properties::{Properties, Stretch, Style, Weight},
@ -107,7 +107,7 @@ impl TextStyle {
underline: Option<Underline>,
color: Color,
font_cache: &FontCache,
) -> anyhow::Result<Self> {
) -> Result<Self> {
let font_family_name = font_family_name.into();
let font_family_id = font_cache.load_family(&[&font_family_name])?;
let font_id = font_cache.select_font(font_family_id, &font_properties)?;
@ -127,6 +127,15 @@ impl TextStyle {
self
}
/// Overlay a `HighlightStyle` onto this text style, returning the
/// combined style. A new font is selected from the cache only when the
/// highlight asks for different font properties.
pub fn highlight(mut self, style: HighlightStyle, font_cache: &FontCache) -> Result<Self> {
    let font_changed = self.font_properties != style.font_properties;
    if font_changed {
        self.font_id = font_cache.select_font(self.font_family_id, &style.font_properties)?;
    }
    self.color = style.color;
    self.underline = style.underline;
    Ok(self)
}
pub fn to_run(&self) -> RunStyle {
RunStyle {
font_id: self.font_id,
@ -135,7 +144,7 @@ impl TextStyle {
}
}
fn from_json(json: TextStyleJson) -> anyhow::Result<Self> {
fn from_json(json: TextStyleJson) -> Result<Self> {
FONT_CACHE.with(|font_cache| {
if let Some(font_cache) = font_cache.borrow().as_ref() {
let font_properties = properties_from_json(json.weight, json.italic);

View File

@ -34,9 +34,11 @@ impl Event {
const ESCAPE_KEY: u16 = 0x1b;
const TAB_KEY: u16 = 0x09;
const SHIFT_TAB_KEY: u16 = 0x19;
const SPACE_KEY: u16 = b' ' as u16;
#[allow(non_upper_case_globals)]
match first_char as u16 {
SPACE_KEY => "space",
BACKSPACE_KEY => "backspace",
ENTER_KEY => "enter",
ESCAPE_KEY => "escape",

View File

@ -36,6 +36,7 @@ parking_lot = "0.11.1"
postage = { version = "0.4.1", features = ["futures-traits"] }
rand = { version = "0.8.3", optional = true }
serde = { version = "1", features = ["derive"] }
serde_json = { version = "1", features = ["preserve_order"] }
similar = "1.3"
smallvec = { version = "1.6", features = ["union"] }
smol = "1.2"

View File

@ -7,12 +7,12 @@ pub use crate::{
use crate::{
diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
outline::OutlineItem,
range_from_lsp, Outline,
range_from_lsp, CompletionLabel, Outline, ToLspPosition,
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use futures::FutureExt as _;
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task};
use lazy_static::lazy_static;
use lsp::LanguageServer;
use parking_lot::Mutex;
@ -21,7 +21,6 @@ use similar::{ChangeTag, TextDiff};
use smol::future::yield_now;
use std::{
any::Any,
cell::RefCell,
cmp::{self, Ordering},
collections::{BTreeMap, HashMap},
ffi::OsString,
@ -38,7 +37,7 @@ use sum_tree::TreeMap;
use text::{operation_queue::OperationQueue, rope::TextDimension};
pub use text::{Buffer as TextBuffer, Operation as _, *};
use theme::SyntaxTheme;
use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
use tree_sitter::{InputEdit, QueryCursor, Tree};
use util::{post_inc, TryFutureExt as _};
#[cfg(any(test, feature = "test-support"))]
@ -46,10 +45,6 @@ pub use tree_sitter_rust;
pub use lsp::DiagnosticSeverity;
thread_local! {
static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}
lazy_static! {
static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
}
@ -74,6 +69,7 @@ pub struct Buffer {
selections_update_count: usize,
diagnostics_update_count: usize,
language_server: Option<LanguageServerState>,
completion_triggers: Vec<String>,
deferred_ops: OperationQueue<Operation>,
#[cfg(test)]
pub(crate) operations: Vec<Operation>,
@ -114,12 +110,20 @@ pub struct Diagnostic {
pub is_disk_based: bool,
}
/// A single completion suggestion, parameterized over the coordinate
/// type `T` of the range it replaces (e.g. an `Anchor` or an offset).
#[derive(Clone, Debug)]
pub struct Completion<T> {
/// Range of existing buffer text that applying the completion replaces.
pub old_range: Range<T>,
/// Text inserted in place of `old_range`.
pub new_text: String,
/// Pre-styled, filterable label shown in the completion list UI.
pub label: CompletionLabel,
/// The original LSP item, retained for resolve requests and
/// additional-edit application.
pub lsp_completion: lsp::CompletionItem,
}
struct LanguageServerState {
server: Arc<LanguageServer>,
latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
next_version: usize,
_maintain_server: Task<Option<()>>,
_maintain_server: Task<()>,
}
#[derive(Clone)]
@ -141,6 +145,9 @@ pub enum Operation {
selections: Arc<[Selection<Anchor>]>,
lamport_timestamp: clock::Lamport,
},
UpdateCompletionTriggers {
triggers: Vec<String>,
},
}
#[derive(Clone, Debug, Eq, PartialEq)]
@ -158,6 +165,10 @@ pub enum Event {
pub trait File {
fn as_local(&self) -> Option<&dyn LocalFile>;
fn is_local(&self) -> bool {
self.as_local().is_some()
}
fn mtime(&self) -> SystemTime;
/// Returns the path of this file relative to the worktree's root directory.
@ -184,6 +195,21 @@ pub trait File {
fn format_remote(&self, buffer_id: u64, cx: &mut MutableAppContext)
-> Option<Task<Result<()>>>;
fn completions(
&self,
buffer_id: u64,
position: Anchor,
language: Option<Arc<Language>>,
cx: &mut MutableAppContext,
) -> Task<Result<Vec<Completion<Anchor>>>>;
fn apply_additional_edits_for_completion(
&self,
buffer_id: u64,
completion: Completion<Anchor>,
cx: &mut MutableAppContext,
) -> Task<Result<Vec<clock::Local>>>;
fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);
fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);
@ -208,6 +234,97 @@ pub trait LocalFile: File {
);
}
/// A stub `File` implementation for tests: it stores only a path,
/// performs no real I/O, and answers every operation immediately with a
/// default value.
#[cfg(feature = "test-support")]
pub struct FakeFile {
pub path: Arc<Path>,
}
#[cfg(feature = "test-support")]
impl File for FakeFile {
fn as_local(&self) -> Option<&dyn LocalFile> {
Some(self)
}
// Fixed timestamp so tests are deterministic.
fn mtime(&self) -> SystemTime {
SystemTime::UNIX_EPOCH
}
fn path(&self) -> &Arc<Path> {
&self.path
}
fn full_path(&self, _: &AppContext) -> PathBuf {
self.path.to_path_buf()
}
fn file_name(&self, _: &AppContext) -> OsString {
self.path.file_name().unwrap().to_os_string()
}
fn is_deleted(&self) -> bool {
false
}
// Pretends the save succeeded without writing anything.
fn save(
&self,
_: u64,
_: Rope,
_: clock::Global,
cx: &mut MutableAppContext,
) -> Task<Result<(clock::Global, SystemTime)>> {
cx.spawn(|_| async move { Ok((Default::default(), SystemTime::UNIX_EPOCH)) })
}
fn format_remote(&self, _: u64, _: &mut MutableAppContext) -> Option<Task<Result<()>>> {
None
}
// Always resolves to an empty completion list.
fn completions(
&self,
_: u64,
_: Anchor,
_: Option<Arc<Language>>,
_: &mut MutableAppContext,
) -> Task<Result<Vec<Completion<Anchor>>>> {
Task::ready(Ok(Default::default()))
}
// Always resolves to an empty list of edit ids.
fn apply_additional_edits_for_completion(
&self,
_: u64,
_: Completion<Anchor>,
_: &mut MutableAppContext,
) -> Task<Result<Vec<clock::Local>>> {
Task::ready(Ok(Default::default()))
}
fn buffer_updated(&self, _: u64, _: Operation, _: &mut MutableAppContext) {}
fn buffer_removed(&self, _: u64, _: &mut MutableAppContext) {}
fn as_any(&self) -> &dyn Any {
self
}
// Serialization is intentionally unsupported for the fake.
fn to_proto(&self) -> rpc::proto::File {
unimplemented!()
}
}
#[cfg(feature = "test-support")]
impl LocalFile for FakeFile {
fn abs_path(&self, _: &AppContext) -> PathBuf {
self.path.to_path_buf()
}
// Loads an empty string instead of touching the filesystem.
fn load(&self, cx: &AppContext) -> Task<Result<String>> {
cx.background().spawn(async move { Ok(Default::default()) })
}
fn buffer_reloaded(&self, _: u64, _: &clock::Global, _: SystemTime, _: &mut MutableAppContext) {
}
}
// Owning handle for a tree-sitter `QueryCursor`; presumably checked out
// of the shared QUERY_CURSORS pool and returned on drop — the
// `new`/`Drop` impls are not visible in this view, confirm there.
pub(crate) struct QueryCursorHandle(Option<QueryCursor>);
#[derive(Clone)]
@ -229,14 +346,13 @@ struct IndentSuggestion {
indent: bool,
}
struct TextProvider<'a>(&'a Rope);
pub(crate) struct TextProvider<'a>(pub(crate) &'a Rope);
struct BufferChunkHighlights<'a> {
captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
stack: Vec<(usize, HighlightId)>,
highlight_map: HighlightMap,
theme: &'a SyntaxTheme,
_query_cursor: QueryCursorHandle,
}
@ -254,7 +370,7 @@ pub struct BufferChunks<'a> {
#[derive(Clone, Copy, Debug, Default)]
pub struct Chunk<'a> {
pub text: &'a str,
pub highlight_style: Option<HighlightStyle>,
pub highlight_id: Option<HighlightId>,
pub diagnostic: Option<DiagnosticSeverity>,
}
@ -265,7 +381,7 @@ pub(crate) struct Diff {
}
#[derive(Clone, Copy)]
struct DiagnosticEndpoint {
pub(crate) struct DiagnosticEndpoint {
offset: usize,
is_start: bool,
severity: DiagnosticSeverity,
@ -349,6 +465,8 @@ impl Buffer {
cx,
);
this.completion_triggers = message.completion_triggers;
let deferred_ops = message
.deferred_operations
.into_iter()
@ -397,6 +515,7 @@ impl Buffer {
.map(|op| proto::serialize_operation(&Operation::Buffer(op.clone()))),
)
.collect(),
completion_triggers: self.completion_triggers.clone(),
}
}
@ -439,6 +558,7 @@ impl Buffer {
diagnostics: Default::default(),
diagnostics_update_count: 0,
language_server: None,
completion_triggers: Default::default(),
deferred_ops: OperationQueue::new(),
#[cfg(test)]
operations: Default::default(),
@ -488,20 +608,7 @@ impl Buffer {
if let Some(edits) = edits {
this.update(&mut cx, |this, cx| {
if this.version == version {
for edit in &edits {
let range = range_from_lsp(edit.range);
if this.clip_point_utf16(range.start, Bias::Left) != range.start
|| this.clip_point_utf16(range.end, Bias::Left) != range.end
{
return Err(anyhow!(
"invalid formatting edits received from language server"
));
}
}
for edit in edits.into_iter().rev() {
this.edit([range_from_lsp(edit.range)], edit.new_text, cx);
}
this.apply_lsp_edits(edits, cx)?;
Ok(())
} else {
Err(anyhow!("buffer edited since starting to format"))
@ -554,81 +661,103 @@ impl Buffer {
cx: &mut ModelContext<Self>,
) {
self.language_server = if let Some(server) = language_server {
let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
let (latest_snapshot_tx, mut latest_snapshot_rx) =
watch::channel::<Option<LanguageServerSnapshot>>();
let maintain_changes = cx.background().spawn({
let server = server.clone();
async move {
let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
while let Some(snapshot) = latest_snapshot_rx.recv().await {
if let Some(snapshot) = snapshot {
let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
if let Some(prev_snapshot) = prev_snapshot {
let changes = lsp::DidChangeTextDocumentParams {
text_document: lsp::VersionedTextDocumentIdentifier::new(
uri,
snapshot.version as i32,
),
content_changes: snapshot
.buffer_snapshot
.edits_since::<(PointUtf16, usize)>(
prev_snapshot.buffer_snapshot.version(),
)
.map(|edit| {
let edit_start = edit.new.start.0;
let edit_end =
edit_start + (edit.old.end.0 - edit.old.start.0);
let new_text = snapshot
.buffer_snapshot
.text_for_range(edit.new.start.1..edit.new.end.1)
.collect();
lsp::TextDocumentContentChangeEvent {
range: Some(lsp::Range::new(
edit_start.to_lsp_position(),
edit_end.to_lsp_position(),
)),
range_length: None,
text: new_text,
}
})
.collect(),
};
server
.notify::<lsp::notification::DidChangeTextDocument>(changes)
.await?;
} else {
server
.notify::<lsp::notification::DidOpenTextDocument>(
lsp::DidOpenTextDocumentParams {
text_document: lsp::TextDocumentItem::new(
uri,
Default::default(),
snapshot.version as i32,
snapshot.buffer_snapshot.text().to_string(),
),
},
)
.await?;
}
prev_snapshot = Some(snapshot);
}
}
Ok(())
}
});
Some(LanguageServerState {
latest_snapshot: latest_snapshot_tx,
pending_snapshots: Default::default(),
next_version: 0,
server: server.clone(),
_maintain_server: cx.background().spawn(
async move {
let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
while let Some(snapshot) = latest_snapshot_rx.recv().await {
if let Some(snapshot) = snapshot {
let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
if let Some(prev_snapshot) = prev_snapshot {
let changes = lsp::DidChangeTextDocumentParams {
text_document: lsp::VersionedTextDocumentIdentifier::new(
uri,
snapshot.version as i32,
),
content_changes: snapshot
.buffer_snapshot
.edits_since::<(PointUtf16, usize)>(
prev_snapshot.buffer_snapshot.version(),
)
.map(|edit| {
let edit_start = edit.new.start.0;
let edit_end = edit_start
+ (edit.old.end.0 - edit.old.start.0);
let new_text = snapshot
.buffer_snapshot
.text_for_range(
edit.new.start.1..edit.new.end.1,
)
.collect();
lsp::TextDocumentContentChangeEvent {
range: Some(lsp::Range::new(
lsp::Position::new(
edit_start.row,
edit_start.column,
),
lsp::Position::new(
edit_end.row,
edit_end.column,
),
)),
range_length: None,
text: new_text,
}
})
.collect(),
};
server
.notify::<lsp::notification::DidChangeTextDocument>(changes)
.await?;
} else {
server
.notify::<lsp::notification::DidOpenTextDocument>(
lsp::DidOpenTextDocumentParams {
text_document: lsp::TextDocumentItem::new(
uri,
Default::default(),
snapshot.version as i32,
snapshot.buffer_snapshot.text().to_string(),
),
},
)
.await?;
}
prev_snapshot = Some(snapshot);
_maintain_server: cx.spawn_weak(|this, mut cx| async move {
let mut capabilities = server.capabilities();
loop {
if let Some(capabilities) = capabilities.recv().await.flatten() {
if let Some(this) = this.upgrade(&cx) {
let triggers = capabilities
.completion_provider
.and_then(|c| c.trigger_characters)
.unwrap_or_default();
this.update(&mut cx, |this, cx| {
this.completion_triggers = triggers.clone();
this.send_operation(
Operation::UpdateCompletionTriggers { triggers },
cx,
);
cx.notify();
});
} else {
return;
}
break;
}
Ok(())
}
.log_err(),
),
maintain_changes.log_err().await;
}),
})
} else {
None
@ -759,6 +888,10 @@ impl Buffer {
self.language.as_ref()
}
/// The language server currently attached to this buffer, if any.
pub fn language_server(&self) -> Option<&Arc<LanguageServer>> {
    match &self.language_server {
        Some(state) => Some(&state.server),
        None => None,
    }
}
pub fn parse_count(&self) -> usize {
self.parse_count
}
@ -801,7 +934,7 @@ impl Buffer {
let parsed_version = self.version();
let parse_task = cx.background().spawn({
let grammar = grammar.clone();
async move { Self::parse_text(&text, old_tree, &grammar) }
async move { grammar.parse_text(&text, old_tree) }
});
match cx
@ -837,26 +970,6 @@ impl Buffer {
false
}
fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
PARSER.with(|parser| {
let mut parser = parser.borrow_mut();
parser
.set_language(grammar.ts_language)
.expect("incompatible grammar");
let mut chunks = text.chunks_in_range(0..text.len());
let tree = parser
.parse_with(
&mut move |offset, _| {
chunks.seek(offset);
chunks.next().unwrap_or("").as_bytes()
},
old_tree.as_ref(),
)
.unwrap();
tree
})
}
fn interpolate_tree(&self, tree: &mut SyntaxTree) {
for edit in self.edits_since::<(usize, Point)>(&tree.version) {
let (bytes, lines) = edit.flatten();
@ -1177,7 +1290,9 @@ impl Buffer {
let range = offset..(offset + len);
match tag {
ChangeTag::Equal => offset += len,
ChangeTag::Delete => self.edit(Some(range), "", cx),
ChangeTag::Delete => {
self.edit(Some(range), "", cx);
}
ChangeTag::Insert => {
self.edit(Some(offset..offset), &diff.new_text[range], cx);
offset += len;
@ -1291,7 +1406,12 @@ impl Buffer {
.blocking_send(Some(snapshot));
}
pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
pub fn edit<I, S, T>(
&mut self,
ranges_iter: I,
new_text: T,
cx: &mut ModelContext<Self>,
) -> Option<clock::Local>
where
I: IntoIterator<Item = Range<S>>,
S: ToOffset,
@ -1305,7 +1425,8 @@ impl Buffer {
ranges_iter: I,
new_text: T,
cx: &mut ModelContext<Self>,
) where
) -> Option<clock::Local>
where
I: IntoIterator<Item = Range<S>>,
S: ToOffset,
T: Into<String>,
@ -1313,20 +1434,14 @@ impl Buffer {
self.edit_internal(ranges_iter, new_text, true, cx)
}
/*
impl Buffer
pub fn edit
pub fn edit_internal
pub fn edit_with_autoindent
*/
pub fn edit_internal<I, S, T>(
&mut self,
ranges_iter: I,
new_text: T,
autoindent: bool,
cx: &mut ModelContext<Self>,
) where
) -> Option<clock::Local>
where
I: IntoIterator<Item = Range<S>>,
S: ToOffset,
T: Into<String>,
@ -1350,7 +1465,7 @@ impl Buffer {
}
}
if ranges.is_empty() {
return;
return None;
}
self.start_transaction();
@ -1377,6 +1492,7 @@ impl Buffer {
let new_text_len = new_text.len();
let edit = self.text.edit(ranges.iter().cloned(), new_text);
let edit_id = edit.timestamp.local();
if let Some((before_edit, edited)) = autoindent_request {
let mut inserted = None;
@ -1406,6 +1522,33 @@ impl Buffer {
self.end_transaction(cx);
self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
Some(edit_id)
}
/// Apply a batch of LSP `TextEdit`s to the buffer inside one
/// transaction, returning the ids of the resulting local edits.
///
/// Fails without modifying the buffer if any edit range does not clip
/// to itself (i.e. the server's view of the buffer is out of date).
fn apply_lsp_edits(
&mut self,
edits: Vec<lsp::TextEdit>,
cx: &mut ModelContext<Self>,
) -> Result<Vec<clock::Local>> {
// Validate every range up front so we apply either all edits or none.
for edit in &edits {
let range = range_from_lsp(edit.range);
if self.clip_point_utf16(range.start, Bias::Left) != range.start
|| self.clip_point_utf16(range.end, Bias::Left) != range.end
{
return Err(anyhow!(
"invalid formatting edits received from language server"
));
}
}
self.start_transaction();
// Apply in reverse document order so earlier ranges aren't shifted by
// later insertions/deletions. `edit` returns None for no-op edits,
// which filter_map drops.
let edit_ids = edits
.into_iter()
.rev()
.filter_map(|edit| self.edit([range_from_lsp(edit.range)], edit.new_text, cx))
.collect();
self.end_transaction(cx);
Ok(edit_ids)
}
fn did_edit(
@ -1492,6 +1635,7 @@ impl Buffer {
Operation::UpdateSelections { selections, .. } => selections
.iter()
.all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
Operation::UpdateCompletionTriggers { .. } => true,
}
}
@ -1531,6 +1675,9 @@ impl Buffer {
self.text.lamport_clock.observe(lamport_timestamp);
self.selections_update_count += 1;
}
Operation::UpdateCompletionTriggers { triggers } => {
self.completion_triggers = triggers;
}
}
}
@ -1617,6 +1764,155 @@ impl Buffer {
false
}
}
/// Request completions at `position`.
///
/// For a local buffer this queries the attached language server
/// directly; otherwise the request is delegated to the `File`
/// implementation (which forwards it, presumably over RPC — see the
/// `File::completions` trait method). Returns an empty list when the
/// buffer has no file or no language server.
pub fn completions<T>(
&self,
position: T,
cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<Completion<Anchor>>>>
where
T: ToOffset,
{
let file = if let Some(file) = self.file.as_ref() {
file
} else {
return Task::ready(Ok(Default::default()));
};
let language = self.language.clone();
if let Some(file) = file.as_local() {
let server = if let Some(language_server) = self.language_server.as_ref() {
language_server.server.clone()
} else {
return Task::ready(Ok(Default::default()));
};
let abs_path = file.abs_path(cx);
// LSP positions are UTF-16 row/column pairs.
let position = self.offset_to_point_utf16(position.to_offset(self));
cx.spawn(|this, cx| async move {
let completions = server
.request::<lsp::request::Completion>(lsp::CompletionParams {
text_document_position: lsp::TextDocumentPositionParams::new(
lsp::TextDocumentIdentifier::new(
lsp::Url::from_file_path(abs_path).unwrap(),
),
position.to_lsp_position(),
),
context: Default::default(),
work_done_progress_params: Default::default(),
partial_result_params: Default::default(),
})
.await?;
// The server may answer with a bare array or a (possibly
// incomplete) list; we only need the items either way.
let completions = if let Some(completions) = completions {
match completions {
lsp::CompletionResponse::Array(completions) => completions,
lsp::CompletionResponse::List(list) => list.items,
}
} else {
Default::default()
};
this.read_with(&cx, |this, _| {
// Convert LSP items into `Completion`s. Items without a
// `text_edit` are dropped (the `?` below), as are items whose
// ranges no longer fit the buffer (stale server state).
Ok(completions.into_iter().filter_map(|lsp_completion| {
let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
lsp::CompletionTextEdit::Edit(edit) => (range_from_lsp(edit.range), edit.new_text.clone()),
lsp::CompletionTextEdit::InsertAndReplace(_) => {
log::info!("received an insert and replace completion but we don't yet support that");
return None
},
};
let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left) ;
if clipped_start == old_range.start && clipped_end == old_range.end {
Some(Completion {
old_range: this.anchor_before(old_range.start)..this.anchor_after(old_range.end),
new_text,
label: language.as_ref().and_then(|l| l.label_for_completion(&lsp_completion)).unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
lsp_completion,
})
} else {
None
}
}).collect())
})
})
} else {
file.completions(
self.remote_id(),
self.anchor_before(position),
language,
cx.as_mut(),
)
}
}
/// After a completion has been accepted, resolve it with the language
/// server and apply any `additional_text_edits` it carries (e.g.
/// auto-import insertions).
///
/// When `push_to_history` is false the resulting transaction is kept
/// out of the undo history. Remote buffers delegate to the `File`
/// implementation and wait for the edits to be replicated locally.
pub fn apply_additional_edits_for_completion(
&mut self,
completion: Completion<Anchor>,
push_to_history: bool,
cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<clock::Local>>> {
let file = if let Some(file) = self.file.as_ref() {
file
} else {
return Task::ready(Ok(Default::default()));
};
if file.is_local() {
let server = if let Some(lang) = self.language_server.as_ref() {
lang.server.clone()
} else {
return Task::ready(Ok(Default::default()));
};
cx.spawn(|this, mut cx| async move {
// `completionItem/resolve` fills in fields (like additional
// edits) that the server omitted from the initial response.
let resolved_completion = server
.request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
.await?;
if let Some(additional_edits) = resolved_completion.additional_text_edits {
this.update(&mut cx, |this, cx| {
if !push_to_history {
this.avoid_grouping_next_transaction();
}
this.start_transaction();
let edit_ids = this.apply_lsp_edits(additional_edits, cx);
// Drop the transaction from undo history when the caller
// asked for the edits to be invisible to undo.
if let Some(transaction_id) = this.end_transaction(cx) {
if !push_to_history {
this.text.forget_transaction(transaction_id);
}
}
edit_ids
})
} else {
Ok(Default::default())
}
})
} else {
let apply_edits = file.apply_additional_edits_for_completion(
self.remote_id(),
completion,
cx.as_mut(),
);
cx.spawn(|this, mut cx| async move {
let edit_ids = apply_edits.await?;
// Block until the remote edits have actually arrived in this
// replica before (optionally) recording them in history.
this.update(&mut cx, |this, _| this.text.wait_for_edits(&edit_ids))
.await;
if push_to_history {
this.update(&mut cx, |this, _| {
this.text
.push_transaction(edit_ids.iter().copied(), Instant::now());
});
}
Ok(edit_ids)
})
}
}
/// Characters that should trigger a completion request, as reported by
/// the language server's capabilities.
pub fn completion_triggers(&self) -> &[String] {
    self.completion_triggers.as_slice()
}
}
#[cfg(any(test, feature = "test-support"))]
@ -1799,13 +2095,14 @@ impl BufferSnapshot {
pub fn chunks<'a, T: ToOffset>(
&'a self,
range: Range<T>,
theme: Option<&'a SyntaxTheme>,
language_aware: bool,
) -> BufferChunks<'a> {
let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut highlights = None;
let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
if let Some(theme) = theme {
let mut tree = None;
let mut diagnostic_endpoints = Vec::new();
if language_aware {
tree = self.tree.as_ref();
for entry in self.diagnostics_in_range::<_, usize>(range.clone()) {
diagnostic_endpoints.push(DiagnosticEndpoint {
offset: entry.range.start,
@ -1820,43 +2117,15 @@ impl BufferSnapshot {
}
diagnostic_endpoints
.sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
let mut query_cursor = QueryCursorHandle::new();
// TODO - add a Tree-sitter API to remove the need for this.
let cursor = unsafe {
std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
};
let captures = cursor.set_byte_range(range.clone()).captures(
&grammar.highlights_query,
tree.root_node(),
TextProvider(self.text.as_rope()),
);
highlights = Some(BufferChunkHighlights {
captures,
next_capture: None,
stack: Default::default(),
highlight_map: grammar.highlight_map(),
_query_cursor: query_cursor,
theme,
})
}
}
let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
let chunks = self.text.as_rope().chunks_in_range(range.clone());
BufferChunks {
BufferChunks::new(
self.text.as_rope(),
range,
chunks,
tree,
self.grammar(),
diagnostic_endpoints,
error_depth: 0,
warning_depth: 0,
information_depth: 0,
hint_depth: 0,
highlights,
}
)
}
pub fn language(&self) -> Option<&Arc<Language>> {
@ -1897,7 +2166,7 @@ impl BufferSnapshot {
TextProvider(self.as_rope()),
);
let mut chunks = self.chunks(0..self.len(), theme);
let mut chunks = self.chunks(0..self.len(), true);
let item_capture_ix = grammar.outline_query.capture_index_for_name("item")?;
let name_capture_ix = grammar.outline_query.capture_index_for_name("name")?;
@ -1951,7 +2220,11 @@ impl BufferSnapshot {
} else {
offset += chunk.text.len();
}
if let Some(style) = chunk.highlight_style {
let style = chunk
.highlight_id
.zip(theme)
.and_then(|(highlight, theme)| highlight.style(theme));
if let Some(style) = style {
let start = text.len();
let end = start + chunk.text.len();
highlight_ranges.push((start..end, style));
@ -2126,7 +2399,7 @@ impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
}
}
struct ByteChunks<'a>(rope::Chunks<'a>);
pub(crate) struct ByteChunks<'a>(rope::Chunks<'a>);
impl<'a> Iterator for ByteChunks<'a> {
type Item = &'a [u8];
@ -2139,6 +2412,50 @@ impl<'a> Iterator for ByteChunks<'a> {
unsafe impl<'a> Send for BufferChunks<'a> {}
impl<'a> BufferChunks<'a> {
/// Build a chunk iterator over `range` of `text`, optionally with
/// tree-sitter syntax highlighting (enabled when both a grammar and a
/// parse tree are supplied) and diagnostic severity tracking.
pub(crate) fn new(
text: &'a Rope,
range: Range<usize>,
tree: Option<&'a Tree>,
grammar: Option<&'a Arc<Grammar>>,
diagnostic_endpoints: Vec<DiagnosticEndpoint>,
) -> Self {
let mut highlights = None;
if let Some((grammar, tree)) = grammar.zip(tree) {
let mut query_cursor = QueryCursorHandle::new();
// TODO - add a Tree-sitter API to remove the need for this.
// SAFETY(review): this transmute launders the cursor's lifetime to
// 'static so `captures` can borrow it; it appears sound only
// because `_query_cursor` is stored in the same struct and thus
// outlives `captures` — confirm no use-after-move is possible.
let cursor = unsafe {
std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
};
let captures = cursor.set_byte_range(range.clone()).captures(
&grammar.highlights_query,
tree.root_node(),
TextProvider(text),
);
highlights = Some(BufferChunkHighlights {
captures,
next_capture: None,
stack: Default::default(),
highlight_map: grammar.highlight_map(),
_query_cursor: query_cursor,
})
}
let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
let chunks = text.chunks_in_range(range.clone());
BufferChunks {
range,
chunks,
diagnostic_endpoints,
// Depth counters track how many overlapping diagnostics of each
// severity currently cover the iterator's position.
error_depth: 0,
warning_depth: 0,
information_depth: 0,
hint_depth: 0,
highlights,
}
}
pub fn seek(&mut self, offset: usize) {
self.range.start = offset;
self.chunks.seek(self.range.start);
@ -2247,11 +2564,11 @@ impl<'a> Iterator for BufferChunks<'a> {
let mut chunk_end = (self.chunks.offset() + chunk.len())
.min(next_capture_start)
.min(next_diagnostic_endpoint);
let mut highlight_style = None;
let mut highlight_id = None;
if let Some(highlights) = self.highlights.as_ref() {
if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
chunk_end = chunk_end.min(*parent_capture_end);
highlight_style = parent_highlight_id.style(highlights.theme);
highlight_id = Some(*parent_highlight_id);
}
}
@ -2264,7 +2581,7 @@ impl<'a> Iterator for BufferChunks<'a> {
Some(Chunk {
text: slice,
highlight_style,
highlight_id,
diagnostic: self.current_diagnostic_severity(),
})
} else {
@ -2334,6 +2651,9 @@ impl operation_queue::Operation for Operation {
| Operation::UpdateSelections {
lamport_timestamp, ..
} => *lamport_timestamp,
Operation::UpdateCompletionTriggers { .. } => {
unreachable!("updating completion triggers should never be deferred")
}
}
}
}
@ -2352,6 +2672,20 @@ impl Default for Diagnostic {
}
}
impl<T> Completion<T> {
    /// Sort key for completion lists: variables sort ahead of all other
    /// item kinds, with ties broken by the filterable part of the label.
    pub fn sort_key(&self) -> (usize, &str) {
        let filter_text = &self.label.text[self.label.filter_range.clone()];
        match self.lsp_completion.kind {
            Some(lsp::CompletionItemKind::VARIABLE) => (0, filter_text),
            _ => (1, filter_text),
        }
    }

    /// Whether the completion's new text must be expanded as an LSP snippet.
    pub fn is_snippet(&self) -> bool {
        let format = self.lsp_completion.insert_text_format;
        format == Some(lsp::InsertTextFormat::SNIPPET)
    }
}
pub fn contiguous_ranges(
values: impl Iterator<Item = u32>,
max_len: usize,

View File

@ -5,7 +5,7 @@ use theme::SyntaxTheme;
#[derive(Clone, Debug)]
pub struct HighlightMap(Arc<[HighlightId]>);
#[derive(Clone, Copy, Debug)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct HighlightId(pub u32);
const DEFAULT_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX);

View File

@ -17,11 +17,15 @@ use lazy_static::lazy_static;
pub use outline::{Outline, OutlineItem};
use parking_lot::Mutex;
use serde::Deserialize;
use std::{ops::Range, path::Path, str, sync::Arc};
use std::{cell::RefCell, ops::Range, path::Path, str, sync::Arc};
use theme::SyntaxTheme;
use tree_sitter::{self, Query};
pub use tree_sitter::{Parser, Tree};
thread_local! {
// One tree-sitter parser per thread, reused across all parses on that
// thread instead of constructing a fresh parser per call.
static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}
lazy_static! {
pub static ref PLAIN_TEXT: Arc<Language> = Arc::new(Language::new(
LanguageConfig {
@ -39,8 +43,27 @@ pub trait ToPointUtf16 {
fn to_point_utf16(self) -> PointUtf16;
}
pub trait DiagnosticProcessor: 'static + Send + Sync {
/// Conversion into an LSP wire-format position.
pub trait ToLspPosition {
fn to_lsp_position(self) -> lsp::Position;
}
/// Language-specific hooks applied to data received from a language
/// server: diagnostics post-processing and completion label styling.
pub trait LspPostProcessor: 'static + Send + Sync {
/// Mutate diagnostics in place before they are stored.
fn process_diagnostics(&self, diagnostics: &mut lsp::PublishDiagnosticsParams);
/// Optionally build a styled label for a completion item. The default
/// opts out; callers fall back to `CompletionLabel::plain`.
fn label_for_completion(
&self,
_: &lsp::CompletionItem,
_: &Language,
) -> Option<CompletionLabel> {
None
}
}
/// Display label for a completion, carrying syntax-highlight runs and
/// the substring used for fuzzy filtering.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CompletionLabel {
/// Full label text.
pub text: String,
/// Highlight runs as byte ranges into `text`.
pub runs: Vec<(Range<usize>, HighlightId)>,
/// Byte range of `text` matched against the user's typed query.
pub filter_range: Range<usize>,
/// Length of the left-aligned portion of the label — presumably the
/// part before any right-aligned detail text; confirm at render site.
pub left_aligned_len: usize,
}
#[derive(Default, Deserialize)]
@ -73,7 +96,7 @@ pub struct BracketPair {
pub struct Language {
pub(crate) config: LanguageConfig,
pub(crate) grammar: Option<Arc<Grammar>>,
pub(crate) diagnostic_processor: Option<Box<dyn DiagnosticProcessor>>,
pub(crate) lsp_post_processor: Option<Box<dyn LspPostProcessor>>,
}
pub struct Grammar {
@ -140,7 +163,7 @@ impl Language {
highlight_map: Default::default(),
})
}),
diagnostic_processor: None,
lsp_post_processor: None,
}
}
@ -184,8 +207,8 @@ impl Language {
Ok(self)
}
pub fn with_diagnostics_processor(mut self, processor: impl DiagnosticProcessor) -> Self {
self.diagnostic_processor = Some(Box::new(processor));
pub fn with_lsp_post_processor(mut self, processor: impl LspPostProcessor) -> Self {
self.lsp_post_processor = Some(Box::new(processor));
self
}
@ -237,11 +260,41 @@ impl Language {
}
pub fn process_diagnostics(&self, diagnostics: &mut lsp::PublishDiagnosticsParams) {
if let Some(processor) = self.diagnostic_processor.as_ref() {
if let Some(processor) = self.lsp_post_processor.as_ref() {
processor.process_diagnostics(diagnostics);
}
}
/// Ask this language's LSP post-processor for a styled completion
/// label; returns `None` when no post-processor is installed or it
/// declines to produce one.
pub fn label_for_completion(
    &self,
    completion: &lsp::CompletionItem,
) -> Option<CompletionLabel> {
    match self.lsp_post_processor.as_ref() {
        Some(processor) => processor.label_for_completion(completion, self),
        None => None,
    }
}
/// Syntax-highlight `range` of `text` with this language's grammar,
/// returning `(byte_range, highlight_id)` runs. Yields an empty vec
/// when the language has no grammar.
pub fn highlight_text<'a>(
    &'a self,
    text: &'a Rope,
    range: Range<usize>,
) -> Vec<(Range<usize>, HighlightId)> {
    let grammar = match &self.grammar {
        Some(grammar) => grammar,
        None => return Vec::new(),
    };
    // Parse from scratch (no old tree) and walk the highlighted chunks,
    // tracking our byte offset as we go.
    let tree = grammar.parse_text(text, None);
    let mut runs = Vec::new();
    let mut start = 0;
    for chunk in BufferChunks::new(text, range, Some(&tree), self.grammar.as_ref(), Vec::new()) {
        let end = start + chunk.text.len();
        if let Some(highlight_id) = chunk.highlight_id {
            runs.push((start..end, highlight_id));
        }
        start = end;
    }
    runs
}
pub fn brackets(&self) -> &[BracketPair] {
&self.config.brackets
}
@ -252,12 +305,57 @@ impl Language {
HighlightMap::new(grammar.highlights_query.capture_names(), theme);
}
}
/// The tree-sitter grammar backing this language, if it has one.
pub fn grammar(&self) -> Option<&Arc<Grammar>> {
    match &self.grammar {
        Some(grammar) => Some(grammar),
        None => None,
    }
}
}
impl Grammar {
/// Parse `text` with this grammar using the thread-local parser.
/// Passing `old_tree` lets tree-sitter re-parse incrementally.
fn parse_text(&self, text: &Rope, old_tree: Option<Tree>) -> Tree {
PARSER.with(|parser| {
let mut parser = parser.borrow_mut();
parser
.set_language(self.ts_language)
.expect("incompatible grammar");
// Stream the rope to tree-sitter chunk by chunk rather than
// copying it into one contiguous buffer.
let mut chunks = text.chunks_in_range(0..text.len());
parser
.parse_with(
&mut move |offset, _| {
chunks.seek(offset);
chunks.next().unwrap_or("").as_bytes()
},
old_tree.as_ref(),
)
.unwrap()
})
}
/// A clone of the current capture-index -> highlight-id mapping.
pub fn highlight_map(&self) -> HighlightMap {
self.highlight_map.lock().clone()
}
/// Resolve the highlight id for a named capture in the highlights
/// query, or `None` if the query has no capture with that name.
pub fn highlight_id_for_name(&self, name: &str) -> Option<HighlightId> {
let capture_id = self.highlights_query.capture_index_for_name(name)?;
Some(self.highlight_map.lock().get(capture_id))
}
}
impl CompletionLabel {
    /// Builds an unstyled label straight from the LSP completion item's text.
    ///
    /// The whole label is used for fuzzy filtering unless the item supplies a
    /// `filter_text` that occurs inside the label, in which case only that
    /// substring is used.
    pub fn plain(completion: &lsp::CompletionItem) -> Self {
        let label = &completion.label;
        let filter_range = completion
            .filter_text
            .as_deref()
            .and_then(|filter| label.find(filter).map(|ix| ix..ix + filter.len()))
            .unwrap_or(0..label.len());
        Self {
            text: label.clone(),
            runs: Vec::new(),
            left_aligned_len: label.len(),
            filter_range,
        }
    }
}
#[cfg(any(test, feature = "test-support"))]
@ -265,7 +363,15 @@ impl LanguageServerConfig {
/// Creates a fake language-server config with default (empty) capabilities.
///
/// NOTE(review): removed a stale statement that moved `executor` into
/// `lsp::LanguageServer::fake` before the delegation below — it would not
/// compile (use after move) and duplicated the work `fake_with_capabilities`
/// performs.
pub async fn fake(
    executor: Arc<gpui::executor::Background>,
) -> (Self, lsp::FakeLanguageServer) {
    Self::fake_with_capabilities(Default::default(), executor).await
}
pub async fn fake_with_capabilities(
capabilites: lsp::ServerCapabilities,
executor: Arc<gpui::executor::Background>,
) -> (Self, lsp::FakeLanguageServer) {
let (server, fake) =
lsp::LanguageServer::fake_with_capabilities(capabilites, executor).await;
fake.started
.store(false, std::sync::atomic::Ordering::SeqCst);
let started = fake.started.clone();
@ -286,6 +392,12 @@ impl ToPointUtf16 for lsp::Position {
}
}
/// Converts a `PointUtf16` into the equivalent LSP wire position (row and
/// column are passed through unchanged).
impl ToLspPosition for PointUtf16 {
    fn to_lsp_position(self) -> lsp::Position {
        lsp::Position::new(self.row, self.column)
    }
}
pub fn range_from_lsp(range: lsp::Range) -> Range<PointUtf16> {
let start = PointUtf16::new(range.start.line, range.start.character);
let end = PointUtf16::new(range.end.line, range.end.character);

View File

@ -45,16 +45,8 @@ impl<T> Outline<T> {
.map(|range| &item.text[range.start as usize..range.end as usize])
.collect::<String>();
path_candidates.push(StringMatchCandidate {
id,
char_bag: path_text.as_str().into(),
string: path_text.clone(),
});
candidates.push(StringMatchCandidate {
id,
char_bag: candidate_text.as_str().into(),
string: candidate_text,
});
path_candidates.push(StringMatchCandidate::new(id, path_text.clone()));
candidates.push(StringMatchCandidate::new(id, candidate_text));
}
Self {

View File

@ -1,4 +1,6 @@
use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, Operation};
use crate::{
diagnostic_set::DiagnosticEntry, Completion, CompletionLabel, Diagnostic, Language, Operation,
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use collections::HashSet;
@ -58,6 +60,13 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
lamport_timestamp: lamport_timestamp.value,
diagnostics: serialize_diagnostics(diagnostics.iter()),
}),
Operation::UpdateCompletionTriggers { triggers } => {
proto::operation::Variant::UpdateCompletionTriggers(
proto::operation::UpdateCompletionTriggers {
triggers: triggers.clone(),
},
)
}
}),
}
}
@ -238,6 +247,11 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
value: message.lamport_timestamp,
},
},
proto::operation::Variant::UpdateCompletionTriggers(message) => {
Operation::UpdateCompletionTriggers {
triggers: message.triggers,
}
}
},
)
}
@ -365,3 +379,35 @@ pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
},
})
}
/// Converts a completion into its protobuf representation for the wire.
///
/// The raw LSP completion item is carried as opaque JSON bytes so the
/// protocol schema does not have to track LSP's types.
pub fn serialize_completion(completion: &Completion<Anchor>) -> proto::Completion {
    proto::Completion {
        old_start: Some(serialize_anchor(&completion.old_range.start)),
        old_end: Some(serialize_anchor(&completion.old_range.end)),
        new_text: completion.new_text.clone(),
        lsp_completion: serde_json::to_vec(&completion.lsp_completion).unwrap(),
    }
}
/// Reconstructs a completion from its protobuf representation.
///
/// The label is re-derived locally: the language's post-processor gets first
/// shot at styling it, falling back to a plain label built from the LSP item.
///
/// Errors if either anchor is missing/invalid or the embedded LSP completion
/// JSON fails to deserialize.
pub fn deserialize_completion(
    completion: proto::Completion,
    language: Option<&Arc<Language>>,
) -> Result<Completion<Anchor>> {
    let old_start = completion
        .old_start
        .and_then(deserialize_anchor)
        .ok_or_else(|| anyhow!("invalid old start"))?;
    let old_end = completion
        .old_end
        .and_then(deserialize_anchor)
        .ok_or_else(|| anyhow!("invalid old end"))?;
    let lsp_completion = serde_json::from_slice(&completion.lsp_completion)?;
    // `unwrap_or_else` avoids eagerly building the plain fallback label when
    // the language already provides one (the original used `unwrap_or`).
    let label = language
        .and_then(|language| language.label_for_completion(&lsp_completion))
        .unwrap_or_else(|| CompletionLabel::plain(&lsp_completion));
    Ok(Completion {
        old_range: old_start..old_end,
        new_text: completion.new_text,
        label,
        lsp_completion,
    })
}

View File

@ -1090,7 +1090,7 @@ fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
for chunk in buffer.snapshot().chunks(range, Some(&Default::default())) {
for chunk in buffer.snapshot().chunks(range, true) {
if chunks
.last()
.map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)

View File

@ -2,7 +2,7 @@ use anyhow::{anyhow, Context, Result};
use futures::{io::BufWriter, AsyncRead, AsyncWrite};
use gpui::{executor, Task};
use parking_lot::{Mutex, RwLock};
use postage::{barrier, oneshot, prelude::Stream, sink::Sink};
use postage::{barrier, oneshot, prelude::Stream, sink::Sink, watch};
use serde::{Deserialize, Serialize};
use serde_json::{json, value::RawValue, Value};
use smol::{
@ -34,6 +34,7 @@ type ResponseHandler = Box<dyn Send + FnOnce(Result<&str, Error>)>;
pub struct LanguageServer {
next_id: AtomicUsize,
outbound_tx: RwLock<Option<channel::Sender<Vec<u8>>>>,
capabilities: watch::Receiver<Option<ServerCapabilities>>,
notification_handlers: Arc<RwLock<HashMap<&'static str, NotificationHandler>>>,
response_handlers: Arc<Mutex<HashMap<usize, ResponseHandler>>>,
executor: Arc<executor::Background>,
@ -194,9 +195,11 @@ impl LanguageServer {
);
let (initialized_tx, initialized_rx) = barrier::channel();
let (mut capabilities_tx, capabilities_rx) = watch::channel();
let this = Arc::new(Self {
notification_handlers,
response_handlers,
capabilities: capabilities_rx,
next_id: Default::default(),
outbound_tx: RwLock::new(Some(outbound_tx)),
executor: executor.clone(),
@ -210,7 +213,10 @@ impl LanguageServer {
.spawn({
let this = this.clone();
async move {
this.init(root_uri).log_err().await;
if let Some(capabilities) = this.init(root_uri).log_err().await {
*capabilities_tx.borrow_mut() = Some(capabilities);
}
drop(initialized_tx);
}
})
@ -219,7 +225,7 @@ impl LanguageServer {
Ok(this)
}
async fn init(self: Arc<Self>, root_uri: Url) -> Result<()> {
async fn init(self: Arc<Self>, root_uri: Url) -> Result<ServerCapabilities> {
#[allow(deprecated)]
let params = InitializeParams {
process_id: Default::default(),
@ -232,6 +238,16 @@ impl LanguageServer {
link_support: Some(true),
..Default::default()
}),
completion: Some(CompletionClientCapabilities {
completion_item: Some(CompletionItemCapability {
snippet_support: Some(true),
resolve_support: Some(CompletionItemCapabilityResolveSupport {
properties: vec!["additionalTextEdits".to_string()],
}),
..Default::default()
}),
..Default::default()
}),
..Default::default()
}),
experimental: Some(json!({
@ -256,12 +272,12 @@ impl LanguageServer {
this.outbound_tx.read().as_ref(),
params,
);
request.await?;
let response = request.await?;
Self::notify_internal::<notification::Initialized>(
this.outbound_tx.read().as_ref(),
InitializedParams {},
)?;
Ok(())
Ok(response.capabilities)
}
pub fn shutdown(&self) -> Option<impl 'static + Send + Future<Output = Result<()>>> {
@ -315,6 +331,10 @@ impl LanguageServer {
}
}
/// Returns a watch receiver for the server's capabilities. The value is
/// `None` until the `initialize` handshake completes, after which it holds
/// the capabilities reported in the initialize response.
pub fn capabilities(&self) -> watch::Receiver<Option<ServerCapabilities>> {
    self.capabilities.clone()
}
pub fn request<T: request::Request>(
self: &Arc<Self>,
params: T::Params,
@ -449,6 +469,13 @@ pub struct RequestId<T> {
#[cfg(any(test, feature = "test-support"))]
impl LanguageServer {
pub async fn fake(executor: Arc<executor::Background>) -> (Arc<Self>, FakeLanguageServer) {
Self::fake_with_capabilities(Default::default(), executor).await
}
pub async fn fake_with_capabilities(
capabilities: ServerCapabilities,
executor: Arc<executor::Background>,
) -> (Arc<Self>, FakeLanguageServer) {
let stdin = async_pipe::pipe();
let stdout = async_pipe::pipe();
let mut fake = FakeLanguageServer {
@ -461,7 +488,14 @@ impl LanguageServer {
let server = Self::new_internal(stdin.0, stdout.1, Path::new("/"), executor).unwrap();
let (init_id, _) = fake.receive_request::<request::Initialize>().await;
fake.respond(init_id, InitializeResult::default()).await;
fake.respond(
init_id,
InitializeResult {
capabilities,
..Default::default()
},
)
.await;
fake.receive_notification::<notification::Initialized>()
.await;

View File

@ -1,12 +1,11 @@
use editor::{
display_map::ToDisplayPoint, Anchor, AnchorRangeExt, Autoscroll, DisplayPoint, Editor,
EditorSettings, ToPoint,
combine_syntax_and_fuzzy_match_highlights, display_map::ToDisplayPoint, Anchor, AnchorRangeExt,
Autoscroll, DisplayPoint, Editor, EditorSettings, ToPoint,
};
use fuzzy::StringMatch;
use gpui::{
action,
elements::*,
fonts::{self, HighlightStyle},
geometry::vector::Vector2F,
keymap::{self, Binding},
AppContext, Axis, Entity, MutableAppContext, RenderContext, View, ViewContext, ViewHandle,
@ -17,7 +16,6 @@ use ordered_float::OrderedFloat;
use postage::watch;
use std::{
cmp::{self, Reverse},
ops::Range,
sync::Arc,
};
use workspace::{
@ -362,7 +360,7 @@ impl OutlineView {
.with_highlights(combine_syntax_and_fuzzy_match_highlights(
&outline_item.text,
style.label.text.clone().into(),
&outline_item.highlight_ranges,
outline_item.highlight_ranges.iter().cloned(),
&string_match.positions,
))
.contained()
@ -372,153 +370,3 @@ impl OutlineView {
.boxed()
}
}
/// Merges syntax-highlight ranges with fuzzy-match character positions into a
/// single sequence of non-overlapping styled ranges.
///
/// Matched characters keep the style of the syntax range they fall in (or
/// `default_style` outside any range) but are emboldened; unmatched
/// highlighted text has its font weight reset to the default. Assumes
/// `syntax_ranges` and `match_indices` are sorted — TODO confirm with callers.
fn combine_syntax_and_fuzzy_match_highlights(
    text: &str,
    default_style: HighlightStyle,
    syntax_ranges: &[(Range<usize>, HighlightStyle)],
    match_indices: &[usize],
) -> Vec<(Range<usize>, HighlightStyle)> {
    let mut result = Vec::new();
    let mut match_indices = match_indices.iter().copied().peekable();
    // A sentinel range (usize::MAX..0) is chained on so the loop also flushes
    // match characters that come after the last real syntax range.
    for (range, mut syntax_highlight) in syntax_ranges
        .iter()
        .cloned()
        .chain([(usize::MAX..0, Default::default())])
    {
        syntax_highlight.font_properties.weight(Default::default());
        // Add highlights for any fuzzy match characters before the next
        // syntax highlight range.
        while let Some(&match_index) = match_indices.peek() {
            if match_index >= range.start {
                break;
            }
            match_indices.next();
            let end_index = char_ix_after(match_index, text);
            let mut match_style = default_style;
            match_style.font_properties.weight(fonts::Weight::BOLD);
            result.push((match_index..end_index, match_style));
        }
        if range.start == usize::MAX {
            break;
        }
        // Add highlights for any fuzzy match characters within the
        // syntax highlight range.
        let mut offset = range.start;
        while let Some(&match_index) = match_indices.peek() {
            if match_index >= range.end {
                break;
            }
            match_indices.next();
            if match_index > offset {
                result.push((offset..match_index, syntax_highlight));
            }
            // Coalesce runs of adjacent matched characters into one bold range.
            let mut end_index = char_ix_after(match_index, text);
            while let Some(&next_match_index) = match_indices.peek() {
                if next_match_index == end_index && next_match_index < range.end {
                    end_index = char_ix_after(next_match_index, text);
                    match_indices.next();
                } else {
                    break;
                }
            }
            let mut match_style = syntax_highlight;
            match_style.font_properties.weight(fonts::Weight::BOLD);
            result.push((match_index..end_index, match_style));
            offset = end_index;
        }
        // Emit the unmatched tail of the syntax range in its base style.
        if offset < range.end {
            result.push((offset..range.end, syntax_highlight));
        }
    }
    result
}
/// Returns the byte index just past the character beginning at byte `ix`.
/// Panics if `ix` is not a char boundary or is at/after the end of `text`.
fn char_ix_after(ix: usize, text: &str) -> usize {
    let first_char = text[ix..].chars().next().unwrap();
    ix + first_char.len_utf8()
}
#[cfg(test)]
mod tests {
use super::*;
use gpui::{color::Color, fonts::HighlightStyle};
#[test]
fn test_combine_syntax_and_fuzzy_match_highlights() {
let string = "abcdefghijklmnop";
let default = HighlightStyle::default();
let syntax_ranges = [
(
0..3,
HighlightStyle {
color: Color::red(),
..default
},
),
(
4..8,
HighlightStyle {
color: Color::green(),
..default
},
),
];
let match_indices = [4, 6, 7, 8];
assert_eq!(
combine_syntax_and_fuzzy_match_highlights(
&string,
default,
&syntax_ranges,
&match_indices,
),
&[
(
0..3,
HighlightStyle {
color: Color::red(),
..default
},
),
(
4..5,
HighlightStyle {
color: Color::green(),
font_properties: *fonts::Properties::default().weight(fonts::Weight::BOLD),
..default
},
),
(
5..6,
HighlightStyle {
color: Color::green(),
..default
},
),
(
6..8,
HighlightStyle {
color: Color::green(),
font_properties: *fonts::Properties::default().weight(fonts::Weight::BOLD),
..default
},
),
(
8..9,
HighlightStyle {
font_properties: *fonts::Properties::default().weight(fonts::Weight::BOLD),
..default
},
),
]
);
}
}

View File

@ -334,6 +334,12 @@ impl Project {
client.subscribe_to_entity(remote_id, cx, Self::handle_save_buffer),
client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
client.subscribe_to_entity(remote_id, cx, Self::handle_format_buffer),
client.subscribe_to_entity(remote_id, cx, Self::handle_get_completions),
client.subscribe_to_entity(
remote_id,
cx,
Self::handle_apply_additional_edits_for_completion,
),
client.subscribe_to_entity(remote_id, cx, Self::handle_get_definition),
]);
}
@ -1683,6 +1689,114 @@ impl Project {
Ok(())
}
/// Handles a guest's request for completions at a position in a buffer we are
/// sharing: asks the buffer for completions asynchronously and replies over
/// RPC with either the serialized results or an error message.
fn handle_get_completions(
    &mut self,
    envelope: TypedEnvelope<proto::GetCompletions>,
    rpc: Arc<Client>,
    cx: &mut ModelContext<Self>,
) -> Result<()> {
    let receipt = envelope.receipt();
    let sender_id = envelope.original_sender_id()?;
    // Only buffers explicitly shared with this peer are addressable.
    let buffer = self
        .shared_buffers
        .get(&sender_id)
        .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
        .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
    let position = envelope
        .payload
        .position
        .and_then(language::proto::deserialize_anchor)
        .ok_or_else(|| anyhow!("invalid position"))?;
    // Compute completions on a spawned task so this handler returns
    // immediately; the RPC response is sent from the task.
    cx.spawn(|_, mut cx| async move {
        match buffer
            .update(&mut cx, |buffer, cx| buffer.completions(position, cx))
            .await
        {
            Ok(completions) => {
                rpc.respond(
                    receipt,
                    proto::GetCompletionsResponse {
                        completions: completions
                            .iter()
                            .map(language::proto::serialize_completion)
                            .collect(),
                    },
                )
                .await
            }
            Err(error) => {
                rpc.respond_with_error(
                    receipt,
                    proto::Error {
                        message: error.to_string(),
                    },
                )
                .await
            }
        }
    })
    .detach_and_log_err(cx);
    Ok(())
}
/// Handles a guest's request to apply a completion's additional text edits
/// (e.g. auto-inserted imports) to a shared buffer, replying over RPC with
/// the clock ids of the edit operations that were applied.
fn handle_apply_additional_edits_for_completion(
    &mut self,
    envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
    rpc: Arc<Client>,
    cx: &mut ModelContext<Self>,
) -> Result<()> {
    let receipt = envelope.receipt();
    let sender_id = envelope.original_sender_id()?;
    // Only buffers explicitly shared with this peer are addressable.
    let buffer = self
        .shared_buffers
        .get(&sender_id)
        .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
        .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
    let language = buffer.read(cx).language();
    let completion = language::proto::deserialize_completion(
        envelope
            .payload
            .completion
            // Fixed: this error previously said "invalid position", a
            // copy-paste from the completions handler.
            .ok_or_else(|| anyhow!("invalid completion"))?,
        language,
    )?;
    cx.spawn(|_, mut cx| async move {
        match buffer
            .update(&mut cx, |buffer, cx| {
                buffer.apply_additional_edits_for_completion(completion, false, cx)
            })
            .await
        {
            Ok(edit_ids) => {
                rpc.respond(
                    receipt,
                    proto::ApplyCompletionAdditionalEditsResponse {
                        additional_edits: edit_ids
                            .into_iter()
                            .map(|edit_id| proto::AdditionalEdit {
                                replica_id: edit_id.replica_id as u32,
                                local_timestamp: edit_id.value,
                            })
                            .collect(),
                    },
                )
                .await
            }
            Err(error) => {
                rpc.respond_with_error(
                    receipt,
                    proto::Error {
                        message: error.to_string(),
                    },
                )
                .await
            }
        }
    })
    .detach_and_log_err(cx);
    Ok(())
}
pub fn handle_get_definition(
&mut self,
envelope: TypedEnvelope<proto::GetDefinition>,

View File

@ -14,7 +14,9 @@ use gpui::{
executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
Task,
};
use language::{Buffer, DiagnosticEntry, Operation, PointUtf16, Rope};
use language::{
Anchor, Buffer, Completion, DiagnosticEntry, Language, Operation, PointUtf16, Rope,
};
use lazy_static::lazy_static;
use parking_lot::Mutex;
use postage::{
@ -1421,6 +1423,77 @@ impl language::File for File {
}))
}
/// Requests completions for a remote buffer at `position` by asking the host
/// over RPC, deserializing each result with the buffer's language so labels
/// can be styled.
///
/// Errors immediately if this file's worktree is not remote.
fn completions(
    &self,
    buffer_id: u64,
    position: Anchor,
    language: Option<Arc<Language>>,
    cx: &mut MutableAppContext,
) -> Task<Result<Vec<Completion<Anchor>>>> {
    let worktree = self.worktree.read(cx);
    let worktree = if let Some(worktree) = worktree.as_remote() {
        worktree
    } else {
        return Task::ready(Err(anyhow!(
            "remote completions requested on a local worktree"
        )));
    };
    let rpc = worktree.client.clone();
    let project_id = worktree.project_id;
    cx.foreground().spawn(async move {
        let response = rpc
            .request(proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&position)),
            })
            .await?;
        // Collecting into Result short-circuits on the first completion that
        // fails to deserialize.
        response
            .completions
            .into_iter()
            .map(|completion| {
                language::proto::deserialize_completion(completion, language.as_ref())
            })
            .collect()
    })
}
/// Asks the host over RPC to apply a completion's additional text edits to a
/// remote buffer, returning the clock ids of the edits the host applied.
///
/// Errors immediately if this file's worktree is not remote.
fn apply_additional_edits_for_completion(
    &self,
    buffer_id: u64,
    completion: Completion<Anchor>,
    cx: &mut MutableAppContext,
) -> Task<Result<Vec<clock::Local>>> {
    let worktree = self.worktree.read(cx);
    let worktree = if let Some(worktree) = worktree.as_remote() {
        worktree
    } else {
        return Task::ready(Err(anyhow!(
            "remote additional edits application requested on a local worktree"
        )));
    };
    let rpc = worktree.client.clone();
    let project_id = worktree.project_id;
    cx.foreground().spawn(async move {
        let response = rpc
            .request(proto::ApplyCompletionAdditionalEdits {
                project_id,
                buffer_id,
                completion: Some(language::proto::serialize_completion(&completion)),
            })
            .await?;
        // Convert the wire representation of each edit id back into a local
        // clock value.
        Ok(response
            .additional_edits
            .into_iter()
            .map(|edit| clock::Local {
                replica_id: edit.replica_id as ReplicaId,
                value: edit.local_timestamp,
            })
            .collect())
    })
}
fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext) {
self.worktree.update(cx, |worktree, cx| {
worktree.send_buffer_update(buffer_id, operation, cx);

View File

@ -40,22 +40,26 @@ message Envelope {
BufferSaved buffer_saved = 32;
BufferReloaded buffer_reloaded = 33;
FormatBuffer format_buffer = 34;
GetCompletions get_completions = 35;
GetCompletionsResponse get_completions_response = 36;
ApplyCompletionAdditionalEdits apply_completion_additional_edits = 37;
ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 38;
GetChannels get_channels = 35;
GetChannelsResponse get_channels_response = 36;
JoinChannel join_channel = 37;
JoinChannelResponse join_channel_response = 38;
LeaveChannel leave_channel = 39;
SendChannelMessage send_channel_message = 40;
SendChannelMessageResponse send_channel_message_response = 41;
ChannelMessageSent channel_message_sent = 42;
GetChannelMessages get_channel_messages = 43;
GetChannelMessagesResponse get_channel_messages_response = 44;
GetChannels get_channels = 39;
GetChannelsResponse get_channels_response = 40;
JoinChannel join_channel = 41;
JoinChannelResponse join_channel_response = 42;
LeaveChannel leave_channel = 43;
SendChannelMessage send_channel_message = 44;
SendChannelMessageResponse send_channel_message_response = 45;
ChannelMessageSent channel_message_sent = 46;
GetChannelMessages get_channel_messages = 47;
GetChannelMessagesResponse get_channel_messages_response = 48;
UpdateContacts update_contacts = 45;
UpdateContacts update_contacts = 49;
GetUsers get_users = 46;
GetUsersResponse get_users_response = 47;
GetUsers get_users = 50;
GetUsersResponse get_users_response = 51;
}
}
@ -203,6 +207,38 @@ message FormatBuffer {
uint64 buffer_id = 2;
}
message GetCompletions {
uint64 project_id = 1;
uint64 buffer_id = 2;
Anchor position = 3;
}
message GetCompletionsResponse {
repeated Completion completions = 1;
}
message ApplyCompletionAdditionalEdits {
uint64 project_id = 1;
uint64 buffer_id = 2;
Completion completion = 3;
}
message ApplyCompletionAdditionalEditsResponse {
repeated AdditionalEdit additional_edits = 1;
}
message AdditionalEdit {
uint32 replica_id = 1;
uint32 local_timestamp = 2;
}
message Completion {
Anchor old_start = 1;
Anchor old_end = 2;
string new_text = 3;
bytes lsp_completion = 4;
}
message UpdateDiagnosticSummary {
uint64 project_id = 1;
uint64 worktree_id = 2;
@ -339,6 +375,7 @@ message BufferState {
repeated Diagnostic diagnostics = 9;
uint32 lamport_timestamp = 10;
repeated Operation deferred_operations = 11;
repeated string completion_triggers = 12;
}
message BufferFragment {
@ -409,6 +446,7 @@ message Operation {
Undo undo = 2;
UpdateSelections update_selections = 3;
UpdateDiagnostics update_diagnostics = 4;
UpdateCompletionTriggers update_completion_triggers = 5;
}
message Edit {
@ -434,6 +472,10 @@ message Operation {
uint32 lamport_timestamp = 2;
repeated Selection selections = 3;
}
message UpdateCompletionTriggers {
repeated string triggers = 1;
}
}
message UndoMapEntry {

View File

@ -122,6 +122,8 @@ macro_rules! entity_messages {
messages!(
Ack,
AddProjectCollaborator,
ApplyCompletionAdditionalEdits,
ApplyCompletionAdditionalEditsResponse,
BufferReloaded,
BufferSaved,
ChannelMessageSent,
@ -134,6 +136,8 @@ messages!(
GetChannelMessagesResponse,
GetChannels,
GetChannelsResponse,
GetCompletions,
GetCompletionsResponse,
GetDefinition,
GetDefinitionResponse,
GetUsers,
@ -167,9 +171,14 @@ messages!(
);
request_messages!(
(
ApplyCompletionAdditionalEdits,
ApplyCompletionAdditionalEditsResponse
),
(FormatBuffer, Ack),
(GetChannelMessages, GetChannelMessagesResponse),
(GetChannels, GetChannelsResponse),
(GetCompletions, GetCompletionsResponse),
(GetDefinition, GetDefinitionResponse),
(GetUsers, GetUsersResponse),
(JoinChannel, JoinChannelResponse),
@ -188,12 +197,14 @@ request_messages!(
entity_messages!(
project_id,
AddProjectCollaborator,
ApplyCompletionAdditionalEdits,
BufferReloaded,
BufferSaved,
CloseBuffer,
DiskBasedDiagnosticsUpdated,
DiskBasedDiagnosticsUpdating,
FormatBuffer,
GetCompletions,
GetDefinition,
JoinProject,
LeaveProject,

View File

@ -2,6 +2,7 @@ mod admin;
mod api;
mod assets;
mod auth;
mod careers;
mod community;
mod db;
mod env;
@ -12,7 +13,6 @@ mod home;
mod releases;
mod rpc;
mod team;
mod careers;
use self::errors::TideResultExt as _;
use ::rpc::Peer;

View File

@ -83,6 +83,8 @@ impl Server {
.add_handler(Server::buffer_saved)
.add_handler(Server::save_buffer)
.add_handler(Server::format_buffer)
.add_handler(Server::get_completions)
.add_handler(Server::apply_additional_edits_for_completion)
.add_handler(Server::get_channels)
.add_handler(Server::get_users)
.add_handler(Server::join_channel)
@ -341,7 +343,7 @@ impl Server {
self.peer.send(
conn_id,
proto::AddProjectCollaborator {
project_id: project_id,
project_id,
collaborator: Some(proto::Collaborator {
peer_id: request.sender_id.0,
replica_id: response.replica_id,
@ -722,6 +724,54 @@ impl Server {
Ok(())
}
/// Relays a guest's completions request to the project's host and forwards
/// the host's response back to the requester.
async fn get_completions(
    self: Arc<Server>,
    request: TypedEnvelope<proto::GetCompletions>,
) -> tide::Result<()> {
    // Resolve the host connection while holding the server-state lock,
    // releasing it before any awaits.
    let host;
    {
        let state = self.state();
        host = state
            .read_project(request.payload.project_id, request.sender_id)
            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?
            .host_connection_id;
    }
    let receipt = request.receipt();
    let response = self
        .peer
        .forward_request(request.sender_id, host, request.payload.clone())
        .await?;
    self.peer.respond(receipt, response).await?;
    Ok(())
}
/// Relays a guest's request to apply a completion's additional edits to the
/// project's host and forwards the host's response back to the requester.
async fn apply_additional_edits_for_completion(
    self: Arc<Server>,
    request: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
) -> tide::Result<()> {
    // Resolve the host connection while holding the server-state lock,
    // releasing it before any awaits.
    let host;
    {
        let state = self.state();
        host = state
            .read_project(request.payload.project_id, request.sender_id)
            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?
            .host_connection_id;
    }
    let receipt = request.receipt();
    let response = self
        .peer
        .forward_request(request.sender_id, host, request.payload.clone())
        .await?;
    self.peer.respond(receipt, response).await?;
    Ok(())
}
async fn update_buffer(
self: Arc<Server>,
request: TypedEnvelope<proto::UpdateBuffer>,
@ -2247,6 +2297,231 @@ mod tests {
});
}
#[gpui::test]
async fn test_collaborating_with_completion(
mut cx_a: TestAppContext,
mut cx_b: TestAppContext,
) {
cx_a.foreground().forbid_parking();
let mut lang_registry = Arc::new(LanguageRegistry::new());
let fs = Arc::new(FakeFs::new(cx_a.background()));
// Set up a fake language server.
let (language_server_config, mut fake_language_server) =
LanguageServerConfig::fake_with_capabilities(
lsp::ServerCapabilities {
completion_provider: Some(lsp::CompletionOptions {
trigger_characters: Some(vec![".".to_string()]),
..Default::default()
}),
..Default::default()
},
cx_a.background(),
)
.await;
Arc::get_mut(&mut lang_registry)
.unwrap()
.add(Arc::new(Language::new(
LanguageConfig {
name: "Rust".to_string(),
path_suffixes: vec!["rs".to_string()],
language_server: Some(language_server_config),
..Default::default()
},
Some(tree_sitter_rust::language()),
)));
// Connect to a server as 2 clients.
let mut server = TestServer::start(cx_a.foreground()).await;
let client_a = server.create_client(&mut cx_a, "user_a").await;
let client_b = server.create_client(&mut cx_b, "user_b").await;
// Share a project as client A
fs.insert_tree(
"/a",
json!({
".zed.toml": r#"collaborators = ["user_b"]"#,
"main.rs": "fn main() { a }",
"other.rs": "",
}),
)
.await;
let project_a = cx_a.update(|cx| {
Project::local(
client_a.clone(),
client_a.user_store.clone(),
lang_registry.clone(),
fs.clone(),
cx,
)
});
let (worktree_a, _) = project_a
.update(&mut cx_a, |p, cx| {
p.find_or_create_local_worktree("/a", false, cx)
})
.await
.unwrap();
worktree_a
.read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
project_a
.update(&mut cx_a, |p, cx| p.share(cx))
.await
.unwrap();
// Join the worktree as client B.
let project_b = Project::remote(
project_id,
client_b.clone(),
client_b.user_store.clone(),
lang_registry.clone(),
fs.clone(),
&mut cx_b.to_async(),
)
.await
.unwrap();
// Open a file in an editor as the guest.
let buffer_b = project_b
.update(&mut cx_b, |p, cx| {
p.open_buffer((worktree_id, "main.rs"), cx)
})
.await
.unwrap();
let (window_b, _) = cx_b.add_window(|_| EmptyView);
let editor_b = cx_b.add_view(window_b, |cx| {
Editor::for_buffer(
cx.add_model(|cx| MultiBuffer::singleton(buffer_b.clone(), cx)),
Arc::new(|cx| EditorSettings::test(cx)),
cx,
)
});
// Type a completion trigger character as the guest.
editor_b.update(&mut cx_b, |editor, cx| {
editor.select_ranges([13..13], None, cx);
editor.handle_input(&Input(".".into()), cx);
cx.focus(&editor_b);
});
// Receive a completion request as the host's language server.
let (request_id, params) = fake_language_server
.receive_request::<lsp::request::Completion>()
.await;
assert_eq!(
params.text_document_position.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(),
);
assert_eq!(
params.text_document_position.position,
lsp::Position::new(0, 14),
);
// Return some completions from the host's language server.
fake_language_server
.respond(
request_id,
Some(lsp::CompletionResponse::Array(vec![
lsp::CompletionItem {
label: "first_method(…)".into(),
detail: Some("fn(&mut self, B) -> C".into()),
text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
new_text: "first_method($1)".to_string(),
range: lsp::Range::new(
lsp::Position::new(0, 14),
lsp::Position::new(0, 14),
),
})),
insert_text_format: Some(lsp::InsertTextFormat::SNIPPET),
..Default::default()
},
lsp::CompletionItem {
label: "second_method(…)".into(),
detail: Some("fn(&mut self, C) -> D<E>".into()),
text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
new_text: "second_method()".to_string(),
range: lsp::Range::new(
lsp::Position::new(0, 14),
lsp::Position::new(0, 14),
),
})),
insert_text_format: Some(lsp::InsertTextFormat::SNIPPET),
..Default::default()
},
])),
)
.await;
// Open the buffer on the host.
let buffer_a = project_a
.update(&mut cx_a, |p, cx| {
p.open_buffer((worktree_id, "main.rs"), cx)
})
.await
.unwrap();
buffer_a
.condition(&cx_a, |buffer, _| buffer.text() == "fn main() { a. }")
.await;
// Confirm a completion on the guest.
editor_b.next_notification(&cx_b).await;
editor_b.update(&mut cx_b, |editor, cx| {
assert!(editor.has_completions());
editor.confirm_completion(Some(0), cx);
assert_eq!(editor.text(cx), "fn main() { a.first_method() }");
});
buffer_a
.condition(&cx_a, |buffer, _| {
buffer.text() == "fn main() { a.first_method() }"
})
.await;
// Receive a request to resolve the selected completion on the host's language server.
let (request_id, params) = fake_language_server
.receive_request::<lsp::request::ResolveCompletionItem>()
.await;
assert_eq!(params.label, "first_method(…)");
// Return a resolved completion from the host's language server.
// The resolved completion has an additional text edit.
fake_language_server
.respond(
request_id,
lsp::CompletionItem {
label: "first_method(…)".into(),
detail: Some("fn(&mut self, B) -> C".into()),
text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
new_text: "first_method($1)".to_string(),
range: lsp::Range::new(
lsp::Position::new(0, 14),
lsp::Position::new(0, 14),
),
})),
additional_text_edits: Some(vec![lsp::TextEdit {
new_text: "use d::SomeTrait;\n".to_string(),
range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
}]),
insert_text_format: Some(lsp::InsertTextFormat::SNIPPET),
..Default::default()
},
)
.await;
// The additional edit is applied.
buffer_b
.condition(&cx_b, |buffer, _| {
buffer.text() == "use d::SomeTrait;\nfn main() { a.first_method() }"
})
.await;
assert_eq!(
buffer_a.read_with(&cx_a, |buffer, _| buffer.text()),
buffer_b.read_with(&cx_b, |buffer, _| buffer.text()),
);
}
#[gpui::test]
async fn test_formatting_buffer(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
cx_a.foreground().forbid_parking();

11
crates/snippet/Cargo.toml Normal file
View File

@ -0,0 +1,11 @@
[package]
name = "snippet"
version = "0.1.0"
edition = "2021"
[lib]
path = "src/snippet.rs"
[dependencies]
anyhow = "1.0"
smallvec = { version = "1.6", features = ["union"] }

View File

@ -0,0 +1,167 @@
use anyhow::{anyhow, Context, Result};
use smallvec::SmallVec;
use std::{collections::BTreeMap, ops::Range};
/// A parsed LSP-style snippet: the literal text with all tabstop markers
/// stripped, plus the ranges where each tabstop ended up.
#[derive(Default)]
pub struct Snippet {
    // The snippet's text with `$n` / `${n:...}` markers removed.
    pub text: String,
    // Tabstops in visit order; the final entry is the `$0` end position.
    pub tabstops: Vec<TabStop>,
}

// All ranges in `Snippet::text` belonging to one tabstop index; a single
// tabstop may occur in several places (e.g. `$2` used twice).
type TabStop = SmallVec<[Range<isize>; 2]>;
impl Snippet {
    /// Parses LSP snippet syntax (`$1`, `${2:placeholder}`, nested
    /// placeholders) into plain text plus ordered tabstops.
    pub fn parse(source: &str) -> Result<Self> {
        let mut text = String::with_capacity(source.len());
        let mut tabstops = BTreeMap::new();
        parse_snippet(source, false, &mut text, &mut tabstops)
            .context("failed to parse snippet")?;
        // Tabstop `$0` denotes the final cursor position; move it to the end
        // of the list, defaulting to the end of the text when omitted.
        let last_tabstop = tabstops
            .remove(&0)
            .unwrap_or_else(|| SmallVec::from_iter([text.len() as isize..text.len() as isize]));
        Ok(Snippet {
            text,
            tabstops: tabstops.into_values().chain(Some(last_tabstop)).collect(),
        })
    }
}
/// Consumes snippet source, appending literal text to `text` and recording
/// tabstops, until the end of input or — when `nested` — a `}` that closes
/// the enclosing placeholder. Returns the unconsumed remainder of `source`.
fn parse_snippet<'a>(
    mut source: &'a str,
    nested: bool,
    text: &mut String,
    tabstops: &mut BTreeMap<usize, TabStop>,
) -> Result<&'a str> {
    loop {
        match source.chars().next() {
            None => return Ok(""),
            Some('$') => {
                source = parse_tabstop(&source[1..], text, tabstops)?;
            }
            Some('}') => {
                if nested {
                    // Let the caller (a placeholder) consume the brace.
                    return Ok(source);
                } else {
                    // A top-level `}` is just literal text.
                    text.push('}');
                    source = &source[1..];
                }
            }
            Some(_) => {
                // Copy everything up to the next special character verbatim.
                let chunk_end = source.find(&['}', '$']).unwrap_or(source.len());
                let (chunk, rest) = source.split_at(chunk_end);
                text.push_str(chunk);
                source = rest;
            }
        }
    }
}
/// Parses one tabstop after its `$` has been consumed: either a bare index
/// (`$1`) or a braced form with an optional placeholder (`${1:default}`,
/// which may itself contain nested tabstops). Records the range the tabstop
/// occupies in `text` and returns the unconsumed remainder of `source`.
fn parse_tabstop<'a>(
    mut source: &'a str,
    text: &mut String,
    tabstops: &mut BTreeMap<usize, TabStop>,
) -> Result<&'a str> {
    let tabstop_start = text.len();
    let tabstop_index;
    if source.chars().next() == Some('{') {
        let (index, rest) = parse_int(&source[1..])?;
        tabstop_index = index;
        source = rest;
        // Optional `:placeholder`, parsed recursively so it may contain
        // further tabstops; its text lands inside this tabstop's range.
        if source.chars().next() == Some(':') {
            source = parse_snippet(&source[1..], true, text, tabstops)?;
        }
        if source.chars().next() == Some('}') {
            source = &source[1..];
        } else {
            return Err(anyhow!("expected a closing brace"));
        }
    } else {
        let (index, rest) = parse_int(&source)?;
        tabstop_index = index;
        source = rest;
    }
    tabstops
        .entry(tabstop_index)
        .or_default()
        .push(tabstop_start as isize..text.len() as isize);
    Ok(source)
}
/// Splits a leading run of ASCII digits off `source`, returning the parsed
/// value and the remaining suffix. Fails if `source` does not start with a
/// digit or if the number does not fit in `usize`.
fn parse_int(source: &str) -> Result<(usize, &str)> {
    let digit_count = source
        .bytes()
        .take_while(|byte| byte.is_ascii_digit())
        .count();
    if digit_count == 0 {
        Err(anyhow!("expected an integer"))
    } else {
        let (digits, rest) = source.split_at(digit_count);
        Ok((digits.parse()?, rest))
    }
}
#[cfg(test)]
mod tests {
use super::*;
// A snippet with no `$N` markers still gets the implicit final tabstop
// at the end of the text.
#[test]
fn test_snippet_without_tabstops() {
let snippet = Snippet::parse("one-two-three").unwrap();
assert_eq!(snippet.text, "one-two-three");
assert_eq!(tabstops(&snippet), &[vec![13..13]]);
}
#[test]
fn test_snippet_with_tabstops() {
let snippet = Snippet::parse("one$1two").unwrap();
assert_eq!(snippet.text, "onetwo");
assert_eq!(tabstops(&snippet), &[vec![3..3], vec![6..6]]);
// Multi-digit numbers
// Tabstops are ordered by their numeric index ($99 before $123),
// not by their position in the snippet text.
let snippet = Snippet::parse("one$123-$99-two").unwrap();
assert_eq!(snippet.text, "one--two");
assert_eq!(tabstops(&snippet), &[vec![4..4], vec![3..3], vec![8..8]]);
}
// `$0` is the explicit final tabstop, so no implicit one is appended.
#[test]
fn test_snippet_with_explicit_final_tabstop() {
let snippet = Snippet::parse(r#"<div class="$1">$0</div>"#).unwrap();
assert_eq!(snippet.text, r#"<div class=""></div>"#);
assert_eq!(tabstops(&snippet), &[vec![12..12], vec![14..14]]);
}
// `${N:default}` placeholders contribute their default text and a range
// spanning it.
#[test]
fn test_snippet_with_placeholders() {
let snippet = Snippet::parse("one${1:two}three${2:four}").unwrap();
assert_eq!(snippet.text, "onetwothreefour");
assert_eq!(
tabstops(&snippet),
&[vec![3..6], vec![11..15], vec![15..15]]
);
}
// Placeholders can nest, and a repeated index (here `${2:i}`) collects
// multiple ranges under one tabstop.
#[test]
fn test_snippet_with_nested_placeholders() {
let snippet = Snippet::parse(
"for (${1:var ${2:i} = 0; ${2:i} < ${3:${4:array}.length}; ${2:i}++}) {$0}",
)
.unwrap();
assert_eq!(snippet.text, "for (var i = 0; i < array.length; i++) {}");
assert_eq!(
tabstops(&snippet),
&[
vec![5..37],
vec![9..10, 16..17, 34..35],
vec![20..32],
vec![20..25],
vec![40..40],
]
);
}
// Helper: flattens each tabstop's SmallVec of ranges into a Vec for
// easy comparison in assertions.
fn tabstops(snippet: &Snippet) -> Vec<Vec<Range<isize>>> {
snippet.tabstops.iter().map(|t| t.to_vec()).collect()
}
}

View File

@ -18,6 +18,7 @@ arrayvec = "0.7.1"
lazy_static = "1.4"
log = "0.4"
parking_lot = "0.11"
postage = { version = "0.4.1", features = ["futures-traits"] }
rand = { version = "0.8.3", optional = true }
smallvec = { version = "1.6", features = ["union"] }

View File

@ -21,6 +21,7 @@ use operation_queue::OperationQueue;
pub use patch::Patch;
pub use point::*;
pub use point_utf16::*;
use postage::{oneshot, prelude::*};
#[cfg(any(test, feature = "test-support"))]
pub use random_char_iter::*;
use rope::TextDimension;
@ -28,6 +29,7 @@ pub use rope::{Chunks, Rope, TextSummary};
pub use selection::*;
use std::{
cmp::{self, Ordering},
future::Future,
iter::Iterator,
ops::{self, Deref, Range, Sub},
str,
@ -50,6 +52,7 @@ pub struct Buffer {
local_clock: clock::Local,
pub lamport_clock: clock::Lamport,
subscriptions: Topic,
edit_id_resolvers: HashMap<clock::Local, Vec<oneshot::Sender<()>>>,
}
#[derive(Clone, Debug)]
@ -233,6 +236,20 @@ impl History {
}
}
/// Groups the given edits into a single new transaction on the undo stack.
/// Does nothing when `edit_ids` is empty. Must be called outside of any
/// in-progress transaction (asserted below).
fn push_transaction(&mut self, edit_ids: impl IntoIterator<Item = clock::Local>, now: Instant) {
assert_eq!(self.transaction_depth, 0);
let mut edit_ids = edit_ids.into_iter().peekable();
if let Some(first_edit_id) = edit_ids.peek() {
// Use the version recorded for the first edit as the transaction's
// starting version.
let version = self.ops[first_edit_id].version.clone();
self.start_transaction(version, now);
for edit_id in edit_ids {
self.push_undo(edit_id);
}
self.end_transaction(now);
}
}
fn push_undo(&mut self, edit_id: clock::Local) {
assert_ne!(self.transaction_depth, 0);
let last_transaction = self.undo_stack.last_mut().unwrap();
@ -260,6 +277,17 @@ impl History {
}
}
/// Removes the transaction with the given id from the history entirely,
/// searching the undo stack first and then the redo stack. Must be called
/// outside of any in-progress transaction.
fn forget(&mut self, transaction_id: TransactionId) {
    assert_eq!(self.transaction_depth, 0);
    if let Some(transaction_ix) = self.undo_stack.iter().rposition(|t| t.id == transaction_id) {
        self.undo_stack.remove(transaction_ix);
    } else if let Some(transaction_ix) =
        self.redo_stack.iter().rposition(|t| t.id == transaction_id)
    {
        // Bug fix: the index was found in `redo_stack`, so remove from
        // `redo_stack`. Previously this removed from `undo_stack`, which
        // deleted the wrong transaction or panicked when the index was
        // out of bounds for the undo stack.
        self.redo_stack.remove(transaction_ix);
    }
}
fn pop_redo(&mut self) -> Option<&Transaction> {
assert_eq!(self.transaction_depth, 0);
if let Some(transaction) = self.redo_stack.pop() {
@ -377,14 +405,14 @@ pub struct InsertionTimestamp {
}
impl InsertionTimestamp {
fn local(&self) -> clock::Local {
pub fn local(&self) -> clock::Local {
clock::Local {
replica_id: self.replica_id,
value: self.local,
}
}
fn lamport(&self) -> clock::Lamport {
pub fn lamport(&self) -> clock::Lamport {
clock::Lamport {
replica_id: self.replica_id,
value: self.lamport,
@ -513,6 +541,7 @@ impl Buffer {
local_clock,
lamport_clock,
subscriptions: Default::default(),
edit_id_resolvers: Default::default(),
}
}
@ -554,6 +583,7 @@ impl Buffer {
value: lamport_timestamp,
},
subscriptions: Default::default(),
edit_id_resolvers: Default::default(),
snapshot: BufferSnapshot {
replica_id,
visible_text,
@ -808,6 +838,7 @@ impl Buffer {
edit.timestamp,
);
self.snapshot.version.observe(edit.timestamp.local());
self.resolve_edit(edit.timestamp.local());
self.history.push(edit);
}
}
@ -1205,6 +1236,10 @@ impl Buffer {
}
}
/// Drops the given transaction from this buffer's undo/redo history
/// without applying or reverting it.
pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
self.history.forget(transaction_id);
}
pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
if let Some(transaction) = self.history.pop_redo().cloned() {
let transaction_id = transaction.id;
@ -1245,9 +1280,48 @@ impl Buffer {
})
}
/// Groups the given edits into a single transaction in this buffer's
/// history, so they can be undone and redone as a unit.
pub fn push_transaction(
&mut self,
edit_ids: impl IntoIterator<Item = clock::Local>,
now: Instant,
) {
self.history.push_transaction(edit_ids, now);
}
/// Registers a new subscription on this buffer's change topic.
pub fn subscribe(&mut self) -> Subscription {
self.subscriptions.subscribe()
}
/// Returns a future that resolves once every edit in `edit_ids` has been
/// observed in this buffer's version vector.
pub fn wait_for_edits(
&mut self,
edit_ids: &[clock::Local],
) -> impl 'static + Future<Output = ()> {
// Register a oneshot resolver for each edit not yet observed; the
// sender side is fired by `resolve_edit` when the edit arrives.
// Already-observed edits require no waiting.
let mut futures = Vec::new();
for edit_id in edit_ids {
if !self.version.observed(*edit_id) {
let (tx, rx) = oneshot::channel();
self.edit_id_resolvers.entry(*edit_id).or_default().push(tx);
futures.push(rx);
}
}
// The returned future owns only the receivers, so it is 'static and
// can outlive the borrow of `self`.
async move {
for mut future in futures {
future.recv().await;
}
}
}
/// Fires (and removes) all resolvers registered for the given edit id,
/// waking any futures returned by `wait_for_edits`.
fn resolve_edit(&mut self, edit_id: clock::Local) {
    if let Some(resolvers) = self.edit_id_resolvers.remove(&edit_id) {
        for mut resolver in resolvers {
            // A dropped receiver is fine; ignore send errors.
            let _ = resolver.try_send(());
        }
    }
}
}
#[cfg(any(test, feature = "test-support"))]

View File

@ -292,6 +292,7 @@ pub struct EditorStyle {
pub invalid_information_diagnostic: DiagnosticStyle,
pub hint_diagnostic: DiagnosticStyle,
pub invalid_hint_diagnostic: DiagnosticStyle,
pub autocomplete: AutocompleteStyle,
}
#[derive(Clone, Deserialize, Default)]
@ -321,6 +322,16 @@ pub struct DiagnosticStyle {
pub text_scale_factor: f32,
}
/// Theme styling for the editor's autocomplete popup.
#[derive(Clone, Deserialize, Default)]
pub struct AutocompleteStyle {
// Flattened so the popup's container style is deserialized from the
// same level as the other fields.
#[serde(flatten)]
pub container: ContainerStyle,
/// Style of an individual completion item.
pub item: ContainerStyle,
/// Style of the currently selected completion item.
pub selected_item: ContainerStyle,
/// Style of a completion item under the mouse cursor.
pub hovered_item: ContainerStyle,
/// Highlight applied to the characters matching the typed filter text.
pub match_highlight: HighlightStyle,
}
#[derive(Clone, Copy, Default, Deserialize)]
pub struct SelectionStyle {
pub cursor: Color,
@ -408,6 +419,7 @@ impl InputEditorStyle {
invalid_information_diagnostic: default_diagnostic_style.clone(),
hint_diagnostic: default_diagnostic_style.clone(),
invalid_hint_diagnostic: default_diagnostic_style.clone(),
autocomplete: Default::default(),
}
}
}

View File

@ -314,6 +314,26 @@ extends = "$editor.hint_diagnostic"
message.text.color = "$text.3.color"
message.highlight_text.color = "$text.3.color"
[editor.autocomplete]
background = "$surface.2"
border = { width = 2, color = "$border.1" }
corner_radius = 6
padding = 6
match_highlight = { color = "$editor.syntax.keyword.color", weight = "$editor.syntax.keyword.weight" }
margin.left = -14
[editor.autocomplete.item]
padding = { left = 6, right = 6, top = 2, bottom = 2 }
corner_radius = 6
[editor.autocomplete.selected_item]
extends = "$editor.autocomplete.item"
background = "$state.selected"
[editor.autocomplete.hovered_item]
extends = "$editor.autocomplete.item"
background = "$state.hover"
[project_diagnostics]
background = "$surface.1"
empty_message = { extends = "$text.0", size = 18 }

View File

@ -40,6 +40,7 @@ bad = "#b7372e"
active_line = "#161313"
highlighted_line = "#faca5033"
hover = "#00000033"
selected = "#00000088"
[editor.syntax]
keyword = { color = "#0086c0", weight = "bold" }

View File

@ -40,6 +40,7 @@ bad = "#b7372e"
active_line = "#00000022"
highlighted_line = "#faca5033"
hover = "#00000033"
selected = "#00000088"
[editor.syntax]
keyword = { color = "#0086c0", weight = "bold" }
@ -51,7 +52,6 @@ comment = "#6a9955"
property = "#4e94ce"
variant = "#4fc1ff"
constant = "#9cdcfe"
title = { color = "#9cdcfe", weight = "bold" }
emphasis = "#4ec9b0"
"emphasis.strong" = { color = "#4ec9b0", weight = "bold" }

View File

@ -40,6 +40,7 @@ bad = "#b7372e"
active_line = "#00000008"
highlighted_line = "#faca5033"
hover = "#0000000D"
selected = "#0000001c"
[editor.syntax]
keyword = { color = "#0000fa", weight = "bold" }
@ -51,7 +52,6 @@ comment = "#6a9955"
property = "#4e94ce"
variant = "#4fc1ff"
constant = "#5a9ccc"
title = { color = "#5a9ccc", weight = "bold" }
emphasis = "#267f29"
"emphasis.strong" = { color = "#267f29", weight = "bold" }

View File

@ -9,9 +9,9 @@ use std::{str, sync::Arc};
#[folder = "languages"]
struct LanguageDir;
struct RustDiagnosticProcessor;
struct RustPostProcessor;
impl DiagnosticProcessor for RustDiagnosticProcessor {
impl LspPostProcessor for RustPostProcessor {
fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {
lazy_static! {
static ref REGEX: Regex = Regex::new("(?m)`([^`]+)\n`$").unwrap();
@ -31,6 +31,85 @@ impl DiagnosticProcessor for RustDiagnosticProcessor {
}
}
}
/// Builds a syntax-highlighted label for an LSP completion item using
/// Rust-specific knowledge of the item's kind and `detail` text.
///
/// Highlight runs are produced by embedding the label in a small synthetic
/// Rust source fragment and running the language's highlighter over it.
/// Returns `None` when no specialized label applies, so the caller can
/// fall back to a plain label.
fn label_for_completion(
    &self,
    completion: &lsp::CompletionItem,
    language: &Language,
) -> Option<CompletionLabel> {
    match completion.kind {
        // Struct fields: render as `name: Type`, highlighted by parsing a
        // synthetic struct declaration. Offset 11 skips "struct S { ".
        Some(lsp::CompletionItemKind::FIELD) if completion.detail.is_some() => {
            let detail = completion.detail.as_ref().unwrap();
            let name = &completion.label;
            let text = format!("{}: {}", name, detail);
            let source = Rope::from(format!("struct S {{ {} }}", text).as_str());
            let runs = language.highlight_text(&source, 11..11 + text.len());
            return Some(CompletionLabel {
                text,
                runs,
                filter_range: 0..name.len(),
                left_aligned_len: name.len(),
            });
        }
        // Constants and variables: render as `name: Type` inside a synthetic
        // let-binding. Offset 4 skips "let ".
        Some(lsp::CompletionItemKind::CONSTANT | lsp::CompletionItemKind::VARIABLE)
            if completion.detail.is_some() =>
        {
            let detail = completion.detail.as_ref().unwrap();
            let name = &completion.label;
            let text = format!("{}: {}", name, detail);
            let source = Rope::from(format!("let {} = ();", text).as_str());
            let runs = language.highlight_text(&source, 4..4 + text.len());
            return Some(CompletionLabel {
                text,
                runs,
                filter_range: 0..name.len(),
                left_aligned_len: name.len(),
            });
        }
        // Functions and methods whose `detail` is a fn signature: splice the
        // parameter list from `detail` into the label in place of the
        // `(…)` placeholder, then highlight it as a synthetic fn item
        // (offset 3 skips "fn ").
        Some(lsp::CompletionItemKind::FUNCTION | lsp::CompletionItemKind::METHOD)
            if completion.detail.is_some() =>
        {
            lazy_static! {
                static ref REGEX: Regex = Regex::new("\\(…?\\)").unwrap();
            }
            let detail = completion.detail.as_ref().unwrap();
            if detail.starts_with("fn(") {
                let text = REGEX.replace(&completion.label, &detail[2..]).to_string();
                let source = Rope::from(format!("fn {} {{}}", text).as_str());
                let runs = language.highlight_text(&source, 3..3 + text.len());
                return Some(CompletionLabel {
                    // Left-align up to the return-type arrow, if any.
                    left_aligned_len: text.find("->").unwrap_or(text.len()),
                    // Filtering matches only the name, not the parameters.
                    filter_range: 0..completion.label.find('(').unwrap_or(text.len()),
                    text,
                    runs,
                });
            }
        }
        // Other known kinds: map to a highlight name from the grammar and
        // apply a single run over the name portion of the label.
        Some(kind) => {
            let highlight_name = match kind {
                lsp::CompletionItemKind::STRUCT
                | lsp::CompletionItemKind::INTERFACE
                | lsp::CompletionItemKind::ENUM => Some("type"),
                lsp::CompletionItemKind::ENUM_MEMBER => Some("variant"),
                lsp::CompletionItemKind::KEYWORD => Some("keyword"),
                lsp::CompletionItemKind::VALUE | lsp::CompletionItemKind::CONSTANT => {
                    Some("constant")
                }
                _ => None,
            };
            let highlight_id = language.grammar()?.highlight_id_for_name(highlight_name?)?;
            // `completion` is already a reference; pass it directly rather
            // than taking a needless `&&lsp::CompletionItem`.
            let mut label = CompletionLabel::plain(completion);
            label.runs.push((
                0..label.text.rfind('(').unwrap_or(label.text.len()),
                highlight_id,
            ));
            return Some(label);
        }
        _ => {}
    }
    None
}
}
pub fn build_language_registry() -> LanguageRegistry {
@ -52,7 +131,7 @@ fn rust() -> Language {
.unwrap()
.with_outline_query(load_query("rust/outline.scm").as_ref())
.unwrap()
.with_diagnostics_processor(RustDiagnosticProcessor)
.with_lsp_post_processor(RustPostProcessor)
}
fn markdown() -> Language {
@ -72,9 +151,10 @@ fn load_query(path: &str) -> Cow<'static, str> {
#[cfg(test)]
mod tests {
use language::DiagnosticProcessor;
use super::RustDiagnosticProcessor;
use super::*;
use gpui::color::Color;
use language::LspPostProcessor;
use theme::SyntaxTheme;
#[test]
fn test_process_rust_diagnostics() {
@ -100,7 +180,7 @@ mod tests {
},
],
};
RustDiagnosticProcessor.process_diagnostics(&mut params);
RustPostProcessor.process_diagnostics(&mut params);
assert_eq!(params.diagnostics[0].message, "use of moved value `a`");
@ -116,4 +196,82 @@ mod tests {
"cannot borrow `self.d` as mutable\n`self` is a `&` reference"
);
}
// Verifies Rust-specific completion labels: function completions splice the
// signature from `detail` into the label, and field completions render as
// `name: Type`, each with the expected highlight runs.
#[test]
fn test_process_rust_completions() {
    let language = rust();
    let grammar = language.grammar().unwrap();
    let theme = SyntaxTheme::new(vec![
        ("type".into(), Color::green().into()),
        ("keyword".into(), Color::blue().into()),
        ("function".into(), Color::red().into()),
        ("property".into(), Color::white().into()),
    ]);
    language.set_theme(&theme);

    let highlight_function = grammar.highlight_id_for_name("function").unwrap();
    let highlight_type = grammar.highlight_id_for_name("type").unwrap();
    let highlight_keyword = grammar.highlight_id_for_name("keyword").unwrap();
    let highlight_field = grammar.highlight_id_for_name("property").unwrap();

    // Function completion: the `(…)` placeholder in the label is replaced
    // with the parameter list and return type from `detail`.
    assert_eq!(
        language.label_for_completion(&lsp::CompletionItem {
            kind: Some(lsp::CompletionItemKind::FUNCTION),
            label: "hello(…)".to_string(),
            detail: Some("fn(&mut Option<T>) -> Vec<T>".to_string()),
            ..Default::default()
        }),
        Some(CompletionLabel {
            text: "hello(&mut Option<T>) -> Vec<T>".to_string(),
            filter_range: 0..5,
            runs: vec![
                (0..5, highlight_function),
                (7..10, highlight_keyword),
                (11..17, highlight_type),
                (18..19, highlight_type),
                (25..28, highlight_type),
                (29..30, highlight_type),
            ],
            left_aligned_len: 22,
        })
    );

    // Field completion: rendered as `name: Type` with the name filterable.
    // (A third assertion that was a byte-for-byte duplicate of the first
    // was removed.)
    assert_eq!(
        language.label_for_completion(&lsp::CompletionItem {
            kind: Some(lsp::CompletionItemKind::FIELD),
            label: "len".to_string(),
            detail: Some("usize".to_string()),
            ..Default::default()
        }),
        Some(CompletionLabel {
            text: "len: usize".to_string(),
            filter_range: 0..3,
            runs: vec![(0..3, highlight_field), (5..10, highlight_type),],
            left_aligned_len: 3,
        })
    );
}
}