Merge pull request #1559 from zed-industries/language-injection

Add language injection support
This commit is contained in:
Max Brunsfeld 2022-08-30 11:54:00 -07:00 committed by GitHub
commit d4bbf21650
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 2513 additions and 412 deletions

2
Cargo.lock generated
View File

@ -5850,7 +5850,7 @@ dependencies = [
[[package]] [[package]]
name = "tree-sitter" name = "tree-sitter"
version = "0.20.8" version = "0.20.8"
source = "git+https://github.com/tree-sitter/tree-sitter?rev=1f1b1eb4501ed0a2d195d37f7de15f72aa10acd0#1f1b1eb4501ed0a2d195d37f7de15f72aa10acd0" source = "git+https://github.com/tree-sitter/tree-sitter?rev=366210ae925d7ea0891bc7a0c738f60c77c04d7b#366210ae925d7ea0891bc7a0c738f60c77c04d7b"
dependencies = [ dependencies = [
"cc", "cc",
"regex", "regex",

View File

@ -4,7 +4,7 @@ default-members = ["crates/zed"]
resolver = "2" resolver = "2"
[patch.crates-io] [patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "1f1b1eb4501ed0a2d195d37f7de15f72aa10acd0" } tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "366210ae925d7ea0891bc7a0c738f60c77c04d7b" }
async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" } async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" }
# TODO - Remove when a version is released with this PR: https://github.com/servo/core-foundation-rs/pull/457 # TODO - Remove when a version is released with this PR: https://github.com/servo/core-foundation-rs/pull/457

View File

@ -6,13 +6,15 @@ pub use crate::{
use crate::{ use crate::{
diagnostic_set::{DiagnosticEntry, DiagnosticGroup}, diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
outline::OutlineItem, outline::OutlineItem,
syntax_map::{
SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxSnapshot, ToTreeSitterPoint,
},
CodeLabel, Outline, CodeLabel, Outline,
}; };
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use clock::ReplicaId; use clock::ReplicaId;
use futures::FutureExt as _; use futures::FutureExt as _;
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task}; use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
use lazy_static::lazy_static;
use parking_lot::Mutex; use parking_lot::Mutex;
use settings::Settings; use settings::Settings;
use similar::{ChangeTag, TextDiff}; use similar::{ChangeTag, TextDiff};
@ -25,7 +27,7 @@ use std::{
future::Future, future::Future,
iter::{self, Iterator, Peekable}, iter::{self, Iterator, Peekable},
mem, mem,
ops::{Deref, DerefMut, Range}, ops::{Deref, Range},
path::{Path, PathBuf}, path::{Path, PathBuf},
str, str,
sync::Arc, sync::Arc,
@ -36,7 +38,6 @@ use sum_tree::TreeMap;
use text::operation_queue::OperationQueue; use text::operation_queue::OperationQueue;
pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, Operation as _, *}; pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, Operation as _, *};
use theme::SyntaxTheme; use theme::SyntaxTheme;
use tree_sitter::{InputEdit, QueryCursor, Tree};
use util::TryFutureExt as _; use util::TryFutureExt as _;
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
@ -44,10 +45,6 @@ pub use {tree_sitter_rust, tree_sitter_typescript};
pub use lsp::DiagnosticSeverity; pub use lsp::DiagnosticSeverity;
lazy_static! {
static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
}
pub struct Buffer { pub struct Buffer {
text: TextBuffer, text: TextBuffer,
file: Option<Arc<dyn File>>, file: Option<Arc<dyn File>>,
@ -60,7 +57,7 @@ pub struct Buffer {
autoindent_requests: Vec<Arc<AutoindentRequest>>, autoindent_requests: Vec<Arc<AutoindentRequest>>,
pending_autoindent: Option<Task<()>>, pending_autoindent: Option<Task<()>>,
sync_parse_timeout: Duration, sync_parse_timeout: Duration,
syntax_tree: Mutex<Option<SyntaxTree>>, syntax_map: Mutex<SyntaxMap>,
parsing_in_background: bool, parsing_in_background: bool,
parse_count: usize, parse_count: usize,
diagnostics: DiagnosticSet, diagnostics: DiagnosticSet,
@ -76,7 +73,7 @@ pub struct Buffer {
pub struct BufferSnapshot { pub struct BufferSnapshot {
text: text::BufferSnapshot, text: text::BufferSnapshot,
tree: Option<Tree>, pub(crate) syntax: SyntaxSnapshot,
file: Option<Arc<dyn File>>, file: Option<Arc<dyn File>>,
diagnostics: DiagnosticSet, diagnostics: DiagnosticSet,
diagnostics_update_count: usize, diagnostics_update_count: usize,
@ -222,14 +219,6 @@ pub trait LocalFile: File {
); );
} }
pub(crate) struct QueryCursorHandle(Option<QueryCursor>);
#[derive(Clone)]
struct SyntaxTree {
tree: Tree,
version: clock::Global,
}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum AutoindentMode { pub enum AutoindentMode {
/// Indent each line of inserted text. /// Indent each line of inserted text.
@ -269,14 +258,11 @@ struct IndentSuggestion {
delta: Ordering, delta: Ordering,
} }
pub(crate) struct TextProvider<'a>(pub(crate) &'a Rope);
struct BufferChunkHighlights<'a> { struct BufferChunkHighlights<'a> {
captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>, captures: SyntaxMapCaptures<'a>,
next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>, next_capture: Option<SyntaxMapCapture<'a>>,
stack: Vec<(usize, HighlightId)>, stack: Vec<(usize, HighlightId)>,
highlight_map: HighlightMap, highlight_maps: Vec<HighlightMap>,
_query_cursor: QueryCursorHandle,
} }
pub struct BufferChunks<'a> { pub struct BufferChunks<'a> {
@ -433,7 +419,7 @@ impl Buffer {
was_dirty_before_starting_transaction: None, was_dirty_before_starting_transaction: None,
text: buffer, text: buffer,
file, file,
syntax_tree: Mutex::new(None), syntax_map: Mutex::new(SyntaxMap::new()),
parsing_in_background: false, parsing_in_background: false,
parse_count: 0, parse_count: 0,
sync_parse_timeout: Duration::from_millis(1), sync_parse_timeout: Duration::from_millis(1),
@ -453,9 +439,14 @@ impl Buffer {
} }
pub fn snapshot(&self) -> BufferSnapshot { pub fn snapshot(&self) -> BufferSnapshot {
let text = self.text.snapshot();
let mut syntax_map = self.syntax_map.lock();
syntax_map.interpolate(&text);
let syntax = syntax_map.snapshot();
BufferSnapshot { BufferSnapshot {
text: self.text.snapshot(), text,
tree: self.syntax_tree(), syntax,
file: self.file.clone(), file: self.file.clone(),
remote_selections: self.remote_selections.clone(), remote_selections: self.remote_selections.clone(),
diagnostics: self.diagnostics.clone(), diagnostics: self.diagnostics.clone(),
@ -511,11 +502,17 @@ impl Buffer {
} }
pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) { pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
*self.syntax_tree.lock() = None; self.syntax_map.lock().clear();
self.language = language; self.language = language;
self.reparse(cx); self.reparse(cx);
} }
pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
self.syntax_map
.lock()
.set_language_registry(language_registry);
}
pub fn did_save( pub fn did_save(
&mut self, &mut self,
version: clock::Global, version: clock::Global,
@ -660,15 +657,6 @@ impl Buffer {
self.file_update_count self.file_update_count
} }
pub(crate) fn syntax_tree(&self) -> Option<Tree> {
if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
self.interpolate_tree(syntax_tree);
Some(syntax_tree.tree.clone())
} else {
None
}
}
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
pub fn is_parsing(&self) -> bool { pub fn is_parsing(&self) -> bool {
self.parsing_in_background self.parsing_in_background
@ -679,75 +667,73 @@ impl Buffer {
self.sync_parse_timeout = timeout; self.sync_parse_timeout = timeout;
} }
fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool { fn reparse(&mut self, cx: &mut ModelContext<Self>) {
if self.parsing_in_background { if self.parsing_in_background {
return false; return;
} }
let language = if let Some(language) = self.language.clone() {
language
} else {
return;
};
if let Some(grammar) = self.grammar().cloned() { let text = self.text_snapshot();
let old_tree = self.syntax_tree(); let parsed_version = self.version();
let text = self.as_rope().clone();
let parsed_version = self.version();
let parse_task = cx.background().spawn({
let grammar = grammar.clone();
async move { grammar.parse_text(&text, old_tree) }
});
match cx let mut syntax_map = self.syntax_map.lock();
.background() syntax_map.interpolate(&text);
.block_with_timeout(self.sync_parse_timeout, parse_task) let language_registry = syntax_map.language_registry();
{ let mut syntax_snapshot = syntax_map.snapshot();
Ok(new_tree) => { let syntax_map_version = syntax_map.parsed_version();
self.did_finish_parsing(new_tree, parsed_version, cx); drop(syntax_map);
return true;
}
Err(parse_task) => {
self.parsing_in_background = true;
cx.spawn(move |this, mut cx| async move {
let new_tree = parse_task.await;
this.update(&mut cx, move |this, cx| {
let grammar_changed = this
.grammar()
.map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
let parse_again =
this.version.changed_since(&parsed_version) || grammar_changed;
this.parsing_in_background = false;
this.did_finish_parsing(new_tree, parsed_version, cx);
if parse_again && this.reparse(cx) {} let parse_task = cx.background().spawn({
}); let language = language.clone();
}) async move {
.detach(); syntax_snapshot.reparse(&syntax_map_version, &text, language_registry, language);
} syntax_snapshot
}
});
match cx
.background()
.block_with_timeout(self.sync_parse_timeout, parse_task)
{
Ok(new_syntax_snapshot) => {
self.did_finish_parsing(new_syntax_snapshot, parsed_version, cx);
return;
}
Err(parse_task) => {
self.parsing_in_background = true;
cx.spawn(move |this, mut cx| async move {
let new_syntax_map = parse_task.await;
this.update(&mut cx, move |this, cx| {
let grammar_changed =
this.language.as_ref().map_or(true, |current_language| {
!Arc::ptr_eq(&language, current_language)
});
let parse_again =
this.version.changed_since(&parsed_version) || grammar_changed;
this.did_finish_parsing(new_syntax_map, parsed_version, cx);
this.parsing_in_background = false;
if parse_again {
this.reparse(cx);
}
});
})
.detach();
} }
} }
false
}
fn interpolate_tree(&self, tree: &mut SyntaxTree) {
for edit in self.edits_since::<(usize, Point)>(&tree.version) {
let (bytes, lines) = edit.flatten();
tree.tree.edit(&InputEdit {
start_byte: bytes.new.start,
old_end_byte: bytes.new.start + bytes.old.len(),
new_end_byte: bytes.new.end,
start_position: lines.new.start.to_ts_point(),
old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
.to_ts_point(),
new_end_position: lines.new.end.to_ts_point(),
});
}
tree.version = self.version();
} }
fn did_finish_parsing( fn did_finish_parsing(
&mut self, &mut self,
tree: Tree, syntax_snapshot: SyntaxSnapshot,
version: clock::Global, version: clock::Global,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) { ) {
self.parse_count += 1; self.parse_count += 1;
*self.syntax_tree.lock() = Some(SyntaxTree { tree, version }); self.syntax_map.lock().did_parse(syntax_snapshot, version);
self.request_autoindent(cx); self.request_autoindent(cx);
cx.emit(Event::Reparsed); cx.emit(Event::Reparsed);
cx.notify(); cx.notify();
@ -786,10 +772,7 @@ impl Buffer {
fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> { fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
let max_rows_between_yields = 100; let max_rows_between_yields = 100;
let snapshot = self.snapshot(); let snapshot = self.snapshot();
if snapshot.language.is_none() if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
|| snapshot.tree.is_none()
|| self.autoindent_requests.is_empty()
{
return None; return None;
} }
@ -1288,10 +1271,6 @@ impl Buffer {
cx.notify(); cx.notify();
} }
fn grammar(&self) -> Option<&Arc<Grammar>> {
self.language.as_ref().and_then(|l| l.grammar.as_ref())
}
pub fn apply_ops<I: IntoIterator<Item = Operation>>( pub fn apply_ops<I: IntoIterator<Item = Operation>>(
&mut self, &mut self,
ops: I, ops: I,
@ -1626,38 +1605,38 @@ impl BufferSnapshot {
&self, &self,
row_range: Range<u32>, row_range: Range<u32>,
) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> { ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
let language = self.language.as_ref()?; let config = &self.language.as_ref()?.config;
let grammar = language.grammar.as_ref()?;
let config = &language.config;
let prev_non_blank_row = self.prev_non_blank_row(row_range.start); let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
// Find the suggested indentation ranges based on the syntax tree. // Find the suggested indentation ranges based on the syntax tree.
let indents_query = grammar.indents_query.as_ref()?; let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
let mut query_cursor = QueryCursorHandle::new(); let end = Point::new(row_range.end, 0);
let indent_capture_ix = indents_query.capture_index_for_name("indent"); let range = (start..end).to_offset(&self.text);
let end_capture_ix = indents_query.capture_index_for_name("end"); let mut matches = self.syntax.matches(range, &self.text, |grammar| {
query_cursor.set_point_range( Some(&grammar.indents_config.as_ref()?.query)
Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point() });
..Point::new(row_range.end, 0).to_ts_point(), let indent_configs = matches
); .grammars()
.iter()
.map(|grammar| grammar.indents_config.as_ref().unwrap())
.collect::<Vec<_>>();
let mut indent_ranges = Vec::<Range<Point>>::new(); let mut indent_ranges = Vec::<Range<Point>>::new();
for mat in query_cursor.matches( while let Some(mat) = matches.peek() {
indents_query,
self.tree.as_ref()?.root_node(),
TextProvider(self.as_rope()),
) {
let mut start: Option<Point> = None; let mut start: Option<Point> = None;
let mut end: Option<Point> = None; let mut end: Option<Point> = None;
let config = &indent_configs[mat.grammar_index];
for capture in mat.captures { for capture in mat.captures {
if Some(capture.index) == indent_capture_ix { if capture.index == config.indent_capture_ix {
start.get_or_insert(Point::from_ts_point(capture.node.start_position())); start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
end.get_or_insert(Point::from_ts_point(capture.node.end_position())); end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
} else if Some(capture.index) == end_capture_ix { } else if Some(capture.index) == config.end_capture_ix {
end = Some(Point::from_ts_point(capture.node.start_position())); end = Some(Point::from_ts_point(capture.node.start_position()));
} }
} }
matches.advance();
if let Some((start, end)) = start.zip(end) { if let Some((start, end)) = start.zip(end) {
if start.row == end.row { if start.row == end.row {
continue; continue;
@ -1789,10 +1768,18 @@ impl BufferSnapshot {
pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks { pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
let range = range.start.to_offset(self)..range.end.to_offset(self); let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut tree = None; let mut syntax = None;
let mut diagnostic_endpoints = Vec::new(); let mut diagnostic_endpoints = Vec::new();
if language_aware { if language_aware {
tree = self.tree.as_ref(); let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
grammar.highlights_query.as_ref()
});
let highlight_maps = captures
.grammars()
.into_iter()
.map(|grammar| grammar.highlight_map())
.collect();
syntax = Some((captures, highlight_maps));
for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) { for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
diagnostic_endpoints.push(DiagnosticEndpoint { diagnostic_endpoints.push(DiagnosticEndpoint {
offset: entry.range.start, offset: entry.range.start,
@ -1811,13 +1798,7 @@ impl BufferSnapshot {
.sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start)); .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
} }
BufferChunks::new( BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
self.text.as_rope(),
range,
tree,
self.grammar(),
diagnostic_endpoints,
)
} }
pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) { pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
@ -1843,12 +1824,6 @@ impl BufferSnapshot {
self.language.as_ref() self.language.as_ref()
} }
fn grammar(&self) -> Option<&Arc<Grammar>> {
self.language
.as_ref()
.and_then(|language| language.grammar.as_ref())
}
pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) { pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
let mut start = start.to_offset(self); let mut start = start.to_offset(self);
let mut end = start; let mut end = start;
@ -1879,61 +1854,71 @@ impl BufferSnapshot {
} }
pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> { pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
let tree = self.tree.as_ref()?;
let range = range.start.to_offset(self)..range.end.to_offset(self); let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut cursor = tree.root_node().walk(); let mut result: Option<Range<usize>> = None;
'outer: for (_, _, node) in self.syntax.layers_for_range(range.clone(), &self.text) {
let mut cursor = node.walk();
// Descend to the first leaf that touches the start of the range, // Descend to the first leaf that touches the start of the range,
// and if the range is non-empty, extends beyond the start. // and if the range is non-empty, extends beyond the start.
while cursor.goto_first_child_for_byte(range.start).is_some() { while cursor.goto_first_child_for_byte(range.start).is_some() {
if !range.is_empty() && cursor.node().end_byte() == range.start { if !range.is_empty() && cursor.node().end_byte() == range.start {
cursor.goto_next_sibling(); cursor.goto_next_sibling();
}
} }
}
// Ascend to the smallest ancestor that strictly contains the range. // Ascend to the smallest ancestor that strictly contains the range.
loop { loop {
let node_range = cursor.node().byte_range(); let node_range = cursor.node().byte_range();
if node_range.start <= range.start if node_range.start <= range.start
&& node_range.end >= range.end && node_range.end >= range.end
&& node_range.len() > range.len() && node_range.len() > range.len()
{ {
break; break;
} }
if !cursor.goto_parent() {
break;
}
}
let left_node = cursor.node();
// For an empty range, try to find another node immediately to the right of the range.
if left_node.end_byte() == range.start {
let mut right_node = None;
while !cursor.goto_next_sibling() {
if !cursor.goto_parent() { if !cursor.goto_parent() {
break; continue 'outer;
} }
} }
while cursor.node().start_byte() == range.start { let left_node = cursor.node();
right_node = Some(cursor.node()); let mut layer_result = left_node.byte_range();
if !cursor.goto_first_child() {
break; // For an empty range, try to find another node immediately to the right of the range.
if left_node.end_byte() == range.start {
let mut right_node = None;
while !cursor.goto_next_sibling() {
if !cursor.goto_parent() {
break;
}
}
while cursor.node().start_byte() == range.start {
right_node = Some(cursor.node());
if !cursor.goto_first_child() {
break;
}
}
// If there is a candidate node on both sides of the (empty) range, then
// decide between the two by favoring a named node over an anonymous token.
// If both nodes are the same in that regard, favor the right one.
if let Some(right_node) = right_node {
if right_node.is_named() || !left_node.is_named() {
layer_result = right_node.byte_range();
}
} }
} }
// If there is a candidate node on both sides of the (empty) range, then if let Some(previous_result) = &result {
// decide between the two by favoring a named node over an anonymous token. if previous_result.len() < layer_result.len() {
// If both nodes are the same in that regard, favor the right one. continue;
if let Some(right_node) = right_node {
if right_node.is_named() || !left_node.is_named() {
return Some(right_node.byte_range());
} }
} }
result = Some(layer_result);
} }
Some(left_node.byte_range()) result
} }
pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> { pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
@ -1947,8 +1932,10 @@ impl BufferSnapshot {
theme: Option<&SyntaxTheme>, theme: Option<&SyntaxTheme>,
) -> Option<Vec<OutlineItem<Anchor>>> { ) -> Option<Vec<OutlineItem<Anchor>>> {
let position = position.to_offset(self); let position = position.to_offset(self);
let mut items = let mut items = self.outline_items_containing(
self.outline_items_containing(position.saturating_sub(1)..position + 1, theme)?; position.saturating_sub(1)..self.len().min(position + 1),
theme,
)?;
let mut prev_depth = None; let mut prev_depth = None;
items.retain(|item| { items.retain(|item| {
let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth); let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
@ -1963,109 +1950,107 @@ impl BufferSnapshot {
range: Range<usize>, range: Range<usize>,
theme: Option<&SyntaxTheme>, theme: Option<&SyntaxTheme>,
) -> Option<Vec<OutlineItem<Anchor>>> { ) -> Option<Vec<OutlineItem<Anchor>>> {
let tree = self.tree.as_ref()?; let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
let grammar = self grammar.outline_config.as_ref().map(|c| &c.query)
.language });
.as_ref() let configs = matches
.and_then(|language| language.grammar.as_ref())?; .grammars()
.iter()
let outline_query = grammar.outline_query.as_ref()?; .map(|g| g.outline_config.as_ref().unwrap())
let mut cursor = QueryCursorHandle::new(); .collect::<Vec<_>>();
cursor.set_byte_range(range.clone());
let matches = cursor.matches(
outline_query,
tree.root_node(),
TextProvider(self.as_rope()),
);
let mut chunks = self.chunks(0..self.len(), true); let mut chunks = self.chunks(0..self.len(), true);
let item_capture_ix = outline_query.capture_index_for_name("item")?;
let name_capture_ix = outline_query.capture_index_for_name("name")?;
let context_capture_ix = outline_query
.capture_index_for_name("context")
.unwrap_or(u32::MAX);
let mut stack = Vec::<Range<usize>>::new(); let mut stack = Vec::<Range<usize>>::new();
let items = matches let mut items = Vec::new();
.filter_map(|mat| { while let Some(mat) = matches.peek() {
let item_node = mat.nodes_for_capture_index(item_capture_ix).next()?; let config = &configs[mat.grammar_index];
let item_range = item_node.start_byte()..item_node.end_byte(); let item_node = mat.captures.iter().find_map(|cap| {
if item_range.end < range.start || item_range.start > range.end { if cap.index == config.item_capture_ix {
return None; Some(cap.node)
} else {
None
} }
let mut text = String::new(); })?;
let mut name_ranges = Vec::new();
let mut highlight_ranges = Vec::new();
for capture in mat.captures { let item_range = item_node.byte_range();
let node_is_name; if item_range.end < range.start || item_range.start > range.end {
if capture.index == name_capture_ix { matches.advance();
node_is_name = true; continue;
} else if capture.index == context_capture_ix { }
node_is_name = false;
// TODO - move later, after processing captures
let mut text = String::new();
let mut name_ranges = Vec::new();
let mut highlight_ranges = Vec::new();
for capture in mat.captures {
let node_is_name;
if capture.index == config.name_capture_ix {
node_is_name = true;
} else if Some(capture.index) == config.context_capture_ix {
node_is_name = false;
} else {
continue;
}
let range = capture.node.start_byte()..capture.node.end_byte();
if !text.is_empty() {
text.push(' ');
}
if node_is_name {
let mut start = text.len();
let end = start + range.len();
// When multiple names are captured, then the matcheable text
// includes the whitespace in between the names.
if !name_ranges.is_empty() {
start -= 1;
}
name_ranges.push(start..end);
}
let mut offset = range.start;
chunks.seek(offset);
for mut chunk in chunks.by_ref() {
if chunk.text.len() > range.end - offset {
chunk.text = &chunk.text[0..(range.end - offset)];
offset = range.end;
} else { } else {
continue; offset += chunk.text.len();
} }
let style = chunk
let range = capture.node.start_byte()..capture.node.end_byte(); .syntax_highlight_id
if !text.is_empty() { .zip(theme)
text.push(' '); .and_then(|(highlight, theme)| highlight.style(theme));
if let Some(style) = style {
let start = text.len();
let end = start + chunk.text.len();
highlight_ranges.push((start..end, style));
} }
if node_is_name { text.push_str(chunk.text);
let mut start = text.len(); if offset >= range.end {
let end = start + range.len(); break;
// When multiple names are captured, then the matcheable text
// includes the whitespace in between the names.
if !name_ranges.is_empty() {
start -= 1;
}
name_ranges.push(start..end);
}
let mut offset = range.start;
chunks.seek(offset);
for mut chunk in chunks.by_ref() {
if chunk.text.len() > range.end - offset {
chunk.text = &chunk.text[0..(range.end - offset)];
offset = range.end;
} else {
offset += chunk.text.len();
}
let style = chunk
.syntax_highlight_id
.zip(theme)
.and_then(|(highlight, theme)| highlight.style(theme));
if let Some(style) = style {
let start = text.len();
let end = start + chunk.text.len();
highlight_ranges.push((start..end, style));
}
text.push_str(chunk.text);
if offset >= range.end {
break;
}
} }
} }
}
while stack.last().map_or(false, |prev_range| { matches.advance();
prev_range.start > item_range.start || prev_range.end < item_range.end while stack.last().map_or(false, |prev_range| {
}) { prev_range.start > item_range.start || prev_range.end < item_range.end
stack.pop(); }) {
} stack.pop();
stack.push(item_range.clone()); }
stack.push(item_range.clone());
Some(OutlineItem { items.push(OutlineItem {
depth: stack.len() - 1, depth: stack.len() - 1,
range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end), range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
text, text,
highlight_ranges, highlight_ranges,
name_ranges, name_ranges,
})
}) })
.collect::<Vec<_>>(); }
Some(items) Some(items)
} }
@ -2073,28 +2058,48 @@ impl BufferSnapshot {
&self, &self,
range: Range<T>, range: Range<T>,
) -> Option<(Range<usize>, Range<usize>)> { ) -> Option<(Range<usize>, Range<usize>)> {
let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
let brackets_query = grammar.brackets_query.as_ref()?;
let open_capture_ix = brackets_query.capture_index_for_name("open")?;
let close_capture_ix = brackets_query.capture_index_for_name("close")?;
// Find bracket pairs that *inclusively* contain the given range. // Find bracket pairs that *inclusively* contain the given range.
let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1; let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
let mut cursor = QueryCursorHandle::new(); let mut matches = self.syntax.matches(range, &self.text, |grammar| {
let matches = cursor.set_byte_range(range).matches( grammar.brackets_config.as_ref().map(|c| &c.query)
brackets_query, });
tree.root_node(), let configs = matches
TextProvider(self.as_rope()), .grammars()
); .iter()
.map(|grammar| grammar.brackets_config.as_ref().unwrap())
.collect::<Vec<_>>();
// Get the ranges of the innermost pair of brackets. // Get the ranges of the innermost pair of brackets.
matches let mut result: Option<(Range<usize>, Range<usize>)> = None;
.filter_map(|mat| { while let Some(mat) = matches.peek() {
let open = mat.nodes_for_capture_index(open_capture_ix).next()?; let mut open = None;
let close = mat.nodes_for_capture_index(close_capture_ix).next()?; let mut close = None;
Some((open.byte_range(), close.byte_range())) let config = &configs[mat.grammar_index];
}) for capture in mat.captures {
.min_by_key(|(open_range, close_range)| close_range.end - open_range.start) if capture.index == config.open_capture_ix {
open = Some(capture.node.byte_range());
} else if capture.index == config.close_capture_ix {
close = Some(capture.node.byte_range());
}
}
matches.advance();
if let Some((open, close)) = open.zip(close) {
let len = close.end - open.start;
if let Some((existing_open, existing_close)) = &result {
let existing_len = existing_close.end - existing_open.start;
if len > existing_len {
continue;
}
}
result = Some((open, close));
}
}
result
} }
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
@ -2206,7 +2211,7 @@ impl Clone for BufferSnapshot {
fn clone(&self) -> Self { fn clone(&self) -> Self {
Self { Self {
text: self.text.clone(), text: self.text.clone(),
tree: self.tree.clone(), syntax: self.syntax.clone(),
file: self.file.clone(), file: self.file.clone(),
remote_selections: self.remote_selections.clone(), remote_selections: self.remote_selections.clone(),
diagnostics: self.diagnostics.clone(), diagnostics: self.diagnostics.clone(),
@ -2227,56 +2232,23 @@ impl Deref for BufferSnapshot {
} }
} }
impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
type I = ByteChunks<'a>;
fn text(&mut self, node: tree_sitter::Node) -> Self::I {
ByteChunks(self.0.chunks_in_range(node.byte_range()))
}
}
pub(crate) struct ByteChunks<'a>(rope::Chunks<'a>);
impl<'a> Iterator for ByteChunks<'a> {
type Item = &'a [u8];
fn next(&mut self) -> Option<Self::Item> {
self.0.next().map(str::as_bytes)
}
}
unsafe impl<'a> Send for BufferChunks<'a> {} unsafe impl<'a> Send for BufferChunks<'a> {}
impl<'a> BufferChunks<'a> { impl<'a> BufferChunks<'a> {
pub(crate) fn new( pub(crate) fn new(
text: &'a Rope, text: &'a Rope,
range: Range<usize>, range: Range<usize>,
tree: Option<&'a Tree>, syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
grammar: Option<&'a Arc<Grammar>>,
diagnostic_endpoints: Vec<DiagnosticEndpoint>, diagnostic_endpoints: Vec<DiagnosticEndpoint>,
) -> Self { ) -> Self {
let mut highlights = None; let mut highlights = None;
if let Some((grammar, tree)) = grammar.zip(tree) { if let Some((captures, highlight_maps)) = syntax {
if let Some(highlights_query) = grammar.highlights_query.as_ref() { highlights = Some(BufferChunkHighlights {
let mut query_cursor = QueryCursorHandle::new(); captures,
next_capture: None,
// TODO - add a Tree-sitter API to remove the need for this. stack: Default::default(),
let cursor = unsafe { highlight_maps,
std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut()) })
};
let captures = cursor.set_byte_range(range.clone()).captures(
highlights_query,
tree.root_node(),
TextProvider(text),
);
highlights = Some(BufferChunkHighlights {
captures,
next_capture: None,
stack: Default::default(),
highlight_map: grammar.highlight_map(),
_query_cursor: query_cursor,
})
}
} }
let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable(); let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
@ -2302,14 +2274,13 @@ impl<'a> BufferChunks<'a> {
highlights highlights
.stack .stack
.retain(|(end_offset, _)| *end_offset > offset); .retain(|(end_offset, _)| *end_offset > offset);
if let Some((mat, capture_ix)) = &highlights.next_capture { if let Some(capture) = &highlights.next_capture {
let capture = mat.captures[*capture_ix as usize];
if offset >= capture.node.start_byte() { if offset >= capture.node.start_byte() {
let next_capture_end = capture.node.end_byte(); let next_capture_end = capture.node.end_byte();
if offset < next_capture_end { if offset < next_capture_end {
highlights.stack.push(( highlights.stack.push((
next_capture_end, next_capture_end,
highlights.highlight_map.get(capture.index), highlights.highlight_maps[capture.grammar_index].get(capture.index),
)); ));
} }
highlights.next_capture.take(); highlights.next_capture.take();
@ -2385,13 +2356,13 @@ impl<'a> Iterator for BufferChunks<'a> {
highlights.next_capture = highlights.captures.next(); highlights.next_capture = highlights.captures.next();
} }
while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() { while let Some(capture) = highlights.next_capture.as_ref() {
let capture = mat.captures[*capture_ix as usize];
if self.range.start < capture.node.start_byte() { if self.range.start < capture.node.start_byte() {
next_capture_start = capture.node.start_byte(); next_capture_start = capture.node.start_byte();
break; break;
} else { } else {
let highlight_id = highlights.highlight_map.get(capture.index); let highlight_id =
highlights.highlight_maps[capture.grammar_index].get(capture.index);
highlights highlights
.stack .stack
.push((capture.node.end_byte(), highlight_id)); .push((capture.node.end_byte(), highlight_id));
@ -2443,52 +2414,6 @@ impl<'a> Iterator for BufferChunks<'a> {
} }
} }
impl QueryCursorHandle {
    /// Acquires a query cursor, reusing one from the global pool when
    /// available and creating a fresh one otherwise.
    pub(crate) fn new() -> Self {
        let recycled = QUERY_CURSORS.lock().pop();
        let mut cursor = recycled.unwrap_or_else(QueryCursor::new);
        // Bound the number of in-progress matches tree-sitter will track.
        cursor.set_match_limit(64);
        QueryCursorHandle(Some(cursor))
    }
}
impl Deref for QueryCursorHandle {
    type Target = QueryCursor;

    fn deref(&self) -> &Self::Target {
        // The inner Option is only emptied (via `take`) inside `drop`, so
        // this unwrap cannot fail while the handle is alive.
        self.0.as_ref().unwrap()
    }
}
impl DerefMut for QueryCursorHandle {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // See `Deref`: the Option is always `Some` while the handle is alive.
        self.0.as_mut().unwrap()
    }
}
impl Drop for QueryCursorHandle {
    fn drop(&mut self) {
        let mut cursor = self.0.take().unwrap();
        // Clear any byte/point range restrictions left by the previous use
        // before returning the cursor to the global pool for reuse.
        cursor.set_byte_range(0..usize::MAX);
        cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
        QUERY_CURSORS.lock().push(cursor)
    }
}
/// Conversion between the editor's `Point` coordinates and tree-sitter's
/// `Point` type.
trait ToTreeSitterPoint {
    fn to_ts_point(self) -> tree_sitter::Point;
    fn from_ts_point(point: tree_sitter::Point) -> Self;
}
impl ToTreeSitterPoint for Point {
    fn to_ts_point(self) -> tree_sitter::Point {
        // Editor points use u32 row/column; tree-sitter uses usize.
        tree_sitter::Point::new(self.row as usize, self.column as usize)
    }

    fn from_ts_point(point: tree_sitter::Point) -> Self {
        // NOTE(review): truncates if tree-sitter coordinates ever exceed
        // u32::MAX — presumably unreachable for real buffers.
        Point::new(point.row as u32, point.column as u32)
    }
}
impl operation_queue::Operation for Operation { impl operation_queue::Operation for Operation {
fn lamport_timestamp(&self) -> clock::Lamport { fn lamport_timestamp(&self) -> clock::Lamport {
match self { match self {

View File

@ -3,6 +3,7 @@ mod diagnostic_set;
mod highlight_map; mod highlight_map;
mod outline; mod outline;
pub mod proto; pub mod proto;
mod syntax_map;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;
@ -29,8 +30,12 @@ use std::{
ops::Range, ops::Range,
path::{Path, PathBuf}, path::{Path, PathBuf},
str, str,
sync::Arc, sync::{
atomic::{AtomicUsize, Ordering::SeqCst},
Arc,
},
}; };
use syntax_map::SyntaxSnapshot;
use theme::{SyntaxTheme, Theme}; use theme::{SyntaxTheme, Theme};
use tree_sitter::{self, Query}; use tree_sitter::{self, Query};
use util::ResultExt; use util::ResultExt;
@ -49,6 +54,7 @@ thread_local! {
} }
lazy_static! { lazy_static! {
pub static ref NEXT_GRAMMAR_ID: AtomicUsize = Default::default();
pub static ref PLAIN_TEXT: Arc<Language> = Arc::new(Language::new( pub static ref PLAIN_TEXT: Arc<Language> = Arc::new(Language::new(
LanguageConfig { LanguageConfig {
name: "Plain Text".into(), name: "Plain Text".into(),
@ -285,14 +291,42 @@ pub struct Language {
} }
pub struct Grammar { pub struct Grammar {
id: usize,
pub(crate) ts_language: tree_sitter::Language, pub(crate) ts_language: tree_sitter::Language,
pub(crate) highlights_query: Option<Query>, pub(crate) highlights_query: Option<Query>,
pub(crate) brackets_query: Option<Query>, pub(crate) brackets_config: Option<BracketConfig>,
pub(crate) indents_query: Option<Query>, pub(crate) indents_config: Option<IndentConfig>,
pub(crate) outline_query: Option<Query>, pub(crate) outline_config: Option<OutlineConfig>,
pub(crate) injection_config: Option<InjectionConfig>,
pub(crate) highlight_map: Mutex<HighlightMap>, pub(crate) highlight_map: Mutex<HighlightMap>,
} }
/// Compiled tree-sitter query describing auto-indent behavior, along with
/// the indices of the captures it uses.
struct IndentConfig {
    query: Query,
    // Index of the required `@indent` capture.
    indent_capture_ix: u32,
    // Index of the optional `@end` capture, if present in the query.
    end_capture_ix: Option<u32>,
}
/// Compiled tree-sitter query used to build a buffer outline.
struct OutlineConfig {
    query: Query,
    // Index of the required `@item` capture.
    item_capture_ix: u32,
    // Index of the required `@name` capture.
    name_capture_ix: u32,
    // Index of the optional `@context` capture, if present in the query.
    context_capture_ix: Option<u32>,
}
/// Compiled tree-sitter query describing where other languages are injected
/// into this one (e.g. C code inside a `#define` body).
struct InjectionConfig {
    query: Query,
    // Index of the required `@content` capture marking the injected region.
    content_capture_ix: u32,
    // Index of the optional `@language` capture naming the injected language.
    language_capture_ix: Option<u32>,
    // Per-pattern language names taken from `(#set! "language" …)` query
    // properties; `None` for patterns without a statically-set language.
    languages_by_pattern_ix: Vec<Option<Box<str>>>,
}
/// Compiled tree-sitter query describing matching bracket pairs.
struct BracketConfig {
    query: Query,
    // Index of the required `@open` capture.
    open_capture_ix: u32,
    // Index of the required `@close` capture.
    close_capture_ix: u32,
}
#[derive(Clone)] #[derive(Clone)]
pub enum LanguageServerBinaryStatus { pub enum LanguageServerBinaryStatus {
CheckingForUpdate, CheckingForUpdate,
@ -490,6 +524,13 @@ impl LanguageRegistry {
} }
} }
// In tests (and with the test-support feature), a default registry is the
// test registry.
#[cfg(any(test, feature = "test-support"))]
impl Default for LanguageRegistry {
    fn default() -> Self {
        Self::test()
    }
}
async fn get_server_binary_path( async fn get_server_binary_path(
adapter: Arc<CachedLspAdapter>, adapter: Arc<CachedLspAdapter>,
language: Arc<Language>, language: Arc<Language>,
@ -567,10 +608,12 @@ impl Language {
config, config,
grammar: ts_language.map(|ts_language| { grammar: ts_language.map(|ts_language| {
Arc::new(Grammar { Arc::new(Grammar {
id: NEXT_GRAMMAR_ID.fetch_add(1, SeqCst),
highlights_query: None, highlights_query: None,
brackets_query: None, brackets_config: None,
indents_query: None, outline_config: None,
outline_query: None, indents_config: None,
injection_config: None,
ts_language, ts_language,
highlight_map: Default::default(), highlight_map: Default::default(),
}) })
@ -594,19 +637,104 @@ impl Language {
/// Configures this language's bracket matching from a tree-sitter query.
///
/// The query must define both `@open` and `@close` captures; if either is
/// missing, no bracket config is installed. Returns an error if the query
/// source fails to compile against this language's grammar.
pub fn with_brackets_query(mut self, source: &str) -> Result<Self> {
    let grammar = self.grammar_mut();
    let query = Query::new(grammar.ts_language, source)?;
    let mut open_capture_ix = None;
    let mut close_capture_ix = None;
    get_capture_indices(
        &query,
        &mut [
            ("open", &mut open_capture_ix),
            ("close", &mut close_capture_ix),
        ],
    );
    // Both captures are required for bracket matching to be meaningful.
    if let Some((open_capture_ix, close_capture_ix)) = open_capture_ix.zip(close_capture_ix) {
        grammar.brackets_config = Some(BracketConfig {
            query,
            open_capture_ix,
            close_capture_ix,
        });
    }
    Ok(self)
}
/// Configures this language's auto-indent behavior from a tree-sitter query.
///
/// The query must define an `@indent` capture; an `@end` capture is
/// optional. Returns an error if the query source fails to compile against
/// this language's grammar.
pub fn with_indents_query(mut self, source: &str) -> Result<Self> {
    let grammar = self.grammar_mut();
    let query = Query::new(grammar.ts_language, source)?;
    let mut indent_capture_ix = None;
    let mut end_capture_ix = None;
    get_capture_indices(
        &query,
        &mut [
            ("indent", &mut indent_capture_ix),
            ("end", &mut end_capture_ix),
        ],
    );
    // Only `@indent` is required; `@end` refines where the indent stops.
    if let Some(indent_capture_ix) = indent_capture_ix {
        grammar.indents_config = Some(IndentConfig {
            query,
            indent_capture_ix,
            end_capture_ix,
        });
    }
    Ok(self)
}
pub fn with_outline_query(mut self, source: &str) -> Result<Self> { pub fn with_outline_query(mut self, source: &str) -> Result<Self> {
let grammar = self.grammar_mut(); let grammar = self.grammar_mut();
grammar.outline_query = Some(Query::new(grammar.ts_language, source)?); let query = Query::new(grammar.ts_language, source)?;
let mut item_capture_ix = None;
let mut name_capture_ix = None;
let mut context_capture_ix = None;
get_capture_indices(
&query,
&mut [
("item", &mut item_capture_ix),
("name", &mut name_capture_ix),
("context", &mut context_capture_ix),
],
);
if let Some((item_capture_ix, name_capture_ix)) = item_capture_ix.zip(name_capture_ix) {
grammar.outline_config = Some(OutlineConfig {
query,
item_capture_ix,
name_capture_ix,
context_capture_ix,
});
}
Ok(self)
}
pub fn with_injection_query(mut self, source: &str) -> Result<Self> {
let grammar = self.grammar_mut();
let query = Query::new(grammar.ts_language, source)?;
let mut language_capture_ix = None;
let mut content_capture_ix = None;
get_capture_indices(
&query,
&mut [
("language", &mut language_capture_ix),
("content", &mut content_capture_ix),
],
);
let languages_by_pattern_ix = (0..query.pattern_count())
.map(|ix| {
query.property_settings(ix).iter().find_map(|setting| {
if setting.key.as_ref() == "language" {
return setting.value.clone();
} else {
None
}
})
})
.collect();
if let Some(content_capture_ix) = content_capture_ix {
grammar.injection_config = Some(InjectionConfig {
query,
language_capture_ix,
content_capture_ix,
languages_by_pattern_ix,
});
}
Ok(self) Ok(self)
} }
@ -685,9 +813,16 @@ impl Language {
let mut result = Vec::new(); let mut result = Vec::new();
if let Some(grammar) = &self.grammar { if let Some(grammar) = &self.grammar {
let tree = grammar.parse_text(text, None); let tree = grammar.parse_text(text, None);
let captures = SyntaxSnapshot::single_tree_captures(
range.clone(),
text,
&tree,
grammar,
|grammar| grammar.highlights_query.as_ref(),
);
let highlight_maps = vec![grammar.highlight_map()];
let mut offset = 0; let mut offset = 0;
for chunk in BufferChunks::new(text, range, Some(&tree), self.grammar.as_ref(), vec![]) for chunk in BufferChunks::new(text, range, Some((captures, highlight_maps)), vec![]) {
{
let end_offset = offset + chunk.text.len(); let end_offset = offset + chunk.text.len();
if let Some(highlight_id) = chunk.syntax_highlight_id { if let Some(highlight_id) = chunk.syntax_highlight_id {
if !highlight_id.is_default() { if !highlight_id.is_default() {
@ -727,6 +862,10 @@ impl Language {
} }
impl Grammar { impl Grammar {
pub fn id(&self) -> usize {
self.id
}
fn parse_text(&self, text: &Rope, old_tree: Option<Tree>) -> Tree { fn parse_text(&self, text: &Rope, old_tree: Option<Tree>) -> Tree {
PARSER.with(|parser| { PARSER.with(|parser| {
let mut parser = parser.borrow_mut(); let mut parser = parser.borrow_mut();
@ -826,6 +965,17 @@ impl LspAdapter for Arc<FakeLspAdapter> {
} }
} }
fn get_capture_indices(query: &Query, captures: &mut [(&str, &mut Option<u32>)]) {
for (ix, name) in query.capture_names().iter().enumerate() {
for (capture_name, index) in captures.iter_mut() {
if capture_name == name {
**index = Some(ix as u32);
break;
}
}
}
}
pub fn point_to_lsp(point: PointUtf16) -> lsp::Position { pub fn point_to_lsp(point: PointUtf16) -> lsp::Position {
lsp::Position::new(point.row, point.column) lsp::Position::new(point.row, point.column)
} }

File diff suppressed because it is too large Load Diff

View File

@ -999,6 +999,7 @@ fn test_autoindent_language_without_indents_query(cx: &mut MutableAppContext) {
Arc::new(Language::new( Arc::new(Language::new(
LanguageConfig { LanguageConfig {
name: "Markdown".into(), name: "Markdown".into(),
auto_indent_using_last_non_empty_line: false,
..Default::default() ..Default::default()
}, },
Some(tree_sitter_json::language()), Some(tree_sitter_json::language()),
@ -1437,7 +1438,9 @@ fn json_lang() -> Language {
fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String { fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
buffer.read_with(cx, |buffer, _| { buffer.read_with(cx, |buffer, _| {
buffer.syntax_tree().unwrap().root_node().to_sexp() let snapshot = buffer.snapshot();
let layers = snapshot.syntax.layers(buffer.as_text_snapshot());
layers[0].2.to_sexp()
}) })
} }

View File

@ -2067,6 +2067,7 @@ impl Project {
let full_path = buffer.read(cx).file()?.full_path(cx); let full_path = buffer.read(cx).file()?.full_path(cx);
let language = self.languages.select_language(&full_path)?; let language = self.languages.select_language(&full_path)?;
buffer.update(cx, |buffer, cx| { buffer.update(cx, |buffer, cx| {
buffer.set_language_registry(self.languages.clone());
buffer.set_language(Some(language.clone()), cx); buffer.set_language(Some(language.clone()), cx);
}); });

View File

@ -608,9 +608,9 @@ where
impl<'a, F, T, S, U> Iterator for FilterCursor<'a, F, T, U> impl<'a, F, T, S, U> Iterator for FilterCursor<'a, F, T, U>
where where
F: Fn(&T::Summary) -> bool, F: FnMut(&T::Summary) -> bool,
T: Item<Summary = S>, T: Item<Summary = S>,
S: Summary<Context = ()>, S: Summary<Context = ()>, //Context for the summary must be unit type, as .next() doesn't take arguments
U: Dimension<'a, T::Summary>, U: Dimension<'a, T::Summary>,
{ {
type Item = &'a T; type Item = &'a T;
@ -621,7 +621,7 @@ where
} }
if let Some(item) = self.item() { if let Some(item) = self.item() {
self.cursor.next_internal(&self.filter_node, &()); self.cursor.next_internal(&mut self.filter_node, &());
Some(item) Some(item)
} else { } else {
None None

View File

@ -168,6 +168,8 @@ impl<T: Item> SumTree<T> {
Cursor::new(self) Cursor::new(self)
} }
/// Note: If the summary type requires a non `()` context, then the filter cursor
/// that is returned cannot be used with Rust's iterators.
pub fn filter<'a, F, U>(&'a self, filter_node: F) -> FilterCursor<F, T, U> pub fn filter<'a, F, U>(&'a self, filter_node: F) -> FilterCursor<F, T, U>
where where
F: FnMut(&T::Summary) -> bool, F: FnMut(&T::Summary) -> bool,

View File

@ -382,6 +382,7 @@ struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
old_end: D, old_end: D,
new_end: D, new_end: D,
range: Range<(&'a Locator, usize)>, range: Range<(&'a Locator, usize)>,
buffer_id: u64,
} }
#[derive(Clone, Debug, Default, Eq, PartialEq)] #[derive(Clone, Debug, Default, Eq, PartialEq)]
@ -1917,11 +1918,33 @@ impl BufferSnapshot {
self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX) self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
} }
/// Like `edits_since`, but also yields, for each edit, an anchor range
/// corresponding to the edited region. Covers the whole buffer
/// (`Anchor::MIN..Anchor::MAX`).
pub fn anchored_edits_since<'a, D>(
    &'a self,
    since: &'a clock::Global,
) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
where
    D: TextDimension + Ord,
{
    self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
}
pub fn edits_since_in_range<'a, D>( pub fn edits_since_in_range<'a, D>(
&'a self, &'a self,
since: &'a clock::Global, since: &'a clock::Global,
range: Range<Anchor>, range: Range<Anchor>,
) -> impl 'a + Iterator<Item = Edit<D>> ) -> impl 'a + Iterator<Item = Edit<D>>
where
D: TextDimension + Ord,
{
self.anchored_edits_since_in_range(since, range)
.map(|item| item.0)
}
pub fn anchored_edits_since_in_range<'a, D>(
&'a self,
since: &'a clock::Global,
range: Range<Anchor>,
) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
where where
D: TextDimension + Ord, D: TextDimension + Ord,
{ {
@ -1961,6 +1984,7 @@ impl BufferSnapshot {
old_end: Default::default(), old_end: Default::default(),
new_end: Default::default(), new_end: Default::default(),
range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset), range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
buffer_id: self.remote_id,
} }
} }
} }
@ -2019,10 +2043,10 @@ impl<'a> RopeBuilder<'a> {
} }
impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'a, D, F> { impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'a, D, F> {
type Item = Edit<D>; type Item = (Edit<D>, Range<Anchor>);
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
let mut pending_edit: Option<Edit<D>> = None; let mut pending_edit: Option<Self::Item> = None;
let cursor = self.fragments_cursor.as_mut()?; let cursor = self.fragments_cursor.as_mut()?;
while let Some(fragment) = cursor.item() { while let Some(fragment) = cursor.item() {
@ -2041,11 +2065,25 @@ impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator fo
if pending_edit if pending_edit
.as_ref() .as_ref()
.map_or(false, |change| change.new.end < self.new_end) .map_or(false, |(change, _)| change.new.end < self.new_end)
{ {
break; break;
} }
let timestamp = fragment.insertion_timestamp.local();
let start_anchor = Anchor {
timestamp,
offset: fragment.insertion_offset,
bias: Bias::Right,
buffer_id: Some(self.buffer_id),
};
let end_anchor = Anchor {
timestamp,
offset: fragment.insertion_offset + fragment.len,
bias: Bias::Left,
buffer_id: Some(self.buffer_id),
};
if !fragment.was_visible(self.since, self.undos) && fragment.visible { if !fragment.was_visible(self.since, self.undos) && fragment.visible {
let mut visible_end = cursor.end(&None).visible; let mut visible_end = cursor.end(&None).visible;
if fragment.id == *self.range.end.0 { if fragment.id == *self.range.end.0 {
@ -2058,13 +2096,17 @@ impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator fo
let fragment_summary = self.visible_cursor.summary(visible_end); let fragment_summary = self.visible_cursor.summary(visible_end);
let mut new_end = self.new_end.clone(); let mut new_end = self.new_end.clone();
new_end.add_assign(&fragment_summary); new_end.add_assign(&fragment_summary);
if let Some(pending_edit) = pending_edit.as_mut() { if let Some((edit, range)) = pending_edit.as_mut() {
pending_edit.new.end = new_end.clone(); edit.new.end = new_end.clone();
range.end = end_anchor;
} else { } else {
pending_edit = Some(Edit { pending_edit = Some((
old: self.old_end.clone()..self.old_end.clone(), Edit {
new: self.new_end.clone()..new_end.clone(), old: self.old_end.clone()..self.old_end.clone(),
}); new: self.new_end.clone()..new_end.clone(),
},
start_anchor..end_anchor,
));
} }
self.new_end = new_end; self.new_end = new_end;
@ -2083,13 +2125,17 @@ impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator fo
let fragment_summary = self.deleted_cursor.summary(deleted_end); let fragment_summary = self.deleted_cursor.summary(deleted_end);
let mut old_end = self.old_end.clone(); let mut old_end = self.old_end.clone();
old_end.add_assign(&fragment_summary); old_end.add_assign(&fragment_summary);
if let Some(pending_edit) = pending_edit.as_mut() { if let Some((edit, range)) = pending_edit.as_mut() {
pending_edit.old.end = old_end.clone(); edit.old.end = old_end.clone();
range.end = end_anchor;
} else { } else {
pending_edit = Some(Edit { pending_edit = Some((
old: self.old_end.clone()..old_end.clone(), Edit {
new: self.new_end.clone()..self.new_end.clone(), old: self.old_end.clone()..old_end.clone(),
}); new: self.new_end.clone()..self.new_end.clone(),
},
start_anchor..end_anchor,
));
} }
self.old_end = old_end; self.old_end = old_end;
@ -2435,7 +2481,7 @@ impl ToOffset for PointUtf16 {
impl ToOffset for usize { impl ToOffset for usize {
fn to_offset<'a>(&self, snapshot: &BufferSnapshot) -> usize { fn to_offset<'a>(&self, snapshot: &BufferSnapshot) -> usize {
assert!(*self <= snapshot.len(), "offset is out of range"); assert!(*self <= snapshot.len(), "offset {self} is out of range");
*self *self
} }
} }

View File

@ -134,6 +134,11 @@ pub(crate) fn language(
.with_outline_query(query.as_ref()) .with_outline_query(query.as_ref())
.expect("failed to load outline query"); .expect("failed to load outline query");
} }
if let Some(query) = load_query(name, "/injections") {
language = language
.with_injection_query(query.as_ref())
.expect("failed to load injection query");
}
if let Some(lsp_adapter) = lsp_adapter { if let Some(lsp_adapter) = lsp_adapter {
language = language.with_lsp_adapter(lsp_adapter) language = language.with_lsp_adapter(lsp_adapter)
} }

View File

@ -0,0 +1,7 @@
; Inject the C language into the values of object-like preprocessor macro
; definitions, so `#define` bodies are parsed and highlighted as C code.
(preproc_def
value: (preproc_arg) @content
(#set! "language" "c"))
; Same for function-like macro definitions.
(preproc_function_def
value: (preproc_arg) @content
(#set! "language" "c"))

View File

@ -1,3 +1,5 @@
(identifier) @variable
(call_expression (call_expression
function: (qualified_identifier function: (qualified_identifier
name: (identifier) @function)) name: (identifier) @function))
@ -34,8 +36,6 @@
(auto) @type (auto) @type
(type_identifier) @type (type_identifier) @type
(identifier) @variable
((identifier) @constant ((identifier) @constant
(#match? @constant "^[A-Z][A-Z\\d_]*$")) (#match? @constant "^[A-Z][A-Z\\d_]*$"))

View File

@ -0,0 +1,7 @@
; Inject C++ into the values of object-like preprocessor macro definitions,
; so `#define` bodies are parsed and highlighted as C++ code.
(preproc_def
value: (preproc_arg) @content
(#set! "language" "c++"))
; Same for function-like macro definitions.
(preproc_function_def
value: (preproc_arg) @content
(#set! "language" "c++"))

View File

@ -1,6 +1,6 @@
(type_identifier) @type (type_identifier) @type
(primitive_type) @type.builtin (primitive_type) @type.builtin
(self) @variable.builtin
(field_identifier) @property (field_identifier) @property
(call_expression (call_expression
@ -15,6 +15,16 @@
(function_item name: (identifier) @function.definition) (function_item name: (identifier) @function.definition)
(function_signature_item name: (identifier) @function.definition) (function_signature_item name: (identifier) @function.definition)
(macro_invocation
macro: [
(identifier) @function.special
(scoped_identifier
name: (identifier) @function.special)
])
(macro_definition
name: (identifier) @function.special.definition)
; Identifier conventions ; Identifier conventions
; Assume uppercase names are enum constructors ; Assume uppercase names are enum constructors
@ -71,6 +81,7 @@
"mod" "mod"
"move" "move"
"pub" "pub"
"ref"
"return" "return"
"static" "static"
"struct" "struct"
@ -91,6 +102,13 @@
(char_literal) (char_literal)
] @string ] @string
[
(integer_literal)
(float_literal)
] @number
(boolean_literal) @constant
[ [
(line_comment) (line_comment)
(block_comment) (block_comment)

View File

@ -0,0 +1,7 @@
; Inject Rust into the token trees of macro invocations, so macro arguments
; are parsed and highlighted as Rust code.
(macro_invocation
(token_tree) @content
(#set! "language" "rust"))
; Inject Rust into the token trees of `macro_rules!` rules.
(macro_rule
(token_tree) @content
(#set! "language" "rust"))