Fill in some missing methods on MultiBuffer, MultiBufferSnapshot
This commit is contained in:
parent 5b31c1ba4e
commit a758bd4f8d
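
The change follows one pattern throughout: methods on MultiBuffer and MultiBufferSnapshot that previously panicked with todo!() now delegate to the single underlying buffer, obtained through an as_singleton() helper that only succeeds while the multi-buffer wraps exactly one excerpt. The sketch below illustrates that delegation pattern only; the types are simplified stand-ins, not the real zed structs.

    // Minimal, self-contained sketch of the delegation pattern (assumed
    // simplification: a plain Vec instead of zed's excerpt SumTree).
    struct Buffer {
        dirty: bool,
    }

    struct MultiBuffer {
        buffers: Vec<Buffer>,
    }

    impl MultiBuffer {
        // Returns the one underlying buffer, if there is exactly one.
        fn as_singleton(&self) -> Option<&Buffer> {
            if self.buffers.len() == 1 {
                self.buffers.first()
            } else {
                None
            }
        }

        // Filled-in methods forward to the singleton, as is_dirty, has_conflict,
        // parse_count, etc. do in the diff below.
        fn is_dirty(&self) -> bool {
            self.as_singleton().expect("single excerpt only for now").dirty
        }
    }

    fn main() {
        let multi = MultiBuffer {
            buffers: vec![Buffer { dirty: true }],
        };
        assert!(multi.is_dirty());
    }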
@@ -54,7 +54,7 @@ impl DisplayMap {
         let (fold_map, snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx));
         let (tab_map, snapshot) = TabMap::new(snapshot, tab_size);
         let (wrap_map, snapshot) = WrapMap::new(snapshot, font_id, font_size, wrap_width, cx);
-        let block_map = BlockMap::new(buffer.clone(), snapshot);
+        let block_map = BlockMap::new(snapshot);
         cx.observe(&wrap_map, |_, _, cx| cx.notify()).detach();
         DisplayMap {
             buffer,
@@ -1,7 +1,7 @@
 use super::wrap_map::{self, WrapEdit, WrapPoint, WrapSnapshot};
-use gpui::{AppContext, ElementBox, ModelHandle};
+use gpui::{AppContext, ElementBox};
 use language::{
-    multi_buffer::{Anchor, MultiBuffer, ToOffset, ToPoint as _},
+    multi_buffer::{Anchor, ToOffset, ToPoint as _},
     Chunk,
 };
 use parking_lot::Mutex;
@@ -22,7 +22,6 @@ use theme::SyntaxTheme;
 const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize];

 pub struct BlockMap {
-    buffer: ModelHandle<MultiBuffer>,
     next_block_id: AtomicUsize,
     wrap_snapshot: Mutex<WrapSnapshot>,
     blocks: Vec<Arc<Block>>,
@@ -112,9 +111,8 @@ pub struct BlockBufferRows<'a> {
 }

 impl BlockMap {
-    pub fn new(buffer: ModelHandle<MultiBuffer>, wrap_snapshot: WrapSnapshot) -> Self {
+    pub fn new(wrap_snapshot: WrapSnapshot) -> Self {
         Self {
-            buffer,
             next_block_id: AtomicUsize::new(0),
             blocks: Vec::new(),
             transforms: Mutex::new(SumTree::from_item(
@@ -869,6 +867,7 @@ mod tests {
     use super::*;
     use crate::display_map::{fold_map::FoldMap, tab_map::TabMap, wrap_map::WrapMap};
     use gpui::{elements::Empty, Element};
+    use language::multi_buffer::MultiBuffer;
     use rand::prelude::*;
     use std::env;
     use text::RandomCharIter;
@@ -902,7 +901,7 @@ mod tests {
         let (fold_map, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx));
         let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1);
         let (wrap_map, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, None, cx);
-        let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone());
+        let mut block_map = BlockMap::new(wraps_snapshot.clone());

         let mut writer = block_map.write(wraps_snapshot.clone(), vec![]);
         writer.insert(vec![
@@ -1069,7 +1068,7 @@ mod tests {
         let (_, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx));
         let (_, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1);
         let (_, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, Some(60.), cx);
-        let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone());
+        let mut block_map = BlockMap::new(wraps_snapshot.clone());

         let mut writer = block_map.write(wraps_snapshot.clone(), vec![]);
         writer.insert(vec![
@@ -1127,7 +1126,7 @@ mod tests {
         let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size);
         let (wrap_map, wraps_snapshot) =
             WrapMap::new(tabs_snapshot, font_id, font_size, wrap_width, cx);
-        let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot);
+        let mut block_map = BlockMap::new(wraps_snapshot);
         let mut expected_blocks = Vec::new();

         for _ in 0..operations {
@@ -1224,7 +1224,6 @@ mod tests {
         let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx);
         let buffer_snapshot = buffer.read(cx).snapshot(cx);
         let mut map = FoldMap::new(buffer_snapshot.clone()).0;
         let buffer = buffer.read(cx);

         let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
         writer.fold(vec![
@@ -559,7 +559,7 @@ impl Editor {
     }

     pub fn language<'a>(&self, cx: &'a AppContext) -> Option<&'a Arc<Language>> {
-        self.buffer.read(cx).read(cx).language()
+        self.buffer.read(cx).language(cx)
     }

     pub fn set_placeholder_text(
@@ -2996,7 +2996,7 @@ impl Editor {
         let buffer = self.buffer.read(cx);
         let replica_id = buffer.replica_id();
         buffer
-            .selection_sets()
+            .selection_sets(cx)
             .filter(move |(set_id, set)| {
                 set.active && (set_id.replica_id != replica_id || **set_id == self.selection_set_id)
             })
@@ -76,9 +76,7 @@ impl ItemHandle for BufferItemHandle {
             font_properties,
             underline: None,
         };
-        let language = buffer
-            .upgrade(cx)
-            .and_then(|buf| buf.read(cx).read(cx).language());
+        let language = buffer.upgrade(cx).and_then(|buf| buf.read(cx).language(cx));
         let soft_wrap = match settings.soft_wrap(language) {
             settings::SoftWrap::None => crate::SoftWrap::None,
             settings::SoftWrap::EditorWidth => crate::SoftWrap::EditorWidth,
@@ -222,11 +220,11 @@ impl ItemView for Editor {
     }

     fn is_dirty(&self, cx: &AppContext) -> bool {
-        self.buffer().read(cx).is_dirty()
+        self.buffer().read(cx).is_dirty(cx)
     }

     fn has_conflict(&self, cx: &AppContext) -> bool {
-        self.buffer().read(cx).has_conflict()
+        self.buffer().read(cx).has_conflict(cx)
     }
 }

@@ -829,31 +829,6 @@ impl Buffer {
         })
     }

-    pub fn diagnostics_in_range<'a, T, O>(
-        &'a self,
-        search_range: Range<T>,
-    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
-    where
-        T: 'a + ToOffset,
-        O: 'a + FromAnchor,
-    {
-        self.diagnostics.range(search_range, self, true)
-    }
-
-    pub fn diagnostic_group<'a, O>(
-        &'a self,
-        group_id: usize,
-    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
-    where
-        O: 'a + FromAnchor,
-    {
-        self.diagnostics.group(group_id, self)
-    }
-
-    pub fn diagnostics_update_count(&self) -> usize {
-        self.diagnostics_update_count
-    }
-
     fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
         if let Some(indent_columns) = self.compute_autoindents() {
             let indent_columns = cx.background().spawn(indent_columns);
@@ -1057,47 +1032,6 @@ impl Buffer {
         }
     }

-    pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
-        if let Some(tree) = self.syntax_tree() {
-            let root = tree.root_node();
-            let range = range.start.to_offset(self)..range.end.to_offset(self);
-            let mut node = root.descendant_for_byte_range(range.start, range.end);
-            while node.map_or(false, |n| n.byte_range() == range) {
-                node = node.unwrap().parent();
-            }
-            node.map(|n| n.byte_range())
-        } else {
-            None
-        }
-    }
-
-    pub fn enclosing_bracket_ranges<T: ToOffset>(
-        &self,
-        range: Range<T>,
-    ) -> Option<(Range<usize>, Range<usize>)> {
-        let (grammar, tree) = self.grammar().zip(self.syntax_tree())?;
-        let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
-        let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;
-
-        // Find bracket pairs that *inclusively* contain the given range.
-        let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
-        let mut cursor = QueryCursorHandle::new();
-        let matches = cursor.set_byte_range(range).matches(
-            &grammar.brackets_query,
-            tree.root_node(),
-            TextProvider(self.as_rope()),
-        );
-
-        // Get the ranges of the innermost pair of brackets.
-        matches
-            .filter_map(|mat| {
-                let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
-                let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
-                Some((open.byte_range(), close.byte_range()))
-            })
-            .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
-    }
-
     pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
         // TODO: it would be nice to not allocate here.
         let old_text = self.text();
@@ -1745,12 +1679,78 @@ impl BufferSnapshot {
         }
     }

     pub fn language(&self) -> Option<&Arc<Language>> {
         self.language.as_ref()
     }

     fn grammar(&self) -> Option<&Arc<Grammar>> {
         self.language
             .as_ref()
             .and_then(|language| language.grammar.as_ref())
     }
+
+    pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
+        if let Some(tree) = self.tree.as_ref() {
+            let root = tree.root_node();
+            let range = range.start.to_offset(self)..range.end.to_offset(self);
+            let mut node = root.descendant_for_byte_range(range.start, range.end);
+            while node.map_or(false, |n| n.byte_range() == range) {
+                node = node.unwrap().parent();
+            }
+            node.map(|n| n.byte_range())
+        } else {
+            None
+        }
+    }
+
+    pub fn enclosing_bracket_ranges<T: ToOffset>(
+        &self,
+        range: Range<T>,
+    ) -> Option<(Range<usize>, Range<usize>)> {
+        let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
+        let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
+        let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;
+
+        // Find bracket pairs that *inclusively* contain the given range.
+        let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
+        let mut cursor = QueryCursorHandle::new();
+        let matches = cursor.set_byte_range(range).matches(
+            &grammar.brackets_query,
+            tree.root_node(),
+            TextProvider(self.as_rope()),
+        );
+
+        // Get the ranges of the innermost pair of brackets.
+        matches
+            .filter_map(|mat| {
+                let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
+                let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
+                Some((open.byte_range(), close.byte_range()))
+            })
+            .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
+    }
+
+    pub fn diagnostics_in_range<'a, T, O>(
+        &'a self,
+        search_range: Range<T>,
+    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
+    where
+        T: 'a + ToOffset,
+        O: 'a + FromAnchor,
+    {
+        self.diagnostics.range(search_range, self, true)
+    }
+
+    pub fn diagnostic_group<'a, O>(
+        &'a self,
+        group_id: usize,
+    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
+    where
+        O: 'a + FromAnchor,
+    {
+        self.diagnostics.group(group_id, self)
+    }
+
+    pub fn diagnostics_update_count(&self) -> usize {
+        self.diagnostics_update_count
+    }
@@ -6,7 +6,6 @@ use crate::{
     BufferSnapshot, DiagnosticEntry, File, Language,
 };
 pub use anchor::{Anchor, AnchorRangeExt};
-use anyhow::anyhow;
 use anyhow::Result;
 use clock::ReplicaId;
 use collections::HashMap;
@@ -15,6 +14,7 @@ pub use selection::SelectionSet;
 use std::{
     cell::{Ref, RefCell},
     cmp, io,
+    iter::Peekable,
     ops::{Range, Sub},
     sync::Arc,
     time::SystemTime,
@@ -58,7 +58,6 @@ struct BufferState {
 #[derive(Clone, Default)]
 pub struct MultiBufferSnapshot {
     excerpts: SumTree<Excerpt>,
-    replica_id: ReplicaId,
 }

 pub struct ExcerptProperties<'a, T> {
@@ -91,7 +90,7 @@ pub struct MultiBufferChunks<'a> {
 }

 pub struct MultiBufferBytes<'a> {
-    chunks: MultiBufferChunks<'a>,
+    chunks: Peekable<MultiBufferChunks<'a>>,
 }

 impl MultiBuffer {
@@ -336,12 +335,46 @@ impl MultiBuffer {
         set_id: Option<SelectionSetId>,
         cx: &mut ModelContext<Self>,
     ) -> Result<()> {
-        todo!()
+        self.as_singleton()
+            .unwrap()
+            .update(cx, |buffer, cx| buffer.set_active_selection_set(set_id, cx))
     }

-    pub fn selection_sets(&self) -> impl Iterator<Item = (&SelectionSetId, &SelectionSet)> {
-        todo!();
-        None.into_iter()
+    pub fn selection_sets(
+        &self,
+        cx: &AppContext,
+    ) -> impl Iterator<Item = (&SelectionSetId, &SelectionSet)> {
+        let excerpt_id = self.snapshot.borrow().excerpts.first().unwrap().id.clone();
+        let selection_sets: &mut HashMap<SelectionSetId, SelectionSet> =
+            unsafe { &mut *(&self.selection_sets as *const _ as *mut _) };
+        selection_sets.clear();
+        for (selection_set_id, set) in self.as_singleton().unwrap().read(cx).selection_sets() {
+            selection_sets.insert(
+                *selection_set_id,
+                SelectionSet {
+                    id: set.id,
+                    active: set.active,
+                    selections: set
+                        .selections
+                        .iter()
+                        .map(|selection| Selection {
+                            id: selection.id,
+                            start: Anchor {
+                                excerpt_id: excerpt_id.clone(),
+                                text_anchor: selection.start.clone(),
+                            },
+                            end: Anchor {
+                                excerpt_id: excerpt_id.clone(),
+                                text_anchor: selection.end.clone(),
+                            },
+                            reversed: selection.reversed,
+                            goal: selection.goal,
+                        })
+                        .collect(),
+                },
+            );
+        }
+        self.selection_sets.iter()
     }

     pub fn push<O>(&mut self, props: ExcerptProperties<O>, cx: &mut ModelContext<Self>) -> ExcerptId
@@ -382,7 +415,13 @@ impl MultiBuffer {
         &mut self,
         cx: &mut ModelContext<Self>,
     ) -> Result<Task<Result<(clock::Global, SystemTime)>>> {
-        todo!()
+        self.as_singleton()
+            .unwrap()
+            .update(cx, |buffer, cx| buffer.save(cx))
     }

+    pub fn language<'a>(&self, cx: &'a AppContext) -> Option<&'a Arc<Language>> {
+        self.as_singleton().unwrap().read(cx).language()
+    }
+
     pub fn file<'a>(&self, cx: &'a AppContext) -> Option<&'a dyn File> {
@@ -390,16 +429,16 @@ impl MultiBuffer {
             .and_then(|buffer| buffer.read(cx).file())
     }

-    pub fn is_dirty(&self) -> bool {
-        todo!()
+    pub fn is_dirty(&self, cx: &AppContext) -> bool {
+        self.as_singleton().unwrap().read(cx).is_dirty()
     }

-    pub fn has_conflict(&self) -> bool {
-        todo!()
+    pub fn has_conflict(&self, cx: &AppContext) -> bool {
+        self.as_singleton().unwrap().read(cx).has_conflict()
     }

-    pub fn is_parsing(&self, _: &AppContext) -> bool {
-        todo!()
+    pub fn is_parsing(&self, cx: &AppContext) -> bool {
+        self.as_singleton().unwrap().read(cx).is_parsing()
     }

     fn sync(&self, cx: &AppContext) {
@@ -473,12 +512,21 @@ impl MultiBuffer {

 #[cfg(any(test, feature = "test-support"))]
 impl MultiBuffer {
-    pub fn randomly_edit<R: rand::Rng>(&mut self, _: &mut R, _: usize, _: &mut ModelContext<Self>) {
-        todo!()
+    pub fn randomly_edit<R: rand::Rng>(
+        &mut self,
+        rng: &mut R,
+        count: usize,
+        cx: &mut ModelContext<Self>,
+    ) {
+        self.as_singleton()
+            .unwrap()
+            .update(cx, |buffer, cx| buffer.randomly_edit(rng, count, cx))
     }

     pub fn randomly_mutate<R: rand::Rng>(&mut self, rng: &mut R, cx: &mut ModelContext<Self>) {
-        todo!()
+        self.as_singleton()
+            .unwrap()
+            .update(cx, |buffer, cx| buffer.randomly_mutate(rng, cx))
     }
 }

@@ -487,10 +535,6 @@ impl Entity for MultiBuffer {
 }

 impl MultiBufferSnapshot {
-    pub fn replica_id(&self) -> ReplicaId {
-        todo!()
-    }
-
     pub fn text(&self) -> String {
         self.chunks(0..self.len(), None)
             .map(|chunk| chunk.text)
@@ -501,8 +545,9 @@ impl MultiBufferSnapshot {
         &'a self,
         position: T,
     ) -> impl Iterator<Item = char> + 'a {
-        todo!();
-        None.into_iter()
+        // TODO
+        let offset = position.to_offset(self);
+        self.as_singleton().unwrap().reversed_chars_at(offset)
     }

     pub fn chars_at<'a, T: ToOffset>(&'a self, position: T) -> impl Iterator<Item = char> + 'a {
@@ -523,11 +568,22 @@ impl MultiBufferSnapshot {
             .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
     }

-    pub fn contains_str_at<T>(&self, _: T, _: &str) -> bool
+    pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
     where
         T: ToOffset,
     {
-        todo!()
+        let offset = position.to_offset(self);
+        self.as_singleton().unwrap().contains_str_at(offset, needle)
     }

+    fn as_singleton(&self) -> Option<&BufferSnapshot> {
+        let mut excerpts = self.excerpts.iter();
+        let buffer = excerpts.next().map(|excerpt| &excerpt.buffer);
+        if excerpts.next().is_none() {
+            buffer
+        } else {
+            None
+        }
+    }
+
     pub fn len(&self) -> usize {
@@ -610,7 +666,9 @@ impl MultiBufferSnapshot {
     }

     pub fn bytes_in_range<'a, T: ToOffset>(&'a self, range: Range<T>) -> MultiBufferBytes<'a> {
-        todo!()
+        MultiBufferBytes {
+            chunks: self.chunks(range, None).peekable(),
+        }
     }

     pub fn chunks<'a, T: ToOffset>(
@@ -618,48 +676,15 @@ impl MultiBufferSnapshot {
         range: Range<T>,
         theme: Option<&'a SyntaxTheme>,
     ) -> MultiBufferChunks<'a> {
-        let range = range.start.to_offset(self)..range.end.to_offset(self);
-        let mut cursor = self.excerpts.cursor::<usize>();
-        cursor.seek(&range.start, Bias::Right, &());
-
-        let mut header_height: u8 = 0;
-        let excerpt_chunks = cursor.item().map(|excerpt| {
-            let buffer_range = excerpt.range.to_offset(&excerpt.buffer);
-            header_height = excerpt.header_height;
-
-            let buffer_start;
-            let start_overshoot = range.start - cursor.start();
-            if start_overshoot < excerpt.header_height as usize {
-                header_height -= start_overshoot as u8;
-                buffer_start = buffer_range.start;
-            } else {
-                buffer_start =
-                    buffer_range.start + start_overshoot - excerpt.header_height as usize;
-                header_height = 0;
-            }
-
-            let buffer_end;
-            let end_overshoot = range.end - cursor.start();
-            if end_overshoot < excerpt.header_height as usize {
-                header_height -= excerpt.header_height - end_overshoot as u8;
-                buffer_end = buffer_start;
-            } else {
-                buffer_end = cmp::min(
-                    buffer_range.end,
-                    buffer_range.start + end_overshoot - excerpt.header_height as usize,
-                );
-            }
-
-            excerpt.buffer.chunks(buffer_start..buffer_end, theme)
-        });
-
-        MultiBufferChunks {
-            range,
-            cursor,
-            header_height,
-            excerpt_chunks,
+        let mut result = MultiBufferChunks {
+            range: 0..range.end.to_offset(self),
+            cursor: self.excerpts.cursor::<usize>(),
+            header_height: 0,
+            excerpt_chunks: None,
             theme,
-        }
+        };
+        result.seek(range.start.to_offset(self));
+        result
     }

     pub fn offset_to_point(&self, offset: usize) -> Point {
@@ -736,33 +761,43 @@ impl MultiBufferSnapshot {
     }

     pub fn indent_column_for_line(&self, row: u32) -> u32 {
-        todo!()
+        if let Some((buffer, range)) = self.buffer_line_for_row(row) {
+            buffer
+                .indent_column_for_line(range.start.row)
+                .min(range.end.column)
+                .saturating_sub(range.start.column)
+        } else {
+            0
+        }
     }

     pub fn line_len(&self, row: u32) -> u32 {
+        if let Some((_, range)) = self.buffer_line_for_row(row) {
+            range.end.column - range.start.column
+        } else {
+            0
+        }
+    }
+
+    fn buffer_line_for_row(&self, row: u32) -> Option<(&BufferSnapshot, Range<Point>)> {
         let mut cursor = self.excerpts.cursor::<Point>();
         cursor.seek(&Point::new(row, 0), Bias::Right, &());
         if let Some(excerpt) = cursor.item() {
             let overshoot = row - cursor.start().row;
             let header_height = excerpt.header_height as u32;
-            if overshoot < header_height {
-                0
-            } else {
+            if overshoot >= header_height {
                 let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer);
                 let excerpt_end = excerpt.range.end.to_point(&excerpt.buffer);
                 let buffer_row = excerpt_start.row + overshoot - header_height;
-                let mut len = excerpt.buffer.line_len(buffer_row);
-                if buffer_row == excerpt_end.row {
-                    len = excerpt_end.column;
-                }
-                if buffer_row == excerpt_start.row {
-                    len -= excerpt_start.column
-                }
-                len
+                let line_start = Point::new(buffer_row, 0);
+                let line_end = Point::new(buffer_row, excerpt.buffer.line_len(buffer_row));
+                return Some((
+                    &excerpt.buffer,
+                    line_start.max(excerpt_start)..line_end.min(excerpt_end),
+                ));
             }
-        } else {
-            0
         }
+        None
     }

     pub fn max_point(&self) -> Point {
@@ -940,26 +975,42 @@ impl MultiBufferSnapshot {
     }

     pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
-        todo!()
+        let offset = position.to_offset(self);
+        let mut cursor = self.excerpts.cursor::<(usize, Option<&ExcerptId>)>();
+        cursor.seek(&offset, bias, &());
+        if let Some(excerpt) = cursor.item() {
+            let overshoot =
+                (offset - cursor.start().0).saturating_sub(excerpt.header_height as usize);
+            let buffer_start = excerpt.range.start.to_offset(&excerpt.buffer);
+            Anchor {
+                excerpt_id: excerpt.id.clone(),
+                text_anchor: excerpt.buffer.anchor_at(buffer_start + overshoot, bias),
+            }
+        } else if offset == 0 && bias == Bias::Left {
+            Anchor::min()
+        } else {
+            Anchor::max()
+        }
     }

     pub fn parse_count(&self) -> usize {
-        todo!()
+        self.as_singleton().unwrap().parse_count()
     }

     pub fn enclosing_bracket_ranges<T: ToOffset>(
         &self,
         range: Range<T>,
     ) -> Option<(Range<usize>, Range<usize>)> {
-        todo!()
+        let range = range.start.to_offset(self)..range.end.to_offset(self);
+        self.as_singleton().unwrap().enclosing_bracket_ranges(range)
     }

     pub fn diagnostics_update_count(&self) -> usize {
-        todo!()
+        self.as_singleton().unwrap().diagnostics_update_count()
     }

-    pub fn language<'a>(&self) -> Option<&'a Arc<Language>> {
-        todo!()
+    pub fn language(&self) -> Option<&Arc<Language>> {
+        self.as_singleton().unwrap().language()
     }

     pub fn diagnostic_group<'a, O>(
@@ -967,26 +1018,28 @@ impl MultiBufferSnapshot {
         group_id: usize,
     ) -> impl Iterator<Item = DiagnosticEntry<O>> + 'a
     where
-        O: 'a,
+        O: text::FromAnchor + 'a,
     {
-        todo!();
-        None.into_iter()
+        self.as_singleton().unwrap().diagnostic_group(group_id)
     }

     pub fn diagnostics_in_range<'a, T, O>(
         &'a self,
-        search_range: Range<T>,
+        range: Range<T>,
     ) -> impl Iterator<Item = DiagnosticEntry<O>> + 'a
     where
         T: 'a + ToOffset,
-        O: 'a,
+        O: 'a + text::FromAnchor,
     {
-        todo!();
-        None.into_iter()
+        let range = range.start.to_offset(self)..range.end.to_offset(self);
+        self.as_singleton().unwrap().diagnostics_in_range(range)
     }

     pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
-        todo!()
+        let range = range.start.to_offset(self)..range.end.to_offset(self);
+        self.as_singleton()
+            .unwrap()
+            .range_for_syntax_ancestor(range)
     }

     fn buffer_snapshot_for_excerpt<'a>(
@@ -996,7 +1049,7 @@ impl MultiBufferSnapshot {
         let mut cursor = self.excerpts.cursor::<Option<&ExcerptId>>();
         cursor.seek(&Some(excerpt_id), Bias::Left, &());
         if let Some(excerpt) = cursor.item() {
-            if *cursor.start() == Some(excerpt_id) {
+            if excerpt.id == *excerpt_id {
                 return Some(&excerpt.buffer);
             }
         }
@@ -1114,11 +1167,43 @@ impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a ExcerptId> {

 impl<'a> MultiBufferChunks<'a> {
     pub fn offset(&self) -> usize {
-        todo!()
+        self.range.start
     }

     pub fn seek(&mut self, offset: usize) {
-        todo!()
+        self.range.start = offset;
+        self.cursor.seek_forward(&offset, Bias::Right, &());
+        self.header_height = 0;
+        self.excerpt_chunks = None;
+        if let Some(excerpt) = self.cursor.item() {
+            let buffer_range = excerpt.range.to_offset(&excerpt.buffer);
+            self.header_height = excerpt.header_height;
+
+            let buffer_start;
+            let start_overshoot = self.range.start - self.cursor.start();
+            if start_overshoot < excerpt.header_height as usize {
+                self.header_height -= start_overshoot as u8;
+                buffer_start = buffer_range.start;
+            } else {
+                buffer_start =
+                    buffer_range.start + start_overshoot - excerpt.header_height as usize;
+                self.header_height = 0;
+            }
+
+            let buffer_end;
+            let end_overshoot = self.range.end - self.cursor.start();
+            if end_overshoot < excerpt.header_height as usize {
+                self.header_height -= excerpt.header_height - end_overshoot as u8;
+                buffer_end = buffer_start;
+            } else {
+                buffer_end = cmp::min(
+                    buffer_range.end,
+                    buffer_range.start + end_overshoot - excerpt.header_height as usize,
+                );
+            }
+
+            self.excerpt_chunks = Some(excerpt.buffer.chunks(buffer_start..buffer_end, self.theme));
+        }
     }
 }

@@ -1134,16 +1219,19 @@ impl<'a> Iterator for MultiBufferChunks<'a> {
                 },
                 ..Default::default()
             };
+            self.range.start += self.header_height as usize;
             self.header_height = 0;
             return Some(chunk);
         }

         if let Some(excerpt_chunks) = self.excerpt_chunks.as_mut() {
             if let Some(chunk) = excerpt_chunks.next() {
+                self.range.start += chunk.text.len();
                 return Some(chunk);
             }
             self.excerpt_chunks.take();
             if self.cursor.end(&()) <= self.range.end {
+                self.range.start += 1;
                 return Some(Chunk {
                     text: "\n",
                     ..Default::default()
@@ -1180,7 +1268,7 @@ impl<'a> Iterator for MultiBufferBytes<'a> {
     type Item = &'a [u8];

     fn next(&mut self) -> Option<Self::Item> {
-        todo!()
+        self.chunks.next().map(|chunk| chunk.text.as_bytes())
     }
 }

@@ -539,6 +539,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
     // The diagnostics have moved down since they were created.
     assert_eq!(
         buffer
+            .snapshot()
             .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
             .collect::<Vec<_>>(),
         &[
@@ -606,6 +607,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
         .unwrap();
     assert_eq!(
         buffer
+            .snapshot()
             .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
             .collect::<Vec<_>>(),
         &[
@@ -685,6 +687,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
         .unwrap();
     assert_eq!(
         buffer
+            .snapshot()
            .diagnostics_in_range::<_, Point>(0..buffer.len())
            .collect::<Vec<_>>(),
        &[
@@ -870,6 +873,7 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
     buffer.update_diagnostics(None, diagnostics, cx).unwrap();
     assert_eq!(
         buffer
+            .snapshot()
             .diagnostics_in_range::<_, Point>(0..buffer.len())
             .collect::<Vec<_>>(),
         &[
@@ -922,7 +926,10 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
     );

     assert_eq!(
-        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
+        buffer
+            .snapshot()
+            .diagnostic_group::<Point>(0)
+            .collect::<Vec<_>>(),
         &[
             DiagnosticEntry {
                 range: Point::new(1, 8)..Point::new(1, 9),
@@ -945,7 +952,10 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
         ]
     );
     assert_eq!(
-        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
+        buffer
+            .snapshot()
+            .diagnostic_group::<Point>(1)
+            .collect::<Vec<_>>(),
         &[
             DiagnosticEntry {
                 range: Point::new(1, 13)..Point::new(1, 15),
@@ -1022,11 +1032,13 @@ impl Buffer {
         &self,
         range: Range<T>,
     ) -> Option<(Range<Point>, Range<Point>)> {
-        self.enclosing_bracket_ranges(range).map(|(start, end)| {
-            let point_start = start.start.to_point(self)..start.end.to_point(self);
-            let point_end = end.start.to_point(self)..end.end.to_point(self);
-            (point_start, point_end)
-        })
+        self.snapshot()
+            .enclosing_bracket_ranges(range)
+            .map(|(start, end)| {
+                let point_start = start.start.to_point(self)..start.end.to_point(self);
+                let point_end = end.start.to_point(self)..end.end.to_point(self);
+                (point_start, point_end)
+            })
     }
 }

@@ -3721,6 +3721,7 @@ mod tests {

     buffer.read_with(&cx, |buffer, _| {
         let diagnostics = buffer
+            .snapshot()
             .diagnostics_in_range::<_, Point>(0..buffer.len())
             .collect::<Vec<_>>();
         assert_eq!(
@@ -1707,6 +1707,7 @@ mod tests {
     buffer_b.read_with(&cx_b, |buffer, _| {
         assert_eq!(
             buffer
+                .snapshot()
                 .diagnostics_in_range::<_, Point>(0..buffer.len())
                 .collect::<Vec<_>>(),
             &[