WIP: Insert blocks in BlockMap for MultiBuffer headers

This commit is contained in:
Antonio Scandurra 2021-12-21 17:38:03 +01:00
parent 8534a9cc41
commit 91c786a8db
4 changed files with 60 additions and 77 deletions

View File

@ -345,7 +345,7 @@ mod tests {
view.update(cx, |view, cx| {
view.populate_excerpts(buffer, cx);
assert_eq!(
view.excerpts.read(cx).read(cx).text(),
view.editor.update(cx, |editor, cx| editor.display_text(cx)),
concat!(
"\n", // primary diagnostic message
"\n", // filename

View File

@ -3,9 +3,7 @@ mod fold_map;
mod tab_map;
mod wrap_map;
use crate::{
multi_buffer::RenderHeaderFn, Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint,
};
use crate::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
use block_map::{BlockMap, BlockPoint};
use fold_map::{FoldMap, ToFoldPoint as _};
use gpui::{fonts::FontId, Entity, ModelContext, ModelHandle};
@ -344,22 +342,6 @@ impl DisplaySnapshot {
self.blocks_snapshot.blocks_in_range(rows)
}
// Returns, for each multibuffer excerpt header intersecting the given display-row
// range, the display-row range it occupies plus its render callback.
// NOTE(review): this is a WIP stub — `todo!()` below panics unconditionally, so the
// code after it never executes at runtime.
pub fn excerpt_headers_in_range<'a>(
&'a self,
rows: Range<u32>,
) -> impl 'a + Iterator<Item = (Range<u32>, RenderHeaderFn)> {
todo!();
// Everything below is unreachable, but presumably kept so the trailing iterator
// expression constrains the `impl Iterator` opaque return type — removing it
// may break type inference; TODO confirm before cleaning up.
// Convert the display-row bounds to buffer rows for the snapshot query.
let start_row = DisplayPoint::new(rows.start, 0).to_point(self).row;
let end_row = DisplayPoint::new(rows.end, 0).to_point(self).row;
self.buffer_snapshot
.excerpt_headers_in_range(start_row..end_row)
// NOTE(review): the closure ignores the per-header `row` and `header_height`
// and re-derives the range from the *input* `rows` bounds, so every header
// would get the same display range — looks like an unfinished sketch; the
// shadowed `start_row`/`end_row` here likely should be based on `row` and
// `header_height` instead.
.map(move |(row, header_height, render)| {
let start_row = Point::new(rows.start, 0).to_display_point(self).row();
let end_row = Point::new(rows.end, 0).to_display_point(self).row();
(start_row..end_row, render)
})
}
pub fn intersects_fold<T: ToOffset>(&self, offset: T) -> bool {
self.folds_snapshot.intersects_fold(offset)
}

View File

@ -227,8 +227,9 @@ impl BlockMap {
}
// Find the blocks within this edited region.
let new_start = wrap_snapshot.to_point(WrapPoint::new(new_start.0, 0), Bias::Left);
let start_anchor = buffer.anchor_before(new_start);
let new_buffer_start =
wrap_snapshot.to_point(WrapPoint::new(new_start.0, 0), Bias::Left);
let start_anchor = buffer.anchor_before(new_buffer_start);
let start_block_ix = match self.blocks[last_block_ix..].binary_search_by(|probe| {
probe
.position
@ -238,11 +239,14 @@ impl BlockMap {
}) {
Ok(ix) | Err(ix) => last_block_ix + ix,
};
let new_buffer_end;
let end_block_ix = if new_end.0 > wrap_snapshot.max_point().row() {
new_buffer_end = wrap_snapshot.buffer_snapshot().max_point() + Point::new(1, 0);
self.blocks.len()
} else {
let new_end = wrap_snapshot.to_point(WrapPoint::new(new_end.0, 0), Bias::Left);
let end_anchor = buffer.anchor_before(new_end);
new_buffer_end = wrap_snapshot.to_point(WrapPoint::new(new_end.0, 0), Bias::Left);
let end_anchor = buffer.anchor_before(new_buffer_end);
match self.blocks[start_block_ix..].binary_search_by(|probe| {
probe
.position
@ -254,7 +258,26 @@ impl BlockMap {
}
};
last_block_ix = end_block_ix;
blocks_in_edit.clear();
debug_assert!(blocks_in_edit.is_empty());
blocks_in_edit.extend(
wrap_snapshot
.buffer_snapshot()
.excerpt_headers_in_range(new_buffer_start.row..new_buffer_end.row)
.map(|(start_row, header_height, render_header)| {
(
start_row,
0,
Arc::new(Block {
id: Default::default(),
position: Anchor::min(),
height: header_height,
render: Mutex::new(Arc::new(move |cx| render_header(cx))),
disposition: BlockDisposition::Above,
}),
)
}),
);
blocks_in_edit.extend(
self.blocks[start_block_ix..end_block_ix]
.iter()
@ -268,22 +291,21 @@ impl BlockMap {
}
}
let position = wrap_snapshot.from_point(position, Bias::Left);
(position.row(), column, block)
(position.row(), column, block.clone())
}),
);
blocks_in_edit
.sort_unstable_by_key(|(row, _, block)| (*row, block.disposition, block.id));
blocks_in_edit.sort_by_key(|(row, _, block)| (*row, block.disposition, block.id));
// For each of these blocks, insert a new isomorphic transform preceding the block,
// and then insert the block itself.
for (block_row, column, block) in blocks_in_edit.iter().copied() {
for (block_row, column, block) in blocks_in_edit.drain(..) {
let insertion_row = match block.disposition {
BlockDisposition::Above => block_row,
BlockDisposition::Below => block_row + 1,
};
let extent_before_block = insertion_row - new_transforms.summary().input_rows;
push_isomorphic(&mut new_transforms, extent_before_block);
new_transforms.push(Transform::block(block.clone(), column), &());
new_transforms.push(Transform::block(block, column), &());
}
old_end = WrapRow(old_end.0.min(old_row_count));

View File

@ -631,55 +631,34 @@ impl EditorElement {
line_layouts: &[text_layout::Line],
cx: &mut LayoutContext,
) -> Vec<(u32, ElementBox)> {
let mut blocks = Vec::new();
snapshot
.blocks_in_range(rows.clone())
.map(|(start_row, block)| {
let anchor_row = block
.position()
.to_point(&snapshot.buffer_snapshot)
.to_display_point(snapshot)
.row();
blocks.extend(
snapshot
.blocks_in_range(rows.clone())
.map(|(start_row, block)| {
let anchor_row = block
.position()
.to_point(&snapshot.buffer_snapshot)
.to_display_point(snapshot)
.row();
let anchor_x = if rows.contains(&anchor_row) {
line_layouts[(anchor_row - rows.start) as usize]
.x_for_index(block.column() as usize)
} else {
layout_line(anchor_row, snapshot, style, cx.text_layout_cache)
.x_for_index(block.column() as usize)
};
let anchor_x = if rows.contains(&anchor_row) {
line_layouts[(anchor_row - rows.start) as usize]
.x_for_index(block.column() as usize)
} else {
layout_line(anchor_row, snapshot, style, cx.text_layout_cache)
.x_for_index(block.column() as usize)
};
let mut element = block.render(&BlockContext { cx, anchor_x });
element.layout(
SizeConstraint {
min: Vector2F::zero(),
max: vec2f(text_width, block.height() as f32 * line_height),
},
cx,
);
(start_row, element)
}),
);
blocks.extend(
snapshot
.excerpt_headers_in_range(rows.clone())
.map(|(rows, render)| {
let mut element = render(cx);
element.layout(
SizeConstraint {
min: Vector2F::zero(),
max: vec2f(text_width, rows.len() as f32 * line_height),
},
cx,
);
(rows.start, element)
}),
);
blocks
let mut element = block.render(&BlockContext { cx, anchor_x });
element.layout(
SizeConstraint {
min: Vector2F::zero(),
max: vec2f(text_width, block.height() as f32 * line_height),
},
cx,
);
(start_row, element)
})
.collect()
}
}