Start adding concept of Unclipped text coordinates

Co-Authored-By: Max Brunsfeld <max@zed.dev>
Julia 2022-11-16 22:20:16 -05:00
parent 436c89650a
commit 1c84e77c37
7 changed files with 139 additions and 124 deletions
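
For orientation before the file-by-file changes, here is a minimal, hypothetical sketch of the idea this commit starts introducing: a position received from an untrusted source (such as an LSP server) is wrapped in Unclipped<T> until it has been clipped against the buffer's actual contents. The Unclipped struct below mirrors the one added in this commit; the simplified PointUtf16 and the clip helper are illustrative stand-ins for the real rope types and clip_point_utf16.

    // Positions coming from outside (e.g. LSP) may point past the end of a line
    // or into the middle of a character; wrap them until they have been clipped.
    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
    pub struct Unclipped<T>(pub T);

    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
    pub struct PointUtf16 {
        pub row: u32,
        pub column: u32,
    }

    // An LSP position is untrusted, so the conversion keeps the Unclipped wrapper.
    fn point_from_lsp(line: u32, character: u32) -> Unclipped<PointUtf16> {
        Unclipped(PointUtf16 { row: line, column: character })
    }

    // Illustrative clip: clamp the column to the given line length. The real
    // clip_point_utf16 also respects character boundaries and takes a Bias.
    fn clip(point: Unclipped<PointUtf16>, line_len: u32) -> PointUtf16 {
        PointUtf16 { row: point.0.row, column: point.0.column.min(line_len) }
    }

    fn main() {
        // A server reports column 99 on a 5-column line; clipping makes it usable.
        let clipped = clip(point_from_lsp(0, 99), 5);
        assert_eq!(clipped, PointUtf16 { row: 0, column: 5 });
    }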

View File

@@ -6,7 +6,7 @@ use std::{
 ops::Range,
 };
 use sum_tree::{self, Bias, SumTree};
-use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
+use text::{Anchor, FromAnchor, PointUtf16, ToOffset, Unclipped};
 #[derive(Clone, Debug, Default)]
 pub struct DiagnosticSet {
@@ -63,15 +63,15 @@ impl DiagnosticSet {
 pub fn new<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
 where
-I: IntoIterator<Item = DiagnosticEntry<PointUtf16>>,
+I: IntoIterator<Item = DiagnosticEntry<Unclipped<PointUtf16>>>,
 {
 let mut entries = iter.into_iter().collect::<Vec<_>>();
 entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end)));
 Self {
 diagnostics: SumTree::from_iter(
 entries.into_iter().map(|entry| DiagnosticEntry {
-range: buffer.clamped_anchor_before(entry.range.start)
-..buffer.clamped_anchor_after(entry.range.end),
+range: buffer.anchor_before(entry.range.start)
+..buffer.anchor_before(entry.range.end),
 diagnostic: entry.diagnostic,
 }),
 buffer,

View File

@@ -1053,8 +1053,8 @@ pub fn point_to_lsp(point: PointUtf16) -> lsp::Position {
 lsp::Position::new(point.row, point.column)
 }
-pub fn point_from_lsp(point: lsp::Position) -> PointUtf16 {
-PointUtf16::new(point.line, point.character)
+pub fn point_from_lsp(point: lsp::Position) -> Unclipped<PointUtf16> {
+Unclipped(PointUtf16::new(point.line, point.character))
 }
 pub fn range_to_lsp(range: Range<PointUtf16>) -> lsp::Range {
@@ -1064,7 +1064,7 @@ pub fn range_to_lsp(range: Range<PointUtf16>) -> lsp::Range {
 }
 }
-pub fn range_from_lsp(range: lsp::Range) -> Range<PointUtf16> {
+pub fn range_from_lsp(range: lsp::Range) -> Range<Unclipped<PointUtf16>> {
 let mut start = point_from_lsp(range.start);
 let mut end = point_from_lsp(range.end);
 if start > end {

View File

@@ -128,11 +128,11 @@ impl LspCommand for PrepareRename {
 ) = message
 {
 let Range { start, end } = range_from_lsp(range);
-if buffer.clip_point_utf16(start, Bias::Left) == start
-&& buffer.clip_point_utf16(end, Bias::Left) == end
+if buffer.clip_point_utf16(start, Bias::Left) == start.0
+&& buffer.clip_point_utf16(end, Bias::Left) == end.0
 {
 return Ok(Some(
-buffer.clamped_anchor_after(start)..buffer.clamped_anchor_before(end),
+buffer.anchor_after(start)..buffer.anchor_before(end),
 ));
 }
 }
@@ -145,7 +145,7 @@ impl LspCommand for PrepareRename {
 project_id,
 buffer_id: buffer.remote_id(),
 position: Some(language::proto::serialize_anchor(
-&buffer.clamped_anchor_before(self.position),
+&buffer.anchor_before(self.position),
 )),
 version: serialize_version(&buffer.version()),
 }
@@ -264,7 +264,7 @@ impl LspCommand for PerformRename {
 project_id,
 buffer_id: buffer.remote_id(),
 position: Some(language::proto::serialize_anchor(
-&buffer.clamped_anchor_before(self.position),
+&buffer.anchor_before(self.position),
 )),
 new_name: self.new_name.clone(),
 version: serialize_version(&buffer.version()),
@@ -362,7 +362,7 @@ impl LspCommand for GetDefinition {
 project_id,
 buffer_id: buffer.remote_id(),
 position: Some(language::proto::serialize_anchor(
-&buffer.clamped_anchor_before(self.position),
+&buffer.anchor_before(self.position),
 )),
 version: serialize_version(&buffer.version()),
 }
@@ -448,7 +448,7 @@ impl LspCommand for GetTypeDefinition {
 project_id,
 buffer_id: buffer.remote_id(),
 position: Some(language::proto::serialize_anchor(
-&buffer.clamped_anchor_before(self.position),
+&buffer.anchor_before(self.position),
 )),
 version: serialize_version(&buffer.version()),
 }
@@ -631,8 +631,8 @@ async fn location_links_from_lsp(
 origin_buffer.clip_point_utf16(point_from_lsp(origin_range.end), Bias::Left);
 Location {
 buffer: buffer.clone(),
-range: origin_buffer.clamped_anchor_after(origin_start)
-..origin_buffer.clamped_anchor_before(origin_end),
+range: origin_buffer.anchor_after(origin_start)
+..origin_buffer.anchor_before(origin_end),
 }
 });
@@ -643,8 +643,8 @@ async fn location_links_from_lsp(
 target_buffer.clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
 let target_location = Location {
 buffer: target_buffer_handle,
-range: target_buffer.clamped_anchor_after(target_start)
-..target_buffer.clamped_anchor_before(target_end),
+range: target_buffer.anchor_after(target_start)
+..target_buffer.anchor_before(target_end),
 };
 definitions.push(LocationLink {
@@ -743,8 +743,8 @@ impl LspCommand for GetReferences {
 .clip_point_utf16(point_from_lsp(lsp_location.range.end), Bias::Left);
 references.push(Location {
 buffer: target_buffer_handle,
-range: target_buffer.clamped_anchor_after(target_start)
-..target_buffer.clamped_anchor_before(target_end),
+range: target_buffer.anchor_after(target_start)
+..target_buffer.anchor_before(target_end),
 });
 });
 }
@@ -758,7 +758,7 @@ impl LspCommand for GetReferences {
 project_id,
 buffer_id: buffer.remote_id(),
 position: Some(language::proto::serialize_anchor(
-&buffer.clamped_anchor_before(self.position),
+&buffer.anchor_before(self.position),
 )),
 version: serialize_version(&buffer.version()),
 }
@@ -884,8 +884,8 @@ impl LspCommand for GetDocumentHighlights {
 let end = buffer
 .clip_point_utf16(point_from_lsp(lsp_highlight.range.end), Bias::Left);
 DocumentHighlight {
-range: buffer.clamped_anchor_after(start)
-..buffer.clamped_anchor_before(end),
+range: buffer.anchor_after(start)
+..buffer.anchor_before(end),
 kind: lsp_highlight
 .kind
 .unwrap_or(lsp::DocumentHighlightKind::READ),
@@ -900,7 +900,7 @@ impl LspCommand for GetDocumentHighlights {
 project_id,
 buffer_id: buffer.remote_id(),
 position: Some(language::proto::serialize_anchor(
-&buffer.clamped_anchor_before(self.position),
+&buffer.anchor_before(self.position),
 )),
 version: serialize_version(&buffer.version()),
 }
@@ -1020,8 +1020,8 @@ impl LspCommand for GetHover {
 let token_start =
 buffer.clip_point_utf16(point_from_lsp(range.start), Bias::Left);
 let token_end = buffer.clip_point_utf16(point_from_lsp(range.end), Bias::Left);
-buffer.clamped_anchor_after(token_start)
-..buffer.clamped_anchor_before(token_end)
+buffer.anchor_after(token_start)
+..buffer.anchor_before(token_end)
 })
 });
@@ -1103,7 +1103,7 @@ impl LspCommand for GetHover {
 project_id,
 buffer_id: buffer.remote_id(),
 position: Some(language::proto::serialize_anchor(
-&buffer.clamped_anchor_before(self.position),
+&buffer.anchor_before(self.position),
 )),
 version: serialize_version(&buffer.version),
 }

View File

@@ -25,8 +25,8 @@ use language::{
 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CharKind, CodeAction,
 CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent,
 File as _, Language, LanguageRegistry, LanguageServerName, LocalFile, OffsetRangeExt,
-Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToOffsetClipped, ToPointUtf16,
-Transaction,
+Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16,
+Transaction, Unclipped,
 };
 use lsp::{
 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
@@ -2598,7 +2598,7 @@ impl Project {
 language_server_id: usize,
 abs_path: PathBuf,
 version: Option<i32>,
-diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
+diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
 cx: &mut ModelContext<Project>,
 ) -> Result<(), anyhow::Error> {
 let (worktree, relative_path) = self
@@ -2636,7 +2636,7 @@ impl Project {
 fn update_buffer_diagnostics(
 &mut self,
 buffer: &ModelHandle<Buffer>,
-mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
+mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
 version: Option<i32>,
 cx: &mut ModelContext<Self>,
 ) -> Result<()> {
@@ -2677,16 +2677,14 @@ impl Project {
 end = entry.range.end;
 }
-let mut range = snapshot.clip_point_utf16(start, Bias::Left)
-..snapshot.clip_point_utf16(end, Bias::Right);
+let mut range = start..end;
-// Expand empty ranges by one character
+// Expand empty ranges by one codepoint
 if range.start == range.end {
+// This will go to the next boundary when being clipped
 range.end.column += 1;
-range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
 if range.start == range.end && range.end.column > 0 {
 range.start.column -= 1;
-range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
 }
 }
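
A brief aside on the hunk above (illustrative only, with simplified stand-in names): diagnostic ranges now stay unclipped when they arrive, an empty range is widened by one UTF-16 code unit, and the widened endpoint is only pulled back to a valid position later, when it is clipped while anchors are resolved. A rough sketch of that deferred-clip idea, where `clip` plays the role of clip_point_utf16 without bias or UTF-16 edge cases:

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct PointUtf16 { row: u32, column: u32 }

    fn widen_if_empty(start: PointUtf16, mut end: PointUtf16) -> (PointUtf16, PointUtf16) {
        if start == end {
            // The widened end may briefly point past the end of the line; that is
            // acceptable while the range is still treated as unclipped.
            end.column += 1;
        }
        (start, end)
    }

    fn clip(p: PointUtf16, line_len: u32) -> PointUtf16 {
        PointUtf16 { row: p.row, column: p.column.min(line_len) }
    }

    fn main() {
        // An empty diagnostic at column 2 of a 4-column line becomes a 1-column range.
        let (start, end) = widen_if_empty(
            PointUtf16 { row: 0, column: 2 },
            PointUtf16 { row: 0, column: 2 },
        );
        assert_eq!(start, PointUtf16 { row: 0, column: 2 });
        assert_eq!(clip(end, 4), PointUtf16 { row: 0, column: 3 }); // clipped later
    }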
@@ -3290,7 +3288,7 @@ impl Project {
 };
 let position = position.to_point_utf16(source_buffer);
-let anchor = source_buffer.clamped_anchor_after(position);
+let anchor = source_buffer.anchor_after(position);
 if worktree.read(cx).as_local().is_some() {
 let buffer_abs_path = buffer_abs_path.unwrap();
@@ -3356,7 +3354,7 @@ impl Project {
 return None;
 }
 (
-snapshot.clamped_anchor_before(start)..snapshot.clamped_anchor_after(end),
+snapshot.anchor_before(start)..snapshot.anchor_after(end),
 edit.new_text.clone(),
 )
 }
@@ -5779,11 +5777,11 @@ impl Project {
 }
 }
 } else if range.end == range.start {
-let anchor = snapshot.clamped_anchor_after(range.start);
+let anchor = snapshot.anchor_after(range.start);
 edits.push((anchor..anchor, new_text));
 } else {
-let edit_start = snapshot.clamped_anchor_after(range.start);
-let edit_end = snapshot.clamped_anchor_before(range.end);
+let edit_start = snapshot.anchor_after(range.start);
+let edit_end = snapshot.anchor_before(range.end);
 edits.push((edit_start..edit_end, new_text));
 }
 }

View File

@@ -20,6 +20,7 @@ use gpui::{
 executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
 Task,
 };
+use language::Unclipped;
 use language::{
 proto::{deserialize_version, serialize_line_ending, serialize_version},
 Buffer, DiagnosticEntry, PointUtf16, Rope,
@@ -65,7 +66,7 @@ pub struct LocalWorktree {
 _background_scanner_task: Option<Task<()>>,
 poll_task: Option<Task<()>>,
 share: Option<ShareState>,
-diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<PointUtf16>>>,
+diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<Unclipped<PointUtf16>>>>,
 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
 client: Arc<Client>,
 fs: Arc<dyn Fs>,
@@ -499,7 +500,7 @@ impl LocalWorktree {
 })
 }
-pub fn diagnostics_for_path(&self, path: &Path) -> Option<Vec<DiagnosticEntry<PointUtf16>>> {
+pub fn diagnostics_for_path(&self, path: &Path) -> Option<Vec<DiagnosticEntry<Unclipped<PointUtf16>>>> {
 self.diagnostics.get(path).cloned()
 }
@@ -507,7 +508,7 @@ impl LocalWorktree {
 &mut self,
 language_server_id: usize,
 worktree_path: Arc<Path>,
-diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
+diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
 _: &mut ModelContext<Worktree>,
 ) -> Result<bool> {
 self.diagnostics.remove(&worktree_path);

View File

@@ -12,6 +12,9 @@ pub use offset_utf16::OffsetUtf16;
 pub use point::Point;
 pub use point_utf16::PointUtf16;
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Unclipped<T>(pub T);
 #[cfg(test)]
 const CHUNK_BASE: usize = 6;
@@ -259,7 +262,15 @@
 .map_or(0, |chunk| chunk.point_to_offset(overshoot))
 }
-pub fn point_utf16_to_offset_clipped(&self, point: PointUtf16) -> usize {
+pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
+self.point_utf16_to_offset_impl(point, false)
+}
+
+pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
+self.point_utf16_to_offset_impl(point.0, true)
+}
+
+fn point_utf16_to_offset_impl(&self, point: PointUtf16, clip: bool) -> usize {
 if point >= self.summary().lines_utf16() {
 return self.summary().len;
 }
@@ -269,10 +280,10 @@
 cursor.start().1
 + cursor
 .item()
-.map_or(0, |chunk| chunk.point_utf16_to_offset_clipped(overshoot))
+.map_or(0, |chunk| chunk.point_utf16_to_offset(overshoot, clip))
 }
-pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
+pub fn point_utf16_to_point_clipped(&self, point: PointUtf16) -> Point {
 if point >= self.summary().lines_utf16() {
 return self.summary().lines;
 }
@@ -280,9 +291,9 @@
 cursor.seek(&point, Bias::Left, &());
 let overshoot = point - cursor.start().0;
 cursor.start().1
-+ cursor
-.item()
-.map_or(Point::zero(), |chunk| chunk.point_utf16_to_point(overshoot))
++ cursor.item().map_or(Point::zero(), |chunk| {
+chunk.point_utf16_to_point_clipped(overshoot)
+})
 }
 pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize {
@@ -330,11 +341,11 @@
 }
 }
-pub fn clip_point_utf16(&self, point: PointUtf16, bias: Bias) -> PointUtf16 {
+pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
 let mut cursor = self.chunks.cursor::<PointUtf16>();
-cursor.seek(&point, Bias::Right, &());
+cursor.seek(&point.0, Bias::Right, &());
 if let Some(chunk) = cursor.item() {
-let overshoot = point - cursor.start();
+let overshoot = Unclipped(point.0 - cursor.start());
 *cursor.start() + chunk.clip_point_utf16(overshoot, bias)
 } else {
 self.summary().lines_utf16()
@@ -711,7 +722,7 @@ impl Chunk {
 point_utf16
 }
-fn point_utf16_to_offset_clipped(&self, target: PointUtf16) -> usize {
+fn point_utf16_to_offset(&self, target: PointUtf16, clip: bool) -> usize {
 let mut offset = 0;
 let mut point = PointUtf16::new(0, 0);
@@ -723,14 +734,26 @@
 if ch == '\n' {
 point.row += 1;
 point.column = 0;
+if point.row > target.row {
+if clip {
+// Return the offset of the newline
+return offset;
+}
+panic!(
+"point {:?} is beyond the end of a line with length {}",
+target, point.column
+);
+}
 } else {
 point.column += ch.len_utf16() as u32;
 }
 if point > target {
-// If the point is past the end of a line or inside of a code point,
-// return the last valid offset before the point.
+if clip {
+// Return the offset of the codepoint we have landed within, biased left
 return offset;
+}
+panic!("point {:?} is inside of codepoint {:?}", target, ch);
 }
 offset += ch.len_utf8();
@@ -739,17 +762,21 @@
 offset
 }
-fn point_utf16_to_point(&self, target: PointUtf16) -> Point {
+fn point_utf16_to_point_clipped(&self, target: PointUtf16) -> Point {
 let mut point = Point::zero();
 let mut point_utf16 = PointUtf16::zero();
 for ch in self.0.chars() {
-if point_utf16 >= target {
-if point_utf16 > target {
-panic!("point {:?} is inside of character {:?}", target, ch);
-}
+if point_utf16 == target {
 break;
 }
+if point_utf16 > target {
+// If the point is past the end of a line or inside of a code point,
+// return the last valid point before the target.
+return point;
+}
 if ch == '\n' {
 point_utf16 += PointUtf16::new(1, 0);
 point += Point::new(1, 0);
@@ -758,6 +785,7 @@
 point += Point::new(0, ch.len_utf8() as u32);
 }
 }
+
 point
 }
@@ -777,11 +805,11 @@
 unreachable!()
 }
-fn clip_point_utf16(&self, target: PointUtf16, bias: Bias) -> PointUtf16 {
+fn clip_point_utf16(&self, target: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
 for (row, line) in self.0.split('\n').enumerate() {
-if row == target.row as usize {
+if row == target.0.row as usize {
 let mut code_units = line.encode_utf16();
-let mut column = code_units.by_ref().take(target.column as usize).count();
+let mut column = code_units.by_ref().take(target.0.column as usize).count();
 if char::decode_utf16(code_units).next().transpose().is_err() {
 match bias {
 Bias::Left => column -= 1,
@@ -1114,15 +1142,15 @@ mod tests {
 );
 assert_eq!(
-rope.clip_point_utf16(PointUtf16::new(0, 1), Bias::Left),
+rope.clip_point_utf16(Unclipped(PointUtf16::new(0, 1)), Bias::Left),
 PointUtf16::new(0, 0)
 );
 assert_eq!(
-rope.clip_point_utf16(PointUtf16::new(0, 1), Bias::Right),
+rope.clip_point_utf16(Unclipped(PointUtf16::new(0, 1)), Bias::Right),
 PointUtf16::new(0, 2)
 );
 assert_eq!(
-rope.clip_point_utf16(PointUtf16::new(0, 3), Bias::Right),
+rope.clip_point_utf16(Unclipped(PointUtf16::new(0, 3)), Bias::Right),
 PointUtf16::new(0, 2)
 );
@@ -1210,7 +1238,7 @@ mod tests {
 point
 );
 assert_eq!(
-actual.point_utf16_to_offset_clipped(point_utf16),
+actual.point_utf16_to_offset(point_utf16),
 ix,
 "point_utf16_to_offset({:?})",
 point_utf16
@@ -1238,7 +1266,7 @@ mod tests {
 }
 let mut offset_utf16 = OffsetUtf16(0);
-let mut point_utf16 = PointUtf16::zero();
+let mut point_utf16 = Unclipped(PointUtf16::zero());
 for unit in expected.encode_utf16() {
 let left_offset = actual.clip_offset_utf16(offset_utf16, Bias::Left);
 let right_offset = actual.clip_offset_utf16(offset_utf16, Bias::Right);
@@ -1250,15 +1278,15 @@ mod tests {
 let left_point = actual.clip_point_utf16(point_utf16, Bias::Left);
 let right_point = actual.clip_point_utf16(point_utf16, Bias::Right);
 assert!(right_point >= left_point);
-// Ensure translating UTF-16 points to offsets doesn't panic.
-actual.point_utf16_to_offset_clipped(left_point);
-actual.point_utf16_to_offset_clipped(right_point);
+// Ensure translating valid UTF-16 points to offsets doesn't panic.
+actual.point_utf16_to_offset(left_point);
+actual.point_utf16_to_offset(right_point);
 offset_utf16.0 += 1;
 if unit == b'\n' as u16 {
-point_utf16 += PointUtf16::new(1, 0);
+point_utf16.0 += PointUtf16::new(1, 0);
 } else {
-point_utf16 += PointUtf16::new(0, 1);
+point_utf16.0 += PointUtf16::new(0, 1);
 }
 }
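
To summarize the rope changes above with a rough, self-contained sketch (not code from this commit): point_utf16_to_offset now treats its input as already valid and panics on out-of-range coordinates, while unclipped_point_utf16_to_offset accepts an Unclipped point and clamps it. The tiny single-line, ASCII-only "rope" below is only meant to illustrate that split.

    // Trusted vs. untrusted conversions, on a deliberately tiny single-line rope.
    struct Unclipped<T>(T);

    struct TinyRope {
        text: String, // one line of ASCII in this sketch
    }

    impl TinyRope {
        // Trusted path: callers promise the column is valid, so going past the end
        // is a programmer error (mirrors the new panicking branches in Chunk).
        fn column_to_offset(&self, column: usize) -> usize {
            assert!(column <= self.text.len(), "column {column} is out of range");
            column
        }

        // Untrusted path: clamp instead of panicking, like unclipped_point_utf16_to_offset.
        fn unclipped_column_to_offset(&self, column: Unclipped<usize>) -> usize {
            column.0.min(self.text.len())
        }
    }

    fn main() {
        let rope = TinyRope { text: "hello".into() };
        assert_eq!(rope.column_to_offset(3), 3);
        assert_eq!(rope.unclipped_column_to_offset(Unclipped(99)), 5);
    }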

View File

@@ -1590,12 +1590,16 @@ impl BufferSnapshot {
 self.visible_text.point_to_offset(point)
 }
-pub fn point_utf16_to_offset_clipped(&self, point: PointUtf16) -> usize {
-self.visible_text.point_utf16_to_offset_clipped(point)
+pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
+self.visible_text.point_utf16_to_offset(point)
 }
-pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
-self.visible_text.point_utf16_to_point(point)
+pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
+self.visible_text.unclipped_point_utf16_to_offset(point)
+}
+
+pub fn point_utf16_to_point_clipped(&self, point: PointUtf16) -> Point {
+self.visible_text.point_utf16_to_point_clipped(point)
 }
 pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
@@ -1649,12 +1653,6 @@ impl BufferSnapshot {
 self.visible_text.chunks_in_range(start..end)
 }
-pub fn text_for_clamped_range<T: ToOffsetClipped>(&self, range: Range<T>) -> Chunks<'_> {
-let start = range.start.to_offset_clipped(self);
-let end = range.end.to_offset_clipped(self);
-self.visible_text.chunks_in_range(start..end)
-}
 pub fn line_len(&self, row: u32) -> u32 {
 let row_start_offset = Point::new(row, 0).to_offset(self);
 let row_end_offset = if row >= self.max_point().row {
@@ -1804,18 +1802,10 @@ impl BufferSnapshot {
 self.anchor_at(position, Bias::Left)
 }
-pub fn clamped_anchor_before<T: ToOffsetClipped>(&self, position: T) -> Anchor {
-self.anchor_at_offset(position.to_offset_clipped(self), Bias::Left)
-}
 pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
 self.anchor_at(position, Bias::Right)
 }
-pub fn clamped_anchor_after<T: ToOffsetClipped>(&self, position: T) -> Anchor {
-self.anchor_at_offset(position.to_offset_clipped(self), Bias::Right)
-}
 pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
 self.anchor_at_offset(position.to_offset(self), bias)
 }
@@ -1857,7 +1847,7 @@ impl BufferSnapshot {
 self.visible_text.clip_offset_utf16(offset, bias)
 }
-pub fn clip_point_utf16(&self, point: PointUtf16, bias: Bias) -> PointUtf16 {
+pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
 self.visible_text.clip_point_utf16(point, bias)
 }
@@ -2395,13 +2385,15 @@ impl<'a, T: ToOffset> ToOffset for &'a T {
 }
 }
-pub trait ToOffsetClipped {
-fn to_offset_clipped(&self, snapshot: &BufferSnapshot) -> usize;
+impl ToOffset for PointUtf16 {
+fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+snapshot.point_utf16_to_offset(*self)
+}
 }
-impl ToOffsetClipped for PointUtf16 {
-fn to_offset_clipped<'a>(&self, snapshot: &BufferSnapshot) -> usize {
-snapshot.point_utf16_to_offset_clipped(*self)
+impl ToOffset for Unclipped<PointUtf16> {
+fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+snapshot.unclipped_point_utf16_to_offset(*self)
 }
 }
@@ -2427,13 +2419,9 @@ impl ToPoint for Point {
 }
 }
-pub trait ToPointClamped {
-fn to_point_clamped(&self, snapshot: &BufferSnapshot) -> Point;
-}
-impl ToPointClamped for PointUtf16 {
-fn to_point_clamped<'a>(&self, snapshot: &BufferSnapshot) -> Point {
-snapshot.point_utf16_to_point(*self)
+impl ToPoint for Unclipped<PointUtf16> {
+fn to_point<'a>(&self, snapshot: &BufferSnapshot) -> Point {
+snapshot.point_utf16_to_point_clipped(self.0)
 }
 }
@@ -2487,27 +2475,27 @@ impl ToOffsetUtf16 for OffsetUtf16 {
 }
 }
-pub trait Clip {
-fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self;
-}
-impl Clip for usize {
-fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self {
-snapshot.clip_offset(*self, bias)
-}
-}
-impl Clip for Point {
-fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self {
-snapshot.clip_point(*self, bias)
-}
-}
-impl Clip for PointUtf16 {
-fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self {
-snapshot.clip_point_utf16(*self, bias)
-}
-}
+// pub trait Clip {
+// fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self;
+// }
+// impl Clip for usize {
+// fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self {
+// snapshot.clip_offset(*self, bias)
+// }
+// }
+// impl Clip for Point {
+// fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self {
+// snapshot.clip_point(*self, bias)
+// }
+// }
+// impl Clip for Unclipped<PointUtf16> {
+// fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self {
+// snapshot.clip_point_utf16(self.0, bias)
+// }
+// }
 pub trait FromAnchor {
 fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;