mirror of https://github.com/oxalica/nil.git (synced 2024-11-23 03:57:06 +03:00)

commit 8a1a899cff (parent d2ce10e9af)

    Tweak VFS API to simplify usage
@@ -201,7 +201,7 @@ pub trait SourceDatabase {
 #[derive(Default, Clone, PartialEq, Eq)]
 pub struct Change {
     pub roots: Option<Vec<SourceRoot>>,
-    pub file_changes: Vec<(FileId, Option<Arc<str>>)>,
+    pub file_changes: Vec<(FileId, Arc<str>)>,
 }

 impl Change {
@@ -217,7 +217,7 @@ impl Change {
         self.roots = Some(roots);
     }

-    pub fn change_file(&mut self, file_id: FileId, content: Option<Arc<str>>) {
+    pub fn change_file(&mut self, file_id: FileId, content: Arc<str>) {
         self.file_changes.push((file_id, content));
     }

@@ -232,7 +232,6 @@ impl Change {
             }
         }
         for (file_id, content) in self.file_changes {
-            let content = content.unwrap_or_else(|| String::new().into());
             db.set_file_content_with_durability(file_id, content, Durability::LOW);
         }
     }
@@ -243,7 +242,7 @@ impl fmt::Debug for Change {
         let modified = self
            .file_changes
            .iter()
-           .filter(|(_, content)| content.is_some())
+           .filter(|(_, content)| !content.is_empty())
            .count();
         let cleared = self.file_changes.len() - modified;
         f.debug_struct("Change")
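
The `Change` side of this diff drops the `Option` wrapper: a file's content is always an `Arc<str>`, and a cleared file is simply recorded as empty content (which is also what the adjusted `Debug` impl counts as "cleared"). A minimal caller-side sketch, assuming only the API shown above:

    fn example() -> Change {
        let mut change = Change::default();
        // Set or update a file's content; `&str`/`String` convert into `Arc<str>`.
        change.change_file(FileId(0), "foo = 1".into());
        // A cleared file is now just empty content instead of `None`.
        change.change_file(FileId(1), String::new().into());
        // Applying the change (third hunk above) then writes each entry via
        // `set_file_content_with_durability`.
        change
    }
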
@@ -34,7 +34,7 @@ impl TestDB {
         for (i, (path, text)) in (0u32..).zip(&f.files) {
             let file = FileId(i);
             file_set.insert(file, path.clone());
-            change.change_file(file, Some(text.to_owned().into()));
+            change.change_file(file, text.to_owned().into());
         }
         let entry = file_set
             .get_file_for_path(&"/default.nix".try_into().unwrap())

@@ -1,4 +1,6 @@
-use crate::{semantic_tokens, LineMap, LspError, Result, StateSnapshot, Vfs};
+use std::sync::Arc;
+
+use crate::{semantic_tokens, LineMap, LspError, Result, Vfs};
 use ide::{
     CompletionItem, CompletionItemKind, Diagnostic, FileId, FilePos, FileRange, HlRange, Severity,
     TextEdit, WorkspaceEdit,
@@ -11,37 +13,39 @@ use lsp_types::{
 };
 use text_size::{TextRange, TextSize};

-pub(crate) fn from_file(snap: &StateSnapshot, doc: &TextDocumentIdentifier) -> Result<FileId> {
-    let vfs = snap.vfs.read().unwrap();
+pub(crate) fn from_file(vfs: &Vfs, doc: &TextDocumentIdentifier) -> Result<FileId> {
     vfs.get_file_for_uri(&doc.uri)
 }

-pub(crate) fn from_pos(snap: &StateSnapshot, file: FileId, pos: Position) -> Result<TextSize> {
-    let vfs = snap.vfs.read().unwrap();
-    let line_map = vfs.file_line_map(file);
-    let pos = line_map.pos(pos.line, pos.character);
-    Ok(pos)
+pub(crate) fn from_pos(line_map: &LineMap, pos: Position) -> Result<TextSize> {
+    Ok(line_map.pos(pos.line, pos.character))
 }

 pub(crate) fn from_file_pos(
-    snap: &StateSnapshot,
+    vfs: &Vfs,
     params: &TextDocumentPositionParams,
-) -> Result<FilePos> {
-    let file = from_file(snap, &params.text_document)?;
-    let pos = from_pos(snap, file, params.position)?;
-    Ok(FilePos::new(file, pos))
+) -> Result<(Arc<LineMap>, FilePos)> {
+    let file = from_file(vfs, &params.text_document)?;
+    let line_map = vfs.file_line_map(file);
+    let pos = from_pos(&line_map, params.position)?;
+    Ok((line_map, FilePos::new(file, pos)))
 }

-pub(crate) fn from_range(snap: &StateSnapshot, file: FileId, range: Range) -> Result<TextRange> {
-    let start = from_pos(snap, file, range.start)?;
-    let end = from_pos(snap, file, range.end)?;
-    Ok(TextRange::new(start, end))
+pub(crate) fn from_range(
+    vfs: &Vfs,
+    file: FileId,
+    range: Range,
+) -> Result<(Arc<LineMap>, TextRange)> {
+    let line_map = vfs.file_line_map(file);
+    let start = from_pos(&line_map, range.start)?;
+    let end = from_pos(&line_map, range.end)?;
+    Ok((line_map, TextRange::new(start, end)))
 }

 pub(crate) fn to_location(vfs: &Vfs, frange: FileRange) -> Location {
     let uri = vfs.uri_for_file(frange.file_id);
     let line_map = vfs.file_line_map(frange.file_id);
-    Location::new(uri, to_range(line_map, frange.range))
+    Location::new(uri, to_range(&line_map, frange.range))
 }

 pub(crate) fn to_range(line_map: &LineMap, range: TextRange) -> Range {
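
Taken together, the reworked helpers borrow a `&Vfs` (or a bare `&LineMap`) and hand the `Arc<LineMap>` back to the caller, so the VFS read guard only has to be held while resolving the file, not while converting results. A hedged caller-side sketch (handler-style code; the helper function itself is hypothetical):

    fn example(
        snap: &StateSnapshot,
        doc: &TextDocumentIdentifier,
        range: lsp_types::Range,
    ) -> Result<lsp_types::Range> {
        let vfs = snap.vfs();
        let file = convert::from_file(&vfs, doc)?;
        let (line_map, text_range) = convert::from_range(&vfs, file, range)?;
        drop(vfs); // release the read lock; the Arc<LineMap> stays usable
        Ok(convert::to_range(&line_map, text_range))
    }
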
@@ -64,7 +68,7 @@ pub(crate) fn to_diagnostics(
                 Severity::Warning => Some(DiagnosticSeverity::WARNING),
                 Severity::IncompleteSyntax => continue,
             },
-            range: to_range(line_map, diag.range),
+            range: to_range(&line_map, diag.range),
             code: None,
             code_description: None,
             source: None,
@@ -102,7 +106,7 @@ pub(crate) fn to_diagnostics(

             ret.push(lsp::Diagnostic {
                 severity: Some(DiagnosticSeverity::HINT),
-                range: to_range(line_map, frange.range),
+                range: to_range(&line_map, frange.range),
                 code: primary_diag.code.clone(),
                 code_description: primary_diag.code_description.clone(),
                 source: primary_diag.source.clone(),
@@ -156,12 +160,10 @@ pub(crate) fn to_rename_error(message: String) -> LspError {
 }

 pub(crate) fn to_prepare_rename_response(
-    vfs: &Vfs,
-    file: FileId,
+    line_map: &LineMap,
     range: TextRange,
     text: String,
 ) -> PrepareRenameResponse {
-    let line_map = vfs.file_line_map(file);
     let range = to_range(line_map, range);
     PrepareRenameResponse::RangeWithPlaceholder {
         range,
@@ -179,7 +181,7 @@ pub(crate) fn to_workspace_edit(vfs: &Vfs, ws_edit: WorkspaceEdit) -> lsp::Works
                 .into_iter()
                 .map(|edit| {
                     let line_map = vfs.file_line_map(file);
-                    to_text_edit(line_map, edit)
+                    to_text_edit(&line_map, edit)
                 })
                 .collect();
             (uri, edits)
@@ -199,14 +201,7 @@ pub(crate) fn to_text_edit(line_map: &LineMap, edit: TextEdit) -> lsp::TextEdit
     }
 }

-pub(crate) fn to_semantic_tokens(
-    snap: &StateSnapshot,
-    file: FileId,
-    hls: &[HlRange],
-) -> Vec<SemanticToken> {
-    let vfs = snap.vfs.read().unwrap();
-    let line_map = vfs.file_line_map(file);
-
+pub(crate) fn to_semantic_tokens(line_map: &LineMap, hls: &[HlRange]) -> Vec<SemanticToken> {
     // We must now exceed the last line.
     let line_count = line_map.line_count();
     if line_count == 0 {

@@ -52,12 +52,12 @@ pub(crate) fn goto_definition(
     snap: StateSnapshot,
     params: GotoDefinitionParams,
 ) -> Result<Option<GotoDefinitionResponse>> {
-    let fpos = convert::from_file_pos(&snap, &params.text_document_position_params)?;
+    let (_, fpos) = convert::from_file_pos(&snap.vfs(), &params.text_document_position_params)?;
     let targets = match snap.analysis.goto_definition(fpos)? {
         None => return Ok(None),
         Some(targets) => targets,
     };
-    let vfs = snap.vfs.read().unwrap();
+    let vfs = snap.vfs();
     let targets = targets
         .into_iter()
         .map(|target| {
@@ -71,12 +71,12 @@ pub(crate) fn references(
     snap: StateSnapshot,
     params: ReferenceParams,
 ) -> Result<Option<Vec<Location>>> {
-    let fpos = convert::from_file_pos(&snap, &params.text_document_position)?;
+    let (_, fpos) = convert::from_file_pos(&snap.vfs(), &params.text_document_position)?;
     let refs = match snap.analysis.references(fpos)? {
         None => return Ok(None),
         Some(refs) => refs,
     };
-    let vfs = snap.vfs.read().unwrap();
+    let vfs = snap.vfs();
     let locs = refs
         .into_iter()
         .map(|frange| convert::to_location(&vfs, frange))
@@ -88,16 +88,14 @@ pub(crate) fn completion(
     snap: StateSnapshot,
     params: CompletionParams,
 ) -> Result<Option<CompletionResponse>> {
-    let fpos = convert::from_file_pos(&snap, &params.text_document_position)?;
+    let (line_map, fpos) = convert::from_file_pos(&snap.vfs(), &params.text_document_position)?;
     let items = match snap.analysis.completions(fpos)? {
         None => return Ok(None),
         Some(items) => items,
     };
-    let vfs = snap.vfs.read().unwrap();
-    let line_map = vfs.file_line_map(fpos.file_id);
     let items = items
         .into_iter()
-        .map(|item| convert::to_completion_item(line_map, item))
+        .map(|item| convert::to_completion_item(&line_map, item))
         .collect::<Vec<_>>();
     Ok(Some(CompletionResponse::Array(items)))
 }
@@ -106,12 +104,13 @@ pub(crate) fn selection_range(
     snap: StateSnapshot,
     params: SelectionRangeParams,
 ) -> Result<Option<Vec<SelectionRange>>> {
-    let file = convert::from_file(&snap, &params.text_document)?;
+    let file = convert::from_file(&snap.vfs(), &params.text_document)?;
+    let line_map = snap.vfs().file_line_map(file);
     let ret = params
         .positions
         .iter()
         .map(|&pos| {
-            let pos = convert::from_pos(&snap, file, pos)?;
+            let pos = convert::from_pos(&line_map, pos)?;
             let frange = FileRange::new(file, TextRange::empty(pos));

             let mut ranges = snap.analysis.expand_selection(frange)?.unwrap_or_default();
@@ -119,16 +118,13 @@ pub(crate) fn selection_range(
                 ranges.push(TextRange::empty(pos));
             }

-            // FIXME: Use Arc for LineMap.
-            let vfs = snap.vfs.read().unwrap();
-            let line_map = vfs.file_line_map(file);
             let mut ret = SelectionRange {
-                range: convert::to_range(line_map, *ranges.last().unwrap()),
+                range: convert::to_range(&line_map, *ranges.last().unwrap()),
                 parent: None,
             };
             for &r in ranges.iter().rev().skip(1) {
                 ret = SelectionRange {
-                    range: convert::to_range(line_map, r),
+                    range: convert::to_range(&line_map, r),
                     parent: Some(ret.into()),
                 };
             }
@@ -143,24 +139,22 @@ pub(crate) fn prepare_rename(
     snap: StateSnapshot,
     params: TextDocumentPositionParams,
 ) -> Result<Option<PrepareRenameResponse>> {
-    let fpos = convert::from_file_pos(&snap, &params)?;
+    let (line_map, fpos) = convert::from_file_pos(&snap.vfs(), &params)?;
     let (range, text) = snap
         .analysis
         .prepare_rename(fpos)?
         .map_err(convert::to_rename_error)?;
-    let vfs = snap.vfs.read().unwrap();
-    let resp = convert::to_prepare_rename_response(&vfs, fpos.file_id, range, text.into());
+    let resp = convert::to_prepare_rename_response(&line_map, range, text.into());
     Ok(Some(resp))
 }

 pub(crate) fn rename(snap: StateSnapshot, params: RenameParams) -> Result<Option<WorkspaceEdit>> {
-    let fpos = convert::from_file_pos(&snap, &params.text_document_position)?;
+    let (_, fpos) = convert::from_file_pos(&snap.vfs(), &params.text_document_position)?;
     let ws_edit = snap
         .analysis
         .rename(fpos, &params.new_name)?
         .map_err(convert::to_rename_error)?;
-    let vfs = snap.vfs.read().unwrap();
-    let resp = convert::to_workspace_edit(&vfs, ws_edit);
+    let resp = convert::to_workspace_edit(&snap.vfs(), ws_edit);
     Ok(Some(resp))
 }

@@ -168,9 +162,10 @@ pub(crate) fn semantic_token_full(
     snap: StateSnapshot,
     params: SemanticTokensParams,
 ) -> Result<Option<SemanticTokensResult>> {
-    let file = convert::from_file(&snap, &params.text_document)?;
+    let file = convert::from_file(&snap.vfs(), &params.text_document)?;
+    let line_map = snap.vfs().file_line_map(file);
     let hls = snap.analysis.syntax_highlight(file, None)?;
-    let toks = convert::to_semantic_tokens(&snap, file, &hls);
+    let toks = convert::to_semantic_tokens(&line_map, &hls);
     Ok(Some(SemanticTokensResult::Tokens(SemanticTokens {
         result_id: None,
         data: toks,
@@ -181,10 +176,10 @@ pub(crate) fn semantic_token_range(
     snap: StateSnapshot,
     params: SemanticTokensRangeParams,
 ) -> Result<Option<SemanticTokensRangeResult>> {
-    let file = convert::from_file(&snap, &params.text_document)?;
-    let range = convert::from_range(&snap, file, params.range)?;
+    let file = convert::from_file(&snap.vfs(), &params.text_document)?;
+    let (line_map, range) = convert::from_range(&snap.vfs(), file, params.range)?;
     let hls = snap.analysis.syntax_highlight(file, Some(range))?;
-    let toks = convert::to_semantic_tokens(&snap, file, &hls);
+    let toks = convert::to_semantic_tokens(&line_map, &hls);
     Ok(Some(SemanticTokensRangeResult::Tokens(SemanticTokens {
         result_id: None,
         data: toks,

@@ -55,7 +55,7 @@ impl State {

                 let relative_path = entry.path().strip_prefix(root).ok()?;
                 let vpath = VfsPath::from_path(relative_path)?;
-                let text = fs::read_to_string(entry.path()).ok();
+                let text = fs::read_to_string(entry.path()).ok().unwrap_or_default();
                 vfs.set_path_content(vpath, text);
                 Some(())
             })();
@@ -112,7 +112,7 @@ impl State {
             .on_sync_mut::<notif::DidOpenTextDocument>(|st, params| {
                 let uri = &params.text_document.uri;
                 st.opened_files.write().unwrap().insert(uri.clone());
-                st.set_vfs_file_content(uri, Some(params.text_document.text))?;
+                st.set_vfs_file_content(uri, params.text_document.text)?;
                 Ok(())
             })?
             .on_sync_mut::<notif::DidCloseTextDocument>(|st, params| {
@@ -125,7 +125,7 @@ impl State {
             })?
             .on_sync_mut::<notif::DidChangeTextDocument>(|st, params| {
                 if let Some(chg) = params.content_changes.into_iter().next() {
-                    st.set_vfs_file_content(&params.text_document.uri, Some(chg.text))?;
+                    st.set_vfs_file_content(&params.text_document.uri, chg.text)?;
                 }
                 Ok(())
             })?
@@ -145,7 +145,7 @@ impl State {
         }
     }

-    fn set_vfs_file_content(&mut self, uri: &Url, text: Option<String>) -> Result<()> {
+    fn set_vfs_file_content(&mut self, uri: &Url, text: String) -> Result<()> {
         self.vfs.write().unwrap().set_uri_content(uri, text)?;
         self.apply_vfs_change();
         Ok(())
@@ -157,7 +157,7 @@ impl State {
         let file_changes = change
             .file_changes
             .iter()
-            .map(|(file, text)| (*file, text.is_some()))
+            .map(|(file, text)| (*file, !text.is_empty()))
             .collect::<Vec<_>>();
         tracing::debug!("Change: {:?}", change);
         self.host.apply_change(change);
@@ -320,5 +320,11 @@ fn result_to_response(id: RequestId, ret: Result<impl Serialize>) -> Result<Resp
 #[derive(Debug)]
 pub struct StateSnapshot {
     pub(crate) analysis: Analysis,
-    pub(crate) vfs: Arc<RwLock<Vfs>>,
+    vfs: Arc<RwLock<Vfs>>,
 }
+
+impl StateSnapshot {
+    pub(crate) fn vfs(&self) -> impl std::ops::Deref<Target = Vfs> + '_ {
+        self.vfs.read().unwrap()
+    }
+}
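
The `vfs()` accessor added above is what lets the handlers write `&snap.vfs()` wherever a `&Vfs` is expected: the `RwLock` read guard is returned as an opaque `impl Deref<Target = Vfs>`, so the field stays private, deref coercion produces the `&Vfs`, and the lock is released when the guard goes out of scope. A standalone sketch of the same pattern (types hypothetical, not from this repository):

    use std::ops::Deref;
    use std::sync::{Arc, RwLock};

    struct Vfs;

    struct Snapshot {
        vfs: Arc<RwLock<Vfs>>,
    }

    impl Snapshot {
        // Expose read-only access without leaking the lock type or the field.
        fn vfs(&self) -> impl Deref<Target = Vfs> + '_ {
            self.vfs.read().unwrap()
        }
    }

    fn takes_vfs(_vfs: &Vfs) {}

    fn main() {
        let snap = Snapshot { vfs: Arc::new(RwLock::new(Vfs)) };
        // Deref coercion turns the guard into `&Vfs`; the temporary guard (and
        // thus the lock) is dropped at the end of this statement.
        takes_vfs(&snap.vfs());
    }
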
@@ -9,7 +9,7 @@ use text_size::TextSize;

 pub struct Vfs {
     // FIXME: Currently this list is append-only.
-    files: Vec<Option<(Arc<str>, LineMap)>>,
+    files: Vec<(Arc<str>, Arc<LineMap>)>,
     /// The root directory, which must be absolute.
     local_root: PathBuf,
     local_file_set: FileSet,
@@ -38,12 +38,6 @@ impl Vfs {
         }
     }

-    fn alloc_file_id(&mut self) -> FileId {
-        let id = u32::try_from(self.files.len()).expect("Length overflow");
-        self.files.push(None);
-        FileId(id)
-    }
-
     fn uri_to_vpath(&self, uri: &Url) -> Result<VfsPath> {
         let path = uri
             .to_file_path()
@@ -54,35 +48,38 @@ impl Vfs {
         Ok(VfsPath::from_path(relative_path).expect("URI is UTF-8"))
     }

-    pub fn set_uri_content(&mut self, uri: &Url, text: Option<String>) -> Result<()> {
+    pub fn set_uri_content(&mut self, uri: &Url, text: String) -> Result<()> {
         let vpath = self.uri_to_vpath(uri)?;
         self.set_path_content(vpath, text);
         Ok(())
     }

-    pub fn set_path_content(&mut self, path: VfsPath, text: Option<String>) -> Option<FileId> {
-        let content = text.and_then(LineMap::normalize);
-        let (file, (text, line_map)) = match (self.local_file_set.get_file_for_path(&path), content)
-        {
-            (Some(file), None) => {
-                self.local_file_set.remove_file(file);
-                self.root_changed = true;
-                self.files[file.0 as usize] = None;
-                return None;
+    pub fn set_path_content(&mut self, path: VfsPath, text: String) {
+        // For invalid files (currently, too large), we store them as empty files in database,
+        // but remove them from `local_file_set`. Thus any interactions on them would fail.
+        let (text, line_map, is_valid) = LineMap::normalize(text)
+            .map(|(text, line_map)| (text, line_map, true))
+            .unwrap_or_default();
+        let text = <Arc<str>>::from(text);
+        let line_map = Arc::new(line_map);
+        match self.local_file_set.get_file_for_path(&path) {
+            Some(file) => {
+                self.files[file.0 as usize] = (text.clone(), line_map);
+                self.change.change_file(file, text);
+                if !is_valid {
+                    self.local_file_set.remove_file(file);
+                }
             }
-            (None, None) => return None,
-            (Some(file), Some(content)) => (file, content),
-            (None, Some(content)) => {
-                let file = self.alloc_file_id();
+            None => {
+                if !is_valid {
+                    return;
+                }
+                let file = FileId(u32::try_from(self.files.len()).expect("Length overflow"));
                 self.local_file_set.insert(file, path);
                 self.root_changed = true;
-                (file, content)
+                self.files.push((text.clone(), line_map));
+                self.change.change_file(file, text);
             }
-        };
-        let text = <Arc<str>>::from(text);
-        self.change.change_file(file, Some(text.clone()));
-        self.files[file.0 as usize] = Some((text, line_map));
-        Some(file)
+        }
     }

     pub fn get_file_for_uri(&self, uri: &Url) -> Result<FileId> {
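
Seen from the outside, the reworked `Vfs` no longer deals in `Option`s: callers always pass a `String` (an empty string stands in for a cleared or invalid file), and `file_line_map` hands back a shared `Arc<LineMap>` instead of a borrow tied to the `Vfs`. A hedged usage sketch, assuming only the methods visible in this diff; the wrapper function itself is hypothetical:

    fn example(vfs: &mut Vfs, uri: &Url) -> Result<()> {
        // Content is a plain `String`; there is no `None` case any more.
        vfs.set_uri_content(uri, "{ foo = 1; }".to_owned())?;
        let file = vfs.get_file_for_uri(uri)?;
        // The line map is an `Arc`, so it stays usable across later VFS writes.
        let line_map: Arc<LineMap> = vfs.file_line_map(file);
        let _ = line_map.line_count();
        Ok(())
    }
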
@@ -116,15 +113,12 @@ impl Vfs {
         change
     }

-    pub fn file_line_map(&self, file_id: FileId) -> &LineMap {
-        &self.files[file_id.0 as usize]
-            .as_ref()
-            .expect("File must be valid")
-            .1
+    pub fn file_line_map(&self, file_id: FileId) -> Arc<LineMap> {
+        self.files[file_id.0 as usize].1.clone()
     }
 }

-#[derive(Default, Debug, PartialEq, Eq)]
+#[derive(Debug, PartialEq, Eq)]
 pub struct LineMap {
     line_starts: Vec<u32>,
     char_diffs: HashMap<u32, Vec<(u32, CodeUnitsDiff)>>,
@@ -136,6 +130,12 @@ enum CodeUnitsDiff {
     Two = 2,
 }

+impl Default for LineMap {
+    fn default() -> Self {
+        Self::normalize(String::new()).unwrap().1
+    }
+}
+
 impl LineMap {
     fn normalize(text: String) -> Option<(String, Self)> {
         // Too large for `TextSize`.
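
The hand-written `Default` impl exists to back the `.unwrap_or_default()` call in `set_path_content`: when `LineMap::normalize` rejects a file, the fallback tuple `(String, LineMap, bool)` needs every element to implement `Default`, and `LineMap::default()` is defined as the line map of the empty string. A minimal standalone illustration of that mechanism (types hypothetical, not from this repository):

    #[derive(Debug, Default, PartialEq)]
    struct LineMapLike;

    // Stand-in for `LineMap::normalize`: `None` means the input was rejected.
    fn normalize_like(valid: bool) -> Option<(String, LineMapLike)> {
        valid.then(|| ("text".to_owned(), LineMapLike))
    }

    fn main() {
        let (text, map, is_valid) = normalize_like(false)
            .map(|(text, map)| (text, map, true))
            .unwrap_or_default();
        // Rejected input falls back to empty text, a default line map, and `is_valid == false`.
        assert_eq!((text.as_str(), map, is_valid), ("", LineMapLike, false));
    }
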