mirror of https://github.com/oxalica/nil.git (synced 2024-11-23 03:57:06 +03:00)
Unify naming of bijection getters to A_for_B for B -> A
This commit is contained in: parent 8a1a899cff, commit 0ab60fdff0
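
The convention being applied: a getter that takes a B and returns an A is named a_for_b, so both directions of a bijection read off their return type. A minimal, self-contained sketch of the idea, using hypothetical stand-in types rather than this repository's real FileSet, VfsPath, or FileId:

use std::collections::HashMap;

// Hypothetical stand-in; the real `FileId` and `VfsPath` live in this repository's vfs code.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

#[derive(Default)]
struct FileSetSketch {
    files: HashMap<String, FileId>, // path -> file
    paths: HashMap<FileId, String>, // file -> path
}

impl FileSetSketch {
    fn insert(&mut self, file: FileId, path: String) {
        self.files.insert(path.clone(), file);
        self.paths.insert(file, path);
    }

    /// path -> file, so the getter is named `file_for_path`.
    fn file_for_path(&self, path: &str) -> Option<FileId> {
        self.files.get(path).copied()
    }

    /// file -> path, so the getter is named `path_for_file`.
    fn path_for_file(&self, file: FileId) -> Option<&str> {
        self.paths.get(&file).map(String::as_str)
    }
}

fn main() {
    let mut set = FileSetSketch::default();
    set.insert(FileId(0), "/default.nix".to_owned());
    assert_eq!(set.file_for_path("/default.nix"), Some(FileId(0)));
    assert_eq!(set.path_for_file(FileId(0)), Some("/default.nix"));
}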
@@ -95,7 +95,7 @@ impl FileSet {
         }
     }
 
-    pub fn get_file_for_path(&self, path: &VfsPath) -> Option<FileId> {
+    pub fn file_for_path(&self, path: &VfsPath) -> Option<FileId> {
         self.files.get(path).copied()
     }
 
@@ -126,8 +126,8 @@ impl SourceRoot {
         Self { file_set, entry }
     }
 
-    pub fn get_file_for_path(&self, path: &VfsPath) -> Option<FileId> {
-        self.file_set.get_file_for_path(path)
+    pub fn file_for_path(&self, path: &VfsPath) -> Option<FileId> {
+        self.file_set.file_for_path(path)
     }
 
     pub fn path_for_file(&self, file: FileId) -> &VfsPath {
@@ -44,11 +44,11 @@ impl LivenessCheckResult {
         diags.extend(
             self.names
                 .iter()
-                .flat_map(|&def| source_map.name_nodes(def))
+                .flat_map(|&def| source_map.nodes_for_name(def))
                 .map(|ptr| Diagnostic::new(ptr.text_range(), DiagnosticKind::UnusedBinding)),
         );
         diags.extend(self.withs.iter().map(|&expr| {
-            let ptr = source_map.expr_node(expr).unwrap();
+            let ptr = source_map.node_for_expr(expr).unwrap();
             let node = ast::With::cast(ptr.to_node(&root)).unwrap();
             let header_range = match (node.with_token(), node.semicolon_token()) {
                 (Some(start), Some(end)) => start.text_range().cover(end.text_range()),
@@ -57,7 +57,7 @@ impl LivenessCheckResult {
             Diagnostic::new(header_range, DiagnosticKind::UnusedWith)
         }));
         diags.extend(self.rec_attrsets.iter().map(|&expr| {
-            let ptr = source_map.expr_node(expr).unwrap();
+            let ptr = source_map.node_for_expr(expr).unwrap();
             let node = ast::AttrSet::cast(ptr.to_node(&root)).unwrap();
             let range = node.rec_token().map_or_else(
                 || TextRange::empty(ptr.text_range().start()),
@@ -580,7 +580,7 @@ impl MergingEntry {
             MergingValue::Final(value) => {
                 let mut set = MergingSet::new(name_kind);
                 if let BindingValue::Expr(expr) = *value {
-                    if let Some(ptr) = ctx.source_map.expr_node(expr) {
+                    if let Some(ptr) = ctx.source_map.node_for_expr(expr) {
                         set.recover_error(ctx, expr, ptr);
                     }
                 }
@@ -633,7 +633,7 @@ impl MergingEntry {
         let mut diag = Diagnostic::new(new_def_ptr.text_range(), DiagnosticKind::DuplicatedKey);
         if let Some(prev_ptr) = self
             .name
-            .and_then(|name| ctx.source_map.name_nodes(name).next())
+            .and_then(|name| ctx.source_map.nodes_for_name(name).next())
         {
             diag = diag.with_note(
                 FileRange::new(ctx.file_id, prev_ptr.text_range()),
@@ -163,19 +163,19 @@ pub struct ModuleSourceMap {
 }
 
 impl ModuleSourceMap {
-    pub fn node_expr(&self, node: AstPtr) -> Option<ExprId> {
+    pub fn expr_for_node(&self, node: AstPtr) -> Option<ExprId> {
         self.expr_map.get(&node).copied()
     }
 
-    pub fn expr_node(&self, expr_id: ExprId) -> Option<AstPtr> {
+    pub fn node_for_expr(&self, expr_id: ExprId) -> Option<AstPtr> {
         self.expr_map_rev.get(&expr_id).cloned()
     }
 
-    pub fn node_name(&self, node: AstPtr) -> Option<NameId> {
+    pub fn name_for_node(&self, node: AstPtr) -> Option<NameId> {
         self.name_map.get(&node).copied()
     }
 
-    pub fn name_nodes(&self, name_id: NameId) -> impl Iterator<Item = AstPtr> + '_ {
+    pub fn nodes_for_name(&self, name_id: NameId) -> impl Iterator<Item = AstPtr> + '_ {
         self.name_map_rev
             .get(name_id)
             .into_iter()
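
For orientation, the ModuleSourceMap getters renamed above map lowered IDs to AST pointers and back. Below is a minimal, self-contained sketch of that shape under the new naming, with plain stand-in structs in place of the real AstPtr, ExprId, and NameId; only the naming scheme is taken from this commit.

use std::collections::HashMap;

// Illustrative stand-ins for the real `AstPtr`, `ExprId`, and `NameId`.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct AstPtr(&'static str);
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ExprId(u32);
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct NameId(u32);

#[derive(Default)]
struct SourceMapSketch {
    expr_map: HashMap<AstPtr, ExprId>,          // node -> expr
    expr_map_rev: HashMap<ExprId, AstPtr>,      // expr -> node
    name_map_rev: HashMap<NameId, Vec<AstPtr>>, // name -> nodes (one-to-many)
}

impl SourceMapSketch {
    /// node -> expr, so the getter is `expr_for_node`.
    fn expr_for_node(&self, node: &AstPtr) -> Option<ExprId> {
        self.expr_map.get(node).copied()
    }

    /// expr -> node, so the getter is `node_for_expr`.
    fn node_for_expr(&self, expr: ExprId) -> Option<AstPtr> {
        self.expr_map_rev.get(&expr).cloned()
    }

    /// name -> nodes; the reverse map is one-to-many, hence the plural `nodes_for_name`.
    fn nodes_for_name(&self, name: NameId) -> impl Iterator<Item = &AstPtr> + '_ {
        self.name_map_rev.get(&name).into_iter().flatten()
    }
}

fn main() {
    let mut map = SourceMapSketch::default();
    map.expr_map.insert(AstPtr("a + b"), ExprId(0));
    map.expr_map_rev.insert(ExprId(0), AstPtr("a + b"));
    map.name_map_rev.insert(NameId(0), vec![AstPtr("a"), AstPtr("a")]);

    // Round trip through the bijection: `node_for_expr` inverts `expr_for_node`.
    let expr = map.expr_for_node(&AstPtr("a + b")).unwrap();
    assert_eq!(map.node_for_expr(expr), Some(AstPtr("a + b")));
    assert_eq!(map.nodes_for_name(NameId(0)).count(), 2);
}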
@@ -33,7 +33,7 @@ impl ModuleScopes {
         Arc::new(this)
     }
 
-    pub fn scope_by_expr(&self, expr_id: ExprId) -> Option<ScopeId> {
+    pub fn scope_for_expr(&self, expr_id: ExprId) -> Option<ScopeId> {
         self.scope_by_expr.get(expr_id).copied()
     }
 
@@ -43,7 +43,7 @@ impl ModuleScopes {
 
     /// Resolve a name in the scope of an Expr.
     fn resolve_name(&self, expr_id: ExprId, name: &SmolStr) -> Option<ResolveResult> {
-        let scope = self.scope_by_expr(expr_id)?;
+        let scope = self.scope_for_expr(expr_id)?;
         // 1. Local defs.
         if let Some(name) = self
             .ancestors(scope)
@@ -246,7 +246,7 @@ impl NameResolution {
             .iter()
             .filter(|(_, res)| res.is_none())
             .filter_map(move |(&e, _)| {
-                let ptr = source_map.expr_node(e)?;
+                let ptr = source_map.node_for_expr(e)?;
                 let range = ptr.text_range();
                 Some(Diagnostic::new(range, DiagnosticKind::UndefinedName))
             })
@@ -314,7 +314,7 @@ mod tests {
         let scopes = db.scopes(f[0].file_id);
 
         // "innermost@pos var@pos | middle@pos | outmost@pos"
-        let scope_id = scopes.scope_by_expr(expr_id).expect("No scope data");
+        let scope_id = scopes.scope_for_expr(expr_id).expect("No scope data");
         let def_poses = scopes
             .ancestors(scope_id)
             .flat_map(|scope| match &scope.kind {
@@ -323,7 +323,7 @@ mod tests {
                     .iter()
                     .map(|(_, name)| {
                         source_map
-                            .name_nodes(*name)
+                            .nodes_for_name(*name)
                            .next()
                            .unwrap()
                            .text_range()
@@ -334,7 +334,7 @@ mod tests {
                    poses
                }
                &ScopeKind::WithExpr(expr) => {
-                    vec![source_map.expr_node(expr).unwrap().text_range().start()]
+                    vec![source_map.node_for_expr(expr).unwrap().text_range().start()]
                }
            })
            .collect::<Vec<_>>();
@@ -369,14 +369,14 @@ mod tests {
            .map(|ret| {
                match ret {
                    &ResolveResult::Definition(name) => source_map
-                        .name_nodes(name)
+                        .nodes_for_name(name)
                        .map(|ptr| ptr.to_node(&parse.syntax_node()).text_range().start())
                        .collect(),
                    ResolveResult::WithExprs(exprs) => exprs
                        .iter()
                        .map(|&e| {
                            source_map
-                                .expr_node(e)
+                                .node_for_expr(e)
                                .unwrap()
                                .to_node(&parse.syntax_node())
                                .text_range()
@@ -29,9 +29,9 @@ impl Path {
             vpath.pop()?;
         }
         vpath.push(&data.relative);
-        root.get_file_for_path(&vpath).or_else(|| {
+        root.file_for_path(&vpath).or_else(|| {
             vpath.push_segment("default.nix").unwrap();
-            root.get_file_for_path(&vpath)
+            root.file_for_path(&vpath)
         })
     }
 
@@ -109,9 +109,9 @@ fn complete_expr(
 ) -> Option<Vec<CompletionItem>> {
     let module = db.module(file_id);
     let source_map = db.source_map(file_id);
-    let expr_id = source_map.node_expr(AstPtr::new(ref_node.syntax()))?;
+    let expr_id = source_map.expr_for_node(AstPtr::new(ref_node.syntax()))?;
     let scopes = db.scopes(file_id);
-    let scope_id = scopes.scope_by_expr(expr_id)?;
+    let scope_id = scopes.scope_for_expr(expr_id)?;
 
     let prefix = SmolStr::from(ref_node.token()?.text());
     let mut items = Vec::new();
@@ -26,7 +26,7 @@ pub(crate) fn goto_definition(
     })?;
 
     let source_map = db.source_map(file_id);
-    let expr_id = source_map.node_expr(ptr)?;
+    let expr_id = source_map.expr_for_node(ptr)?;
 
     if tok.kind() == SyntaxKind::PATH {
         let module = db.module(file_id);
@@ -48,7 +48,7 @@ pub(crate) fn goto_definition(
     match name_res.get(expr_id)? {
         &ResolveResult::Definition(name) => {
             let targets = source_map
-                .name_nodes(name)
+                .nodes_for_name(name)
                 .filter_map(|ptr| {
                     let name_node = ptr.to_node(&parse.syntax_node());
                     let full_node = name_node.ancestors().find(|n| {
@@ -74,7 +74,7 @@ pub(crate) fn goto_definition(
                 // ^--^ focus
                 // ^--------^ full
                 let with_node = source_map
-                    .expr_node(with_expr)
+                    .node_for_expr(with_expr)
                     .expect("WithExprs must be valid")
                     .to_node(&parse.syntax_node());
                 let with_node = ast::With::cast(with_node).expect("WithExprs must be valid");
@@ -39,11 +39,11 @@ pub(crate) fn references(
     let name_ref = db.name_reference(file_id);
     let refs = match kind {
         DefKind::Attr(ptr) => {
-            let name = source_map.node_name(ptr)?;
+            let name = source_map.name_for_node(ptr)?;
             name_ref.name_references(name)
         }
         DefKind::With(ptr) => {
-            let expr = source_map.node_expr(ptr)?;
+            let expr = source_map.expr_for_node(ptr)?;
             name_ref.with_references(expr)
         }
     };
@@ -52,7 +52,7 @@ pub(crate) fn references(
     let refs = refs.map_or(Vec::new(), |refs| {
         refs.iter()
             .map(|&expr| {
-                let ptr = source_map.expr_node(expr).expect("Id must be valid");
+                let ptr = source_map.node_for_expr(expr).expect("Id must be valid");
                 FileRange::new(file_id, ptr.text_range())
             })
             .collect()
@@ -36,7 +36,7 @@ pub(crate) fn rename(
     let mut edits = Vec::new();
 
     // Rename definitions.
-    for ptr in source_map.name_nodes(name) {
+    for ptr in source_map.nodes_for_name(name) {
         let node = ptr.to_node(&parse.syntax_node());
         if matches!(node.parent(), Some(p) if p.kind() == SyntaxKind::INHERIT) {
             return Err("Renaming `inherit`ed variables is not supported yet".into());
@@ -55,7 +55,7 @@ pub(crate) fn rename(
     }
     for &expr in refs {
         let ptr = source_map
-            .expr_node(expr)
+            .node_for_expr(expr)
             .expect("Must be a valid Expr::Reference");
         let node = ptr.to_node(&parse.syntax_node());
         if matches!(node.parent(), Some(p) if p.kind() == SyntaxKind::INHERIT) {
@@ -115,11 +115,11 @@ fn find_name(
     let ptr = AstPtr::new(&node);
 
     let source_map = db.source_map(file_id);
-    if let Some(name) = source_map.node_name(ptr.clone()) {
+    if let Some(name) = source_map.name_for_node(ptr.clone()) {
         return Some((ptr.text_range(), name));
     }
 
-    if let Some(expr) = source_map.node_expr(ptr.clone()) {
+    if let Some(expr) = source_map.expr_for_node(ptr.clone()) {
         let nameres = db.name_resolution(file_id);
         if let Some(ResolveResult::Definition(name)) = nameres.get(expr) {
             return Some((ptr.text_range(), *name));
@@ -110,7 +110,7 @@ pub(crate) fn highlight(
 
         SyntaxKind::IDENT => match tok.parent() {
             Some(node) if node.kind() == SyntaxKind::REF => {
-                let expr = source_map.node_expr(AstPtr::new(&node))?;
+                let expr = source_map.expr_for_node(AstPtr::new(&node))?;
                 match nameres.get(expr) {
                     None => HlTag::UnresolvedRef,
                     Some(ResolveResult::Definition(def)) => HlTag::NameRef(module[*def].kind),
@@ -120,10 +120,10 @@ pub(crate) fn highlight(
             }
             Some(node) if node.kind() == SyntaxKind::NAME => {
                 let ptr = AstPtr::new(&node);
-                match source_map.node_name(ptr.clone()) {
+                match source_map.name_for_node(ptr.clone()) {
                     Some(name) => HlTag::NameDef(module[name].kind),
                     None => {
-                        match source_map.node_expr(ptr) {
+                        match source_map.expr_for_node(ptr) {
                             // `Attr`s are converted into string literals.
                             Some(expr) if matches!(&module[expr], Expr::Literal(_)) => {
                                 HlTag::AttrField
@@ -37,7 +37,7 @@ impl TestDB {
             change.change_file(file, text.to_owned().into());
         }
         let entry = file_set
-            .get_file_for_path(&"/default.nix".try_into().unwrap())
+            .file_for_path(&"/default.nix".try_into().unwrap())
             .context("Missing entry file")?;
         change.set_roots(vec![SourceRoot::new_local(file_set, Some(entry))]);
         change.apply(&mut db);
@@ -14,11 +14,11 @@ use lsp_types::{
 use text_size::{TextRange, TextSize};
 
 pub(crate) fn from_file(vfs: &Vfs, doc: &TextDocumentIdentifier) -> Result<FileId> {
-    vfs.get_file_for_uri(&doc.uri)
+    vfs.file_for_uri(&doc.uri)
 }
 
 pub(crate) fn from_pos(line_map: &LineMap, pos: Position) -> Result<TextSize> {
-    Ok(line_map.pos(pos.line, pos.character))
+    Ok(line_map.pos_for_line_col(pos.line, pos.character))
 }
 
 pub(crate) fn from_file_pos(
@@ -26,7 +26,7 @@ pub(crate) fn from_file_pos(
     params: &TextDocumentPositionParams,
 ) -> Result<(Arc<LineMap>, FilePos)> {
     let file = from_file(vfs, &params.text_document)?;
-    let line_map = vfs.file_line_map(file);
+    let line_map = vfs.line_map_for_file(file);
     let pos = from_pos(&line_map, params.position)?;
     Ok((line_map, FilePos::new(file, pos)))
 }
@@ -36,7 +36,7 @@ pub(crate) fn from_range(
     file: FileId,
     range: Range,
 ) -> Result<(Arc<LineMap>, TextRange)> {
-    let line_map = vfs.file_line_map(file);
+    let line_map = vfs.line_map_for_file(file);
     let start = from_pos(&line_map, range.start)?;
     let end = from_pos(&line_map, range.end)?;
     Ok((line_map, TextRange::new(start, end)))
@@ -44,13 +44,13 @@ pub(crate) fn from_range(
 
 pub(crate) fn to_location(vfs: &Vfs, frange: FileRange) -> Location {
     let uri = vfs.uri_for_file(frange.file_id);
-    let line_map = vfs.file_line_map(frange.file_id);
+    let line_map = vfs.line_map_for_file(frange.file_id);
     Location::new(uri, to_range(&line_map, frange.range))
 }
 
 pub(crate) fn to_range(line_map: &LineMap, range: TextRange) -> Range {
-    let (line1, col1) = line_map.line_col(range.start());
-    let (line2, col2) = line_map.line_col(range.end());
+    let (line1, col1) = line_map.line_col_for_pos(range.start());
+    let (line2, col2) = line_map.line_col_for_pos(range.end());
     Range::new(Position::new(line1, col1), Position::new(line2, col2))
 }
 
@@ -59,7 +59,7 @@ pub(crate) fn to_diagnostics(
     file: FileId,
     diags: &[Diagnostic],
 ) -> Vec<lsp::Diagnostic> {
-    let line_map = vfs.file_line_map(file);
+    let line_map = vfs.line_map_for_file(file);
     let mut ret = Vec::with_capacity(diags.len() * 2);
     for diag in diags {
         let primary_diag = lsp::Diagnostic {
@@ -180,7 +180,7 @@ pub(crate) fn to_workspace_edit(vfs: &Vfs, ws_edit: WorkspaceEdit) -> lsp::Works
             let edits = edits
                 .into_iter()
                 .map(|edit| {
-                    let line_map = vfs.file_line_map(file);
+                    let line_map = vfs.line_map_for_file(file);
                     to_text_edit(&line_map, edit)
                 })
                 .collect();
@@ -222,7 +222,7 @@ pub(crate) fn to_semantic_tokens(line_map: &LineMap, hls: &[HlRange]) -> Vec<Sem
         }
 
         let mut start = 0;
-        let mut end = line_map.line_end_col(line);
+        let mut end = line_map.end_col_for_line(line);
         if line == range.start.line {
             start = start.max(range.start.character);
         }
@@ -105,7 +105,7 @@ pub(crate) fn selection_range(
     params: SelectionRangeParams,
 ) -> Result<Option<Vec<SelectionRange>>> {
     let file = convert::from_file(&snap.vfs(), &params.text_document)?;
-    let line_map = snap.vfs().file_line_map(file);
+    let line_map = snap.vfs().line_map_for_file(file);
     let ret = params
         .positions
         .iter()
@@ -163,7 +163,7 @@ pub(crate) fn semantic_token_full(
     params: SemanticTokensParams,
 ) -> Result<Option<SemanticTokensResult>> {
     let file = convert::from_file(&snap.vfs(), &params.text_document)?;
-    let line_map = snap.vfs().file_line_map(file);
+    let line_map = snap.vfs().line_map_for_file(file);
     let hls = snap.analysis.syntax_highlight(file, None)?;
     let toks = convert::to_semantic_tokens(&line_map, &hls);
     Ok(Some(SemanticTokensResult::Tokens(SemanticTokens {
@@ -62,7 +62,7 @@ impl Vfs {
             .unwrap_or_default();
         let text = <Arc<str>>::from(text);
         let line_map = Arc::new(line_map);
-        match self.local_file_set.get_file_for_path(&path) {
+        match self.local_file_set.file_for_path(&path) {
             Some(file) => {
                 self.files[file.0 as usize] = (text.clone(), line_map);
                 self.change.change_file(file, text);
@@ -82,10 +82,10 @@ impl Vfs {
         };
     }
 
-    pub fn get_file_for_uri(&self, uri: &Url) -> Result<FileId> {
+    pub fn file_for_uri(&self, uri: &Url) -> Result<FileId> {
         let vpath = self.uri_to_vpath(uri)?;
         self.local_file_set
-            .get_file_for_path(&vpath)
+            .file_for_path(&vpath)
             .ok_or_else(|| format!("URI not found: {}", uri).into())
     }
 
@@ -103,7 +103,7 @@ impl Vfs {
         // TODO: Configurable.
        let entry = ["/flake.nix", "/default.nix"].iter().find_map(|&path| {
            let path = VfsPath::new(path).unwrap();
-            self.local_file_set.get_file_for_path(&path)
+            self.local_file_set.file_for_path(&path)
        });
        change.set_roots(vec![SourceRoot::new_local(
            self.local_file_set.clone(),
@@ -113,7 +113,7 @@ impl Vfs {
        change
    }

-    pub fn file_line_map(&self, file_id: FileId) -> Arc<LineMap> {
+    pub fn line_map_for_file(&self, file_id: FileId) -> Arc<LineMap> {
        self.files[file_id.0 as usize].1.clone()
    }
 }
@@ -187,7 +187,7 @@ impl LineMap {
         self.line_starts.len() as u32 - 1
     }
 
-    pub fn pos(&self, line: u32, mut col: u32) -> TextSize {
+    pub fn pos_for_line_col(&self, line: u32, mut col: u32) -> TextSize {
         let pos = self.line_starts.get(line as usize).copied().unwrap_or(0);
         if let Some(diffs) = self.char_diffs.get(&line) {
             for &(char_pos, diff) in diffs {
@@ -199,7 +199,7 @@ impl LineMap {
         (pos + col).into()
     }
 
-    pub fn line_col(&self, pos: TextSize) -> (u32, u32) {
+    pub fn line_col_for_pos(&self, pos: TextSize) -> (u32, u32) {
         let pos = u32::from(pos);
         let line = self
             .line_starts
@@ -216,7 +216,7 @@ impl LineMap {
         (line as u32, col)
     }
 
-    pub fn line_end_col(&self, line: u32) -> u32 {
+    pub fn end_col_for_line(&self, line: u32) -> u32 {
         let mut len = self.line_starts[line as usize + 1] - self.line_starts[line as usize];
         if let Some(diffs) = self.char_diffs.get(&line) {
             len -= diffs.iter().map(|&(_, diff)| diff as u32).sum::<u32>();
@@ -251,8 +251,8 @@ mod tests {
             (12, 2, 0),
         ];
         for (pos, line, col) in mapping {
-            assert_eq!(map.line_col(pos.into()), (line, col));
-            assert_eq!(map.pos(line, col), pos.into());
+            assert_eq!(map.line_col_for_pos(pos.into()), (line, col));
+            assert_eq!(map.pos_for_line_col(line, col), pos.into());
         }
     }
 
@@ -291,8 +291,8 @@ mod tests {
             (14, 0, 9),
         ];
         for (pos, line, col) in mapping {
-            assert_eq!(map.line_col(pos.into()), (line, col));
-            assert_eq!(map.pos(line, col), pos.into());
+            assert_eq!(map.line_col_for_pos(pos.into()), (line, col));
+            assert_eq!(map.pos_for_line_col(line, col), pos.into());
         }
     }
 
@@ -312,9 +312,9 @@ mod tests {
     fn line_end_col() {
         // See comments in `line_map_unicode`.
         let (_, map) = LineMap::normalize("hello\nAßℝ💣\n\nend".into()).unwrap();
-        assert_eq!(map.line_end_col(0), 5);
-        assert_eq!(map.line_end_col(1), 5);
-        assert_eq!(map.line_end_col(2), 0);
-        assert_eq!(map.line_end_col(3), 3);
+        assert_eq!(map.end_col_for_line(0), 5);
+        assert_eq!(map.end_col_for_line(1), 5);
+        assert_eq!(map.end_col_for_line(2), 0);
+        assert_eq!(map.end_col_for_line(3), 3);
     }
 }
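
The LineMap renames follow the same pattern: pos_for_line_col converts (line, col) to an offset and line_col_for_pos converts an offset back. A simplified, self-contained sketch assuming plain ASCII input; unlike the real LineMap it skips the char_diffs adjustment for multi-byte characters:

// A simplified line map exposing the renamed conversion pair; each getter is
// named after what it returns. This is an illustration, not the crate's code.
struct SimpleLineMap {
    line_starts: Vec<u32>, // byte offset of the start of each line
}

impl SimpleLineMap {
    fn new(text: &str) -> Self {
        let mut line_starts = vec![0u32];
        for (i, b) in text.bytes().enumerate() {
            if b == b'\n' {
                line_starts.push(i as u32 + 1);
            }
        }
        Self { line_starts }
    }

    /// (line, col) -> pos, hence `pos_for_line_col`.
    fn pos_for_line_col(&self, line: u32, col: u32) -> u32 {
        self.line_starts.get(line as usize).copied().unwrap_or(0) + col
    }

    /// pos -> (line, col), hence `line_col_for_pos`.
    fn line_col_for_pos(&self, pos: u32) -> (u32, u32) {
        // Last line whose start offset is <= pos.
        let line = self
            .line_starts
            .partition_point(|&start| start <= pos)
            .saturating_sub(1);
        (line as u32, pos - self.line_starts[line])
    }
}

fn main() {
    let map = SimpleLineMap::new("hello\nworld\n");
    assert_eq!(map.line_col_for_pos(7), (1, 1));
    assert_eq!(map.pos_for_line_col(1, 1), 7);
}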