1
1
mirror of https://github.com/tweag/nickel.git synced 2024-10-06 08:07:37 +03:00

Move goto away from the linearizer (#1610)

* Move goto away from the linearizer

* Add regression test for 881

* Add tests, add ident lookup

* Review comments

* Better name
This commit is contained in:
jneem 2023-09-19 15:30:56 -05:00 committed by GitHub
parent b02961324d
commit 7937e5a175
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
18 changed files with 342 additions and 130 deletions

View File

@ -1934,8 +1934,8 @@ impl IntoDiagnostics<FileId> for TypecheckError {
};
let note1 = if let TypeF::Record(rrows) = &expd.typ {
match rrows.row_find_path(path.as_slice()) {
Some(ty) => mk_expected_row_msg(&field, ty),
match rrows.find_path(path.as_slice()) {
Some(row) => mk_expected_row_msg(&field, row.typ),
None => mk_expected_msg(&expd),
}
} else {
@ -1943,8 +1943,8 @@ impl IntoDiagnostics<FileId> for TypecheckError {
};
let note2 = if let TypeF::Record(rrows) = &actual.typ {
match rrows.row_find_path(path.as_slice()) {
Some(ty) => mk_inferred_row_msg(&field, ty),
match rrows.find_path(path.as_slice()) {
Some(row) => mk_inferred_row_msg(&field, row.typ),
None => mk_inferred_msg(&actual),
}
} else {

View File

@ -852,21 +852,21 @@ impl RecordRows {
/// - self: ` {a : {b : Number }}`
/// - path: `["a", "b"]`
/// - result: `Some(Number)`
pub fn row_find_path(&self, path: &[Ident]) -> Option<Type> {
pub fn find_path(&self, path: &[Ident]) -> Option<RecordRowF<&Type>> {
if path.is_empty() {
return None;
}
let next = self.iter().find_map(|item| match item {
RecordRowsIteratorItem::Row(row) if row.id.ident() == path[0] => Some(row.typ.clone()),
RecordRowsIteratorItem::Row(row) if row.id.ident() == path[0] => Some(row.clone()),
_ => None,
});
if path.len() == 1 {
next
} else {
match next.map(|ty| ty.typ) {
Some(TypeF::Record(rrows)) => rrows.row_find_path(&path[1..]),
match next.map(|row| &row.typ.typ) {
Some(TypeF::Record(rrows)) => rrows.find_path(&path[1..]),
_ => None,
}
}

View File

@ -4,7 +4,8 @@ mod output;
pub use jsonrpc::Server;
use log::error;
use lsp_types::{
CompletionParams, DocumentFormattingParams, GotoDefinitionParams, HoverParams, Url,
CompletionParams, DocumentFormattingParams, GotoDefinitionParams, HoverParams, ReferenceParams,
Url,
};
pub use output::LspDebug;
use serde::Deserialize;
@ -30,6 +31,7 @@ pub struct TestFile {
#[serde(tag = "type")]
pub enum Request {
GotoDefinition(GotoDefinitionParams),
References(ReferenceParams),
Completion(CompletionParams),
Formatting(DocumentFormattingParams),
Hover(HoverParams),

View File

@ -48,6 +48,12 @@ impl<T: LspDebug, I: Iterator<Item = T> + Clone> LspDebug for Iter<I> {
}
}
// Blanket debug-formatting for vectors: delegate to the generic `Iter`
// wrapper so any `Vec` of debuggable items prints as a bracketed list.
impl<T: LspDebug> LspDebug for Vec<T> {
    fn debug(&self, w: impl Write) -> std::io::Result<()> {
        Iter(self.iter()).debug(w)
    }
}
impl LspDebug for lsp_types::Range {
fn debug(&self, mut w: impl Write) -> std::io::Result<()> {
write!(
@ -116,12 +122,6 @@ impl LspDebug for lsp_types::TextEdit {
}
}
impl LspDebug for Vec<lsp_types::TextEdit> {
fn debug(&self, w: impl Write) -> std::io::Result<()> {
Iter(self.iter()).debug(w)
}
}
impl LspDebug for lsp_types::Hover {
fn debug(&self, mut w: impl Write) -> std::io::Result<()> {
write!(w, "<{}>", self.range.debug_str())?;

View File

@ -3,6 +3,7 @@ use std::ops::Range;
use codespan::{FileId, Files};
use codespan_reporting::diagnostic::{self, Diagnostic};
use lsp_types::{NumberOrString, Position};
use nickel_lang_core::position::RawSpan;
/// Convert [codespan_reporting::diagnostic::Diagnostic] into a list of another type
/// Diagnostics tend to contain a list of labels pointing to errors in the code which
@ -13,8 +14,16 @@ pub trait DiagnosticCompat: Sized {
/// Determine the position of a [codespan_reporting::diagnostic::Label] by looking it up
/// in the file cache
pub trait LocationCompat {
pub trait LocationCompat: Sized {
fn from_codespan(file_id: &FileId, range: &Range<usize>, files: &Files<String>) -> Self;
fn from_span(span: &RawSpan, files: &Files<String>) -> Self {
Self::from_codespan(
&span.src_id,
&(span.start.to_usize()..span.end.to_usize()),
files,
)
}
}
impl LocationCompat for lsp_types::Range {
@ -45,6 +54,15 @@ impl LocationCompat for lsp_types::Range {
}
}
impl LocationCompat for lsp_types::Location {
    /// Build an LSP `Location` (file URI + range) from a codespan file id
    /// and byte range, by converting the range and looking the file's name
    /// up in the cache.
    ///
    /// NOTE(review): `Url::from_file_path(..).unwrap()` panics if the cached
    /// file name is not an absolute path — confirm the file cache only ever
    /// stores absolute paths for on-disk files.
    fn from_codespan(file_id: &FileId, range: &Range<usize>, files: &Files<String>) -> Self {
        lsp_types::Location {
            uri: lsp_types::Url::from_file_path(files.name(*file_id)).unwrap(),
            range: lsp_types::Range::from_codespan(file_id, range, files),
        }
    }
}
impl DiagnosticCompat for lsp_types::Diagnostic {
fn from_codespan(diagnostic: Diagnostic<FileId>, files: &mut Files<String>) -> Vec<Self> {
let severity = Some(match diagnostic.severity {

View File

@ -30,7 +30,20 @@ impl FieldHaver {
.get(&id)
.map(|field| FieldContent::RecordField(field.clone())),
FieldHaver::Dict(ty) => Some(FieldContent::Type(ty.clone())),
FieldHaver::RecordType(rows) => rows.row_find_path(&[id]).map(FieldContent::Type),
FieldHaver::RecordType(rows) => rows
.find_path(&[id])
.map(|row| FieldContent::Type(row.typ.clone())),
}
}
/// Look up where the field `id` is defined in this record-like value.
///
/// Returns the identifier (carrying its source position) of the matching
/// field: the key stored in a record term's field map, or the row
/// identifier of a record type. Dicts have no per-field definition site,
/// so they always yield `None`.
pub fn get_definition_pos(&self, id: Ident) -> Option<LocIdent> {
    match self {
        // `get_key_value` returns the *stored* key, which (unlike the
        // probe `id`) carries the position where the field was defined.
        FieldHaver::RecordTerm(data) => data
            .fields
            .get_key_value(&id)
            .map(|(id, _field)| (*id).into()),
        FieldHaver::RecordType(rows) => rows.find_path(&[id]).map(|r| r.id.into()),
        FieldHaver::Dict(_) => None,
    }
}
@ -197,7 +210,7 @@ impl<'a> FieldResolver<'a> {
/// This is a best-effort thing; it doesn't do full evaluation but it has some reasonable
/// heuristics. For example, it knows that the fields defined on a merge of two records
/// are the fields defined on either record.
fn resolve_term(&self, rt: &RichTerm) -> Vec<FieldHaver> {
pub fn resolve_term(&self, rt: &RichTerm) -> Vec<FieldHaver> {
let term_fields = match rt.term.as_ref() {
Term::Record(data) | Term::RecRecord(data, ..) => {
vec![FieldHaver::RecordTerm(data.clone())]

View File

@ -79,6 +79,17 @@ impl LinRegistry {
self.usage_lookups.get(&file)?.def(ident)
}
/// Iterate over all recorded usages of `ident`.
///
/// The nested `inner` function exists so that `?` can be used on the two
/// `Option`-returning lookups (the ident's position, then the per-file
/// usage table). The outer method flattens the `Option` away, so callers
/// simply get an empty iterator when either lookup fails.
pub fn get_usages(&self, ident: &LocIdent) -> impl Iterator<Item = &LocIdent> {
    fn inner<'a>(
        slf: &'a LinRegistry,
        ident: &LocIdent,
    ) -> Option<impl Iterator<Item = &'a LocIdent>> {
        let file = ident.pos.as_opt_ref()?.src_id;
        Some(slf.usage_lookups.get(&file)?.usages(ident))
    }
    inner(self, ident).into_iter().flatten()
}
pub fn get_env(&self, rt: &RichTerm) -> Option<&crate::usage::Environment> {
let file = rt.pos.as_opt_ref()?.src_id;
self.usage_lookups.get(&file)?.env(rt)

View File

@ -3,10 +3,10 @@ use std::ops::Range;
use codespan::ByteIndex;
use nickel_lang_core::{
position::TermPos,
term::{RichTerm, Traverse, TraverseControl},
term::{RichTerm, Term, Traverse, TraverseControl},
};
use crate::term::RichTermPtr;
use crate::{identifier::LocIdent, term::RichTermPtr};
/// Turn a collection of "nested" ranges into a collection of disjoint ranges.
///
@ -102,16 +102,18 @@ fn make_disjoint<T: Clone>(mut all_ranges: Vec<(Range<u32>, T)>) -> Vec<(Range<u
#[derive(Clone, Debug)]
pub struct PositionLookup {
// The intervals here are sorted and disjoint.
ranges: Vec<(Range<u32>, RichTermPtr)>,
term_ranges: Vec<(Range<u32>, RichTermPtr)>,
ident_ranges: Vec<(Range<u32>, LocIdent)>,
}
impl PositionLookup {
/// Create a position lookup table for looking up subterms of `rt` based on their positions.
pub fn new(rt: &RichTerm) -> Self {
let mut all_ranges = Vec::new();
let mut all_term_ranges = Vec::new();
let mut idents = Vec::new();
let mut fun = |term: &RichTerm| {
if let TermPos::Original(pos) = &term.pos {
all_ranges.push((
all_term_ranges.push((
Range {
start: pos.start.0,
end: pos.end.0,
@ -119,13 +121,44 @@ impl PositionLookup {
RichTermPtr(term.clone()),
));
}
match term.as_ref() {
Term::Fun(id, _) | Term::Let(id, _, _, _) => idents.push(*id),
Term::FunPattern(id, pat, _) | Term::LetPattern(id, pat, _, _) => {
let ids = pat.matches.iter().flat_map(|m| {
m.to_flattened_bindings()
.into_iter()
.map(|(_path, id, _)| id)
});
idents.extend(ids.chain(*id).chain(pat.rest))
}
Term::Var(id) => idents.push(*id),
Term::Record(data) | Term::RecRecord(data, _, _) => {
idents.extend(data.fields.keys().cloned());
}
Term::Match { cases, .. } => idents.extend(cases.keys().cloned()),
_ => {}
}
TraverseControl::<()>::Continue
};
rt.traverse_ref(&mut fun);
let mut ident_ranges: Vec<_> = idents
.into_iter()
.filter_map(|id| {
id.pos
.into_opt()
.map(|span| (span.start.0..span.end.0, id.into()))
})
.collect();
// Ident ranges had better be disjoint, so we can just sort by the start position.
ident_ranges.sort_by_key(|(range, _id)| range.start);
ident_ranges.dedup();
PositionLookup {
ranges: make_disjoint(all_ranges),
term_ranges: make_disjoint(all_term_ranges),
ident_ranges,
}
}
@ -134,19 +167,27 @@ impl PositionLookup {
/// Note that some positions (for example, positions belonging to top-level comments)
/// may not be enclosed by any term.
pub fn get(&self, index: ByteIndex) -> Option<&RichTerm> {
self.ranges
.binary_search_by(|(range, _term)| {
if range.end <= index.0 {
std::cmp::Ordering::Less
} else if range.start > index.0 {
std::cmp::Ordering::Greater
} else {
std::cmp::Ordering::Equal
}
})
.ok()
.map(|idx| &self.ranges[idx].1 .0)
search(&self.term_ranges, index).map(|rt| &rt.0)
}
/// Returns the ident at the given position, if there is one.
pub fn get_ident(&self, index: ByteIndex) -> Option<LocIdent> {
search(&self.ident_ranges, index).cloned()
}
}
/// Find the payload whose range contains `index`.
///
/// `vec` must hold ranges that are sorted by start position and mutually
/// disjoint, so at most one range can contain any given byte index.
fn search<T>(vec: &[(Range<u32>, T)], index: ByteIndex) -> Option<&T> {
    let target = index.0;
    // The first entry whose range ends strictly after `target`; because the
    // ranges are sorted and disjoint, it is the only possible candidate.
    let candidate = vec.partition_point(|(range, _payload)| range.end <= target);
    vec.get(candidate)
        .filter(|(range, _payload)| range.start <= target)
        .map(|(_range, payload)| payload)
}
#[cfg(test)]

View File

@ -1,20 +1,51 @@
use codespan::ByteIndex;
use log::debug;
use lsp_server::{RequestId, Response, ResponseError};
use lsp_types::{
GotoDefinitionParams, GotoDefinitionResponse, Location, Range, ReferenceParams, Url,
};
use nickel_lang_core::position::RawSpan;
use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Location, ReferenceParams};
use nickel_lang_core::term::{RichTerm, Term, UnaryOp};
use serde_json::Value;
use crate::{
cache::CacheExt,
diagnostic::LocationCompat,
linearization::interface::{TermKind, UsageState},
cache::CacheExt, diagnostic::LocationCompat, field_walker::FieldResolver, identifier::LocIdent,
server::Server,
trace::{Enrich, Trace},
};
/// Collect the definition site(s) of the symbol denoted by `term`.
///
/// - For a variable, look up its definition in the usage registry.
/// - For a static field access (`parent.field`), resolve the possible
///   record values of `parent` and collect the field's definition
///   position in each one (e.g. a merge of records can contribute
///   several candidate parents).
/// - Any other term has no notion of "definition" and yields nothing.
fn get_defs(term: &RichTerm, server: &Server) -> Vec<LocIdent> {
    match term.as_ref() {
        Term::Var(id) => {
            if let Some(loc) = server
                .lin_registry
                .get_def(&(*id).into())
                .map(|def| def.ident)
            {
                vec![loc]
            } else {
                vec![]
            }
        }
        Term::Op1(UnaryOp::StaticAccess(id), parent) => {
            let resolver = FieldResolver::new(server);
            let parents = resolver.resolve_term(parent);
            parents
                .iter()
                .filter_map(|parent| parent.get_definition_pos(id.ident()))
                .collect()
        }
        _ => vec![],
    }
}
/// Convert identifiers to LSP `Location`s, sorted and deduplicated.
///
/// Identifiers with no known source position are silently dropped.
fn ids_to_locations(ids: impl IntoIterator<Item = LocIdent>, server: &Server) -> Vec<Location> {
    let mut spans: Vec<_> = ids.into_iter().filter_map(|id| id.pos.into_opt()).collect();
    // The sort order of our response is a little arbitrary. But we want to deduplicate, and we
    // don't want the response to be random.
    spans.sort_by_key(|span| (server.cache.files().name(span.src_id), span.start, span.end));
    spans.dedup();
    spans
        .iter()
        .map(|loc| Location::from_span(loc, server.cache.files()))
        .collect()
}
pub fn handle_to_definition(
params: GotoDefinitionParams,
id: RequestId,
@ -23,101 +54,58 @@ pub fn handle_to_definition(
let pos = server
.cache
.position(&params.text_document_position_params)?;
let linearization = server.lin_cache_get(&pos.src_id)?;
Trace::enrich(&id, linearization);
let locations = server
.lookup_term_by_position(pos)?
.map(|term| ids_to_locations(get_defs(term, server), server))
.unwrap_or_default();
let Some(item) = linearization.item_at(pos) else {
server.reply(Response::new_ok(id, Value::Null));
return Ok(());
};
debug!("found referencing item: {:?}", item);
let location = match item.kind {
TermKind::Usage(UsageState::Resolved(usage_id)) => {
let definition = linearization
.get_item_with_reg(usage_id, &server.lin_registry)
.unwrap();
if server.cache.is_stdlib_module(definition.id.file_id) {
// The standard library files are embedded in the executable,
// so we can't possibly go to their definition on disk.
server.reply(Response::new_ok(id, Value::Null));
return Ok(());
}
let RawSpan {
start: ByteIndex(start),
end: ByteIndex(end),
src_id,
} = definition.pos.unwrap();
let location = Location {
uri: Url::from_file_path(server.cache.name(src_id)).unwrap(),
range: Range::from_codespan(
&src_id,
&(start as usize..end as usize),
server.cache.files(),
),
};
Some(location)
}
_ => None,
};
debug!("referenced location: {:?}", location);
if let Some(response) = location.map(GotoDefinitionResponse::Scalar) {
server.reply(Response::new_ok(id, response));
let response = if locations.is_empty() {
Response::new_ok(id, Value::Null)
} else if locations.len() == 1 {
Response::new_ok(id, GotoDefinitionResponse::Scalar(locations[0].clone()))
} else {
server.reply(Response::new_ok(id, Value::Null));
}
Response::new_ok(id, GotoDefinitionResponse::Array(locations))
};
server.reply(response);
Ok(())
}
pub fn handle_to_usages(
pub fn handle_references(
params: ReferenceParams,
id: RequestId,
server: &mut Server,
) -> Result<(), ResponseError> {
let pos = server.cache.position(&params.text_document_position)?;
let linearization = server.lin_cache_get(&pos.src_id)?;
let Some(item) = linearization.item_at(pos) else {
// The "references" of a symbol are all the usages of its definitions,
// so first find the definitions and then find their usages.
let mut def_locs = server
.lookup_term_by_position(pos)?
.map(|term| get_defs(term, server))
.unwrap_or_default();
// Maybe the position is pointing straight at the definition already.
// In that case, def_locs won't have the definition yet; so add it.
def_locs.extend(server.lookup_ident_by_position(pos)?);
let mut usages: Vec<_> = def_locs
.iter()
.flat_map(|id| server.lin_registry.get_usages(id))
.cloned()
.collect();
if params.context.include_declaration {
usages.extend(def_locs.iter().cloned());
}
let locations = ids_to_locations(usages, server);
if locations.is_empty() {
server.reply(Response::new_ok(id, Value::Null));
return Ok(());
};
debug!("found referencing item: {:?}", item);
let locations: Option<Vec<Location>> = match &item.kind {
TermKind::Declaration { usages, .. } | TermKind::RecordField { usages, .. } => Some(
usages
.iter()
.filter_map(|reference_id| {
linearization
.get_item_with_reg(*reference_id, &server.lin_registry)
.unwrap()
.pos
.as_opt_ref()
.map(|RawSpan { start, end, src_id }| Location {
uri: Url::from_file_path(server.cache.name(*src_id)).unwrap(),
range: Range::from_codespan(
src_id,
&((*start).into()..(*end).into()),
server.cache.files(),
),
})
})
.collect(),
),
_ => None,
};
debug!("referencing locations: {:?}", locations);
if let Some(response) = locations {
server.reply(Response::new_ok(id, response));
} else {
server.reply(Response::new_ok(id, Value::Null));
server.reply(Response::new_ok(id, locations));
}
Ok(())
}

View File

@ -231,7 +231,7 @@ impl Server {
References::METHOD => {
debug!("handle goto defnition");
let params: ReferenceParams = serde_json::from_value(req.params).unwrap();
goto::handle_to_usages(params, req.id.clone(), self)
goto::handle_references(params, req.id.clone(), self)
}
Completion::METHOD => {
@ -289,6 +289,22 @@ impl Server {
.get(pos.index))
}
/// Return the identifier at position `pos`, if one sits exactly at that
/// byte offset in the file's identifier lookup table.
///
/// # Errors
///
/// Returns a `ParseError` response error when the file has not been
/// parsed or cached yet, since no lookup table exists for it.
pub fn lookup_ident_by_position(
    &self,
    pos: RawPos,
) -> Result<Option<crate::identifier::LocIdent>, ResponseError> {
    Ok(self
        .lin_registry
        .position_lookups
        .get(&pos.src_id)
        .ok_or_else(|| ResponseError {
            data: None,
            message: "File has not yet been parsed or cached.".to_owned(),
            code: ErrorCode::ParseError as i32,
        })?
        .get_ident(pos.index))
}
pub fn issue_diagnostics(&mut self, file_id: FileId, diagnostics: Vec<Diagnostic<FileId>>) {
let Some(uri) = self.file_uris.get(&file_id).cloned() else {
warn!("tried to issue diagnostics for unknown file id {file_id:?}");

View File

@ -102,7 +102,6 @@ impl UsageLookup {
}
/// Return all the usages of `ident`.
#[allow(dead_code)] // Not used yet...
pub fn usages(&self, ident: &LocIdent) -> impl Iterator<Item = &LocIdent> {
self.usage_table
.get(ident)

View File

@ -17,3 +17,39 @@ in
### type = "GotoDefinition"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 3, character = 10 }
###
### [[request]]
### type = "References"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 1, character = 3 }
### context = { includeDeclaration = true }
###
### [[request]]
### type = "References"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 1, character = 3 }
### context = { includeDeclaration = false }
###
### [[request]]
### type = "References"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 3, character = 3 }
### context = { includeDeclaration = true }
###
### [[request]]
### type = "References"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 3, character = 3 }
### context = { includeDeclaration = false }
###
### [[request]]
### type = "References"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 3, character = 9 }
### context = { includeDeclaration = true }
###
### [[request]]
### type = "References"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 3, character = 9 }
### context = { includeDeclaration = false }

View File

@ -0,0 +1,24 @@
### /goto.ncl
let record = { foo = 1, bar = { baz = "hi" } } in
let toMerge = { bar = { baz = "hi", quux = "bye" } } in
let Contract = { baz | String } in
let another | Contract = (record & toMerge).bar in
[
another.baz,
another.quux,
(toMerge.bar | Contract).baz,
]
### [[request]]
### type = "GotoDefinition"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 5, character = 12 }
###
### [[request]]
### type = "GotoDefinition"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 6, character = 12 }
###
### [[request]]
### type = "GotoDefinition"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 7, character = 29 }

View File

@ -0,0 +1,38 @@
### /goto.ncl
let person | { name: String } = null in
# The "person" on the rhs here refers to the person in the line above
let person = person in
person
### [[request]]
### type = "GotoDefinition"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 2, character = 16 }
###
### [[request]]
### type = "GotoDefinition"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 3, character = 3 }
###
### [[request]]
### type = "References"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 2, character = 16 }
### context = { includeDeclaration = true }
###
### [[request]]
### type = "References"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 2, character = 16 }
### context = { includeDeclaration = false }
###
### [[request]]
### type = "References"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 3, character = 3 }
### context = { includeDeclaration = true }
###
### [[request]]
### type = "References"
### textDocument.uri = "file:///goto.ncl"
### position = { line = 3, character = 3 }
### context = { includeDeclaration = false }

View File

@ -2,7 +2,7 @@ use std::collections::{hash_map::Entry, HashMap};
use assert_cmd::cargo::CommandCargoExt;
use lsp_types::request::{
Completion, Formatting, GotoDefinition, HoverRequest, Request as LspRequest,
Completion, Formatting, GotoDefinition, HoverRequest, References, Request as LspRequest,
};
use nickel_lang_utils::project_root::project_root;
use test_generator::test_resources;
@ -39,6 +39,7 @@ impl TestHarness {
Request::Completion(c) => self.request::<Completion>(c),
Request::Formatting(f) => self.request::<Formatting>(f),
Request::Hover(h) => self.request::<HoverRequest>(h),
Request::References(r) => self.request::<References>(r),
}
}

View File

@ -5,4 +5,10 @@ expression: output
file:///goto.ncl:1:2-1:5
file:///goto.ncl:1:2-1:5
file:///goto.ncl:1:2-1:5
[file:///goto.ncl:1:2-1:5, file:///goto.ncl:3:8-3:11]
[file:///goto.ncl:3:8-3:11]
[file:///goto.ncl:3:2-3:5]
None
[file:///goto.ncl:1:2-1:5, file:///goto.ncl:3:8-3:11]
[file:///goto.ncl:3:8-3:11]

View File

@ -0,0 +1,8 @@
---
source: lsp/nls/tests/main.rs
expression: output
---
[file:///goto.ncl:0:32-0:35, file:///goto.ncl:1:24-1:27, file:///goto.ncl:2:17-2:20]
file:///goto.ncl:1:36-1:40
[file:///goto.ncl:1:24-1:27, file:///goto.ncl:2:17-2:20]

View File

@ -0,0 +1,11 @@
---
source: lsp/nls/tests/main.rs
expression: output
---
file:///goto.ncl:0:4-0:10
file:///goto.ncl:2:4-2:10
[file:///goto.ncl:0:4-0:10, file:///goto.ncl:2:13-2:19]
[file:///goto.ncl:2:13-2:19]
[file:///goto.ncl:2:4-2:10, file:///goto.ncl:3:0-3:6]
[file:///goto.ncl:3:0-3:6]