mirror of https://github.com/tweag/nickel.git synced 2024-10-03 22:57:11 +03:00

Thunks for resolved imports (#2052)

* Closurize resolved imports so they detect recursion

* Add a test

* Update core/src/cache.rs

Co-authored-by: Yann Hamdaoui <yann.hamdaoui@tweag.io>

---------

Co-authored-by: Yann Hamdaoui <yann.hamdaoui@tweag.io>
Authored by jneem on 2024-09-26 17:59:49 +07:00, committed by GitHub
parent c50647d281
commit a285d9e923
3 changed files with 56 additions and 1 deletion
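
For context, here is the situation this change targets, sketched as a minimal Nickel program. The file name recursive.ncl comes from the new test added at the end of this diff; since that test is the only test file added and it imports "./recursive.ncl", the fixture presumably imports itself. Its exact contents are an assumption, not something shown elsewhere in the diff. Before this change such an import could presumably recurse unchecked; with resolved imports closurized, the evaluation cache's infinite-recursion detection applies to imports too and evaluation fails with EvalError::InfiniteRecursion.

# recursive.ncl (assumed content, mirroring the new test at the end of this diff):
# a file whose value is obtained by importing itself.
let x = import "./recursive.ncl" in x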

core/src/cache.rs

@@ -1,5 +1,6 @@
//! Source cache.
use crate::closurize::Closurize as _;
use crate::error::{Error, ImportError, ParseError, ParseErrors, TypecheckError};
use crate::eval::cache::Cache as EvalCache;
use crate::eval::Closure;
@@ -231,6 +232,8 @@ pub enum EntryState {
    Transforming,
    /// The entry and its transitive imports have been transformed.
    Transformed,
    /// The entry has been closurized.
    Closurized,
}

pub enum EntryOrigin {}
@@ -811,6 +814,47 @@ impl Cache {
        }
    }

    /// Replace a cache entry by a closurized version of itself. If it contains imports,
    /// closurize them recursively.
    ///
    /// Closurization is not required before evaluation, but it has two benefits:
    /// - the closurized term uses the evaluation cache, so if it is imported in multiple
    ///   places then they will share a cache
    /// - the eval cache's built-in mechanism for preventing infinite recursion will also
    ///   apply to recursive imports.
    ///
    /// The main disadvantage of closurization is that it makes the AST less useful. You
    /// wouldn't want to closurize before pretty-printing, for example.
    pub fn closurize<C: eval::cache::Cache>(
        &mut self,
        file_id: FileId,
        cache: &mut C,
    ) -> Result<CacheOp<()>, CacheError<()>> {
        match self.entry_state(file_id) {
            Some(state) if state >= EntryState::Closurized => Ok(CacheOp::Cached(())),
            Some(state) if state >= EntryState::Parsed => {
                let cached_term = self.terms.remove(&file_id).unwrap();
                let term = cached_term.term.closurize(cache, eval::Environment::new());
                self.terms.insert(
                    file_id,
                    TermEntry {
                        term,
                        state: EntryState::Closurized,
                        ..cached_term
                    },
                );
                if let Some(imports) = self.imports.get(&file_id).cloned() {
                    for f in imports.into_iter() {
                        self.closurize(f, cache)?;
                    }
                }
                Ok(CacheOp::Done(()))
            }
            _ => Err(CacheError::NotParsed),
        }
    }

    /// Resolve every import of a cache entry and update the entry's state accordingly, or
    /// do nothing if its imports have already been resolved. Requires that the
    /// corresponding source has been parsed.
@@ -895,7 +939,8 @@ impl Cache {
                | EntryState::Typechecking
                | EntryState::Typechecked
                | EntryState::Transforming
-               | EntryState::Transformed,
+               | EntryState::Transformed
+               | EntryState::Closurized,
            ) => Ok(CacheOp::Cached((Vec::new(), Vec::new()))),
            None => Err(CacheError::NotParsed),
        }
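
To illustrate the "imported in multiple places" benefit from the Cache::closurize doc comment above, here is a hedged Nickel sketch; main.ncl and lib.ncl are made-up names, not files from this commit. The same file is imported twice, and because the resolved import is closurized into the evaluation cache, both occurrences should share the same cached closure rather than carrying independent copies of the imported term.

# main.ncl (hypothetical): lib.ncl is imported in two places; after closurization
# both imports refer to the same evaluation-cache entry for lib.ncl.
{
  a = (import "./lib.ncl").value,
  b = (import "./lib.ncl").value,
}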

View File

@@ -1022,6 +1022,11 @@ impl<C: Cache> VirtualMachine<ImportCache, C> {
            type_ctxt,
        } = self.import_resolver.prepare_stdlib(&mut self.cache)?;
        self.import_resolver.prepare(main_id, &type_ctxt)?;
        // Unwrap: closurization only fails if the input wasn't parsed, and we just
        // parsed it.
        self.import_resolver
            .closurize(main_id, &mut self.cache)
            .unwrap();
        self.initial_env = eval_env;
        Ok(self.import_resolver().get(main_id).unwrap())
    }

View File

@@ -0,0 +1,5 @@
# test.type = 'error'
#
# [test.metadata]
# error = 'EvalError::InfiniteRecursion'
let x = import "./recursive.ncl" in x
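
Note that this test imports "./recursive.ncl" while the commit adds only this one test file, so the test is presumably recursive.ncl itself and the import is a self-import, matching the sketch after the commit metadata above. Per the metadata header, the expected outcome is EvalError::InfiniteRecursion rather than a non-terminating evaluation, which is the behavior that closurizing resolved imports is meant to provide.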