mirror of https://github.com/tweag/nickel.git synced 2024-10-06 08:07:37 +03:00

Sort out idents and positions (#1531)

* Make Symbol available.

Ident is a symbol plus a position. The position isn't intended to carry
any semantics -- it's really just for diagnostics -- so it's ignored in
comparison and hashing. This is mostly useful, but sometimes
(particularly in the LSP) we do want meaningful positions. Also, a lot
of code that doesn't care about positions at all was carrying one
around "by accident."
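
To make that concrete, here is a minimal, self-contained sketch of the
intended semantics. It uses the post-rename names from this PR (`Ident` for
the bare interned symbol, `LocIdent` for the positioned identifier) and
stand-in types in place of the real interner and `TermPos`, so it is an
illustration rather than the actual implementation:

```rust
// Sketch only: a u32 "symbol" and a line/column pair stand in for the real
// interner-backed symbol and `TermPos`.
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Ident(u32); // equality and hashing depend only on the name

#[derive(Clone, Copy, Debug)]
struct LocIdent {
    symbol: Ident,
    pos: Option<(u32, u32)>, // carried for diagnostics only
}

impl PartialEq for LocIdent {
    fn eq(&self, other: &Self) -> bool {
        self.symbol == other.symbol // the position is deliberately ignored
    }
}
impl Eq for LocIdent {}

impl Hash for LocIdent {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.symbol.hash(state) // keep `Hash` consistent with `Eq`
    }
}

fn main() {
    let a = LocIdent { symbol: Ident(0), pos: Some((1, 4)) };
    let b = LocIdent { symbol: Ident(0), pos: Some((7, 2)) };
    assert_eq!(a, b); // same name, different positions: still equal

    let mut map: HashMap<LocIdent, &str> = HashMap::new();
    map.insert(a, "value");
    assert_eq!(map.get(&b), Some(&"value")); // hashing ignores the position too
}
```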

This PR makes Symbol publicly available, and updates many usages of
Ident to use Symbol instead. There are probably more places where
Symbol could be used.

We do not yet introduce a Symbol + meaningful position type. That will
be done in a future PR, and probably restricted to the LSP.
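
Most call sites in the diff below follow one mechanical pattern: AST nodes
keep a positioned identifier so diagnostics can point at the source, while
environments and other position-agnostic maps are keyed by the bare symbol
obtained through a `symbol()` accessor. A hedged sketch of that pattern,
again with stand-in types rather than the real evaluator API:

```rust
use std::collections::HashMap;

// Stand-ins for the real types; see the sketch above.
type Ident = &'static str;

#[derive(Clone, Copy, Debug)]
struct LocIdent {
    symbol: Ident,
    pos: Option<(u32, u32)>,
}

impl LocIdent {
    fn symbol(&self) -> Ident {
        self.symbol
    }
}

// The environment doesn't care about positions, so it is keyed by `Ident`;
// the variable occurrence keeps its `LocIdent` for error reporting.
type Environment = HashMap<Ident, i64>;

fn lookup(env: &Environment, var: LocIdent) -> Result<i64, String> {
    env.get(&var.symbol())
        .copied()
        .ok_or_else(|| format!("unbound identifier `{}` at {:?}", var.symbol(), var.pos))
}

fn main() {
    let x = LocIdent { symbol: "x", pos: Some((3, 9)) };
    let mut env = Environment::new();
    env.insert(x.symbol(), 42); // insert by symbol, as in `env.insert(id.symbol(), ...)`
    assert_eq!(lookup(&env, x), Ok(42));
}
```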

* two more conversions

* Update core/src/eval/operation.rs

Co-authored-by: Yann Hamdaoui <yann.hamdaoui@tweag.io>

* The big rename

---------

Co-authored-by: Yann Hamdaoui <yann.hamdaoui@tweag.io>
jneem 2023-08-16 16:06:30 -05:00 committed by GitHub
parent ecf667ecda
commit a55c6488b0
47 changed files with 612 additions and 521 deletions

View File

@ -8,7 +8,7 @@ use serde::de::{
VariantAccess, Visitor,
};
use crate::identifier::Ident;
use crate::identifier::LocIdent;
use crate::term::array::{self, Array};
use crate::term::record::Field;
use crate::term::{IndexMap, RichTerm, Term};
@ -386,12 +386,12 @@ where
}
struct RecordDeserializer {
iter: <IndexMap<Ident, Field> as IntoIterator>::IntoIter,
iter: <IndexMap<LocIdent, Field> as IntoIterator>::IntoIter,
field: Option<Field>,
}
impl RecordDeserializer {
fn new(map: IndexMap<Ident, Field>) -> Self {
fn new(map: IndexMap<LocIdent, Field>) -> Self {
RecordDeserializer {
iter: map.into_iter(),
field: None,
@ -443,7 +443,7 @@ impl<'de> MapAccess<'de> for RecordDeserializer {
}
fn visit_record<'de, V>(
record: IndexMap<Ident, Field>,
record: IndexMap<LocIdent, Field>,
visitor: V,
) -> Result<V::Value, RustDeserializationError>
where

View File

@ -4,7 +4,7 @@
use std::collections::{hash_map::Entry, HashMap};
use crate::{
identifier::Ident,
identifier::LocIdent,
label::Label,
parser::error::ParseError,
position::{RawSpan, TermPos},
@ -18,12 +18,12 @@ use crate::{
#[derive(Debug, PartialEq, Clone)]
pub enum FieldPattern {
/// An assignment match like `{ ..., a = b, ... }`
Ident(Ident),
Ident(LocIdent),
/// A nested record pattern like `{ ..., a = { b, c }, ... }`
RecordPattern(RecordPattern),
/// An aliased nested record pattern like `{ ..., a = b @ { c, d }, ... }`
AliasedRecordPattern {
alias: Ident,
alias: LocIdent,
pattern: RecordPattern,
},
}
@ -35,13 +35,13 @@ pub enum Match {
/// `{..., a=b, ...}` will bind the field `a` of the record to variable `b`. Here, `a` is the
/// first field of this variant and `b` the optional one. The last field can actually be a
/// nested destruct pattern.
Assign(Ident, Field, FieldPattern),
Assign(LocIdent, Field, FieldPattern),
/// Simple binding. The `Ident` is bound to a variable with the same name.
Simple(Ident, Field),
Simple(LocIdent, Field),
}
impl Match {
fn ident(&self) -> Ident {
fn ident(&self) -> LocIdent {
match self {
Match::Assign(ident, ..) | Match::Simple(ident, ..) => *ident,
}
@ -56,7 +56,7 @@ pub enum LastMatch {
Match(Box<Match>),
/// The pattern is "open" `, ..}`. Optionally you can bind a record containing the remaining
/// fields to an `Identifier` using the syntax `, ..y}`.
Ellipsis(Option<Ident>),
Ellipsis(Option<LocIdent>),
}
/// A destructured record pattern
@ -64,7 +64,7 @@ pub enum LastMatch {
pub struct RecordPattern {
pub matches: Vec<Match>,
pub open: bool,
pub rest: Option<Ident>,
pub rest: Option<LocIdent>,
pub span: RawSpan,
}
@ -158,7 +158,7 @@ impl RecordPattern {
impl Match {
/// Convert the `Match` to a field binding with metadata. It's used to generate the record
/// contract representing a record pattern destructuring.
pub fn as_binding(self) -> (Ident, Field) {
pub fn as_binding(self) -> (LocIdent, Field) {
match self {
Match::Assign(id, field, FieldPattern::Ident(_)) | Match::Simple(id, field) => {
(id, field)
@ -191,17 +191,17 @@ impl Match {
/// Returns info about each variable bound in a particular pattern.
/// It also tells the "path" to the bound variable; this is just the
/// record field names traversed to get to a pattern.
pub fn as_flattened_bindings(self) -> Vec<(Vec<Ident>, Option<Ident>, Field)> {
fn get_label(id: &Ident, pattern: &RecordPattern) -> Label {
pub fn as_flattened_bindings(self) -> Vec<(Vec<LocIdent>, Option<LocIdent>, Field)> {
fn get_label(id: &LocIdent, pattern: &RecordPattern) -> Label {
let mut label = pattern.label();
label.span = RawSpan::fuse(id.pos.unwrap(), label.span).unwrap();
label
}
fn flatten_matches(
id: &Ident,
id: &LocIdent,
matches: &[Match],
) -> Vec<(Vec<Ident>, Option<Ident>, Field)> {
) -> Vec<(Vec<LocIdent>, Option<LocIdent>, Field)> {
matches
.iter()
.flat_map(|m| m.clone().as_flattened_bindings())

View File

@ -9,7 +9,7 @@ use malachite::num::conversion::traits::ToSci;
use crate::{
eval::callstack::CallStack,
identifier::Ident,
identifier::LocIdent,
label::{
self,
ty_path::{self, PathSpan},
@ -54,7 +54,7 @@ pub enum EvalError {
},
/// A field required by a record contract is missing a definition.
MissingFieldDef {
id: Ident,
id: LocIdent,
metadata: FieldMetadata,
pos_record: TermPos,
pos_access: TermPos,
@ -114,7 +114,7 @@ pub enum EvalError {
merge_label: MergeLabel,
},
/// An unbound identifier was referenced.
UnboundIdentifier(Ident, TermPos),
UnboundIdentifier(LocIdent, TermPos),
/// An element in the evaluation Cache was entered during its own update.
InfiniteRecursion(CallStack, TermPos),
/// A serialization error occurred during a call to the builtin `serialize`.
@ -139,7 +139,7 @@ pub enum EvalError {
/// Position of the original unevaluated expression.
pos: TermPos,
/// The identifier that we tried to query.
id: Ident,
id: LocIdent,
/// Evaluated expression
value: RichTerm,
},
@ -180,10 +180,10 @@ const UNKNOWN_SOURCE_NAME: &str = "<unknown> (generated by evaluation)";
#[derive(Debug, PartialEq, Clone)]
pub enum TypecheckError {
/// An unbound identifier was referenced.
UnboundIdentifier(Ident, TermPos),
UnboundIdentifier(LocIdent, TermPos),
/// A specific row was expected to be in the type of an expression, but was not.
MissingRow(
Ident,
LocIdent,
/* the expected type */ Type,
/* the inferred/annotated type */ Type,
TermPos,
@ -196,7 +196,7 @@ pub enum TypecheckError {
),
/// A specific row was not expected to be in the type of an expression.
ExtraRow(
Ident,
LocIdent,
/* the expected type */ Type,
/* the inferred/annotated type */ Type,
TermPos,
@ -226,7 +226,7 @@ pub enum TypecheckError {
pos: TermPos,
},
/// An unbound type variable was referenced.
UnboundTypeVariable(Ident),
UnboundTypeVariable(LocIdent),
/// The actual (inferred or annotated) type of an expression is incompatible with its expected
/// type.
TypeMismatch(
@ -236,7 +236,7 @@ pub enum TypecheckError {
),
/// Two incompatible kinds (enum vs record) have been deduced for the same identifier of a row type.
RowMismatch(
Ident,
LocIdent,
/* the expected row type (whole) */ Type,
/* the actual row type (whole) */ Type,
/* error at the given row */ Box<TypecheckError>,
@ -256,7 +256,7 @@ pub enum TypecheckError {
/// original `field: Type` declaration, as opposed to `RowKindMismatch`, which corresponds to
/// the direct failure to unify `{ .. , x: T1, .. }` and `{ .., x: T2, .. }`.
RowConflict(
Ident,
LocIdent,
/* the second type assignment which violates the constraint */ Type,
/* the expected type of the subexpression */ Type,
/* the actual type of the subexpression */ Type,
@ -334,7 +334,7 @@ pub enum TypecheckError {
VarLevelMismatch {
/// The user-defined type variable (the rigid type variable during unification) that
/// couldn't be unified.
type_var: Ident,
type_var: LocIdent,
/// The position of the expression that was being typechecked as `type_var`.
pos: TermPos,
},
@ -436,7 +436,7 @@ pub enum ParseError {
Option<RawSpan>,
),
/// Unbound type variable
UnboundTypeVariables(Vec<Ident>),
UnboundTypeVariables(Vec<LocIdent>),
/// Illegal record type literal.
///
/// This occurs when failing to convert from the uniterm syntax to a record type literal.
@ -459,7 +459,7 @@ pub enum ParseError {
/// e.g. in the signature `forall r. { ; r } -> r`,
/// - a variable is used as both a record and enum row variable, e.g. in the
/// signature `forall r. [| ; r |] -> { ; r }`.
TypeVariableKindMismatch { ty_var: Ident, span: RawSpan },
TypeVariableKindMismatch { ty_var: LocIdent, span: RawSpan },
/// A record literal, which isn't a record type, has a field with a type annotation but without
/// a definition. While we could technically handle this situation, this is most probably an
/// error from the user, because this type annotation is useless and, maybe non-intuitively,
@ -489,9 +489,9 @@ pub enum ParseError {
/// A duplicate binding was encountered in a record destructuring pattern.
DuplicateIdentInRecordPattern {
/// The duplicate identifier.
ident: Ident,
ident: LocIdent,
/// The previous instance of the duplicated identifier.
prev_ident: Ident,
prev_ident: LocIdent,
},
}
@ -1945,10 +1945,10 @@ impl IntoDiagnostics<FileId> for TypecheckError {
// showing a cascade of similar error messages, we determine the full path of the
// nested field (e.g. `pkg.subpkg1.meta.url`) and only show once the row mismatch
// error followed by the underlying error.
let mut path = vec![ident];
let mut path = vec![ident.symbol()];
while let TypecheckError::RowMismatch(id_next, _, _, next, _) = *err {
path.push(id_next);
path.push(id_next.symbol());
err = next;
}

View File

@ -1,7 +1,7 @@
//! Thunks and associated devices used to implement lazy evaluation.
use super::{BlackholedError, Cache, CacheIndex, Closure, Environment, IdentKind};
use crate::{
identifier::Ident,
identifier::{Ident, LocIdent},
term::{record::FieldDeps, BindingType, RichTerm, Term},
};
use std::cell::{Ref, RefCell, RefMut};
@ -198,9 +198,9 @@ impl ThunkData {
///
/// If `orig` is `foo + bar + a` and `args` correspond to `bar, foo`, this functions returns a
/// standard thunk containing `fun bar foo => foo + bar + a`.
fn revthunk_as_explicit_fun<'a, I>(self, args: I) -> Self
fn revthunk_as_explicit_fun<I>(self, args: I) -> Self
where
I: DoubleEndedIterator<Item = &'a Ident>,
I: DoubleEndedIterator<Item = Ident>,
{
match self.inner {
InnerThunkData::Standard(_) => self,
@ -212,8 +212,9 @@ impl ThunkData {
// the original iterator. If the identifiers inside `args` are `a`, `b` and `c`, in
// that order, we want to build `fun a => (fun b => (fun c => body))`. We thus need a
// reverse fold.
let as_function =
args.rfold(body, |built, id| RichTerm::from(Term::Fun(*id, built)));
let as_function = args.rfold(body, |built, id| {
RichTerm::from(Term::Fun(id.into(), built))
});
ThunkData::new(Closure {
body: as_function,
@ -499,7 +500,7 @@ impl Thunk {
/// `self` (in particular, `a` is bound)
/// - allocates a fresh variable, say `%1`, and binds it to the previous thunk in `env`
/// - returns the term `%1 foo bar`
pub fn saturate<'a, I: DoubleEndedIterator<Item = &'a Ident> + Clone>(
pub fn saturate<I: DoubleEndedIterator<Item = Ident> + Clone>(
self,
env: &mut Environment,
fields: I,
@ -509,9 +510,9 @@ impl Thunk {
.map(RefCell::into_inner)
.unwrap_or_else(|rc| rc.borrow().clone());
let mut deps_filter: Box<dyn FnMut(&&Ident) -> bool> = match deps {
FieldDeps::Known(deps) => Box::new(move |id: &&Ident| deps.contains(id)),
FieldDeps::Unknown => Box::new(|_: &&Ident| true),
let mut deps_filter: Box<dyn FnMut(&Ident) -> bool> = match deps {
FieldDeps::Known(deps) => Box::new(move |id: &Ident| deps.contains(id)),
FieldDeps::Unknown => Box::new(|_: &Ident| true),
};
let thunk_as_function = Thunk {
@ -521,11 +522,12 @@ impl Thunk {
ident_kind: self.ident_kind,
};
let fresh_var = Ident::fresh();
env.insert(fresh_var, thunk_as_function);
let fresh_var = LocIdent::fresh();
env.insert(fresh_var.symbol(), thunk_as_function);
let as_function_closurized = RichTerm::from(Term::Var(fresh_var));
let args = fields.filter_map(|id| deps_filter(&id).then(|| RichTerm::from(Term::Var(*id))));
let args =
fields.filter_map(|id| deps_filter(&id).then(|| RichTerm::from(Term::Var(id.into()))));
args.fold(as_function_closurized, |partial_app, arg| {
RichTerm::from(Term::App(partial_app, arg))
@ -669,7 +671,7 @@ impl Cache for CBNCache {
idx.ident_kind()
}
fn saturate<'a, I: DoubleEndedIterator<Item = &'a Ident> + Clone>(
fn saturate<'a, I: DoubleEndedIterator<Item = Ident> + Clone>(
&mut self,
idx: CacheIndex,
env: &mut Environment,

View File

@ -75,7 +75,7 @@ pub trait Cache: Clone {
/// and apply the function to the given variables. The function part is allocated in a new
/// cache entry, stored as a generated variable, with the same environment as the original
/// expression.
fn saturate<'a, I: DoubleEndedIterator<Item = &'a Ident> + Clone>(
fn saturate<I: DoubleEndedIterator<Item = Ident> + Clone>(
&mut self,
idx: CacheIndex,
env: &mut Environment,

View File

@ -3,7 +3,7 @@
//! stored in a call stack solely for better error reporting.
use super::IdentKind;
use crate::{
identifier::Ident,
identifier::LocIdent,
position::{RawSpan, TermPos},
};
use codespan::FileId;
@ -15,7 +15,7 @@ pub struct CallStack(pub Vec<StackElem>);
/// Basic description of a function call. Used for error reporting.
pub struct CallDescr {
/// The name of the called function, if any.
pub head: Option<Ident>,
pub head: Option<LocIdent>,
/// The position of the application.
pub span: RawSpan,
}
@ -30,12 +30,12 @@ pub enum StackElem {
/// A variable was entered.
Var {
kind: IdentKind,
id: Ident,
id: LocIdent,
pos: TermPos,
},
/// A record field was entered.
Field {
id: Ident,
id: LocIdent,
pos_record: TermPos,
pos_field: TermPos,
pos_access: TermPos,
@ -48,7 +48,7 @@ impl CallStack {
}
/// Push a marker to indicate that a var was entered.
pub fn enter_var(&mut self, kind: IdentKind, id: Ident, pos: TermPos) {
pub fn enter_var(&mut self, kind: IdentKind, id: LocIdent, pos: TermPos) {
self.0.push(StackElem::Var { kind, id, pos });
}
@ -73,7 +73,7 @@ impl CallStack {
/// Push a marker to indicate that a record field was entered.
pub fn enter_field(
&mut self,
id: Ident,
id: LocIdent,
pos_record: TermPos,
pos_field: TermPos,
pos_access: TermPos,

View File

@ -18,7 +18,7 @@ fn patch_term<C: Cache>(
if let Term::Var(var_id) = &*term.term {
// TODO: Shouldn't be mutable, [`CBNCache`] abstraction is leaking.
let mut idx = env
.get(var_id)
.get(&var_id.symbol())
.cloned()
.ok_or(EvalError::UnboundIdentifier(*var_id, term.pos))?;
@ -51,7 +51,7 @@ fn patch_term<C: Cache>(
/// happen, we have to ensure the pending contract `Schema` is applied to the recursive occurrence
/// `foo` used in the definition `baz = foo.bar`. That is, we must apply pending contracts to their
/// corresponding field when building the recursive environment.
pub fn rec_env<'a, I: Iterator<Item = (&'a Ident, &'a Field)>, C: Cache>(
pub fn rec_env<'a, I: Iterator<Item = (&'a LocIdent, &'a Field)>, C: Cache>(
cache: &mut C,
bindings: I,
env: &Environment,
@ -62,7 +62,7 @@ pub fn rec_env<'a, I: Iterator<Item = (&'a Ident, &'a Field)>, C: Cache>(
if let Some(ref value) = field.value {
let idx = match value.as_ref() {
Term::Var(ref var_id) => env
.get(var_id)
.get(&var_id.symbol())
.cloned()
.ok_or(EvalError::UnboundIdentifier(*var_id, value.pos))?,
_ => {
@ -82,8 +82,8 @@ pub fn rec_env<'a, I: Iterator<Item = (&'a Ident, &'a Field)>, C: Cache>(
// Pending contracts might use identifiers from the current record's environment,
// so we start from the environment of the original record.
let mut final_env = env.clone();
let id_value = Ident::fresh();
final_env.insert(id_value, idx);
let id_value = LocIdent::fresh();
final_env.insert(id_value.symbol(), idx);
let with_ctr_applied = RuntimeContract::apply_all(
RichTerm::new(Term::Var(id_value), value.pos),
@ -132,7 +132,7 @@ pub fn rec_env<'a, I: Iterator<Item = (&'a Ident, &'a Field)>, C: Cache>(
};
Ok((
*id,
id.symbol(),
cache.add(final_closure, IdentKind::Record, BindingType::Normal),
))
} else {
@ -154,7 +154,7 @@ pub fn rec_env<'a, I: Iterator<Item = (&'a Ident, &'a Field)>, C: Cache>(
};
Ok((
*id,
id.symbol(),
cache.add(closure, IdentKind::Record, BindingType::Normal),
))
}

View File

@ -24,6 +24,7 @@
//! One can think of merge as being defined on metadata as well. When merging two fields, the
//! resulting metadata is the result of merging the two original field's metadata. The semantics
//! depend on each metadata.
use super::*;
use crate::error::{EvalError, IllegalPolymorphicTailAction};
use crate::label::{Label, MergeLabel};
@ -349,7 +350,7 @@ Append `, ..` at the end of the record contract, as in `{some_field | SomeContra
/// values. Apply the required saturate, revert or closurize operation, including on the final
/// field returned.
#[allow(clippy::too_many_arguments)]
fn merge_fields<'a, C: Cache, I: DoubleEndedIterator<Item = &'a Ident> + Clone>(
fn merge_fields<'a, C: Cache, I: DoubleEndedIterator<Item = &'a LocIdent> + Clone>(
cache: &mut C,
merge_label: MergeLabel,
field1: Field,
@ -469,7 +470,7 @@ trait Saturate: Sized {
/// If the expression is not a variable referring to an element in the cache
/// (this can happen e.g. for numeric constants), we just return the term as it is, which
/// falls into the zero dependencies special case.
fn saturate<'a, I: DoubleEndedIterator<Item = &'a Ident> + Clone, C: Cache>(
fn saturate<'a, I: DoubleEndedIterator<Item = &'a LocIdent> + Clone, C: Cache>(
self,
cache: &mut C,
env: &mut Environment,
@ -479,7 +480,7 @@ trait Saturate: Sized {
}
impl Saturate for RichTerm {
fn saturate<'a, I: DoubleEndedIterator<Item = &'a Ident> + Clone, C: Cache>(
fn saturate<'a, I: DoubleEndedIterator<Item = &'a LocIdent> + Clone, C: Cache>(
self,
cache: &mut C,
env: &mut Environment,
@ -488,11 +489,13 @@ impl Saturate for RichTerm {
) -> Result<RichTerm, EvalError> {
if let Term::Var(var_id) = &*self.term {
let idx = local_env
.get(var_id)
.get(&var_id.symbol())
.cloned()
.ok_or(EvalError::UnboundIdentifier(*var_id, self.pos))?;
Ok(cache.saturate(idx, env, fields).with_pos(self.pos))
Ok(cache
.saturate(idx, env, fields.map(LocIdent::symbol))
.with_pos(self.pos))
} else {
Ok(self)
}
@ -507,7 +510,7 @@ fn field_deps<C: Cache>(
) -> Result<FieldDeps, EvalError> {
if let Term::Var(var_id) = &*rt.term {
local_env
.get(var_id)
.get(&var_id.symbol())
.map(|idx| cache.deps(idx).unwrap_or_else(FieldDeps::empty))
.ok_or(EvalError::UnboundIdentifier(*var_id, rt.pos))
} else {
@ -525,7 +528,7 @@ fn field_deps<C: Cache>(
/// The fields are saturated (see [saturate]) to properly propagate recursive dependencies down to
/// `t1` and `t2` in the final, merged record.
#[allow(clippy::too_many_arguments)]
fn fields_merge_closurize<'a, I: DoubleEndedIterator<Item = &'a Ident> + Clone, C: Cache>(
fn fields_merge_closurize<'a, I: DoubleEndedIterator<Item = &'a LocIdent> + Clone, C: Cache>(
cache: &mut C,
merge_label: MergeLabel,
env: &mut Environment,
@ -549,11 +552,11 @@ fn fields_merge_closurize<'a, I: DoubleEndedIterator<Item = &'a Ident> + Clone,
body,
env: local_env,
};
let fresh_var = Ident::fresh();
let fresh_var = LocIdent::fresh();
// new_rev takes care of not creating a revertible element in the cache if the dependencies are empty.
env.insert(
fresh_var,
fresh_var.symbol(),
cache.add(
closure,
IdentKind::Record,
@ -584,9 +587,9 @@ impl RevertClosurize for RichTerm {
) -> RichTerm {
if let Term::Var(id) = self.as_ref() {
// This creates a fresh variable which is bound to a reverted copy of the original element
let reverted = cache.revert(with_env.get(id).unwrap());
let fresh_id = Ident::fresh();
env.insert(fresh_id, reverted);
let reverted = cache.revert(with_env.get(&id.symbol()).unwrap());
let fresh_id = LocIdent::fresh();
env.insert(fresh_id.symbol(), reverted);
RichTerm::new(Term::Var(fresh_id), self.pos)
} else {
// Otherwise, if it is not a variable after the share normal form transformations, it

View File

@ -73,13 +73,14 @@
//! appear inside recursive records in the future. An adapted garbage collector is probably
//! something to consider at some point.
use crate::identifier::Ident;
use crate::term::record::FieldMetadata;
use crate::term::string::NickelString;
use crate::{
cache::{Cache as ImportCache, Envs, ImportResolver},
environment::Environment as GenericEnvironment,
error::{Error, EvalError},
identifier::Ident,
identifier::LocIdent,
match_sharedterm,
position::TermPos,
program::QueryPath,
@ -381,8 +382,8 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
}
Term::Var(x) => {
let mut idx = env
.get(x)
.or_else(|| initial_env.get(x))
.get(&x.symbol())
.or_else(|| initial_env.get(&x.symbol()))
.cloned()
.ok_or(EvalError::UnboundIdentifier(*x, pos))?;
std::mem::drop(env); // idx may be a 1RC pointer
@ -453,10 +454,10 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
if *rec {
let idx_ = idx.clone();
self.cache
.patch(idx_.clone(), |cl| cl.env.insert(*x, idx_.clone()));
.patch(idx_.clone(), |cl| cl.env.insert(x.symbol(), idx_.clone()));
}
env.insert(*x, idx);
env.insert(x.symbol(), idx);
Closure {
body: t.clone(),
env,
@ -734,7 +735,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
Term::Fun(x, t) => {
if let Some((idx, pos_app)) = self.stack.pop_arg_as_idx(&mut self.cache) {
self.call_stack.enter_fun(pos_app);
env.insert(*x, idx);
env.insert(x.symbol(), idx);
Closure {
body: t.clone(),
env,
@ -896,7 +897,7 @@ pub fn env_add_record<C: Cache>(
let ext = record.fields.into_iter().filter_map(|(id, field)| {
field.value.map(|value|
(
id,
id.symbol(),
cache.add(
Closure { body: value, env: closure.env.clone() },
IdentKind::Record,
@ -916,7 +917,7 @@ pub fn env_add_record<C: Cache>(
pub fn env_add<C: Cache>(
cache: &mut C,
env: &mut Environment,
id: Ident,
id: LocIdent,
rt: RichTerm,
local_env: Environment,
) {
@ -924,7 +925,10 @@ pub fn env_add<C: Cache>(
body: rt,
env: local_env,
};
env.insert(id, cache.add(closure, IdentKind::Let, BindingType::Normal));
env.insert(
id.symbol(),
cache.add(closure, IdentKind::Let, BindingType::Normal),
);
}
/// Pop and update all the indices on the top of the stack with the given closure.
@ -945,8 +949,8 @@ pub fn subst<C: Cache>(
match term.into_owned() {
Term::Var(id) => env
.get(&id)
.or_else(|| initial_env.get(&id))
.get(&id.symbol())
.or_else(|| initial_env.get(&id.symbol()))
.map(|idx| {
let closure = cache.get(idx.clone());
subst(cache, closure.body, initial_env, &closure.env)

View File

@ -14,7 +14,7 @@ use super::{
use crate::{
error::{EvalError, IllegalPolymorphicTailAction},
identifier::Ident,
identifier::LocIdent,
label::{ty_path, Polarity, TypeVarData},
match_sharedterm, mk_app, mk_fun, mk_opn, mk_record,
parser::utils::parse_number,
@ -235,7 +235,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
_ => "Other",
};
Ok(Closure::atomic_closure(RichTerm::new(
Term::Enum(Ident::from(result)),
Term::Enum(LocIdent::from(result)),
pos_op_inh,
)))
}
@ -450,7 +450,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
Ok(Closure { body: value, env })
}
None => match record.sealed_tail.as_ref() {
Some(t) if t.has_field(&id) => {
Some(t) if t.has_field(&id.symbol()) => {
Err(EvalError::IllegalPolymorphicTailAccess {
action: IllegalPolymorphicTailAction::FieldAccess {
field: id.to_string(),
@ -505,10 +505,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
.collect::<Result<Vec<_>, _>>()
.map_err(|missing_def_err| missing_def_err.into_eval_err(pos, pos_op))?;
// Although it seems that sort_by_key would be easier here, it would actually
// require to copy the identifiers because of the lack of HKT. See
// https://github.com/rust-lang/rust/issues/34162.
values.sort_by(|(id1, _), (id2, _)| id1.cmp(id2));
values.sort_by_key(|(id, _)| *id);
let terms = values.into_iter().map(|(_, value)| value).collect();
Ok(Closure {
@ -896,7 +893,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
UnaryOp::EnumFromStr() => {
if let Term::Str(s) = &*t {
Ok(Closure::atomic_closure(RichTerm::new(
Term::Enum(Ident::from(s)),
Term::Enum(LocIdent::from(s)),
pos_op_inh,
)))
} else {
@ -908,7 +905,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
let re = regex::Regex::new(s)
.map_err(|err| EvalError::Other(err.to_string(), pos_op))?;
let param = Ident::fresh();
let param = LocIdent::fresh();
let matcher = Term::Fun(
param,
RichTerm::new(
@ -930,7 +927,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
let re = regex::Regex::new(s)
.map_err(|err| EvalError::Other(err.to_string(), pos_op))?;
let param = Ident::fresh();
let param = LocIdent::fresh();
let matcher = Term::Fun(
param,
RichTerm::new(
@ -1555,7 +1552,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
// and again, which is not optimal. The same thing happens with array
// contracts. There are several way to improve this, but this is left
// as future work.
let ident = Ident::from(&id);
let ident = LocIdent::from(&id);
match record.get_value_with_ctrs(&ident).map_err(|missing_field_err| missing_field_err.into_eval_err(pos2, pos_op))? {
Some(value) => {
self.call_stack.enter_field(ident, pos2, value.pos, pos_op);
@ -1628,7 +1625,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
None
};
match fields.insert(Ident::from(id), Field {value, metadata, pending_contracts }) {
match fields.insert(LocIdent::from(id), Field {value, metadata, pending_contracts }) {
//TODO: what to do on insertion where an empty optional field
//exists? Temporary: we fail with existing field exception
Some(t) => Err(EvalError::Other(format!("record_insert: tried to extend a record with the field {id}, but it already exists"), pos_op)),
@ -1650,7 +1647,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
Term::Str(id) => match_sharedterm! {t2, with {
Term::Record(record) => {
let mut fields = record.fields;
let fetched = fields.remove(&Ident::from(&id));
let fetched = fields.remove(&LocIdent::from(&id));
match fetched {
None
| Some(Field {
@ -1696,7 +1693,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
Term::Str(id) => {
if let Term::Record(record) = &*t2 {
Ok(Closure::atomic_closure(RichTerm::new(
Term::Bool(matches!(record.fields.get(&Ident::from(id.into_inner())), Some(field) if !field.is_empty_optional())),
Term::Bool(matches!(record.fields.get(&LocIdent::from(id.into_inner())), Some(field) if !field.is_empty_optional())),
pos_op_inh,
)))
} else {
@ -2035,7 +2032,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
// documentation
let mut record_data = record_data;
let mut contract_at_field = |id: Ident| {
let mut contract_at_field = |id: LocIdent| {
let pos = contract_term.pos;
mk_app!(
contract_term.clone(),
@ -2419,7 +2416,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
&mut env,
env4,
);
let fields = tail.fields.keys().cloned().collect();
let fields = tail.fields.keys().map(|s| s.symbol()).collect();
r.sealed_tail = Some(record::SealedTail::new(
*s,
label.clone(),
@ -2846,7 +2843,7 @@ impl RecPriority {
field.value = field.value.take().map(|value| {
if let Term::Var(id_inner) = value.as_ref() {
let idx = env.get(id_inner).unwrap();
let idx = env.get(&id_inner.symbol()).unwrap();
let new_idx =
cache.map_at_index(idx, |cache, inner| match inner.body.as_ref() {
@ -2863,8 +2860,8 @@ impl RecPriority {
_ => panic!("rec_priority: expected an evaluated form"),
});
let fresh_id = Ident::fresh();
new_env.insert(fresh_id, new_idx);
let fresh_id = LocIdent::fresh();
new_env.insert(fresh_id.symbol(), new_idx);
RichTerm::new(Term::Var(fresh_id), pos)
} else {
// A record field that doesn't contain a variable is a constant (a number,
@ -3146,14 +3143,14 @@ trait MapValuesClosurize: Sized {
shared_env: &mut Environment,
env: &Environment,
f: F,
) -> Result<IndexMap<Ident, Field>, record::MissingFieldDefError>
) -> Result<IndexMap<LocIdent, Field>, record::MissingFieldDefError>
where
F: FnMut(Ident, RichTerm) -> RichTerm;
F: FnMut(LocIdent, RichTerm) -> RichTerm;
}
impl<Iter> MapValuesClosurize for Iter
where
Iter: IntoIterator<Item = (Ident, Field)>,
Iter: IntoIterator<Item = (LocIdent, Field)>,
{
fn map_values_closurize<F, C: Cache>(
self,
@ -3161,9 +3158,9 @@ where
shared_env: &mut Environment,
env: &Environment,
mut f: F,
) -> Result<IndexMap<Ident, Field>, record::MissingFieldDefError>
) -> Result<IndexMap<LocIdent, Field>, record::MissingFieldDefError>
where
F: FnMut(Ident, RichTerm) -> RichTerm,
F: FnMut(LocIdent, RichTerm) -> RichTerm,
{
self.into_iter()
.map(|(id, field)| {

View File

@ -179,7 +179,10 @@ fn imports() {
let mk_import_lib = mk_import(
"x",
"lib",
mk_term::op1(UnaryOp::StaticAccess(Ident::from("f")), mk_term::var("x")),
mk_term::op1(
UnaryOp::StaticAccess(LocIdent::from("f")),
mk_term::var("x"),
),
&mut vm,
);
vm.reset();

View File

@ -1,67 +1,32 @@
//! Define the type of an identifier.
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use std::fmt::{self, Debug};
use std::hash::Hash;
use std::{
fmt::{self, Debug},
hash::Hash,
};
use crate::{position::TermPos, term::string::NickelString};
simple_counter::generate_counter!(GeneratedCounter, usize);
static INTERNER: Lazy<interner::Interner> = Lazy::new(interner::Interner::new);
#[derive(Clone, Copy, Deserialize, Serialize)]
/// An interned string.
//
// Implementation-wise, this is just a wrapper around interner::Symbol that uses a hard-coded,
// static `Interner`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(into = "String", from = "String")]
pub struct Ident {
symbol: interner::Symbol,
pub pos: TermPos,
generated: bool,
}
impl Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Ident")
.field("label", &self.label())
.finish()
}
}
pub struct Ident(interner::Symbol);
impl Ident {
pub fn new_with_pos(label: impl AsRef<str>, pos: TermPos) -> Self {
let generated = label.as_ref().starts_with(GEN_PREFIX);
Self {
symbol: INTERNER.intern(label),
pos,
generated,
}
pub fn new(s: impl AsRef<str>) -> Self {
Self(INTERNER.intern(s.as_ref()))
}
pub fn new(label: impl AsRef<str>) -> Self {
Self::new_with_pos(label, TermPos::None)
}
/// Create an identifier with the same label as this one, but no position.
pub fn without_pos(self) -> Ident {
Ident {
pos: TermPos::None,
..self
}
}
/// Create an identifier with the same label as this one, but a specified position.
pub fn with_pos(self, pos: TermPos) -> Ident {
Ident { pos, ..self }
}
/// Create a new fresh identifier. This identifier is unique and is guaranteed not to collide
/// with any identifier defined before. Generated identifiers start with a special prefix that
/// can't be used by normal, user-defined identifiers.
pub fn fresh() -> Self {
Self::new(format!("{}{}", GEN_PREFIX, GeneratedCounter::next()))
}
/// Return the string representation of this identifier.
/// Return the string representation of this symbol.
pub fn label(&self) -> &str {
INTERNER.lookup(self.symbol)
INTERNER.lookup(self.0)
}
pub fn into_label(self) -> String {
@ -69,9 +34,21 @@ impl Ident {
}
}
/// Special character used for generating fresh identifiers. It must be syntactically impossible to
/// write in a standard Nickel program, to avoid name clashes.
pub const GEN_PREFIX: char = '%';
impl fmt::Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.label())
}
}
impl From<Ident> for LocIdent {
fn from(symbol: Ident) -> Self {
LocIdent {
symbol,
pos: TermPos::None,
generated: symbol.label().starts_with(GEN_PREFIX),
}
}
}
impl PartialOrd for Ident {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
@ -85,27 +62,118 @@ impl Ord for Ident {
}
}
impl PartialEq for Ident {
impl From<Ident> for NickelString {
fn from(sym: Ident) -> Self {
sym.to_string().into()
}
}
impl<F> From<F> for Ident
where
String: From<F>,
{
fn from(val: F) -> Self {
Self(INTERNER.intern(String::from(val)))
}
}
#[allow(clippy::from_over_into)]
impl Into<String> for Ident {
fn into(self) -> String {
self.into_label()
}
}
/// An identifier with a location.
///
/// The location is ignored for equality comparison and hashing; it's mainly
/// intended for error messages.
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
#[serde(into = "String", from = "String")]
pub struct LocIdent {
symbol: Ident,
pub pos: TermPos,
generated: bool,
}
impl LocIdent {
pub fn new_with_pos(label: impl AsRef<str>, pos: TermPos) -> Self {
let generated = label.as_ref().starts_with(GEN_PREFIX);
Self {
symbol: Ident::new(label),
pos,
generated,
}
}
pub fn new(label: impl AsRef<str>) -> Self {
Self::new_with_pos(label, TermPos::None)
}
/// Create an identifier with the same label as this one, but a specified position.
pub fn with_pos(self, pos: TermPos) -> LocIdent {
LocIdent { pos, ..self }
}
/// Create a new fresh identifier. This identifier is unique and is guaranteed not to collide
/// with any identifier defined before. Generated identifiers start with a special prefix that
/// can't be used by normal, user-defined identifiers.
pub fn fresh() -> Self {
Self::new(format!("{}{}", GEN_PREFIX, GeneratedCounter::next()))
}
/// Return this identifier's symbol.
pub fn symbol(&self) -> Ident {
self.symbol
}
/// Return the string representation of this identifier.
pub fn label(&self) -> &str {
self.symbol.label()
}
pub fn into_label(self) -> String {
self.label().to_owned()
}
}
/// Special character used for generating fresh identifiers. It must be syntactically impossible to
/// write in a standard Nickel program, to avoid name clashes.
pub const GEN_PREFIX: char = '%';
impl PartialOrd for LocIdent {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.label().partial_cmp(other.label())
}
}
impl Ord for LocIdent {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.label().cmp(other.label())
}
}
impl PartialEq for LocIdent {
fn eq(&self, other: &Self) -> bool {
self.symbol == other.symbol
}
}
impl Eq for Ident {}
impl Eq for LocIdent {}
impl Hash for Ident {
impl Hash for LocIdent {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.symbol.hash(state);
self.symbol.hash(state)
}
}
impl fmt::Display for Ident {
impl fmt::Display for LocIdent {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.label())
}
}
impl<F> From<F> for Ident
impl<F> From<F> for LocIdent
where
String: From<F>,
{
@ -120,25 +188,25 @@ where
// `From<Ident> for Ident` which is incoherent with the
// blanket implementation of `From<T> for T`.
#[allow(clippy::from_over_into)]
impl Into<String> for Ident {
impl Into<String> for LocIdent {
fn into(self) -> String {
self.into_label()
}
}
impl From<Ident> for NickelString {
fn from(id: Ident) -> Self {
impl From<LocIdent> for NickelString {
fn from(id: LocIdent) -> Self {
id.to_string().into()
}
}
impl Ident {
impl LocIdent {
pub fn is_generated(&self) -> bool {
self.generated
}
}
impl AsRef<str> for Ident {
impl AsRef<str> for LocIdent {
fn as_ref(&self) -> &str {
self.label()
}

View File

@ -6,7 +6,7 @@ use std::{collections::HashMap, rc::Rc};
use crate::{
eval::cache::{Cache as EvalCache, CacheIndex},
identifier::Ident,
identifier::LocIdent,
mk_uty_enum, mk_uty_record,
position::{RawSpan, TermPos},
term::{
@ -53,7 +53,7 @@ pub mod ty_path {
//! particular field.
use crate::{
identifier::Ident,
identifier::LocIdent,
position::RawSpan,
typ::{RecordRowF, RecordRowsIteratorItem, Type, TypeF},
};
@ -63,7 +63,7 @@ pub mod ty_path {
pub enum Elem {
Domain,
Codomain,
Field(Ident),
Field(LocIdent),
Array,
Dict,
}
@ -192,8 +192,8 @@ pub mod ty_path {
}
panic!(
"span: current type path element indicates to go to field `{}`,\
but this field doesn't exist in {}",
"span: current type path element indicates to go to field `{}`, \
but this field doesn't exist in {}",
ident,
Type::from(TypeF::Record(rows.clone())),
)
@ -302,7 +302,7 @@ pub struct Label {
/// The name of the record field to report in blame errors. This is set
/// while first transforming a record as part of the pending contract generation.
/// Contract applications outside of records will have this field set to `None`.
pub field_name: Option<Ident>,
pub field_name: Option<LocIdent>,
}
/// Data about type variables that is needed for polymorphic contracts to decide which actions to take.
@ -315,7 +315,7 @@ impl From<&TypeVarData> for Term {
fn from(value: &TypeVarData) -> Self {
Term::Record(RecordData {
fields: [(
Ident::new("polarity"),
LocIdent::new("polarity"),
Field::from(RichTerm::from(Term::from(value.polarity))),
)]
.into(),
@ -355,8 +355,8 @@ impl Polarity {
impl From<Polarity> for Term {
fn from(value: Polarity) -> Self {
match value {
Polarity::Positive => Term::Enum(Ident::new("Positive")),
Polarity::Negative => Term::Enum(Ident::new("Negative")),
Polarity::Positive => Term::Enum(LocIdent::new("Positive")),
Polarity::Negative => Term::Enum(LocIdent::new("Negative")),
}
}
}
@ -494,7 +494,7 @@ impl Label {
}
}
pub fn with_field_name(self, field_name: Option<Ident>) -> Self {
pub fn with_field_name(self, field_name: Option<LocIdent>) -> Self {
Label { field_name, ..self }
}
}

View File

@ -1,7 +1,7 @@
use codespan::FileId;
use codespan_reporting::diagnostic::Label;
use crate::{identifier::Ident, position::RawSpan};
use crate::{identifier::LocIdent, position::RawSpan};
use std::ops::Range;
#[derive(Clone, PartialEq, Eq, Debug)]
@ -86,7 +86,7 @@ pub enum ParseError {
/// A specific lexical error
Lexical(LexicalError),
/// Unbound type variable(s)
UnboundTypeVariables(Vec<Ident>),
UnboundTypeVariables(Vec<LocIdent>),
/// Illegal record type literal.
///
/// This occurs when failing to convert from the uniterm syntax to a record type literal.
@ -102,9 +102,9 @@ pub enum ParseError {
/// A duplicate binding was encountered in a record destructuring pattern.
DuplicateIdentInRecordPattern {
/// The duplicate identifier.
ident: Ident,
ident: LocIdent,
/// The previous instance of the duplicated identifier.
prev_ident: Ident,
prev_ident: LocIdent,
},
/// A type variable is used in ways that imply it has multiple different kinds.
///
@ -113,7 +113,7 @@ pub enum ParseError {
/// e.g. in the signature `forall r. { ; r } -> r`,
/// - a variable is used as both a record and enum row variable, e.g. in the
/// signature `forall r. [| ; r |] -> { ; r }`.
TypeVariableKindMismatch { ty_var: Ident, span: RawSpan },
TypeVariableKindMismatch { ty_var: LocIdent, span: RawSpan },
/// A record literal, which isn't a record type, has a field with a type annotation but without
/// a definition. While we could technically handle this situation, this is most probably an
/// error from the user, because this type annotation is useless and, maybe non-intuitively,

View File

@ -54,7 +54,7 @@ use crate::{
mk_app,
mk_opn,
mk_fun,
identifier::Ident,
identifier::LocIdent,
destructuring::{Match, FieldPattern, LastMatch, RecordPattern},
term::{
*,
@ -511,8 +511,8 @@ DefaultAnnot: Field = "?" <t: Term> => Field {
..Default::default()
};
Ident: Ident = <l:@L> <i: "identifier"> <r:@R> =>
Ident::new_with_pos(i, mk_pos(src_id, l, r));
Ident: LocIdent = <l:@L> <i: "identifier"> <r:@R> =>
LocIdent::new_with_pos(i, mk_pos(src_id, l, r));
Bool: bool = {
"true" => true,
@ -624,7 +624,7 @@ StaticString : String = {
StringEnumTag = DelimitedStaticString<"'\"", "\"">;
EnumTag: Ident = {
EnumTag: LocIdent = {
"raw enum tag" => <>.into(),
<StringEnumTag> => <>.into(),
};

View File

@ -1,5 +1,5 @@
use crate::error::{ParseError, ParseErrors};
use crate::identifier::Ident;
use crate::identifier::LocIdent;
use crate::term::RichTerm;
use crate::typ::Type;
use codespan::FileId;
@ -31,7 +31,7 @@ mod tests;
/// ```
pub enum ExtendedTerm {
RichTerm(RichTerm),
ToplevelLet(Ident, RichTerm),
ToplevelLet(LocIdent, RichTerm),
}
// The interface of LALRPOP-generated parsers, for each public rule. This trait is used as a facade

View File

@ -3,7 +3,7 @@ use std::rc::Rc;
use super::lexer::{Lexer, MultiStringToken, NormalToken, StringToken, SymbolicStringStart, Token};
use super::utils::{build_record, FieldPathElem};
use crate::error::ParseError;
use crate::identifier::Ident;
use crate::identifier::LocIdent;
use crate::parser::{error::ParseError as InternalParseError, ErrorTolerantParser};
use crate::term::array::Array;
use crate::term::Number;
@ -209,23 +209,23 @@ fn enum_terms() {
(
"simple raw enum tag",
"'foo",
Enum(Ident::from("foo")).into(),
Enum(LocIdent::from("foo")).into(),
),
(
"raw enum tag with keyword ident",
"'if",
Enum(Ident::from("if")).into(),
Enum(LocIdent::from("if")).into(),
),
("empty string tag", "'\"\"", Enum(Ident::from("")).into()),
("empty string tag", "'\"\"", Enum(LocIdent::from("")).into()),
(
"string tag with non-ident chars",
"'\"foo:bar\"",
Enum(Ident::from("foo:bar")).into(),
Enum(LocIdent::from("foo:bar")).into(),
),
(
"string with spaces",
"'\"this works!\"",
Enum(Ident::from("this works!")).into(),
Enum(LocIdent::from("this works!")).into(),
),
(
"match with raw tags",
@ -273,12 +273,11 @@ fn record_terms() {
RecRecord(
record::RecordData::with_field_values(
vec![
(Ident::from("a"), mk_term::integer(1)),
(Ident::from("b"), mk_term::integer(2)),
(Ident::from("c"), mk_term::integer(3)),
(LocIdent::from("a"), mk_term::integer(1)),
(LocIdent::from("b"), mk_term::integer(2)),
(LocIdent::from("c"), mk_term::integer(3)),
]
.into_iter()
.collect()
),
Vec::new(),
None,
@ -291,11 +290,10 @@ fn record_terms() {
RecRecord(
record::RecordData::with_field_values(
vec![
(Ident::from("a"), mk_term::integer(1)),
(Ident::from("d"), mk_term::integer(42)),
(LocIdent::from("a"), mk_term::integer(1)),
(LocIdent::from("d"), mk_term::integer(42)),
]
.into_iter()
.collect()
),
vec![(
StrChunks(vec![StrChunk::expr(mk_term::integer(123))]).into(),
@ -315,11 +313,10 @@ fn record_terms() {
RecRecord(
record::RecordData::with_field_values(
vec![
(Ident::from("a"), mk_term::integer(1)),
(Ident::from("\"%}%"), mk_term::integer(2)),
(LocIdent::from("a"), mk_term::integer(1)),
(LocIdent::from("\"%}%"), mk_term::integer(2)),
]
.into_iter()
.collect()
),
Vec::new(),
None,

View File

@ -6,6 +6,7 @@ use utils::{build_record, FieldDef, FieldPathElem};
use crate::{
environment::Environment,
identifier::Ident,
position::{RawSpan, TermPos},
term::{
record::{Field, FieldMetadata, RecordAttrs},
@ -49,7 +50,7 @@ use std::{
/// `UniTermNode::Term(Term::Op2(..))`.
pub enum UniTermNode {
/// A variable. Can refer both to a term variable or a type variable.
Var(Ident),
Var(LocIdent),
/// A record. Can refer both to a record literal or a record type.
Record(UniRecord),
/// A uniterm that has been determined to be a term.
@ -89,7 +90,7 @@ impl TryFrom<UniTerm> for Type {
fn try_from(ut: UniTerm) -> Result<Self, ParseError> {
let ty_without_pos = match ut.node {
UniTermNode::Var(id) => Type::from(TypeF::Var(id)),
UniTermNode::Var(id) => Type::from(TypeF::Var(id.symbol())),
UniTermNode::Record(r) => Type::try_from(r)?,
UniTermNode::Type(ty) => ty,
UniTermNode::Term(rt) => Type::from(TypeF::Flat(rt)),
@ -236,7 +237,7 @@ impl UniRecord {
// we might already have found a definition for this field, or might do later
// in the loop.
if let Some(ident) = path_as_ident {
match candidate_fields.entry(ident) {
match candidate_fields.entry(ident.symbol()) {
// If the hashmap is occupied, we've met this field before. Either
// there is another definition without annotation, in which case
// there's no need to replace it, or there is a `Defined` element,
@ -259,7 +260,7 @@ impl UniRecord {
}
field => {
if let (Some(ident), Some(_)) = (path_as_ident, &field.value) {
candidate_fields.insert(ident, FieldState::Defined);
candidate_fields.insert(ident.symbol(), FieldState::Defined);
}
None
@ -321,7 +322,7 @@ impl UniRecord {
/// `{foo.bar.baz : Type}.into_type_strict()` returns an `Err`.
pub fn into_type_strict(self) -> Result<Type, InvalidRecordTypeError> {
fn term_to_record_rows(
id: Ident,
id: LocIdent,
field_def: FieldDef,
tail: RecordRows,
) -> Result<RecordRows, InvalidRecordTypeError> {
@ -414,10 +415,10 @@ impl UniRecord {
field_def.pos.unwrap(),
),
)?;
Ident::new_with_pos(name, expr.pos)
LocIdent::new_with_pos(name, expr.pos)
}
};
if let Some(prev_id) = fields_seen.insert(id, id) {
if let Some(prev_id) = fields_seen.insert(id.symbol(), id) {
return Err(InvalidRecordTypeError::RepeatedField {
// Because we're iterating backwards, `id` came first.
orig: id.pos.unwrap(),
@ -679,14 +680,13 @@ impl FixTypeVars for Type {
(*t).fix_type_vars_env(bound_vars, span)?;
Ok(())
}
TypeF::Var(ref mut id) => {
if let Some(cell) = bound_vars.get(id) {
TypeF::Var(sym) => {
if let Some(cell) = bound_vars.get(&sym) {
cell.try_set(VarKind::Type)
.map_err(|_| ParseError::TypeVariableKindMismatch { ty_var: *id, span })?;
.map_err(|_| ParseError::TypeVariableKindMismatch { ty_var: LocIdent::from(sym).with_pos(self.pos), span })?;
} else {
let id = *id;
let pos = id.pos;
self.typ = TypeF::Flat(RichTerm::new(Term::Var(id), pos));
let id = LocIdent::from(sym).with_pos(self.pos);
self.typ = TypeF::Flat(RichTerm::new(Term::Var(id), id.pos));
}
Ok(())
}
@ -698,7 +698,7 @@ impl FixTypeVars for Type {
// We spawn a new VarKindCell and put it in the environment. The recursive calls to
// fix_type_vars will fill this cell with the correct kind, which we get afterwards
// to set the right value for `var_kind`.
bound_vars.insert(*var, VarKindCell::new());
bound_vars.insert(var.symbol(), VarKindCell::new());
// let x : forall a. { _foo: forall a. a, bar: { ; a } }
(*body).fix_type_vars_env(bound_vars.clone(), span)?;
// unwrap(): We just inserted a value for `var` above, and environment can never
@ -707,7 +707,7 @@ impl FixTypeVars for Type {
// access to this VarKindCell in bound_vars. We can avoid a clone by taking
// the var_kind out. We could also take the whole key value pair out of the
// `Environment`, but ownership there is trickier.
*var_kind = bound_vars.get(var).unwrap().take_var_kind().unwrap_or_default();
*var_kind = bound_vars.get(&var.symbol()).unwrap().take_var_kind().unwrap_or_default();
Ok(())
}
@ -738,7 +738,7 @@ impl FixTypeVars for RecordRows {
// We can't have a contract in tail position, so we don't fix `TailVar`. However, we
// have to set the correct kind for the corresponding forall binder.
RecordRowsF::TailVar(ref id) => {
if let Some(cell) = bound_vars.get(id) {
if let Some(cell) = bound_vars.get(&id.symbol()) {
cell.try_set(VarKind::RecordRows {
excluded: maybe_excluded,
})
@ -750,7 +750,7 @@ impl FixTypeVars for RecordRows {
ref mut row,
ref mut tail,
} => {
maybe_excluded.insert(row.id);
maybe_excluded.insert(row.id.symbol());
row.typ.fix_type_vars_env(bound_vars.clone(), span)?;
helper(tail, bound_vars, span, maybe_excluded)
}
@ -777,7 +777,7 @@ impl FixTypeVars for EnumRows {
.skip_while(|item| matches!(item, EnumRowsIteratorItem::Row(_)));
match iter.next() {
Some(EnumRowsIteratorItem::TailVar(id)) => {
if let Some(cell) = bound_vars.get(id) {
if let Some(cell) = bound_vars.get(&id.symbol()) {
cell.try_set(VarKind::EnumRows)
.map_err(|_| ParseError::TypeVariableKindMismatch { ty_var: *id, span })?;
}

View File

@ -11,7 +11,7 @@ use super::error::ParseError;
use crate::{
destructuring::FieldPattern,
eval::operation::RecPriority,
identifier::Ident,
identifier::LocIdent,
label::{Label, MergeKind, MergeLabel},
mk_app, mk_fun,
position::{RawSpan, TermPos},
@ -72,7 +72,7 @@ pub enum StringEndDelimiter {
/// Distinguish between a normal case `id => exp` and a default case `_ => exp`.
#[derive(Clone, Debug)]
pub enum MatchCase {
Normal(Ident, RichTerm),
Normal(LocIdent, RichTerm),
Default(RichTerm),
}
@ -80,7 +80,7 @@ pub enum MatchCase {
#[derive(Clone, Debug)]
pub enum FieldPathElem {
/// A static field declaration: `{ foo = .. }`
Ident(Ident),
Ident(LocIdent),
/// A quoted field declaration: `{ "%{protocol}" = .. }`
///
/// In practice, the argument must always be `StrChunks`, but since we also need to keep track
@ -165,7 +165,7 @@ impl FieldDef {
let static_access = exp.term.as_ref().try_str_chunk_as_static_str();
if let Some(static_access) = static_access {
let id = Ident::new_with_pos(static_access, exp.pos);
let id = LocIdent::new_with_pos(static_access, exp.pos);
let mut fields = IndexMap::new();
fields.insert(id, acc);
Field::from(RichTerm::new(
@ -192,7 +192,7 @@ impl FieldDef {
}
/// Returns the identifier corresponding to this definition if the path is composed of exactly one element which is a static identifier. Returns `None` otherwise.
pub fn path_as_ident(&self) -> Option<Ident> {
pub fn path_as_ident(&self) -> Option<LocIdent> {
if self.path.len() > 1 {
return None;
}
@ -459,7 +459,7 @@ pub fn mk_access(access: RichTerm, root: RichTerm) -> RichTerm {
if let Some(label) = label {
mk_term::op1(
UnaryOp::StaticAccess(Ident::new_with_pos(label, access.pos)),
UnaryOp::StaticAccess(LocIdent::new_with_pos(label, access.pos)),
root,
)
} else {
@ -476,7 +476,11 @@ where
let mut static_fields = IndexMap::new();
let mut dynamic_fields = Vec::new();
fn insert_static_field(static_fields: &mut IndexMap<Ident, Field>, id: Ident, field: Field) {
fn insert_static_field(
static_fields: &mut IndexMap<LocIdent, Field>,
id: LocIdent,
field: Field,
) {
match static_fields.entry(id) {
Entry::Occupied(mut occpd) => {
// temporarily putting an empty field in the entry to take the previous value.
@ -523,7 +527,7 @@ where
if is_static {
insert_static_field(
&mut static_fields,
Ident::new_with_pos(buffer, e.pos),
LocIdent::new_with_pos(buffer, e.pos),
t,
)
} else {
@ -538,14 +542,7 @@ where
});
Term::RecRecord(
RecordData::new(
static_fields
.into_iter()
.map(|(id, value)| (id, value))
.collect(),
attrs,
None,
),
RecordData::new(static_fields, attrs, None),
dynamic_fields,
None,
)
@ -624,7 +621,7 @@ pub fn mk_let(
FieldPattern::Ident(id) => Ok(mk_term::let_in(id, t1, t2)),
_ if rec => Err(ParseError::RecursiveLetPattern(span)),
FieldPattern::RecordPattern(pat) => {
let id: Option<Ident> = None;
let id: Option<LocIdent> = None;
Ok(mk_term::let_pat(id, pat, t1, t2))
}
FieldPattern::AliasedRecordPattern { alias, pattern } => {

View File

@ -1,8 +1,7 @@
use std::fmt;
use crate::destructuring::{self, FieldPattern, RecordPattern};
use crate::identifier::Ident;
use crate::identifier::LocIdent;
use crate::parser::lexer::KEYWORDS;
use crate::term::{
record::{Field, FieldMetadata},
@ -59,7 +58,7 @@ static QUOTING_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new("^_?[a-zA-Z][_a-zA-Z
/// Return the string representation of an identifier, and add enclosing double quotes if the
/// label isn't a valid identifier according to the parser, for example if it contains a
/// special character like a space.
pub fn ident_quoted(ident: &Ident) -> String {
pub fn ident_quoted(ident: &LocIdent) -> String {
let label = ident.label();
if QUOTING_REGEX.is_match(label) && !KEYWORDS.contains(&label) {
String::from(label)
@ -205,7 +204,7 @@ where
.group()
}
fn field(&'a self, id: &Ident, field: &Field, with_doc: bool) -> DocBuilder<'a, Self, A> {
fn field(&'a self, id: &LocIdent, field: &Field, with_doc: bool) -> DocBuilder<'a, Self, A> {
self.text(ident_quoted(id))
.append(self.field_body(field, with_doc))
}
@ -246,7 +245,7 @@ where
fn fields(
&'a self,
fields: &IndexMap<Ident, Field>,
fields: &IndexMap<LocIdent, Field>,
with_doc: bool,
) -> DocBuilder<'a, Self, A> {
self.intersperse(

View File

@ -25,7 +25,7 @@ use crate::error::{Error, IntoDiagnostics, ParseError};
use crate::eval;
use crate::eval::cache::Cache as EvalCache;
use crate::eval::VirtualMachine;
use crate::identifier::Ident;
use crate::identifier::LocIdent;
use crate::term::{record::Field, RichTerm};
use atty;
use codespan::FileId;
@ -45,7 +45,7 @@ impl From<clap::ColorChoice> for ColorOpt {
/// Attribute path provided when querying metadata.
#[derive(Clone, Default, PartialEq, Eq, Debug)]
pub struct QueryPath(pub Vec<Ident>);
pub struct QueryPath(pub Vec<LocIdent>);
impl QueryPath {
pub fn new() -> Self {
@ -82,7 +82,7 @@ impl QueryPath {
)
})?;
let path_as_idents: Result<Vec<Ident>, ParseError> = field_path
let path_as_idents: Result<Vec<LocIdent>, ParseError> = field_path
.into_iter()
.map(|elem| match elem {
FieldPathElem::Ident(ident) => Ok(ident),
@ -93,7 +93,7 @@ impl QueryPath {
pos_path_elem: expr.pos,
},
)?;
Ok(Ident::from(as_string))
Ok(LocIdent::from(as_string))
}
})
.collect();
@ -703,6 +703,7 @@ mod tests {
use super::*;
use crate::error::EvalError;
use crate::eval::cache::CacheImpl;
use crate::identifier::LocIdent;
use crate::position::TermPos;
use crate::term::array::ArrayAttrs;
use assert_matches::assert_matches;

View File

@ -10,7 +10,7 @@ use crate::cache::{Cache, Envs, ErrorTolerance};
use crate::error::{Error, EvalError, IOError, ParseError, ParseErrors, ReplError};
use crate::eval::cache::Cache as EvalCache;
use crate::eval::{Closure, VirtualMachine};
use crate::identifier::Ident;
use crate::identifier::LocIdent;
use crate::parser::{grammar, lexer, ErrorTolerantParser, ExtendedTerm};
use crate::program::QueryPath;
use crate::term::{record::Field, RichTerm, Term, Traverse};
@ -43,7 +43,7 @@ pub enum EvalResult {
/// The input has been evaluated to a term.
Evaluated(RichTerm),
/// The input was a toplevel let, which has been bound in the environment.
Bound(Ident),
Bound(LocIdent),
}
impl From<RichTerm> for EvalResult {
@ -107,7 +107,7 @@ impl<EC: EvalCache> ReplImpl<EC> {
// `id` must be set to `None` for normal expressions and to `Some(id_)` for top-level lets. In the
// latter case, we need to update the current type environment before doing program
// transformations in the case of a top-level let.
fn prepare(&mut self, id: Option<Ident>, t: RichTerm) -> Result<RichTerm, Error> {
fn prepare(&mut self, id: Option<LocIdent>, t: RichTerm) -> Result<RichTerm, Error> {
let import_resolution::strict::ResolveResult {
transformed_term: t,
resolved_ids: pending,
@ -132,11 +132,10 @@ impl<EC: EvalCache> ReplImpl<EC> {
&self.env.type_ctxt.term_env,
self.vm.import_resolver(),
);
self.env
.type_ctxt
.term_env
.0
.insert(id, (t.clone(), self.env.type_ctxt.term_env.clone()));
self.env.type_ctxt.term_env.0.insert(
id.symbol(),
(t.clone(), self.env.type_ctxt.term_env.clone()),
);
}
for id in &pending {

View File

@ -1,5 +1,5 @@
//! Rendering of the results of a metadata query.
use crate::identifier::Ident;
use crate::identifier::{Ident, LocIdent};
use crate::term::{
record::{Field, FieldMetadata},
MergePriority, Term,
@ -14,9 +14,9 @@ pub trait QueryPrinter {
/// Print the documentation attribute.
fn write_doc(&self, out: &mut impl Write, content: &str) -> io::Result<()>;
/// Print the list of fields of a record.
fn write_fields<'a, I>(&self, out: &mut impl Write, fields: I) -> io::Result<()>
fn write_fields<I>(&self, out: &mut impl Write, fields: I) -> io::Result<()>
where
I: Iterator<Item = &'a Ident>;
I: Iterator<Item = Ident>;
}
#[cfg(feature = "markdown")]
@ -41,9 +41,9 @@ impl QueryPrinter for SimpleRenderer {
}
}
fn write_fields<'a, I>(&self, out: &mut impl Write, fields: I) -> io::Result<()>
fn write_fields<I>(&self, out: &mut impl Write, fields: I) -> io::Result<()>
where
I: Iterator<Item = &'a Ident>,
I: Iterator<Item = Ident>,
{
writeln!(out, "Available fields:")?;
@ -111,9 +111,9 @@ impl QueryPrinter for MarkdownRenderer {
}
}
fn write_fields<'a, I>(&self, out: &mut impl Write, fields: I) -> io::Result<()>
fn write_fields<I>(&self, out: &mut impl Write, fields: I) -> io::Result<()>
where
I: Iterator<Item = &'a Ident>,
I: Iterator<Item = Ident>,
{
use minimad::*;
use termimad::*;
@ -194,13 +194,13 @@ fn render_query_result<R: QueryPrinter>(
Term::Record(record) if !record.fields.is_empty() => {
let mut fields: Vec<_> = record.fields.keys().collect();
fields.sort();
renderer.write_fields(out, fields.into_iter())
renderer.write_fields(out, fields.into_iter().map(LocIdent::symbol))
}
Term::RecRecord(record, dyn_fields, ..) if !record.fields.is_empty() => {
let mut fields: Vec<_> = record.fields.keys().collect();
let mut fields: Vec<_> = record.fields.keys().map(LocIdent::symbol).collect();
fields.sort();
let dynamic = Ident::from("<dynamic>");
fields.extend(dyn_fields.iter().map(|_| &dynamic));
fields.extend(dyn_fields.iter().map(|_| dynamic));
renderer.write_fields(out, fields.into_iter())
}
Term::Record(..) | Term::RecRecord(..) => renderer.write_metadata(out, "value", "{}"),

View File

@ -4,6 +4,7 @@ use once_cell::sync::Lazy;
use crate::{
error::ExportError,
identifier::LocIdent,
term::{
array::{Array, ArrayAttrs},
record::RecordData,
@ -133,7 +134,7 @@ pub fn deserialize_record<'de, D>(deserializer: D) -> Result<RecordData, D::Erro
where
D: Deserializer<'de>,
{
let fields = IndexMap::deserialize(deserializer)?;
let fields = IndexMap::<LocIdent, _>::deserialize(deserializer)?;
Ok(RecordData::with_field_values(fields))
}

View File

@ -20,7 +20,7 @@ use string::NickelString;
use crate::{
destructuring::RecordPattern,
error::{EvalError, ParseError},
identifier::Ident,
identifier::LocIdent,
label::{Label, MergeLabel},
match_sharedterm,
position::TermPos,
@ -82,29 +82,29 @@ pub enum Term {
StrChunks(Vec<StrChunk<RichTerm>>),
/// A standard function.
#[serde(skip)]
Fun(Ident, RichTerm),
Fun(LocIdent, RichTerm),
/// A function able to destruct its arguments.
#[serde(skip)]
FunPattern(Option<Ident>, RecordPattern, RichTerm),
FunPattern(Option<LocIdent>, RecordPattern, RichTerm),
/// A blame label.
#[serde(skip)]
Lbl(Label),
/// A let binding.
#[serde(skip)]
Let(Ident, RichTerm, RichTerm, LetAttrs),
Let(LocIdent, RichTerm, RichTerm, LetAttrs),
/// A destructuring let-binding.
#[serde(skip)]
LetPattern(Option<Ident>, RecordPattern, RichTerm, RichTerm),
LetPattern(Option<LocIdent>, RecordPattern, RichTerm, RichTerm),
/// An application.
#[serde(skip)]
App(RichTerm, RichTerm),
/// A variable.
#[serde(skip)]
Var(Ident),
Var(LocIdent),
/// An enum variant.
Enum(Ident),
Enum(LocIdent),
/// A record, mapping identifiers to terms.
#[serde(serialize_with = "crate::serialize::serialize_record")]
@ -123,7 +123,7 @@ pub enum Term {
/// able to handle yet unapplied match expressions.
#[serde(skip)]
Match {
cases: IndexMap<Ident, RichTerm>,
cases: IndexMap<LocIdent, RichTerm>,
default: Option<RichTerm>,
},
@ -434,7 +434,7 @@ pub struct LabeledType {
impl LabeledType {
/// Modify the label's `field_name` field.
pub fn with_field_name(self, ident: Option<Ident>) -> Self {
pub fn with_field_name(self, ident: Option<LocIdent>) -> Self {
LabeledType {
label: self.label.with_field_name(ident),
..self
@ -522,7 +522,7 @@ impl TypeAnnotation {
}
/// Set the `field_name` attribute of the labels of the type and contracts annotations.
pub fn with_field_name(self, field_name: Option<Ident>) -> Self {
pub fn with_field_name(self, field_name: Option<LocIdent>) -> Self {
TypeAnnotation {
typ: self.typ.map(|t| t.with_field_name(field_name)),
contracts: self
@ -915,14 +915,14 @@ pub enum UnaryOp {
/// `Embed` is used to upcast enums. For example, if a value `x` has enum type `a | b`, then
/// `embed c x` will have enum type `a | b | c`. It only affects typechecking as at runtime
/// `embed someId` acts like the identity.

Embed(Ident),
Embed(LocIdent),
/// Evaluate a match block applied to an argument.
Match { has_default: bool },
/// Static access to a record field.
///
/// Static means that the field identifier is a statically known string inside the source.
StaticAccess(Ident),
StaticAccess(LocIdent),
/// Map a function on each element of an array.
ArrayMap(),
@ -1395,12 +1395,15 @@ impl RichTerm {
}
}
/// Erase recursively the positional information.
/// Erase recursively (most of) the positional information.
///
/// It allows using Rust's `Eq` trait to compare the values of the underlying terms.
///
/// This is currently only used in test code, but because it's used from integration
/// tests we cannot hide it behind cfg(test).
///
/// Note that `Ident`s retain their position. This position is ignored in comparison, so it's
/// good enough for the tests.
pub fn without_pos(self) -> Self {
self.traverse::<_, _, ()>(
&|t: Type, _| {
@ -1547,7 +1550,7 @@ impl Traverse<RichTerm> for RichTerm {
Term::Match { cases, default } => {
// The annotation on `map_res` use Result's corresponding trait to convert from
// Iterator<Result> to a Result<Iterator>
let cases_result : Result<IndexMap<Ident, RichTerm>, E> = cases
let cases_result : Result<IndexMap<LocIdent, RichTerm>, E> = cases
.into_iter()
// For the conversion to work, note that we need a Result<(Ident,RichTerm), E>
.map(|(id, t)| t.traverse(f, state, order).map(|t_ok| (id, t_ok)))
@ -1594,7 +1597,7 @@ impl Traverse<RichTerm> for RichTerm {
Term::Record(record) => {
// The annotation on `fields_res` uses Result's corresponding trait to convert from
// Iterator<Result> to a Result<Iterator>
let fields_res: Result<IndexMap<Ident, Field>, E> = record.fields
let fields_res: Result<IndexMap<LocIdent, Field>, E> = record.fields
.into_iter()
// For the conversion to work, note that we need a Result<(Ident,RichTerm), E>
.map(|(id, field)| {
@ -1607,7 +1610,7 @@ impl Traverse<RichTerm> for RichTerm {
Term::RecRecord(record, dyn_fields, deps) => {
// The annotation on `map_res` uses Result's corresponding trait to convert from
// Iterator<Result> to a Result<Iterator>
let static_fields_res: Result<IndexMap<Ident, Field>, E> = record.fields
let static_fields_res: Result<IndexMap<LocIdent, Field>, E> = record.fields
.into_iter()
// For the conversion to work, note that we need a Result<(Ident,Field), E>
.map(|(id, field)| {
@ -1867,10 +1870,10 @@ pub mod make {
#[macro_export]
macro_rules! mk_fun {
( $id:expr, $body:expr ) => {
$crate::term::RichTerm::from($crate::term::Term::Fun($crate::identifier::Ident::from($id), $crate::term::RichTerm::from($body)))
$crate::term::RichTerm::from($crate::term::Term::Fun($crate::identifier::LocIdent::from($id), $crate::term::RichTerm::from($body)))
};
( $id1:expr, $id2:expr , $( $rest:expr ),+ ) => {
mk_fun!($crate::identifier::Ident::from($id1), mk_fun!($id2, $( $rest ),+))
mk_fun!($crate::identifier::LocIdent::from($id1), mk_fun!($id2, $( $rest ),+))
};
}
@ -1882,7 +1885,7 @@ pub mod make {
macro_rules! mk_record {
( $( ($id:expr, $body:expr) ),* ) => {
{
let mut fields = indexmap::IndexMap::new();
let mut fields = indexmap::IndexMap::<LocIdent, RichTerm>::new();
$(
fields.insert($id.into(), $body.into());
)*
@ -1936,7 +1939,7 @@ pub mod make {
pub fn var<I>(v: I) -> RichTerm
where
I: Into<Ident>,
I: Into<LocIdent>,
{
Term::Var(v.into()).into()
}
@ -1945,7 +1948,7 @@ pub mod make {
where
T1: Into<RichTerm>,
T2: Into<RichTerm>,
I: Into<Ident>,
I: Into<LocIdent>,
{
let attrs = LetAttrs {
binding_type: BindingType::Normal,
@ -1958,7 +1961,7 @@ pub mod make {
where
T1: Into<RichTerm>,
T2: Into<RichTerm>,
I: Into<Ident>,
I: Into<LocIdent>,
{
let_in_(false, id, t1, t2)
}
@ -1967,7 +1970,7 @@ pub mod make {
where
T1: Into<RichTerm>,
T2: Into<RichTerm>,
I: Into<Ident>,
I: Into<LocIdent>,
{
let_in_(true, id, t1, t2)
}
@ -1977,7 +1980,7 @@ pub mod make {
T1: Into<RichTerm>,
T2: Into<RichTerm>,
D: Into<RecordPattern>,
I: Into<Ident>,
I: Into<LocIdent>,
{
Term::LetPattern(id.map(|i| i.into()), pat.into(), t1.into(), t2.into()).into()
}
@ -2050,7 +2053,7 @@ pub mod make {
where
I: IntoIterator<Item = S>,
I::IntoIter: DoubleEndedIterator,
S: Into<Ident>,
S: Into<LocIdent>,
T: Into<RichTerm>,
{
let mut term = record.into();
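
// Illustrative sketch, not from the Nickel codebase: constructors generic over
// `Into<...>` (like `make::var` above) let call sites pass plain `&str`s while
// the AST stores the located identifier type. `Name` stands in for that type.
#[derive(Debug)]
struct Name(String);

impl From<&str> for Name {
    fn from(s: &str) -> Self {
        Name(s.to_owned())
    }
}

#[derive(Debug)]
enum Expr {
    Var(Name),
    App(Box<Expr>, Box<Expr>),
}

// The builder accepts anything convertible into `Name`.
fn var<I: Into<Name>>(v: I) -> Expr {
    Expr::Var(v.into())
}

fn main() {
    // "f" and "x" are converted on the way in; the AST still stores `Name`s.
    let e = Expr::App(Box::new(var("f")), Box::new(var("x")));
    assert!(matches!(e, Expr::App(..)));
}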

View File

@ -1,5 +1,9 @@
use super::*;
use crate::{error::EvalError, identifier::Ident, label::Label};
use crate::{
error::EvalError,
identifier::{Ident, LocIdent},
label::Label,
};
use std::{collections::HashSet, rc::Rc};
/// Additional attributes for record.
@ -194,7 +198,7 @@ impl Field {
}
}
pub fn with_name(self, field_name: Option<Ident>) -> Self {
pub fn with_name(self, field_name: Option<LocIdent>) -> Self {
Field {
metadata: FieldMetadata {
annotation: self.metadata.annotation.with_field_name(field_name),
@ -254,7 +258,7 @@ impl Traverse<RichTerm> for Field {
#[derive(Clone, Debug, Default, PartialEq)]
pub struct RecordData {
/// Fields whose names are known statically.
pub fields: IndexMap<Ident, Field>,
pub fields: IndexMap<LocIdent, Field>,
/// Attributes which may be applied to a record.
pub attrs: RecordAttrs,
/// The hidden part of a record under a polymorphic contract.
@ -265,7 +269,7 @@ pub struct RecordData {
/// definition and isn't optional.
#[derive(Clone, Debug)]
pub struct MissingFieldDefError {
pub id: Ident,
pub id: LocIdent,
pub metadata: FieldMetadata,
}
@ -282,7 +286,7 @@ impl MissingFieldDefError {
impl RecordData {
pub fn new(
fields: IndexMap<Ident, Field>,
fields: IndexMap<LocIdent, Field>,
attrs: RecordAttrs,
sealed_tail: Option<SealedTail>,
) -> Self {
@ -299,7 +303,7 @@ impl RecordData {
}
/// A record with the provided fields and the default set of attributes.
pub fn with_field_values(field_values: IndexMap<Ident, RichTerm>) -> Self {
pub fn with_field_values(field_values: impl IntoIterator<Item = (LocIdent, RichTerm)>) -> Self {
let fields = field_values
.into_iter()
.map(|(id, value)| {
@ -326,7 +330,7 @@ impl RecordData {
/// external state while iterating.
pub fn map_values<F>(self, mut f: F) -> Self
where
F: FnMut(Ident, Option<RichTerm>) -> Option<RichTerm>,
F: FnMut(LocIdent, Option<RichTerm>) -> Option<RichTerm>,
{
let fields = self
.fields
@ -348,7 +352,7 @@ impl RecordData {
/// defined value. Fields without a value are left unchanged.
pub fn map_defined_values<F>(self, mut f: F) -> Self
where
F: FnMut(Ident, RichTerm) -> RichTerm,
F: FnMut(LocIdent, RichTerm) -> RichTerm,
{
self.map_values(|id, value| value.map(|v| f(id, v)))
}
@ -369,7 +373,7 @@ impl RecordData {
Some(v) => {
let pos = v.pos;
Some(Ok((
id,
id.symbol(),
RuntimeContract::apply_all(v, field.pending_contracts.into_iter(), pos),
)))
}
@ -387,11 +391,11 @@ impl RecordData {
/// `MissingFieldDefError`.
pub fn iter_serializable(
&self,
) -> impl Iterator<Item = Result<(&Ident, &RichTerm), MissingFieldDefError>> {
) -> impl Iterator<Item = Result<(Ident, &RichTerm), MissingFieldDefError>> {
self.fields.iter().filter_map(|(id, field)| {
debug_assert!(field.pending_contracts.is_empty());
match field.value {
Some(ref v) if !field.metadata.not_exported => Some(Ok((id, v))),
Some(ref v) if !field.metadata.not_exported => Some(Ok((id.symbol(), v))),
None if !field.metadata.opt && !field.metadata.not_exported => {
Some(Err(MissingFieldDefError {
id: *id,
@ -410,7 +414,7 @@ impl RecordData {
/// This method automatically applies the potential pending contracts.
pub fn get_value_with_ctrs(
&self,
id: &Ident,
id: &LocIdent,
) -> Result<Option<RichTerm>, MissingFieldDefError> {
match self.fields.get(id) {
Some(Field {
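
// Illustrative sketch, not from the Nickel codebase: loosening a constructor
// from a concrete map type to `impl IntoIterator<Item = (K, V)>`, as done for
// `with_field_values` above, lets callers pass a map, a `Vec` of pairs, or an
// inline array without building an intermediate collection first.
use std::collections::BTreeMap;

#[derive(Debug)]
struct Record {
    fields: BTreeMap<String, i64>,
}

impl Record {
    fn with_field_values(field_values: impl IntoIterator<Item = (String, i64)>) -> Self {
        Record {
            fields: field_values.into_iter().collect(),
        }
    }
}

fn main() {
    // Any iterable of pairs works: here an array, but a BTreeMap would too.
    let r = Record::with_field_values([("a".to_owned(), 1), ("b".to_owned(), 2)]);
    assert_eq!(r.fields.len(), 2);
}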

View File

@ -31,7 +31,7 @@
//! ) in ...
//! ```
use crate::destructuring::{FieldPattern, Match, RecordPattern};
use crate::identifier::Ident;
use crate::identifier::LocIdent;
use crate::match_sharedterm;
use crate::term::make::{op1, op2};
use crate::term::{BinaryOp::DynRemove, RichTerm, Term, TypeAnnotation, UnaryOp::StaticAccess};
@ -58,7 +58,7 @@ pub fn desugar_fun(rt: RichTerm) -> RichTerm {
match_sharedterm! { rt.term,
with {
Term::FunPattern(x, pat, t_) => {
let x = x.unwrap_or_else(Ident::fresh);
let x = x.unwrap_or_else(LocIdent::fresh);
let t_pos = t_.pos;
RichTerm::new(
Term::Fun(
@ -109,7 +109,7 @@ pub fn desugar(rt: RichTerm) -> RichTerm {
with {
Term::LetPattern(x, pat, t_, body) => {
let pos = body.pos;
let x = x.unwrap_or_else(Ident::fresh);
let x = x.unwrap_or_else(LocIdent::fresh);
RichTerm::new(
Term::Let(
x,
@ -129,7 +129,7 @@ pub fn desugar(rt: RichTerm) -> RichTerm {
/// `x` is the identifier pointing to the full record. For `val @ {...} = ... in ...`, the
/// variable `x` should be `Ident("val")`, but for a form without the `@` binding, you will
/// probably generate a fresh variable.
fn bind_open_field(x: Ident, pat: &RecordPattern, body: RichTerm) -> RichTerm {
fn bind_open_field(x: LocIdent, pat: &RecordPattern, body: RichTerm) -> RichTerm {
let (matches, var) = match pat {
RecordPattern {
matches,
@ -142,7 +142,7 @@ fn bind_open_field(x: Ident, pat: &RecordPattern, body: RichTerm) -> RichTerm {
open: true,
rest: None,
..
} => (matches, Ident::fresh()),
} => (matches, LocIdent::fresh()),
RecordPattern {
open: false,
rest: None,
@ -165,7 +165,7 @@ fn bind_open_field(x: Ident, pat: &RecordPattern, body: RichTerm) -> RichTerm {
/// Core of the destructuring. Bind all the variables of the pattern except the "open" (`..y`)
/// part. For that, see `bind_open_field`.
fn destruct_term(x: Ident, pat: &RecordPattern, body: RichTerm) -> RichTerm {
fn destruct_term(x: LocIdent, pat: &RecordPattern, body: RichTerm) -> RichTerm {
let pos = body.pos;
let RecordPattern { matches, .. } = pat;
matches.iter().fold(body, move |t, m| match m {
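
// Illustrative sketch, not from the Nickel codebase: the shape of the
// destructuring desugaring for a closed pattern, on a toy AST. A binding
// `let {a, b} = bound in body` becomes a chain of lets that bind a fresh
// variable to `bound` and each matched field to a static access on it.
#[derive(Debug)]
enum Expr {
    Var(String),
    Access(Box<Expr>, String),         // e.field
    Let(String, Box<Expr>, Box<Expr>), // let x = e1 in e2
}

fn fresh() -> String {
    // Stand-in for `LocIdent::fresh`: a name users cannot write themselves.
    "%pattern".to_owned()
}

fn desugar_let_pattern(fields: &[&str], bound: Expr, body: Expr) -> Expr {
    let root = fresh();
    // Innermost first: fold the field bindings around the body...
    let inner = fields.iter().rev().fold(body, |acc, f| {
        Expr::Let(
            (*f).to_owned(),
            Box::new(Expr::Access(Box::new(Expr::Var(root.clone())), (*f).to_owned())),
            Box::new(acc),
        )
    });
    // ...then bind the whole record to the fresh root variable.
    Expr::Let(root, Box::new(bound), Box::new(inner))
}

fn main() {
    let desugared =
        desugar_let_pattern(&["a", "b"], Expr::Var("r".to_owned()), Expr::Var("a".to_owned()));
    println!("{desugared:?}");
}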

View File

@ -30,7 +30,7 @@ impl CollectFreeVars for RichTerm {
fn collect_free_vars(&mut self, free_vars: &mut HashSet<Ident>) {
match SharedTerm::make_mut(&mut self.term) {
Term::Var(id) => {
free_vars.insert(*id);
free_vars.insert(id.symbol());
}
Term::ParseError(_)
| Term::RuntimeError(_)
@ -47,7 +47,7 @@ impl CollectFreeVars for RichTerm {
let mut fresh = HashSet::new();
t.collect_free_vars(&mut fresh);
fresh.remove(id);
fresh.remove(&id.symbol());
free_vars.extend(fresh);
}
@ -57,7 +57,7 @@ impl CollectFreeVars for RichTerm {
body.collect_free_vars(&mut fresh);
bind_pattern(dest_pat, &mut fresh);
if let Some(id) = id {
fresh.remove(id);
fresh.remove(&id.symbol());
}
free_vars.extend(fresh);
@ -72,7 +72,7 @@ impl CollectFreeVars for RichTerm {
}
t2.collect_free_vars(&mut fresh);
fresh.remove(id);
fresh.remove(&id.symbol());
free_vars.extend(fresh);
}
@ -83,7 +83,7 @@ impl CollectFreeVars for RichTerm {
t2.collect_free_vars(&mut fresh);
bind_pattern(dest_pat, &mut fresh);
if let Some(id) = id {
fresh.remove(id);
fresh.remove(&id.symbol());
}
free_vars.extend(fresh);
@ -114,7 +114,8 @@ impl CollectFreeVars for RichTerm {
}
}
Term::RecRecord(record, dyn_fields, deps) => {
let rec_fields: HashSet<Ident> = record.fields.keys().cloned().collect();
let rec_fields: HashSet<Ident> =
record.fields.keys().map(|id| id.symbol()).collect();
let mut fresh = HashSet::new();
let mut new_deps = RecordDeps {
stat_fields: IndexMap::with_capacity(record.fields.len()),
@ -127,7 +128,7 @@ impl CollectFreeVars for RichTerm {
t.collect_free_vars(&mut fresh);
new_deps
.stat_fields
.insert(*id, FieldDeps::from(&fresh & &rec_fields));
.insert(id.symbol(), FieldDeps::from(&fresh & &rec_fields));
free_vars.extend(&fresh - &rec_fields);
}
@ -241,7 +242,7 @@ fn bind_pattern(dest_pat: &RecordPattern, free_vars: &mut HashSet<Ident>) {
}
if let Some(rest) = rest {
free_vars.remove(rest);
free_vars.remove(&rest.symbol());
}
}
@ -250,7 +251,7 @@ fn bind_pattern(dest_pat: &RecordPattern, free_vars: &mut HashSet<Ident>) {
fn bind_match(m: &Match, free_vars: &mut HashSet<Ident>) {
match m {
Match::Assign(_, _, FieldPattern::Ident(ident)) => {
free_vars.remove(ident);
free_vars.remove(&ident.symbol());
}
Match::Assign(_, _, FieldPattern::RecordPattern(sub_pattern)) => {
bind_pattern(sub_pattern, free_vars);
@ -263,11 +264,11 @@ fn bind_match(m: &Match, free_vars: &mut HashSet<Ident>) {
pattern: sub_pattern,
},
) => {
free_vars.remove(alias);
free_vars.remove(&alias.symbol());
bind_pattern(sub_pattern, free_vars);
}
Match::Simple(id, _) => {
free_vars.remove(id);
free_vars.remove(&id.symbol());
}
}
}

View File

@ -12,7 +12,7 @@
//! The `gen_pending_contracts` phase implemented by this module must be run before
//! `share_normal_form` so that newly generated pending contracts are transformed as well.
use crate::{
identifier::Ident,
identifier::LocIdent,
match_sharedterm,
term::{
record::{Field, RecordData},
@ -48,8 +48,8 @@ pub fn transform_one(rt: RichTerm) -> Result<RichTerm, UnboundTypeVariableError>
}
fn attach_to_fields(
fields: IndexMap<Ident, Field>,
) -> Result<IndexMap<Ident, Field>, UnboundTypeVariableError> {
fields: IndexMap<LocIdent, Field>,
) -> Result<IndexMap<LocIdent, Field>, UnboundTypeVariableError> {
fields
.into_iter()
.map(|(id, field)| Ok((id, attach_to_field(field)?)))

View File

@ -2,7 +2,7 @@
use crate::{
cache::ImportResolver,
eval::{cache::Cache, Closure, Environment, IdentKind},
identifier::Ident,
identifier::LocIdent,
term::{record::Field, BindingType, RichTerm, RuntimeContract, Term, Traverse, TraverseOrder},
typ::UnboundTypeVariableError,
typecheck::Wildcards,
@ -142,15 +142,17 @@ impl Closurizable for RichTerm {
// affect the invariant mentioned above, because the share normal form must ensure that the
// fields of a record all contain generated variables (or constants), but never
// user-supplied variables.
let var = Ident::fresh();
let var = LocIdent::fresh();
let pos = self.pos;
let idx = match self.as_ref() {
Term::Var(id) if id.is_generated() => with_env.get(id).cloned().unwrap_or_else(|| {
panic!(
Term::Var(id) if id.is_generated() => {
with_env.get(&id.symbol()).cloned().unwrap_or_else(|| {
panic!(
"Internal error(closurize) : generated identifier {id} not found in the environment"
)
}),
})
}
_ => {
let closure: Closure = Closure {
body: self,
@ -160,7 +162,7 @@ impl Closurizable for RichTerm {
}
};
env.insert(var, idx);
env.insert(var.symbol(), idx);
RichTerm::new(Term::Var(var), pos.into_inherited())
}
}

View File

@ -28,7 +28,7 @@
//! Newly introduced variables begin with a special character to avoid clashing with user-defined
//! variables.
use crate::{
identifier::Ident,
identifier::LocIdent,
match_sharedterm,
position::TermPos,
term::{
@ -38,7 +38,7 @@ use crate::{
};
struct Binding {
fresh_var: Ident,
fresh_var: LocIdent,
term: RichTerm,
binding_type: BindingType,
}
@ -78,7 +78,7 @@ pub fn transform_one(rt: RichTerm) -> RichTerm {
let mut bindings = Vec::with_capacity(record_data.fields.len());
let fields = record_data.fields.into_iter().map(|(id, field)| {
let field_deps = deps.as_ref().and_then(|deps| deps.stat_fields.get(&id)).cloned();
let field_deps = deps.as_ref().and_then(|deps| deps.stat_fields.get(&id.symbol())).cloned();
(id, transform_rec_field(field, field_deps, &mut bindings))
}).collect();
@ -100,7 +100,7 @@ pub fn transform_one(rt: RichTerm) -> RichTerm {
.into_iter()
.map(|t| {
if should_share(&t.term) {
let fresh_var = Ident::fresh();
let fresh_var = LocIdent::fresh();
let pos_t = t.pos;
bindings.push(Binding {fresh_var, term: t, binding_type: BindingType::Normal});
RichTerm::new(Term::Var(fresh_var), pos_t)
@ -113,7 +113,7 @@ pub fn transform_one(rt: RichTerm) -> RichTerm {
with_bindings(Term::Array(ts, attrs), bindings, pos)
},
Term::Annotated(annot, t) if should_share(&t.term) => {
let fresh_var = Ident::fresh();
let fresh_var = LocIdent::fresh();
let shared = RichTerm::new(Term::Var(fresh_var), t.pos);
let inner = RichTerm::new(Term::Annotated(annot, shared), pos);
RichTerm::new(Term::Let(fresh_var, t, inner, LetAttrs::default()), pos)
@ -146,7 +146,7 @@ fn transform_rec_field(
// CHANGE THIS CONDITION CAREFULLY. Doing so can break the post-condition
// explained in this method's documentation above.
if !rt.as_ref().is_constant() {
let fresh_var = Ident::fresh();
let fresh_var = LocIdent::fresh();
let pos_contract = rt.pos;
let binding_type = mk_binding_type(field_deps.clone());
bindings.push(Binding {
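
// Illustrative sketch, not from the Nickel codebase: one common way to
// implement `fresh()` generated identifiers is a global counter plus a prefix
// that the grammar forbids in user programs (the '%' prefix here is an
// assumption made for the sketch). `is_generated` then only needs to look at
// the first character.
use std::sync::atomic::{AtomicUsize, Ordering};

static FRESH_COUNTER: AtomicUsize = AtomicUsize::new(0);

#[derive(Debug, PartialEq)]
struct Name(String);

impl Name {
    fn fresh() -> Self {
        let n = FRESH_COUNTER.fetch_add(1, Ordering::Relaxed);
        Name(format!("%{n}"))
    }

    fn is_generated(&self) -> bool {
        self.0.starts_with('%')
    }
}

fn main() {
    let a = Name::fresh();
    let b = Name::fresh();
    assert_ne!(a, b);
    assert!(a.is_generated() && b.is_generated());
    assert!(!Name("user_var".to_owned()).is_generated());
}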

View File

@ -42,7 +42,7 @@
//! only be equated with itself.
use crate::{
error::{EvalError, ParseError, ParseErrors, TypecheckError},
identifier::Ident,
identifier::{Ident, LocIdent},
label::Polarity,
mk_app, mk_fun,
position::TermPos,
@ -67,7 +67,7 @@ use std::{
/// unfoldings (here, `Ty` for `TypeF`). See [`TypeF`] for more details.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct RecordRowF<Ty> {
pub id: Ident,
pub id: LocIdent,
pub typ: Ty,
}
@ -77,7 +77,7 @@ pub struct RecordRowF<Ty> {
/// `EnumRowF` is the same as `EnumRow` and doesn't have any type parameter. We introduce the alias
/// nonetheless for consistency with other parametrized type definitions. See [`TypeF`] for more
/// details.
pub type EnumRowF = Ident;
pub type EnumRowF = LocIdent;
pub type EnumRow = EnumRowF;
/// Generic sequence of record rows potentially with a type variable or `Dyn` in tail position.
@ -95,7 +95,7 @@ pub type EnumRow = EnumRowF;
pub enum RecordRowsF<Ty, RRows> {
Empty,
Extend { row: RecordRowF<Ty>, tail: RRows },
TailVar(Ident),
TailVar(LocIdent),
TailDyn,
}
@ -112,7 +112,7 @@ pub enum RecordRowsF<Ty, RRows> {
pub enum EnumRowsF<ERows> {
Empty,
Extend { row: EnumRowF, tail: ERows },
TailVar(Ident),
TailVar(LocIdent),
}
/// The kind of a quantified type variable.
@ -273,7 +273,7 @@ pub enum TypeF<Ty, RRows, ERows> {
Var(Ident),
/// A forall binder.
Forall {
var: Ident,
var: LocIdent,
var_kind: VarKind,
body: Ty,
},
@ -636,7 +636,7 @@ impl Traverse<Type> for RecordRows {
}
#[derive(Clone, Debug)]
pub struct UnboundTypeVariableError(pub Ident);
pub struct UnboundTypeVariableError(pub LocIdent);
impl From<UnboundTypeVariableError> for EvalError {
fn from(err: UnboundTypeVariableError) -> Self {
@ -671,7 +671,7 @@ pub struct RecordRowsIterator<'a, Ty, RRows> {
pub enum RecordRowsIteratorItem<'a, Ty> {
TailDyn,
TailVar(&'a Ident),
TailVar(&'a LocIdent),
Row(RecordRowF<&'a Ty>),
}
@ -708,7 +708,7 @@ pub struct EnumRowsIterator<'a, ERows> {
}
pub enum EnumRowsIteratorItem<'a> {
TailVar(&'a Ident),
TailVar(&'a LocIdent),
Row(&'a EnumRowF),
}
@ -737,9 +737,13 @@ impl<'a> Iterator for EnumRowsIterator<'a, EnumRows> {
/// Helper used by the `subcontract` functions.
fn get_var_contract(
vars: &HashMap<Ident, RichTerm>,
id: &Ident,
sym: Ident,
pos: TermPos,
) -> Result<RichTerm, UnboundTypeVariableError> {
Ok(vars.get(id).ok_or(UnboundTypeVariableError(*id))?.clone())
Ok(vars
.get(&sym)
.ok_or(UnboundTypeVariableError(LocIdent::from(sym).with_pos(pos)))?
.clone())
}
impl EnumRows {
@ -748,8 +752,8 @@ impl EnumRows {
let mut cases = IndexMap::new();
let mut has_tail = false;
let value_arg = Ident::from("x");
let label_arg = Ident::from("l");
let value_arg = LocIdent::from("x");
let label_arg = LocIdent::from("l");
for row in self.iter() {
match row {
@ -831,7 +835,7 @@ impl RecordRows {
let tail = match &rrows.0 {
RecordRowsF::Empty => internals::empty_tail(),
RecordRowsF::TailDyn => internals::dyn_tail(),
RecordRowsF::TailVar(id) => get_var_contract(&vars, id)?,
RecordRowsF::TailVar(id) => get_var_contract(&vars, id.symbol(), id.pos)?,
// Safety: the while loop above ensures that `tail` cannot have the form `Extend`.
RecordRowsF::Extend { .. } => unreachable!(),
};
@ -855,7 +859,7 @@ impl RecordRows {
}
let next = self.iter().find_map(|item| match item {
RecordRowsIteratorItem::Row(row) if row.id == path[0] => Some(row.typ.clone()),
RecordRowsIteratorItem::Row(row) if row.id.symbol() == path[0] => Some(row.typ.clone()),
_ => None,
});
@ -969,7 +973,7 @@ impl Type {
t.subcontract(vars, pol, sy)?
),
TypeF::Flat(ref t) => t.clone(),
TypeF::Var(ref id) => get_var_contract(&vars, id)?,
TypeF::Var(id) => get_var_contract(&vars, id, self.pos)?,
TypeF::Forall {
ref var,
ref body,
@ -997,7 +1001,7 @@ impl Type {
mk_app!(internals::forall_tail(), sealing_key.clone(), excluded_ncl)
}
};
vars.insert(*var, contract);
vars.insert(var.symbol(), contract);
*sy += 1;
mk_app!(
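
// Illustrative sketch, not from the Nickel codebase: when lookups are keyed by
// a bare symbol, the position has to be re-attached at the point where an
// error is produced, as in the `UnboundTypeVariableError` construction above
// (`LocIdent::from(sym).with_pos(pos)`). `Span` and the names below are
// stand-ins for the sketch.
use std::collections::HashMap;

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct Sym(u32);

#[derive(Clone, Copy, Debug, PartialEq)]
struct Span {
    start: usize,
    end: usize,
}

#[derive(Debug, PartialEq)]
struct UnboundVar {
    sym: Sym,
    // Where the variable was used, for the diagnostic.
    pos: Option<Span>,
}

fn get_var(vars: &HashMap<Sym, String>, sym: Sym, pos: Option<Span>) -> Result<String, UnboundVar> {
    vars.get(&sym).cloned().ok_or(UnboundVar { sym, pos })
}

fn main() {
    let vars = HashMap::new();
    let err = get_var(&vars, Sym(7), Some(Span { start: 3, end: 4 })).unwrap_err();
    assert_eq!(err, UnboundVar { sym: Sym(7), pos: Some(Span { start: 3, end: 4 }) });
}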

View File

@ -1,7 +1,7 @@
use crate::{
destructuring::{FieldPattern, Match, RecordPattern},
error::TypecheckError,
identifier::Ident,
identifier::LocIdent,
mk_uty_row,
term::{IndexMap, LabeledType},
typ::{RecordRowF, RecordRowsF, TypeF},
@ -151,11 +151,11 @@ pub fn inject_pattern_variables(
pat.matches.iter().for_each(|m| match m {
Match::Simple(id, ..) => {
let ty = type_map.get_type(id);
env.insert(*id, ty);
env.insert(id.symbol(), ty);
}
Match::Assign(id, _, FieldPattern::Ident(bind_id)) => {
let ty = type_map.get_type(id);
env.insert(*bind_id, ty);
env.insert(bind_id.symbol(), ty);
}
Match::Assign(id, _, FieldPattern::RecordPattern(pat)) => {
let ty = type_map.get_type(id);
@ -180,7 +180,7 @@ pub fn inject_pattern_variables(
Match::Assign(id, _, FieldPattern::AliasedRecordPattern { alias, pattern }) => {
let ty = type_map.get_type(id);
env.insert(*alias, ty.clone());
env.insert(alias.symbol(), ty.clone());
let UnifType::Concrete{ typ: TypeF::Record(rs), .. } = ty else {
unreachable!("since this is a destructured record, \
@ -193,7 +193,7 @@ pub fn inject_pattern_variables(
if let Some(id) = pat.rest {
let rest_ty = type_map.rest();
env.insert(id, rest_ty);
env.insert(id.symbol(), rest_ty);
}
}
@ -204,7 +204,7 @@ pub fn inject_pattern_variables(
/// have already been "used" in the pattern, to ensure that we can
/// correctly construct the type of a `..rest` match, if it exists.
struct RecordTypes {
known_types: IndexMap<Ident, UnifType>,
known_types: IndexMap<LocIdent, UnifType>,
tail: UnifRecordRows,
}
@ -243,7 +243,7 @@ impl RecordTypes {
/// In the case of `RecordTypes::Rows`, `id` is also removed from the
/// map, so that it won't be considered as part of the "tail type"
/// when `rest` is called.
fn get_type(&mut self, id: &Ident) -> UnifType {
fn get_type(&mut self, id: &LocIdent) -> UnifType {
self.known_types
.remove(id)
.expect("Scopes of identifiers in destruct patterns should be checked already")

View File

@ -45,6 +45,7 @@
use super::*;
use crate::{
eval::{self, cache::Cache},
identifier::LocIdent,
term::{self, record::Field, IndexMap, UnaryOp},
};
@ -62,7 +63,7 @@ pub const MAX_GAS: u8 = 8;
/// `TermEnvironment::get_then` has to take a closure representing the continuation of the task to
/// do with the result instead of merely returning it.
pub trait TermEnvironment: Clone {
fn get_then<F, T>(&self, id: &Ident, f: F) -> T
fn get_then<F, T>(&self, id: Ident, f: F) -> T
where
F: FnOnce(Option<(&RichTerm, &Self)>) -> T;
}
@ -79,11 +80,11 @@ impl SimpleTermEnvironment {
}
impl TermEnvironment for SimpleTermEnvironment {
fn get_then<F, T>(&self, id: &Ident, f: F) -> T
fn get_then<F, T>(&self, id: Ident, f: F) -> T
where
F: FnOnce(Option<(&RichTerm, &SimpleTermEnvironment)>) -> T,
{
f(self.0.get(id).map(|(rt, env)| (rt, env)))
f(self.0.get(&id).map(|(rt, env)| (rt, env)))
}
}
@ -240,8 +241,8 @@ fn contract_eq_bounded<E: TermEnvironment>(
// if they have the same identifier: whatever global environment the term will be put in,
// free variables are not redefined locally and will be bound to the same value in any case.
(Var(id1), Var(id2)) => {
env1.get_then(id1, |binding1| {
env2.get_then(id2, |binding2| {
env1.get_then(id1.symbol(), |binding1| {
env2.get_then(id2.symbol(), |binding2| {
match (binding1, binding2) {
(None, None) => id1 == id2,
(Some((t1, env1)), Some((t2, env2))) => {
@ -259,7 +260,7 @@ fn contract_eq_bounded<E: TermEnvironment>(
}
(Var(id), _) => {
state.use_gas()
&& env1.get_then(id, |binding| {
&& env1.get_then(id.symbol(), |binding| {
binding
.map(|(t1, env1)| contract_eq_bounded(state, t1, env1, t2, env2))
.unwrap_or(false)
@ -267,7 +268,7 @@ fn contract_eq_bounded<E: TermEnvironment>(
}
(_, Var(id)) => {
state.use_gas()
&& env2.get_then(id, |binding| {
&& env2.get_then(id.symbol(), |binding| {
binding
.map(|(t2, env2)| contract_eq_bounded(state, t1, env1, t2, env2))
.unwrap_or(false)
@ -351,9 +352,9 @@ fn contract_eq_bounded<E: TermEnvironment>(
fn map_eq<V, F, E>(
mut f: F,
state: &mut State,
map1: &IndexMap<Ident, V>,
map1: &IndexMap<LocIdent, V>,
env1: &E,
map2: &IndexMap<Ident, V>,
map2: &IndexMap<LocIdent, V>,
env2: &E,
) -> bool
where
@ -373,8 +374,8 @@ where
/// returned. `None` is returned as well if a type encountered is not a row, or if it is an enum row.
fn rows_as_map<E: TermEnvironment>(
erows: &GenericUnifRecordRows<E>,
) -> Option<IndexMap<Ident, &GenericUnifType<E>>> {
let map: Option<IndexMap<Ident, _>> = erows
) -> Option<IndexMap<LocIdent, &GenericUnifType<E>>> {
let map: Option<IndexMap<LocIdent, _>> = erows
.iter()
.map(|item| match item {
GenericUnifRecordRowsIteratorItem::Row(RecordRowF { id, typ: types }) => {
@ -392,7 +393,7 @@ fn rows_as_map<E: TermEnvironment>(
/// Require the rows to be closed (i.e. the last element must be `RowEmpty`), otherwise `None` is
/// returned. `None` is returned as well if a type encountered is not row type, or if it is a
/// record row.
fn rows_as_set(erows: &UnifEnumRows) -> Option<HashSet<Ident>> {
fn rows_as_set(erows: &UnifEnumRows) -> Option<HashSet<LocIdent>> {
let set: Option<HashSet<_>> = erows
.iter()
.map(|item| match item {
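
// Illustrative sketch, not from the Nickel codebase: a continuation-passing
// lookup in the style of `TermEnvironment::get_then`. Passing a closure lets
// an implementation hand out borrows that it could not return directly (for
// example when the data sits behind a lock or an interior-mutability cell; the
// motivation here is an assumption of the sketch).
use std::collections::HashMap;
use std::sync::Mutex;

struct LockedEnv {
    inner: Mutex<HashMap<u32, String>>,
}

impl LockedEnv {
    // The borrow of the stored value cannot outlive the mutex guard, so the
    // caller's work has to happen inside the closure.
    fn get_then<F, T>(&self, id: u32, f: F) -> T
    where
        F: FnOnce(Option<&String>) -> T,
    {
        let guard = self.inner.lock().unwrap();
        f(guard.get(&id))
    }
}

fn main() {
    let env = LockedEnv {
        inner: Mutex::new(HashMap::from([(0, "bound term".to_owned())])),
    };
    let len = env.get_then(0, |binding| binding.map(|s| s.len()).unwrap_or(0));
    assert_eq!(len, 10);
}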

View File

@ -2,7 +2,7 @@
use super::{reporting, State, UnifType, VarId};
use crate::{
error::TypecheckError,
identifier::Ident,
identifier::LocIdent,
label::ty_path,
position::TermPos,
term::RichTerm,
@ -13,23 +13,23 @@ use crate::{
#[derive(Debug, PartialEq)]
pub enum RowUnifError {
/// The LHS had a binding that was missing in the RHS.
MissingRow(Ident),
MissingRow(LocIdent),
/// The LHS had a `Dyn` tail that was missing in the RHS.
MissingDynTail(),
/// The RHS had a binding that was not in the LHS.
ExtraRow(Ident),
ExtraRow(LocIdent),
/// The RHS had an additional `Dyn` tail.
ExtraDynTail(),
/// There were two incompatible definitions for the same row.
RowMismatch(Ident, Box<UnifError>),
RowMismatch(LocIdent, Box<UnifError>),
/// A [row constraint][super::RowConstr] was violated.
UnsatConstr(Ident, UnifType),
UnsatConstr(LocIdent, UnifType),
/// Tried to unify a type constant with another different type.
WithConst(VarKindDiscriminant, usize, UnifType),
/// Tried to unify two distinct type constants.
ConstMismatch(VarKindDiscriminant, usize, usize),
/// An unbound type variable was referenced.
UnboundTypeVariable(Ident),
UnboundTypeVariable(LocIdent),
/// Tried to unify a constant with a unification variable with a strictly lower level.
VarLevelMismatch {
constant_id: VarId,
@ -75,27 +75,27 @@ pub enum UnifError {
/// Tried to unify two incompatible types.
TypeMismatch(UnifType, UnifType),
/// There are two incompatible definitions for the same row.
RowMismatch(Ident, UnifType, UnifType, Box<UnifError>),
RowMismatch(LocIdent, UnifType, UnifType, Box<UnifError>),
/// Tried to unify two distinct type constants.
ConstMismatch(VarKindDiscriminant, usize, usize),
/// Tried to unify two rows, but an identifier of the LHS was absent from the RHS.
MissingRow(Ident, UnifType, UnifType),
MissingRow(LocIdent, UnifType, UnifType),
/// Tried to unify two rows, but the `Dyn` tail of the RHS was absent from the LHS.
MissingDynTail(UnifType, UnifType),
/// Tried to unify two rows, but an identifier of the RHS was absent from the LHS.
ExtraRow(Ident, UnifType, UnifType),
ExtraRow(LocIdent, UnifType, UnifType),
/// Tried to unify two rows, but the `Dyn` tail of the RHS was absent from the LHS.
ExtraDynTail(UnifType, UnifType),
/// Tried to unify a unification variable with a row type violating the [row
/// constraints][super::RowConstr] of the variable.
RowConflict(Ident, UnifType, UnifType, UnifType),
RowConflict(LocIdent, UnifType, UnifType, UnifType),
/// Tried to unify a type constant with another different type.
WithConst(VarKindDiscriminant, usize, UnifType),
/// A flat type, which is an opaque type corresponding to custom contracts, contained a Nickel
/// term different from a variable. Only a variable is a legal inner term of a flat type.
IncomparableFlatTypes(RichTerm, RichTerm),
/// An unbound type variable was referenced.
UnboundTypeVariable(Ident),
UnboundTypeVariable(LocIdent),
/// An error occurred when unifying the domains of two arrows.
DomainMismatch(UnifType, UnifType, Box<UnifError>),
/// An error occurred when unifying the codomains of two arrows.
@ -216,7 +216,7 @@ impl UnifError {
constant_id,
var_kind,
} => TypecheckError::VarLevelMismatch {
type_var: names_reg.gen_cst_name(constant_id, var_kind),
type_var: names_reg.gen_cst_name(constant_id, var_kind).into(),
pos: pos_opt,
},
}

View File

@ -26,7 +26,7 @@ use std::ops::{Deref, DerefMut};
use super::UnifType;
use crate::term::RichTerm;
use crate::{identifier::Ident, term::record::Field};
use crate::{identifier::LocIdent, term::record::Field};
/// Holds the state of a linearization, either in progress or finalized
/// Restricts the possible states of a linearization to entities marked
@ -110,7 +110,7 @@ pub trait Linearizer {
fn retype_ident(
&mut self,
_lin: &mut Linearization<Self::Building>,
_ident: &Ident,
_ident: &LocIdent,
_new_type: UnifType,
) {
}

View File

@ -34,7 +34,7 @@ macro_rules! mk_uty_enum_row {
( $id:expr $(, $ids:expr )* $(; $tail:expr)?) => {
$crate::typecheck::UnifEnumRows::concrete(
$crate::typ::EnumRowsF::Extend {
row: Ident::from($id),
row: LocIdent::from($id),
tail: Box::new($crate::mk_uty_enum_row!($( $ids ),* $(; $tail)?))
}
)
@ -59,7 +59,7 @@ macro_rules! mk_uty_row {
$crate::typecheck::UnifRecordRows::concrete(
$crate::typ::RecordRowsF::Extend {
row: $crate::typ::RecordRowF {
id: Ident::from($id),
id: LocIdent::from($id),
typ: Box::new($ty.into()),
},
tail: Box::new($crate::mk_uty_row!($(($ids, $tys)),* $(; $tail)?)),

View File

@ -57,7 +57,7 @@ use crate::{
cache::ImportResolver,
environment::Environment as GenericEnvironment,
error::TypecheckError,
identifier::Ident,
identifier::{Ident, LocIdent},
stdlib as nickel_stdlib,
term::{
record::Field, LabeledType, RichTerm, StrChunk, Term, Traverse, TraverseOrder,
@ -495,23 +495,23 @@ impl<E: TermEnvironment> GenericUnifRecordRows<E> {
// A type which contains variables that can be substituted with values of type `T`.
trait Subst<T: Clone>: Sized {
// Substitute all variables of identifier `id` with `to`.
fn subst(self, id: &Ident, to: &T) -> Self {
fn subst(self, id: &LocIdent, to: &T) -> Self {
self.subst_levels(id, to).0
}
// Must be implemented by implementers of this trait.
// In addition to performing substitution, this method threads variable level upper bounds to
// compute new upper bounds efficiently.
fn subst_levels(self, id: &Ident, to: &T) -> (Self, VarLevel);
fn subst_levels(self, id: &LocIdent, to: &T) -> (Self, VarLevel);
}
impl<E: TermEnvironment> Subst<GenericUnifType<E>> for GenericUnifType<E> {
fn subst_levels(self, id: &Ident, to: &GenericUnifType<E>) -> (Self, VarLevel) {
fn subst_levels(self, id: &LocIdent, to: &GenericUnifType<E>) -> (Self, VarLevel) {
match self {
GenericUnifType::Concrete {
typ: TypeF::Var(var_id),
var_levels_data,
} if var_id == *id => {
} if var_id == id.symbol() => {
debug_assert!(var_levels_data.upper_bound == VarLevel::NO_VAR);
(to.clone(), to.var_level_upper_bound())
}
@ -553,7 +553,7 @@ impl<E: TermEnvironment> Subst<GenericUnifType<E>> for GenericUnifType<E> {
}
impl<E: TermEnvironment> Subst<GenericUnifType<E>> for GenericUnifRecordRows<E> {
fn subst_levels(self, id: &Ident, to: &GenericUnifType<E>) -> (Self, VarLevel) {
fn subst_levels(self, id: &LocIdent, to: &GenericUnifType<E>) -> (Self, VarLevel) {
match self {
GenericUnifRecordRows::Concrete {
rrows,
@ -594,7 +594,7 @@ impl<E: TermEnvironment> Subst<GenericUnifType<E>> for GenericUnifRecordRows<E>
}
impl<E: TermEnvironment> Subst<GenericUnifRecordRows<E>> for GenericUnifType<E> {
fn subst_levels(self, id: &Ident, to: &GenericUnifRecordRows<E>) -> (Self, VarLevel) {
fn subst_levels(self, id: &LocIdent, to: &GenericUnifRecordRows<E>) -> (Self, VarLevel) {
match self {
GenericUnifType::Concrete {
typ,
@ -633,7 +633,7 @@ impl<E: TermEnvironment> Subst<GenericUnifRecordRows<E>> for GenericUnifType<E>
}
impl<E: TermEnvironment> Subst<GenericUnifRecordRows<E>> for GenericUnifRecordRows<E> {
fn subst_levels(self, id: &Ident, to: &GenericUnifRecordRows<E>) -> (Self, VarLevel) {
fn subst_levels(self, id: &LocIdent, to: &GenericUnifRecordRows<E>) -> (Self, VarLevel) {
match self {
GenericUnifRecordRows::Concrete {
rrows: RecordRowsF::TailVar(var_id),
@ -681,7 +681,7 @@ impl<E: TermEnvironment> Subst<GenericUnifRecordRows<E>> for GenericUnifRecordRo
}
impl<E: TermEnvironment> Subst<UnifEnumRows> for GenericUnifType<E> {
fn subst_levels(self, id: &Ident, to: &UnifEnumRows) -> (Self, VarLevel) {
fn subst_levels(self, id: &LocIdent, to: &UnifEnumRows) -> (Self, VarLevel) {
match self {
GenericUnifType::Concrete {
typ,
@ -727,7 +727,7 @@ impl<E: TermEnvironment> Subst<UnifEnumRows> for GenericUnifType<E> {
}
impl<E: TermEnvironment> Subst<UnifEnumRows> for GenericUnifRecordRows<E> {
fn subst_levels(self, id: &Ident, to: &UnifEnumRows) -> (Self, VarLevel) {
fn subst_levels(self, id: &LocIdent, to: &UnifEnumRows) -> (Self, VarLevel) {
match self {
GenericUnifRecordRows::Concrete {
rrows,
@ -769,7 +769,7 @@ impl<E: TermEnvironment> Subst<UnifEnumRows> for GenericUnifRecordRows<E> {
}
impl Subst<UnifEnumRows> for UnifEnumRows {
fn subst_levels(self, id: &Ident, to: &UnifEnumRows) -> (Self, VarLevel) {
fn subst_levels(self, id: &LocIdent, to: &UnifEnumRows) -> (Self, VarLevel) {
match self {
UnifEnumRows::Concrete {
erows: EnumRowsF::TailVar(var_id),
@ -987,7 +987,7 @@ impl From<EnumRowsF<Box<UnifEnumRows>>> for UnifEnumRows {
/// Iterator items produced by [RecordRowsIterator] on [GenericUnifRecordRows].
pub enum GenericUnifRecordRowsIteratorItem<'a, E: TermEnvironment> {
TailDyn,
TailVar(&'a Ident),
TailVar(&'a LocIdent),
TailUnifVar { id: VarId, init_level: VarLevel },
TailConstant(VarId),
Row(RecordRowF<&'a GenericUnifType<E>>),
@ -1041,7 +1041,7 @@ impl<'a, E: TermEnvironment> Iterator
/// Iterator items produced by [`EnumRowsIterator`].
pub enum UnifEnumRowsIteratorItem<'a> {
TailVar(&'a Ident),
TailVar(&'a LocIdent),
TailUnifVar { id: VarId, init_level: VarLevel },
TailConstant(VarId),
Row(&'a EnumRow),
@ -1152,14 +1152,14 @@ pub fn mk_initial_ctxt(
let term_env = bindings
.iter()
.cloned()
.map(|(id, rt)| (id, (rt, SimpleTermEnvironment::new())))
.map(|(id, rt)| (id.symbol(), (rt, SimpleTermEnvironment::new())))
.collect();
let type_env = bindings
.into_iter()
.map(|(id, rt)| {
(
id,
id.symbol(),
infer_record_type(&rt, &term_env, INFER_RECORD_MAX_DEPTH),
)
})
@ -1190,7 +1190,7 @@ pub fn env_add_term(
field_apparent_type(field, Some(env), Some(resolver)),
term_env,
);
env.insert(*id, uty);
env.insert(id.symbol(), uty);
}
Ok(())
@ -1202,13 +1202,13 @@ pub fn env_add_term(
/// Bind one term in a typing environment.
pub fn env_add(
env: &mut Environment,
id: Ident,
id: LocIdent,
rt: &RichTerm,
term_env: &SimpleTermEnvironment,
resolver: &dyn ImportResolver,
) {
env.insert(
id,
id.symbol(),
UnifType::from_apparent_type(
apparent_type(rt.as_ref(), Some(env), Some(resolver)),
term_env,
@ -1347,7 +1347,7 @@ fn walk<L: Linearizer>(
| Term::Import(_)
| Term::ResolvedImport(_) => Ok(()),
Term::Var(x) => ctxt.type_env
.get(x)
.get(&x.symbol())
.ok_or(TypecheckError::UnboundIdentifier(*x, *pos))
.map(|_| ()),
Term::StrChunks(chunks) => {
@ -1364,13 +1364,13 @@ fn walk<L: Linearizer>(
}
Term::Fun(id, t) => {
// The parameter of an unannotated function is always assigned type `Dyn`.
ctxt.type_env.insert(*id, mk_uniftype::dynamic());
ctxt.type_env.insert(id.symbol(), mk_uniftype::dynamic());
walk(state, ctxt, lin, linearizer, t)
}
Term::FunPattern(id, pat, t) => {
if let Some(id) = id {
// The parameter of an unannotated function is always assigned type `Dyn`.
ctxt.type_env.insert(*id, mk_uniftype::dynamic());
ctxt.type_env.insert(id.symbol(), mk_uniftype::dynamic());
}
let pattern_ty = destructuring::build_pattern_type_walk_mode(state, &ctxt, pat)?;
@ -1392,17 +1392,17 @@ fn walk<L: Linearizer>(
// allocate all the term environments inside an arena, local to each statically typed
// block, and use bare references to represent cycles. Then everything would be cleaned
// at the end of the block.
ctxt.term_env.0.insert(*x, (re.clone(), ctxt.term_env.clone()));
ctxt.term_env.0.insert(x.symbol(), (re.clone(), ctxt.term_env.clone()));
if attrs.rec {
ctxt.type_env.insert(*x, ty_let.clone());
ctxt.type_env.insert(x.symbol(), ty_let.clone());
}
linearizer.retype_ident(lin, x, ty_let.clone());
walk(state, ctxt.clone(), lin, linearizer.scope(), re)?;
if !attrs.rec {
ctxt.type_env.insert(*x, ty_let);
ctxt.type_env.insert(x.symbol(), ty_let);
}
walk(state, ctxt, lin, linearizer, rt)
@ -1413,7 +1413,7 @@ fn walk<L: Linearizer>(
if let Some(x) = x {
linearizer.retype_ident(lin, x, ty_let.clone());
ctxt.type_env.insert(*x, ty_let);
ctxt.type_env.insert(x.symbol(), ty_let);
}
let pattern_ty = destructuring::build_pattern_type_walk_mode(state, &ctxt, pat)?;
@ -1438,7 +1438,7 @@ fn walk<L: Linearizer>(
&ctxt,
false,
);
ctxt.type_env.insert(*id, field_type.clone());
ctxt.type_env.insert(id.symbol(), field_type.clone());
linearizer.retype_ident(lin, id, field_type);
}
@ -1723,7 +1723,7 @@ fn check<L: Linearizer>(
ty.unify(arr, state, &ctxt)
.map_err(|err| err.into_typecheck_err(state, rt.pos))?;
ctxt.type_env.insert(*x, src);
ctxt.type_env.insert(x.symbol(), src);
check(state, ctxt, lin, linearizer, t, trg)
}
Term::FunPattern(x, pat, t) => {
@ -1734,7 +1734,7 @@ fn check<L: Linearizer>(
if let Some(x) = x {
linearizer.retype_ident(lin, x, src.clone());
ctxt.type_env.insert(*x, src);
ctxt.type_env.insert(x.symbol(), src);
}
destructuring::inject_pattern_variables(state, &mut ctxt.type_env, pat, src_rows_ty);
@ -1773,10 +1773,10 @@ fn check<L: Linearizer>(
// `Let` case in `walk`.
ctxt.term_env
.0
.insert(*x, (re.clone(), ctxt.term_env.clone()));
.insert(x.symbol(), (re.clone(), ctxt.term_env.clone()));
if attrs.rec {
ctxt.type_env.insert(*x, ty_let.clone());
ctxt.type_env.insert(x.symbol(), ty_let.clone());
}
linearizer.retype_ident(lin, x, ty_let.clone());
@ -1790,7 +1790,7 @@ fn check<L: Linearizer>(
)?;
if !attrs.rec {
ctxt.type_env.insert(*x, ty_let);
ctxt.type_env.insert(x.symbol(), ty_let);
}
check(state, ctxt, lin, linearizer, rt, ty)
}
@ -1818,7 +1818,7 @@ fn check<L: Linearizer>(
if let Some(x) = x {
linearizer.retype_ident(lin, x, ty_let.clone());
ctxt.type_env.insert(*x, ty_let);
ctxt.type_env.insert(x.symbol(), ty_let);
}
destructuring::inject_pattern_variables(
@ -1908,7 +1908,7 @@ fn check<L: Linearizer>(
//TODO: should we insert in the environment the checked type, or the actual type?
for id in record.fields.keys() {
ctxt.type_env.insert(*id, ty_dict.clone());
ctxt.type_env.insert(id.symbol(), ty_dict.clone());
linearizer.retype_ident(lin, id, ty_dict.clone())
}
@ -1938,7 +1938,7 @@ fn check<L: Linearizer>(
if let Term::RecRecord(..) = t.as_ref() {
for (id, field) in &record.fields {
let uty = field_type(state, field, &ctxt, true);
ctxt.type_env.insert(*id, uty.clone());
ctxt.type_env.insert(id.symbol(), uty.clone());
linearizer.retype_ident(lin, id, uty);
}
}
@ -1971,7 +1971,7 @@ fn check<L: Linearizer>(
})
} else {
// Building the type {id1 : ?a1, id2: ?a2, .., idn: ?an}
let mut field_types: HashMap<Ident, UnifType> = record
let mut field_types: HashMap<LocIdent, UnifType> = record
.fields
.keys()
.map(|id| (*id, state.table.fresh_type_uvar(ctxt.var_level)))
@ -1987,7 +1987,7 @@ fn check<L: Linearizer>(
for (id, field) in record.fields.iter() {
if let Term::RecRecord(..) = t.as_ref() {
let affected_type = ctxt.type_env.get(id).cloned().unwrap();
let affected_type = ctxt.type_env.get(&id.symbol()).cloned().unwrap();
field_types
.get(id)
@ -2081,7 +2081,7 @@ fn check_field<L: Linearizer>(
ctxt: Context,
lin: &mut Linearization<L::Building>,
mut linearizer: L,
id: Ident,
id: LocIdent,
field: &Field,
ty: UnifType,
) -> Result<(), TypecheckError> {
@ -2219,7 +2219,7 @@ fn infer<L: Linearizer>(
Term::Var(x) => {
let x_ty = ctxt
.type_env
.get(x)
.get(&x.symbol())
.cloned()
.ok_or(TypecheckError::UnboundIdentifier(*x, *pos))?;
@ -2495,7 +2495,7 @@ pub fn apparent_type(
Type::from(TypeF::Dyn),
)))),
Term::Var(id) => env
.and_then(|envs| envs.get(id).cloned())
.and_then(|envs| envs.get(&id.symbol()).cloned())
.map(ApparentType::FromEnv)
.unwrap_or(ApparentType::Approximated(Type::from(TypeF::Dyn))),
Term::ResolvedImport(file_id) => match resolver {
@ -2647,7 +2647,7 @@ fn instantiate_foralls(
init_level: ctxt.var_level,
},
};
state.names.insert((fresh_uid, kind), var);
state.names.insert((fresh_uid, kind), var.symbol());
ty = body.subst(&var, &uvar);
}
VarKind::RecordRows { excluded } => {
@ -2659,7 +2659,7 @@ fn instantiate_foralls(
init_level: ctxt.var_level,
},
};
state.names.insert((fresh_uid, kind), var);
state.names.insert((fresh_uid, kind), var.symbol());
ty = body.subst(&var, &uvar);
if inst == ForallInst::UnifVar {
@ -2675,7 +2675,7 @@ fn instantiate_foralls(
init_level: ctxt.var_level,
},
};
state.names.insert((fresh_uid, kind), var);
state.names.insert((fresh_uid, kind), var.symbol());
ty = body.subst(&var, &uvar);
}
};

View File

@ -91,9 +91,9 @@ impl NameReg {
}
}
let ident = Ident::from(name);
self.insert(id, kind, ident);
ident
let sym = Ident::from(name);
self.insert(id, kind, sym);
sym
}
/// Either retrieve or generate a new fresh name for a unification variable for error reporting,
@ -139,10 +139,10 @@ impl NameReg {
match rrows {
UnifRecordRows::UnifVar { id, .. } => RecordRows(RecordRowsF::TailVar(
reg.gen_var_name(id, VarKindDiscriminant::RecordRows),
reg.gen_var_name(id, VarKindDiscriminant::RecordRows).into(),
)),
UnifRecordRows::Constant(id) => RecordRows(RecordRowsF::TailVar(
reg.gen_cst_name(id, VarKindDiscriminant::RecordRows),
reg.gen_cst_name(id, VarKindDiscriminant::RecordRows).into(),
)),
UnifRecordRows::Concrete { rrows, .. } => {
let mapped = rrows.map_state(
@ -160,10 +160,10 @@ impl NameReg {
match erows {
UnifEnumRows::UnifVar { id, .. } => EnumRows(EnumRowsF::TailVar(
reg.gen_var_name(id, VarKindDiscriminant::EnumRows),
reg.gen_var_name(id, VarKindDiscriminant::EnumRows).into(),
)),
UnifEnumRows::Constant(id) => EnumRows(EnumRowsF::TailVar(
reg.gen_cst_name(id, VarKindDiscriminant::EnumRows),
reg.gen_cst_name(id, VarKindDiscriminant::EnumRows).into(),
)),
UnifEnumRows::Concrete { erows, .. } => {
let mapped = erows.map(|erows| Box::new(erows_to_type(reg, table, *erows)));

View File

@ -920,7 +920,7 @@ pub fn constr_unify_rrows(
UnifRecordRows::Concrete {
rrows: RecordRowsF::Extend { row, .. },
..
} if p_constr.contains(&row.id) => Err(RowUnifError::UnsatConstr(
} if p_constr.contains(&row.id.symbol()) => Err(RowUnifError::UnsatConstr(
row.id,
UnifType::concrete(TypeF::Record(rrows.clone())),
)),
@ -1077,7 +1077,7 @@ impl Unify for UnifType {
substd1.unify(substd2, state, ctxt)
}
(TypeF::Var(ident), _) | (_, TypeF::Var(ident)) => {
Err(UnifError::UnboundTypeVariable(ident))
Err(UnifError::UnboundTypeVariable(ident.into()))
}
(ty1, ty2) => Err(UnifError::TypeMismatch(
UnifType::concrete(ty1),
@ -1335,7 +1335,7 @@ trait RemoveRow: Sized {
// - Otherwise, raise a missing row error.
fn remove_row(
self,
row_id: &Ident,
row_id: &LocIdent,
state: &mut State,
var_level: VarLevel,
) -> Result<(Self::RowContent, Self), Self::Error>;
@ -1356,7 +1356,7 @@ impl RemoveRow for UnifRecordRows {
fn remove_row(
self,
target: &Ident,
target: &LocIdent,
state: &mut State,
var_level: VarLevel,
) -> Result<(UnifType, UnifRecordRows), RemoveRRowError> {
@ -1371,7 +1371,7 @@ impl RemoveRow for UnifRecordRows {
row: next_row,
tail,
} => {
if *target == next_row.id {
if target.symbol() == next_row.id.symbol() {
Ok((*next_row.typ, *tail))
} else {
let (extracted_row, rest) = tail.remove_row(target, state, var_level)?;
@ -1388,7 +1388,7 @@ impl RemoveRow for UnifRecordRows {
UnifRecordRows::UnifVar { id: var_id, .. } => {
let excluded = state.constr.entry(var_id).or_default();
if !excluded.insert(*target) {
if !excluded.insert(target.symbol()) {
return Err(RemoveRRowError::Conflict);
}
@ -1423,7 +1423,7 @@ impl RemoveRow for UnifEnumRows {
fn remove_row(
self,
target: &Ident,
target: &LocIdent,
state: &mut State,
var_level: VarLevel,
) -> Result<((), UnifEnumRows), RowUnifError> {

View File

@ -1,5 +1,8 @@
use nickel_lang_core::term::{record::FieldDeps, IndexMap};
use nickel_lang_core::{identifier::Ident, term::Term, transform::free_vars};
use nickel_lang_core::{
identifier::Ident,
term::{record::FieldDeps, IndexMap, Term},
transform::free_vars,
};
use std::collections::HashSet;
use std::iter::IntoIterator;
@ -18,7 +21,7 @@ fn stat_free_vars_incl(
) -> bool {
stat_fields
.iter()
.all(|(id, set)| free_vars_eq(set, expected.remove(id.as_ref()).unwrap()))
.all(|(id, set)| free_vars_eq(set, expected.remove(id.label()).unwrap()))
}
fn dyn_free_vars_incl(dyn_fields: &[FieldDeps], mut expected: Vec<Vec<&str>>) -> bool {

View File

@ -2,7 +2,7 @@ use std::collections::{hash_map::Entry, HashMap};
use lsp_types::{CompletionItem, CompletionItemKind, Documentation, MarkupContent, MarkupKind};
use nickel_lang_core::{
identifier::Ident,
identifier::{Ident, LocIdent},
pretty::ident_quoted,
term::{record::FieldMetadata, BinaryOp, RichTerm, Term},
};
@ -17,7 +17,7 @@ pub struct Def {
/// Remember that an `Ident` has a position; this one points to the identifier
/// at the position where it is bound. For example, in `{ foo = 1 }`, this ident
/// might point at the `foo`.
pub ident: Ident,
pub ident: LocIdent,
/// The value assigned by the definition, if there is one.
///
/// For example, in `{ foo = 1 }`, this could point at the `1`.
@ -64,15 +64,13 @@ impl Def {
/// A map from identifiers to the defs that they refer to.
#[derive(Clone, Debug, Default)]
struct FieldDefs {
// The key to this map is really a Symbol rather than an Ident. Since the interner is not
// public, we use an Ident that has had its location removed.
fields: HashMap<Ident, Vec<Def>>,
}
/// Resolve a record path iteratively, returning the names of all the fields defined on the final path element.
pub fn resolve_path<'a>(
rt: &'a RichTerm,
mut path: &'a [Ident],
mut path: &'a [LocIdent],
linearization: &Completed,
server: &Server,
) -> impl Iterator<Item = Def> {
@ -80,7 +78,7 @@ pub fn resolve_path<'a>(
while let Some((id, tail)) = path.split_first() {
path = tail;
let defs = fields.fields.remove(&id.without_pos()).unwrap_or_default();
let defs = fields.fields.remove(&id.symbol()).unwrap_or_default();
fields.fields.clear();
for rt in defs.into_iter().filter_map(|d| d.value) {
@ -108,7 +106,7 @@ impl FieldDefs {
.iter()
.map(|(&ident, field)| {
(
ident.without_pos(),
ident.symbol(),
vec![Def {
ident,
value: field.value.clone(),
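
// Illustrative sketch, not from the Nickel codebase: the overall shape of
// `resolve_path` above, on a toy nested-record value. Each path segment is
// looked up by its bare name (the `.symbol()` in the real code), and the
// fields found at the final segment are returned.
use std::collections::HashMap;

#[derive(Clone, Debug)]
enum Value {
    Record(HashMap<String, Value>),
    Num(i64),
}

fn resolve_path(mut current: &Value, path: &[&str]) -> Vec<String> {
    for segment in path {
        match current {
            Value::Record(fields) => match fields.get(*segment) {
                Some(next) => current = next,
                None => return Vec::new(),
            },
            _ => return Vec::new(),
        }
    }
    match current {
        Value::Record(fields) => fields.keys().cloned().collect(),
        _ => Vec::new(),
    }
}

fn main() {
    let inner = Value::Record(HashMap::from([("c".to_owned(), Value::Num(1))]));
    let outer = Value::Record(HashMap::from([("b".to_owned(), inner)]));
    let root = Value::Record(HashMap::from([("a".to_owned(), outer)]));
    let mut names = resolve_path(&root, &["a", "b"]);
    names.sort();
    assert_eq!(names, vec!["c".to_owned()]);
}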

View File

@ -4,7 +4,7 @@ use codespan::FileId;
use log::debug;
use nickel_lang_core::{
cache::Cache,
identifier::Ident,
identifier::{Ident, LocIdent},
position::TermPos,
term::{record::Field, IndexMap, RichTerm},
typ::TypeF,
@ -115,7 +115,7 @@ impl<'b> Building<'b> {
&mut self,
current_file: FileId,
record_term: &RichTerm,
record_fields: &IndexMap<Ident, Field>,
record_fields: &IndexMap<LocIdent, Field>,
record: ItemId,
env: &mut Environment,
) {
@ -139,9 +139,8 @@ impl<'b> Building<'b> {
},
metadata: Some(field.metadata.clone()),
});
let key = *ident;
env.insert(key, id);
self.add_record_field(current_file, record, (*ident, id))
env.insert(ident.symbol(), id);
self.add_record_field(current_file, record, (ident.symbol(), id))
}
}
@ -189,7 +188,7 @@ impl<'b> Building<'b> {
while let Some(id) = ids.pop() {
match curr_item {
TermKind::Record(ref fields) => {
let item = fields.get(&id)?;
let item = fields.get(&id.symbol())?;
let item_kind = self.get_item_kind(current_file, *item)?;
match item_kind {
TermKind::RecordField {
@ -236,9 +235,9 @@ impl<'b> Building<'b> {
pub(super) fn resolve_record_references(
&mut self,
current_file: FileId,
mut defers: Vec<(ItemId, ItemId, Ident)>,
) -> Vec<(ItemId, ItemId, Ident)> {
let mut unresolved: Vec<(ItemId, ItemId, Ident)> = Vec::new();
mut defers: Vec<(ItemId, ItemId, LocIdent)>,
) -> Vec<(ItemId, ItemId, LocIdent)> {
let mut unresolved: Vec<(ItemId, ItemId, LocIdent)> = Vec::new();
while let Some(deferred) = defers.pop() {
// child_item: current deferred usage item
@ -275,9 +274,11 @@ impl<'b> Building<'b> {
// get record field
.and_then(|parent_declaration| match &parent_declaration {
TermKind::Record(fields) => {
fields.get(child_ident).and_then(|child_declaration_id| {
self.get_item_kind_with_id(current_file, *child_declaration_id)
})
fields
.get(&child_ident.symbol())
.and_then(|child_declaration_id| {
self.get_item_kind_with_id(current_file, *child_declaration_id)
})
}
_ => None,
});

View File

@ -1,6 +1,10 @@
use std::collections::HashMap;
use nickel_lang_core::{identifier::Ident, typ::Type, typecheck::UnifType};
use nickel_lang_core::{
identifier::{Ident, LocIdent},
typ::Type,
typecheck::UnifType,
};
use super::ItemId;
@ -24,7 +28,7 @@ impl ResolutionState for Resolved {}
#[derive(Debug, Clone, PartialEq)]
pub enum TermKind {
Declaration {
id: Ident,
id: LocIdent,
usages: Vec<ItemId>,
value: ValueState,
// This is the path to a bound variable. If we have
@ -33,12 +37,12 @@ pub enum TermKind {
// If we have `let { a = {b = {c = somevar, ..}, ..}, ..} = ...`
// instead, the `path` remains the same, but the ident will be `somevar`
// If there is no pattern variable bound, the `path` is `None`
path: Option<Vec<Ident>>,
path: Option<Vec<LocIdent>>,
},
Usage(UsageState),
Record(HashMap<Ident, ItemId>),
RecordField {
ident: Ident,
ident: LocIdent,
record: ItemId,
usages: Vec<ItemId>,
value: ValueState,
@ -67,7 +71,7 @@ impl ValueState {
pub enum UsageState {
Unbound,
Resolved(ItemId),
Deferred { parent: ItemId, child: Ident },
Deferred { parent: ItemId, child: LocIdent },
}
impl From<Option<ItemId>> for UsageState {

View File

@ -3,7 +3,7 @@ use std::{collections::HashMap, marker::PhantomData};
use codespan::FileId;
use log::debug;
use nickel_lang_core::{
identifier::Ident,
identifier::{Ident, LocIdent},
position::TermPos,
term::{
record::{Field, FieldMetadata},
@ -108,7 +108,7 @@ pub struct AnalysisHost<'a> {
/// in their own scope immediately after the record, which
/// gives the corresponding record field _term_ to the ident
/// useable to construct a vale declaration.
record_fields: Option<(ItemId, Vec<(ItemId, Ident)>)>,
record_fields: Option<(ItemId, Vec<(ItemId, LocIdent)>)>,
bindings: Option<Vec<ItemId>>,
/// Accesses to nested records are recorded recursively.
///
@ -119,7 +119,7 @@ pub struct AnalysisHost<'a> {
/// To resolve those inner fields, accessors (`inner`, `middle`)
/// are recorded first until a variable (`outer`) is found.
/// Then, accesses to all nested records are resolved at once.
access: Option<Vec<Ident>>,
access: Option<Vec<LocIdent>>,
}
impl<'a> AnalysisHost<'a> {
@ -251,7 +251,7 @@ impl<'a> Linearizer for AnalysisHost<'a> {
file_id: self.file,
index: id_gen.get_and_advance(),
};
self.env.insert(ident.to_owned(), id);
self.env.insert(ident.symbol(), id);
let kind = TermKind::Declaration {
id: ident.to_owned(),
@ -294,7 +294,7 @@ impl<'a> Linearizer for AnalysisHost<'a> {
let_pattern_bindings.push(id);
let new_ident = bind_ident.unwrap_or(*ident);
self.env.insert(new_ident, id);
self.env.insert(new_ident.symbol(), id);
lin.push(LinearizationItem {
env: self.env.clone(),
term: rt.clone(),
@ -343,7 +343,7 @@ impl<'a> Linearizer for AnalysisHost<'a> {
_ => unreachable!(),
};
self.env.insert(
ident.to_owned(),
ident.symbol(),
ItemId {
file_id: self.file,
index: id_gen.get(),
@ -380,8 +380,7 @@ impl<'a> Linearizer for AnalysisHost<'a> {
ident, self.access
);
let key = ident.to_owned();
let pointed = self.env.get(&key).copied();
let pointed = self.env.get(&ident.symbol()).copied();
lin.push(LinearizationItem {
env: self.env.clone(),
term: rt.clone(),
@ -567,7 +566,7 @@ impl<'a> Linearizer for AnalysisHost<'a> {
let mut name_reg = NameReg::new(reported_names);
// TODO: Storing defers while linearizing?
let mut defers: Vec<(ItemId, ItemId, Ident)> = lin
let mut defers: Vec<(ItemId, ItemId, LocIdent)> = lin
.linearization
.iter()
.filter_map(|item| match &item.kind {
@ -678,12 +677,12 @@ impl<'a> Linearizer for AnalysisHost<'a> {
fn retype_ident(
&mut self,
lin: &mut Linearization<Building>,
ident: &Ident,
ident: &LocIdent,
new_type: UnifType,
) {
if let Some(item) = self
.env
.get(&ident.to_owned())
.get(&ident.symbol())
.and_then(|item_id| lin.linearization.get_mut(item_id.index))
{
debug!("retyping {:?} to {:?}", ident, new_type);

View File

@ -5,7 +5,7 @@ use lsp_types::{
CompletionItem, CompletionItemKind, CompletionParams, Documentation, MarkupContent, MarkupKind,
};
use nickel_lang_core::{
identifier::Ident,
identifier::{Ident, LocIdent},
term::{
record::{Field, FieldMetadata},
RichTerm, Term, TypeAnnotation, UnaryOp,
@ -120,7 +120,7 @@ pub struct ComplCtx<'a> {
/// using lexical scoping rules.
fn find_fields_from_term_kind(
id: ItemId,
path: &mut Vec<Ident>,
path: &mut Vec<LocIdent>,
info @ ComplCtx {
linearization,
lin_registry,
@ -150,7 +150,7 @@ fn find_fields_from_term_kind(
})
.collect()
} else {
let id = path.pop().and_then(|name| fields.get(&name));
let id = path.pop().and_then(|name| fields.get(&name.symbol()));
match id {
Some(id) => find_fields_from_term_kind(
*id,
@ -195,7 +195,7 @@ fn find_fields_from_term_kind(
/// its contract information.
fn find_fields_from_contract(
id: ItemId,
path: &mut Vec<Ident>,
path: &mut Vec<LocIdent>,
info @ ComplCtx {
linearization,
lin_registry: lin_cache,
@ -227,7 +227,7 @@ fn find_fields_from_contract(
/// contracts.
fn find_fields_from_contracts(
annot: &TypeAnnotation,
path: &[Ident],
path: &[LocIdent],
info @ ComplCtx { .. }: ComplCtx<'_>,
) -> Vec<IdentWithType> {
annot
@ -242,7 +242,7 @@ fn find_fields_from_contracts(
/// Find the fields that can be found from a type.
fn find_fields_from_type(
ty: &Type,
path: &mut Vec<Ident>,
path: &mut Vec<LocIdent>,
info @ ComplCtx { .. }: ComplCtx<'_>,
) -> Vec<IdentWithType> {
match &ty.typ {
@ -259,7 +259,7 @@ fn find_fields_from_type(
/// Extract the fields from a given record type.
fn find_fields_from_rrows(
rrows: &RecordRows,
path: &mut Vec<Ident>,
path: &mut Vec<LocIdent>,
info @ ComplCtx { .. }: ComplCtx<'_>,
) -> Vec<IdentWithType> {
if let Some(current) = path.pop() {
@ -287,7 +287,7 @@ fn find_fields_from_rrows(
_ => None,
})
.map(|(ident, types)| IdentWithType {
ident,
ident: ident.symbol(),
meta: None,
ty: types.clone(),
})
@ -297,7 +297,7 @@ fn find_fields_from_rrows(
fn find_fields_from_field(
field: &Field,
path: &mut Vec<Ident>,
path: &mut Vec<LocIdent>,
info: ComplCtx<'_>,
) -> Vec<IdentWithType> {
find_fields_from_term_with_annot(&field.metadata.annotation, field.value.as_ref(), path, info)
@ -307,7 +307,7 @@ fn find_fields_from_field(
fn find_fields_from_term_with_annot(
annot: &TypeAnnotation,
value: Option<&RichTerm>,
path: &mut Vec<Ident>,
path: &mut Vec<LocIdent>,
info: ComplCtx<'_>,
) -> Vec<IdentWithType> {
let mut info_from_metadata = find_fields_from_contracts(annot, path, info);
@ -322,7 +322,7 @@ fn find_fields_from_term_with_annot(
/// Extract record fields from a record term.
fn find_fields_from_term(
term: &RichTerm,
path: &mut Vec<Ident>,
path: &mut Vec<LocIdent>,
info @ ComplCtx { lin_registry, .. }: ComplCtx<'_>,
) -> Vec<IdentWithType> {
match term.as_ref() {
@ -331,7 +331,7 @@ fn find_fields_from_term(
.fields
.iter()
.map(|(ident, field)| IdentWithType {
ident: *ident,
ident: ident.symbol(),
// This Dyn type is only displayed if the metadata's
// contract or type annotation is not present.
ty: Type::from(TypeF::Dyn),
@ -440,7 +440,7 @@ fn remove_duplicates(items: &Vec<CompletionItem>) -> Vec<CompletionItem> {
fn collect_record_info(
linearization: &Completed,
id: ItemId,
path: &mut Vec<Ident>,
path: &mut Vec<LocIdent>,
lin_registry: &LinRegistry,
) -> Vec<IdentWithType> {
let info = ComplCtx {
@ -505,8 +505,8 @@ fn accumulate_record_meta_data<'a>(
linearization,
lin_registry,
}: ComplCtx<'a>,
mut path: Vec<Ident>,
result: &mut Vec<(&'a LinearizationItem<Type>, Vec<Ident>)>,
mut path: Vec<LocIdent>,
result: &mut Vec<(&'a LinearizationItem<Type>, Vec<LocIdent>)>,
) {
// This unwrap is safe: we know a `RecordField` must have a containing `Record`.
let parent = linearization
@ -547,11 +547,11 @@ fn get_completion_identifiers(
) -> Result<Vec<CompletionItem>, ResponseError> {
fn complete(
item: &LinearizationItem<Type>,
name: Ident,
name: LocIdent,
server: &Server,
path: &mut Vec<Ident>,
path: &mut Vec<LocIdent>,
) -> Vec<IdentWithType> {
let Some(item_id) = item.env.get(&name) else {
let Some(item_id) = item.env.get(&name.symbol()) else {
return Vec::new()
};
let lin = server.lin_cache_get(&item_id.file_id).unwrap();
@ -561,7 +561,7 @@ fn get_completion_identifiers(
fn context_complete(
item: &LinearizationItem<Type>,
info: ComplCtx<'_>,
path: Vec<Ident>,
path: Vec<LocIdent>,
) -> Vec<IdentWithType> {
if let (&TermKind::RecordField { record, .. }, _) | (TermKind::Record(..), record) =
(&item.kind, item.id)
@ -595,7 +595,7 @@ fn get_completion_identifiers(
let Some(path) = get_identifier_path(source) else {
return Ok(Vec::new())
};
let mut path: Vec<_> = path.iter().rev().cloned().map(Ident::from).collect();
let mut path: Vec<_> = path.iter().rev().cloned().map(LocIdent::from).collect();
let context_path = path.clone();
let contextual_result = context_complete(item, info, context_path);
@ -613,7 +613,7 @@ fn get_completion_identifiers(
// This is also record completion, but it is in the form
// <record path>.<partially-typed-field>
// we also want to give completion based on <record path> in this case.
let mut path: Vec<_> = path.iter().rev().cloned().map(Ident::from).collect();
let mut path: Vec<_> = path.iter().rev().cloned().map(LocIdent::from).collect();
// TODO: We need to adjust the linearization item here.
// Say we have: `config.dat`, this parses as a nested record.
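A small, self-contained illustration of the reversal used here (the `.iter().rev()...collect()` lines), assuming the user has typed `config.dat` and the identifier path came back as `["config", "dat"]` — reversing before collecting means later `pop()` calls walk the path left to right:

    fn main() {
        // Hypothetical output of get_identifier_path for the source `config.dat`.
        let source_path = vec!["config", "dat"];

        // Reverse so that pop() yields "config" first, then "dat".
        let mut path: Vec<String> = source_path.iter().rev().map(|s| s.to_string()).collect();
        assert_eq!(path.pop().as_deref(), Some("config"));
        assert_eq!(path.pop().as_deref(), Some("dat"));
    }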
@ -639,7 +639,7 @@ fn get_completion_identifiers(
.filter_map(|i| match i.kind {
TermKind::Declaration { id: ident, .. }
| TermKind::RecordField { ident, .. } => Some(IdentWithType {
ident,
ident: ident.symbol(),
meta: item.metadata.clone(),
ty: ty.clone(),
}),
@ -658,7 +658,7 @@ fn get_completion_identifiers(
Ok(remove_duplicates(&in_scope))
}
fn extract_static_path(mut rt: RichTerm) -> (RichTerm, Vec<Ident>) {
fn extract_static_path(mut rt: RichTerm) -> (RichTerm, Vec<LocIdent>) {
let mut path = Vec::new();
loop {
@ -851,7 +851,7 @@ mod tests {
let b_record_type = mk_uty_record!(("b", c_record_type));
let a_record_type = mk_uty_row!(("a", b_record_type));
let mut path = vec![Ident::from("b"), Ident::from("a")];
let mut path = vec![LocIdent::from("b"), LocIdent::from("a")];
// unwrap: the conversion must succeed because we built a type without unification variable
// nor type constants
let info = ComplCtx {
@ -960,7 +960,7 @@ mod tests {
let a = make_lin_item(
ItemId { file_id, index: 0 },
TermKind::Declaration {
id: Ident::from("a"),
id: LocIdent::from("a"),
usages: vec![ItemId { file_id, index: 3 }],
value: ValueState::Known(ItemId { file_id, index: 1 }),
path: None,
@ -980,7 +980,7 @@ mod tests {
let d = make_lin_item(
ItemId { file_id, index: 3 },
TermKind::Declaration {
id: Ident::from("d"),
id: LocIdent::from("d"),
usages: Vec::new(),
value: ValueState::Known(ItemId { file_id, index: 4 }),
path: None,
@ -1007,7 +1007,7 @@ mod tests {
let a = make_lin_item(
ItemId { file_id, index: 0 },
TermKind::Declaration {
id: Ident::from("a"),
id: LocIdent::from("a"),
usages: Vec::new(),
value: ValueState::Known(ItemId { file_id, index: 1 }),
path: None,
@ -1028,7 +1028,7 @@ mod tests {
let d = make_lin_item(
ItemId { file_id, index: 3 },
TermKind::Declaration {
id: Ident::from("d"),
id: LocIdent::from("d"),
usages: Vec::new(),
value: ValueState::Known(ItemId { file_id, index: 13 }),
path: None,
@ -1038,7 +1038,7 @@ mod tests {
let e = make_lin_item(
ItemId { file_id, index: 4 },
TermKind::Declaration {
id: Ident::from("e"),
id: LocIdent::from("e"),
usages: Vec::new(),
value: ValueState::Known(ItemId { file_id, index: 14 }),
path: None,
@ -1048,7 +1048,7 @@ mod tests {
let f = make_lin_item(
ItemId { file_id, index: 5 },
TermKind::Declaration {
id: Ident::from("f"),
id: LocIdent::from("f"),
usages: Vec::new(),
value: ValueState::Known(ItemId { file_id, index: 15 }),
path: None,
@ -1097,7 +1097,7 @@ mod tests {
let a = make_lin_item(
id,
TermKind::Declaration {
id: Ident::from("a"),
id: LocIdent::from("a"),
usages: Vec::new(),
value: ValueState::Known(ItemId { file_id, index: 1 }),
path: None,

View File

@ -20,7 +20,7 @@ use lsp_types::{
use nickel_lang_core::{
cache::{Cache, ErrorTolerance},
identifier::Ident,
identifier::LocIdent,
position::RawPos,
stdlib::StdlibModule,
term::RichTerm,
@ -99,11 +99,11 @@ impl Server {
}
// The module is bound to its name in the environment.
let name: Ident = Ident::from(module.name());
let name: LocIdent = LocIdent::from(module.name());
let file_id = self.cache.get_submodule_file_id(module)?;
// We're using the ID 0 to get the top-level value, which is the body of the module.
let content_id = ItemId { file_id, index: 0 };
self.initial_env.insert(name, content_id);
self.initial_env.insert(name.symbol(), content_id);
}
Some(())
}
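To round off this last hunk: each stdlib module ends up in the initial environment under its name's symbol, pointing at item 0 of its file, which is the module's body. A toy version of that wiring — made-up module names and flattened stand-in types, not the real `StdlibModule`/`ItemId`:

    use std::collections::HashMap;

    // Stand-ins for illustration; real module names come from StdlibModule
    // and real item ids from the linearization.
    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct Symbol(&'static str);

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct ItemId {
        file_id: usize,
        index: usize,
    }

    fn main() {
        // Hypothetical (module name, file id) pairs.
        let modules = [("std", 1usize), ("internals", 2)];

        let mut initial_env: HashMap<Symbol, ItemId> = HashMap::new();
        for (name, file_id) in modules {
            // Index 0 is the top-level item of the file, i.e. the module body.
            initial_env.insert(Symbol(name), ItemId { file_id, index: 0 });
        }

        assert_eq!(
            initial_env.get(&Symbol("std")),
            Some(&ItemId { file_id: 1, index: 0 })
        );
    }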