1
1
mirror of https://github.com/tweag/nickel.git synced 2024-10-03 22:57:11 +03:00

Update flake inputs (#2058)

Update various flake inputs and the flake.lock file. Adapt the flake.nix
file, as well as the Rust source code, to accommodate the latest changes
(new clippy warnings, etc.).

Topiary is getting hard to use from the flake, because there are two
conflicting versions: the one that is pulled from Nix to be used in the
CI (checking that files are properly formatted), and the one built into
Nickel via cargo. Both must agree (or at least there might be a
difference in formatting between the two if they aren't the same
version). Since the addition of dynamic loading of grammars, latest
Topiary has become harder to build from Nix.

To avoid all those pitfalls, this commit gets rid of Topiary as a
flake input and uses `nickel format` instead, ensuring that the
formatting is consistent. As a consequence, Topiary isn't included in
the development shell anymore, but it's arguably not an issue: it was
included before `nickel format`, as we needed a third party formatter,
but now one can just build Nickel locally with their preferred method
and use `nickel format`.
This commit is contained in:
Yann Hamdaoui 2024-10-02 11:12:03 +02:00 committed by GitHub
parent 378ece30b3
commit 927ee23993
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
18 changed files with 178 additions and 187 deletions

17
Cargo.lock generated
View File

@ -298,9 +298,12 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
[[package]]
name = "cc"
version = "1.0.90"
version = "1.1.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5"
checksum = "812acba72f0a070b003d3697490d2b55b837230ae7c6c6497f05cc2ddbb8d938"
dependencies = [
"shlex",
]
[[package]]
name = "cfg-if"
@ -2391,9 +2394,9 @@ dependencies = [
[[package]]
name = "regex"
version = "1.10.3"
version = "1.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15"
checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619"
dependencies = [
"aho-corasick",
"memchr",
@ -2654,6 +2657,12 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "signal-hook"
version = "0.3.17"

View File

@ -101,13 +101,13 @@ metrics = "0.21"
metrics-util = "0.15"
topiary-core = "0.4.0"
topiary-queries = { version = "0.4.2", default-features = false, features = ["nickel"] }
topiary-queries = { version = "=0.4.2", default-features = false, features = ["nickel"] }
# This version should agree with the topiary-queries version: Nickel queries
# target a specific version of the Nickel grammar (though that dependency
# doesn't appear explicitly in `topiary-queries`'s Cargo.toml file). For now you
# might have to look at Topiary's commit messages, but in a near future, we'll
# try to make this data more accessible, e.g. in the query file
tree-sitter-nickel = "0.2.0"
tree-sitter-nickel = "=0.2.0"
tempfile = "3.5.0"
[profile.dev.package.lalrpop]

View File

@ -101,9 +101,9 @@ impl InputFormat {
/// the corresponding parsed terms. The storage comprises three elements:
///
/// - The file database, holding the string content of sources indexed by unique `FileId`
/// identifiers.
/// identifiers.
/// - The name-id table, associating source names for standalone inputs, or paths and timestamps
/// for files, to `FileId`s.
/// for files, to `FileId`s.
/// - The term cache, holding parsed terms indexed by `FileId`s.
///
/// Terms possibly undergo typechecking and program transformation. The state of each entry (that
@ -759,7 +759,7 @@ impl Cache {
/// # Preconditions
///
/// - the entry must syntactically be a record (`Record` or `RecRecord`). Otherwise, this
/// function panics
/// function panics
pub fn transform_inner(
&mut self,
file_id: FileId,

View File

@ -979,7 +979,7 @@ fn secondary(span: &RawSpan) -> Label<FileId> {
/// additional text placed at the end of diagnostic. What you lose:
/// - pretty formatting of annotations for such snippets
/// - style consistency: the style of the error now depends on the term being from the source or
/// a byproduct of evaluation
/// a byproduct of evaluation
/// 3. Add the term to files, take 1: pass a reference to files so that the code building the
/// diagnostic can itself add arbitrary snippets if necessary, and get back their `FileId`. This
/// is what is done here.

View File

@ -59,7 +59,7 @@ impl Default for ColorOpt {
/// # Arguments
///
/// - `cache` is the file cache used during the evaluation, which is required by the reporting
/// infrastructure to point at specific locations and print snippets when needed.
/// infrastructure to point at specific locations and print snippets when needed.
pub fn report<E: IntoDiagnostics<FileId>>(
cache: &mut Cache,
error: E,
@ -83,7 +83,7 @@ pub fn report<E: IntoDiagnostics<FileId>>(
/// # Arguments
///
/// - `cache` is the file cache used during the evaluation, which is required by the reporting
/// infrastructure to point at specific locations and print snippets when needed.
/// infrastructure to point at specific locations and print snippets when needed.
pub fn report_to_stdout<E: IntoDiagnostics<FileId>>(
cache: &mut Cache,
error: E,

View File

@ -11,13 +11,13 @@
//! - All the fields of `r1` that are not in `r2`
//! - All the fields of `r2` that are not in `r1`
//! - Fields that are both in `r1` and `r2` are recursively merged: for a field `f`, the result
//! contains the binding `f = r1.f & r2.f`
//! contains the binding `f = r1.f & r2.f`
//!
//! As fields are recursively merged, merge needs to operate on any value, not only on records:
//!
//! - *function*: merging a function with anything else fails
//! - *values*: merging any other values succeeds if and only if these two values are equals, in
//! which case it evaluates to this common value.
//! which case it evaluates to this common value.
//!
//! ## Metadata
//!

View File

@ -44,15 +44,15 @@
//! be performed:
//!
//! - `exp1` needs to be evaluated. The result must be saved somewhere, together with the resulting
//! environment
//! environment
//! - same thing for `exp2`
//! - Finally, the implementation of `+` can proceed with the computation
//!
//! We detail the case of binary operators, as the case of unary ones is similar and simpler.
//!
//! - **Op(op, first, second)**: pushes an `OpFirst` element on the stack, which saves the operator
//! `op`, the second argument `second` and the current environment, and proceed with the evaluation
//! of `first`
//! `op`, the second argument `second` and the current environment, and proceed with the evaluation
//! of `first`
//! - **OpFirst on stack**: if the evaluation of the current term is done and there is an `OpFirst`
//! marker on the stack, then:
//! 1. Extract the saved operator, the second argument and the environment `env2` from the

View File

@ -260,10 +260,10 @@ pub mod ty_path {
/// but we want to report the failures of the two introduced subcontracts in a different way:
///
/// - The inner one (on the argument) says that `f` has been misused: it has been applied to
/// something that is not a `Number`.
/// something that is not a `Number`.
/// - The outer one says that `f` failed to satisfy its contract, as it has been provided with a
/// `Number` (otherwise the inner contracts would have failed before) but failed to deliver a
/// `Number`.
/// `Number` (otherwise the inner contracts would have failed before) but failed to deliver a
/// `Number`.
///
/// This duality caller/callee or function/context is indicated by the polarity: the outer
/// corresponds to a *positive* polarity (the contract is on the term), while the inner corresponds

View File

@ -11,6 +11,10 @@ using namespace nix;
#include <nix/value-to-json.hh>
#include <nix/command.hh>
#include <nix/value.hh>
// We will need this include when we update to latest Nix, but for now it's
// pinned to a previous version where `initGC()` is still exported by already
// imported headers, so we don't need it yet.
// #include <nix/eval-gc.hh>
#include "nickel-lang-core/src/nix_ffi/mod.rs.h"

View File

@ -157,7 +157,7 @@ impl TermPos {
/// Fuse two positions if they are from the same source file.
///
/// - If both positions are defined and from the same file, the resulting position is the
/// smallest span that contain both.
/// smallest span that contain both.
/// - If both positions are defined but aren't from the same file, this returns `TermPos::None`
/// - If at most one position is defined, the other is returned (whether defined or not).
pub fn fuse(self, other: Self) -> Self {

View File

@ -640,7 +640,7 @@ impl<EC: EvalCache> Program<EC> {
/// - If the result is a record, we recursively evaluate subfields to record spines
/// - If the result isn't a record, it is returned as it is
/// - If the evaluation fails with [crate::error::EvalError::MissingFieldDef], the original
/// term is returned unevaluated[^missing-field-def]
/// term is returned unevaluated[^missing-field-def]
/// - If any other error occurs, the evaluation fails and returns the error.
///
/// [^missing-field-def]: Because we want to handle partial configurations as well,

View File

@ -192,12 +192,12 @@ pub enum Term {
///
/// - Assign a unique identifier to each type variable: say `a => 1`, `b => 2`
/// - For each cast on a negative occurrence of a type variable `a` or `b` (corresponding to an
/// argument position), tag the argument with the associated identifier. In our example, `f
/// true "a"` will push `Sealed(1, true)` then `Sealed(2, "a")` on the stack.
/// argument position), tag the argument with the associated identifier. In our example, `f
/// true "a"` will push `Sealed(1, true)` then `Sealed(2, "a")` on the stack.
/// - For each cast on a positive occurrence of a type variable, this contract check that the
/// term is of the form `Sealed(id, term)` where `id` corresponds to the identifier of the
/// type variable. In our example, the last cast to `a` finds `Sealed(2, "a")`, while it
/// expected `Sealed(1, _)`, hence it raises a positive blame.
/// term is of the form `Sealed(id, term)` where `id` corresponds to the identifier of the
/// type variable. In our example, the last cast to `a` finds `Sealed(2, "a")`, while it
/// expected `Sealed(1, _)`, hence it raises a positive blame.
#[serde(skip)]
Sealed(SealingKey, RichTerm, Label),
@ -1831,9 +1831,9 @@ pub enum BinaryOp {
///
/// - `left_only`: fields of the left argument but not in the right
/// - `left_center`: fields of the left argument that happens to also be in the right (but the
/// value and the metadata are taken from the left)
/// value and the metadata are taken from the left)
/// - `right_center`: fields of the right argument that happens to also be in the left (but the
/// value and the metadata are taken from the right)
/// value and the metadata are taken from the right)
/// - `right_only`: fields of the right argument but not in the left
///
/// As opposed to an equivalent user-defined implementation, this primop has better performance

View File

@ -958,10 +958,12 @@ impl EnumRows {
/// The following simplification are applied:
///
/// - the type of the argument of each enum variant is simplified as well
/// - if the polarity is positive and the rows are composed entirely of enum tags and enum variants whose argument's simplified type is `Dyn`, the entire rows are elided by returning `None`
/// - if the polarity is positive and the rows are composed entirely of enum tags and enum
/// variants whose argument's simplified type is `Dyn`, the entire rows are elided by returning
/// `None`
/// - a tail variable in tail position is currently left unchanged, because it doesn't give
/// rise to any sealing at runtime currently (see documentation of `$forall_enum_tail` in the
/// internals module of the stdlib)
/// rise to any sealing at runtime currently (see documentation of `$forall_enum_tail` in the
/// internals module of the stdlib)
fn simplify(
self,
contract_env: &mut Environment<Ident, RichTerm>,

View File

@ -271,7 +271,7 @@ impl UnifError {
/// # Parameters
///
/// - `state`: the state of unification. Used to access the unification table, and the original
/// names of unification variables or type constants.
/// names of unification variables or type constants.
/// - `pos_opt`: the position span of the expression that failed to typecheck.
pub fn into_typecheck_err(self, state: &State, pos_opt: TermPos) -> TypecheckError {
let mut names = reporting::NameReg::new(state.names.clone());

View File

@ -86,15 +86,14 @@ impl UnifTable {
/// # Preconditions
///
/// - This method doesn't check for the variable level conditions. This is the responsibility
/// of the caller.
/// of the caller.
/// - If the target type is a unification variable as well, it must not be assigned to another
/// unification type. That is, `assign` should always be passed a root type. Otherwise, the
/// handling of variable levels will be messed up.
/// unification type. That is, `assign` should always be passed a root type. Otherwise, the
/// handling of variable levels will be messed up.
/// - This method doesn't force pending level updates when needed (calling to
/// `force_type_updates`), i.e.
/// when `uty` is a rigid type variable. Having pending variable level updates and using
/// `assign_type` might make typechecking incorrect in some situation by unduly allowing
/// unsound generalization. This is the responsibility of the caller.
/// `force_type_updates`), i.e. when `uty` is a rigid type variable. Having pending variable
/// level updates and using `assign_type` might make typechecking incorrect in some situation
/// by unduly allowing unsound generalization. This is the responsibility of the caller.
pub fn assign_type(&mut self, var: VarId, uty: UnifType) {
// Unifying a free variable with itself is a no-op.
if matches!(uty, UnifType::UnifVar { id, ..} if id == var) {
@ -157,15 +156,14 @@ impl UnifTable {
/// # Preconditions
///
/// - This method doesn't check for the variable level conditions. This is the responsibility
/// of the caller.
/// of the caller.
/// - If the target type is a unification variable as well, it must not be assigned to another
/// unification type. That is, `assign` should always be passed a root type. Otherwise, the
/// handling of variable levels will be messed up.
/// unification type. That is, `assign` should always be passed a root type. Otherwise, the
/// handling of variable levels will be messed up.
/// - This method doesn't force pending level updates when needed (calling to
/// `force_rrows_updates`), i.e.
/// when `uty` is a rigid type variable. Having pending variable level updates and using
/// `assign_type` might make typechecking incorrect in some situation by unduly allowing
/// unsound generalization. This is the responsibility of the caller.
/// `force_rrows_updates`), i.e. when `uty` is a rigid type variable. Having pending variable
/// level updates and using `assign_type` might make typechecking incorrect in some situation
/// by unduly allowing unsound generalization. This is the responsibility of the caller.
pub fn assign_rrows(&mut self, var: VarId, rrows: UnifRecordRows) {
// Unifying a free variable with itself is a no-op.
if matches!(rrows, UnifRecordRows::UnifVar { id, ..} if id == var) {
@ -207,15 +205,14 @@ impl UnifTable {
/// # Preconditions
///
/// - This method doesn't check for the variable level conditions. This is the responsibility
/// of the caller.
/// of the caller.
/// - If the target type is a unification variable as well, it must not be assigned to another
/// unification type. That is, `assign` should always be passed a root type. Otherwise, the
/// handling of variable levels will be messed up.
/// unification type. That is, `assign` should always be passed a root type. Otherwise, the
/// handling of variable levels will be messed up.
/// - This method doesn't force pending level updates when needed (calling to
/// `force_erows_updates`), i.e.
/// when `uty` is a rigid type variable. Having pending variable level updates and using
/// `assign_type` might make typechecking incorrect in some situation by unduly allowing
/// unsound generalization. This is the responsibility of the caller.
/// `force_erows_updates`), i.e. when `uty` is a rigid type variable. Having pending variable
/// level updates and using `assign_type` might make typechecking incorrect in some situation
/// by unduly allowing unsound generalization. This is the responsibility of the caller.
pub fn assign_erows(&mut self, var: VarId, erows: UnifEnumRows) {
// Unifying a free variable with itself is a no-op.
if matches!(erows, UnifEnumRows::UnifVar { id, .. } if id == var) {
@ -1621,6 +1618,7 @@ pub(super) trait RemoveRow: Sized {
/// the original row type without the found row.
///
/// If the searched row isn't found:
///
/// - If the row type is extensible, i.e. it ends with a free unification variable in tail
/// position, this function adds the missing row (with `row.types` as a type for record rows,
/// if allowed by row constraints) and then acts as if `remove_row` was called again on
@ -1636,12 +1634,12 @@ pub(super) trait RemoveRow: Sized {
///
/// For those to unify, we must have either:
///
/// - `r1` is somewhere in `tail2`, and `tail1` unifies with `{..tail2'}` where `tail2'` is
/// `tail2` without `r1`.
/// - `tail2` is extensible, in which case we can extend `tail2` with `r1`, assuming that
/// `tail1` unifies with `{..tail2'}`, where `tail2'` is `tail2` after extending with `r1` and
/// then removing it. Modulo fresh unification variable shuffling, `tail2'` is in fact
/// isomorphic to `tail2` before it was extended.
/// - `r1` is somewhere in `tail2`, and `tail1` unifies with `{..tail2'}` where `tail2'` is
/// `tail2` without `r1`.
/// - `tail2` is extensible, in which case we can extend `tail2` with `r1`, assuming that
/// `tail1` unifies with `{..tail2'}`, where `tail2'` is `tail2` after extending with `r1`
/// and then removing it. Modulo fresh unification variable shuffling, `tail2'` is in fact
/// isomorphic to `tail2` before it was extended.
///
/// When we unify two row types, we destructure the left hand side to extract the head `r1` and
/// the tail `tail1`. Then, we try to find and extract `r1` from `tail2`. If `r1` was found, we

View File

@ -1,33 +1,12 @@
{
"nodes": {
"advisory-db": {
"flake": false,
"locked": {
"lastModified": 1714183630,
"narHash": "sha256-1BVft7ggSN2XXFeXQjazU3jN9wVECd9qp2mZx/8GDMk=",
"owner": "rustsec",
"repo": "advisory-db",
"rev": "35e7459a331d3e0c585e56dabd03006b9b354088",
"type": "github"
},
"original": {
"owner": "rustsec",
"repo": "advisory-db",
"type": "github"
}
},
"crane": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1717535930,
"narHash": "sha256-1hZ/txnbd/RmiBPNUs7i8UQw2N89uAK3UzrGAWdnFfU=",
"lastModified": 1727060013,
"narHash": "sha256-/fC5YlJy4IoAW9GhkJiwyzk0K/gQd9Qi4rRcoweyG9E=",
"owner": "ipetkov",
"repo": "crane",
"rev": "55e7754ec31dac78980c8be45f8a28e80e370946",
"rev": "6b40cc876c929bfe1e3a24bf538ce3b5622646ba",
"type": "github"
},
"original": {
@ -60,11 +39,11 @@
]
},
"locked": {
"lastModified": 1712014858,
"narHash": "sha256-sB4SWl2lX95bExY2gMFG5HIzvva5AVMJd4Igm+GpZNw=",
"lastModified": 1719994518,
"narHash": "sha256-pQMhCCHyQGRzdfAkdJ4cIWiw+JNuWsTX7f0ZYSyz0VY=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "9126214d0a59633752a136528f5f3b9aa8565b7d",
"rev": "9227223f6d922fee3c7b190b2cc238a99527bbb7",
"type": "github"
},
"original": {
@ -78,11 +57,11 @@
"systems": "systems"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"lastModified": 1726560853,
"narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a",
"type": "github"
},
"original": {
@ -91,6 +70,37 @@
"type": "github"
}
},
"git-hooks-nix": {
"inputs": {
"flake-compat": [
"nix-input"
],
"gitignore": [
"nix-input"
],
"nixpkgs": [
"nix-input",
"nixpkgs"
],
"nixpkgs-stable": [
"nix-input",
"nixpkgs"
]
},
"locked": {
"lastModified": 1721042469,
"narHash": "sha256-6FPUl7HVtvRHCCBQne7Ylp4p+dpP3P/OYuzjztZ4s70=",
"owner": "cachix",
"repo": "git-hooks.nix",
"rev": "f451c19376071a90d8c58ab1a953c6e9840527fd",
"type": "github"
},
"original": {
"owner": "cachix",
"repo": "git-hooks.nix",
"type": "github"
}
},
"gitignore": {
"inputs": {
"nixpkgs": [
@ -115,34 +125,20 @@
"libgit2": {
"flake": false,
"locked": {
"lastModified": 1697646580,
"narHash": "sha256-oX4Z3S9WtJlwvj0uH9HlYcWv+x1hqp8mhXl7HsLu2f0=",
"lastModified": 1715853528,
"narHash": "sha256-J2rCxTecyLbbDdsyBWn9w7r3pbKRMkI9E7RvRgAqBdY=",
"owner": "libgit2",
"repo": "libgit2",
"rev": "45fd9ed7ae1a9b74b957ef4f337bc3c8b3df01b5",
"rev": "36f7e21ad757a3dacc58cf7944329da6bc1d6e96",
"type": "github"
},
"original": {
"owner": "libgit2",
"ref": "v1.8.1",
"repo": "libgit2",
"type": "github"
}
},
"nix-filter": {
"locked": {
"lastModified": 1710156097,
"narHash": "sha256-1Wvk8UP7PXdf8bCCaEoMnOT1qe5/Duqgj+rL8sRQsSM=",
"owner": "numtide",
"repo": "nix-filter",
"rev": "3342559a24e85fc164b295c3444e8a139924675b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "nix-filter",
"type": "github"
}
},
"nix-input": {
"inputs": {
"flake-compat": [
@ -150,22 +146,20 @@
"flake-compat"
],
"flake-parts": "flake-parts",
"git-hooks-nix": "git-hooks-nix",
"libgit2": "libgit2",
"nixpkgs": [
"nixpkgs"
],
"nixpkgs-23-11": "nixpkgs-23-11",
"nixpkgs-regression": "nixpkgs-regression",
"pre-commit-hooks": [
"pre-commit-hooks"
]
"nixpkgs-regression": "nixpkgs-regression"
},
"locked": {
"lastModified": 1717862855,
"narHash": "sha256-DHM/6HZsuJO+k32vBfPzzhEwgMVpYdHiMD5r6qoCk9k=",
"lastModified": 1727696274,
"narHash": "sha256-H+EeGBRV87NRDXgOQP/aZfof9svbYCSQktpMiLBrqCQ=",
"owner": "nixos",
"repo": "nix",
"rev": "0ab9369572f64b1ab70a8db29f79ae730ff31ab6",
"rev": "c116030605bf7fecd232d0ff3b6fe066f23e4620",
"type": "github"
},
"original": {
@ -176,11 +170,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1717786204,
"narHash": "sha256-4q0s6m0GUcN7q+Y2DqD27iLvbcd1G50T2lv08kKxkSI=",
"lastModified": 1726937504,
"narHash": "sha256-bvGoiQBvponpZh8ClUcmJ6QnsNKw0EMrCQJARK3bI1c=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "051f920625ab5aabe37c920346e3e69d7d34400e",
"rev": "9357f4f23713673f310988025d9dc261c20e70c6",
"type": "github"
},
"original": {
@ -223,16 +217,16 @@
},
"nixpkgs-stable": {
"locked": {
"lastModified": 1710695816,
"narHash": "sha256-3Eh7fhEID17pv9ZxrPwCLfqXnYP006RKzSs0JptsN84=",
"lastModified": 1720386169,
"narHash": "sha256-NGKVY4PjzwAa4upkGtAMz1npHGoRzWotlSnVlqI40mo=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "614b4613980a522ba49f0d194531beddbb7220d3",
"rev": "194846768975b7ad2c4988bdb82572c00222c0d7",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-23.11",
"ref": "nixos-24.05",
"repo": "nixpkgs",
"type": "github"
}
@ -247,11 +241,11 @@
"nixpkgs-stable": "nixpkgs-stable"
},
"locked": {
"lastModified": 1717664902,
"narHash": "sha256-7XfBuLULizXjXfBYy/VV+SpYMHreNRHk9nKMsm1bgb4=",
"lastModified": 1726745158,
"narHash": "sha256-D5AegvGoEjt4rkKedmxlSEmC+nNLMBPWFxvmYnVLhjk=",
"owner": "cachix",
"repo": "pre-commit-hooks.nix",
"rev": "cc4d466cb1254af050ff7bdf47f6d404a7c646d1",
"rev": "4e743a6920eab45e8ba0fbe49dc459f1423a4b74",
"type": "github"
},
"original": {
@ -267,8 +261,7 @@
"nix-input": "nix-input",
"nixpkgs": "nixpkgs",
"pre-commit-hooks": "pre-commit-hooks",
"rust-overlay": "rust-overlay",
"topiary": "topiary"
"rust-overlay": "rust-overlay"
}
},
"rust-overlay": {
@ -278,11 +271,11 @@
]
},
"locked": {
"lastModified": 1719973106,
"narHash": "sha256-IGCdN/m7DfwUfxZjFnlTiTtpwSHCb01P/LWavAKD2jw=",
"lastModified": 1727144949,
"narHash": "sha256-uMZMjoCS2nf40TAE1686SJl3OXWfdfM+BDEfRdr+uLc=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "fb733500aead50880b9b301f34a0061bf997d6f2",
"rev": "2e19799819104b46019d339e78d21c14372d3666",
"type": "github"
},
"original": {
@ -305,37 +298,6 @@
"repo": "default",
"type": "github"
}
},
"topiary": {
"inputs": {
"advisory-db": "advisory-db",
"crane": [
"crane"
],
"flake-utils": [
"flake-utils"
],
"nix-filter": "nix-filter",
"nixpkgs": [
"nixpkgs"
],
"rust-overlay": [
"rust-overlay"
]
},
"locked": {
"lastModified": 1725634392,
"narHash": "sha256-xpxdzcIXR/ISpfE74tc+8N225RSPnZbFPEay78kcnC4=",
"owner": "tweag",
"repo": "topiary",
"rev": "cff73fc3d58490b260935a4d9968a180acf75b8b",
"type": "github"
},
"original": {
"owner": "tweag",
"repo": "topiary",
"type": "github"
}
}
},
"root": "root",

View File

@ -12,23 +12,12 @@
};
crane = {
url = "github:ipetkov/crane";
inputs.nixpkgs.follows = "nixpkgs";
};
topiary = {
url = "github:tweag/topiary";
inputs = {
nixpkgs.follows = "nixpkgs";
crane.follows = "crane";
flake-utils.follows = "flake-utils";
rust-overlay.follows = "rust-overlay";
};
};
nix-input = {
url = "github:nixos/nix";
inputs = {
nixpkgs.follows = "nixpkgs";
flake-compat.follows = "pre-commit-hooks/flake-compat";
pre-commit-hooks.follows = "pre-commit-hooks";
};
};
};
@ -45,7 +34,6 @@
, pre-commit-hooks
, rust-overlay
, crane
, topiary
, nix-input
}:
let
@ -170,10 +158,15 @@
];
};
# we could use pre-commit-hook's built-in topiary, but for now, Topiary
# is evolving quickly and we prefer to have the latest version.
# This might change once the Nickel support is stabilized.
topiary-latest = topiary.lib.${system}.pre-commit-hook // {
# We could use Topiary here, but the Topiary version pulled from Nix
# and the one baked in Nickel could differ. It's saner that what we
# check in the CI is matching exactly the formatting performed by the
# `nickel` binary of this repo.
nickel-format = {
name = "nickel-format";
description = "The nickel formatter";
entry = "${pkgs.lib.getExe self.packages."${system}".default} format";
types = [ "text" ];
enable = true;
# Some tests are currently failing the idempotency check, and
# formatting is less important there. We at least want the examples
@ -321,15 +314,31 @@
# pyo3 needs a Python interpreter in the build environment
# https://pyo3.rs/v0.17.3/building_and_distribution#configuring-the-python-version
nativeBuildInputs = with pkgs; [ pkg-config python3 ];
buildInputs = with pkgs; [
(nix-input.packages.${system}.default.overrideAttrs
# SEE: https://github.com/NixOS/nix/issues/9107
(_: lib.optionalAttrs (system == "x86_64-darwin") {
doCheck = false;
})
)
boost # implicit dependency of nix
];
buildInputs =
# SEE: https://github.com/NixOS/nix/issues/9107
let
disableChecksOnDarwin =
pkgList: builtins.map
(pkg: pkg.overrideAttrs (_: pkgs.lib.optionalAttrs (system == "x86_64-darwin") {
doCheck = false;
}))
pkgList;
in
disableChecksOnDarwin [
nix-input.packages.${system}.nix
# When updating to latest Nix, we'll need to use the following
# additional output. For now, we pinned `nix-input` to a
# previous tag, where the outputs are still grouped in the
# default package, so we leave them commented out.
# nix-input.packages.${system}.nix-store
# nix-input.packages.${system}.nix-expr
# nix-input.packages.${system}.nix-flake
# nix-input.packages.${system}.nix-cmd
]
++ [
pkgs.boost # implicit dependency of nix
];
# seems to be needed for consumer cargoArtifacts to be able to use
# zstd mode properly
@ -489,7 +498,6 @@
pkgs.yarn2nix
pkgs.nodePackages.markdownlint-cli
pkgs.python3
topiary.packages.${system}.default
];
shellHook = (pre-commit-builder { inherit rust; checkFormat = true; }).shellHook + ''

View File

@ -41,6 +41,12 @@ pub struct ParentLookup {
}
impl ParentLookup {
// [^disable-clippy-mutable-key-type]: We use `RichTermPtr` as the key type, which is a wrapper
// around `RichTerm`, which contains `Closure` and is thus theoretically at risk of being
// mutated (interior mutability). However, we are in the case cited in the "false positives" of
// the clippy documentation, which is that `RichTermPtr` has a custom implementation of `Hash`
// that doesn't rely on the content of the term, but just on the pointer to it, which is safe.
#[allow(clippy::mutable_key_type)]
pub fn new(rt: &RichTerm) -> Self {
let mut table = HashMap::new();
@ -383,6 +389,8 @@ impl TypeCollector {
}
};
// See [^disable-clippy-mutable-key-type]
#[allow(clippy::mutable_key_type)]
let terms = self
.tables
.terms