add alias tracking and bulk tests

hsjobeki 2023-11-28 17:28:51 +01:00
parent abe7df20dc
commit effa51cb39
38 changed files with 11662 additions and 1129 deletions


@@ -1,7 +1,7 @@
{
perSystem = { pkgs, self', config, ... }: {
devShells.default = pkgs.mkShell {
packages = [ ];
packages = [ pkgs.treefmt ];
shellHook = ''
${self'.checks.pre-commit-check.shellHook}
'';

File diff suppressed because it is too large


@@ -3,7 +3,8 @@
let
nix = inputs'.nix.packages.nix-clangStdenv;
nixpkgs = inputs.nixpkgs-master;
in {
in
{
packages = {
pasta = pkgs.callPackage ./default.nix { inherit nixpkgs nix pkgs; };
};


@@ -1,16 +1,19 @@
{ pkgs ?
# import (builtins.fetchTree {
# repo = "nixpkgs";
# ref = "migrate-doc-comments";
# owner = "hsjobeki";
# type = "github";
# }) {},
import (builtins.fetchTree {
repo = "nixpkgs";
ref = "master";
owner = "nixos";
type = "github";
}) { }, }:
{ pkgs ? # import (builtins.fetchTree {
# repo = "nixpkgs";
# ref = "migrate-doc-comments";
# owner = "hsjobeki";
# type = "github";
# }) {},
import
(builtins.fetchTree {
repo = "nixpkgs";
ref = "master";
owner = "nixos";
type = "github";
})
{ }
,
}:
let
inherit pkgs;
inherit (pkgs) lib;
@@ -34,4 +37,10 @@ let
getDocsFromSet pkgs.pkgs.pythonPackages [ "pkgs" "pythonPackages" ];
};
in { inherit tools pkgs docs toFile; }
# generate test_data for pesto
test_data = {
attrsets = getDocsFromSet lib.attrsets [ "lib" "attrsets" ];
};
in
{ inherit tools pkgs docs toFile test_data; }


@@ -21,12 +21,14 @@ let
*/
getDocs = parent: name:
let
lambda = if lib.isFunction parent.${name} then
builtins.lambdaMeta parent.${name}
else
null;
lambda =
if lib.isFunction parent.${name} then
builtins.lambdaMeta parent.${name}
else
null;
attr = { position = builtins.unsafeGetAttrPos name parent; };
in { inherit lambda attr; };
in
{ inherit lambda attr; };
/**
Recursively collect documentation for all values
@@ -42,7 +44,8 @@ let
path = initialPath ++ fn.path;
inherit (fn) docs;
});
in getFnDocs (filterFns (builtins.genericClosure {
in
getFnDocs (filterFns (builtins.genericClosure {
startSet = [{
__initial = true;
key = [ ];
@@ -60,35 +63,40 @@ let
let
currVal = force item.value;
# Dont traverse into: "derivations", "option types"
in if lib.isDerivation currVal || lib.isOptionType currVal || currVal
== null then
in
if lib.isDerivation currVal || lib.isOptionType currVal || currVal
== null then
[ ]
# Doc support for named key value pairs (sets)
# Doc support for named key value pairs (sets)
else if builtins.typeOf currVal == "set" then
map (name:
# NEXT ITEM
let
nextVal = force item.value.${name};
# calling lib.unique prevents infinite recursion
path = lib.unique (item.key ++ [ name ]);
in if lib.isDerivation nextVal || name == "__functor"
|| (limit != null && item.depth >= limit) then
# skipping all more nested values by
# returning the previous item
item
else {
key = path;
value = item.value.${name};
# Propagate some values.
type = if lib.isFunction nextVal then
"lambda"
else
builtins.typeOf nextVal;
docs = getDocs (lib.attrByPath (dropBack path) null set) name;
inherit name path;
parent = currVal;
depth = item.depth + 1;
}) (builtins.attrNames item.value)
map
(name:
# NEXT ITEM
let
nextVal = force item.value.${name};
# calling lib.unique prevents infinite recursion
path = lib.unique (item.key ++ [ name ]);
in
if lib.isDerivation nextVal || name == "__functor"
|| (limit != null && item.depth >= limit) then
# skipping all more nested values by
# returning the previous item
item
else {
key = path;
value = item.value.${name};
# Propagate some values.
type =
if lib.isFunction nextVal then
"lambda"
else
builtins.typeOf nextVal;
docs = getDocs (lib.attrByPath (dropBack path) null set) name;
inherit name path;
parent = currVal;
depth = item.depth + 1;
})
(builtins.attrNames item.value)
else
[ ];
}));
@@ -105,14 +113,17 @@ let
# Call getDocs for each name value pair
(lib.mapAttrs (n: v: getDocs s n))
];
in lib.pipe docs [
in
lib.pipe docs [
# Transform into list
builtins.attrNames
# Collect all values
(builtins.foldl' (res: name:
res ++ [{
path = path ++ [ name ];
docs = docs.${name};
}]) [ ])
(builtins.foldl'
(res: name:
res ++ [{
path = path ++ [ name ];
docs = docs.${name};
}]) [ ])
];
in { inherit toFile collectFns getDocsFromSet; }
in
{ inherit toFile collectFns getDocsFromSet; }


@@ -5,6 +5,8 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[profile.release]
debug = true
[dependencies]
rnix = "0.11.0"
@@ -14,7 +16,7 @@ regex = "1.9.5"
textwrap = "0.16.0"
walkdir = "2.4.0"
clap = { version = "4.4.4", features = ["derive"] }
serde = { version = "1.0", features = ["derive"] }
serde = { version = "1.0", features = ["derive", "rc"] }
serde_json = "1.0"
expect-test = "1.4.0"
serde_with = "3.4.0"


@@ -1,12 +0,0 @@
[
{
"file": "./test.nix",
"line": 5,
"column": 9
},
{
"file": "./test.nix",
"line": 5,
"column": 12
}
]


@@ -1,5 +0,0 @@
let
# *Docs
# TODO: fix it later
foo = x: y: x;
in foo


@@ -22,7 +22,8 @@
partitionType = "count";
});
};
in {
in
{
packages = { inherit pesto; };
inherit checks;
devShells.pesto = craneLib.devShell {

pesto/out.json Normal file

@@ -0,0 +1,227 @@
[
{
"docs": {
"lambda": {
"isPrimop": true,
"name": "add",
"args": [
"e1",
"e2"
],
"experimental": false,
"arity": 2,
"content": "\n Return the sum of the numbers *e1* and *e2*.\n "
},
"attr": {
"position": {
"file": "test_data/assets/default.nix",
"line": 68,
"column": 23
},
"content": null
}
},
"aliases": [
[
"lib",
"trivial",
"add"
],
[
"builtins",
"add"
]
],
"path": [
"lib",
"add"
]
},
{
"docs": {
"lambda": {
"isPrimop": true,
"name": "add",
"args": [
"e1",
"e2"
],
"experimental": false,
"arity": 2,
"content": "\n Return the sum of the numbers *e1* and *e2*.\n "
},
"attr": {
"position": {
"file": "test_data/assets/trivial.nix",
"line": 269,
"column": 21
},
"content": null
}
},
"aliases": [
[
"lib",
"add"
],
[
"builtins",
"add"
]
],
"path": [
"lib",
"trivial",
"add"
]
},
{
"docs": {
"lambda": {
"isPrimop": false,
"position": {
"file": "test_data/assets/lists.nix",
"line": 204,
"column": 5
},
"content": "\n The binary operation to run, where the two arguments are:\n 1. `acc`: The current accumulator value: Either the initial one for the first iteration, or the result of the previous iteration\n 2. `x`: The corresponding list element for this iteration\n ",
"countApplied": 0
},
"attr": {
"position": {
"file": "test_data/assets/default.nix",
"line": 92,
"column": 25
},
"content": null
}
},
"aliases": [
[
"lib",
"lists",
"foldl'"
]
],
"path": [
"lib",
"foldl'"
]
},
{
"docs": {
"lambda": {
"isPrimop": false,
"position": {
"file": "test_data/assets/lists.nix",
"line": 204,
"column": 5
},
"content": "\n The binary operation to run, where the two arguments are:\n 1. `acc`: The current accumulator value: Either the initial one for the first iteration, or the result of the previous iteration\n 2. `x`: The corresponding list element for this iteration\n ",
"countApplied": 0
},
"attr": {
"position": {
"file": "test_data/assets/lists.nix",
"line": 198,
"column": 3
},
"content": "\n Reduce a list by applying a binary operator from left to right,\n starting with an initial accumulator.\n Before each application of the operator, the accumulator value is evaluated.\n This behavior makes this function stricter than [`foldl`](#function-library-lib.lists.foldl).\n Unlike [`builtins.foldl'`](https://nixos.org/manual/nix/unstable/language/builtins.html#builtins-foldl'),\n the initial accumulator argument is evaluated before the first iteration.\n A call like\n ```nix\n foldl' op acc₀ [ x₀ x₁ x₂ ... xₙ₋₁ xₙ ]\n ```\n is (denotationally) equivalent to the following,\n but with the added benefit that `foldl'` itself will never overflow the stack.\n ```nix\n let\n acc₁ = builtins.seq acc₀ (op acc₀ x₀ );\n acc₂ = builtins.seq acc₁ (op acc₁ x₁ );\n acc₃ = builtins.seq acc₂ (op acc₂ x₂ );\n ...\n accₙ = builtins.seq accₙ₋₁ (op accₙ₋₁ xₙ₋₁);\n accₙ₊₁ = builtins.seq accₙ (op accₙ xₙ );\n in\n accₙ₊₁\n # Or ignoring builtins.seq\n op (op (... (op (op (op acc₀ x₀) x₁) x₂) ...) xₙ₋₁) xₙ\n ```\n\n # Example\n\n ```nix\n foldl' (acc: x: acc + x) 0 [1 2 3]\n => 6\n ```\n\n # Type\n\n ```\n foldl' :: (acc -> x -> acc) -> acc -> [x] -> acc\n ```\n\n # Arguments\n\n - [op] The binary operation to run, where the two arguments are:\n\n1. `acc`: The current accumulator value: Either the initial one for the first iteration, or the result of the previous iteration\n2. `x`: The corresponding list element for this iteration\n - [acc] The initial accumulator value\n - [list] The list to fold\n\n "
}
},
"aliases": [
[
"lib",
"foldl'"
]
],
"path": [
"lib",
"lists",
"foldl'"
]
},
{
"docs": {
"lambda": {
"isPrimop": false,
"position": {
"file": "test_data/assets/strings.nix",
"line": 84,
"column": 25
},
"content": "\n Map a function over a list and concatenate the resulting strings.\n\n # Example\n\n ```nix\n concatMapStrings (x: \"a\" + x) [\"foo\" \"bar\"]\n => \"afooabar\"\n ```\n\n # Type\n\n ```\n concatMapStrings :: (a -> string) -> [a] -> string\n ```\n\n # Arguments\n\n - [f] \n - [list] \n\n ",
"countApplied": 1
},
"attr": {
"position": {
"file": "test_data/assets/strings.nix",
"line": 243,
"column": 3
},
"content": "\n Concatenate a list of strings, adding a newline at the end of each one.\n Defined as `concatMapStrings (s: s + \"\\n\")`.\n\n # Example\n\n ```nix\n concatLines [ \"foo\" \"bar\" ]\n => \"foo\\nbar\\n\"\n ```\n\n # Type\n\n ```\n concatLines :: [string] -> string\n ```\n "
}
},
"aliases": [],
"path": [
"lib",
"strings",
"concatLines"
]
},
{
"docs": {
"lambda": {
"isPrimop": false,
"position": {
"file": "test_data/assets/strings.nix",
"line": 84,
"column": 25
},
"content": "\n Map a function over a list and concatenate the resulting strings.\n\n # Example\n\n ```nix\n concatMapStrings (x: \"a\" + x) [\"foo\" \"bar\"]\n => \"afooabar\"\n ```\n\n # Type\n\n ```\n concatMapStrings :: (a -> string) -> [a] -> string\n ```\n\n # Arguments\n\n - [f] \n - [list] \n\n ",
"countApplied": 1
},
"attr": {
"position": {
"file": "test_data/assets/default.nix",
"line": 98,
"column": 27
},
"content": null
}
},
"aliases": [],
"path": [
"lib",
"concatLines"
]
},
{
"docs": {
"lambda": {
"isPrimop": true,
"content": "\n Return the sum of the numbers *e1* and *e2*.\n ",
"countApplied": 0
},
"attr": {
"position": null,
"content": ""
}
},
"aliases": [
[
"lib",
"add"
],
[
"lib",
"trivial",
"add"
]
],
"path": [
"builtins",
"add"
]
}
]

pesto/src/bulk.rs Normal file

@@ -0,0 +1,304 @@
use std::{
collections::HashMap, fs::File, io::Write, path::PathBuf, println, rc::Rc, time::Instant,
};
use crate::{
pasta::{read_pasta, Docs, LambdaMeta},
position::{DocComment, DocIndex, FilePosition, NixDocComment},
};
#[derive(Debug)]
enum FieldType {
Attr,
Lambda,
}
#[derive(Debug)]
struct LookupReason<'a> {
// docs: &'a Docs,
position: &'a FilePosition,
// field: FieldType,
}
pub struct DocBulk {
pub docs: HashMap<Rc<Vec<String>>, Docs>,
}
pub trait Parse {
fn new(path: &PathBuf) -> Self;
}
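/// Record one lookup reason under its position's source file, creating the file entry on first use.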
fn insert_position<'a>(
mut file_map: HashMap<&'a PathBuf, Vec<LookupReason<'a>>>,
position: &'a FilePosition,
item: LookupReason<'a>,
) -> HashMap<&'a PathBuf, Vec<LookupReason<'a>>> {
match file_map.get_mut(&position.file) {
Some(list) => {
list.push(item);
}
None => {
file_map.insert(&position.file, vec![item]);
}
};
file_map
}
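/// Group every attr and lambda position by source file, so each file only needs to be opened and parsed once.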
fn build_file_map(data: &Vec<Docs>) -> HashMap<&PathBuf, Vec<LookupReason>> {
let mut file_map: HashMap<&PathBuf, Vec<LookupReason>> = HashMap::new();
for doc_item in data.iter() {
if let Some(position) = &doc_item.docs.attr.position {
file_map = insert_position(
file_map,
position,
LookupReason {
// docs: doc_item,
position: position,
// field: FieldType::Attr,
},
);
}
if let Some(lambda) = &doc_item.docs.lambda {
if let Some(position) = &lambda.position {
file_map = insert_position(
file_map,
position,
LookupReason {
// docs: doc_item,
position: position,
// field: FieldType::Lambda,
},
);
}
}
}
file_map
}
/// Collect all positions that need to be looked up in advance
/// This saves time afterwards, since we can iterate over the whole file only once and save all interesting ast positions
fn collect_file_positions(lookups: &Vec<LookupReason>) -> HashMap<usize, Vec<usize>> {
let mut positions: HashMap<usize, Vec<usize>> = HashMap::new();
for lookup in lookups {
match positions.get_mut(&lookup.position.line) {
Some(cols) => {
cols.push(lookup.position.column);
}
None => {
positions.insert(lookup.position.line, vec![lookup.position.column]);
}
}
}
positions
}
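/// Write the resolved doc comments back into a cloned list of Docs entries.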
fn fill_docs(
data: &Vec<Docs>,
pos_doc_map: &HashMap<&FilePosition, Option<NixDocComment>>,
) -> Vec<Docs> {
let mut filled_docs = data.clone();
for item in filled_docs.iter_mut() {
if let Some(position) = &item.docs.attr.position {
if let Some(Some(doc_comment)) = pos_doc_map.get(&position) {
item.docs.attr.content = doc_comment.content.clone();
}
}
if let Some(lambda) = item.docs.lambda.as_mut() {
if let Some(position) = &lambda.position {
if let Some(Some(doc_comment)) = pos_doc_map.get(&position) {
lambda.content = doc_comment.content.clone();
lambda.countApplied = doc_comment.count_applied;
}
}
}
}
filled_docs
}
/// Build categories for efficiently finding aliases (alias search is expensive: O(n^2)).
/// Aliases can only exist within one subgroup; iterating over the other items is a waste of time.
/// With the current value introspection, any value that is an alias of a builtin also inherits the builtin's docs and has the isPrimop flag set.
///
/// Group docs into the following subgroups:
/// 1. primop_lambdas
/// e.g., lib.add, builtins.add
///
/// 2. non_primop_lambdas
/// e.g., lib.attrByPath
///
/// 3. partially_applied lambdas
/// e.g., concatLines (which is concatMapStrings applied with f := Lambda<(s: s + "\n");>)
/// This is a special case; it is very hard to properly detect aliases at this level, although an alias must still be found within this subgroup.
///
fn categorize(data: &Vec<Docs>) -> (Vec<&Docs>, Vec<&Docs>, Vec<&Docs>) {
// For finding aliases.
// Group docs into these subgroups.
// Aliases can only exist within one subgroup, iterating over other items is a waste of time.
let mut primop_lambdas: Vec<&Docs> = vec![];
let mut non_primop_lambdas: Vec<&Docs> = vec![];
let mut partially_applieds: Vec<&Docs> = vec![];
for item in data.iter() {
if let Some(lambda) = &item.docs.lambda {
match lambda.countApplied {
Some(0) | None => {
if lambda.isPrimop {
primop_lambdas.push(&item);
}
if !lambda.isPrimop {
non_primop_lambdas.push(&item);
}
}
_ => {
// countApplied >= 1: a partially applied lambda
partially_applieds.push(&item);
}
}
}
}
(primop_lambdas, non_primop_lambdas, partially_applieds)
}
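/// Compute the alias list for every item, searching only within the item's own category.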
fn init_alias_map(
data: &Vec<Docs>,
categories: (Vec<&Docs>, Vec<&Docs>, Vec<&Docs>),
) -> HashMap<Rc<Vec<String>>, Vec<Rc<Vec<String>>>> {
let (primop_lambdas, non_primop_lambdas, partially_applieds) = categories;
let mut alias_map: HashMap<Rc<Vec<String>>, Vec<Rc<Vec<String>>>> = HashMap::new();
for item in data.iter() {
if let Some(lambda) = &item.docs.lambda {
match lambda.countApplied {
Some(0) | None => {
if lambda.isPrimop {
alias_map.insert(item.path.clone(), find_aliases(&item, &primop_lambdas));
}
if !lambda.isPrimop {
alias_map
.insert(item.path.clone(), find_aliases(&item, &non_primop_lambdas));
}
}
_ => {
alias_map.insert(item.path.clone(), find_aliases(&item, &partially_applieds));
}
};
}
}
alias_map
}
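/// Parse the pasta file, resolve all doc-comment positions file by file,
/// attach aliases, and index the result by attribute path.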
impl Parse for DocBulk {
fn new(path: &PathBuf) -> Self {
let start_time = Instant::now();
let data = read_pasta(path);
let file_map = build_file_map(&data);
let mut pos_doc_map: HashMap<&FilePosition, Option<NixDocComment>> = HashMap::new();
for (path, lookups) in file_map.iter() {
let positions = collect_file_positions(lookups);
println!("File {:?}: Lookups {:?}", path.file_name(), positions.len());
let doc_index = DocIndex::new(path, positions);
for lookup in lookups {
pos_doc_map.insert(
lookup.position,
doc_index.get_docs(lookup.position.line, lookup.position.column),
);
}
}
let mut filled_docs = fill_docs(&data, &pos_doc_map);
let categories = categorize(&filled_docs);
let alias_map = init_alias_map(&data, categories);
let mut docs: HashMap<Rc<Vec<String>>, Docs> = HashMap::new();
for item in filled_docs.iter_mut() {
item.aliases = alias_map.get(&item.path).map(|i| i.to_owned());
docs.insert(Rc::clone(&item.path), item.clone());
}
let mut file = File::create("out.json").unwrap();
file.write_all(
serde_json::to_string_pretty(&filled_docs)
.unwrap()
.as_bytes(),
)
.unwrap();
let end_time = Instant::now();
println!(
"parsed: {} doc comments / AST Positions, from {} files in {:?}",
pos_doc_map.len(),
file_map.len(),
end_time - start_time
);
Self { docs }
}
}
/// How to find aliases:
///
/// Partially applied functions -> special case; it is unclear what "correct" means here.
/// That would likely require access to the upvalues?
/// Simple lambdas (not partially applied):
/// Match primops (they don't have a source position) on:
/// equal countApplied,
/// equal content,
/// the other item also being a primop,
/// content not empty.
/// Match non-primops on:
/// equal position.
fn find_aliases(item: &Docs, list: &Vec<&Docs>) -> Vec<Rc<Vec<String>>> {
let res: Vec<Rc<Vec<String>>> = list
.iter()
.filter_map(|other| {
if let (Some(s_meta), Some(o_meta)) = (&item.docs.lambda, &other.docs.lambda) {
// Avoid creating an alias for the same item.
if item.path == other.path {
return None;
}
if count_applied(s_meta) != 0
// Use less accurate name aliases. This can lead to false positives
// TODO: figure out the proper way
&& count_applied(o_meta) == count_applied(s_meta)
&& item.path.last().unwrap() == other.path.last().unwrap()
{
return Some(other.path.clone());
}
return match s_meta.isPrimop {
true => {
let is_empty = match &s_meta.content {
Some(c) => c.is_empty(),
None => true,
};
if o_meta.isPrimop
&& o_meta.content == s_meta.content
&& !is_empty
&& count_applied(s_meta) == 0
&& count_applied(o_meta) == 0
{
return Some(other.path.clone());
}
None
}
false => {
if s_meta.position == o_meta.position
&& count_applied(s_meta) == 0
&& count_applied(o_meta) == 0
{
return Some(other.path.clone());
}
None
}
};
}
None
})
.collect();
res
}
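/// Treat a missing countApplied as zero, i.e. a fully unapplied lambda.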
fn count_applied(meta: &LambdaMeta) -> usize {
meta.countApplied.unwrap_or(0)
}
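
For orientation, a minimal sketch of how the new bulk API could be driven. `DocBulk` and the `Parse` trait are defined in `pesto/src/bulk.rs` above; the module import, the input path, and the queried attribute path are hypothetical:

```rust
use std::{path::PathBuf, rc::Rc};

use crate::bulk::{DocBulk, Parse}; // hypothetical import; bulk is a module of the pesto binary

fn demo() {
    // Hypothetical pasta file with positions emitted by the Nix side.
    let bulk = DocBulk::new(&PathBuf::from("test_data/bulk/data.json"));

    // Entries are keyed by their attribute path, e.g. ["lib", "add"].
    let key: Rc<Vec<String>> = Rc::new(vec!["lib".into(), "add".into()]);
    if let Some(doc) = bulk.docs.get(&key) {
        // Aliases were attached during parsing, e.g. lib.trivial.add and builtins.add.
        println!("aliases of lib.add: {:?}", doc.aliases);
    }
}
```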


@@ -1,10 +1,11 @@
mod bulk;
mod comment;
mod pasta;
mod position;
mod tests;
use clap::Parser;
use std::{fs::File, io::Write, path::PathBuf, println};
use std::{collections::HashMap, path::PathBuf, println};
use crate::{
pasta::read_pasta,
@@ -52,72 +53,19 @@ pub fn main() {
let opts = Options::parse();
if let Some(nix_file) = opts.file {
let pos = DocIndex::new(&nix_file, vec![(opts.line.unwrap(), opts.column.unwrap())]);
let mut positions = HashMap::new();
positions.insert(opts.line.unwrap(), vec![opts.column.unwrap()]);
let pos = DocIndex::new(&nix_file, positions);
if let Some(docs) = pos.get_docs(opts.line.unwrap(), opts.column.unwrap()) {
println!("{:?}", docs);
}
}
if let Some(pos_file) = opts.pos_file {
let mut data = read_pasta(&pos_file);
let data = read_pasta(&pos_file);
println!("data length: {}", data.len());
// Iter mutable, to change the doc "content" field
for (idx, item) in data.iter_mut().enumerate() {
println!("{}", idx);
if let Some(position) = &item.docs.attr.position {
let index = DocIndex::new(&position.file, vec![(position.line, position.column)]);
let doc_comment = index.get_docs(position.line, position.column);
if let Some(meta) = doc_comment {
item.docs.attr.content = meta.content;
}
}
if let Some(lambda) = &mut item.docs.lambda {
if let Some(position) = &lambda.position {
let index =
DocIndex::new(&position.file, vec![(position.line, position.column)]);
let doc_comment = index.get_docs(position.line, position.column);
if let Some(meta) = doc_comment {
lambda.content = meta.content;
lambda.countApplied = Some(usize::try_from(meta.count_applied).unwrap());
}
}
}
}
println!("{:?}", data.get(0));
let mut out_file = File::create("out.json").unwrap();
out_file.write(serde_json::to_string(&data).unwrap().as_bytes());
// Group positions by file, to avoid opening the same file again.
// let mut positions_by_file: HashMap<&PathBuf, Vec<&Docs>> = HashMap::new();
// for item in &data {
// if let Some(ref position) = item.docs.attr.position {
// positions_by_file = populate_map(positions_by_file, &position, item);
// }
// if let Some(ref position) = item.docs.lambda.position {
// positions_by_file = populate_map(positions_by_file, &position, item);
// }
// }
// if let Some((f, _p)) = positions_by_file.get_key_value(&PathBuf::from(
// "/nix/store/9g6kcr0ny3k369ydl36c8mz2gf69432x-source/lib/strings-with-deps.nix",
// )) {
// println!("{:?} {:?}", f, _p);
// }
// positions_by_file.iter().for_each(|(file, positions)| {
// let index = DocIndex::new(&file);
// positions.iter().for_each(|reason: &LookupReason| {
// let docs = index.get_docs(reason.position.line, reason.position.column);
// println!("{:?}", docs);
// data.iter()
// .map(|item| if item.path == reason.id { item } else { item });
// })
// });
// println!("{:?}", res);
// if let Some(docs) = pos.get_docs() {
// println!("{:?}", docs);
// }
// println!("{:?}", data.get(10));
}
}


@@ -1,14 +1,14 @@
use std::{fs, path::PathBuf};
use std::{fs, path::PathBuf, rc::Rc};
use serde::{Deserialize, Serialize};
use crate::position::FilePosition;
#[serde_with::skip_serializing_none]
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct LambdaMeta {
#[allow(non_snake_case)]
pub isPrimop: Option<bool>,
pub isPrimop: bool,
pub name: Option<String>,
pub position: Option<FilePosition>,
pub args: Option<Vec<String>>,
@@ -20,22 +20,23 @@ pub struct LambdaMeta {
#[allow(non_snake_case)]
pub countApplied: Option<usize>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct AttrMeta {
pub position: Option<FilePosition>,
/// Resolved doc-comment content for this attribute position.
pub content: Option<String>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct DocsMeta {
pub lambda: Option<LambdaMeta>,
pub attr: AttrMeta,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Docs {
pub docs: DocsMeta,
pub path: Vec<String>,
pub aliases: Option<Vec<Rc<Vec<String>>>>,
pub path: Rc<Vec<String>>,
}
pub fn read_pasta(path: &PathBuf) -> Vec<Docs> {
@@ -46,7 +47,7 @@ pub fn read_pasta(path: &PathBuf) -> Vec<Docs> {
data
}
Err(e) => {
panic!("error, {}", e)
panic!("Could not parse input data: {}", e)
}
}
}


@@ -8,8 +8,9 @@ use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::rc::Rc;
use std::time::Instant;
use std::{fs, path::PathBuf, println};
use std::{format, fs, path::PathBuf, println};
use crate::comment::get_expr_docs;
@@ -19,7 +20,7 @@ pub struct TextPosition {
pub column: usize,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Hash, Clone)]
pub struct FilePosition {
pub file: PathBuf,
pub line: usize,
@@ -29,82 +30,83 @@ pub struct FilePosition {
#[derive(Debug)]
pub struct DocIndex<'a> {
file: &'a PathBuf,
src: Rc<String>,
ast: SyntaxNode,
pos_idx: HashMap<(usize, usize), TextSize>,
node_idx: HashMap<TextSize, Option<SyntaxNode>>,
}
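/// Lookup interface: build the index for a fixed set of positions once,
/// then resolve doc comments by line and column.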
pub trait DocComment<'a> {
fn new(file: &'a PathBuf, positions: Vec<(usize, usize)>) -> Self;
// Single item methods
fn get_pos_idx(&self, line: usize, column: usize) -> Option<TextSize>;
fn node_at_pos(&self, line: usize, column: usize) -> Option<SyntaxNode>;
fn get_docs(&self, line: usize, column: usize) -> Option<LambdaDoc>;
// fn nodes_at_pos(&self, pos: &Vec<TextSize>) -> HashMap<TextSize, Option<SyntaxNode>>;
// fn init_pos_idx(&self, positions: Vec<(usize, usize)>) -> HashMap<(usize, usize), TextSize>;
fn get_docs_list(&self, idx: Vec<TextSize>) -> HashMap<TextSize, Option<LambdaDoc>>;
fn new(file: &'a PathBuf, positions: HashMap<usize, Vec<usize>>) -> Self;
fn get_docs(&self, line: usize, column: usize) -> Option<NixDocComment>;
}
fn get_src(path: &PathBuf) -> Option<String> {
fn get_src(path: &PathBuf) -> String {
if let Ok(src) = fs::read_to_string(path) {
return Some(src);
return src;
}
panic!("could not read file");
}
/// Initializes HashMaps for lookup operations between line:column and absolute position.
/// Returns both:
/// a position HashMap from l:c -> abs, and
/// a reverse position HashMap from abs -> l:c.
fn init_pos_idx(
path: &PathBuf,
positions: Vec<(usize, usize)>,
) -> HashMap<(usize, usize), TextSize> {
positions: HashMap<usize, Vec<usize>>,
) -> (
HashMap<(usize, usize), TextSize>,
HashMap<TextSize, (usize, usize)>,
) {
let mut res = HashMap::new();
let mut inverse: HashMap<TextSize, (usize, usize)> = HashMap::new();
let file = File::open(path).unwrap();
let reader = BufReader::new(file);
let mut res = HashMap::new();
let mut curr_line = 0;
let mut curr_position = 0;
for line in reader.lines() {
positions.iter().for_each(|(line, col)| {
if *line == curr_line + 1 {
res.insert(
(*line, *col),
TextSize::from(u32::try_from(curr_position + col - 1).unwrap()),
);
for (curr_line, line) in reader.lines().enumerate() {
match line {
Ok(line) => {
if let Some(cols) = positions.get(&(curr_line + 1)) {
cols.iter().for_each(|col| {
let lc_tuple = (curr_line + 1, *col);
let absolute =
TextSize::from(u32::try_from(curr_position + col - 1).unwrap());
res.insert(lc_tuple, absolute);
inverse.insert(absolute, lc_tuple);
});
}
curr_position += line.len() + 1;
}
});
curr_line += 1;
curr_position += line.unwrap().chars().count() + 1;
_ => {}
}
}
res
(res, inverse)
}
// Resolve all lookup positions in a single AST traversal,
// since iterating over the AST repeatedly can be expensive.
fn init_node_idx(ast: &SyntaxNode, pos: &Vec<TextSize>) -> HashMap<TextSize, Option<SyntaxNode>> {
let mut res = HashMap::new();
fn init_node_idx(
ast: &SyntaxNode,
pos: &HashMap<TextSize, (usize, usize)>,
) -> HashMap<TextSize, Option<SyntaxNode>> {
let mut res: HashMap<TextSize, Option<SyntaxNode>> = HashMap::new();
for ev in ast.preorder() {
match ev {
WalkEvent::Enter(node) => {
if let Some(pos_key) = pos
.iter()
.find(|position| node.text_range().start() == **position)
{
if res.get(pos_key).is_none() {
res.insert(*pos_key, Some(node));
let cursor = node.text_range().start();
if let Some(_) = pos.get(&cursor) {
if res.get(&cursor).is_none() {
res.insert(cursor, Some(node));
}
}
}
WalkEvent::Leave(node) => {
if let Some(pos_key) = pos
.iter()
.find(|position| node.text_range().end() == **position)
{
if res.get(pos_key).is_none() {
res.insert(*pos_key, Some(node));
let cursor = node.text_range().end();
if let Some(_) = pos.get(&cursor) {
if res.get(&cursor).is_none() {
res.insert(cursor, Some(node));
}
}
}
@@ -114,101 +116,79 @@ fn init_node_idx(ast: &SyntaxNode, pos: &Vec<TextSize>) -> HashMap<TextSize, Opt
}
impl<'a> DocComment<'a> for DocIndex<'a> {
fn new(file: &'a PathBuf, positions: Vec<(usize, usize)>) -> Self {
if let Some(src) = get_src(file) {
let rc: Rc<String> = Rc::new(src);
let ast = rnix::Root::parse(Rc::clone(&rc).as_str()).syntax();
let pos_idx = init_pos_idx(&file, positions);
let ast_positions: Vec<TextSize> = pos_idx.values().map(|t| *t).collect();
let node_idx = init_node_idx(&ast, &ast_positions);
fn new(file: &'a PathBuf, positions: HashMap<usize, Vec<usize>>) -> Self {
let src = get_src(file);
let rc: Rc<String> = Rc::new(src);
let mut start_time = Instant::now();
let ast = rnix::Root::parse(Rc::clone(&rc).as_str()).syntax();
let mut end_time = Instant::now();
// println!("{:?} - Parsed ast", end_time - start_time);
return Self {
file,
ast,
src: rc,
pos_idx,
node_idx,
};
} else {
panic!("cannot open file");
}
start_time = Instant::now();
let (pos_idx, inverse_pos_idx) = init_pos_idx(&file, positions);
end_time = Instant::now();
// println!(
// "{:?} - Translated col,line into abs positions",
// end_time - start_time
// );
start_time = Instant::now();
let node_idx = init_node_idx(&ast, &inverse_pos_idx);
end_time = Instant::now();
// println!(
// "{:?} - Find all ast nodes for positions",
// end_time - start_time
// );
return Self {
file,
pos_idx,
node_idx,
};
}
fn get_pos_idx(&self, l: usize, c: usize) -> Option<TextSize> {
let src = &self.src;
let mut result: usize = 0;
let mut pos: Option<usize> = None;
for (line, content) in src.lines().enumerate() {
if line + 1 == l {
pos = Some(result + c - 1);
break;
}
result += content.len() + 1;
fn get_docs(&self, line: usize, column: usize) -> Option<NixDocComment> {
let idx = self.pos_idx.get(&(line, column));
if idx.is_none() {
let msg = format!(
"Position {} {} may not exist in file {:?}",
line, column, self.file
);
panic!("{:?} @ {}", self.file, msg);
}
return pos.map(|pos| TextSize::from(u32::try_from(pos).unwrap()));
}
fn node_at_pos(&self, line: usize, column: usize) -> Option<SyntaxNode> {
let pos_idx = &self.get_pos_idx(line, column).unwrap();
let mut expr = None;
for ev in self.ast.preorder() {
match ev {
WalkEvent::Enter(node) => {
if node.text_range().start() == *pos_idx {
expr = Some(node);
break;
if let Some(idx) = idx {
let expr = self.node_idx.get(idx);
// println!("L{}:C{}, expr: {:?}", line, column, expr);
if let Some(Some(expr)) = expr {
let doc = match expr.kind() {
rnix::SyntaxKind::NODE_LAMBDA => {
let (outer_lambda, count_applied) = get_parent_lambda(&expr);
NixDocComment {
content: get_expr_docs(&outer_lambda),
count_applied: Some(count_applied),
}
}
}
WalkEvent::Leave(node) => {
if node.text_range().end() == *pos_idx {
expr = Some(node);
break;
}
}
}
}
return expr;
}
fn get_docs_list(&self, positions: Vec<TextSize>) -> HashMap<TextSize, Option<LambdaDoc>> {
let mut res = HashMap::new();
let ast_map = init_node_idx(&self.ast, &positions);
positions.iter().for_each(|position| {
if let Some(Some(expr)) = ast_map.get(position) {
let (outer_lambda, count_applied) = get_parent_lambda(&expr);
let doc = LambdaDoc {
content: get_expr_docs(&outer_lambda),
count_applied,
_ => NixDocComment {
content: get_expr_docs(&expr),
count_applied: None,
},
};
res.insert(*position, Some(doc));
return Some(doc);
}
});
res
}
fn get_docs(&self, line: usize, column: usize) -> Option<LambdaDoc> {
let idx = self.pos_idx.get(&(line, column)).unwrap();
let expr = self.node_idx.get(idx);
println!("L{}:C{}, expr: {:?}", line, column, expr);
if let Some(Some(e)) = expr {
let (outer_lambda, count_applied) = get_parent_lambda(&e);
return Some(LambdaDoc {
content: get_expr_docs(&outer_lambda),
count_applied,
});
}
return None;
}
}
#[derive(Debug)]
pub struct LambdaDoc {
pub struct NixDocComment {
pub content: Option<String>,
pub count_applied: i32,
pub count_applied: Option<usize>,
}
fn get_parent_lambda(expr: &SyntaxNode) -> (SyntaxNode, i32) {
fn get_parent_lambda(expr: &SyntaxNode) -> (SyntaxNode, usize) {
let mut count_outer_lambda = 0;
let mut lambda_parent = peek_parent_lambda(expr);
let mut res = expr.to_owned();


@@ -1,16 +1,20 @@
#[cfg(test)]
mod tests {
use std::{ffi::OsStr, format, fs, path::PathBuf, println};
use std::{collections::HashMap, ffi::OsStr, format, fs, path::PathBuf, println};
use crate::position::{DocComment, DocIndex, TextPosition};
use crate::{
bulk::{DocBulk, Parse},
position::{DocComment, DocIndex, TextPosition},
};
use expect_test::expect_file;
fn dir_tests<F>(dir: &str, get_actual: F)
fn dir_tests<F>(dir: &str, ext: &str, get_actual: F)
where
F: Fn(&PathBuf, (usize, usize)) -> String,
F: Fn(&PathBuf) -> String,
{
println!("{:?}", env!("CARGO_MANIFEST_DIR"));
let base_path: PathBuf = [env!("CARGO_MANIFEST_DIR"), "test_data", dir]
.iter()
.collect();
@@ -20,18 +24,13 @@ mod tests {
for entry in entries {
let path = entry.unwrap().path();
if path.extension() != Some(OsStr::new("nix")) {
if path.extension() != Some(OsStr::new(ext)) {
continue;
}
println!("testing: {}", path.display());
let mut pos_path = path.clone();
pos_path.set_extension("pos");
let raw_pos = fs::read_to_string(&pos_path).unwrap();
let pos: TextPosition = serde_json::from_str(&raw_pos).unwrap();
let actual = get_actual(&path, (pos.line, pos.column));
let actual = get_actual(&path);
expect_file![path.with_extension("expect")].assert_eq(&actual);
}
@@ -39,12 +38,30 @@ mod tests {
#[test]
fn test_main() {
dir_tests("atom", |path, (line, column)| {
let pos = DocIndex::new(path, vec![(line, column)]);
dir_tests("atom", "nix", |path| {
let mut pos_path = path.clone();
pos_path.set_extension("pos");
let pos_str = fs::read_to_string(&pos_path).unwrap();
// let idx = pos.get_pos_idxs_buffer(vec![(line, column), (1, 1)]);
// println!("{:?}", idx);
let test_position: TextPosition = serde_json::from_str(&pos_str).unwrap();
let line = test_position.line;
let column = test_position.column;
let mut positions: HashMap<usize, Vec<usize>> = HashMap::new();
positions.insert(line, vec![column]);
let pos = DocIndex::new(path, positions);
format!("{:?}", pos.get_docs(line, column))
})
}
#[test]
fn test_bulk() {
dir_tests("bulk", "json", |path| {
let bulk = DocBulk::new(&PathBuf::from(path));
let mut res: String = String::new();
for (k, item) in bulk.docs.iter() {
res += &format!("{:?} {:#?}\n", k, item);
}
res
})
}
}
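
A note on the snapshot workflow used by these tests: `expect_file!` compares the actual output against a `.expect` file on disk, and expect-test regenerates snapshots when the test runs with `UPDATE_EXPECT=1`. A minimal sketch, with a hypothetical path:

```rust
use expect_test::expect_file;

// Compares `actual` against the stored snapshot; rerun the test with
// UPDATE_EXPECT=1 to rewrite the .expect file after an intended change.
fn check(actual: &str) {
    expect_file!["./test_data/bulk/data.expect"].assert_eq(actual);
}
```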

File diff suppressed because it is too large


@@ -0,0 +1,170 @@
/**
Library of low-level helper functions for nix expressions.
*
* Please implement (mostly) exhaustive unit tests
* for new functions in `./tests.nix`.
*/
let
inherit (import ./fixed-points.nix { inherit lib; }) makeExtensible;
lib = makeExtensible (self: let
callLibs = file: import file { lib = self; };
in {
# often used, or depending on very little
trivial = callLibs ./trivial.nix;
fixedPoints = callLibs ./fixed-points.nix;
# datatypes
attrsets = callLibs ./attrsets.nix;
lists = callLibs ./lists.nix;
strings = callLibs ./strings.nix;
stringsWithDeps = callLibs ./strings-with-deps.nix;
# packaging
customisation = callLibs ./customisation.nix;
derivations = callLibs ./derivations.nix;
maintainers = import ../maintainers/maintainer-list.nix;
teams = callLibs ../maintainers/team-list.nix;
meta = callLibs ./meta.nix;
versions = callLibs ./versions.nix;
# module system
modules = callLibs ./modules.nix;
options = callLibs ./options.nix;
types = callLibs ./types.nix;
# constants
licenses = callLibs ./licenses.nix;
sourceTypes = callLibs ./source-types.nix;
systems = callLibs ./systems;
# serialization
cli = callLibs ./cli.nix;
gvariant = callLibs ./gvariant.nix;
generators = callLibs ./generators.nix;
# misc
asserts = callLibs ./asserts.nix;
debug = callLibs ./debug.nix;
misc = callLibs ./deprecated.nix;
# domain-specific
fetchers = callLibs ./fetchers.nix;
# Eval-time filesystem handling
path = callLibs ./path;
filesystem = callLibs ./filesystem.nix;
fileset = callLibs ./fileset;
sources = callLibs ./sources.nix;
# back-compat aliases
platforms = self.systems.doubles;
# linux kernel configuration
kernel = callLibs ./kernel.nix;
inherit (builtins) add addErrorContext attrNames concatLists
deepSeq elem elemAt filter genericClosure genList getAttr
hasAttr head isAttrs isBool isInt isList isPath isString length
lessThan listToAttrs pathExists readFile replaceStrings seq
stringLength sub substring tail trace;
inherit (self.trivial) id const pipe concat or and bitAnd bitOr bitXor
bitNot boolToString mergeAttrs flip mapNullable inNixShell isFloat min max
importJSON importTOML warn warnIf warnIfNot throwIf throwIfNot checkListOfEnum
info showWarnings nixpkgsVersion version isInOldestRelease
mod compare splitByAndCompare
functionArgs setFunctionArgs isFunction toFunction
toHexString toBaseDigits inPureEvalMode;
inherit (self.fixedPoints) fix fix' converge extends composeExtensions
composeManyExtensions makeExtensible makeExtensibleWithCustomName;
inherit (self.attrsets) attrByPath hasAttrByPath setAttrByPath
getAttrFromPath attrVals attrValues getAttrs catAttrs filterAttrs
filterAttrsRecursive foldlAttrs foldAttrs collect nameValuePair mapAttrs
mapAttrs' mapAttrsToList attrsToList concatMapAttrs mapAttrsRecursive
mapAttrsRecursiveCond genAttrs isDerivation toDerivation optionalAttrs
zipAttrsWithNames zipAttrsWith zipAttrs recursiveUpdateUntil
recursiveUpdate matchAttrs overrideExisting showAttrPath getOutput getBin
getLib getDev getMan chooseDevOutputs zipWithNames zip
recurseIntoAttrs dontRecurseIntoAttrs cartesianProductOfSets
updateManyAttrsByPath;
inherit (self.lists) singleton forEach foldr fold foldl foldl' imap0 imap1
concatMap flatten remove findSingle findFirst any all count
optional optionals toList range replicate partition zipListsWith zipLists
reverseList listDfs toposort sort naturalSort compareLists take
drop sublist last init crossLists unique intersectLists
subtractLists mutuallyExclusive groupBy groupBy';
inherit (self.strings) concatStrings concatMapStrings concatImapStrings
intersperse concatStringsSep concatMapStringsSep
concatImapStringsSep concatLines makeSearchPath makeSearchPathOutput
makeLibraryPath makeBinPath optionalString
hasInfix hasPrefix hasSuffix stringToCharacters stringAsChars escape
escapeShellArg escapeShellArgs
isStorePath isStringLike
isValidPosixName toShellVar toShellVars
escapeRegex escapeURL escapeXML replaceChars lowerChars
upperChars toLower toUpper addContextFrom splitString
removePrefix removeSuffix versionOlder versionAtLeast
getName getVersion
cmakeOptionType cmakeBool cmakeFeature
mesonOption mesonBool mesonEnable
nameFromURL enableFeature enableFeatureAs withFeature
withFeatureAs fixedWidthString fixedWidthNumber
toInt toIntBase10 readPathsFromFile fileContents;
inherit (self.stringsWithDeps) textClosureList textClosureMap
noDepEntry fullDepEntry packEntry stringAfter;
inherit (self.customisation) overrideDerivation makeOverridable
callPackageWith callPackagesWith extendDerivation hydraJob
makeScope makeScopeWithSplicing makeScopeWithSplicing';
inherit (self.derivations) lazyDerivation;
inherit (self.meta) addMetaAttrs dontDistribute setName updateName
appendToName mapDerivationAttrset setPrio lowPrio lowPrioSet hiPrio
hiPrioSet getLicenseFromSpdxId getExe getExe';
inherit (self.filesystem) pathType pathIsDirectory pathIsRegularFile;
inherit (self.sources) cleanSourceFilter
cleanSource sourceByRegex sourceFilesBySuffices
commitIdFromGitRepo cleanSourceWith pathHasContext
canCleanSource pathIsGitRepo;
inherit (self.modules) evalModules setDefaultModuleLocation
unifyModuleSyntax applyModuleArgsIfFunction mergeModules
mergeModules' mergeOptionDecls evalOptionValue mergeDefinitions
pushDownProperties dischargeProperties filterOverrides
sortProperties fixupOptionType mkIf mkAssert mkMerge mkOverride
mkOptionDefault mkDefault mkImageMediaOverride mkForce mkVMOverride
mkFixStrictness mkOrder mkBefore mkAfter mkAliasDefinitions
mkAliasAndWrapDefinitions fixMergeModules mkRemovedOptionModule
mkRenamedOptionModule mkRenamedOptionModuleWith
mkMergedOptionModule mkChangedOptionModule
mkAliasOptionModule mkDerivedConfig doRename
mkAliasOptionModuleMD;
inherit (self.options) isOption mkEnableOption mkSinkUndeclaredOptions
mergeDefaultOption mergeOneOption mergeEqualOption mergeUniqueOption
getValues getFiles
optionAttrSetToDocList optionAttrSetToDocList'
scrubOptionValue literalExpression literalExample
showOption showOptionWithDefLocs showFiles
unknownModule mkOption mkPackageOption mkPackageOptionMD
mdDoc literalMD;
inherit (self.types) isType setType defaultTypeMerge defaultFunctor
isOptionType mkOptionType;
inherit (self.asserts)
assertMsg assertOneOf;
inherit (self.debug) traceIf traceVal traceValFn
traceSeq traceSeqN traceValSeq
traceValSeqFn traceValSeqN traceValSeqNFn traceFnSeqN
runTests testAllTrue;
inherit (self.misc) maybeEnv defaultMergeArg defaultMerge foldArgs
maybeAttrNullable maybeAttr ifEnable checkFlag getValue
checkReqs uniqList uniqListExt condConcat lazyGenericClosure
innerModifySumArgs modifySumArgs innerClosePropagation
closePropagation mapAttrsFlatten nvs setAttr setAttrMerge
mergeAttrsWithFunc mergeAttrsConcatenateValues
mergeAttrsNoOverride mergeAttrByFunc mergeAttrsByFuncDefaults
mergeAttrsByFuncDefaultsClean mergeAttrBy
fakeHash fakeSha256 fakeSha512
nixType imap;
inherit (self.versions)
splitVersion;
});
in lib

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,825 @@
{ lib }:
rec {
## Simple (higher order) functions
/**
The identity function
For when you need a function that does nothing.
# Type
```
id :: a -> a
```
# Arguments
- [x] The value to return
*/
id =
# The value to return
x: x;
/**
The constant function
Ignores the second argument. If called with only one argument,
constructs a function that always returns a static value.
# Example
```nix
let f = const 5; in f 10
=> 5
```
# Type
```
const :: a -> b -> a
```
# Arguments
- [x] Value to return
- [y] Value to ignore
*/
const =
# Value to return
x:
# Value to ignore
y: x;
/**
Pipes a value through a list of functions, left to right.
# Example
```nix
pipe 2 [
(x: x + 2) # 2 + 2 = 4
(x: x * 2) # 4 * 2 = 8
]
=> 8
# ideal to do text transformations
pipe [ "a/b" "a/c" ] [
# create the cp command
(map (file: ''cp "${src}/${file}" $out\n''))
# concatenate all commands into one string
lib.concatStrings
# make that string into a nix derivation
(pkgs.runCommand "copy-to-out" {})
]
=> <drv which copies all files to $out>
The output type of each function has to be the input type
of the next function, and the last function returns the
final value.
```
# Type
```
pipe :: a -> [<functions>] -> <return type of last function>
```
# Arguments
- [val]
- [functions]
*/
pipe = val: functions:
let reverseApply = x: f: f x;
in builtins.foldl' reverseApply val functions;
# note please dont add a function like `compose = flip pipe`.
# This would confuse users, because the order of the functions
# in the list is not clear. With pipe, its obvious that it
# goes first-to-last. With `compose`, not so much.
## Named versions corresponding to some builtin operators.
/**
Concatenate two lists
# Example
```nix
concat [ 1 2 ] [ 3 4 ]
=> [ 1 2 3 4 ]
```
# Type
```
concat :: [a] -> [a] -> [a]
```
# Arguments
- [x]
- [y]
*/
concat = x: y: x ++ y;
/**
boolean or
# Arguments
- [x]
- [y]
*/
or = x: y: x || y;
/**
boolean and
# Arguments
- [x]
- [y]
*/
and = x: y: x && y;
/**
bitwise and
*/
bitAnd = builtins.bitAnd
or (import ./zip-int-bits.nix
(a: b: if a==1 && b==1 then 1 else 0));
/**
bitwise or
*/
bitOr = builtins.bitOr
or (import ./zip-int-bits.nix
(a: b: if a==1 || b==1 then 1 else 0));
/**
bitwise xor
*/
bitXor = builtins.bitXor
or (import ./zip-int-bits.nix
(a: b: if a!=b then 1 else 0));
/**
bitwise not
*/
bitNot = builtins.sub (-1);
/**
Convert a boolean to a string.
This function uses the strings "true" and "false" to represent
boolean values. Calling `toString` on a bool instead returns "1"
and "" (sic!).
# Type
```
boolToString :: bool -> string
```
# Arguments
- [b]
*/
boolToString = b: if b then "true" else "false";
/**
Merge two attribute sets shallowly, right side trumps left
mergeAttrs :: attrs -> attrs -> attrs
# Example
```nix
mergeAttrs { a = 1; b = 2; } { b = 3; c = 4; }
=> { a = 1; b = 3; c = 4; }
```
# Arguments
- [x] Left attribute set
- [y] Right attribute set (higher precedence for equal keys)
*/
mergeAttrs =
# Left attribute set
x:
# Right attribute set (higher precedence for equal keys)
y: x // y;
/**
Flip the order of the arguments of a binary function.
# Example
```nix
flip concat [1] [2]
=> [ 2 1 ]
```
# Type
```
flip :: (a -> b -> c) -> (b -> a -> c)
```
# Arguments
- [f]
- [a]
- [b]
*/
flip = f: a: b: f b a;
/**
Apply function if the supplied argument is non-null.
# Example
```nix
mapNullable (x: x+1) null
=> null
mapNullable (x: x+1) 22
=> 23
```
# Arguments
- [f] Function to call
- [a] Argument to check for null before passing it to `f`
*/
mapNullable =
# Function to call
f:
# Argument to check for null before passing it to `f`
a: if a == null then a else f a;
# Pull in some builtins not included elsewhere.
inherit (builtins)
pathExists readFile isBool
isInt isFloat add sub lessThan
seq deepSeq genericClosure;
## nixpkgs version strings
/**
Returns the current full nixpkgs version number.
*/
version = release + versionSuffix;
/**
Returns the current nixpkgs release number as string.
*/
release = lib.strings.fileContents ../.version;
/**
The latest release that is supported, at the time of release branch-off,
if applicable.
Ideally, out-of-tree modules should be able to evaluate cleanly with all
supported Nixpkgs versions (master, release and old release until EOL).
So if possible, deprecation warnings should take effect only when all
out-of-tree expressions/libs/modules can upgrade to the new way without
losing support for supported Nixpkgs versions.
This release number allows deprecation warnings to be implemented such that
they take effect as soon as the oldest release reaches end of life.
*/
oldestSupportedRelease =
# Update on master only. Do not backport.
2305;
/**
Whether a feature is supported in all supported releases (at the time of
release branch-off, if applicable). See `oldestSupportedRelease`.
# Arguments
- [release] Release number of feature introduction as an integer, e.g. 2111 for 21.11.
Set it to the upcoming release, matching the nixpkgs/.version file.
*/
isInOldestRelease =
/**
Release number of feature introduction as an integer, e.g. 2111 for 21.11.
Set it to the upcoming release, matching the nixpkgs/.version file.
*/
release:
release <= lib.trivial.oldestSupportedRelease;
/**
Returns the current nixpkgs release code name.
On each release the first letter is bumped and a new animal is chosen
starting with that new letter.
*/
codeName = "Tapir";
/**
Returns the current nixpkgs version suffix as string.
*/
versionSuffix =
let suffixFile = ../.version-suffix;
in if pathExists suffixFile
then lib.strings.fileContents suffixFile
else "pre-git";
/**
Attempts to return the current revision of nixpkgs and
returns the supplied default value otherwise.
# Type
```
revisionWithDefault :: string -> string
```
# Arguments
- [default] Default value to return if revision can not be determined
*/
revisionWithDefault =
# Default value to return if revision can not be determined
default:
let
revisionFile = "${toString ./..}/.git-revision";
gitRepo = "${toString ./..}/.git";
in if lib.pathIsGitRepo gitRepo
then lib.commitIdFromGitRepo gitRepo
else if lib.pathExists revisionFile then lib.fileContents revisionFile
else default;
nixpkgsVersion = builtins.trace "`lib.nixpkgsVersion` is deprecated, use `lib.version` instead!" version;
/**
Determine whether the function is being called from inside a Nix
shell.
# Type
```
inNixShell :: bool
```
*/
inNixShell = builtins.getEnv "IN_NIX_SHELL" != "";
/**
Determine whether the function is being called from inside pure-eval mode
by seeing whether `builtins` contains `currentSystem`. If not, we must be in
pure-eval mode.
# Type
```
inPureEvalMode :: bool
```
*/
inPureEvalMode = ! builtins ? currentSystem;
## Integer operations
/**
Return minimum of two numbers.
# Arguments
- [x]
- [y]
*/
min = x: y: if x < y then x else y;
/**
Return maximum of two numbers.
# Arguments
- [x]
- [y]
*/
max = x: y: if x > y then x else y;
/**
Integer modulus
# Example
```nix
mod 11 10
=> 1
mod 1 10
=> 1
```
# Arguments
- [base]
- [int]
*/
mod = base: int: base - (int * (builtins.div base int));
## Comparisons
/**
C-style comparisons
a < b, compare a b => -1
a == b, compare a b => 0
a > b, compare a b => 1
# Arguments
- [a]
- [b]
*/
compare = a: b:
if a < b
then -1
else if a > b
then 1
else 0;
/**
Split type into two subtypes by predicate `p`, take all elements
of the first subtype to be less than all the elements of the
second subtype, compare elements of a single subtype with `yes`
and `no` respectively.
# Example
```nix
let cmp = splitByAndCompare (hasPrefix "foo") compare compare; in
cmp "a" "z" => -1
cmp "fooa" "fooz" => -1
cmp "f" "a" => 1
cmp "fooa" "a" => -1
# while
compare "fooa" "a" => 1
```
# Type
```
(a -> bool) -> (a -> a -> int) -> (a -> a -> int) -> (a -> a -> int)
```
# Arguments
- [p] Predicate
- [yes] Comparison function if predicate holds for both values
- [no] Comparison function if predicate holds for neither value
- [a] First value to compare
- [b] Second value to compare
*/
splitByAndCompare =
# Predicate
p:
# Comparison function if predicate holds for both values
yes:
# Comparison function if predicate holds for neither value
no:
# First value to compare
a:
# Second value to compare
b:
if p a
then if p b then yes a b else -1
else if p b then 1 else no a b;
/**
Reads a JSON file.
# Type
```
importJSON :: path -> any
```
# Arguments
- [path]
*/
importJSON = path:
builtins.fromJSON (builtins.readFile path);
/**
Reads a TOML file.
# Type
```
importTOML :: path -> any
```
# Arguments
- [path]
*/
importTOML = path:
builtins.fromTOML (builtins.readFile path);
## Warnings
# See https://github.com/NixOS/nix/issues/749. Eventually we'd like these
# to expand to Nix builtins that carry metadata so that Nix can filter out
# the INFO messages without parsing the message string.
#
# Usage:
# {
# foo = lib.warn "foo is deprecated" oldFoo;
# bar = lib.warnIf (bar == "") "Empty bar is deprecated" bar;
# }
#
# TODO: figure out a clever way to integrate location information from
# something like __unsafeGetAttrPos.
/**
Print a warning before returning the second argument. This function behaves
like `builtins.trace`, but requires a string message and formats it as a
warning, including the `warning: ` prefix.
To get a call stack trace and abort evaluation, set the environment variable
`NIX_ABORT_ON_WARN=true` and set the Nix options `--option pure-eval false --show-trace`
# Type
```
string -> a -> a
```
*/
warn =
if lib.elem (builtins.getEnv "NIX_ABORT_ON_WARN") ["1" "true" "yes"]
then msg: builtins.trace "warning: ${msg}" (abort "NIX_ABORT_ON_WARN=true; warnings are treated as unrecoverable errors.")
else msg: builtins.trace "warning: ${msg}";
/**
Like warn, but only warn when the first argument is `true`.
# Type
```
bool -> string -> a -> a
```
# Arguments
- [cond]
- [msg]
*/
warnIf = cond: msg: if cond then warn msg else x: x;
/**
Like warnIf, but negated (warn if the first argument is `false`).
# Type
```
bool -> string -> a -> a
```
# Arguments
- [cond]
- [msg]
*/
warnIfNot = cond: msg: if cond then x: x else warn msg;
/**
Like the `assert b; e` expression, but with a custom error message and
without the semicolon.
If true, return the identity function, `r: r`.
If false, throw the error message.
Calls can be juxtaposed using function application, as `(r: r) a = a`, so
`(r: r) (r: r) a = a`, and so forth.
# Example
```nix
throwIfNot (lib.isList overlays) "The overlays argument to nixpkgs must be a list."
lib.foldr (x: throwIfNot (lib.isFunction x) "All overlays passed to nixpkgs must be functions.") (r: r) overlays
pkgs
```
# Type
```
bool -> string -> a -> a
```
# Arguments
- [cond]
- [msg]
*/
throwIfNot = cond: msg: if cond then x: x else throw msg;
/**
Like throwIfNot, but negated (throw if the first argument is `true`).
# Type
```
bool -> string -> a -> a
```
# Arguments
- [cond]
- [msg]
*/
throwIf = cond: msg: if cond then throw msg else x: x;
/**
Check if the elements in a list are valid values from an enum, returning the identity function, or throwing an error message otherwise.
# Example
```nix
let colorVariants = ["bright" "dark" "black"]
in checkListOfEnum "color variants" [ "standard" "light" "dark" ] colorVariants;
=>
error: color variants: bright, black unexpected; valid ones: standard, light, dark
```
# Type
```
String -> List ComparableVal -> List ComparableVal -> a -> a
```
# Arguments
- [msg]
- [valid]
- [given]
*/
checkListOfEnum = msg: valid: given:
let
unexpected = lib.subtractLists valid given;
in
lib.throwIfNot (unexpected == [])
"${msg}: ${builtins.concatStringsSep ", " (builtins.map builtins.toString unexpected)} unexpected; valid ones: ${builtins.concatStringsSep ", " (builtins.map builtins.toString valid)}";
info = msg: builtins.trace "INFO: ${msg}";
showWarnings = warnings: res: lib.foldr (w: x: warn w x) res warnings;
## Function annotations
/**
Add metadata about expected function arguments to a function.
The metadata should match the format given by
builtins.functionArgs, i.e. a set from expected argument to a bool
representing whether that argument has a default or not.
setFunctionArgs : (a -> b) -> Map String Bool -> (a -> b)
This function is necessary because you can't dynamically create a
function of the { a, b ? foo, ... }: format, but some facilities
like callPackage expect to be able to query expected arguments.
# Arguments
- [f]
- [args]
*/
setFunctionArgs = f: args:
{ # TODO: Should we add call-time "type" checking like built in?
__functor = self: f;
__functionArgs = args;
};
/**
Extract the expected function arguments from a function.
This works both with nix-native { a, b ? foo, ... }: style
functions and functions with args set with 'setFunctionArgs'. It
has the same return type and semantics as builtins.functionArgs.
setFunctionArgs : (a -> b) -> Map String Bool.
# Arguments
- [f]
*/
functionArgs = f:
if f ? __functor
then f.__functionArgs or (lib.functionArgs (f.__functor f))
else builtins.functionArgs f;
/**
Check whether something is a function or something
annotated with function args.
# Arguments
- [f]
*/
isFunction = f: builtins.isFunction f ||
(f ? __functor && isFunction (f.__functor f));
/**
Turns any non-callable values into constant functions.
Returns callable values as is.
# Example
```nix
nix-repl> lib.toFunction 1 2
1
nix-repl> lib.toFunction (x: x + 1) 2
3
```
# Arguments
- [v] Any value
*/
toFunction =
# Any value
v:
if isFunction v
then v
else k: v;
/**
Convert the given positive integer to a string of its hexadecimal
representation. For example:
toHexString 0 => "0"
toHexString 16 => "10"
toHexString 250 => "FA"
# Arguments
- [i]
*/
toHexString = i:
let
toHexDigit = d:
if d < 10
then toString d
else
{
"10" = "A";
"11" = "B";
"12" = "C";
"13" = "D";
"14" = "E";
"15" = "F";
}.${toString d};
in
lib.concatMapStrings toHexDigit (toBaseDigits 16 i);
/**
`toBaseDigits base i` converts the positive integer i to a list of its
digits in the given base. For example:
toBaseDigits 10 123 => [ 1 2 3 ]
toBaseDigits 2 6 => [ 1 1 0 ]
toBaseDigits 16 250 => [ 15 10 ]
# Arguments
- [base]
- [i]
*/
toBaseDigits = base: i:
let
go = i:
if i < base
then [i]
else
let
r = i - ((i / base) * base);
q = (i - r) / base;
in
[r] ++ go q;
in
assert (isInt base);
assert (isInt i);
assert (base >= 2);
assert (i >= 0);
lib.reverseList (go i);
}


@@ -1 +1 @@
Some(LambdaDoc { content: None, count_applied: 0 })
Some(NixDocComment { content: None, count_applied: None })


@@ -1 +1,3 @@
let inherit (self.asserts) assertMsg assertOneOf; in 1
let
inherit (self.asserts) assertMsg assertOneOf;
in 1


@@ -1,4 +1,4 @@
{
"line": 1,
"column": 27
"line": 2,
"column": 25
}


@@ -1 +1 @@
Some(LambdaDoc { content: None, count_applied: 1 })
Some(NixDocComment { content: Some("Docs"), count_applied: Some(1) })


@@ -1,4 +1,4 @@
let
# *Docs
/**Docs*/
foo = x: y: z: b: x;
in foo


@@ -1 +1 @@
Some(LambdaDoc { content: None, count_applied: 3 })
Some(NixDocComment { content: Some("Docs"), count_applied: Some(3) })


@@ -1,4 +1,4 @@
let
# *Docs
/**Docs*/
foo = x: y: z: b: x;
in foo


@@ -1 +1 @@
Some(LambdaDoc { content: None, count_applied: 0 })
Some(NixDocComment { content: Some("Docs"), count_applied: Some(0) })


@@ -1,5 +1,5 @@
let
# *Docs
/**Docs*/
# TODO: fix it later
foo = x: x;
in foo

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,344 @@
["builtins", "add"] Docs {
docs: DocsMeta {
lambda: Some(
LambdaMeta {
isPrimop: true,
name: None,
position: None,
args: None,
experimental: None,
arity: None,
content: Some(
"\n Return the sum of the numbers *e1* and *e2*.\n ",
),
countApplied: Some(
0,
),
},
),
attr: AttrMeta {
position: None,
content: Some(
"",
),
},
},
aliases: Some(
[
[
"lib",
"add",
],
[
"lib",
"trivial",
"add",
],
],
),
path: [
"builtins",
"add",
],
}
["lib", "add"] Docs {
docs: DocsMeta {
lambda: Some(
LambdaMeta {
isPrimop: true,
name: Some(
"add",
),
position: None,
args: Some(
[
"e1",
"e2",
],
),
experimental: Some(
false,
),
arity: Some(
2,
),
content: Some(
"\n Return the sum of the numbers *e1* and *e2*.\n ",
),
countApplied: None,
},
),
attr: AttrMeta {
position: Some(
FilePosition {
file: "test_data/assets/default.nix",
line: 68,
column: 23,
},
),
content: None,
},
},
aliases: Some(
[
[
"lib",
"trivial",
"add",
],
[
"builtins",
"add",
],
],
),
path: [
"lib",
"add",
],
}
["lib", "trivial", "add"] Docs {
docs: DocsMeta {
lambda: Some(
LambdaMeta {
isPrimop: true,
name: Some(
"add",
),
position: None,
args: Some(
[
"e1",
"e2",
],
),
experimental: Some(
false,
),
arity: Some(
2,
),
content: Some(
"\n Return the sum of the numbers *e1* and *e2*.\n ",
),
countApplied: None,
},
),
attr: AttrMeta {
position: Some(
FilePosition {
file: "test_data/assets/trivial.nix",
line: 269,
column: 21,
},
),
content: None,
},
},
aliases: Some(
[
[
"lib",
"add",
],
[
"builtins",
"add",
],
],
),
path: [
"lib",
"trivial",
"add",
],
}
["lib", "concatLines"] Docs {
docs: DocsMeta {
lambda: Some(
LambdaMeta {
isPrimop: false,
name: None,
position: Some(
FilePosition {
file: "test_data/assets/strings.nix",
line: 84,
column: 25,
},
),
args: None,
experimental: None,
arity: None,
content: Some(
"\n Map a function over a list and concatenate the resulting strings.\n\n # Example\n\n ```nix\n concatMapStrings (x: \"a\" + x) [\"foo\" \"bar\"]\n => \"afooabar\"\n ```\n\n # Type\n\n ```\n concatMapStrings :: (a -> string) -> [a] -> string\n ```\n\n # Arguments\n\n - [f] \n - [list] \n\n ",
),
countApplied: Some(
1,
),
},
),
attr: AttrMeta {
position: Some(
FilePosition {
file: "test_data/assets/default.nix",
line: 98,
column: 27,
},
),
content: None,
},
},
aliases: Some(
[],
),
path: [
"lib",
"concatLines",
],
}
["lib", "strings", "concatLines"] Docs {
docs: DocsMeta {
lambda: Some(
LambdaMeta {
isPrimop: false,
name: None,
position: Some(
FilePosition {
file: "test_data/assets/strings.nix",
line: 84,
column: 25,
},
),
args: None,
experimental: None,
arity: None,
content: Some(
"\n Map a function over a list and concatenate the resulting strings.\n\n # Example\n\n ```nix\n concatMapStrings (x: \"a\" + x) [\"foo\" \"bar\"]\n => \"afooabar\"\n ```\n\n # Type\n\n ```\n concatMapStrings :: (a -> string) -> [a] -> string\n ```\n\n # Arguments\n\n - [f] \n - [list] \n\n ",
),
countApplied: Some(
1,
),
},
),
attr: AttrMeta {
position: Some(
FilePosition {
file: "test_data/assets/strings.nix",
line: 243,
column: 3,
},
),
content: Some(
"\n Concatenate a list of strings, adding a newline at the end of each one.\n Defined as `concatMapStrings (s: s + \"\\n\")`.\n\n # Example\n\n ```nix\n concatLines [ \"foo\" \"bar\" ]\n => \"foo\\nbar\\n\"\n ```\n\n # Type\n\n ```\n concatLines :: [string] -> string\n ```\n ",
),
},
},
aliases: Some(
[],
),
path: [
"lib",
"strings",
"concatLines",
],
}
["lib", "foldl'"] Docs {
docs: DocsMeta {
lambda: Some(
LambdaMeta {
isPrimop: false,
name: None,
position: Some(
FilePosition {
file: "test_data/assets/lists.nix",
line: 204,
column: 5,
},
),
args: None,
experimental: None,
arity: None,
content: Some(
"\n The binary operation to run, where the two arguments are:\n 1. `acc`: The current accumulator value: Either the initial one for the first iteration, or the result of the previous iteration\n 2. `x`: The corresponding list element for this iteration\n ",
),
countApplied: Some(
0,
),
},
),
attr: AttrMeta {
position: Some(
FilePosition {
file: "test_data/assets/default.nix",
line: 92,
column: 25,
},
),
content: None,
},
},
aliases: Some(
[
[
"lib",
"lists",
"foldl'",
],
],
),
path: [
"lib",
"foldl'",
],
}
["lib", "lists", "foldl'"] Docs {
docs: DocsMeta {
lambda: Some(
LambdaMeta {
isPrimop: false,
name: None,
position: Some(
FilePosition {
file: "test_data/assets/lists.nix",
line: 204,
column: 5,
},
),
args: None,
experimental: None,
arity: None,
content: Some(
"\n The binary operation to run, where the two arguments are:\n 1. `acc`: The current accumulator value: Either the initial one for the first iteration, or the result of the previous iteration\n 2. `x`: The corresponding list element for this iteration\n ",
),
countApplied: Some(
0,
),
},
),
attr: AttrMeta {
position: Some(
FilePosition {
file: "test_data/assets/lists.nix",
line: 198,
column: 3,
},
),
content: Some(
"\n Reduce a list by applying a binary operator from left to right,\n starting with an initial accumulator.\n Before each application of the operator, the accumulator value is evaluated.\n This behavior makes this function stricter than [`foldl`](#function-library-lib.lists.foldl).\n Unlike [`builtins.foldl'`](https://nixos.org/manual/nix/unstable/language/builtins.html#builtins-foldl'),\n the initial accumulator argument is evaluated before the first iteration.\n A call like\n ```nix\n foldl' op acc₀ [ x₀ x₁ x₂ ... xₙ₋₁ xₙ ]\n ```\n is (denotationally) equivalent to the following,\n but with the added benefit that `foldl'` itself will never overflow the stack.\n ```nix\n let\n acc₁ = builtins.seq acc₀ (op acc₀ x₀ );\n acc₂ = builtins.seq acc₁ (op acc₁ x₁ );\n acc₃ = builtins.seq acc₂ (op acc₂ x₂ );\n ...\n accₙ = builtins.seq accₙ₋₁ (op accₙ₋₁ xₙ₋₁);\n accₙ₊₁ = builtins.seq accₙ (op accₙ xₙ );\n in\n accₙ₊₁\n # Or ignoring builtins.seq\n op (op (... (op (op (op acc₀ x₀) x₁) x₂) ...) xₙ₋₁) xₙ\n ```\n\n # Example\n\n ```nix\n foldl' (acc: x: acc + x) 0 [1 2 3]\n => 6\n ```\n\n # Type\n\n ```\n foldl' :: (acc -> x -> acc) -> acc -> [x] -> acc\n ```\n\n # Arguments\n\n - [op] The binary operation to run, where the two arguments are:\n\n1. `acc`: The current accumulator value: Either the initial one for the first iteration, or the result of the previous iteration\n2. `x`: The corresponding list element for this iteration\n - [acc] The initial accumulator value\n - [list] The list to fold\n\n ",
),
},
},
aliases: Some(
[
[
"lib",
"foldl'",
],
],
),
path: [
"lib",
"lists",
"foldl'",
],
}
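The bulk snapshot above is the point of this commit: `builtins.add`, `lib.add`, and `lib.trivial.add` all name the same primop, so each entry cross-references the other two under `aliases`, and `lib.foldl'` pairs with `lib.lists.foldl'`. The two `concatLines` entries, by contrast, carry empty alias lists even though they share a lambda position; alias detection evidently treats the partially applied `concatMapStrings` differently from a plain re-export. Because Nix short-circuits `==` to pointer equality for functions, the relation being recorded here can be observed in the language itself. A sketch of that observation, not the indexer's actual detection code:

```nix
nix-repl> lib = (import <nixpkgs> { }).lib

nix-repl> lib.add == builtins.add          # true: the very same primop value
true

nix-repl> lib.foldl' == lib.lists.foldl'   # true: the same re-exported lambda
true
```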


@ -0,0 +1,136 @@
[
{
"docs": {
"attr": {
"position": {
"column": 23,
"file": "test_data/assets/default.nix",
"line": 68
}
},
"lambda": {
"args": ["e1", "e2"],
"arity": 2,
"content": "\n Return the sum of the numbers *e1* and *e2*.\n ",
"experimental": false,
"isPrimop": true,
"name": "add",
"position": null
}
},
"path": ["lib", "add"]
},
{
"docs": {
"attr": {
"position": {
"column": 21,
"file": "test_data/assets/trivial.nix",
"line": 269
}
},
"lambda": {
"args": ["e1", "e2"],
"arity": 2,
"content": "\n Return the sum of the numbers *e1* and *e2*.\n ",
"experimental": false,
"isPrimop": true,
"name": "add",
"position": null
}
},
"path": ["lib", "trivial", "add"]
},
{
"docs": {
"attr": {
"position": {
"column": 25,
"file": "test_data/assets/default.nix",
"line": 92
}
},
"lambda": {
"isPrimop": false,
"position": {
"column": 5,
"file": "test_data/assets/lists.nix",
"line": 204
}
}
},
"path": ["lib", "foldl'"]
},
{
"docs": {
"attr": {
"position": {
"column": 3,
"file": "test_data/assets/lists.nix",
"line": 198
}
},
"lambda": {
"isPrimop": false,
"position": {
"column": 5,
"file": "test_data/assets/lists.nix",
"line": 204
}
}
},
"path": ["lib", "lists", "foldl'"]
},
{
"docs": {
"attr": {
"position": {
"column": 3,
"file": "test_data/assets/strings.nix",
"line": 243
}
},
"lambda": {
"isPrimop": false,
"position": {
"column": 25,
"file": "test_data/assets/strings.nix",
"line": 84
}
}
},
"path": ["lib", "strings", "concatLines"]
},
{
"docs": {
"attr": {
"position": {
"column": 27,
"file": "test_data/assets/default.nix",
"line": 98
}
},
"lambda": {
"isPrimop": false,
"position": {
"column": 25,
"file": "test_data/assets/strings.nix",
"line": 84
}
}
},
"path": ["lib", "concatLines"]
},
{
"docs": {
"attr": { "content": "", "position": null },
"lambda": {
"content": "\n Return the sum of the numbers *e1* and *e2*.\n ",
"countApplied": 0,
"isPrimop": true,
"position": null
}
},
"path": ["builtins", "add"]
}
]


@ -1,17 +1,41 @@
{ self, ... }: {
perSystem = { pkgs, self', system, ... }: {
checks.pre-commit-check = self.inputs.pre-commit-hooks.lib.${system}.run {
src = ./.;
hooks = {
nixfmt.enable = true;
statix.enable = true;
# markdownlint.enable = true;
};
excludes = [ "indexer/test" ".github" "scripts/data" ];
settings = {
statix.ignore = [ "indexer/test" ];
nixfmt.width = 80;
perSystem =
{ pkgs
, self'
, system
, ...
}:
let
formatters = with pkgs; [
nixfmt
nodePackages.prettier #3.0.0 installed over nixpkgs
rustfmt
];
in
{
checks.pre-commit-check = self.inputs.pre-commit-hooks.lib.${system}.run {
src = ./.;
hooks = {
treefmt.enable = true;
statix.enable = true;
};
excludes = [ "indexer/test" ".github" "scripts/data" ];
settings = {
statix.ignore = [ "indexer/test" ];
nixfmt.width = 80;
treefmt.package = pkgs.writeShellApplication {
name = "treefmt";
runtimeInputs =
[
pkgs.treefmt
]
++ formatters;
text = ''
exec treefmt "$@"
'';
};
};
};
};
};
}

treefmt.toml Normal file

@ -0,0 +1,4 @@
[formatter.nix]
command = "nixpkgs-fmt"
includes = ["*.nix"]
excludes = ["pesto/test_data/*.nix"]


@ -3,14 +3,16 @@ let
inherit (floco) lib;
pjs = let
msg = "default.nix: Expected to find `package.json' to lookup "
+ "package name/version, but no such file exists at: "
+ (toString ./package.json);
in if builtins.pathExists ./package.json then
lib.importJSON ./package.json
else
throw msg;
pjs =
let
msg = "default.nix: Expected to find `package.json' to lookup "
+ "package name/version, but no such file exists at: "
+ (toString ./package.json);
in
if builtins.pathExists ./package.json then
lib.importJSON ./package.json
else
throw msg;
ident = pjs.name;
inherit (pjs) version;
@ -32,4 +34,5 @@ let
# We'll expose these below to the CLI.
pkg = fmod.config.floco.packages.${ident}.${version};
in { inherit pkg fmod; }
in
{ inherit pkg fmod; }


@ -6,7 +6,8 @@
inherit floco;
nooglePkgs = self'.packages;
};
in {
in
{
packages = { ui = base.pkg.global; };
devShells.ui = pkgs.callPackage ./shell.nix {
inherit pkgs;


@ -1,13 +1,15 @@
{ lib, config, ... }:
let
pjs = let
msg = "foverrides.nix: Expected to find `package.json' to lookup "
+ "package name/version, but no such file exists at: "
+ (toString ../package.json);
in if builtins.pathExists ../package.json then
lib.importJSON ../package.json
else
throw msg;
pjs =
let
msg = "foverrides.nix: Expected to find `package.json' to lookup "
+ "package name/version, but no such file exists at: "
+ (toString ../package.json);
in
if builtins.pathExists ../package.json then
lib.importJSON ../package.json
else
throw msg;
ident = pjs.name;
inherit (pjs) version;
@ -25,7 +27,8 @@ let
"x86_64-darwin" = "@next/swc-darwin-x64";
"aarch64-darwin" = "@next/swc-darwin-arm64";
}.${config.floco.settings.system};
in {
in
{
config.floco.packages.${ident}.${version} =
let cfg = config.floco.packages.${ident}.${version};
in {
@ -44,20 +47,22 @@ in {
export HOME=./home
'';
tree = let
customOverrides = cfg.trees.dev.overrideAttrs (prev: {
treeInfo = prev.treeInfo // {
"node_modules/${swcArch}" = {
key = "${swcArch}/${nextVersion}";
link = false;
optional = false;
dev = true;
tree =
let
customOverrides = cfg.trees.dev.overrideAttrs (prev: {
treeInfo = prev.treeInfo // {
"node_modules/${swcArch}" = {
key = "${swcArch}/${nextVersion}";
link = false;
optional = false;
dev = true;
};
# We can inject dependencies here
#
};
# We can inject dependencies here
#
};
});
in lib.mkForce customOverrides;
});
in
lib.mkForce customOverrides;
};
};
}