remove deprecated submodules

This commit is contained in:
Johannes Kirschbauer 2024-04-29 23:10:56 +02:00 committed by mergify[bot]
parent 7c9048b90c
commit 01a17da2f8
22 changed files with 0 additions and 2780 deletions

View File

@ -37,8 +37,6 @@
./pesto/flake-module.nix
# Deprecated. Will be removed.
./codemod/flake-module.nix
./indexer/flake-module.nix
./scripts/flake-module.nix
];
});
}

4
indexer/.gitignore vendored
View File

@ -1,4 +0,0 @@
target/
**/*.rs.bk
data.json

View File

@ -1,2 +0,0 @@
# Legacy Travis CI configuration for the indexer.
# NOTE(review): the `sudo` key is deprecated on current Travis and ignored — verify.
language: nix
sudo: true

446
indexer/Cargo.lock generated
View File

@ -1,446 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "ansi_term"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
dependencies = [
"winapi",
]
[[package]]
name = "arenatree"
version = "0.1.1"
source = "git+https://gitlab.com/jD91mZM2/arenatree#f9bf7efa9a5ef4c2dd9e2acc5a4cc79a987cb648"
[[package]]
name = "arrayvec"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1e964f9e24d588183fcb43503abda40d288c8657dfc27311516ce2f05675aef"
dependencies = [
"nodrop",
]
[[package]]
name = "atty"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"
dependencies = [
"libc",
"termion",
"winapi",
]
[[package]]
name = "backtrace"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89a47830402e9981c5c41223151efcced65a0510c13097c769cede7efb34782a"
dependencies = [
"backtrace-sys",
"cfg-if",
"libc",
"rustc-demangle",
"winapi",
]
[[package]]
name = "backtrace-sys"
version = "0.1.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c66d56ac8dabd07f6aacdaf633f4b8262f5b3601a810a0dcddffd5c22c69daa0"
dependencies = [
"cc",
"libc",
]
[[package]]
name = "bitflags"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
[[package]]
name = "cc"
version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f159dfd43363c4d08055a07703eb7a3406b0dac4d0584d96965a3262db3c9d16"
[[package]]
name = "cfg-if"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "082bb9b28e00d3c9d39cc03e64ce4cea0f1bb9b3fde493f0cbc008472d22bdf4"
[[package]]
name = "clap"
version = "2.34.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
dependencies = [
"ansi_term",
"atty",
"bitflags",
"strsim",
"textwrap",
"unicode-width",
"vec_map",
]
[[package]]
name = "failure"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dd377bcc1b1b7ce911967e3ec24fa19c3224394ec05b54aa7b083d498341ac7"
dependencies = [
"backtrace",
"failure_derive",
]
[[package]]
name = "failure_derive"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64c2d913fe8ed3b6c6518eedf4538255b989945c14c2a7d5cbff62a5e2120596"
dependencies = [
"proc-macro2 0.4.20",
"quote 0.6.8",
"syn 0.15.15",
"synstructure",
]
[[package]]
name = "heck"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "indexer"
version = "0.1.0"
dependencies = [
"rnix",
"serde",
"serde_json",
"structopt",
]
[[package]]
name = "itoa"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76e3a3ef172f1a0b9a9ff0dd1491ae5e6c948b94479a3021819ba7d860c8645d"
[[package]]
name = "nodrop"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a2228dca57108069a5262f2ed8bd2e82496d2e074a06d1ccc7ce1687b6ae0a2"
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2 1.0.67",
"quote 1.0.33",
"syn 1.0.105",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2 1.0.67",
"quote 1.0.33",
"version_check",
]
[[package]]
name = "proc-macro2"
version = "0.4.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d7b7eaaa90b4a90a932a9ea6666c95a389e424eff347f0f793979289429feee"
dependencies = [
"unicode-xid",
]
[[package]]
name = "proc-macro2"
version = "1.0.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd636425967c33af890042c483632d33fa7a18f19ad1d7ea72e8998c6ef8dea5"
dependencies = [
"proc-macro2 0.4.20",
]
[[package]]
name = "quote"
version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [
"proc-macro2 1.0.67",
]
[[package]]
name = "redox_syscall"
version = "0.1.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1"
[[package]]
name = "redox_termios"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
dependencies = [
"redox_syscall",
]
[[package]]
name = "rnix"
version = "0.4.1"
source = "git+https://gitlab.com/jD91mZM2/rnix.git?rev=10b86c94291b4864470158ef8750de85ddd8d4ba#10b86c94291b4864470158ef8750de85ddd8d4ba"
dependencies = [
"arenatree",
"arrayvec",
"failure",
"smol_str",
]
[[package]]
name = "rustc-demangle"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bcfe5b13211b4d78e5c2cadfebd7769197d95c639c35a50057eb4c05de811395"
[[package]]
name = "ryu"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
[[package]]
name = "serde"
version = "1.0.188"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.188"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2"
dependencies = [
"proc-macro2 1.0.67",
"quote 1.0.33",
"syn 2.0.37",
]
[[package]]
name = "serde_json"
version = "1.0.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "smol_str"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3ed6f19b800d76574926e458d5f8e2dbea86c2b58c08d33a982448f09ac8d0c"
[[package]]
name = "strsim"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
[[package]]
name = "structopt"
version = "0.3.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c6b5c64445ba8094a6ab0c3cd2ad323e07171012d9c98b0b15651daf1787a10"
dependencies = [
"clap",
"lazy_static",
"structopt-derive",
]
[[package]]
name = "structopt-derive"
version = "0.4.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0"
dependencies = [
"heck",
"proc-macro-error",
"proc-macro2 1.0.67",
"quote 1.0.33",
"syn 1.0.105",
]
[[package]]
name = "syn"
version = "0.15.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a9c2bf1e53c21704a7cce1b2a42768f1ae32a6777108a0d7f1faa4bfe7f7c04"
dependencies = [
"proc-macro2 0.4.20",
"quote 0.6.8",
"unicode-xid",
]
[[package]]
name = "syn"
version = "1.0.105"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60b9b43d45702de4c839cb9b51d9f529c5dd26a4aff255b42b1ebc03e88ee908"
dependencies = [
"proc-macro2 1.0.67",
"quote 1.0.33",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7303ef2c05cd654186cb250d29049a24840ca25d2747c25c0381c8d9e2f582e8"
dependencies = [
"proc-macro2 1.0.67",
"quote 1.0.33",
"unicode-ident",
]
[[package]]
name = "synstructure"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73687139bf99285483c96ac0add482c3776528beac1d97d444f6e91f203a2015"
dependencies = [
"proc-macro2 0.4.20",
"quote 0.6.8",
"syn 0.15.15",
"unicode-xid",
]
[[package]]
name = "termion"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
dependencies = [
"libc",
"redox_syscall",
"redox_termios",
]
[[package]]
name = "textwrap"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
dependencies = [
"unicode-width",
]
[[package]]
name = "unicode-ident"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
[[package]]
name = "unicode-segmentation"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
[[package]]
name = "unicode-width"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "882386231c45df4700b275c7ff55b6f3698780a650026380e72dabe76fa46526"
[[package]]
name = "unicode-xid"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
[[package]]
name = "vec_map"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "winapi"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"

View File

@ -1,14 +0,0 @@
# Manifest of the deprecated `indexer` binary that extracts documentation
# metadata from Nix source files.
[package]
name = "indexer"
version = "0.1.0"
authors = ["Johannes Kirschbauer <hsjobeki@gmail.com>"]
edition = "2021"

[dependencies]
structopt = "0.3"
serde_json = "1.0.107"
serde = { version = "1.0.188", features = ["derive"] }

# rnix is pinned to a fixed git revision of the old (arena-based) parser
# API that src/main.rs is written against.
[dependencies.rnix]
git = "https://gitlab.com/jD91mZM2/rnix.git"
rev = "10b86c94291b4864470158ef8750de85ddd8d4ba"

View File

@ -1,65 +0,0 @@
# Crane-based flake module for the (deprecated) Rust indexer:
# builds the binary, its checks, and a derivation that runs the indexer
# over pinned nixpkgs sources.
{ inputs, ... }: {
perSystem = { self', inputs', pkgs, system, ... }:
let
craneLib = inputs.crane.lib.${system};
# Cargo sources only, so unrelated file changes do not invalidate the build.
src = craneLib.cleanCargoSource (craneLib.path ./.);
commonArgs = {
inherit src;
strictDeps = true;
# NOTE(review): `cargoArtifacts` is defined *inside* commonArgs and passes
# commonArgs recursively to buildDepsOnly; upstream crane examples define
# cargoArtifacts as a separate let binding — verify this evaluates lazily
# without infinite recursion.
cargoArtifacts = craneLib.buildDepsOnly commonArgs;
};
indexer = craneLib.buildPackage commonArgs;
# Flake checks: package build, clippy (warnings fatal), rustfmt, nextest.
checks = {
inherit indexer;
indexer-clippy = craneLib.cargoClippy (commonArgs // {
cargoClippyExtraArgs = "--all-targets -- --deny warnings";
});
indexer-fmt = craneLib.cargoFmt { inherit src; };
indexer-nextest = craneLib.cargoNextest (commonArgs // {
partitions = 1;
partitionType = "count";
});
};
nixpkgs-data = pkgs.stdenv.mkDerivation {
pname = "data";
version = "0.1.0";
description = ''
wrapper around the indexer.
Calls the indexer with '<nixpkgs>'/path.
and defines one output for every specified input path
currently this list is manually maintained below.
'';
src = inputs.nixpkgs-master;
# One derivation output per indexed nixpkgs subtree.
outputs = [ "out" "lib" "trivial_builders" "build_support" ];
nativeBuildInputs = [ indexer ];
buildPhase = ''
echo "running nix metadata collect in nixpkgs/lib"
${indexer}/bin/indexer --dir ./lib
${indexer}/bin/indexer --dir ./pkgs/build-support/trivial-builders
${indexer}/bin/indexer --dir ./pkgs/build-support
'';
installPhase = ''
cat lib.json > $lib
cat trivial-builders.json > $trivial_builders
cat build-support.json > $build_support
mkdir $out
ln -s $lib $out/lib
ln -s $trivial_builders $out/trivial_builders
ln -s $build_support $out/build_support
'';
};
in
{
packages = { inherit indexer nixpkgs-data; };
inherit checks;
devShells.indexer = craneLib.devShell {
# Inherit inputs from checks.
inherit checks;
};
};
}

View File

@ -1,36 +0,0 @@
// simple script
// test.json is generated with "cargo run -- --dir ./test"
//
// Renders every entry of test.json as a markdown section and writes the
// combined document to content.md.
const data = require("./test.json");
const fs = require("fs");

// Fold all entries into a single markdown string. Each entry becomes a
// "## <name>" section with description / example / type fenced as nix code.
const all_docs = data.reduce(
  (acc, doc) => `${acc}
## ${doc.name}
### Description
\`\`\`nix
${doc.description}
\`\`\`
### Example
\`\`\`nix
${doc.example}
\`\`\`
### Type
\`\`\`nix
${doc.fn_type}
\`\`\`
`,
  ""
);

fs.writeFile("content.md", `# Functions\n${all_docs}`, (err) => {
  if (err) {
    // Fix: previously a failed write was only logged and the process
    // still exited with status 0; report the failure via the exit code.
    console.error(err);
    process.exitCode = 1;
  }
});

View File

@ -1,293 +0,0 @@
extern crate rnix;
extern crate serde;
extern crate serde_json;
extern crate structopt;
use rnix::parser::{ASTKind, ASTNode, Arena, Data};
use rnix::tokenizer::Trivia;
use rnix::tokenizer::{Meta, Span};
use serde::{Deserialize, Serialize};
use std::fs::{self, File};
use std::path::PathBuf;
use structopt::StructOpt;
/// Command line arguments for the indexer
#[derive(Debug, StructOpt)]
#[structopt(name = "indexer", about = "Generate Metadata from Nix files")]
struct Options {
    /// directory to process.
    /// Every `*.nix` file directly inside it is parsed (no recursion).
    #[structopt(short = "D", long = "dir", parse(from_os_str))]
    dir: PathBuf,
}
/// A single documentation entry as serialized into the output JSON.
#[derive(Serialize, Deserialize, Debug)]
pub struct ManualEntry {
    /// Unique id, `"<parent-dir>.<file-stem>.<name>"`; the file stem is
    /// omitted when the source file is `default.nix`.
    pub id: String,
    /// 1-based source line of the documented identifier, if found.
    pub line: Option<usize>,
    /// Path of the file the entry was extracted from.
    pub category: String,
    /// Identifier being documented.
    pub name: String,
    /// Optional `Type:` annotation from the doc comment.
    pub fn_type: Option<String>,
    /// Free-form documentation text.
    pub description: String,
    /// Optional `Example:` section from the doc comment.
    pub example: Option<String>,
}
/// Parsed sections of a documentation comment (see `parse_doc_comment`).
#[derive(Debug)]
struct DocComment {
    /// Primary documentation string.
    doc: String,
    /// Optional type annotation for the thing being documented.
    doc_type: Option<String>,
    /// Usage example(s) (interpreted as a single code block)
    example: Option<String>,
}
/// A documented identifier: its name, source span and parsed doc comment.
#[derive(Debug)]
struct DocItem {
    /// Name of the documented identifier.
    name: String,
    /// Source span of the identifier node.
    span: Span,
    /// Parsed doc comment attached to the identifier.
    comment: DocComment,
}
/// Retrieve documentation comments. For now only multiline comments are
/// considered, unless `allow_single_line` is set. Returns the text of the
/// first matching comment in the token's leading trivia.
fn retrieve_doc_comment(allow_single_line: bool, meta: &Meta) -> Option<String> {
    meta.leading.iter().find_map(|trivia| match trivia {
        Trivia::Comment {
            multiline, content, ..
        } if *multiline || allow_single_line => Some(content.to_string()),
        _ => None,
    })
}
/// Transforms an AST node into a `DocItem` if it has a leading
/// documentation comment.
fn retrieve_doc_item(node: &ASTNode) -> Option<DocItem> {
    // Only identifier nodes carry the doc comment we are interested in.
    let (meta, name) = match &node.data {
        Data::Ident(meta, name) => (meta, name),
        _ => return None,
    };
    let comment = retrieve_doc_comment(false, meta)?;
    Some(DocItem {
        span: node.span,
        name: name.to_string(),
        comment: parse_doc_comment(&comment),
    })
}
/// Byte offset of the first character that is either a newline or not
/// whitespace; the full line length when the line is blank.
fn get_indentation(line: &str) -> usize {
    for (idx, ch) in line.char_indices() {
        if ch == '\n' || !ch.is_whitespace() {
            return idx;
        }
    }
    line.len()
}
/// *Really* dumb, mutable, hacky doc comment "parser".
///
/// Splits a raw doc comment into a free-form documentation part plus the
/// optional `Type:` and `Example:` sections. Two passes over the lines:
/// 1. record the indentation of the first indented line of each section;
/// 2. emit every line into its section with exactly that base indentation
///    removed, so deeper (markdown / code) indentation survives.
fn parse_doc_comment(raw: &str) -> DocComment {
    /// Which section the line cursor is currently in.
    #[derive(Clone, Copy)]
    enum ParseState {
        Doc,
        Type,
        Example,
    }

    /// Apply a `Type:` / `Example:` section marker if present, returning
    /// the new state and the line with the marker stripped.
    fn advance<'a>(state: ParseState, line: &'a str) -> (ParseState, &'a str) {
        let trimmed = line.trim();
        if trimmed.starts_with("Type:") {
            (ParseState::Type, &trimmed[5..])
        } else if trimmed.starts_with("Example:") {
            (ParseState::Example, &trimmed[8..])
        } else {
            (state, line)
        }
    }

    // Pass 1: base indentation per section (0 = no indented line seen yet).
    let mut indents = [0usize; 3]; // indexed by ParseState discriminant
    let mut state = ParseState::Doc;
    for line in raw.lines() {
        let (next, stripped) = advance(state, line.trim_end());
        state = next;
        let indent = get_indentation(stripped);
        let slot = &mut indents[state as usize];
        if indent > 0 && *slot == 0 {
            *slot = indent;
        }
    }

    // Pass 2: collect the lines of each section, minus its base indentation.
    // Only trailing whitespace is trimmed per line; leading whitespace beyond
    // the base indent may be markdown formatting or code examples.
    let mut sections = [String::new(), String::new(), String::new()];
    let mut state = ParseState::Doc;
    for line in raw.lines() {
        let (next, stripped) = advance(state, line.trim_end());
        state = next;
        let dedented = stripped.replacen(&" ".repeat(indents[state as usize]), "", 1);
        let section = &mut sections[state as usize];
        section.push_str(&dedented);
        section.push('\n');
    }

    // Drop trailing newlines and leading whitespace; empty sections -> None.
    let finish = |mut s: String| {
        if s.is_empty() {
            None
        } else {
            let keep = s.trim_end_matches(&['\r', '\n'][..]).len();
            s.truncate(keep);
            Some(s.trim_start().into())
        }
    };

    let [doc, doc_type, example] = sections;
    DocComment {
        doc: finish(doc).unwrap_or_default(),
        doc_type: finish(doc_type),
        example: finish(example),
    }
}
/// Return the 1-based line number containing offset `span.start` in
/// `src`, or `None` when the offset lies past the end of the source.
///
/// NOTE(review): fixed to walk byte offsets via `char_indices`; the old
/// version compared a *character* count against `span.start`, which is
/// presumably a byte offset (verify against the pinned rnix revision),
/// so sources containing multi-byte characters got wrong line numbers.
fn get_line(span: Span, src: &String) -> Option<usize> {
    let target = span.start as usize;
    let mut line_nr: usize = 1;
    for (offset, ch) in src.char_indices() {
        // Count a '\n' before answering, matching the previous behaviour of
        // reporting the *following* line when the offset hits a newline.
        if ch == '\n' {
            line_nr += 1;
        }
        if offset >= target {
            return Some(line_nr);
        }
    }
    None
}
/// Traverse the arena from a top-level SetEntry and collect, where
/// possible:
///
/// 1. The identifier of the set entry itself.
/// 2. The attached doc comment on the entry.
///
/// Collecting the argument names of curried functions (`a: b: c: ...`)
/// and pattern functions (`{ a, b, c }: ...`) was removed; only the
/// commented-out sibling walk below remains of it.
fn collect_entry_information<'a>(arena: &Arena<'a>, entry_node: &ASTNode) -> Option<DocItem> {
    // The "root" of any attribute set entry is this `SetEntry` node.
    // It has an `Attribute` child, which in turn has the identifier
    // (on which the documentation comment is stored) as its child.
    let attr_node = &arena[entry_node.node.child?];
    let ident_node = &arena[attr_node.node.child?];
    // At this point we can retrieve the `DocItem` from the identifier
    // node - this already contains most of the information we are
    // interested in.
    let doc_item = retrieve_doc_item(ident_node)?;
    // From our entry we can walk two nodes to the right and check
    // whether we are dealing with a lambda. If so, we can start
    // collecting the function arguments - otherwise we're done.
    // let assign_node = &arena[attr_node.node.sibling?];
    // let content_node = &arena[assign_node.node.sibling?];
    Some(doc_item)
}
/// Entry point: parse every `*.nix` file directly inside the given
/// directory and write the collected `ManualEntry` records to
/// `<dir-name>.json`.
///
/// All errors (I/O, parse failures) abort via `unwrap`, which is
/// acceptable for this one-shot CLI tool.
fn main() {
    let opts = Options::from_args();
    let paths = fs::read_dir(&opts.dir).unwrap();
    let mut data: Vec<ManualEntry> = vec![];
    for path in paths {
        let file_path = path.unwrap();
        let file_type = file_path.file_type().unwrap();
        let file = file_path.path();
        // Only plain `*.nix` files are indexed (no recursion into subdirs).
        // NOTE(review): `extension().unwrap()` panics on files without any
        // extension (e.g. `Makefile`) — verify the indexed directories.
        if file_type.is_file() && file.extension().unwrap() == "nix" {
            // sources.push(file);
            let src = fs::read_to_string(&file).unwrap();
            let nix = rnix::parse(&src).unwrap();
            let filename = file.file_stem().unwrap().to_str().unwrap();
            // Name of the directory containing the file; used as id prefix.
            let parent = file
                .parent()
                .unwrap()
                .file_name()
                .unwrap()
                .to_str()
                .unwrap();
            // Every doc-commented `SetEntry` in the AST becomes one entry.
            let entries: Vec<ManualEntry> = nix
                .arena
                .into_iter()
                .filter(|node| node.kind == ASTKind::SetEntry)
                .filter_map(|node| collect_entry_information(&nix.arena, node))
                .map(|d| ManualEntry {
                    id: {
                        if filename == "default" {
                            //If filename is default.nix only use the parent directory as id
                            format!("{}.{}", parent, d.name)
                        } else {
                            format!("{}.{}.{}", parent, filename, d.name)
                        }
                    },
                    // NOTE(review): `Some(x).unwrap_or(None)` is just `x`;
                    // this is equivalent to `line: get_line(d.span, &src)`.
                    line: Some(get_line(d.span, &src)).unwrap_or(None),
                    category: file.display().to_string(),
                    name: d.name,
                    description: d.comment.doc,
                    fn_type: d.comment.doc_type,
                    example: d.comment.example,
                })
                .collect();
            data.extend(entries);
        }
    }
    // Output file is named after the indexed directory, e.g. `lib` -> `lib.json`.
    let json_file =
        File::create(opts.dir.file_name().unwrap().to_str().unwrap().to_owned() + ".json").unwrap();
    ::serde_json::to_writer(&json_file, &data).unwrap();
}

File diff suppressed because it is too large Load Diff

View File

@ -1,10 +0,0 @@
# direnv configuration: reload the dev shell whenever the flake files change.
source_up
files=(../../flake.nix flake-module.nix package.json)
# Prefer nix-direnv's watcher when available; fall back to plain direnv.
if type nix_direnv_watch_file &>/dev/null; then
nix_direnv_watch_file "${files[@]}"
else
watch_file "${files[@]}"
fi
# Enter the builtins-data dev shell; empty --builders disables remote builders.
use flake .#builtins-data --builders ''

2
scripts/.gitignore vendored
View File

@ -1,2 +0,0 @@
data.json
node_modules

View File

@ -1,114 +0,0 @@
[
"abort",
"add",
"addErrorContext",
"all",
"any",
"appendContext",
"attrNames",
"attrValues",
"baseNameOf",
"bitAnd",
"bitOr",
"bitXor",
"break",
"builtins",
"catAttrs",
"ceil",
"compareVersions",
"concatLists",
"concatMap",
"concatStringsSep",
"currentSystem",
"currentTime",
"deepSeq",
"derivation",
"derivationStrict",
"dirOf",
"div",
"elem",
"elemAt",
"false",
"fetchGit",
"fetchMercurial",
"fetchTarball",
"fetchTree",
"fetchurl",
"filter",
"filterSource",
"findFile",
"floor",
"foldl'",
"fromJSON",
"fromTOML",
"functionArgs",
"genList",
"genericClosure",
"getAttr",
"getContext",
"getEnv",
"getFlake",
"groupBy",
"hasAttr",
"hasContext",
"hashFile",
"hashString",
"head",
"import",
"intersectAttrs",
"isAttrs",
"isBool",
"isFloat",
"isFunction",
"isInt",
"isList",
"isNull",
"isPath",
"isString",
"langVersion",
"length",
"lessThan",
"listToAttrs",
"map",
"mapAttrs",
"match",
"mul",
"nixPath",
"nixVersion",
"null",
"parseDrvName",
"partition",
"path",
"pathExists",
"placeholder",
"readDir",
"readFile",
"removeAttrs",
"replaceStrings",
"scopedImport",
"seq",
"sort",
"split",
"splitVersion",
"storeDir",
"storePath",
"stringLength",
"sub",
"substring",
"tail",
"throw",
"toFile",
"toJSON",
"toPath",
"toString",
"toXML",
"trace",
"traceVerbose",
"true",
"tryEval",
"typeOf",
"unsafeDiscardOutputDependency",
"unsafeDiscardStringContext",
"unsafeGetAttrPos",
"zipAttrsWith"
]

View File

@ -1,447 +0,0 @@
{
"abort": {
"args": ["s"],
"arity": 1,
"doc": "Abort Nix expression evaluation and print the error message *s*."
},
"add": {
"args": ["e1", "e2"],
"arity": 2,
"doc": "Return the sum of the numbers *e1* and *e2*."
},
"all": {
"args": ["pred", "list"],
"arity": 2,
"doc": "Return `true` if the function *pred* returns `true` for all elements\nof *list*, and `false` otherwise."
},
"any": {
"args": ["pred", "list"],
"arity": 2,
"doc": "Return `true` if the function *pred* returns `true` for at least one\nelement of *list*, and `false` otherwise."
},
"attrNames": {
"args": ["set"],
"arity": 1,
"doc": "Return the names of the attributes in the set *set* in an\nalphabetically sorted list. For instance, `builtins.attrNames { y\n= 1; x = \"foo\"; }` evaluates to `[ \"x\" \"y\" ]`."
},
"attrValues": {
"args": ["set"],
"arity": 1,
"doc": "Return the values of the attributes in the set *set* in the order\ncorresponding to the sorted attribute names."
},
"baseNameOf": {
"args": ["s"],
"arity": 1,
"doc": "Return the *base name* of the string *s*, that is, everything\nfollowing the final slash in the string. This is similar to the GNU\n`basename` command."
},
"bitAnd": {
"args": ["e1", "e2"],
"arity": 2,
"doc": "Return the bitwise AND of the integers *e1* and *e2*."
},
"bitOr": {
"args": ["e1", "e2"],
"arity": 2,
"doc": "Return the bitwise OR of the integers *e1* and *e2*."
},
"bitXor": {
"args": ["e1", "e2"],
"arity": 2,
"doc": "Return the bitwise XOR of the integers *e1* and *e2*."
},
"break": {
"args": ["v"],
"arity": 1,
"doc": "In debug mode (enabled using `--debugger`), pause Nix expression evaluation and enter the REPL.\nOtherwise, return the argument `v`."
},
"catAttrs": {
"args": ["attr", "list"],
"arity": 2,
"doc": "Collect each attribute named *attr* from a list of attribute\nsets. Attrsets that don't contain the named attribute are\nignored. For example,\n\n```nix\nbuiltins.catAttrs \"a\" [{a = 1;} {b = 0;} {a = 2;}]\n```\n\nevaluates to `[1 2]`."
},
"ceil": {
"args": ["double"],
"arity": 1,
"doc": "Converts an IEEE-754 double-precision floating-point number (*double*) to\nthe next higher integer.\n\nIf the datatype is neither an integer nor a \"float\", an evaluation error will be\nthrown."
},
"compareVersions": {
"args": ["s1", "s2"],
"arity": 2,
"doc": "Compare two strings representing versions and return `-1` if\nversion *s1* is older than version *s2*, `0` if they are the same,\nand `1` if *s1* is newer than *s2*. The version comparison\nalgorithm is the same as the one used by [`nix-env\n-u`](../command-ref/nix-env.md#operation---upgrade)."
},
"concatLists": {
"args": ["lists"],
"arity": 1,
"doc": "Concatenate a list of lists into a single list."
},
"concatMap": {
"args": ["f", "list"],
"arity": 2,
"doc": "This function is equivalent to `builtins.concatLists (map f list)`\nbut is more efficient."
},
"concatStringsSep": {
"args": ["separator", "list"],
"arity": 2,
"doc": "Concatenate a list of strings with a separator between each\nelement, e.g. `concatStringsSep \"/\" [\"usr\" \"local\" \"bin\"] ==\n\"usr/local/bin\"`."
},
"deepSeq": {
"args": ["e1", "e2"],
"arity": 2,
"doc": "This is like `seq e1 e2`, except that *e1* is evaluated *deeply*:\nif its a list or set, its elements or attributes are also\nevaluated recursively."
},
"dirOf": {
"args": ["s"],
"arity": 1,
"doc": "Return the directory part of the string *s*, that is, everything\nbefore the final slash in the string. This is similar to the GNU\n`dirname` command."
},
"div": {
"args": ["e1", "e2"],
"arity": 2,
"doc": "Return the quotient of the numbers *e1* and *e2*."
},
"elem": {
"args": ["x", "xs"],
"arity": 2,
"doc": "Return `true` if a value equal to *x* occurs in the list *xs*, and\n`false` otherwise."
},
"elemAt": {
"args": ["xs", "n"],
"arity": 2,
"doc": "Return element *n* from the list *xs*. Elements are counted starting\nfrom 0. A fatal error occurs if the index is out of bounds."
},
"fetchClosure": {
"args": ["args"],
"arity": 1,
"doc": "Fetch a Nix store closure from a binary cache, rewriting it into\ncontent-addressed form. For example,\n\n```nix\nbuiltins.fetchClosure {\n fromStore = \"https://cache.nixos.org\";\n fromPath = /nix/store/r2jd6ygnmirm2g803mksqqjm4y39yi6i-git-2.33.1;\n toPath = /nix/store/ldbhlwhh39wha58rm61bkiiwm6j7211j-git-2.33.1;\n}\n```\n\nfetches `/nix/store/r2jd...` from the specified binary cache,\nand rewrites it into the content-addressed store path\n`/nix/store/ldbh...`.\n\nIf `fromPath` is already content-addressed, or if you are\nallowing impure evaluation (`--impure`), then `toPath` may be\nomitted.\n\nTo find out the correct value for `toPath` given a `fromPath`,\nyou can use `nix store make-content-addressed`:\n\n```console\n# nix store make-content-addressed --from https://cache.nixos.org /nix/store/r2jd6ygnmirm2g803mksqqjm4y39yi6i-git-2.33.1\nrewrote '/nix/store/r2jd6ygnmirm2g803mksqqjm4y39yi6i-git-2.33.1' to '/nix/store/ldbhlwhh39wha58rm61bkiiwm6j7211j-git-2.33.1'\n```\n\nThis function is similar to `builtins.storePath` in that it\nallows you to use a previously built store path in a Nix\nexpression. However, it is more reproducible because it requires\nspecifying a binary cache from which the path can be fetched.\nAlso, requiring a content-addressed final store path avoids the\nneed for users to configure binary cache public keys.\n\nThis function is only available if you enable the experimental\nfeature `fetch-closure`."
},
"fetchGit": {
"args": ["args"],
"arity": 1,
"doc": "Fetch a path from git. *args* can be a URL, in which case the HEAD\nof the repo at that URL is fetched. Otherwise, it can be an\nattribute with the following attributes (all except `url` optional):\n\n - url\\\n The URL of the repo.\n\n - name\\\n The name of the directory the repo should be exported to in the\n store. Defaults to the basename of the URL.\n\n - rev\\\n The git revision to fetch. Defaults to the tip of `ref`.\n\n - ref\\\n The git ref to look for the requested revision under. This is\n often a branch or tag name. Defaults to `HEAD`.\n\n By default, the `ref` value is prefixed with `refs/heads/`. As\n of Nix 2.3.0 Nix will not prefix `refs/heads/` if `ref` starts\n with `refs/`.\n\n - submodules\\\n A Boolean parameter that specifies whether submodules should be\n checked out. Defaults to `false`.\n\n - shallow\\\n A Boolean parameter that specifies whether fetching a shallow clone\n is allowed. Defaults to `false`.\n\n - allRefs\\\n Whether to fetch all refs of the repository. 
With this argument being\n true, it's possible to load a `rev` from *any* `ref` (by default only\n `rev`s from the specified `ref` are supported).\n\nHere are some examples of how to use `fetchGit`.\n\n - To fetch a private repository over SSH:\n\n ```nix\n builtins.fetchGit {\n url = \"git@github.com:my-secret/repository.git\";\n ref = \"master\";\n rev = \"adab8b916a45068c044658c4158d81878f9ed1c3\";\n }\n ```\n\n - To fetch an arbitrary reference:\n\n ```nix\n builtins.fetchGit {\n url = \"https://github.com/NixOS/nix.git\";\n ref = \"refs/heads/0.5-release\";\n }\n ```\n\n - If the revision you're looking for is in the default branch of\n the git repository you don't strictly need to specify the branch\n name in the `ref` attribute.\n\n However, if the revision you're looking for is in a future\n branch for the non-default branch you will need to specify the\n the `ref` attribute as well.\n\n ```nix\n builtins.fetchGit {\n url = \"https://github.com/nixos/nix.git\";\n rev = \"841fcbd04755c7a2865c51c1e2d3b045976b7452\";\n ref = \"1.11-maintenance\";\n }\n ```\n\n > **Note**\n >\n > It is nice to always specify the branch which a revision\n > belongs to. Without the branch being specified, the fetcher\n > might fail if the default branch changes. Additionally, it can\n > be confusing to try a commit from a non-default branch and see\n > the fetch fail. 
If the branch is specified the fault is much\n > more obvious.\n\n - If the revision you're looking for is in the default branch of\n the git repository you may omit the `ref` attribute.\n\n ```nix\n builtins.fetchGit {\n url = \"https://github.com/nixos/nix.git\";\n rev = \"841fcbd04755c7a2865c51c1e2d3b045976b7452\";\n }\n ```\n\n - To fetch a specific tag:\n\n ```nix\n builtins.fetchGit {\n url = \"https://github.com/nixos/nix.git\";\n ref = \"refs/tags/1.9\";\n }\n ```\n\n - To fetch the latest version of a remote branch:\n\n ```nix\n builtins.fetchGit {\n url = \"ssh://git@github.com/nixos/nix.git\";\n ref = \"master\";\n }\n ```\n\n > **Note**\n >\n > Nix will refetch the branch in accordance with\n > the option `tarball-ttl`.\n\n > **Note**\n >\n > This behavior is disabled in *Pure evaluation mode*."
},
"fetchTarball": {
"args": ["args"],
"arity": 1,
"doc": "Download the specified URL, unpack it and return the path of the\nunpacked tree. The file must be a tape archive (`.tar`) compressed\nwith `gzip`, `bzip2` or `xz`. The top-level path component of the\nfiles in the tarball is removed, so it is best if the tarball\ncontains a single directory at top level. The typical use of the\nfunction is to obtain external Nix expression dependencies, such as\na particular version of Nixpkgs, e.g.\n\n```nix\nwith import (fetchTarball https://github.com/NixOS/nixpkgs/archive/nixos-14.12.tar.gz) {};\n\nstdenv.mkDerivation { … }\n```\n\nThe fetched tarball is cached for a certain amount of time (1\nhour by default) in `~/.cache/nix/tarballs/`. You can change the\ncache timeout either on the command line with `--tarball-ttl`\n*number-of-seconds* or in the Nix configuration file by adding\nthe line `tarball-ttl = ` *number-of-seconds*.\n\nNote that when obtaining the hash with `nix-prefetch-url` the\noption `--unpack` is required.\n\nThis function can also verify the contents against a hash. In that\ncase, the function takes a set instead of a URL. The set requires\nthe attribute `url` and the attribute `sha256`, e.g.\n\n```nix\nwith import (fetchTarball {\n url = \"https://github.com/NixOS/nixpkgs/archive/nixos-14.12.tar.gz\";\n sha256 = \"1jppksrfvbk5ypiqdz4cddxdl8z6zyzdb2srq8fcffr327ld5jj2\";\n}) {};\n\nstdenv.mkDerivation { … }\n```\n\nThis function is not available if [restricted evaluation\nmode](../command-ref/conf-file.md) is enabled."
},
"fetchurl": {
"args": ["url"],
"arity": 1,
"doc": "Download the specified URL and return the path of the downloaded\nfile. This function is not available if [restricted evaluation\nmode](../command-ref/conf-file.md) is enabled."
},
"filter": {
"args": ["f", "list"],
"arity": 2,
"doc": "Return a list consisting of the elements of *list* for which the\nfunction *f* returns `true`."
},
"filterSource": {
"args": ["e1", "e2"],
"arity": 2,
"doc": "> **Warning**\n>\n> `filterSource` should not be used to filter store paths. Since\n> `filterSource` uses the name of the input directory while naming\n> the output directory, doing so will produce a directory name in\n> the form of `<hash2>-<hash>-<name>`, where `<hash>-<name>` is\n> the name of the input directory. Since `<hash>` depends on the\n> unfiltered directory, the name of the output directory will\n> indirectly depend on files that are filtered out by the\n> function. This will trigger a rebuild even when a filtered out\n> file is changed. Use `builtins.path` instead, which allows\n> specifying the name of the output directory.\n\nThis function allows you to copy sources into the Nix store while\nfiltering certain files. For instance, suppose that you want to use\nthe directory `source-dir` as an input to a Nix expression, e.g.\n\n```nix\nstdenv.mkDerivation {\n ...\n src = ./source-dir;\n}\n```\n\nHowever, if `source-dir` is a Subversion working copy, then all\nthose annoying `.svn` subdirectories will also be copied to the\nstore. Worse, the contents of those directories may change a lot,\ncausing lots of spurious rebuilds. With `filterSource` you can\nfilter out the `.svn` directories:\n\n```nix\nsrc = builtins.filterSource\n (path: type: type != \"directory\" || baseNameOf path != \".svn\")\n ./source-dir;\n```\n\nThus, the first argument *e1* must be a predicate function that is\ncalled for each regular file, directory or symlink in the source\ntree *e2*. If the function returns `true`, the file is copied to the\nNix store, otherwise it is omitted. The function is called with two\narguments. The first is the full path of the file. 
The second is a\nstring that identifies the type of the file, which is either\n`\"regular\"`, `\"directory\"`, `\"symlink\"` or `\"unknown\"` (for other\nkinds of files such as device nodes or fifos — but note that those\ncannot be copied to the Nix store, so if the predicate returns\n`true` for them, the copy will fail). If you exclude a directory,\nthe entire corresponding subtree of *e2* will be excluded."
},
"floor": {
"args": ["double"],
"arity": 1,
"doc": "Converts an IEEE-754 double-precision floating-point number (*double*) to\nthe next lower integer.\n\nIf the datatype is neither an integer nor a \"float\", an evaluation error will be\nthrown."
},
"foldl'": {
"args": ["op", "nul", "list"],
"arity": 3,
"doc": "Reduce a list by applying a binary operator, from left to right,\ne.g. `foldl' op nul [x0 x1 x2 ...] = op (op (op nul x0) x1) x2)\n...`. The operator is applied strictly, i.e., its arguments are\nevaluated first. For example, `foldl' (x: y: x + y) 0 [1 2 3]`\nevaluates to 6."
},
"fromJSON": {
"args": ["e"],
"arity": 1,
"doc": "Convert a JSON string to a Nix value. For example,\n\n```nix\nbuiltins.fromJSON ''{\"x\": [1, 2, 3], \"y\": null}''\n```\n\nreturns the value `{ x = [ 1 2 3 ]; y = null; }`."
},
"functionArgs": {
"args": ["f"],
"arity": 1,
"doc": "Return a set containing the names of the formal arguments expected\nby the function *f*. The value of each attribute is a Boolean\ndenoting whether the corresponding argument has a default value. For\ninstance, `functionArgs ({ x, y ? 123}: ...) = { x = false; y =\ntrue; }`.\n\n\"Formal argument\" here refers to the attributes pattern-matched by\nthe function. Plain lambdas are not included, e.g. `functionArgs (x:\n...) = { }`."
},
"genList": {
"args": ["generator", "length"],
"arity": 2,
"doc": "Generate list of size *length*, with each element *i* equal to the\nvalue returned by *generator* `i`. For example,\n\n```nix\nbuiltins.genList (x: x * x) 5\n```\n\nreturns the list `[ 0 1 4 9 16 ]`."
},
"genericClosure": {
"args": ["attrset"],
"arity": 1,
"doc": "Take an *attrset* with values named `startSet` and `operator` in order to\nreturn a *list of attrsets* by starting with the `startSet`, recursively\napplying the `operator` function to each element. The *attrsets* in the\n`startSet` and produced by the `operator` must each contain value named\n`key` which are comparable to each other. The result is produced by\nrepeatedly calling the operator for each element encountered with a\nunique key, terminating when no new elements are produced. For example,\n\n```\nbuiltins.genericClosure {\n startSet = [ {key = 5;} ];\n operator = item: [{\n key = if (item.key / 2 ) * 2 == item.key\n then item.key / 2\n else 3 * item.key + 1;\n }];\n}\n```\nevaluates to\n```\n[ { key = 5; } { key = 16; } { key = 8; } { key = 4; } { key = 2; } { key = 1; } ]\n```"
},
"getAttr": {
"args": ["s", "set"],
"arity": 2,
"doc": "`getAttr` returns the attribute named *s* from *set*. Evaluation\naborts if the attribute doesnt exist. This is a dynamic version of\nthe `.` operator, since *s* is an expression rather than an\nidentifier."
},
"getEnv": {
"args": ["s"],
"arity": 1,
"doc": "`getEnv` returns the value of the environment variable *s*, or an\nempty string if the variable doesnt exist. This function should be\nused with care, as it can introduce all sorts of nasty environment\ndependencies in your Nix expression.\n\n`getEnv` is used in Nix Packages to locate the file\n`~/.nixpkgs/config.nix`, which contains user-local settings for Nix\nPackages. (That is, it does a `getEnv \"HOME\"` to locate the users\nhome directory.)"
},
"getFlake": {
"args": ["args"],
"arity": 1,
"doc": "Fetch a flake from a flake reference, and return its output attributes and some metadata. For example:\n\n```nix\n(builtins.getFlake \"nix/55bc52401966fbffa525c574c14f67b00bc4fb3a\").packages.x86_64-linux.nix\n```\n\nUnless impure evaluation is allowed (`--impure`), the flake reference\nmust be \"locked\", e.g. contain a Git revision or content hash. An\nexample of an unlocked usage is:\n\n```nix\n(builtins.getFlake \"github:edolstra/dwarffs\").rev\n```\n\nThis function is only available if you enable the experimental feature\n`flakes`."
},
"groupBy": {
"args": ["f", "list"],
"arity": 2,
"doc": "Groups elements of *list* together by the string returned from the\nfunction *f* called on each element. It returns an attribute set\nwhere each attribute value contains the elements of *list* that are\nmapped to the same corresponding attribute name returned by *f*.\n\nFor example,\n\n```nix\nbuiltins.groupBy (builtins.substring 0 1) [\"foo\" \"bar\" \"baz\"]\n```\n\nevaluates to\n\n```nix\n{ b = [ \"bar\" \"baz\" ]; f = [ \"foo\" ]; }\n```"
},
"hasAttr": {
"args": ["s", "set"],
"arity": 2,
"doc": "`hasAttr` returns `true` if *set* has an attribute named *s*, and\n`false` otherwise. This is a dynamic version of the `?` operator,\nsince *s* is an expression rather than an identifier."
},
"hashFile": {
"args": ["type", "p"],
"arity": 2,
"doc": "Return a base-16 representation of the cryptographic hash of the\nfile at path *p*. The hash algorithm specified by *type* must be one\nof `\"md5\"`, `\"sha1\"`, `\"sha256\"` or `\"sha512\"`."
},
"hashString": {
"args": ["type", "s"],
"arity": 2,
"doc": "Return a base-16 representation of the cryptographic hash of string\n*s*. The hash algorithm specified by *type* must be one of `\"md5\"`,\n`\"sha1\"`, `\"sha256\"` or `\"sha512\"`."
},
"head": {
"args": ["list"],
"arity": 1,
"doc": "Return the first element of a list; abort evaluation if the argument\nisnt a list or is an empty list. You can test whether a list is\nempty by comparing it with `[]`."
},
"import": {
"args": ["path"],
"arity": 1,
"doc": "Load, parse and return the Nix expression in the file *path*. If\n*path* is a directory, the file ` default.nix ` in that directory\nis loaded. Evaluation aborts if the file doesnt exist or contains\nan incorrect Nix expression. `import` implements Nixs module\nsystem: you can put any Nix expression (such as a set or a\nfunction) in a separate file, and use it from Nix expressions in\nother files.\n\n> **Note**\n>\n> Unlike some languages, `import` is a regular function in Nix.\n> Paths using the angle bracket syntax (e.g., `import` *\\<foo\\>*)\n> are [normal path values](language-values.md).\n\nA Nix expression loaded by `import` must not contain any *free\nvariables* (identifiers that are not defined in the Nix expression\nitself and are not built-in). Therefore, it cannot refer to\nvariables that are in scope at the call site. For instance, if you\nhave a calling expression\n\n```nix\nrec {\n x = 123;\n y = import ./foo.nix;\n}\n```\n\nthen the following `foo.nix` will give an error:\n\n```nix\nx + 456\n```\n\nsince `x` is not in scope in `foo.nix`. If you want `x` to be\navailable in `foo.nix`, you should pass it as a function argument:\n\n```nix\nrec {\n x = 123;\n y = import ./foo.nix x;\n}\n```\n\nand\n\n```nix\nx: x + 456\n```\n\n(The function argument doesnt have to be called `x` in `foo.nix`;\nany name would work.)"
},
"intersectAttrs": {
"args": ["e1", "e2"],
"arity": 2,
"doc": "Return a set consisting of the attributes in the set *e2* that also\nexist in the set *e1*."
},
"isAttrs": {
"args": ["e"],
"arity": 1,
"doc": "Return `true` if *e* evaluates to a set, and `false` otherwise."
},
"isBool": {
"args": ["e"],
"arity": 1,
"doc": "Return `true` if *e* evaluates to a bool, and `false` otherwise."
},
"isFloat": {
"args": ["e"],
"arity": 1,
"doc": "Return `true` if *e* evaluates to a float, and `false` otherwise."
},
"isFunction": {
"args": ["e"],
"arity": 1,
"doc": "Return `true` if *e* evaluates to a function, and `false` otherwise."
},
"isInt": {
"args": ["e"],
"arity": 1,
"doc": "Return `true` if *e* evaluates to an integer, and `false` otherwise."
},
"isList": {
"args": ["e"],
"arity": 1,
"doc": "Return `true` if *e* evaluates to a list, and `false` otherwise."
},
"isNull": {
"args": ["e"],
"arity": 1,
"doc": "Return `true` if *e* evaluates to `null`, and `false` otherwise.\n\n> **Warning**\n>\n> This function is *deprecated*; just write `e == null` instead."
},
"isPath": {
"args": ["e"],
"arity": 1,
"doc": "Return `true` if *e* evaluates to a path, and `false` otherwise."
},
"isString": {
"args": ["e"],
"arity": 1,
"doc": "Return `true` if *e* evaluates to a string, and `false` otherwise."
},
"length": {
"args": ["e"],
"arity": 1,
"doc": "Return the length of the list *e*."
},
"lessThan": {
"args": ["e1", "e2"],
"arity": 2,
"doc": "Return `true` if the number *e1* is less than the number *e2*, and\n`false` otherwise. Evaluation aborts if either *e1* or *e2* does not\nevaluate to a number."
},
"listToAttrs": {
"args": ["e"],
"arity": 1,
"doc": "Construct a set from a list specifying the names and values of each\nattribute. Each element of the list should be a set consisting of a\nstring-valued attribute `name` specifying the name of the attribute,\nand an attribute `value` specifying its value. Example:\n\n```nix\nbuiltins.listToAttrs\n [ { name = \"foo\"; value = 123; }\n { name = \"bar\"; value = 456; }\n ]\n```\n\nevaluates to\n\n```nix\n{ foo = 123; bar = 456; }\n```"
},
"map": {
"args": ["f", "list"],
"arity": 2,
"doc": "Apply the function *f* to each element in the list *list*. For\nexample,\n\n```nix\nmap (x: \"foo\" + x) [ \"bar\" \"bla\" \"abc\" ]\n```\n\nevaluates to `[ \"foobar\" \"foobla\" \"fooabc\" ]`."
},
"mapAttrs": {
"args": ["f", "attrset"],
"arity": 2,
"doc": "Apply function *f* to every element of *attrset*. For example,\n\n```nix\nbuiltins.mapAttrs (name: value: value * 10) { a = 1; b = 2; }\n```\n\nevaluates to `{ a = 10; b = 20; }`."
},
"match": {
"args": ["regex", "str"],
"arity": 2,
"doc": "Returns a list if the [extended POSIX regular\nexpression](http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_04)\n*regex* matches *str* precisely, otherwise returns `null`. Each item\nin the list is a regex group.\n\n```nix\nbuiltins.match \"ab\" \"abc\"\n```\n\nEvaluates to `null`.\n\n```nix\nbuiltins.match \"abc\" \"abc\"\n```\n\nEvaluates to `[ ]`.\n\n```nix\nbuiltins.match \"a(b)(c)\" \"abc\"\n```\n\nEvaluates to `[ \"b\" \"c\" ]`.\n\n```nix\nbuiltins.match \"[[:space:]]+([[:upper:]]+)[[:space:]]+\" \" FOO \"\n```\n\nEvaluates to `[ \"FOO\" ]`."
},
"mul": {
"args": ["e1", "e2"],
"arity": 2,
"doc": "Return the product of the numbers *e1* and *e2*."
},
"parseDrvName": {
"args": ["s"],
"arity": 1,
"doc": "Split the string *s* into a package name and version. The package\nname is everything up to but not including the first dash followed\nby a digit, and the version is everything following that dash. The\nresult is returned in a set `{ name, version }`. Thus,\n`builtins.parseDrvName \"nix-0.12pre12876\"` returns `{ name =\n\"nix\"; version = \"0.12pre12876\"; }`."
},
"partition": {
"args": ["pred", "list"],
"arity": 2,
"doc": "Given a predicate function *pred*, this function returns an\nattrset containing a list named `right`, containing the elements\nin *list* for which *pred* returned `true`, and a list named\n`wrong`, containing the elements for which it returned\n`false`. For example,\n\n```nix\nbuiltins.partition (x: x > 10) [1 23 9 3 42]\n```\n\nevaluates to\n\n```nix\n{ right = [ 23 42 ]; wrong = [ 1 9 3 ]; }\n```"
},
"path": {
"args": ["args"],
"arity": 1,
"doc": "An enrichment of the built-in path type, based on the attributes\npresent in *args*. All are optional except `path`:\n\n - path\\\n The underlying path.\n\n - name\\\n The name of the path when added to the store. This can used to\n reference paths that have nix-illegal characters in their names,\n like `@`.\n\n - filter\\\n A function of the type expected by `builtins.filterSource`,\n with the same semantics.\n\n - recursive\\\n When `false`, when `path` is added to the store it is with a\n flat hash, rather than a hash of the NAR serialization of the\n file. Thus, `path` must refer to a regular file, not a\n directory. This allows similar behavior to `fetchurl`. Defaults\n to `true`.\n\n - sha256\\\n When provided, this is the expected hash of the file at the\n path. Evaluation will fail if the hash is incorrect, and\n providing a hash allows `builtins.path` to be used even when the\n `pure-eval` nix config option is on."
},
"pathExists": {
"args": ["path"],
"arity": 1,
"doc": "Return `true` if the path *path* exists at evaluation time, and\n`false` otherwise."
},
"placeholder": {
"args": ["output"],
"arity": 1,
"doc": "Return a placeholder string for the specified *output* that will be\nsubstituted by the corresponding output path at build time. Typical\noutputs would be `\"out\"`, `\"bin\"` or `\"dev\"`."
},
"readDir": {
"args": ["path"],
"arity": 1,
"doc": "Return the contents of the directory *path* as a set mapping\ndirectory entries to the corresponding file type. For instance, if\ndirectory `A` contains a regular file `B` and another directory\n`C`, then `builtins.readDir ./A` will return the set\n\n```nix\n{ B = \"regular\"; C = \"directory\"; }\n```\n\nThe possible values for the file type are `\"regular\"`,\n`\"directory\"`, `\"symlink\"` and `\"unknown\"`."
},
"readFile": {
"args": ["path"],
"arity": 1,
"doc": "Return the contents of the file *path* as a string."
},
"removeAttrs": {
"args": ["set", "list"],
"arity": 2,
"doc": "Remove the attributes listed in *list* from *set*. The attributes\ndont have to exist in *set*. For instance,\n\n```nix\nremoveAttrs { x = 1; y = 2; z = 3; } [ \"a\" \"x\" \"z\" ]\n```\n\nevaluates to `{ y = 2; }`."
},
"replaceStrings": {
"args": ["from", "to", "s"],
"arity": 3,
"doc": "Given string *s*, replace every occurrence of the strings in *from*\nwith the corresponding string in *to*. For example,\n\n```nix\nbuiltins.replaceStrings [\"oo\" \"a\"] [\"a\" \"i\"] \"foobar\"\n```\n\nevaluates to `\"fabir\"`."
},
"seq": {
"args": ["e1", "e2"],
"arity": 2,
"doc": "Evaluate *e1*, then evaluate and return *e2*. This ensures that a\ncomputation is strict in the value of *e1*."
},
"sort": {
"args": ["comparator", "list"],
"arity": 2,
"doc": "Return *list* in sorted order. It repeatedly calls the function\n*comparator* with two elements. The comparator should return `true`\nif the first element is less than the second, and `false` otherwise.\nFor example,\n\n```nix\nbuiltins.sort builtins.lessThan [ 483 249 526 147 42 77 ]\n```\n\nproduces the list `[ 42 77 147 249 483 526 ]`.\n\nThis is a stable sort: it preserves the relative order of elements\ndeemed equal by the comparator."
},
"split": {
"args": ["regex", "str"],
"arity": 2,
"doc": "Returns a list composed of non matched strings interleaved with the\nlists of the [extended POSIX regular\nexpression](http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_04)\n*regex* matches of *str*. Each item in the lists of matched\nsequences is a regex group.\n\n```nix\nbuiltins.split \"(a)b\" \"abc\"\n```\n\nEvaluates to `[ \"\" [ \"a\" ] \"c\" ]`.\n\n```nix\nbuiltins.split \"([ac])\" \"abc\"\n```\n\nEvaluates to `[ \"\" [ \"a\" ] \"b\" [ \"c\" ] \"\" ]`.\n\n```nix\nbuiltins.split \"(a)|(c)\" \"abc\"\n```\n\nEvaluates to `[ \"\" [ \"a\" null ] \"b\" [ null \"c\" ] \"\" ]`.\n\n```nix\nbuiltins.split \"([[:upper:]]+)\" \" FOO \"\n```\n\nEvaluates to `[ \" \" [ \"FOO\" ] \" \" ]`."
},
"splitVersion": {
"args": ["s"],
"arity": 1,
"doc": "Split a string representing a version into its components, by the\nsame version splitting logic underlying the version comparison in\n[`nix-env -u`](../command-ref/nix-env.md#operation---upgrade)."
},
"storePath": {
"args": ["path"],
"arity": 1,
"doc": "This function allows you to define a dependency on an already\nexisting store path. For example, the derivation attribute `src\n= builtins.storePath /nix/store/f1d18v1y…-source` causes the\nderivation to depend on the specified path, which must exist or\nbe substitutable. Note that this differs from a plain path\n(e.g. `src = /nix/store/f1d18v1y…-source`) in that the latter\ncauses the path to be *copied* again to the Nix store, resulting\nin a new path (e.g. `/nix/store/ld01dnzc…-source-source`).\n\nThis function is not available in pure evaluation mode."
},
"stringLength": {
"args": ["e"],
"arity": 1,
"doc": "Return the length of the string *e*. If *e* is not a string,\nevaluation is aborted."
},
"sub": {
"args": ["e1", "e2"],
"arity": 2,
"doc": "Return the difference between the numbers *e1* and *e2*."
},
"substring": {
"args": ["start", "len", "s"],
"arity": 3,
"doc": "Return the substring of *s* from character position *start*\n(zero-based) up to but not including *start + len*. If *start* is\ngreater than the length of the string, an empty string is returned,\nand if *start + len* lies beyond the end of the string, only the\nsubstring up to the end of the string is returned. *start* must be\nnon-negative. For example,\n\n```nix\nbuiltins.substring 0 3 \"nixos\"\n```\n\nevaluates to `\"nix\"`."
},
"tail": {
"args": ["list"],
"arity": 1,
"doc": "Return the second to last elements of a list; abort evaluation if\nthe argument isnt a list or is an empty list.\n\n> **Warning**\n>\n> This function should generally be avoided since it's inefficient:\n> unlike Haskell's `tail`, it takes O(n) time, so recursing over a\n> list by repeatedly calling `tail` takes O(n^2) time."
},
"throw": {
"args": ["s"],
"arity": 1,
"doc": "Throw an error message *s*. This usually aborts Nix expression\nevaluation, but in `nix-env -qa` and other commands that try to\nevaluate a set of derivations to get information about those\nderivations, a derivation that throws an error is silently skipped\n(which is not the case for `abort`)."
},
"toFile": {
"args": ["name", "s"],
"arity": 2,
"doc": "Store the string *s* in a file in the Nix store and return its\npath. The file has suffix *name*. This file can be used as an\ninput to derivations. One application is to write builders\n“inline”. For instance, the following Nix expression combines the\n[Nix expression for GNU Hello](expression-syntax.md) and its\n[build script](build-script.md) into one file:\n\n```nix\n{ stdenv, fetchurl, perl }:\n\nstdenv.mkDerivation {\n name = \"hello-2.1.1\";\n\n builder = builtins.toFile \"builder.sh\" \"\n source $stdenv/setup\n\n PATH=$perl/bin:$PATH\n\n tar xvfz $src\n cd hello-*\n ./configure --prefix=$out\n make\n make install\n \";\n\n src = fetchurl {\n url = \"http://ftp.nluug.nl/pub/gnu/hello/hello-2.1.1.tar.gz\";\n sha256 = \"1md7jsfd8pa45z73bz1kszpp01yw6x5ljkjk2hx7wl800any6465\";\n };\n inherit perl;\n}\n```\n\nIt is even possible for one file to refer to another, e.g.,\n\n```nix\nbuilder = let\n configFile = builtins.toFile \"foo.conf\" \"\n # This is some dummy configuration file.\n ...\n \";\nin builtins.toFile \"builder.sh\" \"\n source $stdenv/setup\n ...\n cp ${configFile} $out/etc/foo.conf\n\";\n```\n\nNote that `${configFile}` is an\n[antiquotation](language-values.md), so the result of the\nexpression `configFile`\n(i.e., a path like `/nix/store/m7p7jfny445k...-foo.conf`) will be\nspliced into the resulting string.\n\nIt is however *not* allowed to have files mutually referring to each\nother, like so:\n\n```nix\nlet\n foo = builtins.toFile \"foo\" \"...${bar}...\";\n bar = builtins.toFile \"bar\" \"...${foo}...\";\nin foo\n```\n\nThis is not allowed because it would cause a cyclic dependency in\nthe computation of the cryptographic hashes for `foo` and `bar`.\n\nIt is also not possible to reference the result of a derivation. If\nyou are using Nixpkgs, the `writeTextFile` function is able to do\nthat."
},
"toJSON": {
"args": ["e"],
"arity": 1,
"doc": "Return a string containing a JSON representation of *e*. Strings,\nintegers, floats, booleans, nulls and lists are mapped to their JSON\nequivalents. Sets (except derivations) are represented as objects.\nDerivations are translated to a JSON string containing the\nderivations output path. Paths are copied to the store and\nrepresented as a JSON string of the resulting store path."
},
"toPath": {
"args": ["s"],
"arity": 1,
"doc": "**DEPRECATED.** Use `/. + \"/path\"` to convert a string into an absolute\npath. For relative paths, use `./. + \"/path\"`."
},
"toString": {
"args": ["e"],
"arity": 1,
"doc": "Convert the expression *e* to a string. *e* can be:\n\n - A string (in which case the string is returned unmodified).\n\n - A path (e.g., `toString /foo/bar` yields `\"/foo/bar\"`.\n\n - A set containing `{ __toString = self: ...; }` or `{ outPath = ...; }`.\n\n - An integer.\n\n - A list, in which case the string representations of its elements\n are joined with spaces.\n\n - A Boolean (`false` yields `\"\"`, `true` yields `\"1\"`).\n\n - `null`, which yields the empty string."
},
"toXML": {
"args": ["e"],
"arity": 1,
"doc": "Return a string containing an XML representation of *e*. The main\napplication for `toXML` is to communicate information with the\nbuilder in a more structured format than plain environment\nvariables.\n\nHere is an example where this is the case:\n\n```nix\n{ stdenv, fetchurl, libxslt, jira, uberwiki }:\n\nstdenv.mkDerivation (rec {\n name = \"web-server\";\n\n buildInputs = [ libxslt ];\n\n builder = builtins.toFile \"builder.sh\" \"\n source $stdenv/setup\n mkdir $out\n echo \"$servlets\" | xsltproc ${stylesheet} - > $out/server-conf.xml ①\n \";\n\n stylesheet = builtins.toFile \"stylesheet.xsl\" ②\n \"<?xml version='1.0' encoding='UTF-8'?>\n <xsl:stylesheet xmlns:xsl='http://www.w3.org/1999/XSL/Transform' version='1.0'>\n <xsl:template match='/'>\n <Configure>\n <xsl:for-each select='/expr/list/attrs'>\n <Call name='addWebApplication'>\n <Arg><xsl:value-of select=\\\"attr[@name = 'path']/string/@value\\\" /></Arg>\n <Arg><xsl:value-of select=\\\"attr[@name = 'war']/path/@value\\\" /></Arg>\n </Call>\n </xsl:for-each>\n </Configure>\n </xsl:template>\n </xsl:stylesheet>\n \";\n\n servlets = builtins.toXML [ ③\n { path = \"/bugtracker\"; war = jira + \"/lib/atlassian-jira.war\"; }\n { path = \"/wiki\"; war = uberwiki + \"/uberwiki.war\"; }\n ];\n})\n```\n\nThe builder is supposed to generate the configuration file for a\n[Jetty servlet container](http://jetty.mortbay.org/). A servlet\ncontainer contains a number of servlets (`*.war` files) each\nexported under a specific URI prefix. So the servlet configuration\nis a list of sets containing the `path` and `war` of the servlet\n(①). This kind of information is difficult to communicate with the\nnormal method of passing information through an environment\nvariable, which just concatenates everything together into a\nstring (which might just work in this case, but wouldnt work if\nfields are optional or contain lists themselves). 
Instead the Nix\nexpression is converted to an XML representation with `toXML`,\nwhich is unambiguous and can easily be processed with the\nappropriate tools. For instance, in the example an XSLT stylesheet\n(at point ②) is applied to it (at point ①) to generate the XML\nconfiguration file for the Jetty server. The XML representation\nproduced at point ③ by `toXML` is as follows:\n\n```xml\n<?xml version='1.0' encoding='utf-8'?>\n<expr>\n <list>\n <attrs>\n <attr name=\"path\">\n <string value=\"/bugtracker\" />\n </attr>\n <attr name=\"war\">\n <path value=\"/nix/store/d1jh9pasa7k2...-jira/lib/atlassian-jira.war\" />\n </attr>\n </attrs>\n <attrs>\n <attr name=\"path\">\n <string value=\"/wiki\" />\n </attr>\n <attr name=\"war\">\n <path value=\"/nix/store/y6423b1yi4sx...-uberwiki/uberwiki.war\" />\n </attr>\n </attrs>\n </list>\n</expr>\n```\n\nNote that we used the `toFile` built-in to write the builder and\nthe stylesheet “inline” in the Nix expression. The path of the\nstylesheet is spliced into the builder using the syntax `xsltproc\n${stylesheet}`."
},
"trace": {
"args": ["e1", "e2"],
"arity": 2,
"doc": "Evaluate *e1* and print its abstract syntax representation on\nstandard error. Then return *e2*. This function is useful for\ndebugging."
},
"traceVerbose": {
"args": ["e1", "e2"],
"arity": 2,
"doc": "Evaluate *e1* and print its abstract syntax representation on standard\nerror if `--trace-verbose` is enabled. Then return *e2*. This function\nis useful for debugging."
},
"tryEval": {
"args": ["e"],
"arity": 1,
"doc": "Try to shallowly evaluate *e*. Return a set containing the\nattributes `success` (`true` if *e* evaluated successfully,\n`false` if an error was thrown) and `value`, equalling *e* if\nsuccessful and `false` otherwise. `tryEval` will only prevent\nerrors created by `throw` or `assert` from being thrown.\nErrors `tryEval` will not catch are for example those created\nby `abort` and type errors generated by builtins. Also note that\nthis doesn't evaluate *e* deeply, so `let e = { x = throw \"\"; };\nin (builtins.tryEval e).success` will be `true`. Using\n`builtins.deepSeq` one can get the expected result:\n`let e = { x = throw \"\"; }; in\n(builtins.tryEval (builtins.deepSeq e e)).success` will be\n`false`."
},
"typeOf": {
"args": ["e"],
"arity": 1,
"doc": "Return a string representing the type of the value *e*, namely\n`\"int\"`, `\"bool\"`, `\"string\"`, `\"path\"`, `\"null\"`, `\"set\"`,\n`\"list\"`, `\"lambda\"` or `\"float\"`."
},
"zipAttrsWith": {
"args": ["f", "list"],
"arity": 2,
"doc": "Transpose a list of attribute sets into an attribute set of lists,\nthen apply `mapAttrs`.\n\n`f` receives two arguments: the attribute name and a non-empty\nlist of all values encountered for that attribute name.\n\nThe result is an attribute set where the attribute names are the\nunion of the attribute names in each element of `list`. The attribute\nvalues are the return values of `f`.\n\n```nix\nbuiltins.zipAttrsWith\n (name: values: { inherit name values; })\n [ { a = \"x\"; } { a = \"y\"; b = \"z\"; } ]\n```\n\nevaluates to\n\n```\n{\n a = { name = \"a\"; values = [ \"x\" \"y\" ]; };\n b = { name = \"b\"; values = [ \"z\" ]; };\n}\n```"
}
}

View File

@ -1,116 +0,0 @@
{
"abort": { "fn_type": "abort :: String" },
"add": { "fn_type": "add :: Number -> Number -> Number" },
"all": { "fn_type": "all :: (a -> Bool) -> [a] -> Bool" },
"Any": { "fn_type": "Any :: (a -> Bool) -> [a] -> Bool" },
"attrNames": { "fn_type": "attrNames :: AttrSet -> [a]" },
"attrValues": { "fn_type": "attrValues :: AttrSet -> [a]" },
"baseNameOf": { "fn_type": "baseNameOf :: String -> String" },
"bitAnd": { "fn_type": "bitAnd :: Int -> Int -> Int" },
"bitOr": { "fn_type": "bitOr :: Int -> Int -> Int" },
"bitXor": { "fn_type": "bitXor :: Int -> Int -> Int" },
"break": { "fn_type": "break :: a -> a" },
"catAttrs": {
"fn_type": "cattAtrs :: String -> [ { ${name} :: a } ] -> [a]"
},
"ceil": { "fn_type": "ceil :: Float -> Int" },
"compareVersions": {
"fn_type": "compareVersions :: String -> String -> Int"
},
"concatLists": { "fn_type": "concatLists :: [List] -> []" },
"concatMap": { "fn_type": "concatMap :: (a -> b) -> [a] -> [b]" },
"concatStringsSep": {
"fn_type": "concatStringsSep :: String -> [String] -> String"
},
"deepSeq": { "fn_type": "deepSeq :: a -> b -> b" },
"derivation": {
"fn_type": "let \n Derivation :: {\n all :: [ Derivation ];\n builder :: String;\n drvAttrs :: {\n builder = String; \n name = String;\n outputs = [ output :: String ]; \n system = String;\n ${additionalArgs} :: String;\n }\n drvPath :: String;\n name :: String;\n outPath :: String;\n outputName :: String;\n outputs :: [ output :: String ];\n system :: String;\n type :: \"derivation\";\n ${output} :: Derivation;\n ${additionalArgs} :: String;\n };\nin\n builtins.derivation :: {\n name :: String;\n outputs :: [ output :: String ] ? [ \"out\" ];\n builder :: String;\n system :: String;\n ${additionalArgs} :: String;\n } -> Derivation"
},
"dirOf": { "fn_type": "dirOf :: String -> String" },
"div": { "fn_type": "div :: Number -> Number -> Number" },
"elem": { "fn_type": "elem :: a -> [b] -> Bool" },
"elemAt": { "fn_type": "elemAt :: [a] -> Int -> b" },
"fetchClosure": { "fn_type": "fetchClosure :: AttrSet -> AttrSet" },
"fetchGit": { "fn_type": "fetchgit :: AttrSet -> AttrSet" },
"fetchTarball": { "fn_type": "fetchTarball :: AttrSet -> AttrSet" },
"fetchurl": { "fn_type": "fetchurl :: String -> AttrSet" },
"filter": { "fn_type": "filter :: (a -> Bool) -> [a] -> [b]" },
"filterSource": {
"fn_type": "filterSource :: (Path -> String -> Bool) -> Path -> StorePath"
},
"floor": { "fn_type": "floor :: Float -> Int" },
"foldl'": {
"fn_type": "foldl' :: (a -> b -> c) -> a -> [b] -> c"
},
"fromJSON": { "fn_type": "fromJSON :: String -> a" },
"functionArgs": { "fn_type": "functionArgs :: (a) -> AttrSet" },
"genList": { "fn_type": "genList :: (a -> b) -> a -> [b]" },
"genericClosure": { "fn_type": "genericClosure :: AttrSet -> [AttrSet]" },
"getAttr": { "fn_type": "getAttr :: String -> AttrSet -> a" },
"getEnv": { "fn_type": "getEnv :: String -> String" },
"getFlake": { "fn_type": "getFlake :: AttrSet -> AttrSet" },
"groupBy": { "fn_type": "groupBy :: (a -> b) -> [a] -> AttrSet" },
"hasAttr": { "fn_type": "hasAttr :: String -> AttrSet -> Bool" },
"hashFile": { "fn_type": "hashFile :: String -> Path -> String" },
"hashString": { "fn_type": "hashString :: String -> String -> String" },
"head": { "fn_type": "head :: [a] -> a" },
"import": { "fn_type": "import :: Path -> a" },
"intersectAttrs": {
"fn_type": "intersectAttrs :: AttrSet -> AttrSet -> AttrSet"
},
"isAttrs": { "fn_type": "isAttrs :: a -> Bool" },
"isBool": { "fn_type": "isBool :: a -> Bool" },
"isFloat": { "fn_type": "isFloat :: a -> Bool" },
"isFunction": { "fn_type": "isFunction :: a -> Bool" },
"isInt": { "fn_type": "isInt :: a -> Bool" },
"isList": { "fn_type": "isList :: a -> Bool" },
"isNull": { "fn_type": "isNull :: a -> Bool" },
"isPath": { "fn_type": "isPath :: a -> Bool" },
"isString": { "fn_type": "isString :: a -> Bool" },
"length": { "fn_type": "length :: [a] -> Int" },
"lessThan": { "fn_type": "lessThan :: Number -> Number -> Bool" },
"listToAttrs": {
"fn_type": "listToAttrs :: [{name :: String; value :: a}] -> AttrSet"
},
"map": { "fn_type": "map :: (a -> b) -> [a] -> [b]" },
"mapAttrs": { "fn_type": "mapAttrs :: (a -> b -> c) -> AttrSet -> AttrSet" },
"match": { "fn_type": "match :: String -> String -> Bool" },
"mul": { "fn_type": "mul :: Number -> Number -> Number" },
"parseDrvName": { "fn_type": "parseDrvName :: String -> AttrSet" },
"partition": { "fn_type": "partition :: (a -> Bool) -> [a] -> AttrSet" },
"Path": { "fn_type": "Path :: AttrSet -> StorePath" },
"pathExists": { "fn_type": "pathExists :: Path -> Bool" },
"placeholder": { "fn_type": "placeholder :: String -> String" },
"readDir": { "fn_type": "readDir :: Path -> AttrSet" },
"readFile": { "fn_type": "readFile :: Path -> String" },
"removeAttrs": { "fn_type": "removeAttrs :: AttrSet -> [a] -> AttrSet" },
"replaceStrings": {
"fn_type": "replaceStrings :: [String] -> [String] -> String -> String"
},
"seq": { "fn_type": "seq :: a -> b -> b" },
"sort": { "fn_type": "sort :: (a -> b -> Bool) -> [a] -> [b]" },
"split": { "fn_type": "split :: String -> String -> [String]" },
"splitVersion": { "fn_type": "splitVersion :: String -> [String]" },
"StorePath": { "fn_type": "StorePath :: StorePath -> StorePath" },
"stringLength": { "fn_type": "stringLength :: String -> Int" },
"sub": { "fn_type": "sub :: Number -> Number -> Number" },
"substring": { "fn_type": "substring :: Int -> Int -> String -> String" },
"tail": { "fn_type": "tail :: [a] -> a" },
"throw": { "fn_type": "throw :: String" },
"toFile": { "fn_type": "toFile :: Path -> String -> StorePath " },
"toJSON": { "fn_type": "toJSON :: a -> String" },
"toPath": { "fn_type": "toPath :: String -> Path" },
"toString": { "fn_type": "toString :: a -> String" },
"toXML": { "fn_type": "toXML :: a -> String" },
"trace": { "fn_type": "trace :: a -> b -> b" },
"traceVerbose": { "fn_type": "traceVerbose :: a -> b -> b" },
"tryEval": { "fn_type": "tryEval :: a" },
"typeOf": { "fn_type": "typeOf :: a -> String" },
"zipAttrsWith": {
"fn_type": "zipAttrsWith :: (String -> [a] ) -> [a] -> AttrSet"
},
"fromTOML": {
"fn_type": "fromTOML :: String -> { Any }"
}
}

File diff suppressed because one or more lines are too long

View File

@ -1,29 +0,0 @@
let
Derivation :: {
all :: [ Derivation ];
builder :: String;
drvAttrs :: {
builder = String;
name = String;
outputs = [ output :: String ];
system = String;
${additionalArgs} :: String;
}
drvPath :: String;
name :: String;
outPath :: String;
outputName :: String;
outputs :: [ output :: String ];
system :: String;
type :: "derivation";
${output} :: Derivation;
${additionalArgs} :: String;
};
in
builtins.derivation :: {
name :: String;
outputs :: [ output :: String ] ? [ "out" ];
builder :: String;
system :: String;
${additionalArgs} :: String;
} -> Derivation

File diff suppressed because one or more lines are too long

View File

@ -1,32 +0,0 @@
# Parse a TOML configuration from a string
```
builtins.fromTOML ''
# Toplevel
foo = "bar"
# Simple Attrset
[set]
info = "foobar"
# Nested Attrset
[set.nested]
meta = 42
''
->
{
foo = "bar";
set = {
info = "foobar";
nested = {
meta = 42;
};
};
}
```
Also works nicely with `readFile`:
```
builtins.fromTOML (builtins.readFile ./config.toml)
```

View File

@ -1,47 +0,0 @@
{
"addErrorContext": {
"args": ["s", "c"],
"arity": 2,
"doc": ""
},
"appendContext": {
"args": ["s", "c"],
"arity": 2,
"doc": ""
},
"builtins": {
"args": [],
"arity": 0,
"doc": "The set `builtins` contains all the built-in functions and values.\nYou can use `builtins` to test for the availability of features in\nthe Nix installation, e.g.,\n\n```nix\nif builtins ? getEnv then builtins.getEnv \"PATH\" else \"\"\n```\n\nThis allows a Nix expression to fall back gracefully on older Nix\ninstallations that dont have the desired built-in function."
},
"currentSystem": {
"args": [],
"arity": 0,
"doc": "The built-in value `currentSystem` evaluates to the Nix platform\nidentifier for the Nix installation on which the expression is being\nevaluated, such as `\"i686-linux\"` or `\"x86_64-darwin\"`."
},
"currentTime": {
"args": [],
"arity": 0,
"doc": "The built-in value `currentSystem` evaluates to the current seconds since Jan 01 1970. (UTC)."
},
"false": {
"args": [],
"arity": 0,
"doc": "The built-in boolean value `false`."
},
"true": {
"args": [],
"arity": 0,
"doc": "The built-in boolean value `true`."
},
"null": {
"args": [],
"arity": 0,
"doc": "The built-in value `null`."
},
"fromTOML": {
"args": ["s"],
"arity": 1,
"doc": "# Parse a TOML-configuration from String\n\n```\nbuiltins.fromTOML ''\n# Toplevel\nfoo = \"bar\"\n\n# Simple Attrset\n[set]\ninfo = \"foobar\"\n\n# Nested Attrset\n[set.nested]\nmeta = 42\n''\n-> \n{\n foo = \"bar\";\n set = {\n info = \"foobar\";\n nested = {\n meta = 42;\n };\n };\n}\n```\n\nAlso works nicely with `readFile`:\n\n```\nbuiltins.fromTOML (builtins.readFile ./config.toml)\n```\n"
}
}

View File

@ -1,25 +0,0 @@
# Flake-parts module exposing the builtins-data generator as a package
# and a matching dev shell.
_: {
  perSystem = { self', inputs', pkgs, ... }:
    let
      # Derivation that runs the npm "build" script (which invokes
      # make-builtins.js — see package.json) to produce data.json.
      builtins-data = pkgs.stdenv.mkDerivation {
        pname = "builtins-data";
        version = "0.1.0";
        src = ./.;
        # Node is only needed at build time to run the generator script.
        nativeBuildInputs = [ pkgs.nodejs_20 ];
        buildPhase = ''
          npm run build
        '';
        # The build writes data.json into the working directory; the
        # store output is just that single JSON file, not a directory.
        installPhase = ''
          cat data.json > $out
        '';
      };
    in
    {
      packages = {
        inherit builtins-data;
      };
      # Dev shell inheriting the package's build inputs (nodejs_20).
      devShells.builtins-data = pkgs.mkShell {
        inputsFrom = [ builtins-data ];
      };
    };
}

View File

@ -1,59 +0,0 @@
// Merge the builtins documentation sources into a single data.json
// consumed by the docs site. Usage: node make-builtins.js <OUT_PATH>
const builtins = require("./data/builtins.json");
const names = require("./data/builtins-names.json");
const types = require("./data/builtins.types.json");
const fs = require("fs");
const path = require("path");
const { exit } = require("process");
const { derivation } = require("./data/derivation.json");
const more = require("./data/more.json");

// Fold the hand-written extra entries into the generated builtins data;
// later sources win on name collisions.
builtins["derivation"] = derivation;
for (const [name, meta] of Object.entries(more)) {
  builtins[name] = meta;
}

/**
 * Render the markdown "takes N arguments: ..." header for a builtin,
 * or an empty string for nullary builtins/values.
 * @param {string[]} args - argument names
 * @param {number} arity - number of arguments
 * @returns {string}
 */
function arityHeader(args, arity) {
  return arity > 0
    ? `takes ${arity} arguments: __\`${args.join("` `")}\`__ \n`
    : "";
}

const DATA_PATH = process.argv.length >= 3 && process.argv[2];
if (!DATA_PATH) {
  console.error("argument OUT_PATH is not set");
  console.info("usage: node make-builtins.js <OUT_PATH>");
  exit(1);
} else {
  // Names listed in builtins-names.json that never get a documentation
  // entry. A Set gives O(1) deletes instead of re-filtering the whole
  // array for every builtin (was O(n^2)).
  const leftover = new Set(names);
  const info = Object.entries(builtins).map(([name, meta]) => {
    leftover.delete(name);
    const fn_type = types[name]?.fn_type || null;
    const { args, arity, doc } = meta;
    // NOTE: the per-builtin debug console.log of the arity header was
    // removed; it duplicated the expression below and polluted output.
    return {
      id: `builtins.${name}`,
      category: "builtins",
      fn_type,
      name,
      description: [arityHeader(args, arity), `${doc}\n`],
    };
  });
  // Report undocumented names so gaps are visible in build logs.
  console.log({ leftover: [...leftover] });
  try {
    fs.writeFileSync(path.join(DATA_PATH, "data.json"), JSON.stringify(info));
  } catch (err) {
    // Previously the async-write error was only logged and the process
    // still exited 0, silently producing no output in CI.
    console.error(err);
    process.exitCode = 1;
  }
}

View File

@ -1,11 +0,0 @@
{
"name": "builtins-data",
"version": "1.0.0",
"description": "",
"main": "make-builtins.js",
"scripts": {
"build": "node ./make-builtins.js ./"
},
"author": "",
"license": "ISC"
}