mirror of https://github.com/nix-community/noogle.git
synced 2024-11-22 05:33:32 +03:00

test & refactoring

This commit is contained in:
parent 8a3cb24bf9
commit d4b799d1b1
@@ -155,11 +155,11 @@
"nixpkgs-regression": "nixpkgs-regression"
},
"locked": {
"lastModified": 1700475714,
"narHash": "sha256-8OXrkNaIpdtErfLN4u1Ew1IThTbk6n8POYRQfNatkSA=",
"lastModified": 1702384142,
"narHash": "sha256-AzZ/FLCMp8rqfY5wyFW+VvyEUA8OoZ43aBkAWp7NdPg=",
"owner": "hsjobeki",
"repo": "nix",
"rev": "9bf2153e696d88c6beb8e34709bb743af5cdd940",
"rev": "1c33d81594c08fc62e3e94e5496a53524beedf7a",
"type": "github"
},
"original": {
@@ -5,7 +5,7 @@ pkgs.stdenv.mkDerivation {
  nativeBuildInputs = [ nix ];
  buildPhase = ''
    nix-instantiate --eval --strict --json --store $PWD \
      eval.nix --arg 'pkgs' 'import ${nixpkgs} {}' -A docs.lib \
      eval.nix --arg 'pkgs' 'import ${nixpkgs} {}' -A all \
    > $out
  '';
}
@@ -25,17 +25,20 @@ let
  docs = {
    ############# Recursive analysis sets
    lib = collectFns lib { initialPath = [ "lib" ]; };
    rustTools = collectFns pkgs.pkgs.rustPackages {
    rustTools = collectFns pkgs.rustPackages {
      initialPath = [ "pkgs" "rustPackages" ];
    };
    stdenvTools = getDocsFromSet pkgs.stdenv [ "pkgs" "stdenv" ];

    ############# Non-recursive analysis sets
    pkgs = getDocsFromSet pkgs [ "pkgs" ];
    dockerTools = getDocsFromSet pkgs.pkgs.dockerTools [ "pkgs" "dockerTools" ];
    dockerTools = getDocsFromSet pkgs.dockerTools [ "pkgs" "dockerTools" ];
    pythonTools =
      getDocsFromSet pkgs.pkgs.pythonPackages [ "pkgs" "pythonPackages" ];
      getDocsFromSet pkgs.pythonPackages [ "pkgs" "pythonPackages" ];
    builtins =
      getDocsFromSet builtins [ "builtins" ];
  };
  all = builtins.foldl' (acc: name: acc ++ docs.${name}) [ ] (builtins.attrNames docs);

  # generate test_data for pesto
  test_data = {
@@ -43,4 +46,4 @@ let
  };

in
{ inherit tools pkgs docs toFile test_data; }
{ inherit tools pkgs docs toFile getDocsFromSet collectFns all test_data; }
@@ -37,8 +37,8 @@ let
  { initialPath ? [ ], limit ? null, }:
  let
    filterFns = builtins.filter (item:
      item.docs != null
      # item.type == "lambda"
      item.docs != null &&
      item.type == "lambda"
    );
    getFnDocs = map (fn: {
      path = initialPath ++ fn.path;
3 pesto/.gitignore vendored
@@ -1 +1,2 @@
target
out
20 pesto/Cargo.lock generated
@@ -395,6 +395,7 @@ dependencies = [
"serde",
"serde_json",
"serde_with",
"serde_yaml",
"textwrap",
"walkdir",
]
@@ -555,6 +556,19 @@ dependencies = [
"syn",
]

[[package]]
name = "serde_yaml"
version = "0.9.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3cc7a1570e38322cfe4154732e5110f887ea57e22b76f4bfd32b5bdd3368666c"
dependencies = [
"indexmap 2.1.0",
"itoa",
"ryu",
"serde",
"unsafe-libyaml",
]

[[package]]
name = "smawk"
version = "0.3.2"
@@ -642,6 +656,12 @@ version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85"

[[package]]
name = "unsafe-libyaml"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f28467d3e1d3c6586d8f25fa243f544f5800fec42d97032474e17222c2b75cfa"

[[package]]
name = "utf8parse"
version = "0.2.1"
@@ -1,5 +1,6 @@
[package]
name = "pesto"
description = "Given a list of positions extract doc-comment into markdown and frontmatter."
version = "0.1.0"
edition = "2021"

@@ -20,6 +21,7 @@ serde = { version = "1.0", features = ["derive", "rc"] }
serde_json = "1.0"
expect-test = "1.4.0"
serde_with = "3.4.0"
serde_yaml = "0.9.27"


# [dev-dependencies]
167 pesto/src/alias.rs Normal file
@@ -0,0 +1,167 @@
use std::{collections::HashMap, rc::Rc};

use crate::pasta::{AliasList, Docs, ValuePath};

// pub trait Aliases {
//     fn find_aliases(self) -> AliasList;
// }

// impl<'a> Aliases for DocIndex<'a> {
//     fn find_aliases(self) -> AliasList {

//     }
// }

/// Match
/// partially applied functions -> special case, don't know how it is "correct". Would need access to the upvalues?
/// Simple lambdas (not partially applied)
/// Match primop: (Doesn't have source position)
/// Eq countApplied,
/// Eq content
/// Other isPrimop,
/// Content not empty
/// Match Non-Primop
/// Eq position
pub fn find_aliases(item: &Docs, list: &Vec<&Docs>) -> AliasList {
    let res: AliasList = list
        .iter()
        .filter_map(|other| {
            if let (Some(s_meta), Some(o_meta)) = (&item.docs.lambda, &other.docs.lambda) {
                // Avoid creating an alias for the same item.
                if item.path == other.path {
                    return None;
                }
                // We cannot safely compare using the current value introspection if countApplied not eq 0.
                if (s_meta.countApplied != Some(0))
                    // Use less accurate name (only) aliases. This can lead to potentially false positives.
                    // e.g. lib.lists.last <=> lib.last
                    // comparing only the "last" string. Don't use any introspection
                    // TODO: find out how to compare partially applied values.
                    // A correct solution would include comparing the upValues ?
                    && item.path.last() == other.path.last()
                {
                    return Some(other.path.clone());
                }
                return match s_meta.isPrimop {
                    true => {
                        let is_empty = match &s_meta.content {
                            Some(c) => c.is_empty(),
                            None => true,
                        };

                        if o_meta.isPrimop && o_meta.content == s_meta.content && !is_empty {
                            return Some(other.path.clone());
                        }
                        None
                    }
                    false => {
                        if s_meta.position == o_meta.position
                            && s_meta.countApplied == Some(0)
                            && s_meta.countApplied == o_meta.countApplied
                        {
                            return Some(other.path.clone());
                        }
                        None
                    }
                };
            }
            None
        })
        .collect();
    res
}

pub struct FnCategories<'a> {
    pub primop: Vec<&'a Docs>,
    pub casual: Vec<&'a Docs>,
    pub partial: Vec<&'a Docs>,
}
/// Build categories for efficiently finding aliases. (This is very expensive O(n^2). )
/// Aliases can only exist within one subgroup, iterating over other items is a waste of time.
/// With the current value introspection, any value that is an alias of a builtin, also inherits the builtins docs and the isPrimop flag set.
///
/// Group docs into the following subgroups
/// 1. primop_lambdas
/// e.g, lib.add, builtins.add
///
/// 2.non_primop_lambdas
/// e.g, lib.attrByPath
///
/// 3.partially_applied lambdas
/// e.g., concatLines (is concatMapStrings applied with f := Lambda<(s: s + "\n");>)
/// This is a special case, it is very hard, to properly detect aliases at this level. Although the alias must also be found in this subgroup.
pub fn categorize(data: &Vec<Docs>) -> FnCategories {
    // For finding aliases.
    // Group docs into these subgroups.
    // Aliases can only exist within one subgroup, iterating over other items is a waste of time.
    let mut primop_lambdas: Vec<&Docs> = vec![];
    let mut non_primop_lambdas: Vec<&Docs> = vec![];
    let mut partially_applieds: Vec<&Docs> = vec![];

    for item in data.iter() {
        if let Some(lambda) = &item.docs.lambda {
            match lambda.countApplied {
                Some(0) | None => {
                    if lambda.isPrimop {
                        primop_lambdas.push(&item);
                    }
                    if !lambda.isPrimop {
                        non_primop_lambdas.push(&item);
                    }
                }
                _ => {
                    // #
                    partially_applieds.push(&item);
                }
            }
        }
    }
    FnCategories {
        primop: primop_lambdas,
        casual: non_primop_lambdas,
        partial: partially_applieds,
    }
}

pub type AliasMap = HashMap<Rc<ValuePath>, AliasList>;

pub fn init_alias_map(data: &Vec<Docs>, categories: FnCategories) -> AliasMap {
    let primop_lambdas = categories.primop;
    let non_primop_lambdas = categories.casual;
    let partially_applieds = categories.partial;

    let mut primops: Vec<&Docs> = vec![];
    primops.extend(primop_lambdas.iter().chain(&partially_applieds));

    let mut non_primops: Vec<&Docs> = vec![];
    non_primops.extend(non_primop_lambdas.iter().chain(&partially_applieds));

    let mut alias_map: AliasMap = HashMap::new();
    for item in data.iter() {
        if let Some(lambda) = &item.docs.lambda {
            match lambda.countApplied {
                Some(0) => {
                    if lambda.isPrimop {
                        alias_map.insert(item.path.clone(), find_aliases(&item, &primop_lambdas));
                    }
                    if !lambda.isPrimop {
                        alias_map
                            .insert(item.path.clone(), find_aliases(&item, &non_primop_lambdas));
                    }
                }
                None => {
                    if lambda.isPrimop {
                        alias_map.insert(item.path.clone(), find_aliases(&item, &primops));
                    }
                    if !lambda.isPrimop {
                        alias_map.insert(item.path.clone(), find_aliases(&item, &non_primops));
                    }
                }
                Some(_) => {
                    alias_map.insert(item.path.clone(), find_aliases(&item, &partially_applieds));
                }
            };
        }
    }
    alias_map
}
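The split above is meant to be consumed as a pipeline: categorize buckets the docs once, and init_alias_map reuses those buckets for the quadratic alias search. A minimal sketch of the intended call order (assuming a parsed docs: Vec<Docs>, as used in bulk.rs below):

    use crate::alias::{categorize, init_alias_map, AliasMap};

    // Bucket once, then search for aliases only within compatible buckets.
    let categories = categorize(&docs);
    let alias_map: AliasMap = init_alias_map(&docs, categories);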
@@ -1,7 +1,8 @@
use std::{collections::HashMap, path::PathBuf, println, rc::Rc, time::Instant, vec};

use crate::{
    pasta::{Docs, Files, LambdaMeta, Pasta},
    alias::{categorize, init_alias_map},
    pasta::{Docs, Files, Pasta},
    position::{DocComment, DocIndex, FilePosition, NixDocComment},
};

@@ -103,93 +104,6 @@ fn fill_docs(
    filled_docs
}

/// Build categories for efficiently finding aliases. (This is very expensive O(n^2). )
/// Aliases can only exist within one subgroup, iterating over other items is a waste of time.
/// With the current value introspection, any value that is an alias of a builtin, also inherits the builtins docs and the isPrimop flag set.
///
/// Group docs into the following subgroups
/// 1. primop_lambdas
/// e.g, lib.add, builtins.add
///
/// 2.non_primop_lambdas
/// e.g, lib.attrByPath
///
/// 3.partially_applied lambdas
/// e.g., concatLines (is concatMapStrings applied with f := Lambda<(s: s + "\n");>)
/// This is a special case, it is very hard, to properly detect aliases at this level. Although the alias must also be found in this subgroup.
///
fn categorize(data: &Vec<Docs>) -> (Vec<&Docs>, Vec<&Docs>, Vec<&Docs>) {
    // For finding aliases.
    // Group docs into these subgroups.
    // Aliases can only exist within one subgroup, iterating over other items is a waste of time.
    let mut primop_lambdas: Vec<&Docs> = vec![];
    let mut non_primop_lambdas: Vec<&Docs> = vec![];
    let mut partially_applieds: Vec<&Docs> = vec![];

    for item in data.iter() {
        if let Some(lambda) = &item.docs.lambda {
            match lambda.countApplied {
                Some(0) | None => {
                    if lambda.isPrimop {
                        primop_lambdas.push(&item);
                    }
                    if !lambda.isPrimop {
                        non_primop_lambdas.push(&item);
                    }
                }
                _ => {
                    // #
                    partially_applieds.push(&item);
                }
            }
        }
    }
    (primop_lambdas, non_primop_lambdas, partially_applieds)
}

fn init_alias_map(
    data: &Vec<Docs>,
    categories: (Vec<&Docs>, Vec<&Docs>, Vec<&Docs>),
) -> HashMap<Rc<Vec<String>>, Vec<Rc<Vec<String>>>> {
    let (primop_lambdas, non_primop_lambdas, partially_applieds) = categories;

    let mut primops: Vec<&Docs> = vec![];
    primops.extend(primop_lambdas.iter());
    primops.extend(partially_applieds.iter());

    let mut non_primops: Vec<&Docs> = vec![];
    non_primops.extend(non_primop_lambdas.iter());
    non_primops.extend(partially_applieds.iter());

    let mut alias_map: HashMap<Rc<Vec<String>>, Vec<Rc<Vec<String>>>> = HashMap::new();
    for item in data.iter() {
        if let Some(lambda) = &item.docs.lambda {
            match lambda.countApplied {
                Some(0) => {
                    if lambda.isPrimop {
                        alias_map.insert(item.path.clone(), find_aliases(&item, &primop_lambdas));
                    }
                    if !lambda.isPrimop {
                        alias_map
                            .insert(item.path.clone(), find_aliases(&item, &non_primop_lambdas));
                    }
                }
                None => {
                    if lambda.isPrimop {
                        alias_map.insert(item.path.clone(), find_aliases(&item, &primops));
                    }
                    if !lambda.isPrimop {
                        alias_map.insert(item.path.clone(), find_aliases(&item, &non_primops));
                    }
                }
                Some(_) => {
                    alias_map.insert(item.path.clone(), find_aliases(&item, &partially_applieds));
                }
            };
        }
    }
    alias_map
}
impl<'a> BulkProcessing for Pasta {
    fn new(path: &PathBuf) -> Self {
        let start_time = Instant::now();
@@ -199,7 +113,13 @@ impl<'a> BulkProcessing for Pasta {

        let mut pos_doc_map: HashMap<&FilePosition, Option<NixDocComment>> = HashMap::new();
        for (path, lookups) in file_map.iter() {
            if !path.exists() {
                println!("file does not exist: {:?} Skipping.", path);
                continue;
            }

            let positions = collect_file_positions(lookups);

            println!(
                "{:?}: Lookups {:?}",
                path.file_name().unwrap(),
@@ -240,70 +160,3 @@ impl<'a> BulkProcessing for Pasta {
        }
    }
}

/// How to find aliases:
/// Match
/// partially applied functions -> special case, don't know how it is "correct". Would need access to the upvalues?
/// Simple lambdas (not partially applied)
/// Match primop: (Doesn't have source position)
/// Eq countApplied,
/// Eq content
/// Other isPrimop,
/// Content not empty
/// Match Non-Primop
/// Eq position
fn find_aliases(item: &Docs, list: &Vec<&Docs>) -> Vec<Rc<Vec<String>>> {
    // println!("find aliases for {:?} \n\n in {:?}", item, list);
    let res: Vec<Rc<Vec<String>>> = list
        .iter()
        .filter_map(|other| {
            if let (Some(s_meta), Some(o_meta)) = (&item.docs.lambda, &other.docs.lambda) {
                // Avoid creating an alias for the same item.
                if item.path == other.path {
                    return None;
                }
                if count_applied(s_meta) != 0
                    // Use less accurate name aliases. This can lead to false positives
                    // TODO: figure out the proper way
                    && count_applied(o_meta) == count_applied(s_meta)
                    && item.path.last().unwrap() == other.path.last().unwrap()
                {
                    return Some(other.path.clone());
                }
                return match s_meta.isPrimop {
                    true => {
                        let is_empty = match &s_meta.content {
                            Some(c) => c.is_empty(),
                            None => true,
                        };

                        if o_meta.isPrimop
                            && o_meta.content == s_meta.content
                            && !is_empty
                            && count_applied(s_meta) == 0
                            && count_applied(o_meta) == 0
                        {
                            return Some(other.path.clone());
                        }
                        None
                    }
                    false => {
                        if s_meta.position == o_meta.position
                            && count_applied(s_meta) == 0
                            && count_applied(o_meta) == 0
                        {
                            return Some(other.path.clone());
                        }
                        None
                    }
                };
            }
            None
        })
        .collect();
    res
}

fn count_applied(meta: &LambdaMeta) -> usize {
    meta.countApplied.unwrap_or(0)
}
@@ -1,11 +1,24 @@
mod alias;
mod bulk;
mod comment;
mod pasta;
mod position;
mod tests;

use clap::Parser;
use std::{collections::HashMap, path::PathBuf, println};
use clap::{Parser, ValueEnum};
use pasta::{AliasList, ContentSource, Docs, Lookups, PositionType, SourceOrigin, ValuePath};
use position::FilePosition;
use serde::Serialize;
use std::{
    collections::HashMap,
    fs::{create_dir_all, File},
    io::Write,
    path::PathBuf,
    println,
    process::exit,
    rc::Rc,
};
use textwrap::dedent;

use crate::{
    bulk::BulkProcessing,
@@ -13,6 +26,12 @@ use crate::{
    position::{DocComment, DocIndex},
};

#[derive(ValueEnum, Clone, Debug, PartialEq, Eq)]
enum Format {
    JSON,
    DIR,
}

#[derive(Debug, Parser)]
#[command(author, version, about)]
struct Options {
@@ -21,6 +40,11 @@ struct Options {
    #[arg(long, conflicts_with_all=["line", "column", "file"])]
    pos_file: Option<PathBuf>,

    #[arg(long)]
    format: Format,
    /// Path to a directory for the output file(s).
    out: String,

    /// Path to the *.nix file that should be inspected.
    /// If provided, --line and --column must also be set.
    #[arg(long, requires_all=["line", "column", "file"])]
@@ -38,6 +62,10 @@ pub fn main() {
    let opts = Options::parse();

    if let Some(nix_file) = opts.file {
        if !nix_file.exists() {
            println!("file does not exist: {:?}", nix_file);
            exit(1);
        }
        let mut positions = HashMap::new();
        positions.insert(opts.line.unwrap(), vec![opts.column.unwrap()]);

@@ -50,5 +78,196 @@ pub fn main() {

    if let Some(pos_file) = opts.pos_file {
        let data = Pasta::new(&pos_file);
        // data.doc_map

        let mut json_list: Vec<CompressedDocument> = vec![];
        for item in data.docs.iter() {
            let document = Document::new(&item, &data.doc_map);
            let matter = document.meta;
            let content = document.content;
            match opts.format {
                Format::DIR => {
                    if let Some((_, dir)) = item.path.split_last() {
                        let dir_dest = format!("{}/{}", opts.out, dir.join("/"));
                        let file_dest = format!("{}/{}.md", opts.out, item.path.join("/"));
                        create_dir_all(dir_dest).unwrap();
                        let mut file = File::create(file_dest).unwrap();

                        file.write_all("---\n".as_bytes()).unwrap();
                        file.write_all(serde_yaml::to_string(&matter).unwrap().as_bytes())
                            .unwrap();
                        file.write_all("---\n".as_bytes()).unwrap();

                        if let Some(content) = content.as_ref().map(|ref i| i.content).flatten() {
                            file.write_all(dedent(content).as_bytes()).unwrap();
                        }
                    }
                }
                Format::JSON => json_list.push(CompressedDocument {
                    m: CompressedDocumentFrontmatter {
                        al: matter.aliases,
                        ip: matter.is_primop,
                        pm: matter.primop_meta.map(|m| CompressedPrimopMatter {
                            ar: m.args,
                            ay: m.arity,
                        }),
                        pa: matter.path,
                    },
                    c: content.map(|c| c.clone()),
                }),
            }
        }
        if opts.format == Format::JSON {
            let mut file = File::create(opts.out).unwrap();
            file.write_all(serde_json::to_string(&json_list).unwrap().as_bytes())
                .unwrap();
        }
    }
}

/// Find the content which should be displayed.
/// The own attribute content is the correct one usually.
/// Sometimes there is no attribute content.
/// Then we search all the aliases for their attribute content.
/// As a fallback we can display the own lambda content.
fn find_document_content<'a>(
    item: &'a Docs,
    all: &'a HashMap<Rc<ValuePath>, Docs>,
) -> Option<ContentSource<'a>> {
    match &item.docs.attr.content {
        Some(ref c) if !c.is_empty() => Some(ContentSource {
            content: Some(c),
            source: Some(SourceOrigin {
                position: item.docs.attr.position.as_ref(),
                path: Some(&item.path),
                pos_type: Some(PositionType::Attribute),
            }),
        }),
        _ => match item.fst_alias_content(&all) {
            Some(d) => Some(d),
            None => item.lambda_content(),
        },
    }
}

#[derive(Serialize, Debug, Clone)]
struct Document<'a> {
    meta: DocumentFrontmatter<'a>,
    content: Option<ContentSource<'a>>,
}

#[derive(Serialize, Debug, Clone)]
struct DocumentFrontmatter<'a> {
    path: &'a Rc<ValuePath>,
    aliases: Option<&'a AliasList>,
    /// If an item is primop then it should have the PrimopMeta field.
    is_primop: Option<bool>,
    primop_meta: Option<PrimopMatter<'a>>,
    /// Where the attribute is defined at.
    attr_position: Option<&'a FilePosition>,
    /// Where the original lambda is defined at.
    lambda_position: Option<&'a FilePosition>,
    /// How many times the function is applied.
    count_applied: Option<usize>,
    content_meta: Option<SourceOrigin<'a>>,
    // content_position: Option<&'a FilePosition>,
}

pub trait FromDocs<'a> {
    fn new(docs: &'a Docs, data: &'a HashMap<Rc<ValuePath>, Docs>) -> Self;
}

impl<'a> FromDocs<'a> for Document<'a> {
    fn new(item: &'a Docs, data: &'a HashMap<Rc<ValuePath>, Docs>) -> Self {
        let content = find_document_content(item, &data);
        Self {
            meta: DocumentFrontmatter {
                // content_position: content
                //     .as_ref()
                //     .map(|t| t.source.as_ref().map(|s| s.position).flatten())
                //     .flatten(),
                content_meta: content.as_ref().map(|inner| inner.source.clone()).flatten(),
                path: &item.path,
                aliases: item.aliases.as_ref(),
                attr_position: item.docs.attr.position.as_ref(),
                lambda_position: item
                    .docs
                    .lambda
                    .as_ref()
                    .map(|i| i.position.as_ref())
                    .flatten(),
                is_primop: item.docs.lambda.as_ref().map(|i| i.isPrimop),
                count_applied: item.docs.lambda.as_ref().map(|i| i.countApplied).flatten(),
                primop_meta: match &item.docs.lambda {
                    None => None,
                    Some(lambda) if lambda.isPrimop => Some(PrimopMatter {
                        name: lambda.name.as_ref(),
                        args: lambda.args.as_ref(),
                        experimental: lambda.experimental,
                        arity: lambda.arity,
                    }),
                    _ => None,
                },
            },
            content,
        }
    }
}

#[derive(Serialize, Debug, Clone)]
struct PrimopMatter<'a> {
    pub name: Option<&'a String>,
    pub args: Option<&'a Vec<String>>,
    pub experimental: Option<bool>,
    pub arity: Option<usize>,
}

#[derive(Serialize, Debug, Clone)]
struct CompressedDocument<'a> {
    /// meta
    m: CompressedDocumentFrontmatter<'a>,
    /// content
    c: Option<ContentSource<'a>>,
}

#[derive(Serialize, Debug, Clone)]
struct CompressedDocumentFrontmatter<'a> {
    /// path
    pa: &'a Rc<ValuePath>,
    // aliases
    al: Option<&'a AliasList>,
    /// If an item is primop then it should have the PrimopMeta field.
    ip: Option<bool>,
    /// primop meta
    pm: Option<CompressedPrimopMatter<'a>>,
}

#[derive(Serialize, Debug, Clone)]
struct CompressedContentSource<'a> {
    // content
    c: Option<&'a String>,
    // position
    p: Option<&'a FilePosition>,
}

#[derive(Serialize, Debug, Clone)]
struct CompressedPrimopMatter<'a> {
    // arguments
    pub ar: Option<&'a Vec<String>>,
    // arity
    pub ay: Option<usize>,
}

// Translation matrix
// m: meta
// c: content
// p: position
//
// al: aliases
// ar: arguments
// ay: arity
//
// ip: is primop
//
// pa: path
// pm: primop meta
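For orientation, find_document_content above boils down to a three-step fallback chain. A hedged restatement using the Lookups helpers (own_attr_content is a hypothetical stand-in for the match on item.docs.attr.content; it is not a helper in this commit):

    // 1. the item's own non-empty attribute doc-comment
    // 2. the attribute doc-comment of the first alias that has one
    // 3. the item's own lambda doc-comment (see lambda_content in pasta.rs)
    let content = own_attr_content(item)
        .or_else(|| item.fst_alias_content(&data.doc_map))
        .or_else(|| item.lambda_content());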
@@ -40,16 +40,112 @@ pub struct DocsMeta {
    pub attr: AttrMeta,
}

pub type ValuePath = Vec<String>;
pub type AliasList = Vec<Rc<ValuePath>>;

#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Docs {
    pub docs: DocsMeta,
    pub aliases: Option<Vec<Rc<Vec<String>>>>,
    pub path: Rc<Vec<String>>,
    pub aliases: Option<AliasList>,
    pub path: Rc<ValuePath>,
}

#[derive(Serialize, Debug, Clone)]
pub struct ContentSource<'a> {
    pub content: Option<&'a String>,
    pub source: Option<SourceOrigin<'a>>,
}

#[derive(Serialize, Debug, Clone)]
pub struct SourceOrigin<'a> {
    pub position: Option<&'a FilePosition>,
    pub path: Option<&'a Rc<ValuePath>>,
    pub pos_type: Option<PositionType>,
}

#[derive(Serialize, Debug, Clone)]
pub enum PositionType {
    Attribute,
    Lambda,
}

pub trait Lookups<'a> {
    /// Returns the Lambda ContentSource.
    ///
    /// If there is a correct [ContentSource] return it.
    ///
    /// Partially applied functions still carry the underlying documentation, which is wrong.
    /// This inherited (but wrong) documentation is discarded.
    fn lambda_content(self: &'a Self) -> Option<ContentSource<'a>>;

    /// Return the docs of the first alias with docs.
    ///
    /// Only look at the aliases with content in the following order.
    /// Return content from an alias with (1) attribute content, or (2) lambda content.
    fn fst_alias_content(
        self: &'a Self,
        data: &'a HashMap<Rc<ValuePath>, Docs>,
    ) -> Option<ContentSource<'a>>;
}

impl<'a> Lookups<'a> for Docs {
    fn lambda_content(self: &'a Self) -> Option<ContentSource<'a>> {
        self.docs
            .lambda
            .as_ref()
            .map(|i| {
                if i.countApplied == Some(0) || (i.countApplied == None && i.isPrimop) {
                    Some(ContentSource {
                        content: i.content.as_ref(),
                        source: Some(SourceOrigin {
                            position: i.position.as_ref(),
                            path: Some(&self.path),
                            pos_type: Some(PositionType::Lambda),
                        }),
                    })
                } else {
                    None
                }
            })
            .flatten()
    }

    fn fst_alias_content(
        self: &'a Self,
        data: &'a HashMap<Rc<ValuePath>, Docs>,
    ) -> Option<ContentSource<'a>> {
        match &self.aliases {
            Some(aliases) => {
                let x = aliases
                    .iter()
                    .find_map(|alias_path| {
                        data.get(alias_path).map(|i| {
                            if i.docs.attr.content.is_some() {
                                Some(ContentSource {
                                    content: i.docs.attr.content.as_ref(),
                                    source: Some(SourceOrigin {
                                        position: i.docs.attr.position.as_ref(),
                                        path: Some(&i.path),
                                        pos_type: Some(PositionType::Attribute),
                                    }),
                                })
                            } else {
                                // i.lambda_content()
                                None
                            }
                        })
                    })
                    .flatten();
                x
            }
            _ => None,
        }
    }
}

pub struct Pasta {
    pub docs: Vec<Docs>,
    pub doc_map: HashMap<Rc<Vec<String>>, Docs>,
    pub doc_map: HashMap<Rc<ValuePath>, Docs>,
}

pub trait Files {
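The gate inside lambda_content is compact but easy to misread. A hedged restatement of when lambda docs are surfaced (same logic, extracted into a standalone predicate purely for illustration):

    // Some(0)            -> plain lambda, its docs are trusted
    // None and isPrimop  -> builtins report no countApplied; docs are trusted
    // Some(n) with n > 0 -> partially applied; inherited docs are discarded
    fn surfaces_lambda_docs(count_applied: Option<usize>, is_primop: bool) -> bool {
        count_applied == Some(0) || (count_applied.is_none() && is_primop)
    }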
@@ -8,7 +8,6 @@ use std::fs::File;
use std::io::{BufRead, BufReader};
use std::process::exit;
use std::rc::Rc;
use std::time::Instant;

use std::{format, fs, path::PathBuf, println};

@@ -120,28 +119,12 @@ impl<'a> DocComment<'a> for DocIndex<'a> {
    fn new(file: &'a PathBuf, positions: HashMap<usize, Vec<usize>>) -> Self {
        let src = get_src(file);
        let rc: Rc<String> = Rc::new(src);
        let mut start_time = Instant::now();

        let ast = rnix::Root::parse(Rc::clone(&rc).as_str()).syntax();
        let mut end_time = Instant::now();
        // println!("{:?} - Parsed ast", end_time - start_time);

        start_time = Instant::now();
        let (pos_idx, inverse_pos_idx) = init_pos_idx(&file, positions);
        end_time = Instant::now();
        // println!(
        //     "{:?} - Translated col,line into abs positions",
        //     end_time - start_time
        // );

        // Call your function here
        start_time = Instant::now();
        let node_idx = init_node_idx(&ast, &inverse_pos_idx);
        end_time = Instant::now();

        // println!(
        //     "{:?} - Find all ast nodes for positions",
        //     end_time - start_time
        // );

        return Self {
            file,
@@ -162,7 +145,6 @@ impl<'a> DocComment<'a> for DocIndex<'a> {
        }
        if let Some(idx) = idx {
            let expr = self.node_idx.get(idx);
            // println!("L{}:C{}, expr: {:?}", line, column, expr);
            if let Some(Some(expr)) = expr {
                let doc = match expr.kind() {
                    rnix::SyntaxKind::NODE_LAMBDA => {
@@ -1,12 +1,13 @@
#[cfg(test)]
mod tests {

    use serde::Serialize;
    use std::{collections::HashMap, ffi::OsStr, format, fs, path::PathBuf, println, rc::Rc};

    use crate::{
        bulk::BulkProcessing,
        pasta::Pasta,
        pasta::{AliasList, Pasta, ValuePath},
        position::{DocComment, DocIndex, TextPosition},
        Document, FromDocs,
    };

    use expect_test::expect_file;
@@ -38,7 +39,7 @@
    }

    #[test]
    fn test_main() {
    fn test_atoms() {
        dir_tests("atom", "nix", |path| {
            let mut pos_path = path.clone();
            pos_path.set_extension("pos");
@@ -54,11 +55,66 @@
            format!("{:?}", pos.get_docs(line, column))
        })
    }
    #[derive(Serialize, Debug)]
    struct TestAlias {
        aliases: Option<AliasList>,
        path: Rc<ValuePath>,
    }
    #[test]
    fn test_aliases() {
        dir_tests("aliases", "json", |path| {
            let data: Pasta = Pasta::new(&PathBuf::from(path));
            serde_json::to_string_pretty(&data.docs).unwrap()
            let aliases: Vec<TestAlias> = data
                .docs
                .into_iter()
                .map(|i| TestAlias {
                    aliases: i.aliases.clone(),
                    path: i.path.clone(),
                })
                .collect();

            serde_json::to_string_pretty(&aliases).unwrap()
        })
    }

    #[derive(Serialize, Debug)]
    struct TestContent {
        name: String,
        content: Option<String>,
        source: Option<String>,
    }
    #[test]
    fn test_content_inheritance() {
        dir_tests("inheritance", "json", |path| {
            let data: Pasta = Pasta::new(&PathBuf::from(path));
            let contents: Vec<TestContent> = data
                .docs
                .into_iter()
                .map(|ref i| {
                    let document = &Document::new(i, &data.doc_map);
                    return TestContent {
                        name: document.meta.path.join("."),
                        content: document
                            .content
                            .as_ref()
                            .map(|inner| inner.content.map(|i| i.clone()))
                            .flatten(),
                        source: document
                            .content
                            .as_ref()
                            .map(|inner| {
                                inner
                                    .source
                                    .as_ref()
                                    .map(|i| i.path.map(|p| p.join(".")))
                                    .flatten()
                            })
                            .flatten(),
                    };
                })
                .collect();

            serde_json::to_string_pretty(&contents).unwrap()
        })
    }
@@ -1,26 +1,5 @@
[
{
"docs": {
"lambda": {
"isPrimop": true,
"name": "add",
"args": [
"e1",
"e2"
],
"experimental": false,
"arity": 2,
"content": "\n Return the sum of the numbers *e1* and *e2*.\n "
},
"attr": {
"position": {
"file": "test_data/assets/default.nix",
"line": 68,
"column": 23
},
"content": null
}
},
"aliases": [
[
"lib",
@@ -38,27 +17,6 @@
]
},
{
"docs": {
"lambda": {
"isPrimop": true,
"name": "add",
"args": [
"e1",
"e2"
],
"experimental": false,
"arity": 2,
"content": "\n Return the sum of the numbers *e1* and *e2*.\n "
},
"attr": {
"position": {
"file": "test_data/assets/trivial.nix",
"line": 269,
"column": 21
},
"content": null
}
},
"aliases": [
[
"lib",
@@ -76,17 +34,6 @@
]
},
{
"docs": {
"lambda": {
"isPrimop": true,
"content": "\n Return the sum of the numbers *e1* and *e2*.\n ",
"countApplied": 0
},
"attr": {
"position": null,
"content": ""
}
},
"aliases": [
[
"lib",
@@ -1,25 +1,5 @@
[
{
"docs": {
"lambda": {
"isPrimop": false,
"position": {
"file": "test_data/assets/lists.nix",
"line": 204,
"column": 5
},
"content": "\n The binary operation to run, where the two arguments are:\n 1. `acc`: The current accumulator value: Either the initial one for the first iteration, or the result of the previous iteration\n 2. `x`: The corresponding list element for this iteration\n ",
"countApplied": 0
},
"attr": {
"position": {
"file": "test_data/assets/default.nix",
"line": 92,
"column": 25
},
"content": null
}
},
"aliases": [
[
"lib",
@@ -33,26 +13,6 @@
]
},
{
"docs": {
"lambda": {
"isPrimop": false,
"position": {
"file": "test_data/assets/lists.nix",
"line": 204,
"column": 5
},
"content": "\n The binary operation to run, where the two arguments are:\n 1. `acc`: The current accumulator value: Either the initial one for the first iteration, or the result of the previous iteration\n 2. `x`: The corresponding list element for this iteration\n ",
"countApplied": 0
},
"attr": {
"position": {
"file": "test_data/assets/lists.nix",
"line": 198,
"column": 3
},
"content": "\n Reduce a list by applying a binary operator from left to right,\n starting with an initial accumulator.\n Before each application of the operator, the accumulator value is evaluated.\n This behavior makes this function stricter than [`foldl`](#function-library-lib.lists.foldl).\n Unlike [`builtins.foldl'`](https://nixos.org/manual/nix/unstable/language/builtins.html#builtins-foldl'),\n the initial accumulator argument is evaluated before the first iteration.\n A call like\n ```nix\n foldl' op acc₀ [ x₀ x₁ x₂ ... xₙ₋₁ xₙ ]\n ```\n is (denotationally) equivalent to the following,\n but with the added benefit that `foldl'` itself will never overflow the stack.\n ```nix\n let\n acc₁ = builtins.seq acc₀ (op acc₀ x₀ );\n acc₂ = builtins.seq acc₁ (op acc₁ x₁ );\n acc₃ = builtins.seq acc₂ (op acc₂ x₂ );\n ...\n accₙ = builtins.seq accₙ₋₁ (op accₙ₋₁ xₙ₋₁);\n accₙ₊₁ = builtins.seq accₙ (op accₙ xₙ );\n in\n accₙ₊₁\n # Or ignoring builtins.seq\n op (op (... (op (op (op acc₀ x₀) x₁) x₂) ...) xₙ₋₁) xₙ\n ```\n\n # Example\n\n ```nix\n foldl' (acc: x: acc + x) 0 [1 2 3]\n => 6\n ```\n\n # Type\n\n ```\n foldl' :: (acc -> x -> acc) -> acc -> [x] -> acc\n ```\n\n # Arguments\n\n - [op] The binary operation to run, where the two arguments are:\n\n1. `acc`: The current accumulator value: Either the initial one for the first iteration, or the result of the previous iteration\n2. `x`: The corresponding list element for this iteration\n - [acc] The initial accumulator value\n - [list] The list to fold\n\n "
}
},
"aliases": [
[
"lib",
@@ -66,24 +26,6 @@
]
},
{
"docs": {
"lambda": {
"isPrimop": true,
"name": "foldl'",
"args": [
"op",
"nul",
"list"
],
"experimental": false,
"arity": 3,
"content": "\n Reduce a list by applying a binary operator, from left to right,\n e.g. `foldl' op nul [x0 x1 x2 ...] : op (op (op nul x0) x1) x2)\n ...`. For example, `foldl' (x: y: x + y) 0 [1 2 3]` evaluates to 6.\n The return value of each application of `op` is evaluated immediately,\n even for intermediate values.\n "
},
"attr": {
"position": null,
"content": null
}
},
"aliases": [],
"path": [
"builtins",
@@ -1,26 +1,11 @@
[
{
"docs": {
"lambda": {
"isPrimop": false,
"position": {
"file": "test_data/assets/strings.nix",
"line": 84,
"column": 25
},
"content": "\n Map a function over a list and concatenate the resulting strings.\n\n # Example\n\n ```nix\n concatMapStrings (x: \"a\" + x) [\"foo\" \"bar\"]\n => \"afooabar\"\n ```\n\n # Type\n\n ```\n concatMapStrings :: (a -> string) -> [a] -> string\n ```\n\n # Arguments\n\n - [f] \n - [list] \n\n ",
"countApplied": 1
},
"attr": {
"position": {
"file": "test_data/assets/strings.nix",
"line": 243,
"column": 3
},
"content": "\n Concatenate a list of strings, adding a newline at the end of each one.\n Defined as `concatMapStrings (s: s + \"\\n\")`.\n\n # Example\n\n ```nix\n concatLines [ \"foo\" \"bar\" ]\n => \"foo\\nbar\\n\"\n ```\n\n # Type\n\n ```\n concatLines :: [string] -> string\n ```\n "
}
},
"aliases": [],
"aliases": [
[
"lib",
"concatLines"
]
],
"path": [
"lib",
"strings",
@@ -28,27 +13,13 @@
]
},
{
"docs": {
"lambda": {
"isPrimop": false,
"position": {
"file": "test_data/assets/strings.nix",
"line": 84,
"column": 25
},
"content": "\n Map a function over a list and concatenate the resulting strings.\n\n # Example\n\n ```nix\n concatMapStrings (x: \"a\" + x) [\"foo\" \"bar\"]\n => \"afooabar\"\n ```\n\n # Type\n\n ```\n concatMapStrings :: (a -> string) -> [a] -> string\n ```\n\n # Arguments\n\n - [f] \n - [list] \n\n ",
"countApplied": 1
},
"attr": {
"position": {
"file": "test_data/assets/default.nix",
"line": 98,
"column": 27
},
"content": null
}
},
"aliases": [],
"aliases": [
[
"lib",
"strings",
"concatLines"
]
],
"path": [
"lib",
"concatLines"
17 pesto/test_data/inheritance/add.expect Normal file
@@ -0,0 +1,17 @@
[
{
"name": "lib.add",
"content": "\n Return the sum of the numbers *e1* and *e2*.\n ",
"source": "lib.add"
},
{
"name": "lib.trivial.add",
"content": "\n Return the sum of the numbers *e1* and *e2*.\n ",
"source": "lib.trivial.add"
},
{
"name": "builtins.add",
"content": "\n Return the sum of the numbers *e1* and *e2*.\n ",
"source": "builtins.add"
}
]
56 pesto/test_data/inheritance/add.json Normal file
@@ -0,0 +1,56 @@
[
{
"docs": {
"attr": {
"position": {
"column": 23,
"file": "test_data/assets/default.nix",
"line": 68
}
},
"lambda": {
"args": ["e1", "e2"],
"arity": 2,
"content": "\n Return the sum of the numbers *e1* and *e2*.\n ",
"experimental": false,
"isPrimop": true,
"name": "add",
"position": null
}
},
"path": ["lib", "add"]
},
{
"docs": {
"attr": {
"position": {
"column": 21,
"file": "test_data/assets/trivial.nix",
"line": 269
}
},
"lambda": {
"args": ["e1", "e2"],
"arity": 2,
"content": "\n Return the sum of the numbers *e1* and *e2*.\n ",
"experimental": false,
"isPrimop": true,
"name": "add",
"position": null
}
},
"path": ["lib", "trivial", "add"]
},
{
"docs": {
"attr": { "content": "", "position": null },
"lambda": {
"content": "\n Return the sum of the numbers *e1* and *e2*.\n ",
"countApplied": 0,
"isPrimop": true,
"position": null
}
},
"path": ["builtins", "add"]
}
]
7 pesto/test_data/inheritance/concatStrings.expect Normal file
@@ -0,0 +1,7 @@
[
{
"name": "pkgs.lib.strings.concatStrings",
"content": "\n Concatenate a list of strings.\n\n # Example\n\n ```nix\n concatStrings [\"foo\" \"bar\"]\n => \"foobar\"\n ```\n\n # Type\n\n ```\n concatStrings :: [string] -> string\n ```\n ",
"source": "pkgs.lib.strings.concatStrings"
}
]
20 pesto/test_data/inheritance/concatStrings.json Normal file
@@ -0,0 +1,20 @@
[
{
"docs": {
"attr": {
"position": {
"column": 3,
"file": "test_data/assets/strings.nix",
"line": 60
}
},
"lambda": {
"content": "\n Concatenate a list of strings with a separator between each\n element, e.g. `concatStringsSep \"/\" [\"usr\" \"local\" \"bin\"] ==\n \"usr/local/bin\"`.\n ",
"countApplied": 1,
"isPrimop": true,
"position": null
}
},
"path": ["pkgs", "lib", "strings", "concatStrings"]
}
]
17 pesto/test_data/inheritance/foldl.expect Normal file
@@ -0,0 +1,17 @@
[
{
"name": "lib.foldl'",
"content": "\n Reduce a list by applying a binary operator from left to right,\n starting with an initial accumulator.\n Before each application of the operator, the accumulator value is evaluated.\n This behavior makes this function stricter than [`foldl`](#function-library-lib.lists.foldl).\n Unlike [`builtins.foldl'`](https://nixos.org/manual/nix/unstable/language/builtins.html#builtins-foldl'),\n the initial accumulator argument is evaluated before the first iteration.\n A call like\n ```nix\n foldl' op acc₀ [ x₀ x₁ x₂ ... xₙ₋₁ xₙ ]\n ```\n is (denotationally) equivalent to the following,\n but with the added benefit that `foldl'` itself will never overflow the stack.\n ```nix\n let\n acc₁ = builtins.seq acc₀ (op acc₀ x₀ );\n acc₂ = builtins.seq acc₁ (op acc₁ x₁ );\n acc₃ = builtins.seq acc₂ (op acc₂ x₂ );\n ...\n accₙ = builtins.seq accₙ₋₁ (op accₙ₋₁ xₙ₋₁);\n accₙ₊₁ = builtins.seq accₙ (op accₙ xₙ );\n in\n accₙ₊₁\n # Or ignoring builtins.seq\n op (op (... (op (op (op acc₀ x₀) x₁) x₂) ...) xₙ₋₁) xₙ\n ```\n\n # Example\n\n ```nix\n foldl' (acc: x: acc + x) 0 [1 2 3]\n => 6\n ```\n\n # Type\n\n ```\n foldl' :: (acc -> x -> acc) -> acc -> [x] -> acc\n ```\n\n # Arguments\n\n - [op] The binary operation to run, where the two arguments are:\n\n1. `acc`: The current accumulator value: Either the initial one for the first iteration, or the result of the previous iteration\n2. `x`: The corresponding list element for this iteration\n - [acc] The initial accumulator value\n - [list] The list to fold\n\n ",
"source": "lib.lists.foldl'"
},
{
"name": "lib.lists.foldl'",
"content": "\n Reduce a list by applying a binary operator from left to right,\n starting with an initial accumulator.\n Before each application of the operator, the accumulator value is evaluated.\n This behavior makes this function stricter than [`foldl`](#function-library-lib.lists.foldl).\n Unlike [`builtins.foldl'`](https://nixos.org/manual/nix/unstable/language/builtins.html#builtins-foldl'),\n the initial accumulator argument is evaluated before the first iteration.\n A call like\n ```nix\n foldl' op acc₀ [ x₀ x₁ x₂ ... xₙ₋₁ xₙ ]\n ```\n is (denotationally) equivalent to the following,\n but with the added benefit that `foldl'` itself will never overflow the stack.\n ```nix\n let\n acc₁ = builtins.seq acc₀ (op acc₀ x₀ );\n acc₂ = builtins.seq acc₁ (op acc₁ x₁ );\n acc₃ = builtins.seq acc₂ (op acc₂ x₂ );\n ...\n accₙ = builtins.seq accₙ₋₁ (op accₙ₋₁ xₙ₋₁);\n accₙ₊₁ = builtins.seq accₙ (op accₙ xₙ );\n in\n accₙ₊₁\n # Or ignoring builtins.seq\n op (op (... (op (op (op acc₀ x₀) x₁) x₂) ...) xₙ₋₁) xₙ\n ```\n\n # Example\n\n ```nix\n foldl' (acc: x: acc + x) 0 [1 2 3]\n => 6\n ```\n\n # Type\n\n ```\n foldl' :: (acc -> x -> acc) -> acc -> [x] -> acc\n ```\n\n # Arguments\n\n - [op] The binary operation to run, where the two arguments are:\n\n1. `acc`: The current accumulator value: Either the initial one for the first iteration, or the result of the previous iteration\n2. `x`: The corresponding list element for this iteration\n - [acc] The initial accumulator value\n - [list] The list to fold\n\n ",
"source": "lib.lists.foldl'"
},
{
"name": "builtins.foldl'",
"content": "\n Reduce a list by applying a binary operator, from left to right,\n e.g. `foldl' op nul [x0 x1 x2 ...] : op (op (op nul x0) x1) x2)\n ...`. For example, `foldl' (x: y: x + y) 0 [1 2 3]` evaluates to 6.\n The return value of each application of `op` is evaluated immediately,\n even for intermediate values.\n ",
"source": "builtins.foldl'"
}
]
57 pesto/test_data/inheritance/foldl.json Normal file
@@ -0,0 +1,57 @@
[
{
"docs": {
"attr": {
"position": {
"column": 25,
"file": "test_data/assets/default.nix",
"line": 92
}
},
"lambda": {
"isPrimop": false,
"position": {
"column": 5,
"file": "test_data/assets/lists.nix",
"line": 204
}
}
},
"path": ["lib", "foldl'"]
},
{
"docs": {
"attr": {
"position": {
"column": 3,
"file": "test_data/assets/lists.nix",
"line": 198
}
},
"lambda": {
"isPrimop": false,
"position": {
"column": 5,
"file": "test_data/assets/lists.nix",
"line": 204
}
}
},
"path": ["lib", "lists", "foldl'"]
},
{
"docs": {
"attr": { "position": null },
"lambda": {
"args": ["op", "nul", "list"],
"arity": 3,
"content": "\n Reduce a list by applying a binary operator, from left to right,\n e.g. `foldl' op nul [x0 x1 x2 ...] : op (op (op nul x0) x1) x2)\n ...`. For example, `foldl' (x: y: x + y) 0 [1 2 3]` evaluates to 6.\n The return value of each application of `op` is evaluated immediately,\n even for intermediate values.\n ",
"experimental": false,
"isPrimop": true,
"name": "foldl'",
"position": null
}
},
"path": ["builtins", "foldl'"]
}
]
12 pesto/test_data/inheritance/strings.expect Normal file
@@ -0,0 +1,12 @@
[
{
"name": "lib.strings.concatLines",
"content": "\n Concatenate a list of strings, adding a newline at the end of each one.\n Defined as `concatMapStrings (s: s + \"\\n\")`.\n\n # Example\n\n ```nix\n concatLines [ \"foo\" \"bar\" ]\n => \"foo\\nbar\\n\"\n ```\n\n # Type\n\n ```\n concatLines :: [string] -> string\n ```\n ",
"source": "lib.strings.concatLines"
},
{
"name": "lib.concatLines",
"content": "\n Concatenate a list of strings, adding a newline at the end of each one.\n Defined as `concatMapStrings (s: s + \"\\n\")`.\n\n # Example\n\n ```nix\n concatLines [ \"foo\" \"bar\" ]\n => \"foo\\nbar\\n\"\n ```\n\n # Type\n\n ```\n concatLines :: [string] -> string\n ```\n ",
"source": "lib.strings.concatLines"
}
]
42 pesto/test_data/inheritance/strings.json Normal file
@@ -0,0 +1,42 @@
[
{
"docs": {
"attr": {
"position": {
"column": 3,
"file": "test_data/assets/strings.nix",
"line": 243
}
},
"lambda": {
"isPrimop": false,
"position": {
"column": 25,
"file": "test_data/assets/strings.nix",
"line": 84
}
}
},
"path": ["lib", "strings", "concatLines"]
},
{
"docs": {
"attr": {
"position": {
"column": 27,
"file": "test_data/assets/default.nix",
"line": 98
}
},
"lambda": {
"isPrimop": false,
"position": {
"column": 25,
"file": "test_data/assets/strings.nix",
"line": 84
}
}
},
"path": ["lib", "concatLines"]
}
]
@@ -1,7 +1,7 @@
// import { DocsFrontmatter, getMdxMeta } from "@/components/ListGroup";
import {
  docsDir,
  extractHeadings,
  getMdxMeta,
  getMdxSource,
  mdxRenderOptions,
} from "@/utils";
@@ -39,20 +39,22 @@ interface TocProps {
const Toc = async (props: TocProps) => {
  const { mdxSource } = props;
  const headings = await extractHeadings(mdxSource);

  return (
    <Box
      sx={{
        top: 70,
        right: 0,
        position: "absolute",
        order: 2,
        width: "19rem",
        py: 4,
        px: 2,
        mr: 8,
      }}
      component={"aside"}
    >
      <Box
        sx={{
          position: "fixed",
          top: 0,
          pt: 4,
          pl: 2,
        }}
@@ -85,20 +87,52 @@ const Toc = async (props: TocProps) => {
// using the `params` returned by `generateStaticParams`
export default async function Page(props: { params: { id: string[] } }) {
  const { mdxSource } = await getMdxSource(props.params.id);
  const meta = await getMdxMeta(props.params.id);
  console.log("matter", meta.compiled.frontmatter);
  const { frontmatter } = meta.compiled;
  return (
    <>
      <Toc mdxSource={mdxSource} />
      <Box sx={{ display: "flex" }}>
        <Toc mdxSource={mdxSource} />
        <Box sx={{ order: 1, width: "60rem", marginInline: "auto", py: 2 }}>
          <Typography variant="h2" component={"h1"}>
            {frontmatter.path ? frontmatter.path.join(".") : frontmatter.title}
          </Typography>
          <MDXRemote
            options={{
              parseFrontmatter: true,
              mdxOptions: mdxRenderOptions,
            }}
            source={mdxSource}
            components={{
              h1: (p) => (
                // @ts-ignore
                <Typography variant="h3" component={"h2"} {...p} />
              ),
              h2: (p) => (
                // @ts-ignore
                <Typography variant="h4" component={"h3"} {...p} />
              ),
              h3: (p) => (
                // @ts-ignore
                <Typography variant="h5" component={"h4"} {...p} />
              ),
              h4: (p) => (
                // @ts-ignore
                <Typography variant="h6" component={"h5"} {...p} />
              ),
              h5: (p) => (
                // @ts-ignore
                <Typography variant="subtitle1" component={"h6"} {...p} />
              ),
              h6: (p) => (
                // @ts-ignore
                <Typography variant="subtitle2" component={"h6"} {...p} />
              ),
            }}
          />
          <Button sx={{ textTransform: "none", my: 4 }} startIcon={<Edit />}>
            Edit source
          <Button sx={{ textTransform: "none", my: 4 }} startIcon={<Edit />} >
            Edit source {frontmatter.}
          </Button>
        </Box>
      </Box>
@@ -17,6 +17,7 @@ export const ListGroup = async (props: ListGroupProps) => {
  return sorted.map(async ([name, entry], idx) => {
    if (Array.isArray(entry)) {
      const matter = await getMdxMeta(entry);
      const { frontmatter } = matter.compiled;
      return (
        <Link key={`${idx}`} href={`/ref/${entry.join("/")}`}>
          <ListItem
@@ -31,7 +32,13 @@ export const ListGroup = async (props: ListGroupProps) => {
            }}
          >
            <ListEntry currentPath={`/ref/${entry.join("/")}`}>
              <ListItemText primary={matter.compiled.frontmatter.title} />
              <ListItemText
                primary={
                  frontmatter.path
                    ? frontmatter.path.join(".")
                    : frontmatter.title
                }
              />
            </ListEntry>
          </ListItem>
        </Link>
25 website/src/docs/lib/add.md Normal file
@@ -0,0 +1,25 @@
---
path:
- lib
- trivial
- add
aliases:
- - lib
  - add
is_primop: true
primop_meta:
  name: add
  args:
  - e1
  - e2
  experimental: false
  arity: 2
attr_position:
  file: /nix/store/knnp4h12pk09vfn18lrrrnh54zsvw3ba-source/lib/trivial.nix
  line: 269
  column: 21
lambda_position: null
count_applied: null
---
Return the sum of the numbers *e1* and *e2*.
@@ -11,6 +11,17 @@ Return an attribute from nested attribute sets.

# Example

## H2

### H3

#### H4

##### H5

###### H6


```nix
x = { a = { b = 3; }; }
# ["a" "b"] is equivalent to x.a.b
@@ -67,7 +67,7 @@ export async function generateStaticSidebarEntries() {
  return paths;
}

export type DocsFrontmatter = { title: String };
export type DocsFrontmatter = { title: String; path?: string[] };

export const getMdxMeta = async (
  parts: string[]