patch abnf to 0.12.0, uncomment leo-abnf crate

damirka 2021-07-17 02:37:34 +03:00
parent 9d8fdc7efb
commit 052b0a142b
3 changed files with 237 additions and 177 deletions

Cargo.lock (generated)

@@ -2,6 +2,25 @@
# It is not intended for manual editing.
version = 3
[[package]]
name = "abnf"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33741baa462d86e43fdec5e8ffca7c6ac82847ad06cbfb382c1bdbf527de9e6b"
dependencies = [
"abnf-core",
"nom",
]
[[package]]
name = "abnf-core"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c44e09c43ae1c368fb91a03a566472d0087c26cf7e1b9e8e289c14ede681dd7d"
dependencies = [
"nom",
]
[[package]]
name = "addr2line"
version = "0.15.2"
@@ -50,6 +69,12 @@ version = "1.0.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "595d3cfa7a60d4555cb5067b99f07142a08ea778de5cf993f7b75c7d8fabc486"
[[package]]
name = "arrayvec"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b"
[[package]]
name = "assert_cmd"
version = "1.0.7"
@@ -1101,6 +1126,7 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
name = "leo-abnf"
version = "1.5.2"
dependencies = [
"abnf",
"anyhow",
]
@@ -1322,6 +1348,19 @@ dependencies = [
"serde_yaml",
]
[[package]]
name = "lexical-core"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe"
dependencies = [
"arrayvec",
"bitflags",
"cfg-if 1.0.0",
"ryu",
"static_assertions",
]
[[package]]
name = "libc"
version = "0.2.95"
@@ -1517,6 +1556,17 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab250442c86f1850815b5d268639dff018c0627022bc1940eb2d642ca1ce12f0"
[[package]]
name = "nom"
version = "7.0.0-alpha1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd43cd1e53168596e629accc602ada1297f5125fed588d62cf8be81175b46002"
dependencies = [
"lexical-core",
"memchr",
"version_check",
]
[[package]]
name = "notify"
version = "4.0.17"
@@ -2499,6 +2549,12 @@ dependencies = [
"winapi 0.3.9",
]
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "strsim"
version = "0.8.0"

Cargo.toml (leo-abnf)

@@ -19,3 +19,6 @@ edition = "2018"
[dependencies.anyhow]
version = "1.0"
[dependencies.abnf]
version = "0.12.0"
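
For context, the abnf 0.12 API surface this dependency pins is small: abnf::rulelist parses a grammar string into a Vec<Rule>, and each Rule exposes name() and node(). Below is a minimal sketch of that surface; the one-rule grammar string is illustrative, not from this repository.

// Illustrative sketch (not part of this commit) of the abnf 0.12 API
// that leo-abnf relies on in src/main.rs below.
fn abnf_demo() -> anyhow::Result<()> {
    // Every ABNF rule must be terminated by a newline.
    let rules = abnf::rulelist("greeting = \"hello\" / \"hi\"\n")
        .map_err(|e| anyhow::anyhow!(e))?;
    for rule in &rules {
        // name() is the left-hand side, node() the parsed right-hand side.
        println!("{} = {:?}", rule.name(), rule.node());
    }
    Ok(())
}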

src/main.rs (leo-abnf)

@@ -38,180 +38,181 @@
// ;;;;;;;;;
// ```
//
// use abnf::types::{Node, Rule};
// use anyhow::Result;
// use std::collections::{HashMap, HashSet};
//
// /// Processor's scope. Used when code block or definition starts or ends.
// #[derive(Debug, Clone)]
// enum Scope {
// Free,
// Code,
// Definition(Rule),
// }
//
// /// Transforms an ABNF file into Markdown.
// #[derive(Debug, Clone)]
// struct Processor<'a> {
// rules: HashMap<String, Rule>,
// grammar: &'a str,
// scope: Scope,
// line: u32,
// out: String,
// }
//
// impl<'a> Processor<'a> {
// fn new(grammar: &'a str, abnf: Vec<Rule>) -> Processor<'a> {
// // we need a hashmap to pull rules easily
// let rules: HashMap<String, Rule> = abnf.into_iter().map(|rule| (rule.name().to_string(), rule)).collect();
//
// Processor {
// grammar,
// line: 0,
// out: String::new(),
// rules,
// scope: Scope::Free,
// }
// }
//
// /// Main function for this struct.
// /// Goes through each line and transforms it into proper markdown.
// fn process(&mut self) {
// let lines = self.grammar.lines();
// let mut prev = "";
//
// for line in lines {
// self.line += 1;
//
// code block in comment: extra indentation after ';' marks code (not highlighted as abnf)
// if let Some(code) = line.strip_prefix(";  ") {
// self.enter_scope(Scope::Code);
// self.append_str(code);
//
// // just comment. end of code block
// } else if let Some(code) = line.strip_prefix("; ") {
// self.enter_scope(Scope::Free);
// self.append_str(code);
//
// // horizontal rule - section separator
// } else if line.starts_with(";;;;;;;;;;") {
// self.enter_scope(Scope::Free);
// self.append_str("\n--------\n");
//
// // empty line in comment. end of code block
// } else if line.starts_with(';') {
// self.enter_scope(Scope::Free);
// self.append_str("\n\n");
//
// // just empty line. end of doc, start of definition
// } else if line.is_empty() {
// self.enter_scope(Scope::Free);
// self.append_str("");
//
// // definition (may be multiline)
// } else {
// // if there's an equality sign and previous line was empty
// if line.contains('=') && prev.is_empty() {
// let (def, _) = line.split_at(line.find('=').unwrap());
// let def = def.trim();
//
// // try to find rule matching definition or fail
// let rule = self.rules.get(&def.to_string()).cloned().unwrap();
//
// self.enter_scope(Scope::Definition(rule));
// }
//
// self.append_str(line);
// }
//
// prev = line;
// }
// }
//
// /// Append the given line to the output, followed by a newline character.
// fn append_str(&mut self, line: &str) {
// self.out.push_str(line);
// self.out.push('\n');
// }
//
// /// Enter new scope (definition or code block). Allows customizing
// /// pre and post lines for each scope entered or exited.
// fn enter_scope(&mut self, new_scope: Scope) {
// match (&self.scope, &new_scope) {
// // exchange scopes between Free and Code
// (Scope::Free, Scope::Code) => self.append_str("```"),
// (Scope::Code, Scope::Free) => self.append_str("```"),
// // exchange scopes between Free and Definition
// (Scope::Free, Scope::Definition(rule)) => {
// self.append_str(&format!("<a name=\"{}\"></a>", rule.name()));
// self.append_str("```abnf");
// }
// (Scope::Definition(rule), Scope::Free) => {
// let mut rules: Vec<String> = Vec::new();
// parse_abnf_node(rule.node(), &mut rules);
//
// // 1. leave only unique keys
// // 2. map each rule into a link
// // 3. join results as a list
// // Note: GitHub only allows custom tags with 'user-content-' prefix
// let keys = rules
// .into_iter()
// .collect::<HashSet<_>>()
// .into_iter()
// .map(|tag| format!("[{}](#user-content-{})", &tag, tag))
// .collect::<Vec<String>>()
// .join(", ");
//
// self.append_str("```");
//
// if !keys.is_empty() {
// self.append_str(&format!("\nGo to: _{}_;\n", keys));
// }
// }
// (_, _) => (),
// };
//
// self.scope = new_scope;
// }
// }
//
// /// Recursively parse ABNF Node and fill sum vec with found rule names.
// fn parse_abnf_node(node: &Node, sum: &mut Vec<String>) {
// match node {
// // these two are just vectors of rules
// Node::Alternatives(vec) | Node::Concatenation(vec) => {
// for node in vec {
// parse_abnf_node(node, sum);
// }
// }
// Node::Group(node) | Node::Optional(node) => parse_abnf_node(node.as_ref(), sum),
//
// // push rulename if it is known
// Node::Rulename(name) => sum.push(name.clone()),
//
// // do nothing for other nodes
// _ => (),
// }
// }
//
// fn main() -> Result<()> {
// // Take Leo ABNF grammar file.
// let grammar = include_str!("../abnf-grammar.txt");
//
// // Parse ABNF to get list of all definitions.
// The Rust abnf crate does not support `%s` (case-sensitive strings, part of
// the standard), so we remove all occurrences before parsing.
// let parsed = abnf::rulelist(&str::replace(grammar, "%s", "")).map_err(|e| {
// eprintln!("{}", &e);
// anyhow::anyhow!(e)
// })?;
//
// // Init parser and run it. That's it.
// let mut parser = Processor::new(grammar, parsed);
// parser.process();
//
// // Print result of conversion to STDOUT.
// println!("{}", parser.out);
//
// Ok(())
// }
use abnf::types::{Node, Rule};
use anyhow::Result;
use std::collections::{HashMap, HashSet};
/// Processor's scope. Used when code block or definition starts or ends.
#[derive(Debug, Clone)]
enum Scope {
Free,
Code,
Definition(Rule),
}
/// Transforms an ABNF file into Markdown.
#[derive(Debug, Clone)]
struct Processor<'a> {
rules: HashMap<String, Rule>,
grammar: &'a str,
scope: Scope,
line: u32,
out: String,
}
impl<'a> Processor<'a> {
fn new(grammar: &'a str, abnf: Vec<Rule>) -> Processor<'a> {
// we need a hashmap to pull rules easily
let rules: HashMap<String, Rule> = abnf.into_iter().map(|rule| (rule.name().to_string(), rule)).collect();
Processor {
grammar,
line: 0,
out: String::new(),
rules,
scope: Scope::Free,
}
}
/// Main function for this struct.
/// Goes through each line and transforms it into proper markdown.
fn process(&mut self) {
let lines = self.grammar.lines();
let mut prev = "";
for line in lines {
self.line += 1;
// code block in comment: extra indentation after ';' marks code
// (not highlighted as abnf)
if let Some(code) = line.strip_prefix(";  ") {
self.enter_scope(Scope::Code);
self.append_str(code);
// just comment. end of code block
} else if let Some(code) = line.strip_prefix("; ") {
self.enter_scope(Scope::Free);
self.append_str(code);
// horizontal rule - section separator
} else if line.starts_with(";;;;;;;;;;") {
self.enter_scope(Scope::Free);
self.append_str("\n--------\n");
// empty line in comment. end of code block
} else if line.starts_with(';') {
self.enter_scope(Scope::Free);
self.append_str("\n\n");
// just empty line. end of doc, start of definition
} else if line.is_empty() {
self.enter_scope(Scope::Free);
self.append_str("");
// definition (may be multiline)
} else {
// if there's an equality sign and previous line was empty
if line.contains('=') && prev.is_empty() {
let (def, _) = line.split_at(line.find('=').unwrap());
let def = def.trim();
// try to find rule matching definition or fail
let rule = self.rules.get(&def.to_string()).cloned().unwrap();
self.enter_scope(Scope::Definition(rule));
}
self.append_str(line);
}
prev = line;
}
}
/// Append the given line to the output, followed by a newline character.
fn append_str(&mut self, line: &str) {
self.out.push_str(line);
self.out.push('\n');
}
/// Enter new scope (definition or code block). Allows customizing
/// pre and post lines for each scope entered or exited.
fn enter_scope(&mut self, new_scope: Scope) {
match (&self.scope, &new_scope) {
// exchange scopes between Free and Code
(Scope::Free, Scope::Code) => self.append_str("```"),
(Scope::Code, Scope::Free) => self.append_str("```"),
// exchange scopes between Free and Definition
(Scope::Free, Scope::Definition(rule)) => {
self.append_str(&format!("<a name=\"{}\"></a>", rule.name()));
self.append_str("```abnf");
}
(Scope::Definition(rule), Scope::Free) => {
let mut rules: Vec<String> = Vec::new();
parse_abnf_node(rule.node(), &mut rules);
// 1. leave only unique keys
// 2. map each rule into a link
// 3. join results as a list
// Note: GitHub only allows custom tags with 'user-content-' prefix
let keys = rules
.into_iter()
.collect::<HashSet<_>>()
.into_iter()
.map(|tag| format!("[{}](#user-content-{})", &tag, tag))
.collect::<Vec<String>>()
.join(", ");
self.append_str("```");
if !keys.is_empty() {
self.append_str(&format!("\nGo to: _{}_;\n", keys));
}
}
(_, _) => (),
};
self.scope = new_scope;
}
}
/// Recursively parse ABNF Node and fill sum vec with found rule names.
fn parse_abnf_node(node: &Node, sum: &mut Vec<String>) {
match node {
// these two are just vectors of rules
Node::Alternatives(vec) | Node::Concatenation(vec) => {
for node in vec {
parse_abnf_node(node, sum);
}
}
Node::Group(node) | Node::Optional(node) => parse_abnf_node(node.as_ref(), sum),
// push rulename if it is known
Node::Rulename(name) => sum.push(name.clone()),
// do nothing for other nodes
_ => (),
}
}
fn main() -> Result<()> {
// Take Leo ABNF grammar file.
let grammar = include_str!("../abnf-grammar.txt");
// Parse ABNF to get list of all definitions.
// The Rust abnf crate does not support `%s` (case-sensitive strings, part of
// the standard), so we remove all occurrences before parsing.
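// For example, a case-sensitive rule written as kw-let = %s"let"
// (illustrative name) becomes kw-let = "let" after the replacement.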
let parsed = abnf::rulelist(&str::replace(grammar, "%s", "")).map_err(|e| {
eprintln!("{}", &e);
anyhow::anyhow!(e)
})?;
// Init parser and run it. That's it.
let mut parser = Processor::new(grammar, parsed);
parser.process();
// Print result of conversion to STDOUT.
println!("{}", parser.out);
Ok(())
}
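
Not part of this commit, but a quick way to sanity-check the Processor above is a unit test over a toy grammar. The rule name and expected output fragments below are illustrative; the test relies only on behavior visible in process() and enter_scope().

#[cfg(test)]
mod tests {
    use super::*;

    // Illustrative only: a single definition preceded by a comment should
    // produce an HTML anchor and open an abnf-highlighted fence.
    #[test]
    fn renders_definition_with_anchor() {
        let grammar = "; A greeting rule.\n\ngreeting = \"hello\"\n";
        let parsed = abnf::rulelist("greeting = \"hello\"\n").unwrap();
        let mut processor = Processor::new(grammar, parsed);
        processor.process();
        assert!(processor.out.contains("<a name=\"greeting\"></a>"));
        assert!(processor.out.contains("```abnf"));
    }
}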