Merge pull request #407 from Kindelia/experimental

New architecture for the compiler
This commit is contained in:
Felipe G 2022-12-01 11:27:09 -03:00 committed by GitHub
commit da2b235448
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
295 changed files with 19159 additions and 12918 deletions

View File

@ -7,7 +7,7 @@ jobs:
cargo_check:
name: 👁️‍🗨️ Cargo Check
runs-on: ${{ matrix.os }}
timeout-minutes: 5
timeout-minutes: 10
strategy:
matrix:
os: [macos-latest, ubuntu-latest]
@ -16,7 +16,7 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
toolchain: nightly
override: true
- uses: Swatinem/rust-cache@v1
- uses: actions-rs/cargo@v1
@ -26,7 +26,7 @@ jobs:
cargo_test:
name: 🧪 Cargo Test
runs-on: ${{ matrix.os }}
timeout-minutes: 5
timeout-minutes: 10
strategy:
matrix:
os: [macos-latest, ubuntu-latest]
@ -35,7 +35,7 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
toolchain: nightly
override: true
- uses: Swatinem/rust-cache@v2
- uses: actions-rs/cargo@v1

View File

@ -1,2 +1,5 @@
# Kind2 0.2.76
The main.rs and language.rs files have been broken into several parts.
The main.rs and language.rs files have been broken into several parts.
# Kind2 0.2.79
New architecture and better error messages.

1576
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,29 +1,26 @@
[package]
name = "kind2"
version = "0.2.79"
edition = "2021"
description = "A pure functional language that uses the HVM."
repository = "https://github.com/Kindelia/Kind2"
license = "MIT"
keywords = ["functional", "language", "type-theory", "proof-assistant"]
[workspace]
members = [
"crates/kind-cli",
"crates/kind-tree",
"crates/kind-span",
"crates/kind-parser",
"crates/kind-pass",
"crates/kind-report",
"crates/kind-checker",
"crates/kind-driver",
"crates/kind-report",
"crates/kind-target-kdl",
"crates/kind-target-hvm",
"crates/kind-derive",
"crates/kind-tests",
# "crates/kind-optimization",
# "crates/kind-lint",
# "crates/kind-query",
# "crates/kind-macros",
]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
hvm = "0.1.81"
#hvm = { path = "../hvm" }
highlight_error = "0.1.1"
clap = { version = "3.1.8", features = ["derive"] }
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
[dev-dependencies]
pretty_assertions = "1.3.0"
ntest = "0.8.1"
walkdir = "2"
[profile.dev.package.hvm]
[profile.test.package.hvm]
opt-level = 3
[[test]]
name = "kind2-tests"
path = "tests/mod.rs"
[profile.bench.package.hvm]
opt-level = 3

77
FEATURES.md Normal file
View File

@ -0,0 +1,77 @@
It's an entirely new compiler in the 0.3 version. A lot of the features are just simple increments to the old ones but they really help with DX. Let's start with the lexical features:
- Identifiers cannot start with dot
- We can have numbers in a lot of formats now like:
- `0xFF`, `0XFF`, `0o17`, `0O17`, `0b10`, `0B10` and decimals.
- `0u60` and `0u120`, `0n` that describes u120 and u60 literals.
- Numbers can contain lots of underscores (just use one between digits please,
we will change it in the future) e.g `100_000_000`
- There's a distinction between Upper identifiers and Lower identifiers. Upper cased identifiers
can contain a single `'/'` between two parts, if the second part is available then the first one is the name that will be replaced by an 'use' statement.
- each string character and the character inside a char token can contain escape sequences in a lot of formats, like `\x12` `\u1234` `\n` `\r` `\t` `\0` `\\` `\'` `\"`
- Comments with `/* */` that can be nested :)
The syntactic features are almost all the same with some small changes.
- Attributes are a little bit more complex and can be seen in some formats.
- Single identifier like: #inline
- With arguments like: #derive[match, open]
- With value like: #kdl_name = Joestar
- Use statements are in the format `use A as B` and they rename upper cased identifiers like `B/c` to `A.c`
- Type definitions now support indices and are in the .kind2 files! e.g:
```js
// Parameters are always in the context like `t` but `n` isnt.
type Vec (t: Type) ~ (n: Nat) {
cons <size: Nat> (x: t) (xs: Vec t size) : Vec t (Nat.succ size)
nil : Vec t Nat.zero
}
```
You can use the `match` eliminator to destruct this vec without having to pattern match on this (but you have to derive `match`).
```js
Main : U60
Main =
match Vec (Vec.cons 1 Vec.nil) {
cons xs .. => 0
nil => 1
}
```
Take a look at the section about `match patterns` in order to understand the `xs` and `..` inside the `cons` case.
- Record definitions :D
```js
record User {
constructor new
name : String
age : U60
}
```
You can use the `destruct` notation if you want to destruct a record but you have to derive `open` to make this feature work. `#derive[open]` before the record definition.
```js
// Using
Main : U60
Main = let User.new name .. = User.new "abe" 21
name
```
- Entries stay all the same, except that you cannot put repeated names because it would make the named parameter process a bit harder.
Btw, you can make something like
```js
Dio (n: U60) (i: U60) : Type
// Named parameters :sunglasses:
Main {
Dio (i = 2) (n = 4)
}
```
- All the current syntax sugars are:
- Sigma types
- Substitutions
- Do notation
- Match
- Let
- If
- Doc strings (useful for the future) using `///`

View File

@ -0,0 +1,16 @@
[package]
name = "kind-checker"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
kind-tree = { path = "../kind-tree" }
kind-span = { path = "../kind-span" }
kind-report = { path = "../kind-report" }
hvm = { git = "https://github.com/Kindelia/HVM.git" }
fxhash = "0.2.1"
im-rc = "15.1.0"

View File

@ -0,0 +1,581 @@
//! This module compiles all of the code to a format
//! that can run on the HVM and inside the checker.hvm
//! file.
use self::tags::EvalTag;
use self::tags::{operator_to_constructor, TermTag};
use hvm::syntax::Term;
use kind_span::Range;
use kind_tree::desugared::{self, Book, Expr};
use kind_tree::symbol::{Ident, QualifiedIdent};
use hvm::{syntax as lang, u60};
mod tags;
/// Concatenates the fixed head elements `$f...` with the spine `$e` into a
/// single `Vec`. (The name is a historical misspelling of "prepend"; kept
/// because call sites throughout this module depend on it.)
macro_rules! vec_preppend {
    ($($f:expr),*; $e:expr) => {
        vec![[$($f),*].as_slice(), $e.as_slice()].concat()
    };
}
/// Transforms the TermTag into EvalTag if it's quoted.
fn eval_ctr(quote: bool, head: TermTag) -> String {
    // Quoted terms keep their constructor name untouched. Unquoted terms
    // are routed to the dedicated evaluation function when one exists;
    // every other tag falls through to its own name.
    if !quote {
        match head {
            TermTag::App => EvalTag::App.to_string(),
            TermTag::Let => EvalTag::Let.to_string(),
            TermTag::Ann => EvalTag::Ann.to_string(),
            TermTag::Sub => EvalTag::Sub.to_string(),
            TermTag::Binary => EvalTag::Op.to_string(),
            passthrough => passthrough.to_string(),
        }
    } else {
        head.to_string()
    }
}
// Helpers
/// Just lifts the spine into an `args` constructor that is useful
/// to avoid the arity limit of the type checker.
///
/// Spines longer than 16 keep their first two elements in place
/// (presumably the head/origin pair — confirm against call sites) and pack
/// the remainder into a single `Kind.Term.argsN` constructor.
fn lift_spine(spine: Vec<Box<Term>>) -> Vec<Box<Term>> {
    if spine.len() > 16 {
        let mut start = spine[..2].to_vec();
        start.push(Box::new(Term::Ctr {
            name: format!("Kind.Term.args{}", spine.len() - 2),
            args: spine[2..].to_vec(),
        }));
        start
    } else {
        spine
    }
}
/// Builds a constructor, packing oversized spines first (see [lift_spine]).
fn mk_lifted_ctr(head: String, spine: Vec<Box<Term>>) -> Box<Term> {
    let args = lift_spine(spine);
    Box::new(Term::Ctr { name: head, args })
}
/// Builds a plain constructor node (no spine lifting).
fn mk_ctr(name: String, args: Vec<Box<Term>>) -> Box<Term> {
    Box::new(lang::Term::Ctr { name, args })
}
/// Builds a variable node from its textual name.
fn mk_var(ident: &str) -> Box<Term> {
    Box::new(Term::Var {
        name: ident.to_string(),
    })
}
/// Builds a 60-bit unsigned number node.
fn mk_u60(numb: u64) -> Box<Term> {
    Box::new(Term::U6O {
        numb: u60::new(numb),
    })
}
/// Builds a constructor with no arguments.
fn mk_single_ctr(head: String) -> Box<Term> {
    Box::new(Term::Ctr {
        name: head,
        args: vec![],
    })
}
/// Builds a nullary constructor named after `ident` plus a trailing dot.
fn mk_ctr_name(ident: &QualifiedIdent) -> Box<Term> {
    // Adds an empty segment (so it just appends a dot in the end)
    mk_single_ctr(format!("{}.", ident))
}
/// Same as [mk_ctr_name], but starting from a raw string.
fn mk_ctr_name_from_str(ident: &str) -> Box<Term> {
    // Adds an empty segment (so it just appends a dot in the end)
    mk_single_ctr(format!("{}.", ident))
}
/// Encodes a source range into a single number node.
fn range_to_num(range: Range) -> Box<Term> {
    Box::new(Term::U6O {
        numb: u60::new(range.encode().0),
    })
}
/// Wraps a variable in `Kind.Term.set_origin` so the checker can track the
/// source location it came from.
fn set_origin(ident: &Ident) -> Box<Term> {
    mk_lifted_ctr(
        "Kind.Term.set_origin".to_owned(),
        vec![range_to_num(ident.range), mk_var(ident.to_str())],
    )
}
/// Builds an HVM lambda binding `name` over `body`.
fn lam(name: &Ident, body: Box<Term>) -> Box<Term> {
    Box::new(Term::Lam {
        name: name.to_string(),
        body,
    })
}
/// Desugars a string literal into nested `String.cons`/`String.nil`
/// constructor applications, every node tagged with `range`.
fn desugar_str(input: &str, range: Range) -> Box<desugared::Expr> {
    let nil = QualifiedIdent::new_static("String.nil", None, range);
    let cons = QualifiedIdent::new_static("String.cons", None, range);
    // Walk the characters back-to-front so the first character ends up
    // at the head of the list.
    let mut acc = desugared::Expr::ctr(range, nil, vec![]);
    for chr in input.chars().rev() {
        acc = desugared::Expr::ctr(
            range,
            cons.clone(),
            vec![desugared::Expr::num_u60(range, chr as u64), acc],
        );
    }
    acc
}
/// Compiles a string literal directly into HVM `String.cons`/`String.nil`
/// constructor terms.
fn codegen_str(input: &str) -> Box<Term> {
    // Consing back-to-front keeps the characters in source order.
    let mut list = Box::new(Term::Ctr {
        name: "String.nil".to_string(),
        args: vec![],
    });
    for chr in input.chars().rev() {
        list = Box::new(Term::Ctr {
            name: "String.cons".to_string(),
            args: vec![mk_u60(chr as u64), list],
        });
    }
    list
}
/// Compiles a desugared expression into an HVM term.
///
/// * `lhs_rule` - generating a rule left-hand side: variables become
///   indexed `Kind.Term.var` nodes numbered through `num`.
/// * `lhs` - the expression sits in a pattern position: variable origins
///   are not re-set and constructor origins become the `orig` variable.
/// * `num` - running counter handing out pattern-variable indices.
/// * `quote` - emit quoted constructor names instead of the evaluation
///   functions (see [eval_ctr]).
fn codegen_all_expr(
    lhs_rule: bool,
    lhs: bool,
    num: &mut usize,
    quote: bool,
    expr: &Expr,
) -> Box<Term> {
    use kind_tree::desugared::ExprKind::*;
    match &expr.data {
        Typ => mk_lifted_ctr(
            eval_ctr(quote, TermTag::Typ),
            vec![range_to_num(expr.range)],
        ),
        NumTypeU60 => mk_lifted_ctr(
            eval_ctr(quote, TermTag::U60),
            vec![range_to_num(expr.range)],
        ),
        // F60 support is not implemented in this checker backend yet.
        NumTypeF60 => todo!(),
        Var { name } => {
            if quote && !lhs {
                set_origin(name)
            } else if lhs_rule {
                // Pattern variable: assign it the next free index.
                *num += 1;
                mk_lifted_ctr(
                    eval_ctr(quote, TermTag::Var),
                    vec![
                        range_to_num(expr.range),
                        mk_u60(name.encode()),
                        mk_u60((*num - 1) as u64),
                    ],
                )
            } else {
                mk_var(name.to_str())
            }
        }
        All {
            param,
            typ,
            body,
            erased: _,
        } => mk_lifted_ctr(
            eval_ctr(quote, TermTag::All),
            vec![
                range_to_num(expr.range),
                mk_u60(param.encode()),
                codegen_all_expr(lhs_rule, lhs, num, quote, typ),
                // The body is bound HOAS-style with an HVM lambda.
                lam(param, codegen_all_expr(lhs_rule, lhs, num, quote, body)),
            ],
        ),
        Lambda {
            param,
            body,
            erased: _,
        } => mk_lifted_ctr(
            eval_ctr(quote, TermTag::Lambda),
            vec![
                range_to_num(expr.range),
                mk_u60(param.encode()),
                lam(param, codegen_all_expr(lhs_rule, lhs, num, quote, body)),
            ],
        ),
        // Multi-argument application unfolds into nested binary `app` nodes.
        App { fun, args } => args.iter().fold(
            codegen_all_expr(lhs_rule, lhs, num, quote, fun),
            |left, right| {
                mk_lifted_ctr(
                    eval_ctr(quote, TermTag::App),
                    vec![
                        range_to_num(expr.range),
                        left,
                        codegen_all_expr(lhs_rule, lhs, num, quote, &right.data),
                    ],
                )
            },
        ),
        Ctr { name, args } => mk_lifted_ctr(
            eval_ctr(quote, TermTag::Ctr(args.len())),
            vec_preppend![
                mk_ctr_name(name),
                // On a LHS the origin is the pattern-bound `orig` variable.
                if lhs { mk_var("orig") } else { range_to_num(expr.range) };
                args.iter().cloned().map(|x| codegen_all_expr(lhs_rule, lhs, num, quote, &x)).collect::<Vec<Box<Term>>>()
            ],
        ),
        Fun { name, args } => {
            let new_spine: Vec<Box<Term>> = args
                .iter()
                .cloned()
                .map(|x| codegen_all_expr(lhs_rule, lhs, num, quote, &x))
                .collect();
            if quote {
                mk_lifted_ctr(
                    eval_ctr(quote, TermTag::Fun(new_spine.len())),
                    vec_preppend![
                        mk_ctr_name(name),
                        range_to_num(expr.range);
                        new_spine
                    ],
                )
            } else {
                // Unquoted calls dispatch through the HOAS eval function F$name.
                mk_ctr(
                    TermTag::HoasF(name.to_string()).to_string(),
                    vec_preppend![
                        range_to_num(expr.range);
                        new_spine
                    ],
                )
            }
        }
        Let { name, val, next } => mk_ctr(
            eval_ctr(quote, TermTag::Let),
            vec![
                range_to_num(expr.range),
                mk_u60(name.encode()),
                codegen_all_expr(lhs_rule, lhs, num, quote, val),
                lam(name, codegen_all_expr(lhs_rule, lhs, num, quote, next)),
            ],
        ),
        Ann { expr, typ } => mk_ctr(
            eval_ctr(quote, TermTag::Ann),
            vec![
                range_to_num(expr.range),
                codegen_all_expr(lhs_rule, lhs, num, quote, expr),
                codegen_all_expr(lhs_rule, lhs, num, quote, typ),
            ],
        ),
        Sub {
            name,
            indx,
            redx,
            expr,
        } => mk_ctr(
            eval_ctr(quote, TermTag::Sub),
            vec![
                range_to_num(expr.range),
                mk_u60(name.encode()),
                mk_u60(*indx as u64),
                mk_u60(*redx as u64),
                codegen_all_expr(lhs_rule, lhs, num, quote, expr),
            ],
        ),
        NumU60 { numb } => mk_lifted_ctr(
            eval_ctr(quote, TermTag::Num),
            vec![range_to_num(expr.range), mk_u60(*numb)],
        ),
        NumF60 { numb: _ } => todo!(),
        Binary { op, left, right } => mk_lifted_ctr(
            eval_ctr(quote, TermTag::Binary),
            vec![
                range_to_num(expr.range),
                mk_single_ctr(operator_to_constructor(*op).to_owned()),
                codegen_all_expr(lhs_rule, lhs, num, quote, left),
                codegen_all_expr(lhs_rule, lhs, num, quote, right),
            ],
        ),
        Hole { num } => mk_lifted_ctr(
            eval_ctr(quote, TermTag::Hole),
            vec![range_to_num(expr.range), mk_u60(*num)],
        ),
        // Strings are first desugared to cons-lists and then compiled.
        Str { val } => codegen_all_expr(lhs_rule, lhs, num, quote, &desugar_str(val, expr.range)),
        Hlp(_) => mk_lifted_ctr(
            eval_ctr(quote, TermTag::Hlp),
            vec![range_to_num(expr.range)],
        ),
        Err => panic!("Internal Error: Was not expecting an ERR node inside the HVM checker"),
    }
}
/// Compiles an expression that is not part of a rule left-hand side.
fn codegen_expr(quote: bool, expr: &Expr) -> Box<Term> {
    codegen_all_expr(false, false, &mut 0, quote, expr)
}
/// Compiles a pattern (LHS position), advancing the shared variable
/// counter `args`.
fn codegen_pattern(args: &mut usize, quote: bool, expr: &Expr) -> Box<Term> {
    codegen_all_expr(false, true, args, quote, expr)
}
/// Folds an entry's argument telescope into nested quoted `all` nodes,
/// terminating in the (quoted) return type.
fn codegen_type(args: &[desugared::Argument], typ: &desugared::Expr) -> Box<lang::Term> {
    match args.split_first() {
        Some((arg, rest)) => mk_lifted_ctr(
            eval_ctr(true, TermTag::All),
            vec![
                range_to_num(arg.range),
                mk_u60(arg.name.encode()),
                codegen_expr(true, &arg.typ),
                // Bind the remaining telescope under this argument's name.
                lam(&arg.name, codegen_type(rest, typ)),
            ],
        ),
        None => codegen_expr(true, typ),
    }
}
/// Collects an iterator of terms into an HVM `List`.
///
/// NOTE(review): each element is consed onto the accumulator in iteration
/// order, so the produced list holds the elements reversed — presumably
/// the checker expects that; confirm before changing.
fn codegen_vec<T>(exprs: T) -> Box<Term>
where
    T: Iterator<Item = Box<Term>>,
{
    let mut acc = mk_ctr("List.nil".to_string(), vec![]);
    for expr in exprs {
        acc = mk_ctr("List.cons".to_string(), vec![expr, acc]);
    }
    acc
}
/// Emits the catch-all rules for a function: when neither the quoting
/// (`Q$name`) nor the evaluation (`F$name`) entry point matched any real
/// rule, rebuild the neutral `Kind.Term.fnN` application.
///
/// The original emitted two byte-identical rules differing only in the
/// HOAS head, so both are now built in a single loop.
fn codegen_rule_end(file: &mut lang::File, rule: &desugared::Rule) {
    // Fresh pattern variables x0..xN-1, one per rule pattern.
    let base_vars = (0..rule.pats.len())
        .map(|x| mk_var(&format!("x{}", x)))
        .collect::<Vec<Box<lang::Term>>>();
    for head in [
        TermTag::HoasQ(rule.name.to_string()),
        TermTag::HoasF(rule.name.to_string()),
    ] {
        file.rules.push(lang::Rule {
            lhs: mk_ctr(
                head.to_string(),
                vec_preppend![
                    mk_var("orig");
                    base_vars
                ],
            ),
            rhs: mk_lifted_ctr(
                eval_ctr(false, TermTag::Fun(base_vars.len())),
                vec_preppend![
                    mk_ctr_name(&rule.name),
                    mk_var("orig");
                    base_vars
                ],
            ),
        });
    }
}
/// Compiles one rewrite rule into its quoting (`Q$name`) and evaluation
/// (`F$name`) HVM rules. `HVM.log` gets a hand-written evaluation rule
/// that prints its argument through `HVM.put` instead of reducing.
fn codegen_rule(file: &mut lang::File, rule: &desugared::Rule) {
    // Shared counter so every pattern variable gets a distinct index.
    let mut count = 0;
    let lhs_args = rule
        .pats
        .iter()
        .map(|x| codegen_pattern(&mut count, false, x))
        .collect::<Vec<Box<Term>>>();
    file.rules.push(lang::Rule {
        lhs: mk_ctr(
            TermTag::HoasQ(rule.name.to_string()).to_string(),
            vec_preppend![
                mk_var("orig");
                lhs_args
            ],
        ),
        rhs: codegen_expr(true, &rule.body),
    });
    if rule.name.to_string().as_str() == "HVM.log" {
        // Special case: logging is implemented natively on the HVM side.
        file.rules.push(lang::Rule {
            lhs: mk_ctr(
                TermTag::HoasF(rule.name.to_string()).to_string(),
                vec![
                    mk_var("orig"),
                    mk_var("a"),
                    mk_var("r"),
                    mk_var("log"),
                    mk_var("ret"),
                ],
            ),
            rhs: mk_ctr(
                "HVM.put".to_owned(),
                vec![
                    mk_ctr("Kind.Term.show".to_owned(), vec![mk_var("log")]),
                    mk_var("ret"),
                ],
            ),
        });
    } else {
        file.rules.push(lang::Rule {
            lhs: mk_ctr(
                TermTag::HoasF(rule.name.to_string()).to_string(),
                vec_preppend![
                    mk_var("orig");
                    lhs_args
                ],
            ),
            rhs: codegen_expr(false, &rule.body),
        });
    }
}
/// Quotes one rule as nested `Kind.Rule.lhs` nodes (one per pattern)
/// terminated by a `Kind.Rule.rhs` holding the quoted application of all
/// collected patterns. `count` indexes pattern variables; `args`
/// accumulates the already-quoted patterns.
fn codegen_entry_rules(
    count: &mut usize,
    index: usize,
    args: &mut Vec<Box<Term>>,
    entry: &desugared::Rule,
    pats: &[Box<desugared::Expr>],
) -> Box<Term> {
    if pats.is_empty() {
        mk_ctr(
            "Kind.Rule.rhs".to_owned(),
            vec![mk_ctr(
                // `index` here equals the number of consumed patterns.
                format!("QT{}", index),
                vec_preppend![
                    mk_ctr_name(&entry.name),
                    range_to_num(entry.range);
                    args
                ],
            )],
        )
    } else {
        let pat = &pats[0];
        let expr = codegen_all_expr(true, false, count, false, pat);
        args.push(expr.clone());
        mk_ctr(
            "Kind.Rule.lhs".to_owned(),
            vec![
                expr,
                codegen_entry_rules(count, index + 1, args, entry, &pats[1..]),
            ],
        )
    }
}
/// Compiles a whole entry: its `NameOf`/`HashOf`/`TypeOf` metadata rules,
/// the `Kind.Term.FNn` → `F$name` and `QTn` → `Q$name` dispatch rules,
/// every rewrite rule, and the `RuleOf` listing consumed by the checker.
fn codegen_entry(file: &mut lang::File, entry: &desugared::Entry) {
    file.rules.push(lang::Rule {
        lhs: mk_ctr("NameOf".to_owned(), vec![mk_ctr_name(&entry.name)]),
        rhs: codegen_str(entry.name.to_string().as_str()),
    });
    file.rules.push(lang::Rule {
        lhs: mk_ctr("HashOf".to_owned(), vec![mk_ctr_name(&entry.name)]),
        rhs: mk_u60(fxhash::hash64(entry.name.to_string().as_str())),
    });
    file.rules.push(lang::Rule {
        lhs: mk_ctr("TypeOf".to_owned(), vec![mk_ctr_name(&entry.name)]),
        rhs: codegen_type(&entry.args, &entry.typ),
    });
    // Fresh pattern variables x0..xN-1, one per declared argument.
    let base_vars = (0..entry.args.len())
        .map(|x| mk_var(&format!("x{}", x)))
        .collect::<Vec<Box<lang::Term>>>();
    file.rules.push(lang::Rule {
        lhs: mk_lifted_ctr(
            format!("Kind.Term.FN{}", entry.args.len()),
            vec_preppend![
                mk_ctr_name(&entry.name),
                mk_var("orig");
                base_vars
            ],
        ),
        rhs: mk_ctr(
            TermTag::HoasF(entry.name.to_string()).to_string(),
            vec_preppend![
                mk_var("orig");
                base_vars
            ],
        ),
    });
    file.rules.push(lang::Rule {
        lhs: mk_ctr(
            format!("QT{}", entry.args.len()),
            vec_preppend![
                mk_ctr_name(&entry.name),
                mk_var("orig");
                base_vars
            ],
        ),
        rhs: mk_ctr(
            TermTag::HoasQ(entry.name.to_string()).to_string(),
            vec_preppend![
                mk_var("orig");
                base_vars
            ],
        ),
    });
    for rule in &entry.rules {
        codegen_rule(file, rule);
    }
    // The catch-all only needs one representative rule for the arity.
    if !entry.rules.is_empty() {
        codegen_rule_end(file, &entry.rules[0])
    }
    let rules = entry
        .rules
        .iter()
        .map(|rule| codegen_entry_rules(&mut 0, 0, &mut Vec::new(), rule, &rule.pats));
    file.rules.push(lang::Rule {
        lhs: mk_ctr("RuleOf".to_owned(), vec![mk_ctr_name(&entry.name)]),
        rhs: codegen_vec(rules),
    });
}
/// Compiles a book into a format that is executed by the
/// type checker in HVM.
///
/// Besides one set of rules per entry, emits the `Functions` list of
/// names to check and the `HoleInit` hole counter, then derives a
/// strictness map entry for every generated rule head.
pub fn codegen_book(book: &Book, functions_to_check: Vec<String>) -> lang::File {
    let mut file = lang::File {
        rules: vec![],
        smaps: vec![],
    };
    let functions_entry = lang::Rule {
        lhs: mk_ctr("Functions".to_owned(), vec![]),
        rhs: codegen_vec(functions_to_check.iter().map(|x| mk_ctr_name_from_str(x))),
    };
    for entry in book.entrs.values() {
        codegen_entry(&mut file, entry)
    }
    file.rules.push(functions_entry);
    file.rules.push(lang::Rule {
        lhs: mk_ctr("HoleInit".to_owned(), vec![]),
        rhs: mk_u60(book.holes),
    });
    // NOTE(review): every argument is marked `false` in the smap —
    // presumably meaning "not strict/parallel"; confirm against HVM docs.
    for rule in &file.rules {
        match &*rule.lhs {
            Term::Ctr { name, args } => {
                file.smaps.push((name.clone(), vec![false; args.len()]));
            }
            _ => todo!(),
        }
    }
    file
}

View File

@ -0,0 +1,102 @@
//! This module describes tags for internal use
//! during compilation.
use core::fmt;
use kind_tree::Operator;
/// Tags for each one of the terms inside
/// HVM. It's useful to split the code between
/// the representation and the actual name of each
/// node.
#[derive(Debug)]
pub enum TermTag {
    Var,
    All,
    Lambda,
    App,
    // Applied function; the payload is the spine arity.
    Fun(usize),
    // Constructor application; the payload is the spine arity.
    Ctr(usize),
    Let,
    Ann,
    Sub,
    Typ,
    U60,
    Num,
    Binary,
    Hole,
    Hlp,
    // HOAS Tags
    // Evaluation-side HOAS head, rendered as `F$name`.
    HoasF(String),
    // Quoting-side HOAS head, rendered as `Q$name`.
    HoasQ(String),
}
/// Some of the tags can be directly translated
/// to a function that evaluates them so it's the name
/// of each function. See the [fmt::Display] impl below for the
/// concrete `Kind.Term.eval_*` names.
pub enum EvalTag {
    Op,
    App,
    Let,
    Ann,
    Sub,
}
impl fmt::Display for TermTag {
    /// Renders the tag as the constructor name used inside checker.hvm.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The parameterized tags need interpolation; everything else maps
        // to a fixed constructor name.
        match self {
            TermTag::Fun(n) => write!(f, "Kind.Term.fn{}", n),
            TermTag::Ctr(n) => write!(f, "Kind.Term.ct{}", n),
            TermTag::HoasF(name) => write!(f, "F${}", name),
            TermTag::HoasQ(name) => write!(f, "Q${}", name),
            simple => f.write_str(match simple {
                TermTag::Var => "Kind.Term.var",
                TermTag::All => "Kind.Term.all",
                TermTag::Lambda => "Kind.Term.lam",
                TermTag::App => "Kind.Term.app",
                TermTag::Let => "Kind.Term.let",
                TermTag::Ann => "Kind.Term.ann",
                TermTag::Sub => "Kind.Term.sub",
                TermTag::Typ => "Kind.Term.typ",
                TermTag::U60 => "Kind.Term.u60",
                TermTag::Num => "Kind.Term.num",
                TermTag::Binary => "Kind.Term.op2",
                TermTag::Hole => "Kind.Term.hol",
                TermTag::Hlp => "Kind.Term.hlp",
                // Parameterized tags were consumed by the outer arms.
                _ => unreachable!(),
            }),
        }
    }
}
impl fmt::Display for EvalTag {
    /// Renders the tag as the name of its evaluation function.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            EvalTag::Op => "Kind.Term.eval_op",
            EvalTag::App => "Kind.Term.eval_app",
            EvalTag::Let => "Kind.Term.eval_let",
            EvalTag::Ann => "Kind.Term.eval_ann",
            EvalTag::Sub => "Kind.Term.eval_sub",
        };
        f.write_str(name)
    }
}
/// Translates the operator to the tag that is used internally
/// by the checker.
///
/// The returned names are string literals, so the signature is
/// `&'static str` (the previous free lifetime parameter was needless and
/// `'static` coerces to any caller lifetime).
pub fn operator_to_constructor(operator: Operator) -> &'static str {
    match operator {
        Operator::Add => "Kind.Operator.add",
        Operator::Sub => "Kind.Operator.sub",
        Operator::Mul => "Kind.Operator.mul",
        Operator::Div => "Kind.Operator.div",
        Operator::Mod => "Kind.Operator.mod",
        Operator::And => "Kind.Operator.and",
        Operator::Xor => "Kind.Operator.xor",
        Operator::Shl => "Kind.Operator.shl",
        Operator::Shr => "Kind.Operator.shr",
        Operator::Ltn => "Kind.Operator.ltn",
        Operator::Lte => "Kind.Operator.lte",
        Operator::Eql => "Kind.Operator.eql",
        Operator::Gte => "Kind.Operator.gte",
        Operator::Gtn => "Kind.Operator.gtn",
        Operator::Neq => "Kind.Operator.neq",
        Operator::Or => "Kind.Operator.or",
    }
}

View File

@ -0,0 +1,236 @@
//! Errors created by the type checker.
use kind_report::data::{Color, Diagnostic, DiagnosticFrame, Marker, Severity, Subtitle, Word};
use kind_span::Range;
use kind_tree::desugared::Expr;
use crate::report::Context;
/// Errors reported back by the HVM type checker after being parsed into
/// Rust values. Each variant carries the typing [Context] and the source
/// [Range] it refers to.
#[derive(Debug)]
pub(crate) enum TypeError {
    UnboundVariable(Context, Range),
    CantInferHole(Context, Range),
    CantInferLambda(Context, Range),
    InvalidCall(Context, Range),
    // The two Expr payloads look like detected/expected types — TODO
    // confirm the order against the constructor call sites.
    ImpossibleCase(Context, Range, Box<Expr>, Box<Expr>),
    Inspection(Context, Range, Box<Expr>),
    TooManyArguments(Context, Range),
    TypeMismatch(Context, Range, Box<Expr>, Box<Expr>),
}
/// Renders a typing context as report subtitles: a "Context:" header,
/// then one aligned `name : type` line per variable followed by dimmed
/// `name = value` lines for each recorded value.
fn context_to_subtitles(ctx: &Context, subtitles: &mut Vec<Subtitle>) {
    subtitles.push(Subtitle::LineBreak);
    if !ctx.0.is_empty() {
        subtitles.push(Subtitle::Phrase(
            Color::Snd,
            vec![Word::White("Context:".to_string())],
        ));
    }
    // Width of the longest name, used to align the `:` / `=` columns.
    // (Replaces a redundant `max_by_key(len).map(len)` chain.)
    let biggest = ctx
        .0
        .iter()
        .map(|(name, _, _)| name.len())
        .max()
        .unwrap_or(0);
    for (name, typ, vals) in &ctx.0 {
        subtitles.push(Subtitle::Phrase(
            Color::Snd,
            vec![
                Word::Dimmed(" ".to_string()),
                Word::White(format!("{:<width$} :", name, width = biggest)),
                Word::Painted(Color::Snd, typ.to_string()),
            ],
        ));
        for val in vals {
            subtitles.push(Subtitle::Phrase(
                Color::Snd,
                vec![
                    Word::Dimmed(" ".to_string()),
                    Word::Dimmed(format!("{:<width$} =", name, width = biggest)),
                    Word::Dimmed(val.to_string()),
                ],
            ))
        }
    }
}
impl Diagnostic for TypeError {
fn get_syntax_ctx(&self) -> Option<kind_span::SyntaxCtxIndex> {
match self {
TypeError::UnboundVariable(_, range) => Some(range.ctx),
TypeError::CantInferHole(_, range) => Some(range.ctx),
TypeError::CantInferLambda(_, range) => Some(range.ctx),
TypeError::InvalidCall(_, range) => Some(range.ctx),
TypeError::ImpossibleCase(_, range, _, _) => Some(range.ctx),
TypeError::Inspection(_, range, _) => Some(range.ctx),
TypeError::TooManyArguments(_, range) => Some(range.ctx),
TypeError::TypeMismatch(_, range, _, _) => Some(range.ctx),
}
}
fn to_diagnostic_frame(&self) -> DiagnosticFrame {
match self {
TypeError::TypeMismatch(ctx, range, detected, expected) => {
let mut subtitles = vec![
Subtitle::Phrase(
Color::Fst,
vec![
Word::White("Got :".to_string()),
Word::Painted(Color::Fst, expected.to_string()),
],
),
Subtitle::Phrase(
Color::Snd,
vec![
Word::White("Expected :".to_string()),
Word::Painted(Color::Snd, detected.to_string()),
],
),
];
context_to_subtitles(ctx, &mut subtitles);
DiagnosticFrame {
code: 101,
severity: Severity::Error,
title: "Type mismatch".to_string(),
subtitles,
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
}
}
TypeError::Inspection(ctx, range, expected) => {
let mut subtitles = vec![Subtitle::Phrase(
Color::Snd,
vec![
Word::White("Expected:".to_string()),
Word::Painted(Color::Snd, expected.to_string()),
],
)];
context_to_subtitles(ctx, &mut subtitles);
DiagnosticFrame {
code: 101,
severity: Severity::Info,
title: "Inspection.".to_string(),
subtitles,
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Snd,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
}
}
TypeError::ImpossibleCase(_, range, detected, expected) => DiagnosticFrame {
code: 101,
severity: Severity::Error,
title: "Impossible case.".to_string(),
subtitles: vec![
Subtitle::Phrase(
Color::Fst,
vec![
Word::White("Expected :".to_string()),
Word::Painted(Color::Fst, detected.to_string()),
],
),
Subtitle::Phrase(
Color::Snd,
vec![
Word::White("Got :".to_string()),
Word::Painted(Color::Snd, expected.to_string()),
],
),
],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
TypeError::CantInferHole(_, range) => DiagnosticFrame {
code: 101,
severity: Severity::Error,
title: "Can't infer hole.".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
TypeError::InvalidCall(_, range) => DiagnosticFrame {
code: 101,
severity: Severity::Error,
title: "Cannot call this".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
TypeError::CantInferLambda(_, range) => DiagnosticFrame {
code: 101,
severity: Severity::Error,
title: "Can't infer lambda.".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
TypeError::TooManyArguments(_, range) => DiagnosticFrame {
code: 101,
severity: Severity::Error,
title: "Too many arguments".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
TypeError::UnboundVariable(_, range) => DiagnosticFrame {
code: 101,
severity: Severity::Error,
title: "Unbound variable.".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
}
}
}

View File

@ -0,0 +1,120 @@
//! A type checker for the kind2 language. It has some utilities
//! to [compile kind2 code][compiler] into a version that the checker
//! can understand and [transform the answer back][report] into a
//! version that the Rust side can manipulate.
pub mod compiler;
mod errors;
pub mod report;
use std::sync::mpsc::Sender;
use hvm::language;
use hvm::{runtime, syntax::Term};
use kind_report::data::Diagnostic;
use kind_tree::desugared::Book;
use report::parse_report;
/// The type-checker program (checker.hvm) bundled into the binary;
/// compiled books are appended to this prelude (see [gen_checker]).
pub const CHECKER: &str = include_str!("checker.hvm");
/// Evaluates `term` against the HVM program `file` in a fresh runtime and
/// reads the normalized result back as a syntax term.
///
/// The call is wired in by appending a `HVM_MAIN_CALL = term` rule to the
/// program and normalizing that entry point. `dbug` is forwarded to the
/// HVM normalizer; `tids` is the thread count (defaults to 1).
pub fn eval(file: &str, term: &str, dbug: bool, tids: Option<usize>) -> Result<Box<Term>, String> {
    let file = language::syntax::read_file(&format!("{}\nHVM_MAIN_CALL = {}", file, term))?;
    let book = language::rulebook::gen_rulebook(&file);
    let mut prog = runtime::Program::new();
    prog.add_book(&book);
    let size = runtime::default_heap_size();
    let tids = tids.unwrap_or(1);
    let heap = runtime::new_heap(size, tids);
    let tids = runtime::new_tids(tids);
    // Place the entry-point call at the heap root (slot 0).
    runtime::link(
        &heap,
        0,
        runtime::Fun(*book.name_to_id.get("HVM_MAIN_CALL").unwrap(), 0),
    );
    let host = 0;
    runtime::normalize(&heap, &prog, &tids, host, dbug);
    let code = language::readback::as_term(&heap, &prog, host);
    // Read back first, then release the heap memory.
    runtime::collect(&heap, &prog.aris, tids[0], runtime::load_ptr(&heap, host));
    runtime::free(&heap, 0, 0, 1);
    Ok(code)
}
/// Generates the checker in a string format that can be
/// parsed by HVM.
pub fn gen_checker(book: &Book, functions_to_check: Vec<String>) -> String {
    // The static checker prelude followed by the compiled book.
    format!(
        "{}{}",
        CHECKER,
        compiler::codegen_book(book, functions_to_check)
    )
}
/// Type checks a desugared book. It spawns an HVM instance in order
/// to run a compiled version of the book.
///
/// Every diagnostic the checker produces is sent through `tx`; returns
/// `true` when no errors were reported.
///
/// # Panics
/// Panics when the checker's report cannot be parsed back, or when HVM
/// fails to evaluate the generated file — both are internal errors.
pub fn type_check(
    book: &Book,
    tx: Sender<Box<dyn Diagnostic>>,
    functions_to_check: Vec<String>,
    tids: Option<usize>
) -> bool {
    let file = gen_checker(book, functions_to_check);
    match eval(&file, "Main", false, tids) {
        Ok(term) => {
            let errs = parse_report(&term).unwrap_or_else(|_| {
                panic!(
                    "Internal Error: Cannot parse the report message from the type checker: {}",
                    term
                )
            });
            let succeeded = errs.is_empty();
            for err in errs {
                tx.send(Box::new(err)).unwrap()
            }
            succeeded
        }
        Err(res) => panic!("{}", res),
    }
}
/// Runs the type checker but instead of running the check-all function
/// we run "Kind.API.eval_main", which evaluates the generated version
/// that both HVM and the checker can understand.
pub fn eval_api(book: &Book) -> Box<Term> {
    let file = gen_checker(book, Vec::new());
    let file = language::syntax::read_file(&file).unwrap();
    let book = language::rulebook::gen_rulebook(&file);
    let mut prog = runtime::Program::new();
    prog.add_book(&book);
    let heap = runtime::new_heap(runtime::default_heap_size(), runtime::default_heap_tids());
    let tids = runtime::new_tids(runtime::default_heap_tids());
    // Allocates the main term
    runtime::link(
        &heap,
        0,
        runtime::Fun(*book.name_to_id.get("Kind.API.eval_main").unwrap(), 0),
    );
    let host = 0;
    // Normalizes it
    runtime::normalize(&heap, &prog, &tids, host, false);
    // Reads it back to a string
    let term = language::readback::as_term(&heap, &prog, host);
    // Frees used memory
    runtime::collect(&heap, &prog.aris, tids[0], runtime::load_ptr(&heap, host));
    runtime::free(&heap, 0, 0, 1);
    term
}

View File

@ -0,0 +1,268 @@
//! Transforms an answer from the type checker into
//! an Expr of the kind-tree package.
use kind_span::{EncodedRange, Range};
use kind_tree::backend::Term;
use kind_tree::symbol::{Ident, QualifiedIdent};
use kind_tree::{desugared, Operator};
use crate::errors::TypeError;
use desugared::Expr;
// A context entry: the variable's name, its type, and the values it was
// specialized to (presumably one per refinement — confirm in the checker).
type Entry = (String, Box<Expr>, Vec<Box<Expr>>);
/// The typing context attached to a checker report.
#[derive(Debug)]
pub struct Context(pub Vec<Entry>);
/// Matches `$expr` against a single pattern, yielding `Ok($end)` on a
/// match and a generic parse error otherwise.
macro_rules! match_opt {
    ($expr:expr, $pat:pat => $end:expr) => {{
        match $expr {
            $pat => Ok($end),
            _ => Err("Error while matching opt".to_string()),
        }
    }};
}
/// Decodes a source range from its 60-bit numeric encoding.
fn parse_orig(term: &Term) -> Result<Range, String> {
    match_opt!(term, Term::U6O { numb } => EncodedRange(*numb).to_range())
}
/// Extracts a raw numeric literal.
fn parse_num(term: &Term) -> Result<u64, String> {
    match_opt!(term, Term::U6O { numb } => *numb)
}
/// Parses a `Kind.Operator.*` constructor back into an [Operator].
/// (Also fixes the ungrammatical "Cannot recognized" error message.)
fn parse_op(term: &Term) -> Result<Operator, String> {
    match term {
        Term::Ctr { name, args: _ } => match name.as_str() {
            "Kind.Operator.add" => Ok(Operator::Add),
            "Kind.Operator.sub" => Ok(Operator::Sub),
            "Kind.Operator.mul" => Ok(Operator::Mul),
            "Kind.Operator.div" => Ok(Operator::Div),
            "Kind.Operator.mod" => Ok(Operator::Mod),
            "Kind.Operator.and" => Ok(Operator::And),
            "Kind.Operator.or" => Ok(Operator::Or),
            "Kind.Operator.xor" => Ok(Operator::Xor),
            "Kind.Operator.shl" => Ok(Operator::Shl),
            "Kind.Operator.shr" => Ok(Operator::Shr),
            "Kind.Operator.ltn" => Ok(Operator::Ltn),
            "Kind.Operator.lte" => Ok(Operator::Lte),
            "Kind.Operator.eql" => Ok(Operator::Eql),
            "Kind.Operator.gte" => Ok(Operator::Gte),
            "Kind.Operator.gtn" => Ok(Operator::Gtn),
            "Kind.Operator.neq" => Ok(Operator::Neq),
            _ => Err("Cannot recognize operator".to_string()),
        },
        _ => Err("Error parsing operator".to_string()),
    }
}
/// Reads an identifier that may arrive either as an encoded number or as
/// a constructor whose name is the identifier itself.
fn parse_name(term: &Term) -> Result<String, String> {
    match term {
        Term::U6O { numb } => Ok(Ident::decode(*numb)),
        Term::Ctr { name, args: _ } => Ok(name.to_string()),
        _ => Err("Error while matching ident".to_string()),
    }
}
/// Parses a qualified identifier. Constructor names arrive with a
/// trailing `.` segment (appended by the compiler's `mk_ctr_name`), so
/// the last character is stripped. The resulting range is a ghost range.
fn parse_qualified(term: &Term) -> Result<QualifiedIdent, String> {
    match term {
        Term::U6O { numb } => Ok(QualifiedIdent::new_static(
            &Ident::decode(*numb),
            None,
            Range::ghost_range(),
        )),
        Term::Ctr { name, args: _ } => Ok(QualifiedIdent::new_static(
            &name[..name.len() - 1],
            None,
            Range::ghost_range(),
        )),
        _ => Err("Error while matching qualified".to_string()),
    }
}
/// Entry point: parses a checker term with an empty name environment.
fn parse_expr(term: &Term) -> Result<Box<desugared::Expr>, String> {
    parse_all_expr(Default::default(), term)
}
/// Converts a quoted `Kind.Term.Quoted.*` constructor tree back into a
/// desugared [Expr], dispatching on the constructor tag.
///
/// `names` is threaded (and cloned) through every recursive call;
/// NOTE(review): no branch consults it directly here — it looks like a
/// rename environment that is populated elsewhere or in progress.
fn parse_all_expr(
    names: im_rc::HashMap<String, String>,
    term: &Term,
) -> Result<Box<desugared::Expr>, String> {
    match term {
        Term::Ctr { name, args } => match name.as_str() {
            // args layout for each tag: [0] is the encoded origin unless
            // noted otherwise; remaining slots are tag-specific payloads.
            "Kind.Term.Quoted.all" => Ok(Expr::all(
                parse_orig(&args[0])?,
                Ident::generate(&parse_name(&args[1])?),
                parse_all_expr(names.clone(), &args[2])?,
                parse_all_expr(names, &args[3])?,
                false, // TODO: Fix
            )),
            "Kind.Term.Quoted.lam" => Ok(Expr::lambda(
                parse_orig(&args[0])?,
                Ident::generate(&parse_name(&args[1])?),
                parse_all_expr(names, &args[2])?,
                false, // TODO: Fix
            )),
            "Kind.Term.Quoted.let" => Ok(Expr::let_(
                parse_orig(&args[0])?,
                Ident::generate(&parse_name(&args[1])?),
                parse_all_expr(names.clone(), &args[2])?,
                parse_all_expr(names, &args[3])?,
            )),
            "Kind.Term.Quoted.typ" => Ok(Expr::typ(parse_orig(&args[0])?)),
            "Kind.Term.Quoted.var" => Ok(Expr::var(Ident::new(
                parse_name(&args[1])?,
                parse_orig(&args[0])?,
            ))),
            "Kind.Term.Quoted.hol" => Ok(Expr::hole(parse_orig(&args[0])?, parse_num(&args[1])?)),
            "Kind.Term.Quoted.ann" => Ok(Expr::ann(
                parse_orig(&args[0])?,
                parse_all_expr(names.clone(), &args[1])?,
                parse_all_expr(names, &args[2])?,
            )),
            "Kind.Term.Quoted.sub" => Ok(Expr::sub(
                parse_orig(&args[0])?,
                Ident::generate(&parse_name(&args[1])?),
                parse_num(&args[2])? as usize,
                parse_num(&args[3])? as usize,
                parse_all_expr(names, &args[4])?,
            )),
            // Quoted applications are unary; rebuilt as a single-binding app.
            "Kind.Term.Quoted.app" => Ok(Expr::app(
                parse_orig(&args[0])?,
                parse_all_expr(names.clone(), &args[1])?,
                vec![desugared::AppBinding {
                    data: parse_all_expr(names, &args[2])?,
                    erased: false,
                }],
            )),
            // Note: for `ctr` the origin is args[1]; for `fun` it is also
            // args[1] — the name comes first in both.
            "Kind.Term.Quoted.ctr" => {
                let name = parse_qualified(&args[0])?;
                let orig = parse_orig(&args[1])?;
                let mut res = Vec::new();
                for arg in parse_list(&args[2])? {
                    res.push(parse_all_expr(names.clone(), &arg)?);
                }
                Ok(Expr::ctr(orig, name, res))
            }
            "Kind.Term.Quoted.fun" => Ok(Expr::fun(
                parse_orig(&args[1])?,
                parse_qualified(&args[0])?,
                {
                    let mut res = Vec::new();
                    for arg in parse_list(&args[2])? {
                        res.push(parse_all_expr(names.clone(), &arg)?);
                    }
                    res
                },
            )),
            "Kind.Term.Quoted.hlp" => Ok(Expr::hlp(parse_orig(&args[0])?, Ident::generate("?"))),
            "Kind.Term.Quoted.u60" => Ok(Expr::type_u60(parse_orig(&args[0])?)),
            "Kind.Term.Quoted.num" => Ok(Expr::num_u60(parse_orig(&args[0])?, parse_num(&args[1])?)),
            // TODO: Change quoting to support floats
            "Kind.Term.Quoted.op2" => Ok(Expr::binary(
                parse_orig(&args[0])?,
                parse_op(&args[1])?,
                parse_all_expr(names.clone(), &args[2])?,
                parse_all_expr(names, &args[3])?,
            )),
            tag => Err(format!(
                "Unexpected tag on transforming quoted term {:?}",
                tag
            )),
        },
        _ => Err("Unexpected term on transforming quoted term".to_string()),
    }
}
/// Flattens a quoted list (a chain of `List.cons` cells terminated by
/// `List.nil`) into a `Vec` of its elements, in order.
fn parse_list(term: &Term) -> Result<Vec<Box<Term>>, String> {
    let mut out = Vec::new();
    let mut node = term;
    while let Term::Ctr { name, args } = node {
        match name.as_str() {
            "List.nil" => return Ok(out),
            "List.cons" => {
                out.push(args[0].clone());
                node = &args[1];
            }
            _ => return Err(format!("Unexpected constructor on list '{:?}'", name)),
        }
    }
    Err("Unexpected value on list".to_string())
}
/// Transforms a HVM quoted entry into an easy to manipulate structure.
///
/// Expects a nested pair `Pair.new name (Pair.new type values)` and
/// returns the tuple `(name, type, values)`.
pub fn transform_entry(term: &Term) -> Result<Entry, String> {
    match term {
        Term::Ctr { name, args } if name == "Pair.new" => {
            let fst = parse_name(&args[0])?;
            match &*args[1] {
                Term::Ctr { name, args } if name == "Pair.new" => {
                    let snd = parse_expr(&args[0])?;
                    let trd = parse_list(&args[1])?;
                    // NOTE(review): `flat_map` silently drops elements that
                    // fail to parse instead of propagating the error.
                    let trd = trd.iter().flat_map(|x| parse_expr(x)).collect();
                    Ok((fst, snd, trd))
                }
                _ => Err("Unexpected value on entry second pair".to_string()),
            }
        }
        _ => Err("Unexpected value on entry first pair".to_string()),
    }
}
/// Decodes a single quoted `Kind.Error.Quoted.*` constructor into a
/// [TypeError].
///
/// Layout shared by every error tag: args[0] is the quoted context,
/// args[1] the encoded origin range; tags carrying expressions
/// (impossible_case, inspection, type_mismatch) add them from args[2..].
fn parse_type_error(expr: &Term) -> Result<TypeError, String> {
    match expr {
        Term::Ctr { name, args } => {
            if args.len() < 2 {
                return Err("Invalid argument length for constructor".to_string());
            }
            let ls = parse_list(&args[0])?;
            // NOTE(review): entries that fail to decode are silently skipped.
            let entries = ls.iter().flat_map(|x| transform_entry(x));
            let ctx = Context(entries.collect());
            let orig = match_opt!(*args[1], Term::U6O { numb } => EncodedRange(numb).to_range())?;
            match name.as_str() {
                "Kind.Error.Quoted.unbound_variable" => Ok(TypeError::UnboundVariable(ctx, orig)),
                "Kind.Error.Quoted.cant_infer_hole" => Ok(TypeError::CantInferHole(ctx, orig)),
                "Kind.Error.Quoted.cant_infer_lambda" => Ok(TypeError::CantInferLambda(ctx, orig)),
                "Kind.Error.Quoted.invalid_call" => Ok(TypeError::InvalidCall(ctx, orig)),
                "Kind.Error.Quoted.impossible_case" => Ok(TypeError::ImpossibleCase(
                    ctx,
                    orig,
                    parse_all_expr(im_rc::HashMap::new(), &args[2])?,
                    parse_all_expr(im_rc::HashMap::new(), &args[3])?,
                )),
                "Kind.Error.Quoted.inspection" => Ok(TypeError::Inspection(
                    ctx,
                    orig,
                    parse_all_expr(im_rc::HashMap::new(), &args[2])?,
                )),
                "Kind.Error.Quoted.too_many_arguments" => {
                    Ok(TypeError::TooManyArguments(ctx, orig))
                }
                "Kind.Error.Quoted.type_mismatch" => Ok(TypeError::TypeMismatch(
                    ctx,
                    orig,
                    parse_all_expr(im_rc::HashMap::new(), &args[2])?,
                    parse_all_expr(im_rc::HashMap::new(), &args[3])?,
                )),
                _ => Err("Unexpected tag on quoted value".to_string()),
            }
        }
        _ => Err("Unexpected value on quoted value".to_string()),
    }
}
/// Decodes the checker's quoted report — a list of quoted errors — into
/// concrete [TypeError]s, failing on the first undecodable entry.
pub(crate) fn parse_report(expr: &Term) -> Result<Vec<TypeError>, String> {
    parse_list(expr)?
        .iter()
        .map(|arg| parse_type_error(arg))
        .collect()
}

View File

@ -0,0 +1,24 @@
[package]
name = "kind-cli"
version = "0.3.0"
edition = "2021"
description = "A pure functional language that uses the HVM."
repository = "https://github.com/Kindelia/Kind2"
license = "MIT"
keywords = ["functional", "language", "type-theory", "proof-assistant"]
[[bin]]
name = "kind"
path = "src/main.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
kind-driver = { path = "../kind-driver" }
kind-report = { path = "../kind-report" }
kind-checker = { path = "../kind-checker" }
kind-query = { path = "../kind-query" }
clap = { version = "4.0.10", features = ["derive"] }
anyhow = "1.0.66"
exitcode = "1.1.2"

280
crates/kind-cli/src/main.rs Normal file
View File

@ -0,0 +1,280 @@
use std::path::PathBuf;
use std::time::Instant;
use std::{fmt, io};
use clap::{Parser, Subcommand};
use driver::resolution::ResolutionError;
use kind_driver::session::Session;
use kind_report::data::{Diagnostic, Log};
use kind_report::report::{FileCache, Report};
use kind_report::RenderConfig;
use kind_driver as driver;
// Top-level CLI definition. The `///` comments double as clap help text,
// so they are runtime-visible strings and are left untouched.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
#[clap(propagate_version = true)]
pub struct Cli {
    /// Configuration file to change information about
    /// pretty printing or project root.
    #[arg(short, long, value_name = "FILE")]
    pub config: Option<PathBuf>,
    /// Turn on the debugging information generated
    /// by the compiler.
    #[arg(short, long)]
    pub debug: bool,
    /// Show warning messages
    #[arg(short, long)]
    pub warning: bool,
    /// Disable colors in error messages
    #[arg(short, long)]
    pub no_color: bool,
    /// How much concurrency in HVM
    #[arg(long)]
    pub tids: Option<usize>,
    /// Prints all of the functions and their evaluation
    #[arg(short, long)]
    pub trace: bool,
    /// Only ascii characters in error messages
    #[arg(short, long)]
    pub ascii: bool,
    /// Entrypoint of the file that makes the erasure checker
    /// not remove the entry.
    #[arg(short, long)]
    entrypoint: Option<String>,
    // Project root directory; defaults to "." in `run_cli` when absent.
    #[arg(short, long, value_name = "FILE")]
    pub root: Option<PathBuf>,
    // Subcommand selecting the compiler action to run.
    #[command(subcommand)]
    pub command: Command,
}
// Compiler subcommands; each is dispatched in `run_cli`. The `///` lines
// are clap help text (runtime-visible) and are preserved.
#[derive(Subcommand, Debug)]
pub enum Command {
    /// Check a file
    #[clap(aliases = &["c"])]
    Check { file: String },
    /// Evaluates Main on Kind2
    #[clap(aliases = &["er"])]
    Eval { file: String },
    // Desugars a file to Kind core and prints it.
    #[clap(aliases = &["k"])]
    ToKindCore { file: String },
    // Erases a file and prints the erased book.
    #[clap(aliases = &["e"])]
    Erase { file: String },
    /// Runs Main on the HVM
    #[clap(aliases = &["r"])]
    Run { file: String },
    /// Generates a checker (.hvm) for a file
    #[clap(aliases = &["gc"])]
    GenChecker { file: String },
    /// Stringifies a file
    #[clap(aliases = &["s"])]
    Show { file: String },
    /// Compiles a file to Kindelia (.kdl)
    #[clap(aliases = &["kdl"])]
    ToKDL {
        file: String,
        /// If given, a namespace that goes before each compiled name. Can be at most 10 characters long.
        #[clap(long, aliases = &["ns"])]
        namespace: Option<String>,
    },
    /// Compiles a file to HVM (.hvm)
    #[clap(aliases = &["hvm"])]
    ToHVM { file: String },
}
/// Adapter that exposes any `io::Write` sink (here: stderr) through the
/// `fmt::Write` interface expected by the report renderer. I/O errors are
/// collapsed into `fmt::Error`.
struct ToWriteFmt<T>(pub T);

impl<T> fmt::Write for ToWriteFmt<T>
where
    T: io::Write,
{
    fn write_str(&mut self, s: &str) -> fmt::Result {
        match self.0.write_all(s.as_bytes()) {
            Ok(()) => Ok(()),
            Err(_) => Err(fmt::Error),
        }
    }
}
/// Renders a single report (diagnostic or log line) to stderr with the
/// given render configuration, resolving file contents through `session`.
/// Panics if stderr cannot be written to.
pub fn render_to_stderr<T, E>(render_config: &RenderConfig, session: &T, err: &E)
where
    T: FileCache,
    E: Report,
{
    let mut sink = ToWriteFmt(std::io::stderr());
    Report::render(err, session, render_config, &mut sink).unwrap();
}
/// Runs `fun` inside a fresh [Session] rooted at `root`, printing the
/// "Checking …" banner first and a "Compiled"/"Checked"/"Failed" banner
/// (with elapsed time) afterwards, followed by any diagnostics the pass
/// reported through the session's channel.
///
/// Returns `fun`'s result when no diagnostics were emitted; otherwise
/// renders them and returns an error (a [ResolutionError] when `fun`
/// itself succeeded despite the diagnostics).
pub fn compile_in_session<T>(
    render_config: RenderConfig,
    root: PathBuf,
    file: String,
    compiled: bool,
    fun: &mut dyn FnMut(&mut Session) -> anyhow::Result<T>,
) -> anyhow::Result<T> {
    // Fix: `channel()` returns `(Sender, Receiver)`; the original bound
    // them as `(rx, tx)` — backwards — which is misleading to readers.
    let (sender, receiver) = std::sync::mpsc::channel();
    let mut session = Session::new(root, sender);
    eprintln!();
    render_to_stderr(
        &render_config,
        &session,
        &Log::Checking(format!("the file '{}'", file)),
    );
    let start = Instant::now();
    let res = fun(&mut session);
    // Drain every diagnostic that was reported while `fun` ran.
    let diagnostics = receiver.try_iter().collect::<Vec<Box<dyn Diagnostic>>>();
    if diagnostics.is_empty() {
        render_to_stderr(
            &render_config,
            &session,
            &if compiled {
                Log::Compiled(start.elapsed())
            } else {
                Log::Checked(start.elapsed())
            },
        );
        eprintln!();
        res
    } else {
        render_to_stderr(&render_config, &session, &Log::Failed(start.elapsed()));
        eprintln!();
        for diagnostic in diagnostics {
            render_to_stderr(&render_config, &session, &diagnostic)
        }
        // Even if `fun` returned Ok, diagnostics mean the result must not
        // be trusted — surface a failure to the caller.
        match res {
            Ok(_) => Err(ResolutionError.into()),
            Err(res) => Err(res),
        }
    }
}
/// Dispatches the parsed CLI configuration: sets up rendering (colors,
/// unicode), resolves the project root, assembles the entrypoint list
/// ("Main" plus an optional user entrypoint), then runs the selected
/// subcommand inside [compile_in_session].
pub fn run_cli(config: Cli) -> anyhow::Result<()> {
    kind_report::check_if_colors_are_supported(config.no_color);
    let render_config = kind_report::check_if_utf8_is_supported(config.ascii, 2);
    let root = config.root.unwrap_or_else(|| PathBuf::from("."));

    // "Main" is always kept alive by the erasure checker; the user may
    // pin one extra entrypoint via --entrypoint.
    let mut entrypoints = vec!["Main".to_string()];
    if let Some(res) = &config.entrypoint {
        entrypoints.push(res.clone())
    }

    match config.command {
        Command::Check { file } => {
            compile_in_session(render_config, root, file.clone(), false, &mut |session| {
                driver::type_check_book(session, &PathBuf::from(file.clone()), entrypoints.clone(), config.tids)
            })?;
        }
        Command::ToHVM { file } => {
            let result = compile_in_session(render_config, root, file.clone(), true, &mut |session| {
                let book =
                    driver::erase_book(session, &PathBuf::from(file.clone()), entrypoints.clone())?;
                Ok(driver::compile_book_to_hvm(book, config.trace))
            })?;
            println!("{}", result);
        }
        Command::Run { file } => {
            let res = compile_in_session(render_config, root, file.clone(), true, &mut |session| {
                let book =
                    driver::erase_book(session, &PathBuf::from(file.clone()), entrypoints.clone())?;
                driver::check_main_entry(session, &book)?;
                Ok(driver::compile_book_to_hvm(book, config.trace))
            })?;
            // Execution failures are printed, not propagated as an error.
            match driver::execute_file(&res.to_string(), config.tids) {
                Ok(res) => println!("{}", res),
                Err(err) => println!("{}", err),
            }
        }
        Command::Show { file } => {
            compile_in_session(render_config, root, file.clone(), true, &mut |session| {
                driver::to_book(session, &PathBuf::from(file.clone()))
            })
            .map(|res| {
                print!("{}", res);
                res
            })?;
        }
        Command::ToKindCore { file } => {
            let res = compile_in_session(render_config, root, file.clone(), true, &mut |session| {
                driver::desugar_book(session, &PathBuf::from(file.clone()))
            })?;
            print!("{}", res);
        }
        Command::Erase { file } => {
            let res = compile_in_session(render_config, root, file.clone(), true, &mut |session| {
                driver::erase_book(session, &PathBuf::from(file.clone()), entrypoints.clone())
            })?;
            print!("{}", res);
        }
        Command::GenChecker { file } => {
            let res = compile_in_session(render_config, root, file.clone(), true, &mut |session| {
                driver::check_erasure_book(session, &PathBuf::from(file.clone()))
            })?;
            print!("{}", driver::generate_checker(&res));
        }
        Command::Eval { file } => {
            let res = compile_in_session(render_config, root, file.clone(), true, &mut |session| {
                let book = driver::desugar_book(session, &PathBuf::from(file.clone()))?;
                driver::check_main_desugared_entry(session, &book)?;
                Ok(book)
            })?;
            println!("{}", driver::eval_in_checker(&res));
        }
        Command::ToKDL { file, namespace } => {
            let res = compile_in_session(render_config, root, file.clone(), true, &mut |session| {
                driver::compile_book_to_kdl(
                    &PathBuf::from(file.clone()),
                    session,
                    &namespace.clone().unwrap_or("".to_string()),
                    entrypoints.clone(),
                )
            })?;
            println!("{}", res);
        }
    }

    Ok(())
}
/// Binary entry point: parses the command line, runs the CLI and maps
/// the outcome to a process exit status (0 on success, 1 on failure).
/// Error details are already rendered inside `compile_in_session`.
pub fn main() {
    let status = if run_cli(Cli::parse()).is_ok() { 0 } else { 1 };
    std::process::exit(status);
}

View File

@ -0,0 +1,13 @@
[package]
name = "kind-derive"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
kind-span = { path = "../kind-span" }
kind-tree = { path = "../kind-tree" }
kind-report = { path = "../kind-report" }
fxhash = "0.2.1"
im-rc = "*"

View File

@ -0,0 +1,33 @@
use kind_report::data::{Color, Diagnostic, DiagnosticFrame, Marker, Severity};
use kind_span::Range;
/// Errors produced while deriving definitions from declarations.
pub(crate) enum DeriveError {
    // Named bindings are rejected by the match derivation (error 103).
    CannotUseNamedVariable(Range),
}

impl Diagnostic for DeriveError {
    fn get_syntax_ctx(&self) -> Option<kind_span::SyntaxCtxIndex> {
        // Single-variant enum: the pattern is irrefutable.
        let DeriveError::CannotUseNamedVariable(range) = self;
        Some(range.ctx)
    }

    fn to_diagnostic_frame(&self) -> DiagnosticFrame {
        let DeriveError::CannotUseNamedVariable(range) = self;
        DiagnosticFrame {
            code: 103,
            severity: Severity::Error,
            title: "Cannot use named variable on match derivations".to_string(),
            subtitles: vec![],
            hints: vec![],
            positions: vec![Marker {
                position: *range,
                color: Color::Fst,
                text: "Here!".to_string(),
                no_code: false,
                main: true,
            }],
        }
    }
}

View File

@ -0,0 +1,117 @@
//! Module to derive getter functions for the fields of a record.
use kind_span::Range;
use kind_tree::concrete::expr::Expr;
use kind_tree::concrete::pat::{Pat, PatIdent};
use kind_tree::concrete::*;
use kind_tree::concrete::{self};
use kind_tree::symbol::{Ident, QualifiedIdent};
/// Derives one `<Record>.<field>.get` entry per record field.
///
/// Each getter takes the record's parameters (implicit) plus a
/// scrutinee of the record type, pattern-matches the single constructor
/// binding every field as `<field>_`, and returns the requested field.
pub fn derive_getters(range: Range, rec: &RecordDecl) -> Vec<concrete::Entry> {
    // Local AST constructors, all positioned at `range`.
    let mk_var = |name: Ident| -> Box<Expr> {
        Box::new(Expr {
            data: ExprKind::Var { name },
            range,
        })
    };
    let mk_cons = |name: QualifiedIdent, args: Vec<Binding>| -> Box<Expr> {
        Box::new(Expr {
            data: ExprKind::Constr { name, args },
            range,
        })
    };
    let mut types = Telescope::default();
    for arg in rec.parameters.iter() {
        types.push(arg.to_implicit())
    }
    // The scrutinee's type: the record applied to all of its parameters.
    let all_args = rec.parameters.clone();
    let res_motive_ty = mk_cons(
        rec.name.clone(),
        all_args
            .iter()
            .cloned()
            .map(|x| Binding::Positional(mk_var(x.name)))
            .collect(),
    );
    // Scrutinee argument.
    types.push(Argument {
        hidden: false,
        erased: false,
        name: Ident::generate("scrutinizer"),
        typ: Some(res_motive_ty),
        range,
    });
    // Left-hand-side pattern: the constructor with every field bound
    // under a `<field>_` name (suffix avoids clashing with field names).
    let mut pats: Vec<Box<Pat>> = Vec::new();
    let spine: Vec<_> = rec
        .fields
        .iter()
        .map(|(name, _, ty)| (name, ty))
        .collect();
    pats.push(Box::new(Pat {
        data: concrete::pat::PatKind::App(
            rec.name.add_segment(rec.constructor.to_str()),
            spine
                .iter()
                .cloned()
                .map(|x| {
                    Box::new(Pat {
                        data: concrete::pat::PatKind::Var(PatIdent(x.0.clone().with_name(|f| format!("{}_", f)))),
                        range,
                    })
                })
                .collect(),
        ),
        range,
    }));
    // One entry per field: same pattern, body returns that field's binder.
    let mut entries = vec![];
    for (arg, typ) in spine {
        let body = mk_var(arg.with_name(|f| format!("{}_", f)).clone());
        let mut name = rec
            .name
            .add_segment(arg.to_str())
            .add_segment("get");
        name.range = rec.constructor.range;
        let rules = vec![Box::new(Rule {
            name: name.clone(),
            pats: pats.clone(),
            body,
            range: rec.constructor.range,
        })];
        let entry = Entry {
            name: name.clone(),
            docs: Vec::new(),
            args: types.clone(),
            typ: typ.clone(),
            rules,
            range: rec.constructor.range,
            attrs: Vec::new(),
            generated_by: Some(rec.name.to_string().clone()),
        };
        entries.push(entry)
    }
    entries
}

View File

@ -0,0 +1,8 @@
//! Utility to derive functions from their definitions.

pub mod errors; // Diagnostics emitted by the derivers.
pub mod matching; // Dependent eliminator (`match`) derivation for sum types.
pub mod open; // `open` function derivation for records.
pub mod subst; // Variable-substitution visitor used by `matching`.
pub mod getters; // Per-field getter derivation for records.
pub mod setters; // Per-field setter derivation for records.

View File

@ -0,0 +1,390 @@
//! Module to derive a dependent
//! eliminator out of a sum type declaration.
use fxhash::FxHashMap;
use kind_report::data::Diagnostic;
use kind_span::Range;
use kind_tree::concrete::expr::Expr;
use kind_tree::concrete::pat::{Pat, PatIdent};
use kind_tree::concrete::*;
use kind_tree::concrete::{self};
use kind_tree::symbol::{Ident, QualifiedIdent};
use crate::errors::DeriveError;
use crate::subst::substitute_in_expr;
/// Derives a dependent eliminator (`<Type>.match`) from a sum type
/// declaration.
///
/// The generated entry takes: the type's parameters and indices
/// (implicit), a scrutinee, an erased motive (a function from indices
/// and the scrutinee to Type), and one argument per constructor. One
/// rewrite rule per constructor applies the matching case argument.
/// Named bindings inside constructor result types are rejected via
/// [DeriveError::CannotUseNamedVariable].
pub fn derive_match(
    range: Range,
    sum: &SumTypeDecl,
) -> (concrete::Entry, Vec<Box<dyn Diagnostic>>) {
    let mut errs: Vec<Box<dyn Diagnostic>> = Vec::new();

    // Local AST constructors, all positioned at `range`.
    let mk_var = |name: Ident| -> Box<Expr> {
        Box::new(Expr {
            data: ExprKind::Var { name },
            range,
        })
    };
    let mk_cons = |name: QualifiedIdent, args: Vec<Binding>| -> Box<Expr> {
        Box::new(Expr {
            data: ExprKind::Constr { name, args },
            range,
        })
    };
    let mk_app = |fun: Box<Expr>, args: Vec<AppBinding>, range: Range| -> Box<Expr> {
        Box::new(Expr {
            data: ExprKind::App { fun, args },
            range,
        })
    };
    let mk_pi = |name: Ident, typ: Box<Expr>, body: Box<Expr>| -> Box<Expr> {
        Box::new(Expr {
            data: ExprKind::All {
                param: Some(name),
                typ,
                body,
                erased: false,
            },
            range,
        })
    };
    let mk_typ = || -> Box<Expr> {
        Box::new(Expr {
            data: ExprKind::Lit { lit: Literal::Type },
            range,
        })
    };

    let name = sum.name.add_segment("match");
    let mut types = Telescope::default();
    for arg in sum.parameters.iter() {
        types.push(arg.to_implicit())
    }
    for arg in sum.indices.iter() {
        types.push(arg.to_implicit())
    }

    // The scrutinee's type: the sum type applied to parameters + indices.
    let all_args = sum.parameters.extend(&sum.indices);
    let res_motive_ty = mk_cons(
        sum.name.clone(),
        all_args
            .iter()
            .cloned()
            .map(|x| Binding::Positional(mk_var(x.name)))
            .collect(),
    );

    let indice_names: Vec<AppBinding> = sum
        .indices
        .iter()
        .map(|x| AppBinding::explicit(mk_var(x.name.clone())))
        .collect();

    // Scrutinee argument.
    types.push(Argument {
        hidden: false,
        erased: false,
        name: Ident::generate("scrutinizer"),
        typ: Some(res_motive_ty.clone()),
        range,
    });

    // Motive: (indices ->) (val_ : <Type> ...) -> Type, passed erased.
    let motive_ident = Ident::new_static("motive", range);
    let motive_type = sum.indices.iter().rfold(
        mk_pi(Ident::new_static("val_", range), res_motive_ty, mk_typ()),
        |out, arg| {
            mk_pi(
                arg.name.clone(),
                arg.typ.clone().unwrap_or_else(mk_typ),
                out,
            )
        },
    );
    types.push(Argument {
        hidden: false,
        erased: true,
        name: motive_ident.clone(),
        typ: Some(motive_type),
        range,
    });

    let params = sum
        .parameters
        .map(|x| Binding::Positional(mk_var(x.name.clone())));
    let indices = sum
        .indices
        .map(|x| Binding::Positional(mk_var(x.name.clone())));

    // One case argument per constructor, typed by the motive applied to
    // that constructor's indices and the constructor instance itself.
    for cons in &sum.constructors {
        let vars: Vec<Binding> = cons
            .args
            .iter()
            .map(|x| Binding::Positional(mk_var(x.name.clone())))
            .collect();

        let cons_inst = mk_cons(
            sum.name.add_segment(cons.name.to_str()),
            [
                params.as_slice(),
                // Constructors without an explicit result type implicitly
                // take the sum's indices as extra arguments.
                if cons.typ.is_none() {
                    indices.as_slice()
                } else {
                    &[]
                },
                vars.as_slice(),
            ]
            .concat(),
        );

        let mut indices_of_cons = match cons.typ.clone().map(|x| x.data) {
            Some(ExprKind::Constr { name: _, args }) => {
                let mut new_args = Vec::with_capacity(args.len());
                for arg in &args[sum.parameters.len()..].to_vec() {
                    new_args.push(match arg {
                        Binding::Positional(expr) => AppBinding::explicit(expr.clone()),
                        Binding::Named(range, _, expr) => {
                            // Named bindings cannot be positioned reliably
                            // in the generated application — report and
                            // degrade to positional.
                            errs.push(Box::new(DeriveError::CannotUseNamedVariable(*range)));
                            AppBinding::explicit(expr.clone())
                        }
                    });
                }
                new_args
            }
            _ => [indice_names.as_slice()].concat(),
        };

        indices_of_cons.push(AppBinding::explicit(cons_inst));

        let cons_tipo = mk_app(mk_var(motive_ident.clone()), indices_of_cons, range);

        let args = if cons.typ.is_some() {
            cons.args.clone()
        } else {
            sum.indices.extend(&cons.args)
        };

        let cons_type = args.iter().rfold(cons_tipo, |out, arg| {
            mk_pi(
                arg.name.clone(),
                arg.typ.clone().unwrap_or_else(mk_typ),
                out,
            )
        });

        types.push(Argument {
            hidden: false,
            erased: false,
            name: Ident::new_static(&format!("{}_", cons.name), range),
            typ: Some(cons_type),
            range,
        });
    }

    // On error, still return a syntactically valid entry (hole-typed,
    // no rules) alongside the collected diagnostics.
    if !errs.is_empty() {
        let entry = Entry {
            name,
            docs: Vec::new(),
            args: types,
            typ: Box::new(Expr {
                data: ExprKind::Hole,
                range,
            }),
            rules: vec![],
            range,
            attrs: Vec::new(),
            generated_by: Some(sum.name.to_string()),
        };
        return (entry, errs);
    }

    // Result type: motive applied to the indices and the scrutinee.
    let mut res: Vec<AppBinding> = [indice_names.as_slice()].concat();
    res.push(AppBinding::explicit(mk_var(Ident::generate("scrutinizer"))));
    let ret_ty = mk_app(mk_var(motive_ident.clone()), res, range);

    // One rewrite rule per constructor: match the constructor, bind the
    // motive and every case argument, apply the matching case.
    let mut rules = Vec::new();
    for cons in &sum.constructors {
        let cons_ident = sum.name.add_segment(cons.name.to_str());
        let mut pats: Vec<Box<Pat>> = Vec::new();

        let irrelev: Vec<bool>;
        let spine_params: Vec<Ident>;
        let spine: Vec<Ident>;
        let mut args_indices: Vec<AppBinding>;

        match &cons.typ {
            Some(expr) => match &**expr {
                Expr {
                    data: ExprKind::Constr { args, .. },
                    ..
                } => {
                    irrelev = cons.args.map(|x| x.erased).to_vec();
                    spine_params = sum
                        .parameters
                        .extend(&cons.args)
                        .map(|x| x.name.with_name(|f| format!("{}_", f)))
                        .to_vec();
                    spine = cons
                        .args
                        .map(|x| x.name.with_name(|f| format!("{}_", f)))
                        .to_vec();
                    args_indices = args
                        .iter()
                        .map(|x| match x {
                            Binding::Positional(expr) => AppBinding {
                                erased: false,
                                data: expr.clone(),
                            },
                            // Named bindings were reported above; this
                            // path is only reached when errs is empty.
                            Binding::Named(_, _, _) => unreachable!(),
                        })
                        .collect::<Vec<AppBinding>>();
                    args_indices = {
                        let mut indices = args_indices[sum.parameters.len()..].to_vec();
                        // Rename every bound name to its `<name>_` form so
                        // the indices refer to the pattern binders.
                        let renames = FxHashMap::from_iter(
                            sum.parameters
                                .extend(&cons.args)
                                .map(|x| (x.name.to_string(), format!("{}_", x.name)))
                                .iter()
                                .cloned(),
                        );

                        for indice in &mut indices {
                            substitute_in_expr(&mut indice.data, &renames)
                        }

                        indices
                    };
                }
                _ => unreachable!(),
            },
            None => {
                irrelev = sum.indices.extend(&cons.args).map(|x| x.erased).to_vec();
                spine_params = sum
                    .parameters
                    .extend(&sum.indices)
                    .extend(&cons.args)
                    .map(|x| x.name.with_name(|f| format!("{}_", f)))
                    .to_vec();
                spine = sum
                    .indices
                    .extend(&cons.args)
                    .map(|x| x.name.with_name(|f| format!("{}_", f)))
                    .to_vec();
                args_indices = sum
                    .indices
                    .clone()
                    .map(|x| AppBinding {
                        data: mk_var(x.name.clone()),
                        erased: false,
                    })
                    .to_vec();
            }
        }

        pats.push(Box::new(Pat {
            data: concrete::pat::PatKind::App(
                cons_ident.clone(),
                spine_params
                    .iter()
                    .cloned()
                    .map(|x| {
                        Box::new(Pat {
                            data: concrete::pat::PatKind::Var(PatIdent(x)),
                            range,
                        })
                    })
                    .collect(),
            ),
            range,
        }));

        pats.push(Box::new(Pat {
            data: concrete::pat::PatKind::Var(PatIdent(Ident::generate("motive"))),
            range,
        }));

        for cons2 in &sum.constructors {
            pats.push(Box::new(Pat {
                data: concrete::pat::PatKind::Var(PatIdent(cons2.name.clone())),
                range,
            }));
        }

        let mut args = args_indices.clone();

        args.push(AppBinding {
            data: Box::new(Expr {
                data: ExprKind::Constr {
                    name: cons_ident.clone(),
                    args: spine_params
                        .iter()
                        .cloned()
                        .map(|x| Binding::Positional(mk_var(x)))
                        .collect(),
                },
                range,
            }),
            erased: false,
        });

        // Rule body: the case argument applied to the constructor's
        // fields, annotated with the motive instantiated at this case.
        let body = Box::new(Expr {
            data: ExprKind::Ann {
                val: mk_app(
                    mk_var(cons.name.clone()),
                    spine
                        .iter()
                        .zip(irrelev)
                        .map(|(arg, erased)| AppBinding {
                            data: mk_var(arg.clone()),
                            erased,
                        })
                        .collect(),
                    cons.name.range,
                ),
                typ: mk_app(mk_var(motive_ident.clone()), args, range),
            },
            range,
        });

        let rule = Box::new(Rule {
            name: name.clone(),
            pats,
            body,
            range: cons.name.range,
        });

        rules.push(rule)
    }

    // Assemble the final entry.
    let entry = Entry {
        name,
        docs: Vec::new(),
        args: types,
        typ: ret_ty,
        rules,
        range,
        attrs: Vec::new(),
        generated_by: Some(sum.name.to_string().clone()),
    };

    (entry, errs)
}

View File

@ -0,0 +1,164 @@
//! Module to derive a "open" function for records.
use kind_span::Range;
use kind_tree::concrete::expr::Expr;
use kind_tree::concrete::pat::{Pat, PatIdent};
use kind_tree::concrete::*;
use kind_tree::concrete::{self};
use kind_tree::symbol::{Ident, QualifiedIdent};
/// Derives a `<Record>.<constructor>.open` entry for a record.
///
/// `open` is a non-dependent eliminator: it takes the record's
/// parameters (implicit), an erased result type `res_`, a scrutinee, and
/// a continuation `fun` receiving every field; the single rule applies
/// the continuation to the destructured fields.
pub fn derive_open(range: Range, rec: &RecordDecl) -> concrete::Entry {
    // Local AST constructors, all positioned at `range`.
    let mk_var = |name: Ident| -> Box<Expr> {
        Box::new(Expr {
            data: ExprKind::Var { name },
            range,
        })
    };
    let mk_cons = |name: QualifiedIdent, args: Vec<Binding>| -> Box<Expr> {
        Box::new(Expr {
            data: ExprKind::Constr { name, args },
            range,
        })
    };
    let mk_app = |fun: Box<Expr>, args: Vec<AppBinding>| -> Box<Expr> {
        Box::new(Expr {
            data: ExprKind::App { fun, args },
            range,
        })
    };
    let mk_pi = |name: Ident, typ: Box<Expr>, body: Box<Expr>, erased: bool| -> Box<Expr> {
        Box::new(Expr {
            data: ExprKind::All {
                param: Some(name),
                typ,
                body,
                erased,
            },
            range,
        })
    };

    let mut name = rec
        .name
        .add_segment(rec.constructor.to_str())
        .add_segment("open");
    name.range = rec.constructor.range;

    let mut types = Telescope::default();
    for arg in rec.parameters.iter() {
        types.push(arg.to_implicit())
    }

    // The scrutinee's type: the record applied to all of its parameters.
    let all_args = rec.parameters.clone();
    let res_motive_ty = mk_cons(
        rec.name.clone(),
        all_args
            .iter()
            .cloned()
            .map(|x| Binding::Positional(mk_var(x.name)))
            .collect(),
    );

    // Erased, untyped result-type argument `res_`.
    types.push(Argument {
        hidden: true,
        erased: true,
        name: Ident::generate("res_"),
        typ: None,
        range,
    });

    // Continuation type: field_1 -> ... -> field_n -> res_.
    let cons_tipo = mk_var(Ident::generate("res_"));
    let cons_type = rec.fields.iter().rfold(cons_tipo, |out, (name, _, typ)| {
        mk_pi(name.clone(), typ.clone(), out, false)
    });

    // Scrutinee argument.
    types.push(Argument {
        hidden: false,
        erased: false,
        name: Ident::generate("scrutinizer"),
        typ: Some(res_motive_ty),
        range,
    });

    types.push(Argument {
        hidden: false,
        erased: false,
        name: Ident::generate("fun"),
        typ: Some(cons_type),
        range,
    });

    let ret_ty = mk_var(Ident::generate("res_"));

    // LHS pattern: the constructor with every field bound as `<field>_`,
    // plus the continuation bound as `fun_`.
    let mut pats: Vec<Box<Pat>> = Vec::new();

    let spine: Vec<Ident> = rec
        .fields
        .iter()
        .map(|(name, _, _)| name.with_name(|f| format!("{}_", f)))
        .collect();

    pats.push(Box::new(Pat {
        data: concrete::pat::PatKind::App(
            rec.name.add_segment(rec.constructor.to_str()),
            spine
                .iter()
                .cloned()
                .map(|x| {
                    Box::new(Pat {
                        data: concrete::pat::PatKind::Var(PatIdent(x)),
                        range,
                    })
                })
                .collect(),
        ),
        range,
    }));

    pats.push(Box::new(Pat {
        data: concrete::pat::PatKind::Var(PatIdent(Ident::generate("fun_"))),
        range,
    }));

    // RHS: apply the continuation to every bound field.
    let body = mk_app(
        mk_var(Ident::generate("fun_")),
        spine
            .iter()
            .map(|arg| AppBinding {
                data: mk_var(arg.clone()),
                erased: false,
            })
            .collect(),
    );

    let rules = vec![Box::new(Rule {
        name: name.clone(),
        pats,
        body,
        range: rec.constructor.range,
    })];

    // Assemble the final entry.
    Entry {
        name,
        docs: Vec::new(),
        args: types,
        typ: ret_ty,
        rules,
        range: rec.constructor.range,
        attrs: Vec::new(),
        generated_by: Some(rec.name.to_string()),
    }
}

View File

@ -0,0 +1,174 @@
//! Module to derive setter functions for the fields of a record.
use kind_span::Range;
use kind_tree::concrete::expr::Expr;
use kind_tree::concrete::pat::{Pat, PatIdent};
use kind_tree::concrete::*;
use kind_tree::concrete::{self};
use kind_tree::symbol::{Ident, QualifiedIdent};
/// Derives one `<Record>.<field>.set` entry per record field.
///
/// Each setter takes the record's parameters (implicit), a scrutinee of
/// the record type and a replacement value; its single rule rebuilds the
/// constructor with the chosen field replaced and returns the record type.
pub fn derive_setters(range: Range, rec: &RecordDecl) -> Vec<concrete::Entry> {
    // Local AST constructors, all positioned at `range`.
    let mk_var = |name: Ident| -> Box<Expr> {
        Box::new(Expr {
            data: ExprKind::Var { name },
            range,
        })
    };
    let mk_cons = |name: QualifiedIdent, args: Vec<Binding>| -> Box<Expr> {
        Box::new(Expr {
            data: ExprKind::Constr { name, args },
            range,
        })
    };
    let typ = |range: Range| -> Box<Expr> {
        Box::new(Expr {
            data: ExprKind::Lit { lit: Literal::Type },
            range,
        })
    };
    let mk_pat_var = |name: Ident| {
        Box::new(Pat {
            range: name.range,
            data: concrete::pat::PatKind::Var(PatIdent(name)),
        })
    };

    let mut types = Telescope::default();
    for arg in rec.parameters.iter() {
        types.push(arg.to_implicit())
    }

    // The scrutinee's (and result's) type: the record applied to all of
    // its parameters.
    let all_args = rec.parameters.clone();
    let res_motive_ty = mk_cons(
        rec.name.clone(),
        all_args
            .iter()
            .cloned()
            .map(|x| Binding::Positional(mk_var(x.name)))
            .collect(),
    );

    // Scrutinee argument.
    types.push(Argument {
        hidden: false,
        erased: false,
        name: Ident::generate("scrutinizer"),
        typ: Some(res_motive_ty.clone()),
        range,
    });

    // LHS pattern: the constructor applied to parameters and fields.
    let mut pats: Vec<Box<Pat>> = Vec::new();

    let fields_spine: Vec<_> = rec
        .fields
        .iter()
        .map(|(name, _, typ)| (name.clone(), typ.clone()))
        .collect();

    let params_spine: Vec<_> = rec
        .parameters
        .iter()
        .map(|arg| {
            (
                arg.name.clone(),
                arg.typ.clone().unwrap_or_else(|| typ(arg.range.clone())),
            )
        })
        .collect();

    let spine = [params_spine.as_slice(), fields_spine.as_slice()].concat();

    pats.push(Box::new(Pat {
        data: concrete::pat::PatKind::App(
            rec.name.add_segment(rec.constructor.to_str()),
            spine
                .iter()
                .cloned()
                .map(|(name, _)| mk_pat_var(name))
                .collect(),
        ),
        range,
    }));

    // One entry per field: bind the replacement value, rebuild the
    // constructor with the field at `place` swapped out.
    let mut entries = vec![];

    let mut cons_name = rec.name.add_segment(rec.constructor.to_str());
    cons_name.range = rec.constructor.range;

    for (i, (arg, cons_typ)) in fields_spine.iter().enumerate() {
        let mut types = types.clone();
        // Index of this field in the full (params + fields) spine.
        let place = rec.parameters.len() + i;

        types.push(Argument {
            hidden: false,
            erased: false,
            name: Ident::generate("set"),
            typ: Some(cons_typ.clone()),
            range,
        });

        let new_var = Ident::generate("_new_var");

        let mut pats = pats.clone();
        pats.push(Box::new(Pat {
            data: concrete::pat::PatKind::Var(PatIdent(new_var.clone())),
            range,
        }));

        let mut args: Vec<_> = spine
            .iter()
            .cloned()
            .map(|x| Binding::Positional(mk_var(x.0)))
            .collect();

        args[place] = Binding::Positional(mk_var(new_var));

        let body = Box::new(Expr {
            data: ExprKind::Constr {
                name: cons_name.clone(),
                args,
            },
            range,
        });

        let mut name = rec.name.add_segment(arg.to_str()).add_segment("set");
        name.range = rec.constructor.range;

        let rules = vec![Box::new(Rule {
            name: name.clone(),
            pats: pats.clone(),
            body,
            range: rec.constructor.range,
        })];

        let entry = Entry {
            name: name.clone(),
            docs: Vec::new(),
            args: types.clone(),
            typ: res_motive_ty.clone(),
            rules,
            range: rec.constructor.range,
            attrs: Vec::new(),
            generated_by: Some(rec.name.to_string().clone()),
        };

        entries.push(entry)
    }

    entries
}

View File

@ -0,0 +1,278 @@
use fxhash::FxHashMap;
use kind_span::Range;
use kind_tree::concrete::expr::{Binding, Case, CaseBinding, Destruct, Expr, ExprKind, SttmKind};
use kind_tree::concrete::pat::{Pat, PatIdent, PatKind};
use kind_tree::concrete::visitor::Visitor;
use kind_tree::symbol::{Ident, QualifiedIdent, Symbol};
use kind_tree::visit_vec;
/// Visitor that renames free variables according to `names`, leaving
/// anything currently bound in `context_vars` untouched.
pub struct Subst<'a> {
    // Variables bound at the current point of the traversal; a name
    // present here shadows any entry in `names`.
    pub context_vars: Vec<(Range, String)>,
    // Map from original variable name to its replacement.
    pub names: &'a FxHashMap<String, String>,
}
impl<'a> Visitor for Subst<'a> {
fn visit_attr(&mut self, _: &mut kind_tree::concrete::Attribute) {}
fn visit_ident(&mut self, ident: &mut Ident) {
let name = ident.to_str();
if self.context_vars.iter().all(|x| x.1 != name) {
if let Some(res) = self.names.get(name) {
ident.data = Symbol::new(res.clone())
}
}
}
fn visit_pat_ident(&mut self, ident: &mut PatIdent) {
self.visit_ident(&mut ident.0)
}
fn visit_destruct(&mut self, destruct: &mut Destruct) {
match destruct {
Destruct::Destruct(range, ty, bindings, _) => {
self.visit_qualified_ident(
&mut QualifiedIdent::add_segment(ty, "open").to_generated(),
);
self.visit_range(range);
self.visit_qualified_ident(ty);
for bind in bindings {
self.visit_case_binding(bind)
}
}
Destruct::Ident(ident) => self.context_vars.push((ident.range, ident.to_string())),
}
}
fn visit_sttm(&mut self, sttm: &mut kind_tree::concrete::expr::Sttm) {
match &mut sttm.data {
SttmKind::Ask(ident, val, next) => {
self.visit_expr(val);
let vars = self.context_vars.clone();
self.visit_destruct(ident);
self.visit_sttm(next);
self.context_vars = vars;
}
SttmKind::Let(ident, val, next) => {
self.visit_expr(val);
let vars = self.context_vars.clone();
self.visit_destruct(ident);
self.visit_sttm(next);
self.context_vars = vars;
}
SttmKind::Expr(expr, next) => {
self.visit_expr(expr);
self.visit_sttm(next);
}
SttmKind::Return(expr) => {
self.visit_expr(expr);
}
SttmKind::RetExpr(expr) => {
self.visit_expr(expr);
}
}
}
fn visit_pat(&mut self, pat: &mut Pat) {
match &mut pat.data {
PatKind::Var(ident) => self.visit_pat_ident(ident),
PatKind::Str(_) => (),
PatKind::U60(_) => (),
PatKind::U120(_) => (),
PatKind::F60(_) => (),
PatKind::Char(_) => (),
PatKind::Hole => (),
PatKind::List(ls) => {
for pat in ls {
self.visit_pat(pat)
}
}
PatKind::Pair(fst, snd) => {
self.visit_pat(fst);
self.visit_pat(snd);
}
PatKind::App(t, ls) => {
self.visit_qualified_ident(t);
for pat in ls {
self.visit_pat(pat)
}
}
}
}
fn visit_case_binding(&mut self, case_binding: &mut CaseBinding) {
match case_binding {
CaseBinding::Field(ident) | CaseBinding::Renamed(_, ident) => {
self.context_vars.push((ident.range, ident.to_string()))
}
}
}
/// Visits one match case. Its field bindings shadow outer names inside
/// the case body only, so the variable context is restored afterwards.
fn visit_case(&mut self, case: &mut Case) {
    let saved = self.context_vars.clone();
    case.bindings
        .iter_mut()
        .for_each(|binding| self.visit_case_binding(binding));
    self.visit_expr(&mut case.value);
    self.context_vars = saved;
}
/// Visits a match expression: scrutinee first, then every case, then
/// the optional motive.
fn visit_match(&mut self, matcher: &mut kind_tree::concrete::expr::Match) {
    self.visit_expr(&mut matcher.scrutinizer);
    matcher
        .cases
        .iter_mut()
        .for_each(|case| self.visit_case(case));
    if let Some(motive) = &mut matcher.motive {
        self.visit_expr(motive);
    }
}
/// Visits an argument binding; positional and named bindings both
/// wrap a single expression.
fn visit_binding(&mut self, binding: &mut Binding) {
    match binding {
        Binding::Positional(expr) | Binding::Named(_, _, expr) => self.visit_expr(expr),
    }
}
/// Core of the renaming pass. Every binder (`All`, `Lambda`, `Sigma`
/// with a named parameter, `Let`) pushes its bound name onto
/// `context_vars` before visiting the body and pops/restores it
/// afterwards, so bound occurrences are never renamed. Sugared forms
/// also visit the generated qualified names they will expand to.
fn visit_expr(&mut self, expr: &mut Expr) {
match &mut expr.data {
ExprKind::Var { name } => self.visit_ident(name),
ExprKind::Constr { name, args } => {
self.visit_qualified_ident(name);
visit_vec!(args.iter_mut(), arg => self.visit_binding(arg));
}
// Anonymous Pi type: nothing is bound.
ExprKind::All {
param: None,
typ,
body,
..
} => {
self.visit_expr(typ);
self.visit_expr(body);
}
// Named Pi type: the parameter is bound inside the body only.
ExprKind::All {
param: Some(ident),
typ,
body,
..
} => {
self.visit_expr(typ);
self.context_vars.push((ident.range, ident.to_string()));
self.visit_expr(body);
self.context_vars.pop();
}
ExprKind::Lambda {
param, typ, body, ..
} => {
match typ {
Some(x) => self.visit_expr(x),
None => (),
}
self.context_vars.push((param.range, param.to_string()));
self.visit_expr(body);
self.context_vars.pop();
}
ExprKind::App { fun, args } => {
self.visit_expr(fun);
visit_vec!(args.iter_mut(), arg => self.visit_expr(&mut arg.data));
}
ExprKind::Ann { val, typ } => {
self.visit_expr(val);
self.visit_expr(typ);
}
ExprKind::Lit { lit } => self.visit_literal(lit),
ExprKind::Binary { op: _, fst, snd } => {
self.visit_expr(fst);
self.visit_expr(snd);
}
// `let` may destructure, so save/restore rather than push/pop.
ExprKind::Let { name, val, next } => {
self.visit_expr(val);
let vars = self.context_vars.clone();
self.visit_destruct(name);
self.visit_expr(next);
self.context_vars = vars;
}
// Sigma types desugar through the `Sigma` definition.
ExprKind::Sigma {
param: None,
fst,
snd,
} => {
self.visit_qualified_ident(&mut QualifiedIdent::new_static(
"Sigma", None, expr.range,
));
self.visit_expr(fst);
self.visit_expr(snd);
}
ExprKind::Sigma {
param: Some(ident),
fst,
snd,
} => {
self.visit_qualified_ident(&mut QualifiedIdent::new_static(
"Sigma", None, expr.range,
));
self.visit_expr(fst);
self.context_vars.push((ident.range, ident.to_string()));
self.visit_expr(snd);
self.context_vars.pop();
}
// `match` relies on the generated `<Ty>.match` definition.
ExprKind::Match(matcher) => {
self.visit_qualified_ident(&mut matcher.typ.add_segment("match"));
self.visit_match(matcher)
}
ExprKind::Subst(subst) => {
self.visit_ident(&mut subst.name);
// Resolve the de-Bruijn-like index of the substituted name
// within the current variable context.
if let Some(pos) = self
.context_vars
.iter()
.position(|x| x.1 == subst.name.to_string())
{
subst.indx = pos;
}
self.visit_expr(&mut subst.expr)
}
ExprKind::Hole => {}
// `do` blocks desugar to the monad's `pure` and `bind`.
ExprKind::Do { typ, sttm } => {
self.visit_qualified_ident(&mut typ.add_segment("pure").to_generated());
self.visit_qualified_ident(&mut typ.add_segment("bind").to_generated());
self.visit_sttm(sttm)
}
ExprKind::If { cond, then_, else_ } => {
self.visit_qualified_ident(&mut QualifiedIdent::new_sugared(
"Bool", "if", expr.range,
));
self.visit_expr(cond);
self.visit_expr(then_);
self.visit_expr(else_);
}
ExprKind::Pair { fst, snd } => {
self.visit_qualified_ident(&mut QualifiedIdent::new_sugared(
"Pair", "new", expr.range,
));
self.visit_expr(fst);
self.visit_expr(snd);
}
// List sugar expands to `List.nil` / `List.cons`.
ExprKind::List { args } => {
self.visit_qualified_ident(&mut QualifiedIdent::new_sugared(
"List", "nil", expr.range,
));
self.visit_qualified_ident(&mut QualifiedIdent::new_sugared(
"List", "cons", expr.range,
));
visit_vec!(args.iter_mut(), arg => self.visit_expr(arg));
}
}
}
}
/// Applies the renaming given by `names` to every free variable of
/// `expr`; occurrences bound inside the expression are left untouched.
pub fn substitute_in_expr(expr: &mut Expr, names: &FxHashMap<String, String>) {
    Subst {
        context_vars: Default::default(),
        names,
    }
    .visit_expr(expr)
}

View File

@ -0,0 +1,24 @@
[package]
name = "kind-driver"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
kind-parser = { path = "../kind-parser" }
kind-tree = { path = "../kind-tree" }
kind-span = { path = "../kind-span" }
kind-report = { path = "../kind-report" }
kind-checker = { path = "../kind-checker" }
kind-pass = { path = "../kind-pass" }
kind-target-hvm = { path = "../kind-target-hvm" }
kind-target-kdl = { path = "../kind-target-kdl" }
hvm = { git = "https://github.com/Kindelia/HVM.git" }
anyhow = "1.0.66"
strsim = "0.10.0"
fxhash = "0.2.1"
dashmap = "5.4.0"

View File

@ -0,0 +1,115 @@
//! Errors created by the driver. All of them
//! are related with paths and unbounded variables.
use std::path::PathBuf;
use kind_report::data::{Color, Diagnostic, DiagnosticFrame, Marker, Severity, Subtitle, Word};
use kind_tree::symbol::{Ident, QualifiedIdent};
/// Describes all of the possible errors inside each
/// of the passes inside this crate.
pub(crate) enum DriverError {
/// A referenced file could not be read from disk.
CannotFindFile(String),
/// Occurrences of a name with no definition, plus similar-name suggestions.
UnboundVariable(Vec<Ident>, Vec<String>),
/// The identifier resolves to more than one file on disk.
MultiplePaths(QualifiedIdent, Vec<PathBuf>),
/// The same name was defined twice (first and second occurrence).
DefinedMultipleTimes(QualifiedIdent, QualifiedIdent),
/// No `Main` entry point was found when one was required.
ThereIsntAMain,
}
impl Diagnostic for DriverError {
/// Syntax context of the primary location, when the diagnostic points
/// at source code at all (file-level errors have none).
fn get_syntax_ctx(&self) -> Option<kind_span::SyntaxCtxIndex> {
match self {
DriverError::CannotFindFile(_) => None,
DriverError::ThereIsntAMain => None,
DriverError::UnboundVariable(v, _) => Some(v[0].range.ctx),
DriverError::MultiplePaths(id, _) => Some(id.range.ctx),
DriverError::DefinedMultipleTimes(fst, _) => Some(fst.range.ctx),
}
}
/// Renders the diagnostic. Each variant carries its own distinct
/// error code (100..=104).
fn to_diagnostic_frame(&self) -> DiagnosticFrame {
match self {
DriverError::UnboundVariable(idents, suggestions) => DiagnosticFrame {
code: 100,
severity: Severity::Error,
title: format!("Cannot find the definition '{}'.", idents[0].to_str()),
subtitles: vec![],
hints: vec![if !suggestions.is_empty() {
format!(
"Maybe you're looking for {}",
suggestions.iter().map(|x| format!("'{}'", x)).collect::<Vec<String>>().join(", ")
)
} else {
"Take a look at the rules for name searching at https://github.com/Kindelia/Kind2/blob/master/guide/naming.md".to_string()
}],
positions: idents
.iter()
.map(|ident| Marker {
position: ident.range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
})
.collect(),
},
DriverError::MultiplePaths(ident, paths) => DiagnosticFrame {
code: 101,
severity: Severity::Error,
title: "Ambiguous definition location for the same name".to_string(),
subtitles: paths
.iter()
.map(|path| Subtitle::Phrase(Color::Fst, vec![Word::White(path.display().to_string())]))
.collect(),
hints: vec!["Take a look at the rules for name searching at https://github.com/Kindelia/Kind2/blob/master/guide/naming.md".to_string()],
positions: vec![Marker {
position: ident.range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
DriverError::DefinedMultipleTimes(fst, snd) => DiagnosticFrame {
code: 102,
severity: Severity::Error,
title: "Defined multiple times for the same name".to_string(),
subtitles: vec![],
hints: vec!["Rename one of the definitions or remove and look at how names work in Kind at https://github.com/Kindelia/Kind2/blob/master/guide/naming.md".to_string()],
positions: vec![
Marker {
position: fst.range,
color: Color::Fst,
// Fixed typo: "ocorrence" -> "occurrence".
text: "The first occurrence".to_string(),
no_code: false,
main: true,
},
Marker {
position: snd.range,
color: Color::Snd,
// Fixed typo: "occorrence" -> "occurrence".
text: "Second occurrence here!".to_string(),
no_code: false,
main: false,
},
],
},
DriverError::CannotFindFile(file) => DiagnosticFrame {
code: 103,
severity: Severity::Error,
title: format!("Cannot find file '{}'", file),
subtitles: vec![],
hints: vec![],
positions: vec![],
},
DriverError::ThereIsntAMain => DiagnosticFrame {
// Was 103, colliding with CannotFindFile; codes must be unique.
code: 104,
severity: Severity::Error,
title: "Cannot find 'Main' function to run the file.".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![],
},
}
}
}

View File

@ -0,0 +1,156 @@
use checker::eval;
use errors::DriverError;
use kind_pass::{desugar, erasure, inline::inline_book};
use kind_report::report::FileCache;
use kind_span::SyntaxCtxIndex;
use kind_tree::{backend, concrete, desugared, untyped};
use resolution::ResolutionError;
use session::Session;
use std::path::PathBuf;
use kind_checker as checker;
pub mod errors;
pub mod resolution;
pub mod session;
impl FileCache for Session {
/// Looks up the path and source text registered for `ctx`; the index
/// is the one handed out by `Session::add_path`.
fn fetch(&self, ctx: SyntaxCtxIndex) -> Option<(PathBuf, &String)> {
let path = self.loaded_paths[ctx.0].as_ref().to_owned();
Some((path, &self.loaded_sources[ctx.0]))
}
}
/// Full check-and-run pipeline: parse `path` and its dependencies,
/// desugar, type check *every* entry, then erase to the untyped tree
/// (keeping only `entrypoints`) and inline.
pub fn type_check_book(
session: &mut Session,
path: &PathBuf,
entrypoints: Vec<String>,
tids: Option<usize>,
) -> anyhow::Result<untyped::Book> {
let concrete_book = to_book(session, path)?;
let desugared_book = desugar::desugar_book(session.diagnostic_sender.clone(), &concrete_book)?;
// Check all definitions, not just the requested entrypoints.
let all = desugared_book.entrs.iter().map(|x| x.0).cloned().collect();
let succeeded = checker::type_check(
&desugared_book,
session.diagnostic_sender.clone(),
all,
tids,
);
if !succeeded {
// Concrete diagnostics were already sent through the channel;
// the error value only signals failure to the caller.
return Err(ResolutionError.into());
}
let mut book = erasure::erase_book(
&desugared_book,
session.diagnostic_sender.clone(),
entrypoints,
)?;
inline_book(&mut book);
Ok(book)
}
/// Parses `path` (plus every dependency it pulls in) and verifies that
/// all top-level names are bound, yielding the concrete-syntax book.
pub fn to_book(session: &mut Session, path: &PathBuf) -> anyhow::Result<concrete::Book> {
    let mut book = resolution::parse_and_store_book(session, path)?;
    resolution::check_unbound_top_level(session, &mut book)?;
    Ok(book)
}
/// Parses, desugars and erases the book *without* type checking it,
/// keeping only what is reachable from `entrypoints`, then inlines.
pub fn erase_book(
session: &mut Session,
path: &PathBuf,
entrypoints: Vec<String>,
) -> anyhow::Result<untyped::Book> {
let concrete_book = to_book(session, path)?;
let desugared_book = desugar::desugar_book(session.diagnostic_sender.clone(), &concrete_book)?;
let mut book = erasure::erase_book(
&desugared_book,
session.diagnostic_sender.clone(),
entrypoints,
)?;
inline_book(&mut book);
Ok(book)
}
/// Parses `path` and lowers the concrete tree to the desugared one.
pub fn desugar_book(session: &mut Session, path: &PathBuf) -> anyhow::Result<desugared::Book> {
let concrete_book = to_book(session, path)?;
desugar::desugar_book(session.diagnostic_sender.clone(), &concrete_book)
}
pub fn check_erasure_book(
session: &mut Session,
path: &PathBuf,
) -> anyhow::Result<desugared::Book> {
let concrete_book = to_book(session, path)?;
desugar::desugar_book(session.diagnostic_sender.clone(), &concrete_book)
}
/// Lowers an erased (untyped) book to an HVM file; `trace` enables
/// runtime tracing in the generated code.
pub fn compile_book_to_hvm(book: untyped::Book, trace: bool) -> backend::File {
kind_target_hvm::compile_book(book, trace)
}
pub fn compile_book_to_kdl(
path: &PathBuf,
session: &mut Session,
namespace: &str,
entrypoints: Vec<String>,
) -> anyhow::Result<kind_target_kdl::File> {
let concrete_book = to_book(session, path)?;
let desugared_book = desugar::desugar_book(session.diagnostic_sender.clone(), &concrete_book)?;
let mut book = erasure::erase_book(
&desugared_book,
session.diagnostic_sender.clone(),
entrypoints,
)?;
inline_book(&mut book);
let res = kind_target_kdl::compile_book(book, session.diagnostic_sender.clone(), namespace)?;
Ok(res)
}
/// Fails with a diagnostic when the erased book has no `Main` entry.
pub fn check_main_entry(session: &mut Session, book: &untyped::Book) -> anyhow::Result<()> {
    if book.entrs.contains_key("Main") {
        return Ok(());
    }
    session
        .diagnostic_sender
        .send(Box::new(DriverError::ThereIsntAMain))
        .unwrap();
    Err(ResolutionError.into())
}
/// Fails with a diagnostic when the desugared book has no `Main` entry.
pub fn check_main_desugared_entry(
    session: &mut Session,
    book: &desugared::Book,
) -> anyhow::Result<()> {
    if book.entrs.contains_key("Main") {
        return Ok(());
    }
    session
        .diagnostic_sender
        .send(Box::new(DriverError::ThereIsntAMain))
        .unwrap();
    Err(ResolutionError.into())
}
/// Evaluates `Main` of an already-compiled HVM file and returns the
/// printed result.
pub fn execute_file(file: &str, tids: Option<usize>) -> Result<String, String> {
    eval(file, "Main", false, tids).map(|term| term.to_string())
}
/// Evaluates the book inside the type-checker's runtime and returns
/// the resulting backend term.
pub fn eval_in_checker(book: &desugared::Book) -> Box<backend::Term> {
checker::eval_api(book)
}
/// Renders the checker source covering every entry of the book.
pub fn generate_checker(book: &desugared::Book) -> String {
checker::gen_checker(book, book.entrs.keys().cloned().collect())
}

View File

@ -0,0 +1,302 @@
//! Transforms a single book into a book by
//! reading it and it's dependencies. In the end
//! it returns a desugared book of all of the
//! depedencies.
use core::fmt;
use fxhash::FxHashSet;
use kind_pass::expand::expand_module;
use kind_pass::expand::uses::expand_uses;
use std::error::Error;
use std::fs;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use strsim::jaro;
use kind_pass::unbound::{self, UnboundCollector};
use kind_report::data::Diagnostic;
use kind_tree::concrete::visitor::Visitor;
use kind_tree::concrete::{Book, Module, TopLevel};
use kind_tree::symbol::{Ident, QualifiedIdent};
use crate::{errors::DriverError, session::Session};
/// Sentinel error signalling that resolution failed; the concrete
/// diagnostics travel separately through the session's channel.
#[derive(Debug)]
pub struct ResolutionError;
impl fmt::Display for ResolutionError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "resolution error")
}
}
impl Error for ResolutionError {}
/// The extension of kind2 files.
const EXT: &str = "kind2";
/// Tries to accumulate on a buffer all of the
/// paths that exists (so we can just throw an
/// error about ambiguous resolution to the user)
fn accumulate_neighbour_paths(
ident: &QualifiedIdent,
raw_path: &Path,
) -> Result<Option<PathBuf>, Box<dyn Diagnostic>> {
// Candidate spellings for an identifier mapped to `raw_path` = A/B:
//   canon_path    = A/B.kind2   (plain file)
//   dir_file_path = A/B/_.kind2 (directory-module file)
//   dir_path      = A/B         (the bare directory)
let mut canon_path = raw_path.to_path_buf();
let mut dir_file_path = canon_path.clone();
let dir_path = canon_path.clone();
canon_path.set_extension(EXT);
dir_file_path.push("_");
dir_file_path.set_extension(EXT);
if canon_path.exists() && dir_path.exists() && canon_path.is_file() && dir_path.is_dir() {
// Both the file and the directory exist: ambiguous reference.
Err(Box::new(DriverError::MultiplePaths(
ident.clone(),
vec![canon_path, dir_path],
)))
} else if canon_path.is_file() {
Ok(Some(canon_path))
} else if dir_file_path.is_file() {
Ok(Some(dir_file_path))
} else {
// NOTE(review): a directory with no `_` file silently resolves
// to nothing here — presumably intended; confirm.
Ok(None)
}
}
/// Gets an identifier and tries to get all of the
/// paths that it can refer into a single path. If
/// multiple paths are found then we just throw an
/// error about ambiguous paths.
fn ident_to_path(
root: &Path,
ident: &QualifiedIdent,
search_on_parent: bool,
) -> Result<Option<PathBuf>, Box<dyn Diagnostic>> {
// `A.B.C` maps to `<root>/A/B/C` before extension probing.
let name = ident.to_string();
let segments = name.as_str().split('.').collect::<Vec<&str>>();
let mut raw_path = root.to_path_buf();
raw_path.push(PathBuf::from(segments.join("/")));
match accumulate_neighbour_paths(ident, &raw_path) {
Ok(None) if search_on_parent => {
// Nothing found: retry one directory up, so a definition can
// live in its parent's file.
raw_path.pop();
accumulate_neighbour_paths(ident, &raw_path)
}
rest => rest,
}
}
/// Registers `ident` as a fresh top-level name in `book.names`.
/// On a clash, reports a `DefinedMultipleTimes` diagnostic, flips
/// `failed`, and returns `false`; otherwise inserts and returns `true`.
fn try_to_insert_new_name<'a>(
    failed: &mut bool,
    session: &'a Session,
    ident: QualifiedIdent,
    book: &'a mut Book,
) -> bool {
    let key = ident.to_string();
    if let Some(previous) = book.names.get(key.as_str()) {
        session
            .diagnostic_sender
            .send(Box::new(DriverError::DefinedMultipleTimes(
                previous.clone(),
                ident,
            )))
            .unwrap();
        *failed = true;
        return false;
    }
    book.names.insert(key, ident);
    true
}
/// Merges every top-level declaration of `module` into `book`,
/// registering names and per-entry bookkeeping info. Returns the set
/// of names this module makes visible.
fn module_to_book<'a>(
failed: &mut bool,
session: &'a Session,
module: Module,
book: &'a mut Book,
) -> FxHashSet<String> {
let mut public_names = FxHashSet::default();
for entry in module.entries {
match entry {
TopLevel::SumType(sum) => {
let name = sum.name.to_string();
public_names.insert(name.clone());
// Each constructor gets a qualified `<Type>.<cons>` name,
// re-ranged to point at the constructor itself.
for cons in &sum.constructors {
let mut cons_ident = sum.name.add_segment(cons.name.to_str());
cons_ident.range = cons.name.range;
if try_to_insert_new_name(failed, session, cons_ident.clone(), book) {
let cons_name = cons_ident.to_string();
public_names.insert(cons_name.clone());
book.count.insert(cons_name, cons.extract_book_info(&sum));
}
}
if try_to_insert_new_name(failed, session, sum.name.clone(), book) {
book.count.insert(name.clone(), sum.extract_book_info());
book.entries.insert(name, TopLevel::SumType(sum));
}
}
TopLevel::RecordType(rec) => {
// NOTE(review): unlike SumType above, records and entries
// insert into `count`/`entries` even when the name clashed
// (the insert is not guarded by the bool) — confirm intended.
let name = rec.name.to_string();
public_names.insert(name.clone());
book.count.insert(name.clone(), rec.extract_book_info());
try_to_insert_new_name(failed, session, rec.name.clone(), book);
let cons_ident = rec.name.add_segment(rec.constructor.to_str());
public_names.insert(cons_ident.to_string());
book.count.insert(
cons_ident.to_string(),
rec.extract_book_info_of_constructor(),
);
try_to_insert_new_name(failed, session, cons_ident, book);
book.entries.insert(name.clone(), TopLevel::RecordType(rec));
}
TopLevel::Entry(entr) => {
let name = entr.name.to_string();
try_to_insert_new_name(failed, session, entr.name.clone(), book);
public_names.insert(name.clone());
book.count.insert(name.clone(), entr.extract_book_info());
book.entries.insert(name, TopLevel::Entry(entr));
}
}
}
public_names
}
/// Resolves `ident` to a file on disk and loads it into `book`, unless
/// a definition with that name is already present. Returns `true` when
/// something failed.
fn parse_and_store_book_by_identifier(
session: &mut Session,
ident: &QualifiedIdent,
book: &mut Book,
) -> bool {
if book.entries.contains_key(ident.to_string().as_str()) {
return false;
}
match ident_to_path(&session.root, ident, true) {
Ok(Some(path)) => parse_and_store_book_by_path(session, &path, book),
// An unresolvable name is not an error at this point; unbound
// names are reported by a later pass.
Ok(None) => false,
Err(err) => {
session.diagnostic_sender.send(err).unwrap();
true
}
}
}
/// Reads the file at `path`, parses and expands it, registers it in
/// the session, merges its declarations into `book`, and recursively
/// loads files for still-unknown top-level names. Returns `true` when
/// anything failed.
fn parse_and_store_book_by_path(session: &mut Session, path: &PathBuf, book: &mut Book) -> bool {
    if !path.exists() {
        let err = Box::new(DriverError::CannotFindFile(
            path.to_str().unwrap().to_string(),
        ));
        session.diagnostic_sender.send(err).unwrap();
        return true;
    }
    // Canonicalize once and reuse the result below; the previous code
    // ran `fs::canonicalize` a second time for `add_path`.
    let canon_path = fs::canonicalize(path).unwrap();
    if session.loaded_paths_map.contains_key(&canon_path) {
        // Already loaded: nothing to do, nothing failed.
        return false;
    }
    let input = match fs::read_to_string(path) {
        Ok(res) => res,
        Err(_) => {
            session
                .diagnostic_sender
                .send(Box::new(DriverError::CannotFindFile(
                    path.to_str().unwrap().to_string(),
                )))
                .unwrap();
            return true;
        }
    };
    // The context id handed to the parser must match the index that
    // `add_path` will assign to this file.
    let ctx_id = session.book_counter;
    session.add_path(Rc::new(canon_path), input.clone());
    let tx = session.diagnostic_sender.clone();
    let (mut module, mut failed) = kind_parser::parse_book(tx.clone(), ctx_id, &input);
    expand_uses(&mut module, tx.clone());
    expand_module(tx.clone(), &mut module);
    let mut state = UnboundCollector::new(tx.clone(), false);
    state.visit_module(&mut module);
    for idents in state.unbound.values() {
        unbound_variable(session, book, idents);
        failed = true;
    }
    module_to_book(&mut failed, session, module, book);
    // Chase unresolved top-level names into their own files.
    for idents in state.unbound_top_level.values() {
        let fst = idents.iter().next().unwrap();
        if !book.names.contains_key(&fst.to_string()) {
            failed |= parse_and_store_book_by_identifier(session, fst, book);
        }
    }
    failed
}
/// Reports an unbound name, suggesting up to five known names that are
/// most similar to it.
fn unbound_variable(session: &mut Session, book: &Book, idents: &[Ident]) {
    // `jaro` already returns a score in [0, 1]; no `abs` is needed.
    let mut similar_names = book
        .names
        .keys()
        .map(|x| (jaro(x, idents[0].to_str()), x))
        .filter(|x| x.0 > 0.8)
        .collect::<Vec<_>>();
    // Sort descending so `take(5)` keeps the *closest* matches; the
    // previous ascending sort surfaced the worst candidates first.
    similar_names.sort_by(|x, y| y.0.total_cmp(&x.0));
    let err = Box::new(DriverError::UnboundVariable(
        idents.to_vec(),
        similar_names.iter().take(5).map(|x| x.1.clone()).collect(),
    ));
    session.diagnostic_sender.send(err).unwrap();
}
/// Loads `path` and its whole dependency closure into a fresh `Book`,
/// failing when any file could not be loaded or parsed.
pub fn parse_and_store_book(session: &mut Session, path: &PathBuf) -> anyhow::Result<Book> {
    let mut book = Book::default();
    let failed = parse_and_store_book_by_path(session, path, &mut book);
    if failed {
        return Err(ResolutionError.into());
    }
    Ok(book)
}
/// Reports every unbound top-level name of the whole book, returning
/// an error when at least one user-written unbound name was found.
pub fn check_unbound_top_level(session: &mut Session, book: &mut Book) -> anyhow::Result<()> {
let mut failed = false;
let (_, unbound_tops) =
unbound::get_book_unbound(session.diagnostic_sender.clone(), book, true);
for unbound in unbound_tops.values() {
// Generated identifiers come from compiler sugar; only names the
// user actually wrote are reported.
let res: Vec<Ident> = unbound
.iter()
.filter(|x| !x.generated)
.map(|x| x.to_ident())
.collect();
if !res.is_empty() {
unbound_variable(session, book, &res);
failed = true;
}
}
if failed {
Err(ResolutionError.into())
} else {
Ok(())
}
}

View File

@ -0,0 +1,50 @@
//! Describes a compilation session. It's not the finished
//! model because I want to change it to a query based compiler
//! later.
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::mpsc::Sender;
use fxhash::{FxHashMap, FxHashSet};
use kind_report::data::Diagnostic;
#[derive(Debug, Clone)]
pub struct Session {
/// Path of every loaded file, indexed by its `SyntaxCtxIndex`.
pub loaded_paths: Vec<Rc<PathBuf>>,
/// Source text of every loaded file, parallel to `loaded_paths`.
pub loaded_sources: Vec<String>,
/// Canonical path -> file index; used to avoid loading a file twice.
pub loaded_paths_map: FxHashMap<PathBuf, usize>,
/// It will be useful in the future
/// to make the public and private decls
pub public_names: FxHashSet<String>,
/// Channel through which every diagnostic is reported.
pub diagnostic_sender: Sender<Box<dyn Diagnostic>>,
/// Root directory that qualified names are resolved against.
pub root: PathBuf,
/// Next file index to hand out; see `Session::add_path`.
pub book_counter: usize,
}
impl Session {
    /// Creates an empty session rooted at `root` that reports
    /// diagnostics through `sender`.
    pub fn new(root: PathBuf, sender: Sender<Box<dyn Diagnostic>>) -> Session {
        Session {
            loaded_paths: Vec::new(),
            loaded_sources: Vec::new(),
            loaded_paths_map: FxHashMap::default(),
            public_names: FxHashSet::default(),
            root,
            book_counter: 0,
            diagnostic_sender: sender,
        }
    }

    /// Registers a newly loaded file and returns the index it was
    /// assigned (the same index `loaded_paths`/`loaded_sources` use).
    pub fn add_path(&mut self, path: Rc<PathBuf>, code: String) -> usize {
        let id = self.book_counter;
        self.book_counter += 1;
        // Store the returned id; the previous code stored the already
        // incremented counter, which was off by one with respect to
        // the index used by `loaded_paths` and `fetch`.
        self.loaded_paths_map.insert((*path).clone(), id);
        self.loaded_paths.push(path);
        self.loaded_sources.push(code);
        id
    }
}

View File

@ -0,0 +1,13 @@
[package]
name = "kind-parser"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
kind-span = { path = "../kind-span" }
kind-tree = { path = "../kind-tree" }
kind-report = { path = "../kind-report" }
fxhash = "0.2.1"

View File

@ -0,0 +1,329 @@
//! All of the sintatic erros both from the
//! lexer and the parser.
use kind_report::data::{Color, Diagnostic, DiagnosticFrame, Marker, Severity};
use kind_span::{Range, SyntaxCtxIndex};
use crate::lexer::tokens::Token;
/// The numeric/escape encoding being parsed; used only to name the
/// sequence inside diagnostics (see `encode_name`).
#[derive(Debug, Clone)]
pub enum EncodeSequence {
Hexa,
Decimal,
Octal,
Binary,
Unicode,
}
/// A syntax-level diagnostic produced by the lexer or parser; every
/// variant carries the source range(s) it points at.
#[derive(Debug, Clone)]
pub enum SyntaxDiagnostic {
UnfinishedString(Range),
UnfinishedChar(Range),
UnfinishedComment(Range),
InvalidEscapeSequence(EncodeSequence, Range),
InvalidNumberRepresentation(EncodeSequence, Range),
UnexpectedChar(char, Range),
/// The token found, where it was found, and the tokens expected there.
UnexpectedToken(Token, Range, Vec<Token>),
LowerCasedDefinition(String, Range),
/// First range: the definition; second: the offending clause head.
NotAClauseOfDef(Range, Range),
Unclosed(Range),
IgnoreRestShouldBeOnTheEnd(Range),
UnusedDocString(Range),
CannotUseUse(Range),
ImportsCannotHaveAlias(Range),
InvalidNumberType(String, Range),
}
/// Human-readable name of an encode sequence, for diagnostic text.
fn encode_name(encode: EncodeSequence) -> &'static str {
match encode {
EncodeSequence::Hexa => "hexadecimal",
EncodeSequence::Decimal => "decimal",
EncodeSequence::Octal => "octal",
EncodeSequence::Binary => "binary",
EncodeSequence::Unicode => "unicode",
}
}
impl Diagnostic for SyntaxDiagnostic {
/// Every syntax diagnostic carries a range, so a context is always
/// available.
fn get_syntax_ctx(&self) -> Option<SyntaxCtxIndex> {
match self {
SyntaxDiagnostic::UnfinishedString(range) => Some(range.ctx),
SyntaxDiagnostic::UnfinishedChar(range) => Some(range.ctx),
SyntaxDiagnostic::UnfinishedComment(range) => Some(range.ctx),
SyntaxDiagnostic::InvalidEscapeSequence(_, range) => Some(range.ctx),
SyntaxDiagnostic::InvalidNumberRepresentation(_, range) => Some(range.ctx),
SyntaxDiagnostic::UnexpectedChar(_, range) => Some(range.ctx),
SyntaxDiagnostic::UnexpectedToken(_, range, _) => Some(range.ctx),
SyntaxDiagnostic::LowerCasedDefinition(_, range) => Some(range.ctx),
SyntaxDiagnostic::NotAClauseOfDef(range, _) => Some(range.ctx),
SyntaxDiagnostic::Unclosed(range) => Some(range.ctx),
SyntaxDiagnostic::IgnoreRestShouldBeOnTheEnd(range) => Some(range.ctx),
SyntaxDiagnostic::UnusedDocString(range) => Some(range.ctx),
SyntaxDiagnostic::CannotUseUse(range) => Some(range.ctx),
SyntaxDiagnostic::ImportsCannotHaveAlias(range) => Some(range.ctx),
SyntaxDiagnostic::InvalidNumberType(_, range) => Some(range.ctx),
}
}
/// Renders the diagnostic. User-facing spelling fixed in several
/// hints ("beggining" -> "beginning", "indend" -> "intend", etc.).
fn to_diagnostic_frame(&self) -> DiagnosticFrame {
match self {
SyntaxDiagnostic::UnfinishedString(range) => DiagnosticFrame {
code: 1,
severity: Severity::Error,
title: "Unfinished String".to_string(),
subtitles: vec![],
hints: vec!["You need to close the string with another quote, take a look at the beginning".to_string()],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "The string starts in this position!".to_string(),
no_code: false,
main: true,
}],
},
SyntaxDiagnostic::IgnoreRestShouldBeOnTheEnd(range) => DiagnosticFrame {
code: 2,
severity: Severity::Error,
title: "Invalid position of the '..' operator".to_string(),
subtitles: vec![],
hints: vec!["Put it on the end of the clause or remove it.".to_string()],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "It should not be in the middle of this!".to_string(),
no_code: false,
main: true,
}],
},
SyntaxDiagnostic::UnusedDocString(range) => DiagnosticFrame {
code: 3,
severity: Severity::Warning,
title: "This entire documentation comment is in an invalid position".to_string(),
subtitles: vec![],
hints: vec!["Take a look at the rules for doc comments at https://github.com/Kindelia/Kind2/blob/master/guide/doc_strings.md".to_string()],
positions: vec![Marker {
position: *range,
// NOTE(review): other diagnostics use Color::Fst/Snd;
// confirm `Color::For` is the intended variant here.
color: Color::For,
text: "Remove the entire comment or transform it in a simple comment with '//'".to_string(),
no_code: false,
main: true,
}],
},
SyntaxDiagnostic::UnfinishedChar(range) => DiagnosticFrame {
code: 4,
severity: Severity::Error,
title: "Unfinished Char".to_string(),
subtitles: vec![],
hints: vec!["You need to close the character with another quote, take a look at the beginning".to_string()],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "The char starts in this position!".to_string(),
no_code: false,
main: true,
}],
},
SyntaxDiagnostic::LowerCasedDefinition(name, range) => DiagnosticFrame {
code: 5,
severity: Severity::Error,
title: "The definition name must be capitalized.".to_string(),
subtitles: vec![],
hints: vec![{
// Build the suggested spelling by upper-casing the
// first character of the offending name.
let mut c = name.chars();
let fst = c.next().unwrap().to_uppercase();
format!("Change it to '{}{}'", fst, c.as_str())
}],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Wrong case for this name".to_string(),
no_code: false,
main: true,
}],
},
SyntaxDiagnostic::NotAClauseOfDef(fst, snd) => DiagnosticFrame {
code: 6,
severity: Severity::Error,
title: "Unexpected capitalized name that does not refer to the definition".to_string(),
subtitles: vec![],
hints: vec!["If you intend to make another clause, just replace the name in red.".to_string()],
positions: vec![
Marker {
position: *snd,
color: Color::Fst,
text: "This is the unexpected token".to_string(),
no_code: false,
main: true,
},
Marker {
position: *fst,
color: Color::Snd,
text: "This is the definition. All clauses should use the same name.".to_string(),
no_code: false,
main: false,
},
],
},
SyntaxDiagnostic::UnfinishedComment(range) => DiagnosticFrame {
code: 7,
severity: Severity::Error,
title: "Unfinished Comment".to_string(),
subtitles: vec![],
hints: vec!["You need to close the comment with '*/', take a look at the beginning".to_string()],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "The comment starts in this position!".to_string(),
no_code: false,
main: true,
}],
},
SyntaxDiagnostic::InvalidEscapeSequence(kind, range) => DiagnosticFrame {
code: 8,
severity: Severity::Error,
title: format!("The {} character sequence is invalid!", encode_name(kind.clone())),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
SyntaxDiagnostic::InvalidNumberRepresentation(repr, range) => DiagnosticFrame {
code: 9,
severity: Severity::Error,
title: format!("The {} number sequence is invalid!", encode_name(repr.clone())),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
SyntaxDiagnostic::UnexpectedChar(chr, range) => DiagnosticFrame {
code: 10,
severity: Severity::Error,
title: format!("The char '{}' is invalid", chr),
subtitles: vec![],
hints: vec!["Try to remove it!".to_string()],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
// End-of-file gets its own wording (no code snippet shown).
SyntaxDiagnostic::UnexpectedToken(Token::Eof, range, _expect) => DiagnosticFrame {
code: 11,
severity: Severity::Error,
title: "Unexpected end of file.".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: true,
main: true,
}],
},
SyntaxDiagnostic::UnexpectedToken(Token::Comment(_, _), range, _expect) => DiagnosticFrame {
code: 12,
severity: Severity::Error,
title: "Unexpected documentation comment.".to_string(),
subtitles: vec![],
hints: vec!["Remove this documentation comment or place it in a correct place.".to_string()],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
SyntaxDiagnostic::UnexpectedToken(token, range, _expect) => DiagnosticFrame {
code: 13,
severity: Severity::Error,
title: format!("Unexpected token '{}'.", token),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
SyntaxDiagnostic::Unclosed(range) => DiagnosticFrame {
code: 14,
severity: Severity::Error,
title: "Unclosed parenthesis.".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Starts here! try to add another one".to_string(),
no_code: false,
main: true,
}],
},
SyntaxDiagnostic::CannotUseUse(range) => DiagnosticFrame {
code: 15,
severity: Severity::Error,
title: "Can only use the 'use' statement in the beginning of the file".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Move it to the beginning".to_string(),
no_code: false,
main: true,
}],
},
SyntaxDiagnostic::ImportsCannotHaveAlias(range) => DiagnosticFrame {
code: 16,
severity: Severity::Error,
title: "The upper cased name cannot have an alias".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Use the entire name here!".to_string(),
no_code: false,
main: true,
}],
},
SyntaxDiagnostic::InvalidNumberType(type_, range) => DiagnosticFrame {
code: 17,
severity: Severity::Error,
title: format!("The {} number type is invalid", type_),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
}
}
}
impl From<Box<SyntaxDiagnostic>> for DiagnosticFrame {
    fn from(err: Box<SyntaxDiagnostic>) -> Self {
        // The previous body `(err).into()` re-entered this very impl
        // (Box<SyntaxDiagnostic> -> DiagnosticFrame), recursing until
        // stack overflow. Build the frame directly instead.
        err.to_diagnostic_frame()
    }
}

View File

@ -0,0 +1,835 @@
use kind_span::{Locatable, Range};
use kind_tree::concrete::expr::*;
use kind_tree::symbol::{Ident, QualifiedIdent};
use kind_tree::Operator;
use crate::errors::SyntaxDiagnostic;
use crate::lexer::tokens::Token;
use crate::macros::eat_single;
use crate::state::Parser;
impl<'a> Parser<'a> {
// We always look through the parenthesis in the
// matching with is_operator
/// True when the token *after* the current one is a binary operator
/// (lookahead only; consumes nothing).
fn is_operator(&self) -> bool {
matches!(
self.peek(1),
Token::Plus
| Token::Minus
| Token::Star
| Token::Slash
| Token::Percent
| Token::Ampersand
| Token::Bar
| Token::Hat
| Token::GreaterGreater
| Token::LessLess
| Token::Less
| Token::LessEq
| Token::EqEq
| Token::GreaterEq
| Token::Greater
| Token::BangEq
)
}
fn eat_operator(&mut self) -> Result<Operator, SyntaxDiagnostic> {
self.eat(|token| match token {
Token::Plus => Some(Operator::Add),
Token::Minus => Some(Operator::Sub),
Token::Star => Some(Operator::Mul),
Token::Slash => Some(Operator::Div),
Token::Percent => Some(Operator::Mod),
Token::Ampersand => Some(Operator::Add),
Token::Bar => Some(Operator::Or),
Token::Hat => Some(Operator::Xor),
Token::GreaterGreater => Some(Operator::Shr),
Token::LessLess => Some(Operator::Shl),
Token::Less => Some(Operator::Ltn),
Token::LessEq => Some(Operator::Lte),
Token::EqEq => Some(Operator::Eql),
Token::GreaterEq => Some(Operator::Gte),
Token::Greater => Some(Operator::Gtn),
Token::BangEq => Some(Operator::Neq),
_ => None,
})
}
/// Skips a run of doc comments that cannot be attached to anything,
/// reporting the whole span as an unused doc string.
fn ignore_docs(&mut self) {
    let start = self.range();
    let mut end = start;
    let mut saw_docs = false;
    while let Token::Comment(_, _) = self.get() {
        end = self.range();
        self.advance();
        saw_docs = true;
    }
    if saw_docs {
        self.send_dignostic(SyntaxDiagnostic::UnusedDocString(start.mix(end)));
    }
}
/// Lookahead: `(` lower-id `:` — the start of a named Pi type.
fn is_pi_type(&self) -> bool {
self.get().same_variant(&Token::LPar)
&& self.peek(1).is_lower_id()
&& self.peek(2).same_variant(&Token::Colon)
}
/// Lookahead: `(` lower-id `=` — a named argument binding.
fn is_named_parameter(&self) -> bool {
self.get().same_variant(&Token::LPar)
&& self.peek(1).is_lower_id()
&& self.peek(2).same_variant(&Token::Eq)
}
/// Lookahead: lower-id `=>` — an unannotated lambda.
fn is_lambda(&self) -> bool {
self.get().is_lower_id() && self.peek(1).same_variant(&Token::FatArrow)
}
fn is_sigma_type(&self) -> bool {
self.get().same_variant(&Token::LBracket)
&& self.peek(1).is_lower_id()
&& self.peek(2).same_variant(&Token::Colon)
}
fn is_substitution(&self) -> bool {
self.get().same_variant(&Token::HashHash)
}
fn parse_substitution(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // '##'
let name = self.parse_id()?;
self.eat_variant(Token::Slash)?;
let redx = self.parse_num_lit()?;
let expr = self.parse_expr(false)?;
let range = start.mix(expr.range);
Ok(Box::new(Expr {
data: ExprKind::Subst(Substitution {
name,
redx,
indx: 0,
expr,
}),
range,
}))
}
pub fn parse_id(&mut self) -> Result<Ident, SyntaxDiagnostic> {
let range = self.range();
let id = eat_single!(self, Token::LowerId(x) => x.clone())?;
let ident = Ident::new_static(&id, range);
Ok(ident)
}
pub fn parse_any_id(&mut self) -> Result<Ident, SyntaxDiagnostic> {
let range = self.range();
let id = eat_single!(self, Token::LowerId(x) | Token::UpperId(x, None) => x.clone())?;
let ident = Ident::new_static(&id, range);
Ok(ident)
}
pub fn parse_upper_id(&mut self) -> Result<QualifiedIdent, SyntaxDiagnostic> {
let range = self.range();
let (start, end) =
eat_single!(self, Token::UpperId(start, end) => (start.clone(), end.clone()))?;
let ident = QualifiedIdent::new_static(start.as_str(), end, range);
Ok(ident)
}
fn parse_lambda(&mut self, erased: bool) -> Result<Box<Expr>, SyntaxDiagnostic> {
let name_span = self.range();
let param = self.parse_id()?;
self.advance(); // '=>'
let body = self.parse_expr(false)?;
let end_range = body.range;
Ok(Box::new(Expr {
data: ExprKind::Lambda {
param,
typ: None,
body,
erased,
},
range: name_span.mix(end_range),
}))
}
fn parse_pi_or_lambda(&mut self, erased: bool) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance(); // '('
let param = self.parse_id()?;
self.advance(); // ':'
let typ = self.parse_expr(false)?;
let par_range = self.range();
self.eat_closing_keyword(Token::RPar, range)?;
if self.check_and_eat(Token::FatArrow) {
let body = self.parse_expr(false)?;
Ok(Box::new(Expr {
range: range.mix(body.range),
data: ExprKind::Lambda {
param,
typ: Some(typ),
body,
erased,
},
}))
} else if self.check_and_eat(Token::RightArrow) {
let body = self.parse_expr(false)?;
Ok(Box::new(Expr {
range: range.mix(body.range),
data: ExprKind::All {
param: Some(param),
typ,
body,
erased,
},
}))
} else {
Ok(Box::new(Expr {
range: range.mix(typ.range),
data: ExprKind::Ann {
val: Box::new(Expr {
range: range.mix(par_range),
data: ExprKind::Var { name: param },
}),
typ,
},
}))
}
}
fn parse_sigma_type(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance(); // '['
let ident = self.parse_id()?;
self.advance(); // ':'
let fst = self.parse_expr(false)?;
self.eat_closing_keyword(Token::RBracket, range)?;
self.eat_variant(Token::RightArrow)?;
let snd = self.parse_expr(false)?;
Ok(Box::new(Expr {
range: range.mix(snd.locate()),
data: ExprKind::Sigma {
param: Some(ident),
fst,
snd,
},
}))
}
fn parse_var(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let name = self.parse_id()?;
Ok(Box::new(Expr {
range: name.range,
data: ExprKind::Var { name },
}))
}
fn parse_single_upper(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let id = self.parse_upper_id()?;
let data = match id.to_string().as_str() {
"Type" => ExprKind::Lit { lit: Literal::Type },
"U60" => ExprKind::Lit {
lit: Literal::NumTypeU60,
},
"F60" => ExprKind::Lit {
lit: Literal::NumTypeF60,
},
_ => ExprKind::Constr {
name: id.clone(),
args: vec![],
},
};
Ok(Box::new(Expr {
range: id.range,
data,
}))
}
fn parse_data(&mut self, multiline: bool) -> Result<Box<Expr>, SyntaxDiagnostic> {
let id = self.parse_upper_id()?;
let mut range = id.range;
let data = match id.to_string().as_str() {
"Type" => ExprKind::Lit { lit: Literal::Type },
"U60" => ExprKind::Lit {
lit: Literal::NumTypeU60,
},
"F60" => ExprKind::Lit {
lit: Literal::NumTypeF60,
},
_ => {
let (range_end, spine) = self.parse_call_tail(id.range, multiline)?;
range = range.mix(range_end);
ExprKind::Constr {
name: id,
args: spine,
}
}
};
Ok(Box::new(Expr { range, data }))
}
fn parse_num60(&mut self, num: u64) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance();
Ok(Box::new(Expr {
range,
data: ExprKind::Lit {
lit: Literal::NumU60(num),
},
}))
}
fn parse_num120(&mut self, num: u128) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance();
Ok(Box::new(Expr {
range,
data: ExprKind::Lit {
lit: Literal::NumU120(num),
},
}))
}
fn parse_char(&mut self, chr: char) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance();
Ok(Box::new(Expr {
range,
data: ExprKind::Lit {
lit: Literal::Char(chr),
},
}))
}
fn parse_binary_op(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance(); // '('
let op = self.eat_operator()?;
let fst = self.parse_atom()?;
let snd = self.parse_atom()?;
let end = self.range();
self.eat_closing_keyword(Token::RPar, range)?;
Ok(Box::new(Expr {
range: range.mix(end),
data: ExprKind::Binary { op, fst, snd },
}))
}
fn parse_list(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance(); // '['
let mut args = Vec::new();
if self.check_actual(Token::RBracket) {
let range = self.advance().1.mix(range);
return Ok(Box::new(Expr {
range,
data: ExprKind::List { args },
}));
}
args.push(*self.parse_atom()?);
let mut initialized = false;
let mut with_comma = false;
loop {
let ate_comma = self.check_and_eat(Token::Comma);
if !initialized {
initialized = true;
with_comma = ate_comma;
}
if with_comma {
self.check_and_eat(Token::Comma);
match self.try_single(&|x| x.parse_expr(false))? {
Some(res) => args.push(*res),
None => break,
}
} else {
// TODO: Error when someone tries to use a comma after not using it.
match self.try_single(&|x| x.parse_atom())? {
Some(res) => args.push(*res),
None => break,
}
}
}
let end = self.eat_variant(Token::RBracket)?.1;
let range = range.mix(end);
Ok(Box::new(Expr {
range,
data: ExprKind::List { args },
}))
}
fn parse_paren(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
if self.is_operator() {
self.parse_binary_op()
} else {
let range = self.range();
self.advance(); // '('
let mut expr = self.parse_expr(true)?;
let end = self.range();
self.eat_closing_keyword(Token::RPar, range)?;
expr.range = range.mix(end);
Ok(expr)
}
}
fn parse_help(&mut self, str: String) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance();
Ok(Box::new(Expr {
range,
data: ExprKind::Lit {
lit: Literal::Help(Ident::new(str, range)),
},
}))
}
fn parse_str(&mut self, str: String) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance();
Ok(Box::new(Expr {
range,
data: ExprKind::Lit {
lit: Literal::String(str),
},
}))
}
fn parse_num_lit(&mut self) -> Result<usize, SyntaxDiagnostic> {
self.ignore_docs();
match self.get().clone() {
Token::Num60(num) => {
self.advance();
Ok(num as usize)
}
_ => self.fail(vec![]),
}
}
fn parse_atom(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
self.ignore_docs();
match self.get().clone() {
Token::UpperId(_, _) => self.parse_single_upper(),
Token::LowerId(_) => self.parse_var(),
Token::Num60(num) => self.parse_num60(num),
Token::Num120(num) => self.parse_num120(num),
Token::Char(chr) => self.parse_char(chr),
Token::Str(str) => self.parse_str(str),
Token::Help(str) => self.parse_help(str),
Token::LBracket => self.parse_list(),
Token::LPar => self.parse_paren(),
Token::Hole => self.parse_hole(),
Token::Float(_, _) => todo!(),
_ => self.fail(vec![Token::LowerId("".to_string())]),
}
}
fn parse_binding(&mut self) -> Result<Binding, SyntaxDiagnostic> {
self.ignore_docs();
if self.is_named_parameter() {
let start = self.range();
self.advance(); // '('
let name = self.parse_id()?;
self.advance(); // '='
let expr = self.parse_expr(true)?;
let end = self.range();
self.eat_closing_keyword(Token::RPar, start)?;
Ok(Binding::Named(start.mix(end), name, expr))
} else {
Ok(Binding::Positional(self.parse_atom()?))
}
}
fn parse_app_binding(&mut self) -> Result<AppBinding, SyntaxDiagnostic> {
self.ignore_docs();
let (erased, data) = if self.check_and_eat(Token::Tilde) {
let start = self.range();
self.eat_variant(Token::LPar)?;
let atom = self.parse_expr(true)?;
self.eat_closing_keyword(Token::RPar, start)?;
(true, atom)
} else {
(false, self.parse_atom()?)
};
Ok(AppBinding { data, erased })
}
fn parse_call(&mut self, multiline: bool) -> Result<Box<Expr>, SyntaxDiagnostic> {
if self.get().is_upper_id() {
self.parse_data(multiline)
} else {
let fun = self.parse_atom()?;
let start = fun.range;
let mut args = Vec::new();
let mut end = start;
while (!self.is_linebreak() || multiline) && !self.get().same_variant(&Token::Eof) {
if let Some(atom) = self.try_single(&|parser| parser.parse_app_binding())? {
end = atom.data.range;
args.push(atom)
} else {
break;
}
}
if args.is_empty() {
Ok(fun)
} else {
Ok(Box::new(Expr {
data: ExprKind::App { fun, args },
range: start.mix(end),
}))
}
}
}
fn parse_call_tail(
&mut self,
start: Range,
multiline: bool,
) -> Result<(Range, Vec<Binding>), SyntaxDiagnostic> {
let mut spine = Vec::new();
let mut end = start;
while (!self.is_linebreak() || multiline) && !self.get().same_variant(&Token::Eof) {
if let Some(atom) = self.try_single(&|parser| parser.parse_binding())? {
end = atom.locate();
spine.push(atom)
} else {
break;
}
}
Ok((end, spine))
}
fn parse_arrow(&mut self, multiline: bool) -> Result<Box<Expr>, SyntaxDiagnostic> {
let mut expr = self.parse_call(multiline)?;
while self.check_and_eat(Token::RightArrow) {
let body = self.parse_expr(false)?;
let range = expr.range.mix(body.range);
expr = Box::new(Expr {
data: ExprKind::All {
param: None,
typ: expr,
body,
erased: false,
},
range,
});
}
Ok(expr)
}
fn parse_ann(&mut self, multiline: bool) -> Result<Box<Expr>, SyntaxDiagnostic> {
let expr = self.parse_arrow(multiline)?;
if self.check_and_eat(Token::ColonColon) {
let typ = self.parse_arrow(multiline)?;
let range = expr.range.mix(typ.range);
Ok(Box::new(Expr {
data: ExprKind::Ann { val: expr, typ },
range,
}))
} else {
Ok(expr)
}
}
fn parse_ask(&mut self) -> Result<Box<Sttm>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // 'ask'
let name = self.parse_destruct()?;
self.eat_variant(Token::Eq)?;
let expr = self.parse_expr(false)?;
self.check_and_eat(Token::Semi);
let next = self.parse_sttm()?;
let end = expr.range;
Ok(Box::new(Sttm {
data: SttmKind::Ask(name, expr, next),
range: start.mix(end),
}))
}
fn parse_destruct(&mut self) -> Result<Destruct, SyntaxDiagnostic> {
if self.get().is_upper_id() {
let upper = self.parse_upper_id()?;
let (range, bindings, ignore_rest) = self.parse_pat_destruct_bindings()?;
Ok(Destruct::Destruct(
upper.range.mix(range.unwrap_or(upper.range)),
upper,
bindings,
ignore_rest,
))
} else {
let name = self.parse_id()?;
Ok(Destruct::Ident(name))
}
}
fn parse_monadic_let(&mut self) -> Result<Box<Sttm>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // 'let'
let destruct = self.parse_destruct()?;
self.eat_variant(Token::Eq)?;
let val = self.parse_expr(false)?;
self.check_and_eat(Token::Semi);
let next = self.parse_sttm()?;
let end = destruct.locate();
Ok(Box::new(Sttm {
data: SttmKind::Let(destruct, val, next),
range: start.mix(end),
}))
}
fn parse_return(&mut self) -> Result<Box<Sttm>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // 'return'
let expr = self.parse_expr(false)?;
let end = expr.range;
self.check_and_eat(Token::Semi);
Ok(Box::new(Sttm {
data: SttmKind::Return(expr),
range: start.mix(end),
}))
}
fn parse_sttm(&mut self) -> Result<Box<Sttm>, SyntaxDiagnostic> {
let start = self.range();
if self.check_actual(Token::Ask) {
self.parse_ask()
} else if self.check_actual(Token::Return) {
self.parse_return()
} else if self.check_actual_id("let") {
self.parse_monadic_let()
} else {
let expr = self.parse_expr(false)?;
if self.get().same_variant(&Token::RBrace) {
let end = expr.range;
Ok(Box::new(Sttm {
data: SttmKind::RetExpr(expr),
range: start.mix(end),
}))
} else {
self.check_and_eat(Token::Semi);
let next = self.parse_sttm()?;
let end = next.range;
Ok(Box::new(Sttm {
data: SttmKind::Expr(expr, next),
range: start.mix(end),
}))
}
}
}
fn parse_do(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // 'do'
let typ = self.parse_upper_id()?;
self.eat_variant(Token::LBrace)?;
let sttm = self.parse_sttm()?;
let end = self.eat_variant(Token::RBrace)?.1;
Ok(Box::new(Expr {
data: ExprKind::Do { typ, sttm },
range: start.mix(end),
}))
}
fn parse_pat_destruct_bindings(
&mut self,
) -> Result<(Option<Range>, Vec<CaseBinding>, Option<Range>), SyntaxDiagnostic> {
let mut ignore_rest_range = None;
let mut bindings = Vec::new();
let mut range = None;
loop {
match self.get() {
Token::LowerId(_) => {
range = Some(self.range());
let name = self.parse_id()?;
bindings.push(CaseBinding::Field(name));
}
Token::LPar => {
let start = self.range();
self.advance();
let name = self.parse_id()?;
self.eat_variant(Token::Eq)?;
let renamed = self.parse_id()?;
range = Some(self.range());
self.eat_closing_keyword(Token::RPar, start)?;
bindings.push(CaseBinding::Renamed(name, renamed));
}
Token::DotDot => {
ignore_rest_range = Some(self.range());
range = Some(self.range());
self.advance();
continue;
}
_ => break,
}
if let Some(range) = ignore_rest_range {
return Err(SyntaxDiagnostic::IgnoreRestShouldBeOnTheEnd(range));
}
}
Ok((range, bindings, ignore_rest_range))
}
fn parse_match(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // 'match'
let typ = self.parse_upper_id()?;
let scrutinizer = self.parse_expr(false)?;
self.eat_variant(Token::LBrace)?;
let mut cases = Vec::new();
while !self.get().same_variant(&Token::RBrace) {
let constructor = self.parse_any_id()?;
let (_range, bindings, ignore_rest) = self.parse_pat_destruct_bindings()?;
self.eat_variant(Token::FatArrow)?;
let value = self.parse_expr(false)?;
self.check_and_eat(Token::Semi);
cases.push(Case {
constructor,
bindings,
value,
ignore_rest,
})
}
let mut end = self.eat_variant(Token::RBrace)?.1;
let motive = if self.check_and_eat(Token::Colon) {
let expr = self.parse_expr(false)?;
end = expr.range;
Some(self.parse_expr(false)?)
} else {
None
};
let match_ = Box::new(Match {
typ,
scrutinizer,
cases,
motive,
});
Ok(Box::new(Expr {
data: ExprKind::Match(match_),
range: start.mix(end),
}))
}
fn parse_let(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // 'let'
let name = self.parse_destruct()?;
self.eat_variant(Token::Eq)?;
let val = self.parse_expr(false)?;
self.check_and_eat(Token::Semi);
let next = self.parse_expr(false)?;
let end = next.range;
Ok(Box::new(Expr {
data: ExprKind::Let { name, val, next },
range: start.mix(end),
}))
}
fn parse_sigma_pair(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // '$'
let fst = self.parse_atom()?;
let snd = self.parse_atom()?;
let end = snd.range;
Ok(Box::new(Expr {
data: ExprKind::Pair { fst, snd },
range: start.mix(end),
}))
}
fn parse_hole(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // '_'
Ok(Box::new(Expr {
data: ExprKind::Hole,
range: start,
}))
}
fn parse_if(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // 'if'
let cond = self.parse_expr(false)?;
self.eat_variant(Token::LBrace)?;
let then_ = self.parse_expr(false)?;
self.eat_variant(Token::RBrace)?;
self.eat_id("else")?;
self.eat_variant(Token::LBrace)?;
let else_ = self.parse_expr(false)?;
let end = self.eat_variant(Token::RBrace)?.1;
let range = start.mix(end);
Ok(Box::new(Expr {
data: ExprKind::If { cond, then_, else_ },
range,
}))
}
fn parse_erased(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
self.advance(); // '~';
if self.is_lambda() {
self.parse_lambda(true)
} else if self.is_pi_type() {
self.parse_pi_or_lambda(true)
} else {
self.fail(vec![])
}
}
/// The infinite hell of else ifs. But it's the most readable way
/// to check if the queue of tokens match a pattern as we need
/// some looakhead tokens.
pub fn parse_expr(&mut self, multiline: bool) -> Result<Box<Expr>, SyntaxDiagnostic> {
self.ignore_docs();
if self.check_actual_id("do") {
self.parse_do()
} else if self.check_actual_id("match") {
self.parse_match()
} else if self.check_actual_id("let") {
self.parse_let()
} else if self.check_actual_id("if") {
self.parse_if()
} else if self.check_actual(Token::Dollar) {
self.parse_sigma_pair()
} else if self.is_lambda() {
self.parse_lambda(false)
} else if self.is_pi_type() {
self.parse_pi_or_lambda(false)
} else if self.is_sigma_type() {
self.parse_sigma_type()
} else if self.is_substitution() {
self.parse_substitution()
} else if self.check_actual(Token::Tilde) {
self.parse_erased()
} else {
self.parse_ann(multiline)
}
}
}

View File

@ -0,0 +1,75 @@
//! Lexes single line and multi line comments
//! including documentation strings (only in single line mode).
use kind_span::Range;
use crate::errors::SyntaxDiagnostic;
use crate::lexer::tokens::Token;
use crate::Lexer;
impl<'a> Lexer<'a> {
    /// Single line comments
    ///
    /// Assumes the first `/` was already peeked; consumes it, checks for a
    /// third `/` (which marks a doc comment) and takes the rest of the line.
    pub fn lex_comment(&mut self, start: usize) -> (Token, Range) {
        self.next_char();
        let mut is_doc = false;
        if let Some('/') = self.peekable.peek() {
            self.next_char();
            is_doc = true;
        }
        // Everything up to (but not including) the line break is the text.
        let cmt = self.accumulate_while(&|x| x != '\n');
        (
            Token::Comment(is_doc, cmt.to_string()),
            self.mk_range(start),
        )
    }
    /// Parses multi line comments with nested comments
    /// really useful
    ///
    /// Tracks nesting via `comment_depth`; an unterminated comment produces
    /// a `Token::Error` instead of a comment token.
    pub fn lex_multiline_comment(&mut self, start: usize) -> (Token, Range) {
        // Byte size of everything consumed here; `input`/`pos` are only
        // updated after the loop, unlike `next_char` which updates both.
        let mut size = 0;
        self.next_char();
        let mut next = |p: &mut Lexer<'a>, x: char| {
            size += x.len_utf8();
            p.peekable.next();
        };
        self.comment_depth += 1;
        while let Some(&x) = self.peekable.peek() {
            match x {
                '*' => {
                    next(self, x);
                    if let Some('/') = self.peekable.peek() {
                        self.comment_depth -= 1;
                        if self.comment_depth == 0 {
                            next(self, '/');
                            break;
                        }
                    }
                }
                '/' => {
                    next(self, x);
                    if let Some('*') = self.peekable.peek() {
                        self.comment_depth += 1;
                    }
                }
                _ => (),
            }
            // NOTE(review): for the '*' and '/' arms this consumes a *second*
            // char while charging `x`'s byte width again — correct only while
            // both chars are the same width (they are ASCII here, but a
            // multi-byte char right after '*' or '/' would desync `size`).
            next(self, x);
        }
        self.pos += size;
        if self.comment_depth != 0 {
            (
                Token::Error(Box::new(SyntaxDiagnostic::UnfinishedComment(
                    self.mk_range(start),
                ))),
                self.mk_range(start),
            )
        } else {
            // Drop the trailing "*/" (2 bytes) from the comment text.
            let str = &self.input[..size - 2];
            self.input = &self.input[size..];
            (Token::Comment(false, str.to_string()), self.mk_range(start))
        }
    }
}

View File

@ -0,0 +1,221 @@
//! Lexes some types of literals. It's a isolated
//! module because it requires a lot of code to
//! parse some specific things like escaped characters
//! inside of strings.
use kind_span::Range;
use crate::errors::{EncodeSequence, SyntaxDiagnostic};
use crate::lexer::tokens::Token;
use crate::Lexer;
impl<'a> Lexer<'a> {
    /// Lex a sequence of digits of the base @base@ with
    /// maximum length of @size@ and turns it into a char.
    fn lex_char_encoded(
        &mut self,
        start: usize,
        size: usize,
        base: u32,
        err: EncodeSequence,
    ) -> Result<char, SyntaxDiagnostic> {
        let string = self.next_chars(size);
        // Fails if fewer than `size` chars remain, the digits are not valid
        // in `base`, or the code point is not a valid `char`.
        let to_chr = string.and_then(|x| u32::from_str_radix(x, base).ok());
        if let Some(chr) = to_chr.and_then(char::from_u32) {
            return Ok(chr);
        }
        Err(SyntaxDiagnostic::InvalidEscapeSequence(
            err,
            self.mk_range(start),
        ))
    }
    /// Turns a escaped char into a normal char.
    ///
    /// Expects to be positioned right after the backslash. Unknown escapes
    /// fall through to the char itself (last arm).
    fn lex_escaped_char(&mut self, start: usize) -> Result<char, SyntaxDiagnostic> {
        match self.peekable.peek() {
            None => Err(SyntaxDiagnostic::UnfinishedString(
                self.mk_one_column_range(start),
            )),
            Some(&x) => {
                self.next_char();
                match x {
                    '\'' => Ok('\''),
                    '\"' => Ok('\"'),
                    'n' => Ok('\n'),
                    'r' => Ok('\r'),
                    't' => Ok('\t'),
                    '0' => Ok('\0'),
                    '\\' => Ok('\\'),
                    'x' => self.lex_char_encoded(start, 2, 16, EncodeSequence::Hexa),
                    'u' => self.lex_char_encoded(start, 4, 16, EncodeSequence::Unicode),
                    other => Ok(other),
                }
            }
        }
    }
    /// Lexes a number of base @base@, figuring out it's type
    /// Lexes 0 if not at a digit position
    ///
    /// A trailing `n`/`N` makes a `Nat`, `u60`/`u120` select the width, and
    /// no suffix defaults to `Num60`. Underscores are digit separators.
    fn lex_num_and_type_with_base(
        &mut self,
        num_start: usize,
        base: u32,
        err: EncodeSequence,
    ) -> (Token, Range) {
        let num = self.accumulate_while(&|x| x.is_digit(base) || x == '_');
        // e.g. "0x" with no digits lexes as zero.
        let num = if num.is_empty() { "0" } else { num };
        let num = num.to_string();
        let type_start = self.span();
        // Shared error constructor for out-of-range literals.
        let make_num_err = |x: &Self| {
            (
                Token::Error(Box::new(SyntaxDiagnostic::InvalidNumberRepresentation(
                    err,
                    x.mk_range(num_start),
                ))),
                x.mk_range(num_start),
            )
        };
        match self.peekable.peek() {
            Some('n' | 'N') => {
                self.next_char();
                if let Ok(res) = u128::from_str_radix(&num.replace('_', ""), base) {
                    (Token::Nat(res), self.mk_range(num_start))
                } else {
                    make_num_err(self)
                }
            }
            Some('U' | 'u') => {
                self.next_char();
                let type_ = self.accumulate_while(&|x| x.is_ascii_digit());
                match type_ {
                    "60" => {
                        if let Ok(res) = u64::from_str_radix(&num.replace('_', ""), base) {
                            (Token::Num60(res), self.mk_range(num_start))
                        } else {
                            make_num_err(self)
                        }
                    }
                    "120" => {
                        if let Ok(res) = u128::from_str_radix(&num.replace('_', ""), base) {
                            (Token::Num120(res), self.mk_range(num_start))
                        } else {
                            make_num_err(self)
                        }
                    }
                    // Any other width suffix is rejected.
                    _ => (
                        Token::Error(Box::new(SyntaxDiagnostic::InvalidNumberType(
                            format!("u{}", type_),
                            self.mk_range(type_start),
                        ))),
                        self.mk_range(type_start),
                    ),
                }
            }
            // No suffix: default to a 60-bit number.
            Some(_) | None => {
                if let Ok(res) = u64::from_str_radix(&num.replace('_', ""), base) {
                    (Token::Num60(res), self.mk_range(num_start))
                } else {
                    make_num_err(self)
                }
            }
        }
    }
    /// Lex numbers with decimal, hexadecimal, binary or octal.
    pub fn lex_number(&mut self) -> (Token, Range) {
        let start = self.span();
        match self.peekable.peek() {
            None => (Token::Eof, self.mk_range(start)),
            Some('0') => {
                // A leading zero may introduce a radix prefix (0x/0o/0b).
                self.next_char();
                match self.peekable.peek() {
                    Some('x' | 'X') => {
                        self.next_char();
                        self.lex_num_and_type_with_base(start, 16, EncodeSequence::Hexa)
                    }
                    Some('o' | 'O') => {
                        self.next_char();
                        self.lex_num_and_type_with_base(start, 8, EncodeSequence::Octal)
                    }
                    Some('b' | 'B') => {
                        self.next_char();
                        self.lex_num_and_type_with_base(start, 2, EncodeSequence::Binary)
                    }
                    Some('0'..='9' | _) | None => {
                        self.lex_num_and_type_with_base(start, 10, EncodeSequence::Decimal)
                    }
                }
            }
            Some('0'..='9' | _) => {
                self.lex_num_and_type_with_base(start, 10, EncodeSequence::Decimal)
            }
        }
    }
    /// Lexes the body of a char literal (the opening quote is already
    /// consumed by the caller); handles backslash escapes.
    pub fn lex_char(&mut self) -> Result<char, SyntaxDiagnostic> {
        let start = self.span();
        if let Some(&x) = self.peekable.peek() {
            let chr_start = self.span();
            match x {
                '\\' => {
                    self.next_char();
                    match self.lex_escaped_char(chr_start) {
                        Ok(x) => Ok(x),
                        Err(t) => Err(t),
                    }
                }
                x => {
                    self.next_char();
                    Ok(x)
                }
            }
        } else {
            Err(SyntaxDiagnostic::UnfinishedChar(self.mk_range(start)))
        }
    }
    /// Lexes a string that starts with '"' and ends with the
    /// same char. each string item can contain a escaped char
    /// and if the esaped char is not well-formed then it will
    /// acummulate the error until the end of the string.
    /// TODO: Accumulate multiple encoding errors?
    pub fn lex_string(&mut self) -> (Token, Range) {
        let start = self.span();
        self.next_char();
        let mut string = String::new();
        // Only the first malformed escape is remembered; it wins over the
        // string token at the end.
        let mut error: Option<(Token, Range)> = None;
        while let Some(&x) = self.peekable.peek() {
            let chr_start = self.span();
            match x {
                '\"' => break,
                '\\' => {
                    self.next_char();
                    match self.lex_escaped_char(chr_start) {
                        Ok(x) => string.push(x),
                        Err(t) => {
                            // Skip to the closing quote so the error range
                            // covers the whole literal.
                            self.accumulate_while(&|x| x != '"');
                            error = Some((Token::Error(Box::new(t)), self.mk_range(start)));
                        }
                    }
                    // The escape handler already consumed its chars.
                    continue;
                }
                x => string.push(x),
            }
            // FIXME: Not sure if it causes a bug!
            self.next_char();
        }
        // Consume the closing quote; its absence means EOF mid-string.
        match (self.next_char(), error) {
            (_, Some(err)) => err,
            (Some('"'), _) => (Token::Str(string), self.mk_range(start)),
            _ => (
                Token::Error(Box::new(SyntaxDiagnostic::UnfinishedString(
                    self.mk_one_column_range(start),
                ))),
                self.mk_range(start),
            ),
        }
    }
}

View File

@ -0,0 +1,239 @@
//! The entry point for the parsing. it parses all of the
//! trivial tokens like ponctuations or identifiers. Some
//! other construtions like [comments] and [literals] are
//! stored in other modules for better mantenaiblity.
//!
//! [comments]: crate::lexer::literals
//! [literals]: crate::lexer::literals
use std::sync::mpsc::Sender;
use kind_report::data::Diagnostic;
use kind_span::Range;
use crate::errors::SyntaxDiagnostic;
use self::{state::Lexer, tokens::Token};
pub mod comments;
pub mod literals;
pub mod state;
pub mod tokens;
/// True for intra-line whitespace (space, carriage return, tab).
/// Line feeds are deliberately excluded: the lexer treats them separately.
fn is_whitespace(chr: char) -> bool {
    chr == ' ' || chr == '\r' || chr == '\t'
}
/// True for chars allowed in the *continuation* of an identifier:
/// ASCII letters and digits plus `_`, `$` and `.`.
fn is_valid_id(chr: char) -> bool {
    match chr {
        '_' | '$' | '.' => true,
        c => c.is_ascii_alphanumeric(),
    }
}
/// True when `chr` can begin an uppercase identifier (ASCII `A`-`Z`).
fn is_valid_upper_start(chr: char) -> bool {
    ('A'..='Z').contains(&chr)
}
/// True when `chr` can begin an ordinary identifier:
/// ASCII letters, digits, or an underscore (no `$` or `.` here).
fn is_valid_id_start(chr: char) -> bool {
    chr == '_' || chr.is_ascii_alphanumeric()
}
impl<'a> Lexer<'a> {
    /// Consumes exactly one char and returns `token` spanning from `start`.
    pub fn single_token(&mut self, token: Token, start: usize) -> (Token, Range) {
        self.next_char();
        (token, self.mk_range(start))
    }
    /// Skips whitespace around line breaks and reports whether at least one
    /// line break was consumed. Note: always consumes the surrounding
    /// whitespace, even when returning `false`.
    pub fn is_linebreak(&mut self) -> bool {
        self.accumulate_while(&is_whitespace);
        let count = self.accumulate_while(&|x| x == '\n').len();
        self.accumulate_while(&is_whitespace);
        count > 0
    }
    /// Maps the strong keywords to their dedicated tokens; everything else
    /// is a plain lowercase identifier.
    pub fn to_keyword(data: &str) -> Token {
        match data {
            "return" => Token::Return,
            "ask" => Token::Ask,
            _ => Token::LowerId(data.to_string()),
        }
    }
    /// Lexes tokens in a loop, shipping `Token::Error`s through the channel
    /// and dropping non-doc comments, so the parser only sees clean tokens.
    pub fn get_next_no_error(&mut self, vec: Sender<Box<dyn Diagnostic>>) -> (Token, Range) {
        loop {
            let (token, span) = self.lex_token();
            match token {
                Token::Error(x) => {
                    vec.send(x).unwrap();
                    continue;
                }
                Token::Comment(false, _) => continue,
                _ => (),
            }
            return (token, span);
        }
    }
    /// The main tokenizer: dispatches on the next char to the specialized
    /// lexers (numbers, strings, comments, …) or builds punctuation tokens,
    /// distinguishing one- and two-char operators by one char of lookahead.
    pub fn lex_token(&mut self) -> (Token, Range) {
        let start = self.span();
        match self.peekable.peek() {
            None => (Token::Eof, self.mk_range(start)),
            Some(chr) => match chr {
                // Whitespace and newlines are skipped by recursing.
                c if is_whitespace(*c) => {
                    self.accumulate_while(&is_whitespace);
                    self.lex_next()
                }
                '\n' => {
                    self.accumulate_while(&|x| x == '\n' || x == '\r');
                    self.lex_next()
                }
                '.' => {
                    self.next_char();
                    match self.peekable.peek() {
                        Some('.') => self.single_token(Token::DotDot, start),
                        _ => (Token::Dot, self.mk_range(start)),
                    }
                }
                c if c.is_ascii_digit() => self.lex_number(),
                // Uppercase identifiers may carry a `/aux` qualifier part.
                c if is_valid_upper_start(*c) => {
                    let first_part = self.accumulate_while(&is_valid_id).to_string();
                    let peek = self.peekable.peek().cloned();
                    let auxiliar_part = match peek {
                        Some('/') => {
                            self.next_char();
                            let aux = self.accumulate_while(&is_valid_id);
                            Some(aux.to_string())
                        }
                        _ => None,
                    };
                    (
                        Token::UpperId(first_part, auxiliar_part),
                        self.mk_range(start),
                    )
                }
                // `_` plus any id chars after it lexes as a hole.
                '_' => {
                    self.accumulate_while(&is_valid_id);
                    (Token::Hole, self.mk_range(start))
                }
                c if is_valid_id_start(*c) => {
                    let str = self.accumulate_while(&is_valid_id);
                    (Lexer::to_keyword(str), self.mk_range(start))
                }
                '(' => self.single_token(Token::LPar, start),
                ')' => self.single_token(Token::RPar, start),
                '[' => self.single_token(Token::LBracket, start),
                ']' => self.single_token(Token::RBracket, start),
                '~' => self.single_token(Token::Tilde, start),
                '{' => self.single_token(Token::LBrace, start),
                '}' => self.single_token(Token::RBrace, start),
                '#' => {
                    self.next_char();
                    match self.peekable.peek() {
                        Some('#') => self.single_token(Token::HashHash, start),
                        _ => (Token::Hash, self.mk_range(start)),
                    }
                }
                '=' => {
                    self.next_char();
                    match self.peekable.peek() {
                        Some('>') => self.single_token(Token::FatArrow, start),
                        Some('=') => self.single_token(Token::EqEq, start),
                        _ => (Token::Eq, self.mk_range(start)),
                    }
                }
                '>' => {
                    self.next_char();
                    match self.peekable.peek() {
                        Some('>') => self.single_token(Token::GreaterGreater, start),
                        Some('=') => self.single_token(Token::GreaterEq, start),
                        _ => (Token::Greater, self.mk_range(start)),
                    }
                }
                '<' => {
                    self.next_char();
                    match self.peekable.peek() {
                        Some('<') => self.single_token(Token::LessLess, start),
                        Some('=') => self.single_token(Token::LessEq, start),
                        _ => (Token::Less, self.mk_range(start)),
                    }
                }
                // `//` and `/*` hand over to the comment lexers.
                '/' => {
                    self.next_char();
                    match self.peekable.peek() {
                        Some('/') => self.lex_comment(start),
                        Some('*') => self.lex_multiline_comment(start),
                        _ => (Token::Slash, self.mk_range(start)),
                    }
                }
                ':' => {
                    self.next_char();
                    match self.peekable.peek() {
                        Some(':') => self.single_token(Token::ColonColon, start),
                        _ => (Token::Colon, self.mk_range(start)),
                    }
                }
                ';' => self.single_token(Token::Semi, start),
                '$' => self.single_token(Token::Dollar, start),
                ',' => self.single_token(Token::Comma, start),
                '+' => self.single_token(Token::Plus, start),
                '-' => {
                    self.next_char();
                    match self.peekable.peek() {
                        Some('>') => self.single_token(Token::RightArrow, start),
                        _ => (Token::Minus, self.mk_range(start)),
                    }
                }
                '*' => self.single_token(Token::Star, start),
                '%' => self.single_token(Token::Percent, start),
                '&' => self.single_token(Token::Ampersand, start),
                '|' => self.single_token(Token::Bar, start),
                '^' => self.single_token(Token::Hat, start),
                '"' => self.lex_string(),
                // `?name` help markers.
                '?' => {
                    self.next_char();
                    let str = self.accumulate_while(&is_valid_id);
                    (Token::Help(str.to_string()), self.mk_range(start))
                }
                // Char literal: quote, (escaped) char, closing quote.
                '\'' => {
                    let start = self.span();
                    self.next_char();
                    let chr = match self.lex_char() {
                        Ok(res) => res,
                        Err(err) => return (Token::Error(err.into()), self.mk_range(start)),
                    };
                    match self.peekable.peek() {
                        Some('\'') => self.single_token(Token::Char(chr), start),
                        Some(c) => (
                            Token::Error(Box::new(SyntaxDiagnostic::UnexpectedChar(
                                *c,
                                self.mk_range(start),
                            ))),
                            self.mk_range(start),
                        ),
                        None => (
                            Token::Error(Box::new(SyntaxDiagnostic::UnfinishedChar(
                                self.mk_range(start),
                            ))),
                            self.mk_range(start),
                        ),
                    }
                }
                '!' => {
                    self.next_char();
                    match self.peekable.peek() {
                        Some('=') => self.single_token(Token::BangEq, start),
                        _ => (Token::Bang, self.mk_range(start)),
                    }
                }
                // Anything else is consumed and reported as an error token.
                &c => {
                    self.next_char();
                    (
                        Token::Error(Box::new(SyntaxDiagnostic::UnexpectedChar(
                            c,
                            self.mk_range(start),
                        ))),
                        self.mk_range(start),
                    )
                }
            },
        }
    }
}

View File

@ -0,0 +1,104 @@
//! Describes the lexer mutable state.
use std::{iter::Peekable, str::Chars};
use kind_span::{Pos, Range, SyntaxCtxIndex};
use crate::lexer::tokens::Token;
/// The lexer state.
pub struct Lexer<'a> {
    // Not-yet-consumed tail of the source text; sliced forward as chars
    // are consumed so it stays in sync with `peekable`.
    pub input: &'a str,
    // Peekable char cursor over the same source; advanced together with
    // `input` and `pos`.
    pub peekable: Peekable<Chars<'a>>,
    // Byte offset of the cursor from the start of the source; used to
    // build `Range`s.
    pub pos: usize,
    // Syntax context index stamped onto every produced `Range`.
    pub ctx: SyntaxCtxIndex,
    // Modes
    // Current nesting depth inside `/* ... */` comments (0 = not inside).
    pub comment_depth: u16,
}
impl<'a> Lexer<'a> {
    /// Creates a lexer at the start of `input`. `peekable` must iterate
    /// over the same text as `input`.
    pub fn new(input: &'a str, peekable: Peekable<Chars<'a>>, ctx: SyntaxCtxIndex) -> Lexer<'a> {
        Lexer {
            input,
            pos: 0,
            ctx,
            peekable,
            comment_depth: 0,
        }
    }
    /// Current byte offset of the lexer head.
    pub fn span(&self) -> usize {
        self.pos
    }
    /// Builds a `Range` from `start` to the current position, tagged with
    /// this lexer's syntax context.
    pub fn mk_range(&self, start: usize) -> Range {
        Range::new(
            Pos {
                index: start as u32,
            },
            Pos {
                index: self.pos as u32,
            },
            self.ctx,
        )
    }
    /// Builds a one-byte-wide `Range` at `start` (used for point errors).
    pub fn mk_one_column_range(&self, start: usize) -> Range {
        Range::new(
            Pos {
                index: start as u32,
            },
            Pos {
                index: (start + 1) as u32,
            },
            self.ctx,
        )
    }
    /// Consumes one char, advancing `peekable`, `input` and `pos` together.
    /// Returns `None` at end of input (and keeps the three in sync).
    pub fn next_char(&mut self) -> Option<char> {
        match self.peekable.next() {
            Some(chr) if !self.input.is_empty() => {
                self.input = &self.input[chr.len_utf8()..];
                self.pos += chr.len_utf8();
                Some(chr)
            }
            _ => None,
        }
    }
    /// Consumes chars while `condition` holds and returns the consumed
    /// slice. Updates `input`/`pos` by the exact byte size consumed.
    pub fn accumulate_while(&mut self, condition: &dyn Fn(char) -> bool) -> &str {
        let mut size = 0;
        while let Some(&x) = self.peekable.peek() {
            if !condition(x) {
                break;
            }
            size += x.len_utf8();
            self.peekable.next();
        }
        self.pos += size;
        let str = &self.input[..size];
        self.input = &self.input[size..];
        str
    }
    /// Consumes exactly `size` chars, or `None` if the input ends first.
    /// NOTE(review): `input` is sliced by `size` *bytes* while `pos` advances
    /// by UTF-8 width — these disagree for multi-byte chars. Callers only use
    /// this for ASCII escape digits, but confirm before reusing elsewhere.
    pub fn next_chars(&mut self, size: usize) -> Option<&str> {
        for _ in 0..size {
            if let Some(&x) = self.peekable.peek() {
                self.pos += x.len_utf8();
                self.peekable.next();
            } else {
                return None;
            }
        }
        let str = &self.input[..size];
        self.input = &self.input[size..];
        Some(str)
    }
    /// Useful as entrypoint
    #[inline]
    pub fn lex_next(&mut self) -> (Token, Range) {
        self.lex_token()
    }
}

View File

@ -0,0 +1,179 @@
//! Describes all of the tokens required
//! to parse kind2 after version 0.2.8.
use core::fmt;
use crate::errors::SyntaxDiagnostic;
/// Every token produced by the lexer. Punctuation variants carry no data;
/// identifier and literal variants carry their text or value.
#[derive(Debug, Clone)]
pub enum Token {
    LPar,       // (
    RPar,       // )
    LBracket,   // [
    RBracket,   // ]
    LBrace,     // {
    RBrace,     // }
    Eq,         // =
    Colon,      // :
    Semi,       // ;
    FatArrow,   // =>
    Dollar,     // $
    Comma,      // ,
    RightArrow, // ->
    DotDot,     // ..
    Dot,        // .
    Tilde,      // ~
    ColonColon, // ::
    // `?name` help marker (the payload is `name`).
    Help(String),
    // Lowercase identifier.
    LowerId(String),
    // Uppercase identifier, optionally qualified as `Main/aux`.
    UpperId(String, Option<String>),
    // Strong keywords because they lead to better
    // error messages.
    Return,
    Ask,
    // Keywords
    // Do,
    // If,
    // Else,
    // Match,
    // Let,
    // Type,
    // Record,
    // Constructor,
    // Use,
    // As,
    // Literals
    Char(char),
    Str(String),
    Num60(u64),
    Num120(u128),
    Nat(u128),
    // Float literal split as (integer part, fractional part).
    Float(u64, u64),
    Hole,
    // Operators
    Plus,
    Minus,
    Star,
    Slash,
    Percent,
    Ampersand,
    Bar,
    Hat,
    GreaterGreater,
    LessLess,
    Less,
    LessEq,
    EqEq,
    GreaterEq,
    Greater,
    BangEq,
    Bang,
    HashHash,
    Hash,
    // Comment text; the bool marks a doc comment (`///`).
    Comment(bool, String),
    Eof,
    // The error token that is useful to error recovery.
    Error(Box<SyntaxDiagnostic>),
}
impl Token {
    /// True when both tokens are the same enum variant; payloads are
    /// ignored (e.g. any two `LowerId`s compare equal).
    pub fn same_variant(&self, b: &Token) -> bool {
        std::mem::discriminant(self) == std::mem::discriminant(b)
    }

    /// True for lower-cased identifiers.
    pub fn is_lower_id(&self) -> bool {
        match self {
            Token::LowerId(_) => true,
            _ => false,
        }
    }

    /// True for doc comments (comments whose flag is set).
    pub fn is_doc(&self) -> bool {
        match self {
            Token::Comment(true, _) => true,
            _ => false,
        }
    }

    /// True for upper-cased identifiers.
    pub fn is_upper_id(&self) -> bool {
        match self {
            Token::UpperId(_, _) => true,
            _ => false,
        }
    }

    /// True for string literals.
    pub fn is_str(&self) -> bool {
        match self {
            Token::Str(_) => true,
            _ => false,
        }
    }

    /// True for plain numeric literals.
    pub fn is_num60(&self) -> bool {
        match self {
            Token::Num60(_) => true,
            _ => false,
        }
    }

    /// True for `u120` numeric literals.
    pub fn is_num120(&self) -> bool {
        match self {
            Token::Num120(_) => true,
            _ => false,
        }
    }

    /// True at end of file.
    pub fn is_eof(&self) -> bool {
        match self {
            Token::Eof => true,
            _ => false,
        }
    }
}
/// Renders each token the way it is written in source code; used by
/// diagnostics when listing expected tokens.
impl fmt::Display for Token {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Token::LPar => write!(f, "("),
            Token::RPar => write!(f, ")"),
            Token::LBracket => write!(f, "["),
            Token::RBracket => write!(f, "]"),
            // Braces are doubled because `{`/`}` are format-string syntax.
            Token::LBrace => write!(f, "{{"),
            Token::RBrace => write!(f, "}}"),
            Token::Eq => write!(f, "="),
            Token::Colon => write!(f, ":"),
            Token::Semi => write!(f, ";"),
            Token::FatArrow => write!(f, "=>"),
            Token::Dollar => write!(f, "$"),
            Token::Comma => write!(f, ","),
            Token::RightArrow => write!(f, "->"),
            Token::DotDot => write!(f, ".."),
            Token::Dot => write!(f, "."),
            Token::Tilde => write!(f, "~"),
            Token::ColonColon => write!(f, "::"),
            Token::Help(text) => write!(f, "?{}", text),
            Token::LowerId(id) => write!(f, "{}", id),
            Token::UpperId(main, Some(aux)) => write!(f, "{}/{}", main, aux),
            Token::UpperId(main, None) => write!(f, "{}", main),
            Token::Char(c) => write!(f, "'{}'", c),
            Token::Str(s) => write!(f, "\"{}\"", s),
            Token::Num60(n) => write!(f, "{}", n),
            Token::Num120(n) => write!(f, "{}u120", n),
            Token::Nat(n) => write!(f, "{}n", n),
            Token::Float(start, end) => write!(f, "{}.{}", start, end),
            Token::Hole => write!(f, "_"),
            Token::Plus => write!(f, "+"),
            Token::Minus => write!(f, "-"),
            Token::Star => write!(f, "*"),
            Token::Slash => write!(f, "/"),
            Token::Percent => write!(f, "%"),
            Token::Ampersand => write!(f, "&"),
            Token::Bar => write!(f, "|"),
            Token::Hat => write!(f, "^"),
            Token::GreaterGreater => write!(f, ">>"),
            Token::LessLess => write!(f, "<<"),
            Token::Less => write!(f, "<"),
            Token::LessEq => write!(f, "<="),
            Token::EqEq => write!(f, "=="),
            Token::GreaterEq => write!(f, ">="),
            Token::Greater => write!(f, ">"),
            Token::BangEq => write!(f, "!="),
            Token::Bang => write!(f, "!"),
            Token::HashHash => write!(f, "##"),
            Token::Hash => write!(f, "#"),
            // NOTE(review): both doc and non-doc comments render as
            // "docstring comment" — confirm this is intended.
            Token::Comment(_, _) => write!(f, "docstring comment"),
            Token::Eof => write!(f, "End of file"),
            Token::Error(_) => write!(f, "ERROR"),
            Token::Return => write!(f, "return"),
            Token::Ask => write!(f, "ask"),
        }
    }
}

View File

@ -0,0 +1,23 @@
//! Crate to parse the kind2 grammar.
mod errors;
mod expr;
mod lexer;
mod macros;
mod pat;
mod state;
mod top_level;
use std::sync::mpsc::Sender;
use kind_report::data::Diagnostic;
use kind_span::SyntaxCtxIndex;
use kind_tree::concrete::Module;
use lexer::state::*;
use state::Parser;
/// Lexes and parses one whole module. Diagnostics are streamed through
/// `errs`; the returned flag tells whether any parse error occurred.
pub fn parse_book(errs: Sender<Box<dyn Diagnostic>>, ctx_id: usize, input: &str) -> (Module, bool) {
    let chars = input.chars().peekable();
    let lexer = Lexer::new(input, chars, SyntaxCtxIndex::new(ctx_id));
    let mut parser = Parser::new(lexer, errs);
    let module = parser.parse_module();
    (module, parser.failed)
}

View File

@ -0,0 +1,16 @@
/// Eats a single token when it matches `$pattern`, optionally mapping
/// the pattern's bindings through `$then`. Without `$then` the eaten
/// value is `()`. Expands to the `Result` produced by `Parser::eat`.
macro_rules! eat_single {
    ($x:expr, $pattern:pat) => {
        // The one-argument form is just the mapped form with a unit body.
        eat_single!($x, $pattern => ())
    };
    ($x:expr, $pattern:pat => $then:expr) => {
        $x.eat(|token| match token {
            $pattern => Some($then),
            _ => None,
        })
    };
}

pub(crate) use eat_single;

View File

@ -0,0 +1,152 @@
use kind_tree::concrete::pat::{Pat, PatIdent, PatKind};
use crate::errors::SyntaxDiagnostic;
use crate::lexer::tokens::Token;
use crate::macros::eat_single;
use crate::state::Parser;
impl<'a> Parser<'a> {
fn is_pat_cons(&self) -> bool {
self.get().same_variant(&Token::LPar) && self.peek(1).is_upper_id()
}
fn parse_pat_constructor(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // '('
let name = self.parse_upper_id()?;
let mut pats = Vec::new();
while let Some(res) = self.try_single(&|s| s.parse_pat())? {
pats.push(res)
}
let end = self.eat_variant(Token::RPar)?.1;
Ok(Box::new(Pat {
range: start.mix(end),
data: PatKind::App(name, pats),
}))
}
fn parse_pat_u60(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let start = self.range();
let num = eat_single!(self, Token::Num60(n) => *n)?;
Ok(Box::new(Pat {
range: start,
data: PatKind::U60(num),
}))
}
fn parse_pat_u120(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let start = self.range();
let num = eat_single!(self, Token::Num120(n) => *n)?;
Ok(Box::new(Pat {
range: start,
data: PatKind::U120(num),
}))
}
fn parse_pat_str(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let start = self.range();
let string = eat_single!(self, Token::Str(str) => str.clone())?;
Ok(Box::new(Pat {
range: start,
data: PatKind::Str(string),
}))
}
fn parse_pat_group(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // '('
let mut pat = self.parse_pat()?;
let end = self.eat_variant(Token::RPar)?.1;
pat.range = start.mix(end);
Ok(pat)
}
fn parse_pat_var(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let id = self.parse_id()?;
Ok(Box::new(Pat {
range: id.range,
data: PatKind::Var(PatIdent(id)),
}))
}
fn parse_pat_single_cons(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let id = self.parse_upper_id()?;
Ok(Box::new(Pat {
range: id.range,
data: PatKind::App(id, vec![]),
}))
}
fn parse_pat_hole(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let range = self.range();
self.eat_variant(Token::Hole)?;
Ok(Box::new(Pat {
range,
data: PatKind::Hole,
}))
}
fn parse_pat_list(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let range = self.range();
self.advance(); // '['
let mut vec = Vec::new();
if self.check_actual(Token::RBracket) {
let range = self.advance().1.mix(range);
return Ok(Box::new(Pat {
range,
data: PatKind::List(vec),
}));
}
vec.push(*self.parse_pat()?);
let mut initialized = false;
let mut with_comma = false;
loop {
let ate_comma = self.check_and_eat(Token::Comma);
if !initialized {
initialized = true;
with_comma = ate_comma;
}
if with_comma {
self.check_and_eat(Token::Comma);
}
match self.try_single(&|x| x.parse_pat())? {
Some(res) => vec.push(*res),
None => break,
}
}
let range = self.eat_variant(Token::RBracket)?.1.mix(range);
Ok(Box::new(Pat {
range,
data: PatKind::List(vec),
}))
}
pub fn parse_pat(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
if self.is_pat_cons() {
self.parse_pat_constructor()
} else if self.get().is_str() {
self.parse_pat_str()
} else if self.get().is_num60() {
self.parse_pat_u60()
} else if self.get().is_num120() {
self.parse_pat_u120()
} else if self.check_actual(Token::LPar) {
self.parse_pat_group()
} else if self.get().is_lower_id() {
self.parse_pat_var()
} else if self.get().is_upper_id() {
self.parse_pat_single_cons()
} else if self.check_actual(Token::LBrace) {
self.parse_pat_list()
} else if self.check_actual(Token::Hole) {
self.parse_pat_hole()
} else {
self.fail(vec![])
}
}
}

View File

@ -0,0 +1,162 @@
//! Describes the state of the parser.
use std::{collections::VecDeque, sync::mpsc::Sender};
use kind_report::data::Diagnostic;
use kind_span::Range;
use crate::{errors::SyntaxDiagnostic, lexer::tokens::Token, Lexer};
/// The parser state. It carries a fixed three-token lookahead queue
/// together with the flags that make error recovery and backtracking
/// possible.
pub struct Parser<'a> {
    /// The lexer the tokens are pulled from.
    lexer: Lexer<'a>,
    /// Ring buffer holding the current token and the lookahead tokens
    /// (kept at three entries by `new`/`advance`).
    /// We have to shift these things one position to the left on each
    /// advance; the movement is cheap because it's a ring buffer.
    queue: VecDeque<(Token, Range)>,
    /// For each queued token, whether a line break was seen before it
    /// was fetched (sampled from the lexer before each fetch).
    breaks: VecDeque<bool>,
    /// Channel used to stream diagnostics out as they are found.
    dignostic_channel: Sender<Box<dyn Diagnostic>>,
    /// Counts how many tokens have been eaten. It's useful when we have
    /// to try to parse something that fails in the first token: as the
    /// parser ignores some tokens, we cannot rely on the count provided
    /// by the lexer.
    eaten: u32,
    /// Set once any diagnostic has been sent; reported by `parse_book`.
    pub failed: bool,
}
impl<'a> Parser<'a> {
    /// Creates a parser, pre-filling the lookahead queue with the first
    /// three tokens and their preceding-linebreak flags.
    pub fn new(mut lexer: Lexer<'a>, sender: Sender<Box<dyn Diagnostic>>) -> Parser<'a> {
        let mut queue = VecDeque::with_capacity(3);
        let mut breaks = VecDeque::with_capacity(3);
        for _ in 0..3 {
            // The linebreak flag is sampled *before* the token is fetched
            // so that `breaks[i]` describes what precedes `queue[i]`.
            breaks.push_back(lexer.is_linebreak());
            queue.push_back(lexer.get_next_no_error(sender.clone()));
        }
        Parser {
            lexer,
            queue,
            breaks,
            dignostic_channel: sender,
            eaten: 0,
            failed: false,
        }
    }

    /// Consumes the current token, refills the lookahead queue from the
    /// lexer and returns the consumed token with its range.
    pub fn advance(&mut self) -> (Token, Range) {
        let cur = self.queue.pop_front().unwrap();
        self.breaks.pop_front();
        // Same ordering as `new`: sample the break flag, then fetch.
        self.breaks.push_back(self.lexer.is_linebreak());
        self.queue
            .push_back(self.lexer.get_next_no_error(self.dignostic_channel.clone()));
        self.eaten += 1;
        cur
    }

    /// Whether a line break precedes the current token.
    pub fn is_linebreak(&self) -> bool {
        self.breaks[0]
    }

    /// The current token, without consuming it.
    #[inline]
    pub fn get(&self) -> &Token {
        &self.queue[0].0
    }

    /// Looks `lookhead` tokens ahead (0 is the current token).
    #[inline]
    pub fn peek(&self, lookhead: usize) -> &Token {
        &self.queue[lookhead].0
    }

    /// The source range of the current token.
    #[inline]
    pub fn range(&self) -> Range {
        self.queue[0].1
    }

    /// Builds an `UnexpectedToken` error at the current position,
    /// listing the tokens that would have been accepted.
    #[inline]
    pub fn fail<T>(&mut self, expect: Vec<Token>) -> Result<T, SyntaxDiagnostic> {
        Err(SyntaxDiagnostic::UnexpectedToken(
            self.get().clone(),
            self.range(),
            expect,
        ))
    }

    /// Sends a diagnostic through the channel and marks the parse as failed.
    pub fn send_dignostic(&mut self, diagnostic: SyntaxDiagnostic) {
        self.dignostic_channel.send(Box::new(diagnostic)).unwrap();
        self.failed = true;
    }

    /// Eats the closing token of a delimited construct; an end-of-file
    /// instead becomes an `Unclosed` error pointing at the opener's range.
    pub fn eat_closing_keyword(
        &mut self,
        expect: Token,
        range: Range,
    ) -> Result<(), SyntaxDiagnostic> {
        if !self.check_and_eat(expect.clone()) {
            if self.get().is_eof() {
                Err(SyntaxDiagnostic::Unclosed(range))
            } else {
                self.fail(vec![expect])
            }
        } else {
            Ok(())
        }
    }

    /// Eats a token of the same variant as `expect` (payloads ignored),
    /// failing otherwise.
    pub fn eat_variant(&mut self, expect: Token) -> Result<(Token, Range), SyntaxDiagnostic> {
        if self.get().same_variant(&expect) {
            Ok(self.advance())
        } else {
            self.fail(vec![expect])
        }
    }

    /// Eats a lower-cased identifier spelled exactly `expect` (used for
    /// soft keywords such as "type" and "record").
    pub fn eat_id(&mut self, expect: &str) -> Result<(Token, Range), SyntaxDiagnostic> {
        match self.get() {
            Token::LowerId(x) if x == expect => Ok(self.advance()),
            _ => self.fail(vec![Token::LowerId(expect.to_string())]),
        }
    }

    /// Eats the current token when `expect` maps it to `Some`, returning
    /// the mapped value; otherwise fails without consuming.
    pub fn eat<T>(&mut self, expect: fn(&Token) -> Option<T>) -> Result<T, SyntaxDiagnostic> {
        match expect(self.get()) {
            None => self.fail(vec![]),
            Some(res) => {
                self.advance();
                Ok(res)
            }
        }
    }

    /// Eats the token when it matches `expect`; returns whether it did.
    pub fn check_and_eat(&mut self, expect: Token) -> bool {
        if self.get().same_variant(&expect) {
            self.advance();
            true
        } else {
            false
        }
    }

    /// Whether the current token matches `expect`, without eating it.
    pub fn check_actual(&mut self, expect: Token) -> bool {
        self.get().same_variant(&expect)
    }

    /// Whether the current token is the lower-cased identifier `expect`.
    pub fn check_actual_id(&self, expect: &str) -> bool {
        matches!(self.get(), Token::LowerId(x) if x == expect)
    }

    /// Runs `fun`, treating a failure that consumed no tokens as a soft
    /// failure (`Ok(None)`) so callers can try an alternative; a failure
    /// after tokens were eaten is propagated as a real error.
    pub fn try_single<T>(
        &mut self,
        fun: &dyn Fn(&mut Parser<'a>) -> Result<T, SyntaxDiagnostic>,
    ) -> Result<Option<T>, SyntaxDiagnostic> {
        let current = self.eaten;
        match fun(self) {
            Err(_) if current == self.eaten => Ok(None),
            Err(err) => Err(err),
            Ok(res) => Ok(Some(res)),
        }
    }
}

View File

@ -0,0 +1,96 @@
use kind_span::{Locatable, Range};
use kind_tree::concrete::{Attribute, AttributeStyle};
use crate::errors::SyntaxDiagnostic;
use crate::lexer::tokens::Token;
use crate::state::Parser;
impl<'a> Parser<'a> {
    /// Parses the optional bracketed argument list of an attribute,
    /// e.g. the `[a, b]` in `#name[a, b]`. Returns the arguments and a
    /// range used as the attribute's end position.
    fn parse_attr_args(&mut self) -> Result<(Vec<AttributeStyle>, Range), SyntaxDiagnostic> {
        let mut attrs = Vec::new();

        // NOTE(review): when there is no `[`, this is the range of the
        // *next* token, so the attribute's range may overextend into the
        // following token — confirm against diagnostics output.
        let mut range = self.range();

        if self.check_and_eat(Token::LBracket) {
            while let Some(res) = self.try_single(&|fun| fun.parse_attr_style())? {
                attrs.push(res);
                if !self.check_and_eat(Token::Comma) {
                    break;
                }
            }
            let start = range;
            range = self.range();
            self.eat_closing_keyword(Token::RBracket, start)?;
        }

        Ok((attrs, range))
    }

    /// Parses one attribute value: an identifier, a number, a string,
    /// or a bracketed list of nested values.
    fn parse_attr_style(&mut self) -> Result<AttributeStyle, SyntaxDiagnostic> {
        match self.get().clone() {
            Token::LowerId(_) | Token::UpperId(_, None) => {
                let range = self.range();
                let ident = self.parse_any_id()?;
                Ok(AttributeStyle::Ident(range, ident))
            }
            Token::Num60(num) => {
                let range = self.range();
                self.advance();
                Ok(AttributeStyle::Number(range, num))
            }
            Token::Str(str) => {
                let range = self.range();
                self.advance();
                Ok(AttributeStyle::String(range, str))
            }
            Token::LBracket => {
                let range = self.range();
                self.advance();

                let mut attrs = Vec::new();
                while let Some(res) = self.try_single(&|fun| fun.parse_attr_style())? {
                    attrs.push(res);
                    if !self.check_and_eat(Token::Comma) {
                        break;
                    }
                }

                let end = self.range();
                self.eat_closing_keyword(Token::RBracket, range)?;
                Ok(AttributeStyle::List(range.mix(end), attrs))
            }
            _ => self.fail(Vec::new()),
        }
    }

    /// Parses a whole attribute: `#name`, optionally with bracketed
    /// arguments and/or an `= value` suffix.
    fn parse_attr(&mut self) -> Result<Attribute, SyntaxDiagnostic> {
        let start = self.range();
        self.eat_variant(Token::Hash)?;
        let name = self.parse_id()?;

        let (args, mut last) = self.parse_attr_args()?;

        let style = if self.check_and_eat(Token::Eq) {
            let res = self.parse_attr_style()?;
            last = res.locate();
            Some(res)
        } else {
            None
        };
        Ok(Attribute {
            range: start.mix(last),
            value: style,
            args,
            name,
        })
    }

    /// Parses zero or more attributes preceding a definition.
    pub fn parse_attrs(&mut self) -> Result<Vec<Attribute>, SyntaxDiagnostic> {
        let mut attrs = Vec::new();
        while let Some(res) = self.try_single(&|fun| fun.parse_attr())? {
            attrs.push(res);
        }
        Ok(attrs)
    }
}

View File

@ -0,0 +1,312 @@
use fxhash::FxHashMap;
use kind_tree::concrete::expr::Expr;
use kind_tree::concrete::pat::{Pat, PatIdent, PatKind};
use kind_tree::concrete::*;
use kind_tree::symbol::QualifiedIdent;
use crate::errors::SyntaxDiagnostic;
use crate::lexer::tokens::Token;
use crate::state::Parser;
pub mod attributes;
pub mod type_decl;
/// An argument binder closed by `>` (i.e. written `<name>`) is a
/// hidden (implicit) argument.
fn is_hidden_arg(token: &Token) -> bool {
    match token {
        Token::Greater => true,
        _ => false,
    }
}
impl<'a> Parser<'a> {
    /// Whether the token *after* the current one can continue a
    /// top-level entry head (type annotation, argument binder, body
    /// brace, or an erasure/keep marker).
    fn is_top_level_entry_continuation(&self) -> bool {
        self.peek(1).same_variant(&Token::Colon) // ':'
            || self.peek(1).same_variant(&Token::LPar) // '('
            || self.peek(1).same_variant(&Token::LBrace) // '{'
            || self.peek(1).same_variant(&Token::Less) // '<'
            || self.peek(1).same_variant(&Token::Minus) // '-'
            || self.peek(1).same_variant(&Token::Plus) // '+'
    }

    /// Whether the current position starts a top-level entry.
    fn is_top_level_entry(&self) -> bool {
        self.get().is_upper_id() && self.is_top_level_entry_continuation()
    }

    /// Tokens that are safe resynchronization points for error recovery.
    fn is_safe_level_start(&self) -> bool {
        self.check_actual_id("type")
            || self.check_actual_id("record")
            || self.get().same_variant(&Token::Hash)
            || self.get().is_doc()
    }

    /// Maps an argument opener to its expected closer (`(`→`)`, `<`→`>`).
    fn complement_binding_op(&self) -> Option<Token> {
        match self.get() {
            Token::LPar => Some(Token::RPar),
            Token::Less => Some(Token::Greater),
            _ => None,
        }
    }

    /// Parses an argument binder `(name: typ)` or `<name: typ>`,
    /// optionally prefixed by `-` (erased) or `+` (kept). Hidden (`<>`)
    /// binders are erased by default unless `+` is given.
    fn parse_argument(&mut self) -> Result<Argument, SyntaxDiagnostic> {
        let start = self.range();

        let erased = self.check_and_eat(Token::Minus);
        let keep = self.check_and_eat(Token::Plus);

        let complement = self.complement_binding_op();
        match &complement {
            Some(_) => Ok(self.advance()),
            None => self.fail(vec![Token::Plus, Token::Minus, Token::LPar, Token::Less]), // TODO: Add multiple
        }?;

        let hidden = is_hidden_arg(complement.as_ref().unwrap());
        let name = self.parse_id()?;

        let typ = if self.check_and_eat(Token::Colon) {
            Some(self.parse_expr(false)?)
        } else {
            None
        };

        // Hidden arguments are erased unless explicitly kept with `+`.
        let erased = if hidden { !keep } else { erased };

        let res = self.eat_variant(complement.unwrap())?.1;
        let range = start.mix(res);

        Ok(Argument {
            hidden,
            erased,
            name,
            typ,
            range,
        })
    }

    /// Parses one rewrite rule `Name pat* = body` of the entry called
    /// `name`. Fails softly (consuming nothing) when the head identifier
    /// does not match `name`, so the caller can stop collecting rules.
    fn parse_rule(&mut self, name: String) -> Result<Box<Rule>, SyntaxDiagnostic> {
        let start = self.range();
        let ident;
        if let Token::UpperId(name_id, ext) = self.get() {
            let qual = QualifiedIdent::new_static(name_id.as_str(), ext.clone(), start);
            if qual.to_string() == name {
                ident = self.parse_upper_id()?;
            } else {
                return self.fail(vec![]);
            }
        } else {
            return self.fail(vec![]);
        }

        let mut pats = Vec::new();
        while !self.get().same_variant(&Token::Eq) && !self.get().same_variant(&Token::Eof) {
            pats.push(self.parse_pat()?);
        }
        self.eat_variant(Token::Eq)?;
        let body = self.parse_expr(false)?;
        let end = start.mix(body.range);
        Ok(Box::new(Rule {
            name: ident,
            pats,
            body,
            range: end,
        }))
    }

    /// Parses zero or more argument binders.
    fn parse_arguments(&mut self) -> Result<Vec<Argument>, SyntaxDiagnostic> {
        let mut args = Vec::new();
        while let Some(res) = self.try_single(&|fun| fun.parse_argument())? {
            args.push(res);
        }
        Ok(args)
    }

    /// Collects the comment tokens immediately preceding a definition.
    fn parse_docs(&mut self) -> Result<Vec<String>, SyntaxDiagnostic> {
        let mut docs = Vec::new();
        while let Token::Comment(_, str) = &self.get() {
            docs.push(str.clone());
            self.advance();
        }
        Ok(docs)
    }

    /// Parses a top-level entry: `Name args : typ` followed either by a
    /// braced body (sugar for a single catch-all rule) or by a list of
    /// rewrite rules, or both.
    pub fn parse_entry(
        &mut self,
        docs: Vec<String>,
        attrs: Vec<Attribute>,
    ) -> Result<Entry, SyntaxDiagnostic> {
        let start = self.range();

        // A lower-cased name in definition position is a common mistake,
        // so it gets a dedicated diagnostic.
        if self.get().is_lower_id() && self.is_top_level_entry_continuation() {
            let ident = self.parse_id()?;
            return Err(SyntaxDiagnostic::LowerCasedDefinition(
                ident.to_string(),
                ident.range,
            ));
        }

        // Just to make errors more localized
        if !self.is_top_level_entry() {
            self.fail(vec![])?
        }

        let ident = self.parse_upper_id()?;
        let args = self.parse_arguments()?;

        if !self.get().same_variant(&Token::Colon) && !self.get().same_variant(&Token::LBrace) {
            return self.fail(vec![])?;
        }

        // A missing type annotation becomes a hole.
        let typ = if self.check_and_eat(Token::Colon) {
            self.parse_expr(false)?
        } else {
            Box::new(Expr {
                data: ExprKind::Hole,
                range: start,
            })
        };

        // `Name args { body }` sugar: the braced body becomes a first
        // rule whose patterns are just the argument variables.
        // (This used to be duplicated across two near-identical branches;
        // only the optional braced rule differs, so it is factored out.)
        let mut rules = Vec::new();
        if self.check_actual(Token::LBrace) {
            let brace_start = self.range();
            self.eat_variant(Token::LBrace)?;
            let body = self.parse_expr(true)?;
            let end = self.range();
            self.eat_closing_keyword(Token::RBrace, brace_start)?;
            rules.push(Box::new(Rule {
                name: ident.clone(),
                pats: args
                    .iter()
                    .map(|x| {
                        Box::new(Pat {
                            range: x.range,
                            data: PatKind::Var(PatIdent(x.name.clone())),
                        })
                    })
                    .collect(),
                body,
                range: end,
            }));
        }

        while let Some(res) = self.try_single(&|parser| parser.parse_rule(ident.to_string()))? {
            rules.push(res);
        }

        let end = rules.last().as_ref().map(|x| x.range).unwrap_or(typ.range);

        // Better error message when you have changed the name of the function
        if self.get().is_upper_id() && !self.is_top_level_entry_continuation() {
            return Err(SyntaxDiagnostic::NotAClauseOfDef(ident.range, self.range()));
        }

        Ok(Entry {
            name: ident,
            docs,
            args: Telescope::new(args),
            typ,
            rules,
            attrs,
            range: start.mix(end),
            generated_by: None,
        })
    }

    /// Parses one top-level item: a sum type, a record, or an entry.
    /// A stray `use` here (after other items) gets a dedicated error.
    fn parse_top_level(&mut self) -> Result<TopLevel, SyntaxDiagnostic> {
        let docs = self.parse_docs()?;
        let attrs = self.parse_attrs()?;

        if self.check_actual_id("type") {
            Ok(TopLevel::SumType(self.parse_sum_type_def(docs, attrs)?))
        } else if self.check_actual_id("record") {
            Ok(TopLevel::RecordType(self.parse_record_def(docs, attrs)?))
        } else if self.is_top_level_entry_continuation() {
            Ok(TopLevel::Entry(self.parse_entry(docs, attrs)?))
        } else if self.check_actual_id("use") {
            Err(SyntaxDiagnostic::CannotUseUse(self.range()))
        } else {
            self.fail(vec![])
        }
    }

    /// Parses `use Origin as Alias`; neither side may carry an `/aux`
    /// segment.
    fn parse_use(&mut self) -> Result<(String, String), SyntaxDiagnostic> {
        self.eat_id("use")?;
        let origin = self.parse_upper_id()?;
        self.eat_id("as")?;
        let alias = self.parse_upper_id()?;
        if origin.get_aux().is_some() {
            Err(SyntaxDiagnostic::ImportsCannotHaveAlias(origin.range))
        } else if alias.get_aux().is_some() {
            Err(SyntaxDiagnostic::ImportsCannotHaveAlias(alias.range))
        } else {
            Ok((origin.to_string(), alias.to_string()))
        }
    }

    /// Parses a whole module: a leading block of `use` aliases followed
    /// by top-level items. On error it reports the diagnostic and skips
    /// ahead to the next safe resynchronization point.
    pub fn parse_module(&mut self) -> Module {
        let mut entries: Vec<TopLevel> = Vec::new();
        let mut uses: FxHashMap<String, String> = Default::default();

        while self.check_actual_id("use") {
            match self.parse_use() {
                Ok((origin, alias)) => {
                    uses.insert(alias, origin);
                }
                Err(err) => {
                    self.send_dignostic(err);
                    break;
                }
            }
        }

        while !self.get().same_variant(&Token::Eof) {
            match self.parse_top_level() {
                Ok(entry) => entries.push(entry),
                Err(err) => {
                    self.advance();
                    self.send_dignostic(err);
                    // Skip until a safe start-of-item at the beginning of
                    // a line (or EOF) before trying again.
                    while (!self.is_safe_level_start() || !self.is_linebreak())
                        && !self.get().same_variant(&Token::Eof)
                    {
                        self.advance();
                    }
                }
            }
        }

        if let Err(err) = self.eat_variant(Token::Eof) {
            self.send_dignostic(err);
        }

        Module { entries, uses }
    }
}

View File

@ -0,0 +1,118 @@
use kind_tree::concrete::{Attribute, Constructor, RecordDecl, SumTypeDecl, Telescope};
use kind_tree::symbol::Ident;
use crate::errors::SyntaxDiagnostic;
use crate::lexer::tokens::Token;
use crate::state::Parser;
impl<'a> Parser<'a> {
    /// Parses a single constructor of a sum type: attributes, docs, a
    /// name, argument binders and an optional `: type` annotation,
    /// followed by an optional `;`.
    pub fn parse_constructor(&mut self) -> Result<Constructor, SyntaxDiagnostic> {
        let attrs = self.parse_attrs()?;
        let docs = self.parse_docs()?;
        let name = self.parse_any_id()?;
        let args = self.parse_arguments()?;

        let typ = if self.check_and_eat(Token::Colon) {
            Some(self.parse_expr(false)?)
        } else {
            None
        };

        // The trailing semicolon is optional.
        self.check_and_eat(Token::Semi);

        Ok(Constructor {
            name,
            attrs,
            docs,
            args: Telescope::new(args),
            typ,
        })
    }

    /// Parses `type Name params ~ indices { constructors }`. The `~`
    /// section (indices) is optional.
    pub fn parse_sum_type_def(
        &mut self,
        docs: Vec<String>,
        attrs: Vec<Attribute>,
    ) -> Result<SumTypeDecl, SyntaxDiagnostic> {
        self.eat_id("type")?;
        let name = self.parse_upper_id()?;
        let parameters = self.parse_arguments()?;

        let indices = if self.check_and_eat(Token::Tilde) {
            self.parse_arguments()?
        } else {
            Vec::new()
        };

        let range = self.range();
        self.eat_variant(Token::LBrace)?;

        let mut constructors = vec![];

        while !self.get().same_variant(&Token::RBrace) && !self.get().same_variant(&Token::Eof) {
            constructors.push(self.parse_constructor()?);
        }

        self.eat_closing_keyword(Token::RBrace, range)?;

        Ok(SumTypeDecl {
            name,
            docs,
            parameters: Telescope::new(parameters),
            indices: Telescope::new(indices),
            constructors,
            attrs,
        })
    }

    /// Parses `record Name params { constructor name, fields... }`.
    /// When no `constructor` clause is given, the constructor defaults
    /// to `new`.
    pub fn parse_record_def(
        &mut self,
        docs: Vec<String>,
        attrs: Vec<Attribute>,
    ) -> Result<RecordDecl, SyntaxDiagnostic> {
        self.eat_id("record")?;
        let name = self.parse_upper_id()?;
        let parameters = self.parse_arguments()?;

        let range = self.range();
        self.eat_variant(Token::LBrace)?;

        let cons_attrs = self.parse_attrs()?;

        let constructor = if self.check_actual_id("constructor") {
            self.eat_id("constructor")?;
            let res = self.parse_id()?;
            self.check_and_eat(Token::Comma);
            res
        } else {
            Ident::new("new".to_string(), name.range)
        };

        // Fields are `name : type` pairs with no separator between them.
        let mut fields = vec![];
        while !self.get().same_variant(&Token::RBrace) && !self.get().same_variant(&Token::Eof) {
            let docs = self.parse_docs()?;
            let name = self.parse_id()?;
            self.eat_variant(Token::Colon)?;
            let typ = self.parse_expr(false)?;
            fields.push((name, docs, typ))
        }

        self.eat_closing_keyword(Token::RBrace, range)?;

        Ok(RecordDecl {
            name,
            docs,
            constructor,
            parameters: Telescope::new(parameters),
            fields,
            attrs,
            cons_attrs,
        })
    }
}

View File

@ -0,0 +1,17 @@
[package]
name = "kind-pass"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
kind-span = { path = "../kind-span" }
kind-tree = { path = "../kind-tree" }
kind-report = { path = "../kind-report" }
kind-derive = { path = "../kind-derive" }
linked-hash-map = "0.5.6"
fxhash = "0.2.1"
im-rc = "15.1.0"
anyhow = "1.0.66"

View File

@ -0,0 +1,192 @@
use fxhash::FxHashMap;
use kind_span::{Locatable, Range};
use kind_tree::concrete::expr::Expr;
use kind_tree::concrete::{Binding, ExprKind};
use kind_tree::desugared;
use kind_tree::symbol::QualifiedIdent;
use crate::errors::PassError;
use super::DesugarState;
impl<'a> DesugarState<'a> {
    /// Completes the argument spine of `head`, synthesizing every hidden
    /// argument as a hole (or as a fresh variable when `create_var` is
    /// set).
    ///
    /// Returns `None` when the spine cannot be completed; in that case a
    /// diagnostic has already been reported and the caller should emit
    /// an error expression.
    pub(crate) fn make_desugared_spine(
        &mut self,
        range: Range,
        head: QualifiedIdent,
        spine: Vec<Box<desugared::Expr>>,
        create_var: bool,
    ) -> Option<Vec<Box<desugared::Expr>>> {
        let entry = self.old_book.get_count_garanteed(head.to_string().as_str());
        let mut arguments = Vec::new();

        let (hidden, _erased) = entry.arguments.count_implicits();

        if spine.len() == entry.arguments.len() - hidden {
            // Only the explicit arguments were given: interleave them
            // with synthesized hidden arguments, in declaration order.
            let mut spine_iter = spine.iter();
            for arg in entry.arguments.iter() {
                if arg.hidden {
                    if create_var {
                        arguments.push(desugared::Expr::var(self.gen_name(arg.range)))
                    } else {
                        arguments.push(self.gen_hole_expr(arg.range))
                    }
                } else {
                    arguments.push(spine_iter.next().unwrap().to_owned())
                }
            }
        } else if spine.len() != entry.arguments.len() {
            // The expected size is the one provided by the desugar.
            self.send_err(PassError::SugarIsBadlyImplemented(
                entry.range,
                range,
                spine.len(),
            ));
            // Fix: this branch previously fell through and returned an
            // *empty* argument list, so callers built an ill-formed
            // application instead of an error node.
            return None;
        } else {
            // Every argument, hidden ones included, was given explicitly:
            // pass the spine through unchanged. (Previously this case
            // silently returned an empty list.)
            arguments = spine;
        }

        Some(arguments)
    }

    /// Builds a constructor application of `head`, filling hidden
    /// arguments; falls back to an error expression on failure.
    pub(crate) fn mk_desugared_ctr(
        &mut self,
        range: Range,
        head: QualifiedIdent,
        spine: Vec<Box<desugared::Expr>>,
        create_var_on_hidden: bool,
    ) -> Box<desugared::Expr> {
        match self.make_desugared_spine(range, head.clone(), spine, create_var_on_hidden) {
            Some(spine) => desugared::Expr::ctr(range, head, spine),
            None => desugared::Expr::err(range),
        }
    }

    /// Builds a function application of `head`, filling hidden
    /// arguments; falls back to an error expression on failure.
    pub(crate) fn mk_desugared_fun(
        &mut self,
        range: Range,
        head: QualifiedIdent,
        spine: Vec<Box<desugared::Expr>>,
        create_var_on_hidden: bool,
    ) -> Box<desugared::Expr> {
        match self.make_desugared_spine(range, head.clone(), spine, create_var_on_hidden) {
            Some(spine) => desugared::Expr::fun(range, head, spine),
            None => desugared::Expr::err(range),
        }
    }

    /// Desugars an application head: either a named constructor/function
    /// application with positional and named bindings, or a plain
    /// application spine. Panics on any other expression kind.
    pub(crate) fn desugar_app(&mut self, range: Range, head: &Expr) -> Box<desugared::Expr> {
        match &head.data {
            ExprKind::Constr { name, args } => {
                let entry = self.old_book.get_count_garanteed(name.to_string().as_str());

                let mut positions = FxHashMap::default();
                let mut arguments = vec![None; entry.arguments.len()];

                let (hidden, _erased) = entry.arguments.count_implicits();

                // Check if we should just fill all the implicits
                let fill_hidden = args.len() == entry.arguments.len() - hidden;

                if fill_hidden {
                    for i in 0..entry.arguments.len() {
                        if entry.arguments[i].hidden {
                            // It's not expected that positional arguments require the range so
                            // it's the reason why we are using a terrible "ghost range"
                            arguments[i] = Some((range, self.gen_hole_expr(range)))
                        }
                    }
                } else if entry.arguments.len() != args.len() {
                    self.send_err(PassError::IncorrectArity(
                        name.range,
                        args.iter().map(|x| x.locate()).collect(),
                        entry.arguments.len(),
                        hidden,
                    ));
                    return desugared::Expr::err(range);
                }

                // Index every declared argument by name for named bindings.
                for i in 0..entry.arguments.len() {
                    positions.insert(entry.arguments[i].name.to_str(), i);
                }

                // First pass: place the named bindings.
                for arg in args {
                    match arg {
                        Binding::Positional(_) => (),
                        Binding::Named(r, name, v) => {
                            let pos = match positions.get(name.to_str()) {
                                Some(pos) => *pos,
                                None => {
                                    self.send_err(PassError::CannotFindField(
                                        name.range,
                                        name.range,
                                        name.to_string(),
                                    ));
                                    continue;
                                }
                            };
                            if let Some((range, _)) = arguments[pos] {
                                self.send_err(PassError::DuplicatedNamed(range, *r));
                            } else {
                                arguments[pos] = Some((*r, self.desugar_expr(v)))
                            }
                        }
                    }
                }

                // Second pass: fill the remaining slots with positional
                // bindings, skipping auto-filled hidden arguments.
                for arg in args {
                    match arg {
                        Binding::Positional(v) => {
                            for i in 0..entry.arguments.len() {
                                let arg_decl = &entry.arguments[i];
                                if (fill_hidden && arg_decl.hidden) || arguments[i].is_some() {
                                    continue;
                                }
                                arguments[i] = Some((v.range, self.desugar_expr(v)));
                                break;
                            }
                        }
                        Binding::Named(_, _, _) => (),
                    }
                }

                // If any slot stayed empty, an error was already reported.
                if arguments.iter().any(|x| x.is_none()) {
                    return Box::new(desugared::Expr {
                        data: desugared::ExprKind::Err,
                        range,
                    });
                }

                let new_spine = arguments.iter().map(|x| x.clone().unwrap().1).collect();

                Box::new(desugared::Expr {
                    data: if entry.is_ctr {
                        desugared::ExprKind::Ctr {
                            name: name.clone(),
                            args: new_spine,
                        }
                    } else {
                        desugared::ExprKind::Fun {
                            name: name.clone(),
                            args: new_spine,
                        }
                    },
                    range,
                })
            }
            ExprKind::App { fun, args } => {
                let mut new_spine = Vec::new();
                let new_head = self.desugar_expr(fun);
                for arg in args {
                    new_spine.push(desugared::AppBinding {
                        data: self.desugar_expr(&arg.data),
                        erased: arg.erased,
                    })
                }
                desugared::Expr::app(range, new_head, new_spine)
            }
            _ => panic!("Internal Error: This function should be used with app and constr"),
        }
    }
}

View File

@ -0,0 +1,105 @@
use kind_span::Locatable;
use kind_tree::concrete::{self, Attribute, AttributeStyle};
use kind_tree::Attributes;
use crate::errors::PassError;
use super::DesugarState;
impl<'a> DesugarState<'a> {
    /// Reports an error when an attribute received bracketed arguments
    /// it does not accept.
    fn args_should_be_empty(&mut self, attr: &Attribute) {
        if !attr.args.is_empty() {
            self.send_err(PassError::AttributeDoesNotExpectArgs(attr.range))
        };
    }

    /// Reports an error when an attribute received an `= value` it does
    /// not accept.
    fn attr_without_value(&mut self, attr: &Attribute) {
        if attr.value.is_some() {
            self.send_err(PassError::AttributeDoesNotExpectEqual(attr.range))
        };
    }

    /// Reports an error when the attribute's `= value` has the wrong
    /// shape (e.g. a string where an identifier was expected).
    fn attr_invalid_argument(&mut self, attr: &Attribute) {
        // Fix: this guard used `is_none()`, but the helper is only called
        // from `Some(_)` match arms below, so the diagnostic was never
        // actually emitted.
        if attr.value.is_some() {
            self.send_err(PassError::InvalidAttributeArgument(attr.range))
        };
    }

    /// Reports an error when an attribute that requires an `= value`
    /// was given none.
    fn attr_expects_a_value(&mut self, attr: &Attribute) {
        if attr.value.is_none() {
            self.send_err(PassError::AttributeExpectsAValue(attr.range))
        };
    }

    /// Validates the concrete attributes of a definition and folds them
    /// into the `Attributes` flags used by later passes. Unknown
    /// attributes are reported as errors.
    pub fn desugar_attributes(&mut self, attrs: &[concrete::Attribute]) -> Attributes {
        let mut attributes: Attributes = Default::default();

        for attr in attrs {
            match attr.name.to_str() {
                // The derive attribute is treated by the expand
                // pass so here we just ignore it.
                "derive" => (),
                "inline" => {
                    self.args_should_be_empty(attr);
                    self.attr_without_value(attr);
                    attributes.inlined = true;
                }
                "keep" => {
                    self.args_should_be_empty(attr);
                    self.attr_without_value(attr);
                    attributes.keep = true;
                }
                "kdl_run" => {
                    self.args_should_be_empty(attr);
                    self.attr_without_value(attr);
                    attributes.kdl_run = true;
                }
                "kdl_erase" => {
                    self.args_should_be_empty(attr);
                    self.attr_without_value(attr);
                    attributes.kdl_erase = true;
                }
                "kdl_name" => {
                    self.args_should_be_empty(attr);
                    match &attr.value {
                        Some(AttributeStyle::Ident(_, ident)) => {
                            attributes.kdl_name = Some(ident.clone());
                        }
                        Some(_) => self.attr_invalid_argument(attr),
                        None => self.attr_expects_a_value(attr),
                    }
                }
                "kdl_state" => {
                    self.args_should_be_empty(attr);
                    match &attr.value {
                        Some(AttributeStyle::Ident(_, ident)) => {
                            attributes.kdl_state = Some(ident.clone());
                        }
                        Some(_) => self.attr_invalid_argument(attr),
                        None => self.attr_expects_a_value(attr),
                    }
                }
                "trace" => {
                    self.args_should_be_empty(attr);
                    match &attr.value {
                        Some(AttributeStyle::Ident(_, id)) if id.to_string() == "true" => {
                            attributes.trace = Some(true);
                        }
                        Some(AttributeStyle::Ident(_, id)) if id.to_string() == "false" => {
                            attributes.trace = Some(false);
                        }
                        Some(other) => {
                            self.send_err(PassError::InvalidAttributeArgument(other.locate()))
                        }
                        None => {
                            // NOTE(review): a bare `#trace` sets the flag to
                            // `Some(false)` — confirm `Some(true)` was not
                            // intended here.
                            attributes.trace = Some(false);
                        }
                    }
                }
                _ => self.send_err(PassError::AttributeDoesNotExists(attr.range)),
            }
        }

        attributes
    }
}

View File

@ -0,0 +1,267 @@
use fxhash::FxHashMap;
use kind_span::{Locatable, Range};
use kind_tree::concrete::{expr, CaseBinding, Destruct, TopLevel};
use kind_tree::desugared;
use kind_tree::symbol::Ident;
use crate::errors::{PassError, Sugar};
use super::DesugarState;
impl<'a> DesugarState<'a> {
/// Reorders the case bindings of a destructuring `let`/`open` so they
/// follow the declared field order. Each slot holds the binding's range
/// and the (possibly renamed) identifier, or `None` when the field was
/// not mentioned. Duplicate, unknown and (absent `..`) missing fields
/// are reported as errors.
pub(crate) fn order_case_arguments(
    &mut self,
    type_info: (&Range, String),
    fields: &[(String, bool)],
    cases: &[CaseBinding],
    jump_rest: Option<Range>,
) -> Vec<Option<(Range, Ident)>> {
    let mut ordered_fields = vec![None; fields.len()];

    // Map each declared field name to its slot index and hidden flag.
    let mut names = FxHashMap::default();
    for (idx, (field_name, hidden)) in fields.iter().enumerate() {
        names.insert(field_name.clone(), (idx, *hidden));
    }

    for binding in cases {
        let (name, alias) = match binding {
            CaseBinding::Field(field) => (field.clone(), field.clone()),
            CaseBinding::Renamed(field, new_name) => (field.clone(), new_name.clone()),
        };

        match names.get(name.to_str()) {
            Some((idx, _)) => {
                if let Some((range, _)) = ordered_fields[*idx] {
                    self.send_err(PassError::DuplicatedNamed(range, name.range));
                } else {
                    ordered_fields[*idx] = Some((name.locate(), alias.clone()))
                }
            }
            None => self.send_err(PassError::CannotFindField(
                name.range,
                *type_info.0,
                type_info.1.to_string(),
            )),
        }
    }

    // Visible fields that were never bound must be covered, unless a
    // `..` (jump_rest) was written.
    let names: Vec<String> = names
        .iter()
        .filter(|(_, (idx, hidden))| ordered_fields[*idx].is_none() && !hidden)
        .map(|(name, _)| name.clone())
        .collect();

    if jump_rest.is_none() && !names.is_empty() {
        self.send_err(PassError::NoFieldCoverage(*type_info.0, names))
    }

    ordered_fields
}
/// Desugars a destructuring binding. A record destructuring becomes a
/// call to the type's generated `open` method with a lambda over the
/// bound fields; a plain identifier binding is delegated to `on_ident`.
pub(crate) fn desugar_destruct(
    &mut self,
    range: Range,
    binding: &expr::Destruct,
    val: Box<desugared::Expr>,
    next: &dyn Fn(&mut Self) -> Box<desugared::Expr>,
    on_ident: &dyn Fn(&mut Self, &Ident) -> Box<desugared::Expr>,
) -> Box<desugared::Expr> {
    match binding {
        Destruct::Destruct(_, typ, case, jump_rest) => {
            let count = self.old_book.count.get(&typ.to_string()).unwrap();
            let open_id = typ.add_segment("open");

            // Resolve the record declaration the constructor belongs to;
            // destructuring is only allowed on records.
            let rec = count
                .is_record_cons_of
                .clone()
                .and_then(|name| self.old_book.entries.get(&name.to_string()));

            let record = if let Some(TopLevel::RecordType(record)) = rec {
                record
            } else {
                self.send_err(PassError::LetDestructOnlyForRecord(typ.range));
                return desugared::Expr::err(typ.range);
            };

            // The `open` method must have been derived for this type.
            if self.old_book.count.get(&open_id.to_string()).is_none() {
                self.send_err(PassError::NeedToImplementMethods(
                    binding.locate(),
                    Sugar::Open(typ.to_string()),
                ));
                return desugared::Expr::err(range);
            }

            let ordered_fields = self.order_case_arguments(
                (&typ.range, typ.to_string()),
                &record
                    .fields
                    .iter()
                    .map(|x| (x.0.to_string(), false))
                    .collect::<Vec<(String, bool)>>(),
                case,
                *jump_rest,
            );

            // Unmentioned fields are bound to fresh names so the lambda
            // still covers every field.
            let mut arguments = Vec::new();
            for arg in ordered_fields {
                if let Some((_, name)) = arg {
                    arguments.push(name)
                } else {
                    arguments.push(self.gen_name(jump_rest.unwrap_or(typ.range)))
                }
            }

            // Erasure flags for the fields only (parameters dropped).
            let mut irrelev = count.arguments.map(|x| x.erased).to_vec();
            irrelev = irrelev[record.parameters.len()..].to_vec();

            let spine = vec![
                val,
                desugared::Expr::unfold_lambda(&irrelev, &arguments, next(self)),
            ];

            self.mk_desugared_fun(range, open_id, spine, false)
        }
        Destruct::Ident(name) => on_ident(self, name),
    }
}
/// Desugars `let <binding> = val; next`.
///
/// Destructuring bindings go through `desugar_destruct`; a plain
/// identifier becomes an ordinary `let` node.
///
/// NOTE(review): `val` is desugared twice — once into `res_val` (used
/// by the destructuring path) and again inside the identifier closure.
/// Each desugaring can generate fresh holes/names (see `gen_hole` /
/// `gen_name`); confirm this duplication is intended.
pub(crate) fn desugar_let(
    &mut self,
    range: Range,
    binding: &expr::Destruct,
    val: &expr::Expr,
    next: &expr::Expr,
) -> Box<desugared::Expr> {
    let res_val = self.desugar_expr(val);
    self.desugar_destruct(
        next.range,
        binding,
        res_val,
        &|this| this.desugar_expr(next),
        &|this, name| {
            desugared::Expr::let_(
                range,
                name.clone(),
                this.desugar_expr(val),
                this.desugar_expr(next),
            )
        },
    )
}
/// Desugars a `match` expression into a call to the type's
/// `<Type>.match` eliminator.
///
/// The argument spine is `[scrutinee, motive, case_0, …, case_n]`, one
/// lambda per constructor in declaration order. Reports errors for an
/// undefined `<Type>.match`, matching on a non-sum type, unknown or
/// duplicated constructors, and missing cases.
pub(crate) fn desugar_match(
    &mut self,
    range: Range,
    match_: &expr::Match,
) -> Box<desugared::Expr> {
    let entry = self.old_book.entries.get(&match_.typ.to_string()).unwrap();
    let match_id = match_.typ.add_segment("match");
    if self.old_book.entries.get(&match_id.to_string()).is_none() {
        self.send_err(PassError::NeedToImplementMethods(
            range,
            Sugar::Match(match_.typ.to_string()),
        ));
        return desugared::Expr::err(range);
    }
    // `match` only works on sum types.
    let sum = if let TopLevel::SumType(sum) = entry {
        sum
    } else {
        self.send_err(PassError::LetDestructOnlyForSum(match_.typ.range));
        return desugared::Expr::err(match_.typ.range);
    };
    // One slot per constructor, filled as the written cases are seen.
    let mut cases_args = Vec::new();
    let mut positions = FxHashMap::default();
    for case in &sum.constructors {
        positions.insert(case.name.to_str(), cases_args.len());
        cases_args.push(None)
    }
    for case in &match_.cases {
        let index = match positions.get(case.constructor.to_str()) {
            Some(pos) => *pos,
            None => {
                self.send_err(PassError::CannotFindConstructor(
                    case.constructor.range,
                    match_.typ.range,
                    match_.typ.to_string(),
                ));
                continue;
            }
        };
        if let Some((range, _, _)) = cases_args[index] {
            // Two cases for the same constructor.
            self.send_err(PassError::DuplicatedNamed(range, case.constructor.range));
        } else {
            let sum_constructor = &sum.constructors[index];
            // Reorder the case's bindings into argument order.
            let ordered = self.order_case_arguments(
                (&case.constructor.range, case.constructor.to_string()),
                &sum_constructor
                    .args
                    .iter()
                    .map(|x| (x.name.to_string(), x.hidden))
                    .collect::<Vec<(String, bool)>>(),
                &case.bindings,
                case.ignore_rest,
            );
            // Unbound arguments get fresh names.
            let mut arguments = Vec::new();
            for arg in ordered {
                if let Some((_, name)) = arg {
                    arguments.push(name)
                } else {
                    arguments.push(self.gen_name(case.ignore_rest.unwrap_or(match_.typ.range)));
                }
            }
            cases_args[index] = Some((case.constructor.range, arguments, &case.value));
        }
    }
    // Build one lambda per covered constructor; the rest is missing
    // coverage.
    let mut unbound = Vec::new();
    let mut lambdas = Vec::new();
    for (i, case_arg) in cases_args.iter().enumerate() {
        let case = &sum.constructors[i];
        if let Some((_, arguments, val)) = &case_arg {
            // NOTE(review): the erasure flags are reversed here, unlike
            // in `desugar_destruct` — confirm the order that
            // `unfold_lambda` expects.
            let case: Vec<bool> = case.args.iter().map(|x| x.erased).rev().collect();
            lambdas.push(desugared::Expr::unfold_lambda(
                &case,
                arguments,
                self.desugar_expr(val),
            ))
        } else {
            unbound.push(case.name.to_string())
        }
    }
    if !unbound.is_empty() {
        self.send_err(PassError::NoCoverage(range, unbound))
    }
    // Use the explicit motive when given; otherwise build a lambda over
    // the type's indices plus the scrutinee, returning a hole.
    let motive = if let Some(res) = &match_.motive {
        self.desugar_expr(res)
    } else {
        let mut idx: Vec<Ident> = sum.indices.iter().map(|x| x.name.clone()).collect();
        idx.push(Ident::generate("val_"));
        idx.iter()
            .rfold(self.gen_hole_expr(match_.typ.range), |expr, l| {
                desugared::Expr::lambda(l.range, l.clone(), expr, false)
            })
    };
    let prefix = [self.desugar_expr(&match_.scrutinizer), motive];
    self.mk_desugared_fun(
        range,
        match_id,
        [prefix.as_slice(), lambdas.as_slice()].concat(),
        false,
    )
}
}

View File

@ -0,0 +1,317 @@
use kind_span::{Locatable, Range};
use kind_tree::concrete::{self, expr, Literal};
use kind_tree::desugared;
use kind_tree::symbol::{Ident, QualifiedIdent};
use crate::errors::{PassError, Sugar};
use super::DesugarState;
impl<'a> DesugarState<'a> {
/// Returns `true` when `name` is defined in the source book; otherwise
/// reports `NeedToImplementMethods` for the given sugar and returns
/// `false`.
pub fn check_implementation(&mut self, name: &str, range: Range, sugar: Sugar) -> bool {
    let defined = self.old_book.names.contains_key(&name.to_string());
    if !defined {
        self.send_err(PassError::NeedToImplementMethods(range, sugar));
    }
    defined
}
/// Lowers a concrete literal, checking that the definitions backing a
/// sugared literal (`String.cons`/`String.nil`, `U120.new`) exist when
/// required; yields an error node when they do not.
pub(crate) fn desugar_literal(
    &mut self,
    range: Range,
    literal: &expr::Literal,
) -> Box<desugared::Expr> {
    match literal {
        Literal::Type => desugared::Expr::typ(range),
        Literal::Help(name) => desugared::Expr::hlp(range, name.clone()),
        Literal::NumTypeU60 => desugared::Expr::type_u60(range),
        Literal::NumTypeF60 => desugared::Expr::type_f60(range),
        Literal::NumU60(num) => desugared::Expr::num_u60(range, *num),
        Literal::NumF60(num) => desugared::Expr::num_f60(range, *num),
        // Characters are represented as u60 numbers.
        Literal::Char(cht) => desugared::Expr::num_u60(range, *cht as u64),
        Literal::String(string) => {
            // Checks short-circuit: at most one error is reported.
            if self.check_implementation("String.cons", range, Sugar::String) {
                if self.check_implementation("String.nil", range, Sugar::String) {
                    return desugared::Expr::str(range, string.clone());
                }
            }
            desugared::Expr::err(range)
        }
        Literal::NumU120(num) => {
            if self.check_implementation("U120.new", range, Sugar::U120) {
                desugared::Expr::num_u120(range, *num)
            } else {
                desugared::Expr::err(range)
            }
        }
    }
}
/// Desugars a substitution expression; only the substituted body is
/// recursively desugared.
pub(crate) fn desugar_sub(
    &mut self,
    range: Range,
    sub: &expr::Substitution,
) -> Box<desugared::Expr> {
    let body = self.desugar_expr(&sub.expr);
    desugared::Expr::sub(range, sub.name.clone(), sub.indx, sub.redx, body)
}
/// Desugars one statement of a `do` block into nested applications of
/// the monad's `bind`/`pure` (identifiers supplied by `desugar_do`).
pub(crate) fn desugar_sttm(
    &mut self,
    bind_ident: &QualifiedIdent,
    pure_ident: &QualifiedIdent,
    sttm: &expr::Sttm,
) -> Box<desugared::Expr> {
    type Exp = Box<desugared::Expr>;
    // Creates a bind constructor: `bind expr (λname. next)`.
    let bind = |this: &mut Self, range: Range, name: Ident, expr: Exp, next: Exp| -> Exp {
        this.mk_desugared_fun(
            range,
            bind_ident.clone(),
            vec![expr, desugared::Expr::lambda(range, name, next, false)],
            false,
        )
    };
    match &sttm.data {
        // `expr; next` — bind the result to a fresh, unused name.
        concrete::SttmKind::Expr(expr, next) => {
            let res_expr = self.desugar_expr(expr);
            let res_sttm = self.desugar_sttm(bind_ident, pure_ident, next);
            let name = self.gen_name(sttm.range);
            bind(self, sttm.range, name, res_expr, res_sttm)
        }
        // `ask <pattern> = val; next` — bind to a fresh name, then
        // destructure that name inside the continuation.
        concrete::SttmKind::Ask(concrete::Destruct::Destruct(a, b, c, d), val, next) => {
            let res_val = self.desugar_expr(val);
            let name = self.gen_name(sttm.range);
            let res_destruct = self.desugar_destruct(
                next.range,
                &concrete::Destruct::Destruct(*a, b.to_owned(), c.to_owned(), *d),
                desugared::Expr::var(name.clone()),
                &|this| this.desugar_sttm(bind_ident, pure_ident, next),
                // A rebuilt Destruct::Destruct can never take the
                // identifier branch.
                &|_, _| unreachable!(),
            );
            bind(self, sttm.range, name, res_val, res_destruct)
        }
        // `ask name = val; next` — a plain monadic bind.
        concrete::SttmKind::Ask(concrete::Destruct::Ident(name), val, next) => {
            let res_expr = self.desugar_expr(val);
            let res_sttm = self.desugar_sttm(bind_ident, pure_ident, next);
            bind(self, sttm.range, name.clone(), res_expr, res_sttm)
        }
        // `let <binding> = val; next` — a pure let inside the block.
        concrete::SttmKind::Let(destruct, val, next) => {
            let res_val = self.desugar_expr(&val.clone());
            self.desugar_destruct(
                next.range,
                destruct,
                res_val,
                &|this| this.desugar_sttm(bind_ident, pure_ident, next),
                &|this, ident| {
                    desugared::Expr::let_(
                        destruct.locate(),
                        ident.clone(),
                        this.desugar_expr(val),
                        this.desugar_sttm(bind_ident, pure_ident, next),
                    )
                },
            )
        }
        // `return expr` — wrap the value with the monad's `pure`.
        concrete::SttmKind::Return(expr) => {
            let res_expr = self.desugar_expr(expr);
            self.mk_desugared_fun(expr.locate(), pure_ident.clone(), vec![res_expr], false)
        }
        // A trailing bare expression is the block's result.
        concrete::SttmKind::RetExpr(expr) => self.desugar_expr(expr),
    }
}
/// Desugars a `do <typ> { … }` block. Requires both `<typ>.bind` and
/// `<typ>.pure` to be defined; otherwise reports a single
/// `NeedToImplementMethods` error and yields an error node.
pub(crate) fn desugar_do(
    &mut self,
    range: Range,
    typ: &QualifiedIdent,
    sttm: &expr::Sttm,
) -> Box<desugared::Expr> {
    let bind_ident = typ.add_segment("bind");
    let pure_ident = typ.add_segment("pure");

    let has_bind = self.old_book.names.get(bind_ident.to_str()).is_some();
    let has_pure = self.old_book.names.get(pure_ident.to_str()).is_some();

    if has_bind && has_pure {
        self.desugar_sttm(&bind_ident, &pure_ident, sttm)
    } else {
        self.send_err(PassError::NeedToImplementMethods(range, Sugar::DoNotation));
        desugared::Expr::err(range)
    }
}
/// Desugars a sigma type `[x: A] -> B` into an application of the
/// `Sigma` constructor, with `B` wrapped in an (erased) lambda over the
/// binder. `Sigma` must be defined.
pub(crate) fn desugar_sigma(
    &mut self,
    range: Range,
    name: &Option<Ident>,
    typ: &expr::Expr,
    body: &expr::Expr,
) -> Box<desugared::Expr> {
    let sigma = QualifiedIdent::new_static("Sigma", None, range);
    if !self.check_implementation(sigma.to_str(), range, Sugar::Sigma) {
        return desugared::Expr::err(range);
    }
    // An anonymous binder still needs a name for the lambda.
    let binder = name.clone().unwrap_or_else(|| Ident::generate("_var"));
    let fst = self.desugar_expr(typ);
    let snd = desugared::Expr::lambda(range, binder, self.desugar_expr(body), true);
    self.mk_desugared_ctr(range, sigma, vec![fst, snd], false)
}
/// Desugars a list literal `[a, b, c]` into a right fold of
/// `List.cons` applications terminated by `List.nil`.
///
/// Requires `List`, `List.cons` and `List.nil` to be defined;
/// otherwise reports a single `NeedToImplementMethods` error and
/// yields an error node.
pub(crate) fn desugar_list(
    &mut self,
    range: Range,
    expr: &[expr::Expr],
) -> Box<desugared::Expr> {
    let list_ident = QualifiedIdent::new_static("List", None, range);
    let cons_ident = list_ident.add_segment("cons");
    let nil_ident = list_ident.add_segment("nil");

    // FIX: the `nil`/`cons` lookups were previously swapped (`nil` was
    // bound to the `cons` lookup and vice versa). Harmless at runtime —
    // both are only tested for presence — but misleading to readers.
    let list = self.old_book.names.get(list_ident.to_str());
    let nil = self.old_book.names.get(nil_ident.to_str());
    let cons = self.old_book.names.get(cons_ident.to_str());

    if list.is_none() || nil.is_none() || cons.is_none() {
        self.send_err(PassError::NeedToImplementMethods(range, Sugar::List));
        return desugared::Expr::err(range);
    }

    // Fold from the right so the last element ends up next to nil.
    expr.iter().rfold(
        self.mk_desugared_ctr(range, nil_ident, Vec::new(), false),
        |res, elem| {
            let spine = vec![self.desugar_expr(elem), res];
            self.mk_desugared_ctr(range, cons_ident.clone(), spine, false)
        },
    )
}
/// Desugars `if cond { then } else { otherwise }` into a call to
/// `Bool.if`, which must be defined.
pub(crate) fn desugar_if(
    &mut self,
    range: Range,
    cond: &expr::Expr,
    if_: &expr::Expr,
    else_: &expr::Expr,
) -> Box<desugared::Expr> {
    let boolean = QualifiedIdent::new_static("Bool", None, range);
    let bool_if_ident = boolean.add_segment("if");

    if self.old_book.names.get(bool_if_ident.to_str()).is_none() {
        self.send_err(PassError::NeedToImplementMethods(range, Sugar::BoolIf));
        return desugared::Expr::err(range);
    }

    let condition = self.desugar_expr(cond);
    let then_branch = self.desugar_expr(if_);
    let else_branch = self.desugar_expr(else_);
    self.mk_desugared_fun(
        range,
        bool_if_ident,
        vec![condition, then_branch, else_branch],
        false,
    )
}
/// Desugars a pair literal into an application of `Sigma.new`, which
/// must be defined.
pub(crate) fn desugar_pair(
    &mut self,
    range: Range,
    fst: &expr::Expr,
    snd: &expr::Expr,
) -> Box<desugared::Expr> {
    let sigma_new = QualifiedIdent::new_sugared("Sigma", "new", range);
    if !self.check_implementation(sigma_new.to_str(), range, Sugar::Pair) {
        return desugared::Expr::err(range);
    }
    let first = self.desugar_expr(fst);
    let second = self.desugar_expr(snd);
    self.mk_desugared_ctr(range, sigma_new, vec![first, second], false)
}
/// Main expression dispatch: lowers each concrete expression form to
/// the desugared tree, delegating the sugared forms (literals, lists,
/// do-notation, match, pairs, …) to their dedicated methods.
pub(crate) fn desugar_expr(&mut self, expr: &expr::Expr) -> Box<desugared::Expr> {
    use expr::ExprKind::*;
    match &expr.data {
        Constr { .. } | App { .. } => self.desugar_app(expr.range, expr),
        All {
            param,
            typ,
            body,
            erased,
        } => desugared::Expr::all(
            expr.range,
            // An unnamed Pi binder gets a fresh name.
            param.clone().unwrap_or_else(|| self.gen_name(expr.range)),
            self.desugar_expr(typ),
            self.desugar_expr(body),
            *erased,
        ),
        Binary { op, fst, snd } => desugared::Expr::binary(
            expr.range,
            *op,
            self.desugar_expr(fst),
            self.desugar_expr(snd),
        ),
        Lambda {
            param,
            typ: None,
            body,
            erased,
        } => {
            desugared::Expr::lambda(expr.range, param.clone(), self.desugar_expr(body), *erased)
        }
        // An annotated lambda `(x: T) => e` becomes the lambda wrapped
        // in an annotation against `T -> _`, with a hole for the
        // codomain.
        Lambda {
            param,
            typ: Some(typ),
            body,
            erased,
        } => desugared::Expr::ann(
            expr.range,
            desugared::Expr::lambda(
                expr.range,
                param.clone(),
                self.desugar_expr(body),
                *erased,
            ),
            desugared::Expr::all(
                typ.range,
                self.gen_name(expr.range),
                self.desugar_expr(typ),
                self.gen_hole_expr(typ.range),
                *erased,
            ),
        ),
        Ann { val, typ } => {
            desugared::Expr::ann(expr.range, self.desugar_expr(val), self.desugar_expr(typ))
        }
        Var { name } => desugared::Expr::var(name.clone()),
        // Each hole gets a unique number.
        Hole => desugared::Expr::hole(expr.range, self.gen_hole()),
        Lit { lit } => self.desugar_literal(expr.range, lit),
        Let { name, val, next } => self.desugar_let(expr.range, name, val, next),
        Do { typ, sttm } => self.desugar_do(expr.range, typ, sttm),
        Sigma { param, fst, snd } => self.desugar_sigma(expr.range, param, fst, snd),
        List { args } => self.desugar_list(expr.range, args),
        If { cond, then_, else_ } => self.desugar_if(expr.range, cond, then_, else_),
        Pair { fst, snd } => self.desugar_pair(expr.range, fst, snd),
        Match(matcher) => self.desugar_match(expr.range, matcher),
        Subst(sub) => self.desugar_sub(expr.range, sub),
    }
}
}

View File

@ -0,0 +1,79 @@
//! This pass transforms a sugared tree into a simpler tree.
//!
//! It does a lot of things like:
//! * Setting an unique number for each of the holes
//! * Desugar of lets and matchs
//! * Untyped derivations for types and records
//! * Checking of hidden and erased arguments
use std::sync::mpsc::Sender;
use kind_report::data::Diagnostic;
use kind_span::Range;
use kind_tree::{
concrete::{self},
desugared,
symbol::Ident,
};
use crate::errors::{PassError, GenericPassError};
pub mod app;
pub mod attributes;
pub mod destruct;
pub mod expr;
pub mod top_level;
/// Shared state of the desugaring pass.
pub struct DesugarState<'a> {
    // Channel used to report diagnostics to the driver.
    pub errors: Sender<Box<dyn Diagnostic>>,
    // The concrete (sugared) book being lowered.
    pub old_book: &'a concrete::Book,
    // The desugared book under construction.
    pub new_book: desugared::Book,
    // Counter used by `gen_name` to create fresh variable names.
    pub name_count: u64,
    // Set as soon as any error is reported; gates the final result.
    pub failed: bool,
}
/// Lowers a whole concrete book into a desugared book, reporting
/// diagnostics through `errors`. Fails if any error was reported.
pub fn desugar_book(
    errors: Sender<Box<dyn Diagnostic>>,
    book: &concrete::Book,
) -> anyhow::Result<desugared::Book> {
    let mut state = DesugarState {
        errors,
        old_book: book,
        new_book: Default::default(),
        name_count: 0,
        failed: false,
    };
    state.desugar_book(book);
    match state.failed {
        true => Err(GenericPassError.into()),
        false => Ok(state.new_book),
    }
}
impl<'a> DesugarState<'a> {
    /// Allocates the next hole number (post-incrementing the book's
    /// hole counter).
    fn gen_hole(&mut self) -> u64 {
        let hole = self.new_book.holes;
        self.new_book.holes += 1;
        hole
    }

    /// Produces a fresh variable name of the form `x_<n>`.
    fn gen_name(&mut self, range: Range) -> Ident {
        self.name_count += 1;
        Ident::new(format!("x_{}", self.name_count), range)
    }

    /// Produces a fresh hole expression at `range`.
    fn gen_hole_expr(&mut self, range: Range) -> Box<desugared::Expr> {
        let num = self.gen_hole();
        desugared::Expr::hole(range, num)
    }

    /// Reports a pass error and marks the pass as failed.
    fn send_err(&mut self, err: PassError) {
        self.errors.send(Box::new(err)).unwrap();
        self.failed = true;
    }

    /// Desugars every top-level declaration of the book.
    pub fn desugar_book(&mut self, book: &concrete::Book) {
        for top_level in book.entries.values() {
            self.desugar_top_level(top_level)
        }
    }
}

View File

@ -0,0 +1,401 @@
use kind_span::Range;
use kind_tree::concrete::{self, Telescope};
use kind_tree::desugared::{self, ExprKind};
use kind_tree::symbol::QualifiedIdent;
use crate::errors::{PassError, Sugar};
use super::DesugarState;
/// States, purely syntactically, whether `expr` is an occurrence of the
/// type constructor `type_name`: either the bare variable or the head
/// of an application.
/// TODO: It does not work with HIT (We will probably have to change it in the future).
/// NOTE: Does not work with Pi types.
pub fn is_data_constructor_of(expr: concrete::expr::Expr, type_name: &str) -> bool {
    match expr.data {
        concrete::ExprKind::Var { name } => name.to_string() == type_name,
        concrete::ExprKind::App { fun, args: _ } => match fun.data {
            // Only a variable head counts; anything else is rejected.
            concrete::ExprKind::Var { name } => name.to_string() == type_name,
            _ => false,
        },
        _ => false,
    }
}
impl<'a> DesugarState<'a> {
/// Lowers a concrete argument; a missing type annotation defaults to
/// `Type`.
pub fn desugar_argument(&mut self, argument: &concrete::Argument) -> desugared::Argument {
    let typ = if let Some(ty) = &argument.typ {
        self.desugar_expr(ty)
    } else {
        desugared::Expr::typ(argument.range)
    };
    desugared::Argument {
        hidden: argument.hidden,
        erased: argument.erased,
        name: argument.name.clone(),
        typ,
        range: argument.range,
    }
}
/// Lowers a sum-type declaration into one desugared entry for the type
/// constructor plus one entry per data constructor.
///
/// A constructor without an explicit type receives the family applied
/// to the (irrelevant) parameters and indices; one with an explicit
/// type is checked to (syntactically) return the family applied to the
/// declaration's own parameters.
pub fn desugar_sum_type(&mut self, sum_type: &concrete::SumTypeDecl) {
    let params = sum_type.parameters.clone();
    let indices = sum_type.indices.clone();
    let desugared_params = params.map(|arg| self.desugar_argument(arg));
    let desugared_indices = indices.map(|arg| self.desugar_argument(arg));
    // The type constructor takes parameters followed by indices, and
    // its type is `Type`.
    let type_constructor = desugared::Entry {
        name: sum_type.name.clone(),
        args: desugared_params.extend(&desugared_indices).to_vec(),
        typ: desugared::Expr::typ(sum_type.name.range),
        rules: Vec::new(),
        range: sum_type.name.range,
        attrs: self.desugar_attributes(&sum_type.attrs),
    };
    self.new_book
        .entrs
        .insert(sum_type.name.to_string(), Box::new(type_constructor));
    // Parameters (and, when needed, indices) reappear in each data
    // constructor as irrelevant arguments.
    let irrelevant_params: Vec<desugared::Argument> =
        desugared_params.map(|x| x.to_irrelevant()).to_vec();
    let irelevant_indices: Vec<desugared::Argument> = indices
        .map(|arg| self.desugar_argument(arg).to_irrelevant())
        .to_vec();
    for cons in &sum_type.constructors {
        let cons_ident = sum_type.name.add_segment(cons.name.to_str());
        // A constructor with an explicit type binds its own indices,
        // so the derived index arguments are only added when no type
        // was written.
        let pre_indices = if cons.typ.is_none() {
            irelevant_indices.as_slice()
        } else {
            &[]
        };
        let typ = match cons.typ.clone() {
            Some(expr) => {
                let res = self.desugar_expr(&expr);
                // The written type must be the family applied to the
                // declaration's own parameters, each passed through
                // unchanged as a variable.
                match &res.data {
                    ExprKind::Ctr { name, args }
                        if name.to_string() == sum_type.name.to_string() =>
                    {
                        for (i, parameter) in sum_type.parameters.iter().enumerate() {
                            match &args[i].data {
                                ExprKind::Var { name }
                                    if name.to_string() == parameter.name.to_string() => {}
                                _ => {
                                    self.send_err(PassError::ShouldBeAParameter(
                                        Some(args[i].range),
                                        parameter.range,
                                    ));
                                }
                            }
                        }
                    }
                    _ => self.send_err(PassError::NotATypeConstructor(
                        expr.range,
                        sum_type.name.range,
                    )),
                }
                res
            }
            None => {
                // No explicit type: apply the family to the parameters
                // and indices as variables.
                let args = [irrelevant_params.as_slice(), pre_indices]
                    .concat()
                    .iter()
                    .map(|x| desugared::Expr::var(x.name.clone()))
                    .collect::<Vec<Box<desugared::Expr>>>();
                desugared::Expr::ctr(cons.name.range, sum_type.name.clone(), args)
            }
        };
        let data_constructor = desugared::Entry {
            name: cons_ident.clone(),
            args: [
                irrelevant_params.as_slice(),
                pre_indices,
                cons.args.map(|arg| self.desugar_argument(arg)).as_slice(),
            ]
            .concat(),
            typ,
            rules: Vec::new(),
            attrs: self.desugar_attributes(&cons.attrs),
            range: cons.name.range,
        };
        self.new_book
            .entrs
            .insert(cons_ident.to_string(), Box::new(data_constructor));
    }
}
/// Lowers a record declaration into a desugared entry for the type
/// constructor plus a single data-constructor entry whose arguments
/// are the (irrelevant) parameters followed by one argument per field.
pub fn desugar_record_type(&mut self, rec_type: &concrete::RecordDecl) {
    let params = rec_type.parameters.clone();
    let desugared_params = params.map(|arg| self.desugar_argument(arg));
    // The type constructor takes only the parameters (no indices here)
    // and its type is `Type`.
    let type_constructor = desugared::Entry {
        name: rec_type.name.clone(),
        args: desugared_params.clone().to_vec(),
        typ: desugared::Expr::typ(rec_type.name.range),
        rules: Vec::new(),
        range: rec_type.name.range,
        attrs: self.desugar_attributes(&rec_type.attrs),
    };
    self.new_book
        .entrs
        .insert(rec_type.name.to_string(), Box::new(type_constructor));
    let irrelevant_params = desugared_params.map(|x| x.to_irrelevant());
    // The constructor's result type: the record type applied to its
    // parameters as variables.
    let args = [irrelevant_params.as_slice()]
        .concat()
        .iter()
        .map(|x| desugared::Expr::var(x.name.clone()))
        .collect::<Vec<Box<desugared::Expr>>>();
    let typ = desugared::Expr::ctr(rec_type.name.range, rec_type.name.clone(), args);
    let cons_ident = rec_type.name.add_segment(rec_type.constructor.to_str());
    // One constructor argument per declared field.
    let fields_args = rec_type
        .fields
        .iter()
        .map(|(ident, _docs, ty)| {
            desugared::Argument::from_field(
                ident,
                self.desugar_expr(ty),
                ident.range.mix(ty.range),
            )
        })
        .collect::<Vec<desugared::Argument>>();
    let data_constructor = desugared::Entry {
        name: cons_ident.clone(),
        args: [irrelevant_params.as_slice(), fields_args.as_slice()].concat(),
        typ,
        rules: Vec::new(),
        range: rec_type.constructor.range,
        attrs: self.desugar_attributes(&rec_type.cons_attrs),
    };
    self.new_book
        .entrs
        .insert(cons_ident.to_string(), Box::new(data_constructor));
}
/// Desugars a pair pattern into a `Sigma.new` constructor pattern;
/// `Sigma.new` must be defined.
pub fn desugar_pair_pat(
    &mut self,
    range: Range,
    fst: &concrete::pat::Pat,
    snd: &concrete::pat::Pat,
) -> Box<desugared::Expr> {
    let sigma_new = QualifiedIdent::new_static("Sigma", Some("new".to_string()), range);
    if self
        .old_book
        .entries
        .get(sigma_new.to_string().as_str())
        .is_none()
    {
        self.send_err(PassError::NeedToImplementMethods(range, Sugar::Pair));
        return desugared::Expr::err(range);
    }
    let first = self.desugar_pat(fst);
    let second = self.desugar_pat(snd);
    self.mk_desugared_ctr(range, sigma_new, vec![first, second], true)
}
/// Desugars a list pattern `[a, b, c]` into a right fold of
/// `List.cons` constructor patterns terminated by `List.nil`.
///
/// Requires `List`, `List.cons` and `List.nil` to be defined;
/// otherwise reports a single `NeedToImplementMethods` error and
/// yields an error node.
pub fn desugar_list_pat(
    &mut self,
    range: Range,
    expr: &[concrete::pat::Pat],
) -> Box<desugared::Expr> {
    let list_ident = QualifiedIdent::new_static("List", None, range);
    let cons_ident = list_ident.add_segment("cons");
    let nil_ident = list_ident.add_segment("nil");

    // FIX: the `nil`/`cons` lookups were previously swapped (`nil` was
    // bound to the `cons` lookup and vice versa), mirroring the same
    // defect in `desugar_list`. Harmless at runtime — both are only
    // tested for presence — but misleading to readers.
    let list = self.old_book.entries.get(list_ident.to_string().as_str());
    let nil = self.old_book.entries.get(nil_ident.to_string().as_str());
    let cons = self.old_book.entries.get(cons_ident.to_string().as_str());

    if list.is_none() || nil.is_none() || cons.is_none() {
        self.send_err(PassError::NeedToImplementMethods(range, Sugar::List));
        return desugared::Expr::err(range);
    }

    // Fold from the right so the last element ends up next to nil.
    expr.iter().rfold(
        self.mk_desugared_ctr(range, nil_ident, Vec::new(), true),
        |res, elem| {
            let spine = vec![self.desugar_pat(elem), res];
            self.mk_desugared_ctr(range, cons_ident.clone(), spine, true)
        },
    )
}
/// Lowers a concrete pattern to a desugared expression.
///
/// Constructor applications may omit all hidden arguments at once:
/// when the spine length equals the visible-argument count, fresh
/// variables are synthesized for the hidden positions; otherwise the
/// arity must match exactly.
pub fn desugar_pat(&mut self, pat: &concrete::pat::Pat) -> Box<desugared::Expr> {
    use concrete::pat::PatKind;
    match &pat.data {
        PatKind::App(head, spine) => {
            let entry = self
                .old_book
                .count
                .get(head.to_string().as_str())
                .expect("Internal Error: Cannot find definition");
            if !entry.is_ctr {
                // TODO: Check if only data constructors declared inside
                // inductive types can be used in patterns.
            }
            // The user may omit every hidden argument at once.
            let fill_hidden = spine.len() == entry.arguments.len() - entry.hiddens;
            let mut new_spine = Vec::new();
            if fill_hidden {
                let mut count = 0;
                for i in 0..entry.arguments.len() {
                    if entry.arguments[i].hidden {
                        // Synthesize a fresh variable for the hidden slot.
                        let name = self.gen_name(entry.arguments[i].range);
                        new_spine.push(desugared::Expr::var(name))
                    } else {
                        new_spine.push(self.desugar_pat(&spine[count]));
                        count += 1;
                    }
                }
            } else if entry.arguments.len() != spine.len() {
                self.send_err(PassError::IncorrectArity(
                    head.range,
                    spine.iter().map(|x| x.range).collect(),
                    entry.arguments.len(),
                    entry.hiddens,
                ));
                return desugared::Expr::err(pat.range);
            } else {
                for arg in spine {
                    new_spine.push(self.desugar_pat(arg));
                }
            }
            desugared::Expr::ctr(pat.range, head.clone(), new_spine)
        }
        // `_` becomes a fresh variable.
        PatKind::Hole => {
            let name = self.gen_name(pat.range);
            desugared::Expr::var(name)
        }
        PatKind::Var(ident) => desugared::Expr::var(ident.0.clone()),
        PatKind::U60(n) => desugared::Expr::num_u60(pat.range, *n),
        PatKind::U120(n) => desugared::Expr::num_u120(pat.range, *n),
        PatKind::F60(n) => desugared::Expr::num_f60(pat.range, *n),
        // Characters are represented as u60 numbers.
        PatKind::Char(n) => desugared::Expr::num_u60(pat.range, *n as u64),
        PatKind::Pair(fst, snd) => self.desugar_pair_pat(pat.range, fst, snd),
        PatKind::List(ls) => self.desugar_list_pat(pat.range, ls),
        PatKind::Str(string) => desugared::Expr::str(pat.range, string.to_owned()),
    }
}
/// Lowers a rewrite rule's patterns and body.
///
/// A rule may spell out every argument, or only the visible
/// (non-hidden) ones — in the latter case fresh variables are woven
/// into the hidden positions. Any other pattern count is reported as
/// `RuleWithIncorrectArity`, but a rule is still produced so later
/// passes can continue.
pub fn desugar_rule(
    &mut self,
    args: &Telescope<concrete::Argument>,
    rule: &concrete::Rule,
) -> desugared::Rule {
    let pats = rule
        .pats
        .iter()
        .map(|x| self.desugar_pat(x))
        .collect::<Vec<Box<desugared::Expr>>>();
    let (hidden, _) = args.count_implicits();
    if pats.len() == args.len() {
        // Fully explicit rule: take the patterns as written.
        desugared::Rule {
            name: rule.name.clone(),
            pats,
            body: self.desugar_expr(&rule.body),
            range: rule.range,
        }
    } else if pats.len() == args.len() - hidden {
        // Only visible patterns were written: insert fresh variables
        // at each hidden position, preserving the written order.
        let mut res_pats = Vec::new();
        let mut pat_iter = pats.iter();
        for arg in args.iter() {
            if arg.hidden {
                res_pats.push(desugared::Expr::var(self.gen_name(arg.range)))
            } else {
                res_pats.push(pat_iter.next().unwrap().to_owned());
            }
        }
        desugared::Rule {
            name: rule.name.clone(),
            pats: res_pats,
            body: self.desugar_expr(&rule.body),
            range: rule.range,
        }
    } else {
        self.send_err(PassError::RuleWithIncorrectArity(
            rule.range,
            pats.len(),
            args.len(),
            hidden,
        ));
        // TODO: Probably we should just a sentinel rule?
        desugared::Rule {
            name: rule.name.clone(),
            pats,
            body: self.desugar_expr(&rule.body),
            range: rule.range,
        }
    }
}
/// Lowers one function entry: desugars its arguments, type and rules,
/// checks that every rule takes the same number of patterns as the
/// first, and registers the result in the new book.
pub fn desugar_entry(&mut self, entry: &concrete::Entry) {
    // Fresh-name numbering restarts for each entry.
    self.name_count = 0;

    let rules = entry
        .rules
        .iter()
        .map(|rule| self.desugar_rule(&entry.args, rule))
        .collect();

    let res_entry = desugared::Entry {
        name: entry.name.clone(),
        args: entry.args.map(|x| self.desugar_argument(x)).to_vec(),
        typ: self.desugar_expr(&entry.typ),
        range: entry.range,
        attrs: self.desugar_attributes(&entry.attrs),
        rules,
    };

    // Every rule must use as many patterns as the first one does.
    let arities: Vec<(Range, usize)> = entry
        .rules
        .iter()
        .map(|rule| (rule.range, rule.pats.len()))
        .collect();

    if let Some(first) = arities.first() {
        let mismatched: Vec<(Range, usize)> = arities
            .iter()
            .filter(|(_, len)| *len != first.1)
            .cloned()
            .collect();
        if !mismatched.is_empty() {
            self.send_err(PassError::RulesWithInconsistentArity(mismatched));
        }
    }

    self.new_book
        .entrs
        .insert(res_entry.name.to_string(), Box::new(res_entry));
}
pub fn desugar_top_level(&mut self, top_level: &concrete::TopLevel) {
match top_level {
concrete::TopLevel::SumType(sum) => self.desugar_sum_type(sum),
concrete::TopLevel::RecordType(rec) => self.desugar_record_type(rec),
concrete::TopLevel::Entry(entry) => self.desugar_entry(entry),
}
}
}

View File

@ -0,0 +1,519 @@
use std::sync::mpsc::Sender;
use fxhash::{FxHashMap, FxHashSet};
use kind_report::data::Diagnostic;
use kind_span::Range;
use kind_tree::desugared;
use kind_tree::symbol::QualifiedIdent;
use kind_tree::untyped::{self};
use crate::errors::{PassError, GenericPassError};
/// Classification of a definition or binding with respect to erasure.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
enum Relevance {
    // Must be kept in the erased (runtime) program.
    Relevant,
    // Erased: must not be used from a relevant position.
    Irrelevant,
}
/// Relevance of the position currently being visited during erasure.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
enum Ambient {
    // Not yet determined — presumably used before the enclosing
    // definition's relevance is known (see `erase_rule`).
    Unknown,
    // A type-level / erased position.
    Irrelevant,
    // A runtime position.
    Relevant,
}
impl Ambient {
    /// Collapses the three-valued ambient into a two-valued relevance:
    /// anything not explicitly irrelevant counts as relevant.
    pub fn as_relevance(&self) -> Relevance {
        if let Ambient::Irrelevant = self {
            Relevance::Irrelevant
        } else {
            Relevance::Relevant
        }
    }
}
/// A node in the definition dependency graph built during erasure.
pub struct Edge {
    // Name of the definition this node represents.
    name: String,
    // Every relevance observed for this definition, with the source
    // locations that demanded it.
    relevance: FxHashMap<Relevance, Vec<Range>>,
    // Outgoing references: target node id -> the locations and ambients
    // in which each reference occurred.
    connections: FxHashMap<usize, Vec<(Range, Ambient)>>,
}
/// State of the erasure pass.
pub struct ErasureState<'a> {
    // Channel used to report diagnostics.
    errs: Sender<Box<dyn Diagnostic>>,
    // The desugared book being erased.
    book: &'a desugared::Book,
    // Dependency graph nodes, indexed by the ids stored in `names`.
    edges: Vec<Edge>,
    // Definition name -> (declaration range, node id in `edges`).
    names: FxHashMap<String, (Range, usize)>,
    // Relevance of the variables currently in scope (persistent
    // `im_rc` map; callers snapshot and restore it around scopes).
    ctx: im_rc::HashMap<String, Relevance>,
    // Set when any error is reported; gates the final result.
    failed: bool,
}
pub fn erase_book(
book: &desugared::Book,
errs: Sender<Box<dyn Diagnostic>>,
entrypoints: Vec<String>,
) -> anyhow::Result<untyped::Book> {
let mut state = ErasureState {
errs,
book,
edges: Default::default(),
names: Default::default(),
ctx: Default::default(),
failed: Default::default(),
};
state.erase_book(book, entrypoints)
}
impl<'a> ErasureState<'a> {
/// Returns the graph-node id for `name`, allocating a fresh `Edge` the
/// first time the name is seen.
fn get_edge_or_create(&mut self, name: &QualifiedIdent) -> usize {
    match self.names.get(&name.to_string()) {
        Some((_, id)) => *id,
        None => {
            let id = self.edges.len();
            self.names.insert(name.to_string(), (name.range, id));
            self.edges.push(Edge {
                name: name.to_string(),
                relevance: Default::default(),
                connections: Default::default(),
            });
            id
        }
    }
}
/// Records that node `id` was observed with `relevance` at `on`.
fn set_relevance(&mut self, id: usize, relevance: Relevance, on: Range) {
    self.edges
        .get_mut(id)
        .unwrap()
        .relevance
        .entry(relevance)
        .or_default()
        .push(on)
}
/// Adds a dependency from node `id` to the node for `name`, tagged with
/// the ambient in which the reference occurred.
fn connect_with(&mut self, id: usize, name: &QualifiedIdent, ambient: Ambient) {
    let target = self.get_edge_or_create(name);
    self.edges[id]
        .connections
        .entry(target)
        .or_default()
        .push((name.range, ambient))
}
/// Erases the whole book down to the definitions reachable from the
/// entrypoints.
///
/// Roots are the named entrypoints plus entries pulled in by kdl
/// attributes (`kdl_state`, `kdl_run`) or `keep`. Every entry is
/// erased up front (which builds the dependency graph as a side
/// effect); a worklist traversal from the roots then moves reachable
/// entries into the output, reporting an error whenever a relevant
/// position reaches a definition marked irrelevant.
pub fn erase_book(
    &mut self,
    book: &'a desugared::Book,
    named_entrypoints: Vec<String>,
) -> anyhow::Result<untyped::Book> {
    let mut vals = FxHashMap::default();
    let mut entrypoints = Vec::new();
    for name in named_entrypoints {
        if let Some(entr) = book.entrs.get(&name) {
            let id = self.get_edge_or_create(&entr.name);
            self.set_relevance(id, Relevance::Relevant, entr.name.range);
            entrypoints.push(id);
        }
    }
    // Kdl specific things.
    for entr in book.entrs.values() {
        if let Some(name) = &entr.attrs.kdl_state {
            if book.entrs.contains_key(name.to_str()) {
                let id = self.get_edge_or_create(&name.to_qualified_ident());
                self.set_relevance(id, Relevance::Relevant, name.range);
                entrypoints.push(id);
            }
        }
        if entr.attrs.kdl_run || entr.attrs.keep {
            let id = self.get_edge_or_create(&entr.name);
            self.set_relevance(id, Relevance::Relevant, entr.name.range);
            entrypoints.push(id);
        }
    }
    // Erase everything first; reachability decides what survives.
    for entr in book.entrs.values() {
        vals.insert(entr.name.to_string(), self.erase_entry(entr));
    }
    let mut visited = FxHashSet::<usize>::default();
    let mut new_book = untyped::Book {
        entrs: Default::default(),
        names: Default::default(),
    };
    // Worklist traversal of the dependency graph from the roots.
    let mut queue = entrypoints
        .iter()
        .map(|x| (x, Ambient::Relevant))
        .collect::<Vec<_>>();
    while !queue.is_empty() {
        let (fst, relev) = queue.pop().unwrap();
        if visited.contains(fst) {
            continue;
        }
        visited.insert(*fst);
        let edge = &self.edges[*fst];
        // Reaching an irrelevant-marked definition from a (possibly)
        // relevant position is an error; the first recorded location
        // is reported.
        if relev != Ambient::Irrelevant {
            if let Some(res) = edge.relevance.get(&Relevance::Irrelevant) {
                self.errs
                    .send(Box::new(PassError::CannotUseIrrelevant(None, res[0], None)))
                    .unwrap();
                self.failed = true;
            }
        }
        // Move the erased entry into the output book.
        let entry = vals.remove(&edge.name).unwrap();
        new_book
            .names
            .insert(entry.name.to_str().to_string(), new_book.entrs.len());
        new_book
            .entrs
            .insert(entry.name.to_str().to_string(), entry);
        // Enqueue everything this entry references from a relevant or
        // unknown position; irrelevant-only references are skipped.
        for (to, relevs) in &edge.connections {
            for (_, relev) in relevs.iter() {
                match relev {
                    Ambient::Irrelevant => (),
                    Ambient::Unknown | Ambient::Relevant => {
                        if !visited.contains(to) {
                            queue.push((to, *relev));
                        }
                    }
                }
            }
        }
    }
    if self.failed {
        Err(GenericPassError.into())
    } else {
        Ok(new_book)
    }
}
/// Erases a single entry: argument types and the return type are
/// visited in an irrelevant ambient, erased arguments are dropped from
/// the output signature, and each rule is erased in turn.
fn erase_entry(&mut self, entry: &'a desugared::Entry) -> Box<untyped::Entry> {
    let id = self.get_edge_or_create(&entry.name);
    let mut args = Vec::new();
    // The variable context is scoped to this entry.
    let backup = self.ctx.clone();
    for arg in &entry.args {
        // Types only exist at the type level.
        self.erase_expr(Ambient::Irrelevant, id, &arg.typ);
        self.ctx.insert(arg.name.to_string(), Relevance::Irrelevant);
        // Erased arguments are not part of the runtime signature.
        if !arg.erased {
            args.push((arg.name.to_string(), arg.range, false))
        }
    }
    self.erase_expr(Ambient::Irrelevant, id, &entry.typ);
    self.ctx = backup;
    let mut rules = Vec::new();
    for rule in &entry.rules {
        rules.push(self.erase_rule(entry, id, rule));
    }
    Box::new(untyped::Entry {
        name: entry.name.clone(),
        args,
        rules,
        attrs: entry.attrs.clone(),
        range: entry.range,
    })
}
/// Erases one rewrite rule: erased argument positions are visited in
/// an irrelevant ambient and dropped from the output patterns; other
/// positions (and the body) use `Relevant` when this definition is
/// already known to be relevant, `Unknown` otherwise.
fn erase_rule(
    &mut self,
    entr: &desugared::Entry,
    edge: usize,
    rule: &'a desugared::Rule,
) -> untyped::Rule {
    // The variable context is scoped to this rule.
    let backup = self.ctx.clone();
    let has_relevance = self.edges[edge]
        .relevance
        .contains_key(&Relevance::Relevant);
    // Ambient for a pattern/body position given its erasure flag.
    let relev = |hidden: bool| -> Ambient {
        if hidden {
            Ambient::Irrelevant
        } else if has_relevance {
            Ambient::Relevant
        } else {
            Ambient::Unknown
        }
    };
    // Erase every pattern (binding variables as a side effect), then
    // drop the ones belonging to erased arguments.
    let pats = rule
        .pats
        .iter()
        .zip(&entr.args)
        .map(|(pat, arg)| (self.erase_pat(relev(arg.erased), edge, pat), arg))
        .filter(|(_, arg)| !arg.erased)
        .map(|res| res.0)
        .collect::<Vec<_>>();
    let body = self.erase_expr(relev(false), edge, &rule.body);
    self.ctx = backup;
    untyped::Rule {
        name: entr.name.clone(),
        pats,
        body,
        range: rule.range,
    }
}
/// Erases a pattern (left-hand side) expression.
///
/// Variables bound here record the ambient's relevance in the context;
/// erased constructor/function parameters are visited irrelevantly and
/// dropped from the output spine.
///
/// NOTE(review): the `relev` closure's parameter is named `hidden` but
/// every caller passes an `erased` flag — confirm the intended
/// distinction between the two.
fn erase_pat(
    &mut self,
    relevance: Ambient,
    edge: usize,
    expr: &'a desugared::Expr,
) -> Box<untyped::Expr> {
    let relev = |hidden: bool| -> Ambient {
        if hidden {
            Ambient::Irrelevant
        } else {
            relevance
        }
    };
    use desugared::ExprKind::*;
    match &expr.data {
        Var { name } => {
            // Record the binding's relevance for uses in the body.
            self.ctx.insert(
                name.to_string(),
                if relevance == Ambient::Irrelevant {
                    Relevance::Irrelevant
                } else {
                    Relevance::Relevant
                },
            );
            untyped::Expr::var(name.clone())
        }
        Hole { num: _ } => untyped::Expr::err(expr.range),
        Fun { name, args } => {
            self.connect_with(edge, name, relevance);
            // We cannot pattern match on functions in a relevant function.
            // it would change its behaviour.
            if relevance == Ambient::Irrelevant {
                self.set_relevance(edge, Relevance::Irrelevant, expr.range)
            }
            // Recurse per parameter; erased parameters are dropped.
            let params = self.book.entrs.get(name.to_str()).unwrap();
            let args = args
                .iter()
                .zip(&params.args)
                .map(|(arg, param)| (self.erase_pat(relev(param.erased), edge, arg), param))
                .filter(|(_, param)| !param.erased)
                .map(|x| x.0)
                .collect::<Vec<_>>();
            untyped::Expr::fun(expr.range, name.clone(), args)
        }
        Ctr { name, args } => {
            self.connect_with(edge, name, relevance);
            // We cannot pattern match on functions in a relevant function.
            // it would change its behaviour.
            if relevance == Ambient::Irrelevant {
                self.set_relevance(edge, Relevance::Irrelevant, expr.range)
            }
            // Recurse per parameter; erased parameters are dropped.
            let params = self.book.entrs.get(name.to_str()).unwrap();
            let args = args
                .iter()
                .zip(&params.args)
                .map(|(arg, param)| (self.erase_pat(relev(param.erased), edge, arg), param))
                .filter(|(_, param)| !param.erased)
                .map(|x| x.0)
                .collect::<Vec<_>>();
            untyped::Expr::ctr(expr.range, name.clone(), args)
        }
        NumU60 { numb } => untyped::Expr::u60(expr.range, *numb),
        NumF60 { numb } => untyped::Expr::f60(expr.range, *numb),
        Str { val } => {
            // A string pattern makes both string constructors reachable.
            let nil = QualifiedIdent::new_static("String.nil", None, expr.range);
            let cons = QualifiedIdent::new_static("String.cons", None, expr.range);
            self.connect_with(edge, &nil, relevance);
            self.connect_with(edge, &cons, relevance);
            untyped::Expr::str(expr.range, val.clone())
        }
        _ => todo!("Cannot be parsed {}", expr),
    }
}
    /// Erases a desugared expression into an untyped one.
    ///
    /// `ambient` says whether the expression occurs in a relevant (runtime)
    /// position or an irrelevant (type-level / erased) one; `edge` identifies
    /// the node of the relevance graph being built for the current entry.
    fn erase_expr(
        &mut self,
        ambient: Ambient,
        edge: usize,
        expr: &'a desugared::Expr,
    ) -> Box<untyped::Expr> {
        use desugared::ExprKind::*;
        match &expr.data {
            // Pi types only exist at the type level: both sides are walked in
            // the irrelevant ambient and the node itself erases to `err`.
            All {
                param,
                typ,
                body,
                erased: _,
            } => {
                let backup = self.ctx.clone();
                self.ctx.insert(param.to_string(), Relevance::Irrelevant);
                if ambient != Ambient::Irrelevant {
                    self.set_relevance(edge, Relevance::Irrelevant, expr.range);
                }
                self.erase_expr(Ambient::Irrelevant, edge, typ);
                self.erase_expr(Ambient::Irrelevant, edge, body);
                self.ctx = backup;
                untyped::Expr::err(expr.range)
            }
            Lambda {
                param,
                body,
                erased,
            } => {
                let backup = self.ctx.clone();
                // An erased binder is irrelevant even in a relevant ambient.
                let relev = if ambient == Ambient::Irrelevant || *erased {
                    Relevance::Irrelevant
                } else {
                    Relevance::Relevant
                };
                self.ctx.insert(param.to_string(), relev);
                let body = self.erase_expr(ambient, edge, body);
                self.ctx = backup;
                untyped::Expr::lambda(expr.range, param.clone(), body, *erased)
            }
            Let { name, val, next } => {
                // The binder inherits the relevance of the current ambient.
                let backup = self.ctx.clone();
                self.ctx.insert(name.to_string(), ambient.as_relevance());
                let val = self.erase_expr(ambient, edge, val);
                let next = self.erase_expr(ambient, edge, next);
                self.ctx = backup;
                untyped::Expr::let_(expr.range, name.clone(), val, next)
            }
            Fun { name, args } => {
                self.connect_with(edge, name, ambient);
                // NOTE(review): assumes `name` exists in the book — the
                // unwrap panics otherwise; confirm.
                let params = &self.book.entrs.get(name.to_str()).unwrap().args;
                let relev = |hidden| {
                    if hidden {
                        Ambient::Irrelevant
                    } else {
                        ambient
                    }
                };
                // Erase every argument, then drop the erased ones from the call.
                let args = params
                    .iter()
                    .zip(args)
                    .map(|(param, arg)| (param, self.erase_expr(relev(param.erased), edge, arg)))
                    .filter(|(param, _)| !param.erased)
                    .map(|res| res.1)
                    .collect::<Vec<_>>();
                untyped::Expr::fun(expr.range, name.clone(), args)
            }
            Ctr { name, args } => {
                self.connect_with(edge, name, ambient);
                let params = &self.book.entrs.get(name.to_str()).unwrap().args;
                let relev = |hidden| {
                    if hidden {
                        Ambient::Irrelevant
                    } else {
                        ambient
                    }
                };
                let args = params
                    .iter()
                    .zip(args)
                    .map(|(param, arg)| (param, self.erase_expr(relev(param.erased), edge, arg)))
                    .filter(|(param, _)| !param.erased)
                    .map(|res| res.1)
                    .collect::<Vec<_>>();
                untyped::Expr::ctr(expr.range, name.clone(), args)
            }
            Var { name } => {
                // NOTE(review): a variable missing from the context panics —
                // presumably unbound-checking ran in an earlier pass; confirm.
                let var_rev = self
                    .ctx
                    .get(&name.to_string())
                    .unwrap();
                // Using an irrelevant variable in a relevant position forces
                // the whole edge to be irrelevant.
                if *var_rev == Relevance::Irrelevant && ambient != Ambient::Irrelevant {
                    self.set_relevance(edge, Relevance::Irrelevant, name.range)
                }
                untyped::Expr::var(name.clone())
            }
            Ann { expr, typ } => {
                // Keep the term; the annotation is type-level only.
                let expr = self.erase_expr(ambient, edge, expr);
                self.erase_expr(Ambient::Irrelevant, edge, typ);
                expr
            }
            NumU60 { numb } => untyped::Expr::u60(expr.range, *numb),
            NumF60 { numb } => untyped::Expr::f60(expr.range, *numb),
            Str { val } => {
                // String literals rely on String.nil/String.cons at runtime.
                let nil = QualifiedIdent::new_static("String.nil", None, expr.range);
                let cons = QualifiedIdent::new_static("String.cons", None, expr.range);
                self.connect_with(edge, &nil, ambient);
                self.connect_with(edge, &cons, ambient);
                untyped::Expr::str(expr.range, val.clone())
            }
            App { fun, args } => {
                let fun = self.erase_expr(ambient, edge, fun);
                let mut spine = Vec::new();
                for arg in args {
                    spine.push(self.erase_expr(ambient, edge, &arg.data))
                }
                untyped::Expr::app(expr.range, fun, spine)
            }
            Sub { expr, .. } => self.erase_expr(ambient, edge, expr),
            Binary { op, left, right } => {
                let left = self.erase_expr(ambient, edge, left);
                let right = self.erase_expr(ambient, edge, right);
                untyped::Expr::binary(expr.range, *op, left, right)
            }
            // Purely type-level constructions erase to `err`.
            Typ | NumTypeU60 { .. } | NumTypeF60 { .. } | Hole { .. } | Hlp(_) | Err => {
                if ambient != Ambient::Irrelevant {
                    self.set_relevance(edge, Relevance::Irrelevant, expr.range);
                }
                untyped::Expr::err(expr.range)
            }
        }
    }
}

View File

@ -0,0 +1,600 @@
use std::{fmt::Display, error::Error};
use kind_report::data::{Color, Diagnostic, DiagnosticFrame, Marker, Severity};
use kind_span::{Range, SyntaxCtxIndex};
use kind_tree::symbol::Ident;
/// Placeholder error returned when a pass fails without a more
/// specific diagnostic to attach.
#[derive(Debug)]
pub struct GenericPassError;

impl Display for GenericPassError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("generic pass error")
    }
}

impl Error for GenericPassError {}
/// Syntax sugars that require user-provided definitions to exist
/// before they can be expanded (see `NeedToImplementMethods`).
pub enum Sugar {
    DoNotation,
    List,
    Sigma,
    Pair,
    BoolIf,
    String,
    U120,
    /// `match` on the named sum type.
    Match(String),
    /// `open` on the named record type.
    Open(String),
}
/// Describes all of the possible errors inside each
/// of the passes inside this crate. Most variants carry the
/// source ranges used to render the diagnostic markers.
pub enum PassError {
    RepeatedVariable(Range, Range),
    IncorrectArity(Range, Vec<Range>, usize, usize),
    DuplicatedNamed(Range, Range),
    LetDestructOnlyForRecord(Range),
    LetDestructOnlyForSum(Range),
    NoCoverage(Range, Vec<String>),
    CannotFindField(Range, Range, String),
    CannotFindConstructor(Range, Range, String),
    NeedToImplementMethods(Range, Sugar),
    RuleWithIncorrectArity(Range, usize, usize, usize),
    RulesWithInconsistentArity(Vec<(Range, usize)>),
    SugarIsBadlyImplemented(Range, Range, usize),
    CannotUseIrrelevant(Option<Range>, Range, Option<Range>),
    CannotFindAlias(String, Range),
    NotATypeConstructor(Range, Range),
    ShouldBeAParameter(Option<Range>, Range),
    NoFieldCoverage(Range, Vec<String>),
    UnboundVariable(Vec<Ident>, Vec<String>),
    AttributeDoesNotExpectEqual(Range),
    AttributeDoesNotExpectArgs(Range),
    InvalidAttributeArgument(Range),
    AttributeExpectsAValue(Range),
    DuplicatedAttributeArgument(Range, Range),
    CannotDerive(String, Range),
    AttributeDoesNotExists(Range),
}
// TODO: A way to build an error message with methods
/// Renders each [PassError] into a [DiagnosticFrame] for reporting.
///
/// NOTE(review): several variants share the same `code` (200, 209 and 214
/// each appear more than once). Codes look user-visible, so they are left
/// untouched here — confirm and renumber in a dedicated change.
impl Diagnostic for PassError {
    /// Returns the syntax context of the primary range of the error, used to
    /// locate the file the diagnostic points into.
    fn get_syntax_ctx(&self) -> Option<SyntaxCtxIndex> {
        match self {
            PassError::RepeatedVariable(range, _) => Some(range.ctx),
            PassError::IncorrectArity(range, _, _, _) => Some(range.ctx),
            PassError::DuplicatedNamed(range, _) => Some(range.ctx),
            PassError::LetDestructOnlyForRecord(range) => Some(range.ctx),
            PassError::LetDestructOnlyForSum(range) => Some(range.ctx),
            PassError::NoCoverage(range, _) => Some(range.ctx),
            PassError::CannotFindField(range, _, _) => Some(range.ctx),
            PassError::CannotFindConstructor(range, _, _) => Some(range.ctx),
            PassError::NeedToImplementMethods(range, _) => Some(range.ctx),
            PassError::RuleWithIncorrectArity(range, _, _, _) => Some(range.ctx),
            PassError::RulesWithInconsistentArity(range) => Some(range[0].0.ctx),
            PassError::SugarIsBadlyImplemented(range, _, _) => Some(range.ctx),
            PassError::CannotUseIrrelevant(_, range, _) => Some(range.ctx),
            PassError::CannotFindAlias(_, range) => Some(range.ctx),
            PassError::NotATypeConstructor(range, _) => Some(range.ctx),
            PassError::ShouldBeAParameter(_, range) => Some(range.ctx),
            PassError::NoFieldCoverage(range, _) => Some(range.ctx),
            PassError::UnboundVariable(ranges, _) => Some(ranges[0].range.ctx),
            PassError::AttributeDoesNotExpectEqual(range) => Some(range.ctx),
            PassError::AttributeDoesNotExpectArgs(range) => Some(range.ctx),
            PassError::InvalidAttributeArgument(range) => Some(range.ctx),
            PassError::AttributeExpectsAValue(range) => Some(range.ctx),
            PassError::DuplicatedAttributeArgument(range, _) => Some(range.ctx),
            PassError::CannotDerive(_, range) => Some(range.ctx),
            PassError::AttributeDoesNotExists(range) => Some(range.ctx),
        }
    }
    /// Builds the full frame (title, hints, markers) for this error.
    fn to_diagnostic_frame(&self) -> DiagnosticFrame {
        match self {
            PassError::UnboundVariable(idents, suggestions) => DiagnosticFrame {
                code: 100,
                severity: Severity::Error,
                title: format!("Cannot find the definition '{}'.", idents[0].to_str()),
                subtitles: vec![],
                hints: vec![if !suggestions.is_empty() {
                    format!(
                        "Maybe you're looking for {}",
                        suggestions.iter().map(|x| format!("'{}'", x)).collect::<Vec<String>>().join(", ")
                    )
                } else {
                    "Take a look at naming rules at https://github.com/Kindelia/Kind2/blob/master/guide/naming.md".to_string()
                }],
                positions: idents
                    .iter()
                    .map(|ident| Marker {
                        position: ident.range,
                        color: Color::Fst,
                        text: "Here!".to_string(),
                        no_code: false,
                        main: true,
                    })
                    .collect(),
            },
            PassError::CannotUseIrrelevant(var_decl, place, declarated_place) => {
                // The two secondary markers are optional: the declaration of
                // the erased argument and the variable bound to it.
                let mut positions = vec![Marker {
                    position: *place,
                    color: Color::Fst,
                    text: "It's in relevant position!".to_string(),
                    no_code: false,
                    main: true,
                }];
                if let Some(range) = declarated_place {
                    positions.push(Marker {
                        position: *range,
                        color: Color::Snd,
                        text: "Declared here as erased (or implicit without '+')".to_string(),
                        no_code: false,
                        main: false,
                    })
                }
                if let Some(range) = var_decl {
                    positions.push(Marker {
                        position: *range,
                        color: Color::Thr,
                        text: "This variable corresponds to the erased argument".to_string(),
                        no_code: false,
                        main: false,
                    });
                }
                DiagnosticFrame {
                    code: 200,
                    severity: Severity::Error,
                    title: "This irrelevant parameter should not be used in a relevant position.".to_string(),
                    subtitles: vec![],
                    hints: vec![],
                    positions,
                }
            }
            PassError::LetDestructOnlyForRecord(place) => DiagnosticFrame {
                code: 200,
                severity: Severity::Error,
                title: "Can only destruct record types.".to_string(),
                subtitles: vec![],
                hints: vec![],
                positions: vec![Marker {
                    position: *place,
                    color: Color::Fst,
                    text: "Here!".to_string(),
                    no_code: false,
                    main: true,
                }],
            },
            PassError::RulesWithInconsistentArity(arities) => DiagnosticFrame {
                code: 201,
                severity: Severity::Error,
                // Typo fixes: "a entry" -> "an entry", "manipulats" -> "manipulates".
                title: "All of the rules of an entry should have the same number of patterns.".to_string(),
                subtitles: vec![],
                hints: vec!["Check if you're trying to use a function that manipulates erased variables.".to_string()],
                positions: arities
                    .iter()
                    .map(|(range, size)| Marker {
                        position: *range,
                        color: Color::Fst,
                        text: format!("This rule contains {} patterns", size),
                        no_code: false,
                        main: true,
                    })
                    .collect(),
            },
            PassError::RuleWithIncorrectArity(place, _got, expected, hidden) => DiagnosticFrame {
                code: 203,
                severity: Severity::Error,
                title: "This rule is with the incorrect arity.".to_string(),
                subtitles: vec![],
                hints: vec![if *expected == 0 {
                    "This rule expects no arguments".to_string()
                } else if *hidden == 0 {
                    format!("This rule expects {} arguments", expected)
                } else {
                    format!("This rule expects {} arguments or {} (without hidden ones)", expected, expected - hidden)
                }],
                positions: vec![Marker {
                    position: *place,
                    color: Color::Fst,
                    text: "Here!".to_string(),
                    no_code: false,
                    main: true,
                }],
            },
            PassError::NeedToImplementMethods(expr_place, sugar) => DiagnosticFrame {
                code: 204,
                severity: Severity::Error,
                title: "Required functions are not implemented for this type.".to_string(),
                subtitles: vec![],
                hints: vec![match sugar {
                    Sugar::DoNotation => "You must implement 'bind' and 'pure' for this type in order to use the do notation.".to_string(),
                    Sugar::List => "You must implement 'List', 'List.cons' and 'List.nil' for this type in order to use the list notation.".to_string(),
                    Sugar::Sigma => "You must implement 'Sigma' in order to use the sigma notation.".to_string(),
                    Sugar::Pair => "You must implement 'Sigma' and 'Sigma.new' in order to use the sigma notation.".to_string(),
                    Sugar::BoolIf => "You must implement 'Bool.if' in order to use the if notation.".to_string(),
                    Sugar::String => "You must implement 'String.cons' in order to use the string notation.".to_string(),
                    Sugar::U120 => "You must implement 'U120.new' in order to use the u120 notation.".to_string(),
                    Sugar::Match(name) => format!("You must implement '{}.match' in order to use the match notation (or derive match with #derive[match]).", name),
                    Sugar::Open(name) => format!("You must implement '{}.open' in order to use the open notation (or derive open with #derive[open]).", name),
                }],
                positions: vec![Marker {
                    position: *expr_place,
                    color: Color::Fst,
                    text: "You cannot use this expression!".to_string(),
                    no_code: false,
                    main: true,
                }],
            },
            PassError::LetDestructOnlyForSum(place) => DiagnosticFrame {
                code: 206,
                severity: Severity::Error,
                title: "Can only use match on sum types.".to_string(),
                subtitles: vec![],
                hints: vec![],
                positions: vec![Marker {
                    position: *place,
                    color: Color::Fst,
                    text: "Here!".to_string(),
                    no_code: false,
                    main: true,
                }],
            },
            PassError::CannotFindField(place, def_name, ty) => DiagnosticFrame {
                code: 207,
                severity: Severity::Error,
                title: format!("Cannot find this field in the definition '{}'.", ty),
                subtitles: vec![],
                hints: vec![],
                positions: vec![
                    Marker {
                        position: *place,
                        color: Color::Fst,
                        text: "Here!".to_string(),
                        no_code: false,
                        main: true,
                    },
                    Marker {
                        position: *def_name,
                        color: Color::Snd,
                        text: "This is the definition name".to_string(),
                        no_code: false,
                        main: false,
                    },
                ],
            },
            PassError::CannotFindConstructor(place, def_name, ty) => DiagnosticFrame {
                code: 208,
                severity: Severity::Error,
                title: format!("Cannot find this constructor in the type definition '{}'.", ty),
                subtitles: vec![],
                hints: vec![],
                positions: vec![
                    Marker {
                        position: *place,
                        color: Color::Fst,
                        text: "Here!".to_string(),
                        no_code: false,
                        main: true,
                    },
                    Marker {
                        position: *def_name,
                        color: Color::Snd,
                        text: "This is the definition name".to_string(),
                        no_code: false,
                        main: false,
                    },
                ],
            },
            PassError::NoCoverage(place, other) => DiagnosticFrame {
                code: 209,
                severity: Severity::Error,
                title: "The match is not covering all of the possibilities!".to_string(),
                subtitles: vec![],
                hints: vec![format!("Need a case for {}", other.iter().map(|x| format!("'{}'", x)).collect::<Vec<String>>().join(", "))],
                positions: vec![Marker {
                    position: *place,
                    color: Color::Fst,
                    text: "This is the incomplete case".to_string(),
                    no_code: false,
                    main: true,
                }],
            },
            PassError::IncorrectArity(head_range, got, expected, hidden) => {
                let positions = vec![Marker {
                    position: *head_range,
                    color: Color::Fst,
                    text: "This function requires a fixed number of arguments".to_string(),
                    no_code: false,
                    main: true,
                }];
                DiagnosticFrame {
                    code: 210,
                    severity: Severity::Error,
                    title: "Incorrect arity.".to_string(),
                    subtitles: vec![],
                    hints: vec![if *expected == 0 {
                        format!("This function expects no arguments but got {}", got.len())
                    } else if *hidden == 0 {
                        format!("This function expects {} arguments but got {}", expected, got.len())
                    } else {
                        format!(
                            "This function expects {} arguments or {} (without hidden ones) but got {}.",
                            expected,
                            expected - hidden,
                            got.len()
                        )
                    }],
                    positions,
                }
            }
            PassError::SugarIsBadlyImplemented(head_range, place_range, expected) => DiagnosticFrame {
                code: 211,
                severity: Severity::Error,
                title: "Incorrect arity in the sugar definition".to_string(),
                subtitles: vec![],
                // `format!` with no arguments was a useless allocation indirection.
                hints: vec![
                    "Take a look at how sugar functions should be implemented at https://github.com/Kindelia/Kind2/blob/master/guide/sugars.md".to_string()
                ],
                positions: vec![
                    Marker {
                        position: *head_range,
                        color: Color::Fst,
                        text: if *expected == 0 {
                            "This rule expects no arguments".to_string()
                        } else {
                            format!("This rule expects {} explicit arguments", expected)
                        },
                        no_code: false,
                        main: true,
                    },
                    Marker {
                        position: *place_range,
                        color: Color::Snd,
                        text: "This is what triggers the sugar".to_string(),
                        no_code: false,
                        main: false,
                    },
                ],
            },
            PassError::DuplicatedNamed(first_decl, last_decl) => DiagnosticFrame {
                code: 212,
                severity: Severity::Error,
                title: "Repeated named variable".to_string(),
                subtitles: vec![],
                hints: vec![],
                positions: vec![
                    Marker {
                        position: *last_decl,
                        color: Color::Fst,
                        // Typo fix: "occurence" -> "occurrence".
                        text: "Second occurrence".to_string(),
                        no_code: false,
                        main: true,
                    },
                    Marker {
                        position: *first_decl,
                        color: Color::Snd,
                        text: "First occurrence".to_string(),
                        no_code: false,
                        main: false,
                    },
                ],
            },
            PassError::RepeatedVariable(first_decl, last_decl) => DiagnosticFrame {
                code: 214,
                severity: Severity::Error,
                title: "Repeated name".to_string(),
                subtitles: vec![],
                hints: vec!["Rename one of the occurrences".to_string()],
                positions: vec![
                    Marker {
                        position: *last_decl,
                        color: Color::Fst,
                        text: "Second occurrence".to_string(),
                        no_code: false,
                        main: true,
                    },
                    Marker {
                        position: *first_decl,
                        color: Color::Snd,
                        text: "First occurrence".to_string(),
                        no_code: false,
                        main: false,
                    },
                ],
            },
            PassError::CannotFindAlias(name, range) => DiagnosticFrame {
                code: 214,
                severity: Severity::Error,
                title: "Cannot find alias".to_string(),
                subtitles: vec![],
                hints: vec![],
                positions: vec![Marker {
                    position: *range,
                    color: Color::Fst,
                    text: format!("Cannot find alias for '{}'", name),
                    no_code: false,
                    main: true,
                }],
            },
            PassError::ShouldBeAParameter(error_range, declaration_range) => {
                let mut positions = vec![];
                if let Some(error_range) = error_range {
                    positions.push(Marker {
                        position: *error_range,
                        color: Color::Fst,
                        text: "This expression is not the parameter".to_string(),
                        no_code: false,
                        main: true,
                    })
                }
                positions.push(Marker {
                    position: *declaration_range,
                    color: Color::Snd,
                    text: "This is the parameter that should be used".to_string(),
                    no_code: false,
                    main: false,
                });
                DiagnosticFrame {
                    code: 214,
                    severity: Severity::Error,
                    title: "The expression is not the parameter declared in the type constructor".to_string(),
                    subtitles: vec![],
                    hints: vec![],
                    positions,
                }
            }
            PassError::NotATypeConstructor(error_range, declaration_range) => DiagnosticFrame {
                code: 214,
                severity: Severity::Error,
                title: "This is not the type that is being declared.".to_string(),
                subtitles: vec![],
                hints: vec![],
                positions: vec![
                    Marker {
                        position: *error_range,
                        color: Color::Fst,
                        text: "This is not the type that is being declared".to_string(),
                        no_code: false,
                        main: true,
                    },
                    Marker {
                        position: *declaration_range,
                        color: Color::Snd,
                        text: "This is the type that should be used instead".to_string(),
                        no_code: false,
                        main: false,
                    },
                ],
            },
            PassError::NoFieldCoverage(place, other) => DiagnosticFrame {
                code: 209,
                severity: Severity::Error,
                title: "The case is not covering all the values inside of it!".to_string(),
                subtitles: vec![],
                hints: vec![format!(
                    "Need variables for {}",
                    other.iter().map(|x| format!("'{}'", x)).collect::<Vec<String>>().join(", ")
                )],
                positions: vec![Marker {
                    position: *place,
                    color: Color::Fst,
                    text: "This is the incomplete case".to_string(),
                    no_code: false,
                    main: true,
                }],
            },
            PassError::AttributeDoesNotExpectEqual(place) => DiagnosticFrame {
                code: 209,
                severity: Severity::Error,
                title: "This attribute does not support values!".to_string(),
                subtitles: vec![],
                hints: vec![],
                positions: vec![Marker {
                    position: *place,
                    color: Color::Fst,
                    text: "Try to remove everything after the equal".to_string(),
                    no_code: false,
                    main: true,
                }],
            },
            PassError::AttributeDoesNotExpectArgs(place) => DiagnosticFrame {
                code: 209,
                severity: Severity::Error,
                title: "This attribute does not expect arguments".to_string(),
                subtitles: vec![],
                hints: vec![],
                positions: vec![Marker {
                    position: *place,
                    color: Color::Fst,
                    text: "Try to remove all of the arguments".to_string(),
                    no_code: false,
                    main: true,
                }],
            },
            PassError::InvalidAttributeArgument(place) => DiagnosticFrame {
                code: 209,
                severity: Severity::Error,
                title: "Invalid attribute argument".to_string(),
                subtitles: vec![],
                hints: vec![],
                positions: vec![Marker {
                    position: *place,
                    color: Color::Fst,
                    text: "Remove it or replace".to_string(),
                    no_code: false,
                    main: true,
                }],
            },
            PassError::CannotDerive(name, place) => DiagnosticFrame {
                code: 209,
                severity: Severity::Error,
                title: format!("Cannot derive '{}' for this definition", name),
                subtitles: vec![],
                hints: vec![],
                positions: vec![Marker {
                    position: *place,
                    color: Color::Fst,
                    text: "Here!".to_string(),
                    no_code: false,
                    main: true,
                }],
            },
            PassError::AttributeExpectsAValue(place) => DiagnosticFrame {
                code: 209,
                severity: Severity::Error,
                title: "This attribute expects a value".to_string(),
                subtitles: vec![],
                hints: vec![],
                positions: vec![Marker {
                    position: *place,
                    color: Color::Fst,
                    text: "Here!".to_string(),
                    no_code: false,
                    main: true,
                }],
            },
            PassError::AttributeDoesNotExists(place) => DiagnosticFrame {
                code: 209,
                severity: Severity::Error,
                // Grammar fix: "does not exists" -> "does not exist".
                title: "This attribute does not exist".to_string(),
                subtitles: vec![],
                hints: vec![],
                positions: vec![Marker {
                    position: *place,
                    color: Color::Fst,
                    text: "Here!".to_string(),
                    no_code: false,
                    main: true,
                }],
            },
            PassError::DuplicatedAttributeArgument(first, sec) => DiagnosticFrame {
                code: 209,
                severity: Severity::Warning,
                title: "Duplicated attribute argument".to_string(),
                subtitles: vec![],
                hints: vec![],
                // Markers aligned with the other duplicate-report variants
                // (DuplicatedNamed / RepeatedVariable): second occurrence is
                // the main Fst marker, first occurrence is a secondary Snd.
                positions: vec![
                    Marker {
                        position: *sec,
                        color: Color::Fst,
                        text: "Second declaration".to_string(),
                        no_code: false,
                        main: true,
                    },
                    Marker {
                        position: *first,
                        color: Color::Snd,
                        text: "First declaration!".to_string(),
                        no_code: false,
                        main: false,
                    },
                ],
            },
        }
    }
}

View File

@ -0,0 +1,205 @@
//! Expand some attributes and derivations of each construction.
//! Currently it just derives `match` and `open` for sum type
//! and record types respectively.
use std::fmt::Display;
use std::sync::mpsc::Sender;
use fxhash::FxHashMap;
use kind_derive::getters::derive_getters;
use kind_derive::matching::derive_match;
use kind_derive::open::derive_open;
use kind_derive::setters::derive_setters;
use kind_report::data::Diagnostic;
use kind_span::Locatable;
use kind_span::Range;
use kind_tree::concrete::Module;
use kind_tree::concrete::{Attribute, TopLevel};
use crate::errors::PassError;
/// Expands sum type and record definitions to a lot of
/// helper definitions like eliminators and replace qualified identifiers
/// by their module names.
pub mod uses;
/// The derivations a `#derive[...]` attribute can request.
#[derive(Debug, Hash, PartialEq, Eq)]
pub enum Derive {
    Match,
    Open,
    Getters,
    Setters
}
impl Display for Derive {
    /// Prints the derive keyword exactly as it is written inside
    /// the `#derive[...]` attribute.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let keyword = match self {
            Derive::Match => "match",
            Derive::Open => "open",
            Derive::Getters => "getters",
            Derive::Setters => "setters",
        };
        f.write_str(keyword)
    }
}
/// Registers `key -> range` in `hashmap`, or reports a duplicated
/// attribute argument (keeping the first occurrence) when the key is
/// already present.
pub fn insert_or_report(
    channel: Sender<Box<dyn Diagnostic>>,
    hashmap: &mut FxHashMap<Derive, Range>,
    key: Derive,
    range: Range,
) {
    if let Some(first_occurrence) = hashmap.get(&key) {
        channel
            .send(Box::new(PassError::DuplicatedAttributeArgument(
                *first_occurrence,
                range,
            )))
            .unwrap();
    } else {
        hashmap.insert(key, range);
    }
}
/// Parses a derive keyword; `None` for anything unrecognised.
fn string_to_derive(name: &str) -> Option<Derive> {
    let derive = match name {
        "match" => Derive::Match,
        "open" => Derive::Open,
        "getters" => Derive::Getters,
        "setters" => Derive::Setters,
        _ => return None,
    };
    Some(derive)
}
/// Scans `attrs` for `#derive[...]` attributes and collects the requested
/// derivations together with the range that requested each one.
///
/// Returns `None` when any argument is malformed; every problem found is
/// first reported through `error_channel`.
pub fn expand_derive(
    error_channel: Sender<Box<dyn Diagnostic>>,
    attrs: &[Attribute],
) -> Option<FxHashMap<Derive, Range>> {
    let mut failed = false;
    let mut def = FxHashMap::default();
    for attr in attrs {
        if attr.name.to_str() != "derive" {
            continue;
        }
        // `#derive = x` is rejected: only bracketed arguments are valid.
        if let Some(attr) = &attr.value {
            error_channel
                .send(Box::new(PassError::AttributeDoesNotExpectEqual(
                    attr.locate(),
                )))
                .unwrap();
            failed = true;
        }
        use kind_tree::concrete::AttributeStyle::*;
        for arg in &attr.args {
            match arg {
                // Only bare identifiers naming a known derive are accepted.
                Ident(range, ident) => match string_to_derive(ident.to_str()) {
                    Some(key) => {
                        insert_or_report(error_channel.clone(), &mut def, key, *range)
                    }
                    _ => {
                        error_channel
                            .send(Box::new(PassError::InvalidAttributeArgument(
                                ident.locate(),
                            )))
                            .unwrap();
                        failed = true;
                    }
                },
                other => {
                    error_channel
                        .send(Box::new(PassError::InvalidAttributeArgument(
                            other.locate(),
                        )))
                        .unwrap();
                    failed = true;
                }
            }
        }
    }
    if failed {
        None
    } else {
        Some(def)
    }
}
/// Expands every `#derive[...]` attribute in `module`, appending the
/// generated definitions to `module.entries`.
///
/// Returns `true` when any expansion failed (errors were already sent
/// through `error_channel`).
pub fn expand_module(error_channel: Sender<Box<dyn Diagnostic>>, module: &mut Module) -> bool {
    let mut failed = false;
    // Generated entries are buffered (keyed by name) and appended after the
    // scan, because we cannot push into `module.entries` while iterating it.
    let mut entries = FxHashMap::default();
    for entry in &module.entries {
        match entry {
            // Sum types only support `match`.
            TopLevel::SumType(sum) => {
                if let Some(derive) = expand_derive(error_channel.clone(), &sum.attrs) {
                    for (key, val) in derive {
                        match key {
                            Derive::Match => {
                                let (res, errs) = derive_match(sum.name.range, sum);
                                let info = res.extract_book_info();
                                entries.insert(res.name.to_string(), (res, info));
                                for err in errs {
                                    error_channel.send(err).unwrap();
                                    failed = true;
                                }
                            }
                            other => {
                                error_channel
                                    .send(Box::new(PassError::CannotDerive(other.to_string(), val)))
                                    .unwrap();
                                failed = true;
                            }
                        }
                    }
                } else {
                    failed = true;
                }
            }
            // Record types support `open`, `getters` and `setters`.
            TopLevel::RecordType(rec) => {
                if let Some(derive) = expand_derive(error_channel.clone(), &rec.attrs) {
                    for (key, val) in derive {
                        match key {
                            Derive::Open => {
                                let res = derive_open(rec.name.range, rec);
                                let info = res.extract_book_info();
                                entries.insert(res.name.to_string(), (res, info));
                            }
                            Derive::Getters => {
                                for res in derive_getters(rec.name.range, rec) {
                                    let info = res.extract_book_info();
                                    entries.insert(res.name.to_string(), (res, info));
                                }
                            }
                            Derive::Setters => {
                                for res in derive_setters(rec.name.range, rec) {
                                    let info = res.extract_book_info();
                                    entries.insert(res.name.to_string(), (res, info));
                                }
                            }
                            other => {
                                error_channel
                                    .send(Box::new(PassError::CannotDerive(other.to_string(), val)))
                                    .unwrap();
                                failed = true;
                            }
                        }
                    }
                } else {
                    failed = true;
                }
            }
            TopLevel::Entry(_) => (),
        }
    }
    // NOTE(review): `info` is computed but dropped here — presumably it is
    // consumed elsewhere in a fuller version of this flow; confirm.
    for (_, (tl, _)) in entries {
        module.entries.push(TopLevel::Entry(tl));
    }
    failed
}

View File

@ -0,0 +1,55 @@
use fxhash::FxHashMap;
use kind_report::data::Diagnostic;
use kind_tree::concrete::{visitor::Visitor, Module};
/// Expands sum type and record definitions to a lot of
/// helper definitions like eliminators and replace qualified identifiers
/// by their module names.
use std::sync::mpsc::Sender;
use crate::errors::PassError;
/// Visitor state for alias expansion: maps each `use` alias to the full
/// path it stands for.
pub struct Expand {
    // Alias -> full path, taken from the module's `uses` map.
    pub names: FxHashMap<String, String>,
    // Channel used to report aliases that cannot be resolved.
    pub errors: Sender<Box<dyn Diagnostic>>,
    // Set once any alias fails to resolve.
    pub failed: bool,
}
impl Visitor for Expand {
    /// Rewrites `Alias.rest` into the full path registered for `Alias`
    /// in the module's use-map.
    fn visit_qualified_ident(&mut self, ident: &mut kind_tree::symbol::QualifiedIdent) {
        // Idents without an auxiliary segment are already fully qualified.
        if ident.get_aux().is_none() {
            return;
        }
        let alias = match self.names.get(&ident.get_root()) {
            Some(path) => path,
            None => {
                self.errors
                    .send(Box::new(PassError::CannotFindAlias(
                        ident.get_root(),
                        ident.range,
                    )))
                    .unwrap();
                self.failed = true;
                return;
            }
        };
        match &ident.get_aux() {
            Some(post) => {
                ident.change_root(format!("{}.{}", alias, post));
                ident.reset_aux()
            }
            // NOTE(review): this arm looks unreachable — `get_aux()` was
            // checked to be `Some` at the top; confirm before removing.
            None => ident.change_root(alias.clone()),
        }
    }
}
/// Expands every aliased qualified identifier in `module` using its
/// `uses` map. Returns `true` when any alias failed to resolve.
pub fn expand_uses(module: &mut Module, errors: Sender<Box<dyn Diagnostic>>) -> bool {
    let mut expander = Expand {
        names: module.uses.clone(),
        errors,
        failed: false,
    };
    module
        .entries
        .iter_mut()
        .for_each(|entry| expander.visit_top_level(entry));
    expander.failed
}

View File

@ -0,0 +1,98 @@
use fxhash::FxHashMap;
use kind_tree::untyped;
use crate::unbound::subst::subst_on_expr;
/// A function that qualifies for inlining: a single rule whose patterns
/// are all plain variables.
struct Inlinable {
    // Parameter names, in order, to substitute with call arguments.
    names: Vec<String>,
    // The single rule's body, cloned at each inline site.
    body: Box<untyped::Expr>,
}
/// Traversal state: every inlinable function, keyed by name.
struct InlineState {
    funs: FxHashMap<String, Inlinable>,
}
/// Checks whether `entry` can be inlined: it must have exactly one rule
/// and every pattern of that rule must be a plain variable.
fn inlinable(entry: &untyped::Entry) -> Option<Inlinable> {
    if entry.rules.len() != 1 {
        return None;
    }
    let rule = &entry.rules[0];
    // Any non-variable pattern disqualifies the whole entry.
    let names = rule
        .pats
        .iter()
        .map(|pat| match &pat.data {
            untyped::ExprKind::Var { name } => Some(name.to_string()),
            _ => None,
        })
        .collect::<Option<Vec<_>>>()?;
    // TODO: Check if is recursive
    Some(Inlinable {
        names,
        body: rule.body.clone(),
    })
}
/// Inlines every `#inline`-marked, inlinable function across `book`,
/// removing the inlined definitions from the book afterwards.
pub fn inline_book(book: &mut untyped::Book) {
    let mut funs = FxHashMap::default();
    let mut removed = Vec::new();
    for entry in book.entrs.values() {
        if !entry.attrs.inlined {
            continue;
        }
        if let Some(inlinable) = inlinable(entry) {
            funs.insert(entry.name.to_string(), inlinable);
            removed.push(entry.name.to_string());
        }
    }
    for name in &removed {
        book.entrs.remove(name);
    }
    let mut state = InlineState { funs };
    for (_, entry) in &mut book.entrs {
        state.inline_entry(entry);
    }
}
impl InlineState {
    /// Inlines marked calls inside every rule body of `entry`.
    fn inline_entry(&mut self, entry: &mut untyped::Entry) {
        for rule in &mut entry.rules {
            self.inline_expr(&mut rule.body)
        }
    }
    /// Recursively replaces calls to inlinable functions by their bodies,
    /// substituting parameter names with the call arguments.
    fn inline_expr(&mut self, expr: &mut Box<untyped::Expr>) {
        use untyped::ExprKind::*;
        match &mut expr.data {
            Lambda { body, .. } => self.inline_expr(body),
            App { fun, args } => {
                self.inline_expr(fun);
                for arg in args {
                    self.inline_expr(arg);
                }
            }
            Fun { name, args } | Ctr { name, args } => {
                if let Some(inlinable) = self.funs.get(name.to_str()) {
                    // Map each parameter name to its argument expression.
                    // NOTE(review): the arguments are substituted without
                    // being visited first and the inlined body is not
                    // re-scanned, so inlinable calls nested in `args` are
                    // only expanded one level deep — confirm intent.
                    let subst =
                        FxHashMap::from_iter(inlinable.names.iter().cloned().zip(args.clone()));
                    *expr = inlinable.body.clone();
                    subst_on_expr(expr, subst);
                } else {
                    for arg in args {
                        self.inline_expr(arg);
                    }
                }
            }
            Let { val, next, .. } => {
                self.inline_expr(val);
                self.inline_expr(next);
            }
            Binary { left, right, .. } => {
                self.inline_expr(left);
                self.inline_expr(right);
            }
            // Leaves (vars, literals, err) contain no calls to inline.
            _ => (),
        }
    }
}

View File

@ -0,0 +1,12 @@
//! A lot of transformations that we can apply into kind trees.
//! * [desugar][desugar] - That desugars the sugared tree into a version that does not contain a lot of constructions like match, inductive types etc.
//! * [erasure][erasure] - Erases all of the definitions that are marked as erased from the runtime.
//! * [expand][expand] - Expand some attributes and derivations of each construction.
//! * [unbound][unbound] - Collects all of the unbound definitions and check the linearity of them.
pub mod desugar;
pub mod erasure;
mod errors;
pub mod expand;
pub mod inline;
pub mod unbound;

View File

@ -0,0 +1,525 @@
//! Collects all the unbound variables,
//! check if patterns are linear and check
//! if the name belongs to the current module.
//!
//! It also gets all of the identifiers used
//! by sugars because it's useful to name resolution
//! phase.
use std::sync::mpsc::Sender;
use fxhash::{FxHashMap, FxHashSet};
use kind_report::data::Diagnostic;
use kind_span::Range;
use kind_tree::concrete::expr::{Binding, Case, CaseBinding, Destruct, Expr, ExprKind, SttmKind};
use kind_tree::concrete::pat::{Pat, PatIdent, PatKind};
use kind_tree::concrete::visitor::Visitor;
use kind_tree::concrete::{Argument, Book, Entry, Module, Rule, TopLevel};
use kind_tree::symbol::{Ident, QualifiedIdent};
use kind_tree::{visit_opt, visit_vec};
use crate::errors::PassError;
pub mod subst;
/// Visitor that collects unbound variables and unbound top-level
/// references, and reports duplicated bindings.
pub struct UnboundCollector {
    pub errors: Sender<Box<dyn Diagnostic>>,
    // Utils for keeping variables tracking and report duplicated ones.
    pub context_vars: Vec<(Range, String)>,
    // Keep track of top level definitions.
    pub top_level_defs: FxHashMap<String, Range>,
    // Top-level names referenced but not defined, grouped by root name.
    pub unbound_top_level: FxHashMap<String, FxHashSet<QualifiedIdent>>,
    // Local variables used without a binding, grouped by name.
    pub unbound: FxHashMap<String, Vec<Ident>>,
    // When false, duplicated-variable errors are collected silently.
    pub emit_errs: bool,
}
impl UnboundCollector {
pub fn new(
diagnostic_sender: Sender<Box<dyn Diagnostic>>,
emit_errs: bool,
) -> UnboundCollector {
Self {
errors: diagnostic_sender,
context_vars: Default::default(),
top_level_defs: Default::default(),
unbound_top_level: Default::default(),
unbound: Default::default(),
emit_errs,
}
}
}
/// Collects all of the unbound variables in a module.
///
/// Invariant: All qualified ident should be expanded.
pub fn collect_module_info(
    diagnostic_sender: Sender<Box<dyn Diagnostic>>,
    module: &mut Module,
    emit_errs: bool,
) -> UnboundCollector {
    let mut state = UnboundCollector::new(diagnostic_sender.clone(), emit_errs);
    state.visit_module(module);
    // Report every unbound local variable found during the walk; the
    // suggestion list is left empty at this stage.
    for idents in state.unbound.values() {
        diagnostic_sender
            .send(Box::new(PassError::UnboundVariable(
                idents.to_vec(),
                vec![],
            )))
            .unwrap();
    }
    state
}
/// Collects all of the unbound variables in a book.
///
/// Invariant: All qualified ident should be expanded.
pub fn get_book_unbound(
diagnostic_sender: Sender<Box<dyn Diagnostic>>,
book: &mut Book,
emit_errs: bool,
) -> (
FxHashMap<String, Vec<Ident>>,
FxHashMap<String, FxHashSet<QualifiedIdent>>,
) {
let mut state = UnboundCollector::new(diagnostic_sender, emit_errs);
state.visit_book(book);
(state.unbound, state.unbound_top_level)
}
impl UnboundCollector {
    /// Registers every name a top-level item defines (the type itself plus
    /// its constructors) into `top_level_defs`.
    fn visit_top_level_names(&mut self, toplevel: &mut TopLevel) {
        match toplevel {
            TopLevel::SumType(sum) => {
                debug_assert!(sum.name.get_aux().is_none());
                self.top_level_defs
                    .insert(sum.name.get_root(), sum.name.range);
                // Each constructor lives under the type's namespace.
                for cons in &sum.constructors {
                    let name_cons = sum.name.add_segment(cons.name.to_str());
                    debug_assert!(name_cons.get_aux().is_none());
                    self.top_level_defs
                        .insert(name_cons.get_root(), name_cons.range);
                }
            }
            TopLevel::RecordType(rec) => {
                // Records define the type name plus a single constructor.
                let name_cons = rec.name.add_segment(rec.constructor.to_str());
                debug_assert!(rec.name.get_aux().is_none());
                debug_assert!(name_cons.get_aux().is_none());
                self.top_level_defs
                    .insert(rec.name.get_root(), rec.name.range);
                self.top_level_defs
                    .insert(name_cons.get_root(), name_cons.range);
            }
            TopLevel::Entry(entry) => {
                debug_assert!(entry.name.get_aux().is_none());
                self.top_level_defs
                    .insert(entry.name.get_root(), entry.name.range);
            }
        }
    }
}
impl Visitor for UnboundCollector {
    // Attributes carry no variable references, so there is nothing to visit.
    fn visit_attr(&mut self, _: &mut kind_tree::concrete::Attribute) {}
fn visit_ident(&mut self, ident: &mut Ident) {
let name = ident.to_str();
if self.context_vars.iter().all(|x| x.1 != name) {
let entry = self
.unbound
.entry(name.to_string())
.or_insert_with(Vec::new);
entry.push(ident.clone());
}
}
    /// Records references to top-level names that have no known definition.
    fn visit_qualified_ident(&mut self, ident: &mut QualifiedIdent) {
        // Aliases must already be expanded when this pass runs.
        debug_assert!(ident.get_aux().is_none());
        if !self.top_level_defs.contains_key(&ident.get_root()) {
            let entry = self.unbound_top_level.entry(ident.get_root()).or_default();
            entry.insert(ident.clone());
        }
    }
fn visit_pat_ident(&mut self, ident: &mut PatIdent) {
let name = ident.0.to_str();
if let Some(fst) = self.context_vars.iter().find(|x| x.1 == name) {
if self.emit_errs {
self.errors
.send(Box::new(PassError::RepeatedVariable(fst.0, ident.0.range)))
.unwrap()
}
} else {
self.context_vars.push((ident.0.range, name.to_string()))
}
}
fn visit_argument(&mut self, argument: &mut Argument) {
match &mut argument.typ {
Some(res) => self.visit_expr(res),
None => (),
}
let res = self
.context_vars
.iter()
.find(|x| x.1 == argument.name.to_str());
if let Some(fst) = res {
if self.emit_errs {
self.errors
.send(Box::new(PassError::RepeatedVariable(
fst.0,
argument.name.range,
)))
.unwrap()
}
} else {
self.context_vars
.push((argument.name.range, argument.name.to_string()))
}
self.context_vars
.push((argument.name.range, argument.name.to_string()));
}
fn visit_rule(&mut self, rule: &mut Rule) {
let vars = self.context_vars.clone();
for pat in &mut rule.pats {
self.visit_pat(pat);
}
self.visit_expr(&mut rule.body);
self.context_vars = vars;
}
fn visit_entry(&mut self, entry: &mut Entry) {
let vars = self.context_vars.clone();
for arg in entry.args.iter_mut() {
self.visit_argument(arg)
}
self.visit_expr(&mut entry.typ);
self.context_vars = vars;
for rule in &mut entry.rules {
self.visit_rule(rule)
}
}
fn visit_top_level(&mut self, toplevel: &mut TopLevel) {
match toplevel {
TopLevel::SumType(entr) => {
let mut repeated_names = FxHashMap::<String, Range>::default();
let mut failed = false;
for cons in &entr.constructors {
match repeated_names.get(&cons.name.to_string()) {
Some(_) => {
failed = true;
}
None => {
repeated_names.insert(cons.name.to_string(), cons.name.range);
}
}
let name_cons = entr.name.add_segment(cons.name.to_str());
self.context_vars
.push((name_cons.range, name_cons.to_string()));
}
if failed {
return;
}
let vars = self.context_vars.clone();
visit_vec!(entr.parameters.iter_mut(), arg => self.visit_argument(arg));
let inside_vars = self.context_vars.clone();
visit_vec!(entr.indices.iter_mut(), arg => self.visit_argument(arg));
visit_vec!(entr.constructors.iter_mut(), cons => {
self.context_vars = inside_vars.clone();
visit_vec!(cons.args.iter_mut(), arg => self.visit_argument(arg));
visit_opt!(&mut cons.typ, arg => self.visit_expr(arg));
});
self.context_vars = vars;
}
TopLevel::RecordType(entr) => {
let inside_vars = self.context_vars.clone();
visit_vec!(entr.parameters.iter_mut(), arg => self.visit_argument(arg));
visit_vec!(entr.fields.iter_mut(), (_, _, typ) => {
self.visit_expr(typ);
});
self.context_vars = inside_vars;
}
TopLevel::Entry(entr) => self.visit_entry(entr),
}
}
fn visit_module(&mut self, book: &mut kind_tree::concrete::Module) {
for entr in &mut book.entries {
self.visit_top_level_names(entr);
}
for entr in &mut book.entries {
self.visit_top_level(entr)
}
}
fn visit_book(&mut self, book: &mut Book) {
for entr in book.entries.values_mut() {
self.visit_top_level_names(entr);
}
for entr in book.entries.values_mut() {
self.visit_top_level(entr)
}
}
fn visit_destruct(&mut self, destruct: &mut Destruct) {
match destruct {
Destruct::Destruct(range, ty, bindings, _) => {
self.visit_qualified_ident(
&mut QualifiedIdent::add_segment(ty, "open").to_generated(),
);
self.visit_range(range);
self.visit_qualified_ident(ty);
for bind in bindings {
self.visit_case_binding(bind)
}
}
Destruct::Ident(ident) => self.context_vars.push((ident.range, ident.to_string())),
}
}
fn visit_sttm(&mut self, sttm: &mut kind_tree::concrete::expr::Sttm) {
match &mut sttm.data {
SttmKind::Ask(ident, val, next) => {
self.visit_expr(val);
let vars = self.context_vars.clone();
self.visit_destruct(ident);
self.visit_sttm(next);
self.context_vars = vars;
}
SttmKind::Let(ident, val, next) => {
self.visit_expr(val);
let vars = self.context_vars.clone();
self.visit_destruct(ident);
self.visit_sttm(next);
self.context_vars = vars;
}
SttmKind::Expr(expr, next) => {
self.visit_expr(expr);
self.visit_sttm(next);
}
SttmKind::Return(expr) => {
self.visit_expr(expr);
}
SttmKind::RetExpr(expr) => {
self.visit_expr(expr);
}
}
}
fn visit_pat(&mut self, pat: &mut Pat) {
match &mut pat.data {
PatKind::Var(ident) => self.visit_pat_ident(ident),
PatKind::Str(_) => (),
PatKind::U60(_) => (),
PatKind::U120(_) => (),
PatKind::F60(_) => (),
PatKind::Char(_) => (),
PatKind::Hole => (),
PatKind::List(ls) => {
for pat in ls {
self.visit_pat(pat)
}
}
PatKind::Pair(fst, snd) => {
self.visit_pat(fst);
self.visit_pat(snd);
}
PatKind::App(t, ls) => {
self.visit_qualified_ident(t);
for pat in ls {
self.visit_pat(pat)
}
}
}
}
fn visit_case_binding(&mut self, case_binding: &mut CaseBinding) {
match case_binding {
CaseBinding::Field(ident) | CaseBinding::Renamed(_, ident) => {
self.context_vars.push((ident.range, ident.to_string()))
}
}
}
fn visit_case(&mut self, case: &mut Case) {
let vars = self.context_vars.clone();
for binding in &mut case.bindings {
self.visit_case_binding(binding);
}
self.visit_expr(&mut case.value);
self.context_vars = vars;
}
fn visit_match(&mut self, matcher: &mut kind_tree::concrete::expr::Match) {
self.visit_expr(&mut matcher.scrutinizer);
for case in &mut matcher.cases {
self.visit_case(case);
}
match &mut matcher.motive {
Some(x) => self.visit_expr(x),
None => (),
}
}
fn visit_binding(&mut self, binding: &mut Binding) {
match binding {
Binding::Positional(e) => self.visit_expr(e),
Binding::Named(_, _, e) => self.visit_expr(e),
}
}
fn visit_expr(&mut self, expr: &mut Expr) {
match &mut expr.data {
ExprKind::Var { name } => self.visit_ident(name),
ExprKind::Constr { name, args } => {
self.visit_qualified_ident(name);
visit_vec!(args.iter_mut(), arg => self.visit_binding(arg));
}
ExprKind::All {
param: None,
typ,
body,
..
} => {
self.visit_expr(typ);
self.visit_expr(body);
}
ExprKind::All {
param: Some(ident),
typ,
body,
..
} => {
self.visit_expr(typ);
self.context_vars.push((ident.range, ident.to_string()));
self.visit_expr(body);
self.context_vars.pop();
}
ExprKind::Lambda {
param, typ, body, ..
} => {
match typ {
Some(x) => self.visit_expr(x),
None => (),
}
self.context_vars.push((param.range, param.to_string()));
self.visit_expr(body);
self.context_vars.pop();
}
ExprKind::App { fun, args } => {
self.visit_expr(fun);
visit_vec!(args.iter_mut(), arg => self.visit_expr(&mut arg.data));
}
ExprKind::Ann { val, typ } => {
self.visit_expr(val);
self.visit_expr(typ);
}
ExprKind::Lit { lit } => self.visit_literal(lit),
ExprKind::Binary { op: _, fst, snd } => {
self.visit_expr(fst);
self.visit_expr(snd);
}
ExprKind::Let { name, val, next } => {
self.visit_expr(val);
let vars = self.context_vars.clone();
self.visit_destruct(name);
self.visit_expr(next);
self.context_vars = vars;
}
ExprKind::Sigma {
param: None,
fst,
snd,
} => {
self.visit_qualified_ident(&mut QualifiedIdent::new_static(
"Sigma", None, expr.range,
));
self.visit_expr(fst);
self.visit_expr(snd);
}
ExprKind::Sigma {
param: Some(ident),
fst,
snd,
} => {
self.visit_qualified_ident(&mut QualifiedIdent::new_static(
"Sigma", None, expr.range,
));
self.visit_expr(fst);
self.context_vars.push((ident.range, ident.to_string()));
self.visit_expr(snd);
self.context_vars.pop();
}
ExprKind::Match(matcher) => {
self.visit_qualified_ident(&mut matcher.typ.add_segment("match").to_generated());
self.visit_match(matcher)
}
ExprKind::Subst(subst) => {
self.visit_ident(&mut subst.name);
if let Some(pos) = self
.context_vars
.iter()
.position(|x| x.1 == subst.name.to_string())
{
subst.indx = pos;
}
self.visit_expr(&mut subst.expr)
}
ExprKind::Hole => {}
ExprKind::Do { typ, sttm } => {
self.visit_qualified_ident(&mut typ.add_segment("pure").to_generated());
self.visit_qualified_ident(&mut typ.add_segment("bind").to_generated());
self.visit_sttm(sttm)
}
ExprKind::If { cond, then_, else_ } => {
let typ = QualifiedIdent::new_static("Bool", None, expr.range);
self.visit_qualified_ident(&mut typ.add_segment("if").to_generated());
self.visit_expr(cond);
self.visit_expr(then_);
self.visit_expr(else_);
}
ExprKind::Pair { fst, snd } => {
let typ = QualifiedIdent::new_static("Pair", None, expr.range);
self.visit_qualified_ident(&mut typ.add_segment("new").to_generated());
self.visit_expr(fst);
self.visit_expr(snd);
}
ExprKind::List { args } => {
let mut typ = QualifiedIdent::new_static("List", None, expr.range);
self.visit_qualified_ident(&mut typ);
self.visit_qualified_ident(&mut typ.add_segment("nil").to_generated());
self.visit_qualified_ident(&mut typ.add_segment("cons").to_generated());
visit_vec!(args.iter_mut(), arg => self.visit_expr(arg));
}
}
}
}

View File

@ -0,0 +1,100 @@
use fxhash::FxHashMap;
use kind_tree::untyped::*;
/// State for a capture-avoiding variable substitution over untyped terms.
pub struct Subst {
    // Names to replace, mapped to their replacement terms.
    vars: FxHashMap<String, Box<Expr>>,
    // Names currently bound by an enclosing binder; these shadow `vars`.
    ctx: im_rc::HashSet<String>,
}
/// Replaces every free occurrence of the variables in `vars` inside
/// `expr` with their associated terms.
pub fn subst_on_expr(expr: &mut Box<Expr>, vars: FxHashMap<String, Box<Expr>>) {
    let mut subst = Subst {
        vars,
        ctx: Default::default(),
    };
    subst.subst_expr(expr)
}
impl Subst {
    /// Applies the substitution to every rule of an entry.
    pub fn subst_entry(&mut self, entry: &mut Entry) {
        // Each rule restores the context on its own, so no backup is needed
        // here (the previous `clone` + immediate reassign was a no-op).
        for rule in &mut entry.rules {
            self.subst_rule(rule);
        }
    }
    /// Applies the substitution to a single rule, restoring the binder
    /// context afterwards.
    pub fn subst_rule(&mut self, rule: &mut Rule) {
        let backup = self.ctx.clone();
        // NOTE(review): patterns are traversed with `subst_expr`, so their
        // variables get substituted instead of being registered as binders;
        // `subst_pat` below is never called here. Confirm this is intended.
        for pat in &mut rule.pats {
            self.subst_expr(pat)
        }
        self.subst_expr(&mut rule.body);
        self.ctx = backup;
    }
    /// Registers every variable bound by a pattern in the context so that
    /// `subst_expr` will not replace it.
    pub fn subst_pat(&mut self, expr: &mut Box<Expr>) {
        use ExprKind::*;
        match &mut expr.data {
            Var { name } => {
                self.ctx.insert(name.to_string());
            }
            Fun { name: _, args } | Ctr { name: _, args } => {
                for arg in args {
                    self.subst_pat(arg);
                }
            }
            _ => (),
        }
    }
    /// Replaces free variables in an expression, skipping any name that is
    /// currently bound in `ctx`.
    pub fn subst_expr(&mut self, expr: &mut Box<Expr>) {
        use ExprKind::*;
        match &mut expr.data {
            Var { name } => {
                // Bound names shadow the substitution.
                if self.ctx.contains(name.to_str()) {
                    return;
                }
                if let Some(res) = self.vars.get(name.to_str()) {
                    *expr = res.clone();
                }
            }
            Lambda { param, body, .. } => {
                let backup = self.ctx.clone();
                self.ctx.insert(param.to_string());
                self.subst_expr(body);
                self.ctx = backup;
            }
            App { fun, args } => {
                self.subst_expr(fun);
                for arg in args {
                    self.subst_expr(arg);
                }
            }
            Fun { name: _, args } | Ctr { name: _, args } => {
                for arg in args {
                    self.subst_expr(arg);
                }
            }
            Let { name, val, next } => {
                let backup = self.ctx.clone();
                // NOTE(review): `name` is inserted before `val` is visited, so
                // the bound value is substituted as if the let were recursive —
                // confirm this is the intended scoping.
                self.ctx.insert(name.to_string());
                self.subst_expr(val);
                self.subst_expr(next);
                self.ctx = backup;
            }
            Binary { left, right, .. } => {
                self.subst_expr(left);
                self.subst_expr(right);
            }
            _ => (),
        }
    }
}

View File

@ -0,0 +1,18 @@
[package]
name = "kind-query"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
kind-parser = { path = "../kind-parser" }
kind-tree = { path = "../kind-tree" }
kind-span = { path = "../kind-span" }
kind-report = { path = "../kind-report" }
kind-checker = { path = "../kind-checker" }
kind-pass = { path = "../kind-pass" }
kind-target-hvm = { path = "../kind-target-hvm" }
fxhash = "0.2.1"
pathdiff = "0.2.1"

View File

@ -0,0 +1,95 @@
//! Errors created by the driver. All of them
//! are related with paths and unbounded variables.
use std::path::PathBuf;
use kind_report::data::{Color, Diagnostic, DiagnosticFrame, Marker, Severity, Subtitle, Word};
use kind_tree::symbol::{Ident, QualifiedIdent};
/// Describes all of the possible errors inside each
/// of the passes inside this crate.
pub(crate) enum DriverError {
    // A name was used but never defined; carries the offending
    // identifiers and a list of similarly-named suggestions.
    UnboundVariable(Vec<Ident>, Vec<String>),
    // A module name resolves to more than one file on disk.
    MultiplePaths(QualifiedIdent, Vec<PathBuf>),
    // The same name is defined twice; carries both occurrences.
    DefinedMultipleTimes(QualifiedIdent, QualifiedIdent),
}
impl Diagnostic for DriverError {
    /// The syntax context of the first identifier involved in the error,
    /// used to locate the source file the diagnostic points into.
    fn get_syntax_ctx(&self) -> Option<kind_span::SyntaxCtxIndex> {
        match self {
            DriverError::UnboundVariable(v, _) => Some(v[0].range.ctx),
            DriverError::MultiplePaths(id, _) => Some(id.range.ctx),
            DriverError::DefinedMultipleTimes(fst, _) => Some(fst.range.ctx),
        }
    }
    /// Renders this error into a displayable diagnostic frame.
    fn to_diagnostic_frame(&self) -> DiagnosticFrame {
        match self {
            DriverError::UnboundVariable(idents, suggestions) => DiagnosticFrame {
                code: 100,
                severity: Severity::Error,
                title: format!("Cannot find the definition '{}'.", idents[0].to_str()),
                subtitles: vec![],
                hints: vec![if !suggestions.is_empty() {
                    format!(
                        "Maybe you're looking for {}",
                        suggestions.iter().map(|x| format!("'{}'", x)).collect::<Vec<String>>().join(", ")
                    )
                } else {
                    "Take a look at the rules for name searching at https://github.com/Kindelia/Kind2/blob/master/guide/naming.md".to_string()
                }],
                positions: idents
                    .iter()
                    .map(|ident| Marker {
                        position: ident.range,
                        color: Color::Fst,
                        text: "Here!".to_string(),
                        no_code: false,
                        main: true,
                    })
                    .collect(),
            },
            DriverError::MultiplePaths(ident, paths) => DiagnosticFrame {
                code: 101,
                severity: Severity::Error,
                title: "Multiple definitions for the same name".to_string(),
                subtitles: paths
                    .iter()
                    .map(|path| Subtitle::Phrase(Color::Fst, vec![Word::White(path.display().to_string())]))
                    .collect(),
                hints: vec!["Take a look at the rules for name searching at https://github.com/Kindelia/Kind2/blob/master/guide/naming.md".to_string()],
                positions: vec![Marker {
                    position: ident.range,
                    color: Color::Fst,
                    text: "Here!".to_string(),
                    no_code: false,
                    main: true,
                }],
            },
            DriverError::DefinedMultipleTimes(fst, snd) => DiagnosticFrame {
                code: 102,
                severity: Severity::Error,
                title: "Defined multiple times for the same name".to_string(),
                subtitles: vec![],
                hints: vec!["Rename one of the definitions or remove and look at how names work in Kind at https://github.com/Kindelia/Kind2/blob/master/guide/naming.md".to_string()],
                positions: vec![
                    Marker {
                        position: fst.range,
                        color: Color::Fst,
                        // Fixed typo: "ocorrence" -> "occurrence".
                        text: "The first occurrence".to_string(),
                        no_code: false,
                        main: true,
                    },
                    Marker {
                        position: snd.range,
                        color: Color::Snd,
                        // Fixed typo: "occorrence" -> "occurrence".
                        text: "Second occurrence here!".to_string(),
                        no_code: false,
                        main: false,
                    },
                ],
            },
        }
    }
}

View File

@ -0,0 +1,82 @@
use fxhash::{FxHashSet, FxHashMap};
/// A node in the dependency graph, holding a payload plus bookkeeping
/// flags used for invalidation.
#[derive(Debug)]
pub struct Node<T> {
    // Ids of the nodes this node points to.
    pub children: FxHashSet<usize>,
    // Ids of the nodes that point to this node.
    pub parents: FxHashSet<usize>,
    pub data: T,
    // Set by flood_invalidation when this node or a descendant changed.
    pub invalidated: bool,
    pub hash: u64,
    pub root: bool,
    pub failed: bool,
}
/// A directed graph of [Node]s addressed by sequentially assigned ids.
#[derive(Debug)]
pub struct Graph<T> {
    nodes: FxHashMap<usize, Node<T>>,
    // Next id to hand out; only ever incremented.
    count: usize,
}
impl<T> Default for Graph<T> {
fn default() -> Self {
Self { nodes: Default::default(), count: Default::default() }
}
}
impl<T> Graph<T> {
    /// Returns a shared reference to the node with the given id.
    pub fn get(&self, id: &usize) -> Option<&Node<T>> {
        self.nodes.get(id)
    }
    /// Returns a mutable reference to the node with the given id.
    pub fn get_mut(&mut self, id: &usize) -> Option<&mut Node<T>> {
        self.nodes.get_mut(id)
    }
    /// Inserts a new, unconnected node; ids are assigned sequentially
    /// from the internal counter.
    pub fn add(&mut self, data: T, hash: u64, root: bool) {
        self.nodes.insert(
            self.count,
            Node {
                data,
                invalidated: false,
                children: FxHashSet::default(),
                parents: FxHashSet::default(),
                hash,
                failed: false,
                root,
            },
        );
        self.count += 1;
    }
    /// Adds a parent -> child edge. A missing endpoint is silently
    /// ignored, which can leave a half-connected edge.
    pub fn connect(&mut self, parent: usize, child: usize) {
        if let Some(parent) = self.nodes.get_mut(&parent) {
            parent.children.insert(child);
        }
        if let Some(child) = self.nodes.get_mut(&child) {
            child.parents.insert(parent);
        }
    }
    /// Removes the edge between `parent` and `child`, returning true when
    /// the child ends up with no parents left (false if it doesn't exist).
    ///
    /// NOTE(review): the argument order is (child, parent) — the reverse of
    /// [Graph::connect] — which is easy to misuse; confirm call sites.
    pub fn disconnect(&mut self, child: usize, parent: usize) -> bool {
        if let Some(parent) = self.nodes.get_mut(&parent) {
            parent.children.remove(&child);
        }
        if let Some(child) = self.nodes.get_mut(&child) {
            child.parents.remove(&parent);
            // Idiomatic replacement for `len() == 0`.
            child.parents.is_empty()
        } else {
            false
        }
    }
    /// Marks a node and, transitively, all of its ancestors as invalidated.
    /// Stops recursing at nodes that are already invalidated.
    pub fn flood_invalidation(&mut self, node: usize) {
        if let Some(node) = self.nodes.get_mut(&node) {
            if !node.invalidated {
                node.invalidated = true;
                for parent in node.parents.clone() {
                    self.flood_invalidation(parent)
                }
            }
        }
    }
}

View File

@ -0,0 +1,3 @@
//! This module is a generalization of the driver
//! module. It is useful for LSPs, watch mode, REPLs
//! and many other things.

View File

@ -0,0 +1,55 @@
use std::path::{Path, PathBuf};
use kind_report::data::Diagnostic;
use kind_tree::symbol::QualifiedIdent;
use crate::errors::DriverError;
const EXT: &str = "kind2";
/// Tries to accumulate on a buffer all of the
/// paths that exists (so we can just throw an
/// error about ambiguous resolution to the user)
pub(crate) fn get_module_path(
    ident: &QualifiedIdent,
    raw_path: &Path,
) -> Result<Option<PathBuf>, Box<dyn Diagnostic>> {
    // Candidate 1: "<raw_path>.kind2"
    let mut canon_path = raw_path.to_path_buf();
    // Candidate 2: "<raw_path>/_.kind2"
    let mut dir_file_path = raw_path.to_path_buf();
    let dir_path = raw_path.to_path_buf();
    canon_path.set_extension(EXT);
    dir_file_path.push("_");
    dir_file_path.set_extension(EXT);
    // Both a "<name>.kind2" file and a "<name>" directory exist: the
    // resolution is ambiguous, so report both paths to the user.
    if canon_path.exists() && dir_path.exists() && canon_path.is_file() && dir_path.is_dir() {
        Err(Box::new(DriverError::MultiplePaths(
            ident.clone(),
            vec![canon_path, dir_path],
        )))
    } else if canon_path.is_file() {
        Ok(Some(canon_path))
    } else if dir_file_path.is_file() {
        Ok(Some(dir_file_path))
    } else {
        Ok(None)
    }
}
/// Transforms an ident into a path on disk, e.g. "Data.List.map" is
/// looked up under "<root>/Data/List/map".
pub(crate) fn ident_to_path(
    root: &Path,
    ident: &QualifiedIdent,
) -> Result<Option<PathBuf>, Box<dyn Diagnostic>> {
    let name = ident.root.to_string();
    // Dots in the qualified name become path separators.
    let segments = name.split('.').collect::<Vec<&str>>();
    let mut raw_path = root.to_path_buf();
    raw_path.push(PathBuf::from(segments.join("/")));
    // Try the full path first; if no file matches, drop the last segment
    // and look for the enclosing module instead. (Also removed the
    // needless `&ident` re-borrows flagged by clippy.)
    match get_module_path(ident, &raw_path) {
        Ok(None) => {
            raw_path.pop();
            get_module_path(ident, &raw_path)
        }
        rest => rest,
    }
}

View File

@ -0,0 +1,15 @@
[package]
name = "kind-report"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
kind-span = { path = "../kind-span" }
unicode-width = "0.1.10"
yansi = "0.5.1"
pathdiff = "0.2.1"
fxhash = "0.2.1"
termsize = "0.1"

View File

@ -0,0 +1,64 @@
use std::time::Duration;
use kind_span::{Range, SyntaxCtxIndex};
/// How serious a diagnostic is.
#[derive(Debug, Clone)]
pub enum Severity {
    Error,
    Warning,
    Info,
}
/// Abstract highlight colors; the renderer maps each to a concrete
/// terminal color (Fst=red, Snd=blue, Thr=green, For=yellow, Fft=cyan).
#[derive(Debug, Clone)]
pub enum Color {
    Fst,
    Snd,
    Thr,
    For,
    Fft,
}
/// A single word inside a subtitle phrase, with its styling.
#[derive(Debug, Clone)]
pub enum Word {
    Dimmed(String),
    White(String),
    Normal(String),
    Painted(Color, String),
}
/// A line rendered under the diagnostic title.
#[derive(Debug, Clone)]
pub enum Subtitle {
    Normal(Color, String),
    Bold(Color, String),
    Phrase(Color, Vec<Word>),
    LineBreak,
}
/// A span of source code to highlight, with its label.
#[derive(Debug, Clone)]
pub struct Marker {
    pub position: Range,
    pub color: Color,
    pub text: String,
    // When true, the code snippet itself is not printed.
    pub no_code: bool,
    // Main markers get extra surrounding context lines.
    pub main: bool,
}
/// Everything needed to render one diagnostic message.
#[derive(Debug, Clone)]
pub struct DiagnosticFrame {
    pub code: u32,
    pub severity: Severity,
    pub title: String,
    pub subtitles: Vec<Subtitle>,
    pub hints: Vec<String>,
    pub positions: Vec<Marker>,
}
/// Progress/status messages emitted by the compiler driver.
pub enum Log {
    Checking(String),
    Checked(Duration),
    Compiled(Duration),
    Failed(Duration),
}
/// Anything that can be turned into a renderable diagnostic.
pub trait Diagnostic {
    fn get_syntax_ctx(&self) -> Option<SyntaxCtxIndex>;
    fn to_diagnostic_frame(&self) -> DiagnosticFrame;
}

View File

@ -0,0 +1,80 @@
use yansi::Paint;
/// Data structures
pub mod data;
/// Render
pub mod report;
/// The set of glyphs used to draw diagnostic gutters and connectors.
#[derive(Debug)]
pub struct Chars {
    // Vertical bar for the gutter.
    pub vbar: char,
    // Horizontal rule.
    pub hbar: char,
    // Dashed/dotted vertical bar for skipped regions.
    pub dbar: char,
    // Corner pieces and marker glyphs used by the renderer.
    pub trline: char,
    pub bxline: char,
    pub brline: char,
    pub ylline: char,
    pub bullet: char,
}
impl Chars {
    /// Unicode box-drawing glyph set.
    ///
    /// NOTE(review): every literal below appears as an empty char literal
    /// (`''`) in this copy of the file, which is not valid Rust — the
    /// original box-drawing characters were likely lost to an encoding
    /// issue. Restore them from the repository before relying on this.
    pub fn unicode() -> &'static Chars {
        &Chars {
            vbar: '',
            hbar: '',
            dbar: '',
            trline: '',
            bxline: '',
            brline: '',
            ylline: '',
            bullet: '',
        }
    }
    /// Pure-ASCII fallback glyph set for terminals without unicode.
    pub fn ascii() -> &'static Chars {
        &Chars {
            vbar: '|',
            hbar: '-',
            dbar: ':',
            trline: '\\',
            bxline: 'v',
            brline: '/',
            ylline: '-',
            bullet: '*',
        }
    }
}
/// Options controlling how diagnostics are rendered.
#[derive(Debug)]
pub struct RenderConfig<'a> {
    // Glyph set to draw with (unicode or ascii).
    pub chars: &'a Chars,
    // Base indentation for rendered blocks.
    pub indent: usize,
}
impl<'a> RenderConfig<'a> {
    /// Configuration that renders with unicode box-drawing glyphs.
    pub fn unicode(indent: usize) -> RenderConfig<'a> {
        let chars = Chars::unicode();
        RenderConfig { chars, indent }
    }
    /// Configuration that renders with plain ASCII glyphs.
    pub fn ascii(indent: usize) -> RenderConfig<'a> {
        let chars = Chars::ascii();
        RenderConfig { chars, indent }
    }
}
/// Globally disables colored output when the user asked for it, or when
/// running on a Windows console that rejects ANSI escapes.
///
/// Note: the short-circuit matters — `Paint::enable_windows_ascii()` is
/// only attempted when `disable` is false.
pub fn check_if_colors_are_supported(disable: bool) {
    if disable || (cfg!(windows) && !Paint::enable_windows_ascii()) {
        Paint::disable();
    }
}
/// Picks an ASCII render config when unicode output was disabled or the
/// Windows console can't be switched to ANSI mode, unicode otherwise.
///
/// NOTE(review): despite the name, this tests Windows ANSI support (the
/// same condition as above), not UTF-8 support specifically.
pub fn check_if_utf8_is_supported<'a>(disable: bool, indent: usize) -> RenderConfig<'a> {
    if disable || (cfg!(windows) && !Paint::enable_windows_ascii()) {
        RenderConfig::ascii(indent)
    } else {
        RenderConfig::unicode(indent)
    }
}

View File

@ -0,0 +1,540 @@
//! Renders error messages.
// The code is not so good ..
// pretty printers are always a disaster to write. expect
// that in the future i can rewrite it in a better way.
use std::fmt::{Display, Write};
use std::path::{Path, PathBuf};
use std::str;
use fxhash::{FxHashMap, FxHashSet};
use kind_span::{Pos, SyntaxCtxIndex};
use unicode_width::UnicodeWidthStr;
use yansi::Paint;
use crate::{data::*, RenderConfig};
type SortedMarkers = FxHashMap<SyntaxCtxIndex, Vec<Marker>>;
/// A 0-based line/column position inside a source file
/// (displayed 1-based).
#[derive(Debug, Clone)]
struct Point {
    pub line: usize,
    pub column: usize,
}
/// Resolves a syntax context to the path and source text of its file.
pub trait FileCache {
    fn fetch(&self, ctx: SyntaxCtxIndex) -> Option<(PathBuf, &String)>;
}
impl Display for Point {
    /// Renders the point as the conventional 1-based "line:column" pair.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let line = self.line + 1;
        let column = self.column + 1;
        write!(f, "{}:{}", line, column)
    }
}
/// Groups markers by the syntax context (file) they point into and
/// sorts each group by position.
fn group_markers(markers: &[Marker]) -> SortedMarkers {
    let mut by_ctx = SortedMarkers::default();
    for marker in markers {
        by_ctx
            .entry(marker.position.ctx)
            .or_insert_with(Vec::new)
            .push(marker.clone());
    }
    for group in by_ctx.values_mut() {
        // Mirrors the original ordering: the start of one marker is
        // compared against the end of the other.
        group.sort_by(|x, y| x.position.start.cmp(&y.position.end));
    }
    by_ctx
}
/// Builds a line-lookup table for `code`: entry `i` holds the byte
/// offset just past the end of line `i`, with a final entry for the
/// total length of the text.
fn get_code_line_guide(code: &str) -> Vec<usize> {
    let mut offsets = Vec::new();
    let mut consumed = 0;
    for character in code.chars() {
        consumed += character.len_utf8();
        if character == '\n' {
            offsets.push(consumed);
        }
    }
    offsets.push(code.len());
    offsets
}
/// Translates a byte position into a 0-based line/column [Point] using
/// the offsets produced by [get_code_line_guide].
///
/// Takes `&[usize]` instead of `&Vec<usize>` (clippy::ptr_arg); call
/// sites passing `&guide` coerce unchanged.
fn find_in_line_guide(pos: Pos, guide: &[usize]) -> Point {
    let index = pos.index as usize;
    // Each guide entry is the offset just past the end of its line, so
    // the first entry greater than `index` identifies the line.
    for (line, end) in guide.iter().enumerate() {
        if *end > index {
            let line_start = if line == 0 { 0 } else { guide[line - 1] };
            return Point {
                line,
                column: index - line_start,
            };
        }
    }
    // Past the end of the text: clamp to the last line.
    let line = guide.len() - 1;
    let line_start = if line == 0 { 0 } else { guide[line - 1] };
    Point {
        line,
        column: index - line_start,
    }
}
// Get color
fn get_colorizer<T>(color: &Color) -> &dyn Fn(T) -> Paint<T> {
match color {
Color::Fst => &|str| yansi::Paint::red(str).bold(),
Color::Snd => &|str| yansi::Paint::blue(str).bold(),
Color::Thr => &|str| yansi::Paint::green(str).bold(),
Color::For => &|str| yansi::Paint::yellow(str).bold(),
Color::Fft => &|str| yansi::Paint::cyan(str).bold(),
}
}
// TODO: Remove common indentation.
// TODO: Prioritize inline marcations.
/// Prints one source line, painting the sub-ranges covered by markers
/// with their colors and passing the rest through `modify`.
fn colorize_code<T: Write + Sized>(
    markers: &mut [&(Point, Point, &Marker)],
    code_line: &str,
    modify: &dyn Fn(&str) -> String,
    fmt: &mut T,
) -> std::fmt::Result {
    markers.sort_by(|x, y| x.0.column.cmp(&y.0.column));
    let mut start = 0;
    for marker in markers {
        // Unmarked text before this marker.
        if start < marker.0.column {
            write!(fmt, "{}", modify(&code_line[start..marker.0.column]))?;
            start = marker.0.column;
        }
        // A multi-line marker colors the remainder of this line.
        let end = if marker.0.line == marker.1.line {
            marker.1.column
        } else {
            code_line.len()
        };
        if start < end {
            let colorizer = get_colorizer(&marker.2.color);
            write!(fmt, "{}", colorizer(&code_line[start..end]).bold())?;
            start = end;
        }
    }
    // Trailing unmarked text.
    if start < code_line.len() {
        write!(fmt, "{}", modify(&code_line[start..code_line.len()]))?;
    }
    writeln!(fmt)?;
    Ok(())
}
/// Paints gutter content (line numbers, bars) in dimmed cyan.
fn paint_line<T>(data: T) -> Paint<T> {
    Paint::new(data).fg(yansi::Color::Cyan).dimmed()
}
/// Draws the underline row (`┬──`-style) beneath a code line and then
/// one label row per single-line marker, innermost label first.
fn mark_inlined<T: Write + Sized>(
    prefix: &str,
    code: &str,
    config: &RenderConfig,
    inline_markers: &mut [&(Point, Point, &Marker)],
    fmt: &mut T,
) -> std::fmt::Result {
    inline_markers.sort_by(|x, y| x.0.column.cmp(&y.0.column));
    let mut start = 0;
    write!(
        fmt,
        "{:>5} {} {}",
        "",
        paint_line(config.chars.vbar),
        prefix
    )?;
    // First pass: the underline segments, padded by display width so
    // wide characters stay aligned.
    for marker in inline_markers.iter_mut() {
        if start < marker.0.column {
            let pad = UnicodeWidthStr::width(&code[start..marker.0.column]);
            write!(fmt, "{:pad$}", "", pad = pad)?;
            start = marker.0.column;
        }
        if start < marker.1.column {
            let pad = UnicodeWidthStr::width(&code[start..marker.1.column]);
            let colorizer = get_colorizer(&marker.2.color);
            write!(fmt, "{}", colorizer(config.chars.bxline.to_string()))?;
            write!(
                fmt,
                "{}",
                colorizer(config.chars.hbar.to_string().repeat(pad.saturating_sub(1)))
            )?;
            start = marker.1.column;
        }
    }
    writeln!(fmt)?;
    // Pretty print the marker
    // Second pass: one row per marker; earlier markers keep a vertical
    // bar while the last one in the row prints its label text.
    for i in 0..inline_markers.len() {
        write!(
            fmt,
            "{:>5} {} {}",
            "",
            paint_line(config.chars.vbar),
            prefix
        )?;
        let mut start = 0;
        for j in 0..(inline_markers.len() - i) {
            let marker = inline_markers[j];
            if start < marker.0.column {
                let pad = UnicodeWidthStr::width(&code[start..marker.0.column]);
                write!(fmt, "{:pad$}", "", pad = pad)?;
                start = marker.0.column;
            }
            if start < marker.1.column {
                let colorizer = get_colorizer(&marker.2.color);
                if j == (inline_markers.len() - i).saturating_sub(1) {
                    write!(
                        fmt,
                        "{}",
                        colorizer(format!("{}{}", config.chars.trline, marker.2.text))
                    )?;
                } else {
                    write!(fmt, "{}", colorizer(config.chars.vbar.to_string()))?;
                }
                start += 1;
            }
        }
        writeln!(fmt)?;
    }
    Ok(())
}
/// Renders the source snippet for one file: the "file:line:col" header,
/// then each relevant code line with its inline and multi-line markers.
fn write_code_block<'a, T: Write + Sized>(
    file_name: &Path,
    config: &RenderConfig,
    markers: &[Marker],
    group_code: &'a str,
    fmt: &mut T,
) -> std::fmt::Result {
    let guide = get_code_line_guide(group_code);
    let point = find_in_line_guide(markers[0].position.start, &guide);
    let no_code = markers.iter().all(|x| x.no_code);
    // Header, e.g. "  ┌──[path/to/file.kind2:3:7]".
    let header = format!(
        "{:>5} {}{}[{}:{}]",
        "",
        if no_code {
            config.chars.hbar
        } else {
            config.chars.brline
        },
        config.chars.hbar.to_string().repeat(2),
        file_name.to_str().unwrap(),
        point
    );
    writeln!(fmt, "{}", paint_line(header))?;
    if no_code {
        return Ok(());
    }
    writeln!(fmt, "{:>5} {}", "", paint_line(config.chars.vbar))?;
    // lines_set: which source lines to print at all.
    let mut lines_set = FxHashSet::default();
    // markers grouped by the line they start on.
    let mut markers_by_line: FxHashMap<usize, Vec<(Point, Point, &Marker)>> = FxHashMap::default();
    let mut multi_line_markers: Vec<(Point, Point, &Marker)> = Vec::new();
    for marker in markers {
        let start = find_in_line_guide(marker.position.start, &guide);
        let end = find_in_line_guide(marker.position.end, &guide);
        if let Some(row) = markers_by_line.get_mut(&start.line) {
            row.push((start.clone(), end.clone(), marker))
        } else {
            markers_by_line.insert(start.line, vec![(start.clone(), end.clone(), marker)]);
        }
        if end.line != start.line {
            multi_line_markers.push((start.clone(), end.clone(), marker));
        } else if marker.main {
            // Just to make errors a little bit better
            let start = start.line.saturating_sub(1);
            let end = if start + 2 >= guide.len() {
                guide.len() - 1
            } else {
                start + 2
            };
            for i in start..=end {
                lines_set.insert(i);
            }
        }
        // Short spans are shown in full; long spans show only their
        // first and last lines.
        if end.line - start.line <= 3 {
            for i in start.line..=end.line {
                lines_set.insert(i);
            }
        } else {
            lines_set.insert(start.line);
            lines_set.insert(end.line);
        }
    }
    let code_lines: Vec<&'a str> = group_code.lines().collect();
    let mut lines = lines_set
        .iter()
        .filter(|x| **x < code_lines.len())
        .collect::<Vec<&usize>>();
    lines.sort();
    for i in 0..lines.len() {
        let line = lines[i];
        let mut prefix = "   ".to_string();
        let mut empty_vec = Vec::new();
        let row = markers_by_line.get_mut(line).unwrap_or(&mut empty_vec);
        let mut inline_markers: Vec<&(Point, Point, &Marker)> =
            row.iter().filter(|x| x.0.line == x.1.line).collect();
        let mut current = None;
        // A multi-line marker spanning this line draws a colored side
        // bar in the prefix.
        for marker in &multi_line_markers {
            if marker.0.line == *line {
                writeln!(
                    fmt,
                    "{:>5} {} {} ",
                    "",
                    paint_line(config.chars.vbar),
                    get_colorizer(&marker.2.color)(config.chars.brline)
                )?;
            }
            if *line >= marker.0.line && *line <= marker.1.line {
                prefix = format!(" {} ", get_colorizer(&marker.2.color)(config.chars.vbar));
                current = Some(marker);
                break;
            }
        }
        write!(
            fmt,
            "{:>5} {} {}",
            line + 1,
            paint_line(config.chars.vbar),
            prefix,
        )?;
        let modify: Box<dyn Fn(&str) -> String> = if let Some(marker) = current {
            prefix = format!(" {} ", get_colorizer(&marker.2.color)(config.chars.vbar));
            Box::new(|str: &str| get_colorizer(&marker.2.color)(str).to_string())
        } else {
            Box::new(|str: &str| str.to_string())
        };
        if !inline_markers.is_empty() {
            colorize_code(&mut inline_markers, code_lines[*line], &modify, fmt)?;
            mark_inlined(&prefix, code_lines[*line], config, &mut inline_markers, fmt)?;
            if markers_by_line.contains_key(&(line + 1)) {
                writeln!(
                    fmt,
                    "{:>5} {} {} ",
                    "",
                    paint_line(config.chars.dbar),
                    prefix
                )?;
            }
        } else {
            writeln!(fmt, "{}", modify(code_lines[*line]))?;
        }
        // Close a multi-line marker that ends on this line with its label.
        if let Some(marker) = current {
            if marker.1.line == *line {
                let col = get_colorizer(&marker.2.color);
                writeln!(
                    fmt,
                    "{:>5} {} {} ",
                    "",
                    paint_line(config.chars.dbar),
                    prefix
                )?;
                writeln!(
                    fmt,
                    "{:>5} {} {} ",
                    "",
                    paint_line(config.chars.dbar),
                    col(format!(" {} {}", config.chars.trline, marker.2.text))
                )?;
                prefix = "   ".to_string();
            }
        }
        // A dotted separator marks skipped lines between printed ones.
        if i < lines.len() - 1 && lines[i + 1] - line > 1 {
            writeln!(
                fmt,
                "{:>5} {} {} ",
                "",
                paint_line(config.chars.dbar),
                prefix
            )?;
        }
    }
    Ok(())
}
/// Prints the colored severity badge (" ERROR ", " WARN ", " INFO ").
fn render_tag<T: Write + Sized>(severity: &Severity, fmt: &mut T) -> std::fmt::Result {
    write!(
        fmt,
        " {} ",
        match severity {
            Severity::Error => Paint::new(" ERROR ").bg(yansi::Color::Red).bold(),
            Severity::Warning => Paint::new(" WARN ").bg(yansi::Color::Yellow).bold(),
            Severity::Info => Paint::new(" INFO ").bg(yansi::Color::Blue).bold(),
        }
    )
}
/// Something that can render itself to a formatter, resolving source
/// files through a [FileCache].
pub trait Report {
    fn render<T: Write + Sized, C: FileCache>(
        &self,
        cache: &C,
        config: &RenderConfig,
        fmt: &mut T,
    ) -> std::fmt::Result;
}
impl Report for Box<dyn Diagnostic> {
    /// Renders a full diagnostic: badge + title, subtitles, one code
    /// block per source file involved, then hints.
    fn render<T: Write + Sized, C: FileCache>(
        &self,
        cache: &C,
        config: &RenderConfig,
        fmt: &mut T,
    ) -> std::fmt::Result {
        write!(fmt, " ")?;
        let frame = self.to_diagnostic_frame();
        render_tag(&frame.severity, fmt)?;
        writeln!(fmt, "{}", Paint::new(&frame.title).bold())?;
        if !frame.subtitles.is_empty() {
            writeln!(fmt)?;
        }
        for subtitle in &frame.subtitles {
            match subtitle {
                Subtitle::Normal(color, phr) => {
                    let colorizer = get_colorizer(color);
                    writeln!(
                        fmt,
                        "{:>5} {} {}",
                        "",
                        colorizer(config.chars.bullet),
                        Paint::new(phr)
                    )?;
                }
                Subtitle::Bold(color, phr) => {
                    let colorizer = get_colorizer(color);
                    writeln!(
                        fmt,
                        "{:>5} {} {}",
                        "",
                        colorizer(config.chars.bullet),
                        Paint::new(phr).bold()
                    )?;
                }
                Subtitle::Phrase(color, words) => {
                    let colorizer = get_colorizer(color);
                    write!(fmt, "{:>5} {} ", "", colorizer(config.chars.bullet))?;
                    for word in words {
                        match word {
                            Word::Normal(str) => write!(fmt, "{} ", Paint::new(str))?,
                            Word::Dimmed(str) => write!(fmt, "{} ", Paint::new(str).dimmed())?,
                            Word::White(str) => write!(fmt, "{} ", Paint::new(str).bold())?,
                            Word::Painted(color, str) => {
                                let colorizer = get_colorizer(color);
                                write!(fmt, "{} ", colorizer(str))?
                            }
                        }
                    }
                    writeln!(fmt)?;
                }
                Subtitle::LineBreak => {
                    writeln!(fmt)?;
                }
            }
        }
        // One code block per source file referenced by the markers.
        let groups = group_markers(&frame.positions);
        let is_empty = groups.is_empty();
        for (ctx, group) in groups {
            writeln!(fmt)?;
            let (file, code) = cache.fetch(ctx).unwrap();
            // Show the path relative to the current directory when possible.
            let diff =
                pathdiff::diff_paths(&file.clone(), PathBuf::from(".").canonicalize().unwrap())
                    .unwrap_or(file);
            write_code_block(&diff, config, &group, code, fmt)?;
        }
        if !is_empty {
            writeln!(fmt)?;
        }
        for hint in &frame.hints {
            writeln!(
                fmt,
                "{:>5} {} {}",
                "",
                Paint::new("Hint:").fg(yansi::Color::Cyan).bold(),
                Paint::new(hint).fg(yansi::Color::Cyan)
            )?;
        }
        writeln!(fmt)?;
        Ok(())
    }
}
impl Report for Log {
    /// Renders a one-line status message with a colored badge; the file
    /// cache and config are unused.
    fn render<T: Write + Sized, C: FileCache>(
        &self,
        _cache: &C,
        _config: &RenderConfig,
        fmt: &mut T,
    ) -> std::fmt::Result {
        match self {
            Log::Checking(file) => {
                writeln!(
                    fmt,
                    "  {} {}",
                    Paint::new(" CHECKING ").bg(yansi::Color::Green).bold(),
                    file
                )
            }
            Log::Compiled(duration) => {
                writeln!(
                    fmt,
                    "  {} All relevant terms compiled. took {:.2}s",
                    Paint::new(" COMPILED ").bg(yansi::Color::Green).bold(),
                    duration.as_secs_f32()
                )
            }
            Log::Checked(duration) => {
                writeln!(
                    fmt,
                    "   {} All terms checked. took {:.2}s",
                    Paint::new(" CHECKED ").bg(yansi::Color::Green).bold(),
                    duration.as_secs_f32()
                )
            }
            Log::Failed(duration) => {
                writeln!(
                    fmt,
                    "    {} took {}s",
                    Paint::new(" FAILED ").bg(yansi::Color::Red).bold(),
                    duration.as_secs()
                )
            }
        }
    }
}

View File

@ -0,0 +1,8 @@
[package]
name = "kind-span"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]

View File

@ -0,0 +1,89 @@
/// Position in a syntax context.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
pub struct Pos {
    // Byte offset from the start of the source text.
    pub index: u32,
}
/// A syntax context index.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
pub struct SyntaxCtxIndex(pub usize);
impl SyntaxCtxIndex {
    /// Wraps a raw index in the newtype.
    pub fn new(size: usize) -> SyntaxCtxIndex {
        SyntaxCtxIndex(size)
    }
}
/// A span in the encoded format that is required by
/// kind2.
// A whole Range packed into one u64; see Range::encode for the layout.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
pub struct EncodedRange(pub u64);
/// Describes a position in a source code (syntax context). It's useful
/// to generate error messages.
#[derive(Clone, Debug, Copy, Hash, PartialEq, Eq)]
pub struct Range {
    pub start: Pos,
    pub end: Pos,
    // Identifies which source file this range points into.
    pub ctx: SyntaxCtxIndex,
}
/// Anything that knows the source range it came from.
pub trait Locatable {
    fn locate(&self) -> Range;
}
impl Range {
    /// Builds a range out of its three components.
    #[inline]
    pub fn new(start: Pos, end: Pos, ctx: SyntaxCtxIndex) -> Range {
        Range { start, end, ctx }
    }

    /// Placeholder range for generated code with no real source
    /// location: offset 0 to 0 inside context 0.
    pub fn ghost_range() -> Range {
        Range::new(Pos { index: 0 }, Pos { index: 0 }, SyntaxCtxIndex(0))
    }

    /// Joins two ranges: starts at `self`, ends at `next`, and keeps
    /// the syntax context of the first one.
    #[inline]
    pub fn mix(&self, next: Range) -> Range {
        Range {
            end: next.end,
            ..*self
        }
    }

    /// Returns a copy of the range with its syntax context replaced.
    #[inline]
    pub fn set_ctx(&self, ctx: SyntaxCtxIndex) -> Range {
        Range { ctx, ..*self }
    }

    /// Packs the range into a single `u64`: start offset in bits
    /// 0..24, end offset in bits 24..48 and syntax context in bits
    /// 48..64. Offsets larger than 2^24 - 1 are silently truncated.
    #[inline]
    pub fn encode(&self) -> EncodedRange {
        let start = (self.start.index as u64) & 0xFFFFFF;
        let end = (self.end.index as u64) & 0xFFFFFF;
        let ctx = self.ctx.0 as u64;
        EncodedRange((ctx << 48) | (end << 24) | start)
    }
}
impl EncodedRange {
/// Transforms a encoded span back into a range.
pub fn to_range(&self) -> Range {
Range {
ctx: SyntaxCtxIndex((self.0 >> 48) as usize),
start: Pos {
index: (self.0 & 0xFFFFFF) as u32,
},
end: Pos {
index: ((self.0 >> 24) & 0xFFFFFF) as u32,
},
}
}
}

View File

@ -0,0 +1,14 @@
[package]
name = "kind-target-hvm"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
kind-span = { path = "../kind-span" }
kind-tree = { path = "../kind-tree" }
kind-report = { path = "../kind-report" }
kind-derive = { path = "../kind-derive" }
hvm = { git = "https://github.com/Kindelia/HVM.git" }

View File

@ -0,0 +1,125 @@
use hvm::u60;
use kind_tree::{
backend::{File, Rule, Term},
untyped,
};
/// Compiles a whole untyped book into an HVM [`File`], translating
/// each entry in declaration order. When `trace` is set, every entry
/// is compiled with tracing (see `compile_entry`).
pub fn compile_book(book: untyped::Book, trace: bool) -> File {
    let mut output = File {
        rules: Default::default(),
        smaps: Default::default(),
    };
    for (_name, entry) in book.entrs {
        compile_entry(&mut output, entry, trace);
    }
    output
}
/// Compiles a Rust string into an HVM character list built from
/// `String.cons`/`String.nil`, walking the characters from the end so
/// the resulting list preserves their order.
pub fn compile_str(val: &str) -> Box<Term> {
    let mut result = Box::new(Term::Ctr {
        name: String::from("String.nil"),
        args: vec![],
    });
    for chr in val.chars().rev() {
        result = Box::new(Term::Ctr {
            name: String::from("String.cons"),
            args: vec![Box::new(Term::U6O { numb: chr as u64 }), result],
        });
    }
    result
}
/// Compiles a single untyped expression into an HVM term.
///
/// `F60` literals are not supported by this backend yet (`todo!`),
/// and the `Err` node only exists after a reported failure, so
/// reaching it here is a compiler bug.
pub fn compile_term(expr: &untyped::Expr) -> Box<Term> {
    use untyped::ExprKind::*;
    match &expr.data {
        Var { name } => Box::new(Term::Var {
            name: name.to_string(),
        }),
        // Erasure info is dropped: HVM lambdas are untyped.
        Lambda { param, body, .. } => Box::new(Term::Lam {
            name: param.to_string(),
            body: compile_term(body),
        }),
        // Curry the argument list into nested applications.
        App { fun, args } => args.iter().fold(compile_term(fun), |func, arg| {
            Box::new(Term::App {
                func,
                argm: compile_term(arg),
            })
        }),
        // Functions and constructors share the same HVM representation.
        Fun { name, args } | Ctr { name, args } => Box::new(Term::Ctr {
            name: name.to_string(),
            args: args.iter().map(|x| compile_term(x)).collect(),
        }),
        Let { name, val, next } => Box::new(Term::Let {
            name: name.to_string(),
            expr: compile_term(val),
            body: compile_term(next),
        }),
        // `hvm::u60::new` fits the literal into HVM's 60-bit numbers.
        U60 { numb } => Box::new(Term::U6O {
            numb: u60::new(*numb),
        }),
        F60 { numb: _ } => todo!(),
        // The operator's `Display` form is used as the constructor name.
        Binary { op, left, right } => Box::new(Term::Ctr {
            name: op.to_string(),
            args: vec![compile_term(left), compile_term(right)],
        }),
        Str { val } => compile_str(val),
        Err => unreachable!("Internal Error: 'ERR' cannot be a relevant term"),
    }
}
/// Translates one rewrite rule: the left-hand side becomes the entry
/// name applied to the compiled patterns, the right-hand side is the
/// compiled body.
fn compile_rule(name: String, rule: untyped::Rule) -> Rule {
    let pats = rule.pats.iter().map(|pat| compile_term(pat)).collect();
    Rule {
        lhs: Box::new(Term::Ctr { name, args: pats }),
        rhs: compile_term(&rule.body),
    }
}
/// Compiles one entry of the book into HVM rules, pushing them into
/// `file`.
///
/// When the entry has a `trace` attribute (or `trace` is set
/// globally), the real rules are emitted under a `{name}__trace`
/// alias, and the public name is redefined to call `HVM.log` with the
/// entry's name before delegating to that alias.
fn compile_entry(file: &mut File, entry: Box<untyped::Entry>, trace: bool) {
    if entry.attrs.trace.is_some() || trace {
        // NOTE(review): the attribute's "with arguments" flag is not
        // consumed anywhere yet; tracing always logs only the name.
        let name_trace = format!("{}__trace", entry.name);
        for rule in entry.rules {
            file.rules.push(compile_rule(name_trace.clone(), rule))
        }
        // One fresh variable per argument of the original entry.
        // (format! borrows `x.0`; the previous `x.0.clone()` allocated
        // a String only to format it.)
        let args = entry
            .args
            .iter()
            .enumerate()
            .map(|(i, x)| {
                Box::new(Term::Var {
                    name: format!("_{}{}", i, x.0),
                })
            })
            .collect::<Vec<_>>();
        file.rules.push(Rule {
            lhs: Box::new(Term::Ctr {
                name: entry.name.to_string(),
                args: args.clone(),
            }),
            rhs: Box::new(Term::Ctr {
                name: "HVM.log".to_string(),
                args: vec![
                    compile_str(entry.name.to_str()),
                    Box::new(Term::Ctr {
                        name: name_trace,
                        args,
                    }),
                ],
            }),
        })
    } else {
        let name = entry.name.to_string();
        for rule in entry.rules {
            file.rules.push(compile_rule(name.clone(), rule))
        }
    }
}

View File

@ -0,0 +1,17 @@
[package]
name = "kind-target-kdl"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
kind-span = { path = "../kind-span" }
kind-tree = { path = "../kind-tree" }
kind-report = { path = "../kind-report" }
kind-derive = { path = "../kind-derive" }
kindelia_lang = { git = "https://github.com/developedby/Kindelia/", branch = "kdl-lang-crate" }
linked-hash-map = "0.5.6"
tiny-keccak = "2.0.2"
fxhash = "0.2.1"

View File

@ -0,0 +1,558 @@
use std::{fmt::Display, sync::mpsc::Sender};
use fxhash::FxHashMap;
use kind_report::data::Diagnostic;
use kind_tree::{symbol::QualifiedIdent, untyped};
use linked_hash_map::LinkedHashMap;
use tiny_keccak::Hasher;
pub use kindelia_lang::ast as kdl;
use crate::{errors::KdlError, GenericCompilationToHVMError};
/// Maximum length (in characters) of a Kindelia name.
pub const KDL_NAME_LEN: usize = 12;

/// Mask with the lowest 60 bits set — the largest value a U60 can
/// hold. Used to truncate results that may overflow 60 bits.
const U60_MAX: kdl::U120 = kdl::U120(0xFFFFFFFFFFFFFFF);

/// A compiled Kindelia file: constructor and function statements
/// keyed by their original (kind-side) names in insertion order, plus
/// the list of `run` statements.
#[derive(Debug)]
pub struct File {
    pub ctrs: LinkedHashMap<String, kdl::Statement>,
    pub funs: LinkedHashMap<String, kdl::Statement>,
    pub runs: Vec<kdl::Statement>,
}
/// Mutable state threaded through the KDL compilation of one book.
pub struct CompileCtx<'a> {
    // The output being built.
    file: File,
    // Kind entry name -> resolved Kindelia name.
    kdl_names: FxHashMap<String, kdl::Name>,
    // Names of the entries used as `kdl_state` initializers.
    kdl_states: Vec<String>,
    book: &'a untyped::Book,
    // Channel used to report diagnostics to the driver.
    sender: Sender<Box<dyn Diagnostic>>,
    // Set as soon as any diagnostic is emitted.
    failed: bool,
}
impl<'a> CompileCtx<'a> {
    /// Creates a fresh, empty compilation context over `book`,
    /// reporting diagnostics through `sender`.
    pub fn new(book: &'a untyped::Book, sender: Sender<Box<dyn Diagnostic>>) -> CompileCtx<'a> {
        CompileCtx {
            file: File {
                ctrs: Default::default(),
                funs: Default::default(),
                runs: Default::default(),
            },
            kdl_names: Default::default(),
            kdl_states: Default::default(),
            book,
            sender,
            failed: false,
        }
    }
    /// Sends a diagnostic to the reporting channel and marks the
    /// whole compilation as failed.
    pub fn send_err(&mut self, err: Box<dyn Diagnostic>) {
        self.sender.send(err).unwrap();
        self.failed = true;
    }
}
// Functions to generate a new name

/// Maps a 6-bit value onto the digit set `0-9 A-Z a-z _` (a
/// base-64-like alphabet of characters valid in Kindelia names).
fn encode_base64_u8(num: u8) -> char {
    let byte = match num {
        0..=9 => b'0' + num,
        10..=35 => b'A' + (num - 10),
        36..=61 => b'a' + (num - 36),
        _ => return '_',
    };
    byte as char
}

/// Encodes a 128-bit hash as a 12-character name, six bits per
/// character, least significant bits first.
fn u128_to_kdl_name(mut num: u128) -> String {
    let mut encoded = String::with_capacity(12);
    for _ in 0..12 {
        encoded.push(encode_base64_u8((num & 0x3f) as u8));
        num >>= 6;
    }
    encoded
}
/// Hashes `data` with Keccak-256 and keeps only 16 bytes of the
/// digest — enough material to derive a short, stable name.
fn keccak128(data: &[u8]) -> [u8; 16] {
    let mut hasher = tiny_keccak::Keccak::v256();
    let mut output = [0u8; 16];
    hasher.update(data);
    // finalize fills the 16-byte buffer from the squeezed digest.
    hasher.finalize(&mut output);
    output
}
/// Kindelia names are limited to `KDL_NAME_LEN` characters including
/// the namespace prefix. Names that fit are kept unchanged; longer
/// ones are replaced by a truncated base-64-style encoding of their
/// Keccak hash, which is deterministic across builds.
fn name_shortener(name: &QualifiedIdent, namespace: &str) -> QualifiedIdent {
    let max_fn_name = KDL_NAME_LEN - namespace.len();
    if name.to_str().len() > max_fn_name {
        let name_hash = keccak128(name.to_str().as_bytes());
        let name_hash = u128::from_le_bytes(name_hash);
        let name_hash = u128_to_kdl_name(name_hash);
        QualifiedIdent::new_static(&name_hash[..max_fn_name], None, name.range)
    } else {
        name.clone()
    }
}
/// Compiles a whole untyped book into a KDL [`File`].
///
/// First pass: resolve every entry's Kindelia name (the explicit
/// `kdl_name` attribute when present, otherwise the shortened/hashed
/// default) so later references can be looked up. Second pass:
/// compile each entry. Returns an opaque error if any diagnostic was
/// reported along the way.
pub fn compile_book(
    book: &untyped::Book,
    sender: Sender<Box<dyn Diagnostic>>,
    namespace: &str,
) -> Result<File, GenericCompilationToHVMError> {
    let mut ctx = CompileCtx::new(book, sender);
    for (name, entry) in &book.entrs {
        let new_name = entry
            .attrs
            .kdl_name
            .clone()
            .map(|x| x.to_string())
            .unwrap_or_else(|| name_shortener(&entry.name, namespace).to_string());
        if let Ok(new_name) = kdl::Name::from_str(&new_name) {
            ctx.kdl_names.insert(name.clone(), new_name);
        } else {
            ctx.send_err(Box::new(KdlError::InvalidVarName(entry.name.range)));
        }
    }
    for (_name, entry) in &book.entrs {
        compile_entry(&mut ctx, entry);
    }
    if ctx.failed {
        return Err(GenericCompilationToHVMError);
    }
    Ok(ctx.file)
}
/// Compiles one rule into KDL: the left-hand side is the entry's
/// resolved Kindelia name applied to the compiled patterns.
pub fn compile_rule(ctx: &mut CompileCtx, rule: &untyped::Rule) -> kdl::Rule {
    let name = *ctx.kdl_names.get(rule.name.to_str()).unwrap();
    let mut args = Vec::with_capacity(rule.pats.len());
    for pat in rule.pats.iter() {
        args.push(compile_expr(ctx, pat));
    }
    kdl::Rule {
        lhs: kdl::Term::fun(name, args),
        rhs: compile_expr(ctx, &rule.body),
    }
}
/// Sentinel term emitted in place of expressions that failed to
/// compile; by then an error has already been reported, so the value
/// (99999 is arbitrary) should never survive a successful build.
pub fn err_term() -> kdl::Term {
    kdl::Term::Num {
        numb: kdl::U120::new(99999).unwrap(),
    }
}
pub fn compile_expr(ctx: &mut CompileCtx, expr: &untyped::Expr) -> kdl::Term {
use crate::untyped::ExprKind as From;
use kdl::Term as To;
match &expr.data {
From::App { fun, args } => {
let mut expr = compile_expr(ctx, fun);
for binding in args {
let body = compile_expr(ctx, binding);
expr = To::App {
func: Box::new(expr),
argm: Box::new(body),
};
}
expr
}
From::Binary { op, left, right } => {
use kind_tree::Operator as Op;
let oper = compile_oper(op);
match op {
// These operations occupy more bits on overflow
// So we truncate them
Op::Add | Op::Sub | Op::Mul => {
let val0 = Box::new(compile_expr(ctx, left));
let val1 = Box::new(compile_expr(ctx, right));
let expr = Box::new(To::Op2 { oper, val0, val1 });
let trunc = Box::new(To::Num { numb: U60_MAX });
To::Op2 {
oper: kdl::Oper::And,
val0: expr,
val1: trunc,
}
}
// These operations need to wrap around every 60 bits
// Eg: (<< n 60) = n
Op::Shl | Op::Shr => {
let val0 = Box::new(compile_expr(ctx, left));
let right = Box::new(compile_expr(ctx, right));
let sixty = Box::new(To::Num {
numb: kdl::U120(60),
});
let val1 = Box::new(To::Op2 {
oper: kdl::Oper::Mod,
val0: right,
val1: sixty,
});
To::Op2 { oper, val0, val1 }
}
// Other operations don't overflow
// Div, Mod, And, Or, Xor, Eql, Neq, Gtn, Gte, Ltn, Lte
_ => {
let val0 = Box::new(compile_expr(ctx, left));
let val1 = Box::new(compile_expr(ctx, right));
To::Op2 { oper, val0, val1 }
}
}
}
From::Ctr { name, args } => {
// Convert U120 numbers into the native kindelia representation
// Only possible if both U60s are U60 terms
if name.to_str() == "U120.new" {
if let (From::U60 { numb: hi }, From::U60 { numb: lo }) =
(&args[0].data, &args[1].data)
{
let numb = kdl::U120(((*hi as u128) << 60) | (*lo as u128));
return To::Num { numb };
}
}
let name = ctx.kdl_names.get(name.to_str()).unwrap().clone();
let args = args.iter().map(|x| compile_expr(ctx, &x)).collect();
To::Ctr { name, args }
}
From::Fun { name, args } => {
match name.to_str() {
// Special inline compilation for
// some numeric function applications
// Add with no boundary check is just a normal add
"U60.add_unsafe" => To::Op2 {
oper: kdl::Oper::Add,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
// U60s are already stored in 120 bits
"U60.to_u120" => compile_expr(ctx, &args[0]),
// Truncate to 60 bits
"U120.to_u60" => To::Op2 {
oper: kdl::Oper::And,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(To::Num { numb: U60_MAX }),
},
// Compilation for U120 numeric operations
"U120.add" => To::Op2 {
oper: kdl::Oper::Add,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.sub" => To::Op2 {
oper: kdl::Oper::Add,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.mul" => To::Op2 {
oper: kdl::Oper::Mul,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.div" => To::Op2 {
oper: kdl::Oper::Div,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.mod" => To::Op2 {
oper: kdl::Oper::Mod,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.num_equal" => To::Op2 {
oper: kdl::Oper::Eql,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.num_not_equal" => To::Op2 {
oper: kdl::Oper::Neq,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.shift_left" => To::Op2 {
oper: kdl::Oper::Shl,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.shift_right" => To::Op2 {
oper: kdl::Oper::Shr,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.num_less_than" => To::Op2 {
oper: kdl::Oper::Ltn,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.num_less_equal" => To::Op2 {
oper: kdl::Oper::Lte,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.num_greater_than" => To::Op2 {
oper: kdl::Oper::Gtn,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.num_greater_equal" => To::Op2 {
oper: kdl::Oper::Gte,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.bitwise_and" => To::Op2 {
oper: kdl::Oper::And,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.bitwise_or" => To::Op2 {
oper: kdl::Oper::Or,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
"U120.bitwise_xor" => To::Op2 {
oper: kdl::Oper::Xor,
val0: Box::new(compile_expr(ctx, &args[0])),
val1: Box::new(compile_expr(ctx, &args[1])),
},
_ => {
let name = ctx.kdl_names.get(name.to_str()).unwrap().clone();
let args = args.iter().map(|x| compile_expr(ctx, x)).collect();
To::Fun { name, args }
}
}
}
From::Lambda {
param,
body,
erased: _,
} => {
let name = kdl::Name::from_str(param.to_str());
if let Ok(name) = name {
let body = Box::new(compile_expr(ctx, body));
To::Lam { name, body }
} else {
ctx.send_err(Box::new(KdlError::InvalidVarName(param.range)));
err_term()
}
}
From::Let { name, val, next } => {
let res_name = kdl::Name::from_str(name.to_str());
if let Ok(name) = res_name {
let expr = Box::new(compile_expr(ctx, next));
let func = Box::new(To::Lam { name, body: expr });
let argm = Box::new(compile_expr(ctx, &val));
To::App { func, argm }
} else {
ctx.send_err(Box::new(KdlError::InvalidVarName(name.range)));
err_term()
}
}
From::U60 { numb } => To::Num {
numb: kdl::U120(*numb as u128),
},
From::F60 { numb: _ } => {
ctx.send_err(Box::new(KdlError::FloatUsed(expr.range)));
err_term()
}
From::Var { name } => {
let res_name = kdl::Name::from_str(name.to_str());
if let Ok(name) = res_name {
To::Var { name }
} else {
ctx.send_err(Box::new(KdlError::InvalidVarName(name.range)));
err_term()
}
}
From::Str { val } => {
let nil = kdl::Term::Ctr {
name: *ctx.kdl_names.get("String.nil").unwrap(),
args: vec![],
};
let cons_name = *ctx.kdl_names.get("String.cons").unwrap();
let cons = |numb: u128, next| kdl::Term::Ctr {
name: cons_name,
args: vec![
kdl::Term::Num {
numb: kdl::U120::new(numb).unwrap(),
},
next,
],
};
val.chars().rfold(nil, |rest, chr| cons(chr as u128, rest))
}
From::Err => unreachable!("Should not have errors inside generation"),
}
}
/// Compiles one entry into the context's output file.
///
/// Entries marked `kdl_erase` are skipped. Entries marked `kdl_run`
/// must take no arguments and have exactly one rule, and become `run`
/// statements. `U120.new` gets a hand-written implementation; every
/// other entry is compiled as an ordinary ctr/fun statement.
pub fn compile_entry(ctx: &mut CompileCtx, entry: &untyped::Entry) {
    if entry.attrs.kdl_erase {
        return;
    }
    if entry.attrs.kdl_run {
        if !entry.args.is_empty() {
            ctx.send_err(Box::new(KdlError::ShouldNotHaveArguments(entry.range)));
        } else if entry.rules.len() != 1 {
            ctx.send_err(Box::new(KdlError::ShouldHaveOnlyOneRule(entry.range)));
        } else {
            let expr = compile_expr(ctx, &entry.rules[0].body);
            let statement = kdl::Statement::Run { expr, sign: None };
            ctx.file.runs.push(statement);
        }
    } else {
        match entry.name.to_str() {
            "U120.new" => compile_u120_new(ctx, entry),
            _ => compile_common_function(ctx, entry),
        }
    }
}
/// Compiles an entry that is neither erased, a `run`, nor `U120.new`.
///
/// Entries without rules become `ctr` statements; entries with rules
/// become `fun` statements. A `kdl_state` attribute must point at a
/// zero-argument, single-rule entry whose body becomes the function's
/// initial state.
fn compile_common_function(ctx: &mut CompileCtx, entry: &untyped::Entry) {
    let name = ctx.kdl_names.get(entry.name.to_str()).cloned().unwrap();
    let mut args = Vec::new();
    // Arguments whose names are invalid are reported and dropped.
    for (name, range, _strictness) in &entry.args {
        if let Ok(name) = kdl::Name::from_str(name) {
            args.push(name)
        } else {
            ctx.send_err(Box::new(KdlError::InvalidVarName(*range)));
        }
    }
    if entry.rules.is_empty() {
        // Functions with no rules become Ctr
        let sttm = kdl::Statement::Ctr {
            name,
            args,
            sign: None,
        };
        ctx.file.ctrs.insert(entry.name.to_string(), sttm);
    } else {
        // Functions with rules become Fun
        let rules = entry
            .rules
            .iter()
            .map(|rule| compile_rule(ctx, rule))
            .collect::<Vec<_>>();
        let func = kdl::Func { rules };
        let init = if let Some(state_name) = &entry.attrs.kdl_state {
            let init_entry = ctx.book.entrs.get(state_name.to_str());
            if let Some(entry) = init_entry {
                if !entry.args.is_empty() {
                    ctx.send_err(Box::new(KdlError::ShouldNotHaveArguments(entry.range)));
                    None
                } else if entry.rules.len() != 1 {
                    ctx.send_err(Box::new(KdlError::ShouldHaveOnlyOneRule(entry.range)));
                    None
                } else {
                    ctx.kdl_states.push(state_name.to_string());
                    Some(compile_expr(ctx, &entry.rules[0].body))
                }
            } else {
                ctx.send_err(Box::new(KdlError::NoInitEntry(state_name.range)));
                None
            }
        } else {
            None
        };
        let sttm = kdl::Statement::Fun {
            name,
            args,
            func,
            init,
            sign: None,
        };
        ctx.file.funs.insert(entry.name.to_string(), sttm);
    }
}
/// Emits the hand-written implementation of `U120.new`, built
/// directly as a KDL `fun` statement instead of compiling the
/// kind-side definition.
fn compile_u120_new(ctx: &mut CompileCtx, entry: &untyped::Entry) {
    // U120.new hi lo = (hi << 60) | lo
    let hi_name = kdl::Name::from_str("hi").unwrap();
    let lo_name = kdl::Name::from_str("lo").unwrap();
    let hi_var = kdl::Term::Var {
        name: hi_name.clone(),
    };
    let lo_var = kdl::Term::Var {
        name: lo_name.clone(),
    };
    let name = ctx.kdl_names.get(entry.name.to_str()).cloned().unwrap();
    let args = vec![hi_name, lo_name];
    let rules = vec![kdl::Rule {
        lhs: kdl::Term::Fun {
            name: name.clone(),
            args: vec![hi_var.clone(), lo_var.clone()],
        },
        rhs: kdl::Term::Op2 {
            oper: kdl::Oper::Or,
            val0: Box::new(kdl::Term::Op2 {
                oper: kdl::Oper::Shl,
                val0: Box::new(hi_var),
                val1: Box::new(kdl::Term::Num {
                    numb: kdl::U120(60),
                }),
            }),
            val1: Box::new(lo_var),
        },
    }];
    let func = kdl::Func { rules };
    let sttm = kdl::Statement::Fun {
        name,
        args,
        func,
        init: None,
        sign: None,
    };
    ctx.file.funs.insert(entry.name.to_string(), sttm);
}
impl Display for File {
    /// Prints constructors first, then functions, then runs, with a
    /// blank line separating the ctr and fun sections when both exist.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        for (_, sttm) in self.ctrs.iter() {
            writeln!(f, "{}", sttm)?;
        }
        let needs_separator = !(self.ctrs.is_empty() || self.funs.is_empty());
        if needs_separator {
            writeln!(f)?;
        }
        for (_, sttm) in self.funs.iter() {
            writeln!(f, "{}", sttm)?;
        }
        for sttm in self.runs.iter() {
            writeln!(f, "{}", sttm)?;
        }
        Ok(())
    }
}
/// One-to-one mapping from kind's binary operators to Kindelia's.
fn compile_oper(oper: &kind_tree::Operator) -> kdl::Oper {
    use kdl::Oper as To;
    use kind_tree::Operator as From;
    match oper {
        From::Add => To::Add,
        From::Sub => To::Sub,
        From::Mul => To::Mul,
        From::Div => To::Div,
        From::Mod => To::Mod,
        From::Shl => To::Shl,
        From::Shr => To::Shr,
        From::Eql => To::Eql,
        From::Neq => To::Neq,
        From::Ltn => To::Ltn,
        From::Lte => To::Lte,
        From::Gte => To::Gte,
        From::Gtn => To::Gtn,
        From::And => To::And,
        From::Xor => To::Xor,
        From::Or => To::Or,
    }
}

View File

@ -0,0 +1,97 @@
use kind_report::data::{Color, Diagnostic, DiagnosticFrame, Marker, Severity};
use kind_span::Range;
/// Diagnostics produced while compiling to Kindelia (KDL). Every
/// variant records the source range it refers to.
pub enum KdlError {
    /// The identifier is not a valid Kindelia name.
    InvalidVarName(Range),
    /// A `run`/state entry was declared with arguments.
    ShouldNotHaveArguments(Range),
    /// A `run`/state entry has more or fewer than one rule.
    ShouldHaveOnlyOneRule(Range),
    /// A `kdl_state` attribute points at a missing entry.
    NoInitEntry(Range),
    /// An F60 literal appeared; Kindelia has no floats.
    FloatUsed(Range),
}
impl Diagnostic for KdlError {
fn get_syntax_ctx(&self) -> Option<kind_span::SyntaxCtxIndex> {
match self {
KdlError::InvalidVarName(range) => Some(range.ctx),
KdlError::ShouldNotHaveArguments(range) => Some(range.ctx),
KdlError::ShouldHaveOnlyOneRule(range) => Some(range.ctx),
KdlError::NoInitEntry(range) => Some(range.ctx),
KdlError::FloatUsed(range) => Some(range.ctx),
}
}
fn to_diagnostic_frame(&self) -> kind_report::data::DiagnosticFrame {
match self {
KdlError::InvalidVarName(range) => DiagnosticFrame {
code: 600,
severity: Severity::Error,
title: "Invalid variable name for Kindelia.".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
KdlError::ShouldNotHaveArguments(range) => DiagnosticFrame {
code: 601,
severity: Severity::Error,
title: "This type of entry should not have arguments".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
KdlError::ShouldHaveOnlyOneRule(range) => DiagnosticFrame {
code: 603,
severity: Severity::Error,
title: "This entry should only have one rule.".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
KdlError::NoInitEntry(range) => DiagnosticFrame {
code: 604,
severity: Severity::Error,
title: "This entry must have a init entry".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
KdlError::FloatUsed(range) => DiagnosticFrame {
code: 605,
severity: Severity::Error,
title: "Found F60 in kindelia program".to_string(),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: *range,
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
}
}
}

View File

@ -0,0 +1,267 @@
use fxhash::{FxHashMap, FxHashSet};
use kind_span::Range;
use kind_tree::symbol::{Ident, QualifiedIdent};
use kind_tree::untyped::{self, Book, Entry, Expr, ExprKind, Rule};
use linked_hash_map::LinkedHashMap;
use crate::subst::subst;
/// A rule must be split when any constructor pattern contains a
/// nested rigid pattern (constructor or numeric literal) in one of
/// its fields, i.e. when it matches more than one layer deep.
fn must_split(rule: &Rule) -> bool {
    rule.pats.iter().any(|pat| match &pat.data {
        ExprKind::Ctr { args, .. } => args.iter().any(|arg| {
            matches!(
                arg.data,
                ExprKind::Ctr { .. } | ExprKind::U60 { .. } | ExprKind::F60 { .. }
            )
        }),
        _ => false,
    })
}
fn matches_together(a: &Rule, b: &Rule) -> (bool, bool) {
let mut same_shape = true;
for (a_pat, b_pat) in a.pats.iter().zip(&b.pats) {
match (&a_pat.data, &b_pat.data) {
(ExprKind::Ctr { name: an, .. }, ExprKind::Ctr { name: bn, .. }) if an != bn => {
return (false, false);
}
(ExprKind::U60 { numb: a_numb }, ExprKind::U60 { numb: b_numb })
if a_numb != b_numb =>
{
return (false, false);
}
(ExprKind::F60 { numb: a_numb }, ExprKind::F60 { numb: b_numb })
if a_numb != b_numb =>
{
return (false, false);
}
(ExprKind::Ctr { .. }, ExprKind::U60 { .. }) => {
return (false, false);
}
(ExprKind::Ctr { .. }, ExprKind::F60 { .. }) => {
return (false, false);
}
(ExprKind::U60 { .. }, ExprKind::Ctr { .. }) => {
return (false, false);
}
(ExprKind::U60 { .. }, ExprKind::F60 { .. }) => {
return (false, false);
}
(ExprKind::F60 { .. }, ExprKind::U60 { .. }) => {
return (false, false);
}
(ExprKind::F60 { .. }, ExprKind::Ctr { .. }) => {
return (false, false);
}
(ExprKind::Ctr { .. }, ExprKind::Var { .. }) => {
same_shape = false;
}
(ExprKind::U60 { .. }, ExprKind::Var { .. }) => {
same_shape = false;
}
(ExprKind::F60 { .. }, ExprKind::Var { .. }) => {
same_shape = false;
}
_ => {}
}
}
(true, same_shape)
}
/// Splits a rule with nested patterns into an "old" rule that matches
/// only one layer of constructors and delegates to a fresh entry
/// (`{name}{num}_`) that matches the inner layer.
///
/// Every later rule of `entry` that can match together with `rule`
/// (see `matches_together`) is merged into the new entry; rules with
/// exactly the same shape are recorded in `skip` so the caller does
/// not process them again. Returns the rewritten rule plus the
/// (recursively flattened) new entries.
fn split_rule(
    rule: &Rule,
    entry: &Entry,
    i: usize,
    name_count: &mut u64,
    skip: &mut FxHashSet<usize>,
) -> (Rule, Vec<Entry>) {
    // Fresh name for the auxiliary entry.
    let num = *name_count;
    *name_count += 1;
    let new_entry_name = QualifiedIdent::new_static(
        &format!("{}{}_", entry.name.to_str(), num),
        None,
        entry.range,
    );
    // The auxiliary entry must not inherit an explicit kdl name.
    let mut new_entry_attrs = entry.attrs.clone();
    new_entry_attrs.kdl_name = None;
    let mut new_entry_rules: Vec<Rule> = Vec::new();
    // Patterns of the rewritten rule and the arguments it forwards.
    let mut old_rule_pats: Vec<Box<Expr>> = Vec::new();
    let mut old_rule_body_args: Vec<Box<Expr>> = Vec::new();
    let mut var_count = 0;
    for pat in &rule.pats {
        match &pat.data {
            ExprKind::Var { name } => {
                old_rule_pats.push(pat.clone());
                old_rule_body_args.push(Expr::var(name.clone()));
            }
            // Number literals are matched here and not forwarded.
            ExprKind::U60 { .. } | ExprKind::F60 { .. } => {
                old_rule_pats.push(pat.clone());
            }
            ExprKind::Ctr { name, args } => {
                // Replace nested rigid fields by fresh `.xN` variables
                // and forward every field to the auxiliary entry.
                let mut new_pat_args = Vec::new();
                for field in args {
                    let arg = match &field.data {
                        ExprKind::Ctr { .. } | ExprKind::U60 { .. } | ExprKind::F60 { .. } => {
                            let name = Ident::new(format!(".x{}", var_count), field.range);
                            var_count += 1;
                            Expr::var(name)
                        }
                        ExprKind::Var { .. } => field.clone(),
                        _ => panic!(
                            "Internal Error: Cannot use this kind of expression during flattening"
                        ),
                    };
                    new_pat_args.push(arg.clone());
                    old_rule_body_args.push(arg);
                }
                old_rule_pats.push(Expr::ctr(pat.range, name.clone(), new_pat_args));
            }
            _ => unreachable!("Internal Error: Invalid constructor while decoding pats"),
        }
    }
    let old_rule_body = Expr::fun(rule.range, new_entry_name.clone(), old_rule_body_args);
    let old_rule = Rule {
        name: entry.name.clone(),
        pats: old_rule_pats,
        body: old_rule_body,
        range: rule.range,
    };
    // Collect rules from `i` onwards (including `rule` itself) that
    // can match together with `rule` into the auxiliary entry.
    for (j, other) in entry.rules.iter().enumerate().skip(i) {
        let (compatible, same_shape) = matches_together(rule, other);
        if compatible {
            if same_shape {
                skip.insert(j);
            }
            let mut new_rule_pats = Vec::new();
            let mut new_rule_body = other.body.clone();
            for (rule_pat, other_pat) in rule.pats.iter().zip(&other.pats) {
                match (&rule_pat.data, &other_pat.data) {
                    (ExprKind::Ctr { .. }, ExprKind::Ctr { args: pat_args, .. }) => {
                        new_rule_pats.extend(pat_args.clone());
                    }
                    (ExprKind::Ctr { name, args }, ExprKind::Var { name: opat_name }) => {
                        // `other` matches this position with a variable:
                        // expand it into a constructor of fresh variables
                        // and substitute that into the body.
                        let mut new_ctr_args = vec![];
                        for _ in 0..args.len() {
                            let new_arg =
                                Expr::var(Ident::new(format!(".x{}", var_count), rule_pat.range));
                            var_count += 1;
                            new_ctr_args.push(new_arg.clone());
                            new_rule_pats.push(new_arg);
                        }
                        let new_ctr = Expr::ctr(name.range, name.clone(), new_ctr_args);
                        subst(&mut new_rule_body, opat_name, &new_ctr);
                    }
                    (ExprKind::Var { .. }, _) => {
                        new_rule_pats.push(other_pat.clone());
                    }
                    (ExprKind::U60 { .. }, ExprKind::U60 { .. }) => (),
                    (ExprKind::F60 { .. }, ExprKind::F60 { .. }) => (),
                    (ExprKind::U60 { .. }, ExprKind::Var { name }) => {
                        subst(&mut new_rule_body, &name, rule_pat);
                    }
                    (ExprKind::F60 { .. }, ExprKind::Var { name }) => {
                        subst(&mut new_rule_body, &name, rule_pat);
                    }
                    _ => {
                        panic!("Internal error. Please report."); // not possible since it matches
                    }
                }
            }
            let new_rule = Rule {
                name: new_entry_name.clone(),
                pats: new_rule_pats,
                body: new_rule_body,
                range: new_entry_name.range,
            };
            new_entry_rules.push(new_rule);
        }
    }
    // `rule` always matches together with itself.
    assert!(!new_entry_rules.is_empty());
    let new_entry_args = (0..new_entry_rules[0].pats.len())
        .map(|n| (format!("x{}", n), Range::ghost_range(), false))
        .collect();
    let new_entry = Entry {
        name: new_entry_name,
        args: new_entry_args,
        rules: new_entry_rules,
        attrs: new_entry_attrs,
        range: entry.range,
    };
    // The auxiliary entry may still contain nested patterns.
    let new_split_entries = flatten_entry(&new_entry);
    (old_rule, new_split_entries)
}
/// Flattens a single entry: every rule that matches nested patterns
/// is split (see `split_rule`) into a shallow rule plus auxiliary
/// entries. The rewritten original entry is returned last.
fn flatten_entry(entry: &Entry) -> Vec<Entry> {
    let mut name_count = 0;
    let mut skip: FxHashSet<usize> = FxHashSet::default();
    let mut new_entries: Vec<Entry> = Vec::new();
    let mut old_entry_rules: Vec<Rule> = Vec::new();
    for (i, rule) in entry.rules.iter().enumerate() {
        // Rules already absorbed by an earlier split are skipped.
        if skip.contains(&i) {
            continue;
        }
        if must_split(rule) {
            let (old_rule, split_entries) = split_rule(rule, entry, i, &mut name_count, &mut skip);
            old_entry_rules.push(old_rule);
            new_entries.extend(split_entries);
        } else {
            old_entry_rules.push(rule.clone());
        }
    }
    new_entries.push(Entry {
        name: entry.name.clone(),
        args: entry.args.clone(),
        rules: old_entry_rules,
        range: entry.range,
        attrs: entry.attrs.clone(),
    });
    new_entries
}
/// Flattens every entry of a book so that all rules match at most one
/// layer of constructors, preserving the original declaration order.
pub fn flatten(book: untyped::Book) -> untyped::Book {
    let mut book = book;
    let mut names = FxHashMap::default();
    let mut entrs = LinkedHashMap::default();
    for name in book.names.keys() {
        let entry = book.entrs.remove(name).unwrap();
        for flat_entry in flatten_entry(&entry) {
            let key = flat_entry.name.to_string();
            names.insert(key.clone(), entrs.len());
            entrs.insert(key, Box::new(flat_entry));
        }
    }
    Book { names, entrs }
}

View File

@ -0,0 +1,39 @@
use std::{sync::mpsc::Sender, fmt::Display, error::Error};
use flatten::flatten;
use kind_report::data::Diagnostic;
use kind_tree::untyped;
pub use compile::File;
mod compile;
mod errors;
mod flatten;
mod linearize;
mod subst;
/// Opaque failure marker returned when at least one diagnostic was
/// emitted during compilation; the error details travel through the
/// diagnostic channel rather than in this value.
/// NOTE(review): the name mentions HVM but this type lives in the KDL
/// target crate — confirm whether it is meant to be shared/renamed.
#[derive(Debug)]
pub struct GenericCompilationToHVMError;

impl Display for GenericCompilationToHVMError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "generic compilation to hvm error")
    }
}

impl Error for GenericCompilationToHVMError { }
/// Compiles an untyped book down to a Kindelia (KDL) file: rules are
/// first flattened to a single pattern-match layer, then compiled to
/// KDL statements, and finally linearized so every variable is used
/// affinely.
pub fn compile_book(
    book: untyped::Book,
    sender: Sender<Box<dyn Diagnostic>>,
    namespace: &str,
) -> Result<compile::File, GenericCompilationToHVMError> {
    // TODO: Remove kdl_states (maybe check if they're ever called?)
    // TODO: Convert to some sort of Kindelia.Contract
    let flattened = flatten(book);
    let file = compile::compile_book(&flattened, sender, namespace)?;
    let file = linearize::linearize_file(file);
    Ok(file)
}

View File

@ -0,0 +1,331 @@
// This modules makes all variable usages linear and with a unique name. That has the following effect:
// - All variables are renamed to have a global unique name.
// - All variables are linearized.
// - If they're used more than once, dups are inserted.
// - If they're used once, nothing changes.
// - If they're never used, their name is changed to "*"
// Example:
// - sanitizing: `(Foo a b) = (+ a a)`
// - results in: `(Foo x0 *) = dup x0.0 x0.1 = x0; (+ x0.0 x0.1)`
// The algorithm was copied from the hvm
// TODO: This is inserting unneeded `let`s for all linear rule variables
use crate::File;
use fxhash::FxHashMap;
use kindelia_lang::ast::{Func, Name, Rule, Statement, Term};
use linked_hash_map::LinkedHashMap;
/// State used while linearizing a single rule.
pub struct LinearizeCtx {
    // How many times each generated name has been used so far.
    uses: FxHashMap<Name, u64>,
    // Mapping from original names to their generated replacements.
    name_table: LinkedHashMap<Name, Name>,
    // Counter used to mint fresh `x{N}` names.
    name_count: u64,
}
impl LinearizeCtx {
    /// Mints a fresh, rule-unique name of the form `x{N}`.
    fn create_name(&mut self) -> Name {
        let name = Name::from_str(&format!("x{}", self.name_count)).unwrap();
        self.name_count += 1;
        name
    }
    /// Creates an empty context (no names generated yet).
    fn new() -> Self {
        LinearizeCtx {
            uses: Default::default(),
            name_table: Default::default(),
            name_count: 0,
        }
    }
    // Pass through the lhs of the function generating new names
    // for every variable found in the style described before with
    // the fresh function. Also checks if rule's left side is valid.
    // Panics (unreachable!) on non-flat left-hand sides: only a Fun
    // applied to Var, one-layer Ctr, or Num parameters is accepted.
    fn create_param_names(&mut self, rule: &Rule) {
        if let Term::Fun { name: _, args } = &rule.lhs {
            for arg in args {
                match arg {
                    Term::Var { name } => {
                        let new_name = self.create_name();
                        self.name_table.insert(*name, new_name);
                    }
                    Term::Ctr { name: _, args } => {
                        for arg in args {
                            if let Term::Var { name } = arg {
                                let new_name = self.create_name();
                                self.name_table.insert(*name, new_name);
                            } else {
                                unreachable!(); // We expect a flat rule
                            }
                        }
                    }
                    Term::Num { .. } => (),
                    _ => unreachable!(
                        "Invalid left-hand side parameter. Expected Var, Ctr or Num, got {:?}",
                        arg
                    ),
                }
            }
        } else {
            unreachable!(
                "Invalid left-hand side term. Expected Ctr, got {:?}",
                rule.lhs
            );
        }
    }
}
/// Linearizes every `run` expression and every function rule in the
/// file. Constructor statements need no linearization and are kept
/// as-is. Panics if the statement lists contain unexpected variants.
pub fn linearize_file(file: File) -> File {
    let mut runs = Vec::new();
    for stmt in file.runs {
        if let Statement::Run { expr, sign: _ } = stmt {
            // Run expressions are independent of any rule context.
            let expr = linearize_term_independent(&expr);
            let stmt = Statement::Run {
                expr: *expr,
                sign: None,
            };
            runs.push(stmt);
        } else {
            unreachable!();
        }
    }
    let mut funs: LinkedHashMap<_, _> = Default::default();
    for (kind_name, stmt) in file.funs {
        if let Statement::Fun {
            name,
            args,
            func,
            init,
            sign: _,
        } = stmt
        {
            // Initial state expressions are also rule-independent.
            let init = init.map(|x| *linearize_term_independent(&x));
            let mut rules: Vec<_> = Default::default();
            for rule in func.rules {
                let rule = linearize_rule(rule);
                rules.push(rule);
            }
            let func = Func { rules };
            let stmt = Statement::Fun {
                name,
                args,
                func,
                init,
                sign: None,
            };
            funs.insert(kind_name, stmt);
        } else {
            unreachable!("Expected list of Funs, found {:?}", stmt);
        }
    }
    let ctrs = file.ctrs;
    File { ctrs, funs, runs }
}
/// Linearizes one rule: fresh parameter names are generated from the
/// left-hand side, both sides are linearized (the right-hand side
/// first, so variable usage counts are collected), and a `dup` chain
/// is inserted into the body for every generated name.
pub fn linearize_rule(rule: Rule) -> Rule {
    let mut ctx = LinearizeCtx::new();
    ctx.create_param_names(&rule);
    let mut rhs = linearize_term(&mut ctx, &rule.rhs, false);
    let lhs = linearize_term(&mut ctx, &rule.lhs, true);
    let vals: Vec<Name> = ctx.name_table.values().map(Name::clone).collect();
    for val in vals {
        let expr = Box::new(Term::Var { name: val });
        rhs = dup_var(&mut ctx, &val, expr, rhs);
    }
    Rule {
        lhs: *lhs,
        rhs: *rhs,
    }
}
/// Linearizes a single term.
///
/// On the left-hand side (`lhs == true`) variables are simply renamed
/// to their generated names (and erased to "*" when unused). On the
/// right-hand side each use of a variable becomes `name.{use_index}`,
/// so `dup_var` can later insert the `dup` chains that make every
/// variable affine.
pub fn linearize_term(ctx: &mut LinearizeCtx, term: &Term, lhs: bool) -> Box<Term> {
    let term = match term {
        Term::Var { name } => {
            if lhs {
                let mut name = *ctx.name_table.get(name).unwrap_or(name);
                rename_erased(ctx, &mut name);
                Term::Var { name }
            } else {
                // create a var with the name generated before
                // concatenated with '.{{times_used}}'
                if let Some(name) = ctx.name_table.get(name) {
                    let used = *ctx
                        .uses
                        .entry(*name)
                        .and_modify(|x| *x += 1)
                        .or_insert(1);
                    let name = Name::from_str(&format!("{}.{}", name, used - 1)).unwrap(); // TODO: Think if this errs or not
                    Term::Var { name }
                } else {
                    unreachable!("Unbound variable '{}' in kdl compilation", name.to_string());
                }
            }
        }
        Term::Dup {
            nam0,
            nam1,
            expr,
            body,
        } => {
            let new_nam0 = ctx.create_name();
            let new_nam1 = ctx.create_name();
            let expr = linearize_term(ctx, expr, lhs);
            // Save any shadowed bindings so they can be restored once
            // the body has been processed.
            let got_0 = ctx.name_table.remove(nam0);
            // FIX: this previously removed `nam0` a second time, so
            // any outer binding of `nam1` was lost instead of saved.
            let got_1 = ctx.name_table.remove(nam1);
            ctx.name_table.insert(*nam0, new_nam0);
            ctx.name_table.insert(*nam1, new_nam1);
            let body = linearize_term(ctx, body, lhs);
            ctx.name_table.remove(nam0);
            if let Some(x) = got_0 {
                ctx.name_table.insert(*nam0, x);
            }
            ctx.name_table.remove(nam1);
            if let Some(x) = got_1 {
                ctx.name_table.insert(*nam1, x);
            }
            let nam0 = Name::from_str(&format!("{}{}", new_nam0, ".0")).unwrap();
            let nam1 = Name::from_str(&format!("{}{}", new_nam1, ".0")).unwrap();
            Term::Dup {
                nam0,
                nam1,
                expr,
                body,
            }
        }
        Term::Lam { name, body } => {
            let mut new_name = ctx.create_name();
            // Shadow `name` while linearizing the body, restoring any
            // outer binding afterwards.
            let got_name = ctx.name_table.remove(name);
            ctx.name_table.insert(*name, new_name);
            let body = linearize_term(ctx, body, lhs);
            ctx.name_table.remove(name);
            if let Some(x) = got_name {
                ctx.name_table.insert(*name, x);
            }
            // Insert dups for the bound variable's uses.
            let expr = Box::new(Term::Var {
                name: new_name,
            });
            let body = dup_var(ctx, &new_name, expr, body);
            rename_erased(ctx, &mut new_name);
            Term::Lam {
                name: new_name,
                body,
            }
        }
        Term::App { func, argm } => {
            let func = linearize_term(ctx, func, lhs);
            let argm = linearize_term(ctx, argm, lhs);
            Term::App { func, argm }
        }
        Term::Ctr { name, args } => {
            let mut new_args = Vec::with_capacity(args.len());
            for arg in args {
                let arg = linearize_term(ctx, arg, lhs);
                new_args.push(*arg);
            }
            Term::Ctr {
                name: *name,
                args: new_args,
            }
        }
        Term::Fun { name, args } => {
            let mut new_args = Vec::with_capacity(args.len());
            for arg in args {
                let arg = linearize_term(ctx, arg, lhs);
                new_args.push(*arg);
            }
            Term::Fun {
                name: *name,
                args: new_args,
            }
        }
        Term::Num { numb } => {
            Term::Num { numb: *numb }
        }
        Term::Op2 { oper, val0, val1 } => {
            let val0 = linearize_term(ctx, val0, lhs);
            let val1 = linearize_term(ctx, val1, lhs);
            Term::Op2 {
                oper: *oper,
                val0,
                val1,
            }
        }
    };
    Box::new(term)
}
// Linearizes a term in isolation (e.g. a run statement): it does not belong
// to any rule, so a throwaway context is enough and `lhs` is always false.
pub fn linearize_term_independent(term: &Term) -> Box<Term> {
    let mut ctx = LinearizeCtx::new();
    linearize_term(&mut ctx, term, false)
}
/// Replaces `name` with the erased placeholder (`Name::NONE`) when the
/// variable is never used on the rhs: either zero recorded uses, or no
/// entry in `ctx.uses` at all (use counts only ever start at 1 and grow).
pub fn rename_erased(ctx: &LinearizeCtx, name: &mut Name) {
    let use_count = ctx.uses.get(name).copied().unwrap_or(0);
    if use_count == 0 {
        *name = Name::NONE;
    }
}
// Duplicates all variables that are used more than once.
// The process is done generating auxiliary variables and
// applying dup on them.
//
// `expr` is the value being bound to `name`, and `body` is the term where
// the renamed occurrences (`name.0` .. `name.{amount-1}`) appear. The use
// count comes from `ctx.uses`, filled while linearizing the rhs.
pub fn dup_var(ctx: &mut LinearizeCtx, name: &Name, expr: Box<Term>, body: Box<Term>) -> Box<Term> {
    if let Some(amount) = ctx.uses.get(name).copied() {
        match amount {
            // if not used nothing is done
            0 => body,
            // if used once just make a let (lambda then app)
            1 => {
                let name = Name::from_str(&format!("{}.0", name)).unwrap(); // TODO: handle err
                let func = Box::new(Term::Lam { name, body });
                let term = Term::App { func, argm: expr };
                Box::new(term)
            }
            // if used more than once, duplicate
            _ => {
                // `amount` uses need `amount - 1` dups chained together;
                // each dup after the first splits an auxiliary `c.{i}`.
                let dup_times = amount - 1;
                let aux_amount = amount - 2; // quantity of aux variables
                // `vars` is used as a stack: names are pushed in descending
                // order here and consumed via `pop` below, so they come out
                // in ascending order.
                let mut vars = vec![];
                // generate name for duplicated variables
                for i in (aux_amount..(dup_times * 2)).rev() {
                    let i = i - aux_amount; // moved to 0,1,..
                    let key = Name::from_str(&format!("{}.{}", name, i)).unwrap();
                    vars.push(key);
                }
                // generate name for aux variables
                for i in (0..aux_amount).rev() {
                    let key = Name::from_str(&format!("c.{}", i)).unwrap();
                    vars.push(key);
                }
                // use aux variables to duplicate the variable
                let term = Term::Dup {
                    nam0: vars.pop().unwrap(),
                    nam1: vars.pop().unwrap(),
                    expr,
                    body: dup_var_go(1, dup_times, body, &mut vars),
                };
                Box::new(term)
            }
        }
    } else {
        // `name` has no entry in `ctx.uses`: it was never counted as used,
        // so there is nothing to duplicate.
        body
    }
}
// Recursive aux function to duplicate one variable
// an amount of times
//
// `idx` counts how many dups have been emitted so far (the caller emits the
// first one, so it starts at 1). Each recursive level splits the auxiliary
// variable `c.{idx - 1}` bound by the previous level, popping its two output
// names from `vars`.
fn dup_var_go(idx: u64, dup_times: u64, body: Box<Term>, vars: &mut Vec<Name>) -> Box<Term> {
    if idx == dup_times {
        // All requested dups have been emitted; return the body unchanged.
        body
    } else {
        let nam0 = vars.pop().unwrap();
        let nam1 = vars.pop().unwrap();
        let var_name = Name::from_str(&format!("c.{}", idx - 1)).unwrap();
        let expr = Box::new(Term::Var { name: var_name });
        let dup = Term::Dup {
            nam0,
            nam1,
            expr,
            body: dup_var_go(idx + 1, dup_times, body, vars),
        };
        Box::new(dup)
    }
}

View File

@ -0,0 +1,43 @@
use kind_tree::{symbol::Ident, untyped::Expr};
/// Substitutes every free occurrence of the variable `from` in `term` with a
/// clone of `to`, stopping at binders (`let`, lambda) that shadow `from`.
pub fn subst(term: &mut Expr, from: &Ident, to: &Expr) {
    use kind_tree::untyped::ExprKind::*;
    match &mut term.data {
        // A matching free variable is replaced by the substituted expression.
        Var { name } if name.to_str() == from.to_str() => *term = to.clone(),
        // `let` always substitutes inside the bound value, but only descends
        // into the continuation when the binder does not shadow `from`.
        Let { name, val, next } => {
            subst(val, from, to);
            if name.to_str() != from.to_str() {
                subst(next, from, to);
            }
        }
        // Lambdas shadow their parameter; only descend when not shadowed.
        Lambda { param, body, .. } if param.to_str() != from.to_str() => {
            subst(body, from, to)
        }
        App { fun, args } => {
            subst(fun, from, to);
            args.iter_mut().for_each(|arg| subst(arg, from, to));
        }
        Fun { args, .. } | Ctr { args, .. } => {
            args.iter_mut().for_each(|arg| subst(arg, from, to));
        }
        Binary { op: _, left, right } => {
            subst(left, from, to);
            subst(right, from, to);
        }
        // Leaves, non-matching variables, and shadowed lambdas: nothing to do.
        U60 { .. } | F60 { .. } | Str { .. } | Var { .. } | Lambda { .. } => (),
        Err => unreachable!("Err should not be used inside the compiler"),
    }
}

View File

@ -0,0 +1,22 @@
[package]
name = "kind-tests"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dev-dependencies]
kind-driver = { path = "../kind-driver" }
kind-parser = { path = "../kind-parser" }
kind-tree = { path = "../kind-tree" }
kind-span = { path = "../kind-span" }
kind-report = { path = "../kind-report" }
kind-checker = { path = "../kind-checker" }
kind-pass = { path = "../kind-pass" }
kind-target-hvm = { path = "../kind-target-hvm" }
kind-target-kdl = { path = "../kind-target-kdl" }
pretty_assertions = "1.3.0"
ntest = "0.8.1"
walkdir = "2"

View File

@ -0,0 +1,2 @@
Main : U60
Main = HVM.log 222 2

View File

@ -0,0 +1,201 @@
#![feature(test)]
extern crate test;
use std::{fs, path::{PathBuf}};
use driver::{resolution};
use kind_driver::session::Session;
use kind_pass::{expand::{self, uses::expand_uses}, desugar, erasure};
use test::Bencher;
use kind_driver as driver;
/// Builds a fresh `Session` rooted at the benchmark suite's lib directory.
///
/// The receiving half of the diagnostic channel is dropped immediately:
/// these benches never inspect diagnostics.
fn new_session() -> Session {
    // `mpsc::channel()` returns `(Sender, Receiver)`; the first element is
    // the *sender*, so bind it as `tx` (it was misleadingly named `rx`).
    let (tx, _) = std::sync::mpsc::channel();
    let root = PathBuf::from("./suite/lib").canonicalize().unwrap();
    Session::new(root, tx)
}
/// Relative paths of the `.kind2` programs exercised by the benches below.
fn exp_paths() -> Vec<&'static str> {
    [
        "./suite/eval/Getters.kind2",
        "./suite/eval/Setters.kind2",
        "./suite/eval/DoNotation.kind2",
        "./suite/eval/User.kind2",
    ]
    .to_vec()
}
/// Benchmarks `kind_parser::parse_book` over the suite programs.
#[bench]
fn bench_exp_pure_parsing(b: &mut Bencher) {
    let paths = exp_paths();
    // Read all source files up front so file I/O stays outside the timed loop.
    let paths: Vec<_> = paths.iter().map(|x| fs::read_to_string(x).unwrap()).collect();
    b.iter(|| {
        paths.iter().map(|input| {
            let session = new_session();
            kind_parser::parse_book(session.diagnostic_sender.clone(), 0, &input)
        }).fold(0, |n, _| n + 1)
    })
}
/// Benchmarks `expand_uses` on pre-parsed modules.
#[bench]
fn bench_exp_pure_use_expansion(b: &mut Bencher) {
    let paths = exp_paths();
    let mut paths: Vec<_> = paths.iter().map(|x| {
        let input = fs::read_to_string(x).unwrap();
        // `channel()` returns `(Sender, Receiver)`; keep the sender and drop
        // the receiver — diagnostics are not inspected during setup. (The
        // sender was previously misnamed `rx`.)
        let (tx, _) = std::sync::mpsc::channel();
        let (modu, failed) = kind_parser::parse_book(tx, 0, &input);
        assert!(!failed);
        modu
    }).collect();
    b.iter(|| {
        paths.iter_mut().map(|module| {
            let (tx, _) = std::sync::mpsc::channel();
            expand_uses(module, tx);
        }).fold(0, |n, _| n + 1)
    })
}
/// Benchmarks derive expansion (`expand::expand_module`) on modules that
/// already had their `use`s expanded, asserting no diagnostics are emitted.
#[bench]
fn bench_exp_pure_derive_expansion(b: &mut Bencher) {
    let paths = exp_paths();
    let mut books: Vec<_> = paths.iter().map(|x| {
        let input = fs::read_to_string(x).unwrap();
        // Sender half of the diagnostic channel; the receiver is dropped
        // because setup diagnostics are not inspected.
        let (tx, _) = std::sync::mpsc::channel();
        let (mut module, failed) = kind_parser::parse_book(tx.clone(), 0, &input);
        assert!(!failed);
        expand_uses(&mut module, tx);
        module
    }).collect();
    b.iter(|| {
        books.iter_mut().map(|module| {
            // `channel()` returns `(Sender, Receiver)`; the previous bindings
            // had the two names swapped (`rx` was actually the sender).
            let (tx, rx) = std::sync::mpsc::channel();
            expand::expand_module(tx, module);
            // Derive expansion must not produce any diagnostics.
            assert!(rx.iter().collect::<Vec<_>>().is_empty())
        }).fold(0, |n, _| n + 1)
    })
}
/// Benchmarks `check_unbound_top_level` on books parsed during setup.
#[bench]
fn bench_exp_pure_check_unbound(b: &mut Bencher) {
    let paths = exp_paths();
    // Parse and store each book outside the timed loop.
    let mut books: Vec<_> = paths.iter().map(|x| {
        let mut session = new_session();
        let book = resolution::parse_and_store_book(&mut session, &PathBuf::from(x)).unwrap();
        (session, book)
    }).collect();
    b.iter(|| {
        books.iter_mut().map(|(session, book)| {
            let result = resolution::check_unbound_top_level(session, book);
            assert!(result.is_ok());
        }).fold(0, |n, _| n + 1)
    })
}
/// Benchmarks `desugar_book` on books that already passed unbound checking.
#[bench]
fn bench_exp_pure_desugar(b: &mut Bencher) {
    let paths = exp_paths();
    // Parse and unbound-check each book outside the timed loop.
    let mut books: Vec<_> = paths.iter().map(|x| {
        let mut session = new_session();
        let mut book = resolution::parse_and_store_book(&mut session, &PathBuf::from(x)).unwrap();
        let result = resolution::check_unbound_top_level(&mut session, &mut book);
        assert!(result.is_ok());
        (session, book)
    }).collect();
    b.iter(|| {
        books.iter_mut().map(|(session, book)| {
            desugar::desugar_book(session.diagnostic_sender.clone(), &book).unwrap()
        }).fold(0, |n, _| n + 1)
    })
}
/// Benchmarks `erase_book` on desugared books, with "Main" as the entrypoint.
#[bench]
fn bench_exp_pure_erase(b: &mut Bencher) {
    let paths = exp_paths();
    // Parse, unbound-check, and desugar each book outside the timed loop.
    let books: Vec<_> = paths.iter().map(|x| {
        let mut session = new_session();
        let mut book = resolution::parse_and_store_book(&mut session, &PathBuf::from(x)).unwrap();
        let result = resolution::check_unbound_top_level(&mut session, &mut book);
        let book = desugar::desugar_book(session.diagnostic_sender.clone(), &book).unwrap();
        assert!(result.is_ok());
        (session, book)
    }).collect();
    b.iter(|| {
        books.iter().map(|(session, book)| {
            erasure::erase_book(
                book,
                session.diagnostic_sender.clone(),
                vec!["Main".to_string()],
            ).unwrap();
        }).fold(0, |n, _| n + 1)
    })
}
/// Benchmarks HVM code generation (`compile_book`) on fully erased books.
#[bench]
fn bench_exp_pure_to_hvm(b: &mut Bencher) {
    let paths = exp_paths();
    // Run the full front-end pipeline (parse, unbound-check, desugar, erase)
    // outside the timed loop.
    let books: Vec<_> = paths.iter().map(|x| {
        let mut session = new_session();
        let mut book = resolution::parse_and_store_book(&mut session, &PathBuf::from(x)).unwrap();
        let result = resolution::check_unbound_top_level(&mut session, &mut book);
        let book = desugar::desugar_book(session.diagnostic_sender.clone(), &book).unwrap();
        assert!(result.is_ok());
        let book = erasure::erase_book(
            &book,
            session.diagnostic_sender.clone(),
            vec!["Main".to_string()],
        ).unwrap();
        (session, book)
    }).collect();
    b.iter(move || {
        books.iter().map(move |(_, book)| {
            // `compile_book` takes ownership, so the book is cloned each run.
            kind_target_hvm::compile_book(book.to_owned(), false)
        }).fold(0, |n, _| n + 1)
    })
}
/// Benchmarks type-checker code generation over every name in each book.
#[bench]
fn bench_exp_pure_gen_checker(b: &mut Bencher) {
    let paths = exp_paths();
    // Parse, unbound-check, and desugar each book outside the timed loop.
    let books: Vec<_> = paths.iter().map(|x| {
        let mut session = new_session();
        let mut book = resolution::parse_and_store_book(&mut session, &PathBuf::from(x)).unwrap();
        let result = resolution::check_unbound_top_level(&mut session, &mut book);
        let book = desugar::desugar_book(session.diagnostic_sender.clone(), &book).unwrap();
        assert!(result.is_ok());
        (session, book)
    }).collect();
    b.iter(move || {
        books.iter().map(move |(_, book)| {
            kind_checker::gen_checker(book, book.names.keys().cloned().collect())
        }).fold(0, |n, _| n + 1)
    })
}

View File

@ -0,0 +1 @@
Ok!

View File

@ -0,0 +1,44 @@
#derive[match]
type Algebra.Laws.Inverse <t> <concat: (t -> t -> t)> <inverse: (t -> t)> <empty: t> {
new (left_inverse: ((x : t) -> (Equal empty (concat x (inverse x))))) (right_inverse: ((x : t) -> (Equal empty (concat (inverse x) x))))
}
type Algebra.Magma <t: Type> {
new (concat: (t -> t -> t))
}
type Algebra.Semigroup <t: Type> {
new (magma: (Algebra.Magma t)) (associativity: (Algebra.Laws.associativity.eta (Algebra.Magma.concat magma)))
}
Algebra.Group.concat <t> (group: (Algebra.Group t)) : (t -> t -> t)
Algebra.Group.concat t (Algebra.Group.new t_ monoid inverse inverse_proof) = (Algebra.Monoid.concat monoid)
Algebra.Laws.associativity.eta <t> (concat: (t -> t -> t)) : Type
Algebra.Laws.associativity.eta t concat = ((a : t) -> (b : t) -> (c : t) -> (Equal (concat (concat a b) c) (concat a (concat b c))))
type Algebra.Laws.Identity <t> <concat: (t -> t -> t)> <empty: t> {
new (left_identity: ((x : t) -> (Equal x (concat empty x)))) (right_identity: ((x : t) -> (Equal x (concat x empty))))
}
Algebra.Monoid.empty <t> (monoid: (Algebra.Monoid t)) : t
Algebra.Monoid.empty t (Algebra.Monoid.new t_ sg empty id) = empty
type Algebra.Monoid <t: Type> {
new (sg: (Algebra.Semigroup t)) (empty: t) (identity: (Algebra.Laws.Identity t (Algebra.Semigroup.concat sg) empty))
}
Algebra.Semigroup.concat <t> (semigroup: (Algebra.Semigroup t)) : (t -> t -> t)
Algebra.Semigroup.concat t (Algebra.Semigroup.new t_ magma assoc) = (Algebra.Magma.concat magma)
Algebra.Monoid.concat <t> (monoid: (Algebra.Monoid t)) : (t -> t -> t)
Algebra.Monoid.concat t (Algebra.Monoid.new t_ sg empty id) = (Algebra.Semigroup.concat sg)
Algebra.Magma.concat <t> (magma: (Algebra.Magma t)) : (t -> t -> t)
Algebra.Magma.concat t (Algebra.Magma.new t_ concat) = concat
Equal <t> (a: t) (b: t) : Type
type Algebra.Group <t> {
new (monoid: (Algebra.Monoid t)) (invert: (t -> t)) (inverse: (Algebra.Laws.Inverse t (Algebra.Monoid.concat monoid) invert (Algebra.Monoid.empty monoid)))
}

View File

@ -0,0 +1 @@
Ok!

View File

@ -0,0 +1 @@
Ok!

View File

@ -0,0 +1,3 @@
type Equal <t: Type> (a: t) ~ (b: t) {
refl : Equal t a a
}

View File

@ -0,0 +1,13 @@
INFO Inspection.
* Expected: U60
/--[suite/checker/Inspection.kind2:3:3]
|
2 | Main =
3 | ?
| v
| \Here!

View File

@ -0,0 +1 @@
Ok!

View File

@ -0,0 +1,8 @@
type Nat {
succ (pred: Nat)
zero
}
Nat.pred (n: Nat) : Nat
Nat.pred Nat.zero = Nat.zero
Nat.pred (Nat.succ n) = n

View File

@ -0,0 +1 @@
Ok!

View File

@ -1,50 +1,52 @@
// From: https://github.com/Kindelia/Functional-Benchmarks/blob/master/Runtime/quicksort.kind2
List (t: Type) : Type
Cons <t: Type> (head: t) (tail: List t) : List t
Nil <t: Type> : List t
type List (t: Type) {
Cons (head: t) (tail: List t)
Nil
}
Tree (t: Type) : Type
Empty <t: Type> : Tree t
Single <t: Type> (value: t) : Tree t
Concat <t: Type> (left: Tree t) (right: Tree t) : Tree t
type Tree (t: Type) {
Empty
Single (value: t)
Concat (left: Tree t) (right: Tree t)
}
// Generates a random list
Randoms (s: U60) (n: U60) : List U60
Randoms s 0 = Nil
Randoms s l = Cons s (Randoms (% (+ (* s 1664525) 1013904223) 4294967296) (- l 1))
Randoms s 0 = List.Nil
Randoms s l = List.Cons s (Randoms (% (+ (* s 1664525) 1013904223) 4294967296) (- l 1))
// Sums all elements in a concatenation tree
Sum (tree: Tree U60) : U60
Sum (Empty t) = 0
Sum (Single t a) = a
Sum (Concat t a b) = (+ (Sum a) (Sum b))
Sum (Tree.Empty t) = 0
Sum (Tree.Single t a) = a
Sum (Tree.Concat t a b) = (+ (Sum a) (Sum b))
//// The initial pivot
Pivot : U60
Pivot = 2147483648
QSort (p: U60) (s: U60) (l: List U60): Tree U60
QSort p s Nil = Empty
QSort p s (Cons x Nil) = Single x
QSort p s (Cons x xs) = Split p s (Cons x xs) Nil Nil
QSort p s List.Nil = Tree.Empty
QSort p s (List.Cons x List.Nil) = Tree.Single x
QSort p s (List.Cons x xs) = Split p s (List.Cons x xs) List.Nil List.Nil
//// Splits list in two partitions
Split (p: U60) (s: U60) (l: List U60) (min: List U60) (max: List U60) : Tree U60
Split p s Nil min max =
Split p s List.Nil min max =
let s = (>> s 1)
let min = (QSort (- p s) s min)
let max = (QSort (+ p s) s max)
Concat min max
Tree.Concat min max
Split p s (Cons x xs) min max =
Split p s (List.Cons x xs) min max =
Place p s (< p x) x xs min max
//// Moves element to its partition
Place (p: U60) (s: U60) (y: U60) (x: U60) (xs: List U60) (min: List U60) (max: List U60) : Tree U60
Place p s 0 x xs min max = Split p s xs (Cons x min) max
Place p s 1 x xs min max = Split p s xs min (Cons x max)
Place p s 0 x xs min max = Split p s xs (List.Cons x min) max
Place p s 1 x xs min max = Split p s xs min (List.Cons x max)
//// Sorts and sums n random numbers
Main : U60

View File

@ -0,0 +1 @@
Ok!

View File

@ -0,0 +1,12 @@
Main : Maybe U60
Main =
do Maybe {
Maybe.some 3
Maybe.pure 2
ask res = Maybe.pure 2
ask res2 = Maybe.pure 3
match Maybe (Maybe.some 4) {
some val => Maybe.pure (+ 1000 (+ val (+ res res2)))
none => Maybe.none
}
}

View File

@ -0,0 +1 @@
Ok!

View File

@ -0,0 +1,13 @@
#derive[open]
record User {
constructor new
name : U60
ttt : U60
e : U60
}
Main : U60
Main =
let User.new (ttt = e) e .. = User.new 2 4 1
let User.new (ttt = f) .. = User.new 6 7 3
e

View File

@ -0,0 +1 @@
Ok!

View File

@ -0,0 +1,9 @@
type Nat {
succ (pred : Nat)
zero
}
type Vec (t: Type) ~ (n: Nat) {
cons <size : Nat> (x : t) (xs : Vec t size) : Vec t (Nat.succ size)
nil : Vec t Nat.zero
}

View File

@ -0,0 +1,12 @@
ERROR The case is not covering all the values inside of it!
/--[suite/checker/derive/fail/IncompleteCase.kind2:12:9]
|
11 | let User.new (ttt = e) e .. = User.new 2 4 1
12 | let User.new (ttt = f) name = User.new 6 7 3
| v-------
| \This is the incomplete case
13 | e
Hint: Need variables for 'e'

Some files were not shown because too many files have changed in this diff Show More