wip: upgraded to the HVM split into multiple crates and fixed the code style of some parts of the compiler

felipegchi 2022-11-22 09:56:06 -03:00
parent 2c69eb3341
commit c91d6a09e8
58 changed files with 1352 additions and 1981 deletions

Cargo.lock (generated, 964 lines changed)
File diff suppressed because it is too large

View File

@ -16,4 +16,7 @@ members = [
# "src/kind-lint",
# "src/kind-query",
# "src/kind-macros",
]
]
[profile.release]
debug = true

View File

@ -9,6 +9,8 @@ edition = "2021"
kind-tree = { path = "../kind-tree" }
kind-span = { path = "../kind-span" }
kind-report = { path = "../kind-report" }
hvm = "0.1.89"
hvm = { path = "../../../HVM/crates/hvm", features = ["derive"] }
fxhash = "0.2.1"
im = "15.1.0"

View File

@ -4,12 +4,12 @@
use self::tags::EvalTag;
use self::tags::{operator_to_constructor, TermTag};
use hvm::Term;
use hvm::syntax::Term;
use kind_span::Span;
use kind_tree::desugared::{self, Book, Expr};
use kind_tree::symbol::{Ident, QualifiedIdent};
use hvm::language as lang;
use hvm::syntax as lang;
mod tags;
@ -68,7 +68,7 @@ fn mk_var(ident: &str) -> Box<Term> {
}
fn mk_u60(numb: u64) -> Box<Term> {
Box::new(Term::Num { numb })
Box::new(Term::U6O { numb })
}
fn mk_single_ctr(head: String) -> Box<Term> {
@ -89,7 +89,7 @@ fn mk_ctr_name_from_str(ident: &str) -> Box<Term> {
}
fn span_to_num(span: Span) -> Box<Term> {
Box::new(Term::Num {
Box::new(Term::U6O {
numb: span.encode().0,
})
}
@ -157,7 +157,7 @@ fn codegen_all_expr(
mk_var(ident.to_str())
}
}
All(name, typ, body) => mk_lifted_ctr(
All(name, typ, body, _erased) => mk_lifted_ctr(
eval_ctr(quote, TermTag::All),
vec![
span_to_num(expr.span),

View File

@ -9,37 +9,59 @@ pub mod report;
use std::sync::mpsc::Sender;
use hvm::Term;
use hvm::derive::pre_compute_file;
use hvm::{runtime::runtime, syntax::Term};
use kind_report::data::Diagnostic;
use kind_tree::desugared::Book;
use report::parse_report;
use crate::report::parse_report;
const CHECKER_HVM: &str = include_str!("checker.hvm");
pre_compute_file!(PRECOMPILED_TYPE_CHECKER, "checker.hvm");
/// Generates the checker in a string format that can be
/// parsed by HVM.
pub fn gen_checker(book: &Book, functions_to_check: Vec<String>) -> String {
let base_check_code = compiler::codegen_book(book, functions_to_check);
let mut check_code = CHECKER_HVM.to_string();
check_code.push_str(&base_check_code.to_string());
check_code
let file = compiler::codegen_book(book, functions_to_check);
file.to_string()
}
/// Type checks a desugared book. It spawns an HVM instance in order
/// to run a compiled version of the book
pub fn type_check(book: &Book, tx: Sender<Box<dyn Diagnostic>>, functions_to_check: Vec<String>) -> bool {
let check_code = gen_checker(book, functions_to_check);
pub fn type_check(
book: &Book,
tx: Sender<Box<dyn Diagnostic>>,
functions_to_check: Vec<String>,
) -> bool {
let file = compiler::codegen_book(book, functions_to_check);
let book = language::rulebook::gen_rulebook(&file, PRECOMPILED_TYPE_CHECKER);
let mut runtime = hvm::Runtime::from_code(&check_code).unwrap();
let main = runtime.alloc_code("Kind.API.check_all").unwrap();
runtime.run_io(main);
runtime.normalize(main);
let term = runtime.readback(main);
let mut prog = runtime::Program::new(PRECOMPILED_TYPE_CHECKER);
prog.add_book(&book);
let errs = parse_report(&term)
.expect(&format!("Internal Error: Cannot parse the report message from the type checker: {}", term));
let heap = runtime::new_heap(runtime::default_heap_size(), runtime::default_heap_tids());
let tids = runtime::new_tids(runtime::default_heap_tids());
// Allocates the main term
runtime::link(
&heap,
0,
runtime::Fun(*book.name_to_id.get("Main").unwrap(), 0),
);
let host = 0;
// Normalizes it
runtime::normalize(&heap, &prog, &tids, host, false);
// Reads it back as a term
let term = language::readback::as_term(&heap, &prog, host);
// Frees used memory
runtime::collect(&heap, &prog.arit, tids[0], runtime::load_ptr(&heap, host));
runtime::free(&heap, 0, 0, 1);
let errs = parse_report(&term).expect(&format!(
"Internal Error: Cannot parse the report message from the type checker: {}",
term
));
let succeeded = errs.is_empty();
for err in errs {
@ -53,12 +75,32 @@ pub fn type_check(book: &Book, tx: Sender<Box<dyn Diagnostic>>, functions_to_che
/// we run the "eval_main" that runs the generated version that both HVM
/// and the checker can understand.
pub fn eval_api(book: &Book) -> Box<Term> {
let check_code = gen_checker(book, book.names.keys().cloned().collect());
let file = compiler::codegen_book(book, Vec::new());
let book = language::rulebook::gen_rulebook(&file, PRECOMPILED_TYPE_CHECKER);
let mut runtime = hvm::Runtime::from_code(&check_code).unwrap();
let main = runtime.alloc_code("Kind.API.eval_main").unwrap();
let mut prog = runtime::Program::new(PRECOMPILED_TYPE_CHECKER);
prog.add_book(&book);
runtime.run_io(main);
runtime.normalize(main);
runtime.readback(main)
let heap = runtime::new_heap(runtime::default_heap_size(), runtime::default_heap_tids());
let tids = runtime::new_tids(runtime::default_heap_tids());
// Allocates the main term
runtime::link(
&heap,
0,
runtime::Fun(*book.name_to_id.get("Kind.API.eval_main").unwrap(), 0),
);
let host = 0;
// Normalizes it
runtime::normalize(&heap, &prog, &tids, host, false);
// Reads it back as a term
let term = language::readback::as_term(&heap, &prog, host);
// Frees used memory
runtime::collect(&heap, &prog.arit, tids[0], runtime::load_ptr(&heap, host));
runtime::free(&heap, 0, 0, 1);
term
}
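Both type_check and eval_api above now repeat the same low-level sequence against the split HVM runtime crates: generate the code, build a rulebook on top of the precompiled checker, allocate a heap, link the entry function at host 0, normalize, read the normal form back, and release the memory. A natural follow-up (not part of this commit) would be to factor that sequence into a single helper. The sketch below only reuses calls already visible in this diff and assumes the same imports as the two functions above; the helper name and signature are illustrative:

fn run_compiled(book: &Book, functions_to_check: Vec<String>, entrypoint: &str) -> Box<Term> {
    // Compile the desugared book and extend the precompiled checker with it,
    // exactly as type_check and eval_api do above.
    let file = compiler::codegen_book(book, functions_to_check);
    let rulebook = language::rulebook::gen_rulebook(&file, PRECOMPILED_TYPE_CHECKER);

    let mut prog = runtime::Program::new(PRECOMPILED_TYPE_CHECKER);
    prog.add_book(&rulebook);

    let heap = runtime::new_heap(runtime::default_heap_size(), runtime::default_heap_tids());
    let tids = runtime::new_tids(runtime::default_heap_tids());

    // Allocates the entry call at host 0
    runtime::link(
        &heap,
        0,
        runtime::Fun(*rulebook.name_to_id.get(entrypoint).unwrap(), 0),
    );
    let host = 0;

    // Normalizes it and reads the normal form back as a term
    runtime::normalize(&heap, &prog, &tids, host, false);
    let term = language::readback::as_term(&heap, &prog, host);

    // Frees used memory
    runtime::collect(&heap, &prog.arit, tids[0], runtime::load_ptr(&heap, host));
    runtime::free(&heap, 0, 0, 1);

    term
}

With such a helper, type_check would boil down to parse_report over run_compiled(book, functions_to_check, "Main"), and eval_api to run_compiled(book, Vec::new(), "Kind.API.eval_main").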

View File

@ -24,11 +24,11 @@ macro_rules! match_opt {
}
fn parse_orig(term: &Term) -> Result<Range, String> {
match_opt!(term, Term::Num { numb } => EncodedSpan(*numb).to_range())
match_opt!(term, Term::U6O { numb } => EncodedSpan(*numb).to_range())
}
fn parse_num(term: &Term) -> Result<u64, String> {
match_opt!(term, Term::Num { numb } => *numb)
match_opt!(term, Term::U6O { numb } => *numb)
}
fn parse_op(term: &Term) -> Result<Operator, String> {
@ -58,7 +58,7 @@ fn parse_op(term: &Term) -> Result<Operator, String> {
fn parse_name(term: &Term) -> Result<String, String> {
match term {
Term::Num { numb } => Ok(Ident::decode(*numb)),
Term::U6O { numb } => Ok(Ident::decode(*numb)),
Term::Ctr { name, args: _ } => Ok(name.to_string()),
_ => Err("Error while matching ident".to_string()),
}
@ -66,7 +66,7 @@ fn parse_name(term: &Term) -> Result<String, String> {
fn parse_qualified(term: &Term) -> Result<QualifiedIdent, String> {
match term {
Term::Num { numb } => Ok(QualifiedIdent::new_static(
Term::U6O { numb } => Ok(QualifiedIdent::new_static(
&Ident::decode(*numb),
None,
Range::ghost_range(),
@ -93,6 +93,7 @@ fn parse_all_expr(
Ident::generate(&parse_name(&args[1])?),
parse_all_expr(names.clone(), &args[2])?,
parse_all_expr(names, &args[3])?,
false, // TODO: Fix
)),
"Kind.Quoted.lam" => Ok(Expr::lambda(
parse_orig(&args[0])?,
@ -215,10 +216,13 @@ pub fn transform_entry(term: &Term) -> Result<Entry, String> {
fn parse_type_error(expr: &Term) -> Result<TypeError, String> {
match expr {
Term::Ctr { name, args } => {
if args.len() < 2 {
return Err("Invalid argument length for constructor".to_string());
}
let ls = parse_list(&args[0])?;
let entries = ls.iter().flat_map(|x| transform_entry(x));
let ctx = Context(entries.collect());
let orig = match_opt!(*args[1], Term::Num { numb } => EncodedSpan(numb).to_range())?;
let orig = match_opt!(*args[1], Term::U6O { numb } => EncodedSpan(numb).to_range())?;
match name.as_str() {
"Kind.Error.Quoted.unbound_variable" => Ok(TypeError::UnboundVariable(ctx, orig)),
"Kind.Error.Quoted.cant_infer_hole" => Ok(TypeError::CantInferHole(ctx, orig)),

View File

@ -9,9 +9,6 @@ use kind_report::report::{FileCache, Report};
use kind_report::RenderConfig;
use kind_driver as driver;
use watch::watch_session;
mod watch;
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
@ -88,9 +85,7 @@ pub enum Command {
/// Compiles a file to HVM (.hvm)
#[clap(aliases = &["hvm"])]
ToHVM {
file: String
},
ToHVM { file: String },
/// Watch for file changes and then
/// check when some file changes.
@ -195,7 +190,7 @@ pub fn run_cli(config: Cli) {
});
}
Command::Run { file } => {
compile_in_session(render_config, root, file.clone(), &mut |session| {
let res = compile_in_session(render_config, root, file.clone(), &mut |session| {
let book = driver::erase_book(
session,
&PathBuf::from(file.clone()),
@ -203,22 +198,14 @@ pub fn run_cli(config: Cli) {
)?;
driver::check_main_entry(session, &book)?;
Some(driver::compile_book_to_hvm(book))
})
.map(|res| {
println!("{}", driver::execute_file(&res));
res
});
}
Command::Eval { file } => {
compile_in_session(render_config, root, file.clone(), &mut |session| {
let book = driver::desugar_book(session, &PathBuf::from(file.clone()))?;
driver::check_main_entry(session, &book)?;
Some(book)
})
.map(|res| {
println!("{}", driver::eval_in_checker(&res));
res
});
if let Some(res) = res {
match driver::execute_file(res) {
Ok(res) => println!("{}", res),
Err(err) => println!("{}", err),
}
}
}
Command::Show { file } => {
compile_in_session(render_config, root, file.clone(), &mut |session| {
@ -256,8 +243,16 @@ pub fn run_cli(config: Cli) {
res
});
}
Command::Watch { file } => {
watch_session(root, PathBuf::from(file.clone()), render_config)
Command::Eval { file } => {
compile_in_session(render_config, root, file.clone(), &mut |session| {
let book = driver::desugar_book(session, &PathBuf::from(file.clone()))?;
driver::check_main_entry(session, &book)?;
Some(book)
})
.map(|res| println!("{}", driver::eval_in_checker(&res)));
}
Command::Watch { file: _ } => {
todo!()
}
Command::Repl => {
todo!()

View File

@ -1,86 +0,0 @@
use core::fmt;
use std::{io, path::PathBuf, collections::HashMap};
use kind_query::{SessionHandler, Storage, Session};
use kind_report::{report::{FileCache, Report}, RenderConfig, data::Diagnostic};
struct ToWriteFmt<T>(pub T);
impl<T> fmt::Write for ToWriteFmt<T>
where
T: io::Write,
{
fn write_str(&mut self, s: &str) -> fmt::Result {
self.0.write_all(s.as_bytes()).map_err(|_| fmt::Error)
}
}
pub fn render_to_stderr<T, E>(render_config: &RenderConfig, session: &T, err: &E)
where
T: FileCache,
E: Report,
{
Report::render(
err,
session,
render_config,
&mut ToWriteFmt(std::io::stderr()),
)
.unwrap();
}
pub struct WatchServer<'a> {
config: RenderConfig<'static>,
mapper: &'a mut HashMap<PathBuf, usize>,
inverse: &'a mut HashMap<usize, PathBuf>,
}
impl<'a> SessionHandler for WatchServer<'a> {
fn on_errors(&mut self, storage: &Storage, _uri: usize, errs: Vec<Box<dyn Diagnostic>>) {
for err in errs {
render_to_stderr(&self.config, storage, &err)
}
}
fn on_add_file(&mut self, file: PathBuf, id: usize) {
println!("File added {:?} ~ {:?}", file.clone(), id);
self.mapper.insert(file.clone(), id);
self.inverse.insert(id, file);
}
fn on_rem_file(&mut self, id: usize) {
println!("File remove {:?}", id);
if let Some(res) = self.inverse.remove(&id) {
self.mapper.remove(&res);
}
}
fn get_id_by_path(&mut self, path: &PathBuf) -> Option<usize> {
self.mapper.get(path).cloned()
}
}
pub fn watch_session(root: PathBuf, path: PathBuf, config: RenderConfig<'static>) {
let mut mapper = Default::default();
let mut inverse = Default::default();
let mut handler = WatchServer { config, mapper: &mut mapper, inverse: &mut inverse };
let mut storage = Storage::default();
let mut session = Session::new(&mut handler, &mut storage, root);
let main = session.init_project(path);
session.check_module(main);
let id = session.get_id_by_path(&PathBuf::from("./D.kind2")).unwrap();
session.remove_node(id);
for (place, node) in &session.storage.graph.nodes {
println!("{} = {:?}", place, node)
}
session.check_module(main);
}

View File

@ -75,7 +75,8 @@ fn test_eval() -> Result<(), Error> {
let root = PathBuf::from(".");
let mut session = Session::new(root, rx);
let check = driver::erase_book(&mut session, &PathBuf::from(path), &["Main".to_string()]).map(driver::compile_book_to_hvm);
let check = driver::erase_book(&mut session, &PathBuf::from(path), &["Main".to_string()])
.map(driver::compile_book_to_hvm);
let diagnostics = tx.try_iter().collect::<Vec<_>>();
let render = RenderConfig::ascii(2);
@ -83,7 +84,9 @@ fn test_eval() -> Result<(), Error> {
kind_report::check_if_colors_are_supported(true);
match check {
Some(file) if diagnostics.is_empty() => driver::execute_file(&file).to_string(),
Some(file) if diagnostics.is_empty() => {
driver::execute_file(file).map_or_else(|e| e, |f| f)
}
_ => {
let mut res_string = String::new();

View File

@ -1,3 +1,4 @@
#derive[open]
record User {
constructor new
name : U60

View File

@ -1,12 +1,12 @@
ERROR The case is not covering all the values inside of it!
/--[tests/suite/checker/derive/fail/IncompleteCase.kind2:11:9]
/--[tests/suite/checker/derive/fail/IncompleteCase.kind2:12:9]
|
10 | let User.new (ttt = e) e .. = User.new 2 4 1
11 | let User.new (ttt = f) name = User.new 6 7 3
11 | let User.new (ttt = e) e .. = User.new 2 4 1
12 | let User.new (ttt = f) name = User.new 6 7 3
| v-------
| \ This is the incomplete case
12 | e
| \This is the incomplete case
13 | e
Hint: Need variables for 'e'

View File

@ -1,3 +1,4 @@
#derive[open]
record User {
constructor new
name : U60

View File

@ -1,23 +1,16 @@
ERROR Repeated named variable
ERROR Cannot find the definition 'User.new.open'.
/--[tests/suite/checker/derive/fail/Repeated.kind2:11:19]
|
10 | let User.new (ttt = e) e .. = User.new 2 4 1
11 | let User.new (ttt = f) ttt = User.new 6 7 3
| v-- v--
| | \ Second occurence
| \ First occurence
12 | e
ERROR The case is not covering all the values inside of it!
/--[tests/suite/checker/derive/fail/Repeated.kind2:11:9]
/--[tests/suite/checker/derive/fail/Repeated.kind2:10:9]
|
9 | Main =
10 | let User.new (ttt = e) e .. = User.new 2 4 1
| v-------
| \Here!
:
11 | let User.new (ttt = f) ttt = User.new 6 7 3
| v-------
| \ This is the incomplete case
| \Here!
12 | e
Hint: Need variables for 'e', 'name'
Hint: Maybe you're looking for 'User.new'

View File

@ -5,12 +5,12 @@
1 | type Nat {
2 | zero
| v---
| \ The first ocorrence
| \The first ocorrence
3 | succ
:
6 | Nat.zero : U60
| v-------
| \ Second occorrence here!
| \Second occorrence here!
Hint: Rename one of the definitions or remove and look at how names work in Kind at https://kind.kindelia.org/hints/names

View File

@ -1,3 +1,4 @@
#derive[open]
record User {
constructor new
name : U60

View File

@ -0,0 +1,33 @@
use kind_report::data::{Color, Diagnostic, DiagnosticFrame, Marker, Severity};
use kind_span::Range;
pub(crate) enum DeriveError {
CannotUseNamedVariable(Range),
}
impl Diagnostic for DeriveError {
fn get_syntax_ctx(&self) -> Option<kind_span::SyntaxCtxIndex> {
match self {
DeriveError::CannotUseNamedVariable(range) => Some(range.ctx),
}
}
fn to_diagnostic_frame(&self) -> DiagnosticFrame {
match self {
DeriveError::CannotUseNamedVariable(range) => DiagnosticFrame {
code: 103,
severity: Severity::Error,
title: format!("Cannot use named variable on match derivations"),
subtitles: vec![],
hints: vec![],
positions: vec![Marker {
position: range.clone(),
color: Color::Fst,
text: "Here!".to_string(),
no_code: false,
main: true,
}],
},
}
}
}

View File

@ -1,5 +1,6 @@
//! Utility to derive functions from their definitions.
pub mod errors;
pub mod matching;
pub mod open;
pub mod subst;

View File

@ -2,6 +2,7 @@
//! eliminator out of a sum type declaration.
use fxhash::FxHashMap;
use kind_report::data::Diagnostic;
use kind_span::Range;
use kind_tree::concrete::expr::Expr;
@ -10,41 +11,52 @@ use kind_tree::concrete::*;
use kind_tree::concrete::{self};
use kind_tree::symbol::{Ident, QualifiedIdent};
use crate::subst::{substitute_in_expr};
use crate::errors::DeriveError;
use crate::subst::substitute_in_expr;
/// Derives an eliminator from a sum type declaration.
pub fn derive_match(range: Range, sum: &SumTypeDecl) -> concrete::Entry {
pub fn derive_match(
range: Range,
sum: &SumTypeDecl,
) -> (concrete::Entry, Vec<Box<dyn Diagnostic>>) {
let mut errs: Vec<Box<dyn Diagnostic>> = Vec::new();
let mk_var = |name: Ident| -> Box<Expr> {
Box::new(Expr {
data: ExprKind::Var(name),
data: ExprKind::Var { name },
range,
})
};
let mk_cons = |name: QualifiedIdent, spine: Vec<Binding>| -> Box<Expr> {
let mk_cons = |name: QualifiedIdent, args: Vec<Binding>| -> Box<Expr> {
Box::new(Expr {
data: ExprKind::Constr(name, spine),
data: ExprKind::Constr { name, args },
range,
})
};
let mk_app = |left: Box<Expr>, right: Vec<AppBinding>, range: Range| -> Box<Expr> {
let mk_app = |fun: Box<Expr>, args: Vec<AppBinding>, range: Range| -> Box<Expr> {
Box::new(Expr {
data: ExprKind::App(left, right),
data: ExprKind::App { fun, args },
range,
})
};
let mk_pi = |name: Ident, left: Box<Expr>, right: Box<Expr>| -> Box<Expr> {
let mk_pi = |name: Ident, typ: Box<Expr>, body: Box<Expr>| -> Box<Expr> {
Box::new(Expr {
data: ExprKind::All(Some(name), left, right),
data: ExprKind::All {
param: Some(name),
typ,
body,
erased: false,
},
range,
})
};
let mk_typ = || -> Box<Expr> {
Box::new(Expr {
data: ExprKind::Lit(Literal::Type),
data: ExprKind::Lit { lit: Literal::Type },
range,
})
};
@ -142,13 +154,19 @@ pub fn derive_match(range: Range, sum: &SumTypeDecl) -> concrete::Entry {
);
let mut indices_of_cons = match cons.typ.clone().map(|x| x.data) {
Some(ExprKind::Constr(_, spine)) => spine[sum.parameters.len()..]
.iter()
.map(|x| match x {
Binding::Positional(expr) => AppBinding::explicit(expr.clone()),
Binding::Named(_, _, _) => todo!("Incomplete feature: Need to reorder"),
})
.collect(),
Some(ExprKind::Constr { name: _, args }) => {
let mut new_args = Vec::with_capacity(args.len());
for arg in &args[sum.parameters.len()..].to_vec() {
new_args.push(match arg {
Binding::Positional(expr) => AppBinding::explicit(expr.clone()),
Binding::Named(range, _, expr) => {
errs.push(Box::new(DeriveError::CannotUseNamedVariable(range.clone())));
AppBinding::explicit(expr.clone())
}
});
}
new_args
}
_ => [indice_names.as_slice()].concat(),
};
@ -179,6 +197,22 @@ pub fn derive_match(range: Range, sum: &SumTypeDecl) -> concrete::Entry {
});
}
if !errs.is_empty() {
let entry = Entry {
name,
docs: Vec::new(),
args: types,
typ: Box::new(Expr {
data: ExprKind::Hole,
range,
}),
rules: vec![],
range,
attrs: Vec::new(),
};
return (entry, errs);
}
let mut res: Vec<AppBinding> = [indice_names.as_slice()].concat();
res.push(AppBinding::explicit(mk_var(Ident::generate("scrutinizer"))));
let ret_ty = mk_app(mk_var(motive_ident.clone()), res, range);
@ -198,7 +232,7 @@ pub fn derive_match(range: Range, sum: &SumTypeDecl) -> concrete::Entry {
match &cons.typ {
Some(expr) => match &**expr {
Expr {
data: ExprKind::Constr(_, sp),
data: ExprKind::Constr { args, .. },
..
} => {
irrelev = cons.args.map(|x| x.erased).to_vec();
@ -211,7 +245,7 @@ pub fn derive_match(range: Range, sum: &SumTypeDecl) -> concrete::Entry {
.args
.map(|x| x.name.with_name(|f| format!("{}_", f)))
.to_vec();
args_indices = sp
args_indices = args
.iter()
.map(|x| match x {
Binding::Positional(expr) => AppBinding {
@ -297,22 +331,22 @@ pub fn derive_match(range: Range, sum: &SumTypeDecl) -> concrete::Entry {
args.push(AppBinding {
data: Box::new(Expr {
data: ExprKind::Constr(
cons_ident.clone(),
spine_params
data: ExprKind::Constr {
name: cons_ident.clone(),
args: spine_params
.iter()
.cloned()
.map(|x| Binding::Positional(mk_var(x)))
.collect(),
),
},
range,
}),
erased: false,
});
let body = Box::new(Expr {
data: ExprKind::Ann(
mk_app(
data: ExprKind::Ann {
val: mk_app(
mk_var(cons.name.clone()),
spine
.iter()
@ -324,8 +358,8 @@ pub fn derive_match(range: Range, sum: &SumTypeDecl) -> concrete::Entry {
.collect(),
cons.name.range,
),
mk_app(mk_var(motive_ident.clone()), args, range),
),
typ: mk_app(mk_var(motive_ident.clone()), args, range),
},
range,
});
@ -350,5 +384,5 @@ pub fn derive_match(range: Range, sum: &SumTypeDecl) -> concrete::Entry {
attrs: Vec::new(),
};
entry
(entry, errs)
}
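Since derive_match now returns the derived entry together with any accumulated diagnostics instead of a bare Entry, callers have to forward that second element to whatever reporting channel they use. Below is a minimal call-site sketch under that assumption; the Sender-based channel mirrors how diagnostics travel elsewhere in this commit, while the helper name and its exact imports are illustrative rather than taken from the commit:

use std::sync::mpsc::Sender;

use kind_report::data::Diagnostic;
use kind_span::Range;
use kind_tree::concrete::{Entry, SumTypeDecl};

// Illustrative caller: derive the eliminator and push its diagnostics
// onto the reporting channel before using the entry.
fn derive_and_report(
    range: Range,
    sum: &SumTypeDecl,
    tx: &Sender<Box<dyn Diagnostic>>,
) -> Entry {
    let (entry, errs) = crate::matching::derive_match(range, sum);
    for err in errs {
        // A closed channel is simply ignored to keep the sketch short.
        let _ = tx.send(err);
    }
    entry
}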

View File

@ -11,28 +11,33 @@ use kind_tree::symbol::{Ident, QualifiedIdent};
pub fn derive_open(range: Range, rec: &RecordDecl) -> concrete::Entry {
let mk_var = |name: Ident| -> Box<Expr> {
Box::new(Expr {
data: ExprKind::Var(name),
data: ExprKind::Var { name },
range,
})
};
let mk_cons = |name: QualifiedIdent, spine: Vec<Binding>| -> Box<Expr> {
let mk_cons = |name: QualifiedIdent, args: Vec<Binding>| -> Box<Expr> {
Box::new(Expr {
data: ExprKind::Constr(name, spine),
data: ExprKind::Constr { name, args },
range,
})
};
let mk_app = |left: Box<Expr>, right: Vec<AppBinding>| -> Box<Expr> {
let mk_app = |fun: Box<Expr>, args: Vec<AppBinding>| -> Box<Expr> {
Box::new(Expr {
data: ExprKind::App(left, right),
data: ExprKind::App { fun, args },
range,
})
};
let mk_pi = |name: Ident, left: Box<Expr>, right: Box<Expr>| -> Box<Expr> {
let mk_pi = |name: Ident, typ: Box<Expr>, body: Box<Expr>, erased: bool| -> Box<Expr> {
Box::new(Expr {
data: ExprKind::All(Some(name), left, right),
data: ExprKind::All {
param: Some(name),
typ,
body,
erased,
},
range,
})
};
@ -71,7 +76,7 @@ pub fn derive_open(range: Range, rec: &RecordDecl) -> concrete::Entry {
let cons_tipo = mk_var(Ident::generate("res_"));
let cons_type = rec.fields.iter().rfold(cons_tipo, |out, (name, _, typ)| {
mk_pi(name.clone(), typ.clone(), out)
mk_pi(name.clone(), typ.clone(), out, false)
});
// Scrutinizers

View File

@ -1,5 +1,8 @@
use fxhash::FxHashMap;
use kind_tree::{concrete::{visitor::Visitor, expr::Expr}, symbol::Symbol};
use kind_tree::{
concrete::{expr::Expr, visitor::Visitor},
symbol::Symbol,
};
pub struct Subst<'a> {
pub names: &'a FxHashMap<String, String>,
@ -14,8 +17,6 @@ impl<'a> Visitor for Subst<'a> {
}
pub fn substitute_in_expr(expr: &mut Expr, names: &FxHashMap<String, String>) {
let mut session = Subst {
names,
};
let mut session = Subst { names };
session.visit_expr(expr)
}

View File

@ -14,7 +14,8 @@ kind-checker = { path = "../kind-checker" }
kind-pass = { path = "../kind-pass" }
kind-target-hvm = { path = "../kind-target-hvm" }
hvm = "0.1.81"
hvm = { path = "../../../HVM/crates/hvm" }
strsim = "0.10.0"
fxhash = "0.2.1"
dashmap = "5.4.0"

View File

@ -6,7 +6,7 @@ use kind_span::SyntaxCtxIndex;
use kind_tree::{backend, concrete, desugared};
use session::Session;
use std::{path::PathBuf};
use std::path::PathBuf;
use kind_checker as checker;
@ -25,7 +25,7 @@ pub fn type_check_book(session: &mut Session, path: &PathBuf) -> Option<desugare
let concrete_book = to_book(session, path)?;
let desugared_book = desugar::desugar_book(session.diagnostic_sender.clone(), &concrete_book)?;
let all = desugared_book.names.keys().cloned().collect();
let all = desugared_book.entrs.iter().map(|x| x.0).cloned().collect();
let succeeded = checker::type_check(&desugared_book, session.diagnostic_sender.clone(), all);
@ -34,7 +34,7 @@ pub fn type_check_book(session: &mut Session, path: &PathBuf) -> Option<desugare
}
let erased = erasure::erase_book(
&desugared_book,
desugared_book,
session.diagnostic_sender.clone(),
FxHashSet::from_iter(vec!["Main".to_string()]),
)?;
@ -64,7 +64,7 @@ pub fn erase_book(
let concrete_book = to_book(session, path)?;
let desugared_book = desugar::desugar_book(session.diagnostic_sender.clone(), &concrete_book)?;
erasure::erase_book(
&desugared_book,
desugared_book,
session.diagnostic_sender.clone(),
FxHashSet::from_iter(entrypoint.to_owned()),
)
@ -79,7 +79,7 @@ pub fn check_erasure_book(session: &mut Session, path: &PathBuf) -> Option<desug
let concrete_book = to_book(session, path)?;
let desugared_book = desugar::desugar_book(session.diagnostic_sender.clone(), &concrete_book)?;
erasure::erase_book(
&desugared_book,
desugared_book.clone(),
session.diagnostic_sender.clone(),
FxHashSet::from_iter(vec!["Main".to_string()]),
)?;
@ -102,13 +102,9 @@ pub fn check_main_entry(session: &mut Session, book: &desugared::Book) -> Option
}
}
pub fn execute_file(file: &backend::File) -> Box<backend::Term> {
// TODO: Change to from_file when hvm support it
let mut runtime = hvm::Runtime::from_code(&file.to_string()).unwrap();
let main = runtime.alloc_code("Main").unwrap();
runtime.run_io(main);
runtime.normalize(main);
runtime.readback(main)
pub fn execute_file(file: backend::File) -> Result<String, String> {
let res = hvm::eval_main_default(file)?;
Ok(res.0)
}
pub fn eval_in_checker(book: &desugared::Book) -> Box<backend::Term> {

View File

@ -117,11 +117,12 @@ fn module_to_book<'a>(
.insert(cons_ident.to_string(), cons.extract_book_info(&sum));
}
}
if try_to_insert_new_name(failed, session, sum.name.clone(), book) {
book.count
.insert(sum.name.to_string(), sum.extract_book_info());
book.entries.insert(sum.name.to_string(), TopLevel::SumType(sum));
book.entries
.insert(sum.name.to_string(), TopLevel::SumType(sum));
}
}
TopLevel::RecordType(rec) => {
@ -129,7 +130,7 @@ fn module_to_book<'a>(
book.count
.insert(rec.name.to_string(), rec.extract_book_info());
try_to_insert_new_name(failed, session, rec.name.clone(), book);
let cons_ident = rec.name.add_segment(rec.constructor.to_str());
public_names.insert(cons_ident.to_string());
book.count.insert(
@ -138,14 +139,16 @@ fn module_to_book<'a>(
);
try_to_insert_new_name(failed, session, cons_ident, book);
book.entries.insert(rec.name.to_string(), TopLevel::RecordType(rec));
book.entries
.insert(rec.name.to_string(), TopLevel::RecordType(rec));
}
TopLevel::Entry(entr) => {
try_to_insert_new_name(failed, session, entr.name.clone(), book);
public_names.insert(entr.name.to_string());
book.count
.insert(entr.name.to_string(), entr.extract_book_info());
book.entries.insert(entr.name.to_string(), TopLevel::Entry(entr));
book.entries
.insert(entr.name.to_string(), TopLevel::Entry(entr));
}
}
}
@ -221,14 +224,14 @@ fn parse_and_store_book_by_path<'a>(
failed = true;
}
for idents in state.unbound_top_level.values() {
failed |= parse_and_store_book_by_identifier(session, &idents.iter().nth(0).unwrap(), book);
}
expand_uses(&mut module, session.diagnostic_sender.clone());
module_to_book(&mut failed, session, module, book);
for idents in state.unbound_top_level.values() {
failed |= parse_and_store_book_by_identifier(session, &idents.iter().nth(0).unwrap(), book);
}
failed
}
@ -241,7 +244,7 @@ fn unbound_variable(session: &mut Session, book: &Book, idents: &[Ident]) {
.collect::<Vec<_>>();
similar_names.sort_by(|x, y| x.0.total_cmp(&y.0));
session
.diagnostic_sender
.send(Box::new(DriverError::UnboundVariable(
@ -267,11 +270,7 @@ pub fn check_unbound_top_level(session: &mut Session, book: &mut Book) -> bool {
unbound::get_book_unbound(session.diagnostic_sender.clone(), book, true);
for (_, unbound) in unbound_tops {
let res: Vec<Ident> = unbound
.iter()
.filter(|x| !x.used_by_sugar)
.map(|x| x.to_ident())
.collect();
let res: Vec<Ident> = unbound.iter().map(|x| x.to_ident()).collect();
if !res.is_empty() {
unbound_variable(session, &book, &res);
failed = true;

View File

@ -16,7 +16,7 @@ pub enum EncodeSequence {
}
#[derive(Debug, Clone)]
pub enum SyntaxError {
pub enum SyntaxDiagnostic {
UnfinishedString(Range),
UnfinishedChar(Range),
UnfinishedComment(Range),
@ -44,31 +44,30 @@ fn encode_name(encode: EncodeSequence) -> &'static str {
}
}
impl Diagnostic for SyntaxError {
impl Diagnostic for SyntaxDiagnostic {
fn get_syntax_ctx(&self) -> Option<SyntaxCtxIndex> {
match self {
SyntaxError::UnfinishedString(range) => Some(range.ctx),
SyntaxError::UnfinishedChar(range) => Some(range.ctx),
SyntaxError::UnfinishedComment(range) => Some(range.ctx),
SyntaxError::InvalidEscapeSequence(_, range) => Some(range.ctx),
SyntaxError::InvalidNumberRepresentation(_, range) => Some(range.ctx),
SyntaxError::UnexpectedChar(_, range) => Some(range.ctx),
SyntaxError::UnexpectedToken(_,range, _) => Some(range.ctx),
SyntaxError::LowerCasedDefinition(_, range) => Some(range.ctx),
SyntaxError::NotAClauseOfDef(range, _) => Some(range.ctx),
SyntaxError::Unclosed(range) => Some(range.ctx),
SyntaxError::IgnoreRestShouldBeOnTheEnd(range) => Some(range.ctx),
SyntaxError::UnusedDocString(range) => Some(range.ctx),
SyntaxError::CannotUseUse(range) => Some(range.ctx),
SyntaxError::ImportsCannotHaveAlias(range) => Some(range.ctx),
SyntaxError::InvalidNumberType(_, range) => Some(range.ctx),
SyntaxDiagnostic::UnfinishedString(range) => Some(range.ctx),
SyntaxDiagnostic::UnfinishedChar(range) => Some(range.ctx),
SyntaxDiagnostic::UnfinishedComment(range) => Some(range.ctx),
SyntaxDiagnostic::InvalidEscapeSequence(_, range) => Some(range.ctx),
SyntaxDiagnostic::InvalidNumberRepresentation(_, range) => Some(range.ctx),
SyntaxDiagnostic::UnexpectedChar(_, range) => Some(range.ctx),
SyntaxDiagnostic::UnexpectedToken(_, range, _) => Some(range.ctx),
SyntaxDiagnostic::LowerCasedDefinition(_, range) => Some(range.ctx),
SyntaxDiagnostic::NotAClauseOfDef(range, _) => Some(range.ctx),
SyntaxDiagnostic::Unclosed(range) => Some(range.ctx),
SyntaxDiagnostic::IgnoreRestShouldBeOnTheEnd(range) => Some(range.ctx),
SyntaxDiagnostic::UnusedDocString(range) => Some(range.ctx),
SyntaxDiagnostic::CannotUseUse(range) => Some(range.ctx),
SyntaxDiagnostic::ImportsCannotHaveAlias(range) => Some(range.ctx),
SyntaxDiagnostic::InvalidNumberType(_, range) => Some(range.ctx),
}
}
fn to_diagnostic_frame(&self) -> DiagnosticFrame {
match self {
SyntaxError::UnfinishedString(range) => DiagnosticFrame {
SyntaxDiagnostic::UnfinishedString(range) => DiagnosticFrame {
code: 1,
severity: Severity::Error,
title: "Unfinished String".to_string(),
@ -82,7 +81,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::IgnoreRestShouldBeOnTheEnd(range) => DiagnosticFrame {
SyntaxDiagnostic::IgnoreRestShouldBeOnTheEnd(range) => DiagnosticFrame {
code: 2,
severity: Severity::Error,
title: "Invalid position of the '..' operator".to_string(),
@ -96,7 +95,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::UnusedDocString(range) => DiagnosticFrame {
SyntaxDiagnostic::UnusedDocString(range) => DiagnosticFrame {
code: 3,
severity: Severity::Warning,
title: "This entire documentation comment is in a invalid position".to_string(),
@ -110,7 +109,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::UnfinishedChar(range) => DiagnosticFrame {
SyntaxDiagnostic::UnfinishedChar(range) => DiagnosticFrame {
code: 4,
severity: Severity::Error,
title: "Unfinished Char".to_string(),
@ -124,7 +123,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::LowerCasedDefinition(name, range) => DiagnosticFrame {
SyntaxDiagnostic::LowerCasedDefinition(name, range) => DiagnosticFrame {
code: 5,
severity: Severity::Error,
title: "The definition name must be capitalized.".to_string(),
@ -142,7 +141,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::NotAClauseOfDef(fst, snd) => DiagnosticFrame {
SyntaxDiagnostic::NotAClauseOfDef(fst, snd) => DiagnosticFrame {
code: 6,
severity: Severity::Error,
title: "Unexpected capitalized name that does not refer to the definition".to_string(),
@ -165,7 +164,7 @@ impl Diagnostic for SyntaxError {
},
],
},
SyntaxError::UnfinishedComment(range) => DiagnosticFrame {
SyntaxDiagnostic::UnfinishedComment(range) => DiagnosticFrame {
code: 7,
severity: Severity::Error,
title: "Unfinished Comment".to_string(),
@ -179,7 +178,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::InvalidEscapeSequence(kind, range) => DiagnosticFrame {
SyntaxDiagnostic::InvalidEscapeSequence(kind, range) => DiagnosticFrame {
code: 8,
severity: Severity::Error,
title: format!("The {} character sequence is invalid!", encode_name(kind.clone())),
@ -193,7 +192,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::InvalidNumberRepresentation(repr, range) => DiagnosticFrame {
SyntaxDiagnostic::InvalidNumberRepresentation(repr, range) => DiagnosticFrame {
code: 9,
severity: Severity::Error,
title: format!("The {} number sequence is invalid!", encode_name(repr.clone())),
@ -207,7 +206,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::UnexpectedChar(chr, range) => DiagnosticFrame {
SyntaxDiagnostic::UnexpectedChar(chr, range) => DiagnosticFrame {
code: 10,
severity: Severity::Error,
title: format!("The char '{}' is invalid", chr),
@ -221,7 +220,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::UnexpectedToken(Token::Eof, range, _expect) => DiagnosticFrame {
SyntaxDiagnostic::UnexpectedToken(Token::Eof, range, _expect) => DiagnosticFrame {
code: 11,
severity: Severity::Error,
title: "Unexpected end of file.".to_string(),
@ -235,7 +234,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::UnexpectedToken(Token::Comment(_, _), range, _expect) => DiagnosticFrame {
SyntaxDiagnostic::UnexpectedToken(Token::Comment(_, _), range, _expect) => DiagnosticFrame {
code: 12,
severity: Severity::Error,
title: "Unexpected documentation comment.".to_string(),
@ -249,7 +248,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::UnexpectedToken(token, range, _expect) => DiagnosticFrame {
SyntaxDiagnostic::UnexpectedToken(token, range, _expect) => DiagnosticFrame {
code: 13,
severity: Severity::Error,
title: format!("Unexpected token '{}'.", token),
@ -263,7 +262,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::Unclosed(range) => DiagnosticFrame {
SyntaxDiagnostic::Unclosed(range) => DiagnosticFrame {
code: 14,
severity: Severity::Error,
title: "Unclosed parenthesis.".to_string(),
@ -277,7 +276,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::CannotUseUse(range) => DiagnosticFrame {
SyntaxDiagnostic::CannotUseUse(range) => DiagnosticFrame {
code: 15,
severity: Severity::Error,
title: "Can only use the 'use' statement in the beggining of the file".to_string(),
@ -291,7 +290,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::ImportsCannotHaveAlias(range) => DiagnosticFrame {
SyntaxDiagnostic::ImportsCannotHaveAlias(range) => DiagnosticFrame {
code: 16,
severity: Severity::Error,
title: "The upper cased name cannot have an alias".to_string(),
@ -305,7 +304,7 @@ impl Diagnostic for SyntaxError {
main: true,
}],
},
SyntaxError::InvalidNumberType(type_, range) => DiagnosticFrame {
SyntaxDiagnostic::InvalidNumberType(type_, range) => DiagnosticFrame {
code: 17,
severity: Severity::Error,
title: format!("The {} number type is invalid", type_),
@ -323,8 +322,8 @@ impl Diagnostic for SyntaxError {
}
}
impl From<Box<SyntaxError>> for DiagnosticFrame {
fn from(err: Box<SyntaxError>) -> Self {
impl From<Box<SyntaxDiagnostic>> for DiagnosticFrame {
fn from(err: Box<SyntaxDiagnostic>) -> Self {
(err).into()
}
}

View File

@ -3,7 +3,7 @@ use kind_tree::concrete::expr::*;
use kind_tree::symbol::{Ident, QualifiedIdent};
use kind_tree::{NumType, Number, Operator};
use crate::errors::SyntaxError;
use crate::errors::SyntaxDiagnostic;
use crate::lexer::tokens::Token;
use crate::macros::eat_single;
use crate::state::Parser;
@ -11,7 +11,7 @@ use crate::state::Parser;
impl<'a> Parser<'a> {
// We always look through the parenthesis in the
// matching with is_operator
pub fn is_operator(&self) -> bool {
fn is_operator(&self) -> bool {
matches!(
self.peek(1),
Token::Plus
@ -33,7 +33,7 @@ impl<'a> Parser<'a> {
)
}
pub fn eat_operator(&mut self) -> Result<Operator, SyntaxError> {
fn eat_operator(&mut self) -> Result<Operator, SyntaxDiagnostic> {
self.eat(|token| match token {
Token::Plus => Some(Operator::Add),
Token::Minus => Some(Operator::Sub),
@ -55,7 +55,7 @@ impl<'a> Parser<'a> {
})
}
pub fn ignore_docs(&mut self) {
fn ignore_docs(&mut self) {
let start = self.range();
let mut last = self.range();
let mut unused = false;
@ -65,39 +65,37 @@ impl<'a> Parser<'a> {
unused = true;
}
if unused {
self.errs
.send(Box::new(SyntaxError::UnusedDocString(start.mix(last))))
.unwrap()
self.send_dignostic(SyntaxDiagnostic::UnusedDocString(start.mix(last)))
}
}
pub fn is_pi_type(&self) -> bool {
fn is_pi_type(&self) -> bool {
self.get().same_variant(&Token::LPar)
&& self.peek(1).is_lower_id()
&& self.peek(2).same_variant(&Token::Colon)
}
pub fn is_named_parameter(&self) -> bool {
fn is_named_parameter(&self) -> bool {
self.get().same_variant(&Token::LPar)
&& self.peek(1).is_lower_id()
&& self.peek(2).same_variant(&Token::Eq)
}
pub fn is_lambda(&self) -> bool {
fn is_lambda(&self) -> bool {
self.get().is_lower_id() && self.peek(1).same_variant(&Token::FatArrow)
}
pub fn is_sigma_type(&self) -> bool {
fn is_sigma_type(&self) -> bool {
self.get().same_variant(&Token::LBracket)
&& self.peek(1).is_lower_id()
&& self.peek(2).same_variant(&Token::Colon)
}
pub fn is_substitution(&self) -> bool {
fn is_substitution(&self) -> bool {
self.get().same_variant(&Token::HashHash)
}
pub fn parse_substitution(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_substitution(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // '##'
let name = self.parse_id()?;
@ -116,21 +114,21 @@ impl<'a> Parser<'a> {
}))
}
pub fn parse_id(&mut self) -> Result<Ident, SyntaxError> {
pub fn parse_id(&mut self) -> Result<Ident, SyntaxDiagnostic> {
let range = self.range();
let id = eat_single!(self, Token::LowerId(x) => x.clone())?;
let ident = Ident::new_static(&id, range);
Ok(ident)
}
pub fn parse_any_id(&mut self) -> Result<Ident, SyntaxError> {
pub fn parse_any_id(&mut self) -> Result<Ident, SyntaxDiagnostic> {
let range = self.range();
let id = eat_single!(self, Token::LowerId(x) | Token::UpperId(x, None) => x.clone())?;
let ident = Ident::new_static(&id, range);
Ok(ident)
}
pub fn parse_upper_id(&mut self) -> Result<QualifiedIdent, SyntaxError> {
pub fn parse_upper_id(&mut self) -> Result<QualifiedIdent, SyntaxDiagnostic> {
let range = self.range();
let (start, end) =
eat_single!(self, Token::UpperId(start, end) => (start.clone(), end.clone()))?;
@ -138,25 +136,30 @@ impl<'a> Parser<'a> {
Ok(ident)
}
fn parse_lambda(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_lambda(&mut self, erased: bool) -> Result<Box<Expr>, SyntaxDiagnostic> {
let name_span = self.range();
let ident = self.parse_id()?;
let param = self.parse_id()?;
self.advance(); // '=>'
let expr = self.parse_expr(false)?;
let end_range = expr.range;
let body = self.parse_expr(false)?;
let end_range = body.range;
Ok(Box::new(Expr {
data: ExprKind::Lambda(ident, None, expr, false),
data: ExprKind::Lambda {
param,
typ: None,
body,
erased,
},
range: name_span.mix(end_range),
}))
}
fn parse_pi_or_lambda(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_pi_or_lambda(&mut self, erased: bool) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance(); // '('
let ident = self.parse_id()?;
let param = self.parse_id()?;
self.advance(); // ':'
let typ = self.parse_expr(false)?;
@ -167,62 +170,83 @@ impl<'a> Parser<'a> {
let body = self.parse_expr(false)?;
Ok(Box::new(Expr {
range: range.mix(body.range),
data: ExprKind::Lambda(ident, Some(typ), body, false),
data: ExprKind::Lambda {
param,
typ: Some(typ),
body,
erased,
},
}))
} else if self.check_and_eat(Token::RightArrow) {
let body = self.parse_expr(false)?;
Ok(Box::new(Expr {
range: range.mix(body.range),
data: ExprKind::All(Some(ident), typ, body),
data: ExprKind::All {
param: Some(param),
typ,
body,
erased,
},
}))
} else {
Ok(Box::new(Expr {
range: range.mix(typ.range),
data: ExprKind::Ann(
Box::new(Expr {
data: ExprKind::Ann {
val: Box::new(Expr {
range: range.mix(par_range),
data: ExprKind::Var(ident),
data: ExprKind::Var { name: param },
}),
typ,
),
},
}))
}
}
fn parse_sigma_type(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_sigma_type(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance(); // '['
let ident = self.parse_id()?;
self.advance(); // ':'
let typ = self.parse_expr(false)?;
let fst = self.parse_expr(false)?;
self.eat_closing_keyword(Token::RBracket, range)?;
self.eat_variant(Token::RightArrow)?;
let body = self.parse_expr(false)?;
let snd = self.parse_expr(false)?;
Ok(Box::new(Expr {
range: range.mix(body.locate()),
data: ExprKind::Sigma(Some(ident), typ, body),
range: range.mix(snd.locate()),
data: ExprKind::Sigma {
param: Some(ident),
fst,
snd,
},
}))
}
fn parse_var(&mut self) -> Result<Box<Expr>, SyntaxError> {
let id = self.parse_id()?;
fn parse_var(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let name = self.parse_id()?;
Ok(Box::new(Expr {
range: id.range,
data: ExprKind::Var(id),
range: name.range,
data: ExprKind::Var { name },
}))
}
fn parse_single_upper(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_single_upper(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let id = self.parse_upper_id()?;
let data = match id.to_string().as_str() {
"Type" => ExprKind::Lit(Literal::Type),
"U60" => ExprKind::Lit(Literal::NumType(NumType::U60)),
"U120" => ExprKind::Lit(Literal::NumType(NumType::U120)),
_ => ExprKind::Constr(id.clone(), vec![]),
"Type" => ExprKind::Lit { lit: Literal::Type },
"U60" => ExprKind::Lit {
lit: Literal::NumType(NumType::U60),
},
"U120" => ExprKind::Lit {
lit: Literal::NumType(NumType::U120),
},
_ => ExprKind::Constr {
name: id.clone(),
args: vec![],
},
};
Ok(Box::new(Expr {
range: id.range,
@ -230,50 +254,63 @@ impl<'a> Parser<'a> {
}))
}
fn parse_data(&mut self, multiline: bool) -> Result<Box<Expr>, SyntaxError> {
fn parse_data(&mut self, multiline: bool) -> Result<Box<Expr>, SyntaxDiagnostic> {
let id = self.parse_upper_id()?;
let mut range = id.range;
let data = match id.to_string().as_str() {
"Type" => ExprKind::Lit(Literal::Type),
"U60" => ExprKind::Lit(Literal::NumType(NumType::U60)),
"U120" => ExprKind::Lit(Literal::NumType(NumType::U120)),
"Type" => ExprKind::Lit { lit: Literal::Type },
"U60" => ExprKind::Lit {
lit: Literal::NumType(NumType::U60),
},
"U120" => ExprKind::Lit {
lit: Literal::NumType(NumType::U120),
},
_ => {
let (range_end, spine) = self.parse_call_tail(id.range, multiline)?;
range = range_end;
ExprKind::Constr(id, spine)
ExprKind::Constr {
name: id,
args: spine,
}
}
};
Ok(Box::new(Expr { range, data }))
}
fn parse_num60(&mut self, num: u64) -> Result<Box<Expr>, SyntaxError> {
fn parse_num60(&mut self, num: u64) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance();
Ok(Box::new(Expr {
range,
data: ExprKind::Lit(Literal::Number(Number::U60(num))),
data: ExprKind::Lit {
lit: Literal::Number(Number::U60(num)),
},
}))
}
fn parse_num120(&mut self, num: u128) -> Result<Box<Expr>, SyntaxError> {
fn parse_num120(&mut self, num: u128) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance();
Ok(Box::new(Expr {
range,
data: ExprKind::Lit(Literal::Number(Number::U120(num))),
data: ExprKind::Lit {
lit: Literal::Number(Number::U120(num)),
},
}))
}
fn parse_char(&mut self, chr: char) -> Result<Box<Expr>, SyntaxError> {
fn parse_char(&mut self, chr: char) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance();
Ok(Box::new(Expr {
range,
data: ExprKind::Lit(Literal::Char(chr)),
data: ExprKind::Lit {
lit: Literal::Char(chr),
},
}))
}
fn parse_binary_op(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_binary_op(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance(); // '('
let op = self.eat_operator()?;
@ -285,24 +322,24 @@ impl<'a> Parser<'a> {
Ok(Box::new(Expr {
range: range.mix(end),
data: ExprKind::Binary(op, fst, snd),
data: ExprKind::Binary { op, fst, snd },
}))
}
fn parse_list(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_list(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance(); // '['
let mut vec = Vec::new();
let mut args = Vec::new();
if self.check_actual(Token::RBracket) {
let range = self.advance().1.mix(range);
return Ok(Box::new(Expr {
range,
data: ExprKind::List(vec),
data: ExprKind::List { args },
}));
}
vec.push(*self.parse_atom()?);
args.push(*self.parse_atom()?);
let mut initialized = false;
let mut with_comma = false;
@ -315,13 +352,13 @@ impl<'a> Parser<'a> {
if with_comma {
self.check_and_eat(Token::Comma);
match self.try_single(&|x| x.parse_expr(false))? {
Some(res) => vec.push(*res),
Some(res) => args.push(*res),
None => break,
}
} else {
// TODO: Error when someone tries to use a comma after not using it.
match self.try_single(&|x| x.parse_atom())? {
Some(res) => vec.push(*res),
Some(res) => args.push(*res),
None => break,
}
}
@ -332,11 +369,11 @@ impl<'a> Parser<'a> {
Ok(Box::new(Expr {
range,
data: ExprKind::List(vec),
data: ExprKind::List { args },
}))
}
fn parse_paren(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_paren(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
if self.is_operator() {
self.parse_binary_op()
} else {
@ -351,7 +388,7 @@ impl<'a> Parser<'a> {
self.eat_closing_keyword(Token::RPar, range)?;
Ok(Box::new(Expr {
data: ExprKind::Ann(expr, typ),
data: ExprKind::Ann { val: expr, typ },
range,
}))
} else {
@ -363,25 +400,29 @@ impl<'a> Parser<'a> {
}
}
pub fn parse_help(&mut self, str: String) -> Result<Box<Expr>, SyntaxError> {
fn parse_help(&mut self, str: String) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance();
Ok(Box::new(Expr {
range,
data: ExprKind::Lit(Literal::Help(Ident::new(str, range))),
data: ExprKind::Lit {
lit: Literal::Help(Ident::new(str, range)),
},
}))
}
pub fn parse_str(&mut self, str: String) -> Result<Box<Expr>, SyntaxError> {
fn parse_str(&mut self, str: String) -> Result<Box<Expr>, SyntaxDiagnostic> {
let range = self.range();
self.advance();
Ok(Box::new(Expr {
range,
data: ExprKind::Lit(Literal::String(str)),
data: ExprKind::Lit {
lit: Literal::String(str),
},
}))
}
pub fn parse_num_lit(&mut self) -> Result<usize, SyntaxError> {
fn parse_num_lit(&mut self) -> Result<usize, SyntaxDiagnostic> {
self.ignore_docs();
match self.get().clone() {
Token::Num60(num) => {
@ -392,7 +433,7 @@ impl<'a> Parser<'a> {
}
}
pub fn parse_atom(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_atom(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
self.ignore_docs();
match self.get().clone() {
Token::UpperId(_, _) => self.parse_single_upper(),
@ -410,7 +451,7 @@ impl<'a> Parser<'a> {
}
}
fn parse_binding(&mut self) -> Result<Binding, SyntaxError> {
fn parse_binding(&mut self) -> Result<Binding, SyntaxDiagnostic> {
self.ignore_docs();
if self.is_named_parameter() {
let start = self.range();
@ -426,13 +467,12 @@ impl<'a> Parser<'a> {
}
}
fn parse_app_binding(&mut self) -> Result<AppBinding, SyntaxError> {
fn parse_app_binding(&mut self) -> Result<AppBinding, SyntaxDiagnostic> {
self.ignore_docs();
let (erased, data) = if self.get().same_variant(&Token::Minus) {
self.advance();
let (erased, data) = if self.check_and_eat(Token::Tilde) {
let start = self.range();
self.eat_variant(Token::LPar)?;
let atom = self.parse_atom()?;
let atom = self.parse_expr(true)?;
self.eat_closing_keyword(Token::RPar, start)?;
(true, atom)
} else {
@ -441,30 +481,30 @@ impl<'a> Parser<'a> {
Ok(AppBinding { data, erased })
}
fn parse_call(&mut self, multiline: bool) -> Result<Box<Expr>, SyntaxError> {
fn parse_call(&mut self, multiline: bool) -> Result<Box<Expr>, SyntaxDiagnostic> {
if self.get().is_upper_id() {
self.parse_data(multiline)
} else {
let head = self.parse_atom()?;
let start = head.range;
let fun = self.parse_atom()?;
let start = fun.range;
let mut spine = Vec::new();
let mut args = Vec::new();
let mut end = start;
while (!self.is_linebreak() || multiline) && !self.get().same_variant(&Token::Eof) {
if let Some(atom) = self.try_single(&|parser| parser.parse_app_binding())? {
end = atom.data.range;
spine.push(atom)
args.push(atom)
} else {
break;
}
}
if spine.is_empty() {
Ok(head)
if args.is_empty() {
Ok(fun)
} else {
Ok(Box::new(Expr {
data: ExprKind::App(head, spine),
data: ExprKind::App { fun, args },
range: start.mix(end),
}))
}
@ -475,7 +515,7 @@ impl<'a> Parser<'a> {
&mut self,
start: Range,
multiline: bool,
) -> Result<(Range, Vec<Binding>), SyntaxError> {
) -> Result<(Range, Vec<Binding>), SyntaxDiagnostic> {
let mut spine = Vec::new();
let mut end = start;
while (!self.is_linebreak() || multiline) && !self.get().same_variant(&Token::Eof) {
@ -489,20 +529,25 @@ impl<'a> Parser<'a> {
Ok((end, spine))
}
fn parse_arrow(&mut self, multiline: bool) -> Result<Box<Expr>, SyntaxError> {
let mut head = self.parse_call(multiline)?;
fn parse_arrow(&mut self, multiline: bool) -> Result<Box<Expr>, SyntaxDiagnostic> {
let mut typ = self.parse_call(multiline)?;
while self.check_and_eat(Token::RightArrow) {
let next = self.parse_expr(false)?;
let range = head.range.mix(next.range);
head = Box::new(Expr {
data: ExprKind::All(None, head, next),
let body = self.parse_expr(false)?;
let range = typ.range.mix(body.range);
typ = Box::new(Expr {
data: ExprKind::All {
param: None,
typ,
body,
erased: false,
},
range,
});
}
Ok(head)
Ok(typ)
}
pub fn parse_ask(&mut self) -> Result<Box<Sttm>, SyntaxError> {
fn parse_ask(&mut self) -> Result<Box<Sttm>, SyntaxDiagnostic> {
let start = self.range();
self.advance();
let name = self.parse_destruct()?;
@ -517,7 +562,7 @@ impl<'a> Parser<'a> {
}))
}
pub fn parse_destruct(&mut self) -> Result<Destruct, SyntaxError> {
fn parse_destruct(&mut self) -> Result<Destruct, SyntaxDiagnostic> {
if self.get().is_upper_id() {
let upper = self.parse_upper_id()?;
let (range, bindings, ignore_rest) = self.parse_pat_destruct_bindings()?;
@ -533,7 +578,7 @@ impl<'a> Parser<'a> {
}
}
pub fn parse_monadic_let(&mut self) -> Result<Box<Sttm>, SyntaxError> {
fn parse_monadic_let(&mut self) -> Result<Box<Sttm>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // 'let'
let destruct = self.parse_destruct()?;
@ -548,7 +593,7 @@ impl<'a> Parser<'a> {
}))
}
pub fn parse_return(&mut self) -> Result<Box<Sttm>, SyntaxError> {
fn parse_return(&mut self) -> Result<Box<Sttm>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // 'return'
let expr = self.parse_expr(false)?;
@ -560,7 +605,7 @@ impl<'a> Parser<'a> {
}))
}
pub fn parse_sttm(&mut self) -> Result<Box<Sttm>, SyntaxError> {
fn parse_sttm(&mut self) -> Result<Box<Sttm>, SyntaxDiagnostic> {
let start = self.range();
if self.check_actual_id("ask") {
self.parse_ask()
@ -588,7 +633,7 @@ impl<'a> Parser<'a> {
}
}
pub fn parse_do(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_do(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // 'do'
let typ = self.parse_upper_id()?;
@ -596,14 +641,14 @@ impl<'a> Parser<'a> {
let sttm = self.parse_sttm()?;
let end = self.eat_variant(Token::RBrace)?.1;
Ok(Box::new(Expr {
data: ExprKind::Do(typ, sttm),
data: ExprKind::Do { typ, sttm },
range: start.mix(end),
}))
}
pub fn parse_pat_destruct_bindings(
fn parse_pat_destruct_bindings(
&mut self,
) -> Result<(Option<Range>, Vec<CaseBinding>, Option<Range>), SyntaxError> {
) -> Result<(Option<Range>, Vec<CaseBinding>, Option<Range>), SyntaxDiagnostic> {
let mut ignore_rest_range = None;
let mut bindings = Vec::new();
let mut range = None;
@ -633,13 +678,13 @@ impl<'a> Parser<'a> {
_ => break,
}
if let Some(range) = ignore_rest_range {
return Err(SyntaxError::IgnoreRestShouldBeOnTheEnd(range));
return Err(SyntaxDiagnostic::IgnoreRestShouldBeOnTheEnd(range));
}
}
Ok((range, bindings, ignore_rest_range))
}
pub fn parse_match(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_match(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // 'match'
@ -689,34 +734,34 @@ impl<'a> Parser<'a> {
}))
}
pub fn parse_let(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_let(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // 'let'
let name = self.parse_destruct()?;
self.eat_variant(Token::Eq)?;
let expr = self.parse_expr(false)?;
let val = self.parse_expr(false)?;
self.check_and_eat(Token::Semi);
let next = self.parse_expr(false)?;
let end = next.range;
Ok(Box::new(Expr {
data: ExprKind::Let(name, expr, next),
data: ExprKind::Let { name, val, next },
range: start.mix(end),
}))
}
fn parse_sigma_pair(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_sigma_pair(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // '$'
let fst = self.parse_atom()?;
let snd = self.parse_atom()?;
let end = snd.range;
Ok(Box::new(Expr {
data: ExprKind::Pair(fst, snd),
data: ExprKind::Pair { fst, snd },
range: start.mix(end),
}))
}
fn parse_hole(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_hole(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // '_'
Ok(Box::new(Expr {
@ -725,28 +770,39 @@ impl<'a> Parser<'a> {
}))
}
fn parse_if(&mut self) -> Result<Box<Expr>, SyntaxError> {
fn parse_if(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // 'if'
let cond = self.parse_expr(false)?;
self.eat_variant(Token::LBrace)?;
let if_ = self.parse_expr(false)?;
let then_ = self.parse_expr(false)?;
self.eat_variant(Token::RBrace)?;
self.eat_id("else")?;
self.eat_variant(Token::LBrace)?;
let els_ = self.parse_expr(false)?;
let else_ = self.parse_expr(false)?;
let end = self.eat_variant(Token::RBrace)?.1;
let range = start.mix(end);
Ok(Box::new(Expr {
data: ExprKind::If(cond, if_, els_),
data: ExprKind::If { cond, then_, else_ },
range,
}))
}
fn parse_erased(&mut self) -> Result<Box<Expr>, SyntaxDiagnostic> {
self.advance(); // '~';
if self.is_lambda() {
self.parse_lambda(true)
} else if self.is_pi_type() {
self.parse_pi_or_lambda(true)
} else {
self.fail(vec![])
}
}
/// The infinite hell of else ifs. But it's the most readable way
/// to check if the queue of tokens matches a pattern, as we need
/// some lookahead tokens.
pub fn parse_expr(&mut self, multiline: bool) -> Result<Box<Expr>, SyntaxError> {
pub fn parse_expr(&mut self, multiline: bool) -> Result<Box<Expr>, SyntaxDiagnostic> {
self.ignore_docs();
if self.check_actual_id("do") {
self.parse_do()
@ -759,13 +815,15 @@ impl<'a> Parser<'a> {
} else if self.check_actual(Token::Dollar) {
self.parse_sigma_pair()
} else if self.is_lambda() {
self.parse_lambda()
self.parse_lambda(false)
} else if self.is_pi_type() {
self.parse_pi_or_lambda()
self.parse_pi_or_lambda(false)
} else if self.is_sigma_type() {
self.parse_sigma_type()
} else if self.is_substitution() {
self.parse_substitution()
} else if self.check_actual(Token::Tilde) {
self.parse_erased()
} else {
self.parse_arrow(multiline)
}
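One semantic addition among all the renames in this file is the '~' prefix: parse_erased above consumes it and re-enters parse_lambda or parse_pi_or_lambda with erased set to true, and parse_app_binding now uses the same token for erased application arguments. Judging from the is_lambda and is_pi_type checks, this presumably corresponds to surface forms along the lines of '~ x => body' and '~ (x : A) -> B' (an inference, not something stated in the commit). With the new struct variants, an erased pi type then ends up as a node like the one in this sketch, where the parameters stand for whatever the parser has already produced:

use kind_span::Range;
use kind_tree::concrete::expr::{Expr, ExprKind};
use kind_tree::symbol::Ident;

// Sketch only: the node an erased pi type becomes after this change,
// i.e. ExprKind::All with erased set to true instead of a separate constructor.
fn erased_pi(param: Ident, typ: Box<Expr>, body: Box<Expr>, range: Range) -> Box<Expr> {
    Box::new(Expr {
        range,
        data: ExprKind::All {
            param: Some(param),
            typ,
            body,
            erased: true,
        },
    })
}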

View File

@ -3,7 +3,7 @@
use kind_span::Range;
use crate::errors::SyntaxError;
use crate::errors::SyntaxDiagnostic;
use crate::lexer::tokens::Token;
use crate::Lexer;
@ -61,7 +61,7 @@ impl<'a> Lexer<'a> {
self.pos += size;
if self.comment_depth != 0 {
(
Token::Error(Box::new(SyntaxError::UnfinishedComment(
Token::Error(Box::new(SyntaxDiagnostic::UnfinishedComment(
self.mk_range(start),
))),
self.mk_range(start),

View File

@ -5,7 +5,7 @@
use kind_span::Range;
use crate::errors::{EncodeSequence, SyntaxError};
use crate::errors::{EncodeSequence, SyntaxDiagnostic};
use crate::lexer::tokens::Token;
use crate::Lexer;
@ -18,22 +18,22 @@ impl<'a> Lexer<'a> {
size: usize,
base: u32,
err: EncodeSequence,
) -> Result<char, SyntaxError> {
) -> Result<char, SyntaxDiagnostic> {
let string = self.next_chars(size);
let to_chr = string.and_then(|x| u32::from_str_radix(x, base).ok());
if let Some(chr) = to_chr.and_then(char::from_u32) {
return Ok(chr);
}
Err(SyntaxError::InvalidEscapeSequence(
Err(SyntaxDiagnostic::InvalidEscapeSequence(
err,
self.mk_range(start),
))
}
/// Turns a escaped char into a normal char.
fn lex_escaped_char(&mut self, start: usize) -> Result<char, SyntaxError> {
fn lex_escaped_char(&mut self, start: usize) -> Result<char, SyntaxDiagnostic> {
match self.peekable.peek() {
None => Err(SyntaxError::UnfinishedString(
None => Err(SyntaxDiagnostic::UnfinishedString(
self.mk_one_column_range(start),
)),
Some(&x) => {
@ -68,7 +68,7 @@ impl<'a> Lexer<'a> {
let type_start = self.span();
let make_num_err = |x: &Self| {
(
Token::Error(Box::new(SyntaxError::InvalidNumberRepresentation(
Token::Error(Box::new(SyntaxDiagnostic::InvalidNumberRepresentation(
err,
x.mk_range(num_start),
))),
@ -95,7 +95,7 @@ impl<'a> Lexer<'a> {
}
}
_ => (
Token::Error(Box::new(SyntaxError::InvalidNumberType(
Token::Error(Box::new(SyntaxDiagnostic::InvalidNumberType(
format!("u{}", type_),
self.mk_range(type_start),
))),
@ -144,7 +144,7 @@ impl<'a> Lexer<'a> {
}
}
pub fn lex_char(&mut self) -> Result<char, SyntaxError> {
pub fn lex_char(&mut self) -> Result<char, SyntaxDiagnostic> {
let start = self.span();
if let Some(&x) = self.peekable.peek() {
let chr_start = self.span();
@ -153,16 +153,16 @@ impl<'a> Lexer<'a> {
self.next_char();
match self.lex_escaped_char(chr_start) {
Ok(x) => Ok(x),
Err(t) => Err(t)
Err(t) => Err(t),
}
}
x => {
self.next_char();
Ok(x)
},
}
}
} else {
Err(SyntaxError::UnfinishedChar(self.mk_range(start)))
Err(SyntaxDiagnostic::UnfinishedChar(self.mk_range(start)))
}
}
/// Lexes a string that starts with '"' and ends with the
@ -203,7 +203,7 @@ impl<'a> Lexer<'a> {
(_, Some(err)) => err,
(Some('"'), _) => (Token::Str(string), self.mk_range(start)),
_ => (
Token::Error(Box::new(SyntaxError::UnfinishedString(
Token::Error(Box::new(SyntaxDiagnostic::UnfinishedString(
self.mk_one_column_range(start),
))),
self.mk_range(start),

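The escape handling in this lexer reduces to reading a fixed number of digits in a given base and validating the resulting code point. A self-contained sketch of that conversion, assuming a plain `&str` slice instead of the lexer's peekable character stream:

```rust
// Decodes `size` digits of `input` in the given base into a char,
// mirroring the from_str_radix + char::from_u32 pipeline used by the lexer.
fn decode_escape(input: &str, size: usize, base: u32) -> Option<char> {
    let digits = input.get(..size)?;                      // take exactly `size` ASCII digits
    let value = u32::from_str_radix(digits, base).ok()?;  // parse them in `base`
    char::from_u32(value)                                 // reject invalid code points
}

fn main() {
    assert_eq!(decode_escape("41", 2, 16), Some('A'));  // \x41
    assert_eq!(decode_escape("D800", 4, 16), None);     // surrogate: not a valid char
}
```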
View File

@ -11,7 +11,7 @@ use std::sync::mpsc::Sender;
use kind_report::data::Diagnostic;
use kind_span::Range;
use crate::errors::SyntaxError;
use crate::errors::SyntaxDiagnostic;
use self::{state::Lexer, tokens::Token};
@ -50,10 +50,7 @@ impl<'a> Lexer<'a> {
}
pub fn to_keyword(data: &str) -> Token {
match data {
"_" => Token::Hole,
_ => Token::LowerId(data.to_string()),
}
Token::LowerId(data.to_string())
}
pub fn get_next_no_error(&mut self, vec: Sender<Box<dyn Diagnostic>>) -> (Token, Range) {
@ -197,14 +194,14 @@ impl<'a> Lexer<'a> {
match self.peekable.peek() {
Some('\'') => self.single_token(Token::Char(chr), start),
Some(c) => (
Token::Error(Box::new(SyntaxError::UnexpectedChar(
Token::Error(Box::new(SyntaxDiagnostic::UnexpectedChar(
*c,
self.mk_range(start),
))),
self.mk_range(start),
),
None => (
Token::Error(Box::new(SyntaxError::UnfinishedChar(
Token::Error(Box::new(SyntaxDiagnostic::UnfinishedChar(
self.mk_range(start),
))),
self.mk_range(start),
@ -221,7 +218,7 @@ impl<'a> Lexer<'a> {
&c => {
self.next_char();
(
Token::Error(Box::new(SyntaxError::UnexpectedChar(
Token::Error(Box::new(SyntaxDiagnostic::UnexpectedChar(
c,
self.mk_range(start),
))),

View File

@ -3,7 +3,7 @@
use core::fmt;
use crate::errors::SyntaxError;
use crate::errors::SyntaxDiagnostic;
#[derive(Debug, Clone)]
pub enum Token {
@ -78,7 +78,7 @@ pub enum Token {
Eof,
// The error token that is useful for error recovery.
Error(Box<SyntaxError>),
Error(Box<SyntaxDiagnostic>),
}
impl Token {

View File

@ -1,18 +1,18 @@
//! Crate to parse the kind2 grammar.
pub mod errors;
pub mod expr;
pub mod macros;
pub mod pat;
pub mod state;
pub mod top_level;
mod errors;
mod expr;
mod lexer;
mod macros;
mod pat;
mod state;
mod top_level;
pub mod lexer;
use std::sync::mpsc::Sender;
use kind_report::data::Diagnostic;
use kind_span::SyntaxCtxIndex;
use kind_tree::concrete::Module;
pub use lexer::state::*;
use lexer::state::*;
use state::Parser;
pub fn parse_book(errs: Sender<Box<dyn Diagnostic>>, ctx_id: usize, input: &str) -> (Module, bool) {

View File

@ -1,16 +1,16 @@
use kind_tree::concrete::pat::{Pat, PatIdent, PatKind};
use crate::errors::SyntaxError;
use crate::errors::SyntaxDiagnostic;
use crate::lexer::tokens::Token;
use crate::macros::eat_single;
use crate::state::Parser;
impl<'a> Parser<'a> {
pub fn is_pat_cons(&self) -> bool {
fn is_pat_cons(&self) -> bool {
self.get().same_variant(&Token::LPar) && self.peek(1).is_upper_id()
}
pub fn parse_pat_constructor(&mut self) -> Result<Box<Pat>, SyntaxError> {
fn parse_pat_constructor(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // '('
let name = self.parse_upper_id()?;
@ -25,7 +25,7 @@ impl<'a> Parser<'a> {
}))
}
pub fn parse_pat_num60(&mut self) -> Result<Box<Pat>, SyntaxError> {
fn parse_pat_num60(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let start = self.range();
let num = eat_single!(self, Token::Num60(n) => *n)?;
Ok(Box::new(Pat {
@ -34,7 +34,7 @@ impl<'a> Parser<'a> {
}))
}
pub fn parse_pat_num120(&mut self) -> Result<Box<Pat>, SyntaxError> {
fn parse_pat_num120(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let start = self.range();
let num = eat_single!(self, Token::Num120(n) => *n)?;
Ok(Box::new(Pat {
@ -43,7 +43,7 @@ impl<'a> Parser<'a> {
}))
}
pub fn parse_pat_str(&mut self) -> Result<Box<Pat>, SyntaxError> {
fn parse_pat_str(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let start = self.range();
let string = eat_single!(self, Token::Str(str) => str.clone())?;
Ok(Box::new(Pat {
@ -52,7 +52,7 @@ impl<'a> Parser<'a> {
}))
}
pub fn parse_pat_group(&mut self) -> Result<Box<Pat>, SyntaxError> {
fn parse_pat_group(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let start = self.range();
self.advance(); // '('
let mut pat = self.parse_pat()?;
@ -61,7 +61,7 @@ impl<'a> Parser<'a> {
Ok(pat)
}
pub fn parse_pat_var(&mut self) -> Result<Box<Pat>, SyntaxError> {
fn parse_pat_var(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let id = self.parse_id()?;
Ok(Box::new(Pat {
range: id.range,
@ -69,7 +69,7 @@ impl<'a> Parser<'a> {
}))
}
pub fn parse_pat_single_cons(&mut self) -> Result<Box<Pat>, SyntaxError> {
fn parse_pat_single_cons(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let id = self.parse_upper_id()?;
Ok(Box::new(Pat {
range: id.range,
@ -77,7 +77,7 @@ impl<'a> Parser<'a> {
}))
}
pub fn parse_pat_hole(&mut self) -> Result<Box<Pat>, SyntaxError> {
fn parse_pat_hole(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let range = self.range();
self.eat_variant(Token::Hole)?;
Ok(Box::new(Pat {
@ -86,7 +86,7 @@ impl<'a> Parser<'a> {
}))
}
fn parse_pat_list(&mut self) -> Result<Box<Pat>, SyntaxError> {
fn parse_pat_list(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
let range = self.range();
self.advance(); // '['
let mut vec = Vec::new();
@ -126,7 +126,7 @@ impl<'a> Parser<'a> {
}))
}
pub fn parse_pat(&mut self) -> Result<Box<Pat>, SyntaxError> {
pub fn parse_pat(&mut self) -> Result<Box<Pat>, SyntaxDiagnostic> {
if self.is_pat_cons() {
self.parse_pat_constructor()
} else if self.get().is_str() {

View File

@ -5,7 +5,7 @@ use std::{collections::VecDeque, sync::mpsc::Sender};
use kind_report::data::Diagnostic;
use kind_span::Range;
use crate::{errors::SyntaxError, lexer::tokens::Token, Lexer};
use crate::{errors::SyntaxDiagnostic, lexer::tokens::Token, Lexer};
/// The parser state. It currently has some parameters
/// that make the behaviour change;
@ -13,19 +13,19 @@ use crate::{errors::SyntaxError, lexer::tokens::Token, Lexer};
/// it's useful to all of the rules that use "try_local"
/// and similar functions
pub struct Parser<'a> {
pub lexer: Lexer<'a>,
lexer: Lexer<'a>,
/// We have to shift these things one position
/// to the left; it's not clear what the best structure is here,
/// but the shifting probably will not cost much
/// because it's a ring buffer.
pub queue: VecDeque<(Token, Range)>,
pub breaks: VecDeque<bool>,
pub errs: Sender<Box<dyn Diagnostic>>,
queue: VecDeque<(Token, Range)>,
breaks: VecDeque<bool>,
dignostic_channel: Sender<Box<dyn Diagnostic>>,
/// It's useful when we have to try to parse something
/// that fails at the first token. As the parser ignores some
/// tokens, we cannot rely on the count provided by the
/// lexer.
pub eaten: u32,
eaten: u32,
pub failed: bool,
}
@ -41,7 +41,7 @@ impl<'a> Parser<'a> {
lexer,
queue,
breaks,
errs: sender,
dignostic_channel: sender,
eaten: 0,
failed: false,
}
@ -52,7 +52,7 @@ impl<'a> Parser<'a> {
self.breaks.pop_front();
self.breaks.push_back(self.lexer.is_linebreak());
self.queue
.push_back(self.lexer.get_next_no_error(self.errs.clone()));
.push_back(self.lexer.get_next_no_error(self.dignostic_channel.clone()));
self.eaten += 1;
cur
}
@ -77,18 +77,27 @@ impl<'a> Parser<'a> {
}
#[inline]
pub fn fail<T>(&mut self, expect: Vec<Token>) -> Result<T, SyntaxError> {
Err(SyntaxError::UnexpectedToken(
pub fn fail<T>(&mut self, expect: Vec<Token>) -> Result<T, SyntaxDiagnostic> {
Err(SyntaxDiagnostic::UnexpectedToken(
self.get().clone(),
self.range(),
expect,
))
}
pub fn eat_closing_keyword(&mut self, expect: Token, range: Range) -> Result<(), SyntaxError> {
pub fn send_dignostic(&mut self, diagnostic: SyntaxDiagnostic) {
self.dignostic_channel.send(Box::new(diagnostic)).unwrap();
self.failed = true;
}
pub fn eat_closing_keyword(
&mut self,
expect: Token,
range: Range,
) -> Result<(), SyntaxDiagnostic> {
if !self.check_and_eat(expect.clone()) {
if self.get().is_eof() {
Err(SyntaxError::Unclosed(range))
Err(SyntaxDiagnostic::Unclosed(range))
} else {
self.fail(vec![expect])
}
@ -97,7 +106,7 @@ impl<'a> Parser<'a> {
}
}
pub fn eat_variant(&mut self, expect: Token) -> Result<(Token, Range), SyntaxError> {
pub fn eat_variant(&mut self, expect: Token) -> Result<(Token, Range), SyntaxDiagnostic> {
if self.get().same_variant(&expect) {
Ok(self.advance())
} else {
@ -105,14 +114,14 @@ impl<'a> Parser<'a> {
}
}
pub fn eat_id(&mut self, expect: &str) -> Result<(Token, Range), SyntaxError> {
pub fn eat_id(&mut self, expect: &str) -> Result<(Token, Range), SyntaxDiagnostic> {
match self.get() {
Token::LowerId(x) if x == expect => Ok(self.advance()),
_ => self.fail(vec![Token::LowerId(expect.to_string())])
_ => self.fail(vec![Token::LowerId(expect.to_string())]),
}
}
pub fn eat<T>(&mut self, expect: fn(&Token) -> Option<T>) -> Result<T, SyntaxError> {
pub fn eat<T>(&mut self, expect: fn(&Token) -> Option<T>) -> Result<T, SyntaxDiagnostic> {
match expect(self.get()) {
None => self.fail(vec![]),
Some(res) => {
@ -141,8 +150,8 @@ impl<'a> Parser<'a> {
pub fn try_single<T>(
&mut self,
fun: &dyn Fn(&mut Parser<'a>) -> Result<T, SyntaxError>,
) -> Result<Option<T>, SyntaxError> {
fun: &dyn Fn(&mut Parser<'a>) -> Result<T, SyntaxDiagnostic>,
) -> Result<Option<T>, SyntaxDiagnostic> {
let current = self.eaten;
match fun(self) {
Err(_) if current == self.eaten => Ok(None),

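The `try_single` combinator above only recovers from a failure when the sub-parser consumed nothing; once a token has been eaten, the error is considered committed and propagates. A reduced sketch of that rule with a bare token counter (hypothetical `Parser`/`ParseError`, not the real state types):

```rust
// Sketch of the "only backtrack if nothing was eaten" rule behind try_single.
struct Parser {
    eaten: u32,
}

#[derive(Debug)]
struct ParseError;

impl Parser {
    fn try_single<T>(
        &mut self,
        rule: &dyn Fn(&mut Parser) -> Result<T, ParseError>,
    ) -> Result<Option<T>, ParseError> {
        let before = self.eaten;
        match rule(self) {
            Ok(res) => Ok(Some(res)),
            // Failed without consuming anything: report "no item here" and let
            // the caller try something else.
            Err(_) if before == self.eaten => Ok(None),
            // Failed after consuming tokens: the error is committed, propagate it.
            Err(err) => Err(err),
        }
    }
}

fn main() {
    let mut p = Parser { eaten: 0 };

    // A rule that fails immediately becomes Ok(None)...
    let empty: Result<Option<u32>, ParseError> =
        p.try_single(&|_: &mut Parser| Err(ParseError));
    assert!(matches!(empty, Ok(None)));

    // ...while a rule that eats a token before failing stays an error.
    let committed: Result<Option<u32>, ParseError> = p.try_single(&|p: &mut Parser| {
        p.eaten += 1;
        Err(ParseError)
    });
    assert!(committed.is_err());
}
```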
View File

@ -1,12 +1,12 @@
use kind_span::{Locatable, Range};
use kind_tree::concrete::{Attribute, AttributeStyle};
use crate::errors::SyntaxError;
use crate::errors::SyntaxDiagnostic;
use crate::lexer::tokens::Token;
use crate::state::Parser;
impl<'a> Parser<'a> {
pub fn parse_attr_args(&mut self) -> Result<(Vec<AttributeStyle>, Range), SyntaxError> {
fn parse_attr_args(&mut self) -> Result<(Vec<AttributeStyle>, Range), SyntaxDiagnostic> {
let mut attrs = Vec::new();
let mut range = self.range();
@ -23,10 +23,10 @@ impl<'a> Parser<'a> {
self.eat_closing_keyword(Token::RBracket, start)?;
}
Ok((attrs, range))
Ok((attrs, range))
}
pub fn parse_attr_style(&mut self) -> Result<AttributeStyle, SyntaxError> {
fn parse_attr_style(&mut self) -> Result<AttributeStyle, SyntaxDiagnostic> {
match self.get().clone() {
Token::LowerId(_) | Token::UpperId(_, None) => {
let range = self.range();
@ -63,7 +63,7 @@ impl<'a> Parser<'a> {
}
}
pub fn parse_attr(&mut self) -> Result<Attribute, SyntaxError> {
fn parse_attr(&mut self) -> Result<Attribute, SyntaxDiagnostic> {
let start = self.range();
self.eat_variant(Token::Hash)?;
@ -86,7 +86,7 @@ impl<'a> Parser<'a> {
})
}
pub fn parse_attrs(&mut self) -> Result<Vec<Attribute>, SyntaxError> {
pub fn parse_attrs(&mut self) -> Result<Vec<Attribute>, SyntaxDiagnostic> {
let mut attrs = Vec::new();
while let Some(res) = self.try_single(&|fun| fun.parse_attr())? {
attrs.push(res);

View File

@ -1,11 +1,14 @@
use fxhash::FxHashMap;
use kind_tree::concrete::expr::Expr;
use kind_tree::concrete::pat::{Pat, PatIdent, PatKind};
/// Parses all of the top level structures
/// like Book, Entry, Rule and Argument.
use kind_tree::concrete::{Argument, Attribute, Entry, Module, Rule, Telescope, TopLevel};
use kind_tree::concrete::{
Argument, Attribute, Entry, ExprKind, Module, Rule, Telescope, TopLevel,
};
use kind_tree::symbol::QualifiedIdent;
use crate::errors::SyntaxError;
use crate::errors::SyntaxDiagnostic;
use crate::lexer::tokens::Token;
use crate::state::Parser;
@ -17,7 +20,7 @@ fn is_hidden_arg(token: &Token) -> bool {
}
impl<'a> Parser<'a> {
pub fn is_top_level_entry_continuation(&self) -> bool {
fn is_top_level_entry_continuation(&self) -> bool {
self.peek(1).same_variant(&Token::Colon) // ':'
|| self.peek(1).same_variant(&Token::LPar) // '('
|| self.peek(1).same_variant(&Token::LBrace) // '{'
@ -26,26 +29,18 @@ impl<'a> Parser<'a> {
|| self.peek(1).same_variant(&Token::Plus) // '+'
}
pub fn is_top_level_entry(&self) -> bool {
fn is_top_level_entry(&self) -> bool {
self.get().is_upper_id() && self.is_top_level_entry_continuation()
}
pub fn is_top_level_start(&self) -> bool {
self.is_top_level_entry()
|| self.check_actual_id("type")
fn is_safe_level_start(&self) -> bool {
self.check_actual_id("type")
|| self.check_actual_id("record")
|| self.get().same_variant(&Token::Hash)
|| self.get().is_doc()
}
pub fn is_safe_level_start(&self) -> bool {
self.check_actual_id("type")
|| self.check_actual_id("record")
|| self.get().same_variant(&Token::Hash)
|| self.get().is_doc()
}
pub fn complement_binding_op(&self) -> Option<Token> {
fn complement_binding_op(&self) -> Option<Token> {
match self.get() {
Token::LPar => Some(Token::RPar),
Token::Less => Some(Token::Greater),
@ -53,7 +48,7 @@ impl<'a> Parser<'a> {
}
}
pub fn parse_argument(&mut self) -> Result<Argument, SyntaxError> {
fn parse_argument(&mut self) -> Result<Argument, SyntaxDiagnostic> {
let start = self.range();
let erased = self.check_and_eat(Token::Minus);
@ -87,7 +82,7 @@ impl<'a> Parser<'a> {
})
}
pub fn parse_rule(&mut self, name: String) -> Result<Box<Rule>, SyntaxError> {
fn parse_rule(&mut self, name: String) -> Result<Box<Rule>, SyntaxDiagnostic> {
let start = self.range();
let ident;
if let Token::UpperId(name_id, ext) = self.get() {
@ -115,7 +110,7 @@ impl<'a> Parser<'a> {
}))
}
pub fn parse_arguments(&mut self) -> Result<Vec<Argument>, SyntaxError> {
fn parse_arguments(&mut self) -> Result<Vec<Argument>, SyntaxDiagnostic> {
let mut args = Vec::new();
while let Some(res) = self.try_single(&|fun| fun.parse_argument())? {
args.push(res);
@ -123,7 +118,7 @@ impl<'a> Parser<'a> {
Ok(args)
}
pub fn parse_docs(&mut self) -> Result<Vec<String>, SyntaxError> {
fn parse_docs(&mut self) -> Result<Vec<String>, SyntaxDiagnostic> {
let mut docs = Vec::new();
while let Token::Comment(_, str) = &self.get() {
docs.push(str.clone());
@ -136,12 +131,12 @@ impl<'a> Parser<'a> {
&mut self,
docs: Vec<String>,
attrs: Vec<Attribute>,
) -> Result<Entry, SyntaxError> {
) -> Result<Entry, SyntaxDiagnostic> {
let start = self.range();
if self.get().is_lower_id() && self.is_top_level_entry_continuation() {
let ident = self.parse_id()?;
return Err(SyntaxError::LowerCasedDefinition(
return Err(SyntaxDiagnostic::LowerCasedDefinition(
ident.to_string(),
ident.range,
));
@ -156,8 +151,18 @@ impl<'a> Parser<'a> {
let args = self.parse_arguments()?;
self.eat_variant(Token::Colon)?;
let typ = self.parse_expr(false)?;
if !self.get().same_variant(&Token::Colon) && !self.get().same_variant(&Token::LBrace) {
return self.fail(vec![])?;
}
let typ = if self.check_and_eat(Token::Colon) {
self.parse_expr(false)?
} else {
Box::new(Expr {
data: ExprKind::Hole,
range: start,
})
};
if self.check_actual(Token::LBrace) {
let start = self.range();
@ -194,7 +199,7 @@ impl<'a> Parser<'a> {
// Better error message for when you have changed the name of the function
if self.get().is_upper_id() && !self.is_top_level_entry_continuation() {
return Err(SyntaxError::NotAClauseOfDef(ident.range, self.range()));
return Err(SyntaxDiagnostic::NotAClauseOfDef(ident.range, self.range()));
}
Ok(Entry {
@ -219,7 +224,7 @@ impl<'a> Parser<'a> {
// Better error message for when you have changed the name of the function
if self.get().is_upper_id() && !self.is_top_level_entry_continuation() {
return Err(SyntaxError::NotAClauseOfDef(ident.range, self.range()));
return Err(SyntaxDiagnostic::NotAClauseOfDef(ident.range, self.range()));
}
Ok(Entry {
@ -234,7 +239,7 @@ impl<'a> Parser<'a> {
}
}
pub fn parse_top_level(&mut self) -> Result<TopLevel, SyntaxError> {
fn parse_top_level(&mut self) -> Result<TopLevel, SyntaxDiagnostic> {
let docs = self.parse_docs()?;
let attrs = self.parse_attrs()?;
@ -245,13 +250,13 @@ impl<'a> Parser<'a> {
} else if self.is_top_level_entry_continuation() {
Ok(TopLevel::Entry(self.parse_entry(docs, attrs)?))
} else if self.check_actual_id("use") {
Err(SyntaxError::CannotUseUse(self.range()))
Err(SyntaxDiagnostic::CannotUseUse(self.range()))
} else {
self.fail(vec![])
}
}
pub fn parse_use(&mut self) -> Result<(String, String), SyntaxError> {
fn parse_use(&mut self) -> Result<(String, String), SyntaxDiagnostic> {
self.eat_id("use")?;
let origin = self.parse_upper_id()?;
self.eat_id("as")?;
@ -273,7 +278,7 @@ impl<'a> Parser<'a> {
range,
..
},
) => Err(SyntaxError::ImportsCannotHaveAlias(range)),
) => Err(SyntaxDiagnostic::ImportsCannotHaveAlias(range)),
(origin, alias) => Ok((origin.to_string(), alias.to_string())),
}
}
@ -288,8 +293,7 @@ impl<'a> Parser<'a> {
uses.insert(alias, origin);
}
Err(err) => {
self.errs.send(Box::new(err)).unwrap();
self.failed = true;
self.send_dignostic(err);
break;
}
}
@ -300,8 +304,7 @@ impl<'a> Parser<'a> {
Ok(entry) => entries.push(entry),
Err(err) => {
self.advance();
self.errs.send(Box::new(err)).unwrap();
self.failed = true;
self.send_dignostic(err);
while (!self.is_safe_level_start() || !self.is_linebreak())
&& !self.get().same_variant(&Token::Eof)
{
@ -311,14 +314,8 @@ impl<'a> Parser<'a> {
}
}
let res = self.eat_variant(Token::Eof);
match res {
Ok(_) => (),
Err(err) => {
self.errs.send(Box::new(err)).unwrap();
self.failed = true;
}
if let Err(err) = self.eat_variant(Token::Eof) {
self.send_dignostic(err);
}
Module { entries, uses }

View File

@ -1,11 +1,11 @@
use kind_tree::concrete::{Attribute, Constructor, RecordDecl, SumTypeDecl, Telescope};
use crate::errors::SyntaxError;
use crate::errors::SyntaxDiagnostic;
use crate::lexer::tokens::Token;
use crate::state::Parser;
impl<'a> Parser<'a> {
pub fn parse_constructor(&mut self) -> Result<Constructor, SyntaxError> {
pub fn parse_constructor(&mut self) -> Result<Constructor, SyntaxDiagnostic> {
let docs = self.parse_docs()?;
let name = self.parse_any_id()?;
let args = self.parse_arguments()?;
@ -17,7 +17,7 @@ impl<'a> Parser<'a> {
};
self.check_and_eat(Token::Semi);
Ok(Constructor {
name,
docs,
@ -30,8 +30,9 @@ impl<'a> Parser<'a> {
&mut self,
docs: Vec<String>,
attrs: Vec<Attribute>,
) -> Result<SumTypeDecl, SyntaxError> {
) -> Result<SumTypeDecl, SyntaxDiagnostic> {
self.eat_id("type")?;
let name = self.parse_upper_id()?;
let parameters = self.parse_arguments()?;
@ -67,7 +68,7 @@ impl<'a> Parser<'a> {
&mut self,
docs: Vec<String>,
attrs: Vec<Attribute>,
) -> Result<RecordDecl, SyntaxError> {
) -> Result<RecordDecl, SyntaxDiagnostic> {
self.eat_id("record")?;
let name = self.parse_upper_id()?;

View File

@ -77,10 +77,8 @@ impl<'a> DesugarState<'a> {
pub(crate) fn desugar_app(&mut self, range: Range, head: &Expr) -> Box<desugared::Expr> {
match &head.data {
ExprKind::Constr(entry_name, spine) => {
let entry = self
.old_book
.get_count_garanteed(entry_name.to_string().as_str());
ExprKind::Constr { name, args } => {
let entry = self.old_book.get_count_garanteed(name.to_string().as_str());
let mut positions = FxHashMap::default();
let mut arguments = vec![None; entry.arguments.len()];
@ -88,7 +86,7 @@ impl<'a> DesugarState<'a> {
let (hidden, _erased) = entry.arguments.count_implicits();
// Check if we should just fill all the implicits
let fill_hidden = spine.len() == entry.arguments.len() - hidden;
let fill_hidden = args.len() == entry.arguments.len() - hidden;
if fill_hidden {
for i in 0..entry.arguments.len() {
@ -98,10 +96,10 @@ impl<'a> DesugarState<'a> {
arguments[i] = Some((Range::ghost_range(), self.gen_hole_expr()))
}
}
} else if entry.arguments.len() != spine.len() {
} else if entry.arguments.len() != args.len() {
self.send_err(PassError::IncorrectArity(
entry_name.range,
spine.iter().map(|x| x.locate()).collect(),
name.range,
args.iter().map(|x| x.locate()).collect(),
entry.arguments.len(),
hidden,
));
@ -112,7 +110,7 @@ impl<'a> DesugarState<'a> {
positions.insert(entry.arguments[i].name.to_str(), i);
}
for arg in spine {
for arg in args {
match arg {
Binding::Positional(_) => (),
Binding::Named(r, name, v) => {
@ -121,8 +119,8 @@ impl<'a> DesugarState<'a> {
None => {
self.send_err(PassError::CannotFindField(
name.range,
entry_name.range,
entry_name.to_string(),
name.range,
name.to_string(),
));
continue;
}
@ -137,7 +135,7 @@ impl<'a> DesugarState<'a> {
}
}
for arg in spine {
for arg in args {
match arg {
Binding::Positional(v) => {
for i in 0..entry.arguments.len() {
@ -145,7 +143,7 @@ impl<'a> DesugarState<'a> {
if (fill_hidden && arg_decl.hidden) || arguments[i].is_some() {
continue;
}
arguments[i] = Some((v.range, self.desugar_expr(v)));
arguments[i] = Some((v.range, self.desugar_expr(&v)));
break;
}
}
@ -164,17 +162,17 @@ impl<'a> DesugarState<'a> {
Box::new(desugared::Expr {
data: if entry.is_ctr {
desugared::ExprKind::Ctr(entry_name.clone(), new_spine)
desugared::ExprKind::Ctr(name.clone(), new_spine)
} else {
desugared::ExprKind::Fun(entry_name.clone(), new_spine)
desugared::ExprKind::Fun(name.clone(), new_spine)
},
span: Span::Locatable(range),
})
}
ExprKind::App(head, spine) => {
ExprKind::App { fun, args } => {
let mut new_spine = Vec::new();
let new_head = self.desugar_expr(head);
for arg in spine {
let new_head = self.desugar_expr(fun);
for arg in args {
new_spine.push(desugared::AppBinding {
data: self.desugar_expr(&arg.data),
erased: arg.erased,

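For the argument-filling logic in `desugar_app`: named bindings claim their slot through a name-to-index map first, and positional bindings then fall into the remaining visible (non-hidden) slots from left to right. A compact sketch of that placement strategy, with plain strings and numbers standing in for the real AST and binding types:

```rust
use std::collections::HashMap;

// One declared parameter: its name and whether it is implicit/hidden.
struct Param {
    name: &'static str,
    hidden: bool,
}

enum Binding {
    Positional(u64),
    Named(&'static str, u64),
}

// Named bindings claim their slot by name; positional bindings then fill the
// remaining visible slots left to right, as in desugar_app.
fn place(params: &[Param], bindings: &[Binding]) -> Vec<Option<u64>> {
    let positions: HashMap<&str, usize> =
        params.iter().enumerate().map(|(i, p)| (p.name, i)).collect();
    let mut slots: Vec<Option<u64>> = vec![None; params.len()];

    for b in bindings {
        if let Binding::Named(name, val) = b {
            if let Some(&i) = positions.get(name) {
                slots[i] = Some(*val);
            }
        }
    }
    for b in bindings {
        if let Binding::Positional(val) = b {
            for (i, p) in params.iter().enumerate() {
                if p.hidden || slots[i].is_some() {
                    continue;
                }
                slots[i] = Some(*val);
                break;
            }
        }
    }
    slots
}

fn main() {
    let params = [
        Param { name: "t", hidden: true },
        Param { name: "x", hidden: false },
        Param { name: "y", hidden: false },
    ];
    let bindings = [Binding::Named("y", 2), Binding::Positional(1)];
    // The hidden `t` stays unfilled (a hole in the real pass), `x` receives the
    // positional argument and `y` the named one.
    assert_eq!(place(&params, &bindings), vec![None, Some(1), Some(2)]);
}
```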
View File

@ -1,11 +1,13 @@
use kind_tree::{concrete::{self, Attribute, AttributeStyle}, desugared};
use kind_tree::{
concrete::{self, Attribute, AttributeStyle},
desugared,
};
use crate::errors::PassError;
use super::DesugarState;
impl<'a> DesugarState<'a> {
fn args_should_be_empty(&mut self, attr: &Attribute) {
if !attr.args.is_empty() {
self.send_err(PassError::AttributeDoesNotExpectArgs(attr.range))
@ -51,7 +53,7 @@ impl<'a> DesugarState<'a> {
self.attr_without_value(attr);
vec.push(desugared::Attribute::KdlRun);
}
"kdl_erase" => {
"kdl_erase" => {
self.args_should_be_empty(attr);
self.attr_without_value(attr);
vec.push(desugared::Attribute::KdlErase);
@ -61,22 +63,22 @@ impl<'a> DesugarState<'a> {
match &attr.value {
Some(AttributeStyle::Ident(_, ident)) => {
vec.push(desugared::Attribute::KdlState(ident.clone()));
},
}
Some(_) => self.attr_invalid_argument(attr),
None => self.attr_expects_a_value(attr)
None => self.attr_expects_a_value(attr),
}
},
}
"kdl_state" => {
self.args_should_be_empty(attr);
match &attr.value {
Some(AttributeStyle::Ident(_, ident)) => {
vec.push(desugared::Attribute::KdlState(ident.clone()));
},
}
Some(_) => self.attr_invalid_argument(attr),
None => self.attr_expects_a_value(attr)
None => self.attr_expects_a_value(attr),
}
},
_ => self.send_err(PassError::AttributeDoesNotExists(attr.range))
}
_ => self.send_err(PassError::AttributeDoesNotExists(attr.range)),
}
}
vec

View File

@ -83,7 +83,10 @@ impl<'a> DesugarState<'a> {
};
if self.old_book.count.get(&open_id.to_string()).is_none() {
self.send_err(PassError::NeedToImplementMethods(binding.locate(), Sugar::Open(typ.to_string())));
self.send_err(PassError::NeedToImplementMethods(
binding.locate(),
Sugar::Open(typ.to_string()),
));
return desugared::Expr::err(range);
}
@ -156,7 +159,10 @@ impl<'a> DesugarState<'a> {
let match_id = match_.typ.add_segment("match");
if self.old_book.entries.get(&match_id.to_string()).is_none() {
self.send_err(PassError::NeedToImplementMethods(range, Sugar::Match(match_.typ.to_string())));
self.send_err(PassError::NeedToImplementMethods(
range,
Sugar::Match(match_.typ.to_string()),
));
return desugared::Expr::err(range);
}
@ -238,14 +244,10 @@ impl<'a> DesugarState<'a> {
self.send_err(PassError::NoCoverage(range, unbound))
}
let motive = if let Some(res) = &match_.motive {
self.desugar_expr(res)
} else {
let mut idx: Vec<Ident> = sum.indices
.iter()
.map(|x| x.name.clone())
.collect();
let mut idx: Vec<Ident> = sum.indices.iter().map(|x| x.name.clone()).collect();
idx.push(Ident::generate("_val"));
idx.iter().rfold(self.gen_hole_expr(), |expr, l| {
desugared::Expr::lambda(l.range, l.clone(), expr, false)

View File

@ -8,21 +8,24 @@ use crate::errors::{PassError, Sugar};
use super::DesugarState;
impl<'a> DesugarState<'a> {
pub(crate) fn desugar_str(&self, range: Range, input: &str) -> Box<desugared::Expr> {
let cons = QualifiedIdent::new_static("String.cons", None, range);
input.chars().rfold(
desugared::Expr::ctr(range, QualifiedIdent::new_static("String.nil", None, range), vec![]),
desugared::Expr::ctr(
range,
QualifiedIdent::new_static("String.nil", None, range),
vec![],
),
|right, chr| {
desugared::Expr::ctr(range, cons.clone(), vec![
desugared::Expr::num60(range, chr as u64),
right
])
desugared::Expr::ctr(
range,
cons.clone(),
vec![desugared::Expr::num60(range, chr as u64), right],
)
},
)
}
pub(crate) fn desugar_literal(
&mut self,
range: Range,
@ -209,7 +212,10 @@ impl<'a> DesugarState<'a> {
let boolean = QualifiedIdent::new_static("Bool", None, range);
let bool_if_ident = boolean.add_segment("if");
let bool_if = self.old_book.entries.get(bool_if_ident.to_string().as_str());
let bool_if = self
.old_book
.entries
.get(bool_if_ident.to_string().as_str());
if bool_if.is_none() {
self.send_err(PassError::NeedToImplementMethods(range, Sugar::BoolIf));
@ -248,36 +254,68 @@ impl<'a> DesugarState<'a> {
pub(crate) fn desugar_expr(&mut self, expr: &expr::Expr) -> Box<desugared::Expr> {
use expr::ExprKind::*;
match &expr.data {
Constr(_, _) | App(_, _) => self.desugar_app(expr.range, expr),
All(ident, typ, body) => desugared::Expr::all(
Constr { .. } | App { .. } => self.desugar_app(expr.range, expr),
All {
param,
typ,
body,
erased,
} => desugared::Expr::all(
expr.range,
ident.clone().unwrap_or_else(|| self.gen_name(expr.range)),
param.clone().unwrap_or_else(|| self.gen_name(expr.range)),
self.desugar_expr(typ),
self.desugar_expr(body),
*erased,
),
Binary(op, left, right) => desugared::Expr::binary(
Binary { op, fst, snd } => desugared::Expr::binary(
expr.range,
*op,
self.desugar_expr(left),
self.desugar_expr(right),
self.desugar_expr(fst),
self.desugar_expr(snd),
),
Lambda(ident, _typ, body, erased) => {
desugared::Expr::lambda(expr.range, ident.clone(), self.desugar_expr(body), *erased)
Lambda {
param,
typ: None,
body,
erased,
} => {
desugared::Expr::lambda(expr.range, param.clone(), self.desugar_expr(body), *erased)
}
Ann(val, typ) => {
Lambda {
param,
typ: Some(typ),
body,
erased,
} => desugared::Expr::ann(
expr.range,
desugared::Expr::lambda(
expr.range,
param.clone(),
self.desugar_expr(body),
*erased,
),
desugared::Expr::all(
typ.range,
self.gen_name(expr.range),
self.desugar_expr(typ),
self.gen_hole_expr(),
*erased,
),
),
Ann { val, typ } => {
desugared::Expr::ann(expr.range, self.desugar_expr(val), self.desugar_expr(typ))
}
Var(ident) => desugared::Expr::var(ident.clone()),
Var { name } => desugared::Expr::var(name.clone()),
Hole => desugared::Expr::hole(expr.range, self.gen_hole()),
Lit(literal) => self.desugar_literal(expr.range, literal),
Lit { lit } => self.desugar_literal(expr.range, lit),
Let { name, val, next } => self.desugar_let(expr.range, name, val, next),
Do { typ, sttm } => self.desugar_do(expr.range, typ, sttm),
Sigma { param, fst, snd } => self.desugar_sigma(expr.range, param, fst, snd),
List { args } => self.desugar_list(expr.range, args),
If { cond, then_, else_ } => self.desugar_if(expr.range, cond, then_, else_),
Pair { fst, snd } => self.desugar_pair(expr.range, fst, snd),
Match(matcher) => self.desugar_match(expr.range, matcher),
Let(destruct, val, next) => self.desugar_let(expr.range, destruct, val, next),
Subst(sub) => self.desugar_sub(expr.range, sub),
Do(typ, sttm) => self.desugar_do(expr.range, typ, sttm),
Sigma(name, typ, body) => self.desugar_sigma(expr.range, name, typ, body),
List(ls) => self.desugar_list(expr.range, ls),
If(cond, if_, else_) => self.desugar_if(expr.range, cond, if_, else_),
Pair(fst, snd) => self.desugar_pair(expr.range, fst, snd),
}
}
}

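`desugar_str` is a right fold of the Rust string into a chain of `String.cons` constructors terminated by `String.nil`, so the first character ends up outermost. A minimal sketch over a toy constructor tree (hypothetical `Term`, not `desugared::Expr`):

```rust
// Toy constructor tree standing in for desugared::Expr.
#[derive(Debug, PartialEq)]
enum Term {
    Ctr(&'static str, Vec<Term>),
    Num(u64),
}

// Right-folds the characters so the first char becomes the outermost cons:
// "hi" => (String.cons 'h' (String.cons 'i' String.nil)).
fn desugar_str(input: &str) -> Term {
    input.chars().rfold(Term::Ctr("String.nil", vec![]), |rest, chr| {
        Term::Ctr("String.cons", vec![Term::Num(chr as u64), rest])
    })
}

fn main() {
    let expected = Term::Ctr(
        "String.cons",
        vec![
            Term::Num('h' as u64),
            Term::Ctr(
                "String.cons",
                vec![Term::Num('i' as u64), Term::Ctr("String.nil", vec![])],
            ),
        ],
    );
    assert_eq!(desugar_str("hi"), expected);
}
```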
View File

@ -13,12 +13,12 @@ use super::DesugarState;
/// NOTE: Does not work with Pi types.
pub fn is_data_constructor_of(expr: concrete::expr::Expr, type_name: &str) -> bool {
match expr.data {
concrete::ExprKind::Var(name) => name.to_string().as_str() == type_name,
concrete::ExprKind::App(head, _) => {
concrete::ExprKind::Var { name } => name.to_string().as_str() == type_name,
concrete::ExprKind::App { fun, args: _ } => {
if let concrete::expr::Expr {
data: concrete::ExprKind::Var(name),
data: concrete::ExprKind::Var { name },
..
} = *head
} = *fun
{
name.to_string().as_str() == type_name
} else {

View File

@ -43,13 +43,13 @@ pub struct ErasureState<'a> {
}
pub fn erase_book(
book: &Book,
book: Book,
errs: Sender<Box<dyn Diagnostic>>,
entrypoint: FxHashSet<String>,
) -> Option<Book> {
let mut state = ErasureState {
errs,
book,
book: &book,
ctx: Default::default(),
names: Default::default(),
holes: Default::default(),
@ -74,15 +74,15 @@ pub fn erase_book(
for (name, v) in &book.entrs {
entries.insert(name, state.erase_entry(v));
}
if state.failed {
return None;
}
for (name, (_, relev)) in &state.names {
if let Some(Relevance::Relevant) = state.normalize(*relev) {
if let Some(res) = entries.get(name) {
new_book.entrs.insert(name.to_string(), res.clone());
if let Some(res) = entries.remove(name) {
new_book.entrs.insert(name.to_string(), res);
}
}
}
@ -90,7 +90,6 @@ pub fn erase_book(
Some(new_book)
}
impl<'a> ErasureState<'a> {
pub fn new_hole(&mut self, range: Range, name: String) -> (Option<Range>, Relevance) {
let count = self.holes.len();
@ -152,11 +151,11 @@ impl<'a> ErasureState<'a> {
// "erased" in the sense that we can just remove them from the runtime and it'll
// be fine.
(None, Relevance::Irrelevant) => false,
(None, Relevance::Hole(n)) => {
self.holes[hole] = Some(Relevance::Hole(n));
true
},
}
(_, _) => true,
},
@ -194,8 +193,7 @@ impl<'a> ErasureState<'a> {
name: &QualifiedIdent,
spine: &Vec<Box<Expr>>,
) -> Vec<Box<Expr>> {
let fun = match self.names.get(&name.to_string()) {
let fun = match self.names.get(name.to_str()) {
Some(res) => *res,
None => self.new_hole(name.range, name.to_string()),
};
@ -204,7 +202,7 @@ impl<'a> ErasureState<'a> {
self.err_irrelevant(None, name.range, None)
}
let entry = self.book.entrs.get(name.to_string().as_str()).unwrap();
let entry = self.book.entrs.get(name.to_str()).unwrap();
let erased = entry.args.iter();
spine
@ -274,7 +272,7 @@ impl<'a> ErasureState<'a> {
}
},
Var(name) => {
let relev = self.ctx.get(&name.to_string()).unwrap();
let relev = self.ctx.get(name.to_str()).unwrap();
let declared_ty = (relev.1).0;
let declared_val = relev.0;
if !self.unify(name.range, *on, relev.1, false) {
@ -282,7 +280,7 @@ impl<'a> ErasureState<'a> {
}
Box::new(expr.clone())
}
All(name, typ, body) => {
All(name, typ, body, _erased) => {
let ctx = self.ctx.clone();
// Is it relevant inside the context where it's being used?
@ -353,15 +351,9 @@ impl<'a> ErasureState<'a> {
})
}
Fun(head, spine) => {
let args = self
.book
.entrs
.get(head.to_string().as_str())
.unwrap()
.args
.iter();
let args = self.book.entrs.get(head.to_str()).unwrap().args.iter();
let fun = match self.names.get(&head.to_string()) {
let fun = match self.names.get(head.to_str()) {
Some(res) => *res,
None => self.new_hole(head.range, head.to_string()),
};
@ -391,13 +383,7 @@ impl<'a> ErasureState<'a> {
})
}
Ctr(head, spine) => {
let args = self
.book
.entrs
.get(head.to_string().as_str())
.unwrap()
.args
.iter();
let args = self.book.entrs.get(head.to_str()).unwrap().args.iter();
let fun = match self.names.get(&head.to_string()) {
Some(res) => *res,
@ -455,7 +441,17 @@ impl<'a> ErasureState<'a> {
.map(|((range, erased), expr)| {
(
erased,
self.erase_pat((Some(*range), if *erased { Relevance::Irrelevant} else { place.1.clone() }), expr),
self.erase_pat(
(
Some(*range),
if *erased {
Relevance::Irrelevant
} else {
place.1.clone()
},
),
expr,
),
)
})
.filter(|(erased, _)| !*erased)
@ -475,7 +471,7 @@ impl<'a> ErasureState<'a> {
}
pub fn erase_entry(&mut self, entry: &Entry) -> Box<Entry> {
let place = if let Some(res) = self.names.get(&entry.name.to_string()) {
let place = if let Some(res) = self.names.get(entry.name.to_str()) {
*res
} else {
self.new_hole(entry.name.range, entry.name.to_string())

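The pattern-erasure code above pairs every argument with its declared erasure flag and then drops the erased ones from the compiled spine. A stripped-down sketch of that zip-and-filter step, with plain booleans standing in for the `Relevance` machinery:

```rust
// Keeps only the runtime-relevant arguments of a constructor spine, following
// the "zip with the declaration, then filter" shape used in the erasure pass.
fn erase_spine<T>(decl_erased: &[bool], spine: Vec<T>) -> Vec<T> {
    decl_erased
        .iter()
        .zip(spine)
        .filter(|(erased, _)| !**erased)
        .map(|(_, arg)| arg)
        .collect()
}

fn main() {
    // A constructor declared with an erased type parameter and two value fields.
    let erased = [true, false, false];
    assert_eq!(erase_spine(&erased, vec!["A", "x", "y"]), vec!["x", "y"]);
}
```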
View File

@ -34,14 +34,14 @@ pub enum PassError {
NoFieldCoverage(Range, Vec<String>),
CannotPatternMatchOnErased(Range),
UnboundVariable(Vec<Ident>, Vec<String>),
AttributeDoesNotExpectEqual(Range),
AttributeDoesNotExpectArgs(Range),
InvalidAttributeArgument(Range),
AttributeExpectsAValue(Range),
DuplicatedAttributeArgument(Range, Range),
CannotDerive(String, Range),
AttributeDoesNotExists(Range)
AttributeDoesNotExists(Range),
}
// TODO: A way to build an error message with methods
@ -76,7 +76,7 @@ impl Diagnostic for PassError {
PassError::AttributeDoesNotExists(range) => Some(range.ctx),
}
}
fn to_diagnostic_frame(&self) -> DiagnosticFrame {
match self {
PassError::UnboundVariable(idents, suggestions) => DiagnosticFrame {

View File

@ -22,7 +22,7 @@ pub mod uses;
#[derive(Debug, Hash, PartialEq, Eq)]
pub enum Derive {
Match,
Open
Open,
}
impl Display for Derive {
@ -43,12 +43,15 @@ pub fn insert_or_report(
match hashmap.get(&key) {
Some(last_range) => {
channel
.send(Box::new(PassError::DuplicatedAttributeArgument(last_range.clone(), range)))
.unwrap();
},
.send(Box::new(PassError::DuplicatedAttributeArgument(
last_range.clone(),
range,
)))
.unwrap();
}
None => {
hashmap.insert(key, range);
},
}
}
}
@ -60,7 +63,6 @@ fn string_to_derive(name: &str) -> Option<Derive> {
}
}
pub fn expand_derive(
error_channel: Sender<Box<dyn Diagnostic>>,
attrs: &[Attribute],
@ -86,7 +88,9 @@ pub fn expand_derive(
for arg in &attr.args {
match arg {
Ident(range, ident) => match string_to_derive(ident.to_str()) {
Some(key) => insert_or_report(error_channel.clone(), &mut def, key, range.clone()),
Some(key) => {
insert_or_report(error_channel.clone(), &mut def, key, range.clone())
}
_ => {
error_channel
.send(Box::new(PassError::InvalidAttributeArgument(
@ -113,7 +117,6 @@ pub fn expand_derive(
} else {
Some(def)
}
}
pub fn expand_book(error_channel: Sender<Box<dyn Diagnostic>>, book: &mut Book) -> bool {
@ -128,15 +131,17 @@ pub fn expand_book(error_channel: Sender<Box<dyn Diagnostic>>, book: &mut Book)
for (key, val) in derive {
match key {
Derive::Match => {
let res = derive_match(sum.name.range, sum);
let (res, errs) = derive_match(sum.name.range, sum);
let info = res.extract_book_info();
entries.insert(res.name.to_string(), (res, info));
for err in errs {
error_channel.send(err).unwrap();
failed = true;
}
}
other => {
error_channel
.send(Box::new(PassError::CannotDerive(
other.to_string(), val,
)))
.send(Box::new(PassError::CannotDerive(other.to_string(), val)))
.unwrap();
failed = true;
}
@ -157,9 +162,7 @@ pub fn expand_book(error_channel: Sender<Box<dyn Diagnostic>>, book: &mut Book)
}
other => {
error_channel
.send(Box::new(PassError::CannotDerive(
other.to_string(), val,
)))
.send(Box::new(PassError::CannotDerive(other.to_string(), val)))
.unwrap();
failed = true;
}

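`insert_or_report` is a first-wins map insert that turns a second occurrence of the same derive key into a diagnostic instead of overwriting the stored range. A sketch of the same behaviour with a plain `HashMap` and string diagnostics as stand-ins for `Derive`, `Range` and the error channel:

```rust
use std::collections::HashMap;

// First occurrence wins; later occurrences produce a diagnostic instead of
// silently replacing the stored position, mirroring insert_or_report.
fn insert_or_report(
    map: &mut HashMap<String, usize>,
    key: String,
    pos: usize,
    diagnostics: &mut Vec<String>,
) {
    if let Some(&first) = map.get(&key) {
        diagnostics.push(format!(
            "argument `{key}` already given at position {first}, duplicated at {pos}"
        ));
    } else {
        map.insert(key, pos);
    }
}

fn main() {
    let mut map = HashMap::new();
    let mut diags = Vec::new();
    insert_or_report(&mut map, "match".into(), 3, &mut diags);
    insert_or_report(&mut map, "match".into(), 9, &mut diags);
    assert_eq!(map["match"], 3); // the first position is kept
    assert_eq!(diags.len(), 1);  // the duplicate is reported
}
```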
View File

@ -6,6 +6,6 @@
pub mod desugar;
pub mod erasure;
mod errors;
pub mod expand;
pub mod unbound;
mod errors;

View File

@ -11,17 +11,12 @@ use std::sync::mpsc::Sender;
use fxhash::{FxHashMap, FxHashSet};
use kind_report::data::Diagnostic;
use kind_span::Range;
use kind_tree::concrete::expr::{Binding, Case, CaseBinding, Destruct};
use kind_tree::concrete::pat::PatIdent;
use kind_tree::concrete::{Book, Module, TopLevel};
use kind_tree::symbol::{Ident, QualifiedIdent};
use kind_tree::concrete::{
expr::{Expr, ExprKind, SttmKind},
pat::{Pat, PatKind},
visitor::Visitor,
Argument, Entry, Rule,
};
use kind_tree::concrete::expr::{Binding, Case, CaseBinding, Destruct, Expr, ExprKind, SttmKind};
use kind_tree::concrete::pat::{Pat, PatIdent, PatKind};
use kind_tree::concrete::visitor::Visitor;
use kind_tree::concrete::{Argument, Book, Entry, Module, Rule, TopLevel};
use kind_tree::symbol::{Ident, QualifiedIdent};
use kind_tree::{visit_opt, visit_vec};
use crate::errors::PassError;
@ -68,11 +63,12 @@ pub fn collect_module_info(
state.visit_module(module);
for idents in state.unbound.values() {
diagnostic_sender.send(Box::new(PassError::UnboundVariable(
idents.to_vec(),
vec![],
)))
.unwrap();
diagnostic_sender
.send(Box::new(PassError::UnboundVariable(
idents.to_vec(),
vec![],
)))
.unwrap();
}
state
@ -293,9 +289,7 @@ impl Visitor for UnboundCollector {
match destruct {
Destruct::Destruct(range, ty, bindings, _) => {
self.visit_qualified_ident(
&mut QualifiedIdent::add_segment(ty, "open")
.to_sugar()
.to_generated(),
&mut QualifiedIdent::add_segment(ty, "open").to_generated(),
);
self.visit_range(range);
self.visit_qualified_ident(ty);
@ -397,68 +391,88 @@ impl Visitor for UnboundCollector {
fn visit_expr(&mut self, expr: &mut Expr) {
match &mut expr.data {
ExprKind::Var(ident) => self.visit_ident(ident),
ExprKind::Constr(ident, spine) => {
self.visit_qualified_ident(ident);
visit_vec!(spine.iter_mut(), arg => self.visit_binding(arg));
ExprKind::Var { name } => self.visit_ident(name),
ExprKind::Constr { name, args } => {
self.visit_qualified_ident(name);
visit_vec!(args.iter_mut(), arg => self.visit_binding(arg));
}
ExprKind::All(None, typ, body) => {
ExprKind::All {
param: None,
typ,
body,
..
} => {
self.visit_expr(typ);
self.visit_expr(body);
}
ExprKind::All(Some(ident), typ, body) => {
ExprKind::All {
param: Some(ident),
typ,
body,
..
} => {
self.visit_expr(typ);
self.context_vars.push((ident.range, ident.to_string()));
self.visit_expr(body);
self.context_vars.pop();
}
ExprKind::Lambda(ident, binder, body, _erased) => {
match binder {
ExprKind::Lambda {
param, typ, body, ..
} => {
match typ {
Some(x) => self.visit_expr(x),
None => (),
}
self.context_vars.push((ident.range, ident.to_string()));
self.context_vars.push((param.range, param.to_string()));
self.visit_expr(body);
self.context_vars.pop();
}
ExprKind::App(head, spine) => {
self.visit_expr(head);
visit_vec!(spine.iter_mut(), arg => self.visit_expr(&mut arg.data));
ExprKind::App { fun, args } => {
self.visit_expr(fun);
visit_vec!(args.iter_mut(), arg => self.visit_expr(&mut arg.data));
}
ExprKind::Ann(val, ty) => {
ExprKind::Ann { val, typ } => {
self.visit_expr(val);
self.visit_expr(ty);
self.visit_expr(typ);
}
ExprKind::Lit(lit) => self.visit_literal(lit),
ExprKind::Binary(_, l, r) => {
self.visit_expr(l);
self.visit_expr(r);
ExprKind::Lit { lit } => self.visit_literal(lit),
ExprKind::Binary { op: _, fst, snd } => {
self.visit_expr(fst);
self.visit_expr(snd);
}
ExprKind::Let(ident, val, body) => {
ExprKind::Let { name, val, next } => {
self.visit_expr(val);
let vars = self.context_vars.clone();
self.visit_destruct(ident);
self.visit_expr(body);
self.visit_destruct(name);
self.visit_expr(next);
self.context_vars = vars;
}
ExprKind::Sigma(None, typ, body) => {
self.visit_qualified_ident(
QualifiedIdent::new_static("Sigma", None, expr.range).to_sugar(),
);
self.visit_expr(typ);
self.visit_expr(body);
ExprKind::Sigma {
param: None,
fst,
snd,
} => {
self.visit_qualified_ident(&mut QualifiedIdent::new_static(
"Sigma", None, expr.range,
));
self.visit_expr(fst);
self.visit_expr(snd);
}
ExprKind::Sigma(Some(ident), typ, body) => {
self.visit_qualified_ident(
QualifiedIdent::new_static("Sigma", None, expr.range).to_sugar(),
);
self.visit_expr(typ);
ExprKind::Sigma {
param: Some(ident),
fst,
snd,
} => {
self.visit_qualified_ident(&mut QualifiedIdent::new_static(
"Sigma", None, expr.range,
));
self.visit_expr(fst);
self.context_vars.push((ident.range, ident.to_string()));
self.visit_expr(body);
self.visit_expr(snd);
self.context_vars.pop();
}
ExprKind::Match(matcher) => {
self.visit_qualified_ident(matcher.typ.add_segment("match").to_sugar());
self.visit_qualified_ident(&mut matcher.typ.add_segment("match"));
self.visit_match(matcher)
}
ExprKind::Subst(subst) => {
@ -475,34 +489,34 @@ impl Visitor for UnboundCollector {
self.visit_expr(&mut subst.expr)
}
ExprKind::Hole => {}
ExprKind::Do(typ, sttm) => {
self.visit_qualified_ident(typ.add_segment("pure").to_sugar());
self.visit_qualified_ident(typ.add_segment("bind").to_sugar());
ExprKind::Do { typ, sttm } => {
self.visit_qualified_ident(&mut typ.add_segment("pure"));
self.visit_qualified_ident(&mut typ.add_segment("bind"));
self.visit_sttm(sttm)
}
ExprKind::If(cond, if_, else_) => {
ExprKind::If { cond, then_, else_ } => {
self.visit_qualified_ident(&mut QualifiedIdent::new_sugared(
"Bool", "if", expr.range,
));
self.visit_expr(cond);
self.visit_expr(if_);
self.visit_expr(then_);
self.visit_expr(else_);
}
ExprKind::Pair(l, r) => {
ExprKind::Pair { fst, snd } => {
self.visit_qualified_ident(&mut QualifiedIdent::new_sugared(
"Pair", "new", expr.range,
));
self.visit_expr(l);
self.visit_expr(r);
self.visit_expr(fst);
self.visit_expr(snd);
}
ExprKind::List(spine) => {
ExprKind::List { args } => {
self.visit_qualified_ident(&mut QualifiedIdent::new_sugared(
"List", "nil", expr.range,
));
self.visit_qualified_ident(&mut QualifiedIdent::new_sugared(
"List", "cons", expr.range,
));
visit_vec!(spine.iter_mut(), arg => self.visit_expr(arg));
visit_vec!(args.iter_mut(), arg => self.visit_expr(arg));
}
}
}

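The collector above tracks scoping by pushing each binder onto `context_vars` before visiting the body and popping it afterwards; a variable that is not on the stack when visited is unbound. A sketch of that discipline on a tiny lambda-calculus AST (hypothetical `Term`, far smaller than kind2's concrete tree):

```rust
// Minimal term language to demonstrate the push/visit/pop scoping pattern.
enum Term {
    Var(String),
    Lam(String, Box<Term>),
    App(Box<Term>, Box<Term>),
}

struct UnboundCollector {
    context_vars: Vec<String>,
    unbound: Vec<String>,
}

impl UnboundCollector {
    fn visit(&mut self, term: &Term) {
        match term {
            Term::Var(name) => {
                if !self.context_vars.contains(name) {
                    self.unbound.push(name.clone());
                }
            }
            Term::Lam(param, body) => {
                // The binder is visible only while the body is being visited.
                self.context_vars.push(param.clone());
                self.visit(body);
                self.context_vars.pop();
            }
            Term::App(fun, arg) => {
                self.visit(fun);
                self.visit(arg);
            }
        }
    }
}

fn main() {
    // (x => (x y)): `x` is bound by the lambda, `y` is not.
    let term = Term::Lam(
        "x".into(),
        Box::new(Term::App(
            Box::new(Term::Var("x".into())),
            Box::new(Term::Var("y".into())),
        )),
    );
    let mut c = UnboundCollector { context_vars: vec![], unbound: vec![] };
    c.visit(&term);
    assert_eq!(c.unbound, vec!["y".to_string()]);
}
```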
View File

@ -14,7 +14,7 @@ pub struct Node<T> {
#[derive(Debug)]
pub struct Graph<T> {
// Using a hashmap to make it easier to add or remove nodes.
pub nodes: FxHashMap<usize, Node<T>>,
nodes: FxHashMap<usize, Node<T>>,
count: usize,
}
@ -85,9 +85,22 @@ impl<T> Graph<T> {
pub fn remove(&mut self, node_idx: usize) -> FxHashSet<usize> {
let mut fx = Default::default();
self.remove_recursive(node_idx, &mut fx);
fx.insert(node_idx);
fx
}
pub fn disconnect_parent(&mut self, child: usize, parent: usize) -> bool {
if let Some(parent) = self.nodes.get_mut(&parent) {
parent.children.remove(&child);
}
if let Some(child) = self.nodes.get_mut(&child) {
child.parents.remove(&parent);
child.parents.len() == 0
} else {
false
}
}
pub fn flood_invalidation(&mut self, node: usize) {
if let Some(node) = self.nodes.get_mut(&node) {
if !node.invalidated {

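`disconnect_parent` severs one dependency edge in both directions and tells the caller whether the child just lost its last parent, so unreachable modules can be pruned. A self-contained sketch with `HashSet`-based nodes (hypothetical `Node`/`Graph`, simplified from the crate's generic version):

```rust
use std::collections::{HashMap, HashSet};

#[derive(Default)]
struct Node {
    parents: HashSet<usize>,
    children: HashSet<usize>,
}

#[derive(Default)]
struct Graph {
    nodes: HashMap<usize, Node>,
}

impl Graph {
    fn connect(&mut self, parent: usize, child: usize) {
        self.nodes.entry(parent).or_default().children.insert(child);
        self.nodes.entry(child).or_default().parents.insert(parent);
    }

    // Removes the edge in both directions and reports whether `child`
    // is now orphaned (has no parents left).
    fn disconnect_parent(&mut self, child: usize, parent: usize) -> bool {
        if let Some(p) = self.nodes.get_mut(&parent) {
            p.children.remove(&child);
        }
        if let Some(c) = self.nodes.get_mut(&child) {
            c.parents.remove(&parent);
            c.parents.is_empty()
        } else {
            false
        }
    }
}

fn main() {
    let mut g = Graph::default();
    g.connect(0, 1);
    g.connect(2, 1);
    assert!(!g.disconnect_parent(1, 0)); // still reachable through node 2
    assert!(g.disconnect_parent(1, 2));  // now orphaned
}
```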
View File

@ -1,420 +1,3 @@
//! This module is a generalization of the driver
//! module. It is useful both for LSPs, Watch, Repl
//! and many other things.
mod errors;
mod graph;
mod names;
use std::fs;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::mpsc::Sender;
use errors::DriverError;
use fxhash::FxHashMap;
use fxhash::FxHashSet;
use graph::Graph;
use kind_pass::desugar;
use kind_pass::erasure;
use kind_pass::expand;
use kind_pass::unbound;
use kind_report::data::Diagnostic;
use kind_report::report::FileCache;
use kind_span::Range;
use kind_tree::concrete;
use kind_tree::concrete::Book;
use kind_tree::concrete::Module;
use kind_tree::concrete::TopLevel;
use kind_tree::symbol::QualifiedIdent;
pub enum Status {
Module,
Entry,
}
pub struct Resource {
concrete_tree: concrete::Module,
exposed_entries: FxHashSet<String>,
}
pub struct File {
path: PathBuf,
input: String,
hash: u64,
}
pub enum Resolution<T> {
Added(T),
Reuse(T),
Fail,
}
impl<T> Resolution<T> {
pub fn is_fail(&self) -> bool {
matches!(self, Resolution::Fail)
}
}
#[derive(Default)]
pub struct Storage {
pub graph: Graph<usize>,
resources: FxHashMap<usize, Rc<Resource>>,
files: FxHashMap<usize, File>,
entries: FxHashMap<String, (Range, usize)>,
}
pub struct Session<'a> {
pub storage: &'a mut Storage,
handler: &'a mut dyn SessionHandler,
root: PathBuf,
count: usize,
}
impl FileCache for Storage {
fn fetch(&self, ctx: kind_span::SyntaxCtxIndex) -> Option<(PathBuf, &String)> {
let file = self.files.get(&ctx.0).unwrap();
Some((file.path.clone(), &file.input))
}
}
pub trait SessionHandler {
fn on_add_file(&mut self, file: PathBuf, id: usize);
fn on_rem_file(&mut self, id: usize);
fn on_errors(&mut self, storage: &Storage, uri: usize, errs: Vec<Box<dyn Diagnostic>>);
fn get_id_by_path(&mut self, path: &PathBuf) -> Option<usize>;
}
pub fn add_module_to_book(book: &mut Book, module: &Module) {
for entry in &module.entries {
match entry {
TopLevel::SumType(sum) => {
for cons in &sum.constructors {
let name = sum.name.add_segment(cons.name.to_str()).to_string();
book.count.insert(name, cons.extract_book_info(&sum));
}
let name = sum.name.to_string();
book.count.insert(name.clone(), sum.extract_book_info());
book.entries.insert(name, entry.clone());
}
TopLevel::RecordType(rec) => {
let cons_ident = rec.name.add_segment(rec.constructor.to_str()).to_string();
book.count
.insert(cons_ident, rec.extract_book_info_of_constructor());
let name = rec.name.to_string();
book.count.insert(name.clone(), rec.extract_book_info());
book.entries.insert(name, entry.clone());
}
TopLevel::Entry(entr) => {
let name = entr.name.to_string();
book.count.insert(name.clone(), entr.extract_book_info());
book.entries.insert(name, entry.clone());
}
}
}
}
impl<'a> Session<'a> {
pub fn new(
handler: &'a mut dyn SessionHandler,
storage: &'a mut Storage,
root: PathBuf,
) -> Session<'a> {
Session {
storage,
handler,
root,
count: 0,
}
}
pub fn set_input(&mut self, module_id: usize, input: String) {
if let Some(file) = self.storage.files.get_mut(&module_id) {
self.storage.graph.flood_invalidation(module_id);
file.hash = fxhash::hash64(&input);
file.input = input;
} else {
todo!()
}
}
pub fn remove_node(&mut self, module_id: usize) {
self.storage.graph.remove(module_id);
self.storage.files.remove(&module_id);
if let Some(res) = self.storage.resources.remove(&module_id) {
for entry in &res.exposed_entries {
self.storage.entries.remove(entry);
}
};
self.handler.on_rem_file(module_id)
}
fn register_module(&mut self, path: PathBuf, _module_name: String, input: String) -> usize {
let module_id = self.count;
self.count += 1;
let file = File {
path,
hash: fxhash::hash64(&input),
input,
};
self.storage.graph.add(module_id, 1, false);
self.storage.files.insert(module_id, file);
module_id
}
fn register_new_module(
&mut self,
errs: Sender<Box<dyn Diagnostic>>,
module_name: &QualifiedIdent,
) -> Resolution<usize> {
let name = module_name.to_string();
let node = self.storage.entries.get(&name);
if let Some((_, module_id)) = node {
Resolution::Reuse(*module_id)
} else {
let path = match names::ident_to_path(&self.root, module_name) {
Ok(Some(res)) => res,
Ok(None) => {
errs.send(Box::new(DriverError::UnboundVariable(
vec![module_name.to_ident()],
vec![],
)))
.unwrap();
return Resolution::Fail;
}
Err(err) => {
errs.send(err).unwrap();
return Resolution::Fail;
}
};
let input = fs::read_to_string(&path).unwrap();
let id = self.register_module(path.clone(), name, input);
self.handler.on_add_file(path, id);
Resolution::Added(id)
}
}
fn register_names(
&mut self,
errs: Sender<Box<dyn Diagnostic>>,
names: FxHashMap<String, Range>,
module_id: usize,
) -> bool {
// Pre-check to avoid registering bad inputs.
let mut failed = false;
for (name, range) in &names {
if let Some((first, _)) = self.storage.entries.get(name) {
errs.send(Box::new(DriverError::DefinedMultipleTimes(
QualifiedIdent::new_static(name, None, first.clone()),
QualifiedIdent::new_static(name, None, range.clone()),
)))
.unwrap();
failed = true;
}
}
if !failed {
for (name, range) in names {
self.storage.entries.insert(name, (range, module_id));
}
}
failed
}
fn collect_resources(&mut self, root: usize, modules: &mut FxHashMap<usize, Rc<Resource>>) {
if !modules.contains_key(&root) {
let resource = self.storage.resources.get(&root).unwrap().clone();
modules.insert(root, resource);
for child in &self.storage.graph.get(&root).unwrap().children.clone() {
self.collect_resources(*child, modules);
}
}
}
pub fn get_id_by_path(&mut self, path: &PathBuf) -> Option<usize> {
self.handler.get_id_by_path(path)
}
pub fn check_module(&mut self, module_id: usize) -> Option<()> {
let mut added = Vec::new();
let failed = self.compile_module(module_id, &mut added);
if !failed {
let mut resources = Default::default();
self.collect_resources(module_id, &mut resources);
let mut concrete_book = Book::default();
for module in resources.values() {
add_module_to_book(&mut concrete_book, &module.concrete_tree)
}
let (rx, tx) = std::sync::mpsc::channel();
let desugared_book = desugar::desugar_book(rx.clone(), &concrete_book)?;
let changed_functions = added
.iter()
.map(|x| {
resources
.get(x)
.unwrap()
.exposed_entries
.iter()
.cloned()
.collect::<Vec<_>>()
})
.flatten()
.collect::<Vec<_>>();
kind_checker::type_check(&desugared_book, rx.clone(), changed_functions);
let entrypoints = FxHashSet::from_iter(["Main".to_string()]);
erasure::erase_book(&desugared_book, rx.clone(), entrypoints)?;
let errs = tx.try_iter().collect::<Vec<_>>();
let mut groups = FxHashMap::default();
for err in errs {
if let Some(ctx) = err.get_syntax_ctx() {
let res: &mut Vec<_> = groups.entry(ctx).or_default();
res.push(err);
}
}
for (ctx, errs) in groups {
self.handler.on_errors(self.storage, ctx.0, errs)
}
}
Some(())
}
pub fn init_project(&mut self, path: PathBuf) -> usize {
let input = fs::read_to_string(&path).unwrap();
self.register_module(path, "Main".to_string(), input)
}
fn compile_module(&mut self, module_id: usize, added: &mut Vec<usize>) -> bool {
let file = self.storage.files.get(&module_id).unwrap();
let hash = file.hash;
if let Some(node) = self.storage.graph.get(&module_id) {
if !node.invalidated && node.hash == file.hash {
return false;
}
}
let (rx, tx) = std::sync::mpsc::channel();
// Parses the "module"
let (mut module, mut failed) = kind_parser::parse_book(rx.clone(), module_id, &file.input);
// Expand aliases
failed |= expand::uses::expand_uses(&mut module, rx.clone());
// Collects all of the unbound variables and top level
// in order to recursively get all of the unbound files.
let state = unbound::collect_module_info(rx.clone(), &mut module, false);
let module_definitions = state.top_level_defs.clone();
let last_names = if let Some(res) = self.storage.resources.get(&module_id) {
res.exposed_entries.clone()
} else {
FxHashSet::default()
};
let mut diff = module_definitions.clone();
for name in last_names {
diff.remove(&name);
}
failed |= self.register_names(rx.clone(), diff, module_id);
if !failed {
let mut nodes = FxHashSet::default();
for (_, idents) in state.unbound_top_level {
let first = idents.iter().last().unwrap();
let result = self.register_new_module(rx.clone(), &first);
failed |= result.is_fail();
failed |= match result {
Resolution::Reuse(id) => {
added.push(id);
nodes.insert(id);
self.compile_module(id, added)
}
Resolution::Added(id) => {
let file = self.storage.files.get(&id).unwrap();
added.push(id);
nodes.insert(id);
self.compile_module(id, added)
}
Resolution::Fail => true,
};
}
let node = self.storage.graph.get_mut(&module_id).unwrap();
node.hash = hash;
node.failed = false;
let removed = node
.children
.difference(&nodes)
.cloned()
.collect::<Vec<_>>();
let added = nodes
.difference(&node.children)
.cloned()
.collect::<Vec<_>>();
node.children.extend(nodes);
for id in added {
self.storage.graph.connect(module_id, id)
}
for id in removed {
self.remove_node(id)
}
}
let errs = tx.try_iter().collect::<Vec<_>>();
let node = self.storage.graph.get_mut(&module_id).unwrap();
node.failed = failed;
if errs.is_empty() {
self.storage.resources.insert(
module_id,
Rc::new(Resource {
concrete_tree: module,
exposed_entries: FxHashSet::from_iter(module_definitions.keys().cloned()),
}),
);
}
self.handler.on_errors(self.storage, module_id, errs);
failed
}
}

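The incremental guard in `compile_module` skips reparsing when the stored content hash still matches and no dependency flagged the node as invalidated. A sketch of that check using the standard library hasher in place of `fxhash` (hypothetical `ModuleNode`, reduced from the session's graph node):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn hash_of(input: &str) -> u64 {
    let mut h = DefaultHasher::new();
    input.hash(&mut h);
    h.finish()
}

struct ModuleNode {
    hash: u64,
    invalidated: bool,
}

// The cached result can be reused only when the source hash is unchanged and
// no upstream change marked the node as invalidated.
fn can_skip(node: &ModuleNode, current_input: &str) -> bool {
    !node.invalidated && node.hash == hash_of(current_input)
}

fn main() {
    let source = "Main : U60\nMain = 42";
    let mut node = ModuleNode { hash: hash_of(source), invalidated: false };

    assert!(can_skip(&node, source));        // untouched: reuse
    assert!(!can_skip(&node, "Main = 43"));  // edited: recompile
    node.invalidated = true;
    assert!(!can_skip(&node, source));       // dependency changed: recompile
}
```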
View File

@ -11,4 +11,5 @@ kind-span = { path = "../kind-span" }
unicode-width = "0.1.10"
yansi = "0.5.1"
pathdiff = "0.2.1"
fxhash = "0.2.1"
fxhash = "0.2.1"
termsize = "0.1"

View File

@ -52,13 +52,11 @@ pub struct DiagnosticFrame {
pub hints: Vec<String>,
pub positions: Vec<Marker>,
}
pub enum Log {
Checking(String),
Checked(Duration),
Failed(Duration),
}
pub trait Diagnostic {
fn get_syntax_ctx(&self) -> Option<SyntaxCtxIndex>;
fn to_diagnostic_frame(&self) -> DiagnosticFrame;

View File

@ -186,7 +186,7 @@ fn mark_inlined<T: Write + Sized>(
write!(
fmt,
"{}",
colorizer(format!("{} {}", config.chars.trline, marker.2.text))
colorizer(format!("{}{}", config.chars.trline, marker.2.text))
)?;
} else {
write!(fmt, "{}", colorizer(config.chars.vbar.to_string()))?;
@ -459,7 +459,9 @@ impl Report for Box<dyn Diagnostic> {
for (ctx, group) in groups {
writeln!(fmt)?;
let (file, code) = cache.fetch(ctx).unwrap();
let diff =file.clone();
let diff =
pathdiff::diff_paths(&file.clone(), PathBuf::from(".").canonicalize().unwrap())
.unwrap_or(file);
write_code_block(&diff, config, &group, code, fmt)?;
}

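The report change above prints file paths relative to the current directory, falling back to the original path when no relative form can be computed. A sketch of that fallback using the same `pathdiff::diff_paths` call (assuming the `pathdiff` crate from kind-report's dependencies is in scope):

```rust
use std::path::{Path, PathBuf};

// Makes `file` relative to `base` for display, keeping the original path when
// a relative form cannot be computed (e.g. paths on different roots).
fn display_path(file: &Path, base: &Path) -> PathBuf {
    pathdiff::diff_paths(file, base).unwrap_or_else(|| file.to_path_buf())
}

fn main() {
    let base = PathBuf::from("/home/user/project");
    let file = PathBuf::from("/home/user/project/src/Main.kind2");
    assert_eq!(display_path(&file, &base), PathBuf::from("src/Main.kind2"));
}
```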
View File

@ -3,6 +3,16 @@ use kind_tree::{
desugared,
};
pub fn compile_book(book: desugared::Book) -> File {
let mut file = File {
rules: Default::default(),
};
for (_, entry) in book.entrs {
compile_entry(&mut file, *entry);
}
file
}
pub fn compile_term(expr: &desugared::Expr) -> Box<Term> {
use desugared::ExprKind::*;
match &expr.data {
@ -30,12 +40,12 @@ pub fn compile_term(expr: &desugared::Expr) -> Box<Term> {
}),
Ann(left, _) => compile_term(left),
Sub(_, _, _, expr) => compile_term(expr),
Num(kind_tree::Number::U60(numb)) => Box::new(Term::Num { numb: *numb }),
Num(kind_tree::Number::U60(numb)) => Box::new(Term::U6O { numb: *numb }),
Num(kind_tree::Number::U120(numb)) => {
let hi = Box::new(Term::Num {
let hi = Box::new(Term::U6O {
numb: (numb >> 60) as u64,
});
let lo = Box::new(Term::Num {
let lo = Box::new(Term::U6O {
numb: (numb & 0xFFFFFFFFFFFFFFF) as u64,
});
Box::new(Term::Ctr {
@ -47,17 +57,31 @@ pub fn compile_term(expr: &desugared::Expr) -> Box<Term> {
name: op.to_string(),
args: vec![compile_term(l), compile_term(r)],
}),
Str(str) => {
let nil = Box::new(Term::Ctr {
name: String::from("String.nil"),
args: vec![],
});
let cons = |numb, next| {
Box::new(Term::Ctr {
name: String::from("U120.new"),
args: vec![Box::new(Term::U6O { numb }), next],
})
};
str.chars().rfold(nil, |rest, chr| cons(chr as u64, rest))
}
Hole(_) => unreachable!("Internal Error: 'Hole' cannot be a relevant term"),
Typ => unreachable!("Internal Error: 'Typ' cannot be a relevant term"),
NumType(typ) => unreachable!("Internal Error: '{:?}' cannot be a relevant term", typ),
All(_, _, _) => unreachable!("Internal Error: 'All' cannot be a relevant term"),
Str(_) => unreachable!("Internal Error: 'Str' cannot be a relevant term"),
All(_, _, _, _) => unreachable!("Internal Error: 'All' cannot be a relevant term"),
Hlp(_) => unreachable!("Internal Error: 'Hlp' cannot be a relevant term"),
Err => unreachable!("Internal Error: 'Err' cannot be a relevant term"),
}
}
pub fn compile_rule(rule: desugared::Rule) -> Rule {
fn compile_rule(rule: desugared::Rule) -> Rule {
Rule {
lhs: Box::new(Term::Ctr {
name: rule.name.to_string(),
@ -67,18 +91,8 @@ pub fn compile_rule(rule: desugared::Rule) -> Rule {
}
}
pub fn compile_entry(file: &mut File, entry: desugared::Entry) {
fn compile_entry(file: &mut File, entry: desugared::Entry) {
for rule in entry.rules {
file.rules.push(compile_rule(rule))
}
}
pub fn compile_book(book: desugared::Book) -> File {
let mut file = File {
rules: Default::default(),
};
for (_, entry) in book.entrs {
compile_entry(&mut file, *entry);
}
file
}

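The `U120` case packs a 120-bit literal into two 60-bit HVM numbers, `hi = numb >> 60` and `lo = numb & 0xFFFFFFFFFFFFFFF`. A small sketch of that split and the round trip back, in plain `u128` arithmetic independent of the HVM `Term` type:

```rust
// A 60-bit mask: HVM native numbers are 60 bits wide.
const MASK_60: u128 = (1u128 << 60) - 1;

// Splits a 120-bit value into the (hi, lo) pair passed to the U120.new constructor.
fn split_u120(numb: u128) -> (u64, u64) {
    (((numb >> 60) & MASK_60) as u64, (numb & MASK_60) as u64)
}

// Reassembles the original value from the two limbs.
fn join_u120(hi: u64, lo: u64) -> u128 {
    ((hi as u128) << 60) | (lo as u128)
}

fn main() {
    let n: u128 = (1 << 100) + 12345;
    let (hi, lo) = split_u120(n);
    assert!(hi < (1 << 60) && lo < (1 << 60)); // both limbs fit in 60 bits
    assert_eq!(join_u120(hi, lo), n);          // lossless round trip
}
```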
View File

@ -6,7 +6,8 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
hvm = { path = "../../../HVM/crates/hvm" }
kind-span = {path = "../kind-span"}
linked-hash-map = "0.5.6"
fxhash = "0.2.1"
hvm = "0.1.81"
fxhash = "0.2.1"

View File

@ -4,10 +4,8 @@
//! error messages.
use super::pat::PatIdent;
use crate::{
symbol::{Ident, QualifiedIdent},
Operator,
};
use crate::symbol::{Ident, QualifiedIdent};
use crate::Operator;
use kind_span::{Locatable, Range};
use std::fmt::{Display, Error, Formatter};
@ -126,40 +124,72 @@ pub struct Sttm {
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub enum ExprKind {
/// Name of a variable
Var(Ident),
Var { name: Ident },
/// Name of a function/constructor
Constr(QualifiedIdent, Spine),
Constr { name: QualifiedIdent, args: Spine },
/// The dependent function space (e.g. (x : Int) -> y)
All(Option<Ident>, Box<Expr>, Box<Expr>),
All {
param: Option<Ident>,
typ: Box<Expr>,
body: Box<Expr>,
erased: bool,
},
/// The dependent product space (e.g. [x : Int] -> y)
Sigma(Option<Ident>, Box<Expr>, Box<Expr>),
Sigma {
param: Option<Ident>,
fst: Box<Expr>,
snd: Box<Expr>,
},
/// An anonymous function that receives one argument
Lambda(Ident, Option<Box<Expr>>, Box<Expr>, bool),
Lambda {
param: Ident,
typ: Option<Box<Expr>>,
body: Box<Expr>,
erased: bool,
},
/// Application of a expression to a spine of expressions
App(Box<Expr>, Vec<AppBinding>),
App {
fun: Box<Expr>,
args: Vec<AppBinding>,
},
/// Declaration of a local variable
Let(Destruct, Box<Expr>, Box<Expr>),
Let {
name: Destruct,
val: Box<Expr>,
next: Box<Expr>,
},
/// Type ascription (x : y)
Ann(Box<Expr>, Box<Expr>),
Ann { val: Box<Expr>, typ: Box<Expr> },
/// Literal
Lit(Literal),
Lit { lit: Literal },
/// Binary operation (e.g. 2 + 3)
Binary(Operator, Box<Expr>, Box<Expr>),
Binary {
op: Operator,
fst: Box<Expr>,
snd: Box<Expr>,
},
/// An expression open to unification (e.g. _)
Hole,
/// Do notation
Do {
typ: QualifiedIdent,
sttm: Box<Sttm>,
},
/// If else statement
If {
cond: Box<Expr>,
then_: Box<Expr>,
else_: Box<Expr>,
},
/// Anonymous pair (e.g. $ a b)
Pair { fst: Box<Expr>, snd: Box<Expr> },
/// Array
List { args: Vec<Expr> },
/// Substitution
Subst(Substitution),
/// A match block that will be translated
/// into an eliminator of a datatype.
Match(Box<Match>),
/// Do notation
Do(QualifiedIdent, Box<Sttm>),
/// If else statement
If(Box<Expr>, Box<Expr>, Box<Expr>),
/// If else statement
Pair(Box<Expr>, Box<Expr>),
/// Array
List(Vec<Expr>),
}
/// Describes a single expression inside Kind2.
@ -197,13 +227,27 @@ impl Display for Operator {
impl Expr {
pub fn traverse_pi_types(&self) -> String {
match &self.data {
ExprKind::All(binder, typ, body) => match binder {
None => format!("{} -> {}", typ, body.traverse_pi_types()),
Some(binder) => format!("({} : {}) -> {}", binder, typ, body.traverse_pi_types()),
},
ExprKind::Sigma(binder, typ, body) => match binder {
None => format!("{} -> {}", typ, body.traverse_pi_types()),
Some(binder) => format!("[{} : {}] -> {}", binder, typ, body.traverse_pi_types()),
ExprKind::All {
param,
typ,
body,
erased,
} => {
let tilde = if *erased { "~" } else { "" };
match param {
None => format!("{}{} -> {}", tilde, typ, body.traverse_pi_types()),
Some(binder) => format!(
"{}({} : {}) -> {}",
tilde,
binder,
typ,
body.traverse_pi_types()
),
}
}
ExprKind::Sigma { param, fst, snd } => match param {
None => format!("{} -> {}", fst, snd.traverse_pi_types()),
Some(binder) => format!("[{} : {}] -> {}", binder, fst, snd.traverse_pi_types()),
},
_ => format!("{}", self),
}
@ -393,44 +437,66 @@ impl Display for Expr {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
use ExprKind::*;
match &self.data {
Do(id, sttms) => write!(f, "(do {} {{{}}})", id, sttms),
All(_, _, _) => write!(f, "({})", self.traverse_pi_types()),
Sigma(_, _, _) => write!(f, "({})", self.traverse_pi_types()),
Lit(lit) => write!(f, "{}", lit),
Var(name) => write!(f, "{}", name),
Constr(head, spine) => write!(
Do { typ, sttm } => write!(f, "(do {} {{{}}})", typ, sttm),
All { .. } => write!(f, "({})", self.traverse_pi_types()),
Sigma { .. } => write!(f, "({})", self.traverse_pi_types()),
Lit { lit } => write!(f, "{}", lit),
Var { name } => write!(f, "{}", name),
Constr { name, args } => write!(
f,
"({}{})",
head,
spine.iter().map(|x| format!(" {}", x)).collect::<String>()
name,
args.iter().map(|x| format!(" {}", x)).collect::<String>()
),
Lambda(binder, None, body, false) => write!(f, "({} => {})", binder, body),
Lambda(binder, Some(typ), body, false) => {
write!(f, "(({} : {}) => {})", binder, typ, body)
Lambda {
param,
typ: None,
body,
erased: false,
} => write!(f, "({} => {})", param, body),
Lambda {
param,
typ: Some(typ),
body,
erased: false,
} => {
write!(f, "(({} : {}) => {})", param, typ, body)
}
Lambda(binder, None, body, true) => write!(f, "({{{}}} => {})", binder, body),
Lambda(binder, Some(typ), body, true) => {
write!(f, "({{{} : {}}} => {})", binder, typ, body)
Lambda {
param,
typ: None,
body,
erased: true,
} => write!(f, "(-({}) => {})", param, body),
Lambda {
param,
typ: Some(typ),
body,
erased: true,
} => {
write!(f, "({{{} : {}}} => {})", param, typ, body)
}
Pair(fst, snd) => write!(f, "($ {} {})", fst, snd),
App(head, spine) => write!(
Pair { fst, snd } => write!(f, "($ {} {})", fst, snd),
App { fun, args } => write!(
f,
"({}{})",
head,
spine.iter().map(|x| format!(" {}", x)).collect::<String>()
fun,
args.iter().map(|x| format!(" {}", x)).collect::<String>()
),
Let(name, expr, body) => write!(f, "(let {} = {}; {})", name, expr, body),
If(cond, if_, else_) => write!(f, "(if {} {{{}}} else {{{}}})", cond, if_, else_),
List(vec) => write!(
Let { name, val, next } => write!(f, "(let {} = {}; {})", name, val, next),
If { cond, then_, else_ } => {
write!(f, "(if {} {{{}}} else {{{}}})", cond, then_, else_)
}
List { args } => write!(
f,
"[{}]",
vec.iter()
args.iter()
.map(|x| format!("{}", x))
.collect::<Vec<String>>()
.join(" ")
),
Ann(expr, typ) => write!(f, "({} :: {})", expr, typ),
Binary(op, expr, typ) => write!(f, "({} {} {})", op, expr, typ),
Ann { val: name, typ } => write!(f, "({} :: {})", name, typ),
Binary { op, fst, snd } => write!(f, "({} {} {})", op, fst, snd),
Match(matcher) => write!(f, "({})", matcher),
Subst(subst) => write!(f, "({})", subst),
Hole => write!(f, "_"),

View File

@ -51,7 +51,10 @@ impl<T> Telescope<T> {
}
}
impl<T> Telescope<T> where T : Clone {
impl<T> Telescope<T>
where
T: Clone,
{
pub fn drop(self, num: usize) -> Telescope<T> {
Telescope(self.0[num..].to_vec())
}

View File

@ -373,78 +373,98 @@ pub fn walk_sttm<T: Visitor>(ctx: &mut T, sttm: &mut Sttm) {
pub fn walk_expr<T: Visitor>(ctx: &mut T, expr: &mut Expr) {
ctx.visit_range(&mut expr.range);
match &mut expr.data {
ExprKind::Var(ident) => ctx.visit_ident(ident),
ExprKind::Constr(ident, spine) => {
ctx.visit_qualified_ident(ident);
for arg in spine {
ExprKind::Var { name } => ctx.visit_ident(name),
ExprKind::Constr { name, args } => {
ctx.visit_qualified_ident(name);
for arg in args {
ctx.visit_binding(arg);
}
}
ExprKind::All(None, typ, body) => {
ctx.visit_expr(typ);
ctx.visit_expr(body);
}
ExprKind::Pair(fst, snd) => {
ExprKind::Pair { fst, snd } => {
ctx.visit_expr(fst);
ctx.visit_expr(snd);
}
ExprKind::All(Some(ident), typ, body) => {
ExprKind::All {
param: None,
typ,
body,
..
} => {
ctx.visit_expr(typ);
ctx.visit_expr(body);
}
ExprKind::All {
param: Some(ident),
typ,
body,
..
} => {
ctx.visit_ident(ident);
ctx.visit_expr(typ);
ctx.visit_expr(body);
}
ExprKind::Sigma(None, typ, body) => {
ctx.visit_expr(typ);
ctx.visit_expr(body);
ExprKind::Sigma {
param: None,
fst,
snd,
} => {
ctx.visit_expr(fst);
ctx.visit_expr(snd);
}
ExprKind::If(cond, if_, else_) => {
ExprKind::If { cond, then_, else_ } => {
ctx.visit_expr(cond);
ctx.visit_expr(if_);
ctx.visit_expr(then_);
ctx.visit_expr(else_);
}
ExprKind::Sigma(Some(ident), typ, body) => {
ExprKind::Sigma {
param: Some(ident),
fst,
snd,
} => {
ctx.visit_ident(ident);
ctx.visit_expr(typ);
ctx.visit_expr(body);
ctx.visit_expr(fst);
ctx.visit_expr(snd);
}
ExprKind::Do(ident, sttm) => {
ctx.visit_qualified_ident(ident);
ExprKind::Do { typ, sttm } => {
ctx.visit_qualified_ident(typ);
ctx.visit_sttm(sttm)
}
ExprKind::Lambda(ident, binder, body, _erased) => {
ctx.visit_ident(ident);
match binder {
ExprKind::Lambda {
param, typ, body, ..
} => {
ctx.visit_ident(param);
match typ {
Some(x) => ctx.visit_expr(x),
None => (),
}
ctx.visit_expr(body);
}
ExprKind::App(expr, spine) => {
ctx.visit_expr(expr);
for arg in spine {
ExprKind::App { fun, args } => {
ctx.visit_expr(fun);
for arg in args {
ctx.visit_app_binding(arg);
}
}
ExprKind::List(spine) => {
for arg in spine {
ExprKind::List { args } => {
for arg in args {
ctx.visit_expr(arg);
}
}
ExprKind::Let(destruct, val, body) => {
ctx.visit_destruct(destruct);
ExprKind::Let { name, val, next } => {
ctx.visit_destruct(name);
ctx.visit_expr(val);
ctx.visit_expr(body);
ctx.visit_expr(next);
}
ExprKind::Ann(val, ty) => {
ExprKind::Ann { val, typ } => {
ctx.visit_expr(val);
ctx.visit_expr(ty);
ctx.visit_expr(typ);
}
ExprKind::Lit(lit) => {
ExprKind::Lit { lit } => {
ctx.visit_literal(lit);
}
ExprKind::Binary(_op, a, b) => {
ctx.visit_expr(a);
ctx.visit_expr(b);
ExprKind::Binary { op: _, fst, snd } => {
ctx.visit_expr(fst);
ctx.visit_expr(snd);
}
ExprKind::Hole => {}
ExprKind::Subst(subst) => ctx.visit_substitution(subst),

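For context, a pass built on this walker usually overrides only the hooks it cares about and recurses through walk_expr. A minimal sketch, assuming the Visitor trait supplies default methods that delegate to the corresponding walk_* functions; the struct and its behaviour are illustrative, not part of this diff:

struct Renamer;

impl Visitor for Renamer {
    fn visit_ident(&mut self, ident: &mut Ident) {
        // Suffix every identifier; `add_segment` comes from kind_tree::symbol.
        *ident = ident.add_segment("renamed");
    }

    fn visit_expr(&mut self, expr: &mut Expr) {
        // Delegate to the walker above so every sub-expression is visited.
        walk_expr(self, expr);
    }
}
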
View File

@ -28,7 +28,7 @@ pub enum ExprKind {
/// Name of a variable
Var(Ident),
/// The dependent function space (e.g. (x : Int) -> y)
All(Ident, Box<Expr>, Box<Expr>),
All(Ident, Box<Expr>, Box<Expr>, bool),
/// An anonymous function that receives one argument
Lambda(Ident, Box<Expr>, bool),
/// Application of an expression to a spine of expressions
@ -84,10 +84,16 @@ impl Expr {
})
}
pub fn all(range: Range, ident: Ident, typ: Box<Expr>, body: Box<Expr>) -> Box<Expr> {
pub fn all(
range: Range,
ident: Ident,
typ: Box<Expr>,
body: Box<Expr>,
erased: bool,
) -> Box<Expr> {
Box::new(Expr {
span: Span::Locatable(range),
data: ExprKind::All(ident, typ, body),
data: ExprKind::All(ident, typ, body, erased),
})
}
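
A hypothetical call-site wrapper (not in the diff) showing how the new `erased` parameter is threaded through when building an erased dependent function type:

// Builds `~(ident : typ) -> body` in the desugared tree; `true` marks the binder as erased.
pub fn erased_all(range: Range, ident: Ident, typ: Box<Expr>, body: Box<Expr>) -> Box<Expr> {
    Expr::all(range, ident, typ, body, true)
}
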
@ -297,11 +303,13 @@ impl Expr {
pub fn traverse_pi_types(&self) -> String {
match &self.data {
ExprKind::All(binder, typ, body) => {
ExprKind::All(binder, typ, body, erased) => {
let tilde = if *erased { "~" } else { "" };
if binder.to_string().starts_with('_') {
format!("{} -> {}", typ, body.traverse_pi_types())
format!("{}{} -> {}", tilde, typ, body.traverse_pi_types())
} else {
format!("({} : {}) -> {}", binder, typ, body.traverse_pi_types())
let body = body.traverse_pi_types();
format!("({}{} : {}) -> {}", tilde, binder, typ, body)
}
}
_ => format!("{}", self),
@ -329,10 +337,10 @@ impl Display for Expr {
Str(n) => write!(f, "\"{}\"", n),
Num(crate::Number::U60(n)) => write!(f, "{}", n),
Num(crate::Number::U120(n)) => write!(f, "{}u120", n),
All(_, _, _) => write!(f, "({})", self.traverse_pi_types()),
All(_, _, _, _) => write!(f, "({})", self.traverse_pi_types()),
Var(name) => write!(f, "{}", name),
Lambda(binder, body, false) => write!(f, "({} => {})", binder, body),
Lambda(binder, body, true) => write!(f, "({{{}}} => {})", binder, body),
Lambda(binder, body, true) => write!(f, "(~{} => {})", binder, body),
Sub(name, _, redx, expr) => write!(f, "(## {}/{} {})", name, redx, expr),
App(head, spine) => write!(
f,

View File

@ -16,7 +16,7 @@ pub mod desugared;
/// instead of a string due to optimizations.
pub mod symbol;
pub use hvm::language as backend;
pub use hvm::syntax as backend;
/// Enum of binary operators.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]

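Downstream crates keep importing HVM terms through this re-export, so the rename from hvm::language to hvm::syntax stays contained to kind-tree. Illustrative usage only; the helper function is hypothetical:

use kind_tree::backend::Term;

// Builds the HVM numeral 0 via the re-exported syntax module.
fn zero() -> Term {
    Term::U6O { numb: 0 }
}
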
View File

@ -1,23 +1,42 @@
//! Describes identifiers and symbols inside the language.
use std::fmt::Display;
use kind_span::{Range, SyntaxCtxIndex};
use std::fmt::Display;
use std::hash::Hash;
/// Stores the name of a variable or constructor.
/// It's simply a string because in the future I plan
/// to store all the names and only reference them with
/// a u64.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct Symbol(String);
#[derive(Clone, Debug)]
pub struct Symbol {
data: String,
hash: u64,
}
impl Symbol {
pub fn new(str: String) -> Symbol {
Symbol(str)
Symbol {
hash: fxhash::hash64(&str),
data: str,
}
}
}
impl PartialEq for Symbol {
fn eq(&self, other: &Self) -> bool {
self.hash == other.hash
}
}
impl Hash for Symbol {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
state.write_u64(self.hash);
}
}
impl Eq for Symbol {}
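
The design choice here is that equality and hashing reuse a precomputed fxhash64 of the name, so repeated comparisons of long identifiers stay cheap; the tradeoff is that two distinct names with colliding hashes would compare equal, which the change assumes is negligible for identifier sets. An illustrative check (not part of the diff):

#[test]
fn symbol_equality_uses_cached_hash() {
    let a = Symbol::new("Nat.add".to_string());
    let b = Symbol::new("Nat.add".to_string());
    // Eq is implemented by comparing the precomputed fxhash64 values.
    assert_eq!(a, b);
}
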
/// Identifier inside a syntax context.
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct Ident {
@ -37,7 +56,6 @@ pub struct QualifiedIdent {
/// Flag that is useful to avoid unbound errors while
/// trying to collect names created by each of the syntactic
/// sugars.
pub used_by_sugar: bool,
pub generated: bool,
}
@ -47,7 +65,6 @@ impl QualifiedIdent {
root,
aux,
range,
used_by_sugar: false,
generated: false,
}
}
@ -57,16 +74,16 @@ impl QualifiedIdent {
/// and avoid a copy of the string.
#[inline]
pub fn to_str(&self) -> &str {
&self.root.0
&self.root.data
}
#[inline]
pub fn get_root(&self) -> String {
self.root.0.clone()
self.root.data.clone()
}
pub fn change_root(&mut self, str: String) {
self.root = Symbol(str);
self.root = Symbol::new(str);
}
pub fn to_generated(&self) -> Self {
@ -78,7 +95,7 @@ impl QualifiedIdent {
/// Avoid this function. It transforms a QualifiedIdent into a Ident
pub fn to_ident(&self) -> Ident {
Ident {
data: Symbol(self.to_string()),
data: Symbol::new(self.to_string()),
range: self.range,
generated: self.generated,
}
@ -86,44 +103,36 @@ impl QualifiedIdent {
pub fn new_static(root: &str, aux: Option<String>, range: Range) -> QualifiedIdent {
QualifiedIdent {
root: Symbol(root.to_string()),
aux: aux.map(Symbol),
root: Symbol::new(root.to_string()),
aux: aux.map(Symbol::new),
range,
used_by_sugar: false,
generated: false,
}
}
pub fn new_sugared(root: &str, extension: &str, range: Range) -> QualifiedIdent {
QualifiedIdent {
root: Symbol(format!("{}.{}", root, extension)),
root: Symbol::new(format!("{}.{}", root, extension)),
aux: None,
range,
used_by_sugar: true,
generated: true,
}
}
pub fn add_segment(&self, extension: &str) -> QualifiedIdent {
QualifiedIdent {
root: Symbol(format!("{}.{}", self.root.0, extension)),
root: Symbol::new(format!("{}.{}", self.root.data, extension)),
aux: self.aux.clone(),
range: self.range,
used_by_sugar: self.used_by_sugar,
generated: self.generated,
}
}
pub fn to_sugar(&mut self) -> &mut QualifiedIdent {
self.used_by_sugar = true;
self
}
}
impl Ident {
pub fn new(data: String, range: Range) -> Ident {
Ident {
data: Symbol(data),
data: Symbol::new(data),
range,
generated: false,
}
@ -131,7 +140,7 @@ impl Ident {
pub fn new_static(data: &str, range: Range) -> Ident {
Ident {
data: Symbol(data.to_string()),
data: Symbol::new(data.to_string()),
range,
generated: false,
}
@ -139,7 +148,7 @@ impl Ident {
pub fn new_by_sugar(data: &str, range: Range) -> Ident {
Ident {
data: Symbol(data.to_string()),
data: Symbol::new(data.to_string()),
range,
generated: true,
}
@ -147,13 +156,13 @@ impl Ident {
pub fn with_name(&self, f: fn(String) -> String) -> Ident {
let mut new = self.clone();
new.data = Symbol(f(new.data.0));
new.data = Symbol::new(f(new.data.data));
new
}
#[inline]
pub fn to_str(&self) -> &str {
&self.data.0
&self.data.data
}
pub fn to_generated(&self) -> Self {
@ -217,7 +226,7 @@ impl Ident {
pub fn add_segment(&self, name: &str) -> Ident {
Ident {
data: Symbol(format!("{}.{}", self.data.0, name)),
data: Symbol::new(format!("{}.{}", self.data.data, name)),
range: self.range,
generated: false,
}
@ -225,7 +234,7 @@ impl Ident {
pub fn generate(data: &str) -> Ident {
Ident {
data: Symbol(data.to_string()),
data: Symbol::new(data.to_string()),
range: Range::ghost_range(),
generated: true,
}
@ -234,7 +243,7 @@ impl Ident {
impl Display for Symbol {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
write!(f, "{}", self.data)
}
}