fix: some fixes and added docs

Felipe g 2022-11-13 21:38:27 -03:00
parent b17046aa9d
commit 856343b1cf
21 changed files with 111 additions and 52 deletions

View File

@@ -1,8 +1,6 @@
 //! This module compiles all of the code to a format
-//! that can run on the HVM. in order to run the type
-//! checker fast we dont rely on elaboration, instead
-//! we just remove all the types and this is the function
-//! that will run when requested by a type during type checking.
+//! that can run on the HVM and inside the checker.hvm
+//! file.
 use self::tags::EvalTag;
 use self::tags::{operator_to_constructor, TermTag};
@@ -134,7 +132,7 @@ fn codegen_all_expr(lhs_rule: bool, lhs: bool, num: &mut usize, quote: bool, exp
 lam(name, codegen_all_expr(lhs_rule, lhs, num, quote, body)),
 ],
 ),
-Lambda(name, body, erased) => mk_quoted_ctr(
+Lambda(name, body, _erased) => mk_quoted_ctr(
 eval_ctr(quote, TermTag::Lambda),
 vec![span_to_num(expr.span), mk_u60(name.encode()), lam(name, codegen_all_expr(lhs_rule, lhs, num, quote, body))],
 ),
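The only code change in this hunk is renaming the unused binding `erased` to `_erased`. A standalone illustration (hypothetical `Term` type, not the repository's) of what the underscore buys: the binding stays visible in the pattern for documentation, but rustc stops warning that it is unused.

```rust
// Hypothetical sketch: a leading underscore marks a pattern binding as
// intentionally unused without removing its name from the code.
enum Term {
    Var(String),
    Lambda(String, Box<Term>, bool), // name, body, erased flag
}

fn arity(term: &Term) -> usize {
    match term {
        Term::Var(_name) => 0,
        Term::Lambda(_name, body, _erased) => 1 + arity(body),
    }
}
```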

View File

@@ -18,7 +18,7 @@ pub(crate) enum TypeError {
 TypeMismatch(Context, Range, Box<Expr>, Box<Expr>),
 }
-pub fn context_to_subtitles(ctx: Context, subtitles: &mut Vec<Subtitle>) {
+fn context_to_subtitles(ctx: Context, subtitles: &mut Vec<Subtitle>) {
 subtitles.push(Subtitle::LineBreak);
 if !ctx.0.is_empty() {

View File

@@ -1,6 +1,11 @@
+//! A type checker for the kind2 language. It has some utilities
+//! to [compile kind2 code][compiler] into a version that the checker
+//! can understand and [transform the answer back][report] into a
+//! version that the Rust side can manipulate.
 pub mod compiler;
-pub mod errors;
 pub mod report;
+mod errors;
 use std::sync::mpsc::Sender;
@@ -12,8 +17,8 @@ use crate::report::parse_report
 const CHECKER_HVM: &str = include_str!("checker.hvm");
-/// Type checks a dessugared book. It spawns an HVM instance in order
-/// to run a compiled version of the book
+/// Generates the checker in a string format that can be
+/// parsed by HVM.
 pub fn gen_checker(book: &Book) -> String {
 let base_check_code = compiler::codegen_book(book);
 let mut check_code = CHECKER_HVM.to_string();
@@ -33,8 +38,7 @@ pub fn type_check(book: &Book, tx: Sender<DiagnosticFrame>) -> bool {
 runtime.normalize(main);
 let term = runtime.readback(main);
-let errs = parse_report(&term)
-.expect("Internal Error: Cannot parse the report message from the type checker");
+let errs = parse_report(&term).expect("Internal Error: Cannot parse the report message from the type checker");
 let succeeded = errs.is_empty();
 for err in errs {
@@ -44,8 +48,9 @@ pub fn type_check(book: &Book, tx: Sender<DiagnosticFrame>) -> bool {
 succeeded
 }
-/// Type checks a dessugared book. It spawns an HVM instance in order
-/// to run a compiled version of the book
+/// Runs the type checker, but instead of running the "check all" function
+/// it runs "eval_main", which evaluates the generated version that both HVM
+/// and the checker can understand.
 pub fn eval_api(book: &Book) -> Box<Term> {
 let check_code = gen_checker(book);
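Taken together, the new docs describe the crate's flow: `gen_checker` splices the compiled book into `checker.hvm`, `type_check` runs it and streams diagnostics through an mpsc channel, and `eval_api` evaluates `eval_main` instead. A hypothetical caller, assuming this crate's `type_check` and the `Book` and `DiagnosticFrame` types are already imported (a sketch, not code from the repository):

```rust
use std::sync::mpsc;

// Sketch of driving the checker: `type_check` takes ownership of the Sender,
// pushes every DiagnosticFrame it produces through it, and returns whether
// the book type checked.
fn run_checker(book: &Book) -> bool {
    let (tx, rx) = mpsc::channel();
    let succeeded = type_check(book, tx);
    for _frame in rx.try_iter() {
        // hand each frame to whatever renders diagnostics
    }
    succeeded
}
```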

View File

@@ -1,3 +1,6 @@
+//! Transforms an answer from the type checker into
+//! an Expr of the kind-tree package.
 use kind_span::{EncodedSpan, Range};
 use kind_tree::backend::Term;
 use kind_tree::symbol::{Ident, QualifiedIdent};
@@ -169,7 +172,8 @@ fn parse_list(term: &Term) -> Result<Vec<Box<Term>>, String> {
 Ok(vec)
 }
-pub fn parse_entry(term: &Term) -> Result<Entry, String> {
+/// Transforms an HVM quoted entry into an easy-to-manipulate structure.
+pub fn transform_entry(term: &Term) -> Result<Entry, String> {
 match term {
 Term::Ctr { name, args } if name == "Pair.new" => {
 let fst = parse_name(&args[0])?;
@@ -191,7 +195,7 @@ fn parse_type_error(expr: &Term) -> Result<TypeError, String> {
 match expr {
 Term::Ctr { name, args } => {
 let ls = parse_list(&args[0])?;
-let entries = ls.iter().flat_map(|x| parse_entry(x));
+let entries = ls.iter().flat_map(|x| transform_entry(x));
 let ctx = Context(entries.collect());
 let orig = match_opt!(*args[1], Term::Num { numb } => EncodedSpan(numb).to_range())?;
 match name.as_str() {
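The renamed call keeps the `flat_map` shape, which works because `Result` implements `IntoIterator` over its `Ok` value; entries that `transform_entry` rejects are silently skipped instead of failing the whole context. A standalone illustration of that behavior:

```rust
// `flat_map` over a closure returning Result keeps Ok values and drops Errs.
fn main() {
    let parsed: Vec<i32> = ["1", "x", "3"]
        .iter()
        .flat_map(|s| s.parse::<i32>())
        .collect();
    assert_eq!(parsed, vec![1, 3]);
}
```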

View File

@@ -138,7 +138,7 @@ pub fn compile_in_session<T>(
 let diagnostics = tx.try_iter().collect::<Vec<DiagnosticFrame>>();
-if diagnostics.is_empty() {
+if diagnostics.is_empty() && res.is_some() {
 render_to_stderr(&render_config, &session, &Log::Checked(start.elapsed()));
 eprintln!();
 res

View File

@@ -1,2 +1,4 @@
+//! Utility to derive functions from their definitions.
 pub mod matching;
 pub mod open;

View File

@@ -1,5 +1,6 @@
-///! Module to derive a dependent
-/// eliminator out of a sum type declaration.
+//! Module to derive a dependent
+//! eliminator out of a sum type declaration.
 use kind_span::Range;
 use kind_tree::concrete::expr::Expr;

View File

@@ -1,4 +1,5 @@
-///! Module to derive a "open" function for records.
+//! Module to derive an "open" function for records.
 use kind_span::Range;
 use kind_tree::concrete::expr::Expr;
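The two doc-comment fixes above (in the `matching` and `open` derive modules) correct the same mistake: `///!` is just an outer doc comment whose text happens to start with `!`, so it attaches to the next item instead of the module, while `//!` is the inner form that documents the enclosing module. A minimal illustration with a hypothetical item:

```rust
//! Inner doc comment (`//!`): documents the enclosing module or crate and
//! must appear before the module's items.

/// Outer doc comment (`///`): documents the item that follows it.
pub fn derive_example() {}
```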

View File

@@ -43,6 +43,12 @@ pub fn to_book(session: &mut Session, path: &PathBuf) -> Option<concrete::Book>
 expand::expand_book(&mut concrete_book);
+let failed = resolution::check_unbound_top_level(session, &mut concrete_book);
+if failed {
+return None
+}
 Some(concrete_book)
 }
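For reference, the same early-return tail can be written with `bool::then_some`; this is only a sketch of an alternative shape, not what the commit does, and it assumes the surrounding `concrete::Book` type:

```rust
// Equivalent shape: produce the book only when the unbound check succeeded.
fn finish(failed: bool, concrete_book: concrete::Book) -> Option<concrete::Book> {
    (!failed).then_some(concrete_book)
}
```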

View File

@@ -152,7 +152,7 @@ fn parse_and_store_book_by_identifier<'a>(
 match ident_to_path(&session.root, ident, true) {
 Ok(Some(path)) => parse_and_store_book_by_path(session, &path, book),
-Ok(None) => true,
+Ok(None) => false,
 Err(err) => {
 session.diagnostic_sender.send(err).unwrap();
 true
@@ -195,7 +195,7 @@ fn parse_and_store_book_by_path<'a>(
 }
 for idents in unbound_top_level.values() {
-failed |= parse_and_store_book_by_identifier(session, &idents[0], book);
+failed |= parse_and_store_book_by_identifier(session, &idents.iter().nth(0).unwrap(), book);
 }
 expand_uses(&mut module, session.diagnostic_sender.clone());
@@ -220,22 +220,25 @@ fn unbound_variable(session: &mut Session, book: &Book, idents: &[Ident]) {
 pub fn parse_and_store_book(session: &mut Session, path: &PathBuf) -> Option<Book> {
 let mut book = Book::default();
-let mut failed = parse_and_store_book_by_path(session, path, &mut book);
-let (_, unbound_tops) = unbound::get_book_unbound(session.diagnostic_sender.clone(), &mut book, true);
+if parse_and_store_book_by_path(session, path, &mut book) {
+None
+} else {
+Some(book)
+}
+}
+
+pub fn check_unbound_top_level(session: &mut Session, book: &mut Book) -> bool {
+let mut failed = false;
+let (_, unbound_tops) = unbound::get_book_unbound(session.diagnostic_sender.clone(), book, true);
 for (_, unbound) in unbound_tops {
-let res: Vec<Ident> = unbound.iter().filter(|x| !x.used_by_sugar).map(|x| x.to_ident()).collect();
+let res: Vec<Ident> = unbound.iter().map(|x| x.to_ident()).collect();
 if !res.is_empty() {
 unbound_variable(session, &book, &res);
 failed = true;
 }
 }
-if failed {
-None
-} else {
-Some(book)
-}
+failed
 }

View File

@@ -244,10 +244,6 @@ impl<'a> Parser<'a> {
 }))
 }
-fn parse_num_lit(&mut self) -> Result<u64, SyntaxError> {
-eat_single!(self, Token::Num(x) => *x)
-}
 fn parse_binary_op(&mut self) -> Result<Box<Expr>, SyntaxError> {
 let range = self.range();
 self.advance(); // '('
@@ -277,6 +273,7 @@ impl<'a> Parser<'a> {
 vec.push(*self.parse_expr(false)?);
 let mut initialized = false;
 let mut with_comma = false;
+
 loop {
 let ate_comma = self.check_and_eat(Token::Comma);
 if !initialized {

View File

@@ -9,7 +9,7 @@ use crate::lexer::tokens::Token;
 /// The lexer state.
 pub struct Lexer<'a> {
 pub input: &'a str,
-pub peekable: &'a mut Peekable<Chars<'a>>,
+pub peekable: Peekable<Chars<'a>>,
 pub pos: usize,
 pub ctx: SyntaxCtxIndex,
@@ -20,7 +20,7 @@ pub struct Lexer<'a> {
 impl<'a> Lexer<'a> {
 pub fn new(
 input: &'a str,
-peekable: &'a mut Peekable<Chars<'a>>,
+peekable: Peekable<Chars<'a>>,
 ctx: SyntaxCtxIndex,
 ) -> Lexer<'a> {
 Lexer {

View File

@@ -1,3 +1,5 @@
+//! Crate to parse the kind2 grammar.
 pub mod errors;
 pub mod expr;
 pub mod macros;
@@ -15,8 +17,8 @@ pub use lexer::state::*;
 use state::Parser;
 pub fn parse_book(errs: Sender<DiagnosticFrame>, ctx_id: usize, input: &str) -> (Module, bool) {
-let mut peekable = input.chars().peekable();
-let lexer = Lexer::new(input, &mut peekable, SyntaxCtxIndex::new(ctx_id));
+let peekable = input.chars().peekable();
+let lexer = Lexer::new(input, peekable, SyntaxCtxIndex::new(ctx_id));
 let mut parser = Parser::new(lexer, errs);
 (parser.parse_module(), parser.failed)
 }
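Both the `Lexer` struct above and its `parse_book` caller switch from a borrowed `&'a mut Peekable<Chars<'a>>` to an owned `Peekable<Chars<'a>>`, so callers no longer need to keep a separate, mutably borrowed binding alive for the lexer's whole lifetime. A minimal sketch of the owned layout (hypothetical `MiniLexer`, not the real struct):

```rust
use std::iter::Peekable;
use std::str::Chars;

// The lexer owns its character stream; the caller just hands over the &str.
struct MiniLexer<'a> {
    input: &'a str,
    peekable: Peekable<Chars<'a>>,
}

impl<'a> MiniLexer<'a> {
    fn new(input: &'a str) -> MiniLexer<'a> {
        MiniLexer { input, peekable: input.chars().peekable() }
    }

    fn bump(&mut self) -> Option<char> {
        self.peekable.next()
    }
}

fn main() {
    let mut lexer = MiniLexer::new("let x = 1");
    assert_eq!(lexer.bump(), Some('l'));
    assert_eq!(lexer.input.len(), 9);
}
```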

View File

@@ -1,8 +1,10 @@
-//! This pass does a lot of things including:
-//! - Setting an unique number for each of the holes
-//! - Desugar of lets and matchs
-//! - Untyped derivations for types and records
-//! - Checking of hidden and erased arguments
+//! This pass transforms a sugared tree into a simpler tree.
+//!
+//! It does a lot of things like:
+//! * Setting a unique number for each of the holes
+//! * Desugaring of lets and matches
+//! * Untyped derivations for types and records
+//! * Checking of hidden and erased arguments
 use std::sync::mpsc::Sender;

View File

@@ -1,3 +1,7 @@
+//! Expands some attributes and derivations of each construction.
+//! Currently it just derives `match` and `open` for sum types
+//! and record types respectively.
 use fxhash::FxHashMap;
 use kind_derive::matching::derive_match;
 use kind_derive::open::derive_open;

View File

@@ -1,5 +1,11 @@
+//! A lot of transformations that we can apply to kind trees.
+//! * [desugar][desugar] - Desugars the sugared tree into a version that does not contain constructions like match, inductive types, etc.
+//! * [erasure][erasure] - Erases all of the definitions that are marked as erased from the runtime.
+//! * [expand][expand] - Expands some attributes and derivations of each construction.
+//! * [unbound][unbound] - Collects all of the unbound definitions and checks their linearity.
 pub mod desugar;
 pub mod erasure;
-pub mod errors;
 pub mod expand;
 pub mod unbound;
+mod errors;

View File

@@ -5,9 +5,10 @@
 //! by sugars because it's useful to name resolution
 //! phase.
+use std::collections::HashSet;
 use std::sync::mpsc::Sender;
-use fxhash::FxHashMap;
+use fxhash::{FxHashMap, FxHashSet};
 use kind_report::data::DiagnosticFrame;
 use kind_span::Range;
 use kind_tree::concrete::expr::{Binding, Case, CaseBinding, Destruct};
@@ -28,7 +29,7 @@ use crate::errors::PassError;
 pub struct UnboundCollector {
 pub errors: Sender<DiagnosticFrame>,
 pub context_vars: Vec<(Range, String)>,
-pub unbound_top_level: FxHashMap<String, Vec<QualifiedIdent>>,
+pub unbound_top_level: FxHashMap<String, FxHashSet<QualifiedIdent>>,
 pub unbound: FxHashMap<String, Vec<Ident>>,
 pub emit_errs: bool
 }
@@ -51,7 +52,7 @@ pub fn get_module_unbound(
 emit_errs: bool,
 ) -> (
 FxHashMap<String, Vec<Ident>>,
-FxHashMap<String, Vec<QualifiedIdent>>,
+FxHashMap<String, FxHashSet<QualifiedIdent>>,
 ) {
 let mut state = UnboundCollector::new(diagnostic_sender, emit_errs);
 state.visit_module(module);
@@ -64,7 +65,7 @@ pub fn get_book_unbound(
 emit_errs: bool,
 ) -> (
 FxHashMap<String, Vec<Ident>>,
-FxHashMap<String, Vec<QualifiedIdent>>,
+FxHashMap<String, FxHashSet<QualifiedIdent>>,
 ) {
 let mut state = UnboundCollector::new(diagnostic_sender, emit_errs);
 state.visit_book(book);
@@ -93,8 +94,8 @@ impl Visitor for UnboundCollector {
 let entry = self
 .unbound_top_level
 .entry(ident.to_string())
-.or_insert_with(Vec::new);
-entry.push(ident.clone());
+.or_default();
+entry.insert(ident.clone());
 }
 }
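Storing top-level occurrences in an `FxHashSet` (which presumably requires `QualifiedIdent` to implement `Hash` and `Eq`) makes duplicates collapse automatically, which is why `or_insert_with(Vec::new)` becomes `or_default` and `push` becomes `insert`. A standalone sketch of the map-of-sets pattern using plain strings:

```rust
use fxhash::{FxHashMap, FxHashSet};

// Each name maps to a set of occurrences: `or_default` creates the set the
// first time a name is seen, and `insert` deduplicates repeats.
fn collect<'a>(names: impl IntoIterator<Item = &'a str>) -> FxHashMap<String, FxHashSet<String>> {
    let mut unbound: FxHashMap<String, FxHashSet<String>> = FxHashMap::default();
    for name in names {
        unbound
            .entry(name.to_string())
            .or_default()
            .insert(name.to_string());
    }
    unbound
}

fn main() {
    let map = collect(["Nat.add", "Nat.add", "List.map"]);
    assert_eq!(map["Nat.add"].len(), 1);
}
```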

View File

@@ -7,7 +7,8 @@ edition = "2021"
 [dependencies]
 kind-span = { path = "../kind-span" }
 unicode-width = "0.1.10"
 yansi = "0.5.1"
+pathdiff = "0.2.1"
 fxhash = "0.2.1"

View File

@@ -458,7 +458,8 @@ impl<'a> Report for Diagnostic<'a> {
 for (ctx, group) in groups {
 writeln!(fmt)?;
 let (file, code) = cache.fetch(ctx).unwrap();
-write_code_block(&file.clone(), config, &group, code, fmt)?;
+let diff = pathdiff::diff_paths(&file.clone(), PathBuf::from(".").canonicalize().unwrap()).unwrap();
+write_code_block(&diff, config, &group, code, fmt)?;
 }
 if !is_empty {
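Together with the `pathdiff = "0.2.1"` dependency added to Cargo.toml above, this makes diagnostics print paths relative to the current working directory instead of absolute ones. A small sketch of the computation, using a hypothetical helper name:

```rust
use std::path::{Path, PathBuf};

// Returns `file` expressed relative to the canonicalized current directory,
// or None if no relative form exists (pathdiff::diff_paths(path, base)).
fn relative_to_cwd(file: &Path) -> Option<PathBuf> {
    let cwd = PathBuf::from(".").canonicalize().ok()?;
    pathdiff::diff_paths(file, cwd)
}
```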

src/kind-tree/README.md Normal file
View File

View File

@@ -16,6 +16,7 @@ pub struct Symbol(String);
 pub struct Ident {
 pub data: Symbol,
 pub range: Range,
+pub generated: bool,
 }
 /// Qualified Identifiers always refer to top level
@@ -30,6 +31,7 @@ pub struct QualifiedIdent {
 /// trying to collect names created by each of the sintatic
 /// sugars.
 pub used_by_sugar: bool,
+pub generated: bool,
 }
 impl QualifiedIdent {
@@ -39,6 +41,7 @@ impl QualifiedIdent {
 aux,
 range,
 used_by_sugar: false,
+generated: false
 }
 }
@@ -46,11 +49,18 @@
 self.root = Symbol(str);
 }
+pub fn to_generated(&self) -> Self {
+let mut new = self.clone();
+new.generated = true;
+new
+}
 /// Avoid this function. It transforms a QualifiedIdent into a Ident
 pub fn to_ident(&self) -> Ident {
 Ident {
 data: Symbol(self.to_string()),
 range: self.range,
+generated: self.generated
 }
 }
@@ -60,6 +70,7 @@ impl QualifiedIdent {
 aux: aux.map(Symbol),
 range,
 used_by_sugar: false,
+generated: false
 }
 }
@@ -69,6 +80,7 @@ impl QualifiedIdent {
 aux: Some(Symbol(aux.to_string())),
 range,
 used_by_sugar: true,
+generated: true
 }
 }
@@ -78,6 +90,7 @@ impl QualifiedIdent {
 aux: self.aux.clone(),
 range: self.range,
 used_by_sugar: self.used_by_sugar,
+generated: self.generated
 }
 }
@@ -92,6 +105,7 @@ impl Ident {
 Ident {
 data: Symbol(data),
 range,
+generated: false
 }
 }
@@ -99,6 +113,7 @@
 Ident {
 data: Symbol(data.to_string()),
 range,
+generated: false
 }
 }
@@ -106,6 +121,7 @@
 Ident {
 data: Symbol(data.to_string()),
 range,
+generated: true
 }
 }
@@ -119,6 +135,12 @@
 &self.data.0
 }
+pub fn to_generated(&self) -> Self {
+let mut old = self.clone();
+old.generated = true;
+old
+}
 pub fn decode(num: u64) -> String {
 let mut num = num;
 let mut name = String::new();
@@ -168,6 +190,7 @@
 Ident {
 data: self.data.clone(),
 range,
+generated: false
 }
 }
@@ -175,6 +198,7 @@
 Ident {
 data: Symbol(format!("{}.{}", self.data.0, name)),
 range: self.range,
+generated: false
 }
 }
@@ -182,6 +206,7 @@
 Ident {
 data: Symbol(data.to_string()),
 range: Range::ghost_range(),
+generated: true
 }
 }
 }
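The rest of the file threads a new `generated: bool` flag through `Ident` and `QualifiedIdent` (false in the ordinary constructors, true in the ones that already build synthetic names, such as the ghost-range constructor), together with `to_generated` helpers. A hypothetical use, assuming `Ident` is in scope:

```rust
// Sketch: derive a copy of a user-written identifier that later passes can
// recognize as compiler-generated (only the `generated` flag changes).
fn synthesized_copy(ident: &Ident) -> Ident {
    let copy = ident.to_generated();
    debug_assert!(copy.generated);
    copy
}
```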