fix: some fixes and added docs

Felipe g 2022-11-13 21:38:27 -03:00
parent b17046aa9d
commit 856343b1cf
21 changed files with 111 additions and 52 deletions

View File

@ -1,8 +1,6 @@
//! This module compiles all of the code to a format
//! that can run on the HVM. In order to run the type
//! checker fast we don't rely on elaboration; instead
//! we just remove all the types, and this is the function
//! that will run when requested by a type during type checking.
//! that can run on the HVM and inside the checker.hvm
//! file.
use self::tags::EvalTag;
use self::tags::{operator_to_constructor, TermTag};
@ -134,7 +132,7 @@ fn codegen_all_expr(lhs_rule: bool, lhs: bool, num: &mut usize, quote: bool, exp
lam(name, codegen_all_expr(lhs_rule, lhs, num, quote, body)),
],
),
Lambda(name, body, erased) => mk_quoted_ctr(
Lambda(name, body, _erased) => mk_quoted_ctr(
eval_ctr(quote, TermTag::Lambda),
vec![span_to_num(expr.span), mk_u60(name.encode()), lam(name, codegen_all_expr(lhs_rule, lhs, num, quote, body))],
),

View File

@ -18,7 +18,7 @@ pub(crate) enum TypeError {
TypeMismatch(Context, Range, Box<Expr>, Box<Expr>),
}
pub fn context_to_subtitles(ctx: Context, subtitles: &mut Vec<Subtitle>) {
fn context_to_subtitles(ctx: Context, subtitles: &mut Vec<Subtitle>) {
subtitles.push(Subtitle::LineBreak);
if !ctx.0.is_empty() {

View File

@ -1,6 +1,11 @@
//! A type checker for the kind2 language. It has some utilities
//! to [compile kind2 code][compiler] into a version that the checker
//! can understand and [transform the answer back][report] into a
//! version that the Rust side can manipulate.
pub mod compiler;
pub mod errors;
pub mod report;
mod errors;
use std::sync::mpsc::Sender;
@ -12,8 +17,8 @@ use crate::report::parse_report;
const CHECKER_HVM: &str = include_str!("checker.hvm");
/// Type checks a desugared book. It spawns an HVM instance in order
/// to run a compiled version of the book.
/// Generates the checker in a string format that can be
/// parsed by HVM.
pub fn gen_checker(book: &Book) -> String {
let base_check_code = compiler::codegen_book(book);
let mut check_code = CHECKER_HVM.to_string();
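Since gen_checker just returns a String, the generated checker can be inspected directly; a minimal sketch (here `book` is assumed to be an already-desugared `&Book`, and the output file name is made up):

    // Dump the generated HVM source for debugging. This is the same string
    // that eval_api (below) builds right before spawning the HVM runtime.
    let check_code = gen_checker(book);
    std::fs::write("checker_debug.hvm", &check_code).expect("could not write debug dump");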
@ -33,8 +38,7 @@ pub fn type_check(book: &Book, tx: Sender<DiagnosticFrame>) -> bool {
runtime.normalize(main);
let term = runtime.readback(main);
let errs = parse_report(&term)
.expect("Internal Error: Cannot parse the report message from the type checker");
let errs = parse_report(&term).expect("Internal Error: Cannot parse the report message from the type checker");
let succeeded = errs.is_empty();
for err in errs {
@ -44,8 +48,9 @@ pub fn type_check(book: &Book, tx: Sender<DiagnosticFrame>) -> bool {
succeeded
}
/// Type checks a desugared book. It spawns an HVM instance in order
/// to run a compiled version of the book.
/// Runs the type checker, but instead of running the check-all function
/// it runs "eval_main", which evaluates the generated version that both
/// HVM and the checker can understand.
pub fn eval_api(book: &Book) -> Box<Term> {
let check_code = gen_checker(book);
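A minimal sketch of driving type_check from the caller side, assuming `book` is a desugared `&Book`; the channel matches the `Sender<DiagnosticFrame>` parameter above and mirrors how compile_in_session drains diagnostics (shown further down in this commit):

    use kind_report::data::DiagnosticFrame;
    use std::sync::mpsc::channel;

    let (tx, rx) = channel();
    let succeeded = type_check(book, tx);
    // type_check is synchronous, so every frame it sent is already buffered.
    let diagnostics: Vec<DiagnosticFrame> = rx.try_iter().collect();
    println!("checked: {} ({} diagnostics)", succeeded, diagnostics.len());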

View File

@ -1,3 +1,6 @@
//! Transforms an answer from the type checker into
//! an Expr of the kind-tree package.
use kind_span::{EncodedSpan, Range};
use kind_tree::backend::Term;
use kind_tree::symbol::{Ident, QualifiedIdent};
@ -169,7 +172,8 @@ fn parse_list(term: &Term) -> Result<Vec<Box<Term>>, String> {
Ok(vec)
}
pub fn parse_entry(term: &Term) -> Result<Entry, String> {
/// Transforms an HVM quoted entry into an easy-to-manipulate structure.
pub fn transform_entry(term: &Term) -> Result<Entry, String> {
match term {
Term::Ctr { name, args } if name == "Pair.new" => {
let fst = parse_name(&args[0])?;
@ -191,7 +195,7 @@ fn parse_type_error(expr: &Term) -> Result<TypeError, String> {
match expr {
Term::Ctr { name, args } => {
let ls = parse_list(&args[0])?;
let entries = ls.iter().flat_map(|x| parse_entry(x));
let entries = ls.iter().flat_map(|x| transform_entry(x));
let ctx = Context(entries.collect());
let orig = match_opt!(*args[1], Term::Num { numb } => EncodedSpan(numb).to_range())?;
match name.as_str() {

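The flat_map in parse_type_error works because Result converts into an iterator of zero or one items, so entries that fail to parse are silently skipped; a small sketch of the same pattern, using the `ls` and `transform_entry` from the surrounding code:

    // Ok(entry) contributes one item, Err(_) contributes nothing.
    let entries: Vec<Entry> = ls.iter().flat_map(|x| transform_entry(x)).collect();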
View File

@ -138,7 +138,7 @@ pub fn compile_in_session<T>(
let diagnostics = tx.try_iter().collect::<Vec<DiagnosticFrame>>();
if diagnostics.is_empty() {
if diagnostics.is_empty() && res.is_some() {
render_to_stderr(&render_config, &session, &Log::Checked(start.elapsed()));
eprintln!();
res

View File

@ -1,2 +1,4 @@
//! Utilities to derive functions from type declarations.
pub mod matching;
pub mod open;

View File

@ -1,5 +1,6 @@
///! Module to derive a dependent
/// eliminator out of a sum type declaration.
//! Module to derive a dependent
//! eliminator out of a sum type declaration.
use kind_span::Range;
use kind_tree::concrete::expr::Expr;

View File

@ -1,4 +1,5 @@
///! Module to derive an "open" function for records.
//! Module to derive an "open" function for records.
use kind_span::Range;
use kind_tree::concrete::expr::Expr;

View File

@ -43,6 +43,12 @@ pub fn to_book(session: &mut Session, path: &PathBuf) -> Option<concrete::Book>
expand::expand_book(&mut concrete_book);
let failed = resolution::check_unbound_top_level(session, &mut concrete_book);
if failed {
return None
}
Some(concrete_book)
}

View File

@ -152,7 +152,7 @@ fn parse_and_store_book_by_identifier<'a>(
match ident_to_path(&session.root, ident, true) {
Ok(Some(path)) => parse_and_store_book_by_path(session, &path, book),
Ok(None) => true,
Ok(None) => false,
Err(err) => {
session.diagnostic_sender.send(err).unwrap();
true
@ -195,7 +195,7 @@ fn parse_and_store_book_by_path<'a>(
}
for idents in unbound_top_level.values() {
failed |= parse_and_store_book_by_identifier(session, &idents[0], book);
failed |= parse_and_store_book_by_identifier(session, &idents.iter().nth(0).unwrap(), book);
}
expand_uses(&mut module, session.diagnostic_sender.clone());
@ -220,22 +220,25 @@ fn unbound_variable(session: &mut Session, book: &Book, idents: &[Ident]) {
pub fn parse_and_store_book(session: &mut Session, path: &PathBuf) -> Option<Book> {
let mut book = Book::default();
if parse_and_store_book_by_path(session, path, &mut book) {
None
} else {
Some(book)
}
}
let mut failed = parse_and_store_book_by_path(session, path, &mut book);
pub fn check_unbound_top_level(session: &mut Session, book : &mut Book) -> bool {
let mut failed = false;
let (_, unbound_tops) = unbound::get_book_unbound(session.diagnostic_sender.clone(), &mut book, true);
let (_, unbound_tops) = unbound::get_book_unbound(session.diagnostic_sender.clone(), book, true);
for (_, unbound) in unbound_tops {
let res: Vec<Ident> = unbound.iter().filter(|x| !x.used_by_sugar).map(|x| x.to_ident()).collect();
let res: Vec<Ident> = unbound.iter().map(|x| x.to_ident()).collect();
if !res.is_empty() {
unbound_variable(session, &book, &res);
failed = true;
}
}
if failed {
None
} else {
Some(book)
}
}
failed
}

View File

@ -244,10 +244,6 @@ impl<'a> Parser<'a> {
}))
}
fn parse_num_lit(&mut self) -> Result<u64, SyntaxError> {
eat_single!(self, Token::Num(x) => *x)
}
fn parse_binary_op(&mut self) -> Result<Box<Expr>, SyntaxError> {
let range = self.range();
self.advance(); // '('
@ -277,6 +273,7 @@ impl<'a> Parser<'a> {
vec.push(*self.parse_expr(false)?);
let mut initialized = false;
let mut with_comma = false;
loop {
let ate_comma = self.check_and_eat(Token::Comma);
if !initialized {

View File

@ -9,7 +9,7 @@ use crate::lexer::tokens::Token;
/// The lexer state.
pub struct Lexer<'a> {
pub input: &'a str,
pub peekable: &'a mut Peekable<Chars<'a>>,
pub peekable: Peekable<Chars<'a>>,
pub pos: usize,
pub ctx: SyntaxCtxIndex,
@ -20,7 +20,7 @@ pub struct Lexer<'a> {
impl<'a> Lexer<'a> {
pub fn new(
input: &'a str,
peekable: &'a mut Peekable<Chars<'a>>,
peekable: Peekable<Chars<'a>>,
ctx: SyntaxCtxIndex,
) -> Lexer<'a> {
Lexer {

View File

@ -1,3 +1,5 @@
//! Crate to parse the kind2 grammar.
pub mod errors;
pub mod expr;
pub mod macros;
@ -15,8 +17,8 @@ pub use lexer::state::*;
use state::Parser;
pub fn parse_book(errs: Sender<DiagnosticFrame>, ctx_id: usize, input: &str) -> (Module, bool) {
let mut peekable = input.chars().peekable();
let lexer = Lexer::new(input, &mut peekable, SyntaxCtxIndex::new(ctx_id));
let peekable = input.chars().peekable();
let lexer = Lexer::new(input, peekable, SyntaxCtxIndex::new(ctx_id));
let mut parser = Parser::new(lexer, errs);
(parser.parse_module(), parser.failed)
}
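With this change the Lexer owns its Peekable<Chars> instead of borrowing it, so callers no longer have to keep a separate mutable binding alive for the whole parse; a sketch mirroring the new parse_book (the input string and context id 0 are arbitrary):

    let input = "Main : U60";
    let lexer = Lexer::new(input, input.chars().peekable(), SyntaxCtxIndex::new(0));
    // From here the lexer is handed to Parser::new together with a
    // diagnostic sender, exactly as parse_book does above.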

View File

@ -1,8 +1,10 @@
//! This pass does a lot of things including:
//! - Setting a unique number for each of the holes
//! - Desugaring of lets and matches
//! - Untyped derivations for types and records
//! - Checking of hidden and erased arguments
//! This pass transforms a sugared tree into a simpler tree.
//!
//! It does a lot of things like:
//! * Setting a unique number for each of the holes
//! * Desugaring of lets and matches
//! * Untyped derivations for types and records
//! * Checking of hidden and erased arguments
use std::sync::mpsc::Sender;

View File

@ -1,3 +1,7 @@
//! Expands some attributes and derivations of each construction.
//! Currently it just derives `match` and `open` for sum types
//! and record types, respectively.
use fxhash::FxHashMap;
use kind_derive::matching::derive_match;
use kind_derive::open::derive_open;

View File

@ -1,5 +1,11 @@
//! A lot of transformations that we can apply to kind trees.
//! * [desugar][desugar] - Desugars the sugared tree into a version that does not contain constructions like match, inductive types, etc.
//! * [erasure][erasure] - Erases from the runtime all of the definitions that are marked as erased.
//! * [expand][expand] - Expands some attributes and derivations of each construction.
//! * [unbound][unbound] - Collects all of the unbound definitions and checks their linearity.
pub mod desugar;
pub mod erasure;
pub mod errors;
pub mod expand;
pub mod unbound;
mod errors;

View File

@ -5,9 +5,10 @@
//! by sugars because it's useful to the name resolution
//! phase.
use std::collections::HashSet;
use std::sync::mpsc::Sender;
use fxhash::FxHashMap;
use fxhash::{FxHashMap, FxHashSet};
use kind_report::data::DiagnosticFrame;
use kind_span::Range;
use kind_tree::concrete::expr::{Binding, Case, CaseBinding, Destruct};
@ -28,7 +29,7 @@ use crate::errors::PassError;
pub struct UnboundCollector {
pub errors: Sender<DiagnosticFrame>,
pub context_vars: Vec<(Range, String)>,
pub unbound_top_level: FxHashMap<String, Vec<QualifiedIdent>>,
pub unbound_top_level: FxHashMap<String, FxHashSet<QualifiedIdent>>,
pub unbound: FxHashMap<String, Vec<Ident>>,
pub emit_errs: bool
}
@ -51,7 +52,7 @@ pub fn get_module_unbound(
emit_errs: bool,
) -> (
FxHashMap<String, Vec<Ident>>,
FxHashMap<String, Vec<QualifiedIdent>>,
FxHashMap<String, FxHashSet<QualifiedIdent>>,
) {
let mut state = UnboundCollector::new(diagnostic_sender, emit_errs);
state.visit_module(module);
@ -64,7 +65,7 @@ pub fn get_book_unbound(
emit_errs: bool,
) -> (
FxHashMap<String, Vec<Ident>>,
FxHashMap<String, Vec<QualifiedIdent>>,
FxHashMap<String, FxHashSet<QualifiedIdent>>,
) {
let mut state = UnboundCollector::new(diagnostic_sender, emit_errs);
state.visit_book(book);
@ -93,8 +94,8 @@ impl Visitor for UnboundCollector {
let entry = self
.unbound_top_level
.entry(ident.to_string())
.or_insert_with(Vec::new);
entry.push(ident.clone());
.or_default();
entry.insert(ident.clone());
}
}
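Since unbound_top_level now maps each name to an FxHashSet, repeated references to the same missing top-level name collapse into a single entry; a sketch of consuming it from a caller like resolution.rs (`book` is assumed to be a `&mut Book` and `sender` a `Sender<DiagnosticFrame>`):

    let (_unbound_vars, unbound_tops) = unbound::get_book_unbound(sender.clone(), book, true);
    for (name, uses) in unbound_tops {
        // `uses` is an FxHashSet<QualifiedIdent>, so duplicates are already gone.
        eprintln!("unbound top-level `{}` referenced from {} distinct places", name, uses.len());
    }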

View File

@ -7,7 +7,8 @@ edition = "2021"
[dependencies]
kind-span = { path = "../kind-span" }
unicode-width = "0.1.10"
yansi = "0.5.1"
pathdiff = "0.2.1"
fxhash = "0.2.1"

View File

@ -458,7 +458,8 @@ impl<'a> Report for Diagnostic<'a> {
for (ctx, group) in groups {
writeln!(fmt)?;
let (file, code) = cache.fetch(ctx).unwrap();
write_code_block(&file.clone(), config, &group, code, fmt)?;
let diff = pathdiff::diff_paths(&file.clone(), PathBuf::from(".").canonicalize().unwrap()).unwrap();
write_code_block(&diff, config, &group, code, fmt)?;
}
if !is_empty {

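pathdiff::diff_paths comes from the newly added pathdiff dependency and returns its first argument expressed relative to the second, which is what lets the report print paths relative to the current directory; a small sketch with made-up paths:

    use std::path::PathBuf;

    let file = PathBuf::from("/home/user/project/src/Main.kind2");
    let cwd = PathBuf::from("/home/user/project");
    let relative = pathdiff::diff_paths(&file, &cwd);
    assert_eq!(relative, Some(PathBuf::from("src/Main.kind2")));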
src/kind-tree/README.md Normal file
View File

View File

@ -16,6 +16,7 @@ pub struct Symbol(String);
pub struct Ident {
pub data: Symbol,
pub range: Range,
pub generated: bool,
}
/// Qualified Identifiers always refer to top level
@ -30,6 +31,7 @@ pub struct QualifiedIdent {
/// trying to collect names created by each of the syntactic
/// sugars.
pub used_by_sugar: bool,
pub generated: bool,
}
impl QualifiedIdent {
@ -39,6 +41,7 @@ impl QualifiedIdent {
aux,
range,
used_by_sugar: false,
generated: false
}
}
@ -46,11 +49,18 @@ impl QualifiedIdent {
self.root = Symbol(str);
}
pub fn to_generated(&self) -> Self {
let mut new = self.clone();
new.generated = true;
new
}
/// Avoid this function. It transforms a QualifiedIdent into an Ident
pub fn to_ident(&self) -> Ident {
Ident {
data: Symbol(self.to_string()),
range: self.range,
generated: self.generated
}
}
@ -60,6 +70,7 @@ impl QualifiedIdent {
aux: aux.map(Symbol),
range,
used_by_sugar: false,
generated: false
}
}
@ -69,6 +80,7 @@ impl QualifiedIdent {
aux: Some(Symbol(aux.to_string())),
range,
used_by_sugar: true,
generated: true
}
}
@ -78,6 +90,7 @@ impl QualifiedIdent {
aux: self.aux.clone(),
range: self.range,
used_by_sugar: self.used_by_sugar,
generated: self.generated
}
}
@ -92,6 +105,7 @@ impl Ident {
Ident {
data: Symbol(data),
range,
generated: false
}
}
@ -99,6 +113,7 @@ impl Ident {
Ident {
data: Symbol(data.to_string()),
range,
generated: false
}
}
@ -106,6 +121,7 @@ impl Ident {
Ident {
data: Symbol(data.to_string()),
range,
generated: true
}
}
@ -119,6 +135,12 @@ impl Ident {
&self.data.0
}
pub fn to_generated(&self) -> Self {
let mut old = self.clone();
old.generated = true;
old
}
pub fn decode(num: u64) -> String {
let mut num = num;
let mut name = String::new();
@ -168,6 +190,7 @@ impl Ident {
Ident {
data: self.data.clone(),
range,
generated: false
}
}
@ -175,6 +198,7 @@ impl Ident {
Ident {
data: Symbol(format!("{}.{}", self.data.0, name)),
range: self.range,
generated: false
}
}
@ -182,6 +206,7 @@ impl Ident {
Ident {
data: Symbol(data.to_string()),
range: Range::ghost_range(),
generated: true
}
}
}
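The new generated flag lets later passes distinguish identifiers written by the user from ones the compiler synthesized; a sketch of the helper added above (`ident` is assumed to be some existing, user-written Ident):

    // to_generated clones the identifier and only flips the flag;
    // QualifiedIdent::to_generated behaves the same way.
    let synthetic = ident.to_generated();
    assert!(synthetic.generated);
    assert!(!ident.generated); // the original is untouched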