Merge branch 'master' of github.com:kindelia/kind2

This commit is contained in:
Victor Maia 2022-09-30 11:57:59 -03:00
commit c40fb4eb11
126 changed files with 13108 additions and 4618 deletions

60
.github/workflows/cargo.yml vendored Normal file
@@ -0,0 +1,60 @@
name: Cargo
on:
workflow_call:
jobs:
cargo_check:
name: 👁️‍🗨️ Cargo Check
runs-on: ${{ matrix.os }}
timeout-minutes: 5
strategy:
matrix:
os: [macos-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v1
- uses: actions-rs/cargo@v1
with:
command: check
cargo_test:
name: 🧪 Cargo Test
runs-on: ${{ matrix.os }}
timeout-minutes: 5
strategy:
matrix:
os: [macos-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- uses: actions-rs/cargo@v1
with:
command: test
# cargo_fmt:
# name: 💅 Cargo Fmt
# continue-on-error: true
# runs-on: ubuntu-latest
# steps:
# - uses: actions/checkout@v2
# - uses: actions-rs/toolchain@v1
# with:
# profile: minimal
# toolchain: stable
# override: true
# - run: rustup component add rustfmt
# - uses: actions-rs/cargo@v1
# with:
# command: fmt
# args: --all -- --check

12
.github/workflows/ci.yml vendored Normal file
@@ -0,0 +1,12 @@
name: CI
on:
workflow_dispatch:
push:
pull_request:
types: [opened, review_requested, ready_for_review]
jobs:
cargo:
if: github.event.pull_request.draft == false
uses: ./.github/workflows/cargo.yml

2
CHANGELOG.md Normal file
@@ -0,0 +1,2 @@
# Kind2 0.2.76
The main.rs and language.rs files have been broken into several parts.

960
Cargo.lock generated

File diff suppressed because it is too large.

Cargo.toml

@@ -15,3 +15,15 @@ hvm = "0.1.81"
highlight_error = "0.1.1"
clap = { version = "3.1.8", features = ["derive"] }
rand = "0.8.5"
[dev-dependencies]
pretty_assertions = "1.3.0"
ntest = "0.8.1"
walkdir = "2"
[profile.dev.package.hvm]
opt-level = 3
[[test]]
name = "kind2-tests"
path = "tests/mod.rs"

@@ -28,7 +28,7 @@ map a b (Cons head tail) f = Cons (f x) (map tail f)
Side-effective programs are written via monads, resembling [Rust](https://www.rust-lang.org/) and [TypeScript](https://www.typescriptlang.org/):
```javascript
// Prints the double of every numbet up to a limit
// Prints the double of every number up to a limit
Main : IO (Result () String) {
ask limit = IO.prompt "Enter limit:"
for x in (List.range limit) {

666
SYNTAX.md Normal file
@@ -0,0 +1,666 @@
All syntaxes
------------
This document lists all the high-level syntaxes available in the Kind language. Every syntax listed below is expanded (desugared) to either a primitive term or to one of the functions available in the base library.
A Kind program consists of a collection of top-level function definitions, usually with a function called `Main` that acts as the entry point of the program. Here's an example of a program that reads a number `n` from user input and outputs the nth Fibonacci number.
```
Main {
do IO {
ask inp = IO.prompt "Which fibonacci number to calculate? "
let res = Parser.run Parser.u60_decimal inp
match Either res {
left =>
let n = res.val
let fib_n = Fib n
IO.output ((U60.show fib_n) "")
right =>
IO.output "Input text is not a number"
}
}
}
Fib (n: U60) : U60 {
Fib.go n 0 1
}
Fib.go (n: U60) (f1: U60) (f2: U60) : U60
Fib.go 0 f1 f2 = f1
Fib.go n f1 f2 = Fib.go (- n 1) f2 (+ f1 f2)
```
Not all programs are necessarily meant to be run. Proofs, for example, may exist only to be type-checked, which already validates the statement they prove.
For many more real code examples, check the Wikind repository.
Top-level definition
--------------------
```
Name (arg0: Type0) (arg1: Type1) : ReturnType
Name (Type0.ctr subarg0 subarg1) arg1 = rule0_body
Name arg0 Type1.ctr = rule1_body
...
```
Kind programs and proofs are composed of a number of top-level definitions, each consisting of a `Name`, a list of arguments, a `:`, a `ReturnType`, and a sequence of rewrite rules. For example:
```
MyName: String
MyName = "Victor"
```
Creates a top-level definition called `MyName`, of type `String` and value `"Victor"`. And:
```
GetFirst (fst: String) (snd: String): String
GetFirst fst snd = fst
```
Creates a top-level function called `GetFirst`, which receives two arguments, `fst` and `snd` of type `String`, and returns a `String`, which is the first argument.
Every rule must pattern match on each argument; each pattern may match a specific type constructor or bind anything to a variable. For example, given the type:
```
Bool : Type
Bool.true : Bool
Bool.false : Bool
```
We could write the following function:
```
Bool.and (a: Bool) (b: Bool) : Bool
Bool.and Bool.true Bool.true = Bool.true
Bool.and x y = Bool.false
```
This defines a top-level function `Bool.and` with arguments `a` and `b`, and two rules. The first rule is selected only when `a` is `Bool.true` and `b` is `Bool.true`, and returns `Bool.true`. All other cases match the second rule, where the variable `x` is bound to the value of the first argument and the variable `y` to the value of the second. Both variables are unused, and the second rule always returns `Bool.false`.
A function with no rules behaves like a type constructor and is considered to be always correct by the type checker. In the example above, we defined one way to create a value of type `Type` called `Bool` and two ways of creating values of type `Bool` called `Bool.false` and `Bool.true`.
The name of the top-level definition also specifies the file where the definition is. For example `Physics.Verlet.step` must be either in `Physics/Verlet/step.kind2` or `Physics/Verlet/step/_.kind2`. All top-level definitions must start with a capital letter, while all variables must start with a lowercase letter.
An argument may be marked as erased by adding a `-` in front of it, meaning it will be removed at runtime and used only for type checking. For example:
```
MyFun -(a: Type) (b: a) : ReturnType
```
This defines a function with an argument `a` that gets erased at runtime, and an argument `b` of type `a`. All arguments of type `Type` should be erased. Also, because Kind has dependent types, we can write an argument that depends on the value of another, as is the case with `b` in the example above.
An argument may also be defined to be implicit, meaning its value will be inferred and we don't need to pass it explicitly when calling the function. An argument that is both erased and implicit is defined by writing it between `<>`. For example:
```
List.tail <a: Type> (xs: List a) : List a
```
Here `a` is an implicit and erased argument. When calling a function with an implicit argument, we can pass it explicitly, as in `List.tail Bool bool_list`, or omit it and let its value be inferred from the context, as in `List.tail bool_list`.
We can write an argument that is implicit but not erased with `+<arg: ArgType>`, but the use cases for this construction are very uncommon.
When writing the patterns of a function's rules, we can omit arguments that are erased. For example, we can define the rules of the `List.tail` function above in two equivalent ways:
```
List.tail <a: Type> (xs: List a) : List a
List.tail a (List.nil t) = List.nil
List.tail a (List.cons t x xs) = xs
```
or
```
List.tail <a: Type> (xs: List a) : List a
List.tail List.nil = List.nil
List.tail (List.cons x xs) = xs
```
When type-checking, the omitted variables are filled in with actual variables, but they won't be available for use inside the rule's body. Also, as the example above shows, we can omit erased variables both in a root position of the pattern and inside a constructor.
When pattern matching in a rule, if we don't use one of the bound variables, we can write it simply as `_` and avoid having to give it a name. Internally, this variable is given a name that is not accessible inside of the rule body. In the `List.tail` example, the `x` variable is not used in the second rule, so we could write it as `List.tail (List.cons _ xs) = xs` to make explicit that we don't care about this value.
It is also possible to omit any of the type annotations in a function definition. Any variables that don't have an explicit type have their types inferred during type checking. Combining everything together, we could write the `List.tail` example as:
```
List.tail <a> (xs: List a) : List a
List.tail List.nil = List.nil
List.tail (List.cons _ xs) = xs
```
This is how this function is defined in the Wikind repository, which contains a wide collection of Kind definitions.
For functions that have only one rule that doesn't do any pattern matching at all on the arguments, there is a concise syntax to define them:
```
Hello (name: String) : IO U60 {
IO.output (String.concat "Hello, " name)
}
```
This is equivalent to the following:
```
Hello (name: String) : IO U60
Hello name = IO.output (String.concat "Hello, " name)
```
Top-level function definitions are the only syntax that isn't an expression, which means they can't appear just anywhere in the program and, instead, must appear at the "global scope" of a file.
Lambda
------
```
x => body
```
A lambda represents an inline function. It is written as a variable name, followed by `=>`, followed by a term. Currently, there are no multi-argument lambdas in Kind; they must be written as `a => b => c => body`.
Usually, the type of a lambda argument is inferred, but we may optionally annotate its type to help the type-checker. For example:
The type inference of the lambda `x => x` will likely fail, since there isn't enough information to know what exactly `x` is. We can write this expression as `(x: SomeType) => x` to avoid needing to infer this type.
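As a minimal sketch (using hypothetical names), a two-argument lambda is written as two nested single-argument lambdas, optionally annotating the first argument to help the type-checker:
```
// Hypothetical example: nested lambdas acting as a two-argument function
let add = (x: U60) => y => (+ x y)
(add 1 2)
```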
Application
-----------
```
(func argm)
```
A function application is written in lisp style, `(f x)`. If you want to apply a bigger expression to an argument, you can wrap `()` around it. For example: `((x => body) argm)` would apply the `x => body` function to `argm`.
In Kind2, lambdas are fundamentally different from functions, for important optimization reasons. There is no automatic currying of functions, and we must always call a function with either all of its explicit arguments or with all explicit and all implicit arguments. A 3-argument function `f3`, for example, is called with `(f3 a0 a1 a2)`. If we want to curry the last argument of this function, it must be done explicitly with `(a2 => (f3 a0 a1 a2))`.
For lambdas, although they are not multi-argument, we can call multiple lambdas in a row as we would a multi-argument function. For example:
Consider a variable holding a lambda, `let lmb = a0 => a1 => a2 => a3 => (f4 a0 a1 a2 a3)`. We could call it with `(((lmb x) y) z)`, but for conciseness we can omit the parentheses and call it simply with `(lmb x y z)`. Note that, unlike functions, lambdas may be applied to fewer arguments than they take.
When writing a sequence of nested function applications, the first layer of parentheses can be omitted (except for binary operators on native numbers). For example, the following function definitions are equivalent:
```
List.flatten <a> (xs: (List (List a))) : (List a)
List.flatten (List.nil) = (List.nil)
List.flatten (List.cons head tail) = (List.concat head (List.flatten tail))
```
```
List.flatten <a> (xs: List (List a)) : List a
List.flatten List.nil = List.nil
List.flatten (List.cons head tail) = List.concat head (List.flatten tail)
```
One thing to be careful with is function types. `List a -> List a` is interpreted as `(List (a -> (List a)))`, which may cause surprising errors, so it's best to use parentheses in this case: `(List a) -> (List a)`.
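For instance, a hypothetical higher-order helper whose argument and return types are parenthesized this way:
```
// Hypothetical example: both the argument type and the return type of `f`
// are wrapped in parentheses to avoid the parsing pitfall above
List.apply_twice <a> (f: (List a) -> (List a)) (xs: List a) : List a {
  f (f xs)
}
```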
Let
---
```
let x = value
body
```
Let expressions define local values. They allow an expression to be reused multiple times, and computed only once at runtime. For example:
```
let x = (Heavy 1000000)
x + x
```
Will only evaluate `(Heavy 1000000)` once. Since `let` is just an expression, you can chain it any way you like. A `;` can be used for clarity to separate the value and the body, and `()` can be used to wrap an inline `let` expression, but neither is mandatory.
```
let a = 1
let b = (let x = 2; x)
let c = 3
(+ a (+ b c))
```
A `let` expression introduces a new variable into the context. That variable will appear in error messages and is **not** considered equal to the expression assigned to it (for theorem proving and type-aliasing purposes).
Since a `let` is a normal expression like all others, it may be used anywhere an expression is expected. This can be useful, for instance, for making very complex return types of proofs more readable by naming their parts with `let` expressions:
#### TODO: Find an example. I know we have one somewhere either on Wikind or on Kind1's base.
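Until a real-world example is added, here is a minimal hypothetical sketch of the idea, naming a repeated sub-expression of a return type with an inline `let` (the rule is omitted since only the type is being illustrated):
```
NotNotNot (b: Bool) : (let nb = (Bool.not b); Equal Bool (Bool.not (Bool.not nb)) nb)
```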
#### TODO: Write a warning about dups
Forall (dependent function type)
-------------------------------------
```
(name: type) -> body
```
Forall, or Pi, or dependent function type, is the type of a function.
```
Nat.add (n: Nat) (m: Nat) : Nat
```
`Nat.add` is a function which takes two `Nat`s and returns their sum. It has type `(n: Nat) -> (m: Nat) -> Nat`.
```
Bool.double_negation (b: Bool) : Equal Bool (Bool.not (Bool.not b)) b
```
`Bool.double_negation` is a proof that for all `Bool`, its double negation is equal to itself. It has type `(b: Bool) -> (Equal Bool (Bool.not (Bool.not b)) b)`.
Since Kind functions are dependently typed, you can give a name to the input variable, and use it in the body of the dependent type. For example:
```
(n: Nat) -> Vector Bool n
```
Is the type of a function that receives a `n: Nat` and returns a `Vector` of `n` `Bool`s.
If you're not using dependent types, you can omit the names, parentheses and colon, and write just:
```
Nat -> Nat
```
Which is a function that receives a `Nat` and returns a `Nat`. This is converted into `(_: Nat) -> Nat`, which is a way of not giving a name to a variable.
While the arrow `->` is actually optional when defining a named forall (i.e., `(x: a) -> (f x)` is equivalent to `(x: a) (f x)`), it is usually recommended for clarity.
Annotation
----------
```
x :: A
```
An inline type annotation. Has no runtime effect, but can be useful to help the
type-checker when it can't infer a type. For example:
```
let fn = (x => x + x) :: Nat -> Nat
fn 4
```
The code above uses an inline annotation to give the `x => x + x` function, bound to `fn`, the type `Nat -> Nat`.
If, then, else
--------------
```
if b { t } else { f }
```
The syntax above is equivalent to a ternary operator. It evaluates the bool `b` and returns `t` if it is true, `f` otherwise. It expands to the function application `Bool.if b t f`.
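For example, a small hypothetical helper that picks between two strings based on a `Bool`:
```
Bool.show (b: Bool) : String {
  if b { "Bool.true" } else { "Bool.false" }
}
```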
Type Derivation
---------------
#### TODO: Explain the syntax for the `kind2 derive` command
Match (pattern matching)
-----------------------
```
match ExprType name = expression {
ctr0 => body0
ctr1 => body1
...
ctrN => bodyN
}: motive
```
The `match` syntax is a convenient way of branching on each constructor of a type, and accessing their inner values without having to write an auxiliary function. By using the motive, it can also be very helpful with proving theorems. A simple example is:
```
let x = Bool.true
match Bool x {
true => "x is true"
false => "x is false"
}
```
When a matched constructor has fields, you can access them in the respective branch as `name.field`. For example, when matching a `List`, we gain access to its head and tail as `list.head` and `list.tail`:
```
List.sum (list: List Nat) : Nat {
match List list {
nil => Nat.zero
cons => Nat.add list.head (List.sum list.tail)
}
}
```
This syntax can be useful in many cases, but here this function would be better expressed as:
```
List.sum (list: List Nat) : Nat
List.sum List.nil = Nat.zero
List.sum (List.cons head tail) = Nat.add head (List.sum tail)
```
Instead of using a `let` expression like in the `Bool` case above, we can give the matched expression a name inside the `match` expression itself:
```
match List xs = [1 2 3] {
nil => Nat.zero
cons => xs.head
}
```
You may also provide a return type, called motive. Since Kind has dependent
types, the motive has access to the value of the matched variable, allowing you
to return a different type on each branch. For example:
```
match Bool x = Bool.true {
true => "i'm a string"
false => 42
}: if x { String } else { U60 }
```
Here, Kind evaluates `if x { String } else { U60 }` with each possible value of `x` (in this case, `true` or `false`) to determine the return type of each branch.
Notice that the `true` case and the `false` case return different types. This
is very useful for theorem proving. For example:
```
DoubleNegation (b: Bool) : Equal Bool (Bool.not (Bool.not b)) b {
match Bool b {
true => ?a
false => ?b
}
}
```
To prove this theorem, Kind demands that you provide a proof of
`not not b = b` in both cases, which isn't possible while `b` stays abstract. But if you write a motive:
```
DoubleNegation (b: Bool) : Equal Bool (Bool.not (Bool.not b)) b {
match Bool b {
true => ?a
false => ?b
}: Equal Bool (Bool.not (Bool.not b)) b
}
```
Then Kind demands a proof of `not not true = true` on the `?a` branch, and
a proof of `not not false = false` on the `?b` branch. Since these equalities
reduce to `true = true` and `false = false`, you can complete the proof with just `refl`.
#### TODO: Write a document explaining theorem-proving concepts using Kind
Inspection
----------
```
?name
```
We can ask the type-checker what it expects the type of an expression to be by using the inspection syntax, written as `?` optionally followed by a name to help you find it. Goals are extremely useful when developing algorithms and proofs, as they allow you to keep a part of your program incomplete while you work on the rest. They also allow you to inspect the context and the expected type at that point. For example, if you write:
```
Add (a: Nat) (b: Nat) : Nat
Add Nat.zero b = ?i0
Add (Nat.succ a.pred) b = ?i1
```
Kind will display:
```
Inspection.
- Goal: Nat
Context:
- b : Nat
On 'your_file.kind2':
2 | Add Nat.zero b = ?i0
Inspection.
- Goal: Nat
Context:
- a.pred : Nat
- b : Nat
On 'your_file.kind2':
3 | Add (Nat.succ a.pred) b = ?i1
```
Notice how it shows the type it expects at each inspection (`Nat`), as well as the
context available there. Note also how the context in the `Nat.succ` case includes the variable `a.pred`, which was bound in the rule to the value inside the `Nat.succ` constructor.
Hole
----
```
_
```
A `hole` is written as a single underscore. It stands for "complete this for me".
Holes are extremely useful to let Kind fill the "obvious" parts of your
program for you. Without holes, Kind would be far more verbose. For
example, the list of lists `[[1 2] [3 4]]`, in its full form, would be:
```
(List.cons (List U60) (List.cons U60 1 (List.cons U60 2 (List.nil U60)))
(List.cons (List U60) (List.cons U60 3 (List.cons U60 4 (List.nil U60)))
(List.nil (List U60))))
```
With holes, you can write just:
```
(List.cons _ (List.cons _ 1 (List.cons _ 2 (List.nil _)))
(List.cons _ (List.cons _ 3 (List.cons _ 4 (List.nil _)))
(List.nil _)))
```
Of course, since these arguments that we filled with holes are all implicit, we could simply not write them, like in:
```
(List.cons (List.cons 1 (List.cons 2 List.nil))
(List.cons (List.cons 3 (List.cons 4 List.nil))
List.nil))
```
But under the hood, all an implicit argument does is automatically insert holes in those places.
Moreover, single holes can be shortened to `!`, so the example can also be written as:
```
List.cons!(List.cons!(1, List.cons!(2, List.nil!)),
List.cons!(List.cons!(3, List.cons!(4, List.nil!)),
List.nil!))
```
Of course, in this particular example, we can just use the list notation directly:
```
[[1 2] [3 4]]
```
But in this list syntax, as well as in many others, the holes would also be put there automatically.
Kind's holes work by unifying immediate values only. That is, whenever
you'd have an error such as:
```
Expected: Bool
Detected: _
```
Kind will replace `_` by `Bool` and try again. That is all it does, which
means it does no complex unification. It turns out this covers all the cases required
to keep Kind's syntax clean and free of bloated type annotations, even for
things like equality rewrites and vectors, while also keeping the type-checker
fast. If you want more advanced hole-filling features, as seen in Agda or
Idris, Kind won't do that, and you will need explicit type annotations.
Logging
-------
```
(HVM.log logged result)
```
The logging feature of the runtime is exposed as a Kind function. It allows you to print a string at runtime without using the `IO` type, which is very useful for debugging and for inspecting the execution of an algorithm. However, because it causes a hidden side effect that bypasses the type system, it should be used carefully and should not be present in finished programs. Note that the printing happens when this function is reduced at runtime, which may not be at the trivially expected time, may change when Kind runs in multithreaded mode, and may also happen while type-checking, since the type-checker is itself implemented as a special Kind program.
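As a sketch, a hypothetical debugging helper that logs its first argument's textual form before returning the sum:
```
// Hypothetical example: the log is printed when this call is reduced
Debug.add (a: U60) (b: U60) : U60 {
  HVM.log (U60.show a) (+ a b)
}
```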
Do notation
-----------
```
do MonadicType {
statements
}
```
Do blocks, or the do-notation, are extremely useful for "flattening" cascades of
callbacks. In Kind, a `do` block requires the name of a monad and a series
of statements. Inside it, you may use `ask x = monad` to bind the result of a
monadic computation to the name `x`. You may also write `ask monad` directly to
execute a monadic computation and drop the result. You can also use local
`let`s, as you'd expect. The block is then converted into a series of applications
of `Monad.bind` and `Monad.pure`. For example,
```
ask_user_age: IO U60
do IO {
ask name = IO.prompt "What is your name?"
ask IO.output (String.concat "Welcome, " name)
ask year = IO.prompt "When were you born?"
let age = 2020 - (Maybe.default (U60.read_decimal year) 0)
return age
}
```
Is converted to:
```
IO.bind (IO.prompt "What is your name?") (name =>
IO.bind (IO.output (String.concat "Welcome, " name)) (_ =>
IO.bind (IO.prompt "When were you born?") (year =>
let age = 2020 - (Maybe.default (U60.read_decimal year) 0)
(IO.pure age))))
```
To use the `do` syntax with a type, it must implement a function called `bind`, which performs the monadic bind operation, and a function called `pure`, which simply returns the value wrapped by the monad. Note that this syntax doesn't actually require your type to be a monad, but you should not implement these functions if your type is not monadic. In the future we may change this to actually require a proof of the monad laws.
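For instance, a sketch of what these two functions could look like for a `Maybe` type, assuming Wikind-style `Maybe.none` and `Maybe.some` constructors:
```
Maybe.bind <a> <b> (m: Maybe a) (f: a -> (Maybe b)) : Maybe b
Maybe.bind Maybe.none f = Maybe.none
Maybe.bind (Maybe.some x) f = f x

Maybe.pure <a> (x: a) : Maybe a {
  Maybe.some x
}
```
With these in scope, a `do Maybe { ... }` block desugars to the same chain of `bind` and `pure` calls shown above for `IO`.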
Numbers and operators
---------------------
Currently, Kind has one primitive number type, `U60`: unsigned 60-bit integers. Number literals such as `583` and `34957` are interpreted as U60. These numbers compile to very efficient machine code and should be used whenever you need performant numeric operations.
There are also primitive numeric operators that work on U60s. They are used like any other function, except that the `()` are always required. They are:
Operation | Syntax
--------- | ------
Addition | (+ a b)
Subtraction | (- a b)
Multiplication | (* a b)
Division | (/ a b)
Remainder | (% a b)
Shift left | (<< a b)
Shift right | (>> a b)
Bitwise and | (& a b)
Bitwise or | (| a b)
Bitwise xor | (^ a b)
Greater than | (> a b)
Greater equal | (>= a b)
Less than | (< a b)
Less equal | (<= a b)
Equal | (== a b)
Not equal | (!= a b)
Note that all of these are of type `U60 -> U60 -> U60`, that is, they all return a number. For example, `(== 2 2)` returns `1` and `(<= 30 4)` returns `0`. If you need functions that return a boolean, check the Wikind repository for things like `U60.equal (a: U60) (b: U60) : Bool`.
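For example, a hypothetical `U60.max` built only from primitive operators, relying on the fact that comparisons return `1` or `0`:
```
// (> a b) is 1 when a is greater and 0 otherwise, so exactly one of the
// two products below is non-zero
U60.max (a: U60) (b: U60) : U60 {
  (+ (* (> a b) a) (* (<= a b) b))
}
```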
#### TODO: Write about U120 compilation for kindelia
Char literal
------------
```
'a'
```
A character literal is an ASCII character surrounded by single quotes. Characters are currently implemented as U60 numbers, which makes them fast but wastes space, since at most 8 of the 60 bits are used.
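Since a character is just a number, the primitive operators work on it directly; for example, a hypothetical digit test:
```
// '0' through '9' are contiguous code points, so two comparisons suffice
Char.is_digit (c: U60) : U60 {
  (& (<= '0' c) (<= c '9'))
}
```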
String literal
--------------
```
"Hello"
```
A string literal is a sequence of ASCII characters surrounded by double quotes. Strings aren't primitive in Kind either; they are currently implemented as:
```
String : Type
String.nil : String
String.cons (head: Char) (tail: String) : String
```
String literals are expanded into a sequence of `String.cons`. For example, `"Hello"` is desugared to `String.cons 'H' (String.cons 'e' (String.cons 'l' (String.cons 'l' (String.cons 'o' String.nil))))`.
Sigma type
----------
```
[name: type] -> body
```
Sigma literals can be used to write sigma types or dependent pairs. They are
expanded to:
```
Sigma type (name => body)
```
With `Sigma` in Wikind defined as `Sigma (a: Type) (b: a -> Type) : Type`.
In the same way that forall (aka Pi, aka the dependent function type) can be read as "forall", `Sigma`s can be read as "there exists". So, for example, the program below:
```
ThereIsEvenNat : [x: Nat] (Equal (Nat.mod x Nat.two) Nat.zero)
ThereIsEvenNat = $Nat.zero Equal.refl
```
Can be read as `there exists an (x: Nat) such that x mod 2 is equal to zero`. Sigmas can also be used to create subset types:
```
EvenNat: Type
EvenNat = [x: Nat] (Equal (Nat.mod x Nat.two) Nat.zero)
```
New sigma
---------
```
$val_a val_b
```
`Sigma.new` literals can be used to create values for sigma types, or dependent pairs. They are
expanded to:
```
(Sigma.new _ _ val_a val_b)
```
With `Sigma.new` defined as `Sigma.new <a: Type> <b: a -> Type> (fst: a) (snd: b fst) : Sigma a b` in Wikind.
List literal
------------
```
[1, 2, 3]
```
The syntax above expands to:
```
(List.cons 1 (List.cons 2 (List.cons 3 List.nil)))
```
The `,` is optional.

@@ -1,22 +0,0 @@
#!/bin/sh
set -e
CURRENT=$(realpath .)
KIND2=$(realpath $CURRENT/target/release/kind2)
CHECKER=$(realpath ../)
#echo "Building Kind2 without the new checker.hvm"
#cargo build --release
echo "Building Kind2 type checker"
# Probably we should just use git clone in Wikind?
cd ../Wikind
#$KIND2 check Kind/TypeChecker.kind2
$KIND2 to-hvm Kind/TypeChecker.kind2 > ../Kind2/src/checker.hvm
cargo install --path $CURRENT
#cd $CURRENT
#cargo build --release

1
rustfmt.toml Normal file
@@ -0,0 +1 @@
max_width = 180

217
src/book.rs Normal file
@@ -0,0 +1,217 @@
// The location of things inside the source code
pub mod span;
// Description of all the terms inside the language
pub mod term;
// Types of names.
pub mod name;
// Description of user-declared datatypes (sum and product types).
pub mod new_type;
use crate::book::name::Ident;
use crate::book::span::{FileOffset, Localized, Span};
use crate::book::term::Term;
use std::collections::HashMap;
use std::fmt::{Display, Error, Formatter};
// A book is a collection of entries.
#[derive(Clone, Debug, Default)]
pub struct Book {
pub names: Vec<String>,
pub entrs: HashMap<Ident, Box<Entry>>,
pub holes: u64,
}
// An entry describes a function that has
// rules and a type.
#[derive(Clone, Debug)]
pub struct Entry {
pub name: Ident,
pub orig: Span,
pub kdln: Option<String>,
pub args: Vec<Box<Argument>>,
pub tipo: Box<Term>,
pub rules: Vec<Box<Rule>>,
}
#[derive(Clone, Debug)]
pub struct Rule {
pub orig: Span,
pub name: Ident,
pub pats: Vec<Box<Term>>,
pub body: Box<Term>,
}
#[derive(Clone, Debug)]
pub struct Argument {
pub hide: bool,
pub orig: Span,
pub eras: bool,
pub name: Ident,
pub tipo: Box<Term>,
}
impl Book {
pub fn set_origin_file(&mut self, file: FileOffset) {
for entr in self.entrs.values_mut() {
entr.set_origin_file(file);
}
}
}
// Some constructors that are really useful.
impl Argument {
pub fn new_hidden(name: Ident, tipo: Box<Term>) -> Argument {
Argument {
hide: true,
orig: Span::Generated,
eras: true,
name,
tipo
}
}
pub fn new_accessible(name: Ident, tipo: Box<Term>) -> Argument {
Argument {
hide: false,
orig: Span::Generated,
eras: false,
name,
tipo
}
}
pub fn new_erased(name: Ident, tipo: Box<Term>) -> Argument {
Argument {
hide: false,
orig: Span::Generated,
eras: true,
name,
tipo
}
}
}
impl Entry {
pub fn count_implicits(&self) -> (usize, usize) {
let mut hiddens = 0;
let mut eraseds = 0;
for arg in &self.args {
if arg.hide {
hiddens += 1;
}
if arg.eras {
eraseds += 1;
}
}
(hiddens, eraseds)
}
pub fn new_type_signature(name: Ident, args: Vec<Box<Argument>>) -> Entry {
Entry {
name,
orig: Span::Generated,
kdln: None,
args,
tipo: Box::new(Term::Typ { orig: Span::Generated }),
rules: Vec::new(),
}
}
}
impl Display for Rule {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
write!(f, "{}", self.name)?;
for pat in &self.pats {
write!(f, " {}", pat)?;
}
write!(f, " = {}", self.body)
}
}
impl Display for Argument {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
let (open, close) = match (self.eras, self.hide) {
(false, false) => ("(", ")"),
(false, true) => ("+<", ">"),
(true, false) => ("-(", ")"),
(true, true) => ("<", ">"),
};
write!(f, "{}{}: {}{}", open, self.name, &self.tipo, close)
}
}
impl Display for Entry {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
if let Some(kdln) = &self.kdln {
write!(f, "{} #{}", self.name, kdln)?
} else {
write!(f, "{}", self.name.clone())?
};
for arg in &self.args {
write!(f, " {}", arg)?;
}
write!(f, " : {}", &self.tipo)?;
for rule in &self.rules {
write!(f, "\n{}", rule)?
}
Ok(())
}
}
impl Display for Book {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
for name in &self.names {
writeln!(f, "{}\n", self.entrs.get(&Ident(name.clone())).unwrap())?;
}
Ok(())
}
}
impl Localized for Rule {
fn get_origin(&self) -> Span {
self.orig
}
fn set_origin_file(&mut self, file: FileOffset) {
self.orig = self.orig.set_file(file);
for pat in &mut self.pats {
pat.set_origin_file(file);
}
self.body.set_origin_file(file);
}
}
impl Localized for Entry {
fn get_origin(&self) -> Span {
self.orig
}
fn set_origin_file(&mut self, file: FileOffset) {
self.orig = self.orig.set_file(file);
for arg in &mut self.args {
arg.set_origin_file(file);
}
for rule in &mut self.rules {
rule.set_origin_file(file);
}
self.tipo.set_origin_file(file);
}
}
impl Localized for Argument {
fn get_origin(&self) -> Span {
self.orig
}
fn set_origin_file(&mut self, file: FileOffset) {
self.tipo.set_origin_file(file);
}
}

111
src/book/name.rs Normal file
@@ -0,0 +1,111 @@
use std::fmt::{Display, Error, Formatter};
#[derive(Clone, Debug)]
pub struct EncodedName(u64);
/// Describes an identifier of the language.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct Ident(pub String);
#[derive(Clone)]
pub enum Path {
Qualified(String, String),
Local(String)
}
impl EncodedName {
pub fn u64_to_name(&self) -> String {
let mut name = String::new();
let mut num = self.0;
while num > 0 {
let chr = (num % 64) as u8;
let chr = match chr {
0 => '.',
1..=10 => (chr - 1 + b'0') as char,
11..=36 => (chr - 11 + b'A') as char,
37..=62 => (chr - 37 + b'a') as char,
63 => '_',
64.. => panic!("impossible character value"),
};
name.push(chr);
num /= 64;
}
name.chars().rev().collect()
}
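/// Packs a name into a u64 using 6 bits per character; only the first 10
/// characters fit in 60 bits, so longer names are silently truncated.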
pub fn from_string(name: &str) -> EncodedName {
fn char_to_u64(chr: char) -> u64 {
match chr {
'.' => 0,
'0'..='9' => 1 + chr as u64 - '0' as u64,
'A'..='Z' => 11 + chr as u64 - 'A' as u64,
'a'..='z' => 37 + chr as u64 - 'a' as u64,
'_' => 63,
_ => panic!("Invalid name character."),
}
}
let mut num: u64 = 0;
for (i, chr) in name.chars().enumerate() {
if i < 10 {
num = (num << 6) + char_to_u64(chr);
}
}
EncodedName(num)
}
}
impl Ident {
pub fn encode(&self) -> EncodedName {
EncodedName::from_string(&self.0)
}
pub fn new_path(path: &str, name: &str) -> Ident {
Ident(format!("{}.{}", path, name))
}
pub fn to_path(&self) -> String {
self.0.replace('.', "/")
}
pub fn is_ctr(&self) -> bool {
if !self.0.is_empty() {
let chr = self.0.chars().next().unwrap();
chr == '/' || ('A'..='Z').contains(&chr)
} else {
false
}
}
}
impl Path {
pub fn encode(&self) -> EncodedName {
EncodedName::from_string(&format!("{}", self))
}
pub fn new_path(path: &str, name: &str) -> Path {
Path::Qualified(path.to_string(), name.to_string())
}
}
impl Display for Ident {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
write!(f, "{}", self.0)
}
}
impl Display for Path {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
match self {
Path::Qualified(p, e) => write!(f, "{}.{}", p, e),
Path::Local(e) => write!(f, "{}", e)
}
}
}
impl Display for EncodedName {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
write!(f, "{}", self.0)
}
}

36
src/book/new_type.rs Normal file
@@ -0,0 +1,36 @@
use std::path::{PathBuf};
use crate::book::name::Ident;
use crate::book::{Argument, Entry};
#[derive(Clone, Debug)]
pub enum NewType {
Sum(SumType),
Prod(ProdType)
}
#[derive(Clone, Debug)]
pub struct SumType {
pub name: Ident,
pub pars: Vec<Box<Argument>>,
pub ctrs: Vec<Box<Constructor>>,
}
#[derive(Clone, Debug)]
pub struct ProdType {
pub name: Ident,
pub pars: Vec<Box<Argument>>,
pub fields: Vec<Box<Argument>>,
}
#[derive(Clone, Debug)]
pub struct Constructor {
pub name: Ident,
pub args: Vec<Box<Argument>>,
}
#[derive(Clone, Debug)]
pub struct Derived {
pub path: PathBuf,
pub entr: Entry,
}

58
src/book/span.rs Normal file
@@ -0,0 +1,58 @@
#[derive(Clone, PartialEq, Eq, Copy, Debug)]
pub struct ByteOffset(pub u32);
#[derive(Clone, PartialEq, Eq, Copy, Debug)]
pub struct FileOffset(pub u32);
#[derive(Clone, PartialEq, Eq, Copy, Debug)]
pub struct SpanData {
pub start: ByteOffset,
pub end: ByteOffset,
pub file: FileOffset,
}
#[derive(Clone, PartialEq, Eq, Copy, Debug)]
pub enum Span {
Generated,
Localized(SpanData),
}
impl Span {
#[inline]
pub fn new(start: ByteOffset, end: ByteOffset, file: FileOffset) -> Span {
Span::Localized(SpanData { start, end, file })
}
#[inline]
pub fn new_off(start: ByteOffset, end: ByteOffset) -> Span {
Span::Localized(SpanData { start, end, file: FileOffset(0) })
}
pub fn set_file(&self, new_file: FileOffset) -> Span {
match self {
Span::Generated => Span::Generated,
Span::Localized(SpanData { start, end, .. }) => Span::Localized(SpanData {
start: *start,
end: *end,
file: new_file,
}),
}
}
#[inline]
pub fn generated() -> Span {
Span::Generated
}
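// Packs the span into a u64: the file index goes in the upper 16 bits, the
// start offset in bits 0..24 and the end offset in bits 24..48 (both offsets
// truncated to 24 bits); generated spans encode to 0.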
pub fn encode(&self) -> u64 {
match self {
Span::Generated => 0,
Span::Localized(data) => ((data.file.0 as u64) << 48) | ((data.start.0 as u64) & 0xFFFFFF) | (((data.end.0 as u64) & 0xFFFFFF) << 24),
}
}
}
pub trait Localized {
fn get_origin(&self) -> Span;
fn set_origin_file(&mut self, file: FileOffset);
}

329
src/book/term.rs Normal file
@@ -0,0 +1,329 @@
use crate::book::name::Ident;
use crate::book::span::{FileOffset, Localized, Span};
use std::ascii;
use std::fmt::{Display, Error, Formatter};
#[derive(Copy, Clone, Debug)]
pub enum Operator {
Add,
Sub,
Mul,
Div,
Mod,
And,
Or,
Xor,
Shl,
Shr,
Ltn,
Lte,
Eql,
Gte,
Gtn,
Neq,
}
#[derive(Clone, Debug)]
pub enum Term {
Typ {
orig: Span,
},
Var {
orig: Span,
name: Ident,
},
All {
orig: Span,
name: Ident,
tipo: Box<Term>,
body: Box<Term>,
},
Lam {
orig: Span,
name: Ident,
body: Box<Term>,
},
App {
orig: Span,
func: Box<Term>,
argm: Box<Term>,
},
Let {
orig: Span,
name: Ident,
expr: Box<Term>,
body: Box<Term>,
},
Ann {
orig: Span,
expr: Box<Term>,
tipo: Box<Term>,
},
Sub {
orig: Span,
name: Ident,
indx: u64,
redx: u64,
expr: Box<Term>,
},
Ctr {
orig: Span,
name: Ident,
args: Vec<Box<Term>>,
},
Fun {
orig: Span,
name: Ident,
args: Vec<Box<Term>>,
},
Hlp {
orig: Span,
},
U60 {
orig: Span,
},
Num {
orig: Span,
numb: u64,
},
Op2 {
orig: Span,
oper: Operator,
val0: Box<Term>,
val1: Box<Term>,
},
Hol {
orig: Span,
numb: u64,
},
Mat {
orig: Span,
tipo: Ident,
name: Ident,
expr: Box<Term>,
cses: Vec<(Ident, Box<Term>)>,
moti: Box<Term>,
},
Open {
orig: Span,
tipo: Ident,
name: Ident,
expr: Box<Term>,
moti: Box<Term>,
body: Box<Term>
}
}
impl Term {
pub fn new_var(name: Ident) -> Term {
Term::Var {
orig: Span::Generated,
name
}
}
pub fn interpret_as_string(&self) -> Option<String> {
let mut text = String::new();
let mut term = self;
let string_nil = Ident::new_path("String", "nil");
let string_cons = Ident::new_path("String", "cons");
loop {
if let Term::Ctr { name, args, .. } = term {
if *name == string_cons && args.len() == 2 {
if let Term::Num { numb, .. } = *args[0] {
if ascii::escape_default(numb as u8).count() > 1 {
return None;
} else {
text.push(char::from_u32(numb as u32).unwrap_or('\0'));
term = &*args[1];
continue;
}
} else {
return None;
}
} else if *name == string_nil && args.is_empty() {
return Some(text);
}
}
return None;
}
}
}
impl Display for Operator {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
use Operator::*;
match self {
Add => write!(f, "+"),
Sub => write!(f, "-"),
Mul => write!(f, "*"),
Div => write!(f, "/"),
Mod => write!(f, "%"),
And => write!(f, "&"),
Or => write!(f, "|"),
Xor => write!(f, "^"),
Shl => write!(f, "<<"),
Shr => write!(f, ">>"),
Ltn => write!(f, "<"),
Lte => write!(f, "<="),
Eql => write!(f, "=="),
Gte => write!(f, ">="),
Gtn => write!(f, ">"),
Neq => write!(f, "!="),
}
}
}
impl Display for Term {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
if let Some(str) = self.interpret_as_string() {
write!(f, "\"{}\"", str)
} else {
use Term::*;
match self {
Typ { orig: _ } => write!(f, "Type"),
Hlp { orig: _ } => write!(f, "?"),
U60 { orig: _ } => write!(f, "U60"),
Hol { orig: _, .. } => write!(f, "_"),
Var { orig: _, name } => write!(f, "{}", name),
Num { orig: _, numb } => write!(f, "{}", numb),
Lam { orig: _, name, body } => write!(f, "({} => {})", name, body),
Ann { orig: _, expr, tipo } => write!(f, "({} :: {})", expr, tipo),
Op2 { orig: _, oper, val0, val1 } => write!(f, "({} {} {})", oper, val0, val1),
All { orig: _, name, tipo, body } => write!(f, "({}: {}) {}", name, tipo, body),
Let { orig: _, name, expr, body } => write!(f, "(let {} = {}; {})", name, expr, body),
Sub { name, redx, expr, .. } => write!(f, "({} ## {}/{})", expr, name, redx),
Ctr { orig: _, name, args } => write!(f, "({}{})", name, args.iter().map(|x| format!(" {}", x)).collect::<String>()),
Fun { orig: _, name, args } => write!(f, "({}{})", name, args.iter().map(|x| format!(" {}", x)).collect::<String>()),
App { func, argm, .. } => {
let mut args = vec![argm];
let mut expr = func;
while let App { func, argm, .. } = &**expr {
args.push(argm);
expr = func;
}
args.reverse();
write!(f, "({} {})", expr, args.iter().map(|x| format!("{}", x)).collect::<Vec<String>>().join(" "))
}
Mat { .. } => panic!("Internal Error: Cannot display a Term::Mat because it's removed after adjust."),
Open { .. } => panic!("Internal Error: Cannot display a Term::Open because it's removed after adjust."),
}
}
}
}
impl Localized for Term {
fn get_origin(&self) -> Span {
use Term::*;
match self {
Typ { orig } => *orig,
Hlp { orig } => *orig,
U60 { orig } => *orig,
Hol { orig, .. } => *orig,
Var { orig, .. } => *orig,
Num { orig, .. } => *orig,
Lam { orig, .. } => *orig,
Ann { orig, .. } => *orig,
Op2 { orig, .. } => *orig,
All { orig, .. } => *orig,
Let { orig, .. } => *orig,
Sub { orig, .. } => *orig,
Ctr { orig, .. } => *orig,
Fun { orig, .. } => *orig,
App { orig, .. } => *orig,
Mat { orig, .. } => *orig,
Open { orig, .. } => *orig,
}
}
fn set_origin_file(&mut self, file: FileOffset) {
use Term::*;
match self {
Typ { orig } => {
*orig = orig.set_file(file);
}
Hlp { orig } => {
*orig = orig.set_file(file);
}
U60 { orig } => {
*orig = orig.set_file(file);
}
Hol { orig, .. } => {
*orig = orig.set_file(file);
}
Var { orig, .. } => {
*orig = orig.set_file(file);
}
Num { orig, .. } => {
*orig = orig.set_file(file);
}
Lam { orig, body, .. } => {
*orig = orig.set_file(file);
body.set_origin_file(file);
}
Ann { orig, expr, tipo } => {
*orig = orig.set_file(file);
expr.set_origin_file(file);
tipo.set_origin_file(file);
}
Op2 { orig, oper: _, val0, val1 } => {
*orig = orig.set_file(file);
val0.set_origin_file(file);
val1.set_origin_file(file);
}
All { orig, name: _, tipo, body } => {
*orig = orig.set_file(file);
tipo.set_origin_file(file);
body.set_origin_file(file);
}
Let { orig, name: _, expr, body } => {
*orig = orig.set_file(file);
expr.set_origin_file(file);
body.set_origin_file(file);
}
Sub {
orig,
name: _,
indx: _,
redx: _,
expr,
} => {
*orig = orig.set_file(file);
expr.set_origin_file(file);
}
Ctr { orig, name: _, args } => {
*orig = orig.set_file(file);
for arg in args {
arg.set_origin_file(file);
}
}
Fun { orig, name: _, args } => {
*orig = orig.set_file(file);
for arg in args {
arg.set_origin_file(file);
}
}
App { orig, func, argm } => {
*orig = orig.set_file(file);
func.set_origin_file(file);
argm.set_origin_file(file);
}
Mat { orig, expr, cses, moti, .. } => {
*orig = orig.set_file(file);
expr.set_origin_file(file);
for cse in cses {
cse.1.set_origin_file(file);
}
moti.set_origin_file(file);
}
Open { orig, expr, .. } => {
*orig = orig.set_file(file);
expr.set_origin_file(file);
}
}
}
}

File diff suppressed because it is too large.

383
src/checker.rs Normal file
@@ -0,0 +1,383 @@
use crate::book::name::Ident;
use crate::book::span::Span;
use crate::book::term::{Operator, Term};
use crate::book::{Argument, Book, Entry, Rule};
use std::fmt::Write;
pub fn to_checker_oper(oper: &Operator) -> String {
match oper {
Operator::Add => "Kind.Operator.add".to_string(),
Operator::Sub => "Kind.Operator.sub".to_string(),
Operator::Mul => "Kind.Operator.mul".to_string(),
Operator::Div => "Kind.Operator.div".to_string(),
Operator::Mod => "Kind.Operator.mod".to_string(),
Operator::And => "Kind.Operator.and".to_string(),
Operator::Or => "Kind.Operator.or".to_string(),
Operator::Xor => "Kind.Operator.xor".to_string(),
Operator::Shl => "Kind.Operator.shl".to_string(),
Operator::Shr => "Kind.Operator.shr".to_string(),
Operator::Ltn => "Kind.Operator.ltn".to_string(),
Operator::Lte => "Kind.Operator.lte".to_string(),
Operator::Eql => "Kind.Operator.eql".to_string(),
Operator::Gte => "Kind.Operator.gte".to_string(),
Operator::Gtn => "Kind.Operator.gtn".to_string(),
Operator::Neq => "Kind.Operator.neq".to_string(),
}
}
fn hide_orig(orig: &Span, lhs: bool) -> String {
if lhs {
"orig".to_string()
} else {
format!("{}", orig.encode())
}
}
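// `quote` selects between the quoted constructors (Kind.Term.*) and their
// evaluating counterparts (Kind.Term.eval_*); `lhs` marks terms on a rule's
// left-hand side, where origins are kept as the bound `orig` variable instead
// of being encoded as numbers.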
pub fn to_checker_term(term: &Term, quote: bool, lhs: bool) -> String {
match term {
Term::Typ { orig } => {
format!("(Kind.Term.typ {})", hide_orig(orig, lhs))
}
Term::Var { orig, name } => {
if lhs {
format!("{}", name)
} else if quote {
format!("(Kind.Term.set_origin {} {})", orig.encode(), name.clone())
} else {
format!("{}", name.clone()) // spaces to align with quoted version
}
}
Term::All { orig, name, tipo, body } => {
format!(
"(Kind.Term.all {} {} {} λ{} {})",
hide_orig(orig, lhs),
name.encode(),
to_checker_term(tipo, quote, lhs),
name,
to_checker_term(body, quote, lhs)
)
}
Term::Lam { orig, name, body } => {
format!("(Kind.Term.lam {} {} λ{} {})", hide_orig(orig, lhs), name.encode(), name, to_checker_term(body, quote, lhs))
}
Term::App { orig, func, argm } => {
format!(
"({} {} {} {})",
if quote { "Kind.Term.app" } else { "Kind.Term.eval_app" },
hide_orig(orig, lhs),
to_checker_term(func, quote, lhs),
to_checker_term(argm, quote, lhs)
)
}
Term::Let { orig, name, expr, body } => {
format!(
"({} {} {} {} λ{} {})",
if quote { "Kind.Term.let" } else { "Kind.Term.eval_let" },
hide_orig(orig, lhs),
name.encode(),
to_checker_term(expr, quote, lhs),
name,
to_checker_term(body, quote, lhs)
)
}
Term::Ann { orig, expr, tipo } => {
format!(
"({} {} {} {})",
if quote { "Kind.Term.ann" } else { "Kind.Term.eval_ann" },
hide_orig(orig, lhs),
to_checker_term(expr, quote, lhs),
to_checker_term(tipo, quote, lhs)
)
}
Term::Sub { orig, expr, name, indx, redx } => {
format!(
"({} {} {} {} {} {})",
if quote { "Kind.Term.sub" } else { "Kind.Term.eval_sub" },
hide_orig(orig, lhs),
name.encode(),
indx,
redx,
to_checker_term(expr, quote, lhs)
)
}
Term::Ctr { orig, name, args } => {
let mut args_strs: Vec<String> = Vec::new();
for arg in args {
args_strs.push(format!(" {}", to_checker_term(arg, quote, lhs)));
}
if args.len() >= 15 {
format!(
"(Kind.Term.ct{} {}. {} (Kind.Term.args{}{}))",
args.len(),
name,
hide_orig(orig, lhs),
args.len(),
args_strs.join("")
)
} else {
format!("(Kind.Term.ct{} {}. {}{})", args.len(), name, hide_orig(orig, lhs), args_strs.join(""))
}
}
Term::Fun { orig, name, args } => {
let mut args_strs: Vec<String> = Vec::new();
for arg in args {
args_strs.push(format!(" {}", to_checker_term(arg, quote, lhs)));
}
if quote {
if args.len() >= 15 {
format!(
"(Kind.Term.fn{} {}. {}(Kind.Term.args{} {}))",
args.len(),
name,
hide_orig(orig, lhs),
args.len(),
args_strs.join("")
)
} else {
format!("(Kind.Term.fn{} {}. {}{})", args.len(), name, hide_orig(orig, lhs), args_strs.join(""))
}
} else {
format!("(F${} {}{})", name, hide_orig(orig, lhs), args_strs.join(""))
}
}
Term::Hlp { orig } => {
format!("(Kind.Term.hlp {})", hide_orig(orig, lhs))
}
Term::U60 { orig } => {
format!("(Kind.Term.u60 {})", hide_orig(orig, lhs))
}
Term::Num { orig, numb } => {
format!("(Kind.Term.num {} {})", hide_orig(orig, lhs), numb)
}
Term::Op2 { orig, oper, val0, val1 } => {
format!(
"({} {} {} {} {})",
if quote { "Kind.Term.op2" } else { "Kind.Term.eval_op" },
hide_orig(orig, lhs),
to_checker_oper(oper),
to_checker_term(val0, quote, lhs),
to_checker_term(val1, quote, lhs)
)
}
Term::Hol { orig, numb } => {
format!("(Kind.Term.hol {} {})", orig.encode(), numb)
}
Term::Mat { .. } => {
panic!("Internal error: Mat cannot be compiled to a checker because it should be removed in the adjust phase!");
}
Term::Open { .. } => {
panic!("Internal error: Open cannot be compiled to a checker because it should be removed in the adjust phase!");
}
}
}
fn to_checker_rule_chk(rule: &Rule, index: usize, vars: &mut u64, args: &mut Vec<String>) -> String {
if index < rule.pats.len() {
let (inp_patt_str, var_patt_str) = to_checker_patt_chk(&rule.pats[index], vars);
args.push(var_patt_str);
let head = inp_patt_str;
let tail = to_checker_rule_chk(rule, index + 1, vars, args);
format!("(Kind.Rule.lhs {} {})", head, tail)
} else {
format!(
"(Kind.Rule.rhs (QT{} {}. 0{}))",
index,
rule.name,
args.iter().map(|x| format!(" {}", x)).collect::<Vec<String>>().join("")
)
}
}
fn to_checker_patt_chk(patt: &Term, vars: &mut u64) -> (String, String) {
// FIXME: remove redundancy
match patt {
Term::Var { orig, name } => {
let inp = format!("(Kind.Term.var {} {} {})", orig.encode(), name.encode(), vars);
let var = format!("(Kind.Term.var {} {} {})", orig.encode(), name.encode(), vars);
*vars += 1;
(inp, var)
}
Term::Ctr { orig, name, args } => {
let mut inp_args_str = String::new();
let mut var_args_str = String::new();
for arg in args {
let (inp_arg_str, var_arg_str) = to_checker_patt_chk(arg, vars);
write!(inp_args_str, " {}", inp_arg_str).ok();
write!(var_args_str, " {}", var_arg_str).ok();
}
if args.len() >= 15 {
let inp_str = format!("(Kind.Term.ct{} {}. {} (Kind.Term.args{}{}))", args.len(), name, orig.encode(), args.len(), inp_args_str);
let var_str = format!("(Kind.Term.ct{} {}. {} (Kind.Term.args{}{}))", args.len(), name, orig.encode(), args.len(), var_args_str);
(inp_str, var_str)
} else {
let inp_str = format!("(Kind.Term.ct{} {}. {}{})", args.len(), name, orig.encode(), inp_args_str);
let var_str = format!("(Kind.Term.ct{} {}. {}{})", args.len(), name, orig.encode(), var_args_str);
(inp_str, var_str)
}
}
Term::Num { orig, numb } => {
let inp = format!("(Kind.Term.num {} {})", orig.encode(), numb);
let var = format!("(Kind.Term.num {} {})", orig.encode(), numb);
(inp, var)
}
_ => {
// TODO: This should return a proper error instead of panicking
panic!("Invalid left-hand side pattern: {}", patt);
}
}
}
fn to_checker_rule_end(name: &Ident, size: u64) -> String {
let mut vars = vec![];
for idx in 0..size {
vars.push(format!(" x{}", idx));
}
let mut text = String::new();
if size >= 15 {
writeln!(
text,
"(Q${} orig{}) = (Kind.Term.fn{} {}. orig (Kind.Term.args{}{}))",
name,
vars.join(""),
size,
name,
size,
vars.join("")
)
.ok();
writeln!(
text,
"(F${} orig{}) = (Kind.Term.fn{} {}. orig (Kind.Term.args{}{}))",
name,
vars.join(""),
size,
name,
size,
vars.join("")
)
.ok();
} else {
writeln!(text, "(Q${} orig{}) = (Kind.Term.fn{} {}. orig{})", name, vars.join(""), size, name, vars.join("")).ok();
writeln!(text, "(F${} orig{}) = (Kind.Term.fn{} {}. orig{})", name, vars.join(""), size, name, vars.join("")).ok();
}
text
}
fn to_checker_type(args: &Vec<Box<Argument>>, tipo: &Term, index: usize) -> String {
if index < args.len() {
let arg = &args[index];
format!(
"(Kind.Term.all {} {} {} λ{} {})",
0,
arg.name.encode(),
to_checker_term(&arg.tipo, true, false),
arg.name,
to_checker_type(args, tipo, index + 1)
)
} else {
to_checker_term(tipo, true, false)
}
}
fn to_checker_rule(rule: &Rule) -> String {
let mut pats = vec![];
for pat in &rule.pats {
pats.push(format!(" {}", to_checker_term(pat, false, true)));
}
let body_rhs = to_checker_term(&rule.body, true, false);
let rule_rhs = to_checker_term(&rule.body, false, false);
let mut text = String::new();
writeln!(text, "(Q${} orig{}) = {}", rule.name, pats.join(""), body_rhs).ok();
if rule.name.0 == "HVM.log" {
write!(text, "(F$HVM.log orig a r log ret) = (HVM.put (Kind.Term.show log) ret)").ok();
} else {
writeln!(text, "(F${} orig{}) = {}", rule.name, pats.join(""), rule_rhs).ok();
}
//for size in 0 .. 9 {
//let mut vars = vec![];
//for idx in 0 .. size {
//vars.push(format!(" x{}", idx));
//}
//write!(result,"(QT{} name orig{}) = (Fn{} name orig{})\n", size, vars.join(""), size, vars.join(""));
//write!(result,"(FN{} name orig{}) = (Fn{} name orig{})\n", size, vars.join(""), size, vars.join(""));
//}
text
}
pub fn to_checker_entry(entry: &Entry) -> String {
let mut result = String::new();
writeln!(result, "(NameOf {}.) = \"{}\"", entry.name, entry.name).ok();
writeln!(result, "(HashOf {}.) = %{}", entry.name, entry.name).ok();
writeln!(result, "(TypeOf {}.) = {}", entry.name, to_checker_type(&entry.args, &entry.tipo, 0)).ok();
let base_vars = (0..entry.args.len()).map(|x| format!(" x{}", x)).collect::<Vec<String>>().join("");
if entry.args.len() >= 15 {
writeln!(
result,
"(Kind.Term.FN{} {}. orig (Kind.Term.args{}{})) = (F${} orig{})",
entry.args.len(),
entry.name,
entry.args.len(),
base_vars,
entry.name,
base_vars
)
.ok();
} else {
writeln!(
result,
"(Kind.Term.FN{} {}. orig{}) = (F${} orig{})",
entry.args.len(),
entry.name,
base_vars,
entry.name,
base_vars
)
.ok();
}
writeln!(result, "(QT{} {}. orig{}) = (Q${} orig{})", entry.args.len(), entry.name, base_vars, entry.name, base_vars).ok();
for rule in &entry.rules {
write!(result, "{}", &to_checker_rule(rule)).ok();
}
if !entry.rules.is_empty() {
write!(result, "{}", &to_checker_rule_end(&entry.name, entry.rules[0].pats.len() as u64,)).ok();
}
write!(result, "(RuleOf {}.) =", entry.name).ok();
for rule in &entry.rules {
write!(result, " (List.cons {}", to_checker_rule_chk(rule, 0, &mut 0, &mut vec![])).ok();
}
write!(result, " List.nil{}", ")".repeat(entry.rules.len())).ok();
result
}
// Seeing what can be done to
pub fn to_checker_book(book: &Book) -> String {
let mut result = String::new();
writeln!(result, "// NOTE: functions with names starting with 'F$' are evaluated differently by the").ok();
writeln!(result, "// HVM, as a specific optimization targetting Kind2. See 'HOAS_OPT' on HVM's code.\n").ok();
writeln!(result, "Functions =").ok();
writeln!(result, " let fns = List.nil").ok();
for name in &book.names {
let entry = book.entrs.get(&Ident(name.to_string())).unwrap();
writeln!(result, " let fns = (List.cons {}. fns)", entry.name).ok();
}
result.push_str(" fns\n\n");
for name in &book.names {
let entry = book.entrs.get(&Ident(name.to_string())).unwrap();
write!(result, "\n// {}", name).ok();
writeln!(result, "\n// {}", "-".repeat(name.len())).ok();
writeln!(result).ok();
write!(result, "{}", &to_checker_entry(entry)).ok();
writeln!(result).ok();
}
write!(result, "HoleInit = {}", book.holes).ok();
result
}

3
src/codegen.rs Normal file
@@ -0,0 +1,3 @@
pub mod kdl;
pub mod hvm;

129
src/codegen/hvm.rs Normal file
@@ -0,0 +1,129 @@
use crate::book::name::Ident;
use crate::book::term::{Operator, Term};
use crate::book::{Book, Entry, Rule};
pub fn to_hvm_term(book: &Book, term: &Term) -> String {
if let Some(as_string) = term.interpret_as_string() {
return format!("\"{}\"", as_string);
}
match term {
Term::Typ { .. } => "Type".to_string(),
Term::Var { orig: _, name } => name.to_string(),
Term::Lam { orig: _, name, body } => {
let body = to_hvm_term(book, body);
format!("@{} {}", name, body)
}
Term::App { orig: _, func, argm } => {
let func = to_hvm_term(book, func);
let argm = to_hvm_term(book, argm);
format!("({} {})", func, argm)
}
Term::All { orig: _, name: _, tipo: _, body } => {
let _body = to_hvm_term(book, body);
"0".to_string()
}
Term::Let { orig: _, name, expr, body } => {
let expr = to_hvm_term(book, expr);
let body = to_hvm_term(book, body);
format!("let {} = {}; {}", name, expr, body)
}
Term::Ann { orig: _, expr, tipo: _ } => to_hvm_term(book, expr),
Term::Sub {
orig: _,
expr,
name: _,
indx: _,
redx: _,
} => to_hvm_term(book, expr),
Term::Ctr { orig: _, name, args } => {
let entr = book.entrs.get(name).unwrap();
let args = args.iter().enumerate().filter(|(i, _x)| !entr.args[*i].eras).map(|x| &**x.1).collect::<Vec<&Term>>();
format!("({}{})", name, args.iter().map(|x| format!(" {}", to_hvm_term(book, x))).collect::<String>())
}
Term::Fun { orig: _, name, args } => {
let entr = book.entrs.get(name).unwrap();
let args = args.iter().enumerate().filter(|(i, _x)| !entr.args[*i].eras).map(|x| &**x.1).collect::<Vec<&Term>>();
format!("({}{})", name, args.iter().map(|x| format!(" {}", to_hvm_term(book, x))).collect::<String>())
}
Term::Hlp { orig: _ } => "0".to_string(),
Term::U60 { orig: _ } => "0".to_string(),
Term::Num { orig: _, numb } => {
format!("{}", numb)
}
Term::Op2 { orig: _, oper, val0, val1 } => {
let val0 = to_hvm_term(book, val0);
let val1 = to_hvm_term(book, val1);
format!("({} {} {})", oper, val0, val1)
}
Term::Hol { orig: _, numb: _ } => "_".to_string(),
Term::Mat { .. } => panic!("Internal error: Term::Mat is removed after adjust"),
Term::Open { .. } => panic!("Internal error: Term::Open is removed after adjust")
}
}
pub fn to_hvm_oper(oper: &Operator) -> String {
match oper {
Operator::Add => "+".to_string(),
Operator::Sub => "-".to_string(),
Operator::Mul => "*".to_string(),
Operator::Div => "/".to_string(),
Operator::Mod => "%".to_string(),
Operator::And => "&".to_string(),
Operator::Or => "|".to_string(),
Operator::Xor => "^".to_string(),
Operator::Shl => "<<".to_string(),
Operator::Shr => ">>".to_string(),
Operator::Ltn => "<".to_string(),
Operator::Lte => "<=".to_string(),
Operator::Eql => "==".to_string(),
Operator::Gte => ">=".to_string(),
Operator::Gtn => ">".to_string(),
Operator::Neq => "!=".to_string(),
}
}
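// Erased arguments are dropped when generating HVM rules and constructor
// calls, so the compiled code only carries the runtime-relevant fields.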
pub fn to_hvm_rule(book: &Book, rule: &Rule) -> String {
let name = &rule.name;
let entry = book.entrs.get(name).unwrap();
let mut pats = vec![];
for (arg, pat) in entry.args.iter().zip(rule.pats.iter()) {
if !arg.eras {
pats.push(" ".to_string());
pats.push(to_hvm_term(book, pat));
}
}
let body = to_hvm_term(book, &rule.body);
format!("({}{}) = {}", name, pats.join(""), body)
}
pub fn to_hvm_entry(book: &Book, entry: &Entry) -> String {
let kind_name = if let Some(kdln) = &entry.kdln {
Ident(format!("{} #{}", entry.name, kdln))
} else {
entry.name.clone()
};
let hvm_name = &entry.name;
if hvm_name.0 == "HVM.log" {
return "".to_string();
}
let mut args = vec![];
for arg in &entry.args {
args.push(format!(" {}({}: {})", if arg.eras { "-" } else { "" }, arg.name, &arg.tipo));
}
if !entry.rules.is_empty() {
let mut rules = vec![];
for rule in &entry.rules {
rules.push(format!("\n{}", to_hvm_rule(book, rule)));
}
return format!("// {}{} : {}{}\n\n", kind_name, args.join(""), &entry.tipo, rules.join(""));
}
"".to_string()
}
pub fn to_hvm_book(book: &Book) -> String {
let mut lines = vec![];
for name in &book.names {
lines.push(to_hvm_entry(book, book.entrs.get(&Ident(name.to_string())).unwrap()));
}
lines.join("")
}

217
src/codegen/kdl.rs Normal file
@@ -0,0 +1,217 @@
mod book;
use crate::book::name::Ident;
use crate::book::Book;
pub use crate::codegen::kdl::book::*;
use rand::Rng;
use std::collections::{HashMap, HashSet};
pub const KDL_NAME_LEN: usize = 12;
pub fn to_kdl_term(kdl_names: &HashMap<String, String>, term: &CompTerm) -> Result<String, String> {
let term = match term {
CompTerm::Var { name } => name.clone(),
CompTerm::Lam { name, body } => {
let body = to_kdl_term(kdl_names, body)?;
format!("@{} {}", name, body)
}
CompTerm::App { func, argm } => {
let func = to_kdl_term(kdl_names, func)?;
let argm = to_kdl_term(kdl_names, argm)?;
format!("({} {})", func, argm)
}
CompTerm::Dup { nam0, nam1, expr, body } => {
let expr = to_kdl_term(kdl_names, expr)?;
let body = to_kdl_term(kdl_names, body)?;
format!("dup {} {} = {}; {}", nam0, nam1, expr, body)
}
CompTerm::Let { name, expr, body } => {
let expr = to_kdl_term(kdl_names, expr)?;
let body = to_kdl_term(kdl_names, body)?;
format!("let {} = {}; {}", name, expr, body)
}
CompTerm::Ctr { name, args } => {
let kdl_name = kdl_names.get(name).unwrap_or_else(|| panic!("{}", name));
let args = args.iter().map(|x| to_kdl_term(kdl_names, x)).collect::<Result<Vec<String>, String>>()?;
let args = args.iter().map(|x| format!(" {}", x)).collect::<String>();
format!("{{{}{}}}", kdl_name, args)
}
CompTerm::Fun { name, args } => {
let kdl_name = kdl_names.get(name).unwrap_or_else(|| panic!("{}", name));
let args = args.iter().map(|x| to_kdl_term(kdl_names, x)).collect::<Result<Vec<String>, String>>()?;
let args = args.iter().map(|x| format!(" {}", x)).collect::<String>();
format!("({}{})", kdl_name, args)
}
CompTerm::Num { numb } => {
format!("#{}", numb)
}
CompTerm::Op2 { oper, val0, val1 } => {
let val0 = to_kdl_term(kdl_names, val0)?;
let val1 = to_kdl_term(kdl_names, val1)?;
format!("({} {} {})", oper, val0, val1)
}
CompTerm::Nil => {
return Err("Found nil term in compiled term while converting to kindelia".to_string());
}
};
Ok(term)
}
pub fn to_kdl_rule(_book: &Book, kdl_names: &HashMap<String, String>, rule: &CompRule) -> Result<String, String> {
let name = &rule.name;
let kdl_name = kdl_names.get(name).unwrap();
let mut pats = vec![]; // stringified pattern args
for pat in rule.pats.iter() {
let pat = to_kdl_term(kdl_names, pat)?;
pats.push(" ".to_string());
pats.push(pat);
}
let body = to_kdl_term(kdl_names, &rule.body)?;
let rule = format!("({}{}) = {}", kdl_name, pats.join(""), body);
Ok(rule)
}
pub fn to_kdl_entry(book: &Book, kdl_names: &HashMap<String, String>, entry: &CompEntry) -> Result<String, String> {
let entry = match entry.name.as_str() {
_ => {
let kdl_name = kdl_names.get(&entry.name).unwrap();
let args_names = entry.args.iter().map(|arg| format!(" {}", arg)).collect::<String>();
// If this entry existed in the original kind code, add some annotations as comments
let kind_entry = book.entrs.get(&Ident(entry.name.clone()));
let is_knd_ent = matches!(kind_entry, Some(_));
let cmnt = if is_knd_ent {
let kind_entry = kind_entry.unwrap();
let args_typed = kind_entry
.args
.iter()
.map(|arg| format!(" {}({}: {})", if arg.eras { "-" } else { "" }, arg.name, &arg.tipo))
.collect::<String>();
let kind_name = format!("{} #{}", entry.name, kdl_name);
format!("// {}{} : {}\n", kind_name, args_typed, &kind_entry.tipo)
} else {
String::new()
};
// Entries with no rules become constructors
// Entries with rules become functions
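// e.g. (hypothetical names) a constructor entry is emitted as
//   ctr {Pair fst snd}
// while a function entry is emitted as
//   fun (Not b) {
//     (Not {True}) = {False}
//     (Not {False}) = {True}
//   }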
let fun = if entry.rules.is_empty() {
format!("ctr {{{}{}}}\n\n", kdl_name, args_names)
} else {
let mut rules = vec![];
for rule in &entry.rules {
rules.push(format!("\n {}", to_kdl_rule(book, kdl_names, rule)?));
}
format!("fun ({}{}) {{{}\n}}\n\n", kdl_name, args_names, rules.join(""))
};
cmnt + &fun
}
};
Ok(entry)
}
pub fn to_kdl_book(book: &Book, kdl_names: &HashMap<String, String>, comp_book: &CompBook) -> Result<String, String> {
let mut lines = vec![];
let mut run = String::new();
let gen_blk_names: HashSet<String> = HashSet::from_iter(["Unit.new", "Pair.new", "U60.if", "Kindelia.IO.done", "Kindelia.IO.do_save", "Kindelia.IO.do_take"].map(String::from));
for name in &comp_book.names {
let entry = comp_book.entrs.get(name).unwrap();
// Skip names in the genesis block
// TODO: Do this through some entry attribute, like how kdl names are done
if gen_blk_names.contains(&entry.name) {
continue;
}
// Main is compiled to a run block, which goes at the end
// TODO: Maybe we should have run blocks come from a specific type of function instead
if name == "Main" {
let stmnt = format!("run {{\n {}\n}}\n\n", to_kdl_term(kdl_names, &*entry.rules[0].body)?);
run.push_str(&stmnt);
continue;
}
lines.push(to_kdl_entry(book, kdl_names, entry)?);
}
Ok(lines.join("") + &run)
}
// Utils
// -----
// Returns a map of kind names to kindelia names
// Returns an err if any of the names can't be converted
pub fn get_kdl_names(book: &CompBook, namespace: &Option<String>) -> Result<HashMap<String, String>, String> {
// Fits a name to the max size allowed by kindelia.
// If the name is too large, truncates and replaces the last characters by random chars.
fn rand_shorten(name: &String, ns: &str) -> Result<String, String> {
let max_fn_name = KDL_NAME_LEN - ns.len();
// If the name doesn't fit, truncate and insert some random characters at the end
let name = if name.len() > max_fn_name {
let n_rnd_chrs = usize::min(3, max_fn_name);
let name_cut = name[..max_fn_name - n_rnd_chrs].to_string();
let mut rng = rand::thread_rng();
let rnd_chrs = (0..n_rnd_chrs).map(|_| rng.gen_range(0..63)).map(encode_base64).collect::<String>();
format!("{}{}", name_cut, rnd_chrs)
} else {
name.clone()
};
Ok(format!("{}{}", ns, name))
}
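// Illustrative example (hypothetical names): with namespace "Ns." (3 chars) the budget is
// 12 - 3 = 9 characters, so "Long_function_name" is cut to "Long_f" and 3 random base64
// characters are appended, giving something like "Ns.Long_fX3q".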
fn get_kdl_name(entry: &CompEntry, ns: &str) -> Result<String, String> {
let kind_name = &entry.name;
// If the entry uses a kindelia name, use it
let kdln = if let Some(kdln) = &entry.kdln {
if !kdln.chars().next().unwrap().is_uppercase() {
let err = format!("Kindelia name \"{}\" doesn't start with an uppercase letter.", kdln);
return Err(err);
}
if entry.orig {
let max_len = KDL_NAME_LEN - ns.len();
if kdln.len() > max_len {
let mut err = format!("Kindelia name \"{}\" for \"{}\" has more than {} characters.", kdln, kind_name, max_len);
if ns.len() > 0 {
err = format!("{} (Namespace \"{}\" has {})", err, ns, ns.len());
}
return Err(err);
}
format!("{}{}", ns, kdln)
} else {
// For entries created by the flattener, we shorten even the kindelia name
// TODO: Since these rules can come first,
// if the kdln is too large the err will happen in the generated function,
// potentially confusing the user.
rand_shorten(kdln, ns)?
}
}
// Otherwise, try to fit the normal kind name
else {
rand_shorten(&kind_name.replace('.', "_"), ns)?
};
Ok(kdln)
}
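// Maps 0..=63 to a URL-safe alphabet: '0'-'9', 'A'-'Z', 'a'-'z' and '_'
// (e.g. encode_base64(0) == '0', encode_base64(10) == 'A', encode_base64(61) == 'z').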
fn encode_base64(num: u8) -> char {
match num {
0..=9 => (num + b'0') as char,
10..=35 => (num - 10 + b'A') as char,
36..=61 => (num - 36 + b'a') as char,
62.. => '_',
}
}
let mut errors = Vec::new();
let mut kdl_names = HashMap::new();
let ns = namespace.as_ref().map_or(String::new(), |ns| format!("{}.", ns));
for name in &book.names {
let entry = book.entrs.get(name).unwrap();
let kdln = get_kdl_name(entry, &ns);
match kdln {
Ok(kdln) => kdl_names.insert(name.clone(), kdln).map(|_| ()).unwrap_or(()),
Err(err) => errors.push(err),
}
}
if errors.is_empty() {
Ok(kdl_names)
} else {
Err(errors.join("\n"))
}
}

926
src/codegen/kdl/book.rs Normal file

@ -0,0 +1,926 @@
use crate::book::name::Ident;
use crate::book::term::{Operator, Term};
use crate::book::{Entry, Rule};
use crate::codegen::kdl::Book;
use std::collections::HashMap;
use std::collections::HashSet;
#[derive(Clone, Debug)]
pub enum CompTerm {
Var {
name: String,
},
Lam {
name: String,
body: Box<CompTerm>,
},
App {
func: Box<CompTerm>,
argm: Box<CompTerm>,
},
Dup {
nam0: String,
nam1: String,
expr: Box<CompTerm>,
body: Box<CompTerm>,
},
Let {
name: String,
expr: Box<CompTerm>,
body: Box<CompTerm>,
},
Ctr {
name: String,
args: Vec<Box<CompTerm>>,
},
Fun {
name: String,
args: Vec<Box<CompTerm>>,
},
Num {
numb: u128,
},
Op2 {
oper: Operator,
val0: Box<CompTerm>,
val1: Box<CompTerm>,
},
Nil,
}
#[derive(Clone, Debug)]
pub struct CompRule {
pub name: String,
pub pats: Vec<Box<CompTerm>>,
pub body: Box<CompTerm>,
}
#[derive(Clone, Debug)]
pub struct CompEntry {
pub name: String,
pub kdln: Option<String>,
pub args: Vec<String>,
pub rules: Vec<CompRule>,
pub orig: bool,
}
#[derive(Clone, Debug)]
pub struct CompBook {
pub names: Vec<String>,
pub entrs: HashMap<String, CompEntry>,
}
pub fn compile_book(book: &Book) -> Result<CompBook, String> {
let mut comp_book = CompBook {
names: Vec::new(),
entrs: HashMap::new(),
};
for name in &book.names {
let entry = book.entrs.get(&Ident(name.clone())).unwrap();
// Don't compile primitive U120 operations
// TODO: If this compiler eventually gets used for other targets (like HVM), this will need to be separated.
// We could do passes of compiler features (like flattening, linearizing, etc) also separately.
if u120_to_oper(&entry.name.0).is_some() {
continue;
}
// Skip over useless entries
// TODO: This doesn't cover all cases. We need something like `erase` but for a Book.
// Also maybe there are functions of type Type that should be compiled?
if let Term::Typ { orig: _ } = &*entry.tipo {
continue;
}
let entrs = compile_entry(book, entry)?;
for entry in entrs {
comp_book.names.push(entry.name.clone());
comp_book.entrs.insert(entry.name.clone(), entry);
}
}
Ok(comp_book)
}
// Can become multiple entries after flattening
pub fn compile_entry(book: &Book, entry: &Entry) -> Result<Vec<CompEntry>, String> {
fn compile_rule(book: &Book, entry: &Entry, rule: &Rule) -> CompRule {
let name = rule.name.0.clone();
let mut pats = Vec::new();
for (arg, pat) in entry.args.iter().zip(rule.pats.iter()) {
if !arg.eras {
let pat = erase(book, pat);
// TODO: Check if the pattern has some invalid term (anything other than num, ctr or var)
pats.push(pat);
}
}
let body = erase(book, &rule.body);
CompRule { name, pats, body }
}
fn make_u120_new() -> CompEntry {
// U120.new hi lo = (+ (<< hi 60) (>> (<< lo 60) 60))
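// (illustration) hi's low 60 bits are shifted into the upper half; shifting lo left and then
// right by 60 masks it to its low 60 bits, so the sum packs (hi, lo) into one 120-bit word.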
CompEntry {
name: "U120.new".to_string(),
kdln: None,
args: vec!["hi".to_string(), "lo".to_string()],
rules: vec![CompRule {
name: "U120.new".to_string(),
pats: vec![Box::new(CompTerm::Var { name: "hi".to_string() }), Box::new(CompTerm::Var { name: "lo".to_string() })],
body: Box::new(CompTerm::Op2 {
oper: Operator::Add,
val0: Box::new(CompTerm::Op2 {
oper: Operator::Shl,
val0: Box::new(CompTerm::Var { name: "hi".to_string() }),
val1: Box::new(CompTerm::Num { numb: 60 }),
}),
val1: Box::new(CompTerm::Op2 {
oper: Operator::Shr,
val0: Box::new(CompTerm::Op2 {
oper: Operator::Shl,
val0: Box::new(CompTerm::Var { name: "lo".to_string() }),
val1: Box::new(CompTerm::Num { numb: 60 }),
}),
val1: Box::new(CompTerm::Num { numb: 60 }),
}),
}),
}],
orig: true,
}
}
fn make_u120_low() -> CompEntry {
// U120.low n = (>> (<< n 60) 60))
CompEntry {
name: "U120.low".to_string(),
kdln: None,
args: vec!["n".to_string()],
rules: vec![CompRule {
name: "U120.low".to_string(),
pats: vec![Box::new(CompTerm::Var { name: "n".to_string() })],
body: Box::new(CompTerm::Op2 {
oper: Operator::Shr,
val0: Box::new(CompTerm::Op2 {
oper: Operator::Shl,
val0: Box::new(CompTerm::Var { name: "n".to_string() }),
val1: Box::new(CompTerm::Num { numb: 60 }),
}),
val1: Box::new(CompTerm::Num { numb: 60 }),
}),
}],
orig: true,
}
}
fn make_u120_high() -> CompEntry {
// U120.high n = (>> n 60)
CompEntry {
name: "U120.high".to_string(),
kdln: None,
args: vec!["n".to_string()],
rules: vec![CompRule {
name: "U120.high".to_string(),
pats: vec![Box::new(CompTerm::Var { name: "n".to_string() })],
body: Box::new(CompTerm::Op2 {
oper: Operator::Shr,
val0: Box::new(CompTerm::Var { name: "n".to_string() }),
val1: Box::new(CompTerm::Num { numb: 60 }),
}),
}],
orig: true,
}
}
match entry.name.0.as_str() {
// Some U120 functions should have a special compilation
// U120.new becomes a special function that joins two numbers as if they were U60s
"U120.new" => Ok(vec![make_u120_new()]),
// TODO: We could rewrite these both to not need this workaround, but it would become rather slow on normal HVM (~100 rewrites instead of 1)
// high and low are used for type compatibility with u60
"U120.high" => Ok(vec![make_u120_high()]),
"U120.low" => Ok(vec![make_u120_low()]),
_ => {
let new_entry = CompEntry {
name: entry.name.0.clone(),
kdln: entry.kdln.clone(),
args: entry.args.iter().filter(|x| !x.eras).map(|x| x.name.0.clone()).collect(),
rules: entry.rules.iter().map(|rule| compile_rule(book, entry, rule)).collect(),
orig: true,
};
// TODO: We probably need to handle U60 separately as well.
// Since they compile to U120, it won't overflow as expected and conversion to signed will fail.
let new_entry = convert_u120_entry(new_entry)?;
let mut new_entrs = flatten(new_entry);
for entry in &mut new_entrs {
for rule in &mut entry.rules {
linearize_rule(rule);
}
}
Ok(new_entrs)
}
}
}
// Splits an entry with rules with nested cases into multiple entries with flattened rules.
pub fn flatten(entry: CompEntry) -> Vec<CompEntry> {
fn post_inc(n: &mut u64) -> u64 {
let old_n = *n;
*n += 1;
old_n
}
fn must_split(rule: &CompRule) -> bool {
for pat in &rule.pats {
if let CompTerm::Ctr { args, .. } = &**pat {
for arg in args {
if matches!(&**arg, CompTerm::Ctr { .. } | CompTerm::Num { .. }) {
return true;
}
}
}
}
false
}
// Returns (compatible, same_shape): whether the two rules can match together, and whether their patterns have the same shape
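// e.g. (Foo (Tic) x) and (Foo (Tac) y) do not match together (different constructors), while
// (Foo (Bar a) x) and (Foo y z) match together but do not have the same shape.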
fn matches_together(a: &CompRule, b: &CompRule) -> (bool, bool) {
let mut same_shape = true;
for (a_pat, b_pat) in a.pats.iter().zip(&b.pats) {
match (&**a_pat, &**b_pat) {
(CompTerm::Ctr { name: a_name, .. }, CompTerm::Ctr { name: b_name, .. }) => {
if a_name != b_name {
return (false, false);
}
}
(CompTerm::Num { numb: a_numb }, CompTerm::Num { numb: b_numb }) => {
if a_numb != b_numb {
return (false, false);
}
}
(CompTerm::Ctr { .. }, CompTerm::Num { .. }) => {
return (false, false);
}
(CompTerm::Num { .. }, CompTerm::Ctr { .. }) => {
return (false, false);
}
(CompTerm::Ctr { .. }, CompTerm::Var { .. }) => {
same_shape = false;
}
(CompTerm::Num { .. }, CompTerm::Var { .. }) => {
same_shape = false;
}
_ => {}
}
}
(true, same_shape)
}
fn split_rule(rule: &CompRule, entry: &CompEntry, i: usize, name_count: &mut u64, skip: &mut HashSet<usize>) -> (CompRule, Vec<CompEntry>) {
// Each rule that must be split creates a new entry that inspects one layer of Ctrs
// The old rule is rewritten to be flat and call the new entry
let n = post_inc(name_count);
let new_entry_name = format!("{}{}_", entry.name, n);
let new_entry_kdln = entry.kdln.clone().map(|kdln| format!("{}{}_", kdln, n));
let mut new_entry_rules: Vec<CompRule> = Vec::new();
// Rewrite the old rule to be flat and point to the new entry
let mut old_rule_pats: Vec<Box<CompTerm>> = Vec::new();
let mut old_rule_body_args: Vec<Box<CompTerm>> = Vec::new();
let mut var_count = 0;
for pat in &rule.pats {
match &**pat {
CompTerm::Ctr { name: pat_name, args: pat_args } => {
let mut new_pat_args = Vec::new();
for field in pat_args {
let arg = match &**field {
CompTerm::Ctr { .. } | CompTerm::Num { .. } => {
let name = format!(".{}", post_inc(&mut var_count));
Box::new(CompTerm::Var { name })
}
CompTerm::Var { .. } => field.clone(),
_ => {
panic!("?");
}
};
new_pat_args.push(arg.clone());
old_rule_body_args.push(arg);
}
old_rule_pats.push(Box::new(CompTerm::Ctr {
name: pat_name.clone(),
args: new_pat_args,
}));
}
CompTerm::Var { name } => {
old_rule_pats.push(Box::new(*pat.clone()));
old_rule_body_args.push(Box::new(CompTerm::Var { name: name.clone() }));
}
// TODO: It'd be better to check for Num and handle other (invalid) options
_ => {}
}
}
let old_rule_body = Box::new(CompTerm::Fun {
name: new_entry_name.clone(),
args: old_rule_body_args,
});
let old_rule = CompRule {
name: entry.name.clone(),
pats: old_rule_pats,
body: old_rule_body,
};
//(Foo Tic (Bar a b) (Haz c d)) = A
//(Foo Tic x y) = B
//---------------------------------
//(Foo Tic (Bar a b) (Haz c d)) = B[x <- (Bar a b), y <- (Haz c d)]
//
//(Foo.0 a b c d) = ...
// Check the rules to see if there's any that will be covered by the new entry, including the rule itself.
// Skips previously checked rules to avoid duplication.
// For each unique matching rule, creates a new flattening rule for the entry.
// Ex: (Fun (Ctr1 (Ctr2))) and (Fun (Ctr1 (Ctr3))) will both flatten to (Fun (Ctr1 .0)) and can be merged
for (j, other) in entry.rules.iter().enumerate().skip(i) {
let (compatible, same_shape) = matches_together(rule, other);
if compatible {
// (Foo a (B x P) (C y0 y1)) = F
// (Foo (A k) (B x Q) y ) = G
// -----------------------------
// (Foo a (B x u) (C y0 y1)) = (Foo.0 a x u y0 y1)
// (Foo.0 a x P y0 y1) = F
// (Foo.0 (A k) x Q f0 f1) = G [y <- (C f0 f1)] // f0 and f1 are fresh
// Skip identical rules
if same_shape {
skip.insert(j);
}
let mut new_rule_pats = Vec::new();
let mut new_rule_body = other.body.clone();
for (rule_pat, other_pat) in rule.pats.iter().zip(&other.pats) {
match (&**rule_pat, &**other_pat) {
(CompTerm::Ctr { .. }, CompTerm::Ctr { args: other_pat_args, .. }) => {
// Bring the arguments of a constructor outside
new_rule_pats.extend(other_pat_args.clone());
}
(
CompTerm::Ctr {
name: rule_pat_name,
args: rule_pat_args,
},
CompTerm::Var { name: other_pat_name },
) => {
let mut new_ctr_args = vec![];
for _ in 0..rule_pat_args.len() {
let new_arg = CompTerm::Var {
name: format!(".{}", post_inc(&mut var_count)),
};
new_ctr_args.push(Box::new(new_arg.clone()));
new_rule_pats.push(Box::new(new_arg));
}
let new_ctr = CompTerm::Ctr {
name: rule_pat_name.clone(),
args: new_ctr_args,
};
subst(&mut new_rule_body, other_pat_name, &new_ctr);
}
(CompTerm::Var { .. }, _) => {
new_rule_pats.push(other_pat.clone());
}
(CompTerm::Num { numb: rule_pat_numb }, CompTerm::Num { numb: other_pat_numb }) => {
if rule_pat_numb == other_pat_numb {
new_rule_pats.push(Box::new(*other_pat.clone()));
} else {
panic!("Internal error. Please report.");
// not possible since it matches
}
}
(CompTerm::Num { .. }, CompTerm::Var { name: other_pat_name }) => {
subst(&mut new_rule_body, other_pat_name, rule_pat);
}
_ => {
panic!("Internal error. Please report."); // not possible since it matches
}
}
}
let new_rule = CompRule {
name: new_entry_name.clone(),
pats: new_rule_pats,
body: new_rule_body,
};
new_entry_rules.push(new_rule);
}
}
assert!(!new_entry_rules.is_empty()); // There's at least one rule, since rules always match with themselves
let new_entry_args = (0..new_entry_rules[0].pats.len()).map(|n| format!("x{}", n)).collect();
let new_entry = CompEntry {
name: new_entry_name,
kdln: new_entry_kdln,
args: new_entry_args,
rules: new_entry_rules,
orig: false,
};
let new_split_entries = flatten(new_entry);
(old_rule, new_split_entries)
}
let mut name_count = 0;
let mut skip: HashSet<usize> = HashSet::new();
let mut new_entries: Vec<CompEntry> = Vec::new();
let mut old_entry_rules: Vec<CompRule> = Vec::new();
let old_entry_args: Vec<String> = entry.args.clone();
for i in 0..entry.rules.len() {
if !skip.contains(&i) {
let rule = &entry.rules[i];
if must_split(rule) {
let (old_rule, split_entries) = split_rule(rule, &entry, i, &mut name_count, &mut skip);
old_entry_rules.push(old_rule);
new_entries.extend(split_entries);
} else {
old_entry_rules.push(entry.rules[i].clone());
}
}
}
let old_entry = CompEntry {
name: entry.name,
kdln: entry.kdln,
args: old_entry_args,
rules: old_entry_rules,
orig: entry.orig,
};
new_entries.push(old_entry);
new_entries
}
// Substitute all instances of a variable in a term with another term
pub fn subst(term: &mut CompTerm, sub_name: &str, value: &CompTerm) {
match term {
CompTerm::Var { name } => {
if sub_name == name {
*term = value.clone();
}
}
CompTerm::Dup { nam0, nam1, expr, body } => {
subst(&mut *expr, sub_name, value);
if nam0 != sub_name && nam1 != sub_name {
subst(&mut *body, sub_name, value);
}
}
CompTerm::Let { name, expr, body } => {
subst(&mut *expr, sub_name, value);
if name != sub_name {
subst(&mut *body, sub_name, value);
}
}
CompTerm::Lam { name, body } => {
if name != sub_name {
subst(&mut *body, sub_name, value);
}
}
CompTerm::App { func, argm } => {
subst(&mut *func, sub_name, value);
subst(&mut *argm, sub_name, value);
}
CompTerm::Ctr { args, .. } => {
for arg in args {
subst(&mut *arg, sub_name, value);
}
}
CompTerm::Fun { args, .. } => {
for arg in args {
subst(&mut *arg, sub_name, value);
}
}
CompTerm::Num { .. } => {}
CompTerm::Op2 { val0, val1, .. } => {
subst(&mut *val0, sub_name, value);
subst(&mut *val1, sub_name, value);
}
CompTerm::Nil => {}
}
}
// Removes proof-irrelevant parts of the term
pub fn erase(book: &Book, term: &Term) -> Box<CompTerm> {
match term {
Term::Typ { .. } => Box::new(CompTerm::Nil),
Term::Var { orig: _, name } => {
let name = name.0.clone();
Box::new(CompTerm::Var { name })
}
Term::Lam { orig: _, name, body } => {
let name = name.0.clone();
let body = erase(book, body);
Box::new(CompTerm::Lam { name, body })
}
Term::App { orig: _, func, argm } => {
let func = erase(book, func);
let argm = erase(book, argm);
Box::new(CompTerm::App { func, argm })
}
Term::All {
orig: _,
name: _,
tipo: _,
body: _,
} => Box::new(CompTerm::Nil),
Term::Let { orig: _, name, expr, body } => {
let name = name.0.clone();
let expr = erase(book, expr);
let body = erase(book, body);
Box::new(CompTerm::Let { name, expr, body })
}
Term::Ann { orig: _, expr, tipo: _ } => erase(book, expr),
Term::Sub {
orig: _,
expr,
name: _,
indx: _,
redx: _,
} => erase(book, expr),
Term::Ctr { orig: _, name, args: term_args } => {
let name = name.0.clone();
let entr = book.entrs.get(&Ident(name.clone())).unwrap();
let mut args = vec![];
for (idx, arg) in term_args.iter().enumerate() {
if !entr.args[idx].eras {
args.push(erase(book, arg));
}
}
Box::new(CompTerm::Ctr { name, args })
}
Term::Fun { orig: _, name, args: term_args } => {
let name = name.0.clone();
let entr = book.entrs.get(&Ident(name.clone())).unwrap();
let mut args = vec![];
for (idx, arg) in term_args.iter().enumerate() {
if !entr.args[idx].eras {
args.push(erase(book, arg));
}
}
Box::new(CompTerm::Fun { name, args })
}
Term::Hlp { orig: _ } => Box::new(CompTerm::Nil),
Term::U60 { orig: _ } => Box::new(CompTerm::Nil),
Term::Num { orig: _, numb } => {
let numb = *numb as u128;
Box::new(CompTerm::Num { numb })
}
Term::Op2 { orig: _, oper, val0, val1 } => {
let oper = *oper;
let val0 = erase(book, val0);
let val1 = erase(book, val1);
Box::new(CompTerm::Op2 { oper, val0, val1 })
}
Term::Hol { orig: _, numb: _ } => Box::new(CompTerm::Nil),
Term::Mat { .. } => Box::new(CompTerm::Nil),
Term::Open { .. } => Box::new(CompTerm::Nil),
}
}
// Counts usages of a name in an erased term
pub fn count_uses(term: &CompTerm, count_name: &str) -> usize {
match term {
CompTerm::Var { name } => {
if name == count_name {
1
} else {
0
}
}
CompTerm::Lam { name, body } => {
if name == count_name {
0
} else {
count_uses(body, count_name)
}
}
CompTerm::App { func, argm } => count_uses(func, count_name) + count_uses(argm, count_name),
CompTerm::Dup { nam0, nam1, expr, body } => {
let expr_count = count_uses(expr, count_name);
let body_count = if nam0 == count_name || nam1 == count_name { 0 } else { count_uses(body, count_name) };
expr_count + body_count
}
CompTerm::Let { name, expr, body } => {
let expr_count = count_uses(expr, count_name);
let body_count = if name == count_name { 0 } else { count_uses(body, count_name) };
expr_count + body_count
}
CompTerm::Ctr { name: _, args } => {
let mut sum = 0;
for arg in args {
sum += count_uses(arg, count_name);
}
sum
}
CompTerm::Fun { name: _, args } => {
let mut sum = 0;
for arg in args {
sum += count_uses(arg, count_name);
}
sum
}
CompTerm::Op2 { oper: _, val0, val1 } => count_uses(val0, count_name) + count_uses(val1, count_name),
CompTerm::Num { .. } => 0,
CompTerm::Nil => 0,
}
}
// Renames a target variable using the fresh names in a vector
pub fn rename_clones(term: &mut CompTerm, target: &str, names: &mut Vec<String>) {
match term {
CompTerm::Var { name } => {
if name == target {
*name = names.pop().unwrap();
}
}
CompTerm::Lam { name, body } => {
if name != target {
rename_clones(body, target, names);
}
}
CompTerm::App { func, argm } => {
rename_clones(func, target, names);
rename_clones(argm, target, names);
}
CompTerm::Dup { nam0, nam1, expr, body } => {
rename_clones(expr, target, names);
if nam0 != target && nam1 != target {
rename_clones(body, target, names);
}
}
CompTerm::Let { name, expr, body } => {
rename_clones(expr, target, names);
if name != target {
rename_clones(body, target, names);
}
}
CompTerm::Ctr { name: _, args } => {
for arg in args {
rename_clones(arg, target, names);
}
}
CompTerm::Fun { name: _, args } => {
for arg in args {
rename_clones(arg, target, names);
}
}
CompTerm::Op2 { oper: _, val0, val1 } => {
rename_clones(val0, target, names);
rename_clones(val1, target, names);
}
CompTerm::Num { .. } => {}
CompTerm::Nil => {}
}
}
pub fn linearize_rule(rule: &mut CompRule) {
// Collects the left-hand side variables into the `vars` map
fn collect_lhs_vars<'a>(term: &'a mut CompTerm, vars: &mut HashMap<String, &'a mut CompTerm>) {
match term {
CompTerm::Var { name } => {
vars.insert(name.clone(), term);
}
CompTerm::App { func, argm } => {
collect_lhs_vars(func, vars);
collect_lhs_vars(argm, vars);
}
CompTerm::Ctr { args, .. } => {
for arg in args {
collect_lhs_vars(arg, vars);
}
}
CompTerm::Num { .. } => {}
_ => {
panic!("Invalid left-hand side.");
}
}
}
// linearize_name (Foo x x x x) 'x' 0
// ----------------------------------------------------------------
// dup x0 x1 = x; dup x2 x3 = x0; dup x4 x5 = x1; (Foo x2 x3 x4 x5)
// Returns the number of times the variable was used in the body.
pub fn linearize_name(body: &mut CompTerm, name: &mut String, fresh: &mut u64) -> usize {
fn fresh_name(fresh: &mut u64) -> String {
let name = format!("_{}", fresh);
*fresh += 1;
name
}
let uses = count_uses(body, name);
if uses > 1 {
let mut names = vec![];
for _ in 0..(uses - 1) * 2 {
names.push(fresh_name(fresh));
}
//println!("-> uses is {}, names is {:?}", uses, names);
let mut renames = vec![];
for rename in names[names.len() - uses..].iter().rev() {
renames.push(rename.clone());
}
rename_clones(body, name, &mut renames);
for i in (0..uses - 1).rev() {
let nam0 = names[i * 2].clone();
let nam1 = names[i * 2 + 1].clone();
let expr = Box::new(CompTerm::Var {
name: if i == 0 { name.to_string() } else { names[i - 1].clone() },
});
let new_body = CompTerm::Dup {
nam0,
nam1,
expr,
body: Box::new(CompTerm::Nil),
};
let old_body = std::mem::replace(body, new_body);
if let CompTerm::Dup { ref mut body, .. } = body {
let _ = std::mem::replace(body, Box::new(old_body));
}
}
} else if uses == 0 {
*name = String::from("~")
}
uses
}
// Linearizes an erased term, replacing cloned variables by dups
pub fn linearize_term(term: &mut CompTerm, fresh: &mut u64) {
//println!("Linearizing: {:?}", term);
match term {
CompTerm::Var { name: _ } => {}
CompTerm::Lam { ref mut name, body } => {
linearize_term(body, fresh);
linearize_name(body, name, fresh);
}
CompTerm::App { func, argm } => {
linearize_term(func, fresh);
linearize_term(argm, fresh);
}
CompTerm::Let { ref mut name, expr, body } => {
linearize_term(expr, fresh);
linearize_term(body, fresh);
linearize_name(body, name, fresh);
}
CompTerm::Ctr { name: _, args } => {
for arg in args {
linearize_term(arg, fresh);
}
}
CompTerm::Fun { name: _, args } => {
for arg in args {
linearize_term(arg, fresh);
}
}
CompTerm::Op2 { oper: _, val0, val1 } => {
linearize_term(val0, fresh);
linearize_term(val1, fresh);
}
CompTerm::Dup {
ref mut nam0,
ref mut nam1,
expr,
body,
..
} => {
// should be unreachable under normal usage, but I made it anyway
linearize_term(expr, fresh);
linearize_term(body, fresh);
linearize_name(body, nam0, fresh);
linearize_name(body, nam1, fresh);
}
CompTerm::Num { .. } => {}
CompTerm::Nil => {}
}
}
let mut vars = HashMap::new(); // rule pattern vars
for pat in &mut rule.pats {
collect_lhs_vars(&mut **pat, &mut vars);
}
let mut fresh = 0;
for (mut name, var) in vars.drain() {
// linearizes rule pattern vars
// The &mut here doesn't do anything because we're dropping var immediately afterwards.
// To linearize rule variables, we'll have to replace all LHS occurrences by ~ if the amount of uses is zero
let uses = linearize_name(&mut rule.body, &mut name, &mut fresh);
if uses == 0 {
if let CompTerm::Var { name } = var {
*name = String::from("~");
}
}
// The reason why we don't simply pass a real mutable reference to our variable
// (instead of a mutable reference of a clone)
// to linearize_name is because since `var` is in `body`, we would
// be borrowing `var` mutably twice, which is not allowed.
// The reason why linearize_name takes in a mutable reference is
// to replace unused vars by ~. This is useful, for example, in
// lambdas. (@x0 #0 should be linearized to @~ #0)
}
linearize_term(&mut rule.body, &mut fresh); // linearizes internal bound vars
}
// Swaps U120 numbers and functions for primitive operations, for Kindelia compilation
pub fn convert_u120_entry(entry: CompEntry) -> Result<CompEntry, String> {
let CompEntry { name, kdln, args, rules, orig } = entry;
let mut new_rules = Vec::new();
for CompRule { name, pats, body } in rules {
let body = convert_u120_term(&body, true)?;
let mut new_pats = Vec::new();
for pat in pats {
new_pats.push(convert_u120_term(&pat, false)?);
}
new_rules.push(CompRule { name, pats: new_pats, body });
}
Ok(CompEntry {
name,
kdln,
args,
rules: new_rules,
orig,
})
}
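// e.g. a `U120.new` constructor applied to two number literals is folded into the single
// literal ((hi << 60) + lo), and a call like (U120.add a b) becomes the primitive (+ a b).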
pub fn convert_u120_term(term: &CompTerm, rhs: bool) -> Result<Box<CompTerm>, String> {
let term = Box::new(match term {
// Swap U120.new by a number
CompTerm::Ctr { name, args } => {
if name == "U120.new" {
if let (CompTerm::Num { numb: num1 }, CompTerm::Num { numb: num2 }) = (&*args[0], &*args[1]) {
CompTerm::Num { numb: (num1 << 60) + num2 }
} else if rhs {
let args = args.iter().map(|x| convert_u120_term(x, rhs)).collect::<Result<Vec<Box<CompTerm>>, String>>()?;
CompTerm::Fun { name: name.clone(), args }
} else {
return Err("Can't compile pattern match on U120 to kindelia".to_string());
}
} else {
let args = args.iter().map(|x| convert_u120_term(x, rhs)).collect::<Result<Vec<Box<CompTerm>>, String>>()?;
CompTerm::Ctr { name: name.clone(), args }
}
}
// Swap U120 functions by primitive operations
CompTerm::Fun { name, args } => {
if let Some(oper) = u120_to_oper(name) {
let val0 = convert_u120_term(&*args[0], rhs)?;
let val1 = convert_u120_term(&*args[1], rhs)?;
CompTerm::Op2 { oper, val0, val1 }
} else {
let args = args.iter().map(|x| convert_u120_term(x, rhs)).collect::<Result<Vec<Box<CompTerm>>, String>>()?;
CompTerm::Fun { name: name.clone(), args }
}
}
CompTerm::Var { name: _ } => term.clone(),
CompTerm::Lam { name, body } => {
let body = convert_u120_term(body, rhs)?;
CompTerm::Lam { name: name.clone(), body }
}
CompTerm::App { func, argm } => {
let func = convert_u120_term(func, rhs)?;
let argm = convert_u120_term(argm, rhs)?;
CompTerm::App { func, argm }
}
CompTerm::Dup { nam0, nam1, expr, body } => {
let expr = convert_u120_term(expr, rhs)?;
let body = convert_u120_term(body, rhs)?;
CompTerm::Dup {
nam0: nam0.clone(),
nam1: nam1.clone(),
expr,
body,
}
}
CompTerm::Let { name, expr, body } => {
let expr = convert_u120_term(expr, rhs)?;
let body = convert_u120_term(body, rhs)?;
CompTerm::Let { name: name.clone(), expr, body }
}
CompTerm::Num { numb: _ } => term.clone(),
CompTerm::Op2 { oper, val0, val1 } => {
let val0 = convert_u120_term(val0, rhs)?;
let val1 = convert_u120_term(val1, rhs)?;
CompTerm::Op2 { oper: *oper, val0, val1 }
}
CompTerm::Nil => {
return Err("Found nil term during compilation".to_string());
}
});
Ok(term)
}
// Converts a U120 function name to the corresponding primitive operation
// Returns None if the name is not an operation
pub fn u120_to_oper(name: &str) -> Option<Operator> {
match name {
"U120.add" => Some(Operator::Add),
"U120.sub" => Some(Operator::Sub),
"U120.mul" => Some(Operator::Mul),
"U120.div" => Some(Operator::Div),
"U120.mod" => Some(Operator::Mod),
"U120.bitwise_and" => Some(Operator::And),
"U120.bitwise_or" => Some(Operator::Or),
"U120.bitwise_xor" => Some(Operator::Xor),
"U120.shift_left" => Some(Operator::Shl),
"U120.shift_right" => Some(Operator::Shr),
"U120.num_less_than" => Some(Operator::Ltn),
"U120.num_less_equal" => Some(Operator::Lte),
"U120.num_greater_than" => Some(Operator::Gtn),
"U120.num_greater_equal" => Some(Operator::Gte),
"U120.num_equal" => Some(Operator::Eql),
"U120.num_not_equal" => Some(Operator::Neq),
_ => None,
}
}

5
src/derive.rs Normal file

@ -0,0 +1,5 @@
pub mod sum_type;
pub mod prod_type;
pub use sum_type::*;
pub use prod_type::*;

181
src/derive/prod_type.rs Normal file

@ -0,0 +1,181 @@
use std::path::{Path, PathBuf};
use crate::book::{Entry, new_type::{ProdType, Derived, SumType, Constructor}, Argument, name::Ident, term::Term, span::Span, Rule};
use super::derive_match;
fn args_to_vars(vec: &Vec<Box<Argument>>) -> Vec<Box<Term>> {
vec
.iter()
.map(|x| {
Box::new(Term::Var {
orig: Span::Generated,
name: x.name.clone(),
})
})
.collect()
}
pub fn derive_prod_type(path: &str, tipo: &ProdType) -> Derived {
let root = Path::new(path).join(tipo.name.to_path());
let path = root.join("_.kind2");
let name = tipo.name.clone();
Derived {
path,
entr: Entry::new_type_signature(name, tipo.pars.clone())
}
}
pub fn derive_prod_constructor(prod: &ProdType) -> Derived {
let name = Ident::new_path(&prod.name.0, "new");
let path = format!("{}/new.kind2", prod.name.0.replace('.', "/"));
let mut args = prod.pars.clone();
for field in &prod.fields {
args.push(field.clone())
}
let tipo = Box::new(Term::Ctr {
orig: Span::Generated,
name: prod.name.clone(),
args: args_to_vars(&prod.pars)
});
Derived {
path: PathBuf::from(path),
entr: Entry {
name,
orig: Span::Generated,
kdln: None,
args: args.clone(),
tipo,
rules: vec![],
}
}
}
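// For a hypothetical product type `Pair` with parameters `a b` and fields `fst: a` and `snd: b`,
// this derives one getter per field, e.g. `Pair.get.fst` takes the type parameters plus
// `(pair: (Pair a b))` and returns the `fst` field by matching on `Pair.new`.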
pub fn derive_getters(prod: &ProdType) -> Vec<Derived> {
let mut args = prod.pars.clone();
let name_lower = prod.name.0.split('.').collect::<Vec<&str>>().pop().unwrap().to_lowercase();
let tipo = Box::new(Term::Ctr {
orig: Span::Generated,
name: prod.name.clone(),
args: args_to_vars(&prod.pars)
});
args.push(Box::new(Argument::new_accessible(Ident(name_lower), tipo)));
let mut derived = Vec::new();
for field in &prod.fields {
let name = Ident::new_path(&prod.name.0, &format!("get.{}", &field.name.0));
let path = format!("{}/get/{}.kind2", prod.name.0.replace('.', "/"), &field.name.0);
let pat = Box::new(Term::Ctr {
orig: Span::Generated,
name: Ident::new_path(&prod.name.0, "new"),
args: args_to_vars(&prod.fields)
});
let body = Box::new(Term::Var {
orig: Span::Generated,
name: field.name.clone(),
});
derived.push(Derived {
path: PathBuf::from(path),
entr: Entry {
name: name.clone(),
orig: Span::Generated,
kdln: None,
args: args.clone(),
tipo: field.tipo.clone(),
rules: vec![Box::new(Rule { orig: Span::Generated, name, pats: vec![pat], body })],
}
})
}
derived
}
pub fn derive_setters(prod: &ProdType) -> Vec<Derived> {
let mut args = prod.pars.clone();
let tipo = Box::new(Term::Ctr {
orig: Span::Generated,
name: prod.name.clone(),
args: args_to_vars(&prod.pars)
});
let name_lower = prod.name.0.split('.').collect::<Vec<&str>>().pop().unwrap().to_lowercase();
args.push(Box::new(Argument {
hide: false,
orig: Span::Generated,
name: Ident(name_lower),
eras: false,
tipo: tipo.clone()
}));
let mut derived = Vec::new();
for i in 0..prod.fields.len() {
let field = &prod.fields[i];
let name = Ident::new_path(&prod.name.0, &format!("set.{}", &field.name.0));
let path = format!("{}/set/{}.kind2", prod.name.0.replace('.', "/"), &field.name.0);
let new_name = Ident(format!("new_{}", field.name.clone()));
let mut args = args.clone();
args.push(Box::new(Argument { hide: false, orig: Span::Generated, eras: false, name: new_name.clone(), tipo: field.tipo.clone()}));
let pat = Box::new(Term::Ctr {
orig: Span::Generated,
name: Ident::new_path(&prod.name.0, "new"),
args: args_to_vars(&prod.fields)
});
let new_pat = Box::new(Term::Var { orig: Span::Generated, name: new_name });
let mut new_args = args_to_vars(&prod.fields);
new_args[i] = new_pat.clone();
let body = Box::new(Term::Ctr {
orig: Span::Generated,
name: Ident::new_path(&prod.name.0, "new"),
args: new_args
});
derived.push(Derived {
path: PathBuf::from(path),
entr: Entry {
name: name.clone(),
orig: Span::Generated,
kdln: None,
args: args.clone(),
tipo: tipo.clone(),
rules: vec![Box::new(Rule { orig: Span::Generated, name, pats: vec![pat, new_pat], body })],
}
})
}
derived
}
pub fn derive_prod_match(prod: &ProdType) -> Derived {
// We just use the same generator as the sum type.
let sum_type = SumType {
name: prod.name.clone(),
pars: prod.pars.clone(),
ctrs: vec![Box::new(Constructor {
name: Ident("new".to_string()),
args: prod.fields.clone()
})],
};
derive_match(&sum_type)
}

239
src/derive/sum_type.rs Normal file

@ -0,0 +1,239 @@
use std::path::{Path, PathBuf};
use crate::book::name::Ident;
use crate::book::new_type::{Constructor, Derived, SumType};
use crate::book::span::Span;
use crate::book::term::Term;
use crate::book::{Argument, Entry, Rule};
pub fn derive_sum_type(path: &str, tipo: &SumType) -> Derived {
let root = Path::new(path).join(tipo.name.to_path());
let path = root.join("_.kind2");
let mut args = vec![];
for par in &tipo.pars {
args.push(Box::new(Argument {
hide: false,
orig: Span::Generated,
eras: false,
name: par.name.clone(),
tipo: par.tipo.clone(),
}));
}
let entr = Entry::new_type_signature(tipo.name.clone(), args);
Derived {
path,
entr,
}
}
fn args_to_vars(vec: &Vec<Box<Argument>>) -> Vec<Box<Term>> {
vec
.iter()
.map(|x| {
Box::new(Term::Var {
orig: Span::Generated,
name: x.name.clone(),
})
})
.collect()
}
pub fn derive_ctr(tipo: &SumType, index: usize) -> Derived {
if let Some(ctr) = tipo.ctrs.get(index) {
let path = format!("{}/{}.kind2", tipo.name.to_path(), ctr.name);
let name = format!("{}.{}", tipo.name, ctr.name);
let kdln = None;
let mut args = vec![];
for arg in &tipo.pars {
args.push(arg.clone());
}
for arg in &ctr.args {
args.push(arg.clone());
}
let tipo = Box::new(Term::Ctr {
orig: Span::Generated,
name: tipo.name.clone(),
args: tipo
.pars
.iter()
.map(|x| {
Box::new(Term::Var {
orig: Span::Generated,
name: x.name.clone(),
})
})
.collect(),
});
let rules = vec![];
let entr = Entry {
name: Ident(name),
orig: Span::Generated,
kdln,
args,
tipo,
rules,
};
Derived { path: Path::new(&path).to_owned(), entr }
} else {
panic!("Constructor out of bounds.");
}
}
pub fn derive_match(ntyp: &SumType) -> Derived {
let path = format!("{}/match.kind2", ntyp.name.0.replace('.', "/"));
fn gen_type_ctr(ntyp: &SumType) -> Box<Term> {
Box::new(Term::Ctr {
orig: Span::Generated,
name: ntyp.name.clone(),
args: args_to_vars(&ntyp.pars),
})
}
fn gen_ctr_value(ntyp: &SumType, ctr: &Constructor, _: usize, suffix: &str) -> Box<Term> {
let mut ctr_value_args = vec![];
for par in &ntyp.pars {
ctr_value_args.push(Box::new(Term::new_var(Ident(format!("{}{}", par.name, suffix)))));
}
for fld in &ctr.args {
ctr_value_args.push(Box::new(Term::new_var(Ident(format!("{}{}", fld.name, suffix)))));
}
Box::new(Term::Ctr {
orig: Span::Generated,
name: Ident::new_path(&ntyp.name.0, &ctr.name.0),
args: ctr_value_args,
})
}
// List.match
let name = Ident::new_path(&ntyp.name.0, "match");
let kdln = None;
let mut args = vec![];
// <t: Type>
for par in &ntyp.pars {
args.push(Box::new(Argument::new_hidden(par.name.clone(), par.tipo.clone())));
}
// (x: (List t))
args.push(Box::new(Argument::new_accessible(Ident("x".to_string()), gen_type_ctr(ntyp))));
let motive_type = Box::new(Term::All {
orig: Span::Generated,
name: Ident("x".to_string()),
tipo: gen_type_ctr(ntyp),
body: Box::new(Term::Typ { orig: Span::Generated }),
});
// -(p: (List t) -> Type)
args.push(Box::new(Argument::new_erased(Ident("p".to_string()), motive_type)));
// (nil: (p (List.nil t)))
// (cons: (head t) (tail: (List t)) (p (List.cons t head tail)))
for ctr in &ntyp.ctrs {
fn ctr_case_type(ntyp: &SumType, ctr: &Constructor, index: usize) -> Box<Term> {
if index < ctr.args.len() {
// for nil = ...
// for cons = (head: t) (tail: (List t))
let arg = ctr.args.get(index).unwrap();
Box::new(Term::All {
orig: Span::Generated,
name: arg.name.clone(),
tipo: arg.tipo.clone(),
body: ctr_case_type(ntyp, ctr, index + 1),
})
} else {
// for nil = (p (List.nil t))
// for cons = (p (List.cons t head tail))
Box::new(Term::App {
orig: Span::Generated,
func: Box::new(Term::Var {
orig: Span::Generated,
name: Ident("p".to_string()),
}),
argm: gen_ctr_value(ntyp, ctr, index, ""),
})
}
}
args.push(Box::new(Argument {
eras: false,
orig: Span::Generated,
hide: false,
name: ctr.name.clone(),
tipo: ctr_case_type(ntyp, ctr, 0),
}));
}
// : (p x)
let tipo = Box::new(Term::App {
orig: Span::Generated,
func: Box::new(Term::Var {
orig: Span::Generated,
name: Ident("p".to_string()),
}),
argm: Box::new(Term::Var {
orig: Span::Generated,
name: Ident("x".to_string()),
}),
});
// List.match t (List.nil t) p nil cons = nil
// List.match t (List.cons t head tail) p nil cons = (cons head tail)
let mut rules = vec![];
for idx in 0..ntyp.ctrs.len() {
let ctr = &ntyp.ctrs[idx];
let orig = Span::Generated;
let name = format!("{}.match", ntyp.name);
let mut pats = vec![];
for par in &ntyp.pars {
pats.push(Box::new(Term::Var {
orig: Span::Generated,
name: par.name.clone(),
}));
}
pats.push(gen_ctr_value(ntyp, ctr, idx, "_"));
pats.push(Box::new(Term::Var {
orig: Span::Generated,
name: Ident("p".to_string()),
}));
for ctr in &ntyp.ctrs {
pats.push(Box::new(Term::Var {
orig: Span::Generated,
name: ctr.name.clone(),
}));
}
let mut body_args = vec![];
for arg in &ctr.args {
body_args.push(Box::new(Term::Var {
orig: Span::Generated,
name: Ident(format!("{}_", arg.name)),
}));
}
let body = Box::new(Term::Ctr {
orig: Span::Generated,
name: ctr.name.clone(),
args: body_args,
});
rules.push(Box::new(Rule {
orig,
name: Ident(name),
pats,
body,
}));
}
let entr = Entry {
name,
orig: Span::Generated,
kdln,
args,
tipo,
rules,
};
Derived { path: PathBuf::from(path), entr }
}

231
src/driver.rs Normal file

@ -0,0 +1,231 @@
pub mod config;
pub mod loader;
use crate::book::name::Ident;
use crate::book::new_type::{Derived, NewType};
use crate::book::Book;
use crate::checker::to_checker_book;
use crate::codegen;
use crate::derive;
use crate::driver::loader::{load, File};
use crate::parser::new_type;
use crate::codegen::kdl::KDL_NAME_LEN;
use crate::driver::config::Config;
const CHECKER_HVM: &str = include_str!("checker.hvm");
pub struct RunResult {
pub output: String,
pub rewrites: u64,
}
pub fn highlight(should: bool, text: &str) -> String {
if should {
format!("\x1b[4m{}\x1b[0m", text)
} else {
text.to_string()
}
}
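// Reads back an HVM value built from String.cons / String.nil constructors into a Rust String,
// decoding each head as a Unicode code point ('?' if invalid).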
pub fn readback_string(rt: &hvm::Runtime, host: u64) -> String {
let str_cons = rt.get_id("String.cons");
let str_nil = rt.get_id("String.nil");
let mut term = rt.at(host);
let mut text = String::new();
loop {
if hvm::get_tag(term) == hvm::CTR {
let fid = hvm::get_ext(term);
if fid == str_cons {
let head = rt.at(hvm::get_loc(term, 0));
let tail = rt.at(hvm::get_loc(term, 1));
if hvm::get_tag(head) == hvm::NUM {
text.push(std::char::from_u32(hvm::get_num(head) as u32).unwrap_or('?'));
term = tail;
continue;
}
}
if fid == str_nil {
break;
}
}
panic!("Invalid output: {} {}", hvm::get_tag(term), rt.show(host));
}
text
}
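// The checker output embeds markers such as "{{#F3F#}}" (file id 3) and "{{#R3:10:25R#}}"
// (file 3, byte range 10..25); this replaces them with the file's path and a highlighted
// source excerpt, respectively.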
fn inject_highlights(file: &[File], target: &str) -> String {
let mut code = String::new();
let mut cout = target;
// Replaces file ids by names
loop {
let mut injected = false;
if let (Some(init_file_index), Some(last_file_index)) = (cout.find("{{#F"), cout.find("F#}}")) {
let file_text = &cout[init_file_index + 4..last_file_index];
let file_numb = file_text.parse::<u64>().unwrap() as usize;
code.push_str(&cout[0..init_file_index]);
code.push_str(&file[file_numb].path);
cout = &cout[last_file_index + 4..];
injected = true;
}
if let (Some(init_range_index), Some(last_range_index)) = (cout.find("{{#R"), cout.find("R#}}")) {
let range_text = &cout[init_range_index + 4..last_range_index];
let range_text = range_text.split(':').map(|x| x.parse::<u64>().unwrap()).collect::<Vec<u64>>();
let range_file = range_text[0] as usize;
let range_init = range_text[1] as usize;
let range_last = range_text[2] as usize;
code.push_str(&cout[0..init_range_index]);
code.push_str(&highlight_error::highlight_error(range_init, range_last, &file[range_file].code));
cout = &cout[last_range_index + 4..];
injected = true;
}
if !injected {
break;
}
}
code.push_str(cout);
code
}
// Generates a .hvm checker for a Book
pub fn gen_checker(book: &Book) -> String {
// Compile the Kind2 file to HVM checker
let base_check_code = to_checker_book(book);
let mut check_code = CHECKER_HVM.to_string();
check_code.push_str(&base_check_code);
check_code
}
pub fn run_with_hvm(code: &str, main: &str, read_string: bool) -> Result<RunResult, String> {
let mut rt = hvm::Runtime::from_code(code)?;
let main = rt.alloc_code(main)?;
rt.run_io(main);
rt.normalize(main);
Ok(RunResult {
output: if read_string { readback_string(&rt, main) } else { rt.show(main) },
rewrites: rt.get_rewrites(),
})
}
pub fn cmd_to_hvm(config: &Config, path: &str) -> Result<(), String> {
let loaded = load(config, path)?;
let result = codegen::hvm::to_hvm_book(&loaded.book);
print!("{}", result);
Ok(())
}
pub fn cmd_show(config: &Config, path: &str) -> Result<(), String> {
let loaded = load(config, path)?;
println!("{}", loaded.book);
Ok(())
}
pub fn cmd_gen_checker(config: &Config, path: &str) -> Result<(), String> {
let loaded = load(config, path)?;
let gen_path = format!("{}.hvm", path.replace(".kind2", ".check"));
println!("Generated '{}'.", gen_path);
std::fs::write(gen_path, gen_checker(&loaded.book)).ok();
Ok(())
}
pub fn cmd_derive(config: &Config, path: &str) -> Result<(), String> {
let color = config.color_output;
let newcode = match std::fs::read_to_string(&path) {
Err(_) => {
return Err(format!("File not found: '{}'.", path));
}
Ok(code) => code,
};
let newtype = match new_type::read_newtype(&newcode) {
Err(err) => {
return Err(format!("[{}]\n{}", highlight(color, path), err));
}
Ok(book) => book,
};
fn save_derived(color: bool, path: &str, derived: &Derived) {
let dir = &derived.path;
let txt = format!("// Automatically derived from {}\n{}", path, derived.entr);
println!("Derived '{}':", highlight(color, derived.path.to_str().unwrap()));
println!("{}\n", txt);
std::fs::create_dir_all(dir.parent().unwrap()).unwrap();
std::fs::write(dir, txt).ok();
}
match *newtype {
NewType::Sum(sum) => {
// TODO: Remove this kind2_path because it's wrong.
save_derived(color, path, &derive::derive_sum_type(&config.kind2_path, &sum));
for i in 0..sum.ctrs.len() {
save_derived(color, path, &derive::derive_ctr(&sum, i));
}
save_derived(color, path, &derive::derive_match(&sum));
},
NewType::Prod(prod) => {
save_derived(color, path, &derive::derive_prod_type(&config.kind2_path, &prod));
save_derived(color, path, &derive::derive_prod_constructor(&prod));
save_derived(color, path, &derive::derive_prod_match(&prod));
let getters = derive::derive_getters(&prod);
for getter in getters {
save_derived(color, path, &getter);
}
let setters = derive::derive_setters(&prod);
for setter in setters {
save_derived(color, path, &setter);
}
}
}
Ok(())
}
pub fn cmd_check_all(config: &Config, path: &str) -> Result<(), String> {
let loaded = load(config, path)?;
let result = run_with_hvm(&gen_checker(&loaded.book), "Kind.API.check_all", true)?;
print!("{}", inject_highlights(&loaded.file, &result.output));
println!("Rewrites: {}", result.rewrites);
Ok(())
}
// Evaluates Main on Kind2
pub fn cmd_eval_main(config: &Config, path: &str) -> Result<(), String> {
let loaded = load(config, path)?;
if loaded.book.entrs.contains_key(&Ident("Main".to_string())) {
let result = run_with_hvm(&gen_checker(&loaded.book), "Kind.API.eval_main", true)?;
print!("{}", result.output);
println!("Rewrites: {}", result.rewrites);
Ok(())
} else {
Err("Main not found.".to_string())
}
}
pub fn cmd_run_main(config: &Config, path: &str) -> Result<(), String> {
let loaded = load(config, path)?;
if loaded.book.entrs.contains_key(&Ident("Main".to_string())) {
let result = codegen::hvm::to_hvm_book(&loaded.book);
let result = run_with_hvm(&result, "Main", false)?;
println!("{}", result.output);
println!("Rewrites: {}", result.rewrites);
Ok(())
} else {
Err("Main not found.".to_string())
}
}
pub fn cmd_to_kdl(config: &Config, path: &str, namespace: &Option<String>) -> Result<(), String> {
if let Some(ns) = namespace {
if ns.len() > KDL_NAME_LEN - 2 {
return Err(format!("Given namespace \"{}\"has more than {} characters.", ns, KDL_NAME_LEN - 2));
}
}
let loaded = load(config, path)?;
let comp_book = codegen::kdl::compile_book(&loaded.book)?;
let kdl_names = codegen::kdl::get_kdl_names(&comp_book, namespace)?;
let result = codegen::kdl::to_kdl_book(&loaded.book, &kdl_names, &comp_book)?;
print!("{}", result);
Ok(())
}

8
src/driver/config.rs Normal file

@ -0,0 +1,8 @@
// The configuration needed to customize
// the compiler experience
#[derive(Clone, Debug)]
pub struct Config {
pub no_high_line: bool,
pub color_output: bool,
pub kind2_path: String,
}

143
src/driver/loader.rs Normal file

@ -0,0 +1,143 @@
use std::path::{Path, PathBuf};
use crate::book::name::Ident;
use crate::book::span::{FileOffset, Span, SpanData};
use crate::book::Book;
use crate::lowering::adjust::{AdjustErrorKind, AdjustError};
use crate::lowering::resolve::Resolve;
use crate::parser::read_book;
use super::config::Config;
#[derive(Debug, Clone)]
pub struct File {
pub path: String,
pub code: String,
}
pub struct Load {
pub file: Vec<File>,
pub book: Book,
}
impl Load {
pub fn new_empty() -> Load {
Load {
file: Vec::new(),
book: Book::default(),
}
}
}
pub fn render_error(config: &Config, files: &[File], err: AdjustError) -> String {
let high_line = match err.orig {
Span::Localized(SpanData { file, start, end }) if !config.no_high_line => format!(
"On '{}'\n{}",
files[file.0 as usize].path,
highlight_error::highlight_error(start.0 as usize, end.0 as usize, &files[file.0 as usize].code)
),
_ if !config.no_high_line => "Cannot find the source of the error.".to_string(),
_ => "".to_string(),
};
return match err.kind {
AdjustErrorKind::IncorrectArity => format!("Incorrect arity.\n{}", high_line),
AdjustErrorKind::UnboundVariable { name } => format!("Unbound variable '{}'.\n{}", name, high_line),
AdjustErrorKind::RepeatedVariable => format!("Repeated variable.\n{}", high_line),
AdjustErrorKind::CantLoadType => format!("Can't load type.\n{}", high_line),
AdjustErrorKind::NoCoverage => format!("Incomplete constructor coverage.\n{}", high_line),
AdjustErrorKind::UseOpenInstead => format!("You should use `open` instead of `match` on record types.\n{}", high_line),
AdjustErrorKind::UseMatchInstead => format!("You should use `match` instead of `open` on sum types.\n{}", high_line),
AdjustErrorKind::CannotFindAlias { name } => format!("Cannot find alias '{}'. Try adding a 'use' statement.\n{}", name, high_line),
};
}
pub fn to_current_namespace(config: &Config, path: &PathBuf) -> String {
let base = Path::new(&config.kind2_path);
let mut cur = path.clone();
cur.set_extension("");
let cur_path = cur.strip_prefix(base);
cur_path.map(| x | {
let mut arr = x.into_iter().map(|x| x.to_str().unwrap()).collect::<Vec<&str>>();
arr.pop();
arr.join(".")
}).unwrap_or("".to_string())
}
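// e.g. (hypothetical paths) with kind2_path = "base", the file "base/Data/List/map.kind2"
// yields the namespace "Data.List": the extension and the final path component are dropped.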
pub fn load_entry(config: &Config, name: &str, load: &mut Load) -> Result<(), String> {
if !load.book.entrs.contains_key(&Ident(name.to_string())) {
let path: PathBuf;
let base = Path::new(&config.kind2_path);
if name.ends_with(".kind2") {
path = PathBuf::from(&name.to_string());
} else {
let mut normal_path = base.join(&name.replace('.', "/"));
let inside_path = normal_path.clone().join("_.kind2"); // path ending with 'Name/_.kind2'
normal_path.set_extension("kind2");
if inside_path.is_file() {
if normal_path.is_file() {
return Err(format!(
"The following files can't exist simultaneously:\n- {}\n- {}\nPlease delete one and try again.",
inside_path.display(),
normal_path.display()
));
}
path = inside_path;
} else {
path = normal_path;
}
};
let newcode = match std::fs::read_to_string(&path) {
Err(_) => {
return Ok(());
}
Ok(code) => code,
};
let (mut new_book, uses) = match read_book(&newcode) {
Err(err) => {
return Err(format!("\x1b[1m[{}]\x1b[0m\n{}", path.display(), err));
}
Ok(book) => book,
};
let file = File {
path: path.to_str().unwrap().into(),
code: newcode,
};
let cur_mod = to_current_namespace(config, &path.clone());
new_book.resolve(&cur_mod, &uses).map_err(|err| render_error(config, &vec![file.clone()], err))?;
new_book.set_origin_file(FileOffset(load.file.len() as u32));
load.file.push(file);
for name in &new_book.names {
load.book.names.push(name.clone());
load.book.entrs.insert(Ident(name.clone()), new_book.entrs.get(&Ident(name.to_string())).unwrap().clone());
}
for unbound in &new_book.get_unbounds(config) {
load_entry(config, &unbound.0, load)?;
}
}
Ok(())
}
pub fn load(config: &Config, name: &str) -> Result<Load, String> {
let mut load = Load::new_empty();
if !std::path::Path::new(name).is_file() {
return Err(format!("File not found: '{}'", name));
}
load_entry(config, name, &mut load)?;
match load.book.adjust(config) {
Ok(book) => {
load.book = book;
Ok(load)
}
Err(err) => Err(render_error(config, &load.file, err))
}
}

File diff suppressed because it is too large

7
src/lib.rs Normal file

@ -0,0 +1,7 @@
pub mod book;
pub mod checker;
pub mod codegen;
pub mod derive;
pub mod driver;
pub mod lowering;
pub mod parser;

213
src/lowering.rs Normal file

@ -0,0 +1,213 @@
pub mod resolve;
pub mod adjust;
pub mod load;
use crate::book::name::Ident;
use crate::book::new_type::{NewType, SumType, ProdType};
use crate::book::term::Term;
use crate::book::{Argument, Book, Entry, Rule};
use crate::driver::config::Config;
use crate::lowering::load::load_newtype_cached;
use std::collections::{HashMap, HashSet};
use std::rc::Rc;
// The state used while collecting the unbound names of a term, book, rule or entry.
pub struct UnboundState<'a> {
// All the vars that are bound in the context.
vars: Vec<Ident>,
// TODO: Describe
unbound: HashSet<Ident>,
// Definitions of types that are useful to the
// "match" expression.
types: HashMap<Ident, Rc<NewType>>,
config: &'a Config,
}
impl<'a> UnboundState<'a> {
pub fn new(types: HashMap<Ident, Rc<NewType>>, config: &'a Config) -> UnboundState<'a> {
UnboundState {
vars: Vec::new(),
unbound: HashSet::new(),
types,
config,
}
}
}
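// Walks terms collecting names that must be resolved elsewhere: names starting with an
// uppercase letter (top-level definitions and constructors) and, on right-hand sides,
// variables not bound in the current context.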
pub trait Unbound {
fn fill_unbound(&self, rhs: bool, state: &mut UnboundState);
fn get_unbounds(&self, types: HashMap<Ident, Rc<NewType>>, config: &Config) -> HashSet<Ident> {
let mut state = UnboundState::new(types, config);
self.fill_unbound(false, &mut state);
state.unbound
}
}
impl Unbound for Term {
fn fill_unbound<'a>(&self, rhs: bool, state: &mut UnboundState) {
match self {
Term::Typ { .. } => {}
Term::Var { ref name, .. } => {
// Is constructor name
if ('A'..='Z').contains(&name.0.chars().next().unwrap_or(' ')) {
state.unbound.insert(name.clone());
// Is unbound variable
} else if !state.vars.iter().any(|x| x == name) {
if rhs {
state.unbound.insert(name.clone());
} else {
state.vars.push(name.clone());
}
}
}
Term::Let { ref name, ref expr, ref body, .. } => {
expr.fill_unbound(rhs, state);
state.vars.push(name.clone());
body.fill_unbound(rhs, state);
state.vars.pop();
}
Term::Ann { ref expr, ref tipo, .. } => {
expr.fill_unbound(rhs, state);
tipo.fill_unbound(rhs, state);
}
Term::Sub { name: _, ref expr, .. } => {
expr.fill_unbound(rhs, state);
}
Term::All { ref name, ref tipo, ref body, .. } => {
tipo.fill_unbound(rhs, state);
state.vars.push(name.clone());
body.fill_unbound(rhs, state);
state.vars.pop();
}
Term::Lam { ref name, ref body, .. } => {
state.vars.push(name.clone());
body.fill_unbound(rhs, state);
state.vars.pop();
}
Term::App { ref func, ref argm, .. } => {
func.fill_unbound(rhs, state);
argm.fill_unbound(rhs, state);
}
// not reached normally
Term::Ctr { ref name, ref args, .. } => {
state.unbound.insert(Ident(name.to_string()));
for arg in args {
arg.fill_unbound(rhs, state);
}
}
// not reached normally
Term::Fun { ref name, ref args, .. } => {
state.unbound.insert(Ident(name.to_string()));
for arg in args {
arg.fill_unbound(rhs, state);
}
}
Term::Op2 { ref val0, ref val1, .. } => {
val0.fill_unbound(rhs, state);
val1.fill_unbound(rhs, state);
}
Term::Hlp { .. } => {}
Term::U60 { .. } => {}
Term::Num { .. } => {}
Term::Hol { .. } => {}
Term::Mat {
ref tipo,
ref name,
ref expr,
ref cses,
ref moti,
..
} => {
//println!("finding unbounds of match {} {}", tipo, name);
if let Ok(newtype) = load_newtype_cached(state.config, &mut state.types, tipo) {
state.unbound.insert(Ident(format!("{}.match", tipo.clone())));
// Expr
expr.fill_unbound(rhs, state);
// Motive
state.vars.push(name.clone());
moti.fill_unbound(rhs, state);
state.vars.pop();
// Cases
if let NewType::Sum(SumType { name: _, ctrs, pars: _ }) = &*newtype {
for ctr in ctrs {
if let Some(cse) = cses.iter().find(|x| x.0 == ctr.name) {
for arg in ctr.args.iter().rev() {
state.vars.push(arg.name.clone());
}
cse.1.fill_unbound(rhs, state);
for _ in ctr.args.iter().rev() {
state.vars.pop();
}
}
}
}
}
},
Term::Open { orig: _, tipo, name, expr, moti, body } => {
if let Ok(newtype) = load_newtype_cached(state.config, &mut state.types, tipo) {
state.unbound.insert(Ident(format!("{}.match", tipo.clone())));
expr.fill_unbound(rhs, state);
state.vars.push(name.clone());
moti.fill_unbound(rhs, state);
state.vars.pop();
if let NewType::Prod(ProdType { name: _, fields, .. }) = &*newtype {
for arg in fields.iter().rev() {
state.vars.push(arg.name.clone());
}
body.fill_unbound(rhs, state);
for _ in fields.iter().rev() {
state.vars.pop();
}
}
}
}
}
}
}
impl Unbound for Rule {
fn fill_unbound<'a>(&self, _rhs: bool, state: &mut UnboundState) {
for pat in &self.pats {
pat.fill_unbound(false, state);
}
self.body.fill_unbound(true, state);
}
}
impl Unbound for Entry {
fn fill_unbound<'a>(&self, _rhs: bool, state: &mut UnboundState) {
state.vars = Vec::new();
for arg in &self.args {
arg.fill_unbound(true, state);
state.vars.push(arg.name.clone());
}
self.tipo.fill_unbound(true, state);
for rule in &self.rules {
state.vars = Vec::new();
rule.fill_unbound(true, state);
}
}
}
impl Unbound for Argument {
fn fill_unbound<'a>(&self, _rhs: bool, state: &mut UnboundState) {
self.tipo.fill_unbound(true, state);
}
}
impl Book {
pub fn get_unbounds(&self, config: &Config) -> HashSet<Ident> {
let mut state = UnboundState::new(HashMap::new(), config);
for name in &self.names {
let entry = self.entrs.get(&Ident(name.clone())).unwrap();
entry.fill_unbound(false, &mut state);
}
state.unbound
}
}
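
A caller (for example the loader) can use this to discover which top-level names a freshly parsed file still needs. A minimal usage sketch, assuming a `book: Book` and a `config: Config` are already in scope (illustrative only, not code from this patch):

```rust
let unbound: HashSet<Ident> = book.get_unbounds(&config);
for name in &unbound {
    // Each entry is a constructor reference or a free right-hand-side variable,
    // so the loader must either find a definition for it or report an error.
    println!("needs definition: {}", name);
}
```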

570
src/lowering/adjust.rs Normal file

@ -0,0 +1,570 @@
use crate::book::name::Ident;
use crate::book::new_type::NewType;
use crate::book::span::{Localized, Span};
use crate::book::term::Term;
use crate::book::{Argument, Book, Entry, Rule};
use crate::driver::config::Config;
use crate::lowering::load::load_newtype_cached;
use std::collections::HashMap;
use std::rc::Rc;
#[derive(Clone, Debug)]
pub struct AdjustError {
pub orig: Span,
pub kind: AdjustErrorKind,
}
#[derive(Clone, Debug)]
pub enum AdjustErrorKind {
IncorrectArity,
UnboundVariable { name: String },
CannotFindAlias { name: String },
UseOpenInstead,
UseMatchInstead,
RepeatedVariable,
CantLoadType,
NoCoverage,
}
// The state used while adjusting a term, book, rule or entry.
pub struct AdjustState<'a> {
// The book that we are adjusting now.
book: &'a Book,
// Counter used to generate fresh variable names when replacing holes and filling erased arguments.
eras: u64,
// How many holes we have created.
holes: u64,
// All the vars that are bound in the context.
vars: Vec<Ident>,
// Definitions of types that are useful to the
// "match" expression.
types: HashMap<Ident, Rc<NewType>>,
// Configuration provided by the user. It's useful
// to load paths correctly.
config: &'a Config,
}
impl<'a> AdjustState<'a> {
pub fn new(book: &'a Book, config: &'a Config) -> AdjustState<'a> {
AdjustState {
book,
eras: 0,
holes: 0,
vars: Vec::new(),
types: HashMap::new(),
config,
}
}
}
pub trait Adjust {
fn adjust<'a>(&self, rhs: bool, state: &mut AdjustState<'a>) -> Result<Self, AdjustError>
where
Self: Sized;
fn adjust_with_book(&self, book: &Book, config: &Config) -> Result<Self, AdjustError>
where
Self: Sized,
{
self.adjust(
false,
&mut AdjustState {
book,
eras: 0,
holes: 0,
vars: Vec::new(),
types: HashMap::new(),
config,
},
)
}
}
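// Rewrites a left-nested application spine whose head is an uppercase variable,
// e.g. `((Pair a) b)`, into a single constructor node `(Pair a b)`; the special
// heads `Type` and `U60` become their primitive terms instead.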
fn convert_apps_to_ctr(term: &Term) -> Option<Term> {
let mut term = term;
let ctr_name;
let mut ctr_orig = term.get_origin();
let mut ctr_args = vec![];
loop {
match term {
Term::App { ref orig, ref func, ref argm } => {
ctr_args.push(argm);
if ctr_orig == Span::Generated {
ctr_orig = *orig;
}
term = func;
}
Term::Var { ref name, .. } => {
if !name.0.chars().next().unwrap_or(' ').is_uppercase() {
return None;
} else {
ctr_name = name.clone();
break;
}
}
_ => {
return None;
}
}
}
if ctr_name.to_string() == "Type" {
Some(Term::Typ { orig: ctr_orig })
} else if ctr_name.0 == "U60" {
Some(Term::U60 { orig: ctr_orig })
} else {
Some(Term::Ctr {
orig: ctr_orig,
name: ctr_name,
args: ctr_args.iter().rev().map(|x| (*x).clone()).collect(),
})
}
}
impl Adjust for Term {
fn adjust<'a>(&self, rhs: bool, state: &mut AdjustState<'a>) -> Result<Self, AdjustError> {
if let Some(new_term) = convert_apps_to_ctr(self) {
return new_term.adjust(rhs, state);
}
match *self {
Term::Typ { orig } => Ok(Term::Typ { orig }),
Term::Var { ref orig, ref name } => {
let orig = *orig;
if rhs && !state.vars.iter().any(|x| x == name) {
return Err(AdjustError {
orig,
kind: AdjustErrorKind::UnboundVariable { name: name.to_string() },
});
} else if !rhs && state.vars.iter().any(|x| x == name) {
return Err(AdjustError {
orig,
kind: AdjustErrorKind::RepeatedVariable,
});
} else if !rhs {
state.vars.push(name.clone());
}
Ok(Term::Var { orig, name: name.clone() })
}
Term::Let {
ref orig,
ref name,
ref expr,
ref body,
} => {
let orig = *orig;
let expr = Box::new(expr.adjust(rhs, state)?);
state.vars.push(name.clone());
let body = Box::new(body.adjust(rhs, state)?);
state.vars.pop();
Ok(Term::Let {
orig,
name: name.clone(),
expr,
body,
})
}
Term::Ann { ref orig, ref expr, ref tipo } => {
let orig = *orig;
let expr = Box::new(expr.adjust(rhs, state)?);
let tipo = Box::new(tipo.adjust(rhs, state)?);
Ok(Term::Ann { orig, expr, tipo })
}
Term::Sub {
ref orig,
ref name,
indx: _,
ref redx,
ref expr,
} => {
let orig = *orig;
let expr = Box::new(expr.adjust(rhs, state)?);
match state.vars.iter().position(|x| x == name) {
None => Err(AdjustError {
orig,
kind: AdjustErrorKind::UnboundVariable { name: name.to_string() },
}),
Some(indx) => {
let name = name.clone();
let indx = indx as u64;
let redx = *redx;
Ok(Term::Sub { orig, name, indx, redx, expr })
}
}
}
Term::All {
ref orig,
ref name,
ref tipo,
ref body,
} => {
let orig = *orig;
let tipo = Box::new(tipo.adjust(rhs, state)?);
state.vars.push(name.clone());
let body = Box::new(body.adjust(rhs, state)?);
state.vars.pop();
Ok(Term::All {
orig,
name: name.clone(),
tipo,
body,
})
}
Term::Lam { ref orig, ref name, ref body } => {
let orig = *orig;
state.vars.push(name.clone());
let body = Box::new(body.adjust(rhs, state)?);
state.vars.pop();
Ok(Term::Lam { orig, name: name.clone(), body })
}
Term::App { ref orig, ref func, ref argm } => {
let orig = *orig;
let func = Box::new(func.adjust(rhs, state)?);
let argm = Box::new(argm.adjust(rhs, state)?);
Ok(Term::App { orig, func, argm })
}
Term::Ctr { ref orig, ref name, ref args } => {
let orig = *orig;
if let Some(entry) = state.book.entrs.get(name) {
let mut new_args = Vec::new();
for arg in args {
// On lhs, switch holes for vars
if let (false, Term::Hol { orig, numb: _ }) = (rhs, &**arg) {
let name = format!("x{}_", state.eras);
state.eras += 1;
let arg = Box::new(Term::Var { orig: *orig, name: Ident(name) });
new_args.push(Box::new(arg.adjust(rhs, state)?));
} else {
new_args.push(Box::new(arg.adjust(rhs, state)?));
}
}
let (hiddens, eraseds) = entry.count_implicits();
// Fill implicit arguments (on rhs)
if rhs && args.len() == entry.args.len() - hiddens {
new_args.reverse();
let mut aux_args = Vec::new();
for arg in &entry.args {
if arg.hide {
let numb = state.holes;
state.holes += 1;
aux_args.push(Box::new(Term::Hol { orig, numb }));
} else {
aux_args.push(new_args.pop().unwrap());
}
}
new_args = aux_args;
}
// Fill erased arguments (on lhs)
if !rhs && args.len() == entry.args.len() - eraseds {
new_args.reverse();
let mut aux_args = Vec::new();
for arg in &entry.args {
if arg.eras {
let name = format!("{}{}_", arg.name, state.eras);
state.eras += 1;
let arg = Term::Var { orig, name: Ident(name) };
aux_args.push(Box::new(arg.adjust(rhs, state)?));
} else {
aux_args.push(new_args.pop().unwrap());
}
}
new_args = aux_args;
}
if new_args.len() != entry.args.len() {
Err(AdjustError {
orig,
kind: AdjustErrorKind::IncorrectArity,
})
} else if !entry.rules.is_empty() {
Ok(Term::Fun {
orig,
name: name.clone(),
args: new_args,
})
} else {
Ok(Term::Ctr {
orig,
name: name.clone(),
args: new_args,
})
}
} else {
Err(AdjustError {
orig,
kind: AdjustErrorKind::UnboundVariable { name: name.to_string() },
})
}
}
Term::Fun { .. } => {
panic!("Internal error."); // shouldn't happen since we can't parse Fun{}
}
Term::Hol { ref orig, numb: _ } => {
let orig = *orig;
let numb = state.holes;
state.holes += 1;
Ok(Term::Hol { orig, numb })
}
Term::Hlp { ref orig } => {
let orig = *orig;
Ok(Term::Hlp { orig })
}
Term::U60 { ref orig } => {
let orig = *orig;
Ok(Term::U60 { orig })
}
Term::Num { ref orig, ref numb } => {
let orig = *orig;
let numb = *numb;
Ok(Term::Num { orig, numb })
}
Term::Op2 {
ref orig,
ref oper,
ref val0,
ref val1,
} => {
let orig = *orig;
let oper = *oper;
let val0 = Box::new(val0.adjust(rhs, state)?);
let val1 = Box::new(val1.adjust(rhs, state)?);
Ok(Term::Op2 { orig, oper, val0, val1 })
}
Term::Mat {
ref orig,
ref name,
ref tipo,
ref expr,
ref cses,
ref moti,
} => {
let orig = *orig;
if let Ok(res) = load_newtype_cached(state.config, &mut state.types, tipo) {
match &*res {
NewType::Sum(newtype) => {
let mut args = vec![];
args.push(expr.clone());
args.push(Box::new(Term::Lam {
orig: moti.get_origin(),
name: name.clone(),
body: moti.clone(),
}));
if newtype.ctrs.len() != cses.len() {
return Err(AdjustError {
orig,
kind: AdjustErrorKind::NoCoverage,
});
}
for ctr in &newtype.ctrs {
if let Some(cse) = cses.iter().find(|x| x.0 == ctr.name) {
let mut case_term = cse.1.clone();
for arg in ctr.args.iter().rev() {
case_term = Box::new(Term::Lam {
orig: case_term.get_origin(),
name: Ident(format!("{}.{}", name, arg.name)),
body: case_term,
});
}
args.push(case_term);
} else {
return Err(AdjustError {
orig,
kind: AdjustErrorKind::NoCoverage,
});
}
}
let result = Term::Ctr {
orig,
name: Ident::new_path(&tipo.to_string(), "match"),
args,
};
result.adjust(rhs, state)
},
_ => Err(AdjustError {
orig,
kind: AdjustErrorKind::UseOpenInstead,
})
}
} else {
Err(AdjustError {
orig,
kind: AdjustErrorKind::CantLoadType,
})
}
},
Term::Open {
ref orig,
ref name,
ref tipo,
ref expr,
ref body,
ref moti
} => {
let orig = *orig;
if let Ok(res) = load_newtype_cached(state.config, &mut state.types, tipo) {
match &*res {
NewType::Prod(prod) => {
let mut args = vec![];
args.push(expr.clone());
args.push(Box::new(Term::Lam {
orig: moti.get_origin(),
name: name.clone(),
body: moti.clone(),
}));
let mut case_term = body.clone();
for arg in prod.fields.iter().rev() {
case_term = Box::new(Term::Lam {
orig: case_term.get_origin(),
name: Ident(format!("{}.{}", name, arg.name)),
body: case_term,
});
}
args.push(case_term);
let result = Term::Ctr {
orig,
name: Ident::new_path(&tipo.to_string(), "match"),
args,
};
result.adjust(rhs, state)
},
_ => Err(AdjustError {
orig,
kind: AdjustErrorKind::UseMatchInstead,
})
}
} else {
Err(AdjustError {
orig,
kind: AdjustErrorKind::CantLoadType,
})
}
}
}
}
}
impl Adjust for Rule {
fn adjust<'a>(&self, _rhs: bool, state: &mut AdjustState<'a>) -> Result<Self, AdjustError> {
let name = self.name.clone();
let orig = self.orig;
// shouldn't panic, because we only parse rules after the type annotation
let entry = state.book.entrs.get(&self.name).expect("Untyped rule.");
let mut pats = Vec::new();
for pat in &self.pats {
if let Term::Hol { orig, numb: _ } = &**pat {
// On lhs, switch holes for vars
// TODO: This duplicates the logic of Term::adjust because the lhs of a rule is not adjusted as a plain term
let name = Ident(format!("x{}_", state.eras));
state.eras += 1;
let pat = Term::Var { orig: *orig, name };
pats.push(Box::new(pat.adjust(false, state)?));
} else {
pats.push(Box::new(pat.adjust(false, state)?));
}
}
// Fill erased arguments
let (_, eraseds) = entry.count_implicits();
if self.pats.len() == entry.args.len() - eraseds {
pats.reverse();
let mut aux_pats = Vec::new();
for arg in &entry.args {
if arg.eras {
let name = Ident(format!("{}{}_", arg.name, state.eras));
state.eras += 1;
let pat = Box::new(Term::Var { orig, name });
aux_pats.push(Box::new(pat.adjust(false, state)?));
} else {
aux_pats.push(pats.pop().unwrap());
}
}
pats = aux_pats;
}
if pats.len() != entry.args.len() {
return Err(AdjustError {
orig,
kind: AdjustErrorKind::IncorrectArity,
});
}
let body = Box::new(self.body.adjust(true, state)?);
Ok(Rule { orig, name, pats, body })
}
}
impl Adjust for Argument {
fn adjust<'a>(&self, _rhs: bool, state: &mut AdjustState<'a>) -> Result<Self, AdjustError> {
state.eras = 0;
let tipo = Box::new(self.tipo.adjust(true, state)?);
Ok(Argument {
orig: self.orig,
hide: self.hide,
eras: self.eras,
name: self.name.clone(),
tipo,
})
}
}
impl Adjust for Entry {
fn adjust<'a>(&self, rhs: bool, state: &mut AdjustState<'a>) -> Result<Self, AdjustError> {
let name = self.name.clone();
let kdln = self.kdln.clone();
let mut args = Vec::new();
state.vars = Vec::new();
for arg in &self.args {
args.push(Box::new(arg.adjust(rhs, state)?));
state.vars.push(arg.name.clone());
}
state.eras = 0;
let tipo = Box::new(self.tipo.adjust(true, state)?);
let mut rules = Vec::new();
for rule in &self.rules {
state.vars = Vec::new();
rules.push(Box::new(rule.adjust(rhs, state)?));
}
Ok(Entry {
name,
kdln,
orig: self.orig,
args,
tipo,
rules,
})
}
}
impl Book {
pub fn adjust(&mut self, config: &Config) -> Result<Self, AdjustError> {
let mut names = Vec::new();
let mut entrs = HashMap::new();
let mut state = AdjustState::new(self, config);
for name in &self.names {
let ident = Ident(name.clone());
let entry = self.entrs.get(&ident).unwrap();
names.push(name.clone());
entrs.insert(ident, Box::new(entry.adjust(false, &mut state)?));
}
Ok(Book { names, entrs, holes: state.holes })
}
}
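
After parsing and name resolution, the whole book goes through this adjust pass. A rough sketch of the call site, assuming a mutable `book: Book` and a `config: Config` are in scope (illustrative only, not code from this patch):

```rust
match book.adjust(&config) {
    Ok(adjusted) => {
        // Holes are now numbered, implicit/erased arguments are filled in, and
        // `match`/`open` expressions are desugared into `<Type>.match` calls.
        println!("adjusted {} definitions", adjusted.names.len());
    }
    Err(err) => eprintln!("adjust error at {:?}: {:?}", err.orig, err.kind),
}
```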

38
src/lowering/load.rs Normal file

@ -0,0 +1,38 @@
use crate::book::name::Ident;
use crate::book::new_type::NewType;
use crate::driver::config::Config;
use crate::parser::new_type::read_newtype;
use std::collections::HashMap;
use std::path::Path;
use std::rc::Rc;
// TODO: Remove this from the adjust layer. I think that we need to move it
// to the driver.
fn load_newtype(config: &Config, name: &Ident) -> Result<Box<NewType>, String> {
let path = config.kind2_path.clone();
let root = Path::new(&path).join(name.to_string().replace('.', "/"));
let path = root.join("_.type");
let newcode = match std::fs::read_to_string(&path) {
Err(_) => {
return Err(format!("File not found: '{}'.", path.display()));
}
Ok(code) => code,
};
let newtype = match read_newtype(&newcode) {
Err(err) => {
return Err(format!("\x1b[1m[{}]\x1b[0m\n{}", path.display(), err));
}
Ok(book) => book,
};
Ok(newtype)
}
pub fn load_newtype_cached(config: &Config, cache: &mut HashMap<Ident, Rc<NewType>>, name: &Ident) -> Result<Rc<NewType>, String> {
if !cache.contains_key(name) {
let newtype = Rc::new(*load_newtype(config, name)?);
cache.insert(name.clone(), newtype);
}
return Ok(cache.get(name).unwrap().clone());
}
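
A hedged usage sketch of the cache, assuming a `config: Config` is in scope and that a `Maybe/_.type` file exists under the configured `kind2_path` (both are assumptions made for illustration):

```rust
let mut cache: HashMap<Ident, Rc<NewType>> = HashMap::new();
let name = Ident("Maybe".to_string());
let first = load_newtype_cached(&config, &mut cache, &name).expect("type not found");
let again = load_newtype_cached(&config, &mut cache, &name).expect("type not found");
// The second call reuses the cached Rc instead of re-reading `Maybe/_.type`.
assert!(Rc::ptr_eq(&first, &again));
```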

179
src/lowering/resolve.rs Normal file

@ -0,0 +1,179 @@
use std::collections::HashMap;
use crate::book::term::Term;
use crate::book::name::Ident;
use crate::book::span::Span;
use crate::book::{Rule, Entry, Book};
use super::adjust::AdjustError;
pub trait Resolve {
fn resolve(&mut self, current: &str, map: &HashMap<String, String>) -> Result<(), AdjustError>;
}
pub fn find_alias(orig: Span, path: &str, map: &HashMap<String, String>) -> Result<String, AdjustError> {
if let Some(path) = map.get(path) {
Ok(path.clone())
} else {
Err(AdjustError {
orig,
kind: super::adjust::AdjustErrorKind::CannotFindAlias { name: path.to_string() },
})
}
}
impl Ident {
fn resolve(&mut self, current: &str, orig: Span, map: &HashMap<String, String>) -> Result<(), AdjustError> {
if self.is_ctr() {
let mut iter = self.0.split("/");
let path = iter.next().unwrap();
match (path, iter.next()) {
("", Some(id)) => {
*self = if current == "" {
Ident(id.to_string())
} else {
Ident(format!("{}.{}", current, id).to_string())
};
}
(path, Some("")) => {
let alias = find_alias(orig, path, map)?;
*self = Ident(alias);
}
(path, Some(id)) => {
let alias = find_alias(orig, path, map)?;
*self = Ident(format!("{}.{}", alias, id).to_string());
}
_ => ()
}
}
Ok(())
}
}
// TODO: Put a better orig inside each ident
impl Resolve for Term {
fn resolve(&mut self, current: &str, map: &HashMap<String, String>) -> Result<(), AdjustError> {
match self {
Term::Num { .. } => Ok(()),
Term::Hol { .. } => Ok(()),
Term::Hlp { .. } => Ok(()),
Term::U60 { .. } => Ok(()),
Term::Typ { .. } => Ok(()),
Term::Var { name, orig } => name.resolve(current, *orig, map),
Term::Let { expr, body,.. } => {
body.resolve(current, map)?;
expr.resolve(current, map)
},
Term::Ann { expr, tipo, .. } => {
expr.resolve(current, map)?;
tipo.resolve(current, map)
},
Term::Sub { expr, .. } => {
// TODO: Not sure.
expr.resolve(current, map)
},
Term::All { tipo, body, .. } => {
body.resolve(current, map)?;
tipo.resolve(current, map)
},
Term::Lam { body, .. } => {
body.resolve(current, map)
},
Term::App { func, argm, .. } => {
func.resolve(current, map)?;
argm.resolve(current, map)
},
Term::Ctr { args, name, orig, .. } => {
name.resolve(current, *orig, map)?;
for arg in args {
arg.resolve(current, map)?;
}
Ok(())
},
Term::Fun { args, name, orig, .. } => {
name.resolve(current, *orig, map)?;
for arg in args {
arg.resolve(current, map)?;
}
Ok(())
},
Term::Op2 { val0, val1, .. } => {
val0.resolve(current, map)?;
val1.resolve(current, map)
},
Term::Mat {
tipo,
expr,
cses,
moti,
orig,
..
} => {
tipo.resolve(current, *orig, map)?;
moti.resolve(current, map)?;
expr.resolve(current, map)?;
for (_, arg) in cses {
arg.resolve(current, map)?;
}
Ok(())
},
Term::Open {
tipo,
expr,
body,
moti,
orig,
..
} => {
tipo.resolve(current, *orig, map)?;
moti.resolve(current, map)?;
body.resolve(current, map)?;
expr.resolve(current, map)
},
}
}
}
impl Resolve for Rule {
fn resolve(&mut self, current: &str, map: &HashMap<String, String>) -> Result<(), AdjustError> {
self.body.resolve(current, map)?;
self.name.resolve(current, self.orig, map)?;
for pat in self.pats.as_mut_slice() {
pat.resolve(current, map)?;
}
Ok(())
}
}
impl Resolve for Entry {
fn resolve(&mut self, current: &str, map: &HashMap<String, String>) -> Result<(), AdjustError> {
self.tipo.resolve(current, map)?;
self.name.resolve(current, self.orig, map)?;
for rule in self.rules.as_mut_slice() {
rule.resolve(current, map)?;
}
Ok(())
}
}
impl Resolve for Book {
fn resolve(&mut self, current: &str, map: &HashMap<String, String>) -> Result<(), AdjustError> {
let mut new_entrs = HashMap::new();
let mut new_names = Vec::new();
for (name, entr) in self.entrs.iter_mut() {
entr.resolve(current, map)?;
let mut new_name = name.clone();
new_name.resolve(current, entr.orig, map)?;
new_entrs.insert(new_name.clone(), entr.clone());
}
// Just to change the order of each name.
for name in &self.names {
let mut new_name = Ident(name.clone());
new_name.resolve(current, Span::Generated, map)?;
new_names.push(new_name.0);
}
self.entrs = new_entrs;
self.names = new_names;
Ok(())
}
}
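
To make the alias rules above concrete: a `use Data.List as List` declaration puts the pair `("List", "Data.List")` in the map, so a reference written `List/map` resolves to `Data.List.map`, while `/helper` is prefixed with the current module name. A small sketch, assuming `List/map` is classified as a constructor path by `Ident::is_ctr` (illustrative only):

```rust
let mut map = HashMap::new();
map.insert("List".to_string(), "Data.List".to_string());
let mut term = Term::Var { orig: Span::Generated, name: Ident("List/map".to_string()) };
term.resolve("Main", &map).unwrap();
// `term` now holds the identifier `Data.List.map`.
```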


@ -1,390 +1,92 @@
#![allow(dead_code)]
#![allow(unused_variables)]
pub mod book;
pub mod checker;
pub mod codegen;
pub mod derive;
pub mod driver;
pub mod lowering;
pub mod parser;
mod language;
mod to_kdl;
mod to_hvm;
use std::env;
use crate::driver::config::Config;
use crate::driver::*;
use language::{*};
use std::collections::HashMap;
use clap::{Parser, Subcommand};
const CHECKER_HVM: &str = include_str!("checker.hvm");
#[derive(Parser)]
#[clap(author, version, about, long_about = None)]
#[clap(propagate_version = true)]
pub struct Cli {
#[clap(subcommand)]
pub command: Command,
#[clap(subcommand)]
pub command: Command,
}
#[derive(Subcommand)]
pub enum Command {
/// Check a file
#[clap(aliases = &["c"])]
Check { file: String },
/// Check a file
#[clap(aliases = &["c"])]
Check { file: String },
/// Evaluates Main on Kind2
#[clap(aliases = &["r"])]
Eval { file: String },
/// Evaluates Main on Kind2
#[clap(aliases = &["e"])]
Eval { file: String },
/// Runs Main on the HVM
#[clap(aliases = &["r"])]
Run { file: String },
/// Runs Main on the HVM
#[clap(aliases = &["r"])]
Run { file: String },
/// Derives .kind2 files from a .type file
#[clap(aliases = &["c"])]
Derive { file: String },
/// Derives .kind2 files from a .type file
#[clap(aliases = &["der"])]
Derive { file: String },
/// Generates a checker (.hvm) for a file
#[clap(aliases = &["c"])]
GenChecker { file: String },
/// Generates a checker (.hvm) for a file
#[clap(aliases = &["gc"])]
GenChecker { file: String },
/// Stringifies a file
#[clap(aliases = &["c"])]
Show { file: String },
/// Stringifies a file
#[clap(aliases = &["show"])]
Show { file: String },
/// Compiles a file to Kindelia (.kdl)
#[clap(aliases = &["c"])]
ToKDL { file: String },
/// Compiles a file to Kindelia (.kdl)
#[clap(aliases = &["kdl"])]
ToKDL {
file: String,
/// If given, a namespace that goes before each compiled name. Can be at most 10 characters long.
#[clap(long, aliases = &["ns"])]
namespace: Option<String>,
},
/// Compiles a file to HVM (.hvm)
#[clap(aliases = &["c"])]
ToHVM { file: String },
}
fn main() {
match run_cli() {
Ok(..) => {}
Err(err) => {
eprintln!("{}", err);
}
};
/// Compiles a file to HVM (.hvm)
#[clap(aliases = &["hvm"])]
ToHVM { file: String },
}
fn run_cli() -> Result<(), String> {
let cli_matches = Cli::parse();
let cli_matches = Cli::parse();
match cli_matches.command {
Command::Eval { file: path } => {
cmd_eval_main(&path)
let config = Config {
no_high_line: false,
color_output: true,
kind2_path: env::var_os("KIND2_PATH").map(|x| x.into_string().unwrap()).unwrap_or_else(|| "".to_string()),
};
match cli_matches.command {
Command::Eval { file: path } => cmd_eval_main(&config, &path),
Command::Run { file: path } => cmd_run_main(&config, &path),
Command::Check { file: path } => cmd_check_all(&config, &path),
Command::Derive { file: path } => cmd_derive(&config, &path),
Command::GenChecker { file: path } => cmd_gen_checker(&config, &path),
Command::Show { file: path } => cmd_show(&config, &path),
Command::ToKDL { file: path, namespace } => cmd_to_kdl(&config, &path, &namespace),
Command::ToHVM { file: path } => cmd_to_hvm(&config, &path),
}
Command::Run { file: path } => {
cmd_run_main(&path)
}
Command::Check { file: path } => {
cmd_check_all(&path)
}
Command::Derive { file: path } => {
cmd_derive(&path)
}
Command::GenChecker { file: path } => {
cmd_gen_checker(&path)
}
Command::Show { file: path } => {
cmd_show(&path)
}
Command::ToKDL { file: path } => {
cmd_to_kdl(&path)
}
Command::ToHVM { file: path } => {
cmd_to_hvm(&path)
}
}
}
// Commands
// --------
// Checks all definitions of a Kind2 file
fn cmd_check_all(path: &str) -> Result<(), String> {
let loaded = load(path)?;
let result = run_with_hvm(&gen_checker(&loaded.book), "Kind.API.check_all", true)?;
print!("{}", inject_highlights(&loaded.file, &result.output));
println!("Rewrites: {}", result.rewrites);
Ok(())
}
// Evaluates Main on Kind2
fn cmd_eval_main(path: &str) -> Result<(), String> {
let loaded = load(path)?;
if loaded.book.entrs.contains_key("Main") {
let result = run_with_hvm(&gen_checker(&loaded.book), "Kind.API.eval_main", true)?;
print!("{}", result.output);
println!("Rewrites: {}", result.rewrites);
Ok(())
} else {
Err("Main not found.".to_string())
}
}
// Runs Main on HVM
fn cmd_run_main(path: &str) -> Result<(), String> {
let loaded = load(path)?;
if loaded.book.entrs.contains_key("Main") {
let result = to_hvm::to_hvm_book(&loaded.book);
let result = run_with_hvm(&result, "Main", false)?;
println!("{}", result.output);
println!("Rewrites: {}", result.rewrites);
Ok(())
} else {
Err("Main not found.".to_string())
}
}
// Generates the checker file (`file.kind2` -> `file.checker.hvm`)
fn cmd_gen_checker(path: &str) -> Result<(), String> {
let loaded = load(path)?;
let gen_path = format!("{}.hvm", path.replace(".kind2",".check"));
println!("Generated '{}'.", gen_path);
std::fs::write(gen_path, gen_checker(&loaded.book)).ok();
Ok(())
}
// Stringifies a file
fn cmd_show(path: &str) -> Result<(), String> {
let loaded = load(path)?;
let result = show_book(&loaded.book);
println!("{}", result);
Ok(())
}
// Compiles a file to Kindelia (.kdl)
fn cmd_to_kdl(path: &str) -> Result<(), String> {
let loaded = load(path)?;
let comp_book = language::compile_book(&loaded.book)?;
let kdl_names = to_kdl::get_kdl_names(&comp_book)?;
let result = to_kdl::to_kdl_book(&loaded.book, &kdl_names, &comp_book)?;
print!("{}", result);
Ok(())
}
// Compiles a file to HVM (.hvm)
fn cmd_to_hvm(path: &str) -> Result<(), String> {
let loaded = load(path)?;
let result = to_hvm::to_hvm_book(&loaded.book);
print!("{}", result);
Ok(())
}
// Derives generic functions
fn cmd_derive(path: &str) -> Result<(), String> {
let newcode = match std::fs::read_to_string(&path) {
Err(err) => { return Err(format!("File not found: '{}'.", path)); }
Ok(code) => { code }
};
let newtype = match read_newtype(&newcode) {
Err(err) => { return Err(format!("\x1b[1m[{}]\x1b[0m\n{}", path, err)); }
Ok(book) => { book }
};
fn save_derived(path: &str, derived: &Derived) {
let dir = std::path::Path::new(&derived.path);
let txt = show_entry(&derived.entr);
let txt = format!("// Automatically derived from {}\n{}", path, txt);
println!("\x1b[4m\x1b[1mDerived '{}':\x1b[0m", derived.path);
println!("{}\n", txt);
std::fs::create_dir_all(dir.parent().unwrap()).unwrap();
std::fs::write(dir, txt).ok();
}
save_derived(path, &derive_type(&newtype));
for i in 0 .. newtype.ctrs.len() {
save_derived(path, &derive_ctr(&newtype, i));
}
save_derived(path, &derive_match(&newtype));
return Ok(());
}
// Utils
// -----
pub struct RunResult {
output: String,
rewrites: u64,
}
// Replaces file and range markers (`{{#F...F#}}`, `{{#R...R#}}`) in `target` with file names and highlighted slices of the loaded files
fn inject_highlights(file: &Vec<File>, target: &str) -> String {
let mut code = String::new();
let mut cout = target;
// Replaces file ids by names
loop {
let mut injected = false;
if let (Some(init_file_index), Some(last_file_index)) = (cout.find("{{#F"), cout.find("F#}}")) {
let file_text = &cout[init_file_index + 4 .. last_file_index];
let file_numb = file_text.parse::<u64>().unwrap() as usize;
code.push_str(&cout[0 .. init_file_index]);
code.push_str(&file[file_numb].path);
cout = &cout[last_file_index + 4 ..];
injected = true;
}
if let (Some(init_range_index), Some(last_range_index)) = (cout.find("{{#R"), cout.find("R#}}")) {
let range_text = &cout[init_range_index + 4 .. last_range_index];
let range_text = range_text.split(":").map(|x| x.parse::<u64>().unwrap()).collect::<Vec<u64>>();
let range_file = range_text[0] as usize;
let range_init = range_text[1] as usize;
let range_last = range_text[2] as usize;
code.push_str(&cout[0 .. init_range_index]);
code.push_str(&highlight_error::highlight_error(range_init, range_last, &file[range_file].code));
cout = &cout[last_range_index + 4 ..];
injected = true;
}
if !injected {
break;
}
}
code.push_str(cout);
return code;
}
// Given an HVM source, runs an expression
fn run_with_hvm(code: &str, main: &str, read_string: bool) -> Result<RunResult, String> {
let mut rt = hvm::Runtime::from_code(code)?;
let main = rt.alloc_code(main)?;
rt.run_io(main);
rt.normalize(main);
return Ok(RunResult {
output: if read_string { readback_string(&rt, main) } else { rt.show(main) },
rewrites: rt.get_rewrites(),
});
}
// Converts a HVM string to a Rust string
pub fn readback_string(rt: &hvm::Runtime, host: u64) -> String {
let str_cons = rt.get_id("String.cons");
let str_nil = rt.get_id("String.nil");
let mut term = rt.at(host);
let mut text = String::new();
loop {
if hvm::get_tag(term) == hvm::CTR {
let fid = hvm::get_ext(term);
if fid == str_cons {
let head = rt.at(hvm::get_loc(term, 0));
let tail = rt.at(hvm::get_loc(term, 1));
if hvm::get_tag(head) == hvm::NUM {
text.push(std::char::from_u32(hvm::get_num(head) as u32).unwrap_or('?'));
term = tail;
continue;
fn main() {
match run_cli() {
Ok(..) => {}
Err(err) => {
eprintln!("{}", err);
}
}
if fid == str_nil {
break;
}
}
panic!("Invalid output: {} {}", hvm::get_tag(term), rt.show(host));
}
return text;
}
// Generates a .hvm checker for a Book
fn gen_checker(book: &Book) -> String {
// Compile the Kind2 file to HVM checker
let base_check_code = to_checker_book(&book);
let mut check_code = CHECKER_HVM.to_string();
check_code.push_str(&base_check_code);
return check_code;
}
// Loader
// ======
pub struct File {
path: String,
code: String,
}
pub struct Load {
file: Vec<File>,
book: Book,
}
pub fn load(name: &str) -> Result<Load, String> {
let mut load = Load {
file: Vec::new(),
book: Book {
names: vec![],
entrs: HashMap::new(),
holes: 0,
}
};
if !std::path::Path::new(name).is_file() {
return Err(format!("File not found: '{}'", name));
}
load_entry(name, &mut load)?;
// Adjusts the Kind2 book
match adjust_book(&load.book) {
Ok(book) => {
load.book = book;
}
Err(err) => {
let (file, init, last) = get_origin_range(err.orig);
let high_line = highlight_error::highlight_error(init, last, &load.file[file].code);
return match err.kind {
AdjustErrorKind::IncorrectArity => Err(format!("Incorrect arity.\n{}", high_line)),
AdjustErrorKind::UnboundVariable { name } => Err(format!("Unbound variable '{}'.\n{}", name, high_line)),
AdjustErrorKind::RepeatedVariable => Err(format!("Repeated variable.\n{}", high_line)),
AdjustErrorKind::CantLoadType => Err(format!("Can't load type.\n{}", high_line)),
AdjustErrorKind::NoCoverage => Err(format!("Incomplete constructor coverage.\n{}", high_line)),
};
}
};
return Ok(load);
}
pub fn load_entry(name: &str, load: &mut Load) -> Result<(), String> {
if !load.book.entrs.contains_key(name) {
let path : String;
if name.ends_with(".kind2") {
path = name.to_string();
} else {
let inside_path = format!("{}/_.kind2", &name.replace(".","/")); // path ending with 'Name/_.kind2'
let normal_path = format!("{}.kind2", &name.replace(".","/")); // path ending with 'Name.kind2'
if std::path::Path::new(&inside_path).is_file() {
if std::path::Path::new(&normal_path).is_file() {
return Err(format!("The following files can't exist simultaneously:\n- {}\n- {}\nPlease delete one and try again.", inside_path, normal_path));
}
path = inside_path;
} else {
path = normal_path;
}
};
let newcode = match std::fs::read_to_string(&path) {
Err(err) => { return Ok(()); }
Ok(code) => { code }
};
let mut new_book = match read_book(&newcode) {
Err(err) => { return Err(format!("\x1b[1m[{}]\x1b[0m\n{}", path, err)); }
Ok(book) => { book }
};
book_set_origin_file(&mut new_book, load.file.len());
load.file.push(File { path: path.clone(), code: newcode });
for name in &new_book.names {
load.book.names.push(name.clone());
load.book.entrs.insert(name.clone(), new_book.entrs.get(name).unwrap().clone());
}
for unbound in book_get_unbounds(&new_book) {
load_entry(&unbound, load)?;
}
}
return Ok(());
}

202
src/parser.rs Normal file

@ -0,0 +1,202 @@
pub mod new_type;
pub mod term;
pub mod utils;
pub mod name;
use crate::book::name::Ident;
use crate::book::span::{ByteOffset, Span};
use crate::book::term::Term;
use crate::book::{Argument, Book, Entry, Rule};
use crate::parser::term::{parse_apps, parse_term};
use crate::parser::utils::{get_init_index, get_last_index};
use hvm::parser;
use std::collections::HashMap;
use self::name::parse_path_str;
pub fn parse_use<'a>(state: parser::State<'a>, map: &mut HashMap<String, String>) -> Result<parser::State<'a>, String> {
let (state, name) = parser::name1(state)?;
let (state, _) = parser::consume("as", state)?;
let (state, val) = parser::name1(state)?;
map.insert(name, val);
Ok(state)
}
pub fn parse_uses<'a>(state: parser::State<'a>, map: &mut HashMap<String, String>) -> Result<parser::State<'a>, String> {
let mut vec = Vec::new();
let mut state = state;
loop {
let (state_i, attr) = parser::text("use ", state)?;
if attr {
let state_i = parse_use(state_i, map)?;
vec.push(attr);
state = state_i;
} else {
return Ok(state);
}
}
}
pub fn parse_rule(state: parser::State, name: String, init: ByteOffset) -> parser::Answer<Box<Rule>> {
let (state, pats) = parser::until(parser::text_parser("="), Box::new(parse_term), state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
let (state, body) = parse_apps(state)?;
Ok((
state,
Box::new(Rule {
orig,
name: Ident(name),
pats,
body,
}),
))
}
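// An entry is either `Name (x: A) ... : T { body }`, which is sugar for a single
// rule over the declared arguments, or a signature followed by `Name pat ... = body`
// rewrite rules parsed by `parse_rule` above.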
pub fn parse_entry(state: parser::State) -> parser::Answer<Box<Entry>> {
let (state, init) = get_init_index(state)?;
let (state, name) = parse_path_str(state)?;
let (state, last) = get_last_index(state)?;
let name_orig = Span::new_off(init, last);
let (state, kdl) = parser::text("#", state)?;
let (state, kdln) = if kdl {
let (state, name) = parser::name1(state)?;
(state, Some(name))
} else {
(state, None)
};
let (state, args) = parser::until(
Box::new(|state| {
let (state, end_0) = parser::dry(Box::new(|state| parser::text(":", state)), state)?;
let (state, end_1) = parser::dry(Box::new(|state| parser::text("{", state)), state)?;
Ok((state, end_0 || end_1))
}),
Box::new(parse_argument),
state,
)?;
let (state, next) = parser::peek_char(state)?;
let (state, tipo) = if next == ':' {
let (state, _) = parser::consume(":", state)?;
parse_apps(state)?
} else {
(
state,
Box::new(Term::Hol {
orig: Span::Generated,
numb: u64::MAX,
}),
) // TODO: set orig
};
let (state, head) = parser::peek_char(state)?;
if head == '{' {
let (state, _) = parser::consume("{", state)?;
let (state, body) = parse_apps(state)?;
let (state, _) = parser::consume("}", state)?;
let mut pats = vec![];
for arg in &args {
pats.push(Box::new(Term::Var {
orig: arg.orig,
name: arg.name.clone(),
}));
// TODO: set orig
}
let rules = vec![Box::new(Rule {
orig: name_orig,
name: Ident(name.clone()),
pats,
body,
})];
Ok((
state,
Box::new(Entry {
name: Ident(name),
kdln,
args,
tipo,
rules,
orig: name_orig,
}),
))
} else {
let mut rules = Vec::new();
let rule_prefix = &format!("{} ", name);
let mut state = state;
loop {
let (loop_state, init) = get_init_index(state)?;
let (loop_state, cont) = parser::text(rule_prefix, loop_state)?;
if cont {
let (loop_state, rule) = parse_rule(loop_state, name.clone(), init)?;
rules.push(rule);
state = loop_state;
} else {
state = loop_state;
break;
}
}
let entry = Box::new(Entry {
name: Ident(name),
kdln,
args,
tipo,
rules,
orig: name_orig,
});
Ok((state, entry))
}
}
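// `(name: T)` declares an explicit argument and `<name: T>` a hidden one; hidden
// arguments are erased unless marked with `+`, explicit ones are erased when
// prefixed with `-`, and a missing annotation defaults the type to `Type`.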
pub fn parse_argument(state: parser::State) -> parser::Answer<Box<Argument>> {
let (state, init) = get_init_index(state)?;
let (state, eras) = parser::text("-", state)?;
let (state, keep) = parser::text("+", state)?;
let (state, next) = parser::peek_char(state)?;
let (open, close) = if next == '(' { ("(", ")") } else { ("<", ">") };
let (state, _) = parser::consume(open, state)?;
let (state, name) = parser::name1(state)?;
let (state, last) = get_last_index(state)?;
let (state, anno) = parser::text(":", state)?;
let (state, tipo) = if anno {
parse_apps(state)?
} else {
(state, Box::new(Term::Typ { orig: Span::new_off(init, last) }))
};
let (state, _) = parser::consume(close, state)?;
let hide = open == "<";
let eras = if hide { !keep } else { eras };
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((
state,
Box::new(Argument {
hide,
orig,
eras,
name: Ident(name),
tipo,
}),
))
}
pub fn parse_book(state: parser::State) -> parser::Answer<(Box<Book>, HashMap<String, String>)> {
let mut map = HashMap::new();
let state = parse_uses(state, &mut map)?;
let (state, entry_vec) = parser::until(Box::new(parser::done), Box::new(parse_entry), state)?;
let mut names = Vec::new();
let mut entrs = HashMap::new();
for entry in entry_vec {
if !entrs.contains_key(&entry.name) {
names.push(entry.name.to_string().clone());
entrs.insert(entry.name.clone(), entry);
} else {
println!("\x1b[33mwarning\x1b[0m: ignored redefinition of '{}'.", entry.name);
}
}
Ok((state, (Box::new(Book { holes: 0, names, entrs }), map)))
}
pub fn read_book(code: &str) -> Result<(Box<Book>, HashMap<String, String>), String> {
parser::read(Box::new(parse_book), code)
}
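
A minimal sketch of driving this parser directly, with an inline source string chosen only for illustration (not code from this patch):

```rust
let code = "Double (x: U60) : U60 { (+ x x) }";
match read_book(code) {
    Ok((book, uses)) => {
        // `book.names` preserves declaration order; `uses` holds the `use ... as ...` aliases.
        for name in &book.names {
            println!("parsed entry: {}", name);
        }
        assert!(uses.is_empty());
    }
    Err(err) => eprintln!("parse error: {}", err),
}
```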

41
src/parser/name.rs Normal file

@ -0,0 +1,41 @@
use hvm::parser;
fn is_letter(chr: char) -> bool {
chr.is_ascii_alphanumeric() || chr == '_' || chr == '.' || chr == '$'
}
/// Parses a name right after the parsing cursor.
fn name_here(state: parser::State) -> parser::Answer<String> {
let mut name: String = String::new();
let mut state = state;
let mut already_seen_slash = false;
while let Some(got) = parser::head(state) {
if is_letter(got) || (got == '/' && !already_seen_slash) {
if got == '/' {
already_seen_slash = true;
}
name.push(got);
state = parser::tail(state);
} else {
if got == '/' {
return parser::expected("name", 1, state);
}
break;
}
}
Ok((state, name))
}
/// Parses a name after skipping comments and whitespace.
fn name(state: parser::State) -> parser::Answer<String> {
let (state, _) = parser::skip(state)?;
name_here(state)
}
pub fn parse_path_str(state: parser::State) -> parser::Answer<String> {
let (state, name1) = name(state)?;
if !name1.is_empty() || name1 == "/" {
Ok((state, name1))
} else {
parser::expected("name", 1, state)
}
}

102
src/parser/new_type.rs Normal file

@ -0,0 +1,102 @@
use crate::book::name::Ident;
use crate::book::new_type::{Constructor, NewType, SumType, ProdType};
use crate::parser::*;
pub fn parse_sum_type(state: parser::State) -> parser::Answer<Option<Box<NewType>>> {
parser::guard(
parser::text_parser("type "),
Box::new(|state| {
let (state, _) = parser::consume("type", state)?;
let (state, name) = parser::name1(state)?;
let (state, pars) = parser::until(parser::text_parser("{"), Box::new(parse_argument), state)?;
let mut ctrs = vec![];
let mut state = state;
loop {
let state_i = state;
let (state_i, ctr_name) = parser::name(state_i)?;
if ctr_name.is_empty() {
break;
}
let mut ctr_args = vec![];
let mut state_i = state_i;
loop {
let state_j = state_i;
let (state_j, head) = parser::peek_char(state_j)?;
if head != '(' {
break;
}
let (state_j, ctr_arg) = parse_argument(state_j)?;
ctr_args.push(ctr_arg);
state_i = state_j;
}
ctrs.push(Box::new(Constructor {
name: Ident(ctr_name),
args: ctr_args,
}));
state = state_i;
}
Ok((state, Box::new(NewType::Sum(SumType { name: Ident(name), pars, ctrs }))))
}),
state,
)
}
pub fn parse_prod_type(state: parser::State) -> parser::Answer<Option<Box<NewType>>> {
parser::guard(
parser::text_parser("record "),
Box::new(|state| {
let (state, _) = parser::consume("record", state)?;
let (state, name) = parser::name1(state)?;
let (state, pars) = parser::until(parser::text_parser("{"), Box::new(parse_argument), state)?;
let mut state = state;
let mut fields = Vec::new();
loop {
let state_i = state;
let (state_i, init) = get_init_index(state_i)?;
let (state_i, ctr_name) = parser::name(state_i)?;
let (state_i, _) = parser::consume(":", state_i)?;
let (state_i, tipo) = parse_apps(state_i)?;
let (state_i, last) = get_last_index(state_i)?;
let orig = Span::new_off(init, last);
fields.push(Box::new(Argument {
hide: false,
eras: false,
orig,
name: Ident(ctr_name),
tipo,
}));
let (state_i, head) = parser::peek_char(state_i)?;
state = state_i;
if head == '}' {
break;
}
}
Ok((state, Box::new(NewType::Prod(ProdType {
name: Ident(name),
pars,
fields
}))))
}),
state,
)
}
pub fn parse_newtype(state: parser::State) -> parser::Answer<Box<NewType>> {
parser::grammar(
"Newtype",
&[
Box::new(parse_sum_type), // `type `
Box::new(parse_prod_type), // `record `
Box::new(|state| Ok((state, None))),
],
state,
)
}
pub fn read_newtype(code: &str) -> Result<Box<NewType>, String> {
parser::read(Box::new(parse_newtype), code)
}
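
A hedged sketch of feeding a declaration to `read_newtype`; the `Maybe` type below is only an example, not a file shipped with the compiler:

```rust
let source = "type Maybe (t: Type) { none some (value: t) }";
match read_newtype(source) {
    Ok(newtype) => match *newtype {
        // A `type` declaration yields a sum type; a `record` yields a product type.
        NewType::Sum(sum) => println!("sum type with {} constructors", sum.ctrs.len()),
        NewType::Prod(_) => println!("record type"),
    },
    Err(err) => eprintln!("{}", err),
}
```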

886
src/parser/term.rs Normal file

@ -0,0 +1,886 @@
use crate::book::name::Ident;
use crate::book::span::{ByteOffset, Span};
use crate::book::term::{Operator, Term};
use crate::parser::utils::{get_init_index, get_last_index, is_ctr_head};
use hvm::parser;
use hvm::parser::{Answer, State};
use super::name::parse_path_str;
type TermPrefix = Box<dyn Fn(ByteOffset, Box<Term>) -> Box<Term>>;
type TermComplete = Box<dyn Fn(&str) -> Box<Term>>;
pub fn parse_var(state: State) -> Answer<Option<Box<Term>>> {
parser::guard(
Box::new(|state| Ok((state, true))),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let (state, name) = parse_path_str(state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
if let Ok(numb) = name.parse::<u64>() {
Ok((state, Box::new(Term::Num { orig, numb })))
} else {
Ok((state, Box::new(Term::Var { orig, name: Ident(name) })))
}
}),
state,
)
}
pub fn parse_hol(state: State) -> Answer<Option<Box<Term>>> {
parser::guard(
parser::text_parser("_"),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::consume("_", state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((state, Box::new(Term::Hol { orig, numb: 0 })))
}),
state,
)
}
pub fn parse_hlp(state: State) -> Answer<Option<Box<Term>>> {
return parser::guard(
parser::text_parser("?"),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::consume("?", state)?;
let (state, _) = parser::name_here(state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((state, Box::new(Term::Hlp { orig })))
}),
state,
);
}
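// String literals (delimited by '"' or '`') desugar into a chain of
// `String.cons` constructors ending in `String.nil`.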
pub fn parse_str(state: State) -> Answer<Option<Box<Term>>> {
parser::guard(
Box::new(|state| {
let (state, head) = parser::get_char(state)?;
Ok((state, head == '"' || head == '`'))
}),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let delim = parser::head(state).unwrap_or('\0');
let state = parser::tail(state);
let mut chars: Vec<char> = Vec::new();
let mut state = state;
loop {
if let Some(next) = parser::head(state) {
if next == delim || next == '\0' {
state = parser::tail(state);
break;
} else {
chars.push(next);
state = parser::tail(state);
}
}
}
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
let empty = Term::Ctr {
orig,
name: Ident::new_path("String", "nil"),
args: Vec::new(),
};
let list = Box::new(chars.iter().rfold(empty, |t, h| Term::Ctr {
orig,
name: Ident::new_path("String", "cons"),
args: vec![Box::new(Term::Num { orig, numb: *h as u64 }), Box::new(t)],
}));
Ok((state, list))
}),
state,
)
}
pub fn parse_grp(state: State) -> Answer<Option<Box<Term>>> {
parser::guard(
parser::text_parser("("),
Box::new(|state| {
let (state, _) = parser::consume("(", state)?;
let (state, term) = parse_apps(state)?;
let (state, _) = parser::consume(")", state)?;
Ok((state, term))
}),
state,
)
}
pub fn parse_apps(state: State) -> Answer<Box<Term>> {
let (state, init) = get_init_index(state)?;
let (mut state, mut term) = parse_term(state)?;
loop {
let loop_state = state;
let (loop_state, _) = parser::skip_while(loop_state, Box::new(|x| *x == ' '))?;
let head = parser::head(loop_state).unwrap_or(' ');
let is_term_initializer // NOTE: this must cover all characters that can start a term
= ('a'..='z').contains(&head)
|| ('A'..='Z').contains(&head)
|| ('0'..='9').contains(&head)
|| ['(','[','"','\'','@','?','_','#'].contains(&head);
if is_term_initializer {
let (loop_state, argm) = parse_term(loop_state)?;
let (loop_state, last) = get_last_index(loop_state)?;
let orig = Span::new_off(init, last);
term = Box::new(Term::App { orig, func: term, argm });
state = loop_state;
} else {
state = loop_state;
break;
}
}
Ok((state, term))
}
pub fn parse_ann(state: State) -> Answer<Option<TermPrefix>> {
return parser::guard(
parser::text_parser("::"),
Box::new(|state| {
let (state, _) = parser::consume("::", state)?;
let (state, tipo) = parse_apps(state)?;
let (state, last) = get_last_index(state)?;
Ok((
state,
Box::new(move |init, expr| {
let orig = Span::new_off(init, last);
let expr = expr;
let tipo = tipo.clone();
Box::new(Term::Ann { orig, expr, tipo })
}),
))
}),
state,
);
}
pub fn parse_term_prefix(state: State) -> Answer<Box<Term>> {
// NOTE: all characters that can start a term must be listed in `parse_apps()`
parser::grammar(
"Term",
&[
Box::new(parse_all), // `(name:`
Box::new(parse_ctr), // `(Name`
Box::new(parse_op2), // `(+`
Box::new(parse_grp), // `(`
Box::new(parse_sig), // `[name:`
Box::new(parse_new), // `$`
Box::new(parse_lst), // `[`
Box::new(parse_str), // `"`
Box::new(parse_chr), // `'`
Box::new(parse_lam), // `@`
Box::new(parse_let), // `let `
Box::new(parse_if), // `if `
Box::new(parse_mat), // `match `
Box::new(parse_open), // `open `
Box::new(parse_do), // `do `
Box::new(parse_hlp), // `?`
Box::new(parse_hol), // `_`
Box::new(parse_var), // x
Box::new(|state| Ok((state, None))),
],
state,
)
}
pub fn parse_term_suffix(state: State) -> Answer<TermPrefix> {
parser::grammar(
"Term",
&[
Box::new(parse_arr), // `->`
Box::new(parse_sub), // `# `
Box::new(parse_ann), // `::`
Box::new(|state| Ok((state, Some(Box::new(|_, term| term))))),
],
state,
)
}
pub fn parse_arr(state: State) -> Answer<Option<TermPrefix>> {
return parser::guard(
parser::text_parser("->"),
Box::new(|state| {
let (state, _) = parser::consume("->", state)?;
let (state, body) = parse_apps(state)?;
let (state, last) = get_last_index(state)?;
Ok((
state,
Box::new(move |init, tipo| {
let orig = Span::new_off(init, last);
let name = "_".to_string();
let body = body.clone();
Box::new(Term::All {
orig,
name: Ident(name),
tipo,
body,
})
}),
))
}),
state,
);
}
pub fn parse_sub(state: State) -> Answer<Option<TermPrefix>> {
return parser::guard(
parser::text_parser("##"),
Box::new(|state| {
let (state, _) = parser::consume("##", state)?;
let (state, name) = parser::name1(state)?;
let (state, _) = parser::consume("/", state)?;
let (state, redx) = parser::name1(state)?;
if let Ok(redx) = redx.parse::<u64>() {
let (state, last) = get_last_index(state)?;
Ok((
state,
Box::new(move |init, expr| {
let orig = Span::new_off(init, last);
let name = name.clone();
let indx = 0;
let expr = expr;
Box::new(Term::Sub {
orig,
name: Ident(name),
indx,
redx,
expr,
})
}),
))
} else {
parser::expected("number", name.len(), state)
}
}),
state,
);
}
pub fn parse_let_st(state: State) -> Answer<Option<TermComplete>> {
return parser::guard(
parser::text_parser("let "),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::consume("let ", state)?;
let (state, name) = parser::name1(state)?;
let (state, _) = parser::consume("=", state)?;
let (state, expr) = parse_apps(state)?;
let (state, _) = parser::text(";", state)?;
let (state, body) = parse_term_st(state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((
state,
Box::new(move |monad| {
Box::new(Term::Let {
orig,
name: Ident(name.clone()),
expr: expr.clone(),
body: body(monad),
})
}),
))
}),
state,
);
}
pub fn parse_return_st(state: State) -> Answer<Option<TermComplete>> {
return parser::guard(
parser::text_parser("return "),
Box::new(move |state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::consume("return ", state)?;
let (state, term) = parse_apps(state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((
state,
Box::new(move |monad| {
Box::new(Term::Ctr {
orig,
name: Ident::new_path(monad, "pure"),
args: vec![term.clone()],
})
}),
))
}),
state,
);
}
pub fn parse_ask_named_st(state: State) -> Answer<Option<TermComplete>> {
parser::guard(
Box::new(|state| {
let (state, all0) = parser::text("ask ", state)?;
let (state, name) = parser::name(state)?;
let (state, all1) = parser::text("=", state)?;
Ok((state, all0 && !name.is_empty() && all1))
}),
Box::new(move |state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::consume("ask", state)?;
let (state, name) = parser::name(state)?;
let (state, _) = parser::consume("=", state)?;
let (state, acti) = parse_apps(state)?;
let (state, body) = parse_term_st(state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((
state,
Box::new(move |monad| {
Box::new(Term::Ctr {
orig,
name: Ident::new_path(monad, "bind"),
args: vec![
acti.clone(),
Box::new(Term::Lam {
orig,
name: Ident(name.clone()),
body: body(monad),
}),
],
})
}),
))
}),
state,
)
}
pub fn parse_ask_anon_st(state: State) -> Answer<Option<TermComplete>> {
parser::guard(
parser::text_parser("ask "),
Box::new(move |state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::consume("ask", state)?;
let (state, acti) = parse_apps(state)?;
let (state, body) = parse_term_st(state)?;
let (state, last) = get_last_index(state)?;
let name = "_".to_string();
let orig = Span::new_off(init, last);
Ok((
state,
Box::new(move |monad| {
Box::new(Term::Ctr {
orig,
name: Ident::new_path(monad, "bind"),
args: vec![
acti.clone(),
Box::new(Term::Lam {
orig,
name: Ident(name.clone()),
body: body(monad),
}),
],
})
}),
))
}),
state,
)
}
pub fn parse_term_st(state: State) -> Answer<TermComplete> {
parser::grammar(
"Statement",
&[
Box::new(parse_return_st),
Box::new(parse_ask_named_st),
Box::new(parse_ask_anon_st),
Box::new(parse_let_st),
Box::new(|state| {
let (state, term) = parse_apps(state)?;
Ok((state, Some(Box::new(move |_| term.clone()))))
}),
],
state,
)
}
pub fn parse_term(state: State) -> Answer<Box<Term>> {
let (state, init) = get_init_index(state)?;
let (state, prefix) = parse_term_prefix(state)?;
let (state, suffix) = parse_term_suffix(state)?;
Ok((state, suffix(init, prefix)))
}
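// `do M { ... }` parses its statements with `parse_term_st` above and desugars
// them into applications of `M.bind` and `M.pure`.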
pub fn parse_do(state: State) -> Answer<Option<Box<Term>>> {
parser::guard(
parser::text_parser("do "),
Box::new(|state| {
let (state, _) = parser::text("do", state)?;
let (state, name) = parse_path_str(state)?;
let (state, _) = parser::text("{", state)?;
let (state, term) = parse_term_st(state)?;
let (state, _) = parser::text("}", state)?;
Ok((state, term(&name)))
}),
state,
)
}
pub fn parse_mat(state: State) -> Answer<Option<Box<Term>>> {
return parser::guard(
parser::text_parser("match "),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::consume("match ", state)?;
let (state, tipo) = parse_path_str(state)?;
let (state, nm_i) = get_init_index(state)?;
let (state, name) = parser::name1(state)?;
let (state, next) = parser::peek_char(state)?;
let (state, expr) = if next == '=' {
let (state, _) = parser::consume("=", state)?;
let (state, expr) = parse_apps(state)?;
(state, expr)
} else {
let (state, nm_j) = get_last_index(state)?;
(
state,
Box::new(Term::Var {
orig: Span::new_off(nm_i, nm_j),
name: Ident(name.clone()),
}),
)
};
let (state, _) = parser::consume("{", state)?;
let (state, cses) = parser::until(
parser::text_parser("}"),
Box::new(|state| {
let (state, name) = parser::name1(state)?;
let (state, _) = parser::consume("=>", state)?;
let (state, body) = parse_apps(state)?;
let (state, _) = parser::text(";", state)?;
Ok((state, (Ident(name), body)))
}),
state,
)?;
let (state, next) = peek_char_local(state)?;
let (state, moti) = if next == ':' {
let (state, _) = parser::consume(":", state)?;
let (state, moti) = parse_apps(state)?;
(state, moti)
} else {
(state, Box::new(Term::Hol { orig: Span::generated(), numb: 0 }))
};
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((
state,
Box::new(Term::Mat {
orig,
tipo: Ident(tipo),
name: Ident(name),
expr,
cses,
moti,
}),
))
}),
state,
);
}
pub fn parse_open(state: State) -> Answer<Option<Box<Term>>> {
return parser::guard(
parser::text_parser("open "),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::consume("open ", state)?;
let (state, tipo) = parse_path_str(state)?;
let (state, nm_i) = get_init_index(state)?;
let (state, name) = parser::name1(state)?;
let (state, next) = parser::peek_char(state)?;
let (state, expr) = if next == '=' {
let (state, _) = parser::consume("=", state)?;
let (state, expr) = parse_apps(state)?;
(state, expr)
} else {
let (state, nm_j) = get_last_index(state)?;
(
state,
Box::new(Term::Var {
orig: Span::new_off(nm_i, nm_j),
name: Ident(name.clone()),
}),
)
};
let (state, next) = peek_char_local(state)?;
let (state, moti) = if next == ':' {
let (state, _) = parser::consume(":", state)?;
let (state, moti) = parse_apps(state)?;
(state, moti)
} else {
(state, Box::new(Term::Hol { orig: Span::generated(), numb: 0 }))
};
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
let (state, body) = parse_apps(state)?;
Ok((
state,
Box::new(Term::Open {
orig,
tipo: Ident(tipo),
name: Ident(name),
expr,
moti,
body,
}),
))
}),
state,
);
}
pub fn peek_char_local(state: State) -> Answer<char> {
let (state, _) = parser::skip_while(state, Box::new(|x| *x == ' '))?;
if let Some(got) = parser::head(state) {
Ok((state, got))
} else {
Ok((state, '\0'))
}
}
pub fn parse_all(state: State) -> Answer<Option<Box<Term>>> {
parser::guard(
Box::new(|state| {
let (state, all0) = parser::text("(", state)?;
let (state, name) = parser::name(state)?;
let (state, all1) = parser::text(":", state)?;
Ok((state, all0 && all1 && !name.is_empty()))
//Ok((state, all0 && all1 && name.len() > 0 && is_var_head(name.chars().nth(0).unwrap_or(' '))))
}),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::consume("(", state)?;
let (state, name) = parser::name1(state)?;
let (state, _) = parser::consume(":", state)?;
let (state, tipo) = parse_apps(state)?;
let (state, _) = parser::consume(")", state)?;
let (state, isfn) = parser::text("=>", state)?;
if isfn {
let (state, body) = parse_apps(state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((
state,
Box::new(Term::Ann {
orig,
expr: Box::new(Term::Lam {
orig,
name: Ident(name.clone()),
body,
}),
tipo: Box::new(Term::All {
orig,
name: Ident(name),
tipo,
body: Box::new(Term::Hol { orig, numb: 0 }),
}),
}),
))
} else {
let (state, _) = parser::text("->", state)?;
let (state, body) = parse_apps(state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((
state,
Box::new(Term::All {
orig,
name: Ident(name),
tipo,
body,
}),
))
}
}),
state,
)
}
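// `if cond { t } else { f }` desugars to `(Bool.if _ cond t f)`, with a hole
// standing in for the motive.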
pub fn parse_if(state: State) -> Answer<Option<Box<Term>>> {
return parser::guard(
parser::text_parser("if "),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::consume("if ", state)?;
let (state, cond) = parse_apps(state)?;
let (state, _) = parser::consume("{", state)?;
let (state, if_t) = parse_apps(state)?;
let (state, _) = parser::text("}", state)?;
let (state, _) = parser::text("else", state)?;
let (state, _) = parser::consume("{", state)?;
let (state, if_f) = parse_apps(state)?;
let (state, _) = parser::text("}", state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
let moti = Box::new(Term::Hol { orig, numb: 0 });
Ok((
state,
Box::new(Term::Ctr {
orig,
name: Ident::new_path("Bool", "if"),
args: vec![moti, cond, if_t, if_f],
}),
))
}),
state,
);
}
pub fn parse_let(state: State) -> Answer<Option<Box<Term>>> {
return parser::guard(
parser::text_parser("let "),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::consume("let ", state)?;
let (state, name) = parser::name1(state)?;
let (state, _) = parser::consume("=", state)?;
let (state, expr) = parse_apps(state)?;
let (state, _) = parser::text(";", state)?;
let (state, body) = parse_apps(state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((
state,
Box::new(Term::Let {
orig,
name: Ident(name),
expr,
body,
}),
))
}),
state,
);
}
pub fn parse_lam(state: State) -> Answer<Option<Box<Term>>> {
parser::guard(
Box::new(|state| {
let (state, name) = parser::name(state)?;
let (state, arro) = parser::text("=>", state)?;
Ok((state, !name.is_empty() && arro))
//Ok((state, all0 && all1 && name.len() > 0 && is_var_head(name.chars().nth(0).unwrap_or(' '))))
}),
Box::new(move |state| {
let (state, init) = get_init_index(state)?;
let (state, name) = parser::name1(state)?;
let (state, _) = parser::consume("=>", state)?;
let (state, body) = parse_apps(state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((state, Box::new(Term::Lam { orig, name: Ident(name), body })))
}),
state,
)
}
pub fn parse_lst(state: State) -> Answer<Option<Box<Term>>> {
parser::guard(
Box::new(|state| {
let (state, head) = parser::get_char(state)?;
Ok((state, head == '['))
}),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let (state, _head) = parser::text("[", state)?;
let state = state;
let (state, elems) = parser::until(
Box::new(|x| parser::text("]", x)),
Box::new(|x| {
let (state, term) = parse_term(x)?;
let (state, _) = parser::maybe(Box::new(|x| parser::text(",", x)), state)?;
Ok((state, term))
}),
state,
)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
let empty = Term::Ctr {
orig,
name: Ident::new_path("List", "nil"),
args: Vec::new(),
};
let list = Box::new(elems.iter().rfold(empty, |t, h| Term::Ctr {
orig,
name: Ident::new_path("List", "cons"),
args: vec![h.clone(), Box::new(t)],
}));
Ok((state, list))
}),
state,
)
}
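// `$ a b` desugars to `(Sigma.new _ _ a b)`, with holes for the implicit type
// arguments of the sigma.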
pub fn parse_new(state: State) -> Answer<Option<Box<Term>>> {
parser::guard(
parser::text_parser("$"),
Box::new(move |state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::consume("$", state)?;
let (state, val0) = parse_term(state)?;
let (state, val1) = parse_term(state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((
state,
Box::new(Term::Ctr {
orig,
name: Ident::new_path("Sigma", "new"),
args: vec![Box::new(Term::Hol { orig, numb: 0 }), Box::new(Term::Hol { orig, numb: 0 }), val0, val1],
}),
))
}),
state,
)
}
pub fn parse_ctr(state: State) -> Answer<Option<Box<Term>>> {
parser::guard(
Box::new(|state| {
let (state, open) = parser::text("(", state)?;
let (state, head) = parser::get_char(state)?;
//let (state, next) = parser::peek_char(state)?;
Ok((state, open && is_ctr_head(head)))
}),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let (state, open) = parser::text("(", state)?;
let (state, name) = parse_path_str(state)?;
let (state, args) = if open {
parser::until(parser::text_parser(")"), Box::new(parse_term), state)?
} else {
(state, Vec::new())
};
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((state, Box::new(Term::Ctr { orig, name: Ident(name), args })))
}),
state,
)
}
pub fn parse_chr(state: State) -> Answer<Option<Box<Term>>> {
parser::guard(
Box::new(|state| {
let (state, head) = parser::get_char(state)?;
Ok((state, head == '\''))
}),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::text("'", state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
if let Some(c) = parser::head(state) {
let state = parser::tail(state);
let (state, _) = parser::text("'", state)?;
Ok((state, Box::new(Term::Num { orig, numb: c as u64 })))
} else {
parser::expected("character", 1, state)
}
}),
state,
)
}
pub fn parse_op2(state: State) -> Answer<Option<Box<Term>>> {
fn is_op_char(chr: char) -> bool {
matches!(chr, '+' | '-' | '*' | '/' | '%' | '&' | '|' | '^' | '<' | '>' | '=' | '!')
}
fn parse_oper(state: State) -> Answer<Operator> {
fn op<'a>(symbol: &'static str, oper: Operator) -> parser::Parser<'a, Option<Operator>> {
Box::new(move |state| {
let (state, done) = parser::text(symbol, state)?;
Ok((state, if done { Some(oper) } else { None }))
})
}
parser::grammar(
"Oper",
&[
op("+", Operator::Add),
op("-", Operator::Sub),
op("*", Operator::Mul),
op("/", Operator::Div),
op("%", Operator::Mod),
op("&", Operator::And),
op("|", Operator::Or),
op("^", Operator::Xor),
op("<<", Operator::Shl),
op(">>", Operator::Shr),
op("<=", Operator::Lte),
op("<", Operator::Ltn),
op("==", Operator::Eql),
op(">=", Operator::Gte),
op(">", Operator::Gtn),
op("!=", Operator::Neq),
],
state,
)
}
parser::guard(
Box::new(|state| {
let (state, open) = parser::text("(", state)?;
let (state, head) = parser::get_char(state)?;
Ok((state, open && is_op_char(head)))
}),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::consume("(", state)?;
let (state, oper) = parse_oper(state)?;
let (state, val0) = parse_term(state)?;
let (state, val1) = parse_term(state)?;
let (state, _) = parser::consume(")", state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((state, Box::new(Term::Op2 { orig, oper, val0, val1 })))
}),
state,
)
}
pub fn parse_sig(state: State) -> Answer<Option<Box<Term>>> {
parser::guard(
Box::new(|state| {
let (state, all0) = parser::text("[", state)?;
let (state, name) = parser::name(state)?;
let (state, all1) = parser::text(":", state)?;
Ok((state, all0 && all1 && !name.is_empty()))
//Ok((state, all0 && all1 && name.len() > 0 && is_var_head(name.chars().nth(0).unwrap_or(' '))))
}),
Box::new(|state| {
let (state, init) = get_init_index(state)?;
let (state, _) = parser::consume("[", state)?;
let (state, name) = parser::name1(state)?;
let (state, _) = parser::consume(":", state)?;
let (state, tipo) = parse_apps(state)?;
let (state, _) = parser::consume("]", state)?;
let (state, _) = parser::text("->", state)?;
let (state, body) = parse_apps(state)?;
let (state, last) = get_last_index(state)?;
let orig = Span::new_off(init, last);
Ok((
state,
Box::new(Term::Ctr {
orig,
name: Ident("Sigma".to_string()),
args: vec![tipo, Box::new(Term::Lam { orig, name: Ident(name), body })],
}),
))
}),
state,
)
}
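
These sugar parsers are meant to be tried in sequence by the top-level term parser. The sketch below shows a plausible shape of that dispatch using `parser::grammar`; the function name `parse_term_sketch` and the exact ordering are illustrative and not taken from the actual `parse_term`, which also handles forms not shown in this hunk (if-expressions, foralls, variables, and so on).

```rust
// Plausible sketch only: how the guarded parsers above combine. parser::grammar
// tries each alternative in order; a parser that returns None (for example
// because its guard failed) lets the next alternative run.
pub fn parse_term_sketch(state: parser::State) -> parser::Answer<Box<Term>> {
    parser::grammar(
        "Term",
        &[
            Box::new(parse_ctr), // (Constructor args)
            Box::new(parse_op2), // (+ a b)
            Box::new(parse_lst), // [a, b, c]
            Box::new(parse_sig), // [x: A] -> B
            Box::new(parse_new), // $a b
            Box::new(parse_chr), // 'c'
            Box::new(parse_let), // let x = e; body
            Box::new(parse_lam), // x => body
        ],
        state,
    )
}
```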

16
src/parser/utils.rs Normal file
View File

@ -0,0 +1,16 @@
use crate::book::span::ByteOffset;
use hvm::parser;
pub fn is_ctr_head(head: char) -> bool {
('A'..='Z').contains(&head)
}
pub fn get_init_index(state: parser::State) -> parser::Answer<ByteOffset> {
let (state, _) = parser::skip(state)?;
Ok((state, ByteOffset(state.index as u32)))
}
pub fn get_last_index(state: parser::State) -> parser::Answer<ByteOffset> {
Ok((state, ByteOffset(state.index as u32)))
}
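
A minimal usage sketch of the two helpers. The function below is hypothetical, and `Span` is assumed to live next to `ByteOffset` in `crate::book::span`, as the parsers above suggest:

```rust
use crate::book::span::Span;

// Hypothetical helper: bracket a parse with the two index helpers so the Span
// covers exactly the consumed text. Whitespace is skipped before recording the
// start offset, but not after recording the end offset.
pub fn parse_spanned_name(state: parser::State) -> parser::Answer<(Span, String)> {
    let (state, init) = get_init_index(state)?;
    let (state, name) = parser::name1(state)?;
    let (state, last) = get_last_index(state)?;
    Ok((state, (Span::new_off(init, last), name)))
}
```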

View File

@ -1,141 +0,0 @@
// TODO: linearize variables, adding dups
// TODO: U120?
use crate::language::{*};
pub fn to_hvm_term(book: &Book, term: &Term) -> String {
if let Some(as_string) = interpret_as_string(term) {
return format!("\"{}\"", as_string);
}
match term {
Term::Typ { .. } => {
format!("Type")
}
Term::Var { orig: _, name } => {
format!("{}", name)
}
Term::Lam { orig: _, name, body } => {
let body = to_hvm_term(book, body);
format!("@{} {}", name, body)
}
Term::App { orig: _, func, argm } => {
let func = to_hvm_term(book, func);
let argm = to_hvm_term(book, argm);
format!("({} {})", func, argm)
}
Term::All { orig: _, name, tipo, body } => {
let body = to_hvm_term(book, body);
format!("0")
}
Term::Let { orig: _, name, expr, body } => {
let expr = to_hvm_term(book, expr);
let body = to_hvm_term(book, body);
format!("let {} = {}; {}", name, expr, body)
}
Term::Ann { orig: _, expr, tipo: _ } => {
let expr = to_hvm_term(book, expr);
format!("{}", expr)
}
Term::Sub { orig: _, expr, name: _, indx: _, redx: _ } => {
let expr = to_hvm_term(book, expr);
format!("{}", expr)
}
Term::Ctr { orig: _, name, args } => {
let entr = book.entrs.get(name).unwrap();
let args = args.iter().enumerate().filter(|(i,x)| !entr.args[*i].eras).map(|x| &**x.1).collect::<Vec<&Term>>();
format!("({}{})", name, args.iter().map(|x| format!(" {}", to_hvm_term(book, x))).collect::<String>())
}
Term::Fun { orig: _, name, args } => {
let entr = book.entrs.get(name).unwrap();
let args = args.iter().enumerate().filter(|(i,x)| !entr.args[*i].eras).map(|x| &**x.1).collect::<Vec<&Term>>();
format!("({}{})", name, args.iter().map(|x| format!(" {}", to_hvm_term(book, x))).collect::<String>())
}
Term::Hlp { orig: _ } => {
format!("0")
}
Term::U60 { orig: _ } => {
format!("0")
}
Term::Num { orig: _, numb } => {
format!("{}", numb)
}
Term::Op2 { orig: _, oper, val0, val1 } => {
let val0 = to_hvm_term(book, val0);
let val1 = to_hvm_term(book, val1);
format!("({} {} {})", show_oper(&oper), val0, val1)
}
Term::Hol { orig: _, numb } => {
format!("_")
}
Term::Mat { .. } => {
panic!("Internal error."); // removed after adjust()
}
}
}
pub fn to_hvm_oper(oper: &Oper) -> String {
match oper {
Oper::Add => format!("+"),
Oper::Sub => format!("-"),
Oper::Mul => format!("*"),
Oper::Div => format!("/"),
Oper::Mod => format!("%"),
Oper::And => format!("&"),
Oper::Or => format!("|"),
Oper::Xor => format!("^"),
Oper::Shl => format!("<<"),
Oper::Shr => format!(">>"),
Oper::Ltn => format!("<"),
Oper::Lte => format!("<="),
Oper::Eql => format!("=="),
Oper::Gte => format!(">="),
Oper::Gtn => format!(">"),
Oper::Neq => format!("!="),
}
}
pub fn to_hvm_rule(book: &Book, rule: &Rule) -> String {
let name = &rule.name;
let entry = book.entrs.get(name).unwrap();
let mut pats = vec![];
for (arg,pat) in entry.args.iter().zip(rule.pats.iter()) {
if !arg.eras {
pats.push(" ".to_string());
pats.push(to_hvm_term(book, pat));
}
}
let body = to_hvm_term(book, &rule.body);
format!("({}{}) = {}", name, pats.join(""), body)
}
pub fn to_hvm_entry(book: &Book, entry: &Entry) -> String {
let kind_name = if let Some(kdln) = &entry.kdln {
format!("{} #{}", entry.name, kdln)
} else {
entry.name.clone()
};
let hvm_name = &entry.name;
if hvm_name == "HVM.log" {
return "".to_string();
}
let mut args = vec![];
for arg in &entry.args {
args.push(format!(" {}({}: {})", if arg.eras { "-" } else { "" }, arg.name, show_term(&arg.tipo)));
}
if entry.rules.len() > 0 {
let mut rules = vec![];
for rule in &entry.rules {
rules.push(format!("\n{}", to_hvm_rule(book, rule)));
}
return format!("// {}{} : {}{}\n\n", kind_name, args.join(""), show_term(&entry.tipo), rules.join(""))
}
return "".to_string();
}
pub fn to_hvm_book(book: &Book) -> String {
let mut lines = vec![];
for name in &book.names {
lines.push(to_hvm_entry(book, book.entrs.get(name).unwrap()));
}
lines.join("")
}
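
For reference, this is the kind of text the removed printer produced (the tests below exercise the same printers under `codegen::hvm`); the `Base.add` entry is only an illustration:

```rust
// Illustration only, assuming a Book that contains the usual definition
//
//     Base.add (a: Base.Nat) (b: Base.Nat) : Base.Nat
//     Base.add x (Base.succ y) = Base.succ (Base.add x y)
//
// to_hvm_rule prints the second rule as the HVM line
//
//     (Base.add x (Base.succ y)) = (Base.succ (Base.add x y))
//
// and to_hvm_entry prefixes the entry with a `// Base.add ...` comment that
// carries the original Kind signature; to_hvm_book simply concatenates entries.
fn dump_hvm(book: &Book) -> String {
    to_hvm_book(book)
}
```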

View File

@ -1,200 +0,0 @@
use crate::language::{*};
use std::collections::HashMap;
use rand::Rng;
pub const KDL_NAME_LEN: usize = 12;
pub fn to_kdl_term(kdl_names: &HashMap<String, String>, term: &CompTerm) -> Result<String, String> {
let term = match term {
CompTerm::Var { name } => {
format!("{}", name)
}
CompTerm::Lam { name, body } => {
let body = to_kdl_term(kdl_names, body)?;
format!("@{} {}", name, body)
}
CompTerm::App { func, argm } => {
let func = to_kdl_term(kdl_names, func)?;
let argm = to_kdl_term(kdl_names, argm)?;
format!("({} {})", func, argm)
}
CompTerm::Dup { nam0, nam1, expr, body } => {
let expr = to_kdl_term(kdl_names, expr)?;
let body = to_kdl_term(kdl_names, body)?;
format!("dup {} {} = {}; {}", nam0, nam1, expr, body)
}
CompTerm::Let { name, expr, body } => {
let expr = to_kdl_term(kdl_names, expr)?;
let body = to_kdl_term(kdl_names, body)?;
format!("let {} = {}; {}", name, expr, body)
}
CompTerm::Ctr { name, args } => {
let kdl_name = kdl_names.get(name).expect(&format!("{}", name));
let args = args.iter().map(|x| to_kdl_term(kdl_names, x)).collect::<Result<Vec<String>, String>>()?;
let args = args.iter().map(|x| format!(" {}", x)).collect::<String>();
format!("{{{}{}}}", kdl_name, args)
}
CompTerm::Fun { name, args } => {
let kdl_name = kdl_names.get(name).expect(&format!("{}", name));
let args = args.iter().map(|x| to_kdl_term(kdl_names, x)).collect::<Result<Vec<String>, String>>()?;
let args = args.iter().map(|x| format!(" {}", x)).collect::<String>();
format!("({}{})", kdl_name, args)
}
CompTerm::Num { numb } => {
format!("#{}", numb)
}
CompTerm::Op2 { oper, val0, val1 } => {
let oper = show_oper(&oper);
let val0 = to_kdl_term(kdl_names, val0)?;
let val1 = to_kdl_term(kdl_names, val1)?;
format!("({} {} {})", oper, val0, val1)
}
CompTerm::Nil => {
return Err("Found nil term in compiled term while converting to kindelia".to_string());
}
};
Ok(term)
}
pub fn to_kdl_rule(book: &Book, kdl_names: &HashMap<String, String>, rule: &CompRule) -> Result<String, String> {
let name = &rule.name;
let kdl_name = kdl_names.get(name).unwrap();
let mut pats = vec![]; // stringified pattern args
for pat in rule.pats.iter() {
let pat = to_kdl_term(kdl_names, &pat)?;
pats.push(" ".to_string());
pats.push(pat);
}
let body = to_kdl_term(kdl_names, &rule.body)?;
let rule = format!("({}{}) = {}", kdl_name, pats.join(""), body);
Ok(rule)
}
pub fn to_kdl_entry(book: &Book, kdl_names: &HashMap<String, String>, entry: &CompEntry) -> Result<String, String> {
let entry = match entry.name.as_str() {
// Main is compiled to a run block
// TODO: Maybe we should have run blocks come from a specific type of function instead
// TODO: run statements should always come last in the block
"Main" => format!("run {{\n {}\n}}\n\n", to_kdl_term(kdl_names, &*entry.rules[0].body)?),
_ => {
let kdl_name = kdl_names.get(&entry.name).unwrap();
let args_names = entry.args.iter().map(|arg| format!(" {}", arg)).collect::<String>();
// If this entry existed in the original kind code, add some annotations as comments
let kind_entry = book.entrs.get(&entry.name);
let cmnt = if let Some(kind_entry) = kind_entry {
let args_typed = kind_entry.args.iter().map(|arg|
format!(" {}({}: {})", if arg.eras { "-" } else { "" }, arg.name, show_term(&arg.tipo))
).collect::<String>();
let kind_name = format!("{} #{}", entry.name, kdl_name);
format!("// {}{} : {}\n", kind_name, args_typed, show_term(&kind_entry.tipo))
} else {
String::new()
};
// Entries with no rules become constructors
// Entries with rules become functions
let fun = if entry.rules.is_empty() {
format!("ctr {{{}{}}}\n\n", kdl_name, args_names)
} else {
let mut rules = vec![];
for rule in &entry.rules {
rules.push(format!("\n {}", to_kdl_rule(book, kdl_names, rule)?));
}
format!("fun ({}{}) {{{}\n}}\n\n", kdl_name, args_names, rules.join(""))
};
cmnt + &fun
}
};
Ok(entry)
}
pub fn to_kdl_book(book: &Book, kdl_names: &HashMap<String, String>, comp_book: &CompBook) -> Result<String, String> {
let mut lines = vec![];
for name in &comp_book.names {
let entry = comp_book.entrs.get(name).unwrap();
lines.push(to_kdl_entry(book, kdl_names, entry)?);
}
Ok(lines.join(""))
}
// Utils
// -----
// Returns a map of kind names to kindelia names
// Returns an err if any of the names can't be converted
pub fn get_kdl_names(book: &CompBook) -> Result<HashMap<String, String>, String> {
// Fits a name to the max size allowed by kindelia.
// If the name is too large, truncates and replaces the last characters by random chars.
// Fails if the namespace is too large.
fn rand_shorten(name: &String) -> Result<String, String> {
let (ns, fun) = name.rsplit_once('.').unwrap_or(("", name));
let ns = if !ns.is_empty() { format!("{}.", ns) } else { ns.to_string() };
if ns.len() > KDL_NAME_LEN - 1 {
let err = format!("Namespace for \"{}\" has more than {} characters.", name, KDL_NAME_LEN - 1);
return Err(err);
}
let max_fn_name = KDL_NAME_LEN - ns.len();
// If the name doesn't fit, truncate and insert some random characters at the end
let fun = if fun.len() > max_fn_name {
let n_rnd_chrs = usize::min(3, max_fn_name);
let fun_cut = fun[..max_fn_name - n_rnd_chrs].to_string();
let mut rng = rand::thread_rng();
let rnd_chrs = (0..n_rnd_chrs)
.map(|_| rng.gen_range(0..63))
.map(|n| encode_base64(n))
.collect::<String>();
format!("{}{}", fun_cut, rnd_chrs)
} else {
fun.to_string()
};
Ok(format!("{}{}", ns, fun))
}
fn get_kdl_name(entry: &CompEntry) -> Result<String, String> {
let kind_name = &entry.name;
let kdln = match &entry.kdln {
Some(kdln) => {
// If the entry uses a kindelia name, use it
if !kdln.chars().next().unwrap().is_uppercase() {
let err = format!("Kindelia name \"{}\" doesn't start with an uppercase letter.", kdln);
return Err(err);
}
if entry.orig {
if kdln.len() > KDL_NAME_LEN {
let err = format!("Kindelia name \"{}\" for \"{}\" has more than {} characters.", kdln, kind_name, KDL_NAME_LEN - 1);
return Err(err);
}
kdln.clone()
} else {
// For entries created by the flattener, we shorten even the kindelia name
// TODO: Since these rules can come first,
// if the kdln is too large the err will happen in the generated function,
// potentially confusing the user.
rand_shorten(kdln)?
}
},
// Otherwise, try to fit the normal kind name
None => rand_shorten(kind_name)?,
};
Ok(kdln)
}
fn encode_base64(num: u8) -> char {
match num {
0 ..= 9 => (num + b'0') as char,
10 ..= 35 => (num - 10 + b'A') as char,
36 ..= 61 => (num - 36 + b'a') as char,
62 .. => '_',
}
}
let mut kdl_names = HashMap::new();
for name in &book.names {
let kdln = get_kdl_name(book.entrs.get(name).unwrap())?;
kdl_names.insert(name.clone(), kdln);
}
Ok(kdl_names)
}
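
The random suffix used by `rand_shorten` draws from a 64-character alphabet. The standalone restatement below duplicates the nested helper above purely for illustration and prints the full alphabet:

```rust
// Standalone copy of the nested encode_base64 helper, for illustration only.
fn encode_base64(num: u8) -> char {
    match num {
        0..=9 => (num + b'0') as char,        // '0'..'9'
        10..=35 => (num - 10 + b'A') as char, // 'A'..'Z'
        36..=61 => (num - 36 + b'a') as char, // 'a'..'z'
        62.. => '_',                          // everything above collapses to '_'
    }
}

fn main() {
    let alphabet: String = (0u8..64).map(encode_base64).collect();
    assert_eq!(alphabet.len(), 64);
    // 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz__
    println!("{}", alphabet);
}
```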

102
tests/mod.rs Normal file
View File

@ -0,0 +1,102 @@
use std::{fs::{self, File}, path::{Path}, io::Write};
use walkdir::{WalkDir, Error};
use pretty_assertions::{assert_eq};
use ntest::timeout;
use kind2::driver::{self, config::Config};
use kind2::codegen;
fn golden_test(path: &Path, run: fn(&Path) -> String) {
let result = run(path);
let golden_path = path.with_extension("golden");
if let Ok(to_check) = fs::read_to_string(golden_path.clone()) {
assert_eq!(result, to_check, "Testing file '{}'", path.display());
} else {
let mut file = File::create(golden_path).unwrap();
file.write_all(result.as_bytes()).unwrap();
}
}
fn test_kind2(path: &Path, run: fn(&Path) -> String) -> Result<(), Error> {
for entry in WalkDir::new(path).follow_links(true) {
let entry = entry?;
let path = entry.path();
if path.is_file() && path.extension().map(|x| x == "kind2").unwrap_or(false) {
golden_test(path, run);
}
}
Ok(())
}
fn compile_kdl(config: &Config, path: &str) -> Result<String, String> {
let loaded = driver::loader::load(&config, path)?;
let comp_book = codegen::kdl::compile_book(&loaded.book)?;
let kdl_names = codegen::kdl::get_kdl_names(&comp_book, &None)?;
let result = codegen::kdl::to_kdl_book(&loaded.book, &kdl_names, &comp_book)?;
Ok(result)
}
#[test]
#[timeout(15000)]
fn test_checker() -> Result<(), Error> {
test_kind2(Path::new("./tests/suite/checker"), | path | {
let config = Config {
no_high_line: true,
color_output: false,
kind2_path: ".".to_string()
};
let result = driver::loader::load(&config, path.to_str().unwrap());
let result = result.and_then(| x | driver::run_with_hvm(&driver::gen_checker(&x.book), "Kind.API.check_all", true));
result.map_or_else(|d| d, |e| e.output)
})?;
Ok(())
}
#[test]
#[timeout(10000)]
fn test_to_hvm() -> Result<(), Error> {
test_kind2(Path::new("./tests/suite/to_hvm"), | path | {
let config = Config {
no_high_line: true,
color_output: false,
kind2_path: "./tests/suite/lib".to_string()
};
let result = driver::loader::load(&config, path.to_str().unwrap());
let result = result.map(|loaded| codegen::hvm::to_hvm_book(&loaded.book));
result.map_or_else(|d| d, |e| e)
})?;
Ok(())
}
#[test]
#[timeout(10000)]
fn test_to_kdl() -> Result<(), Error> {
test_kind2(Path::new("./tests/suite/to_kdl"), | path | {
let config = Config {
no_high_line: true,
color_output: false,
kind2_path: ".".to_string()
};
let result = compile_kdl(&config, path.to_str().unwrap());
result.map_or_else(|d| d, |e| e)
})?;
Ok(())
}
#[test]
#[timeout(10000)]
fn test_run_hvm() -> Result<(), Error> {
test_kind2(Path::new("./tests/suite/eval"), | path | {
let config = Config {
no_high_line: true,
color_output: false,
kind2_path: "./tests/suite/lib".to_string()
};
let result = driver::loader::load(&config, path.to_str().unwrap());
let result = result.and_then(| x | driver::run_with_hvm(&driver::gen_checker(&x.book), "Kind.API.eval_main", true));
result.map_or_else(|d| d, |e| e.output)
})?;
Ok(())
}
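
Missing `.golden` files are created on the first run by `golden_test`, so regenerating an expectation is just a matter of deleting the stale file and re-running the suite. Adding another suite follows the same pattern as the tests above; a hypothetical example (the `./tests/suite/to_kdl_extra` directory and the test name are made up):

```rust
// Hypothetical extra suite, mirroring test_to_kdl above.
#[test]
#[timeout(10000)]
fn test_to_kdl_extra() -> Result<(), Error> {
    test_kind2(Path::new("./tests/suite/to_kdl_extra"), |path| {
        let config = Config {
            no_high_line: true,
            color_output: false,
            kind2_path: ".".to_string(),
        };
        let result = compile_kdl(&config, path.to_str().unwrap());
        result.map_or_else(|err| err, |out| out)
    })?;
    Ok(())
}
```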

View File

@ -0,0 +1,2 @@
All terms check.

View File

@ -0,0 +1,45 @@
Algebra.Group.concat <t: Type> (group: (Algebra.Group t)) : (_: t) (_: t) t
Algebra.Group.concat t (Algebra.Group.new t_ monoid inverse inverse_proof) = (Algebra.Monoid.concat _ monoid)
Algebra.Group.new <t: Type> (monoid: (Algebra.Monoid t)) (invert: (_: t) t) (inverse: (Algebra.Laws.Inverse t (Algebra.Monoid.concat _ monoid) invert (Algebra.Monoid.empty _ monoid))) : (Algebra.Group t)
Algebra.Group (t: Type) : Type
Algebra.Monoid.concat <t: Type> (monoid: (Algebra.Monoid t)) : (_: t) (_: t) t
Algebra.Monoid.concat t (Algebra.Monoid.new t_ sg empty id) = (Algebra.Semigroup.concat _ sg)
Algebra.Monoid.new <t: Type> (sg: (Algebra.Semigroup t)) (empty: t) (identity: (Algebra.Laws.Identity t (Algebra.Semigroup.concat _ sg) empty)) : (Algebra.Monoid t)
Algebra.Semigroup (t: Type) : Type
Algebra.Laws.Identity (t: Type) (concat: (_: t) (_: t) t) (empty: t) : Type
Algebra.Monoid (t: Type) : Type
Algebra.Semigroup.concat <t: Type> (semigroup: (Algebra.Semigroup t)) : (_: t) (_: t) t
Algebra.Semigroup.concat t (Algebra.Semigroup.new t_ magma assoc) = (Algebra.Magma.concat _ magma)
Algebra.Semigroup.new <t: Type> (magma: (Algebra.Magma t)) (associativity: (Algebra.Laws.associativity.eta _ (Algebra.Magma.concat _ magma))) : (Algebra.Semigroup t)
Algebra.Magma (t: Type) : Type
Algebra.Laws.associativity.eta <t: Type> (concat: (_: t) (_: t) t) : Type
Algebra.Laws.associativity.eta t concat = (a: t) (b: t) (c: t) (Equal _ (concat (concat a b) c) (concat a (concat b c)))
Equal <t: Type> (a: t) (b: t) : Type
Algebra.Magma.concat <t: Type> (magma: (Algebra.Magma t)) : (_: t) (_: t) t
Algebra.Magma.concat t (Algebra.Magma.new t_ concat) = concat
Algebra.Magma.new <t: Type> (concat: (_: t) (_: t) t) : (Algebra.Magma t)
Algebra.Monoid.empty <t: Type> (monoid: (Algebra.Monoid t)) : t
Algebra.Monoid.empty t (Algebra.Monoid.new t_ sg empty id) = empty
Algebra.Laws.Inverse (t: Type) (concat: (_: t) (_: t) t) (inverse: (_: t) t) (empty: t) : Type

View File

@ -0,0 +1,2 @@
All terms check.

View File

@ -0,0 +1,7 @@
// We should open an issue for this?
Arity3 -(e: U60) -(f: U60) <g> <h> <i> (d: U60) : U60
Arity3 e f g h i d = d
Main : U60
Main = Arity3 1 2 3

View File

@ -0,0 +1,2 @@
All terms check.

View File

View File

@ -0,0 +1,2 @@
All terms check.

View File

@ -0,0 +1,243 @@
// From https://github.com/Kindelia/Functional-Benchmarks/blob/master/Checker/Base.kind2
//
// Types
// =====
// Equality
// --------
Base.Equal <t: Type> (a: t) (b: t) : Type
Base.refl <t: Type> <a: t> : Base.Equal t a a
// Boolean
// -------
Base.Bool : Type
Base.true : Base.Bool
Base.false : Base.Bool
// Natural Number
// --------------
Base.Nat : Type
Base.zero : Base.Nat
Base.succ (pred: Base.Nat) : Base.Nat
// Binary Tree
// -----------
Base.Tree : Type
Base.leaf : Base.Tree
Base.node (l: Base.Tree) (r: Base.Tree) : Base.Tree
// Vector
// ------
Base.Vector (t: Type) (len: Base.Nat) : Type
Base.cons <t: Type> <len: Base.Nat> (head: t) (tail: Base.Vector t len) : Base.Vector t (Base.succ len)
Base.nil <t: Type> : Base.Vector t Base.zero
// Church Boolean
// --------------
Base.Church.Bool : Type
Base.Church.Bool = (p: Type) -> (t: p) -> (f: p) -> p
Base.Church.true : Base.Church.Bool
Base.Church.true = p => t => f => t
Base.Church.false : Base.Church.Bool
Base.Church.false = p => t => f => f
// Church Natural Number
// ---------------------
Base.Church.Nat : Type
Base.Church.Nat = (p: Type) -> (f: p -> p) -> (z: p) -> p
Base.Church.zero : Base.Church.Nat
Base.Church.zero = p => f => z => z
Base.Church.succ (n: Base.Church.Nat) : Base.Church.Nat
Base.Church.succ n = p => f => z => f (n p f z)
// Church Tree
// -----------
Base.Church.Tree : Type
Base.Church.Tree = (p: Type) -> (n: p -> p -> p) -> (l: p) -> p
Base.Church.leaf : Base.Church.Tree
Base.Church.leaf = p => n => l => l
Base.Church.node (a: Base.Church.Tree) (b: Base.Church.Tree) : Base.Church.Tree
Base.Church.node a b = p => n => l => n (a p n l) (b p n l)
// Church Vector
// -------------
Base.Church.Vector (t: Type) (len: Base.Nat) : Type
Base.Church.Vector t n = (p: Base.Nat -> Type) -> (cons: (len: Base.Nat) -> (head: t) -> (tail: p len) -> p (Base.succ len)) -> (nil: p Base.zero) -> p n
Base.Church.nil <t: Type> : Base.Church.Vector t Base.zero
Base.Church.nil t = p => cons => nil => nil
Base.Church.cons <t: Type> <len: Base.Nat> (head: t) (tail: Base.Church.Vector t len) : Base.Church.Vector t (Base.succ len)
Base.Church.cons t len head tail = p => cons => nil => cons len head (tail p cons nil)
// Functions
// =========
Base.not (b: Base.Bool) : Base.Bool
Base.not Base.false = Base.true
Base.not Base.true = Base.false
Base.and (a: Base.Bool) (b: Base.Bool) : Base.Bool
Base.and Base.false Base.false = Base.false
Base.and Base.false Base.true = Base.false
Base.and Base.true Base.false = Base.false
Base.and Base.true Base.true = Base.true
Base.add (a: Base.Nat) (b: Base.Nat) : Base.Nat
Base.add x Base.zero = x
Base.add x (Base.succ y) = Base.succ (Base.add x y)
Base.mul (a: Base.Nat) (b: Base.Nat) : Base.Nat
Base.mul a Base.zero = Base.zero
Base.mul a (Base.succ b) = Base.add a (Base.mul a b)
Base.exp (a: Base.Nat) (b: Base.Nat) : Base.Nat
Base.exp a Base.zero = Base.succ Base.zero
Base.exp a (Base.succ b) = Base.mul a (Base.exp a b)
Base.is_even (a: Base.Nat) : Base.Bool
Base.is_even Base.zero = Base.true
Base.is_even (Base.succ a) = Base.not (Base.is_even a)
Base.full_tree (d: Base.Nat) : Base.Tree
Base.full_tree Base.zero = Base.leaf
Base.full_tree (Base.succ d) = let branch = Base.full_tree d; Base.node branch branch
Base.tree_fold (a: Base.Tree) (p: Type) (n: p -> p -> p) (l: p) : p
Base.tree_fold Base.leaf p n l = l
Base.tree_fold (Base.node a b) p n l = n (Base.tree_fold a p n l) (Base.tree_fold b p n l)
Base.force_tree (a: Base.Tree) : Base.Bool
Base.force_tree t = Base.tree_fold t Base.Bool (a => b => Base.and a b) Base.true
Base.Church.not (b: Base.Church.Bool) : Base.Church.Bool
Base.Church.not b = p => t => f => b p f t
Base.Church.and (a: Base.Church.Bool) (b: Base.Church.Bool) : Base.Church.Bool
Base.Church.and a b = p => t => f => a p (b p t f) f
Base.Church.add (a: Base.Church.Nat) (b: Base.Church.Nat) : Base.Church.Nat
Base.Church.add a b = p => f => z => a p f (b p f z)
Base.Church.mul (a: Base.Church.Nat) (b: Base.Church.Nat) : Base.Church.Nat
Base.Church.mul a b = p => f => a p (b p f)
Base.Church.exp (a: Base.Church.Nat) (b: Base.Church.Nat) : Base.Church.Nat
Base.Church.exp a b = p => b (p -> p) (a p)
Base.Church.is_even (a: Base.Church.Nat) : Base.Church.Bool
Base.Church.is_even a = a Base.Church.Bool (x => Base.Church.not x) Base.Church.true
Base.Church.full_tree (d: Base.Church.Nat) : Base.Church.Tree
Base.Church.full_tree d = p => n => l => d p (t => n t t) l
Base.Church.tree_fold (a: Base.Church.Tree) (p: Type) (n: p -> p -> p) (l: p) : p
Base.Church.tree_fold t p n l = t p n l
Base.Church.force_tree (a: Base.Church.Tree) : Base.Church.Bool
Base.Church.force_tree t = Base.Church.tree_fold t Base.Church.Bool (a => b => Base.Church.and a b) Base.Church.true
// Elaboration
// ===========
//Id : (t : Type) -> t -> t
//Id = t => x => x
//Bad : (t : Type) -> t -> t
//Bad = ((Id) _ Id) _ Id
// Constants
// =========
Base.N0 : Base.Nat { Base.zero }
Base.N1 : Base.Nat { Base.succ Base.N0 }
Base.N2 : Base.Nat { Base.succ Base.N1 }
Base.N3 : Base.Nat { Base.succ Base.N2 }
Base.N4 : Base.Nat { Base.succ Base.N3 }
Base.N5 : Base.Nat { Base.succ Base.N4 }
Base.N6 : Base.Nat { Base.succ Base.N5 }
Base.N7 : Base.Nat { Base.succ Base.N6 }
Base.N8 : Base.Nat { Base.succ Base.N7 }
Base.N9 : Base.Nat { Base.succ Base.N8 }
Base.N10 : Base.Nat { Base.succ Base.N9 }
Base.N11 : Base.Nat { Base.succ Base.N10 }
Base.N12 : Base.Nat { Base.succ Base.N11 }
Base.N13 : Base.Nat { Base.succ Base.N12 }
Base.N14 : Base.Nat { Base.succ Base.N13 }
Base.N15 : Base.Nat { Base.succ Base.N14 }
Base.N16 : Base.Nat { Base.succ Base.N15 }
Base.N17 : Base.Nat { Base.succ Base.N16 }
Base.N18 : Base.Nat { Base.succ Base.N17 }
Base.N19 : Base.Nat { Base.succ Base.N18 }
Base.N20 : Base.Nat { Base.succ Base.N19 }
Base.N21 : Base.Nat { Base.succ Base.N20 }
Base.N22 : Base.Nat { Base.succ Base.N21 }
Base.N23 : Base.Nat { Base.succ Base.N22 }
Base.N24 : Base.Nat { Base.succ Base.N23 }
Base.N25 : Base.Nat { Base.succ Base.N24 }
Base.N26 : Base.Nat { Base.succ Base.N25 }
Base.N27 : Base.Nat { Base.succ Base.N26 }
Base.N28 : Base.Nat { Base.succ Base.N27 }
Base.N29 : Base.Nat { Base.succ Base.N28 }
Base.N30 : Base.Nat { Base.succ Base.N29 }
Base.N31 : Base.Nat { Base.succ Base.N30 }
Base.N32 : Base.Nat { Base.succ Base.N31 }
Base.Church.N0 : Base.Church.Nat { Base.Church.zero }
Base.Church.N1 : Base.Church.Nat { Base.Church.succ Base.Church.N0 }
Base.Church.N2 : Base.Church.Nat { Base.Church.succ Base.Church.N1 }
Base.Church.N3 : Base.Church.Nat { Base.Church.succ Base.Church.N2 }
Base.Church.N4 : Base.Church.Nat { Base.Church.succ Base.Church.N3 }
Base.Church.N5 : Base.Church.Nat { Base.Church.succ Base.Church.N4 }
Base.Church.N6 : Base.Church.Nat { Base.Church.succ Base.Church.N5 }
Base.Church.N7 : Base.Church.Nat { Base.Church.succ Base.Church.N6 }
Base.Church.N8 : Base.Church.Nat { Base.Church.succ Base.Church.N7 }
Base.Church.N9 : Base.Church.Nat { Base.Church.succ Base.Church.N8 }
Base.Church.N10 : Base.Church.Nat { Base.Church.succ Base.Church.N9 }
Base.Church.N11 : Base.Church.Nat { Base.Church.succ Base.Church.N10 }
Base.Church.N12 : Base.Church.Nat { Base.Church.succ Base.Church.N11 }
Base.Church.N13 : Base.Church.Nat { Base.Church.succ Base.Church.N12 }
Base.Church.N14 : Base.Church.Nat { Base.Church.succ Base.Church.N13 }
Base.Church.N15 : Base.Church.Nat { Base.Church.succ Base.Church.N14 }
Base.Church.N16 : Base.Church.Nat { Base.Church.succ Base.Church.N15 }
Base.Church.N17 : Base.Church.Nat { Base.Church.succ Base.Church.N16 }
Base.Church.N18 : Base.Church.Nat { Base.Church.succ Base.Church.N17 }
Base.Church.N19 : Base.Church.Nat { Base.Church.succ Base.Church.N18 }
Base.Church.N20 : Base.Church.Nat { Base.Church.succ Base.Church.N19 }
Base.Church.N21 : Base.Church.Nat { Base.Church.succ Base.Church.N20 }
Base.Church.N22 : Base.Church.Nat { Base.Church.succ Base.Church.N21 }
Base.Church.N23 : Base.Church.Nat { Base.Church.succ Base.Church.N22 }
Base.Church.N24 : Base.Church.Nat { Base.Church.succ Base.Church.N23 }
Base.Church.N25 : Base.Church.Nat { Base.Church.succ Base.Church.N24 }
Base.Church.N26 : Base.Church.Nat { Base.Church.succ Base.Church.N25 }
Base.Church.N27 : Base.Church.Nat { Base.Church.succ Base.Church.N26 }
Base.Church.N28 : Base.Church.Nat { Base.Church.succ Base.Church.N27 }
Base.Church.N29 : Base.Church.Nat { Base.Church.succ Base.Church.N28 }
Base.Church.N30 : Base.Church.Nat { Base.Church.succ Base.Church.N29 }
Base.Church.N31 : Base.Church.Nat { Base.Church.succ Base.Church.N30 }
Base.Church.N32 : Base.Church.Nat { Base.Church.succ Base.Church.N31 }
Test {
(Base.cons Type (Base.cons Type (Base.cons Type (Base.cons Type (Base.cons Type (Base.cons Type
Base.nil
))))))
}
Base { 0 }

View File

@ -0,0 +1,2 @@
All terms check.

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,2 @@
All terms check.

View File

View File

@ -0,0 +1 @@
Can't load type.

View File

@ -0,0 +1,6 @@
Main : U60
Main =
let k = 2
match U60 k {
s => ?
}

View File

@ -0,0 +1 @@
Incorrect arity.

View File

@ -0,0 +1,2 @@
Arity <a> <b> <c> (d: U60) : U60
Arity a b d = d

View File

@ -0,0 +1 @@
Repeated variable.

View File

@ -0,0 +1,6 @@
List <a> : Type
List.nil <a> : List a
List.cons <a> (head: a) (tail: List a) : List a
Main (x: List U60) (x: List U60) : List U60
Main (List.cons x (List.cons y (List.cons z a))) (List.cons b (List.cons c (List.cons d x))) = List.nil

View File

@ -0,0 +1,46 @@
Type mismatch
- Expected: Type
- Detected: t
Kind.Context:
- t : Type
- g : t
- h : t
On '{{#F0F#}}':
{{#R0:1364:1365R#}}
Type mismatch
- Expected: ((concat a) b)
- Detected: a
Kind.Context:
- t : Type
- concat : ((g: t) -> ((h: t) -> g))
- a : t
- b : t
- c : t
On '{{#F0F#}}':
{{#R0:1471:1492R#}}
Type mismatch
- Expected: t
- Detected: b
Kind.Context:
- t : Type
- concat : ((g: t) -> ((h: t) -> g))
- a : t
- b : t
- c : t
On '{{#F0F#}}':
{{#R0:1481:1491R#}}
Type mismatch
- Expected: t
- Detected: a
Kind.Context:
- t : Type
- concat : ((g: t) -> ((h: t) -> g))
- a : t
- b : t
- c : t
On '{{#F0F#}}':
{{#R0:1455:1465R#}}

View File

@ -0,0 +1,45 @@
Algebra.Group.concat <t: Type> (group: (Algebra.Group t)) : (_: t) (_: t) t
Algebra.Group.concat t (Algebra.Group.new t_ monoid inverse inverse_proof) = (Algebra.Monoid.concat _ monoid)
Algebra.Group.new <t: Type> (monoid: (Algebra.Monoid t)) (invert: (_: t) t) (inverse: (Algebra.Laws.Inverse t (Algebra.Monoid.concat _ monoid) invert (Algebra.Monoid.empty _ monoid))) : (Algebra.Group t)
Algebra.Group (t: Type) : Type
Algebra.Monoid.concat <t: Type> (monoid: (Algebra.Monoid t)) : (_: t) (_: t) t
Algebra.Monoid.concat t (Algebra.Monoid.new t_ sg empty id) = (Algebra.Semigroup.concat _ sg)
Algebra.Monoid.new <t: Type> (sg: (Algebra.Semigroup t)) (empty: t) (identity: (Algebra.Laws.Identity t (Algebra.Semigroup.concat _ sg) empty)) : (Algebra.Monoid t)
Algebra.Semigroup (t: Type) : Type
Algebra.Laws.Identity (t: Type) (concat: (_: t) (_: t) t) (empty: t) : Type
Algebra.Monoid (t: Type) : Type
Algebra.Semigroup.concat <t: Type> (semigroup: (Algebra.Semigroup t)) : (_: t) (_: t) t
Algebra.Semigroup.concat t (Algebra.Semigroup.new t_ magma assoc) = (Algebra.Magma.concat _ magma)
Algebra.Semigroup.new <t: Type> (magma: (Algebra.Magma t)) (associativity: (Algebra.Laws.associativity.eta _ (Algebra.Magma.concat _ magma))) : (Algebra.Semigroup t)
Algebra.Magma (t: Type) : Type
Algebra.Laws.associativity.eta <t: Type> (concat: (g: t) (h: t) g) : Type
Algebra.Laws.associativity.eta t concat = (a: t) (b: t) (c: t) (Equal _ (concat (concat a b) c) (concat a (concat b c)))
Equal <t: Type> (a: t) (b: t) : Type
Algebra.Magma.concat <t: Type> (magma: (Algebra.Magma t)) : (_: t) (_: t) t
Algebra.Magma.concat t (Algebra.Magma.new t_ concat) = concat
Algebra.Magma.new <t: Type> (concat: (_: t) (_: t) t) : (Algebra.Magma t)
Algebra.Monoid.empty <t: Type> (monoid: (Algebra.Monoid t)) : t
Algebra.Monoid.empty t (Algebra.Monoid.new t_ sg empty id) = empty
Algebra.Laws.Inverse (t: Type) (concat: (_: t) (_: t) t) (inverse: (_: t) t) (empty: t) : Type

View File

@ -0,0 +1,2 @@
All terms check.

View File

View File

@ -0,0 +1 @@
Unbound variable 'c'.

View File

@ -0,0 +1,2 @@
Main (a: U60): U60 -> U60
Main a = (b => (+ (+ a c) b))

View File

@ -0,0 +1 @@
Unbound variable 'b'.

View File

@ -0,0 +1,2 @@
Main (a: U60): U60
Main a = (+ a b)

View File

@ -0,0 +1,2 @@
All terms check.

View File

@ -0,0 +1,254 @@
Kind.Term.eval (term: (Kind.Term)) : (Kind.Term)
Kind.Term.eval (Kind.Term.typ orig) = (Kind.Term.typ orig)
Kind.Term.eval (Kind.Term.var orig name index) = (Kind.Term.var orig name index)
Kind.Term.eval (Kind.Term.hol orig numb) = (Kind.Term.hol orig numb)
Kind.Term.eval (Kind.Term.all orig name typ body) = (Kind.Term.all orig name (Kind.Term.eval typ) (x => (Kind.Term.eval (body x))))
Kind.Term.eval (Kind.Term.lam orig name body) = (Kind.Term.lam orig name (x => (Kind.Term.eval (body x))))
Kind.Term.eval (Kind.Term.let orig name expr body) = (Kind.Term.eval_let orig name (Kind.Term.eval expr) (x => (Kind.Term.eval (body x))))
Kind.Term.eval (Kind.Term.ann orig expr typ) = (Kind.Term.eval_ann orig (Kind.Term.eval expr) (Kind.Term.eval typ))
Kind.Term.eval (Kind.Term.sub orig name indx redx expr) = (Kind.Term.eval_sub orig name indx redx (Kind.Term.eval expr))
Kind.Term.eval (Kind.Term.app orig expr typ) = (Kind.Term.eval_app orig (Kind.Term.eval expr) (Kind.Term.eval typ))
Kind.Term.eval (Kind.Term.hlp orig) = (Kind.Term.hlp orig)
Kind.Term.eval (Kind.Term.u60 orig) = (Kind.Term.u60 orig)
Kind.Term.eval (Kind.Term.num orig num) = (Kind.Term.num orig num)
Kind.Term.eval (Kind.Term.op2 orig op left right) = (Kind.Term.eval_op orig op (Kind.Term.eval left) (Kind.Term.eval right))
Kind.Term.eval (Kind.Term.ct0 ctid orig) = (Kind.Term.ct0 ctid orig)
Kind.Term.eval (Kind.Term.ct1 ctid orig x0) = (Kind.Term.ct1 ctid orig (Kind.Term.eval x0))
Kind.Term.eval (Kind.Term.ct2 ctid orig x0 x1) = (Kind.Term.ct2 ctid orig (Kind.Term.eval x0) (Kind.Term.eval x1))
Kind.Term.eval (Kind.Term.ct3 ctid orig x0 x1 x2) = (Kind.Term.ct3 ctid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2))
Kind.Term.eval (Kind.Term.ct4 ctid orig x0 x1 x2 x3) = (Kind.Term.ct4 ctid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3))
Kind.Term.eval (Kind.Term.ct5 ctid orig x0 x1 x2 x3 x4) = (Kind.Term.ct5 ctid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4))
Kind.Term.eval (Kind.Term.ct6 ctid orig x0 x1 x2 x3 x4 x5) = (Kind.Term.ct6 ctid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5))
Kind.Term.eval (Kind.Term.ct7 ctid orig x0 x1 x2 x3 x4 x5 x6) = (Kind.Term.ct7 ctid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6))
Kind.Term.eval (Kind.Term.ct8 ctid orig x0 x1 x2 x3 x4 x5 x6 x7) = (Kind.Term.ct8 ctid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7))
Kind.Term.eval (Kind.Term.ct9 ctid orig x0 x1 x2 x3 x4 x5 x6 x7 x8) = (Kind.Term.ct9 ctid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7) (Kind.Term.eval x8))
Kind.Term.eval (Kind.Term.ct10 ctid orig x0 x1 x2 x3 x4 x5 x6 x7 x8 x9) = (Kind.Term.ct10 ctid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7) (Kind.Term.eval x8) (Kind.Term.eval x9))
Kind.Term.eval (Kind.Term.ct11 ctid orig x0 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10) = (Kind.Term.ct11 ctid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7) (Kind.Term.eval x8) (Kind.Term.eval x9) (Kind.Term.eval x10))
Kind.Term.eval (Kind.Term.ct12 ctid orig x0 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11) = (Kind.Term.ct12 ctid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7) (Kind.Term.eval x8) (Kind.Term.eval x9) (Kind.Term.eval x10) (Kind.Term.eval x11))
Kind.Term.eval (Kind.Term.ct13 ctid orig x0 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12) = (Kind.Term.ct13 ctid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7) (Kind.Term.eval x8) (Kind.Term.eval x9) (Kind.Term.eval x10) (Kind.Term.eval x11) (Kind.Term.eval x12))
Kind.Term.eval (Kind.Term.ct14 ctid orig x0 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13) = (Kind.Term.ct14 ctid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7) (Kind.Term.eval x8) (Kind.Term.eval x9) (Kind.Term.eval x10) (Kind.Term.eval x11) (Kind.Term.eval x12) (Kind.Term.eval x13))
Kind.Term.eval (Kind.Term.ct15 fnid orig x0) = (Kind.Term.ct15 fnid orig (Kind.Term.eval x0))
Kind.Term.eval (Kind.Term.ct16 fnid orig x0) = (Kind.Term.ct16 fnid orig (Kind.Term.eval x0))
Kind.Term.eval (Kind.Term.fn0 fnid orig) = (Kind.Term.FN0 fnid orig)
Kind.Term.eval (Kind.Term.fn1 fnid orig x0) = (Kind.Term.FN1 fnid orig (Kind.Term.eval x0))
Kind.Term.eval (Kind.Term.fn2 fnid orig x0 x1) = (Kind.Term.FN2 fnid orig (Kind.Term.eval x0) (Kind.Term.eval x1))
Kind.Term.eval (Kind.Term.fn3 fnid orig x0 x1 x2) = (Kind.Term.FN3 fnid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2))
Kind.Term.eval (Kind.Term.fn4 fnid orig x0 x1 x2 x3) = (Kind.Term.FN4 fnid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3))
Kind.Term.eval (Kind.Term.fn5 fnid orig x0 x1 x2 x3 x4) = (Kind.Term.FN5 fnid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4))
Kind.Term.eval (Kind.Term.fn6 fnid orig x0 x1 x2 x3 x4 x5) = (Kind.Term.FN6 fnid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5))
Kind.Term.eval (Kind.Term.fn7 fnid orig x0 x1 x2 x3 x4 x5 x6) = (Kind.Term.FN7 fnid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6))
Kind.Term.eval (Kind.Term.fn8 fnid orig x0 x1 x2 x3 x4 x5 x6 x7) = (Kind.Term.FN8 fnid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7))
Kind.Term.eval (Kind.Term.fn9 fnid orig x0 x1 x2 x3 x4 x5 x6 x7 x8) = (Kind.Term.FN9 fnid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7) (Kind.Term.eval x8))
Kind.Term.eval (Kind.Term.fn10 fnid orig x0 x1 x2 x3 x4 x5 x6 x7 x8 x9) = (Kind.Term.FN10 fnid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7) (Kind.Term.eval x8) (Kind.Term.eval x9))
Kind.Term.eval (Kind.Term.fn11 fnid orig x0 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10) = (Kind.Term.FN11 fnid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7) (Kind.Term.eval x8) (Kind.Term.eval x9) (Kind.Term.eval x10))
Kind.Term.eval (Kind.Term.fn12 fnid orig x0 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11) = (Kind.Term.FN12 fnid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7) (Kind.Term.eval x8) (Kind.Term.eval x9) (Kind.Term.eval x10) (Kind.Term.eval x11))
Kind.Term.eval (Kind.Term.fn13 fnid orig x0 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12) = (Kind.Term.FN13 fnid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7) (Kind.Term.eval x8) (Kind.Term.eval x9) (Kind.Term.eval x10) (Kind.Term.eval x11) (Kind.Term.eval x12))
Kind.Term.eval (Kind.Term.fn14 fnid orig x0 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13) = (Kind.Term.FN14 fnid orig (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7) (Kind.Term.eval x8) (Kind.Term.eval x9) (Kind.Term.eval x10) (Kind.Term.eval x11) (Kind.Term.eval x12) (Kind.Term.eval x13))
Kind.Term.eval (Kind.Term.fn15 fnid orig x0) = (Kind.Term.FN15 fnid orig (Kind.Term.eval x0))
Kind.Term.eval (Kind.Term.fn16 fnid orig x0) = (Kind.Term.FN16 fnid orig (Kind.Term.eval x0))
Kind.Term.eval (Kind.Term.args15 x0 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14) = (Kind.Term.args15 (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7) (Kind.Term.eval x8) (Kind.Term.eval x9) (Kind.Term.eval x10) (Kind.Term.eval x11) (Kind.Term.eval x12) (Kind.Term.eval x13) (Kind.Term.eval x14))
Kind.Term.eval (Kind.Term.args16 x0 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15) = (Kind.Term.args16 (Kind.Term.eval x0) (Kind.Term.eval x1) (Kind.Term.eval x2) (Kind.Term.eval x3) (Kind.Term.eval x4) (Kind.Term.eval x5) (Kind.Term.eval x6) (Kind.Term.eval x7) (Kind.Term.eval x8) (Kind.Term.eval x9) (Kind.Term.eval x10) (Kind.Term.eval x11) (Kind.Term.eval x12) (Kind.Term.eval x13) (Kind.Term.eval x14) (Kind.Term.eval x15))
Kind.Term.eval_app (orig: U60) (left: (Kind.Term)) (right: (Kind.Term)) : (Kind.Term)
Kind.Term.eval_app orig (Kind.Term.lam orig1 name body) arg = (body arg)
Kind.Term.eval_app orig func arg = (Kind.Term.app orig func arg)
Kind.Term : Type
Kind.Term.typ (orig: U60) : (Kind.Term)
Kind.Term.hol (orig: U60) (number: U60) : (Kind.Term)
Kind.Term.var (orig: U60) (name: U60) (index: U60) : (Kind.Term)
Kind.Term.all (orig: U60) (name: U60) (typ: (Kind.Term)) (body: ((_: (Kind.Term)) (Kind.Term))) : (Kind.Term)
Kind.Term.lam (orig: U60) (name: U60) (body: ((_: (Kind.Term)) (Kind.Term))) : (Kind.Term)
Kind.Term.app (orig: U60) (func: (Kind.Term)) (arg: (Kind.Term)) : (Kind.Term)
Kind.Term.let (orig: U60) (name: U60) (expr: (Kind.Term)) (body: ((_: (Kind.Term)) (Kind.Term))) : (Kind.Term)
Kind.Term.ann (orig: U60) (expr: (Kind.Term)) (typ: (Kind.Term)) : (Kind.Term)
Kind.Term.sub (orig: U60) (name: U60) (indx: U60) (redx: U60) (expr: (Kind.Term)) : (Kind.Term)
Kind.Term.ct0 (ctid: U60) (orig: U60) : (Kind.Term)
Kind.Term.ct1 (ctid: U60) (orig: U60) (x0: (Kind.Term)) : (Kind.Term)
Kind.Term.ct2 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) : (Kind.Term)
Kind.Term.ct3 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) : (Kind.Term)
Kind.Term.ct4 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) : (Kind.Term)
Kind.Term.ct5 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) : (Kind.Term)
Kind.Term.ct6 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) : (Kind.Term)
Kind.Term.ct7 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) : (Kind.Term)
Kind.Term.ct8 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) : (Kind.Term)
Kind.Term.ct9 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) : (Kind.Term)
Kind.Term.ct10 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) : (Kind.Term)
Kind.Term.ct11 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) : (Kind.Term)
Kind.Term.ct12 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) : (Kind.Term)
Kind.Term.ct13 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) : (Kind.Term)
Kind.Term.ct14 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) (x13: (Kind.Term)) : (Kind.Term)
Kind.Term.ct15 (ctid: U60) (orig: U60) (args: (Kind.Term)) : (Kind.Term)
Kind.Term.ct16 (ctid: U60) (orig: U60) (args: (Kind.Term)) : (Kind.Term)
Kind.Term.fn0 (fnid: U60) (orig: U60) : (Kind.Term)
Kind.Term.fn1 (fnid: U60) (orig: U60) (x0: (Kind.Term)) : (Kind.Term)
Kind.Term.fn2 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) : (Kind.Term)
Kind.Term.fn3 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) : (Kind.Term)
Kind.Term.fn4 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) : (Kind.Term)
Kind.Term.fn5 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) : (Kind.Term)
Kind.Term.fn6 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) : (Kind.Term)
Kind.Term.fn7 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) : (Kind.Term)
Kind.Term.fn8 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) : (Kind.Term)
Kind.Term.fn9 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) : (Kind.Term)
Kind.Term.fn10 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) : (Kind.Term)
Kind.Term.fn11 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) : (Kind.Term)
Kind.Term.fn12 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) : (Kind.Term)
Kind.Term.fn13 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) : (Kind.Term)
Kind.Term.fn14 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) (x13: (Kind.Term)) : (Kind.Term)
Kind.Term.fn15 (fnid: U60) (orig: U60) (args: (Kind.Term)) : (Kind.Term)
Kind.Term.fn16 (fnid: U60) (orig: U60) (args: (Kind.Term)) : (Kind.Term)
Kind.Term.FN0 (fnid: U60) (orig: U60) : (Kind.Term)
Kind.Term.FN1 (fnid: U60) (orig: U60) (x0: (Kind.Term)) : (Kind.Term)
Kind.Term.FN2 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) : (Kind.Term)
Kind.Term.FN3 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) : (Kind.Term)
Kind.Term.FN4 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) : (Kind.Term)
Kind.Term.FN5 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) : (Kind.Term)
Kind.Term.FN6 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) : (Kind.Term)
Kind.Term.FN7 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) : (Kind.Term)
Kind.Term.FN8 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) : (Kind.Term)
Kind.Term.FN9 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) : (Kind.Term)
Kind.Term.FN10 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) : (Kind.Term)
Kind.Term.FN11 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) : (Kind.Term)
Kind.Term.FN12 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) : (Kind.Term)
Kind.Term.FN13 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) : (Kind.Term)
Kind.Term.FN14 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) (x13: (Kind.Term)) : (Kind.Term)
Kind.Term.FN15 (fnid: U60) (orig: U60) (args: (Kind.Term)) : (Kind.Term)
Kind.Term.FN16 (fnid: U60) (orig: U60) (args: (Kind.Term)) : (Kind.Term)
Kind.Term.hlp (orig: U60) : (Kind.Term)
Kind.Term.u60 (orig: U60) : (Kind.Term)
Kind.Term.num (orig: U60) (num: U60) : (Kind.Term)
Kind.Term.op2 (orig: U60) (operator: (Kind.Operator)) (left: (Kind.Term)) (right: (Kind.Term)) : (Kind.Term)
Kind.Term.args15 (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) (x13: (Kind.Term)) (x14: (Kind.Term)) : (Kind.Term)
Kind.Term.args16 (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) (x13: (Kind.Term)) (x14: (Kind.Term)) (x15: (Kind.Term)) : (Kind.Term)
Kind.Operator : Type
Kind.Term.eval_sub (orig: U60) (name: U60) (indx: U60) (redx: U60) (expr: (Kind.Term)) : (Kind.Term)
Kind.Term.eval_sub orig name indx redx expr = expr
Kind.Term.eval_ann (orig: U60) (expr: (Kind.Term)) (type: (Kind.Term)) : (Kind.Term)
Kind.Term.eval_ann orig expr type = expr
Kind.Term.eval_let (orig: U60) (name: U60) (expr: (Kind.Term)) (body: ((_: (Kind.Term)) (Kind.Term))) : (Kind.Term)
Kind.Term.eval_let orig name expr body = (body expr)
Kind.Term.eval_op (orig: U60) (op: (Kind.Operator)) (left: (Kind.Term)) (right: (Kind.Term)) : (Kind.Term)
Kind.Term.eval_op orig (Kind.Operator.add) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (+ a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.sub) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (- a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.mul) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (* a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.div) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (/ a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.mod) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (% a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.and) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (& a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.or) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (| a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.xor) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (^ a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.shl) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (<< a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.shr) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (>> a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.ltn) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (< a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.lte) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (<= a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.eql) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (== a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.gte) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (>= a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.gtn) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (> a.num b.num))
Kind.Term.eval_op orig (Kind.Operator.neq) (Kind.Term.num a.orig a.num) (Kind.Term.num b.orig b.num) = (Kind.Term.num 0 (!= a.num b.num))
Kind.Term.eval_op orig op left right = (Kind.Term.op2 orig op left right)
Kind.Operator.mod : (Kind.Operator)
Kind.Operator.lte : (Kind.Operator)
Kind.Operator.eql : (Kind.Operator)
Kind.Operator.xor : (Kind.Operator)
Kind.Operator.gte : (Kind.Operator)
Kind.Operator.shl : (Kind.Operator)
Kind.Operator.shr : (Kind.Operator)
Kind.Operator.div : (Kind.Operator)
Kind.Operator.neq : (Kind.Operator)
Kind.Operator.add : (Kind.Operator)
Kind.Operator.sub : (Kind.Operator)
Kind.Operator.ltn : (Kind.Operator)
Kind.Operator.mul : (Kind.Operator)
Kind.Operator.gtn : (Kind.Operator)
Kind.Operator.and : (Kind.Operator)
Kind.Operator.or : (Kind.Operator)

View File

@ -0,0 +1,2 @@
All terms check.

View File

@ -0,0 +1,247 @@
// From: https://github.com/Kindelia/Functional-Benchmarks/blob/master/Checker/nat_exp_church.kind2
// Types
// =====
// Equality
// --------
Base.Equal <t: Type> (a: t) (b: t) : Type
Base.refl <t: Type> <a: t> : Base.Equal t a a
// Boolean
// -------
Base.Bool : Type
Base.true : Base.Bool
Base.false : Base.Bool
// Natural Number
// --------------
Base.Nat : Type
Base.zero : Base.Nat
Base.succ (pred: Base.Nat) : Base.Nat
// Binary Tree
// -----------
Base.Tree : Type
Base.leaf : Base.Tree
Base.node (l: Base.Tree) (r: Base.Tree) : Base.Tree
// Vector
// ------
Base.Vector (t: Type) (len: Base.Nat) : Type
Base.cons <t: Type> <len: Base.Nat> (head: t) (tail: Base.Vector t len) : Base.Vector t (Base.succ len)
Base.nil <t: Type> : Base.Vector t Base.zero
// Church Boolean
// --------------
Base.Church.Bool : Type
Base.Church.Bool = (p: Type) -> (t: p) -> (f: p) -> p
Base.Church.true : Base.Church.Bool
Base.Church.true = p => t => f => t
Base.Church.false : Base.Church.Bool
Base.Church.false = p => t => f => f
// Church Natural Number
// ---------------------
Base.Church.Nat : Type
Base.Church.Nat = (p: Type) -> (f: p -> p) -> (z: p) -> p
Base.Church.zero : Base.Church.Nat
Base.Church.zero = p => f => z => z
Base.Church.succ (n: Base.Church.Nat) : Base.Church.Nat
Base.Church.succ n = p => f => z => f (n p f z)
Base.Church.to_u60 (n: Base.Church.Nat) : U60
Base.Church.to_u60 n = n U60 (x => (+ x 1)) 0
// Church Tree
// -----------
Base.Church.Tree : Type
Base.Church.Tree = (p: Type) -> (n: p -> p -> p) -> (l: p) -> p
Base.Church.leaf : Base.Church.Tree
Base.Church.leaf = p => n => l => l
Base.Church.node (a: Base.Church.Tree) (b: Base.Church.Tree) : Base.Church.Tree
Base.Church.node a b = p => n => l => n (a p n l) (b p n l)
// Church Vector
// -------------
Base.Church.Vector (t: Type) (len: Base.Nat) : Type
Base.Church.Vector t n = (p: Base.Nat -> Type) -> (cons: (len: Base.Nat) -> (head: t) -> (tail: p len) -> p (Base.succ len)) -> (nil: p Base.zero) -> p n
Base.Church.nil <t: Type> : Base.Church.Vector t Base.zero
Base.Church.nil t = p => cons => nil => nil
Base.Church.cons <t: Type> (len: Base.Nat) (head: t) (tail: Base.Church.Vector t len) : Base.Church.Vector t (Base.succ len)
Base.Church.cons t len head tail = p => cons => nil => cons len head (tail p cons nil)
// Functions
// =========
Base.not (b: Base.Bool) : Base.Bool
Base.not Base.false = Base.true
Base.not Base.true = Base.false
Base.and (a: Base.Bool) (b: Base.Bool) : Base.Bool
Base.and Base.false Base.false = Base.false
Base.and Base.false Base.true = Base.false
Base.and Base.true Base.false = Base.false
Base.and Base.true Base.true = Base.true
Base.add (a: Base.Nat) (b: Base.Nat) : Base.Nat
Base.add x Base.zero = x
Base.add x (Base.succ y) = Base.succ (Base.add x y)
Base.mul (a: Base.Nat) (b: Base.Nat) : Base.Nat
Base.mul a Base.zero = Base.zero
Base.mul a (Base.succ b) = Base.add a (Base.mul a b)
Base.exp (a: Base.Nat) (b: Base.Nat) : Base.Nat
Base.exp a Base.zero = Base.succ Base.zero
Base.exp a (Base.succ b) = Base.mul a (Base.exp a b)
Base.is_even (a: Base.Nat) : Base.Bool
Base.is_even Base.zero = Base.true
Base.is_even (Base.succ a) = Base.not (Base.is_even a)
Base.full_tree (d: Base.Nat) : Base.Tree
Base.full_tree Base.zero = Base.leaf
Base.full_tree (Base.succ d) = let branch = Base.full_tree d; Base.node branch branch
Base.tree_fold (a: Base.Tree) (p: Type) (n: p -> p -> p) (l: p) : p
Base.tree_fold Base.leaf p n l = l
Base.tree_fold (Base.node a b) p n l = n (Base.tree_fold a p n l) (Base.tree_fold b p n l)
Base.force_tree (a: Base.Tree) : Base.Bool
Base.force_tree t = Base.tree_fold t Base.Bool (a => b => Base.and a b) Base.true
Base.Church.not (b: Base.Church.Bool) : Base.Church.Bool
Base.Church.not b = p => t => f => b p f t
Base.Church.and (a: Base.Church.Bool) (b: Base.Church.Bool) : Base.Church.Bool
Base.Church.and a b = p => t => f => a p (b p t f) f
Base.Church.add (a: Base.Church.Nat) (b: Base.Church.Nat) : Base.Church.Nat
Base.Church.add a b = p => f => z => a p f (b p f z)
Base.Church.mul (a: Base.Church.Nat) (b: Base.Church.Nat) : Base.Church.Nat
Base.Church.mul a b = p => f => a p (b p f)
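// Exponentiation: instantiating b at type (p -> p) iterates (a p), so the step function f ends up applied a^b times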
Base.Church.exp (a: Base.Church.Nat) (b: Base.Church.Nat) : Base.Church.Nat
Base.Church.exp a b = p => b (p -> p) (a p)
Base.Church.is_even (a: Base.Church.Nat) : Base.Church.Bool
Base.Church.is_even a = a Base.Church.Bool (x => Base.Church.not x) Base.Church.true
Base.Church.full_tree (d: Base.Church.Nat) : Base.Church.Tree
Base.Church.full_tree d = p => n => l => d p (t => n t t) l
Base.Church.tree_fold (a: Base.Church.Tree) (p: Type) (n: p -> p -> p) (l: p) : p
Base.Church.tree_fold t p n l = t p n l
Base.Church.force_tree (a: Base.Church.Tree) : Base.Church.Bool
Base.Church.force_tree t = Base.Church.tree_fold t Base.Church.Bool (a => b => Base.Church.and a b) Base.Church.true
// Elaboration
// ===========
//Id : (t : Type) -> t -> t
//Id = t => x => x
//Bad : (t : Type) -> t -> t
//Bad = ((Id) _ Id) _ Id
// Constants
// =========
Base.N0 : Base.Nat { Base.zero }
Base.N1 : Base.Nat { Base.succ Base.N0 }
Base.N2 : Base.Nat { Base.succ Base.N1 }
Base.N3 : Base.Nat { Base.succ Base.N2 }
Base.N4 : Base.Nat { Base.succ Base.N3 }
Base.N5 : Base.Nat { Base.succ Base.N4 }
Base.N6 : Base.Nat { Base.succ Base.N5 }
Base.N7 : Base.Nat { Base.succ Base.N6 }
Base.N8 : Base.Nat { Base.succ Base.N7 }
Base.N9 : Base.Nat { Base.succ Base.N8 }
Base.N10 : Base.Nat { Base.succ Base.N9 }
Base.N11 : Base.Nat { Base.succ Base.N10 }
Base.N12 : Base.Nat { Base.succ Base.N11 }
Base.N13 : Base.Nat { Base.succ Base.N12 }
Base.N14 : Base.Nat { Base.succ Base.N13 }
Base.N15 : Base.Nat { Base.succ Base.N14 }
Base.N16 : Base.Nat { Base.succ Base.N15 }
Base.N17 : Base.Nat { Base.succ Base.N16 }
Base.N18 : Base.Nat { Base.succ Base.N17 }
Base.N19 : Base.Nat { Base.succ Base.N18 }
Base.N20 : Base.Nat { Base.succ Base.N19 }
Base.N21 : Base.Nat { Base.succ Base.N20 }
Base.N22 : Base.Nat { Base.succ Base.N21 }
Base.N23 : Base.Nat { Base.succ Base.N22 }
Base.N24 : Base.Nat { Base.succ Base.N23 }
Base.N25 : Base.Nat { Base.succ Base.N24 }
Base.N26 : Base.Nat { Base.succ Base.N25 }
Base.N27 : Base.Nat { Base.succ Base.N26 }
Base.N28 : Base.Nat { Base.succ Base.N27 }
Base.N29 : Base.Nat { Base.succ Base.N28 }
Base.N30 : Base.Nat { Base.succ Base.N29 }
Base.N31 : Base.Nat { Base.succ Base.N30 }
Base.N32 : Base.Nat { Base.succ Base.N31 }
Base.Church.N0 : Base.Church.Nat { Base.Church.zero }
Base.Church.N1 : Base.Church.Nat { Base.Church.succ Base.Church.N0 }
Base.Church.N2 : Base.Church.Nat { Base.Church.succ Base.Church.N1 }
Base.Church.N3 : Base.Church.Nat { Base.Church.succ Base.Church.N2 }
Base.Church.N4 : Base.Church.Nat { Base.Church.succ Base.Church.N3 }
Base.Church.N5 : Base.Church.Nat { Base.Church.succ Base.Church.N4 }
Base.Church.N6 : Base.Church.Nat { Base.Church.succ Base.Church.N5 }
Base.Church.N7 : Base.Church.Nat { Base.Church.succ Base.Church.N6 }
Base.Church.N8 : Base.Church.Nat { Base.Church.succ Base.Church.N7 }
Base.Church.N9 : Base.Church.Nat { Base.Church.succ Base.Church.N8 }
Base.Church.N10 : Base.Church.Nat { Base.Church.succ Base.Church.N9 }
Base.Church.N11 : Base.Church.Nat { Base.Church.succ Base.Church.N10 }
Base.Church.N12 : Base.Church.Nat { Base.Church.succ Base.Church.N11 }
Base.Church.N13 : Base.Church.Nat { Base.Church.succ Base.Church.N12 }
Base.Church.N14 : Base.Church.Nat { Base.Church.succ Base.Church.N13 }
Base.Church.N15 : Base.Church.Nat { Base.Church.succ Base.Church.N14 }
Base.Church.N16 : Base.Church.Nat { Base.Church.succ Base.Church.N15 }
Base.Church.N17 : Base.Church.Nat { Base.Church.succ Base.Church.N16 }
Base.Church.N18 : Base.Church.Nat { Base.Church.succ Base.Church.N17 }
Base.Church.N19 : Base.Church.Nat { Base.Church.succ Base.Church.N18 }
Base.Church.N20 : Base.Church.Nat { Base.Church.succ Base.Church.N19 }
Base.Church.N21 : Base.Church.Nat { Base.Church.succ Base.Church.N20 }
Base.Church.N22 : Base.Church.Nat { Base.Church.succ Base.Church.N21 }
Base.Church.N23 : Base.Church.Nat { Base.Church.succ Base.Church.N22 }
Base.Church.N24 : Base.Church.Nat { Base.Church.succ Base.Church.N23 }
Base.Church.N25 : Base.Church.Nat { Base.Church.succ Base.Church.N24 }
Base.Church.N26 : Base.Church.Nat { Base.Church.succ Base.Church.N25 }
Base.Church.N27 : Base.Church.Nat { Base.Church.succ Base.Church.N26 }
Base.Church.N28 : Base.Church.Nat { Base.Church.succ Base.Church.N27 }
Base.Church.N29 : Base.Church.Nat { Base.Church.succ Base.Church.N28 }
Base.Church.N30 : Base.Church.Nat { Base.Church.succ Base.Church.N29 }
Base.Church.N31 : Base.Church.Nat { Base.Church.succ Base.Church.N30 }
Base.Church.N32 : Base.Church.Nat { Base.Church.succ Base.Church.N31 }
Base { 0 }
Size : Base.Church.Nat
Size = Base.Church.N15
Main : Base.Equal (Base.Church.to_u60 (Base.Church.exp Base.Church.N2 Size)) 32768
Main = Base.refl

View File

@ -0,0 +1,2 @@
All terms check.

View File

@ -0,0 +1,179 @@
Kind.Checker.fill (index: U60) (val: (Kind.Term)) : (Kind.Checker (Unit))
Kind.Checker.fill index val = (context => (depth => (rhs => (subst => (eqts => (errs => (Kind.Result.checked _ context depth rhs (Kind.Subst.fill subst index val) eqts errs (Unit.new))))))))
Unit : Type
Kind.Checker (a: Type) : Type
Kind.Checker a = ((_: (Kind.Context)) ((_: U60) ((_: (Bool)) ((_: (Kind.Subst)) ((_: (List (Kind.Equation))) ((_: (List (Kind.Error))) (Kind.Result a)))))))
Bool : Type
Kind.Equation : Type
Kind.Error : Type
Kind.Context : Type
Kind.Subst : Type
List (t: Type) : Type
Kind.Result (a: Type) : Type
Kind.Result.checked <a: Type> (ctx: (Kind.Context)) (depth: U60) (rhs: (Bool)) (sub: (Kind.Subst)) (equations: (List (Kind.Equation))) (errors: (List (Kind.Error))) (ret: a) : (Kind.Result a)
Kind.Term : Type
Kind.Term.typ (orig: U60) : (Kind.Term)
Kind.Term.hol (orig: U60) (number: U60) : (Kind.Term)
Kind.Term.var (orig: U60) (name: U60) (index: U60) : (Kind.Term)
Kind.Term.all (orig: U60) (name: U60) (typ: (Kind.Term)) (body: ((_: (Kind.Term)) (Kind.Term))) : (Kind.Term)
Kind.Term.lam (orig: U60) (name: U60) (body: ((_: (Kind.Term)) (Kind.Term))) : (Kind.Term)
Kind.Term.app (orig: U60) (func: (Kind.Term)) (arg: (Kind.Term)) : (Kind.Term)
Kind.Term.let (orig: U60) (name: U60) (expr: (Kind.Term)) (body: ((_: (Kind.Term)) (Kind.Term))) : (Kind.Term)
Kind.Term.ann (orig: U60) (expr: (Kind.Term)) (typ: (Kind.Term)) : (Kind.Term)
Kind.Term.sub (orig: U60) (name: U60) (indx: U60) (redx: U60) (expr: (Kind.Term)) : (Kind.Term)
Kind.Term.ct0 (ctid: U60) (orig: U60) : (Kind.Term)
Kind.Term.ct1 (ctid: U60) (orig: U60) (x0: (Kind.Term)) : (Kind.Term)
Kind.Term.ct2 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) : (Kind.Term)
Kind.Term.ct3 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) : (Kind.Term)
Kind.Term.ct4 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) : (Kind.Term)
Kind.Term.ct5 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) : (Kind.Term)
Kind.Term.ct6 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) : (Kind.Term)
Kind.Term.ct7 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) : (Kind.Term)
Kind.Term.ct8 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) : (Kind.Term)
Kind.Term.ct9 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) : (Kind.Term)
Kind.Term.ct10 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) : (Kind.Term)
Kind.Term.ct11 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) : (Kind.Term)
Kind.Term.ct12 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) : (Kind.Term)
Kind.Term.ct13 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) : (Kind.Term)
Kind.Term.ct14 (ctid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) (x13: (Kind.Term)) : (Kind.Term)
Kind.Term.ct15 (ctid: U60) (orig: U60) (args: (Kind.Term)) : (Kind.Term)
Kind.Term.ct16 (ctid: U60) (orig: U60) (args: (Kind.Term)) : (Kind.Term)
Kind.Term.fn0 (fnid: U60) (orig: U60) : (Kind.Term)
Kind.Term.fn1 (fnid: U60) (orig: U60) (x0: (Kind.Term)) : (Kind.Term)
Kind.Term.fn2 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) : (Kind.Term)
Kind.Term.fn3 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) : (Kind.Term)
Kind.Term.fn4 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) : (Kind.Term)
Kind.Term.fn5 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) : (Kind.Term)
Kind.Term.fn6 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) : (Kind.Term)
Kind.Term.fn7 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) : (Kind.Term)
Kind.Term.fn8 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) : (Kind.Term)
Kind.Term.fn9 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) : (Kind.Term)
Kind.Term.fn10 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) : (Kind.Term)
Kind.Term.fn11 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) : (Kind.Term)
Kind.Term.fn12 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) : (Kind.Term)
Kind.Term.fn13 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) : (Kind.Term)
Kind.Term.fn14 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) (x13: (Kind.Term)) : (Kind.Term)
Kind.Term.fn15 (fnid: U60) (orig: U60) (args: (Kind.Term)) : (Kind.Term)
Kind.Term.fn16 (fnid: U60) (orig: U60) (args: (Kind.Term)) : (Kind.Term)
Kind.Term.FN0 (fnid: U60) (orig: U60) : (Kind.Term)
Kind.Term.FN1 (fnid: U60) (orig: U60) (x0: (Kind.Term)) : (Kind.Term)
Kind.Term.FN2 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) : (Kind.Term)
Kind.Term.FN3 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) : (Kind.Term)
Kind.Term.FN4 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) : (Kind.Term)
Kind.Term.FN5 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) : (Kind.Term)
Kind.Term.FN6 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) : (Kind.Term)
Kind.Term.FN7 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) : (Kind.Term)
Kind.Term.FN8 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) : (Kind.Term)
Kind.Term.FN9 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) : (Kind.Term)
Kind.Term.FN10 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) : (Kind.Term)
Kind.Term.FN11 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) : (Kind.Term)
Kind.Term.FN12 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) : (Kind.Term)
Kind.Term.FN13 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) : (Kind.Term)
Kind.Term.FN14 (fnid: U60) (orig: U60) (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) (x13: (Kind.Term)) : (Kind.Term)
Kind.Term.FN15 (fnid: U60) (orig: U60) (args: (Kind.Term)) : (Kind.Term)
Kind.Term.FN16 (fnid: U60) (orig: U60) (args: (Kind.Term)) : (Kind.Term)
Kind.Term.hlp (orig: U60) : (Kind.Term)
Kind.Term.u60 (orig: U60) : (Kind.Term)
Kind.Term.num (orig: U60) (num: U60) : (Kind.Term)
Kind.Term.op2 (orig: U60) (operator: (Kind.Operator)) (left: (Kind.Term)) (right: (Kind.Term)) : (Kind.Term)
Kind.Term.args15 (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) (x13: (Kind.Term)) (x14: (Kind.Term)) : (Kind.Term)
Kind.Term.args16 (x0: (Kind.Term)) (x1: (Kind.Term)) (x2: (Kind.Term)) (x3: (Kind.Term)) (x4: (Kind.Term)) (x5: (Kind.Term)) (x6: (Kind.Term)) (x7: (Kind.Term)) (x8: (Kind.Term)) (x9: (Kind.Term)) (x10: (Kind.Term)) (x11: (Kind.Term)) (x12: (Kind.Term)) (x13: (Kind.Term)) (x14: (Kind.Term)) (x15: (Kind.Term)) : (Kind.Term)
Kind.Operator : Type
Kind.Subst.fill (subst: (Kind.Subst)) (depth: U60) (term: (Kind.Term)) : (Kind.Subst)
Kind.Subst.fill (Kind.Subst.end) 0 term = (Kind.Subst.sub term (Kind.Subst.end))
Kind.Subst.fill (Kind.Subst.unfilled rest) 0 term = (Kind.Subst.sub term rest)
Kind.Subst.fill (Kind.Subst.sub lost rest) 0 term = (Kind.Subst.sub term rest)
Kind.Subst.fill (Kind.Subst.end) n term = (Kind.Subst.unfilled (Kind.Subst.fill (Kind.Subst.end) (- n 1) term))
Kind.Subst.fill (Kind.Subst.unfilled rest) n term = (Kind.Subst.unfilled (Kind.Subst.fill rest (- n 1) term))
Kind.Subst.fill (Kind.Subst.sub keep rest) n term = (Kind.Subst.sub keep (Kind.Subst.fill rest (- n 1) term))
Kind.Subst.end : (Kind.Subst)
Kind.Subst.unfilled (rest: (Kind.Subst)) : (Kind.Subst)
Kind.Subst.sub (term: (Kind.Term)) (rest: (Kind.Subst)) : (Kind.Subst)
Unit.new : (Unit)

View File

@ -0,0 +1,2 @@
All terms check.

View File

@ -0,0 +1,6 @@
String : Type
String.cons (head: U60) (tail: String) : String
String.nil : String
Main.hello : String
Main.hello = "Hello, World"

View File

@ -0,0 +1,2 @@
All terms check.

View File

@ -0,0 +1,7 @@
// Should we open an issue for this?
Arity3 -(e: U60) -(f: U60) <g> <h> <i> (d: U60) : U60
Arity3 e f g h i d = d
Arity2 -(e: U60) -(f: U60) <g> <h> <i> (d: U60) : U60
Arity2 d = d

View File

@ -0,0 +1,5 @@
Inspection.
- Goal: U60
On '{{#F0F#}}':
{{#R0:20:21R#}}

View File

@ -0,0 +1,3 @@
Main : U60
Main =
?

View File

@ -0,0 +1 @@
Unbound variable 'Maybe'.

View File

@ -0,0 +1,4 @@
use A as Maybe
Test : A/ U60
Test = A/pure 2

View File

@ -0,0 +1,2 @@
All terms check.

View File

@ -0,0 +1,53 @@
// From: https://github.com/Kindelia/Functional-Benchmarks/blob/master/Runtime/quicksort.kind2
List (t: Type) : Type
Cons <t: Type> (head: t) (tail: List t) : List t
Nil <t: Type> : List t
Tree (t: Type) : Type
Empty <t: Type> : Tree t
Single <t: Type> (value: t) : Tree t
Concat <t: Type> (left: Tree t) (right: Tree t) : Tree t
// Generates a random list
Randoms (s: U60) (n: U60) : List U60
Randoms s 0 = Nil
Randoms s l = Cons s (Randoms (% (+ (* s 1664525) 1013904223) 4294967296) (- l 1))
// Sums all elements in a concatenation tree
Sum (tree: Tree U60) : U60
Sum (Empty t) = 0
Sum (Single t a) = a
Sum (Concat t a b) = (+ (Sum a) (Sum b))
//// The initial pivot
Pivot : U60
Pivot = 2147483648
QSort (p: U60) (s: U60) (l: List U60): Tree U60
QSort p s Nil = Empty
QSort p s (Cons x Nil) = Single x
QSort p s (Cons x xs) = Split p s (Cons x xs) Nil Nil
//// Splits list in two partitions
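//// When the input is exhausted, the step s is halved and each partition is sorted around an adjusted pivot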
Split (p: U60) (s: U60) (l: List U60) (min: List U60) (max: List U60) : Tree U60
Split p s Nil min max =
let s = (>> s 1)
let min = (QSort (- p s) s min)
let max = (QSort (+ p s) s max)
Concat min max
Split p s (Cons x xs) min max =
Place p s (< p x) x xs min max
//// Moves element to its partition
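//// y holds the result of (< p x): 0 keeps x in the min partition, 1 sends it to max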
Place (p: U60) (s: U60) (y: U60) (x: U60) (xs: List U60) (min: List U60) (max: List U60) : Tree U60
Place p s 0 x xs min max = Split p s xs (Cons x min) max
Place p s 1 x xs min max = Split p s xs min (Cons x max)
//// Sorts and sums n random numbers
Main : U60
Main =
let list = Randoms 1 254
Sum (QSort Pivot Pivot list)

View File

@ -0,0 +1,2 @@
(Maybe.some _ 5)

View File

@ -0,0 +1,7 @@
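// Each `ask` line desugars to Maybe.bind; the final expression is the result of the block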
Main : Maybe U60
Main =
do Maybe {
ask a = Maybe.pure 2
ask b = Maybe.pure 3
Maybe.some (+ a b)
}

View File

@ -0,0 +1,2 @@
(Maybe.some _ 5)

View File

@ -0,0 +1,7 @@
Main : Maybe U60
Main =
do Maybe {
ask a = Maybe.pure 2
ask b = Maybe.pure 3
(Maybe.some (+ a b))
}

View File

@ -0,0 +1,2 @@
(Maybe.none _)

View File

@ -0,0 +1,7 @@
Main : Maybe U60
Main =
do Maybe {
ask a = Maybe.pure 2
ask b = Maybe.pure 3
Maybe.none
}

View File

@ -0,0 +1,2 @@
93961

View File

@ -0,0 +1,23 @@
List (t: Type) : Type
Cons <t: Type> (head: t) (tail: List t) : List t
Nil <t: Type> : List t
// Folds over a list
Fold <t: Type> (list: List t) <p: Type> (cons: t -> p -> p) (nil: p) : p
Fold t (Nil u) p c n = n
Fold t (Cons u x xs) p c n = c x (Fold xs c n)
// Builds the list from 0 to n-1, prepended to the accumulator
Range (n: U60) (list: List U60) : List U60
Range 0 xs = xs
Range n xs =
let m = (- n 1)
Range m (Cons m xs)
// Sums a big list with fold
Main {
let size = 434
let list = Range size Nil
Fold list (a => b => (+ a b)) 0
}

View File

@ -0,0 +1,2 @@
546249386527

View File

@ -0,0 +1,53 @@
// From: https://github.com/Kindelia/Functional-Benchmarks/blob/master/Runtime/quicksort.kind2
List (t: Type) : Type
Cons <t: Type> (head: t) (tail: List t) : List t
Nil <t: Type> : List t
Tree (t: Type) : Type
Empty <t: Type> : Tree t
Single <t: Type> (value: t) : Tree t
Concat <t: Type> (left: Tree t) (right: Tree t) : Tree t
// Generates a random list
Randoms (s: U60) (n: U60) : List U60
Randoms s 0 = Nil
Randoms s l = Cons s (Randoms (% (+ (* s 1664525) 1013904223) 4294967296) (- l 1))
// Sums all elements in a concatenation tree
Sum (tree: Tree U60) : U60
Sum (Empty t) = 0
Sum (Single t a) = a
Sum (Concat t a b) = (+ (Sum a) (Sum b))
//// The initial pivot
Pivot : U60
Pivot = 2147483648
QSort (p: U60) (s: U60) (l: List U60): Tree U60
QSort p s Nil = Empty
QSort p s (Cons x Nil) = Single x
QSort p s (Cons x xs) = Split p s (Cons x xs) Nil Nil
//// Splits list in two partitions
Split (p: U60) (s: U60) (l: List U60) (min: List U60) (max: List U60) : Tree U60
Split p s Nil min max =
let s = (>> s 1)
let min = (QSort (- p s) s min)
let max = (QSort (+ p s) s max)
Concat min max
Split p s (Cons x xs) min max =
Place p s (< p x) x xs min max
//// Moves element to its partition
Place (p: U60) (s: U60) (y: U60) (x: U60) (xs: List U60) (min: List U60) (max: List U60) : Tree U60
Place p s 0 x xs min max = Split p s xs (Cons x min) max
Place p s 1 x xs min max = Split p s xs min (Cons x max)
//// Sorts and sums n random numbers
Main : U60
Main =
let list = Randoms 1 254
Sum (QSort Pivot Pivot list)

View File

@ -0,0 +1,2 @@
Arity2 -<U60> (d: U60) : U60
Arity2 d = d

View File

@ -0,0 +1,2 @@
// Automatically derived from Maybe/_.type
Maybe (t: Type) : Type

View File

@ -0,0 +1,4 @@
type Maybe <t: Type> {
none
some (value: t)
}

View File

@ -0,0 +1,3 @@
Maybe.bind <a> <b> (ma: Maybe a) (mb: a -> Maybe b) : Maybe b
Maybe.bind a b (Maybe.none t) mb = Maybe.none
Maybe.bind a b (Maybe.some t val) mb = (mb val)

View File

@ -0,0 +1,4 @@
// Automatically derived from Maybe/_.type
Maybe.match <t: Type> (x: (Maybe t)) -(p: (x: (Maybe t)) Type) (none: (p (Maybe.none t))) (some: (value: t) (p (Maybe.some t value))) : (p x)
Maybe.match t (Maybe.none t_) p none some = (none)
Maybe.match t (Maybe.some t_ value_) p none some = (some value_)

View File

@ -0,0 +1,2 @@
// Automatically derived from Maybe/_.type
Maybe.none <t: Type> : (Maybe t)

View File

@ -0,0 +1,2 @@
Maybe.pure <a: Type> (x: a) : Maybe a
Maybe.pure a x = Maybe.some x
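// Together with Maybe.bind, this enables `do Maybe { ... }` blocks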

Some files were not shown because too many files have changed in this diff.