Mirror of https://github.com/unisonweb/unison.git (synced 2024-11-13 22:29:35 +03:00)
add more Stream builtins and thread env through

added `fs_p`, `fpps_p`, etc., and the `StackDecode` typeclass, which describes how to decode a value that needs access to a stack.

builtin function adapter helper naming convention guide:
  `u` = unison unboxed
  `b` = Scala Boolean as unboxed boolean
  `l` = Scala Long as unboxed int64
  `n` = Scala Int as unboxed uint64
  `d` = Scala Double as unboxed float
  `p` = "polymorphic", some generic type not known to be unboxed
  `s` = "stack decode", some input parameter requiring access to the runtime stack to be decoded
  `z` = laZy, a result type for which the decompiled form has to be recorded with each operation, as opposed to being derived from a Value

`p` is used when a function accepts an `Env => Stream` that will not be consumed; `s` is used when a function accepts a `Stream` (no `Env`) that will be consumed; `z` is used when a function returns a `Stream`.

add pretty printing for `Tuple` and `Sequence`
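To make the convention concrete, here is a minimal, self-contained sketch (stand-in types only; the real helpers in Builtins.scala use the runtime's Name, Decode/Encode, and Env types) of how two of the helper names read -- the letters before the underscore describe the arguments, the letter after it the result:

object NamingSketch {
  type Env    = Map[String, Long] // stand-in for the runtime Env
  type Stream = LazyList[Long]    // stand-in for the runtime Stream

  // fl_b: one Long argument ("l"), Boolean result ("b") -- e.g. Int64.is-even
  def fl_b(name: String, arg: String, f: Long => Boolean): (String, Long => Boolean) =
    name -> f

  // fpp_z: two generic arguments ("p", "p") and a laZy result ("z"): the body
  // returns an Env => Stream whose decompiled form is recorded per operation
  // -- e.g. Stream.filter.
  def fpp_z[A, B](name: String, arg1: String, arg2: String,
                  f: (A, B) => Env => Stream): (String, (A, B) => Env => Stream) =
    name -> f
}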
This commit is contained in:
parent e05752740c
commit 3063e7e57f
@@ -125,6 +125,11 @@ builtins = Map.fromList $
  , ("Stream.drop", "forall a . UInt64 -> Stream a -> Stream a")
  , ("Stream.map", "forall a b . (a -> b) -> Stream a -> Stream b")
  , ("Stream.fold-left", "forall a b . b -> (b -> a -> b) -> Stream a -> b")
  , ("Stream.iterate", "forall a . a -> (a -> a) -> Stream a")
  , ("Stream.reduce", "forall a . a -> (a -> a -> a) -> Stream a -> a")
  , ("Stream.to-sequence", "forall a . Stream a -> Sequence a")
  , ("Stream.filter", "forall a . (a -> Boolean) -> Stream a -> Stream a")
  , ("Stream.scan-left", "forall a b . b -> (b -> a -> b) -> Stream a -> Stream b")

  , ("Sequence.empty", "forall a . Sequence a")
  , ("Sequence.cons", "forall a . a -> Sequence a -> Sequence a")
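As an orientation aid, the summary below (drawn from the Scala changes later in this commit, not part of this hunk) shows which adapter helper registers each new Stream builtin; the letters follow the naming guide in the commit message:

// Summary only; see Builtins.scala below for the actual registrations.
val adapterForStreamBuiltin = Map(
  "Stream.iterate"     -> "fpp_z",  // two decoded args, lazy Stream result
  "Stream.reduce"      -> "fpps_p", // two decoded args + consumed Stream, plain result
  "Stream.to-sequence" -> "fs_p",   // one consumed Stream, plain result
  "Stream.filter"      -> "fpp_z",  // function + stream repr, lazy Stream result
  "Stream.scan-left"   -> "fppp_z"  // three decoded args, lazy Stream result
)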
@@ -7,7 +7,7 @@ import compilation.{Computation, Return, Requested}
object BuiltinTypes {

  abstract class Constructor(_cid: Int) {
    def cid = ConstructorId(_cid)
    val cid = ConstructorId(_cid)
  }

  object Unit extends Constructor(0) {
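The only change here is `def cid` becoming `val cid`. A minimal, self-contained illustration (stand-in declarations, not the runtime's) of why that matters:

// With `def`, ConstructorId(_cid) would be re-allocated on every access;
// with `val`, it is computed once when the Constructor instance is created.
final case class ConstructorId(toInt: Int)
abstract class Constructor(_cid: Int) {
  val cid = ConstructorId(_cid) // cached; every lookup reuses the same value
}
object UnitConstructor extends Constructor(0)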
@@ -58,9 +58,45 @@ object Builtins {
    }
  )

  abstract class FPPP_P[A,B,C,D] { def apply(a: A, b: B, c: C): D }
  def fppp_p[A,B,C,D](name: Name, arg1: Name, arg2: Name, arg3: Name,
                      f: FPPP_P[A,B,C,D])
  // Stream.iterate : a -> (a -> a) -> Stream a
  val Stream_iterate =
    fpp_z("Stream.iterate", "start", "f",
      (start: Value, f: Value) =>
        (env: Env) =>
          Stream.iterate(start)(UnisonToScala.unsafeToUnboxed1(f)(env))
    )

  // Stream.reduce : a -> (a -> a -> a) -> Stream a -> a
  val Stream_reduce =
    fpps_p("Stream.reduce", "zero", "f", "stream",
      (zero: Value, f: Value, s: Stream[Value]) => {
        val env = _env
        s.reduce(zero)(UnisonToScala.unsafeToUnboxed2(f)(env))
      }
    )

  // Stream.to-sequence : Stream a -> Sequence a
  val Stream_toSequence =
    fs_p("Stream.to-sequence", "stream",
      (s: Stream[Value]) => s.toSequence[Value])

  // Stream.filter : (a -> Boolean) -> Stream a -> Stream a
  val Stream_filter =
    fpp_z("Stream.filter", "f", "stream",
      (f: Value, s: StreamRepr) =>
        (env: Env) =>
          s(env).filter(UnisonToScala.unsafeToUnboxed1(f)(env))
    )

  // Stream.scan-left : b -> (b -> a -> b) -> Stream a -> Stream b
  val Stream_scanLeft =
    fppp_z("Stream.scan-left", "acc", "f", "stream",
      (acc: Value, f: Value, s: StreamRepr) =>
        (env: Env) =>
          s(env).scanLeft(acc)(UnisonToScala.unsafeToUnboxed2(f)(env))
    )

  def fppp_p[A,B,C,D](name: Name, arg1: Name, arg2: Name, arg3: Name, f: (A,B,C) => D)
    (implicit
      A: Decode[A],
      B: Decode[B],
@@ -82,9 +118,7 @@ object Builtins {
    name -> Return(lambda)
  }

  abstract class FPPS_P[A,B,C,D] { def apply(a: A, b: B, c: C): D }
  def fpps_p[A,B,C,D](name: Name, arg1: Name, arg2: Name, arg3: Name,
                      f: FPPS_P[A,B,C,D])
  def fpps_p[A,B,C,D](name: Name, arg1: Name, arg2: Name, arg3: Name, f: (A,B,C) => D)
    (implicit
      A: Decode[A],
      B: Decode[B],
@@ -106,6 +140,22 @@ object Builtins {
    name -> Return(lambda)
  }

  def fs_p[C,D](name: Name, arg1: Name, f: C => D)
    (implicit
      C: StackDecode[C],
      D: Encode[D]): (Name, Computation) = {
    val body: Computation =
      (r,rec,top,stackU,x1,x0,stackB,x1b,x0b) => {
        D.encode(r, f(
          C.stackDecode(x0, x0b)((stackU, stackB, top, r))
        ))
      }
    val decompiled = Term.Id(name)
    val lambda =
      new Value.Lambda.ClosureForming(List(arg1), body, decompiled)
    name -> Return(lambda)
  }

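The `StackDecode` typeclass that `fs_p` relies on is introduced by this commit, but its definition is not shown in the diff. A hypothetical sketch of the shape implied by the call `C.stackDecode(x0, x0b)((stackU, stackB, top, r))` above (all type names here are stand-ins so the sketch compiles on its own):

object StackDecodeSketch {
  type U = Long; type B = AnyRef; type StackPtr = Int; type R = AnyRef // stand-ins

  // Unlike a plain Decode, a StackDecode instance is handed the argument's
  // unboxed/boxed slot pair plus the live stack (unboxed array, boxed array,
  // top-of-stack pointer, result slot), so it can consume the stack while
  // decoding -- e.g. to drive a Stream argument that will be fully consumed.
  trait StackDecode[A] {
    def stackDecode(x0: U, x0b: B)(stack: (Array[U], Array[B], StackPtr, R)): A
  }
}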
  val streamBuiltins = Map(
    Stream_empty,
    Stream_fromInt64,
@@ -115,6 +165,11 @@ object Builtins {
    Stream_take,
    Stream_map,
    Stream_foldLeft,
    Stream_iterate,
    Stream_reduce,
    Stream_toSequence,
    Stream_filter,
    Stream_scanLeft,
  )

  // Sequence.empty : Sequence a
@@ -171,10 +226,10 @@ object Builtins {
    fl_l("Int64.increment", "x", _ + 1)

  val Int64_isEven =
    fl_b("Int64.isEven", "x", _ % 2 == 0)
    fl_b("Int64.is-even", "x", _ % 2 == 0)

  val Int64_isOdd =
    fl_b("Int64.isOdd", "x", _ % 2 != 0)
    fl_b("Int64.is-odd", "x", _ % 2 != 0)

  val Int64_add =
    fll_l("Int64.+", "x", "y", _ + _)
@@ -216,16 +271,16 @@ object Builtins {
  def uint(n: Long): Term = Term.Unboxed(longToUnboxed(n), UnboxedType.UInt64)

  val UInt64_toInt64 =
    fl_l("UInt64.toInt64", "x", x => x)
    fl_l("UInt64.to-int64", "x", x => x)

  val UInt64_inc =
    fn_n("UInt64.increment", "x", _ + 1)

  val UInt64_isEven =
    fl_b("UInt64.isEven", "x", _ % 2 == 0)
    fl_b("UInt64.is-even", "x", _ % 2 == 0)

  val UInt64_isOdd =
    fl_b("UInt64.isOdd", "x", _ % 2 != 0)
    fl_b("UInt64.is-odd", "x", _ % 2 != 0)

  val UInt64_add =
    fnn_n("UInt64.+", "x", "y", _ + _)
@@ -496,6 +551,31 @@ object Builtins {
    name -> Return(lambda)
  }

  def fppp_z[A,B,C,D](name: Name, arg1: String, arg2: String, arg3: String, f: (A,B,C) => D)
    (implicit
      A: Decode[A],
      B: Decode[B],
      C: Decode[C],
      D: LazyEncode[D]): (Name, Computation) = {
    val body: Computation =
      (r,rec,top,stackU,x1,x0,stackB,x1b,x0b) => {
        val x2 = top.u(stackU, 2)
        val x2b = top.b(stackB, 2)
        val a = A.decode(x2, x2b)
        val b = B.decode(x1, x1b)
        val c = C.decode(x0, x0b)
        D.encodeOp(r, f(a, b, c), name,
          Value.fromParam(x2, x2b),
          Value.fromParam(x1, x1b),
          Value.fromParam(x0, x0b))
      }
    val decompiled = Term.Id(name)
    val lambda = new Lambda.ClosureForming(List(arg1, arg2, arg3), body, decompiled)
    name -> Return(lambda)
  }
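`LazyEncode` (the `z` of the naming guide) is likewise not defined in this diff. A hypothetical sketch of the shape implied by the `D.encodeOp(...)` call above: besides the computed result it receives the operation name and the original argument Values, so the decompiled form of a lazy result such as a Stream can be recorded as the applied operation rather than derived from a Value:

object LazyEncodeSketch {
  type R = AnyRef; type U = Long // stand-ins for the runtime's result/unboxed types
  trait Name; trait Value

  trait LazyEncode[D] {
    // Hypothetical: store `op` together with `name` and the argument Values so
    // the result's decompiled term can be rebuilt on demand.
    def encodeOp(r: R, op: D, name: Name, args: Value*): U
  }
}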
  abstract class FLP_P[A,B] { def apply(l: Long, a: A): B }
  def flp_p[A:Decode,B:Encode](name: Name, arg1: Name, arg2: Name, f: FLP_P[A,B]) =
    _fup_p(name, arg1, arg2, (u, p: A) => f(unboxedToLong(u), p))
@@ -522,7 +602,6 @@ object Builtins {
    name -> Return(new Value.Lambda.ClosureForming(List(arg1, arg2), body, Term.Id(name)))
  }

  def flp_z[A:Decode,B:LazyEncode](name: Name, arg1: Name, arg2: Name, f: FLP_P[A,B]) =
    _fup_z[A,B](name, arg1, arg2, (u, a) => f(unboxedToLong(u), a))
@@ -10,6 +10,9 @@ object UnisonToScala {
  def toUnboxed1(p: (Term.Name, Computation)): Env => Unboxed.F1[Param,Value] =
    toUnboxed1(Builtins.lambdaFor(p))

  def unsafeToUnboxed1(f: Value): Env => Unboxed.F1[Param,Value] =
    toUnboxed1(f.asInstanceOf[Value.Lambda])

  def toUnboxed1(f: Value.Lambda): Env => Unboxed.F1[Param,Value] = {
    require (f.arity == 1)
    f.body match {
@@ -30,6 +33,9 @@ object UnisonToScala {
    }
  }

  def unsafeToUnboxed2(f: Value): Env => Unboxed.F2[Value,Value,Value] =
    toUnboxed2(f.asInstanceOf[Value.Lambda])

  def toUnboxed2(p: (Term.Name, Computation)): Env => Unboxed.F2[Value,Value,Value] =
    toUnboxed2(Builtins.lambdaFor(p))
@@ -98,6 +98,9 @@ object PrettyPrint {
  val semicolon = Breakable("; ")
  def semicolons(docs: Seq[PrettyPrint]): PrettyPrint = docs.reduce(_ <> semicolon <> _)

  val comma = Breakable(", ")
  def commas(docs: Seq[PrettyPrint]): PrettyPrint = docs.reduce(_ <> comma <> _)

  def prettyName(name: Name) = parenthesizeIf(isOperatorName(name.toString))(name.toString)

  def unqualifiedName(name: Name): String =
@@ -205,6 +208,9 @@ object PrettyPrint {
      parenthesizeGroupIf(precedence > 5) {
        prettyTerm(arg1, 5) <> " " <> infixName(name) <> softbreak <> prettyTerm(arg2, 6).nest(" ")
      }
    case Tuple(args) => "(" <> commas(args.map(prettyTerm)) <> ")"
    case Term.Apply(Term.Constructor(BuiltinTypes.Tuple.Id, BuiltinTypes.Tuple.cid), args) =>
      "(" <> commas(args.map(prettyTerm)) <> ")"
    case Term.Apply(f, args) => parenthesizeGroupIf(precedence > 9) {
      prettyTerm(f, 9) <> softbreak <>
        softbreaks(args.map(arg => prettyTerm(arg, 10).nest(" ")))
@@ -236,6 +242,7 @@ object PrettyPrint {
        Term.Var(prettyId(typeId, ctorId).renderUnbroken)(
          fields.map(_.decompile):_*), precedence)
    case Term.Text(txt) => '"' + Text.toString(txt) + '"'
    case Term.Sequence(seq) => "[" <> commas(seq.map(prettyTerm).toList) <> "]"
    case t => t.toString
  }
@@ -247,5 +254,30 @@ object PrettyPrint {
      case _ => None
    }
  }

  object Tuple {
    def unapply(term: Term): Option[Seq[Term]] = {
      val B = BuiltinTypes

      def go(term: Term, elements: Seq[Term]): Seq[Term] = {
        term match {
          case Term.Apply(Term.Constructor(B.Tuple.Id, B.Tuple.cid), args) =>
            args match {
              case element :: term :: Nil => go(term, elements :+ element)
              case _ => throw new Exception("tuple wasn't a cons")
            }

          case _ => elements
        }
      }
      term match {
        case Term.Apply(Term.Constructor(B.Tuple.Id, B.Tuple.cid), args) =>
          Some(go(term, Seq.empty))

        case _ => None

      }
    }
  }
}
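A usage sketch of what the extractor does, on a stand-in data type rather than real `Term` values: a decompiled tuple is a chain of two-argument `Tuple` constructor applications, and `go` flattens that chain so the `case Tuple(args)` branch added above can print it as a comma-separated list:

object TupleFlattenSketch {
  sealed trait T                                    // stand-in for the decompiled Term shape
  case class Cons(element: Int, rest: T) extends T
  case object End extends T

  // Same recursion as `go` above: collect elements until the chain ends.
  def flatten(t: T, acc: Vector[Int] = Vector.empty): Vector[Int] = t match {
    case Cons(e, rest) => flatten(rest, acc :+ e)
    case End           => acc
  }

  // flatten(Cons(1, Cons(2, Cons(3, End)))) == Vector(1, 2, 3),
  // which would pretty-print as "(1, 2, 3)".
}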
@@ -1,7 +1,7 @@
package org.unisonweb

import org.unisonweb.EasyTest._
import org.unisonweb.util.{StreamTests, UtilTests}
import org.unisonweb.util.UtilTests

object AllTests {
  val tests = suite(
@@ -10,7 +10,6 @@ object AllTests {
    CompilationTests.tests,
    FileCompilationTests.tests,
    CodecsTests.tests,
    StreamTests.tests,
  )
}
@@ -1,19 +1,33 @@
package org.unisonweb

import java.io.File
import java.nio.file.{Files, Path}
import java.nio.file.{Files, Path, Paths}

import org.unisonweb.Term.Syntax._
import org.unisonweb.Term.Term
import org.unisonweb.util.PrettyPrint.prettyTerm
import org.unisonweb.util.Sequence

object FileCompilationTests {
  import EasyTest._
  val testFiles = new File("../unison-src/tests").toPath
  val testFiles = Paths.get("../unison-src/tests")

  val checkResultTests = Map[String, Term](
    "fib4" -> 2249999.u,
    "stream-shouldnt-damage-stack" -> ((4950.u, 9999.u)),
    "stream/iterate-increment-take-drop-reduce" ->
      scala.Stream.from(0).take(5).drop(3).sum,
    "stream/fromint64-take-map-tosequence" ->
      Term.Sequence(Sequence(
        scala.Stream.from(0)
          .take(10)
          .map(i => (i + 1l): Term).toList: _*
      )),
    "stream/iterate-increment-take-filter-reduce" ->
      scala.Stream.from(0).take(10000).filter(_ % 2 == 0).sum.u,
    "stream/fromint64-take-foldleft-plus" ->
      (0 until 10000).sum.u,
    "stream/scan-left" ->
      scala.Stream.from(1).take(10000).scanLeft(0l)(_+_).sum.u,
  )

  def tests = suite("compilation.file")(
@@ -28,7 +42,7 @@ object FileCompilationTests {
      p => p.toString.endsWith(".u") &&
        // (_:Path).toString.dropRight is very different from
        // (_:Path).dropRight
        !checkResultTests.contains(p.getFileName.toString.dropRight(2))
        !checkResultTests.contains(p.toString.drop(testFiles.toString.size + 1).dropRight(2))
    }
    .map(normalize)
    .toSeq
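A worked example of the new key computation (the file path is hypothetical but matches the tests added below): the old `getFileName`-based key dropped the directory, so a test in a subdirectory such as `stream/scan-left.u` could never match its `checkResultTests` entry; the new key is the path relative to `testFiles` with the `.u` extension removed:

object KeySketch {
  val testFiles = "../unison-src/tests"                    // stand-in, as a plain String
  val p         = "../unison-src/tests/stream/scan-left.u" // hypothetical test file path

  val oldKey = p.split('/').last.dropRight(2)          // "scan-left" -- never matches "stream/scan-left"
  val newKey = p.drop(testFiles.size + 1).dropRight(2) // "stream/scan-left" -- matches
}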
@@ -150,65 +150,6 @@ object StreamTests {
        (scala.Stream.from(0).take(10)).toList
      )
    }
  ),
  {
    import Term.Syntax._

    suite("unison") (
      test("take/drop") { implicit T =>
        equal[Term](
          s"""Stream.iterate +0 Int64.inc
             | |> Stream.take 5
             | |> Stream.drop 3
             | |> Stream.reduce 0 (+)
           """.stripMargin.runPipes,
          scala.Stream.from(0).take(5).drop(3).sum
        )
      },
      test("map") { implicit T =>
        equal[Term](
          s"""Stream.fromInt64 +0
             | |> Stream.take 10
             | |> Stream.map Int64.inc
             | |> Stream.toSequence
           """.stripMargin.runPipes,
          Term.Sequence(Sequence(
            scala.Stream.from(0)
              .take(10)
              .map(i => (i + 1l): Term).toList: _*
          ))
        )
      },
      // test("filter") { implicit T =>
      //   equal[List[Long]](
      //     Stream.iterate(0)(incU(env)).take(10000).filter(evenU(env))
      //       .toSequence[Long].toList,
      //     scala.Stream.from(0).map(_.toLong).take(10000).filter(_ % 2 == 0).toList
      //   )
      // },
      // test("foldLeft Int64_add") { implicit T =>
      //   val plusU = UnisonToScala.toUnboxed2(Builtins.Int64_add)
      //   equal(
      //     Stream.fromInt64(0).take(10000).foldLeft(Value(0))(plusU(env)),
      //     Value((0 until 10000).sum)
      //   )
      // },
      // test("scanLeft Int64_add") { implicit T =>
      //   val int64add = UnisonToScala.toUnboxed2(Builtins.Int64_add)(env)
      //   equal(
      //     Stream.fromInt64(1).take(10000).scanLeft(Value(0))(int64add).reduce(Value(0))(int64add),
      //     Value(scala.Stream.from(1).take(10000).scanLeft(0l)(_+_).sum)
      //   )
      // },
      // test("iterate Int64_inc, reduce Int64_add") { implicit T =>
      //   val incU = UnisonToScala.toUnboxed1(Builtins.Int64_inc)
      //   val plusU = UnisonToScala.toUnboxed2(Builtins.Int64_add)
      //   equal[Value](
      //     Stream.iterate(0l)(incU(env)).take(10).reduce(zero = Value(0))(plusU(env)),
      //     Value((scala.Stream.from(0).take(10)).sum)
      //   )
      // }
    )
  }
  )
)
}
unison-src/tests/stream/fromint64-take-foldleft-plus.u (new file, 4 lines)
@@ -0,0 +1,4 @@
a |> f = f a
Stream.from-uint64 0
  |> Stream.take 10000
  |> Stream.fold-left 0 (+_UInt64)
unison-src/tests/stream/fromint64-take-map-tosequence.u (new file, 5 lines)
@@ -0,0 +1,5 @@
a |> f = f a
Stream.from-int64 +0
  |> Stream.take 10
  |> Stream.map Int64.increment
  |> Stream.to-sequence
@@ -0,0 +1,6 @@
(+) = (+_Int64)
a |> f = f a
Stream.iterate +0 Int64.increment
  |> Stream.take 5
  |> Stream.drop 3
  |> Stream.reduce +0 (+)
@@ -0,0 +1,5 @@
a |> f = f a
Stream.iterate 0 UInt64.increment
  |> Stream.take 10000
  |> Stream.filter UInt64.is-even
  |> Stream.reduce 0 (+_UInt64)
unison-src/tests/stream/scan-left.u (new file, 5 lines)
@@ -0,0 +1,5 @@
a |> f = f a
Stream.from-uint64 1
  |> Stream.take 10000
  |> Stream.scan-left 0 (+_UInt64)
  |> Stream.reduce 0 (+_UInt64)