This commit is contained in:
Wojciech Daniło 2019-09-12 16:47:25 +02:00 committed by Josef
parent 432c47576e
commit 457a8128e8
77 changed files with 9472 additions and 3169 deletions

6
.gitignore vendored
View File

@ -84,3 +84,9 @@ bench-report.xml
############## ##############
/enso /enso
#########
## IDE ##
#########
.editorconfig

2
.jvmopts Normal file
View File

@ -0,0 +1,2 @@
-Xss8M
-Xmx3072M

View File

@ -15,8 +15,16 @@ align.tokens = [
{code = "%%", owner = "Term.ApplyInfix"} {code = "%%", owner = "Term.ApplyInfix"}
{code = "="} {code = "="}
{code = "<-"} {code = "<-"}
{code = "->"}
{code = "||"}
{code = "+:="}
{code = "?="}
{code = "extends"} {code = "extends"}
{code = "//"}
{code = "{"}
{code = "}"}
{code = ":", owner = "Defn.Def"} {code = ":", owner = "Defn.Def"}
{code = ":", owner = "Decl.Def"}
] ]
maxColumn = 80 maxColumn = 80
verticalAlignMultilineOperators = true verticalAlignMultilineOperators = true
@ -30,8 +38,28 @@ continuationIndent.callSite = 2
continuationIndent.defnSite = 2 continuationIndent.defnSite = 2
// Newlines // Newlines
newlines.alwaysBeforeElseAfterCurlyIf = true newlines.neverInResultType = false
newlines.alwaysBeforeTopLevelStatements = true newlines.neverBeforeJsNative = false
newlines.sometimesBeforeColonInMethodReturnType = true
newlines.penalizeSingleSelectMultiArgList = true
newlines.alwaysBeforeCurlyBraceLambdaParams = false
newlines.alwaysBeforeTopLevelStatements = false
newlines.afterCurlyLambda = never
newlines.afterImplicitKWInVerticalMultiline = false
newlines.beforeImplicitKWInVerticalMultiline = false
newlines.alwaysBeforeElseAfterCurlyIf = false
newlines.avoidAfterYield = true
verticalMultilineAtDefinitionSite = false
verticalMultilineAtDefinitionSiteArityThreshold = 100
verticalMultiline.atDefnSite = false
verticalMultiline.arityThreshold = 100
verticalMultiline.newlineBeforeImplicitKW = false
verticalMultiline.newlineAfterImplicitKW = false
verticalMultiline.newlineAfterOpenParen = false
verticalMultiline.excludeDanglingParens = [
"`class`"
"`trait`"
]
// Rewrite Rules // Rewrite Rules
rewrite.rules = [ rewrite.rules = [
@ -46,8 +74,9 @@ rewrite.sortModifiers.order = [
] ]
// Multiline Configuration // Multiline Configuration
verticalMultiline.atDefnSite = true verticalMultiline.atDefnSite = false
verticalMultiline.arityThreshold = 4 verticalMultiline.arityThreshold = 6
// Please remember that `//format: off` and `//format: on` directives should be // Please remember that `//format: off` and `//format: on` directives should be
// used sparingly, if at all. // used sparingly, if at all.

View File

@ -3,6 +3,7 @@ package org.enso.interpreter
import java.util.Optional import java.util.Optional
import scala.collection.JavaConverters._ import scala.collection.JavaConverters._
import scala.language.postfixOps
import scala.util.parsing.combinator._ import scala.util.parsing.combinator._
trait AstExpressionVisitor[+T] { trait AstExpressionVisitor[+T] {

View File

@ -1,16 +1,10 @@
package org.enso.pkg package org.enso.pkg
import java.io.File
import java.io.PrintWriter
import io.circe.yaml
import io.circe.generic.auto._ import io.circe.generic.auto._
import io.circe.syntax._ import io.circe.syntax._
import io.circe.yaml
import io.circe.yaml.syntax._ import io.circe.yaml.syntax._
import scala.io.Source
import scala.util.Try
case class Config( case class Config(
author: String, author: String,
maintainer: String, maintainer: String,

View File

@ -3,13 +3,7 @@ package org.enso.pkg
import java.io.File import java.io.File
object Main extends App { object Main extends App {
override def main(args: Array[String]): Unit = { Package.getOrCreate(new File("/Users/marcinkostrzewa/765Luna__$%%$#Project"))
Package.getOrCreate( Package.getOrCreate(new File("/Users/marcinkostrzewa/proper_%%$##%#project"))
new File("/Users/marcinkostrzewa/765Luna__$%%$#Project") Package.getOrCreate(new File("/Users/marcinkostrzewa/Properproject"))
)
Package.getOrCreate(
new File("/Users/marcinkostrzewa/proper_%%$##%#project")
)
Package.getOrCreate(new File("/Users/marcinkostrzewa/Properproject"))
}
} }

View File

@ -1,11 +1,8 @@
package org.enso.projectmanager package org.enso.projectmanager
import java.util.UUID
import akka.http.scaladsl.model.Uri import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Path import akka.http.scaladsl.model.Uri.Path
import akka.http.scaladsl.server.PathMatcher0 import akka.http.scaladsl.server.{PathMatcher0, PathMatcher1}
import akka.http.scaladsl.server.PathMatcher1
import akka.http.scaladsl.server.PathMatchers.JavaUUID import akka.http.scaladsl.server.PathMatchers.JavaUUID
import org.enso.projectmanager.model.ProjectId import org.enso.projectmanager.model.ProjectId

View File

@ -1,44 +1,25 @@
package org.enso.projectmanager package org.enso.projectmanager
import java.io.File import java.io.File
import java.util.UUID
import java.util.concurrent.TimeUnit import java.util.concurrent.TimeUnit
import akka.actor.ActorSystem import akka.actor.{ActorSystem, Scheduler}
import akka.actor.Scheduler
import com.typesafe.config.ConfigFactory
import akka.actor.typed.ActorRef import akka.actor.typed.ActorRef
import akka.actor.typed.scaladsl.adapter._
import akka.actor.typed.scaladsl.AskPattern._ import akka.actor.typed.scaladsl.AskPattern._
import akka.actor.typed.scaladsl.adapter._
import akka.http.scaladsl.Http import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpResponse import akka.http.scaladsl.model.{HttpResponse, StatusCodes, Uri}
import akka.http.scaladsl.model.StatusCodes import akka.http.scaladsl.server.{Directives, Route}
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.server.Directives
import akka.http.scaladsl.server.Route
import akka.stream.ActorMaterializer import akka.stream.ActorMaterializer
import akka.util.Timeout import akka.util.Timeout
import org.enso.projectmanager.api.ProjectFactory import com.typesafe.config.ConfigFactory
import org.enso.projectmanager.api.ProjectJsonSupport import org.enso.projectmanager.api.{ProjectFactory, ProjectJsonSupport}
import org.enso.projectmanager.model.Project import org.enso.projectmanager.model.{Project, ProjectId}
import org.enso.projectmanager.model.ProjectId import org.enso.projectmanager.services._
import org.enso.projectmanager.services.CreateTemporary
import org.enso.projectmanager.services.CreateTemporaryResponse
import org.enso.projectmanager.services.GetProjectById
import org.enso.projectmanager.services.GetProjectResponse
import org.enso.projectmanager.services.ListProjectsRequest
import org.enso.projectmanager.services.ListProjectsResponse
import org.enso.projectmanager.services.ListTutorialsRequest
import org.enso.projectmanager.services.ProjectsCommand
import org.enso.projectmanager.services.ProjectsService
import org.enso.projectmanager.services.StorageManager
import org.enso.projectmanager.services.TutorialsDownloader
import scala.concurrent.ExecutionContext import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.Future
import scala.concurrent.duration._ import scala.concurrent.duration._
import scala.util.Failure import scala.util.{Failure, Success}
import scala.util.Success
case class Server( case class Server(
host: String, host: String,

View File

@ -1,11 +1,8 @@
package org.enso.projectmanager.api package org.enso.projectmanager.api
import java.util.UUID
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.http.scaladsl.model.Uri import akka.http.scaladsl.model.Uri
import org.enso.projectmanager.RouteHelper import org.enso.projectmanager.{RouteHelper, model}
import org.enso.projectmanager.model
import org.enso.projectmanager.model.ProjectId import org.enso.projectmanager.model.ProjectId
import spray.json.DefaultJsonProtocol import spray.json.DefaultJsonProtocol

View File

@ -1,14 +1,8 @@
package org.enso.projectmanager.services package org.enso.projectmanager.services
import java.util.UUID import akka.actor.typed.{ActorRef, Behavior}
import akka.actor.typed.scaladsl.{Behaviors, StashBuffer}
import akka.actor.typed.scaladsl.Behaviors import org.enso.projectmanager.model.{Project, ProjectId, ProjectsRepository}
import akka.actor.typed.scaladsl.StashBuffer
import akka.actor.typed.ActorRef
import akka.actor.typed.Behavior
import org.enso.projectmanager.model.Project
import org.enso.projectmanager.model.ProjectId
import org.enso.projectmanager.model.ProjectsRepository
import scala.collection.immutable.HashMap import scala.collection.immutable.HashMap

View File

@ -1,17 +1,9 @@
package org.enso.projectmanager.services package org.enso.projectmanager.services
import java.io.File import java.io.File
import java.util.UUID
import org.enso.pkg.Package import org.enso.pkg.Package
import org.enso.projectmanager.model.Local import org.enso.projectmanager.model._
import org.enso.projectmanager.model.Project
import org.enso.projectmanager.model.ProjectType
import org.enso.projectmanager.model.ProjectsRepository
import org.enso.projectmanager.model.Temporary
import org.enso.projectmanager.model.Tutorial
import scala.collection.immutable.HashMap
case class StorageManager( case class StorageManager(
localProjectsPath: File, localProjectsPath: File,

View File

@ -35,7 +35,7 @@ trait GithubJsonProtocol extends SprayJsonSupport with DefaultJsonProtocol {
jsonFormat(GithubTutorial, "name", "pushed_at") jsonFormat(GithubTutorial, "name", "pushed_at")
} }
case class HttpHelper( case class HttpHelper()(
implicit val executor: ExecutionContext, implicit val executor: ExecutionContext,
implicit val system: ActorSystem, implicit val system: ActorSystem,
implicit val materializer: ActorMaterializer) { implicit val materializer: ActorMaterializer) {

View File

@ -0,0 +1,14 @@
package org.enso.data
/** Runtime-reflection helper for enumerating the variants of a "flat" ADT
  * (a sealed trait whose direct subclasses are all objects).
  */
object ADT {
  import reflect.runtime.universe.TypeTag

  /** Returns the singleton instance of every known direct subclass of `T`.
    *
    * NOTE(review): assumes each direct subclass of `T` is an `object`
    * (a module); a non-object subclass would make `staticModule` /
    * `instance` fail at runtime — confirm at call sites. Also,
    * `knownDirectSubclasses` is only reliable when `T` is sealed.
    */
  def constructors[T](implicit ttag: TypeTag[T]) = {
    // Direct subclasses the compiler recorded for `T`.
    val subs = ttag.tpe.typeSymbol.asClass.knownDirectSubclasses
    subs.map { symbol =>
      // Resolve each subclass symbol to its module and pull out the
      // already-constructed singleton, cast back to the ADT type.
      val module = reflect.runtime.currentMirror.staticModule(symbol.fullName)
      val clazz = reflect.runtime.currentMirror.reflectModule(module)
      clazz.instance.asInstanceOf[T]
    }
  }
}

View File

@ -0,0 +1,13 @@
package org.enso.data
/** Three-way comparison result: less-than, greater-than, or equal. */
sealed trait Compare
object Compare {
  case object LT extends Compare
  case object GT extends Compare
  case object EQ extends Compare

  /** Compares two values via their [[Ordering]] and returns the
    * corresponding [[Compare]] constant.
    */
  def compare[T: Ordering](a: T, b: T): Compare = {
    val ord = implicitly[Ordering[T]]
    ord.compare(a, b) match {
      case r if r < 0 => LT
      case r if r > 0 => GT
      case _          => EQ
    }
  }
}

View File

@ -0,0 +1,39 @@
package org.enso
import cats.data.NonEmptyList
package object data {

  /** A non-empty list: alias for cats' [[NonEmptyList]]. */
  type List1[+T] = NonEmptyList[T]

  object List1 {
    def apply[T](el: T, tail: List[T]): List1[T] = new List1(el, tail)
    def apply[T](el: T, tail: T*): List1[T] = new List1(el, tail.toList)
    def apply[T](list: List[T]): Option[List1[T]] = fromListOption(list)

    def unapply[T](t: List1[T]): Option[(T, List[T])] =
      Some((t.head, t.tail))

    /** Converts a plain list, yielding [[None]] for the empty list. */
    def fromListOption[T](lst: List[T]): Option[List1[T]] = lst match {
      case first :: rest => Some(List1(first, rest))
      case Nil           => None
    }

    implicit class List1_ops[+T](lst: List1[T]) {

      /** Applies `f` to every element except the last one. */
      def mapInit[B >: T](f: T => B): List1[B] = lst.tail match {
        case Nil  => lst
        case rest => List1(f(lst.head), rest.init.map(f) :+ rest.last)
      }

      /** Applies `f` to the last element only. */
      def mapLast[B >: T](f: T => B): List1[B] = lst.tail match {
        case Nil  => List1(f(lst.head), Nil)
        case rest => List1(lst.head, rest.init :+ f(rest.last))
      }

      /** Inserts `t` between every pair of consecutive elements. */
      def intersperse[B >: T](t: B): List1[B] =
        List1(lst.head, lst.tail.flatMap(t :: _ :: Nil))

      /** Prepends a plain list, keeping the result non-empty. */
      def +:[B >: T](that: List[B]): List1[B] = that match {
        case first :: rest => List1(first, rest ++ lst.toList)
        case Nil           => lst
      }
    }
  }
}

View File

@ -0,0 +1,48 @@
package org.enso.data
import org.enso.data
/** A value `el` preceded by a horizontal offset `off`. */
case class Shifted[+T](off: Int, el: T) {

  /** Transforms the payload while preserving the offset. */
  def map[S](f: T => S): Shifted[S] = Shifted(off, f(el))
}
object Shifted {

  /** Wraps a value with a zero offset. */
  def apply[T](el: T): Shifted[T] = Shifted(0, el)

  /** A non-empty sequence whose head carries no offset of its own; every
    * following element is [[Shifted]] relative to its predecessor.
    */
  case class List1[+T](head: T, tail: List[Shifted[T]]) {

    /** Transforms every payload, keeping all offsets intact. */
    def map[S](f: T => S): List1[S] = {
      val shiftedTail = tail.map(_.map(f))
      List1(f(head), shiftedTail)
    }

    /** Renders the sequence as a plain list, giving the head the
      * supplied offset.
      */
    def toList(off: Int = 0): List[Shifted[T]] =
      toList1(off).toList

    /** Renders the sequence as a non-empty list, giving the head the
      * supplied offset.
      */
    def toList1(off: Int = 0): data.List1[Shifted[T]] =
      data.List1(Shifted(off, head), tail)

    /** Prepends an (offset, value) pair; the old head keeps the pair's
      * offset relative to the new head.
      */
    def +:[B >: T](t: (Int, B)): List1[B] = {
      val (newOff, newHead) = t
      List1(newHead, Shifted(newOff, head) :: tail)
    }

    /** Prepends a shifted element; its offset becomes the old head's. */
    def +:[B >: T](t: Shifted[B]): List1[B] =
      List1(t.el, Shifted(t.off, head) :: tail)

    /** Appends another non-empty sequence. */
    def +[B >: T](that: List1[B]): List1[B] =
      List1(head, tail ::: that.toList())

    /** Appends a plain list of shifted elements. */
    def +[B >: T](that: List[Shifted[B]]): List1[B] =
      List1(head, tail ::: that)

    /** Appends a single shifted element. */
    def :+[B >: T](that: Shifted[B]): List1[B] =
      List1(head, tail :+ that)
  }

  object List1 {
    def apply[T](head: T): List1[T] = List1(head, Nil)

    implicit def fromTuple[T](t: (T, List[Shifted[T]])): List1[T] =
      List1(t._1, t._2)

    /** Builds a [[List1]] from a non-empty list, discarding the first
      * element's offset. Throws on an empty input list.
      */
    def fromListDropHead[T](lst: List[Shifted[T]]) =
      List1(lst.head.el, lst.tail)
  }
}

View File

@ -0,0 +1,37 @@
package org.enso.data
/** An immutable prefix tree: each node optionally stores a value and maps
  * keys to child subtrees.
  *
  * @param value    the value stored at this node, if any
  * @param branches child subtrees, keyed by the next path segment
  */
case class Tree[K, V](value: Option[V], branches: Map[K, Tree[K, V]]) {

  /** Inserts (or overwrites) a value at the given path, creating
    * intermediate nodes as needed.
    */
  def +(item: (List[K], V)): Tree[K, V] = item._1 match {
    case Nil => this.copy(value = Some(item._2))
    case p :: ps => {
      val newBranch = branches.getOrElse(p, Tree[K, V]()) + (ps -> item._2)
      this.copy(branches = branches + (p -> newBranch))
    }
  }

  /** Transforms every stored value with `f`.
    *
    * Uses a strict `map` instead of `mapValues`: `mapValues` returns a lazy
    * view that re-applies `f` on every access (and is deprecated /
    * type-incompatible in Scala 2.13).
    */
  def map[S](f: V => S): Tree[K, S] =
    Tree(value.map(f), branches.map { case (k, t) => k -> t.map(f) })

  /** Keeps only the tree structure, discarding all values. */
  def dropValues(): Tree[K, Unit] =
    map(_ => ())

  /** The direct child under `key`, if present. */
  def get(key: K): Option[Tree[K, V]] =
    branches.get(key)

  /** The subtree at `path`; `Nil` addresses this node itself. */
  def get(path: List[K]): Option[Tree[K, V]] = path match {
    case Nil     => Some(this)
    case p :: ps => branches.get(p).flatMap(_.get(ps))
  }

  /** The value stored at `path`, if both the node and its value exist. */
  def getValue(path: List[K]): Option[V] =
    get(path).flatMap(_.value)

  /** True when this node has no children. */
  def isLeaf: Boolean =
    branches.isEmpty
}
object Tree {

  /** An empty tree: no value, no branches. */
  def apply[K, V](): Tree[K, V] = new Tree(None, Map())

  /** Builds a tree from (path, value) pairs; later pairs win on conflict. */
  def apply[K, V](deps: (List[K], V)*): Tree[K, V] =
    deps.foldLeft(Tree[K, V]())(_ + _)
}

View File

@ -0,0 +1,38 @@
package org.enso.data
import org.enso.data.VectorIntMap.Index
/** Read-only zipper-like map.
  *
  * A sparse, sorted, vector-based map with O(1) access when reading
  * adjacent keys and O(N) access when reading random keys. This is
  * achieved by remembering the position of the last accessed key in the
  * caller-supplied [[Index]] cursor.
  */
final class VectorIntMap[V](values: Seq[(Int, V)] = Seq()) {
  // Entries sorted by key so the cursor can scan linearly in both directions.
  private val vector = values.toVector.sortBy(_._1)

  /** Looks up `key`, scanning forward and then backward from the position
    * remembered in `i`.
    *
    * Mutates `i.index` so that subsequent lookups of nearby keys are cheap;
    * `i.offset` is added to every stored key before comparison. The cursor
    * is left at a valid in-range position (or 0 for an empty map).
    */
  def get(key: Int, i: Index): Option[V] = {
    // Effective key / value at the cursor, re-read on each access.
    def _key = vector(i.index)._1 + i.offset
    def value = vector(i.index)._2
    // Scan forward while the cursor's key has not passed the target.
    while (i.index < vector.length && _key <= key) {
      if (_key == key)
        return Some(value)
      i.index += 1
    }
    // Step back inside bounds before scanning in the other direction.
    i.index -= 1
    // Scan backward while the cursor's key has not dropped below the target.
    while (i.index >= 0 && _key >= key) {
      if (_key == key)
        return Some(value)
      i.index -= 1
    }
    // Restore a valid position for the next call; the key is absent.
    i.index += 1
    None
  }
}
object VectorIntMap {

  /** Mutable lookup cursor: `index` is the last accessed position in the
    * sorted vector; `offset` shifts every stored key during comparison.
    */
  class Index(var index: Int, val offset: Int = 0)
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,43 @@
package org.enso.syntax.text.ast
import org.enso.syntax.text.AST
/** Combinator DSL for building application [[AST]] nodes with an explicit
  * inter-token offset (presumably a count of spaces — confirm against the
  * AST module's offset convention).
  */
object DSL {
  import AST.conversions._
  implicit final class ASTHelper(self: AST) {
    // "Smart" application: when the left side is a left operator section,
    // fold the new argument into a full infix application; otherwise fall
    // back to the raw application rules below.
    private def smartApp(off: Int, r: AST): AST = self match {
      case AST.App.Section.Left.any(t) =>
        AST.App.Infix(t.arg, t.off, t.opr, off, r)
      case _ => smartAppRaw(off, r)
    }
    // Raw application: an operator on the right builds a left section, an
    // operator on the left builds a right section, anything else is a
    // plain prefix application.
    private def smartAppRaw(off: Int, r: AST): AST = (self, r) match {
      case (l, AST.Opr.any(r)) => AST.App.Left(l, off, r)
      case (AST.Opr.any(l), r) => AST.App.Right(l, off, r)
      case (l, r) => AST.App.Prefix(l, off, r)
    }
    // `$` family: smart application; the number of underscores in the
    // method name is the offset between the two nodes (0 to 3).
    def $(t: AST) = smartApp(0, t)
    def $_(t: AST) = smartApp(1, t)
    def $__(t: AST) = smartApp(2, t)
    def $___(t: AST) = smartApp(3, t)
    // `$$` family: raw application (no section folding), same offsets.
    def $$(t: AST) = smartAppRaw(0, t)
    def $$_(t: AST) = smartAppRaw(1, t)
    def $$__(t: AST) = smartAppRaw(2, t)
    def $$___(t: AST) = smartAppRaw(3, t)
  }
  // String convenience overloads: lift the receiver to AST (via the
  // implicit conversions imported above) and delegate to ASTHelper.
  implicit final class StringHelpers(self: String) {
    def $(t: AST) = (self: AST) $ t
    def $_(t: AST) = (self: AST) $_ t
    def $__(t: AST) = (self: AST) $__ t
    def $___(t: AST) = (self: AST) $___ t
    def $$(t: AST) = (self: AST) $$ t
    def $$_(t: AST) = (self: AST) $$_ t
    def $$__(t: AST) = (self: AST) $$__ t
    def $$___(t: AST) = (self: AST) $$___ t
  }
}

View File

@ -0,0 +1,566 @@
package org.enso.syntax.text.ast
import org.enso.data.ADT
import org.enso.data.List1
import org.enso.syntax.text.ast.Repr.R
import scalatags.Text.all._
import scalatags.Text.TypedTag
import scalatags.Text.{all => HTML}
import scalatags.generic
import scalatags.text.Builder
import scala.util.Random
////////////////////////////////////////////////////////////////////////////////
//// Doc ///////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/** Doc - the highest-level container; the output of the Doc Parser.
  *
  * A Doc is made of up to three elements:
  * @param tags - if present, holds the tags applied to the documented text
  * @param synopsis - if present, holds the synopsis of the documented text
  * @param body - if present, holds the body of the documented text
  */
final case class Doc(
  tags: Option[Doc.Tags],
  synopsis: Option[Doc.Synopsis],
  body: Option[Doc.Body]
) extends Doc.Symbol {
  // Source representation: the three sections concatenated in order;
  // absent sections contribute nothing.
  val repr: Repr.Builder = R + tags + synopsis + body
  // Rendered form: one wrapper div (CSS class derived from this type's
  // name via htmlCls) containing the HTML of each present section.
  val html: Doc.HTML = Seq(
    HTML.div(htmlCls())(tags.html)(synopsis.html)(body.html)
  )
}
object Doc {
def apply(): Doc = Doc(None, None, None)
def apply(tags: Tags): Doc = Doc(Some(tags), None, None)
def apply(synopsis: Synopsis): Doc =
Doc(None, Some(synopsis), None)
def apply(synopsis: Synopsis, body: Body): Doc =
Doc(None, Some(synopsis), Some(body))
def apply(tags: Tags, synopsis: Synopsis): Doc =
Doc(Some(tags), Some(synopsis), None)
def apply(tags: Tags, synopsis: Synopsis, body: Body): Doc =
Doc(Some(tags), Some(synopsis), Some(body))
type HTML = Seq[Modifier]
type HTMLTag = TypedTag[String]
//////////////////////////////////////////////////////////////////////////////
//// Symbol //////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** Symbol - the most low-level element, on top of which every other element
* is built
*
* It extends Repr.Provider, so it also contain repr method, as well as
* span and show values. In addition to that it specifies html method for
* extending tokens and renderHTML method for creating ready-to-deploy HTML
* file from documentation
*/
sealed trait Symbol extends Repr.Provider {
def show() = repr.build()
def html: HTML
def renderHTML(cssLink: String): HTMLTag = {
val metaEquiv = HTML.httpEquiv := "Content-Type"
val metaCont = HTML.content := "text/html"
val metaChar = HTML.charset := "UTF-8"
val meta = HTML.meta(metaEquiv)(metaCont)(metaChar)
val cssRel = HTML.rel := "stylesheet"
val cssHref = HTML.href := cssLink
val css = HTML.link(cssRel)(cssHref)
HTML.html(HTML.head(meta, css), HTML.body(html))
}
def htmlCls(): generic.AttrPair[Builder, String] =
HTML.`class` := getClass.toString.split('$').last.split('.').last
}
implicit final class ExtForSymbol[T <: Symbol](val self: Option[T]) {
val dummyText = Elem.Text("")
val html: HTML = self.getOrElse(dummyText).html
}
//////////////////////////////////////////////////////////////////////////////
//// Elem ////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** Elem - the trait for proper element of Doc, which elements can be used in
* higher level elements
* Invalid - trait for invalid element of Doc, which elements can be used in
* higher level elements
*/
sealed trait Elem extends Symbol
object Elem {
sealed trait Invalid extends Elem
////////////////////////////////////////////////////////////////////////////
//// Normal text & Newline /////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
/* Text - used to hold normal string as Elem
* Newline - used to hold newline ('\n') as elem
*/
final case class Text(text: String) extends Elem {
val repr: Repr.Builder = text
val html: HTML = Seq(text)
}
implicit def stringToText(str: String): Elem.Text = Elem.Text(str)
case object Newline extends Elem {
val repr: Repr.Builder = R + "\n"
val html: HTML = Seq(" ")
}
////////////////////////////////////////////////////////////////////////////
//// Text Formatter - Bold, Italic, Strikeout //////////////////////////////
////////////////////////////////////////////////////////////////////////////
/** Formatter - element used to hold formatted text
*
* @param typ - specifies type of formatter (Bold, Italic, Strikeout)
* @param elems - elems which make up formatter
*/
final case class Formatter(typ: Formatter.Type, elems: scala.List[Elem])
extends Elem {
val repr: Repr.Builder = R + typ.marker + elems + typ.marker
val html: HTML = Seq(typ.htmlMarker(elems.html))
}
object Formatter {
def apply(typ: Type): Formatter = Formatter(typ, Nil)
def apply(typ: Type, elem: Elem): Formatter =
Formatter(typ, elem :: Nil)
def apply(typ: Type, elems: Elem*): Formatter =
Formatter(typ, elems.toList)
abstract class Type(val marker: Char, val htmlMarker: HTMLTag)
case object Bold extends Type('*', HTML.b)
case object Italic extends Type('_', HTML.i)
case object Strikeout extends Type('~', HTML.s)
/** Unclosed - Invalid formatter made by parser if user has invoked
* formatter but hasn't ended it
*
* @param typ - specifies type of formatter (Bold, Italic, Strikeout)
* @param elems - elems which make up unclosed formatter
*/
final case class Unclosed(typ: Type, elems: scala.List[Elem])
extends Elem.Invalid {
val repr: Repr.Builder = R + typ.marker + elems
val html: HTML = Seq(HTML.div(htmlCls())(typ.htmlMarker(elems.html)))
}
object Unclosed {
def apply(typ: Type): Unclosed = Unclosed(typ, Nil)
def apply(typ: Type, elem: Elem): Unclosed = Unclosed(typ, elem :: Nil)
def apply(typ: Type, elems: Elem*): Unclosed =
Unclosed(typ, elems.toList)
}
}
implicit final class ExtForListOfElem(val self: scala.List[Elem])
extends Symbol {
val repr: Repr.Builder = R + self.map(_.repr)
val html: HTML = Seq(self.map(_.html))
}
////////////////////////////////////////////////////////////////////////////
//// Code //////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
/** Code - block used to hold lines of code in Documentation
*
* @param elems - lines of code
*/
/*TODO [MM]: Next PR
Code showing button - we need other design here.
Basically we don't want to display always button
we want to be able to display it maybe as a button on website
and completely differently in gui, it should be configurable*/
final case class CodeBlock(elems: List1[CodeBlock.Line]) extends Elem {
val newLn: Elem = Elem.Newline
val repr: Repr.Builder = R + elems.head + elems.tail.map(R + newLn + _)
val html: HTML = {
val uniqueIDCode = Random.alphanumeric.take(8).mkString("")
val uniqueIDBtn = Random.alphanumeric.take(8).mkString("")
val htmlIdCode = HTML.`id` := uniqueIDCode
val htmlIdBtn = HTML.`id` := uniqueIDBtn
val elemsHTML = elems.toList.map(elem => elem.html)
val btnAction = onclick :=
s"""var code = document.getElementById("$uniqueIDCode");
|var btn = document.getElementById("$uniqueIDBtn").firstChild;
|btn.data = btn.data == "Show" ? "Hide" : "Show";
|code.style.display = code.style.display ==
|"inline-block" ? "none" : "inline-block";""".stripMargin
.replaceAll("\n", "")
val btn = HTML.button(btnAction)(htmlIdBtn)("Show")
Seq(HTML.div(btn, HTML.div(htmlCls())(htmlIdCode)(elemsHTML)))
}
}
object CodeBlock {
def apply(elem: CodeBlock.Line): CodeBlock = CodeBlock(List1(elem))
def apply(elems: CodeBlock.Line*): CodeBlock =
CodeBlock(List1(elems.head, elems.tail.toList))
/* Inline - line of code which is in line with other elements
* Line - elem which is a part of Code Block
*/
final case class Inline(str: String) extends Elem {
val marker = '`'
val repr: Repr.Builder = R + marker + str + marker
val html: HTML = Seq(HTML.code(str))
}
final case class Line(indent: Int, elem: String) extends Elem {
val repr: Repr.Builder = R + indent + elem
val html: HTML = Seq(HTML.code(elem), HTML.br)
}
}
////////////////////////////////////////////////////////////////////////////
//// Link - URL & Image ////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
/** Link - element used to hold links
*
* @param name - specifies where does the link take us
* @param url - specifies address
*
* there are two kinds of links - normal URL and Image embedded in text
*
* Link.Invalid - something that couldn't be pattern matched to create link
*/
abstract class Link(name: String, url: String, val marker: Option[String])
extends Elem {
val repr: Repr.Builder = R + marker + "[" + name + "](" + url + ")"
val html: HTML = this match {
case _: Link.URL => Seq(HTML.a(HTML.href := url)(name))
case _: Link.Image => Seq(HTML.img(HTML.src := url), name)
}
}
object Link {
final case class URL(name: String, url: String)
extends Link(name, url, None)
object URL {
def apply(): URL = URL("", "")
}
final case class Image(name: String, url: String)
extends Link(name, url, Some("!"))
object Image {
def apply(): Image = Image("", "")
}
final case class Invalid(elem: String) extends Elem {
val repr: Repr.Builder = R + elem
val html: HTML = {
val htmlClass = HTML.`class` := this.productPrefix + getObjectName
Seq(HTML.div(htmlClass)(elem.html))
}
}
def getObjectName: String = {
getClass.toString.split('$').last
}
}
////////////////////////////////////////////////////////////////////////////
//// List - Ordered & Unordered, Invalid Indent ////////////////////////////
////////////////////////////////////////////////////////////////////////////
/** List - block used to hold ordered and unordered lists
*
* @param indent - specifies indentation of list
* @param typ - type of list
* @param elems - elements which make up list
*
* Indent.Invalid - holds list element with invalid indent
*/
final case class List(indent: Int, typ: List.Type, elems: List1[Elem])
extends Elem {
val repr: Repr.Builder = R + indent + typ.marker + elems.head + elems.tail
.map {
case elem @ (_: Elem.Invalid) => R + Newline + elem
case elem @ (_: List) => R + Newline + elem
case elem =>
R + Newline + indent + typ.marker + elem
}
val html: HTML = {
val elemsHTML = elems.toList.map {
case elem @ (_: List) => elem.html
case elem => Seq(HTML.li(elem.html))
}
Seq(typ.HTMLMarker(elemsHTML))
}
}
object List {
def apply(indent: Int, listType: Type, elem: Elem): List =
List(indent, listType, List1(elem))
def apply(indent: Int, listType: Type, elems: Elem*): List =
List(indent, listType, List1(elems.head, elems.tail.toList))
abstract class Type(val marker: Char, val HTMLMarker: HTMLTag)
final case object Unordered extends Type('-', HTML.ul)
final case object Ordered extends Type('*', HTML.ol)
object Indent {
final case class Invalid(indent: Int, typ: Type, elem: Elem)
extends Elem.Invalid {
val repr: Repr.Builder = R + indent + typ.marker + elem
val html: HTML = {
val className = this.productPrefix
val htmlCls = HTML.`class` := className + getObjectName
Seq(HTML.div(htmlCls)(elem.html))
}
}
def getObjectName: String = {
getClass.getEnclosingClass.toString.split('$').last
}
}
}
}
//////////////////////////////////////////////////////////////////////////////
//// Sections - Raw & Marked /////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** Section - block used to hold one section of text
*
* indent - specifies indentation of section
* elems - elements which make up section
*
* Marked - Section which is marked as Important, Info or Example
* Raw - normal, unmarked block of text
*/
sealed trait Section extends Symbol {
def indent: Int
def elems: List[Elem]
def reprOfNormalText(elem: Elem, prevElem: Elem): Repr.Builder = {
prevElem match {
case Elem.Newline => R + indent + elem
case _ => R + elem
}
}
val html: HTML = Seq(HTML.div(htmlCls())(elems.map(_.html)))
}
object Section {
/** Header - element used to hold header for section
*
* @param elems - elements which make up header
*/
final case class Header(elems: List[Elem]) extends Elem {
val repr: Repr.Builder = R + elems.map(_.repr)
val html: HTML = Seq(HTML.div(htmlCls())(elems.map(_.html)))
}
object Header {
def apply(elem: Elem): Header = Header(elem :: Nil)
def apply(elems: Elem*): Header = Header(elems.toList)
}
final case class Marked(
indentBeforeMarker: Int,
indentAfterMarker: Int,
typ: Marked.Type,
elems: List[Elem]
) extends Section {
val marker: String = typ.marker.toString
val firstIndentRepr
: Repr.Builder = R + indentBeforeMarker + marker + indentAfterMarker
val dummyElem = Elem.Text("")
val elemsRepr: List[Repr.Builder] = elems.zip(dummyElem :: elems).map {
case (elem @ (_: Elem.List), _) => R + elem
case (elem @ (_: Elem.CodeBlock), _) => R + elem
case (elem, prevElem) => reprOfNormalText(elem, prevElem)
}
val repr: Repr.Builder = R + firstIndentRepr + elemsRepr
override def htmlCls(): generic.AttrPair[Builder, String] = {
HTML.`class` := typ.toString
}
override def indent: Int =
indentBeforeMarker + marker.length + indentAfterMarker
}
object Marked {
def apply(
indentBeforeMarker: Int,
indentAfterMarker: Int,
typ: Type
): Marked = Marked(indentBeforeMarker, indentAfterMarker, typ, Nil)
def apply(
indentBeforeMarker: Int,
indentAfterMarker: Int,
typ: Type,
elem: Elem
): Marked =
Marked(indentBeforeMarker, indentAfterMarker, typ, elem :: Nil)
def apply(
indentBeforeMarker: Int,
indentAfterMarker: Int,
typ: Type,
elems: Elem*
): Marked =
Marked(indentBeforeMarker, indentAfterMarker, typ, elems.toList)
val defaultIndent = 0
def apply(typ: Type): Marked =
Marked(defaultIndent, defaultIndent, typ, Nil)
def apply(typ: Type, elem: Elem): Marked =
Marked(defaultIndent, defaultIndent, typ, elem :: Nil)
def apply(typ: Type, elems: Elem*): Marked =
Marked(defaultIndent, defaultIndent, typ, elems.toList)
abstract class Type(val marker: Char)
case object Important extends Type('!')
case object Info extends Type('?')
case object Example extends Type('>')
}
final case class Raw(indent: Int, elems: List[Elem]) extends Section {
val dummyElem = Elem.Text("")
val newLn: Elem = Elem.Newline
val elemsRepr: List[Repr.Builder] = elems.zip(dummyElem :: elems).map {
case (elem @ (_: Section.Header), _) => R + newLn + indent + elem
case (elem @ (_: Elem.List), _) => R + elem
case (elem @ (_: Elem.CodeBlock), _) => R + elem
case (elem, prevElem) => reprOfNormalText(elem, prevElem)
}
val repr: Repr.Builder = R + indent + elemsRepr
}
object Raw {
def apply(indent: Int): Raw = Raw(indent, Nil)
def apply(indent: Int, elem: Elem): Raw = Raw(indent, elem :: Nil)
def apply(indent: Int, elems: Elem*): Raw = Raw(indent, elems.toList)
val defaultIndent = 0
def apply(): Raw = Raw(defaultIndent, Nil)
def apply(elem: Elem): Raw = Raw(defaultIndent, elem :: Nil)
def apply(elems: Elem*): Raw = Raw(defaultIndent, elems.toList)
}
}
//////////////////////////////////////////////////////////////////////////////
//// Synopsis ////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** Synopsis - block used to hold section as a synopsis of documentation
*
* @param elems - sections which make up synopsis
*/
final case class Synopsis(elems: List1[Section]) extends Symbol {
val newLn: Elem = Elem.Newline
val repr: Repr.Builder = R + elems.head + elems.tail.map(R + newLn + _)
val html: HTML = {
Seq(HTML.div(htmlCls())(elems.toList.map(_.html)))
}
}
object Synopsis {
def apply(elem: Section): Synopsis = Synopsis(List1(elem))
def apply(elems: Section*): Synopsis =
Synopsis(List1(elems.head, elems.tail.toList))
}
//////////////////////////////////////////////////////////////////////////////
//// Body ////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** Body - block used to hold proper body of documentation
*
* @param elems - sections which make up body
*/
final case class Body(elems: List1[Section]) extends Symbol {
val newLn: Elem = Elem.Newline
val repr: Repr.Builder = R + newLn + elems.head + elems.tail.map(
R + newLn + _
)
val html: HTML = Seq(
HTML.div(htmlCls())(HTML.h2("Overview"))(elems.toList.map(_.html))
)
}
object Body {
  def apply(elem: Section): Body = Body(List1(elem))

  /** Builds a body from varargs; requires at least one section. */
  def apply(elems: Section*): Body = {
    val sections = elems.toList
    Body(List1(sections.head, sections.tail))
  }
}
//////////////////////////////////////////////////////////////////////////////
//// Tags ////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** Tags - block used to hold tags for documentation
  *
  * @param elems - non-empty list of Tag of which Tags is made of
  */
final case class Tags(elems: List1[Tags.Tag]) extends Symbol {
  // Separator between tags; unlike other blocks it is also emitted after the
  // last tag (trailing newline in the textual form).
  val newLn: Elem = Elem.Newline
  val repr
    : Repr.Builder = R + elems.head + elems.tail.map(R + newLn + _) + newLn
  val html: HTML = Seq(HTML.div(htmlCls())(elems.toList.map(_.html)))
}
object Tags {
  def apply(elem: Tag): Tags = Tags(List1(elem))
  def apply(elems: Tag*): Tags = Tags(List1(elems.head, elems.tail.toList))

  /** Tag - one single tag for Tags
    *
    * @param indent - indent of tag
    * @param typ - type of tag, which can be
    * Deprecated, Added, Removed, Modified, Upcoming or Unrecognized
    * @param details - optional information for tag
    */
  final case class Tag(indent: Int, typ: Tag.Type, details: Option[String])
      extends Elem {
    // Textual form prints the type name in upper case, e.g. "DEPRECATED".
    val name: String = typ.toString.toUpperCase
    val repr: Repr.Builder = typ match {
      // Unrecognized tags carry their raw text in `details`, so no name.
      case Tag.Unrecognized => R + indent + details
      case _                => R + indent + name + details
    }
    val html: HTML = typ match {
      case Tag.Unrecognized =>
        Seq(HTML.div(HTML.`class` := name)(details.html))
      case _ => Seq(HTML.div(HTML.`class` := name)(name)(details.html))
    }
  }
  object Tag {
    val defaultIndent = 0
    def apply(typ: Type): Tag = Tag(defaultIndent, typ, None)
    def apply(typ: Type, details: String): Tag =
      Tag(defaultIndent, typ, Some(details))
    def apply(indent: Int, typ: Type): Tag = Tag(indent, typ, None)
    def apply(indent: Int, typ: Type, details: String): Tag =
      Tag(indent, typ, Some(details))

    sealed trait Type
    object Type {
      case object Deprecated extends Type
      case object Added extends Type
      case object Removed extends Type
      case object Modified extends Type
      case object Upcoming extends Type
      val codes = ADT.constructors[Type]
    }
    // NOTE(review): Unrecognized lives outside `object Type`, so it may be
    // excluded from `Type.codes` depending on how ADT.constructors enumerates
    // subtypes — confirm this is intentional.
    case object Unrecognized extends Type
  }

  // Renders the optional tag details; the CSS class is derived from the
  // runtime class name (last segment after '$').
  implicit final class ExtForTagDetails(val self: Option[String]) {
    val html: HTML = {
      val htmlCls = HTML.`class` := this.getClass.toString.split('$').last
      Seq(self.map(HTML.div(htmlCls)(_)))
    }
  }
}
}

View File

@ -0,0 +1,140 @@
package org.enso.syntax.text.ast
import java.nio.charset.StandardCharsets
import org.enso.data.List1
import org.enso.data.Shifted
import cats.Monoid
import cats.implicits._
import scala.annotation.tailrec
////////////////////////////////////////////////////////////////////////////////
//// Repr //////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/** Type class for values that can render themselves into a [[Repr.Builder]],
  * i.e. into their exact textual (source) representation.
  */
trait Repr[T] {
  def repr(a: T): Repr.Builder
}
object Repr {

  //// Smart Constructors ////

  /** Summons the instance for `T` and renders `t`. */
  def apply[T: Repr](t: T): Builder = implicitly[Repr[T]].repr(t)

  // Empty builder — the usual starting point of `R + a + b + ...` chains.
  val R = Repr.Builder.Empty()

  //// Operations ////

  // Extension methods: `x.repr` and `x.span` for any `T: Repr`.
  implicit class ToReprOps[T: Repr](t: T) {
    def repr: Builder = Repr(t)
    def span: Int = repr.span
  }

  ///// Instances ////
  implicit def reprForUnit: Repr[Unit] =
    _ => Repr.Builder.Empty()
  implicit def reprForString: Repr[String] =
    Repr.Builder.Text(_)
  // An Int renders as that many spaces (an offset); 0 renders as nothing.
  implicit def reprForInt: Repr[Int] = {
    case 0 => R
    case i => Repr.Builder.Space(i)
  }
  implicit def reprForChar: Repr[Char] =
    Repr.Builder.Letter(_)
  implicit def reprForTuple2[T1: Repr, T2: Repr]: Repr[(T1, T2)] =
    t => Repr.Builder.Seq(Repr(t._1), Repr(t._2))
  implicit def reprForProvider[T <: Repr.Provider]: Repr[T] =
    _.repr
  implicit def reprForList[T: Repr]: Repr[List[T]] =
    _.map(_.repr).fold(R: Builder)(Repr.Builder.Seq(_, _))
  implicit def reprForList1[T: Repr]: Repr[List1[T]] =
    t => R + t.head + t.tail
  // A Shifted element renders as its offset (spaces) followed by the element.
  implicit def reprForShifted[T: Repr]: Repr[Shifted[T]] =
    t => R + t.off + t.el
  implicit def reprForShiftedList1[T: Repr]: Repr[Shifted.List1[T]] =
    t => R + t.head + t.tail
  implicit def reprForOption[T: Repr]: Repr[Option[T]] =
    _.map(_.repr).getOrElse(R)
  implicit def reprForNone: Repr[None.type] =
    _ => R
  implicit def reprForSome[T: Repr]: Repr[Some[T]] =
    _.map(_.repr).getOrElse(R)

  //////////////////////////////////////////////////////////////////////////////
  //// Provider ////////////////////////////////////////////////////////////////
  //////////////////////////////////////////////////////////////////////////////

  /** Mixin for types that cache their own representation in a field. */
  trait Provider {
    val repr: Builder
  }

  //////////////////////////////////////////////////////////////////////////////
  //// Builder /////////////////////////////////////////////////////////////////
  //////////////////////////////////////////////////////////////////////////////

  /** Persistent tree of text fragments; `build()` flattens it to a String.
    * `span` counts characters while `byteSpan` counts UTF-8 bytes.
    */
  sealed trait Builder {
    import Builder._
    val byteSpan: Int
    val span: Int

    def +[T: Repr](that: T): Builder = this |+| Repr(that)

    // Iterative (tail-recursive, stack-safe) left-to-right flattening of the
    // Seq tree — avoids overflowing the stack on deeply nested builders.
    def build(): String = {
      val bldr = new StringBuilder()
      @tailrec
      def go(lst: List[Builder]): Unit = lst match {
        case Nil =>
        case r :: rs =>
          r match {
            case _: Empty  => go(rs)
            case r: Letter => bldr += r.char; go(rs)
            case r: Space  => for (_ <- 1 to r.span) { bldr += ' ' }; go(rs)
            case r: Text   => bldr ++= r.str; go(rs)
            case r: Seq    => go(r.first :: r.second :: rs)
          }
      }
      go(List(this))
      bldr.result()
    }
  }
  object Builder {

    //// Constructors ////
    final case class Empty() extends Builder {
      val byteSpan = 0
      val span     = 0
    }
    final case class Letter(char: Char) extends Builder {
      // A single Char may occupy several bytes in UTF-8.
      val byteSpan = char.toString.getBytes(StandardCharsets.UTF_8).length
      val span     = 1
    }
    final case class Space(span: Int) extends Builder {
      // ASCII spaces are one byte each.
      val byteSpan = span
    }
    final case class Text(str: String) extends Builder {
      val byteSpan = str.getBytes(StandardCharsets.UTF_8).length
      val span     = str.length
    }
    final case class Seq(first: Builder, second: Builder) extends Builder {
      val byteSpan = first.byteSpan + second.byteSpan
      val span     = first.span + second.span
    }

    //// Instances ////
    implicit def fromString(a: String): Builder = Repr(a)
    implicit def fromChar(a: Char): Builder = Repr(a)
    implicit def reprForBuilder[T <: Builder]: Repr[T] = identity(_)
    // Monoid with Empty as a two-sided identity, so empties never pile up in
    // the tree.
    implicit val monoidForBuilder: Monoid[Builder] = new Monoid[Builder] {
      def empty: Builder = R
      def combine(l: Builder, r: Builder): Builder = (l, r) match {
        case (_: Empty, t) => t
        case (t, _: Empty) => t
        case _             => Seq(l, r)
      }
    }
  }
}

View File

@ -0,0 +1,173 @@
package org.enso.syntax.text.ast2
//// README ////
//
// This is a WORK IN PROGRESS in-place replacement for Repr implementation.
// It is meant to lower the memory consumption of AST by removing Repr cache
// from AST and keeping only the span information. In order to do so, we have
// to do the following steps:
// 1. ASTOf should keep val span:Int
// 2. ASTOf should keep def repr instead of val repr
// 3. ASTOf should implement the Spanned type class
// 4. In every definition of AST Shape we should annotate the Child to be both
// Spanned as well as Repr. Otherwise, the Spanned instance will be
// automatically created from Repr. See below for explanation.
//
// This implementation works even if the above points are not done, because
// the Spanned implementation falls back to Repr when needed. However, this may
// not be the best design, as it's very error-prone. If we provide `Repr` as a
// superclass constraint, `Spanned` is auto-derived, which may make the code
// run very slowly.
//
// Anyway, before moving to this implementation we need to solve another
// problem. Printing of the AST throws a StackOverflowError here: we are not
// caching everything in memory anymore, but recursively printing each
// component, and for deeply nested components (like 100k open braces) we reach
// the recursion limit in the lambda created inside the [[Repr.Builder.+]]
// implementation.
//
// def newBuiltMe(bldr: StringBuilder): Unit = {
// buildMe(bldr)
// sr.repr.repr(that).buildMe(bldr)
// }
//
// We should probably apply CPS transformation to the code builder here,
// unless there is any other better solution.
import cats.Monoid
import cats.implicits._
import org.enso.data.{List1, Shifted}
/** Bundles the [[Spanned]] and [[Repr]] instances for `T`, so both can be
  * requested through a single context bound.
  */
trait SpannedRepr[T] {
  val spanned: Spanned[T]
  val repr: Repr[T]
}
object SpannedRepr {
  def apply[T: SpannedRepr]: SpannedRepr[T] = implicitly[SpannedRepr[T]]

  // Auto-derive the bundle whenever both instances are available separately.
  implicit def auto[T: Repr: Spanned]: SpannedRepr[T] = new SpannedRepr[T] {
    val spanned = implicitly[Spanned[T]]
    val repr = implicitly[Repr[T]]
  }
  // Projections, so a `T: SpannedRepr` bound also satisfies the two parts.
  implicit def asSpanned[T: SpannedRepr]: Spanned[T] = SpannedRepr[T].spanned
  implicit def asRepr[T: SpannedRepr]: Repr[T] = SpannedRepr[T].repr
}
/** Type class for values with a measurable source span (character count). */
trait Spanned[T] {
  def span(t: T): Int
}
object Spanned {
  def apply[T: Spanned](t: T): Int = implicitly[Spanned[T]].span(t)

  // A bare Int is its own span (Ints are used as space offsets).
  implicit def spannedForInt: Spanned[Int] = identity(_)
  implicit def spannedForList[T: Spanned]: Spanned[List[T]] =
    _.map(Spanned(_)).sum
  implicit def spannedForList1[T: Spanned]: Spanned[List1[T]] =
    _.map(Spanned(_)).foldLeft(0)(_ + _)
  implicit def spannedForShifted[T: Spanned]: Spanned[Shifted[T]] =
    t => t.off + Spanned(t.el)
  // Fallback: measure by building the representation. Correct but potentially
  // slow — see the README comment at the top of this file.
  implicit def spannedForRepr[T: Repr]: Spanned[T] = t => Repr(t).span
}
////////////////////////////////////////////////////////////////////////////////
//// Repr //////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/** Type class for values that can render themselves into a [[Repr.Builder]].
  * WIP low-memory variant: the builder stores only the span plus a deferred
  * build function instead of caching the whole fragment tree (see README).
  */
trait Repr[T] {
  def repr(a: T): Repr.Builder
}
object Repr {
  //// Smart Constructors ////
  def apply[T: Repr](t: T): Builder = implicitly[Repr[T]].repr(t)
  val R = Monoid[Builder].empty
  //// Operations ////
  implicit class ToReprOps[T: Repr](t: T) {
    def repr: Builder = Repr(t)
    def span: Int = repr.span
  }
  ///// Instances ////
  implicit def reprForUnit: Repr[Unit] =
    _ => Monoid[Builder].empty
  implicit def reprForString: Repr[String] =
    Repr.Builder.Text(_)
  // An Int renders as that many spaces (an offset); 0 renders as nothing.
  implicit def reprForInt: Repr[Int] = {
    case 0 => R
    case i => Repr.Builder.Space(i)
  }
  implicit def reprForChar: Repr[Char] =
    Repr.Builder.Letter(_)
  implicit def reprForTuple2[T1: Repr, T2: Repr]: Repr[(T1, T2)] =
    t => Repr(t._1) |+| Repr(t._2)
  implicit def reprForProvider[T <: Repr.Provider]: Repr[T] =
    _.repr
  implicit def reprForList[T: Repr]: Repr[List[T]] =
    _.map(_.repr).fold(R: Builder)(Repr.Builder.Seq)
  implicit def reprForList1[T: Repr: Spanned]: Repr[List1[T]] =
    t => R + t.head + t.tail
  implicit def reprForShifted[T: Repr: Spanned]: Repr[Shifted[T]] =
    t => R + t.off + t.el
  implicit def reprForShiftedList1[T: Repr: Spanned]: Repr[Shifted.List1[T]] =
    t => R + t.head + t.tail
  implicit def reprForOption[T: Repr]: Repr[Option[T]] =
    _.map(_.repr).getOrElse(R)
  implicit def reprForNone: Repr[None.type] =
    _ => R
  implicit def reprForSome[T: Repr]: Repr[Some[T]] =
    _.map(_.repr).getOrElse(R)
  //////////////////////////////////////////////////////////////////////////////
  //// Provider ////////////////////////////////////////////////////////////////
  //////////////////////////////////////////////////////////////////////////////

  /** Mixin for types that cache their own representation in a field. */
  trait Provider {
    val repr: Builder
  }

  /** Builder holding the precomputed span and a deferred append function.
    * NOTE: `+` nests `buildMe` closures, so deeply nested input overflows the
    * stack when building — known limitation, see the README above.
    */
  case class Builder(span: Int, buildMe: StringBuilder => Unit) {
    def build(): String = {
      val bldr = new StringBuilder()
      buildMe(bldr)
      bldr.result()
    }
    def +[T: SpannedRepr](that: T) = {
      val sr = SpannedRepr[T]
      val newSpan = span + sr.spanned.span(that)
      // Closure composition: first render `this`, then `that`.
      def newBuiltMe(bldr: StringBuilder): Unit = {
        buildMe(bldr)
        sr.repr.repr(that).buildMe(bldr)
      }
      Builder(newSpan, newBuiltMe)
    }
    // `++` inserts a single space between the two parts.
    def ++[T: Repr](that: T): Builder = this + " " + that
  }
  object Builder {
    implicit val monoidForBuilder: Monoid[Builder] = new Monoid[Builder] {
      def empty: Builder = Empty()
      def combine(l: Builder, r: Builder): Builder =
        Builder(l.span + r.span, (bldr: StringBuilder) => {
          l.buildMe(bldr)
          r.buildMe(bldr)
        })
    }
    //// Constructors ////
    def Empty() = Builder(0, identity(_))
    def Space(span: Int) = Builder(span, _ ++= (" " * span))
    def Letter(char: Char) = Builder(1, _ += char)
    def Text(str: String) = Builder(str.length, _ ++= str)
    def Seq(l: Builder, r: Builder) = l |+| r
    //// Instances ////
    implicit def fromString(a: String): Builder = Repr(a)
    implicit def fromChar(a: Char): Builder = Repr(a)
    implicit def reprForBuilder[T <: Builder]: Repr[T] = identity(_)
  }
}

View File

@ -0,0 +1,212 @@
package org.enso.syntax.text.ast3
//// README ////
//
// SEE THE README OF Repr2.scala first.
//
// Here is a simple Repr implementation with a trampoline in Shape._Seq.
import java.nio.charset.StandardCharsets
import org.enso.data.List1
import org.enso.data.Shifted
import cats.Monoid
import cats.implicits._
import scala.annotation.tailrec
/** Bundles the [[Spanned]] and [[Repr]] instances for `T`, so both can be
  * requested through a single context bound.
  */
trait SpannedRepr[T] {
  val spanned: Spanned[T]
  val repr: Repr[T]
}
object SpannedRepr {
  def apply[T: SpannedRepr]: SpannedRepr[T] = implicitly[SpannedRepr[T]]

  // Auto-derive the bundle whenever both instances are available separately.
  implicit def auto[T: Repr: Spanned]: SpannedRepr[T] = new SpannedRepr[T] {
    val spanned = implicitly[Spanned[T]]
    val repr = implicitly[Repr[T]]
  }
  // Projections, so a `T: SpannedRepr` bound also satisfies the two parts.
  implicit def asSpanned[T: SpannedRepr]: Spanned[T] = SpannedRepr[T].spanned
  implicit def asRepr[T: SpannedRepr]: Repr[T] = SpannedRepr[T].repr
}
/** Type class for values with a measurable source span (character count). */
trait Spanned[T] {
  def span(t: T): Int
}
object Spanned {
  def apply[T: Spanned](t: T): Int = implicitly[Spanned[T]].span(t)

  // A bare Int is its own span (Ints are used as space offsets).
  implicit def spannedForInt: Spanned[Int] = identity(_)
  implicit def spannedForList[T: Spanned]: Spanned[List[T]] =
    _.map(Spanned(_)).sum
  implicit def spannedForList1[T: Spanned]: Spanned[List1[T]] =
    _.map(Spanned(_)).foldLeft(0)(_ + _)
  implicit def spannedForShifted[T: Spanned]: Spanned[Shifted[T]] =
    t => t.off + Spanned(t.el)
  // Fallback: measure by building the representation. Correct but potentially
  // slow — see the README comment at the top of this file.
  implicit def spannedForRepr[T: Repr]: Spanned[T] = t => Repr(t).span
}
////////////////////////////////////////////////////////////////////////////////
//// Repr //////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/** Type class for values that can render themselves into a [[Repr.Builder]].
  * Variant with a trampolined [[Shape]] tree: thunks delay construction and
  * `build()` flattens iteratively, avoiding the stack overflow of ast2.
  */
trait Repr[T] {
  def repr(a: T): Repr.Builder
}
object Repr {
  //// Smart Constructors ////
  def apply[T: Repr](t: T): Builder = implicitly[Repr[T]].repr(t)
  val R = Monoid[Builder].empty
  //// Operations ////
  implicit class ToReprOps[T: Repr](t: T) {
    def repr: Builder = Repr(t)
    def span: Int = repr.span
  }
  ///// Instances ////
  implicit def reprForUnit: Repr[Unit] =
    _ => Monoid[Builder].empty
  implicit def reprForString: Repr[String] =
    Repr.Builder.Text(_)
  // An Int renders as that many spaces (an offset); 0 renders as nothing.
  implicit def reprForInt: Repr[Int] = {
    case 0 => R
    case i => Repr.Builder.Space(i)
  }
  implicit def reprForChar: Repr[Char] =
    Repr.Builder.Letter(_)
  implicit def reprForTuple2[T1: Repr, T2: Repr]: Repr[(T1, T2)] =
    t => Repr(t._1) |+| Repr(t._2)
  implicit def reprForProvider[T <: Repr.Provider]: Repr[T] =
    _.repr
  implicit def reprForList[T: Repr]: Repr[List[T]] =
    _.map(_.repr).fold(R: Builder)(Repr.Builder.Seq)
  implicit def reprForList1[T: Repr: Spanned]: Repr[List1[T]] =
    t => R + t.head + t.tail
  implicit def reprForShifted[T: Repr: Spanned]: Repr[Shifted[T]] =
    t => R + t.off + t.el
  implicit def reprForShiftedList1[T: Repr: Spanned]: Repr[Shifted.List1[T]] =
    t => R + t.head + t.tail
  implicit def reprForOption[T: Repr]: Repr[Option[T]] =
    _.map(_.repr).getOrElse(R)
  implicit def reprForNone: Repr[None.type] =
    _ => R
  implicit def reprForSome[T: Repr]: Repr[Some[T]] =
    _.map(_.repr).getOrElse(R)
  //////////////////////////////////////////////////////////////////////////////
  //// Provider ////////////////////////////////////////////////////////////////
  //////////////////////////////////////////////////////////////////////////////

  /** Mixin for types that cache their own representation in a field. */
  trait Provider {
    val repr: Builder
  }

  /** Builder holding the precomputed span and a thunk producing the Shape
    * tree; nothing is rendered until `build()` is called.
    */
  case class Builder(span: Int, shape: () => Shape) {
    def build(): String = shape().build()
    def +[T: SpannedRepr](that: T) = {
      val sr = SpannedRepr[T]
      val newSpan = span + sr.spanned.span(that)
      // Both sides stay lazy: _Seq keeps the thunks unevaluated.
      def newShape(): Shape = Shape._Seq(shape, sr.repr.repr(that).shape)
      Builder(newSpan, newShape)
    }
    // `++` inserts a single space between the two parts.
    def ++[T: Repr](that: T): Builder = this + " " + that
  }
  object Builder {
    implicit val monoidForBuilder: Monoid[Builder] = new Monoid[Builder] {
      def empty: Builder = Empty()
      def combine(l: Builder, r: Builder): Builder =
        Builder(
          l.span + r.span,
          () => Shape._Seq(() => l.shape(), () => r.shape())
        )
    }
    //// Constructors ////
    def Empty() = Builder(0, () => Shape.Empty())
    def Space(span: Int) = Builder(span, () => Shape.Space(span))
    def Letter(char: Char) = Builder(1, () => Shape.Letter(char))
    def Text(str: String) = Builder(str.length, () => Shape.Text(str))
    def Seq(l: Builder, r: Builder) = l |+| r
    //// Instances ////
    implicit def fromString(a: String): Builder = Repr(a)
    implicit def fromChar(a: Char): Builder = Repr(a)
    implicit def reprForBuilder[T <: Builder]: Repr[T] = identity(_)
  }
  //////////////////////////////////////////////////////////////////////////////
  //// Shape ///////////////////////////////////////////////////////////////////
  //////////////////////////////////////////////////////////////////////////////

  /** Lazily-linked tree of text fragments; `_Seq` children are thunks, so the
    * tree can be flattened iteratively (trampoline) in `build()`.
    */
  sealed trait Shape {
    import Shape._
    // val byteSpan: Int
    // val span: Int
    def +[T: Repr](that: T): Shape = Seq(this, Repr(that).shape())
    def ++[T: Repr](that: T): Shape = this + " " + that
    // Tail-recursive (stack-safe) left-to-right flattening.
    def build(): String = {
      val bldr = new StringBuilder()
      @tailrec
      def go(lst: List[Shape]): Unit = lst match {
        case Nil =>
        case r :: rs =>
          r match {
            case _: Empty  => go(rs)
            case r: Letter => bldr += r.char; go(rs)
            case r: Space  => for (_ <- 1 to r.span) { bldr += ' ' }; go(rs)
            case r: Text   => bldr ++= r.str; go(rs)
            case r: _Seq   => go(r.first() :: r.second() :: rs)
          }
      }
      go(List(this))
      bldr.result()
    }
  }
  object Shape {
    //// Constructors ////
    final case class Empty() extends Shape {
      val byteSpan = 0
      val span     = 0
    }
    final case class Letter(char: Char) extends Shape {
      // A single Char may occupy several bytes in UTF-8.
      val byteSpan = char.toString.getBytes(StandardCharsets.UTF_8).length
      val span     = 1
    }
    final case class Space(span: Int) extends Shape {
      val byteSpan = span
    }
    final case class Text(str: String) extends Shape {
      val byteSpan = str.getBytes(StandardCharsets.UTF_8).length
      val span     = str.length
    }
    final case class _Seq(first: () => Shape, second: () => Shape)
        extends Shape {
      // val byteSpan = first.byteSpan + second.byteSpan
      // val span = first.span + second.span
    }
    object Seq { def apply(l: Shape, r: Shape): Shape = l |+| r }
    //// Instances ////
    // Monoid with Empty as a two-sided identity element.
    implicit val monoidForShape: Monoid[Shape] = new Monoid[Shape] {
      def empty: Shape = Empty()
      def combine(l: Shape, r: Shape): Shape = (l, r) match {
        case (_: Empty, t) => t
        case (t, _: Empty) => t
        case _             => _Seq(() => l, () => r)
      }
    }
  }
}

View File

@ -0,0 +1,192 @@
package org.enso.syntax.text.ast.meta
import org.enso.data
import org.enso.data.List1
import org.enso.data.Shifted
import org.enso.syntax.text.AST
import org.enso.syntax.text.AST.Ident
import org.enso.syntax.text.AST.Macro
import Pattern.streamShift
import scala.annotation.tailrec
/////////////////
//// Builder ////
/////////////////
/** Stateful builder that assembles macro segments while the parser walks the
  * token stream. `current` is the segment being filled; previously finished
  * segments are kept in reverse order in `revSegs`.
  *
  * @param head - the identifier opening the first segment
  * @param offset - offset of the first segment
  * @param lineBegin - whether the builder starts at the beginning of a line
  * @param isModuleBuilder - when true, a synthetic whole-module macro
  *                          definition is installed (see constructor body)
  */
class Builder(
  head: Ident,
  offset: Int = 0,
  lineBegin: Boolean = false,
  val isModuleBuilder: Boolean = false
) {
  var context: Builder.Context = Builder.Context()
  var macroDef: Option[Macro.Definition] = None
  var current: Builder.Segment = new Builder.Segment(head, offset, lineBegin)
  var revSegs: List[Builder.Segment] = List()

  /** Pushes the current segment onto `revSegs` and starts a new one. */
  def beginSegment(ast: Ident, off: Int): Unit = {
    revSegs ::= current
    current = new Builder.Segment(ast)
    current.offset = off
  }

  /** Resolves `that` builder against our current reversed stream and splices
    * the produced macro (plus any unused tokens) back into `current`.
    */
  def merge(that: Builder): Unit = {
    val revLeftStream = current.revStream
    val (revUnusedLeftTgt, matched, rightUnusedTgt) =
      that.build(revLeftStream)
    val result = List1(matched: AST.SAST, rightUnusedTgt)
    current.revStream = result.toList.reverse ++ revUnusedLeftTgt
  }

  /** Matches the collected segments against `macroDef`.
    *
    * @param revStreamL - reversed token stream to the left of the macro,
    *                     available to an optional back-pattern
    * @return (leftover reversed left stream, resolved macro token, unused
    *         right stream). With no definition the segments are packaged as a
    *         [[Macro.Ambiguous]]; otherwise each segment is matched against
    *         the definition's forward patterns.
    */
  def build(
    revStreamL: AST.Stream
  ): (AST.Stream, Shifted[Macro], AST.Stream) = {
    val revSegBldrs = List1(current, revSegs)
    macroDef match {
      case None =>
        val revSegs = revSegBldrs.map { segBldr =>
          val optAst = segBldr.buildAST()
          val seg = Macro.Ambiguous.Segment(segBldr.ast, optAst)
          Shifted(segBldr.offset, seg)
        }
        val segments = revSegs.reverse
        val head = segments.head
        val tail = segments.tail
        val paths = context.tree.dropValues()
        val stream = Shifted.List1(head.el, tail)
        val template = Macro.Ambiguous(stream, paths)
        val newTok = Shifted(head.off, template)
        (revStreamL, newTok, List())
      case Some(mdef) =>
        val revSegPats = mdef.fwdPats.reverse
        val revSegsOuts = revSegBldrs.zipWith(revSegPats)(_.build(_))
        val revSegs = revSegsOuts.map(_._1)
        val revSegStreams = revSegsOuts.map(_._2)
        val tailStream = revSegStreams.head
        val segs = revSegs.reverse
        // The optional back-pattern consumes tokens to the left of the first
        // segment; offsets must be shifted before and after matching.
        val (segs2, pfxMatch, newLeftStream) = mdef.back match {
          case None => (segs, None, revStreamL)
          case Some(pat) =>
            val fstSegOff = segs.head.off
            val (revStreamL2, lastLOff) = streamShift(fstSegOff, revStreamL)
            val pfxMatch = pat.matchRevUnsafe(revStreamL2)
            val revStreamL3 = pfxMatch.stream
            val streamL3 = revStreamL3.reverse
            val (streamL4, newFstSegOff) = streamShift(lastLOff, streamL3)
            val revStreamL4 = streamL4.reverse
            val newFirstSeg = segs.head.copy(off = newFstSegOff)
            val newSegs = segs.copy(head = newFirstSeg)
            (newSegs, Some(pfxMatch.elem), revStreamL4)
        }
        val shiftSegs = Shifted.List1(segs2.head.el, segs2.tail)
        // Only the last segment (head of the reversed list) may leave unused
        // tokens; any other leftover means the patterns did not fully match.
        if (!revSegStreams.tail.forall(_.isEmpty)) {
          throw new Error(
            "Internal error: not all template segments were fully matched"
          )
        }
        // val resolved = mdef.fin(pfxMatch, shiftSegs.toList().map(_.el))
        val template = Macro.Match(pfxMatch, shiftSegs, null)
        val newTok = Shifted(segs2.head.off, template)
        (newLeftStream, newTok, tailStream)
    }
  }

  // Constructor side effect: a module-level builder matches the whole module
  // body as a single expression.
  // NOTE(review): the inner `ctx.body match` has no fallback case, so an
  // unexpected shape raises MatchError instead of the explicit Error used in
  // the nested match — confirm whether that is intended.
  if (isModuleBuilder)
    macroDef = Some(
      Macro.Definition((AST.Blank(): AST) -> Pattern.Expr()) { ctx =>
        ctx.body match {
          case List(seg) =>
            seg.body.toStream match {
              case List(mod) => mod.el
              case _ => throw new scala.Error("Impossible happened")
            }
        }
      }
    )

  /** Runs the whole-module macro and unwraps the single resulting AST. */
  def buildAsModule(): AST = {
    build(List())._2.el match {
      case Macro.Match.any(m) =>
        m.segs.head.body.toStream match {
          case s :: Nil => s.el
          case _ => throw new scala.Error("Impossible happened.")
        }
      case _ => throw new scala.Error("Impossible happened.")
    }
  }
}
object Builder {

  /** Builder preconfigured for the top-level (module) scope. */
  def moduleBuilder(): Builder =
    new Builder(AST.Blank(), isModuleBuilder = true, lineBegin = true)

  /////////////////
  //// Context ////
  /////////////////

  /** A node of the macro-registry lookup chain; `parent` points at the
    * enclosing scope's context.
    */
  case class Context(tree: Registry.Tree, parent: Option[Context]) {
    def lookup(t: AST): Option[Registry.Tree] = tree.get(t)
    def isEmpty: Boolean = tree.isLeaf

    // Walks up the parent chain until some enclosing scope knows about `t`.
    @tailrec
    final def parentLookup(t: AST): Boolean = {
      parent match {
        case None => false
        case Some(p) =>
          p.lookup(t) match {
            case None => p.parentLookup(t)
            case Some(_) => true
          }
      }
    }
  }
  object Context {
    def apply(): Context = Context(data.Tree(), None)
    def apply(tree: Registry.Tree): Context = Context(tree, None)
  }

  /////////////////
  //// Segment ////
  /////////////////

  /** One macro segment under construction: its keyword `ast`, its offset, and
    * the tokens collected so far (stored in reverse order).
    */
  class Segment(
    val ast: Ident,
    var offset: Int = 0,
    val lineBegin: Boolean = false
  ) {
    import Macro._
    var revStream: AST.Stream = List()

    def buildAST(): Option[Shifted[AST]] =
      Pattern.buildASTFrom(revStream.reverse)

    /** Matches the collected tokens against `pat`; throws when the pattern
      * does not match (segment patterns are expected to be total).
      */
    def build(
      pat: Pattern,
      reversed: Boolean = false
    ): (Shifted[Match.Segment], AST.Stream) = {
      val stream = revStream.reverse
      pat.matchOpt(stream, lineBegin, reversed) match {
        case None =>
          throw new Error(
            s"Internal error: template pattern segment was unmatched"
          )
        case Some(rr) =>
          (Shifted(offset, Match.SegmentOf(ast, rr.elem)), rr.stream)
      }
    }

    //////////////////////////////////////
    override def toString: String =
      s"SegmentBuilder($offset, $revStream)"
  }
}

View File

@ -0,0 +1,242 @@
package org.enso.syntax.text.ast.meta
import org.enso.data.List1
import org.enso.data.Shifted
import org.enso.syntax.text.AST
import org.enso.syntax.text.AST.Macro.Definition
import org.enso.syntax.text.AST.Opr
import org.enso.syntax.text.AST.Var
import org.enso.syntax.text.ast.Repr
import scala.annotation.tailrec
/** It contains definitions of built-in macros, like if-then-else or (-). These
  * macros might get moved to stdlib in the future.
  */
object Builtin {

  /** Registry of all built-in macro [[Definition]]s. Each definition pairs a
    * sequence of (keyword -> pattern) segments with a finalizer that rewrites
    * the matched segments into a plain AST node. Shapes a finalizer does not
    * expect are reported uniformly through `internalError` (fix: the `defn`,
    * `imp` and `arrow` finalizers previously had non-exhaustive matches that
    * would escape as MatchError instead).
    */
  val registry: Registry = {

    def internalError = throw new Error("Internal error")

    // ( expr? ) — parenthesised group; the body is optional, so `()` parses.
    val group = Definition(Opr("(") -> Pattern.Expr().opt, Opr(")")) { ctx =>
      ctx.body match {
        case List(st1, _) =>
          st1.body.toStream match {
            case List()  => AST.Group()
            case List(t) => AST.Group(t)
            case _       => internalError
          }
        case _ => internalError
      }
    }

    // def Name args... [block] — definition with name, parameters and body.
    val defn = Definition(Var("def") -> {
      val head = Pattern.Cons().or("missing name").tag("name")
      val args =
        Pattern.NonSpacedExpr_().tag("parameter").many.tag("parameters")
      val body = Pattern.Block().tag("body").opt
      head :: args :: body
    }) { ctx =>
      ctx.body match {
        case List(st1) =>
          import Pattern.Match._
          st1.body match {
            case Seq(_, (namePat, Seq(_, (argsPat, bodyPat)))) =>
              val args = argsPat.toStream.map(_.el)
              val body = bodyPat.toStream match {
                case List(Shifted(_, AST.Block.any(block))) => Some(block)
                case List()                                 => None
                case _                                      => internalError
              }
              namePat.toStream match {
                case List(Shifted(_, AST.Cons.any(n))) => AST.Def(n, args, body)
                case _                                 => internalError
              }
            case _ => internalError
          }
        // Fix: report an unexpected segment count explicitly.
        case _ => internalError
      }
    }

    // import A.B.C — module import with `.`-separated Cons segments.
    val imp = Definition(
      Var("import") -> Pattern
        .SepList(Pattern.Cons(), AST.Opr("."): AST, "expected module name")
    ) { ctx =>
      ctx.body match {
        case List(s1) =>
          import Pattern.Match._
          s1.body match {
            case Seq(_, (headMatch, Many(_, tailMatch))) =>
              def unwrapSeg(lseg: Pattern.Match): AST.Cons =
                lseg.toStream match {
                  case List(Shifted(_, AST.Cons.any(t))) => t
                  case _                                 => internalError
                }
              val head = unwrapSeg(headMatch)
              val tail = tailMatch.map {
                case Seq(_, (Tok(_, Shifted(_, AST.Opr("."))), seg)) =>
                  unwrapSeg(seg)
                case _ => internalError
              }
              AST.Import(head, tail)
            // Fix: report an unexpected body shape explicitly.
            case _ => internalError
          }
        case _ => internalError
      }
    }

    // if expr then expr
    val if_then = Definition(
      Var("if")   -> Pattern.Expr(),
      Var("then") -> Pattern.Expr()
    ) { ctx =>
      ctx.body match {
        case List(s1, s2) =>
          (s1.body.toStream, s2.body.toStream) match {
            case (List(t1), List(t2)) =>
              AST.Mixfix(List1(s1.head, s2.head), List1(t1.el, t2.el))
            case _ => internalError
          }
        case _ => internalError
      }
    }

    // if expr then expr else expr
    val if_then_else = Definition(
      Var("if")   -> Pattern.Expr(),
      Var("then") -> Pattern.Expr(),
      Var("else") -> Pattern.Expr()
    ) { ctx =>
      ctx.body match {
        case List(s1, s2, s3) =>
          (s1.body.toStream, s2.body.toStream, s3.body.toStream) match {
            case (List(t1), List(t2), List(t3)) =>
              AST.Mixfix(
                List1(s1.head, s2.head, s3.head),
                List1(t1.el, t2.el, t3.el)
              )
            case _ => internalError
          }
        case _ => internalError
      }
    }

    val nonSpacedExpr = Pattern.Any(Some(false)).many1.build

    // expr -> expr — the prefix segment supplies the arrow's left-hand side.
    val arrow = Definition(
      Some(nonSpacedExpr.or(Pattern.OprExpr("->"))),
      Opr("->") -> Pattern.NonSpacedExpr().or(Pattern.Expr())
    ) { ctx =>
      (ctx.prefix, ctx.body) match {
        case (Some(pfx), List(s1)) =>
          (pfx.toStream, s1.body.toStream) match {
            case (List(l), List(r)) => AST.App.Infix(l.el, Opr("->"), r.el)
            case _                  => internalError
          }
        // Fix: a missing prefix or wrong segment count is reported instead of
        // escaping as MatchError.
        case _ => internalError
      }
    }

    // foreign Language <block> — embedded foreign code.
    val foreign = Definition(
      Var("foreign") -> (Pattern.Cons() :: Pattern.Block())
    ) { ctx =>
      ctx.body match {
        case List(s1) =>
          s1.body.toStream match {
            case List(langAST, Shifted(_, AST.Block.any(bodyAST))) =>
              val indent = bodyAST.indent
              val lang   = langAST.el.show()
              val body   = bodyAST.show()
              // Drop the header line and the block's common indentation.
              val bodyLines  = body.split("\\r?\\n").toList.drop(1)
              val bodyLines2 = bodyLines.map(_.drop(indent))
              AST.Foreign(indent, lang, bodyLines2)
            case _ => internalError
          }
        case _ => internalError
      }
    }

    // skip expr — strips applications/groups down to the innermost target.
    val skip = Definition(
      Var("skip") -> Pattern.Expr()
    ) { ctx =>
      ctx.body match {
        case List(s1) =>
          s1.body.toStream match {
            case List(Shifted(_, body: AST)) =>
              @tailrec
              def go(t: AST): AST = t match {
                case AST.App.Prefix(_, arg)    => arg
                case AST.App.Infix(self, _, _) => go(self)
                case AST.Macro.Match.any(m)    => go(m.resolved)
                case AST.Group(None)           => t
                case AST.Group(Some(s))        => go(s)
                case _                         => t
              }
              go(body)
            case _ => internalError
          }
        case _ => internalError
      }
    }

    // freeze expr — placeholder resolution (see TODO below).
    val freeze = Definition(
      Var("freeze") -> Pattern.Expr()
    ) { ctx =>
      ctx.body match {
        case List(s1) =>
          s1.body.toStream match {
            case List(Shifted(_, _)) =>
              // TODO: Ability to do parsing here
              Var(s"Save to file using ${ctx.id}")
            case _ => internalError
          }
        case _ => internalError
      }
    }

    // ## ... — documentation comment; continuation lines lose 2 columns of
    // indentation introduced by the `##` marker.
    val docComment = Definition(
      Opr("##") -> Pattern
        .Any()
        .many
        .fromBegin
        .or(Pattern.Any().but(Pattern.Block()).many)
        .tag("comment")
    ) { ctx =>
      ctx.body match {
        case List(s1) =>
          val stream = s1.body.toStream
          val indent = 2
          val text   = Repr(stream).build()
          val lines  = text.split("\n").toList
          val lines2 = lines.head :: lines.tail.map(_.drop(indent))
          AST.Comment(lines2)
        case _ => internalError
      }
    }

    // TODO
    // We may want to better represent empty AST. Moreover, there should be a
    // way to generate multiple top-level entities from macros (like multiple
    // atom definitions). One of the solutions to consider is to make AST
    // instance of Monoid, add a `Nothing` node, and replace all lines in a
    // block with a `Seq` node. This would allow us here to return `Nothing`,
    // and also return many top-level defs connected with a `Seq`.
    val disableComment =
      Definition(Opr("#") -> Pattern.Expr().tag("disable")) { _ =>
        AST.Blank()
      }

    Registry(
      group,
      if_then,
      if_then_else,
      imp,
      defn,
      arrow,
      foreign,
      docComment,
      disableComment,
      skip,
      freeze
    )
  }
}

View File

@ -0,0 +1,523 @@
package org.enso.syntax.text.ast.meta
import org.enso.syntax.text.AST
import org.enso.syntax.text.AST.SAST
import org.enso.syntax.text.prec.Operator
import scala.annotation.tailrec
import org.enso.data.Shifted
import org.enso.syntax.text.ast.Repr
////////////////////////////////////////////////////////////////////////////////
//// Pattern ///////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
object Pattern {
import cats.Functor
import cats.Foldable
import cats.Traverse
import cats.derived._
type P = Pattern
type Spaced = Option[Boolean]
// TODO: Refactorme
def streamShift_(off: Int, revStream: AST.Stream): AST.Stream =
streamShift(off, revStream)._1
def streamShift(off: Int, revStream: AST.Stream): (AST.Stream, Int) = {
@tailrec
def go(off: Int, str: AST.Stream, out: AST.Stream): (AST.Stream, Int) =
str match {
case Nil => (out, off)
case t :: ts => go(t.off, ts, Shifted(off, t.el) :: out)
}
val (nStream, nOff) = go(off, revStream, List())
(nStream.reverse, nOff)
}
trait Class
object Class {
final case object Normal extends Class
final case object Pattern extends Class
}
//// Primitive Constructors ////
// format: off
/** Boundary Patterns */
final case class Begin () extends P
final case class End () extends P
/** Structural Patterns */
final case class Nothing () extends P
final case class Seq (pat1 : P , pat2 : P) extends P
final case class Or (pat1 : P , pat2 : P) extends P
final case class Many (pat : P) extends P
final case class Except (not : P , pat : P) extends P
/** Meta Patterns */
final case class Build (pat : P) extends P
final case class Err (msg : String, pat : P) extends P
final case class Tag (tag : String, pat : P) extends P
final case class Cls (cls : Class , pat : P) extends P
/** Token Patterns */
final case class Tok (spaced : Spaced, ast : AST) extends P
final case class Blank (spaced : Spaced) extends P
final case class Var (spaced : Spaced) extends P
final case class Cons (spaced : Spaced) extends P
final case class Opr (spaced : Spaced, maxPrec: Option[Int]) extends P
final case class Mod (spaced : Spaced) extends P
final case class Num (spaced : Spaced) extends P
final case class Text (spaced : Spaced) extends P
final case class Block (spaced : Spaced) extends P
final case class Macro (spaced : Spaced) extends P
final case class Invalid (spaced : Spaced) extends P
// format: on
//// Smart Constructors ////
object Tok {
def apply(ast: AST): Tok = Tok(None, ast)
}
object Var {
def apply(): Var = Var(None)
def apply(spaced: Boolean): Var = Var(Some(spaced))
}
object Cons {
def apply(): Cons = Cons(None)
def apply(spaced: Boolean): Cons = Cons(Some(spaced))
}
object Opr {
def apply(): Opr = Opr(None, None)
def apply(spaced: Spaced): Opr = Opr(spaced, None)
def apply(spaced: Boolean): Opr = Opr(Some(spaced))
}
object Num {
def apply(): Num = Num(None)
def apply(spaced: Boolean): Num = Num(Some(spaced))
}
object Text {
def apply(): Text = Text(None)
def apply(spaced: Boolean): Text = Text(Some(spaced))
}
object Block {
def apply(): Block = Block(None)
def apply(spaced: Boolean): Block = Block(Some(spaced))
}
def Any(spaced: Spaced = None): Pattern =
Blank(spaced) |
Var(spaced) |
Cons(spaced) |
Opr(spaced) |
Mod(spaced) |
Num(spaced) |
Text(spaced) |
Block(spaced) |
Macro(spaced) |
Invalid(spaced)
def Any(spaced: Boolean): Pattern = Any(Some(spaced))
// Consumes everything up to the end of input, flagging it as an error.
def ErrTillEnd(msg: String) = Any().tillEnd.err(msg)
// Succeeds on end of input, otherwise reports the whole rest as an error.
def ErrUnmatched(msg: String) = End() | ErrTillEnd(msg)
// One-or-more tokens, folded into a single AST via `build`.
def Expr() = Any().many1.build
def NonSpacedExpr() = Any(spaced = false).many1.build
// A first token of any kind except a block, then a non-spaced tail.
def NonSpacedExpr_() = (Any().but(Block()) :: Any(spaced = false).many).build
// `pat (div pat)*` — a `div`-separated, non-empty list of `pat`.
def SepList(pat: Pattern, div: Pattern): Pattern = pat :: (div :: pat).many
// As above, but malformed segments are consumed up to the next divider and
// reported with `err` instead of failing the whole list.
def SepList(pat: Pattern, div: Pattern, err: String): Pattern = {
  val seg = pat | Any().till(div).err(err)
  SepList(seg, div)
}
// One-or-more tokens excluding operators binding at least as tightly as `opr`
// (used to capture one side of an infix expression).
def OprExpr(opr: String) = {
  val base = Except(Opr(None, Some(AST.Opr(opr).prec)), Any())
  base.many1.build
}

//// Utils ////

// Rebuilds a single AST from a token stream using operator precedence rules.
def buildASTFrom(stream: AST.Stream): Option[Shifted[AST]] =
  Operator.rebuild(stream)

//// Conversions ////

// Allows writing a literal AST where a Pattern is expected.
implicit def fromAST(ast: AST): Pattern = Tok(ast)
//////////////////////////////////////////////////////////////////////////////
//// Pattern.Match ///////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////

/** Structured results of pattern matching: one node type per [[Pattern]]
  * node type, with matched payloads of type `T` at the leaves.
  */
object Match {
  // Left = first alternative of an Or matched, Right = second.
  type Switch[T] = Either[T, T]
  type M[T] = MatchOf[T]
  type P = Pattern
  val P = Pattern
  val A = AST

  //// Primitive Constructors ////

  // format: off
  /** Boundary Matches */
  final case class Begin [T](pat:P.Begin) extends M[T]
  final case class End [T](pat:P.End) extends M[T]
  /** Structural Matches */
  final case class Nothing [T](pat:P.Nothing) extends M[T]
  final case class Seq [T](pat:P.Seq , elem:(M[T], M[T])) extends M[T]
  final case class Or [T](pat:P.Or , elem:Switch[M[T]]) extends M[T]
  final case class Many [T](pat:P.Many , elem:List[M[T]]) extends M[T]
  final case class Except [T](pat:P.Except, elem:M[T]) extends M[T]
  /** Meta Matches */
  final case class Build [T](pat:P.Build , elem:T) extends M[T]
  final case class Err [T](pat:P.Err , elem:T) extends M[T]
  final case class Tag [T](pat:P.Tag , elem:M[T]) extends M[T]
  final case class Cls [T](pat:P.Cls , elem:M[T]) extends M[T]
  /** Token Matches */
  final case class Tok [T](pat:P.Tok , elem:T) extends M[T]
  final case class Blank [T](pat:P.Blank , elem:T) extends M[T]
  final case class Var [T](pat:P.Var , elem:T) extends M[T]
  final case class Cons [T](pat:P.Cons , elem:T) extends M[T]
  final case class Opr [T](pat:P.Opr , elem:T) extends M[T]
  final case class Mod [T](pat:P.Mod , elem:T) extends M[T]
  final case class Num [T](pat:P.Num , elem:T) extends M[T]
  final case class Text [T](pat:P.Text , elem:T) extends M[T]
  final case class Block [T](pat:P.Block , elem:T) extends M[T]
  final case class Macro [T](pat:P.Macro , elem:T) extends M[T]
  final case class Invalid [T](pat:P.Invalid , elem:T) extends M[T]
  // format: on

  //// Smart Constructors ////

  object Nothing {
    def apply[T](): Match.Nothing[T] = Match.Nothing(Pattern.Nothing())
  }

  //// Result ////

  /** A successful match (`elem`) plus the unconsumed rest of the stream. */
  case class Result(elem: Match, stream: AST.Stream) {
    def map(fn: Match => Match): Result = copy(elem = fn(elem))
  }
}
type Match = MatchOf[SAST]

/** The shape-preserving result of matching a [[Pattern]]; traversable over
  * its leaf payloads `T`.
  */
sealed trait MatchOf[T] {
  import cats.implicits._
  import MatchOf._
  val M = Match
  val pat: Pattern
  override def toString = s"Pattern.Match(${this.toStream})"

  // Flattens the match back into the (ordered) list of matched leaves,
  // using the derived Functor/Foldable instances.
  def toStream: List[T] = this.map(List(_)).fold

  // Bottom-up structural transformation: children are rewritten first.
  def mapStruct(f: MatchOf[T] => MatchOf[T]): MatchOf[T] =
    f(this.mapStructShallow(_.mapStruct(f)))

  // Applies `f` to direct sub-matches only; leaves and `pat` are untouched.
  def mapStructShallow(f: MatchOf[T] => MatchOf[T]): MatchOf[T] =
    this match {
      case m: M.Begin[T] => m
      case m: M.End[T] => m
      case m: M.Nothing[T] => m
      case m: M.Seq[T] => m.copy(elem = m.elem.bimap(f, f))
      case m: M.Or[T] => m.copy(elem = m.elem.bimap(f, f))
      case m: M.Many[T] => m.copy(elem = m.elem.map(f))
      case m: M.Except[T] => m.copy(elem = f(m.elem))
      case m: M.Build[T] => m
      case m: M.Err[T] => m
      case m: M.Tag[T] => m.copy(elem = f(m.elem))
      case m: M.Cls[T] => m.copy(elem = f(m.elem))
      case m: M.Tok[T] => m
      case m: M.Blank[T] => m
      case m: M.Var[T] => m
      case m: M.Cons[T] => m
      case m: M.Opr[T] => m
      case m: M.Mod[T] => m
      case m: M.Num[T] => m
      case m: M.Text[T] => m
      case m: M.Block[T] => m
      case m: M.Invalid[T] => m
      case m: M.Macro[T] => m
    }

  /** True iff no [[Match.Err]] node occurs anywhere in this match.
    * Implemented as a traversal with a side-effecting flag (the rewrite
    * itself is an identity).
    */
  def isValid: Boolean = {
    var out = true
    this.mapStruct {
      case m: M.Err[_] => out = false; m
      case m => m
    }
    out
  }
}
object MatchOf {
  import cats.implicits._

  // The textual representation of a match is the concatenation of its leaves.
  implicit def reprMatch[T: Repr]: Repr[MatchOf[T]] =
    _.map(Repr(_)).fold
  implicit def ftorMatch: Functor[MatchOf] = _MatchOf.ftorMatch
  implicit def travMatch: Traverse[MatchOf] = _MatchOf.travMatch
  implicit def foldMatch: Foldable[MatchOf] = _MatchOf.foldMatch

  // Pairs every leaf with its absolute offset from the start of the match,
  // computed via `mapWithOff` over a Shifted-wrapped copy.
  implicit def offZipMatch[T: Repr]: AST.OffsetZip[MatchOf, T] = t => {
    val s = t.map(Shifted(0, _))
    val s2 = mapWithOff(s) { case (i, el) => Shifted(i, el.el) }
    val s3 = s2.map(t => (t.off, t.el))
    s3
  }

  val M = Match

  // format: off
  /** Maps every leaf together with its absolute offset; offsets advance by
    * each leaf's `Repr` span, threaded left to right through the structure.
    */
  def mapWithOff[T:Repr](self:MatchOf[T])(f: (Int,T) => T): MatchOf[T] =
    mapWithOff_(self)(f,0)._1

  // Returns the rewritten node plus the offset just past its last leaf.
  def mapWithOff_[T:Repr](self:MatchOf[T])(f: (Int,T) => T, off:Int): (MatchOf[T], Int) = self match {
    case m: M.Build[T] => (m.copy(elem = f(off,m.elem)), off + Repr(m.elem).span)
    case m: M.Err[T] => (m.copy(elem = f(off,m.elem)), off + Repr(m.elem).span)
    case m: M.Tok[T] => (m.copy(elem = f(off,m.elem)), off + Repr(m.elem).span)
    case m: M.Blank[T] => (m.copy(elem = f(off,m.elem)), off + Repr(m.elem).span)
    case m: M.Var[T] => (m.copy(elem = f(off,m.elem)), off + Repr(m.elem).span)
    case m: M.Cons[T] => (m.copy(elem = f(off,m.elem)), off + Repr(m.elem).span)
    case m: M.Opr[T] => (m.copy(elem = f(off,m.elem)), off + Repr(m.elem).span)
    case m: M.Mod[T] => (m.copy(elem = f(off,m.elem)), off + Repr(m.elem).span)
    case m: M.Num[T] => (m.copy(elem = f(off,m.elem)), off + Repr(m.elem).span)
    case m: M.Text[T] => (m.copy(elem = f(off,m.elem)), off + Repr(m.elem).span)
    case m: M.Block[T] => (m.copy(elem = f(off,m.elem)), off + Repr(m.elem).span)
    case m: M.Macro[T] => (m.copy(elem = f(off,m.elem)), off + Repr(m.elem).span)
    case m: M.Invalid[T] => (m.copy(elem = f(off,m.elem)), off + Repr(m.elem).span)
    // Structural node: recurse into children, threading the running offset
    // through the mutable `loff` accumulator.
    case m: Pattern.MatchOf[T] =>
      var loff = off
      val out = m.mapStructShallow {p =>
        val (nmatch, noff) = mapWithOff_(p)(f, loff)
        loff = noff
        nmatch
      }
      (out, loff)
  }
  // format: on
}
// Type-class instances for MatchOf, derived semi-automatically (kittens'
// `semi`). Kept in a separate object so derivation happens once here rather
// than being re-derived at every use site.
object _MatchOf {
  def ftorMatch: Functor[MatchOf] = semi.functor
  def travMatch: Traverse[MatchOf] = semi.traverse[MatchOf]
  def foldMatch: Foldable[MatchOf] = {
    import cats.derived.auto.foldable._
    semi.foldable[MatchOf]
  }
}
}
////////////////////////////////////////////////////////////////////////////////
//// API ///////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/** A pattern over AST token streams, with combinators for composition and the
  * matcher itself ([[matchOpt]] / [[matchUnsafe]]).
  */
sealed trait Pattern {
  import Pattern._

  // Backport of `Option.when` (only in the stdlib from Scala 2.13 on).
  implicit class OptionWhen(v: Option.type) {
    def when[A](cond: Boolean)(a: => A): Option[A] = if (cond) Some(a) else None
  }

  ////////////////////////////
  //// Smart Constructors ////
  ////////////////////////////

  // NB: `::` is right-associative, so `a :: b` sequences `a` before `b`.
  def ::(that: Pattern): Pattern = Seq(that, this)
  def !(that: Pattern): Pattern = Except(that, this)
  def |(that: Pattern): Pattern = Or(this, that)
  // Fallback alternative that reports `msg` (consuming nothing).
  def |(msg: String): Pattern = this | Err(msg, Nothing())
  def |?(tag: String): Pattern = Tag(tag, this)
  def or(that: Pattern): Pattern = Or(this, that)
  def or(msg: String): Pattern = this | Err(msg, Nothing())
  def err(msg: String): Pattern = Err(msg, this)
  def but(pat: Pattern): Pattern = Except(pat, this)
  def many: Pattern = Many(this)
  def many1: Pattern = this :: this.many
  def tag(tag: String): Pattern = Tag(tag, this)
  def opt: Pattern = this | Nothing()
  def build: Pattern = Build(this)
  // Zero or more tokens of `this`, stopping before the first `end`.
  def till(end: Pattern): Pattern = this.but(end).many
  def tillEnd: Pattern = this :: End() // fixme: rename
  def fromBegin: Pattern = Begin() :: this

  // Prefix (reverse) matching over an already-reversed stream.
  def matchRevUnsafe(
    stream: AST.Stream,
    lineBegin: Boolean = false
  ): Match.Result =
    this.matchUnsafe(stream, lineBegin = lineBegin, reversed = true)

  //////////////////////////////////
  //// Pattern Match Resolution ////
  //////////////////////////////////

  /** Unsafe variant of AST Macro tokens pattern matching. If you want to use
    * patterns that could not match all input tokens, use [[matchOpt]] instead.
    */
  def matchUnsafe(
    stream: AST.Stream,
    lineBegin: Boolean = false,
    reversed: Boolean = false
  ): Match.Result = {
    matchOpt(stream, lineBegin, reversed).getOrElse {
      val msg = "Internal error: template pattern segment was unmatched"
      throw new Error(msg)
    }
  }

  /** This function takes a pattern and applies it to AST input stream. The
    * optional parameter 'reversed' is used for prefix (reverse) matching and is
    * used for prefix macro matching. The function assumes that the pattern does
    * not fail.
    */
  def matchOpt(
    stream0: AST.Stream,
    lineBegin: Boolean,
    reversed: Boolean
  ): Option[Match.Result] = {

    val P = Pattern
    val M = Match

    // Greedily matches `p` as many times as possible (for `Many`),
    // returning the matches in order plus the remaining stream.
    def matchList(p: Pattern, stream: AST.Stream): (List[Match], AST.Stream) = {
      @tailrec
      def go(
        stream: AST.Stream,
        revOut: List[Match]
      ): (List[Match], AST.Stream) =
        step(p, stream) match {
          case None => (revOut.reverse, stream)
          case Some(t) => go(t.stream, t.elem :: revOut)
        }
      go(stream, Nil)
    }

    // Runs `step` and post-processes a successful match with `f`.
    def stepWith(p: Pattern, stream: AST.Stream)(
      f: Match => Match
    ): Option[Match.Result] = step(p, stream).map(_.map(f))

    // One matching step: tries pattern `p` on the head of `stream`.
    def step(p: Pattern, stream: AST.Stream): Option[Match.Result] = {

      def out(m: Match, s: AST.Stream) = Match.Result(m, s)
      def ret(m: Match, s: AST.Stream) = Some(Match.Result(m, s))
      def ret_(m: Match) = Some(Match.Result(m, stream))
      def retIf(b: Boolean)(m: Match, s: AST.Stream) = Option.when(b)(out(m, s))
      def retIf_(b: Boolean)(m: Match) = retIf(b)(m, stream)

      def matchByCls_[T: AST.UnapplyByType](
        spaced: Pattern.Spaced,
        f: Shifted[T] => Match
      ) = matchByCls[T](spaced)(a => Some(f(a)))

      // Matches the head token iff it is of AST class `T` and satisfies the
      // `spaced` constraint. Blocks never count as "spaced" tokens.
      def matchByCls[T](spaced: Pattern.Spaced)(
        f: Shifted[T] => Option[Match]
      )(implicit pat: AST.UnapplyByType[T]): Option[Match.Result] =
        stream match {
          case Shifted(off, pat(t)) :: ss =>
            val ok = spaced match {
              case None => true
              case Some(s) =>
                val isBlock = t match {
                  case AST.Block.any(_) => true
                  case _ => false
                }
                (s == (off > 0)) && (!isBlock)
            }
            if (ok) f(Shifted(off, t)).map(out(_, ss)) else None
          case _ => None
        }

      p match {

        //// Boundary Matches ////

        case p @ P.Begin() => retIf_(lineBegin)(M.Begin(p))
        case p @ P.End() => retIf_(stream.isEmpty)(M.End(p))

        //// Structural Matches ////

        case p @ P.Nothing() => ret_(M.Nothing(p))
        case p @ P.Seq(p1, p2) =>
          for {
            r1 <- step(p1, stream)
            r2 <- step(p2, r1.stream)
          } yield out(M.Seq(p, (r1.elem, r2.elem)), r2.stream)
        case p @ P.Or(p1, p2) =>
          // Ordered choice: `p2` is only tried if `p1` fails.
          val m1 = stepWith(p1, stream)(r => M.Or(p, Left(r)))
          m1.orElse(stepWith(p2, stream)(r => M.Or(p, Right(r))))
        case p @ P.Many(p1) =>
          val (lst, rest) = matchList(p1, stream)
          ret(M.Many(p, lst), rest)
        case p @ P.Except(p1, p2) =>
          // Negative lookahead: `p2` matches only where `p1` does not.
          step(p1, stream) match {
            case Some(_) => None
            case None => stepWith(p2, stream)(M.Except(p, _))
          }

        //// Meta Matches ////

        // When performing reverse pattern match, tokens use right-offsets
        // instead of left ones, so we need to push them back before computing
        // AST.
        case p @ P.Build(p1) =>
          stepWith(p1, stream) { patMatch =>
            val stream = patMatch.toStream
            val ast =
              if (!reversed) buildASTFrom(stream).get
              else {
                val (shiftedStream, off) = streamShift(0, stream.reverse)
                val shiftedAst = buildASTFrom(shiftedStream).get
                shiftedAst.copy(off = off)
              }
            M.Build(p, ast)
          }
        case p @ P.Err(msg, p1) =>
          // Wraps whatever `p1` matched into an Invalid.Unexpected node.
          step(p1, stream).map {
            _.map(
              m => M.Err(p, Shifted(AST.Invalid.Unexpected(msg, m.toStream)))
            )
          }
        case p @ P.Tag(_, p1) => stepWith(p1, stream)(M.Tag(p, _))
        case p @ P.Cls(_, p1) => stepWith(p1, stream)(M.Cls(p, _))

        //// Token Matches ////

        case p @ P.Tok(spaced, tok) =>
          // Exact-token match (by equality), honoring the spacing constraint.
          stream match {
            case Shifted(off, t) :: ss =>
              val ok = spaced.forall(_ == (off > 0))
              Option.when(tok == t && ok)(out(M.Tok(p, Shifted(off, t)), ss))
            case _ => None
          }
        case p @ P.Blank(spaced) =>
          matchByCls_[AST.Blank](spaced, M.Blank(p, _))
        case p @ P.Var(spaced) => matchByCls_[AST.Var](spaced, M.Var(p, _))
        case p @ P.Cons(spaced) => matchByCls_[AST.Cons](spaced, M.Cons(p, _))
        case p @ P.Num(spaced) => matchByCls_[AST.Number](spaced, M.Num(p, _))
        case p @ P.Text(spaced) => matchByCls_[AST.Text](spaced, M.Text(p, _))
        case p @ P.Block(spaced) =>
          matchByCls_[AST.Block](spaced, M.Block(p, _))
        case p @ P.Opr(spaced, maxPrec) =>
          // Operator match, additionally bounded by precedence if requested.
          matchByCls[AST.Opr](spaced) { sast =>
            Option.when(maxPrec.forall(_ >= sast.el.prec))(M.Opr(p, sast))
          }
        case p @ P.Mod(spaced) => matchByCls_[AST.Mod](spaced, M.Mod(p, _))
        case p @ P.Macro(spaced) =>
          matchByCls_[AST.Macro](spaced, M.Macro(p, _))
        case p @ P.Invalid(spaced) =>
          matchByCls_[AST.Invalid](spaced, M.Invalid(p, _))
      }
    }

    step(this, stream0)
  }
}

View File

@ -0,0 +1,28 @@
package org.enso.syntax.text.ast.meta
import org.enso.data
import org.enso.data.List1
import org.enso.syntax.text.AST
import org.enso.syntax.text.AST.Macro.Definition
/** Mutable registry mapping macro definition paths to their [[Definition]]s.
  * NOTE(review): a case class with a `var` field — equality/hashCode depend on
  * mutable state; confirm that is intentional.
  */
final case class Registry() {
  // Prefix tree keyed by the tokens of a macro's path.
  var tree: Registry.Tree = data.Tree()
  override def toString: String =
    tree.toString
  def insert(defn: Definition): Unit =
    tree += defn.path.toList -> defn
  def get(path: List1[AST]): Option[Definition] =
    tree.getValue(path.toList)
}
object Registry {
  type Tree = data.Tree[AST, Definition]

  /** Builds a registry pre-populated with the given definitions,
    * inserted in argument order.
    */
  def apply(defs: Definition*): Registry = {
    val registry = new Registry()
    for (definition <- defs) registry.insert(definition)
    registry
  }
}

View File

@ -0,0 +1,27 @@
package org.enso.syntax.text.ast.opr
/** Operator associativity. */
sealed trait Assoc

object Assoc {
  case object Left extends Assoc
  case object Right extends Assoc

  /** Full-match shape of applicative-style operators (`<*>`, `<$>`, `*>`, ...). */
  private val applicativePat = "<?[+*$]>?".r

  /** True for multi-character applicative operators; a lone `+`, `*` or `$`
    * fits the regex but does not count.
    */
  private def isApplicative(s: String): Boolean =
    s.length > 1 && applicativePat.pattern.matcher(s).matches()

  /** Per-character associativity vote: positive favors Left, negative Right. */
  private def charAssoc(c: Char): Int =
    if (c == '<') 1
    else if (c == '=' || c == ',' || c == '>') -1
    else 0

  /** Associativity of `op`: applicative operators are left-associative, and
    * otherwise a non-negative sum of character votes means Left.
    */
  def of(op: String): Assoc =
    if (isApplicative(op) || op.map(charAssoc).sum >= 0) Assoc.Left
    else Assoc.Right
}

View File

@ -0,0 +1,9 @@
package org.enso.syntax.text.ast.opr
object Info {
  /** Precedence and associativity for every operator with a known precedence. */
  val map: Map[String, (Int, Assoc)] =
    Prec.map.map { case (name, prec) => (name, (prec, Assoc.of(name))) }

  /** Info for `op`; unknown operators get the default precedence but still a
    * computed associativity.
    */
  def of(op: String): (Int, Assoc) =
    map.getOrElse(op, (Prec.default, Assoc.of(op)))
}

View File

@ -0,0 +1,27 @@
package org.enso.syntax.text.ast.opr
object Prec {
  /** Operator precedence groups, ordered from loosest binding (index 0) to
    * tightest. The trailing `" "` group gives plain application (space) the
    * highest precedence.
    */
  val hierarchy = List(
    List("=", "#="),
    List("->", "<-"),
    List("~>", "<~"),
    List("|"),
    List("&"),
    List("!", "?", "~"),
    List("<*", "<*>", "*>", "<$", "<$>", "$>", "<+", "<+>", "+>"),
    List("<", ">"),
    List(":", ","),
    List("+", "-"),
    List("*", "/", "\\", "%"),
    List("^"),
    List("."),
    List(" ")
  )

  /** Operator name -> precedence level (its group's index in [[hierarchy]]). */
  val map: Map[String, Int] = {
    val pairs = for {
      (ops, prec) <- hierarchy.zipWithIndex
      op <- ops
    } yield op -> prec
    pairs.toMap
  }

  /** Fallback precedence for operators not present in [[hierarchy]]. */
  val default = map.getOrElse("^", 0)
}

View File

@ -0,0 +1,142 @@
package org.enso.syntax.text.ast.text
import org.enso.data.ADT
/** An escape sequence inside a text literal; `repr` is its source text
  * without the leading backslash.
  */
sealed trait Escape {
  val repr: String
}

object Escape {

  // An escape that could not be parsed as any known kind.
  final case class Invalid(str: String) extends Escape {
    val repr = str
  }

  // Decimal character-code escape.
  final case class Number(int: Int) extends Escape {
    val repr = int.toString
  }

  // Reference: https://en.wikipedia.org/wiki/String_literal
  sealed trait Unicode extends Escape
  object Unicode {

    // A unicode escape whose digits failed validation; keeps the original repr.
    final case class Invalid(unicode: Unicode) extends Unicode {
      val repr = unicode.repr
    }

    // Common shape of unicode escapes: prefix + hex digits + optional suffix.
    abstract class U(pfx: String, sfx: String = "") extends Unicode {
      val digits: String
      val repr = pfx + digits + sfx
    }

    // Constructors are private: use the companion `apply`s, which validate.
    final case class U16 private (digits: String) extends U("u")
    final case class U32 private (digits: String) extends U("U")
    final case class U21 private (digits: String) extends U("u{", "}")

    object Validator {
      val hexChars =
        (('a' to 'f') ++ ('A' to 'F') ++ ('0' to '9')).toSet
      def isHexChar(char: Char) =
        hexChars.contains(char)
    }

    object U16 {
      // Validating constructor: wraps bad digit strings in Invalid.
      def apply(digits: String): Unicode =
        if (validate(digits)) U16(digits)
        else Invalid(U16(digits))
      // Exactly 4 hex digits.
      def validate(digits: String) = {
        import Validator._
        val validLength = digits.length == 4
        val validChars = digits.forall(isHexChar)
        validLength && validChars
      }
    }
    object U32 {
      def apply(digits: String): Unicode =
        if (validate(digits)) U32(digits)
        else Invalid(U32(digits))
      // Exactly 8 hex digits, and the first two must be "00" (max code point
      // is U+10FFFF).
      def validate(digits: String) = {
        import Validator._
        val validLength = digits.length == 8
        val validPrefix = digits.startsWith("00")
        val validChars = digits.forall(isHexChar)
        validLength && validPrefix && validChars
      }
    }
    object U21 {
      def apply(digits: String): Unicode =
        if (validate(digits)) U21(digits)
        else Invalid(U21(digits))
      // 1 to 6 hex digits (the `u{...}` form).
      def validate(digits: String) = {
        import Validator._
        val validLength = digits.length >= 1 && digits.length <= 6
        val validChars = digits.forall(isHexChar)
        validLength && validChars
      }
    }
  }

  // A single-character escape with a fixed character code; `repr` defaults to
  // the case object's name (e.g. `n` for "\n").
  abstract class Simple(val code: Int) extends Escape{
    def name = toString
    val repr = name
  }

  case object Slash extends Simple('\\') { override val repr = "\\" }
  case object Quote extends Simple('\'') { override val repr = "\'" }
  case object RawQuote extends Simple('"') { override val repr = "\"" }

  // Reference: https://en.wikipedia.org/wiki/String_literal
  sealed trait Character extends Simple
  object Character {
    case object a extends Simple('\u0007') with Character
    case object b extends Simple('\u0008') with Character
    case object f extends Simple('\u000C') with Character
    case object n extends Simple('\n') with Character
    case object r extends Simple('\r') with Character
    case object t extends Simple('\u0009') with Character
    case object v extends Simple('\u000B') with Character
    case object e extends Simple('\u001B') with Character
    // All Character escape objects, enumerated via the ADT helper.
    val codes = ADT.constructors[Character]
  }

  // Reference: https://en.wikipedia.org/wiki/Control_character
  sealed trait Control extends Simple
  object Control {
    case object NUL extends Simple(0x00) with Control
    case object SOH extends Simple(0x01) with Control
    case object STX extends Simple(0x02) with Control
    case object ETX extends Simple(0x03) with Control
    case object EOT extends Simple(0x04) with Control
    case object ENQ extends Simple(0x05) with Control
    case object ACK extends Simple(0x06) with Control
    case object BEL extends Simple(0x07) with Control
    case object BS extends Simple(0x08) with Control
    case object TAB extends Simple(0x09) with Control
    case object LF extends Simple(0x0A) with Control
    case object VT extends Simple(0x0B) with Control
    case object FF extends Simple(0x0C) with Control
    case object CR extends Simple(0x0D) with Control
    case object SO extends Simple(0x0E) with Control
    case object SI extends Simple(0x0F) with Control
    case object DLE extends Simple(0x10) with Control
    case object DC1 extends Simple(0x11) with Control
    case object DC2 extends Simple(0x12) with Control
    case object DC3 extends Simple(0x13) with Control
    case object DC4 extends Simple(0x14) with Control
    case object NAK extends Simple(0x15) with Control
    case object SYN extends Simple(0x16) with Control
    case object ETB extends Simple(0x17) with Control
    case object CAN extends Simple(0x18) with Control
    case object EM extends Simple(0x19) with Control
    case object SUB extends Simple(0x1A) with Control
    case object ESC extends Simple(0x1B) with Control
    case object FS extends Simple(0x1C) with Control
    case object GS extends Simple(0x1D) with Control
    case object RS extends Simple(0x1E) with Control
    case object US extends Simple(0x1F) with Control
    case object DEL extends Simple(0x7F) with Control
    // All Control escape objects, enumerated via the ADT helper.
    val codes = ADT.constructors[Control]
  }
}

View File

@ -0,0 +1,7 @@
package org.enso.syntax
import org.enso.syntax.text.{AST => ASTDef}
package object text {
  // Exposes the AST root type under the short name `AST`; the actual
  // definition lives in `AST._AST` (imported above as ASTDef).
  type AST = ASTDef._AST
}

View File

@ -0,0 +1,37 @@
package org.enso.syntax.text.prec
import org.enso.syntax.text.AST
import org.enso.data.List1
import org.enso.data.Shifted
import scala.annotation.tailrec
object Distance {

  /** Segment is a list of AST tokens which are not separated with spaces */
  type Segment = List1[AST]

  /** Splits a shifted token stream into maximal runs of "glued" tokens
    * (offset 0 from their predecessor). A block always starts a new segment,
    * even at offset 0. Each resulting segment carries the offset of its first
    * token.
    */
  def partition(lst: List1[Shifted[AST]]): List1[Shifted[Segment]] = {
    @tailrec
    def go(
      input: List[Shifted[AST]],
      currentOff: Int,
      current: List1[AST],     // tokens of the segment in progress, reversed
      out: List[Shifted[Segment]] // finished segments, reversed
    ): List1[Shifted[Segment]] = input match {
      case Nil => List1(Shifted(currentOff, current.reverse), out).reverse
      case ast1 :: ast2_ =>
        val isBlock = ast1.el match {
          case AST.Block.any(_) => true
          case _ => false
        }
        val isGlued = (ast1.off == 0) && (!isBlock)
        isGlued match {
          case true => go(ast2_, currentOff, ast1.el :: current, out)
          case false =>
            // Close the current segment and start a new one at ast1.
            val out2 = Shifted(currentOff, current.reverse) :: out
            go(ast2_, ast1.off, List1(ast1.el), out2)
        }
    }
    go(lst.tail, lst.head.off, List1(lst.head.el), Nil)
  }
}

View File

@ -0,0 +1,146 @@
package org.enso.syntax.text.prec
import org.enso.Logger
import org.enso.data.Shifted
import org.enso.syntax.text.AST
import org.enso.syntax.text.ast.meta.Builder
import org.enso.syntax.text.ast.meta.Builtin
import scala.annotation.tailrec
/** Macro resolution pass: walks the token stream and folds macro segments
  * into macro-match AST nodes, using a stack of in-progress [[Builder]]s.
  */
object Macro {
  val logger = new Logger()

  //////////////////
  //// Registry ////
  //////////////////

  def run(module: AST.Module): AST.Module =
    module.map(transform)

  private def transform(t: AST): AST = {
    // Root macro context: the full registry of built-in macro definitions.
    val root = Builder.Context(Builtin.registry.tree)
    // `builder` is the macro currently being assembled; enclosing, not yet
    // finished macros are kept on `builderStack`.
    var builder: Builder = Builder.moduleBuilder()
    var builderStack: List[Builder] = Nil

    // Suspends the current builder on the stack and starts a fresh one.
    def pushBuilder(name: AST.Ident, off: Int, lineBegin: Boolean): Unit =
      logger.trace {
        builderStack +:= builder
        builder = new Builder(name, off, lineBegin)
      }

    // Restores the enclosing builder, returning the one that was current
    // (None when already at the bottom of the stack).
    def popBuilder(): Option[Builder] = logger.trace {
      builderStack match {
        case Nil => None
        case b :: bs =>
          val out = builder
          builder = b
          builderStack = bs
          Some(out)
      }
    }

    // True only for the very first token of a line (macros may require it).
    var isLineBegin: Boolean = true

    // Merges every remaining stacked builder into its parent, bottom-up,
    // then builds the final module AST.
    @tailrec
    def finalize(): AST = {
      popBuilder() match {
        case Some(bldr) =>
          logger.log("End of input (in stack)")
          builder.merge(bldr)
          finalize()
        case None =>
          logger.log("End of input (not in stack)")
          builder.buildAsModule()
      }
    }

    @tailrec
    def go(input: AST.Stream): AST = {
      input match {
        case Nil =>
          // End of input: builders whose macro allows an empty last segment
          // are built right away; the rest absorb the built output into
          // their pending stream and survive to `finalize()`.
          val builders = builder :: builderStack
          var newRevBuilders: List[Builder] = List()
          var subStream: AST.Stream = List()
          for (bldr <- builders) {
            val noLastPattern = bldr.macroDef.map(_.last.pattern).contains(None)
            if (noLastPattern) {
              val (revLeftUnusedStream, matched, rightUnusedStream) =
                bldr.build(List())
              subStream = subStream ++ (rightUnusedStream.reverse :+ matched) ++ revLeftUnusedStream
            } else {
              bldr.current.revStream = subStream ++ bldr.current.revStream
              subStream = List()
              newRevBuilders +:= bldr
            }
          }
          val newBuilders = newRevBuilders.reverse
          // NOTE(review): assumes at least one builder (the module builder,
          // which has no macroDef) always lands in `newRevBuilders` — confirm.
          builder = newBuilders.head
          builderStack = newBuilders.tail
          finalize()
        case (t1 @ Shifted(off, AST.Ident.any(el1))) :: t2_ =>
          logger.log(s"Token $t1")
          logger.beginGroup()
          val wasLineBegin = isLineBegin
          isLineBegin = false
          builder.context.lookup(el1) match {
            case Some(tr) =>
              // The identifier continues the current macro: next segment.
              logger.log("New segment")
              builder.beginSegment(el1, off)
              builder.macroDef =
                tr.value.map(Some(_)).getOrElse(builder.macroDef)
              builder.context = builder.context.copy(tree = tr)
              logger.endGroup()
              go(t2_)
            case None =>
              root.lookup(el1) match {
                case Some(tr) =>
                  // The identifier starts a nested macro.
                  logger.log("New macro")
                  val context = builder.context
                  pushBuilder(el1, t1.off, wasLineBegin)
                  builder.macroDef = tr.value
                  builder.context = Builder.Context(tr, Some(context))
                  logger.endGroup()
                  go(t2_)
                case _ =>
                  val currentClosed = builder.context.isEmpty
                  // Operators never yield to identifiers; identifiers yield
                  // to a parent that expects an operator.
                  val parentPrecWin = (builder.current.ast, el1) match {
                    case (AST.Opr.any(_), _) => false
                    case (_, AST.Opr.any(_)) => true
                    case _ => false
                  }
                  val parentBreak = builder.context.parentLookup(el1)
                  // NB: `match` scrutinizes the whole `(...) && parentBreak`
                  // infix expression, not just `parentBreak`.
                  (currentClosed || parentPrecWin) && parentBreak match {
                    case true =>
                      // Close the current macro and retry the same token in
                      // the parent context (note: `go(input)`, not `go(t2_)`).
                      logger.log("Parent close")
                      val subBuilder = builder
                      popBuilder()
                      builder.merge(subBuilder)
                      logger.endGroup()
                      go(input)
                    case false =>
                      logger.log("Add token")
                      builder.current.revStream +:= t1
                      logger.endGroup()
                      go(t2_)
                  }
              }
          }
        case (Shifted(off, AST.Block.any(el1))) :: t2_ =>
          // Blocks are transformed recursively before being appended.
          val nt1 = Shifted(off, el1.map(transform))
          builder.current.revStream +:= nt1
          go(t2_)
        case t1 :: t2_ =>
          builder.current.revStream +:= t1
          go(t2_)
      }
    }

    val stream = AST.tokenize(t).toList()
    go(stream)
  }
}

View File

@ -0,0 +1,131 @@
package org.enso.syntax.text.prec
import org.enso.data.Compare._
import org.enso.data.List1
import org.enso.data.Shifted
import org.enso.syntax.text.AST
import org.enso.syntax.text.prec
import scala.annotation.tailrec
// format: off
// Without it, the code is a mess:
// https://github.com/scalameta/scalafmt/issues/1454
/** Shift-reduce operator-precedence parser over shifted token streams. */
object Operator {
  import Internal._

  /** Build a single AST node from AST stream by applying operator precedence
    * rules, including per-operator precedence and distance-based precedence.
    */
  def rebuild(stream: AST.Stream1): Shifted[AST] = {
    val stream2 = rebuildNonSpaced(stream)
    val stream3 = rebuildSpaced(stream2)
    stream3
  }

  // First pass: collapse each run of space-glued tokens into one AST.
  def rebuildNonSpaced(stream: AST.Stream1): AST.Stream1 = {
    val segs = prec.Distance.partition(stream)
    segs.map(_.map(rebuildSubExpr))
  }

  // Second pass: fold the space-separated segments into the final AST.
  def rebuildSpaced(flatExpr: AST.Stream1): Shifted[AST] = {
    val flatExpr2 = Shifted(flatExpr.head.off, Shifted.List1(flatExpr.head.el,flatExpr.tail))
    flatExpr2.map(rebuildExpr)
  }

  def rebuild(stream: AST.Stream): Option[Shifted[AST]] =
    List1(stream).map(rebuild)

  final object Internal {

    // Non-operator adjacency is treated as application (`AST.Opr.app`).
    def oprToToken(ast: AST): AST.Opr = ast match {
      case AST.Opr.any(t) => t
      case _ => AST.Opr.app
    }

    // A glued segment that reduces to a bare sides-application collapses to
    // the operator itself (e.g. a lone `+`).
    def rebuildSubExpr(seg: Distance.Segment): AST =
      rebuildExpr(seg) match {
        case AST.App.Sides.any(t) => t.opr
        case t => t
      }

    def rebuildExpr(seg: Distance.Segment): AST =
      rebuildExpr(Shifted.List1(seg.head, seg.tail.map(Shifted(_))))

    // Classic shift-reduce loop: `stack` holds the partially-reduced left
    // context; precedence/associativity of the two nearest operators decides
    // between shifting the next token and reducing the stack.
    def rebuildExpr(seg: Shifted.List1[AST]): AST = {
      final case class Input(seg: List[Shifted[AST]], stack: Shifted.List1[AST])
      implicit def _input(t: (List[Shifted[AST]], Shifted.List1[AST])): Input =
        Input(t._1, t._2)

      @tailrec
      def go(input: Input): AST = input.seg match {
        case Nil => reduceAll(input.stack)
        case seg1 :: seg2_ =>
          val shift = (seg2_, seg1 +: input.stack)
          val reduce = (input.seg, reduceHead(input.stack))

          // Higher-precedence incoming operator shifts; on a tie, two
          // left-associative operators reduce.
          def handleAssoc(ast1: AST, ast2: AST) = {
            val op1 = oprToToken(ast1)
            val op2 = oprToToken(ast2)
            compare(op1.prec, op2.prec) match {
              case GT => shift
              case LT => reduce
              case EQ => (op1.assoc, op2.assoc) match {
                case (AST.Assoc.Left, AST.Assoc.Left) => reduce
                case _ => shift
              }
            }
          }

          input.stack.head match {
            case AST.Opr.any(stack1) => seg1.el match {
              case AST.Opr.any(seg1) => go(handleAssoc(seg1, stack1))
              case _ => go(shift)
            }
            case _ => input.stack.tail match {
              case Nil => go(shift)
              case stack2 :: _ => go(handleAssoc(seg1.el, stack2.el))
            }
          }
      }
      go((seg.tail, Shifted.List1(seg.head)))
    }

    // Reduces the topmost operand/operator group on the stack into one of the
    // App node shapes (Sides / Left / Right / Infix / Prefix).
    // NOTE(review): branches return tuples — presumably an implicit conversion
    // to Shifted.List1 is in scope from its companion; confirm.
    def reduceHead(stack: Shifted.List1[AST]): Shifted.List1[AST] = {
      stack.head match {
        case AST.Opr.any(s1) => stack.tail match {
          case Nil => (AST.App.Sides(s1), Nil)
          case s2 :: s3_ => s2.el match {
            case AST.Opr.any(_) => (AST.App.Sides(s1), s2 :: s3_)
            case _ => (AST.App.Left(s2.el, s2.off, s1), s3_)
          }
        }
        case t1 => stack.tail match {
          case Nil => stack
          case s2 :: s3 :: s4_ => s2.el match {
            case AST.Opr.any(v2) => s3.el match {
              case AST.Opr.any(_) => (AST.App.Right(v2, s2.off, t1), s3 :: s4_)
              case _ => (AST.App.Infix(s3.el, s3.off, v2, s2.off, t1), s4_)
            }
            case v2 => (AST.App.Prefix(v2, s2.off, t1), s3 :: s4_)
          }
          case s2 :: s3_ => s2.el match {
            case AST.Opr.any(v2) => (AST.App.Right(v2, s2.off, t1), s3_)
            case v2 => (AST.App.Prefix(v2, s2.off, t1), s3_)
          }
        }
      }
    }

    // Repeatedly reduces until a single AST remains on the stack.
    @tailrec
    def reduceAll(stack: Shifted.List1[AST]): AST = {
      stack.tail match {
        case Nil => reduceHead(stack).head
        case _ => reduceAll(reduceHead(stack))
      }
    }
  }
}

View File

@ -0,0 +1,808 @@
package org.enso.syntax.text.spec
import org.enso.flexer._
import org.enso.flexer.automata.Pattern
import org.enso.flexer.automata.Pattern._
import org.enso.data.List1
import org.enso.syntax.text.ast.Doc._
import org.enso.syntax.text.ast.Doc
import scala.reflect.runtime.universe.reify
case class DocParserDef() extends Parser[Doc] {
//////////////////////////////////////////////////////////////////////////////
//// Result //////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
override def getResult(): Option[Doc] = result.doc

/** Manages the element the parser is working on and the stack of finished
  * elements.
  *
  * current - the element currently being built (None between elements)
  * doc     - the final [[Doc]]; presumably set at end of parsing — the
  *           assignment is outside this section, confirm
  * stack   - previously finished elements, most recent first
  */
final object result {
  var current: Option[Elem] = None
  var doc: Option[Doc] = None
  var stack: List[Elem] = Nil

  // Moves `current` onto the stack; logs an error if there is nothing to push.
  def push(): Unit = logger.trace {
    if (current.isDefined) {
      logger.log(s"Pushed: $current")
      stack +:= current.get
      current = None
    } else {
      logger.err("Undefined current")
    }
  }

  // Moves the top of the stack back into `current`; logs an error when empty.
  def pop(): Unit = logger.trace {
    if (stack.nonEmpty) {
      current = Some(stack.head)
      stack = stack.tail
      logger.log(s"New result: $current")
    } else {
      logger.err("Trying to pop empty AST stack")
    }
  }
}
//////////////////////////////////////////////////////////////////////////////
//// Basic Char Classification ///////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
// Character classes used by the lexer rules below.
val lowerChar: Pattern = range('a', 'z')
val upperChar: Pattern = range('A', 'Z')
val digit: Pattern = range('0', '9')
val whitespace: Pattern = ' '.many1
val newline: Char = '\n'
val char: Pattern = lowerChar | upperChar
// NOTE(review): the empty-string alternative `""` in this union looks
// accidental — confirm it is intended by the flexer Pattern DSL.
val specialChars
  : Pattern = "," | "." | ":" | "/" | "" | "=" | "'" | "|" | "+" | "-"
val possibleChars: Pattern = char | digit | whitespace | specialChars
// One or more "ordinary" characters: the body of plain text runs.
val normalText: Pattern = possibleChars.many1
//////////////////////////////////////////////////////////////////////////////
//// Text ////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** text - used to manage normal text, made of Strings.
  * At the start of the documentation or of a section the leading whitespace
  * is stripped (and, at documentation start, tags are intercepted first).
  */
final object text {
  def onPushing(in: String): Unit = logger.trace {
    if (documentation.isBeginning()) {
      // Tags may only appear at the very beginning of the documentation;
      // if the text was consumed as tags, nothing is pushed here.
      if (!tags.checkIfTagExistInPushedText(in)) {
        val text = removeWhitespaces(in)
        push(text)
      }
    } else if (section.isBeginning()) {
      val text = removeWhitespaces(in)
      push(text)
    } else {
      push(in)
    }
  }

  // Strips leading spaces but always keeps the last character, so an
  // all-space input keeps exactly one space.
  def removeWhitespaces(in: String): String = logger.trace {
    var text = in
    if (text.nonEmpty) {
      while (text.head == ' ' && text.length > 1) {
        text = text.tail
      }
    }
    text
  }

  def push(in: String): Unit = logger.trace {
    result.current = Some(Elem.Text(in))
    result.push()
  }
}

// Lexer rule: any run of normal text goes through `text.onPushing`.
ROOT || normalText || reify { text.onPushing(currentMatch) }
//////////////////////////////////////////////////////////////////////////////
//// Tags ////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** tags - used to manage potentially tagged documentation.
  *
  * possibleTagsList - every recognized tag type (Unrecognized excluded)
  * stack - tags applied so far, most recent first
  */
final object tags {
  val possibleTagsList: List[Tags.Tag.Type] =
    Tags.Tag.Type.codes.-(Tags.Tag.Unrecognized).toList
  var stack: List[Tags.Tag] = Nil

  // Pushes a tag; whitespace-only details are dropped, otherwise details are
  // trimmed and (for recognized tags) re-prefixed with a single space.
  def pushTag(indent: Int, tagType: Tags.Tag.Type, details: String): Unit =
    logger.trace {
      if (details.replaceAll("\\s", "").length == 0) {
        stack +:= Tags.Tag(indent, tagType)
      } else {
        if (details.nonEmpty) {
          var det = text.removeWhitespaces(details)
          if (tagType != Tags.Tag.Unrecognized) {
            det = ' ' + det
          }
          stack +:= Tags.Tag(indent, tagType, Some(det))
        } else {
          // NOTE(review): unreachable — empty `details` is whitespace-only
          // and handled above; the constructed Tag is also discarded here.
          Tags.Tag(indent, tagType, None)
        }
      }
      result.current = None
    }

  // Scans the pushed text for tag keywords; returns true when the text was
  // consumed as a tag (so it must not also be pushed as plain text).
  def checkIfTagExistInPushedText(in: String): Boolean = logger.trace {
    val inArray = in.split(" ")
    var containsTag = false

    def tryFindingTagInAvailableTags(elem: String): Unit = logger.trace {
      for (tagType <- possibleTagsList) {
        if (elem == tagType.toString.toUpperCase) {
          containsTag = true
          val tagDet = in.replaceFirst(tagType.toString.toUpperCase, "")
          pushTag(section.currentIndentRaw, tagType, tagDet)
        }
      }
      // An all-caps word that is not a known tag becomes an Unrecognized tag
      // (unless it spans a newline).
      if (!containsTag && !elem.contains(newline)) {
        pushTag(section.currentIndentRaw, Tags.Tag.Unrecognized, in)
        containsTag = true
      }
    }

    for (elem <- inArray) {
      if (elem.isEmpty) {
        // Consecutive spaces count toward the section's raw indent.
        section.currentIndentRaw += 1
      } else if (elem == elem.toUpperCase) {
        tryFindingTagInAvailableTags(elem)
      }
    }
    containsTag
  }
}
//////////////////////////////////////////////////////////////////////////////
//// Code ////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** code - used to manage code in documentation: inline snippets in backticks
  * and indented multiline code blocks.
  */
final object code {
  // Inline code: strip the surrounding backticks and push.
  def onPushingInline(in: String): Unit = logger.trace {
    val code = in.substring(1).dropRight(1)
    result.current = Some(Elem.CodeBlock.Inline(code))
    result.push()
  }

  // Appends one line to the current multiline code block, replacing the
  // placeholder empty first line if present.
  def onPushingMultiline(in: String): Unit = logger.trace {
    val dummyLine = Elem.CodeBlock.Line(0, "")
    // NOTE(review): `result.current.get` assumes a non-Newline element is
    // reachable on the stack — throws otherwise; confirm the invariant.
    do {
      result.pop()
    } while (result.current.get == Elem.Newline)
    result.current match {
      case Some(code @ (_: Elem.CodeBlock)) =>
        val newElem = Elem.CodeBlock.Line(indent.latest, in)
        if (code.elems.head == dummyLine) {
          result.current = Some(Elem.CodeBlock(newElem))
        } else {
          result.current = Some(Elem.CodeBlock(code.elems.append(newElem)))
        }
      case Some(_) | None => result.push()
    }
    result.push()
  }

  val inlineCodeTrigger = '`'
  // `...` with no backtick inside.
  val inlinePattern
    : Pattern = inlineCodeTrigger >> not(inlineCodeTrigger).many >> inlineCodeTrigger
}

val notNewLine: Pattern = not(newline).many1
val CODE: State = state.define("Code")
// Lexer rules: inline code in ROOT; the CODE state consumes whole lines.
ROOT || code.inlinePattern || reify { code.onPushingInline(currentMatch) }
CODE || newline || reify { state.end(); state.begin(NEWLINE) }
CODE || notNewLine || reify { code.onPushingMultiline(currentMatch) }
CODE || eof || reify { state.end(); documentation.onEOF() }
//////////////////////////////////////////////////////////////////////////////
//// Formatter ///////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** formatter - used to manage text formatters
  *
  * stack - holds applied formatters until they're closed
  */
final object formatter {
  var stack: List[Elem.Formatter.Type] = Nil

  /** Handles a formatter trigger: when the formatter is already open,
    * closes it — downgrading any differently-typed formatters opened
    * inside it to `Unclosed` — and emits a `Formatter` node built from
    * everything pushed since it opened; otherwise opens it by pushing an
    * empty marker node.
    */
  def onPushing(typ: Elem.Formatter.Type): Unit =
    logger.trace {
      val unclosedFormattersToCheck = decideWhichToCheckIfUnclosed(typ)
      if (stack.contains(typ)) {
        unclosedFormattersToCheck.foreach(checkForUnclosed)
        val listOfFormattedAST: List[Elem] = getElemsFromStack(typ)
        result.pop()
        result.current = Some(Elem.Formatter(typ, listOfFormattedAST))
        stack          = stack.tail
        result.push()
      } else {
        addEmptyToStack(typ)
      }
    }

  /** Pops every element pushed since the matching empty
    * `Elem.Formatter(typ)` marker and returns them in source order.
    *
    * FIX: the emptiness guard must be evaluated BEFORE `head`. The
    * original checked `result.stack.head != …` first, so an exhausted
    * stack threw NoSuchElementException instead of ending the loop.
    */
  def getElemsFromStack(typ: Elem.Formatter.Type): List[Elem] =
    logger.trace {
      var listOfFormattedAST: List[Elem] = Nil
      while (result.stack.nonEmpty && result.stack.head != Elem.Formatter(typ)) {
        result.pop()
        result.current match {
          case Some(value) => listOfFormattedAST +:= value
          case _           =>
        }
      }
      listOfFormattedAST
    }

  /** Opens a formatter: records it and pushes an empty marker node. */
  def addEmptyToStack(typ: Elem.Formatter.Type): Unit = logger.trace {
    stack +:= typ
    result.current = Some(Elem.Formatter(typ))
    result.push()
  }

  /** The other two formatter types that may be left dangling when `typ`
    * closes. Throws on an unknown formatter type.
    */
  def decideWhichToCheckIfUnclosed(
    typ: Elem.Formatter.Type
  ): List[Elem.Formatter.Type] = logger.trace {
    typ match {
      case Elem.Formatter.Strikeout =>
        List(Elem.Formatter.Bold, Elem.Formatter.Italic)
      case Elem.Formatter.Italic =>
        List(Elem.Formatter.Bold, Elem.Formatter.Strikeout)
      case Elem.Formatter.Bold =>
        List(Elem.Formatter.Italic, Elem.Formatter.Strikeout)
      case _ => throw new Error("Trying to use non-existing formatter")
    }
  }

  /** When `typ` is the innermost open formatter, rewraps its pending
    * elements into an `Unclosed` node instead of a closed one.
    */
  def checkForUnclosed(typ: Elem.Formatter.Type): Unit = logger.trace {
    if (stack.nonEmpty) {
      if (stack.head == typ) {
        val listOfFormattedAST: List[Elem] = getElemsFromStack(typ)
        result.pop()
        result.current =
          Some(Elem.Formatter.Unclosed(typ, listOfFormattedAST))
        stack = stack.tail
        result.push()
      }
    }
  }

  val boldTrigger: Char      = Elem.Formatter.Bold.marker
  val italicTrigger: Char    = Elem.Formatter.Italic.marker
  val strikeoutTrigger: Char = Elem.Formatter.Strikeout.marker
}
ROOT || formatter.boldTrigger || reify {
formatter.onPushing(Elem.Formatter.Bold)
}
ROOT || formatter.italicTrigger || reify {
formatter.onPushing(Elem.Formatter.Italic)
}
ROOT || formatter.strikeoutTrigger || reify {
formatter.onPushing(Elem.Formatter.Strikeout)
}
//////////////////////////////////////////////////////////////////////////////
//// Header //////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** header - used to create section headers in Documentation
  */
final object header {
  /** Decides whether the elements gathered for the finished section form
    * a header and, if so, rewraps them into `Section.Header`.
    */
  def create(): Unit = logger.trace {
    section.current match {
      case Some(_) => loopThroughStackToFindHeader()
      case None =>
        result.pop()
        result.current match {
          case Some(_: Section.Header) => loopThroughStackToFindHeader()
          case _                       => result.push()
        }
    }
  }

  /** Pops elements back to the nearest newline (or stack exhaustion) and
    * wraps them, in source order, into a `Section.Header`.
    * NOTE(review): calls `result.current.get` in the loop — assumes every
    * popped slot is defined; confirm that invariant holds.
    */
  def loopThroughStackToFindHeader(): Unit = logger.trace {
    var listForHeader: List[Elem] = Nil
    do {
      result.pop()
      listForHeader +:= result.current.get
    } while (result.current.get != Elem.Newline && result.stack.nonEmpty)
    if (result.current.get == Elem.Newline) {
      // Keep the newline on the stack and out of the header body.
      result.push()
      listForHeader = listForHeader.tail
    }
    result.current = Some(Section.Header(listForHeader.reverse))
    result.push()
  }
}
//////////////////////////////////////////////////////////////////////////////
//// Links ///////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** link - used to create links in Documentation
  *
  * there are 2 possible link types - Image and normal URL
  */
final object link {
  /** Splits a matched link into its name/target parts, after removing
    * `prefixLen` leading marker characters and the trailing `)`.
    */
  private def splitLink(matched: String, prefixLen: Int): (String, String) = {
    val parts = matched.substring(prefixLen).dropRight(1).split(']')
    (parts(0), parts(1).substring(1))
  }

  /** A matched `[name](url)`; malformed matches become Invalid. */
  def onCreatingURL(): Unit = logger.trace {
    if (currentMatch.contains("]") && currentMatch.contains("(")) {
      val (name, url) = splitLink(currentMatch, prefixLen = 1)
      pushURL(name, url)
    } else {
      onInvalidLink()
    }
  }

  def pushURL(name: String, url: String): Unit = logger.trace {
    result.current = Some(Elem.Link.URL(name, url))
    result.push()
  }

  /** A matched `![name](url)`; malformed matches become Invalid. */
  def onCreatingImage(): Unit = logger.trace {
    if (currentMatch.contains("]") && currentMatch.contains("(")) {
      val (name, url) = splitLink(currentMatch, prefixLen = 2)
      pushImage(name, url)
    } else {
      onInvalidLink()
    }
  }

  def pushImage(name: String, url: String): Unit = logger.trace {
    result.current = Some(Elem.Link.Image(name, url))
    result.push()
  }

  def onInvalidLink(): Unit = logger.trace {
    result.current = Some(Elem.Link.Invalid(currentMatch))
    result.push()
  }

  /** Invalid link terminated by a newline: the newline is excluded from
    * the payload and recorded as a line break instead.
    */
  def onInvalidLinkNewline(): Unit = logger.trace {
    result.current = Some(Elem.Link.Invalid(currentMatch.dropRight(1)))
    result.push()
    indent.onPushingNewLine()
  }

  def onInvalidLinkEOF(): Unit = logger.trace {
    onInvalidLink()
    documentation.onEOF()
  }

  val urlNameTrigger: String   = "["
  val imageNameTrigger: String = Elem.Link.Image().marker.get + urlNameTrigger
  val imagePattern: Pattern    = imageNameTrigger >> not(')').many1 >> ')'
  val urlPattern: Pattern      = urlNameTrigger >> not(')').many1 >> ')'
  val invalidPatternNewline: Pattern =
    (imageNameTrigger | urlNameTrigger) >> not(')').many1 >> newline
  val invalidPatternEOF: Pattern =
    (imageNameTrigger | urlNameTrigger) >> not(')').many1 >> eof
}
ROOT || link.imagePattern || reify { link.onCreatingImage() }
ROOT || link.urlPattern || reify { link.onCreatingURL() }
ROOT || link.invalidPatternNewline || reify { link.onInvalidLinkNewline() }
ROOT || link.invalidPatternEOF || reify { link.onInvalidLinkEOF() }
//////////////////////////////////////////////////////////////////////////////
//// Indent Management & New line ////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** indent - used to manage text and block indentation
  *
  * latest - holds last found indent
  * inListFlag - used to check if currently creating list
  */
final object indent {
  var latest: Int     = 0
  val listIndent: Int = 2

  /** Reacts to a line's indentation: closes one list level on a dedent
    * of `listIndent`, enters code-block mode on an indent deeper than the
    * raw section's, or records the new raw-section indent.
    */
  def onIndent(): Unit = logger.trace {
    val diff = currentMatch.length - latest
    if (diff == -listIndent && list.inListFlag) {
      list.appendInnerToOuter()
      latest = currentMatch.length
    } else if (currentMatch.length > section.currentIndentRaw && result.stack.nonEmpty) {
      tryToFindCodeInStack()
      state.begin(CODE)
    } else {
      section.currentIndentRaw = currentMatch.length
    }
    latest = currentMatch.length
  }

  /** Ensures a CodeBlock sits on top of the stack, seeding a new one with
    * a placeholder line when the previous element is not code.
    * NOTE(review): reads `result.stack.head` right after a pop — if the
    * stack held a single element this throws; callers only guard with
    * `result.stack.nonEmpty` BEFORE the pop. Confirm intended.
    */
  def tryToFindCodeInStack(): Unit = logger.trace {
    result.pop()
    if (!result.stack.head.isInstanceOf[Elem.CodeBlock]) {
      result.push()
      val dummyLine = Elem.CodeBlock.Line(0, "")
      result.current = Some(Elem.CodeBlock(dummyLine))
    }
    result.push()
  }

  /** Dispatches a matched list bullet on its indent relative to `latest`:
    * opens a nested list, appends to the current one, closes one level,
    * or records invalid indentation.
    */
  def onIndentForListCreation(
    indent: Int,
    typ: Elem.List.Type,
    content: String
  ): Unit = logger.trace {
    var wantToChangeIndent = true
    val diff               = indent - latest
    if (diff == listIndent) {
      /* NOTE
       * Used to push new line before pushing first list
       */
      if (!list.inListFlag) onPushingNewLine()
      list.inListFlag = true
      list.addNew(indent, typ, content)
    } else if (diff == 0 && list.inListFlag) {
      list.addContent(content)
    } else if (diff == -listIndent && list.inListFlag) {
      list.appendInnerToOuter()
      list.addContent(content)
    } else {
      onInvalidIndent(indent, typ, content)
      // An invalid bullet must not move the reference indent.
      wantToChangeIndent = false
    }
    if (wantToChangeIndent) latest = indent
  }

  /** A bullet at an unexpected indent: inside a list it becomes an
    * invalid-indent element; outside, ordered bullets open a Bold
    * formatter (the marker doubles as the bold trigger) and unordered
    * bullets are kept as plain text.
    */
  def onInvalidIndent(
    indent: Int,
    typ: Elem.List.Type,
    content: String
  ): Unit = {
    if (list.inListFlag) {
      list.addContent(Elem.List.Indent.Invalid(indent, typ, content))
    } else {
      onPushingNewLine()
      if (typ == Elem.List.Ordered) {
        formatter.onPushing(Elem.Formatter.Bold)
        result.current = Some(content)
        result.push()
      } else {
        result.current = Some(" " * indent + typ.marker + content)
        result.push()
      }
    }
  }

  def onPushingNewLine(): Unit = logger.trace {
    result.current = Some(Elem.Newline)
    result.push()
  }

  /** An empty line closes any open list and ends the current section. */
  def onEmptyLine(): Unit = logger.trace {
    if (list.inListFlag) {
      list.appendInnerToOuter()
      list.inListFlag = false
    }
    onPushingNewLine()
    section.onEOS()
  }

  /** Leading whitespace after a newline: record the break (unless the
    * document is empty so far) and process the indentation.
    */
  def onIndentPattern(): Unit = logger.trace {
    state.end()
    if (result.stack.nonEmpty) {
      indent.onPushingNewLine()
    }
    indent.onIndent()
  }

  def onEOFPattern(): Unit = logger.trace {
    state.end()
    indent.onPushingNewLine()
    documentation.onEOF()
  }

  val emptyLine: Pattern     = whitespace.opt >> newline
  val indentPattern: Pattern = whitespace.opt.many
  val EOFPattern: Pattern    = indentPattern >> eof
}
val NEWLINE: State = state.define("Newline")
ROOT || newline || reify { state.begin(NEWLINE) }
NEWLINE || indent.EOFPattern || reify { indent.onEOFPattern() }
NEWLINE || indent.indentPattern || reify { indent.onIndentPattern() }
//////////////////////////////////////////////////////////////////////////////
//// Lists ///////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** list - used to create lists for documentation
  *
  * there are 2 possible types of lists - ordered and unordered
  */
final object list {
  var inListFlag: Boolean = false

  /** Pushes a brand-new list element at the given indent. */
  def addNew(indent: Int, listType: Elem.List.Type, content: Elem): Unit =
    logger.trace {
      result.current = Some(Elem.List(indent, listType, content))
      result.push()
    }

  /** Appends `content` to the list on top of the stack; a non-list top is
    * silently left unchanged.
    */
  def addContent(content: Elem): Unit = logger.trace {
    result.pop()
    result.current match {
      case Some(list @ (_: Elem.List)) =>
        var currentContent = list.elems
        currentContent = currentContent.append(content)
        result.current =
          Some(Elem.List(list.indent, list.typ, currentContent))
      case _ =>
    }
    result.push()
  }

  /** Folds the innermost (just-closed) list into its enclosing list.
    * NOTE(review): uses `orNull` and `result.stack.head` — assumes a
    * nested list is open when called; callers guard via `inListFlag`.
    */
  def appendInnerToOuter(): Unit = logger.trace {
    result.pop()
    val innerList = result.current.orNull
    result.stack.head match {
      case outerList @ (_: Elem.List) =>
        var outerContent = outerList.elems
        outerContent = outerContent.append(innerList)
        result.pop()
        result.current =
          Some(Elem.List(outerList.indent, outerList.typ, outerContent))
      case _ =>
    }
    result.push()
  }

  /** A matched ordered bullet: split off the indent and delegate. */
  def onOrdered(): Unit = logger.trace {
    state.end()
    val matchedContent = currentMatch.split(orderedListTrigger)
    val listIndent     = matchedContent(0).length
    val listElems      = matchedContent(1)
    indent.onIndentForListCreation(listIndent, Elem.List.Ordered, listElems)
  }

  /** A matched unordered bullet: split off the indent and delegate. */
  def onUnordered(): Unit = logger.trace {
    state.end()
    val matchedContent = currentMatch.split(unorderedListTrigger)
    val listIndent     = matchedContent(0).length
    val listElems      = matchedContent(1)
    indent.onIndentForListCreation(listIndent, Elem.List.Unordered, listElems)
  }

  val orderedListTrigger: Char   = Elem.List.Ordered.marker
  val unorderedListTrigger: Char = Elem.List.Unordered.marker
  val orderedPattern
    : Pattern = indent.indentPattern >> orderedListTrigger >> notNewLine
  val unorderedPattern
    : Pattern = indent.indentPattern >> unorderedListTrigger >> notNewLine
}
NEWLINE || list.orderedPattern || reify { list.onOrdered() }
NEWLINE || list.unorderedPattern || reify { list.onUnordered() }
//////////////////////////////////////////////////////////////////////////////
//// Section /////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** section - used to manage sections in Documentation
  *
  * there are 2 possible types of sections - marked and raw.
  * there are 3 possible types of marked sections:
  * - important
  * - info
  * - example
  *
  * stack - holds every section in document
  * current - holds current section type
  * currentIndentRaw - holds indent for Raw
  * indentBeforeM & indentAfterM - holds appropriate indents for Marked
  */
final object section {
  var stack: List[Section]                 = Nil
  var current: Option[Section.Marked.Type] = None
  var currentIndentRaw: Int                = 0
  var indentBeforeMarker: Int              = 0
  var indentAfterMarker: Int               = 0

  //// Section Beginning /////

  /** Starts a new section of the given type (None = raw). */
  def onNew(typ: Option[Section.Marked.Type]): Unit =
    logger.trace {
      result.pop()
      current = typ
    }

  /** Starts a marked section, accounting for the marker's width in the
    * section's raw indent.
    */
  def onNewMarked(typ: Section.Marked.Type): Unit = logger.trace {
    createMarkedSectionIndent(typ)
    onNew(Some(typ))
    currentIndentRaw += currentMatch.length
  }

  /** Computes the whitespace widths on either side of the marker. */
  def createMarkedSectionIndent(typ: Section.Marked.Type): Unit =
    logger.trace {
      /* NOTE
       * We are adding here '_' in front and end in case there was no
       * indent on one side or another, and then remove this added char
       * from calculation.
       * We also add currentIndentRaw as for some reason
       * it may be the left indent
       */
      val in    = "_" + currentMatch + "_"
      val inArr = in.split(typ.marker)
      indentBeforeMarker = currentIndentRaw + inArr.head.length - 1
      indentAfterMarker  = inArr.tail.head.length - 1
    }

  def onNewRaw(): Unit = logger.trace {
    indent.onEmptyLine()
    onNew(None)
  }

  /** Two consecutive empty lines: the next raw section owns a header. */
  def onNewRawWithHeader(): Unit = logger.trace {
    state.end()
    onNewRaw()
    result.current = Some(Section.Header())
    result.push()
  }

  def isBeginning(): Boolean = logger.trace {
    result.stack.isEmpty || result.stack.head.isInstanceOf[Section.Header]
  }

  //// End of Section ////

  /** Downgrades any still-open formatter to an Unclosed node. */
  def checkForUnclosedFormattersOnEOS(): Unit = logger.trace {
    formatter.checkForUnclosed(Elem.Formatter.Bold)
    formatter.checkForUnclosed(Elem.Formatter.Italic)
    formatter.checkForUnclosed(Elem.Formatter.Strikeout)
  }

  def reverseStackOnEOS(): Unit = logger.trace {
    result.stack = result.stack.reverse
  }

  /** Wraps the accumulated elements into a Marked or Raw section. */
  def push(): Unit = logger.trace {
    result.stack match {
      case Nil =>
      /* NOTE
       * We don't want to push an empty section into stack
       * in case of parsing for example empty file
       * Then we want to get back Doc(None) and not Doc(Section())
       */
      case _ =>
        section.current match {
          case Some(marker) =>
            section.stack +:= Section.Marked(
              indentBeforeMarker,
              indentAfterMarker,
              marker,
              result.stack
            )
          case None =>
            section.stack +:= Section.Raw(currentIndentRaw, result.stack)
        }
    }
  }

  /** Resets per-section accumulators for the next section. */
  def cleanupOnEOS(): Unit = logger.trace {
    result.current  = None
    result.stack    = Nil
    formatter.stack = Nil
  }

  /** Full end-of-section sequence. */
  def onEOS(): Unit = logger.trace {
    checkForUnclosedFormattersOnEOS()
    reverseStackOnEOS()
    header.create()
    push()
    cleanupOnEOS()
  }

  val importantTrigger: Char = Section.Marked.Important.marker
  val infoTrigger: Char      = Section.Marked.Info.marker
  val exampleTrigger: Char   = Section.Marked.Example.marker

  val importantPattern
    : Pattern = indent.indentPattern >> importantTrigger >> indent.indentPattern
  val infoPattern
    : Pattern = indent.indentPattern >> infoTrigger >> indent.indentPattern
  val examplePattern
    : Pattern = indent.indentPattern >> exampleTrigger >> indent.indentPattern
}
NEWLINE || indent.emptyLine || reify { section.onNewRaw() }
NEWLINE || indent.emptyLine >> indent.emptyLine || reify {
section.onNewRawWithHeader()
}
ROOT || section.importantPattern || reify {
section.onNewMarked(Section.Marked.Important)
}
ROOT || section.infoPattern || reify {
section.onNewMarked(Section.Marked.Info)
}
ROOT || section.examplePattern || reify {
section.onNewMarked(Section.Marked.Example)
}
//////////////////////////////////////////////////////////////////////////////
//// Documentation ///////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
/** documentation - used to manage every action in case of end of file
  * prepares data to be ready to output to user, also depicts type of
  * documentation - is it invoked from Parser as Multi Line or Single Line or
  * is it just ran as DocParser - for example in test suite
  */
final object documentation {
  def reverseSectionsStackOnEOF(): Unit = logger.trace {
    section.stack = section.stack.reverse
  }

  def reverseTagsStackOnEOF(): Unit = logger.trace {
    tags.stack = tags.stack.reverse
  }

  /** Assembles the final Doc from the tag and section stacks. */
  def createDoc(): Unit = logger.trace {
    result.doc = Some(Doc(createTags(), createSynopsis(), createBody()))
  }

  /** All collected tags, or None when no tag was parsed. */
  def createTags(): Option[Tags] = logger.trace {
    tags.stack match {
      case head :: tail => Some(Tags(List1(head, tail)))
      case Nil          => None
    }
  }

  /** The first section, when present, forms the synopsis. */
  def createSynopsis(): Option[Synopsis] = logger.trace {
    section.stack.headOption.map(Synopsis(_))
  }

  /** Every section after the first forms the body, when any exist. */
  def createBody(): Option[Body] = logger.trace {
    section.stack match {
      case _ :: head :: tail => Some(Body(List1(head, tail)))
      case _                 => None
    }
  }

  /** Finalizes the document at end of input. */
  def onEOF(): Unit = logger.trace {
    section.onEOS()
    reverseSectionsStackOnEOF()
    reverseTagsStackOnEOF()
    createDoc()
  }

  def isBeginning(): Boolean = logger.trace {
    result.stack.isEmpty && section.stack.isEmpty
  }
}
ROOT || eof || reify { documentation.onEOF() }
}

View File

@ -0,0 +1,689 @@
package org.enso.syntax.text.spec
import org.enso.data.List1
import org.enso.flexer
import org.enso.flexer.Reader
import org.enso.flexer.State
import org.enso.flexer.automata.Pattern
import org.enso.flexer.automata.Pattern._
import org.enso.syntax.text.AST
import scala.annotation.tailrec
import scala.reflect.runtime.universe.reify
case class ParserDef() extends flexer.Parser[AST.Module] {
import ParserDef2._
/** Extracts the value from `opt`, failing with an internal error when the
  * option is empty — used for state the lexer guarantees to be set.
  */
final def unwrap[T](opt: Option[T]): T =
  opt.getOrElse(throw new Error("Internal Error"))
/////////////
//// API ////
/////////////
/** Runs the parser on `input`, starting in the module-level block state. */
override def run(input: Reader): Result[AST.Module] = {
  state.begin(block.MODULE)
  super.run(input)
}
///////////////////////////////////
//// Basic Char Classification ////
///////////////////////////////////
// Basic character classes from which the lexer patterns are built.
val lowerLetter: Pattern = range('a', 'z')
val upperLetter: Pattern = range('A', 'Z')
val digit: Pattern       = range('0', '9')
val hex: Pattern         = digit | range('a', 'f') | range('A', 'F')
val alphaNum: Pattern    = digit | lowerLetter | upperLetter
val whitespace0: Pattern = ' '.many  // zero or more spaces
val space: Pattern       = ' '.many1 // one or more spaces
val newline: Pattern     = '\n'
////////////////
//// Result ////
////////////////
/** The parse result, provided the run produced a whole module. */
override def getResult() =
  result.current.collect { case mod: AST.Module => mod }
/** result - the parser's output accumulator.
  *
  * current - the AST node being built for the current line
  * stack   - saved `current` values of enclosing contexts
  */
final object result {
  var current: Option[AST]     = None
  var stack: List[Option[AST]] = Nil

  /** Saves the current node and starts from scratch. */
  def push(): Unit = logger.trace {
    logger.log(s"Pushed: $current")
    stack +:= current
    current = None
  }

  /** Restores the most recently saved node.
    * NOTE(review): `stack.head` throws on an empty stack — pops must be
    * paired with earlier pushes.
    */
  def pop(): Unit = logger.trace {
    current = stack.head
    stack   = stack.tail
    logger.log(s"New result: $current")
  }

  /** Applies `fn` to the current match and appends the produced node. */
  def app(fn: String => AST): Unit =
    app(fn(currentMatch))

  /** Appends `ast` to the current node as a prefix application, spending
    * the pending offset as the spacing between the two.
    */
  def app(ast: AST): Unit = logger.trace {
    current = Some(current match {
      case None    => ast
      case Some(r) => AST.App.Prefix(r, off.use(), ast)
    })
  }

  /** The rightmost (most recently applied) leaf of the current node. */
  def last(): Option[AST] = {
    @tailrec
    def go(ast: AST): AST = ast match {
      case AST.App.Prefix.any(t) => go(t.arg)
      case t                     => t
    }
    current.map(go)
  }
}
////////////////
//// Offset ////
////////////////
/** off - tracks horizontal offsets (runs of whitespace) between tokens.
  *
  * current - offset accumulated since the last token
  * stack   - saved offsets of enclosing contexts
  */
final object off {
  var current: Int     = 0
  var stack: List[Int] = Nil

  /** Saves the running offset and starts a fresh one. */
  def push(): Unit = logger.trace {
    stack   = current :: stack
    current = 0
  }

  /** Restores the most recently saved offset. */
  def pop(): Unit = logger.trace {
    current = stack.head
    stack   = stack.tail
    logger.log(s"New offset: $current")
  }

  /** Consumes the running offset, resetting it to zero. */
  def use(): Int = logger.trace {
    val consumed = current
    current = 0
    consumed
  }

  def on(): Unit = on(0)

  /** Adds the current match's length (plus `shift`) to the offset. */
  def on(shift: Int): Unit = logger.trace {
    val diff = currentMatch.length + shift
    current = current + diff
    logger.log(s"lastOffset + $diff = $current")
  }
}
////////////////////
//// IDENTIFIER ////
////////////////////
/** ident - lexes variables, constructors and blanks, then validates the
  * character that immediately follows them.
  *
  * current - the identifier awaiting its suffix check
  */
final object ident {
  var current: Option[AST.Ident] = None

  /** Builds an identifier node from the current match. */
  def on(cons: String => AST.Ident): Unit = logger.trace_ {
    on(cons(currentMatch))
  }

  /** Stores the identifier and checks what follows it. */
  def on(ast: AST.Ident): Unit = logger.trace {
    current = Some(ast)
    state.begin(SFX_CHECK)
  }

  /** Emits the stored identifier. */
  def submit(): Unit = logger.trace {
    result.app(unwrap(current))
    current = None
  }

  /** An illegal character run directly after the identifier: emit an
    * InvalidSuffix node instead.
    */
  def onErrSfx(): Unit = logger.trace {
    val ast = AST.Ident.InvalidSuffix(unwrap(current), currentMatch)
    result.app(ast)
    current = None
    state.end()
  }

  def onNoErrSfx(): Unit = logger.trace {
    submit()
    state.end()
  }

  /** Flushes a pending identifier at end of input. */
  def finalizer(): Unit = logger.trace {
    if (current.isDefined) submit()
  }

  val char: Pattern   = alphaNum | '_'
  val body: Pattern   = char.many >> '\''.many
  val _var: Pattern   = lowerLetter >> body
  val cons: Pattern   = upperLetter >> body
  val breaker: String = "^`!@#$%^&*()-=+[]{}|;:<>,./ \t\r\n\\"
  val errSfx: Pattern = noneOf(breaker).many1
  val SFX_CHECK       = state.define("Identifier Suffix Check")
}
ROOT || ident._var || reify { ident.on(AST.Var(_)) }
ROOT || ident.cons || reify { ident.on(AST.Cons(_)) }
ROOT || "_" || reify { ident.on(AST.Blank()) }
ident.SFX_CHECK || ident.errSfx || reify { ident.onErrSfx() }
ident.SFX_CHECK || always || reify { ident.onNoErrSfx() }
//////////////////
//// Operator ////
//////////////////
/** opr - lexes operators, checking for the '=' modifier form (`+=`) and
  * invalid suffixes. Shares `ident.current` as temporary storage.
  */
final object opr {
  def on(cons: String => AST.Ident): Unit = logger.trace {
    on(cons(currentMatch))
  }

  def onNoMod(cons: String => AST.Ident): Unit = logger.trace {
    onNoMod(cons(currentMatch))
  }

  /** Stores the operator and checks for a trailing '=' modifier. */
  def on(ast: AST.Ident): Unit = logger.trace {
    ident.current = Some(ast)
    state.begin(MOD_CHECK)
  }

  /** Stores an operator that cannot take a modifier; only the suffix is
    * validated.
    */
  def onNoMod(ast: AST.Ident): Unit = logger.trace {
    ident.current = Some(ast)
    state.begin(SFX_CHECK)
  }

  /** Upgrades the stored operator to its modifier form (e.g. `+` -> `+=`).
    * NOTE(review): the cast assumes only AST.Opr values reach MOD_CHECK —
    * which on(AST.Opr(_)) wiring provides; confirm no other path exists.
    */
  def onMod(): Unit = logger.trace {
    val opr = AST.Mod(unwrap(ident.current).asInstanceOf[AST.Opr].name)
    ident.current = Some(opr)
  }

  val char: Pattern     = anyOf("!$%&*+-/<>?^~|:\\")
  val errChar: Pattern  = char | "=" | "," | "."
  val errSfx: Pattern   = errChar.many1
  val body: Pattern     = char.many1
  val opsEq: Pattern    = "=" | "==" | ">=" | "<=" | "/=" | "#="
  val opsDot: Pattern   = "." | ".." | "..." | ","
  val opsGrp: Pattern   = anyOf("()[]{}")
  val opsCmm: Pattern   = "#" | "##"
  val opsNoMod: Pattern = opsEq | opsDot | opsGrp | opsCmm
  val SFX_CHECK         = state.define("Operator Suffix Check")
  val MOD_CHECK         = state.define("Operator Modifier Check")
  MOD_CHECK.parent = SFX_CHECK
}
ROOT || opr.body || reify { opr.on(AST.Opr(_)) }
ROOT || opr.opsNoMod || reify { opr.onNoMod(AST.Opr(_)) }
opr.MOD_CHECK || "=" || reify { opr.onMod() }
opr.SFX_CHECK || opr.errSfx || reify { ident.onErrSfx() }
opr.SFX_CHECK || always || reify { ident.onNoErrSfx() }
////////////////
//// NUMBER ////
////////////////
/** num - lexes decimal numbers with an optional explicit base written as
  * `base_digits` (e.g. `16_ff`).
  *
  * part1 - the base (before '_'); empty when the base is implicit
  * part2 - the digits
  */
final object num {
  var part1: String = ""
  var part2: String = ""

  /** Clears both accumulated parts. */
  def reset(): Unit = logger.trace {
    part1 = ""
    part2 = ""
  }

  /** Emits the accumulated number and resets the accumulator. */
  def submit(): Unit = logger.trace {
    val base = Some(part1).filter(_.nonEmpty)
    result.app(AST.Number(base, part2))
    reset()
  }

  /** A trailing '_' with nothing after it: emit a DanglingBase. */
  def onDanglingBase(): Unit = logger.trace {
    state.end()
    result.app(AST.Number.DanglingBase(part2))
    reset()
  }

  /** First digit run seen; wait to learn whether `_base` follows. */
  def onDecimal(): Unit = logger.trace {
    part2 = currentMatch
    state.begin(PHASE2)
  }

  /** A `_digits` suffix: the earlier run was the base, this the value. */
  def onExplicitBase(): Unit = logger.trace {
    state.end()
    part1 = part2
    part2 = currentMatch.drop(1)
    submit()
  }

  def onNoExplicitBase(): Unit = logger.trace {
    state.end()
    submit()
  }

  val decimal: Pattern = digit.many1
  val PHASE2: State    = state.define("Number Phase 2")
}
ROOT || num.decimal || reify { num.onDecimal() }
num.PHASE2 || "_" >> alphaNum.many1 || reify { num.onExplicitBase() }
num.PHASE2 || "_" || reify { num.onDanglingBase() }
num.PHASE2 || always || reify { num.onNoExplicitBase() }
//////////////
//// Text ////
//////////////
import AST.Text.Quote
/** Mutable state of a single (possibly nested) text literal.
  *
  * lines       - finished lines, most recent first
  * lineBuilder - segments of the line being built, most recent first
  * quote       - the literal's quote size (single or triple)
  */
class TextState(
  var lines: List[AST.Text.LineOf[AST.Text.Segment._Fmt[AST]]],
  var lineBuilder: List[AST.Text.Segment.Fmt],
  val quote: Quote
)
/** text - lexes formatted ('…') and raw ("…") text literals, including
  * escapes, backtick interpolation and multi-line bodies.
  *
  * stack   - saved states of enclosing (nested) literals
  * current - the literal being built
  */
final object text {
  val Segment = AST.Text.Segment
  var stack: List[TextState] = Nil
  var current                = new TextState(Nil, Nil, Quote.Single)

  def push(): Unit = logger.trace {
    stack +:= current
  }

  def pop(): Unit = logger.trace {
    current = stack.head
    stack   = stack.tail
  }

  /** Emits an empty literal of the given kind and quote size. */
  def submitEmpty(groupIx: State, quoteNum: Quote): Unit = logger.trace {
    if (groupIx == RAW)
      result.app(AST.Text.Raw(AST.Text.Body(quoteNum)))
    else
      result.app(AST.Text.Fmt(AST.Text.Body(quoteNum)))
  }

  /** Closes the literal being built and converts it to an AST node.
    * NOTE(review): `List1(...).get` assumes at least one line exists —
    * onSubmitLine just above guarantees it.
    */
  def finishCurrent(): AST.Text = logger.trace {
    onSubmitLine()
    val t     = current
    val body  = AST.Text.BodyOf(t.quote, List1(t.lines.reverse).get)
    val isRaw = state.current == RAW
    pop()
    off.pop()
    state.end()
    if (isRaw)
      AST.Text.Raw(body.asInstanceOf[AST.Text.BodyOf[Segment._Raw[AST]]])
    else
      AST.Text.Fmt(body)
  }

  def submit(): Unit = logger.trace {
    result.app(finishCurrent())
  }

  def submit(segment: Segment.Fmt): Unit = logger.trace {
    current.lineBuilder +:= segment
  }

  def submitUnclosed(): Unit = logger.trace {
    result.app(AST.Text.UnclosedOf(finishCurrent()))
  }

  /** Enters a text literal. The offset is pushed twice: one slot for the
    * literal itself, one for its first line.
    */
  def onBegin(grp: State, quoteSize: Quote): Unit = logger.trace {
    push()
    off.push()
    off.push()
    current = new TextState(Nil, Nil, quoteSize)
    state.begin(grp)
  }

  /** Adds plain characters, merging with a preceding plain segment. */
  def submitPlainSegment(): Unit = logger.trace {
    current.lineBuilder = current.lineBuilder match {
      case Segment._Plain(t) :: _ =>
        Segment.Plain(t + currentMatch) :: current.lineBuilder.tail
      case _ => Segment.Plain(currentMatch) :: current.lineBuilder
    }
  }

  /** Resolves a quote inside a literal: literal text (single quote in a
    * triple-quoted body), end of literal, or — for `'''` inside a
    * single-quoted body — end of literal plus a new empty one.
    */
  def onQuote(quoteSize: Quote): Unit = logger.trace {
    if (current.quote == Quote.Triple
        && quoteSize == Quote.Single)
      submitPlainSegment()
    else if (current.quote == Quote.Single
             && quoteSize == Quote.Triple) {
      val groupIx = state.current
      submit()
      submitEmpty(groupIx, Quote.Single)
    } else
      submit()
  }

  def onEscape(code: Segment.Escape): Unit = logger.trace {
    submit(Segment._Escape(code))
  }

  /** `\uXXXX` — 16-bit unicode escape. */
  def onEscapeU16(): Unit = logger.trace {
    val code = currentMatch.drop(2)
    onEscape(Segment.Escape.Unicode.U16(code))
  }

  /** `\UXXXXXXXX` — 32-bit unicode escape. */
  def onEscapeU32(): Unit = logger.trace {
    val code = currentMatch.drop(2)
    onEscape(Segment.Escape.Unicode.U32(code))
  }

  /** `\NNN` — numeric character escape. */
  def onEscapeInt(): Unit = logger.trace {
    val int = currentMatch.drop(1).toInt
    onEscape(Segment.Escape.Number(int))
  }

  def onInvalidEscape(): Unit = logger.trace {
    val str = currentMatch.drop(1)
    onEscape(Segment.Escape.Invalid(str))
  }

  def onEscapeSlash(): Unit = logger.trace {
    onEscape(Segment.Escape.Slash)
  }

  def onEscapeQuote(): Unit = logger.trace {
    onEscape(Segment.Escape.Quote)
  }

  def onEscapeRawQuote(): Unit = logger.trace {
    onEscape(Segment.Escape.RawQuote)
  }

  /** '`' inside formatted text: begin an interpolated expression. */
  def onInterpolateBegin(): Unit = logger.trace {
    result.push()
    off.push()
    state.begin(INTERPOLATE)
  }

  /** '`' at top level: close the interpolation when one is open;
    * otherwise the backtick is an unrecognized character.
    */
  def onInterpolateEnd(): Unit = logger.trace {
    if (state.isInside(INTERPOLATE)) {
      state.endTill(INTERPOLATE)
      submit(Segment.Expr(result.current))
      result.pop()
      off.pop()
      state.end()
    } else {
      onUnrecognized()
    }
  }

  /** Unterminated literal at end of input. */
  def onEOF(): Unit = logger.trace {
    submitUnclosed()
    rewind()
  }

  /** Moves the finished line into `lines`, consuming its offset. */
  def onSubmitLine(): Unit = logger.trace {
    off.pop()
    current.lines +:= AST.Text.LineOf(off.use(), current.lineBuilder.reverse)
    current.lineBuilder = Nil
  }

  def onNewLine(): Unit = logger.trace {
    state.end()
    onSubmitLine()
    off.on()
    off.push()
  }

  val stringChar = noneOf("'`\"\\\n")
  val seg        = stringChar.many1
  val escape_int = "\\" >> num.decimal
  val escape_u16 = "\\u" >> repeat(stringChar, 0, 4)
  val escape_u32 = "\\U" >> repeat(stringChar, 0, 8)

  val FMT: State         = state.define("Formatted Text")
  val RAW: State         = state.define("Raw Text")
  val NEWLINE: State     = state.define("Text Newline")
  val INTERPOLATE: State = state.define("Interpolate")
  INTERPOLATE.parent = ROOT
}
// Text-literal rule wiring.
ROOT || '`' || reify { text.onInterpolateEnd() }
text.FMT || '`' || reify { text.onInterpolateBegin() }
ROOT || "'" || reify { text.onBegin(text.FMT, Quote.Single) }
ROOT || "'''" || reify { text.onBegin(text.FMT, Quote.Triple) }
text.FMT || "'" || reify { text.onQuote(Quote.Single) }
text.FMT || "'''" || reify { text.onQuote(Quote.Triple) }
text.FMT || text.seg || reify { text.submitPlainSegment() }
text.FMT || eof || reify { text.onEOF() }
text.FMT || '\n' || reify { state.begin(text.NEWLINE) }
ROOT || "\"" || reify { text.onBegin(text.RAW, Quote.Single) }
ROOT || "\"\"\"" || reify { text.onBegin(text.RAW, Quote.Triple) }
text.RAW || "\"" || reify { text.onQuote(Quote.Single) }
// NOTE(review): dead rule — silently consumes a literal "$$$$$" inside a
// raw literal with no handler; looks like a debugging leftover. Confirm
// and remove if so.
text.RAW || "$$$$$" || reify {}
text.RAW || "\"\"\"" || reify { text.onQuote(Quote.Triple) }
text.RAW || noneOf("\"\n") || reify { text.submitPlainSegment() }
text.RAW || eof || reify { text.onEOF() }
text.RAW || '\n' || reify { state.begin(text.NEWLINE) }
text.NEWLINE || space.opt || reify { text.onNewLine() }
// Character and control escapes are registered generically from the AST's
// escape-code tables, quoting the handler via quasiquotes instead of reify.
AST.Text.Segment.Escape.Character.codes.foreach { code =>
  import scala.reflect.runtime.universe._
  val name = TermName(code.toString)
  val char = q"text.Segment.Escape.Character.$name"
  text.FMT || s"\\$code" || q"text.onEscape($char)"
}
AST.Text.Segment.Escape.Control.codes.foreach { code =>
  import scala.reflect.runtime.universe._
  val name = TermName(code.toString)
  val ctrl = q"text.Segment.Escape.Control.$name"
  text.FMT || s"\\$code" || q"text.onEscape($ctrl)"
}
text.FMT || text.escape_u16 || reify { text.onEscapeU16() }
text.FMT || text.escape_u32 || reify { text.onEscapeU32() }
text.FMT || text.escape_int || reify { text.onEscapeInt() }
text.FMT || "\\\\" || reify { text.onEscapeSlash() }
text.FMT || "\\'" || reify { text.onEscapeQuote() }
text.FMT || "\\\"" || reify { text.onEscapeRawQuote() }
text.FMT || ("\\" >> text.stringChar) || reify { text.onInvalidEscape() }
text.FMT || "\\" || reify { text.submitPlainSegment() }
//////////////
/// Blocks ///
//////////////
// because of bug in macroContext.eval it cannot be part of object block
/** Mutable state of a single indentation block.
  *
  * isOrphan   - block has no expression directly before it
  * isValid    - set false when the block's indentation is inconsistent
  * indent     - the block's indentation level
  * emptyLines - offsets of the block's leading empty lines
  * firstLine  - the first non-empty line, once seen
  * lines      - remaining lines, most recent first
  */
class BlockState(
  val isOrphan: Boolean,
  var isValid: Boolean,
  var indent: Int,
  var emptyLines: List[Int],
  var firstLine: Option[AST.Block.Line.NonEmpty],
  var lines: List[AST.Block.OptLine]
)
/** block - tracks indentation blocks and assembles them (and finally the
  * module) from parsed lines.
  *
  * stack      - saved states of enclosing blocks
  * emptyLines - offsets of empty lines not yet assigned to a block
  * current    - the block being built
  */
final object block {
  var stack: List[BlockState] = Nil
  var emptyLines: List[Int]   = Nil
  var current: BlockState     = new BlockState(false, true, 0, Nil, None, Nil)

  /** Opens a nested block, adopting the pending empty lines. */
  def push(newIndent: Int, orphan: Boolean): Unit =
    logger.trace {
      stack +:= current
      current =
        new BlockState(orphan, true, newIndent, emptyLines.reverse, None, Nil)
      emptyLines = Nil
    }

  def pop(): Unit = logger.trace {
    current = stack.head
    stack   = stack.tail
  }

  /** Assembles the current block into an AST.Block.
    * NOTE(review): `unwrap(current.firstLine)` assumes a first non-empty
    * line exists by build time; confirm the lexer guarantees it.
    */
  def build(): AST.Block = logger.trace {
    submitLine()
    AST.Block(
      current.isOrphan,
      AST.Block.Continuous,
      current.indent,
      current.emptyLines,
      unwrap(current.firstLine),
      current.lines.reverse
    )
  }

  /** Closes the current block and applies it to the enclosing result; a
    * block directly following an operator becomes Discontinuous.
    */
  def submit(): Unit = logger.trace {
    val block = build()
    result.pop()
    off.pop()
    pop()
    val block2 = result.last() match {
      case None => block
      case Some(ast) =>
        ast match {
          case AST.Opr.any(_) =>
            block.replaceType(AST.Block.Discontinuous): AST.Block
          case _ => block
        }
    }
    result.app(block2)
    off.push()
    logger.endGroup()
  }

  /** Builds the module node from the outermost block's lines.
    * NOTE(review): the `val line :: lines = …` destructure throws a
    * MatchError when both the empty-line list and the body are empty;
    * confirm at least one line is always produced.
    */
  def submitModule(): Unit = logger.trace {
    val body = current.firstLine match {
      case None       => current.lines.reverse
      case Some(line) => line.toOptional +: current.lines.reverse
    }
    val line :: lines = (current.emptyLines
      .map(AST.Block.Line(None, _)): List[AST.Block.OptLine]) ++ body
    val module = AST.Module(line, lines)
    result.current = Some(module)
    logger.endGroup()
  }

  /** Moves the finished line's result into the current block: the first
    * one becomes the required first line, later ones ordinary lines.
    */
  def submitLine(): Unit = logger.trace {
    result.current match {
      case None =>
      case Some(r) =>
        off.pop()
        current.firstLine match {
          case None =>
            current.firstLine = Some(AST.Block.Line.Required(r, off.use()))
          case Some(_) =>
            current.lines +:= AST.Block.Line(result.current, off.use())
        }
    }
    emptyLines.reverse.foreach(current.lines +:= AST.Block.OptLine(_))
    emptyLines = Nil
    result.current = None
  }

  /** Records an empty line's offset (minus the newline character). */
  def onEmptyLine(): Unit = logger.trace {
    off.on(-1)
    emptyLines +:= off.use()
  }

  /** First non-empty content of the module: adopt leading empty lines
    * and restart scanning from the newline state.
    */
  def onModuleBegin(): Unit = logger.trace {
    current.emptyLines = emptyLines.reverse
    emptyLines = Nil
    rewind()
    off.push()
    state.end()
    state.begin(NEWLINE)
  }

  /** Opens a nested block; it is an orphan when nothing precedes it. */
  def onBegin(newIndent: Int): Unit = logger.trace {
    val isOrphan = result.current.isEmpty
    result.push()
    push(newIndent, isOrphan)
    logger.beginGroup()
  }

  /** End of input reached on a fresh line. */
  def onEOFLine(): Unit = logger.trace {
    state.end()
    submitLine()
    off.on()
    current.lines +:= AST.Block.OptLine(off.use())
    off.pop()
    onEOF()
  }

  def onEndLine(): Unit = logger.trace {
    off.push()
    state.begin(NEWLINE)
  }

  /** Compares the new line's indent with the current block's and either
    * continues it, opens a nested block, or closes block(s).
    */
  def onNewLine(): Unit = logger.trace {
    state.end()
    off.on()
    if (off.current == current.indent)
      submitLine()
    else if (off.current > current.indent)
      onBegin(off.use())
    else
      onEnd(off.use())
    state.begin(FIRSTCHAR)
  }

  /** Closes blocks until the indent matches; a partial dedent opens a
    * block flagged invalid.
    */
  def onEnd(newIndent: Int): Unit = logger.trace {
    while (newIndent < current.indent) submit()
    if (newIndent > current.indent) {
      logger.log("Block with invalid indentation")
      onBegin(newIndent)
      current.isValid = false
    } else {
      off.push()
      submitLine()
    }
  }

  val MODULE    = state.define("Module")
  val NEWLINE   = state.define("Newline")
  val FIRSTCHAR = state.define("First Char")
}
ROOT || newline || reify { block.onEndLine() }
block.NEWLINE || space.opt >> newline || reify { block.onEmptyLine() }
block.NEWLINE || space.opt >> eof || reify { block.onEOFLine() }
block.NEWLINE || space.opt || reify { block.onNewLine() }
block.MODULE || space.opt >> newline || reify { block.onEmptyLine() }
block.MODULE || space.opt || reify { block.onModuleBegin() }
block.FIRSTCHAR || always || reify { state.end() }
////////////////
/// Defaults ///
////////////////
/** Wraps any character no rule matched into an Unrecognized node. */
final def onUnrecognized(): Unit = logger.trace {
  result.app(AST.Invalid.Unrecognized(_))
}

/** Finalizes parsing: flushes any pending identifier, closes all open
  * blocks down to indent 0, and builds the module.
  */
final def onEOF(): Unit = logger.trace {
  ident.finalizer()
  off.push()
  block.submitLine()
  block.onEnd(0)
  block.submitModule()
}
ROOT || space || reify { off.on() }
ROOT || eof || reify { onEOF() }
ROOT || any || reify { onUnrecognized() }
}
/** Companion namespace imported by [[ParserDef]]. */
object ParserDef2 {
  /** Alias for the generic flexer parse result. */
  type Result[T] = flexer.Parser.Result[T]
}

View File

@ -0,0 +1,60 @@
package org.enso.syntax
import org.enso.syntax.text.DocParser
import org.enso.syntax.text.DocParser.Result
import org.enso.syntax.text.ast.Doc
import org.scalameter.api._
import org.scalameter.execution.LocalExecutor
import org.scalameter.picklers.Implicits._
import scala.math.pow
// Offline ScalaMeter regression benchmarks for the documentation parser.
object DocParserBench extends Bench.OfflineRegressionReport {
// Run measurements in-process rather than forking a JVM per test.
override def executor = new LocalExecutor(warmer, aggregator, measurer)
val range = 0
// Generator of exponentially growing input sizes up to 2^i.
def exp(i: Int): Gen[Int] =
Gen.exponential("size")(pow(2, i - range).toInt, pow(2, i).toInt, 2)
// Builds an input-string generator by applying `f` to every size.
def gen(range: Gen[Int], f: Int => String): Gen[String] =
for { i <- range } yield f(i)
// Named corpora: each pair is (benchmark name, generated input).
val tests = List(
"formatters" -> gen(exp(14), i => "*foo bar*\n" * i),
"unclosed" -> gen(exp(14), i => "*_foobar*\n" * i),
"combined" -> gen(exp(14), i => "*_~fo0~_*\n" * i),
"normal" -> gen(exp(14), i => "test12345\n" * i),
"link" -> gen(exp(14), i => "[foo](bo)\n" * i),
"tags" -> gen(exp(14), i => "ADDED\nfoo\n" * i),
"list" -> gen(
exp(13),
i => """foo
| - A
| - B
| - C
|""".stripMargin * i
),
"list_nested" -> gen(
exp(12),
i => """foo
| - A
| - B
| * CA
| * CB
| - D
|""".stripMargin * i
),
"sections" -> gen(
exp(13),
i => "Foo\n\n!B\n\n?C \n\n>D \n\n" * i
)
)
def run(str: String): Result[Doc] = DocParser.run(str)
// Registers one measurement per corpus entry.
performance of "DocParser" in {
tests.foreach {
case (name, gen) => measure method name in (using(gen) in run)
}
}
}

View File

@ -0,0 +1,124 @@
package org.enso.syntax
import java.io.BufferedReader
import java.io.File
import java.io.FileReader
import java.io.PrintWriter
import org.enso.syntax.text.AST
import org.enso.flexer
import org.enso.syntax.text.Parser
import org.scalameter.api._
import org.enso.syntax.text.ast.DSL._
import org.scalameter.execution.LocalExecutor
import org.scalameter.picklers.Implicits._
import scala.math.pow
// Offline ScalaMeter regression benchmarks covering the main parser, the
// input readers, and AST pattern-matching performance.
object ParserBenchmark extends Bench.OfflineRegressionReport {
// Run measurements in-process rather than forking a JVM per test.
override def executor = new LocalExecutor(warmer, aggregator, measurer)
val range = 0
// Generator of exponentially growing input sizes up to 2^i.
def exp(i: Int) =
Gen.exponential("size")(pow(2, i - range).toInt, pow(2, i).toInt, 2)
def gen(range: Gen[Int], f: Int => String): Gen[String] =
for { i <- range } yield f(i)
def gen(range: Gen[Int]): Gen[Int] =
for { i <- range } yield i
// Builds a deep application chain of Vars, then repeatedly matches on it,
// peeling one prefix application per iteration.
def patternMatching0(i: Int): Unit = {
val n1 = "foo" + i.toString
val n2 = n1 + "!"
var v: AST = AST.Var(n1)
for { j <- 0 to i } {
if (j % 2 == 0) v = n2 $_ v
else v = n1 $__ v
}
for { _ <- 0 to i } v = v match {
case AST.Var(_) => v
case AST.App.Prefix(_, arg) => arg
}
}
// Like patternMatching0 but with a single match case only.
// NOTE(review): the match is non-exhaustive by design; it only ever sees
// AST.App.Prefix values built in the loop above it.
def patternMatching1(i: Int): Unit = {
val n1 = "foo" + i.toString
val n2 = n1 + "!"
var v: AST = AST.Var(n1)
for { _ <- 0 to i } v = n2 $_ v
for { _ <- 0 to i } v = v match {
case AST.App.Prefix(_, arg) => arg
}
}
// Named corpora for the parser benchmarks.
val parserInput = List(
"text" -> gen(exp(10), i => "'ab #$ 60'" * i),
"number" -> gen(exp(10), i => "123456789 " * i),
"codeBlock" -> gen(exp(10), i => "foo0\nBar2\n" * i),
"codeBlock with macros" -> gen(exp(10), i => "a = x\nb++\n" * i),
"allRules" -> gen(
exp(7),
i => """
| string = "ABCD"
| number = 123_4.67
| fib : Int -> Int
| fib n = fib n-1 + fib n-2
|""".stripMargin * i
)
)
val filename = "Syntax/specialization/target/bench-input.txt"
// Generates the reader-benchmark input file (containing surrogate pairs)
// once, as a side effect of object initialization.
if (!new File(filename).exists()) {
val file = new PrintWriter(new File(filename))
for (_ <- 1 to 10000) {
file.print("rewuipf\uD800\uDF1Edsahkjlzcvxm,/\uD800\uDF1E.m,';k]o1&**&$")
file.print("6!@#&*()+|{}QWERYUI\uD800\uDF1EOIO\uD800\uDF1E}ASFDJKM>>?\n")
}
}
def runReader() = new flexer.Reader(new File(filename)).toString()
def runReaderUTF() = new flexer.ReaderUTF(new File(filename)).toString()
// Baseline reader: plain BufferedReader that manually stitches surrogate
// pairs back together.
def runBufferedReader() = {
val reader = new BufferedReader(new FileReader(filename))
val builder = new java.lang.StringBuilder()
var char = 0
while ({ char = reader.read(); char != -1 }) {
builder.append(char.toChar)
if (char.toChar.isHighSurrogate)
builder.append(reader.read().toChar)
}
builder.toString
}
///////////////////////////////////////
//// Tests ////////////////////////////
///////////////////////////////////////
val exp18 = gen(exp(18))
performance of "pattern match" in {
measure method "Var and App" in (using(exp18) in patternMatching0)
measure method "Var" in (using(exp18) in patternMatching1)
}
val dummy = gen(exp(0))
performance of "reader" in {
measure method "Buffered" in { using(dummy) in (_ => runBufferedReader()) }
measure method "FlexerUTF" in { using(dummy) in (_ => runReaderUTF()) }
measure method "Flexer" in { using(dummy) in (_ => runReader()) }
}
def run(str: String) = Parser().run(new flexer.Reader(str))
performance of "parser" in {
parserInput.foreach {
case (name, gen) => measure method name in (using(gen) in run)
}
}
}

View File

@ -0,0 +1,51 @@
package org.enso.syntax.text
import java.io.File
import java.io.PrintWriter
import org.enso.flexer
import org.enso.flexer.Reader
import org.enso.syntax.text.ast.Doc
import org.enso.syntax.text.spec.DocParserDef
import scalatags.Text.TypedTag
import scala.util.Random
////////////////////////////////////////////////////////////////////////////////
//// DocParser /////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
// Documentation parser front-end. On success, renders the parsed Doc to
// HTML, prints it, saves it to disk (side effect), and returns the Doc;
// on failure an empty Doc() is returned.
class DocParser {
import DocParser._
private val engine = newEngine()
def parserRun(input: String) = run(input) match {
case flexer.Parser.Result(_, flexer.Parser.Result.Success(v)) =>
println(v.renderHTML("style.css"))
// NOTE(review): the output path is hard-coded relative to the repo
// root — running from another working directory will fail to save.
val path =
"syntax/specialization/src/main/scala/org/enso/syntax/text/DocParserHTMLOut/"
saveHTMLCodeToLocalFile(path, v.renderHTML("style.css"))
v
case _ => Doc()
}
def run(input: String): Result[Doc] = engine.run(new Reader(input))
}
object DocParser {
  type Result[T] = flexer.Parser.Result[T]
  // Compiling the flexer spec is expensive; do it once for all instances.
  private val newEngine = flexer.Parser.compile(DocParserDef())
  def parserRun(input: String) = new DocParser().parserRun(input)
  def run(input: String): Result[Doc] = new DocParser().run(input)

  /** Writes the rendered HTML to `path` under a random 8-char file name.
    * Fix: the writer is now closed in a `finally` block so the file handle
    * is not leaked when `write` throws (the original only closed on the
    * happy path).
    */
  def saveHTMLCodeToLocalFile(path: String, code: TypedTag[String]): Unit = {
    val writer = new PrintWriter(
      new File(path + Random.alphanumeric.take(8).mkString("") + ".html")
    )
    try writer.write(code.toString)
    finally writer.close()
  }
}

View File

@ -0,0 +1,333 @@
/*//////////////////////////////////////////////////////////////////////////////
//// UNDER CONSTRUCTION ////
//////////////////////////////////////////////////////////////////////////////*/
/*///////////
//// DOM ////
///////////*/
/* Base typography for the whole document.
   Fix: `font-style: normal` was declared twice; the duplicate is removed. */
body {
  -webkit-font-smoothing: antialiased;
  font-style: normal;
  word-wrap: break-word;
  font-size: 17px;
  line-height: 1.52947;
  font-weight: 400;
  letter-spacing: -0.021em;
  font-family: "SF Pro Text", "SF Pro Icons", "Helvetica Neue", "Helvetica",
    "Arial", sans-serif;
  background-color: white;
  color: #333333;
}
/* Base element styling: paragraphs, links, images, inline code, buttons,
   and headings used throughout the generated documentation pages. */
p {
display: block;
margin-block-start: 1em;
margin-block-end: 1em;
margin-inline-start: 0px;
margin-inline-end: 0px;
}
a:hover {
color: #35a5ff !important;
text-decoration: none;
}
a {
color: #0070c9;
background-color: transparent;
text-decoration: none;
display: inline-block;
transition: all 0.3s ease;
}
img {
display: block;
}
code {
color: #0070c9;
background-color: transparent;
font-size: inherit;
font-family: monospace;
line-height: inherit;
display: inline-block;
white-space: pre-wrap;
}
button {
display: inline-block;
padding: 8px 30px;
margin: 10px 0;
outline: none;
background-color: #777;
border: none;
color: #fafafa;
border-radius: 5px;
box-shadow: 5px 0 #555;
font-size: 13px;
vertical-align: top;
transition: all 0.3s ease;
}
button:hover {
background-color: #333;
}
b {
font-weight: 600;
}
h1 {
font-size: 34px;
line-height: 1.08824;
font-weight: 500;
letter-spacing: 0.01em;
}
h2 {
font-size: 28px;
line-height: 1.1073;
font-weight: 500;
letter-spacing: 0.012em;
}
.Body h2 {
margin: 0;
margin-top: 0.65rem;
}
/*///////////////////
//// Invalid AST ////
///////////////////*/
/* In creator (editing) mode, invalid AST fragments are highlighted.
   Fix: added the missing space before `{` for consistency with the rest
   of the stylesheet. */
.creator .Unclosed,
.creator .invalidIndent,
.creator .invalidLink {
  display: inline;
  color: orangered;
}
/* Outside creator mode, invalid fragments render inline without the
   warning color. */
.Unclosed,
.invalidIndent,
.invalidLink {
display: inline;
}
/*//////////////
//// Header ////
//////////////*/
.Header {
font-size: 19px;
font-weight: 500;
}
.Important .Header,
.Info .Header,
.Example .Header {
margin-bottom: 0.7em;
font-weight: 600;
letter-spacing: -0.021em;
line-height: 17px;
font-synthesis: none;
font-family: "SF Pro Text", "SF Pro Icons", "Helvetica Neue", "Helvetica",
"Arial", sans-serif;
}
/*////////////
//// Tags ////
////////////*/
.Doc .Tags {
margin-left: auto;
margin-right: auto;
margin-bottom: 20px;
padding: 15px 0;
text-align: center;
background-color: #fafafa;
}
.Doc .ExtForTagDetails {
margin: 0 3px;
color: #999999;
}
/* Shared pill styling for every tag kind; per-kind colors follow. */
.Doc .Tags .DEPRECATED,
.Doc .Tags .MODIFIED,
.Doc .Tags .ADDED,
.Doc .Tags .UPCOMING,
.Doc .Tags .REMOVED,
.Doc .Tags .UNRECOGNIZED {
line-height: 1.5;
font-weight: 400;
border-radius: 3px;
font-size: 12px;
letter-spacing: -0.021em;
border: 1px solid;
display: inline-flex;
padding: 3px 15px;
margin: 2px;
white-space: nowrap;
background: transparent;
}
.Doc .Tags .DEPRECATED {
border-color: #c35400;
color: #c35400;
}
.Doc .Tags .MODIFIED {
border-color: #8A82CF;
color: #8A82CF;
}
.Doc .Tags .ADDED {
border-color: #79A129;
color: #79A129;
}
.Doc .Tags .UPCOMING,
.Doc .Tags .REMOVED,
.Doc .Tags .UNRECOGNIZED {
border-color: #888888;
color: #666666;
}
.creator .Doc .Tags .UNRECOGNIZED {
border: 2px solid;
color: orangered;
}
/*////////////////
//// Sections ////
////////////////*/
/* Shared layout for all documentation section kinds. */
.Raw,
.Important,
.Info,
.CodeBlock,
.Example {
margin-top: 0;
margin-left: auto;
margin-right: auto;
position: relative;
}
.Body .Raw {
padding: 0.75em 0;
margin-bottom: 0.6rem;
font-size: 17px;
line-height: 1.52947;
font-weight: 400;
letter-spacing: -0.021em;
font-family: "SF Pro Text", "SF Pro Icons", "Helvetica Neue", "Helvetica",
"Arial", sans-serif;
background-color: white;
color: #333333;
font-style: normal;
}
.Synopsis .Raw {
margin-bottom: 2rem;
}
/* Bordered "callout" panels for marked sections.
   Fix: `font-size: 17` lacked a unit, which makes the declaration invalid
   and silently ignored by browsers; 17px matches `.Body .Raw` above. */
.Important,
.Info,
.CodeBlock,
.Example {
  font-size: 17px;
  padding-top: 0.94118rem;
  padding-bottom: 0.94118rem;
  padding-left: 18px;
  padding-right: 10px;
  border: 1px solid #8a82cf;
  border-left: 6px solid #8a82cf;
  border-radius: 6px;
  margin: 0.7em 0;
}
/* "Important" callout: only the left border remains, in yellow. */
.Important {
border-right: 0px;
border-top: 0px;
border-bottom: 0px;
border-color: #fee450;
background-color: #fbf8e8;
}
/* Fix: the color value was missing its leading `#`, making the declaration
   invalid and leaving the default purple border in place. */
.Info {
  border-color: #69c5e8;
}
.Example {
border-color: #8a82cf;
}
/* Code blocks are hidden by default (toggled elsewhere). */
.CodeBlock {
border-color: #7cd58b;
margin: 10px 20px;
display: none;
}
/*/////////////////////////
//// Synopsis & Detail ////
/////////////////////////*/
/* Shared column layout for the synopsis and the body of a doc page. */
.Synopsis,
.Body {
margin: 0 auto;
padding: 5px;
margin-bottom: 20px;
text-align: left;
}
/* Fix: added the missing trailing semicolon (consistency; prevents a
   silent breakage if another declaration is appended later). */
.Synopsis {
  font-size: 20px;
  line-height: 1.5;
  font-weight: 300;
  letter-spacing: 0.017em;
  border-bottom: 1px solid #d6d6d6;
}
/* Root container of a rendered documentation page. */
.Doc {
margin: 0;
width: 100%;
background-color: #ffffff;
}
/* Responsive column widths for the synopsis/body.
   Fix: added the missing trailing semicolons after each `width`
   declaration for consistency with the rest of the stylesheet. */
@media (min-width: 300px) {
  .Synopsis,
  .Body {
    width: 380px;
  }
}
@media (min-width: 500px) {
  .Synopsis,
  .Body {
    width: 440px;
  }
}
@media (min-width: 600px) {
  .Synopsis,
  .Body {
    width: 490px;
  }
}
@media (min-width: 900px) {
  .Synopsis,
  .Body {
    width: 670px;
  }
}
@media (min-width: 1300px) {
  .Synopsis,
  .Body {
    width: 780px;
  }
}

View File

@ -0,0 +1,327 @@
package org.enso.syntax.text
import org.enso.flexer
import org.enso.flexer.Reader
import org.enso.syntax.text.ast.meta.Builtin
import org.enso.syntax.text.ast.meta.Pattern
import org.enso.syntax.text.ast.opr.Prec
import org.enso.syntax.text.prec.Distance
import org.enso.syntax.text.prec.Macro
import org.enso.syntax.text.prec.Operator
import org.enso.syntax.text.spec.ParserDef
import scala.annotation.tailrec
////////////////////////////////
// Base class for parser-internal failures. `cause` defaults to null
// (None.orNull) for Java-interop-friendly exception chaining.
class InternalError(reason: String, cause: Throwable = None.orNull)
extends Exception(s"Internal error $reason", cause)
////////////////
//// Parser ////
////////////////
/** This is the main Parser class.
*
* ==The Macro System==
*
* The parser is based on a very sophisticated Macro mechanism, giving users
* unparalleled control and flexibility. The macro system allows advanced
* users to create new syntax or new domain-specific languages. In a similar
* fashion to Lisp, Enso macros can freely transform the syntactic structure of
* the program. In short, anything that Enso can do to a data structure, Enso
* macros can do to code. In contrast, in most other languages, the parser's
* output is purely internal to the language implementation and cannot be
* manipulated by the programmer.
*
* Macro resolution steps:
*
* 1. Parser is executed by using the [[Parser#run]] function. It reads source
* code and outputs a token stream [[AST.Stream]]. The token stream contains a
* very narrow range of possible elements: [[AST.Blank]], [[AST.Var]],
* [[AST.Cons]], [[AST.Opr]], [[AST.Number]], [[AST.Text]], and [[AST.Block]],
* which contains lines of streams of these elements. Every other AST structure
* is built by the macro system. Please note that the stream in this step is
* encoded by using [[AST.App]] on subsequent elements.
*
* 2. Parser prepares [[Builtin.registry]] containing predefined set of macro
* [[AST.Macro.Definition]], which define such constructs as comments, parensed
* expressions, imports, new data definitions, if-then-else mixfix functions,
* or even foreign languages support. During this step parser will be also
* asking interpreter to fill the registry with definitions from other modules.
* Each [[AST.Macro.Definition]] contains macro segment descriptions and a
* finalizer, a function transforming matched tokens to final AST. Finalizer is
* used only if all macro segments were matched correctly.
*
* 3. The token stream is partitioned according to registered macros segments.
* Each macro contains multiple segments. A segment consists of an identifier,
* like "if" or "then" and a macro [[Pattern]]. Patterns are not used in this
* step. The AST stream is partitioned solely by segment identifiers. Macros
* can overlap, for example, [[Builtin.registry]] contains both "if-then" and
* "if-then-else" macro. When it is impossible to decide which macro to choose,
* like for the input "(if a) b", [[AST.Macro.Ambiguous]] is returned.
* Otherwise, each macro segment is matched against corresponding [[Pattern]]
* and [[AST.Macro.Match]] is returned and stored back in the [[AST.Stream]].
* Please note, that even if pattern matching fails, the [[AST.Macro.Match]]
* will be the result. It will contain information about failed patterns.
*
* 4. The segment [[Pattern]] is similar to regular expression. It contains
* around 10 building blocks, such as [[Pattern.Nothing]], which does not
* consume any input, or [[Pattern.Tok]], allowing matching a specific token,
* like the "if" keyword. The result, [[Pattern.Match]] is stored in
* [[AST.Macro.Match]]. The [[Pattern.Match.Err]] is used to mark unsuccessful
* pattern match fragments, while the [[Pattern.Match.Tok]] is used to provide
* additional help messages to the end-user. Please note that it is impossible
* for the pattern match mechanism to break even on malformed user
* [[AST.Macro.Definition]]. Each definition contains a pre-process step inside
* of [[AST.Macro.Definition]] constructor, which modifies the user provided
* rules with checks if the pattern succeed and in case the pattern was used
* between segments, if it consumed all tokens. In case either of validators
* fail, all tokens are consumed and marked as an invalid match.
*
* 5. A very special pattern is the [[Pattern.Build]] construction, which tells
* the pattern match mechanism that it should build a single [[AST]] expression
* out of matched tokens. For example, a pattern
* [[Pattern.Build(Pattern.Cls[AST.Opr])]] will match an operator token and
* build a side-section AST from it. The [[Pattern.Build]] blocks are resolved
* during the pattern match step. After this step is finished and
* [[AST.Macro.Match]] or [[AST.Macro.Ambiguous]] is stored back in the
* [[AST.Stream]], nothing more happens, parsing is done! It is important to
* note, that there is a special module parsing macro, which runs
* [[Pattern.Build]] on every line.
*
*
*
* ==Pattern Build Mechanism==
*
* The resolution of [[Pattern.Build]] is as interesting as the macro system.
* It consists of the following stages:
*
* 1. First, the [[AST.Stream]] is partitioned by the [[Distance]] processor
* according to the spacing information. All non-spaced tokens are grouped
* together and processed first. After their processing is done and each group
* will be transformed to a single [[AST]], it is put back to the original
* [[AST.Stream]] and the whole stream is processed the same way (described in
* the following points).
*
* 2. Each token of a chosen stream is then processed by the
* [[https://en.wikipedia.org/wiki/Shunting-yard_algorithm Shunting-yard
* algorithm]]. Basically, it re-shuffles the [[AST]] stream to combination of
* [[AST.App]], [[AST.App.Left]], [[AST.App.Right]], and [[AST.App.Sides]],
* according to the operator precedence. Please note that the precedence of
* user defined operators is fixed in Enso and depends on the shape of the
* operator. For example, all "arrows" like "<-", "<-<", or "<=<", have the
* same precedence. The associativity is inferred by the operator direction,
* where both "=" and "," operators are considered right-associative. See
* [[Operator]] and [[Prec]] for more information.
*
*
*
* ==Finalizers==
*
* A careful reader will notice that there was no description of how finalizers
* (mentioned in the first section) are used. Finalizers are user-provided AST
* transformations which are applied to valid AST Macro matches. After
* finalizer is applied, the spacing information might be lost.
*
* ==Space-unaware AST==
*
* That's because they are NOT used during parsing. A very important design
* decision is that Enso AST contains all information allowing for printing the
* code back from the AST, while keeping all whitespaces as they were before
* parsing. This is why each space-aware AST, like [[AST.App]] records all
* positional information. For convenient usage, all space-aware [[AST]]
* definitions end with "Of", like [[AST.App.PrefixOf]] and have a counterpart
* without "Of" allowing for pattern matching without thinking about the
* spacing information. Because macro system is end-user extensible, we cannot
* assume that the end-user will care about recording valid spacing when
* transforming [[AST]] to another form. That's why there are also
* space-unaware [[AST]] structures, which are handy to work with by automated
* tools like the interpreter, while all the spacing information is stored only
* in the basic set of tokens and [[AST.Macro]] tokens. Each AST node has a
* [[AST.map]] function for mapping over sub-nodes, which allows easy building
* of AST traversals. The [[Parser#resolveMacros]] is such a traversal, which
* applies [[AST.Macro.Definition.Resolver]] to each [[AST.Macro.Match]] found
* in the AST, while losing a lot of positional information.
*/
class Parser {
import Parser._
private val engine = newEngine()
// Parses `input` with an empty ID map.
def run(input: Reader): AST.Module =
run(input, Map())
// Parses `input`, resolves macros, and annotates nodes with the IDs from
// `idMap` (keyed by (offset, span)). Throws ParsingFailed on lexer failure.
def run(input: Reader, idMap: Map[(Int, Int), AST.ID]): AST.Module =
engine.run(input).map(Macro.run) match {
case flexer.Parser.Result(_, flexer.Parser.Result.Success(mod)) =>
val mod2 = annotateModule(idMap, mod)
resolveMacros(mod2).asInstanceOf[AST.Module]
case _ => throw ParsingFailed
}
// Assigns IDs from `idMap` to matching nodes; macro matches without an
// entry receive a freshly generated ID (resolveMacros requires one).
def annotateModule(
idMap: Map[(Int, Int), AST.ID],
mod: AST.Module
): AST.Module = mod.traverseWithOff { (off, ast) =>
idMap.get((off, ast.repr.span)) match {
case Some(id) => ast.setID(id)
case None =>
ast match {
case AST.Macro.Match.any(_) => ast.withNewID()
case _ => ast
}
}
}
/** Although this function does not use any Parser-specific API now, it will
* use such in the future when the interpreter will provide information about
* defined macros other than [[Builtin.registry]].
*/
def resolveMacros(ast: AST): AST =
ast match {
case AST.Macro.Match.any(ast) =>
val resolvedAST = ast.map(resolveMacros)
Builtin.registry.get(resolvedAST.path) match {
case None => throw MissingMacroDefinition
case Some(spec) =>
val id = resolvedAST.id.getOrElse(throw new Error(s"Missing ID"))
val segments = resolvedAST.segs.toList().map(_.el)
val ctx = AST.Macro.Resolver.Context(resolvedAST.pfx, segments, id)
resolvedAST.copy(shape = resolvedAST.shape.copy[AST](resolved = {
resolveMacros(spec.resolver(ctx))
}))
}
case _ => ast.map(resolveMacros)
}
/** Drops macros metadata keeping only resolved macros in the AST.
* WARNING: this transformation drops the information about AST spacing.
*/
def dropMacroMeta(ast: AST.Module): AST.Module = {
def go: AST => AST = {
case AST.Macro.Match.any(t) => go(t.resolved)
case t => t.map(go)
}
ast.map(go)
}
}
object Parser {
def apply(): Parser = new Parser()
// Compiling the flexer specification is expensive, so it happens once and
// the resulting engine factory is shared by all Parser instances.
private val newEngine = flexer.Parser.compile(ParserDef())
//// Exceptions ////
case object ParsingFailed extends ParserError("parsing failed")
case object MissingMacroDefinition
extends ParserError("macro definition not found")
class ParserError(reason: String, cause: Throwable = None.orNull)
extends InternalError(s"in parser $reason", cause)
}
////////////////////////////////////////////////////////////////////////////////
//// Interactive Testing Utilities /////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
//////////////
//// Main ////
//////////////
// Interactive smoke-test entry point: parses a sample input and prints the
// resulting AST in several forms.
object Main extends App {
// Pretty-prints an AST's toString by indenting on parentheses and breaking
// on commas; short balanced paren groups are kept inline.
def pretty(str: String): String = {
// Length of the short balanced paren group starting at `in`, or 0 when
// the group does not close within the 10-character lookahead.
def checkClosing(in: List[Char]): Int = {
@tailrec
def go(i: Int, rest: Int, in: List[Char], bias: Int): Int =
(rest, bias, in) match {
case (0, _, _) => 0
case (_, 0, _) => i
case (_, _, Nil) => i
case (_, _, s :: ss) =>
s match {
case '(' => go(i + 1, rest - 1, ss, bias - 1)
case ')' => go(i + 1, rest - 1, ss, bias + 1)
case _ => go(i + 1, rest - 1, ss, bias)
}
}
go(0, 10, in, -1)
}
// Accumulates output fragments in reverse; `ind` is the current depth.
@tailrec
def go(ind: Int, in: List[Char], out: List[String]): List[String] = {
def newline(i: Int) = "\n" + " " * i * 2
in match {
case Nil => out
case s :: ss =>
val s2 = s.toString
s match {
case '(' =>
checkClosing(ss) match {
case 0 => go(ind + 1, ss, newline(ind + 1) :: s2 :: out)
case i =>
go(
ind,
ss.drop(i),
ss.take(i).mkString("") :: s2 :: out
)
}
case ')' => go(ind - 1, ss, s2 :: newline(ind - 1) :: out)
case ',' => go(ind, ss, newline(ind) :: s2 :: out)
case _ => go(ind, ss, s2 :: out)
}
}
}
go(0, str.toList, List()).reverse.mkString("")
}
println("--- START ---")
val parser = new Parser()
// Sample inputs kept for experimentation; only `inp` is parsed below.
val in_def_maybe =
"""## Foo bar baz
| bax
|def Maybe a
| ## test
| def Just val:a
| def Nothing
""".stripMargin
val in_arr1 = "a = b -> c d"
val in3 = "(a) b = c"
val in4 = "if a then (b)"
val in2 = "(a) b = c]"
val inp = "a (b (c)) x"
println("--- PARSING ---")
val mod = parser.run(
new Reader(inp),
Map()
)
println(pretty(mod.toString))
println("=========================")
println(pretty(parser.dropMacroMeta(mod).toString))
val rmod = parser.resolveMacros(mod)
if (mod != rmod) {
println("\n---\n")
println(pretty(rmod.toString))
}
println("------")
// Round-trip check: show() should reproduce the input exactly.
println(mod.show() == inp)
println("------")
println(mod.show())
println("------")
println()
AST.main()
}

View File

@ -0,0 +1,804 @@
package org.enso.syntax.text
import org.enso.syntax.text.ast.Doc
import org.enso.syntax.text.ast.Doc._
import org.enso.syntax.text.ast.Doc.Elem._
import org.enso.Logger
import org.enso.flexer.Parser.Result
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import org.scalatest.Assertion
//class DocParserTests extends FlatSpec with Matchers {
// val logger = new Logger()
//
// def assertExpr(input: String, result: Doc): Assertion = {
// val output = DocParser.run(input)
// output match {
// case Result(_, Result.Success(value)) =>
// assert(value == result)
// assert(value.show() == input)
// case _ =>
// fail(s"Parsing documentation failed, consumed ${output.offset} chars")
// }
// }
//
// implicit class TestString(input: String) {
// def parseDocumentation(str: String): String = {
// val escape = (str: String) => str.replace("\n", "\\n")
// s"parse `${escape(str)}`"
// }
//
// private val testBase = it should parseDocumentation(input)
//
// def ?=(out: Doc): Unit = testBase in {
// assertExpr(input, out)
// }
// }
//
// //////////////////////////////////////////////////////////////////////////////
// //// Documentation Parser Test Suite /////
// //////////////////////////////////////////////////////////////////////////////
//
// //////////////////////////////////////////////////////////////////////////////
// //// Formatters //////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////////////////
//
// "*Foo*" ?= Doc(Synopsis(Section.Raw(Formatter(Formatter.Bold, "Foo"))))
// "_Foo_" ?= Doc(Synopsis(Section.Raw(Formatter(Formatter.Italic, "Foo"))))
// "~Foo~" ?= Doc(
// Synopsis(Section.Raw(Formatter(Formatter.Strikeout, "Foo")))
// )
// "`Foo`" ?= Doc(Synopsis(Section.Raw(CodeBlock.Inline("Foo"))))
// "~*Foo*~" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter(Formatter.Strikeout, Formatter(Formatter.Bold, "Foo"))
// )
// )
// )
// "~_Foo_~" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter(Formatter.Strikeout, Formatter(Formatter.Italic, "Foo"))
// )
// )
// )
// "_~Foo~_" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter(Formatter.Italic, Formatter(Formatter.Strikeout, "Foo"))
// )
// )
// )
// "_*Foo*_" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter(Formatter.Italic, Formatter(Formatter.Bold, "Foo"))
// )
// )
// )
// "*_Foo_*" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter(Formatter.Bold, Formatter(Formatter.Italic, "Foo"))
// )
// )
// )
// "*~Foo~*" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter(Formatter.Bold, Formatter(Formatter.Strikeout, "Foo"))
// )
// )
// )
// "_~*Foo*~_" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter(
// Formatter.Italic,
// Formatter(Formatter.Strikeout, Formatter(Formatter.Bold, "Foo"))
// )
// )
// )
// )
// "`import foo`" ?= Doc(
// Synopsis(
// Section.Raw(CodeBlock.Inline("import foo"))
// )
// )
//
// //////////////////////////////////////////////////////////////////////////////
// //// Unclosed formatters /////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////////////////
//
// "_*Foo*" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter.Unclosed(Formatter.Italic, Formatter(Formatter.Bold, "Foo"))
// )
// )
// )
// "~*Foo*" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter
// .Unclosed(Formatter.Strikeout, Formatter(Formatter.Bold, "Foo"))
// )
// )
// )
// "***Foo" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter(Formatter.Bold),
// Formatter.Unclosed(Formatter.Bold, "Foo")
// )
// )
// )
// "*_Foo_" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter.Unclosed(Formatter.Bold, Formatter(Formatter.Italic, "Foo"))
// )
// )
// )
// "~_Foo_" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter
// .Unclosed(Formatter.Strikeout, Formatter(Formatter.Italic, "Foo"))
// )
// )
// )
// "___Foo" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter(Formatter.Italic),
// Formatter.Unclosed(Formatter.Italic, "Foo")
// )
// )
// )
// "*~Foo~" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter
// .Unclosed(Formatter.Bold, Formatter(Formatter.Strikeout, "Foo"))
// )
// )
// )
// "_~Foo~" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter
// .Unclosed(Formatter.Italic, Formatter(Formatter.Strikeout, "Foo"))
// )
// )
// )
// "~~~Foo" ?= Doc(
// Synopsis(
// Section.Raw(
// Formatter(Formatter.Strikeout),
// Formatter.Unclosed(Formatter.Strikeout, "Foo")
// )
// )
// )
// " foo *bar* _baz *bo*_" ?= Doc(
// Synopsis(
// Section.Raw(
// 1,
// "foo ",
// Formatter(Formatter.Bold, "bar"),
// " ",
// Formatter(Formatter.Italic, "baz ", Formatter(Formatter.Bold, "bo"))
// )
// )
// )
// """foo *bar
// |*""".stripMargin ?= Doc(
// Synopsis(Section.Raw("foo ", Formatter(Formatter.Bold, "bar", Newline)))
// )
//
// """foo _foo
// |_foo2""".stripMargin ?= Doc(
// Synopsis(
// Section
// .Raw("foo ", Formatter(Formatter.Italic, "foo", Newline), "foo2")
// )
// )
//
// """foo *foo
// |*foo2""".stripMargin ?= Doc(
// Synopsis(
// Section
// .Raw("foo ", Formatter(Formatter.Bold, "foo", Newline), "foo2")
// )
// )
//
// """foo ~foo
// |~foo2""".stripMargin ?= Doc(
// Synopsis(
// Section
// .Raw("foo ", Formatter(Formatter.Strikeout, "foo", Newline), "foo2")
// )
// )
//
// //////////////////////////////////////////////////////////////////////////////
// //// Segments ////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////////////////
//
// "!Important" ?= Doc(
// Synopsis(
// Section.Marked(Section.Marked.Important, Section.Header("Important"))
// )
// )
// " ! Important" ?= Doc(
// Synopsis(
// Section
// .Marked(1, 1, Section.Marked.Important, Section.Header("Important"))
// )
// )
// " ! Important" ?= Doc(
// Synopsis(
// Section
// .Marked(3, 1, Section.Marked.Important, Section.Header("Important"))
// )
// )
// " ! Important" ?= Doc(
// Synopsis(
// Section
// .Marked(1, 4, Section.Marked.Important, Section.Header("Important"))
// )
// )
// "?Info" ?= Doc(
// Synopsis(Section.Marked(Section.Marked.Info, Section.Header("Info")))
// )
// ">Example" ?= Doc(
// Synopsis(
// Section.Marked(Section.Marked.Example, Section.Header("Example"))
// )
// )
// """?Info
// |
// |!Important""".stripMargin ?= Doc(
// Synopsis(
// Section.Marked(Section.Marked.Info, Section.Header("Info"), Newline)
// ),
// Body(
// Section.Marked(Section.Marked.Important, Section.Header("Important"))
// )
// )
// """?Info
// |
// |!Important
// |
// |>Example""".stripMargin ?= Doc(
// Synopsis(
// Section.Marked(Section.Marked.Info, Section.Header("Info"), Newline)
// ),
// Body(
// Section.Marked(
// Section.Marked.Important,
// Section.Header("Important"),
// Newline
// ),
// Section.Marked(Section.Marked.Example, Section.Header("Example"))
// )
// )
// """Foo *Foo* ~*Bar~ `foo bar baz bo`
// |
// |
// |Hello Section
// |
// |!important
// |
// |?info
// |
// |>Example""".stripMargin ?= Doc(
// Synopsis(
// Section.Raw(
// "Foo ",
// Formatter(Formatter.Bold, "Foo"),
// " ",
// Formatter(
// Formatter.Strikeout,
// Formatter.Unclosed(Formatter.Bold, "Bar")
// ),
// " ",
// CodeBlock.Inline("foo bar baz bo"),
// Newline
// )
// ),
// Body(
// Section.Raw(Section.Header("Hello Section"), Newline),
// Section
// .Marked(
// Section.Marked.Important,
// Section.Header("important"),
// Newline
// ),
// Section.Marked(Section.Marked.Info, Section.Header("info"), Newline),
// Section.Marked(Section.Marked.Example, Section.Header("Example"))
// )
// )
//
// //////////////////////////////////////////////////////////////////////////////
// //// Lists ///////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////////////////
//
// "ul:\n - Foo\n - Bar" ?= Doc(
// Synopsis(
// Section.Raw(
// "ul:",
// Newline,
// List(2, List.Unordered, " Foo", " Bar")
// )
// )
// )
// "ol:\n * Foo\n * Bar" ?= Doc(
// Synopsis(
// Section.Raw(
// "ol:",
// Newline,
// List(2, List.Ordered, " Foo", " Bar")
// )
// )
// )
// """List
// | - First unordered item
// | - Second unordered item
// | * First ordered sub item
// | * Second ordered sub item
// | - Third unordered item""".stripMargin ?= Doc(
// Synopsis(
// Section.Raw(
// "List",
// Newline,
// List(
// 2,
// List.Unordered,
// " First unordered item",
// " Second unordered item",
// List(
// 4,
// List.Ordered,
// " First ordered sub item",
// " Second ordered sub item"
// ),
// " Third unordered item"
// )
// )
// )
// )
// """List
// | - First unordered item
// | - Second unordered item
// | * First ordered sub item
// | * Second ordered sub item
// | - Third unordered item""".stripMargin ?= Doc(
// Synopsis(
// Section.Raw(
// "List",
// Newline,
// List(
// 2,
// List.Unordered,
// " First unordered item",
// " Second unordered item",
// List(
// 4,
// List.Ordered,
// " First ordered sub item",
// " Second ordered sub item"
// ),
// " Third unordered item"
// )
// )
// )
// )
// """List
// | - First unordered item
// | - Second unordered item
// | * First ordered sub item
// | * Second ordered sub item
// | - Third unordered item
// | * First ordered sub item
// | * Second ordered sub item
// | - First unordered sub item
// | - Second unordered sub item
// | * Third ordered sub item
// | - Fourth unordered item""".stripMargin ?= Doc(
// Synopsis(
// Section.Raw(
// "List",
// Newline,
// List(
// 2,
// List.Unordered,
// " First unordered item",
// " Second unordered item",
// List(
// 4,
// List.Ordered,
// " First ordered sub item",
// " Second ordered sub item"
// ),
// " Third unordered item",
// List(
// 4,
// List.Ordered,
// " First ordered sub item",
// " Second ordered sub item",
// List(
// 6,
// List.Unordered,
// " First unordered sub item",
// " Second unordered sub item"
// ),
// " Third ordered sub item"
// ),
// " Fourth unordered item"
// )
// )
// )
// )
//
// //////////////////////////////////////////////////////////////////////////////
// //// Wrong indent ////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////////////////
//
// """List
// | - First unordered item
// | - Second unordered item
// | * First ordered sub item
// | * Second ordered sub item
// | - Third unordered item
// | * First ordered sub item
// | * Second ordered sub item
// | - First unordered sub item
// | - Second unordered sub item
// | * Third ordered sub item
// | * Wrong Indent Item
// | - Fourth unordered item""".stripMargin ?= Doc(
// Synopsis(
// Section.Raw(
// "List",
// Newline,
// List(
// 2,
// List.Unordered,
// " First unordered item",
// " Second unordered item",
// List(
// 4,
// List.Ordered,
// " First ordered sub item",
// " Second ordered sub item"
// ),
// " Third unordered item",
// List(
// 4,
// List.Ordered,
// " First ordered sub item",
// " Second ordered sub item",
// List(
// 6,
// List.Unordered,
// " First unordered sub item",
// " Second unordered sub item"
// ),
// " Third ordered sub item",
// List.Indent.Invalid(3, List.Ordered, " Wrong Indent Item")
// ),
// " Fourth unordered item"
// )
// )
// )
// )
//
// //////////////////////////////////////////////////////////////////////////////
// //// Links ///////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////////////////
//
// "[Hello](Http://Google.com)" ?= Doc(
// Synopsis(
// Section.Raw(
// Link.URL(
// "Hello",
// "Http://Google.com"
// )
// )
// )
// )
// "![Media](http://foo.com)" ?= Doc(
// Synopsis(
// Section.Raw(
// Link.Image(
// "Media",
// "http://foo.com"
// )
// )
// )
// )
// "![foo)" ?= Doc(
// Synopsis(
// Section.Raw(
// Link.Invalid(
// "![foo)"
// )
// )
// )
// )
// "[foo)" ?= Doc(
// Synopsis(
// Section.Raw(
// Link.Invalid(
// "[foo)"
// )
// )
// )
// )
// "[foo]bo)" ?= Doc(
// Synopsis(
// Section.Raw(
// Link.Invalid(
// "[foo]bo)"
// )
// )
// )
// )
// "![foo]google" ?= Doc(
// Synopsis(
// Section.Raw(
// Link.Invalid(
// "![foo]google"
// )
// )
// )
// )
//
// "[foo]google" ?= Doc(
// Synopsis(
// Section.Raw(
// Link.Invalid(
// "[foo]google"
// )
// )
// )
// )
//
// """[foo]bo)
// |basdbasd""".stripMargin ?= Doc(
// Synopsis(
// Section.Raw(
// Link.Invalid(
// "[foo]bo)"
// ),
// Newline,
// "basdbasd"
// )
// )
// )
//
// //////////////////////////////////////////////////////////////////////////////
// //// Tags ////////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////////////////
//
// val allPossibleTags = Tags.Tag.Type.codes.-(Tags.Tag.Unrecognized)
//
// allPossibleTags.foreach(
// t =>
// s"${t.toString.toUpperCase()}\nFoo" ?= Doc(
// Tags(Tags.Tag(t)),
// Synopsis(Section.Raw("Foo"))
// )
// )
// "DEPRECATED in 1.0\nFoo" ?= Doc(
// Tags(Tags.Tag(Tags.Tag.Type.Deprecated, " in 1.0")),
// Synopsis(Section.Raw("Foo"))
// )
// "DEPRECATED in 1.0\nMODIFIED\nFoo" ?= Doc(
// Tags(
// Tags.Tag(Tags.Tag.Type.Deprecated, " in 1.0"),
// Tags.Tag(Tags.Tag.Type.Modified)
// ),
// Synopsis(Section.Raw("Foo"))
// )
// """ ALAMAKOTA a kot ma ale
// | foo bar""".stripMargin ?= Doc(
// Tags(Tags.Tag(3, Tags.Tag.Unrecognized, "ALAMAKOTA a kot ma ale")),
// Synopsis(Section.Raw(1, "foo bar"))
// )
//
// //////////////////////////////////////////////////////////////////////////////
// //// Multiline code //////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////////////////
//
// """afsfasfsfjanfjanfa
// |jfnajnfjadnbfjabnf
// | siafjaifhjiasjf
// | fasfknfanfijnf""".stripMargin ?= Doc(
// Synopsis(
// Section.Raw(
// "afsfasfsfjanfjanfa",
// Newline,
// "jfnajnfjadnbfjabnf",
// Newline,
// CodeBlock(
// CodeBlock.Line(3, "siafjaifhjiasjf"),
// CodeBlock.Line(3, "fasfknfanfijnf")
// )
// )
// )
// )
// """afsfasfsfjanfjanfa
// |jfnajnfjadnbfjabnf
// | siafjaifhjiasjf
// | fasfknfanfijnf
// | fasfknfanfijnf""".stripMargin ?= Doc(
// Synopsis(
// Section.Raw(
// "afsfasfsfjanfjanfa",
// Newline,
// "jfnajnfjadnbfjabnf",
// Newline,
// CodeBlock(
// CodeBlock.Line(3, "siafjaifhjiasjf"),
// CodeBlock.Line(5, "fasfknfanfijnf"),
// CodeBlock.Line(3, "fasfknfanfijnf")
// )
// )
// )
// )
// """afsfasfsfjanfjanfa
// |jfnajnfjadnbfjabnf
// | fasfknfanfijnf
// | fasfknfanfijnf
// | fasfknfanfijnf
// | fasfknfanfijnf
// | fasfknfanfijnf""".stripMargin ?= Doc(
// Synopsis(
// Section.Raw(
// "afsfasfsfjanfjanfa",
// Newline,
// "jfnajnfjadnbfjabnf",
// Newline,
// CodeBlock(
// CodeBlock.Line(3, "fasfknfanfijnf"),
// CodeBlock.Line(5, "fasfknfanfijnf"),
// CodeBlock.Line(10, "fasfknfanfijnf"),
// CodeBlock.Line(5, "fasfknfanfijnf"),
// CodeBlock.Line(3, "fasfknfanfijnf")
// )
// )
// )
// )
// """afsfasfsfjanfjanfa
// |jfnajnfjadnbfjabnf
// | fasfknfanfijnf
// | fasfknfanfijnf
// | fasfknfanfijnf
// | fasfknfanfijnf
// | fasfknfanfijnf""".stripMargin ?= Doc(
// Synopsis(
// Section.Raw(
// "afsfasfsfjanfjanfa",
// Newline,
// "jfnajnfjadnbfjabnf",
// Newline,
// CodeBlock(
// CodeBlock.Line(3, "fasfknfanfijnf"),
// CodeBlock.Line(5, "fasfknfanfijnf"),
// CodeBlock.Line(2, "fasfknfanfijnf"),
// CodeBlock.Line(5, "fasfknfanfijnf"),
// CodeBlock.Line(3, "fasfknfanfijnf")
// )
// )
// )
// )
//
// //////////////////////////////////////////////////////////////////////////////
// //// Unclassified tests //////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////////////////
//
// """
// | - bar
// | baz
// |""".stripMargin ?= Doc(
// Synopsis(
// Section.Raw(
// Newline,
// " - bar",
// Newline,
// CodeBlock(CodeBlock.Line(1, "baz")),
// Newline
// )
// )
// )
//
// """ DEPRECATED das sfa asf
// |REMOVED fdsdf
// |Construct and manage a graphical, event-driven user interface for your iOS or
// |tvOS app.
// |
// | foo *foo*""".stripMargin ?= Doc(
// Tags(
// Tags.Tag(3, Tags.Tag.Type.Deprecated, " das sfa asf"),
// Tags.Tag(0, Tags.Tag.Type.Removed, " fdsdf")
// ),
// Synopsis(
// Section.Raw(
// "Construct and manage a graphical, event-driven user interface for your iOS or",
// Newline,
// "tvOS app.",
// Newline
// )
// ),
// Body(Section.Raw(1, "foo ", Formatter(Formatter.Bold, "foo")))
// )
//
// """ DEPRECATED das sfa asf
// |REMOVED fdsdf
// |Construct and manage a graphical, event-driven user interface for your iOS or
// |tvOS app.
// |
// | foo *foo""".stripMargin ?= Doc(
// Tags(
// Tags.Tag(3, Tags.Tag.Type.Deprecated, " das sfa asf"),
// Tags.Tag(0, Tags.Tag.Type.Removed, " fdsdf")
// ),
// Synopsis(
// Section.Raw(
// "Construct and manage a graphical, event-driven user interface for your iOS or",
// Newline,
// "tvOS app.",
// Newline
// )
// ),
// Body(Section.Raw(1, "foo ", Formatter.Unclosed(Formatter.Bold, "foo")))
// )
//
// """ DEPRECATED das sfa asf
// | REMOVED
// | Foo""".stripMargin ?= Doc(
// Tags(
// Tags.Tag(4, Tags.Tag.Type.Deprecated, " das sfa asf"),
// Tags.Tag(2, Tags.Tag.Type.Removed)
// ),
// Synopsis(Section.Raw(1, "Foo"))
// )
//
// """ DEPRECATED das sfa asf
// |REMOVED fdsdf
// |Construct and manage a graphical user interface for your iOS or
// |tvOS app.
// |
// | fooo bar baz
// | dsadasfsaf asfasfas
// | asfasfa sf
// | asfas fasf """.stripMargin ?= Doc(
// Tags(
// Tags.Tag(3, Tags.Tag.Type.Deprecated, " das sfa asf"),
// Tags.Tag(0, Tags.Tag.Type.Removed, " fdsdf")
// ),
// Synopsis(
// Section.Raw(
// "Construct and manage a graphical user interface for your iOS or",
// Newline,
// "tvOS app.",
// Newline
// )
// ),
// Body(
// Section.Raw(
// 3,
// "fooo bar baz",
// Newline,
// "dsadasfsaf asfasfas",
// Newline,
// "asfasfa sf",
// Newline,
// "asfas fasf "
// )
// )
// )
//}

View File

@ -0,0 +1,463 @@
package org.enso.syntax.text
import org.enso.data.List1
import org.enso.data.Shifted
import org.enso.data.Tree
import org.enso.flexer.Reader
import org.enso.syntax.text.AST.Block.OptLine
import org.enso.syntax.text.AST._
import org.enso.syntax.text.AST.conversions._
import org.enso.syntax.text.ast.DSL._
import org.scalatest._
class ParserTest extends FlatSpec with Matchers {
def assertModule(input: String, result: AST): Assertion = {
val parser = Parser()
val module = parser.run(new Reader(input))
val rmodule = parser.dropMacroMeta(module)
assert(rmodule == result)
assert(module.show() == new Reader(input).toString())
}
def assertExpr(input: String, result: AST): Assertion = {
val parser = Parser()
val module = parser.run(new Reader(input))
val rmodule = parser.dropMacroMeta(module)
val tail = module.lines.tail
if (!tail.forall(_.elem.isEmpty)) fail("Multi-line block")
else {
rmodule.lines.head.elem match {
case None => fail("Empty expression")
case Some(e) =>
assert(e == result)
assert(module.show() == new Reader(input).toString())
}
}
}
def assertIdentity(input: String): Assertion = {
val module = Parser().run(new Reader(input))
assert(module.show() == new Reader(input).toString())
}
implicit class TestString(input: String) {
def parseTitle(str: String): String = {
val maxChars = 20
val escape = (str: String) => str.replace("\n", "\\n")
val str2 = escape(str)
val str3 =
if (str2.length < maxChars) str2
else str2.take(maxChars) + "..."
s"parse `$str3`"
}
private val testBase = it should parseTitle(input)
def ?=(out: AST) = testBase in { assertExpr(input, out) }
def ??=(out: Module) = testBase in { assertModule(input, out) }
def testIdentity = testBase in { assertIdentity(input) }
}
//////////////////////////////////////////////////////////////////////////////
//// Identifiers /////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
"_" ?= "_"
"Name" ?= "Name"
"name" ?= "name"
"name'" ?= "name'"
"name''" ?= "name''"
"name'a" ?= Ident.InvalidSuffix("name'", "a")
"name_" ?= "name_"
"name_'" ?= "name_'"
"name'_" ?= Ident.InvalidSuffix("name'", "_")
"name`" ?= "name" $ Invalid.Unrecognized("`")
//////////////////////////////////////////////////////////////////////////////
//// Operators ///////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
import App.Section._
import App.{Section => Sect}
"++" ?= Sides("++")
"==" ?= Sides("==")
":" ?= Sides(":")
"," ?= Sides(",")
"." ?= Sides(".")
".." ?= Sides("..")
"..." ?= Sides("...")
">=" ?= Sides(">=")
"<=" ?= Sides("<=")
"/=" ?= Sides("/=")
"+=" ?= Mod("+")
"-=" ?= Mod("-")
"===" ?= Ident.InvalidSuffix("==", "=")
"...." ?= Ident.InvalidSuffix("...", ".")
">==" ?= Ident.InvalidSuffix(">=", "=")
"+==" ?= Ident.InvalidSuffix("+", "==")
//////////////////////////////////////////////////////////////////////////////
//// Precedence + Associativity //////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
"a b" ?= ("a" $_ "b")
"a + b" ?= ("a" $_ "+") $__ "b"
"a + b + c" ?= ("a" $_ "+" $_ "b") $_ "+" $_ "c"
"a , b , c" ?= "a" $_ "," $_ ("b" $_ "," $_ "c")
"a + b * c" ?= "a" $_ "+" $_ ("b" $_ "*" $_ "c")
"a * b + c" ?= ("a" $_ "*" $_ "b") $_ "+" $_ "c"
"a+ b" ?= ("a" $ "+") $$_ "b"
"a +b" ?= "a" $_ ("+" $ "b")
"a+ +b" ?= ("a" $ "+") $$_ ("+" $ "b")
"*a+" ?= ("*" $ "a") $ "+"
"+a*" ?= "+" $ ("a" $ "*")
"+ <$> a <*> b" ?= (Sides("+") $_ "<$>" $_ "a") $_ "<*>" $_ "b"
"+ * ^" ?= Sect.Right("+", 1, Sect.Right("*", 1, Sides("^")))
"+ ^ *" ?= Sect.Right("+", 1, Sect.Left(Sides("^"), 1, "*"))
"^ * +" ?= Sect.Left(Sect.Left(Sides("^"), 1, "*"), 1, "+")
"* ^ +" ?= Sect.Left(Sect.Right("*", 1, Sides("^")), 1, "+")
"^ + *" ?= App.Infix(Sides("^"), 1, "+", 1, Sides("*"))
"* + ^" ?= App.Infix(Sides("*"), 1, "+", 1, Sides("^"))
"a = b.c.d = 10" ?= "a" $_ "=" $_ (("b" $ "." $ "c" $ "." $ "d") $_ "=" $_ 10)
"v = f x=1 y=2" ?= "v" $_ "=" $_ ("f" $_ ("x" $ "=" $ 1) $_ ("y" $ "=" $ 2))
"v' = v .x=1" ?= "v'" $_ "=" $_ ("v" $_ ("." $ "x" $ "=" $ 1))
//////////////////////////////////////////////////////////////////////////////
//// Arrows //////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
"a -> b" ?= "a" $_ "->" $_ "b"
"a -> b -> c" ?= "a" $_ "->" $_ ("b" $_ "->" $_ "c")
"a b -> c d" ?= ("a" $_ "b") $_ "->" $_ ("c" $_ "d")
"a b-> c d" ?= "a" $_ ("b" $_ "->" $_ ("c" $_ "d"))
"a = b -> c d" ?= "a" $_ "=" $_ ("b" $_ "->" $_ ("c" $_ "d"))
"a = b-> c d" ?= "a" $_ "=" $_ ("b" $_ "->" $_ ("c" $_ "d"))
"a + b -> c d" ?= ("a" $_ "+" $_ "b") $_ "->" $_ ("c" $_ "d")
"a + b-> c d" ?= "a" $_ "+" $_ ("b" $_ "->" $_ ("c" $_ "d"))
"a + b-> c = d" ?= "a" $_ "+" $_ ("b" $_ "->" $_ ("c" $_ "=" $_ "d"))
"a = b -> c = d" ?= "a" $_ "=" $_ ("b" $_ "->" $_ ("c" $_ "=" $_ "d"))
//////////////////////////////////////////////////////////////////////////////
//// Layout //////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
"" ??= Module(OptLine())
"\n" ??= Module(OptLine(), OptLine())
" \n " ??= Module(OptLine(2), OptLine(1))
"\n\n" ??= Module(OptLine(), OptLine(), OptLine())
" \n \n " ??= Module(OptLine(1), OptLine(2), OptLine(3))
//////////////////////////////////////////////////////////////////////////////
//// Numbers /////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
"7" ?= 7
"07" ?= Number("07")
"10_7" ?= Number(10, 7)
"16_ff" ?= Number(16, "ff")
"16_" ?= Number.DanglingBase("16")
"7.5" ?= App.Infix(7, 0, Opr("."), 0, 5)
//////////////////////////////////////////////////////////////////////////////
//// UTF Surrogates //////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
"\uD800\uDF1E" ?= Invalid.Unrecognized("\uD800\uDF1E")
//////////////////////////////////////////////////////////////////////////////
//// Text ////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
import Text.Segment.implicits.txtFromString
val q1 = Text.Quote.Single
val q3 = Text.Quote.Triple
"'" ?= Text.Unclosed(Text(Text.Body(q1)))
"''" ?= Text(Text.Body(q1))
"'''" ?= Text.Unclosed(Text(Text.Body(q3)))
"''''" ?= Text.Unclosed(Text(Text.Body(q3, "'")))
"'''''" ?= Text.Unclosed(Text(Text.Body(q3, "''")))
"''''''" ?= Text(Text.Body(q3))
"'''''''" ?= Text(Text.Body(q3)) $ Text.Unclosed(Text(Text.Body(q1)))
"'a'" ?= Text(Text.Body(q1, "a"))
"'a" ?= Text.Unclosed(Text(Text.Body(q1, "a")))
"'a'''" ?= Text(Text.Body(q1, "a")) $ Text(Text.Body(q1))
"'''a'''" ?= Text(Text.Body(q3, "a"))
"'''a'" ?= Text.Unclosed(Text(Text.Body(q3, "a'")))
"'''a''" ?= Text.Unclosed(Text(Text.Body(q3, "a''")))
"\"" ?= Text.Unclosed(Text.Raw(Text.Body(q1)))
"\"\"" ?= Text.Raw(Text.Body(q1))
"\"\"\"" ?= Text.Unclosed(Text.Raw(Text.Body(q3)))
"\"\"\"\"" ?= Text.Unclosed(Text.Raw(Text.Body(q3, "\"")))
"\"\"\"\"\"" ?= Text.Unclosed(Text.Raw(Text.Body(q3, "\"\"")))
"\"\"\"\"\"\"" ?= Text.Raw(Text.Body(q3))
"\"a\"" ?= Text.Raw(Text.Body(q1, "a"))
"\"a" ?= Text.Unclosed(Text.Raw(Text.Body(q1, "a")))
"\"a\"\"\"" ?= Text.Raw(Text.Body(q1, "a")) $ Text.Raw(Text.Body(q1))
"\"\"\"a\"\"\"" ?= Text.Raw(Text.Body(q3, "a"))
"\"\"\"a\"" ?= Text.Unclosed(Text.Raw(Text.Body(q3, "a\"")))
"\"\"\"a\"\"" ?= Text.Unclosed(Text.Raw(Text.Body(q3, "a\"\"")))
"\"\"\"\"\"\"\"" ?= Text.Raw(Text.Body(q3)) $ Text.Unclosed(
Text.Raw(Text.Body(q1))
)
"'''\nX\n Y\n'''" ?= Text(
Text.BodyOf(
q3,
List1(
Text.LineOf(0, Nil),
Text.LineOf(0, List("X")),
Text.LineOf(1, List("Y")),
Text.LineOf(0, Nil)
)
)
)
//// Escapes ////
Text.Segment.Escape.Character.codes.foreach(
i => s"'\\$i'" ?= Text(Text.Body(q1, Text.Segment._Escape(i)))
)
Text.Segment.Escape.Control.codes.foreach(
i => s"'\\$i'" ?= Text(Text.Body(q1, Text.Segment._Escape(i)))
)
"'\\\\'" ?= Text(
Text.Body(q1, Text.Segment._Escape(Text.Segment.Escape.Slash))
)
"'\\''" ?= Text(
Text.Body(q1, Text.Segment._Escape(Text.Segment.Escape.Quote))
)
"'\\\"'" ?= Text(
Text.Body(q1, Text.Segment._Escape(Text.Segment.Escape.RawQuote))
)
"'\\" ?= Text.Unclosed(Text(Text.Body(q1, "\\")))
"'\\c'" ?= Text(
Text.Body(q1, Text.Segment._Escape(Text.Segment.Escape.Invalid("c")))
)
"'\\cd'" ?= Text(
Text.Body(q1, Text.Segment._Escape(Text.Segment.Escape.Invalid("c")), "d")
)
"'\\123d'" ?= Text(
Text.Body(q1, Text.Segment._Escape(Text.Segment.Escape.Number(123)), "d")
)
//// Interpolation ////
"'a`b`c'" ?= Text(Text.Body(q1, "a", Text.Segment._Expr(Some("b")), "c"))
"'a`b 'c`d`e' f`g'" ?= {
val bd = "b" $_ Text(Text.Body(q1, "c", Text.Segment._Expr(Some("d")), "e")) $_ "f"
Text(Text.Body(q1, "a", Text.Segment._Expr(Some(bd)), "g"))
}
//// // // Comments
////// expr("#" , Comment)
////// expr("#c" , Comment :: CommentBody("c"))
//// // expr("#c\na" , Comment :: CommentBody("c") :: EOL :: Var("a"))
//// // expr("#c\n a" , Comment :: CommentBody("c") :: EOL :: CommentBody(" a"))
//// // expr(" #c\n a" , Comment :: CommentBody("c") :: EOL :: Var("a"))
//// // expr(" #c\n a" , Comment :: CommentBody("c") :: EOL :: CommentBody(" a"))
//// // expr("a#c" , Var("a") :: Comment :: CommentBody("c"))
//// // expr("a # c" , Var("a") :: Comment :: CommentBody(" c"))
//// // expr("a#" , Var("a") :: Comment)
//// // expr("a#\nb" , Var("a") :: Comment :: EOL :: Var("b"))
//// // expr("a#\n b" , Var("a") :: Comment :: EOL :: CommentBody(" b"))
//// //
//// // // Disabled
//// // expr("a #= b" , Var("a") :: DisabledAssignment :: Var("b"))
//// //
//
// //////////////////////////////////////////////////////////////////////////////
// //// Comments ////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////////////////
//
//// "foo ##L1" ?= "foo" $___ Comment.SingleLine("L1")
//// "##\n L1\n L2" ?= Comment.MultiLine(0, List("", " L1", " L2"))
//// "##L1\nL2" ??= Module(OptLine(Comment.SingleLine("L1")), OptLine(Cons("L2")))
//// "foo #a b" ?= "foo" $_ Comment.Disable("a" $_ "b")
//
//////////////////////////////////////////////////////////////////////////////
//// Flags ///////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
"x = skip a" ?= "x" $_ "=" $_ "a"
"x = skip a.fn" ?= "x" $_ "=" $_ "a"
"x = skip fn a" ?= "x" $_ "=" $_ "a"
"x = skip (a)" ?= "x" $_ "=" $_ "a"
"x = skip (a.fn)" ?= "x" $_ "=" $_ "a"
"x = skip (a + b)" ?= "x" $_ "=" $_ "a"
"x = skip ((a + b) + c)" ?= "x" $_ "=" $_ "a"
"x = skip ()" ?= "x" $_ "=" $_ Group()
// "a = freeze b c" ?= "a" $_ "#=" $_ ("b" $_ "c") // freeze
//////////////////////////////////////////////////////////////////////////////
//// Mixfixes ////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
  // Builds an ambiguous-macro node with a single segment `head` and the set
  // of possible continuation paths `lst` (each inner list is one sequence of
  // expected follow-up segments).
  def amb(head: AST, lst: List[List[AST]]): Macro.Ambiguous =
    Macro.Ambiguous(
      Shifted.List1(Macro.Ambiguous.Segment(head)),
      Tree(lst.map(_ -> (())): _*)
    )
  // Overload that additionally attaches a parsed `body` to the segment.
  def amb(head: AST, lst: List[List[AST]], body: SAST): Macro.Ambiguous =
    Macro.Ambiguous(
      Shifted.List1(Macro.Ambiguous.Segment(head, Some(body))),
      Tree(lst.map(_ -> (())): _*)
    )
  // Ambiguous "(" group whose body `t` is shifted by `i` spaces.
  def _amb_group_(i: Int)(t: AST): Macro.Ambiguous =
    amb("(", List(List(")")), Shifted(i, t))
  // Convenience variants for 0-, 1- and 2-space offsets.
  val amb_group   = _amb_group_(0)(_)
  val amb_group_  = _amb_group_(1)(_)
  val amb_group__ = _amb_group_(2)(_)
  // A bare unclosed "(" with no body at all.
  def group_(): Macro.Ambiguous = amb("(", List(List(")")))
  // Ambiguous "if" macro (continuations "then" or "then else"), body shifted
  // by `i` spaces.
  def _amb_if(i: Int)(t: AST) =
    amb("if", List(List("then"), List("then", "else")), Shifted(i, t))
  // Convenience variants for 0-, 1- and 2-space offsets.
  val amb_if   = _amb_if(0)(_)
  val amb_if_  = _amb_if(1)(_)
  val amb_if__ = _amb_if(2)(_)
"()" ?= Group()
"( )" ?= Group()
"( ( ) )" ?= Group(Group())
"(a)" ?= Group("a")
"((a))" ?= Group(Group("a"))
"(((a)))" ?= Group(Group(Group("a")))
"( ( a ))" ?= Group(Group("a"))
"(a) (b)" ?= Group("a") $_ Group("b")
"(" ?= amb("(", List(List(")")))
"((" ?= amb_group(group_())
"import Std . Math .Vector".stripMargin ?= Import("Std", "Math", "Vector")
"""def Maybe a
| def Just val:a
| def Nothing""".stripMargin ?= {
val defJust = Def("Just", List("val" $ ":" $ "a"))
val defNothing = Def("Nothing")
Def(
"Maybe",
List("a"),
Some(
Block(
Block.Continuous,
4,
Block.Line(defJust),
List(Block.Line(Some(defNothing)))
)
)
)
}
//
// """foo ->
// | bar
// |""".stripMargin ?= "foo" $_ "->" $_ Block(
// Block.Discontinuous,
// 4,
// "bar",
// None
// )
//
"if a then b" ?= Mixfix(
List1[AST.Ident]("if", "then"),
List1[AST]("a", "b")
)
"if a then b else c" ?= Mixfix(
List1[AST.Ident]("if", "then", "else"),
List1[AST]("a", "b", "c")
)
"if a" ?= amb_if_("a": AST)
"(if a) b" ?= Group(amb_if_("a": AST)) $_ "b"
"if (a then b " ?= amb_if_(amb_group("a" $_ "then" $_ "b"))
// //////////////////////////////////////////////////////////////////////////////
// //// Foreign /////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////////////////
//
// "f = foreign Python3\n a" ?= "f" $_ "=" $_ Foreign(1, "Python3", List("a"))
//
// val pyLine1 = "import re"
// val pyLine2 = """re.match(r"[^@]+@[^@]+\.[^@]+", "foo@ds.pl") != None"""
// s"""validateEmail address = foreign Python3
// | $pyLine1
// | $pyLine2""".stripMargin ?= ("validateEmail" $_ "address") $_ "=" $_
// Foreign(4, "Python3", List(pyLine1, pyLine2))
//////////////////////////////////////////////////////////////////////////////
//// Large Input /////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
// ("(" * 33000).testIdentity // FIXME Stack Overflow
// ("OVERFLOW " * 5000).testIdentity
// ("\uD800\uDF1E " * 10000).testIdentity
//////////////////////////////////////////////////////////////////////////////
//// OTHER (TO BE PARTITIONED)////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
"\na \nb \n".testIdentity
"f = \n\n\n".testIdentity
" \n\n\n f\nf".testIdentity
"f = \n\n x ".testIdentity
" a\n b\n c".testIdentity
"f =\n\n x\n\n y".testIdentity
"""
a
b
c
d
e
f g h
""".testIdentity
"""
# pop1: adults
# pop2: children
# pop3: mutants
Selects the 'fittest' individuals from population and kills the rest!
log
'''
keepBest
`pop1`
`pop2`
`pop3`
'''
unique xs
= xs.at(0.0) +: [1..length xs -1] . filter (isUnique xs) . map xs.at
isUnique xs i ####
= xs.at(i).score != xs.at(i-1).score
pop1<>pop2<>pop3 . sorted . unique . take (length pop1) . pure
""".testIdentity
///////////////////////
//// Preprocessing ////
///////////////////////
"\t" ??= Module(OptLine(4))
"\r" ??= Module(OptLine(), OptLine())
"\r\n" ??= Module(OptLine(), OptLine())
}
////////////////////////////////////////////////////////////////////////////
// TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO //
////////////////////////////////////////////////////////////////////////////
// [ ] operator blocks
// [ ] warnings in scala code
// [ ] Undefined parsing
// [ ] All block types
// [ ] Unary minus

View File

@ -1,65 +0,0 @@
package org.enso.syntax.text.lexer
import org.scalameter.api._
import scala.math.pow
import org.enso.syntax.text.parser.Parser
import org.enso.syntax.text.parser.BParser
import java.io.StringReader
// ScalaMeter offline benchmark over generated source inputs of exponentially
// growing sizes. Most measurements are commented out; only the BParser and
// SParser runs are active.
object RangeBenchmark extends Bench.OfflineReport {
  // Input-size generators: powers of two in the ranges 2^14..2^16,
  // 2^15..2^17 and 2^16..2^18 respectively.
  val exp14 = Gen.exponential("size")(pow(2, 14).toInt, pow(2, 16).toInt, 2)
  val exp15 = Gen.exponential("size")(pow(2, 15).toInt, pow(2, 17).toInt, 2)
  val exp16 = Gen.exponential("size")(pow(2, 16).toInt, pow(2, 18).toInt, 2)
  // Synthetic programs: one long identifier, repeated applications, and two
  // flavours of expression sequences (the second containing interpolation).
  val longVars     = for { i <- exp16 } yield "test12" * i
  val multipleVars = for { i <- exp16 } yield "test1 (A " * i
  val exprs1       = for { i <- exp14 } yield "a += 1\nb == '\n'\n" * i
  val exprs2       = for { i <- exp14 } yield "a += 1\nb == '`a`!'\n" * i
  performance of "Range" in {
    //    measure method "exprs1" in {
    //      using(exprs1) in {
    //        input => new Lexer(input).lexAll()
    //      }
    //    }
    //    measure method "exprs2" in {
    //      using(exprs2) in {
    //        input => new Lexer(input).lexAll()
    //      }
    //    }
    //    measure method "longVar" in {
    //      using(longVars) in {
    //        input => new Lexer(input).lexAll()
    //      }
    //    }
    //    measure method "multipleVars" in {
    //      using(multipleVars) in {
    //        input => new Lexer(input).lexAll()
    //      }
    //    }
    //    measure method "Parser-multipleVars" in {
    //      using(multipleVars) in { input =>
    //        {
    //          (new Parser(new StringReader(input))).parse
    //        }
    //      }
    //    }
    measure method "BParser-multipleVars" in {
      using(multipleVars) in { input =>
        {
          (new BParser(new StringReader(input))).parse
        }
      }
    }
    // NOTE(review): `SParser` is not among the imports visible here — verify
    // it resolves (or that this benchmark is intentionally dead code).
    measure method "SParser-multipleVars" in {
      using(multipleVars) in { input =>
        {
          (new SParser(new StringReader(input))).parse
        }
      }
    }
  }
}

File diff suppressed because it is too large Load Diff

View File

@ -1,579 +0,0 @@
package org.enso.syntax.text.lexer;
import java.util.Stack;
import static org.enso.syntax.text.xx.Parser.Lexer.*;
/**
* Enso lexical scanner
*/
%%
%{
int currentBlock = 0;
///////////////////////
// Indent Management //
///////////////////////
private Stack<Integer> indentStack = new Stack<Integer>();
public final void pushIndent(int i) {
indentStack.push(i);
}
public final Integer popIndent() {
return indentStack.pop();
}
public final Integer indent() {
return indentStack.peek();
}
/////////////////////////////
// Lexing State Management //
/////////////////////////////
private Stack<Integer> zzLexicalStateStack = new Stack<Integer>();
public final void pushState(int state) {
zzLexicalStateStack.push(zzLexicalState);
yybegin(state);
}
public final void popState() {
yybegin(zzLexicalStateStack.pop());
}
//////////////////////
// Quote Management //
//////////////////////
private Stack<Integer> stringQuoteSizeStack = new Stack<Integer>();
public final void pushQuoteSize(int quote) {
stringQuoteSizeStack.push(quote);
}
public final Integer popQuoteSize() {
return stringQuoteSizeStack.pop();
}
public final Integer quoteSize() {
return stringQuoteSizeStack.peek();
}
////////////////////
// Number Parsing //
////////////////////
private String numberPart1 = "";
private String numberPart2 = "";
private String numberPart3 = "";
/////////
// API //
/////////
public boolean done(){
return zzAtEOF;
}
public int lex() throws java.io.IOException {
return yylex();
}
private int lastOffset = 0;
public Token token(Symbol symbol) {
int offset = lastOffset;
lastOffset = 0;
return new Token(symbol,offset,yylength());
}
private void rewind() {
zzMarkedPos -= yylength();
}
public Token value;
//////////////////
// Constructors //
//////////////////
int var() {value = token(new Var (yytext())); return VAR;}
int cons() {value = token(new Cons (yytext())); return CONS;}
// Utils
void whitespace() {lastOffset += yylength();}
// Symbol invalid(InvalidReason reason) {
// return new Invalid (reason);
// }
// // Identifiers
// Token var_() {return token(new Var (yytext()));}
// Token cons_() {return token(new Cons (yytext()));}
// Token wildcard_() {return token(Wildcard$.MODULE$);}
// Token var() {pushState(CHECK_IDENT_SFX); return var_();}
// Token cons() {pushState(CHECK_IDENT_SFX); return cons_();}
// Token wildcard() {pushState(CHECK_IDENT_SFX); return wildcard_();}
// Token errorSfx() {return token(invalid(new UnexpectedSuffix(yytext())));}
// // Operators
// Token operator_() {return token(new Operator(yytext()));}
// Token modifier_() {return token(new Modifier(yytext()));}
// Token disabled_() {return token(DisabledAssignment$.MODULE$);}
// Token operator() {pushState(CHECK_OP_SFX); return operator_();}
// Token modifier() {pushState(CHECK_OP_SFX); return modifier_();}
// Token disabled() {pushState(CHECK_OP_SFX); return disabled_();}
// Layout
int blockBegin(int i) {pushIndent(i); value = token(BlockBegin$.MODULE$); return BLOCK_BEGIN;}
int blockEnd() {popIndent(); value = token(BlockEnd$.MODULE$); return BLOCK_END;}
int blockInvalid() {value = token(BlockInvalid$.MODULE$); return BLOCK_INVALID;}
int newline() {value = token(EOL$.MODULE$); return EOL;}
int groupBegin() {value = token(GroupBegin$.MODULE$); return GROUP_BEGIN;}
int groupEnd() {value = token(GroupEnd$.MODULE$); return GROUP_END;}
// Token groupEnd() {return token(GroupEnd$.MODULE$);}
// Token listBegin() {return token(ListBegin$.MODULE$);}
// Token listEnd() {return token(ListEnd$.MODULE$);}
// Token recordBegin() {return token(RecordBegin$.MODULE$);}
// Token recordEnd() {return token(RecordEnd$.MODULE$);}
// Token unmatched() {return token(new Unmatched(yytext()));}
// // Numbers
// Token number() {
// Token num = token(Number.fromString(numberPart1,numberPart2,numberPart3));
// numberPart1 = "";
// numberPart2 = "";
// numberPart3 = "";
// return num;
// }
// // Text
// Token textBegin() {return token(TextBegin$.MODULE$);}
// Token textEnd() {return token(TextEnd$.MODULE$);}
// Token textRawBegin() {return token(TextRawBegin$.MODULE$);}
// Token textRawEnd() {return token(TextRawEnd$.MODULE$);}
// Token text() {return token(new Text(yytext()));}
// Token textIntBegin() {return token(TextInterpolateBegin$.MODULE$);}
// Token textIntEnd() {return token(TextInterpolateEnd$.MODULE$);}
// // Text Escapes
// Token slashEsc() {return token(new TextEscape(SlashEscape$.MODULE$));}
// Token quoteEsc() {return token(new TextEscape(QuoteEscape$.MODULE$));}
// Token rawQuoteEsc() {return token(new TextEscape(RawQuoteEscape$.MODULE$));}
// Token charEsc(char c) {return token(new TextEscape(CharEscape.fromChar(c)));}
// Token ctrlEsc(int c) {return token(new TextEscape(new CtrlEscape(c)));}
// Token intEsc() {
// return token(new TextEscape(IntEscape.fromString(yytext().substring(1))));
// }
// Token uni16Esc() {
// String scode = yytext().substring(2);
// return token(new TextEscape(new Uni16Escape (Integer.parseInt(scode,16))));
// }
// Token uni32Esc() {
// return token(new TextEscape(Uni32Escape.fromString(yytext().substring(2))));
// }
// Token uni21Esc() {
// String scode = yytext();
// scode = scode.substring(3,scode.length()-1);
// return token(new TextEscape(Uni21Escape.fromString(scode)));
// }
// Token invalidCharEsc(){
// return token(new TextEscape(new InvalidCharEscape(yytext().charAt(1))));
// }
// // Comment
// Token comment() {return token(Comment$.MODULE$);}
// Token commentBody() {return token(new CommentBody(yytext()));}
%}
%init{
// pushState(NEWLINE);
pushIndent(0);
%init}
/////////////
// Options //
/////////////
%class Scanner
%int
%public
// %type int
%line
%column
%char
%unicode
// %apiprivate
// %debug
/////////////////
// Definitions //
/////////////////
// Prims
alpha_upper = [A-Z]
alpha_lower = [a-z]
alpha = {alpha_lower} | {alpha_upper}
alphanum = {alpha} | {digit}
whitespace = [\ ]
newline = \r|\n|\r\n
// Identifiers
ident_body_char = {alphanum} | _
ident_body = {ident_body_char}*(\')*
var = {alpha_lower}{ident_body}
cons = {alpha_upper}{ident_body}
wildcard = _
ident_err_sfx_c = [^\`\!\@\#\$\%\^\&\*\(\)\-\=\+\[\]\{\}\|\;\:\<\>\,\.\/\ \t\r\n\\]
ident_err_sfx = {ident_err_sfx_c}+
// Operators
operator_char = [\!\$\%\&\*\+\-\/\<\>\?\^\~\|\:\\]
operator = {operator_char}+
modifier = {operator}=
operator_err_sfx_c = {operator_char} | (\=) | (\,) | (\.)
operator_err_sfx = {operator_err_sfx_c}+
// Numbers
digit = [0-9]
hex = [0-9a-fA-F]
decimal = {digit}+
////////////
// States //
////////////
%xstate CHECK_IDENT_SFX
%xstate CHECK_OP_SFX
%xstate NUMBER_PHASE2
%xstate NUMBER_PHASE3
%xstate TEXT
%xstate TEXT_RAW
%xstate TEXT_ESCAPE
%xstate COMMENT
%xstate COMMENT_LINE
%xstate NEWLINE
%xstate BLOCK_ENDING
%state TEXT_INTERPOLATE
%%
// ///////////////////////
// // Unexpected Suffix //
// ///////////////////////
// <CHECK_IDENT_SFX> {
// {ident_err_sfx} {return errorSfx();}
// [^] {rewind(); popState();}
// }
// <CHECK_OP_SFX> {
// {operator_err_sfx} {return errorSfx();}
// [^] {rewind(); popState();}
// }
// //////////
// // Text //
// //////////
// <TEXT_INTERPOLATE> {
// (\`) {popState(); return textIntEnd();}
// }
// <TEXT> {
// (\')+ {
// if (yylength() == quoteSize()) {
// popState();
// popQuoteSize();
// return textEnd();
// } else {
// return text();
// }
// }
// // Prim Escapes
// (\\\\) {return slashEsc();}
// (\\\') {return quoteEsc();}
// (\\\") {return rawQuoteEsc();}
// (\\[0-9]+) {return intEsc();}
// // Escape Characters (https://en.wikipedia.org/wiki/String_literal)
// (\\a) {return charEsc('\u0007');} // alert
// (\\b) {return charEsc('\u0008');} // backspace
// (\\f) {return charEsc('\u000C');} // form feed
// (\\n) {return charEsc('\n') ;} // line feed
// (\\r) {return charEsc('\r') ;} // carriage return
// (\\t) {return charEsc('\u0009');} // horizontal tab
// (\\v) {return charEsc('\u000B');} // vertical tab
// (\\e) {return charEsc('\u001B');} // escape character
// // Unicode Escapes
// (\\u{hex}{hex}{hex}{hex}) {return uni16Esc();}
// (\\U{hex}{hex}{hex}{hex}{hex}{hex}{hex}{hex}) {return uni32Esc();}
// (\\u\{{hex}*\}) {return uni21Esc();}
// // Control Characters (https://en.wikipedia.org/wiki/Control_character)
// (\\NUL) {return ctrlEsc(0x00);}
// (\\SOH) {return ctrlEsc(0x01);}
// (\\STX) {return ctrlEsc(0x02);}
// (\\ETX) {return ctrlEsc(0x03);}
// (\\EOT) {return ctrlEsc(0x04);}
// (\\ENQ) {return ctrlEsc(0x05);}
// (\\ACK) {return ctrlEsc(0x06);}
// (\\BEL) {return ctrlEsc(0x07);}
// (\\BS) {return ctrlEsc(0x08);}
// (\\TAB) {return ctrlEsc(0x09);}
// (\\LF) {return ctrlEsc(0x0A);}
// (\\VT) {return ctrlEsc(0x0B);}
// (\\FF) {return ctrlEsc(0x0C);}
// (\\CR) {return ctrlEsc(0x0D);}
// (\\SO) {return ctrlEsc(0x0E);}
// (\\SI) {return ctrlEsc(0x0F);}
// (\\DLE) {return ctrlEsc(0x10);}
// (\\DC1) {return ctrlEsc(0x11);}
// (\\DC2) {return ctrlEsc(0x12);}
// (\\DC3) {return ctrlEsc(0x13);}
// (\\DC4) {return ctrlEsc(0x14);}
// (\\NAK) {return ctrlEsc(0x15);}
// (\\SYN) {return ctrlEsc(0x16);}
// (\\ETB) {return ctrlEsc(0x17);}
// (\\CAN) {return ctrlEsc(0x18);}
// (\\EM) {return ctrlEsc(0x19);}
// (\\SUB) {return ctrlEsc(0x1A);}
// (\\ESC) {return ctrlEsc(0x1B);}
// (\\FS) {return ctrlEsc(0x1C);}
// (\\GS) {return ctrlEsc(0x1D);}
// (\\RS) {return ctrlEsc(0x1E);}
// (\\US) {return ctrlEsc(0x1F);}
// (\\DEL) {return ctrlEsc(0x7F);}
// // Invalid Escapes
// (\\([a-z]|[A-Z])) {return invalidCharEsc();}
// {newline} {return newline();}
// [^\'\`\n\r\\]+ {return text();}
// (\`) {
// pushState(TEXT_INTERPOLATE);
// return textIntBegin();
// }
// }
// <TEXT_RAW> {
// (\")+ {
// if (yylength() == quoteSize()) {
// popState();
// popQuoteSize();
// return textRawEnd();
// } else {
// return text();
// }
// }
// // Prim Escapes
// (\\\') {return quoteEsc();}
// (\\\") {return rawQuoteEsc();}
// (\\) {return text();}
// {newline} {return newline();}
// [^\"\n\r\\]+ {return text();}
// }
// ////////////////////////////////
// // Number (e.g. 16_ff0000.ff) //
// ////////////////////////////////
// <NUMBER_PHASE2> {
// _[a-zA-Z0-9]+ {
// numberPart1 = numberPart2;
// numberPart2 = yytext().substring(1);
// popState();
// pushState(NUMBER_PHASE3);
// }
// [^] {rewind(); popState(); return number();}
// <<EOF>> {return number();}
// }
// <NUMBER_PHASE3> {
// .[a-zA-Z0-9]+ {
// numberPart3=yytext().substring(1);
// popState();
// return number();
// }
// [^] {rewind(); popState(); return number();}
// <<EOF>> {return number();}
// }
// //////////////
// // Comments //
// //////////////
// <COMMENT> {
// [^\n\r]+ {return commentBody();}
// {newline} {popState(); pushState(COMMENT_LINE); return newline();}
// }
// <COMMENT_LINE> {
// {whitespace}+ {
// popState();
// if(yylength() > indent) {
// pushState(COMMENT);
// } else {
// pushState(NEWLINE);
// }
// rewind();
// }
// [^] {
// popState();
// pushState(NEWLINE);
// rewind();
// }
// }
///////////////////////
// Indent Management //
///////////////////////
<NEWLINE> {
{whitespace}+{newline} {
whitespace();
return newline();
}
{whitespace}+ {
whitespace();
popState();
currentBlock = yylength();
if (currentBlock > indent()) {
return blockBegin(currentBlock);
} else if (currentBlock < indent()) {
pushState(BLOCK_ENDING);
}
}
[^] {
rewind();
popState();
currentBlock = 0;
if (indent() > 0) {
pushState(BLOCK_ENDING);
} else {
return newline();
}
}
}
<BLOCK_ENDING> {
[^] {
rewind();
if(currentBlock == indent()) {
popState();
} else if(currentBlock < indent()) {
return blockEnd();
} else {
popState();
return blockInvalid();
}
}
}
// ///////////////////
// // Default Rules //
// ///////////////////
// // Identifiers
{var} {return var();}
{cons} {return cons();}
// {wildcard} {return wildcard();}
// // Operators
// {operator} {return operator();}
// (\=) {return operator();}
// (\=\=) {return operator();}
// (\>\=) {return operator();}
// (\<\=) {return operator();}
// (\/\=) {return operator();}
// (\,) {return operator();}
// (\.) {return operator_();}
// (\.\.) {return operator();}
// (\.\.\.) {return operator();}
// {modifier} {return modifier();}
// (\#\=) {return disabled();}
// Layout
(\() {return groupBegin();}
(\)) {return groupEnd();}
// (\[) {return listBegin();}
// (\]) {return listEnd();}
// (\{) {return recordBegin();}
// (\}) {return recordEnd();}
// // Numbers
// {decimal} {numberPart2=yytext(); pushState(NUMBER_PHASE2);}
// // Text
// (\')+ {
// int size = yylength();
// if(size == 2) {
// size = 1;
// yypushback(1);
// }
// pushQuoteSize(size);
// pushState(TEXT);
// return textBegin();
// }
// // Raw Text
// (\")+ {
// int size = yylength();
// if(size == 2) {
// size = 1;
// yypushback(1);
// }
// pushQuoteSize(size);
// pushState(TEXT_RAW);
// return textRawBegin();
// }
// // Comments
// (\#) {
// pushState(COMMENT);
// return comment();
// }
// Layout
{whitespace}+ {whitespace();}
{newline} {pushState(NEWLINE);return newline();}
// // Unknown
// [^] {
// return unmatched();
// }

View File

@ -1,176 +0,0 @@
package org.enso.syntax.text.parser;
import org.enso.syntax.text.lexer.Token;import java.util.Stack;
import static org.enso.syntax.text.xx.Parser.Lexer.*;
/**
* Enso lexical scanner
*/
%%
%{
public abstract Token tokenAt(int i);
public abstract AST astFromChar(int i, char s);
public abstract AST astFromStr(int i, String s);
public abstract void appendExprSegment(AST ast);
public abstract void pushAST();
public abstract void popAST();
public AST ast;
/////////////////////////////
// Lexing State Management //
/////////////////////////////
private Stack<Integer> zzLexicalStateStack = new Stack<Integer>();
public final void pushState(int state) {
zzLexicalStateStack.push(zzLexicalState);
yybegin(state);
}
public final void popState() {
yybegin(zzLexicalStateStack.pop());
}
////////////////////////////
// Group Begin Management //
////////////////////////////
private Stack<Token> groupBeginStack = new Stack<Token>();
public final void pushGroupBegin(Token tok) {
groupBeginStack.push(tok);
}
public final Token popGroupBegin() {
return groupBeginStack.pop();
}
/////////
// API //
/////////
public final Token token() {
return token(1);
}
public final Token token(int offset) {
return tokenAt(yychar - yylength() + offset);
}
public final AST ast() {
return ast(1);
}
public final AST ast(int offset) {
String text = yytext();
return astFromChar(yychar,text.charAt(offset - 1));
}
private void rewind() {
zzMarkedPos -= yylength();
}
private void groupBegin() {
pushAST();
pushGroupBegin(token());
pushState(GROUPED);
}
private void groupEnd(boolean closed) {
AST body = ast;
Token begin = popGroupBegin();
popAST();
AST group;
if(closed) {
group = AST.grouped(begin,body,token());
} else {
group = AST.grouped(begin,body);
}
appendExprSegment(group);
popState();
}
private void blockBegin() {
System.out.println("block begin!");
pushAST();
pushState(BLOCK);
}
%}
%init{
%init}
/////////////
// Options //
/////////////
%abstract
%class Scanner2
//%int
%public
%type AST
%line
%column
%char
%unicode
// %apiprivate
// %debug
/////////////////
// Definitions //
/////////////////
VAR = a
BLOCK_BEGIN = b
BLOCK_END = c
EOL = d
EOF = e
GROUP_BEGIN = f
GROUP_END = g
CONS = h
IDENT = {VAR} | {CONS}
////////////
// States //
////////////
%state BLOCK
%state GROUPED
%%
{IDENT} {appendExprSegment(ast());}
{GROUP_BEGIN} {groupBegin();}
{EOL}+{BLOCK_BEGIN} {blockBegin();}
<GROUPED> {
{GROUP_END} {groupEnd(true);}
[^] {rewind(); groupEnd(false);}
}
<BLOCK> {
{EOL}+ {System.out.println("push line!");}
}
{EOF} {}
[^] {System.out.println("UNRECOGNIZED"); System.out.println(yytext());}

View File

@ -1,59 +0,0 @@
package org.enso.syntax
import java.io.Reader
import java.io.StringReader
import org.enso.syntax.text.lexer.Lexer
import org.enso.syntax.text.parser.Parser
import org.enso.syntax.text.parser.BParser
import scala.language.implicitConversions
import org.enso.syntax.text.lexer.SParser
object Main extends App {

  // Pretty-printer state: current indentation depth (one space per level).
  var indent = 0

  /** Pretty-print a "Name(child(...))" style string, breaking the line
    * at every parenthesis and adjusting `indent` as parens open/close.
    * Recurses on the remainder after each parenthesis.
    */
  def pprint(s: String) {
    print(" " * indent)
    // Longest prefix containing no parenthesis, printed on this line.
    val (l, r2) = s.span(x => x != '(' && x != ')')
    print(l)
    if (r2 == "") {
      println
      return
    }
    // m is the single paren character, r the tail after it.
    val (m, r) = r2.splitAt(1)
    if (m == "(") {
      indent += 1
      println(m)
      pprint(r)
    } else if (m == ")") {
      indent -= 1
      println(m)
      pprint(r)
    }
  }

  // val str = "a (b"
  // Sample input: an identifier followed by a two-line indented block.
  val str = "a\n b\n c" // .stripMargin
  println(str)
  val reader = new StringReader(str)
  val ss = new Lexer(reader)
  val toks = ss.lexAll()
  pprint(toks.toString)
  // Run all three parser variants over the same input and dump results.
  val sparser = new SParser(new StringReader(str))
  val bparser = new BParser(new StringReader(str))
  val parser = new Parser(new StringReader(str))
  pprint(bparser.parse.toString())
  pprint(parser.parse.toString())
  pprint("!")
  println(sparser.strInput)
  pprint(sparser.parse.toString)
}

View File

@ -1,90 +0,0 @@
package org.enso.syntax.text.lexer
import java.io.StringReader
import java.io.Reader
import scala.collection.immutable.Vector
import org.enso.syntax.text.xx.Parser
class Lexer(reader: Reader) extends Scanner(reader) with Parser.Lexer {

  // Set once the synthetic EOF token has been produced.
  private var _done = false

  /** Bison callback: semantic value of the most recently scanned token. */
  def getLVal(): Token = value

  /** Bison callback: report a parse error to stdout. */
  def yyerror(s: String): Unit = {
    println("!!! ERROR !!!")
    println(s)
  }

  /** Scan one token; at end of input (yylex == -1) mark the lexer done
    * and emit a synthetic EOF token.
    */
  def lexTok(): Token = {
    val code = yylex
    if (code == -1) {
      _done = true
      Token(EOF, 0, 0)
    } else {
      getLVal
    }
  }

  /** Scan the entire input; the result always ends with an EOF token. */
  def lexAll(): Vector[Token] = {
    val builder = Vector.newBuilder[Token]
    do {
      builder += lexTok
    } while (!_done)
    builder.result
  }

  /** Like [[lexAll]], but additionally returns the raw bison token
    * codes, aligned index-for-index with the token values.
    */
  def lexAll2(): (Vector[Int], Vector[Token]) = {
    val codes  = Vector.newBuilder[Int]
    val values = Vector.newBuilder[Token]
    do {
      val code = yylex
      val tok =
        if (code == -1) {
          _done = true
          Token(EOF, 0, 0)
        } else getLVal
      codes  += code
      values += tok
    } while (!_done)
    (codes.result, values.result)
  }
}
// class Lexer (reader:Reader) {
// val scanner = new Scanner(reader)
// private var _done = false
// def this(str:String) {
// this(new StringReader(str))
// }
// def lex(): Token = {
// if (done) {
// return Token(EOF,0,0)
// }
// if (scanner.done) {
// _done = true
// return lex
// }
// val token = scanner.lex
// if (token == null) {
// _done = true
// return lex
// } else {
// return token
// }
// }
// def lexAll(): Vector[Token] = {
// var builder = Vector.newBuilder[Token]
// do {
// builder += lex
// } while (!done)
// builder.result
// }
// def done(): Boolean = {
// return _done;
// }
// }

View File

@ -1,95 +0,0 @@
package org.enso.syntax.text.lexer
import java.io.Reader
import java.io.StringReader
import org.enso.syntax.Main.str
import org.enso.syntax.text.lexer.Lexer
import org.enso.syntax.text.parser.Parser
import org.enso.syntax.text.parser.BParser
import org.enso.syntax.text.parser.Scanner2
import org.enso.syntax.text.lexer.Token
import org.enso.syntax.text.lexer._
import org.enso.syntax.text.parser.AST
import scala.language.implicitConversions
class SS(reader: Reader, tokens: Vector[Token]) extends Scanner2(reader) {
  // var ast: AST = null;

  // Stack of saved outer expressions; pushAST/popAST bracket nested
  // scopes (groups, blocks) so `ast` holds the innermost expression.
  var astStack: List[AST] = List()

  /** Save the current expression and start a fresh (null) one. */
  def pushAST(): Unit = {
    astStack = ast +: astStack
    ast = null
  }

  /** Restore the most recently saved expression. */
  def popAST(): Unit = {
    ast = astStack.head
    astStack = astStack.tail
  }

  /** The pre-lexed token at absolute input position `i`. */
  def tokenAt(i: Int): Token = tokens(i)

  // NOTE(review): partial — only 'a' (Var) and 'h' (Cons) are handled,
  // matching SParser.toChar's encoding; any other char throws MatchError.
  def astFromChar(i: Int, chr: Char): AST = {
    chr match {
      case 'a' => AST.fromToken(tokenAt(i))
      case 'h' => AST.fromToken(tokenAt(i))
    }
  }

  // NOTE(review): appears unfinished — converts to a Vector, returns null.
  def astFromStr(i: Int, str: String): AST = {
    val vv = str.to[Vector]
    null
  }

  /** Append `ast2` to the current expression; the first segment becomes
    * the expression itself, later ones are folded in as applications.
    */
  def appendExprSegment(ast2: AST): Unit = {
    if (ast == null) {
      ast = ast2
    } else {
      ast = AST.app(ast, ast2)
    }
  }
}
class SParser(lexreader: Reader) {

  // First pass: fully lex the raw input.
  val ss = new Lexer(lexreader)
  val tokens = ss.lexAll()
  private var _done = false

  // Second pass: re-encode each token as one character (see toChar) and
  // feed the resulting string to the generated Scanner2-based parser.
  val strInput = tokens.map(toChar).mkString("")
  val reader = new StringReader(strInput)
  val scanner = new SS(reader, tokens)

  /** One-character encoding of a token class; must stay in sync with the
    * definitions in the Scanner2 grammar (VAR = a, BLOCK_BEGIN = b, ...).
    * NOTE(review): partial — any other symbol throws MatchError.
    */
  def toChar(tok: Token): Char = {
    tok.symbol match {
      case Var(name) => 'a'
      case BlockBegin => 'b'
      case BlockEnd => 'c'
      case EOL => 'd'
      case EOF => 'e'
      case GroupBegin => 'f'
      case GroupEnd => 'g'
      case Cons(name) => 'h'
    }
  }

  /** Drive the scanner until it returns null (end of input).
    * NOTE(review): the terminating null is also appended to the result —
    * confirm whether callers rely on that.
    */
  def lexAll(): (Vector[AST]) = {
    var builder = Vector.newBuilder[AST]
    do {
      val tok = scanner.yylex
      if (tok == null) {
        _done = true
      }
      builder += tok
    } while (!_done)
    builder.result
  }

  /** Parse by running the scanner to completion; the AST is accumulated
    * in the scanner's `ast` field as a side effect.
    */
  def parse(): AST = {
    lexAll()
    scanner.ast
  }
}

View File

@ -1,143 +0,0 @@
package org.enso.syntax.text.lexer
///////////
// Token //
///////////
/** A lexeme plus its layout information.
  *
  * @param symbol the token payload
  * @param offset leading whitespace before the token — TODO confirm units
  * @param span   the width of the token in the source — TODO confirm units
  */
case class Token(symbol: Symbol, offset: Int, span: Int)
////////////
// Symbol //
////////////
/** Lexeme payload carried by [[Token]].
  * NOTE(review): consider making this `sealed` — every variant is
  * declared in this file, which would buy exhaustiveness checking.
  */
abstract class Symbol
// Identifiers
case class Var(name: String) extends Symbol
case class Cons(name: String) extends Symbol
case object Wildcard extends Symbol
// Operators
case class Operator(name: String) extends Symbol
case class Modifier(name: String) extends Symbol
case object DisabledAssignment extends Symbol
// Layout
case object EOL extends Symbol
case object BOF extends Symbol
case object EOF extends Symbol
case object BlockBegin extends Symbol
case object BlockEnd extends Symbol
case object BlockInvalid extends Symbol
case object GroupBegin extends Symbol
case object GroupEnd extends Symbol
case object ListBegin extends Symbol
case object ListEnd extends Symbol
case object RecordBegin extends Symbol
case object RecordEnd extends Symbol
// Literals
case object TextBegin extends Symbol
case object TextEnd extends Symbol
case object TextRawBegin extends Symbol
case object TextRawEnd extends Symbol
case class Text(text: String) extends Symbol
case class TextEscape(esc: TextEscapeType) extends Symbol
case object TextInterpolateBegin extends Symbol
case object TextInterpolateEnd extends Symbol
// base is the radix; int/frac parts hold per-digit values (see
// Number.charToDigit in the companion below).
case class Number(base: Int, intPart: List[Int], fracPart: List[Int])
    extends Symbol
// Invalid
case class Invalid(reason: InvalidReason) extends Symbol
case class Unmatched(char: String) extends Symbol
// Comments
case object Comment extends Symbol
case class CommentBody(text: String) extends Symbol
//////////////////
// Text Escapes //
//////////////////
/** Payload of a [[TextEscape]] token. */
abstract class TextEscapeType
case class CharEscape(code: Int) extends TextEscapeType
case class CtrlEscape(code: Int) extends TextEscapeType
case class IntEscape(code: Int) extends TextEscapeType
case class Uni16Escape(code: Int) extends TextEscapeType
case class Uni32Escape(code: Int) extends TextEscapeType
case class Uni21Escape(code: Int) extends TextEscapeType
case object QuoteEscape extends TextEscapeType
case object RawQuoteEscape extends TextEscapeType
case object SlashEscape extends TextEscapeType
case class InvalidCharEscape(char: Char) extends TextEscapeType
case class InvalidUni32Escape(str: String) extends TextEscapeType
case class InvalidUni21Escape(str: String) extends TextEscapeType
/////////////
// Invalid //
/////////////
/** Why a token was flagged [[Invalid]]. */
abstract class InvalidReason
/** A valid token followed directly by characters that cannot extend it. */
case class UnexpectedSuffix(text: String) extends InvalidReason
////////////////
// Companions //
////////////////
object Number {

  /** Numeric value of a digit character in an arbitrary radix:
    * '0'-'9' -> 0-9, 'A'-'Z' -> 10-35, 'a'-'z' -> 10-35; any other
    * character yields -1.
    */
  def charToDigit(char: Char): Int = char match {
    case c if c >= '0' && c <= '9' => c - '0'
    case c if c >= 'A' && c <= 'Z' => c - 'A' + 10
    case c if c >= 'a' && c <= 'z' => c - 'a' + 10
    case _                         => -1
  }

  /** Convert every character of `str` via [[charToDigit]]. */
  def stringToDigits(str: String): List[Int] =
    str.toList.map(charToDigit)

  /** Build a [[Number]] from raw lexer text; an empty base means decimal. */
  def fromString(base: String, intPart: String, fracPart: String): Number = {
    val radix = if (base.isEmpty) 10 else base.toInt
    Number(radix, stringToDigits(intPart), stringToDigits(fracPart))
  }
}
object IntEscape {
  /** Parse a decimal escape code, e.g. the "12" of "\12". */
  def fromString(code: String): IntEscape = {
    IntEscape(code.toInt)
  }
}
object CharEscape {
  /** Build an escape carrying the code point of the literal character. */
  def fromChar(c: Char): CharEscape = {
    CharEscape(c.toInt)
  }
}
object Uni32Escape {

  /** Parse a 32-bit unicode escape given as hexadecimal digits.
    *
    * Returns [[InvalidUni32Escape]] when the text is not valid hex (or
    * overflows an Int). Catches only `NumberFormatException` — the sole
    * failure `Integer.parseInt` documents — instead of the original
    * over-broad `Exception`, so unrelated errors still propagate.
    */
  def fromString(str: String): TextEscapeType =
    try Uni32Escape(Integer.parseInt(str, 16))
    catch { case _: NumberFormatException => InvalidUni32Escape(str) }
}
object Uni21Escape {

  /** Parse a `\u{...}` unicode escape given as hexadecimal digits.
    *
    * Returns [[InvalidUni21Escape]] when the text is not valid hex (or
    * overflows an Int). Catches only `NumberFormatException` — the sole
    * failure `Integer.parseInt` documents — instead of the original
    * over-broad `Exception`, so unrelated errors still propagate.
    */
  def fromString(str: String): TextEscapeType =
    try Uni21Escape(Integer.parseInt(str, 16))
    catch { case _: NumberFormatException => InvalidUni21Escape(str) }
}

View File

@ -1,66 +0,0 @@
package org.enso.syntax.text.parser
import org.enso.syntax.text.lexer.Token
import org.enso.syntax.text.{lexer => token}
/////////
// AST //
/////////
/** A located syntax node: `symbol` is the payload, `offset` the leading
  * whitespace before the node, `span` its width — TODO confirm exact
  * offset/span semantics against the lexer's Token fields.
  */
case class AST(offset: Int, span: Int, symbol: Symbol)
// class Sym[T](offset:Int, span:Int, element:T)
////////////
// Symbol //
////////////
// NOTE(review): consider `sealed trait` — all variants live in this file.
trait Symbol
case object NONE extends Symbol
// Identifiers
case class Var(name: String) extends Symbol
case class Cons(name: String) extends Symbol
case class Operator(name: String) extends Symbol
case class App(func: AST, arg: AST) extends Symbol
case class Block(body: Vector[AST]) extends Symbol
case class Grouped(body: AST) extends Symbol
//
object AST {

  /** Lift an identifier token into an AST leaf.
    * NOTE(review): partial — only Var and Cons tokens are handled;
    * anything else throws MatchError.
    */
  def fromToken(tok: Token): AST = {
    tok.symbol match {
      case token.Var(name) => AST(tok.offset, tok.span, Var(name))
      case token.Cons(name) => AST(tok.offset, tok.span, Cons(name))
    }
  }

  /** Function application. Keeps the function's outer offset (zeroing it
    * on the stored copy); span is fn.span + arg.span — note arg.offset
    * is not included, TODO confirm that is intentional.
    */
  def app(fn: AST, arg: AST): AST = {
    AST(fn.offset, fn.span + arg.span, App(fn.copy(offset = 0), arg))
  }

  /** A block with no lines, at zero offset/span. */
  def emptyBlock(): AST = {
    AST(0, 0, Block(Vector()))
  }

  /** A block from its lines; offset/span are left at zero. */
  def block(lines: Vector[AST]): AST = {
    AST(0, 0, Block(lines))
  }

  /** A fully delimited group; span covers begin token, body and end token. */
  def grouped(begin: Token, body: AST, end: Token): AST = {
    val offset = begin.offset
    val span = begin.span + body.offset + body.span + end.offset + end.span
    AST(offset, span, Grouped(body))
  }

  // FIXME - should report error about lack of closing paren
  /** A group missing its closing delimiter; span excludes the end token. */
  def grouped(begin: Token, body: AST): AST = {
    val offset = begin.offset
    val span = begin.span + body.offset + body.span // + end.offset + end.span
    AST(offset, span, Grouped(body))
  }
}

View File

@ -1,157 +0,0 @@
package org.enso.syntax.text.parser
import org.enso.syntax.text.{xx => bison}
import java.io.Reader
import org.enso.syntax.text.xx.Parser.Lexer._
import org.enso.syntax.text.lexer.Token
import org.enso.syntax.text.lexer.Lexer
import scala.collection.immutable.VectorBuilder
import scala.collection.mutable
//////////////////
// Parser Rules //
//////////////////
///////////////
// GenParser //
///////////////
/** A tiny backtracking recursive-descent parser over the pre-lexed token
  * stream of `reader`. Rules are indexed by absolute token position;
  * success yields the next position together with a value.
  */
class GenParser(reader: Reader) {

  // Lex the whole input up front: bison token codes + token values,
  // aligned index-for-index.
  private val lexer = new Lexer(reader)
  private val (itokens, tokens) = lexer.lexAll2()

  /** Rule matching exactly one token with bison code `t`. */
  final def token(t: Int): Rule[Token] =
    Rule((ix) =>
      if (itokens(ix) == t) {
        Some(Result(ix + 1, tokens(ix)))
      } else {
        None
      })

  /** A successful parse: the position after the match and its value. */
  case class Result[T](offset: Int, value: T) {
    final def map[S](f: T => S): Result[S] = this.copy(value = f(value))
  }

  /** A parser rule: position in, optional (next position, value) out. */
  case class Rule[T](unwrap: (Int) => Option[Result[T]]) {
    final def run(ix: Int): Option[Result[T]] = unwrap(ix)
    final def mapOption[S](f: Option[Result[T]] => Option[Result[S]]): Rule[S] =
      Rule((ix) => f(run(ix)))
    final def mapResult[S](f: Result[T] => Result[S]): Rule[S] =
      mapOption(_.map(f))
    final def map[S](f: T => S): Rule[S] = mapResult(_.map(f))
    // Monadic sequencing: run `this`, then the rule derived from its value,
    // continuing from the position where `this` stopped.
    final def flatMap[S](f: T => Rule[S]): Rule[S] =
      Rule(ix => {
        run(ix).flatMap(r => f(r.value).run(r.offset))
      })
    final def flatMapResult[S](f: Result[T] => Rule[S]): Rule[S] =
      Rule(ix => {
        run(ix).flatMap(r => f(r).run(r.offset))
      })
    // Sequence two rules, discarding this rule's value.
    final def next[S](f: Rule[S]): Rule[S] = flatMap(_ => f)
    // Ordered choice: try `this`; on failure retry `that` at the same position.
    final def |(that: Rule[T]): Rule[T] = this or that
    final def or(that: Rule[T]): Rule[T] =
      Rule(ix => this.run(ix).orElse(that.run(ix)))
    // Choice that cannot fail: fall back to the constant `v`, consuming nothing.
    final def orElse(v: T): Rule[T] = or(Rule(ix => Some(Result(ix, v))))
    // Repeat this rule while it succeeds, left-folding values with `f`.
    @scala.annotation.tailrec
    final def manyWith(ix: Int, f: (T, T) => T, t: Result[T]): Result[T] =
      this.run(ix) match {
        case None => t
        case Some(r) => manyWith(r.offset, f, r.map(f(t.value, _)))
      }
    // One-or-more / zero-or-more repetitions, values discarded.
    final def many1_(): Rule[Unit] = this.flatMap(_ => many_)
    final def many_(): Rule[Unit] = Rule(ix => Some(_many_(ix)))
    @scala.annotation.tailrec
    final def _many_(ix: Int): Result[Unit] =
      this.run(ix) match {
        case None => Result(ix, Unit)
        case Some(r) => _many_(r.offset)
      }
    // One-or-more repetitions folded with `concat` (used for application).
    final def fold(concat: (T, T) => T): Rule[T] =
      Rule(ix => run(ix).map(r => manyWith(r.offset, concat, r)))
  }
}
////////////
// Parser //
////////////
class Parser(reader: Reader) extends GenParser(reader) {

  /** Parse the whole input as a single expression from token 0. */
  final def parse(): Option[Result[AST]] = expr.run(0)

  // final def expr(): Rule[AST] = vvar.next(vvar).or(vvar.next(cons))

  /** An expression: one or more atoms folded left into applications. */
  final def expr(): Rule[AST] = tok.fold(AST.app)

  /** A single expression atom. */
  final def tok(): Rule[AST] = vvar | cons | group | block
  final def vvar(): Rule[AST] = token(VAR).map(AST.fromToken)
  final def cons(): Rule[AST] = token(CONS).map(AST.fromToken)

  /** A parenthesised expression; the closing paren may be missing, in
    * which case the open group is still produced (see AST.grouped/2).
    */
  final def group(): Rule[AST] =
    token(GROUP_BEGIN).flatMap(
      beg =>
        expr.flatMap(
          exp =>
            token(GROUP_END)
              .map(AST.grouped(beg, exp, _))
              .orElse(AST.grouped(beg, exp))))

  /** A block: one or more EOLs, a BLOCK_BEGIN, then its lines. */
  final def block(): Rule[AST] = {
    token(EOL).many1_
      .next(token(BLOCK_BEGIN))
      .mapResult(r => blockLines(r.offset).map(AST.block))
  }

  /** Collect the block's lines starting at token index `ix`; an empty
    * block is valid and yields an empty vector.
    */
  final def blockLines(ix: Int): Result[Vector[AST]] = {
    expr.run(ix) match {
      case None => Result(ix, Vector[AST]())
      case Some(firstLine) => {
        val lines = Vector.newBuilder[AST]
        lines += firstLine.value
        _blockLines(firstLine.offset, lines)
      }
    }
  }

  // Accumulate further "EOL+ expr" lines until the rule fails.
  // (Cleanup: removed the unused `var body = true` and the never-reassigned
  // `var ix` local from the original.)
  @scala.annotation.tailrec
  final def _blockLines(
    startIx: Int,
    lines: mutable.Builder[AST, Vector[AST]]): Result[Vector[AST]] = {
    token(EOL).many1_.flatMap(_ => expr).run(startIx) match {
      case None => Result(startIx, lines.result)
      case Some(r) => _blockLines(r.offset, lines)
    }
  }
}
class BParser(reader: Reader) {

  // Feed the generated bison parser from our JFlex-based lexer.
  val lexer = new Lexer(reader)
  val parser = new bison.Parser(lexer)

  /** Run the bison parser; Some(result) on success, None on failure. */
  def parse(): Option[AST] =
    if (parser.parse) Some(parser.result) else None
}

View File

@ -1,96 +0,0 @@
package org.enso.syntax.text.parser
import java.io.Reader
//////////////////
// Parser Rules //
//////////////////
// Incremental parse results, apparently ported from Haskell attoparsec
// (see the commented Haskell signatures further down this file).

/** Outcome of running a parser over `Input`. */
trait Result[Input, Output]
/** Failure: remaining input, context stack, and an error message. */
case class Fail[Input, Output](rest: Input,
                               contexts: List[String],
                               error: String)
    extends Result[Input, Output]
/** The parser ran out of input; resume by feeding more to `cont`. */
case class Partial[Input, Output](cont: Input => Result[Input, Output])
    extends Result[Input, Output]
/** Success: remaining input plus the produced value. */
case class Done[Input, Output](rest: Input, output: Output)
    extends Result[Input, Output]
/** Absolute position in the input. */
case class Pos(offset: Int)
/** Whether all input has been supplied yet. */
trait More
case object Complete extends More
case object Incomplete extends More
class PP() {
type Input = Int
type Output = Int
type State = Int
type Failure =
(State, Pos, More, List[String], String) => Result[Input, Output]
type Success = (State, Pos, More, Output) => Result[Input, Output]
//failK :: Failure a
//failK t (Pos pos) _more stack msg = Fail (Buf.dropWord16 pos t) stack msg
def failK(state: State,
pos: Pos,
more: More,
stack: List[String],
msg: String): Result[Input, Output] = Fail(0 /*!*/, stack, msg)
//successK :: Success a a
//successK t (Pos pos) _more a = Done (Buf.dropWord16 pos t) a
def successK(state: State,
pos: Pos,
more: More,
a: Input): Result[Input, Output] =
Done(0 /*!*/, a)
// ensure :: Int -> Parser (Pos, Text)
// ensure n = T.Parser $ \t pos more lose succ ->
// case lengthAtLeast pos n t of
// Just n' -> succ t pos more (n', substring pos n' t)
// -- The uncommon case is kept out-of-line to reduce code size:
// Nothing -> ensureSuspended n t pos more lose succ
// def ensure(n:Int): Parser2 =
//
// class Parser2(
// run: (State, Pos, More, Failure, Success) => Result[Input, Output]) {
//
// //parse :: Parser a -> Text -> Result a
// //parse m s = runParser m (buffer s) 0 Incomplete failK successK
// def parse(input: Input): Result[Input, Output] =
// this.run(input, Pos(0), Incomplete, failK, successK)
// }
}
//
//-- | Run a parser.
//parse :: Parser a -> Text -> Result a
//parse m s = runParser m (buffer s) 0 Incomplete failK successK
//newtype Parser i a = Parser {
//runParser :: forall r.
//State i -> Pos -> More
//-> Failure i (State i) r
//-> Success i (State i) a r
//-> IResult i r
//}
//
//type family State i
//type instance State ByteString = B.Buffer
//type instance State Text = T.Buffer
//
//type Failure i t r = t -> Pos -> More -> [String] -> String
//-> IResult i r
//type Success i t a r = t -> Pos -> More -> a -> IResult i r
//
//-- | Have we read all available input?
//data More = Complete | Incomplete
//deriving (Eq, Show)
//newtype Parser i a = Parser {
// runParser :: forall r.
// State i -> Pos -> More
// -> Failure i (State i) r
// -> Success i (State i) a r
// -> IResult i r
//}

View File

@ -1,198 +0,0 @@
%language "Java"
%name-prefix "Parser"
%define parser_class_name "Parser"
%define public
%code imports {
package org.enso.syntax.text.xx;
import org.enso.syntax.text.parser.AST;
import org.enso.syntax.text.lexer.Token;
}
%code {
public AST result;
}
// public static void main (String args[]) throws IOException
// {
// CalcLexer l = new CalcLexer (System.in);
// Calc p = new Calc (l);
// p.parse ();
// }
// }
// %define api.value.type {Token}
/* Bison Declarations */
%token <Token> VAR
%token <Token> CONS
%token <Token> EOL
%token <Token> GROUP_BEGIN
%token <Token> GROUP_END
%token <Token> BLOCK_BEGIN
%token <Token> BLOCK_END
%token <Token> BLOCK_INVALID
%type <AST> expr
%type <AST> exprItem
%type <AST> expr_group
%type <AST> block
%type <AST> blockBody
%type <AST> tok
%start program
%right GROUP_BEGIN GROUP_END
/////////////
// Grammar //
/////////////
%%
program:
expr {result=$1;}
| /* empty */
expr:
exprItem {$$=$1;}
| expr exprItem {$$=AST.app($1,$2);}
exprItem:
tok {$$=$1;};
| block {$$=$1;};
| GROUP_BEGIN expr_group GROUP_END {$$=AST.grouped($1,$2,$3);};
| GROUP_BEGIN expr_group {$$=AST.grouped($1,$2);};
expr_group:
tok {$$=$1;}
| expr_group tok {$$=AST.app($1,$2);}
| GROUP_BEGIN expr_group GROUP_END {$$=AST.grouped($1,$2,$3);};
block:
BLOCK_BEGIN blockBody {$$=$2;}
blockBody:
expr EOL blockBody {$$=$1;}
| expr BLOCK_END {$$=AST.emptyBlock();}
tok:
VAR {$$=AST.fromToken($1);}
| CONS {$$=AST.fromToken($1);}
// {
// if ($1.intValue () != $3.intValue ())
// yyerror ( "calc: error: " + $1 + " != " + $3);
// }
// | exp '-' exp { $$ = new Integer ($1.intValue () - $3.intValue ()); }
// | exp '*' exp { $$ = new Integer ($1.intValue () * $3.intValue ()); }
// | exp '/' exp { $$ = new Integer ($1.intValue () / $3.intValue ()); }
// | '-' exp %prec NEG { $$ = new Integer (-$2.intValue ()); }
// | exp '^' exp { $$ = new Integer ((int)
// Math.pow ($1.intValue (),
// $3.intValue ())); }
// | '(' exp ')' { $$ = $2; }
// | '(' error ')' { $$ = new Integer (1111); }
// | '!' { $$ = new Integer (0); return YYERROR; }
// | '-' error { $$ = new Integer (0); return YYERROR; }
// ;
%%
// class CalcLexer implements Calc.Lexer {
// StreamTokenizer st;
// public CalcLexer (InputStream is)
// {
// st = new StreamTokenizer (new InputStreamReader (is));
// st.resetSyntax ();
// st.eolIsSignificant (true);
// st.whitespaceChars (9, 9);
// st.whitespaceChars (32, 32);
// st.wordChars (48, 57);
// }
// public void yyerror (String s)
// {
// System.err.println (s);
// }
// Integer yylval;
// public Object getLVal() {
// return yylval;
// }
// public int yylex () throws IOException {
// int ttype = st.nextToken ();
// if (ttype == st.TT_EOF)
// return Calc.EOF;
// else if (ttype == st.TT_EOL)
// {
// return (int) '\n';
// }
// else if (ttype == st.TT_WORD)
// {
// yylval = new Integer (st.sval);
// return Calc.NUMX;
// }
// else
// return st.ttype;
// }
// }
// class Position {
// public int line;
// public int token;
// public Position ()
// {
// line = 0;
// token = 0;
// }
// public Position (int l, int t)
// {
// line = l;
// token = t;
// }
// public boolean equals (Position l)
// {
// return l.line == line && l.token == token;
// }
// public String toString ()
// {
// return Integer.toString (line) + "." + Integer.toString(token);
// }
// public int lineno ()
// {
// return line;
// }
// public int token ()
// {
// return token;
// }
// }

View File

@ -1,217 +0,0 @@
package org.enso.syntax.text.lexer
//
//import org.scalatest._
//
//class LexerSpec extends FlatSpec with Matchers {
//
// def lex (input:String): List[Token] = {
// new Lexer(input).lexAll().to[List]
// }
//
// def lex_ (input:String): List[Symbol] = {
// lex(input).map(tok => tok.symbol)
// }
//
// def assertLex (input:String, result:List[Symbol]) = {
// assert(lex_(input) == (result :+ EOF))
// }
//
// def check (input:String, result:List[Symbol]) = {
// it should escape(input) in { assertLex (input, result) }
// }
//
// def unexpectedSuffix (input:String): Symbol = {
// Invalid(UnexpectedSuffix(input))
// }
//
// def escape(raw: String): String = {
// import scala.reflect.runtime.universe._
// Literal(Constant(raw)).toString
// }
//
//
//
// /////////////////
// // Identifiers //
// /////////////////
//
// check("_" , Wildcard :: Nil)
// check("Name" , Cons("Name") :: Nil)
// check("name" , Var("name") :: Nil)
// check("name'" , Var("name'") :: Nil)
// check("name''" , Var("name''") :: Nil)
// check("name'a" , Var("name'") :: unexpectedSuffix("a") :: Nil)
// check("name_" , Var("name_") :: Nil)
// check("name_'" , Var("name_'") :: Nil)
// check("name'_" , Var("name'") :: unexpectedSuffix("_") :: Nil)
// check("name`" , Var("name") :: Unmatched("`") :: Nil)
//
//
//
// ///////////////
// // Operators //
// ///////////////
//
// check("=" , Operator("=") :: Nil)
// check("==" , Operator("==") :: Nil)
// check("===" , Operator("==") :: unexpectedSuffix("=") :: Nil)
// check(":" , Operator(":") :: Nil)
// check("," , Operator(",") :: Nil)
// check("." , Operator(".") :: Nil)
// check(".." , Operator("..") :: Nil)
// check("..." , Operator("...") :: Nil)
// check("...." , Operator("...") :: unexpectedSuffix(".") :: Nil)
// check(">=" , Operator(">=") :: Nil)
// check("<=" , Operator("<=") :: Nil)
// check("/=" , Operator("/=") :: Nil)
// check("+=" , Modifier("+=") :: Nil)
// check("-=" , Modifier("-=") :: Nil)
// check("-=-" , Modifier("-=") :: unexpectedSuffix("-") :: Nil)
//
//
//
// ////////////
// // Layout //
// ////////////
//
// check("" , Nil)
// check("\n" , EOL :: Nil)
// check("\n\n" , EOL :: EOL :: Nil)
// check("\r" , EOL :: Nil)
// check("\r\n" , EOL :: Nil)
// check("\n\r" , EOL :: EOL :: Nil)
// check("(a)" , GroupBegin :: Var("a") :: GroupEnd :: Nil)
// check("[a]" , ListBegin :: Var("a") :: ListEnd :: Nil)
// check("{a}" , RecordBegin :: Var("a") :: RecordEnd :: Nil)
//
//
//
// /////////////
// // Numbers //
// /////////////
//
// check("7" , Number(10,7::Nil,Nil) :: Nil)
// check("7.5" , Number(10,7::Nil,Nil) :: Operator(".") :: Number(10,5::Nil,Nil) :: Nil)
// check("7_12" , Number(7,1::2::Nil,Nil) :: Nil)
// check("7_12.34" , Number(7,1::2::Nil,3::4::Nil) :: Nil)
// check("16_9acdf" , Number(16,9::10::12::13::15::Nil,Nil) :: Nil)
//
//
//
// //////////
// // Text //
// //////////
//
// // Basic
// check("'" , TextBegin :: Nil)
// check("\"" , TextRawBegin :: Nil)
// check("''" , TextBegin :: TextEnd :: Nil)
// check("\"\"" , TextRawBegin :: TextRawEnd :: Nil)
// check("'''" , TextBegin :: Nil)
// check("\"\"\"" , TextRawBegin :: Nil)
// check("' '" , TextBegin :: Text(" ") :: TextEnd :: Nil)
// check("\" \"" , TextRawBegin :: Text(" ") :: TextRawEnd :: Nil)
// check("'' ''" , TextBegin :: TextEnd :: TextBegin :: TextEnd :: Nil)
// check("\"\" \"\"" , TextRawBegin :: TextRawEnd :: TextRawBegin :: TextRawEnd :: Nil)
// check("'\n'" , TextBegin :: EOL :: TextEnd :: Nil)
// check("\"\n\"" , TextRawBegin :: EOL :: TextRawEnd :: Nil)
// check("'\\\\'" , TextBegin :: TextEscape(SlashEscape) :: TextEnd :: Nil)
// check("\"\\\\\"" , TextRawBegin :: Text("\\") :: TextEscape(RawQuoteEscape) :: Nil)
// check("'\\\''" , TextBegin :: TextEscape(QuoteEscape) :: TextEnd :: Nil)
// check("\"\\\'\"" , TextRawBegin :: TextEscape(QuoteEscape) :: TextRawEnd :: Nil)
// check("'\\\"'" , TextBegin :: TextEscape(RawQuoteEscape) :: TextEnd :: Nil)
// check("\"\\\"\"" , TextRawBegin :: TextEscape(RawQuoteEscape) :: TextRawEnd :: Nil)
// check("''' '''" , TextBegin :: Text(" ") :: TextEnd :: Nil)
// check("\"\"\" \"\"\"" , TextRawBegin :: Text(" ") :: TextRawEnd :: Nil)
// check("''' '' '''" , TextBegin :: Text(" ") :: Text("''") :: Text(" ") :: TextEnd :: Nil)
// check("\"\"\" \"\" \"\"\"" , TextRawBegin :: Text(" ") :: Text("\"\"") :: Text(" ") :: TextRawEnd :: Nil)
//
// // Int Escapes
// check("'\\12'" , TextBegin :: TextEscape(IntEscape(12)) :: TextEnd :: Nil)
// check("\"\\12\"" , TextRawBegin :: Text("\\") :: Text("12") :: TextRawEnd :: Nil)
//
// // Char Escapes
// check("'\\a'" , TextBegin :: TextEscape(CharEscape(7)) :: TextEnd :: Nil)
// check("'\\b'" , TextBegin :: TextEscape(CharEscape(8)) :: TextEnd :: Nil)
// check("'\\f'" , TextBegin :: TextEscape(CharEscape(12)) :: TextEnd :: Nil)
// check("'\\n'" , TextBegin :: TextEscape(CharEscape(10)) :: TextEnd :: Nil)
// check("'\\r'" , TextBegin :: TextEscape(CharEscape(13)) :: TextEnd :: Nil)
// check("'\\t'" , TextBegin :: TextEscape(CharEscape(9)) :: TextEnd :: Nil)
// check("'\\v'" , TextBegin :: TextEscape(CharEscape(11)) :: TextEnd :: Nil)
// check("'\\e'" , TextBegin :: TextEscape(CharEscape(27)) :: TextEnd :: Nil)
// check("'\\q'" , TextBegin :: TextEscape(InvalidCharEscape('q')) :: TextEnd :: Nil)
// check("\"\\a\"" , TextRawBegin :: Text("\\") :: Text("a") :: TextRawEnd :: Nil)
//
// // Control Escapes
// check("'\\NUL'" , TextBegin :: TextEscape(CtrlEscape(0)) :: TextEnd :: Nil)
// check("'\\SOH'" , TextBegin :: TextEscape(CtrlEscape(1)) :: TextEnd :: Nil)
// check("'\\STX'" , TextBegin :: TextEscape(CtrlEscape(2)) :: TextEnd :: Nil)
// check("'\\ETX'" , TextBegin :: TextEscape(CtrlEscape(3)) :: TextEnd :: Nil)
// check("'\\EOT'" , TextBegin :: TextEscape(CtrlEscape(4)) :: TextEnd :: Nil)
// check("'\\ENQ'" , TextBegin :: TextEscape(CtrlEscape(5)) :: TextEnd :: Nil)
// check("'\\ACK'" , TextBegin :: TextEscape(CtrlEscape(6)) :: TextEnd :: Nil)
// check("'\\BEL'" , TextBegin :: TextEscape(CtrlEscape(7)) :: TextEnd :: Nil)
// check("'\\BS'" , TextBegin :: TextEscape(CtrlEscape(8)) :: TextEnd :: Nil)
// check("'\\TAB'" , TextBegin :: TextEscape(CtrlEscape(9)) :: TextEnd :: Nil)
// check("'\\LF'" , TextBegin :: TextEscape(CtrlEscape(10)) :: TextEnd :: Nil)
// check("'\\VT'" , TextBegin :: TextEscape(CtrlEscape(11)) :: TextEnd :: Nil)
// check("'\\FF'" , TextBegin :: TextEscape(CtrlEscape(12)) :: TextEnd :: Nil)
// check("'\\CR'" , TextBegin :: TextEscape(CtrlEscape(13)) :: TextEnd :: Nil)
// check("'\\SO'" , TextBegin :: TextEscape(CtrlEscape(14)) :: TextEnd :: Nil)
// check("'\\SI'" , TextBegin :: TextEscape(CtrlEscape(15)) :: TextEnd :: Nil)
// check("'\\DLE'" , TextBegin :: TextEscape(CtrlEscape(16)) :: TextEnd :: Nil)
// check("'\\DC1'" , TextBegin :: TextEscape(CtrlEscape(17)) :: TextEnd :: Nil)
// check("'\\DC2'" , TextBegin :: TextEscape(CtrlEscape(18)) :: TextEnd :: Nil)
// check("'\\DC3'" , TextBegin :: TextEscape(CtrlEscape(19)) :: TextEnd :: Nil)
// check("'\\DC4'" , TextBegin :: TextEscape(CtrlEscape(20)) :: TextEnd :: Nil)
// check("'\\NAK'" , TextBegin :: TextEscape(CtrlEscape(21)) :: TextEnd :: Nil)
// check("'\\SYN'" , TextBegin :: TextEscape(CtrlEscape(22)) :: TextEnd :: Nil)
// check("'\\ETB'" , TextBegin :: TextEscape(CtrlEscape(23)) :: TextEnd :: Nil)
// check("'\\CAN'" , TextBegin :: TextEscape(CtrlEscape(24)) :: TextEnd :: Nil)
// check("'\\EM'" , TextBegin :: TextEscape(CtrlEscape(25)) :: TextEnd :: Nil)
// check("'\\SUB'" , TextBegin :: TextEscape(CtrlEscape(26)) :: TextEnd :: Nil)
// check("'\\ESC'" , TextBegin :: TextEscape(CtrlEscape(27)) :: TextEnd :: Nil)
// check("'\\FS'" , TextBegin :: TextEscape(CtrlEscape(28)) :: TextEnd :: Nil)
// check("'\\GS'" , TextBegin :: TextEscape(CtrlEscape(29)) :: TextEnd :: Nil)
// check("'\\RS'" , TextBegin :: TextEscape(CtrlEscape(30)) :: TextEnd :: Nil)
// check("'\\US'" , TextBegin :: TextEscape(CtrlEscape(31)) :: TextEnd :: Nil)
// check("'\\DEL'" , TextBegin :: TextEscape(CtrlEscape(127)) :: TextEnd :: Nil)
// check("\"\\NUL\"" , TextRawBegin :: Text("\\") :: Text("NUL") :: TextRawEnd :: Nil)
//
// // Unicode Escapes
// check("'\\uF'" , TextBegin :: TextEscape(InvalidCharEscape('u')) :: Text("F") :: TextEnd :: Nil)
// check("'\\uFF00'" , TextBegin :: TextEscape(Uni16Escape(0xFF00)) :: TextEnd :: Nil)
// check("'\\U00ABCDEF'" , TextBegin :: TextEscape(Uni32Escape(0x00ABCDEF)) :: TextEnd :: Nil)
// check("'\\UFFFFFFFF'" , TextBegin :: TextEscape(InvalidUni32Escape("FFFFFFFF")) :: TextEnd :: Nil)
// check("'\\u{FF0}'" , TextBegin :: TextEscape(Uni21Escape(0xFF0)) :: TextEnd :: Nil)
// check("'\\u{FFFFFFFF}'" , TextBegin :: TextEscape(InvalidUni21Escape("FFFFFFFF")) :: TextEnd :: Nil)
// check("'\\u{}'" , TextBegin :: TextEscape(InvalidUni21Escape("")) :: TextEnd :: Nil)
//
// // Interpolation
// check("'`{ }`'" , TextBegin :: TextInterpolateBegin :: RecordBegin :: RecordEnd :: TextInterpolateEnd :: TextEnd :: Nil)
// check("'`{ }'" , TextBegin :: TextInterpolateBegin :: RecordBegin :: RecordEnd :: TextBegin :: Nil)
// check("'` `}'" , TextBegin :: TextInterpolateBegin :: TextInterpolateEnd :: Text("}") :: TextEnd :: Nil)
// check("'`a`'" , TextBegin :: TextInterpolateBegin :: Var("a") :: TextInterpolateEnd :: TextEnd :: Nil)
// check("'`'a'`'" , TextBegin :: TextInterpolateBegin :: TextBegin :: Text("a") :: TextEnd :: TextInterpolateEnd :: TextEnd :: Nil)
// check("'''`'a'`'''" , TextBegin :: TextInterpolateBegin :: TextBegin :: Text("a") :: TextEnd :: TextInterpolateEnd :: TextEnd :: Nil)
// check("'`'''a'''`'" , TextBegin :: TextInterpolateBegin :: TextBegin :: Text("a") :: TextEnd :: TextInterpolateEnd :: TextEnd :: Nil)
// check("\"``\"" , TextRawBegin :: Text("``") :: TextRawEnd :: Nil)
// check("'`'`a`'`'" , TextBegin :: TextInterpolateBegin :: TextBegin :: TextInterpolateBegin :: Var("a") :: TextInterpolateEnd :: TextEnd :: TextInterpolateEnd :: TextEnd :: Nil)
//
// // Comments
// check("#" , Comment :: Nil)
// check("#c" , Comment :: CommentBody("c") :: Nil)
// check("#c\na" , Comment :: CommentBody("c") :: EOL :: Var("a") :: Nil)
// check("#c\n a" , Comment :: CommentBody("c") :: EOL :: CommentBody(" a") :: Nil)
// check(" #c\n a" , Comment :: CommentBody("c") :: EOL :: Var("a") :: Nil)
// check(" #c\n a" , Comment :: CommentBody("c") :: EOL :: CommentBody(" a") :: Nil)
// check("a#c" , Var("a") :: Comment :: CommentBody("c") :: Nil)
// check("a # c" , Var("a") :: Comment :: CommentBody(" c") :: Nil)
// check("a#" , Var("a") :: Comment :: Nil)
// check("a#\nb" , Var("a") :: Comment :: EOL :: Var("b") :: Nil)
// check("a#\n b" , Var("a") :: Comment :: EOL :: CommentBody(" b") :: Nil)
//
// // Disabled
// check("a #= b" , Var("a") :: DisabledAssignment :: Var("b") :: Nil)
//
//}

View File

@ -22,6 +22,7 @@ variables:
jobs: jobs:
- job: Linux - job: Linux
timeoutInMinutes: 90
pool: pool:
vmImage: 'Ubuntu-16.04' vmImage: 'Ubuntu-16.04'
container: container:
@ -39,8 +40,12 @@ jobs:
displayName: sbt test displayName: sbt test
continueOnError: true continueOnError: true
- script: | - script: |
sbt -no-colors bench sbt -no-colors syntax/bench
displayName: sbt bench displayName: sbt bench parser
continueOnError: true
- script: |
sbt -no-colors interpreter/bench
displayName: sbt bench interpreter
continueOnError: true continueOnError: true
- task: PublishTestResults@2 - task: PublishTestResults@2
inputs: inputs:
@ -51,6 +56,7 @@ jobs:
condition: eq(variables['Agent.JobStatus'], 'SucceededWithIssues') condition: eq(variables['Agent.JobStatus'], 'SucceededWithIssues')
displayName: "Fail if there were issues" displayName: "Fail if there were issues"
- job: macOS - job: macOS
timeoutInMinutes: 90
pool: pool:
vmImage: macOS-10.13 vmImage: macOS-10.13
steps: steps:
@ -71,8 +77,12 @@ jobs:
displayName: sbt test displayName: sbt test
continueOnError: true continueOnError: true
- script: | - script: |
sbt -no-colors bench sbt -no-colors syntax/bench
displayName: sbt bench displayName: sbt bench parser
continueOnError: true
- script: |
sbt -no-colors interpreter/bench
displayName: sbt bench interpreter
continueOnError: true continueOnError: true
- task: PublishTestResults@2 - task: PublishTestResults@2
inputs: inputs:
@ -83,6 +93,7 @@ jobs:
condition: eq(variables['Agent.JobStatus'], 'SucceededWithIssues') condition: eq(variables['Agent.JobStatus'], 'SucceededWithIssues')
displayName: "Fail if there were issues" displayName: "Fail if there were issues"
- job: Windows - job: Windows
timeoutInMinutes: 90
pool: pool:
vmImage: windows-2019 vmImage: windows-2019
steps: steps:
@ -103,9 +114,13 @@ jobs:
continueOnError: true continueOnError: true
displayName: "sbt test" displayName: "sbt test"
- script: | - script: |
sbt bench sbt syntax/bench
displayName: sbt bench parser
continueOnError: true
- script: |
sbt interpreter/bench
displayName: sbt bench interpreter
continueOnError: true continueOnError: true
displayName: "sbt bench"
- task: PublishTestResults@2 - task: PublishTestResults@2
inputs: inputs:
testResultsFormat: 'JUnit' testResultsFormat: 'JUnit'

258
build.sbt
View File

@ -1,81 +1,205 @@
import sbt.Keys.scalacOptions
import scala.sys.process._ import scala.sys.process._
import org.enso.build.BenchTasks._ import org.enso.build.BenchTasks._
import org.enso.build.WithDebugCommand import org.enso.build.WithDebugCommand
// Global Configuration //////////////////////////////
organization := "org.enso" //// Global Configuration ////
scalaVersion := "2.12.8" //////////////////////////////
// Compiler Options val scalacVersion = "2.12.8"
scalacOptions ++= Seq( organization in ThisBuild := "org.enso"
"-deprecation", scalaVersion in ThisBuild := scalacVersion
"-feature",
"-unchecked", //////////////////////////
"-Xlint" //// Compiler Options ////
//////////////////////////
scalacOptions in ThisBuild ++= Seq(
"-deprecation", // Emit warning and location for usages of deprecated APIs.
"-encoding", // Provide explicit encoding (the next line)
"utf-8", // Specify character encoding used by source files.
"-explaintypes", // Explain type errors in more detail.
"-feature", // Emit warning and location for usages of features that should be imported explicitly.
"-language:existentials", // Existential types (besides wildcard types) can be written and inferred
"-language:experimental.macros", // Allow macro definition (besides implementation and application)
"-language:higherKinds", // Allow higher-kinded types
"-language:implicitConversions", // Allow definition of implicit functions called views
"-unchecked", // Enable additional warnings where generated code depends on assumptions.
"-Xlint:adapted-args", // Warn if an argument list is modified to match the receiver.
"-Xlint:by-name-right-associative", // By-name parameter of right associative operator.
"-Xlint:constant", // Evaluation of a constant arithmetic expression results in an error.
"-Xlint:delayedinit-select", // Selecting member of DelayedInit.
"-Xlint:doc-detached", // A Scaladoc comment appears to be detached from its element.
"-Xlint:inaccessible", // Warn about inaccessible types in method signatures.
"-Xlint:infer-any", // Warn when a type argument is inferred to be `Any`.
"-Xlint:missing-interpolator", // A string literal appears to be missing an interpolator id.
"-Xlint:nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'.
"-Xlint:nullary-unit", // Warn when nullary methods return Unit.
"-Xlint:option-implicit", // Option.apply used implicit view.
"-Xlint:package-object-classes", // Class or object defined in package object.
"-Xlint:poly-implicit-overload", // Parameterized overloaded implicit methods are not visible as view bounds.
"-Xlint:private-shadow", // A private field (or class parameter) shadows a superclass field.
"-Xlint:stars-align", // Pattern sequence wildcard must align with sequence component.
"-Xlint:type-parameter-shadow", // A local type parameter shadows a type already in scope.
"-Xlint:unsound-match", // Pattern match may not be typesafe.
"-Yno-adapted-args", // Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver.
"-Ypartial-unification", // Enable partial unification in type constructor inference
"-Ywarn-dead-code", // Warn when dead code is identified.
"-Ywarn-extra-implicit", // Warn when more than one implicit parameter section is defined.
"-Ywarn-inaccessible", // Warn about inaccessible types in method signatures.
"-Ywarn-infer-any", // Warn when a type argument is inferred to be `Any`.
"-Ywarn-nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'.
"-Ywarn-nullary-unit", // Warn when nullary methods return Unit.
"-Ywarn-numeric-widen", // Warn when numerics are widened.
"-Ywarn-unused:implicits", // Warn if an implicit parameter is unused.
"-Ywarn-unused:imports", // Warn if an import selector is not referenced.
"-Ywarn-unused:locals", // Warn if a local definition is unused.
"-Ywarn-unused:params", // Warn if a value parameter is unused.
"-Ywarn-unused:patvars", // Warn if a variable bound in a pattern is unused.
"-Ywarn-unused:privates", // Warn if a private member is unused.
"-Ywarn-value-discard", // Warn when non-Unit expression results are unused.
"-Ypartial-unification", // Enable partial unification (which is enabled by default in Scala 2.13).
"-Xmacro-settings:-logging@org.enso", // Disable the debug logging globally.
"-Xcheckinit" // Wrap field accessors to throw an exception on uninitialized access.
) )
javacOptions ++= Seq("-source", "12", "-target", "1.8") /////////////////////////////////
//// Benchmark Configuration ////
/////////////////////////////////
// Benchmark Configuration
lazy val Benchmark = config("bench") extend sbt.Test lazy val Benchmark = config("bench") extend sbt.Test
// Native Image Generation // Native Image Generation
lazy val buildNativeImage = lazy val buildNativeImage =
taskKey[Unit]("Build native image for the Enso executable") taskKey[Unit]("Build native image for the Enso executable")
// Global Project ////////////////////////
//// Global Project ////
////////////////////////
lazy val enso = (project in file(".")) lazy val enso = (project in file("."))
.settings(version := "0.1") .settings(version := "0.1")
.aggregate( .aggregate(syntax, pkg, interpreter)
syntax, .settings(Global / concurrentRestrictions += Tags.exclusive(Exclusive))
pkg,
interpreter, ////////////////////////////
projectManager, //// Dependency Bundles ////
fileManager ////////////////////////////
val monocle = {
val monocleVersion = "1.6.0"
Seq(
"com.github.julien-truffaut" %% "monocle-core" % monocleVersion,
"com.github.julien-truffaut" %% "monocle-macro" % monocleVersion,
"com.github.julien-truffaut" %% "monocle-law" % monocleVersion % "test"
)
}
val cats = {
Seq(
"org.typelevel" %% "cats-core" % "2.0.0-RC1",
"org.typelevel" %% "kittens" % "1.2.1"
)
}
val scala_compiler = Seq(
"org.scala-lang" % "scala-reflect" % scalacVersion,
"org.scala-lang" % "scala-compiler" % scalacVersion
)
val circe = Seq("circe-core", "circe-generic", "circe-yaml")
.map("io.circe" %% _ % "0.10.0")
def akkaPkg(name: String) = akkaURL %% s"akka-$name" % akkaVersion
def akkaHTTPPkg(name: String) = akkaURL %% s"akka-$name" % akkaHTTPVersion
val akkaURL = "com.typesafe.akka"
val akkaVersion = "2.5.23"
val akkaHTTPVersion = "10.1.8"
val akkaActor = akkaPkg("actor")
val akkaStream = akkaPkg("stream")
val akkaTyped = akkaPkg("actor-typed")
val akkaTestkit = akkaPkg("testkit")
val akkaSLF4J = akkaPkg("slf4j")
val akkaTestkitTyped = akkaPkg("actor-testkit-typed") % Test
val akkaHttp = akkaHTTPPkg("http")
val akkaSpray = akkaHTTPPkg("http-spray-json")
val akka = Seq(akkaActor, akkaStream, akkaHttp, akkaSpray, akkaTyped)
val jmh = Seq(
"org.openjdk.jmh" % "jmh-core" % "1.21" % Benchmark,
"org.openjdk.jmh" % "jmh-generator-annprocess" % "1.21" % Benchmark
)
//////////////////////
//// Sub-Projects ////
//////////////////////
lazy val logger = (project in file("lib/logger"))
.dependsOn(unused)
.settings(
version := "0.1",
libraryDependencies ++= scala_compiler
) )
// Sub-Projects lazy val flexer = (project in file("lib/flexer"))
lazy val syntax = (project in file("Syntax")) .dependsOn(logger)
.settings( .settings(
mainClass in (Compile, run) := Some("org.enso.syntax.Main"), version := "0.1",
version := "0.1" scalacOptions -= "-deprecation", // FIXME
resolvers += Resolver.sonatypeRepo("releases"),
libraryDependencies ++= scala_compiler ++ Seq(
"org.feijoas" %% "mango" % "0.14"
)
) )
lazy val unused = (project in file("lib/unused"))
.settings(version := "0.1", scalacOptions += "-nowarn")
lazy val syntax_definition = (project in file("Syntax/definition"))
.dependsOn(logger, flexer)
.settings( .settings(
libraryDependencies ++= monocle ++ cats ++ scala_compiler ++ Seq(
"com.lihaoyi" %% "scalatags" % "0.7.0"
)
)
lazy val syntax = (project in file("Syntax/specialization"))
.dependsOn(logger, flexer, syntax_definition)
.configs(Test)
.configs(Benchmark)
.settings(
mainClass in (Compile, run) := Some("org.enso.syntax.text.Main"),
version := "0.1",
testFrameworks += new TestFramework("org.scalameter.ScalaMeterFramework"),
logBuffered := false,
inConfig(Benchmark)(Defaults.testSettings),
bench := (test in Benchmark).tag(Exclusive).value,
parallelExecution in Benchmark := false,
libraryDependencies ++= Seq( libraryDependencies ++= Seq(
"com.storm-enroute" %% "scalameter" % "0.17" % "bench", "com.storm-enroute" %% "scalameter" % "0.17" % "bench",
"org.typelevel" %% "cats-core" % "1.6.0",
"org.scalatest" %% "scalatest" % "3.0.5" % Test, "org.scalatest" %% "scalatest" % "3.0.5" % Test,
"com.lihaoyi" %% "pprint" % "0.5.3" "com.lihaoyi" %% "pprint" % "0.5.3"
), ),
resolvers ++= Seq( (Compile / compile) := (Compile / compile)
"Sonatype OSS Snapshots" at .dependsOn(Def.taskDyn {
"https://oss.sonatype.org/content/repositories/snapshots", val parserCompile =
"Sonatype OSS Releases" at (syntax_definition / Compile / compileIncremental).value
"https://oss.sonatype.org/content/repositories/releases" if (parserCompile.hasModified) {
) Def.task {
) streams.value.log.info("Parser changed, forcing recompilation.")
.settings(SbtJFlexPlugin.jflexSettings) clean.value
.configs(Test) }
.settings( } else Def.task {}
testFrameworks += new TestFramework("org.scalameter.ScalaMeterFramework"), })
logBuffered := false .value
)
.configs(Benchmark)
.settings(
inConfig(Benchmark)(Defaults.testSettings),
bench := (test in Benchmark).value,
parallelExecution in Benchmark := false
) )
lazy val pkg = (project in file("Pkg")) lazy val pkg = (project in file("Pkg"))
.settings( .settings(
mainClass in (Compile, run) := Some("org.enso.pkg.Main"), mainClass in (Compile, run) := Some("org.enso.pkg.Main"),
version := "0.1" version := "0.1",
) libraryDependencies ++= circe ++ Seq("commons-io" % "commons-io" % "2.6")
.settings(
libraryDependencies ++= Seq("circe-core", "circe-generic", "circe-yaml")
.map("io.circe" %% _ % "0.10.0"),
libraryDependencies += "commons-io" % "commons-io" % "2.6"
) )
val truffleRunOptions = Seq( val truffleRunOptions = Seq(
@ -85,19 +209,16 @@ val truffleRunOptions = Seq(
javaOptions += s"-Dgraal.TruffleBackgroundCompilation=false" javaOptions += s"-Dgraal.TruffleBackgroundCompilation=false"
) )
val jmh = Seq(
"org.openjdk.jmh" % "jmh-core" % "1.21" % Benchmark,
"org.openjdk.jmh" % "jmh-generator-annprocess" % "1.21" % Benchmark
)
lazy val interpreter = (project in file("Interpreter")) lazy val interpreter = (project in file("Interpreter"))
.settings( .settings(
mainClass in (Compile, run) := Some("org.enso.interpreter.Main"), mainClass in (Compile, run) := Some("org.enso.interpreter.Main"),
version := "0.1" version := "0.1",
) commands += WithDebugCommand.withDebug,
.settings(commands += WithDebugCommand.withDebug) inConfig(Compile)(truffleRunOptions),
.settings( inConfig(Test)(truffleRunOptions),
libraryDependencies ++= Seq( parallelExecution in Test := false,
logBuffered in Test := false,
libraryDependencies ++= jmh ++ Seq(
"com.chuusai" %% "shapeless" % "2.3.3", "com.chuusai" %% "shapeless" % "2.3.3",
"org.apache.commons" % "commons-lang3" % "3.9", "org.apache.commons" % "commons-lang3" % "3.9",
"org.apache.tika" % "tika-core" % "1.21", "org.apache.tika" % "tika-core" % "1.21",
@ -127,12 +248,6 @@ lazy val interpreter = (project in file("Interpreter"))
.dependsOn(Def.task { (Compile / sourceManaged).value.mkdirs }) .dependsOn(Def.task { (Compile / sourceManaged).value.mkdirs })
.value .value
) )
.settings(
inConfig(Compile)(truffleRunOptions),
inConfig(Test)(truffleRunOptions),
parallelExecution in Test := false,
logBuffered in Test := false
)
.settings( .settings(
buildNativeImage := Def buildNativeImage := Def
.task { .task {
@ -151,7 +266,7 @@ lazy val interpreter = (project in file("Interpreter"))
logBuffered := false, logBuffered := false,
inConfig(Benchmark)(Defaults.testSettings), inConfig(Benchmark)(Defaults.testSettings),
inConfig(Benchmark)(truffleRunOptions), inConfig(Benchmark)(truffleRunOptions),
bench := (test in Benchmark).value, bench := (test in Benchmark).tag(Exclusive).value,
benchOnly := Def.inputTaskDyn { benchOnly := Def.inputTaskDyn {
import complete.Parsers.spaceDelimited import complete.Parsers.spaceDelimited
val name = spaceDelimited("<name>").parsed match { val name = spaceDelimited("<name>").parsed match {
@ -166,21 +281,6 @@ lazy val interpreter = (project in file("Interpreter"))
) )
.dependsOn(pkg) .dependsOn(pkg)
val akkaActor = "com.typesafe.akka" %% "akka-actor" % "2.5.23"
val akkaStream = "com.typesafe.akka" %% "akka-stream" % "2.5.23"
val akkaHttp = "com.typesafe.akka" %% "akka-http" % "10.1.8"
val akkaSpray = "com.typesafe.akka" %% "akka-http-spray-json" % "10.1.8"
val akkaTyped = "com.typesafe.akka" %% "akka-actor-typed" % "2.5.23"
val akkaTestkit = "com.typesafe.akka" %% "akka-testkit" % "2.5.23"
val akkaSLF4J = "com.typesafe.akka" %% "akka-slf4j" % "2.5.23"
val akkaTestkitTyped = "com.typesafe.akka" %% "akka-actor-testkit-typed" % "2.5.23" % Test
val akka = Seq(akkaActor, akkaStream, akkaHttp, akkaSpray, akkaTyped)
val circe = Seq("circe-core", "circe-generic", "circe-yaml").map(
"io.circe" %% _ % "0.10.0"
)
lazy val fileManager = (project in file("FileManager")) lazy val fileManager = (project in file("FileManager"))
.settings( .settings(
(Compile / mainClass) := Some("org.enso.filemanager.FileManager") (Compile / mainClass) := Some("org.enso.filemanager.FileManager")

View File

@ -0,0 +1,166 @@
package org.enso.flexer
import org.enso.Logger
import debug.Escape
import spec.Macro
import ReaderUTF.ENDOFINPUT
import scala.collection.mutable
/** Runtime harness shared by all flexer-generated parsers.
  *
  * A concrete parser is produced by the [[Parser.compile]] macro, which is
  * expected to populate `stateDefs` with one transition function per DFA
  * state (generated from `Spec`).  The trait itself only drives the loop:
  * it feeds code points from `reader` to the current state's transition
  * function until an exit status is reported.
  *
  * @tparam T type of the value produced by a successful parse
  */
trait Parser[T] {
  import Parser._

  // Input source; injected by `run`. Assumes single-threaded use, one run
  // at a time.
  var reader: Reader = _
  // Last status reported by a transition function (see Parser.State.Status).
  var status = State.Status.Exit.OK
  // Transition functions indexed by state id; filled in by generated code.
  // NOTE(review): 256 appears to be an upper bound on the number of DFA
  // states — confirm against the code generator.
  val stateDefs = new Array[Int => Int](256)
  val logger = new Logger()
  // Text of the most recent rule match, captured in `_state.call`.
  var currentMatch = ""

  /** Result of the parse so far; supplied by the generated parser. */
  def getResult(): Option[T]

  /** Runs the parser over `input` until the state machine exits.
    *
    * @param input the reader to consume
    * @return the final reader offset paired with a [[Result.Value]]:
    *         `Success` on a FINISHED exit, `Failure` on FAIL (or when no
    *         result exists at all), and `Partial` for any other exit status.
    */
  def run(input: Reader): Result[T] = {
    reader = input
    reader.rewind.matched.set()
    reader.nextChar()
    // Keep stepping while the current state exits cleanly (OK).
    while (state.runCurrent() == State.Status.Exit.OK) Unit
    val value: Result.Value[T] = getResult() match {
      case None => Result.Failure(None)
      case Some(result) =>
        status match {
          case State.Status.Exit.FINISHED => Result.Success(result)
          case State.Status.Exit.FAIL     => Result.Failure(Some(result))
          case _                          => Result.Partial(result)
        }
    }
    Result(reader.offset, value)
  }

  /** Rewinds the reader to the position of the last recorded match. */
  final def rewind(): Unit =
    reader.rewind.matched.run()

  //// State management ////

  // FIXME: This is a hack. Without it sbt crashes and needs to be completely
  // cleaned to compile again.
  val state = _state

  /** Registry and begin/end stack of lexer states. */
  final object _state {

    // All states ever defined, indexed by their group index.
    var registry = new mutable.ArrayBuffer[State]()

    /** Creates and registers a new state.
      *
      * @param label  human-readable name used in log output
      * @param finish action run when the state is force-closed by `endTill`
      */
    def define(label: String = "unnamed", finish: => Unit = {}): State = {
      val groupIndex = registry.length
      val newState   = new State(label, groupIndex, () => finish)
      registry.append(newState)
      newState
    }

    // Suspended states; `current` is the active one.
    var stack: List[State] = Nil
    var current: State     = define("Root")

    /** Suspends the current state and activates `state`. */
    def begin(state: State): Unit = {
      logger.log(s"Begin ${state.label}")
      stack +:= current
      current = state
    }

    /** Returns to the most recently suspended state; logs an error when
      * called with nothing on the stack (i.e. on the root state).
      */
    def end(): Unit = stack match {
      case Nil => logger.err("Trying to end root state")
      case head :: tail =>
        logger.log(s"End ${current.label}, back to ${head.label}")
        current = head
        stack   = tail
    }

    /** Pops (and runs the finish action of) states until `s` is current.
      * NOTE(review): loops forever when `s` is neither current nor on the
      * stack — callers must guarantee it is reachable.
      */
    final def endTill(s: State): Unit = logger.trace {
      while (s != state.current) {
        state.current.finish()
        state.end()
      }
    }

    /** True when `state` is the active state or suspended on the stack. */
    def isInside(state: State): Boolean =
      current == state || stack.contains(state)

    /** Runs the transition function of the current state until it yields an
      * exit status, feeding it one code point per step and appending each
      * consumed character to `reader.result`.
      *
      * `finished` lags one step behind end-of-input so the final character
      * still reaches the transition function before FINISHED is forced
      * (unless a rewind moved the reader backwards in the meantime).
      *
      * @return the exit status (a negative State.Status.Exit value)
      */
    def runCurrent(): Int = {
      val cstate    = state.current
      var finished  = false
      val nextState = stateDefs(cstate.ix)
      status = State.Status.INITIAL
      while (State.valid(status)) {
        logger.log(
          s"Step (${cstate.ix}:$status) "
          + s"${Escape.str(reader.currentStr)} (${reader.charCode})"
        )
        status = nextState(status)
        if (finished && !reader.rewind.rewinded)
          status = State.Status.Exit.FINISHED
        finished = reader.charCode == ENDOFINPUT
        if (State.valid(status)) {
          if (reader.charCode != ENDOFINPUT)
            reader.result.appendCodePoint(reader.charCode)
          reader.nextChar()
        }
      }
      status
    }

    /** Invoked by generated code when a rule matches: snapshots the matched
      * text into `currentMatch`, runs the rule's action, then resets the
      * match buffer and rewind point.
      */
    def call(rule: () => Unit): State.Status.Exit = {
      currentMatch = reader.result.toString
      rule()
      reader.result.setLength(0)
      reader.rewind.matched.set()
      State.Status.Exit.OK
    }
  }

  // The root state, active before any `begin`.
  val ROOT = state.current
}
/** Companion of [[Parser]]: status codes, parse results and the `compile`
  * macro entry point.
  */
object Parser {

  val BUFFER_SIZE   = 16384
  val UTF_CHAR_SIZE = 2

  // Code points used for end-of-file / end-of-text markers.
  // NOTE(review): their usage lives in the generated code — confirm
  // semantics there.
  val eofCodePoint: Int = -1
  val etxCodePoint: Int = -2

  object State {

    /** Status values exchanged between the driver loop and the generated
      * transition functions: non-negative values denote ordinary DFA
      * states, negative values are exit codes.
      */
    object Status {
      val INITIAL = 0
      type Exit = Int
      object Exit {
        val OK       = -1
        val FAIL     = -2
        val FINISHED = -3
      }
    }

    /** True for ordinary (non-exit) status values. */
    def valid(i: Int): Boolean =
      i >= 0
  }

  /** Compiles a parser definition into a parser factory.
    * The expansion is implemented by [[Macro.compileImpl]].
    */
  def compile[T, P](p: P)(implicit ev: P <:< Parser[T]): () => P =
    macro Macro.compileImpl[T, P]

  /** Outcome of a parser run: the reader offset reached plus a
    * success / partial / failure value.
    */
  case class Result[T](offset: Int, value: Result.Value[T]) {
    def map[S](fn: T => S): Result[S] = copy(value = value.map(fn))
  }

  object Result {

    /** ADT of run outcomes; `map` transforms the carried result. */
    sealed trait Value[T] {
      def map[S](fn: T => S): Value[S]
    }

    /** Input was consumed to the end and a result produced. */
    final case class Success[T](result: T) extends Value[T] {
      def map[S](fn: T => S) = copy(fn(result))
    }

    /** A result exists but the run stopped without a definite outcome. */
    final case class Partial[T](result: T) extends Value[T] {
      def map[S](fn: T => S) = copy(fn(result))
    }

    /** Parsing failed; a partial result may still be available. */
    final case class Failure[T](result: Option[T]) extends Value[T] {
      def map[S](fn: T => S) = copy(result.map(fn))
    }
  }
}

View File

@ -0,0 +1,98 @@
package org.enso.flexer
import java.io._
import java.nio.charset.StandardCharsets.UTF_8
import org.enso.Logger
/** Input reader used by running parsers.
  *
  * Extends [[ReaderUTF]] with:
  *  - normalization of line endings (`\r` and `\r\n` become `\n`) and tab
  *    expansion (`\t` becomes [[Reader.TABSIZE]] space-width characters),
  *  - two rewind points (`matched` and `rule`) that let the parser jump
  *    back to an earlier offset, preserved across buffer refills.
  */
class Reader(input: InputStream) extends ReaderUTF(input) {
  import org.enso.flexer.Reader._

  lazy val logger = new Logger()
  // Characters consumed since the last successful match (see Parser._state.call).
  lazy val result = new java.lang.StringBuilder()

  def this(file: File)  = this(new FileInputStream(file))
  def this(str: String) = this(new ByteArrayInputStream(str.getBytes(UTF_8)))

  // Not empty while pending copies of a tab-expansion byte remain.
  final override def empty: Boolean =
    copyByte == 0 && super.empty

  /** Refills the buffer while keeping every byte a rewind point may still
    * need: the tail starting at `rewind.maxRewindOffset` is shifted to the
    * front of the buffer and the rewind offsets are shifted to match.
    *
    * @throws OutOfMemoryError when the whole buffer must be kept, i.e. the
    *         rewind span no longer fits in the buffer
    */
  final override def fill(off: Int): Unit = {
    if (rewind.maxRewindOffset == 0)
      throw new OutOfMemoryError("Rewind is impossible. Buffer is too small.")
    val keepchars = length - rewind.maxRewindOffset
    rewind.decreaseOffset(length - keepchars)
    // Copy back-to-front so the kept tail lands at the buffer start.
    for (i <- 1 to keepchars)
      buffer(keepchars - i) = buffer(length - i)
    super.fill(keepchars)
  }

  // Reading any new character clears the "just rewinded" flag.
  final override def nextChar(): Int = {
    rewind.rewinded = false
    super.nextChar()
  }

  // nextByteHelper already collapsed "\r\n" pairs; a lone '\r' is mapped to
  // '\n' here, and scheduled tab copies are emitted as spaces.
  final override def nextByte(): Int = nextByteHelper() match {
    case '\r' => '\n'
    case '\t' => ' '
    case byte => byte
  }

  private var lastByte = 0
  // Number of pending repeats of `lastByte` (used for tab expansion).
  private var copyByte = 0

  /** Produces the next raw byte, collapsing "\r\n" into a single byte and
    * scheduling TABSIZE - 1 extra copies of '\t' so that a tab expands to
    * TABSIZE characters in total.
    */
  final private def nextByteHelper(): Int = {
    if (copyByte > 0) {
      copyByte -= 1
      return lastByte
    }
    lastByte = (lastByte, super.nextByte()) match {
      case ('\r', '\n') => nextByteHelper()
      case (_, '\t')    => copyByte = TABSIZE - 1; '\t'
      case (_, byte)    => byte
    }
    lastByte
  }

  /** Buffer offset of the first byte of the current character. */
  final def charOffset: Int = offset - charSize

  // because of bug in macroContext.eval it cannot be part of object rewind
  /** One rewind point: a (charOffset, result.length) pair stored in
    * `rewind.rewindBy(index)`.
    */
  class Rewinder(index: Int) {
    import rewind._

    /** Records the current position as the rewind target. */
    final def set(): Unit = logger.trace {
      rewindBy(index)(0) = charOffset
      rewindBy(index)(1) = result.length
    }

    /** Jumps back to the recorded position, truncating `result` and
      * re-reading the character at that offset.
      */
    final def run(): Unit = logger.trace {
      result.setLength(rewindBy(index)(1))
      offset = rewindBy(index)(0)
      nextChar()
      rewinded = true
    }
  }

  /** Rewind bookkeeping: slot 0 tracks the last match, slot 1 the current
    * rule; recorded offsets stay valid across buffer refills (see `fill`).
    */
  final object rewind {
    var rewinded = false
    // Two (offset, resultLength) slots; length -1 marks a slot as unset.
    lazy val rewindBy = Array(Array(0, -1), Array(0, -1))
    lazy val matched  = new Rewinder(0)
    lazy val rule     = new Rewinder(1)

    // Smallest buffer offset that must survive a refill.
    def maxRewindOffset =
      if (rewindBy(0)(1) == -1) length else rewindBy(0)(0)

    /** Shifts all recorded offsets left by `off` after a refill moved the
      * buffer contents.
      */
    def decreaseOffset(off: Int): Unit =
      for (i <- rewind.rewindBy.indices)
        rewind.rewindBy(i)(0) -= off
  }
}
object Reader {
  /** Width of a tab stop: '\t' is expanded to this many characters in total
    * (see `Reader.nextByteHelper`).
    */
  val TABSIZE = 4
}

View File

@ -0,0 +1,88 @@
package org.enso.flexer
import java.io._
import java.nio.charset.StandardCharsets.UTF_8
/** Fast UTF-8 reader.
  *
  * It uses an unboxed byte buffer under the hood and deals correctly with
  * variable-length UTF-8 characters, decoding one code point at a time.
  * (Line-ending and tab normalization is layered on top by the `Reader`
  * subclass.)
  */
class ReaderUTF(val input: InputStream) {
  import ReaderUTF._

  // buffer will be unboxed as long as we don't use any fancy scala
  // collection methods on it
  val buffer = new Array[Byte](BUFFERSIZE)
  // Index of the next byte to consume.
  var offset = 0
  // Number of valid bytes currently in `buffer`.
  var length = BUFFERSIZE
  // Byte length of the current (most recently decoded) character.
  var charSize = 0
  // Code point of the current character, or ENDOFINPUT.
  var charCode = ENDOFINPUT

  def this(file: File)  = this(new FileInputStream(file))
  def this(str: String) = this(new ByteArrayInputStream(str.getBytes(UTF_8)))

  // Pre-load the first chunk at construction time.
  fill(0)

  /** Reads more bytes into the buffer starting at `off`.
    * NOTE(review): `InputStream.read` returns -1 at end of stream, which
    * makes `length = off - 1`; `empty`/`nextByte` then report end of input.
    * Confirm this interplay is intentional.
    */
  protected def fill(off: Int): Unit = {
    length = off + input.read(buffer, off, BUFFERSIZE - off)
    offset = off
  }

  /** Next raw byte as a (sign-extended) Int, refilling the buffer when it
    * is exhausted; ENDOFINPUT once the stream is done.
    */
  protected def nextByte(): Int = {
    if (offset >= length)
      if (!empty) fill(0)
      else return ENDOFINPUT
    val byte = buffer(offset)
    offset += 1
    byte.toInt
  }

  // A buffer that was not filled to capacity marks the final chunk.
  def empty: Boolean =
    offset >= length && length < BUFFERSIZE

  /** Decodes the next UTF-8 code point into `charCode` (and its byte length
    * into `charSize`) and returns it; ENDOFINPUT at end of stream.
    */
  def nextChar(): Int = {
    charCode = nextByte()
    charSize = charLength(charCode.toByte.toInt)
    charCode = charCode & charMask(charSize)
    // Fold in 6 payload bits from each continuation byte.
    for (_ <- 1 until charSize)
      charCode = charCode << UTFBYTESIZE | nextByte() & charMask(-1)
    charCode
  }

  /** Decodes the remaining input into a String.
    * Consumes the reader — this is not a pure accessor.
    */
  override def toString(): String = {
    val builder = new java.lang.StringBuilder()
    while (nextChar() != ENDOFINPUT) builder.appendCodePoint(charCode)
    builder.toString
  }

  /** The current character as a String (empty at end of input). */
  final def currentStr: String =
    if (charCode < 0) "" else new String(Character.toChars(charCode))
}
/** Companion of [[ReaderUTF]]: constants and UTF-8 byte-level helpers. */
object ReaderUTF {

  /** Sentinel value signalling that the input stream is exhausted. */
  val ENDOFINPUT = -1

  /** Capacity of the unboxed byte buffer. */
  val BUFFERSIZE = 32768

  /** Number of payload bits carried by each UTF-8 continuation byte. */
  val UTFBYTESIZE = 6

  /** Total number of bytes in the UTF-8 sequence introduced by the (signed)
    * leading byte `char`; 0 for end of input.
    * For more info on UTF decoding look at: https://en.wikipedia.org/wiki/UTF-8
    */
  def charLength(char: Int): Int =
    if (char == ENDOFINPUT) 0
    else {
      // The high bits of the leading byte encode the sequence length.
      val marker = ~char >> 4
      if (marker == 0) 4                      // 1111 0xxx
      else if (marker == 1) 3                 // 1110 xxxx
      else if (marker == 2 || marker == 3) 2  // 110x xxxx
      else 1                                  // ASCII (single byte)
    }

  /** Bit mask extracting the payload bits of a leading byte for a sequence
    * of `size` bytes; any other argument yields the continuation-byte mask.
    */
  def charMask(size: Int): Int =
    if (size == 0) -1       // end of input: do not mask
    else if (size == 1) 127 // 0111 1111
    else if (size == 2) 63  // 0011 1111
    else if (size == 3) 31  // 0001 1111
    else if (size == 4) 15  // 0000 1111
    else 63                 // continuation byte: 0011 1111
}

View File

@ -0,0 +1,141 @@
package org.enso.flexer
import org.enso.flexer.automata.DFA
import org.enso.flexer.automata.State
import scala.collection.immutable.Range
import scala.collection.mutable
import scala.reflect.runtime.universe._
/** Creates update functions for given DFA ~ nextState : state -> state.
* Each state has a pattern match on current utf code point.
* ASCII characters are explicitly matched, so that we get O(1) lookup.
* The rest of UTF characters is dispatched by tree of if-else,
* with O(log(N)) lookup.
*/
case class Spec(dfa: DFA) {
import Spec._
val stateHasOverlappingRules = mutable.Map(0 -> false)
case class Branch(range: Range, body: Tree)
/** Builds the AST executed when the DFA moves to `targetState` on the
  * current input range.
  *
  * @param targetState  state the transition leads to (State.missing = none)
  * @param maybeState   accepting rule recorded for the current state in
  *                     `dfa.endStatePriorityMap` (presumably the
  *                     highest-priority one — confirm against DFA builder)
  * @param rulesOverlap whether a shorter rule match may already have been
  *                     recorded, so the reader must rewind to it first
  *
  * Side effect: when a transition is taken out of an accepting state into a
  * state with no rule of its own, the rule is propagated to `targetState`
  * in `dfa.endStatePriorityMap` and the state is flagged in
  * `stateHasOverlappingRules`.
  */
def genBranchBody(
  targetState: Int,
  maybeState: Option[State.Desc],
  rulesOverlap: Boolean
): Tree = (targetState, maybeState, rulesOverlap) match {
  // Dead end with no rule matched anywhere: fail.
  case (State.missing, None, _) =>
    Literal(Constant(Parser.State.Status.Exit.FAIL))
  // Dead end but this state accepts: run its rule directly.
  case (State.missing, Some(state), false) =>
    q"state.call(${TermName(state.rule)})"
  // Overlapping rules: rewind to the recorded rule match before running it.
  case (State.missing, Some(state), true) =>
    q"reader.rewind.rule.run(); state.call(${TermName(state.rule)})"
  case _ =>
    val targetStateHasNoRule = maybeState match {
      case Some(state) if !dfa.endStatePriorityMap.contains(targetState) =>
        dfa.endStatePriorityMap += targetState -> state
        stateHasOverlappingRules += targetState -> true
        true
      case _ => false
    }
    val trgState = Literal(Constant(targetState))
    // Record the rewind point when entering a state that just inherited its
    // rule from this one.
    if (targetStateHasNoRule && !rulesOverlap)
      q"reader.rewind.rule.set(); $trgState"
    else
      q"$trgState"
}
def genSwitch(branchs: Seq[Branch]): Seq[CaseDef] = {
branchs.map {
case Branch(range, body) =>
val pattern =
Alternative(range.map(i => q"${Literal(Constant(i))}").toList)
cq"$pattern => $body"
}
}
def genIf(branchs: Seq[Branch]): Branch = {
branchs match {
case b +: Seq() => b
case a +: b +: rest =>
val range = a.range.start to b.range.end
val body = q"if (charCode <= ${a.range.end}) ${a.body} else ${b.body}"
genIf(Branch(range, body) +: rest)
}
}
def generateCaseBody(stateIx: Int): Tree = {
val overlaps = stateHasOverlappingRules.getOrElse(stateIx, false)
val state = dfa.endStatePriorityMap.get(stateIx)
var trgState = dfa.links(stateIx)(0)
var rStart = Int.MinValue
val branches = dfa.vocabulary.toVector.flatMap {
case (range, vocIx) =>
val newTrgState = dfa.links(stateIx)(vocIx)
if (newTrgState != trgState) {
val rEnd = range.start - 1
val xtrgState = trgState
val xrStart = rStart
trgState = newTrgState
rStart = range.start
Some(
Branch(xrStart to rEnd, genBranchBody(xtrgState, state, overlaps))
)
} else None
}
val allBranches = branches :+
Branch(rStart to Int.MaxValue, genBranchBody(trgState, state, overlaps))
val (utf1 :+ b1, rest) = allBranches.span(_.range.start < MIN_MATCH_CODE)
val (asci, utf2) = rest.span(_.range.end <= MAX_MATCH_CODE)
utf2 match {
case b2 +: utf2 =>
val b1UTF = Branch(b1.range.start to MIN_MATCH_CODE - 1, b1.body)
val b1ASC = Branch(MIN_MATCH_CODE to b1.range.end, b1.body)
val b2ASC = Branch(b2.range.start to MAX_MATCH_CODE, b2.body)
val b2UTF = Branch(MAX_MATCH_CODE + 1 to b2.range.end, b2.body)
val emptyB1ASC = b1ASC.range.end < MIN_MATCH_CODE
val emptyB2UTF = b2UTF.range.start <= MAX_MATCH_CODE
val ascii = if (emptyB1ASC) asci :+ b2ASC else b1ASC +: asci :+ b2ASC
val utfMiddle = if (emptyB2UTF) Vector(b1UTF) else Vector(b1UTF, b2UTF)
val utf = utf1 ++ utfMiddle ++ utf2
val body = genSwitch(ascii) :+ cq"charCode => ${genIf(utf).body}"
q"${Match(q"reader.charCode", body.toList)}"
case _ =>
genIf(utf1 :+ b1).body
}
}
def generate(i: Int): Tree = {
val stateNames =
dfa.links.indices.toList
.map(st => (st, TermName(s"state${i}_${st}")))
val stateMatch = Match(q"state", stateNames.map {
case (st, fun) => cq"$st => $fun"
})
val stateBodies = stateNames.map {
case (st, fun) => q"def $fun = {${generateCaseBody(st)}}"
}
q"""
stateDefs($i) = ${TermName(s"nextState$i")}
def ${TermName(s"nextState$i")}(state: Int): Int = $stateMatch
..$stateBodies
"""
}
}
object Spec {
  /** Bounds of the code points matched by an explicit switch (O(1) lookup):
    * End Of Input (-1) plus every single-byte code (0 - 255).
    * Note: 0 - 255 is the full byte range; ASCII proper is only 0 - 127.
    */
  val MIN_MATCH_CODE = -1
  val MAX_MATCH_CODE = 255
}

View File

@ -0,0 +1,96 @@
package org.enso.flexer
import org.enso.flexer.automata.NFA
import org.enso.flexer.automata.Pattern
import org.enso.flexer.state.Rule
import scala.reflect.runtime.universe.Tree
/** A lexer group: an ordered collection of rules, optionally inheriting the
  * rules of a parent group, compiled to an NFA and then to generated code.
  *
  * @param label  human-readable group name
  * @param ix     group index, used to name generated functions
  * @param finish callback invoked when the group finishes (stored only here;
  *               invocation happens outside this class)
  */
class State(val label: String, val ix: Int, val finish: () => Unit) {
  var parent: Option[State] = None
  // Rules in reverse insertion order; reversed back in `rules()`.
  private var revRules: List[Rule] = List()

  /** Convenience setter accepting a bare parent instead of an Option. */
  def parent_=(p: State): Unit =
    parent = Some(p)

  def addRule(rule: Rule): Unit =
    revRules = rule +: revRules

  /** Start building a rule for `expr`; the builder registers it on `run`. */
  def rule(expr: Pattern): Rule.Builder =
    Rule.Builder(expr, addRule)

  /** DSL alias for [[rule]]. */
  def ||(expr: Pattern): Rule.Builder =
    rule(expr)

  /** Own rules (in insertion order) followed by the parent's rules. */
  def rules(): List[Rule] = {
    val myRules = revRules.reverse
    parent.map(myRules ++ _.rules()).getOrElse(myRules)
  }

  private def ruleName(ruleIx: Int): String =
    s"group${ix}_rule$ruleIx"

  /** Builds one NFA covering all rules: a common start state, one branch per
    * rule, and a common (unnamed-rule) end state.
    */
  private def buildAutomata(): NFA = {
    val nfa = new NFA
    val start = nfa.addState()
    val endpoints = rules().zipWithIndex.map {
      case (rule, ix) => buildRuleAutomata(nfa, start, ix, rule)
    }
    val end = nfa.addState()
    nfa.state(end).rule = Some("")
    for (endpoint <- endpoints) {
      nfa.link(endpoint, end)
    }
    nfa
  }

  /** Appends the automaton of one rule and tags its end state with the
    * generated rule name.
    */
  def buildRuleAutomata(nfa: NFA, last: Int, ruleIx: Int, rule: Rule): Int = {
    val end = buildExprAutomata(nfa, last, rule.pattern)
    nfa.state(end).rule = Some(ruleName(ruleIx))
    end
  }

  /** Thompson-style construction: appends states/links for `expr` after
    * `last` and returns the new end state.
    */
  def buildExprAutomata(nfa: NFA, last: Int, expr: Pattern): Int = {
    import Pattern._
    val current = nfa.addState()
    nfa.link(last, current)
    expr match {
      case Always => current
      case Range(start, end) =>
        val state = nfa.addState()
        // NOTE(review): scala.Range(start, end) is end-exclusive as a
        // collection, but downstream only its .start/.end fields are read
        // and treated as inclusive bounds (Registry.add uses closed ranges,
        // Dict.insert adds end + 1) — confirm this stays consistent.
        nfa.link(current, state, scala.Range(start, end))
        state
      case Seq(first, second) =>
        val s1 = buildExprAutomata(nfa, current, first)
        buildExprAutomata(nfa, s1, second)
      case Many(body) =>
        // Epsilon skeleton: current -> s1 -> body -> s2 -> s3, with
        // current -> s3 (skip) and s3 -> s1 (loop back).
        val s1 = nfa.addState()
        val s2 = buildExprAutomata(nfa, s1, body)
        val s3 = nfa.addState()
        nfa.link(current, s1)
        nfa.link(current, s3)
        nfa.link(s2, s3)
        nfa.link(s3, s1)
        s3
      case Or(first, second) =>
        // Both alternatives branch from `current` and join at s3.
        val s1 = buildExprAutomata(nfa, current, first)
        val s2 = buildExprAutomata(nfa, current, second)
        val s3 = nfa.addState()
        nfa.link(s1, s3)
        nfa.link(s2, s3)
        s3
    }
  }

  /** Generates the group's code: the DFA dispatch (via [[Spec]]) followed by
    * one def per rule body.
    */
  def generate(): Tree = {
    import scala.reflect.runtime.universe._
    val nfa = buildAutomata()
    val dfa = nfa.toDFA()
    val state = Spec(dfa).generate(ix)
    val rs = rules.zipWithIndex.map {
      case (rule, ruleIx) =>
        q"def ${TermName(ruleName(ruleIx))}() = ${rule.tree}"
    }
    q"..$state; ..$rs"
  }
}

View File

@ -0,0 +1,9 @@
package org.enso.flexer.automata
import scala.collection.mutable
/** A deterministic finite automaton produced from an [[NFA]].
  *
  * @param vocabulary division of the code-point space into the ranges the
  *        automaton distinguishes
  * @param links transition table: `links(state)(vocabularyIx)` is the target
  *        state, or `State.missing` when there is no transition
  * @param endStatePriorityMap for each accepting state, the priority and name
  *        of the rule to fire (mutated later by the code generator)
  */
case class DFA(
  vocabulary: Dict,
  links: Array[Array[Int]],
  endStatePriorityMap: mutable.Map[Int, State.Desc]
)

View File

@ -0,0 +1,23 @@
package org.enso.flexer.automata
import scala.collection.immutable
/** Partition of the code-point space into half-open ranges, delimited by a
  * sorted set of division points. Starts with the single range
  * [0, Int.MaxValue); inserting a range splits divisions at its boundaries.
  */
final class Dict extends Iterable[(Range, Int)] {
  // Sorted division points; consecutive points delimit one half-open range.
  private var divisions = immutable.SortedSet[Int](0, Int.MaxValue)

  /** Split the partition at the boundaries of the (inclusive) `range`. */
  def insert(range: Range): Unit = {
    divisions += range.start
    divisions += range.end + 1
  }

  /** Number of ranges in the partition. */
  override def size: Int = divisions.size - 1

  /** All [start, end) ranges in ascending order, paired with their index. */
  override def iterator: Iterator[(Range, Int)] = {
    val starts = divisions.iterator
    val ends   = divisions.iterator.drop(1)
    starts.zip(ends).zipWithIndex.map {
      case ((lo, hi), ix) => (lo until hi, ix)
    }
  }

  override def toString: String =
    divisions.mkString("Dict(", ",", ")")
}

View File

@ -0,0 +1,186 @@
package org.enso.flexer.automata
import org.enso.Logger
import scala.collection.mutable
/** A nondeterministic finite automaton built from lexer patterns, convertible
  * to a [[DFA]] via subset construction (`toDFA`).
  */
final class NFA {
  val logger: Logger = new Logger()
  val states: mutable.ArrayBuffer[State] = new mutable.ArrayBuffer()
  // Ranges inserted here partition the code-point space for the DFA matrix.
  val vocabulary = new Dict()

  //// API ////

  /** Appends a fresh state and returns its index. */
  def addState(): Int = {
    val state = new State()
    states += state
    states.length - 1
  }

  def state(ix: Int): State =
    states(ix)

  /** Links `start` to `end` over a character range and records the range in
    * the vocabulary.
    */
  def link(start: Int, end: Int, charRange: Range): Unit = {
    vocabulary.insert(charRange)
    state(start).links.add(end, charRange)
  }

  /** Epsilon (unlabelled) link from `start` to `end`. */
  def link(start: Int, end: Int): Unit =
    state(start).links.add(end)

  //// NFA -> DFA ////

  // Epsilon closure of one state; `computed` marks a finished closure.
  final private class EpsMatrix {
    var links: Set[Int] = Set()
    var computed: Boolean = false
  }

  /** Fills `stateToMat(i)` with the epsilon closure of state `i`, recursing
    * through epsilon links.
    *
    * NOTE(review): states on an epsilon cycle are left with
    * `computed = false` and, since a non-null entry is never revisited, their
    * closure may be missing links discovered later in the cycle — confirm
    * whether generated patterns can produce such cycles.
    */
  private def fillEpsMatrix(i: Int, stateToMat: Array[EpsMatrix]): Unit = {
    // Deduplication map of completed closures; local to this top-level call.
    val epsGroupIxMap: mutable.Map[Set[Int], Int] = mutable.Map()
    def go(i: Int): Unit = {
      var epsLinks = Set[Int](i)
      if (stateToMat(i) == null) {
        var circular = false
        val epsMatrix = new EpsMatrix()
        // Install the (still empty) matrix first to break recursion cycles.
        stateToMat(i) = epsMatrix
        state(i).links.epsilon.foreach { tgt =>
          go(tgt)
          val tgtEpsMatrix = stateToMat(tgt)
          epsLinks = epsLinks + tgt ++ tgtEpsMatrix.links
          if (!tgtEpsMatrix.computed) {
            circular = true
          }
        }
        epsMatrix.links = epsLinks
        if (!circular) {
          if (epsGroupIxMap.get(epsLinks).isEmpty)
            epsGroupIxMap += (epsLinks -> epsGroupIxMap.size)
          epsMatrix.computed = true
        }
      }
    }
    go(i)
  }

  /** Epsilon closure of every state, indexed by state. */
  private def epsMatrix(): IndexedSeq[Set[Int]] = {
    val arr = new Array[EpsMatrix](states.size)
    states.indices.foreach(fillEpsMatrix(_, arr))
    arr.map(_.links)
  }

  /** Transition matrix of the NFA: `matrix(state)(vocabularyIx)` is the
    * ranged-link target or `State.missing`.
    */
  private def nfaMatrix(): Array[Array[Int]] = {
    logger.group("Computing NFA Matrix") {
      val matrix = Array.ofDim[Int](states.length, vocabulary.size)
      for (stateIx <- states.indices) {
        val s = state(stateIx)
        for ((range, vocIx) <- vocabulary) {
          // Ranges never straddle vocabulary divisions, so probing the
          // range start is enough to find the link for the whole range.
          s.links.ranged.get(range.start) match {
            case Some(tgt) => matrix(stateIx)(vocIx) = tgt
            case None => matrix(stateIx)(vocIx) = State.missing
          }
        }
      }
      matrix
    }
  }

  /** Subset construction: converts this NFA into a [[DFA]], assigning each
    * accepting DFA state the highest-priority NFA rule it contains
    * (earlier NFA states have higher priority).
    */
  def toDFA(): DFA = {
    logger.group("Computing DFA Matrix") {
      val epsMat = epsMatrix()
      val nfaMat = nfaMatrix()
      var dfaRows = 0
      var dfaMat = Array[Array[Int]]()
      // Maps each epsilon-closure set to its DFA state id, plus the reverse.
      val dfaEpsMap = mutable.Map[Set[Int], Int]()
      val dfaEpsIxs = mutable.ArrayBuffer[Set[Int]]()
      // Registers a new DFA state for an unseen closure set.
      def addDFAKey(epsSet: Set[Int]): Int = {
        val id = dfaEpsMap.size
        dfaEpsMap += (epsSet -> id)
        dfaEpsIxs += epsSet
        dfaRows += 1
        dfaMat :+= Array.fill(vocabulary.size)(State.missing)
        logger.log(s"DFA[$id] = $epsSet")
        id
      }
      logger.group(s"Preparing start points") {
        val initEpsSet = epsMat(0)
        addDFAKey(initEpsSet)
      }
      // Worklist loop: `dfaRows` grows while new closure sets are found.
      var i = 0
      while (i < dfaRows) {
        val epsIxs = dfaEpsIxs(i)
        logger.group(s"Computing DFA[$i]") {
          for ((voc, vocIx) <- vocabulary) {
            logger.group(s"Vocabulary '$voc'") {
              // Union of closures reachable from this DFA state over `voc`.
              var epsSet = Set[Int]()
              for (epsIx <- epsIxs) {
                val tgt = nfaMat(epsIx)(vocIx)
                if (tgt != State.missing)
                  epsSet = epsSet ++ epsMat(tgt)
              }
              if (epsSet.nonEmpty) {
                dfaMat(i)(vocIx) = dfaEpsMap.get(epsSet) match {
                  case None => addDFAKey(epsSet)
                  case Some(id) =>
                    logger.log(s"Existing DFA ID $id")
                    id
                }
              }
            }
          }
        }
        i += 1
      }
      // Priority = reversed state index: rules added earlier win.
      val nfaEndStatePriorityMap = mutable.Map[Int, Int]()
      for (i <- nfaMat.indices) {
        if (state(i).rule.isDefined)
          nfaEndStatePriorityMap += (i -> (nfaMat.length - i))
      }
      // For each DFA state pick the member NFA state with highest priority.
      val dfaEndStatePriorityMap = mutable.Map[Int, State.Desc]()
      for ((epss, dfaIx) <- dfaEpsIxs.zipWithIndex) {
        val eps = epss.maxBy(nfaEndStatePriorityMap.getOrElse(_, State.missing))
        nfaEndStatePriorityMap.get(eps).foreach { priority =>
          val rule = state(eps).rule.getOrElse("")
          dfaEndStatePriorityMap += dfaIx -> State.Desc(priority, rule)
        }
      }
      DFA(vocabulary, dfaMat, dfaEndStatePriorityMap)
    }
  }

  /** Debug helper: renders the NFA as Graphviz DOT, opens it in the system
    * browser via an online viewer (side effect!) and returns the DOT source.
    */
  def visualize(): String = {
    import java.awt.Desktop
    import java.net.URI
    import java.net.URLEncoder
    val gray = "#AAAAAA"
    val lines = mutable.ArrayBuffer[String]()
    lines += "digraph G {"
    lines += "node [shape=circle width=0.8]"
    for ((state, source) <- states.zipWithIndex) {
      // States with no ranged links are greyed out.
      if (state.links.ranged.isEmpty) {
        lines += s"""$source [color="$gray" fontcolor="$gray"]"""
      } else {
        lines += s"""$source"""
      }
      for ((range, target) <- state.links.ranged.asMapOfRanges()) {
        lines += s"""$source -> $target [label="$range"]"""
      }
      for (target <- state.links.epsilon) {
        lines += s"""$source -> $target [style="dashed" color="$gray"]"""
      }
    }
    lines += "}"
    val code = lines.mkString("\n")
    var webCode = code
    webCode = URLEncoder.encode(webCode, "UTF-8")
    webCode = webCode.replaceAll("[+]", "%20")
    val address = "https://dreampuf.github.io/GraphvizOnline/#" + webCode
    Desktop.getDesktop().browse(new URI(address))
    code
  }
}

View File

@ -0,0 +1,82 @@
package org.enso.flexer.automata
import org.enso.flexer.Parser
import scala.annotation.tailrec
/** A lexer pattern: regular-expression combinators later compiled to an NFA
  * (see the companion object for the concrete cases and constructors).
  */
trait Pattern {
  import Pattern._
  /** Alternative: match `this` or `that`. */
  def |(that: Pattern): Pattern = Or(this, that)
  /** Sequence: match `this` followed by `that`. */
  def >>(that: Pattern): Pattern = Seq(this, that)
  /** Zero or more repetitions. */
  def many: Pattern = Many(this)
  /** One or more repetitions (`this` followed by `this.many`). */
  def many1: Pattern = this >> many
  /** Zero or one occurrence. */
  def opt: Pattern = this | always
}
object Pattern {
  /** Matches the empty string. */
  case object Always extends Pattern
  /** Matches one code point within [start, end], bounds inclusive. */
  case class Range(start: Int, end: Int) extends Pattern
  /** Alternative of two patterns. */
  case class Or(left: Pattern, right: Pattern) extends Pattern
  /** Sequence of two patterns. */
  case class Seq(first: Pattern, second: Pattern) extends Pattern
  /** Zero-or-more repetition of `body`. */
  case class Many(body: Pattern) extends Pattern

  //// API ////

  val always: Pattern = Always

  def range(start: Char, end: Char): Range = Range(start.toInt, end.toInt)
  def range(start: Int, end: Int): Range = Range(start, end)
  def range(end: Int): Range = range(0, end)
  def range(end: Char): Range = range(0, end.toInt)
  def char(char: Char): Range = range(char.toInt, char.toInt)
  def char(char: Int): Range = range(char, char)

  /** A pattern that never matches: the empty range [0, -1]. */
  val never: Pattern = range(-1)
  val any: Range = range(Int.MaxValue)
  val eof: Range = char(Parser.eofCodePoint)

  /** Matches any single character contained in `chars`. */
  def anyOf(chars: String): Pattern = anyOf(chars.map(char))

  /** Left-folded alternative of `alts`; an empty sequence yields `never`. */
  def anyOf(alts: scala.Seq[Pattern]): Pattern = alts.fold(never)(_ | _)

  /** Matches any single character NOT contained in `chars`, built as the
    * complementary set of inclusive ranges covering [0, Int.MaxValue].
    */
  def noneOf(chars: String): Pattern = {
    val pointCodes = chars.map(_.toInt).sorted
    val startPoints = 0 +: pointCodes.map(_ + 1)
    val endPoints = pointCodes.map(_ - 1) :+ Int.MaxValue
    val ranges = startPoints.zip(endPoints)
    // Adjacent excluded characters yield empty ranges (end < start); drop them.
    val validRanges = ranges.filter { case (s, e) => e >= s }
    val patterns = validRanges.map { case (s, e) => range(s, e) }
    anyOf(patterns)
  }

  /** Matches any single character except `char`. */
  final def not(char: Char): Pattern =
    noneOf(char.toString)

  /** Matches between `min` and `max` (inclusive) repetitions of `pat`.
    *
    * @throws IllegalArgumentException if `max < min`. (Previously such input
    *         made the inner loop count below zero and never terminate.)
    */
  def repeat(pat: Pattern, min: Int, max: Int): Pattern = {
    require(min <= max, s"repeat: min ($min) must not exceed max ($max)")
    @tailrec
    def go(i: Int, ch: Pattern, out: Pattern): Pattern =
      i match {
        case 0 => out
        case _ =>
          val ch2 = ch >> pat
          go(i - 1, ch2, out | ch2)
      }
    val minPat = repeat(pat, min)
    go(max - min, minPat, minPat)
  }

  /** Matches exactly `num` repetitions of `pat` (empty match for num <= 0). */
  def repeat(pat: Pattern, num: Int): Pattern =
    0.until(num).foldLeft(always)((t, _) => t >> pat)

  //// Implicits ////

  implicit class ExtendedChar(_this: Char) {
    /** Alternative of two single-character patterns. */
    final def ||(that: Char): Pattern =
      Or(char(_this), char(that))
  }

  // NOTE: the name contains a typo ("Pttern"); kept as-is so sources that
  // import this conversion explicitly by name keep compiling.
  implicit def automataPtternFromChar(char: Char): Pattern =
    range(char, char)

  /** A string pattern is the left-folded sequence of its characters. */
  implicit def automataPatternFromString(str: String): Pattern =
    str.toList match {
      case Nil => always
      case s :: ss => ss.foldLeft(char(s): Pattern)(_ >> _)
    }
}

View File

@ -0,0 +1,32 @@
package org.enso.flexer.automata
import org.feijoas.mango.common.{collect => Guava}
import scala.collection.mutable
/** A single automaton state: its outgoing links plus the name of the rule it
  * accepts, if any.
  */
class State {
  val links: State.Link.Registry = new State.Link.Registry()
  // Name of the accepting rule, or None for non-accepting states.
  var rule: Option[String] = None
}

object State {
  /** Marker index for a missing transition target. */
  val missing = -1

  /** Descriptor of an accepting state: rule priority and rule name. */
  case class Desc(priority: Int, rule: String)

  object Link {
    /** Holds a state's outgoing links: epsilon links and links labelled with
      * inclusive code-point ranges (backed by a Guava RangeMap).
      */
    class Registry {
      private type IntOrd = Ordering.Int.type
      // Targets of epsilon (unlabelled) transitions.
      val epsilon: mutable.ArrayBuffer[Int] = new mutable.ArrayBuffer()
      // Code-point-range transitions: each closed range maps to its target.
      val ranged: Guava.mutable.RangeMap[Int, Int, IntOrd] =
        Guava.mutable.RangeMap()

      /** Adds an epsilon link to `target`. */
      def add(target: Int): Unit =
        epsilon += target

      /** Adds a ranged link; empty ranges (start > end) are silently dropped. */
      def add(target: Int, range: Range) =
        if (range.start <= range.end)
          ranged.put(Guava.Range.closed(range.start, range.end), target)
    }
  }
}

View File

@ -0,0 +1,21 @@
package org.enso.flexer.debug
/** Renders characters and strings in source-code style with backslash
  * escapes, for debug output.
  */
object Escape {
  // Characters that have a dedicated backslash escape sequence.
  private val special: Map[Char, String] = Map(
    '\b' -> "\\b",
    '\t' -> "\\t",
    '\n' -> "\\n",
    '\f' -> "\\f",
    '\r' -> "\\r",
    '"'  -> "\\\"",
    '\'' -> "\\\'",
    '\\' -> "\\\\"
  )

  /** Escape sequence for `ch`: a dedicated escape if one exists, an octal
    * escape for other control characters, the character itself otherwise.
    */
  def char(ch: Char): String =
    special.get(ch) match {
      case Some(esc) => esc
      case None =>
        if (ch.isControl) "\\0" + Integer.toOctalString(ch.toInt)
        else String.valueOf(ch)
    }

  /** Escapes every character of `str`. */
  def char(str: String): String = str.flatMap(char(_))

  /** Escapes `str` and wraps it in single quotes. */
  def str(str: String): String = s"'${char(str)}'"
}

View File

@ -0,0 +1,60 @@
package org.enso.flexer.spec
import org.enso.flexer.Parser
import org.enso.lint.Unused
import scala.reflect.macros.blackbox.Context
import scala.reflect.runtime.universe
// FIXME: Needs to be refactored. Contains deprecated API usage
object Macro {
  /** Macro implementation behind parser compilation: evaluates the parser
    * expression at compile time, generates code for all its rule groups, and
    * splices that code into a fresh `__Parser__` subclass of the parser's
    * class, returning a factory for it.
    *
    * @param p  expression constructing the parser instance
    * @param ev evidence that `P` is a `Parser[T]` (only checked, not used)
    */
  def compileImpl[T: c.WeakTypeTag, P: c.WeakTypeTag](
    c: Context
  )(p: c.Expr[P])(ev: c.Expr[P <:< Parser[T]]): c.Expr[() => P] = {
    import c.universe._
    Unused(ev)
    val tree = p.tree
    val expr = q"$tree"
    // Compile-time evaluation of the parser; untypecheck is required before
    // re-evaluating an already-typed tree.
    val parser = c.eval(c.Expr[Parser[T]](c.untypecheck(expr.duplicate)))
    // Generate code for every registered rule group and import the resulting
    // runtime-universe trees into the macro universe.
    val groups = c.internal
      .createImporter(universe)
      .importTree(universe.Block(parser.state.registry.map(_.generate()): _*))
    // `tree` is expected to be a constructor call like `pkg.Cls()`; extract
    // the class name and its select path.
    val (superClassName, tree2) = tree match {
      case Apply(Select(tree2 @ Select(_, name), _), _) => (name, tree2)
      case _ =>
        throw new Error(
          s""" ERROR: Wrong shape
             | Expected Apply(Select(Select(_, name), _), _), got:
             | ${showRaw(tree)}
             |""".stripMargin
        )
    }
    // Rewrite references to the parser superclass into `this`, so the
    // generated defs bind to the new subclass.
    val groupsRebind = new Transformer {
      override def transform(tree: Tree): Tree = tree match {
        case Select(Ident(base), name) =>
          val base2 = if (base == superClassName) q"this" else Ident(base)
          super.transform(Select(base2, name))
        case node => super.transform(node)
      }
    }
    val reboundGroups = groupsRebind.transform(groups)
    // Appends the generated group definitions to the class template body.
    val addGroupDefs = new Transformer {
      override def transform(tree: Tree): Tree = tree match {
        case Template(parents, self, body) =>
          // Trailing `None` forces the quasiquote into a Block of stats.
          val exprs = q"..$reboundGroups;None".asInstanceOf[Block].stats
          Template(parents, self, body ++ exprs)
        case node => super.transform(node)
      }
    }
    val clsDef = c.parse(s"final class __Parser__ extends $tree2")
    val tgtDef = addGroupDefs.transform(clsDef)
    c.Expr[() => P](q"$tgtDef; () => { new __Parser__ () }")
  }
}

View File

@ -0,0 +1,16 @@
package org.enso.flexer.state
import org.enso.flexer.automata.Pattern
import scala.reflect.runtime.universe.Expr
import scala.reflect.runtime.universe.Tree
/** A lexer rule: the `pattern` to match and the code `tree` to run on match. */
final case class Rule(pattern: Pattern, tree: Tree)

object Rule {
  /** Builder that completes a rule with its body and hands the finished rule
    * to `finalizer` (typically State.addRule).
    */
  final case class Builder(pattern: Pattern, finalizer: Rule => Unit) {
    def run(expr: Expr[_]): Unit = run(expr.tree)
    def run(tree: Tree): Unit = finalizer(Rule(pattern, tree))
    // DSL operator aliases for `run`.
    def ||(expr: Expr[_]) = run(expr)
    def ||(expr: Tree) = run(expr)
  }
}

View File

@ -0,0 +1,168 @@
package org.enso
import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context
import org.enso.lint.Unused
/** Indentation-aware console logger. The public `log`/`warn`/`err`/`group`/
  * `trace` methods are macros that redirect to the underscore-prefixed
  * implementations below; when logging is disabled via compiler settings the
  * calls are removed at compile time (see [[Logger.checkEnabled]]).
  */
class Logger {
  import Logger._
  // Current indentation depth; one "| " prefix per level.
  var nesting = 0
  def log(s: String): Unit =
    macro funcRedirect
  def warn(s: String): Unit =
    macro funcRedirect
  def err(s: String): Unit =
    macro funcRedirect
  def group[T](msg: String)(body: => T): T =
    macro groupRedirect[T]
  def trace[T](body: => T): T =
    macro targetRedirect[T]
  def trace_[T](body: => T): T =
    macro targetRedirect_[T]
  // Runtime implementations targeted by the macro redirection; their names
  // must stay "_" + the public name (the redirectors build them by string
  // concatenation).
  def _log(msg: String): Unit =
    println("| " * nesting + msg)
  def _warn(msg: String): Unit =
    println("| " * nesting + Console.YELLOW + msg + Console.RESET)
  def _err(msg: String): Unit =
    println("| " * nesting + Console.RED + msg + Console.RESET)
  /** Logs `msg`, then runs `body` one indentation level deeper. */
  def _group[T](msg: String)(body: => T): T = {
    _log(msg)
    beginGroup()
    val out = body
    endGroup()
    out
  }
  /** Same behavior as `_group`; the macro supplies the call-site name as
    * `msg` (with argument values for `trace`, without for `trace_`).
    */
  def _trace[T](msg: String)(body: => T): T = {
    _log(msg)
    beginGroup()
    val out = body
    endGroup()
    out
  }
  def _trace_[T](msg: String)(body: => T): T = {
    _log(msg)
    beginGroup()
    val out = body
    endGroup()
    out
  }
  def beginGroup(): Unit =
    nesting += 1
  def endGroup(): Unit =
    nesting -= 1
}
object Logger {
  /** Macro for `group(msg)(body)`: rewrites the call to `_group(msg)(body)`
    * when logging is enabled, or collapses it to just `body` otherwise.
    */
  def groupRedirect[R: c.WeakTypeTag](
    c: Context
  )(msg: c.Tree)(body: c.Tree): c.Expr[R] = {
    import c.universe._
    Unused(msg)
    val target = c.macroApplication match {
      // Rebuild the application with the method renamed to "_" + name.
      case Apply(Apply(TypeApply(Select(base, name), typ), msg2), body2) =>
        val newName = TermName("_" + name.toString)
        Apply(Apply(TypeApply(Select(base, newName), typ), msg2), body2)
      case _ => throw new Error("Unsupported shape")
    }
    if (checkEnabled(c)) c.Expr(q"$target") else c.Expr(q"$body")
  }

  /** Macro for `trace(body)`: redirects to `_trace(msg)(body)` where `msg`
    * is built from the enclosing method's name plus its first parameter
    * list; disabled calls collapse to `body`.
    */
  def targetRedirect[R: c.WeakTypeTag](c: Context)(body: c.Tree): c.Expr[R] = {
    import c.universe._
    val target = c.macroApplication match {
      case Apply(TypeApply(Select(base, name), typ), body2) =>
        val newName = TermName("_" + name.toString)
        val owner = c.internal.enclosingOwner.asMethod
        val owner2 = owner.owner
        // Prefix with the enclosing object/class name when there is one.
        val parentObject = !owner2.isStatic
        val oname =
          if (parentObject) owner2.name.toString + "." + owner.name.toString
          else owner.name.toString
        val ownerName = Literal(Constant(oname))
        owner.paramLists match {
          case lst :: _ =>
            val lst2 = lst.map(x => q"$x")
            // drop(4) strips the "List" prefix of the rendered parameter list.
            val msg =
              if (lst2.isEmpty) List(q"$ownerName")
              else List(q"$ownerName + $lst2.toString().drop(4)")
            Apply(Apply(TypeApply(Select(base, newName), typ), msg), body2)
          case _ => throw new Error("Unsupported shape")
        }
      case _ => throw new Error("Unsupported shape")
    }
    if (checkEnabled(c)) c.Expr(q"$target") else c.Expr(q"$body")
  }

  /** Like [[targetRedirect]], but the message is only the enclosing method's
    * name — parameter values are not rendered (used by `trace_`).
    */
  def targetRedirect_[R: c.WeakTypeTag](c: Context)(body: c.Tree): c.Expr[R] = {
    import c.universe._
    val target = c.macroApplication match {
      case Apply(TypeApply(Select(base, name), typ), body2) =>
        val newName = TermName("_" + name.toString)
        val owner = c.internal.enclosingOwner.asMethod
        val owner2 = owner.owner
        val parentObject = !owner2.isStatic
        val oname =
          if (parentObject) owner2.name.toString + "." + owner.name.toString
          else owner.name.toString
        val ownerName = Literal(Constant(oname))
        owner.paramLists match {
          case _ :: _ =>
            val msg = List(q"$ownerName")
            Apply(Apply(TypeApply(Select(base, newName), typ), msg), body2)
          case _ => throw new Error("Unsupported shape")
        }
      case _ => throw new Error("Unsupported shape")
    }
    if (checkEnabled(c)) c.Expr(q"$target") else c.Expr(q"$body")
  }

  /** Macro for `log`/`warn`/`err`: redirects to the "_"-prefixed runtime
    * method, or compiles to an empty block when logging is disabled.
    */
  def funcRedirect(c: Context)(s: c.Tree): c.Expr[Unit] = {
    import c.universe._
    Unused(s)
    val target = c.macroApplication match {
      case Apply(Select(base, name), args) =>
        val newName = TermName("_" + name.toString)
        Apply(Select(base, newName), args)
      case _ => throw new Error("Unsupported shape")
    }
    if (checkEnabled(c)) c.Expr(q"$target") else c.Expr(q"{}")
  }

  /** Decides whether logging is enabled at this call site, driven by
    * compiler options of the form `-Xmacro-settings:[+|-]logging[@path]`:
    * `+`/`-` toggles logging, an optional `@path` restricts the toggle to
    * call sites whose enclosing owner's full name starts with `path`.
    * Later options override earlier ones; the default is enabled.
    */
  def checkEnabled(c: Context): Boolean = {
    val optPfx = "logging"
    val opts = c.settings.filter(_.matches(s"(\\+|\\-)$optPfx.*"))
    val owner = c.internal.enclosingOwner.fullName
    var enabled = true
    opts.foreach { opt =>
      val sign = opt.head
      val body = opt.tail.drop(optPfx.length)
      val status = sign == '+'
      val applies =
        if (body == "") true
        else {
          val pathPfx = body.head
          val path = body.tail
          pathPfx == '@' && owner.startsWith(path)
        }
      if (applies) enabled = status
    }
    enabled
  }
}

View File

@ -0,0 +1,5 @@
package org.enso.lint
object Unused {
  /** Explicitly consume a value without using it, silencing
    * unused-parameter/value warnings at the call site. The argument is
    * evaluated and then discarded.
    */
  def apply(arg: Any): Unit = ()
}

View File

@ -1,12 +1,12 @@
package org.enso.build package org.enso.build
import sbt.inputKey import sbt.{Tags, inputKey, taskKey}
import sbt.taskKey
/** /**
* Defines benchmarking related task keys. * Defines benchmarking related task keys.
*/ */
object BenchTasks { object BenchTasks {
lazy val Exclusive = Tags.Tag("Exclusive")
lazy val bench = taskKey[Unit]("Run Benchmarks") lazy val bench = taskKey[Unit]("Run Benchmarks")
lazy val benchOnly = inputKey[Unit]("Run benchmarks by name substring") lazy val benchOnly = inputKey[Unit]("Run benchmarks by name substring")
} }

View File

@ -1,4 +1 @@
addSbtPlugin("de.sciss" % "sbt-jflex" % "0.4.0")
addSbtPlugin(
"com.thoughtworks.sbt-api-mappings" % "sbt-api-mappings" % "2.0.0"
)