Mirror of https://github.com/enso-org/enso.git (synced 2024-12-23 10:42:05 +03:00)
Remove Runtime Reflection In Parser. (#267)
This commit is contained in:
parent 3961738c15
commit 59bcabeb21
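
In brief: this change removes Scala runtime reflection from the parser infrastructure. The reflection-based org.enso.data.ADT helper is deleted in favour of a compile-time macro in org.enso.flexer.ADT, flexer rule bodies are no longer wrapped in reify, and each rule now carries its body as source text that is re-parsed during macro-based code generation. For orientation, a minimal sketch of the pre-change approach, condensed from the deleted org.enso.data.ADT shown below; the "example" package and the Color hierarchy are hypothetical, and scala-reflect must be on the runtime classpath:

    package example // hypothetical package, used only for this sketch

    import scala.reflect.runtime.{currentMirror, universe => ru}

    sealed trait Color
    case object Red   extends Color
    case object Green extends Color

    object RuntimeADT {
      // Pre-#267 style: resolve every case object of a sealed trait at runtime,
      // exactly as the deleted org.enso.data.ADT did.
      def constructors[T](implicit ttag: ru.TypeTag[T]): Set[T] =
        ttag.tpe.typeSymbol.asClass.knownDirectSubclasses.map { symbol =>
          val module = currentMirror.staticModule(symbol.fullName)
          currentMirror.reflectModule(module).instance.asInstanceOf[T]
        }

      def main(args: Array[String]): Unit =
        println(constructors[Color]) // prints Set(Red, Green) (order not significant)
    }

After this commit the same call shape is provided by the org.enso.flexer.ADT.constructors[T] macro (added further down), which expands to a literal Set(...) at compile time, so no reflection happens at run time.
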
@@ -1,14 +0,0 @@ (deleted file: package org.enso.data, object ADT)
-package org.enso.data
-
-object ADT {
-  import reflect.runtime.universe.TypeTag
-
-  def constructors[T](implicit ttag: TypeTag[T]) = {
-    val subs = ttag.tpe.typeSymbol.asClass.knownDirectSubclasses
-    subs.map { symbol =>
-      val module = reflect.runtime.currentMirror.staticModule(symbol.fullName)
-      val clazz  = reflect.runtime.currentMirror.reflectModule(module)
-      clazz.instance.asInstanceOf[T]
-    }
-  }
-}
@@ -1,7 +1,7 @@
 package org.enso.syntax.text.ast
 
-import org.enso.data.ADT
 import org.enso.data.List1
+import org.enso.flexer.ADT
 import org.enso.syntax.text.ast.Repr.R
 import scalatags.Text.all._
 import scalatags.Text.TypedTag
@@ -194,7 +194,7 @@ object Doc {
       s"""var code = document.getElementById("$uniqueIDCode");
          |var btn = document.getElementById("$uniqueIDBtn").firstChild;
          |btn.data = btn.data == "Show" ? "Hide" : "Show";
          |code.style.display = code.style.display ==
          |"inline-block" ? "none" : "inline-block";""".stripMargin
        .replaceAll("\n", "")
     val btn = HTML.button(btnAction)(htmlIdBtn)("Show")
@@ -319,11 +319,10 @@ object Doc {
         Seq(HTML.div(htmlCls)(elem.html))
       }
     }
 
-    def getObjectName: String = {
-      getClass.getEnclosingClass.toString.split('$').last
-    }
   }
 
+  def getObjectName: String =
+    getClass.toString.split('$').last
 }
 }
@@ -1,6 +1,6 @@
 package org.enso.syntax.text.ast.text
 
-import org.enso.data.ADT
+import org.enso.flexer.ADT
 
 sealed trait Escape {
   val repr: String
@@ -7,8 +7,6 @@ import org.enso.data.List1
 import org.enso.syntax.text.ast.Doc._
 import org.enso.syntax.text.ast.Doc
 
-import scala.reflect.runtime.universe.reify
-
 case class DocParserDef() extends Parser[Doc] {
 
   //////////////////////////////////////////////////////////////////////////////
@@ -103,7 +101,7 @@ case class DocParserDef() extends Parser[Doc] {
     }
   }
 
-  ROOT || normalText || reify { text.onPushing(currentMatch) }
+  ROOT || normalText || text.onPushing(currentMatch)
 
   //////////////////////////////////////////////////////////////////////////////
   //// Tags ////////////////////////////////////////////////////////////////////
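
The remaining hunks in this file repeat the same mechanical change, so here is the pattern once, condensed from the lines just above and below (no new API is introduced here): rule bodies used to be quoted by hand with reify, and are now written directly because Rule.Builder.|| has become a macro (see the Macro.runRule and Rule hunks at the end of this diff):

    // Before: the body is quoted explicitly and carried as a runtime-reflection Tree.
    ROOT || normalText || reify { text.onPushing(currentMatch) }

    // After: the body is written directly; the || macro captures its source
    // at compile time and stores it on the Rule.
    ROOT || normalText || text.onPushing(currentMatch)
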
@@ -205,10 +203,10 @@ case class DocParserDef() extends Parser[Doc] {
     val notNewLine: Pattern = not(newline).many1
     val CODE: State = state.define("Code")
 
-  ROOT || code.inlinePattern || reify { code.onPushingInline(currentMatch) }
+  ROOT || code.inlinePattern || code.onPushingInline(currentMatch)
-  CODE || newline || reify { state.end(); state.begin(NEWLINE) }
+  CODE || newline || { state.end(); state.begin(NEWLINE) }
-  CODE || notNewLine || reify { code.onPushingMultiline(currentMatch) }
+  CODE || notNewLine || code.onPushingMultiline(currentMatch)
-  CODE || eof || reify { state.end(); documentation.onEOF() }
+  CODE || eof || { state.end(); documentation.onEOF() }
 
   //////////////////////////////////////////////////////////////////////////////
   //// Formatter ///////////////////////////////////////////////////////////////
@@ -287,15 +285,14 @@ case class DocParserDef() extends Parser[Doc] {
     val strikeoutTrigger: Char = Elem.Formatter.Strikeout.marker
   }
 
-  ROOT || formatter.boldTrigger || reify {
-    formatter.onPushing(Elem.Formatter.Bold)
-  }
-  ROOT || formatter.italicTrigger || reify {
-    formatter.onPushing(Elem.Formatter.Italic)
-  }
-  ROOT || formatter.strikeoutTrigger || reify {
-    formatter.onPushing(Elem.Formatter.Strikeout)
-  }
+  ROOT || formatter.boldTrigger || formatter
+    .onPushing(Elem.Formatter.Bold)
+
+  ROOT || formatter.italicTrigger || formatter
+    .onPushing(Elem.Formatter.Italic)
+
+  ROOT || formatter.strikeoutTrigger || formatter
+    .onPushing(Elem.Formatter.Strikeout)
 
   //////////////////////////////////////////////////////////////////////////////
   //// Header //////////////////////////////////////////////////////////////////
@@ -401,10 +398,10 @@ case class DocParserDef() extends Parser[Doc] {
     ).many1 >> eof
   }
 
-  ROOT || link.imagePattern || reify { link.onCreatingImage() }
+  ROOT || link.imagePattern || link.onCreatingImage()
-  ROOT || link.urlPattern || reify { link.onCreatingURL() }
+  ROOT || link.urlPattern || link.onCreatingURL()
-  ROOT || link.invalidPatternNewline || reify { link.onInvalidLinkNewline() }
+  ROOT || link.invalidPatternNewline || link.onInvalidLinkNewline()
-  ROOT || link.invalidPatternEOF || reify { link.onInvalidLinkEOF() }
+  ROOT || link.invalidPatternEOF || link.onInvalidLinkEOF()
 
   //////////////////////////////////////////////////////////////////////////////
   //// Indent Management & New line ////////////////////////////////////////////
@@ -524,9 +521,9 @@ case class DocParserDef() extends Parser[Doc] {
 
     val NEWLINE: State = state.define("Newline")
 
-  ROOT || newline || reify { state.begin(NEWLINE) }
+  ROOT || newline || state.begin(NEWLINE)
-  NEWLINE || indent.EOFPattern || reify { indent.onEOFPattern() }
+  NEWLINE || indent.EOFPattern || indent.onEOFPattern()
-  NEWLINE || indent.indentPattern || reify { indent.onIndentPattern() }
+  NEWLINE || indent.indentPattern || indent.onIndentPattern()
 
   //////////////////////////////////////////////////////////////////////////////
   //// Lists ///////////////////////////////////////////////////////////////////
@@ -597,8 +594,8 @@ case class DocParserDef() extends Parser[Doc] {
       : Pattern = indent.indentPattern >> unorderedListTrigger >> notNewLine
   }
 
-  NEWLINE || list.orderedPattern || reify { list.onOrdered() }
+  NEWLINE || list.orderedPattern || list.onOrdered()
-  NEWLINE || list.unorderedPattern || reify { list.onUnordered() }
+  NEWLINE || list.unorderedPattern || list.onUnordered()
 
   //////////////////////////////////////////////////////////////////////////////
   //// Section /////////////////////////////////////////////////////////////////
@@ -728,19 +725,19 @@ case class DocParserDef() extends Parser[Doc] {
       : Pattern = indent.indentPattern >> exampleTrigger >> indent.indentPattern
   }
 
-  NEWLINE || indent.emptyLine || reify { section.onNewRaw() }
-  NEWLINE || indent.emptyLine >> indent.emptyLine || reify {
-    section.onNewRawWithHeader()
-  }
-  ROOT || section.importantPattern || reify {
-    section.onNewMarked(Section.Marked.Important)
-  }
-  ROOT || section.infoPattern || reify {
-    section.onNewMarked(Section.Marked.Info)
-  }
-  ROOT || section.examplePattern || reify {
-    section.onNewMarked(Section.Marked.Example)
-  }
+  NEWLINE || indent.emptyLine || section.onNewRaw()
+
+  NEWLINE || indent.emptyLine >> indent.emptyLine || section
+    .onNewRawWithHeader()
+
+  ROOT || section.importantPattern || section
+    .onNewMarked(Section.Marked.Important)
+
+  ROOT || section.infoPattern || section
+    .onNewMarked(Section.Marked.Info)
+
+  ROOT || section.examplePattern || section
+    .onNewMarked(Section.Marked.Example)
 
   //////////////////////////////////////////////////////////////////////////////
   //// Documentation ///////////////////////////////////////////////////////////
@@ -804,5 +801,5 @@ case class DocParserDef() extends Parser[Doc] {
     }
   }
 
-  ROOT || eof || reify { documentation.onEOF() }
+  ROOT || eof || documentation.onEOF()
 }
@@ -9,7 +9,6 @@ import org.enso.flexer.automata.Pattern._
 import org.enso.syntax.text.AST
 
 import scala.annotation.tailrec
-import scala.reflect.runtime.universe.reify
 
 case class ParserDef() extends flexer.Parser[AST.Module] {
   import ParserDef2._
@@ -169,11 +168,11 @@ case class ParserDef() extends flexer.Parser[AST.Module] {
     val SFX_CHECK = state.define("Identifier Suffix Check")
   }
 
-  ROOT || ident._var || reify { ident.on(AST.Var(_)) }
+  ROOT || ident._var || ident.on(AST.Var(_))
-  ROOT || ident.cons || reify { ident.on(AST.Cons(_)) }
+  ROOT || ident.cons || ident.on(AST.Cons(_))
-  ROOT || "_" || reify { ident.on(AST.Blank()) }
+  ROOT || "_" || ident.on(AST.Blank())
-  ident.SFX_CHECK || ident.errSfx || reify { ident.onErrSfx() }
+  ident.SFX_CHECK || ident.errSfx || ident.onErrSfx()
-  ident.SFX_CHECK || always || reify { ident.onNoErrSfx() }
+  ident.SFX_CHECK || always || ident.onNoErrSfx()
 
   //////////////////
   //// Operator ////
@@ -223,12 +222,12 @@ case class ParserDef() extends flexer.Parser[AST.Module] {
     MOD_CHECK.parent = SFX_CHECK
   }
 
-  ROOT || opr.body || reify { opr.on(AST.Opr(_)) }
+  ROOT || opr.body || opr.on(AST.Opr(_))
-  ROOT || opr.opsNoMod || reify { opr.onNoMod(AST.Opr(_)) }
+  ROOT || opr.opsNoMod || opr.onNoMod(AST.Opr(_))
-  ROOT || opr.opsGrp || reify { opr.onGrp(AST.Opr(_)) }
+  ROOT || opr.opsGrp || opr.onGrp(AST.Opr(_))
-  opr.MOD_CHECK || "=" || reify { opr.onMod() }
+  opr.MOD_CHECK || "=" || opr.onMod()
-  opr.SFX_CHECK || opr.errSfx || reify { ident.onErrSfx() }
+  opr.SFX_CHECK || opr.errSfx || ident.onErrSfx()
-  opr.SFX_CHECK || always || reify { ident.onNoErrSfx() }
+  opr.SFX_CHECK || always || ident.onNoErrSfx()
 
   ////////////////
   //// NUMBER ////
@@ -278,10 +277,10 @@ case class ParserDef() extends flexer.Parser[AST.Module] {
     val PHASE2: State = state.define("Number Phase 2")
   }
 
-  ROOT || num.decimal || reify { num.onDecimal() }
+  ROOT || num.decimal || num.onDecimal()
-  num.PHASE2 || "_" >> alphaNum.many1 || reify { num.onExplicitBase() }
+  num.PHASE2 || "_" >> alphaNum.many1 || num.onExplicitBase()
-  num.PHASE2 || "_" || reify { num.onDanglingBase() }
+  num.PHASE2 || "_" || num.onDanglingBase()
-  num.PHASE2 || always || reify { num.onNoExplicitBase() }
+  num.PHASE2 || always || num.onNoExplicitBase()
 
   //////////////
   //// Text ////
@@ -459,49 +458,45 @@ case class ParserDef() extends flexer.Parser[AST.Module] {
     INTERPOLATE.parent = ROOT
   }
 
-  ROOT || '`' || reify { text.onInterpolateEnd() }
+  ROOT || '`' || text.onInterpolateEnd()
-  text.FMT || '`' || reify { text.onInterpolateBegin() }
+  text.FMT || '`' || text.onInterpolateBegin()
-  ROOT || "'" || reify { text.onBegin(text.FMT, Quote.Single) }
+  ROOT || "'" || text.onBegin(text.FMT, Quote.Single)
-  ROOT || "'''" || reify { text.onBegin(text.FMT, Quote.Triple) }
+  ROOT || "'''" || text.onBegin(text.FMT, Quote.Triple)
-  text.FMT || "'" || reify { text.onQuote(Quote.Single) }
+  text.FMT || "'" || text.onQuote(Quote.Single)
-  text.FMT || "'''" || reify { text.onQuote(Quote.Triple) }
+  text.FMT || "'''" || text.onQuote(Quote.Triple)
-  text.FMT || text.fmtSeg || reify { text.submitPlainSegment() }
+  text.FMT || text.fmtSeg || text.submitPlainSegment()
-  text.FMT || eof || reify { text.onEOF() }
+  text.FMT || eof || text.onEOF()
-  text.FMT || '\n' || reify { state.begin(text.NEWLINE) }
+  text.FMT || '\n' || state.begin(text.NEWLINE)
 
-  ROOT || "\"" || reify { text.onBegin(text.RAW, Quote.Single) }
+  ROOT || "\"" || text.onBegin(text.RAW, Quote.Single)
-  ROOT || "\"\"\"" || reify { text.onBegin(text.RAW, Quote.Triple) }
+  ROOT || "\"\"\"" || text.onBegin(text.RAW, Quote.Triple)
-  text.RAW || "\"" || reify { text.onQuote(Quote.Single) }
+  text.RAW || "\"" || text.onQuote(Quote.Single)
-  text.RAW || "$$$$$" || reify {}
+  text.RAW || "$$$$$" || {}
-  text.RAW || "\"\"\"" || reify { text.onQuote(Quote.Triple) }
+  text.RAW || "\"\"\"" || text.onQuote(Quote.Triple)
-  text.RAW || text.rawSeg || reify { text.submitPlainSegment() }
+  text.RAW || text.rawSeg || text.submitPlainSegment()
-  text.RAW || eof || reify { text.onEOF() }
+  text.RAW || eof || text.onEOF()
-  text.RAW || '\n' || reify { state.begin(text.NEWLINE) }
+  text.RAW || '\n' || state.begin(text.NEWLINE)
 
-  text.NEWLINE || space.opt || reify { text.onNewLine() }
+  text.NEWLINE || space.opt || text.onNewLine()
 
   AST.Text.Segment.Escape.Character.codes.foreach { code =>
-    import scala.reflect.runtime.universe._
-    val name = TermName(code.toString)
-    val char = q"text.Segment.Escape.Character.$name"
-    text.FMT || s"\\$code" || q"text.onEscape($char)"
+    val char = s"text.Segment.Escape.Character.$code"
+    text.FMT || s"\\$code" run s"text.onEscape($char)"
   }
 
   AST.Text.Segment.Escape.Control.codes.foreach { code =>
-    import scala.reflect.runtime.universe._
-    val name = TermName(code.toString)
-    val ctrl = q"text.Segment.Escape.Control.$name"
-    text.FMT || s"\\$code" || q"text.onEscape($ctrl)"
+    val ctrl = s"text.Segment.Escape.Control.$code"
+    text.FMT || s"\\$code" run s"text.onEscape($ctrl)"
  }
 
-  text.FMT || text.escape_u16 || reify { text.onEscapeU16() }
+  text.FMT || text.escape_u16 || text.onEscapeU16()
-  text.FMT || text.escape_u32 || reify { text.onEscapeU32() }
+  text.FMT || text.escape_u32 || text.onEscapeU32()
-  text.FMT || text.escape_int || reify { text.onEscapeInt() }
+  text.FMT || text.escape_int || text.onEscapeInt()
-  text.FMT || "\\\\" || reify { text.onEscapeSlash() }
+  text.FMT || "\\\\" || text.onEscapeSlash()
-  text.FMT || "\\'" || reify { text.onEscapeQuote() }
+  text.FMT || "\\'" || text.onEscapeQuote()
-  text.FMT || "\\\"" || reify { text.onEscapeRawQuote() }
+  text.FMT || "\\\"" || text.onEscapeRawQuote()
-  text.FMT || ("\\" >> text.fmtChar) || reify { text.onInvalidEscape() }
+  text.FMT || ("\\" >> text.fmtChar) || text.onInvalidEscape()
-  text.FMT || "\\" || reify { text.submitPlainSegment() }
+  text.FMT || "\\" || text.submitPlainSegment()
 
   //////////////
   /// Blocks ///
@@ -661,13 +656,13 @@ case class ParserDef() extends flexer.Parser[AST.Module] {
     val FIRSTCHAR = state.define("First Char")
   }
 
-  ROOT || newline || reify { block.onEndLine() }
+  ROOT || newline || block.onEndLine()
-  block.NEWLINE || space.opt >> newline || reify { block.onEmptyLine() }
+  block.NEWLINE || space.opt >> newline || block.onEmptyLine()
-  block.NEWLINE || space.opt >> eof || reify { block.onEOFLine() }
+  block.NEWLINE || space.opt >> eof || block.onEOFLine()
-  block.NEWLINE || space.opt || reify { block.onNewLine() }
+  block.NEWLINE || space.opt || block.onNewLine()
-  block.MODULE || space.opt >> newline || reify { block.onEmptyLine() }
+  block.MODULE || space.opt >> newline || block.onEmptyLine()
-  block.MODULE || space.opt || reify { block.onModuleBegin() }
+  block.MODULE || space.opt || block.onModuleBegin()
-  block.FIRSTCHAR || always || reify { state.end() }
+  block.FIRSTCHAR || always || state.end()
 
   ////////////////
   /// Defaults ///
@@ -685,9 +680,9 @@ case class ParserDef() extends flexer.Parser[AST.Module] {
     block.submitModule()
   }
 
-  ROOT || space || reify { off.on() }
+  ROOT || space || off.on()
-  ROOT || eof || reify { onEOF() }
+  ROOT || eof || onEOF()
-  ROOT || any || reify { onUnrecognized() }
+  ROOT || any || onUnrecognized()
 }
 
 object ParserDef2 {
@@ -7,8 +7,6 @@ import org.enso.syntax.text.spec.DocParserDef
 import scalatags.Text.TypedTag
 import scalatags.Text.{all => HTML}
 import HTML._
-import java.io.File
-import java.io.PrintWriter
 import flexer.Parser.{Result => res}
 import org.enso.data.List1
 import org.enso.syntax.text.AST.Block.{LineOf => Line}
@@ -54,6 +52,7 @@ class DocParser {
   //// HTML Rendering of Documentation /////////////////////////////////////////
   //////////////////////////////////////////////////////////////////////////////
 
+  // TODO remove this functionality from parser
   /**
     * Used to create HTML files from Doc with or without title after Doc Parser
    * Runner finished it's job
@@ -66,7 +65,7 @@ class DocParser {
     val htmlCode = renderHTML(documented.ast, documented.doc, cssFileName)
     val astLines = documented.ast.show().split("\n")
     val fileName = astLines.head.replaceAll("/", "")
-    saveHTMLToFile(path, fileName, htmlCode)
+    htmlCode
   }
 
   /**
@@ -119,22 +118,6 @@ object DocParser {
   def runMatched(input: String): Doc = new DocParser().runMatched(input)
   def run(input: String): Result[Doc] = new DocParser().run(input)
 
-  /**
-    * Saves HTML code to file
-    *
-    * @param path - path to file
-    * @param name - file name
-    * @param code - HTML code generated with Doc Parser
-    */
-  def saveHTMLToFile(
-    path: String,
-    name: String,
-    code: TypedTag[String]
-  ): Unit = {
-    val writer = new PrintWriter(new File(path + name + ".html"))
-    writer.write(code.toString)
-    writer.close()
-  }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
lib/flexer/src/main/scala/org/enso/flexer/ADT.scala (new file, 17 lines)
@@ -0,0 +1,17 @@
+package org.enso.flexer
+
+import scala.reflect.macros.blackbox.Context
+
+object ADT {
+  def constructors[T]: Set[T] = macro constructorsImpl[T]
+
+  def constructorsImpl[T: c.WeakTypeTag](c: Context): c.Expr[Set[T]] = {
+    import c.universe._
+
+    val subs = weakTypeTag[T].tpe.typeSymbol.asClass.knownDirectSubclasses.map {
+      symbol =>
+        q"${c.mirror.staticModule(symbol.fullName)}"
+    }
+    c.Expr[Set[T]](q"Set(..$subs)")
+  }
+}
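
For context: the macro above expands ADT.constructors[T] into a literal Set(...) of the known direct subobjects of the sealed trait T, so the call sites in Doc.scala and Escape.scala keep the same shape as before while dropping the runtime TypeTag. A hypothetical call site (the Marker hierarchy is illustrative only; the real callers are the Doc and Escape ADTs in this repository):

    import org.enso.flexer.ADT

    sealed trait Marker
    object Marker {
      case object Bold   extends Marker
      case object Italic extends Marker
    }

    // Expands at compile time to Set(Marker.Bold, Marker.Italic);
    // no scala-reflect is required at run time.
    val markers: Set[Marker] = ADT.constructors[Marker]
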
@@ -5,7 +5,7 @@ import org.enso.flexer.automata.State
 
 import scala.collection.immutable.Range
 import scala.collection.mutable
-import scala.reflect.runtime.universe._
+import scala.reflect.macros.blackbox.Context
 
 /** Creates update functions for given DFA ~ nextState : state -> state.
   * Each state has a pattern match on current utf code point.
@@ -13,7 +13,8 @@ import scala.reflect.runtime.universe._
   * The rest of UTF characters is dispatched by tree of if-else,
   * with O(log(N)) lookup.
   */
-case class Spec(dfa: DFA) {
+case class Spec[C <: Context](c: C, dfa: DFA) {
+  import c.universe._
   import Spec._
 
   val stateHasOverlappingRules = mutable.Map(0 -> false)
@@ -4,7 +4,7 @@ import org.enso.flexer.automata.NFA
 import org.enso.flexer.automata.Pattern
 import org.enso.flexer.state.Rule
 
-import scala.reflect.runtime.universe.Tree
+import scala.reflect.macros.blackbox.Context
 
 class State(val label: String, val ix: Int, val finish: () => Unit) {
   var parent: Option[State] = None
@@ -22,9 +22,9 @@ class State(val label: String, val ix: Int, val finish: () => Unit) {
   def ||(expr: Pattern): Rule.Builder =
     rule(expr)
 
-  def rules(): List[Rule] = {
+  def rules: List[Rule] = {
     val myRules = revRules.reverse
-    parent.map(myRules ++ _.rules()).getOrElse(myRules)
+    parent.map(myRules ++ _.rules).getOrElse(myRules)
   }
 
   private def ruleName(ruleIx: Int): String =
@@ -33,7 +33,7 @@ class State(val label: String, val ix: Int, val finish: () => Unit) {
   private def buildAutomata(): NFA = {
     val nfa = new NFA
     val start = nfa.addState()
-    val endpoints = rules().zipWithIndex.map {
+    val endpoints = rules.zipWithIndex.map {
       case (rule, ix) => buildRuleAutomata(nfa, start, ix, rule)
     }
     val end = nfa.addState()
@@ -82,14 +82,16 @@ class State(val label: String, val ix: Int, val finish: () => Unit) {
     }
   }
 
-  def generate(): Tree = {
-    import scala.reflect.runtime.universe._
+  def generate[C <: Context](c: C): c.Tree = {
+    import c.universe._
 
     val nfa = buildAutomata()
     val dfa = nfa.toDFA()
-    val state = Spec(dfa).generate(ix)
+    val state = Spec[c.type](c, dfa).generate(ix)
     val rs = rules.zipWithIndex.map {
       case (rule, ruleIx) =>
-        q"def ${TermName(ruleName(ruleIx))}() = ${rule.tree}"
+        val tree = c.parse(rule.tree)
+        q"def ${TermName(ruleName(ruleIx))}() = $tree"
     }
     q"..$state; ..$rs"
   }
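
The key mechanism in the generate change above: rule bodies now reach code generation as plain source strings (rule.tree: String, see the Rule hunk at the end of this diff) and are turned back into trees with c.parse inside the macro, instead of arriving as pre-built runtime-reflection trees. A minimal sketch of that step, assuming an arbitrary blackbox macro context c; the names spliceRule, body, and rule0 are illustrative, not from the repository:

    import scala.reflect.macros.blackbox.Context

    def spliceRule(c: Context)(body: String): c.Tree = {
      import c.universe._
      // Re-parse the stored source text into a Tree at macro-expansion time...
      val rhs = c.parse(body)          // e.g. body = "num.onDecimal()"
      // ...and splice it into a generated method, as State.generate does above.
      q"def rule0() = $rhs"
    }
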
@@ -4,11 +4,28 @@ import org.enso.flexer.Parser
 import org.enso.lint.Unused
 
 import scala.reflect.macros.blackbox.Context
-import scala.reflect.runtime.universe
 
 // FIXME: Needs to be refactored. Contains deprecated API usage
 object Macro {
 
+  def print(c: Context, msg: String) = c.echo(c.enclosingPosition, msg)
+
+  def runRule(c: Context)(program: c.Tree): c.Tree = {
+    import c.universe._
+    val tree = new Transformer {
+      override def transform(tree: Tree): Tree = tree match {
+        case Select(This(TypeName(_)), name) =>
+          super.transform(Ident(name))
+        case node => super.transform(node)
+      }
+    }.transform(program)
+
+    c.macroApplication match {
+      case Apply(Select(lhs, _), _) => q"$lhs.run(${showCode(tree)})"
+      case x => throw new Error("Unsupported shape")
+    }
+  }
+
   def compileImpl[T: c.WeakTypeTag, P: c.WeakTypeTag](
     c: Context
   )(p: c.Expr[P])(ev: c.Expr[P <:< Parser[T]]): c.Expr[() => P] = {
@@ -17,10 +34,7 @@ object Macro {
     val tree = p.tree
     val expr = q"$tree"
     val parser = c.eval(c.Expr[Parser[T]](c.untypecheck(expr.duplicate)))
-    val groups = c.internal
-      .createImporter(universe)
-      .importTree(universe.Block(parser.state.registry.map(_.generate()): _*))
+    val groups = q"..${parser.state.registry.map(_.generate(c))}"
 
     val (superClassName, tree2) = tree match {
       case Apply(Select(tree2 @ Select(_, name), _), _) => (name, tree2)
       case _ =>
@@ -32,21 +46,10 @@ object Macro {
       )
     }
 
-    val groupsRebind = new Transformer {
-      override def transform(tree: Tree): Tree = tree match {
-        case Select(Ident(base), name) =>
-          val base2 = if (base == superClassName) q"this" else Ident(base)
-          super.transform(Select(base2, name))
-        case node => super.transform(node)
-      }
-    }
-
-    val reboundGroups = groupsRebind.transform(groups)
-
     val addGroupDefs = new Transformer {
       override def transform(tree: Tree): Tree = tree match {
         case Template(parents, self, body) =>
-          val exprs = q"..$reboundGroups;None".asInstanceOf[Block].stats
+          val exprs = q"..$groups;None".asInstanceOf[Block].stats
           Template(parents, self, body ++ exprs)
         case node => super.transform(node)
       }
@@ -1,16 +1,12 @@
 package org.enso.flexer.state
 
 import org.enso.flexer.automata.Pattern
+import org.enso.flexer.spec.Macro
 
-import scala.reflect.runtime.universe.Expr
-import scala.reflect.runtime.universe.Tree
-
-final case class Rule(pattern: Pattern, tree: Tree)
+final case class Rule(pattern: Pattern, tree: String)
 object Rule {
   final case class Builder(pattern: Pattern, finalizer: Rule => Unit) {
-    def run(expr: Expr[_]): Unit = run(expr.tree)
-    def run(tree: Tree): Unit = finalizer(Rule(pattern, tree))
-    def ||(expr: Expr[_]) = run(expr)
-    def ||(expr: Tree) = run(expr)
+    def run(program: String): Unit = finalizer(Rule(pattern, program))
+    def ||(program: => Unit): Unit = macro Macro.runRule
   }
 }
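
Taken together with Macro.runRule above, this is how a rule such as ROOT || num.decimal || num.onDecimal() now works: State.||(pattern) still yields a Rule.Builder, but the second || is a macro that rewrites this.x selections to bare identifiers, prints the body back to source with showCode, and hands that string to run, where it is stored on the Rule and later re-parsed by State.generate. Roughly (a paraphrase of the expansion, not a literal compiler dump):

    // Written in a parser definition:
    ROOT || num.decimal || num.onDecimal()

    // Expanded by the Rule.Builder.|| macro (Macro.runRule) into approximately:
    (ROOT || num.decimal).run("num.onDecimal()")
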